feat: initialize project with core dependencies and game entry point

This commit is contained in:
2026-01-03 01:24:51 -05:00
commit 45d46ddac6
1382 changed files with 844553 additions and 0 deletions

21
node_modules/three/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License
Copyright © 2010-2025 three.js authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

85
node_modules/three/README.md generated vendored Normal file
View File

@@ -0,0 +1,85 @@
# three.js
[![NPM Package][npm]][npm-url]
[![Build Size][build-size]][build-size-url]
[![NPM Downloads][npm-downloads]][npmtrends-url]
[![jsDelivr Downloads][jsdelivr-downloads]][jsdelivr-url]
[![Discord][discord]][discord-url]
#### JavaScript 3D library
The aim of the project is to create an easy-to-use, lightweight, cross-browser, general-purpose 3D library. The current builds only include WebGL and WebGPU renderers but SVG and CSS3D renderers are also available as addons.
[Examples](https://threejs.org/examples/) —
[Docs](https://threejs.org/docs/) —
[Manual](https://threejs.org/manual/) —
[Wiki](https://github.com/mrdoob/three.js/wiki) —
[Migrating](https://github.com/mrdoob/three.js/wiki/Migration-Guide) —
[Questions](https://stackoverflow.com/questions/tagged/three.js) —
[Forum](https://discourse.threejs.org/) —
[Discord](https://discord.gg/56GBJwAnUS)
### Usage
This code creates a scene, a camera, and a geometric cube, and it adds the cube to the scene. It then creates a `WebGL` renderer for the scene and camera, and it adds that viewport to the `document.body` element. Finally, it animates the cube within the scene for the camera.
```javascript
import * as THREE from 'three';
const width = window.innerWidth, height = window.innerHeight;
// init
const camera = new THREE.PerspectiveCamera( 70, width / height, 0.01, 10 );
camera.position.z = 1;
const scene = new THREE.Scene();
const geometry = new THREE.BoxGeometry( 0.2, 0.2, 0.2 );
const material = new THREE.MeshNormalMaterial();
const mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
const renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setSize( width, height );
renderer.setAnimationLoop( animate );
document.body.appendChild( renderer.domElement );
// animation
function animate( time ) {
mesh.rotation.x = time / 2000;
mesh.rotation.y = time / 1000;
renderer.render( scene, camera );
}
```
If everything goes well, you should see [this](https://jsfiddle.net/w43x5Lgh/).
### Cloning this repository
Cloning the repo with all its history results in a ~2 GB download. If you don't need the whole history you can use the `depth` parameter to significantly reduce download size.
```sh
git clone --depth=1 https://github.com/mrdoob/three.js.git
```
### Change log
[Releases](https://github.com/mrdoob/three.js/releases)
[npm]: https://img.shields.io/npm/v/three
[npm-url]: https://www.npmjs.com/package/three
[build-size]: https://badgen.net/bundlephobia/minzip/three
[build-size-url]: https://bundlephobia.com/result?p=three
[npm-downloads]: https://img.shields.io/npm/dw/three
[npmtrends-url]: https://www.npmtrends.com/three
[jsdelivr-downloads]: https://data.jsdelivr.com/v1/package/npm/three/badge?style=rounded
[jsdelivr-url]: https://www.jsdelivr.com/package/npm/three
[discord]: https://img.shields.io/discord/685241246557667386
[discord-url]: https://discord.gg/56GBJwAnUS

78565
node_modules/three/build/three.cjs generated vendored Normal file

File diff suppressed because one or more lines are too long

59078
node_modules/three/build/three.core.js generated vendored Normal file

File diff suppressed because one or more lines are too long

6
node_modules/three/build/three.core.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

19059
node_modules/three/build/three.module.js generated vendored Normal file

File diff suppressed because one or more lines are too long

6
node_modules/three/build/three.module.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

652
node_modules/three/build/three.tsl.js generated vendored Normal file

File diff suppressed because one or more lines are too long

6
node_modules/three/build/three.tsl.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

81806
node_modules/three/build/three.webgpu.js generated vendored Normal file

File diff suppressed because one or more lines are too long

6
node_modules/three/build/three.webgpu.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

81577
node_modules/three/build/three.webgpu.nodes.js generated vendored Normal file

File diff suppressed because one or more lines are too long

6
node_modules/three/build/three.webgpu.nodes.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

13
node_modules/three/examples/fonts/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,13 @@
Copyright @ 2004 by MAGENTA Ltd. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of the fonts accompanying this license ("Fonts") and associated documentation files (the "Font Software"), to reproduce and distribute the Font Software, including without limitation the rights to use, copy, merge, publish, distribute, and/or sell copies of the Font Software, and to permit persons to whom the Font Software is furnished to do so, subject to the following conditions:
The above copyright and this permission notice shall be included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular the designs of glyphs or characters in the Fonts may be modified and additional glyphs or characters may be added to the Fonts, only if the fonts are renamed to names not containing the word "MgOpen", or if the modifications are accepted for inclusion in the Font Software itself by the each appointed Administrator.
This License becomes null and void to the extent applicable to Fonts or Font Software that has been modified and is distributed under the "MgOpen" name.
The Font Software may be sold as part of a larger software package but no copy of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL MAGENTA OR PERSONS OR BODIES IN CHARGE OF ADMINISTRATION AND MAINTENANCE OF THE FONT SOFTWARE BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.

View File

@@ -0,0 +1,91 @@
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
https://openfontlicense.org
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.
"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.
5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are
not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.

11
node_modules/three/examples/fonts/README.md generated vendored Normal file
View File

@@ -0,0 +1,11 @@
## MgOpen typefaces
# Source and License
https://web.archive.org/web/20050528114140/https://ellak.gr/fonts/mgopen/index.en
# Usage
Use Facetype.js to generate typeface.json fonts: https://gero3.github.io/facetype.js/
Collection of Google fonts as typeface data for usage with three.js: https://github.com/components-ai/typefaces

190
node_modules/three/examples/fonts/droid/NOTICE generated vendored Normal file
View File

@@ -0,0 +1,190 @@
Copyright (c) 2005-2008, The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

18
node_modules/three/examples/fonts/droid/README.txt generated vendored Normal file
View File

@@ -0,0 +1,18 @@
Copyright (C) 2008 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
##########
This directory contains the fonts for the platform. They are licensed
under the Apache 2 license.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

9
node_modules/three/examples/fonts/ttf/README.md generated vendored Normal file
View File

@@ -0,0 +1,9 @@
# Kenney Fonts
## Source
https://www.kenney.nl/assets/kenney-fonts
## License
CC0 1.0 Universal (CC0 1.0) Public Domain Dedication (https://creativecommons.org/publicdomain/zero/1.0/)

BIN
node_modules/three/examples/fonts/ttf/kenpixel.ttf generated vendored Normal file

Binary file not shown.

285
node_modules/three/examples/jsm/Addons.js generated vendored Normal file
View File

@@ -0,0 +1,285 @@
export * from './animation/AnimationClipCreator.js';
export * from './animation/CCDIKSolver.js';
export { default as WebGL } from './capabilities/WebGL.js';
export * from './controls/ArcballControls.js';
export * from './controls/DragControls.js';
export * from './controls/FirstPersonControls.js';
export * from './controls/FlyControls.js';
export * from './controls/MapControls.js';
export * from './controls/OrbitControls.js';
export * from './controls/PointerLockControls.js';
export * from './controls/TrackballControls.js';
export * from './controls/TransformControls.js';
export * from './csm/CSM.js';
export * from './csm/CSMFrustum.js';
export * from './csm/CSMHelper.js';
export * from './csm/CSMShader.js';
export * as Curves from './curves/CurveExtras.js';
export * from './curves/NURBSCurve.js';
export * from './curves/NURBSSurface.js';
export * from './curves/NURBSVolume.js';
export * as NURBSUtils from './curves/NURBSUtils.js';
export * from './effects/AnaglyphEffect.js';
export * from './effects/AsciiEffect.js';
export * from './effects/OutlineEffect.js';
export * from './effects/ParallaxBarrierEffect.js';
export * from './effects/StereoEffect.js';
export * from './environments/DebugEnvironment.js';
export * from './environments/RoomEnvironment.js';
export * from './exporters/DRACOExporter.js';
export * from './exporters/EXRExporter.js';
export * from './exporters/GLTFExporter.js';
export * from './exporters/KTX2Exporter.js';
export * from './exporters/OBJExporter.js';
export * from './exporters/PLYExporter.js';
export * from './exporters/STLExporter.js';
export * from './exporters/USDZExporter.js';
export * from './geometries/BoxLineGeometry.js';
export * from './geometries/ConvexGeometry.js';
export * from './geometries/DecalGeometry.js';
export * from './geometries/ParametricFunctions.js';
export * from './geometries/ParametricGeometry.js';
export * from './geometries/RoundedBoxGeometry.js';
export * from './geometries/TeapotGeometry.js';
export * from './geometries/TextGeometry.js';
export * from './helpers/LightProbeHelper.js';
export * from './helpers/OctreeHelper.js';
export * from './helpers/PositionalAudioHelper.js';
export * from './helpers/RectAreaLightHelper.js';
export * from './helpers/TextureHelper.js';
export * from './helpers/VertexNormalsHelper.js';
export * from './helpers/VertexTangentsHelper.js';
export * from './helpers/ViewHelper.js';
export * from './interactive/HTMLMesh.js';
export * from './interactive/InteractiveGroup.js';
export * from './interactive/SelectionBox.js';
export * from './interactive/SelectionHelper.js';
export * from './lights/LightProbeGenerator.js';
export * from './lights/RectAreaLightTexturesLib.js';
export * from './lights/RectAreaLightUniformsLib.js';
export * from './lines/Line2.js';
export * from './lines/LineGeometry.js';
export * from './lines/LineMaterial.js';
export * from './lines/LineSegments2.js';
export * from './lines/LineSegmentsGeometry.js';
export * from './lines/Wireframe.js';
export * from './lines/WireframeGeometry2.js';
export * from './loaders/3DMLoader.js';
export * from './loaders/3MFLoader.js';
export * from './loaders/AMFLoader.js';
export * from './loaders/BVHLoader.js';
export * from './loaders/ColladaLoader.js';
export * from './loaders/DDSLoader.js';
export * from './loaders/DRACOLoader.js';
export * from './loaders/EXRLoader.js';
export * from './loaders/FBXLoader.js';
export * from './loaders/FontLoader.js';
export * from './loaders/GCodeLoader.js';
export * from './loaders/GLTFLoader.js';
export * from './loaders/HDRLoader.js';
export * from './loaders/HDRCubeTextureLoader.js';
export * from './loaders/IESLoader.js';
export * from './loaders/KMZLoader.js';
export * from './loaders/KTX2Loader.js';
export * from './loaders/KTXLoader.js';
export * from './loaders/LDrawLoader.js';
export * from './loaders/LUT3dlLoader.js';
export * from './loaders/LUTCubeLoader.js';
export * from './loaders/LWOLoader.js';
export * from './loaders/LottieLoader.js';
export * from './loaders/MD2Loader.js';
export * from './loaders/MDDLoader.js';
export * from './loaders/MTLLoader.js';
export * from './loaders/NRRDLoader.js';
export * from './loaders/OBJLoader.js';
export * from './loaders/PCDLoader.js';
export * from './loaders/PDBLoader.js';
export * from './loaders/PLYLoader.js';
export * from './loaders/PVRLoader.js';
export * from './loaders/RGBELoader.js';
export * from './loaders/UltraHDRLoader.js';
export * from './loaders/STLLoader.js';
export * from './loaders/SVGLoader.js';
export * from './loaders/TDSLoader.js';
export * from './loaders/TGALoader.js';
export * from './loaders/TIFFLoader.js';
export * from './loaders/TTFLoader.js';
export * from './loaders/USDLoader.js';
export * from './loaders/VOXLoader.js';
export * from './loaders/VRMLLoader.js';
export * from './loaders/VTKLoader.js';
export * from './loaders/XYZLoader.js';
export * from './materials/MeshGouraudMaterial.js';
export * from './materials/LDrawConditionalLineMaterial.js';
export * from './materials/MeshPostProcessingMaterial.js';
export * from './math/Capsule.js';
export * from './math/ColorConverter.js';
export * from './math/ConvexHull.js';
export * from './math/ImprovedNoise.js';
export * from './math/Lut.js';
export * from './math/MeshSurfaceSampler.js';
export * from './math/OBB.js';
export * from './math/Octree.js';
export * from './math/SimplexNoise.js';
export * from './misc/ConvexObjectBreaker.js';
export * from './misc/GPUComputationRenderer.js';
export * from './misc/Gyroscope.js';
export * from './misc/MD2Character.js';
export * from './misc/MD2CharacterComplex.js';
export * from './misc/MorphAnimMesh.js';
export * from './misc/MorphBlendMesh.js';
export * from './misc/ProgressiveLightMap.js';
export * from './misc/RollerCoaster.js';
export * from './misc/TubePainter.js';
export * from './misc/Volume.js';
export * from './misc/VolumeSlice.js';
export * from './modifiers/CurveModifier.js';
export * from './modifiers/EdgeSplitModifier.js';
export * from './modifiers/SimplifyModifier.js';
export * from './modifiers/TessellateModifier.js';
export * from './objects/GroundedSkybox.js';
export * from './objects/Lensflare.js';
export * from './objects/MarchingCubes.js';
export * from './objects/Reflector.js';
export * from './objects/ReflectorForSSRPass.js';
export * from './objects/Refractor.js';
export * from './objects/ShadowMesh.js';
export * from './objects/Sky.js';
export * from './objects/Water.js';
export { Water as Water2 } from './objects/Water2.js';
export * from './physics/AmmoPhysics.js';
export * from './physics/RapierPhysics.js';
export * from './postprocessing/AfterimagePass.js';
export * from './postprocessing/BloomPass.js';
export * from './postprocessing/BokehPass.js';
export * from './postprocessing/ClearPass.js';
export * from './postprocessing/CubeTexturePass.js';
export * from './postprocessing/DotScreenPass.js';
export * from './postprocessing/EffectComposer.js';
export * from './postprocessing/FilmPass.js';
export * from './postprocessing/GlitchPass.js';
export * from './postprocessing/GTAOPass.js';
export * from './postprocessing/HalftonePass.js';
export * from './postprocessing/LUTPass.js';
export * from './postprocessing/MaskPass.js';
export * from './postprocessing/OutlinePass.js';
export * from './postprocessing/OutputPass.js';
export * from './postprocessing/Pass.js';
export * from './postprocessing/RenderPass.js';
export * from './postprocessing/RenderPixelatedPass.js';
export * from './postprocessing/SAOPass.js';
export * from './postprocessing/SMAAPass.js';
export * from './postprocessing/SSAARenderPass.js';
export * from './postprocessing/SSAOPass.js';
export * from './postprocessing/SSRPass.js';
export * from './postprocessing/SavePass.js';
export * from './postprocessing/ShaderPass.js';
export * from './postprocessing/TAARenderPass.js';
export * from './postprocessing/TexturePass.js';
export * from './postprocessing/UnrealBloomPass.js';
export * from './renderers/CSS2DRenderer.js';
export * from './renderers/CSS3DRenderer.js';
export * from './renderers/Projector.js';
export * from './renderers/SVGRenderer.js';
export * from './shaders/ACESFilmicToneMappingShader.js';
export * from './shaders/AfterimageShader.js';
export * from './shaders/BasicShader.js';
export * from './shaders/BleachBypassShader.js';
export * from './shaders/BlendShader.js';
export * from './shaders/BokehShader.js';
export { BokehShader as BokehShader2 } from './shaders/BokehShader2.js';
export * from './shaders/BrightnessContrastShader.js';
export * from './shaders/ColorCorrectionShader.js';
export * from './shaders/ColorifyShader.js';
export * from './shaders/ConvolutionShader.js';
export * from './shaders/CopyShader.js';
export * from './shaders/DOFMipMapShader.js';
export * from './shaders/DepthLimitedBlurShader.js';
export * from './shaders/DigitalGlitch.js';
export * from './shaders/DotScreenShader.js';
export * from './shaders/ExposureShader.js';
export * from './shaders/FXAAShader.js';
export * from './shaders/FilmShader.js';
export * from './shaders/FocusShader.js';
export * from './shaders/FreiChenShader.js';
export * from './shaders/GammaCorrectionShader.js';
export * from './shaders/GodRaysShader.js';
export * from './shaders/GTAOShader.js';
export * from './shaders/HalftoneShader.js';
export * from './shaders/HorizontalBlurShader.js';
export * from './shaders/HorizontalTiltShiftShader.js';
export * from './shaders/HueSaturationShader.js';
export * from './shaders/KaleidoShader.js';
export * from './shaders/LuminosityHighPassShader.js';
export * from './shaders/LuminosityShader.js';
export * from './shaders/MirrorShader.js';
export * from './shaders/NormalMapShader.js';
export * from './shaders/OutputShader.js';
export * from './shaders/RGBShiftShader.js';
export * from './shaders/SAOShader.js';
export * from './shaders/SMAAShader.js';
export * from './shaders/SSAOShader.js';
export * from './shaders/SSRShader.js';
export * from './shaders/SepiaShader.js';
export * from './shaders/SobelOperatorShader.js';
export * from './shaders/SubsurfaceScatteringShader.js';
export * from './shaders/TechnicolorShader.js';
export * from './shaders/ToonShader.js';
export * from './shaders/TriangleBlurShader.js';
export * from './shaders/UnpackDepthRGBAShader.js';
export * from './shaders/VelocityShader.js';
export * from './shaders/VerticalBlurShader.js';
export * from './shaders/VerticalTiltShiftShader.js';
export * from './shaders/VignetteShader.js';
export * from './shaders/VolumeShader.js';
export * from './shaders/WaterRefractionShader.js';
export * from './textures/FlakesTexture.js';
export * as BufferGeometryUtils from './utils/BufferGeometryUtils.js';
export * as CameraUtils from './utils/CameraUtils.js';
export * as GeometryCompressionUtils from './utils/GeometryCompressionUtils.js';
export * as GeometryUtils from './utils/GeometryUtils.js';
export * from './utils/LDrawUtils.js';
export * as SceneUtils from './utils/SceneUtils.js';
export * from './utils/ShadowMapViewer.js';
export * as SkeletonUtils from './utils/SkeletonUtils.js';
export * as SortUtils from './utils/SortUtils.js';
export * from './utils/WebGLTextureUtils.js';
export * from './utils/UVsDebug.js';
export * from './utils/WorkerPool.js';
export * from './webxr/ARButton.js';
export * from './webxr/OculusHandModel.js';
export * from './webxr/OculusHandPointerModel.js';
export * from './webxr/Text2D.js';
export * from './webxr/VRButton.js';
export * from './webxr/XRButton.js';
export * from './webxr/XRControllerModelFactory.js';
export * from './webxr/XREstimatedLight.js';
export * from './webxr/XRHandMeshModel.js';
export * from './webxr/XRHandModelFactory.js';
export * from './webxr/XRHandPrimitiveModel.js';
export * from './webxr/XRPlanes.js';

View File

@@ -0,0 +1,168 @@
import {
AnimationClip,
BooleanKeyframeTrack,
ColorKeyframeTrack,
NumberKeyframeTrack,
Vector3,
VectorKeyframeTrack
} from 'three';
/**
 * A utility class with factory methods for creating basic animation clips.
 *
 * @hideconstructor
 * @three_import import { AnimationClipCreator } from 'three/addons/animation/AnimationClipCreator.js';
 */
class AnimationClipCreator {

	/**
	 * Creates an animation clip that rotates a 3D object 360 degrees
	 * in the given period of time around the given axis.
	 *
	 * @param {number} period - The duration of the animation.
	 * @param {('x'|'y'|'z')} [axis='x'] - The axis of rotation.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreateRotationAnimation( period, axis = 'x' ) {

		const track = new NumberKeyframeTrack( `.rotation[${ axis }]`, [ 0, period ], [ 0, 360 ] );

		return new AnimationClip( '', period, [ track ] );

	}

	/**
	 * Creates an animation clip that scales a 3D object from `0` to `1`
	 * in the given period of time along the given axis.
	 *
	 * @param {number} period - The duration of the animation.
	 * @param {('x'|'y'|'z')} [axis='x'] - The axis to scale the 3D object along.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreateScaleAxisAnimation( period, axis = 'x' ) {

		const track = new NumberKeyframeTrack( `.scale[${ axis }]`, [ 0, period ], [ 0, 1 ] );

		return new AnimationClip( '', period, [ track ] );

	}

	/**
	 * Creates an animation clip that translates a 3D object in a shake pattern
	 * in the given period.
	 *
	 * @param {number} duration - The duration of the animation.
	 * @param {Vector3} shakeScale - The scale of the shake.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreateShakeAnimation( duration, shakeScale ) {

		const times = [];
		const values = [];
		const offset = new Vector3();

		// Ten random position keyframes per second of animation.
		const keyframeCount = duration * 10;

		for ( let i = 0; i < keyframeCount; i ++ ) {

			times.push( i / 10 );

			offset.set(
				Math.random() * 2 - 1,
				Math.random() * 2 - 1,
				Math.random() * 2 - 1
			).multiply( shakeScale ).toArray( values, values.length );

		}

		const track = new VectorKeyframeTrack( '.position', times, values );

		return new AnimationClip( '', duration, [ track ] );

	}

	/**
	 * Creates an animation clip that scales a 3D object in a pulse pattern
	 * in the given period.
	 *
	 * @param {number} duration - The duration of the animation.
	 * @param {number} pulseScale - The scale of the pulse.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreatePulsationAnimation( duration, pulseScale ) {

		const times = [];
		const values = [];
		const scale = new Vector3();

		// Ten random uniform-scale keyframes per second of animation.
		const keyframeCount = duration * 10;

		for ( let i = 0; i < keyframeCount; i ++ ) {

			times.push( i / 10 );

			const factor = Math.random() * pulseScale;
			scale.set( factor, factor, factor ).toArray( values, values.length );

		}

		const track = new VectorKeyframeTrack( '.scale', times, values );

		return new AnimationClip( '', duration, [ track ] );

	}

	/**
	 * Creates an animation clip that toggles the visibility of a 3D object.
	 *
	 * @param {number} duration - The duration of the animation.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreateVisibilityAnimation( duration ) {

		const track = new BooleanKeyframeTrack(
			'.visible',
			[ 0, duration / 2, duration ],
			[ true, false, true ]
		);

		return new AnimationClip( '', duration, [ track ] );

	}

	/**
	 * Creates an animation clip that animates the `color` property of a 3D object's
	 * material.
	 *
	 * @param {number} duration - The duration of the animation.
	 * @param {Array<Color>} colors - An array of colors that should be sequentially animated.
	 * @return {AnimationClip} The created animation clip.
	 */
	static CreateMaterialColorAnimation( duration, colors ) {

		const times = [];
		const values = [];

		// Spread keyframes evenly so the last color lands exactly at `duration`;
		// a single color degenerates to one keyframe at t = 0.
		const timeStep = ( colors.length > 1 ) ? duration / ( colors.length - 1 ) : 0;

		for ( const [ i, color ] of colors.entries() ) {

			times.push( i * timeStep );
			values.push( color.r, color.g, color.b );

		}

		const track = new ColorKeyframeTrack( '.material.color', times, values );

		return new AnimationClip( '', duration, [ track ] );

	}

}
export { AnimationClipCreator };

View File

@@ -0,0 +1,591 @@
import {
BufferAttribute,
BufferGeometry,
Color,
Line,
LineBasicMaterial,
Matrix4,
Mesh,
MeshBasicMaterial,
Object3D,
Quaternion,
SphereGeometry,
Vector3
} from 'three';
// Module-level scratch objects, reused across all solver/helper updates to
// avoid allocating new vectors/quaternions/matrices every frame. Not safe to
// hold across calls — their contents are overwritten constantly.
const _quaternion = new Quaternion();
const _targetPos = new Vector3();
const _targetVec = new Vector3();
const _effectorPos = new Vector3();
const _effectorVec = new Vector3();
const _linkPos = new Vector3();
const _invLinkQ = new Quaternion();
const _linkScale = new Vector3();
const _axis = new Vector3();
const _vector = new Vector3();
const _matrix = new Matrix4();
/**
 * This class solves the Inverse Kinematics Problem with a [CCD Algorithm](https://web.archive.org/web/20221206080850/https://sites.google.com/site/auraliusproject/ccd-algorithm).
 *
 * `CCDIKSolver` is designed to work with instances of {@link SkinnedMesh}.
 *
 * @three_import import { CCDIKSolver } from 'three/addons/animation/CCDIKSolver.js';
 */
class CCDIKSolver {

	/**
	 * @param {SkinnedMesh} mesh - The skinned mesh.
	 * @param {Array<CCDIKSolver~IK>} [iks=[]] - The IK objects.
	 */
	constructor( mesh, iks = [] ) {

		/**
		 * The skinned mesh.
		 *
		 * @type {SkinnedMesh}
		 */
		this.mesh = mesh;

		/**
		 * The IK objects.
		 *
		 * @type {Array<CCDIKSolver~IK>}
		 */
		this.iks = iks;

		// Per-chain snapshots of link quaternions, taken before solving so the
		// solved pose can be blended with the original pose (blendFactor < 1).
		this._initialQuaternions = [];
		// Scratch quaternion used for the slerp in the blending step.
		this._workingQuaternion = new Quaternion();

		for ( const ik of iks ) {

			const chainQuats = [];

			for ( let i = 0; i < ik.links.length; i ++ ) {

				chainQuats.push( new Quaternion() );

			}

			this._initialQuaternions.push( chainQuats );

		}

		this._valid();

	}

	/**
	 * Updates all IK bones by solving the CCD algorithm.
	 *
	 * @param {number} [globalBlendFactor=1.0] - Blend factor applied if an IK chain doesn't have its own .blendFactor.
	 * @return {CCDIKSolver} A reference to this instance.
	 */
	update( globalBlendFactor = 1.0 ) {

		const iks = this.iks;

		for ( let i = 0, il = iks.length; i < il; i ++ ) {

			this.updateOne( iks[ i ], globalBlendFactor );

		}

		return this;

	}

	/**
	 * Updates one IK bone solving the CCD algorithm.
	 *
	 * @param {CCDIKSolver~IK} ik - The IK to update.
	 * @param {number} [overrideBlend=1.0] - If the IK object does not define `blendFactor`, this value is used.
	 * @return {CCDIKSolver} A reference to this instance.
	 */
	updateOne( ik, overrideBlend = 1.0 ) {

		const chainBlend = ik.blendFactor !== undefined ? ik.blendFactor : overrideBlend;
		const bones = this.mesh.skeleton.bones;
		const chainIndex = this.iks.indexOf( ik );
		const initialQuaternions = this._initialQuaternions[ chainIndex ];

		// for reference overhead reduction in loop
		const math = Math;

		const effector = bones[ ik.effector ];
		const target = bones[ ik.target ];

		// don't use getWorldPosition() here for the performance
		// because it calls updateMatrixWorld( true ) inside.
		_targetPos.setFromMatrixPosition( target.matrixWorld );

		const links = ik.links;
		const iteration = ik.iteration !== undefined ? ik.iteration : 1;

		// Snapshot the pre-solve link rotations so the result can be blended
		// with the original pose at the end of this method.
		if ( chainBlend < 1.0 ) {

			for ( let j = 0; j < links.length; j ++ ) {

				const linkIndex = links[ j ].index;

				initialQuaternions[ j ].copy( bones[ linkIndex ].quaternion );

			}

		}

		for ( let i = 0; i < iteration; i ++ ) {

			let rotated = false;

			for ( let j = 0, jl = links.length; j < jl; j ++ ) {

				const link = bones[ links[ j ].index ];

				// skip this link and following links
				if ( links[ j ].enabled === false ) break;

				const limitation = links[ j ].limitation;
				const rotationMin = links[ j ].rotationMin;
				const rotationMax = links[ j ].rotationMax;

				// don't use getWorldPosition/Quaternion() here for the performance
				// because they call updateMatrixWorld( true ) inside.
				link.matrixWorld.decompose( _linkPos, _invLinkQ, _linkScale );
				_invLinkQ.invert();
				_effectorPos.setFromMatrixPosition( effector.matrixWorld );

				// work in link world
				_effectorVec.subVectors( _effectorPos, _linkPos );
				_effectorVec.applyQuaternion( _invLinkQ );
				_effectorVec.normalize();

				_targetVec.subVectors( _targetPos, _linkPos );
				_targetVec.applyQuaternion( _invLinkQ );
				_targetVec.normalize();

				// Angle between effector and target directions; clamp the dot
				// product into [-1, 1] to guard acos against floating-point drift.
				let angle = _targetVec.dot( _effectorVec );

				if ( angle > 1.0 ) {

					angle = 1.0;

				} else if ( angle < - 1.0 ) {

					angle = - 1.0;

				}

				angle = math.acos( angle );

				// skip if changing angle is too small to prevent vibration of bone
				if ( angle < 1e-5 ) continue;

				if ( ik.minAngle !== undefined && angle < ik.minAngle ) {

					angle = ik.minAngle;

				}

				if ( ik.maxAngle !== undefined && angle > ik.maxAngle ) {

					angle = ik.maxAngle;

				}

				// Rotate this link about the axis that brings the effector
				// toward the target.
				_axis.crossVectors( _effectorVec, _targetVec );
				_axis.normalize();

				_quaternion.setFromAxisAngle( _axis, angle );
				link.quaternion.multiply( _quaternion );

				// TODO: re-consider the limitation specification
				if ( limitation !== undefined ) {

					let c = link.quaternion.w;

					if ( c > 1.0 ) c = 1.0;

					const c2 = math.sqrt( 1 - c * c );
					link.quaternion.set( limitation.x * c2,
						limitation.y * c2,
						limitation.z * c2,
						c );

				}

				if ( rotationMin !== undefined ) {

					link.rotation.setFromVector3( _vector.setFromEuler( link.rotation ).max( rotationMin ) );

				}

				if ( rotationMax !== undefined ) {

					link.rotation.setFromVector3( _vector.setFromEuler( link.rotation ).min( rotationMax ) );

				}

				link.updateMatrixWorld( true );

				rotated = true;

			}

			// Converged: no link moved during this pass, further iterations
			// would be no-ops.
			if ( ! rotated ) break;

		}

		// Blend between the pre-solve snapshot and the solved pose.
		if ( chainBlend < 1.0 ) {

			for ( let j = 0; j < links.length; j ++ ) {

				const linkIndex = links[ j ].index;
				const link = bones[ linkIndex ];

				this._workingQuaternion.copy( initialQuaternions[ j ] ).slerp( link.quaternion, chainBlend );

				link.quaternion.copy( this._workingQuaternion );
				link.updateMatrixWorld( true );

			}

		}

		return this;

	}

	/**
	 * Creates a helper for visualizing the CCDIK.
	 *
	 * @param {number} sphereSize - The sphere size.
	 * @return {CCDIKHelper} The created helper.
	 */
	createHelper( sphereSize ) {

		return new CCDIKHelper( this.mesh, this.iks, sphereSize );

	}

	// private methods

	// Sanity-checks the chains: each link is expected to be the direct parent
	// of the previous bone (starting from the effector). Warns, does not throw.
	_valid() {

		const iks = this.iks;
		const bones = this.mesh.skeleton.bones;

		for ( let i = 0, il = iks.length; i < il; i ++ ) {

			const ik = iks[ i ];
			const effector = bones[ ik.effector ];
			const links = ik.links;
			let link0, link1;

			link0 = effector;

			for ( let j = 0, jl = links.length; j < jl; j ++ ) {

				link1 = bones[ links[ j ].index ];

				if ( link0.parent !== link1 ) {

					console.warn( 'THREE.CCDIKSolver: bone ' + link0.name + ' is not the child of bone ' + link1.name );

				}

				link0 = link1;

			}

		}

	}

}
// Returns the bone's world position transformed by the given inverse matrix
// (i.e. expressed in that matrix's local space). The result is written into
// the shared `_vector` scratch object — consume it before the next call.
function getPosition( bone, matrixWorldInv ) {

	_vector.setFromMatrixPosition( bone.matrixWorld );

	return _vector.applyMatrix4( matrixWorldInv );

}
// Writes the bone's (local-space) position into a flat position-attribute
// array at the given vertex index.
function setPositionOfBoneToAttributeArray( array, index, bone, matrixWorldInv ) {

	const position = getPosition( bone, matrixWorldInv );
	const offset = index * 3;

	array[ offset ] = position.x;
	array[ offset + 1 ] = position.y;
	array[ offset + 2 ] = position.z;

}
/**
 * Helper for visualizing IK bones.
 *
 * @augments Object3D
 * @three_import import { CCDIKHelper } from 'three/addons/animation/CCDIKSolver.js';
 */
class CCDIKHelper extends Object3D {

	/**
	 * @param {SkinnedMesh} mesh - The skinned mesh.
	 * @param {Array<CCDIKSolver~IK>} [iks=[]] - The IK objects.
	 * @param {number} [sphereSize=0.25] - The sphere size.
	 */
	constructor( mesh, iks = [], sphereSize = 0.25 ) {

		super();

		/**
		 * The skinned mesh this helper refers to.
		 *
		 * @type {SkinnedMesh}
		 */
		this.root = mesh;

		/**
		 * The IK objects.
		 *
		 * @type {Array<CCDIKSolver~IK>}
		 */
		this.iks = iks;

		// The helper mirrors the mesh's world transform manually inside
		// updateMatrixWorld(), so automatic matrix updates are disabled.
		this.matrix.copy( mesh.matrixWorld );
		this.matrixAutoUpdate = false;

		/**
		 * The helper's sphere geometry, shared by all target/effector/link meshes.
		 *
		 * @type {SphereGeometry}
		 */
		this.sphereGeometry = new SphereGeometry( sphereSize, 16, 8 );

		/**
		 * The material for the target spheres.
		 *
		 * @type {MeshBasicMaterial}
		 */
		this.targetSphereMaterial = new MeshBasicMaterial( {
			color: new Color( 0xff8888 ),
			depthTest: false,
			depthWrite: false,
			transparent: true
		} );

		/**
		 * The material for the effector spheres.
		 *
		 * @type {MeshBasicMaterial}
		 */
		this.effectorSphereMaterial = new MeshBasicMaterial( {
			color: new Color( 0x88ff88 ),
			depthTest: false,
			depthWrite: false,
			transparent: true
		} );

		/**
		 * The material for the link spheres.
		 *
		 * @type {MeshBasicMaterial}
		 */
		this.linkSphereMaterial = new MeshBasicMaterial( {
			color: new Color( 0x8888ff ),
			depthTest: false,
			depthWrite: false,
			transparent: true
		} );

		/**
		 * A global line material.
		 *
		 * @type {LineBasicMaterial}
		 */
		this.lineMaterial = new LineBasicMaterial( {
			color: new Color( 0xff0000 ),
			depthTest: false,
			depthWrite: false,
			transparent: true
		} );

		this._init();

	}

	// Synchronizes the helper's spheres and lines with the current bone
	// positions, expressed in the mesh's local space. Children are consumed
	// in the exact order _init() added them (target, effector, links, line
	// — per chain).
	updateMatrixWorld( force ) {

		const mesh = this.root;

		if ( this.visible ) {

			let offset = 0;

			const iks = this.iks;
			const bones = mesh.skeleton.bones;

			_matrix.copy( mesh.matrixWorld ).invert();

			for ( let i = 0, il = iks.length; i < il; i ++ ) {

				const ik = iks[ i ];

				const targetBone = bones[ ik.target ];
				const effectorBone = bones[ ik.effector ];

				const targetMesh = this.children[ offset ++ ];
				const effectorMesh = this.children[ offset ++ ];

				targetMesh.position.copy( getPosition( targetBone, _matrix ) );
				effectorMesh.position.copy( getPosition( effectorBone, _matrix ) );

				for ( let j = 0, jl = ik.links.length; j < jl; j ++ ) {

					const link = ik.links[ j ];
					const linkBone = bones[ link.index ];

					const linkMesh = this.children[ offset ++ ];

					linkMesh.position.copy( getPosition( linkBone, _matrix ) );

				}

				const line = this.children[ offset ++ ];
				const array = line.geometry.attributes.position.array;

				setPositionOfBoneToAttributeArray( array, 0, targetBone, _matrix );
				setPositionOfBoneToAttributeArray( array, 1, effectorBone, _matrix );

				for ( let j = 0, jl = ik.links.length; j < jl; j ++ ) {

					const link = ik.links[ j ];
					const linkBone = bones[ link.index ];
					setPositionOfBoneToAttributeArray( array, j + 2, linkBone, _matrix );

				}

				line.geometry.attributes.position.needsUpdate = true;

			}

		}

		// Keep the helper aligned with the mesh even while invisible.
		this.matrix.copy( mesh.matrixWorld );

		super.updateMatrixWorld( force );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance.
	 * Call this method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.sphereGeometry.dispose();

		this.targetSphereMaterial.dispose();
		this.effectorSphereMaterial.dispose();
		this.linkSphereMaterial.dispose();
		this.lineMaterial.dispose();

		const children = this.children;

		for ( let i = 0; i < children.length; i ++ ) {

			const child = children[ i ];

			// Line geometries are created per chain in _init(); the sphere
			// geometry is shared and disposed above.
			if ( child.isLine ) child.geometry.dispose();

		}

	}

	// private method

	// Builds the visualization: per chain, one target sphere, one effector
	// sphere, one sphere per link, then one polyline through all of them.
	// updateMatrixWorld() relies on exactly this child order.
	_init() {

		const scope = this;
		const iks = this.iks;

		function createLineGeometry( ik ) {

			const geometry = new BufferGeometry();
			const vertices = new Float32Array( ( 2 + ik.links.length ) * 3 );
			geometry.setAttribute( 'position', new BufferAttribute( vertices, 3 ) );

			return geometry;

		}

		function createTargetMesh() {

			return new Mesh( scope.sphereGeometry, scope.targetSphereMaterial );

		}

		function createEffectorMesh() {

			return new Mesh( scope.sphereGeometry, scope.effectorSphereMaterial );

		}

		function createLinkMesh() {

			return new Mesh( scope.sphereGeometry, scope.linkSphereMaterial );

		}

		function createLine( ik ) {

			return new Line( createLineGeometry( ik ), scope.lineMaterial );

		}

		for ( let i = 0, il = iks.length; i < il; i ++ ) {

			const ik = iks[ i ];

			this.add( createTargetMesh() );
			this.add( createEffectorMesh() );

			for ( let j = 0, jl = ik.links.length; j < jl; j ++ ) {

				this.add( createLinkMesh() );

			}

			this.add( createLine( ik ) );

		}

	}

}
/**
* This type represents IK configuration objects.
*
* @typedef {Object} CCDIKSolver~IK
* @property {number} target - The target bone index which refers to a bone in the `Skeleton.bones` array.
* @property {number} effector - The effector bone index which refers to a bone in the `Skeleton.bones` array.
* @property {Array<CCDIKSolver~BoneLink>} links - An array of bone links.
* @property {number} [iteration=1] - Iteration number of calculation. Smaller is faster but less precise.
* @property {number} [minAngle] - Minimum rotation angle in a step in radians.
 * @property {number} [maxAngle] - Maximum rotation angle in a step in radians.
* @property {number} [blendFactor] - The blend factor.
**/
/**
* This type represents bone links.
*
* @typedef {Object} CCDIKSolver~BoneLink
* @property {number} index - The index of a linked bone which refers to a bone in the `Skeleton.bones` array.
 * @property {Vector3} [limitation] - Rotation axis.
* @property {number} [rotationMin] - Rotation minimum limit.
* @property {number} [rotationMax] - Rotation maximum limit.
* @property {boolean} [enabled=true] - Whether the link is enabled or not.
**/
export { CCDIKSolver, CCDIKHelper };

113
node_modules/three/examples/jsm/capabilities/WebGL.js generated vendored Normal file
View File

@@ -0,0 +1,113 @@
/**
 * A utility module with basic WebGL 2 capability testing.
 *
 * @hideconstructor
 * @three_import import WebGL from 'three/addons/capabilities/WebGL.js';
 */
class WebGL {

	/**
	 * Returns `true` if WebGL 2 is available.
	 *
	 * @return {boolean} Whether WebGL 2 is available or not.
	 */
	static isWebGL2Available() {

		try {

			const canvas = document.createElement( 'canvas' );

			if ( ! window.WebGL2RenderingContext ) return false;

			return canvas.getContext( 'webgl2' ) !== null;

		} catch ( e ) {

			// No DOM, or context creation blew up — treat as unavailable.
			return false;

		}

	}

	/**
	 * Returns `true` if the given color space is available. This method can only be used
	 * if WebGL 2 is supported.
	 *
	 * @param {string} colorSpace - The color space to test.
	 * @return {boolean} Whether the given color space is available or not.
	 */
	static isColorSpaceAvailable( colorSpace ) {

		try {

			const canvas = document.createElement( 'canvas' );
			const gl = window.WebGL2RenderingContext && canvas.getContext( 'webgl2' );

			// Round-trip the assignment: unsupported color spaces are ignored
			// by the context, so reading the property back exposes support.
			gl.drawingBufferColorSpace = colorSpace;

			return gl.drawingBufferColorSpace === colorSpace; // deepscan-disable-line SAME_OPERAND_VALUE

		} catch ( e ) {

			return false;

		}

	}

	/**
	 * Returns a `div` element representing a formatted error message that can be appended in
	 * web sites if WebGL 2 isn't supported.
	 *
	 * @return {HTMLDivElement} A `div` element representing a formatted error message that WebGL 2 isn't supported.
	 */
	static getWebGL2ErrorMessage() {

		return this._getErrorMessage( 2 );

	}

	// private

	// Builds the styled <div id="webglmessage"> element. Blames the graphics
	// card when the browser exposes the context constructor, the browser
	// otherwise.
	static _getErrorMessage( version ) {

		const names = {
			1: 'WebGL',
			2: 'WebGL 2'
		};

		const contexts = {
			1: window.WebGLRenderingContext,
			2: window.WebGL2RenderingContext
		};

		const element = document.createElement( 'div' );
		element.id = 'webglmessage';

		Object.assign( element.style, {
			fontFamily: 'monospace',
			fontSize: '13px',
			fontWeight: 'normal',
			textAlign: 'center',
			background: '#fff',
			color: '#000',
			padding: '1.5em',
			width: '400px',
			margin: '5em auto 0'
		} );

		let message = 'Your $0 does not seem to support <a href="http://khronos.org/webgl/wiki/Getting_a_WebGL_Implementation" style="color:#000">$1</a>';

		message = message.replace( '$0', contexts[ version ] ? 'graphics card' : 'browser' );
		message = message.replace( '$1', names[ version ] );

		element.innerHTML = message;

		return element;

	}

}
export default WebGL;

59
node_modules/three/examples/jsm/capabilities/WebGPU.js generated vendored Normal file
View File

@@ -0,0 +1,59 @@
// Module-load-time WebGPU probe. The `navigator.gpu` check alone covers
// environments without `window` (e.g. workers); in window contexts the
// adapter is additionally requested via top-level await, since `navigator.gpu`
// can exist while no usable adapter is available.
let isAvailable = ( typeof navigator !== 'undefined' && navigator.gpu !== undefined );
if ( typeof window !== 'undefined' && isAvailable ) {
	isAvailable = Boolean( await navigator.gpu.requestAdapter() );
}
/**
 * A utility module with basic WebGPU capability testing.
 *
 * @hideconstructor
 * @three_import import WebGPU from 'three/addons/capabilities/WebGPU.js';
 */
class WebGPU {

	/**
	 * Returns `true` if WebGPU is available.
	 *
	 * @return {boolean} Whether WebGPU is available or not.
	 */
	static isAvailable() {

		// Result of the module-load-time probe above.
		return !! isAvailable;

	}

	/**
	 * Returns a `div` element representing a formatted error message that can be appended in
	 * web sites if WebGPU isn't supported.
	 *
	 * @return {HTMLDivElement} A `div` element representing a formatted error message that WebGPU isn't supported.
	 */
	static getErrorMessage() {

		const element = document.createElement( 'div' );
		element.id = 'webgpumessage';

		Object.assign( element.style, {
			fontFamily: 'monospace',
			fontSize: '13px',
			fontWeight: 'normal',
			textAlign: 'center',
			background: '#fff',
			color: '#000',
			padding: '1.5em',
			maxWidth: '400px',
			margin: '5em auto 0'
		} );

		element.innerHTML = 'Your browser does not support <a href="https://gpuweb.github.io/gpuweb/" style="color:blue">WebGPU</a> yet';

		return element;

	}

}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,452 @@
import {
Controls,
Matrix4,
Plane,
Raycaster,
Vector2,
Vector3,
MOUSE,
TOUCH
} from 'three';
// Module-scoped scratch objects and shared interaction state. These are
// shared by ALL DragControls instances, so only one drag/hover interaction
// is tracked at a time.
const _plane = new Plane();
const _pointer = new Vector2();
const _offset = new Vector3();
const _diff = new Vector2();
const _previousPointer = new Vector2();
const _intersection = new Vector3();
const _worldPosition = new Vector3();
const _inverseMatrix = new Matrix4();

// Camera-aligned rotation axes, computed on drag start in rotate mode.
const _up = new Vector3();
const _right = new Vector3();

// Currently dragged / hovered object (null when none).
let _selected = null, _hovered = null;

// Reused hit-test result buffer for raycasting.
const _intersections = [];

// Internal interaction states.
const STATE = {
	NONE: - 1,
	PAN: 0,
	ROTATE: 1
};
/**
 * This class can be used to provide a drag'n'drop interaction.
 *
 * ```js
 * const controls = new DragControls( objects, camera, renderer.domElement );
 *
 * // add event listener to highlight dragged objects
 * controls.addEventListener( 'dragstart', function ( event ) {
 *
 * 	event.object.material.emissive.set( 0xaaaaaa );
 *
 * } );
 *
 * controls.addEventListener( 'dragend', function ( event ) {
 *
 * 	event.object.material.emissive.set( 0x000000 );
 *
 * } );
 * ```
 *
 * @augments Controls
 * @three_import import { DragControls } from 'three/addons/controls/DragControls.js';
 */
class DragControls extends Controls {

	/**
	 * Constructs a new controls instance.
	 *
	 * @param {Array<Object3D>} objects - An array of draggable 3D objects.
	 * @param {Camera} camera - The camera of the rendered scene.
	 * @param {?HTMLElement} [domElement=null] - The HTML DOM element used for event listeners.
	 */
	constructor( objects, camera, domElement = null ) {

		super( camera, domElement );

		/**
		 * An array of draggable 3D objects.
		 *
		 * @type {Array<Object3D>}
		 */
		this.objects = objects;

		/**
		 * Whether children of draggable objects can be dragged independently from their parent.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.recursive = true;

		/**
		 * This option only works if the `objects` array contains a single draggable group object.
		 * If set to `true`, the controls does not transform individual objects but the entire group.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.transformGroup = false;

		/**
		 * The speed at which the object will rotate when dragged in `rotate` mode.
		 * The higher the number the faster the rotation.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.rotateSpeed = 1;

		/**
		 * The raycaster used for detecting 3D objects.
		 *
		 * @type {Raycaster}
		 */
		this.raycaster = new Raycaster();

		// interaction
		this.mouseButtons = { LEFT: MOUSE.PAN, MIDDLE: MOUSE.PAN, RIGHT: MOUSE.ROTATE };
		this.touches = { ONE: TOUCH.PAN };

		// event listeners

		// Bind the handlers once so connect()/disconnect() can add and remove
		// the exact same listener references.
		this._onPointerMove = onPointerMove.bind( this );
		this._onPointerDown = onPointerDown.bind( this );
		this._onPointerCancel = onPointerCancel.bind( this );
		this._onContextMenu = onContextMenu.bind( this );

		//

		if ( domElement !== null ) {

			this.connect( domElement );

		}

	}

	// Attaches all pointer and context-menu listeners to the given element.
	connect( element ) {

		super.connect( element );

		this.domElement.addEventListener( 'pointermove', this._onPointerMove );
		this.domElement.addEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.addEventListener( 'pointerup', this._onPointerCancel );
		this.domElement.addEventListener( 'pointerleave', this._onPointerCancel );
		this.domElement.addEventListener( 'contextmenu', this._onContextMenu );

		this.domElement.style.touchAction = 'none'; // disable touch scroll

	}

	// Removes all listeners added by connect() and restores element styles.
	disconnect() {

		this.domElement.removeEventListener( 'pointermove', this._onPointerMove );
		this.domElement.removeEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.removeEventListener( 'pointerup', this._onPointerCancel );
		this.domElement.removeEventListener( 'pointerleave', this._onPointerCancel );
		this.domElement.removeEventListener( 'contextmenu', this._onContextMenu );

		this.domElement.style.touchAction = 'auto';
		this.domElement.style.cursor = '';

	}

	dispose() {

		this.disconnect();

	}

	// Converts the event's client coordinates into normalized device
	// coordinates ([-1, 1]) relative to the DOM element, stored in `_pointer`.
	_updatePointer( event ) {

		const rect = this.domElement.getBoundingClientRect();

		_pointer.x = ( event.clientX - rect.left ) / rect.width * 2 - 1;
		_pointer.y = - ( event.clientY - rect.top ) / rect.height * 2 + 1;

	}

	// Maps the pointer event (touch gesture or mouse button) to one of the
	// internal STATE values (PAN / ROTATE / NONE).
	_updateState( event ) {

		// determine action

		let action;

		if ( event.pointerType === 'touch' ) {

			action = this.touches.ONE;

		} else {

			switch ( event.button ) {

				case 0:

					action = this.mouseButtons.LEFT;
					break;

				case 1:

					action = this.mouseButtons.MIDDLE;
					break;

				case 2:

					action = this.mouseButtons.RIGHT;
					break;

				default:

					action = null;

			}

		}

		// determine state

		switch ( action ) {

			case MOUSE.PAN:
			case TOUCH.PAN:

				this.state = STATE.PAN;
				break;

			case MOUSE.ROTATE:
			case TOUCH.ROTATE:

				this.state = STATE.ROTATE;
				break;

			default:

				this.state = STATE.NONE;

		}

	}

}
// Pointer-move handler (bound to a DragControls instance). While an object is
// selected it performs the drag (pan or rotate); otherwise it updates the
// hover state for mouse/pen pointers.
function onPointerMove( event ) {

	const camera = this.object;
	const domElement = this.domElement;
	const raycaster = this.raycaster;

	if ( this.enabled === false ) return;

	this._updatePointer( event );

	raycaster.setFromCamera( _pointer, camera );

	if ( _selected ) {

		if ( this.state === STATE.PAN ) {

			// Slide the object along the camera-facing drag plane; the hit
			// point is converted into the parent's local space.
			if ( raycaster.ray.intersectPlane( _plane, _intersection ) ) {

				_selected.position.copy( _intersection.sub( _offset ).applyMatrix4( _inverseMatrix ) );

				this.dispatchEvent( { type: 'drag', object: _selected } );

			}

		} else if ( this.state === STATE.ROTATE ) {

			// Rotate around the camera-aligned axes captured on drag start,
			// scaled by the pointer delta since the last move event.
			_diff.subVectors( _pointer, _previousPointer ).multiplyScalar( this.rotateSpeed );

			_selected.rotateOnWorldAxis( _up, _diff.x );
			_selected.rotateOnWorldAxis( _right.normalize(), - _diff.y );

			this.dispatchEvent( { type: 'drag', object: _selected } );

		}

		_previousPointer.copy( _pointer );

	} else {

		// hover support

		if ( event.pointerType === 'mouse' || event.pointerType === 'pen' ) {

			_intersections.length = 0;

			raycaster.setFromCamera( _pointer, camera );
			raycaster.intersectObjects( this.objects, this.recursive, _intersections );

			if ( _intersections.length > 0 ) {

				const object = _intersections[ 0 ].object;

				// Keep the drag plane facing the camera through the hovered object.
				_plane.setFromNormalAndCoplanarPoint( camera.getWorldDirection( _plane.normal ), _worldPosition.setFromMatrixPosition( object.matrixWorld ) );

				// Hover target changed: leave the old object first…
				if ( _hovered !== object && _hovered !== null ) {

					this.dispatchEvent( { type: 'hoveroff', object: _hovered } );

					domElement.style.cursor = 'auto';
					_hovered = null;

				}

				// …then enter the new one.
				if ( _hovered !== object ) {

					this.dispatchEvent( { type: 'hoveron', object: object } );

					domElement.style.cursor = 'pointer';
					_hovered = object;

				}

			} else {

				if ( _hovered !== null ) {

					this.dispatchEvent( { type: 'hoveroff', object: _hovered } );

					domElement.style.cursor = 'auto';
					_hovered = null;

				}

			}

		}

	}

	_previousPointer.copy( _pointer );

}
// Pointer-down handler (bound to a DragControls instance). Picks the object
// under the pointer, prepares the drag plane/offset (pan) or rotation axes
// (rotate), and fires 'dragstart'.
function onPointerDown( event ) {

	const camera = this.object;
	const domElement = this.domElement;
	const raycaster = this.raycaster;

	if ( this.enabled === false ) return;

	this._updatePointer( event );
	this._updateState( event );

	_intersections.length = 0;

	raycaster.setFromCamera( _pointer, camera );
	raycaster.intersectObjects( this.objects, this.recursive, _intersections );

	if ( _intersections.length > 0 ) {

		if ( this.transformGroup === true ) {

			// look for the outermost group in the object's upper hierarchy
			_selected = findGroup( _intersections[ 0 ].object );

		} else {

			_selected = _intersections[ 0 ].object;

		}

		// Drag plane: faces the camera and passes through the selected object.
		_plane.setFromNormalAndCoplanarPoint( camera.getWorldDirection( _plane.normal ), _worldPosition.setFromMatrixPosition( _selected.matrixWorld ) );

		if ( raycaster.ray.intersectPlane( _plane, _intersection ) ) {

			if ( this.state === STATE.PAN ) {

				// Cache the parent's inverse world matrix and the grab offset
				// so onPointerMove can position the object in parent space.
				_inverseMatrix.copy( _selected.parent.matrixWorld ).invert();
				_offset.copy( _intersection ).sub( _worldPosition.setFromMatrixPosition( _selected.matrixWorld ) );

				domElement.style.cursor = 'move';

				this.dispatchEvent( { type: 'dragstart', object: _selected } );

			} else if ( this.state === STATE.ROTATE ) {

				// the controls only support Y+ up
				_up.set( 0, 1, 0 ).applyQuaternion( camera.quaternion ).normalize();
				_right.set( 1, 0, 0 ).applyQuaternion( camera.quaternion ).normalize();

				domElement.style.cursor = 'move';

				this.dispatchEvent( { type: 'dragstart', object: _selected } );

			}

		}

	}

	_previousPointer.copy( _pointer );

}
/**
 * Pointer-cancel handler: aborts an in-progress drag (emitting 'dragend'),
 * restores the cursor for the current hover state and resets the interaction
 * state machine.
 */
function onPointerCancel() {

	if ( this.enabled === false ) return;

	const wasDragging = Boolean( _selected );

	if ( wasDragging ) {

		this.dispatchEvent( { type: 'dragend', object: _selected } );

		_selected = null;

	}

	this.domElement.style.cursor = _hovered ? 'pointer' : 'auto';
	this.state = STATE.NONE;

}
/**
 * Context-menu handler: suppresses the browser's context menu while the
 * controls are enabled so right-button drags are not interrupted.
 */
function onContextMenu( event ) {

	if ( this.enabled !== false ) {

		event.preventDefault();

	}

}
/**
 * Walks up the scene graph from `obj` and returns the outermost ancestor
 * (including `obj` itself) that is a Group, or `group` (default: null) when
 * no Group lies on the path to the root.
 */
function findGroup( obj, group = null ) {

	let node = obj;

	do {

		if ( node.isGroup ) group = node;

		node = node.parent;

	} while ( node !== null );

	return group;

}
/**
* Fires when the user drags a 3D object.
*
* @event DragControls#drag
* @type {Object}
*/
/**
* Fires when the user has finished dragging a 3D object.
*
* @event DragControls#dragend
* @type {Object}
*/
/**
* Fires when the pointer is moved onto a 3D object, or onto one of its children.
*
* @event DragControls#hoveron
* @type {Object}
*/
/**
* Fires when the pointer is moved out of a 3D object.
*
* @event DragControls#hoveroff
* @type {Object}
*/
export { DragControls };

View File

@@ -0,0 +1,447 @@
import {
Controls,
MathUtils,
Spherical,
Vector3
} from 'three';
// Module-scoped scratch objects, reused across calls (lookAt/update/_setOrientation)
// to avoid per-call allocations; shared by all FirstPersonControls instances.
const _lookDirection = new Vector3();
const _spherical = new Spherical();
const _target = new Vector3();
const _targetPosition = new Vector3();
/**
* This class is an alternative implementation of {@link FlyControls}.
*
* @augments Controls
* @three_import import { FirstPersonControls } from 'three/addons/controls/FirstPersonControls.js';
*/
class FirstPersonControls extends Controls {

	/**
	 * Constructs a new controls instance.
	 *
	 * @param {Object3D} object - The object that is managed by the controls.
	 * @param {?HTMLElement} domElement - The HTML element used for event listeners.
	 */
	constructor( object, domElement = null ) {

		super( object, domElement );

		/**
		 * The movement speed.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.movementSpeed = 1.0;

		/**
		 * The look around speed.
		 *
		 * @type {number}
		 * @default 0.005
		 */
		this.lookSpeed = 0.005;

		/**
		 * Whether it's possible to vertically look around or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.lookVertical = true;

		/**
		 * Whether the camera is automatically moved forward or not.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.autoForward = false;

		/**
		 * Whether it's possible to look around or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.activeLook = true;

		/**
		 * Whether or not the camera's height influences the forward movement speed.
		 * Use the properties `heightCoef`, `heightMin` and `heightMax` for configuration.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.heightSpeed = false;

		/**
		 * Determines how much faster the camera moves when it's y-component is near `heightMax`.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.heightCoef = 1.0;

		/**
		 * Lower camera height limit used for movement speed adjustment.
		 *
		 * @type {number}
		 * @default 0
		 */
		this.heightMin = 0.0;

		/**
		 * Upper camera height limit used for movement speed adjustment.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.heightMax = 1.0;

		/**
		 * Whether or not looking around is vertically constrained by `verticalMin` and `verticalMax`.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.constrainVertical = false;

		/**
		 * How far you can vertically look around, lower limit. Range is `0` to `Math.PI` in radians.
		 *
		 * @type {number}
		 * @default 0
		 */
		this.verticalMin = 0;

		/**
		 * How far you can vertically look around, upper limit. Range is `0` to `Math.PI` in radians.
		 *
		 * @type {number}
		 * @default Math.PI
		 */
		this.verticalMax = Math.PI;

		/**
		 * Whether the mouse is pressed down or not.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default false
		 */
		this.mouseDragOn = false;

		// internals

		this._autoSpeedFactor = 0.0;

		this._pointerX = 0;
		this._pointerY = 0;

		this._moveForward = false;
		this._moveBackward = false;
		this._moveLeft = false;
		this._moveRight = false;

		// Initialized here for consistency with the flags above: these are
		// toggled by the R/F key handlers and read every update(); previously
		// they were never declared and sprang into existence on first key press.
		this._moveUp = false;
		this._moveDown = false;

		this._viewHalfX = 0;
		this._viewHalfY = 0;

		// look direction as latitude/longitude in degrees
		this._lat = 0;
		this._lon = 0;

		// event listeners

		this._onPointerMove = onPointerMove.bind( this );
		this._onPointerDown = onPointerDown.bind( this );
		this._onPointerUp = onPointerUp.bind( this );
		this._onContextMenu = onContextMenu.bind( this );
		this._onKeyDown = onKeyDown.bind( this );
		this._onKeyUp = onKeyUp.bind( this );

		//

		if ( domElement !== null ) {

			this.connect( domElement );

			this.handleResize();

		}

		this._setOrientation();

	}

	connect( element ) {

		super.connect( element );

		// key events are registered globally so the element needs no keyboard focus
		window.addEventListener( 'keydown', this._onKeyDown );
		window.addEventListener( 'keyup', this._onKeyUp );

		this.domElement.addEventListener( 'pointermove', this._onPointerMove );
		this.domElement.addEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.addEventListener( 'pointerup', this._onPointerUp );
		this.domElement.addEventListener( 'contextmenu', this._onContextMenu );

	}

	disconnect() {

		window.removeEventListener( 'keydown', this._onKeyDown );
		window.removeEventListener( 'keyup', this._onKeyUp );

		this.domElement.removeEventListener( 'pointermove', this._onPointerMove );
		this.domElement.removeEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.removeEventListener( 'pointerup', this._onPointerUp );
		this.domElement.removeEventListener( 'contextmenu', this._onContextMenu );

	}

	dispose() {

		this.disconnect();

	}

	/**
	 * Must be called if the application window is resized.
	 */
	handleResize() {

		if ( this.domElement === document ) {

			this._viewHalfX = window.innerWidth / 2;
			this._viewHalfY = window.innerHeight / 2;

		} else {

			this._viewHalfX = this.domElement.offsetWidth / 2;
			this._viewHalfY = this.domElement.offsetHeight / 2;

		}

	}

	/**
	 * Rotates the camera towards the defined target position.
	 *
	 * @param {number|Vector3} x - The x coordinate of the target position or alternatively a vector representing the target position.
	 * @param {number} y - The y coordinate of the target position.
	 * @param {number} z - The z coordinate of the target position.
	 * @return {FirstPersonControls} A reference to this controls.
	 */
	lookAt( x, y, z ) {

		if ( x.isVector3 ) {

			_target.copy( x );

		} else {

			_target.set( x, y, z );

		}

		this.object.lookAt( _target );

		this._setOrientation();

		return this;

	}

	/**
	 * Updates the controls: translates the object according to the active
	 * movement flags and re-orients it from the accumulated pointer input.
	 *
	 * @param {number} delta - The time delta in seconds.
	 */
	update( delta ) {

		if ( this.enabled === false ) return;

		if ( this.heightSpeed ) {

			const y = MathUtils.clamp( this.object.position.y, this.heightMin, this.heightMax );
			const heightDelta = y - this.heightMin;

			this._autoSpeedFactor = delta * ( heightDelta * this.heightCoef );

		} else {

			this._autoSpeedFactor = 0.0;

		}

		const actualMoveSpeed = delta * this.movementSpeed;

		if ( this._moveForward || ( this.autoForward && ! this._moveBackward ) ) this.object.translateZ( - ( actualMoveSpeed + this._autoSpeedFactor ) );
		if ( this._moveBackward ) this.object.translateZ( actualMoveSpeed );

		if ( this._moveLeft ) this.object.translateX( - actualMoveSpeed );
		if ( this._moveRight ) this.object.translateX( actualMoveSpeed );

		if ( this._moveUp ) this.object.translateY( actualMoveSpeed );
		if ( this._moveDown ) this.object.translateY( - actualMoveSpeed );

		let actualLookSpeed = delta * this.lookSpeed;

		if ( ! this.activeLook ) {

			actualLookSpeed = 0;

		}

		let verticalLookRatio = 1;

		if ( this.constrainVertical ) {

			verticalLookRatio = Math.PI / ( this.verticalMax - this.verticalMin );

		}

		this._lon -= this._pointerX * actualLookSpeed;
		if ( this.lookVertical ) this._lat -= this._pointerY * actualLookSpeed * verticalLookRatio;

		// clamp latitude so the view cannot flip over the poles
		this._lat = Math.max( - 85, Math.min( 85, this._lat ) );

		let phi = MathUtils.degToRad( 90 - this._lat );
		const theta = MathUtils.degToRad( this._lon );

		if ( this.constrainVertical ) {

			phi = MathUtils.mapLinear( phi, 0, Math.PI, this.verticalMin, this.verticalMax );

		}

		const position = this.object.position;

		_targetPosition.setFromSphericalCoords( 1, phi, theta ).add( position );

		this.object.lookAt( _targetPosition );

	}

	// Derives the internal latitude/longitude angles from the object's current
	// orientation so look input continues smoothly from the present view.
	_setOrientation() {

		const quaternion = this.object.quaternion;

		_lookDirection.set( 0, 0, - 1 ).applyQuaternion( quaternion );

		_spherical.setFromVector3( _lookDirection );

		this._lat = 90 - MathUtils.radToDeg( _spherical.phi );
		this._lon = MathUtils.radToDeg( _spherical.theta );

	}

}
/**
 * Pointer-down handler: focuses the DOM element (unless the whole document is
 * used), maps mouse buttons to forward/backward movement when `activeLook` is
 * enabled, and flags the drag state.
 */
function onPointerDown( event ) {

	if ( this.domElement !== document ) this.domElement.focus();

	if ( this.activeLook ) {

		if ( event.button === 0 ) {

			this._moveForward = true;

		} else if ( event.button === 2 ) {

			this._moveBackward = true;

		}

	}

	this.mouseDragOn = true;

}
/**
 * Pointer-up handler: clears button-driven forward/backward movement when
 * `activeLook` is enabled and resets the drag state.
 */
function onPointerUp( event ) {

	if ( this.activeLook ) {

		if ( event.button === 0 ) this._moveForward = false;
		else if ( event.button === 2 ) this._moveBackward = false;

	}

	this.mouseDragOn = false;

}
/**
 * Pointer-move handler: stores the pointer position relative to the center of
 * the listening element (or the window when the whole document is used).
 */
function onPointerMove( event ) {

	const usesDocument = ( this.domElement === document );

	const offsetX = usesDocument ? 0 : this.domElement.offsetLeft;
	const offsetY = usesDocument ? 0 : this.domElement.offsetTop;

	this._pointerX = event.pageX - offsetX - this._viewHalfX;
	this._pointerY = event.pageY - offsetY - this._viewHalfY;

}
/**
 * Key-down handler: raises the movement flag matching the pressed key
 * (WASD/arrow keys for planar movement, R/F for vertical movement).
 */
function onKeyDown( event ) {

	const code = event.code;

	if ( code === 'ArrowUp' || code === 'KeyW' ) this._moveForward = true;
	else if ( code === 'ArrowLeft' || code === 'KeyA' ) this._moveLeft = true;
	else if ( code === 'ArrowDown' || code === 'KeyS' ) this._moveBackward = true;
	else if ( code === 'ArrowRight' || code === 'KeyD' ) this._moveRight = true;
	else if ( code === 'KeyR' ) this._moveUp = true;
	else if ( code === 'KeyF' ) this._moveDown = true;

}
/**
 * Key-up handler: lowers the movement flag matching the released key
 * (mirror image of onKeyDown).
 */
function onKeyUp( event ) {

	const code = event.code;

	if ( code === 'ArrowUp' || code === 'KeyW' ) this._moveForward = false;
	else if ( code === 'ArrowLeft' || code === 'KeyA' ) this._moveLeft = false;
	else if ( code === 'ArrowDown' || code === 'KeyS' ) this._moveBackward = false;
	else if ( code === 'ArrowRight' || code === 'KeyD' ) this._moveRight = false;
	else if ( code === 'KeyR' ) this._moveUp = false;
	else if ( code === 'KeyF' ) this._moveDown = false;

}
/**
 * Context-menu handler: blocks the browser context menu while the controls
 * are enabled (the right mouse button is used for backward movement).
 */
function onContextMenu( event ) {

	if ( this.enabled === false ) {

		return;

	}

	event.preventDefault();

}
export { FirstPersonControls };

380
node_modules/three/examples/jsm/controls/FlyControls.js generated vendored Normal file
View File

@@ -0,0 +1,380 @@
import {
Controls,
Quaternion,
Vector3
} from 'three';
/**
* Fires when the camera has been transformed by the controls.
*
* @event FlyControls#change
* @type {Object}
*/
// Shared 'change' event object, reused for every dispatch to avoid allocations.
const _changeEvent = { type: 'change' };
// Threshold below which position/orientation deltas do not fire a 'change' event.
const _EPS = 0.000001;
// Module-scoped scratch quaternion, reused each update() call.
const _tmpQuaternion = new Quaternion();
/**
* This class enables a navigation similar to fly modes in DCC tools like Blender.
* You can arbitrarily transform the camera in 3D space without any limitations
* (e.g. focus on a specific target).
*
* @augments Controls
* @three_import import { FlyControls } from 'three/addons/controls/FlyControls.js';
*/
class FlyControls extends Controls {

	/**
	 * Constructs a new controls instance.
	 *
	 * @param {Object3D} object - The object that is managed by the controls.
	 * @param {?HTMLElement} domElement - The HTML element used for event listeners.
	 */
	constructor( object, domElement = null ) {

		super( object, domElement );

		/**
		 * The movement speed.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.movementSpeed = 1.0;

		/**
		 * The rotation speed.
		 *
		 * @type {number}
		 * @default 0.005
		 */
		this.rollSpeed = 0.005;

		/**
		 * If set to `true`, you can only look around by performing a drag interaction.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.dragToLook = false;

		/**
		 * If set to `true`, the camera automatically moves forward (and does not stop) when initially translated.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.autoForward = false;

		// internals

		// per-axis input state: 0/1 for keys, fractional values for pointer look
		this._moveState = { up: 0, down: 0, left: 0, right: 0, forward: 0, back: 0, pitchUp: 0, pitchDown: 0, yawLeft: 0, yawRight: 0, rollLeft: 0, rollRight: 0 };
		// cached translation/rotation directions derived from _moveState
		this._moveVector = new Vector3( 0, 0, 0 );
		this._rotationVector = new Vector3( 0, 0, 0 );
		// last transform for which a 'change' event was dispatched
		this._lastQuaternion = new Quaternion();
		this._lastPosition = new Vector3();
		// number of active pointers while dragging (dragToLook mode only)
		this._status = 0;

		// event listeners

		this._onKeyDown = onKeyDown.bind( this );
		this._onKeyUp = onKeyUp.bind( this );
		this._onPointerMove = onPointerMove.bind( this );
		this._onPointerDown = onPointerDown.bind( this );
		this._onPointerUp = onPointerUp.bind( this );
		this._onPointerCancel = onPointerCancel.bind( this );
		this._onContextMenu = onContextMenu.bind( this );

		//

		if ( domElement !== null ) {

			this.connect( domElement );

		}

	}

	connect( element ) {

		super.connect( element );

		// key events are registered globally so the element needs no keyboard focus
		window.addEventListener( 'keydown', this._onKeyDown );
		window.addEventListener( 'keyup', this._onKeyUp );

		this.domElement.addEventListener( 'pointermove', this._onPointerMove );
		this.domElement.addEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.addEventListener( 'pointerup', this._onPointerUp );
		this.domElement.addEventListener( 'pointercancel', this._onPointerCancel );
		this.domElement.addEventListener( 'contextmenu', this._onContextMenu );

	}

	disconnect() {

		window.removeEventListener( 'keydown', this._onKeyDown );
		window.removeEventListener( 'keyup', this._onKeyUp );

		this.domElement.removeEventListener( 'pointermove', this._onPointerMove );
		this.domElement.removeEventListener( 'pointerdown', this._onPointerDown );
		this.domElement.removeEventListener( 'pointerup', this._onPointerUp );
		this.domElement.removeEventListener( 'pointercancel', this._onPointerCancel );
		this.domElement.removeEventListener( 'contextmenu', this._onContextMenu );

	}

	dispose() {

		this.disconnect();

	}

	/**
	 * Updates the controls: translates and rotates the object from the cached
	 * movement/rotation vectors and dispatches a 'change' event when the
	 * transform moved beyond the epsilon threshold.
	 *
	 * NOTE(review): `movementSpeedMultiplier` (set by the Shift key handlers
	 * below) is not applied here — confirm whether that is intentional.
	 *
	 * @param {number} delta - The time delta in seconds.
	 */
	update( delta ) {

		if ( this.enabled === false ) return;

		const object = this.object;

		const moveMult = delta * this.movementSpeed;
		const rotMult = delta * this.rollSpeed;

		object.translateX( this._moveVector.x * moveMult );
		object.translateY( this._moveVector.y * moveMult );
		object.translateZ( this._moveVector.z * moveMult );

		_tmpQuaternion.set( this._rotationVector.x * rotMult, this._rotationVector.y * rotMult, this._rotationVector.z * rotMult, 1 ).normalize();
		object.quaternion.multiply( _tmpQuaternion );

		if (
			this._lastPosition.distanceToSquared( object.position ) > _EPS ||
			8 * ( 1 - this._lastQuaternion.dot( object.quaternion ) ) > _EPS
		) {

			this.dispatchEvent( _changeEvent );
			this._lastQuaternion.copy( object.quaternion );
			this._lastPosition.copy( object.position );

		}

	}

	// private

	// Recomputes the cached translation direction from the movement state.
	_updateMovementVector() {

		const forward = ( this._moveState.forward || ( this.autoForward && ! this._moveState.back ) ) ? 1 : 0;

		this._moveVector.x = ( - this._moveState.left + this._moveState.right );
		this._moveVector.y = ( - this._moveState.down + this._moveState.up );
		this._moveVector.z = ( - forward + this._moveState.back );

		//console.log( 'move:', [ this._moveVector.x, this._moveVector.y, this._moveVector.z ] );

	}

	// Recomputes the cached rotation axis weights from the movement state.
	_updateRotationVector() {

		this._rotationVector.x = ( - this._moveState.pitchDown + this._moveState.pitchUp );
		this._rotationVector.y = ( - this._moveState.yawRight + this._moveState.yawLeft );
		this._rotationVector.z = ( - this._moveState.rollRight + this._moveState.rollLeft );

		//console.log( 'rotate:', [ this._rotationVector.x, this._rotationVector.y, this._rotationVector.z ] );

	}

	// Returns the size and page offset of the listening element (or the window
	// when the whole document is used).
	_getContainerDimensions() {

		if ( this.domElement != document ) {

			return {
				size: [ this.domElement.offsetWidth, this.domElement.offsetHeight ],
				offset: [ this.domElement.offsetLeft, this.domElement.offsetTop ]
			};

		} else {

			return {
				size: [ window.innerWidth, window.innerHeight ],
				offset: [ 0, 0 ]
			};

		}

	}

}
/**
 * Key-down handler: raises the matching movement/rotation state bit (or
 * lowers the speed multiplier while Shift is held), then refreshes the
 * cached movement and rotation vectors. Alt-modified keys are ignored.
 */
function onKeyDown( event ) {

	if ( event.altKey || this.enabled === false ) {

		return;

	}

	const code = event.code;

	if ( code === 'ShiftLeft' || code === 'ShiftRight' ) {

		this.movementSpeedMultiplier = .1;

	} else {

		const state = this._moveState;

		if ( code === 'KeyW' ) state.forward = 1;
		else if ( code === 'KeyS' ) state.back = 1;
		else if ( code === 'KeyA' ) state.left = 1;
		else if ( code === 'KeyD' ) state.right = 1;
		else if ( code === 'KeyR' ) state.up = 1;
		else if ( code === 'KeyF' ) state.down = 1;
		else if ( code === 'ArrowUp' ) state.pitchUp = 1;
		else if ( code === 'ArrowDown' ) state.pitchDown = 1;
		else if ( code === 'ArrowLeft' ) state.yawLeft = 1;
		else if ( code === 'ArrowRight' ) state.yawRight = 1;
		else if ( code === 'KeyQ' ) state.rollLeft = 1;
		else if ( code === 'KeyE' ) state.rollRight = 1;

	}

	this._updateMovementVector();
	this._updateRotationVector();

}
/**
 * Key-up handler: clears the matching movement/rotation state bit (or
 * restores the speed multiplier when Shift is released), then refreshes the
 * cached movement and rotation vectors.
 */
function onKeyUp( event ) {

	if ( this.enabled === false ) return;

	const code = event.code;

	if ( code === 'ShiftLeft' || code === 'ShiftRight' ) {

		this.movementSpeedMultiplier = 1;

	} else {

		const state = this._moveState;

		if ( code === 'KeyW' ) state.forward = 0;
		else if ( code === 'KeyS' ) state.back = 0;
		else if ( code === 'KeyA' ) state.left = 0;
		else if ( code === 'KeyD' ) state.right = 0;
		else if ( code === 'KeyR' ) state.up = 0;
		else if ( code === 'KeyF' ) state.down = 0;
		else if ( code === 'ArrowUp' ) state.pitchUp = 0;
		else if ( code === 'ArrowDown' ) state.pitchDown = 0;
		else if ( code === 'ArrowLeft' ) state.yawLeft = 0;
		else if ( code === 'ArrowRight' ) state.yawRight = 0;
		else if ( code === 'KeyQ' ) state.rollLeft = 0;
		else if ( code === 'KeyE' ) state.rollRight = 0;

	}

	this._updateMovementVector();
	this._updateRotationVector();

}
/**
 * Pointer-down handler: in drag-to-look mode just counts the active pointer,
 * otherwise maps mouse buttons to forward/backward movement.
 */
function onPointerDown( event ) {

	if ( this.enabled === false ) return;

	if ( this.dragToLook ) {

		this._status ++;

		return;

	}

	if ( event.button === 0 ) this._moveState.forward = 1;
	else if ( event.button === 2 ) this._moveState.back = 1;

	this._updateMovementVector();

}
/**
 * Pointer-move handler: converts the pointer position (relative to the
 * element center, normalized to [-1, 1]) into yaw/pitch state and refreshes
 * the cached rotation vector. Ignored in drag-to-look mode unless a drag is
 * in progress.
 */
function onPointerMove( event ) {

	if ( this.enabled === false ) return;

	if ( this.dragToLook && this._status <= 0 ) return;

	const { size, offset } = this._getContainerDimensions();
	const halfWidth = size[ 0 ] / 2;
	const halfHeight = size[ 1 ] / 2;

	this._moveState.yawLeft = - ( event.pageX - offset[ 0 ] - halfWidth ) / halfWidth;
	this._moveState.pitchDown = ( event.pageY - offset[ 1 ] - halfHeight ) / halfHeight;

	this._updateRotationVector();

}
/**
 * Pointer-up handler: in drag-to-look mode releases one pointer and zeroes
 * the pointer-driven look state; otherwise clears button-driven movement.
 * Always refreshes the cached rotation vector.
 */
function onPointerUp( event ) {

	if ( this.enabled === false ) return;

	if ( this.dragToLook ) {

		this._status --;

		this._moveState.yawLeft = 0;
		this._moveState.pitchDown = 0;

	} else {

		if ( event.button === 0 ) this._moveState.forward = 0;
		else if ( event.button === 2 ) this._moveState.back = 0;

		this._updateMovementVector();

	}

	this._updateRotationVector();

}
/**
 * Pointer-cancel handler: resets the drag status and any pointer-driven
 * movement/rotation state so the controls do not keep moving after the
 * pointer interaction is aborted.
 */
function onPointerCancel() {

	if ( this.enabled === false ) return;

	if ( this.dragToLook ) {

		this._status = 0;

		this._moveState.yawLeft = 0;
		this._moveState.pitchDown = 0;

	} else {

		this._moveState.forward = 0;
		this._moveState.back = 0;

		this._updateMovementVector();

	}

	this._updateRotationVector();

}
/**
 * Context-menu handler: blocks the browser's context menu while the controls
 * are enabled, keeping right-mouse interactions usable for navigation.
 */
function onContextMenu( event ) {

	const active = ( this.enabled !== false );

	if ( active ) event.preventDefault();

}
export { FlyControls };

116
node_modules/three/examples/jsm/controls/MapControls.js generated vendored Normal file
View File

@@ -0,0 +1,116 @@
import { MOUSE, TOUCH, Plane, Raycaster, Vector2, Vector3 } from 'three';
import { OrbitControls } from './OrbitControls.js';
// Module-scoped scratch objects for the ground-plane panning math, reused
// across events to avoid allocations; shared by all MapControls instances.
const _plane = new Plane();
const _raycaster = new Raycaster();
const _mouse = new Vector2();
const _panCurrent = new Vector3();
/**
* This class is intended for transforming a camera over a map from bird's eye perspective.
* The class shares its implementation with {@link OrbitControls} but uses a specific preset
* for mouse/touch interaction and disables screen space panning by default.
*
* - Orbit: Right mouse, or left mouse + ctrl/meta/shiftKey / touch: two-finger rotate.
* - Zoom: Middle mouse, or mousewheel / touch: two-finger spread or squish.
* - Pan: Left mouse, or arrow keys / touch: one-finger move.
*
* @augments OrbitControls
* @three_import import { MapControls } from 'three/addons/controls/MapControls.js';
*/
class MapControls extends OrbitControls {

	/**
	 * Constructs a new controls instance.
	 *
	 * @param {Object3D} object - The camera that is managed by the controls.
	 * @param {?HTMLElement} domElement - The HTML element used for event listeners.
	 */
	constructor( object, domElement ) {

		super( object, domElement );

		/**
		 * Overwritten and set to `false` to pan orthogonal to world-space direction `camera.up`.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.screenSpacePanning = false;

		/**
		 * This object contains references to the mouse actions used by the controls.
		 *
		 * ```js
		 * controls.mouseButtons = {
		 * 	LEFT: THREE.MOUSE.PAN,
		 * 	MIDDLE: THREE.MOUSE.DOLLY,
		 * 	RIGHT: THREE.MOUSE.ROTATE
		 * }
		 * ```
		 * @type {Object}
		 */
		this.mouseButtons = { LEFT: MOUSE.PAN, MIDDLE: MOUSE.DOLLY, RIGHT: MOUSE.ROTATE };

		/**
		 * This object contains references to the touch actions used by the controls.
		 *
		 * ```js
		 * controls.touches = {
		 * 	ONE: THREE.TOUCH.PAN,
		 * 	TWO: THREE.TOUCH.DOLLY_ROTATE
		 * }
		 * ```
		 * @type {Object}
		 */
		this.touches = { ONE: TOUCH.PAN, TWO: TOUCH.DOLLY_ROTATE };

		// world-space point on the ground plane where the current pan gesture started
		this._panWorldStart = new Vector3();

	}

	// Begins a pan gesture: anchors a plane through the target (normal to
	// camera.up) and records where the initial pointer ray hits it, so later
	// moves can be measured as world-space deltas on that plane.
	_handleMouseDownPan( event ) {

		super._handleMouseDownPan( event );

		this._panOffset.set( 0, 0, 0 );

		if ( this.screenSpacePanning === true ) return;

		_plane.setFromNormalAndCoplanarPoint( this.object.up, this.target );

		const element = this.domElement;
		const rect = element.getBoundingClientRect();

		// pointer position in normalized device coordinates ([-1, 1])
		_mouse.x = ( ( event.clientX - rect.left ) / rect.width ) * 2 - 1;
		_mouse.y = - ( ( event.clientY - rect.top ) / rect.height ) * 2 + 1;

		_raycaster.setFromCamera( _mouse, this.object );
		_raycaster.ray.intersectPlane( _plane, this._panWorldStart );

	}

	// Continues a pan gesture: intersects the current pointer ray with the
	// anchored ground plane and applies the delta from the gesture start as
	// the pan offset (falls back to OrbitControls for screen-space panning).
	_handleMouseMovePan( event ) {

		if ( this.screenSpacePanning === true ) {

			super._handleMouseMovePan( event );
			return;

		}

		const element = this.domElement;
		const rect = element.getBoundingClientRect();

		_mouse.x = ( ( event.clientX - rect.left ) / rect.width ) * 2 - 1;
		_mouse.y = - ( ( event.clientY - rect.top ) / rect.height ) * 2 + 1;

		_raycaster.setFromCamera( _mouse, this.object );

		if ( _raycaster.ray.intersectPlane( _plane, _panCurrent ) ) {

			_panCurrent.sub( this._panWorldStart );

			// negate: the world should move opposite to the pointer drag
			this._panOffset.copy( _panCurrent ).negate();

			this.update();

		}

	}

}
export { MapControls };

1860
node_modules/three/examples/jsm/controls/OrbitControls.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,264 @@
import {
Controls,
Euler,
Vector3
} from 'three';
// Scratch Euler in 'YXZ' order: yaw (y) is applied before pitch (x), so the
// camera never accumulates roll.
const _euler = new Euler( 0, 0, 0, 'YXZ' );
// Module-scoped scratch vector, reused by moveForward()/moveRight().
const _vector = new Vector3();
/**
* Fires when the user moves the mouse.
*
* @event PointerLockControls#change
* @type {Object}
*/
const _changeEvent = { type: 'change' };
/**
* Fires when the pointer lock status is "locked" (in other words: the mouse is captured).
*
* @event PointerLockControls#lock
* @type {Object}
*/
const _lockEvent = { type: 'lock' };
/**
* Fires when the pointer lock status is "unlocked" (in other words: the mouse is not captured anymore).
*
* @event PointerLockControls#unlock
* @type {Object}
*/
const _unlockEvent = { type: 'unlock' };
const _MOUSE_SENSITIVITY = 0.002;
const _PI_2 = Math.PI / 2;
/**
* The implementation of this class is based on the [Pointer Lock API](https://developer.mozilla.org/en-US/docs/Web/API/Pointer_Lock_API).
* `PointerLockControls` is a perfect choice for first person 3D games.
*
* ```js
* const controls = new PointerLockControls( camera, document.body );
*
* // add event listener to show/hide a UI (e.g. the game's menu)
* controls.addEventListener( 'lock', function () {
*
* menu.style.display = 'none';
*
* } );
*
* controls.addEventListener( 'unlock', function () {
*
* menu.style.display = 'block';
*
* } );
* ```
*
* @augments Controls
* @three_import import { PointerLockControls } from 'three/addons/controls/PointerLockControls.js';
*/
class PointerLockControls extends Controls {

	/**
	 * Constructs a new controls instance.
	 *
	 * @param {Camera} camera - The camera that is managed by the controls.
	 * @param {?HTMLElement} domElement - The HTML element used for event listeners.
	 */
	constructor( camera, domElement = null ) {

		super( camera, domElement );

		/**
		 * Whether the controls are locked or not.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default false
		 */
		this.isLocked = false;

		/**
		 * Camera pitch, lower limit. Range is '[0, Math.PI]' in radians.
		 *
		 * @type {number}
		 * @default 0
		 */
		this.minPolarAngle = 0;

		/**
		 * Camera pitch, upper limit. Range is '[0, Math.PI]' in radians.
		 *
		 * @type {number}
		 * @default Math.PI
		 */
		this.maxPolarAngle = Math.PI;

		/**
		 * Multiplier for how much the pointer movement influences the camera rotation.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.pointerSpeed = 1.0;

		// event listeners

		this._onMouseMove = onMouseMove.bind( this );
		this._onPointerlockChange = onPointerlockChange.bind( this );
		this._onPointerlockError = onPointerlockError.bind( this );

		if ( this.domElement !== null ) {

			this.connect( this.domElement );

		}

	}

	connect( element ) {

		super.connect( element );

		// pointer-lock events fire on the owning document, not the element itself
		this.domElement.ownerDocument.addEventListener( 'mousemove', this._onMouseMove );
		this.domElement.ownerDocument.addEventListener( 'pointerlockchange', this._onPointerlockChange );
		this.domElement.ownerDocument.addEventListener( 'pointerlockerror', this._onPointerlockError );

	}

	disconnect() {

		this.domElement.ownerDocument.removeEventListener( 'mousemove', this._onMouseMove );
		this.domElement.ownerDocument.removeEventListener( 'pointerlockchange', this._onPointerlockChange );
		this.domElement.ownerDocument.removeEventListener( 'pointerlockerror', this._onPointerlockError );

	}

	dispose() {

		this.disconnect();

	}

	/**
	 * Returns the look direction of the camera.
	 *
	 * @param {Vector3} v - The target vector that is used to store the method's result.
	 * @return {Vector3} The normalized direction vector.
	 */
	getDirection( v ) {

		return v.set( 0, 0, - 1 ).applyQuaternion( this.object.quaternion );

	}

	/**
	 * Moves the camera forward parallel to the xz-plane. Assumes camera.up is y-up.
	 *
	 * @param {number} distance - The signed distance.
	 */
	moveForward( distance ) {

		if ( this.enabled === false ) return;

		// move forward parallel to the xz-plane
		// assumes camera.up is y-up

		const camera = this.object;

		_vector.setFromMatrixColumn( camera.matrix, 0 );
		_vector.crossVectors( camera.up, _vector );

		camera.position.addScaledVector( _vector, distance );

	}

	/**
	 * Moves the camera sidewards parallel to the xz-plane.
	 *
	 * @param {number} distance - The signed distance.
	 */
	moveRight( distance ) {

		if ( this.enabled === false ) return;

		const camera = this.object;

		// the camera's local x-axis is its "right" direction
		_vector.setFromMatrixColumn( camera.matrix, 0 );

		camera.position.addScaledVector( _vector, distance );

	}

	/**
	 * Activates the pointer lock.
	 *
	 * @param {boolean} [unadjustedMovement=false] - Disables OS-level adjustment for mouse acceleration, and accesses raw mouse input instead.
	 * Setting it to true will disable mouse acceleration.
	 */
	lock( unadjustedMovement = false ) {

		this.domElement.requestPointerLock( {
			unadjustedMovement
		} );

	}

	/**
	 * Exits the pointer lock.
	 */
	unlock() {

		this.domElement.ownerDocument.exitPointerLock();

	}

}
// event listeners
/**
 * Mouse-move handler: while the pointer is locked, converts pointer deltas
 * into yaw/pitch rotation of the managed camera, clamping pitch to the
 * configured polar-angle range, then emits a 'change' event.
 */
function onMouseMove( event ) {

	if ( this.enabled === false || this.isLocked === false ) return;

	const camera = this.object;

	_euler.setFromQuaternion( camera.quaternion );

	_euler.y -= event.movementX * _MOUSE_SENSITIVITY * this.pointerSpeed;
	_euler.x -= event.movementY * _MOUSE_SENSITIVITY * this.pointerSpeed;

	// clamp pitch so the camera cannot flip over the poles
	const minPitch = _PI_2 - this.maxPolarAngle;
	const maxPitch = _PI_2 - this.minPolarAngle;

	_euler.x = Math.max( minPitch, Math.min( maxPitch, _euler.x ) );

	camera.quaternion.setFromEuler( _euler );

	this.dispatchEvent( _changeEvent );

}
/**
 * pointerlockchange handler: syncs `isLocked` with the document's current
 * pointer-lock element and notifies listeners via 'lock'/'unlock' events.
 */
function onPointerlockChange() {

	const locked = ( this.domElement.ownerDocument.pointerLockElement === this.domElement );

	// dispatch before updating the flag, matching the previous observable order
	this.dispatchEvent( locked ? _lockEvent : _unlockEvent );

	this.isLocked = locked;

}
/**
 * pointerlockerror handler: the browser rejected the pointer-lock request
 * (e.g. missing user gesture or a sandboxed iframe); report it on the console.
 */
function onPointerlockError() {

	console.error( 'THREE.PointerLockControls: Unable to use Pointer Lock API' );

}
export { PointerLockControls };

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

598
node_modules/three/examples/jsm/csm/CSM.js generated vendored Normal file
View File

@@ -0,0 +1,598 @@
import {
Vector2,
Vector3,
DirectionalLight,
MathUtils,
ShaderChunk,
Matrix4,
Box3
} from 'three';
import { CSMFrustum } from './CSMFrustum.js';
import { CSMShader } from './CSMShader.js';
// Module-scoped scratch objects reused across CSM update calls to avoid
// per-frame allocations; shared by all CSM instances.
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new CSMFrustum( { webGL: true } );
const _center = new Vector3();
const _origin = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
const _lightOrientationMatrix = new Matrix4();
const _lightOrientationMatrixInverse = new Matrix4();
// shadow cameras are oriented with world Y+ as up
const _up = new Vector3( 0, 1, 0 );
/**
* An implementation of Cascade Shadow Maps (CSM).
*
* This module can only be used with {@link WebGLRenderer}. When using {@link WebGPURenderer},
* use {@link CSMShadowNode} instead.
*
* @three_import import { CSM } from 'three/addons/csm/CSM.js';
*/
export class CSM {
/**
* Constructs a new CSM instance.
*
* @param {CSM~Data} data - The CSM data.
*/
constructor( data ) {
/**
* The scene's camera.
*
* @type {Camera}
*/
this.camera = data.camera;
/**
* The parent object, usually the scene.
*
* @type {Object3D}
*/
this.parent = data.parent;
/**
* The number of cascades.
*
* @type {number}
* @default 3
*/
this.cascades = data.cascades || 3;
/**
* The maximum far value.
*
* @type {number}
* @default 100000
*/
this.maxFar = data.maxFar || 100000;
/**
* The frustum split mode.
*
* @type {('practical'|'uniform'|'logarithmic'|'custom')}
* @default 'practical'
*/
this.mode = data.mode || 'practical';
/**
* The shadow map size.
*
* @type {number}
* @default 2048
*/
this.shadowMapSize = data.shadowMapSize || 2048;
/**
* The shadow bias.
*
* @type {number}
* @default 0.000001
*/
this.shadowBias = data.shadowBias || 0.000001;
/**
* The light direction.
*
* @type {Vector3}
*/
this.lightDirection = data.lightDirection || new Vector3( 1, - 1, 1 ).normalize();
/**
* The light intensity.
*
* @type {number}
* @default 3
*/
this.lightIntensity = data.lightIntensity || 3;
/**
* The light near value.
*
* @type {number}
* @default 1
*/
this.lightNear = data.lightNear || 1;
/**
* The light far value.
*
* @type {number}
* @default 2000
*/
this.lightFar = data.lightFar || 2000;
/**
* The light margin.
*
* @type {number}
* @default 200
*/
this.lightMargin = data.lightMargin || 200;
/**
* Custom split callback when using `mode='custom'`.
*
* @type {Function}
*/
this.customSplitsCallback = data.customSplitsCallback;
/**
* Whether to fade between cascades or not.
*
* @type {boolean}
* @default false
*/
this.fade = false;
/**
* The main frustum.
*
* @type {CSMFrustum}
*/
this.mainFrustum = new CSMFrustum( { webGL: true } );
/**
* An array of frustums representing the cascades.
*
* @type {Array<CSMFrustum>}
*/
this.frustums = [];
/**
* An array of numbers in the range `[0,1]` the defines how the
* mainCSM frustum should be split up.
*
* @type {Array<number>}
*/
this.breaks = [];
/**
* An array of directional lights which cast the shadows for
* the different cascades. There is one directional light for each
* cascade.
*
* @type {Array<DirectionalLight>}
*/
this.lights = [];
/**
* A Map holding enhanced material shaders.
*
* @type {Map<Material,Object>}
*/
this.shaders = new Map();
this._createLights();
this.updateFrustums();
this._injectInclude();
}
/**
* Creates the directional lights of this CSM instance.
*
* @private
*/
_createLights() {
for ( let i = 0; i < this.cascades; i ++ ) {
const light = new DirectionalLight( 0xffffff, this.lightIntensity );
light.castShadow = true;
light.shadow.mapSize.width = this.shadowMapSize;
light.shadow.mapSize.height = this.shadowMapSize;
light.shadow.camera.near = this.lightNear;
light.shadow.camera.far = this.lightFar;
light.shadow.bias = this.shadowBias;
this.parent.add( light );
this.parent.add( light.target );
this.lights.push( light );
}
}
/**
* Inits the cascades according to the scene's camera and breaks configuration.
*
* @private
*/
_initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
/**
* Updates the shadow bounds of this CSM instance.
*
* @private
*/
_updateShadowBounds() {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent that furthest points on the frustum assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near );
squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
/**
* Computes the breaks of this CSM instance based on the scene's camera, number of cascades
* and the selected split mode.
*
* @private
*/
_getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near + ( far - near ) * i / amount ) / far );
}
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
/**
 * Updates the CSM. This method must be called in your animation loop before
 * calling `renderer.render()`.
 *
 * Repositions each cascade's directional light so that its orthographic
 * shadow camera tightly covers the cascade frustum, with the light position
 * snapped to shadow-map texel increments.
 */
update() {

const camera = this.camera;
const frustums = this.frustums;

// for each frustum we need to find its min-max box aligned with the light orientation
// the position in _lightOrientationMatrix does not matter, as we transform there and back
_lightOrientationMatrix.lookAt( _origin, this.lightDirection, _up );
_lightOrientationMatrixInverse.copy( _lightOrientationMatrix ).invert();

for ( let i = 0; i < frustums.length; i ++ ) {

const light = this.lights[ i ];
const shadowCam = light.shadow.camera;

// world-space size of a single shadow-map texel for this cascade's camera
const texelWidth = ( shadowCam.right - shadowCam.left ) / this.shadowMapSize;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / this.shadowMapSize;

// transform the cascade frustum from camera space into light space
_cameraToLightMatrix.multiplyMatrices( _lightOrientationMatrixInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );

// light-space axis-aligned bounding box of the cascade frustum
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {

_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );

}

_bbox.getCenter( _center );
// pull the light back along its view axis by the configured margin
_center.z = _bbox.max.z + this.lightMargin;
// snap to texel increments to avoid shadow shimmering when the camera moves
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
// back into world space
_center.applyMatrix4( _lightOrientationMatrix );

light.position.copy( _center );
light.target.position.copy( _center );

// aim the light along the configured light direction
light.target.position.x += this.lightDirection.x;
light.target.position.y += this.lightDirection.y;
light.target.position.z += this.lightDirection.z;

}

}
/**
* Injects the CSM shader enhancements into the built-in materials.
*
* @private
*/
_injectInclude() {
ShaderChunk.lights_fragment_begin = CSMShader.lights_fragment_begin;
ShaderChunk.lights_pars_begin = CSMShader.lights_pars_begin;
}
/**
* Applications must call this method for all materials that should be affected by CSM.
*
* @param {Material} material - The material to setup for CSM support.
*/
setupMaterial( material ) {
material.defines = material.defines || {};
material.defines.USE_CSM = 1;
material.defines.CSM_CASCADES = this.cascades;
if ( this.fade ) {
material.defines.CSM_FADE = '';
}
const breaksVec2 = [];
const scope = this;
const shaders = this.shaders;
material.onBeforeCompile = function ( shader ) {
const far = Math.min( scope.camera.far, scope.maxFar );
scope._getExtendedBreaks( breaksVec2 );
shader.uniforms.CSM_cascades = { value: breaksVec2 };
shader.uniforms.cameraNear = { value: scope.camera.near };
shader.uniforms.shadowFar = { value: far };
shaders.set( material, shader );
};
shaders.set( material, null );
}
/**
* Updates the CSM uniforms.
*
* @private
*/
_updateUniforms() {
const far = Math.min( this.camera.far, this.maxFar );
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
if ( shader !== null ) {
const uniforms = shader.uniforms;
this._getExtendedBreaks( uniforms.CSM_cascades.value );
uniforms.cameraNear.value = this.camera.near;
uniforms.shadowFar.value = far;
}
if ( ! this.fade && 'CSM_FADE' in material.defines ) {
delete material.defines.CSM_FADE;
material.needsUpdate = true;
} else if ( this.fade && ! ( 'CSM_FADE' in material.defines ) ) {
material.defines.CSM_FADE = '';
material.needsUpdate = true;
}
}, this );
}
/**
* Computes the extended breaks for the CSM uniforms.
*
* @private
* @param {Array<Vector2>} target - The target array that holds the extended breaks.
*/
_getExtendedBreaks( target ) {
while ( target.length < this.breaks.length ) {
target.push( new Vector2() );
}
target.length = this.breaks.length;
for ( let i = 0; i < this.cascades; i ++ ) {
const amount = this.breaks[ i ];
const prev = this.breaks[ i - 1 ] || 0;
target[ i ].x = prev;
target[ i ].y = amount;
}
}
/**
 * Applications must call this method every time they change camera or CSM settings.
 *
 * Recomputes the split breaks, rebuilds the cascade frustums, resizes the
 * shadow camera bounds and pushes the new values into the material uniforms —
 * in that order, since each step consumes the previous step's results.
 */
updateFrustums() {

this._getBreaks();
this._initCascades();
this._updateShadowBounds();
this._updateUniforms();

}
/**
* Applications must call this method when they remove the CSM usage from their scene.
*/
remove() {
for ( let i = 0; i < this.lights.length; i ++ ) {
this.parent.remove( this.lights[ i ].target );
this.parent.remove( this.lights[ i ] );
}
}
/**
* Frees the GPU-related resources allocated by this instance. Call this
* method whenever this instance is no longer used in your app.
*/
dispose() {
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
delete material.onBeforeCompile;
delete material.defines.USE_CSM;
delete material.defines.CSM_CASCADES;
delete material.defines.CSM_FADE;
if ( shader !== null ) {
delete shader.uniforms.CSM_cascades;
delete shader.uniforms.cameraNear;
delete shader.uniforms.shadowFar;
}
material.needsUpdate = true;
} );
shaders.clear();
}
}
/**
* Constructor data of `CSM`.
*
* @typedef {Object} CSM~Data
* @property {Camera} camera - The scene's camera.
* @property {Object3D} parent - The parent object, usually the scene.
* @property {number} [cascades=3] - The number of cascades.
* @property {number} [maxFar=100000] - The maximum far value.
* @property {('practical'|'uniform'|'logarithmic'|'custom')} [mode='practical'] - The frustum split mode.
* @property {Function} [customSplitsCallback] - Custom split callback when using `mode='custom'`.
* @property {number} [shadowMapSize=2048] - The shadow map size.
* @property {number} [shadowBias=0.000001] - The shadow bias.
* @property {Vector3} [lightDirection] - The light direction.
* @property {number} [lightIntensity=3] - The light intensity.
* @property {number} [lightNear=1] - The light near value.
* @property {number} [lightFar=2000] - The light far value.
* @property {number} [lightMargin=200] - The light margin.
**/

209
node_modules/three/examples/jsm/csm/CSMFrustum.js generated vendored Normal file
View File

@@ -0,0 +1,209 @@
import { Vector3, Matrix4 } from 'three';
const inverseProjectionMatrix = new Matrix4();
/**
 * Represents the frustum of a CSM instance.
 *
 * @three_import import { CSMFrustum } from 'three/addons/csm/CSMFrustum.js';
 */
class CSMFrustum {

/**
 * Constructs a new CSM frustum.
 *
 * @param {CSMFrustum~Data} [data] - The CSM data.
 */
constructor( data ) {

data = data || {};

/**
 * The zNear value (the near plane's z coordinate in clip space).
 * This value depends on whether the CSM is used with WebGL or WebGPU.
 * Both API use different conventions for their projection matrices:
 * WebGL NDC z spans `[-1,1]`, WebGPU spans `[0,1]`.
 *
 * @type {number}
 */
this.zNear = data.webGL === true ? - 1 : 0;

/**
 * An object representing the vertices of the near and
 * far plane in view space.
 *
 * @type {Object}
 */
this.vertices = {
near: [
new Vector3(),
new Vector3(),
new Vector3(),
new Vector3()
],
far: [
new Vector3(),
new Vector3(),
new Vector3(),
new Vector3()
]
};

if ( data.projectionMatrix !== undefined ) {

this.setFromProjectionMatrix( data.projectionMatrix, data.maxFar || 10000 );

}

}

/**
 * Sets up this CSM frustum from the given projection matrix and max far value.
 *
 * @param {Matrix4} projectionMatrix - The projection matrix, usually of the scene's camera.
 * @param {number} maxFar - The maximum far value.
 * @returns {Object} An object representing the vertices of the near and far plane in view space.
 */
setFromProjectionMatrix( projectionMatrix, maxFar ) {

const zNear = this.zNear;
// element (row 3, column 2) is zero for orthographic projections
// (no perspective divide), non-zero for perspective projections
const isOrthographic = projectionMatrix.elements[ 2 * 4 + 3 ] === 0;

inverseProjectionMatrix.copy( projectionMatrix ).invert();

// 3 --- 0 vertices.near/far order
// | |
// 2 --- 1
// clip space spans from [-1, 1]

// unproject the clip-space near-plane corners into view space
this.vertices.near[ 0 ].set( 1, 1, zNear );
this.vertices.near[ 1 ].set( 1, - 1, zNear );
this.vertices.near[ 2 ].set( - 1, - 1, zNear );
this.vertices.near[ 3 ].set( - 1, 1, zNear );
this.vertices.near.forEach( function ( v ) {

v.applyMatrix4( inverseProjectionMatrix );

} );

// unproject the far-plane corners and clamp them so the frustum
// never extends beyond maxFar
this.vertices.far[ 0 ].set( 1, 1, 1 );
this.vertices.far[ 1 ].set( 1, - 1, 1 );
this.vertices.far[ 2 ].set( - 1, - 1, 1 );
this.vertices.far[ 3 ].set( - 1, 1, 1 );
this.vertices.far.forEach( function ( v ) {

v.applyMatrix4( inverseProjectionMatrix );

const absZ = Math.abs( v.z );
if ( isOrthographic ) {

// orthographic: only the depth needs clamping, x/y are unaffected
v.z *= Math.min( maxFar / absZ, 1.0 );

} else {

// perspective: scale the vertex towards the origin so it stays on the frustum edge
v.multiplyScalar( Math.min( maxFar / absZ, 1.0 ) );

}

} );

return this.vertices;

}

/**
 * Splits the CSM frustum by the given array. The new CSM frustums are pushed into the given
 * target array.
 *
 * @param {Array<number>} breaks - An array of numbers in the range `[0,1]` that defines how the
 * CSM frustum should be split up.
 * @param {Array<CSMFrustum>} target - The target array that holds the new CSM frustums.
 */
split( breaks, target ) {

// reuse existing frustums where possible, create new ones when needed
while ( breaks.length > target.length ) {

target.push( new CSMFrustum() );

}

target.length = breaks.length;

for ( let i = 0; i < breaks.length; i ++ ) {

const cascade = target[ i ];

// cascade i's near plane is the previous break (or the main near plane for i === 0)
if ( i === 0 ) {

for ( let j = 0; j < 4; j ++ ) {

cascade.vertices.near[ j ].copy( this.vertices.near[ j ] );

}

} else {

for ( let j = 0; j < 4; j ++ ) {

cascade.vertices.near[ j ].lerpVectors( this.vertices.near[ j ], this.vertices.far[ j ], breaks[ i - 1 ] );

}

}

// cascade i's far plane is the current break (or the main far plane for the last cascade)
if ( i === breaks.length - 1 ) {

for ( let j = 0; j < 4; j ++ ) {

cascade.vertices.far[ j ].copy( this.vertices.far[ j ] );

}

} else {

for ( let j = 0; j < 4; j ++ ) {

cascade.vertices.far[ j ].lerpVectors( this.vertices.near[ j ], this.vertices.far[ j ], breaks[ i ] );

}

}

}

}

/**
 * Transforms the vertices of this frustum into the coordinate system defined by the
 * given camera matrix and writes the result into the target frustum.
 *
 * @param {Matrix4} cameraMatrix - The matrix that defines the new coordinate system.
 * @param {CSMFrustum} target - The CSM frustum that receives the transformed vertices.
 */
toSpace( cameraMatrix, target ) {

for ( let i = 0; i < 4; i ++ ) {

target.vertices.near[ i ]
.copy( this.vertices.near[ i ] )
.applyMatrix4( cameraMatrix );

target.vertices.far[ i ]
.copy( this.vertices.far[ i ] )
.applyMatrix4( cameraMatrix );

}

}

}
/**
* Constructor data of `CSMFrustum`.
*
* @typedef {Object} CSMFrustum~Data
* @property {boolean} [webGL] - Whether this CSM frustum is used with WebGL or WebGPU.
* @property {Matrix4} [projectionMatrix] - A projection matrix usually of the scene's camera.
* @property {number} [maxFar] - The maximum far value.
**/
export { CSMFrustum };

243
node_modules/three/examples/jsm/csm/CSMHelper.js generated vendored Normal file
View File

@@ -0,0 +1,243 @@
import {
Group,
Mesh,
LineSegments,
BufferGeometry,
LineBasicMaterial,
Box3Helper,
Box3,
PlaneGeometry,
MeshBasicMaterial,
BufferAttribute,
DoubleSide
} from 'three';
/**
 * A helper for visualizing the cascades of a CSM instance.
 *
 * @augments Group
 * @three_import import { CSMHelper } from 'three/addons/csm/CSMHelper.js';
 */
class CSMHelper extends Group {

/**
 * Constructs a new CSM helper.
 *
 * @param {CSM|CSMShadowNode} csm - The CSM instance to visualize.
 */
constructor( csm ) {

super();

/**
 * The CSM instance to visualize.
 *
 * @type {CSM|CSMShadowNode}
 */
this.csm = csm;

/**
 * Whether to display the CSM frustum or not.
 *
 * @type {boolean}
 * @default true
 */
this.displayFrustum = true;

/**
 * Whether to display the cascade planes or not.
 *
 * @type {boolean}
 * @default true
 */
this.displayPlanes = true;

/**
 * Whether to display the shadow bounds or not.
 *
 * @type {boolean}
 * @default true
 */
this.displayShadowBounds = true;

// edge list of a box: the near-plane loop, the far-plane loop and the four
// connecting edges (index pairs into 8 frustum corner vertices)
const indices = new Uint16Array( [ 0, 1, 1, 2, 2, 3, 3, 0, 4, 5, 5, 6, 6, 7, 7, 4, 0, 4, 1, 5, 2, 6, 3, 7 ] );
// 8 corners * 3 components; rewritten every frame in update()
const positions = new Float32Array( 24 );
const frustumGeometry = new BufferGeometry();
frustumGeometry.setIndex( new BufferAttribute( indices, 1 ) );
frustumGeometry.setAttribute( 'position', new BufferAttribute( positions, 3, false ) );
const frustumLines = new LineSegments( frustumGeometry, new LineBasicMaterial() );
this.add( frustumLines );

// wireframe of the main camera frustum
this.frustumLines = frustumLines;
// per-cascade helper objects; the pools are grown/shrunk lazily in update()
this.cascadeLines = [];
this.cascadePlanes = [];
this.shadowLines = [];

}

/**
 * This method must be called if one of the `display*` properties is changed at runtime.
 */
updateVisibility() {

const displayFrustum = this.displayFrustum;
const displayPlanes = this.displayPlanes;
const displayShadowBounds = this.displayShadowBounds;

const frustumLines = this.frustumLines;
const cascadeLines = this.cascadeLines;
const cascadePlanes = this.cascadePlanes;
const shadowLines = this.shadowLines;

for ( let i = 0, l = cascadeLines.length; i < l; i ++ ) {

const cascadeLine = cascadeLines[ i ];
const cascadePlane = cascadePlanes[ i ];
const shadowLineGroup = shadowLines[ i ];

cascadeLine.visible = displayFrustum;
// planes are only meaningful while the frustum itself is shown
cascadePlane.visible = displayFrustum && displayPlanes;
shadowLineGroup.visible = displayShadowBounds;

}

frustumLines.visible = displayFrustum;

}

/**
 * Updates the helper. This method should be called in the app's animation loop.
 */
update() {

const csm = this.csm;
const camera = csm.camera;
const cascades = csm.cascades;
const mainFrustum = csm.mainFrustum;
const frustums = csm.frustums;
const lights = csm.lights;

const frustumLines = this.frustumLines;
const frustumLinePositions = frustumLines.geometry.getAttribute( 'position' );

const cascadeLines = this.cascadeLines;
const cascadePlanes = this.cascadePlanes;
const shadowLines = this.shadowLines;

// with CSMShadowNode the camera may not have been assigned yet
if ( camera === null ) return;

// follow the scene camera so the view-space frustum vertices can be
// written into child geometry directly
this.position.copy( camera.position );
this.quaternion.copy( camera.quaternion );
this.scale.copy( camera.scale );
this.updateMatrixWorld( true );

// shrink the helper pools if the cascade count decreased ...
while ( cascadeLines.length > cascades ) {

this.remove( cascadeLines.pop() );
this.remove( cascadePlanes.pop() );
this.remove( shadowLines.pop() );

}

// ... and grow them if it increased
while ( cascadeLines.length < cascades ) {

const cascadeLine = new Box3Helper( new Box3(), 0xffffff );

const planeMat = new MeshBasicMaterial( { transparent: true, opacity: 0.1, depthWrite: false, side: DoubleSide } );
const cascadePlane = new Mesh( new PlaneGeometry(), planeMat );

const shadowLineGroup = new Group();
const shadowLine = new Box3Helper( new Box3(), 0xffff00 );
shadowLineGroup.add( shadowLine );

this.add( cascadeLine );
this.add( cascadePlane );
this.add( shadowLineGroup );

cascadeLines.push( cascadeLine );
cascadePlanes.push( cascadePlane );
shadowLines.push( shadowLineGroup );

}

for ( let i = 0; i < cascades; i ++ ) {

const frustum = frustums[ i ];
const light = lights[ i ];
const shadowCam = light.shadow.camera;
const farVerts = frustum.vertices.far;

const cascadeLine = cascadeLines[ i ];
const cascadePlane = cascadePlanes[ i ];
const shadowLineGroup = shadowLines[ i ];
const shadowLine = shadowLineGroup.children[ 0 ];

// box outlining the cascade's far plane (corners 0 and 2 are diagonal)
cascadeLine.box.min.copy( farVerts[ 2 ] );
cascadeLine.box.max.copy( farVerts[ 0 ] );
// give the box a tiny depth so it remains a valid Box3
cascadeLine.box.max.z += 1e-4;

// translucent quad centered on the cascade's far plane
cascadePlane.position.addVectors( farVerts[ 0 ], farVerts[ 2 ] );
cascadePlane.position.multiplyScalar( 0.5 );
cascadePlane.scale.subVectors( farVerts[ 0 ], farVerts[ 2 ] );
cascadePlane.scale.z = 1e-4;

// temporarily detach the group, pose it in world space from the shadow
// camera, then re-attach so it keeps that world transform as a child
this.remove( shadowLineGroup );
shadowLineGroup.position.copy( shadowCam.position );
shadowLineGroup.quaternion.copy( shadowCam.quaternion );
shadowLineGroup.scale.copy( shadowCam.scale );
shadowLineGroup.updateMatrixWorld( true );
this.attach( shadowLineGroup );

// NOTE(review): x/y extents are swapped here (bottom/left, top/right);
// harmless for CSM since its shadow bounds are square — confirm if reused
shadowLine.box.min.set( shadowCam.bottom, shadowCam.left, - shadowCam.far );
shadowLine.box.max.set( shadowCam.top, shadowCam.right, - shadowCam.near );

}

// write the main frustum corners into the wireframe geometry
const nearVerts = mainFrustum.vertices.near;
const farVerts = mainFrustum.vertices.far;
frustumLinePositions.setXYZ( 0, farVerts[ 0 ].x, farVerts[ 0 ].y, farVerts[ 0 ].z );
frustumLinePositions.setXYZ( 1, farVerts[ 3 ].x, farVerts[ 3 ].y, farVerts[ 3 ].z );
frustumLinePositions.setXYZ( 2, farVerts[ 2 ].x, farVerts[ 2 ].y, farVerts[ 2 ].z );
frustumLinePositions.setXYZ( 3, farVerts[ 1 ].x, farVerts[ 1 ].y, farVerts[ 1 ].z );
frustumLinePositions.setXYZ( 4, nearVerts[ 0 ].x, nearVerts[ 0 ].y, nearVerts[ 0 ].z );
frustumLinePositions.setXYZ( 5, nearVerts[ 3 ].x, nearVerts[ 3 ].y, nearVerts[ 3 ].z );
frustumLinePositions.setXYZ( 6, nearVerts[ 2 ].x, nearVerts[ 2 ].y, nearVerts[ 2 ].z );
frustumLinePositions.setXYZ( 7, nearVerts[ 1 ].x, nearVerts[ 1 ].y, nearVerts[ 1 ].z );
frustumLinePositions.needsUpdate = true;

}

/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
dispose() {

const frustumLines = this.frustumLines;
const cascadeLines = this.cascadeLines;
const cascadePlanes = this.cascadePlanes;
const shadowLines = this.shadowLines;

frustumLines.geometry.dispose();
frustumLines.material.dispose();

const cascades = this.csm.cascades;

// dispose the lazily created per-cascade helpers
for ( let i = 0; i < cascades; i ++ ) {

const cascadeLine = cascadeLines[ i ];
const cascadePlane = cascadePlanes[ i ];
const shadowLineGroup = shadowLines[ i ];
const shadowLine = shadowLineGroup.children[ 0 ];

cascadeLine.dispose(); // Box3Helper

cascadePlane.geometry.dispose();
cascadePlane.material.dispose();

shadowLine.dispose(); // Box3Helper

}

}

}
export { CSMHelper };

307
node_modules/three/examples/jsm/csm/CSMShader.js generated vendored Normal file
View File

@@ -0,0 +1,307 @@
import { ShaderChunk } from 'three';
/**
* @module CSMShader
* @three_import import { CSMShader } from 'three/addons/csm/CSMShader.js';
*/
/**
 * The object that holds the GLSL enhancements to enable CSM. This
 * code is injected into the built-in material shaders by {@link CSM}.
 *
 * Both chunks replace/extend the built-in `ShaderChunk` entries of the same
 * name. The GLSL bodies must stay byte-identical to the shipped shaders.
 *
 * @type {Object}
 */
const CSMShader = {

// Replacement for the built-in `lights_fragment_begin` chunk: identical to the
// stock version except that directional lights are shadowed per cascade,
// selected via the `CSM_cascades` uniform and the fragment's linear depth.
lights_fragment_begin: /* glsl */`
vec3 geometryPosition = - vViewPosition;
vec3 geometryNormal = normal;
vec3 geometryViewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( vViewPosition );
vec3 geometryClearcoatNormal = vec3( 0.0 );
#ifdef USE_CLEARCOAT
geometryClearcoatNormal = clearcoatNormal;
#endif
#ifdef USE_IRIDESCENCE
float dotNVi = saturate( dot( normal, geometryViewDir ) );
if ( material.iridescenceThickness == 0.0 ) {
material.iridescence = 0.0;
} else {
material.iridescence = saturate( material.iridescence );
}
if ( material.iridescence > 0.0 ) {
material.iridescenceFresnel = evalIridescence( 1.0, material.iridescenceIOR, dotNVi, material.iridescenceThickness, material.specularColor );
// Iridescence F0 approximation
material.iridescenceF0 = Schlick_to_F0( material.iridescenceFresnel, 1.0, dotNVi );
}
#endif
IncidentLight directLight;
#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )
PointLight pointLight;
#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0
PointLightShadow pointLightShadow;
#endif
#pragma unroll_loop_start
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
pointLight = pointLights[ i ];
getPointLightInfo( pointLight, geometryPosition, directLight );
#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS )
pointLightShadow = pointLightShadows[ i ];
directLight.color *= ( directLight.visible && receiveShadow ) ? getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowIntensity, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;
#endif
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
}
#pragma unroll_loop_end
#endif
#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )
SpotLight spotLight;
vec4 spotColor;
vec3 spotLightCoord;
bool inSpotLightMap;
#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0
SpotLightShadow spotLightShadow;
#endif
#pragma unroll_loop_start
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
spotLight = spotLights[ i ];
getSpotLightInfo( spotLight, geometryPosition, directLight );
// spot lights are ordered [shadows with maps, shadows without maps, maps without shadows, none]
#if ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )
#define SPOT_LIGHT_MAP_INDEX UNROLLED_LOOP_INDEX
#elif ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )
#define SPOT_LIGHT_MAP_INDEX NUM_SPOT_LIGHT_MAPS
#else
#define SPOT_LIGHT_MAP_INDEX ( UNROLLED_LOOP_INDEX - NUM_SPOT_LIGHT_SHADOWS + NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )
#endif
#if ( SPOT_LIGHT_MAP_INDEX < NUM_SPOT_LIGHT_MAPS )
spotLightCoord = vSpotLightCoord[ i ].xyz / vSpotLightCoord[ i ].w;
inSpotLightMap = all( lessThan( abs( spotLightCoord * 2. - 1. ), vec3( 1.0 ) ) );
spotColor = texture2D( spotLightMap[ SPOT_LIGHT_MAP_INDEX ], spotLightCoord.xy );
directLight.color = inSpotLightMap ? directLight.color * spotColor.rgb : directLight.color;
#endif
#undef SPOT_LIGHT_MAP_INDEX
#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )
spotLightShadow = spotLightShadows[ i ];
directLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowIntensity, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;
#endif
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
}
#pragma unroll_loop_end
#endif
#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct ) && defined( USE_CSM ) && defined( CSM_CASCADES )
DirectionalLight directionalLight;
float linearDepth = (vViewPosition.z) / (shadowFar - cameraNear);
#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0
DirectionalLightShadow directionalLightShadow;
#endif
#if defined( USE_SHADOWMAP ) && defined( CSM_FADE )
vec2 cascade;
float cascadeCenter;
float closestEdge;
float margin;
float csmx;
float csmy;
#pragma unroll_loop_start
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
getDirectionalLightInfo( directionalLight, directLight );
#if ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )
// NOTE: Depth gets larger away from the camera.
// cascade.x is closer, cascade.y is further
cascade = CSM_cascades[ i ];
cascadeCenter = ( cascade.x + cascade.y ) / 2.0;
closestEdge = linearDepth < cascadeCenter ? cascade.x : cascade.y;
margin = 0.25 * pow( closestEdge, 2.0 );
csmx = cascade.x - margin / 2.0;
csmy = cascade.y + margin / 2.0;
if( linearDepth >= csmx && ( linearDepth < csmy || UNROLLED_LOOP_INDEX == CSM_CASCADES - 1 ) ) {
float dist = min( linearDepth - csmx, csmy - linearDepth );
float ratio = clamp( dist / margin, 0.0, 1.0 );
vec3 prevColor = directLight.color;
directionalLightShadow = directionalLightShadows[ i ];
directLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
bool shouldFadeLastCascade = UNROLLED_LOOP_INDEX == CSM_CASCADES - 1 && linearDepth > cascadeCenter;
directLight.color = mix( prevColor, directLight.color, shouldFadeLastCascade ? ratio : 1.0 );
ReflectedLight prevLight = reflectedLight;
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
bool shouldBlend = UNROLLED_LOOP_INDEX != CSM_CASCADES - 1 || UNROLLED_LOOP_INDEX == CSM_CASCADES - 1 && linearDepth < cascadeCenter;
float blendRatio = shouldBlend ? ratio : 1.0;
reflectedLight.directDiffuse = mix( prevLight.directDiffuse, reflectedLight.directDiffuse, blendRatio );
reflectedLight.directSpecular = mix( prevLight.directSpecular, reflectedLight.directSpecular, blendRatio );
reflectedLight.indirectDiffuse = mix( prevLight.indirectDiffuse, reflectedLight.indirectDiffuse, blendRatio );
reflectedLight.indirectSpecular = mix( prevLight.indirectSpecular, reflectedLight.indirectSpecular, blendRatio );
}
#endif
}
#pragma unroll_loop_end
#elif defined (USE_SHADOWMAP)
#pragma unroll_loop_start
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
getDirectionalLightInfo( directionalLight, directLight );
#if ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )
directionalLightShadow = directionalLightShadows[ i ];
if(linearDepth >= CSM_cascades[UNROLLED_LOOP_INDEX].x && linearDepth < CSM_cascades[UNROLLED_LOOP_INDEX].y) directLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
if(linearDepth >= CSM_cascades[UNROLLED_LOOP_INDEX].x && (linearDepth < CSM_cascades[UNROLLED_LOOP_INDEX].y || UNROLLED_LOOP_INDEX == CSM_CASCADES - 1)) RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
#endif
}
#pragma unroll_loop_end
#elif ( NUM_DIR_LIGHT_SHADOWS > 0 )
// note: no loop here - all CSM lights are in fact one light only
getDirectionalLightInfo( directionalLights[0], directLight );
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
#endif
#if ( NUM_DIR_LIGHTS > NUM_DIR_LIGHT_SHADOWS)
// compute the lights not casting shadows (if any)
#pragma unroll_loop_start
for ( int i = NUM_DIR_LIGHT_SHADOWS; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
getDirectionalLightInfo( directionalLight, directLight );
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
}
#pragma unroll_loop_end
#endif
#endif
#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct ) && !defined( USE_CSM ) && !defined( CSM_CASCADES )
DirectionalLight directionalLight;
#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0
DirectionalLightShadow directionalLightShadow;
#endif
#pragma unroll_loop_start
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
getDirectionalLightInfo( directionalLight, directLight );
#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )
directionalLightShadow = directionalLightShadows[ i ];
directLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
#endif
RE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
}
#pragma unroll_loop_end
#endif
#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )
RectAreaLight rectAreaLight;
#pragma unroll_loop_start
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
rectAreaLight = rectAreaLights[ i ];
RE_Direct_RectArea( rectAreaLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );
}
#pragma unroll_loop_end
#endif
#if defined( RE_IndirectDiffuse )
vec3 iblIrradiance = vec3( 0.0 );
vec3 irradiance = getAmbientLightIrradiance( ambientLightColor );
#if defined( USE_LIGHT_PROBES )
irradiance += getLightProbeIrradiance( lightProbe, geometryNormal );
#endif
#if ( NUM_HEMI_LIGHTS > 0 )
#pragma unroll_loop_start
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
irradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometryNormal );
}
#pragma unroll_loop_end
#endif
#endif
#if defined( RE_IndirectSpecular )
vec3 radiance = vec3( 0.0 );
vec3 clearcoatRadiance = vec3( 0.0 );
#endif
`,

// Prepends the CSM uniform declarations to the stock `lights_pars_begin` chunk.
lights_pars_begin: /* glsl */`
#if defined( USE_CSM ) && defined( CSM_CASCADES )
uniform vec2 CSM_cascades[CSM_CASCADES];
uniform float cameraNear;
uniform float shadowFar;
#endif
` + ShaderChunk.lights_pars_begin
};
export { CSMShader };

599
node_modules/three/examples/jsm/csm/CSMShadowNode.js generated vendored Normal file
View File

@@ -0,0 +1,599 @@
import {
Vector2,
Vector3,
MathUtils,
Matrix4,
Box3,
Object3D,
WebGLCoordinateSystem,
ShadowBaseNode
} from 'three/webgpu';
import { CSMFrustum } from './CSMFrustum.js';
import { viewZToOrthographicDepth, reference, uniform, float, vec4, vec2, If, Fn, min, renderGroup, positionView, shadow } from 'three/tsl';
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new CSMFrustum();
const _center = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
const _lightDirection = new Vector3();
const _lightOrientationMatrix = new Matrix4();
const _lightOrientationMatrixInverse = new Matrix4();
const _up = new Vector3( 0, 1, 0 );
// Lightweight stand-in for a directional light: CSMShadowNode only needs an
// Object3D that carries a `target` per cascade, not a full light instance.
class LwLight extends Object3D {

constructor() {

super();

this.target = new Object3D();

}

}
/**
* An implementation of Cascade Shadow Maps (CSM).
*
* This module can only be used with {@link WebGPURenderer}. When using {@link WebGLRenderer},
* use {@link CSM} instead.
*
* @augments ShadowBaseNode
* @three_import import { CSMShadowNode } from 'three/addons/csm/CSMShadowNode.js';
*/
class CSMShadowNode extends ShadowBaseNode {
/**
 * Constructs a new CSM shadow node.
 *
 * @param {DirectionalLight} light - The CSM light.
 * @param {CSMShadowNode~Data} [data={}] - The CSM data.
 */
constructor( light, data = {} ) {

super( light );

/**
 * The scene's camera. Assigned in `_init()` from the node builder.
 *
 * @type {?Camera}
 * @default null
 */
this.camera = null;

/**
 * The number of cascades.
 *
 * @type {number}
 * @default 3
 */
this.cascades = data.cascades || 3;

/**
 * The maximum far value.
 *
 * @type {number}
 * @default 100000
 */
this.maxFar = data.maxFar || 100000;

/**
 * The frustum split mode.
 *
 * @type {('practical'|'uniform'|'logarithmic'|'custom')}
 * @default 'practical'
 */
this.mode = data.mode || 'practical';

/**
 * The light margin.
 *
 * @type {number}
 * @default 200
 */
this.lightMargin = data.lightMargin || 200;

/**
 * Custom split callback when using `mode='custom'`.
 *
 * @type {Function}
 */
this.customSplitsCallback = data.customSplitsCallback;

/**
 * Whether to fade between cascades or not.
 *
 * @type {boolean}
 * @default false
 */
this.fade = false;

/**
 * An array of numbers in the range `[0,1]` that defines how the
 * main CSM frustum should be split up.
 *
 * @type {Array<number>}
 */
this.breaks = [];

// Per-cascade break ranges as Vector2 instances (filled in _init()).
this._cascades = [];

/**
 * The main frustum. Created in `_init()` once the renderer's
 * coordinate system is known.
 *
 * @type {?CSMFrustum}
 * @default null
 */
this.mainFrustum = null;

/**
 * An array of frustums representing the cascades.
 *
 * @type {Array<CSMFrustum>}
 */
this.frustums = [];

/**
 * An array of directional lights which cast the shadows for
 * the different cascades. There is one directional light for each
 * cascade.
 *
 * @type {Array<DirectionalLight>}
 */
this.lights = [];

// One TSL shadow node per cascade light (created in _init()).
this._shadowNodes = [];

}
/**
* Inits the CSM shadow node.
*
* @private
* @param {NodeBuilder} builder - The node builder.
*/
_init( { camera, renderer } ) {
this.camera = camera;
const data = { webGL: renderer.coordinateSystem === WebGLCoordinateSystem };
this.mainFrustum = new CSMFrustum( data );
const light = this.light;
for ( let i = 0; i < this.cascades; i ++ ) {
const lwLight = new LwLight();
lwLight.castShadow = true;
const lShadow = light.shadow.clone();
lShadow.bias = lShadow.bias * ( i + 1 );
this.lights.push( lwLight );
lwLight.shadow = lShadow;
this._shadowNodes.push( shadow( lwLight, lShadow ) );
this._cascades.push( new Vector2() );
}
this.updateFrustums();
}
/**
* Inits the cascades according to the scene's camera and breaks configuration.
*
* @private
*/
_initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
/**
* Computes the breaks of this CSM instance based on the scene's camera, number of cascades
* and the selected split mode.
*
* @private
*/
_getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near + ( far - near ) * i / amount ) / far );
}
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
/**
* Sets the light breaks.
*
* @private
*/
_setLightBreaks() {
for ( let i = 0, l = this.cascades; i < l; i ++ ) {
const amount = this.breaks[ i ];
const prev = this.breaks[ i - 1 ] || 0;
this._cascades[ i ].set( prev, amount );
}
}
/**
* Updates the shadow bounds of this CSM instance.
*
* @private
*/
_updateShadowBounds() {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const shadowCam = this.lights[ i ].shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent that furthest points on the frustum assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near );
squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
/**
* Applications must call this method every time they change camera or CSM settings.
*/
updateFrustums() {
this._getBreaks();
this._initCascades();
this._updateShadowBounds();
this._setLightBreaks();
}
/**
* Setups the TSL when using fading.
*
* @private
* @return {ShaderCallNodeInternal}
*/
_setupFade() {
const cameraNear = reference( 'camera.near', 'float', this ).setGroup( renderGroup );
const cascades = reference( '_cascades', 'vec2', this ).setGroup( renderGroup ).setName( 'cascades' );
const shadowFar = uniform( 'float' ).setGroup( renderGroup ).setName( 'shadowFar' )
.onRenderUpdate( () => Math.min( this.maxFar, this.camera.far ) );
const linearDepth = viewZToOrthographicDepth( positionView.z, cameraNear, shadowFar ).toVar( 'linearDepth' );
const lastCascade = this.cascades - 1;
return Fn( ( builder ) => {
this.setupShadowPosition( builder );
const ret = vec4( 1, 1, 1, 1 ).toVar( 'shadowValue' );
const cascade = vec2().toVar( 'cascade' );
const cascadeCenter = float().toVar( 'cascadeCenter' );
const margin = float().toVar( 'margin' );
const csmX = float().toVar( 'csmX' );
const csmY = float().toVar( 'csmY' );
for ( let i = 0; i < this.cascades; i ++ ) {
const isLastCascade = i === lastCascade;
cascade.assign( cascades.element( i ) );
cascadeCenter.assign( cascade.x.add( cascade.y ).div( 2.0 ) );
const closestEdge = linearDepth.lessThan( cascadeCenter ).select( cascade.x, cascade.y );
margin.assign( float( 0.25 ).mul( closestEdge.pow( 2.0 ) ) );
csmX.assign( cascade.x.sub( margin.div( 2.0 ) ) );
if ( isLastCascade ) {
csmY.assign( cascade.y );
} else {
csmY.assign( cascade.y.add( margin.div( 2.0 ) ) );
}
const inRange = linearDepth.greaterThanEqual( csmX ).and( linearDepth.lessThanEqual( csmY ) );
If( inRange, () => {
const dist = min( linearDepth.sub( csmX ), csmY.sub( linearDepth ) ).toVar();
let ratio = dist.div( margin ).clamp( 0.0, 1.0 );
if ( i === 0 ) {
// don't fade at nearest edge
ratio = linearDepth.greaterThan( cascadeCenter ).select( ratio, 1 );
}
ret.subAssign( this._shadowNodes[ i ].oneMinus().mul( ratio ) );
} );
}
return ret;
} )();
}
/**
* Setups the TSL when no fading (default).
*
* @private
* @return {ShaderCallNodeInternal}
*/
_setupStandard() {
const cameraNear = reference( 'camera.near', 'float', this ).setGroup( renderGroup );
const cascades = reference( '_cascades', 'vec2', this ).setGroup( renderGroup ).setName( 'cascades' );
const shadowFar = uniform( 'float' ).setGroup( renderGroup ).setName( 'shadowFar' )
.onRenderUpdate( () => Math.min( this.maxFar, this.camera.far ) );
const linearDepth = viewZToOrthographicDepth( positionView.z, cameraNear, shadowFar ).toVar( 'linearDepth' );
return Fn( ( builder ) => {
this.setupShadowPosition( builder );
const ret = vec4( 1, 1, 1, 1 ).toVar( 'shadowValue' );
const cascade = vec2().toVar( 'cascade' );
for ( let i = 0; i < this.cascades; i ++ ) {
cascade.assign( cascades.element( i ) );
If( linearDepth.greaterThanEqual( cascade.x ).and( linearDepth.lessThanEqual( cascade.y ) ), () => {
ret.assign( this._shadowNodes[ i ] );
} );
}
return ret;
} )();
}
setup( builder ) {
if ( this.camera === null ) this._init( builder );
return this.fade === true ? this._setupFade() : this._setupStandard();
}
updateBefore( /*builder*/ ) {
const light = this.light;
const parent = light.parent;
const camera = this.camera;
const frustums = this.frustums;
// make sure the placeholder light objects which represent the
// multiple cascade shadow casters are part of the scene graph
for ( let i = 0; i < this.lights.length; i ++ ) {
const lwLight = this.lights[ i ];
if ( lwLight.parent === null ) {
parent.add( lwLight.target );
parent.add( lwLight );
}
}
_lightDirection.subVectors( light.target.position, light.position ).normalize();
// for each frustum we need to find its min-max box aligned with the light orientation
// the position in _lightOrientationMatrix does not matter, as we transform there and back
_lightOrientationMatrix.lookAt( light.position, light.target.position, _up );
_lightOrientationMatrixInverse.copy( _lightOrientationMatrix ).invert();
for ( let i = 0; i < frustums.length; i ++ ) {
const lwLight = this.lights[ i ];
const shadow = lwLight.shadow;
const shadowCam = shadow.camera;
const texelWidth = ( shadowCam.right - shadowCam.left ) / shadow.mapSize.width;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / shadow.mapSize.height;
_cameraToLightMatrix.multiplyMatrices( _lightOrientationMatrixInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {
_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );
}
_bbox.getCenter( _center );
_center.z = _bbox.max.z + this.lightMargin;
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
_center.applyMatrix4( _lightOrientationMatrix );
lwLight.position.copy( _center );
lwLight.target.position.copy( _center );
lwLight.target.position.add( _lightDirection );
}
}
/**
* Frees the GPU-related resources allocated by this instance. Call this
* method whenever this instance is no longer used in your app.
*/
dispose() {
for ( let i = 0; i < this.lights.length; i ++ ) {
const light = this.lights[ i ];
const parent = light.parent;
parent.remove( light.target );
parent.remove( light );
}
super.dispose();
}
}
/**
* Constructor data of `CSMShadowNode`.
*
* @typedef {Object} CSMShadowNode~Data
* @property {number} [cascades=3] - The number of cascades.
* @property {number} [maxFar=100000] - The maximum far value.
* @property {('practical'|'uniform'|'logarithmic'|'custom')} [mode='practical'] - The frustum split mode.
* @property {Function} [customSplitsCallback] - Custom split callback when using `mode='custom'`.
* @property {number} [lightMargin=200] - The light margin.
**/
export { CSMShadowNode };

694
node_modules/three/examples/jsm/curves/CurveExtras.js generated vendored Normal file
View File

@@ -0,0 +1,694 @@
import {
Curve,
Vector3
} from 'three';
/**
* A bunch of parametric curves
*
* Formulas collected from various sources
* http://mathworld.wolfram.com/HeartCurve.html
* http://en.wikipedia.org/wiki/Viviani%27s_curve
* http://www.mi.sanu.ac.rs/vismath/taylorapril2011/Taylor.pdf
* https://prideout.net/blog/old/blog/index.html@p=44.html
*/
/**
* A Granny Knot curve.
*
* @augments Curve
* @three_import import { GrannyKnot } from 'three/addons/curves/CurveExtras.js';
*/
class GrannyKnot extends Curve {

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		// map [0,1] onto one full revolution
		const angle = 2 * Math.PI * t;

		const x = - 0.22 * Math.cos( angle ) - 1.28 * Math.sin( angle ) - 0.44 * Math.cos( 3 * angle ) - 0.78 * Math.sin( 3 * angle );
		const y = - 0.1 * Math.cos( 2 * angle ) - 0.27 * Math.sin( 2 * angle ) + 0.38 * Math.cos( 4 * angle ) + 0.46 * Math.sin( 4 * angle );
		const z = 0.7 * Math.cos( 3 * angle ) - 0.4 * Math.sin( 3 * angle );

		return optionalTarget.set( x, y, z ).multiplyScalar( 20 );

	}

}
/**
* A heart curve.
*
* @augments Curve
* @three_import import { HeartCurve } from 'three/addons/curves/CurveExtras.js';
*/
class HeartCurve extends Curve {

	/**
	 * Constructs a new heart curve.
	 *
	 * @param {number} [scale=5] - The curve's scale.
	 */
	constructor( scale = 5 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 5
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 * The heart lies in the XY plane (z is always 0).
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const angle = t * 2 * Math.PI;

		return optionalTarget.set(
			16 * Math.pow( Math.sin( angle ), 3 ),
			13 * Math.cos( angle ) - 5 * Math.cos( 2 * angle ) - 2 * Math.cos( 3 * angle ) - Math.cos( 4 * angle ),
			0
		).multiplyScalar( this.scale );

	}

}
/**
* A Viviani curve.
*
* @augments Curve
* @three_import import { VivianiCurve } from 'three/addons/curves/CurveExtras.js';
*/
class VivianiCurve extends Curve {

	/**
	 * Constructs a new Viviani curve.
	 *
	 * @param {number} [scale=70] - The curve's scale.
	 */
	constructor( scale = 70 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 70
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		// map [0,1] to [0, 4π] — the curve needs two turns to close
		const angle = t * 4 * Math.PI;
		const radius = this.scale / 2;

		return optionalTarget.set(
			radius * ( 1 + Math.cos( angle ) ),
			radius * Math.sin( angle ),
			2 * radius * Math.sin( angle / 2 )
		);

	}

}
/**
* A knot curve.
*
* @augments Curve
* @three_import import { KnotCurve } from 'three/addons/curves/CurveExtras.js';
*/
class KnotCurve extends Curve {

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const angle = t * 2 * Math.PI;

		const R = 10; // ring radius
		const s = 50; // overall size

		const ring = R + s * Math.cos( angle );

		return optionalTarget.set(
			s * Math.sin( angle ),
			Math.cos( angle ) * ring,
			Math.sin( angle ) * ring
		);

	}

}
/**
* A helix curve.
*
* @augments Curve
* @three_import import { HelixCurve } from 'three/addons/curves/CurveExtras.js';
*/
class HelixCurve extends Curve {

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const radius = 30;
		const height = 150;

		// one revolution per 30 units of height
		const angle = 2 * Math.PI * t * height / 30;

		return optionalTarget.set(
			Math.cos( angle ) * radius,
			Math.sin( angle ) * radius,
			height * t
		);

	}

}
/**
* A Trefoil Knot.
*
* @augments Curve
* @three_import import { TrefoilKnot } from 'three/addons/curves/CurveExtras.js';
*/
class TrefoilKnot extends Curve {

	/**
	 * Constructs a new Trefoil Knot.
	 *
	 * @param {number} [scale=10] - The curve's scale.
	 */
	constructor( scale = 10 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 10
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const angle = t * Math.PI * 2;
		const tube = 2 + Math.cos( 3 * angle );

		return optionalTarget.set(
			tube * Math.cos( 2 * angle ),
			tube * Math.sin( 2 * angle ),
			Math.sin( 3 * angle )
		).multiplyScalar( this.scale );

	}

}
/**
* A torus knot.
*
* @augments Curve
* @three_import import { TorusKnot } from 'three/addons/curves/CurveExtras.js';
*/
class TorusKnot extends Curve {

	/**
	 * Constructs a new torus knot.
	 *
	 * @param {number} [scale=10] - The curve's scale.
	 */
	constructor( scale = 10 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 10
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const p = 3; // windings around the torus axis
		const q = 4; // windings around the tube

		const angle = t * Math.PI * 2;
		const tube = 2 + Math.cos( q * angle );

		return optionalTarget.set(
			tube * Math.cos( p * angle ),
			tube * Math.sin( p * angle ),
			Math.sin( q * angle )
		).multiplyScalar( this.scale );

	}

}
/**
* A Cinquefoil Knot.
*
* @augments Curve
* @three_import import { CinquefoilKnot } from 'three/addons/curves/CurveExtras.js';
*/
class CinquefoilKnot extends Curve {

	/**
	 * Constructs a new Cinquefoil Knot.
	 *
	 * @param {number} [scale=10] - The curve's scale.
	 */
	constructor( scale = 10 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 10
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const p = 2; // windings around the torus axis
		const q = 5; // windings around the tube

		const angle = t * Math.PI * 2;
		const tube = 2 + Math.cos( q * angle );

		return optionalTarget.set(
			tube * Math.cos( p * angle ),
			tube * Math.sin( p * angle ),
			Math.sin( q * angle )
		).multiplyScalar( this.scale );

	}

}
/**
* A Trefoil Polynomial Knot.
*
* @augments Curve
* @three_import import { TrefoilPolynomialKnot } from 'three/addons/curves/CurveExtras.js';
*/
class TrefoilPolynomialKnot extends Curve {

	/**
	 * Constructs a new Trefoil Polynomial Knot.
	 *
	 * @param {number} [scale=10] - The curve's scale.
	 */
	constructor( scale = 10 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 10
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		// map [0,1] to the parameter range [-2,2]
		const u = t * 4 - 2;

		return optionalTarget.set(
			Math.pow( u, 3 ) - 3 * u,
			Math.pow( u, 4 ) - 4 * u * u,
			1 / 5 * Math.pow( u, 5 ) - 2 * u
		).multiplyScalar( this.scale );

	}

}
// Linearly maps t in [0,1] onto the interval [x,y].
function scaleTo( x, y, t ) {

	return t * ( y - x ) + x;

}
/**
* A Figure Eight Polynomial Knot.
*
* @augments Curve
* @three_import import { FigureEightPolynomialKnot } from 'three/addons/curves/CurveExtras.js';
*/
class FigureEightPolynomialKnot extends Curve {

	/**
	 * Constructs a new Figure Eight Polynomial Knot.
	 *
	 * @param {number} [scale=1] - The curve's scale.
	 */
	constructor( scale = 1 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.scale = scale;

	}

	/**
	 * Returns a point in 3D space on the curve for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		// map [0,1] to the parameter range [-4,4]
		const u = scaleTo( - 4, 4, t );

		return optionalTarget.set(
			2 / 5 * u * ( u * u - 7 ) * ( u * u - 10 ),
			Math.pow( u, 4 ) - 13 * u * u,
			1 / 10 * u * ( u * u - 4 ) * ( u * u - 9 ) * ( u * u - 12 )
		).multiplyScalar( this.scale );

	}

}
/**
* A Decorated Torus Knot 4a.
*
* @augments Curve
* @three_import import { DecoratedTorusKnot4a } from 'three/addons/curves/CurveExtras.js';
*/
class DecoratedTorusKnot4a extends Curve {

	/**
	 * Constructs a new Decorated Torus Knot 4a.
	 *
	 * @param {number} [scale=40] - The curve's scale.
	 */
	constructor( scale = 40 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 40
		 */
		this.scale = scale;

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		t *= Math.PI * 2;

		const x = Math.cos( 2 * t ) * ( 1 + 0.6 * ( Math.cos( 5 * t ) + 0.75 * Math.cos( 10 * t ) ) );
		const y = Math.sin( 2 * t ) * ( 1 + 0.6 * ( Math.cos( 5 * t ) + 0.75 * Math.cos( 10 * t ) ) );
		const z = 0.35 * Math.sin( 5 * t );

		return point.set( x, y, z ).multiplyScalar( this.scale );

	}

}
/**
* A Decorated Torus Knot 4b.
*
* @augments Curve
* @three_import import { DecoratedTorusKnot4b } from 'three/addons/curves/CurveExtras.js';
*/
class DecoratedTorusKnot4b extends Curve {

	/**
	 * Constructs a new Decorated Torus Knot 4b.
	 *
	 * @param {number} [scale=40] - The curve's scale.
	 */
	constructor( scale = 40 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 40
		 */
		this.scale = scale;

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		const fi = t * Math.PI * 2;

		const x = Math.cos( 2 * fi ) * ( 1 + 0.45 * Math.cos( 3 * fi ) + 0.4 * Math.cos( 9 * fi ) );
		const y = Math.sin( 2 * fi ) * ( 1 + 0.45 * Math.cos( 3 * fi ) + 0.4 * Math.cos( 9 * fi ) );
		const z = 0.2 * Math.sin( 9 * fi );

		return point.set( x, y, z ).multiplyScalar( this.scale );

	}

}
/**
* A Decorated Torus Knot 5a.
*
* @augments Curve
* @three_import import { DecoratedTorusKnot5a } from 'three/addons/curves/CurveExtras.js';
*/
class DecoratedTorusKnot5a extends Curve {

	/**
	 * Constructs a new Decorated Torus Knot 5a.
	 *
	 * @param {number} [scale=40] - The curve's scale.
	 */
	constructor( scale = 40 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 40
		 */
		this.scale = scale;

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		const fi = t * Math.PI * 2;

		const x = Math.cos( 3 * fi ) * ( 1 + 0.3 * Math.cos( 5 * fi ) + 0.5 * Math.cos( 10 * fi ) );
		const y = Math.sin( 3 * fi ) * ( 1 + 0.3 * Math.cos( 5 * fi ) + 0.5 * Math.cos( 10 * fi ) );
		const z = 0.2 * Math.sin( 20 * fi );

		return point.set( x, y, z ).multiplyScalar( this.scale );

	}

}
/**
* A Decorated Torus Knot 5c.
*
* @augments Curve
* @three_import import { DecoratedTorusKnot5c } from 'three/addons/curves/CurveExtras.js';
*/
class DecoratedTorusKnot5c extends Curve {

	/**
	 * Constructs a new Decorated Torus Knot 5c.
	 *
	 * @param {number} [scale=40] - The curve's scale.
	 */
	constructor( scale = 40 ) {

		super();

		/**
		 * The curve's scale.
		 *
		 * @type {number}
		 * @default 40
		 */
		this.scale = scale;

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		const fi = t * Math.PI * 2;

		const x = Math.cos( 4 * fi ) * ( 1 + 0.5 * ( Math.cos( 5 * fi ) + 0.4 * Math.cos( 20 * fi ) ) );
		const y = Math.sin( 4 * fi ) * ( 1 + 0.5 * ( Math.cos( 5 * fi ) + 0.4 * Math.cos( 20 * fi ) ) );
		const z = 0.35 * Math.sin( 15 * fi );

		return point.set( x, y, z ).multiplyScalar( this.scale );

	}

}
export {
GrannyKnot,
HeartCurve,
VivianiCurve,
KnotCurve,
HelixCurve,
TrefoilKnot,
TorusKnot,
CinquefoilKnot,
TrefoilPolynomialKnot,
FigureEightPolynomialKnot,
DecoratedTorusKnot4a,
DecoratedTorusKnot4b,
DecoratedTorusKnot5a,
DecoratedTorusKnot5c
};

155
node_modules/three/examples/jsm/curves/NURBSCurve.js generated vendored Normal file
View File

@@ -0,0 +1,155 @@
import {
Curve,
Vector3,
Vector4
} from 'three';
import * as NURBSUtils from '../curves/NURBSUtils.js';
/**
* This class represents a NURBS curve.
*
* Implementation is based on `(x, y [, z=0 [, w=1]])` control points with `w=weight`.
*
* @augments Curve
* @three_import import { NURBSCurve } from 'three/addons/curves/NURBSCurve.js';
*/
class NURBSCurve extends Curve {

	/**
	 * Constructs a new NURBS curve.
	 *
	 * @param {number} degree - The NURBS degree.
	 * @param {Array<number>} knots - The knots as a flat array of numbers.
	 * @param {Array<Vector2|Vector3|Vector4>} controlPoints - An array holding control points.
	 * @param {number} [startKnot] - Index of the start knot into the `knots` array.
	 * @param {number} [endKnot] - Index of the end knot into the `knots` array.
	 */
	constructor( degree, knots, controlPoints, startKnot, endKnot ) {

		super();

		const knotsLength = knots ? knots.length - 1 : 0;
		const pointsLength = controlPoints ? controlPoints.length : 0;

		/**
		 * The NURBS degree.
		 *
		 * @type {number}
		 */
		this.degree = degree;

		/**
		 * The knots as a flat array of numbers.
		 *
		 * @type {Array<number>}
		 */
		this.knots = knots;

		/**
		 * An array of control points.
		 *
		 * @type {Array<Vector4>}
		 */
		this.controlPoints = [];

		/**
		 * Index of the start knot into the `knots` array.
		 *
		 * @type {number}
		 */
		this.startKnot = startKnot || 0;

		/**
		 * Index of the end knot into the `knots` array.
		 *
		 * @type {number}
		 */
		this.endKnot = endKnot || knotsLength;

		for ( let i = 0; i < pointsLength; ++ i ) {

			// ensure Vector4 for control points
			const point = controlPoints[ i ];
			this.controlPoints[ i ] = new Vector4( point.x, point.y, point.z, point.w );

		}

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor.
	 *
	 * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The position on the curve.
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		const u = this.knots[ this.startKnot ] + t * ( this.knots[ this.endKnot ] - this.knots[ this.startKnot ] ); // linear mapping t->u

		// following results in (wx, wy, wz, w) homogeneous point
		const hpoint = NURBSUtils.calcBSplinePoint( this.degree, this.knots, this.controlPoints, u );

		if ( hpoint.w !== 1.0 ) {

			// project to 3D space: (wx, wy, wz, w) -> (x, y, z, 1)
			hpoint.divideScalar( hpoint.w );

		}

		return point.set( hpoint.x, hpoint.y, hpoint.z );

	}

	/**
	 * Returns a unit vector tangent for the given interpolation factor.
	 *
	 * NOTE(review): unlike getPoint(), this maps `t` over the FULL knot vector
	 * and ignores `startKnot`/`endKnot` — confirm this asymmetry is intended.
	 *
	 * @param {number} t - The interpolation factor.
	 * @param {Vector3} [optionalTarget] - The optional target vector the result is written to.
	 * @return {Vector3} The tangent vector.
	 */
	getTangent( t, optionalTarget = new Vector3() ) {

		const tangent = optionalTarget;

		const u = this.knots[ 0 ] + t * ( this.knots[ this.knots.length - 1 ] - this.knots[ 0 ] );
		const ders = NURBSUtils.calcNURBSDerivatives( this.degree, this.knots, this.controlPoints, u, 1 );
		tangent.copy( ders[ 1 ] ).normalize();

		return tangent;

	}

	/**
	 * Serializes this curve into a plain JSON object (degree, knots,
	 * control points as flat arrays, start/end knot indices), extending
	 * the base curve serialization.
	 *
	 * @return {Object} The serialized curve data.
	 */
	toJSON() {

		const data = super.toJSON();

		data.degree = this.degree;
		data.knots = [ ...this.knots ];
		data.controlPoints = this.controlPoints.map( p => p.toArray() );
		data.startKnot = this.startKnot;
		data.endKnot = this.endKnot;

		return data;

	}

	/**
	 * Restores this curve from the given JSON object produced by `toJSON()`.
	 * Control points are rebuilt as Vector4 instances.
	 *
	 * @param {Object} json - The serialized curve data.
	 * @return {NURBSCurve} A reference to this curve.
	 */
	fromJSON( json ) {

		super.fromJSON( json );

		this.degree = json.degree;
		this.knots = [ ...json.knots ];
		this.controlPoints = json.controlPoints.map( p => new Vector4( p[ 0 ], p[ 1 ], p[ 2 ], p[ 3 ] ) );
		this.startKnot = json.startKnot;
		this.endKnot = json.endKnot;

		return this;

	}

}
export { NURBSCurve };

98
node_modules/three/examples/jsm/curves/NURBSSurface.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
import {
Vector4
} from 'three';
import * as NURBSUtils from '../curves/NURBSUtils.js';
/**
* This class represents a NURBS surface.
*
* Implementation is based on `(x, y [, z=0 [, w=1]])` control points with `w=weight`.
*
* @three_import import { NURBSSurface } from 'three/addons/curves/NURBSSurface.js';
*/
class NURBSSurface {

	/**
	 * Constructs a new NURBS surface.
	 *
	 * @param {number} degree1 - The first NURBS degree.
	 * @param {number} degree2 - The second NURBS degree.
	 * @param {Array<number>} knots1 - The first knots as a flat array of numbers.
	 * @param {Array<number>} knots2 - The second knots as a flat array of numbers.
	 * @param {Array<Array<Vector2|Vector3|Vector4>>} controlPoints - An array^2 holding control points.
	 */
	constructor( degree1, degree2, knots1, knots2, controlPoints ) {

		/**
		 * The first NURBS degree.
		 *
		 * @type {number}
		 */
		this.degree1 = degree1;

		/**
		 * The second NURBS degree.
		 *
		 * @type {number}
		 */
		this.degree2 = degree2;

		/**
		 * The first knots as a flat array of numbers.
		 *
		 * @type {Array<number>}
		 */
		this.knots1 = knots1;

		/**
		 * The second knots as a flat array of numbers.
		 *
		 * @type {Array<number>}
		 */
		this.knots2 = knots2;

		/**
		 * An array holding arrays of control points.
		 *
		 * @type {Array<Array<Vector2|Vector3|Vector4>>}
		 */
		this.controlPoints = [];

		const rows = knots1.length - degree1 - 1;
		const cols = knots2.length - degree2 - 1;

		// normalize every control point to a homogeneous Vector4
		for ( let i = 0; i < rows; ++ i ) {

			const row = [];

			for ( let j = 0; j < cols; ++ j ) {

				const p = controlPoints[ i ][ j ];
				row.push( new Vector4( p.x, p.y, p.z, p.w ) );

			}

			this.controlPoints[ i ] = row;

		}

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factor. This vector lies on the NURBS surface.
	 *
	 * @param {number} t1 - The first interpolation factor representing the `u` position on the surface. Must be in the range `[0,1]`.
	 * @param {number} t2 - The second interpolation factor representing the `v` position on the surface. Must be in the range `[0,1]`.
	 * @param {Vector3} target - The target vector the result is written to.
	 */
	getPoint( t1, t2, target ) {

		// linear mapping of t1 -> u and t2 -> v over the knot ranges
		const u = this.knots1[ 0 ] + t1 * ( this.knots1[ this.knots1.length - 1 ] - this.knots1[ 0 ] );
		const v = this.knots2[ 0 ] + t2 * ( this.knots2[ this.knots2.length - 1 ] - this.knots2[ 0 ] );

		NURBSUtils.calcSurfacePoint( this.degree1, this.degree2, this.knots1, this.knots2, this.controlPoints, u, v, target );

	}

}
export { NURBSSurface };

532
node_modules/three/examples/jsm/curves/NURBSUtils.js generated vendored Normal file
View File

@@ -0,0 +1,532 @@
import {
Vector3,
Vector4
} from 'three';
/**
* @module NURBSUtils
* @three_import import * as NURBSUtils from 'three/addons/curves/NURBSUtils.js';
*/
/**
* Finds knot vector span.
*
* @param {number} p - The degree.
* @param {number} u - The parametric value.
* @param {Array<number>} U - The knot vector.
* @return {number} The span.
*/
/**
 * Finds knot vector span.
 *
 * @param {number} p - The degree.
 * @param {number} u - The parametric value.
 * @param {Array<number>} U - The knot vector.
 * @return {number} The span.
 */
function findSpan( p, u, U ) {

	const n = U.length - p - 1;

	// handle the parametric boundaries directly

	if ( u >= U[ n ] ) {

		return n - 1;

	}

	if ( u <= U[ p ] ) {

		return p;

	}

	// binary search for the span [ U[mid], U[mid+1] ) that contains u

	let low = p;
	let high = n;
	let mid = ( low + high ) >> 1;

	while ( u < U[ mid ] || u >= U[ mid + 1 ] ) {

		if ( u < U[ mid ] ) {

			high = mid;

		} else {

			low = mid;

		}

		mid = ( low + high ) >> 1;

	}

	return mid;

}
/**
* Calculates basis functions. See The NURBS Book, page 70, algorithm A2.2.
*
* @param {number} span - The span in which `u` lies.
* @param {number} u - The parametric value.
* @param {number} p - The degree.
* @param {Array<number>} U - The knot vector.
* @return {Array<number>} Array[p+1] with basis functions values.
*/
/**
 * Calculates the non-zero B-spline basis function values at `u`.
 * See The NURBS Book, page 70, algorithm A2.2.
 *
 * @param {number} span - The span in which `u` lies.
 * @param {number} u - The parametric value.
 * @param {number} p - The degree.
 * @param {Array<number>} U - The knot vector.
 * @return {Array<number>} Array[p+1] with basis functions values.
 */
function calcBasisFunctions( span, u, p, U ) {

	const N = [ 1.0 ];
	const left = [];
	const right = [];

	for ( let j = 1; j <= p; ++ j ) {

		left[ j ] = u - U[ span + 1 - j ];
		right[ j ] = U[ span + j ] - u;

		let acc = 0.0;

		for ( let r = 0; r < j; ++ r ) {

			const dR = right[ r + 1 ];
			const dL = left[ j - r ];
			// Each degree-(j-1) value is shared by two degree-j values.
			const shared = N[ r ] / ( dR + dL );

			N[ r ] = acc + dR * shared;
			acc = dL * shared;

		}

		N[ j ] = acc;

	}

	return N;

}
/**
* Calculates B-Spline curve points. See The NURBS Book, page 82, algorithm A3.1.
*
* @param {number} p - The degree of the B-Spline.
* @param {Array<number>} U - The knot vector.
* @param {Array<Vector4>} P - The control points
* @param {number} u - The parametric point.
* @return {Vector4} The point for given `u`.
*/
/**
 * Calculates a B-Spline curve point in homogeneous coordinates.
 * See The NURBS Book, page 82, algorithm A3.1.
 *
 * @param {number} p - The degree of the B-Spline.
 * @param {Array<number>} U - The knot vector.
 * @param {Array<Vector4>} P - The control points
 * @param {number} u - The parametric point.
 * @return {Vector4} The point for given `u`.
 */
function calcBSplinePoint( p, U, P, u ) {

	const span = findSpan( p, u, U );
	const basis = calcBasisFunctions( span, u, p, U );
	const result = new Vector4( 0, 0, 0, 0 );

	// Weighted sum of the p+1 control points whose basis functions are non-zero.
	for ( let j = 0; j <= p; ++ j ) {

		const cp = P[ span - p + j ];
		const b = basis[ j ];
		const wb = cp.w * b;

		result.x += cp.x * wb;
		result.y += cp.y * wb;
		result.z += cp.z * wb;
		result.w += cp.w * b;

	}

	return result;

}
/**
* Calculates basis functions derivatives. See The NURBS Book, page 72, algorithm A2.3.
*
* @param {number} span - The span in which `u` lies.
* @param {number} u - The parametric point.
* @param {number} p - The degree.
* @param {number} n - number of derivatives to calculate
* @param {Array<number>} U - The knot vector.
* @return {Array<Array<number>>} An array[n+1][p+1] with basis functions derivatives.
*/
function calcBasisFunctionDerivatives( span, u, p, n, U ) {
// Template row of p+1 zeros, cloned below to build the work tables.
const zeroArr = [];
for ( let i = 0; i <= p; ++ i )
zeroArr[ i ] = 0.0;
// ders[k][j] will hold the k-th derivative of basis function j.
const ders = [];
for ( let i = 0; i <= n; ++ i )
ders[ i ] = zeroArr.slice( 0 );
// ndu stores the basis functions (upper triangle) and knot differences (lower triangle).
const ndu = [];
for ( let i = 0; i <= p; ++ i )
ndu[ i ] = zeroArr.slice( 0 );
ndu[ 0 ][ 0 ] = 1.0;
const left = zeroArr.slice( 0 );
const right = zeroArr.slice( 0 );
// Build the ndu table degree by degree (same recurrence as algorithm A2.2,
// but keeping the intermediate values needed for the derivatives).
for ( let j = 1; j <= p; ++ j ) {
left[ j ] = u - U[ span + 1 - j ];
right[ j ] = U[ span + j ] - u;
let saved = 0.0;
for ( let r = 0; r < j; ++ r ) {
const rv = right[ r + 1 ];
const lv = left[ j - r ];
ndu[ j ][ r ] = rv + lv;
const temp = ndu[ r ][ j - 1 ] / ndu[ j ][ r ];
ndu[ r ][ j ] = saved + rv * temp;
saved = lv * temp;
}
ndu[ j ][ j ] = saved;
}
// Zeroth derivative: the basis function values themselves.
for ( let j = 0; j <= p; ++ j ) {
ders[ 0 ][ j ] = ndu[ j ][ p ];
}
// Compute the higher derivatives for each basis function r,
// alternating between two rows of `a` (indices s1/s2).
for ( let r = 0; r <= p; ++ r ) {
let s1 = 0;
let s2 = 1;
const a = [];
for ( let i = 0; i <= p; ++ i ) {
a[ i ] = zeroArr.slice( 0 );
}
a[ 0 ][ 0 ] = 1.0;
for ( let k = 1; k <= n; ++ k ) {
let d = 0.0;
const rk = r - k;
const pk = p - k;
if ( r >= k ) {
a[ s2 ][ 0 ] = a[ s1 ][ 0 ] / ndu[ pk + 1 ][ rk ];
d = a[ s2 ][ 0 ] * ndu[ rk ][ pk ];
}
// j1/j2 restrict the sum to valid (in-range) coefficient indices.
const j1 = ( rk >= - 1 ) ? 1 : - rk;
const j2 = ( r - 1 <= pk ) ? k - 1 : p - r;
for ( let j = j1; j <= j2; ++ j ) {
a[ s2 ][ j ] = ( a[ s1 ][ j ] - a[ s1 ][ j - 1 ] ) / ndu[ pk + 1 ][ rk + j ];
d += a[ s2 ][ j ] * ndu[ rk + j ][ pk ];
}
if ( r <= pk ) {
a[ s2 ][ k ] = - a[ s1 ][ k - 1 ] / ndu[ pk + 1 ][ r ];
d += a[ s2 ][ k ] * ndu[ r ][ pk ];
}
ders[ k ][ r ] = d;
// Swap the two working rows of `a`.
const j = s1;
s1 = s2;
s2 = j;
}
}
// Multiply through by the degree-dependent factors p! / (p-k)!.
let r = p;
for ( let k = 1; k <= n; ++ k ) {
for ( let j = 0; j <= p; ++ j ) {
ders[ k ][ j ] *= r;
}
r *= p - k;
}
return ders;
}
/**
* Calculates derivatives of a B-Spline. See The NURBS Book, page 93, algorithm A3.2.
*
* @param {number} p - The degree.
* @param {Array<number>} U - The knot vector.
* @param {Array<Vector4>} P - The control points
* @param {number} u - The parametric point.
* @param {number} nd - The number of derivatives.
* @return {Array<Vector4>} An array[d+1] with derivatives.
*/
function calcBSplineDerivatives( p, U, P, u, nd ) {
// Derivatives of order greater than the degree are zero, so only du = min(nd, p) are computed.
const du = nd < p ? nd : p;
const CK = [];
const span = findSpan( p, u, U );
const nders = calcBasisFunctionDerivatives( span, u, p, du, U );
// Convert control points to weighted homogeneous form ( w*x, w*y, w*z, w ).
const Pw = [];
for ( let i = 0; i < P.length; ++ i ) {
const point = P[ i ].clone();
const w = point.w;
point.x *= w;
point.y *= w;
point.z *= w;
Pw[ i ] = point;
}
// Each derivative is a weighted sum of the homogeneous control points
// over the p+1 non-zero basis-function derivatives.
for ( let k = 0; k <= du; ++ k ) {
const point = Pw[ span - p ].clone().multiplyScalar( nders[ k ][ 0 ] );
for ( let j = 1; j <= p; ++ j ) {
point.add( Pw[ span - p + j ].clone().multiplyScalar( nders[ k ][ j ] ) );
}
CK[ k ] = point;
}
// Pad the remaining entries with zero vectors.
// NOTE(review): this loop runs to nd + 1 (one entry past the documented
// array[nd+1] size), and `new Vector4( 0, 0, 0 )` leaves w at its default
// of 1 rather than 0 — presumably tolerated by the callers in this file;
// verify against upstream three.js before changing.
for ( let k = du + 1; k <= nd + 1; ++ k ) {
CK[ k ] = new Vector4( 0, 0, 0 );
}
return CK;
}
/**
* Calculates "K over I".
*
* @param {number} k - The K value.
* @param {number} i - The I value.
* @return {number} k!/(i!(k-i)!)
*/
/**
 * Calculates the binomial coefficient "K over I".
 *
 * @param {number} k - The K value.
 * @param {number} i - The I value.
 * @return {number} k!/(i!(k-i)!)
 */
function calcKoverI( k, i ) {

	// Plain factorial via an integer product; inputs are small (curve degrees).
	const factorial = ( m ) => {

		let f = 1;
		for ( let j = 2; j <= m; ++ j ) f *= j;
		return f;

	};

	return factorial( k ) / ( factorial( i ) * factorial( k - i ) );

}
/**
* Calculates derivatives (0-nd) of rational curve. See The NURBS Book, page 127, algorithm A4.2.
*
* @param {Array<Vector4>} Pders - Array with derivatives.
* @return {Array<Vector3>} An array with derivatives for rational curve.
*/
/**
 * Converts homogeneous-space curve derivatives into derivatives of the
 * rational (3D) curve. See The NURBS Book, page 127, algorithm A4.2.
 *
 * @param {Array<Vector4>} Pders - Array with derivatives.
 * @return {Array<Vector3>} An array with derivatives for rational curve.
 */
function calcRationalCurveDerivatives( Pders ) {

	const count = Pders.length;
	const Aders = [];
	const wders = [];

	// Split each homogeneous derivative into its spatial part and its weight part.
	for ( let i = 0; i < count; ++ i ) {

		const pd = Pders[ i ];
		Aders[ i ] = new Vector3( pd.x, pd.y, pd.z );
		wders[ i ] = pd.w;

	}

	const CK = [];

	// Recursively remove the weight contributions of the lower-order derivatives.
	for ( let k = 0; k < count; ++ k ) {

		const v = Aders[ k ].clone();

		for ( let i = 1; i <= k; ++ i ) {

			v.sub( CK[ k - i ].clone().multiplyScalar( calcKoverI( k, i ) * wders[ i ] ) );

		}

		CK[ k ] = v.divideScalar( wders[ 0 ] );

	}

	return CK;

}
/**
* Calculates NURBS curve derivatives. See The NURBS Book, page 127, algorithm A4.2.
*
* @param {number} p - The degree.
* @param {Array<number>} U - The knot vector.
* @param {Array<Vector4>} P - The control points in homogeneous space.
* @param {number} u - The parametric point.
* @param {number} nd - The number of derivatives.
* @return {Array<Vector3>} array with derivatives for rational curve.
*/
/**
 * Calculates NURBS curve derivatives. See The NURBS Book, page 127, algorithm A4.2.
 *
 * @param {number} p - The degree.
 * @param {Array<number>} U - The knot vector.
 * @param {Array<Vector4>} P - The control points in homogeneous space.
 * @param {number} u - The parametric point.
 * @param {number} nd - The number of derivatives.
 * @return {Array<Vector3>} array with derivatives for rational curve.
 */
function calcNURBSDerivatives( p, U, P, u, nd ) {

	// Derivatives in homogeneous space, then projected onto the rational curve.
	return calcRationalCurveDerivatives( calcBSplineDerivatives( p, U, P, u, nd ) );

}
/**
* Calculates a rational B-Spline surface point. See The NURBS Book, page 134, algorithm A4.3.
*
* @param {number} p - The first degree of B-Spline surface.
* @param {number} q - The second degree of B-Spline surface.
* @param {Array<number>} U - The first knot vector.
* @param {Array<number>} V - The second knot vector.
* @param {Array<Array<Vector4>>} P - The control points in homogeneous space.
* @param {number} u - The first parametric point.
* @param {number} v - The second parametric point.
* @param {Vector3} target - The target vector.
*/
function calcSurfacePoint( p, q, U, V, P, u, v, target ) {
// Non-zero basis functions in each parametric direction.
const uspan = findSpan( p, u, U );
const vspan = findSpan( q, v, V );
const Nu = calcBasisFunctions( uspan, u, p, U );
const Nv = calcBasisFunctions( vspan, v, q, V );
// First pass: blend control points along u for each v row, in homogeneous form.
const temp = [];
for ( let l = 0; l <= q; ++ l ) {
temp[ l ] = new Vector4( 0, 0, 0, 0 );
for ( let k = 0; k <= p; ++ k ) {
// Weight the point ( w*x, w*y, w*z, w ) before blending.
const point = P[ uspan - p + k ][ vspan - q + l ].clone();
const w = point.w;
point.x *= w;
point.y *= w;
point.z *= w;
temp[ l ].add( point.multiplyScalar( Nu[ k ] ) );
}
}
// Second pass: blend the intermediate rows along v.
const Sw = new Vector4( 0, 0, 0, 0 );
for ( let l = 0; l <= q; ++ l ) {
Sw.add( temp[ l ].multiplyScalar( Nv[ l ] ) );
}
// Project from homogeneous space back to 3D.
Sw.divideScalar( Sw.w );
target.set( Sw.x, Sw.y, Sw.z );
}
/**
* Calculates a rational B-Spline volume point. See The NURBS Book, page 134, algorithm A4.3.
*
* @param {number} p - The first degree of B-Spline surface.
* @param {number} q - The second degree of B-Spline surface.
* @param {number} r - The third degree of B-Spline surface.
* @param {Array<number>} U - The first knot vector.
* @param {Array<number>} V - The second knot vector.
* @param {Array<number>} W - The third knot vector.
* @param {Array<Array<Array<Vector4>>>} P - The control points in homogeneous space.
* @param {number} u - The first parametric point.
* @param {number} v - The second parametric point.
* @param {number} w - The third parametric point.
* @param {Vector3} target - The target vector.
*/
function calcVolumePoint( p, q, r, U, V, W, P, u, v, w, target ) {
// Non-zero basis functions in each of the three parametric directions.
const uspan = findSpan( p, u, U );
const vspan = findSpan( q, v, V );
const wspan = findSpan( r, w, W );
const Nu = calcBasisFunctions( uspan, u, p, U );
const Nv = calcBasisFunctions( vspan, v, q, V );
const Nw = calcBasisFunctions( wspan, w, r, W );
// First pass: blend control points along u for each (v, w) pair, in homogeneous form.
const temp = [];
for ( let m = 0; m <= r; ++ m ) {
temp[ m ] = [];
for ( let l = 0; l <= q; ++ l ) {
temp[ m ][ l ] = new Vector4( 0, 0, 0, 0 );
for ( let k = 0; k <= p; ++ k ) {
const point = P[ uspan - p + k ][ vspan - q + l ][ wspan - r + m ].clone();
// NOTE: this local `w` (the point's weight) shadows the parametric
// parameter `w`, which is no longer needed inside this loop.
const w = point.w;
point.x *= w;
point.y *= w;
point.z *= w;
temp[ m ][ l ].add( point.multiplyScalar( Nu[ k ] ) );
}
}
}
// Second pass: blend the intermediate points along v and w.
const Sw = new Vector4( 0, 0, 0, 0 );
for ( let m = 0; m <= r; ++ m ) {
for ( let l = 0; l <= q; ++ l ) {
Sw.add( temp[ m ][ l ].multiplyScalar( Nw[ m ] ).multiplyScalar( Nv[ l ] ) );
}
}
// Project from homogeneous space back to 3D.
Sw.divideScalar( Sw.w );
target.set( Sw.x, Sw.y, Sw.z );
}
export {
findSpan,
calcBasisFunctions,
calcBSplinePoint,
calcBasisFunctionDerivatives,
calcBSplineDerivatives,
calcKoverI,
calcRationalCurveDerivatives,
calcNURBSDerivatives,
calcSurfacePoint,
calcVolumePoint,
};

82
node_modules/three/examples/jsm/curves/NURBSVolume.js generated vendored Normal file
View File

@@ -0,0 +1,82 @@
import {
Vector4
} from 'three';
import * as NURBSUtils from '../curves/NURBSUtils.js';
/**
* This class represents a NURBS volume.
*
* Implementation is based on `(x, y [, z=0 [, w=1]])` control points with `w=weight`.
*
* @three_import import { NURBSVolume } from 'three/addons/curves/NURBSVolume.js';
*/
class NURBSVolume {

	/**
	 * Constructs a new NURBS volume.
	 *
	 * @param {number} degree1 - The first NURBS degree.
	 * @param {number} degree2 - The second NURBS degree.
	 * @param {number} degree3 - The third NURBS degree.
	 * @param {Array<number>} knots1 - The first knots as a flat array of numbers.
	 * @param {Array<number>} knots2 - The second knots as a flat array of numbers.
	 * @param {Array<number>} knots3 - The third knots as a flat array of numbers.
	 * @param {Array<Array<Array<Vector2|Vector3|Vector4>>>} controlPoints - An array^3 holding control points.
	 */
	constructor( degree1, degree2, degree3, knots1, knots2, knots3, controlPoints ) {

		this.degree1 = degree1;
		this.degree2 = degree2;
		this.degree3 = degree3;
		this.knots1 = knots1;
		this.knots2 = knots2;
		this.knots3 = knots3;
		this.controlPoints = [];

		const count1 = knots1.length - degree1 - 1;
		const count2 = knots2.length - degree2 - 1;
		const count3 = knots3.length - degree3 - 1;

		// Normalize every control point to a Vector4 (homogeneous coordinates).
		for ( let i = 0; i < count1; ++ i ) {

			const plane = this.controlPoints[ i ] = [];

			for ( let j = 0; j < count2; ++ j ) {

				const row = plane[ j ] = [];

				for ( let k = 0; k < count3; ++ k ) {

					const cp = controlPoints[ i ][ j ][ k ];
					row[ k ] = new Vector4( cp.x, cp.y, cp.z, cp.w );

				}

			}

		}

	}

	/**
	 * This method returns a vector in 3D space for the given interpolation factors. This vector lies within the NURBS volume.
	 *
	 * @param {number} t1 - The first interpolation factor representing the `u` position within the volume. Must be in the range `[0,1]`.
	 * @param {number} t2 - The second interpolation factor representing the `v` position within the volume. Must be in the range `[0,1]`.
	 * @param {number} t3 - The third interpolation factor representing the `w` position within the volume. Must be in the range `[0,1]`.
	 * @param {Vector3} target - The target vector the result is written to.
	 */
	getPoint( t1, t2, t3, target ) {

		// Maps a normalized factor in [0,1] onto the parametric range of a knot vector.
		const toParam = ( t, knots ) => knots[ 0 ] + t * ( knots[ knots.length - 1 ] - knots[ 0 ] );

		const u = toParam( t1, this.knots1 );
		const v = toParam( t2, this.knots2 );
		const w = toParam( t3, this.knots3 );

		NURBSUtils.calcVolumePoint( this.degree1, this.degree2, this.degree3, this.knots1, this.knots2, this.knots3, this.controlPoints, u, v, w, target );

	}

}
export { NURBSVolume };

View File

@@ -0,0 +1,179 @@
import {
LinearFilter,
Matrix3,
NearestFilter,
RGBAFormat,
ShaderMaterial,
StereoCamera,
WebGLRenderTarget
} from 'three';
import { FullScreenQuad } from '../postprocessing/Pass.js';
/**
* A class that creates an anaglyph effect.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link AnaglyphPassNode}.
*
* @three_import import { AnaglyphEffect } from 'three/addons/effects/AnaglyphEffect.js';
*/
class AnaglyphEffect {
/**
 * Constructs a new anaglyph effect.
 *
 * @param {WebGLRenderer} renderer - The renderer.
 * @param {number} width - The width of the effect in physical pixels.
 * @param {number} height - The height of the effect in physical pixels.
 */
constructor( renderer, width = 512, height = 512 ) {
// Dubois matrices from https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.7.6968&rep=rep1&type=pdf#page=4
this.colorMatrixLeft = new Matrix3().fromArray( [
0.456100, - 0.0400822, - 0.0152161,
0.500484, - 0.0378246, - 0.0205971,
0.176381, - 0.0157589, - 0.00546856
] );
this.colorMatrixRight = new Matrix3().fromArray( [
- 0.0434706, 0.378476, - 0.0721527,
- 0.0879388, 0.73364, - 0.112961,
- 0.00155529, - 0.0184503, 1.2264
] );
// Derives the left/right eye cameras from the scene camera on each render.
const _stereo = new StereoCamera();
const _params = { minFilter: LinearFilter, magFilter: NearestFilter, format: RGBAFormat };
// One offscreen render target per eye.
const _renderTargetL = new WebGLRenderTarget( width, height, _params );
const _renderTargetR = new WebGLRenderTarget( width, height, _params );
// Shader that combines the two eye renders into a single anaglyph image
// using the Dubois color matrices above.
const _material = new ShaderMaterial( {
uniforms: {
'mapLeft': { value: _renderTargetL.texture },
'mapRight': { value: _renderTargetR.texture },
'colorMatrixLeft': { value: this.colorMatrixLeft },
'colorMatrixRight': { value: this.colorMatrixRight }
},
vertexShader: [
'varying vec2 vUv;',
'void main() {',
'	vUv = vec2( uv.x, uv.y );',
'	gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
'}'
].join( '\n' ),
fragmentShader: [
'uniform sampler2D mapLeft;',
'uniform sampler2D mapRight;',
'varying vec2 vUv;',
'uniform mat3 colorMatrixLeft;',
'uniform mat3 colorMatrixRight;',
'void main() {',
'	vec2 uv = vUv;',
'	vec4 colorL = texture2D( mapLeft, uv );',
'	vec4 colorR = texture2D( mapRight, uv );',
'	vec3 color = clamp(',
'			colorMatrixLeft * colorL.rgb +',
'			colorMatrixRight * colorR.rgb, 0., 1. );',
'	gl_FragColor = vec4(',
'			color.r, color.g, color.b,',
'			max( colorL.a, colorR.a ) );',
'	#include <tonemapping_fragment>',
'	#include <colorspace_fragment>',
'}'
].join( '\n' )
} );
// Fullscreen quad used to composite the final image to the default framebuffer.
const _quad = new FullScreenQuad( _material );
/**
 * Resizes the effect.
 *
 * @param {number} width - The width of the effect in logical pixels.
 * @param {number} height - The height of the effect in logical pixels.
 */
this.setSize = function ( width, height ) {
renderer.setSize( width, height );
// Render targets are sized in physical pixels, hence the pixel-ratio scaling.
const pixelRatio = renderer.getPixelRatio();
_renderTargetL.setSize( width * pixelRatio, height * pixelRatio );
_renderTargetR.setSize( width * pixelRatio, height * pixelRatio );
};
/**
 * When using this effect, this method should be called instead of the
 * default {@link WebGLRenderer#render}.
 *
 * @param {Object3D} scene - The scene to render.
 * @param {Camera} camera - The camera.
 */
this.render = function ( scene, camera ) {
// Preserve the caller's render target so it can be restored afterwards.
const currentRenderTarget = renderer.getRenderTarget();
if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld();
if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld();
_stereo.update( camera );
// Render each eye into its own target, then composite with the quad.
renderer.setRenderTarget( _renderTargetL );
renderer.clear();
renderer.render( scene, _stereo.cameraL );
renderer.setRenderTarget( _renderTargetR );
renderer.clear();
renderer.render( scene, _stereo.cameraR );
renderer.setRenderTarget( null );
_quad.render( renderer );
renderer.setRenderTarget( currentRenderTarget );
};
/**
 * Frees internal resources. This method should be called
 * when the effect is no longer required.
 */
this.dispose = function () {
_renderTargetL.dispose();
_renderTargetR.dispose();
_material.dispose();
_quad.dispose();
};
}
}
export { AnaglyphEffect };

310
node_modules/three/examples/jsm/effects/AsciiEffect.js generated vendored Normal file
View File

@@ -0,0 +1,310 @@
/**
* A class that creates an ASCII effect.
*
* The ASCII generation is based on [jsascii](https://github.com/hassadee/jsascii/blob/master/jsascii.js).
*
* @three_import import { AsciiEffect } from 'three/addons/effects/AsciiEffect.js';
*/
class AsciiEffect {
/**
 * Constructs a new ASCII effect.
 *
 * @param {WebGLRenderer} renderer - The renderer.
 * @param {string} [charSet=' .:-=+*#%@'] - The char set.
 * @param {AsciiEffect~Options} [options] - The configuration parameter.
 */
constructor( renderer, charSet = ' .:-=+*#%@', options = {} ) {
// ' .,:;=|iI+hHOE#`$';
// darker bolder character set from https://github.com/saw/Canvas-ASCII-Art/
// ' .\'`^",:;Il!i~+_-?][}{1)(|/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$'
// Some ASCII settings
// NOTE(review): `||` means falsy option values (e.g. resolution: 0) silently
// fall back to the defaults — presumably intentional for this addon; verify.
const fResolution = options[ 'resolution' ] || 0.15;
const iScale = options[ 'scale' ] || 1;
const bColor = options[ 'color' ] || false;
const bAlpha = options[ 'alpha' ] || false;
const bBlock = options[ 'block' ] || false;
const bInvert = options[ 'invert' ] || false;
const strResolution = options[ 'strResolution' ] || 'low';
let width, height;
// Host element exposed to the caller; the ASCII output lives in a <table>.
const domElement = document.createElement( 'div' );
domElement.style.cursor = 'default';
const oAscii = document.createElement( 'table' );
domElement.appendChild( oAscii );
// Dimensions of the downsampled image (in characters).
let iWidth, iHeight;
let oImg;
/**
 * Resizes the effect.
 *
 * @param {number} w - The width of the effect in logical pixels.
 * @param {number} h - The height of the effect in logical pixels.
 */
this.setSize = function ( w, h ) {
width = w;
height = h;
renderer.setSize( w, h );
initAsciiSize();
};
/**
 * When using this effect, this method should be called instead of the
 * default {@link WebGLRenderer#render}.
 *
 * @param {Object3D} scene - The scene to render.
 * @param {Camera} camera - The camera.
 */
this.render = function ( scene, camera ) {
// Render normally first, then convert the renderer's canvas to ASCII.
renderer.render( scene, camera );
asciifyImage( oAscii );
};
/**
 * The DOM element of the effect. This element must be used instead of the
 * default {@link WebGLRenderer#domElement}.
 *
 * @type {HTMLDivElement}
 */
this.domElement = domElement;
// Throw in ascii library from https://github.com/hassadee/jsascii/blob/master/jsascii.js (MIT License)
// Sizes the sampling canvas and styles the output table so the character
// grid lines up with the rendered image.
function initAsciiSize() {
iWidth = Math.floor( width * fResolution );
iHeight = Math.floor( height * fResolution );
oCanvas.width = iWidth;
oCanvas.height = iHeight;
// oCanvas.style.display = "none";
// oCanvas.style.width = iWidth;
// oCanvas.style.height = iHeight;
oImg = renderer.domElement;
if ( oImg.style.backgroundColor ) {
oAscii.rows[ 0 ].cells[ 0 ].style.backgroundColor = oImg.style.backgroundColor;
oAscii.rows[ 0 ].cells[ 0 ].style.color = oImg.style.color;
}
oAscii.cellSpacing = '0';
oAscii.cellPadding = '0';
const oStyle = oAscii.style;
oStyle.whiteSpace = 'pre';
oStyle.margin = '0px';
oStyle.padding = '0px';
oStyle.letterSpacing = fLetterSpacing + 'px';
oStyle.fontFamily = strFont;
oStyle.fontSize = fFontSize + 'px';
oStyle.lineHeight = fLineHeight + 'px';
oStyle.textAlign = 'left';
oStyle.textDecoration = 'none';
}
const strFont = 'courier new, monospace';
// Source canvas (the WebGL canvas) and a small 2D canvas used for downsampling.
const oCanvasImg = renderer.domElement;
const oCanvas = document.createElement( 'canvas' );
// NOTE(review): these early returns leave the effect partially constructed
// (render/setSize still assigned but asciify helpers unusable) — presumably
// acceptable for ancient browsers without canvas support; verify.
if ( ! oCanvas.getContext ) {
return;
}
const oCtx = oCanvas.getContext( '2d' );
if ( ! oCtx.getImageData ) {
return;
}
// Character ramp ordered from lightest to darkest coverage.
let aCharList;
if ( charSet ) {
aCharList = ( charSet ).split( '' );
} else {
const aDefaultCharList = ( ' .,:;i1tfLCG08@' ).split( '' );
const aDefaultColorCharList = ( ' CGO08@' ).split( '' );
aCharList = ( bColor ? aDefaultColorCharList : aDefaultCharList );
}
// Setup dom
// Font metrics are derived from the sampling resolution so the character
// grid covers the same area as the original image.
const fFontSize = ( 2 / fResolution ) * iScale;
const fLineHeight = ( 2 / fResolution ) * iScale;
// adjust letter-spacing for all combinations of scale and resolution to get it to fit the image width.
let fLetterSpacing = 0;
if ( strResolution == 'low' ) {
switch ( iScale ) {
case 1 : fLetterSpacing = - 1; break;
case 2 :
case 3 : fLetterSpacing = - 2.1; break;
case 4 : fLetterSpacing = - 3.1; break;
case 5 : fLetterSpacing = - 4.15; break;
}
}
if ( strResolution == 'medium' ) {
switch ( iScale ) {
case 1 : fLetterSpacing = 0; break;
case 2 : fLetterSpacing = - 1; break;
case 3 : fLetterSpacing = - 1.04; break;
case 4 :
case 5 : fLetterSpacing = - 2.1; break;
}
}
if ( strResolution == 'high' ) {
switch ( iScale ) {
case 1 :
case 2 : fLetterSpacing = 0; break;
case 3 :
case 4 :
case 5 : fLetterSpacing = - 1; break;
}
}
// can't get a span or div to flow like an img element, but a table works?
// convert img element to ascii
// Downsamples the rendered canvas and maps each sampled pixel's brightness
// onto a character from the ramp; writes the result as the table's HTML.
function asciifyImage( oAscii ) {
oCtx.clearRect( 0, 0, iWidth, iHeight );
oCtx.drawImage( oCanvasImg, 0, 0, iWidth, iHeight );
const oImgData = oCtx.getImageData( 0, 0, iWidth, iHeight ).data;
// Coloring loop starts now
let strChars = '';
const maxIdx = aCharList.length - 1;
// console.time('rendering');
// Step rows by 2: a character cell is roughly twice as tall as it is wide.
for ( let y = 0; y < iHeight; y += 2 ) {
for ( let x = 0; x < iWidth; x ++ ) {
const iOffset = ( y * iWidth + x ) * 4;
const iRed = oImgData[ iOffset ];
const iGreen = oImgData[ iOffset + 1 ];
const iBlue = oImgData[ iOffset + 2 ];
const iAlpha = oImgData[ iOffset + 3 ];
// Perceptual luma weighting (Rec. 601 coefficients).
let fBrightness = ( 0.3 * iRed + 0.59 * iGreen + 0.11 * iBlue ) / 255;
// fBrightness = (0.3*iRed + 0.5*iGreen + 0.3*iBlue) / 255;
if ( iAlpha == 0 ) {
// should calculate alpha instead, but quick hack :)
//fBrightness *= (iAlpha / 255);
fBrightness = 1;
}
let iCharIdx = Math.round( ( 1 - fBrightness ) * maxIdx );
if ( bInvert ) {
iCharIdx = maxIdx - iCharIdx;
}
// good for debugging
//fBrightness = Math.floor(fBrightness * 10);
//strThisChar = fBrightness;
let strThisChar = aCharList[ iCharIdx ];
if ( strThisChar === undefined || strThisChar == ' ' )
strThisChar = '&nbsp;';
if ( bColor ) {
strChars += '<span style=\''
+ 'color:rgb(' + iRed + ',' + iGreen + ',' + iBlue + ');'
+ ( bBlock ? 'background-color:rgb(' + iRed + ',' + iGreen + ',' + iBlue + ');' : '' )
+ ( bAlpha ? 'opacity:' + ( iAlpha / 255 ) + ';' : '' )
+ '\'>' + strThisChar + '</span>';
} else {
strChars += strThisChar;
}
}
strChars += '<br/>';
}
oAscii.innerHTML = `<tr><td style="display:block;width:${width}px;height:${height}px;overflow:hidden">${strChars}</td></tr>`;
// console.timeEnd('rendering');
// return oAscii;
}
}
}
/**
* This type represents configuration settings of `AsciiEffect`.
*
* @typedef {Object} AsciiEffect~Options
* @property {number} [resolution=0.15] - A higher value leads to more details.
* @property {number} [scale=1] - The scale of the effect.
* @property {boolean} [color=false] - Whether colors should be enabled or not. Better quality but slows down rendering.
* @property {boolean} [alpha=false] - Whether transparency should be enabled or not.
* @property {boolean} [block=false] - Whether blocked characters should be enabled or not.
* @property {boolean} [invert=false] - Whether colors should be inverted or not.
* @property {('low'|'medium'|'high')} [strResolution='low'] - The string resolution.
**/
export { AsciiEffect };

View File

@@ -0,0 +1,489 @@
import {
BackSide,
Color,
ShaderMaterial,
UniformsLib,
UniformsUtils
} from 'three';
/**
* An outline effect for toon shaders.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link ToonOutlinePassNode}.
*
* ```js
* const effect = new OutlineEffect( renderer );
*
* function render() {
*
* effect.render( scene, camera );
*
* }
* ```
*
* @three_import import { OutlineEffect } from 'three/addons/effects/OutlineEffect.js';
*/
class OutlineEffect {
/**
* Constructs a new outline effect.
*
* @param {WebGLRenderer} renderer - The renderer.
* @param {OutlineEffect~Options} [parameters] - The configuration parameter.
*/
constructor( renderer, parameters = {} ) {
this.enabled = true;
const defaultThickness = parameters.defaultThickness !== undefined ? parameters.defaultThickness : 0.003;
const defaultColor = new Color().fromArray( parameters.defaultColor !== undefined ? parameters.defaultColor : [ 0, 0, 0 ] );
const defaultAlpha = parameters.defaultAlpha !== undefined ? parameters.defaultAlpha : 1.0;
const defaultKeepAlive = parameters.defaultKeepAlive !== undefined ? parameters.defaultKeepAlive : false;
// object.material.uuid -> outlineMaterial or
// object.material[ n ].uuid -> outlineMaterial
// save at the outline material creation and release
// if it's unused removeThresholdCount frames
// unless keepAlive is true.
const cache = {};
const removeThresholdCount = 60;
// outlineMaterial.uuid -> object.material or
// outlineMaterial.uuid -> object.material[ n ]
// save before render and release after render.
const originalMaterials = {};
// object.uuid -> originalOnBeforeRender
// save before render and release after render.
const originalOnBeforeRenders = {};
//this.cache = cache; // for debug
const uniformsOutline = {
outlineThickness: { value: defaultThickness },
outlineColor: { value: defaultColor },
outlineAlpha: { value: defaultAlpha }
};
const vertexShader = [
'#include <common>',
'#include <uv_pars_vertex>',
'#include <displacementmap_pars_vertex>',
'#include <fog_pars_vertex>',
'#include <morphtarget_pars_vertex>',
'#include <skinning_pars_vertex>',
'#include <logdepthbuf_pars_vertex>',
'#include <clipping_planes_pars_vertex>',
'uniform float outlineThickness;',
'vec4 calculateOutline( vec4 pos, vec3 normal, vec4 skinned ) {',
' float thickness = outlineThickness;',
' const float ratio = 1.0;', // TODO: support outline thickness ratio for each vertex
' vec4 pos2 = projectionMatrix * modelViewMatrix * vec4( skinned.xyz + normal, 1.0 );',
// NOTE: subtract pos2 from pos because BackSide objectNormal is negative
' vec4 norm = normalize( pos - pos2 );',
' return pos + norm * thickness * pos.w * ratio;',
'}',
'void main() {',
' #include <uv_vertex>',
' #include <beginnormal_vertex>',
' #include <morphnormal_vertex>',
' #include <skinbase_vertex>',
' #include <skinnormal_vertex>',
' #include <begin_vertex>',
' #include <morphtarget_vertex>',
' #include <skinning_vertex>',
' #include <displacementmap_vertex>',
' #include <project_vertex>',
' vec3 outlineNormal = - objectNormal;', // the outline material is always rendered with BackSide
' gl_Position = calculateOutline( gl_Position, outlineNormal, vec4( transformed, 1.0 ) );',
' #include <logdepthbuf_vertex>',
' #include <clipping_planes_vertex>',
' #include <fog_vertex>',
'}',
].join( '\n' );
const fragmentShader = [
'#include <common>',
'#include <fog_pars_fragment>',
'#include <logdepthbuf_pars_fragment>',
'#include <clipping_planes_pars_fragment>',
'uniform vec3 outlineColor;',
'uniform float outlineAlpha;',
'void main() {',
' #include <clipping_planes_fragment>',
' #include <logdepthbuf_fragment>',
' gl_FragColor = vec4( outlineColor, outlineAlpha );',
' #include <tonemapping_fragment>',
' #include <colorspace_fragment>',
' #include <fog_fragment>',
' #include <premultiplied_alpha_fragment>',
'}'
].join( '\n' );
// Creates a fresh outline ShaderMaterial. BackSide is used so the expanded
// shell is drawn behind the original front-facing geometry.
function createMaterial() {
return new ShaderMaterial( {
type: 'OutlineEffect',
uniforms: UniformsUtils.merge( [
UniformsLib[ 'fog' ],
UniformsLib[ 'displacementmap' ],
uniformsOutline
] ),
vertexShader: vertexShader,
fragmentShader: fragmentShader,
side: BackSide
} );
}
// Returns the cached outline material for `originalMaterial`, creating and
// caching a new one on first use. Marks the entry as used so cleanupCache()
// does not evict it this frame.
function getOutlineMaterialFromCache( originalMaterial ) {
let data = cache[ originalMaterial.uuid ];
if ( data === undefined ) {
data = {
material: createMaterial(),
used: true,
keepAlive: defaultKeepAlive,
count: 0
};
cache[ originalMaterial.uuid ] = data;
}
data.used = true;
return data.material;
}
// Fetches the outline material for `originalMaterial`, records the reverse
// mapping (outline -> original) for restoration after the pass, and syncs the
// outline material's settings with the original.
function getOutlineMaterial( originalMaterial ) {
const outlineMaterial = getOutlineMaterialFromCache( originalMaterial );
originalMaterials[ outlineMaterial.uuid ] = originalMaterial;
updateOutlineMaterial( outlineMaterial, originalMaterial );
return outlineMaterial;
}
// An object can receive an outline only if it is a mesh that has a material
// and per-vertex normals (the outline shader extrudes along the normal).
function isCompatible( object ) {

	const geometry = object.geometry;
	const hasNormals = geometry !== undefined && geometry.attributes.normal !== undefined;

	return object.isMesh === true && object.material !== undefined && hasNormals;

}
// Swaps the object's material(s) for outline materials and hooks
// onBeforeRender so the outline uniforms track the original material.
// The original onBeforeRender is saved for restoration after the pass.
function setOutlineMaterial( object ) {
if ( isCompatible( object ) === false ) return;
if ( Array.isArray( object.material ) ) {
for ( let i = 0, il = object.material.length; i < il; i ++ ) {
object.material[ i ] = getOutlineMaterial( object.material[ i ] );
}
} else {
object.material = getOutlineMaterial( object.material );
}
originalOnBeforeRenders[ object.uuid ] = object.onBeforeRender;
object.onBeforeRender = onBeforeRender;
}
// Reverses setOutlineMaterial(): puts the original material(s) and the
// original onBeforeRender callback back on the object.
function restoreOriginalMaterial( object ) {
if ( isCompatible( object ) === false ) return;
if ( Array.isArray( object.material ) ) {
for ( let i = 0, il = object.material.length; i < il; i ++ ) {
object.material[ i ] = originalMaterials[ object.material[ i ].uuid ];
}
} else {
object.material = originalMaterials[ object.material.uuid ];
}
object.onBeforeRender = originalOnBeforeRenders[ object.uuid ];
}
// Installed on outlined objects during the pass; refreshes the outline
// uniforms from the original material right before each draw.
function onBeforeRender( renderer, scene, camera, geometry, material ) {
const originalMaterial = originalMaterials[ material.uuid ];
// just in case
if ( originalMaterial === undefined ) return;
updateUniforms( material, originalMaterial );
}
// Copies per-frame uniform values (alpha, thickness, color, displacement map
// settings) from the original material into the outline material. Values in
// userData.outlineParameters override the defaults.
function updateUniforms( material, originalMaterial ) {
const outlineParameters = originalMaterial.userData.outlineParameters;
material.uniforms.outlineAlpha.value = originalMaterial.opacity;
if ( outlineParameters !== undefined ) {
if ( outlineParameters.thickness !== undefined ) material.uniforms.outlineThickness.value = outlineParameters.thickness;
if ( outlineParameters.color !== undefined ) material.uniforms.outlineColor.value.fromArray( outlineParameters.color );
if ( outlineParameters.alpha !== undefined ) material.uniforms.outlineAlpha.value = outlineParameters.alpha;
}
if ( originalMaterial.displacementMap ) {
material.uniforms.displacementMap.value = originalMaterial.displacementMap;
material.uniforms.displacementScale.value = originalMaterial.displacementScale;
material.uniforms.displacementBias.value = originalMaterial.displacementBias;
}
}
// Syncs non-uniform material state (visibility, transparency, fog, clipping,
// version) from the original material onto its outline material.
function updateOutlineMaterial( material, originalMaterial ) {
// A material named 'invisible' is an opt-out: leave its outline state alone.
if ( material.name === 'invisible' ) return;
const outlineParameters = originalMaterial.userData.outlineParameters;
material.fog = originalMaterial.fog;
material.toneMapped = originalMaterial.toneMapped;
material.premultipliedAlpha = originalMaterial.premultipliedAlpha;
material.displacementMap = originalMaterial.displacementMap;
if ( outlineParameters !== undefined ) {
if ( originalMaterial.visible === false ) {
material.visible = false;
} else {
material.visible = ( outlineParameters.visible !== undefined ) ? outlineParameters.visible : true;
}
material.transparent = ( outlineParameters.alpha !== undefined && outlineParameters.alpha < 1.0 ) ? true : originalMaterial.transparent;
if ( outlineParameters.keepAlive !== undefined ) cache[ originalMaterial.uuid ].keepAlive = outlineParameters.keepAlive;
} else {
material.transparent = originalMaterial.transparent;
material.visible = originalMaterial.visible;
}
// Outlines on wireframes or depth-test-disabled materials are suppressed.
if ( originalMaterial.wireframe === true || originalMaterial.depthTest === false ) material.visible = false;
if ( originalMaterial.clippingPlanes ) {
material.clipping = true;
material.clippingPlanes = originalMaterial.clippingPlanes;
material.clipIntersection = originalMaterial.clipIntersection;
material.clipShadows = originalMaterial.clipShadows;
}
material.version = originalMaterial.version; // update outline material if necessary
}
/**
 * Per-frame cache maintenance: drops references to the scene's materials and
 * onBeforeRender callbacks, and ages out outline materials that were not used
 * in the frame just rendered.
 */
function cleanupCache() {

	// clear originalMaterials
	for ( const key of Object.keys( originalMaterials ) ) {

		originalMaterials[ key ] = undefined;

	}

	// clear originalOnBeforeRenders
	for ( const key of Object.keys( originalOnBeforeRenders ) ) {

		originalOnBeforeRenders[ key ] = undefined;

	}

	// remove unused outlineMaterial from cache
	for ( const key of Object.keys( cache ) ) {

		const entry = cache[ key ];

		if ( entry.used === false ) {

			// Unused this frame: age the entry and evict once it passes the
			// threshold (unless flagged keepAlive).
			entry.count ++;

			if ( entry.keepAlive === false && entry.count > removeThresholdCount ) {

				delete cache[ key ];

			}

		} else {

			// Used this frame: reset for the next aging cycle.
			entry.used = false;
			entry.count = 0;

		}

	}

}
/**
 * When using this effect, this method should be called instead of the
 * default {@link WebGLRenderer#render}. Renders the scene normally, then
 * draws the outlines on top via {@link OutlineEffect#renderOutline}.
 *
 * @param {Object3D} scene - The scene to render.
 * @param {Camera} camera - The camera.
 */
this.render = function ( scene, camera ) {

	if ( this.enabled === false ) {

		// Outlining disabled: behave exactly like the plain renderer.
		renderer.render( scene, camera );

	} else {

		const savedAutoClear = renderer.autoClear;

		renderer.autoClear = this.autoClear;
		renderer.render( scene, camera );
		renderer.autoClear = savedAutoClear;

		this.renderOutline( scene, camera );

	}

};
/**
 * This method can be used to render outlines in VR.
 *
 * ```js
 * const effect = new OutlineEffect( renderer );
 * let renderingOutline = false;
 *
 * scene.onAfterRender = function () {
 *
 * 	if ( renderingOutline ) return;
 *
 * 	renderingOutline = true;
 * 	effect.renderOutline( scene, camera );
 * 	renderingOutline = false;
 * };
 *
 * function render() {
 * 	renderer.render( scene, camera );
 * }
 * ```
 *
 * @param {Object3D} scene - The scene to render.
 * @param {Camera} camera - The camera.
 */
this.renderOutline = function ( scene, camera ) {

	// Snapshot renderer/scene state so it can be restored afterwards.
	const savedAutoClear = renderer.autoClear;
	const savedMatrixAutoUpdate = scene.matrixWorldAutoUpdate;
	const savedBackground = scene.background;
	const savedShadowMapEnabled = renderer.shadowMap.enabled;

	// Matrices were already updated by the main render pass; background and
	// shadows must not affect the outline-only pass.
	scene.matrixWorldAutoUpdate = false;
	scene.background = null;
	renderer.autoClear = false;
	renderer.shadowMap.enabled = false;

	// Swap every material for its outline counterpart, render, swap back.
	scene.traverse( setOutlineMaterial );
	renderer.render( scene, camera );
	scene.traverse( restoreOriginalMaterial );

	cleanupCache();

	scene.matrixWorldAutoUpdate = savedMatrixAutoUpdate;
	scene.background = savedBackground;
	renderer.autoClear = savedAutoClear;
	renderer.shadowMap.enabled = savedShadowMapEnabled;

};
/**
 * Resizes the effect.
 *
 * Simply forwards to {@link WebGLRenderer#setSize}; the effect allocates no
 * render targets of its own that would need resizing.
 *
 * @param {number} width - The width of the effect in logical pixels.
 * @param {number} height - The height of the effect in logical pixels.
 */
this.setSize = function ( width, height ) {

	renderer.setSize( width, height );

};
}
}
/**
* This type represents configuration settings of `OutlineEffect`.
*
* @typedef {Object} OutlineEffect~Options
* @property {number} [defaultThickness=0.003] - The outline thickness.
* @property {Array<number>} [defaultColor=[0,0,0]] - The outline color.
* @property {number} [defaultAlpha=1] - The outline alpha value.
* @property {boolean} [defaultKeepAlive=false] - Whether to keep alive cached internal materials or not.
**/
export { OutlineEffect };

View File

@@ -0,0 +1,155 @@
import {
LinearFilter,
NearestFilter,
RGBAFormat,
ShaderMaterial,
StereoCamera,
WebGLRenderTarget
} from 'three';
import { FullScreenQuad } from '../postprocessing/Pass.js';
/**
* A class that creates an parallax barrier effect.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link ParallaxBarrierPassNode}.
*
* @three_import import { ParallaxBarrierEffect } from 'three/addons/effects/ParallaxBarrierEffect.js';
*/
class ParallaxBarrierEffect {

	/**
	 * Constructs a new parallax barrier effect.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 */
	constructor( renderer ) {

		// Produces the left/right eye cameras used below.
		const _stereo = new StereoCamera();

		// Eye render targets start at 512x512; setSize() resizes them to the
		// actual drawing-buffer dimensions.
		const _params = { minFilter: LinearFilter, magFilter: NearestFilter, format: RGBAFormat };

		const _renderTargetL = new WebGLRenderTarget( 512, 512, _params );
		const _renderTargetR = new WebGLRenderTarget( 512, 512, _params );

		// Full-screen material that interlaces the two eye textures row by
		// row based on gl_FragCoord.y.
		const _material = new ShaderMaterial( {

			uniforms: {

				'mapLeft': { value: _renderTargetL.texture },
				'mapRight': { value: _renderTargetR.texture }

			},

			vertexShader: [

				'varying vec2 vUv;',
				'void main() {',
				' vUv = vec2( uv.x, uv.y );',
				' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
				'}'

			].join( '\n' ),

			fragmentShader: [

				'uniform sampler2D mapLeft;',
				'uniform sampler2D mapRight;',
				'varying vec2 vUv;',
				'void main() {',
				' vec2 uv = vUv;',
				// mod( y, 2.0 ) evaluates to 0.5 on even rows and 1.5 on odd
				// rows (fragment centers), so odd rows take the left eye.
				' if ( ( mod( gl_FragCoord.y, 2.0 ) ) > 1.00 ) {',
				' gl_FragColor = texture2D( mapLeft, uv );',
				' } else {',
				' gl_FragColor = texture2D( mapRight, uv );',
				' }',
				' #include <tonemapping_fragment>',
				' #include <colorspace_fragment>',
				'}'

			].join( '\n' )

		} );

		const _quad = new FullScreenQuad( _material );

		/**
		 * Resizes the effect.
		 *
		 * @param {number} width - The width of the effect in logical pixels.
		 * @param {number} height - The height of the effect in logical pixels.
		 */
		this.setSize = function ( width, height ) {

			renderer.setSize( width, height );

			// Render targets live in device pixels, so scale by the ratio.
			const pixelRatio = renderer.getPixelRatio();

			_renderTargetL.setSize( width * pixelRatio, height * pixelRatio );
			_renderTargetR.setSize( width * pixelRatio, height * pixelRatio );

		};

		/**
		 * When using this effect, this method should be called instead of the
		 * default {@link WebGLRenderer#render}.
		 *
		 * @param {Object3D} scene - The scene to render.
		 * @param {Camera} camera - The camera.
		 */
		this.render = function ( scene, camera ) {

			// Remember the active render target so it can be restored.
			const currentRenderTarget = renderer.getRenderTarget();

			if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld();

			if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld();

			_stereo.update( camera );

			// Render each eye into its own target.
			renderer.setRenderTarget( _renderTargetL );
			renderer.clear();
			renderer.render( scene, _stereo.cameraL );

			renderer.setRenderTarget( _renderTargetR );
			renderer.clear();
			renderer.render( scene, _stereo.cameraR );

			// Composite the interlaced result to the screen.
			renderer.setRenderTarget( null );
			_quad.render( renderer );

			renderer.setRenderTarget( currentRenderTarget );

		};

		/**
		 * Frees internal resources. This method should be called
		 * when the effect is no longer required.
		 */
		this.dispose = function () {

			_renderTargetL.dispose();
			_renderTargetR.dispose();

			_material.dispose();
			_quad.dispose();

		};

	}

}
export { ParallaxBarrierEffect };

View File

@@ -0,0 +1,91 @@
import {
StereoCamera,
Vector2
} from 'three';
/**
* A class that creates an stereo effect.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link StereoPassNode}.
*
* @three_import import { StereoEffect } from 'three/addons/effects/StereoEffect.js';
*/
class StereoEffect {

	/**
	 * Constructs a new stereo effect.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 */
	constructor( renderer ) {

		const _stereo = new StereoCamera();
		_stereo.aspect = 0.5;

		// Scratch vector, reused each frame to query the renderer size.
		const _size = new Vector2();

		/**
		 * Sets the given eye separation.
		 *
		 * @param {number} eyeSep - The eye separation to set.
		 */
		this.setEyeSeparation = function ( eyeSep ) {

			_stereo.eyeSep = eyeSep;

		};

		/**
		 * Resizes the effect. Simply forwards to the renderer.
		 *
		 * @param {number} width - The width of the effect in logical pixels.
		 * @param {number} height - The height of the effect in logical pixels.
		 */
		this.setSize = function ( width, height ) {

			renderer.setSize( width, height );

		};

		/**
		 * When using this effect, this method should be called instead of the
		 * default {@link WebGLRenderer#render}. Renders the scene twice,
		 * side by side, once per stereo eye.
		 *
		 * @param {Object3D} scene - The scene to render.
		 * @param {Camera} camera - The camera.
		 */
		this.render = function ( scene, camera ) {

			if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld();

			if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld();

			_stereo.update( camera );

			const savedAutoClear = renderer.autoClear;

			renderer.getSize( _size );
			renderer.autoClear = false;
			renderer.clear();
			renderer.setScissorTest( true );

			const halfWidth = _size.width / 2;

			// Render one eye into the given horizontal half of the buffer.
			const renderEye = function ( x, eyeCamera ) {

				renderer.setScissor( x, 0, halfWidth, _size.height );
				renderer.setViewport( x, 0, halfWidth, _size.height );
				renderer.render( scene, eyeCamera );

			};

			renderEye( 0, _stereo.cameraL );
			renderEye( halfWidth, _stereo.cameraR );

			renderer.setScissorTest( false );
			renderer.autoClear = savedAutoClear;

		};

	}

}
export { StereoEffect };

View File

@@ -0,0 +1,102 @@
import {
BackSide,
BoxGeometry,
Mesh,
MeshLambertMaterial,
MeshStandardMaterial,
PointLight,
Scene,
} from 'three';
/**
* This class represents a scene with a very basic room setup that can be used as
* input for {@link PMREMGenerator#fromScene}. The resulting PMREM represents the room's
* lighting and can be used for Image Based Lighting by assigning it to {@link Scene#environment}
* or directly as an environment map to PBR materials.
*
* This class uses a simple room setup and should only be used for development purposes.
* A more appropriate setup for production is {@link RoomEnvironment}.
*
* ```js
* const environment = new DebugEnvironment();
* const pmremGenerator = new THREE.PMREMGenerator( renderer );
*
* const envMap = pmremGenerator.fromScene( environment ).texture;
* scene.environment = envMap;
* ```
*
* @augments Scene
* @three_import import { DebugEnvironment } from 'three/addons/environments/DebugEnvironment.js';
*/
class DebugEnvironment extends Scene {

	/**
	 * Constructs a new debug environment.
	 */
	constructor() {

		super();

		// All meshes share a single unit box; UVs are not needed for PMREM.
		const geometry = new BoxGeometry();
		geometry.deleteAttribute( 'uv' );

		// Enclosing room, rendered from the inside.
		const roomMaterial = new MeshStandardMaterial( { metalness: 0, side: BackSide } );
		const room = new Mesh( geometry, roomMaterial );
		room.scale.setScalar( 10 );
		this.add( room );

		const mainLight = new PointLight( 0xffffff, 50, 0, 2 );
		this.add( mainLight );

		// Three colored emissive boxes acting as area lights:
		// [ color, position, scale ].
		const areaLights = [
			[ 0xff0000, [ - 5, 2, 0 ], [ 0.1, 1, 1 ] ],
			[ 0x00ff00, [ 0, 5, 0 ], [ 1, 0.1, 1 ] ],
			[ 0x0000ff, [ 2, 1, 5 ], [ 1.5, 2, 0.1 ] ],
		];

		for ( const [ color, position, scale ] of areaLights ) {

			const material = new MeshLambertMaterial( { color: color, emissive: 0xffffff, emissiveIntensity: 10 } );
			const lightMesh = new Mesh( geometry, material );
			lightMesh.position.set( ...position );
			lightMesh.scale.set( ...scale );
			this.add( lightMesh );

		}

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the environment is no longer required.
	 */
	dispose() {

		// Collect into a Set first so shared resources are disposed once.
		const disposables = new Set();

		this.traverse( ( node ) => {

			if ( node.isMesh ) {

				disposables.add( node.geometry );
				disposables.add( node.material );

			}

		} );

		disposables.forEach( ( resource ) => resource.dispose() );

	}

}

View File

@@ -0,0 +1,184 @@
import {
BackSide,
BoxGeometry,
InstancedMesh,
Mesh,
MeshLambertMaterial,
MeshStandardMaterial,
PointLight,
Scene,
Object3D,
} from 'three';
/**
* This class represents a scene with a basic room setup that can be used as
* input for {@link PMREMGenerator#fromScene}. The resulting PMREM represents the room's
* lighting and can be used for Image Based Lighting by assigning it to {@link Scene#environment}
* or directly as an environment map to PBR materials.
*
* The implementation is based on the [EnvironmentScene](https://github.com/google/model-viewer/blob/master/packages/model-viewer/src/three-components/EnvironmentScene.ts)
* component from the `model-viewer` project.
*
* ```js
* const environment = new RoomEnvironment();
* const pmremGenerator = new THREE.PMREMGenerator( renderer );
*
* const envMap = pmremGenerator.fromScene( environment ).texture;
* scene.environment = envMap;
* ```
*
* @augments Scene
* @three_import import { RoomEnvironment } from 'three/addons/environments/RoomEnvironment.js';
*/
class RoomEnvironment extends Scene {

	/**
	 * Constructs a new room environment.
	 */
	constructor() {

		super();

		this.name = 'RoomEnvironment';

		// Shared unit box; UVs are not needed for PMREM generation.
		const geometry = new BoxGeometry();
		geometry.deleteAttribute( 'uv' );

		const roomMaterial = new MeshStandardMaterial( { side: BackSide } );
		const boxMaterial = new MeshStandardMaterial();

		const mainLight = new PointLight( 0xffffff, 900, 28, 2 );
		mainLight.position.set( 0.418, 16.199, 0.300 );
		this.add( mainLight );

		// Enclosing room, viewed from the inside.
		const room = new Mesh( geometry, roomMaterial );
		room.position.set( - 0.757, 13.219, 0.717 );
		room.scale.set( 31.713, 28.305, 28.591 );
		this.add( room );

		// Six furniture boxes, instanced: [ position, rotationY, scale ].
		const boxTransforms = [
			[ [ - 10.906, 2.009, 1.846 ], - 0.195, [ 2.328, 7.905, 4.651 ] ],
			[ [ - 5.607, - 0.754, - 0.758 ], 0.994, [ 1.970, 1.534, 3.955 ] ],
			[ [ 6.167, 0.857, 7.803 ], 0.561, [ 3.927, 6.285, 3.687 ] ],
			[ [ - 2.017, 0.018, 6.124 ], 0.333, [ 2.002, 4.566, 2.064 ] ],
			[ [ 2.291, - 0.756, - 2.621 ], - 0.286, [ 1.546, 1.552, 1.496 ] ],
			[ [ - 2.193, - 0.369, - 5.547 ], 0.516, [ 3.875, 3.487, 2.986 ] ],
		];

		const boxes = new InstancedMesh( geometry, boxMaterial, boxTransforms.length );
		const dummy = new Object3D();

		boxTransforms.forEach( ( [ position, rotationY, scale ], index ) => {

			dummy.position.set( ...position );
			dummy.rotation.set( 0, rotationY, 0 );
			dummy.scale.set( ...scale );
			dummy.updateMatrix();

			boxes.setMatrixAt( index, dummy.matrix );

		} );

		this.add( boxes );

		// Emissive panels acting as area lights: [ intensity, position, scale ].
		const panels = [
			[ 50, [ - 16.116, 14.37, 8.208 ], [ 0.1, 2.428, 2.739 ] ], // -x right
			[ 50, [ - 16.109, 18.021, - 8.207 ], [ 0.1, 2.425, 2.751 ] ], // -x left
			[ 17, [ 14.904, 12.198, - 1.832 ], [ 0.15, 4.265, 6.331 ] ], // +x
			[ 43, [ - 0.462, 8.89, 14.520 ], [ 4.38, 5.441, 0.088 ] ], // +z
			[ 20, [ 3.235, 11.486, - 12.541 ], [ 2.5, 2.0, 0.1 ] ], // -z
			[ 100, [ 0.0, 20.0, 0.0 ], [ 1.0, 0.1, 1.0 ] ], // +y
		];

		for ( const [ intensity, position, scale ] of panels ) {

			const panel = new Mesh( geometry, createAreaLightMaterial( intensity ) );
			panel.position.set( ...position );
			panel.scale.set( ...scale );
			this.add( panel );

		}

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the environment is no longer required.
	 */
	dispose() {

		// Collect into a Set first so shared resources are disposed once.
		const disposables = new Set();

		this.traverse( ( node ) => {

			if ( node.isMesh ) {

				disposables.add( node.geometry );
				disposables.add( node.material );

			}

		} );

		disposables.forEach( ( resource ) => resource.dispose() );

	}

}
/**
 * Creates an emissive-only material used to fake an area light. The base
 * color is black so the surface contributes nothing besides its emission.
 * see #31348
 *
 * @param {number} intensity - The emissive intensity of the panel.
 * @return {MeshLambertMaterial} The configured material.
 */
function createAreaLightMaterial( intensity ) {

	return new MeshLambertMaterial( {
		color: 0x000000,
		emissive: 0xffffff,
		emissiveIntensity: intensity
	} );

}
export { RoomEnvironment };

View File

@@ -0,0 +1,311 @@
import { Color, ColorManagement, SRGBColorSpace } from 'three';
/* global DracoEncoderModule */
/**
* An exporter to compress geometry with the Draco library.
*
* [Draco](https://google.github.io/draco/) is an open source library for compressing and
* decompressing 3D meshes and point clouds. Compressed geometry can be significantly smaller,
* at the cost of additional decoding time on the client device.
*
* Standalone Draco files have a `.drc` extension, and contain vertex positions,
* normals, colors, and other attributes. Draco files *do not* contain materials,
* textures, animation, or node hierarchies to use these features, embed Draco geometry
* inside of a glTF file. A normal glTF file can be converted to a Draco-compressed glTF file
* using [glTF-Pipeline](https://github.com/AnalyticalGraphicsInc/gltf-pipeline).
*
* ```js
* const exporter = new DRACOExporter();
* const data = exporter.parse( mesh, options );
* ```
*
* @three_import import { DRACOExporter } from 'three/addons/exporters/DRACOExporter.js';
*/
class DRACOExporter {

	/**
	 * Parses the given mesh or point cloud and generates the Draco output.
	 *
	 * @param {(Mesh|Points)} object - The mesh or point cloud to export.
	 * @param {DRACOExporter~Options} options - The export options.
	 * @return {Int8Array} The exported Draco.
	 */
	parse( object, options = {} ) {

		// Fill in defaults for any option the caller did not provide.
		options = Object.assign( {
			decodeSpeed: 5,
			encodeSpeed: 5,
			encoderMethod: DRACOExporter.MESH_EDGEBREAKER_ENCODING,
			quantization: [ 16, 8, 8, 8, 8 ],
			exportUvs: true,
			exportNormals: true,
			exportColor: false,
		}, options );

		// DracoEncoderModule is a global provided by draco_encoder.js; it is
		// not bundled with three.js and must be loaded by the application.
		if ( DracoEncoderModule === undefined ) {

			throw new Error( 'THREE.DRACOExporter: required the draco_encoder to work.' );

		}

		const geometry = object.geometry;

		const dracoEncoder = DracoEncoderModule();
		const encoder = new dracoEncoder.Encoder();
		let builder;
		let dracoObject;

		if ( object.isMesh === true ) {

			// Mesh path: build a Draco Mesh (faces + vertex attributes).
			builder = new dracoEncoder.MeshBuilder();
			dracoObject = new dracoEncoder.Mesh();

			const vertices = geometry.getAttribute( 'position' );
			builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.POSITION, vertices.count, vertices.itemSize, vertices.array );

			const faces = geometry.getIndex();

			if ( faces !== null ) {

				builder.AddFacesToMesh( dracoObject, faces.count / 3, faces.array );

			} else {

				// Non-indexed geometry: synthesize a trivial 0..n-1 index.
				const faces = new ( vertices.count > 65535 ? Uint32Array : Uint16Array )( vertices.count );

				for ( let i = 0; i < faces.length; i ++ ) {

					faces[ i ] = i;

				}

				// NOTE(review): passes vertices.count (not count / 3) as the
				// face count here — verify against the Draco MeshBuilder API.
				builder.AddFacesToMesh( dracoObject, vertices.count, faces );

			}

			if ( options.exportNormals === true ) {

				const normals = geometry.getAttribute( 'normal' );

				if ( normals !== undefined ) {

					builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.NORMAL, normals.count, normals.itemSize, normals.array );

				}

			}

			if ( options.exportUvs === true ) {

				const uvs = geometry.getAttribute( 'uv' );

				if ( uvs !== undefined ) {

					builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.TEX_COORD, uvs.count, uvs.itemSize, uvs.array );

				}

			}

			if ( options.exportColor === true ) {

				const colors = geometry.getAttribute( 'color' );

				if ( colors !== undefined ) {

					// Colors are converted to sRGB before encoding.
					const array = createVertexColorSRGBArray( colors );

					builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.COLOR, colors.count, colors.itemSize, array );

				}

			}

		} else if ( object.isPoints === true ) {

			// Point cloud path: positions (and optionally colors) only.
			builder = new dracoEncoder.PointCloudBuilder();
			dracoObject = new dracoEncoder.PointCloud();

			const vertices = geometry.getAttribute( 'position' );
			builder.AddFloatAttribute( dracoObject, dracoEncoder.POSITION, vertices.count, vertices.itemSize, vertices.array );

			if ( options.exportColor === true ) {

				const colors = geometry.getAttribute( 'color' );

				if ( colors !== undefined ) {

					const array = createVertexColorSRGBArray( colors );

					builder.AddFloatAttribute( dracoObject, dracoEncoder.COLOR, colors.count, colors.itemSize, array );

				}

			}

		} else {

			throw new Error( 'DRACOExporter: Unsupported object type.' );

		}

		//Compress using draco encoder

		const encodedData = new dracoEncoder.DracoInt8Array();

		//Sets the desired encoding and decoding speed for the given options from 0 (slowest speed, but the best compression) to 10 (fastest, but the worst compression).

		const encodeSpeed = ( options.encodeSpeed !== undefined ) ? options.encodeSpeed : 5;
		const decodeSpeed = ( options.decodeSpeed !== undefined ) ? options.decodeSpeed : 5;

		encoder.SetSpeedOptions( encodeSpeed, decodeSpeed );

		// Sets the desired encoding method for a given geometry.

		if ( options.encoderMethod !== undefined ) {

			encoder.SetEncodingMethod( options.encoderMethod );

		}

		// Sets the quantization (number of bits used to represent) compression options for a named attribute.
		// The attribute values will be quantized in a box defined by the maximum extent of the attribute values.
		if ( options.quantization !== undefined ) {

			for ( let i = 0; i < 5; i ++ ) {

				if ( options.quantization[ i ] !== undefined ) {

					encoder.SetAttributeQuantization( i, options.quantization[ i ] );

				}

			}

		}

		let length;

		if ( object.isMesh === true ) {

			length = encoder.EncodeMeshToDracoBuffer( dracoObject, encodedData );

		} else {

			length = encoder.EncodePointCloudToDracoBuffer( dracoObject, true, encodedData );

		}

		// The wasm-side geometry is no longer needed once encoded.
		dracoEncoder.destroy( dracoObject );

		if ( length === 0 ) {

			throw new Error( 'THREE.DRACOExporter: Draco encoding failed.' );

		}

		//Copy encoded data to buffer.
		const outputData = new Int8Array( new ArrayBuffer( length ) );

		for ( let i = 0; i < length; i ++ ) {

			outputData[ i ] = encodedData.GetValue( i );

		}

		// Release the remaining wasm-side objects to avoid leaking memory.
		dracoEncoder.destroy( encodedData );
		dracoEncoder.destroy( encoder );
		dracoEncoder.destroy( builder );

		return outputData;

	}

}
/**
 * Converts a vertex color attribute from the working color space to sRGB.
 *
 * While .drc files do not specify colorspace, the only 'official' tooling
 * is PLY and OBJ converters, which use sRGB. We'll assume sRGB is expected
 * for .drc files, but note that Draco buffers embedded in glTF files will
 * be Linear-sRGB instead.
 *
 * @param {BufferAttribute} attribute - The vertex color attribute.
 * @return {Float32Array} The sRGB color data, same layout as the input.
 */
function createVertexColorSRGBArray( attribute ) {

	const color = new Color();

	const { count, itemSize } = attribute;
	const result = new Float32Array( count * itemSize );

	for ( let index = 0; index < count; index ++ ) {

		color.fromBufferAttribute( attribute, index );
		ColorManagement.workingToColorSpace( color, SRGBColorSpace );

		const base = index * itemSize;

		result[ base ] = color.r;
		result[ base + 1 ] = color.g;
		result[ base + 2 ] = color.b;

		// Alpha (if present) is passed through unconverted.
		if ( itemSize === 4 ) result[ base + 3 ] = attribute.getW( index );

	}

	return result;

}
// Encoder methods
/**
 * Edgebreaker encoding.
 *
 * @static
 * @constant
 * @type {number}
 * @default 1
 */
DRACOExporter.MESH_EDGEBREAKER_ENCODING = 1;
/**
 * Sequential encoding.
 *
 * @static
 * @constant
 * @type {number}
 * @default 0
 */
DRACOExporter.MESH_SEQUENTIAL_ENCODING = 0;
// Geometry type
// Exported object is a point cloud (positions without connectivity).
DRACOExporter.POINT_CLOUD = 0;
// Exported object is a triangle mesh.
DRACOExporter.TRIANGULAR_MESH = 1;
// Attribute type
// Attribute identifiers; their order matches the quantization array
// ( POSITION, NORMAL, COLOR, TEX_COORD, GENERIC ) used in parse().
DRACOExporter.INVALID = - 1;
DRACOExporter.POSITION = 0;
DRACOExporter.NORMAL = 1;
DRACOExporter.COLOR = 2;
DRACOExporter.TEX_COORD = 3;
DRACOExporter.GENERIC = 4;
/**
* Export options of `DRACOExporter`.
*
* @typedef {Object} DRACOExporter~Options
* @property {number} [decodeSpeed=5] - Indicates how to tune the encoder regarding decode speed (0 gives better speed but worst quality).
* @property {number} [encodeSpeed=5] - Indicates how to tune the encoder parameters (0 gives better speed but worst quality).
* @property {number} [encoderMethod=1] - Either sequential (very little compression) or Edgebreaker. Edgebreaker traverses the triangles of the mesh in a deterministic, spiral-like way which provides most of the benefits of this data format.
* @property {Array<number>} [quantization=[ 16, 8, 8, 8, 8 ]] - Indicates the precision of each type of data stored in the draco file in the order (POSITION, NORMAL, COLOR, TEX_COORD, GENERIC).
* @property {boolean} [exportUvs=true] - Whether to export UVs or not.
* @property {boolean} [exportNormals=true] - Whether to export normals or not.
* @property {boolean} [exportColor=false] - Whether to export colors or not.
**/
export { DRACOExporter };

View File

@@ -0,0 +1,618 @@
import {
FloatType,
HalfFloatType,
RGBAFormat,
DataUtils,
} from 'three';
import * as fflate from '../libs/fflate.module.js';
const textEncoder = new TextEncoder();
const NO_COMPRESSION = 0;
const ZIPS_COMPRESSION = 2;
const ZIP_COMPRESSION = 3;
/**
* An exporter for EXR.
*
* EXR ( Extended Dynamic Range) is an [open format specification](https://github.com/AcademySoftwareFoundation/openexr)
* for professional-grade image storage format of the motion picture industry. The purpose of
* format is to accurately and efficiently represent high-dynamic-range scene-linear image data
* and associated metadata. The library is widely used in host application software where accuracy
* is critical, such as photorealistic rendering, texture access, image compositing, deep compositing,
* and DI.
*
* ```js
* const exporter = new EXRExporter();
* const result = await exporter.parse( renderer, options );
* ```
*
* @three_import import { EXRExporter } from 'three/addons/exporters/EXRExporter.js';
*/
class EXRExporter {

	/**
	 * This method has two variants.
	 *
	 * - When exporting a data texture, it receives two parameters. The texture and the exporter options.
	 * - When exporting a render target (e.g. a PMREM), it receives three parameters. The renderer, the
	 * render target and the exporter options.
	 *
	 * @async
	 * @param {(DataTexture|WebGPURenderer|WebGLRenderer)} arg1 - The data texture to export or a renderer.
	 * @param {(EXRExporter~Options|RenderTarget)} arg2 - The exporter options or a render target.
	 * @param {EXRExporter~Options} [arg3] - The exporter options.
	 * @return {Promise<Uint8Array>} A Promise that resolves with the exported EXR.
	 */
	async parse( arg1, arg2, arg3 ) {

		const isRenderer = Boolean( arg1 ) && ( arg1.isWebGLRenderer === true || arg1.isWebGPURenderer === true );
		const isTexture = Boolean( arg1 ) && arg1.isDataTexture === true;

		if ( ! isRenderer && ! isTexture ) {

			throw Error( 'EXRExporter.parse: Unsupported first parameter, expected instance of WebGLRenderer, WebGPURenderer or DataTexture.' );

		}

		if ( isRenderer ) {

			// Render-target variant: read pixels back from the GPU first.
			const renderer = arg1;
			const renderTarget = arg2;
			const options = arg3;

			supportedRTT( renderTarget );

			const info = buildInfoRTT( renderTarget, options );
			const dataBuffer = await getPixelData( renderer, renderTarget, info );
			const rawContentBuffer = reorganizeDataBuffer( dataBuffer, info );
			const chunks = compressData( rawContentBuffer, info );

			return fillData( chunks, info );

		}

		// DataTexture variant: pixel data is already available on the CPU.
		const texture = arg1;
		const options = arg2;

		supportedDT( texture );

		const info = buildInfoDT( texture, options );
		const rawContentBuffer = reorganizeDataBuffer( texture.image.data, info );
		const chunks = compressData( rawContentBuffer, info );

		return fillData( chunks, info );

	}

}
/**
 * Validates that the given render target can be exported as EXR.
 * Throws with a descriptive message on any unsupported input.
 *
 * @param {WebGLRenderTarget} renderTarget - The render target to validate.
 */
function supportedRTT( renderTarget ) {

	if ( ! renderTarget || ! renderTarget.isRenderTarget ) {

		throw Error( 'EXRExporter.parse: Unsupported second parameter, expected instance of WebGLRenderTarget.' );

	}

	// Cube, 3D and array targets have layers the scanline writer can't handle.
	const isLayered = renderTarget.isWebGLCubeRenderTarget ||
		renderTarget.isWebGL3DRenderTarget ||
		renderTarget.isWebGLArrayRenderTarget;

	if ( isLayered ) {

		throw Error( 'EXRExporter.parse: Unsupported render target type, expected instance of WebGLRenderTarget.' );

	}

	const texture = renderTarget.texture;

	if ( texture.type !== FloatType && texture.type !== HalfFloatType ) {

		throw Error( 'EXRExporter.parse: Unsupported WebGLRenderTarget texture type.' );

	}

	if ( texture.format !== RGBAFormat ) {

		throw Error( 'EXRExporter.parse: Unsupported WebGLRenderTarget texture format, expected RGBAFormat.' );

	}

}
/**
 * Validates that the given data texture can be exported as EXR.
 * Throws with a descriptive message on any unsupported input.
 *
 * @param {DataTexture} texture - The texture to validate.
 */
function supportedDT( texture ) {

	const { type, format } = texture;

	if ( type !== FloatType && type !== HalfFloatType ) {

		throw Error( 'EXRExporter.parse: Unsupported DataTexture texture type.' );

	}

	if ( format !== RGBAFormat ) {

		throw Error( 'EXRExporter.parse: Unsupported DataTexture texture format, expected RGBAFormat.' );

	}

	const data = texture.image.data;

	if ( ! data ) {

		throw Error( 'EXRExporter.parse: Invalid DataTexture image data.' );

	}

	// The backing array must match the declared texel type.
	if ( type === FloatType && data.constructor.name !== 'Float32Array' ) {

		throw Error( 'EXRExporter.parse: DataTexture image data doesn\'t match type, expected \'Float32Array\'.' );

	}

	if ( type === HalfFloatType && data.constructor.name !== 'Uint16Array' ) {

		throw Error( 'EXRExporter.parse: DataTexture image data doesn\'t match type, expected \'Uint16Array\'.' );

	}

}
/**
 * Builds the export info object for a render-target source.
 *
 * @param {WebGLRenderTarget} renderTarget - The render target being exported.
 * @param {Object} [options={}] - Exporter options (`compression`, `type`).
 * @return {Object} Dimensions, formats and chunking parameters for the writer.
 */
function buildInfoRTT( renderTarget, options = {} ) {

	// Scanlines per chunk for each supported compression scheme
	// (NO_COMPRESSION and ZIPS: 1 line, ZIP: 16 lines).
	const compressionSizes = {
		0: 1,
		2: 1,
		3: 16
	};

	const compression = ( options.compression !== undefined ) ? options.compression : ZIP_COMPRESSION;
	const exporterType = ( options.type !== undefined ) ? options.type : HalfFloatType;

	// 1 = half float (2 bytes), 2 = full float (4 bytes).
	const dataType = ( exporterType === FloatType ) ? 2 : 1;
	const blockLines = compressionSizes[ compression ];

	return {
		width: renderTarget.width,
		height: renderTarget.height,
		type: renderTarget.texture.type,
		format: renderTarget.texture.format,
		compression: compression,
		blockLines: blockLines,
		dataType: dataType,
		dataSize: 2 * dataType,
		numBlocks: Math.ceil( renderTarget.height / blockLines ),
		numInputChannels: 4,
		numOutputChannels: 4,
	};

}
/**
 * Builds the export info object for a data-texture source.
 *
 * @param {DataTexture} texture - The texture being exported.
 * @param {Object} [options={}] - Exporter options (`compression`, `type`).
 * @return {Object} Dimensions, formats and chunking parameters for the writer.
 */
function buildInfoDT( texture, options = {} ) {

	// Scanlines per chunk for each supported compression scheme
	// (NO_COMPRESSION and ZIPS: 1 line, ZIP: 16 lines).
	const compressionSizes = {
		0: 1,
		2: 1,
		3: 16
	};

	const compression = ( options.compression !== undefined ) ? options.compression : ZIP_COMPRESSION;
	const exporterType = ( options.type !== undefined ) ? options.type : HalfFloatType;

	// 1 = half float (2 bytes), 2 = full float (4 bytes).
	const dataType = ( exporterType === FloatType ) ? 2 : 1;
	const blockLines = compressionSizes[ compression ];

	return {
		width: texture.image.width,
		height: texture.image.height,
		type: texture.type,
		format: texture.format,
		compression: compression,
		blockLines: blockLines,
		dataType: dataType,
		dataSize: 2 * dataType,
		numBlocks: Math.ceil( texture.image.height / blockLines ),
		numInputChannels: 4,
		numOutputChannels: 4,
	};

}
/**
 * Reads back the pixel data of a render target.
 *
 * @async
 * @param {(WebGLRenderer|WebGPURenderer)} renderer - The renderer owning the target.
 * @param {WebGLRenderTarget} rtt - The render target to read from.
 * @param {Object} info - Export info (width, height, type, channel count).
 * @return {Promise<TypedArray>} The interleaved RGBA pixel data.
 */
async function getPixelData( renderer, rtt, info ) {

	if ( renderer.isWebGLRenderer ) {

		// WebGL path: the caller must supply a pre-allocated target buffer.
		const ArrayCtor = ( info.type === FloatType ) ? Float32Array : Uint16Array;
		const dataBuffer = new ArrayCtor( info.width * info.height * info.numInputChannels );

		await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, info.width, info.height, dataBuffer );

		return dataBuffer;

	}

	// WebGPU path: the renderer allocates and returns the buffer itself.
	return await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, info.width, info.height );

}
function reorganizeDataBuffer( inBuffer, info ) {
const w = info.width,
h = info.height,
dec = { r: 0, g: 0, b: 0, a: 0 },
offset = { value: 0 },
cOffset = ( info.numOutputChannels == 4 ) ? 1 : 0,
getValue = ( info.type == FloatType ) ? getFloat32 : getFloat16,
setValue = ( info.dataType == 1 ) ? setFloat16 : setFloat32,
outBuffer = new Uint8Array( info.width * info.height * info.numOutputChannels * info.dataSize ),
dv = new DataView( outBuffer.buffer );
for ( let y = 0; y < h; ++ y ) {
for ( let x = 0; x < w; ++ x ) {
const i = y * w * 4 + x * 4;
const r = getValue( inBuffer, i );
const g = getValue( inBuffer, i + 1 );
const b = getValue( inBuffer, i + 2 );
const a = getValue( inBuffer, i + 3 );
const line = ( h - y - 1 ) * w * ( 3 + cOffset ) * info.dataSize;
decodeLinear( dec, r, g, b, a );
offset.value = line + x * info.dataSize;
setValue( dv, dec.a, offset );
offset.value = line + ( cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.b, offset );
offset.value = line + ( 1 + cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.g, offset );
offset.value = line + ( 2 + cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.r, offset );
}
}
return outBuffer;
}
/**
 * Splits the planar scanline buffer into chunks and compresses each one
 * with the routine selected by `info.compression`.
 *
 * @param {Uint8Array} inBuffer - Planar scanline data from reorganizeDataBuffer.
 * @param {Object} info - Export info (chunk geometry and compression mode).
 * @return {{data: Array<{dataChunk: Uint8Array, size: number}>, totalSize: number}} The compressed chunks.
 */
function compressData( inBuffer, info ) {

	const chunks = { data: [], totalSize: 0 };
	const blockSize = info.width * info.numOutputChannels * info.blockLines * info.dataSize;

	// Pick the per-chunk compression routine.
	let compress;

	switch ( info.compression ) {

		case 0:
			compress = compressNONE;
			break;

		case 2:
		case 3:
			compress = compressZIP;
			break;

	}

	// ZIP/ZIPS need a scratch buffer for the reorder + predictor pass.
	let tmpBuffer;

	if ( info.compression !== 0 ) {

		tmpBuffer = new Uint8Array( blockSize );

	}

	let sum = 0;

	for ( let i = 0; i < info.numBlocks; ++ i ) {

		const slice = inBuffer.subarray( blockSize * i, blockSize * ( i + 1 ) );
		const block = compress( slice, tmpBuffer );

		sum += block.length;
		chunks.data.push( { dataChunk: block, size: block.length } );

	}

	chunks.totalSize = sum;

	return chunks;

}
// Pass-through for NO_COMPRESSION: the scanline chunk is stored as-is.
function compressNONE( data ) {

	return data;

}
/**
 * Compresses one scanline chunk with EXR's ZIP scheme: the bytes are
 * de-interleaved into two halves, delta-encoded, then deflated.
 *
 * Note: `tmpBuffer` is mutated in place as scratch space.
 *
 * @param {Uint8Array} data - The raw chunk bytes.
 * @param {Uint8Array} tmpBuffer - Scratch buffer of the same length.
 * @return {Uint8Array} The zlib-compressed chunk.
 */
function compressZIP( data, tmpBuffer ) {

	//
	// Reorder the pixel data: even-indexed bytes go to the front half of
	// tmpBuffer, odd-indexed bytes to the back half.
	//
	const lastIndex = data.length - 1;
	let front = 0;
	let back = Math.floor( ( data.length + 1 ) / 2 );

	for ( let src = 0; src <= lastIndex; ) {

		tmpBuffer[ front ++ ] = data[ src ++ ];

		if ( src > lastIndex ) break;

		tmpBuffer[ back ++ ] = data[ src ++ ];

	}

	//
	// Predictor: replace each byte with its biased delta from the previous
	// one (the Uint8Array store wraps the value modulo 256).
	//
	let previous = tmpBuffer[ 0 ];

	for ( let t = 1; t < tmpBuffer.length; t ++ ) {

		const current = tmpBuffer[ t ];
		tmpBuffer[ t ] = current - previous + ( 128 + 256 );
		previous = current;

	}

	return fflate.zlibSync( tmpBuffer );

}
/**
 * Writes the OpenEXR header (magic, version, attribute list, terminator) and
 * the scanline-offset table at the start of the output buffer.
 *
 * Each attribute is serialized as: name\0 type\0 byteSize value.
 *
 * @param {Uint8Array} outBuffer - Destination buffer, written from offset 0.
 * @param {Object} chunks - Compressed chunk list (drives the offset table).
 * @param {Object} info - Image description (width, height, compression, dataType, numBlocks).
 */
function fillHeader( outBuffer, chunks, info ) {
	const offset = { value: 0 };
	const dv = new DataView( outBuffer.buffer );
	setUint32( dv, 20000630, offset ); // magic
	setUint32( dv, 2, offset ); // mask
	// = HEADER =
	setString( dv, 'compression', offset );
	setString( dv, 'compression', offset ); // attribute type is also named 'compression'
	setUint32( dv, 1, offset );
	setUint8( dv, info.compression, offset );
	setString( dv, 'screenWindowCenter', offset );
	setString( dv, 'v2f', offset );
	setUint32( dv, 8, offset );
	setUint32( dv, 0, offset );
	setUint32( dv, 0, offset );
	setString( dv, 'screenWindowWidth', offset );
	setString( dv, 'float', offset );
	setUint32( dv, 4, offset );
	setFloat32( dv, 1.0, offset );
	setString( dv, 'pixelAspectRatio', offset );
	setString( dv, 'float', offset );
	setUint32( dv, 4, offset );
	setFloat32( dv, 1.0, offset );
	setString( dv, 'lineOrder', offset );
	setString( dv, 'lineOrder', offset ); // attribute type is also named 'lineOrder'
	setUint32( dv, 1, offset );
	setUint8( dv, 0, offset ); // 0 = increasing Y
	// dataWindow / displayWindow: box2i [ xMin, yMin, xMax, yMax ], both full-image.
	setString( dv, 'dataWindow', offset );
	setString( dv, 'box2i', offset );
	setUint32( dv, 16, offset );
	setUint32( dv, 0, offset );
	setUint32( dv, 0, offset );
	setUint32( dv, info.width - 1, offset );
	setUint32( dv, info.height - 1, offset );
	setString( dv, 'displayWindow', offset );
	setString( dv, 'box2i', offset );
	setUint32( dv, 16, offset );
	setUint32( dv, 0, offset );
	setUint32( dv, 0, offset );
	setUint32( dv, info.width - 1, offset );
	setUint32( dv, info.height - 1, offset );
	// Channel list: each entry is 18 bytes (name\0 + pixelType + 4 skipped
	// bytes + x/y sampling), in alphabetical order (A, B, G, R), plus a
	// terminating null byte.
	setString( dv, 'channels', offset );
	setString( dv, 'chlist', offset );
	setUint32( dv, info.numOutputChannels * 18 + 1, offset );
	setString( dv, 'A', offset );
	setUint32( dv, info.dataType, offset );
	offset.value += 4; // pLinear + 3 reserved bytes, left zero
	setUint32( dv, 1, offset );
	setUint32( dv, 1, offset );
	setString( dv, 'B', offset );
	setUint32( dv, info.dataType, offset );
	offset.value += 4; // pLinear + 3 reserved bytes, left zero
	setUint32( dv, 1, offset );
	setUint32( dv, 1, offset );
	setString( dv, 'G', offset );
	setUint32( dv, info.dataType, offset );
	offset.value += 4; // pLinear + 3 reserved bytes, left zero
	setUint32( dv, 1, offset );
	setUint32( dv, 1, offset );
	setString( dv, 'R', offset );
	setUint32( dv, info.dataType, offset );
	offset.value += 4; // pLinear + 3 reserved bytes, left zero
	setUint32( dv, 1, offset );
	setUint32( dv, 1, offset );
	setUint8( dv, 0, offset ); // end of channel list
	// null-byte
	setUint8( dv, 0, offset );
	// = OFFSET TABLE =
	// Absolute file offset of each chunk; the first chunk starts right after
	// the table, and each subsequent one is shifted by payload size + the
	// 8-byte per-chunk ( scanline, size ) prefix.
	let sum = offset.value + info.numBlocks * 8;
	for ( let i = 0; i < chunks.data.length; ++ i ) {
		setUint64( dv, sum, offset );
		sum += chunks.data[ i ].size + 8;
	}
}
/**
 * Assembles the final EXR byte stream: header and offset table (via
 * fillHeader), followed by each compressed chunk prefixed with its first
 * scanline number and its byte size.
 *
 * @param {Object} chunks - Output of compressData.
 * @param {Object} info - Image description.
 * @return {Uint8Array} The complete EXR file contents.
 */
function fillData( chunks, info ) {
	const TableSize = info.numBlocks * 8,
		// Fixed header bytes plus 18 bytes per channel-list entry; must match
		// exactly what fillHeader writes.
		HeaderSize = 259 + ( 18 * info.numOutputChannels ), // 259 + 18 * chlist
		offset = { value: HeaderSize + TableSize },
		// Extra numBlocks * 8 covers the per-chunk ( scanline, size ) prefixes.
		outBuffer = new Uint8Array( HeaderSize + TableSize + chunks.totalSize + info.numBlocks * 8 ),
		dv = new DataView( outBuffer.buffer );
	fillHeader( outBuffer, chunks, info );
	for ( let i = 0; i < chunks.data.length; ++ i ) {
		const data = chunks.data[ i ].dataChunk;
		const size = chunks.data[ i ].size;
		// Chunk prefix: first scanline covered by this block, then payload size.
		setUint32( dv, i * info.blockLines, offset );
		setUint32( dv, size, offset );
		outBuffer.set( data, offset.value );
		offset.value += size;
	}
	return outBuffer;
}
/**
 * Linear transfer function: copies the four channel values into the shared
 * output record unchanged.
 *
 * @param {Object} dec - Mutable `{ r, g, b, a }` accumulator.
 * @param {number} r
 * @param {number} g
 * @param {number} b
 * @param {number} a
 */
function decodeLinear( dec, r, g, b, a ) {
	Object.assign( dec, { r, g, b, a } );
}
// function decodeSRGB( dec, r, g, b, a ) {
// dec.r = r > 0.04045 ? Math.pow( r * 0.9478672986 + 0.0521327014, 2.4 ) : r * 0.0773993808;
// dec.g = g > 0.04045 ? Math.pow( g * 0.9478672986 + 0.0521327014, 2.4 ) : g * 0.0773993808;
// dec.b = b > 0.04045 ? Math.pow( b * 0.9478672986 + 0.0521327014, 2.4 ) : b * 0.0773993808;
// dec.a = a;
// }
// Writes one byte at the cursor position, then advances the cursor.
function setUint8( dv, value, offset ) {
	dv.setUint8( offset.value ++, value );
}
// Little-endian 32-bit unsigned write at the cursor; advances it by 4 bytes.
function setUint32( dv, value, offset ) {
	const at = offset.value;
	offset.value = at + 4;
	dv.setUint32( at, value, true );
}
// Converts the value to IEEE half precision and writes it little-endian;
// advances the cursor by 2 bytes.
function setFloat16( dv, value, offset ) {
	const half = DataUtils.toHalfFloat( value );
	dv.setUint16( offset.value, half, true );
	offset.value += 2;
}
// Little-endian 32-bit float write at the cursor; advances it by 4 bytes.
function setFloat32( dv, value, offset ) {
	const at = offset.value;
	offset.value = at + 4;
	dv.setFloat32( at, value, true );
}
// Little-endian 64-bit unsigned write at the cursor (value is coerced to
// BigInt); advances the cursor by 8 bytes.
function setUint64( dv, value, offset ) {
	const at = offset.value;
	offset.value = at + 8;
	dv.setBigUint64( at, BigInt( value ), true );
}
// Writes the string as UTF-8 followed by a terminating null byte, advancing
// the cursor one byte at a time.
function setString( dv, string, offset ) {
	const bytes = textEncoder.encode( string + '\0' );
	for ( const byte of bytes ) {
		setUint8( dv, byte, offset );
	}
}
/**
 * Decodes a 16-bit IEEE half-float bit pattern into a JavaScript Number.
 *
 * @param {number} binary - The raw 16-bit pattern.
 * @return {number} The decoded value (may be ±Infinity or NaN).
 */
function decodeFloat16( binary ) {
	const sign = ( binary >> 15 ) ? - 1 : 1;
	const exponent = ( binary & 0x7C00 ) >> 10;
	const fraction = binary & 0x03FF;
	if ( exponent === 0 ) {
		// Subnormal: 2^-14 * fraction / 1024 (also handles signed zero).
		return sign * 6.103515625e-5 * ( fraction / 0x400 );
	}
	if ( exponent === 0x1F ) {
		// All-ones exponent encodes Infinity (fraction 0) or NaN.
		return fraction ? NaN : sign * Infinity;
	}
	// Normalized: 2^(e-15) * 1.fraction
	return sign * Math.pow( 2, exponent - 15 ) * ( 1 + fraction / 0x400 );
}
// Reads a raw half-float sample from the array and decodes it to a Number.
function getFloat16( arr, i ) {
	return decodeFloat16( arr[ i ] );
}
// Reads a sample from a float array; the element is already a JS Number.
function getFloat32( arr, i ) {
	return arr[ i ];
}
/**
* Export options of `EXRExporter`.
*
* @typedef {Object} EXRExporter~Options
* @property {(HalfFloatType|FloatType)} [type=HalfFloatType] - Output data type.
* @property {(NO_COMPRESSION|ZIP_COMPRESSION|ZIPS_COMPRESSION)} [compression=ZIP_COMPRESSION] - The compression algorithm.
**/
export { EXRExporter, NO_COMPRESSION, ZIP_COMPRESSION, ZIPS_COMPRESSION };

3595
node_modules/three/examples/jsm/exporters/GLTFExporter.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,347 @@
import {
ColorManagement,
FloatType,
HalfFloatType,
UnsignedByteType,
RGBAFormat,
RGFormat,
RGIntegerFormat,
RedFormat,
RedIntegerFormat,
NoColorSpace,
LinearSRGBColorSpace,
SRGBColorSpace,
SRGBTransfer,
DataTexture,
REVISION,
} from 'three';
import {
createDefaultContainer,
write,
KHR_DF_CHANNEL_RGBSDA_ALPHA,
KHR_DF_CHANNEL_RGBSDA_BLUE,
KHR_DF_CHANNEL_RGBSDA_GREEN,
KHR_DF_CHANNEL_RGBSDA_RED,
KHR_DF_MODEL_RGBSDA,
KHR_DF_PRIMARIES_BT709,
KHR_DF_PRIMARIES_UNSPECIFIED,
KHR_DF_SAMPLE_DATATYPE_FLOAT,
KHR_DF_SAMPLE_DATATYPE_LINEAR,
KHR_DF_SAMPLE_DATATYPE_SIGNED,
KHR_DF_TRANSFER_LINEAR,
KHR_DF_TRANSFER_SRGB,
VK_FORMAT_R16_SFLOAT,
VK_FORMAT_R16G16_SFLOAT,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_R32_SFLOAT,
VK_FORMAT_R32G32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R8_SRGB,
VK_FORMAT_R8_UNORM,
VK_FORMAT_R8G8_SRGB,
VK_FORMAT_R8G8_UNORM,
VK_FORMAT_R8G8B8A8_SRGB,
VK_FORMAT_R8G8B8A8_UNORM,
} from '../libs/ktx-parse.module.js';
/**
* References:
* - https://github.khronos.org/KTX-Specification/ktxspec.v2.html
* - https://registry.khronos.org/DataFormat/specs/1.3/dataformat.1.3.html
* - https://github.com/donmccurdy/KTX-Parse
*/
/**
 * Maps a three.js ( format, type, colorSpace ) triple onto the matching
 * Vulkan texture-format constant used by the KTX2 container. A missing entry
 * at any level marks the combination as unsupported.
 */
const VK_FORMAT_MAP = {
	[ RGBAFormat ]: {
		[ FloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R32G32B32A32_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R32G32B32A32_SFLOAT,
		},
		[ HalfFloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R16G16B16A16_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R16G16B16A16_SFLOAT,
		},
		[ UnsignedByteType ]: {
			[ NoColorSpace ]: VK_FORMAT_R8G8B8A8_UNORM,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R8G8B8A8_UNORM,
			[ SRGBColorSpace ]: VK_FORMAT_R8G8B8A8_SRGB,
		},
	},
	[ RGFormat ]: {
		[ FloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R32G32_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R32G32_SFLOAT,
		},
		[ HalfFloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R16G16_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R16G16_SFLOAT,
		},
		[ UnsignedByteType ]: {
			[ NoColorSpace ]: VK_FORMAT_R8G8_UNORM,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R8G8_UNORM,
			[ SRGBColorSpace ]: VK_FORMAT_R8G8_SRGB,
		},
	},
	[ RedFormat ]: {
		[ FloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R32_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R32_SFLOAT,
		},
		[ HalfFloatType ]: {
			[ NoColorSpace ]: VK_FORMAT_R16_SFLOAT,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R16_SFLOAT,
		},
		[ UnsignedByteType ]: {
			[ NoColorSpace ]: VK_FORMAT_R8_UNORM,
			[ LinearSRGBColorSpace ]: VK_FORMAT_R8_UNORM,
			[ SRGBColorSpace ]: VK_FORMAT_R8_SRGB,
		},
	},
};
// Channel identifiers indexed by interleaved channel position (R, G, B, A).
const KHR_DF_CHANNEL_MAP = [
	KHR_DF_CHANNEL_RGBSDA_RED,
	KHR_DF_CHANNEL_RGBSDA_GREEN,
	KHR_DF_CHANNEL_RGBSDA_BLUE,
	KHR_DF_CHANNEL_RGBSDA_ALPHA,
];
// [ sampleLower, sampleUpper ] per data type; float entries are the raw
// IEEE-754 bit patterns of -1.0 and 1.0.
// TODO: sampleLower and sampleUpper may change based on color space.
const KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER = {
	[ FloatType ]: [ 0xbf800000, 0x3f800000 ],
	[ HalfFloatType ]: [ 0xbf800000, 0x3f800000 ],
	[ UnsignedByteType ]: [ 0, 255 ],
};
const ERROR_INPUT = 'THREE.KTX2Exporter: Supported inputs are DataTexture, Data3DTexture, or WebGLRenderer and WebGLRenderTarget.';
const ERROR_FORMAT = 'THREE.KTX2Exporter: Supported formats are RGBAFormat, RGFormat, or RedFormat.';
// Fixed: the original message ended with a stray '"' before the closing quote.
const ERROR_TYPE = 'THREE.KTX2Exporter: Supported types are FloatType, HalfFloatType, or UnsignedByteType.';
const ERROR_COLOR_SPACE = 'THREE.KTX2Exporter: Supported color spaces are SRGBColorSpace (UnsignedByteType only), LinearSRGBColorSpace, or NoColorSpace.';
/**
* An exporter for KTX2.
*
* ```js
* const exporter = new KTX2Exporter();
* const result = await exporter.parse( dataTexture );
* ```
*
* @three_import import { KTX2Exporter } from 'three/addons/exporters/KTX2Exporter.js';
*/
export class KTX2Exporter {

	/**
	 * This method has two variants.
	 *
	 * - When exporting a data texture, it receives one parameter. The data or 3D data texture.
	 * - When exporting a render target (e.g. a PMREM), it receives two parameters. The renderer and the
	 * render target.
	 *
	 * @async
	 * @param {(DataTexture|Data3DTexture|WebGPURenderer|WebGLRenderer)} arg1 - The data texture to export or a renderer.
	 * @param {RenderTarget} [arg2] - The render target that should be exported
	 * @return {Promise<Uint8Array>} A Promise that resolves with the exported KTX2.
	 */
	async parse( arg1, arg2 ) {

		let texture;
		if ( arg1.isDataTexture || arg1.isData3DTexture ) {
			texture = arg1;
		} else if ( ( arg1.isWebGLRenderer || arg1.isWebGPURenderer ) && arg2.isRenderTarget ) {
			// Render-target variant: read pixels back into a DataTexture first.
			texture = await toDataTexture( arg1, arg2 );
		} else {
			throw new Error( ERROR_INPUT );
		}
		// Validate the format/type/colorSpace combination against the lookup
		// table before touching any container state.
		if ( VK_FORMAT_MAP[ texture.format ] === undefined ) {
			throw new Error( ERROR_FORMAT );
		}
		if ( VK_FORMAT_MAP[ texture.format ][ texture.type ] === undefined ) {
			throw new Error( ERROR_TYPE );
		}
		if ( VK_FORMAT_MAP[ texture.format ][ texture.type ][ texture.colorSpace ] === undefined ) {
			throw new Error( ERROR_COLOR_SPACE );
		}
		//
		const array = texture.image.data;
		const channelCount = getChannelCount( texture );
		const container = createDefaultContainer();
		container.vkFormat = VK_FORMAT_MAP[ texture.format ][ texture.type ][ texture.colorSpace ];
		container.typeSize = array.BYTES_PER_ELEMENT;
		container.pixelWidth = texture.image.width;
		container.pixelHeight = texture.image.height;
		if ( texture.isData3DTexture ) {
			container.pixelDepth = texture.image.depth;
		}
		// Fill in the basic Khronos Data Format descriptor for the texture.
		const basicDesc = container.dataFormatDescriptor[ 0 ];
		basicDesc.colorModel = KHR_DF_MODEL_RGBSDA;
		basicDesc.colorPrimaries = texture.colorSpace === NoColorSpace
			? KHR_DF_PRIMARIES_UNSPECIFIED
			: KHR_DF_PRIMARIES_BT709;
		basicDesc.transferFunction = ColorManagement.getTransfer( texture.colorSpace ) === SRGBTransfer
			? KHR_DF_TRANSFER_SRGB
			: KHR_DF_TRANSFER_LINEAR;
		basicDesc.texelBlockDimension = [ 0, 0, 0, 0 ];
		basicDesc.bytesPlane = [
			container.typeSize * channelCount, 0, 0, 0, 0, 0, 0, 0,
		];
		// One sample entry per channel, in R, G, B, A order.
		for ( let i = 0; i < channelCount; ++ i ) {
			let channelType = KHR_DF_CHANNEL_MAP[ i ];
			// Assign KHR_DF_SAMPLE_DATATYPE_LINEAR if the channel is linear _and_ differs from the transfer function.
			if ( channelType === KHR_DF_CHANNEL_RGBSDA_ALPHA && basicDesc.transferFunction !== KHR_DF_TRANSFER_LINEAR ) {
				channelType |= KHR_DF_SAMPLE_DATATYPE_LINEAR;
			}
			if ( texture.type === FloatType || texture.type === HalfFloatType ) {
				channelType |= KHR_DF_SAMPLE_DATATYPE_FLOAT;
				channelType |= KHR_DF_SAMPLE_DATATYPE_SIGNED;
			}
			basicDesc.samples.push( {
				channelType: channelType,
				bitOffset: i * array.BYTES_PER_ELEMENT * 8,
				bitLength: array.BYTES_PER_ELEMENT * 8 - 1,
				samplePosition: [ 0, 0, 0, 0 ],
				sampleLower: KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER[ texture.type ][ 0 ],
				sampleUpper: KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER[ texture.type ][ 1 ],
			} );
		}
		// Single mip level wrapping the raw pixel data, uncompressed.
		container.levelCount = 1;
		container.levels = [ {
			levelData: new Uint8Array( array.buffer, array.byteOffset, array.byteLength ),
			uncompressedByteLength: array.byteLength,
		} ];
		//
		container.keyValue[ 'KTXwriter' ] = `three.js ${ REVISION }`;
		//
		return write( container, { keepWriter: true } );
	}
}
/**
 * Reads back the pixels of a render target into a DataTexture so it can be
 * serialized like any other data texture.
 *
 * @param {(WebGLRenderer|WebGPURenderer)} renderer - Renderer that owns the target.
 * @param {RenderTarget} rtt - The render target to read from.
 * @return {Promise<DataTexture>} Texture wrapping the read-back pixels.
 */
async function toDataTexture( renderer, rtt ) {
	const channelCount = getChannelCount( rtt.texture );
	let view;
	if ( renderer.isWebGLRenderer ) {
		// WebGL requires a caller-allocated typed array matching the texel type.
		if ( rtt.texture.type === FloatType ) {
			view = new Float32Array( rtt.width * rtt.height * channelCount );
		} else if ( rtt.texture.type === HalfFloatType ) {
			view = new Uint16Array( rtt.width * rtt.height * channelCount );
		} else if ( rtt.texture.type === UnsignedByteType ) {
			view = new Uint8Array( rtt.width * rtt.height * channelCount );
		} else {
			throw new Error( ERROR_TYPE );
		}
		await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, rtt.width, rtt.height, view );
	} else {
		// The WebGPU path allocates and returns the pixel buffer itself.
		view = await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, rtt.width, rtt.height );
	}
	const texture = new DataTexture( view, rtt.width, rtt.height, rtt.texture.format, rtt.texture.type );
	texture.colorSpace = rtt.texture.colorSpace;
	return texture;
}
/**
 * Maps a three.js pixel format onto its component count.
 *
 * @param {Texture} texture - Texture whose `format` is inspected.
 * @return {number} Number of channels (1, 2 or 4).
 * @throws {Error} For unsupported formats.
 */
function getChannelCount( texture ) {
	const format = texture.format;
	if ( format === RGBAFormat ) {
		return 4;
	}
	if ( format === RGFormat || format === RGIntegerFormat ) {
		return 2;
	}
	if ( format === RedFormat || format === RedIntegerFormat ) {
		return 1;
	}
	throw new Error( ERROR_FORMAT );
}

View File

@@ -0,0 +1,308 @@
import {
Color,
ColorManagement,
Matrix3,
SRGBColorSpace,
Vector2,
Vector3
} from 'three';
/**
* An exporter for OBJ.
*
* `OBJExporter` is not able to export material data into MTL files so only geometry data are supported.
*
* ```js
* const exporter = new OBJExporter();
* const data = exporter.parse( scene );
* ```
*
* @three_import import { OBJExporter } from 'three/addons/exporters/OBJExporter.js';
*/
class OBJExporter {

	/**
	 * Parses the given 3D object and generates the OBJ output.
	 *
	 * If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
	 *
	 * @param {Object3D} object - The 3D object to export.
	 * @return {string} The exported OBJ.
	 */
	parse( object ) {
		let output = '';
		// Running totals of vertices/uvs/normals already emitted; OBJ face
		// indices are global and 1-based, so each mesh's local indices are
		// shifted by these counters.
		let indexVertex = 0;
		let indexVertexUvs = 0;
		let indexNormals = 0;
		// Scratch objects reused across all children to avoid reallocation.
		const vertex = new Vector3();
		const color = new Color();
		const normal = new Vector3();
		const uv = new Vector2();
		const face = [];
		// Appends one mesh's vertices, uvs, normals and faces to `output`.
		function parseMesh( mesh ) {
			let nbVertex = 0;
			let nbNormals = 0;
			let nbVertexUvs = 0;
			const geometry = mesh.geometry;
			const normalMatrixWorld = new Matrix3();
			// shortcuts
			const vertices = geometry.getAttribute( 'position' );
			const normals = geometry.getAttribute( 'normal' );
			const uvs = geometry.getAttribute( 'uv' );
			const indices = geometry.getIndex();
			// name of the mesh object
			output += 'o ' + mesh.name + '\n';
			// name of the mesh material
			if ( mesh.material && mesh.material.name ) {
				output += 'usemtl ' + mesh.material.name + '\n';
			}
			// vertices
			if ( vertices !== undefined ) {
				for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
					vertex.fromBufferAttribute( vertices, i );
					// transform the vertex to world space
					vertex.applyMatrix4( mesh.matrixWorld );
					// transform the vertex to export format
					output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
				}
			}
			// uvs
			if ( uvs !== undefined ) {
				for ( let i = 0, l = uvs.count; i < l; i ++, nbVertexUvs ++ ) {
					uv.fromBufferAttribute( uvs, i );
					// transform the uv to export format
					output += 'vt ' + uv.x + ' ' + uv.y + '\n';
				}
			}
			// normals
			if ( normals !== undefined ) {
				normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
				for ( let i = 0, l = normals.count; i < l; i ++, nbNormals ++ ) {
					normal.fromBufferAttribute( normals, i );
					// transform the normal to world space
					normal.applyMatrix3( normalMatrixWorld ).normalize();
					// transform the normal to export format
					output += 'vn ' + normal.x + ' ' + normal.y + ' ' + normal.z + '\n';
				}
			}
			// faces: each corner is written as v[/vt][/vn] with 1-based global indices
			if ( indices !== null ) {
				for ( let i = 0, l = indices.count; i < l; i += 3 ) {
					for ( let m = 0; m < 3; m ++ ) {
						const j = indices.getX( i + m ) + 1;
						face[ m ] = ( indexVertex + j ) + ( normals || uvs ? '/' + ( uvs ? ( indexVertexUvs + j ) : '' ) + ( normals ? '/' + ( indexNormals + j ) : '' ) : '' );
					}
					// transform the face to export format
					output += 'f ' + face.join( ' ' ) + '\n';
				}
			} else {
				// non-indexed geometry: every consecutive vertex triple is a triangle
				for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
					for ( let m = 0; m < 3; m ++ ) {
						const j = i + m + 1;
						face[ m ] = ( indexVertex + j ) + ( normals || uvs ? '/' + ( uvs ? ( indexVertexUvs + j ) : '' ) + ( normals ? '/' + ( indexNormals + j ) : '' ) : '' );
					}
					// transform the face to export format
					output += 'f ' + face.join( ' ' ) + '\n';
				}
			}
			// update index
			indexVertex += nbVertex;
			indexVertexUvs += nbVertexUvs;
			indexNormals += nbNormals;
		}
		// Appends a Line/LineSegments object's vertices and 'l' records.
		function parseLine( line ) {
			let nbVertex = 0;
			const geometry = line.geometry;
			const type = line.type;
			// shortcuts
			const vertices = geometry.getAttribute( 'position' );
			// name of the line object
			output += 'o ' + line.name + '\n';
			if ( vertices !== undefined ) {
				for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
					vertex.fromBufferAttribute( vertices, i );
					// transform the vertex to world space
					vertex.applyMatrix4( line.matrixWorld );
					// transform the vertex to export format
					output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
				}
			}
			if ( type === 'Line' ) {
				// a single polyline through all vertices
				output += 'l ';
				for ( let j = 1, l = vertices.count; j <= l; j ++ ) {
					output += ( indexVertex + j ) + ' ';
				}
				output += '\n';
			}
			if ( type === 'LineSegments' ) {
				// independent two-vertex segments
				for ( let j = 1, k = j + 1, l = vertices.count; j < l; j += 2, k = j + 1 ) {
					output += 'l ' + ( indexVertex + j ) + ' ' + ( indexVertex + k ) + '\n';
				}
			}
			// update index
			indexVertex += nbVertex;
		}
		// Appends a Points object's vertices (with optional sRGB colors) and a 'p' record.
		function parsePoints( points ) {
			let nbVertex = 0;
			const geometry = points.geometry;
			const vertices = geometry.getAttribute( 'position' );
			const colors = geometry.getAttribute( 'color' );
			output += 'o ' + points.name + '\n';
			if ( vertices !== undefined ) {
				for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
					vertex.fromBufferAttribute( vertices, i );
					vertex.applyMatrix4( points.matrixWorld );
					output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z;
					if ( colors !== undefined ) {
						color.fromBufferAttribute( colors, i );
						// colors are stored in the working color space; export as sRGB
						ColorManagement.workingToColorSpace( color, SRGBColorSpace );
						output += ' ' + color.r + ' ' + color.g + ' ' + color.b;
					}
					output += '\n';
				}
				output += 'p ';
				for ( let j = 1, l = vertices.count; j <= l; j ++ ) {
					output += ( indexVertex + j ) + ' ';
				}
				output += '\n';
			}
			// update index
			indexVertex += nbVertex;
		}
		// Walk the hierarchy and serialize every supported renderable type.
		object.traverse( function ( child ) {
			if ( child.isMesh === true ) {
				parseMesh( child );
			}
			if ( child.isLine === true ) {
				parseLine( child );
			}
			if ( child.isPoints === true ) {
				parsePoints( child );
			}
		} );
		return output;
	}
}
export { OBJExporter };

View File

@@ -0,0 +1,562 @@
import {
Matrix3,
Vector3,
Color,
ColorManagement,
SRGBColorSpace
} from 'three';
/**
* An exporter for PLY.
*
* PLY (Polygon or Stanford Triangle Format) is a file format for efficient delivery and
* loading of simple, static 3D content in a dense format. Both binary and ascii formats are
* supported. PLY can store vertex positions, colors, normals and uv coordinates. No textures
* or texture references are saved.
*
* ```js
* const exporter = new PLYExporter();
* const data = exporter.parse( scene, options );
* ```
*
* @three_import import { PLYExporter } from 'three/addons/exporters/PLYExporter.js';
*/
class PLYExporter {

	/**
	 * Parses the given 3D object and generates the PLY output.
	 *
	 * If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
	 *
	 * @param {Object3D} object - The 3D object to export.
	 * @param {PLYExporter~OnDone} onDone - A callback function that is executed when the export has finished.
	 * @param {PLYExporter~Options} options - The export options.
	 * @return {?(string|ArrayBuffer)} The exported PLY.
	 */
	parse( object, onDone, options = {} ) {
		// reference https://github.com/gkjohnson/ply-exporter-js
		// Iterate over the valid meshes in the object
		function traverseMeshes( cb ) {
			object.traverse( function ( child ) {
				if ( child.isMesh === true || child.isPoints ) {
					const mesh = child;
					const geometry = mesh.geometry;
					if ( geometry.hasAttribute( 'position' ) === true ) {
						cb( mesh, geometry );
					}
				}
			} );
		}
		// Default options
		const defaultOptions = {
			binary: false,
			excludeAttributes: [], // normal, uv, color, index
			littleEndian: false
		};
		options = Object.assign( defaultOptions, options );
		const excludeAttributes = options.excludeAttributes;
		let includeIndices = true;
		let includeNormals = false;
		let includeColors = false;
		let includeUVs = false;
		// count the vertices, check which properties are used,
		// and cache the BufferGeometry
		let vertexCount = 0;
		let faceCount = 0;
		object.traverse( function ( child ) {
			if ( child.isMesh === true ) {
				const mesh = child;
				const geometry = mesh.geometry;
				const vertices = geometry.getAttribute( 'position' );
				const normals = geometry.getAttribute( 'normal' );
				const uvs = geometry.getAttribute( 'uv' );
				const colors = geometry.getAttribute( 'color' );
				const indices = geometry.getIndex();
				if ( vertices === undefined ) {
					return;
				}
				vertexCount += vertices.count;
				faceCount += indices ? indices.count / 3 : vertices.count / 3;
				if ( normals !== undefined ) includeNormals = true;
				if ( uvs !== undefined ) includeUVs = true;
				if ( colors !== undefined ) includeColors = true;
			} else if ( child.isPoints ) {
				const mesh = child;
				const geometry = mesh.geometry;
				const vertices = geometry.getAttribute( 'position' );
				const normals = geometry.getAttribute( 'normal' );
				const colors = geometry.getAttribute( 'color' );
				vertexCount += vertices.count;
				if ( normals !== undefined ) includeNormals = true;
				if ( colors !== undefined ) includeColors = true;
				// point clouds have no faces, so the face element is dropped entirely
				includeIndices = false;
			}
		} );
		const tempColor = new Color();
		includeIndices = includeIndices && excludeAttributes.indexOf( 'index' ) === - 1;
		includeNormals = includeNormals && excludeAttributes.indexOf( 'normal' ) === - 1;
		includeColors = includeColors && excludeAttributes.indexOf( 'color' ) === - 1;
		includeUVs = includeUVs && excludeAttributes.indexOf( 'uv' ) === - 1;
		if ( includeIndices && faceCount !== Math.floor( faceCount ) ) {
			// point cloud meshes will not have an index array and may not have a
			// number of vertices that is divisible by 3 (and therefore representable
			// as triangles)
			console.error(
				'PLYExporter: Failed to generate a valid PLY file with triangle indices because the ' +
				'number of indices is not divisible by 3.'
			);
			return null;
		}
		const indexByteCount = 4;
		let header =
			'ply\n' +
			`format ${ options.binary ? ( options.littleEndian ? 'binary_little_endian' : 'binary_big_endian' ) : 'ascii' } 1.0\n` +
			`element vertex ${vertexCount}\n` +
			// position
			'property float x\n' +
			'property float y\n' +
			'property float z\n';
		if ( includeNormals === true ) {
			// normal
			header +=
				'property float nx\n' +
				'property float ny\n' +
				'property float nz\n';
		}
		if ( includeUVs === true ) {
			// uvs
			header +=
				'property float s\n' +
				'property float t\n';
		}
		if ( includeColors === true ) {
			// colors
			header +=
				'property uchar red\n' +
				'property uchar green\n' +
				'property uchar blue\n';
		}
		if ( includeIndices === true ) {
			// faces
			header +=
				`element face ${faceCount}\n` +
				'property list uchar int vertex_index\n';
		}
		header += 'end_header\n';
		// Generate attribute data
		const vertex = new Vector3();
		const normalMatrixWorld = new Matrix3();
		let result = null;
		if ( options.binary === true ) {
			// Binary File Generation
			const headerBin = new TextEncoder().encode( header );
			// 3 position values at 4 bytes
			// 3 normal values at 4 bytes
			// 3 color channels with 1 byte
			// 2 uv values at 4 bytes
			const vertexListLength = vertexCount * ( 4 * 3 + ( includeNormals ? 4 * 3 : 0 ) + ( includeColors ? 3 : 0 ) + ( includeUVs ? 4 * 2 : 0 ) );
			// 1 byte shape descriptor
			// 3 vertex indices at ${indexByteCount} bytes
			const faceListLength = includeIndices ? faceCount * ( indexByteCount * 3 + 1 ) : 0;
			const output = new DataView( new ArrayBuffer( headerBin.length + vertexListLength + faceListLength ) );
			new Uint8Array( output.buffer ).set( headerBin, 0 );
			// Two independent write cursors: vertex data right after the header,
			// face data after the whole vertex list.
			let vOffset = headerBin.length;
			let fOffset = headerBin.length + vertexListLength;
			let writtenVertices = 0;
			traverseMeshes( function ( mesh, geometry ) {
				const vertices = geometry.getAttribute( 'position' );
				const normals = geometry.getAttribute( 'normal' );
				const uvs = geometry.getAttribute( 'uv' );
				const colors = geometry.getAttribute( 'color' );
				const indices = geometry.getIndex();
				normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
				for ( let i = 0, l = vertices.count; i < l; i ++ ) {
					vertex.fromBufferAttribute( vertices, i );
					vertex.applyMatrix4( mesh.matrixWorld );
					// Position information
					output.setFloat32( vOffset, vertex.x, options.littleEndian );
					vOffset += 4;
					output.setFloat32( vOffset, vertex.y, options.littleEndian );
					vOffset += 4;
					output.setFloat32( vOffset, vertex.z, options.littleEndian );
					vOffset += 4;
					// Normal information
					if ( includeNormals === true ) {
						if ( normals != null ) {
							vertex.fromBufferAttribute( normals, i );
							vertex.applyMatrix3( normalMatrixWorld ).normalize();
							output.setFloat32( vOffset, vertex.x, options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, vertex.y, options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, vertex.z, options.littleEndian );
							vOffset += 4;
						} else {
							// this geometry lacks normals but another one has them: pad with zeros
							output.setFloat32( vOffset, 0, options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, 0, options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, 0, options.littleEndian );
							vOffset += 4;
						}
					}
					// UV information
					if ( includeUVs === true ) {
						if ( uvs != null ) {
							output.setFloat32( vOffset, uvs.getX( i ), options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, uvs.getY( i ), options.littleEndian );
							vOffset += 4;
						} else {
							output.setFloat32( vOffset, 0, options.littleEndian );
							vOffset += 4;
							output.setFloat32( vOffset, 0, options.littleEndian );
							vOffset += 4;
						}
					}
					// Color information
					if ( includeColors === true ) {
						if ( colors != null ) {
							tempColor.fromBufferAttribute( colors, i );
							// convert from working color space to sRGB for export
							ColorManagement.workingToColorSpace( tempColor, SRGBColorSpace );
							output.setUint8( vOffset, Math.floor( tempColor.r * 255 ) );
							vOffset += 1;
							output.setUint8( vOffset, Math.floor( tempColor.g * 255 ) );
							vOffset += 1;
							output.setUint8( vOffset, Math.floor( tempColor.b * 255 ) );
							vOffset += 1;
						} else {
							output.setUint8( vOffset, 255 );
							vOffset += 1;
							output.setUint8( vOffset, 255 );
							vOffset += 1;
							output.setUint8( vOffset, 255 );
							vOffset += 1;
						}
					}
				}
				if ( includeIndices === true ) {
					// Create the face list
					if ( indices !== null ) {
						for ( let i = 0, l = indices.count; i < l; i += 3 ) {
							output.setUint8( fOffset, 3 );
							fOffset += 1;
							output.setUint32( fOffset, indices.getX( i + 0 ) + writtenVertices, options.littleEndian );
							fOffset += indexByteCount;
							output.setUint32( fOffset, indices.getX( i + 1 ) + writtenVertices, options.littleEndian );
							fOffset += indexByteCount;
							output.setUint32( fOffset, indices.getX( i + 2 ) + writtenVertices, options.littleEndian );
							fOffset += indexByteCount;
						}
					} else {
						for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
							output.setUint8( fOffset, 3 );
							fOffset += 1;
							output.setUint32( fOffset, writtenVertices + i, options.littleEndian );
							fOffset += indexByteCount;
							output.setUint32( fOffset, writtenVertices + i + 1, options.littleEndian );
							fOffset += indexByteCount;
							output.setUint32( fOffset, writtenVertices + i + 2, options.littleEndian );
							fOffset += indexByteCount;
						}
					}
				}
				// Save the amount of verts we've already written so we can offset
				// the face index on the next mesh
				writtenVertices += vertices.count;
			} );
			result = output.buffer;
		} else {
			// Ascii File Generation
			// count the number of vertices
			let writtenVertices = 0;
			let vertexList = '';
			let faceList = '';
			traverseMeshes( function ( mesh, geometry ) {
				const vertices = geometry.getAttribute( 'position' );
				const normals = geometry.getAttribute( 'normal' );
				const uvs = geometry.getAttribute( 'uv' );
				const colors = geometry.getAttribute( 'color' );
				const indices = geometry.getIndex();
				normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
				// form each line
				for ( let i = 0, l = vertices.count; i < l; i ++ ) {
					vertex.fromBufferAttribute( vertices, i );
					vertex.applyMatrix4( mesh.matrixWorld );
					// Position information
					let line =
						vertex.x + ' ' +
						vertex.y + ' ' +
						vertex.z;
					// Normal information
					if ( includeNormals === true ) {
						if ( normals != null ) {
							vertex.fromBufferAttribute( normals, i );
							vertex.applyMatrix3( normalMatrixWorld ).normalize();
							line += ' ' +
								vertex.x + ' ' +
								vertex.y + ' ' +
								vertex.z;
						} else {
							line += ' 0 0 0';
						}
					}
					// UV information
					if ( includeUVs === true ) {
						if ( uvs != null ) {
							line += ' ' +
								uvs.getX( i ) + ' ' +
								uvs.getY( i );
						} else {
							line += ' 0 0';
						}
					}
					// Color information
					if ( includeColors === true ) {
						if ( colors != null ) {
							tempColor.fromBufferAttribute( colors, i );
							ColorManagement.workingToColorSpace( tempColor, SRGBColorSpace );
							line += ' ' +
								Math.floor( tempColor.r * 255 ) + ' ' +
								Math.floor( tempColor.g * 255 ) + ' ' +
								Math.floor( tempColor.b * 255 );
						} else {
							line += ' 255 255 255';
						}
					}
					vertexList += line + '\n';
				}
				// Create the face list
				if ( includeIndices === true ) {
					if ( indices !== null ) {
						for ( let i = 0, l = indices.count; i < l; i += 3 ) {
							faceList += `3 ${ indices.getX( i + 0 ) + writtenVertices }`;
							faceList += ` ${ indices.getX( i + 1 ) + writtenVertices }`;
							faceList += ` ${ indices.getX( i + 2 ) + writtenVertices }\n`;
						}
					} else {
						for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
							faceList += `3 ${ writtenVertices + i } ${ writtenVertices + i + 1 } ${ writtenVertices + i + 2 }\n`;
						}
					}
					// NOTE(review): faceCount was already tallied before the header was
					// written; this second increment appears unused afterwards — confirm.
					faceCount += indices ? indices.count / 3 : vertices.count / 3;
				}
				writtenVertices += vertices.count;
			} );
			result = `${ header }${vertexList}${ includeIndices ? `${faceList}\n` : '\n' }`;
		}
		// NOTE(review): requestAnimationFrame is browser-only; in Node the callback
		// would throw — confirm intended environments.
		if ( typeof onDone === 'function' ) requestAnimationFrame( () => onDone( result ) );
		return result;
	}
}
/**
* Export options of `PLYExporter`.
*
* @typedef {Object} PLYExporter~Options
* @property {boolean} [binary=false] - Whether to export in binary format or ASCII.
* @property {Array<string>} [excludeAttributes] - Which properties to explicitly exclude from
* the exported PLY file. Valid values are `'color'`, `'normal'`, `'uv'`, and `'index'`. If triangle
* indices are excluded, then a point cloud is exported.
* @property {boolean} [littleEndian=false] - Whether the binary export uses little or big endian.
**/
/**
* onDone callback of `PLYExporter`.
*
* @callback PLYExporter~OnDone
* @param {string|ArrayBuffer} result - The generated PLY ascii or binary.
*/
export { PLYExporter };

View File

@@ -0,0 +1,221 @@
import { Vector3 } from 'three';
/**
* An exporter for STL.
*
* STL files describe only the surface geometry of a three-dimensional object without
* any representation of color, texture or other common model attributes. The STL format
* specifies both ASCII and binary representations, with binary being more compact.
* STL files contain no scale information or indexes, and the units are arbitrary.
*
* ```js
* const exporter = new STLExporter();
* const data = exporter.parse( mesh, { binary: true } );
* ```
*
* @three_import import { STLExporter } from 'three/addons/exporters/STLExporter.js';
*/
class STLExporter {
/**
 * Parses the given 3D object and generates the STL output.
 *
 * If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
 *
 * @param {Object3D} scene - A scene, mesh or any other 3D object containing meshes to encode.
 * @param {STLExporter~Options} options - The export options.
 * @return {string|ArrayBuffer} The exported STL (string for ASCII, ArrayBuffer for binary).
 */
parse( scene, options = {} ) {
options = Object.assign( {
binary: false
}, options );
const binary = options.binary;
//
// first pass: collect all meshes and count triangles so a binary buffer can be pre-sized
const objects = [];
let triangles = 0;
scene.traverse( function ( object ) {
if ( object.isMesh ) {
const geometry = object.geometry;
const index = geometry.index;
const positionAttribute = geometry.getAttribute( 'position' );
// indexed geometries define one triangle per 3 indices, non-indexed per 3 positions
triangles += ( index !== null ) ? ( index.count / 3 ) : ( positionAttribute.count / 3 );
objects.push( {
object3d: object,
geometry: geometry
} );
}
} );
let output;
let offset = 80; // skip header
if ( binary === true ) {
// binary STL layout: 80-byte header, uint32 face count, then 50 bytes per face
// (12 little-endian float32: normal + 3 vertices, plus a uint16 attribute byte count)
const bufferLength = triangles * 2 + triangles * 3 * 4 * 4 + 80 + 4;
const arrayBuffer = new ArrayBuffer( bufferLength );
output = new DataView( arrayBuffer );
output.setUint32( offset, triangles, true ); offset += 4;
} else {
output = '';
output += 'solid exported\n';
}
// scratch vectors reused for every face to avoid per-face allocations
const vA = new Vector3();
const vB = new Vector3();
const vC = new Vector3();
const cb = new Vector3();
const ab = new Vector3();
const normal = new Vector3();
for ( let i = 0, il = objects.length; i < il; i ++ ) {
const object = objects[ i ].object3d;
const geometry = objects[ i ].geometry;
const index = geometry.index;
const positionAttribute = geometry.getAttribute( 'position' );
if ( index !== null ) {
// indexed geometry
for ( let j = 0; j < index.count; j += 3 ) {
const a = index.getX( j + 0 );
const b = index.getX( j + 1 );
const c = index.getX( j + 2 );
writeFace( a, b, c, positionAttribute, object );
}
} else {
// non-indexed geometry
for ( let j = 0; j < positionAttribute.count; j += 3 ) {
const a = j + 0;
const b = j + 1;
const c = j + 2;
writeFace( a, b, c, positionAttribute, object );
}
}
}
if ( binary === false ) {
output += 'endsolid exported\n';
}
return output;
// Writes a single facet record (normal, three world-space vertices and the
// facet end markers) to 'output', advancing 'offset' in binary mode.
function writeFace( a, b, c, positionAttribute, object ) {
vA.fromBufferAttribute( positionAttribute, a );
vB.fromBufferAttribute( positionAttribute, b );
vC.fromBufferAttribute( positionAttribute, c );
if ( object.isSkinnedMesh === true ) {
// bake the current skeleton pose into the exported vertices
object.applyBoneTransform( a, vA );
object.applyBoneTransform( b, vB );
object.applyBoneTransform( c, vC );
}
// transform to world space so all meshes share one coordinate frame
vA.applyMatrix4( object.matrixWorld );
vB.applyMatrix4( object.matrixWorld );
vC.applyMatrix4( object.matrixWorld );
writeNormal( vA, vB, vC );
writeVertex( vA );
writeVertex( vB );
writeVertex( vC );
if ( binary === true ) {
// 2-byte attribute byte count, conventionally zero
output.setUint16( offset, 0, true ); offset += 2;
} else {
output += '\t\tendloop\n';
output += '\tendfacet\n';
}
}
// Computes the face normal as (vC - vB) x (vA - vB) and writes the
// 'facet normal' record (binary floats or ASCII line plus loop opener).
function writeNormal( vA, vB, vC ) {
cb.subVectors( vC, vB );
ab.subVectors( vA, vB );
cb.cross( ab ).normalize();
// NOTE(review): 'cb' is already normalized here, so the second normalize is redundant but harmless
normal.copy( cb ).normalize();
if ( binary === true ) {
output.setFloat32( offset, normal.x, true ); offset += 4;
output.setFloat32( offset, normal.y, true ); offset += 4;
output.setFloat32( offset, normal.z, true ); offset += 4;
} else {
output += '\tfacet normal ' + normal.x + ' ' + normal.y + ' ' + normal.z + '\n';
output += '\t\touter loop\n';
}
}
// Writes one vertex record (little-endian float32 triple or an ASCII line).
function writeVertex( vertex ) {
if ( binary === true ) {
output.setFloat32( offset, vertex.x, true ); offset += 4;
output.setFloat32( offset, vertex.y, true ); offset += 4;
output.setFloat32( offset, vertex.z, true ); offset += 4;
} else {
output += '\t\t\tvertex ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
}
}
}
}
/**
* Export options of `STLExporter`.
*
* @typedef {Object} STLExporter~Options
* @property {boolean} [binary=false] - Whether to export in binary format or ASCII.
**/
export { STLExporter };

1235
node_modules/three/examples/jsm/exporters/USDZExporter.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,92 @@
import {
BufferGeometry,
Float32BufferAttribute
} from 'three';
/**
* A special type of box geometry intended for {@link LineSegments}.
*
* ```js
* const geometry = new THREE.BoxLineGeometry();
* const material = new THREE.LineBasicMaterial( { color: 0x00ff00 } );
* const lines = new THREE.LineSegments( geometry, material );
* scene.add( lines );
* ```
*
* @augments BufferGeometry
* @three_import import { BoxLineGeometry } from 'three/addons/geometries/BoxLineGeometry.js';
*/
class BoxLineGeometry extends BufferGeometry {

	/**
	 * Constructs a new box line geometry.
	 *
	 * @param {number} [width=1] - The length of the edges parallel to the X axis.
	 * @param {number} [height=1] - The length of the edges parallel to the Y axis.
	 * @param {number} [depth=1] - The length of the edges parallel to the Z axis.
	 * @param {number} [widthSegments=1] - Number of segmented rectangular sections along the width.
	 * @param {number} [heightSegments=1] - Number of segmented rectangular sections along the height.
	 * @param {number} [depthSegments=1] - Number of segmented rectangular sections along the depth.
	 */
	constructor( width = 1, height = 1, depth = 1, widthSegments = 1, heightSegments = 1, depthSegments = 1 ) {

		super();

		widthSegments = Math.floor( widthSegments );
		heightSegments = Math.floor( heightSegments );
		depthSegments = Math.floor( depthSegments );

		const hw = width / 2;
		const hh = height / 2;
		const hd = depth / 2;

		const stepX = width / widthSegments;
		const stepY = height / heightSegments;
		const stepZ = depth / depthSegments;

		const vertices = [];

		// Appends a closed rectangular loop as four line segments (a-b, b-c, c-d, d-a).
		const addLoop = ( a, b, c, d ) => {

			vertices.push( ...a, ...b, ...b, ...c, ...c, ...d, ...d, ...a );

		};

		// Rings perpendicular to the X axis.
		for ( let i = 0, x = - hw; i <= widthSegments; i ++, x += stepX ) {

			addLoop( [ x, - hh, - hd ], [ x, hh, - hd ], [ x, hh, hd ], [ x, - hh, hd ] );

		}

		// Rings perpendicular to the Y axis.
		for ( let i = 0, y = - hh; i <= heightSegments; i ++, y += stepY ) {

			addLoop( [ - hw, y, - hd ], [ hw, y, - hd ], [ hw, y, hd ], [ - hw, y, hd ] );

		}

		// Rings perpendicular to the Z axis.
		for ( let i = 0, z = - hd; i <= depthSegments; i ++, z += stepZ ) {

			addLoop( [ - hw, - hh, z ], [ - hw, hh, z ], [ hw, hh, z ], [ hw, - hh, z ] );

		}

		this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );

	}

}
export { BoxLineGeometry };

View File

@@ -0,0 +1,72 @@
import {
BufferGeometry,
Float32BufferAttribute
} from 'three';
import { ConvexHull } from '../math/ConvexHull.js';
/**
* This class can be used to generate a convex hull for a given array of 3D points.
* The average time complexity for this task is considered to be O(nlog(n)).
*
* ```js
* const geometry = new ConvexGeometry( points );
* const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
* const mesh = new THREE.Mesh( geometry, material );
* scene.add( mesh );
* ```
*
* @augments BufferGeometry
* @three_import import { ConvexGeometry } from 'three/addons/geometries/ConvexGeometry.js';
*/
class ConvexGeometry extends BufferGeometry {

	/**
	 * Constructs a new convex geometry.
	 *
	 * @param {Array<Vector3>} points - An array of points in 3D space which should be enclosed by the convex hull.
	 */
	constructor( points = [] ) {

		super();

		const positions = [];
		const faceNormals = [];

		const hull = new ConvexHull().setFromPoints( points );

		// Every hull face contributes its ring of vertices, each carrying the face normal.
		for ( const face of hull.faces ) {

			const { normal } = face;
			let halfEdge = face.edge;

			// Walk the doubly-connected edge list once around the face (see HalfEdge docs).
			do {

				const { point } = halfEdge.head();

				positions.push( point.x, point.y, point.z );
				faceNormals.push( normal.x, normal.y, normal.z );

				halfEdge = halfEdge.next;

			} while ( halfEdge !== face.edge );

		}

		this.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) );
		this.setAttribute( 'normal', new Float32BufferAttribute( faceNormals, 3 ) );

	}

}
export { ConvexGeometry };

View File

@@ -0,0 +1,420 @@
import {
BufferGeometry,
Euler,
Float32BufferAttribute,
Matrix3,
Matrix4,
Mesh,
Vector3
} from 'three';
/**
* This class can be used to create a decal mesh that serves different kinds of purposes e.g.
* adding unique details to models, performing dynamic visual environmental changes or covering seams.
*
 * Please note that decal projections can be distorted when used around corners. More information at
* this GitHub issue: [Decal projections without distortions](https://github.com/mrdoob/three.js/issues/21187).
*
* Reference: [How to project decals](http://blog.wolfire.com/2009/06/how-to-project-decals/)
*
* ```js
* const geometry = new DecalGeometry( mesh, position, orientation, size );
* const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
* const mesh = new THREE.Mesh( geometry, material );
* scene.add( mesh );
* ```
*
* @augments BufferGeometry
* @three_import import { DecalGeometry } from 'three/addons/geometries/DecalGeometry.js';
*/
class DecalGeometry extends BufferGeometry {
/**
 * Constructs a new decal geometry.
 *
 * @param {Mesh} [mesh] - The base mesh the decal should be projected on.
 * @param {Vector3} [position] - The position of the decal projector.
 * @param {Euler} [orientation] - The orientation of the decal projector.
 * @param {Vector3} [size] - The scale of the decal projector.
 */
constructor( mesh = new Mesh(), position = new Vector3(), orientation = new Euler(), size = new Vector3( 1, 1, 1 ) ) {
super();
// buffers
const vertices = [];
const normals = [];
const uvs = [];
// helpers
const plane = new Vector3();
// transforms mesh-space normals into world space
const normalMatrix = new Matrix3().getNormalMatrix( mesh.matrixWorld );
// this matrix represents the transformation of the decal projector
const projectorMatrix = new Matrix4();
projectorMatrix.makeRotationFromEuler( orientation );
projectorMatrix.setPosition( position );
const projectorMatrixInverse = new Matrix4();
projectorMatrixInverse.copy( projectorMatrix ).invert();
// generate buffers
generate();
// build geometry
this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
// normals are only emitted when the source geometry provides a normal attribute
if ( normals.length > 0 ) {
this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
}
//
// Collects the mesh triangles, clips them against the projector box and
// fills the vertex/normal/uv buffers with the result.
function generate() {
let decalVertices = [];
const vertex = new Vector3();
const normal = new Vector3();
// handle different geometry types
const geometry = mesh.geometry;
const positionAttribute = geometry.attributes.position;
const normalAttribute = geometry.attributes.normal;
// first, create an array of 'DecalVertex' objects
// three consecutive 'DecalVertex' objects represent a single face
//
// this data structure will be later used to perform the clipping
if ( geometry.index !== null ) {
// indexed BufferGeometry
const index = geometry.index;
for ( let i = 0; i < index.count; i ++ ) {
vertex.fromBufferAttribute( positionAttribute, index.getX( i ) );
if ( normalAttribute ) {
normal.fromBufferAttribute( normalAttribute, index.getX( i ) );
pushDecalVertex( decalVertices, vertex, normal );
} else {
pushDecalVertex( decalVertices, vertex );
}
}
} else {
if ( positionAttribute === undefined ) return; // empty geometry
// non-indexed BufferGeometry
for ( let i = 0; i < positionAttribute.count; i ++ ) {
vertex.fromBufferAttribute( positionAttribute, i );
if ( normalAttribute ) {
normal.fromBufferAttribute( normalAttribute, i );
pushDecalVertex( decalVertices, vertex, normal );
} else {
pushDecalVertex( decalVertices, vertex );
}
}
}
// second, clip the geometry so that it doesn't extend out from the projector
// (one pass per face of the projector box, in projector space)
decalVertices = clipGeometry( decalVertices, plane.set( 1, 0, 0 ) );
decalVertices = clipGeometry( decalVertices, plane.set( - 1, 0, 0 ) );
decalVertices = clipGeometry( decalVertices, plane.set( 0, 1, 0 ) );
decalVertices = clipGeometry( decalVertices, plane.set( 0, - 1, 0 ) );
decalVertices = clipGeometry( decalVertices, plane.set( 0, 0, 1 ) );
decalVertices = clipGeometry( decalVertices, plane.set( 0, 0, - 1 ) );
// third, generate final vertices, normals and uvs
for ( let i = 0; i < decalVertices.length; i ++ ) {
const decalVertex = decalVertices[ i ];
// create texture coordinates (we are still in projector space)
uvs.push(
0.5 + ( decalVertex.position.x / size.x ),
0.5 + ( decalVertex.position.y / size.y )
);
// transform the vertex back to world space
decalVertex.position.applyMatrix4( projectorMatrix );
// now create vertex and normal buffer data
vertices.push( decalVertex.position.x, decalVertex.position.y, decalVertex.position.z );
if ( decalVertex.normal !== null ) {
normals.push( decalVertex.normal.x, decalVertex.normal.y, decalVertex.normal.z );
}
}
}
// Transforms a mesh-space vertex (and optional normal) into projector space
// and appends it to 'decalVertices'.
function pushDecalVertex( decalVertices, vertex, normal = null ) {
// transform the vertex to world space, then to projector space
vertex.applyMatrix4( mesh.matrixWorld );
vertex.applyMatrix4( projectorMatrixInverse );
if ( normal ) {
normal.applyNormalMatrix( normalMatrix );
decalVertices.push( new DecalVertex( vertex.clone(), normal.clone() ) );
} else {
decalVertices.push( new DecalVertex( vertex.clone() ) );
}
}
// Clips the triangle list against one face of the projector box.
// 'plane' is a unit axis vector; 's' is the box half-extent along that axis.
function clipGeometry( inVertices, plane ) {
const outVertices = [];
const s = 0.5 * Math.abs( size.dot( plane ) );
// a single iteration clips one face,
// which consists of three consecutive 'DecalVertex' objects
for ( let i = 0; i < inVertices.length; i += 3 ) {
let total = 0;
let nV1;
let nV2;
let nV3;
let nV4;
// signed distances of the three corners to the clipping plane (positive = outside)
const d1 = inVertices[ i + 0 ].position.dot( plane ) - s;
const d2 = inVertices[ i + 1 ].position.dot( plane ) - s;
const d3 = inVertices[ i + 2 ].position.dot( plane ) - s;
const v1Out = d1 > 0;
const v2Out = d2 > 0;
const v3Out = d3 > 0;
// calculate, how many vertices of the face lie outside of the clipping plane
total = ( v1Out ? 1 : 0 ) + ( v2Out ? 1 : 0 ) + ( v3Out ? 1 : 0 );
switch ( total ) {
case 0: {
// the entire face lies inside of the plane, no clipping needed
outVertices.push( inVertices[ i ] );
outVertices.push( inVertices[ i + 1 ] );
outVertices.push( inVertices[ i + 2 ] );
break;
}
case 1: {
// one vertex lies outside of the plane, perform clipping
// the surviving quad is emitted as two triangles
if ( v1Out ) {
nV1 = inVertices[ i + 1 ];
nV2 = inVertices[ i + 2 ];
nV3 = clip( inVertices[ i ], nV1, plane, s );
nV4 = clip( inVertices[ i ], nV2, plane, s );
}
if ( v2Out ) {
nV1 = inVertices[ i ];
nV2 = inVertices[ i + 2 ];
nV3 = clip( inVertices[ i + 1 ], nV1, plane, s );
nV4 = clip( inVertices[ i + 1 ], nV2, plane, s );
outVertices.push( nV3 );
outVertices.push( nV2.clone() );
outVertices.push( nV1.clone() );
outVertices.push( nV2.clone() );
outVertices.push( nV3.clone() );
outVertices.push( nV4 );
break;
}
if ( v3Out ) {
nV1 = inVertices[ i ];
nV2 = inVertices[ i + 1 ];
nV3 = clip( inVertices[ i + 2 ], nV1, plane, s );
nV4 = clip( inVertices[ i + 2 ], nV2, plane, s );
}
// shared emission path for the v1Out and v3Out sub-cases
// (the v2Out branch returns early via 'break' above)
outVertices.push( nV1.clone() );
outVertices.push( nV2.clone() );
outVertices.push( nV3 );
outVertices.push( nV4 );
outVertices.push( nV3.clone() );
outVertices.push( nV2.clone() );
break;
}
case 2: {
// two vertices lie outside of the plane, perform clipping
// only one corner survives; emit a single clipped triangle
if ( ! v1Out ) {
nV1 = inVertices[ i ].clone();
nV2 = clip( nV1, inVertices[ i + 1 ], plane, s );
nV3 = clip( nV1, inVertices[ i + 2 ], plane, s );
outVertices.push( nV1 );
outVertices.push( nV2 );
outVertices.push( nV3 );
}
if ( ! v2Out ) {
nV1 = inVertices[ i + 1 ].clone();
nV2 = clip( nV1, inVertices[ i + 2 ], plane, s );
nV3 = clip( nV1, inVertices[ i ], plane, s );
outVertices.push( nV1 );
outVertices.push( nV2 );
outVertices.push( nV3 );
}
if ( ! v3Out ) {
nV1 = inVertices[ i + 2 ].clone();
nV2 = clip( nV1, inVertices[ i ], plane, s );
nV3 = clip( nV1, inVertices[ i + 1 ], plane, s );
outVertices.push( nV1 );
outVertices.push( nV2 );
outVertices.push( nV3 );
}
break;
}
case 3: {
// the entire face lies outside of the plane, so let's discard the corresponding vertices
break;
}
}
}
return outVertices;
}
// Intersects the edge v0->v1 with the clipping plane and returns the
// interpolated 'DecalVertex' at the intersection point.
function clip( v0, v1, p, s ) {
const d0 = v0.position.dot( p ) - s;
const d1 = v1.position.dot( p ) - s;
// interpolation factor along the edge where it crosses the plane
const s0 = d0 / ( d0 - d1 );
const position = new Vector3(
v0.position.x + s0 * ( v1.position.x - v0.position.x ),
v0.position.y + s0 * ( v1.position.y - v0.position.y ),
v0.position.z + s0 * ( v1.position.z - v0.position.z )
);
let normal = null;
// normals are interpolated only when both edge endpoints carry one
if ( v0.normal !== null && v1.normal !== null ) {
normal = new Vector3(
v0.normal.x + s0 * ( v1.normal.x - v0.normal.x ),
v0.normal.y + s0 * ( v1.normal.y - v0.normal.y ),
v0.normal.z + s0 * ( v1.normal.z - v0.normal.z )
);
}
const v = new DecalVertex( position, normal );
// need to clip more values (texture coordinates)? do it this way:
// intersectpoint.value = a.value + s * ( b.value - a.value );
return v;
}
}
}
// helper
class DecalVertex {

	/**
	 * Pairs a vertex position with an optional per-vertex normal.
	 *
	 * @param {Vector3} position - The vertex position.
	 * @param {?Vector3} [normal=null] - The vertex normal, if available.
	 */
	constructor( position, normal = null ) {

		this.position = position;
		this.normal = normal;

	}

	/**
	 * Returns a deep copy of this decal vertex.
	 *
	 * @return {DecalVertex} The cloned vertex.
	 */
	clone() {

		const clonedNormal = this.normal === null ? null : this.normal.clone();
		return new this.constructor( this.position.clone(), clonedNormal );

	}

}
export { DecalGeometry, DecalVertex };

View File

@@ -0,0 +1,100 @@
/**
* @module ParametricFunctions
* @three_import import * as ParametricFunctions from 'three/addons/geometries/ParametricFunctions.js';
*/
/**
 * A parametric function representing the Klein bottle.
 *
 * Note the swapped argument order: `v` comes first, matching the original addon API.
 *
 * @param {number} v - The `v` coordinate on the surface in the range `[0,1]`.
 * @param {number} u - The `u` coordinate on the surface in the range `[0,1]`.
 * @param {Vector3} target - The target vector that is used to store the method's result.
 */
function klein( v, u, target ) {

	// Map the unit square onto the angular parameter domain: u, v in [0, 2π].
	u *= Math.PI;
	v *= 2 * Math.PI;
	u = u * 2;

	const cosU = Math.cos( u );
	const sinU = Math.sin( u );

	// Radius of the tube cross-section at this point of the figure.
	const tube = 2 * ( 1 - cosU / 2 );

	let x, z;

	if ( u < Math.PI ) {

		x = 3 * cosU * ( 1 + sinU ) + tube * cosU * Math.cos( v );
		z = - 8 * sinU - tube * Math.sin( u ) * Math.cos( v );

	} else {

		x = 3 * cosU * ( 1 + sinU ) + tube * Math.cos( v + Math.PI );
		z = - 8 * sinU;

	}

	const y = - tube * Math.sin( v );

	target.set( x, y, z );

}
/**
 * A parametric function representing a flat plane spanning the XZ axes.
 *
 * @param {number} u - The `u` coordinate on the surface in the range `[0,1]`.
 * @param {number} v - The `v` coordinate on the surface in the range `[0,1]`.
 * @param {Vector3} target - The target vector that is used to store the method's result.
 */
function plane( u, v, target ) {

	// u maps to x, v maps to z; the plane lies at height zero.
	target.set( u, 0, v );

}
/**
 * A parametric function representing a flat mobius strip.
 *
 * @param {number} u - The `u` coordinate on the surface in the range `[0,1]`.
 * @param {number} t - The `v` coordinate on the surface in the range `[0,1]`.
 * @param {Vector3} target - The target vector that is used to store the method's result.
 */
function mobius( u, t, target ) {

	// http://www.wolframalpha.com/input/?i=M%C3%B6bius+strip+parametric+equations&lk=1&a=ClashPrefs_*Surface.MoebiusStrip.SurfaceProperty.ParametricEquations-
	// Center the strip width on zero and convert t to an angle around the ring.
	const w = u - 0.5;
	const angle = 2 * Math.PI * t;
	const majorRadius = 2;

	// Distance from the ring axis; the half-angle twist flips the strip once per revolution.
	const ring = majorRadius + w * Math.cos( angle / 2 );

	target.set(
		Math.cos( angle ) * ring,
		Math.sin( angle ) * ring,
		w * Math.sin( angle / 2 )
	);

}
/**
 * A parametric function representing a volumetric mobius strip.
 *
 * @param {number} u - The `u` coordinate on the surface in the range `[0,1]`.
 * @param {number} t - The `v` coordinate on the surface in the range `[0,1]`.
 * @param {Vector3} target - The target vector that is used to store the method's result.
 */
function mobius3d( u, t, target ) {

	// Map the inputs to angles: u in [0,2π] (around the ring), t in [0,2π] (around the tube).
	u *= Math.PI;
	t *= 2 * Math.PI;
	u = u * 2;
	const phi = u / 2;

	const major = 2.25, a = 0.125, b = 0.65;

	const cosT = Math.cos( t );
	const sinT = Math.sin( t );

	// Elliptical cross-section point, rotated by phi so the tube twists once per revolution.
	const cross = a * cosT * Math.cos( phi ) - b * sinT * Math.sin( phi );
	const z = a * cosT * Math.sin( phi ) + b * sinT * Math.cos( phi );

	target.set(
		( major + cross ) * Math.cos( u ),
		( major + cross ) * Math.sin( u ),
		z
	);

}
export { klein, plane, mobius, mobius3d };

View File

@@ -0,0 +1,172 @@
import {
BufferGeometry,
Float32BufferAttribute,
Vector3
} from 'three';
/**
* This class can be used to generate a geometry based on a parametric surface.
*
* Reference: [Mesh Generation with Python](https://prideout.net/blog/old/blog/index.html@p=44.html)
*
* ```js
 * const geometry = new ParametricGeometry( klein, 25, 25 );
 * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
 * const mesh = new THREE.Mesh( geometry, material );
 * scene.add( mesh );
* ```
*
* @augments BufferGeometry
* @three_import import { ParametricGeometry } from 'three/addons/geometries/ParametricGeometry.js';
*/
class ParametricGeometry extends BufferGeometry {
/**
 * Constructs a new parametric geometry.
 *
 * @param {ParametricGeometry~Func} func - The parametric function. Default is a function that generates a curved plane surface.
 * @param {number} [slices=8] - The number of slices to use for the parametric function.
 * @param {number} [stacks=8] - The stacks of slices to use for the parametric function.
 */
constructor( func = ( u, v, target ) => target.set( u, v, Math.cos( u ) * Math.sin( v ) ), slices = 8, stacks = 8 ) {
super();
this.type = 'ParametricGeometry';
/**
 * Holds the constructor parameters that have been
 * used to generate the geometry. Any modification
 * after instantiation does not change the geometry.
 *
 * @type {Object}
 */
this.parameters = {
func: func,
slices: slices,
stacks: stacks
};
// buffers
const indices = [];
const vertices = [];
const normals = [];
const uvs = [];
// step size used for the finite-difference normal approximation
const EPS = 0.00001;
const normal = new Vector3();
// p0: surface point, p1: neighboring sample, pu/pv: tangent vectors
const p0 = new Vector3(), p1 = new Vector3();
const pu = new Vector3(), pv = new Vector3();
// generate vertices, normals and uvs
const sliceCount = slices + 1;
for ( let i = 0; i <= stacks; i ++ ) {
const v = i / stacks;
for ( let j = 0; j <= slices; j ++ ) {
const u = j / slices;
// vertex
func( u, v, p0 );
vertices.push( p0.x, p0.y, p0.z );
// normal
// approximate tangent vectors via finite differences
// (backward difference where possible, forward difference at the u = 0 / v = 0 border)
if ( u - EPS >= 0 ) {
func( u - EPS, v, p1 );
pu.subVectors( p0, p1 );
} else {
func( u + EPS, v, p1 );
pu.subVectors( p1, p0 );
}
if ( v - EPS >= 0 ) {
func( u, v - EPS, p1 );
pv.subVectors( p0, p1 );
} else {
func( u, v + EPS, p1 );
pv.subVectors( p1, p0 );
}
// cross product of tangent vectors returns surface normal
normal.crossVectors( pu, pv ).normalize();
normals.push( normal.x, normal.y, normal.z );
// uv
uvs.push( u, v );
}
}
// generate indices
// each grid cell of the (stacks x slices) lattice becomes two triangles
for ( let i = 0; i < stacks; i ++ ) {
for ( let j = 0; j < slices; j ++ ) {
const a = i * sliceCount + j;
const b = i * sliceCount + j + 1;
const c = ( i + 1 ) * sliceCount + j + 1;
const d = ( i + 1 ) * sliceCount + j;
// faces one and two
indices.push( a, b, d );
indices.push( b, c, d );
}
}
// build geometry
this.setIndex( indices );
this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
}
/**
 * Copies the given parametric geometry, including its constructor parameters.
 *
 * @param {ParametricGeometry} source - The geometry to copy.
 * @return {ParametricGeometry} This instance.
 */
copy( source ) {
super.copy( source );
this.parameters = Object.assign( {}, source.parameters );
return this;
}
}
/**
* Parametric function definition of `ParametricGeometry`.
*
* @callback ParametricGeometry~Func
* @param {number} u - The `u` coordinate on the surface in the range `[0,1]`.
* @param {number} v - The `v` coordinate on the surface in the range `[0,1]`.
* @param {Vector3} target - The target vector that is used to store the method's result.
*/
export { ParametricGeometry };

View File

@@ -0,0 +1,216 @@
import {
BoxGeometry,
Vector3
} from 'three';
const _tempNormal = new Vector3();
// Maps a vertex normal on one side of the rounded box to a single UV coordinate
// along 'uvAxis', distributing UV space between the corner arcs and the flat strip.
function getUv( faceDirVector, normal, uvAxis, projectionAxis, radius, sideLength ) {

	// Arc length of one rounded quarter-circle corner.
	const totArcLength = 2 * Math.PI * radius / 4;

	// Length of the flat strip between the two corner arcs on this side.
	const centerLength = Math.max( sideLength - 2 * radius, 0 );

	const halfArc = Math.PI / 4;

	// Project the normal onto the face plane by zeroing the projection axis.
	_tempNormal.copy( normal );
	_tempNormal[ projectionAxis ] = 0;
	_tempNormal.normalize();

	// Fraction of UV space allotted to a single corner arc.
	const arcUvRatio = 0.5 * totArcLength / ( totArcLength + centerLength );

	// How far along the arc the point sits (1 at the face center, 0 at the 45° seam).
	const arcAngleRatio = 1.0 - ( _tempNormal.angleTo( faceDirVector ) / halfArc );

	if ( Math.sign( _tempNormal[ uvAxis ] ) === 1 ) {

		return arcAngleRatio * arcUvRatio;

	}

	// Fraction of UV space allotted to the flat strip between the arcs.
	const lenUv = centerLength / ( totArcLength + centerLength );
	return lenUv + arcUvRatio + arcUvRatio * ( 1.0 - arcAngleRatio );

}
/**
* A special type of box geometry with rounded corners and edges.
*
* ```js
* const geometry = new THREE.RoundedBoxGeometry();
* const material = new THREE.MeshStandardMaterial( { color: 0x00ff00 } );
* const cube = new THREE.Mesh( geometry, material );
* scene.add( cube );
* ```
*
* @augments BoxGeometry
* @three_import import { RoundedBoxGeometry } from 'three/addons/geometries/RoundedBoxGeometry.js';
*/
class RoundedBoxGeometry extends BoxGeometry {
/**
 * Constructs a new rounded box geometry.
 *
 * @param {number} [width=1] - The width. That is, the length of the edges parallel to the X axis.
 * @param {number} [height=1] - The height. That is, the length of the edges parallel to the Y axis.
 * @param {number} [depth=1] - The depth. That is, the length of the edges parallel to the Z axis.
 * @param {number} [segments=2] - Number of segments that form the rounded corners.
 * @param {number} [radius=0.1] - The radius of the rounded corners.
 */
constructor( width = 1, height = 1, depth = 1, segments = 2, radius = 0.1 ) {
// calculate total segments needed &
// ensure it's odd so that we have a plane connecting the rounded corners
const totalSegments = segments * 2 + 1;
// ensure radius isn't bigger than shortest side
radius = Math.min( width / 2, height / 2, depth / 2, radius );
// start with a unit box geometry, its vertices will be modified to form the rounded box
super( 1, 1, 1, totalSegments, totalSegments, totalSegments );
this.type = 'RoundedBoxGeometry';
/**
 * Holds the constructor parameters that have been
 * used to generate the geometry. Any modification
 * after instantiation does not change the geometry.
 *
 * @type {Object}
 */
this.parameters = {
width: width,
height: height,
depth: depth,
segments: segments,
radius: radius,
};
// if totalSegments is 1, no rounding is needed - return regular box
if ( totalSegments === 1 ) return;
// de-index the geometry so every face owns its vertices and can carry unique normals/uvs
const geometry2 = this.toNonIndexed();
this.index = null;
this.attributes.position = geometry2.attributes.position;
this.attributes.normal = geometry2.attributes.normal;
this.attributes.uv = geometry2.attributes.uv;
//
const position = new Vector3();
const normal = new Vector3();
// half-extents of the inner core box (full size shrunk by the rounding radius)
const box = new Vector3( width, height, depth ).divideScalar( 2 ).subScalar( radius );
const positions = this.attributes.position.array;
const normals = this.attributes.normal.array;
const uvs = this.attributes.uv.array;
// number of position-array elements per box side (the array covers all 6 sides in order)
const faceTris = positions.length / 6;
const faceDirVector = new Vector3();
const halfSegmentSize = 0.5 / totalSegments;
for ( let i = 0, j = 0; i < positions.length; i += 3, j += 2 ) {
position.fromArray( positions, i );
normal.copy( position );
// nudge the unit-box vertex inward by half a segment before normalizing,
// so corner vertices map onto the rounding sphere instead of the sharp corner
normal.x -= Math.sign( normal.x ) * halfSegmentSize;
normal.y -= Math.sign( normal.y ) * halfSegmentSize;
normal.z -= Math.sign( normal.z ) * halfSegmentSize;
normal.normalize();
// final vertex = matching core-box corner plus the normal scaled by the radius
positions[ i + 0 ] = box.x * Math.sign( position.x ) + normal.x * radius;
positions[ i + 1 ] = box.y * Math.sign( position.y ) + normal.y * radius;
positions[ i + 2 ] = box.z * Math.sign( position.z ) + normal.z * radius;
normals[ i + 0 ] = normal.x;
normals[ i + 1 ] = normal.y;
normals[ i + 2 ] = normal.z;
// derive which of the six box sides this vertex belongs to from its array offset
const side = Math.floor( i / faceTris );
switch ( side ) {
case 0: // right
// generate UVs along Z then Y
faceDirVector.set( 1, 0, 0 );
uvs[ j + 0 ] = getUv( faceDirVector, normal, 'z', 'y', radius, depth );
uvs[ j + 1 ] = 1.0 - getUv( faceDirVector, normal, 'y', 'z', radius, height );
break;
case 1: // left
// generate UVs along Z then Y
faceDirVector.set( - 1, 0, 0 );
uvs[ j + 0 ] = 1.0 - getUv( faceDirVector, normal, 'z', 'y', radius, depth );
uvs[ j + 1 ] = 1.0 - getUv( faceDirVector, normal, 'y', 'z', radius, height );
break;
case 2: // top
// generate UVs along X then Z
faceDirVector.set( 0, 1, 0 );
uvs[ j + 0 ] = 1.0 - getUv( faceDirVector, normal, 'x', 'z', radius, width );
uvs[ j + 1 ] = getUv( faceDirVector, normal, 'z', 'x', radius, depth );
break;
case 3: // bottom
// generate UVs along X then Z
faceDirVector.set( 0, - 1, 0 );
uvs[ j + 0 ] = 1.0 - getUv( faceDirVector, normal, 'x', 'z', radius, width );
uvs[ j + 1 ] = 1.0 - getUv( faceDirVector, normal, 'z', 'x', radius, depth );
break;
case 4: // front
// generate UVs along X then Y
faceDirVector.set( 0, 0, 1 );
uvs[ j + 0 ] = 1.0 - getUv( faceDirVector, normal, 'x', 'y', radius, width );
uvs[ j + 1 ] = 1.0 - getUv( faceDirVector, normal, 'y', 'x', radius, height );
break;
case 5: // back
// generate UVs along X then Y
faceDirVector.set( 0, 0, - 1 );
uvs[ j + 0 ] = getUv( faceDirVector, normal, 'x', 'y', radius, width );
uvs[ j + 1 ] = 1.0 - getUv( faceDirVector, normal, 'y', 'x', radius, height );
break;
}
}
}
/**
 * Factory method for creating an instance of this class from the given
 * JSON object.
 *
 * @param {Object} data - A JSON object representing the serialized geometry.
 * @returns {RoundedBoxGeometry} A new instance.
 */
static fromJSON( data ) {
return new RoundedBoxGeometry(
data.width,
data.height,
data.depth,
data.segments,
data.radius
);
}
}
export { RoundedBoxGeometry };

View File

@@ -0,0 +1,689 @@
import {
BufferAttribute,
BufferGeometry,
Matrix4,
Vector3,
Vector4
} from 'three';
/**
* Tessellates the famous Utah teapot database by Martin Newell into triangles.
*
* The teapot should normally be rendered as a double sided object, since for some
* patches both sides can be seen, e.g., the gap around the lid and inside the spout.
*
* Segments 'n' determines the number of triangles output. Total triangles = 32*2*n*n - 8*n
* (degenerates at the top and bottom cusps are deleted).
*
* Code based on [SPD software](http://tog.acm.org/resources/SPD/)
* Created for the Udacity course [Interactive Rendering](http://bit.ly/ericity)
*
* ```js
* const geometry = new TeapotGeometry( 50, 18 );
* const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
* const teapot = new THREE.Mesh( geometry, material );
* scene.add( teapot );
* ```
*
* @augments BufferGeometry
* @three_import import { TeapotGeometry } from 'three/addons/geometries/TeapotGeometry.js';
*/
class TeapotGeometry extends BufferGeometry {
/**
 * Constructs a new teapot geometry.
 *
 * @param {number} [size=50] - Relative scale of the teapot.
 * @param {number} [segments=10] - Number of line segments to subdivide each patch edge.
 * @param {boolean} [bottom=true] - Whether the bottom of the teapot is generated or not.
 * @param {boolean} [lid=true] - Whether the lid is generated or not.
 * @param {boolean} [body=true] - Whether the body is generated or not.
 * @param {boolean} [fitLid=true] - Whether the lid is slightly stretched to prevent gaps between the body and lid or not.
 * @param {boolean} [blinn=true] - Whether the teapot is scaled vertically for better aesthetics or not.
 */
constructor( size = 50, segments = 10, bottom = true, lid = true, body = true, fitLid = true, blinn = true ) {
// 32 * 4 * 4 Bezier spline patches
// Each patch is 16 indices (a 4x4 grid of control points, stored row by
// row) into the teapotVertices table below.
const teapotPatches = [
/*rim*/
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
3, 16, 17, 18, 7, 19, 20, 21, 11, 22, 23, 24, 15, 25, 26, 27,
18, 28, 29, 30, 21, 31, 32, 33, 24, 34, 35, 36, 27, 37, 38, 39,
30, 40, 41, 0, 33, 42, 43, 4, 36, 44, 45, 8, 39, 46, 47, 12,
/*body*/
12, 13, 14, 15, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
15, 25, 26, 27, 51, 60, 61, 62, 55, 63, 64, 65, 59, 66, 67, 68,
27, 37, 38, 39, 62, 69, 70, 71, 65, 72, 73, 74, 68, 75, 76, 77,
39, 46, 47, 12, 71, 78, 79, 48, 74, 80, 81, 52, 77, 82, 83, 56,
56, 57, 58, 59, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
59, 66, 67, 68, 87, 96, 97, 98, 91, 99, 100, 101, 95, 102, 103, 104,
68, 75, 76, 77, 98, 105, 106, 107, 101, 108, 109, 110, 104, 111, 112, 113,
77, 82, 83, 56, 107, 114, 115, 84, 110, 116, 117, 88, 113, 118, 119, 92,
/*handle*/
120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
123, 136, 137, 120, 127, 138, 139, 124, 131, 140, 141, 128, 135, 142, 143, 132,
132, 133, 134, 135, 144, 145, 146, 147, 148, 149, 150, 151, 68, 152, 153, 154,
135, 142, 143, 132, 147, 155, 156, 144, 151, 157, 158, 148, 154, 159, 160, 68,
/*spout*/
161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
164, 177, 178, 161, 168, 179, 180, 165, 172, 181, 182, 169, 176, 183, 184, 173,
173, 174, 175, 176, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
176, 183, 184, 173, 188, 197, 198, 185, 192, 199, 200, 189, 196, 201, 202, 193,
/*lid*/
203, 203, 203, 203, 204, 205, 206, 207, 208, 208, 208, 208, 209, 210, 211, 212,
203, 203, 203, 203, 207, 213, 214, 215, 208, 208, 208, 208, 212, 216, 217, 218,
203, 203, 203, 203, 215, 219, 220, 221, 208, 208, 208, 208, 218, 222, 223, 224,
203, 203, 203, 203, 221, 225, 226, 204, 208, 208, 208, 208, 224, 227, 228, 209,
209, 210, 211, 212, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
212, 216, 217, 218, 232, 241, 242, 243, 236, 244, 245, 246, 240, 247, 248, 249,
218, 222, 223, 224, 243, 250, 251, 252, 246, 253, 254, 255, 249, 256, 257, 258,
224, 227, 228, 209, 252, 259, 260, 229, 255, 261, 262, 233, 258, 263, 264, 237,
/*bottom*/
265, 265, 265, 265, 266, 267, 268, 269, 270, 271, 272, 273, 92, 119, 118, 113,
265, 265, 265, 265, 269, 274, 275, 276, 273, 277, 278, 279, 113, 112, 111, 104,
265, 265, 265, 265, 276, 280, 281, 282, 279, 283, 284, 285, 104, 103, 102, 95,
265, 265, 265, 265, 282, 286, 287, 266, 285, 288, 289, 270, 95, 94, 93, 92
];
// Control point positions stored as flat x, y, z triples. The data is
// authored with +Z up; it is converted to three.js's +Y up convention
// when vertices are emitted further below.
const teapotVertices = [
1.4, 0, 2.4,
1.4, - 0.784, 2.4,
0.784, - 1.4, 2.4,
0, - 1.4, 2.4,
1.3375, 0, 2.53125,
1.3375, - 0.749, 2.53125,
0.749, - 1.3375, 2.53125,
0, - 1.3375, 2.53125,
1.4375, 0, 2.53125,
1.4375, - 0.805, 2.53125,
0.805, - 1.4375, 2.53125,
0, - 1.4375, 2.53125,
1.5, 0, 2.4,
1.5, - 0.84, 2.4,
0.84, - 1.5, 2.4,
0, - 1.5, 2.4,
- 0.784, - 1.4, 2.4,
- 1.4, - 0.784, 2.4,
- 1.4, 0, 2.4,
- 0.749, - 1.3375, 2.53125,
- 1.3375, - 0.749, 2.53125,
- 1.3375, 0, 2.53125,
- 0.805, - 1.4375, 2.53125,
- 1.4375, - 0.805, 2.53125,
- 1.4375, 0, 2.53125,
- 0.84, - 1.5, 2.4,
- 1.5, - 0.84, 2.4,
- 1.5, 0, 2.4,
- 1.4, 0.784, 2.4,
- 0.784, 1.4, 2.4,
0, 1.4, 2.4,
- 1.3375, 0.749, 2.53125,
- 0.749, 1.3375, 2.53125,
0, 1.3375, 2.53125,
- 1.4375, 0.805, 2.53125,
- 0.805, 1.4375, 2.53125,
0, 1.4375, 2.53125,
- 1.5, 0.84, 2.4,
- 0.84, 1.5, 2.4,
0, 1.5, 2.4,
0.784, 1.4, 2.4,
1.4, 0.784, 2.4,
0.749, 1.3375, 2.53125,
1.3375, 0.749, 2.53125,
0.805, 1.4375, 2.53125,
1.4375, 0.805, 2.53125,
0.84, 1.5, 2.4,
1.5, 0.84, 2.4,
1.75, 0, 1.875,
1.75, - 0.98, 1.875,
0.98, - 1.75, 1.875,
0, - 1.75, 1.875,
2, 0, 1.35,
2, - 1.12, 1.35,
1.12, - 2, 1.35,
0, - 2, 1.35,
2, 0, 0.9,
2, - 1.12, 0.9,
1.12, - 2, 0.9,
0, - 2, 0.9,
- 0.98, - 1.75, 1.875,
- 1.75, - 0.98, 1.875,
- 1.75, 0, 1.875,
- 1.12, - 2, 1.35,
- 2, - 1.12, 1.35,
- 2, 0, 1.35,
- 1.12, - 2, 0.9,
- 2, - 1.12, 0.9,
- 2, 0, 0.9,
- 1.75, 0.98, 1.875,
- 0.98, 1.75, 1.875,
0, 1.75, 1.875,
- 2, 1.12, 1.35,
- 1.12, 2, 1.35,
0, 2, 1.35,
- 2, 1.12, 0.9,
- 1.12, 2, 0.9,
0, 2, 0.9,
0.98, 1.75, 1.875,
1.75, 0.98, 1.875,
1.12, 2, 1.35,
2, 1.12, 1.35,
1.12, 2, 0.9,
2, 1.12, 0.9,
2, 0, 0.45,
2, - 1.12, 0.45,
1.12, - 2, 0.45,
0, - 2, 0.45,
1.5, 0, 0.225,
1.5, - 0.84, 0.225,
0.84, - 1.5, 0.225,
0, - 1.5, 0.225,
1.5, 0, 0.15,
1.5, - 0.84, 0.15,
0.84, - 1.5, 0.15,
0, - 1.5, 0.15,
- 1.12, - 2, 0.45,
- 2, - 1.12, 0.45,
- 2, 0, 0.45,
- 0.84, - 1.5, 0.225,
- 1.5, - 0.84, 0.225,
- 1.5, 0, 0.225,
- 0.84, - 1.5, 0.15,
- 1.5, - 0.84, 0.15,
- 1.5, 0, 0.15,
- 2, 1.12, 0.45,
- 1.12, 2, 0.45,
0, 2, 0.45,
- 1.5, 0.84, 0.225,
- 0.84, 1.5, 0.225,
0, 1.5, 0.225,
- 1.5, 0.84, 0.15,
- 0.84, 1.5, 0.15,
0, 1.5, 0.15,
1.12, 2, 0.45,
2, 1.12, 0.45,
0.84, 1.5, 0.225,
1.5, 0.84, 0.225,
0.84, 1.5, 0.15,
1.5, 0.84, 0.15,
- 1.6, 0, 2.025,
- 1.6, - 0.3, 2.025,
- 1.5, - 0.3, 2.25,
- 1.5, 0, 2.25,
- 2.3, 0, 2.025,
- 2.3, - 0.3, 2.025,
- 2.5, - 0.3, 2.25,
- 2.5, 0, 2.25,
- 2.7, 0, 2.025,
- 2.7, - 0.3, 2.025,
- 3, - 0.3, 2.25,
- 3, 0, 2.25,
- 2.7, 0, 1.8,
- 2.7, - 0.3, 1.8,
- 3, - 0.3, 1.8,
- 3, 0, 1.8,
- 1.5, 0.3, 2.25,
- 1.6, 0.3, 2.025,
- 2.5, 0.3, 2.25,
- 2.3, 0.3, 2.025,
- 3, 0.3, 2.25,
- 2.7, 0.3, 2.025,
- 3, 0.3, 1.8,
- 2.7, 0.3, 1.8,
- 2.7, 0, 1.575,
- 2.7, - 0.3, 1.575,
- 3, - 0.3, 1.35,
- 3, 0, 1.35,
- 2.5, 0, 1.125,
- 2.5, - 0.3, 1.125,
- 2.65, - 0.3, 0.9375,
- 2.65, 0, 0.9375,
- 2, - 0.3, 0.9,
- 1.9, - 0.3, 0.6,
- 1.9, 0, 0.6,
- 3, 0.3, 1.35,
- 2.7, 0.3, 1.575,
- 2.65, 0.3, 0.9375,
- 2.5, 0.3, 1.125,
- 1.9, 0.3, 0.6,
- 2, 0.3, 0.9,
1.7, 0, 1.425,
1.7, - 0.66, 1.425,
1.7, - 0.66, 0.6,
1.7, 0, 0.6,
2.6, 0, 1.425,
2.6, - 0.66, 1.425,
3.1, - 0.66, 0.825,
3.1, 0, 0.825,
2.3, 0, 2.1,
2.3, - 0.25, 2.1,
2.4, - 0.25, 2.025,
2.4, 0, 2.025,
2.7, 0, 2.4,
2.7, - 0.25, 2.4,
3.3, - 0.25, 2.4,
3.3, 0, 2.4,
1.7, 0.66, 0.6,
1.7, 0.66, 1.425,
3.1, 0.66, 0.825,
2.6, 0.66, 1.425,
2.4, 0.25, 2.025,
2.3, 0.25, 2.1,
3.3, 0.25, 2.4,
2.7, 0.25, 2.4,
2.8, 0, 2.475,
2.8, - 0.25, 2.475,
3.525, - 0.25, 2.49375,
3.525, 0, 2.49375,
2.9, 0, 2.475,
2.9, - 0.15, 2.475,
3.45, - 0.15, 2.5125,
3.45, 0, 2.5125,
2.8, 0, 2.4,
2.8, - 0.15, 2.4,
3.2, - 0.15, 2.4,
3.2, 0, 2.4,
3.525, 0.25, 2.49375,
2.8, 0.25, 2.475,
3.45, 0.15, 2.5125,
2.9, 0.15, 2.475,
3.2, 0.15, 2.4,
2.8, 0.15, 2.4,
0, 0, 3.15,
0.8, 0, 3.15,
0.8, - 0.45, 3.15,
0.45, - 0.8, 3.15,
0, - 0.8, 3.15,
0, 0, 2.85,
0.2, 0, 2.7,
0.2, - 0.112, 2.7,
0.112, - 0.2, 2.7,
0, - 0.2, 2.7,
- 0.45, - 0.8, 3.15,
- 0.8, - 0.45, 3.15,
- 0.8, 0, 3.15,
- 0.112, - 0.2, 2.7,
- 0.2, - 0.112, 2.7,
- 0.2, 0, 2.7,
- 0.8, 0.45, 3.15,
- 0.45, 0.8, 3.15,
0, 0.8, 3.15,
- 0.2, 0.112, 2.7,
- 0.112, 0.2, 2.7,
0, 0.2, 2.7,
0.45, 0.8, 3.15,
0.8, 0.45, 3.15,
0.112, 0.2, 2.7,
0.2, 0.112, 2.7,
0.4, 0, 2.55,
0.4, - 0.224, 2.55,
0.224, - 0.4, 2.55,
0, - 0.4, 2.55,
1.3, 0, 2.55,
1.3, - 0.728, 2.55,
0.728, - 1.3, 2.55,
0, - 1.3, 2.55,
1.3, 0, 2.4,
1.3, - 0.728, 2.4,
0.728, - 1.3, 2.4,
0, - 1.3, 2.4,
- 0.224, - 0.4, 2.55,
- 0.4, - 0.224, 2.55,
- 0.4, 0, 2.55,
- 0.728, - 1.3, 2.55,
- 1.3, - 0.728, 2.55,
- 1.3, 0, 2.55,
- 0.728, - 1.3, 2.4,
- 1.3, - 0.728, 2.4,
- 1.3, 0, 2.4,
- 0.4, 0.224, 2.55,
- 0.224, 0.4, 2.55,
0, 0.4, 2.55,
- 1.3, 0.728, 2.55,
- 0.728, 1.3, 2.55,
0, 1.3, 2.55,
- 1.3, 0.728, 2.4,
- 0.728, 1.3, 2.4,
0, 1.3, 2.4,
0.224, 0.4, 2.55,
0.4, 0.224, 2.55,
0.728, 1.3, 2.55,
1.3, 0.728, 2.55,
0.728, 1.3, 2.4,
1.3, 0.728, 2.4,
0, 0, 0,
1.425, 0, 0,
1.425, 0.798, 0,
0.798, 1.425, 0,
0, 1.425, 0,
1.5, 0, 0.075,
1.5, 0.84, 0.075,
0.84, 1.5, 0.075,
0, 1.5, 0.075,
- 0.798, 1.425, 0,
- 1.425, 0.798, 0,
- 1.425, 0, 0,
- 0.84, 1.5, 0.075,
- 1.5, 0.84, 0.075,
- 1.5, 0, 0.075,
- 1.425, - 0.798, 0,
- 0.798, - 1.425, 0,
0, - 1.425, 0,
- 1.5, - 0.84, 0.075,
- 0.84, - 1.5, 0.075,
0, - 1.5, 0.075,
0.798, - 1.425, 0,
1.425, - 0.798, 0,
0.84, - 1.5, 0.075,
1.5, - 0.84, 0.075
];
super();
// number of segments per patch
segments = Math.max( 2, Math.floor( segments ) );
// Jim Blinn scaled the teapot down in size by about 1.3 for
// some rendering tests. He liked the new proportions that he kept
// the data in this form. The model was distributed with these new
// proportions and became the norm. Trivia: comparing images of the
// real teapot and the computer model, the ratio for the bowl of the
// real teapot is more like 1.25, but since 1.3 is the traditional
// value given, we use it here.
const blinnScale = 1.3;
// scale the size to be the real scaling factor
const maxHeight = 3.15 * ( blinn ? 1 : blinnScale );
const maxHeight2 = maxHeight / 2;
const trueSize = size / maxHeight2;
// Number of elements depends on what is needed. Subtract degenerate
// triangles at tip of bottom and lid out in advance.
let numTriangles = bottom ? ( 8 * segments - 4 ) * segments : 0;
numTriangles += lid ? ( 16 * segments - 4 ) * segments : 0;
numTriangles += body ? 40 * segments * segments : 0;
const indices = new Uint32Array( numTriangles * 3 );
// Vertex count: 4 patches for the bottom, 8 for the lid, 20 for the
// body/handle/spout, each tessellated into (segments+1)^2 grid points.
let numVertices = bottom ? 4 : 0;
numVertices += lid ? 8 : 0;
numVertices += body ? 20 : 0;
numVertices *= ( segments + 1 ) * ( segments + 1 );
const vertices = new Float32Array( numVertices * 3 );
const normals = new Float32Array( numVertices * 3 );
const uvs = new Float32Array( numVertices * 2 );
// Bezier form
// Cubic Bezier basis matrix used to convert control points to
// polynomial coefficients (evaluated as mst * G * ms below).
const ms = new Matrix4();
ms.set(
- 1.0, 3.0, - 3.0, 1.0,
3.0, - 6.0, 3.0, 0.0,
- 3.0, 3.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0 );
const g = [];
const sp = [];
const tp = [];
const dsp = [];
const dtp = [];
// M * G * M matrix, sort of see
// http://www.cs.helsinki.fi/group/goa/mallinnus/curves/surfaces.html
const mgm = [];
const vert = [];
const sdir = [];
const tdir = [];
const norm = new Vector3();
let tcoord;
let sval;
let tval;
let p;
let dsval = 0;
let dtval = 0;
const normOut = new Vector3();
const gmx = new Matrix4();
const tmtx = new Matrix4();
const vsp = new Vector4();
const vtp = new Vector4();
const vdsp = new Vector4();
const vdtp = new Vector4();
const vsdir = new Vector3();
const vtdir = new Vector3();
const mst = ms.clone();
mst.transpose();
// internal function: test if triangle has any matching vertices;
// if so, don't save triangle, since it won't display anything.
const notDegenerate = ( vtx1, vtx2, vtx3 ) => // if any vertex matches, return false
! ( ( ( vertices[ vtx1 * 3 ] === vertices[ vtx2 * 3 ] ) &&
( vertices[ vtx1 * 3 + 1 ] === vertices[ vtx2 * 3 + 1 ] ) &&
( vertices[ vtx1 * 3 + 2 ] === vertices[ vtx2 * 3 + 2 ] ) ) ||
( ( vertices[ vtx1 * 3 ] === vertices[ vtx3 * 3 ] ) &&
( vertices[ vtx1 * 3 + 1 ] === vertices[ vtx3 * 3 + 1 ] ) &&
( vertices[ vtx1 * 3 + 2 ] === vertices[ vtx3 * 3 + 2 ] ) ) || ( vertices[ vtx2 * 3 ] === vertices[ vtx3 * 3 ] ) &&
( vertices[ vtx2 * 3 + 1 ] === vertices[ vtx3 * 3 + 1 ] ) &&
( vertices[ vtx2 * 3 + 2 ] === vertices[ vtx3 * 3 + 2 ] ) );
// one coefficient matrix per coordinate axis (x, y, z)
for ( let i = 0; i < 3; i ++ ) {
mgm[ i ] = new Matrix4();
}
// Patch layout in teapotPatches: 0-19 body/handle/spout, 20-27 lid,
// 28-31 bottom. Select the range based on the requested parts.
const minPatches = body ? 0 : 20;
const maxPatches = bottom ? 32 : 28;
const vertPerRow = segments + 1;
let surfCount = 0;
let vertCount = 0;
let normCount = 0;
let uvCount = 0;
let indexCount = 0;
for ( let surf = minPatches; surf < maxPatches; surf ++ ) {
// lid is in the middle of the data, patches 20-27,
// so ignore it for this part of the loop if the lid is not desired
if ( lid || ( surf < 20 || surf >= 28 ) ) {
// get M * G * M matrix for x,y,z
for ( let i = 0; i < 3; i ++ ) {
// get control patches
for ( let r = 0; r < 4; r ++ ) {
for ( let c = 0; c < 4; c ++ ) {
// transposed
g[ c * 4 + r ] = teapotVertices[ teapotPatches[ surf * 16 + r * 4 + c ] * 3 + i ];
// is the lid to be made larger, and is this a point on the lid
// that is X or Y?
if ( fitLid && ( surf >= 20 && surf < 28 ) && ( i !== 2 ) ) {
// increase XY size by 7.7%, found empirically. I don't
// increase Z so that the teapot will continue to fit in the
// space -1 to 1 for Y (Y is up for the final model).
g[ c * 4 + r ] *= 1.077;
}
// Blinn "fixed" the teapot by dividing Z by blinnScale, and that's the
// data we now use. The original teapot is taller. Fix it:
if ( ! blinn && ( i === 2 ) ) {
g[ c * 4 + r ] *= blinnScale;
}
}
}
gmx.set( g[ 0 ], g[ 1 ], g[ 2 ], g[ 3 ], g[ 4 ], g[ 5 ], g[ 6 ], g[ 7 ], g[ 8 ], g[ 9 ], g[ 10 ], g[ 11 ], g[ 12 ], g[ 13 ], g[ 14 ], g[ 15 ] );
tmtx.multiplyMatrices( gmx, ms );
mgm[ i ].multiplyMatrices( mst, tmtx );
}
// step along, get points, and output
for ( let sstep = 0; sstep <= segments; sstep ++ ) {
const s = sstep / segments;
for ( let tstep = 0; tstep <= segments; tstep ++ ) {
const t = tstep / segments;
// point from basis
// get power vectors and their derivatives
// builds [s^3, s^2, s, 1] (and the t equivalent) plus the
// coefficient vectors of their derivatives
for ( p = 4, sval = tval = 1.0; p --; ) {
sp[ p ] = sval;
tp[ p ] = tval;
sval *= s;
tval *= t;
if ( p === 3 ) {
dsp[ p ] = dtp[ p ] = 0.0;
dsval = dtval = 1.0;
} else {
dsp[ p ] = dsval * ( 3 - p );
dtp[ p ] = dtval * ( 3 - p );
dsval *= s;
dtval *= t;
}
}
vsp.fromArray( sp );
vtp.fromArray( tp );
vdsp.fromArray( dsp );
vdtp.fromArray( dtp );
// do for x,y,z
for ( let i = 0; i < 3; i ++ ) {
// multiply power vectors times matrix to get value
tcoord = vsp.clone();
tcoord.applyMatrix4( mgm[ i ] );
vert[ i ] = tcoord.dot( vtp );
// get s and t tangent vectors
tcoord = vdsp.clone();
tcoord.applyMatrix4( mgm[ i ] );
sdir[ i ] = tcoord.dot( vtp );
tcoord = vsp.clone();
tcoord.applyMatrix4( mgm[ i ] );
tdir[ i ] = tcoord.dot( vdtp );
}
// find normal
vsdir.fromArray( sdir );
vtdir.fromArray( tdir );
norm.crossVectors( vtdir, vsdir );
norm.normalize();
// if X and Z length is 0, at the cusp, so point the normal up or down, depending on patch number
if ( vert[ 0 ] === 0 && vert[ 1 ] === 0 ) {
// if above the middle of the teapot, normal points up, else down
normOut.set( 0, vert[ 2 ] > maxHeight2 ? 1 : - 1, 0 );
} else {
// standard output: rotate on X axis
normOut.set( norm.x, norm.z, - norm.y );
}
// store it all
// convert from the data's z-up frame to three.js's y-up frame,
// recentering so the model straddles y = 0
vertices[ vertCount ++ ] = trueSize * vert[ 0 ];
vertices[ vertCount ++ ] = trueSize * ( vert[ 2 ] - maxHeight2 );
vertices[ vertCount ++ ] = - trueSize * vert[ 1 ];
normals[ normCount ++ ] = normOut.x;
normals[ normCount ++ ] = normOut.y;
normals[ normCount ++ ] = normOut.z;
uvs[ uvCount ++ ] = 1 - t;
uvs[ uvCount ++ ] = 1 - s;
}
}
// save the faces
for ( let sstep = 0; sstep < segments; sstep ++ ) {
for ( let tstep = 0; tstep < segments; tstep ++ ) {
const v1 = surfCount * vertPerRow * vertPerRow + sstep * vertPerRow + tstep;
const v2 = v1 + 1;
const v3 = v2 + vertPerRow;
const v4 = v1 + vertPerRow;
// Normals and UVs cannot be shared. Without clone(), you can see the consequences
// of sharing if you call geometry.applyMatrix4( matrix ).
if ( notDegenerate( v1, v2, v3 ) ) {
indices[ indexCount ++ ] = v1;
indices[ indexCount ++ ] = v2;
indices[ indexCount ++ ] = v3;
}
if ( notDegenerate( v1, v3, v4 ) ) {
indices[ indexCount ++ ] = v1;
indices[ indexCount ++ ] = v3;
indices[ indexCount ++ ] = v4;
}
}
}
// increment only if a surface was used
surfCount ++;
}
}
this.setIndex( new BufferAttribute( indices, 1 ) );
this.setAttribute( 'position', new BufferAttribute( vertices, 3 ) );
this.setAttribute( 'normal', new BufferAttribute( normals, 3 ) );
this.setAttribute( 'uv', new BufferAttribute( uvs, 2 ) );
this.computeBoundingSphere();
}
}
export { TeapotGeometry };

View File

@@ -0,0 +1,84 @@
import {
ExtrudeGeometry
} from 'three';
/**
* A class for generating text as a single geometry. It is constructed by providing a string of text, and a set of
* parameters consisting of a loaded font and extrude settings.
*
* See the {@link FontLoader} page for additional details.
*
* `TextGeometry` uses [typeface.json](http://gero3.github.io/facetype.js/) generated fonts.
* Some existing fonts can be found located in `/examples/fonts`.
*
* ```js
* const loader = new FontLoader();
* const font = await loader.loadAsync( 'fonts/helvetiker_regular.typeface.json' );
* const geometry = new TextGeometry( 'Hello three.js!', {
* font: font,
* size: 80,
* depth: 5,
* curveSegments: 12
* } );
* ```
*
* @augments ExtrudeGeometry
* @three_import import { TextGeometry } from 'three/addons/geometries/TextGeometry.js';
*/
class TextGeometry extends ExtrudeGeometry {

	/**
	 * Constructs a new text geometry.
	 *
	 * @param {string} text - The text that should be transformed into a geometry.
	 * @param {TextGeometry~Options} [parameters] - The text settings.
	 */
	constructor( text, parameters = {} ) {

		const font = parameters.font;

		if ( font === undefined ) {

			super(); // generate default extrude geometry

		} else {

			const shapes = font.generateShapes( text, parameters.size, parameters.direction );

			// Apply extrusion defaults on a shallow copy so the caller's
			// options object is not mutated as a side effect of construction.
			// Explicit `=== undefined` checks (rather than spread defaults)
			// preserve the original semantics for keys set to undefined.
			const options = { ...parameters };

			if ( options.depth === undefined ) options.depth = 50;
			if ( options.bevelThickness === undefined ) options.bevelThickness = 10;
			if ( options.bevelSize === undefined ) options.bevelSize = 8;
			if ( options.bevelEnabled === undefined ) options.bevelEnabled = false;

			super( shapes, options );

		}

		this.type = 'TextGeometry';

	}

}
/**
* Represents the `options` type of the geometry's constructor.
*
* @typedef {Object} TextGeometry~Options
* @property {Font} [font] - The font.
* @property {number} [size=100] - The text size.
* @property {number} [depth=50] - Depth to extrude the shape.
* @property {number} [curveSegments=12] - Number of points on the curves.
* @property {number} [steps=1] - Number of points used for subdividing segments along the depth of the extruded spline.
 * @property {boolean} [bevelEnabled=false] - Whether beveling is applied to the shape or not.
* @property {number} [bevelThickness=10] - How deep into the original shape the bevel goes.
* @property {number} [bevelSize=8] - Distance from the shape outline that the bevel extends.
* @property {number} [bevelOffset=0] - Distance from the shape outline that the bevel starts.
* @property {number} [bevelSegments=3] - Number of bevel layers.
* @property {string} [direction='ltr'] - Char direction: ltr(left to right), rtl(right to left) & tb(top bottom).
* @property {?Curve} [extrudePath=null] - A 3D spline path along which the shape should be extruded. Bevels not supported for path extrusion.
* @property {Object} [UVGenerator] - An object that provides UV generator functions for custom UV generation.
**/
export { TextGeometry };

715
node_modules/three/examples/jsm/gpgpu/BitonicSort.js generated vendored Normal file
View File

@@ -0,0 +1,715 @@
import { Fn, uvec2, If, instancedArray, instanceIndex, invocationLocalIndex, Loop, workgroupArray, workgroupBarrier, workgroupId, uint, select, min, max } from 'three/tsl';
// Identifies which phase of the bitonic sort a given dispatch performs.
const StepType = {
// No step scheduled.
NONE: 0,
// Swap all values within the local range of workgroupSize * 2
SWAP_LOCAL: 1,
// Disperse values within the local range of workgroupSize * 2
DISPERSE_LOCAL: 2,
// Swap values within global data buffer.
FLIP_GLOBAL: 3,
// Disperse values within the global data buffer.
DISPERSE_GLOBAL: 4,
};
/**
 * Returns the indices that will be compared in a bitonic flip operation.
 *
 * @tsl
 * @private
 * @param {Node<uint>} index - The compute thread's invocation id.
 * @param {Node<uint>} blockHeight - The height of the block within which elements are being swapped.
 * @returns {Node<uvec2>} The indices of the elements in the data buffer being compared.
 */
export const getBitonicFlipIndices = /*@__PURE__*/ Fn( ( [ index, blockHeight ] ) => {

	// Start of the block this invocation belongs to, and half its height.
	const half = blockHeight.div( 2 );
	const spanStart = index.mul( 2 ).div( blockHeight ).mul( blockHeight );

	// Mirror the two lanes around the centre of the block…
	const pair = uvec2(
		index.mod( half ),
		blockHeight.sub( index.mod( half ) ).sub( 1 )
	);

	// …then shift both into the block's position within the full buffer.
	pair.x.addAssign( spanStart );
	pair.y.addAssign( spanStart );

	return pair;

} ).setLayout( {
	name: 'getBitonicFlipIndices',
	type: 'uvec2',
	inputs: [
		{ name: 'index', type: 'uint' },
		{ name: 'blockHeight', type: 'uint' }
	]
} );
/**
 * Returns the indices that will be compared in a bitonic sort's disperse operation.
 *
 * @tsl
 * @private
 * @param {Node<uint>} index - The compute thread's invocation id.
 * @param {Node<uint>} swapSpan - The maximum span over which elements are being swapped.
 * @returns {Node<uvec2>} The indices of the elements in the data buffer being compared.
 */
export const getBitonicDisperseIndices = /*@__PURE__*/ Fn( ( [ index, swapSpan ] ) => {

	// Start of the span this invocation belongs to, and half its height.
	const blockOffset = ( ( index.mul( 2 ) ).div( swapSpan ) ).mul( swapSpan );
	const halfHeight = swapSpan.div( 2 );

	// Compare elements exactly halfHeight apart within the span.
	const idx = uvec2(
		index.mod( halfHeight ),
		( index.mod( halfHeight ) ).add( halfHeight )
	);

	idx.x.addAssign( blockOffset );
	idx.y.addAssign( blockOffset );

	return idx;

} ).setLayout( {
	name: 'getBitonicDisperseIndices',
	type: 'uvec2',
	inputs: [
		{ name: 'index', type: 'uint' },
		// Fixed: layout name now matches the function's 'swapSpan' parameter
		// (was 'blockHeight', copy-pasted from getBitonicFlipIndices).
		{ name: 'swapSpan', type: 'uint' }
	]
} );
export class BitonicSort {
/**
* Constructs a new light probe helper.
*
* @param {Renderer} renderer - The current scene's renderer.
* @param {StorageBufferNode} dataBuffer - The data buffer to sort.
* @param {Object} [options={}] - Options that modify the bitonic sort.
*/
constructor( renderer, dataBuffer, options = {} ) {
/**
* A reference to the renderer.
*
* @type {Renderer}
*/
this.renderer = renderer;
/**
* A reference to the StorageBufferNode holding the data that will be sorted .
*
* @type {StorageBufferNode}
*/
this.dataBuffer = dataBuffer;
/**
* The size of the data.
*
* @type {StorageBufferNode}
*/
this.count = dataBuffer.value.count;
/**
*
* The size of each compute dispatch.
* @type {number}
*/
this.dispatchSize = this.count / 2;
/**
* The workgroup size of the compute shaders executed during the sort.
*
* @type {StorageBufferNode}
*/
this.workgroupSize = options.workgroupSize ? Math.min( this.dispatchSize, options.workgroupSize ) : Math.min( this.dispatchSize, 64 );
/**
* A node representing a workgroup scoped buffer that holds locally sorted elements.
*
* @type {WorkgroupInfoNode}
*/
this.localStorage = workgroupArray( dataBuffer.nodeType, this.workgroupSize * 2 );
this._tempArray = new Uint32Array( this.count );
for ( let i = 0; i < this.count; i ++ ) {
this._tempArray[ i ] = 0;
}
/**
* A node representing a storage buffer used for transferring the result of the global sort back to the original data buffer.
*
* @type {StorageBufferNode}
*/
this.tempBuffer = instancedArray( this.count, dataBuffer.nodeType ).setName( 'TempStorage' );
/**
* A node containing the current algorithm type, the current swap span, and the highest swap span.
*
* @type {StorageBufferNode}
*/
this.infoStorage = instancedArray( new Uint32Array( [ 1, 2, 2 ] ), 'uint' ).setName( 'BitonicSortInfo' );
/**
* The number of distinct swap operations ('flips' and 'disperses') executed in an in-place
* bitonic sort of the current data buffer.
*
* @type {number}
*/
this.swapOpCount = this._getSwapOpCount();
/**
* The number of steps (i.e prepping and/or executing a swap) needed to fully execute an in-place bitonic sort of the current data buffer.
*
* @type {number}
*/
this.stepCount = this._getStepCount();
/**
* The number of the buffer being read from.
*
* @type {string}
*/
this.readBufferName = 'Data';
/**
* An object containing compute shaders that execute a 'flip' swap within a global address space on elements in the data buffer.
*
* @type {Object<string, ComputeNode>}
*/
this.flipGlobalNodes = {
'Data': this._getFlipGlobal( this.dataBuffer, this.tempBuffer ),
'Temp': this._getFlipGlobal( this.tempBuffer, this.dataBuffer )
};
/**
* An object containing compute shaders that execute a 'disperse' swap within a global address space on elements in the data buffer.
*
* @type {Object<string, ComputeNode>}
*/
this.disperseGlobalNodes = {
'Data': this._getDisperseGlobal( this.dataBuffer, this.tempBuffer ),
'Temp': this._getDisperseGlobal( this.tempBuffer, this.dataBuffer )
};
/**
* A compute shader that executes a sequence of flip and disperse swaps within a local address space on elements in the data buffer.
*
* @type {ComputeNode}
*/
this.swapLocalFn = this._getSwapLocal();
/**
* A compute shader that executes a sequence of disperse swaps within a local address space on elements in the data buffer.
*
* @type {Object<string, ComputeNode>}
*/
this.disperseLocalNodes = {
'Data': this._getDisperseLocal( this.dataBuffer ),
'Temp': this._getDisperseLocal( this.tempBuffer ),
};
// Utility functions
/**
* A compute shader that sets up the algorithm and the swap span for the next swap operation.
*
* @type {ComputeNode}
*/
this.setAlgoFn = this._getSetAlgoFn();
/**
* A compute shader that aligns the result of the global swap operation with the current buffer.
*
* @type {ComputeNode}
*/
this.alignFn = this._getAlignFn();
/**
* A compute shader that resets the algorithm and swap span information.
*
* @type {ComputeNode}
*/
this.resetFn = this._getResetFn();
/**
* The current compute shader dispatch within the list of dispatches needed to complete the sort.
*
* @type {number}
*/
this.currentDispatch = 0;
/**
* The number of global swap operations that must be executed before the sort
* can swap in local address space.
*
* @type {number}
*/
this.globalOpsRemaining = 0;
/**
* The total number of global operations needed to sort elements within the current swap span.
*
* @type {number}
*/
this.globalOpsInSpan = 0;
}
/**
* Get total number of distinct swaps that occur in a bitonic sort.
*
* @private
* @returns {number} - The total number of distinct swaps in a bitonic sort
*/
_getSwapOpCount() {
const n = Math.log2( this.count );
return ( n * ( n + 1 ) ) / 2;
}
/**
* Get the number of steps it takes to execute a complete bitonic sort.
*
* @private
* @returns {number} The number of steps it takes to execute a complete bitonic sort.
*/
_getStepCount() {
const logElements = Math.log2( this.count );
const logSwapSpan = Math.log2( this.workgroupSize * 2 );
const numGlobalFlips = logElements - logSwapSpan;
// Start with 1 for initial sort over all local elements
let numSteps = 1;
let numGlobalDisperses = 0;
for ( let i = 1; i <= numGlobalFlips; i ++ ) {
// Increment by the global flip that starts each global block
numSteps += 1;
// Increment by number of global disperses following the global flip
numSteps += numGlobalDisperses;
// Increment by local disperse that occurs after all global swaps are finished
numSteps += 1;
// Number of global disperse increases as swapSpan increases by factor of 2
numGlobalDisperses += 1;
}
return numSteps;
}
/**
* Compares and swaps two data points in the data buffer within the global address space.
* @param {Node<uint>} idxBefore - The index of the first data element in the data buffer.
* @param {Node<uint>} idxAfter - The index of the second data element in the data buffer.
* @param {StorageBufferNode} dataBuffer - The buffer of data to read from.
* @param {StorageBufferNode} tempBuffer - The buffer of data to write to.
* @private
*
*/
_globalCompareAndSwapTSL( idxBefore, idxAfter, dataBuffer, tempBuffer ) {
const data1 = dataBuffer.element( idxBefore );
const data2 = dataBuffer.element( idxAfter );
tempBuffer.element( idxBefore ).assign( min( data1, data2 ) );
tempBuffer.element( idxAfter ).assign( max( data1, data2 ) );
}
/**
* Compares and swaps two data points in the data buffer within the local address space.
*
* @private
* @param {Node<uint>} idxBefore - The index of the first data element in the data buffer.
* @param {Node<uint>} idxAfter - The index of the second data element in the data buffer
*/
_localCompareAndSwapTSL( idxBefore, idxAfter ) {
const { localStorage } = this;
const data1 = localStorage.element( idxBefore ).toVar();
const data2 = localStorage.element( idxAfter ).toVar();
localStorage.element( idxBefore ).assign( min( data1, data2 ) );
localStorage.element( idxAfter ).assign( max( data1, data2 ) );
}
/**
* Create the compute shader that performs a global disperse swap on the data buffer.
*
* @private
* @param {StorageBufferNode} readBuffer - The data buffer to read from.
* @param {StorageBufferNode} writeBuffer - The data buffer to read from.
* @returns {ComputeNode} - A compute shader that performs a global disperse swap on the data buffer.
*/
_getDisperseGlobal( readBuffer, writeBuffer ) {
const { infoStorage } = this;
const currentSwapSpan = infoStorage.element( 1 );
const fnDef = Fn( () => {
const idx = getBitonicDisperseIndices( instanceIndex, currentSwapSpan );
this._globalCompareAndSwapTSL( idx.x, idx.y, readBuffer, writeBuffer );
} )().compute( this.dispatchSize, [ this.workgroupSize ] );
return fnDef;
}
/**
* Create the compute shader that performs a global flip swap on the data buffer.
*
* @private
* @param {StorageBufferNode} readBuffer - The data buffer to read from.
* @param {StorageBufferNode} writeBuffer - The data buffer to read from.
* @returns {ComputeNode} - A compute shader that executes a global flip swap.
*/
_getFlipGlobal( readBuffer, writeBuffer ) {
const { infoStorage } = this;
const currentSwapSpan = infoStorage.element( 1 );
const fnDef = Fn( () => {
const idx = getBitonicFlipIndices( instanceIndex, currentSwapSpan );
this._globalCompareAndSwapTSL( idx.x, idx.y, readBuffer, writeBuffer );
} )().compute( this.dispatchSize, [ this.workgroupSize ] );
return fnDef;
}
/**
 * Create the compute shader that performs a complete local swap on the data buffer.
 *
 * @private
 * @returns {ComputeNode} - A compute shader that executes a full local swap.
 */
_getSwapLocal() {
const { localStorage, dataBuffer, workgroupSize } = this;
const fnDef = Fn( () => {
// Get ids of indices needed to populate workgroup local buffer.
// Use .toVar() to prevent these values from being recalculated multiple times.
const localOffset = uint( workgroupSize ).mul( 2 ).mul( workgroupId.x ).toVar();
const localID1 = invocationLocalIndex.mul( 2 );
const localID2 = invocationLocalIndex.mul( 2 ).add( 1 );
// Each invocation copies its two elements from global memory into workgroup storage.
localStorage.element( localID1 ).assign( dataBuffer.element( localOffset.add( localID1 ) ) );
localStorage.element( localID2 ).assign( dataBuffer.element( localOffset.add( localID2 ) ) );
// Ensure that all local data has been populated
workgroupBarrier();
// Perform a chunk of the sort in a single pass that operates entirely in workgroup local space
// SWAP_LOCAL will always be first pass, so we start with known block height of 2
// NOTE(review): flipBlockHeight mirrors the Loop's own counter and is
// doubled manually via shiftLeftAssign below — confirm both stay in
// lockstep if the loop bounds ever change.
const flipBlockHeight = uint( 2 );
Loop( { start: uint( 2 ), end: uint( workgroupSize * 2 ), type: 'uint', condition: '<=', update: '<<= 1' }, () => {
// Ensure that last dispatch block executed
workgroupBarrier();
const flipIdx = getBitonicFlipIndices( invocationLocalIndex, flipBlockHeight );
this._localCompareAndSwapTSL( flipIdx.x, flipIdx.y );
// After the flip, disperse over successively halved spans.
const localBlockHeight = flipBlockHeight.div( 2 );
Loop( { start: localBlockHeight, end: uint( 1 ), type: 'uint', condition: '>', update: '>>= 1' }, () => {
// Ensure that last dispatch op executed
workgroupBarrier();
const disperseIdx = getBitonicDisperseIndices( invocationLocalIndex, localBlockHeight );
this._localCompareAndSwapTSL( disperseIdx.x, disperseIdx.y );
localBlockHeight.divAssign( 2 );
} );
// flipBlockHeight *= 2;
flipBlockHeight.shiftLeftAssign( 1 );
} );
// Ensure that all invocations have swapped their own regions of data
workgroupBarrier();
// Copy the locally sorted elements back to global memory.
dataBuffer.element( localOffset.add( localID1 ) ).assign( localStorage.element( localID1 ) );
dataBuffer.element( localOffset.add( localID2 ) ).assign( localStorage.element( localID2 ) );
} )().compute( this.dispatchSize, [ this.workgroupSize ] );
return fnDef;
}
/**
 * Create the compute shader that performs a local disperse swap on the data buffer.
 *
 * @private
 * @param {StorageBufferNode} readWriteBuffer - The data buffer to read from and write to.
 * @returns {ComputeNode} - A compute shader that executes a local disperse swap.
 */
_getDisperseLocal( readWriteBuffer ) {
const { localStorage, workgroupSize } = this;
const fnDef = Fn( () => {
// Get ids of indices needed to populate workgroup local buffer.
// Use .toVar() to prevent these values from being recalculated multiple times.
const localOffset = uint( workgroupSize ).mul( 2 ).mul( workgroupId.x ).toVar();
const localID1 = invocationLocalIndex.mul( 2 );
const localID2 = invocationLocalIndex.mul( 2 ).add( 1 );
// Each invocation copies its two elements from global memory into workgroup storage.
localStorage.element( localID1 ).assign( readWriteBuffer.element( localOffset.add( localID1 ) ) );
localStorage.element( localID2 ).assign( readWriteBuffer.element( localOffset.add( localID2 ) ) );
// Ensure that all local data has been populated
workgroupBarrier();
// NOTE(review): localBlockHeight mirrors the Loop's own counter and is
// halved manually via divAssign below — confirm both stay in lockstep
// if the loop bounds ever change.
const localBlockHeight = uint( workgroupSize * 2 );
Loop( { start: localBlockHeight, end: uint( 1 ), type: 'uint', condition: '>', update: '>>= 1' }, () => {
// Ensure that last dispatch op executed
workgroupBarrier();
const disperseIdx = getBitonicDisperseIndices( invocationLocalIndex, localBlockHeight );
this._localCompareAndSwapTSL( disperseIdx.x, disperseIdx.y );
localBlockHeight.divAssign( 2 );
} );
// Ensure that all invocations have swapped their own regions of data
workgroupBarrier();
// Copy the dispersed elements back to global memory.
readWriteBuffer.element( localOffset.add( localID1 ) ).assign( localStorage.element( localID1 ) );
readWriteBuffer.element( localOffset.add( localID2 ) ).assign( localStorage.element( localID2 ) );
} )().compute( this.dispatchSize, [ this.workgroupSize ] );
return fnDef;
}
/**
* Create the compute shader that resets the sort's algorithm information.
*
* @private
* @returns {ComputeNode} - A compute shader that resets the bitonic sort's algorithm information.
*/
_getResetFn() {
const fnDef = Fn( () => {
const { infoStorage } = this;
const currentAlgo = infoStorage.element( 0 );
const currentSwapSpan = infoStorage.element( 1 );
const maxSwapSpan = infoStorage.element( 2 );
currentAlgo.assign( StepType.SWAP_LOCAL );
currentSwapSpan.assign( 2 );
maxSwapSpan.assign( 2 );
} )().compute( 1 );
return fnDef;
}
/**
* Create the compute shader that copies the state of the last global swap to the data buffer.
*
* @private
* @returns {ComputeNode} - A compute shader that copies the state of the last global swap to the data buffer.
*/
_getAlignFn() {
const { dataBuffer, tempBuffer } = this;
// TODO: Only do this in certain instances by ping-ponging which buffer gets sorted
// And only aligning if numDispatches % 2 === 1
const fnDef = Fn( () => {
dataBuffer.element( instanceIndex ).assign( tempBuffer.element( instanceIndex ) );
} )().compute( this.count, [ this.workgroupSize ] );
return fnDef;
}
/**
* Create the compute shader that sets the bitonic sort algorithm's information.
*
* @private
* @returns {ComputeNode} - A compute shader that sets the bitonic sort algorithm's information.
*/
_getSetAlgoFn() {
const fnDef = Fn( () => {
const { infoStorage, workgroupSize } = this;
// Algorithm info layout: [0] = current step type, [1] = current swap span,
// [2] = highest swap span reached so far.
const currentAlgo = infoStorage.element( 0 );
const currentSwapSpan = infoStorage.element( 1 );
const maxSwapSpan = infoStorage.element( 2 );
If( currentAlgo.equal( StepType.SWAP_LOCAL ), () => {
// After the initial local swap, move to global flips over a span that is
// double the per-workgroup region (workgroupSize * 2 elements).
const nextHighestSwapSpan = uint( workgroupSize * 4 );
currentAlgo.assign( StepType.FLIP_GLOBAL );
currentSwapSpan.assign( nextHighestSwapSpan );
maxSwapSpan.assign( nextHighestSwapSpan );
} ).ElseIf( currentAlgo.equal( StepType.DISPERSE_LOCAL ), () => {
// A finished local disperse completes the current span; the next flip
// doubles the largest span handled so far.
currentAlgo.assign( StepType.FLIP_GLOBAL );
const nextHighestSwapSpan = maxSwapSpan.mul( 2 );
currentSwapSpan.assign( nextHighestSwapSpan );
maxSwapSpan.assign( nextHighestSwapSpan );
} ).Else( () => {
// Mid-span: halve the span and disperse — locally once the span fits
// within a single workgroup's region, globally otherwise.
const nextSwapSpan = currentSwapSpan.div( 2 );
currentAlgo.assign(
select(
nextSwapSpan.lessThanEqual( uint( workgroupSize * 2 ) ),
StepType.DISPERSE_LOCAL,
StepType.DISPERSE_GLOBAL
).uniformFlow()
);
currentSwapSpan.assign( nextSwapSpan );
} );
} )().compute( 1 );
return fnDef;
}
/**
* Executes a step of the bitonic sort operation.
*
* @param {Renderer} renderer - The current scene's renderer.
*/
computeStep( renderer ) {
// Swap local only runs once
if ( this.currentDispatch === 0 ) {
renderer.compute( this.swapLocalFn );
this.globalOpsRemaining = 1;
this.globalOpsInSpan = 1;
} else if ( this.globalOpsRemaining > 0 ) {
const swapType = this.globalOpsRemaining === this.globalOpsInSpan ? 'Flip' : 'Disperse';
renderer.compute( swapType === 'Flip' ? this.flipGlobalNodes[ this.readBufferName ] : this.disperseGlobalNodes[ this.readBufferName ] );
if ( this.readBufferName === 'Data' ) {
this.readBufferName = 'Temp';
} else {
this.readBufferName = 'Data';
}
this.globalOpsRemaining -= 1;
} else {
// Then run local disperses when we've finished all global swaps
renderer.compute( this.disperseLocalNodes[ this.readBufferName ] );
const nextSpanGlobalOps = this.globalOpsInSpan + 1;
this.globalOpsInSpan = nextSpanGlobalOps;
this.globalOpsRemaining = nextSpanGlobalOps;
}
this.currentDispatch += 1;
if ( this.currentDispatch === this.stepCount ) {
// If our last swap addressed only addressed the temp buffer, then re-align it with the data buffer
// to fulfill the requirement of an in-place sort.
if ( this.readBufferName === 'Temp' ) {
renderer.compute( this.alignFn );
this.readBufferName = 'Data';
}
// Just reset the algorithm information
renderer.compute( this.resetFn );
this.currentDispatch = 0;
this.globalOpsRemaining = 0;
this.globalOpsInSpan = 0;
} else {
// Otherwise, determine what next swap span is
renderer.compute( this.setAlgoFn );
}
}
/**
* Executes a complete bitonic sort on the data buffer.
*
* @param {Renderer} renderer - The current scene's renderer.
*/
compute( renderer ) {
this.globalOpsRemaining = 0;
this.globalOpsInSpan = 0;
this.currentDispatch = 0;
for ( let i = 0; i < this.stepCount; i ++ ) {
this.computeStep( renderer );
}
}
}

View File

@@ -0,0 +1,165 @@
import {
Mesh,
ShaderMaterial,
SphereGeometry
} from 'three';
/**
* Renders a sphere to visualize a light probe in the scene.
*
* This helper can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, import from `LightProbeHelperGPU.js`.
*
* ```js
* const helper = new LightProbeHelper( lightProbe );
* scene.add( helper );
* ```
*
* @augments Mesh
* @three_import import { LightProbeHelper } from 'three/addons/helpers/LightProbeHelper.js';
*/
class LightProbeHelper extends Mesh {
/**
 * Constructs a new light probe helper.
 *
 * @param {LightProbe} lightProbe - The light probe to visualize.
 * @param {number} [size=1] - The size of the helper.
 */
constructor( lightProbe, size = 1 ) {
// The shader evaluates the probe's spherical-harmonics coefficients
// directly, so the helper sphere needs no scene lighting.
const material = new ShaderMaterial( {
type: 'LightProbeHelperMaterial',
uniforms: {
sh: { value: lightProbe.sh.coefficients }, // by reference
intensity: { value: lightProbe.intensity }
},
vertexShader: /* glsl */`
varying vec3 vNormal;
void main() {
vNormal = normalize( normalMatrix * normal );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
fragmentShader: /* glsl */`
#define RECIPROCAL_PI 0.318309886
vec3 inverseTransformDirection( in vec3 normal, in mat4 matrix ) {
// matrix is assumed to be orthogonal
return normalize( ( vec4( normal, 0.0 ) * matrix ).xyz );
}
// source: https://graphics.stanford.edu/papers/envmap/envmap.pdf,
vec3 shGetIrradianceAt( in vec3 normal, in vec3 shCoefficients[ 9 ] ) {
// normal is assumed to have unit length,
float x = normal.x, y = normal.y, z = normal.z;
// band 0,
vec3 result = shCoefficients[ 0 ] * 0.886227;
// band 1,
result += shCoefficients[ 1 ] * 2.0 * 0.511664 * y;
result += shCoefficients[ 2 ] * 2.0 * 0.511664 * z;
result += shCoefficients[ 3 ] * 2.0 * 0.511664 * x;
// band 2,
result += shCoefficients[ 4 ] * 2.0 * 0.429043 * x * y;
result += shCoefficients[ 5 ] * 2.0 * 0.429043 * y * z;
result += shCoefficients[ 6 ] * ( 0.743125 * z * z - 0.247708 );
result += shCoefficients[ 7 ] * 2.0 * 0.429043 * x * z;
result += shCoefficients[ 8 ] * 0.429043 * ( x * x - y * y );
return result;
}
uniform vec3 sh[ 9 ]; // sh coefficients
uniform float intensity; // light probe intensity
varying vec3 vNormal;
void main() {
vec3 normal = normalize( vNormal );
vec3 worldNormal = inverseTransformDirection( normal, viewMatrix );
vec3 irradiance = shGetIrradianceAt( worldNormal, sh );
vec3 outgoingLight = RECIPROCAL_PI * irradiance * intensity;
gl_FragColor = linearToOutputTexel( vec4( outgoingLight, 1.0 ) );
}
`,
} );
const geometry = new SphereGeometry( 1, 32, 16 );
super( geometry, material );
/**
 * The light probe to visualize.
 *
 * @type {LightProbe}
 */
this.lightProbe = lightProbe;
/**
 * The size of the helper.
 *
 * @type {number}
 * @default 1
 */
this.size = size;
this.type = 'LightProbeHelper';
// Sync transform and uniforms once so the helper is correct even before
// the first render.
this.onBeforeRender();
}
/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
dispose() {
this.geometry.dispose();
this.material.dispose();
}
/**
 * Synchronizes the helper's position, scale, and intensity uniform with the
 * light probe. Overrides `Object3D.onBeforeRender`; also invoked once from
 * the constructor. (The `sh` uniform tracks the probe by reference.)
 */
onBeforeRender() {
this.position.copy( this.lightProbe.position );
this.scale.set( 1, 1, 1 ).multiplyScalar( this.size );
this.material.uniforms.intensity.value = this.lightProbe.intensity;
}
}
export { LightProbeHelper };

View File

@@ -0,0 +1,102 @@
import {
Mesh,
NodeMaterial,
SphereGeometry
} from 'three/webgpu';
import { float, Fn, getShIrradianceAt, normalWorld, uniformArray, uniform, vec4 } from 'three/tsl';
/**
* Renders a sphere to visualize a light probe in the scene.
*
* This helper can only be used with {@link WebGPURenderer}.
* When using {@link WebGLRenderer}, import from `LightProbeHelper.js`.
*
* ```js
* const helper = new LightProbeHelper( lightProbe );
* scene.add( helper );
* ```
*
* @private
* @augments Mesh
* @three_import import { LightProbeHelper } from 'three/addons/helpers/LightProbeHelperGPU.js';
*/
class LightProbeHelper extends Mesh {

	/**
	 * Constructs a new light probe helper.
	 *
	 * @param {LightProbe} lightProbe - The light probe to visualize.
	 * @param {number} [size=1] - The size of the helper.
	 */
	constructor( lightProbe, size = 1 ) {

		// Uniforms mirroring the probe's state; kept in sync in onBeforeRender().
		const shCoefficients = uniformArray( lightProbe.sh.coefficients );
		const probeIntensity = uniform( lightProbe.intensity );
		const invPi = float( 1 / Math.PI );

		// Shade the sphere with the probe's irradiance along the world normal.
		const fragmentNode = Fn( () => {

			const irradiance = getShIrradianceAt( normalWorld, shCoefficients );
			const outgoingLight = invPi.mul( irradiance ).mul( probeIntensity );
			return vec4( outgoingLight, 1.0 );

		} )();

		const material = new NodeMaterial();
		material.fragmentNode = fragmentNode;

		super( new SphereGeometry( 1, 32, 16 ), material );

		/**
		 * The light probe to visualize.
		 *
		 * @type {LightProbe}
		 */
		this.lightProbe = lightProbe;

		/**
		 * The size of the helper.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.size = size;

		this.type = 'LightProbeHelper';

		this._intensity = probeIntensity;
		this._sh = shCoefficients;

		this.onBeforeRender();

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();

	}

	/**
	 * Synchronizes the helper's transform and uniforms with the light probe.
	 * Overrides `Object3D.onBeforeRender`; also invoked once from the constructor.
	 */
	onBeforeRender() {

		this.position.copy( this.lightProbe.position );
		this.scale.setScalar( this.size );

		this._intensity.value = this.lightProbe.intensity;
		this._sh.array = this.lightProbe.sh.coefficients;

	}

}
export { LightProbeHelper };

109
node_modules/three/examples/jsm/helpers/OctreeHelper.js generated vendored Normal file
View File

@@ -0,0 +1,109 @@
import {
LineSegments,
BufferGeometry,
Float32BufferAttribute,
LineBasicMaterial
} from 'three';
/**
* A helper for visualizing an Octree.
*
* ```js
* const helper = new OctreeHelper( octree );
* scene.add( helper );
* ```
*
* @augments LineSegments
* @three_import import { OctreeHelper } from 'three/addons/helpers/OctreeHelper.js';
*/
class OctreeHelper extends LineSegments {

	/**
	 * Constructs a new Octree helper.
	 *
	 * @param {Octree} octree - The octree to visualize.
	 * @param {number|Color|string} [color=0xffff00] - The helper's color.
	 */
	constructor( octree, color = 0xffff00 ) {

		super( new BufferGeometry(), new LineBasicMaterial( { color: color, toneMapped: false } ) );

		/**
		 * The octree to visualize.
		 *
		 * @type {Octree}
		 */
		this.octree = octree;

		/**
		 * The helper's color.
		 *
		 * @type {number|Color|string}
		 */
		this.color = color;

		this.type = 'OctreeHelper';

		this.update();

	}

	/**
	 * Updates the helper. This method must be called whenever the Octree's
	 * structure is changed.
	 */
	update() {

		const vertices = [];

		// The 12 box edges as pairs of corner indices (see corner table below).
		const edges = [
			[ 0, 1 ], [ 1, 2 ], [ 2, 3 ], [ 3, 0 ],
			[ 4, 5 ], [ 5, 6 ], [ 6, 7 ], [ 7, 4 ],
			[ 0, 4 ], [ 1, 5 ], [ 2, 6 ], [ 3, 7 ]
		];

		function traverse( tree ) {

			for ( let i = 0; i < tree.length; i ++ ) {

				const { min, max } = tree[ i ].box;

				// Corner table: indices 0-3 lie on the +z face, 4-7 on the -z face.
				const corners = [
					[ max.x, max.y, max.z ], [ min.x, max.y, max.z ], [ min.x, min.y, max.z ], [ max.x, min.y, max.z ],
					[ max.x, max.y, min.z ], [ min.x, max.y, min.z ], [ min.x, min.y, min.z ], [ max.x, min.y, min.z ]
				];

				for ( const [ a, b ] of edges ) {

					vertices.push( ...corners[ a ], ...corners[ b ] );

				}

				traverse( tree[ i ].subTrees );

			}

		}

		traverse( this.octree.subTrees );

		// Rebuild the geometry from scratch since the vertex count may change.
		this.geometry.dispose();

		this.geometry = new BufferGeometry();
		this.geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();

	}

}
export { OctreeHelper };

View File

@@ -0,0 +1,169 @@
import {
BufferGeometry,
BufferAttribute,
LineBasicMaterial,
Line,
MathUtils
} from 'three';
/**
* This helper displays the directional cone of a positional audio.
*
* `PositionalAudioHelper` must be added as a child of the positional audio.
*
* ```js
* const positionalAudio = new THREE.PositionalAudio( listener );
* positionalAudio.setDirectionalCone( 180, 230, 0.1 );
* scene.add( positionalAudio );
*
* const helper = new PositionalAudioHelper( positionalAudio );
* positionalAudio.add( helper );
* ```
*
* @augments Line
* @three_import import { PositionalAudioHelper } from 'three/addons/helpers/PositionalAudioHelper.js';
*/
class PositionalAudioHelper extends Line {

	/**
	 * Constructs a new positional audio helper.
	 *
	 * @param {PositionalAudio} audio - The audio to visualize.
	 * @param {number} [range=1] - The range of the directional cone.
	 * @param {number} [divisionsInnerAngle=16] - The number of divisions of the inner part of the directional cone.
	 * @param {number} [divisionsOuterAngle=2] The number of divisions of the outer part of the directional cone.
	 */
	constructor( audio, range = 1, divisionsInnerAngle = 16, divisionsOuterAngle = 2 ) {

		const geometry = new BufferGeometry();
		const divisions = divisionsInnerAngle + divisionsOuterAngle * 2;
		// Three vertices per division (fan triangle) plus the closing fan center.
		const positions = new Float32Array( ( divisions * 3 + 3 ) * 3 );
		geometry.setAttribute( 'position', new BufferAttribute( positions, 3 ) );

		const materialInnerAngle = new LineBasicMaterial( { color: 0x00ff00 } );
		const materialOuterAngle = new LineBasicMaterial( { color: 0xffff00 } );

		// Material index 0 renders the outer cone, index 1 the inner cone.
		super( geometry, [ materialOuterAngle, materialInnerAngle ] );

		/**
		 * The audio to visualize.
		 *
		 * @type {PositionalAudio}
		 */
		this.audio = audio;

		/**
		 * The range of the directional cone.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.range = range;

		/**
		 * The number of divisions of the inner part of the directional cone.
		 *
		 * @type {number}
		 * @default 16
		 */
		this.divisionsInnerAngle = divisionsInnerAngle;

		/**
		 * The number of divisions of the outer part of the directional cone.
		 *
		 * @type {number}
		 * @default 2
		 */
		this.divisionsOuterAngle = divisionsOuterAngle;

		this.type = 'PositionalAudioHelper';

		this.update();

	}

	/**
	 * Updates the helper. This method must be called whenever the directional cone
	 * of the positional audio is changed.
	 */
	update() {

		const audio = this.audio;
		const range = this.range;
		const divisionsInnerAngle = this.divisionsInnerAngle;
		const divisionsOuterAngle = this.divisionsOuterAngle;

		const coneInnerAngle = MathUtils.degToRad( audio.panner.coneInnerAngle );
		const coneOuterAngle = MathUtils.degToRad( audio.panner.coneOuterAngle );

		const halfConeInnerAngle = coneInnerAngle / 2;
		const halfConeOuterAngle = coneOuterAngle / 2;

		let start = 0;
		let count = 0;

		const geometry = this.geometry;
		const positionAttribute = geometry.attributes.position;

		geometry.clearGroups();

		// Builds a triangle-fan shaped wedge of line segments between the angles
		// `from` and `to` and assigns it to the given material group.
		function generateSegment( from, to, divisions, materialIndex ) {

			const step = ( to - from ) / divisions;

			positionAttribute.setXYZ( start, 0, 0, 0 );
			count ++;

			for ( let i = from; i < to; i += step ) {

				const stride = start + count;

				positionAttribute.setXYZ( stride, Math.sin( i ) * range, 0, Math.cos( i ) * range );
				positionAttribute.setXYZ( stride + 1, Math.sin( Math.min( i + step, to ) ) * range, 0, Math.cos( Math.min( i + step, to ) ) * range );
				positionAttribute.setXYZ( stride + 2, 0, 0, 0 );

				count += 3;

			}

			geometry.addGroup( start, count, materialIndex );

			start += count;
			count = 0;

		}

		//

		generateSegment( - halfConeOuterAngle, - halfConeInnerAngle, divisionsOuterAngle, 0 );
		generateSegment( - halfConeInnerAngle, halfConeInnerAngle, divisionsInnerAngle, 1 );
		generateSegment( halfConeInnerAngle, halfConeOuterAngle, divisionsOuterAngle, 0 );

		//

		positionAttribute.needsUpdate = true;

		// Keep the outer-cone material's visibility in sync with the current
		// angles: when the angles are equal, the outer wedges are degenerate and
		// are hidden. The previous code only ever hid the material and never
		// restored it once the cone was changed back to distinct angles.
		this.material[ 0 ].visible = ( coneInnerAngle !== coneOuterAngle );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material[ 0 ].dispose();
		this.material[ 1 ].dispose();

	}

}
export { PositionalAudioHelper };

View File

@@ -0,0 +1,59 @@
import { LineSegments, LineBasicMaterial, BufferAttribute } from 'three';
/**
* This class displays all Rapier Colliders in outline.
*
* @augments LineSegments
* @three_import import { RapierHelper } from 'three/addons/helpers/RapierHelper.js';
*/
class RapierHelper extends LineSegments {

	/**
	 * Constructs a new Rapier debug helper.
	 *
	 * @param {RAPIER.world} world - The Rapier world to visualize.
	 */
	constructor( world ) {

		super();

		/**
		 * The Rapier world to visualize.
		 *
		 * @type {RAPIER.world}
		 */
		this.world = world;

		this.material = new LineBasicMaterial( { vertexColors: true } );

		// Debug lines can appear anywhere in the scene, so never frustum-cull them.
		this.frustumCulled = false;

	}

	/**
	 * Call this in the render loop to update the outlines.
	 */
	update() {

		// Rapier hands back flat vertex (xyz) and color (rgba) arrays for all colliders.
		const debugData = this.world.debugRender();

		// Replace the attributes outright since the buffer sizes may change.
		this.geometry.deleteAttribute( 'position' );
		this.geometry.deleteAttribute( 'color' );

		this.geometry.setAttribute( 'position', new BufferAttribute( debugData.vertices, 3 ) );
		this.geometry.setAttribute( 'color', new BufferAttribute( debugData.colors, 4 ) );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();

	}

}

View File

@@ -0,0 +1,118 @@
import {
BackSide,
BufferGeometry,
Float32BufferAttribute,
Line,
LineBasicMaterial,
Mesh,
MeshBasicMaterial
} from 'three';
/**
* Creates a visual aid for rect area lights.
*
* `RectAreaLightHelper` must be added as a child of the light.
*
* ```js
* const light = new THREE.RectAreaLight( 0xffffbb, 1.0, 5, 5 );
* const helper = new RectAreaLightHelper( light );
* light.add( helper );
* ```
*
* @augments Line
* @three_import import { RectAreaLightHelper } from 'three/addons/helpers/RectAreaLightHelper.js';
*/
class RectAreaLightHelper extends Line {

	/**
	 * Constructs a new rect area light helper.
	 *
	 * @param {RectAreaLight} light - The light to visualize.
	 * @param {number|Color|string} [color] - The helper's color.
	 * If this is not the set, the helper will take the color of the light.
	 */
	constructor( light, color ) {

		// Outline of the light's rectangle: a closed loop in the XY plane.
		const outlinePositions = [ 1, 1, 0, - 1, 1, 0, - 1, - 1, 0, 1, - 1, 0, 1, 1, 0 ];

		const outlineGeometry = new BufferGeometry();
		outlineGeometry.setAttribute( 'position', new Float32BufferAttribute( outlinePositions, 3 ) );
		outlineGeometry.computeBoundingSphere();

		super( outlineGeometry, new LineBasicMaterial( { fog: false } ) );

		/**
		 * The light to visualize.
		 *
		 * @type {RectAreaLight}
		 */
		this.light = light;

		/**
		 * The helper's color. If `undefined`, the helper will take the color of the light.
		 *
		 * @type {number|Color|string|undefined}
		 */
		this.color = color;

		this.type = 'RectAreaLightHelper';

		// Back-facing fill mesh so the light's emissive side reads as a panel.
		const fillPositions = [ 1, 1, 0, - 1, 1, 0, - 1, - 1, 0, 1, 1, 0, - 1, - 1, 0, 1, - 1, 0 ];

		const fillGeometry = new BufferGeometry();
		fillGeometry.setAttribute( 'position', new Float32BufferAttribute( fillPositions, 3 ) );
		fillGeometry.computeBoundingSphere();

		this.add( new Mesh( fillGeometry, new MeshBasicMaterial( { side: BackSide, fog: false } ) ) );

	}

	updateMatrixWorld() {

		this.scale.set( 0.5 * this.light.width, 0.5 * this.light.height, 1 );

		const fillMesh = this.children[ 0 ];

		if ( this.color === undefined ) {

			// Tint by the light itself; if intensity pushes any channel past 1,
			// scale the whole color down to prevent a hue shift.
			const tint = this.material.color;
			tint.copy( this.light.color ).multiplyScalar( this.light.intensity );

			const maxChannel = Math.max( tint.r, tint.g, tint.b );
			if ( maxChannel > 1 ) tint.multiplyScalar( 1 / maxChannel );

			fillMesh.material.color.copy( tint );

		} else {

			this.material.color.set( this.color );
			fillMesh.material.color.set( this.color );

		}

		// ignore world scale on light
		this.matrixWorld.extractRotation( this.light.matrixWorld ).scale( this.scale ).copyPosition( this.light.matrixWorld );

		fillMesh.matrixWorld.copy( this.matrixWorld );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();
		this.children[ 0 ].geometry.dispose();
		this.children[ 0 ].material.dispose();

	}

}
export { RectAreaLightHelper };

View File

@@ -0,0 +1,265 @@
import {
BoxGeometry,
BufferAttribute,
DoubleSide,
Mesh,
PlaneGeometry,
ShaderMaterial,
Vector3,
} from 'three';
import { mergeGeometries } from '../utils/BufferGeometryUtils.js';
/**
* A helper that can be used to display any type of texture for
* debugging purposes. Depending on the type of texture (2D, 3D, Array),
* the helper becomes a plane or box mesh.
*
* This helper can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, import from `TextureHelperGPU.js`.
*
* @augments Mesh
* @three_import import { TextureHelper } from 'three/addons/helpers/TextureHelper.js';
*/
class TextureHelper extends Mesh {
/**
 * Constructs a new texture helper.
 *
 * @param {Texture} texture - The texture to visualize.
 * @param {number} [width=1] - The helper's width.
 * @param {number} [height=1] - The helper's height.
 * @param {number} [depth=1] - The helper's depth.
 */
constructor( texture, width = 1, height = 1, depth = 1 ) {
// The geometry carries a custom 'uvw' attribute holding the lookup
// coordinate for 2D, array, 3D, and cube sampling alike; the shader picks
// the matching overload of textureHelper().
const material = new ShaderMaterial( {
type: 'TextureHelperMaterial',
side: DoubleSide,
transparent: true,
uniforms: {
map: { value: texture },
alpha: { value: getAlpha( texture ) },
},
vertexShader: [
'attribute vec3 uvw;',
'varying vec3 vUvw;',
'void main() {',
'	vUvw = uvw;',
'	gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
'}',
].join( '\n' ),
// '{samplerType}' is substituted below with the sampler keyword that
// matches the texture variant (see getSamplerType()).
fragmentShader: [
'precision highp float;',
'precision highp sampler2DArray;',
'precision highp sampler3D;',
'uniform {samplerType} map;',
'uniform float alpha;',
'varying vec3 vUvw;',
'vec4 textureHelper( in sampler2D map ) { return texture( map, vUvw.xy ); }',
'vec4 textureHelper( in sampler2DArray map ) { return texture( map, vUvw ); }',
'vec4 textureHelper( in sampler3D map ) { return texture( map, vUvw ); }',
'vec4 textureHelper( in samplerCube map ) { return texture( map, vUvw ); }',
'void main() {',
'	gl_FragColor = linearToOutputTexel( vec4( textureHelper( map ).xyz, alpha ) );',
'}'
].join( '\n' ).replace( '{samplerType}', getSamplerType( texture ) )
} );
// Cube maps get a box sampled by direction; all other variants get a stack
// of planes, one per slice.
const geometry = texture.isCubeTexture
? createCubeGeometry( width, height, depth )
: createSliceGeometry( texture, width, height, depth );
super( geometry, material );
/**
 * The texture to visualize.
 *
 * @type {Texture}
 */
this.texture = texture;
this.type = 'TextureHelper';
}
/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
dispose() {
this.geometry.dispose();
this.material.dispose();
}
}
// Maps a texture variant to the GLSL sampler keyword used in the fragment shader.
function getSamplerType( texture ) {

	if ( texture.isCubeTexture ) return 'samplerCube';
	if ( texture.isDataArrayTexture || texture.isCompressedArrayTexture ) return 'sampler2DArray';
	if ( texture.isData3DTexture || texture.isCompressed3DTexture ) return 'sampler3D';

	return 'sampler2D';

}
// Returns how many 2D images make up the texture: six faces for a cube map,
// the layer/slice depth for layered textures, otherwise one.
function getImageCount( texture ) {

	if ( texture.isCubeTexture ) return 6;

	const isLayered = texture.isDataArrayTexture || texture.isCompressedArrayTexture ||
		texture.isData3DTexture || texture.isCompressed3DTexture;

	return isLayered ? texture.image.depth : 1;

}
// Returns the per-slice opacity used when rendering the helper: layered
// textures fade their slices so inner ones stay visible (never below 0.25);
// everything else is fully opaque.
function getAlpha( texture ) {

	const isLayered = texture.isDataArrayTexture || texture.isCompressedArrayTexture ||
		texture.isData3DTexture || texture.isCompressed3DTexture;

	if ( isLayered ) return Math.max( 1 / texture.image.depth, 0.25 );

	return 1;

}
// Builds a box whose 'uvw' attribute holds the normalized vertex position,
// i.e. the direction vector used to sample a cube map.
function createCubeGeometry( width, height, depth ) {

	const geometry = new BoxGeometry( width, height, depth );

	const position = geometry.attributes.position;
	const uv = geometry.attributes.uv;
	const uvw = new BufferAttribute( new Float32Array( uv.count * 3 ), 3 );

	const direction = new Vector3();

	for ( let i = 0; i < uv.count; i ++ ) {

		direction.fromBufferAttribute( position, i ).normalize();
		uvw.setXYZ( i, direction.x, direction.y, direction.z );

	}

	geometry.deleteAttribute( 'uv' );
	geometry.setAttribute( 'uvw', uvw );

	return geometry;

}
// Builds one plane per slice of the texture, spread evenly across `depth`,
// with a 'uvw' attribute encoding the per-slice sample coordinate.
function createSliceGeometry( texture, width, height, depth ) {

	const sliceCount = getImageCount( texture );
	const isArray = texture.isDataArrayTexture || texture.isCompressedArrayTexture;

	const geometries = [];

	for ( let slice = 0; slice < sliceCount; slice ++ ) {

		const geometry = new PlaneGeometry( width, height );

		if ( sliceCount > 1 ) {

			geometry.translate( 0, 0, depth * ( slice / ( sliceCount - 1 ) - 0.5 ) );

		}

		const uv = geometry.attributes.uv;
		const uvw = new BufferAttribute( new Float32Array( uv.count * 3 ), 3 );

		for ( let j = 0; j < uv.count; j ++ ) {

			const u = uv.getX( j );
			const v = texture.flipY ? uv.getY( j ) : 1 - uv.getY( j );

			// Array textures index layers with an integer; 3D textures use a
			// normalized depth coordinate.
			let w;

			if ( sliceCount === 1 ) {

				w = 1;

			} else if ( isArray ) {

				w = slice;

			} else {

				w = slice / ( sliceCount - 1 );

			}

			uvw.setXYZ( j, u, v, w );

		}

		geometry.deleteAttribute( 'uv' );
		geometry.setAttribute( 'uvw', uvw );

		geometries.push( geometry );

	}

	return mergeGeometries( geometries );

}
export { TextureHelper };

View File

@@ -0,0 +1,214 @@
import {
NodeMaterial,
BoxGeometry,
BufferAttribute,
Mesh,
PlaneGeometry,
DoubleSide,
Vector3,
} from 'three/webgpu';
import { texture as textureNode, cubeTexture, texture3D, float, vec4, attribute } from 'three/tsl';
import { mergeGeometries } from '../utils/BufferGeometryUtils.js';
/**
* A helper that can be used to display any type of texture for
* debugging purposes. Depending on the type of texture (2D, 3D, Array),
* the helper becomes a plane or box mesh.
*
* This helper can only be used with {@link WebGPURenderer}.
* When using {@link WebGLRenderer}, import from `TextureHelper.js`.
*
* @private
* @augments Mesh
* @three_import import { TextureHelper } from 'three/addons/helpers/TextureHelperGPU.js';
*/
class TextureHelper extends Mesh {

	/**
	 * Constructs a new texture helper.
	 *
	 * @param {Texture} texture - The texture to visualize.
	 * @param {number} [width=1] - The helper's width.
	 * @param {number} [height=1] - The helper's height.
	 * @param {number} [depth=1] - The helper's depth.
	 */
	constructor( texture, width = 1, height = 1, depth = 1 ) {

		const material = new NodeMaterial();
		material.side = DoubleSide;
		material.transparent = true;
		material.name = 'TextureHelper';

		// The geometry carries a custom 'uvw' attribute with the lookup coordinate.
		const uvw = attribute( 'uvw' );

		// Pick the sampling node that matches the texture variant.
		let colorNode;

		if ( texture.isCubeTexture ) {

			colorNode = cubeTexture( texture ).sample( uvw );

		} else if ( texture.isData3DTexture || texture.isCompressed3DTexture ) {

			colorNode = texture3D( texture ).sample( uvw );

		} else if ( texture.isArrayTexture || texture.isDataArrayTexture || texture.isCompressedArrayTexture ) {

			colorNode = textureNode( texture ).sample( uvw.xy ).depth( uvw.z );

		} else {

			colorNode = textureNode( texture );

		}

		material.colorNode = vec4( colorNode.rgb, float( getAlpha( texture ) ) );

		// Cube maps get a box sampled by direction; everything else a slice stack.
		const geometry = texture.isCubeTexture
			? createCubeGeometry( width, height, depth )
			: createSliceGeometry( texture, width, height, depth );

		super( geometry, material );

		/**
		 * The texture to visualize.
		 *
		 * @type {Texture}
		 */
		this.texture = texture;

		this.type = 'TextureHelper';

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();

	}

}
// Returns how many 2D images make up the texture: six faces for a cube map,
// the layer/slice depth for layered textures, otherwise one.
function getImageCount( texture ) {

	if ( texture.isCubeTexture ) return 6;

	const isLayered = texture.isArrayTexture || texture.isDataArrayTexture ||
		texture.isCompressedArrayTexture || texture.isData3DTexture || texture.isCompressed3DTexture;

	return isLayered ? texture.image.depth : 1;

}
// Returns the per-slice opacity used when rendering the helper: layered
// textures fade their slices so inner ones stay visible (never below 0.25);
// everything else is fully opaque.
function getAlpha( texture ) {

	const isLayered = texture.isArrayTexture || texture.isDataArrayTexture ||
		texture.isCompressedArrayTexture || texture.isData3DTexture || texture.isCompressed3DTexture;

	if ( isLayered ) return Math.max( 1 / texture.image.depth, 0.25 );

	return 1;

}
// Builds a box whose 'uvw' attribute stores the normalized vertex position —
// the direction vector used to sample a cube map.
function createCubeGeometry( width, height, depth ) {

	const geometry = new BoxGeometry( width, height, depth );

	const position = geometry.attributes.position;
	const uv = geometry.attributes.uv;
	const uvw = new BufferAttribute( new Float32Array( uv.count * 3 ), 3 );

	const direction = new Vector3();

	for ( let i = 0; i < uv.count; i ++ ) {

		direction.fromBufferAttribute( position, i ).normalize();
		uvw.setXYZ( i, direction.x, direction.y, direction.z );

	}

	geometry.deleteAttribute( 'uv' );
	geometry.setAttribute( 'uvw', uvw );

	return geometry;

}
// Builds one plane per slice of the texture, spread evenly across `depth`,
// with a 'uvw' attribute encoding the per-slice sample coordinate.
function createSliceGeometry( texture, width, height, depth ) {

	const sliceCount = getImageCount( texture );
	const isArray = texture.isArrayTexture || texture.isDataArrayTexture || texture.isCompressedArrayTexture;

	const geometries = [];

	for ( let slice = 0; slice < sliceCount; slice ++ ) {

		const geometry = new PlaneGeometry( width, height );

		if ( sliceCount > 1 ) {

			geometry.translate( 0, 0, depth * ( slice / ( sliceCount - 1 ) - 0.5 ) );

		}

		const uv = geometry.attributes.uv;
		const uvw = new BufferAttribute( new Float32Array( uv.count * 3 ), 3 );

		for ( let j = 0; j < uv.count; j ++ ) {

			const u = uv.getX( j );
			const v = texture.flipY ? uv.getY( j ) : 1 - uv.getY( j );

			// Array textures index layers with an integer; 3D textures use a
			// normalized depth coordinate.
			let w;

			if ( sliceCount === 1 ) {

				w = 1;

			} else if ( isArray ) {

				w = slice;

			} else {

				w = slice / ( sliceCount - 1 );

			}

			uvw.setXYZ( j, u, v, w );

		}

		geometry.deleteAttribute( 'uv' );
		geometry.setAttribute( 'uvw', uvw );

		geometries.push( geometry );

	}

	return mergeGeometries( geometries );

}
export { TextureHelper };

View File

@@ -0,0 +1,155 @@
import {
BufferGeometry,
Float32BufferAttribute,
LineSegments,
LineBasicMaterial,
Matrix3,
Vector3
} from 'three';
const _v1 = new Vector3();
const _v2 = new Vector3();
const _normalMatrix = new Matrix3();
/**
* Visualizes an object's vertex normals.
*
* Requires that normals have been specified in the geometry as a buffer attribute or
* have been calculated using {@link BufferGeometry#computeVertexNormals}.
* ```js
* const geometry = new THREE.BoxGeometry( 10, 10, 10, 2, 2, 2 );
* const material = new THREE.MeshStandardMaterial();
* const mesh = new THREE.Mesh( geometry, material );
* scene.add( mesh );
*
* const helper = new VertexNormalsHelper( mesh, 1, 0xff0000 );
* scene.add( helper );
* ```
*
* @augments LineSegments
* @three_import import { VertexNormalsHelper } from 'three/addons/helpers/VertexNormalsHelper.js';
*/
class VertexNormalsHelper extends LineSegments {

	/**
	 * Constructs a new vertex normals helper.
	 *
	 * @param {Object3D} object - The object for which to visualize vertex normals.
	 * @param {number} [size=1] - The helper's size.
	 * @param {number|Color|string} [color=0xff0000] - The helper's color.
	 */
	constructor( object, size = 1, color = 0xff0000 ) {

		// Two vertices (base + tip) per normal.
		const nNormals = object.geometry.attributes.normal.count;

		const geometry = new BufferGeometry();
		geometry.setAttribute( 'position', new Float32BufferAttribute( nNormals * 2 * 3, 3 ) );

		super( geometry, new LineBasicMaterial( { color, toneMapped: false } ) );

		/**
		 * The object for which to visualize vertex normals.
		 *
		 * @type {Object3D}
		 */
		this.object = object;

		/**
		 * The helper's size.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.size = size;

		this.type = 'VertexNormalsHelper';

		/**
		 * Overwritten and set to `false` since the object's world transformation
		 * is encoded in the helper's geometry data.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.matrixAutoUpdate = false;

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isVertexNormalsHelper = true;

		this.update();

	}

	/**
	 * Updates the vertex normals preview based on the object's world transform.
	 */
	update() {

		this.object.updateMatrixWorld( true );

		const matrixWorld = this.object.matrixWorld;
		_normalMatrix.getNormalMatrix( matrixWorld );

		const position = this.geometry.attributes.position;
		const objGeometry = this.object.geometry;

		if ( objGeometry ) {

			const objPos = objGeometry.attributes.position;
			const objNorm = objGeometry.attributes.normal;

			// for simplicity, ignore index and drawcalls, and render every normal

			for ( let j = 0; j < objPos.count; j ++ ) {

				// Base of the line: the vertex in world space.
				_v1.fromBufferAttribute( objPos, j ).applyMatrix4( matrixWorld );

				// Tip of the line: base offset along the transformed normal.
				_v2.fromBufferAttribute( objNorm, j ).applyMatrix3( _normalMatrix ).normalize().multiplyScalar( this.size ).add( _v1 );

				position.setXYZ( j * 2, _v1.x, _v1.y, _v1.z );
				position.setXYZ( j * 2 + 1, _v2.x, _v2.y, _v2.z );

			}

		}

		position.needsUpdate = true;

	}

	/**
	 * Frees the GPU-related resources allocated by this instance. Call this
	 * method whenever this instance is no longer used in your app.
	 */
	dispose() {

		this.geometry.dispose();
		this.material.dispose();

	}

}
export { VertexNormalsHelper };

View File

@@ -0,0 +1,133 @@
import {
BufferGeometry,
Float32BufferAttribute,
LineSegments,
LineBasicMaterial,
Vector3
} from 'three';
const _v1 = new Vector3();
const _v2 = new Vector3();
/**
* Visualizes an object's vertex tangents.
*
* Requires that tangents have been specified in the geometry as a buffer attribute or
* have been calculated using {@link BufferGeometry#computeTangents}.
* ```js
* const helper = new VertexTangentsHelper( mesh, 1, 0xff0000 );
* scene.add( helper );
* ```
*
* @augments LineSegments
* @three_import import { VertexTangentsHelper } from 'three/addons/helpers/VertexTangentsHelper.js';
*/
class VertexTangentsHelper extends LineSegments {
/**
 * Constructs a new vertex tangents helper.
 *
 * @param {Object3D} object - The object for which to visualize vertex tangents.
 * @param {number} [size=1] - The helper's size.
 * @param {number|Color|string} [color=0x00ffff] - The helper's color.
 */
constructor( object, size = 1, color = 0x00ffff ) {
const geometry = new BufferGeometry();
// one line segment (two vertices, three components each) per tangent
const nTangents = object.geometry.attributes.tangent.count;
const positions = new Float32BufferAttribute( nTangents * 2 * 3, 3 );
geometry.setAttribute( 'position', positions );
super( geometry, new LineBasicMaterial( { color, toneMapped: false } ) );
/**
 * The object for which to visualize vertex tangents.
 *
 * @type {Object3D}
 */
this.object = object;
/**
 * The helper's size.
 *
 * @type {number}
 * @default 1
 */
this.size = size;
this.type = 'VertexTangentsHelper';
/**
 * Overwritten and set to `false` since the object's world transformation
 * is encoded in the helper's geometry data.
 *
 * @type {boolean}
 * @default false
 */
this.matrixAutoUpdate = false;
this.update();
}
/**
 * Updates the vertex tangents preview based on the object's world transform.
 */
update() {
this.object.updateMatrixWorld( true );
const matrixWorld = this.object.matrixWorld;
const position = this.geometry.attributes.position;
//
const objGeometry = this.object.geometry;
const objPos = objGeometry.attributes.position;
const objTan = objGeometry.attributes.tangent;
let idx = 0;
// for simplicity, ignore index and drawcalls, and render every tangent
for ( let j = 0, jl = objPos.count; j < jl; j ++ ) {
_v1.fromBufferAttribute( objPos, j ).applyMatrix4( matrixWorld );
_v2.fromBufferAttribute( objTan, j );
// tangents are directions: transform without translation, then scale by size
_v2.transformDirection( matrixWorld ).multiplyScalar( this.size ).add( _v1 );
position.setXYZ( idx, _v1.x, _v1.y, _v1.z );
idx = idx + 1;
position.setXYZ( idx, _v2.x, _v2.y, _v2.z );
idx = idx + 1;
}
position.needsUpdate = true;
}
/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
dispose() {
this.geometry.dispose();
this.material.dispose();
}
}
export { VertexTangentsHelper };

460
node_modules/three/examples/jsm/helpers/ViewHelper.js generated vendored Normal file
View File

@@ -0,0 +1,460 @@
import {
CylinderGeometry,
CanvasTexture,
Color,
Euler,
Mesh,
MeshBasicMaterial,
Object3D,
OrthographicCamera,
Quaternion,
Raycaster,
Sprite,
SpriteMaterial,
SRGBColorSpace,
Vector2,
Vector3,
Vector4
} from 'three';
/**
* A special type of helper that visualizes the camera's transformation
* in a small viewport area as an axes helper. Such a helper is often wanted
* in 3D modeling tools or scene editors like the [three.js editor](https://threejs.org/editor).
*
* The helper allows to click on the X, Y and Z axes which animates the camera
* so it looks along the selected axis.
*
* @augments Object3D
* @three_import import { ViewHelper } from 'three/addons/helpers/ViewHelper.js';
*/
class ViewHelper extends Object3D {
/**
 * Constructs a new view helper.
 *
 * @param {Camera} camera - The camera whose transformation should be visualized.
 * @param {HTMLElement} [domElement] - The DOM element that is used to render the view.
 */
constructor( camera, domElement ) {
super();
/**
 * This flag can be used for type testing.
 *
 * @type {boolean}
 * @readonly
 * @default true
 */
this.isViewHelper = true;
/**
 * Whether the helper is currently animating or not.
 *
 * @type {boolean}
 * @readonly
 * @default false
 */
this.animating = false;
/**
 * The helper's center point.
 *
 * @type {Vector3}
 */
this.center = new Vector3();
// axis colors (x, y, z) plus the dark color shared by all negative-axis dots
const color1 = new Color( '#ff4466' );
const color2 = new Color( '#88ff44' );
const color3 = new Color( '#4488ff' );
const color4 = new Color( '#000000' );
const options = {}; // label settings, filled by setLabels()/setLabelStyle()
const interactiveObjects = []; // click targets for handleClick()
const raycaster = new Raycaster();
const mouse = new Vector2();
const dummy = new Object3D(); // scratch object used to derive animation quaternions
// dedicated camera so the helper renders in its own small viewport
const orthoCamera = new OrthographicCamera( - 2, 2, 2, - 2, 0, 4 );
orthoCamera.position.set( 0, 0, 2 );
// single cylinder geometry shared by all three axis meshes, oriented along +x
const geometry = new CylinderGeometry( 0.04, 0.04, 0.8, 5 ).rotateZ( - Math.PI / 2 ).translate( 0.4, 0, 0 );
const xAxis = new Mesh( geometry, getAxisMaterial( color1 ) );
const yAxis = new Mesh( geometry, getAxisMaterial( color2 ) );
const zAxis = new Mesh( geometry, getAxisMaterial( color3 ) );
yAxis.rotation.z = Math.PI / 2;
zAxis.rotation.y = - Math.PI / 2;
this.add( xAxis );
this.add( zAxis );
this.add( yAxis );
// sprites mark the six axis endpoints and serve as the raycast click targets
const spriteMaterial1 = getSpriteMaterial( color1 );
const spriteMaterial2 = getSpriteMaterial( color2 );
const spriteMaterial3 = getSpriteMaterial( color3 );
const spriteMaterial4 = getSpriteMaterial( color4 );
const posXAxisHelper = new Sprite( spriteMaterial1 );
const posYAxisHelper = new Sprite( spriteMaterial2 );
const posZAxisHelper = new Sprite( spriteMaterial3 );
const negXAxisHelper = new Sprite( spriteMaterial4 );
const negYAxisHelper = new Sprite( spriteMaterial4 );
const negZAxisHelper = new Sprite( spriteMaterial4 );
posXAxisHelper.position.x = 1;
posYAxisHelper.position.y = 1;
posZAxisHelper.position.z = 1;
negXAxisHelper.position.x = - 1;
negYAxisHelper.position.y = - 1;
negZAxisHelper.position.z = - 1;
negXAxisHelper.material.opacity = 0.2;
negYAxisHelper.material.opacity = 0.2;
negZAxisHelper.material.opacity = 0.2;
// type tags consumed by prepareAnimationData() after a raycast hit
posXAxisHelper.userData.type = 'posX';
posYAxisHelper.userData.type = 'posY';
posZAxisHelper.userData.type = 'posZ';
negXAxisHelper.userData.type = 'negX';
negYAxisHelper.userData.type = 'negY';
negZAxisHelper.userData.type = 'negZ';
this.add( posXAxisHelper );
this.add( posYAxisHelper );
this.add( posZAxisHelper );
this.add( negXAxisHelper );
this.add( negYAxisHelper );
this.add( negZAxisHelper );
interactiveObjects.push( posXAxisHelper );
interactiveObjects.push( posYAxisHelper );
interactiveObjects.push( posZAxisHelper );
interactiveObjects.push( negXAxisHelper );
interactiveObjects.push( negYAxisHelper );
interactiveObjects.push( negZAxisHelper );
const point = new Vector3();
const dim = 128; // edge length of the helper's viewport, in pixels
const turnRate = 2 * Math.PI; // turn rate in angles per second
/**
 * Renders the helper in a separate view in the bottom-right corner
 * of the viewport.
 *
 * @param {WebGLRenderer|WebGPURenderer} renderer - The renderer.
 */
this.render = function ( renderer ) {
// mirror the app camera's orientation so the gizmo matches the main view
this.quaternion.copy( camera.quaternion ).invert();
this.updateMatrixWorld();
// NOTE(review): 'point' is computed here but not read afterwards — possibly vestigial
point.set( 0, 0, 1 );
point.applyQuaternion( camera.quaternion );
//
const x = domElement.offsetWidth - dim;
// NOTE(review): y differs per backend — presumably WebGPU's viewport origin is top-left; confirm
const y = renderer.isWebGPURenderer ? domElement.offsetHeight - dim : 0;
renderer.clearDepth();
// save and restore the viewport so the main render is unaffected
renderer.getViewport( viewport );
renderer.setViewport( x, y, dim, dim );
renderer.render( this, orthoCamera );
renderer.setViewport( viewport.x, viewport.y, viewport.z, viewport.w );
};
// animation state shared between handleClick() and update()
const targetPosition = new Vector3();
const targetQuaternion = new Quaternion();
const q1 = new Quaternion();
const q2 = new Quaternion();
const viewport = new Vector4();
let radius = 0;
/**
 * This method should be called when a click or pointer event
 * has happened in the app.
 *
 * @param {PointerEvent} event - The event to process.
 * @return {boolean} Whether an intersection with the helper has been detected or not.
 */
this.handleClick = function ( event ) {
if ( this.animating === true ) return false;
const rect = domElement.getBoundingClientRect();
// convert the pointer position into NDC relative to the helper's sub-viewport
const offsetX = rect.left + ( domElement.offsetWidth - dim );
const offsetY = rect.top + ( domElement.offsetHeight - dim );
mouse.x = ( ( event.clientX - offsetX ) / ( rect.right - offsetX ) ) * 2 - 1;
mouse.y = - ( ( event.clientY - offsetY ) / ( rect.bottom - offsetY ) ) * 2 + 1;
raycaster.setFromCamera( mouse, orthoCamera );
const intersects = raycaster.intersectObjects( interactiveObjects );
if ( intersects.length > 0 ) {
const intersection = intersects[ 0 ];
const object = intersection.object;
prepareAnimationData( object, this.center );
this.animating = true;
return true;
} else {
return false;
}
};
/**
 * Sets labels for each axis. By default, they are unlabeled.
 *
 * @param {string|undefined} labelX - The label for the x-axis.
 * @param {string|undefined} labelY - The label for the y-axis.
 * @param {string|undefined} labelZ - The label for the z-axis.
 */
this.setLabels = function ( labelX, labelY, labelZ ) {
options.labelX = labelX;
options.labelY = labelY;
options.labelZ = labelZ;
updateLabels();
};
/**
 * Sets the label style. Has no effect when the axes are unlabeled.
 *
 * @param {string} [font='24px Arial'] - The label font.
 * @param {string} [color='#000000'] - The label color.
 * @param {number} [radius=14] - The label radius.
 */
this.setLabelStyle = function ( font, color, radius ) {
options.font = font;
options.color = color;
options.radius = radius;
updateLabels();
};
/**
 * Updates the helper. This method should be called in the app's animation
 * loop.
 *
 * @param {number} delta - The delta time in seconds.
 */
this.update = function ( delta ) {
const step = delta * turnRate;
// animate position by doing a slerp and then scaling the position on the unit sphere
q1.rotateTowards( q2, step );
camera.position.set( 0, 0, 1 ).applyQuaternion( q1 ).multiplyScalar( radius ).add( this.center );
// animate orientation
camera.quaternion.rotateTowards( targetQuaternion, step );
// rotateTowards snaps to the target once close enough, so the angle reaches exactly 0
if ( q1.angleTo( q2 ) === 0 ) {
this.animating = false;
}
};
/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
this.dispose = function () {
geometry.dispose();
xAxis.material.dispose();
yAxis.material.dispose();
zAxis.material.dispose();
posXAxisHelper.material.map.dispose();
posYAxisHelper.material.map.dispose();
posZAxisHelper.material.map.dispose();
negXAxisHelper.material.map.dispose();
negYAxisHelper.material.map.dispose();
negZAxisHelper.material.map.dispose();
posXAxisHelper.material.dispose();
posYAxisHelper.material.dispose();
posZAxisHelper.material.dispose();
negXAxisHelper.material.dispose();
negYAxisHelper.material.dispose();
negZAxisHelper.material.dispose();
};
// Computes the start (q1) and end (q2) orientations plus the target camera
// quaternion for the fly-to animation triggered by clicking an axis sprite.
function prepareAnimationData( object, focusPoint ) {
switch ( object.userData.type ) {
case 'posX':
targetPosition.set( 1, 0, 0 );
targetQuaternion.setFromEuler( new Euler( 0, Math.PI * 0.5, 0 ) );
break;
case 'posY':
targetPosition.set( 0, 1, 0 );
targetQuaternion.setFromEuler( new Euler( - Math.PI * 0.5, 0, 0 ) );
break;
case 'posZ':
targetPosition.set( 0, 0, 1 );
targetQuaternion.setFromEuler( new Euler() );
break;
case 'negX':
targetPosition.set( - 1, 0, 0 );
targetQuaternion.setFromEuler( new Euler( 0, - Math.PI * 0.5, 0 ) );
break;
case 'negY':
targetPosition.set( 0, - 1, 0 );
targetQuaternion.setFromEuler( new Euler( Math.PI * 0.5, 0, 0 ) );
break;
case 'negZ':
targetPosition.set( 0, 0, - 1 );
targetQuaternion.setFromEuler( new Euler( 0, Math.PI, 0 ) );
break;
default:
console.error( 'ViewHelper: Invalid axis.' );
}
//
// keep the camera's current distance to the focus point while changing axis
radius = camera.position.distanceTo( focusPoint );
targetPosition.multiplyScalar( radius ).add( focusPoint );
dummy.position.copy( focusPoint );
dummy.lookAt( camera.position );
q1.copy( dummy.quaternion );
dummy.lookAt( targetPosition );
q2.copy( dummy.quaternion );
}
function getAxisMaterial( color ) {
return new MeshBasicMaterial( { color: color, toneMapped: false } );
}
// Feature-detects OffscreenCanvas with a usable 2D context.
function useOffscreenCanvas() {
let result = false;
try {
// this check has been adapted from WebGLTextures
result = typeof OffscreenCanvas !== 'undefined' && ( new OffscreenCanvas( 1, 1 ).getContext( '2d' ) ) !== null;
} catch ( err ) {
// Ignore any errors
}
return result;
}
// Creates an OffscreenCanvas when available, otherwise a DOM canvas element.
function createCanvas( width, height ) {
let canvas;
if ( useOffscreenCanvas() ) {
canvas = new OffscreenCanvas( width, height );
} else {
canvas = document.createElement( 'canvas' );
canvas.width = width;
canvas.height = height;
}
return canvas;
}
// Draws a colored dot (optionally labeled) into a canvas texture and wraps
// it in a sprite material for the axis endpoint sprites.
function getSpriteMaterial( color, text ) {
const { font = '24px Arial', color: labelColor = '#000000', radius = 14 } = options;
const canvas = createCanvas( 64, 64 );
const context = canvas.getContext( '2d' );
context.beginPath();
context.arc( 32, 32, radius, 0, 2 * Math.PI );
context.closePath();
context.fillStyle = color.getStyle();
context.fill();
if ( text ) {
context.font = font;
context.textAlign = 'center';
context.fillStyle = labelColor;
context.fillText( text, 32, 41 );
}
const texture = new CanvasTexture( canvas );
texture.colorSpace = SRGBColorSpace;
return new SpriteMaterial( { map: texture, toneMapped: false } );
}
// Rebuilds the positive-axis sprite materials after label settings change,
// disposing of the previous textures and materials first.
function updateLabels() {
posXAxisHelper.material.map.dispose();
posYAxisHelper.material.map.dispose();
posZAxisHelper.material.map.dispose();
posXAxisHelper.material.dispose();
posYAxisHelper.material.dispose();
posZAxisHelper.material.dispose();
posXAxisHelper.material = getSpriteMaterial( color1, options.labelX );
posYAxisHelper.material = getSpriteMaterial( color2, options.labelY );
posZAxisHelper.material = getSpriteMaterial( color3, options.labelZ );
}
}
}
export { ViewHelper };

471
node_modules/three/examples/jsm/inspector/Inspector.js generated vendored Normal file
View File

@@ -0,0 +1,471 @@
import { RendererInspector } from './RendererInspector.js';
import { Profiler } from './ui/Profiler.js';
import { Performance } from './tabs/Performance.js';
import { Console } from './tabs/Console.js';
import { Parameters } from './tabs/Parameters.js';
import { Viewer } from './tabs/Viewer.js';
import { setText, splitPath, splitCamelCase } from './ui/utils.js';
import { QuadMesh, NodeMaterial, CanvasTarget, setConsoleFunction, REVISION, NoToneMapping } from 'three/webgpu';
import { renderOutput, vec2, vec3, vec4, Fn, screenUV, step, OnMaterialUpdate, uniform } from 'three/tsl';
/**
 * TSL function that corrects a UV coordinate for a texture's aspect ratio.
 * Returns vec3( correctedUV, inBounds ) where inBounds is 1 inside the unit
 * square and 0 outside.
 */
const aspectRatioUV = /*@__PURE__*/ Fn( ( [ uv, textureNode ] ) => {

	// Width/height ratio of the previewed texture, refreshed on material update.
	const aspect = uniform( 0 );

	OnMaterialUpdate( () => {

		const texture = textureNode.value;
		aspect.value = texture.width / texture.height;

	} );

	// Recenter around the origin, compress x by the aspect ratio, shift back into [ 0, 1 ].
	const offset = uv.sub( 0.5 );
	const finalUV = vec2( offset.x.div( aspect ), offset.y ).add( 0.5 );

	// Evaluates to 1 when finalUV lies inside the unit square, 0 otherwise.
	const inBounds = step( 0.0, finalUV.x )
		.mul( step( finalUV.x, 1.0 ) )
		.mul( step( 0.0, finalUV.y ) )
		.mul( step( finalUV.y, 1.0 ) );

	return vec3( finalUV, inBounds );

} );
/**
 * Default UI implementation of the renderer inspector. Collects per-frame
 * render/compute statistics from {@link RendererInspector} and displays them
 * in a dockable profiler panel with Performance, Console, Parameters and
 * Viewer tabs.
 */
class Inspector extends RendererInspector {
constructor() {
super();
// init profiler
const profiler = new Profiler();
const parameters = new Parameters( {
builtin: true,
icon: '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M14 6m-2 0a2 2 0 1 0 4 0a2 2 0 1 0 -4 0" /><path d="M4 6l8 0" /><path d="M16 6l4 0" /><path d="M8 12m-2 0a2 2 0 1 0 4 0a2 2 0 1 0 -4 0" /><path d="M4 12l2 0" /><path d="M10 12l10 0" /><path d="M17 18m-2 0a2 2 0 1 0 4 0a2 2 0 1 0 -4 0" /><path d="M4 18l11 0" /><path d="M19 18l1 0" /></svg>'
} );
// Parameters and Viewer tabs stay hidden until they are actually used.
parameters.hide();
profiler.addTab( parameters );
const viewer = new Viewer();
viewer.hide();
profiler.addTab( viewer );
const performance = new Performance();
profiler.addTab( performance );
const consoleTab = new Console();
profiler.addTab( consoleTab );
profiler.loadLayout();
if ( ! profiler.activeTabId ) {
profiler.setActiveTab( performance.id );
}
// per-call-id aggregated statistics, keyed by cid
this.statsData = new Map();
// lazily created preview canvases for inspected nodes, keyed by node
this.canvasNodes = new Map();
this.profiler = profiler;
this.performance = performance;
this.console = consoleTab;
this.parameters = parameters;
this.viewer = viewer;
// tracks messages already emitted via resolveConsoleOnce()
this.once = {};
// throttling state: text refreshes every 0.25s, graph every 0.02s
this.displayCycle = {
text: {
needsUpdate: false,
duration: .25,
time: 0
},
graph: {
needsUpdate: false,
duration: .02,
time: 0
}
};
}
// Root DOM element of the profiler UI.
get domElement() {
return this.profiler.domElement;
}
// Forwards a console message, but only the first time this exact
// type+message combination is seen.
resolveConsoleOnce( type, message ) {
const key = type + message;
if ( this.once[ key ] !== true ) {
this.resolveConsole( type, message );
this.once[ key ] = true;
}
}
// Mirrors a message into both the Console tab and the browser console.
resolveConsole( type, message ) {
switch ( type ) {
case 'log':
this.console.addMessage( 'info', message );
console.log( message );
break;
case 'warn':
this.console.addMessage( 'warn', message );
console.warn( message );
break;
case 'error':
this.console.addMessage( 'error', message );
console.error( message );
break;
}
}
// Logs the renderer signature and attaches the profiler UI next to the
// renderer's canvas if it is not in the DOM yet.
init() {
const renderer = this.getRenderer();
let sign = `THREE.WebGPURenderer: ${ REVISION } [ "`;
if ( renderer.backend.isWebGPUBackend ) {
sign += 'WebGPU';
} else if ( renderer.backend.isWebGLBackend ) {
sign += 'WebGL2';
}
sign += '" ]';
this.console.addMessage( 'info', sign );
//
if ( renderer.inspector.domElement.parentElement === null && renderer.domElement.parentElement !== null ) {
renderer.domElement.parentElement.appendChild( renderer.inspector.domElement );
}
}
// Hooks the inspector into the renderer and enables GPU timestamp tracking.
setRenderer( renderer ) {
super.setRenderer( renderer );
if ( renderer !== null ) {
setConsoleFunction( this.resolveConsole.bind( this ) );
if ( this.isAvailable ) {
renderer.backend.trackTimestamp = true;
renderer.init().then( () => {
if ( renderer.hasFeature( 'timestamp-query' ) !== true ) {
this.console.addMessage( 'error', 'THREE.Inspector: GPU Timestamp Queries not available.' );
}
} );
}
}
return this;
}
// Creates a named parameter group, revealing the Parameters tab on first use.
createParameters( name ) {
if ( this.parameters.isVisible === false ) {
this.parameters.show();
if ( this.parameters.isDetached === false ) {
this.profiler.setActiveTab( this.parameters.id );
}
}
return this.parameters.createGroup( name );
}
// Returns (creating on demand) the aggregation record for a call id.
getStatsData( cid ) {
let data = this.statsData.get( cid );
if ( data === undefined ) {
data = {};
this.statsData.set( cid, data );
}
return data;
}
// Folds one stats node into its per-call aggregation record: keeps a rolling
// history, recomputes cpu/gpu averages and accumulates child totals.
resolveStats( stats ) {
const data = this.getStatsData( stats.cid );
if ( data.initialized !== true ) {
data.cpu = stats.cpu;
data.gpu = stats.gpu;
data.stats = [];
data.initialized = true;
}
// store stats
if ( data.stats.length > this.maxFrames ) {
data.stats.shift();
}
data.stats.push( stats );
// compute averages
data.cpu = this.getAverageDeltaTime( data, 'cpu' );
data.gpu = this.getAverageDeltaTime( data, 'gpu' );
data.total = data.cpu + data.gpu;
// children
for ( const child of stats.children ) {
this.resolveStats( child );
const childData = this.getStatsData( child.cid );
data.cpu += childData.cpu;
data.gpu += childData.gpu;
data.total += childData.total;
}
}
// Lazily builds the preview canvas, quad mesh and material used to display
// an inspected node in the Viewer tab; results are cached per node.
getCanvasDataByNode( node ) {
let canvasData = this.canvasNodes.get( node );
if ( canvasData === undefined ) {
const renderer = this.getRenderer();
const canvas = document.createElement( 'canvas' );
const canvasTarget = new CanvasTarget( canvas );
canvasTarget.setPixelRatio( window.devicePixelRatio );
canvasTarget.setSize( 140, 140 );
const id = node.id;
const { path, name } = splitPath( splitCamelCase( node.getName() || '(unnamed)' ) );
// sample the node with aspect-corrected UVs, masked outside the unit square
const target = node.context( { getUV: ( textureNode ) => {
const uvData = aspectRatioUV( screenUV, textureNode );
const correctedUV = uvData.xy;
const mask = uvData.z;
return correctedUV.mul( mask );
} } );
let output = vec4( vec3( target ), 1 );
output = renderOutput( output, NoToneMapping, renderer.outputColorSpace );
output = output.context( { inspector: true } );
const material = new NodeMaterial();
material.outputNode = output;
const quad = new QuadMesh( material );
quad.name = 'Viewer - ' + name;
canvasData = {
id,
name,
path,
node,
quad,
canvasTarget,
material
};
this.canvasNodes.set( node, canvasData );
}
return canvasData;
}
// Updates the Viewer tab with previews of all nodes inspected this frame.
// Only supported with the WebGPU backend; warns once otherwise.
resolveViewer() {
const nodes = this.currentNodes;
const renderer = this.getRenderer();
if ( nodes.length === 0 ) return;
if ( ! renderer.backend.isWebGPUBackend ) {
this.resolveConsoleOnce( 'warn', 'Inspector: Viewer is only available with WebGPU.' );
return;
}
//
if ( ! this.viewer.isVisible ) {
this.viewer.show();
}
const canvasDataList = nodes.map( node => this.getCanvasDataByNode( node ) );
this.viewer.update( renderer, canvasDataList );
}
// Averages the given property over the most recent samples; `frames`
// defaults to the current fps, i.e. roughly one second of history.
getAverageDeltaTime( statsData, property, frames = this.fps ) {
const statsArray = statsData.stats;
let sum = 0;
let count = 0;
for ( let i = statsArray.length - 1; i >= 0 && count < frames; i -- ) {
const stats = statsArray[ i ];
const value = stats[ property ];
if ( value > 0 ) {
// ignore invalid values
sum += value;
count ++;
}
}
return count > 0 ? sum / count : 0;
}
// Finalizes a frame's statistics once its successor exists (needed for
// deltaTime) and pushes the results to the Performance tab, throttled by
// the text/graph display cycles.
resolveFrame( frame ) {
const nextFrame = this.getFrameById( frame.frameId + 1 );
if ( ! nextFrame ) return;
frame.cpu = 0;
frame.gpu = 0;
frame.total = 0;
for ( const stats of frame.children ) {
this.resolveStats( stats );
const data = this.getStatsData( stats.cid );
frame.cpu += data.cpu;
frame.gpu += data.gpu;
frame.total += data.total;
}
// improve stats using next frame
frame.deltaTime = nextFrame.startTime - frame.startTime;
frame.miscellaneous = frame.deltaTime - frame.total;
if ( frame.miscellaneous < 0 ) {
// Frame desync, probably due to async GPU timing.
frame.miscellaneous = 0;
}
//
this.updateCycle( this.displayCycle.text );
this.updateCycle( this.displayCycle.graph );
if ( this.displayCycle.text.needsUpdate ) {
setText( 'fps-counter', this.fps.toFixed() );
this.performance.updateText( this, frame );
}
if ( this.displayCycle.graph.needsUpdate ) {
this.performance.updateGraph( this, frame );
}
this.displayCycle.text.needsUpdate = false;
this.displayCycle.graph.needsUpdate = false;
}
// Advances a display cycle's timer and flags it for update once its
// duration has elapsed.
updateCycle( cycle ) {
cycle.time += this.nodeFrame.deltaTime;
if ( cycle.time >= cycle.duration ) {
cycle.needsUpdate = true;
cycle.time = 0;
}
}
}
export { Inspector };

View File

@@ -0,0 +1,425 @@
import { InspectorBase, TimestampQuery, warnOnce } from 'three/webgpu';
class ObjectStats {

	/**
	 * Base container for profiling data of a single inspected call.
	 *
	 * @param {string} uid - Unique id in the form `<callId>:f<frameNumber>`.
	 * @param {string} name - Display name of the profiled object.
	 */
	constructor( uid, name ) {

		// Strip the trailing ":f<frame>" suffix to obtain the call id.
		const [ , callId ] = uid.match( /^(.*):f(\d+)$/ );

		this.uid = uid;
		this.cid = callId; // call id
		this.name = name;

		// Timing data, filled in by the inspector.
		this.timestamp = 0;
		this.cpu = 0;
		this.gpu = 0;
		this.fps = 0;

		// Tree of nested render/compute calls.
		this.children = [];
		this.parent = null;

	}

}
class RenderStats extends ObjectStats {

	/**
	 * Profiling data for a single render call.
	 *
	 * @param {string} uid - Unique id of the render call.
	 * @param {Object3D} scene - The rendered scene or quad mesh.
	 * @param {Camera} camera - The camera used for the render call.
	 * @param {?RenderTarget} renderTarget - The active render target, if any.
	 */
	constructor( uid, scene, camera, renderTarget ) {

		// Prefer the object's own name; otherwise derive a label from its type.
		const label = scene.name !== ''
			? scene.name
			: scene.isScene ? 'Scene' : scene.isQuadMesh ? 'QuadMesh' : '';

		super( uid, label );

		this.scene = scene;
		this.camera = camera;
		this.renderTarget = renderTarget;

		// Type flag for distinguishing render stats from compute stats.
		this.isRenderStats = true;

	}

}
/**
 * Profiling data for a single compute call.
 */
class ComputeStats extends ObjectStats {
constructor( uid, computeNode ) {
super( uid, computeNode.name );
this.computeNode = computeNode; // the profiled compute node
this.isComputeStats = true; // type flag, mirrors RenderStats.isRenderStats
}
}
/**
 * Base inspector that records per-frame render and compute statistics,
 * resolves GPU timestamps asynchronously and keeps a bounded history of
 * frames. UI concerns are left to subclasses (see Inspector), which override
 * resolveViewer() and resolveFrame().
 */
export class RendererInspector extends InspectorBase {
constructor() {
super();
// frame/render currently being recorded (non-null only between begin() and finish())
this.currentFrame = null;
this.currentRender = null;
this.currentNodes = null;
this.lastFrame = null;
// frame history: ordered list plus id-based lookup, capped at maxFrames
this.frames = [];
this.framesLib = {};
this.maxFrames = 512;
this._lastFinishTime = 0;
this._resolveTimestampPromise = null;
this.isRendererInspector = true;
}
// Parent scope for newly started calls: the active render, else the frame.
getParent() {
return this.currentRender || this.getFrame();
}
// Starts recording a new frame.
begin() {
this.currentFrame = this._createFrame();
this.currentRender = this.currentFrame;
this.currentNodes = [];
}
// Finalizes the current frame, stores it and updates the fps estimate.
finish() {
const now = performance.now();
const frame = this.currentFrame;
frame.finishTime = now;
// first frame has no predecessor, so its deltaTime is 0
frame.deltaTime = now - ( this._lastFinishTime > 0 ? this._lastFinishTime : now );
this.addFrame( frame );
this.fps = this._getFPS();
this.lastFrame = frame;
this.currentFrame = null;
this.currentRender = null;
this.currentNodes = null;
this._lastFinishTime = now;
}
// Estimates fps from the most recent ~1000ms of recorded frame deltas.
_getFPS() {
let frameSum = 0;
let timeSum = 0;
for ( let i = this.frames.length - 1; i >= 0; i -- ) {
const frame = this.frames[ i ];
frameSum ++;
timeSum += frame.deltaTime;
if ( timeSum >= 1000 ) break;
}
return ( frameSum * 1000 ) / timeSum;
}
// Creates an empty frame record for the current animation frame.
_createFrame() {
return {
frameId: this.nodeFrame.frameId,
resolvedCompute: false,
resolvedRender: false,
deltaTime: 0,
startTime: performance.now(),
finishTime: 0,
miscellaneous: 0,
children: [],
renders: [],
computes: []
};
}
// The frame being recorded, or the most recently finished one.
getFrame() {
return this.currentFrame || this.lastFrame;
}
getFrameById( frameId ) {
return this.framesLib[ frameId ] || null;
}
// Hooks for subclasses (see Inspector).
resolveViewer() { }
resolveFrame( /*frame*/ ) { }
// Resolves pending GPU timestamps on the next animation frame and marks
// frames as fully resolved once both compute and render timings are in.
// Concurrent calls share a single in-flight promise.
async resolveTimestamp() {
if ( this._resolveTimestampPromise !== null ) {
return this._resolveTimestampPromise;
}
this._resolveTimestampPromise = new Promise( ( resolve ) => {
requestAnimationFrame( async () => {
const renderer = this.getRenderer();
await renderer.resolveTimestampsAsync( TimestampQuery.COMPUTE );
await renderer.resolveTimestampsAsync( TimestampQuery.RENDER );
const computeFrames = renderer.backend.getTimestampFrames( TimestampQuery.COMPUTE );
const renderFrames = renderer.backend.getTimestampFrames( TimestampQuery.RENDER );
const frameIds = [ ...new Set( [ ...computeFrames, ...renderFrames ] ) ];
for ( const frameId of frameIds ) {
const frame = this.getFrameById( frameId );
if ( frame !== null ) {
// resolve compute timestamps
if ( frame.resolvedCompute === false ) {
if ( frame.computes.length > 0 ) {
if ( computeFrames.includes( frameId ) ) {
for ( const stats of frame.computes ) {
if ( renderer.backend.hasTimestamp( stats.uid ) ) {
stats.gpu = renderer.backend.getTimestamp( stats.uid );
} else {
stats.gpu = 0;
stats.gpuNotAvailable = true;
}
}
frame.resolvedCompute = true;
}
} else {
// nothing to resolve for frames without compute calls
frame.resolvedCompute = true;
}
}
// resolve render timestamps
if ( frame.resolvedRender === false ) {
if ( frame.renders.length > 0 ) {
if ( renderFrames.includes( frameId ) ) {
for ( const stats of frame.renders ) {
if ( renderer.backend.hasTimestamp( stats.uid ) ) {
stats.gpu = renderer.backend.getTimestamp( stats.uid );
} else {
stats.gpu = 0;
stats.gpuNotAvailable = true;
}
}
frame.resolvedRender = true;
}
} else {
// nothing to resolve for frames without render calls
frame.resolvedRender = true;
}
}
if ( frame.resolvedCompute === true && frame.resolvedRender === true ) {
this.resolveFrame( frame );
}
}
}
this._resolveTimestampPromise = null;
resolve();
} );
} );
return this._resolveTimestampPromise;
}
get isAvailable() {
const renderer = this.getRenderer();
return renderer !== null;
}
// Stores a finished frame, evicting the oldest one when at capacity, and
// kicks off viewer/timestamp resolution.
addFrame( frame ) {
// Limit to max frames.
if ( this.frames.length >= this.maxFrames ) {
const removedFrame = this.frames.shift();
delete this.framesLib[ removedFrame.frameId ];
}
this.frames.push( frame );
this.framesLib[ frame.frameId ] = frame;
if ( this.isAvailable ) {
this.resolveViewer();
this.resolveTimestamp();
}
}
// Registers a node for inspection; only valid inside a frame scope.
inspect( node ) {
const currentNodes = this.currentNodes;
if ( currentNodes !== null ) {
currentNodes.push( node );
} else {
warnOnce( 'RendererInspector: Unable to inspect node outside of frame scope. Use "renderer.setAnimationLoop()".' );
}
}
// Opens a compute call scope, nesting it under the active compute, render
// or frame.
beginCompute( uid, computeNode ) {
const frame = this.getFrame();
if ( ! frame ) return;
const currentCompute = new ComputeStats( uid, computeNode );
currentCompute.timestamp = performance.now();
// NOTE(review): this.currentCompute is never initialized in the constructor;
// it is undefined until the first beginCompute() call — confirm intended
currentCompute.parent = this.currentCompute || this.getParent();
frame.computes.push( currentCompute );
if ( this.currentRender !== null ) {
this.currentRender.children.push( currentCompute );
} else {
frame.children.push( currentCompute );
}
this.currentCompute = currentCompute;
}
// Closes the active compute scope and records its CPU time.
finishCompute() {
const frame = this.getFrame();
if ( ! frame ) return;
const currentCompute = this.currentCompute;
currentCompute.cpu = performance.now() - currentCompute.timestamp;
// pop back to the enclosing compute scope, if any
this.currentCompute = currentCompute.parent.isComputeStats ? currentCompute.parent : null;
}
// Opens a render call scope, nesting it under the active render or frame.
beginRender( uid, scene, camera, renderTarget ) {
const frame = this.getFrame();
if ( ! frame ) return;
const currentRender = new RenderStats( uid, scene, camera, renderTarget );
currentRender.timestamp = performance.now();
currentRender.parent = this.getParent();
frame.renders.push( currentRender );
if ( this.currentRender !== null ) {
this.currentRender.children.push( currentRender );
} else {
frame.children.push( currentRender );
}
this.currentRender = currentRender;
}
// Closes the active render scope and records its CPU time.
finishRender() {
const frame = this.getFrame();
if ( ! frame ) return;
const currentRender = this.currentRender;
currentRender.cpu = performance.now() - currentRender.timestamp;
this.currentRender = currentRender.parent;
}
}

View File

@@ -0,0 +1,204 @@
import { Tab } from '../ui/Tab.js';
/**
 * Inspector tab that displays log messages with per-type (info/warn/error)
 * checkbox filters and a case-insensitive free-text filter.
 */
class Console extends Tab {

	/**
	 * @param {Object} [options={}] - Options forwarded to the base Tab.
	 */
	constructor( options = {} ) {

		super( 'Console', options );

		// Per-type visibility toggles; all message types shown by default.
		this.filters = { info: true, warn: true, error: true };

		// Current free-text filter, stored lower-cased for matching.
		this.filterText = '';

		this.buildHeader();

		// Container holding every .log-message element.
		this.logContainer = document.createElement( 'div' );
		this.logContainer.id = 'console-log';
		this.content.appendChild( this.logContainer );

	}

	/**
	 * Builds the header row: a text input for free-text filtering plus one
	 * checkbox per message type. Appends the header to the tab content.
	 */
	buildHeader() {

		const header = document.createElement( 'div' );
		header.className = 'console-header';

		const filterInput = document.createElement( 'input' );
		filterInput.type = 'text';
		filterInput.className = 'console-filter-input';
		filterInput.placeholder = 'Filter...';
		filterInput.addEventListener( 'input', ( e ) => {

			this.filterText = e.target.value.toLowerCase();
			this.applyFilters();

		} );

		const filtersGroup = document.createElement( 'div' );
		filtersGroup.className = 'console-filters-group';

		Object.keys( this.filters ).forEach( type => {

			const label = document.createElement( 'label' );
			label.className = 'custom-checkbox';
			// info uses the primary text color; warn/error use the yellow/red theme vars.
			label.style.color = `var(--${type === 'info' ? 'text-primary' : 'color-' + ( type === 'warn' ? 'yellow' : 'red' )})`;

			const checkbox = document.createElement( 'input' );
			checkbox.type = 'checkbox';
			checkbox.checked = this.filters[ type ];
			checkbox.dataset.type = type;

			const checkmark = document.createElement( 'span' );
			checkmark.className = 'checkmark';

			label.appendChild( checkbox );
			label.appendChild( checkmark );
			label.append( type.charAt( 0 ).toUpperCase() + type.slice( 1 ) );

			filtersGroup.appendChild( label );

		} );

		// Single delegated listener serves all the type checkboxes.
		filtersGroup.addEventListener( 'change', ( e ) => {

			const type = e.target.dataset.type;

			if ( type in this.filters ) {

				this.filters[ type ] = e.target.checked;
				this.applyFilters();

			}

		} );

		header.appendChild( filterInput );
		header.appendChild( filtersGroup );
		this.content.appendChild( header );

	}

	/**
	 * Re-applies the type and text filters to every message already in the
	 * log by toggling the 'hidden' class. A message is visible only when
	 * its type is enabled AND its raw text contains the filter text.
	 */
	applyFilters() {

		const messages = this.logContainer.querySelectorAll( '.log-message' );

		messages.forEach( msg => {

			const type = msg.dataset.type;
			const text = msg.dataset.rawText.toLowerCase();

			const showByType = this.filters[ type ];
			const showByText = text.includes( this.filterText );

			msg.classList.toggle( 'hidden', ! ( showByType && showByText ) );

		} );

	}

	/**
	 * Picks an emoji icon for a message. The sub-type (derived from the
	 * message prefix, e.g. 'tsl' or 'webgpurenderer') takes precedence over
	 * the generic type; plain info messages get an empty string.
	 *
	 * @param {string} type - 'info', 'warn' or 'error'.
	 * @param {string} subType - Lower-cased message prefix.
	 * @returns {string|undefined} Icon text; undefined when nothing matches.
	 */
	_getIcon( type, subType ) {

		let icon;

		if ( subType === 'tip' ) {

			icon = '💭';

		} else if ( subType === 'tsl' ) {

			icon = '✨';

		} else if ( subType === 'webgpurenderer' ) {

			icon = '🎨';

		} else if ( type === 'warn' ) {

			icon = '⚠️';

		} else if ( type === 'error' ) {

			icon = '🔴';

		} else if ( type === 'info' ) {

			icon = '';

		}

		return icon;

	}

	/**
	 * Converts a raw message string into a DOM fragment: an optional
	 * "Module:"-style prefix becomes an icon plus a .log-prefix span, and
	 * quoted/backticked substrings become .log-code spans.
	 *
	 * @param {string} type - 'info', 'warn' or 'error' (used for icon lookup).
	 * @param {string} text - Raw message text.
	 * @returns {DocumentFragment} Formatted message content.
	 */
	_formatMessage( type, text ) {

		const fragment = document.createDocumentFragment();

		// Matches a leading dotted identifier followed by ':' and one whitespace
		// character, e.g. 'THREE.WebGPURenderer: '.
		const prefixMatch = text.match( /^([\w\.]+:\s)/ );

		let content = text;

		if ( prefixMatch ) {

			const fullPrefix = prefixMatch[ 0 ];
			// slice( 0, - 2 ) drops the trailing ':' + whitespace before splitting on dots.
			const parts = fullPrefix.slice( 0, - 2 ).split( '.' );
			// Keep only the last dotted segment (e.g. 'THREE.TSL' -> 'TSL:').
			const shortPrefix = ( parts.length > 1 ? parts[ parts.length - 1 ] : parts[ 0 ] ) + ':';

			const icon = this._getIcon( type, shortPrefix.split( ':' )[ 0 ].toLowerCase() );
			fragment.appendChild( document.createTextNode( icon + ' ' ) );

			const prefixSpan = document.createElement( 'span' );
			prefixSpan.className = 'log-prefix';
			prefixSpan.textContent = shortPrefix;
			fragment.appendChild( prefixSpan );

			content = text.substring( fullPrefix.length );

		}

		// Split on quoted spans (kept in the result thanks to the capture group),
		// trimming each piece and dropping empties.
		const parts = content.split( /(".*?"|'.*?'|`.*?`)/g ).map( p => p.trim() ).filter( Boolean );

		parts.forEach( ( part, index ) => {

			if ( /^("|'|`)/.test( part ) ) {

				// Quoted span: render without the surrounding quotes as code.
				const codeSpan = document.createElement( 'span' );
				codeSpan.className = 'log-code';
				codeSpan.textContent = part.slice( 1, - 1 );
				fragment.appendChild( codeSpan );

			} else {

				if ( index > 0 ) part = ' ' + part; // add space before parts except the first
				if ( index < parts.length - 1 ) part += ' '; // add space between parts

				fragment.appendChild( document.createTextNode( part ) );

			}

		} );

		return fragment;

	}

	/**
	 * Appends a message to the log, applies the current filters to it,
	 * scrolls to the bottom and caps the log at 200 entries (oldest first).
	 *
	 * @param {string} type - 'info', 'warn' or 'error'.
	 * @param {string} text - Raw message text (kept in data-raw-text for filtering).
	 */
	addMessage( type, text ) {

		const msg = document.createElement( 'div' );
		msg.className = `log-message ${type}`;
		msg.dataset.type = type;
		msg.dataset.rawText = text;

		msg.appendChild( this._formatMessage( type, text ) );

		// Apply current filters immediately so a new message respects them.
		const showByType = this.filters[ type ];
		const showByText = text.toLowerCase().includes( this.filterText );
		msg.classList.toggle( 'hidden', ! ( showByType && showByText ) );

		this.logContainer.appendChild( msg );
		this.logContainer.scrollTop = this.logContainer.scrollHeight;

		// Keep at most 200 messages by evicting the oldest.
		if ( this.logContainer.children.length > 200 ) {

			this.logContainer.removeChild( this.logContainer.firstChild );

		}

	}

}
export { Console };

View File

@@ -0,0 +1,332 @@
import { Tab } from '../ui/Tab.js';
import { List } from '../ui/List.js';
import { Item } from '../ui/Item.js';
import { createValueSpan } from '../ui/utils.js';
import { ValueNumber, ValueSlider, ValueSelect, ValueCheckbox, ValueColor, ValueButton } from '../ui/Values.js';
/**
 * A group of editable parameters inside the Parameters tab, with a
 * lil.gui-style API: add(), addFolder(), addColor(), addSlider(), etc.
 * Each add* method creates a value editor, binds it to object[ property ]
 * and appends a row to the group's item list.
 *
 * Refactor note: the previously duplicated row-construction sequence
 * (change listener + description cell + Item + actionable row + helpers)
 * is factored into the private _bindEditor()/_buildRow() helpers; the
 * public interface is unchanged.
 */
class ParametersGroup {

	/**
	 * @param {Object} parameters - Owning Parameters tab (forwarded to sub-folders).
	 * @param {string} name - Display name of the group row.
	 */
	constructor( parameters, name ) {

		this.parameters = parameters;
		this.name = name;

		this.paramList = new Item( name );

	}

	/**
	 * Collapses the group row.
	 * @returns {ParametersGroup} this, for chaining.
	 */
	close() {

		this.paramList.close();

		return this;

	}

	/**
	 * Adds an editor for object[ property ], choosing the editor type from
	 * the current value: an options object selects a dropdown, numbers
	 * become a slider (when min/max are supplied) or a number field,
	 * booleans a checkbox, functions a button.
	 *
	 * @param {Object} object - Target object.
	 * @param {string} property - Property name on the target object.
	 * @param {...*} params - Editor-specific extras (options object, or min/max/step).
	 * @returns {?Object} The created editor, or null for unsupported types.
	 */
	add( object, property, ...params ) {

		const value = object[ property ];
		const type = typeof value;

		let item = null;

		if ( typeof params[ 0 ] === 'object' ) {

			item = this.addSelect( object, property, params[ 0 ] );

		} else if ( type === 'number' ) {

			if ( params.length >= 2 ) {

				item = this.addSlider( object, property, ...params );

			} else {

				item = this.addNumber( object, property, ...params );

			}

		} else if ( type === 'boolean' ) {

			item = this.addBoolean( object, property );

		} else if ( type === 'function' ) {

			item = this.addButton( object, property, ...params );

		}

		return item;

	}

	/**
	 * Installs the chainable name()/listen() helpers on an editor.
	 * name( text ) relabels the row's description cell; listen() polls
	 * object[ property ] every animation frame and writes external changes
	 * back into the editor. The polling loop is never cancelled.
	 */
	_addParameter( object, property, editor, subItem ) {

		editor.name = ( name ) => {

			subItem.data[ 0 ].textContent = name;

			return editor;

		};

		editor.listen = () => {

			const update = () => {

				const value = editor.getValue();
				const propertyValue = object[ property ];

				if ( value !== propertyValue ) {

					editor.setValue( propertyValue );

				}

				requestAnimationFrame( update );

			};

			requestAnimationFrame( update );

			return editor;

		};

	}

	/**
	 * Wires the editor's change event so edits are written back to
	 * object[ property ].
	 */
	_bindEditor( object, property, editor ) {

		editor.addEventListener( 'change', ( { value } ) => {

			object[ property ] = value;

		} );

	}

	/**
	 * Shared row construction for labeled editors: description cell + Item,
	 * appended to the group, row marked actionable, name()/listen() helpers
	 * installed.
	 *
	 * @returns {HTMLElement} The row element, for extra per-editor wiring.
	 */
	_buildRow( object, property, editor ) {

		const description = createValueSpan();
		description.textContent = property;

		const subItem = new Item( description, editor.domElement );
		this.paramList.add( subItem );

		const itemRow = subItem.domElement.firstChild;
		itemRow.classList.add( 'actionable' );

		this._addParameter( object, property, editor, subItem );

		return itemRow;

	}

	/**
	 * Creates a nested group row inside this one.
	 * @param {string} name - Display name of the sub-folder.
	 * @returns {ParametersGroup} The new sub-group.
	 */
	addFolder( name ) {

		const group = new ParametersGroup( this.parameters, name );
		this.paramList.add( group.paramList );

		return group;

	}

	/**
	 * Adds a checkbox editor for a boolean property.
	 * @returns {Object} The ValueCheckbox editor.
	 */
	addBoolean( object, property ) {

		const editor = new ValueCheckbox( { value: object[ property ] } );
		this._bindEditor( object, property, editor );

		const itemRow = this._buildRow( object, property, editor );

		// Toggle the checkbox when clicking anywhere on the row, except when
		// the click already landed on the checkbox label itself.
		itemRow.addEventListener( 'click', ( e ) => {

			if ( e.target.closest( 'label' ) ) return;

			const checkbox = itemRow.querySelector( 'input[type="checkbox"]' );

			if ( checkbox ) {

				checkbox.checked = ! checkbox.checked;
				checkbox.dispatchEvent( new Event( 'change' ) );

			}

		} );

		return editor;

	}

	/**
	 * Adds a dropdown editor.
	 * @param {Object} options - Map of display labels to values.
	 * @returns {Object} The ValueSelect editor.
	 */
	addSelect( object, property, options ) {

		const editor = new ValueSelect( { options, value: object[ property ] } );
		this._bindEditor( object, property, editor );
		this._buildRow( object, property, editor );

		return editor;

	}

	/**
	 * Adds a color picker editor.
	 * @returns {Object} The ValueColor editor.
	 */
	addColor( object, property ) {

		const editor = new ValueColor( { value: object[ property ] } );
		this._bindEditor( object, property, editor );
		this._buildRow( object, property, editor );

		return editor;

	}

	/**
	 * Adds a slider editor for a numeric property.
	 * @param {number} [min=0] @param {number} [max=1] @param {number} [step=0.01]
	 * @returns {Object} The ValueSlider editor.
	 */
	addSlider( object, property, min = 0, max = 1, step = 0.01 ) {

		const editor = new ValueSlider( { value: object[ property ], min, max, step } );
		this._bindEditor( object, property, editor );
		this._buildRow( object, property, editor );

		return editor;

	}

	/**
	 * Adds a plain number field, with optional min/max bounds.
	 * @returns {Object} The ValueNumber editor.
	 */
	addNumber( object, property, ...params ) {

		const [ min, max ] = params;

		const editor = new ValueNumber( { value: object[ property ], min, max } );
		this._bindEditor( object, property, editor );
		this._buildRow( object, property, editor );

		return editor;

	}

	/**
	 * Adds a button row invoking the function stored at object[ property ].
	 * Buttons have no description cell, so this does not use _buildRow();
	 * name() retargets the button label and there is no listen() helper.
	 * @returns {Object} The ValueButton editor.
	 */
	addButton( object, property ) {

		const editor = new ValueButton( { text: property, value: object[ property ] } );
		this._bindEditor( object, property, editor );

		// Single cell spanning both grid columns.
		const subItem = new Item( editor.domElement );
		subItem.itemRow.childNodes[ 0 ].style.gridColumn = '1 / -1';
		this.paramList.add( subItem );

		const itemRow = subItem.domElement.firstChild;
		itemRow.classList.add( 'actionable' );

		editor.name = ( name ) => {

			editor.domElement.childNodes[ 0 ].textContent = name;

			return editor;

		};

		return editor;

	}

}
/**
 * Tab that hosts user-editable parameter groups as a Property/Value table.
 */
class Parameters extends Tab {

	constructor( options = {} ) {

		super( 'Parameters', options );

		// Two-column table: property name on the left, value editor on the right.
		const list = new List( 'Property', 'Value' );
		list.setGridStyle( '.5fr 1fr' );

		const listElement = list.domElement;
		listElement.classList.add( 'parameters' );
		listElement.style.minWidth = '300px';

		// Wrap the table so long parameter lists scroll inside the tab.
		const wrapper = document.createElement( 'div' );
		wrapper.className = 'list-scroll-wrapper';
		wrapper.appendChild( listElement );

		this.content.appendChild( wrapper );

		this.paramList = list;

	}

	/**
	 * Creates a top-level parameter group and appends its row to the table.
	 *
	 * @param {string} name - Display name of the group.
	 * @returns {ParametersGroup} The new group.
	 */
	createGroup( name ) {

		const group = new ParametersGroup( this, name );

		this.paramList.add( group.paramList );

		return group;

	}

}
export { Parameters };

View File

@@ -0,0 +1,268 @@
import { Tab } from '../ui/Tab.js';
import { List } from '../ui/List.js';
import { Graph } from '../ui/Graph.js';
import { Item } from '../ui/Item.js';
import { createValueSpan, setText } from '../ui/utils.js';
/**
 * Inspector tab showing per-frame CPU/GPU timings as a collapsible tree
 * plus an FPS graph. Rows are keyed by the stats node's cid so they can be
 * reused and reordered across frames, and flagged for a few seconds when a
 * stats node stops reporting before being removed.
 */
class Performance extends Tab {

	constructor( options = {} ) {

		super( 'Performance', options );

		const perfList = new List( 'Name', 'CPU', 'GPU', 'Total' );
		perfList.setGridStyle( 'minmax(200px, 2fr) 80px 80px 80px' );
		perfList.domElement.style.minWidth = '600px';

		const scrollWrapper = document.createElement( 'div' );
		scrollWrapper.className = 'list-scroll-wrapper';
		scrollWrapper.appendChild( perfList.domElement );
		this.content.appendChild( scrollWrapper );

		//

		const graphContainer = document.createElement( 'div' );
		graphContainer.className = 'graph-container';

		const graph = new Graph();
		graph.addLine( 'fps', '--accent-color' );
		//graph.addLine( 'gpu', '--color-yellow' );

		graphContainer.append( graph.domElement );

		//

		/*
		const label = document.createElement( 'label' );
		label.className = 'custom-checkbox';
		const checkbox = document.createElement( 'input' );
		checkbox.type = 'checkbox';
		const checkmark = document.createElement( 'span' );
		checkmark.className = 'checkmark';
		label.appendChild( checkbox );
		label.appendChild( checkmark );
		*/

		// 'Graph Stats' row hosts the FPS graph; its last cell shows the FPS counter.
		const graphStats = new Item( 'Graph Stats', createValueSpan(), createValueSpan(), createValueSpan( 'graph-fps-counter' ) );
		perfList.add( graphStats );

		const graphItem = new Item( graphContainer );
		graphItem.itemRow.childNodes[ 0 ].style.gridColumn = '1 / -1'; // graph spans all columns
		graphStats.add( graphItem );

		//

		const frameStats = new Item( 'Frame Stats', createValueSpan(), createValueSpan(), createValueSpan() );
		perfList.add( frameStats );

		// Time not attributed to any tracked stats node (idle + untracked work).
		const miscellaneous = new Item( 'Miscellaneous & Idle', createValueSpan(), createValueSpan(), createValueSpan() );
		miscellaneous.domElement.firstChild.style.backgroundColor = '#00ff0b1a';
		miscellaneous.domElement.firstChild.classList.add( 'no-hover' );
		frameStats.add( miscellaneous );

		//

		// cid -> { item, time } for rows whose stats stopped reporting.
		this.notInUse = new Map();

		this.frameStats = frameStats;
		this.graphStats = graphStats;
		this.graph = graph;
		this.miscellaneous = miscellaneous;

		//

		this.currentRender = null;

		// Parent Item that rows are attached to while walking the stats tree.
		this.currentItem = null;

		// cid -> Item for every row updated during the current frame.
		this.frameItems = new Map();

	}

	/**
	 * Creates or updates the row for one stats node, writes its CPU/GPU/total
	 * cells, then recurses into its children with this.currentItem pointing
	 * at this row. Existing rows are re-parented/reordered to mirror the
	 * stats tree, and rows previously flagged as not-in-use are revived.
	 *
	 * @param {Object} inspector - Provides getStatsData( cid ) accumulators.
	 * @param {Object} stats - One node of the frame's stats tree.
	 */
	resolveStats( inspector, stats ) {

		const data = inspector.getStatsData( stats.cid );

		let item = data.item;

		if ( item === undefined ) {

			// First time this cid is seen: build a fresh four-cell row.
			item = new Item( createValueSpan(), createValueSpan(), createValueSpan(), createValueSpan() );

			if ( stats.name ) {

				if ( stats.isComputeStats === true ) {

					// NOTE(review): this mutates stats.name, and the display name
					// below appends ' [ Compute ]' again — looks like a double
					// suffix on creation; confirm intended display format.
					stats.name = `${ stats.name } [ Compute ]`;

				}

			} else {

				stats.name = `Unnamed ${ stats.cid }`;

			}

			item.userData.name = stats.name;

			this.currentItem.add( item );

			data.item = item;

		} else {

			item.userData.name = stats.name;

			// The node is reporting again: clear its not-in-use flag.
			if ( this.notInUse.has( stats.cid ) ) {

				item.domElement.firstElementChild.classList.remove( 'alert' );

				this.notInUse.delete( stats.cid );

			}

			// Keep row position in sync with the node's position among siblings.
			const statsIndex = stats.parent.children.indexOf( stats );

			if ( item.parent === null || item.parent.children.indexOf( item ) !== statsIndex ) {

				this.currentItem.add( item, statsIndex );

			}

		}

		let name = item.userData.name;

		if ( stats.isComputeStats ) {

			name += ' [ Compute ]';

		}

		setText( item.data[ 0 ], name );
		setText( item.data[ 1 ], data.cpu.toFixed( 2 ) );
		setText( item.data[ 2 ], stats.gpuNotAvailable === true ? '-' : data.gpu.toFixed( 2 ) );
		setText( item.data[ 3 ], data.total.toFixed( 2 ) );

		//

		// Recurse with this row as the attach point, then restore.
		const previousItem = this.currentItem;

		this.currentItem = item;

		for ( const child of stats.children ) {

			this.resolveStats( inspector, child );

		}

		this.currentItem = previousItem;

		this.frameItems.set( stats.cid, item );

	}

	/**
	 * Pushes the current FPS sample into the graph and redraws it.
	 */
	updateGraph( inspector/*, frame*/ ) {

		this.graph.addPoint( 'fps', inspector.fps );
		this.graph.update();

	}

	/**
	 * Flags a row whose stats stopped reporting: marks it with the 'alert'
	 * class and records the time so updateNotInUse() can count it down.
	 */
	addNotInUse( cid, item ) {

		item.domElement.firstElementChild.classList.add( 'alert' );

		this.notInUse.set( cid, {
			item,
			time: performance.now()
		} );

		this.updateNotInUse( cid );

	}

	/**
	 * Updates the countdown label of a not-in-use row ('*' per remaining
	 * second of a 5s grace period); removes the row once the period ends.
	 */
	updateNotInUse( cid ) {

		const { item, time } = this.notInUse.get( cid );

		const current = performance.now();
		const duration = 5;
		const remaining = duration - Math.floor( ( current - time ) / 1000 );

		if ( remaining >= 0 ) {

			const counter = '*'.repeat( Math.max( 0, remaining ) );
			const element = item.domElement.querySelector( '.list-item-cell .value' );

			setText( element, item.userData.name + ' (not in use) ' + counter );

		} else {

			item.domElement.firstElementChild.classList.remove( 'alert' );
			item.parent.remove( item );

			this.notInUse.delete( cid );

		}

	}

	/**
	 * Per-frame refresh: rebuilds the stats tree rows, flags rows that
	 * disappeared this frame, ticks existing not-in-use countdowns and
	 * updates the FPS counter and frame-level totals.
	 *
	 * @param {Object} inspector - Stats provider (fps, getStatsData).
	 * @param {Object} frame - Root frame record (children, cpu, gpu, total, miscellaneous).
	 */
	updateText( inspector, frame ) {

		// Snapshot last frame's rows; this.frameItems is repopulated below.
		const oldFrameItems = new Map( this.frameItems );
		this.frameItems.clear();

		this.currentItem = this.frameStats;

		for ( const child of frame.children ) {

			this.resolveStats( inspector, child );

		}

		// remove unused frame items
		for ( const [ cid, item ] of oldFrameItems ) {

			if ( ! this.frameItems.has( cid ) ) {

				this.addNotInUse( cid, item );
				oldFrameItems.delete( cid );

			}

		}

		// update not in use items
		for ( const cid of this.notInUse.keys() ) {

			this.updateNotInUse( cid );

		}

		//

		// NOTE(review): setText is called with an id string here but with
		// elements elsewhere — presumably it accepts both; verify in ui/utils.js.
		setText( 'graph-fps-counter', inspector.fps.toFixed() + ' FPS' );

		//

		setText( this.frameStats.data[ 1 ], frame.cpu.toFixed( 2 ) );
		setText( this.frameStats.data[ 2 ], frame.gpu.toFixed( 2 ) );
		setText( this.frameStats.data[ 3 ], frame.total.toFixed( 2 ) );

		//

		setText( this.miscellaneous.data[ 1 ], frame.miscellaneous.toFixed( 2 ) );
		setText( this.miscellaneous.data[ 2 ], '-' );
		setText( this.miscellaneous.data[ 3 ], frame.miscellaneous.toFixed( 2 ) );

		//

		this.currentItem = null;

	}

}
export { Performance };

View File

@@ -0,0 +1,166 @@
import { Tab } from '../ui/Tab.js';
import { List } from '../ui/List.js';
import { Item } from '../ui/Item.js';
import { RendererUtils, NoToneMapping, LinearSRGBColorSpace } from 'three/webgpu';
/**
 * Inspector tab that previews canvas targets (render outputs) as rows in a
 * tree, optionally grouped into folders by each canvasData's path. Each
 * update() blits every canvas target's quad into its preview canvas.
 */
class Viewer extends Tab {

	constructor( options = {} ) {

		super( 'Viewer', options );

		const nodeList = new List( 'Viewer', 'Name' );
		nodeList.setGridStyle( '150px minmax(200px, 2fr)' );
		nodeList.domElement.style.minWidth = '400px';

		const scrollWrapper = document.createElement( 'div' );
		scrollWrapper.className = 'list-scroll-wrapper';
		scrollWrapper.appendChild( nodeList.domElement );
		this.content.appendChild( scrollWrapper );

		// Default group for canvases that carry no path.
		const nodes = new Item( 'Nodes' );
		nodeList.add( nodes );

		//

		// canvasData.id -> Item row.
		this.itemLibrary = new Map();

		// path string -> folder Item.
		this.folderLibrary = new Map();

		// canvasData list shown during the previous update().
		this.currentDataList = [];

		this.nodeList = nodeList;
		this.nodes = nodes;

	}

	/**
	 * Returns the folder Item for a path, creating and appending it to the
	 * list on first use.
	 *
	 * @param {string} name - Folder path/name.
	 * @returns {Item} The folder row.
	 */
	getFolder( name ) {

		let folder = this.folderLibrary.get( name );

		if ( folder === undefined ) {

			folder = new Item( name );
			this.folderLibrary.set( name, folder );
			this.nodeList.add( folder );

		}

		return folder;

	}

	/**
	 * Returns the preview row for a canvasData entry, creating it (preview
	 * canvas + name cell) on first use. The row is cached by canvasData.id
	 * but not yet attached to a parent here.
	 *
	 * @param {Object} canvasData - Entry with id, name and canvasTarget.
	 * @returns {Item} The preview row.
	 */
	addNodeItem( canvasData ) {

		let item = this.itemLibrary.get( canvasData.id );

		if ( item === undefined ) {

			const name = canvasData.name;
			const domElement = canvasData.canvasTarget.domElement;

			item = new Item( domElement, name );
			item.itemRow.children[ 1 ].style[ 'justify-content' ] = 'flex-start';

			this.itemLibrary.set( canvasData.id, item );

		}

		return item;

	}

	/**
	 * Per-frame refresh: removes rows for canvases no longer in the list
	 * (dropping folders that become empty), (re)attaches rows in list order,
	 * and renders every canvasData quad into its preview canvas with tone
	 * mapping and color space neutralized, restoring renderer state after.
	 * Skipped entirely while the tab is neither active nor detached.
	 *
	 * @param {Object} renderer - WebGPURenderer-style renderer.
	 * @param {Array<Object>} canvasDataList - Current canvas entries.
	 */
	update( renderer, canvasDataList ) {

		if ( ! this.isActive && ! this.isDetached ) return;

		//

		const previousDataList = [ ...this.currentDataList ];

		// remove old
		for ( const canvasData of previousDataList ) {

			if ( this.itemLibrary.has( canvasData.id ) && canvasDataList.indexOf( canvasData ) === - 1 ) {

				const item = this.itemLibrary.get( canvasData.id );
				const parent = item.parent;
				parent.remove( item );

				// Drop the containing folder too once it has no children left.
				if ( this.folderLibrary.has( parent.data[ 0 ] ) && parent.children.length === 0 ) {

					parent.parent.remove( parent );
					this.folderLibrary.delete( parent.data[ 0 ] );

				}

				this.itemLibrary.delete( canvasData.id );

			}

		}

		//

		// Per-path counters used to keep folder children in list order.
		const indexes = {};

		for ( const canvasData of canvasDataList ) {

			const item = this.addNodeItem( canvasData );
			const previousCanvasTarget = renderer.getCanvasTarget();

			const path = canvasData.path;

			if ( path ) {

				const folder = this.getFolder( path );

				if ( indexes[ path ] === undefined ) {

					indexes[ path ] = 0;

				}

				// Re-attach when the row is new, moved folders, or out of order.
				if ( folder.parent === null || item.parent !== folder || folder.children.indexOf( item ) !== indexes[ path ] ) {

					folder.add( item );

				}

				indexes[ path ] ++;

			} else {

				if ( ! item.parent ) {

					this.nodes.add( item );

				}

			}

			// NOTE(review): reassigned on every iteration; only the final
			// assignment matters.
			this.currentDataList = canvasDataList;

			//

			// Blit the quad into the preview canvas with neutral color
			// management, then restore the renderer's previous state.
			const state = RendererUtils.resetRendererState( renderer );

			renderer.toneMapping = NoToneMapping;
			renderer.outputColorSpace = LinearSRGBColorSpace;
			renderer.setCanvasTarget( canvasData.canvasTarget );

			canvasData.quad.render( renderer );

			renderer.setCanvasTarget( previousCanvasTarget );

			RendererUtils.restoreRendererState( renderer, state );

		}

	}

}
export { Viewer };

95
node_modules/three/examples/jsm/inspector/ui/Graph.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
/**
 * Minimal SVG area-graph used by the Performance tab. Each registered line
 * is a filled path; the vertical scale tracks the running maximum sample
 * ("limit") and periodically resets so the graph can shrink again.
 *
 * Fixes: update() previously divided by this.limit, which stays 0 until a
 * positive sample arrives, producing NaN path coordinates; it also emitted
 * a degenerate path for lines with no samples. Both cases are now guarded.
 */
export class Graph {

	/**
	 * @param {number} [maxPoints=512] - Number of samples kept per line.
	 */
	constructor( maxPoints = 512 ) {

		this.maxPoints = maxPoints;

		// id -> { path, color, points } for every registered line.
		this.lines = {};

		// Current vertical scale: largest sample seen since the last reset.
		this.limit = 0;

		// update() calls since the limit last rose; used to decay the scale.
		this.limitIndex = 0;

		this.domElement = document.createElementNS( 'http://www.w3.org/2000/svg', 'svg' );
		this.domElement.setAttribute( 'class', 'graph-svg' );

	}

	/**
	 * Registers a new line.
	 *
	 * @param {string} id - Key used by addPoint().
	 * @param {string} color - CSS custom property name (e.g. '--accent-color').
	 */
	addLine( id, color ) {

		const path = document.createElementNS( 'http://www.w3.org/2000/svg', 'path' );
		path.setAttribute( 'class', 'graph-path' );
		path.style.stroke = `var(${color})`;
		path.style.fill = `var(${color})`;

		this.domElement.appendChild( path );

		this.lines[ id ] = { path, color, points: [] };

	}

	/**
	 * Appends a sample to a line, evicting the oldest sample once the line
	 * is full and raising the vertical scale when the sample exceeds it.
	 * Unknown line ids are ignored.
	 *
	 * @param {string} lineId - Line key from addLine().
	 * @param {number} value - Sample value.
	 */
	addPoint( lineId, value ) {

		const line = this.lines[ lineId ];

		if ( ! line ) return;

		line.points.push( value );

		if ( line.points.length > this.maxPoints ) {

			line.points.shift();

		}

		if ( value > this.limit ) {

			this.limit = value;
			this.limitIndex = 0;

		}

	}

	/** Resets the vertical scale so subsequent samples re-derive it. */
	resetLimit() {

		this.limit = 0;
		this.limitIndex = 0;

	}

	/**
	 * Rebuilds every line's path from its samples, right-aligned against
	 * the SVG's right edge. Does nothing while the SVG is not laid out
	 * (clientWidth === 0); lines without samples are skipped.
	 */
	update() {

		const svgWidth = this.domElement.clientWidth;
		const svgHeight = this.domElement.clientHeight;

		if ( svgWidth === 0 ) return;

		const pointStep = svgWidth / ( this.maxPoints - 1 );

		// Guard: this.limit is 0 until a positive sample arrives; dividing by
		// it would produce NaN/Infinity path coordinates.
		const scale = this.limit > 0 ? this.limit : 1;

		for ( const id in this.lines ) {

			const line = this.lines[ id ];

			// Nothing to draw (and the closing segment below would be degenerate).
			if ( line.points.length === 0 ) continue;

			let pathString = `M 0,${ svgHeight }`;

			for ( let i = 0; i < line.points.length; i ++ ) {

				const x = i * pointStep;
				const y = svgHeight - ( line.points[ i ] / scale ) * svgHeight;

				pathString += ` L ${ x },${ y }`;

			}

			// Close the area back down to the baseline.
			pathString += ` L ${( line.points.length - 1 ) * pointStep},${ svgHeight } Z`;

			// Shift so the newest sample sits at the right edge of the SVG.
			const offset = svgWidth - ( ( line.points.length - 1 ) * pointStep );

			line.path.setAttribute( 'transform', `translate(${ offset }, 0)` );
			line.path.setAttribute( 'd', pathString );

		}

		//

		// Decay: once a full window passes without a new maximum, rescale.
		if ( this.limitIndex ++ > this.maxPoints ) {

			this.resetLimit();

		}

	}

}

170
node_modules/three/examples/jsm/inspector/ui/Item.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
/**
 * Tree row used by List: a grid row of cells plus an optional container of
 * child items that can be collapsed by clicking the row.
 */
export class Item {

	/**
	 * @param {...(HTMLElement|*)} data - One entry per cell; HTMLElements are
	 * appended as-is, anything else is stringified into the cell.
	 */
	constructor( ...data ) {

		this.children = [];
		this.isOpen = true;

		// Lazily created on first add(); torn down when the last child is removed.
		this.childrenContainer = null;

		this.parent = null;

		this.domElement = document.createElement( 'div' );
		this.domElement.className = 'list-item-wrapper';

		this.itemRow = document.createElement( 'div' );
		this.itemRow.className = 'list-item-row';

		// Free-form per-row storage for callers (e.g. display name caching).
		this.userData = {};

		this.data = data;
		this.data.forEach( ( cellData ) => {

			const cell = document.createElement( 'div' );
			cell.className = 'list-item-cell';

			if ( cellData instanceof HTMLElement ) {

				cell.appendChild( cellData );

			} else {

				cell.append( String( cellData ) );

			}

			this.itemRow.appendChild( cell );

		} );

		this.domElement.appendChild( this.itemRow );

		// Bindings
		// Bound once so the same reference can be removed again in remove().
		this.onItemClick = this.onItemClick.bind( this );

	}

	/**
	 * Row click handler: toggles the children container, unless the click
	 * landed on an interactive element inside the row.
	 */
	onItemClick( e ) {

		if ( e.target.closest( 'button, a, input, label' ) ) return;

		this.toggle();

	}

	/**
	 * Inserts a child item at the given index (appends by default),
	 * detaching it from any previous parent first. Creates the children
	 * container and installs the collapse click handler on first child.
	 *
	 * @param {Item} item - Child to insert.
	 * @param {number} [index=this.children.length] - Insertion position.
	 * @returns {Item} this, for chaining.
	 */
	add( item, index = this.children.length ) {

		if ( item.parent !== null ) {

			item.parent.remove( item );

		}

		item.parent = this;
		this.children.splice( index, 0, item );

		this.itemRow.classList.add( 'collapsible' );

		if ( ! this.childrenContainer ) {

			this.childrenContainer = document.createElement( 'div' );
			this.childrenContainer.className = 'list-children-container';
			this.childrenContainer.classList.toggle( 'closed', ! this.isOpen );
			this.domElement.appendChild( this.childrenContainer );

			this.itemRow.addEventListener( 'click', this.onItemClick );

		}

		// Insert at the matching DOM position; null falls through to append.
		this.childrenContainer.insertBefore(
			item.domElement,
			this.childrenContainer.children[ index ] || null
		);

		this.updateToggler();

		return this;

	}

	/**
	 * Removes a child item; no-op when the item is not a child. Tears down
	 * the children container and the collapse click handler once empty.
	 *
	 * @param {Item} item - Child to remove.
	 * @returns {Item} this, for chaining.
	 */
	remove( item ) {

		const index = this.children.indexOf( item );

		if ( index !== - 1 ) {

			this.children.splice( index, 1 );
			this.childrenContainer.removeChild( item.domElement );
			item.parent = null;

			if ( this.children.length === 0 ) {

				this.itemRow.classList.remove( 'collapsible' );
				this.itemRow.removeEventListener( 'click', this.onItemClick );
				this.childrenContainer.remove();
				this.childrenContainer = null;

			}

			this.updateToggler();

		}

		return this;

	}

	/**
	 * Syncs the expand/collapse arrow in the first cell with the current
	 * child count: created while children exist, removed when none remain.
	 */
	updateToggler() {

		const firstCell = this.itemRow.querySelector( '.list-item-cell:first-child' );
		let toggler = this.itemRow.querySelector( '.item-toggler' );

		if ( this.children.length > 0 ) {

			if ( ! toggler ) {

				toggler = document.createElement( 'span' );
				toggler.className = 'item-toggler';
				firstCell.prepend( toggler );

			}

			if ( this.isOpen ) {

				this.itemRow.classList.add( 'open' );

			}

		} else if ( toggler ) {

			toggler.remove();

		}

	}

	/**
	 * Flips the open state and updates the row/container classes to match.
	 * @returns {Item} this, for chaining.
	 */
	toggle() {

		this.isOpen = ! this.isOpen;
		this.itemRow.classList.toggle( 'open', this.isOpen );

		if ( this.childrenContainer ) {

			this.childrenContainer.classList.toggle( 'closed', ! this.isOpen );

		}

		return this;

	}

	/**
	 * Collapses the item if it is currently open.
	 * @returns {Item} this, for chaining.
	 */
	close() {

		if ( this.isOpen ) {

			this.toggle();

		}

		return this;

	}

}

75
node_modules/three/examples/jsm/inspector/ui/List.js generated vendored Normal file
View File

@@ -0,0 +1,75 @@
/**
 * Flat table widget: a header row plus top-level Item children. Column
 * widths are injected per-instance through a scoped style element keyed by
 * a random data-list-id attribute.
 */
export class List {

	/**
	 * @param {...string} headers - One label per column.
	 */
	constructor( ...headers ) {

		this.headers = headers;
		this.children = [];

		const root = document.createElement( 'div' );
		root.className = 'list-container';
		root.style.padding = '10px';

		// Random id scopes the injected grid CSS to this list only.
		this.id = `list-${Math.random().toString( 36 ).slice( 2, 11 )}`;
		root.dataset.listId = this.id;

		this.gridStyleElement = document.createElement( 'style' );
		root.appendChild( this.gridStyleElement );

		const headerRow = document.createElement( 'div' );
		headerRow.className = 'list-header';

		for ( const headerText of this.headers ) {

			const cell = document.createElement( 'div' );
			cell.className = 'list-header-cell';
			cell.textContent = headerText;
			headerRow.appendChild( cell );

		}

		root.appendChild( headerRow );

		this.domElement = root;

	}

	/**
	 * Sets the grid-template-columns shared by the header row and every
	 * item row inside this list.
	 *
	 * @param {string} gridTemplate - CSS grid-template-columns value.
	 */
	setGridStyle( gridTemplate ) {

		this.gridStyleElement.textContent = `
			[data-list-id="${this.id}"] > .list-header,
			[data-list-id="${this.id}"] .list-item-row {
				grid-template-columns: ${gridTemplate};
			}
		`;

	}

	/**
	 * Appends a top-level item, detaching it from any previous parent.
	 *
	 * @param {Item} item - Item to append.
	 */
	add( item ) {

		if ( item.parent !== null ) {

			item.parent.remove( item );

		}

		item.domElement.classList.add( 'header-wrapper', 'section-start' );

		item.parent = this;
		this.children.push( item );
		this.domElement.appendChild( item.domElement );

	}

	/**
	 * Removes a top-level item; no-op when the item is not a child.
	 *
	 * @param {Item} item - Item to remove.
	 * @returns {List} this, for chaining.
	 */
	remove( item ) {

		const index = this.children.indexOf( item );

		if ( index === - 1 ) return this;

		this.children.splice( index, 1 );
		this.domElement.removeChild( item.domElement );
		item.parent = null;

		return this;

	}

}

Some files were not shown because too many files have changed in this diff Show More