From c5da89dcf8da23b1a15f113ca709d61a9d8acd0d Mon Sep 17 00:00:00 2001 From: Sergey Linev Date: Tue, 24 Feb 2026 11:15:54 +0100 Subject: [PATCH 1/9] [jupyter] add support of TFile drawing In the NotebookDrawer implement handling of the TFile instance. Full file content read in binary buffer and injected into HTML page as base64-coded blob. In the JavaScript blob encoded back and HierarchyPainter used to implement file content browsing. File size limited by 10MB. Bigger files only can be drawn when file.Draw("force") is invoked. Fix problem with geometry display --- .../python/ROOT/_jupyroot/helpers/utils.py | 224 ++++++++++++------ 1 file changed, 147 insertions(+), 77 deletions(-) diff --git a/bindings/pyroot/pythonizations/python/ROOT/_jupyroot/helpers/utils.py b/bindings/pyroot/pythonizations/python/ROOT/_jupyroot/helpers/utils.py index 72918f4edd1e7..ee84eb107f63f 100644 --- a/bindings/pyroot/pythonizations/python/ROOT/_jupyroot/helpers/utils.py +++ b/bindings/pyroot/pythonizations/python/ROOT/_jupyroot/helpers/utils.py @@ -21,6 +21,7 @@ import sys import tempfile import time +import ctypes from contextlib import contextmanager from datetime import datetime from hashlib import sha1 @@ -40,6 +41,8 @@ # Keep display handle for canvases to be able update them _canvasHandles = {} +_visualObjects = [] + _jsMagicHighlight = """ Jupyter.CodeCell.options_default.highlight_modes['magic_{cppMIME}'] = {{'reg':[/^%%cpp/]}}; console.log("JupyROOT - %%cpp magic configured"); @@ -85,6 +88,47 @@ """ + +_jsFileCode = """ +
+
+
+
+ +""" + + + TBufferJSONErrorMessage = "The TBufferJSON class is necessary for JS visualisation to work and cannot be found. Did you enable the http module (-D http=ON for CMake)?" @@ -154,6 +198,11 @@ def disableJSVisDebug(): _enableJSVisDebug = False +def addVisualObject(object, kind="none", option=""): + global _visualObjects + _visualObjects.append({ "object": object, "kind": kind, "option": option }) + + def _getPlatform(): return sys.platform @@ -439,60 +488,43 @@ def __del__(self): def GetCanvasDrawers(): lOfC = ROOT.gROOT.GetListOfCanvases() - return [NotebookDrawer(can) for can in lOfC if can.IsDrawn() or can.IsUpdated()] + return [NotebookDrawer(can, "tcanvas") for can in lOfC if can.IsDrawn() or can.IsUpdated()] def GetRCanvasDrawers(): if not RCanvasAvailable(): return [] lOfC = ROOT.Experimental.RCanvas.GetCanvases() - return [NotebookDrawer(can.__smartptr__().get()) for can in lOfC if can.IsShown() or can.IsUpdated()] + return [NotebookDrawer(can.__smartptr__().get(), "rcanvas") for can in lOfC if can.IsShown() or can.IsUpdated()] + +def GetVisualDrawers(): + global _visualObjects + res = [NotebookDrawer(entry.get('object'), entry.get('kind'), entry.get('option')) for entry in _visualObjects] + _visualObjects.clear() + return res -def GetGeometryDrawer(): +def GetGeometryDrawers(): if not hasattr(ROOT, "gGeoManager"): - return + return [] if not ROOT.gGeoManager: - return - if not ROOT.gGeoManager.GetUserPaintVolume(): - return - vol = ROOT.gGeoManager.GetTopVolume() - if vol: - return NotebookDrawer(vol) + return [] + vol = ROOT.gGeoManager.GetUserPaintVolume() + if not vol: + return [] + return [NotebookDrawer(vol, "geom")] def GetDrawers(): - drawers = GetCanvasDrawers() + GetRCanvasDrawers() - geometryDrawer = GetGeometryDrawer() - if geometryDrawer: - drawers.append(geometryDrawer) - return drawers - - -def DrawGeometry(): - drawer = GetGeometryDrawer() - if drawer: - drawer.Draw() + return GetCanvasDrawers() + GetRCanvasDrawers() + 
GetVisualDrawers() + GetGeometryDrawers() -def DrawCanvases(): - drawers = GetCanvasDrawers() +def NotebookDraw(): + drawers = GetDrawers() for drawer in drawers: drawer.Draw() -def DrawRCanvases(): - rdrawers = GetRCanvasDrawers() - for drawer in rdrawers: - drawer.Draw() - - -def NotebookDraw(): - DrawGeometry() - DrawCanvases() - DrawRCanvases() - - class CaptureDrawnPrimitives(object): """ Capture the canvas which is drawn to display it. @@ -514,15 +546,22 @@ class NotebookDrawer(object): jsROOT. """ - def __init__(self, theObject): + def __init__(self, theObject, kind="none", option=""): self.drawableObject = theObject + self.drawOption = option self.isRCanvas = False self.isCanvas = False + self.isFile = False + self.isGeom = False self.drawableId = str(ROOT.AddressOf(theObject)[0]) - if hasattr(self.drawableObject, "ResolveSharedPtrs"): + if kind == "tfile": + self.isFile = True + elif kind == "rcanvas": self.isRCanvas = True - else: - self.isCanvas = self.drawableObject.ClassName() == "TCanvas" + elif kind == "geom": + self.isGeom = True + elif kind == "tcanvas": + self.isCanvas = True def __del__(self): if self.isRCanvas: @@ -531,8 +570,8 @@ def __del__(self): elif self.isCanvas: self.drawableObject.ResetDrawn() self.drawableObject.ResetUpdated() - else: - ROOT.gGeoManager.SetUserPaintVolume(None) + elif self.isGeom: + self.drawableObject.GetGeoManager().SetUserPaintVolume(ROOT.nullptr) def _getListOfPrimitivesNamesAndTypes(self): """ @@ -561,6 +600,10 @@ def _canJsDisplay(self): # RCanvas clways displayed with jsroot if self.isRCanvas: return True + if self.isFile: + return True + if self.isGeom: + return True # check if jsroot was disabled if not _enableJSVis: return False @@ -581,41 +624,66 @@ def _canJsDisplay(self): return False return True - def _getJsCode(self): - # produce JSON for the canvas - if self.isRCanvas: - json = self.drawableObject.CreateJSON() - else: - json = produceCanvasJson(self.drawableObject).Data() + def _getFileJsCode(self): + sz 
= self.drawableObject.GetSize() + if sz > 10000000 and self.drawOption != "force": + return f"File size {sz} is too large for JSROOT display. Use 'force' draw option to show file nevertheless" + + # create plain buffer and get pointer on it + u_buffer = (ctypes.c_ubyte * sz)(*range(sz)) + addrc = ctypes.cast(ctypes.pointer(u_buffer), ctypes.c_char_p) + + if self.drawableObject.ReadBuffer(addrc, 0, sz): + return f"Fail to read file {self.drawableObject.GetName()} buffer of size {sz}" + + base64 = ROOT.TBase64.Encode(addrc, sz) divId = self._getUniqueDivId() + thisJsCode = _jsFileCode.format( + jsCanvasHeight=_jsCanvasHeight, + jsDivId=divId, + fileBase64=base64 + ) + return thisJsCode + + def _getJsCode(self): + if self.isFile: + return self._getFileJsCode() + + options = "" width = _jsCanvasWidth height = _jsCanvasHeight - jsonzip = ROOT.TBufferJSON.zipJSON(json) - options = "all" - - if self.isCanvas: - if self.drawableObject.GetWindowWidth() > 0: - width = self.drawableObject.GetWindowWidth() - if self.drawableObject.GetWindowHeight() > 0: - height = self.drawableObject.GetWindowHeight() - options = "" + json = "" + # produce JSON for the draw object if self.isRCanvas: - if self.drawableObject.GetWidth() > 0: - width = self.drawableObject.GetWidth() - if self.drawableObject.GetHeight() > 0: - height = self.drawableObject.GetHeight() - options = "" + json = self.drawableObject.CreateJSON() + if self.drawableObject.GetWidth() > 0: + width = self.drawableObject.GetWidth() + if self.drawableObject.GetHeight() > 0: + height = self.drawableObject.GetHeight() + elif self.isCanvas: + json = produceCanvasJson(self.drawableObject).Data() + if self.drawableObject.GetWindowWidth() > 0: + width = self.drawableObject.GetWindowWidth() + if self.drawableObject.GetWindowHeight() > 0: + height = self.drawableObject.GetWindowHeight() + elif self.isGeom: + json = ROOT.TBufferJSON.ConvertToJSON(self.drawableObject, 23).Data() + options = "all" + else: + return f"Class 
{self.drawableObject.ClassName()} not supported yet" + + zip = ROOT.TBufferJSON.zipJSON(json) thisJsCode = _jsCode.format( jsCanvasWidth=width, jsCanvasHeight=height, jsonLength=len(json), - jsonZip=jsonzip, + jsonZip=zip, jsDrawOptions=options, - jsDivId=divId, + jsDivId=self._getUniqueDivId(), ) return thisJsCode @@ -627,6 +695,8 @@ def _getDrawId(self): return self.drawableObject.GetName() + self.drawableId if self.isRCanvas: return self.drawableObject.GetUID() + if self.isFile: + return "File" + self.drawableId # all other objects do not support update and can be ignored return "" @@ -660,26 +730,26 @@ def _getPngImage(self): return img def _pngDisplay(self): - global _canvasHandles - name = self._getDrawId() - updated = self._getUpdated() - img = self._getPngImage() - if updated and name and (name in _canvasHandles): - _canvasHandles[name].update(img) - elif name: - _canvasHandles[name] = display.display(img, display_id=True) - else: - display.display(img) + if self.isCanvas or self.isRCanvas: + global _canvasHandles + name = self._getDrawId() + updated = self._getUpdated() + img = self._getPngImage() + if updated and name and (name in _canvasHandles): + _canvasHandles[name].update(img) + elif name: + _canvasHandles[name] = display.display(img, display_id=True) + else: + display.display(img) def _display(self): if _enableJSVisDebug: self._pngDisplay() self._jsDisplay() + elif self._canJsDisplay(): + self._jsDisplay() else: - if self._canJsDisplay(): - self._jsDisplay() - else: - self._pngDisplay() + self._pngDisplay() def GetDrawableObjects(self): if _enableJSVisDebug: From 18b2f60340ea90b212043792953a50607b72ca62 Mon Sep 17 00:00:00 2001 From: Sergey Linev Date: Tue, 24 Feb 2026 12:53:37 +0100 Subject: [PATCH 2/9] [jsroot] dev 23/02/2026 Add implementation for TFile drawing in jupyter Plus changes from last months with full `RNTuple` support Full log: Implement new data types in `RNtuple` - reduced float types kFloat16, kReal32Trunc, kReal32Quant - 
`std::vector` - `std::map`, `std::unordered_map`, `std::multimap`, `std::unordered_multimap` with `std::pair` - `std::set`, `std::unordered_set`, `std::multiset`, `std::unordered_multiset` - `std::array` - `std::variant` - `std::tuple` - `std::bitset` - `std::atomic` - simple custom classes - streamed types 1. Resort order of ranges in http request, fixing several long-standing problems #374 1. Implement for `TPie` 3d, text, title drawing including interactivity 1. Implement `TCanvas` support in `build3d` function #373 1. Implements `TTree` branches filtering via context menu #364 1. Let define alternative draw function #378 1. Implement padsN draw option for `THStack` and `TMultiGraph` 1. Use `resvg-js` backend for PNG support in node.js #391, thanks to https://github.com/OmarMesqq 1. Remove support for deprectaed `TH1K` class 1. Introduce `settings.ServerTimeout` global timeout for THttpServer operations 1. Let set custom color palette with `setColorPalette` function 1. Upgrade three.js r180 -> r183 1. Fix - paint frame border mode/size from TCanvas 1. Fix - add TLeafG support to TTree #397 1. Fix - interactivity for TH3 palette drawing #398 ## Changes in 7.10.2 1. Fix - correctly process `TLeafB` arrays in tree draw #384 2. Fix - better detect default ranges in `TGraph` histogram 3. Fix - convert BigInt before `RNtuple` drawing 4. Fix - pages and clusters processing in `RNtuple` #390 5. Fix - extra row for legend header, proper horizontal align https://github.com/root-project/root/issues/21173 ## Changes in 7.10.1 1. Fix - proper paint axis labels on both sides when pad.fTickx/y = 2 2. 
Fix - recover io after bad http response --- js/LICENSE | 2 +- js/build/jsroot.js | 8934 ++++++++++++++-------- js/changes.md | 45 +- js/index.htm | 4 +- js/modules/base/BasePainter.mjs | 36 +- js/modules/base/colors.mjs | 21 +- js/modules/base/math.mjs | 1 - js/modules/core.mjs | 48 +- js/modules/draw.mjs | 37 +- js/modules/draw/RNTuple.mjs | 8 +- js/modules/draw/TTextPainter.mjs | 5 + js/modules/draw/TTree.mjs | 6 + js/modules/geom/TGeoPainter.mjs | 1 - js/modules/geom/geobase.mjs | 6 - js/modules/gpad/TAxisPainter.mjs | 2 +- js/modules/gpad/TCanvasPainter.mjs | 14 + js/modules/gpad/TFramePainter.mjs | 18 +- js/modules/gui.mjs | 14 +- js/modules/gui/HierarchyPainter.mjs | 42 +- js/modules/gui/menu.mjs | 167 +- js/modules/hist/TH3Painter.mjs | 30 +- js/modules/hist/TPavePainter.mjs | 137 +- js/modules/hist2d/TGraphPainter.mjs | 11 +- js/modules/hist2d/THStackPainter.mjs | 13 +- js/modules/hist2d/THistPainter.mjs | 20 +- js/modules/hist2d/TMultiGraphPainter.mjs | 12 +- js/modules/io.mjs | 364 +- js/modules/main.mjs | 2 +- js/modules/rntuple.mjs | 1590 ++-- js/modules/three.mjs | 7006 ++++++++++------- js/modules/three_addons.mjs | 803 +- js/modules/tree.mjs | 18 +- 32 files changed, 12745 insertions(+), 6672 deletions(-) diff --git a/js/LICENSE b/js/LICENSE index b9aea0154d89e..a6d9f24717fe9 100644 --- a/js/LICENSE +++ b/js/LICENSE @@ -1,6 +1,6 @@ The MIT License -Copyright � 2013-2025 JavaScript ROOT authors +Copyright � 2013-2026 JavaScript ROOT authors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/js/build/jsroot.js b/js/build/jsroot.js index 293dfc20b5d68..a160b67016131 100644 --- a/js/build/jsroot.js +++ b/js/build/jsroot.js @@ -14,7 +14,7 @@ const version_id = 'dev', /** @summary version date * @desc Release date in format day/month/year like '14/04/2022' */ -version_date = '7/11/2025', +version_date = '23/02/2026', /** @summary version id 
and date * @desc Produced by concatenation of {@link version_id} and {@link version_date} @@ -257,6 +257,8 @@ settings = { Render3DBatch: constants$1.Render3D.Default, /** @summary Way to embed 3D drawing in SVG, see {@link constants.Embed3D} for possible values */ Embed3D: constants$1.Embed3D.Default, + /** @summary Use `resvg-js` backend for converting SVGs in node.js */ + UseResvgJs: true, /** @summary Default canvas width */ CanvasWidth: 1200, /** @summary Default canvas height */ @@ -363,6 +365,10 @@ settings = { * @desc Allows to retry files reading if original URL fails * @private */ FilesRemap: { 'https://root.cern/': 'https://root-eos.web.cern.ch/' }, + /** @summary THttpServer read timeout in ms + * @desc Configures timeout for requests to THttpServer + * @default 0 */ + ServerTimeout: 0, /** @summary Configure xhr.withCredentials = true when submitting http requests from JSROOT */ WithCredentials: false, /** @summary Skip streamer infos from the GUI */ @@ -965,13 +971,22 @@ function findFunction(name) { /** @summary Method to create http request, without promise can be used only in browser environment * @private */ -function createHttpRequest(url, kind, user_accept_callback, user_reject_callback, use_promise) { +function createHttpRequest(url, kind, user_accept_callback, user_reject_callback, use_promise, tmout) { + function handle_error(xhr, message, code, abort_reason) { + if (!xhr.did_abort) { + xhr.did_abort = abort_reason || true; + xhr.abort(); + } + if (!xhr.did_error || abort_reason) + console.warn(message); + if (!xhr.did_error) { + xhr.did_error = true; + xhr.error_callback(Error(message), code); + } + } function configureXhr(xhr) { xhr.http_callback = isFunc(user_accept_callback) ? user_accept_callback.bind(xhr) : () => {}; - xhr.error_callback = isFunc(user_reject_callback) ? 
user_reject_callback.bind(xhr) : function(err) { - console.warn(err.message); - this.http_callback(null); - }.bind(xhr); + xhr.error_callback = isFunc(user_reject_callback) ? user_reject_callback.bind(xhr) : function() { this.http_callback(null); }; if (!kind) kind = 'buf'; @@ -1007,11 +1022,8 @@ function createHttpRequest(url, kind, user_accept_callback, user_reject_callback if (settings.HandleWrongHttpResponse && (method === 'GET') && isFunc(xhr.addEventListener)) { xhr.addEventListener('progress', function(oEvent) { - if (oEvent.lengthComputable && this.expected_size && (oEvent.loaded > this.expected_size)) { - this.did_abort = true; - this.abort(); - this.error_callback(Error(`Server sends more bytes ${oEvent.loaded} than expected ${this.expected_size}. Abort I/O operation`), 598); - } + if (oEvent.lengthComputable && this.expected_size && (oEvent.loaded > this.expected_size)) + handle_error(this, `Server sends more bytes ${oEvent.loaded} than expected ${this.expected_size}. Abort I/O operation`, 598); }.bind(xhr)); } @@ -1021,11 +1033,8 @@ function createHttpRequest(url, kind, user_accept_callback, user_reject_callback if ((this.readyState === 2) && this.expected_size) { const len = parseInt(this.getResponseHeader('Content-Length')); - if (Number.isInteger(len) && (len > this.expected_size) && !settings.HandleWrongHttpResponse) { - this.did_abort = 'large'; - this.abort(); - return this.error_callback(Error(`Server response size ${len} larger than expected ${this.expected_size}. Abort I/O operation`), 599); - } + if (Number.isInteger(len) && (len > this.expected_size) && !settings.HandleWrongHttpResponse) + return handle_error(this, `Server response size ${len} larger than expected ${this.expected_size}. 
Abort I/O operation`, 599, 'large'); } if (this.readyState !== 4) @@ -1034,7 +1043,7 @@ function createHttpRequest(url, kind, user_accept_callback, user_reject_callback if ((this.status !== 200) && (this.status !== 206) && !browser.qt6 && // in these special cases browsers not always set status !((this.status === 0) && ((url.indexOf('file://') === 0) || (url.indexOf('blob:') === 0)))) - return this.error_callback(Error(`Fail to load url ${url}`), this.status); + return handle_error(this, `Fail to load url ${url}`, this.status); if (this.nodejs_checkzip && (this.getResponseHeader('content-encoding') === 'gzip')) { // special handling of gzip JSON objects in Node.js @@ -1079,6 +1088,11 @@ function createHttpRequest(url, kind, user_accept_callback, user_reject_callback xhr.responseType = 'arraybuffer'; } + if (tmout && Number.isFinite(tmout)) { + xhr.timeout = tmout; + xhr.ontimeout = function() { handle_error(this, `Request ${url} timeout set ${tmout} ms`, 600, 'timeout'); }; + } + return xhr; } @@ -5731,9 +5745,9 @@ function loggish(transform) { return scale; } -function log() { +function log$1() { const scale = loggish(transformer$2()).domain([1, 10]); - scale.copy = () => copy$1(scale, log()).base(scale.base()); + scale.copy = () => copy$1(scale, log$1()).base(scale.base()); initRange.apply(scale, arguments); return scale; } @@ -7262,14 +7276,14 @@ function clearNow() { clockNow = 0; } -function Timer() { +function Timer$1() { this._call = this._time = this._next = null; } -Timer.prototype = timer.prototype = { - constructor: Timer, +Timer$1.prototype = timer.prototype = { + constructor: Timer$1, restart: function(callback, delay, time) { if (typeof callback !== "function") throw new TypeError("callback is not a function"); time = (time == null ? now() : +time) + (delay == null ? 
0 : +delay); @@ -7292,7 +7306,7 @@ Timer.prototype = timer.prototype = { }; function timer(callback, delay, time) { - var t = new Timer; + var t = new Timer$1; t.restart(callback, delay, time); return t; } @@ -7354,7 +7368,7 @@ function sleep(time) { } function timeout(callback, delay, time) { - var t = new Timer; + var t = new Timer$1; delay = delay == null ? 0 : +delay; t.restart(elapsed => { t.stop(); @@ -8510,10 +8524,28 @@ function createGrayPalette() { return new ColorPalette(palette); } +/** @summary Set list of colors for specified color palette + * @desc One also can redefine existing palette + * Array should contain several colors in RGB format like `rgb(10,10,10)` or `#ff00ff` + * @private */ + +const customPalettes = {}; + +function setColorPalette(id, colors) { + if (!Number.isInteger(id) || (id < 0) || !colors?.length) + return false; + + customPalettes[id] = colors; + return true; +} + /** @summary Create color palette * @private */ function getColorPalette(id, grayscale) { id = id || settings.Palette; + if (customPalettes[id]) + return new ColorPalette(customPalettes[id], grayscale); + if ((id > 0) && (id < 10)) return createGrayPalette(); if (id < 51) @@ -9601,9 +9633,14 @@ async function svgToImage(svg, image_format, args) { return internals.makePDF ? 
internals.makePDF(svg, args) : null; // required with df104.py/df105.py example with RCanvas or any special symbols in TLatex - const doctype = ''; + const doctype = '', + is_rgba = image_format === 'rgba'; if (isNodeJs()) { + if (image_format === 'jpeg') { + console.log('JPEG image format not supported in node.js, use PNG'); + return null; + } svg = encodeURIComponent(doctype + svg); svg = svg.replace(/%([0-9A-F]{2})/g, (match, p1) => { const c = String.fromCharCode('0x' + p1); @@ -9612,6 +9649,31 @@ async function svgToImage(svg, image_format, args) { const img_src = 'data:image/svg+xml;base64,' + btoa_func(decodeURIComponent(svg)); + // Use the newer and stabler `resvg-js` backend for converting SVG to PNG + if (settings.UseResvgJs) { + return Promise.resolve().then(function () { return _rollup_plugin_ignore_empty_module_placeholder$1; }).then(({ Resvg }) => { + const rawSvg = decodeURIComponent(svg), // raw SVG XML + resvg = new Resvg(rawSvg), // Initialize Resvg and create the PNG buffer + renderData = resvg.render(), + pngBuffer = renderData.asPng(); + + // Return raw RGBA pixels if caller requested it + if (is_rgba) { + return { + width: renderData.width, + height: renderData.height, + data: renderData.pixels + }; + } + + if (args?.as_buffer) + return pngBuffer; + + return 'data:image/png;base64,' + pngBuffer.toString('base64'); + }); + } + + // Fallback to `node-canvas` return Promise.resolve().then(function () { return _rollup_plugin_ignore_empty_module_placeholder$1; }).then(async handle => { return handle.default.loadImage(img_src).then(img => { const canvas = handle.default.createCanvas(img.width, img.height); @@ -9621,7 +9683,7 @@ async function svgToImage(svg, image_format, args) { if (args?.as_buffer) return canvas.toBuffer('image/' + image_format); - return image_format ? canvas.toDataURL('image/' + image_format) : canvas; + return image_format && !is_rgba ? 
canvas.toDataURL('image/' + image_format) : canvas; }); }); } @@ -9643,7 +9705,7 @@ async function svgToImage(svg, image_format, args) { if (args?.as_buffer && image_format) canvas.toBlob(blob => blob.arrayBuffer().then(resolveFunc), 'image/' + image_format); else - resolveFunc(image_format ? canvas.toDataURL('image/' + image_format) : canvas); + resolveFunc(image_format && !is_rgba ? canvas.toDataURL('image/' + image_format) : canvas); }; image.onerror = function(arg) { URL.revokeObjectURL(img_src); @@ -14591,7 +14653,7 @@ Object.assign(internals.jsroot, { ObjectPainter, cleanup, resize }); * Copyright 2010-2025 Three.js Authors * SPDX-License-Identifier: MIT */ -const REVISION = '180'; +const REVISION = '183'; /** * Represents mouse buttons and interaction types in context of controls. @@ -15455,6 +15517,38 @@ const RGB_ETC2_Format = 37492; */ const RGBA_ETC2_EAC_Format = 37496; +/** + * EAC R11 UNORM format. + * + * @type {number} + * @constant + */ +const R11_EAC_Format = 37488; // 0x9270 + +/** + * EAC R11 SNORM format. + * + * @type {number} + * @constant + */ +const SIGNED_R11_EAC_Format = 37489; // 0x9271 + +/** + * EAC RG11 UNORM format. + * + * @type {number} + * @constant + */ +const RG11_EAC_Format = 37490; // 0x9272 + +/** + * EAC RG11 SNORM format. + * + * @type {number} + * @constant + */ +const SIGNED_RG11_EAC_Format = 37491; // 0x9273 + /** * ASTC RGBA 4x4 format. * @@ -15624,21 +15718,13 @@ const RED_GREEN_RGTC2_Format = 36285; const SIGNED_RED_GREEN_RGTC2_Format = 36286; /** - * Basic depth packing. + * The depth value is inverted (1.0 - z) for visualization purposes. * * @type {number} * @constant */ const BasicDepthPacking = 3200; -/** - * A depth value is packed into 32 bit RGBA. - * - * @type {number} - * @constant - */ -const RGBADepthPacking = 3201; - /** * Normal information is relative to the underlying surface. 
* @@ -15863,7 +15949,7 @@ const WebGPUCoordinateSystem = 2001; /** * This modules allows to dispatch event objects on custom JavaScript objects. * - * Main repository: [eventdispatcher.js]{@link https://github.com/mrdoob/eventdispatcher.js/} + * Main repository: [eventdispatcher.js](https://github.com/mrdoob/eventdispatcher.js/) * * Code Example: * ```js @@ -15989,6 +16075,276 @@ class EventDispatcher { } +/** + * Checks if an array contains values that require Uint32 representation. + * + * This function determines whether the array contains any values >= 65535, + * which would require a Uint32Array rather than a Uint16Array for proper storage. + * The function iterates from the end of the array, assuming larger values are + * typically located at the end. + * + * @private + * @param {Array} array - The array to check. + * @return {boolean} True if the array contains values >= 65535, false otherwise. + */ +function arrayNeedsUint32( array ) { + + // assumes larger values usually on last + + for ( let i = array.length - 1; i >= 0; -- i ) { + + if ( array[ i ] >= 65535 ) return true; // account for PRIMITIVE_RESTART_FIXED_INDEX, #24565 + + } + + return false; + +} + +/** + * Creates an XHTML element with the specified tag name. + * + * This function uses the XHTML namespace to create DOM elements, + * ensuring proper element creation in XML-based contexts. + * + * @private + * @param {string} name - The tag name of the element to create (e.g., 'canvas', 'div'). + * @return {HTMLElement} The created XHTML element. + */ +function createElementNS( name ) { + + return document.createElementNS( 'http://www.w3.org/1999/xhtml', name ); + +} + +/** + * Creates a canvas element configured for block display. + * + * This is a convenience function that creates a canvas element with + * display style set to 'block', which is commonly used in three.js + * rendering contexts to avoid inline element spacing issues. 
+ * + * @return {HTMLCanvasElement} A canvas element with display set to 'block'. + */ +function createCanvasElement() { + + const canvas = createElementNS( 'canvas' ); + canvas.style.display = 'block'; + return canvas; + +} + +/** + * Internal cache for tracking warning messages to prevent duplicate warnings. + * + * @private + * @type {Object} + */ +const _cache = {}; + +/** + * Logs an informational message with the 'THREE.' prefix. + * + * If a custom console function is set via setConsoleFunction(), it will be used + * instead of the native console.log. The first parameter is treated as the + * method name and is automatically prefixed with 'THREE.'. + * + * @param {...any} params - The message components. The first param is used as + * the method name and prefixed with 'THREE.'. + */ +function log( ...params ) { + + const message = 'THREE.' + params.shift(); + + { + + console.log( message, ...params ); + + } + +} + +/** + * Enhances log/warn/error messages related to TSL. + * + * @param {Array} params - The original message parameters. + * @returns {Array} The filtered and enhanced message parameters. + */ +function enhanceLogMessage( params ) { + + const message = params[ 0 ]; + + if ( typeof message === 'string' && message.startsWith( 'TSL:' ) ) { + + const stackTrace = params[ 1 ]; + + if ( stackTrace && stackTrace.isStackTrace ) { + + params[ 0 ] += ' ' + stackTrace.getLocation(); + + } else { + + params[ 1 ] = 'Stack trace not available. Enable "THREE.Node.captureStackTrace" to capture stack traces.'; + + } + + } + + return params; + +} + +/** + * Logs a warning message with the 'THREE.' prefix. + * + * If a custom console function is set via setConsoleFunction(), it will be used + * instead of the native console.warn. The first parameter is treated as the + * method name and is automatically prefixed with 'THREE.'. + * + * @param {...any} params - The message components. The first param is used as + * the method name and prefixed with 'THREE.'. 
+ */ +function warn( ...params ) { + + params = enhanceLogMessage( params ); + + const message = 'THREE.' + params.shift(); + + { + + const stackTrace = params[ 0 ]; + + if ( stackTrace && stackTrace.isStackTrace ) { + + console.warn( stackTrace.getError( message ) ); + + } else { + + console.warn( message, ...params ); + + } + + } + +} + +/** + * Logs an error message with the 'THREE.' prefix. + * + * If a custom console function is set via setConsoleFunction(), it will be used + * instead of the native console.error. The first parameter is treated as the + * method name and is automatically prefixed with 'THREE.'. + * + * @param {...any} params - The message components. The first param is used as + * the method name and prefixed with 'THREE.'. + */ +function error( ...params ) { + + params = enhanceLogMessage( params ); + + const message = 'THREE.' + params.shift(); + + { + + const stackTrace = params[ 0 ]; + + if ( stackTrace && stackTrace.isStackTrace ) { + + console.error( stackTrace.getError( message ) ); + + } else { + + console.error( message, ...params ); + + } + + } + +} + +/** + * Logs a warning message only once, preventing duplicate warnings. + * + * This function maintains an internal cache of warning messages and will only + * output each unique warning message once. Useful for warnings that may be + * triggered repeatedly but should only be shown to the user once. + * + * @param {...any} params - The warning message components. + */ +function warnOnce( ...params ) { + + const message = params.join( ' ' ); + + if ( message in _cache ) return; + + _cache[ message ] = true; + + warn( ...params ); + +} + +/** + * Asynchronously probes for WebGL sync object completion. + * + * This function creates a promise that resolves when the WebGL sync object + * signals completion or rejects if the sync operation fails. It uses polling + * at the specified interval to check the sync status without blocking the + * main thread. 
This is useful for GPU-CPU synchronization in WebGL contexts. + * + * @private + * @param {WebGL2RenderingContext} gl - The WebGL rendering context. + * @param {WebGLSync} sync - The WebGL sync object to wait for. + * @param {number} interval - The polling interval in milliseconds. + * @return {Promise} A promise that resolves when the sync completes or rejects if it fails. + */ +function probeAsync( gl, sync, interval ) { + + return new Promise( function ( resolve, reject ) { + + function probe() { + + switch ( gl.clientWaitSync( sync, gl.SYNC_FLUSH_COMMANDS_BIT, 0 ) ) { + + case gl.WAIT_FAILED: + reject(); + break; + + case gl.TIMEOUT_EXPIRED: + setTimeout( probe, interval ); + break; + + default: + resolve(); + + } + + } + + setTimeout( probe, interval ); + + } ); + +} + +/** + * Used to select the correct depth functions + * when reversed depth buffer is used. + * + * @private + * @type {Object} + */ +const ReversedDepthFuncs = { + [ NeverDepth ]: AlwaysDepth, + [ LessDepth ]: GreaterDepth, + [ EqualDepth ]: NotEqualDepth, + [ LessEqualDepth ]: GreaterEqualDepth, + + [ AlwaysDepth ]: NeverDepth, + [ GreaterDepth ]: LessDepth, + [ NotEqualDepth ]: EqualDepth, + [ GreaterEqualDepth ]: LessEqualDepth, +}; + const _lut = [ '00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0a', '0b', '0c', '0d', '0e', '0f', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1a', '1b', '1c', '1d', '1e', '1f', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2a', '2b', '2c', '2d', '2e', '2f', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3a', '3b', '3c', '3d', '3e', '3f', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4a', '4b', '4c', '4d', '4e', '4f', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5a', '5b', '5c', '5d', '5e', '5f', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6a', '6b', '6c', '6d', '6e', '6f', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7a', '7b', '7c', '7d', 
'7e', '7f', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8a', '8b', '8c', '8d', '8e', '8f', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9a', '9b', '9c', '9d', '9e', '9f', 'a0', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'aa', 'ab', 'ac', 'ad', 'ae', 'af', 'b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8', 'b9', 'ba', 'bb', 'bc', 'bd', 'be', 'bf', 'c0', 'c1', 'c2', 'c3', 'c4', 'c5', 'c6', 'c7', 'c8', 'c9', 'ca', 'cb', 'cc', 'cd', 'ce', 'cf', 'd0', 'd1', 'd2', 'd3', 'd4', 'd5', 'd6', 'd7', 'd8', 'd9', 'da', 'db', 'dc', 'dd', 'de', 'df', 'e0', 'e1', 'e2', 'e3', 'e4', 'e5', 'e6', 'e7', 'e8', 'e9', 'ea', 'eb', 'ec', 'ed', 'ee', 'ef', 'f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'fa', 'fb', 'fc', 'fd', 'fe', 'ff' ]; let _seed = 1234567; @@ -15998,7 +16354,7 @@ const DEG2RAD = Math.PI / 180; const RAD2DEG = 180 / Math.PI; /** - * Generate a [UUID]{@link https://en.wikipedia.org/wiki/Universally_unique_identifier} + * Generate a [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier) * (universally unique identifier). * * @return {string} The UUID. @@ -16053,7 +16409,7 @@ function euclideanModulo( n, m ) { /** * Performs a linear mapping from range `` to range `` - * for the given value. + * for the given value. `a2` must be greater than `a1`. * * @param {number} x - The value to be mapped. * @param {number} a1 - Minimum value for range A. @@ -16111,7 +16467,7 @@ function lerp( x, y, t ) { /** * Smoothly interpolate a number from `x` to `y` in a spring-like manner using a delta * time to maintain frame rate independent movement. For details, see - * [Frame rate independent damping using lerp]{@link http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/}. + * [Frame rate independent damping using lerp](http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/). * * @param {number} x - The current point. * @param {number} y - The target point. 
@@ -16146,11 +16502,11 @@ function pingpong( x, length = 1 ) { * moved between `min` and `max`, but smoothed or slowed down the closer `x` is to * the `min` and `max`. * - * See [Smoothstep]{@link http://en.wikipedia.org/wiki/Smoothstep} for more details. + * See [Smoothstep](http://en.wikipedia.org/wiki/Smoothstep) for more details. * - * @param {number} x - The value to evaluate based on its position between min and max. - * @param {number} min - The min value. Any x value below min will be `0`. - * @param {number} max - The max value. Any x value above max will be `1`. + * @param {number} x - The value to evaluate based on its position between `min` and `max`. + * @param {number} min - The min value. Any `x` value below `min` will be `0`. `min` must be lower than `max`. + * @param {number} max - The max value. Any `x` value above `max` will be `1`. `max` must be greater than `min`. * @return {number} The alternated value. */ function smoothstep( x, min, max ) { @@ -16165,12 +16521,12 @@ function smoothstep( x, min, max ) { } /** - * A [variation on smoothstep]{@link https://en.wikipedia.org/wiki/Smoothstep#Variations} - * that has zero 1st and 2nd order derivatives at x=0 and x=1. + * A [variation on smoothstep](https://en.wikipedia.org/wiki/Smoothstep#Variations) + * that has zero 1st and 2nd order derivatives at `x=0` and `x=1`. * - * @param {number} x - The value to evaluate based on its position between min and max. - * @param {number} min - The min value. Any x value below min will be `0`. - * @param {number} max - The max value. Any x value above max will be `1`. + * @param {number} x - The value to evaluate based on its position between `min` and `max`. + * @param {number} min - The min value. Any `x` value below `min` will be `0`. `min` must be lower than `max`. + * @param {number} max - The max value. Any `x` value above `max` will be `1`. `max` must be greater than `min`. * @return {number} The alternated value. 
*/ function smootherstep( x, min, max ) { @@ -16283,7 +16639,7 @@ function isPowerOfTwo( value ) { /** * Returns the smallest power of two that is greater than or equal to the given number. * - * @param {number} value - The value to find a POT for. + * @param {number} value - The value to find a POT for. Must be greater than `0`. * @return {number} The smallest power of two that is greater than or equal to the given number. */ function ceilPowerOfTwo( value ) { @@ -16295,7 +16651,7 @@ function ceilPowerOfTwo( value ) { /** * Returns the largest power of two that is less than or equal to the given number. * - * @param {number} value - The value to find a POT for. + * @param {number} value - The value to find a POT for. Must be greater than `0`. * @return {number} The largest power of two that is less than or equal to the given number. */ function floorPowerOfTwo( value ) { @@ -16305,7 +16661,7 @@ function floorPowerOfTwo( value ) { } /** - * Sets the given quaternion from the [Intrinsic Proper Euler Angles]{@link https://en.wikipedia.org/wiki/Euler_angles} + * Sets the given quaternion from the [Intrinsic Proper Euler Angles](https://en.wikipedia.org/wiki/Euler_angles) * defined by the given angles and order. * * Rotations are applied to the axes in the order specified by order: @@ -16361,7 +16717,7 @@ function setQuaternionFromProperEuler( q, a, b, c, order ) { break; default: - console.warn( 'THREE.MathUtils: .setQuaternionFromProperEuler() encountered an unknown order: ' + order ); + warn( 'MathUtils: .setQuaternionFromProperEuler() encountered an unknown order: ' + order ); } @@ -16470,7 +16826,7 @@ const MathUtils = { DEG2RAD: DEG2RAD, RAD2DEG: RAD2DEG, /** - * Generate a [UUID]{@link https://en.wikipedia.org/wiki/Universally_unique_identifier} + * Generate a [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier) * (universally unique identifier). 
* * @static @@ -16541,7 +16897,7 @@ const MathUtils = { /** * Smoothly interpolate a number from `x` to `y` in a spring-like manner using a delta * time to maintain frame rate independent movement. For details, see - * [Frame rate independent damping using lerp]{@link http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/}. + * [Frame rate independent damping using lerp](http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/). * * @static * @method @@ -16568,7 +16924,7 @@ const MathUtils = { * moved between `min` and `max`, but smoothed or slowed down the closer `x` is to * the `min` and `max`. * - * See [Smoothstep]{@link http://en.wikipedia.org/wiki/Smoothstep} for more details. + * See [Smoothstep](http://en.wikipedia.org/wiki/Smoothstep) for more details. * * @static * @method @@ -16579,7 +16935,7 @@ const MathUtils = { */ smoothstep: smoothstep, /** - * A [variation on smoothstep]{@link https://en.wikipedia.org/wiki/Smoothstep#Variations} + * A [variation on smoothstep](https://en.wikipedia.org/wiki/Smoothstep#Variations) * that has zero 1st and 2nd order derivatives at x=0 and x=1. * * @static @@ -16674,7 +17030,7 @@ const MathUtils = { */ floorPowerOfTwo: floorPowerOfTwo, /** - * Sets the given quaternion from the [Intrinsic Proper Euler Angles]{@link https://en.wikipedia.org/wiki/Euler_angles} + * Sets the given quaternion from the [Intrinsic Proper Euler Angles](https://en.wikipedia.org/wiki/Euler_angles) * defined by the given angles and order. * * Rotations are applied to the axes in the order specified by order: @@ -17648,7 +18004,7 @@ class Quaternion { /** * Interpolates between two quaternions via SLERP. This implementation assumes the - * quaternion data are managed in flat arrays. + * quaternion data are managed in flat arrays. * * @param {Array} dst - The destination array. * @param {number} dstOffset - An offset into the destination array. 
@@ -17656,70 +18012,61 @@ class Quaternion { * @param {number} srcOffset0 - An offset into the first source array. * @param {Array} src1 - The source array of the second quaternion. * @param {number} srcOffset1 - An offset into the second source array. - * @param {number} t - The interpolation factor in the range `[0,1]`. + * @param {number} t - The interpolation factor. A value in the range `[0,1]` will interpolate. A value outside the range `[0,1]` will extrapolate. * @see {@link Quaternion#slerp} */ static slerpFlat( dst, dstOffset, src0, srcOffset0, src1, srcOffset1, t ) { - // fuzz-free, array-based Quaternion SLERP operation - let x0 = src0[ srcOffset0 + 0 ], y0 = src0[ srcOffset0 + 1 ], z0 = src0[ srcOffset0 + 2 ], w0 = src0[ srcOffset0 + 3 ]; - const x1 = src1[ srcOffset1 + 0 ], + let x1 = src1[ srcOffset1 + 0 ], y1 = src1[ srcOffset1 + 1 ], z1 = src1[ srcOffset1 + 2 ], w1 = src1[ srcOffset1 + 3 ]; - if ( t === 0 ) { - - dst[ dstOffset + 0 ] = x0; - dst[ dstOffset + 1 ] = y0; - dst[ dstOffset + 2 ] = z0; - dst[ dstOffset + 3 ] = w0; - return; + if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) { - } + let dot = x0 * x1 + y0 * y1 + z0 * z1 + w0 * w1; - if ( t === 1 ) { + if ( dot < 0 ) { - dst[ dstOffset + 0 ] = x1; - dst[ dstOffset + 1 ] = y1; - dst[ dstOffset + 2 ] = z1; - dst[ dstOffset + 3 ] = w1; - return; + x1 = - x1; + y1 = - y1; + z1 = - z1; + w1 = - w1; - } + dot = - dot; - if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) { + } let s = 1 - t; - const cos = x0 * x1 + y0 * y1 + z0 * z1 + w0 * w1, - dir = ( cos >= 0 ? 
1 : -1 ), - sqrSin = 1 - cos * cos; - // Skip the Slerp for tiny steps to avoid numeric problems: - if ( sqrSin > Number.EPSILON ) { + if ( dot < 0.9995 ) { - const sin = Math.sqrt( sqrSin ), - len = Math.atan2( sin, cos * dir ); + // slerp - s = Math.sin( s * len ) / sin; - t = Math.sin( t * len ) / sin; + const theta = Math.acos( dot ); + const sin = Math.sin( theta ); - } + s = Math.sin( s * theta ) / sin; + t = Math.sin( t * theta ) / sin; - const tDir = t * dir; + x0 = x0 * s + x1 * t; + y0 = y0 * s + y1 * t; + z0 = z0 * s + z1 * t; + w0 = w0 * s + w1 * t; - x0 = x0 * s + x1 * tDir; - y0 = y0 * s + y1 * tDir; - z0 = z0 * s + z1 * tDir; - w0 = w0 * s + w1 * tDir; + } else { + + // for small angles, lerp then normalize - // Normalize in case we just did a lerp: - if ( s === 1 - t ) { + x0 = x0 * s + x1 * t; + y0 = y0 * s + y1 * t; + z0 = z0 * s + z1 * t; + w0 = w0 * s + w1 * t; const f = 1 / Math.sqrt( x0 * x0 + y0 * y0 + z0 * z0 + w0 * w0 ); @@ -17973,7 +18320,7 @@ class Quaternion { break; default: - console.warn( 'THREE.Quaternion: .setFromEuler() encountered an unknown order: ' + order ); + warn( 'Quaternion: .setFromEuler() encountered an unknown order: ' + order ); } @@ -18321,77 +18668,61 @@ class Quaternion { } /** - * Performs a spherical linear interpolation between quaternions. + * Performs a spherical linear interpolation between this quaternion and the target quaternion. * * @param {Quaternion} qb - The target quaternion. - * @param {number} t - The interpolation factor in the closed interval `[0, 1]`. + * @param {number} t - The interpolation factor. A value in the range `[0,1]` will interpolate. A value outside the range `[0,1]` will extrapolate. * @return {Quaternion} A reference to this quaternion. 
*/ slerp( qb, t ) { - if ( t === 0 ) return this; - if ( t === 1 ) return this.copy( qb ); - - const x = this._x, y = this._y, z = this._z, w = this._w; + let x = qb._x, y = qb._y, z = qb._z, w = qb._w; - // http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/ + let dot = this.dot( qb ); - let cosHalfTheta = w * qb._w + x * qb._x + y * qb._y + z * qb._z; + if ( dot < 0 ) { - if ( cosHalfTheta < 0 ) { + x = - x; + y = - y; + z = - z; + w = - w; - this._w = - qb._w; - this._x = - qb._x; - this._y = - qb._y; - this._z = - qb._z; + dot = - dot; - cosHalfTheta = - cosHalfTheta; + } - } else { + let s = 1 - t; - this.copy( qb ); + if ( dot < 0.9995 ) { - } + // slerp - if ( cosHalfTheta >= 1.0 ) { + const theta = Math.acos( dot ); + const sin = Math.sin( theta ); - this._w = w; - this._x = x; - this._y = y; - this._z = z; + s = Math.sin( s * theta ) / sin; + t = Math.sin( t * theta ) / sin; - return this; + this._x = this._x * s + x * t; + this._y = this._y * s + y * t; + this._z = this._z * s + z * t; + this._w = this._w * s + w * t; - } + this._onChangeCallback(); - const sqrSinHalfTheta = 1.0 - cosHalfTheta * cosHalfTheta; + } else { - if ( sqrSinHalfTheta <= Number.EPSILON ) { + // for small angles, lerp then normalize - const s = 1 - t; - this._w = s * w + t * this._w; - this._x = s * x + t * this._x; - this._y = s * y + t * this._y; - this._z = s * z + t * this._z; + this._x = this._x * s + x * t; + this._y = this._y * s + y * t; + this._z = this._z * s + z * t; + this._w = this._w * s + w * t; this.normalize(); // normalize calls _onChangeCallback() - return this; - } - const sinHalfTheta = Math.sqrt( sqrSinHalfTheta ); - const halfTheta = Math.atan2( sinHalfTheta, cosHalfTheta ); - const ratioA = Math.sin( ( 1 - t ) * halfTheta ) / sinHalfTheta, - ratioB = Math.sin( t * halfTheta ) / sinHalfTheta; - - this._w = ( w * ratioA + this._w * ratioB ); - this._x = ( x * ratioA + this._x * ratioB ); - this._y = ( y * ratioA + this._y * ratioB 
); - this._z = ( z * ratioA + this._z * ratioB ); - - this._onChangeCallback(); - return this; } @@ -18647,7 +18978,7 @@ class Vector3 { } /** - * Sets the vector's x component to the given value + * Sets the vector's x component to the given value. * * @param {number} x - The value to set. * @return {Vector3} A reference to this vector. @@ -18661,7 +18992,7 @@ class Vector3 { } /** - * Sets the vector's y component to the given value + * Sets the vector's y component to the given value. * * @param {number} y - The value to set. * @return {Vector3} A reference to this vector. @@ -18675,7 +19006,7 @@ class Vector3 { } /** - * Sets the vector's z component to the given value + * Sets the vector's z component to the given value. * * @param {number} z - The value to set. * @return {Vector3} A reference to this vector. @@ -18928,7 +19259,7 @@ class Vector3 { */ applyEuler( euler ) { - return this.applyQuaternion( _quaternion$2.setFromEuler( euler ) ); + return this.applyQuaternion( _quaternion$3.setFromEuler( euler ) ); } @@ -18941,7 +19272,7 @@ class Vector3 { */ applyAxisAngle( axis, angle ) { - return this.applyQuaternion( _quaternion$2.setFromAxisAngle( axis, angle ) ); + return this.applyQuaternion( _quaternion$3.setFromAxisAngle( axis, angle ) ); } @@ -19285,8 +19616,6 @@ class Vector3 { } - // TODO lengthSquared? - /** * Computes the square of the Euclidean length (straight-line length) from * (0, 0, 0) to (x, y, z). If you are comparing the lengths of vectors, you should @@ -19797,7 +20126,7 @@ class Vector3 { } const _vector$8 = /*@__PURE__*/ new Vector3(); -const _quaternion$2 = /*@__PURE__*/ new Quaternion(); +const _quaternion$3 = /*@__PURE__*/ new Quaternion(); /** * Represents a 3x3 matrix. 
@@ -19805,7 +20134,7 @@ const _quaternion$2 = /*@__PURE__*/ new Quaternion(); * A Note on Row-Major and Column-Major Ordering: * * The constructor and {@link Matrix3#set} method take arguments in - * [row-major]{@link https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order} + * [row-major](https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order) * order, while internally they are stored in the {@link Matrix3#elements} array in column-major order. * This means that calling: * ```js @@ -20079,7 +20408,7 @@ class Matrix3 { } /** - * Inverts this matrix, using the [analytic method]{@link https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution}. + * Inverts this matrix, using the [analytic method](https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution). * You can not invert with a determinant of zero. If you attempt this, the method produces * a zero matrix instead. * @@ -20411,75 +20740,6 @@ class Matrix3 { const _m3 = /*@__PURE__*/ new Matrix3(); -function arrayNeedsUint32( array ) { - - // assumes larger values usually on last - - for ( let i = array.length - 1; i >= 0; -- i ) { - - if ( array[ i ] >= 65535 ) return true; // account for PRIMITIVE_RESTART_FIXED_INDEX, #24565 - - } - - return false; - -} - -function createElementNS( name ) { - - return document.createElementNS( 'http://www.w3.org/1999/xhtml', name ); - -} - -function createCanvasElement() { - - const canvas = createElementNS( 'canvas' ); - canvas.style.display = 'block'; - return canvas; - -} - -const _cache = {}; - -function warnOnce( message ) { - - if ( message in _cache ) return; - - _cache[ message ] = true; - - console.warn( message ); - -} - -function probeAsync( gl, sync, interval ) { - - return new Promise( function ( resolve, reject ) { - - function probe() { - - switch ( gl.clientWaitSync( sync, gl.SYNC_FLUSH_COMMANDS_BIT, 0 ) ) { - - case gl.WAIT_FAILED: - reject(); - break; - - case gl.TIMEOUT_EXPIRED: - setTimeout( probe, 
interval ); - break; - - default: - resolve(); - - } - - } - - setTimeout( probe, interval ); - - } ); - -} - const LINEAR_REC709_TO_XYZ = /*@__PURE__*/ new Matrix3().set( 0.4123908, 0.3575843, 0.1804808, 0.2126390, 0.7151687, 0.0721923, @@ -20625,7 +20885,7 @@ function createColorManagement() { fromWorkingColorSpace: function ( color, targetColorSpace ) { - warnOnce( 'THREE.ColorManagement: .fromWorkingColorSpace() has been renamed to .workingToColorSpace().' ); // @deprecated, r177 + warnOnce( 'ColorManagement: .fromWorkingColorSpace() has been renamed to .workingToColorSpace().' ); // @deprecated, r177 return ColorManagement.workingToColorSpace( color, targetColorSpace ); @@ -20633,7 +20893,7 @@ function createColorManagement() { toWorkingColorSpace: function ( color, sourceColorSpace ) { - warnOnce( 'THREE.ColorManagement: .toWorkingColorSpace() has been renamed to .colorSpaceToWorking().' ); // @deprecated, r177 + warnOnce( 'ColorManagement: .toWorkingColorSpace() has been renamed to .colorSpaceToWorking().' ); // @deprecated, r177 return ColorManagement.colorSpaceToWorking( color, sourceColorSpace ); @@ -20816,7 +21076,7 @@ class ImageUtils { } else { - console.warn( 'THREE.ImageUtils.sRGBToLinear(): Unsupported image type. No color space conversion applied.' ); + warn( 'ImageUtils.sRGBToLinear(): Unsupported image type. No color space conversion applied.' ); return image; } @@ -20911,7 +21171,7 @@ class Source { target.set( data.videoWidth, data.videoHeight, 0 ); - } else if ( data instanceof VideoFrame ) { + } else if ( ( typeof VideoFrame !== 'undefined' ) && ( data instanceof VideoFrame ) ) { target.set( data.displayHeight, data.displayWidth, 0 ); @@ -21041,7 +21301,7 @@ function serializeImage( image ) { } else { - console.warn( 'THREE.Texture: Unable to serialize Texture.' ); + warn( 'Texture: Unable to serialize Texture.' 
); return {}; } @@ -21101,7 +21361,7 @@ class Texture extends EventDispatcher { Object.defineProperty( this, 'id', { value: _textureId ++ } ); /** - * The UUID of the material. + * The UUID of the texture. * * @type {string} * @readonly @@ -21109,7 +21369,7 @@ class Texture extends EventDispatcher { this.uuid = generateUUID(); /** - * The name of the material. + * The name of the texture. * * @type {string} */ @@ -21191,7 +21451,7 @@ class Texture extends EventDispatcher { * texture samples being used. * * @type {number} - * @default 0 + * @default Texture.DEFAULT_ANISOTROPY */ this.anisotropy = anisotropy; @@ -21555,7 +21815,7 @@ class Texture extends EventDispatcher { if ( newValue === undefined ) { - console.warn( `THREE.Texture.setValues(): parameter '${ key }' has value of undefined.` ); + warn( `Texture.setValues(): parameter '${ key }' has value of undefined.` ); continue; } @@ -21564,7 +21824,7 @@ class Texture extends EventDispatcher { if ( currentValue === undefined ) { - console.warn( `THREE.Texture.setValues(): property '${ key }' does not exist.` ); + warn( `Texture.setValues(): property '${ key }' does not exist.` ); continue; } @@ -23011,10 +23271,6 @@ class RenderTarget extends EventDispatcher { */ this.viewport = new Vector4( 0, 0, width, height ); - const image = { width: width, height: height, depth: options.depth }; - - const texture = new Texture( image ); - /** * An array of textures. Each color attachment is represented as a separate texture. * Has at least a single entry for the default color attachment. 
@@ -23023,6 +23279,9 @@ class RenderTarget extends EventDispatcher { */ this.textures = []; + const image = { width: width, height: height, depth: options.depth }; + const texture = new Texture( image ); + const count = options.count; for ( let i = 0; i < count; i ++ ) { @@ -23181,7 +23440,16 @@ class RenderTarget extends EventDispatcher { this.textures[ i ].image.width = width; this.textures[ i ].image.height = height; this.textures[ i ].image.depth = depth; - this.textures[ i ].isArrayTexture = this.textures[ i ].image.depth > 1; + + if ( this.textures[ i ].isData3DTexture !== true ) { // Fix for #31693 + + // TODO: Reconsider setting isArrayTexture flag here and in the ctor of Texture. + // Maybe a method `isArrayTexture()` or just a getter could replace a flag since + // both are evaluated on each call? + + this.textures[ i ].isArrayTexture = this.textures[ i ].image.depth > 1; + + } } @@ -23571,7 +23839,7 @@ class Color { /** * Sets this color from a CSS-style string. For example, `rgb(250, 0,0)`, * `rgb(100%, 0%, 0%)`, `hsl(0, 100%, 50%)`, `#ff0000`, `#f00`, or `red` ( or - * any [X11 color name]{@link https://en.wikipedia.org/wiki/X11_color_names#Color_name_chart} - + * any [X11 color name](https://en.wikipedia.org/wiki/X11_color_names#Color_name_chart) - * all 140 color names are supported). * * @param {string} style - Color as a CSS-style string. @@ -23586,7 +23854,7 @@ class Color { if ( parseFloat( string ) < 1 ) { - console.warn( 'THREE.Color: Alpha component of ' + style + ' will be ignored.' ); + warn( 'Color: Alpha component of ' + style + ' will be ignored.' 
); } @@ -23662,7 +23930,7 @@ class Color { default: - console.warn( 'THREE.Color: Unknown color model ' + style ); + warn( 'Color: Unknown color model ' + style ); } @@ -23690,7 +23958,7 @@ class Color { } else { - console.warn( 'THREE.Color: Invalid hex color ' + style ); + warn( 'Color: Invalid hex color ' + style ); } @@ -23730,7 +23998,7 @@ class Color { } else { // unknown color - console.warn( 'THREE.Color: Unknown color ' + style ); + warn( 'Color: Unknown color ' + style ); } @@ -25465,7 +25733,7 @@ class Sphere { * Returns a serialized structure of the bounding sphere. * * @param {Object} json - The serialized json to set the sphere from. - * @return {Box3} A reference to this bounding sphere. + * @return {Sphere} A reference to this bounding sphere. */ fromJSON( json ) { @@ -25483,7 +25751,7 @@ const _normalMatrix = /*@__PURE__*/ new Matrix3(); /** * A two dimensional surface that extends infinitely in 3D space, represented - * in [Hessian normal form]{@link http://mathworld.wolfram.com/HessianNormalForm.html} + * in [Hessian normal form](http://mathworld.wolfram.com/HessianNormalForm.html) * by a unit length normal vector and a constant. */ class Plane { @@ -26121,7 +26389,7 @@ class Frustum { * Represents a 4x4 matrix. * * The most common use of a 4x4 matrix in 3D computer graphics is as a transformation matrix. 
- * For an introduction to transformation matrices as used in WebGL, check out [this tutorial]{@link https://www.opengl-tutorial.org/beginners-tutorials/tutorial-3-matrices} + * For an introduction to transformation matrices as used in WebGL, check out [this tutorial](https://www.opengl-tutorial.org/beginners-tutorials/tutorial-3-matrices) * * This allows a 3D vector representing a point in 3D space to undergo * transformations such as translation, rotation, shear, scale, reflection, @@ -26131,7 +26399,7 @@ class Frustum { * A Note on Row-Major and Column-Major Ordering: * * The constructor and {@link Matrix3#set} method take arguments in - * [row-major]{@link https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order} + * [row-major](https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order) * order, while internally they are stored in the {@link Matrix3#elements} array in column-major order. * This means that calling: * ```js @@ -26350,6 +26618,16 @@ class Matrix4 { */ extractBasis( xAxis, yAxis, zAxis ) { + if ( this.determinant() === 0 ) { + + xAxis.set( 1, 0, 0 ); + yAxis.set( 0, 1, 0 ); + zAxis.set( 0, 0, 1 ); + + return this; + + } + xAxis.setFromMatrixColumn( this, 0 ); yAxis.setFromMatrixColumn( this, 1 ); zAxis.setFromMatrixColumn( this, 2 ); @@ -26390,6 +26668,12 @@ class Matrix4 { */ extractRotation( m ) { + if ( m.determinant() === 0 ) { + + return this.identity(); + + } + const te = this.elements; const me = m.elements; @@ -26425,7 +26709,7 @@ class Matrix4 { * Sets the rotation component (the upper left 3x3 matrix) of this matrix to * the rotation specified by the given Euler angles. The rest of * the matrix is set to the identity. Depending on the {@link Euler#order}, - * there are six possible outcomes. See [this page]{@link https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix} + * there are six possible outcomes. 
See [this page](https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix) * for a complete list. * * @param {Euler} euler - The Euler angles. @@ -26555,7 +26839,7 @@ class Matrix4 { /** * Sets the rotation component of this matrix to the rotation specified by - * the given Quaternion as outlined [here]{@link https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion} + * the given Quaternion as outlined [here](https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion) * The rest of the matrix is set to the identity. * * @param {Quaternion} q - The Quaternion. @@ -26717,7 +27001,7 @@ class Matrix4 { /** * Computes and returns the determinant of this matrix. * - * Based on the method outlined [here]{@link http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.html}. + * Based on the method outlined [here](http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.html). * * @return {number} The determinant. */ @@ -26730,43 +27014,18 @@ class Matrix4 { const n31 = te[ 2 ], n32 = te[ 6 ], n33 = te[ 10 ], n34 = te[ 14 ]; const n41 = te[ 3 ], n42 = te[ 7 ], n43 = te[ 11 ], n44 = te[ 15 ]; - //TODO: make this more efficient - - return ( - n41 * ( - + n14 * n23 * n32 - - n13 * n24 * n32 - - n14 * n22 * n33 - + n12 * n24 * n33 - + n13 * n22 * n34 - - n12 * n23 * n34 - ) + - n42 * ( - + n11 * n23 * n34 - - n11 * n24 * n33 - + n14 * n21 * n33 - - n13 * n21 * n34 - + n13 * n24 * n31 - - n14 * n23 * n31 - ) + - n43 * ( - + n11 * n24 * n32 - - n11 * n22 * n34 - - n14 * n21 * n32 - + n12 * n21 * n34 - + n14 * n22 * n31 - - n12 * n24 * n31 - ) + - n44 * ( - - n13 * n22 * n31 - - n11 * n23 * n32 - + n11 * n22 * n33 - + n13 * n21 * n32 - - n12 * n21 * n33 - + n12 * n23 * n31 - ) + const t11 = n23 * n34 - n24 * n33; + const t12 = n22 * n34 - n24 * n32; + const t13 = n22 * n33 - n23 * n32; - ); + const t21 = n21 * n34 - n24 * n31; + const t22 = n21 * n33 - n23 * n31; + const t23 = n21 * n32 - n22 * n31; + + return n11 * ( n42 * t11 - n43 
* t12 + n44 * t13 ) - + n12 * ( n41 * t11 - n43 * t21 + n44 * t22 ) + + n13 * ( n41 * t12 - n42 * t21 + n44 * t23 ) - + n14 * ( n41 * t13 - n42 * t22 + n43 * t23 ); } @@ -26824,7 +27083,7 @@ class Matrix4 { } /** - * Inverts this matrix, using the [analytic method]{@link https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution}. + * Inverts this matrix, using the [analytic method](https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution). * You can not invert with a determinant of zero. If you attempt this, the method produces * a zero matrix instead. * @@ -26832,7 +27091,7 @@ class Matrix4 { */ invert() { - // based on http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.htm + // based on https://github.com/toji/gl-matrix const te = this.elements, n11 = te[ 0 ], n21 = te[ 1 ], n31 = te[ 2 ], n41 = te[ 3 ], @@ -26840,36 +27099,44 @@ class Matrix4 { n13 = te[ 8 ], n23 = te[ 9 ], n33 = te[ 10 ], n43 = te[ 11 ], n14 = te[ 12 ], n24 = te[ 13 ], n34 = te[ 14 ], n44 = te[ 15 ], - t11 = n23 * n34 * n42 - n24 * n33 * n42 + n24 * n32 * n43 - n22 * n34 * n43 - n23 * n32 * n44 + n22 * n33 * n44, - t12 = n14 * n33 * n42 - n13 * n34 * n42 - n14 * n32 * n43 + n12 * n34 * n43 + n13 * n32 * n44 - n12 * n33 * n44, - t13 = n13 * n24 * n42 - n14 * n23 * n42 + n14 * n22 * n43 - n12 * n24 * n43 - n13 * n22 * n44 + n12 * n23 * n44, - t14 = n14 * n23 * n32 - n13 * n24 * n32 - n14 * n22 * n33 + n12 * n24 * n33 + n13 * n22 * n34 - n12 * n23 * n34; - - const det = n11 * t11 + n21 * t12 + n31 * t13 + n41 * t14; + t1 = n11 * n22 - n21 * n12, + t2 = n11 * n32 - n31 * n12, + t3 = n11 * n42 - n41 * n12, + t4 = n21 * n32 - n31 * n22, + t5 = n21 * n42 - n41 * n22, + t6 = n31 * n42 - n41 * n32, + t7 = n13 * n24 - n23 * n14, + t8 = n13 * n34 - n33 * n14, + t9 = n13 * n44 - n43 * n14, + t10 = n23 * n34 - n33 * n24, + t11 = n23 * n44 - n43 * n24, + t12 = n33 * n44 - n43 * n34; + + const det = t1 * t12 - t2 * t11 + t3 * t10 + t4 * t9 - t5 * t8 + t6 * t7; if 
( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ); const detInv = 1 / det; - te[ 0 ] = t11 * detInv; - te[ 1 ] = ( n24 * n33 * n41 - n23 * n34 * n41 - n24 * n31 * n43 + n21 * n34 * n43 + n23 * n31 * n44 - n21 * n33 * n44 ) * detInv; - te[ 2 ] = ( n22 * n34 * n41 - n24 * n32 * n41 + n24 * n31 * n42 - n21 * n34 * n42 - n22 * n31 * n44 + n21 * n32 * n44 ) * detInv; - te[ 3 ] = ( n23 * n32 * n41 - n22 * n33 * n41 - n23 * n31 * n42 + n21 * n33 * n42 + n22 * n31 * n43 - n21 * n32 * n43 ) * detInv; + te[ 0 ] = ( n22 * t12 - n32 * t11 + n42 * t10 ) * detInv; + te[ 1 ] = ( n31 * t11 - n21 * t12 - n41 * t10 ) * detInv; + te[ 2 ] = ( n24 * t6 - n34 * t5 + n44 * t4 ) * detInv; + te[ 3 ] = ( n33 * t5 - n23 * t6 - n43 * t4 ) * detInv; - te[ 4 ] = t12 * detInv; - te[ 5 ] = ( n13 * n34 * n41 - n14 * n33 * n41 + n14 * n31 * n43 - n11 * n34 * n43 - n13 * n31 * n44 + n11 * n33 * n44 ) * detInv; - te[ 6 ] = ( n14 * n32 * n41 - n12 * n34 * n41 - n14 * n31 * n42 + n11 * n34 * n42 + n12 * n31 * n44 - n11 * n32 * n44 ) * detInv; - te[ 7 ] = ( n12 * n33 * n41 - n13 * n32 * n41 + n13 * n31 * n42 - n11 * n33 * n42 - n12 * n31 * n43 + n11 * n32 * n43 ) * detInv; + te[ 4 ] = ( n32 * t9 - n12 * t12 - n42 * t8 ) * detInv; + te[ 5 ] = ( n11 * t12 - n31 * t9 + n41 * t8 ) * detInv; + te[ 6 ] = ( n34 * t3 - n14 * t6 - n44 * t2 ) * detInv; + te[ 7 ] = ( n13 * t6 - n33 * t3 + n43 * t2 ) * detInv; - te[ 8 ] = t13 * detInv; - te[ 9 ] = ( n14 * n23 * n41 - n13 * n24 * n41 - n14 * n21 * n43 + n11 * n24 * n43 + n13 * n21 * n44 - n11 * n23 * n44 ) * detInv; - te[ 10 ] = ( n12 * n24 * n41 - n14 * n22 * n41 + n14 * n21 * n42 - n11 * n24 * n42 - n12 * n21 * n44 + n11 * n22 * n44 ) * detInv; - te[ 11 ] = ( n13 * n22 * n41 - n12 * n23 * n41 - n13 * n21 * n42 + n11 * n23 * n42 + n12 * n21 * n43 - n11 * n22 * n43 ) * detInv; + te[ 8 ] = ( n12 * t11 - n22 * t9 + n42 * t7 ) * detInv; + te[ 9 ] = ( n21 * t9 - n11 * t11 - n41 * t7 ) * detInv; + te[ 10 ] = ( n14 * t5 - n24 * t3 + n44 * t1 
) * detInv; + te[ 11 ] = ( n23 * t3 - n13 * t5 - n43 * t1 ) * detInv; - te[ 12 ] = t14 * detInv; - te[ 13 ] = ( n13 * n24 * n31 - n14 * n23 * n31 + n14 * n21 * n33 - n11 * n24 * n33 - n13 * n21 * n34 + n11 * n23 * n34 ) * detInv; - te[ 14 ] = ( n14 * n22 * n31 - n12 * n24 * n31 - n14 * n21 * n32 + n11 * n24 * n32 + n12 * n21 * n34 - n11 * n22 * n34 ) * detInv; - te[ 15 ] = ( n12 * n23 * n31 - n13 * n22 * n31 + n13 * n21 * n32 - n11 * n23 * n32 - n12 * n21 * n33 + n11 * n22 * n33 ) * detInv; + te[ 12 ] = ( n22 * t8 - n12 * t10 - n32 * t7 ) * detInv; + te[ 13 ] = ( n11 * t10 - n21 * t8 + n31 * t7 ) * detInv; + te[ 14 ] = ( n24 * t2 - n14 * t4 - n34 * t1 ) * detInv; + te[ 15 ] = ( n13 * t4 - n23 * t2 + n33 * t1 ) * detInv; return this; @@ -27027,7 +27294,7 @@ class Matrix4 { * the given angle. * * This is a somewhat controversial but mathematically sound alternative to - * rotating via Quaternions. See the discussion [here]{@link https://www.gamedev.net/articles/programming/math-and-physics/do-we-really-need-quaternions-r1199}. + * rotating via Quaternions. See the discussion [here](https://www.gamedev.net/articles/programming/math-and-physics/do-we-really-need-quaternions-r1199). * * @param {Vector3} axis - The normalized rotation axis. * @param {number} angle - The rotation in radians. 
@@ -27167,18 +27434,28 @@ class Matrix4 { const te = this.elements; + position.x = te[ 12 ]; + position.y = te[ 13 ]; + position.z = te[ 14 ]; + + const det = this.determinant(); + + if ( det === 0 ) { + + scale.set( 1, 1, 1 ); + quaternion.identity(); + + return this; + + } + let sx = _v1$4.set( te[ 0 ], te[ 1 ], te[ 2 ] ).length(); const sy = _v1$4.set( te[ 4 ], te[ 5 ], te[ 6 ] ).length(); const sz = _v1$4.set( te[ 8 ], te[ 9 ], te[ 10 ] ).length(); - // if determine is negative, we need to invert one scale - const det = this.determinant(); + // if determinant is negative, we need to invert one scale if ( det < 0 ) sx = - sx; - position.x = te[ 12 ]; - position.y = te[ 13 ]; - position.z = te[ 14 ]; - // scale the rotation part _m1$4.copy( this ); @@ -28443,7 +28720,7 @@ class Float32BufferAttribute extends BufferAttribute { } const _matrix$2 = /*@__PURE__*/ new Matrix4(); -const _quaternion$1 = /*@__PURE__*/ new Quaternion(); +const _quaternion$2 = /*@__PURE__*/ new Quaternion(); /** * A class representing Euler angles. 
@@ -28744,7 +29021,7 @@ class Euler { default: - console.warn( 'THREE.Euler: .setFromRotationMatrix() encountered an unknown order: ' + order ); + warn( 'Euler: .setFromRotationMatrix() encountered an unknown order: ' + order ); } @@ -28797,9 +29074,9 @@ class Euler { */ reorder( newOrder ) { - _quaternion$1.setFromEuler( this ); + _quaternion$2.setFromEuler( this ); - return this.setFromQuaternion( _quaternion$1, newOrder ); + return this.setFromQuaternion( _quaternion$2, newOrder ); } @@ -29011,9 +29288,9 @@ const _q1 = /*@__PURE__*/ new Quaternion(); const _m1$3 = /*@__PURE__*/ new Matrix4(); const _target = /*@__PURE__*/ new Vector3(); -const _position$1 = /*@__PURE__*/ new Vector3(); -const _scale = /*@__PURE__*/ new Vector3(); -const _quaternion = /*@__PURE__*/ new Quaternion(); +const _position$2 = /*@__PURE__*/ new Vector3(); +const _scale$1 = /*@__PURE__*/ new Vector3(); +const _quaternion$1 = /*@__PURE__*/ new Quaternion(); const _xAxis = /*@__PURE__*/ new Vector3( 1, 0, 0 ); const _yAxis = /*@__PURE__*/ new Vector3( 0, 1, 0 ); @@ -29238,7 +29515,8 @@ class Object3D extends EventDispatcher { /** * When set to `true`, the engine automatically computes the local matrix from position, - * rotation and scale every frame. + * rotation and scale every frame. If set to `false`, the app is responsible for recomputing + * the local matrix by calling `updateMatrix()`. * * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_AUTO_UPDATE`. * @@ -29249,7 +29527,8 @@ class Object3D extends EventDispatcher { /** * When set to `true`, the engine automatically computes the world matrix from the current local - * matrix and the object's transformation hierarchy. + * matrix and the object's transformation hierarchy. If set to `false`, the app is responsible for + * recomputing the world matrix by directly updating the `matrixWorld` property. * * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_WORLD_AUTO_UPDATE`. 
* @@ -29350,6 +29629,19 @@ class Object3D extends EventDispatcher { */ this.customDistanceMaterial = undefined; + /** + * Whether the 3D object is supposed to be static or not. If set to `true`, it means + * the 3D object is not going to be changed after the initial renderer. This includes + * geometry and material settings. A static 3D object can be processed by the renderer + * slightly faster since certain state checks can be bypassed. + * + * Only relevant in context of {@link WebGPURenderer}. + * + * @type {boolean} + * @default false + */ + this.static = false; + /** * An object that can be used to store custom data about the 3D object. It * should not hold references to functions as these will not be cloned. @@ -29358,6 +29650,16 @@ class Object3D extends EventDispatcher { */ this.userData = {}; + /** + * The pivot point for rotation and scale transformations. + * When set, rotation and scale are applied around this point + * instead of the object's origin. + * + * @type {?Vector3} + * @default null + */ + this.pivot = null; + } /** @@ -29640,7 +29942,7 @@ class Object3D extends EventDispatcher { } /** - * Converts the given vector from this 3D object's word space to local space. + * Converts the given vector from this 3D object's world space to local space. * * @param {Vector3} vector - The vector to convert. * @return {Vector3} The converted vector. 
@@ -29680,15 +29982,15 @@ class Object3D extends EventDispatcher { this.updateWorldMatrix( true, false ); - _position$1.setFromMatrixPosition( this.matrixWorld ); + _position$2.setFromMatrixPosition( this.matrixWorld ); if ( this.isCamera || this.isLight ) { - _m1$3.lookAt( _position$1, _target, this.up ); + _m1$3.lookAt( _position$2, _target, this.up ); } else { - _m1$3.lookAt( _target, _position$1, this.up ); + _m1$3.lookAt( _target, _position$2, this.up ); } @@ -29730,7 +30032,7 @@ class Object3D extends EventDispatcher { if ( object === this ) { - console.error( 'THREE.Object3D.add: object can\'t be added as a child of itself.', object ); + error( 'Object3D.add: object can\'t be added as a child of itself.', object ); return this; } @@ -29749,7 +30051,7 @@ class Object3D extends EventDispatcher { } else { - console.error( 'THREE.Object3D.add: object not an instance of THREE.Object3D.', object ); + error( 'Object3D.add: object not an instance of THREE.Object3D.', object ); } @@ -29982,7 +30284,7 @@ class Object3D extends EventDispatcher { this.updateWorldMatrix( true, false ); - this.matrixWorld.decompose( _position$1, target, _scale ); + this.matrixWorld.decompose( _position$2, target, _scale$1 ); return target; @@ -29998,7 +30300,7 @@ class Object3D extends EventDispatcher { this.updateWorldMatrix( true, false ); - this.matrixWorld.decompose( _position$1, _quaternion, target ); + this.matrixWorld.decompose( _position$2, _quaternion$1, target ); return target; @@ -30105,6 +30407,19 @@ class Object3D extends EventDispatcher { this.matrix.compose( this.position, this.quaternion, this.scale ); + const pivot = this.pivot; + + if ( pivot !== null ) { + + const px = pivot.x, py = pivot.y, pz = pivot.z; + const te = this.matrix.elements; + + te[ 12 ] += px - te[ 0 ] * px - te[ 4 ] * py - te[ 8 ] * pz; + te[ 13 ] += py - te[ 1 ] * px - te[ 5 ] * py - te[ 9 ] * pz; + te[ 14 ] += pz - te[ 2 ] * px - te[ 6 ] * py - te[ 10 ] * pz; + + } + this.matrixWorldNeedsUpdate = 
true; } @@ -30118,7 +30433,7 @@ class Object3D extends EventDispatcher { * `true` by default. Set these flags to `false` if you need more control over the update matrix process. * * @param {boolean} [force=false] - When set to `true`, a recomputation of world matrices is forced even - * when {@link Object3D#matrixWorldAutoUpdate} is set to `false`. + * when {@link Object3D#matrixWorldNeedsUpdate} is `false`. */ updateMatrixWorld( force ) { @@ -30263,14 +30578,20 @@ class Object3D extends EventDispatcher { if ( this.visible === false ) object.visible = false; if ( this.frustumCulled === false ) object.frustumCulled = false; if ( this.renderOrder !== 0 ) object.renderOrder = this.renderOrder; + if ( this.static !== false ) object.static = this.static; if ( Object.keys( this.userData ).length > 0 ) object.userData = this.userData; object.layers = this.layers.mask; object.matrix = this.matrix.toArray(); object.up = this.up.toArray(); + if ( this.pivot !== null ) object.pivot = this.pivot.toArray(); + if ( this.matrixAutoUpdate === false ) object.matrixAutoUpdate = false; + if ( this.morphTargetDictionary !== undefined ) object.morphTargetDictionary = Object.assign( {}, this.morphTargetDictionary ); + if ( this.morphTargetInfluences !== undefined ) object.morphTargetInfluences = this.morphTargetInfluences.slice(); + // object specific properties if ( this.isInstancedMesh ) { @@ -30544,6 +30865,12 @@ class Object3D extends EventDispatcher { this.quaternion.copy( source.quaternion ); this.scale.copy( source.scale ); + if ( source.pivot !== null ) { + + this.pivot = source.pivot.clone(); + + } + this.matrix.copy( source.matrix ); this.matrixWorld.copy( source.matrixWorld ); @@ -30561,6 +30888,8 @@ class Object3D extends EventDispatcher { this.frustumCulled = source.frustumCulled; this.renderOrder = source.renderOrder; + this.static = source.static; + this.animations = source.animations.slice(); this.userData = JSON.parse( JSON.stringify( source.userData ) ); @@ -30713,6 
+31042,16 @@ class BufferGeometry extends EventDispatcher { */ this.indirect = null; + /** + * The offset, in bytes, into the indirect drawing buffer where the value data begins. If an array is provided, multiple indirect draw calls will be made for each offset. + * + * Can only be used with {@link WebGPURenderer} and a WebGPU backend. + * + * @type {number|Array} + * @default 0 + */ + this.indirectOffset = 0; + /** * This dictionary has as id the name of the attribute to be set and as value * the buffer attribute to set it to. Rather than accessing this property directly, @@ -30726,7 +31065,7 @@ class BufferGeometry extends EventDispatcher { * This dictionary holds the morph targets of the geometry. * * Note: Once the geometry has been rendered, the morph attribute data cannot - * be changed. You will have to call `dispose()?, and create a new geometry instance. + * be changed. You will have to call `dispose()`, and create a new geometry instance. * * @type {Object} */ @@ -30826,11 +31165,13 @@ class BufferGeometry extends EventDispatcher { * Sets the given indirect attribute to this geometry. * * @param {BufferAttribute} indirect - The attribute holding indirect draw calls. + * @param {number|Array} [indirectOffset=0] - The offset, in bytes, into the indirect drawing buffer where the value data begins. If an array is provided, multiple indirect draw calls will be made for each offset. * @return {BufferGeometry} A reference to this instance. */ - setIndirect( indirect ) { + setIndirect( indirect, indirectOffset = 0 ) { this.indirect = indirect; + this.indirectOffset = indirectOffset; return this; @@ -31198,7 +31539,7 @@ class BufferGeometry extends EventDispatcher { if ( points.length > positionAttribute.count ) { - console.warn( 'THREE.BufferGeometry: Buffer size too small for points data. Use .dispose() and create a new geometry.' ); + warn( 'BufferGeometry: Buffer size too small for points data. Use .dispose() and create a new geometry.' 
); } @@ -31228,7 +31569,7 @@ class BufferGeometry extends EventDispatcher { if ( position && position.isGLBufferAttribute ) { - console.error( 'THREE.BufferGeometry.computeBoundingBox(): GLBufferAttribute requires a manual bounding box.', this ); + error( 'BufferGeometry.computeBoundingBox(): GLBufferAttribute requires a manual bounding box.', this ); this.boundingBox.set( new Vector3( - Infinity, - Infinity, - Infinity ), @@ -31279,7 +31620,7 @@ class BufferGeometry extends EventDispatcher { if ( isNaN( this.boundingBox.min.x ) || isNaN( this.boundingBox.min.y ) || isNaN( this.boundingBox.min.z ) ) { - console.error( 'THREE.BufferGeometry.computeBoundingBox(): Computed min/max have NaN values. The "position" attribute is likely to have NaN values.', this ); + error( 'BufferGeometry.computeBoundingBox(): Computed min/max have NaN values. The "position" attribute is likely to have NaN values.', this ); } @@ -31303,7 +31644,7 @@ class BufferGeometry extends EventDispatcher { if ( position && position.isGLBufferAttribute ) { - console.error( 'THREE.BufferGeometry.computeBoundingSphere(): GLBufferAttribute requires a manual bounding sphere.', this ); + error( 'BufferGeometry.computeBoundingSphere(): GLBufferAttribute requires a manual bounding sphere.', this ); this.boundingSphere.set( new Vector3(), Infinity ); @@ -31394,7 +31735,7 @@ class BufferGeometry extends EventDispatcher { if ( isNaN( this.boundingSphere.radius ) ) { - console.error( 'THREE.BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.', this ); + error( 'BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.', this ); } @@ -31422,7 +31763,7 @@ class BufferGeometry extends EventDispatcher { attributes.normal === undefined || attributes.uv === undefined ) { - console.error( 'THREE.BufferGeometry: .computeTangents() failed. 
Missing required attributes (index, position, normal or uv)' ); + error( 'BufferGeometry: .computeTangents() failed. Missing required attributes (index, position, normal or uv)' ); return; } @@ -31732,7 +32073,7 @@ class BufferGeometry extends EventDispatcher { if ( this.index === null ) { - console.warn( 'THREE.BufferGeometry.toNonIndexed(): BufferGeometry is already non-indexed.' ); + warn( 'BufferGeometry.toNonIndexed(): BufferGeometry is already non-indexed.' ); return this; } @@ -32058,6 +32399,7 @@ class BufferGeometry extends EventDispatcher { * ``` * * @augments BufferGeometry + * @demo scenes/geometry-browser.html#BoxGeometry */ class BoxGeometry extends BufferGeometry { @@ -32269,6 +32611,7 @@ class BoxGeometry extends BufferGeometry { * ``` * * @augments BufferGeometry + * @demo scenes/geometry-browser.html#PlaneGeometry */ class PlaneGeometry extends BufferGeometry { @@ -32905,7 +33248,7 @@ class Material extends EventDispatcher { * * This method can only be used when rendering with {@link WebGLRenderer}. The * recommended approach when customizing materials is to use `WebGPURenderer` with the new - * Node Material system and [TSL]{@link https://github.com/mrdoob/three.js/wiki/Three.js-Shading-Language}. + * Node Material system and [TSL](https://github.com/mrdoob/three.js/wiki/Three.js-Shading-Language). * * @param {{vertexShader:string,fragmentShader:string,uniforms:Object}} shaderobject - The object holds the uniforms and the vertex and fragment shader source. * @param {WebGLRenderer} renderer - A reference to the renderer. 
@@ -32944,7 +33287,7 @@ class Material extends EventDispatcher { if ( newValue === undefined ) { - console.warn( `THREE.Material: parameter '${ key }' has value of undefined.` ); + warn( `Material: parameter '${ key }' has value of undefined.` ); continue; } @@ -32953,7 +33296,7 @@ class Material extends EventDispatcher { if ( currentValue === undefined ) { - console.warn( `THREE.Material: '${ key }' is not a property of THREE.${ this.type }.` ); + warn( `Material: '${ key }' is not a property of THREE.${ this.type }.` ); continue; } @@ -33214,6 +33557,7 @@ class Material extends EventDispatcher { if ( this.alphaToCoverage === true ) data.alphaToCoverage = true; if ( this.premultipliedAlpha === true ) data.premultipliedAlpha = true; if ( this.forceSinglePass === true ) data.forceSinglePass = true; + if ( this.allowOverride === false ) data.allowOverride = false; if ( this.wireframe === true ) data.wireframe = true; if ( this.wireframeLinewidth > 1 ) data.wireframeLinewidth = this.wireframeLinewidth; @@ -33349,6 +33693,7 @@ class Material extends EventDispatcher { this.alphaToCoverage = source.alphaToCoverage; this.premultipliedAlpha = source.premultipliedAlpha; this.forceSinglePass = source.forceSinglePass; + this.allowOverride = source.allowOverride; this.visible = source.visible; @@ -33394,8 +33739,20 @@ class Material extends EventDispatcher { } -// Uniform Utilities +/** + * Provides utility functions for managing uniforms. + * + * @module UniformsUtils + */ +/** + * Clones the given uniform definitions by performing a deep-copy. That means + * if the value of a uniform refers to an object like a Vector3 or Texture, + * the cloned uniform will refer to a new object reference. + * + * @param {Object} src - An object representing uniform definitions. + * @return {Object} The cloned uniforms. 
+ */ function cloneUniforms( src ) { const dst = {}; @@ -33415,7 +33772,7 @@ function cloneUniforms( src ) { if ( property.isRenderTargetTexture ) { - console.warn( 'UniformsUtils: Textures of render targets cannot be cloned via cloneUniforms() or mergeUniforms().' ); + warn( 'UniformsUtils: Textures of render targets cannot be cloned via cloneUniforms() or mergeUniforms().' ); dst[ u ][ p ] = null; } else { @@ -33442,6 +33799,14 @@ function cloneUniforms( src ) { } +/** + * Merges the given uniform definitions into a single object. Since the + * method internally uses cloneUniforms(), it performs a deep-copy when + * producing the merged uniform definitions. + * + * @param {Array} uniforms - An array of objects containing uniform definitions. + * @return {Object} The merged uniforms. + */ function mergeUniforms( uniforms ) { const merged = {}; @@ -33519,7 +33884,7 @@ var default_fragment = "void main() {\n\tgl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 * - You can use the directive `#pragma unroll_loop_start` and `#pragma unroll_loop_end` * in order to unroll a `for` loop in GLSL by the shader preprocessor. The directive has * to be placed right above the loop. The loop formatting has to correspond to a defined standard. - * - The loop has to be [normalized]{@link https://en.wikipedia.org/wiki/Normalized_loop}. + * - The loop has to be [normalized](https://en.wikipedia.org/wiki/Normalized_loop). * - The loop variable has to be *i*. * - The value `UNROLLED_LOOP_INDEX` will be replaced with the explicitly * value of *i* for the given iteration and can be used in preprocessor @@ -33660,9 +34025,22 @@ class ShaderMaterial extends Material { this.wireframeLinewidth = 1; /** - * Define whether the material color is affected by global fog settings; `true` + * Defines whether the material color is affected by global fog settings; `true` * to pass fog uniforms to the shader. * + * Setting this property to `true` requires the definition of fog uniforms. 
It is + * recommended to use `UniformsUtils.merge()` to combine the custom shader uniforms + * with predefined fog uniforms. + * + * ```js + * const material = new ShaderMaterial( { + * uniforms: UniformsUtils.merge( [ UniformsLib[ 'fog' ], shaderUniforms ] ); + * vertexShader: vertexShader, + * fragmentShader: fragmentShader, + * fog: true + * } ); + * ``` + * * @type {boolean} * @default false */ @@ -33725,7 +34103,7 @@ class ShaderMaterial extends Material { }; /** - * If set, this calls [gl.bindAttribLocation]{@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/bindAttribLocation} + * If set, this calls [gl.bindAttribLocation](https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/bindAttribLocation) * to bind a generic vertex index to an attribute variable. * * @type {string|undefined} @@ -33781,6 +34159,12 @@ class ShaderMaterial extends Material { this.glslVersion = source.glslVersion; + this.defaultAttributeValues = Object.assign( {}, source.defaultAttributeValues ); + + this.index0AttributeName = source.index0AttributeName; + + this.uniformsNeedUpdate = source.uniformsNeedUpdate; + return this; } @@ -35075,6 +35459,7 @@ class Triangle { * This material is not affected by lights. * * @augments Material + * @demo scenes/material-browser.html#MeshBasicMaterial */ class MeshBasicMaterial extends Material { @@ -35382,7 +35767,7 @@ class Mesh extends Object3D { * morph targets name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. * - * @type {Object|undefined} + * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; @@ -35812,7 +36197,7 @@ var aomap_fragment = "#ifdef USE_AOMAP\n\tfloat ambientOcclusion = ( texture2D( var aomap_pars_fragment = "#ifdef USE_AOMAP\n\tuniform sampler2D aoMap;\n\tuniform float aoMapIntensity;\n#endif"; -var batching_pars_vertex = "#ifdef USE_BATCHING\n\t#if ! 
defined( GL_ANGLE_multi_draw )\n\t#define gl_DrawID _gl_DrawID\n\tuniform int _gl_DrawID;\n\t#endif\n\tuniform highp sampler2D batchingTexture;\n\tuniform highp usampler2D batchingIdTexture;\n\tmat4 getBatchingMatrix( const in float i ) {\n\t\tint size = textureSize( batchingTexture, 0 ).x;\n\t\tint j = int( i ) * 4;\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\tvec4 v1 = texelFetch( batchingTexture, ivec2( x, y ), 0 );\n\t\tvec4 v2 = texelFetch( batchingTexture, ivec2( x + 1, y ), 0 );\n\t\tvec4 v3 = texelFetch( batchingTexture, ivec2( x + 2, y ), 0 );\n\t\tvec4 v4 = texelFetch( batchingTexture, ivec2( x + 3, y ), 0 );\n\t\treturn mat4( v1, v2, v3, v4 );\n\t}\n\tfloat getIndirectIndex( const in int i ) {\n\t\tint size = textureSize( batchingIdTexture, 0 ).x;\n\t\tint x = i % size;\n\t\tint y = i / size;\n\t\treturn float( texelFetch( batchingIdTexture, ivec2( x, y ), 0 ).r );\n\t}\n#endif\n#ifdef USE_BATCHING_COLOR\n\tuniform sampler2D batchingColorTexture;\n\tvec3 getBatchingColor( const in float i ) {\n\t\tint size = textureSize( batchingColorTexture, 0 ).x;\n\t\tint j = int( i );\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\treturn texelFetch( batchingColorTexture, ivec2( x, y ), 0 ).rgb;\n\t}\n#endif"; +var batching_pars_vertex = "#ifdef USE_BATCHING\n\t#if ! 
defined( GL_ANGLE_multi_draw )\n\t#define gl_DrawID _gl_DrawID\n\tuniform int _gl_DrawID;\n\t#endif\n\tuniform highp sampler2D batchingTexture;\n\tuniform highp usampler2D batchingIdTexture;\n\tmat4 getBatchingMatrix( const in float i ) {\n\t\tint size = textureSize( batchingTexture, 0 ).x;\n\t\tint j = int( i ) * 4;\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\tvec4 v1 = texelFetch( batchingTexture, ivec2( x, y ), 0 );\n\t\tvec4 v2 = texelFetch( batchingTexture, ivec2( x + 1, y ), 0 );\n\t\tvec4 v3 = texelFetch( batchingTexture, ivec2( x + 2, y ), 0 );\n\t\tvec4 v4 = texelFetch( batchingTexture, ivec2( x + 3, y ), 0 );\n\t\treturn mat4( v1, v2, v3, v4 );\n\t}\n\tfloat getIndirectIndex( const in int i ) {\n\t\tint size = textureSize( batchingIdTexture, 0 ).x;\n\t\tint x = i % size;\n\t\tint y = i / size;\n\t\treturn float( texelFetch( batchingIdTexture, ivec2( x, y ), 0 ).r );\n\t}\n#endif\n#ifdef USE_BATCHING_COLOR\n\tuniform sampler2D batchingColorTexture;\n\tvec4 getBatchingColor( const in float i ) {\n\t\tint size = textureSize( batchingColorTexture, 0 ).x;\n\t\tint j = int( i );\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\treturn texelFetch( batchingColorTexture, ivec2( x, y ), 0 );\n\t}\n#endif"; var batching_vertex = "#ifdef USE_BATCHING\n\tmat4 batchingMatrix = getBatchingMatrix( getIndirectIndex( gl_DrawID ) );\n#endif"; @@ -35834,15 +36219,15 @@ var clipping_planes_pars_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvarying vec3 v var clipping_planes_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvClipPosition = - mvPosition.xyz;\n#endif"; -var color_fragment = "#if defined( USE_COLOR_ALPHA )\n\tdiffuseColor *= vColor;\n#elif defined( USE_COLOR )\n\tdiffuseColor.rgb *= vColor;\n#endif"; +var color_fragment = "#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA )\n\tdiffuseColor *= vColor;\n#endif"; -var color_pars_fragment = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR )\n\tvarying vec3 vColor;\n#endif"; +var 
color_pars_fragment = "#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#endif"; -var color_pars_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvarying vec3 vColor;\n#endif"; +var color_pars_vertex = "#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvarying vec4 vColor;\n#endif"; -var color_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvColor = vec4( 1.0 );\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvColor = vec3( 1.0 );\n#endif\n#ifdef USE_COLOR\n\tvColor *= color;\n#endif\n#ifdef USE_INSTANCING_COLOR\n\tvColor.xyz *= instanceColor.xyz;\n#endif\n#ifdef USE_BATCHING_COLOR\n\tvec3 batchingColor = getBatchingColor( getIndirectIndex( gl_DrawID ) );\n\tvColor.xyz *= batchingColor.xyz;\n#endif"; +var color_vertex = "#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvColor = vec4( 1.0 );\n#endif\n#ifdef USE_COLOR_ALPHA\n\tvColor *= color;\n#elif defined( USE_COLOR )\n\tvColor.rgb *= color;\n#endif\n#ifdef USE_INSTANCING_COLOR\n\tvColor.rgb *= instanceColor.rgb;\n#endif\n#ifdef USE_BATCHING_COLOR\n\tvColor *= getBatchingColor( getIndirectIndex( gl_DrawID ) );\n#endif"; -var common$1 = "#define PI 3.141592653589793\n#define PI2 6.283185307179586\n#define PI_HALF 1.5707963267948966\n#define RECIPROCAL_PI 0.3183098861837907\n#define RECIPROCAL_PI2 0.15915494309189535\n#define EPSILON 1e-6\n#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\n#define whiteComplement( a ) ( 1.0 - saturate( a ) )\nfloat pow2( const in float x ) { return x*x; }\nvec3 pow2( const in vec3 x ) { return x*x; }\nfloat pow3( const in float x ) { return x*x*x; }\nfloat pow4( const in float x ) { float x2 = x*x; return x2*x2; }\nfloat 
max3( const in vec3 v ) { return max( max( v.x, v.y ), v.z ); }\nfloat average( const in vec3 v ) { return dot( v, vec3( 0.3333333 ) ); }\nhighp float rand( const in vec2 uv ) {\n\tconst highp float a = 12.9898, b = 78.233, c = 43758.5453;\n\thighp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );\n\treturn fract( sin( sn ) * c );\n}\n#ifdef HIGH_PRECISION\n\tfloat precisionSafeLength( vec3 v ) { return length( v ); }\n#else\n\tfloat precisionSafeLength( vec3 v ) {\n\t\tfloat maxComponent = max3( abs( v ) );\n\t\treturn length( v / maxComponent ) * maxComponent;\n\t}\n#endif\nstruct IncidentLight {\n\tvec3 color;\n\tvec3 direction;\n\tbool visible;\n};\nstruct ReflectedLight {\n\tvec3 directDiffuse;\n\tvec3 directSpecular;\n\tvec3 indirectDiffuse;\n\tvec3 indirectSpecular;\n};\n#ifdef USE_ALPHAHASH\n\tvarying vec3 vPosition;\n#endif\nvec3 transformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );\n}\nvec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n}\nmat3 transposeMat3( const in mat3 m ) {\n\tmat3 tmp;\n\ttmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );\n\ttmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );\n\ttmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );\n\treturn tmp;\n}\nbool isPerspectiveMatrix( mat4 m ) {\n\treturn m[ 2 ][ 3 ] == - 1.0;\n}\nvec2 equirectUv( in vec3 dir ) {\n\tfloat u = atan( dir.z, dir.x ) * RECIPROCAL_PI2 + 0.5;\n\tfloat v = asin( clamp( dir.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;\n\treturn vec2( u, v );\n}\nvec3 BRDF_Lambert( const in vec3 diffuseColor ) {\n\treturn RECIPROCAL_PI * diffuseColor;\n}\nvec3 F_Schlick( const in vec3 f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n}\nfloat F_Schlick( const in float f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 
5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n} // validated"; +var common$1 = "#define PI 3.141592653589793\n#define PI2 6.283185307179586\n#define PI_HALF 1.5707963267948966\n#define RECIPROCAL_PI 0.3183098861837907\n#define RECIPROCAL_PI2 0.15915494309189535\n#define EPSILON 1e-6\n#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\n#define whiteComplement( a ) ( 1.0 - saturate( a ) )\nfloat pow2( const in float x ) { return x*x; }\nvec3 pow2( const in vec3 x ) { return x*x; }\nfloat pow3( const in float x ) { return x*x*x; }\nfloat pow4( const in float x ) { float x2 = x*x; return x2*x2; }\nfloat max3( const in vec3 v ) { return max( max( v.x, v.y ), v.z ); }\nfloat average( const in vec3 v ) { return dot( v, vec3( 0.3333333 ) ); }\nhighp float rand( const in vec2 uv ) {\n\tconst highp float a = 12.9898, b = 78.233, c = 43758.5453;\n\thighp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );\n\treturn fract( sin( sn ) * c );\n}\n#ifdef HIGH_PRECISION\n\tfloat precisionSafeLength( vec3 v ) { return length( v ); }\n#else\n\tfloat precisionSafeLength( vec3 v ) {\n\t\tfloat maxComponent = max3( abs( v ) );\n\t\treturn length( v / maxComponent ) * maxComponent;\n\t}\n#endif\nstruct IncidentLight {\n\tvec3 color;\n\tvec3 direction;\n\tbool visible;\n};\nstruct ReflectedLight {\n\tvec3 directDiffuse;\n\tvec3 directSpecular;\n\tvec3 indirectDiffuse;\n\tvec3 indirectSpecular;\n};\n#ifdef USE_ALPHAHASH\n\tvarying vec3 vPosition;\n#endif\nvec3 transformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );\n}\nvec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n}\nbool isPerspectiveMatrix( mat4 m ) {\n\treturn m[ 2 ][ 3 ] == - 1.0;\n}\nvec2 equirectUv( in vec3 dir ) {\n\tfloat u = atan( dir.z, dir.x ) * RECIPROCAL_PI2 + 0.5;\n\tfloat v = asin( clamp( dir.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI 
+ 0.5;\n\treturn vec2( u, v );\n}\nvec3 BRDF_Lambert( const in vec3 diffuseColor ) {\n\treturn RECIPROCAL_PI * diffuseColor;\n}\nvec3 F_Schlick( const in vec3 f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n}\nfloat F_Schlick( const in float f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n} // validated"; var cube_uv_reflection_fragment = "#ifdef ENVMAP_TYPE_CUBE_UV\n\t#define cubeUV_minMipLevel 4.0\n\t#define cubeUV_minTileSize 16.0\n\tfloat getFace( vec3 direction ) {\n\t\tvec3 absDirection = abs( direction );\n\t\tfloat face = - 1.0;\n\t\tif ( absDirection.x > absDirection.z ) {\n\t\t\tif ( absDirection.x > absDirection.y )\n\t\t\t\tface = direction.x > 0.0 ? 0.0 : 3.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 1.0 : 4.0;\n\t\t} else {\n\t\t\tif ( absDirection.z > absDirection.y )\n\t\t\t\tface = direction.z > 0.0 ? 2.0 : 5.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 
1.0 : 4.0;\n\t\t}\n\t\treturn face;\n\t}\n\tvec2 getUV( vec3 direction, float face ) {\n\t\tvec2 uv;\n\t\tif ( face == 0.0 ) {\n\t\t\tuv = vec2( direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 1.0 ) {\n\t\t\tuv = vec2( - direction.x, - direction.z ) / abs( direction.y );\n\t\t} else if ( face == 2.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.y ) / abs( direction.z );\n\t\t} else if ( face == 3.0 ) {\n\t\t\tuv = vec2( - direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 4.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.z ) / abs( direction.y );\n\t\t} else {\n\t\t\tuv = vec2( direction.x, direction.y ) / abs( direction.z );\n\t\t}\n\t\treturn 0.5 * ( uv + 1.0 );\n\t}\n\tvec3 bilinearCubeUV( sampler2D envMap, vec3 direction, float mipInt ) {\n\t\tfloat face = getFace( direction );\n\t\tfloat filterInt = max( cubeUV_minMipLevel - mipInt, 0.0 );\n\t\tmipInt = max( mipInt, cubeUV_minMipLevel );\n\t\tfloat faceSize = exp2( mipInt );\n\t\thighp vec2 uv = getUV( direction, face ) * ( faceSize - 2.0 ) + 1.0;\n\t\tif ( face > 2.0 ) {\n\t\t\tuv.y += faceSize;\n\t\t\tface -= 3.0;\n\t\t}\n\t\tuv.x += face * faceSize;\n\t\tuv.x += filterInt * 3.0 * cubeUV_minTileSize;\n\t\tuv.y += 4.0 * ( exp2( CUBEUV_MAX_MIP ) - faceSize );\n\t\tuv.x *= CUBEUV_TEXEL_WIDTH;\n\t\tuv.y *= CUBEUV_TEXEL_HEIGHT;\n\t\t#ifdef texture2DGradEXT\n\t\t\treturn texture2DGradEXT( envMap, uv, vec2( 0.0 ), vec2( 0.0 ) ).rgb;\n\t\t#else\n\t\t\treturn texture2D( envMap, uv ).rgb;\n\t\t#endif\n\t}\n\t#define cubeUV_r0 1.0\n\t#define cubeUV_m0 - 2.0\n\t#define cubeUV_r1 0.8\n\t#define cubeUV_m1 - 1.0\n\t#define cubeUV_r4 0.4\n\t#define cubeUV_m4 2.0\n\t#define cubeUV_r5 0.305\n\t#define cubeUV_m5 3.0\n\t#define cubeUV_r6 0.21\n\t#define cubeUV_m6 4.0\n\tfloat roughnessToMip( float roughness ) {\n\t\tfloat mip = 0.0;\n\t\tif ( roughness >= cubeUV_r1 ) {\n\t\t\tmip = ( cubeUV_r0 - roughness ) * ( cubeUV_m1 - cubeUV_m0 ) / ( cubeUV_r0 - cubeUV_r1 ) + 
cubeUV_m0;\n\t\t} else if ( roughness >= cubeUV_r4 ) {\n\t\t\tmip = ( cubeUV_r1 - roughness ) * ( cubeUV_m4 - cubeUV_m1 ) / ( cubeUV_r1 - cubeUV_r4 ) + cubeUV_m1;\n\t\t} else if ( roughness >= cubeUV_r5 ) {\n\t\t\tmip = ( cubeUV_r4 - roughness ) * ( cubeUV_m5 - cubeUV_m4 ) / ( cubeUV_r4 - cubeUV_r5 ) + cubeUV_m4;\n\t\t} else if ( roughness >= cubeUV_r6 ) {\n\t\t\tmip = ( cubeUV_r5 - roughness ) * ( cubeUV_m6 - cubeUV_m5 ) / ( cubeUV_r5 - cubeUV_r6 ) + cubeUV_m5;\n\t\t} else {\n\t\t\tmip = - 2.0 * log2( 1.16 * roughness );\t\t}\n\t\treturn mip;\n\t}\n\tvec4 textureCubeUV( sampler2D envMap, vec3 sampleDir, float roughness ) {\n\t\tfloat mip = clamp( roughnessToMip( roughness ), cubeUV_m0, CUBEUV_MAX_MIP );\n\t\tfloat mipF = fract( mip );\n\t\tfloat mipInt = floor( mip );\n\t\tvec3 color0 = bilinearCubeUV( envMap, sampleDir, mipInt );\n\t\tif ( mipF == 0.0 ) {\n\t\t\treturn vec4( color0, 1.0 );\n\t\t} else {\n\t\t\tvec3 color1 = bilinearCubeUV( envMap, sampleDir, mipInt + 1.0 );\n\t\t\treturn vec4( mix( color0, color1, mipF ), 1.0 );\n\t\t}\n\t}\n#endif"; @@ -35860,9 +36245,9 @@ var colorspace_fragment = "gl_FragColor = linearToOutputTexel( gl_FragColor );"; var colorspace_pars_fragment = "vec4 LinearTransferOETF( in vec4 value ) {\n\treturn value;\n}\nvec4 sRGBTransferEOTF( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );\n}\nvec4 sRGBTransferOETF( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );\n}"; -var envmap_fragment = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvec3 cameraToFrag;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToFrag = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToFrag = normalize( 
vWorldPosition - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( cameraToFrag, worldNormal );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( cameraToFrag, worldNormal, refractionRatio );\n\t\t#endif\n\t#else\n\t\tvec3 reflectVec = vReflect;\n\t#endif\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 envColor = textureCube( envMap, envMapRotation * vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );\n\t#else\n\t\tvec4 envColor = vec4( 0.0 );\n\t#endif\n\t#ifdef ENVMAP_BLENDING_MULTIPLY\n\t\toutgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_MIX )\n\t\toutgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_ADD )\n\t\toutgoingLight += envColor.xyz * specularStrength * reflectivity;\n\t#endif\n#endif"; +var envmap_fragment = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvec3 cameraToFrag;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToFrag = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToFrag = normalize( vWorldPosition - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( cameraToFrag, worldNormal );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( cameraToFrag, worldNormal, refractionRatio );\n\t\t#endif\n\t#else\n\t\tvec3 reflectVec = vReflect;\n\t#endif\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 envColor = textureCube( envMap, envMapRotation * vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );\n\t\t#ifdef ENVMAP_BLENDING_MULTIPLY\n\t\t\toutgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );\n\t\t#elif defined( ENVMAP_BLENDING_MIX )\n\t\t\toutgoingLight = mix( outgoingLight, envColor.xyz, 
specularStrength * reflectivity );\n\t\t#elif defined( ENVMAP_BLENDING_ADD )\n\t\t\toutgoingLight += envColor.xyz * specularStrength * reflectivity;\n\t\t#endif\n\t#endif\n#endif"; -var envmap_common_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float envMapIntensity;\n\tuniform float flipEnvMap;\n\tuniform mat3 envMapRotation;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n\t\n#endif"; +var envmap_common_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float envMapIntensity;\n\tuniform float flipEnvMap;\n\tuniform mat3 envMapRotation;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n#endif"; var envmap_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float reflectivity;\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( LAMBERT )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\tvarying vec3 vWorldPosition;\n\t\tuniform float refractionRatio;\n\t#else\n\t\tvarying vec3 vReflect;\n\t#endif\n#endif"; @@ -35888,7 +36273,7 @@ var lights_lambert_pars_fragment = "varying vec3 vViewPosition;\nstruct LambertM var lights_pars_begin = "uniform bool receiveShadow;\nuniform vec3 ambientLightColor;\n#if defined( USE_LIGHT_PROBES )\n\tuniform vec3 lightProbe[ 9 ];\n#endif\nvec3 shGetIrradianceAt( in vec3 normal, in vec3 shCoefficients[ 9 ] ) {\n\tfloat x = normal.x, y = normal.y, z = normal.z;\n\tvec3 result = shCoefficients[ 0 ] * 0.886227;\n\tresult += shCoefficients[ 1 ] * 2.0 * 0.511664 * y;\n\tresult += shCoefficients[ 2 ] * 2.0 * 0.511664 * z;\n\tresult += shCoefficients[ 3 ] * 2.0 * 0.511664 * x;\n\tresult += shCoefficients[ 4 ] * 2.0 * 0.429043 * x * y;\n\tresult += shCoefficients[ 5 ] * 2.0 * 0.429043 * y * z;\n\tresult += shCoefficients[ 6 ] * ( 0.743125 * z * z - 0.247708 );\n\tresult += shCoefficients[ 7 ] * 2.0 * 0.429043 * x * z;\n\tresult += shCoefficients[ 8 ] * 0.429043 * ( x * x - y * y 
);\n\treturn result;\n}\nvec3 getLightProbeIrradiance( const in vec3 lightProbe[ 9 ], const in vec3 normal ) {\n\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\tvec3 irradiance = shGetIrradianceAt( worldNormal, lightProbe );\n\treturn irradiance;\n}\nvec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {\n\tvec3 irradiance = ambientLightColor;\n\treturn irradiance;\n}\nfloat getDistanceAttenuation( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {\n\tfloat distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );\n\tif ( cutoffDistance > 0.0 ) {\n\t\tdistanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );\n\t}\n\treturn distanceFalloff;\n}\nfloat getSpotAttenuation( const in float coneCosine, const in float penumbraCosine, const in float angleCosine ) {\n\treturn smoothstep( coneCosine, penumbraCosine, angleCosine );\n}\n#if NUM_DIR_LIGHTS > 0\n\tstruct DirectionalLight {\n\t\tvec3 direction;\n\t\tvec3 color;\n\t};\n\tuniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];\n\tvoid getDirectionalLightInfo( const in DirectionalLight directionalLight, out IncidentLight light ) {\n\t\tlight.color = directionalLight.color;\n\t\tlight.direction = directionalLight.direction;\n\t\tlight.visible = true;\n\t}\n#endif\n#if NUM_POINT_LIGHTS > 0\n\tstruct PointLight {\n\t\tvec3 position;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t};\n\tuniform PointLight pointLights[ NUM_POINT_LIGHTS ];\n\tvoid getPointLightInfo( const in PointLight pointLight, const in vec3 geometryPosition, out IncidentLight light ) {\n\t\tvec3 lVector = pointLight.position - geometryPosition;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat lightDistance = length( lVector );\n\t\tlight.color = pointLight.color;\n\t\tlight.color *= getDistanceAttenuation( lightDistance, pointLight.distance, pointLight.decay );\n\t\tlight.visible = ( light.color != vec3( 0.0 
) );\n\t}\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\tstruct SpotLight {\n\t\tvec3 position;\n\t\tvec3 direction;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t\tfloat coneCos;\n\t\tfloat penumbraCos;\n\t};\n\tuniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];\n\tvoid getSpotLightInfo( const in SpotLight spotLight, const in vec3 geometryPosition, out IncidentLight light ) {\n\t\tvec3 lVector = spotLight.position - geometryPosition;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat angleCos = dot( light.direction, spotLight.direction );\n\t\tfloat spotAttenuation = getSpotAttenuation( spotLight.coneCos, spotLight.penumbraCos, angleCos );\n\t\tif ( spotAttenuation > 0.0 ) {\n\t\t\tfloat lightDistance = length( lVector );\n\t\t\tlight.color = spotLight.color * spotAttenuation;\n\t\t\tlight.color *= getDistanceAttenuation( lightDistance, spotLight.distance, spotLight.decay );\n\t\t\tlight.visible = ( light.color != vec3( 0.0 ) );\n\t\t} else {\n\t\t\tlight.color = vec3( 0.0 );\n\t\t\tlight.visible = false;\n\t\t}\n\t}\n#endif\n#if NUM_RECT_AREA_LIGHTS > 0\n\tstruct RectAreaLight {\n\t\tvec3 color;\n\t\tvec3 position;\n\t\tvec3 halfWidth;\n\t\tvec3 halfHeight;\n\t};\n\tuniform sampler2D ltc_1;\tuniform sampler2D ltc_2;\n\tuniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\tstruct HemisphereLight {\n\t\tvec3 direction;\n\t\tvec3 skyColor;\n\t\tvec3 groundColor;\n\t};\n\tuniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];\n\tvec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in vec3 normal ) {\n\t\tfloat dotNL = dot( normal, hemiLight.direction );\n\t\tfloat hemiDiffuseWeight = 0.5 * dotNL + 0.5;\n\t\tvec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );\n\t\treturn irradiance;\n\t}\n#endif"; -var envmap_physical_pars_fragment = "#ifdef USE_ENVMAP\n\tvec3 getIBLIrradiance( const in vec3 normal ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 
worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * worldNormal, 1.0 );\n\t\t\treturn PI * envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\tvec3 getIBLRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 reflectVec = reflect( - viewDir, normal );\n\t\t\treflectVec = normalize( mix( reflectVec, normal, roughness * roughness) );\n\t\t\treflectVec = inverseTransformDirection( reflectVec, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * reflectVec, roughness );\n\t\t\treturn envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\t#ifdef USE_ANISOTROPY\n\t\tvec3 getIBLAnisotropyRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness, const in vec3 bitangent, const in float anisotropy ) {\n\t\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\t\tvec3 bentNormal = cross( bitangent, viewDir );\n\t\t\t\tbentNormal = normalize( cross( bentNormal, bitangent ) );\n\t\t\t\tbentNormal = normalize( mix( bentNormal, normal, pow2( pow2( 1.0 - anisotropy * ( 1.0 - roughness ) ) ) ) );\n\t\t\t\treturn getIBLRadiance( viewDir, bentNormal, roughness );\n\t\t\t#else\n\t\t\t\treturn vec3( 0.0 );\n\t\t\t#endif\n\t\t}\n\t#endif\n#endif"; +var envmap_physical_pars_fragment = "#ifdef USE_ENVMAP\n\tvec3 getIBLIrradiance( const in vec3 normal ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * worldNormal, 1.0 );\n\t\t\treturn PI * envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\tvec3 getIBLRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 reflectVec = reflect( - viewDir, normal 
);\n\t\t\treflectVec = normalize( mix( reflectVec, normal, pow4( roughness ) ) );\n\t\t\treflectVec = inverseTransformDirection( reflectVec, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * reflectVec, roughness );\n\t\t\treturn envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\t#ifdef USE_ANISOTROPY\n\t\tvec3 getIBLAnisotropyRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness, const in vec3 bitangent, const in float anisotropy ) {\n\t\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\t\tvec3 bentNormal = cross( bitangent, viewDir );\n\t\t\t\tbentNormal = normalize( cross( bentNormal, bitangent ) );\n\t\t\t\tbentNormal = normalize( mix( bentNormal, normal, pow2( pow2( 1.0 - anisotropy * ( 1.0 - roughness ) ) ) ) );\n\t\t\t\treturn getIBLRadiance( viewDir, bentNormal, roughness );\n\t\t\t#else\n\t\t\t\treturn vec3( 0.0 );\n\t\t\t#endif\n\t\t}\n\t#endif\n#endif"; var lights_toon_fragment = "ToonMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;"; @@ -35898,15 +36283,15 @@ var lights_phong_fragment = "BlinnPhongMaterial material;\nmaterial.diffuseColor var lights_phong_pars_fragment = "varying vec3 vViewPosition;\nstruct BlinnPhongMaterial {\n\tvec3 diffuseColor;\n\tvec3 specularColor;\n\tfloat specularShininess;\n\tfloat specularStrength;\n};\nvoid RE_Direct_BlinnPhong( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n\treflectedLight.directSpecular += irradiance * BRDF_BlinnPhong( directLight.direction, geometryViewDir, geometryNormal, material.specularColor, 
material.specularShininess ) * material.specularStrength;\n}\nvoid RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_BlinnPhong\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_BlinnPhong"; -var lights_physical_fragment = "PhysicalMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );\nvec3 dxy = max( abs( dFdx( nonPerturbedNormal ) ), abs( dFdy( nonPerturbedNormal ) ) );\nfloat geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );\nmaterial.roughness = max( roughnessFactor, 0.0525 );material.roughness += geometryRoughness;\nmaterial.roughness = min( material.roughness, 1.0 );\n#ifdef IOR\n\tmaterial.ior = ior;\n\t#ifdef USE_SPECULAR\n\t\tfloat specularIntensityFactor = specularIntensity;\n\t\tvec3 specularColorFactor = specularColor;\n\t\t#ifdef USE_SPECULAR_COLORMAP\n\t\t\tspecularColorFactor *= texture2D( specularColorMap, vSpecularColorMapUv ).rgb;\n\t\t#endif\n\t\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\t\tspecularIntensityFactor *= texture2D( specularIntensityMap, vSpecularIntensityMapUv ).a;\n\t\t#endif\n\t\tmaterial.specularF90 = mix( specularIntensityFactor, 1.0, metalnessFactor );\n\t#else\n\t\tfloat specularIntensityFactor = 1.0;\n\t\tvec3 specularColorFactor = vec3( 1.0 );\n\t\tmaterial.specularF90 = 1.0;\n\t#endif\n\tmaterial.specularColor = mix( min( pow2( ( material.ior - 1.0 ) / ( material.ior + 1.0 ) ) * specularColorFactor, vec3( 1.0 ) ) * specularIntensityFactor, diffuseColor.rgb, metalnessFactor );\n#else\n\tmaterial.specularColor = mix( vec3( 0.04 ), diffuseColor.rgb, metalnessFactor );\n\tmaterial.specularF90 = 1.0;\n#endif\n#ifdef USE_CLEARCOAT\n\tmaterial.clearcoat = 
clearcoat;\n\tmaterial.clearcoatRoughness = clearcoatRoughness;\n\tmaterial.clearcoatF0 = vec3( 0.04 );\n\tmaterial.clearcoatF90 = 1.0;\n\t#ifdef USE_CLEARCOATMAP\n\t\tmaterial.clearcoat *= texture2D( clearcoatMap, vClearcoatMapUv ).x;\n\t#endif\n\t#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\t\tmaterial.clearcoatRoughness *= texture2D( clearcoatRoughnessMap, vClearcoatRoughnessMapUv ).y;\n\t#endif\n\tmaterial.clearcoat = saturate( material.clearcoat );\tmaterial.clearcoatRoughness = max( material.clearcoatRoughness, 0.0525 );\n\tmaterial.clearcoatRoughness += geometryRoughness;\n\tmaterial.clearcoatRoughness = min( material.clearcoatRoughness, 1.0 );\n#endif\n#ifdef USE_DISPERSION\n\tmaterial.dispersion = dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tmaterial.iridescence = iridescence;\n\tmaterial.iridescenceIOR = iridescenceIOR;\n\t#ifdef USE_IRIDESCENCEMAP\n\t\tmaterial.iridescence *= texture2D( iridescenceMap, vIridescenceMapUv ).r;\n\t#endif\n\t#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\t\tmaterial.iridescenceThickness = (iridescenceThicknessMaximum - iridescenceThicknessMinimum) * texture2D( iridescenceThicknessMap, vIridescenceThicknessMapUv ).g + iridescenceThicknessMinimum;\n\t#else\n\t\tmaterial.iridescenceThickness = iridescenceThicknessMaximum;\n\t#endif\n#endif\n#ifdef USE_SHEEN\n\tmaterial.sheenColor = sheenColor;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tmaterial.sheenColor *= texture2D( sheenColorMap, vSheenColorMapUv ).rgb;\n\t#endif\n\tmaterial.sheenRoughness = clamp( sheenRoughness, 0.07, 1.0 );\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tmaterial.sheenRoughness *= texture2D( sheenRoughnessMap, vSheenRoughnessMapUv ).a;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\t#ifdef USE_ANISOTROPYMAP\n\t\tmat2 anisotropyMat = mat2( anisotropyVector.x, anisotropyVector.y, - anisotropyVector.y, anisotropyVector.x );\n\t\tvec3 anisotropyPolar = texture2D( anisotropyMap, vAnisotropyMapUv ).rgb;\n\t\tvec2 anisotropyV = anisotropyMat * normalize( 2.0 * anisotropyPolar.rg - vec2( 1.0 ) ) * 
anisotropyPolar.b;\n\t#else\n\t\tvec2 anisotropyV = anisotropyVector;\n\t#endif\n\tmaterial.anisotropy = length( anisotropyV );\n\tif( material.anisotropy == 0.0 ) {\n\t\tanisotropyV = vec2( 1.0, 0.0 );\n\t} else {\n\t\tanisotropyV /= material.anisotropy;\n\t\tmaterial.anisotropy = saturate( material.anisotropy );\n\t}\n\tmaterial.alphaT = mix( pow2( material.roughness ), 1.0, pow2( material.anisotropy ) );\n\tmaterial.anisotropyT = tbn[ 0 ] * anisotropyV.x + tbn[ 1 ] * anisotropyV.y;\n\tmaterial.anisotropyB = tbn[ 1 ] * anisotropyV.x - tbn[ 0 ] * anisotropyV.y;\n#endif"; +var lights_physical_fragment = "PhysicalMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;\nmaterial.diffuseContribution = diffuseColor.rgb * ( 1.0 - metalnessFactor );\nmaterial.metalness = metalnessFactor;\nvec3 dxy = max( abs( dFdx( nonPerturbedNormal ) ), abs( dFdy( nonPerturbedNormal ) ) );\nfloat geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );\nmaterial.roughness = max( roughnessFactor, 0.0525 );material.roughness += geometryRoughness;\nmaterial.roughness = min( material.roughness, 1.0 );\n#ifdef IOR\n\tmaterial.ior = ior;\n\t#ifdef USE_SPECULAR\n\t\tfloat specularIntensityFactor = specularIntensity;\n\t\tvec3 specularColorFactor = specularColor;\n\t\t#ifdef USE_SPECULAR_COLORMAP\n\t\t\tspecularColorFactor *= texture2D( specularColorMap, vSpecularColorMapUv ).rgb;\n\t\t#endif\n\t\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\t\tspecularIntensityFactor *= texture2D( specularIntensityMap, vSpecularIntensityMapUv ).a;\n\t\t#endif\n\t\tmaterial.specularF90 = mix( specularIntensityFactor, 1.0, metalnessFactor );\n\t#else\n\t\tfloat specularIntensityFactor = 1.0;\n\t\tvec3 specularColorFactor = vec3( 1.0 );\n\t\tmaterial.specularF90 = 1.0;\n\t#endif\n\tmaterial.specularColor = min( pow2( ( material.ior - 1.0 ) / ( material.ior + 1.0 ) ) * specularColorFactor, vec3( 1.0 ) ) * specularIntensityFactor;\n\tmaterial.specularColorBlended = mix( material.specularColor, diffuseColor.rgb, 
metalnessFactor );\n#else\n\tmaterial.specularColor = vec3( 0.04 );\n\tmaterial.specularColorBlended = mix( material.specularColor, diffuseColor.rgb, metalnessFactor );\n\tmaterial.specularF90 = 1.0;\n#endif\n#ifdef USE_CLEARCOAT\n\tmaterial.clearcoat = clearcoat;\n\tmaterial.clearcoatRoughness = clearcoatRoughness;\n\tmaterial.clearcoatF0 = vec3( 0.04 );\n\tmaterial.clearcoatF90 = 1.0;\n\t#ifdef USE_CLEARCOATMAP\n\t\tmaterial.clearcoat *= texture2D( clearcoatMap, vClearcoatMapUv ).x;\n\t#endif\n\t#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\t\tmaterial.clearcoatRoughness *= texture2D( clearcoatRoughnessMap, vClearcoatRoughnessMapUv ).y;\n\t#endif\n\tmaterial.clearcoat = saturate( material.clearcoat );\tmaterial.clearcoatRoughness = max( material.clearcoatRoughness, 0.0525 );\n\tmaterial.clearcoatRoughness += geometryRoughness;\n\tmaterial.clearcoatRoughness = min( material.clearcoatRoughness, 1.0 );\n#endif\n#ifdef USE_DISPERSION\n\tmaterial.dispersion = dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tmaterial.iridescence = iridescence;\n\tmaterial.iridescenceIOR = iridescenceIOR;\n\t#ifdef USE_IRIDESCENCEMAP\n\t\tmaterial.iridescence *= texture2D( iridescenceMap, vIridescenceMapUv ).r;\n\t#endif\n\t#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\t\tmaterial.iridescenceThickness = (iridescenceThicknessMaximum - iridescenceThicknessMinimum) * texture2D( iridescenceThicknessMap, vIridescenceThicknessMapUv ).g + iridescenceThicknessMinimum;\n\t#else\n\t\tmaterial.iridescenceThickness = iridescenceThicknessMaximum;\n\t#endif\n#endif\n#ifdef USE_SHEEN\n\tmaterial.sheenColor = sheenColor;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tmaterial.sheenColor *= texture2D( sheenColorMap, vSheenColorMapUv ).rgb;\n\t#endif\n\tmaterial.sheenRoughness = clamp( sheenRoughness, 0.0001, 1.0 );\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tmaterial.sheenRoughness *= texture2D( sheenRoughnessMap, vSheenRoughnessMapUv ).a;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\t#ifdef USE_ANISOTROPYMAP\n\t\tmat2 anisotropyMat = mat2( 
anisotropyVector.x, anisotropyVector.y, - anisotropyVector.y, anisotropyVector.x );\n\t\tvec3 anisotropyPolar = texture2D( anisotropyMap, vAnisotropyMapUv ).rgb;\n\t\tvec2 anisotropyV = anisotropyMat * normalize( 2.0 * anisotropyPolar.rg - vec2( 1.0 ) ) * anisotropyPolar.b;\n\t#else\n\t\tvec2 anisotropyV = anisotropyVector;\n\t#endif\n\tmaterial.anisotropy = length( anisotropyV );\n\tif( material.anisotropy == 0.0 ) {\n\t\tanisotropyV = vec2( 1.0, 0.0 );\n\t} else {\n\t\tanisotropyV /= material.anisotropy;\n\t\tmaterial.anisotropy = saturate( material.anisotropy );\n\t}\n\tmaterial.alphaT = mix( pow2( material.roughness ), 1.0, pow2( material.anisotropy ) );\n\tmaterial.anisotropyT = tbn[ 0 ] * anisotropyV.x + tbn[ 1 ] * anisotropyV.y;\n\tmaterial.anisotropyB = tbn[ 1 ] * anisotropyV.x - tbn[ 0 ] * anisotropyV.y;\n#endif"; -var lights_physical_pars_fragment = "struct PhysicalMaterial {\n\tvec3 diffuseColor;\n\tfloat roughness;\n\tvec3 specularColor;\n\tfloat specularF90;\n\tfloat dispersion;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat clearcoat;\n\t\tfloat clearcoatRoughness;\n\t\tvec3 clearcoatF0;\n\t\tfloat clearcoatF90;\n\t#endif\n\t#ifdef USE_IRIDESCENCE\n\t\tfloat iridescence;\n\t\tfloat iridescenceIOR;\n\t\tfloat iridescenceThickness;\n\t\tvec3 iridescenceFresnel;\n\t\tvec3 iridescenceF0;\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tvec3 sheenColor;\n\t\tfloat sheenRoughness;\n\t#endif\n\t#ifdef IOR\n\t\tfloat ior;\n\t#endif\n\t#ifdef USE_TRANSMISSION\n\t\tfloat transmission;\n\t\tfloat transmissionAlpha;\n\t\tfloat thickness;\n\t\tfloat attenuationDistance;\n\t\tvec3 attenuationColor;\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat anisotropy;\n\t\tfloat alphaT;\n\t\tvec3 anisotropyT;\n\t\tvec3 anisotropyB;\n\t#endif\n};\nvec3 clearcoatSpecularDirect = vec3( 0.0 );\nvec3 clearcoatSpecularIndirect = vec3( 0.0 );\nvec3 sheenSpecularDirect = vec3( 0.0 );\nvec3 sheenSpecularIndirect = vec3(0.0 );\nvec3 Schlick_to_F0( const in vec3 f, const in float f90, const in float dotVH ) {\n 
float x = clamp( 1.0 - dotVH, 0.0, 1.0 );\n float x2 = x * x;\n float x5 = clamp( x * x2 * x2, 0.0, 0.9999 );\n return ( f - vec3( f90 ) * x5 ) / ( 1.0 - x5 );\n}\nfloat V_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {\n\tfloat a2 = pow2( alpha );\n\tfloat gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );\n\tfloat gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );\n\treturn 0.5 / max( gv + gl, EPSILON );\n}\nfloat D_GGX( const in float alpha, const in float dotNH ) {\n\tfloat a2 = pow2( alpha );\n\tfloat denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0;\n\treturn RECIPROCAL_PI * a2 / pow2( denom );\n}\n#ifdef USE_ANISOTROPY\n\tfloat V_GGX_SmithCorrelated_Anisotropic( const in float alphaT, const in float alphaB, const in float dotTV, const in float dotBV, const in float dotTL, const in float dotBL, const in float dotNV, const in float dotNL ) {\n\t\tfloat gv = dotNL * length( vec3( alphaT * dotTV, alphaB * dotBV, dotNV ) );\n\t\tfloat gl = dotNV * length( vec3( alphaT * dotTL, alphaB * dotBL, dotNL ) );\n\t\tfloat v = 0.5 / ( gv + gl );\n\t\treturn saturate(v);\n\t}\n\tfloat D_GGX_Anisotropic( const in float alphaT, const in float alphaB, const in float dotNH, const in float dotTH, const in float dotBH ) {\n\t\tfloat a2 = alphaT * alphaB;\n\t\thighp vec3 v = vec3( alphaB * dotTH, alphaT * dotBH, a2 * dotNH );\n\t\thighp float v2 = dot( v, v );\n\t\tfloat w2 = a2 / v2;\n\t\treturn RECIPROCAL_PI * a2 * pow2 ( w2 );\n\t}\n#endif\n#ifdef USE_CLEARCOAT\n\tvec3 BRDF_GGX_Clearcoat( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material) {\n\t\tvec3 f0 = material.clearcoatF0;\n\t\tfloat f90 = material.clearcoatF90;\n\t\tfloat roughness = material.clearcoatRoughness;\n\t\tfloat alpha = pow2( roughness );\n\t\tvec3 halfDir = normalize( lightDir + viewDir );\n\t\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\t\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\t\tfloat 
dotNH = saturate( dot( normal, halfDir ) );\n\t\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\t\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t\treturn F * ( V * D );\n\t}\n#endif\nvec3 BRDF_GGX( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material ) {\n\tvec3 f0 = material.specularColor;\n\tfloat f90 = material.specularF90;\n\tfloat roughness = material.roughness;\n\tfloat alpha = pow2( roughness );\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t#ifdef USE_IRIDESCENCE\n\t\tF = mix( F, material.iridescenceFresnel, material.iridescence );\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat dotTL = dot( material.anisotropyT, lightDir );\n\t\tfloat dotTV = dot( material.anisotropyT, viewDir );\n\t\tfloat dotTH = dot( material.anisotropyT, halfDir );\n\t\tfloat dotBL = dot( material.anisotropyB, lightDir );\n\t\tfloat dotBV = dot( material.anisotropyB, viewDir );\n\t\tfloat dotBH = dot( material.anisotropyB, halfDir );\n\t\tfloat V = V_GGX_SmithCorrelated_Anisotropic( material.alphaT, alpha, dotTV, dotBV, dotTL, dotBL, dotNV, dotNL );\n\t\tfloat D = D_GGX_Anisotropic( material.alphaT, alpha, dotNH, dotTH, dotBH );\n\t#else\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t#endif\n\treturn F * ( V * D );\n}\nvec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {\n\tconst float LUT_SIZE = 64.0;\n\tconst float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;\n\tconst float LUT_BIAS = 0.5 / LUT_SIZE;\n\tfloat dotNV = saturate( dot( N, V ) );\n\tvec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );\n\tuv = uv * 
LUT_SCALE + LUT_BIAS;\n\treturn uv;\n}\nfloat LTC_ClippedSphereFormFactor( const in vec3 f ) {\n\tfloat l = length( f );\n\treturn max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );\n}\nvec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {\n\tfloat x = dot( v1, v2 );\n\tfloat y = abs( x );\n\tfloat a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;\n\tfloat b = 3.4175940 + ( 4.1616724 + y ) * y;\n\tfloat v = a / b;\n\tfloat theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;\n\treturn cross( v1, v2 ) * theta_sintheta;\n}\nvec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {\n\tvec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];\n\tvec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];\n\tvec3 lightNormal = cross( v1, v2 );\n\tif( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );\n\tvec3 T1, T2;\n\tT1 = normalize( V - N * dot( V, N ) );\n\tT2 = - cross( N, T1 );\n\tmat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );\n\tvec3 coords[ 4 ];\n\tcoords[ 0 ] = mat * ( rectCoords[ 0 ] - P );\n\tcoords[ 1 ] = mat * ( rectCoords[ 1 ] - P );\n\tcoords[ 2 ] = mat * ( rectCoords[ 2 ] - P );\n\tcoords[ 3 ] = mat * ( rectCoords[ 3 ] - P );\n\tcoords[ 0 ] = normalize( coords[ 0 ] );\n\tcoords[ 1 ] = normalize( coords[ 1 ] );\n\tcoords[ 2 ] = normalize( coords[ 2 ] );\n\tcoords[ 3 ] = normalize( coords[ 3 ] );\n\tvec3 vectorFormFactor = vec3( 0.0 );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );\n\tfloat result = LTC_ClippedSphereFormFactor( vectorFormFactor );\n\treturn vec3( result );\n}\n#if defined( USE_SHEEN )\nfloat D_Charlie( float roughness, float dotNH ) {\n\tfloat alpha = pow2( roughness );\n\tfloat 
invAlpha = 1.0 / alpha;\n\tfloat cos2h = dotNH * dotNH;\n\tfloat sin2h = max( 1.0 - cos2h, 0.0078125 );\n\treturn ( 2.0 + invAlpha ) * pow( sin2h, invAlpha * 0.5 ) / ( 2.0 * PI );\n}\nfloat V_Neubelt( float dotNV, float dotNL ) {\n\treturn saturate( 1.0 / ( 4.0 * ( dotNL + dotNV - dotNL * dotNV ) ) );\n}\nvec3 BRDF_Sheen( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, vec3 sheenColor, const in float sheenRoughness ) {\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat D = D_Charlie( sheenRoughness, dotNH );\n\tfloat V = V_Neubelt( dotNV, dotNL );\n\treturn sheenColor * ( D * V );\n}\n#endif\nfloat IBLSheenBRDF( const in vec3 normal, const in vec3 viewDir, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat r2 = roughness * roughness;\n\tfloat a = roughness < 0.25 ? -339.2 * r2 + 161.4 * roughness - 25.9 : -8.48 * r2 + 14.3 * roughness - 9.95;\n\tfloat b = roughness < 0.25 ? 44.0 * r2 - 23.7 * roughness + 3.26 : 1.97 * r2 - 3.27 * roughness + 0.72;\n\tfloat DG = exp( a * dotNV + b ) + ( roughness < 0.25 ? 
0.0 : 0.1 * ( roughness - 0.25 ) );\n\treturn saturate( DG * RECIPROCAL_PI );\n}\nvec2 DFGApprox( const in vec3 normal, const in vec3 viewDir, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tconst vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );\n\tconst vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );\n\tvec4 r = roughness * c0 + c1;\n\tfloat a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;\n\tvec2 fab = vec2( - 1.04, 1.04 ) * a004 + r.zw;\n\treturn fab;\n}\nvec3 EnvironmentBRDF( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness ) {\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\treturn specularColor * fab.x + specularF90 * fab.y;\n}\n#ifdef USE_IRIDESCENCE\nvoid computeMultiscatteringIridescence( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float iridescence, const in vec3 iridescenceF0, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#else\nvoid computeMultiscattering( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#endif\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\t#ifdef USE_IRIDESCENCE\n\t\tvec3 Fr = mix( specularColor, iridescenceF0, iridescence );\n\t#else\n\t\tvec3 Fr = specularColor;\n\t#endif\n\tvec3 FssEss = Fr * fab.x + specularF90 * fab.y;\n\tfloat Ess = fab.x + fab.y;\n\tfloat Ems = 1.0 - Ess;\n\tvec3 Favg = Fr + ( 1.0 - Fr ) * 0.047619;\tvec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );\n\tsingleScatter += FssEss;\n\tmultiScatter += Fms * Ems;\n}\n#if NUM_RECT_AREA_LIGHTS > 0\n\tvoid RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, 
const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\t\tvec3 normal = geometryNormal;\n\t\tvec3 viewDir = geometryViewDir;\n\t\tvec3 position = geometryPosition;\n\t\tvec3 lightPos = rectAreaLight.position;\n\t\tvec3 halfWidth = rectAreaLight.halfWidth;\n\t\tvec3 halfHeight = rectAreaLight.halfHeight;\n\t\tvec3 lightColor = rectAreaLight.color;\n\t\tfloat roughness = material.roughness;\n\t\tvec3 rectCoords[ 4 ];\n\t\trectCoords[ 0 ] = lightPos + halfWidth - halfHeight;\t\trectCoords[ 1 ] = lightPos - halfWidth - halfHeight;\n\t\trectCoords[ 2 ] = lightPos - halfWidth + halfHeight;\n\t\trectCoords[ 3 ] = lightPos + halfWidth + halfHeight;\n\t\tvec2 uv = LTC_Uv( normal, viewDir, roughness );\n\t\tvec4 t1 = texture2D( ltc_1, uv );\n\t\tvec4 t2 = texture2D( ltc_2, uv );\n\t\tmat3 mInv = mat3(\n\t\t\tvec3( t1.x, 0, t1.y ),\n\t\t\tvec3( 0, 1, 0 ),\n\t\t\tvec3( t1.z, 0, t1.w )\n\t\t);\n\t\tvec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );\n\t\treflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );\n\t\treflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );\n\t}\n#endif\nvoid RE_Direct_Physical( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNLcc = saturate( dot( geometryClearcoatNormal, directLight.direction ) );\n\t\tvec3 ccIrradiance = dotNLcc * directLight.color;\n\t\tclearcoatSpecularDirect += ccIrradiance * BRDF_GGX_Clearcoat( directLight.direction, geometryViewDir, geometryClearcoatNormal, material 
);\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecularDirect += irradiance * BRDF_Sheen( directLight.direction, geometryViewDir, geometryNormal, material.sheenColor, material.sheenRoughness );\n\t#endif\n\treflectedLight.directSpecular += irradiance * BRDF_GGX( directLight.direction, geometryViewDir, geometryNormal, material );\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 irradiance, const in vec3 clearcoatRadiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight) {\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatSpecularIndirect += clearcoatRadiance * EnvironmentBRDF( geometryClearcoatNormal, geometryViewDir, material.clearcoatF0, material.clearcoatF90, material.clearcoatRoughness );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecularIndirect += irradiance * material.sheenColor * IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness );\n\t#endif\n\tvec3 singleScattering = vec3( 0.0 );\n\tvec3 multiScattering = vec3( 0.0 );\n\tvec3 cosineWeightedIrradiance = irradiance * RECIPROCAL_PI;\n\t#ifdef USE_IRIDESCENCE\n\t\tcomputeMultiscatteringIridescence( geometryNormal, geometryViewDir, material.specularColor, material.specularF90, material.iridescence, material.iridescenceFresnel, material.roughness, singleScattering, multiScattering );\n\t#else\n\t\tcomputeMultiscattering( geometryNormal, geometryViewDir, material.specularColor, 
material.specularF90, material.roughness, singleScattering, multiScattering );\n\t#endif\n\tvec3 totalScattering = singleScattering + multiScattering;\n\tvec3 diffuse = material.diffuseColor * ( 1.0 - max( max( totalScattering.r, totalScattering.g ), totalScattering.b ) );\n\treflectedLight.indirectSpecular += radiance * singleScattering;\n\treflectedLight.indirectSpecular += multiScattering * cosineWeightedIrradiance;\n\treflectedLight.indirectDiffuse += diffuse * cosineWeightedIrradiance;\n}\n#define RE_Direct\t\t\t\tRE_Direct_Physical\n#define RE_Direct_RectArea\t\tRE_Direct_RectArea_Physical\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Physical\n#define RE_IndirectSpecular\t\tRE_IndirectSpecular_Physical\nfloat computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {\n\treturn saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );\n}"; +var lights_physical_pars_fragment = "uniform sampler2D dfgLUT;\nstruct PhysicalMaterial {\n\tvec3 diffuseColor;\n\tvec3 diffuseContribution;\n\tvec3 specularColor;\n\tvec3 specularColorBlended;\n\tfloat roughness;\n\tfloat metalness;\n\tfloat specularF90;\n\tfloat dispersion;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat clearcoat;\n\t\tfloat clearcoatRoughness;\n\t\tvec3 clearcoatF0;\n\t\tfloat clearcoatF90;\n\t#endif\n\t#ifdef USE_IRIDESCENCE\n\t\tfloat iridescence;\n\t\tfloat iridescenceIOR;\n\t\tfloat iridescenceThickness;\n\t\tvec3 iridescenceFresnel;\n\t\tvec3 iridescenceF0;\n\t\tvec3 iridescenceFresnelDielectric;\n\t\tvec3 iridescenceFresnelMetallic;\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tvec3 sheenColor;\n\t\tfloat sheenRoughness;\n\t#endif\n\t#ifdef IOR\n\t\tfloat ior;\n\t#endif\n\t#ifdef USE_TRANSMISSION\n\t\tfloat transmission;\n\t\tfloat transmissionAlpha;\n\t\tfloat thickness;\n\t\tfloat attenuationDistance;\n\t\tvec3 attenuationColor;\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat anisotropy;\n\t\tfloat alphaT;\n\t\tvec3 
anisotropyT;\n\t\tvec3 anisotropyB;\n\t#endif\n};\nvec3 clearcoatSpecularDirect = vec3( 0.0 );\nvec3 clearcoatSpecularIndirect = vec3( 0.0 );\nvec3 sheenSpecularDirect = vec3( 0.0 );\nvec3 sheenSpecularIndirect = vec3(0.0 );\nvec3 Schlick_to_F0( const in vec3 f, const in float f90, const in float dotVH ) {\n float x = clamp( 1.0 - dotVH, 0.0, 1.0 );\n float x2 = x * x;\n float x5 = clamp( x * x2 * x2, 0.0, 0.9999 );\n return ( f - vec3( f90 ) * x5 ) / ( 1.0 - x5 );\n}\nfloat V_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {\n\tfloat a2 = pow2( alpha );\n\tfloat gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );\n\tfloat gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );\n\treturn 0.5 / max( gv + gl, EPSILON );\n}\nfloat D_GGX( const in float alpha, const in float dotNH ) {\n\tfloat a2 = pow2( alpha );\n\tfloat denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0;\n\treturn RECIPROCAL_PI * a2 / pow2( denom );\n}\n#ifdef USE_ANISOTROPY\n\tfloat V_GGX_SmithCorrelated_Anisotropic( const in float alphaT, const in float alphaB, const in float dotTV, const in float dotBV, const in float dotTL, const in float dotBL, const in float dotNV, const in float dotNL ) {\n\t\tfloat gv = dotNL * length( vec3( alphaT * dotTV, alphaB * dotBV, dotNV ) );\n\t\tfloat gl = dotNV * length( vec3( alphaT * dotTL, alphaB * dotBL, dotNL ) );\n\t\tfloat v = 0.5 / ( gv + gl );\n\t\treturn v;\n\t}\n\tfloat D_GGX_Anisotropic( const in float alphaT, const in float alphaB, const in float dotNH, const in float dotTH, const in float dotBH ) {\n\t\tfloat a2 = alphaT * alphaB;\n\t\thighp vec3 v = vec3( alphaB * dotTH, alphaT * dotBH, a2 * dotNH );\n\t\thighp float v2 = dot( v, v );\n\t\tfloat w2 = a2 / v2;\n\t\treturn RECIPROCAL_PI * a2 * pow2 ( w2 );\n\t}\n#endif\n#ifdef USE_CLEARCOAT\n\tvec3 BRDF_GGX_Clearcoat( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material) {\n\t\tvec3 f0 = 
material.clearcoatF0;\n\t\tfloat f90 = material.clearcoatF90;\n\t\tfloat roughness = material.clearcoatRoughness;\n\t\tfloat alpha = pow2( roughness );\n\t\tvec3 halfDir = normalize( lightDir + viewDir );\n\t\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\t\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\t\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\t\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\t\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t\treturn F * ( V * D );\n\t}\n#endif\nvec3 BRDF_GGX( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material ) {\n\tvec3 f0 = material.specularColorBlended;\n\tfloat f90 = material.specularF90;\n\tfloat roughness = material.roughness;\n\tfloat alpha = pow2( roughness );\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t#ifdef USE_IRIDESCENCE\n\t\tF = mix( F, material.iridescenceFresnel, material.iridescence );\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat dotTL = dot( material.anisotropyT, lightDir );\n\t\tfloat dotTV = dot( material.anisotropyT, viewDir );\n\t\tfloat dotTH = dot( material.anisotropyT, halfDir );\n\t\tfloat dotBL = dot( material.anisotropyB, lightDir );\n\t\tfloat dotBV = dot( material.anisotropyB, viewDir );\n\t\tfloat dotBH = dot( material.anisotropyB, halfDir );\n\t\tfloat V = V_GGX_SmithCorrelated_Anisotropic( material.alphaT, alpha, dotTV, dotBV, dotTL, dotBL, dotNV, dotNL );\n\t\tfloat D = D_GGX_Anisotropic( material.alphaT, alpha, dotNH, dotTH, dotBH );\n\t#else\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t#endif\n\treturn 
F * ( V * D );\n}\nvec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {\n\tconst float LUT_SIZE = 64.0;\n\tconst float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;\n\tconst float LUT_BIAS = 0.5 / LUT_SIZE;\n\tfloat dotNV = saturate( dot( N, V ) );\n\tvec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );\n\tuv = uv * LUT_SCALE + LUT_BIAS;\n\treturn uv;\n}\nfloat LTC_ClippedSphereFormFactor( const in vec3 f ) {\n\tfloat l = length( f );\n\treturn max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );\n}\nvec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {\n\tfloat x = dot( v1, v2 );\n\tfloat y = abs( x );\n\tfloat a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;\n\tfloat b = 3.4175940 + ( 4.1616724 + y ) * y;\n\tfloat v = a / b;\n\tfloat theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;\n\treturn cross( v1, v2 ) * theta_sintheta;\n}\nvec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {\n\tvec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];\n\tvec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];\n\tvec3 lightNormal = cross( v1, v2 );\n\tif( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );\n\tvec3 T1, T2;\n\tT1 = normalize( V - N * dot( V, N ) );\n\tT2 = - cross( N, T1 );\n\tmat3 mat = mInv * transpose( mat3( T1, T2, N ) );\n\tvec3 coords[ 4 ];\n\tcoords[ 0 ] = mat * ( rectCoords[ 0 ] - P );\n\tcoords[ 1 ] = mat * ( rectCoords[ 1 ] - P );\n\tcoords[ 2 ] = mat * ( rectCoords[ 2 ] - P );\n\tcoords[ 3 ] = mat * ( rectCoords[ 3 ] - P );\n\tcoords[ 0 ] = normalize( coords[ 0 ] );\n\tcoords[ 1 ] = normalize( coords[ 1 ] );\n\tcoords[ 2 ] = normalize( coords[ 2 ] );\n\tcoords[ 3 ] = normalize( coords[ 3 ] );\n\tvec3 vectorFormFactor = vec3( 0.0 );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );\n\tvectorFormFactor += 
LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );\n\tfloat result = LTC_ClippedSphereFormFactor( vectorFormFactor );\n\treturn vec3( result );\n}\n#if defined( USE_SHEEN )\nfloat D_Charlie( float roughness, float dotNH ) {\n\tfloat alpha = pow2( roughness );\n\tfloat invAlpha = 1.0 / alpha;\n\tfloat cos2h = dotNH * dotNH;\n\tfloat sin2h = max( 1.0 - cos2h, 0.0078125 );\n\treturn ( 2.0 + invAlpha ) * pow( sin2h, invAlpha * 0.5 ) / ( 2.0 * PI );\n}\nfloat V_Neubelt( float dotNV, float dotNL ) {\n\treturn saturate( 1.0 / ( 4.0 * ( dotNL + dotNV - dotNL * dotNV ) ) );\n}\nvec3 BRDF_Sheen( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, vec3 sheenColor, const in float sheenRoughness ) {\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat D = D_Charlie( sheenRoughness, dotNH );\n\tfloat V = V_Neubelt( dotNV, dotNL );\n\treturn sheenColor * ( D * V );\n}\n#endif\nfloat IBLSheenBRDF( const in vec3 normal, const in vec3 viewDir, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat r2 = roughness * roughness;\n\tfloat rInv = 1.0 / ( roughness + 0.1 );\n\tfloat a = -1.9362 + 1.0678 * roughness + 0.4573 * r2 - 0.8469 * rInv;\n\tfloat b = -0.6014 + 0.5538 * roughness - 0.4670 * r2 - 0.1255 * rInv;\n\tfloat DG = exp( a * dotNV + b );\n\treturn saturate( DG );\n}\nvec3 EnvironmentBRDF( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tvec2 fab = texture2D( dfgLUT, vec2( roughness, dotNV ) ).rg;\n\treturn specularColor * fab.x + specularF90 * fab.y;\n}\n#ifdef USE_IRIDESCENCE\nvoid computeMultiscatteringIridescence( const in vec3 normal, const 
in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float iridescence, const in vec3 iridescenceF0, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#else\nvoid computeMultiscattering( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#endif\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tvec2 fab = texture2D( dfgLUT, vec2( roughness, dotNV ) ).rg;\n\t#ifdef USE_IRIDESCENCE\n\t\tvec3 Fr = mix( specularColor, iridescenceF0, iridescence );\n\t#else\n\t\tvec3 Fr = specularColor;\n\t#endif\n\tvec3 FssEss = Fr * fab.x + specularF90 * fab.y;\n\tfloat Ess = fab.x + fab.y;\n\tfloat Ems = 1.0 - Ess;\n\tvec3 Favg = Fr + ( 1.0 - Fr ) * 0.047619;\tvec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );\n\tsingleScatter += FssEss;\n\tmultiScatter += Fms * Ems;\n}\nvec3 BRDF_GGX_Multiscatter( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material ) {\n\tvec3 singleScatter = BRDF_GGX( lightDir, viewDir, normal, material );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tvec2 dfgV = texture2D( dfgLUT, vec2( material.roughness, dotNV ) ).rg;\n\tvec2 dfgL = texture2D( dfgLUT, vec2( material.roughness, dotNL ) ).rg;\n\tvec3 FssEss_V = material.specularColorBlended * dfgV.x + material.specularF90 * dfgV.y;\n\tvec3 FssEss_L = material.specularColorBlended * dfgL.x + material.specularF90 * dfgL.y;\n\tfloat Ess_V = dfgV.x + dfgV.y;\n\tfloat Ess_L = dfgL.x + dfgL.y;\n\tfloat Ems_V = 1.0 - Ess_V;\n\tfloat Ems_L = 1.0 - Ess_L;\n\tvec3 Favg = material.specularColorBlended + ( 1.0 - material.specularColorBlended ) * 0.047619;\n\tvec3 Fms = FssEss_V * FssEss_L * Favg / ( 1.0 - Ems_V * Ems_L * Favg + EPSILON );\n\tfloat compensationFactor = Ems_V * Ems_L;\n\tvec3 multiScatter = Fms * 
compensationFactor;\n\treturn singleScatter + multiScatter;\n}\n#if NUM_RECT_AREA_LIGHTS > 0\n\tvoid RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\t\tvec3 normal = geometryNormal;\n\t\tvec3 viewDir = geometryViewDir;\n\t\tvec3 position = geometryPosition;\n\t\tvec3 lightPos = rectAreaLight.position;\n\t\tvec3 halfWidth = rectAreaLight.halfWidth;\n\t\tvec3 halfHeight = rectAreaLight.halfHeight;\n\t\tvec3 lightColor = rectAreaLight.color;\n\t\tfloat roughness = material.roughness;\n\t\tvec3 rectCoords[ 4 ];\n\t\trectCoords[ 0 ] = lightPos + halfWidth - halfHeight;\t\trectCoords[ 1 ] = lightPos - halfWidth - halfHeight;\n\t\trectCoords[ 2 ] = lightPos - halfWidth + halfHeight;\n\t\trectCoords[ 3 ] = lightPos + halfWidth + halfHeight;\n\t\tvec2 uv = LTC_Uv( normal, viewDir, roughness );\n\t\tvec4 t1 = texture2D( ltc_1, uv );\n\t\tvec4 t2 = texture2D( ltc_2, uv );\n\t\tmat3 mInv = mat3(\n\t\t\tvec3( t1.x, 0, t1.y ),\n\t\t\tvec3( 0, 1, 0 ),\n\t\t\tvec3( t1.z, 0, t1.w )\n\t\t);\n\t\tvec3 fresnel = ( material.specularColorBlended * t2.x + ( material.specularF90 - material.specularColorBlended ) * t2.y );\n\t\treflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );\n\t\treflectedLight.directDiffuse += lightColor * material.diffuseContribution * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );\n\t\t#ifdef USE_CLEARCOAT\n\t\t\tvec3 Ncc = geometryClearcoatNormal;\n\t\t\tvec2 uvClearcoat = LTC_Uv( Ncc, viewDir, material.clearcoatRoughness );\n\t\t\tvec4 t1Clearcoat = texture2D( ltc_1, uvClearcoat );\n\t\t\tvec4 t2Clearcoat = texture2D( ltc_2, uvClearcoat );\n\t\t\tmat3 mInvClearcoat = mat3(\n\t\t\t\tvec3( t1Clearcoat.x, 0, t1Clearcoat.y ),\n\t\t\t\tvec3( 0, 1, 0 
),\n\t\t\t\tvec3( t1Clearcoat.z, 0, t1Clearcoat.w )\n\t\t\t);\n\t\t\tvec3 fresnelClearcoat = material.clearcoatF0 * t2Clearcoat.x + ( material.clearcoatF90 - material.clearcoatF0 ) * t2Clearcoat.y;\n\t\t\tclearcoatSpecularDirect += lightColor * fresnelClearcoat * LTC_Evaluate( Ncc, viewDir, position, mInvClearcoat, rectCoords );\n\t\t#endif\n\t}\n#endif\nvoid RE_Direct_Physical( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNLcc = saturate( dot( geometryClearcoatNormal, directLight.direction ) );\n\t\tvec3 ccIrradiance = dotNLcc * directLight.color;\n\t\tclearcoatSpecularDirect += ccIrradiance * BRDF_GGX_Clearcoat( directLight.direction, geometryViewDir, geometryClearcoatNormal, material );\n\t#endif\n\t#ifdef USE_SHEEN\n \n \t\tsheenSpecularDirect += irradiance * BRDF_Sheen( directLight.direction, geometryViewDir, geometryNormal, material.sheenColor, material.sheenRoughness );\n \n \t\tfloat sheenAlbedoV = IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness );\n \t\tfloat sheenAlbedoL = IBLSheenBRDF( geometryNormal, directLight.direction, material.sheenRoughness );\n \n \t\tfloat sheenEnergyComp = 1.0 - max3( material.sheenColor ) * max( sheenAlbedoV, sheenAlbedoL );\n \n \t\tirradiance *= sheenEnergyComp;\n \n \t#endif\n\treflectedLight.directSpecular += irradiance * BRDF_GGX_Multiscatter( directLight.direction, geometryViewDir, geometryNormal, material );\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseContribution );\n}\nvoid RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 
geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tvec3 diffuse = irradiance * BRDF_Lambert( material.diffuseContribution );\n\t#ifdef USE_SHEEN\n\t\tfloat sheenAlbedo = IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness );\n\t\tfloat sheenEnergyComp = 1.0 - max3( material.sheenColor ) * sheenAlbedo;\n\t\tdiffuse *= sheenEnergyComp;\n\t#endif\n\treflectedLight.indirectDiffuse += diffuse;\n}\nvoid RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 irradiance, const in vec3 clearcoatRadiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight) {\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatSpecularIndirect += clearcoatRadiance * EnvironmentBRDF( geometryClearcoatNormal, geometryViewDir, material.clearcoatF0, material.clearcoatF90, material.clearcoatRoughness );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecularIndirect += irradiance * material.sheenColor * IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness ) * RECIPROCAL_PI;\n \t#endif\n\tvec3 singleScatteringDielectric = vec3( 0.0 );\n\tvec3 multiScatteringDielectric = vec3( 0.0 );\n\tvec3 singleScatteringMetallic = vec3( 0.0 );\n\tvec3 multiScatteringMetallic = vec3( 0.0 );\n\t#ifdef USE_IRIDESCENCE\n\t\tcomputeMultiscatteringIridescence( geometryNormal, geometryViewDir, material.specularColor, material.specularF90, material.iridescence, material.iridescenceFresnelDielectric, material.roughness, singleScatteringDielectric, multiScatteringDielectric );\n\t\tcomputeMultiscatteringIridescence( geometryNormal, geometryViewDir, material.diffuseColor, material.specularF90, material.iridescence, material.iridescenceFresnelMetallic, material.roughness, singleScatteringMetallic, multiScatteringMetallic );\n\t#else\n\t\tcomputeMultiscattering( 
geometryNormal, geometryViewDir, material.specularColor, material.specularF90, material.roughness, singleScatteringDielectric, multiScatteringDielectric );\n\t\tcomputeMultiscattering( geometryNormal, geometryViewDir, material.diffuseColor, material.specularF90, material.roughness, singleScatteringMetallic, multiScatteringMetallic );\n\t#endif\n\tvec3 singleScattering = mix( singleScatteringDielectric, singleScatteringMetallic, material.metalness );\n\tvec3 multiScattering = mix( multiScatteringDielectric, multiScatteringMetallic, material.metalness );\n\tvec3 totalScatteringDielectric = singleScatteringDielectric + multiScatteringDielectric;\n\tvec3 diffuse = material.diffuseContribution * ( 1.0 - totalScatteringDielectric );\n\tvec3 cosineWeightedIrradiance = irradiance * RECIPROCAL_PI;\n\tvec3 indirectSpecular = radiance * singleScattering;\n\tindirectSpecular += multiScattering * cosineWeightedIrradiance;\n\tvec3 indirectDiffuse = diffuse * cosineWeightedIrradiance;\n\t#ifdef USE_SHEEN\n\t\tfloat sheenAlbedo = IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness );\n\t\tfloat sheenEnergyComp = 1.0 - max3( material.sheenColor ) * sheenAlbedo;\n\t\tindirectSpecular *= sheenEnergyComp;\n\t\tindirectDiffuse *= sheenEnergyComp;\n\t#endif\n\treflectedLight.indirectSpecular += indirectSpecular;\n\treflectedLight.indirectDiffuse += indirectDiffuse;\n}\n#define RE_Direct\t\t\t\tRE_Direct_Physical\n#define RE_Direct_RectArea\t\tRE_Direct_RectArea_Physical\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Physical\n#define RE_IndirectSpecular\t\tRE_IndirectSpecular_Physical\nfloat computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {\n\treturn saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );\n}"; -var lights_fragment_begin = "\nvec3 geometryPosition = - vViewPosition;\nvec3 geometryNormal = normal;\nvec3 geometryViewDir = ( isOrthographic ) ? 
vec3( 0, 0, 1 ) : normalize( vViewPosition );\nvec3 geometryClearcoatNormal = vec3( 0.0 );\n#ifdef USE_CLEARCOAT\n\tgeometryClearcoatNormal = clearcoatNormal;\n#endif\n#ifdef USE_IRIDESCENCE\n\tfloat dotNVi = saturate( dot( normal, geometryViewDir ) );\n\tif ( material.iridescenceThickness == 0.0 ) {\n\t\tmaterial.iridescence = 0.0;\n\t} else {\n\t\tmaterial.iridescence = saturate( material.iridescence );\n\t}\n\tif ( material.iridescence > 0.0 ) {\n\t\tmaterial.iridescenceFresnel = evalIridescence( 1.0, material.iridescenceIOR, dotNVi, material.iridescenceThickness, material.specularColor );\n\t\tmaterial.iridescenceF0 = Schlick_to_F0( material.iridescenceFresnel, 1.0, dotNVi );\n\t}\n#endif\nIncidentLight directLight;\n#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )\n\tPointLight pointLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tpointLight = pointLights[ i ];\n\t\tgetPointLightInfo( pointLight, geometryPosition, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS )\n\t\tpointLightShadow = pointLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? 
getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowIntensity, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )\n\tSpotLight spotLight;\n\tvec4 spotColor;\n\tvec3 spotLightCoord;\n\tbool inSpotLightMap;\n\t#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tspotLight = spotLights[ i ];\n\t\tgetSpotLightInfo( spotLight, geometryPosition, directLight );\n\t\t#if ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#define SPOT_LIGHT_MAP_INDEX UNROLLED_LOOP_INDEX\n\t\t#elif ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\t#define SPOT_LIGHT_MAP_INDEX NUM_SPOT_LIGHT_MAPS\n\t\t#else\n\t\t#define SPOT_LIGHT_MAP_INDEX ( UNROLLED_LOOP_INDEX - NUM_SPOT_LIGHT_SHADOWS + NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#endif\n\t\t#if ( SPOT_LIGHT_MAP_INDEX < NUM_SPOT_LIGHT_MAPS )\n\t\t\tspotLightCoord = vSpotLightCoord[ i ].xyz / vSpotLightCoord[ i ].w;\n\t\t\tinSpotLightMap = all( lessThan( abs( spotLightCoord * 2. - 1. ), vec3( 1.0 ) ) );\n\t\t\tspotColor = texture2D( spotLightMap[ SPOT_LIGHT_MAP_INDEX ], spotLightCoord.xy );\n\t\t\tdirectLight.color = inSpotLightMap ? directLight.color * spotColor.rgb : directLight.color;\n\t\t#endif\n\t\t#undef SPOT_LIGHT_MAP_INDEX\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\tspotLightShadow = spotLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? 
getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowIntensity, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )\n\tDirectionalLight directionalLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tdirectionalLight = directionalLights[ i ];\n\t\tgetDirectionalLightInfo( directionalLight, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )\n\t\tdirectionalLightShadow = directionalLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )\n\tRectAreaLight rectAreaLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {\n\t\trectAreaLight = rectAreaLights[ i ];\n\t\tRE_Direct_RectArea( rectAreaLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if defined( RE_IndirectDiffuse )\n\tvec3 iblIrradiance = vec3( 0.0 );\n\tvec3 irradiance = getAmbientLightIrradiance( ambientLightColor );\n\t#if defined( USE_LIGHT_PROBES )\n\t\tirradiance += 
getLightProbeIrradiance( lightProbe, geometryNormal );\n\t#endif\n\t#if ( NUM_HEMI_LIGHTS > 0 )\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\t\tirradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometryNormal );\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if defined( RE_IndirectSpecular )\n\tvec3 radiance = vec3( 0.0 );\n\tvec3 clearcoatRadiance = vec3( 0.0 );\n#endif"; +var lights_fragment_begin = "\nvec3 geometryPosition = - vViewPosition;\nvec3 geometryNormal = normal;\nvec3 geometryViewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( vViewPosition );\nvec3 geometryClearcoatNormal = vec3( 0.0 );\n#ifdef USE_CLEARCOAT\n\tgeometryClearcoatNormal = clearcoatNormal;\n#endif\n#ifdef USE_IRIDESCENCE\n\tfloat dotNVi = saturate( dot( normal, geometryViewDir ) );\n\tif ( material.iridescenceThickness == 0.0 ) {\n\t\tmaterial.iridescence = 0.0;\n\t} else {\n\t\tmaterial.iridescence = saturate( material.iridescence );\n\t}\n\tif ( material.iridescence > 0.0 ) {\n\t\tmaterial.iridescenceFresnelDielectric = evalIridescence( 1.0, material.iridescenceIOR, dotNVi, material.iridescenceThickness, material.specularColor );\n\t\tmaterial.iridescenceFresnelMetallic = evalIridescence( 1.0, material.iridescenceIOR, dotNVi, material.iridescenceThickness, material.diffuseColor );\n\t\tmaterial.iridescenceFresnel = mix( material.iridescenceFresnelDielectric, material.iridescenceFresnelMetallic, material.metalness );\n\t\tmaterial.iridescenceF0 = Schlick_to_F0( material.iridescenceFresnel, 1.0, dotNVi );\n\t}\n#endif\nIncidentLight directLight;\n#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )\n\tPointLight pointLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tpointLight = pointLights[ i ];\n\t\tgetPointLightInfo( pointLight, geometryPosition, directLight 
);\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS ) && ( defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_BASIC ) )\n\t\tpointLightShadow = pointLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowIntensity, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )\n\tSpotLight spotLight;\n\tvec4 spotColor;\n\tvec3 spotLightCoord;\n\tbool inSpotLightMap;\n\t#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tspotLight = spotLights[ i ];\n\t\tgetSpotLightInfo( spotLight, geometryPosition, directLight );\n\t\t#if ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#define SPOT_LIGHT_MAP_INDEX UNROLLED_LOOP_INDEX\n\t\t#elif ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\t#define SPOT_LIGHT_MAP_INDEX NUM_SPOT_LIGHT_MAPS\n\t\t#else\n\t\t#define SPOT_LIGHT_MAP_INDEX ( UNROLLED_LOOP_INDEX - NUM_SPOT_LIGHT_SHADOWS + NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#endif\n\t\t#if ( SPOT_LIGHT_MAP_INDEX < NUM_SPOT_LIGHT_MAPS )\n\t\t\tspotLightCoord = vSpotLightCoord[ i ].xyz / vSpotLightCoord[ i ].w;\n\t\t\tinSpotLightMap = all( lessThan( abs( spotLightCoord * 2. - 1. ), vec3( 1.0 ) ) );\n\t\t\tspotColor = texture2D( spotLightMap[ SPOT_LIGHT_MAP_INDEX ], spotLightCoord.xy );\n\t\t\tdirectLight.color = inSpotLightMap ? 
directLight.color * spotColor.rgb : directLight.color;\n\t\t#endif\n\t\t#undef SPOT_LIGHT_MAP_INDEX\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\tspotLightShadow = spotLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowIntensity, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )\n\tDirectionalLight directionalLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tdirectionalLight = directionalLights[ i ];\n\t\tgetDirectionalLightInfo( directionalLight, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )\n\t\tdirectionalLightShadow = directionalLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? 
getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )\n\tRectAreaLight rectAreaLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {\n\t\trectAreaLight = rectAreaLights[ i ];\n\t\tRE_Direct_RectArea( rectAreaLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if defined( RE_IndirectDiffuse )\n\tvec3 iblIrradiance = vec3( 0.0 );\n\tvec3 irradiance = getAmbientLightIrradiance( ambientLightColor );\n\t#if defined( USE_LIGHT_PROBES )\n\t\tirradiance += getLightProbeIrradiance( lightProbe, geometryNormal );\n\t#endif\n\t#if ( NUM_HEMI_LIGHTS > 0 )\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\t\tirradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometryNormal );\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if defined( RE_IndirectSpecular )\n\tvec3 radiance = vec3( 0.0 );\n\tvec3 clearcoatRadiance = vec3( 0.0 );\n#endif"; -var lights_fragment_maps = "#if defined( RE_IndirectDiffuse )\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel = texture2D( lightMap, vLightMapUv );\n\t\tvec3 lightMapIrradiance = lightMapTexel.rgb * lightMapIntensity;\n\t\tirradiance += lightMapIrradiance;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( STANDARD ) && defined( ENVMAP_TYPE_CUBE_UV )\n\t\tiblIrradiance += getIBLIrradiance( geometryNormal );\n\t#endif\n#endif\n#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )\n\t#ifdef USE_ANISOTROPY\n\t\tradiance += 
getIBLAnisotropyRadiance( geometryViewDir, geometryNormal, material.roughness, material.anisotropyB, material.anisotropy );\n\t#else\n\t\tradiance += getIBLRadiance( geometryViewDir, geometryNormal, material.roughness );\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatRadiance += getIBLRadiance( geometryViewDir, geometryClearcoatNormal, material.clearcoatRoughness );\n\t#endif\n#endif"; +var lights_fragment_maps = "#if defined( RE_IndirectDiffuse )\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel = texture2D( lightMap, vLightMapUv );\n\t\tvec3 lightMapIrradiance = lightMapTexel.rgb * lightMapIntensity;\n\t\tirradiance += lightMapIrradiance;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( ENVMAP_TYPE_CUBE_UV )\n\t\t#if defined( STANDARD ) || defined( LAMBERT ) || defined( PHONG )\n\t\t\tiblIrradiance += getIBLIrradiance( geometryNormal );\n\t\t#endif\n\t#endif\n#endif\n#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )\n\t#ifdef USE_ANISOTROPY\n\t\tradiance += getIBLAnisotropyRadiance( geometryViewDir, geometryNormal, material.roughness, material.anisotropyB, material.anisotropy );\n\t#else\n\t\tradiance += getIBLRadiance( geometryViewDir, geometryNormal, material.roughness );\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatRadiance += getIBLRadiance( geometryViewDir, geometryClearcoatNormal, material.clearcoatRoughness );\n\t#endif\n#endif"; -var lights_fragment_end = "#if defined( RE_IndirectDiffuse )\n\tRE_IndirectDiffuse( irradiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif\n#if defined( RE_IndirectSpecular )\n\tRE_IndirectSpecular( radiance, iblIrradiance, clearcoatRadiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif"; +var lights_fragment_end = "#if defined( RE_IndirectDiffuse )\n\t#if defined( LAMBERT ) || defined( PHONG )\n\t\tirradiance += iblIrradiance;\n\t#endif\n\tRE_IndirectDiffuse( irradiance, 
geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif\n#if defined( RE_IndirectSpecular )\n\tRE_IndirectSpecular( radiance, iblIrradiance, clearcoatRadiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif"; var logdepthbuf_fragment = "#if defined( USE_LOGARITHMIC_DEPTH_BUFFER )\n\tgl_FragDepth = vIsPerspective == 0.0 ? gl_FragCoord.z : log2( vFragDepth ) * logDepthBufFC * 0.5;\n#endif"; @@ -35960,7 +36345,7 @@ var iridescence_pars_fragment = "#ifdef USE_IRIDESCENCEMAP\n\tuniform sampler2D var opaque_fragment = "#ifdef OPAQUE\ndiffuseColor.a = 1.0;\n#endif\n#ifdef USE_TRANSMISSION\ndiffuseColor.a *= material.transmissionAlpha;\n#endif\ngl_FragColor = vec4( outgoingLight, diffuseColor.a );"; -var packing = "vec3 packNormalToRGB( const in vec3 normal ) {\n\treturn normalize( normal ) * 0.5 + 0.5;\n}\nvec3 unpackRGBToNormal( const in vec3 rgb ) {\n\treturn 2.0 * rgb.xyz - 1.0;\n}\nconst float PackUpscale = 256. / 255.;const float UnpackDownscale = 255. / 256.;const float ShiftRight8 = 1. / 256.;\nconst float Inv255 = 1. / 255.;\nconst vec4 PackFactors = vec4( 1.0, 256.0, 256.0 * 256.0, 256.0 * 256.0 * 256.0 );\nconst vec2 UnpackFactors2 = vec2( UnpackDownscale, 1.0 / PackFactors.g );\nconst vec3 UnpackFactors3 = vec3( UnpackDownscale / PackFactors.rg, 1.0 / PackFactors.b );\nconst vec4 UnpackFactors4 = vec4( UnpackDownscale / PackFactors.rgb, 1.0 / PackFactors.a );\nvec4 packDepthToRGBA( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec4( 0., 0., 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec4( 1., 1., 1., 1. );\n\tfloat vuf;\n\tfloat af = modf( v * PackFactors.a, vuf );\n\tfloat bf = modf( vuf * ShiftRight8, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec4( vuf * Inv255, gf * PackUpscale, bf * PackUpscale, af );\n}\nvec3 packDepthToRGB( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec3( 0., 0., 0. 
);\n\tif( v >= 1.0 )\n\t\treturn vec3( 1., 1., 1. );\n\tfloat vuf;\n\tfloat bf = modf( v * PackFactors.b, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec3( vuf * Inv255, gf * PackUpscale, bf );\n}\nvec2 packDepthToRG( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec2( 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec2( 1., 1. );\n\tfloat vuf;\n\tfloat gf = modf( v * 256., vuf );\n\treturn vec2( vuf * Inv255, gf );\n}\nfloat unpackRGBAToDepth( const in vec4 v ) {\n\treturn dot( v, UnpackFactors4 );\n}\nfloat unpackRGBToDepth( const in vec3 v ) {\n\treturn dot( v, UnpackFactors3 );\n}\nfloat unpackRGToDepth( const in vec2 v ) {\n\treturn v.r * UnpackFactors2.r + v.g * UnpackFactors2.g;\n}\nvec4 pack2HalfToRGBA( const in vec2 v ) {\n\tvec4 r = vec4( v.x, fract( v.x * 255.0 ), v.y, fract( v.y * 255.0 ) );\n\treturn vec4( r.x - r.y / 255.0, r.y, r.z - r.w / 255.0, r.w );\n}\nvec2 unpackRGBATo2Half( const in vec4 v ) {\n\treturn vec2( v.x + ( v.y / 255.0 ), v.z + ( v.w / 255.0 ) );\n}\nfloat viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( viewZ + near ) / ( near - far );\n}\nfloat orthographicDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\treturn depth * ( near - far ) - near;\n}\nfloat viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( ( near + viewZ ) * far ) / ( ( far - near ) * viewZ );\n}\nfloat perspectiveDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\treturn ( near * far ) / ( ( far - near ) * depth - far );\n}"; +var packing = "vec3 packNormalToRGB( const in vec3 normal ) {\n\treturn normalize( normal ) * 0.5 + 0.5;\n}\nvec3 unpackRGBToNormal( const in vec3 rgb ) {\n\treturn 2.0 * rgb.xyz - 1.0;\n}\nconst float PackUpscale = 256. / 255.;const float UnpackDownscale = 255. / 256.;const float ShiftRight8 = 1. / 256.;\nconst float Inv255 = 1. 
/ 255.;\nconst vec4 PackFactors = vec4( 1.0, 256.0, 256.0 * 256.0, 256.0 * 256.0 * 256.0 );\nconst vec2 UnpackFactors2 = vec2( UnpackDownscale, 1.0 / PackFactors.g );\nconst vec3 UnpackFactors3 = vec3( UnpackDownscale / PackFactors.rg, 1.0 / PackFactors.b );\nconst vec4 UnpackFactors4 = vec4( UnpackDownscale / PackFactors.rgb, 1.0 / PackFactors.a );\nvec4 packDepthToRGBA( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec4( 0., 0., 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec4( 1., 1., 1., 1. );\n\tfloat vuf;\n\tfloat af = modf( v * PackFactors.a, vuf );\n\tfloat bf = modf( vuf * ShiftRight8, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec4( vuf * Inv255, gf * PackUpscale, bf * PackUpscale, af );\n}\nvec3 packDepthToRGB( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec3( 0., 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec3( 1., 1., 1. );\n\tfloat vuf;\n\tfloat bf = modf( v * PackFactors.b, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec3( vuf * Inv255, gf * PackUpscale, bf );\n}\nvec2 packDepthToRG( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec2( 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec2( 1., 1. 
);\n\tfloat vuf;\n\tfloat gf = modf( v * 256., vuf );\n\treturn vec2( vuf * Inv255, gf );\n}\nfloat unpackRGBAToDepth( const in vec4 v ) {\n\treturn dot( v, UnpackFactors4 );\n}\nfloat unpackRGBToDepth( const in vec3 v ) {\n\treturn dot( v, UnpackFactors3 );\n}\nfloat unpackRGToDepth( const in vec2 v ) {\n\treturn v.r * UnpackFactors2.r + v.g * UnpackFactors2.g;\n}\nvec4 pack2HalfToRGBA( const in vec2 v ) {\n\tvec4 r = vec4( v.x, fract( v.x * 255.0 ), v.y, fract( v.y * 255.0 ) );\n\treturn vec4( r.x - r.y / 255.0, r.y, r.z - r.w / 255.0, r.w );\n}\nvec2 unpackRGBATo2Half( const in vec4 v ) {\n\treturn vec2( v.x + ( v.y / 255.0 ), v.z + ( v.w / 255.0 ) );\n}\nfloat viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( viewZ + near ) / ( near - far );\n}\nfloat orthographicDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\n\t\treturn depth * ( far - near ) - far;\n\t#else\n\t\treturn depth * ( near - far ) - near;\n\t#endif\n}\nfloat viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( ( near + viewZ ) * far ) / ( ( far - near ) * viewZ );\n}\nfloat perspectiveDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\t\n\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\treturn ( near * far ) / ( ( near - far ) * depth - near );\n\t#else\n\t\treturn ( near * far ) / ( ( far - near ) * depth - far );\n\t#endif\n}"; var premultiplied_alpha_fragment = "#ifdef PREMULTIPLIED_ALPHA\n\tgl_FragColor.rgb *= gl_FragColor.a;\n#endif"; @@ -35974,13 +36359,13 @@ var roughnessmap_fragment = "float roughnessFactor = roughness;\n#ifdef USE_ROUG var roughnessmap_pars_fragment = "#ifdef USE_ROUGHNESSMAP\n\tuniform sampler2D roughnessMap;\n#endif"; -var shadowmap_pars_fragment = "#if NUM_SPOT_LIGHT_COORDS > 0\n\tvarying vec4 vSpotLightCoord[ NUM_SPOT_LIGHT_COORDS ];\n#endif\n#if NUM_SPOT_LIGHT_MAPS > 
0\n\tuniform sampler2D spotLightMap[ NUM_SPOT_LIGHT_MAPS ];\n#endif\n#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n\tfloat texture2DCompare( sampler2D depths, vec2 uv, float compare ) {\n\t\tfloat depth = unpackRGBAToDepth( texture2D( depths, uv ) );\n\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\treturn step( depth, compare );\n\t\t#else\n\t\t\treturn step( compare, depth );\n\t\t#endif\n\t}\n\tvec2 texture2DDistribution( sampler2D shadow, vec2 uv ) {\n\t\treturn unpackRGBATo2Half( texture2D( shadow, uv ) );\n\t}\n\tfloat VSMShadow( sampler2D shadow, vec2 uv, float compare ) {\n\t\tfloat occlusion = 1.0;\n\t\tvec2 distribution = texture2DDistribution( shadow, uv );\n\t\t#ifdef 
USE_REVERSED_DEPTH_BUFFER\n\t\t\tfloat hard_shadow = step( distribution.x, compare );\n\t\t#else\n\t\t\tfloat hard_shadow = step( compare, distribution.x );\n\t\t#endif\n\t\tif ( hard_shadow != 1.0 ) {\n\t\t\tfloat distance = compare - distribution.x;\n\t\t\tfloat variance = max( 0.00000, distribution.y * distribution.y );\n\t\t\tfloat softness_probability = variance / (variance + distance * distance );\t\t\tsoftness_probability = clamp( ( softness_probability - 0.3 ) / ( 0.95 - 0.3 ), 0.0, 1.0 );\t\t\tocclusion = clamp( max( hard_shadow, softness_probability ), 0.0, 1.0 );\n\t\t}\n\t\treturn occlusion;\n\t}\n\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\tfloat shadow = 1.0;\n\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\tshadowCoord.z += shadowBias;\n\t\tbool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;\n\t\tbool frustumTest = inFrustum && shadowCoord.z <= 1.0;\n\t\tif ( frustumTest ) {\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx0 = - texelSize.x * shadowRadius;\n\t\t\tfloat dy0 = - texelSize.y * shadowRadius;\n\t\t\tfloat dx1 = + texelSize.x * shadowRadius;\n\t\t\tfloat dy1 = + texelSize.y * shadowRadius;\n\t\t\tfloat dx2 = dx0 / 2.0;\n\t\t\tfloat dy2 = dy0 / 2.0;\n\t\t\tfloat dx3 = dx1 / 2.0;\n\t\t\tfloat dy3 = dy1 / 2.0;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 
dx3, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )\n\t\t\t) * ( 1.0 / 17.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_PCF_SOFT )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx = texelSize.x;\n\t\t\tfloat dy = texelSize.y;\n\t\t\tvec2 uv = shadowCoord.xy;\n\t\t\tvec2 f = fract( uv * shadowMapSize + 0.5 );\n\t\t\tuv -= f * texelSize;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, uv, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( dx, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 0.0, dy ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + texelSize, shadowCoord.z ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, dy ), shadowCoord.z ),\n\t\t\t\t\t f.x ) 
+\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( 0.0, -dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 0.0, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( mix( texture2DCompare( shadowMap, uv + vec2( -dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t mix( texture2DCompare( shadowMap, uv + vec2( -dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t f.y )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_VSM )\n\t\t\tshadow = VSMShadow( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#else\n\t\t\tshadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#endif\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n\tvec2 cubeToUV( vec3 v, float texelSizeY ) {\n\t\tvec3 absV = abs( v );\n\t\tfloat scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );\n\t\tabsV *= scaleToCube;\n\t\tv *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );\n\t\tvec2 planar = v.xy;\n\t\tfloat almostATexel = 1.5 * texelSizeY;\n\t\tfloat almostOne = 1.0 - almostATexel;\n\t\tif ( absV.z >= almostOne ) {\n\t\t\tif ( v.z > 0.0 )\n\t\t\t\tplanar.x = 4.0 - v.x;\n\t\t} else if ( absV.x >= almostOne ) {\n\t\t\tfloat signX = sign( v.x );\n\t\t\tplanar.x = v.z * signX + 2.0 * signX;\n\t\t} else if ( absV.y >= almostOne ) {\n\t\t\tfloat signY = sign( v.y );\n\t\t\tplanar.x = v.x + 2.0 * signY + 2.0;\n\t\t\tplanar.y = v.z * signY - 2.0;\n\t\t}\n\t\treturn vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );\n\t}\n\tfloat getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, 
vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tfloat shadow = 1.0;\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\t\n\t\tfloat lightToPositionLength = length( lightToPosition );\n\t\tif ( lightToPositionLength - shadowCameraFar <= 0.0 && lightToPositionLength - shadowCameraNear >= 0.0 ) {\n\t\t\tfloat dp = ( lightToPositionLength - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear );\t\t\tdp += shadowBias;\n\t\t\tvec3 bd3D = normalize( lightToPosition );\n\t\t\tvec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );\n\t\t\t#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT ) || defined( SHADOWMAP_TYPE_VSM )\n\t\t\t\tvec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;\n\t\t\t\tshadow = (\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )\n\t\t\t\t) * ( 1.0 / 9.0 );\n\t\t\t#else\n\t\t\t\tshadow = texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );\n\t\t\t#endif\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n#endif"; +var shadowmap_pars_fragment = "#if NUM_SPOT_LIGHT_COORDS > 0\n\tvarying vec4 vSpotLightCoord[ NUM_SPOT_LIGHT_COORDS ];\n#endif\n#if NUM_SPOT_LIGHT_MAPS > 0\n\tuniform sampler2D spotLightMap[ NUM_SPOT_LIGHT_MAPS 
];\n#endif\n#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tuniform sampler2DShadow directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\t#else\n\t\t\tuniform sampler2D directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\t#endif\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tuniform sampler2DShadow spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\t#else\n\t\t\tuniform sampler2D spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\t#endif\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tuniform samplerCubeShadow pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\t#elif defined( SHADOWMAP_TYPE_BASIC )\n\t\t\tuniform samplerCube pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\t#endif\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\tfloat interleavedGradientNoise( vec2 position ) {\n\t\t\treturn fract( 52.9829189 * fract( dot( position, vec2( 0.06711056, 0.00583715 ) ) ) );\n\t\t}\n\t\tvec2 
vogelDiskSample( int sampleIndex, int samplesCount, float phi ) {\n\t\t\tconst float goldenAngle = 2.399963229728653;\n\t\t\tfloat r = sqrt( ( float( sampleIndex ) + 0.5 ) / float( samplesCount ) );\n\t\t\tfloat theta = float( sampleIndex ) * goldenAngle + phi;\n\t\t\treturn vec2( cos( theta ), sin( theta ) ) * r;\n\t\t}\n\t#endif\n\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\tfloat getShadow( sampler2DShadow shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\t\tfloat shadow = 1.0;\n\t\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\t\tshadowCoord.z += shadowBias;\n\t\t\tbool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;\n\t\t\tbool frustumTest = inFrustum && shadowCoord.z <= 1.0;\n\t\t\tif ( frustumTest ) {\n\t\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\t\tfloat radius = shadowRadius * texelSize.x;\n\t\t\t\tfloat phi = interleavedGradientNoise( gl_FragCoord.xy ) * PI2;\n\t\t\t\tshadow = (\n\t\t\t\t\ttexture( shadowMap, vec3( shadowCoord.xy + vogelDiskSample( 0, 5, phi ) * radius, shadowCoord.z ) ) +\n\t\t\t\t\ttexture( shadowMap, vec3( shadowCoord.xy + vogelDiskSample( 1, 5, phi ) * radius, shadowCoord.z ) ) +\n\t\t\t\t\ttexture( shadowMap, vec3( shadowCoord.xy + vogelDiskSample( 2, 5, phi ) * radius, shadowCoord.z ) ) +\n\t\t\t\t\ttexture( shadowMap, vec3( shadowCoord.xy + vogelDiskSample( 3, 5, phi ) * radius, shadowCoord.z ) ) +\n\t\t\t\t\ttexture( shadowMap, vec3( shadowCoord.xy + vogelDiskSample( 4, 5, phi ) * radius, shadowCoord.z ) )\n\t\t\t\t) * 0.2;\n\t\t\t}\n\t\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t\t}\n\t#elif defined( SHADOWMAP_TYPE_VSM )\n\t\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\t\tfloat shadow = 1.0;\n\t\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\tshadowCoord.z -= 
shadowBias;\n\t\t\t#else\n\t\t\t\tshadowCoord.z += shadowBias;\n\t\t\t#endif\n\t\t\tbool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;\n\t\t\tbool frustumTest = inFrustum && shadowCoord.z <= 1.0;\n\t\t\tif ( frustumTest ) {\n\t\t\t\tvec2 distribution = texture2D( shadowMap, shadowCoord.xy ).rg;\n\t\t\t\tfloat mean = distribution.x;\n\t\t\t\tfloat variance = distribution.y * distribution.y;\n\t\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\t\tfloat hard_shadow = step( mean, shadowCoord.z );\n\t\t\t\t#else\n\t\t\t\t\tfloat hard_shadow = step( shadowCoord.z, mean );\n\t\t\t\t#endif\n\t\t\t\t\n\t\t\t\tif ( hard_shadow == 1.0 ) {\n\t\t\t\t\tshadow = 1.0;\n\t\t\t\t} else {\n\t\t\t\t\tvariance = max( variance, 0.0000001 );\n\t\t\t\t\tfloat d = shadowCoord.z - mean;\n\t\t\t\t\tfloat p_max = variance / ( variance + d * d );\n\t\t\t\t\tp_max = clamp( ( p_max - 0.3 ) / 0.65, 0.0, 1.0 );\n\t\t\t\t\tshadow = max( hard_shadow, p_max );\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t\t}\n\t#else\n\t\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\t\tfloat shadow = 1.0;\n\t\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\tshadowCoord.z -= shadowBias;\n\t\t\t#else\n\t\t\t\tshadowCoord.z += shadowBias;\n\t\t\t#endif\n\t\t\tbool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;\n\t\t\tbool frustumTest = inFrustum && shadowCoord.z <= 1.0;\n\t\t\tif ( frustumTest ) {\n\t\t\t\tfloat depth = texture2D( shadowMap, shadowCoord.xy ).r;\n\t\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\t\tshadow = step( depth, shadowCoord.z );\n\t\t\t\t#else\n\t\t\t\t\tshadow = step( shadowCoord.z, depth );\n\t\t\t\t#endif\n\t\t\t}\n\t\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t\t}\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 
0\n\t#if defined( SHADOWMAP_TYPE_PCF )\n\tfloat getPointShadow( samplerCubeShadow shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tfloat shadow = 1.0;\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\tvec3 bd3D = normalize( lightToPosition );\n\t\tvec3 absVec = abs( lightToPosition );\n\t\tfloat viewSpaceZ = max( max( absVec.x, absVec.y ), absVec.z );\n\t\tif ( viewSpaceZ - shadowCameraFar <= 0.0 && viewSpaceZ - shadowCameraNear >= 0.0 ) {\n\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\tfloat dp = ( shadowCameraNear * ( shadowCameraFar - viewSpaceZ ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );\n\t\t\t\tdp -= shadowBias;\n\t\t\t#else\n\t\t\t\tfloat dp = ( shadowCameraFar * ( viewSpaceZ - shadowCameraNear ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );\n\t\t\t\tdp += shadowBias;\n\t\t\t#endif\n\t\t\tfloat texelSize = shadowRadius / shadowMapSize.x;\n\t\t\tvec3 absDir = abs( bd3D );\n\t\t\tvec3 tangent = absDir.x > absDir.z ? 
vec3( 0.0, 1.0, 0.0 ) : vec3( 1.0, 0.0, 0.0 );\n\t\t\ttangent = normalize( cross( bd3D, tangent ) );\n\t\t\tvec3 bitangent = cross( bd3D, tangent );\n\t\t\tfloat phi = interleavedGradientNoise( gl_FragCoord.xy ) * PI2;\n\t\t\tvec2 sample0 = vogelDiskSample( 0, 5, phi );\n\t\t\tvec2 sample1 = vogelDiskSample( 1, 5, phi );\n\t\t\tvec2 sample2 = vogelDiskSample( 2, 5, phi );\n\t\t\tvec2 sample3 = vogelDiskSample( 3, 5, phi );\n\t\t\tvec2 sample4 = vogelDiskSample( 4, 5, phi );\n\t\t\tshadow = (\n\t\t\t\ttexture( shadowMap, vec4( bd3D + ( tangent * sample0.x + bitangent * sample0.y ) * texelSize, dp ) ) +\n\t\t\t\ttexture( shadowMap, vec4( bd3D + ( tangent * sample1.x + bitangent * sample1.y ) * texelSize, dp ) ) +\n\t\t\t\ttexture( shadowMap, vec4( bd3D + ( tangent * sample2.x + bitangent * sample2.y ) * texelSize, dp ) ) +\n\t\t\t\ttexture( shadowMap, vec4( bd3D + ( tangent * sample3.x + bitangent * sample3.y ) * texelSize, dp ) ) +\n\t\t\t\ttexture( shadowMap, vec4( bd3D + ( tangent * sample4.x + bitangent * sample4.y ) * texelSize, dp ) )\n\t\t\t) * 0.2;\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n\t#elif defined( SHADOWMAP_TYPE_BASIC )\n\tfloat getPointShadow( samplerCube shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tfloat shadow = 1.0;\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\tvec3 absVec = abs( lightToPosition );\n\t\tfloat viewSpaceZ = max( max( absVec.x, absVec.y ), absVec.z );\n\t\tif ( viewSpaceZ - shadowCameraFar <= 0.0 && viewSpaceZ - shadowCameraNear >= 0.0 ) {\n\t\t\tfloat dp = ( shadowCameraFar * ( viewSpaceZ - shadowCameraNear ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );\n\t\t\tdp += shadowBias;\n\t\t\tvec3 bd3D = normalize( lightToPosition );\n\t\t\tfloat depth = textureCube( shadowMap, bd3D ).r;\n\t\t\t#ifdef USE_REVERSED_DEPTH_BUFFER\n\t\t\t\tdepth = 1.0 - 
depth;\n\t\t\t#endif\n\t\t\tshadow = step( dp, depth );\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n\t#endif\n\t#endif\n#endif"; var shadowmap_pars_vertex = "#if NUM_SPOT_LIGHT_COORDS > 0\n\tuniform mat4 spotLightMatrix[ NUM_SPOT_LIGHT_COORDS ];\n\tvarying vec4 vSpotLightCoord[ NUM_SPOT_LIGHT_COORDS ];\n#endif\n#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 pointShadowMatrix[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n#endif"; var shadowmap_vertex = "#if ( defined( USE_SHADOWMAP ) && ( NUM_DIR_LIGHT_SHADOWS > 0 || NUM_POINT_LIGHT_SHADOWS > 0 ) ) || ( NUM_SPOT_LIGHT_COORDS > 0 )\n\tvec3 shadowWorldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\tvec4 shadowWorldPosition;\n#endif\n#if defined( USE_SHADOWMAP )\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < 
NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * directionalLightShadows[ i ].shadowNormalBias, 0 );\n\t\t\tvDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * shadowWorldPosition;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * pointLightShadows[ i ].shadowNormalBias, 0 );\n\t\t\tvPointShadowCoord[ i ] = pointShadowMatrix[ i ] * shadowWorldPosition;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if NUM_SPOT_LIGHT_COORDS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_COORDS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition;\n\t\t#if ( defined( USE_SHADOWMAP ) && UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\t\tshadowWorldPosition.xyz += shadowWorldNormal * spotLightShadows[ i ].shadowNormalBias;\n\t\t#endif\n\t\tvSpotLightCoord[ i ] = spotLightMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n#endif"; -var shadowmask_pars_fragment = "float getShadowMask() {\n\tfloat shadow = 1.0;\n\t#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tdirectionalLight = directionalLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowIntensity, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tspotLight = spotLightShadows[ i ];\n\t\tshadow *= receiveShadow ? 
getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowIntensity, spotLight.shadowBias, spotLight.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tpointLight = pointLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowIntensity, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#endif\n\treturn shadow;\n}"; +var shadowmask_pars_fragment = "float getShadowMask() {\n\tfloat shadow = 1.0;\n\t#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tdirectionalLight = directionalLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowIntensity, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tspotLight = spotLightShadows[ i ];\n\t\tshadow *= receiveShadow ? 
getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowIntensity, spotLight.shadowBias, spotLight.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0 && ( defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_BASIC ) )\n\tPointLightShadow pointLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tpointLight = pointLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowIntensity, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#endif\n\treturn shadow;\n}"; var skinbase_vertex = "#ifdef USE_SKINNING\n\tmat4 boneMatX = getBoneMatrix( skinIndex.x );\n\tmat4 boneMatY = getBoneMatrix( skinIndex.y );\n\tmat4 boneMatZ = getBoneMatrix( skinIndex.z );\n\tmat4 boneMatW = getBoneMatrix( skinIndex.w );\n#endif"; @@ -35998,7 +36383,7 @@ var tonemapping_fragment = "#if defined( TONE_MAPPING )\n\tgl_FragColor.rgb = to var tonemapping_pars_fragment = "#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\nuniform float toneMappingExposure;\nvec3 LinearToneMapping( vec3 color ) {\n\treturn saturate( toneMappingExposure * color );\n}\nvec3 ReinhardToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\treturn saturate( color / ( vec3( 1.0 ) + color ) );\n}\nvec3 CineonToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\tcolor = max( vec3( 0.0 ), color - 0.004 );\n\treturn pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );\n}\nvec3 RRTAndODTFit( vec3 v ) {\n\tvec3 a = v * ( v + 0.0245786 ) - 0.000090537;\n\tvec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;\n\treturn a / b;\n}\nvec3 ACESFilmicToneMapping( vec3 color ) {\n\tconst mat3 ACESInputMat = mat3(\n\t\tvec3( 
0.59719, 0.07600, 0.02840 ),\t\tvec3( 0.35458, 0.90834, 0.13383 ),\n\t\tvec3( 0.04823, 0.01566, 0.83777 )\n\t);\n\tconst mat3 ACESOutputMat = mat3(\n\t\tvec3( 1.60475, -0.10208, -0.00327 ),\t\tvec3( -0.53108, 1.10813, -0.07276 ),\n\t\tvec3( -0.07367, -0.00605, 1.07602 )\n\t);\n\tcolor *= toneMappingExposure / 0.6;\n\tcolor = ACESInputMat * color;\n\tcolor = RRTAndODTFit( color );\n\tcolor = ACESOutputMat * color;\n\treturn saturate( color );\n}\nconst mat3 LINEAR_REC2020_TO_LINEAR_SRGB = mat3(\n\tvec3( 1.6605, - 0.1246, - 0.0182 ),\n\tvec3( - 0.5876, 1.1329, - 0.1006 ),\n\tvec3( - 0.0728, - 0.0083, 1.1187 )\n);\nconst mat3 LINEAR_SRGB_TO_LINEAR_REC2020 = mat3(\n\tvec3( 0.6274, 0.0691, 0.0164 ),\n\tvec3( 0.3293, 0.9195, 0.0880 ),\n\tvec3( 0.0433, 0.0113, 0.8956 )\n);\nvec3 agxDefaultContrastApprox( vec3 x ) {\n\tvec3 x2 = x * x;\n\tvec3 x4 = x2 * x2;\n\treturn + 15.5 * x4 * x2\n\t\t- 40.14 * x4 * x\n\t\t+ 31.96 * x4\n\t\t- 6.868 * x2 * x\n\t\t+ 0.4298 * x2\n\t\t+ 0.1191 * x\n\t\t- 0.00232;\n}\nvec3 AgXToneMapping( vec3 color ) {\n\tconst mat3 AgXInsetMatrix = mat3(\n\t\tvec3( 0.856627153315983, 0.137318972929847, 0.11189821299995 ),\n\t\tvec3( 0.0951212405381588, 0.761241990602591, 0.0767994186031903 ),\n\t\tvec3( 0.0482516061458583, 0.101439036467562, 0.811302368396859 )\n\t);\n\tconst mat3 AgXOutsetMatrix = mat3(\n\t\tvec3( 1.1271005818144368, - 0.1413297634984383, - 0.14132976349843826 ),\n\t\tvec3( - 0.11060664309660323, 1.157823702216272, - 0.11060664309660294 ),\n\t\tvec3( - 0.016493938717834573, - 0.016493938717834257, 1.2519364065950405 )\n\t);\n\tconst float AgxMinEv = - 12.47393;\tconst float AgxMaxEv = 4.026069;\n\tcolor *= toneMappingExposure;\n\tcolor = LINEAR_SRGB_TO_LINEAR_REC2020 * color;\n\tcolor = AgXInsetMatrix * color;\n\tcolor = max( color, 1e-10 );\tcolor = log2( color );\n\tcolor = ( color - AgxMinEv ) / ( AgxMaxEv - AgxMinEv );\n\tcolor = clamp( color, 0.0, 1.0 );\n\tcolor = agxDefaultContrastApprox( color );\n\tcolor = AgXOutsetMatrix * 
color;\n\tcolor = pow( max( vec3( 0.0 ), color ), vec3( 2.2 ) );\n\tcolor = LINEAR_REC2020_TO_LINEAR_SRGB * color;\n\tcolor = clamp( color, 0.0, 1.0 );\n\treturn color;\n}\nvec3 NeutralToneMapping( vec3 color ) {\n\tconst float StartCompression = 0.8 - 0.04;\n\tconst float Desaturation = 0.15;\n\tcolor *= toneMappingExposure;\n\tfloat x = min( color.r, min( color.g, color.b ) );\n\tfloat offset = x < 0.08 ? x - 6.25 * x * x : 0.04;\n\tcolor -= offset;\n\tfloat peak = max( color.r, max( color.g, color.b ) );\n\tif ( peak < StartCompression ) return color;\n\tfloat d = 1. - StartCompression;\n\tfloat newPeak = 1. - d * d / ( peak + d - StartCompression );\n\tcolor *= newPeak / peak;\n\tfloat g = 1. - 1. / ( Desaturation * ( peak - newPeak ) + 1. );\n\treturn mix( color, vec3( newPeak ), g );\n}\nvec3 CustomToneMapping( vec3 color ) { return color; }"; -var transmission_fragment = "#ifdef USE_TRANSMISSION\n\tmaterial.transmission = transmission;\n\tmaterial.transmissionAlpha = 1.0;\n\tmaterial.thickness = thickness;\n\tmaterial.attenuationDistance = attenuationDistance;\n\tmaterial.attenuationColor = attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tmaterial.transmission *= texture2D( transmissionMap, vTransmissionMapUv ).r;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tmaterial.thickness *= texture2D( thicknessMap, vThicknessMapUv ).g;\n\t#endif\n\tvec3 pos = vWorldPosition;\n\tvec3 v = normalize( cameraPosition - pos );\n\tvec3 n = inverseTransformDirection( normal, viewMatrix );\n\tvec4 transmitted = getIBLVolumeRefraction(\n\t\tn, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n\t\tpos, modelMatrix, viewMatrix, projectionMatrix, material.dispersion, material.ior, material.thickness,\n\t\tmaterial.attenuationColor, material.attenuationDistance );\n\tmaterial.transmissionAlpha = mix( material.transmissionAlpha, transmitted.a, material.transmission );\n\ttotalDiffuse = mix( totalDiffuse, transmitted.rgb, 
material.transmission );\n#endif"; +var transmission_fragment = "#ifdef USE_TRANSMISSION\n\tmaterial.transmission = transmission;\n\tmaterial.transmissionAlpha = 1.0;\n\tmaterial.thickness = thickness;\n\tmaterial.attenuationDistance = attenuationDistance;\n\tmaterial.attenuationColor = attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tmaterial.transmission *= texture2D( transmissionMap, vTransmissionMapUv ).r;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tmaterial.thickness *= texture2D( thicknessMap, vThicknessMapUv ).g;\n\t#endif\n\tvec3 pos = vWorldPosition;\n\tvec3 v = normalize( cameraPosition - pos );\n\tvec3 n = inverseTransformDirection( normal, viewMatrix );\n\tvec4 transmitted = getIBLVolumeRefraction(\n\t\tn, v, material.roughness, material.diffuseContribution, material.specularColorBlended, material.specularF90,\n\t\tpos, modelMatrix, viewMatrix, projectionMatrix, material.dispersion, material.ior, material.thickness,\n\t\tmaterial.attenuationColor, material.attenuationDistance );\n\tmaterial.transmissionAlpha = mix( material.transmissionAlpha, transmitted.a, material.transmission );\n\ttotalDiffuse = mix( totalDiffuse, transmitted.rgb, material.transmission );\n#endif"; var transmission_pars_fragment = "#ifdef USE_TRANSMISSION\n\tuniform float transmission;\n\tuniform float thickness;\n\tuniform float attenuationDistance;\n\tuniform vec3 attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tuniform sampler2D transmissionMap;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tuniform sampler2D thicknessMap;\n\t#endif\n\tuniform vec2 transmissionSamplerSize;\n\tuniform sampler2D transmissionSamplerMap;\n\tuniform mat4 modelMatrix;\n\tuniform mat4 projectionMatrix;\n\tvarying vec3 vWorldPosition;\n\tfloat w0( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * ( a * ( - a + 3.0 ) - 3.0 ) + 1.0 );\n\t}\n\tfloat w1( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * a * ( 3.0 * a - 6.0 ) + 4.0 );\n\t}\n\tfloat w2( float a ){\n\t\treturn ( 1.0 / 6.0 ) * ( a * ( a * ( - 3.0 * a + 
3.0 ) + 3.0 ) + 1.0 );\n\t}\n\tfloat w3( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * a * a );\n\t}\n\tfloat g0( float a ) {\n\t\treturn w0( a ) + w1( a );\n\t}\n\tfloat g1( float a ) {\n\t\treturn w2( a ) + w3( a );\n\t}\n\tfloat h0( float a ) {\n\t\treturn - 1.0 + w1( a ) / ( w0( a ) + w1( a ) );\n\t}\n\tfloat h1( float a ) {\n\t\treturn 1.0 + w3( a ) / ( w2( a ) + w3( a ) );\n\t}\n\tvec4 bicubic( sampler2D tex, vec2 uv, vec4 texelSize, float lod ) {\n\t\tuv = uv * texelSize.zw + 0.5;\n\t\tvec2 iuv = floor( uv );\n\t\tvec2 fuv = fract( uv );\n\t\tfloat g0x = g0( fuv.x );\n\t\tfloat g1x = g1( fuv.x );\n\t\tfloat h0x = h0( fuv.x );\n\t\tfloat h1x = h1( fuv.x );\n\t\tfloat h0y = h0( fuv.y );\n\t\tfloat h1y = h1( fuv.y );\n\t\tvec2 p0 = ( vec2( iuv.x + h0x, iuv.y + h0y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p1 = ( vec2( iuv.x + h1x, iuv.y + h0y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p2 = ( vec2( iuv.x + h0x, iuv.y + h1y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p3 = ( vec2( iuv.x + h1x, iuv.y + h1y ) - 0.5 ) * texelSize.xy;\n\t\treturn g0( fuv.y ) * ( g0x * textureLod( tex, p0, lod ) + g1x * textureLod( tex, p1, lod ) ) +\n\t\t\tg1( fuv.y ) * ( g0x * textureLod( tex, p2, lod ) + g1x * textureLod( tex, p3, lod ) );\n\t}\n\tvec4 textureBicubic( sampler2D sampler, vec2 uv, float lod ) {\n\t\tvec2 fLodSize = vec2( textureSize( sampler, int( lod ) ) );\n\t\tvec2 cLodSize = vec2( textureSize( sampler, int( lod + 1.0 ) ) );\n\t\tvec2 fLodSizeInv = 1.0 / fLodSize;\n\t\tvec2 cLodSizeInv = 1.0 / cLodSize;\n\t\tvec4 fSample = bicubic( sampler, uv, vec4( fLodSizeInv, fLodSize ), floor( lod ) );\n\t\tvec4 cSample = bicubic( sampler, uv, vec4( cLodSizeInv, cLodSize ), ceil( lod ) );\n\t\treturn mix( fSample, cSample, fract( lod ) );\n\t}\n\tvec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) {\n\t\tvec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n\t\tvec3 modelScale;\n\t\tmodelScale.x = 
length( vec3( modelMatrix[ 0 ].xyz ) );\n\t\tmodelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n\t\tmodelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) );\n\t\treturn normalize( refractionVector ) * thickness * modelScale;\n\t}\n\tfloat applyIorToRoughness( const in float roughness, const in float ior ) {\n\t\treturn roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n\t}\n\tvec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {\n\t\tfloat lod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior );\n\t\treturn textureBicubic( transmissionSamplerMap, fragCoord.xy, lod );\n\t}\n\tvec3 volumeAttenuation( const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {\n\t\tif ( isinf( attenuationDistance ) ) {\n\t\t\treturn vec3( 1.0 );\n\t\t} else {\n\t\t\tvec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n\t\t\tvec3 transmittance = exp( - attenuationCoefficient * transmissionDistance );\t\t\treturn transmittance;\n\t\t}\n\t}\n\tvec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,\n\t\tconst in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,\n\t\tconst in mat4 viewMatrix, const in mat4 projMatrix, const in float dispersion, const in float ior, const in float thickness,\n\t\tconst in vec3 attenuationColor, const in float attenuationDistance ) {\n\t\tvec4 transmittedLight;\n\t\tvec3 transmittance;\n\t\t#ifdef USE_DISPERSION\n\t\t\tfloat halfSpread = ( ior - 1.0 ) * 0.025 * dispersion;\n\t\t\tvec3 iors = vec3( ior - halfSpread, ior, ior + halfSpread );\n\t\t\tfor ( int i = 0; i < 3; i ++ ) {\n\t\t\t\tvec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, iors[ i ], modelMatrix );\n\t\t\t\tvec3 refractedRayExit = position + transmissionRay;\n\t\t\t\tvec4 ndcPos = projMatrix * viewMatrix * vec4( 
refractedRayExit, 1.0 );\n\t\t\t\tvec2 refractionCoords = ndcPos.xy / ndcPos.w;\n\t\t\t\trefractionCoords += 1.0;\n\t\t\t\trefractionCoords /= 2.0;\n\t\t\t\tvec4 transmissionSample = getTransmissionSample( refractionCoords, roughness, iors[ i ] );\n\t\t\t\ttransmittedLight[ i ] = transmissionSample[ i ];\n\t\t\t\ttransmittedLight.a += transmissionSample.a;\n\t\t\t\ttransmittance[ i ] = diffuseColor[ i ] * volumeAttenuation( length( transmissionRay ), attenuationColor, attenuationDistance )[ i ];\n\t\t\t}\n\t\t\ttransmittedLight.a /= 3.0;\n\t\t#else\n\t\t\tvec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n\t\t\tvec3 refractedRayExit = position + transmissionRay;\n\t\t\tvec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n\t\t\tvec2 refractionCoords = ndcPos.xy / ndcPos.w;\n\t\t\trefractionCoords += 1.0;\n\t\t\trefractionCoords /= 2.0;\n\t\t\ttransmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n\t\t\ttransmittance = diffuseColor * volumeAttenuation( length( transmissionRay ), attenuationColor, attenuationDistance );\n\t\t#endif\n\t\tvec3 attenuatedColor = transmittance * transmittedLight.rgb;\n\t\tvec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n\t\tfloat transmittanceFactor = ( transmittance.r + transmittance.g + transmittance.b ) / 3.0;\n\t\treturn vec4( ( 1.0 - F ) * attenuatedColor, 1.0 - ( 1.0 - transmittedLight.a ) * transmittanceFactor );\n\t}\n#endif"; @@ -36028,7 +36413,7 @@ const fragment$e = "#if DEPTH_PACKING == 3200\n\tuniform float opacity;\n#endif\ const vertex$d = "#define DISTANCE\nvarying vec3 vWorldPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include \n\t\t#include \n\t\t#include \n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvWorldPosition = 
worldPosition.xyz;\n}"; -const fragment$d = "#define DISTANCE\nuniform vec3 referencePosition;\nuniform float nearDistance;\nuniform float farDistance;\nvarying vec3 vWorldPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main () {\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tfloat dist = length( vWorldPosition - referencePosition );\n\tdist = ( dist - nearDistance ) / ( farDistance - nearDistance );\n\tdist = saturate( dist );\n\tgl_FragColor = packDepthToRGBA( dist );\n}"; +const fragment$d = "#define DISTANCE\nuniform vec3 referencePosition;\nuniform float nearDistance;\nuniform float farDistance;\nvarying vec3 vWorldPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main () {\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tfloat dist = length( vWorldPosition - referencePosition );\n\tdist = ( dist - nearDistance ) / ( farDistance - nearDistance );\n\tdist = saturate( dist );\n\tgl_FragColor = vec4( dist, 0.0, 0.0, 1.0 );\n}"; const vertex$c = "varying vec3 vWorldDirection;\n#include \nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include \n\t#include \n}"; @@ -36044,7 +36429,7 @@ const fragment$a = "uniform vec3 diffuse;\nuniform float opacity;\n#ifndef FLAT_ const vertex$9 = "#define LAMBERT\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n\t#include \n}"; -const fragment$9 = 
"#define LAMBERT\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; +const fragment$9 = "#define LAMBERT\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include 
\n\t#include \n}"; const vertex$8 = "#define MATCAP\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n}"; @@ -36052,19 +36437,19 @@ const fragment$8 = "#define MATCAP\nuniform vec3 diffuse;\nuniform float opacity const vertex$7 = "#define NORMAL\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvarying vec3 vViewPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvViewPosition = - mvPosition.xyz;\n#endif\n}"; -const fragment$7 = "#define NORMAL\nuniform float opacity;\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvarying vec3 vViewPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( 0.0, 0.0, 0.0, opacity );\n\t#include \n\t#include \n\t#include \n\t#include \n\tgl_FragColor = vec4( packNormalToRGB( normal ), diffuseColor.a );\n\t#ifdef OPAQUE\n\t\tgl_FragColor.a = 1.0;\n\t#endif\n}"; +const fragment$7 = "#define NORMAL\nuniform float opacity;\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvarying vec3 vViewPosition;\n#endif\n#include \n#include \n#include 
\n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( 0.0, 0.0, 0.0, opacity );\n\t#include \n\t#include \n\t#include \n\t#include \n\tgl_FragColor = vec4( normalize( normal ) * 0.5 + 0.5, diffuseColor.a );\n\t#ifdef OPAQUE\n\t\tgl_FragColor.a = 1.0;\n\t#endif\n}"; const vertex$6 = "#define PHONG\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n\t#include \n}"; -const fragment$6 = "#define PHONG\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; +const fragment$6 = "#define 
PHONG\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$5 = "#define STANDARD\nvarying vec3 vViewPosition;\n#ifdef USE_TRANSMISSION\n\tvarying vec3 vWorldPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n#ifdef USE_TRANSMISSION\n\tvWorldPosition = worldPosition.xyz;\n#endif\n}"; -const fragment$5 = "#define STANDARD\n#ifdef PHYSICAL\n\t#define IOR\n\t#define USE_SPECULAR\n#endif\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float roughness;\nuniform float metalness;\nuniform float 
opacity;\n#ifdef IOR\n\tuniform float ior;\n#endif\n#ifdef USE_SPECULAR\n\tuniform float specularIntensity;\n\tuniform vec3 specularColor;\n\t#ifdef USE_SPECULAR_COLORMAP\n\t\tuniform sampler2D specularColorMap;\n\t#endif\n\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\tuniform sampler2D specularIntensityMap;\n\t#endif\n#endif\n#ifdef USE_CLEARCOAT\n\tuniform float clearcoat;\n\tuniform float clearcoatRoughness;\n#endif\n#ifdef USE_DISPERSION\n\tuniform float dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tuniform float iridescence;\n\tuniform float iridescenceIOR;\n\tuniform float iridescenceThicknessMinimum;\n\tuniform float iridescenceThicknessMaximum;\n#endif\n#ifdef USE_SHEEN\n\tuniform vec3 sheenColor;\n\tuniform float sheenRoughness;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tuniform sampler2D sheenColorMap;\n\t#endif\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tuniform sampler2D sheenRoughnessMap;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\tuniform vec2 anisotropyVector;\n\t#ifdef USE_ANISOTROPYMAP\n\t\tuniform sampler2D anisotropyMap;\n\t#endif\n#endif\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 totalDiffuse = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse;\n\tvec3 totalSpecular = 
reflectedLight.directSpecular + reflectedLight.indirectSpecular;\n\t#include \n\tvec3 outgoingLight = totalDiffuse + totalSpecular + totalEmissiveRadiance;\n\t#ifdef USE_SHEEN\n\t\tfloat sheenEnergyComp = 1.0 - 0.157 * max3( material.sheenColor );\n\t\toutgoingLight = outgoingLight * sheenEnergyComp + sheenSpecularDirect + sheenSpecularIndirect;\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNVcc = saturate( dot( geometryClearcoatNormal, geometryViewDir ) );\n\t\tvec3 Fcc = F_Schlick( material.clearcoatF0, material.clearcoatF90, dotNVcc );\n\t\toutgoingLight = outgoingLight * ( 1.0 - material.clearcoat * Fcc ) + ( clearcoatSpecularDirect + clearcoatSpecularIndirect ) * material.clearcoat;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; +const fragment$5 = "#define STANDARD\n#ifdef PHYSICAL\n\t#define IOR\n\t#define USE_SPECULAR\n#endif\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float roughness;\nuniform float metalness;\nuniform float opacity;\n#ifdef IOR\n\tuniform float ior;\n#endif\n#ifdef USE_SPECULAR\n\tuniform float specularIntensity;\n\tuniform vec3 specularColor;\n\t#ifdef USE_SPECULAR_COLORMAP\n\t\tuniform sampler2D specularColorMap;\n\t#endif\n\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\tuniform sampler2D specularIntensityMap;\n\t#endif\n#endif\n#ifdef USE_CLEARCOAT\n\tuniform float clearcoat;\n\tuniform float clearcoatRoughness;\n#endif\n#ifdef USE_DISPERSION\n\tuniform float dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tuniform float iridescence;\n\tuniform float iridescenceIOR;\n\tuniform float iridescenceThicknessMinimum;\n\tuniform float iridescenceThicknessMaximum;\n#endif\n#ifdef USE_SHEEN\n\tuniform vec3 sheenColor;\n\tuniform float sheenRoughness;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tuniform sampler2D sheenColorMap;\n\t#endif\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tuniform sampler2D sheenRoughnessMap;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\tuniform vec2 anisotropyVector;\n\t#ifdef 
USE_ANISOTROPYMAP\n\t\tuniform sampler2D anisotropyMap;\n\t#endif\n#endif\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 totalDiffuse = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse;\n\tvec3 totalSpecular = reflectedLight.directSpecular + reflectedLight.indirectSpecular;\n\t#include \n\tvec3 outgoingLight = totalDiffuse + totalSpecular + totalEmissiveRadiance;\n\t#ifdef USE_SHEEN\n \n\t\toutgoingLight = outgoingLight + sheenSpecularDirect + sheenSpecularIndirect;\n \n \t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNVcc = saturate( dot( geometryClearcoatNormal, geometryViewDir ) );\n\t\tvec3 Fcc = F_Schlick( material.clearcoatF0, material.clearcoatF90, dotNVcc );\n\t\toutgoingLight = outgoingLight * ( 1.0 - material.clearcoat * Fcc ) + ( clearcoatSpecularDirect + clearcoatSpecularIndirect ) * material.clearcoat;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$4 = "#define TOON\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include 
\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n}"; -const fragment$4 = "#define TOON\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; +const fragment$4 = "#define TOON\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + 
reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$3 = "uniform float size;\nuniform float scale;\n#include \n#include \n#include \n#include \n#include \n#include \n#ifdef USE_POINTS_UV\n\tvarying vec2 vUv;\n\tuniform mat3 uvTransform;\n#endif\nvoid main() {\n\t#ifdef USE_POINTS_UV\n\t\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tgl_PointSize = size;\n\t#ifdef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n}"; @@ -36072,7 +36457,7 @@ const fragment$3 = "uniform vec3 diffuse;\nuniform float opacity;\n#include 0; // use PMREM if the user wants to blur the background - background = ( usePMREM ? cubeuvmaps : cubemaps ).get( background ); + background = environments.get( background, usePMREM ); } @@ -37103,7 +37488,7 @@ function WebGLBindingStates( gl, attributes ) { let updateBuffers = false; - const state = getBindingState( geometry, program, material ); + const state = getBindingState( object, geometry, program, material ); if ( currentState !== state ) { @@ -37156,16 +37541,28 @@ function WebGLBindingStates( gl, attributes ) { } - function getBindingState( geometry, program, material ) { + function getBindingState( object, geometry, program, material ) { const wireframe = ( material.wireframe === true ); - let programMap = bindingStates[ geometry.id ]; + let objectMap = bindingStates[ geometry.id ]; + + if ( objectMap === undefined ) { + + objectMap = {}; + bindingStates[ geometry.id ] = objectMap; + + } + + // Each InstancedMesh requires unique binding states because it contains instanced attributes. + const objectId = ( object.isInstancedMesh === true ) ? 
object.id : 0; + + let programMap = objectMap[ objectId ]; if ( programMap === undefined ) { programMap = {}; - bindingStates[ geometry.id ] = programMap; + objectMap[ objectId ] = programMap; } @@ -37566,21 +37963,27 @@ function WebGLBindingStates( gl, attributes ) { for ( const geometryId in bindingStates ) { - const programMap = bindingStates[ geometryId ]; + const objectMap = bindingStates[ geometryId ]; - for ( const programId in programMap ) { + for ( const objectId in objectMap ) { - const stateMap = programMap[ programId ]; + const programMap = objectMap[ objectId ]; - for ( const wireframe in stateMap ) { + for ( const programId in programMap ) { - deleteVertexArrayObject( stateMap[ wireframe ].object ); + const stateMap = programMap[ programId ]; - delete stateMap[ wireframe ]; + for ( const wireframe in stateMap ) { - } + deleteVertexArrayObject( stateMap[ wireframe ].object ); - delete programMap[ programId ]; + delete stateMap[ wireframe ]; + + } + + delete programMap[ programId ]; + + } } @@ -37594,21 +37997,27 @@ function WebGLBindingStates( gl, attributes ) { if ( bindingStates[ geometry.id ] === undefined ) return; - const programMap = bindingStates[ geometry.id ]; + const objectMap = bindingStates[ geometry.id ]; - for ( const programId in programMap ) { + for ( const objectId in objectMap ) { - const stateMap = programMap[ programId ]; + const programMap = objectMap[ objectId ]; - for ( const wireframe in stateMap ) { + for ( const programId in programMap ) { - deleteVertexArrayObject( stateMap[ wireframe ].object ); + const stateMap = programMap[ programId ]; - delete stateMap[ wireframe ]; + for ( const wireframe in stateMap ) { - } + deleteVertexArrayObject( stateMap[ wireframe ].object ); - delete programMap[ programId ]; + delete stateMap[ wireframe ]; + + } + + delete programMap[ programId ]; + + } } @@ -37620,26 +38029,73 @@ function WebGLBindingStates( gl, attributes ) { for ( const geometryId in bindingStates ) { - const programMap = 
bindingStates[ geometryId ]; + const objectMap = bindingStates[ geometryId ]; + + for ( const objectId in objectMap ) { - if ( programMap[ program.id ] === undefined ) continue; + const programMap = objectMap[ objectId ]; - const stateMap = programMap[ program.id ]; + if ( programMap[ program.id ] === undefined ) continue; - for ( const wireframe in stateMap ) { + const stateMap = programMap[ program.id ]; - deleteVertexArrayObject( stateMap[ wireframe ].object ); + for ( const wireframe in stateMap ) { + + deleteVertexArrayObject( stateMap[ wireframe ].object ); + + delete stateMap[ wireframe ]; + + } - delete stateMap[ wireframe ]; + delete programMap[ program.id ]; } - delete programMap[ program.id ]; + } + + } + + function releaseStatesOfObject( object ) { + + for ( const geometryId in bindingStates ) { + + const objectMap = bindingStates[ geometryId ]; + + const objectId = ( object.isInstancedMesh === true ) ? object.id : 0; + + const programMap = objectMap[ objectId ]; + + if ( programMap === undefined ) continue; + + for ( const programId in programMap ) { + + const stateMap = programMap[ programId ]; + + for ( const wireframe in stateMap ) { + + deleteVertexArrayObject( stateMap[ wireframe ].object ); + + delete stateMap[ wireframe ]; + + } + + delete programMap[ programId ]; + + } + + delete objectMap[ objectId ]; + + if ( Object.keys( objectMap ).length === 0 ) { + + delete bindingStates[ geometryId ]; + + } } } + function reset() { resetDefaultState(); @@ -37669,6 +38125,7 @@ function WebGLBindingStates( gl, attributes ) { resetDefaultState: resetDefaultState, dispose: dispose, releaseStatesOfGeometry: releaseStatesOfGeometry, + releaseStatesOfObject: releaseStatesOfObject, releaseStatesOfProgram: releaseStatesOfProgram, initAttributes: initAttributes, @@ -37852,7 +38309,7 @@ function WebGLCapabilities( gl, extensions, parameters, utils ) { if ( maxPrecision !== precision ) { - console.warn( 'THREE.WebGLRenderer:', precision, 'not supported, using', 
maxPrecision, 'instead.' ); + warn( 'WebGLRenderer:', precision, 'not supported, using', maxPrecision, 'instead.' ); precision = maxPrecision; } @@ -37870,9 +38327,8 @@ function WebGLCapabilities( gl, extensions, parameters, utils ) { const maxVaryings = gl.getParameter( gl.MAX_VARYING_VECTORS ); const maxFragmentUniforms = gl.getParameter( gl.MAX_FRAGMENT_UNIFORM_VECTORS ); - const vertexTextures = maxVertexTextures > 0; - const maxSamples = gl.getParameter( gl.MAX_SAMPLES ); + const samples = gl.getParameter( gl.SAMPLES ); return { @@ -37898,9 +38354,9 @@ function WebGLCapabilities( gl, extensions, parameters, utils ) { maxVaryings: maxVaryings, maxFragmentUniforms: maxFragmentUniforms, - vertexTextures: vertexTextures, + maxSamples: maxSamples, - maxSamples: maxSamples + samples: samples }; @@ -38072,6 +38528,10 @@ function WebGLClipping( properties ) { } +const _position$1 = /*@__PURE__*/ new Vector3(); +const _quaternion = /*@__PURE__*/ new Quaternion(); +const _scale = /*@__PURE__*/ new Vector3(); + /** * Abstract base class for cameras. This class should always be inherited * when you build a new camera. 
@@ -38177,7 +38637,19 @@ class Camera extends Object3D { super.updateMatrixWorld( force ); - this.matrixWorldInverse.copy( this.matrixWorld ).invert(); + // exclude scale from view matrix to be glTF conform + + this.matrixWorld.decompose( _position$1, _quaternion, _scale ); + + if ( _scale.x === 1 && _scale.y === 1 && _scale.z === 1 ) { + + this.matrixWorldInverse.copy( this.matrixWorld ).invert(); + + } else { + + this.matrixWorldInverse.compose( _position$1, _quaternion, _scale.set( 1, 1, 1 ) ).invert(); + + } } @@ -38185,7 +38657,19 @@ class Camera extends Object3D { super.updateWorldMatrix( updateParents, updateChildren ); - this.matrixWorldInverse.copy( this.matrixWorld ).invert(); + // exclude scale from view matrix to be glTF conform + + this.matrixWorld.decompose( _position$1, _quaternion, _scale ); + + if ( _scale.x === 1 && _scale.y === 1 && _scale.z === 1 ) { + + this.matrixWorldInverse.copy( this.matrixWorld ).invert(); + + } else { + + this.matrixWorldInverse.compose( _position$1, _quaternion, _scale.set( 1, 1, 1 ) ).invert(); + + } } @@ -38197,12 +38681,254 @@ class Camera extends Object3D { } +/** + * Camera that uses [orthographic projection](https://en.wikipedia.org/wiki/Orthographic_projection). + * + * In this projection mode, an object's size in the rendered image stays + * constant regardless of its distance from the camera. This can be useful + * for rendering 2D scenes and UI elements, amongst other things. + * + * ```js + * const camera = new THREE.OrthographicCamera( width / - 2, width / 2, height / 2, height / - 2, 1, 1000 ); + * scene.add( camera ); + * ``` + * + * @augments Camera + */ +class OrthographicCamera extends Camera { + + /** + * Constructs a new orthographic camera. + * + * @param {number} [left=-1] - The left plane of the camera's frustum. + * @param {number} [right=1] - The right plane of the camera's frustum. + * @param {number} [top=1] - The top plane of the camera's frustum. 
+ * @param {number} [bottom=-1] - The bottom plane of the camera's frustum. + * @param {number} [near=0.1] - The camera's near plane. + * @param {number} [far=2000] - The camera's far plane. + */ + constructor( left = -1, right = 1, top = 1, bottom = -1, near = 0.1, far = 2000 ) { + + super(); + + /** + * This flag can be used for type testing. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isOrthographicCamera = true; + + this.type = 'OrthographicCamera'; + + /** + * The zoom factor of the camera. + * + * @type {number} + * @default 1 + */ + this.zoom = 1; + + /** + * Represents the frustum window specification. This property should not be edited + * directly but via {@link PerspectiveCamera#setViewOffset} and {@link PerspectiveCamera#clearViewOffset}. + * + * @type {?Object} + * @default null + */ + this.view = null; + + /** + * The left plane of the camera's frustum. + * + * @type {number} + * @default -1 + */ + this.left = left; + + /** + * The right plane of the camera's frustum. + * + * @type {number} + * @default 1 + */ + this.right = right; + + /** + * The top plane of the camera's frustum. + * + * @type {number} + * @default 1 + */ + this.top = top; + + /** + * The bottom plane of the camera's frustum. + * + * @type {number} + * @default -1 + */ + this.bottom = bottom; + + /** + * The camera's near plane. The valid range is greater than `0` + * and less than the current value of {@link OrthographicCamera#far}. + * + * Note that, unlike for the {@link PerspectiveCamera}, `0` is a + * valid value for an orthographic camera's near plane. + * + * @type {number} + * @default 0.1 + */ + this.near = near; + + /** + * The camera's far plane. Must be greater than the + * current value of {@link OrthographicCamera#near}. 
+ * + * @type {number} + * @default 2000 + */ + this.far = far; + + this.updateProjectionMatrix(); + + } + + copy( source, recursive ) { + + super.copy( source, recursive ); + + this.left = source.left; + this.right = source.right; + this.top = source.top; + this.bottom = source.bottom; + this.near = source.near; + this.far = source.far; + + this.zoom = source.zoom; + this.view = source.view === null ? null : Object.assign( {}, source.view ); + + return this; + + } + + /** + * Sets an offset in a larger frustum. This is useful for multi-window or + * multi-monitor/multi-machine setups. + * + * @param {number} fullWidth - The full width of multiview setup. + * @param {number} fullHeight - The full height of multiview setup. + * @param {number} x - The horizontal offset of the subcamera. + * @param {number} y - The vertical offset of the subcamera. + * @param {number} width - The width of subcamera. + * @param {number} height - The height of subcamera. + * @see {@link PerspectiveCamera#setViewOffset} + */ + setViewOffset( fullWidth, fullHeight, x, y, width, height ) { + + if ( this.view === null ) { + + this.view = { + enabled: true, + fullWidth: 1, + fullHeight: 1, + offsetX: 0, + offsetY: 0, + width: 1, + height: 1 + }; + + } + + this.view.enabled = true; + this.view.fullWidth = fullWidth; + this.view.fullHeight = fullHeight; + this.view.offsetX = x; + this.view.offsetY = y; + this.view.width = width; + this.view.height = height; + + this.updateProjectionMatrix(); + + } + + /** + * Removes the view offset from the projection matrix. + */ + clearViewOffset() { + + if ( this.view !== null ) { + + this.view.enabled = false; + + } + + this.updateProjectionMatrix(); + + } + + /** + * Updates the camera's projection matrix. Must be called after any change of + * camera properties. 
+ */ + updateProjectionMatrix() { + + const dx = ( this.right - this.left ) / ( 2 * this.zoom ); + const dy = ( this.top - this.bottom ) / ( 2 * this.zoom ); + const cx = ( this.right + this.left ) / 2; + const cy = ( this.top + this.bottom ) / 2; + + let left = cx - dx; + let right = cx + dx; + let top = cy + dy; + let bottom = cy - dy; + + if ( this.view !== null && this.view.enabled ) { + + const scaleW = ( this.right - this.left ) / this.view.fullWidth / this.zoom; + const scaleH = ( this.top - this.bottom ) / this.view.fullHeight / this.zoom; + + left += scaleW * this.view.offsetX; + right = left + scaleW * this.view.width; + top -= scaleH * this.view.offsetY; + bottom = top - scaleH * this.view.height; + + } + + this.projectionMatrix.makeOrthographic( left, right, top, bottom, this.near, this.far, this.coordinateSystem, this.reversedDepth ); + + this.projectionMatrixInverse.copy( this.projectionMatrix ).invert(); + + } + + toJSON( meta ) { + + const data = super.toJSON( meta ); + + data.object.zoom = this.zoom; + data.object.left = this.left; + data.object.right = this.right; + data.object.top = this.top; + data.object.bottom = this.bottom; + data.object.near = this.near; + data.object.far = this.far; + + if ( this.view !== null ) data.object.view = Object.assign( {}, this.view ); + + return data; + + } + +} + const _v3 = /*@__PURE__*/ new Vector3(); const _minTarget = /*@__PURE__*/ new Vector2(); const _maxTarget = /*@__PURE__*/ new Vector2(); /** - * Camera that uses [perspective projection]{@link https://en.wikipedia.org/wiki/Perspective_(graphical)}. + * Camera that uses [perspective projection](https://en.wikipedia.org/wiki/Perspective_(graphical)). * * This projection mode is designed to mimic the way the human eye sees. It * is the most common projection mode used for rendering a 3D scene. 
@@ -38598,838 +39324,20 @@ class PerspectiveCamera extends Camera { } -const fov = -90; // negative fov is not an error -const aspect = 1; - -/** - * A special type of camera that is positioned in 3D space to render its surroundings into a - * cube render target. The render target can then be used as an environment map for rendering - * realtime reflections in your scene. - * - * ```js - * // Create cube render target - * const cubeRenderTarget = new THREE.WebGLCubeRenderTarget( 256, { generateMipmaps: true, minFilter: THREE.LinearMipmapLinearFilter } ); - * - * // Create cube camera - * const cubeCamera = new THREE.CubeCamera( 1, 100000, cubeRenderTarget ); - * scene.add( cubeCamera ); - * - * // Create car - * const chromeMaterial = new THREE.MeshLambertMaterial( { color: 0xffffff, envMap: cubeRenderTarget.texture } ); - * const car = new THREE.Mesh( carGeometry, chromeMaterial ); - * scene.add( car ); - * - * // Update the render target cube - * car.visible = false; - * cubeCamera.position.copy( car.position ); - * cubeCamera.update( renderer, scene ); - * - * // Render the scene - * car.visible = true; - * renderer.render( scene, camera ); - * ``` - * - * @augments Object3D - */ -class CubeCamera extends Object3D { - - /** - * Constructs a new cube camera. - * - * @param {number} near - The camera's near plane. - * @param {number} far - The camera's far plane. - * @param {WebGLCubeRenderTarget} renderTarget - The cube render target. - */ - constructor( near, far, renderTarget ) { - - super(); - - this.type = 'CubeCamera'; - - /** - * A reference to the cube render target. - * - * @type {WebGLCubeRenderTarget} - */ - this.renderTarget = renderTarget; - - /** - * The current active coordinate system. 
- * - * @type {?(WebGLCoordinateSystem|WebGPUCoordinateSystem)} - * @default null - */ - this.coordinateSystem = null; - - /** - * The current active mipmap level - * - * @type {number} - * @default 0 - */ - this.activeMipmapLevel = 0; - - const cameraPX = new PerspectiveCamera( fov, aspect, near, far ); - cameraPX.layers = this.layers; - this.add( cameraPX ); - - const cameraNX = new PerspectiveCamera( fov, aspect, near, far ); - cameraNX.layers = this.layers; - this.add( cameraNX ); - - const cameraPY = new PerspectiveCamera( fov, aspect, near, far ); - cameraPY.layers = this.layers; - this.add( cameraPY ); - - const cameraNY = new PerspectiveCamera( fov, aspect, near, far ); - cameraNY.layers = this.layers; - this.add( cameraNY ); - - const cameraPZ = new PerspectiveCamera( fov, aspect, near, far ); - cameraPZ.layers = this.layers; - this.add( cameraPZ ); - - const cameraNZ = new PerspectiveCamera( fov, aspect, near, far ); - cameraNZ.layers = this.layers; - this.add( cameraNZ ); - - } - - /** - * Must be called when the coordinate system of the cube camera is changed. 
- */ - updateCoordinateSystem() { - - const coordinateSystem = this.coordinateSystem; - - const cameras = this.children.concat(); - - const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = cameras; - - for ( const camera of cameras ) this.remove( camera ); - - if ( coordinateSystem === WebGLCoordinateSystem ) { - - cameraPX.up.set( 0, 1, 0 ); - cameraPX.lookAt( 1, 0, 0 ); - - cameraNX.up.set( 0, 1, 0 ); - cameraNX.lookAt( -1, 0, 0 ); - - cameraPY.up.set( 0, 0, -1 ); - cameraPY.lookAt( 0, 1, 0 ); - - cameraNY.up.set( 0, 0, 1 ); - cameraNY.lookAt( 0, -1, 0 ); - - cameraPZ.up.set( 0, 1, 0 ); - cameraPZ.lookAt( 0, 0, 1 ); - - cameraNZ.up.set( 0, 1, 0 ); - cameraNZ.lookAt( 0, 0, -1 ); - - } else if ( coordinateSystem === WebGPUCoordinateSystem ) { - - cameraPX.up.set( 0, -1, 0 ); - cameraPX.lookAt( -1, 0, 0 ); - - cameraNX.up.set( 0, -1, 0 ); - cameraNX.lookAt( 1, 0, 0 ); - - cameraPY.up.set( 0, 0, 1 ); - cameraPY.lookAt( 0, 1, 0 ); - - cameraNY.up.set( 0, 0, -1 ); - cameraNY.lookAt( 0, -1, 0 ); - - cameraPZ.up.set( 0, -1, 0 ); - cameraPZ.lookAt( 0, 0, 1 ); - - cameraNZ.up.set( 0, -1, 0 ); - cameraNZ.lookAt( 0, 0, -1 ); - - } else { - - throw new Error( 'THREE.CubeCamera.updateCoordinateSystem(): Invalid coordinate system: ' + coordinateSystem ); - - } - - for ( const camera of cameras ) { - - this.add( camera ); - - camera.updateMatrixWorld(); - - } - - } - - /** - * Calling this method will render the given scene with the given renderer - * into the cube render target of the camera. - * - * @param {(Renderer|WebGLRenderer)} renderer - The renderer. - * @param {Scene} scene - The scene to render. 
- */ - update( renderer, scene ) { - - if ( this.parent === null ) this.updateMatrixWorld(); - - const { renderTarget, activeMipmapLevel } = this; - - if ( this.coordinateSystem !== renderer.coordinateSystem ) { - - this.coordinateSystem = renderer.coordinateSystem; - - this.updateCoordinateSystem(); - - } - - const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = this.children; - - const currentRenderTarget = renderer.getRenderTarget(); - const currentActiveCubeFace = renderer.getActiveCubeFace(); - const currentActiveMipmapLevel = renderer.getActiveMipmapLevel(); - - const currentXrEnabled = renderer.xr.enabled; - - renderer.xr.enabled = false; - - const generateMipmaps = renderTarget.texture.generateMipmaps; - - renderTarget.texture.generateMipmaps = false; - - renderer.setRenderTarget( renderTarget, 0, activeMipmapLevel ); - renderer.render( scene, cameraPX ); - - renderer.setRenderTarget( renderTarget, 1, activeMipmapLevel ); - renderer.render( scene, cameraNX ); - - renderer.setRenderTarget( renderTarget, 2, activeMipmapLevel ); - renderer.render( scene, cameraPY ); - - renderer.setRenderTarget( renderTarget, 3, activeMipmapLevel ); - renderer.render( scene, cameraNY ); - - renderer.setRenderTarget( renderTarget, 4, activeMipmapLevel ); - renderer.render( scene, cameraPZ ); - - // mipmaps are generated during the last call of render() - // at this point, all sides of the cube render target are defined - - renderTarget.texture.generateMipmaps = generateMipmaps; - - renderer.setRenderTarget( renderTarget, 5, activeMipmapLevel ); - renderer.render( scene, cameraNZ ); - - renderer.setRenderTarget( currentRenderTarget, currentActiveCubeFace, currentActiveMipmapLevel ); - - renderer.xr.enabled = currentXrEnabled; - - renderTarget.texture.needsPMREMUpdate = true; - - } - -} - -/** - * Creates a cube texture made up of six images. 
- * - * ```js - * const loader = new THREE.CubeTextureLoader(); - * loader.setPath( 'textures/cube/pisa/' ); - * - * const textureCube = loader.load( [ - * 'px.png', 'nx.png', 'py.png', 'ny.png', 'pz.png', 'nz.png' - * ] ); - * - * const material = new THREE.MeshBasicMaterial( { color: 0xffffff, envMap: textureCube } ); - * ``` - * - * @augments Texture - */ -class CubeTexture extends Texture { - - /** - * Constructs a new cube texture. - * - * @param {Array} [images=[]] - An array holding a image for each side of a cube. - * @param {number} [mapping=CubeReflectionMapping] - The texture mapping. - * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. - * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. - * @param {number} [magFilter=LinearFilter] - The mag filter value. - * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. - * @param {number} [format=RGBAFormat] - The texture format. - * @param {number} [type=UnsignedByteType] - The texture type. - * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. - * @param {string} [colorSpace=NoColorSpace] - The color space value. - */ - constructor( images = [], mapping = CubeReflectionMapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ) { - - super( images, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); - - /** - * This flag can be used for type testing. - * - * @type {boolean} - * @readonly - * @default true - */ - this.isCubeTexture = true; - - /** - * If set to `true`, the texture is flipped along the vertical axis when - * uploaded to the GPU. - * - * Overwritten and set to `false` by default. - * - * @type {boolean} - * @default false - */ - this.flipY = false; - - } - - /** - * Alias for {@link CubeTexture#image}. 
- * - * @type {Array} - */ - get images() { - - return this.image; - - } - - set images( value ) { - - this.image = value; - - } - -} - -/** - * A cube render target used in context of {@link WebGLRenderer}. - * - * @augments WebGLRenderTarget - */ -class WebGLCubeRenderTarget extends WebGLRenderTarget { - - /** - * Constructs a new cube render target. - * - * @param {number} [size=1] - The size of the render target. - * @param {RenderTarget~Options} [options] - The configuration object. - */ - constructor( size = 1, options = {} ) { - - super( size, size, options ); - - /** - * This flag can be used for type testing. - * - * @type {boolean} - * @readonly - * @default true - */ - this.isWebGLCubeRenderTarget = true; - - const image = { width: size, height: size, depth: 1 }; - const images = [ image, image, image, image, image, image ]; - - /** - * Overwritten with a different texture type. - * - * @type {DataArrayTexture} - */ - this.texture = new CubeTexture( images ); - this._setTextureOptions( options ); - - // By convention -- likely based on the RenderMan spec from the 1990's -- cube maps are specified by WebGL (and three.js) - // in a coordinate system in which positive-x is to the right when looking up the positive-z axis -- in other words, - // in a left-handed coordinate system. By continuing this convention, preexisting cube maps continued to render correctly. - - // three.js uses a right-handed coordinate system. So environment maps used in three.js appear to have px and nx swapped - // and the flag isRenderTargetTexture controls this conversion. The flip is not required when using WebGLCubeRenderTarget.texture - // as a cube texture (this is detected when isRenderTargetTexture is set to true for cube textures). - - this.texture.isRenderTargetTexture = true; - - } - - /** - * Converts the given equirectangular texture to a cube map. - * - * @param {WebGLRenderer} renderer - The renderer. - * @param {Texture} texture - The equirectangular texture. 
- * @return {WebGLCubeRenderTarget} A reference to this cube render target. - */ - fromEquirectangularTexture( renderer, texture ) { - - this.texture.type = texture.type; - this.texture.colorSpace = texture.colorSpace; - - this.texture.generateMipmaps = texture.generateMipmaps; - this.texture.minFilter = texture.minFilter; - this.texture.magFilter = texture.magFilter; - - const shader = { - - uniforms: { - tEquirect: { value: null }, - }, - - vertexShader: /* glsl */` - - varying vec3 vWorldDirection; - - vec3 transformDirection( in vec3 dir, in mat4 matrix ) { - - return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz ); - - } - - void main() { - - vWorldDirection = transformDirection( position, modelMatrix ); - - #include - #include - - } - `, - - fragmentShader: /* glsl */` - - uniform sampler2D tEquirect; - - varying vec3 vWorldDirection; - - #include - - void main() { - - vec3 direction = normalize( vWorldDirection ); - - vec2 sampleUV = equirectUv( direction ); - - gl_FragColor = texture2D( tEquirect, sampleUV ); - - } - ` - }; - - const geometry = new BoxGeometry( 5, 5, 5 ); - - const material = new ShaderMaterial( { - - name: 'CubemapFromEquirect', - - uniforms: cloneUniforms( shader.uniforms ), - vertexShader: shader.vertexShader, - fragmentShader: shader.fragmentShader, - side: BackSide, - blending: NoBlending - - } ); - - material.uniforms.tEquirect.value = texture; - - const mesh = new Mesh( geometry, material ); - - const currentMinFilter = texture.minFilter; - - // Avoid blurred poles - if ( texture.minFilter === LinearMipmapLinearFilter ) texture.minFilter = LinearFilter; - - const camera = new CubeCamera( 1, 10, this ); - camera.update( renderer, mesh ); - - texture.minFilter = currentMinFilter; - - mesh.geometry.dispose(); - mesh.material.dispose(); - - return this; - - } - - /** - * Clears this cube render target. - * - * @param {WebGLRenderer} renderer - The renderer. 
- * @param {boolean} [color=true] - Whether the color buffer should be cleared or not. - * @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not. - * @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not. - */ - clear( renderer, color = true, depth = true, stencil = true ) { - - const currentRenderTarget = renderer.getRenderTarget(); - - for ( let i = 0; i < 6; i ++ ) { - - renderer.setRenderTarget( this, i ); - - renderer.clear( color, depth, stencil ); - - } - - renderer.setRenderTarget( currentRenderTarget ); - - } - -} - -function WebGLCubeMaps( renderer ) { - - let cubemaps = new WeakMap(); - - function mapTextureMapping( texture, mapping ) { - - if ( mapping === EquirectangularReflectionMapping ) { - - texture.mapping = CubeReflectionMapping; - - } else if ( mapping === EquirectangularRefractionMapping ) { - - texture.mapping = CubeRefractionMapping; - - } - - return texture; - - } - - function get( texture ) { - - if ( texture && texture.isTexture ) { - - const mapping = texture.mapping; - - if ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ) { - - if ( cubemaps.has( texture ) ) { - - const cubemap = cubemaps.get( texture ).texture; - return mapTextureMapping( cubemap, texture.mapping ); - - } else { - - const image = texture.image; - - if ( image && image.height > 0 ) { - - const renderTarget = new WebGLCubeRenderTarget( image.height ); - renderTarget.fromEquirectangularTexture( renderer, texture ); - cubemaps.set( texture, renderTarget ); - - texture.addEventListener( 'dispose', onTextureDispose ); - - return mapTextureMapping( renderTarget.texture, texture.mapping ); - - } else { - - // image not yet ready. 
try the conversion next frame - - return null; - - } - - } - - } - - } - - return texture; - - } - - function onTextureDispose( event ) { - - const texture = event.target; - - texture.removeEventListener( 'dispose', onTextureDispose ); - - const cubemap = cubemaps.get( texture ); - - if ( cubemap !== undefined ) { - - cubemaps.delete( texture ); - cubemap.dispose(); - - } - - } - - function dispose() { - - cubemaps = new WeakMap(); - - } - - return { - get: get, - dispose: dispose - }; - -} - -/** - * Camera that uses [orthographic projection]{@link https://en.wikipedia.org/wiki/Orthographic_projection}. - * - * In this projection mode, an object's size in the rendered image stays - * constant regardless of its distance from the camera. This can be useful - * for rendering 2D scenes and UI elements, amongst other things. - * - * ```js - * const camera = new THREE.OrthographicCamera( width / - 2, width / 2, height / 2, height / - 2, 1, 1000 ); - * scene.add( camera ); - * ``` - * - * @augments Camera - */ -class OrthographicCamera extends Camera { - - /** - * Constructs a new orthographic camera. - * - * @param {number} [left=-1] - The left plane of the camera's frustum. - * @param {number} [right=1] - The right plane of the camera's frustum. - * @param {number} [top=1] - The top plane of the camera's frustum. - * @param {number} [bottom=-1] - The bottom plane of the camera's frustum. - * @param {number} [near=0.1] - The camera's near plane. - * @param {number} [far=2000] - The camera's far plane. - */ - constructor( left = -1, right = 1, top = 1, bottom = -1, near = 0.1, far = 2000 ) { - - super(); - - /** - * This flag can be used for type testing. - * - * @type {boolean} - * @readonly - * @default true - */ - this.isOrthographicCamera = true; - - this.type = 'OrthographicCamera'; - - /** - * The zoom factor of the camera. - * - * @type {number} - * @default 1 - */ - this.zoom = 1; - - /** - * Represents the frustum window specification. 
This property should not be edited - * directly but via {@link PerspectiveCamera#setViewOffset} and {@link PerspectiveCamera#clearViewOffset}. - * - * @type {?Object} - * @default null - */ - this.view = null; - - /** - * The left plane of the camera's frustum. - * - * @type {number} - * @default -1 - */ - this.left = left; - - /** - * The right plane of the camera's frustum. - * - * @type {number} - * @default 1 - */ - this.right = right; - - /** - * The top plane of the camera's frustum. - * - * @type {number} - * @default 1 - */ - this.top = top; - - /** - * The bottom plane of the camera's frustum. - * - * @type {number} - * @default -1 - */ - this.bottom = bottom; - - /** - * The camera's near plane. The valid range is greater than `0` - * and less than the current value of {@link OrthographicCamera#far}. - * - * Note that, unlike for the {@link PerspectiveCamera}, `0` is a - * valid value for an orthographic camera's near plane. - * - * @type {number} - * @default 0.1 - */ - this.near = near; - - /** - * The camera's far plane. Must be greater than the - * current value of {@link OrthographicCamera#near}. - * - * @type {number} - * @default 2000 - */ - this.far = far; - - this.updateProjectionMatrix(); - - } - - copy( source, recursive ) { - - super.copy( source, recursive ); - - this.left = source.left; - this.right = source.right; - this.top = source.top; - this.bottom = source.bottom; - this.near = source.near; - this.far = source.far; - - this.zoom = source.zoom; - this.view = source.view === null ? null : Object.assign( {}, source.view ); - - return this; - - } - - /** - * Sets an offset in a larger frustum. This is useful for multi-window or - * multi-monitor/multi-machine setups. - * - * @param {number} fullWidth - The full width of multiview setup. - * @param {number} fullHeight - The full height of multiview setup. - * @param {number} x - The horizontal offset of the subcamera. - * @param {number} y - The vertical offset of the subcamera. 
- * @param {number} width - The width of subcamera. - * @param {number} height - The height of subcamera. - * @see {@link PerspectiveCamera#setViewOffset} - */ - setViewOffset( fullWidth, fullHeight, x, y, width, height ) { - - if ( this.view === null ) { - - this.view = { - enabled: true, - fullWidth: 1, - fullHeight: 1, - offsetX: 0, - offsetY: 0, - width: 1, - height: 1 - }; - - } - - this.view.enabled = true; - this.view.fullWidth = fullWidth; - this.view.fullHeight = fullHeight; - this.view.offsetX = x; - this.view.offsetY = y; - this.view.width = width; - this.view.height = height; - - this.updateProjectionMatrix(); - - } - - /** - * Removes the view offset from the projection matrix. - */ - clearViewOffset() { - - if ( this.view !== null ) { - - this.view.enabled = false; - - } - - this.updateProjectionMatrix(); - - } - - /** - * Updates the camera's projection matrix. Must be called after any change of - * camera properties. - */ - updateProjectionMatrix() { - - const dx = ( this.right - this.left ) / ( 2 * this.zoom ); - const dy = ( this.top - this.bottom ) / ( 2 * this.zoom ); - const cx = ( this.right + this.left ) / 2; - const cy = ( this.top + this.bottom ) / 2; - - let left = cx - dx; - let right = cx + dx; - let top = cy + dy; - let bottom = cy - dy; - - if ( this.view !== null && this.view.enabled ) { - - const scaleW = ( this.right - this.left ) / this.view.fullWidth / this.zoom; - const scaleH = ( this.top - this.bottom ) / this.view.fullHeight / this.zoom; - - left += scaleW * this.view.offsetX; - right = left + scaleW * this.view.width; - top -= scaleH * this.view.offsetY; - bottom = top - scaleH * this.view.height; - - } - - this.projectionMatrix.makeOrthographic( left, right, top, bottom, this.near, this.far, this.coordinateSystem, this.reversedDepth ); - - this.projectionMatrixInverse.copy( this.projectionMatrix ).invert(); - - } - - toJSON( meta ) { - - const data = super.toJSON( meta ); - - data.object.zoom = this.zoom; - data.object.left 
= this.left; - data.object.right = this.right; - data.object.top = this.top; - data.object.bottom = this.bottom; - data.object.near = this.near; - data.object.far = this.far; - - if ( this.view !== null ) data.object.view = Object.assign( {}, this.view ); - - return data; - - } - -} - const LOD_MIN = 4; -// The standard deviations (radians) associated with the extra mips. These are -// chosen to approximate a Trowbridge-Reitz distribution function times the -// geometric shadowing function. These sigma values squared must match the -// variance #defines in cube_uv_reflection_fragment.glsl.js. +// The standard deviations (radians) associated with the extra mips. +// Used for scene blur in fromScene() method. const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ]; // The maximum length of the blur for loop. Smaller sigmas will use fewer // samples and exit early, but not recompile the shader. +// Used for scene blur in fromScene() method. const MAX_SAMPLES = 20; +// GGX VNDF importance sampling configuration +const GGX_SAMPLES = 256; + const _flatCamera = /*@__PURE__*/ new OrthographicCamera(); const _clearColor = /*@__PURE__*/ new Color(); let _oldTarget = null; @@ -39437,24 +39345,6 @@ let _oldActiveCubeFace = 0; let _oldActiveMipmapLevel = 0; let _oldXrEnabled = false; -// Golden Ratio -const PHI = ( 1 + Math.sqrt( 5 ) ) / 2; -const INV_PHI = 1 / PHI; - -// Vertices of a dodecahedron (except the opposites, which represent the -// same axis), used as axis directions evenly spread on a sphere. 
-const _axisDirections = [ - /*@__PURE__*/ new Vector3( - PHI, INV_PHI, 0 ), - /*@__PURE__*/ new Vector3( PHI, INV_PHI, 0 ), - /*@__PURE__*/ new Vector3( - INV_PHI, 0, PHI ), - /*@__PURE__*/ new Vector3( INV_PHI, 0, PHI ), - /*@__PURE__*/ new Vector3( 0, PHI, - INV_PHI ), - /*@__PURE__*/ new Vector3( 0, PHI, INV_PHI ), - /*@__PURE__*/ new Vector3( -1, 1, -1 ), - /*@__PURE__*/ new Vector3( 1, 1, -1 ), - /*@__PURE__*/ new Vector3( -1, 1, 1 ), - /*@__PURE__*/ new Vector3( 1, 1, 1 ) ]; - const _origin = /*@__PURE__*/ new Vector3(); /** @@ -39468,9 +39358,11 @@ const _origin = /*@__PURE__*/ new Vector3(); * higher roughness levels. In this way we maintain resolution to smoothly * interpolate diffuse lighting while limiting sampling computation. * - * Paper: Fast, Accurate Image-Based Lighting: - * {@link https://drive.google.com/file/d/15y8r_UpKlU9SvV4ILb0C3qCPecS8pvLz/view} -*/ + * The prefiltering uses GGX VNDF (Visible Normal Distribution Function) + * importance sampling based on "Sampling the GGX Distribution of Visible Normals" + * (Heitz, 2018) to generate environment maps that accurately match the GGX BRDF + * used in material rendering for physically-based image-based lighting. + */ class PMREMGenerator { /** @@ -39485,15 +39377,17 @@ class PMREMGenerator { this._lodMax = 0; this._cubeSize = 0; - this._lodPlanes = []; this._sizeLods = []; this._sigmas = []; + this._lodMeshes = []; + + this._backgroundBox = null; - this._blurMaterial = null; this._cubemapMaterial = null; this._equirectMaterial = null; - this._compileMaterial( this._blurMaterial ); + this._blurMaterial = null; + this._ggxMaterial = null; } @@ -39509,7 +39403,7 @@ class PMREMGenerator { * @param {number} [far=100] - The far plane distance. * @param {Object} [options={}] - The configuration options. * @param {number} [options.size=256] - The texture size of the PMREM. - * @param {Vector3} [options.renderTarget=origin] - The position of the internal cube camera that renders the scene. 
+ * @param {Vector3} [options.position=origin] - The position of the internal cube camera that renders the scene. * @return {WebGLRenderTarget} The resulting PMREM. */ fromScene( scene, sigma = 0, near = 0.1, far = 100, options = {} ) { @@ -39618,6 +39512,13 @@ class PMREMGenerator { if ( this._cubemapMaterial !== null ) this._cubemapMaterial.dispose(); if ( this._equirectMaterial !== null ) this._equirectMaterial.dispose(); + if ( this._backgroundBox !== null ) { + + this._backgroundBox.geometry.dispose(); + this._backgroundBox.material.dispose(); + + } + } // private interface @@ -39632,12 +39533,13 @@ class PMREMGenerator { _dispose() { if ( this._blurMaterial !== null ) this._blurMaterial.dispose(); + if ( this._ggxMaterial !== null ) this._ggxMaterial.dispose(); if ( this._pingPongRenderTarget !== null ) this._pingPongRenderTarget.dispose(); - for ( let i = 0; i < this._lodPlanes.length; i ++ ) { + for ( let i = 0; i < this._lodMeshes.length; i ++ ) { - this._lodPlanes[ i ].dispose(); + this._lodMeshes[ i ].geometry.dispose(); } @@ -39709,9 +39611,10 @@ class PMREMGenerator { this._pingPongRenderTarget = _createRenderTarget( width, height, params ); const { _lodMax } = this; - ( { sizeLods: this._sizeLods, lodPlanes: this._lodPlanes, sigmas: this._sigmas } = _createPlanes( _lodMax ) ); + ( { lodMeshes: this._lodMeshes, sizeLods: this._sizeLods, sigmas: this._sigmas } = _createPlanes( _lodMax ) ); this._blurMaterial = _getBlurShader( _lodMax, width, height ); + this._ggxMaterial = _getGGXShader( _lodMax, width, height ); } @@ -39721,8 +39624,8 @@ class PMREMGenerator { _compileMaterial( material ) { - const tmpMesh = new Mesh( this._lodPlanes[ 0 ], material ); - this._renderer.compile( tmpMesh, _flatCamera ); + const mesh = new Mesh( new BufferGeometry(), material ); + this._renderer.compile( mesh, _flatCamera ); } @@ -39753,16 +39656,25 @@ class PMREMGenerator { } - const backgroundMaterial = new MeshBasicMaterial( { - name: 'PMREM.Background', - side: 
BackSide, - depthWrite: false, - depthTest: false, - } ); + if ( this._backgroundBox === null ) { - const backgroundBox = new Mesh( new BoxGeometry(), backgroundMaterial ); + this._backgroundBox = new Mesh( + new BoxGeometry(), + new MeshBasicMaterial( { + name: 'PMREM.Background', + side: BackSide, + depthWrite: false, + depthTest: false, + } ) + ); + + } + + const backgroundBox = this._backgroundBox; + const backgroundMaterial = backgroundBox.material; let useSolidColor = false; + const background = scene.background; if ( background ) { @@ -39823,9 +39735,6 @@ class PMREMGenerator { } - backgroundBox.geometry.dispose(); - backgroundBox.material.dispose(); - renderer.toneMapping = toneMapping; renderer.autoClear = originalAutoClear; scene.background = background; @@ -39859,7 +39768,9 @@ class PMREMGenerator { } const material = isCubeTexture ? this._cubemapMaterial : this._equirectMaterial; - const mesh = new Mesh( this._lodPlanes[ 0 ], material ); + + const mesh = this._lodMeshes[ 0 ]; + mesh.material = material; const uniforms = material.uniforms; @@ -39879,15 +39790,13 @@ class PMREMGenerator { const renderer = this._renderer; const autoClear = renderer.autoClear; renderer.autoClear = false; - const n = this._lodPlanes.length; - - for ( let i = 1; i < n; i ++ ) { - const sigma = Math.sqrt( this._sigmas[ i ] * this._sigmas[ i ] - this._sigmas[ i - 1 ] * this._sigmas[ i - 1 ] ); + const n = this._lodMeshes.length; - const poleAxis = _axisDirections[ ( n - i - 1 ) % _axisDirections.length ]; + // Use GGX VNDF importance sampling + for ( let i = 1; i < n; i ++ ) { - this._blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis ); + this._applyGGXFilter( cubeUVRenderTarget, i - 1, i ); } @@ -39895,6 +39804,63 @@ class PMREMGenerator { } + /** + * Applies GGX VNDF importance sampling filter to generate a prefiltered environment map. + * Uses Monte Carlo integration with VNDF importance sampling to accurately represent the + * GGX BRDF for physically-based rendering. 
Reads from the previous LOD level and + * applies incremental roughness filtering to avoid over-blurring. + * + * @private + * @param {WebGLRenderTarget} cubeUVRenderTarget + * @param {number} lodIn - Source LOD level to read from + * @param {number} lodOut - Target LOD level to write to + */ + _applyGGXFilter( cubeUVRenderTarget, lodIn, lodOut ) { + + const renderer = this._renderer; + const pingPongRenderTarget = this._pingPongRenderTarget; + + const ggxMaterial = this._ggxMaterial; + const ggxMesh = this._lodMeshes[ lodOut ]; + ggxMesh.material = ggxMaterial; + + const ggxUniforms = ggxMaterial.uniforms; + + // Calculate incremental roughness between LOD levels + const targetRoughness = lodOut / ( this._lodMeshes.length - 1 ); + const sourceRoughness = lodIn / ( this._lodMeshes.length - 1 ); + const incrementalRoughness = Math.sqrt( targetRoughness * targetRoughness - sourceRoughness * sourceRoughness ); + + // Apply blur strength mapping for better quality across the roughness range + const blurStrength = 0.0 + targetRoughness * 1.25; + const adjustedRoughness = incrementalRoughness * blurStrength; + + // Calculate viewport position based on output LOD level + const { _lodMax } = this; + const outputSize = this._sizeLods[ lodOut ]; + const x = 3 * outputSize * ( lodOut > _lodMax - LOD_MIN ? 
lodOut - _lodMax + LOD_MIN : 0 ); + const y = 4 * ( this._cubeSize - outputSize ); + + // Read from previous LOD with incremental roughness + ggxUniforms[ 'envMap' ].value = cubeUVRenderTarget.texture; + ggxUniforms[ 'roughness' ].value = adjustedRoughness; + ggxUniforms[ 'mipInt' ].value = _lodMax - lodIn; // Sample from input LOD + + _setViewport( pingPongRenderTarget, x, y, 3 * outputSize, 2 * outputSize ); + renderer.setRenderTarget( pingPongRenderTarget ); + renderer.render( ggxMesh, _flatCamera ); + + // Copy from pingPong back to cubeUV (simple direct copy) + ggxUniforms[ 'envMap' ].value = pingPongRenderTarget.texture; + ggxUniforms[ 'roughness' ].value = 0.0; // Direct copy + ggxUniforms[ 'mipInt' ].value = _lodMax - lodOut; // Read from the level we just wrote + + _setViewport( cubeUVRenderTarget, x, y, 3 * outputSize, 2 * outputSize ); + renderer.setRenderTarget( cubeUVRenderTarget ); + renderer.render( ggxMesh, _flatCamera ); + + } + /** * This is a two-pass Gaussian blur for a cubemap. Normally this is done * vertically and horizontally, but this breaks down on a cube. Here we apply @@ -39902,6 +39868,8 @@ class PMREMGenerator { * the poles) to approximate the orthogonally-separable blur. It is least * accurate at the poles, but still does a decent job. * + * Used for initial scene blur in fromScene() method when sigma > 0. + * * @private * @param {WebGLRenderTarget} cubeUVRenderTarget * @param {number} lodIn @@ -39940,7 +39908,7 @@ class PMREMGenerator { if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) { - console.error( + error( 'blur direction must be either latitudinal or longitudinal!' ); } @@ -39948,7 +39916,9 @@ class PMREMGenerator { // Number of standard deviations at which to cut off the discrete approximation. 
const STANDARD_DEVIATIONS = 3; - const blurMesh = new Mesh( this._lodPlanes[ lodOut ], blurMaterial ); + const blurMesh = this._lodMeshes[ lodOut ]; + blurMesh.material = blurMaterial; + const blurUniforms = blurMaterial.uniforms; const pixels = this._sizeLods[ lodIn ] - 1; @@ -39958,7 +39928,7 @@ class PMREMGenerator { if ( samples > MAX_SAMPLES ) { - console.warn( `sigmaRadians, ${ + warn( `sigmaRadians, ${ sigmaRadians}, is too large and will clip, as it requested ${ samples} samples when the maximum is set to ${MAX_SAMPLES}` ); @@ -40022,9 +39992,9 @@ class PMREMGenerator { function _createPlanes( lodMax ) { - const lodPlanes = []; const sizeLods = []; const sigmas = []; + const lodMeshes = []; let lod = lodMax; @@ -40086,7 +40056,7 @@ function _createPlanes( lodMax ) { planes.setAttribute( 'position', new BufferAttribute( position, positionSize ) ); planes.setAttribute( 'uv', new BufferAttribute( uv, uvSize ) ); planes.setAttribute( 'faceIndex', new BufferAttribute( faceIndex, faceIndexSize ) ); - lodPlanes.push( planes ); + lodMeshes.push( new Mesh( planes, null ) ); if ( lod > LOD_MIN ) { @@ -40096,7 +40066,7 @@ function _createPlanes( lodMax ) { } - return { lodPlanes, sizeLods, sigmas }; + return { lodMeshes, sizeLods, sigmas }; } @@ -40117,6 +40087,143 @@ function _setViewport( target, x, y, width, height ) { } +function _getGGXShader( lodMax, width, height ) { + + const shaderMaterial = new ShaderMaterial( { + + name: 'PMREMGGXConvolution', + + defines: { + 'GGX_SAMPLES': GGX_SAMPLES, + 'CUBEUV_TEXEL_WIDTH': 1.0 / width, + 'CUBEUV_TEXEL_HEIGHT': 1.0 / height, + 'CUBEUV_MAX_MIP': `${lodMax}.0`, + }, + + uniforms: { + 'envMap': { value: null }, + 'roughness': { value: 0.0 }, + 'mipInt': { value: 0 } + }, + + vertexShader: _getCommonVertexShader(), + + fragmentShader: /* glsl */` + + precision highp float; + precision highp int; + + varying vec3 vOutputDirection; + + uniform sampler2D envMap; + uniform float roughness; + uniform float mipInt; + + #define 
ENVMAP_TYPE_CUBE_UV + #include + + #define PI 3.14159265359 + + // Van der Corput radical inverse + float radicalInverse_VdC(uint bits) { + bits = (bits << 16u) | (bits >> 16u); + bits = ((bits & 0x55555555u) << 1u) | ((bits & 0xAAAAAAAAu) >> 1u); + bits = ((bits & 0x33333333u) << 2u) | ((bits & 0xCCCCCCCCu) >> 2u); + bits = ((bits & 0x0F0F0F0Fu) << 4u) | ((bits & 0xF0F0F0F0u) >> 4u); + bits = ((bits & 0x00FF00FFu) << 8u) | ((bits & 0xFF00FF00u) >> 8u); + return float(bits) * 2.3283064365386963e-10; // / 0x100000000 + } + + // Hammersley sequence + vec2 hammersley(uint i, uint N) { + return vec2(float(i) / float(N), radicalInverse_VdC(i)); + } + + // GGX VNDF importance sampling (Eric Heitz 2018) + // "Sampling the GGX Distribution of Visible Normals" + // https://jcgt.org/published/0007/04/01/ + vec3 importanceSampleGGX_VNDF(vec2 Xi, vec3 V, float roughness) { + float alpha = roughness * roughness; + + // Section 4.1: Orthonormal basis + vec3 T1 = vec3(1.0, 0.0, 0.0); + vec3 T2 = cross(V, T1); + + // Section 4.2: Parameterization of projected area + float r = sqrt(Xi.x); + float phi = 2.0 * PI * Xi.y; + float t1 = r * cos(phi); + float t2 = r * sin(phi); + float s = 0.5 * (1.0 + V.z); + t2 = (1.0 - s) * sqrt(1.0 - t1 * t1) + s * t2; + + // Section 4.3: Reprojection onto hemisphere + vec3 Nh = t1 * T1 + t2 * T2 + sqrt(max(0.0, 1.0 - t1 * t1 - t2 * t2)) * V; + + // Section 3.4: Transform back to ellipsoid configuration + return normalize(vec3(alpha * Nh.x, alpha * Nh.y, max(0.0, Nh.z))); + } + + void main() { + vec3 N = normalize(vOutputDirection); + vec3 V = N; // Assume view direction equals normal for pre-filtering + + vec3 prefilteredColor = vec3(0.0); + float totalWeight = 0.0; + + // For very low roughness, just sample the environment directly + if (roughness < 0.001) { + gl_FragColor = vec4(bilinearCubeUV(envMap, N, mipInt), 1.0); + return; + } + + // Tangent space basis for VNDF sampling + vec3 up = abs(N.z) < 0.999 ? 
vec3(0.0, 0.0, 1.0) : vec3(1.0, 0.0, 0.0); + vec3 tangent = normalize(cross(up, N)); + vec3 bitangent = cross(N, tangent); + + for(uint i = 0u; i < uint(GGX_SAMPLES); i++) { + vec2 Xi = hammersley(i, uint(GGX_SAMPLES)); + + // For PMREM, V = N, so in tangent space V is always (0, 0, 1) + vec3 H_tangent = importanceSampleGGX_VNDF(Xi, vec3(0.0, 0.0, 1.0), roughness); + + // Transform H back to world space + vec3 H = normalize(tangent * H_tangent.x + bitangent * H_tangent.y + N * H_tangent.z); + vec3 L = normalize(2.0 * dot(V, H) * H - V); + + float NdotL = max(dot(N, L), 0.0); + + if(NdotL > 0.0) { + // Sample environment at fixed mip level + // VNDF importance sampling handles the distribution filtering + vec3 sampleColor = bilinearCubeUV(envMap, L, mipInt); + + // Weight by NdotL for the split-sum approximation + // VNDF PDF naturally accounts for the visible microfacet distribution + prefilteredColor += sampleColor * NdotL; + totalWeight += NdotL; + } + } + + if (totalWeight > 0.0) { + prefilteredColor = prefilteredColor / totalWeight; + } + + gl_FragColor = vec4(prefilteredColor, 1.0); + } + `, + + blending: NoBlending, + depthTest: false, + depthWrite: false + + } ); + + return shaderMaterial; + +} + function _getBlurShader( lodMax, width, height ) { const weights = new Float32Array( MAX_SAMPLES ); @@ -40357,13 +40464,576 @@ function _getCommonVertexShader() { } -function WebGLCubeUVMaps( renderer ) { +const fov = -90; // negative fov is not an error +const aspect = 1; + +/** + * A special type of camera that is positioned in 3D space to render its surroundings into a + * cube render target. The render target can then be used as an environment map for rendering + * realtime reflections in your scene. 
+ * + * ```js + * // Create cube render target + * const cubeRenderTarget = new THREE.WebGLCubeRenderTarget( 256, { generateMipmaps: true, minFilter: THREE.LinearMipmapLinearFilter } ); + * + * // Create cube camera + * const cubeCamera = new THREE.CubeCamera( 1, 100000, cubeRenderTarget ); + * scene.add( cubeCamera ); + * + * // Create car + * const chromeMaterial = new THREE.MeshLambertMaterial( { color: 0xffffff, envMap: cubeRenderTarget.texture } ); + * const car = new THREE.Mesh( carGeometry, chromeMaterial ); + * scene.add( car ); + * + * // Update the render target cube + * car.visible = false; + * cubeCamera.position.copy( car.position ); + * cubeCamera.update( renderer, scene ); + * + * // Render the scene + * car.visible = true; + * renderer.render( scene, camera ); + * ``` + * + * @augments Object3D + */ +class CubeCamera extends Object3D { + + /** + * Constructs a new cube camera. + * + * @param {number} near - The camera's near plane. + * @param {number} far - The camera's far plane. + * @param {WebGLCubeRenderTarget} renderTarget - The cube render target. + */ + constructor( near, far, renderTarget ) { + + super(); + + this.type = 'CubeCamera'; + + /** + * A reference to the cube render target. + * + * @type {WebGLCubeRenderTarget} + */ + this.renderTarget = renderTarget; + + /** + * The current active coordinate system. 
+ * + * @type {?(WebGLCoordinateSystem|WebGPUCoordinateSystem)} + * @default null + */ + this.coordinateSystem = null; + + /** + * The current active mipmap level + * + * @type {number} + * @default 0 + */ + this.activeMipmapLevel = 0; + + const cameraPX = new PerspectiveCamera( fov, aspect, near, far ); + cameraPX.layers = this.layers; + this.add( cameraPX ); - let cubeUVmaps = new WeakMap(); + const cameraNX = new PerspectiveCamera( fov, aspect, near, far ); + cameraNX.layers = this.layers; + this.add( cameraNX ); + + const cameraPY = new PerspectiveCamera( fov, aspect, near, far ); + cameraPY.layers = this.layers; + this.add( cameraPY ); + + const cameraNY = new PerspectiveCamera( fov, aspect, near, far ); + cameraNY.layers = this.layers; + this.add( cameraNY ); + + const cameraPZ = new PerspectiveCamera( fov, aspect, near, far ); + cameraPZ.layers = this.layers; + this.add( cameraPZ ); + + const cameraNZ = new PerspectiveCamera( fov, aspect, near, far ); + cameraNZ.layers = this.layers; + this.add( cameraNZ ); + + } + + /** + * Must be called when the coordinate system of the cube camera is changed. 
+ */ + updateCoordinateSystem() { + + const coordinateSystem = this.coordinateSystem; + + const cameras = this.children.concat(); + + const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = cameras; + + for ( const camera of cameras ) this.remove( camera ); + + if ( coordinateSystem === WebGLCoordinateSystem ) { + + cameraPX.up.set( 0, 1, 0 ); + cameraPX.lookAt( 1, 0, 0 ); + + cameraNX.up.set( 0, 1, 0 ); + cameraNX.lookAt( -1, 0, 0 ); + + cameraPY.up.set( 0, 0, -1 ); + cameraPY.lookAt( 0, 1, 0 ); + + cameraNY.up.set( 0, 0, 1 ); + cameraNY.lookAt( 0, -1, 0 ); + + cameraPZ.up.set( 0, 1, 0 ); + cameraPZ.lookAt( 0, 0, 1 ); + + cameraNZ.up.set( 0, 1, 0 ); + cameraNZ.lookAt( 0, 0, -1 ); + + } else if ( coordinateSystem === WebGPUCoordinateSystem ) { + + cameraPX.up.set( 0, -1, 0 ); + cameraPX.lookAt( -1, 0, 0 ); + + cameraNX.up.set( 0, -1, 0 ); + cameraNX.lookAt( 1, 0, 0 ); + + cameraPY.up.set( 0, 0, 1 ); + cameraPY.lookAt( 0, 1, 0 ); + + cameraNY.up.set( 0, 0, -1 ); + cameraNY.lookAt( 0, -1, 0 ); + + cameraPZ.up.set( 0, -1, 0 ); + cameraPZ.lookAt( 0, 0, 1 ); + + cameraNZ.up.set( 0, -1, 0 ); + cameraNZ.lookAt( 0, 0, -1 ); + + } else { + + throw new Error( 'THREE.CubeCamera.updateCoordinateSystem(): Invalid coordinate system: ' + coordinateSystem ); + + } + + for ( const camera of cameras ) { + + this.add( camera ); + + camera.updateMatrixWorld(); + + } + + } + + /** + * Calling this method will render the given scene with the given renderer + * into the cube render target of the camera. + * + * @param {(Renderer|WebGLRenderer)} renderer - The renderer. + * @param {Scene} scene - The scene to render. 
+ */ + update( renderer, scene ) { + + if ( this.parent === null ) this.updateMatrixWorld(); + + const { renderTarget, activeMipmapLevel } = this; + + if ( this.coordinateSystem !== renderer.coordinateSystem ) { + + this.coordinateSystem = renderer.coordinateSystem; + + this.updateCoordinateSystem(); + + } + + const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = this.children; + + const currentRenderTarget = renderer.getRenderTarget(); + const currentActiveCubeFace = renderer.getActiveCubeFace(); + const currentActiveMipmapLevel = renderer.getActiveMipmapLevel(); + + const currentXrEnabled = renderer.xr.enabled; + + renderer.xr.enabled = false; + + const generateMipmaps = renderTarget.texture.generateMipmaps; + + renderTarget.texture.generateMipmaps = false; + + // https://github.com/mrdoob/three.js/issues/31413#issuecomment-3095966812 + + let reversedDepthBuffer = false; + + if ( renderer.isWebGLRenderer === true ) { + + reversedDepthBuffer = renderer.state.buffers.depth.getReversed(); + + } else { + + reversedDepthBuffer = renderer.reversedDepthBuffer; + + } + + renderer.setRenderTarget( renderTarget, 0, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth(); + renderer.render( scene, cameraPX ); + + renderer.setRenderTarget( renderTarget, 1, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth(); + renderer.render( scene, cameraNX ); + + renderer.setRenderTarget( renderTarget, 2, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth(); + renderer.render( scene, cameraPY ); + + renderer.setRenderTarget( renderTarget, 3, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth(); + renderer.render( scene, cameraNY ); + + renderer.setRenderTarget( renderTarget, 4, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) 
renderer.clearDepth(); + renderer.render( scene, cameraPZ ); + + // mipmaps are generated during the last call of render() + // at this point, all sides of the cube render target are defined + + renderTarget.texture.generateMipmaps = generateMipmaps; + + renderer.setRenderTarget( renderTarget, 5, activeMipmapLevel ); + if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth(); + renderer.render( scene, cameraNZ ); + + renderer.setRenderTarget( currentRenderTarget, currentActiveCubeFace, currentActiveMipmapLevel ); + + renderer.xr.enabled = currentXrEnabled; + + renderTarget.texture.needsPMREMUpdate = true; + + } + +} + +/** + * Creates a cube texture made up of six images. + * + * ```js + * const loader = new THREE.CubeTextureLoader(); + * loader.setPath( 'textures/cube/pisa/' ); + * + * const textureCube = loader.load( [ + * 'px.png', 'nx.png', 'py.png', 'ny.png', 'pz.png', 'nz.png' + * ] ); + * + * const material = new THREE.MeshBasicMaterial( { color: 0xffffff, envMap: textureCube } ); + * ``` + * + * @augments Texture + */ +class CubeTexture extends Texture { + + /** + * Constructs a new cube texture. + * + * @param {Array} [images=[]] - An array holding a image for each side of a cube. + * @param {number} [mapping=CubeReflectionMapping] - The texture mapping. + * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. + * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. + * @param {number} [magFilter=LinearFilter] - The mag filter value. + * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. + * @param {number} [format=RGBAFormat] - The texture format. + * @param {number} [type=UnsignedByteType] - The texture type. + * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. + * @param {string} [colorSpace=NoColorSpace] - The color space value. 
+ */ + constructor( images = [], mapping = CubeReflectionMapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ) { + + super( images, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); + + /** + * This flag can be used for type testing. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isCubeTexture = true; + + /** + * If set to `true`, the texture is flipped along the vertical axis when + * uploaded to the GPU. + * + * Overwritten and set to `false` by default. + * + * @type {boolean} + * @default false + */ + this.flipY = false; + + } + + /** + * Alias for {@link CubeTexture#image}. + * + * @type {Array} + */ + get images() { + + return this.image; + + } + + set images( value ) { + + this.image = value; + + } + +} + +/** + * A cube render target used in context of {@link WebGLRenderer}. + * + * @augments WebGLRenderTarget + */ +class WebGLCubeRenderTarget extends WebGLRenderTarget { + + /** + * Constructs a new cube render target. + * + * @param {number} [size=1] - The size of the render target. + * @param {RenderTarget~Options} [options] - The configuration object. + */ + constructor( size = 1, options = {} ) { + + super( size, size, options ); + + /** + * This flag can be used for type testing. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isWebGLCubeRenderTarget = true; + + const image = { width: size, height: size, depth: 1 }; + const images = [ image, image, image, image, image, image ]; + + /** + * Overwritten with a different texture type. + * + * @type {DataArrayTexture} + */ + this.texture = new CubeTexture( images ); + this._setTextureOptions( options ); + + // By convention -- likely based on the RenderMan spec from the 1990's -- cube maps are specified by WebGL (and three.js) + // in a coordinate system in which positive-x is to the right when looking up the positive-z axis -- in other words, + // in a left-handed coordinate system. 
By continuing this convention, preexisting cube maps continued to render correctly. + + // three.js uses a right-handed coordinate system. So environment maps used in three.js appear to have px and nx swapped + // and the flag isRenderTargetTexture controls this conversion. The flip is not required when using WebGLCubeRenderTarget.texture + // as a cube texture (this is detected when isRenderTargetTexture is set to true for cube textures). + + this.texture.isRenderTargetTexture = true; + + } + + /** + * Converts the given equirectangular texture to a cube map. + * + * @param {WebGLRenderer} renderer - The renderer. + * @param {Texture} texture - The equirectangular texture. + * @return {WebGLCubeRenderTarget} A reference to this cube render target. + */ + fromEquirectangularTexture( renderer, texture ) { + + this.texture.type = texture.type; + this.texture.colorSpace = texture.colorSpace; + + this.texture.generateMipmaps = texture.generateMipmaps; + this.texture.minFilter = texture.minFilter; + this.texture.magFilter = texture.magFilter; + + const shader = { + + uniforms: { + tEquirect: { value: null }, + }, + + vertexShader: /* glsl */` + + varying vec3 vWorldDirection; + + vec3 transformDirection( in vec3 dir, in mat4 matrix ) { + + return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz ); + + } + + void main() { + + vWorldDirection = transformDirection( position, modelMatrix ); + + #include + #include + + } + `, + + fragmentShader: /* glsl */` + + uniform sampler2D tEquirect; + + varying vec3 vWorldDirection; + + #include + + void main() { + + vec3 direction = normalize( vWorldDirection ); + + vec2 sampleUV = equirectUv( direction ); + + gl_FragColor = texture2D( tEquirect, sampleUV ); + + } + ` + }; + + const geometry = new BoxGeometry( 5, 5, 5 ); + + const material = new ShaderMaterial( { + + name: 'CubemapFromEquirect', + + uniforms: cloneUniforms( shader.uniforms ), + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader, + side: BackSide, 
+ blending: NoBlending + + } ); + + material.uniforms.tEquirect.value = texture; + + const mesh = new Mesh( geometry, material ); + + const currentMinFilter = texture.minFilter; + + // Avoid blurred poles + if ( texture.minFilter === LinearMipmapLinearFilter ) texture.minFilter = LinearFilter; + + const camera = new CubeCamera( 1, 10, this ); + camera.update( renderer, mesh ); + + texture.minFilter = currentMinFilter; + + mesh.geometry.dispose(); + mesh.material.dispose(); + + return this; + + } + + /** + * Clears this cube render target. + * + * @param {WebGLRenderer} renderer - The renderer. + * @param {boolean} [color=true] - Whether the color buffer should be cleared or not. + * @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not. + * @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not. + */ + clear( renderer, color = true, depth = true, stencil = true ) { + + const currentRenderTarget = renderer.getRenderTarget(); + + for ( let i = 0; i < 6; i ++ ) { + + renderer.setRenderTarget( this, i ); + + renderer.clear( color, depth, stencil ); + + } + + renderer.setRenderTarget( currentRenderTarget ); + + } + +} + +function WebGLEnvironments( renderer ) { + + let cubeMaps = new WeakMap(); + let pmremMaps = new WeakMap(); let pmremGenerator = null; - function get( texture ) { + function get( texture, usePMREM = false ) { + + if ( texture === null || texture === undefined ) return null; + + if ( usePMREM ) { + + return getPMREM( texture ); + + } + + return getCube( texture ); + + } + + function getCube( texture ) { + + if ( texture && texture.isTexture ) { + + const mapping = texture.mapping; + + if ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ) { + + if ( cubeMaps.has( texture ) ) { + + const cubemap = cubeMaps.get( texture ).texture; + return mapTextureMapping( cubemap, texture.mapping ); + + } else { + + const image = texture.image; + + if ( image && 
image.height > 0 ) { + + const renderTarget = new WebGLCubeRenderTarget( image.height ); + renderTarget.fromEquirectangularTexture( renderer, texture ); + cubeMaps.set( texture, renderTarget ); + + texture.addEventListener( 'dispose', onCubemapDispose ); + + return mapTextureMapping( renderTarget.texture, texture.mapping ); + + } else { + + // image not yet ready. try the conversion next frame + + return null; + + } + + } + + } + + } + + return texture; + + } + + function getPMREM( texture ) { if ( texture && texture.isTexture ) { @@ -40376,7 +41046,7 @@ function WebGLCubeUVMaps( renderer ) { if ( isEquirectMap || isCubeMap ) { - let renderTarget = cubeUVmaps.get( texture ); + let renderTarget = pmremMaps.get( texture ); const currentPMREMVersion = renderTarget !== undefined ? renderTarget.texture.pmremVersion : 0; @@ -40387,7 +41057,7 @@ function WebGLCubeUVMaps( renderer ) { renderTarget = isEquirectMap ? pmremGenerator.fromEquirectangular( texture, renderTarget ) : pmremGenerator.fromCubemap( texture, renderTarget ); renderTarget.texture.pmremVersion = texture.pmremVersion; - cubeUVmaps.set( texture, renderTarget ); + pmremMaps.set( texture, renderTarget ); return renderTarget.texture; @@ -40408,9 +41078,9 @@ function WebGLCubeUVMaps( renderer ) { renderTarget = isEquirectMap ? 
pmremGenerator.fromEquirectangular( texture ) : pmremGenerator.fromCubemap( texture ); renderTarget.texture.pmremVersion = texture.pmremVersion; - cubeUVmaps.set( texture, renderTarget ); + pmremMaps.set( texture, renderTarget ); - texture.addEventListener( 'dispose', onTextureDispose ); + texture.addEventListener( 'dispose', onPMREMDispose ); return renderTarget.texture; @@ -40434,6 +41104,22 @@ function WebGLCubeUVMaps( renderer ) { } + function mapTextureMapping( texture, mapping ) { + + if ( mapping === EquirectangularReflectionMapping ) { + + texture.mapping = CubeReflectionMapping; + + } else if ( mapping === EquirectangularRefractionMapping ) { + + texture.mapping = CubeRefractionMapping; + + } + + return texture; + + } + function isCubeTextureComplete( image ) { let count = 0; @@ -40447,21 +41133,37 @@ function WebGLCubeUVMaps( renderer ) { return count === length; + } + + function onCubemapDispose( event ) { + + const texture = event.target; + + texture.removeEventListener( 'dispose', onCubemapDispose ); + + const cubemap = cubeMaps.get( texture ); + + if ( cubemap !== undefined ) { + + cubeMaps.delete( texture ); + cubemap.dispose(); + + } } - function onTextureDispose( event ) { + function onPMREMDispose( event ) { const texture = event.target; - texture.removeEventListener( 'dispose', onTextureDispose ); + texture.removeEventListener( 'dispose', onPMREMDispose ); - const cubemapUV = cubeUVmaps.get( texture ); + const pmrem = pmremMaps.get( texture ); - if ( cubemapUV !== undefined ) { + if ( pmrem !== undefined ) { - cubeUVmaps.delete( texture ); - cubemapUV.dispose(); + pmremMaps.delete( texture ); + pmrem.dispose(); } @@ -40469,7 +41171,8 @@ function WebGLCubeUVMaps( renderer ) { function dispose() { - cubeUVmaps = new WeakMap(); + cubeMaps = new WeakMap(); + pmremMaps = new WeakMap(); if ( pmremGenerator !== null ) { @@ -40499,30 +41202,7 @@ function WebGLExtensions( gl ) { } - let extension; - - switch ( name ) { - - case 'WEBGL_depth_texture': - 
extension = gl.getExtension( 'WEBGL_depth_texture' ) || gl.getExtension( 'MOZ_WEBGL_depth_texture' ) || gl.getExtension( 'WEBKIT_WEBGL_depth_texture' ); - break; - - case 'EXT_texture_filter_anisotropic': - extension = gl.getExtension( 'EXT_texture_filter_anisotropic' ) || gl.getExtension( 'MOZ_EXT_texture_filter_anisotropic' ) || gl.getExtension( 'WEBKIT_EXT_texture_filter_anisotropic' ); - break; - - case 'WEBGL_compressed_texture_s3tc': - extension = gl.getExtension( 'WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'MOZ_WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_s3tc' ); - break; - - case 'WEBGL_compressed_texture_pvrtc': - extension = gl.getExtension( 'WEBGL_compressed_texture_pvrtc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_pvrtc' ); - break; - - default: - extension = gl.getExtension( name ); - - } + const extension = gl.getExtension( name ); extensions[ name ] = extension; @@ -40555,7 +41235,7 @@ function WebGLExtensions( gl ) { if ( extension === null ) { - warnOnce( 'THREE.WebGLRenderer: ' + name + ' extension not supported.' ); + warnOnce( 'WebGLRenderer: ' + name + ' extension not supported.' ); } @@ -40651,6 +41331,12 @@ function WebGLGeometries( gl, attributes, info, bindingStates ) { const geometryPosition = geometry.attributes.position; let version = 0; + if ( geometryPosition === undefined ) { + + return; + + } + if ( geometryIndex !== null ) { const array = geometryIndex.array; @@ -40666,7 +41352,7 @@ function WebGLGeometries( gl, attributes, info, bindingStates ) { } - } else if ( geometryPosition !== undefined ) { + } else { const array = geometryPosition.array; version = geometryPosition.version; @@ -40681,13 +41367,11 @@ function WebGLGeometries( gl, attributes, info, bindingStates ) { } - } else { - - return; - } - const attribute = new ( arrayNeedsUint32( indices ) ? 
Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 ); + // check whether a 32 bit or 16 bit buffer is required to store the indices + // account for PRIMITIVE_RESTART_FIXED_INDEX, #24565 + const attribute = new ( geometryPosition.count >= 65535 ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 ); attribute.version = version; // Updating index buffer in VAO now. See WebGLBindingStates @@ -40885,7 +41569,7 @@ function WebGLInfo( gl ) { break; default: - console.error( 'THREE.WebGLInfo: Unknown draw mode:', mode ); + error( 'WebGLInfo: Unknown draw mode:', mode ); break; } @@ -41203,7 +41887,7 @@ function WebGLMorphtargets( gl, capabilities, textures ) { } -function WebGLObjects( gl, geometries, attributes, info ) { +function WebGLObjects( gl, geometries, attributes, bindingStates, info ) { let updateMap = new WeakMap(); @@ -41278,6 +41962,8 @@ function WebGLObjects( gl, geometries, attributes, info ) { instancedMesh.removeEventListener( 'dispose', onInstancedMeshDispose ); + bindingStates.releaseStatesOfObject( instancedMesh ); + attributes.remove( instancedMesh.instanceMatrix ); if ( instancedMesh.instanceColor !== null ) attributes.remove( instancedMesh.instanceColor ); @@ -41293,6 +41979,291 @@ function WebGLObjects( gl, geometries, attributes, info ) { } +/** + * This class works just like {@link ShaderMaterial}, except that definitions + * of built-in uniforms and attributes are not automatically prepended to the + * GLSL shader code. + * + * `RawShaderMaterial` can only be used with {@link WebGLRenderer}. + * + * @augments ShaderMaterial + */ +class RawShaderMaterial extends ShaderMaterial { + + /** + * Constructs a new raw shader material. + * + * @param {Object} [parameters] - An object with one or more properties + * defining the material's appearance. Any property of the material + * (including any property from inherited materials) can be passed + * in here. 
Color values can be passed any type of value accepted + * by {@link Color#set}. + */ + constructor( parameters ) { + + super( parameters ); + + /** + * This flag can be used for type testing. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isRawShaderMaterial = true; + + this.type = 'RawShaderMaterial'; + + } + +} + +const toneMappingMap = { + [ LinearToneMapping ]: 'LINEAR_TONE_MAPPING', + [ ReinhardToneMapping ]: 'REINHARD_TONE_MAPPING', + [ CineonToneMapping ]: 'CINEON_TONE_MAPPING', + [ ACESFilmicToneMapping ]: 'ACES_FILMIC_TONE_MAPPING', + [ AgXToneMapping ]: 'AGX_TONE_MAPPING', + [ NeutralToneMapping ]: 'NEUTRAL_TONE_MAPPING', + [ CustomToneMapping ]: 'CUSTOM_TONE_MAPPING' +}; + +function WebGLOutput( type, width, height, depth, stencil ) { + + // render targets for scene and post-processing + const targetA = new WebGLRenderTarget( width, height, { + type: type, + depthBuffer: depth, + stencilBuffer: stencil + } ); + + const targetB = new WebGLRenderTarget( width, height, { + type: HalfFloatType, + depthBuffer: false, + stencilBuffer: false + } ); + + // create fullscreen triangle geometry + const geometry = new BufferGeometry(); + geometry.setAttribute( 'position', new Float32BufferAttribute( [ -1, 3, 0, -1, -1, 0, 3, -1, 0 ], 3 ) ); + geometry.setAttribute( 'uv', new Float32BufferAttribute( [ 0, 2, 0, 0, 2, 0 ], 2 ) ); + + // create output material with tone mapping support + const material = new RawShaderMaterial( { + uniforms: { + tDiffuse: { value: null } + }, + vertexShader: /* glsl */` + precision highp float; + + uniform mat4 modelViewMatrix; + uniform mat4 projectionMatrix; + + attribute vec3 position; + attribute vec2 uv; + + varying vec2 vUv; + + void main() { + vUv = uv; + gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); + }`, + fragmentShader: /* glsl */` + precision highp float; + + uniform sampler2D tDiffuse; + + varying vec2 vUv; + + #include + #include + + void main() { + gl_FragColor = texture2D( 
tDiffuse, vUv ); + + #ifdef LINEAR_TONE_MAPPING + gl_FragColor.rgb = LinearToneMapping( gl_FragColor.rgb ); + #elif defined( REINHARD_TONE_MAPPING ) + gl_FragColor.rgb = ReinhardToneMapping( gl_FragColor.rgb ); + #elif defined( CINEON_TONE_MAPPING ) + gl_FragColor.rgb = CineonToneMapping( gl_FragColor.rgb ); + #elif defined( ACES_FILMIC_TONE_MAPPING ) + gl_FragColor.rgb = ACESFilmicToneMapping( gl_FragColor.rgb ); + #elif defined( AGX_TONE_MAPPING ) + gl_FragColor.rgb = AgXToneMapping( gl_FragColor.rgb ); + #elif defined( NEUTRAL_TONE_MAPPING ) + gl_FragColor.rgb = NeutralToneMapping( gl_FragColor.rgb ); + #elif defined( CUSTOM_TONE_MAPPING ) + gl_FragColor.rgb = CustomToneMapping( gl_FragColor.rgb ); + #endif + + #ifdef SRGB_TRANSFER + gl_FragColor = sRGBTransferOETF( gl_FragColor ); + #endif + }`, + depthTest: false, + depthWrite: false + } ); + + const mesh = new Mesh( geometry, material ); + const camera = new OrthographicCamera( -1, 1, 1, -1, 0, 1 ); + + let _outputColorSpace = null; + let _outputToneMapping = null; + let _isCompositing = false; + let _savedToneMapping; + let _savedRenderTarget = null; + let _effects = []; + let _hasRenderPass = false; + + this.setSize = function ( width, height ) { + + targetA.setSize( width, height ); + targetB.setSize( width, height ); + + for ( let i = 0; i < _effects.length; i ++ ) { + + const effect = _effects[ i ]; + if ( effect.setSize ) effect.setSize( width, height ); + + } + + }; + + this.setEffects = function ( effects ) { + + _effects = effects; + _hasRenderPass = _effects.length > 0 && _effects[ 0 ].isRenderPass === true; + + const width = targetA.width; + const height = targetA.height; + + for ( let i = 0; i < _effects.length; i ++ ) { + + const effect = _effects[ i ]; + if ( effect.setSize ) effect.setSize( width, height ); + + } + + }; + + this.begin = function ( renderer, renderTarget ) { + + // Don't begin during compositing phase (post-processing effects call render()) + if ( _isCompositing ) return false; 
+ + if ( renderer.toneMapping === NoToneMapping && _effects.length === 0 ) return false; + + _savedRenderTarget = renderTarget; + + // resize internal buffers to match render target (e.g. XR resolution) + if ( renderTarget !== null ) { + + const width = renderTarget.width; + const height = renderTarget.height; + + if ( targetA.width !== width || targetA.height !== height ) { + + this.setSize( width, height ); + + } + + } + + // if first effect is a RenderPass, it will set its own render target + if ( _hasRenderPass === false ) { + + renderer.setRenderTarget( targetA ); + + } + + // disable tone mapping during render - it will be applied in end() + _savedToneMapping = renderer.toneMapping; + renderer.toneMapping = NoToneMapping; + + return true; + + }; + + this.hasRenderPass = function () { + + return _hasRenderPass; + + }; + + this.end = function ( renderer, deltaTime ) { + + // restore tone mapping + renderer.toneMapping = _savedToneMapping; + + _isCompositing = true; + + // run post-processing effects + let readBuffer = targetA; + let writeBuffer = targetB; + + for ( let i = 0; i < _effects.length; i ++ ) { + + const effect = _effects[ i ]; + + if ( effect.enabled === false ) continue; + + effect.render( renderer, writeBuffer, readBuffer, deltaTime ); + + if ( effect.needsSwap !== false ) { + + const temp = readBuffer; + readBuffer = writeBuffer; + writeBuffer = temp; + + } + + } + + // update output material defines if settings changed + if ( _outputColorSpace !== renderer.outputColorSpace || _outputToneMapping !== renderer.toneMapping ) { + + _outputColorSpace = renderer.outputColorSpace; + _outputToneMapping = renderer.toneMapping; + + material.defines = {}; + + if ( ColorManagement.getTransfer( _outputColorSpace ) === SRGBTransfer ) material.defines.SRGB_TRANSFER = ''; + + const toneMapping = toneMappingMap[ _outputToneMapping ]; + if ( toneMapping ) material.defines[ toneMapping ] = ''; + + material.needsUpdate = true; + + } + + // final output to canvas (or 
XR render target) + material.uniforms.tDiffuse.value = readBuffer.texture; + renderer.setRenderTarget( _savedRenderTarget ); + renderer.render( mesh, camera ); + + _savedRenderTarget = null; + _isCompositing = false; + + }; + + this.isCompositing = function () { + + return _isCompositing; + + }; + + this.dispose = function () { + + targetA.dispose(); + targetB.dispose(); + geometry.dispose(); + material.dispose(); + + }; + +} + /** * Creates a three-dimensional texture from raw data, with parameters to * divide it into width, height, and depth. @@ -42068,7 +43039,7 @@ function setValueT1( gl, v, textures ) { if ( this.type === gl.SAMPLER_2D_SHADOW ) { - emptyShadowTexture.compareFunction = LessEqualCompare; // #28670 + emptyShadowTexture.compareFunction = textures.isReversedDepthBuffer() ? GreaterEqualCompare : LessEqualCompare; emptyTexture2D = emptyShadowTexture; } else { @@ -42318,9 +43289,21 @@ function setValueT1Array( gl, v, textures ) { } + let emptyTexture2D; + + if ( this.type === gl.SAMPLER_2D_SHADOW ) { + + emptyTexture2D = emptyShadowTexture; + + } else { + + emptyTexture2D = emptyTexture; + + } + for ( let i = 0; i !== n; ++ i ) { - textures.setTexture2D( v[ i ] || emptyTexture, units[ i ] ); + textures.setTexture2D( v[ i ] || emptyTexture2D, units[ i ] ); } @@ -42606,6 +43589,31 @@ class WebGLUniforms { } + // Sort uniforms to prioritize shadow samplers first (for optimal texture unit allocation) + + const shadowSamplers = []; + const otherUniforms = []; + + for ( const u of this.seq ) { + + if ( u.type === gl.SAMPLER_2D_SHADOW || u.type === gl.SAMPLER_CUBE_SHADOW || u.type === gl.SAMPLER_2D_ARRAY_SHADOW ) { + + shadowSamplers.push( u ); + + } else { + + otherUniforms.push( u ); + + } + + } + + if ( shadowSamplers.length > 0 ) { + + this.seq = shadowSamplers.concat( otherUniforms ); + + } + } setValue( gl, name, value, textures ) { @@ -42711,7 +43719,7 @@ function getEncodingComponents( colorSpace ) { return [ encodingMatrix, 'sRGBTransferOETF' ]; 
default: - console.warn( 'THREE.WebGLProgram: Unsupported color space: ', colorSpace ); + warn( 'WebGLProgram: Unsupported color space: ', colorSpace ); return [ encodingMatrix, 'LinearTransferOETF' ]; } @@ -42731,7 +43739,7 @@ function getShaderErrors( gl, shader, type ) { if ( errorMatches ) { // --enable-privileged-webgl-extension - // console.log( '**' + type + '**', gl.getExtension( 'WEBGL_debug_shaders' ).getTranslatedShaderSource( shader ) ); + // log( '**' + type + '**', gl.getExtension( 'WEBGL_debug_shaders' ).getTranslatedShaderSource( shader ) ); const errorLine = parseInt( errorMatches[ 1 ] ); return type.toUpperCase() + '\n\n' + errors + '\n\n' + handleSource( gl.getShaderSource( shader ), errorLine ); @@ -42760,43 +43768,24 @@ function getTexelEncodingFunction( functionName, colorSpace ) { } -function getToneMappingFunction( functionName, toneMapping ) { - - let toneMappingName; - - switch ( toneMapping ) { - - case LinearToneMapping: - toneMappingName = 'Linear'; - break; - - case ReinhardToneMapping: - toneMappingName = 'Reinhard'; - break; - - case CineonToneMapping: - toneMappingName = 'Cineon'; - break; - - case ACESFilmicToneMapping: - toneMappingName = 'ACESFilmic'; - break; +const toneMappingFunctions = { + [ LinearToneMapping ]: 'Linear', + [ ReinhardToneMapping ]: 'Reinhard', + [ CineonToneMapping ]: 'Cineon', + [ ACESFilmicToneMapping ]: 'ACESFilmic', + [ AgXToneMapping ]: 'AgX', + [ NeutralToneMapping ]: 'Neutral', + [ CustomToneMapping ]: 'Custom' +}; - case AgXToneMapping: - toneMappingName = 'AgX'; - break; +function getToneMappingFunction( functionName, toneMapping ) { - case NeutralToneMapping: - toneMappingName = 'Neutral'; - break; + const toneMappingName = toneMappingFunctions[ toneMapping ]; - case CustomToneMapping: - toneMappingName = 'Custom'; - break; + if ( toneMappingName === undefined ) { - default: - console.warn( 'THREE.WebGLProgram: Unsupported toneMapping:', toneMapping ); - toneMappingName = 'Linear'; + warn( 
'WebGLProgram: Unsupported toneMapping:', toneMapping ); + return 'vec3 ' + functionName + '( vec3 color ) { return LinearToneMapping( color ); }'; } @@ -42873,7 +43862,7 @@ function fetchAttributeLocations( gl, program ) { if ( info.type === gl.FLOAT_MAT3 ) locationSize = 3; if ( info.type === gl.FLOAT_MAT4 ) locationSize = 4; - // console.log( 'THREE.WebGLProgram: ACTIVE VERTEX ATTRIBUTE:', name, i ); + // log( 'WebGLProgram: ACTIVE VERTEX ATTRIBUTE:', name, i ); attributes[ name ] = { type: info.type, @@ -42943,7 +43932,7 @@ function includeReplacer( match, include ) { if ( newInclude !== undefined ) { string = ShaderChunk[ newInclude ]; - console.warn( 'THREE.WebGLRenderer: Shader chunk "%s" has been deprecated. Use "%s" instead.', include, newInclude ); + warn( 'WebGLRenderer: Shader chunk "%s" has been deprecated. Use "%s" instead.', include, newInclude ); } else { @@ -43024,99 +44013,54 @@ function generatePrecision( parameters ) { } -function generateShadowMapTypeDefine( parameters ) { - - let shadowMapTypeDefine = 'SHADOWMAP_TYPE_BASIC'; - - if ( parameters.shadowMapType === PCFShadowMap ) { - - shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF'; - - } else if ( parameters.shadowMapType === PCFSoftShadowMap ) { - - shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF_SOFT'; - - } else if ( parameters.shadowMapType === VSMShadowMap ) { - - shadowMapTypeDefine = 'SHADOWMAP_TYPE_VSM'; +const shadowMapTypeDefines = { + [ PCFShadowMap ]: 'SHADOWMAP_TYPE_PCF', + [ VSMShadowMap ]: 'SHADOWMAP_TYPE_VSM' +}; - } +function generateShadowMapTypeDefine( parameters ) { - return shadowMapTypeDefine; + return shadowMapTypeDefines[ parameters.shadowMapType ] || 'SHADOWMAP_TYPE_BASIC'; } -function generateEnvMapTypeDefine( parameters ) { - - let envMapTypeDefine = 'ENVMAP_TYPE_CUBE'; - - if ( parameters.envMap ) { - - switch ( parameters.envMapMode ) { - - case CubeReflectionMapping: - case CubeRefractionMapping: - envMapTypeDefine = 'ENVMAP_TYPE_CUBE'; - break; - - case 
CubeUVReflectionMapping: - envMapTypeDefine = 'ENVMAP_TYPE_CUBE_UV'; - break; +const envMapTypeDefines = { + [ CubeReflectionMapping ]: 'ENVMAP_TYPE_CUBE', + [ CubeRefractionMapping ]: 'ENVMAP_TYPE_CUBE', + [ CubeUVReflectionMapping ]: 'ENVMAP_TYPE_CUBE_UV' +}; - } +function generateEnvMapTypeDefine( parameters ) { - } + if ( parameters.envMap === false ) return 'ENVMAP_TYPE_CUBE'; - return envMapTypeDefine; + return envMapTypeDefines[ parameters.envMapMode ] || 'ENVMAP_TYPE_CUBE'; } -function generateEnvMapModeDefine( parameters ) { - - let envMapModeDefine = 'ENVMAP_MODE_REFLECTION'; - - if ( parameters.envMap ) { - - switch ( parameters.envMapMode ) { - - case CubeRefractionMapping: +const envMapModeDefines = { + [ CubeRefractionMapping ]: 'ENVMAP_MODE_REFRACTION' +}; - envMapModeDefine = 'ENVMAP_MODE_REFRACTION'; - break; +function generateEnvMapModeDefine( parameters ) { - } + if ( parameters.envMap === false ) return 'ENVMAP_MODE_REFLECTION'; - } - - return envMapModeDefine; + return envMapModeDefines[ parameters.envMapMode ] || 'ENVMAP_MODE_REFLECTION'; } -function generateEnvMapBlendingDefine( parameters ) { - - let envMapBlendingDefine = 'ENVMAP_BLENDING_NONE'; - - if ( parameters.envMap ) { - - switch ( parameters.combine ) { - - case MultiplyOperation: - envMapBlendingDefine = 'ENVMAP_BLENDING_MULTIPLY'; - break; - - case MixOperation: - envMapBlendingDefine = 'ENVMAP_BLENDING_MIX'; - break; +const envMapBlendingDefines = { + [ MultiplyOperation ]: 'ENVMAP_BLENDING_MULTIPLY', + [ MixOperation ]: 'ENVMAP_BLENDING_MIX', + [ AddOperation ]: 'ENVMAP_BLENDING_ADD' +}; - case AddOperation: - envMapBlendingDefine = 'ENVMAP_BLENDING_ADD'; - break; +function generateEnvMapBlendingDefine( parameters ) { - } + if ( parameters.envMap === false ) return 'ENVMAP_BLENDING_NONE'; - } - - return envMapBlendingDefine; + return envMapBlendingDefines[ parameters.combine ] || 'ENVMAP_BLENDING_NONE'; } @@ -43139,7 +44083,7 @@ function generateCubeUVSize( parameters ) { 
function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { // TODO Send this event to Three.js DevTools - // console.log( 'WebGLProgram', cacheKey ); + // log( 'WebGLProgram', cacheKey ); const gl = renderer.getContext(); @@ -43459,8 +44403,8 @@ function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { parameters.thicknessMap ? '#define USE_THICKNESSMAP' : '', parameters.vertexTangents && parameters.flatShading === false ? '#define USE_TANGENT' : '', - parameters.vertexColors || parameters.instancingColor || parameters.batchingColor ? '#define USE_COLOR' : '', - parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '', + parameters.vertexColors || parameters.instancingColor ? '#define USE_COLOR' : '', + parameters.vertexAlphas || parameters.batchingColor ? '#define USE_COLOR_ALPHA' : '', parameters.vertexUv1s ? '#define USE_UV1' : '', parameters.vertexUv2s ? '#define USE_UV2' : '', parameters.vertexUv3s ? '#define USE_UV3' : '', @@ -43555,8 +44499,8 @@ function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { const vertexGlsl = versionString + prefixVertex + vertexShader; const fragmentGlsl = versionString + prefixFragment + fragmentShader; - // console.log( '*VERTEX*', vertexGlsl ); - // console.log( '*FRAGMENT*', fragmentGlsl ); + // log( '*VERTEX*', vertexGlsl ); + // log( '*FRAGMENT*', fragmentGlsl ); const glVertexShader = WebGLShader( gl, gl.VERTEX_SHADER, vertexGlsl ); const glFragmentShader = WebGLShader( gl, gl.FRAGMENT_SHADER, fragmentGlsl ); @@ -43610,7 +44554,7 @@ function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { const vertexErrors = getShaderErrors( gl, glVertexShader, 'vertex' ); const fragmentErrors = getShaderErrors( gl, glFragmentShader, 'fragment' ); - console.error( + error( 'THREE.WebGLProgram: Shader Error ' + gl.getError() + ' - ' + 'VALIDATE_STATUS ' + gl.getProgramParameter( program, gl.VALIDATE_STATUS ) + '\n\n' + 'Material Name: ' + self.name + '\n' + @@ -43624,7 +44568,7 
@@ function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { } else if ( programLog !== '' ) { - console.warn( 'THREE.WebGLProgram: Program Info Log:', programLog ); + warn( 'WebGLProgram: Program Info Log:', programLog ); } else if ( vertexLog === '' || fragmentLog === '' ) { @@ -43874,21 +44818,21 @@ class WebGLShaderStage { } -function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping ) { +function WebGLPrograms( renderer, environments, extensions, capabilities, bindingStates, clipping ) { const _programLayers = new Layers(); const _customShaders = new WebGLShaderCache(); const _activeChannels = new Set(); const programs = []; + const programsMap = new Map(); const logarithmicDepthBuffer = capabilities.logarithmicDepthBuffer; - const SUPPORTS_VERTEX_TEXTURES = capabilities.vertexTextures; let precision = capabilities.precision; const shaderIDs = { MeshDepthMaterial: 'depth', - MeshDistanceMaterial: 'distanceRGBA', + MeshDistanceMaterial: 'distance', MeshNormalMaterial: 'normal', MeshBasicMaterial: 'basic', MeshLambertMaterial: 'lambert', @@ -43918,9 +44862,10 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities const fog = scene.fog; const geometry = object.geometry; - const environment = material.isMeshStandardMaterial ? scene.environment : null; + const environment = ( material.isMeshStandardMaterial || material.isMeshLambertMaterial || material.isMeshPhongMaterial ) ? scene.environment : null; - const envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || environment ); + const usePMREM = material.isMeshStandardMaterial || ( material.isMeshLambertMaterial && ! material.envMap ) || ( material.isMeshPhongMaterial && ! material.envMap ); + const envMap = environments.get( material.envMap || environment, usePMREM ); const envMapCubeUVHeight = ( !! envMap ) && ( envMap.mapping === CubeUVReflectionMapping ) ? 
envMap.image.height : null; const shaderID = shaderIDs[ material.type ]; @@ -43934,7 +44879,7 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities if ( precision !== material.precision ) { - console.warn( 'THREE.WebGLProgram.getParameters:', material.precision, 'not supported, using', precision, 'instead.' ); + warn( 'WebGLProgram.getParameters:', material.precision, 'not supported, using', precision, 'instead.' ); } @@ -44066,7 +45011,6 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities instancingColor: IS_INSTANCEDMESH && object.instanceColor !== null, instancingMorph: IS_INSTANCEDMESH && object.morphTexture !== null, - supportsVertexTextures: SUPPORTS_VERTEX_TEXTURES, outputColorSpace: ( currentRenderTarget === null ) ? renderer.outputColorSpace : ( currentRenderTarget.isXRRenderTarget === true ? currentRenderTarget.texture.colorSpace : LinearSRGBColorSpace ), alphaToCoverage: !! material.alphaToCoverage, @@ -44079,7 +45023,7 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities lightMap: HAS_LIGHTMAP, bumpMap: HAS_BUMPMAP, normalMap: HAS_NORMALMAP, - displacementMap: SUPPORTS_VERTEX_TEXTURES && HAS_DISPLACEMENTMAP, + displacementMap: HAS_DISPLACEMENTMAP, emissiveMap: HAS_EMISSIVEMAP, normalMapObjectSpace: HAS_NORMALMAP && material.normalMapType === ObjectSpaceNormalMap, @@ -44170,7 +45114,12 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities useFog: material.fog === true, fogExp2: ( !! 
fog && fog.isFogExp2 ), - flatShading: ( material.flatShading === true && material.wireframe === false ), + flatShading: material.wireframe === false && ( + material.flatShading === true || + ( geometry.attributes.normal === undefined && HAS_NORMALMAP === false && + ( material.isMeshLambertMaterial || material.isMeshPhongMaterial || material.isMeshStandardMaterial || material.isMeshPhysicalMaterial ) + ) + ), sizeAttenuation: material.sizeAttenuation === true, logarithmicDepthBuffer: logarithmicDepthBuffer, @@ -44339,52 +45288,50 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities _programLayers.disableAll(); - if ( parameters.supportsVertexTextures ) - _programLayers.enable( 0 ); if ( parameters.instancing ) - _programLayers.enable( 1 ); + _programLayers.enable( 0 ); if ( parameters.instancingColor ) - _programLayers.enable( 2 ); + _programLayers.enable( 1 ); if ( parameters.instancingMorph ) - _programLayers.enable( 3 ); + _programLayers.enable( 2 ); if ( parameters.matcap ) - _programLayers.enable( 4 ); + _programLayers.enable( 3 ); if ( parameters.envMap ) - _programLayers.enable( 5 ); + _programLayers.enable( 4 ); if ( parameters.normalMapObjectSpace ) - _programLayers.enable( 6 ); + _programLayers.enable( 5 ); if ( parameters.normalMapTangentSpace ) - _programLayers.enable( 7 ); + _programLayers.enable( 6 ); if ( parameters.clearcoat ) - _programLayers.enable( 8 ); + _programLayers.enable( 7 ); if ( parameters.iridescence ) - _programLayers.enable( 9 ); + _programLayers.enable( 8 ); if ( parameters.alphaTest ) - _programLayers.enable( 10 ); + _programLayers.enable( 9 ); if ( parameters.vertexColors ) - _programLayers.enable( 11 ); + _programLayers.enable( 10 ); if ( parameters.vertexAlphas ) - _programLayers.enable( 12 ); + _programLayers.enable( 11 ); if ( parameters.vertexUv1s ) - _programLayers.enable( 13 ); + _programLayers.enable( 12 ); if ( parameters.vertexUv2s ) - _programLayers.enable( 14 ); + _programLayers.enable( 13 
); if ( parameters.vertexUv3s ) - _programLayers.enable( 15 ); + _programLayers.enable( 14 ); if ( parameters.vertexTangents ) - _programLayers.enable( 16 ); + _programLayers.enable( 15 ); if ( parameters.anisotropy ) - _programLayers.enable( 17 ); + _programLayers.enable( 16 ); if ( parameters.alphaHash ) - _programLayers.enable( 18 ); + _programLayers.enable( 17 ); if ( parameters.batching ) - _programLayers.enable( 19 ); + _programLayers.enable( 18 ); if ( parameters.dispersion ) - _programLayers.enable( 20 ); + _programLayers.enable( 19 ); if ( parameters.batchingColor ) - _programLayers.enable( 21 ); + _programLayers.enable( 20 ); if ( parameters.gradientMap ) - _programLayers.enable( 22 ); + _programLayers.enable( 21 ); array.push( _programLayers.mask ); _programLayers.disableAll(); @@ -44460,29 +45407,19 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities function acquireProgram( parameters, cacheKey ) { - let program; + let program = programsMap.get( cacheKey ); - // Check if code has been already compiled - for ( let p = 0, pl = programs.length; p < pl; p ++ ) { + if ( program !== undefined ) { - const preexistingProgram = programs[ p ]; + ++ program.usedTimes; - if ( preexistingProgram.cacheKey === cacheKey ) { - - program = preexistingProgram; - ++ program.usedTimes; - - break; - - } - - } - - if ( program === undefined ) { + } else { program = new WebGLProgram( renderer, cacheKey, parameters, bindingStates ); programs.push( program ); + programsMap.set( cacheKey, program ); + } return program; @@ -44498,6 +45435,9 @@ function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities programs[ i ] = programs[ programs.length - 1 ]; programs.pop(); + // Remove from map + programsMap.delete( program.cacheKey ); + // Free WebGL resources program.destroy(); @@ -44598,6 +45538,10 @@ function painterSortStable( a, b ) { return a.material.id - b.material.id; + } else if ( a.materialVariant !== b.materialVariant ) { 
+ + return a.materialVariant - b.materialVariant; + } else if ( a.z !== b.z ) { return a.z - b.z; @@ -44652,6 +45596,15 @@ function WebGLRenderList() { } + function materialVariant( object ) { + + let variant = 0; + if ( object.isInstancedMesh ) variant += 2; + if ( object.isSkinnedMesh ) variant += 1; + return variant; + + } + function getNextRenderItem( object, geometry, material, groupOrder, z, group ) { let renderItem = renderItems[ renderItemsIndex ]; @@ -44663,6 +45616,7 @@ function WebGLRenderList() { object: object, geometry: geometry, material: material, + materialVariant: materialVariant( object ), groupOrder: groupOrder, renderOrder: object.renderOrder, z: z, @@ -44677,6 +45631,7 @@ function WebGLRenderList() { renderItem.object = object; renderItem.geometry = geometry; renderItem.material = material; + renderItem.materialVariant = materialVariant( object ); renderItem.groupOrder = groupOrder; renderItem.renderOrder = object.renderOrder; renderItem.z = z; @@ -45055,7 +46010,23 @@ function WebGLLights( extensions ) { const intensity = light.intensity; const distance = light.distance; - const shadowMap = ( light.shadow && light.shadow.map ) ? light.shadow.map.texture : null; + let shadowMap = null; + + if ( light.shadow && light.shadow.map ) { + + if ( light.shadow.map.texture.format === RGFormat ) { + + // VSM uses color texture with blurred mean/std_dev + shadowMap = light.shadow.map.texture; + + } else { + + // Other types use depth texture + shadowMap = light.shadow.map.depthTexture || light.shadow.map.texture; + + } + + } if ( light.isAmbientLight ) { @@ -45493,6 +46464,7 @@ function WebGLRenderStates( extensions ) { * near and far plane. White is nearest, black is farthest. 
* * @augments Material + * @demo scenes/material-browser.html#MeshDepthMaterial */ class MeshDepthMaterial extends Material { @@ -45747,9 +46719,95 @@ class MeshDistanceMaterial extends Material { } +/** + * This class can be used to automatically save the depth information of a + * cube rendering into a cube texture with depth format. Used for PointLight shadows. + * + * @augments DepthTexture + */ +class CubeDepthTexture extends DepthTexture { + + /** + * Constructs a new cube depth texture. + * + * @param {number} size - The size (width and height) of each cube face. + * @param {number} [type=UnsignedIntType] - The texture type. + * @param {number} [mapping=CubeReflectionMapping] - The texture mapping. + * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. + * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. + * @param {number} [magFilter=NearestFilter] - The mag filter value. + * @param {number} [minFilter=NearestFilter] - The min filter value. + * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. + * @param {number} [format=DepthFormat] - The texture format. + */ + constructor( size, type = UnsignedIntType, mapping = CubeReflectionMapping, wrapS, wrapT, magFilter = NearestFilter, minFilter = NearestFilter, anisotropy, format = DepthFormat ) { + + // Create 6 identical image descriptors for the cube faces + const image = { width: size, height: size, depth: 1 }; + const images = [ image, image, image, image, image, image ]; + + // Call DepthTexture constructor with width, height + super( size, size, type, mapping, wrapS, wrapT, magFilter, minFilter, anisotropy, format ); + + // Replace the single image with the array of 6 images + this.image = images; + + /** + * This flag can be used for type testing. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isCubeDepthTexture = true; + + /** + * Set to true for cube texture handling in WebGLTextures. 
+ * + * @type {boolean} + * @readonly + * @default true + */ + this.isCubeTexture = true; + + } + + /** + * Alias for {@link CubeDepthTexture#image}. + * + * @type {Array} + */ + get images() { + + return this.image; + + } + + set images( value ) { + + this.image = value; + + } + +} + const vertex = "void main() {\n\tgl_Position = vec4( position, 1.0 );\n}"; -const fragment = "uniform sampler2D shadow_pass;\nuniform vec2 resolution;\nuniform float radius;\n#include \nvoid main() {\n\tconst float samples = float( VSM_SAMPLES );\n\tfloat mean = 0.0;\n\tfloat squared_mean = 0.0;\n\tfloat uvStride = samples <= 1.0 ? 0.0 : 2.0 / ( samples - 1.0 );\n\tfloat uvStart = samples <= 1.0 ? 0.0 : - 1.0;\n\tfor ( float i = 0.0; i < samples; i ++ ) {\n\t\tfloat uvOffset = uvStart + i * uvStride;\n\t\t#ifdef HORIZONTAL_PASS\n\t\t\tvec2 distribution = unpackRGBATo2Half( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( uvOffset, 0.0 ) * radius ) / resolution ) );\n\t\t\tmean += distribution.x;\n\t\t\tsquared_mean += distribution.y * distribution.y + distribution.x * distribution.x;\n\t\t#else\n\t\t\tfloat depth = unpackRGBAToDepth( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( 0.0, uvOffset ) * radius ) / resolution ) );\n\t\t\tmean += depth;\n\t\t\tsquared_mean += depth * depth;\n\t\t#endif\n\t}\n\tmean = mean / samples;\n\tsquared_mean = squared_mean / samples;\n\tfloat std_dev = sqrt( squared_mean - mean * mean );\n\tgl_FragColor = pack2HalfToRGBA( vec2( mean, std_dev ) );\n}"; +const fragment = "uniform sampler2D shadow_pass;\nuniform vec2 resolution;\nuniform float radius;\nvoid main() {\n\tconst float samples = float( VSM_SAMPLES );\n\tfloat mean = 0.0;\n\tfloat squared_mean = 0.0;\n\tfloat uvStride = samples <= 1.0 ? 0.0 : 2.0 / ( samples - 1.0 );\n\tfloat uvStart = samples <= 1.0 ? 
0.0 : - 1.0;\n\tfor ( float i = 0.0; i < samples; i ++ ) {\n\t\tfloat uvOffset = uvStart + i * uvStride;\n\t\t#ifdef HORIZONTAL_PASS\n\t\t\tvec2 distribution = texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( uvOffset, 0.0 ) * radius ) / resolution ).rg;\n\t\t\tmean += distribution.x;\n\t\t\tsquared_mean += distribution.y * distribution.y + distribution.x * distribution.x;\n\t\t#else\n\t\t\tfloat depth = texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( 0.0, uvOffset ) * radius ) / resolution ).r;\n\t\t\tmean += depth;\n\t\t\tsquared_mean += depth * depth;\n\t\t#endif\n\t}\n\tmean = mean / samples;\n\tsquared_mean = squared_mean / samples;\n\tfloat std_dev = sqrt( max( 0.0, squared_mean - mean * mean ) );\n\tgl_FragColor = vec4( mean, std_dev, 0.0, 1.0 );\n}"; + +const _cubeDirections = [ + /*@__PURE__*/ new Vector3( 1, 0, 0 ), /*@__PURE__*/ new Vector3( -1, 0, 0 ), /*@__PURE__*/ new Vector3( 0, 1, 0 ), + /*@__PURE__*/ new Vector3( 0, -1, 0 ), /*@__PURE__*/ new Vector3( 0, 0, 1 ), /*@__PURE__*/ new Vector3( 0, 0, -1 ) +]; + +const _cubeUps = [ + /*@__PURE__*/ new Vector3( 0, -1, 0 ), /*@__PURE__*/ new Vector3( 0, -1, 0 ), /*@__PURE__*/ new Vector3( 0, 0, 1 ), + /*@__PURE__*/ new Vector3( 0, 0, -1 ), /*@__PURE__*/ new Vector3( 0, -1, 0 ), /*@__PURE__*/ new Vector3( 0, -1, 0 ) +]; + +const _projScreenMatrix$2 = /*@__PURE__*/ new Matrix4(); +const _lightPositionWorld$1 = /*@__PURE__*/ new Vector3(); +const _lookTarget$1 = /*@__PURE__*/ new Vector3(); function WebGLShadowMap( renderer, objects, capabilities ) { @@ -45760,7 +46818,7 @@ function WebGLShadowMap( renderer, objects, capabilities ) { _viewport = new Vector4(), - _depthMaterial = new MeshDepthMaterial( { depthPacking: RGBADepthPacking } ), + _depthMaterial = new MeshDepthMaterial(), _distanceMaterial = new MeshDistanceMaterial(), _materialCache = {}, @@ -45815,6 +46873,13 @@ function WebGLShadowMap( renderer, objects, capabilities ) { if ( lights.length === 0 ) return; + if ( this.type === PCFSoftShadowMap ) 
{ + + warn( 'WebGLShadowMap: PCFSoftShadowMap has been deprecated. Using PCFShadowMap instead.' ); + this.type = PCFShadowMap; + + } + const currentRenderTarget = renderer.getRenderTarget(); const activeCubeFace = renderer.getActiveCubeFace(); const activeMipmapLevel = renderer.getActiveMipmapLevel(); @@ -45839,8 +46904,31 @@ function WebGLShadowMap( renderer, objects, capabilities ) { // check for shadow map type changes - const toVSM = ( _previousType !== VSMShadowMap && this.type === VSMShadowMap ); - const fromVSM = ( _previousType === VSMShadowMap && this.type !== VSMShadowMap ); + const typeChanged = _previousType !== this.type; + + // When shadow map type changes, materials need recompilation because sampler types change + // (sampler2DShadow for PCF vs sampler2D for Basic) + if ( typeChanged ) { + + scene.traverse( function ( object ) { + + if ( object.material ) { + + if ( Array.isArray( object.material ) ) { + + object.material.forEach( mat => mat.needsUpdate = true ); + + } else { + + object.material.needsUpdate = true; + + } + + } + + } ); + + } // render depth map @@ -45851,7 +46939,7 @@ function WebGLShadowMap( renderer, objects, capabilities ) { if ( shadow === undefined ) { - console.warn( 'THREE.WebGLShadowMap:', light, 'has no shadow.' ); + warn( 'WebGLShadowMap:', light, 'has no shadow.' ); continue; } @@ -45886,42 +46974,154 @@ function WebGLShadowMap( renderer, objects, capabilities ) { } - if ( shadow.map === null || toVSM === true || fromVSM === true ) { + const reversedDepthBuffer = renderer.state.buffers.depth.getReversed(); + shadow.camera._reversedDepth = reversedDepthBuffer; - const pars = ( this.type !== VSMShadowMap ) ? 
{ minFilter: NearestFilter, magFilter: NearestFilter } : {}; + if ( shadow.map === null || typeChanged === true ) { if ( shadow.map !== null ) { + if ( shadow.map.depthTexture !== null ) { + + shadow.map.depthTexture.dispose(); + shadow.map.depthTexture = null; + + } + shadow.map.dispose(); } - shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, pars ); - shadow.map.texture.name = light.name + '.shadowMap'; + if ( this.type === VSMShadowMap ) { + + if ( light.isPointLight ) { + + warn( 'WebGLShadowMap: VSM shadow maps are not supported for PointLights. Use PCF or BasicShadowMap instead.' ); + continue; + + } + + shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, { + format: RGFormat, + type: HalfFloatType, + minFilter: LinearFilter, + magFilter: LinearFilter, + generateMipmaps: false + } ); + shadow.map.texture.name = light.name + '.shadowMap'; + + // Native depth texture for VSM - depth is captured here, then blurred into the color texture + shadow.map.depthTexture = new DepthTexture( _shadowMapSize.x, _shadowMapSize.y, FloatType ); + shadow.map.depthTexture.name = light.name + '.shadowMapDepth'; + shadow.map.depthTexture.format = DepthFormat; + shadow.map.depthTexture.compareFunction = null; // For regular sampling (not shadow comparison) + shadow.map.depthTexture.minFilter = NearestFilter; + shadow.map.depthTexture.magFilter = NearestFilter; + + } else { + + if ( light.isPointLight ) { + + shadow.map = new WebGLCubeRenderTarget( _shadowMapSize.x ); + shadow.map.depthTexture = new CubeDepthTexture( _shadowMapSize.x, UnsignedIntType ); + + } else { + + shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y ); + shadow.map.depthTexture = new DepthTexture( _shadowMapSize.x, _shadowMapSize.y, UnsignedIntType ); + + } + + shadow.map.depthTexture.name = light.name + '.shadowMap'; + shadow.map.depthTexture.format = DepthFormat; + + if ( this.type === PCFShadowMap ) { + + shadow.map.depthTexture.compareFunction = 
reversedDepthBuffer ? GreaterEqualCompare : LessEqualCompare; + shadow.map.depthTexture.minFilter = LinearFilter; + shadow.map.depthTexture.magFilter = LinearFilter; + + } else { + + shadow.map.depthTexture.compareFunction = null; + shadow.map.depthTexture.minFilter = NearestFilter; + shadow.map.depthTexture.magFilter = NearestFilter; + + } + + } shadow.camera.updateProjectionMatrix(); } - renderer.setRenderTarget( shadow.map ); - renderer.clear(); + // For cube render targets (PointLights), render all 6 faces. Otherwise, render once. + const faceCount = shadow.map.isWebGLCubeRenderTarget ? 6 : 1; - const viewportCount = shadow.getViewportCount(); + for ( let face = 0; face < faceCount; face ++ ) { - for ( let vp = 0; vp < viewportCount; vp ++ ) { + // For cube render targets, render to each face separately + if ( shadow.map.isWebGLCubeRenderTarget ) { - const viewport = shadow.getViewport( vp ); + renderer.setRenderTarget( shadow.map, face ); + renderer.clear(); - _viewport.set( - _viewportSize.x * viewport.x, - _viewportSize.y * viewport.y, - _viewportSize.x * viewport.z, - _viewportSize.y * viewport.w - ); + } else { + + // For 2D render targets, use viewports + if ( face === 0 ) { + + renderer.setRenderTarget( shadow.map ); + renderer.clear(); + + } + + const viewport = shadow.getViewport( face ); + + _viewport.set( + _viewportSize.x * viewport.x, + _viewportSize.y * viewport.y, + _viewportSize.x * viewport.z, + _viewportSize.y * viewport.w + ); + + _state.viewport( _viewport ); + + } + + if ( light.isPointLight ) { + + const camera = shadow.camera; + const shadowMatrix = shadow.matrix; + + const far = light.distance || camera.far; + + if ( far !== camera.far ) { + + camera.far = far; + camera.updateProjectionMatrix(); + + } + + _lightPositionWorld$1.setFromMatrixPosition( light.matrixWorld ); + camera.position.copy( _lightPositionWorld$1 ); + + _lookTarget$1.copy( camera.position ); + _lookTarget$1.add( _cubeDirections[ face ] ); + camera.up.copy( _cubeUps[ 
face ] ); + camera.lookAt( _lookTarget$1 ); + camera.updateMatrixWorld(); - _state.viewport( _viewport ); + shadowMatrix.makeTranslation( - _lightPositionWorld$1.x, - _lightPositionWorld$1.y, - _lightPositionWorld$1.z ); - shadow.updateMatrices( light, vp ); + _projScreenMatrix$2.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse ); + shadow._frustum.setFromProjectionMatrix( _projScreenMatrix$2, camera.coordinateSystem, camera.reversedDepth ); + + } else { + + shadow.updateMatrices( light ); + + } _frustum = shadow.getFrustum(); @@ -45965,13 +47165,16 @@ function WebGLShadowMap( renderer, objects, capabilities ) { if ( shadow.mapPass === null ) { - shadow.mapPass = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y ); + shadow.mapPass = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, { + format: RGFormat, + type: HalfFloatType + } ); } - // vertical pass + // vertical pass - read from native depth texture - shadowMaterialVertical.uniforms.shadow_pass.value = shadow.map.texture; + shadowMaterialVertical.uniforms.shadow_pass.value = shadow.map.depthTexture; shadowMaterialVertical.uniforms.resolution.value = shadow.mapSize; shadowMaterialVertical.uniforms.radius.value = shadow.radius; renderer.setRenderTarget( shadow.mapPass ); @@ -46170,18 +47373,6 @@ function WebGLShadowMap( renderer, objects, capabilities ) { } -const reversedFuncs = { - [ NeverDepth ]: AlwaysDepth, - [ LessDepth ]: GreaterDepth, - [ EqualDepth ]: NotEqualDepth, - [ LessEqualDepth ]: GreaterEqualDepth, - - [ AlwaysDepth ]: NeverDepth, - [ GreaterDepth ]: LessDepth, - [ NotEqualDepth ]: EqualDepth, - [ GreaterEqualDepth ]: LessEqualDepth, -}; - function WebGLState( gl, extensions ) { function ColorBuffer() { @@ -46313,7 +47504,7 @@ function WebGLState( gl, extensions ) { setFunc: function ( depthFunc ) { - if ( currentReversed ) depthFunc = reversedFuncs[ depthFunc ]; + if ( currentReversed ) depthFunc = ReversedDepthFuncs[ depthFunc ]; if ( currentDepthFunc !== 
depthFunc ) { @@ -46381,6 +47572,8 @@ function WebGLState( gl, extensions ) { if ( currentDepthClear !== depth ) { + currentDepthClear = depth; + if ( currentReversed ) { depth = 1 - depth; @@ -46388,7 +47581,6 @@ function WebGLState( gl, extensions ) { } gl.clearDepth( depth ); - currentDepthClear = depth; } @@ -46839,7 +48031,7 @@ function WebGLState( gl, extensions ) { break; default: - console.error( 'THREE.WebGLState: Invalid blending: ', blending ); + error( 'WebGLState: Invalid blending: ', blending ); break; } @@ -46857,15 +48049,15 @@ function WebGLState( gl, extensions ) { break; case SubtractiveBlending: - console.error( 'THREE.WebGLState: SubtractiveBlending requires material.premultipliedAlpha = true' ); + error( 'WebGLState: SubtractiveBlending requires material.premultipliedAlpha = true' ); break; case MultiplyBlending: - console.error( 'THREE.WebGLState: MultiplyBlending requires material.premultipliedAlpha = true' ); + error( 'WebGLState: MultiplyBlending requires material.premultipliedAlpha = true' ); break; default: - console.error( 'THREE.WebGLState: Invalid blending: ', blending ); + error( 'WebGLState: Invalid blending: ', blending ); break; } @@ -47042,11 +48234,17 @@ function WebGLState( gl, extensions ) { if ( currentPolygonOffsetFactor !== factor || currentPolygonOffsetUnits !== units ) { - gl.polygonOffset( factor, units ); - currentPolygonOffsetFactor = factor; currentPolygonOffsetUnits = units; + if ( depthBuffer.getReversed() ) { + + factor = - factor; + + } + + gl.polygonOffset( factor, units ); + } } else { @@ -47150,9 +48348,9 @@ function WebGLState( gl, extensions ) { gl.compressedTexImage2D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47164,9 +48362,9 @@ function WebGLState( gl, extensions ) { gl.compressedTexImage3D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 
'WebGLState:', e ); } @@ -47178,9 +48376,9 @@ function WebGLState( gl, extensions ) { gl.texSubImage2D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47192,9 +48390,9 @@ function WebGLState( gl, extensions ) { gl.texSubImage3D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47206,9 +48404,9 @@ function WebGLState( gl, extensions ) { gl.compressedTexSubImage2D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47220,9 +48418,9 @@ function WebGLState( gl, extensions ) { gl.compressedTexSubImage3D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47234,9 +48432,9 @@ function WebGLState( gl, extensions ) { gl.texStorage2D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47248,9 +48446,9 @@ function WebGLState( gl, extensions ) { gl.texStorage3D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47262,9 +48460,9 @@ function WebGLState( gl, extensions ) { gl.texImage2D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47276,9 +48474,9 @@ function WebGLState( gl, extensions ) { gl.texImage3D( ...arguments ); - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLState:', error ); + error( 'WebGLState:', e ); } @@ -47548,8 +48746,12 @@ function getByteLength( width, height, format, type ) { // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_etc/ case RGB_ETC1_Format: case RGB_ETC2_Format: + case R11_EAC_Format: + case 
SIGNED_R11_EAC_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 8; case RGBA_ETC2_EAC_Format: + case RG11_EAC_Format: + case SIGNED_RG11_EAC_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 16; // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_astc/ @@ -47652,9 +48854,9 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, try { useOffscreenCanvas = typeof OffscreenCanvas !== 'undefined' - // eslint-disable-next-line compat/compat && ( new OffscreenCanvas( 1, 1 ).getContext( '2d' ) ) !== null; + } catch ( err ) { // Ignore any errors @@ -47666,7 +48868,6 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, // Use OffscreenCanvas when available. Specially needed in web workers return useOffscreenCanvas ? - // eslint-disable-next-line compat/compat new OffscreenCanvas( width, height ) : createElementNS( 'canvas' ); } @@ -47711,7 +48912,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, const context = canvas.getContext( '2d' ); context.drawImage( image, 0, 0, width, height ); - console.warn( 'THREE.WebGLRenderer: Texture has been resized from (' + dimensions.width + 'x' + dimensions.height + ') to (' + width + 'x' + height + ').' ); + warn( 'WebGLRenderer: Texture has been resized from (' + dimensions.width + 'x' + dimensions.height + ') to (' + width + 'x' + height + ').' ); return canvas; @@ -47719,7 +48920,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( 'data' in image ) { - console.warn( 'THREE.WebGLRenderer: Image in DataTexture is too big (' + dimensions.width + 'x' + dimensions.height + ').' ); + warn( 'WebGLRenderer: Image in DataTexture is too big (' + dimensions.width + 'x' + dimensions.height + ').' 
); } @@ -47760,7 +48961,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( _gl[ internalFormatName ] !== undefined ) return _gl[ internalFormatName ]; - console.warn( 'THREE.WebGLRenderer: Attempt to use non-existing WebGL internal format \'' + internalFormatName + '\'' ); + warn( 'WebGLRenderer: Attempt to use non-existing WebGL internal format \'' + internalFormatName + '\'' ); } @@ -47873,7 +49074,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } else if ( depthType === UnsignedShortType ) { glInternalFormat = _gl.DEPTH24_STENCIL8; - console.warn( 'DepthTexture: 16 bit depth attachment is not supported with stencil. Using 24-bit attachment.' ); + warn( 'DepthTexture: 16 bit depth attachment is not supported with stencil. Using 24-bit attachment.' ); } @@ -48103,7 +49304,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( textureUnit >= capabilities.maxTextures ) { - console.warn( 'THREE.WebGLTextures: Trying to use ' + textureUnit + ' texture units while this GPU supports only ' + capabilities.maxTextures ); + warn( 'WebGLTextures: Trying to use ' + textureUnit + ' texture units while this GPU supports only ' + capabilities.maxTextures ); } @@ -48150,11 +49351,11 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( image === null ) { - console.warn( 'THREE.WebGLRenderer: Texture marked for update but no image data found.' ); + warn( 'WebGLRenderer: Texture marked for update but no image data found.' 
); } else if ( image.complete === false ) { - console.warn( 'THREE.WebGLRenderer: Texture marked for update but image is incomplete' ); + warn( 'WebGLRenderer: Texture marked for update but image is incomplete' ); } else { @@ -48182,6 +49383,10 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, uploadTexture( textureProperties, texture, slot ); return; + } else if ( texture.isExternalTexture ) { + + textureProperties.__webglTexture = texture.sourceTexture ? texture.sourceTexture : null; + } state.bindTexture( _gl.TEXTURE_2D_ARRAY, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); @@ -48207,7 +49412,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, const textureProperties = properties.get( texture ); - if ( texture.version > 0 && textureProperties.__version !== texture.version ) { + if ( texture.isCubeDepthTexture !== true && texture.version > 0 && textureProperties.__version !== texture.version ) { uploadCubeTexture( textureProperties, texture, slot ); return; @@ -48251,7 +49456,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, ( texture.magFilter === LinearFilter || texture.magFilter === LinearMipmapNearestFilter || texture.magFilter === NearestMipmapLinearFilter || texture.magFilter === LinearMipmapLinearFilter || texture.minFilter === LinearFilter || texture.minFilter === LinearMipmapNearestFilter || texture.minFilter === NearestMipmapLinearFilter || texture.minFilter === LinearMipmapLinearFilter ) ) { - console.warn( 'THREE.WebGLRenderer: Unable to use linear filtering with floating point textures. OES_texture_float_linear not supported on this device.' ); + warn( 'WebGLRenderer: Unable to use linear filtering with floating point textures. OES_texture_float_linear not supported on this device.' 
); } @@ -48660,7 +49865,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } else { - console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); + warn( 'WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); } @@ -48716,7 +49921,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } else { - console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); + warn( 'WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); } @@ -49010,7 +50215,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } else { - console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .setTextureCube()' ); + warn( 'WebGLRenderer: Attempt to load unsupported compressed texture format in .setTextureCube()' ); } @@ -49199,7 +50404,7 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } // Setup storage for internal depth/stencil buffers and bind to correct framebuffer - function setupRenderBufferStorage( renderbuffer, renderTarget, isMultisample ) { + function setupRenderBufferStorage( renderbuffer, renderTarget, useMultisample ) { _gl.bindRenderbuffer( _gl.RENDERBUFFER, renderbuffer ); @@ -49212,15 +50417,13 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, const glAttachmentType = renderTarget.stencilBuffer ? 
_gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; // set up the attachment - const samples = getRenderTargetSamples( renderTarget ); - const isUseMultisampledRTT = useMultisampledRTT( renderTarget ); - if ( isUseMultisampledRTT ) { + if ( useMultisampledRTT( renderTarget ) ) { - multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); + multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, getRenderTargetSamples( renderTarget ), glInternalFormat, renderTarget.width, renderTarget.height ); - } else if ( isMultisample ) { + } else if ( useMultisample ) { - _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); + _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, getRenderTargetSamples( renderTarget ), glInternalFormat, renderTarget.width, renderTarget.height ); } else { @@ -49241,15 +50444,14 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, const glFormat = utils.convert( texture.format, texture.colorSpace ); const glType = utils.convert( texture.type ); const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace ); - const samples = getRenderTargetSamples( renderTarget ); - if ( isMultisample && useMultisampledRTT( renderTarget ) === false ) { + if ( useMultisampledRTT( renderTarget ) ) { - _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); + multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, getRenderTargetSamples( renderTarget ), glInternalFormat, renderTarget.width, renderTarget.height ); - } else if ( useMultisampledRTT( renderTarget ) ) { + } else if ( useMultisample ) { - multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); + 
_gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, getRenderTargetSamples( renderTarget ), glInternalFormat, renderTarget.width, renderTarget.height ); } else { @@ -49266,10 +50468,9 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } // Setup resources for a Depth Texture for a FBO (needs an extension) - function setupDepthTexture( framebuffer, renderTarget ) { + function setupDepthTexture( framebuffer, renderTarget, cubeFace ) { - const isCube = ( renderTarget && renderTarget.isWebGLCubeRenderTarget ); - if ( isCube ) throw new Error( 'Depth Texture with cube render targets is not supported' ); + const isCube = ( renderTarget.isWebGLCubeRenderTarget === true ); state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); @@ -49293,20 +50494,69 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, } - setTexture2D( renderTarget.depthTexture, 0 ); + if ( isCube ) { + + // For cube depth textures, initialize and bind without uploading image data + if ( textureProperties.__webglInit === undefined ) { + + textureProperties.__webglInit = true; + renderTarget.depthTexture.addEventListener( 'dispose', onTextureDispose ); + + } + + // Only create and allocate storage once + if ( textureProperties.__webglTexture === undefined ) { + + textureProperties.__webglTexture = _gl.createTexture(); + + state.bindTexture( _gl.TEXTURE_CUBE_MAP, textureProperties.__webglTexture ); + setTextureParameters( _gl.TEXTURE_CUBE_MAP, renderTarget.depthTexture ); + + // Allocate storage for all 6 faces with correct depth texture format + const glFormat = utils.convert( renderTarget.depthTexture.format ); + const glType = utils.convert( renderTarget.depthTexture.type ); + + // Use proper internal format for depth textures + let glInternalFormat; + if ( renderTarget.depthTexture.format === DepthFormat ) { + + glInternalFormat = _gl.DEPTH_COMPONENT24; + + } else if ( renderTarget.depthTexture.format === DepthStencilFormat ) { + + 
glInternalFormat = _gl.DEPTH24_STENCIL8; + + } + + for ( let i = 0; i < 6; i ++ ) { + + _gl.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, glInternalFormat, renderTarget.width, renderTarget.height, 0, glFormat, glType, null ); + + } + + } + + } else { + + setTexture2D( renderTarget.depthTexture, 0 ); + + } const webglDepthTexture = textureProperties.__webglTexture; const samples = getRenderTargetSamples( renderTarget ); + const glTextureType = isCube ? _gl.TEXTURE_CUBE_MAP_POSITIVE_X + cubeFace : _gl.TEXTURE_2D; + const glAttachmentType = renderTarget.depthTexture.format === DepthStencilFormat ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; + if ( renderTarget.depthTexture.format === DepthFormat ) { if ( useMultisampledRTT( renderTarget ) ) { - multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, _gl.DEPTH_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0, samples ); + multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, glAttachmentType, glTextureType, webglDepthTexture, 0, samples ); } else { - _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.DEPTH_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0 ); + _gl.framebufferTexture2D( _gl.FRAMEBUFFER, glAttachmentType, glTextureType, webglDepthTexture, 0 ); } @@ -49314,11 +50564,11 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( useMultisampledRTT( renderTarget ) ) { - multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, _gl.DEPTH_STENCIL_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0, samples ); + multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, glAttachmentType, glTextureType, webglDepthTexture, 0, samples ); } else { - _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.DEPTH_STENCIL_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0 ); + _gl.framebufferTexture2D( _gl.FRAMEBUFFER, glAttachmentType, glTextureType, webglDepthTexture, 0 ); } @@ -49369,17 +50619,28 @@ function WebGLTextures( _gl, 
extensions, state, properties, capabilities, utils, if ( renderTarget.depthTexture && ! renderTargetProperties.__autoAllocateDepthBuffer ) { - if ( isCube ) throw new Error( 'target.depthTexture not supported in Cube render targets' ); + if ( isCube ) { - const mipmaps = renderTarget.texture.mipmaps; + // For cube render targets with depth texture, setup each face + for ( let i = 0; i < 6; i ++ ) { - if ( mipmaps && mipmaps.length > 0 ) { + setupDepthTexture( renderTargetProperties.__webglFramebuffer[ i ], renderTarget, i ); - setupDepthTexture( renderTargetProperties.__webglFramebuffer[ 0 ], renderTarget ); + } } else { - setupDepthTexture( renderTargetProperties.__webglFramebuffer, renderTarget ); + const mipmaps = renderTarget.texture.mipmaps; + + if ( mipmaps && mipmaps.length > 0 ) { + + setupDepthTexture( renderTargetProperties.__webglFramebuffer[ 0 ], renderTarget, 0 ); + + } else { + + setupDepthTexture( renderTargetProperties.__webglFramebuffer, renderTarget, 0 ); + + } } @@ -49903,13 +51164,13 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, if ( format !== RGBAFormat || type !== UnsignedByteType ) { - console.warn( 'THREE.WebGLTextures: sRGB encoded textures have to use RGBAFormat and UnsignedByteType.' ); + warn( 'WebGLTextures: sRGB encoded textures have to use RGBAFormat and UnsignedByteType.' 
); } } else { - console.error( 'THREE.WebGLTextures: Unsupported texture color space:', colorSpace ); + error( 'WebGLTextures: Unsupported texture color space:', colorSpace ); } @@ -49961,6 +51222,12 @@ function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, this.setupFrameBufferTexture = setupFrameBufferTexture; this.useMultisampledRTT = useMultisampledRTT; + this.isReversedDepthBuffer = function () { + + return state.buffers.depth.getReversed(); + + }; + } function WebGLUtils( gl, extensions ) { @@ -50064,7 +51331,7 @@ function WebGLUtils( gl, extensions ) { // ETC - if ( p === RGB_ETC1_Format || p === RGB_ETC2_Format || p === RGBA_ETC2_EAC_Format ) { + if ( p === RGB_ETC1_Format || p === RGB_ETC2_Format || p === RGBA_ETC2_EAC_Format || p === R11_EAC_Format || p === SIGNED_R11_EAC_Format || p === RG11_EAC_Format || p === SIGNED_RG11_EAC_Format ) { extension = extensions.get( 'WEBGL_compressed_texture_etc' ); @@ -50072,6 +51339,10 @@ function WebGLUtils( gl, extensions ) { if ( p === RGB_ETC1_Format || p === RGB_ETC2_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ETC2 : extension.COMPRESSED_RGB8_ETC2; if ( p === RGBA_ETC2_EAC_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC : extension.COMPRESSED_RGBA8_ETC2_EAC; + if ( p === R11_EAC_Format ) return extension.COMPRESSED_R11_EAC; + if ( p === SIGNED_R11_EAC_Format ) return extension.COMPRESSED_SIGNED_R11_EAC; + if ( p === RG11_EAC_Format ) return extension.COMPRESSED_RG11_EAC; + if ( p === SIGNED_RG11_EAC_Format ) return extension.COMPRESSED_SIGNED_RG11_EAC; } else { @@ -51150,7 +52421,7 @@ class WebXRManager extends EventDispatcher { if ( scope.isPresenting === true ) { - console.warn( 'THREE.WebXRManager: Cannot change framebuffer scale while presenting.' ); + warn( 'WebXRManager: Cannot change framebuffer scale while presenting.' 
); } @@ -51172,7 +52443,7 @@ class WebXRManager extends EventDispatcher { if ( scope.isPresenting === true ) { - console.warn( 'THREE.WebXRManager: Cannot change reference space type while presenting.' ); + warn( 'WebXRManager: Cannot change reference space type while presenting.' ); } @@ -51620,8 +52891,8 @@ class WebXRManager extends EventDispatcher { // inherit camera layers and enable eye layers (1 = left, 2 = right) cameraXR.layers.mask = camera.layers.mask | 0b110; - cameraL.layers.mask = cameraXR.layers.mask & 0b011; - cameraR.layers.mask = cameraXR.layers.mask & 0b101; + cameraL.layers.mask = cameraXR.layers.mask & -5; + cameraR.layers.mask = cameraXR.layers.mask & -3; const parent = camera.parent; const cameras = cameraXR.cameras; @@ -52015,6 +53286,12 @@ function WebGLMaterials( renderer, properties ) { refreshUniformsCommon( uniforms, material ); + if ( material.envMap ) { + + uniforms.envMapIntensity.value = material.envMapIntensity; + + } + } else if ( material.isMeshToonMaterial ) { refreshUniformsCommon( uniforms, material ); @@ -52025,6 +53302,12 @@ function WebGLMaterials( renderer, properties ) { refreshUniformsCommon( uniforms, material ); refreshUniformsPhong( uniforms, material ); + if ( material.envMap ) { + + uniforms.envMapIntensity.value = material.envMapIntensity; + + } + } else if ( material.isMeshStandardMaterial ) { refreshUniformsCommon( uniforms, material ); @@ -52638,7 +53921,7 @@ function WebGLUniformsGroups( gl, info, capabilities, state ) { } - console.error( 'THREE.WebGLRenderer: Maximum number of simultaneously usable uniforms groups reached.' ); + error( 'WebGLRenderer: Maximum number of simultaneously usable uniforms groups reached.' ); return 0; @@ -52893,11 +54176,11 @@ function WebGLUniformsGroups( gl, info, capabilities, state ) { } else if ( value.isTexture ) { - console.warn( 'THREE.WebGLRenderer: Texture samplers can not be part of an uniforms group.' 
); + warn( 'WebGLRenderer: Texture samplers can not be part of an uniforms group.' ); } else { - console.warn( 'THREE.WebGLRenderer: Unsupported uniform value type.', value ); + warn( 'WebGLRenderer: Unsupported uniform value type.', value ); } @@ -52946,6 +54229,137 @@ function WebGLUniformsGroups( gl, info, capabilities, state ) { } +/** + * Creates a texture directly from raw buffer data. + * + * The interpretation of the data depends on type and format: If the type is + * `UnsignedByteType`, a `Uint8Array` will be useful for addressing the + * texel data. If the format is `RGBAFormat`, data needs four values for + * one texel; Red, Green, Blue and Alpha (typically the opacity). + * + * @augments Texture + */ +class DataTexture extends Texture { + + /** + * Constructs a new data texture. + * + * @param {?TypedArray} [data=null] - The buffer data. + * @param {number} [width=1] - The width of the texture. + * @param {number} [height=1] - The height of the texture. + * @param {number} [format=RGBAFormat] - The texture format. + * @param {number} [type=UnsignedByteType] - The texture type. + * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. + * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. + * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. + * @param {number} [magFilter=NearestFilter] - The mag filter value. + * @param {number} [minFilter=NearestFilter] - The min filter value. + * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. + * @param {string} [colorSpace=NoColorSpace] - The color space. + */ + constructor( data = null, width = 1, height = 1, format, type, mapping, wrapS, wrapT, magFilter = NearestFilter, minFilter = NearestFilter, anisotropy, colorSpace ) { + + super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); + + /** + * This flag can be used for type testing. 
+ * + * @type {boolean} + * @readonly + * @default true + */ + this.isDataTexture = true; + + /** + * The image definition of a data texture. + * + * @type {{data:TypedArray,width:number,height:number}} + */ + this.image = { data: data, width: width, height: height }; + + /** + * Whether to generate mipmaps (if possible) for a texture. + * + * Overwritten and set to `false` by default. + * + * @type {boolean} + * @default false + */ + this.generateMipmaps = false; + + /** + * If set to `true`, the texture is flipped along the vertical axis when + * uploaded to the GPU. + * + * Overwritten and set to `false` by default. + * + * @type {boolean} + * @default false + */ + this.flipY = false; + + /** + * Specifies the alignment requirements for the start of each pixel row in memory. + * + * Overwritten and set to `1` by default. + * + * @type {boolean} + * @default 1 + */ + this.unpackAlignment = 1; + + } + +} + +/** + * Precomputed DFG LUT for Image-Based Lighting + * Resolution: 16x16 + * Samples: 4096 per texel + * Format: RG16F (2 half floats per texel: scale, bias) + */ + + +const DATA = new Uint16Array( [ + 0x30b5, 0x3ad1, 0x314c, 0x3a4d, 0x33d2, 0x391c, 0x35ef, 0x3828, 0x37f3, 0x36a6, 0x38d1, 0x3539, 0x3979, 0x3410, 0x39f8, 0x3252, 0x3a53, 0x30f0, 0x3a94, 0x2fc9, 0x3abf, 0x2e35, 0x3ada, 0x2d05, 0x3ae8, 0x2c1f, 0x3aed, 0x2ae0, 0x3aea, 0x29d1, 0x3ae1, 0x28ff, + 0x3638, 0x38e4, 0x364a, 0x38ce, 0x3699, 0x385e, 0x374e, 0x372c, 0x3839, 0x35a4, 0x38dc, 0x3462, 0x396e, 0x32c4, 0x39de, 0x3134, 0x3a2b, 0x3003, 0x3a59, 0x2e3a, 0x3a6d, 0x2ce1, 0x3a6e, 0x2bba, 0x3a5f, 0x2a33, 0x3a49, 0x290a, 0x3a2d, 0x2826, 0x3a0a, 0x26e8, + 0x3894, 0x36d7, 0x3897, 0x36c9, 0x38a3, 0x3675, 0x38bc, 0x35ac, 0x38ee, 0x349c, 0x393e, 0x3332, 0x3997, 0x3186, 0x39e2, 0x3038, 0x3a13, 0x2e75, 0x3a29, 0x2cf5, 0x3a2d, 0x2bac, 0x3a21, 0x29ff, 0x3a04, 0x28bc, 0x39dc, 0x2790, 0x39ad, 0x261a, 0x3978, 0x24fa, + 0x39ac, 0x34a8, 0x39ac, 0x34a3, 0x39ae, 0x3480, 0x39ae, 0x3423, 0x39b1, 0x330e, 0x39c2, 0x31a9, 
0x39e0, 0x3063, 0x39fc, 0x2eb5, 0x3a0c, 0x2d1d, 0x3a14, 0x2bcf, 0x3a07, 0x29ff, 0x39e9, 0x28a3, 0x39be, 0x273c, 0x3989, 0x25b3, 0x394a, 0x2488, 0x3907, 0x2345, + 0x3a77, 0x3223, 0x3a76, 0x321f, 0x3a73, 0x3204, 0x3a6a, 0x31b3, 0x3a58, 0x3114, 0x3a45, 0x303b, 0x3a34, 0x2eb6, 0x3a26, 0x2d31, 0x3a1e, 0x2bef, 0x3a0b, 0x2a0d, 0x39ec, 0x28a1, 0x39c0, 0x271b, 0x3987, 0x2580, 0x3944, 0x2449, 0x38fa, 0x22bd, 0x38ac, 0x2155, + 0x3b07, 0x2fca, 0x3b06, 0x2fca, 0x3b00, 0x2fb8, 0x3af4, 0x2f7c, 0x3adb, 0x2eea, 0x3ab4, 0x2e00, 0x3a85, 0x2cec, 0x3a5e, 0x2bc5, 0x3a36, 0x2a00, 0x3a0d, 0x2899, 0x39dc, 0x2707, 0x39a0, 0x2562, 0x395a, 0x2424, 0x390b, 0x2268, 0x38b7, 0x20fd, 0x385f, 0x1fd1, + 0x3b69, 0x2cb9, 0x3b68, 0x2cbb, 0x3b62, 0x2cbb, 0x3b56, 0x2cae, 0x3b3b, 0x2c78, 0x3b0d, 0x2c0a, 0x3acf, 0x2ae3, 0x3a92, 0x2998, 0x3a54, 0x2867, 0x3a17, 0x26d0, 0x39d3, 0x253c, 0x3989, 0x2402, 0x3935, 0x2226, 0x38dc, 0x20bd, 0x387d, 0x1f54, 0x381d, 0x1db3, + 0x3ba9, 0x296b, 0x3ba8, 0x296f, 0x3ba3, 0x297b, 0x3b98, 0x2987, 0x3b7f, 0x2976, 0x3b4e, 0x2927, 0x3b0e, 0x2895, 0x3ac2, 0x27b7, 0x3a73, 0x263b, 0x3a23, 0x24e7, 0x39d0, 0x239b, 0x3976, 0x21d9, 0x3917, 0x207e, 0x38b2, 0x1ee7, 0x384b, 0x1d53, 0x37c7, 0x1c1e, + 0x3bd2, 0x25cb, 0x3bd1, 0x25d3, 0x3bcd, 0x25f0, 0x3bc2, 0x261f, 0x3bad, 0x2645, 0x3b7d, 0x262d, 0x3b3e, 0x25c4, 0x3aec, 0x250f, 0x3a93, 0x243a, 0x3a32, 0x22ce, 0x39d0, 0x215b, 0x3969, 0x202a, 0x38fe, 0x1e6e, 0x388f, 0x1cf1, 0x381f, 0x1b9b, 0x3762, 0x19dd, + 0x3be9, 0x21ab, 0x3be9, 0x21b7, 0x3be5, 0x21e5, 0x3bdd, 0x2241, 0x3bc9, 0x22a7, 0x3ba0, 0x22ec, 0x3b62, 0x22cd, 0x3b0f, 0x2247, 0x3aae, 0x2175, 0x3a44, 0x2088, 0x39d4, 0x1f49, 0x3960, 0x1dbe, 0x38e9, 0x1c77, 0x3870, 0x1ae8, 0x37f1, 0x1953, 0x3708, 0x181b, + 0x3bf6, 0x1cea, 0x3bf6, 0x1cfb, 0x3bf3, 0x1d38, 0x3bec, 0x1dbd, 0x3bda, 0x1e7c, 0x3bb7, 0x1f25, 0x3b7d, 0x1f79, 0x3b2c, 0x1f4c, 0x3ac6, 0x1ea6, 0x3a55, 0x1dbb, 0x39da, 0x1cbd, 0x395a, 0x1b9d, 0x38d8, 0x1a00, 0x3855, 0x18ac, 0x37ab, 0x173c, 0x36b7, 0x1598, + 0x3bfc, 0x1736, 0x3bfc, 0x1759, 
0x3bf9, 0x17e7, 0x3bf4, 0x1896, 0x3be4, 0x1997, 0x3bc6, 0x1aa8, 0x3b91, 0x1b84, 0x3b43, 0x1bd2, 0x3ade, 0x1b8a, 0x3a65, 0x1acd, 0x39e2, 0x19d3, 0x3957, 0x18cd, 0x38ca, 0x17b3, 0x383e, 0x1613, 0x376d, 0x14bf, 0x366f, 0x135e, + 0x3bff, 0x101b, 0x3bff, 0x1039, 0x3bfc, 0x10c8, 0x3bf9, 0x1226, 0x3bea, 0x1428, 0x3bcf, 0x1584, 0x3b9f, 0x16c5, 0x3b54, 0x179a, 0x3af0, 0x17ce, 0x3a76, 0x1771, 0x39ea, 0x16a4, 0x3956, 0x15a7, 0x38bf, 0x14a7, 0x3829, 0x1379, 0x3735, 0x11ea, 0x362d, 0x10a1, + 0x3c00, 0x061b, 0x3c00, 0x066a, 0x3bfe, 0x081c, 0x3bfa, 0x0a4c, 0x3bed, 0x0d16, 0x3bd5, 0x0fb3, 0x3ba9, 0x114d, 0x3b63, 0x127c, 0x3b01, 0x132f, 0x3a85, 0x1344, 0x39f4, 0x12d2, 0x3957, 0x120d, 0x38b5, 0x1122, 0x3817, 0x103c, 0x3703, 0x0ed3, 0x35f0, 0x0d6d, + 0x3c00, 0x007a, 0x3c00, 0x0089, 0x3bfe, 0x011d, 0x3bfb, 0x027c, 0x3bf0, 0x04fa, 0x3bda, 0x0881, 0x3bb1, 0x0acd, 0x3b6f, 0x0c97, 0x3b10, 0x0d7b, 0x3a93, 0x0df1, 0x39fe, 0x0def, 0x3959, 0x0d8a, 0x38af, 0x0ce9, 0x3808, 0x0c31, 0x36d5, 0x0af0, 0x35b9, 0x09a3, + 0x3c00, 0x0000, 0x3c00, 0x0001, 0x3bff, 0x0015, 0x3bfb, 0x0059, 0x3bf2, 0x00fd, 0x3bdd, 0x01df, 0x3bb7, 0x031c, 0x3b79, 0x047c, 0x3b1d, 0x05d4, 0x3aa0, 0x06d5, 0x3a08, 0x075a, 0x395d, 0x075e, 0x38aa, 0x06f7, 0x37f4, 0x0648, 0x36ac, 0x0576, 0x3586, 0x049f +] ); + +let lut = null; + +function getDFGLUT() { + + if ( lut === null ) { + + lut = new DataTexture( DATA, 16, 16, RGFormat, HalfFloatType ); + lut.name = 'DFG_LUT'; + lut.minFilter = LinearFilter; + lut.magFilter = LinearFilter; + lut.wrapS = ClampToEdgeWrapping; + lut.wrapT = ClampToEdgeWrapping; + lut.generateMipmaps = false; + lut.needsUpdate = true; + + } + + return lut; + +} + /** * This renderer uses WebGL 2 to display scenes. 
* @@ -52972,6 +54386,7 @@ class WebGLRenderer { powerPreference = 'default', failIfMajorPerformanceCaveat = false, reversedDepthBuffer = false, + outputBufferType = UnsignedByteType, } = parameters; /** @@ -53001,6 +54416,23 @@ class WebGLRenderer { } + const _outputBufferType = outputBufferType; + + const INTEGER_FORMATS = new Set( [ + RGBAIntegerFormat, + RGIntegerFormat, + RedIntegerFormat + ] ); + + const UNSIGNED_TYPES = new Set( [ + UnsignedByteType, + UnsignedIntType, + UnsignedShortType, + UnsignedInt248Type, + UnsignedShort4444Type, + UnsignedShort5551Type + ] ); + const uintClearColor = new Uint32Array( 4 ); const intClearColor = new Int32Array( 4 ); @@ -53013,16 +54445,20 @@ class WebGLRenderer { const renderListStack = []; const renderStateStack = []; + // internal render target for non-UnsignedByteType color buffer + + let output = null; + // public properties /** - * A canvas where the renderer draws its output.This is automatically created by the renderer + * A canvas where the renderer draws its output. This is automatically created by the renderer * in the constructor (if not provided already); you just need to add it to your page like so: * ```js * document.body.appendChild( renderer.domElement ); * ``` * - * @type {DOMElement} + * @type {HTMLCanvasElement|OffscreenCanvas} */ this.domElement = canvas; @@ -53032,7 +54468,7 @@ class WebGLRenderer { * - `checkShaderErrors`: If it is `true`, defines whether material shader programs are * checked for errors during compilation and linkage process. It may be useful to disable * this check in production for performance gain. It is strongly recommended to keep these - * checks enabled during development. If the shader does not compile and link - it will not + * checks enabled during development. If the shader does not compile and link, it will not * work and associated material will not render. 
* - `onShaderError(gl, program, glVertexShader,glFragmentShader)`: A callback function that * can be used for custom error reporting. The callback receives the WebGL context, an instance @@ -53273,15 +54709,15 @@ class WebGLRenderer { } - } catch ( error ) { + } catch ( e ) { - console.error( 'THREE.WebGLRenderer: ' + error.message ); - throw error; + error( 'WebGLRenderer: ' + e.message ); + throw e; } let extensions, capabilities, state, info; - let properties, textures, cubemaps, cubeuvmaps, attributes, geometries, objects; + let properties, textures, environments, attributes, geometries, objects; let programCache, materials, renderLists, renderStates, clipping, shadowMap; let background, morphtargets, bufferRenderer, indexedBufferRenderer; @@ -53308,19 +54744,18 @@ class WebGLRenderer { info = new WebGLInfo( _gl ); properties = new WebGLProperties(); textures = new WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info ); - cubemaps = new WebGLCubeMaps( _this ); - cubeuvmaps = new WebGLCubeUVMaps( _this ); + environments = new WebGLEnvironments( _this ); attributes = new WebGLAttributes( _gl ); bindingStates = new WebGLBindingStates( _gl, attributes ); geometries = new WebGLGeometries( _gl, attributes, info, bindingStates ); - objects = new WebGLObjects( _gl, geometries, attributes, info ); + objects = new WebGLObjects( _gl, geometries, attributes, bindingStates, info ); morphtargets = new WebGLMorphtargets( _gl, capabilities, textures ); clipping = new WebGLClipping( properties ); - programCache = new WebGLPrograms( _this, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping ); + programCache = new WebGLPrograms( _this, environments, extensions, capabilities, bindingStates, clipping ); materials = new WebGLMaterials( _this, properties ); renderLists = new WebGLRenderLists(); renderStates = new WebGLRenderStates( extensions ); - background = new WebGLBackground( _this, cubemaps, cubeuvmaps, state, objects, _alpha, 
premultipliedAlpha ); + background = new WebGLBackground( _this, environments, state, objects, _alpha, premultipliedAlpha ); shadowMap = new WebGLShadowMap( _this, objects, capabilities ); uniformsGroups = new WebGLUniformsGroups( _gl, info, capabilities, state ); @@ -53408,6 +54843,14 @@ class WebGLRenderer { initGLContext(); + // initialize internal render target for non-UnsignedByteType color buffer + + if ( _outputBufferType !== UnsignedByteType ) { + + output = new WebGLOutput( _outputBufferType, canvas.width, canvas.height, depth, stencil ); + + } + // xr const xr = new WebXRManager( _this, _gl ); @@ -53512,7 +54955,7 @@ class WebGLRenderer { if ( xr.isPresenting ) { - console.warn( 'THREE.WebGLRenderer: Can\'t change size while VR device is presenting.' ); + warn( 'WebGLRenderer: Can\'t change size while VR device is presenting.' ); return; } @@ -53530,6 +54973,12 @@ class WebGLRenderer { } + if ( output !== null ) { + + output.setSize( canvas.width, canvas.height ); + + } + this.setViewport( 0, 0, width, height ); }; @@ -53573,6 +55022,39 @@ class WebGLRenderer { }; + /** + * Sets the post-processing effects to be applied after rendering. + * + * @param {Array} effects - An array of post-processing effects. + */ + this.setEffects = function ( effects ) { + + if ( _outputBufferType === UnsignedByteType ) { + + console.error( 'THREE.WebGLRenderer: setEffects() requires outputBufferType set to HalfFloatType or FloatType.' ); + return; + + } + + if ( effects ) { + + for ( let i = 0; i < effects.length; i ++ ) { + + if ( effects[ i ].isOutputPass === true ) { + + console.warn( 'THREE.WebGLRenderer: OutputPass is not needed in setEffects(). Tone mapping and color space conversion are applied automatically.' ); + break; + + } + + } + + } + + output.setEffects( effects || [] ); + + }; + /** * Returns the current viewport definition. 
* @@ -53774,9 +55256,7 @@ class WebGLRenderer { if ( _currentRenderTarget !== null ) { const targetFormat = _currentRenderTarget.texture.format; - isIntegerFormat = targetFormat === RGBAIntegerFormat || - targetFormat === RGIntegerFormat || - targetFormat === RedIntegerFormat; + isIntegerFormat = INTEGER_FORMATS.has( targetFormat ); } @@ -53785,12 +55265,7 @@ class WebGLRenderer { if ( isIntegerFormat ) { const targetType = _currentRenderTarget.texture.type; - const isUnsignedType = targetType === UnsignedByteType || - targetType === UnsignedIntType || - targetType === UnsignedShortType || - targetType === UnsignedInt248Type || - targetType === UnsignedShort4444Type || - targetType === UnsignedShort5551Type; + const isUnsignedType = UNSIGNED_TYPES.has( targetType ); const clearColor = background.getClearColor(); const a = background.getClearAlpha(); @@ -53837,7 +55312,11 @@ class WebGLRenderer { } - _gl.clear( bits ); + if ( bits !== 0 ) { + + _gl.clear( bits ); + + } }; @@ -53882,8 +55361,7 @@ class WebGLRenderer { renderLists.dispose(); renderStates.dispose(); properties.dispose(); - cubemaps.dispose(); - cubeuvmaps.dispose(); + environments.dispose(); objects.dispose(); bindingStates.dispose(); uniformsGroups.dispose(); @@ -53904,7 +55382,7 @@ class WebGLRenderer { event.preventDefault(); - console.log( 'THREE.WebGLRenderer: Context Lost.' ); + log( 'WebGLRenderer: Context Lost.' ); _isContextLost = true; @@ -53912,7 +55390,7 @@ class WebGLRenderer { function onContextRestore( /* event */ ) { - console.log( 'THREE.WebGLRenderer: Context Restored.' ); + log( 'WebGLRenderer: Context Restored.' ); _isContextLost = false; @@ -53934,7 +55412,7 @@ class WebGLRenderer { function onContextCreationError( event ) { - console.error( 'THREE.WebGLRenderer: A WebGL context could not be created. Reason: ', event.statusMessage ); + error( 'WebGLRenderer: A WebGL context could not be created. 
Reason: ', event.statusMessage ); } @@ -54107,7 +55585,7 @@ class WebGLRenderer { if ( object._multiDrawInstances !== null ) { // @deprecated, r174 - warnOnce( 'THREE.WebGLRenderer: renderMultiDrawInstances has been deprecated and will be removed in r184. Append to renderMultiDraw arguments and use indirection.' ); + warnOnce( 'WebGLRenderer: renderMultiDrawInstances has been deprecated and will be removed in r184. Append to renderMultiDraw arguments and use indirection.' ); renderer.renderMultiDrawInstances( object._multiDrawStarts, object._multiDrawCounts, object._multiDrawCount, object._multiDrawInstances ); } else { @@ -54383,6 +55861,13 @@ class WebGLRenderer { if ( typeof self !== 'undefined' ) animation.setContext( self ); + /** + * Applications are advised to always define the animation loop + * with this method and not manually with `requestAnimationFrame()` + * for best compatibility. + * + * @param {?onAnimationCallback} callback - The application's animation loop. + */ this.setAnimationLoop = function ( callback ) { onAnimationFrameCallback = callback; @@ -54415,13 +55900,19 @@ class WebGLRenderer { if ( camera !== undefined && camera.isCamera !== true ) { - console.error( 'THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.' ); + error( 'WebGLRenderer.render: camera is not an instance of THREE.Camera.' 
); return; } if ( _isContextLost === true ) return; + // use internal render target for HalfFloatType color buffer (only when tone mapping is enabled) + + const isXRPresenting = xr.enabled === true && xr.isPresenting === true; + + const useOutput = output !== null && ( _currentRenderTarget === null || isXRPresenting ) && output.begin( _this, _currentRenderTarget ); + // update scene graph if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld(); @@ -54430,7 +55921,7 @@ class WebGLRenderer { if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld(); - if ( xr.enabled === true && xr.isPresenting === true ) { + if ( xr.enabled === true && xr.isPresenting === true && ( output === null || output.isCompositing() === false ) ) { if ( xr.cameraAutoUpdate === true ) xr.updateCamera( camera ); @@ -54502,46 +55993,52 @@ class WebGLRenderer { if ( this.info.autoReset === true ) this.info.reset(); - // render scene + // render scene (skip if first effect is a render pass - it will render the scene itself) - const opaqueObjects = currentRenderList.opaque; - const transmissiveObjects = currentRenderList.transmissive; + const skipSceneRender = useOutput && output.hasRenderPass(); - currentRenderState.setupLights(); + if ( skipSceneRender === false ) { - if ( camera.isArrayCamera ) { + const opaqueObjects = currentRenderList.opaque; + const transmissiveObjects = currentRenderList.transmissive; - const cameras = camera.cameras; + currentRenderState.setupLights(); - if ( transmissiveObjects.length > 0 ) { + if ( camera.isArrayCamera ) { - for ( let i = 0, l = cameras.length; i < l; i ++ ) { + const cameras = camera.cameras; - const camera2 = cameras[ i ]; + if ( transmissiveObjects.length > 0 ) { + + for ( let i = 0, l = cameras.length; i < l; i ++ ) { - renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera2 ); + const camera2 = cameras[ i ]; + + renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, 
camera2 ); + + } } - } + if ( _renderBackground ) background.render( scene ); - if ( _renderBackground ) background.render( scene ); + for ( let i = 0, l = cameras.length; i < l; i ++ ) { - for ( let i = 0, l = cameras.length; i < l; i ++ ) { + const camera2 = cameras[ i ]; - const camera2 = cameras[ i ]; + renderScene( currentRenderList, scene, camera2, camera2.viewport ); - renderScene( currentRenderList, scene, camera2, camera2.viewport ); + } - } + } else { - } else { + if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera ); - if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera ); + if ( _renderBackground ) background.render( scene ); - if ( _renderBackground ) background.render( scene ); + renderScene( currentRenderList, scene, camera ); - renderScene( currentRenderList, scene, camera ); + } } @@ -54559,6 +56056,14 @@ class WebGLRenderer { } + // copy from internal render target to canvas using fullscreen quad + + if ( useOutput ) { + + output.end( _this ); + + } + // if ( scene.isScene === true ) scene.onAfterRender( _this, scene, camera ); @@ -54713,9 +56218,7 @@ class WebGLRenderer { function renderScene( currentRenderList, scene, camera, viewport ) { - const opaqueObjects = currentRenderList.opaque; - const transmissiveObjects = currentRenderList.transmissive; - const transparentObjects = currentRenderList.transparent; + const { opaque: opaqueObjects, transmissive: transmissiveObjects, transparent: transparentObjects } = currentRenderList; currentRenderState.setupLightsView( camera ); @@ -54749,11 +56252,13 @@ class WebGLRenderer { if ( currentRenderState.state.transmissionRenderTarget[ camera.id ] === undefined ) { + const hasHalfFloatSupport = extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ); + currentRenderState.state.transmissionRenderTarget[ camera.id ] = new WebGLRenderTarget( 1, 1, { 
generateMipmaps: true, - type: ( extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ) ) ? HalfFloatType : UnsignedByteType, + type: hasHalfFloatSupport ? HalfFloatType : UnsignedByteType, minFilter: LinearMipmapLinearFilter, - samples: 4, + samples: capabilities.samples, stencilBuffer: stencil, resolveDepthBuffer: false, resolveStencilBuffer: false, @@ -54820,10 +56325,7 @@ class WebGLRenderer { const renderItem = transmissiveObjects[ i ]; - const object = renderItem.object; - const geometry = renderItem.geometry; - const material = renderItem.material; - const group = renderItem.group; + const { object, geometry, material, group } = renderItem; if ( material.side === DoubleSide && object.layers.test( camera.layers ) ) { @@ -54870,9 +56372,7 @@ class WebGLRenderer { const renderItem = renderList[ i ]; - const object = renderItem.object; - const geometry = renderItem.geometry; - const group = renderItem.group; + const { object, geometry, group } = renderItem; let material = renderItem.material; if ( material.allowOverride === true && overrideMaterial !== null ) { @@ -54940,9 +56440,11 @@ class WebGLRenderer { // always update environment and fog - changing these trigger an getProgram call, but it's possible that the program doesn't change - materialProperties.environment = material.isMeshStandardMaterial ? scene.environment : null; + materialProperties.environment = ( material.isMeshStandardMaterial || material.isMeshLambertMaterial || material.isMeshPhongMaterial ) ? scene.environment : null; materialProperties.fog = scene.fog; - materialProperties.envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || materialProperties.environment ); + + const usePMREM = material.isMeshStandardMaterial || ( material.isMeshLambertMaterial && ! material.envMap ) || ( material.isMeshPhongMaterial && ! 
material.envMap ); + materialProperties.envMap = environments.get( material.envMap || materialProperties.environment, usePMREM ); materialProperties.envMapRotation = ( materialProperties.environment !== null && material.envMap === null ) ? scene.environmentRotation : material.envMapRotation; if ( programs === undefined ) { @@ -55015,12 +56517,9 @@ class WebGLRenderer { uniforms.pointLightShadows.value = lights.state.pointShadow; uniforms.hemisphereLights.value = lights.state.hemi; - uniforms.directionalShadowMap.value = lights.state.directionalShadowMap; uniforms.directionalShadowMatrix.value = lights.state.directionalShadowMatrix; - uniforms.spotShadowMap.value = lights.state.spotShadowMap; uniforms.spotLightMatrix.value = lights.state.spotLightMatrix; uniforms.spotLightMap.value = lights.state.spotLightMap; - uniforms.pointShadowMap.value = lights.state.pointShadowMap; uniforms.pointShadowMatrix.value = lights.state.pointShadowMatrix; // TODO (abelnation): add area lights shadow info to uniforms @@ -55076,9 +56575,10 @@ class WebGLRenderer { textures.resetTextureUnits(); const fog = scene.fog; - const environment = material.isMeshStandardMaterial ? scene.environment : null; + const environment = ( material.isMeshStandardMaterial || material.isMeshLambertMaterial || material.isMeshPhongMaterial ) ? scene.environment : null; const colorSpace = ( _currentRenderTarget === null ) ? _this.outputColorSpace : ( _currentRenderTarget.isXRRenderTarget === true ? _currentRenderTarget.texture.colorSpace : LinearSRGBColorSpace ); - const envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || environment ); + const usePMREM = material.isMeshStandardMaterial || ( material.isMeshLambertMaterial && ! material.envMap ) || ( material.isMeshPhongMaterial && ! material.envMap ); + const envMap = environments.get( material.envMap || environment, usePMREM ); const vertexAlphas = material.vertexColors === true && !! 
geometry.attributes.color && geometry.attributes.color.itemSize === 4; const vertexTangents = !! geometry.attributes.tangent && ( !! material.normalMap || material.anisotropy > 0 ); const morphTargets = !! geometry.morphAttributes.position; @@ -55326,6 +56826,30 @@ class WebGLRenderer { } + // Pre-allocate texture units for shadow samplers before setting data textures + if ( materialProperties.needsLights ) { + + // Set shadow map uniforms first to ensure they get the first texture units + if ( lights.state.directionalShadowMap.length > 0 ) { + + p_uniforms.setValue( _gl, 'directionalShadowMap', lights.state.directionalShadowMap, textures ); + + } + + if ( lights.state.spotShadowMap.length > 0 ) { + + p_uniforms.setValue( _gl, 'spotShadowMap', lights.state.spotShadowMap, textures ); + + } + + if ( lights.state.pointShadowMap.length > 0 ) { + + p_uniforms.setValue( _gl, 'pointShadowMap', lights.state.pointShadowMap, textures ); + + } + + } + // skinning and morph target uniforms must be set even if material didn't change // auto-setting of texture unit for bone and morph texture must go before other textures // otherwise textures used for skinning and morphing can take over texture units reserved for other material textures @@ -55379,19 +56903,16 @@ class WebGLRenderer { } - // https://github.com/mrdoob/three.js/pull/24467#issuecomment-1209031512 - - if ( material.isMeshGouraudMaterial && material.envMap !== null ) { - - m_uniforms.envMap.value = envMap; + if ( ( material.isMeshStandardMaterial || material.isMeshLambertMaterial || material.isMeshPhongMaterial ) && material.envMap === null && scene.environment !== null ) { - m_uniforms.flipEnvMap.value = ( envMap.isCubeTexture && envMap.isRenderTargetTexture === false ) ? 
-1 : 1; + m_uniforms.envMapIntensity.value = scene.environmentIntensity; } - if ( material.isMeshStandardMaterial && material.envMap === null && scene.environment !== null ) { + // Set DFG LUT for physically-based materials + if ( m_uniforms.dfgLUT !== undefined ) { - m_uniforms.envMapIntensity.value = scene.environmentIntensity; + m_uniforms.dfgLUT.value = getDFGLUT(); } @@ -55573,7 +57094,6 @@ class WebGLRenderer { _currentActiveCubeFace = activeCubeFace; _currentActiveMipmapLevel = activeMipmapLevel; - let useDefaultFramebuffer = true; let framebuffer = null; let isCube = false; let isRenderTarget3D = false; @@ -55584,9 +57104,21 @@ class WebGLRenderer { if ( renderTargetProperties.__useDefaultFramebuffer !== undefined ) { - // We need to make sure to rebind the framebuffer. - state.bindFramebuffer( _gl.FRAMEBUFFER, null ); - useDefaultFramebuffer = false; + // Externally-managed framebuffer (e.g. XR) + // Bind to the stored framebuffer (may be null for default, or a WebGLFramebuffer) + state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer ); + + _currentViewport.copy( renderTarget.viewport ); + _currentScissor.copy( renderTarget.scissor ); + _currentScissorTest = renderTarget.scissorTest; + + state.viewport( _currentViewport ); + state.scissor( _currentScissor ); + state.setScissorTest( _currentScissorTest ); + + _currentMaterialId = -1; + + return; } else if ( renderTargetProperties.__webglFramebuffer === undefined ) { @@ -55685,7 +57217,7 @@ class WebGLRenderer { const framebufferBound = state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); - if ( framebufferBound && useDefaultFramebuffer ) { + if ( framebufferBound ) { state.drawBuffers( renderTarget, framebuffer ); @@ -55741,7 +57273,7 @@ class WebGLRenderer { if ( ! ( renderTarget && renderTarget.isWebGLRenderTarget ) ) { - console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.' 
); + error( 'WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.' ); return; } @@ -55764,16 +57296,20 @@ class WebGLRenderer { const textureFormat = texture.format; const textureType = texture.type; + // when using MRT, select the correct color buffer for the subsequent read command + + if ( renderTarget.textures.length > 1 ) _gl.readBuffer( _gl.COLOR_ATTACHMENT0 + textureIndex ); + if ( ! capabilities.textureFormatReadable( textureFormat ) ) { - console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in RGBA or implementation defined format.' ); + error( 'WebGLRenderer.readRenderTargetPixels: renderTarget is not in RGBA or implementation defined format.' ); return; } if ( ! capabilities.textureTypeReadable( textureType ) ) { - console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in UnsignedByteType or implementation defined type.' ); + error( 'WebGLRenderer.readRenderTargetPixels: renderTarget is not in UnsignedByteType or implementation defined type.' ); return; } @@ -55782,10 +57318,6 @@ class WebGLRenderer { if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) { - // when using MRT, select the correct color buffer for the subsequent read command - - if ( renderTarget.textures.length > 1 ) _gl.readBuffer( _gl.COLOR_ATTACHMENT0 + textureIndex ); - _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), buffer ); } @@ -55846,6 +57378,11 @@ class WebGLRenderer { const textureFormat = texture.format; const textureType = texture.type; + // when using MRT, select the correct color buffer for the subsequent read command + + if ( renderTarget.textures.length > 1 ) _gl.readBuffer( _gl.COLOR_ATTACHMENT0 + textureIndex ); + + if ( ! 
capabilities.textureFormatReadable( textureFormat ) ) { throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in RGBA or implementation defined format.' ); @@ -55862,10 +57399,6 @@ class WebGLRenderer { _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer ); _gl.bufferData( _gl.PIXEL_PACK_BUFFER, buffer.byteLength, _gl.STREAM_READ ); - // when using MRT, select the correct color buffer for the subsequent read command - - if ( renderTarget.textures.length > 1 ) _gl.readBuffer( _gl.COLOR_ATTACHMENT0 + textureIndex ); - _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), 0 ); // reset the frame buffer to the currently set buffer before waiting @@ -55935,27 +57468,9 @@ class WebGLRenderer { * @param {?(Box2|Box3)} [srcRegion=null] - A bounding box which describes the source region. Can be two or three-dimensional. * @param {?(Vector2|Vector3)} [dstPosition=null] - A vector that represents the origin of the destination region. Can be two or three-dimensional. * @param {number} [srcLevel=0] - The source mipmap level to copy. - * @param {?number} [dstLevel=null] - The destination mipmap level. + * @param {?number} [dstLevel=0] - The destination mipmap level. */ - this.copyTextureToTexture = function ( srcTexture, dstTexture, srcRegion = null, dstPosition = null, srcLevel = 0, dstLevel = null ) { - - // support the previous signature with just a single dst mipmap level - if ( dstLevel === null ) { - - if ( srcLevel !== 0 ) { - - // @deprecated, r171 - warnOnce( 'WebGLRenderer: copyTextureToTexture function signature has changed to support src and dst mipmap levels.' 
); - dstLevel = srcLevel; - srcLevel = 0; - - } else { - - dstLevel = 0; - - } - - } + this.copyTextureToTexture = function ( srcTexture, dstTexture, srcRegion = null, dstPosition = null, srcLevel = 0, dstLevel = 0 ) { // gather the necessary dimensions to copy let width, height, depth, minX, minY, minZ; @@ -56620,89 +58135,6 @@ class InstancedBufferAttribute extends BufferAttribute { } -/** - * Creates a texture directly from raw buffer data. - * - * The interpretation of the data depends on type and format: If the type is - * `UnsignedByteType`, a `Uint8Array` will be useful for addressing the - * texel data. If the format is `RGBAFormat`, data needs four values for - * one texel; Red, Green, Blue and Alpha (typically the opacity). - * - * @augments Texture - */ -class DataTexture extends Texture { - - /** - * Constructs a new data texture. - * - * @param {?TypedArray} [data=null] - The buffer data. - * @param {number} [width=1] - The width of the texture. - * @param {number} [height=1] - The height of the texture. - * @param {number} [format=RGBAFormat] - The texture format. - * @param {number} [type=UnsignedByteType] - The texture type. - * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. - * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. - * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. - * @param {number} [magFilter=NearestFilter] - The mag filter value. - * @param {number} [minFilter=NearestFilter] - The min filter value. - * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. - * @param {string} [colorSpace=NoColorSpace] - The color space. - */ - constructor( data = null, width = 1, height = 1, format, type, mapping, wrapS, wrapT, magFilter = NearestFilter, minFilter = NearestFilter, anisotropy, colorSpace ) { - - super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); - - /** - * This flag can be used for type testing. 
- * - * @type {boolean} - * @readonly - * @default true - */ - this.isDataTexture = true; - - /** - * The image definition of a data texture. - * - * @type {{data:TypedArray,width:number,height:number}} - */ - this.image = { data: data, width: width, height: height }; - - /** - * Whether to generate mipmaps (if possible) for a texture. - * - * Overwritten and set to `false` by default. - * - * @type {boolean} - * @default false - */ - this.generateMipmaps = false; - - /** - * If set to `true`, the texture is flipped along the vertical axis when - * uploaded to the GPU. - * - * Overwritten and set to `false` by default. - * - * @type {boolean} - * @default false - */ - this.flipY = false; - - /** - * Specifies the alignment requirements for the start of each pixel row in memory. - * - * Overwritten and set to `1` by default. - * - * @type {boolean} - * @default 1 - */ - this.unpackAlignment = 1; - - } - -} - const _instanceLocalMatrix = /*@__PURE__*/ new Matrix4(); const _instanceWorldMatrix = /*@__PURE__*/ new Matrix4(); @@ -56753,6 +58185,15 @@ class InstancedMesh extends Mesh { */ this.instanceMatrix = new InstancedBufferAttribute( new Float32Array( count * 16 ), 16 ); + /** + * Represents the local transformation of all instances of the previous frame. + * Required for computing velocity. Maintained in {@link InstanceNode}. + * + * @type {?InstancedBufferAttribute} + * @default null + */ + this.previousInstanceMatrix = null; + /** * Represents the color of all instances. 
You have to set its * {@link BufferAttribute#needsUpdate} flag to true if you modify instanced data @@ -56882,6 +58323,8 @@ class InstancedMesh extends Mesh { this.instanceMatrix.copy( source.instanceMatrix ); + if ( source.previousInstanceMatrix !== null ) this.previousInstanceMatrix = source.previousInstanceMatrix.clone(); + if ( source.morphTexture !== null ) this.morphTexture = source.morphTexture.clone(); if ( source.instanceColor !== null ) this.instanceColor = source.instanceColor.clone(); @@ -57280,7 +58723,7 @@ let Line$1 = class Line extends Object3D { * morph targets name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. * - * @type {Object|undefined} + * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; @@ -57342,7 +58785,7 @@ let Line$1 = class Line extends Object3D { } else { - console.warn( 'THREE.Line.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); + warn( 'Line.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); } @@ -57583,7 +59026,7 @@ class LineSegments extends Line$1 { } else { - console.warn( 'THREE.LineSegments.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); + warn( 'LineSegments.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); } @@ -57680,7 +59123,7 @@ class PointsMaterial extends Material { /** * Defines the size of the points in pixels. * - * Might be capped if the value exceeds hardware dependent parameters like [gl.ALIASED_POINT_SIZE_RANGE]{@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/getParamete}. + * Might be capped if the value exceeds hardware dependent parameters like [gl.ALIASED_POINT_SIZE_RANGE](https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/getParamete). 
* * @type {number} * @default 1 @@ -57781,7 +59224,7 @@ class Points extends Object3D { * morph targets name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. * - * @type {Object|undefined} + * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; @@ -58005,6 +59448,7 @@ class CanvasTexture extends Texture { * ``` * * @augments BufferGeometry + * @demo scenes/geometry-browser.html#CircleGeometry */ class CircleGeometry extends BufferGeometry { @@ -58186,7 +59630,7 @@ class Curve { */ getPoint( /* t, optionalTarget */ ) { - console.warn( 'THREE.Curve: .getPoint() not implemented.' ); + warn( 'Curve: .getPoint() not implemented.' ); } @@ -59244,7 +60688,13 @@ class CatmullRomCurve3 extends Curve { } -// Bezier Curves formulas obtained from: https://en.wikipedia.org/wiki/B%C3%A9zier_curve +/** + * Interpolations contains spline and Bézier functions internally used by concrete curve classes. + * + * Bezier Curves formulas obtained from: https://en.wikipedia.org/wiki/B%C3%A9zier_curve + * + * @module Interpolations + */ /** * Computes a point on a Catmull-Rom spline. 
@@ -61026,8 +62476,8 @@ class Shape extends Path$1 { } /* eslint-disable */ -// copy of mapbox/earcut version 3.0.1 -// https://github.com/mapbox/earcut/tree/v3.0.1 +// copy of mapbox/earcut version 3.0.2 +// https://github.com/mapbox/earcut/tree/v3.0.2 function earcut(data, holeIndices, dim = 2) { @@ -61044,10 +62494,10 @@ function earcut(data, holeIndices, dim = 2) { // if the shape is not too simple, we'll use z-order curve hash later; calculate polygon bbox if (data.length > 80 * dim) { - minX = Infinity; - minY = Infinity; - let maxX = -Infinity; - let maxY = -Infinity; + minX = data[0]; + minY = data[1]; + let maxX = minX; + let maxY = minY; for (let i = dim; i < outerLen; i += dim) { const x = data[i]; @@ -61323,7 +62773,7 @@ function compareXYSlope(a, b) { return result; } -// find a bridge between vertices that connects hole with an outer ring and and link it +// find a bridge between vertices that connects hole with an outer ring and link it function eliminateHole(hole, outerNode) { const bridge = findHoleBridge(hole, outerNode); if (!bridge) { @@ -61661,6 +63111,12 @@ function signedArea(data, start, end, dim) { return sum; } +/** + * An implementation of the earcut polygon triangulation algorithm. + * The code is a port of [mapbox/earcut](https://github.com/mapbox/earcut). + * + * @see https://github.com/mapbox/earcut + */ class Earcut { /** @@ -61811,6 +63267,7 @@ function addContour( vertices, contour ) { * ``` * * @augments BufferGeometry + * @demo scenes/geometry-browser.html#ExtrudeGeometry */ class ExtrudeGeometry extends BufferGeometry { @@ -61895,11 +63352,11 @@ class ExtrudeGeometry extends BufferGeometry { // SETUP TNB variables - // TODO1 - have a .isClosed in spline? + const isClosed = extrudePath.isCatmullRomCurve3 ? 
extrudePath.closed : false; - splineTube = extrudePath.computeFrenetFrames( steps, false ); + splineTube = extrudePath.computeFrenetFrames( steps, isClosed ); - // console.log(splineTube, 'splineTube', splineTube.normals.length, 'steps', steps, 'extrudePts', extrudePts.length); + // log(splineTube, 'splineTube', splineTube.normals.length, 'steps', steps, 'extrudePts', extrudePts.length); binormal = new Vector3(); normal = new Vector3(); @@ -62004,7 +63461,7 @@ class ExtrudeGeometry extends BufferGeometry { function scalePt2( pt, vec, size ) { - if ( ! vec ) console.error( 'THREE.ExtrudeGeometry: vec does not exist' ); + if ( ! vec ) error( 'ExtrudeGeometry: vec does not exist' ); return pt.clone().addScaledVector( vec, size ); @@ -62119,14 +63576,14 @@ class ExtrudeGeometry extends BufferGeometry { if ( direction_eq ) { - // console.log("Warning: lines are a straight sequence"); + // log("Warning: lines are a straight sequence"); v_trans_x = - v_prev_y; v_trans_y = v_prev_x; shrink_by = Math.sqrt( v_prev_lensq ); } else { - // console.log("Warning: lines are a straight spike"); + // log("Warning: lines are a straight spike"); v_trans_x = v_prev_x; v_trans_y = v_prev_y; shrink_by = Math.sqrt( v_prev_lensq / 2 ); @@ -62148,7 +63605,7 @@ class ExtrudeGeometry extends BufferGeometry { if ( k === il ) k = 0; // (j)---(i)---(k) - // console.log('i,j,k', i, j , k) + // log('i,j,k', i, j , k) contourMovements[ i ] = getBevelVec( contour[ i ], contour[ j ], contour[ k ] ); @@ -62445,7 +63902,7 @@ class ExtrudeGeometry extends BufferGeometry { let k = i - 1; if ( k < 0 ) k = contour.length - 1; - //console.log('b', i,j, i-1, k,vertices.length); + //log('b', i,j, i-1, k,vertices.length); for ( let s = 0, sl = ( steps + bevelSegments * 2 ); s < sl; s ++ ) { @@ -62685,6 +64142,7 @@ function toJSON$1( shapes, options, data ) { * ``` * * @augments BufferGeometry + * @demo scenes/geometry-browser.html#SphereGeometry */ class SphereGeometry extends BufferGeometry { @@ -62847,9 
+64305,9 @@ class SphereGeometry extends BufferGeometry { * A standard physically based material, using Metallic-Roughness workflow. * * Physically based rendering (PBR) has recently become the standard in many - * 3D applications, such as [Unity]{@link https://blogs.unity3d.com/2014/10/29/physically-based-shading-in-unity-5-a-primer/}, - * [Unreal]{@link https://docs.unrealengine.com/latest/INT/Engine/Rendering/Materials/PhysicallyBased/} and - * [3D Studio Max]{@link http://area.autodesk.com/blogs/the-3ds-max-blog/what039s-new-for-rendering-in-3ds-max-2017}. + * 3D applications, such as [Unity](https://blogs.unity3d.com/2014/10/29/physically-based-shading-in-unity-5-a-primer/), + * [Unreal](https://docs.unrealengine.com/latest/INT/Engine/Rendering/Materials/PhysicallyBased/) and + * [3D Studio Max](http://area.autodesk.com/blogs/the-3ds-max-blog/what039s-new-for-rendering-in-3ds-max-2017). * * This approach differs from older approaches in that instead of using * approximations for the way in which light interacts with a surface, a @@ -62865,16 +64323,17 @@ class SphereGeometry extends BufferGeometry { * Note that for best results you should always specify an environment map when using this material. 
* * For a non-technical introduction to the concept of PBR and how to set up a - * PBR material, check out these articles by the people at [marmoset]{@link https://www.marmoset.co}: + * PBR material, check out these articles by the people at [marmoset](https://www.marmoset.co): * - * - [Basic Theory of Physically Based Rendering]{@link https://www.marmoset.co/posts/basic-theory-of-physically-based-rendering/} - * - [Physically Based Rendering and You Can Too]{@link https://www.marmoset.co/posts/physically-based-rendering-and-you-can-too/} + * - [Basic Theory of Physically Based Rendering](https://www.marmoset.co/posts/basic-theory-of-physically-based-rendering/) + * - [Physically Based Rendering and You Can Too](https://www.marmoset.co/posts/physically-based-rendering-and-you-can-too/) * * Technical details of the approach used in three.js (and most other PBR systems) can be found is this - * [paper from Disney]{@link https://media.disneyanimation.com/uploads/production/publication_asset/48/asset/s2012_pbs_disney_brdf_notes_v3.pdf} + * [paper from Disney](https://media.disneyanimation.com/uploads/production/publication_asset/48/asset/s2012_pbs_disney_brdf_notes_v3.pdf) * (pdf), by Brent Burley. * * @augments Material + * @demo scenes/material-browser.html#MeshStandardMaterial */ class MeshStandardMaterial extends Material { @@ -63284,6 +64743,7 @@ class MeshStandardMaterial extends Material { * best results, always specify an environment map when using this material. * * @augments MeshStandardMaterial + * @demo scenes/material-browser.html#MeshPhysicalMaterial */ class MeshPhysicalMaterial extends MeshStandardMaterial { @@ -63580,7 +65040,7 @@ class MeshPhysicalMaterial extends MeshStandardMaterial { } /** - * The anisotropy strength. + * The anisotropy strength, from `0.0` to `1.0`. * * @type {number} * @default 0 @@ -63792,7 +65252,7 @@ class MeshPhysicalMaterial extends MeshStandardMaterial { /** * A material for shiny surfaces with specular highlights. 
* - * The material uses a non-physically based [Blinn-Phong]{@link https://en.wikipedia.org/wiki/Blinn-Phong_shading_model} + * The material uses a non-physically based [Blinn-Phong](https://en.wikipedia.org/wiki/Blinn-Phong_shading_model) * model for calculating reflectance. Unlike the Lambertian model used in the * {@link MeshLambertMaterial} this can simulate shiny surfaces with specular * highlights (such as varnished wood). `MeshPhongMaterial` uses per-fragment shading. @@ -63802,6 +65262,7 @@ class MeshPhysicalMaterial extends MeshStandardMaterial { * some graphical accuracy. * * @augments Material + * @demo scenes/material-browser.html#MeshPhongMaterial */ class MeshPhongMaterial extends Material { @@ -64068,6 +65529,14 @@ class MeshPhongMaterial extends Material { */ this.reflectivity = 1; + /** + * Scales the effect of the environment map by multiplying its color. + * + * @type {number} + * @default 1 + */ + this.envMapIntensity = 1.0; + /** * The index of refraction (IOR) of air (approximately 1) divided by the * index of refraction of the material. It is used with environment mapping @@ -64176,6 +65645,7 @@ class MeshPhongMaterial extends Material { this.envMapRotation.copy( source.envMapRotation ); this.combine = source.combine; this.reflectivity = source.reflectivity; + this.envMapIntensity = source.envMapIntensity; this.refractionRatio = source.refractionRatio; this.wireframe = source.wireframe; @@ -64197,6 +65667,7 @@ class MeshPhongMaterial extends Material { * A material implementing toon shading. * * @augments Material + * @demo scenes/material-browser.html#MeshToonMaterial */ class MeshToonMaterial extends Material { @@ -64511,6 +65982,7 @@ class MeshToonMaterial extends Material { * A material that maps the normal vectors to RGB colors. 
* * @augments Material + * @demo scenes/material-browser.html#MeshNormalMaterial */ class MeshNormalMaterial extends Material { @@ -64679,7 +66151,7 @@ class MeshNormalMaterial extends Material { /** * A material for non-shiny surfaces, without specular highlights. * - * The material uses a non-physically based [Lambertian]{@link https://en.wikipedia.org/wiki/Lambertian_reflectance} + * The material uses a non-physically based [Lambertian](https://en.wikipedia.org/wiki/Lambertian_reflectance) * model for calculating reflectance. This can simulate some surfaces (such * as untreated wood or stone) well, but cannot simulate shiny surfaces with * specular highlights (such as varnished wood). `MeshLambertMaterial` uses per-fragment @@ -64691,6 +66163,7 @@ class MeshNormalMaterial extends Material { * {@link MeshPhysicalMaterial}, at the cost of some graphical accuracy. * * @augments Material + * @demo scenes/material-browser.html#MeshLambertMaterial */ class MeshLambertMaterial extends Material { @@ -64938,6 +66411,14 @@ class MeshLambertMaterial extends Material { */ this.reflectivity = 1; + /** + * Scales the effect of the environment map by multiplying its color. + * + * @type {number} + * @default 1 + */ + this.envMapIntensity = 1.0; + /** * The index of refraction (IOR) of air (approximately 1) divided by the * index of refraction of the material. It is used with environment mapping @@ -65044,6 +66525,7 @@ class MeshLambertMaterial extends Material { this.envMapRotation.copy( source.envMapRotation ); this.combine = source.combine; this.reflectivity = source.reflectivity; + this.envMapIntensity = source.envMapIntensity; this.refractionRatio = source.refractionRatio; this.wireframe = source.wireframe; @@ -65071,6 +66553,7 @@ class MeshLambertMaterial extends Material { * shadows. 
* * @augments Material + * @demo scenes/material-browser.html#MeshMatcapMaterial */ class MeshMatcapMaterial extends Material { @@ -65222,6 +66705,24 @@ class MeshMatcapMaterial extends Material { */ this.alphaMap = null; + /** + * Renders the geometry as a wireframe. + * + * @type {boolean} + * @default false + */ + this.wireframe = false; + + /** + * Controls the thickness of the wireframe. + * + * Can only be used with {@link SVGRenderer}. + * + * @type {number} + * @default 1 + */ + this.wireframeLinewidth = 1; + /** * Whether the material is rendered with flat shading or not. * @@ -65268,6 +66769,9 @@ class MeshMatcapMaterial extends Material { this.alphaMap = source.alphaMap; + this.wireframe = source.wireframe; + this.wireframeLinewidth = source.wireframeLinewidth; + this.flatShading = source.flatShading; this.fog = source.fog; @@ -65398,7 +66902,9 @@ const Cache = { if ( this.enabled === false ) return; - // console.log( 'THREE.Cache', 'Adding key:', key ); + if ( isBlobURL( key ) ) return; + + // log( 'Cache', 'Adding key:', key ); this.files[ key ] = file; @@ -65415,7 +66921,9 @@ const Cache = { if ( this.enabled === false ) return; - // console.log( 'THREE.Cache', 'Checking key:', key ); + if ( isBlobURL( key ) ) return; + + // log( 'Cache', 'Checking key:', key ); return this.files[ key ]; @@ -65446,6 +66954,31 @@ const Cache = { }; +/** + * Returns true if the given cache key contains the blob: scheme. + * + * @private + * @param {string} key - The cache key. + * @return {boolean} Whether the given cache key contains the blob: scheme or not. + */ +function isBlobURL( key ) { + + try { + + const urlString = key.slice( key.indexOf( ':' ) + 1 ); // remove type identifier + + const url = new URL( urlString ); + return url.protocol === 'blob:'; + + } catch ( e ) { + + // If the string is not a valid URL, it throws an error + return false; + + } + +} + /** * Handles and keeps track of loaded and pending data. 
A default global * instance of this class is created and used by loaders if not supplied @@ -65520,9 +67053,10 @@ class LoadingManager { /** * Used for aborting ongoing requests in loaders using this manager. * - * @type {AbortController} + * @private + * @type {AbortController | null} */ - this.abortController = new AbortController(); + this._abortController = null; /** * This should be called by any loader using the manager when the loader @@ -65733,8 +67267,9 @@ class LoadingManager { */ this.abort = function () { + this.abortController.abort(); - this.abortController = new AbortController(); + this._abortController = null; return this; @@ -65742,6 +67277,26 @@ class LoadingManager { } + // TODO: Revert this back to a single member variable once this issue has been fixed + // https://github.com/cloudflare/workerd/issues/3657 + + /** + * Used for aborting ongoing requests in loaders using this manager. + * + * @type {AbortController} + */ + get abortController() { + + if ( ! this._abortController ) { + + this._abortController = new AbortController(); + + } + + return this._abortController; + + } + } /** @@ -65806,13 +67361,19 @@ class Loader { this.resourcePath = ''; /** - * The [request header]{@link https://developer.mozilla.org/en-US/docs/Glossary/Request_header} + * The [request header](https://developer.mozilla.org/en-US/docs/Glossary/Request_header) * used in HTTP request. * * @type {Object} */ this.requestHeader = {}; + if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) { + + __THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) ); + + } + } /** @@ -65871,7 +67432,7 @@ class Loader { /** * Whether the XMLHttpRequest uses credentials such as cookies, authorization - * headers or TLS client certificates, see [XMLHttpRequest.withCredentials]{@link https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials}. 
+ * headers or TLS client certificates, see [XMLHttpRequest.withCredentials](https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials). * * Note: This setting has no effect if you are loading files locally or from the same domain. * @@ -65914,7 +67475,7 @@ class Loader { /** * Sets the given request header. * - * @param {Object} requestHeader - A [request header]{@link https://developer.mozilla.org/en-US/docs/Glossary/Request_header} + * @param {Object} requestHeader - A [request header](https://developer.mozilla.org/en-US/docs/Glossary/Request_header) * for configuring the HTTP request. * @return {Loader} A reference to this instance. */ @@ -65976,7 +67537,7 @@ const _loading = new WeakMap(); * ``` * Please note that `ImageLoader` has dropped support for progress * events in `r84`. For an `ImageLoader` that supports progress events, see - * [this thread]{@link https://github.com/mrdoob/three.js/issues/10439#issuecomment-275785639}. + * [this thread](https://github.com/mrdoob/three.js/issues/10439#issuecomment-275785639). * * @augments Loader */ @@ -66139,7 +67700,7 @@ class ImageLoader extends Loader { * ``` * Please note that `TextureLoader` has dropped support for progress * events in `r84`. For a `TextureLoader` that supports progress events, see - * [this thread]{@link https://github.com/mrdoob/three.js/issues/10439#issuecomment-293260145}. + * [this thread](https://github.com/mrdoob/three.js/issues/10439#issuecomment-293260145). * * @augments Loader */ @@ -66248,7 +67809,7 @@ class Light extends Object3D { */ dispose() { - // Empty here in base class; some subclasses override. 
+ this.dispatchEvent( { type: 'dispose' } ); } @@ -66270,16 +67831,6 @@ class Light extends Object3D { data.object.color = this.color.getHex(); data.object.intensity = this.intensity; - if ( this.groundColor !== undefined ) data.object.groundColor = this.groundColor.getHex(); - - if ( this.distance !== undefined ) data.object.distance = this.distance; - if ( this.angle !== undefined ) data.object.angle = this.angle; - if ( this.decay !== undefined ) data.object.decay = this.decay; - if ( this.penumbra !== undefined ) data.object.penumbra = this.penumbra; - - if ( this.shadow !== undefined ) data.object.shadow = this.shadow.toJSON(); - if ( this.target !== undefined ) data.object.target = this.target.uuid; - return data; } @@ -66345,11 +67896,21 @@ class HemisphereLight extends Light { } + toJSON( meta ) { + + const data = super.toJSON( meta ); + + data.object.groundColor = this.groundColor.getHex(); + + return data; + + } + } -const _projScreenMatrix$1 = /*@__PURE__*/ new Matrix4(); -const _lightPositionWorld$1 = /*@__PURE__*/ new Vector3(); -const _lookTarget$1 = /*@__PURE__*/ new Vector3(); +const _projScreenMatrix = /*@__PURE__*/ new Matrix4(); +const _lightPositionWorld = /*@__PURE__*/ new Vector3(); +const _lookTarget = /*@__PURE__*/ new Vector3(); /** * Abstract base class for light shadow classes. These classes @@ -66394,6 +67955,16 @@ class LightShadow { */ this.bias = 0; + /** + * A node version of `bias`. Only supported with `WebGPURenderer`. + * + * If a bias node is defined, `bias` has no effect. + * + * @type {?Node} + * @default null + */ + this.biasNode = null; + /** * Defines how much the position used to query the shadow map is offset along * the object normal. The default is `0`. Increasing this value can be used to @@ -66411,9 +67982,6 @@ class LightShadow { * map size will allow for a higher value to be used here before these effects * become visible. 
* - * The property has no effect when the shadow map type is `PCFSoftShadowMap` and - * and it is recommended to increase softness by decreasing the shadow map size instead. - * * The property has no effect when the shadow map type is `BasicShadowMap`. * * @type {number} @@ -66538,22 +68106,22 @@ class LightShadow { const shadowCamera = this.camera; const shadowMatrix = this.matrix; - _lightPositionWorld$1.setFromMatrixPosition( light.matrixWorld ); - shadowCamera.position.copy( _lightPositionWorld$1 ); + _lightPositionWorld.setFromMatrixPosition( light.matrixWorld ); + shadowCamera.position.copy( _lightPositionWorld ); - _lookTarget$1.setFromMatrixPosition( light.target.matrixWorld ); - shadowCamera.lookAt( _lookTarget$1 ); + _lookTarget.setFromMatrixPosition( light.target.matrixWorld ); + shadowCamera.lookAt( _lookTarget ); shadowCamera.updateMatrixWorld(); - _projScreenMatrix$1.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse ); - this._frustum.setFromProjectionMatrix( _projScreenMatrix$1, shadowCamera.coordinateSystem, shadowCamera.reversedDepth ); + _projScreenMatrix.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse ); + this._frustum.setFromProjectionMatrix( _projScreenMatrix, shadowCamera.coordinateSystem, shadowCamera.reversedDepth ); - if ( shadowCamera.reversedDepth ) { + if ( shadowCamera.coordinateSystem === WebGPUCoordinateSystem || shadowCamera.reversedDepth ) { shadowMatrix.set( 0.5, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5, - 0.0, 0.0, 1.0, 0.0, + 0.0, 0.0, 1.0, 0.0, // Identity Z (preserving the correct [0, 1] range from the projection matrix) 0.0, 0.0, 0.0, 1.0 ); @@ -66568,7 +68136,7 @@ class LightShadow { } - shadowMatrix.multiply( _projScreenMatrix$1 ); + shadowMatrix.multiply( _projScreenMatrix ); } @@ -66637,6 +68205,8 @@ class LightShadow { this.mapSize.copy( source.mapSize ); + this.biasNode = source.biasNode; + return this; } @@ -66782,6 +68352,8 @@ class DirectionalLight extends 
Light { dispose() { + super.dispose(); + this.shadow.dispose(); } @@ -66797,6 +68369,17 @@ class DirectionalLight extends Light { } + toJSON( meta ) { + + const data = super.toJSON( meta ); + + data.object.shadow = this.shadow.toJSON(); + data.object.target = this.target.uuid; + + return data; + + } + } /** @@ -66873,7 +68456,7 @@ class Raycaster { this.near = near; /** - * All results returned are further away than near. Near can't be negative. + * All results returned are closer than far. Far can't be lower than near. * * @type {number} * @default Infinity @@ -66967,7 +68550,7 @@ class Raycaster { } else { - console.error( 'THREE.Raycaster: Unsupported camera type: ' + camera.type ); + error( 'Raycaster: Unsupported camera type: ' + camera.type ); } @@ -67002,7 +68585,7 @@ class Raycaster { * @property {Object3D} object - The 3D object that has been intersected. * @property {Vector2} uv - U,V coordinates at point of intersection. * @property {Vector2} uv1 - Second set of U,V coordinates at point of intersection. - * @property {Vector3} uv1 - Interpolated normal vector at point of intersection. + * @property {Vector3} normal - Interpolated normal vector at point of intersection. * @property {number} instanceId - The index number of the instance where the ray * intersects the {@link InstancedMesh}. */ @@ -67095,135 +68678,191 @@ function intersect( object, raycaster, intersects, recursive ) { } /** - * Class for keeping track of time. + * This class is an alternative to {@link Clock} with a different API design and behavior. + * The goal is to avoid the conceptual flaws that became apparent in `Clock` over time. + * + * - `Timer` has an `update()` method that updates its internal state. That makes it possible to + * call `getDelta()` and `getElapsed()` multiple times per simulation step without getting different values. + * - The class can make use of the Page Visibility API to avoid large time delta values when the app + * is inactive (e.g. 
tab switched or browser hidden). + * + * ```js + * const timer = new Timer(); + * timer.connect( document ); // use Page Visibility API + * ``` */ -class Clock { +class Timer { /** - * Constructs a new clock. + * Constructs a new timer. + */ + constructor() { + + this._previousTime = 0; + this._currentTime = 0; + this._startTime = performance.now(); + + this._delta = 0; + this._elapsed = 0; + + this._timescale = 1; + + this._document = null; + this._pageVisibilityHandler = null; + + } + + /** + * Connect the timer to the given document.Calling this method is not mandatory to + * use the timer but enables the usage of the Page Visibility API to avoid large time + * delta values. * - * @param {boolean} [autoStart=true] - Whether to automatically start the clock when - * `getDelta()` is called for the first time. + * @param {Document} document - The document. */ - constructor( autoStart = true ) { + connect( document ) { - /** - * If set to `true`, the clock starts automatically when `getDelta()` is called - * for the first time. - * - * @type {boolean} - * @default true - */ - this.autoStart = autoStart; + this._document = document; - /** - * Holds the time at which the clock's `start()` method was last called. - * - * @type {number} - * @default 0 - */ - this.startTime = 0; + // use Page Visibility API to avoid large time delta values - /** - * Holds the time at which the clock's `start()`, `getElapsedTime()` or - * `getDelta()` methods were last called. - * - * @type {number} - * @default 0 - */ - this.oldTime = 0; + if ( document.hidden !== undefined ) { - /** - * Keeps track of the total time that the clock has been running. - * - * @type {number} - * @default 0 - */ - this.elapsedTime = 0; + this._pageVisibilityHandler = handleVisibilityChange.bind( this ); - /** - * Whether the clock is running or not. 
- * - * @type {boolean} - * @default true - */ - this.running = false; + document.addEventListener( 'visibilitychange', this._pageVisibilityHandler, false ); + + } } /** - * Starts the clock. When `autoStart` is set to `true`, the method is automatically - * called by the class. + * Disconnects the timer from the DOM and also disables the usage of the Page Visibility API. */ - start() { + disconnect() { - this.startTime = performance.now(); + if ( this._pageVisibilityHandler !== null ) { - this.oldTime = this.startTime; - this.elapsedTime = 0; - this.running = true; + this._document.removeEventListener( 'visibilitychange', this._pageVisibilityHandler ); + this._pageVisibilityHandler = null; + + } + + this._document = null; } /** - * Stops the clock. + * Returns the time delta in seconds. + * + * @return {number} The time delta in second. */ - stop() { + getDelta() { - this.getElapsedTime(); - this.running = false; - this.autoStart = false; + return this._delta / 1000; } /** * Returns the elapsed time in seconds. * - * @return {number} The elapsed time. + * @return {number} The elapsed time in second. */ - getElapsedTime() { + getElapsed() { - this.getDelta(); - return this.elapsedTime; + return this._elapsed / 1000; } /** - * Returns the delta time in seconds. + * Returns the timescale. * - * @return {number} The delta time. + * @return {number} The timescale. */ - getDelta() { + getTimescale() { - let diff = 0; + return this._timescale; - if ( this.autoStart && ! this.running ) { + } - this.start(); - return 0; + /** + * Sets the given timescale which scale the time delta computation + * in `update()`. + * + * @param {number} timescale - The timescale to set. + * @return {Timer} A reference to this timer. 
+ */ + setTimescale( timescale ) { - } + this._timescale = timescale; - if ( this.running ) { + return this; - const newTime = performance.now(); + } - diff = ( newTime - this.oldTime ) / 1000; - this.oldTime = newTime; + /** + * Resets the time computation for the current simulation step. + * + * @return {Timer} A reference to this timer. + */ + reset() { + + this._currentTime = performance.now() - this._startTime; + + return this; + + } + + /** + * Can be used to free all internal resources. Usually called when + * the timer instance isn't required anymore. + */ + dispose() { - this.elapsedTime += diff; + this.disconnect(); + + } + + /** + * Updates the internal state of the timer. This method should be called + * once per simulation step and before you perform queries against the timer + * (e.g. via `getDelta()`). + * + * @param {number} timestamp - The current time in milliseconds. Can be obtained + * from the `requestAnimationFrame` callback argument. If not provided, the current + * time will be determined with `performance.now`. + * @return {Timer} A reference to this timer. + */ + update( timestamp ) { + + if ( this._pageVisibilityHandler !== null && this._document.hidden === true ) { + + this._delta = 0; + + } else { + + this._previousTime = this._currentTime; + this._currentTime = ( timestamp !== undefined ? timestamp : performance.now() ) - this._startTime; + + this._delta = ( this._currentTime - this._previousTime ) * this._timescale; + this._elapsed += this._delta; // _elapsed is the accumulation of all previous deltas } - return diff; + return this; } } +function handleVisibilityChange() { + + if ( this._document.hidden === false ) this.reset(); + +} + /** * This class can be used to represent points in 3D space as - * [Spherical coordinates]{@link https://en.wikipedia.org/wiki/Spherical_coordinate_system}. + * [Spherical coordinates](https://en.wikipedia.org/wiki/Spherical_coordinate_system). 
*/ class Spherical { @@ -68026,12 +69665,10 @@ class Line3 { } - c1.copy( p1 ).add( _d1.multiplyScalar( s ) ); - c2.copy( p2 ).add( _d2.multiplyScalar( t ) ); + c1.copy( p1 ).addScaledVector( _d1, s ); + c2.copy( p2 ).addScaledVector( _d2, t ); - c1.sub( c2 ); - - return c1.dot( c1 ); + return c1.distanceToSquared( c2 ); } @@ -68396,7 +70033,7 @@ class ShapePath { let holesFirst = ! isClockWise( subPaths[ 0 ].getPoints() ); holesFirst = isCCW ? ! holesFirst : holesFirst; - // console.log("Holes first", holesFirst); + // log("Holes first", holesFirst); const betterShapeHoles = []; const newShapes = []; @@ -68424,13 +70061,13 @@ class ShapePath { if ( holesFirst ) mainIdx ++; newShapeHoles[ mainIdx ] = []; - //console.log('cw', i); + //log('cw', i); } else { newShapeHoles[ mainIdx ].push( { h: tmpPath, p: tmpPoints[ 0 ] } ); - //console.log('ccw', i); + //log('ccw', i); } @@ -68515,7 +70152,7 @@ class ShapePath { } - //console.log("shape", shapes); + //log("shape", shapes); return shapes; @@ -68535,7 +70172,7 @@ class Controls extends EventDispatcher { * Constructs a new controls instance. * * @param {Object3D} object - The object that is managed by the controls. - * @param {?HTMLDOMElement} domElement - The HTML element used for event listeners. + * @param {?HTMLElement} domElement - The HTML element used for event listeners. */ constructor( object, domElement = null ) { @@ -68551,7 +70188,7 @@ class Controls extends EventDispatcher { /** * The HTML element used for event listeners. * - * @type {?HTMLDOMElement} + * @type {?HTMLElement} * @default null */ this.domElement = domElement; @@ -68601,13 +70238,13 @@ class Controls extends EventDispatcher { * Connects the controls to the DOM. This method has so called "side effects" since * it adds the module's event listeners to the DOM. * - * @param {HTMLDOMElement} element - The DOM element to connect to. + * @param {HTMLElement} element - The DOM element to connect to. 
*/ connect( element ) { if ( element === undefined ) { - console.warn( 'THREE.Controls: connect() now requires an element.' ); // @deprecated, the warning can be removed with r185 + warn( 'Controls: connect() now requires an element.' ); // @deprecated, the warning can be removed with r185 return; } @@ -68677,7 +70314,7 @@ if ( typeof window !== 'undefined' ) { * * See the {@link FontLoader} page for additional details. * - * `TextGeometry` uses [typeface.json]{@link http://gero3.github.io/facetype.js/} generated fonts. + * `TextGeometry` uses [typeface.json](http://gero3.github.io/facetype.js/) generated fonts. * Some existing fonts can be found located in `/examples/fonts`. * * ```js @@ -68712,7 +70349,7 @@ class TextGeometry extends ExtrudeGeometry { } else { - const shapes = font.generateShapes( text, parameters.size ); + const shapes = font.generateShapes( text, parameters.size, parameters.direction ); // defaults @@ -68769,12 +70406,13 @@ class Font { * * @param {string} text - The text. * @param {number} [size=100] - The text size. + * @param {string} [direction='ltr'] - Char direction: ltr(left to right), rtl(right to left) & tb(top bottom). * @return {Array} An array of shapes representing the text. 
*/ - generateShapes( text, size = 100 ) { + generateShapes( text, size = 100, direction = 'ltr' ) { const shapes = []; - const paths = createPaths( text, size, this.data ); + const paths = createPaths( text, size, this.data, direction ); for ( let p = 0, pl = paths.length; p < pl; p ++ ) { @@ -68788,7 +70426,7 @@ class Font { } -function createPaths( text, size, data ) { +function createPaths( text, size, data, direction ) { const chars = Array.from( text ); const scale = size / data.resolution; @@ -68798,6 +70436,12 @@ function createPaths( text, size, data ) { let offsetX = 0, offsetY = 0; + if ( direction == 'rtl' || direction == 'tb' ) { + + chars.reverse(); + + } + for ( let i = 0; i < chars.length; i ++ ) { const char = chars[ i ]; @@ -68810,7 +70454,18 @@ function createPaths( text, size, data ) { } else { const ret = createPath( char, scale, offsetX, offsetY, data ); - offsetX += ret.offsetX; + + if ( direction == 'tb' ) { + + offsetX = 0; + offsetY += data.ascender * scale; + + } else { + + offsetX += ret.offsetX; + + } + paths.push( ret.path ); } @@ -68979,7 +70634,7 @@ class OrbitControls extends Controls { * Constructs a new controls instance. * * @param {Object3D} object - The object that is managed by the controls. - * @param {?HTMLDOMElement} domElement - The HTML element used for event listeners. + * @param {?HTMLElement} domElement - The HTML element used for event listeners. */ constructor( object, domElement = null ) { @@ -69279,6 +70934,8 @@ class OrbitControls extends Controls { */ this.zoom0 = this.object.zoom; + this._cursorStyle = 'auto'; + // the target DOM element for key events this._domElementKeyEvents = null; @@ -69350,6 +71007,34 @@ class OrbitControls extends Controls { } + /** + * Defines the visual representation of the cursor. 
+ * + * @type {('auto'|'grab')} + * @default 'auto' + */ + set cursorStyle( type ) { + + this._cursorStyle = type; + + if ( type === 'grab' ) { + + this.domElement.style.cursor = 'grab'; + + } else { + + this.domElement.style.cursor = 'auto'; + + } + + } + + get cursorStyle() { + + return this._cursorStyle; + + } + connect( element ) { super.connect( element ); @@ -69370,8 +71055,8 @@ class OrbitControls extends Controls { disconnect() { this.domElement.removeEventListener( 'pointerdown', this._onPointerDown ); - this.domElement.removeEventListener( 'pointermove', this._onPointerMove ); - this.domElement.removeEventListener( 'pointerup', this._onPointerUp ); + this.domElement.ownerDocument.removeEventListener( 'pointermove', this._onPointerMove ); + this.domElement.ownerDocument.removeEventListener( 'pointerup', this._onPointerUp ); this.domElement.removeEventListener( 'pointercancel', this._onPointerUp ); this.domElement.removeEventListener( 'wheel', this._onMouseWheel ); @@ -69429,7 +71114,7 @@ class OrbitControls extends Controls { * Adds key event listeners to the given DOM element. * `window` is a recommended argument for using this method. * - * @param {HTMLDOMElement} domElement - The DOM element + * @param {HTMLElement} domElement - The DOM element */ listenToKeyEvents( domElement ) { @@ -69482,6 +71167,67 @@ class OrbitControls extends Controls { } + /** + * Programmatically pan the camera. + * + * @param {number} deltaX - The horizontal pan amount in pixels. + * @param {number} deltaY - The vertical pan amount in pixels. + */ + pan( deltaX, deltaY ) { + + this._pan( deltaX, deltaY ); + this.update(); + + } + + /** + * Programmatically dolly in (zoom in for perspective camera). + * + * @param {number} dollyScale - The dolly scale factor. + */ + dollyIn( dollyScale ) { + + this._dollyIn( dollyScale ); + this.update(); + + } + + /** + * Programmatically dolly out (zoom out for perspective camera). + * + * @param {number} dollyScale - The dolly scale factor. 
+ */ + dollyOut( dollyScale ) { + + this._dollyOut( dollyScale ); + this.update(); + + } + + /** + * Programmatically rotate the camera left (around the vertical axis). + * + * @param {number} angle - The rotation angle in radians. + */ + rotateLeft( angle ) { + + this._rotateLeft( angle ); + this.update(); + + } + + /** + * Programmatically rotate the camera up (around the horizontal axis). + * + * @param {number} angle - The rotation angle in radians. + */ + rotateUp( angle ) { + + this._rotateUp( angle ); + this.update(); + + } + update( deltaTime = null ) { const position = this.object.position; @@ -70346,8 +72092,8 @@ function onPointerDown( event ) { this.domElement.setPointerCapture( event.pointerId ); - this.domElement.addEventListener( 'pointermove', this._onPointerMove ); - this.domElement.addEventListener( 'pointerup', this._onPointerUp ); + this.domElement.ownerDocument.addEventListener( 'pointermove', this._onPointerMove ); + this.domElement.ownerDocument.addEventListener( 'pointerup', this._onPointerUp ); } @@ -70369,6 +72115,12 @@ function onPointerDown( event ) { } + if ( this._cursorStyle === 'grab' ) { + + this.domElement.style.cursor = 'grabbing'; + + } + } function onPointerMove( event ) { @@ -70397,13 +72149,19 @@ function onPointerUp( event ) { this.domElement.releasePointerCapture( event.pointerId ); - this.domElement.removeEventListener( 'pointermove', this._onPointerMove ); - this.domElement.removeEventListener( 'pointerup', this._onPointerUp ); + this.domElement.ownerDocument.removeEventListener( 'pointermove', this._onPointerMove ); + this.domElement.ownerDocument.removeEventListener( 'pointerup', this._onPointerUp ); this.dispatchEvent( _endEvent ); this.state = _STATE.NONE; + if ( this._cursorStyle === 'grab' ) { + + this.domElement.style.cursor = 'grab'; + + } + break; case 1: @@ -71411,12 +73169,12 @@ class EffectComposer { this.copyPass.material.blending = NoBlending; /** - * The internal clock for managing time data. 
+ * The internal timer for managing time data. * * @private - * @type {Clock} + * @type {Timer} */ - this.clock = new Clock(); + this.timer = new Timer(); } @@ -71505,9 +73263,11 @@ class EffectComposer { // deltaTime value is in seconds + this.timer.update(); + if ( deltaTime === undefined ) { - deltaTime = this.clock.getDelta(); + deltaTime = this.timer.getDelta(); } @@ -71740,6 +73500,16 @@ class RenderPass extends Pass { * @default false */ this.needsSwap = false; + + /** + * This flag indicates that this pass renders the scene itself. + * + * @type {boolean} + * @readonly + * @default true + */ + this.isRenderPass = true; + this._oldClearColor = new Color(); } @@ -71897,7 +73667,7 @@ const LuminosityHighPassShader = { * When using this pass, tone mapping must be enabled in the renderer settings. * * Reference: - * - [Bloom in Unreal Engine]{@link https://docs.unrealengine.com/latest/INT/Engine/Rendering/PostProcessEffects/Bloom/} + * - [Bloom in Unreal Engine](https://docs.unrealengine.com/latest/INT/Engine/Rendering/PostProcessEffects/Bloom/) * * ```js * const resolution = new THREE.Vector2( window.innerWidth, window.innerHeight ); @@ -71931,7 +73701,7 @@ class UnrealBloomPass extends Pass { this.strength = strength; /** - * The Bloom radius. + * The Bloom radius. Must be in the range `[0,1]`. * * @type {number} */ @@ -72020,7 +73790,9 @@ class UnrealBloomPass extends Pass { // gaussian blur materials this.separableBlurMaterials = []; - const kernelSizeArray = [ 3, 5, 7, 9, 11 ]; + // These sizes have been changed to account for the altered coefficients-calculation to avoid blockiness, + // while retaining the same blur-strength. 
For details see https://github.com/mrdoob/three.js/pull/31528 + const kernelSizeArray = [ 6, 10, 14, 18, 22 ]; resx = Math.round( this.resolution.x / 2 ); resy = Math.round( this.resolution.y / 2 ); @@ -72060,6 +73832,7 @@ class UnrealBloomPass extends Pass { uniforms: this.copyUniforms, vertexShader: CopyShader.vertexShader, fragmentShader: CopyShader.fragmentShader, + premultipliedAlpha: true, blending: AdditiveBlending, depthTest: false, depthWrite: false, @@ -72251,10 +74024,11 @@ class UnrealBloomPass extends Pass { _getSeparableBlurMaterial( kernelRadius ) { const coefficients = []; + const sigma = kernelRadius / 3; for ( let i = 0; i < kernelRadius; i ++ ) { - coefficients.push( 0.39894 * Math.exp( -0.5 * i * i / ( kernelRadius * kernelRadius ) ) / kernelRadius ); + coefficients.push( 0.39894 * Math.exp( -0.5 * i * i / ( sigma * sigma ) ) / sigma ); } @@ -72271,34 +74045,46 @@ class UnrealBloomPass extends Pass { 'gaussianCoefficients': { value: coefficients } // precomputed Gaussian coefficients }, - vertexShader: - `varying vec2 vUv; + vertexShader: /* glsl */` + + varying vec2 vUv; + void main() { + vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); + }`, - fragmentShader: - `#include + fragmentShader: /* glsl */` + + #include + varying vec2 vUv; + uniform sampler2D colorTexture; uniform vec2 invSize; uniform vec2 direction; uniform float gaussianCoefficients[KERNEL_RADIUS]; void main() { + float weightSum = gaussianCoefficients[0]; vec3 diffuseSum = texture2D( colorTexture, vUv ).rgb * weightSum; - for( int i = 1; i < KERNEL_RADIUS; i ++ ) { - float x = float(i); + + for ( int i = 1; i < KERNEL_RADIUS; i ++ ) { + + float x = float( i ); float w = gaussianCoefficients[i]; vec2 uvOffset = direction * invSize * x; vec3 sample1 = texture2D( colorTexture, vUv + uvOffset ).rgb; vec3 sample2 = texture2D( colorTexture, vUv - uvOffset ).rgb; - diffuseSum += (sample1 + sample2) * w; - weightSum += 2.0 * w; + diffuseSum += ( sample1 
+ sample2 ) * w; + } - gl_FragColor = vec4(diffuseSum/weightSum, 1.0); + + gl_FragColor = vec4( diffuseSum, 1.0 ); + }` } ); @@ -72324,15 +74110,21 @@ class UnrealBloomPass extends Pass { 'bloomRadius': { value: 0.0 } }, - vertexShader: - `varying vec2 vUv; + vertexShader: /* glsl */` + + varying vec2 vUv; + void main() { + vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); + }`, - fragmentShader: - `varying vec2 vUv; + fragmentShader: /* glsl */` + + varying vec2 vUv; + uniform sampler2D blurTexture1; uniform sampler2D blurTexture2; uniform sampler2D blurTexture3; @@ -72343,17 +74135,27 @@ class UnrealBloomPass extends Pass { uniform float bloomFactors[NUM_MIPS]; uniform vec3 bloomTintColors[NUM_MIPS]; - float lerpBloomFactor(const in float factor) { + float lerpBloomFactor( const in float factor ) { + float mirrorFactor = 1.2 - factor; - return mix(factor, mirrorFactor, bloomRadius); + return mix( factor, mirrorFactor, bloomRadius ); + } void main() { - gl_FragColor = bloomStrength * ( lerpBloomFactor(bloomFactors[0]) * vec4(bloomTintColors[0], 1.0) * texture2D(blurTexture1, vUv) + - lerpBloomFactor(bloomFactors[1]) * vec4(bloomTintColors[1], 1.0) * texture2D(blurTexture2, vUv) + - lerpBloomFactor(bloomFactors[2]) * vec4(bloomTintColors[2], 1.0) * texture2D(blurTexture3, vUv) + - lerpBloomFactor(bloomFactors[3]) * vec4(bloomTintColors[3], 1.0) * texture2D(blurTexture4, vUv) + - lerpBloomFactor(bloomFactors[4]) * vec4(bloomTintColors[4], 1.0) * texture2D(blurTexture5, vUv) ); + + // 3.0 for backwards compatibility with previous alpha-based intensity + vec3 bloom = 3.0 * bloomStrength * ( + lerpBloomFactor( bloomFactors[ 0 ] ) * bloomTintColors[ 0 ] * texture2D( blurTexture1, vUv ).rgb + + lerpBloomFactor( bloomFactors[ 1 ] ) * bloomTintColors[ 1 ] * texture2D( blurTexture2, vUv ).rgb + + lerpBloomFactor( bloomFactors[ 2 ] ) * bloomTintColors[ 2 ] * texture2D( blurTexture3, vUv ).rgb + + lerpBloomFactor( bloomFactors[ 3 ] ) * 
bloomTintColors[ 3 ] * texture2D( blurTexture4, vUv ).rgb + + lerpBloomFactor( bloomFactors[ 4 ] ) * bloomTintColors[ 4 ] * texture2D( blurTexture5, vUv ).rgb + ); + + float bloomAlpha = max( bloom.r, max( bloom.g, bloom.b ) ); + gl_FragColor = vec4( bloom, bloomAlpha ); + }` } ); @@ -72493,7 +74295,7 @@ class Projector { _face, _faceCount, _facePoolLength = 0, _line, _lineCount, _linePoolLength = 0, _sprite, _spriteCount, _spritePoolLength = 0, - _modelMatrix; + _modelMatrix, _clipInput = [], _clipOutput = []; const @@ -72513,7 +74315,19 @@ class Projector { _frustum = new Frustum(), - _objectPool = [], _vertexPool = [], _facePool = [], _linePool = [], _spritePool = []; + _objectPool = [], _vertexPool = [], _facePool = [], _linePool = [], _spritePool = [], + + _clipVertexPool = [], + _clipPos1 = new Vector4(), + _clipPos2 = new Vector4(), + _clipPos3 = new Vector4(), + _screenVertexPool = [], + _clipInputVertices = [ null, null, null ], + + _clipPlanes = [ + { sign: 1 }, + { sign: -1 } + ]; // @@ -72652,48 +74466,165 @@ class Projector { const v2 = _vertexPool[ b ]; const v3 = _vertexPool[ c ]; - if ( checkTriangleVisibility( v1, v2, v3 ) === false ) return; + // Get homogeneous clip space positions (before perspective divide) + _clipPos1.copy( v1.positionWorld ).applyMatrix4( _viewProjectionMatrix ); + _clipPos2.copy( v2.positionWorld ).applyMatrix4( _viewProjectionMatrix ); + _clipPos3.copy( v3.positionWorld ).applyMatrix4( _viewProjectionMatrix ); + + // Check if triangle needs clipping + const nearDist1 = _clipPos1.z + _clipPos1.w; + const nearDist2 = _clipPos2.z + _clipPos2.w; + const nearDist3 = _clipPos3.z + _clipPos3.w; + const farDist1 = - _clipPos1.z + _clipPos1.w; + const farDist2 = - _clipPos2.z + _clipPos2.w; + const farDist3 = - _clipPos3.z + _clipPos3.w; + + // Check if completely outside + if ( ( nearDist1 < 0 && nearDist2 < 0 && nearDist3 < 0 ) || + ( farDist1 < 0 && farDist2 < 0 && farDist3 < 0 ) ) { + + return; // Triangle completely clipped + + 
} + + // Check if completely inside (no clipping needed) + if ( nearDist1 >= 0 && nearDist2 >= 0 && nearDist3 >= 0 && + farDist1 >= 0 && farDist2 >= 0 && farDist3 >= 0 ) { - if ( material.side === DoubleSide || checkBackfaceCulling( v1, v2, v3 ) === true ) { + // No clipping needed - use original path + if ( checkTriangleVisibility( v1, v2, v3 ) === false ) return; - _face = getNextFaceInPool(); + if ( material.side === DoubleSide || checkBackfaceCulling( v1, v2, v3 ) === true ) { - _face.id = object.id; - _face.v1.copy( v1 ); - _face.v2.copy( v2 ); - _face.v3.copy( v3 ); - _face.z = ( v1.positionScreen.z + v2.positionScreen.z + v3.positionScreen.z ) / 3; - _face.renderOrder = object.renderOrder; + _face = getNextFaceInPool(); - // face normal - _vector3.subVectors( v3.position, v2.position ); - _vector4.subVectors( v1.position, v2.position ); - _vector3.cross( _vector4 ); - _face.normalModel.copy( _vector3 ); - _face.normalModel.applyMatrix3( normalMatrix ).normalize(); + _face.id = object.id; + _face.v1.copy( v1 ); + _face.v2.copy( v2 ); + _face.v3.copy( v3 ); + _face.z = ( v1.positionScreen.z + v2.positionScreen.z + v3.positionScreen.z ) / 3; + _face.renderOrder = object.renderOrder; - for ( let i = 0; i < 3; i ++ ) { + // face normal + _vector3.subVectors( v3.position, v2.position ); + _vector4.subVectors( v1.position, v2.position ); + _vector3.cross( _vector4 ); + _face.normalModel.copy( _vector3 ); + _face.normalModel.applyMatrix3( normalMatrix ).normalize(); - const normal = _face.vertexNormalsModel[ i ]; - normal.fromArray( normals, arguments[ i ] * 3 ); - normal.applyMatrix3( normalMatrix ).normalize(); + for ( let i = 0; i < 3; i ++ ) { - const uv = _face.uvs[ i ]; - uv.fromArray( uvs, arguments[ i ] * 2 ); + const normal = _face.vertexNormalsModel[ i ]; + normal.fromArray( normals, arguments[ i ] * 3 ); + normal.applyMatrix3( normalMatrix ).normalize(); + + const uv = _face.uvs[ i ]; + uv.fromArray( uvs, arguments[ i ] * 2 ); + + } + + 
_face.vertexNormalsLength = 3; + + _face.material = material; + + if ( material.vertexColors ) { + + _face.color.fromArray( colors, a * 3 ); + + } + + _renderData.elements.push( _face ); } - _face.vertexNormalsLength = 3; + return; + + } + + // Triangle needs clipping + _clipInputVertices[ 0 ] = _clipPos1; + _clipInputVertices[ 1 ] = _clipPos2; + _clipInputVertices[ 2 ] = _clipPos3; + const clippedCount = clipTriangle( _clipInputVertices ); + + if ( clippedCount < 3 ) return; // Triangle completely clipped - _face.material = material; + // Perform perspective divide on clipped vertices and create screen vertices + for ( let i = 0; i < clippedCount; i ++ ) { - if ( material.vertexColors ) { + const cv = _clipInput[ i ]; - _face.color.fromArray( colors, a * 3 ); + // Get or create renderable vertex from pool + let sv = _screenVertexPool[ i ]; + if ( ! sv ) { + + sv = new RenderableVertex(); + _screenVertexPool[ i ] = sv; } - _renderData.elements.push( _face ); + // Perform perspective divide + const invW = 1 / cv.w; + sv.positionScreen.set( cv.x * invW, cv.y * invW, cv.z * invW, 1 ); + + // Interpolate world position (simplified - using weighted average based on barycentric-like coords) + // For a proper implementation, we'd need to track interpolation weights + sv.positionWorld.copy( v1.positionWorld ); + + sv.visible = true; + + } + + // Triangulate the clipped polygon (simple fan triangulation) + for ( let i = 1; i < clippedCount - 1; i ++ ) { + + const tv1 = _screenVertexPool[ 0 ]; + const tv2 = _screenVertexPool[ i ]; + const tv3 = _screenVertexPool[ i + 1 ]; + + if ( material.side === DoubleSide || checkBackfaceCulling( tv1, tv2, tv3 ) === true ) { + + _face = getNextFaceInPool(); + + _face.id = object.id; + _face.v1.copy( tv1 ); + _face.v2.copy( tv2 ); + _face.v3.copy( tv3 ); + _face.z = ( tv1.positionScreen.z + tv2.positionScreen.z + tv3.positionScreen.z ) / 3; + _face.renderOrder = object.renderOrder; + + // face normal - use original triangle's normal + 
_vector3.subVectors( v3.position, v2.position ); + _vector4.subVectors( v1.position, v2.position ); + _vector3.cross( _vector4 ); + _face.normalModel.copy( _vector3 ); + _face.normalModel.applyMatrix3( normalMatrix ).normalize(); + + // Use original vertex normals and UVs (simplified - proper impl would interpolate) + for ( let j = 0; j < 3; j ++ ) { + + const normal = _face.vertexNormalsModel[ j ]; + normal.fromArray( normals, arguments[ j ] * 3 ); + normal.applyMatrix3( normalMatrix ).normalize(); + + const uv = _face.uvs[ j ]; + uv.fromArray( uvs, arguments[ j ] * 2 ); + + } + + _face.vertexNormalsLength = 3; + + _face.material = material; + + if ( material.vertexColors ) { + + _face.color.fromArray( colors, a * 3 ); + + } + + _renderData.elements.push( _face ); + + } } @@ -72802,7 +74733,7 @@ class Projector { if ( sortObjects === true ) { - _renderData.objects.sort( painterSort ); + painterSortStable( _renderData.objects, 0, _renderData.objects.length ); } @@ -73068,7 +74999,7 @@ class Projector { if ( sortElements === true ) { - _renderData.elements.sort( painterSort ); + painterSortStable( _renderData.elements, 0, _renderData.elements.length ); } @@ -73212,6 +75143,115 @@ class Projector { } + function painterSortStable( array, start, length ) { + + // A stable insertion sort for sorting render items + // This avoids the GC overhead of Array.prototype.sort() + + for ( let i = start + 1; i < start + length; i ++ ) { + + const item = array[ i ]; + let j = i - 1; + + while ( j >= start && painterSort( array[ j ], item ) > 0 ) { + + array[ j + 1 ] = array[ j ]; + j --; + + } + + array[ j + 1 ] = item; + + } + + } + + // Sutherland-Hodgman triangle clipping in homogeneous clip space + // Returns count of vertices in clipped polygon (0 if completely clipped, 3+ if partially clipped) + // Result vertices are in _clipInput array + function clipTriangle( vertices ) { + + // Initialize input with the three input vertices + _clipInput[ 0 ] = vertices[ 0 ]; + 
_clipInput[ 1 ] = vertices[ 1 ]; + _clipInput[ 2 ] = vertices[ 2 ]; + + let inputCount = 3; + let outputCount = 0; + + for ( let p = 0; p < _clipPlanes.length; p ++ ) { + + const plane = _clipPlanes[ p ]; + outputCount = 0; + + if ( inputCount === 0 ) break; + + for ( let i = 0; i < inputCount; i ++ ) { + + const v1 = _clipInput[ i ]; + const v2 = _clipInput[ ( i + 1 ) % inputCount ]; + + const d1 = plane.sign * v1.z + v1.w; + const d2 = plane.sign * v2.z + v2.w; + + const v1Inside = d1 >= 0; + const v2Inside = d2 >= 0; + + if ( v1Inside && v2Inside ) { + + // Both inside - add v1 + _clipOutput[ outputCount ++ ] = v1; + + } else if ( v1Inside && ! v2Inside ) { + + // v1 inside, v2 outside - add v1 and intersection + _clipOutput[ outputCount ++ ] = v1; + + const t = d1 / ( d1 - d2 ); + let intersection = _clipVertexPool[ outputCount ]; + if ( ! intersection ) { + + intersection = new Vector4(); + _clipVertexPool[ outputCount ] = intersection; + + } + + intersection.lerpVectors( v1, v2, t ); + _clipOutput[ outputCount ++ ] = intersection; + + } else if ( ! v1Inside && v2Inside ) { + + // v1 outside, v2 inside - add intersection only + const t = d1 / ( d1 - d2 ); + let intersection = _clipVertexPool[ outputCount ]; + if ( ! 
intersection ) { + + intersection = new Vector4(); + _clipVertexPool[ outputCount ] = intersection; + + } + + intersection.lerpVectors( v1, v2, t ); + _clipOutput[ outputCount ++ ] = intersection; + + } + + // Both outside - add nothing + + } + + // Swap input/output + const temp = _clipInput; + _clipInput = _clipOutput; + _clipOutput = temp; + inputCount = outputCount; + + } + + return inputCount; + + } + function clipLine( s1, s2 ) { let alpha1 = 0, alpha2 = 1; @@ -73322,6 +75362,8 @@ class SVGRenderer { _svgNode, _pathCount = 0, + _svgObjectCount = 0, + _renderListCount = 0, _precision = null, _quality = 1, @@ -73348,6 +75390,8 @@ class SVGRenderer { _viewProjectionMatrix = new Matrix4(), _svgPathPool = [], + _svgObjectsPool = [], + _renderListPool = [], _projector = new Projector(), _svg = document.createElementNS( 'http://www.w3.org/2000/svg', 'svg' ); @@ -73355,7 +75399,7 @@ class SVGRenderer { /** * The DOM where the renderer appends its child-elements. * - * @type {DOMElement} + * @type {SVGSVGElement} */ this.domElement = _svg; @@ -73418,8 +75462,8 @@ class SVGRenderer { }; /** - * Sets the render quality. Setting to `high` means This value indicates that the browser - * tries to improve the SVG quality over rendering speed and geometric precision. + * Sets the render quality. Setting to `high` makes the browser improve SVG quality + * over rendering speed and geometric precision. * * @param {('low'|'high')} quality - The quality. */ @@ -73510,6 +75554,49 @@ class SVGRenderer { } + function renderSort( a, b ) { + + const aOrder = a.data.renderOrder !== undefined ? a.data.renderOrder : 0; + const bOrder = b.data.renderOrder !== undefined ? b.data.renderOrder : 0; + + if ( aOrder !== bOrder ) { + + return aOrder - bOrder; + + } else { + + const aZ = a.data.z !== undefined ? a.data.z : 0; + const bZ = b.data.z !== undefined ? 
b.data.z : 0; + + return bZ - aZ; // Painter's algorithm: far to near + + } + + } + + function arraySortStable( array, start, length ) { + + // A stable insertion sort for sorting the render list + // This avoids the GC overhead of Array.prototype.sort() + + for ( let i = start + 1; i < start + length; i ++ ) { + + const item = array[ i ]; + let j = i - 1; + + while ( j >= start && renderSort( array[ j ], item ) > 0 ) { + + array[ j + 1 ] = array[ j ]; + j --; + + } + + array[ j + 1 ] = item; + + } + + } + /** * Performs a manual clear with the defined clear color. */ @@ -73562,10 +75649,7 @@ class SVGRenderer { calculateLights( _lights ); - // reset accumulated path - - _currentPath = ''; - _currentStyle = ''; + _renderListCount = 0; for ( let e = 0, el = _elements.length; e < el; e ++ ) { @@ -73574,88 +75658,126 @@ class SVGRenderer { if ( material === undefined || material.opacity === 0 ) continue; - _elemBox.makeEmpty(); + getRenderItem( _renderListCount ++, 'element', element, material ); - if ( element instanceof RenderableSprite ) { + } - _v1 = element; - _v1.x *= _svgWidthHalf; _v1.y *= - _svgHeightHalf; + _svgObjectCount = 0; - renderSprite( _v1, element, material ); + scene.traverseVisible( function ( object ) { - } else if ( element instanceof RenderableLine ) { + if ( object.isSVGObject ) { - _v1 = element.v1; _v2 = element.v2; + _vector3.setFromMatrixPosition( object.matrixWorld ); + _vector3.applyMatrix4( _viewProjectionMatrix ); - _v1.positionScreen.x *= _svgWidthHalf; _v1.positionScreen.y *= - _svgHeightHalf; - _v2.positionScreen.x *= _svgWidthHalf; _v2.positionScreen.y *= - _svgHeightHalf; + if ( _vector3.z < -1 || _vector3.z > 1 ) return; - _elemBox.setFromPoints( [ _v1.positionScreen, _v2.positionScreen ] ); + const x = _vector3.x * _svgWidthHalf; + const y = - _vector3.y * _svgHeightHalf; - if ( _clipBox.intersectsBox( _elemBox ) === true ) { + const svgObject = getSVGObjectData( _svgObjectCount ++ ); - renderLine( _v1, _v2, material ); + 
svgObject.node = object.node; + svgObject.x = x; + svgObject.y = y; + svgObject.z = _vector3.z; + svgObject.renderOrder = object.renderOrder; - } + getRenderItem( _renderListCount ++, 'svgObject', svgObject, null ); - } else if ( element instanceof RenderableFace ) { + } - _v1 = element.v1; _v2 = element.v2; _v3 = element.v3; + } ); - if ( _v1.positionScreen.z < -1 || _v1.positionScreen.z > 1 ) continue; - if ( _v2.positionScreen.z < -1 || _v2.positionScreen.z > 1 ) continue; - if ( _v3.positionScreen.z < -1 || _v3.positionScreen.z > 1 ) continue; + if ( this.sortElements ) { - _v1.positionScreen.x *= _svgWidthHalf; _v1.positionScreen.y *= - _svgHeightHalf; - _v2.positionScreen.x *= _svgWidthHalf; _v2.positionScreen.y *= - _svgHeightHalf; - _v3.positionScreen.x *= _svgWidthHalf; _v3.positionScreen.y *= - _svgHeightHalf; + arraySortStable( _renderListPool, 0, _renderListCount ); - if ( this.overdraw > 0 ) { + } - expand( _v1.positionScreen, _v2.positionScreen, this.overdraw ); - expand( _v2.positionScreen, _v3.positionScreen, this.overdraw ); - expand( _v3.positionScreen, _v1.positionScreen, this.overdraw ); + // Reset accumulated path + _currentPath = ''; + _currentStyle = ''; - } + // Render in sorted order + for ( let i = 0; i < _renderListCount; i ++ ) { - _elemBox.setFromPoints( [ - _v1.positionScreen, - _v2.positionScreen, - _v3.positionScreen - ] ); + const item = _renderListPool[ i ]; - if ( _clipBox.intersectsBox( _elemBox ) === true ) { + if ( item.type === 'svgObject' ) { - renderFace3( _v1, _v2, _v3, element, material ); + flushPath(); // Flush any accumulated paths before inserting SVG node - } + const svgObject = item.data; + const node = svgObject.node; + node.setAttribute( 'transform', 'translate(' + svgObject.x + ',' + svgObject.y + ')' ); + _svg.appendChild( node ); - } + } else { - } + const element = item.data; + const material = item.material; - flushPath(); // just to flush last svg:path + _elemBox.makeEmpty(); - scene.traverseVisible( function 
( object ) { + if ( element instanceof RenderableSprite ) { - if ( object.isSVGObject ) { + _v1 = element; + _v1.x *= _svgWidthHalf; _v1.y *= - _svgHeightHalf; - _vector3.setFromMatrixPosition( object.matrixWorld ); - _vector3.applyMatrix4( _viewProjectionMatrix ); + renderSprite( _v1, element, material ); - if ( _vector3.z < -1 || _vector3.z > 1 ) return; + } else if ( element instanceof RenderableLine ) { - const x = _vector3.x * _svgWidthHalf; - const y = - _vector3.y * _svgHeightHalf; + _v1 = element.v1; _v2 = element.v2; - const node = object.node; - node.setAttribute( 'transform', 'translate(' + x + ',' + y + ')' ); + _v1.positionScreen.x *= _svgWidthHalf; _v1.positionScreen.y *= - _svgHeightHalf; + _v2.positionScreen.x *= _svgWidthHalf; _v2.positionScreen.y *= - _svgHeightHalf; - _svg.appendChild( node ); + _elemBox.setFromPoints( [ _v1.positionScreen, _v2.positionScreen ] ); + + if ( _clipBox.intersectsBox( _elemBox ) === true ) { + + renderLine( _v1, _v2, material ); + + } + + } else if ( element instanceof RenderableFace ) { + + _v1 = element.v1; _v2 = element.v2; _v3 = element.v3; + + _v1.positionScreen.x *= _svgWidthHalf; _v1.positionScreen.y *= - _svgHeightHalf; + _v2.positionScreen.x *= _svgWidthHalf; _v2.positionScreen.y *= - _svgHeightHalf; + _v3.positionScreen.x *= _svgWidthHalf; _v3.positionScreen.y *= - _svgHeightHalf; + + if ( this.overdraw > 0 ) { + + expand( _v1.positionScreen, _v2.positionScreen, this.overdraw ); + expand( _v2.positionScreen, _v3.positionScreen, this.overdraw ); + expand( _v3.positionScreen, _v1.positionScreen, this.overdraw ); + + } + + _elemBox.setFromPoints( [ + _v1.positionScreen, + _v2.positionScreen, + _v3.positionScreen + ] ); + + if ( _clipBox.intersectsBox( _elemBox ) === true ) { + + renderFace3( _v1, _v2, _v3, element, material ); + + } + + } } - } ); + } + + flushPath(); // Flush any remaining paths }; @@ -73895,21 +76017,71 @@ class SVGRenderer { function getPathNode( id ) { - if ( _svgPathPool[ id ] == null ) { 
+ let path = _svgPathPool[ id ]; - _svgPathPool[ id ] = document.createElementNS( 'http://www.w3.org/2000/svg', 'path' ); + if ( path === undefined ) { + + path = document.createElementNS( 'http://www.w3.org/2000/svg', 'path' ); if ( _quality == 0 ) { - _svgPathPool[ id ].setAttribute( 'shape-rendering', 'crispEdges' ); //optimizeSpeed + path.setAttribute( 'shape-rendering', 'crispEdges' ); //optimizeSpeed } - return _svgPathPool[ id ]; + _svgPathPool[ id ] = path; + + } + + return path; + + } + + function getSVGObjectData( id ) { + + let svgObject = _svgObjectsPool[ id ]; + + if ( svgObject === undefined ) { + + svgObject = { + node: null, + x: 0, + y: 0, + z: 0, + renderOrder: 0 + }; + + _svgObjectsPool[ id ] = svgObject; + + } + + return svgObject; + + } + + function getRenderItem( id, type, data, material ) { + + let item = _renderListPool[ id ]; + + if ( item === undefined ) { + + item = { + type: type, + data: data, + material: material + }; + + _renderListPool[ id ] = item; + + return item; } - return _svgPathPool[ id ]; + item.type = type; + item.data = data; + item.material = material; + + return item; } @@ -75831,7 +78003,6 @@ createLatexGeometry: createLatexGeometry */ /* eslint-disable curly */ -/* eslint-disable no-loss-of-precision */ /* eslint-disable no-useless-assignment */ /* eslint-disable no-use-before-define */ /* eslint-disable no-else-return */ @@ -79020,6 +81191,20 @@ class JSRootMenu { this.endsub(); } + /** @summary Add log scale selection for pad + * @protected */ + addPadLogMenu(kind, value, func) { + this.sub('SetLog ' + kind, + () => this.input('Enter log kind: 0 - off, 1 - log10, 2 - log2, 3 - ln, ...', value, 'int', 0, 10000).then(func)); + this.addchk(value === 0, 'linear', () => func(0)); + this.addchk(value === 1, 'log10', () => func(1)); + this.addchk(value === 2, 'log2', () => func(2)); + this.addchk(value === 3, 'ln', () => func(3)); + this.addchk(value === 4, 'log4', () => func(4)); + this.addchk(value === 8, 'log8', () => 
func(8)); + this.endsub(); + } + /** @summary Add palette menu entries * @protected */ addPaletteMenu(curr, set_func) { @@ -79465,38 +81650,40 @@ class JSRootMenu { faxis.fNdivisions = val; painter.interactiveRedraw('pad', `exec:SetNdivisions(${val})`, kind); })); - this.sub('Labels'); - this.addchk(faxis.TestBit(EAxisBits.kCenterLabels), 'Center', - arg => { faxis.SetBit(EAxisBits.kCenterLabels, arg); painter.interactiveRedraw('pad', `exec:CenterLabels(${arg})`, kind); }); - this.addchk(faxis.TestBit(EAxisBits.kLabelsVert), 'Rotate', - arg => { faxis.SetBit(EAxisBits.kLabelsVert, arg); painter.interactiveRedraw('pad', `exec:SetBit(TAxis::kLabelsVert,${arg})`, kind); }); - this.addColorMenu('Color', faxis.fLabelColor, - arg => { faxis.fLabelColor = arg; painter.interactiveRedraw('pad', getColorExec(arg, 'SetLabelColor'), kind); }); - this.addSizeMenu('Offset', -0.02, 0.1, 0.01, faxis.fLabelOffset, - arg => { faxis.fLabelOffset = arg; painter.interactiveRedraw('pad', `exec:SetLabelOffset(${arg})`, kind); }); - let a = faxis.fLabelSize >= 1; - this.addSizeMenu('Size', a ? 2 : 0.02, a ? 30 : 0.11, a ? 
2 : 0.01, faxis.fLabelSize, - arg => { faxis.fLabelSize = arg; painter.interactiveRedraw('pad', `exec:SetLabelSize(${arg})`, kind); }); - - if (frame_painter && (axis_painter?.kind === kAxisLabels) && (faxis.fNbins > 20)) { - this.add('Find label', () => this.input('Label id').then(id => { - if (!id) - return; - for (let bin = 0; bin < faxis.fNbins; ++bin) { - const lbl = axis_painter.formatLabels(bin); - if (lbl === id) - return frame_painter.zoomSingle(kind, Math.max(0, bin - 4), Math.min(faxis.fNbins, bin + 5)); - } - }), 'Zoom into region around specific label'); - } - if (frame_painter && faxis.fLabels) { - const ignore = `${kind}_ignore_labels`; - this.addchk(!frame_painter[ignore], 'Custom', flag => { - frame_painter[ignore] = !flag; - painter.interactiveRedraw('pad'); - }, `Use of custom labels in axis ${kind}`); + if (kind !== 'v') { + this.sub('Labels'); + this.addchk(faxis.TestBit(EAxisBits.kCenterLabels), 'Center', + arg => { faxis.SetBit(EAxisBits.kCenterLabels, arg); painter.interactiveRedraw('pad', `exec:CenterLabels(${arg})`, kind); }); + this.addchk(faxis.TestBit(EAxisBits.kLabelsVert), 'Rotate', + arg => { faxis.SetBit(EAxisBits.kLabelsVert, arg); painter.interactiveRedraw('pad', `exec:SetBit(TAxis::kLabelsVert,${arg})`, kind); }); + this.addColorMenu('Color', faxis.fLabelColor, + arg => { faxis.fLabelColor = arg; painter.interactiveRedraw('pad', getColorExec(arg, 'SetLabelColor'), kind); }); + this.addSizeMenu('Offset', -0.02, 0.1, 0.01, faxis.fLabelOffset, + arg => { faxis.fLabelOffset = arg; painter.interactiveRedraw('pad', `exec:SetLabelOffset(${arg})`, kind); }); + const a = faxis.fLabelSize >= 1; + this.addSizeMenu('Size', a ? 2 : 0.02, a ? 30 : 0.11, a ? 
2 : 0.01, faxis.fLabelSize, + arg => { faxis.fLabelSize = arg; painter.interactiveRedraw('pad', `exec:SetLabelSize(${arg})`, kind); }); + + if (frame_painter && (axis_painter?.kind === kAxisLabels) && (faxis.fNbins > 20)) { + this.add('Find label', () => this.input('Label id').then(id => { + if (!id) + return; + for (let bin = 0; bin < faxis.fNbins; ++bin) { + const lbl = axis_painter.formatLabels(bin); + if (lbl === id) + return frame_painter.zoomSingle(kind, Math.max(0, bin - 4), Math.min(faxis.fNbins, bin + 5)); + } + }), 'Zoom into region around specific label'); + } + if (frame_painter && faxis.fLabels) { + const ignore = `${kind}_ignore_labels`; + this.addchk(!frame_painter[ignore], 'Custom', flag => { + frame_painter[ignore] = !flag; + painter.interactiveRedraw('pad'); + }, `Use of custom labels in axis ${kind}`); + } + this.endsub(); } - this.endsub(); this.sub('Title'); this.add('SetTitle', () => { @@ -79523,8 +81710,8 @@ class JSRootMenu { }); this.addSizeMenu('Offset', 0, 3, 0.2, faxis.fTitleOffset, arg => { faxis.fTitleOffset = arg; painter.interactiveRedraw('pad', `exec:SetTitleOffset(${arg})`, kind); }); - a = faxis.fTitleSize >= 1; - this.addSizeMenu('Size', a ? 2 : 0.02, a ? 30 : 0.11, a ? 2 : 0.01, faxis.fTitleSize, + const p = faxis.fTitleSize >= 1; + this.addSizeMenu('Size', p ? 2 : 0.02, p ? 30 : 0.11, p ? 
2 : 0.01, faxis.fTitleSize, arg => { faxis.fTitleSize = arg; painter.interactiveRedraw('pad', `exec:SetTitleSize(${arg})`, kind); }); this.endsub(); @@ -79626,6 +81813,9 @@ class JSRootMenu { this.addchk(settings.StripAxisLabels, 'Strip labels', flag => { settings.StripAxisLabels = flag; }, 'Provide shorter labels like 10^0 -> 1'); this.addchk(settings.CutAxisLabels, 'Cut labels', flag => { settings.CutAxisLabels = flag; }, 'Remove labels which may exceed graphical range'); this.add(`Tilt angle ${settings.AxisTiltAngle}`, () => this.input('Axis tilt angle', settings.AxisTiltAngle, 'int', 0, 180).then(val => { settings.AxisTiltAngle = val; })); + this.add(`X format ${settings.XValuesFormat ?? gStyle.fStatFormat}`, () => this.input('X axis format', settings.XValuesFormat).then(val => { settings.XValuesFormat = val; })); + this.add(`Y format ${settings.YValuesFormat ?? gStyle.fStatFormat}`, () => this.input('Y axis format', settings.YValuesFormat).then(val => { settings.YValuesFormat = val; })); + this.add(`Z format ${settings.ZValuesFormat ?? gStyle.fStatFormat}`, () => this.input('Z axis format', settings.ZValuesFormat).then(val => { settings.ZValuesFormat = val; })); this.endsub(); this.addSelectMenu('Latex', ['Off', 'Symbols', 'Normal', 'MathJax', 'Force MathJax'], settings.Latex, value => { settings.Latex = value; }); this.addSelectMenu('3D rendering', ['Default', 'WebGL', 'Image'], settings.Render3D, value => { settings.Render3D = value; }); @@ -79824,9 +82014,14 @@ class JSRootMenu { * @param {string} [kind] - use 'text' (default), 'number', 'float' or 'int' * @protected */ async input(title, value, kind, min, max) { + let onchange = null; + if (isFunc(kind)) { + onchange = kind; + kind = ''; + } if (!kind) kind = 'text'; - const inp_type = (kind === 'int') ? 'number' : 'text'; + const inp_type = (kind === 'int') ? 
'number' : 'text', value0 = value; let ranges = ''; if ((value === undefined) || (value === null)) value = ''; @@ -79839,24 +82034,33 @@ class JSRootMenu { const main_content = '
' + - `` + - '
'; + `` + + '', oninit = !onchange ? null : elem => { + const inp = elem.querySelector('.jsroot_dlginp'); + if (inp) + inp.oninput = () => onchange(inp.value); + }; return new Promise(resolveFunc => { - this.runModal(title, main_content, { btns: true, height: 150, width: 400 }).then(element => { - if (!element) + this.runModal(title, main_content, { btns: true, height: 150, width: 400, oninit }).then(element => { + if (!element) { + if (onchange) + onchange(value0); return; - let val = element.querySelector('.jsroot_dlginp').value; + } + let val = element.querySelector('.jsroot_dlginp').value, isok = true; if (kind === 'float') { val = Number.parseFloat(val); - if (Number.isFinite(val)) - resolveFunc(val); + isok = Number.isFinite(val); } else if (kind === 'int') { val = parseInt(val); - if (Number.isInteger(val)) - resolveFunc(val); - } else + isok = Number.isInteger(val); + } + if (isok) { + if (onchange) + onchange(val); resolveFunc(val); + } }); }); } @@ -80315,25 +82519,25 @@ class StandaloneMenu extends JSRootMenu { select(`#${dlg_id}`).remove(); select(`#${dlg_id}_block`).remove(); - const w = Math.min(args.width || 450, Math.round(0.9 * browser.screenWidth)); - modal.block = select('body').append('div') - .attr('id', `${dlg_id}_block`) - .attr('class', 'jsroot_dialog_block') - .attr('style', 'z-index: 100000; position: absolute; left: 0px; top: 0px; bottom: 0px; right: 0px; opacity: 0.2; background-color: white'); - modal.element = select('body') - .append('div') - .attr('id', dlg_id) - .attr('class', 'jsroot_dialog') - .style('position', 'absolute') - .style('width', `${w}px`) - .style('left', '50%') - .style('top', '50%') - .style('z-index', 100001) - .attr('tabindex', '0'); + const w = Math.min(args.width || 450, Math.round(0.9 * browser.screenWidth)), + b = select('body'); + modal.block = b.append('div') + .attr('id', `${dlg_id}_block`) + .attr('class', 'jsroot_dialog_block') + .attr('style', 'z-index: 100000; position: absolute; left: 0px; top: 0px; 
bottom: 0px; right: 0px; opacity: 0.2; background-color: white'); + modal.element = b.append('div') + .attr('id', dlg_id) + .attr('class', 'jsroot_dialog') + .style('position', 'absolute') + .style('width', `${w}px`) + .style('left', '50%') + .style('top', '50%') + .style('z-index', 100001) + .attr('tabindex', '0'); modal.element.html( '
' + - `
${title}
` + + `
${title}
` + `
${main_content}
` + '
' + `` + @@ -80341,6 +82545,16 @@ class StandaloneMenu extends JSRootMenu { '
' ); + const drag_move = drag().on('start', () => { modal.y0 = 0; }).on('drag', evnt => { + if (!modal.y0) + modal.y0 = pointer(evnt, modal.element.node())[1]; + let p0 = Math.max(0, pointer(evnt, b.node())[1] - modal.y0); + if (b.node().clientHeight) + p0 = Math.min(p0, 0.8 * b.node().clientHeight); + modal.element.style('top', `${p0}px`); + }); + modal.element.select('.jsroot_dialog_title').call(drag_move); + modal.done = function(res) { if (this._done) return; @@ -80383,6 +82597,8 @@ class StandaloneMenu extends JSRootMenu { f = modal.element.select('.jsroot_dialog_footer').select('button'); if (!f.empty()) f.node().focus(); + if (isFunc(args.oninit)) + args.oninit(modal.element.node()); return modal; } @@ -81021,7 +83237,7 @@ class TAxisPainter extends ObjectPainter { if (this.kind === kAxisFunc) this.func = this.createFuncHandle(opts.axis_func, this.logbase, smin, smax); else - this.func = log().base(this.logbase).domain([smin, smax]); + this.func = log$1().base(this.logbase).domain([smin, smax]); } else if (this.symlog) { let v = Math.max(Math.abs(smin), Math.abs(smax)); if (Number.isInteger(this.symlog) && (this.symlog > 0)) @@ -81526,7 +83742,7 @@ class TAxisPainter extends ObjectPainter { if (handle.kind === 1) { // if not showing labels, not show large tick - // FIXME: for labels last tick is smaller, + // FIXME: for labels last tick is smaller if (!this.isExtraLogTick(handle.tick) && (this.format(handle.tick, true) !== null)) h1 = tickSize; this.ticks.push(handle.grpos); // keep graphical positions of major ticks @@ -84867,21 +87083,9 @@ class TFramePainter extends FrameInteractive { } menu.endsub(); - if (pad) { - const member = 'fLog' + kind[0]; - menu.sub('SetLog ' + kind[0], () => { - menu.input('Enter log kind: 0 - off, 1 - log10, 2 - log2, 3 - ln, ...', pad[member], 'int', 0, 10000).then(v => { - this.changeAxisLog(kind[0], v); - }); - }); - menu.addchk(pad[member] === 0, 'linear', () => this.changeAxisLog(kind[0], 0)); - menu.addchk(pad[member] 
=== 1, 'log10', () => this.changeAxisLog(kind[0], 1)); - menu.addchk(pad[member] === 2, 'log2', () => this.changeAxisLog(kind[0], 2)); - menu.addchk(pad[member] === 3, 'ln', () => this.changeAxisLog(kind[0], 3)); - menu.addchk(pad[member] === 4, 'log4', () => this.changeAxisLog(kind[0], 4)); - menu.addchk(pad[member] === 8, 'log8', () => this.changeAxisLog(kind[0], 8)); - menu.endsub(); - } + if (pad) + menu.addPadLogMenu(kind[0], pad[`fLog${kind[0]}`], v => this.changeAxisLog(kind[0], v)); + menu.addchk(faxis.TestBit(EAxisBits.kMoreLogLabels), 'More log', flag => { faxis.SetBit(EAxisBits.kMoreLogLabels, flag); if (hist_painter?.getSnapId() && (kind.length === 1)) @@ -91032,6 +93236,20 @@ class TCanvasPainter extends TPadPainter { this.getWebsocket()?.resizeWindow(fullW, fullH); } + /** @summary create three.js object for TCanvas */ + static async build3d(can, opt, get_painter) { + const painter = new TCanvasPainter(null, can, opt, true); + painter.checkSpecialsInPrimitives(can, true); + + const fp = new TFramePainter(null, null); + // return dummy frame painter as result + painter.getFramePainter = () => fp; + + return painter.drawPrimitives().then(() => { + return get_painter ? painter : fp.create3DScene(-1, true); + }); + } + /** @summary draw TCanvas */ static async draw(dom, can, opt) { const nocanvas = !can; @@ -91202,18 +93420,26 @@ class TPavePainter extends ObjectPainter { tm = pad?.fTopMargin ?? gStyle.fPadTopMargin, bm = pad?.fBottomMargin ?? 
gStyle.fPadBottomMargin; - return svgToImage(svg_code).then(canvas => { - if (!canvas) + return svgToImage(svg_code, 'rgba').then(image => { + if (!image) + return false; + + let arr = image.data; + const width = image.width, height = image.height; + + if (!arr && isFunc(image.getContext) && image.getContext('2d')) + arr = image.getContext('2d').getImageData(0, 0, width, height).data; + + if (!arr) return false; let nX = 100, nY = 100; - const context = canvas.getContext('2d'), - arr = context.getImageData(0, 0, canvas.width, canvas.height).data, - boxW = Math.floor(canvas.width / nX), boxH = Math.floor(canvas.height / nY), + const boxW = Math.floor(width / nX), + boxH = Math.floor(height / nY), raster = new Array(nX * nY); - if (arr.length !== canvas.width * canvas.height * 4) { - console.log(`Image size missmatch in TLegend autoplace ${arr.length} expected ${canvas.width * canvas.height * 4}`); + if (arr.length !== width * height * 4) { + console.log(`Image size missmatch in TLegend autoplace ${arr.length} expected ${width * height * 4}`); nX = nY = 0; } @@ -91225,7 +93451,7 @@ class TPavePainter extends ObjectPainter { for (let x = px1; (x < px2) && !filled; ++x) { for (let y = py1; y < py2; ++y) { - const indx = (y * canvas.width + x) * 4; + const indx = (y * width + x) * 4; if (arr[indx] || arr[indx + 1] || arr[indx + 2] || arr[indx + 3]) { filled = 1; break; @@ -91303,6 +93529,11 @@ class TPavePainter extends ObjectPainter { opt = this.getPaveDrawOption().toUpperCase(), fp = this.getFramePainter(), pp = this.getPadPainter(), pad = pp.getRootPad(true); + + // special handling of dummy frame painter + if (fp?.getDrawDom() === null) + return this; + let interactive_element, width, height; if (pt.fInit === 0) { @@ -91909,7 +94140,7 @@ class TPavePainter extends ObjectPainter { nlines = legend.fPrimitives.arr.length, ncols = Math.max(1, legend.fNColumns); let nrows = Math.round(nlines / ncols), - any_text = false, + any_text = false, has_header = false, 
custom_textg = false; // each text entry has own attributes if (nrows * ncols < nlines) @@ -91919,6 +94150,8 @@ class TPavePainter extends ObjectPainter { for (let ii = 0; ii < nlines; ++ii) { const entry = legend.fPrimitives.arr[ii]; + if ((ii === 0) && (entry.fOption === 'h')) + has_header = true; if (isEmpty(entry)) { if (ncols === 1) nrows--; @@ -91930,6 +94163,9 @@ class TPavePainter extends ObjectPainter { } } + if (has_header && (ncols > 1) && ((nrows - 1) * ncols < nlines - 1)) + nrows++; + if (nrows < 1) nrows = 1; @@ -91992,8 +94228,10 @@ class TPavePainter extends ObjectPainter { if (ncols === 1) ++i; - else + else if (!has_header || ii === 0) i = ii; + else + i = ii - 1 + ncols; const lopt = entry.fOption.toLowerCase(), icol = i % ncols, irow = (i - icol) / ncols, @@ -92098,17 +94336,19 @@ class TPavePainter extends ObjectPainter { .call(painter.lineatt.func); } - let pos_x = tpos_x; + let pos_x = tpos_x, arg_width = Math.round(column_pos[icol + 1] - pos_x); if (isStr(lopt) && (lopt.toLowerCase() !== 'h')) any_opt = true; - else if (!any_opt) - pos_x = x0; + else if (!any_opt) { + pos_x = padding_x; + arg_width = w - 2 * padding_x; + } if (entry.fLabel) { const textatt = this.createAttText({ attr: entry, std: false, attr_alt: legend }), arg = { draw_g: this.getG(), align: textatt.align, - x: pos_x, width: Math.round(column_pos[icol + 1] - pos_x), + x: pos_x, width: arg_width, y: y0, height: Math.round(row_height), scale: (custom_textg && !entry.fTextSize) || !legend.fTextSize, text: entry.fLabel, color: textatt.color @@ -92148,10 +94388,10 @@ class TPavePainter extends ObjectPainter { height = pp.getPadHeight(), pad = pp.getRootPad(true), main = palette.$main_painter || this.getMainPainter(), + is_th3 = isFunc(main.getDimension) && (main.getDimension() === 3), fp = this.getFramePainter(), - contour = main.getContour(false), + contour = main.getContour(is_th3), levels = contour?.getLevels(), - is_th3 = isFunc(main.getDimension) && (main.getDimension() 
=== 3), is_scatter = isFunc(main.getZaxis), log = pad?.fLogv ?? (is_th3 ? false : pad?.fLogz), draw_palette = main.getHistPalette(), @@ -92161,6 +94401,7 @@ class TPavePainter extends ObjectPainter { let zmin = 0, zmax = 100, gzmin, gzmax, axis_transform, axis_second = 0; this.#palette_vertical = (palette.fX2NDC - palette.fX1NDC) < (palette.fY2NDC - palette.fY1NDC); + this.is_th3 = is_th3; axis.fTickSize = 0.03; // adjust axis ticks size @@ -92197,12 +94438,16 @@ class TPavePainter extends ObjectPainter { } } else if ((main.gmaxbin !== undefined) && (main.gminbin !== undefined)) { // this is case of TH2 (needs only for size adjustment) - zmin = main.gminbin; - zmax = main.gmaxbin; + gzmin = zmin = main.gminbin; + gzmax = zmax = main.gmaxbin; + if (contour?.colzmin !== undefined && contour?.colzmax !== undefined) { + zmin = contour.colzmin; + zmax = contour.colzmax; + } } else if ((main.hmin !== undefined) && (main.hmax !== undefined)) { // this is case of TH1 - zmin = main.hmin; - zmax = main.hmax; + gzmin = zmin = main.hmin; + gzmax = zmax = main.hmax; } g.selectAll('rect').style('fill', 'white'); @@ -92374,6 +94619,17 @@ class TPavePainter extends ObjectPainter { zoom_rect.attr('x', Math.min(sel1, sel2)) .attr('width', Math.abs(sel2 - sel1)); } + }, zoomPalette = (z1, z2) => { + if (!this.is_th3) + return this.getFramePainter().zoomSingle('z', z1, z2, true); + const maino = this.getMainPainter().options; + if (z1 === z2) + maino.minimum = maino.maximum = kNoZoom; + else { + maino.minimum = z1; + maino.maximum = z2; + } + this.interactiveRedraw('pad'); }, endRectSel = evnt => { if (!doing_zoom) return; @@ -92385,10 +94641,13 @@ class TPavePainter extends ObjectPainter { zoom_rect = null; doing_zoom = false; + if (sel1 === sel2) + return; + const z1 = this.z_handle.revertPoint(sel1), z2 = this.z_handle.revertPoint(sel2); - this.getFramePainter().zoomSingle('z', Math.min(z1, z2), Math.max(z1, z2), true); + zoomPalette(Math.min(z1, z2), Math.max(z1, z2)); }, 
startRectSel = evnt => { // ignore when touch selection is activated if (doing_zoom) @@ -92423,7 +94682,7 @@ class TPavePainter extends ObjectPainter { if (settings.Zooming) { this.getG().selectAll('.axis_zoom') .on('mousedown', startRectSel) - .on('dblclick', () => this.getFramePainter().zoomSingle('z', 0, 0, true)); + .on('dblclick', () => zoomPalette(0, 0)); } if (settings.ZoomWheel) { @@ -92432,7 +94691,7 @@ class TPavePainter extends ObjectPainter { coord = this.#palette_vertical ? (1 - pos[1] / s_height) : pos[0] / s_width, item = this.z_handle.analyzeWheelEvent(evnt, coord); if (item?.changed) - this.getFramePainter().zoomSingle('z', item.min, item.max, true); + zoomPalette(item.min, item.max); }); } } @@ -92637,12 +94896,51 @@ class TPavePainter extends ObjectPainter { /** @summary Show pave context menu */ paveContextMenu(evnt) { - if (this.z_handle) { + if (!this.z_handle) + return showPainterMenu(evnt, this); + if (!this.is_th3) { const fp = this.getFramePainter(); if (isFunc(fp?.showContextMenu)) fp.showContextMenu('pal', evnt); - } else - showPainterMenu(evnt, this); + return; + } + + const pp = this.getPadPainter(), + pad = pp?.getRootPad(true), + faxis = this.z_handle.getObject(), + hist_painter = this.z_handle.hist_painter || this.getMainPainter(true); + + if (!pad || !hist_painter) + return; + + if (isFunc(evnt?.stopPropagation)) { + evnt.preventDefault(); + evnt.stopPropagation(); // disable main context menu + } + + createMenu(evnt, this).then(menu => { + menu.header('V axis', `${urlClassPrefix}${clTAxis}.html`); + + menu.addPadLogMenu('v', pad.fLogv || 0, v => { + pad.fLogv = v; + this.interactiveRedraw('pad', 'log'); + }); + + menu.addchk(faxis.TestBit(EAxisBits.kMoreLogLabels), 'More log', flag => { + faxis.SetBit(EAxisBits.kMoreLogLabels, flag); + this.interactiveRedraw('pad'); + }); + menu.addchk(faxis.TestBit(EAxisBits.kNoExponent), 'No exponent', flag => { + faxis.SetBit(EAxisBits.kNoExponent, flag); + this.interactiveRedraw('pad'); + }); 
+ + hist_painter.fillPaletteMenu(menu, false); + + menu.addTAxisMenu(EAxisBits, hist_painter || this, faxis, 'v', this.z_handle, null); + + menu.show(); + }); } /** @summary Returns true when stat box is drawn */ @@ -93014,6 +95312,7 @@ kPosTitle: kPosTitle const kCARTESIAN = 1, kPOLAR = 2, kCYLINDRICAL = 3, kSPHERICAL = 4, kRAPIDITY = 5, kNormal$1 = 0, kPoisson = 1, kPoisson2 = 2, kOnlyCheck = 'only-check'; + /** * @summary Class to decode histograms draw options * @desc All options started from capital letter are major drawing options @@ -94010,7 +96309,7 @@ class FunctionsHandler { #painter; // object painter to which functions belongs #pad_painter; // pad painter - constructor(painter, pp, funcs, statpainter) { + constructor(painter, pp, funcs, statpainter, update_statpainter) { this.#painter = painter; this.#pad_painter = pp; @@ -94069,6 +96368,8 @@ class FunctionsHandler { const indx = painters.indexOf(statpainter); if (indx >= 0) painters.splice(indx, 1); + if (update_statpainter && (update_painters.indexOf(statpainter) < 0)) + update_painters.push(statpainter); } // remove all function which are not found in new list of functions @@ -94139,6 +96440,7 @@ class THistPainter extends ObjectPainter { #auto_exec; // can be reused when sending option back to server #funcs_handler; // special instance for functions drawing #contour; // histogram colors contour + #create_stats; // if stats was created by painter /** @summary Constructor * @param {object|string} dom - DOM element for drawing or element id @@ -94442,7 +96744,7 @@ class THistPainter extends ObjectPainter { histo.fBins = obj.fBins; // remove old functions, update existing, prepare to draw new one - this.#funcs_handler = new FunctionsHandler(this, pp, obj.fFunctions, statpainter); + this.#funcs_handler = new FunctionsHandler(this, pp, obj.fFunctions, statpainter, this.#create_stats); const changed_opt = (histo.fOption !== obj.fOption); histo.fOption = obj.fOption; @@ -94767,7 +97069,7 @@ class 
THistPainter extends ObjectPainter { /** @summary Returns true if stats box fill can be ignored */ isIgnoreStatsFill() { - return !this.getObject() || (!this.draw_content && !this.create_stats && !this.hasSnapId()); + return !this.getObject() || (!this.draw_content && !this.#create_stats && !this.hasSnapId()); } /** @summary Create stat box for histogram if required */ @@ -94806,7 +97108,7 @@ class THistPainter extends ObjectPainter { if (!stats && !optstat && !optfit) return null; - this.create_stats = true; + this.#create_stats = true; if (stats) return stats; @@ -95281,6 +97583,12 @@ class THistPainter extends ObjectPainter { return cntr; } + /** @summary Reset contour object + * @private */ + resetContour() { + this.#contour = undefined; + } + /** @summary Return Z-scale ranges to create contour */ #getContourRanges(main, fp) { const o = this.getOptions(), @@ -95861,7 +98169,7 @@ class THistPainter extends ObjectPainter { this.maxbin = this.minbin = 0; // force recalculation of z levels - this.#contour = undefined; + this.resetContour(); if (args.zrange) Object.assign(res, this.#getContourRanges(this.getMainPainter(), this.getFramePainter())); @@ -104524,6 +106832,9 @@ class TH3Painter extends THistPainter { const histo = this.getHisto(), fp = this.getFramePainter(); + // ensure proper colors + this.resetContour(); + let use_lambert = false, use_helper = false, use_colors = false, use_opacity = 1, exclude_content = -1, logv = this.getPadPainter()?.getRootPad()?.fLogv, @@ -104597,19 +106908,6 @@ class TH3Painter extends THistPainter { } else if (use_scale) use_scale = (this.gminbin || this.gmaxbin) ? 
1 / Math.max(Math.abs(this.gminbin), Math.abs(this.gmaxbin)) : 1; - const get_bin_weight = content => { - if ((exclude_content >= 0) && (content < exclude_content)) - return 0; - if (!use_scale) - return 1; - if (logv) { - if (content <= 0) - return 0; - content = Math.log(content) - scale_offset; - } - return Math.pow(Math.abs(content * use_scale), 0.3333); - }; - // eslint-disable-next-line one-var const i1 = this.getSelectIndex('x', 'left', 0.5), i2 = this.getSelectIndex('x', 'right', 0), j1 = this.getSelectIndex('y', 'left', 0.5), @@ -104623,7 +106921,19 @@ class TH3Painter extends THistPainter { const cntr = use_colors ? this.getContour() : null, palette = use_colors ? this.getHistPalette() : null, bins_matrixes = [], bins_colors = [], bins_ids = [], negative_matrixes = [], bin_opacities = [], - transfer = (this.transferFunc && proivdeEvalPar(this.transferFunc, true)) ? this.transferFunc : null; + transfer = (this.transferFunc && proivdeEvalPar(this.transferFunc, true)) ? this.transferFunc : null, + get_bin_weight = content => { + if ((exclude_content >= 0) && (content < exclude_content)) + return 0; + if (!use_scale) + return 1; + if (logv) { + if (content <= 0) + return 0; + content = Math.log(content) - scale_offset; + } + return Math.pow(Math.abs(content * use_scale), 0.3333); + }; for (let i = i1; i < i2; ++i) { const grx1 = fp.grx(histo.fXaxis.GetBinLowEdge(i + 1)), @@ -105029,11 +107339,14 @@ let TGraphPainter$1 = class TGraphPainter extends ObjectPainter { return true; let is_normal = false; - if (check_axis !== 'y') - is_normal ||= (histo.fXaxis.fXmin !== 0.0011) || (histo.fXaxis.fXmax !== 1.1); + if (check_axis !== 'y') { + is_normal ||= ((histo.fXaxis.fXmin !== 0.0011) && (histo.fXaxis.fXmin !== 0)) || + ((histo.fXaxis.fXmax !== 1.1) && (histo.fXaxis.fXmax !== 1)); + } if (check_axis !== 'x') { - is_normal ||= (histo.fYaxis.fXmin !== 0.0011) || (histo.fYaxis.fXmax !== 1.1) || + is_normal ||= ((histo.fYaxis.fXmin !== 0.0011) && (histo.fYaxis.fXmin 
!== 0)) || + ((histo.fYaxis.fXmax !== 1.1) && (histo.fYaxis.fXmax !== 1)) || (histo.fMinimum !== 0.0011) || (histo.fMaximum !== 1.1); } @@ -106693,8 +109006,6 @@ let TGraphPainter$1 = class TGraphPainter extends ObjectPainter { if (!st.fOptFit || this.getCanvPainter()?.getRootPad(true)?.fPrimitives?.arr.length) return null; - this.create_stats = true; - stats = create$1(clTPaveStats); Object.assign(stats, { fName: 'stats', fOptStat: 0, fOptFit: st.fOptFit, fBorderSize: 1, @@ -109829,7 +112140,6 @@ function createMatrix(matrix) { break; case 'TGeoGenTrans': scale = matrix.fScale; // no break, translation and rotation follows - // eslint-disable-next-line no-fallthrough case 'TGeoCombiTrans': translation = matrix.fTranslation; rotation = matrix.fRotation?.fRotationMatrix; @@ -110199,7 +112509,6 @@ createGeometry = function(shape, limit = 0) { case clTGeoHalfSpace: if (limit < 0) return 1; // half space if just plane used in composite - // eslint-disable-next-line no-fallthrough default: geoWarn(`unsupported shape type ${shape._typename}`); } @@ -110339,7 +112648,6 @@ function provideObjectInfo(obj) { break; case clTGeoTrd2: info.push(`Dy1=${conv(shape.fDy1)} Dy2=${conv(shape.fDy1)}`); // no break - // eslint-disable-next-line no-fallthrough case clTGeoTrd1: info.push(`Dx1=${conv(shape.fDx1)} Dx2=${conv(shape.fDx1)}`); break; @@ -110350,7 +112658,6 @@ function provideObjectInfo(obj) { break; case clTGeoConeSeg: info.push(`Phi1=${shape.fPhi1} Phi2=${shape.fPhi2}`); - // eslint-disable-next-line no-fallthrough case clTGeoCone: info.push(`Rmin1=${conv(shape.fRmin1)} Rmax1=${conv(shape.fRmax1)}`, `Rmin2=${conv(shape.fRmin2)} Rmax2=${conv(shape.fRmax2)}`); @@ -110358,7 +112665,6 @@ function provideObjectInfo(obj) { case clTGeoCtub: case clTGeoTubeSeg: info.push(`Phi1=${shape.fPhi1} Phi2=${shape.fPhi2}`); - // eslint-disable-next-line no-fallthrough case clTGeoEltu: case clTGeoTube: info.push(`Rmin=${conv(shape.fRmin)} Rmax=${conv(shape.fRmax)}`); @@ -110399,7 +112705,6 @@ 
function provideObjectInfo(obj) { break; case clTGeoGtra: info.push(`TwistAngle=${conv(shape.fTwistAngle)}`); - // eslint-disable-next-line no-fallthrough case clTGeoTrap: info.push(`Phi=${conv(shape.fPhi)} Theta=${conv(shape.fTheta)}`); break; @@ -117583,7 +119888,6 @@ class TGeoPainter extends ObjectPainter { break; case 'mix': this.#camera.add(new THREE.AmbientLight(0xefefef, p)); - // eslint-disable-next-line no-fallthrough default: // 6 point lights for (let n = 0; n < 6; ++n) { const l = new THREE.DirectionalLight(0xefefef, p); @@ -121143,7 +123447,7 @@ const clTStreamerElement = 'TStreamerElement', clTStreamerObject = 'TStreamerObj clTQObject = 'TQObject', clTBasket = 'TBasket', clTDatime = 'TDatime', nameStreamerInfo = 'StreamerInfo', - kChar = 1, kShort = 2, kInt = 3, kLong = 4, kFloat = 5, kCounter = 6, + kChar$1 = 1, kShort = 2, kInt = 3, kLong = 4, kFloat = 5, kCounter = 6, kCharStar = 7, kDouble = 8, kDouble32 = 9, kLegacyChar = 10, kUChar = 11, kUShort = 12, kUInt = 13, kULong = 14, kBits = 15, kLong64 = 16, kULong64 = 17, kBool = 18, kFloat16 = 19, @@ -121179,7 +123483,13 @@ const clTStreamerElement = 'TStreamerElement', clTStreamerObject = 'TStreamerObj StlNames = ['', 'vector', 'list', 'deque', 'map', 'multimap', 'set', 'multiset', 'bitset'], // TObject bits - kIsReferenced = BIT(4), kHasUUID = BIT(5); + kIsReferenced = BIT(4), kHasUUID = BIT(5), + + // gap in http which can be merged into single http request + kMinimalHttpGap = 128, + + // temporary name assigned for file derived from binary buffer + kTmpFileName = 'localfile.root'; /** @summary Custom streamers for root classes @@ -121783,7 +124093,7 @@ function getTypeId(typname, norecursion) { case 'Bool_t': return kBool; case 'char': case 'signed char': - case 'Char_t': return kChar; + case 'Char_t': return kChar$1; case 'Color_t': case 'Style_t': case 'Width_t': @@ -121844,7 +124154,7 @@ function getArrayKind(type_name) { case 'D': return kDouble; case 'F': return kFloat; case 'S': return 
kShort; - case 'C': return kChar; + case 'C': return kChar$1; case 'L': return kLong; default: return -1; } @@ -122109,7 +124419,7 @@ function readMapElement(buf) { /** @summary create member entry for streamer element * @desc used for reading of data * @private */ -function createMemberStreamer(element, file) { +function createMemberStreamer(element, file, no_string) { const member = { name: element.fName, type: element.fType, fArrayLength: element.fArrayLength, @@ -122175,6 +124485,7 @@ function createMemberStreamer(element, file) { case kOffsetL + kInt: case kOffsetL + kCounter: case kOffsetL + kDouble: + case kOffsetL + kChar$1: case kOffsetL + kUChar: case kOffsetL + kShort: case kOffsetL + kUShort: @@ -122187,36 +124498,35 @@ function createMemberStreamer(element, file) { case kOffsetL + kFloat: if (element.fArrayDim < 2) { member.arrlength = element.fArrayLength; - member.func = function(buf, obj) { - obj[this.name] = buf.readFastArray(this.arrlength, this.type - kOffsetL); - }; + if ((member.type !== kOffsetL + kChar$1) || no_string) { + member.func = function(buf, obj) { + obj[this.name] = buf.readFastArray(this.arrlength, this.type - kOffsetL); + }; + } else { + member.func = function(buf, obj) { + obj[this.name] = buf.readFastString(this.arrlength); + }; + } } else { - member.arrlength = element.fMaxIndex[element.fArrayDim - 1]; member.minus1 = true; - member.func = function(buf, obj) { - obj[this.name] = buf.readNdimArray(this, (buf2, handle) => - buf2.readFastArray(handle.arrlength, handle.type - kOffsetL)); - }; - } - break; - case kOffsetL + kChar: - if (element.fArrayDim < 2) { - member.arrlength = element.fArrayLength; - member.func = function(buf, obj) { - obj[this.name] = buf.readFastString(this.arrlength); - }; - } else { - member.minus1 = true; // one dimension used for char* member.arrlength = element.fMaxIndex[element.fArrayDim - 1]; - member.func = function(buf, obj) { - obj[this.name] = buf.readNdimArray(this, (buf2, handle) => - 
buf2.readFastString(handle.arrlength)); - }; + if ((member.type !== kOffsetL + kChar$1) || no_string) { + member.func = function(buf, obj) { + obj[this.name] = buf.readNdimArray(this, (buf2, handle) => + buf2.readFastArray(handle.arrlength, handle.type - kOffsetL)); + }; + } else { + member.func = function(buf, obj) { + obj[this.name] = buf.readNdimArray(this, (buf2, handle) => + buf2.readFastString(handle.arrlength)); + }; + } } break; case kOffsetP + kBool: case kOffsetP + kInt: case kOffsetP + kDouble: + case kOffsetP + kChar$1: case kOffsetP + kUChar: case kOffsetP + kShort: case kOffsetP + kUShort: @@ -122228,21 +124538,20 @@ function createMemberStreamer(element, file) { case kOffsetP + kLong64: case kOffsetP + kFloat: member.cntname = element.fCountName; - member.func = function(buf, obj) { - obj[this.name] = (buf.ntou1() === 1) ? buf.readFastArray(obj[this.cntname], this.type - kOffsetP) : []; - }; - break; - case kOffsetP + kChar: - member.cntname = element.fCountName; - member.func = function(buf, obj) { - obj[this.name] = (buf.ntou1() === 1) ? buf.readFastString(obj[this.cntname]) : null; - }; + if ((member.type !== kOffsetP + kChar$1) || no_string) { + member.func = function(buf, obj) { + obj[this.name] = (buf.ntou1() === 1) ? buf.readFastArray(obj[this.cntname], this.type - kOffsetP) : []; + }; + } else { + member.func = function(buf, obj) { + obj[this.name] = (buf.ntou1() === 1) ? 
buf.readFastString(obj[this.cntname]) : null; + }; + } break; case kDouble32: case kOffsetL + kDouble32: case kOffsetP + kDouble32: member.double32 = true; - // eslint-disable-next-line no-fallthrough case kFloat16: case kOffsetL + kFloat16: case kOffsetP + kFloat16: @@ -122250,20 +124559,17 @@ function createMemberStreamer(element, file) { member.factor = 1 / element.fFactor; member.min = element.fXmin; member.read = function(buf) { return buf.ntou4() * this.factor + this.min; }; - } else - if ((element.fXmin === 0) && member.double32) - member.read = function(buf) { return buf.ntof(); }; - else { - member.nbits = Math.round(element.fXmin); - if (member.nbits === 0) - member.nbits = 12; - member.dv = new DataView(new ArrayBuffer(8), 0); // used to cast from uint32 to float32 - member.read = function(buf) { - const theExp = buf.ntou1(), theMan = buf.ntou2(); - this.dv.setUint32(0, (theExp << 23) | ((theMan & ((1 << (this.nbits + 1)) - 1)) << (23 - this.nbits))); - return ((1 << (this.nbits + 1) & theMan) ? -1 : 1) * this.dv.getFloat32(0); - }; - } + } else if ((element.fXmin === 0) && member.double32) + member.read = function(buf) { return buf.ntof(); }; + else { + member.nbits = Math.round(element.fXmin) || 12; + member.dv = new DataView(new ArrayBuffer(8), 0); // used to cast from uint32 to float32 + member.read = function(buf) { + const theExp = buf.ntou1(), theMan = buf.ntou2(); + this.dv.setUint32(0, (theExp << 23) | ((theMan & ((1 << (this.nbits + 1)) - 1)) << (23 - this.nbits))); + return ((1 << (this.nbits + 1) & theMan) ? -1 : 1) * this.dv.getFloat32(0); + }; + } member.readarr = function(buf, len) { const arr = this.double32 ? 
new Float64Array(len) : new Float32Array(len); @@ -122274,23 +124580,21 @@ function createMemberStreamer(element, file) { if (member.type < kOffsetL) member.func = function(buf, obj) { obj[this.name] = this.read(buf); }; - else - if (member.type > kOffsetP) { - member.cntname = element.fCountName; - member.func = function(buf, obj) { - obj[this.name] = (buf.ntou1() === 1) ? this.readarr(buf, obj[this.cntname]) : null; - }; - } else - if (element.fArrayDim < 2) { - member.arrlength = element.fArrayLength; - member.func = function(buf, obj) { obj[this.name] = this.readarr(buf, this.arrlength); }; - } else { - member.arrlength = element.fMaxIndex[element.fArrayDim - 1]; - member.minus1 = true; - member.func = function(buf, obj) { - obj[this.name] = buf.readNdimArray(this, (buf2, handle) => handle.readarr(buf2, handle.arrlength)); - }; - } + else if (member.type > kOffsetP) { + member.cntname = element.fCountName; + member.func = function(buf, obj) { + obj[this.name] = (buf.ntou1() === 1) ? 
this.readarr(buf, obj[this.cntname]) : null; + }; + } else if (element.fArrayDim < 2) { + member.arrlength = element.fArrayLength; + member.func = function(buf, obj) { obj[this.name] = this.readarr(buf, this.arrlength); }; + } else { + member.arrlength = element.fMaxIndex[element.fArrayDim - 1]; + member.minus1 = true; + member.func = function(buf, obj) { + obj[this.name] = buf.readNdimArray(this, (buf2, handle) => handle.readarr(buf2, handle.arrlength)); + }; + } break; case kAnyP: @@ -122352,7 +124656,7 @@ function createMemberStreamer(element, file) { }; break; } - case kChar: + case kChar$1: member.func = function(buf, obj) { obj[this.name] = buf.ntoi1(); }; break; case kCharStar: @@ -123671,7 +125975,7 @@ class TBuffer { for (; i < n; ++i, o += 2) array[i] = view.getUint16(o); break; - case kChar: + case kChar$1: array = new Int8Array(n); for (; i < n; ++i) array[i] = view.getInt8(o++); @@ -124132,8 +126436,14 @@ class TFile { this.fAcceptRanges = false; } - const pos = Math.max(this.fURL.lastIndexOf('/'), this.fURL.lastIndexOf('\\')); - this.fFileName = pos >= 0 ? this.fURL.slice(pos + 1) : this.fURL; + this.assignFileName(this.fURL); + } + + assignFileName(url) { + if (isStr(url)) { + const pos = Math.max(url.lastIndexOf('/'), url.lastIndexOf('\\')); + this.fFileName = (pos >= 0) && (pos < url.length - 2) ? 
url.slice(pos + 1) : url; + } } /** @summary Set timeout for File instance @@ -124172,6 +126482,63 @@ class TFile { * @private */ async _open() { return this.readKeys(); } + /** @summary check if requested segments can be reordered or merged + * @private */ + #checkNeedReorder(place) { + let res = false, resort = false; + for (let n = 0; n < place.length - 2; n += 2) { + if (place[n] > place[n + 2]) + res = resort = true; + if (place[n] + place[n + 1] > place[n + 2] - kMinimalHttpGap) + res = true; + } + if (!res) { + return { + place, + blobs: [], + expectedSize(indx) { return this.place[indx + 1]; }, + addBuffer(indx, buf, o) { + this.blobs[indx / 2] = new DataView(buf, o, this.place[indx + 1]); + } + }; + } + + res = { place, reorder: [], place_new: [], blobs: [] }; + + for (let n = 0; n < place.length; n += 2) + res.reorder.push({ pos: place[n], len: place[n + 1], indx: [n] }); + + if (resort) + res.reorder.sort((a, b) => { return a.pos - b.pos; }); + + for (let n = 0; n < res.reorder.length - 1; n++) { + const curr = res.reorder[n], + next = res.reorder[n + 1]; + if (curr.pos + curr.len + kMinimalHttpGap > next.pos) { + curr.indx.push(...next.indx); + curr.len = next.pos + next.len - curr.pos; + res.reorder.splice(n + 1, 1); // remove segment + n--; + } + } + + res.reorder.forEach(elem => res.place_new.push(elem.pos, elem.len)); + + res.expectedSize = function(indx) { + return this.reorder[indx / 2].len; + }; + + res.addBuffer = function(indx, buf, o) { + const elem = this.reorder[indx / 2], + pos0 = elem.pos; + elem.indx.forEach(indx0 => { + this.blobs[indx0 / 2] = new DataView(buf, o + this.place[indx0] - pos0, this.place[indx0 + 1]); + }); + }; + + return res; + } + /** @summary read buffer(s) from the file * @return {Promise} with read buffers * @private */ @@ -124179,10 +126546,13 @@ class TFile { if ((this.fFileContent !== null) && !filename && (!this.fAcceptRanges || this.fFileContent.canExtract(place))) return this.fFileContent.extract(place); + const 
reorder = this.#checkNeedReorder(place); + if (reorder?.place_new) + place = reorder?.place_new; + let resolveFunc, rejectFunc; const file = this, first_block = (place[0] === 0) && (place.length === 2), - blobs = [], // array of requested segments promise = new Promise((resolve, reject) => { resolveFunc = resolve; rejectFunc = reject; @@ -124208,12 +126578,15 @@ class TFile { } } - function send_new_request(increment) { - if (increment) { + function send_new_request(arg) { + if (arg === 'noranges') { + file.fMaxRanges = 1; + last = Math.min(last, first + file.fMaxRanges * 2); + } else if (arg) { first = last; last = Math.min(first + file.fMaxRanges * 2, place.length); if (first >= place.length) - return resolveFunc(blobs); + return resolveFunc(reorder.blobs.length === 1 ? reorder.blobs[0] : reorder.blobs); } let fullurl = fileurl, ranges = 'bytes', totalsz = 0; @@ -124230,17 +126603,14 @@ class TFile { // when read first block, allow to read more - maybe ranges are not supported and full file content will be returned if (file.fAcceptRanges && first_block) - totalsz = Math.max(totalsz, 1e7); + totalsz = Math.max(totalsz, 1e5); - return createHttpRequest(fullurl, 'buf', read_callback, undefined, true).then(xhr => { + return createHttpRequest(fullurl, 'buf', read_callback, undefined, true, file.fTimeout).then(xhr => { if (file.fAcceptRanges) { xhr.setRequestHeader('Range', ranges); xhr.expected_size = Math.max(Math.round(1.1 * totalsz), totalsz + 200); // 200 if offset for the potential gzip } - if (file.fTimeout) - xhr.timeout = file.fTimeout; - if (isFunc(progress_callback) && isFunc(xhr.addEventListener)) { let sum1 = 0, sum2 = 0, sum_total = 0; for (let n = 1; n < place.length; n += 2) { @@ -124348,70 +126718,34 @@ class TFile { // if only single segment requested, return result as is if (last - first === 2) { - const b = new DataView(res); - if (place.length === 2) - return resolveFunc(b); - blobs.push(b); + reorder.addBuffer(first, res, 0); return 
send_new_request(true); } // object to access response data - const hdr = this.getResponseHeader('Content-Type'), - ismulti = isStr(hdr) && (hdr.indexOf('multipart') >= 0), - view = new DataView(res); - - if (!ismulti) { - // server may returns simple buffer, which combines all segments together - - const hdr_range = this.getResponseHeader('Content-Range'); - let segm_start = 0, segm_last = -1; - - if (isStr(hdr_range) && hdr_range.indexOf('bytes') >= 0) { - const parts = hdr_range.slice(hdr_range.indexOf('bytes') + 6).split(/[\s-/]+/); - if (parts.length === 3) { - segm_start = Number.parseInt(parts[0]); - segm_last = Number.parseInt(parts[1]); - if (!Number.isInteger(segm_start) || !Number.isInteger(segm_last) || (segm_start > segm_last)) { - segm_start = 0; - segm_last = -1; - } - } - } + const hdr = this.getResponseHeader('Content-Type'); - let canbe_single_segment = (segm_start <= segm_last); - for (let n = first; n < last; n += 2) { - if ((place[n] < segm_start) || (place[n] + place[n + 1] - 1 > segm_last)) - canbe_single_segment = false; - } - - if (canbe_single_segment) { - for (let n = first; n < last; n += 2) - blobs.push(new DataView(res, place[n] - segm_start, place[n + 1])); - return send_new_request(true); - } - - if ((file.fMaxRanges === 1) || !first) - return rejectFunc(Error('Server returns normal response when multipart was requested, disable multirange support')); - - file.fMaxRanges = 1; - last = Math.min(last, file.fMaxRanges * 2); - - return send_new_request(); + if (!isStr(hdr) || (hdr.indexOf('multipart') < 0)) { + console.error('Did not found multipart in content-type - fallback to single range request'); + return send_new_request('noranges'); } // multipart messages requires special handling const indx = hdr.indexOf('boundary='); - let boundary = '', n = first, o = 0, normal_order = true; - if (indx > 0) { - boundary = hdr.slice(indx + 9); - if ((boundary[0] === '"') && (boundary.at(-1) === '"')) - boundary = boundary.slice(1, 
boundary.length - 1); - boundary = '--' + boundary; - } else - console.error('Did not found boundary id in the response header'); + if (indx <= 0) { + console.error('Did not found boundary id in the response header - fallback to single range request'); + return send_new_request('noranges'); + } + + let boundary = hdr.slice(indx + 9); + if ((boundary[0] === '"') && (boundary.at(-1) === '"')) + boundary = boundary.slice(1, boundary.length - 1); + boundary = '--' + boundary; - while (n < last) { + const view = new DataView(res); + + for (let n = first, o = 0; n < last; n += 2) { let code1, code2 = view.getUint8(o), nline = 0, line = '', finish_header = false, segm_start = 0, segm_last = -1; @@ -124430,6 +126764,7 @@ class TFile { if (parts.length === 3) { segm_start = Number.parseInt(parts[0]); segm_last = Number.parseInt(parts[1]); + // TODO: check for consistency if (!Number.isInteger(segm_start) || !Number.isInteger(segm_last) || (segm_start > segm_last)) { segm_start = 0; segm_last = -1; @@ -124452,44 +126787,16 @@ class TFile { o++; } - if (!finish_header) - return rejectFunc(Error('Cannot decode header in multipart message')); - - if (segm_start > segm_last) { - // fall-back solution, believe that segments same as requested - blobs.push(new DataView(res, o, place[n + 1])); - o += place[n + 1]; - n += 2; - } else if (normal_order) { - const n0 = n; - while ((n < last) && (place[n] >= segm_start) && (place[n] + place[n + 1] - 1 <= segm_last)) { - blobs.push(new DataView(res, o + place[n] - segm_start, place[n + 1])); - n += 2; - } + const segm_size = segm_last - segm_start + 1; - if (n > n0) - o += (segm_last - segm_start + 1); - else - normal_order = false; + if (!finish_header || (segm_size <= 0) || (reorder.expectedSize(n) !== segm_size)) { + console.error('Failure decoding multirange header - fallback to single range request'); + return send_new_request('noranges'); } - if (!normal_order) { - // special situation when server reorder segments in the reply - let 
isany = false; - for (let n1 = n; n1 < last; n1 += 2) { - if ((place[n1] >= segm_start) && (place[n1] + place[n1 + 1] - 1 <= segm_last)) { - blobs[n1 / 2] = new DataView(res, o + place[n1] - segm_start, place[n1 + 1]); - isany = true; - } - } - if (!isany) - return rejectFunc(Error(`Provided fragment ${segm_start} - ${segm_last} out of requested multi-range request`)); - - while (blobs[n / 2]) - n += 2; + reorder.addBuffer(n, res, o); - o += (segm_last - segm_start + 1); - } + o += segm_size; } send_new_request(true); @@ -124788,10 +127095,14 @@ class TFile { // this part typically read from the header, no need to optimize return this.readBuffer([this.fBEGIN, Math.max(300, nbytes)]); }).then(blob3 => { - const buf3 = new TBuffer(blob3, 0, this); + const buf3 = new TBuffer(blob3, 0, this), + key = buf3.readTKey(); - // keep only title from TKey data - this.fTitle = buf3.readTKey().fTitle; + this.fTitle = key.fTitle; + if (this.fURL === kTmpFileName) { + this.fURL = this.fFullURL = key.fName; + this.assignFileName(key.fName); + } buf3.locate(this.fNbytesName); @@ -125071,9 +127382,8 @@ class TNodejsFile extends TFile { super(null); this.fUseStampPar = false; this.fEND = 0; - this.fFullURL = filename; - this.fURL = filename; - this.fFileName = filename; + this.fFullURL = this.fURL = filename; + this.assignFileName(filename); } /** @summary Open file in node.js @@ -125151,6 +127461,7 @@ class FileProxy { getFileName() { return ''; } getFileSize() { return 0; } async readBuffer(/* pos, sz */) { return null; } + closeFile() {} } // class FileProxy @@ -125177,12 +127488,8 @@ class TProxyFile extends TFile { if (!res) return false; this.fEND = this.proxy.getFileSize(); - this.fFullURL = this.fURL = this.fFileName = this.proxy.getFileName(); - if (isStr(this.fFileName)) { - const p = this.fFileName.lastIndexOf('/'); - if ((p > 0) && (p < this.fFileName.length - 4)) - this.fFileName = this.fFileName.slice(p + 1); - } + this.fFullURL = this.fURL = this.proxy.getFileName(); + 
this.assignFileName(this.fURL); return this.readKeys(); }); } @@ -125211,6 +127518,15 @@ class TProxyFile extends TFile { return Promise.all(arr); } + /** @summary Fully cleanup TProxyFile data + * @private */ + delete() { + super.delete(); + if (isFunc(this.proxy?.closeFile)) + this.proxy.closeFile(); + delete this.proxy; + } + } // class TProxyFile @@ -125245,7 +127561,7 @@ function openFile(arg, opts) { file = new TProxyFile(arg); if (!file && isObject(arg) && (arg instanceof ArrayBuffer)) { - file = new TFile('localfile.root'); + file = new TFile(kTmpFileName); file.assignFileContent(arg); } @@ -126182,6 +128498,10 @@ class TDrawSelector extends TSelector { case 'dump': args.dump = true; break; + case 'dumpall': + args.dump = true; + args.numentries = this.getNumEntries(tree); + break; case 'staged': args.staged = true; break; @@ -126228,7 +128548,10 @@ class TDrawSelector extends TSelector { } if (harg === 'dump') args.dump = true; - else if (harg === 'elist') + else if (harg === 'dumpall') { + args.dump = true; + args.numentries = this.getNumEntries(tree); + } else if (harg === 'elist') args.dump_entries = true; else if (harg.indexOf('Graph') === 0) args.graph = true; @@ -126404,7 +128727,7 @@ class TDrawSelector extends TSelector { this.leaf = args.leaf; // branch object remains, therefore we need to copy fields to see them all - this.copy_fields = ((args.branch.fLeaves?.arr.length > 1) || args.branch.fBranches?.arr.length) && !args.leaf; + this.copy_fields = args.copy_fields ?? (((args.branch.fLeaves?.arr.length > 1) || args.branch.fBranches?.arr.length) && !args.leaf); this.addBranch(branch, 'br0', args.direct_branch); // add branch @@ -127100,10 +129423,11 @@ async function treeProcess(tree, selector, args) { case 'TLeafF': datakind = kFloat; break; case 'TLeafD': datakind = kDouble; break; case 'TLeafO': datakind = kBool; break; - case 'TLeafB': datakind = leaf.fIsUnsigned ? kUChar : kChar; break; + case 'TLeafB': datakind = leaf.fIsUnsigned ? 
kUChar : kChar$1; break; case 'TLeafS': datakind = leaf.fIsUnsigned ? kUShort : kShort; break; case 'TLeafI': datakind = leaf.fIsUnsigned ? kUInt : kInt; break; case 'TLeafL': datakind = leaf.fIsUnsigned ? kULong64 : kLong64; break; + case 'TLeafG': datakind = leaf.fIsUnsigned ? kULong : kLong; break; case 'TLeafC': datakind = kTString; break; default: return null; } @@ -127752,7 +130076,7 @@ async function treeProcess(tree, selector, args) { for (let k = 0; k < handle.arr.length; ++k) { const elem = handle.arr[k]; - if ((elem.type <= 0) || (elem.type >= kOffsetL) || (elem.type === kCharStar)) + if ((elem.type <= 0) || (elem.type >= kOffsetL) || (elem.type === kCharStar) || (elem.type === kFloat16) || (elem.type === kDouble32)) handle.process_arrays = false; } @@ -127770,7 +130094,7 @@ async function treeProcess(tree, selector, args) { elem.fArrayDim = 1; elem.fMaxIndex[0] = 10; // 10 if artificial number, will be replaced during reading - item.arrmember = createMemberStreamer(elem, handle.file); + item.arrmember = createMemberStreamer(elem, handle.file, true); } } } else @@ -163189,22 +165513,22 @@ const drawFuncs = { lst: [ { name: 'kind:Command', icon: 'img_execute', execute: true }, { name: 'TFolder', icon: 'img_folder', icon2: 'img_folderopen', noinspect: true, get_expand: () => import_h().then(h => h.folderHierarchy) }, { name: 'TTask', icon: 'img_task', get_expand: () => import_h().then(h => h.taskHierarchy), for_derived: true }, - { name: clTTree, icon: 'img_tree', get_expand: () => Promise.resolve().then(function () { return tree; }).then(h => h.treeHierarchy), draw: () => import_tree().then(h => h.drawTree), dflt: 'expand', opt: 'player;testio', shift: kInspect, pm: true }, + { name: clTTree, icon: 'img_tree', get_expand: () => Promise.resolve().then(function () { return tree; }).then(h => h.treeHierarchy), draw: () => import_tree().then(h => h.drawTree), dflt: 'expand', opt: 'player;testio', shift: kInspect, pm: true, transform: true }, { name: 
'TNtuple', sameas: clTTree }, { name: 'TNtupleD', sameas: clTTree }, - { name: clTBranchFunc, icon: 'img_leaf_method', draw: () => import_tree().then(h => h.drawTree), opt: ';dump', noinspect: true }, - { name: /^TBranch/, icon: 'img_branch', draw: () => import_tree().then(h => h.drawTree), dflt: 'expand', opt: ';dump', ctrl: 'dump', shift: kInspect, ignore_online: true, always_draw: true }, - { name: /^TLeaf/, icon: 'img_leaf', noexpand: true, draw: () => import_tree().then(h => h.drawTree), opt: ';dump', ctrl: 'dump', ignore_online: true, always_draw: true }, - { name: 'ROOT::RNTuple', icon: 'img_tree', get_expand: () => Promise.resolve().then(function () { return rntuple; }).then(h => h.tupleHierarchy), draw: () => Promise.resolve().then(function () { return RNTuple; }).then(h => h.drawRNTuple), dflt: 'expand', pm: true }, - { name: 'ROOT::RNTupleField', icon: 'img_leaf', draw: () => Promise.resolve().then(function () { return RNTuple; }).then(h => h.drawRNTuple), opt: ';dump', ctrl: 'dump', shift: kInspect, ignore_online: true, always_draw: true }, + { name: clTBranchFunc, icon: 'img_leaf_method', draw: () => import_tree().then(h => h.drawTree), opt: ';dump;dumpall', noinspect: true, transform: true }, + { name: /^TBranch/, icon: 'img_branch', draw: () => import_tree().then(h => h.drawTree), dflt: 'expand', opt: ';dump;dumpall', ctrl: 'dump', shift: kInspect, ignore_online: true, always_draw: true, transform: true }, + { name: /^TLeaf/, icon: 'img_leaf', noexpand: true, draw: () => import_tree().then(h => h.drawTree), opt: ';dump;dumpall', ctrl: 'dump', ignore_online: true, always_draw: true, transform: true }, + { name: 'ROOT::RNTuple', icon: 'img_tree', get_expand: () => Promise.resolve().then(function () { return rntuple; }).then(h => h.tupleHierarchy), draw: () => Promise.resolve().then(function () { return RNTuple; }).then(h => h.drawRNTuple), dflt: 'expand', pm: true, transform: true }, + { name: 'ROOT::RNTupleField', icon: 'img_leaf', draw: () => 
Promise.resolve().then(function () { return RNTuple; }).then(h => h.drawRNTuple), opt: ';dump;dumpall', ctrl: 'dump', shift: kInspect, ignore_online: true, always_draw: true, transform: true }, { name: clTList, icon: 'img_list', draw: () => import_h().then(h => h.drawList), get_expand: () => import_h().then(h => h.listHierarchy), dflt: 'expand' }, { name: clTHashList, sameas: clTList }, { name: clTObjArray, sameas: clTList }, { name: clTClonesArray, sameas: clTList }, { name: clTMap, sameas: clTList }, { name: clTColor, icon: 'img_color' }, - { name: clTFile, icon: 'img_file', noinspect: true }, - { name: 'TMemFile', icon: 'img_file', noinspect: true }, + { name: clTFile, icon: 'img_file', noinspect: true, pm: true }, + { name: 'TMemFile', icon: 'img_file', noinspect: true, pm: true }, { name: clTStyle, icon: 'img_question', noexpand: true }, { name: 'Session', icon: 'img_globe' }, { name: 'kind:TopFolder', icon: 'img_base' }, @@ -163228,19 +165552,28 @@ const drawFuncs = { lst: [ /** @summary Register draw function for the class - * @desc List of supported draw options could be provided, separated with ';' * @param {object} args - arguments - * @param {string|regexp} args.name - class name or regexp pattern + * @param {string|regexp} args.name - class name or regexp pattern or '*' * @param {function} [args.func] - draw function + * @param {string} [args.sameas] - let behave same as specified class * @param {function} [args.draw] - async function to load draw function * @param {function} [args.class] - async function to load painter class with static draw function * @param {boolean} [args.direct] - if true, function is just Redraw() method of ObjectPainter * @param {string} [args.opt] - list of supported draw options (separated with semicolon) like 'col;scat;' * @param {string} [args.icon] - icon name shown for the class in hierarchy browser * @param {string} [args.draw_field] - draw only data member from object, like fHistogram + * @param {string} [args.noinspect] 
- disable inspect + * @param {string} [args.noexpand] - disable expand + * @param {string} [args.pm] - always show plus or minus sign even when no child items exists + * @desc List of supported draw options could be provided, separated with ';' + * If args.name parameter is '*', function will be invoked before object drawing. + * If such function does not return value - normal drawing will be continued. * @protected */ function addDrawFunc(args) { - drawFuncs.lst.push(args); + if (args?.name === '*') + internals._alt_draw = isFunc(args.func) ? args.func : null; + else + drawFuncs.lst.push(args); return args; } @@ -163451,6 +165784,12 @@ async function draw(dom, obj, opt) { if (handle.draw_field && obj[handle.draw_field]) return draw(dom, obj[handle.draw_field], opt || handle.draw_field_opt); + if (internals._alt_draw && !handle.transform) { + const v = internals._alt_draw(dom, obj, opt); + if (v) + return v; + } + if (!canDrawHandle(handle)) { if (opt && (opt.indexOf('same') >= 0)) { const main_painter = getElementMainPainter(dom); @@ -164388,7 +166727,11 @@ function objectHierarchy(top, obj, args = undefined) { } } } - } else if ((typeof fld === 'number') || (typeof fld === 'boolean') || (typeof fld === 'bigint')) { + } else if (typeof fld === 'bigint') { + simple = true; + item._value = fld.toString() + 'n'; + item._vclass = cssValueNum; + } else if ((typeof fld === 'number') || (typeof fld === 'boolean')) { simple = true; if (key === 'fBits') item._value = '0x' + fld.toString(16); @@ -164588,7 +166931,6 @@ function parseAsArray(val) { nbr--; break; } - // eslint-disable-next-line no-fallthrough case ',': if (nbr === 0) { let sub = val.substring(last, indx).trim(); @@ -165248,6 +167590,9 @@ class HierarchyPainter extends BasePainter { if (!element_title) element_title = element_name; + if (hitem._filter) + element_name += ' *'; + d3a.attr('title', element_title) .text(element_name + ('_value' in hitem ? ':' : '')) .style('background', hitem._background ? 
hitem._background : null); @@ -165267,6 +167612,8 @@ class HierarchyPainter extends BasePainter { for (let i = 0; i < hitem._childs.length; ++i) { const chld = hitem._childs[i]; chld._parent = hitem; + if (hitem._filter && chld._name && chld._name.indexOf(hitem._filter) < 0) + continue; if (!this.addItemHtml(chld, d3chlds, i)) break; // if too many items, skip rest } @@ -165930,6 +168277,15 @@ class HierarchyPainter extends BasePainter { if (hitem._childs === undefined) menu.add('Expand', () => this.expandItem(itemname), 'Expand content of object'); else { + if (sett.handle?.pm || (hitem._childs.length > 25)) { + menu.add('Filter...', () => menu.input('Enter items to select', hitem._filter, f => { + const changed = hitem._filter !== f; + hitem._filter = f; + if (changed) + this.updateTreeNode(hitem); + }), 'Filter out items based on input pattern'); + } + menu.add('Unexpand', () => { hitem._more = true; delete hitem._childs; @@ -166107,10 +168463,8 @@ class HierarchyPainter extends BasePainter { if (use_dflt_opt && !drawopt && handle?.dflt && (handle.dflt !== kExpand)) drawopt = handle.dflt; - if (dom) { - const func = updating ? redraw : draw; - return func(dom, obj, drawopt).then(p => complete(p)).catch(err => complete(null, err)); - } + if (dom) + return (updating ? 
redraw : draw)(dom, obj, drawopt).then(p => complete(p)).catch(err => complete(null, err)); let did_activate = false; const arr = []; @@ -166155,8 +168509,8 @@ class HierarchyPainter extends BasePainter { mdi.activateFrame(frame); return draw(frame, obj, drawopt) - .then(p => complete(p)) - .catch(err => complete(null, err)); + .then(p => complete(p)) + .catch(err => complete(null, err)); }); }); } @@ -167113,7 +169467,7 @@ class HierarchyPainter extends BasePainter { handleAfterRequest(findFunction(item._after_request)); // v6 support } else handleAfterRequest(draw_handle?.after_request); - }, undefined, true).then(xhr => { + }, undefined, true, settings.ServerTimeout).then(xhr => { itemreq = xhr; xhr.send(null); }); @@ -167429,7 +169783,7 @@ class HierarchyPainter extends BasePainter { } // check that we can found frame where drawing should be done - if (!document.getElementById(this.disp_frameid)) + if (!this.disp_frameid || !document.getElementById(this.disp_frameid)) return null; if (isBatchMode()) @@ -167629,8 +169983,8 @@ class HierarchyPainter extends BasePainter { if (!browser_configured && (browser.screenWidth <= 640)) browser_kind = 'float'; - this.no_select = getOption('noselect'); - this.top_info = getOption('info'); + this.no_select ??= getOption('noselect'); + this.top_info ??= getOption('info'); if (getOption('files_monitoring') !== null) this.files_monitoring = true; @@ -167890,7 +170244,7 @@ class HierarchyPainter extends BasePainter { ''; } else if (!this.no_select) { const myDiv = select('#' + this.gui_div), - files = myDiv.attr('files') || '../files/hsimple.root', + files = myDiv.attr('files') || 'https://root.cern/js/files/hsimple.root', path = decodeUrl().get('path') || myDiv.attr('path') || '', arrFiles = files.split(';'); @@ -167924,6 +170278,7 @@ class HierarchyPainter extends BasePainter { const title_elem = this.brlayout.setBrowserTitle(this.top_info || (this.is_online ? 
'ROOT online server' : 'Read a ROOT file')); title_elem?.on('contextmenu', evnt => { evnt.preventDefault(); + evnt.stopPropagation(); createMenu(evnt).then(menu => { this.fillSettingsMenu(menu, true); menu.show(); @@ -168251,6 +170606,12 @@ function readStyleFromURL(url) { if (d.has('prefer_saved_points')) settings.PreferSavedPoints = true; + if (d.has('tmout')) + settings.ServerTimeout = parseFloat(d.get('tmout')); + + if (d.has('ftmout')) + settings.FilesTimeout = parseFloat(d.get('ftmout')); + const tf1_style = d.get('tf1'); if (tf1_style === 'curve') settings.FuncAsCurve = true; @@ -168442,7 +170803,7 @@ async function buildGUI(gui_element, gui_kind = '') { myDiv.html(''); // clear element - const d = decodeUrl(), getSize = name => { + const nb = (gui_kind === 'notebook'), d = decodeUrl(), getSize = name => { const res = d.has(name) ? d.get(name).split('x') : []; if (res.length !== 2) return null; @@ -168457,7 +170818,7 @@ async function buildGUI(gui_element, gui_kind = '') { else if ((gui_kind === 'nobrowser') || d.has('nobrowser') || (myDiv.attr('nobrowser') && myDiv.attr('nobrowser') !== 'false')) nobrowser = true; - if (myDiv.attr('ignoreurl') === 'true') + if (nb || (myDiv.attr('ignoreurl') === 'true')) settings.IgnoreUrlOptions = true; readStyleFromURL(); @@ -168488,6 +170849,10 @@ async function buildGUI(gui_element, gui_kind = '') { if (drawing || isBatchMode()) hpainter.exclude_browser = true; hpainter.start_without_browser = nobrowser; + if (nb) { + hpainter.no_select = true; + hpainter.top_info = 'ROOT notebook'; + } return hpainter.startGUI(myDiv).then(() => { if (!nobrowser) @@ -168893,6 +171258,12 @@ async function drawTreeDrawResult(dom, obj, opt) { if (!typ || !isStr(typ)) return Promise.reject(Error('Object without type cannot be draw with TTree')); + if (internals._alt_draw) { + const v = internals._alt_draw(dom, obj, opt); + if (v) + return v; + } + if (typ.indexOf(clTH1) === 0) return TH1Painter.draw(dom, obj, opt); if (typ.indexOf(clTH2) 
=== 0) @@ -169345,6 +171716,11 @@ class TTextPainter extends ObjectPainter { pp = this.getPadPainter(), fp = this.getFramePainter(), is_url = text.fName.startsWith('http://') || text.fName.startsWith('https://'); + + // special handling of dummy frame painter + if (fp?.getDrawDom() === null) + return this; + let fact = 1, use_frame = false; this.createAttText({ attr: text }); @@ -169828,7 +172204,11 @@ let THStackPainter$2 = class THStackPainter extends ObjectPainter { o.auto += ' ' + f; }); - o.pads = d.check('PADS'); + if (d.check('PADS', true)) { + o.pads = true; + o.pads_columns = d.partAsInt(); + } + if (o.pads) o.nostack = true; @@ -170045,7 +172425,12 @@ let THStackPainter$2 = class THStackPainter extends ObjectPainter { if (o.pads) { pr = ensureTCanvas(this, false).then(() => { pad_painter = this.getPadPainter(); - return pad_painter.divide(o.nhist, 0, true); + let nx = o.nhist, ny = 0; + if (o.pads_columns) { + nx = o.pads_columns; + ny = Math.ceil(o.nhist / nx); + } + return pad_painter.divide(nx, ny, true); }); } else { if (!o.nostack) @@ -174052,6 +176437,7 @@ let TMultiGraphPainter$2 = class TMultiGraphPainter extends ObjectPainter { #auto; // extra options for auto colors #is3d; // if 3d drawing #pads; // pads draw option + #pads_columns; // number pads columns /** @summary Create painter * @param {object|string} dom - DOM element for drawing or element id @@ -174345,7 +176731,9 @@ let TMultiGraphPainter$2 = class TMultiGraphPainter extends ObjectPainter { this.#is3d = d.check('3D'); this.#auto = ''; - this.#pads = d.check('PADS'); + this.#pads = d.check('PADS', true); + if (this.#pads) + this.#pads_columns = d.partAsInt(); ['PFC', 'PLC', 'PMC'].forEach(f => { if (d.check(f)) this.#auto += ' ' + f; @@ -174365,7 +176753,12 @@ let TMultiGraphPainter$2 = class TMultiGraphPainter extends ObjectPainter { if (this.#pads) { promise = ensureTCanvas(this, false).then(() => { pad_painter = this.getPadPainter(); - return 
pad_painter.divide(mgraph.fGraphs.arr.length, 0, true); + let nx = mgraph.fGraphs.arr.length, ny = 0; + if (this.#pads_columns) { + ny = Math.ceil(nx / this.#pads_columns); + nx = this.#pads_columns; + } + return pad_painter.divide(nx, ny, true); }); } else if (d.check('A') || !this.getMainPainter()) { const histo = this.scanGraphsRange(mgraph.fGraphs, mgraph.fHistogram, this.getPadPainter()?.getRootPad(true)); @@ -176511,7 +178904,7 @@ class TGaxisPainter extends TAxisPainter { const res = function(v) { return res.toGraph(v); }; res._func = func; res._domain = [smin, smax]; - res._scale = logbase ? log().base(logbase) : linear(); + res._scale = logbase ? log$1().base(logbase) : linear(); res._scale.domain(res._domain).range([0, 100]); res.eval = function(v) { try { @@ -177184,7 +179577,40 @@ __proto__: null, TASImagePainter: TASImagePainter }); -const LITTLE_ENDIAN = true; +// ENTupleColumnType - supported column types + +const kBit = 0x00, + kByte = 0x01, + kChar = 0x02, + kInt8 = 0x03, + kUInt8 = 0x04, + kInt16 = 0x05, + kUInt16 = 0x06, + kInt32 = 0x07, + kUInt32 = 0x08, + kInt64 = 0x09, + kUInt64 = 0x0A, + kReal16 = 0x0B, + kReal32 = 0x0C, + kReal64 = 0x0D, + kIndex32 = 0x0E, + kIndex64 = 0x0F, + kSwitch = 0x10, + kSplitInt16 = 0x11, + kSplitUInt16 = 0x12, + kSplitInt32 = 0x13, + kSplitUInt32 = 0x14, + kSplitInt64 = 0x15, + kSplitUInt64 = 0x16, + kSplitReal16 = 0x17, + kSplitReal32 = 0x18, + kSplitReal64 = 0x19, + kSplitIndex32 = 0x1A, + kSplitIndex64 = 0x1B, + kReal32Trunc = 0x1C, + kReal32Quant = 0x1D, + LITTLE_ENDIAN = true; + class RBufferReader { constructor(buffer) { @@ -177296,192 +179722,99 @@ class RBufferReader { } -const ENTupleColumnType = { - kBit: 0x00, - kByte: 0x01, - kChar: 0x02, - kInt8: 0x03, - kUInt8: 0x04, - kInt16: 0x05, - kUInt16: 0x06, - kInt32: 0x07, - kUInt32: 0x08, - kInt64: 0x09, - kUInt64: 0x0A, - kReal16: 0x0B, - kReal32: 0x0C, - kReal64: 0x0D, - kIndex32: 0x0E, - kIndex64: 0x0F, - kSplitInt16: 0x11, - kSplitUInt16: 0x12, - 
kSplitInt32: 0x13, - kSplitUInt32: 0x14, - kSplitInt64: 0x15, - kSplitUInt64: 0x16, - kSplitReal16: 0x17, - kSplitReal32: 0x18, - kSplitReal64: 0x19, - kSplitIndex32: 0x1A, - kSplitIndex64: 0x1B}; - - -/** - * @summary Rearrange bytes from split format to normal format (row-wise) for decoding - */ -function recontructUnsplitBuffer(blob, columnDescriptor) { - const { coltype } = columnDescriptor; - - if ( - coltype === ENTupleColumnType.kSplitUInt16 || - coltype === ENTupleColumnType.kSplitUInt32 || - coltype === ENTupleColumnType.kSplitUInt64 || - coltype === ENTupleColumnType.kSplitReal16 || - coltype === ENTupleColumnType.kSplitReal32 || - coltype === ENTupleColumnType.kSplitReal64 || - coltype === ENTupleColumnType.kSplitIndex32 || - coltype === ENTupleColumnType.kSplitIndex64 || - coltype === ENTupleColumnType.kSplitInt16 || - coltype === ENTupleColumnType.kSplitInt32 || - coltype === ENTupleColumnType.kSplitInt64 - ) { - // Determine byte size based on column type - let byteSize; - switch (coltype) { - case ENTupleColumnType.kSplitReal64: - case ENTupleColumnType.kSplitInt64: - case ENTupleColumnType.kSplitUInt64: - case ENTupleColumnType.kSplitIndex64: - byteSize = 8; - break; - case ENTupleColumnType.kSplitReal32: - case ENTupleColumnType.kSplitInt32: - case ENTupleColumnType.kSplitIndex32: - case ENTupleColumnType.kSplitUInt32: - byteSize = 4; - break; - case ENTupleColumnType.kSplitInt16: - case ENTupleColumnType.kSplitUInt16: - case ENTupleColumnType.kSplitReal16: - byteSize = 2; - break; - default: - throw new Error(`Unsupported split coltype: ${coltype} (0x${coltype.toString(16).padStart(2, '0')})`); - } - const splitView = new DataView(blob.buffer, blob.byteOffset, blob.byteLength), - count = blob.byteLength / byteSize, - outBuffer = new ArrayBuffer(blob.byteLength), - outBytes = new Uint8Array(outBuffer); +/** @summary Rearrange bytes from split format to normal format (row-wise) for decoding + * @private */ +function recontructUnsplitBuffer(view, 
coltype) { + // Determine byte size based on column type + let byteSize; + switch (coltype) { + case kSplitReal64: + case kSplitInt64: + case kSplitUInt64: + case kSplitIndex64: + byteSize = 8; + break; + case kSplitReal32: + case kSplitInt32: + case kSplitIndex32: + case kSplitUInt32: + byteSize = 4; + break; + case kSplitInt16: + case kSplitUInt16: + case kSplitReal16: + byteSize = 2; + break; + default: + return view; + } - for (let i = 0; i < count; ++i) { - for (let b = 0; b < byteSize; ++b) { - const splitIndex = b * count + i, - byte = splitView.getUint8(splitIndex), - writeIndex = i * byteSize + b; - outBytes[writeIndex] = byte; - } - } + const count = view.byteLength / byteSize, + outBuffer = new ArrayBuffer(view.byteLength), + outView = new DataView(outBuffer); - // Return updated blob and remapped coltype - const newBlob = outBuffer; - let newColtype; - switch (coltype) { - case ENTupleColumnType.kSplitUInt16: - newColtype = ENTupleColumnType.kUInt16; - break; - case ENTupleColumnType.kSplitUInt32: - newColtype = ENTupleColumnType.kUInt32; - break; - case ENTupleColumnType.kSplitUInt64: - newColtype = ENTupleColumnType.kUInt64; - break; - case ENTupleColumnType.kSplitIndex32: - newColtype = ENTupleColumnType.kIndex32; - break; - case ENTupleColumnType.kSplitIndex64: - newColtype = ENTupleColumnType.kIndex64; - break; - case ENTupleColumnType.kSplitReal16: - newColtype = ENTupleColumnType.kReal16; - break; - case ENTupleColumnType.kSplitReal32: - newColtype = ENTupleColumnType.kReal32; - break; - case ENTupleColumnType.kSplitReal64: - newColtype = ENTupleColumnType.kReal64; - break; - case ENTupleColumnType.kSplitInt16: - newColtype = ENTupleColumnType.kInt16; - break; - case ENTupleColumnType.kSplitInt32: - newColtype = ENTupleColumnType.kInt32; - break; - case ENTupleColumnType.kSplitInt64: - newColtype = ENTupleColumnType.kInt64; - break; - default: - throw new Error(`Unsupported split coltype for reassembly: ${coltype}`); + for (let i = 0; i < count; 
++i) { + for (let b = 0; b < byteSize; ++b) { + const splitIndex = b * count + i, + byte = view.getUint8(splitIndex), + writeIndex = i * byteSize + b; + outView.setUint8(writeIndex, byte); } - - return { blob: newBlob, coltype: newColtype }; } - // If no split type, return original blob and coltype - return { blob, coltype }; + return outView; } +/** @summary Decode a 32 bit intex buffer + * @private */ +function decodeIndex32(view) { + for (let o = 0, prev = 0; o < view.byteLength; o += 4) { + const v = prev + view.getInt32(o, LITTLE_ENDIAN); + view.setInt32(o, v, LITTLE_ENDIAN); + prev = v; + } +} -/** - * @summary Decode a reconstructed index buffer (32- or 64-bit deltas to absolute indices) - */ -function DecodeDeltaIndex(blob, coltype) { - let deltas, result; - - if (coltype === ENTupleColumnType.kIndex32) { - deltas = new Int32Array(blob.buffer || blob, blob.byteOffset || 0, blob.byteLength / 4); - result = new Int32Array(deltas.length); - } else if (coltype === ENTupleColumnType.kIndex64) { - deltas = new BigInt64Array(blob.buffer || blob, blob.byteOffset || 0, blob.byteLength / 8); - result = new BigInt64Array(deltas.length); - } else - throw new Error(`DecodeDeltaIndex: unsupported column type ${coltype}`); +/** @summary Decode a 64 bit intex buffer + * @private */ +function decodeIndex64(view, shift) { + for (let o = 0, prev = 0n; o < view.byteLength; o += (8 + shift)) { + const v = prev + view.getBigInt64(o, LITTLE_ENDIAN); + view.setBigInt64(o, v, LITTLE_ENDIAN); + prev = v; + } +} - if (deltas.length > 0) - result[0] = deltas[0]; - for (let i = 1; i < deltas.length; ++i) - result[i] = result[i - 1] + deltas[i]; - return { blob: result, coltype }; +/** @summary Decode a reconstructed 16bit signed integer buffer using ZigZag encoding + * @private */ +function decodeZigzag16(view) { + for (let o = 0; o < view.byteLength; o += 2) { + const x = view.getUint16(o, LITTLE_ENDIAN); + view.setInt16(o, (x >>> 1) ^ (-(x & 1)), LITTLE_ENDIAN); + } } -/** - * 
@summary Decode a reconstructed signed integer buffer using ZigZag encoding - */ -function decodeZigzag(blob, coltype) { - let zigzag, result; - - if (coltype === ENTupleColumnType.kInt16) { - zigzag = new Uint16Array(blob.buffer || blob, blob.byteOffset || 0, blob.byteLength / 2); - result = new Int16Array(zigzag.length); - } else if (coltype === ENTupleColumnType.kInt32) { - zigzag = new Uint32Array(blob.buffer || blob, blob.byteOffset || 0, blob.byteLength / 4); - result = new Int32Array(zigzag.length); - } else if (coltype === ENTupleColumnType.kInt64) { - zigzag = new BigUint64Array(blob.buffer || blob, blob.byteOffset || 0, blob.byteLength / 8); - result = new BigInt64Array(zigzag.length); - } else - throw new Error(`decodeZigzag: unsupported column type ${coltype}`); - - for (let i = 0; i < zigzag.length; ++i) { - // ZigZag decode: (x >>> 1) ^ (-(x & 1)) - const x = zigzag[i]; - result[i] = (x >>> 1) ^ (-(x & 1)); +/** @summary Decode a reconstructed 32bit signed integer buffer using ZigZag encoding + * @private */ +function decodeZigzag32(view) { + for (let o = 0; o < view.byteLength; o += 4) { + const x = view.getUint32(o, LITTLE_ENDIAN); + view.setInt32(o, (x >>> 1) ^ (-(x & 1)), LITTLE_ENDIAN); } +} - return { blob: result, coltype }; +/** @summary Decode a reconstructed 64bit signed integer buffer using ZigZag encoding + * @private */ +function decodeZigzag64(view) { + for (let o = 0; o < view.byteLength; o += 8) { + const x = view.getUint64(o, LITTLE_ENDIAN); + view.setInt64(o, (x >>> 1) ^ (-(x & 1)), LITTLE_ENDIAN); + } } + // Envelope Types // TODO: Define usage logic for envelope types in future // const kEnvelopeTypeHeader = 0x01, @@ -177504,13 +179837,9 @@ class RNTupleDescriptorBuilder { return; const reader = new RBufferReader(header_blob), - payloadStart = reader.offset, // Read the envelope metadata - { - envelopeLength - } = this._readEnvelopeMetadata(reader), - + { envelopeLength } = this._readEnvelopeMetadata(reader), // Seek to end of 
envelope to get checksum checksumPos = payloadStart + envelopeLength - 8, currentPos = reader.offset; @@ -177541,7 +179870,6 @@ class RNTupleDescriptorBuilder { // Read the envelope metadata this._readEnvelopeMetadata(reader); - // Feature flag(32 bits) this._readFeatureFlags(reader); // Header checksum (64-bit xxhash3) @@ -177550,7 +179878,6 @@ class RNTupleDescriptorBuilder { throw new Error('RNTuple corrupted: header checksum does not match footer checksum.'); const schemaExtensionSize = reader.readS64(); - if (schemaExtensionSize < 0) throw new Error('Schema extension frame is not a record frame, which is unexpected.'); @@ -177561,10 +179888,8 @@ class RNTupleDescriptorBuilder { this._readClusterGroups(reader); } - _readEnvelopeMetadata(reader) { const typeAndLength = reader.readU64(), - // Envelope metadata // The 16 bits are the envelope type ID, and the 48 bits are the envelope length envelopeType = Number(typeAndLength & 0xFFFFn), @@ -177590,7 +179915,6 @@ class RNTupleDescriptorBuilder { this.extraTypeInfo = (this.extraTypeInfo || []).concat(newExtra); } - _readFeatureFlags(reader) { this.featureFlags = []; while (true) { @@ -177610,14 +179934,12 @@ class RNTupleDescriptorBuilder { fieldListSize = reader.readS64(), // signed 64-bit fieldListIsList = fieldListSize < 0; - if (!fieldListIsList) throw new Error('Field list frame is not a list frame, which is required.'); const fieldListCount = reader.readU32(), // number of field entries - // List frame: list of field record frames + fieldDescriptors = []; // List frame: list of field record frames - fieldDescriptors = []; for (let i = 0; i < fieldListCount; ++i) { const recordStart = BigInt(reader.offset), fieldRecordSize = reader.readS64(), @@ -177626,7 +179948,6 @@ class RNTupleDescriptorBuilder { parentFieldId = reader.readU32(), structRole = reader.readU16(), flags = reader.readU16(), - fieldName = reader.readString(), typeName = reader.readString(), typeAlias = reader.readString(), @@ -177644,7 +179965,6 
@@ class RNTupleDescriptorBuilder { if (flags & kFlagHasTypeChecksum) checksum = reader.readU32(); - fieldDescriptors.push({ fieldVersion, typeVersion, @@ -177693,7 +180013,6 @@ class RNTupleDescriptorBuilder { maxValue = reader.readF64(); } - const column = { coltype, bitsOnStorage, @@ -177718,6 +180037,7 @@ class RNTupleDescriptorBuilder { reader.seek(Number(startOffset - columnListSize)); return columnDescriptors; } + _readAliasColumn(reader) { const startOffset = BigInt(reader.offset), aliasColumnListSize = reader.readS64(), @@ -177740,6 +180060,7 @@ class RNTupleDescriptorBuilder { reader.seek(Number(startOffset - aliasColumnListSize)); return aliasColumns; } + _readExtraTypeInformation(reader) { const startOffset = BigInt(reader.offset), extraTypeInfoListSize = reader.readS64(), @@ -177749,7 +180070,6 @@ class RNTupleDescriptorBuilder { throw new Error('Extra type info frame is not a list frame, which is required.'); const entryCount = reader.readU32(), - extraTypeInfo = []; for (let i = 0; i < entryCount; ++i) { const recordStart = BigInt(reader.offset), @@ -177765,6 +180085,7 @@ class RNTupleDescriptorBuilder { reader.seek(Number(startOffset - extraTypeInfoListSize)); return extraTypeInfo; } + _readClusterGroups(reader) { const startOffset = BigInt(reader.offset), clusterGroupListSize = reader.readS64(), @@ -177772,9 +180093,8 @@ class RNTupleDescriptorBuilder { if (!isList) throw new Error('Cluster group frame is not a list frame'); - const groupCount = reader.readU32(), - - clusterGroups = []; + const groupCount = reader.readU32(); + this.clusterGroups = []; for (let i = 0; i < groupCount; ++i) { const recordStart = BigInt(reader.offset), @@ -177783,24 +180103,12 @@ class RNTupleDescriptorBuilder { entrySpan = reader.readU64(), numClusters = reader.readU32(), pageListLength = reader.readU64(), - - - // Locator method to get the page list locator offset - pageListLocator = this._readLocator(reader), - - - group = { - minEntry, - entrySpan, - numClusters, - 
pageListLocator, - pageListLength - }; - clusterGroups.push(group); + // Locator method to get the page list locator offset + pageListLocator = this._readLocator(reader); + this.clusterGroups.push({ minEntry, entrySpan, numClusters, pageListLocator, pageListLength }); reader.seek(Number(recordStart + clusterRecordSize)); } reader.seek(Number(startOffset - clusterGroupListSize)); - this.clusterGroups = clusterGroups; } _readLocator(reader) { @@ -177809,11 +180117,9 @@ class RNTupleDescriptorBuilder { throw new Error('Non-standard locators (T=1) not supported yet'); const size = sizeAndType, offset = reader.readU64(); // 8 bytes: offset - return { - size, - offset - }; + return { size, offset }; } + deserializePageList(page_list_blob) { if (!page_list_blob) throw new Error('deserializePageList: received an invalid or empty page list blob'); @@ -177830,39 +180136,34 @@ class RNTupleDescriptorBuilder { clusterSummaryListSize = reader.readS64(); if (clusterSummaryListSize >= 0) throw new Error('Expected a list frame for cluster summaries'); - const clusterSummaryCount = reader.readU32(), - - clusterSummaries = []; + const clusterSummaryCount = reader.readU32(); + this.clusterSummaries = []; for (let i = 0; i < clusterSummaryCount; ++i) { const recordStart = BigInt(reader.offset), clusterSummaryRecordSize = reader.readS64(), firstEntry = reader.readU64(), combined = reader.readU64(), - flags = combined >> 56n; + flags = combined >> 56n, + numEntries = Number(combined & 0x00FFFFFFFFFFFFFFn); if (flags & 0x01n) throw new Error('Cluster summary uses unsupported sharded flag (0x01)'); - const numEntries = Number(combined & 0x00FFFFFFFFFFFFFFn); - clusterSummaries.push({ - firstEntry, - numEntries, - flags - }); + this.clusterSummaries.push({ firstEntry, numEntries, flags }); reader.seek(Number(recordStart + clusterSummaryRecordSize)); } reader.seek(Number(listStartOffset - clusterSummaryListSize)); - this.clusterSummaries = clusterSummaries; this._readNestedFrames(reader); - 
/* const checksumPagelist = */ reader.readU64(); + reader.readU64(); // checksumPagelist } _readNestedFrames(reader) { - const clusterPageLocations = [], - numListClusters = reader.readS64(); + const numListClusters = reader.readS64(), + numRecordCluster = reader.readU32(); if (numListClusters >= 0) throw new Error('Expected list frame for clusters'); - const numRecordCluster = reader.readU32(); + + this.pageLocations = []; for (let i = 0; i < numRecordCluster; ++i) { const outerListSize = reader.readS64(); @@ -177884,7 +180185,6 @@ class RNTupleDescriptorBuilder { const numElementsWithBit = reader.readS32(), hasChecksum = numElementsWithBit < 0, numElements = BigInt(Math.abs(Number(numElementsWithBit))), - locator = this._readLocator(reader); pages.push({ numElements, @@ -177894,11 +180194,8 @@ class RNTupleDescriptorBuilder { } const elementOffset = reader.readS64(), - isSuppressed = elementOffset < 0; - - let compression = null; - if (!isSuppressed) - compression = reader.readU32(); + isSuppressed = elementOffset < 0, + compression = isSuppressed ? 
null : reader.readU32(); columns.push({ pages, @@ -177908,113 +180205,43 @@ class RNTupleDescriptorBuilder { }); } - clusterPageLocations.push(columns); + this.pageLocations.push(columns); } - - this.pageLocations = clusterPageLocations; } - // Example Of Deserializing Page Content - deserializePage(blob, columnDescriptor, pageInfo) { - const originalColtype = columnDescriptor.coltype, - { - coltype - } = recontructUnsplitBuffer(blob, columnDescriptor); - let { - blob: processedBlob - } = recontructUnsplitBuffer(blob, columnDescriptor); - + /** @summary Search field by name + * @private */ + findField(name) { + for (let n = 0; n < this.fieldDescriptors.length; ++n) { + const field = this.fieldDescriptors[n]; + if (field.fieldName === name) + return field; + } + } - // Handle split index types - if (originalColtype === ENTupleColumnType.kSplitIndex32 || originalColtype === ENTupleColumnType.kSplitIndex64) { - const { - blob: decodedArray - } = DecodeDeltaIndex(processedBlob, coltype); - processedBlob = decodedArray; - } - - // Handle Split Signed Int types - if (originalColtype === ENTupleColumnType.kSplitInt16 || originalColtype === ENTupleColumnType.kSplitInt32 || originalColtype === ENTupleColumnType.kSplitInt64) { - const { - blob: decodedArray - } = decodeZigzag(processedBlob, coltype); - processedBlob = decodedArray; - } - - const reader = new RBufferReader(processedBlob), - values = [], - - // Use numElements from pageInfo parameter - numValues = Number(pageInfo.numElements), - // Helper for all simple types - extractValues = (readFunc) => { - for (let i = 0; i < numValues; ++i) - values.push(readFunc()); - }; - switch (coltype) { - case ENTupleColumnType.kBit: { - let bitCount = 0; - const totalBitsInBuffer = processedBlob.byteLength * 8; - if (totalBitsInBuffer < numValues) - throw new Error(`kBit: Not enough bits in buffer (${totalBitsInBuffer}) for numValues (${numValues})`); - - for (let byteIndex = 0; byteIndex < processedBlob.byteLength; ++byteIndex) { 
- const byte = reader.readU8(); - - // Extract 8 bits from this byte - for (let bitPos = 0; bitPos < 8 && bitCount < numValues; ++bitPos, ++bitCount) { - const bitValue = (byte >>> bitPos) & 1, - boolValue = bitValue === 1; - values.push(boolValue); - } - } - break; - } + /** @summary Return all childs of specified field + * @private */ + findChildFields(field) { + const indx = this.fieldDescriptors.indexOf(field), res = []; + for (let n = 0; n < this.fieldDescriptors.length; ++n) { + const fld = this.fieldDescriptors[n]; + if ((fld !== field) && (fld.parentFieldId === indx)) + res.push(fld); + } + return res; + } - case ENTupleColumnType.kReal64: - extractValues(reader.readF64.bind(reader)); - break; - case ENTupleColumnType.kReal32: - extractValues(reader.readF32.bind(reader)); - break; - case ENTupleColumnType.kInt64: - extractValues(reader.readS64.bind(reader)); - break; - case ENTupleColumnType.kUInt64: - extractValues(reader.readU64.bind(reader)); - break; - case ENTupleColumnType.kInt32: - extractValues(reader.readS32.bind(reader)); - break; - case ENTupleColumnType.kUInt32: - extractValues(reader.readU32.bind(reader)); - break; - case ENTupleColumnType.kInt16: - extractValues(reader.readS16.bind(reader)); - break; - case ENTupleColumnType.kUInt16: - extractValues(reader.readU16.bind(reader)); - break; - case ENTupleColumnType.kInt8: - extractValues(reader.readS8.bind(reader)); - break; - case ENTupleColumnType.kUInt8: - case ENTupleColumnType.kByte: - extractValues(reader.readU8.bind(reader)); - break; - case ENTupleColumnType.kChar: - extractValues(() => String.fromCharCode(reader.readS8())); - break; - case ENTupleColumnType.kIndex32: - extractValues(reader.readS32.bind(reader)); - break; - case ENTupleColumnType.kIndex64: - extractValues(reader.readS64.bind(reader)); - break; - default: - throw new Error(`Unsupported column type: ${columnDescriptor.coltype}`); + /** @summary Return array of columns for specified field + * @private */ + findColumns(field) 
{ + const res = []; + if (!field) + return res; + for (const colDesc of this.columnDescriptors) { + if (this.fieldDescriptors[colDesc.fieldId] === field) + res.push(colDesc); } - return values; + return res; } } // class RNTupleDescriptorBuilder @@ -178025,15 +180252,15 @@ class RNTupleDescriptorBuilder { async function readHeaderFooter(tuple) { // if already read - return immediately, make possible to call several times if (tuple?.builder) - return true; + return tuple.builder; - if (!tuple.$file) - return false; + if (!tuple?.$file) + return null; // request header and footer buffers from the file return tuple.$file.readBuffer([tuple.fSeekHeader, tuple.fNBytesHeader, tuple.fSeekFooter, tuple.fNBytesFooter]).then(blobs => { if (blobs?.length !== 2) - return false; + throw new Error('Failure reading header or footer blobs'); // Handle both compressed and uncompressed cases const processBlob = (blob, uncompressedSize) => { @@ -178046,254 +180273,895 @@ async function readHeaderFooter(tuple) { return Promise.all([ processBlob(blobs[0], tuple.fLenHeader), processBlob(blobs[1], tuple.fLenFooter) - ]).then(unzip_blobs => { - const [header_blob, footer_blob] = unzip_blobs; - if (!header_blob || !footer_blob) - return false; - - tuple.builder = new RNTupleDescriptorBuilder; - tuple.builder.deserializeHeader(header_blob); - tuple.builder.deserializeFooter(footer_blob); - - // Build fieldToColumns mapping - tuple.fieldToColumns = {}; - for (const colDesc of tuple.builder.columnDescriptors) { - const fieldDesc = tuple.builder.fieldDescriptors[colDesc.fieldId], - fieldName = fieldDesc.fieldName; - if (!tuple.fieldToColumns[fieldName]) - tuple.fieldToColumns[fieldName] = []; - tuple.fieldToColumns[fieldName].push(colDesc); - } - - // Deserialize Page List - const group = tuple.builder.clusterGroups?.[0]; - if (!group || !group.pageListLocator) - throw new Error('No valid cluster group or page list locator found'); - - const offset = Number(group.pageListLocator.offset), - size 
= Number(group.pageListLocator.size), - uncompressedSize = Number(group.pageListLength); - - return tuple.$file.readBuffer([offset, size]).then(page_list_blob => { - if (!(page_list_blob instanceof DataView)) - throw new Error(`Expected DataView from readBuffer, got ${Object.prototype.toString.call(page_list_blob)}`); - - // Check if page list data is uncompressed - if (page_list_blob.byteLength === uncompressedSize) { - // Data is uncompressed, use directly - tuple.builder.deserializePageList(page_list_blob); - return true; - } - // Attempt to decompress the page list - return R__unzip(page_list_blob, uncompressedSize).then(unzipped_blob => { - if (!(unzipped_blob instanceof DataView)) - throw new Error(`Unzipped page list is not a DataView, got ${Object.prototype.toString.call(unzipped_blob)}`); - - tuple.builder.deserializePageList(unzipped_blob); - return true; - }); - }); - }); + ]); + }).then(unzip_blobs => { + const [header_blob, footer_blob] = unzip_blobs; + if (!header_blob || !footer_blob) + throw new Error('Failure when uncompress header and footer blobs'); + + tuple.builder = new RNTupleDescriptorBuilder; + tuple.builder.deserializeHeader(header_blob); + tuple.builder.deserializeFooter(footer_blob); + + // Deserialize Page List + const group = tuple.builder.clusterGroups?.[0]; + if (!group || !group.pageListLocator) + throw new Error('No valid cluster group or page list locator found'); + + const offset = Number(group.pageListLocator.offset), + size = Number(group.pageListLocator.size); + + return tuple.$file.readBuffer([offset, size]); + }).then(page_list_blob => { + if (!(page_list_blob instanceof DataView)) + throw new Error(`Expected DataView from readBuffer, got ${Object.prototype.toString.call(page_list_blob)}`); + + const group = tuple.builder.clusterGroups?.[0], + uncompressedSize = Number(group.pageListLength); + + // Check if page list data is uncompressed + if (page_list_blob.byteLength === uncompressedSize) + return page_list_blob; + + // 
Attempt to decompress the page list + return R__unzip(page_list_blob, uncompressedSize); + }).then(unzipped_blob => { + if (!(unzipped_blob instanceof DataView)) + throw new Error(`Unzipped page list is not a DataView, got ${Object.prototype.toString.call(unzipped_blob)}`); + + tuple.builder.deserializePageList(unzipped_blob); + return tuple.builder; }).catch(err => { console.error('Error during readHeaderFooter execution:', err); - throw err; + return null; }); } -function readEntry(rntuple, fieldName, entryIndex) { - const builder = rntuple.builder, - field = builder.fieldDescriptors.find(f => f.fieldName === fieldName), - fieldData = rntuple._clusterData[fieldName]; - if (!field) - throw new Error(`No descriptor for field ${fieldName}`); - if (!fieldData) - throw new Error(`No data for field ${fieldName}`); +/** @class Base class to read columns/fields from RNtuple + * @private */ + +class ReaderItem { + + constructor(column, name) { + this.column = null; + this.name = name; + this.id = -1; + this.coltype = 0; + this.sz = 0; + this.simple = true; + this.page = -1; // current page for the reading + + if (column?.coltype !== undefined) { + this.column = column; + this.id = column.index; + this.coltype = column.coltype; + + // special handling of split types + if ((this.coltype >= kSplitInt16) && (this.coltype <= kSplitIndex64)) { + this.coltype -= (kSplitInt16 - kInt16); + this.simple = false; + } + } else if (column?.length) + this.items = column; + } + + cleanup() { + this.views = null; + this.view = null; + this.view_len = 0; + } + + init_o() { + this.o = 0; + this.o2 = 0; // for bit count + if (this.column && this.views?.length) { + this.view = this.views.shift(); + this.view_len = this.view.byteLength; + } + } + + reset_extras() {} + + shift_o(sz) { + this.o += sz; + while ((this.o >= this.view_len) && this.view_len) { + this.o -= this.view_len; + if (this.views.length) { + this.view = this.views.shift(); + this.view_len = this.view.byteLength; + } else { + 
this.view = null; + this.view_len = 0; + } + } + } + + shift(entries) { + if (this.sz && this.simple) + this.shift_o(this.sz * entries); + else { + while (entries-- > 0) + this.func({}); + } + } + + /** @summary Simple column with fixed element size - no vectors, no strings */ + is_simple() { return this.sz && this.simple; } + + set_not_simple() { + this.simple = false; + this.items?.forEach(item => item.set_not_simple()); + } + + assignReadFunc() { + switch (this.coltype) { + case kBit: { + this.func = function(obj) { + if (this.o2 === 0) + this.byte = this.view.getUint8(this.o); + obj[this.name] = ((this.byte >>> this.o2++) & 1) === 1; + if (this.o2 === 8) { + this.o2 = 0; + this.shift_o(1); + } + }; + break; + } + case kReal64: + this.func = function(obj) { + obj[this.name] = this.view.getFloat64(this.o, LITTLE_ENDIAN); + this.shift_o(8); + }; + this.sz = 8; + break; + case kReal32: + this.func = function(obj) { + obj[this.name] = this.view.getFloat32(this.o, LITTLE_ENDIAN); + this.shift_o(4); + }; + this.sz = 4; + break; + case kReal16: + this.func = function(obj) { + const value = this.view.getUint16(this.o, LITTLE_ENDIAN); + this.shift_o(2); + // reimplementing of HalfToFloat + let fbits = (value & 0x8000) << 16, + abs = value & 0x7FFF; + if (abs) { + fbits |= 0x38000000 << (abs >= 0x7C00 ? 
1 : 0); + for (; abs < 0x400; abs <<= 1, fbits -= 0x800000); + fbits += abs << 13; + } + this.buf.setUint32(0, fbits, true); + obj[this.name] = this.buf.getFloat32(0, true); + }; + this.sz = 2; + this.buf = new DataView(new ArrayBuffer(4), 0); + break; + case kReal32Trunc: + this.buf = new DataView(new ArrayBuffer(4), 0); + case kReal32Quant: + this.nbits = this.column.bitsOnStorage; + if (!this.buf) { + this.factor = (this.column.maxValue - this.column.minValue) / ((1 << this.nbits) - 1); + this.min = this.column.minValue; + } + + this.func = function(obj) { + let res = 0, len = this.nbits; + // extract nbits from the stream + while (len > 0) { + if (this.o2 === 0) { + this.byte = this.view.getUint8(this.o); + this.o2 = 8; // number of bits in the value + } + const pos = this.nbits - len; // extracted bits + if (len >= this.o2) { + res |= (this.byte & ((1 << this.o2) - 1)) << pos; // get all remaining bits + len -= this.o2; + this.o2 = 0; + this.shift_o(1); + } else { + res |= (this.byte & ((1 << len) - 1)) << pos; // get only len bits from the value + this.o2 -= len; + this.byte >>= len; + len = 0; + } + } + if (this.buf) { + this.buf.setUint32(0, res << (32 - this.nbits), true); + obj[this.name] = this.buf.getFloat32(0, true); + } else + obj[this.name] = res * this.factor + this.min; + }; + break; + case kInt64: + case kIndex64: + this.func = function(obj) { + // FIXME: let process BigInt in the TTree::Draw + obj[this.name] = Number(this.view.getBigInt64(this.o, LITTLE_ENDIAN)); + this.shift_o(8); + }; + this.sz = 8; + break; + case kUInt64: + this.func = function(obj) { + // FIXME: let process BigInt in the TTree::Draw + obj[this.name] = Number(this.view.getBigUint64(this.o, LITTLE_ENDIAN)); + this.shift_o(8); + }; + this.sz = 8; + break; + case kSwitch: + this.func = function(obj) { + // index not used in std::variant, may be in some other usecases + // obj[this.name] = Number(this.view.getBigInt64(this.o, LITTLE_ENDIAN)); + this.shift_o(8); // skip value, not 
used yet + obj[this.name] = this.view.getInt32(this.o, LITTLE_ENDIAN); + this.shift_o(4); + }; + this.sz = 12; + break; + case kInt32: + case kIndex32: + this.func = function(obj) { + obj[this.name] = this.view.getInt32(this.o, LITTLE_ENDIAN); + this.shift_o(4); + }; + this.sz = 4; + break; + case kUInt32: + this.func = function(obj) { + obj[this.name] = this.view.getUint32(this.o, LITTLE_ENDIAN); + this.shift_o(4); + }; + this.sz = 4; + break; + case kInt16: + this.func = function(obj) { + obj[this.name] = this.view.getInt16(this.o, LITTLE_ENDIAN); + this.shift_o(2); + }; + this.sz = 2; + break; + case kUInt16: + this.func = function(obj) { + obj[this.name] = this.view.getUint16(this.o, LITTLE_ENDIAN); + this.shift_o(2); + }; + this.sz = 2; + break; + case kInt8: + this.func = function(obj) { + obj[this.name] = this.view.getInt8(this.o); + this.shift_o(1); + }; + this.sz = 1; + break; + case kUInt8: + case kByte: + this.func = function(obj) { + obj[this.name] = this.view.getUint8(this.o); + this.shift_o(1); + }; + this.sz = 1; + break; + case kChar: + this.func = function(obj) { + obj[this.name] = String.fromCharCode(this.view.getInt8(this.o)); + this.shift_o(1); + }; + this.sz = 1; + break; + default: + throw new Error(`Unsupported column type: ${this.coltype}`); + } + } + + readStr(len) { + let s = ''; + while (len-- > 0) { + s += String.fromCharCode(this.view.getInt8(this.o)); + this.shift_o(1); + } + return s; + } + + collectPages(cluster_locations, dataToRead, itemsToRead, pagesToRead, emin, emax, elist) { + // no pages without real column id + if (!this.column || (this.id < 0)) + return; + + const pages = cluster_locations[this.id].pages; + + this.views = new Array(pages.length); + + let e0 = 0; + for (let p = 0; p < pages.length; ++p) { + const page = pages[p], + e1 = e0 + Number(page.numElements), + margin = this._is_offset_item ? 
1 : 0, // offset for previous entry has to be read as well + is_inside = (e, beg, end) => (e >= beg) && (e < end + margin); + let is_entries_inside = false; + if (elist?.length) + elist.forEach(e => { is_entries_inside ||= is_inside(e, e0, e1); }); + else + is_entries_inside = is_inside(e0, emin, emax) || is_inside(e1, emin, emax) || is_inside(emin, e0, e1) || is_inside(emax, e0, e1); + + if (!this.is_simple() || is_entries_inside) { + itemsToRead.push(this); + dataToRead.push(Number(page.locator.offset), page.locator.size); + pagesToRead.push(p); + this.views[p] = null; // placeholder, filled after request + } else + this.views[p] = { byteLength: this.sz * Number(page.numElements) }; // dummy entry only to allow proper navigation + + e0 = e1; + } + } + + async unzipBlob(blob, cluster_locations, page_indx) { + const colEntry = cluster_locations[this.id], // Access column entry + numElements = Number(colEntry.pages[page_indx].numElements), + elementSize = this.column.bitsOnStorage / 8, + expectedSize = Math.ceil(numElements * elementSize); + + // Check if data is compressed + if ((colEntry.compression === 0) || (blob.byteLength === expectedSize)) + return blob; // Uncompressed: use blob directly + + // Try decompression + return R__unzip(blob, expectedSize).then(result => { + return result || blob; // Fallback to original blob ?? 
+ }).catch(err => { + throw new Error(`Failed to unzip page ${page_indx} for column ${this.id}: ${err.message}`); + }); + } + + reconstructBlob(rawblob, page_indx) { + if (!(rawblob instanceof DataView)) + throw new Error(`Invalid blob type for column ${this.id}: ${Object.prototype.toString.call(rawblob)}`); + + const originalColtype = this.column.coltype, + view = recontructUnsplitBuffer(rawblob, originalColtype); + + // Handle split index types + switch (originalColtype) { + case kSplitIndex32: decodeIndex32(view); break; + case kSplitIndex64: decodeIndex64(view, 0); break; + case kSwitch: decodeIndex64(view, 4); break; + case kSplitInt16: decodeZigzag16(view); break; + case kSplitInt32: decodeZigzag32(view); break; + case kSplitInt64: decodeZigzag64(view); break; + } + + this.views[page_indx] = view; + } + +} + + +/** @class reading std::string field + * @private */ + +class StringReaderItem extends ReaderItem { + + constructor(items, name) { + super(items, name); + items[0]._is_offset_item = true; + items[1].set_not_simple(); + this.off0 = 0; + } + + reset_extras() { + this.off0 = 0; + } + + func(tgtobj) { + const tmp = {}; + this.items[0].func(tmp); + const off = Number(tmp.len); + tgtobj[this.name] = this.items[1].readStr(off - this.off0); + this.off0 = off; + } - // Detect and decode string fields - if (Array.isArray(fieldData) && fieldData.length === 2) { - const [offsets, payload] = fieldData, - start = entryIndex === 0 ? 0 : Number(offsets[entryIndex - 1]), - end = Number(offsets[entryIndex]), - decoded = payload.slice(start, end).join(''); // Convert to string - return decoded; + shift(entries) { + this.items[0].shift(entries - 1); + const tmp = {}; + this.items[0].func(tmp); + const off = Number(tmp.len); + this.items[1].shift_o(off - this.off0); + this.off0 = off; } - // Fallback: primitive type (e.g. 
int, float) - return fieldData[0][entryIndex]; } -/** @summary Return field name for specified branch index - * @desc API let use field name in selector or field object itself */ -function getSelectorFieldName(selector, i) { - const br = selector.getBranch(i); - return isStr(br) ? br : br?.fieldName; +/** @class reading Streamed field + * @private */ + +class StreamedReaderItem extends ReaderItem { + + constructor(items, name, file, classname) { + super(items, name); + items[0]._is_offset_item = true; + items[1].set_not_simple(); + this.file = file; + this.classname = classname; + this.off0 = 0; + } + + reset_extras() { + this.off0 = 0; + } + + func(tgtobj) { + const tmp = {}, res = {}; + this.items[0].func(tmp); + const off = Number(tmp.len), + buf = new TBuffer(this.items[1].view, this.items[1].o, this.file, this.items[1].o + off - this.off0); + + // TODO: if by chance object splited between two pages + if (this.items[1].view.byteLength < this.items[1].o + off - this.off0) + console.error('FAILURE - buffer is splitted, need to be read from next page'); + + buf.classStreamer(res, this.classname); + + this.items[1].shift_o(off - this.off0); + this.off0 = off; + tgtobj[this.name] = res; + } + + shift(entries) { + this.items[0].shift(entries - 1); + const tmp = {}; + this.items[0].func(tmp); + const off = Number(tmp.len); + this.items[1].shift_o(off - this.off0); + this.off0 = off; + } + } -// Read and process the next data cluster from the RNTuple -function readNextCluster(rntuple, selector) { - const builder = rntuple.builder; - // Add validation - if (!builder.clusterSummaries || builder.clusterSummaries.length === 0) - throw new Error('No cluster summaries available - possibly incomplete file reading'); +/** @class reading of std::array + * @private */ - const clusterIndex = selector.currentCluster, - clusterSummary = builder.clusterSummaries[clusterIndex], - // Gather all pages for this cluster from selected fields only - pages = [], - // Collect only selected 
field names from selector - selectedFields = []; +class ArrayReaderItem extends ReaderItem { - for (let i = 0; i < selector.numBranches(); ++i) - selectedFields.push(getSelectorFieldName(selector, i)); + constructor(items, tgtname, arrsize) { + super(items, tgtname); + this.arrsize = arrsize; + items[0].set_not_simple(); + } - // For each selected field, collect its columns' pages - for (const fieldName of selectedFields) { - const columns = rntuple.fieldToColumns[fieldName]; - if (!columns) - throw new Error(`Selected field '${fieldName}' not found in RNTuple`); + func(tgtobj) { + const arr = [], tmp = {}; + let len = this.arrsize; + while (len-- > 0) { + this.items[0].func(tmp); + arr.push(tmp.value); + } + tgtobj[this.name] = arr; + } - for (const colDesc of columns) { - const colEntry = builder.pageLocations[clusterIndex]?.[colDesc.index]; + shift(entries) { + this.items[0].shift(entries * this.arrsize); + } + +} + + +/** @class reading of std::bitset + * @desc large numbers with more than 48 bits converted to BigInt + * @private */ - // When the data is missing or broken - if (!colEntry || !colEntry.pages) - throw new Error(`No pages for column ${colDesc.index} in cluster ${clusterIndex}`); +class BitsetReaderItem extends ReaderItem { - for (const page of colEntry.pages) - pages.push({ page, colDesc, fieldName }); + constructor(items, tgtname, size) { + super(items, tgtname); + this.size = size; + items[0].set_not_simple(); + this.bigint = size > 48; + } + + func(tgtobj) { + const tmp = {}; + let len = 0, res = this.bigint ? 
0n : 0; + while (len < this.size) { + this.items[0].func(tmp); + if (tmp.bit) { + if (this.bigint) + res |= (1n << BigInt(len)); + else + res |= 1 << len; + } + len++; } + tgtobj[this.name] = res; } - selector.currentCluster++; + shift(entries) { + this.items[0].shift(entries * this.size); + } - // Early exit if no pages to read (i.e., no selected fields matched) - if (pages.length === 0) { - selector.Terminate(false); - return Promise.resolve(); - } - - // Build flat array of [offset, size, offset, size, ...] to read pages - const dataToRead = pages.flatMap(p => - [Number(p.page.locator.offset), Number(p.page.locator.size)] - ); - - return rntuple.$file.readBuffer(dataToRead).then(blobsRaw => { - const blobs = Array.isArray(blobsRaw) ? blobsRaw : [blobsRaw], - unzipPromises = blobs.map((blob, idx) => { - const { page, colDesc } = pages[idx], - colEntry = builder.pageLocations[clusterIndex][colDesc.index], // Access column entry - numElements = Number(page.numElements), - elementSize = colDesc.bitsOnStorage / 8; - - // Check if data is compressed - if (colEntry.compression === 0) - return Promise.resolve(blob); // Uncompressed: use blob directly - const expectedSize = numElements * elementSize; - - // Special handling for boolean fields - if (colDesc.coltype === ENTupleColumnType.kBit) { - const expectedBoolSize = Math.ceil(numElements / 8); - if (blob.byteLength === expectedBoolSize) - return Promise.resolve(blob); - // Try decompression but catch errors for boolean fields - return R__unzip(blob, expectedBoolSize).catch(err => { - throw new Error(`Failed to unzip boolean page ${idx}: ${err.message}`); - }); - } +} - // If the blob is already the expected size, treat as uncompressed - if (blob.byteLength === expectedSize) - return Promise.resolve(blob); - - // Try decompression - return R__unzip(blob, expectedSize).then(result => { - if (!result) - return blob; // Fallback to original blob - return result; - }).catch(err => { - throw new Error(`Failed to unzip page 
${idx}: ${err.message}`); - }); - }); - return Promise.all(unzipPromises).then(unzipBlobs => { - rntuple._clusterData = {}; // store deserialized data per field - - for (let i = 0; i < unzipBlobs.length; ++i) { - const blob = unzipBlobs[i]; - // Ensure blob is a DataView - if (!(blob instanceof DataView)) - throw new Error(`Invalid blob type for page ${i}: ${Object.prototype.toString.call(blob)}`); - const { - page, - colDesc - } = pages[i], - field = builder.fieldDescriptors[colDesc.fieldId], - values = builder.deserializePage(blob, colDesc, page); - - // Support multiple representations (e.g., string fields with offsets + payload) - if (!rntuple._clusterData[field.fieldName]) - rntuple._clusterData[field.fieldName] = []; - - // splitting string fields into offset and payload components - if (field.typeName === 'std::string') { - if ( - colDesc.coltype === ENTupleColumnType.kIndex64 || - colDesc.coltype === ENTupleColumnType.kIndex32 || - colDesc.coltype === ENTupleColumnType.kSplitIndex64 || - colDesc.coltype === ENTupleColumnType.kSplitIndex32 - ) // Index64/Index32 - rntuple._clusterData[field.fieldName][0] = values; // Offsets - else if (colDesc.coltype === ENTupleColumnType.kChar) - rntuple._clusterData[field.fieldName][1] = values; // Payload - else - throw new Error(`Unsupported column type for string field: ${colDesc.coltype}`); - } else - rntuple._clusterData[field.fieldName][0] = values; +/** @class reading std::vector and other kinds of collections + * @private */ + +class CollectionReaderItem extends ReaderItem { + + constructor(items, tgtname) { + super(items, tgtname); + this.off0 = 0; + items[0]._is_offset_item = true; + items[1].set_not_simple(); + } + + reset_extras() { + this.off0 = 0; + } + + func(tgtobj) { + const arr = [], tmp = {}; + this.items[0].func(tmp); + const off = Number(tmp.len); + let len = off - this.off0; + while (len-- > 0) { + this.items[1].func(tmp); + arr.push(tmp.val); + } + tgtobj[this.name] = arr; + this.off0 = off; + } + + 
shift(entries) { + const tmp = {}; + this.items[0].shift(entries - 1); + this.items[0].func(tmp); + const off = Number(tmp.len); + this.items[1].shift(off - this.off0); + this.off0 = off; + } + +} + +/** @class reading std::variant field + * @private */ + +class VariantReaderItem extends ReaderItem { + + constructor(items, tgtname) { + super(items, tgtname); + this.set_not_simple(); + } + + func(tgtobj) { + const tmp = {}; + this.items[0].func(tmp); + const id = tmp.switch; + if (id === 0) + tgtobj[this.name] = null; // set null + else if (Number.isInteger(id) && (id > 0) && (id < this.items.length)) + this.items[id].func(tgtobj); + } + +} + + +/** @class reading std::tuple<> field + * @private */ + +class TupleReaderItem extends ReaderItem { + + func(tgtobj) { + const tuple = {}; + this.items.forEach(item => item.func(tuple)); + tgtobj[this.name] = tuple; + } + + shift(entries) { + this.items.forEach(item => item.shift(entries)); + } + +} + +/** @class reading custom class field + * @private */ + +class CustomClassReaderItem extends ReaderItem { + + constructor(items, tgtname, classname) { + super(items, tgtname); + this.classname = classname; + this.set_not_simple(); + } + + func(tgtobj) { + const obj = { _typename: this.classname }; + this.items.forEach(item => item.func(obj)); + tgtobj[this.name] = obj; + } + + shift(entries) { + this.items.forEach(item => item.shift(entries)); + } + +} + + +/** @class reading std::pair field + * @private */ + +class PairReaderItem extends ReaderItem { + + func(tgtobj) { + const res = {}; + this.items[0].func(res); + this.items[1].func(res); + tgtobj[this.name] = res; + } + + shift(entries) { + this.items[0].shift(entries); + this.items[1].shift(entries); + } + +} + + +async function rntupleProcess(rntuple, selector, args = {}) { + const handle = { + file: rntuple.$file, // keep file reference + columns: [], // list of ReaderItem with real columns for reading + items: [], // list of ReaderItem producing output fields + 
current_cluster: 0, // current cluster to process + current_cluster_first_entry: 0, // first entry in current cluster + current_cluster_last_entry: 0, // last entry in current cluster + current_entry: 0, // current processed entry + process_arrays: false, // one can process all branches as arrays + firstentry: 0, // first entry in the rntuple + lastentry: 0 // last entry in the rntuple + }; + + function readNextPortion(builder, inc_cluster) { + let do_again = true, numClusterEntries, locations; + + while (do_again) { + if (inc_cluster) { + handle.current_cluster++; + handle.current_cluster_first_entry = handle.current_cluster_last_entry; } - // Ensure string fields have ending offset for proper reconstruction of the last entry - for (const fieldName of selectedFields) { - const field = builder.fieldDescriptors.find(f => f.fieldName === fieldName), - colData = rntuple._clusterData[fieldName]; - if (field.typeName === 'std::string') { - if (!Array.isArray(colData) || colData.length !== 2) - throw new Error(`String field '${fieldName}' must have 2 columns`); - if (colData[0].length !== builder.clusterSummaries[clusterIndex].numEntries) - throw new Error(`Malformed string field '${fieldName}': missing final offset`); - } + locations = builder.pageLocations[handle.current_cluster]; + if (!locations) { + selector.Terminate(true); + return selector; } - const numEntries = clusterSummary.numEntries; - for (let i = 0; i < numEntries; ++i) { - for (let b = 0; b < selector.numBranches(); ++b) { - const fieldName = getSelectorFieldName(selector, b), - tgtName = selector.nameOfBranch(b), - values = rntuple._clusterData[fieldName]; + numClusterEntries = builder.clusterSummaries[handle.current_cluster].numEntries; + + handle.current_cluster_last_entry = handle.current_cluster_first_entry + numClusterEntries; + + do_again = inc_cluster && handle.process_entries && + (handle.process_entries[handle.process_entries_indx] >= handle.current_cluster_last_entry); + } + + // calculate 
entries which can be extracted from the cluster + let emin, emax; + const dataToRead = [], itemsToRead = [], pagesToRead = [], elist = []; - if (!values) - throw new Error(`Missing values for selected field: ${fieldName}`); - selector.tgtobj[tgtName] = readEntry(rntuple, fieldName, i); + if (handle.process_entries) { + let i = handle.process_entries_indx; + while ((i < handle.process_entries.length) && (handle.process_entries[i] < handle.current_cluster_last_entry)) + elist.push(handle.process_entries[i++] - handle.current_cluster_first_entry); + emin = elist[0]; + emax = elist[elist.length - 1]; + } else { + emin = handle.current_entry - handle.current_cluster_first_entry; + emax = Math.min(numClusterEntries, handle.process_max - handle.current_cluster_first_entry); + } + + // loop over all columns and request required pages + handle.columns.forEach(item => item.collectPages(locations, dataToRead, itemsToRead, pagesToRead, emin, emax, elist)); + + return rntuple.$file.readBuffer(dataToRead).then(blobsRaw => { + const blobs = Array.isArray(blobsRaw) ? 
blobsRaw : [blobsRaw], + unzipPromises = blobs.map((blob, idx) => itemsToRead[idx].unzipBlob(blob, locations, pagesToRead[idx])); + return Promise.all(unzipPromises); + }).then(unzipBlobs => { + unzipBlobs.map((rawblob, idx) => itemsToRead[idx].reconstructBlob(rawblob, pagesToRead[idx])); + + // reset reading pointer after all buffers are there + handle.columns.forEach(item => item.init_o()); + handle.items.forEach(item => item.reset_extras()); + + let skip_entries = handle.current_entry - handle.current_cluster_first_entry; + + while (handle.current_entry < handle.current_cluster_last_entry) { + for (let i = 0; i < handle.items.length; ++i) { + if (skip_entries > 0) + handle.items[i].shift(skip_entries); + handle.items[i].func(selector.tgtobj); + } + skip_entries = 0; + + selector.Process(handle.current_entry); + + if (handle.process_entries) { + if (++handle.process_entries_indx >= handle.process_entries.length) { + selector.Terminate(true); + return selector; + } + const prev_entry = handle.current_entry; + handle.current_entry = handle.process_entries[handle.process_entries_indx]; + skip_entries = handle.current_entry - prev_entry - 1; + } else if (++handle.current_entry >= handle.process_max) { + selector.Terminate(true); + return selector; } - selector.Process(); } - selector.Terminate(true); + return readNextPortion(builder, true); }); - }); -} + } + + function addColumnReadout(column, tgtname) { + const item = new ReaderItem(column, tgtname); + item.assignReadFunc(); + handle.columns.push(item); + return item; + } + + function addFieldReading(builder, field, tgtname) { + const columns = builder.findColumns(field), + childs = builder.findChildFields(field); + if (!columns?.length) { + if ((childs.length === 2) && (field.typeName.indexOf('std::pair') === 0)) { + const item1 = addFieldReading(builder, childs[0], 'first'), + item2 = addFieldReading(builder, childs[1], 'second'); + return new PairReaderItem([item1, item2], tgtname); + } + + if ((childs.length 
=== 1) && (field.typeName.indexOf('std::array') === 0)) { + const item1 = addFieldReading(builder, childs[0], 'value'); + return new ArrayReaderItem([item1], tgtname, Number(field.arraySize)); + } + + if ((childs.length === 1) && (field.typeName.indexOf('std::atomic') === 0)) + return addFieldReading(builder, childs[0], tgtname); + + + if ((childs.length > 0) && (field.typeName.indexOf('std::tuple') === 0)) { + const items = []; + for (let i = 0; i < childs.length; ++i) + items.push(addFieldReading(builder, childs[i], `_${i}`)); + return new TupleReaderItem(items, tgtname); + } -// TODO args can later be used to filter fields, limit entries, etc. -// Create reader and deserialize doubles from the buffer -function rntupleProcess(rntuple, selector, args) { - return readHeaderFooter(rntuple).then(() => { - selector.Begin(); - selector.currentCluster = 0; - return readNextCluster(rntuple, selector); + // this is custom class which is decomposed on several fields + if ((childs.length > 0) && field.checksum && field.typeName) { + const items = []; + for (let i = 0; i < childs.length; ++i) + items.push(addFieldReading(builder, childs[i], childs[i].fieldName)); + return new CustomClassReaderItem(items, tgtname, field.typeName); + } + + throw new Error(`No columns found for field '${field.fieldName}' in RNTuple`); + } + + if ((columns.length === 2) && (field.typeName === 'std::string')) { + const itemlen = addColumnReadout(columns[0], 'len'), + itemstr = addColumnReadout(columns[1], 'str'); + return new StringReaderItem([itemlen, itemstr], tgtname); + } + + if ((columns.length === 1) && (field.typeName.indexOf('std::bitset') === 0)) { + const itembit = addColumnReadout(columns[0], 'bit'); + return new BitsetReaderItem([itembit], tgtname, Number(field.arraySize)); + } + + if ((columns.length === 2) && field.checksum && field.typeName) { + if (!handle.file.getStreamer(field.typeName, { checksum: field.checksum })) + throw new Error(`No streamer for type '${field.typeName}' 
checksum ${field.checksum}`); + + const itemlen = addColumnReadout(columns[0], 'len'), + itemb = addColumnReadout(columns[1], 'b'); + return new StreamedReaderItem([itemlen, itemb], tgtname, handle.file, field.typeName); + } + + let is_stl = false; + ['vector', 'map', 'unordered_map', 'multimap', 'unordered_multimap', 'set', 'unordered_set', 'multiset', 'unordered_multiset'].forEach(name => { + if (field.typeName.indexOf('std::' + name) === 0) + is_stl = true; + }); + + if ((childs.length === 1) && is_stl) { + const itemlen = addColumnReadout(columns[0], 'len'), + itemval = addFieldReading(builder, childs[0], 'val'); + return new CollectionReaderItem([itemlen, itemval], tgtname); + } + + if ((childs.length > 0) && (field.typeName.indexOf('std::variant') === 0)) { + const items = [addColumnReadout(columns[0], 'switch')]; + for (let i = 0; i < childs.length; ++i) + items.push(addFieldReading(builder, childs[i], tgtname)); + return new VariantReaderItem(items, tgtname); + } + + return addColumnReadout(columns[0], tgtname); + } + + return readHeaderFooter(rntuple).then(builder => { + if (!builder) + throw new Error('Not able to read header for the RNtuple'); + + for (let i = 0; i < selector.numBranches(); ++i) { + const br = selector.getBranch(i), + name = isStr(br) ? 
br : br?.fieldName, + tgtname = selector.nameOfBranch(i); + if (!name) + throw new Error(`Not able to extract name for field ${i}`); + + const field = builder.findField(name); + if (!field) + throw new Error(`Field ${name} not found`); + + const item = addFieldReading(builder, field, tgtname); + handle.items.push(item); + } + + // calculate number of entries + builder.clusterSummaries.forEach(summary => { handle.lastentry += summary.numEntries; }); + + if (handle.firstentry >= handle.lastentry) + throw new Error('Not able to find entries in the RNtuple'); + + // select range of entries to process + handle.process_min = handle.firstentry; + handle.process_max = handle.lastentry; + + if (args.elist) { + args.firstentry = args.elist.at(0); + args.numentries = args.elist.at(-1) - args.elist.at(0) + 1; + handle.process_entries = args.elist; + handle.process_entries_indx = 0; + handle.process_arrays = false; // do not use arrays process for selected entries + } + + if (Number.isInteger(args.firstentry) && (args.firstentry > handle.firstentry) && (args.firstentry < handle.lastentry)) + handle.process_min = args.firstentry; + + if (Number.isInteger(args.numentries) && (args.numentries > 0)) + handle.process_max = Math.min(handle.process_max, handle.process_min + args.numentries); + + // first check from which cluster one should start + for (let indx = 0, emin = 0; indx < builder.clusterSummaries.length; ++indx) { + const summary = builder.clusterSummaries[indx], + emax = emin + summary.numEntries; + if ((handle.process_min >= emin) && (handle.process_min < emax)) { + handle.current_cluster = indx; + handle.current_cluster_first_entry = emin; + break; + } + emin = emax; + } + + if (handle.current_cluster < 0) + throw new Error(`Not able to find cluster for entry ${handle.process_min} in the RNtuple`); + + handle.current_entry = handle.process_min; + + selector.Begin(rntuple); + + return readNextPortion(builder); }).then(() => selector); } + class TDrawSelectorTuple extends 
TDrawSelector { /** @summary Return total number of entries @@ -178306,11 +181174,7 @@ class TDrawSelectorTuple extends TDrawSelector { /** @summary Search for field in tuple * @desc TODO: Can be more complex when name includes extra parts referencing member or collection size or more */ - findBranch(tuple, name) { - return tuple.builder?.fieldDescriptors.find(field => { - return field.fieldName === name; - }); - } + findBranch(tuple, name) { return tuple.builder?.findField(name); } /** @summary Returns true if field can be used as array */ isArrayBranch(/* tuple, br */) { return false; } @@ -178340,8 +181204,8 @@ async function rntupleDraw(rntuple, args) { args.SelectorClass = TDrawSelectorTuple; args.processFunction = rntupleProcess; - return readHeaderFooter(rntuple).then(res_header_footer => { - return res_header_footer ? treeDraw(rntuple, args) : null; + return readHeaderFooter(rntuple).then(builder => { + return builder ? treeDraw(rntuple, args) : null; }); } @@ -178352,12 +181216,10 @@ async function rntupleDraw(rntuple, args) { async function tupleHierarchy(tuple_node, tuple) { tuple_node._childs = []; // tuple_node._tuple = tuple; // set reference, will be used later by RNTuple::Draw - - return readHeaderFooter(tuple).then(res => { - if (!res) - return res; - - tuple.builder?.fieldDescriptors.forEach(field => { + return readHeaderFooter(tuple).then(builder => { + builder?.fieldDescriptors.forEach((field, indx) => { + if (field.parentFieldId !== indx) + return; const item = { _name: field.fieldName, _typename: 'ROOT::RNTupleField', // pseudo class name, used in draw.mjs @@ -178366,20 +181228,16 @@ async function tupleHierarchy(tuple_node, tuple) { $tuple: tuple, // reference on tuple, need for drawing $field: field }; - item._obj = item; - tuple_node._childs.push(item); }); - - return true; + return Boolean(builder); }); } var rntuple = /*#__PURE__*/Object.freeze({ __proto__: null, RBufferReader: RBufferReader, -readEntry: readEntry, readHeaderFooter: 
readHeaderFooter, rntupleDraw: rntupleDraw, rntupleProcess: rntupleProcess, @@ -178394,13 +181252,15 @@ async function drawRNTuple(dom, obj, opt) { const args = {}; let tuple; - if (obj?.$tuple) { + if (obj?.$tuple && obj.$field) { // case of fictional ROOT::RNTupleField tuple = obj.$tuple; args.expr = obj._name; - if (isStr(opt) && opt.indexOf('dump') === 0) + if (isStr(opt) && opt.indexOf('dump') === 0) { args.expr += '>>' + opt; - else if (opt) + args.branch = obj.$field; + args.copy_fields = false; // no need to copy fields, reading is simple + } else if (opt) args.expr += opt; } else { tuple = obj; @@ -178918,7 +181778,7 @@ class RAxisPainter extends RObjectPainter { this.logbase = Math.exp(1); else if (_log > 1.9) this.logbase = Math.round(_log); - this.func = log().base(this.logbase).domain([smin, smax]); + this.func = log$1().base(this.logbase).domain([smin, smax]); } else this.func = linear().domain([smin, smax]); @@ -184942,6 +187802,7 @@ exports.getAbsPosInCanvas = getAbsPosInCanvas; exports.getActivePad = getActivePad; exports.getBoxDecorations = getBoxDecorations; exports.getColor = getColor; +exports.getColorPalette = getColorPalette; exports.getDocument = getDocument; exports.getDomCanvasPainter = getDomCanvasPainter; exports.getElementCanvPainter = getElementCanvPainter; @@ -185001,6 +187862,7 @@ exports.registerMethods = registerMethods; exports.resize = resize; exports.selectActivePad = selectActivePad; exports.setBatchMode = setBatchMode; +exports.setColorPalette = setColorPalette; exports.setDefaultDrawOpt = setDefaultDrawOpt; exports.setHPainter = setHPainter; exports.setHistogramTitle = setHistogramTitle; diff --git a/js/changes.md b/js/changes.md index 7e7f905a1e9f3..bc3986a5a44cb 100644 --- a/js/changes.md +++ b/js/changes.md @@ -2,10 +2,45 @@ ## Changes in dev +1. 
1. Implement new data types in `RNtuple`
+ - reduced float types kFloat16, kReal32Trunc, kReal32Quant
+ - `std::vector`
+ - `std::map`, `std::unordered_map`, `std::multimap`, `std::unordered_multimap` with `std::pair`
+ - `std::set`, `std::unordered_set`, `std::multiset`, `std::unordered_multiset`
+ - `std::array`
+ - `std::variant`
+ - `std::tuple`
+ - `std::bitset`
+ - `std::atomic`
+ - simple custom classes
+ - streamed types
+1. Resort order of ranges in http request, fixing several long-standing problems #374
 1. Implement for `TPie` 3d, text, title drawing including interactivity
-1. Remove support for deprectaed TH1K class
-1. Fix - proper paint axis labels on both sides when pad.fTickx/y = 2
+1. Implement `TCanvas` support in `build3d` function #373
+1. Implement `TTree` branches filtering via context menu #364
+1. Let define alternative draw function #378
+1. Implement padsN draw option for `THStack` and `TMultiGraph`
+1. Use `resvg-js` backend for PNG support in node.js #391, thanks to https://github.com/OmarMesqq
+1. Remove support for deprecated `TH1K` class
+1. Introduce `settings.ServerTimeout` global timeout for THttpServer operations
+1. Let set custom color palette with `setColorPalette` function
+1. Upgrade three.js r180 -> r183
 1. Fix - paint frame border mode/size from TCanvas
+1. Fix - add TLeafG support to TTree #397
+1. Fix - interactivity for TH3 palette drawing #398
+
+
+## Changes in 7.10.2
+1. Fix - correctly process `TLeafB` arrays in tree draw #384
+2. Fix - better detect default ranges in `TGraph` histogram
+3. Fix - convert BigInt before `RNtuple` drawing
+4. Fix - pages and clusters processing in `RNtuple` #390
+5. Fix - extra row for legend header, proper horizontal align https://github.com/root-project/root/issues/21173
+
+
+## Changes in 7.10.1
+1. Fix - proper paint axis labels on both sides when pad.fTickx/y = 2
+2. Fix - recover io after bad http response
 
 ## Changes in 7.10.0
 
@@ -1469,11 +1504,11 @@ 8.
Fix several problems with markers drawing; implement plus, asterisk, mult symbols. 9. Implement custom layout, which allows to configure user-defined layout for displayed objects 10. Fix errors with scaling of axis labels. -11. Support also Y axis with custom labels like: http://jsroot.gsi.de/dev/?nobrowser&file=../files/atlas.root&item=LEDShapeHeightCorr_Gain0;1&opt=col +11. Support also Y axis with custom labels like: https://jsroot.gsi.de/dev/?nobrowser&file=https://jsroot.gsi.de/files/atlas.root&item=LEDShapeHeightCorr_Gain0;1&opt=col ## Changes in 3.7 -1. Support of X axis with custom labels like: http://jsroot.gsi.de/dev/?nobrowser&json=../files/hist_xlabels.json +1. Support of X axis with custom labels like: https://jsroot.gsi.de/dev/?nobrowser&json=https://jsroot.gsi.de/files/hist_xlabels.json 2. Extend functionality of JSROOT.addDrawFunc() function. One could register type-specific `make_request` and `after_request` functions; `icon`, `prereq`, `script`, `monitor` properties. This let add more custom elements to the generic gui, implemented with JSROOT.HierarchyPainter @@ -1660,7 +1695,7 @@ 13. Provide example fileitem.htm how read and display item from ROOT file. 14. In default index.htm page one could specify 'file', 'layout', 'item' and 'items' parameters like: - + 15. Support direct reading of objects from sub-sub-directories. 16. Introduce demo.htm, which demonstrates online usage of JSROOT. 17. One could use demo.htm directly with THttpServer providing address like: diff --git a/js/index.htm b/js/index.htm index cd123332065fa..fb99f5640b8e8 100644 --- a/js/index.htm +++ b/js/index.htm @@ -14,7 +14,7 @@ -
+
loading modules ...