class Yl { constructor() { this.rootNodes = [], this.cameras = [], this.lights = [], this.meshes = [], this.skeletons = [], this.particleSystems = [], this.animations = [], this.animationGroups = [], this.multiMaterials = [], this.materials = [], this.morphTargetManagers = [], this.geometries = [], this.transformNodes = [], this.actionManagers = [], this.textures = [], this._environmentTexture = null, this.postProcesses = []; } /** * Adds a parser in the list of available ones * @param name Defines the name of the parser * @param parser Defines the parser to add */ static AddParser(e, t) { this._BabylonFileParsers[e] = t; } /** * Gets a general parser from the list of available ones * @param name Defines the name of the parser * @returns the requested parser or null */ static GetParser(e) { return this._BabylonFileParsers[e] ? this._BabylonFileParsers[e] : null; } /** * Adds an individual parser in the list of available ones * @param name Defines the name of the parser * @param parser Defines the parser to add */ static AddIndividualParser(e, t) { this._IndividualBabylonFileParsers[e] = t; } /** * Gets an individual parser from the list of available ones * @param name Defines the name of the parser * @returns the requested parser or null */ static GetIndividualParser(e) { return this._IndividualBabylonFileParsers[e] ? this._IndividualBabylonFileParsers[e] : null; } /** * Parses json data and populates both a scene and its associated container object * @param jsonData Defines the data to parse * @param scene Defines the scene to parse the data for * @param container Defines the container attached to the parsing sequence * @param rootUrl Defines the root url of the data */ static Parse(e, t, i, r) { for (const s in this._BabylonFileParsers) Object.prototype.hasOwnProperty.call(this._BabylonFileParsers, s) && this._BabylonFileParsers[s](e, t, i, r); } /** * Texture used in all pbr materials as the reflection texture. * As in the majority of scenes they are the same (exceptions for multi-room setups and so on), * this is easier to reference from here than from all the materials. 
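 * Usage sketch (assumptions: `Yl` is this bundle's minified name for its asset-container class, and `hdrTexture` is a placeholder for any cube texture created elsewhere):
 * @example
 * const container = new Yl();
 * container.environmentTexture = hdrTexture; // one reflection texture shared by the container's PBR materials
 * console.log(container.environmentTexture === hdrTexture); // true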
*/ get environmentTexture() { return this._environmentTexture; } set environmentTexture(e) { this._environmentTexture = e; } /** * @returns all meshes, lights, cameras, transformNodes and bones */ getNodes() { let e = []; return e = e.concat(this.meshes), e = e.concat(this.lights), e = e.concat(this.cameras), e = e.concat(this.transformNodes), this.skeletons.forEach((t) => e = e.concat(t.bones)), e; } } Yl._BabylonFileParsers = {}; Yl._IndividualBabylonFileParsers = {}; class H_ { constructor() { this.hoverCursor = "", this.actions = [], this.isRecursive = !1; } /** * Does exist one action manager with at least one trigger **/ static get HasTriggers() { for (const e in H_.Triggers) if (Object.prototype.hasOwnProperty.call(H_.Triggers, e)) return !0; return !1; } /** * Does exist one action manager with at least one pick trigger **/ static get HasPickTriggers() { for (const e in H_.Triggers) if (Object.prototype.hasOwnProperty.call(H_.Triggers, e)) { const t = parseInt(e); if (t >= 1 && t <= 7) return !0; } return !1; } /** * Does exist one action manager that handles actions of a given trigger * @param trigger defines the trigger to be tested * @returns a boolean indicating whether the trigger is handled by at least one action manager **/ static HasSpecificTrigger(e) { for (const t in H_.Triggers) if (Object.prototype.hasOwnProperty.call(H_.Triggers, t) && parseInt(t) === e) return !0; return !1; } } H_.Triggers = {}; class XG { /** * Create a new EventState * @param mask defines the mask associated with this state * @param skipNextObservers defines a flag which will instruct the observable to skip following observers when set to true * @param target defines the original target of the state * @param currentTarget defines the current target of the state */ constructor(e, t = !1, i, r) { this.initialize(e, t, i, r); } /** * Initialize the current event state * @param mask defines the mask associated with this state * @param skipNextObservers defines a flag which will instruct the observable to skip following observers when set to true * @param target defines the original target of the state * @param currentTarget defines the current target of the state * @returns the current event state */ initialize(e, t = !1, i, r) { return this.mask = e, this.skipNextObservers = t, this.target = i, this.currentTarget = r, this; } } class Bee { /** * Creates a new observer * @param callback defines the callback to call when the observer is notified * @param mask defines the mask of the observer (used to filter notifications) * @param scope defines the current scope used to restore the JS context */ constructor(e, t, i = null) { this.callback = e, this.mask = t, this.scope = i, this._willBeUnregistered = !1, this.unregisterOnNextCall = !1, this._remove = null; } /** * Remove the observer from its observable * This can be used instead of using the observable's remove function. */ remove() { this._remove && this._remove(); } } class Fe { /** * Create an observable from a Promise. * @param promise a promise to observe for fulfillment. * @param onErrorObservable an observable to notify if a promise was rejected. * @returns the new Observable */ static FromPromise(e, t) { const i = new Fe(); return e.then((r) => { i.notifyObservers(r); }).catch((r) => { if (t) t.notifyObservers(r); else throw r; }), i; } /** * Gets the list of observers * Note that observers that were recently deleted may still be present in the list because they are only really deleted on the next javascript tick! 
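 * Usage sketch (assuming `Fe` is this bundle's minified name for its Observable class):
 * @example
 * const onValue = new Fe();
 * const obs = onValue.add((v) => console.log("got", v)); // register an observer
 * onValue.notifyObservers(42);                           // runs every observer whose mask matches
 * onValue.remove(obs);                                   // removal is deferred to the next tick
 * console.log(onValue.observers.length);                 // may still report 1 until that tick runs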
*/ get observers() { return this._observers; } /** * Creates a new observable * @param onObserverAdded defines a callback to call when a new observer is added * @param notifyIfTriggered If set to true the observable will notify when an observer was added if the observable was already triggered. */ constructor(e, t = !1) { this.notifyIfTriggered = t, this._observers = new Array(), this._numObserversMarkedAsDeleted = 0, this._hasNotified = !1, this._eventState = new XG(0), e && (this._onObserverAdded = e); } add(e, t = -1, i = !1, r = null, s = !1) { if (!e) return null; const n = new Bee(e, t, r); return n.unregisterOnNextCall = s, i ? this._observers.unshift(n) : this._observers.push(n), this._onObserverAdded && this._onObserverAdded(n), this._hasNotified && this.notifyIfTriggered && this._lastNotifiedValue !== void 0 && this.notifyObserver(n, this._lastNotifiedValue), n._remove = () => { this.remove(n); }, n; } addOnce(e) { return this.add(e, void 0, void 0, void 0, !0); } /** * Remove an Observer from the Observable object * @param observer the instance of the Observer to remove * @returns false if it doesn't belong to this Observable */ remove(e) { return e ? (e._remove = null, this._observers.indexOf(e) !== -1 ? (this._deferUnregister(e), !0) : !1) : !1; } /** * Remove a callback from the Observable object * @param callback the callback to remove * @param scope optional scope. If used only the callbacks with this scope will be removed * @returns false if it doesn't belong to this Observable */ removeCallback(e, t) { for (let i = 0; i < this._observers.length; i++) { const r = this._observers[i]; if (!r._willBeUnregistered && r.callback === e && (!t || t === r.scope)) return this._deferUnregister(r), !0; } return !1; } /** * @internal */ _deferUnregister(e) { e._willBeUnregistered || (this._numObserversMarkedAsDeleted++, e.unregisterOnNextCall = !1, e._willBeUnregistered = !0, setTimeout(() => { this._remove(e); }, 0)); } // This should only be called when not iterating over _observers to avoid callback skipping. // Removes an observer from the _observer Array. _remove(e, t = !0) { if (!e) return !1; const i = this._observers.indexOf(e); return i !== -1 ? 
(t && this._numObserversMarkedAsDeleted--, this._observers.splice(i, 1), !0) : !1; } /** * Moves the observable to the top of the observer list making it get called first when notified * @param observer the observer to move */ makeObserverTopPriority(e) { this._remove(e, !1), this._observers.unshift(e); } /** * Moves the observable to the bottom of the observer list making it get called last when notified * @param observer the observer to move */ makeObserverBottomPriority(e) { this._remove(e, !1), this._observers.push(e); } /** * Notify all Observers by calling their respective callback with the given data * Will return true if all observers were executed, false if an observer set skipNextObservers to true, then prevent the subsequent ones to execute * @param eventData defines the data to send to all observers * @param mask defines the mask of the current notification (observers with incompatible mask (ie mask & observer.mask === 0) will not be notified) * @param target defines the original target of the state * @param currentTarget defines the current target of the state * @param userInfo defines any user info to send to observers * @returns false if the complete observer chain was not processed (because one observer set the skipNextObservers to true) */ notifyObservers(e, t = -1, i, r, s) { if (this.notifyIfTriggered && (this._hasNotified = !0, this._lastNotifiedValue = e), !this._observers.length) return !0; const n = this._eventState; n.mask = t, n.target = i, n.currentTarget = r, n.skipNextObservers = !1, n.lastReturnValue = e, n.userInfo = s; for (const a of this._observers) if (!a._willBeUnregistered && (a.mask & t && (a.unregisterOnNextCall && this._deferUnregister(a), a.scope ? n.lastReturnValue = a.callback.apply(a.scope, [e, n]) : n.lastReturnValue = a.callback(e, n)), n.skipNextObservers)) return !1; return !0; } /** * Notify a specific observer * @param observer defines the observer to notify * @param eventData defines the data to be sent to each callback * @param mask is used to filter observers defaults to -1 */ notifyObserver(e, t, i = -1) { if (this.notifyIfTriggered && (this._hasNotified = !0, this._lastNotifiedValue = t), e._willBeUnregistered) return; const r = this._eventState; r.mask = i, r.skipNextObservers = !1, e.unregisterOnNextCall && this._deferUnregister(e), e.callback(t, r); } /** * Gets a boolean indicating if the observable has at least one observer * @returns true is the Observable has at least one Observer registered */ hasObservers() { return this._observers.length - this._numObserversMarkedAsDeleted > 0; } /** * Clear the list of observers */ clear() { for (; this._observers.length; ) { const e = this._observers.pop(); e && (e._remove = null); } this._onObserverAdded = null, this._numObserversMarkedAsDeleted = 0, this.cleanLastNotifiedState(); } /** * Clean the last notified state - both the internal last value and the has-notified flag */ cleanLastNotifiedState() { this._hasNotified = !1, this._lastNotifiedValue = void 0; } /** * Clone the current observable * @returns a new observable */ clone() { const e = new Fe(); return e._observers = this._observers.slice(0), e; } /** * Does this observable handles observer registered with a given mask * @param mask defines the mask to be tested * @returns whether or not one observer registered with the given mask is handled **/ hasSpecificMask(e = -1) { for (const t of this._observers) if (t.mask & e || t.mask === e) return !0; return !1; } } class yt { /** * Boolean : true if the absolute difference between a 
and b is lower than epsilon (default = 1.401298E-45) * @param a number * @param b number * @param epsilon (default = 1.401298E-45) * @returns true if the absolute difference between a and b is lower than epsilon (default = 1.401298E-45) */ static WithinEpsilon(e, t, i = 1401298e-51) { return Math.abs(e - t) <= i; } /** * Returns a string : the upper case translation of the number i to hexadecimal. * @param i number * @returns the upper case translation of the number i to hexadecimal. */ static ToHex(e) { const t = e.toString(16); return e <= 15 ? ("0" + t).toUpperCase() : t.toUpperCase(); } /** * Returns -1 if value is negative and +1 is value is positive. * @param value the value * @returns the value itself if it's equal to zero. */ static Sign(e) { return e = +e, e === 0 || isNaN(e) ? e : e > 0 ? 1 : -1; } /** * Returns the value itself if it's between min and max. * Returns min if the value is lower than min. * Returns max if the value is greater than max. * @param value the value to clmap * @param min the min value to clamp to (default: 0) * @param max the max value to clamp to (default: 1) * @returns the clamped value */ static Clamp(e, t = 0, i = 1) { return Math.min(i, Math.max(t, e)); } /** * the log2 of value. * @param value the value to compute log2 of * @returns the log2 of value. */ static Log2(e) { return Math.log(e) * Math.LOG2E; } /** * the floor part of a log2 value. * @param value the value to compute log2 of * @returns the log2 of value. */ static ILog2(e) { if (Math.log2) return Math.floor(Math.log2(e)); if (e < 0) return NaN; if (e === 0) return -1 / 0; let t = 0; if (e < 1) { for (; e < 1; ) t++, e = e * 2; t = -t; } else if (e > 1) for (; e > 1; ) t++, e = Math.floor(e / 2); return t; } /** * Loops the value, so that it is never larger than length and never smaller than 0. * * This is similar to the modulo operator but it works with floating point numbers. * For example, using 3.0 for t and 2.5 for length, the result would be 0.5. * With t = 5 and length = 2.5, the result would be 0.0. * Note, however, that the behaviour is not defined for negative numbers as it is for the modulo operator * @param value the value * @param length the length * @returns the looped value */ static Repeat(e, t) { return e - Math.floor(e / t) * t; } /** * Normalize the value between 0.0 and 1.0 using min and max values * @param value value to normalize * @param min max to normalize between * @param max min to normalize between * @returns the normalized value */ static Normalize(e, t, i) { return (e - t) / (i - t); } /** * Denormalize the value from 0.0 and 1.0 using min and max values * @param normalized value to denormalize * @param min max to denormalize between * @param max min to denormalize between * @returns the denormalized value */ static Denormalize(e, t, i) { return e * (i - t) + t; } /** * Calculates the shortest difference between two given angles given in degrees. * @param current current angle in degrees * @param target target angle in degrees * @returns the delta */ static DeltaAngle(e, t) { let i = yt.Repeat(t - e, 360); return i > 180 && (i -= 360), i; } /** * PingPongs the value t, so that it is never larger than length and never smaller than 0. * @param tx value * @param length length * @returns The returned value will move back and forth between 0 and length */ static PingPong(e, t) { const i = yt.Repeat(e, t * 2); return t - Math.abs(i - t); } /** * Interpolates between min and max with smoothing at the limits. 
* * This function interpolates between min and max in a similar way to Lerp. However, the interpolation will gradually speed up * from the start and slow down toward the end. This is useful for creating natural-looking animation, fading and other transitions. * @param from from * @param to to * @param tx value * @returns the smooth stepped value */ static SmoothStep(e, t, i) { let r = yt.Clamp(i); return r = -2 * r * r * r + 3 * r * r, t * r + e * (1 - r); } /** * Moves a value current towards target. * * This is essentially the same as Mathf.Lerp but instead the function will ensure that the speed never exceeds maxDelta. * Negative values of maxDelta pushes the value away from target. * @param current current value * @param target target value * @param maxDelta max distance to move * @returns resulting value */ static MoveTowards(e, t, i) { let r = 0; return Math.abs(t - e) <= i ? r = t : r = e + yt.Sign(t - e) * i, r; } /** * Same as MoveTowards but makes sure the values interpolate correctly when they wrap around 360 degrees. * * Variables current and target are assumed to be in degrees. For optimization reasons, negative values of maxDelta * are not supported and may cause oscillation. To push current away from a target angle, add 180 to that angle instead. * @param current current value * @param target target value * @param maxDelta max distance to move * @returns resulting angle */ static MoveTowardsAngle(e, t, i) { const r = yt.DeltaAngle(e, t); let s = 0; return -i < r && r < i ? s = t : (t = e + r, s = yt.MoveTowards(e, t, i)), s; } /** * Creates a new scalar with values linearly interpolated of "amount" between the start scalar and the end scalar. * @param start start value * @param end target value * @param amount amount to lerp between * @returns the lerped value */ static Lerp(e, t, i) { return e + (t - e) * i; } /** * Same as Lerp but makes sure the values interpolate correctly when they wrap around 360 degrees. * The parameter t is clamped to the range [0, 1]. Variables a and b are assumed to be in degrees. * @param start start value * @param end target value * @param amount amount to lerp between * @returns the lerped value */ static LerpAngle(e, t, i) { let r = yt.Repeat(t - e, 360); return r > 180 && (r -= 360), e + r * yt.Clamp(i); } /** * Calculates the linear parameter t that produces the interpolant value within the range [a, b]. * @param a start value * @param b target value * @param value value between a and b * @returns the inverseLerp value */ static InverseLerp(e, t, i) { let r = 0; return e != t ? r = yt.Clamp((i - e) / (t - e)) : r = 0, r; } /** * Returns a new scalar located for "amount" (float) on the Hermite spline defined by the scalars "value1", "value3", "tangent1", "tangent2". * @see http://mathworld.wolfram.com/HermitePolynomial.html * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param amount defines the amount on the interpolation spline (between 0 and 1) * @returns hermite result */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n; return e * l + i * o + t * u + r * h; } /** * Returns a new scalar which is the 1st derivative of the Hermite spline defined by the scalars "value1", "value2", "tangent1", "tangent2". 
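 * Usage sketch (assuming `yt` is this bundle's minified name for its scalar-math helper): evaluating a Hermite spline and its slope at the same point.
 * @example
 * const position = yt.Hermite(0, 1, 10, 1, 0.5);              // value halfway along the spline from 0 to 10
 * const velocity = yt.Hermite1stDerivative(0, 1, 10, 1, 0.5); // rate of change at that same point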
* @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = s * s; return (n - s) * 6 * e + (3 * n - 4 * s + 1) * t + (-n + s) * 6 * i + (3 * n - 2 * s) * r; } /** * Returns a random float number between and min and max values * @param min min value of random * @param max max value of random * @returns random value */ static RandomRange(e, t) { return e === t ? e : Math.random() * (t - e) + e; } /** * This function returns percentage of a number in a given range. * * RangeToPercent(40,20,60) will return 0.5 (50%) * RangeToPercent(34,0,100) will return 0.34 (34%) * @param number to convert to percentage * @param min min range * @param max max range * @returns the percentage */ static RangeToPercent(e, t, i) { return (e - t) / (i - t); } /** * This function returns number that corresponds to the percentage in a given range. * * PercentToRange(0.34,0,100) will return 34. * @param percent to convert to number * @param min min range * @param max max range * @returns the number */ static PercentToRange(e, t, i) { return (i - t) * e + t; } /** * Returns the angle converted to equivalent value between -Math.PI and Math.PI radians. * @param angle The angle to normalize in radian. * @returns The converted angle. */ static NormalizeRadians(e) { return e -= yt.TwoPi * Math.floor((e + Math.PI) / yt.TwoPi), e; } /** * Returns the highest common factor of two integers. * @param a first parameter * @param b second parameter * @returns HCF of a and b */ static HCF(e, t) { const i = e % t; return i === 0 ? t : yt.HCF(t, i); } } yt.TwoPi = Math.PI * 2; const nO = 1 / 2.2, V9 = 2.2, Uc = (1 + Math.sqrt(5)) / 2, Sr = 1e-3; class kc { /** * Returns an array of the given size filled with elements built from the given constructor and the parameters. * @param size the number of element to construct and put in the array. * @param itemBuilder a callback responsible for creating new instance of item. Called once per array entry. * @returns a new array filled with new objects. */ static BuildArray(e, t) { const i = []; for (let r = 0; r < e; ++r) i.push(t()); return i; } /** * Returns a tuple of the given size filled with elements built from the given constructor and the parameters. * @param size he number of element to construct and put in the tuple. * @param itemBuilder a callback responsible for creating new instance of item. Called once per tuple entry. * @returns a new tuple filled with new objects. */ static BuildTuple(e, t) { return kc.BuildArray(e, t); } } function Dle(c, e, t) { const i = c[e]; if (typeof i != "function") return null; const r = function() { const s = c.length, n = r.previous.apply(c, arguments); return t(e, s), n; }; return i.next = r, r.previous = i, c[e] = r, () => { const s = r.previous; if (!s) return; const n = r.next; n ? 
(s.next = n, n.previous = s) : (s.next = void 0, c[e] = s), r.next = void 0, r.previous = void 0; }; } const Ole = ["push", "splice", "pop", "shift", "unshift"]; function Uee(c, e) { const t = Ole.map((i) => Dle(c, i, e)); return () => { t.forEach((i) => { i == null || i(); }); }; } const Vee = {}; function Be(c, e) { Vee[c] = e; } function Qo(c) { return Vee[c]; } class Uu { /** * @internal */ static SetMatrixPrecision(e) { if (Uu.MatrixTrackPrecisionChange = !1, e && !Uu.MatrixUse64Bits && Uu.MatrixTrackedMatrices) for (let t = 0; t < Uu.MatrixTrackedMatrices.length; ++t) { const i = Uu.MatrixTrackedMatrices[t], r = i._m; i._m = new Array(16); for (let s = 0; s < 16; ++s) i._m[s] = r[s]; } Uu.MatrixUse64Bits = e, Uu.MatrixCurrentType = Uu.MatrixUse64Bits ? Array : Float32Array, Uu.MatrixTrackedMatrices = null; } } Uu.MatrixUse64Bits = !1; Uu.MatrixTrackPrecisionChange = !0; Uu.MatrixCurrentType = Float32Array; Uu.MatrixTrackedMatrices = []; class gi { /** * Gets the latest created engine */ static get LastCreatedEngine() { return this.Instances.length === 0 ? null : this.Instances[this.Instances.length - 1]; } /** * Gets the latest created scene */ static get LastCreatedScene() { return this._LastCreatedScene; } } gi.Instances = []; gi.OnEnginesDisposedObservable = new Fe(); gi._LastCreatedScene = null; gi.UseFallbackTexture = !0; gi.FallbackTexture = ""; const W_ = (c) => parseInt(c.toString().replace(/\W/g, "")); class at { /** * Creates a new Vector2 from the given x and y coordinates * @param x defines the first coordinate * @param y defines the second coordinate */ constructor(e = 0, t = 0) { this.x = e, this.y = t; } /** * Gets a string with the Vector2 coordinates * @returns a string with the Vector2 coordinates */ toString() { return `{X: ${this.x} Y: ${this.y}}`; } /** * Gets class name * @returns the string "Vector2" */ getClassName() { return "Vector2"; } /** * Gets current vector hash code * @returns the Vector2 hash code as a number */ getHashCode() { const e = W_(this.x), t = W_(this.y); let i = e; return i = i * 397 ^ t, i; } // Operators /** * Sets the Vector2 coordinates in the given array or Float32Array from the given index. * Example Playground https://playground.babylonjs.com/#QYBWV4#15 * @param array defines the source array * @param index defines the offset in source array * @returns the current Vector2 */ toArray(e, t = 0) { return e[t] = this.x, e[t + 1] = this.y, this; } /** * Update the current vector from an array * Example Playground https://playground.babylonjs.com/#QYBWV4#39 * @param array defines the destination array * @param index defines the offset in the destination array * @returns the current Vector2 */ fromArray(e, t = 0) { return at.FromArrayToRef(e, t, this), this; } /** * Copy the current vector to an array * Example Playground https://playground.babylonjs.com/#QYBWV4#40 * @returns a new array with 2 elements: the Vector2 coordinates. 
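 * Usage sketch (assuming `at` is this bundle's minified name for its Vector2 class):
 * @example
 * const v = new at(1.5, -2);
 * console.log(v.asArray()); // [1.5, -2]
 * const buffer = new Float32Array(4);
 * v.toArray(buffer, 2);     // writes x and y at offsets 2 and 3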
*/ asArray() { const e = []; return this.toArray(e, 0), e; } /** * Sets the Vector2 coordinates with the given Vector2 coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#24 * @param source defines the source Vector2 * @returns the current updated Vector2 */ copyFrom(e) { return this.x = e.x, this.y = e.y, this; } /** * Sets the Vector2 coordinates with the given floats * Example Playground https://playground.babylonjs.com/#QYBWV4#25 * @param x defines the first coordinate * @param y defines the second coordinate * @returns the current updated Vector2 */ copyFromFloats(e, t) { return this.x = e, this.y = t, this; } /** * Sets the Vector2 coordinates with the given floats * Example Playground https://playground.babylonjs.com/#QYBWV4#62 * @param x defines the first coordinate * @param y defines the second coordinate * @returns the current updated Vector2 */ set(e, t) { return this.copyFromFloats(e, t); } /** * Add another vector with the current one * Example Playground https://playground.babylonjs.com/#QYBWV4#11 * @param otherVector defines the other vector * @returns a new Vector2 set with the addition of the current Vector2 and the given one coordinates */ add(e) { return new this.constructor(this.x + e.x, this.y + e.y); } /** * Sets the "result" coordinates with the addition of the current Vector2 and the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#12 * @param otherVector defines the other vector * @param result defines the target vector * @returns result input */ addToRef(e, t) { return t.x = this.x + e.x, t.y = this.y + e.y, t; } /** * Set the Vector2 coordinates by adding the given Vector2 coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#13 * @param otherVector defines the other vector * @returns the current updated Vector2 */ addInPlace(e) { return this.x += e.x, this.y += e.y, this; } /** * Gets a new Vector2 by adding the current Vector2 coordinates to the given Vector3 x, y coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#14 * @param otherVector defines the other vector * @returns a new Vector2 */ addVector3(e) { return new this.constructor(this.x + e.x, this.y + e.y); } /** * Gets a new Vector2 set with the subtracted coordinates of the given one from the current Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#61 * @param otherVector defines the other vector * @returns a new Vector2 */ subtract(e) { return new this.constructor(this.x - e.x, this.y - e.y); } /** * Sets the "result" coordinates with the subtraction of the given one from the current Vector2 coordinates. 
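 * Usage sketch (assuming `at` is this bundle's minified Vector2 class):
 * @example
 * const out = new at(0, 0);
 * new at(5, 5).subtractToRef(new at(2, 3), out); // out is now {X: 3 Y: 2}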
* Example Playground https://playground.babylonjs.com/#QYBWV4#63 * @param otherVector defines the other vector * @param result defines the target vector * @returns result input */ subtractToRef(e, t) { return t.x = this.x - e.x, t.y = this.y - e.y, t; } /** * Sets the current Vector2 coordinates by subtracting from it the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#88 * @param otherVector defines the other vector * @returns the current updated Vector2 */ subtractInPlace(e) { return this.x -= e.x, this.y -= e.y, this; } /** * Multiplies in place the current Vector2 coordinates by the given ones * Example Playground https://playground.babylonjs.com/#QYBWV4#43 * @param otherVector defines the other vector * @returns the current updated Vector2 */ multiplyInPlace(e) { return this.x *= e.x, this.y *= e.y, this; } /** * Returns a new Vector2 set with the multiplication of the current Vector2 and the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#42 * @param otherVector defines the other vector * @returns a new Vector2 */ multiply(e) { return new this.constructor(this.x * e.x, this.y * e.y); } /** * Sets "result" coordinates with the multiplication of the current Vector2 and the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#44 * @param otherVector defines the other vector * @param result defines the target vector * @returns result input */ multiplyToRef(e, t) { return t.x = this.x * e.x, t.y = this.y * e.y, t; } /** * Gets a new Vector2 set with the Vector2 coordinates multiplied by the given floats * Example Playground https://playground.babylonjs.com/#QYBWV4#89 * @param x defines the first coordinate * @param y defines the second coordinate * @returns a new Vector2 */ multiplyByFloats(e, t) { return new this.constructor(this.x * e, this.y * t); } /** * Returns a new Vector2 set with the Vector2 coordinates divided by the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#27 * @param otherVector defines the other vector * @returns a new Vector2 */ divide(e) { return new this.constructor(this.x / e.x, this.y / e.y); } /** * Sets the "result" coordinates with the Vector2 divided by the given one coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#30 * @param otherVector defines the other vector * @param result defines the target vector * @returns result input */ divideToRef(e, t) { return t.x = this.x / e.x, t.y = this.y / e.y, t; } /** * Divides the current Vector2 coordinates by the given ones * Example Playground https://playground.babylonjs.com/#QYBWV4#28 * @param otherVector defines the other vector * @returns the current updated Vector2 */ divideInPlace(e) { return this.divideToRef(e, this); } /** * Gets a new Vector2 with current Vector2 negated coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#22 * @returns a new Vector2 */ negate() { return new this.constructor(-this.x, -this.y); } /** * Negate this vector in place * Example Playground https://playground.babylonjs.com/#QYBWV4#23 * @returns this */ negateInPlace() { return this.x *= -1, this.y *= -1, this; } /** * Negate the current Vector2 and stores the result in the given vector "result" coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#41 * @param result defines the Vector3 object where to store the result * @returns the result */ negateToRef(e) { return e.copyFromFloats(this.x * -1, this.y * -1); } /** * Multiply the 
Vector2 coordinates by * Example Playground https://playground.babylonjs.com/#QYBWV4#59 * @param scale defines the scaling factor * @returns the current updated Vector2 */ scaleInPlace(e) { return this.x *= e, this.y *= e, this; } /** * Returns a new Vector2 scaled by "scale" from the current Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#52 * @param scale defines the scaling factor * @returns a new Vector2 */ scale(e) { const t = new this.constructor(0, 0); return this.scaleToRef(e, t), t; } /** * Scale the current Vector2 values by a factor to a given Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#57 * @param scale defines the scale factor * @param result defines the Vector2 object where to store the result * @returns result input */ scaleToRef(e, t) { return t.x = this.x * e, t.y = this.y * e, t; } /** * Scale the current Vector2 values by a factor and add the result to a given Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#58 * @param scale defines the scale factor * @param result defines the Vector2 object where to store the result * @returns result input */ scaleAndAddToRef(e, t) { return t.x += this.x * e, t.y += this.y * e, t; } /** * Gets a boolean if two vectors are equals * Example Playground https://playground.babylonjs.com/#QYBWV4#31 * @param otherVector defines the other vector * @returns true if the given vector coordinates strictly equal the current Vector2 ones */ equals(e) { return e && this.x === e.x && this.y === e.y; } /** * Gets a boolean if two vectors are equals (using an epsilon value) * Example Playground https://playground.babylonjs.com/#QYBWV4#32 * @param otherVector defines the other vector * @param epsilon defines the minimal distance to consider equality * @returns true if the given vector coordinates are close to the current ones by a distance of epsilon. 
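 * Usage sketch (assuming `at` is this bundle's minified Vector2 class; the default epsilon is this file's `Sr` constant, 0.001):
 * @example
 * const a = new at(1, 1);
 * const b = new at(1.0004, 1);
 * console.log(a.equals(b));            // false, strict comparison
 * console.log(a.equalsWithEpsilon(b)); // true, the difference is below 0.001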
*/ equalsWithEpsilon(e, t = Sr) { return e && yt.WithinEpsilon(this.x, e.x, t) && yt.WithinEpsilon(this.y, e.y, t); } /** * Gets a new Vector2 from current Vector2 floored values * Example Playground https://playground.babylonjs.com/#QYBWV4#35 * eg (1.2, 2.31) returns (1, 2) * @returns a new Vector2 */ floor() { return new this.constructor(Math.floor(this.x), Math.floor(this.y)); } /** * Gets a new Vector2 from current Vector2 fractional values * Example Playground https://playground.babylonjs.com/#QYBWV4#34 * eg (1.2, 2.31) returns (0.2, 0.31) * @returns a new Vector2 */ fract() { return new this.constructor(this.x - Math.floor(this.x), this.y - Math.floor(this.y)); } /** * Rotate the current vector into a given result vector * Example Playground https://playground.babylonjs.com/#QYBWV4#49 * @param angle defines the rotation angle * @param result defines the result vector where to store the rotated vector * @returns result input */ rotateToRef(e, t) { const i = Math.cos(e), r = Math.sin(e), s = i * this.x - r * this.y, n = r * this.x + i * this.y; return t.x = s, t.y = n, t; } // Properties /** * Gets the length of the vector * @returns the vector length (float) */ length() { return Math.sqrt(this.x * this.x + this.y * this.y); } /** * Gets the vector squared length * @returns the vector squared length (float) */ lengthSquared() { return this.x * this.x + this.y * this.y; } // Methods /** * Normalize the vector * Example Playground https://playground.babylonjs.com/#QYBWV4#48 * @returns the current updated Vector2 */ normalize() { return this.normalizeFromLength(this.length()); } /** * Normalize the current Vector2 with the given input length. * Please note that this is an in place operation. * @param len the length of the vector * @returns the current updated Vector2 */ normalizeFromLength(e) { return e === 0 || e === 1 ? this : this.scaleInPlace(1 / e); } /** * Normalize the current Vector2 to a new vector * @returns the new Vector2 */ normalizeToNew() { const e = new this.constructor(0, 0); return this.normalizeToRef(e), e; } /** * Normalize the current Vector2 to the reference * @param reference define the Vector2 to update * @returns the updated Vector2 */ normalizeToRef(e) { const t = this.length(); return t === 0 || t === 1 ? 
e.copyFromFloats(this.x, this.y) : this.scaleToRef(1 / t, e); } /** * Gets a new Vector2 copied from the Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#20 * @returns a new Vector2 */ clone() { return new this.constructor(this.x, this.y); } /** * Gets the dot product of the current vector and the vector "otherVector" * @param otherVector defines second vector * @returns the dot product (float) */ dot(e) { return this.x * e.x + this.y * e.y; } // Statics /** * Gets a new Vector2(0, 0) * @returns a new Vector2 */ static Zero() { return new at(0, 0); } /** * Gets a new Vector2(1, 1) * @returns a new Vector2 */ static One() { return new at(1, 1); } /** * Returns a new Vector2 with random values between min and max * @param min the minimum random value * @param max the maximum random value * @returns a Vector2 with random values between min and max */ static Random(e = 0, t = 1) { return new at(yt.RandomRange(e, t), yt.RandomRange(e, t)); } /** * Gets a zero Vector2 that must not be updated */ static get ZeroReadOnly() { return at._ZeroReadOnly; } /** * Gets a new Vector2 set from the given index element of the given array * Example Playground https://playground.babylonjs.com/#QYBWV4#79 * @param array defines the data source * @param offset defines the offset in the data source * @returns a new Vector2 */ static FromArray(e, t = 0) { return new at(e[t], e[t + 1]); } /** * Sets "result" from the given index element of the given array * Example Playground https://playground.babylonjs.com/#QYBWV4#80 * @param array defines the data source * @param offset defines the offset in the data source * @param result defines the target vector * @returns result input */ static FromArrayToRef(e, t, i) { return i.x = e[t], i.y = e[t + 1], i; } /** * Gets a new Vector2 located for "amount" (float) on the CatmullRom spline defined by the given four Vector2 * Example Playground https://playground.babylonjs.com/#QYBWV4#65 * @param value1 defines 1st point of control * @param value2 defines 2nd point of control * @param value3 defines 3rd point of control * @param value4 defines 4th point of control * @param amount defines the interpolation factor * @returns a new Vector2 */ static CatmullRom(e, t, i, r, s) { const n = s * s, a = s * n, l = 0.5 * (2 * t.x + (-e.x + i.x) * s + (2 * e.x - 5 * t.x + 4 * i.x - r.x) * n + (-e.x + 3 * t.x - 3 * i.x + r.x) * a), o = 0.5 * (2 * t.y + (-e.y + i.y) * s + (2 * e.y - 5 * t.y + 4 * i.y - r.y) * n + (-e.y + 3 * t.y - 3 * i.y + r.y) * a); return new e.constructor(l, o); } /** * Returns a new Vector2 set with same the coordinates than "value" ones if the vector "value" is in the square defined by "min" and "max". * If a coordinate of "value" is lower than "min" coordinates, the returned Vector2 is given this "min" coordinate. * If a coordinate of "value" is greater than "max" coordinates, the returned Vector2 is given this "max" coordinate * Example Playground https://playground.babylonjs.com/#QYBWV4#76 * @param value defines the value to clamp * @param min defines the lower limit * @param max defines the upper limit * @returns a new Vector2 */ static Clamp(e, t, i) { let r = e.x; r = r > i.x ? i.x : r, r = r < t.x ? t.x : r; let s = e.y; return s = s > i.y ? i.y : s, s = s < t.y ? 
t.y : s, new e.constructor(r, s); } /** * Returns a new Vector2 located for "amount" (float) on the Hermite spline defined by the vectors "value1", "value2", "tangent1", "tangent2" * Example Playground https://playground.babylonjs.com/#QYBWV4#81 * @param value1 defines the 1st control point * @param tangent1 defines the outgoing tangent * @param value2 defines the 2nd control point * @param tangent2 defines the incoming tangent * @param amount defines the interpolation factor * @returns a new Vector2 */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n, d = e.x * l + i.x * o + t.x * u + r.x * h, f = e.y * l + i.y * o + t.y * u + r.y * h; return new e.constructor(d, f); } /** * Returns a new Vector2 which is the 1st derivative of the Hermite spline defined by the vectors "value1", "value2", "tangent1", "tangent2". * Example Playground https://playground.babylonjs.com/#QYBWV4#82 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = new e.constructor(); return this.Hermite1stDerivativeToRef(e, t, i, r, s, n), n; } /** * Returns a new Vector2 which is the 1st derivative of the Hermite spline defined by the vectors "value1", "value2", "tangent1", "tangent2". * Example Playground https://playground.babylonjs.com/#QYBWV4#83 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @param result define where the derivative will be stored * @returns result input */ static Hermite1stDerivativeToRef(e, t, i, r, s, n) { const a = s * s; return n.x = (a - s) * 6 * e.x + (3 * a - 4 * s + 1) * t.x + (-a + s) * 6 * i.x + (3 * a - 2 * s) * r.x, n.y = (a - s) * 6 * e.y + (3 * a - 4 * s + 1) * t.y + (-a + s) * 6 * i.y + (3 * a - 2 * s) * r.y, n; } /** * Returns a new Vector2 located for "amount" (float) on the linear interpolation between the vector "start" adn the vector "end". 
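 * Usage sketch (assuming `at` is this bundle's minified Vector2 class):
 * @example
 * const start = new at(0, 0);
 * const end = new at(10, 20);
 * const mid = at.Lerp(start, end, 0.25); // {X: 2.5 Y: 5}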
* Example Playground https://playground.babylonjs.com/#QYBWV4#84 * @param start defines the start vector * @param end defines the end vector * @param amount defines the interpolation factor * @returns a new Vector2 */ static Lerp(e, t, i) { const r = e.x + (t.x - e.x) * i, s = e.y + (t.y - e.y) * i; return new e.constructor(r, s); } /** * Gets the dot product of the vector "left" and the vector "right" * Example Playground https://playground.babylonjs.com/#QYBWV4#90 * @param left defines first vector * @param right defines second vector * @returns the dot product (float) */ static Dot(e, t) { return e.x * t.x + e.y * t.y; } /** * Returns a new Vector2 equal to the normalized given vector * Example Playground https://playground.babylonjs.com/#QYBWV4#46 * @param vector defines the vector to normalize * @returns a new Vector2 */ static Normalize(e) { const t = new e.constructor(); return at.NormalizeToRef(e, t), t; } /** * Normalize a given vector into a second one * Example Playground https://playground.babylonjs.com/#QYBWV4#50 * @param vector defines the vector to normalize * @param result defines the vector where to store the result * @returns result input */ static NormalizeToRef(e, t) { return e.normalizeToRef(t), t; } /** * Gets a new Vector2 set with the minimal coordinate values from the "left" and "right" vectors * Example Playground https://playground.babylonjs.com/#QYBWV4#86 * @param left defines 1st vector * @param right defines 2nd vector * @returns a new Vector2 */ static Minimize(e, t) { const i = e.x < t.x ? e.x : t.x, r = e.y < t.y ? e.y : t.y; return new e.constructor(i, r); } /** * Gets a new Vector2 set with the maximal coordinate values from the "left" and "right" vectors * Example Playground https://playground.babylonjs.com/#QYBWV4#86 * @param left defines 1st vector * @param right defines 2nd vector * @returns a new Vector2 */ static Maximize(e, t) { const i = e.x > t.x ? e.x : t.x, r = e.y > t.y ? e.y : t.y; return new e.constructor(i, r); } /** * Gets a new Vector2 set with the transformed coordinates of the given vector by the given transformation matrix * Example Playground https://playground.babylonjs.com/#QYBWV4#17 * @param vector defines the vector to transform * @param transformation defines the matrix to apply * @returns a new Vector2 */ static Transform(e, t) { const i = new e.constructor(); return at.TransformToRef(e, t, i), i; } /** * Transforms the given vector coordinates by the given transformation matrix and stores the result in the vector "result" coordinates * Example Playground https://playground.babylonjs.com/#QYBWV4#19 * @param vector defines the vector to transform * @param transformation defines the matrix to apply * @param result defines the target vector * @returns result input */ static TransformToRef(e, t, i) { const r = t.m, s = e.x * r[0] + e.y * r[4] + r[12], n = e.x * r[1] + e.y * r[5] + r[13]; return i.x = s, i.y = n, i; } /** * Determines if a given vector is included in a triangle * Example Playground https://playground.babylonjs.com/#QYBWV4#87 * @param p defines the vector to test * @param p0 defines 1st triangle point * @param p1 defines 2nd triangle point * @param p2 defines 3rd triangle point * @returns true if the point "p" is in the triangle defined by the vectors "p0", "p1", "p2" */ static PointInTriangle(e, t, i, r) { const s = 0.5 * (-i.y * r.x + t.y * (-i.x + r.x) + t.x * (i.y - r.y) + i.x * r.y), n = s < 0 ? 
-1 : 1, a = (t.y * r.x - t.x * r.y + (r.y - t.y) * e.x + (t.x - r.x) * e.y) * n, l = (t.x * i.y - t.y * i.x + (t.y - i.y) * e.x + (i.x - t.x) * e.y) * n; return a > 0 && l > 0 && a + l < 2 * s * n; } /** * Gets the distance between the vectors "value1" and "value2" * Example Playground https://playground.babylonjs.com/#QYBWV4#71 * @param value1 defines first vector * @param value2 defines second vector * @returns the distance between vectors */ static Distance(e, t) { return Math.sqrt(at.DistanceSquared(e, t)); } /** * Returns the squared distance between the vectors "value1" and "value2" * Example Playground https://playground.babylonjs.com/#QYBWV4#72 * @param value1 defines first vector * @param value2 defines second vector * @returns the squared distance between vectors */ static DistanceSquared(e, t) { const i = e.x - t.x, r = e.y - t.y; return i * i + r * r; } /** * Gets a new Vector2 located at the center of the vectors "value1" and "value2" * Example Playground https://playground.babylonjs.com/#QYBWV4#86 * Example Playground https://playground.babylonjs.com/#QYBWV4#66 * @param value1 defines first vector * @param value2 defines second vector * @returns a new Vector2 */ static Center(e, t) { const i = new e.constructor(); return at.CenterToRef(e, t, i); } /** * Gets the center of the vectors "value1" and "value2" and stores the result in the vector "ref" * Example Playground https://playground.babylonjs.com/#QYBWV4#66 * @param value1 defines first vector * @param value2 defines second vector * @param ref defines third vector * @returns ref */ static CenterToRef(e, t, i) { return i.copyFromFloats((e.x + t.x) / 2, (e.y + t.y) / 2); } /** * Gets the shortest distance (float) between the point "p" and the segment defined by the two points "segA" and "segB". * Example Playground https://playground.babylonjs.com/#QYBWV4#77 * @param p defines the middle point * @param segA defines one point of the segment * @param segB defines the other point of the segment * @returns the shortest distance */ static DistanceOfPointFromSegment(e, t, i) { const r = at.DistanceSquared(t, i); if (r === 0) return at.Distance(e, t); const s = i.subtract(t), n = Math.max(0, Math.min(1, at.Dot(e.subtract(t), s) / r)), a = t.add(s.multiplyByFloats(n, n)); return at.Distance(e, a); } } at._ZeroReadOnly = at.Zero(); class D { /** Gets or sets the x coordinate */ get x() { return this._x; } set x(e) { this._x = e, this._isDirty = !0; } /** Gets or sets the y coordinate */ get y() { return this._y; } set y(e) { this._y = e, this._isDirty = !0; } /** Gets or sets the z coordinate */ get z() { return this._z; } set z(e) { this._z = e, this._isDirty = !0; } /** * Creates a new Vector3 object from the given x, y, z (floats) coordinates. * @param x defines the first coordinates (on X axis) * @param y defines the second coordinates (on Y axis) * @param z defines the third coordinates (on Z axis) */ constructor(e = 0, t = 0, i = 0) { this._isDirty = !0, this._x = e, this._y = t, this._z = i; } /** * Creates a string representation of the Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#67 * @returns a string with the Vector3 coordinates. 
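 * Usage sketch (assuming `D` is this bundle's minified name for its Vector3 class):
 * @example
 * const v = new D(1, 2, 3);
 * console.log(v.toString()); // "{X: 1 Y: 2 Z: 3}"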
*/ toString() { return `{X: ${this._x} Y: ${this._y} Z: ${this._z}}`; } /** * Gets the class name * @returns the string "Vector3" */ getClassName() { return "Vector3"; } /** * Creates the Vector3 hash code * @returns a number which tends to be unique between Vector3 instances */ getHashCode() { const e = W_(this._x), t = W_(this._y), i = W_(this._z); let r = e; return r = r * 397 ^ t, r = r * 397 ^ i, r; } // Operators /** * Creates an array containing three elements : the coordinates of the Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#10 * @returns a new array of numbers */ asArray() { const e = []; return this.toArray(e, 0), e; } /** * Populates the given array or Float32Array from the given index with the successive coordinates of the Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#65 * @param array defines the destination array * @param index defines the offset in the destination array * @returns the current Vector3 */ toArray(e, t = 0) { return e[t] = this._x, e[t + 1] = this._y, e[t + 2] = this._z, this; } /** * Update the current vector from an array * Example Playground https://playground.babylonjs.com/#R1F8YU#24 * @param array defines the destination array * @param index defines the offset in the destination array * @returns the current Vector3 */ fromArray(e, t = 0) { return D.FromArrayToRef(e, t, this), this; } /** * Converts the current Vector3 into a quaternion (considering that the Vector3 contains Euler angles representation of a rotation) * Example Playground https://playground.babylonjs.com/#R1F8YU#66 * @returns a new Quaternion object, computed from the Vector3 coordinates */ toQuaternion() { return Ze.RotationYawPitchRoll(this._y, this._x, this._z); } /** * Adds the given vector to the current Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#4 * @param otherVector defines the second operand * @returns the current updated Vector3 */ addInPlace(e) { return this.addInPlaceFromFloats(e._x, e._y, e._z); } /** * Adds the given coordinates to the current Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#5 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the current updated Vector3 */ addInPlaceFromFloats(e, t, i) { return this._x += e, this._y += t, this._z += i, this._isDirty = !0, this; } /** * Gets a new Vector3, result of the addition the current Vector3 and the given vector * Example Playground https://playground.babylonjs.com/#R1F8YU#3 * @param otherVector defines the second operand * @returns the resulting Vector3 */ add(e) { return new this.constructor(this._x + e._x, this._y + e._y, this._z + e._z); } /** * Adds the current Vector3 to the given one and stores the result in the vector "result" * Example Playground https://playground.babylonjs.com/#R1F8YU#6 * @param otherVector defines the second operand * @param result defines the Vector3 object where to store the result * @returns the result */ addToRef(e, t) { return t.copyFromFloats(this._x + e._x, this._y + e._y, this._z + e._z); } /** * Subtract the given vector from the current Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#61 * @param otherVector defines the second operand * @returns the current updated Vector3 */ subtractInPlace(e) { return this._x -= e._x, this._y -= e._y, this._z -= e._z, this._isDirty = !0, this; } /** * Returns a new Vector3, result of the subtraction of the given 
vector from the current Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#60 * @param otherVector defines the second operand * @returns the resulting Vector3 */ subtract(e) { return new this.constructor(this._x - e._x, this._y - e._y, this._z - e._z); } /** * Subtracts the given vector from the current Vector3 and stores the result in the vector "result". * Example Playground https://playground.babylonjs.com/#R1F8YU#63 * @param otherVector defines the second operand * @param result defines the Vector3 object where to store the result * @returns the result */ subtractToRef(e, t) { return this.subtractFromFloatsToRef(e._x, e._y, e._z, t); } /** * Returns a new Vector3 set with the subtraction of the given floats from the current Vector3 coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#62 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the resulting Vector3 */ subtractFromFloats(e, t, i) { return new this.constructor(this._x - e, this._y - t, this._z - i); } /** * Subtracts the given floats from the current Vector3 coordinates and set the given vector "result" with this result * Example Playground https://playground.babylonjs.com/#R1F8YU#64 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @param result defines the Vector3 object where to store the result * @returns the result */ subtractFromFloatsToRef(e, t, i, r) { return r.copyFromFloats(this._x - e, this._y - t, this._z - i); } /** * Gets a new Vector3 set with the current Vector3 negated coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#35 * @returns a new Vector3 */ negate() { return new this.constructor(-this._x, -this._y, -this._z); } /** * Negate this vector in place * Example Playground https://playground.babylonjs.com/#R1F8YU#36 * @returns this */ negateInPlace() { return this._x *= -1, this._y *= -1, this._z *= -1, this._isDirty = !0, this; } /** * Negate the current Vector3 and stores the result in the given vector "result" coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#37 * @param result defines the Vector3 object where to store the result * @returns the result */ negateToRef(e) { return e.copyFromFloats(this._x * -1, this._y * -1, this._z * -1); } /** * Multiplies the Vector3 coordinates by the float "scale" * Example Playground https://playground.babylonjs.com/#R1F8YU#56 * @param scale defines the multiplier factor * @returns the current updated Vector3 */ scaleInPlace(e) { return this._x *= e, this._y *= e, this._z *= e, this._isDirty = !0, this; } /** * Returns a new Vector3 set with the current Vector3 coordinates multiplied by the float "scale" * Example Playground https://playground.babylonjs.com/#R1F8YU#53 * @param scale defines the multiplier factor * @returns a new Vector3 */ scale(e) { return new this.constructor(this._x * e, this._y * e, this._z * e); } /** * Multiplies the current Vector3 coordinates by the float "scale" and stores the result in the given vector "result" coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#57 * @param scale defines the multiplier factor * @param result defines the Vector3 object where to store the result * @returns the result */ scaleToRef(e, t) { return t.copyFromFloats(this._x * e, this._y * e, this._z * e); } /** * Creates a vector normal 
(perpendicular) to the current Vector3 and stores the result in the given vector * Out of the infinite possibilities the normal chosen is the one formed by rotating the current vector * 90 degrees about an axis which lies perpendicular to the current vector * and its projection on the xz plane. In the case of a current vector in the xz plane * the normal is calculated to be along the y axis. * Example Playground https://playground.babylonjs.com/#R1F8YU#230 * Example Playground https://playground.babylonjs.com/#R1F8YU#231 * @param result defines the Vector3 object where to store the resultant normal * returns the result */ getNormalToRef(e) { const t = this.length(); let i = Math.acos(this.y / t); const r = Math.atan2(this.z, this.x); i > Math.PI / 2 ? i -= Math.PI / 2 : i += Math.PI / 2; const s = t * Math.sin(i) * Math.cos(r), n = t * Math.cos(i), a = t * Math.sin(i) * Math.sin(r); return e.set(s, n, a), e; } /** * Rotates the vector using the given unit quaternion and stores the new vector in result * Example Playground https://playground.babylonjs.com/#R1F8YU#9 * @param q the unit quaternion representing the rotation * @param result the output vector * @returns the result */ applyRotationQuaternionToRef(e, t) { const i = this._x, r = this._y, s = this._z, n = e._x, a = e._y, l = e._z, o = e._w, u = 2 * (a * s - l * r), h = 2 * (l * i - n * s), d = 2 * (n * r - a * i); return t._x = i + o * u + a * d - l * h, t._y = r + o * h + l * u - n * d, t._z = s + o * d + n * h - a * u, t._isDirty = !0, t; } /** * Rotates the vector in place using the given unit quaternion * Example Playground https://playground.babylonjs.com/#R1F8YU#8 * @param q the unit quaternion representing the rotation * @returns the current updated Vector3 */ applyRotationQuaternionInPlace(e) { return this.applyRotationQuaternionToRef(e, this); } /** * Rotates the vector using the given unit quaternion and returns the new vector * Example Playground https://playground.babylonjs.com/#R1F8YU#7 * @param q the unit quaternion representing the rotation * @returns a new Vector3 */ applyRotationQuaternion(e) { return this.applyRotationQuaternionToRef(e, new this.constructor()); } /** * Scale the current Vector3 values by a factor and add the result to a given Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#55 * @param scale defines the scale factor * @param result defines the Vector3 object where to store the result * @returns result input */ scaleAndAddToRef(e, t) { return t.addInPlaceFromFloats(this._x * e, this._y * e, this._z * e); } /** * Projects the current point Vector3 to a plane along a ray starting from a specified origin and passing through the current point Vector3. * Example Playground https://playground.babylonjs.com/#R1F8YU#48 * @param plane defines the plane to project to * @param origin defines the origin of the projection ray * @returns the projected vector3 */ projectOnPlane(e, t) { const i = new this.constructor(); return this.projectOnPlaneToRef(e, t, i), i; } /** * Projects the current point Vector3 to a plane along a ray starting from a specified origin and passing through the current point Vector3. 
* Example Playground https://playground.babylonjs.com/#R1F8YU#49 * @param plane defines the plane to project to * @param origin defines the origin of the projection ray * @param result defines the Vector3 where to store the result * @returns result input */ projectOnPlaneToRef(e, t, i) { const r = e.normal, s = e.d, n = Yi.Vector3[0]; this.subtractToRef(t, n), n.normalize(); const a = D.Dot(n, r); if (Math.abs(a) < 1e-10) i.setAll(1 / 0); else { const l = -(D.Dot(t, r) + s) / a, o = n.scaleInPlace(l); t.addToRef(o, i); } return i; } /** * Returns true if the current Vector3 and the given vector coordinates are strictly equal * Example Playground https://playground.babylonjs.com/#R1F8YU#19 * @param otherVector defines the second operand * @returns true if both vectors are equals */ equals(e) { return e && this._x === e._x && this._y === e._y && this._z === e._z; } /** * Returns true if the current Vector3 and the given vector coordinates are distant less than epsilon * Example Playground https://playground.babylonjs.com/#R1F8YU#21 * @param otherVector defines the second operand * @param epsilon defines the minimal distance to define values as equals * @returns true if both vectors are distant less than epsilon */ equalsWithEpsilon(e, t = Sr) { return e && yt.WithinEpsilon(this._x, e._x, t) && yt.WithinEpsilon(this._y, e._y, t) && yt.WithinEpsilon(this._z, e._z, t); } /** * Returns true if the current Vector3 coordinates equals the given floats * Example Playground https://playground.babylonjs.com/#R1F8YU#20 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns true if both vectors are equal */ equalsToFloats(e, t, i) { return this._x === e && this._y === t && this._z === i; } /** * Multiplies the current Vector3 coordinates by the given ones * Example Playground https://playground.babylonjs.com/#R1F8YU#32 * @param otherVector defines the second operand * @returns the current updated Vector3 */ multiplyInPlace(e) { return this._x *= e._x, this._y *= e._y, this._z *= e._z, this._isDirty = !0, this; } /** * Returns a new Vector3, result of the multiplication of the current Vector3 by the given vector * Example Playground https://playground.babylonjs.com/#R1F8YU#31 * @param otherVector defines the second operand * @returns the new Vector3 */ multiply(e) { return this.multiplyByFloats(e._x, e._y, e._z); } /** * Multiplies the current Vector3 by the given one and stores the result in the given vector "result" * Example Playground https://playground.babylonjs.com/#R1F8YU#33 * @param otherVector defines the second operand * @param result defines the Vector3 object where to store the result * @returns the result */ multiplyToRef(e, t) { return t.copyFromFloats(this._x * e._x, this._y * e._y, this._z * e._z); } /** * Returns a new Vector3 set with the result of the multiplication of the current Vector3 coordinates by the given floats * Example Playground https://playground.babylonjs.com/#R1F8YU#34 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the new Vector3 */ multiplyByFloats(e, t, i) { return new this.constructor(this._x * e, this._y * t, this._z * i); } /** * Returns a new Vector3 set with the result of the division of the current Vector3 coordinates by the given ones * Example Playground https://playground.babylonjs.com/#R1F8YU#16 * @param otherVector defines the second 
operand * @returns the new Vector3 */ divide(e) { return new this.constructor(this._x / e._x, this._y / e._y, this._z / e._z); } /** * Divides the current Vector3 coordinates by the given ones and stores the result in the given vector "result" * Example Playground https://playground.babylonjs.com/#R1F8YU#18 * @param otherVector defines the second operand * @param result defines the Vector3 object where to store the result * @returns the result */ divideToRef(e, t) { return t.copyFromFloats(this._x / e._x, this._y / e._y, this._z / e._z); } /** * Divides the current Vector3 coordinates by the given ones. * Example Playground https://playground.babylonjs.com/#R1F8YU#17 * @param otherVector defines the second operand * @returns the current updated Vector3 */ divideInPlace(e) { return this.divideToRef(e, this); } /** * Updates the current Vector3 with the minimal coordinate values between its own and the given vector's * Example Playground https://playground.babylonjs.com/#R1F8YU#29 * @param other defines the second operand * @returns the current updated Vector3 */ minimizeInPlace(e) { return this.minimizeInPlaceFromFloats(e._x, e._y, e._z); } /** * Updates the current Vector3 with the maximal coordinate values between its own and the given vector's. * Example Playground https://playground.babylonjs.com/#R1F8YU#27 * @param other defines the second operand * @returns the current updated Vector3 */ maximizeInPlace(e) { return this.maximizeInPlaceFromFloats(e._x, e._y, e._z); } /** * Updates the current Vector3 with the minimal coordinate values between its own and the given coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#30 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the current updated Vector3 */ minimizeInPlaceFromFloats(e, t, i) { return e < this._x && (this.x = e), t < this._y && (this.y = t), i < this._z && (this.z = i), this; } /** * Updates the current Vector3 with the maximal coordinate values between its own and the given coordinates. 
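* (Editor-added sketch, not from the original docs; `points` and the BABYLON.* names are illustrative assumptions.)
* @example
* // Accumulate an axis-aligned bounding box over a point list with minimizeInPlace / maximizeInPlace
* const min = new BABYLON.Vector3(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE);
* const max = new BABYLON.Vector3(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE);
* for (const p of points) {
*     min.minimizeInPlace(p);
*     max.maximizeInPlace(p);
* }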
* Example Playground https://playground.babylonjs.com/#R1F8YU#28 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the current updated Vector3 */ maximizeInPlaceFromFloats(e, t, i) { return e > this._x && (this.x = e), t > this._y && (this.y = t), i > this._z && (this.z = i), this; } /** * Due to float precision, scale of a mesh could be uniform but float values are off by a small fraction * Check if is non uniform within a certain amount of decimal places to account for this * @param epsilon the amount the values can differ * @returns if the vector is non uniform to a certain number of decimal places */ isNonUniformWithinEpsilon(e) { const t = Math.abs(this._x), i = Math.abs(this._y); if (!yt.WithinEpsilon(t, i, e)) return !0; const r = Math.abs(this._z); return !yt.WithinEpsilon(t, r, e) || !yt.WithinEpsilon(i, r, e); } /** * Gets a boolean indicating that the vector is non uniform meaning x, y or z are not all the same */ get isNonUniform() { const e = Math.abs(this._x), t = Math.abs(this._y); if (e !== t) return !0; const i = Math.abs(this._z); return e !== i; } /** * Gets a new Vector3 from current Vector3 floored values * Example Playground https://playground.babylonjs.com/#R1F8YU#22 * @returns a new Vector3 */ floor() { return new this.constructor(Math.floor(this._x), Math.floor(this._y), Math.floor(this._z)); } /** * Gets a new Vector3 from current Vector3 fractional values * Example Playground https://playground.babylonjs.com/#R1F8YU#23 * @returns a new Vector3 */ fract() { return new this.constructor(this._x - Math.floor(this._x), this._y - Math.floor(this._y), this._z - Math.floor(this._z)); } // Properties /** * Gets the length of the Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#25 * @returns the length of the Vector3 */ length() { return Math.sqrt(this._x * this._x + this._y * this._y + this._z * this._z); } /** * Gets the squared length of the Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#26 * @returns squared length of the Vector3 */ lengthSquared() { return this._x * this._x + this._y * this._y + this._z * this._z; } /** * Gets a boolean indicating if the vector contains a zero in one of its components * Example Playground https://playground.babylonjs.com/#R1F8YU#1 */ get hasAZeroComponent() { return this._x * this._y * this._z === 0; } /** * Normalize the current Vector3. * Please note that this is an in place operation. * Example Playground https://playground.babylonjs.com/#R1F8YU#122 * @returns the current updated Vector3 */ normalize() { return this.normalizeFromLength(this.length()); } /** * Reorders the x y z properties of the vector in place * Example Playground https://playground.babylonjs.com/#R1F8YU#44 * @param order new ordering of the properties (eg. 
for vector 1,2,3 with "ZYX" will produce 3,2,1) * @returns the current updated vector */ reorderInPlace(e) { if (e = e.toLowerCase(), e === "xyz") return this; const t = Yi.Vector3[0].copyFrom(this); return this.x = t[e[0]], this.y = t[e[1]], this.z = t[e[2]], this; } /** * Rotates the vector around 0,0,0 by a quaternion * Example Playground https://playground.babylonjs.com/#R1F8YU#47 * @param quaternion the rotation quaternion * @param result vector to store the result * @returns the resulting vector */ rotateByQuaternionToRef(e, t) { return e.toRotationMatrix(Yi.Matrix[0]), D.TransformCoordinatesToRef(this, Yi.Matrix[0], t), t; } /** * Rotates a vector around a given point * Example Playground https://playground.babylonjs.com/#R1F8YU#46 * @param quaternion the rotation quaternion * @param point the point to rotate around * @param result vector to store the result * @returns the resulting vector */ rotateByQuaternionAroundPointToRef(e, t, i) { return this.subtractToRef(t, Yi.Vector3[0]), Yi.Vector3[0].rotateByQuaternionToRef(e, Yi.Vector3[0]), t.addToRef(Yi.Vector3[0], i), i; } /** * Returns a new Vector3 as the cross product of the current vector and the "other" one * The cross product is then orthogonal to both current and "other" * Example Playground https://playground.babylonjs.com/#R1F8YU#14 * @param other defines the right operand * @returns the cross product */ cross(e) { const t = new this.constructor(); return D.CrossToRef(this, e, t); } /** * Normalize the current Vector3 with the given input length. * Please note that this is an in place operation. * Example Playground https://playground.babylonjs.com/#R1F8YU#123 * @param len the length of the vector * @returns the current updated Vector3 */ normalizeFromLength(e) { return e === 0 || e === 1 ? this : this.scaleInPlace(1 / e); } /** * Normalize the current Vector3 to a new vector * Example Playground https://playground.babylonjs.com/#R1F8YU#124 * @returns the new Vector3 */ normalizeToNew() { const e = new this.constructor(0, 0, 0); return this.normalizeToRef(e), e; } /** * Normalize the current Vector3 to the reference * Example Playground https://playground.babylonjs.com/#R1F8YU#125 * @param reference define the Vector3 to update * @returns the updated Vector3 */ normalizeToRef(e) { const t = this.length(); return t === 0 || t === 1 ? 
e.copyFromFloats(this._x, this._y, this._z) : this.scaleToRef(1 / t, e); } /** * Creates a new Vector3 copied from the current Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#11 * @returns the new Vector3 */ clone() { return new this.constructor(this._x, this._y, this._z); } /** * Copies the given vector coordinates to the current Vector3 ones * Example Playground https://playground.babylonjs.com/#R1F8YU#12 * @param source defines the source Vector3 * @returns the current updated Vector3 */ copyFrom(e) { return this.copyFromFloats(e._x, e._y, e._z); } /** * Copies the given floats to the current Vector3 coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#13 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the current updated Vector3 */ copyFromFloats(e, t, i) { return this._x = e, this._y = t, this._z = i, this._isDirty = !0, this; } /** * Copies the given floats to the current Vector3 coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#58 * @param x defines the x coordinate of the operand * @param y defines the y coordinate of the operand * @param z defines the z coordinate of the operand * @returns the current updated Vector3 */ set(e, t, i) { return this.copyFromFloats(e, t, i); } /** * Copies the given float to the current Vector3 coordinates * Example Playground https://playground.babylonjs.com/#R1F8YU#59 * @param v defines the x, y and z coordinates of the operand * @returns the current updated Vector3 */ setAll(e) { return this._x = this._y = this._z = e, this._isDirty = !0, this; } // Statics /** * Get the clip factor between two vectors * Example Playground https://playground.babylonjs.com/#R1F8YU#126 * @param vector0 defines the first operand * @param vector1 defines the second operand * @param axis defines the axis to use * @param size defines the size along the axis * @returns the clip factor */ static GetClipFactor(e, t, i, r) { const s = D.Dot(e, i), n = D.Dot(t, i); return (s - r) / (s - n); } /** * Get angle between two vectors * Example Playground https://playground.babylonjs.com/#R1F8YU#86 * @param vector0 the starting point * @param vector1 the ending point * @param normal direction of the normal * @returns the angle between vector0 and vector1 */ static GetAngleBetweenVectors(e, t, i) { const r = e.normalizeToRef(Yi.Vector3[1]), s = t.normalizeToRef(Yi.Vector3[2]); let n = D.Dot(r, s); n = yt.Clamp(n, -1, 1); const a = Math.acos(n), l = Yi.Vector3[3]; return D.CrossToRef(r, s, l), D.Dot(l, i) > 0 ? isNaN(a) ? 0 : a : isNaN(a) ? 
-Math.PI : -Math.acos(n); } /** * Get angle between two vectors projected on a plane * Example Playground https://playground.babylonjs.com/#R1F8YU#87 * Expectation compute time: 0.01 ms (median) and 0.02 ms (percentile 95%) * @param vector0 angle between vector0 and vector1 * @param vector1 angle between vector0 and vector1 * @param normal Normal of the projection plane * @returns the angle in radians (float) between vector0 and vector1 projected on the plane with the specified normal */ static GetAngleBetweenVectorsOnPlane(e, t, i) { Yi.Vector3[0].copyFrom(e); const r = Yi.Vector3[0]; Yi.Vector3[1].copyFrom(t); const s = Yi.Vector3[1]; Yi.Vector3[2].copyFrom(i); const n = Yi.Vector3[2], a = Yi.Vector3[3], l = Yi.Vector3[4]; r.normalize(), s.normalize(), n.normalize(), D.CrossToRef(n, r, a), D.CrossToRef(a, n, l); const o = Math.atan2(D.Dot(s, a), D.Dot(s, l)); return yt.NormalizeRadians(o); } /** * Gets the rotation that aligns the roll axis (Y) to the line joining the start point to the target point and stores it in the ref Vector3 * Example PG https://playground.babylonjs.com/#R1F8YU#189 * @param start the starting point * @param target the target point * @param ref the vector3 to store the result * @returns ref in the form (pitch, yaw, 0) */ static PitchYawRollToMoveBetweenPointsToRef(e, t, i) { const r = de.Vector3[0]; return t.subtractToRef(e, r), i._y = Math.atan2(r.x, r.z) || 0, i._x = Math.atan2(Math.sqrt(r.x ** 2 + r.z ** 2), r.y) || 0, i._z = 0, i._isDirty = !0, i; } /** * Gets the rotation that aligns the roll axis (Y) to the line joining the start point to the target point * Example PG https://playground.babylonjs.com/#R1F8YU#188 * @param start the starting point * @param target the target point * @returns the rotation in the form (pitch, yaw, 0) */ static PitchYawRollToMoveBetweenPoints(e, t) { const i = D.Zero(); return D.PitchYawRollToMoveBetweenPointsToRef(e, t, i); } /** * Slerp between two vectors. See also `SmoothToRef` * Slerp is a spherical linear interpolation * giving a slow in and out effect * Example Playground 1 https://playground.babylonjs.com/#R1F8YU#108 * Example Playground 2 https://playground.babylonjs.com/#R1F8YU#109 * @param vector0 Start vector * @param vector1 End vector * @param slerp amount (will be clamped between 0 and 1) * @param result The slerped vector */ static SlerpToRef(e, t, i, r) { i = yt.Clamp(i, 0, 1); const s = Yi.Vector3[0], n = Yi.Vector3[1]; s.copyFrom(e); const a = s.length(); s.normalizeFromLength(a), n.copyFrom(t); const l = n.length(); n.normalizeFromLength(l); const o = D.Dot(s, n); let u, h; if (o < 1 - Sr) { const d = Math.acos(o), f = 1 / Math.sin(d); u = Math.sin((1 - i) * d) * f, h = Math.sin(i * d) * f; } else u = 1 - i, h = i; return s.scaleInPlace(u), n.scaleInPlace(h), r.copyFrom(s).addInPlace(n), r.scaleInPlace(yt.Lerp(a, l, i)), r; } /** * Smooth interpolation between two vectors using Slerp * Example Playground https://playground.babylonjs.com/#R1F8YU#110 * @param source source vector * @param goal goal vector * @param deltaTime current interpolation frame * @param lerpTime total interpolation time * @param result the smoothed vector */ static SmoothToRef(e, t, i, r, s) { return D.SlerpToRef(e, t, r === 0 ? 
1 : i / r, s), s; } /** * Returns a new Vector3 set from the index "offset" of the given array * Example Playground https://playground.babylonjs.com/#R1F8YU#83 * @param array defines the source array * @param offset defines the offset in the source array * @returns the new Vector3 */ static FromArray(e, t = 0) { return new D(e[t], e[t + 1], e[t + 2]); } /** * Returns a new Vector3 set from the index "offset" of the given Float32Array * @param array defines the source array * @param offset defines the offset in the source array * @returns the new Vector3 * @deprecated Please use FromArray instead. */ static FromFloatArray(e, t) { return D.FromArray(e, t); } /** * Sets the given vector "result" with the element values from the index "offset" of the given array * Example Playground https://playground.babylonjs.com/#R1F8YU#84 * @param array defines the source array * @param offset defines the offset in the source array * @param result defines the Vector3 where to store the result * @returns result input */ static FromArrayToRef(e, t, i) { return i._x = e[t], i._y = e[t + 1], i._z = e[t + 2], i._isDirty = !0, i; } /** * Sets the given vector "result" with the element values from the index "offset" of the given Float32Array * @param array defines the source array * @param offset defines the offset in the source array * @param result defines the Vector3 where to store the result * @deprecated Please use FromArrayToRef instead. */ static FromFloatArrayToRef(e, t, i) { return D.FromArrayToRef(e, t, i); } /** * Sets the given vector "result" with the given floats. * Example Playground https://playground.babylonjs.com/#R1F8YU#85 * @param x defines the x coordinate of the source * @param y defines the y coordinate of the source * @param z defines the z coordinate of the source * @param result defines the Vector3 where to store the result */ static FromFloatsToRef(e, t, i, r) { return r.copyFromFloats(e, t, i), r; } /** * Returns a new Vector3 set to (0.0, 0.0, 0.0) * @returns a new empty Vector3 */ static Zero() { return new D(0, 0, 0); } /** * Returns a new Vector3 set to (1.0, 1.0, 1.0) * @returns a new Vector3 */ static One() { return new D(1, 1, 1); } /** * Returns a new Vector3 set to (0.0, 1.0, 0.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @returns a new up Vector3 */ static Up() { return new D(0, 1, 0); } /** * Gets an up Vector3 that must not be updated */ static get UpReadOnly() { return D._UpReadOnly; } /** * Gets a down Vector3 that must not be updated */ static get DownReadOnly() { return D._DownReadOnly; } /** * Gets a right Vector3 that must not be updated */ static get RightReadOnly() { return D._RightReadOnly; } /** * Gets a left Vector3 that must not be updated */ static get LeftReadOnly() { return D._LeftReadOnly; } /** * Gets a forward Vector3 that must not be updated */ static get LeftHandedForwardReadOnly() { return D._LeftHandedForwardReadOnly; } /** * Gets a forward Vector3 that must not be updated */ static get RightHandedForwardReadOnly() { return D._RightHandedForwardReadOnly; } /** * Gets a backward Vector3 that must not be updated */ static get LeftHandedBackwardReadOnly() { return D._LeftHandedBackwardReadOnly; } /** * Gets a backward Vector3 that must not be updated */ static get RightHandedBackwardReadOnly() { return D._RightHandedBackwardReadOnly; } /** * Gets a zero Vector3 that must not be updated */ static get ZeroReadOnly() { return D._ZeroReadOnly; } /** * Gets a one Vector3 that must not be updated */ static get OneReadOnly() { return 
D._OneReadOnly; } /** * Returns a new Vector3 set to (0.0, -1.0, 0.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @returns a new down Vector3 */ static Down() { return new D(0, -1, 0); } /** * Returns a new Vector3 set to (0.0, 0.0, 1.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @param rightHandedSystem is the scene right-handed (negative z) * @returns a new forward Vector3 */ static Forward(e = !1) { return new D(0, 0, e ? -1 : 1); } /** * Returns a new Vector3 set to (0.0, 0.0, -1.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @param rightHandedSystem is the scene right-handed (negative-z) * @returns a new Backward Vector3 */ static Backward(e = !1) { return new D(0, 0, e ? 1 : -1); } /** * Returns a new Vector3 set to (1.0, 0.0, 0.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @returns a new right Vector3 */ static Right() { return new D(1, 0, 0); } /** * Returns a new Vector3 set to (-1.0, 0.0, 0.0) * Example Playground https://playground.babylonjs.com/#R1F8YU#71 * @returns a new left Vector3 */ static Left() { return new D(-1, 0, 0); } /** * Returns a new Vector3 with random values between min and max * @param min the minimum random value * @param max the maximum random value * @returns a Vector3 with random values between min and max */ static Random(e = 0, t = 1) { return new D(yt.RandomRange(e, t), yt.RandomRange(e, t), yt.RandomRange(e, t)); } /** * Returns a new Vector3 set with the result of the transformation by the given matrix of the given vector. * This method computes transformed coordinates only, not transformed direction vectors (ie. it takes translation in account) * Example Playground https://playground.babylonjs.com/#R1F8YU#111 * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @returns the transformed Vector3 */ static TransformCoordinates(e, t) { const i = D.Zero(); return D.TransformCoordinatesToRef(e, t, i), i; } /** * Sets the given vector "result" coordinates with the result of the transformation by the given matrix of the given vector * This method computes transformed coordinates only, not transformed direction vectors (ie. 
it takes translation in account) * Example Playground https://playground.babylonjs.com/#R1F8YU#113 * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @param result defines the Vector3 where to store the result * @returns result input */ static TransformCoordinatesToRef(e, t, i) { return D.TransformCoordinatesFromFloatsToRef(e._x, e._y, e._z, t, i), i; } /** * Sets the given vector "result" coordinates with the result of the transformation by the given matrix of the given floats (x, y, z) * This method computes transformed coordinates only, not transformed direction vectors * Example Playground https://playground.babylonjs.com/#R1F8YU#115 * @param x define the x coordinate of the source vector * @param y define the y coordinate of the source vector * @param z define the z coordinate of the source vector * @param transformation defines the transformation matrix * @param result defines the Vector3 where to store the result * @returns result input */ static TransformCoordinatesFromFloatsToRef(e, t, i, r, s) { const n = r.m, a = e * n[0] + t * n[4] + i * n[8] + n[12], l = e * n[1] + t * n[5] + i * n[9] + n[13], o = e * n[2] + t * n[6] + i * n[10] + n[14], u = 1 / (e * n[3] + t * n[7] + i * n[11] + n[15]); return s._x = a * u, s._y = l * u, s._z = o * u, s._isDirty = !0, s; } /** * Returns a new Vector3 set with the result of the normal transformation by the given matrix of the given vector * This methods computes transformed normalized direction vectors only (ie. it does not apply translation) * Example Playground https://playground.babylonjs.com/#R1F8YU#112 * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @returns the new Vector3 */ static TransformNormal(e, t) { const i = D.Zero(); return D.TransformNormalToRef(e, t, i), i; } /** * Sets the given vector "result" with the result of the normal transformation by the given matrix of the given vector * This methods computes transformed normalized direction vectors only (ie. it does not apply translation) * Example Playground https://playground.babylonjs.com/#R1F8YU#114 * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @param result defines the Vector3 where to store the result * @returns result input */ static TransformNormalToRef(e, t, i) { return this.TransformNormalFromFloatsToRef(e._x, e._y, e._z, t, i), i; } /** * Sets the given vector "result" with the result of the normal transformation by the given matrix of the given floats (x, y, z) * This methods computes transformed normalized direction vectors only (ie. 
it does not apply translation) * Example Playground https://playground.babylonjs.com/#R1F8YU#116 * @param x define the x coordinate of the source vector * @param y define the y coordinate of the source vector * @param z define the z coordinate of the source vector * @param transformation defines the transformation matrix * @param result defines the Vector3 where to store the result * @returns result input */ static TransformNormalFromFloatsToRef(e, t, i, r, s) { const n = r.m; return s._x = e * n[0] + t * n[4] + i * n[8], s._y = e * n[1] + t * n[5] + i * n[9], s._z = e * n[2] + t * n[6] + i * n[10], s._isDirty = !0, s; } /** * Returns a new Vector3 located for "amount" on the CatmullRom interpolation spline defined by the vectors "value1", "value2", "value3", "value4" * Example Playground https://playground.babylonjs.com/#R1F8YU#69 * @param value1 defines the first control point * @param value2 defines the second control point * @param value3 defines the third control point * @param value4 defines the fourth control point * @param amount defines the amount on the spline to use * @returns the new Vector3 */ static CatmullRom(e, t, i, r, s) { const n = s * s, a = s * n, l = 0.5 * (2 * t._x + (-e._x + i._x) * s + (2 * e._x - 5 * t._x + 4 * i._x - r._x) * n + (-e._x + 3 * t._x - 3 * i._x + r._x) * a), o = 0.5 * (2 * t._y + (-e._y + i._y) * s + (2 * e._y - 5 * t._y + 4 * i._y - r._y) * n + (-e._y + 3 * t._y - 3 * i._y + r._y) * a), u = 0.5 * (2 * t._z + (-e._z + i._z) * s + (2 * e._z - 5 * t._z + 4 * i._z - r._z) * n + (-e._z + 3 * t._z - 3 * i._z + r._z) * a); return new e.constructor(l, o, u); } /** * Returns a new Vector3 set with the coordinates of "value", if the vector "value" is in the cube defined by the vectors "min" and "max" * If a coordinate value of "value" is lower than one of the "min" coordinate, then this "value" coordinate is set with the "min" one * If a coordinate value of "value" is greater than one of the "max" coordinate, then this "value" coordinate is set with the "max" one * Example Playground https://playground.babylonjs.com/#R1F8YU#76 * @param value defines the current value * @param min defines the lower range value * @param max defines the upper range value * @returns the new Vector3 */ static Clamp(e, t, i) { const r = new e.constructor(); return D.ClampToRef(e, t, i, r), r; } /** * Sets the given vector "result" with the coordinates of "value", if the vector "value" is in the cube defined by the vectors "min" and "max" * If a coordinate value of "value" is lower than one of the "min" coordinate, then this "value" coordinate is set with the "min" one * If a coordinate value of "value" is greater than one of the "max" coordinate, then this "value" coordinate is set with the "max" one * Example Playground https://playground.babylonjs.com/#R1F8YU#77 * @param value defines the current value * @param min defines the lower range value * @param max defines the upper range value * @param result defines the Vector3 where to store the result * @returns result input */ static ClampToRef(e, t, i, r) { let s = e._x; s = s > i._x ? i._x : s, s = s < t._x ? t._x : s; let n = e._y; n = n > i._y ? i._y : n, n = n < t._y ? t._y : n; let a = e._z; return a = a > i._z ? i._z : a, a = a < t._z ? 
t._z : a, r.copyFromFloats(s, n, a), r; } /** * Extends the given "min" and "max" vectors, in place, so that the range they define contains the given vector * Example Playground https://playground.babylonjs.com/#R1F8YU#75 * @param v defines the vector to include in the range * @param min defines the minimum range * @param max defines the maximum range */ static CheckExtends(e, t, i) { t.minimizeInPlace(e), i.maximizeInPlace(e); } /** * Returns a new Vector3 located for "amount" (float) on the Hermite interpolation spline defined by the vectors "value1", "tangent1", "value2", "tangent2" * Example Playground https://playground.babylonjs.com/#R1F8YU#89 * @param value1 defines the first control point * @param tangent1 defines the first tangent vector * @param value2 defines the second control point * @param tangent2 defines the second tangent vector * @param amount defines the amount on the interpolation spline (between 0 and 1) * @returns the new Vector3 */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n, d = e._x * l + i._x * o + t._x * u + r._x * h, f = e._y * l + i._y * o + t._y * u + r._y * h, p = e._z * l + i._z * o + t._z * u + r._z * h; return new e.constructor(d, f, p); } /** * Returns a new Vector3 which is the 1st derivative of the Hermite spline defined by the vectors "value1", "value2", "tangent1", "tangent2". * Example Playground https://playground.babylonjs.com/#R1F8YU#90 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time defines where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = new e.constructor(); return this.Hermite1stDerivativeToRef(e, t, i, r, s, n), n; } /** * Updates a Vector3 with the 1st derivative of the Hermite spline defined by the vectors "value1", "value2", "tangent1", "tangent2". 
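* (Editor-added sketch, not from the original docs; the control points and tangents below are arbitrary illustrative values.)
* @example
* // Sample a Hermite segment and its tangent (1st derivative) at the midpoint t = 0.5
* const p0 = new BABYLON.Vector3(0, 0, 0), t0 = new BABYLON.Vector3(1, 0, 0);
* const p1 = new BABYLON.Vector3(1, 1, 0), t1 = new BABYLON.Vector3(1, 0, 0);
* const position = BABYLON.Vector3.Hermite(p0, t0, p1, t1, 0.5);
* const tangent = new BABYLON.Vector3();
* BABYLON.Vector3.Hermite1stDerivativeToRef(p0, t0, p1, t1, 0.5, tangent);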
* Example Playground https://playground.babylonjs.com/#R1F8YU#91 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @param result define where to store the derivative * @returns result input */ static Hermite1stDerivativeToRef(e, t, i, r, s, n) { const a = s * s; return n._x = (a - s) * 6 * e._x + (3 * a - 4 * s + 1) * t._x + (-a + s) * 6 * i._x + (3 * a - 2 * s) * r._x, n._y = (a - s) * 6 * e._y + (3 * a - 4 * s + 1) * t._y + (-a + s) * 6 * i._y + (3 * a - 2 * s) * r._y, n._z = (a - s) * 6 * e._z + (3 * a - 4 * s + 1) * t._z + (-a + s) * 6 * i._z + (3 * a - 2 * s) * r._z, n._isDirty = !0, n; } /** * Returns a new Vector3 located for "amount" (float) on the linear interpolation between the vectors "start" and "end" * Example Playground https://playground.babylonjs.com/#R1F8YU#95 * @param start defines the start value * @param end defines the end value * @param amount max defines amount between both (between 0 and 1) * @returns the new Vector3 */ static Lerp(e, t, i) { const r = new e.constructor(0, 0, 0); return D.LerpToRef(e, t, i, r), r; } /** * Sets the given vector "result" with the result of the linear interpolation from the vector "start" for "amount" to the vector "end" * Example Playground https://playground.babylonjs.com/#R1F8YU#93 * @param start defines the start value * @param end defines the end value * @param amount max defines amount between both (between 0 and 1) * @param result defines the Vector3 where to store the result * @returns result input */ static LerpToRef(e, t, i, r) { return r._x = e._x + (t._x - e._x) * i, r._y = e._y + (t._y - e._y) * i, r._z = e._z + (t._z - e._z) * i, r._isDirty = !0, r; } /** * Returns the dot product (float) between the vectors "left" and "right" * Example Playground https://playground.babylonjs.com/#R1F8YU#82 * @param left defines the left operand * @param right defines the right operand * @returns the dot product */ static Dot(e, t) { return e._x * t._x + e._y * t._y + e._z * t._z; } /** * Returns the dot product (float) between the current vectors and "otherVector" * @param otherVector defines the right operand * @returns the dot product */ dot(e) { return this._x * e._x + this._y * e._y + this._z * e._z; } /** * Returns a new Vector3 as the cross product of the vectors "left" and "right" * The cross product is then orthogonal to both "left" and "right" * Example Playground https://playground.babylonjs.com/#R1F8YU#15 * @param left defines the left operand * @param right defines the right operand * @returns the cross product */ static Cross(e, t) { const i = new e.constructor(); return D.CrossToRef(e, t, i), i; } /** * Sets the given vector "result" with the cross product of "left" and "right" * The cross product is then orthogonal to both "left" and "right" * Example Playground https://playground.babylonjs.com/#R1F8YU#78 * @param left defines the left operand * @param right defines the right operand * @param result defines the Vector3 where to store the result * @returns result input */ static CrossToRef(e, t, i) { const r = e._y * t._z - e._z * t._y, s = e._z * t._x - e._x * t._z, n = e._x * t._y - e._y * t._x; return i.copyFromFloats(r, s, n), i; } /** * Returns a new Vector3 as the normalization of the given vector * Example Playground https://playground.babylonjs.com/#R1F8YU#98 * @param vector defines the Vector3 to normalize * @returns the new 
Vector3 */ static Normalize(e) { const t = D.Zero(); return D.NormalizeToRef(e, t), t; } /** * Sets the given vector "result" with the normalization of the given first vector * Example Playground https://playground.babylonjs.com/#R1F8YU#98 * @param vector defines the Vector3 to normalize * @param result defines the Vector3 where to store the result * @returns result input */ static NormalizeToRef(e, t) { return e.normalizeToRef(t), t; } /** * Project a Vector3 onto screen space * Example Playground https://playground.babylonjs.com/#R1F8YU#101 * @param vector defines the Vector3 to project * @param world defines the world matrix to use * @param transform defines the transform (view x projection) matrix to use * @param viewport defines the screen viewport to use * @returns the new Vector3 */ static Project(e, t, i, r) { const s = new e.constructor(); return D.ProjectToRef(e, t, i, r, s), s; } /** * Project a Vector3 onto screen space to reference * Example Playground https://playground.babylonjs.com/#R1F8YU#102 * @param vector defines the Vector3 to project * @param world defines the world matrix to use * @param transform defines the transform (view x projection) matrix to use * @param viewport defines the screen viewport to use * @param result the vector in which the screen space will be stored * @returns result input */ static ProjectToRef(e, t, i, r, s) { const n = r.width, a = r.height, l = r.x, o = r.y, u = Yi.Matrix[1]; Ae.FromValuesToRef(n / 2, 0, 0, 0, 0, -a / 2, 0, 0, 0, 0, 0.5, 0, l + n / 2, a / 2 + o, 0.5, 1, u); const h = Yi.Matrix[0]; return t.multiplyToRef(i, h), h.multiplyToRef(u, h), D.TransformCoordinatesToRef(e, h, s), s; } /** * Reflects a vector off the plane defined by a normalized normal * @param inDirection defines the vector direction * @param normal defines the normal - Must be normalized * @returns the resulting vector */ static Reflect(e, t) { return this.ReflectToRef(e, t, new D()); } /** * Reflects a vector off the plane defined by a normalized normal to reference * @param inDirection defines the vector direction * @param normal defines the normal - Must be normalized * @param result defines the Vector3 where to store the result * @returns the resulting vector */ static ReflectToRef(e, t, i) { const r = de.Vector3[0]; return r.copyFrom(t).scaleInPlace(2 * D.Dot(e, t)), i.copyFrom(e).subtractInPlace(r); } /** * @internal */ static _UnprojectFromInvertedMatrixToRef(e, t, i) { D.TransformCoordinatesToRef(e, t, i); const r = t.m, s = e._x * r[3] + e._y * r[7] + e._z * r[11] + r[15]; return yt.WithinEpsilon(s, 1) && i.scaleInPlace(1 / s), i; } /** * Unproject from screen space to object space * Example Playground https://playground.babylonjs.com/#R1F8YU#121 * @param source defines the screen space Vector3 to use * @param viewportWidth defines the current width of the viewport * @param viewportHeight defines the current height of the viewport * @param world defines the world matrix to use (can be set to Identity to go to world space) * @param transform defines the transform (view x projection) matrix to use * @returns the new Vector3 */ static UnprojectFromTransform(e, t, i, r, s) { return this.Unproject(e, t, i, r, s, Ae.IdentityReadOnly); } /** * Unproject from screen space to object space * Example Playground https://playground.babylonjs.com/#R1F8YU#117 * @param source defines the screen space Vector3 to use * @param viewportWidth defines the current width of the viewport * @param viewportHeight defines the current height of the viewport * @param world defines the 
world matrix to use (can be set to Identity to go to world space) * @param view defines the view matrix to use * @param projection defines the projection matrix to use * @returns the new Vector3 */ static Unproject(e, t, i, r, s, n) { const a = new e.constructor(); return D.UnprojectToRef(e, t, i, r, s, n, a), a; } /** * Unproject from screen space to object space * Example Playground https://playground.babylonjs.com/#R1F8YU#119 * @param source defines the screen space Vector3 to use * @param viewportWidth defines the current width of the viewport * @param viewportHeight defines the current height of the viewport * @param world defines the world matrix to use (can be set to Identity to go to world space) * @param view defines the view matrix to use * @param projection defines the projection matrix to use * @param result defines the Vector3 where to store the result * @returns result input */ static UnprojectToRef(e, t, i, r, s, n, a) { return D.UnprojectFloatsToRef(e._x, e._y, e._z, t, i, r, s, n, a), a; } /** * Unproject from screen space to object space * Example Playground https://playground.babylonjs.com/#R1F8YU#120 * @param sourceX defines the screen space x coordinate to use * @param sourceY defines the screen space y coordinate to use * @param sourceZ defines the screen space z coordinate to use * @param viewportWidth defines the current width of the viewport * @param viewportHeight defines the current height of the viewport * @param world defines the world matrix to use (can be set to Identity to go to world space) * @param view defines the view matrix to use * @param projection defines the projection matrix to use * @param result defines the Vector3 where to store the result * @returns result input */ static UnprojectFloatsToRef(e, t, i, r, s, n, a, l, o) { var u; const h = Yi.Matrix[0]; n.multiplyToRef(a, h), h.multiplyToRef(l, h), h.invert(); const d = Yi.Vector3[0]; return d.x = e / r * 2 - 1, d.y = -(t / s * 2 - 1), !((u = gi.LastCreatedEngine) === null || u === void 0) && u.isNDCHalfZRange ? 
d.z = i : d.z = 2 * i - 1, D._UnprojectFromInvertedMatrixToRef(d, h, o), o; } /** * Gets the minimal coordinate values between two Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#97 * @param left defines the first operand * @param right defines the second operand * @returns the new Vector3 */ static Minimize(e, t) { const i = new e.constructor(); return i.copyFrom(e), i.minimizeInPlace(t), i; } /** * Gets the maximal coordinate values between two Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#96 * @param left defines the first operand * @param right defines the second operand * @returns the new Vector3 */ static Maximize(e, t) { const i = new e.constructor(); return i.copyFrom(e), i.maximizeInPlace(t), i; } /** * Returns the distance between the vectors "value1" and "value2" * Example Playground https://playground.babylonjs.com/#R1F8YU#81 * @param value1 defines the first operand * @param value2 defines the second operand * @returns the distance */ static Distance(e, t) { return Math.sqrt(D.DistanceSquared(e, t)); } /** * Returns the squared distance between the vectors "value1" and "value2" * Example Playground https://playground.babylonjs.com/#R1F8YU#80 * @param value1 defines the first operand * @param value2 defines the second operand * @returns the squared distance */ static DistanceSquared(e, t) { const i = e._x - t._x, r = e._y - t._y, s = e._z - t._z; return i * i + r * r + s * s; } /** * Projects "vector" on the triangle determined by its extremities "p0", "p1" and "p2", stores the result in "ref" * and returns the distance to the projected point. * Example Playground https://playground.babylonjs.com/#R1F8YU#104 * From http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.104.4264&rep=rep1&type=pdf * * @param vector the vector to get distance from * @param p0 extremity of the triangle * @param p1 extremity of the triangle * @param p2 extremity of the triangle * @param ref variable to store the result to * @returns The distance between "ref" and "vector" */ static ProjectOnTriangleToRef(e, t, i, r, s) { const n = Yi.Vector3[0], a = Yi.Vector3[1], l = Yi.Vector3[2], o = Yi.Vector3[3], u = Yi.Vector3[4]; i.subtractToRef(t, n), r.subtractToRef(t, a), r.subtractToRef(i, l); const h = n.length(), d = a.length(), f = l.length(); if (h < Sr || d < Sr || f < Sr) return s.copyFrom(t), D.Distance(e, t); e.subtractToRef(t, u), D.CrossToRef(n, a, o); const p = o.length(); if (p < Sr) return s.copyFrom(t), D.Distance(e, t); o.normalizeFromLength(p); let m = u.length(); if (m < Sr) return s.copyFrom(t), 0; u.normalizeFromLength(m); const _ = D.Dot(o, u), v = Yi.Vector3[5], C = Yi.Vector3[6]; v.copyFrom(o).scaleInPlace(-m * _), C.copyFrom(e).addInPlace(v); const x = Yi.Vector3[4], b = Yi.Vector3[5], S = Yi.Vector3[7], M = Yi.Vector3[8]; x.copyFrom(n).scaleInPlace(1 / h), M.copyFrom(a).scaleInPlace(1 / d), x.addInPlace(M).scaleInPlace(-1), b.copyFrom(n).scaleInPlace(-1 / h), M.copyFrom(l).scaleInPlace(1 / f), b.addInPlace(M).scaleInPlace(-1), S.copyFrom(l).scaleInPlace(-1 / f), M.copyFrom(a).scaleInPlace(-1 / d), S.addInPlace(M).scaleInPlace(-1); const R = Yi.Vector3[9]; let w; R.copyFrom(C).subtractInPlace(t), D.CrossToRef(x, R, M), w = D.Dot(M, o); const V = w; R.copyFrom(C).subtractInPlace(i), D.CrossToRef(b, R, M), w = D.Dot(M, o); const k = w; R.copyFrom(C).subtractInPlace(r), D.CrossToRef(S, R, M), w = D.Dot(M, o); const L = w, B = Yi.Vector3[10]; let U, K; V > 0 && k < 0 ? (B.copyFrom(n), U = t, K = i) : k > 0 && L < 0 ? 
(B.copyFrom(l), U = i, K = r) : (B.copyFrom(a).scaleInPlace(-1), U = r, K = t); const ee = Yi.Vector3[9], Z = Yi.Vector3[4]; if (U.subtractToRef(C, M), K.subtractToRef(C, ee), D.CrossToRef(M, ee, Z), !(D.Dot(Z, o) < 0)) return s.copyFrom(C), Math.abs(m * _); const le = Yi.Vector3[5]; D.CrossToRef(B, Z, le), le.normalize(); const ie = Yi.Vector3[9]; ie.copyFrom(U).subtractInPlace(C); const $ = ie.length(); if ($ < Sr) return s.copyFrom(U), D.Distance(e, U); ie.normalizeFromLength($); const j = D.Dot(le, ie), J = Yi.Vector3[7]; J.copyFrom(C).addInPlace(le.scaleInPlace($ * j)), M.copyFrom(J).subtractInPlace(U), m = B.length(), B.normalizeFromLength(m); let ne = D.Dot(M, B) / Math.max(m, Sr); return ne = yt.Clamp(ne, 0, 1), J.copyFrom(U).addInPlace(B.scaleInPlace(ne * m)), s.copyFrom(J), D.Distance(e, J); } /** * Returns a new Vector3 located at the center between "value1" and "value2" * Example Playground https://playground.babylonjs.com/#R1F8YU#72 * @param value1 defines the first operand * @param value2 defines the second operand * @returns the new Vector3 */ static Center(e, t) { return D.CenterToRef(e, t, D.Zero()); } /** * Gets the center of the vectors "value1" and "value2" and stores the result in the vector "ref" * Example Playground https://playground.babylonjs.com/#R1F8YU#73 * @param value1 defines first vector * @param value2 defines second vector * @param ref defines third vector * @returns ref */ static CenterToRef(e, t, i) { return i.copyFromFloats((e._x + t._x) / 2, (e._y + t._y) / 2, (e._z + t._z) / 2); } /** * Given three orthogonal normalized left-handed oriented Vector3 axis in space (target system), * RotationFromAxis() returns the rotation Euler angles (ex : rotation.x, rotation.y, rotation.z) to apply * to something in order to rotate it from its local system to the given target system * Note: axis1, axis2 and axis3 are normalized during this operation * Example Playground https://playground.babylonjs.com/#R1F8YU#106 * @param axis1 defines the first axis * @param axis2 defines the second axis * @param axis3 defines the third axis * @returns a new Vector3 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/target_align */ static RotationFromAxis(e, t, i) { const r = new e.constructor(); return D.RotationFromAxisToRef(e, t, i, r), r; } /** * The same than RotationFromAxis but updates the given ref Vector3 parameter instead of returning a new Vector3 * Example Playground https://playground.babylonjs.com/#R1F8YU#107 * @param axis1 defines the first axis * @param axis2 defines the second axis * @param axis3 defines the third axis * @param ref defines the Vector3 where to store the result * @returns result input */ static RotationFromAxisToRef(e, t, i, r) { const s = Yi.Quaternion[0]; return Ze.RotationQuaternionFromAxisToRef(e, t, i, s), s.toEulerAnglesToRef(r), r; } } D._UpReadOnly = D.Up(); D._DownReadOnly = D.Down(); D._LeftHandedForwardReadOnly = D.Forward(!1); D._RightHandedForwardReadOnly = D.Forward(!0); D._LeftHandedBackwardReadOnly = D.Backward(!1); D._RightHandedBackwardReadOnly = D.Backward(!0); D._RightReadOnly = D.Right(); D._LeftReadOnly = D.Left(); D._ZeroReadOnly = D.Zero(); D._OneReadOnly = D.One(); class Di { /** * Creates a Vector4 object from the given floats. 
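* (Editor-added sketch, not from the original docs; it uses the public BABYLON.Vector4 name rather than the minified class name.)
* @example
* // A point in homogeneous coordinates (w = 1) and a pure direction (w = 0)
* const point = new BABYLON.Vector4(2, 4, 3, 1);
* const direction = new BABYLON.Vector4(0, 1, 0, 0);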
* @param x x value of the vector * @param y y value of the vector * @param z z value of the vector * @param w w value of the vector */ constructor(e = 0, t = 0, i = 0, r = 0) { this.x = e, this.y = t, this.z = i, this.w = r; } /** * Returns the string with the Vector4 coordinates. * @returns a string containing all the vector values */ toString() { return `{X: ${this.x} Y: ${this.y} Z: ${this.z} W: ${this.w}}`; } /** * Returns the string "Vector4". * @returns "Vector4" */ getClassName() { return "Vector4"; } /** * Returns the Vector4 hash code. * @returns a unique hash code */ getHashCode() { const e = W_(this.x), t = W_(this.y), i = W_(this.z), r = W_(this.w); let s = e; return s = s * 397 ^ t, s = s * 397 ^ i, s = s * 397 ^ r, s; } // Operators /** * Returns a new array populated with 4 elements : the Vector4 coordinates. * @returns the resulting array */ asArray() { const e = []; return this.toArray(e, 0), e; } /** * Populates the given array from the given index with the Vector4 coordinates. * @param array array to populate * @param index index of the array to start at (default: 0) * @returns the Vector4. */ toArray(e, t) { return t === void 0 && (t = 0), e[t] = this.x, e[t + 1] = this.y, e[t + 2] = this.z, e[t + 3] = this.w, this; } /** * Updates the current vector from an array * @param array defines the source array * @param index defines the offset in the source array * @returns the current Vector4 */ fromArray(e, t = 0) { return Di.FromArrayToRef(e, t, this), this; } /** * Adds the given vector to the current Vector4. * @param otherVector the vector to add * @returns the updated Vector4. */ addInPlace(e) { return this.x += e.x, this.y += e.y, this.z += e.z, this.w += e.w, this; } /** * Returns a new Vector4 as the result of the addition of the current Vector4 and the given one. * @param otherVector the vector to add * @returns the resulting vector */ add(e) { return new this.constructor(this.x + e.x, this.y + e.y, this.z + e.z, this.w + e.w); } /** * Updates the given vector "result" with the result of the addition of the current Vector4 and the given one. * @param otherVector the vector to add * @param result the vector to store the result * @returns result input */ addToRef(e, t) { return t.x = this.x + e.x, t.y = this.y + e.y, t.z = this.z + e.z, t.w = this.w + e.w, t; } /** * Subtract in place the given vector from the current Vector4. * @param otherVector the vector to subtract * @returns the updated Vector4. */ subtractInPlace(e) { return this.x -= e.x, this.y -= e.y, this.z -= e.z, this.w -= e.w, this; } /** * Returns a new Vector4 with the result of the subtraction of the given vector from the current Vector4. * @param otherVector the vector to subtract * @returns the new vector with the result */ subtract(e) { return new this.constructor(this.x - e.x, this.y - e.y, this.z - e.z, this.w - e.w); } /** * Sets the given vector "result" with the result of the subtraction of the given vector from the current Vector4. * @param otherVector the vector to subtract * @param result the vector to store the result * @returns result input */ subtractToRef(e, t) { return t.x = this.x - e.x, t.y = this.y - e.y, t.z = this.z - e.z, t.w = this.w - e.w, t; } /** * Returns a new Vector4 set with the result of the subtraction of the given floats from the current Vector4 coordinates. 
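* (Editor-added sketch, not from the original docs.)
* @example
* new BABYLON.Vector4(5, 5, 5, 5).subtractFromFloats(1, 2, 3, 4); // -> (4, 3, 2, 1)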
* @param x value to subtract * @param y value to subtract * @param z value to subtract * @param w value to subtract * @returns new vector containing the result */ subtractFromFloats(e, t, i, r) { return new this.constructor(this.x - e, this.y - t, this.z - i, this.w - r); } /** * Sets the given vector "result" with the result of the subtraction of the given floats from the current Vector4 coordinates. * @param x value to subtract * @param y value to subtract * @param z value to subtract * @param w value to subtract * @param result the vector to store the result in * @returns result input */ subtractFromFloatsToRef(e, t, i, r, s) { return s.x = this.x - e, s.y = this.y - t, s.z = this.z - i, s.w = this.w - r, s; } /** * Returns a new Vector4 set with the current Vector4 negated coordinates. * @returns a new vector with the negated values */ negate() { return new this.constructor(-this.x, -this.y, -this.z, -this.w); } /** * Negate this vector in place * @returns this */ negateInPlace() { return this.x *= -1, this.y *= -1, this.z *= -1, this.w *= -1, this; } /** * Negate the current Vector4 and stores the result in the given vector "result" coordinates * @param result defines the Vector4 object where to store the result * @returns the result */ negateToRef(e) { return e.copyFromFloats(this.x * -1, this.y * -1, this.z * -1, this.w * -1); } /** * Multiplies the current Vector4 coordinates by scale (float). * @param scale the number to scale with * @returns the updated Vector4. */ scaleInPlace(e) { return this.x *= e, this.y *= e, this.z *= e, this.w *= e, this; } /** * Returns a new Vector4 set with the current Vector4 coordinates multiplied by scale (float). * @param scale the number to scale with * @returns a new vector with the result */ scale(e) { return new this.constructor(this.x * e, this.y * e, this.z * e, this.w * e); } /** * Sets the given vector "result" with the current Vector4 coordinates multiplied by scale (float). * @param scale the number to scale with * @param result a vector to store the result in * @returns result input */ scaleToRef(e, t) { return t.x = this.x * e, t.y = this.y * e, t.z = this.z * e, t.w = this.w * e, t; } /** * Scale the current Vector4 values by a factor and add the result to a given Vector4 * @param scale defines the scale factor * @param result defines the Vector4 object where to store the result * @returns result input */ scaleAndAddToRef(e, t) { return t.x += this.x * e, t.y += this.y * e, t.z += this.z * e, t.w += this.w * e, t; } /** * Boolean : True if the current Vector4 coordinates are strictly equal to the given ones. * @param otherVector the vector to compare against * @returns true if they are equal */ equals(e) { return e && this.x === e.x && this.y === e.y && this.z === e.z && this.w === e.w; } /** * Boolean : True if the current Vector4 coordinates are each beneath the distance "epsilon" from the given vector ones. * @param otherVector vector to compare against * @param epsilon (Default: very small number) * @returns true if they are equal */ equalsWithEpsilon(e, t = Sr) { return e && yt.WithinEpsilon(this.x, e.x, t) && yt.WithinEpsilon(this.y, e.y, t) && yt.WithinEpsilon(this.z, e.z, t) && yt.WithinEpsilon(this.w, e.w, t); } /** * Boolean : True if the given floats are strictly equal to the current Vector4 coordinates. 
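* (Editor-added sketch, not from the original docs; the epsilon below is an illustrative value.)
* @example
* // Strict equality is sensitive to floating point error; prefer equalsWithEpsilon for computed values
* const a = new BABYLON.Vector4(0.1 + 0.2, 0, 0, 0);
* a.equalsToFloats(0.3, 0, 0, 0); // false, because 0.1 + 0.2 === 0.30000000000000004
* a.equalsWithEpsilon(new BABYLON.Vector4(0.3, 0, 0, 0), 1e-6); // true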
* @param x x value to compare against * @param y y value to compare against * @param z z value to compare against * @param w w value to compare against * @returns true if equal */ equalsToFloats(e, t, i, r) { return this.x === e && this.y === t && this.z === i && this.w === r; } /** * Multiplies in place the current Vector4 by the given one. * @param otherVector vector to multiply with * @returns the updated Vector4. */ multiplyInPlace(e) { return this.x *= e.x, this.y *= e.y, this.z *= e.z, this.w *= e.w, this; } /** * Returns a new Vector4 set with the multiplication result of the current Vector4 and the given one. * @param otherVector vector to multiply with * @returns resulting new vector */ multiply(e) { return new this.constructor(this.x * e.x, this.y * e.y, this.z * e.z, this.w * e.w); } /** * Updates the given vector "result" with the multiplication result of the current Vector4 and the given one. * @param otherVector vector to multiply with * @param result vector to store the result * @returns result input */ multiplyToRef(e, t) { return t.x = this.x * e.x, t.y = this.y * e.y, t.z = this.z * e.z, t.w = this.w * e.w, t; } /** * Returns a new Vector4 set with the multiplication result of the given floats and the current Vector4 coordinates. * @param x x value to multiply with * @param y y value to multiply with * @param z z value to multiply with * @param w w value to multiply with * @returns resulting new vector */ multiplyByFloats(e, t, i, r) { return new this.constructor(this.x * e, this.y * t, this.z * i, this.w * r); } /** * Returns a new Vector4 set with the division result of the current Vector4 by the given one. * @param otherVector vector to divide with * @returns resulting new vector */ divide(e) { return new this.constructor(this.x / e.x, this.y / e.y, this.z / e.z, this.w / e.w); } /** * Updates the given vector "result" with the division result of the current Vector4 by the given one. * @param otherVector vector to divide with * @param result vector to store the result * @returns result input */ divideToRef(e, t) { return t.x = this.x / e.x, t.y = this.y / e.y, t.z = this.z / e.z, t.w = this.w / e.w, t; } /** * Divides the current Vector4 coordinates by the given ones. * @param otherVector vector to divide with * @returns the updated Vector4. 
*/ divideInPlace(e) { return this.divideToRef(e, this); } /** * Updates the Vector4 coordinates with the minimum values between its own and the given vector ones * @param other defines the second operand * @returns the current updated Vector4 */ minimizeInPlace(e) { return e.x < this.x && (this.x = e.x), e.y < this.y && (this.y = e.y), e.z < this.z && (this.z = e.z), e.w < this.w && (this.w = e.w), this; } /** * Updates the Vector4 coordinates with the maximum values between its own and the given vector ones * @param other defines the second operand * @returns the current updated Vector4 */ maximizeInPlace(e) { return e.x > this.x && (this.x = e.x), e.y > this.y && (this.y = e.y), e.z > this.z && (this.z = e.z), e.w > this.w && (this.w = e.w), this; } /** * Gets a new Vector4 from current Vector4 floored values * @returns a new Vector4 */ floor() { return new this.constructor(Math.floor(this.x), Math.floor(this.y), Math.floor(this.z), Math.floor(this.w)); } /** * Gets a new Vector4 from current Vector4 fractional values * @returns a new Vector4 */ fract() { return new this.constructor(this.x - Math.floor(this.x), this.y - Math.floor(this.y), this.z - Math.floor(this.z), this.w - Math.floor(this.w)); } // Properties /** * Returns the Vector4 length (float). * @returns the length */ length() { return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w); } /** * Returns the Vector4 squared length (float). * @returns the length squared */ lengthSquared() { return this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w; } // Methods /** * Normalizes in place the Vector4. * @returns the updated Vector4. */ normalize() { return this.normalizeFromLength(this.length()); } /** * Normalize the current Vector4 with the given input length. * Please note that this is an in place operation. * @param len the length of the vector * @returns the current updated Vector4 */ normalizeFromLength(e) { return e === 0 || e === 1 ? this : this.scaleInPlace(1 / e); } /** * Normalize the current Vector4 to a new vector * @returns the new Vector4 */ normalizeToNew() { const e = new this.constructor(0, 0, 0, 0); return this.normalizeToRef(e), e; } /** * Normalize the current Vector4 to the reference * @param reference define the Vector4 to update * @returns the updated Vector4 */ normalizeToRef(e) { const t = this.length(); return t === 0 || t === 1 ? e.copyFromFloats(this.x, this.y, this.z, this.w) : this.scaleToRef(1 / t, e); } /** * Returns a new Vector3 from the Vector4 (x, y, z) coordinates. * @returns this converted to a new vector3 */ toVector3() { return new D(this.x, this.y, this.z); } /** * Returns a new Vector4 copied from the current one. * @returns the new cloned vector */ clone() { return new this.constructor(this.x, this.y, this.z, this.w); } /** * Updates the current Vector4 with the given one coordinates. * @param source the source vector to copy from * @returns the updated Vector4. */ copyFrom(e) { return this.x = e.x, this.y = e.y, this.z = e.z, this.w = e.w, this; } /** * Updates the current Vector4 coordinates with the given floats. * @param x float to copy from * @param y float to copy from * @param z float to copy from * @param w float to copy from * @returns the updated Vector4. */ copyFromFloats(e, t, i, r) { return this.x = e, this.y = t, this.z = i, this.w = r, this; } /** * Updates the current Vector4 coordinates with the given floats. 
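* (Editor-added sketch, not from the original docs.)
* @example
* // Reusing a scratch vector with set() avoids allocating a new Vector4 every frame
* const scratch = new BABYLON.Vector4(0, 0, 0, 0);
* scratch.set(2, 4, 3, 1);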
* @param x float to set from * @param y float to set from * @param z float to set from * @param w float to set from * @returns the updated Vector4. */ set(e, t, i, r) { return this.copyFromFloats(e, t, i, r); } /** * Copies the given float to the current Vector3 coordinates * @param v defines the x, y, z and w coordinates of the operand * @returns the current updated Vector3 */ setAll(e) { return this.x = this.y = this.z = this.w = e, this; } /** * Returns the dot product (float) between the current vectors and "otherVector" * @param otherVector defines the right operand * @returns the dot product */ dot(e) { return this.x * e.x + this.y * e.y + this.z * e.z + this.w * e.w; } // Statics /** * Returns a new Vector4 set from the starting index of the given array. * @param array the array to pull values from * @param offset the offset into the array to start at * @returns the new vector */ static FromArray(e, t) { return t || (t = 0), new Di(e[t], e[t + 1], e[t + 2], e[t + 3]); } /** * Updates the given vector "result" from the starting index of the given array. * @param array the array to pull values from * @param offset the offset into the array to start at * @param result the vector to store the result in * @returns result input */ static FromArrayToRef(e, t, i) { return i.x = e[t], i.y = e[t + 1], i.z = e[t + 2], i.w = e[t + 3], i; } /** * Updates the given vector "result" from the starting index of the given Float32Array. * @param array the array to pull values from * @param offset the offset into the array to start at * @param result the vector to store the result in * @returns result input */ static FromFloatArrayToRef(e, t, i) { return Di.FromArrayToRef(e, t, i), i; } /** * Updates the given vector "result" coordinates from the given floats. * @param x float to set from * @param y float to set from * @param z float to set from * @param w float to set from * @param result the vector to the floats in * @returns result input */ static FromFloatsToRef(e, t, i, r, s) { return s.x = e, s.y = t, s.z = i, s.w = r, s; } /** * Returns a new Vector4 set to (0.0, 0.0, 0.0, 0.0) * @returns the new vector */ static Zero() { return new Di(0, 0, 0, 0); } /** * Returns a new Vector4 set to (1.0, 1.0, 1.0, 1.0) * @returns the new vector */ static One() { return new Di(1, 1, 1, 1); } /** * Returns a new Vector4 with random values between min and max * @param min the minimum random value * @param max the maximum random value * @returns a Vector4 with random values between min and max */ static Random(e = 0, t = 1) { return new Di(yt.RandomRange(e, t), yt.RandomRange(e, t), yt.RandomRange(e, t), yt.RandomRange(e, t)); } /** * Gets a zero Vector4 that must not be updated */ static get ZeroReadOnly() { return Di._ZeroReadOnly; } /** * Returns a new normalized Vector4 from the given one. * @param vector the vector to normalize * @returns the vector */ static Normalize(e) { const t = Di.Zero(); return Di.NormalizeToRef(e, t), t; } /** * Updates the given vector "result" from the normalization of the given one. 
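* @example
* // Hedged sketch: the ToRef variant writes into an existing vector instead of allocating a new one,
* // which is the usual pattern in per-frame code (assumes `Di` is this bundle's minified Vector4 class):
* const out = Di.Zero();
* Di.NormalizeToRef(new Di(3, 0, 4, 0), out); // out is now (0.6, 0, 0.8, 0)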
* @param vector the vector to normalize * @param result the vector to store the result in * @returns result input */ static NormalizeToRef(e, t) { return e.normalizeToRef(t), t; } /** * Returns a vector with the minimum values from the left and right vectors * @param left left vector to minimize * @param right right vector to minimize * @returns a new vector with the minimum of the left and right vector values */ static Minimize(e, t) { const i = new e.constructor(); return i.copyFrom(e), i.minimizeInPlace(t), i; } /** * Returns a vector with the maximum values from the left and right vectors * @param left left vector to maximize * @param right right vector to maximize * @returns a new vector with the maximum of the left and right vector values */ static Maximize(e, t) { const i = new e.constructor(); return i.copyFrom(e), i.maximizeInPlace(t), i; } /** * Returns the distance (float) between the vectors "value1" and "value2". * @param value1 value to calculate the distance between * @param value2 value to calculate the distance between * @returns the distance between the two vectors */ static Distance(e, t) { return Math.sqrt(Di.DistanceSquared(e, t)); } /** * Returns the squared distance (float) between the vectors "value1" and "value2". * @param value1 value to calculate the distance between * @param value2 value to calculate the distance between * @returns the distance between the two vectors squared */ static DistanceSquared(e, t) { const i = e.x - t.x, r = e.y - t.y, s = e.z - t.z, n = e.w - t.w; return i * i + r * r + s * s + n * n; } /** * Returns a new Vector4 located at the center between the vectors "value1" and "value2". * @param value1 value to calculate the center between * @param value2 value to calculate the center between * @returns the center between the two vectors */ static Center(e, t) { return Di.CenterToRef(e, t, Di.Zero()); } /** * Gets the center of the vectors "value1" and "value2" and stores the result in the vector "ref" * @param value1 defines first vector * @param value2 defines second vector * @param ref defines third vector * @returns ref */ static CenterToRef(e, t, i) { return i.copyFromFloats((e.x + t.x) / 2, (e.y + t.y) / 2, (e.z + t.z) / 2, (e.w + t.w) / 2); } /** * Returns a new Vector4 set with the result of the transformation by the given matrix of the given vector. * This method computes transformed coordinates only, not transformed direction vectors (ie. it takes translation into account) * The difference with Vector3.TransformCoordinates is that the w component is not used to divide the other coordinates but is returned in the w coordinate instead * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @returns the transformed Vector4 */ static TransformCoordinates(e, t) { const i = Di.Zero(); return Di.TransformCoordinatesToRef(e, t, i), i; } /** * Sets the given vector "result" coordinates with the result of the transformation by the given matrix of the given vector * This method computes transformed coordinates only, not transformed direction vectors (ie.
it takes translation in account) * The difference with Vector3.TransformCoordinatesToRef is that the w component is not used to divide the other coordinates but is returned in the w coordinate instead * @param vector defines the Vector3 to transform * @param transformation defines the transformation matrix * @param result defines the Vector4 where to store the result * @returns result input */ static TransformCoordinatesToRef(e, t, i) { return Di.TransformCoordinatesFromFloatsToRef(e._x, e._y, e._z, t, i), i; } /** * Sets the given vector "result" coordinates with the result of the transformation by the given matrix of the given floats (x, y, z) * This method computes tranformed coordinates only, not transformed direction vectors * The difference with Vector3.TransformCoordinatesFromFloatsToRef is that the w component is not used to divide the other coordinates but is returned in the w coordinate instead * @param x define the x coordinate of the source vector * @param y define the y coordinate of the source vector * @param z define the z coordinate of the source vector * @param transformation defines the transformation matrix * @param result defines the Vector4 where to store the result * @returns result input */ static TransformCoordinatesFromFloatsToRef(e, t, i, r, s) { const n = r.m, a = e * n[0] + t * n[4] + i * n[8] + n[12], l = e * n[1] + t * n[5] + i * n[9] + n[13], o = e * n[2] + t * n[6] + i * n[10] + n[14], u = e * n[3] + t * n[7] + i * n[11] + n[15]; return s.x = a, s.y = l, s.z = o, s.w = u, s; } /** * Returns a new Vector4 set with the result of the normal transformation by the given matrix of the given vector. * This methods computes transformed normalized direction vectors only. * @param vector the vector to transform * @param transformation the transformation matrix to apply * @returns the new vector */ static TransformNormal(e, t) { const i = new e.constructor(); return Di.TransformNormalToRef(e, t, i), i; } /** * Sets the given vector "result" with the result of the normal transformation by the given matrix of the given vector. * This methods computes transformed normalized direction vectors only. * @param vector the vector to transform * @param transformation the transformation matrix to apply * @param result the vector to store the result in * @returns result input */ static TransformNormalToRef(e, t, i) { const r = t.m, s = e.x * r[0] + e.y * r[4] + e.z * r[8], n = e.x * r[1] + e.y * r[5] + e.z * r[9], a = e.x * r[2] + e.y * r[6] + e.z * r[10]; return i.x = s, i.y = n, i.z = a, i.w = e.w, i; } /** * Sets the given vector "result" with the result of the normal transformation by the given matrix of the given floats (x, y, z, w). * This methods computes transformed normalized direction vectors only. 
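* @example
* // Hedged sketch: only the 3x3 rotation/scale part of the matrix is applied, translation is ignored
* // and the input w is passed through unchanged (assumes `Di` and `Ae` are this bundle's minified Vector4
* // and Matrix classes, and that Ae.Identity() is the usual Babylon.js Matrix.Identity factory):
* const out = Di.Zero();
* Di.TransformNormalFromFloatsToRef(0, 1, 0, 0, Ae.Identity(), out); // out is (0, 1, 0, 0)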
* @param x value to transform * @param y value to transform * @param z value to transform * @param w value to transform * @param transformation the transformation matrix to apply * @param result the vector to store the results in * @returns result input */ static TransformNormalFromFloatsToRef(e, t, i, r, s, n) { const a = s.m; return n.x = e * a[0] + t * a[4] + i * a[8], n.y = e * a[1] + t * a[5] + i * a[9], n.z = e * a[2] + t * a[6] + i * a[10], n.w = r, n; } /** * Creates a new Vector4 from a Vector3 * @param source defines the source data * @param w defines the 4th component (default is 0) * @returns a new Vector4 */ static FromVector3(e, t = 0) { return new Di(e._x, e._y, e._z, t); } /** * Returns the dot product (float) between the vectors "left" and "right" * @param left defines the left operand * @param right defines the right operand * @returns the dot product */ static Dot(e, t) { return e.dot(t); } } Di._ZeroReadOnly = Di.Zero(); class Ze { /** Gets or sets the x coordinate */ get x() { return this._x; } set x(e) { this._x = e, this._isDirty = !0; } /** Gets or sets the y coordinate */ get y() { return this._y; } set y(e) { this._y = e, this._isDirty = !0; } /** Gets or sets the z coordinate */ get z() { return this._z; } set z(e) { this._z = e, this._isDirty = !0; } /** Gets or sets the w coordinate */ get w() { return this._w; } set w(e) { this._w = e, this._isDirty = !0; } /** * Creates a new Quaternion from the given floats * @param x defines the first component (0 by default) * @param y defines the second component (0 by default) * @param z defines the third component (0 by default) * @param w defines the fourth component (1.0 by default) */ constructor(e = 0, t = 0, i = 0, r = 1) { this._isDirty = !0, this._x = e, this._y = t, this._z = i, this._w = r; } /** * Gets a string representation for the current quaternion * @returns a string with the Quaternion coordinates */ toString() { return `{X: ${this._x} Y: ${this._y} Z: ${this._z} W: ${this._w}}`; } /** * Gets the class name of the quaternion * @returns the string "Quaternion" */ getClassName() { return "Quaternion"; } /** * Gets a hash code for this quaternion * @returns the quaternion hash code */ getHashCode() { const e = W_(this._x), t = W_(this._y), i = W_(this._z), r = W_(this._w); let s = e; return s = s * 397 ^ t, s = s * 397 ^ i, s = s * 397 ^ r, s; } /** * Copy the quaternion to an array * Example Playground https://playground.babylonjs.com/#L49EJ7#13 * @returns a new array populated with 4 elements from the quaternion coordinates */ asArray() { return [this._x, this._y, this._z, this._w]; } /** * Stores from the starting index in the given array the Quaternion successive values * Example Playground https://playground.babylonjs.com/#L49EJ7#59 * @param array defines the array where to store the x,y,z,w components * @param index defines an optional index in the target array to define where to start storing values * @returns the current Quaternion object */ toArray(e, t = 0) { return e[t] = this._x, e[t + 1] = this._y, e[t + 2] = this._z, e[t + 3] = this._w, this; } /** * Check if two quaternions are equals * Example Playground https://playground.babylonjs.com/#L49EJ7#38 * @param otherQuaternion defines the second operand * @returns true if the current quaternion and the given one coordinates are strictly equals */ equals(e) { return e && this._x === e._x && this._y === e._y && this._z === e._z && this._w === e._w; } /** * Gets a boolean if two quaternions are equals (using an epsilon value) * Example Playground 
https://playground.babylonjs.com/#L49EJ7#37 * @param otherQuaternion defines the other quaternion * @param epsilon defines the minimal distance to consider equality * @returns true if the given quaternion coordinates are close to the current ones by a distance of epsilon. */ equalsWithEpsilon(e, t = Sr) { return e && yt.WithinEpsilon(this._x, e._x, t) && yt.WithinEpsilon(this._y, e._y, t) && yt.WithinEpsilon(this._z, e._z, t) && yt.WithinEpsilon(this._w, e._w, t); } /** * Clone the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#12 * @returns a new quaternion copied from the current one */ clone() { return new this.constructor(this._x, this._y, this._z, this._w); } /** * Copy a quaternion to the current one * Example Playground https://playground.babylonjs.com/#L49EJ7#86 * @param other defines the other quaternion * @returns the updated current quaternion */ copyFrom(e) { return this._x = e._x, this._y = e._y, this._z = e._z, this._w = e._w, this._isDirty = !0, this; } /** * Updates the current quaternion with the given float coordinates * Example Playground https://playground.babylonjs.com/#L49EJ7#87 * @param x defines the x coordinate * @param y defines the y coordinate * @param z defines the z coordinate * @param w defines the w coordinate * @returns the updated current quaternion */ copyFromFloats(e, t, i, r) { return this._x = e, this._y = t, this._z = i, this._w = r, this._isDirty = !0, this; } /** * Updates the current quaternion from the given float coordinates * Example Playground https://playground.babylonjs.com/#L49EJ7#56 * @param x defines the x coordinate * @param y defines the y coordinate * @param z defines the z coordinate * @param w defines the w coordinate * @returns the updated current quaternion */ set(e, t, i, r) { return this.copyFromFloats(e, t, i, r); } /** * Adds two quaternions * Example Playground https://playground.babylonjs.com/#L49EJ7#10 * @param other defines the second operand * @returns a new quaternion as the addition result of the given one and the current quaternion */ add(e) { return new this.constructor(this._x + e._x, this._y + e._y, this._z + e._z, this._w + e._w); } /** * Add a quaternion to the current one * Example Playground https://playground.babylonjs.com/#L49EJ7#11 * @param other defines the quaternion to add * @returns the current quaternion */ addInPlace(e) { return this._x += e._x, this._y += e._y, this._z += e._z, this._w += e._w, this._isDirty = !0, this; } /** * Subtract two quaternions * Example Playground https://playground.babylonjs.com/#L49EJ7#57 * @param other defines the second operand * @returns a new quaternion as the subtraction result of the given one from the current one */ subtract(e) { return new this.constructor(this._x - e._x, this._y - e._y, this._z - e._z, this._w - e._w); } /** * Subtract a quaternion to the current one * Example Playground https://playground.babylonjs.com/#L49EJ7#58 * @param other defines the quaternion to subtract * @returns the current quaternion */ subtractInPlace(e) { return this._x -= e._x, this._y -= e._y, this._z -= e._z, this._w -= e._w, this._isDirty = !0, this; } /** * Multiplies the current quaternion by a scale factor * Example Playground https://playground.babylonjs.com/#L49EJ7#88 * @param value defines the scale factor * @returns a new quaternion set by multiplying the current quaternion coordinates by the float "scale" */ scale(e) { return new this.constructor(this._x * e, this._y * e, this._z * e, this._w * e); } /** * Scale the current quaternion values 
by a factor and stores the result to a given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#89 * @param scale defines the scale factor * @param result defines the Quaternion object where to store the result * @returns result input */ scaleToRef(e, t) { return t._x = this._x * e, t._y = this._y * e, t._z = this._z * e, t._w = this._w * e, t._isDirty = !0, t; } /** * Multiplies in place the current quaternion by a scale factor * Example Playground https://playground.babylonjs.com/#L49EJ7#90 * @param value defines the scale factor * @returns the current modified quaternion */ scaleInPlace(e) { return this._x *= e, this._y *= e, this._z *= e, this._w *= e, this._isDirty = !0, this; } /** * Scale the current quaternion values by a factor and add the result to a given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#91 * @param scale defines the scale factor * @param result defines the Quaternion object where to store the result * @returns result input */ scaleAndAddToRef(e, t) { return t._x += this._x * e, t._y += this._y * e, t._z += this._z * e, t._w += this._w * e, t._isDirty = !0, t; } /** * Multiplies two quaternions * Example Playground https://playground.babylonjs.com/#L49EJ7#43 * @param q1 defines the second operand * @returns a new quaternion set as the multiplication result of the current one with the given one "q1" */ multiply(e) { const t = new this.constructor(0, 0, 0, 1); return this.multiplyToRef(e, t), t; } /** * Sets the given "result" as the multiplication result of the current one with the given one "q1" * Example Playground https://playground.babylonjs.com/#L49EJ7#45 * @param q1 defines the second operand * @param result defines the target quaternion * @returns the current quaternion */ multiplyToRef(e, t) { const i = this._x * e._w + this._y * e._z - this._z * e._y + this._w * e._x, r = -this._x * e._z + this._y * e._w + this._z * e._x + this._w * e._y, s = this._x * e._y - this._y * e._x + this._z * e._w + this._w * e._z, n = -this._x * e._x - this._y * e._y - this._z * e._z + this._w * e._w; return t.copyFromFloats(i, r, s, n), t; } /** * Updates the current quaternion with the multiplication of itself with the given one "q1" * Example Playground https://playground.babylonjs.com/#L49EJ7#46 * @param q1 defines the second operand * @returns the currentupdated quaternion */ multiplyInPlace(e) { return this.multiplyToRef(e, this), this; } /** * Conjugates the current quaternion and stores the result in the given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#81 * @param ref defines the target quaternion * @returns result input */ conjugateToRef(e) { return e.copyFromFloats(-this._x, -this._y, -this._z, this._w), e; } /** * Conjugates in place the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#82 * @returns the current updated quaternion */ conjugateInPlace() { return this._x *= -1, this._y *= -1, this._z *= -1, this._isDirty = !0, this; } /** * Conjugates (1-q) the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#83 * @returns a new quaternion */ conjugate() { return new this.constructor(-this._x, -this._y, -this._z, this._w); } /** * Returns the inverse of the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#84 * @returns a new quaternion */ invert() { const e = this.conjugate(), t = this.lengthSquared(); return t == 0 || t == 1 || e.scaleInPlace(1 / t), e; } /** * Invert in place the current quaternion 
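* Note: for a quaternion q the inverse is conjugate(q) / lengthSquared(q), so for unit quaternions (the usual case for rotations) the inverse equals the plain conjugate and the division below is skipped.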
* Example Playground https://playground.babylonjs.com/#L49EJ7#85 * @returns this quaternion */ invertInPlace() { this.conjugateInPlace(); const e = this.lengthSquared(); return e == 0 || e == 1 ? this : (this.scaleInPlace(1 / e), this); } /** * Gets squared length of current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#29 * @returns the quaternion length (float) */ lengthSquared() { return this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w; } /** * Gets length of current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#28 * @returns the quaternion length (float) */ length() { return Math.sqrt(this.lengthSquared()); } /** * Normalize in place the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#54 * @returns the current updated quaternion */ normalize() { return this.normalizeFromLength(this.length()); } /** * Normalize the current quaternion with the given input length. * Please note that this is an in place operation. * @param len the length of the quaternion * @returns the current updated Quaternion */ normalizeFromLength(e) { return e === 0 || e === 1 ? this : this.scaleInPlace(1 / e); } /** * Normalize a copy of the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#55 * @returns the normalized quaternion */ normalizeToNew() { const e = new this.constructor(0, 0, 0, 1); return this.normalizeToRef(e), e; } /** * Normalize the current Quaternion to the reference * @param reference define the Quaternion to update * @returns the updated Quaternion */ normalizeToRef(e) { const t = this.length(); return t === 0 || t === 1 ? e.copyFromFloats(this._x, this._y, this._z, this._w) : this.scaleToRef(1 / t, e); } /** * Returns a new Vector3 set with the Euler angles translated from the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#32 * @returns a new Vector3 containing the Euler angles * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/rotation_conventions */ toEulerAngles() { const e = D.Zero(); return this.toEulerAnglesToRef(e), e; } /** * Sets the given vector3 "result" with the Euler angles translated from the current quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#31 * @param result defines the vector which will be filled with the Euler angles * @returns result input * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/rotation_conventions */ toEulerAnglesToRef(e) { const t = this._z, i = this._x, r = this._y, s = this._w, n = r * t - i * s, a = 0.4999999; if (n < -a) e._y = 2 * Math.atan2(r, s), e._x = Math.PI / 2, e._z = 0, e._isDirty = !0; else if (n > a) e._y = 2 * Math.atan2(r, s), e._x = -Math.PI / 2, e._z = 0, e._isDirty = !0; else { const l = s * s, o = t * t, u = i * i, h = r * r; e._z = Math.atan2(2 * (i * r + t * s), -o - u + h + l), e._x = Math.asin(-2 * n), e._y = Math.atan2(2 * (t * i + r * s), o - u - h + l), e._isDirty = !0; } return e; } /** * Updates the given rotation matrix with the current quaternion values * Example Playground https://playground.babylonjs.com/#L49EJ7#67 * @param result defines the target matrix * @returns the updated matrix with the rotation */ toRotationMatrix(e) { return Ae.FromQuaternionToRef(this, e), e; } /** * Updates the current quaternion from the given rotation matrix values * Example Playground https://playground.babylonjs.com/#L49EJ7#41 * @param matrix defines the source 
matrix * @returns the current updated quaternion */ fromRotationMatrix(e) { return Ze.FromRotationMatrixToRef(e, this), this; } /** * Returns the dot product (float) between the current quaternions and "other" * @param other defines the right operand * @returns the dot product */ dot(e) { return this._x * e._x + this._y * e._y + this._z * e._z + this._w * e._w; } // Statics /** * Creates a new quaternion from a rotation matrix * Example Playground https://playground.babylonjs.com/#L49EJ7#101 * @param matrix defines the source matrix * @returns a new quaternion created from the given rotation matrix values */ static FromRotationMatrix(e) { const t = new Ze(); return Ze.FromRotationMatrixToRef(e, t), t; } /** * Updates the given quaternion with the given rotation matrix values * Example Playground https://playground.babylonjs.com/#L49EJ7#102 * @param matrix defines the source matrix * @param result defines the target quaternion * @returns result input */ static FromRotationMatrixToRef(e, t) { const i = e.m, r = i[0], s = i[4], n = i[8], a = i[1], l = i[5], o = i[9], u = i[2], h = i[6], d = i[10], f = r + l + d; let p; return f > 0 ? (p = 0.5 / Math.sqrt(f + 1), t._w = 0.25 / p, t._x = (h - o) * p, t._y = (n - u) * p, t._z = (a - s) * p, t._isDirty = !0) : r > l && r > d ? (p = 2 * Math.sqrt(1 + r - l - d), t._w = (h - o) / p, t._x = 0.25 * p, t._y = (s + a) / p, t._z = (n + u) / p, t._isDirty = !0) : l > d ? (p = 2 * Math.sqrt(1 + l - r - d), t._w = (n - u) / p, t._x = (s + a) / p, t._y = 0.25 * p, t._z = (o + h) / p, t._isDirty = !0) : (p = 2 * Math.sqrt(1 + d - r - l), t._w = (a - s) / p, t._x = (n + u) / p, t._y = (o + h) / p, t._z = 0.25 * p, t._isDirty = !0), t; } /** * Returns the dot product (float) between the quaternions "left" and "right" * Example Playground https://playground.babylonjs.com/#L49EJ7#61 * @param left defines the left operand * @param right defines the right operand * @returns the dot product */ static Dot(e, t) { return e._x * t._x + e._y * t._y + e._z * t._z + e._w * t._w; } /** * Checks if the orientations of two rotation quaternions are close to each other * Example Playground https://playground.babylonjs.com/#L49EJ7#60 * @param quat0 defines the first quaternion to check * @param quat1 defines the second quaternion to check * @param epsilon defines closeness, 0 same orientation, 1 PI apart, default 0.1 * @returns true if the two quaternions are close to each other within epsilon */ static AreClose(e, t, i = 0.1) { const r = Ze.Dot(e, t); return 1 - r * r <= i; } /** * Smooth interpolation between two quaternions using Slerp * Example Playground https://playground.babylonjs.com/#L49EJ7#93 * @param source source quaternion * @param goal goal quaternion * @param deltaTime current interpolation frame * @param lerpTime total interpolation time * @param result the smoothed quaternion */ static SmoothToRef(e, t, i, r, s) { let n = r === 0 ? 
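// If the total lerp time is zero, deltaTime / lerpTime would divide by zero, so the gradient
// defaults to 1 (jump straight to the goal); it is then clamped to [0, 1] and passed to SlerpToRef.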
1 : i / r; return n = yt.Clamp(n, 0, 1), Ze.SlerpToRef(e, t, n, s), s; } /** * Creates an empty quaternion * @returns a new quaternion set to (0.0, 0.0, 0.0) */ static Zero() { return new Ze(0, 0, 0, 0); } /** * Inverse a given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#103 * @param q defines the source quaternion * @returns a new quaternion as the inverted current quaternion */ static Inverse(e) { return new e.constructor(-e._x, -e._y, -e._z, e._w); } /** * Inverse a given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#104 * @param q defines the source quaternion * @param result the quaternion the result will be stored in * @returns the result quaternion */ static InverseToRef(e, t) { return t.set(-e._x, -e._y, -e._z, e._w), t; } /** * Creates an identity quaternion * @returns the identity quaternion */ static Identity() { return new Ze(0, 0, 0, 1); } /** * Gets a boolean indicating if the given quaternion is identity * @param quaternion defines the quaternion to check * @returns true if the quaternion is identity */ static IsIdentity(e) { return e && e._x === 0 && e._y === 0 && e._z === 0 && e._w === 1; } /** * Creates a quaternion from a rotation around an axis * Example Playground https://playground.babylonjs.com/#L49EJ7#72 * @param axis defines the axis to use * @param angle defines the angle to use * @returns a new quaternion created from the given axis (Vector3) and angle in radians (float) */ static RotationAxis(e, t) { return Ze.RotationAxisToRef(e, t, new Ze()); } /** * Creates a rotation around an axis and stores it into the given quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#73 * @param axis defines the axis to use * @param angle defines the angle to use * @param result defines the target quaternion * @returns the target quaternion */ static RotationAxisToRef(e, t, i) { const r = Math.sin(t / 2); return e.normalize(), i._w = Math.cos(t / 2), i._x = e._x * r, i._y = e._y * r, i._z = e._z * r, i._isDirty = !0, i; } /** * Creates a new quaternion from data stored into an array * Example Playground https://playground.babylonjs.com/#L49EJ7#63 * @param array defines the data source * @param offset defines the offset in the source array where the data starts * @returns a new quaternion */ static FromArray(e, t) { return t || (t = 0), new Ze(e[t], e[t + 1], e[t + 2], e[t + 3]); } /** * Updates the given quaternion "result" from the starting index of the given array. 
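* @example
* // Hedged sketch (assumes `Ze` is this bundle's minified Quaternion class): read x, y, z, w starting at offset 4
* const q = Ze.Identity();
* Ze.FromArrayToRef([0, 0, 0, 0, 0, 0, 0, 1], 4, q); // q is now (0, 0, 0, 1)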
* Example Playground https://playground.babylonjs.com/#L49EJ7#64 * @param array the array to pull values from * @param offset the offset into the array to start at * @param result the quaternion to store the result in * @returns result input */ static FromArrayToRef(e, t, i) { return i._x = e[t], i._y = e[t + 1], i._z = e[t + 2], i._w = e[t + 3], i._isDirty = !0, i; } /** * Create a quaternion from Euler rotation angles * Example Playground https://playground.babylonjs.com/#L49EJ7#33 * @param x Pitch * @param y Yaw * @param z Roll * @returns the new Quaternion */ static FromEulerAngles(e, t, i) { const r = new Ze(); return Ze.RotationYawPitchRollToRef(t, e, i, r), r; } /** * Updates a quaternion from Euler rotation angles * Example Playground https://playground.babylonjs.com/#L49EJ7#34 * @param x Pitch * @param y Yaw * @param z Roll * @param result the quaternion to store the result * @returns the updated quaternion */ static FromEulerAnglesToRef(e, t, i, r) { return Ze.RotationYawPitchRollToRef(t, e, i, r), r; } /** * Create a quaternion from Euler rotation vector * Example Playground https://playground.babylonjs.com/#L49EJ7#35 * @param vec the Euler vector (x Pitch, y Yaw, z Roll) * @returns the new Quaternion */ static FromEulerVector(e) { const t = new Ze(); return Ze.RotationYawPitchRollToRef(e._y, e._x, e._z, t), t; } /** * Updates a quaternion from Euler rotation vector * Example Playground https://playground.babylonjs.com/#L49EJ7#36 * @param vec the Euler vector (x Pitch, y Yaw, z Roll) * @param result the quaternion to store the result * @returns the updated quaternion */ static FromEulerVectorToRef(e, t) { return Ze.RotationYawPitchRollToRef(e._y, e._x, e._z, t), t; } /** * Updates a quaternion so that it rotates vector vecFrom to vector vecTo * Example Playground - https://playground.babylonjs.com/#L49EJ7#70 * @param vecFrom defines the direction vector from which to rotate * @param vecTo defines the direction vector to which to rotate * @param result the quaternion to store the result * @param epsilon defines the minimal dot value to define vecs as opposite. Default: `BABYLON.Epsilon` * @returns the updated quaternion */ static FromUnitVectorsToRef(e, t, i, r = Sr) { const s = D.Dot(e, t) + 1; return s < r ? Math.abs(e.x) > Math.abs(e.z) ? 
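// vecFrom and vecTo are (nearly) opposite (dot close to -1), so there is no unique rotation axis:
// pick an arbitrary axis perpendicular to vecFrom for a 180 degree turn, choosing the components
// least likely to be all zero.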
i.set(-e.y, e.x, 0, 0) : i.set(0, -e.z, e.y, 0) : (D.CrossToRef(e, t, de.Vector3[0]), i.set(de.Vector3[0].x, de.Vector3[0].y, de.Vector3[0].z, s)), i.normalize(); } /** * Creates a new quaternion from the given Euler float angles (y, x, z) * Example Playground https://playground.babylonjs.com/#L49EJ7#77 * @param yaw defines the rotation around Y axis * @param pitch defines the rotation around X axis * @param roll defines the rotation around Z axis * @returns the new quaternion */ static RotationYawPitchRoll(e, t, i) { const r = new Ze(); return Ze.RotationYawPitchRollToRef(e, t, i, r), r; } /** * Creates a new rotation from the given Euler float angles (y, x, z) and stores it in the target quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#78 * @param yaw defines the rotation around Y axis * @param pitch defines the rotation around X axis * @param roll defines the rotation around Z axis * @param result defines the target quaternion * @returns result input */ static RotationYawPitchRollToRef(e, t, i, r) { const s = i * 0.5, n = t * 0.5, a = e * 0.5, l = Math.sin(s), o = Math.cos(s), u = Math.sin(n), h = Math.cos(n), d = Math.sin(a), f = Math.cos(a); return r._x = f * u * o + d * h * l, r._y = d * h * o - f * u * l, r._z = f * h * l - d * u * o, r._w = f * h * o + d * u * l, r._isDirty = !0, r; } /** * Creates a new quaternion from the given Euler float angles expressed in z-x-z orientation * Example Playground https://playground.babylonjs.com/#L49EJ7#68 * @param alpha defines the rotation around first axis * @param beta defines the rotation around second axis * @param gamma defines the rotation around third axis * @returns the new quaternion */ static RotationAlphaBetaGamma(e, t, i) { const r = new Ze(); return Ze.RotationAlphaBetaGammaToRef(e, t, i, r), r; } /** * Creates a new quaternion from the given Euler float angles expressed in z-x-z orientation and stores it in the target quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#69 * @param alpha defines the rotation around first axis * @param beta defines the rotation around second axis * @param gamma defines the rotation around third axis * @param result defines the target quaternion * @returns result input */ static RotationAlphaBetaGammaToRef(e, t, i, r) { const s = (i + e) * 0.5, n = (i - e) * 0.5, a = t * 0.5; return r._x = Math.cos(n) * Math.sin(a), r._y = Math.sin(n) * Math.sin(a), r._z = Math.sin(s) * Math.cos(a), r._w = Math.cos(s) * Math.cos(a), r._isDirty = !0, r; } /** * Creates a new quaternion containing the rotation value to reach the target (axis1, axis2, axis3) orientation as a rotated XYZ system (axis1, axis2 and axis3 are normalized during this operation) * Example Playground https://playground.babylonjs.com/#L49EJ7#75 * @param axis1 defines the first axis * @param axis2 defines the second axis * @param axis3 defines the third axis * @returns the new quaternion */ static RotationQuaternionFromAxis(e, t, i) { const r = new Ze(0, 0, 0, 0); return Ze.RotationQuaternionFromAxisToRef(e, t, i, r), r; } /** * Creates a rotation value to reach the target (axis1, axis2, axis3) orientation as a rotated XYZ system (axis1, axis2 and axis3 are normalized during this operation) and stores it in the target quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#76 * @param axis1 defines the first axis * @param axis2 defines the second axis * @param axis3 defines the third axis * @param ref defines the target quaternion * @returns result input */ static 
RotationQuaternionFromAxisToRef(e, t, i, r) { const s = Yi.Matrix[0]; return Ae.FromXYZAxesToRef(e.normalize(), t.normalize(), i.normalize(), s), Ze.FromRotationMatrixToRef(s, r), r; } /** * Creates a new rotation value to orient an object to look towards the given forward direction, the up direction being oriented like "up". * This function works in left handed mode * Example Playground https://playground.babylonjs.com/#L49EJ7#96 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @returns A new quaternion oriented toward the specified forward and up. */ static FromLookDirectionLH(e, t) { const i = new Ze(); return Ze.FromLookDirectionLHToRef(e, t, i), i; } /** * Creates a new rotation value to orient an object to look towards the given forward direction with the up direction being oriented like "up", and stores it in the target quaternion. * This function works in left handed mode * Example Playground https://playground.babylonjs.com/#L49EJ7#97 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @param ref defines the target quaternion. * @returns result input */ static FromLookDirectionLHToRef(e, t, i) { const r = Yi.Matrix[0]; return Ae.LookDirectionLHToRef(e, t, r), Ze.FromRotationMatrixToRef(r, i), i; } /** * Creates a new rotation value to orient an object to look towards the given forward direction, the up direction being oriented like "up". * This function works in right handed mode * Example Playground https://playground.babylonjs.com/#L49EJ7#98 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @returns A new quaternion oriented toward the specified forward and up. */ static FromLookDirectionRH(e, t) { const i = new Ze(); return Ze.FromLookDirectionRHToRef(e, t, i), i; } /** * Creates a new rotation value to orient an object to look towards the given forward direction with the up direction being oriented like "up", and stores it in the target quaternion. * This function works in right handed mode * Example Playground https://playground.babylonjs.com/#L49EJ7#105 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @param ref defines the target quaternion. 
* @returns result input */ static FromLookDirectionRHToRef(e, t, i) { const r = Yi.Matrix[0]; return Ae.LookDirectionRHToRef(e, t, r), Ze.FromRotationMatrixToRef(r, i); } /** * Interpolates between two quaternions * Example Playground https://playground.babylonjs.com/#L49EJ7#79 * @param left defines first quaternion * @param right defines second quaternion * @param amount defines the gradient to use * @returns the new interpolated quaternion */ static Slerp(e, t, i) { const r = Ze.Identity(); return Ze.SlerpToRef(e, t, i, r), r; } /** * Interpolates between two quaternions and stores it into a target quaternion * Example Playground https://playground.babylonjs.com/#L49EJ7#92 * @param left defines first quaternion * @param right defines second quaternion * @param amount defines the gradient to use * @param result defines the target quaternion * @returns result input */ static SlerpToRef(e, t, i, r) { let s, n, a = e._x * t._x + e._y * t._y + e._z * t._z + e._w * t._w, l = !1; if (a < 0 && (l = !0, a = -a), a > 0.999999) n = 1 - i, s = l ? -i : i; else { const o = Math.acos(a), u = 1 / Math.sin(o); n = Math.sin((1 - i) * o) * u, s = l ? -Math.sin(i * o) * u : Math.sin(i * o) * u; } return r._x = n * e._x + s * t._x, r._y = n * e._y + s * t._y, r._z = n * e._z + s * t._z, r._w = n * e._w + s * t._w, r._isDirty = !0, r; } /** * Interpolate between two quaternions using Hermite interpolation * Example Playground https://playground.babylonjs.com/#L49EJ7#47 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/drawCurves#hermite-quaternion-spline * @param value1 defines first quaternion * @param tangent1 defines the incoming tangent * @param value2 defines second quaternion * @param tangent2 defines the outgoing tangent * @param amount defines the target quaternion * @returns the new interpolated quaternion */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n, d = e._x * l + i._x * o + t._x * u + r._x * h, f = e._y * l + i._y * o + t._y * u + r._y * h, p = e._z * l + i._z * o + t._z * u + r._z * h, m = e._w * l + i._w * o + t._w * u + r._w * h; return new e.constructor(d, f, p, m); } /** * Returns a new Quaternion which is the 1st derivative of the Hermite spline defined by the quaternions "value1", "value2", "tangent1", "tangent2". * Example Playground https://playground.babylonjs.com/#L49EJ7#48 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = new e.constructor(); return this.Hermite1stDerivativeToRef(e, t, i, r, s, n), n; } /** * Update a Quaternion with the 1st derivative of the Hermite spline defined by the quaternions "value1", "value2", "tangent1", "tangent2". 
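* Note: this evaluates the derivatives of the cubic Hermite basis functions, 6t^2 - 6t for value1, 3t^2 - 4t + 1 for tangent1, -6t^2 + 6t for value2 and 3t^2 - 2t for tangent2, at the given time and combines the four quaternions with those weights component by component.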
* Example Playground https://playground.babylonjs.com/#L49EJ7#49 * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @param result define where to store the derivative * @returns result input */ static Hermite1stDerivativeToRef(e, t, i, r, s, n) { const a = s * s; return n._x = (a - s) * 6 * e._x + (3 * a - 4 * s + 1) * t._x + (-a + s) * 6 * i._x + (3 * a - 2 * s) * r._x, n._y = (a - s) * 6 * e._y + (3 * a - 4 * s + 1) * t._y + (-a + s) * 6 * i._y + (3 * a - 2 * s) * r._y, n._z = (a - s) * 6 * e._z + (3 * a - 4 * s + 1) * t._z + (-a + s) * 6 * i._z + (3 * a - 2 * s) * r._z, n._w = (a - s) * 6 * e._w + (3 * a - 4 * s + 1) * t._w + (-a + s) * 6 * i._w + (3 * a - 2 * s) * r._w, n._isDirty = !0, n; } /** * Returns a new Quaternion as the normalization of the given Quaternion * @param quat defines the Quaternion to normalize * @returns the new Quaternion */ static Normalize(e) { const t = Ze.Zero(); return Ze.NormalizeToRef(e, t), t; } /** * Sets the given Quaternion "result" with the normalization of the given first Quaternion * @param quat defines the Quaternion to normalize * @param result defines the Quaternion where to store the result * @returns result input */ static NormalizeToRef(e, t) { return e.normalizeToRef(t), t; } } class Ae { /** * Gets the precision of matrix computations */ static get Use64Bits() { return Uu.MatrixUse64Bits; } /** * Gets the internal data of the matrix */ get m() { return this._m; } /** * Update the updateFlag to indicate that the matrix has been updated */ markAsUpdated() { this.updateFlag = Ae._UpdateFlagSeed++, this._isIdentity = !1, this._isIdentity3x2 = !1, this._isIdentityDirty = !0, this._isIdentity3x2Dirty = !0; } _updateIdentityStatus(e, t = !1, i = !1, r = !0) { this._isIdentity = e, this._isIdentity3x2 = e || i, this._isIdentityDirty = this._isIdentity ? !1 : t, this._isIdentity3x2Dirty = this._isIdentity3x2 ? !1 : r; } /** * Creates an empty matrix (filled with zeros) */ constructor() { this._isIdentity = !1, this._isIdentityDirty = !0, this._isIdentity3x2 = !0, this._isIdentity3x2Dirty = !0, this.updateFlag = -1, Uu.MatrixTrackPrecisionChange && Uu.MatrixTrackedMatrices.push(this), this._m = new Uu.MatrixCurrentType(16), this.markAsUpdated(); } // Properties /** * Check if the current matrix is identity * @returns true is the matrix is the identity matrix */ isIdentity() { if (this._isIdentityDirty) { this._isIdentityDirty = !1; const e = this._m; this._isIdentity = e[0] === 1 && e[1] === 0 && e[2] === 0 && e[3] === 0 && e[4] === 0 && e[5] === 1 && e[6] === 0 && e[7] === 0 && e[8] === 0 && e[9] === 0 && e[10] === 1 && e[11] === 0 && e[12] === 0 && e[13] === 0 && e[14] === 0 && e[15] === 1; } return this._isIdentity; } /** * Check if the current matrix is identity as a texture matrix (3x2 store in 4x4) * @returns true is the matrix is the identity matrix */ isIdentityAs3x2() { return this._isIdentity3x2Dirty && (this._isIdentity3x2Dirty = !1, this._m[0] !== 1 || this._m[5] !== 1 || this._m[15] !== 1 ? this._isIdentity3x2 = !1 : this._m[1] !== 0 || this._m[2] !== 0 || this._m[3] !== 0 || this._m[4] !== 0 || this._m[6] !== 0 || this._m[7] !== 0 || this._m[8] !== 0 || this._m[9] !== 0 || this._m[10] !== 0 || this._m[11] !== 0 || this._m[12] !== 0 || this._m[13] !== 0 || this._m[14] !== 0 ? 
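// The diagonal matched the identity, but at least one of the remaining entries checked above is
// non-zero, so this is not an identity texture matrix; only when all of them are zero is the
// 3x2 identity flag set.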
this._isIdentity3x2 = !1 : this._isIdentity3x2 = !0), this._isIdentity3x2; } /** * Gets the determinant of the matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#34 * @returns the matrix determinant */ determinant() { if (this._isIdentity === !0) return 1; const e = this._m, t = e[0], i = e[1], r = e[2], s = e[3], n = e[4], a = e[5], l = e[6], o = e[7], u = e[8], h = e[9], d = e[10], f = e[11], p = e[12], m = e[13], _ = e[14], v = e[15], C = d * v - _ * f, x = h * v - m * f, b = h * _ - m * d, S = u * v - p * f, M = u * _ - d * p, R = u * m - p * h, w = +(a * C - l * x + o * b), V = -(n * C - l * S + o * M), k = +(n * x - a * S + o * R), L = -(n * b - a * M + l * R); return t * w + i * V + r * k + s * L; } // Methods /** * Gets a string with the Matrix values * @returns a string with the Matrix values */ toString() { return `{${this.m[0]}, ${this.m[1]}, ${this.m[2]}, ${this.m[3]} ${this.m[4]}, ${this.m[5]}, ${this.m[6]}, ${this.m[7]} ${this.m[8]}, ${this.m[9]}, ${this.m[10]}, ${this.m[11]} ${this.m[12]}, ${this.m[13]}, ${this.m[14]}, ${this.m[15]}}`; } /** * Returns the matrix as a Float32Array or Array * Example Playground - https://playground.babylonjs.com/#AV9X17#49 * @returns the matrix underlying array */ toArray() { return this._m; } /** * Returns the matrix as a Float32Array or Array * Example Playground - https://playground.babylonjs.com/#AV9X17#114 * @returns the matrix underlying array. */ asArray() { return this._m; } /** * Inverts the current matrix in place * Example Playground - https://playground.babylonjs.com/#AV9X17#118 * @returns the current inverted matrix */ invert() { return this.invertToRef(this), this; } /** * Sets all the matrix elements to zero * @returns the current matrix */ reset() { return Ae.FromValuesToRef(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, this), this._updateIdentityStatus(!1), this; } /** * Adds the current matrix with a second one * Example Playground - https://playground.babylonjs.com/#AV9X17#44 * @param other defines the matrix to add * @returns a new matrix as the addition of the current matrix and the given one */ add(e) { const t = new this.constructor(); return this.addToRef(e, t), t; } /** * Sets the given matrix "result" to the addition of the current matrix and the given one * Example Playground - https://playground.babylonjs.com/#AV9X17#45 * @param other defines the matrix to add * @param result defines the target matrix * @returns result input */ addToRef(e, t) { const i = this._m, r = t._m, s = e.m; for (let n = 0; n < 16; n++) r[n] = i[n] + s[n]; return t.markAsUpdated(), t; } /** * Adds in place the given matrix to the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#46 * @param other defines the second operand * @returns the current updated matrix */ addToSelf(e) { const t = this._m, i = e.m; for (let r = 0; r < 16; r++) t[r] += i[r]; return this.markAsUpdated(), this; } /** * Sets the given matrix to the current inverted Matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#119 * @param other defines the target matrix * @returns result input */ invertToRef(e) { if (this._isIdentity === !0) return Ae.IdentityToRef(e), e; const t = this._m, i = t[0], r = t[1], s = t[2], n = t[3], a = t[4], l = t[5], o = t[6], u = t[7], h = t[8], d = t[9], f = t[10], p = t[11], m = t[12], _ = t[13], v = t[14], C = t[15], x = f * C - v * p, b = d * C - _ * p, S = d * v - _ * f, M = h * C - m * p, R = h * v - f * m, w = h * _ - m * d, V = +(l * x - o * b + u * S), k = -(a * x - o * M 
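// V, k, L, B are the cofactors of the first row of the underlying array; the determinant U below
// is their dot product with that row, and a zero determinant means the matrix is singular, in which
// case the "inverse" falls back to a plain copy of the source matrix.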
+ u * R), L = +(a * b - l * M + u * w), B = -(a * S - l * R + o * w), U = i * V + r * k + s * L + n * B; if (U === 0) return e.copyFrom(this), e; const K = 1 / U, ee = o * C - v * u, Z = l * C - _ * u, q = l * v - _ * o, le = a * C - m * u, ie = a * v - m * o, $ = a * _ - m * l, j = o * p - f * u, J = l * p - d * u, ne = l * f - d * o, pe = a * p - h * u, ge = a * f - h * o, Ie = a * d - h * l, ye = -(r * x - s * b + n * S), Se = +(i * x - s * M + n * R), re = -(i * b - r * M + n * w), te = +(i * S - r * R + s * w), he = +(r * ee - s * Z + n * q), be = -(i * ee - s * le + n * ie), Ue = +(i * Z - r * le + n * $), Ee = -(i * q - r * ie + s * $), He = -(r * j - s * J + n * ne), Xe = +(i * j - s * pe + n * ge), rt = -(i * J - r * pe + n * Ie), dt = +(i * ne - r * ge + s * Ie); return Ae.FromValuesToRef(V * K, ye * K, he * K, He * K, k * K, Se * K, be * K, Xe * K, L * K, re * K, Ue * K, rt * K, B * K, te * K, Ee * K, dt * K, e), e; } /** * add a value at the specified position in the current Matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#47 * @param index the index of the value within the matrix. between 0 and 15. * @param value the value to be added * @returns the current updated matrix */ addAtIndex(e, t) { return this._m[e] += t, this.markAsUpdated(), this; } /** * mutiply the specified position in the current Matrix by a value * @param index the index of the value within the matrix. between 0 and 15. * @param value the value to be added * @returns the current updated matrix */ multiplyAtIndex(e, t) { return this._m[e] *= t, this.markAsUpdated(), this; } /** * Inserts the translation vector (using 3 floats) in the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#120 * @param x defines the 1st component of the translation * @param y defines the 2nd component of the translation * @param z defines the 3rd component of the translation * @returns the current updated matrix */ setTranslationFromFloats(e, t, i) { return this._m[12] = e, this._m[13] = t, this._m[14] = i, this.markAsUpdated(), this; } /** * Adds the translation vector (using 3 floats) in the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#20 * Example Playground - https://playground.babylonjs.com/#AV9X17#48 * @param x defines the 1st component of the translation * @param y defines the 2nd component of the translation * @param z defines the 3rd component of the translation * @returns the current updated matrix */ addTranslationFromFloats(e, t, i) { return this._m[12] += e, this._m[13] += t, this._m[14] += i, this.markAsUpdated(), this; } /** * Inserts the translation vector in the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#121 * @param vector3 defines the translation to insert * @returns the current updated matrix */ setTranslation(e) { return this.setTranslationFromFloats(e._x, e._y, e._z); } /** * Gets the translation value of the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#122 * @returns a new Vector3 as the extracted translation from the matrix */ getTranslation() { return new D(this._m[12], this._m[13], this._m[14]); } /** * Fill a Vector3 with the extracted translation from the matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#123 * @param result defines the Vector3 where to store the translation * @returns the current matrix */ getTranslationToRef(e) { return e.x = this._m[12], e.y = this._m[13], e.z = this._m[14], e; } /** * Remove rotation and scaling 
part from the matrix * @returns the updated matrix */ removeRotationAndScaling() { const e = this.m; return Ae.FromValuesToRef(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, e[12], e[13], e[14], e[15], this), this._updateIdentityStatus(e[12] === 0 && e[13] === 0 && e[14] === 0 && e[15] === 1), this; } /** * Multiply two matrices * Example Playground - https://playground.babylonjs.com/#AV9X17#15 * A.multiply(B) means apply B to A so result is B x A * @param other defines the second operand * @returns a new matrix set with the multiplication result of the current Matrix and the given one */ multiply(e) { const t = new this.constructor(); return this.multiplyToRef(e, t), t; } /** * Copy the current matrix from the given one * Example Playground - https://playground.babylonjs.com/#AV9X17#21 * @param other defines the source matrix * @returns the current updated matrix */ copyFrom(e) { e.copyToArray(this._m); const t = e; return this.updateFlag = t.updateFlag, this._updateIdentityStatus(t._isIdentity, t._isIdentityDirty, t._isIdentity3x2, t._isIdentity3x2Dirty), this; } /** * Populates the given array from the starting index with the current matrix values * @param array defines the target array * @param offset defines the offset in the target array where to start storing values * @returns the current matrix */ copyToArray(e, t = 0) { const i = this._m; return e[t] = i[0], e[t + 1] = i[1], e[t + 2] = i[2], e[t + 3] = i[3], e[t + 4] = i[4], e[t + 5] = i[5], e[t + 6] = i[6], e[t + 7] = i[7], e[t + 8] = i[8], e[t + 9] = i[9], e[t + 10] = i[10], e[t + 11] = i[11], e[t + 12] = i[12], e[t + 13] = i[13], e[t + 14] = i[14], e[t + 15] = i[15], this; } /** * Sets the given matrix "result" with the multiplication result of the current Matrix and the given one * A.multiplyToRef(B, R) means apply B to A and store in R and R = B x A * Example Playground - https://playground.babylonjs.com/#AV9X17#16 * @param other defines the second operand * @param result defines the matrix where to store the multiplication * @returns result input */ multiplyToRef(e, t) { return this._isIdentity ? (t.copyFrom(e), t) : e._isIdentity ? 
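// Identity fast paths: multiplying by an identity matrix just copies the other operand,
// skipping the full 4x4 product in multiplyToArray below.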
(t.copyFrom(this), t) : (this.multiplyToArray(e, t._m, 0), t.markAsUpdated(), t); } /** * Sets the Float32Array "result" from the given index "offset" with the multiplication of the current matrix and the given one * @param other defines the second operand * @param result defines the array where to store the multiplication * @param offset defines the offset in the target array where to start storing values * @returns the current matrix */ multiplyToArray(e, t, i) { const r = this._m, s = e.m, n = r[0], a = r[1], l = r[2], o = r[3], u = r[4], h = r[5], d = r[6], f = r[7], p = r[8], m = r[9], _ = r[10], v = r[11], C = r[12], x = r[13], b = r[14], S = r[15], M = s[0], R = s[1], w = s[2], V = s[3], k = s[4], L = s[5], B = s[6], U = s[7], K = s[8], ee = s[9], Z = s[10], q = s[11], le = s[12], ie = s[13], $ = s[14], j = s[15]; return t[i] = n * M + a * k + l * K + o * le, t[i + 1] = n * R + a * L + l * ee + o * ie, t[i + 2] = n * w + a * B + l * Z + o * $, t[i + 3] = n * V + a * U + l * q + o * j, t[i + 4] = u * M + h * k + d * K + f * le, t[i + 5] = u * R + h * L + d * ee + f * ie, t[i + 6] = u * w + h * B + d * Z + f * $, t[i + 7] = u * V + h * U + d * q + f * j, t[i + 8] = p * M + m * k + _ * K + v * le, t[i + 9] = p * R + m * L + _ * ee + v * ie, t[i + 10] = p * w + m * B + _ * Z + v * $, t[i + 11] = p * V + m * U + _ * q + v * j, t[i + 12] = C * M + x * k + b * K + S * le, t[i + 13] = C * R + x * L + b * ee + S * ie, t[i + 14] = C * w + x * B + b * Z + S * $, t[i + 15] = C * V + x * U + b * q + S * j, this; } /** * Check equality between this matrix and a second one * @param value defines the second matrix to compare * @returns true is the current matrix and the given one values are strictly equal */ equals(e) { const t = e; if (!t) return !1; if ((this._isIdentity || t._isIdentity) && !this._isIdentityDirty && !t._isIdentityDirty) return this._isIdentity && t._isIdentity; const i = this.m, r = t.m; return i[0] === r[0] && i[1] === r[1] && i[2] === r[2] && i[3] === r[3] && i[4] === r[4] && i[5] === r[5] && i[6] === r[6] && i[7] === r[7] && i[8] === r[8] && i[9] === r[9] && i[10] === r[10] && i[11] === r[11] && i[12] === r[12] && i[13] === r[13] && i[14] === r[14] && i[15] === r[15]; } /** * Clone the current matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#18 * @returns a new matrix from the current matrix */ clone() { const e = new this.constructor(); return e.copyFrom(this), e; } /** * Returns the name of the current matrix class * @returns the string "Matrix" */ getClassName() { return "Matrix"; } /** * Gets the hash code of the current matrix * @returns the hash code */ getHashCode() { let e = W_(this._m[0]); for (let t = 1; t < 16; t++) e = e * 397 ^ W_(this._m[t]); return e; } /** * Decomposes the current Matrix into a translation, rotation and scaling components of the provided node * Example Playground - https://playground.babylonjs.com/#AV9X17#13 * @param node the node to decompose the matrix to * @returns true if operation was successful */ decomposeToTransformNode(e) { return e.rotationQuaternion = e.rotationQuaternion || new Ze(), this.decompose(e.scaling, e.rotationQuaternion, e.position); } /** * Decomposes the current Matrix into a translation, rotation and scaling components * Example Playground - https://playground.babylonjs.com/#AV9X17#12 * @param scale defines the scale vector3 given as a reference to update * @param rotation defines the rotation quaternion given as a reference to update * @param translation defines the translation vector3 given as a 
reference to update * @param preserveScalingNode Use scaling sign coming from this node. Otherwise scaling sign might change. * @param useAbsoluteScaling Use scaling sign coming from this absoluteScaling when true or scaling otherwise. * @returns true if operation was successful */ decompose(e, t, i, r, s = !0) { if (this._isIdentity) return i && i.setAll(0), e && e.setAll(1), t && t.copyFromFloats(0, 0, 0, 1), !0; const n = this._m; if (i && i.copyFromFloats(n[12], n[13], n[14]), e = e || Yi.Vector3[0], e.x = Math.sqrt(n[0] * n[0] + n[1] * n[1] + n[2] * n[2]), e.y = Math.sqrt(n[4] * n[4] + n[5] * n[5] + n[6] * n[6]), e.z = Math.sqrt(n[8] * n[8] + n[9] * n[9] + n[10] * n[10]), r) { const a = (s ? r.absoluteScaling.x : r.scaling.x) < 0 ? -1 : 1, l = (s ? r.absoluteScaling.y : r.scaling.y) < 0 ? -1 : 1, o = (s ? r.absoluteScaling.z : r.scaling.z) < 0 ? -1 : 1; e.x *= a, e.y *= l, e.z *= o; } else this.determinant() <= 0 && (e.y *= -1); if (e._x === 0 || e._y === 0 || e._z === 0) return t && t.copyFromFloats(0, 0, 0, 1), !1; if (t) { const a = 1 / e._x, l = 1 / e._y, o = 1 / e._z; Ae.FromValuesToRef(n[0] * a, n[1] * a, n[2] * a, 0, n[4] * l, n[5] * l, n[6] * l, 0, n[8] * o, n[9] * o, n[10] * o, 0, 0, 0, 0, 1, Yi.Matrix[0]), Ze.FromRotationMatrixToRef(Yi.Matrix[0], t); } return !0; } /** * Gets specific row of the matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#36 * @param index defines the number of the row to get * @returns the index-th row of the current matrix as a new Vector4 */ getRow(e) { if (e < 0 || e > 3) return null; const t = e * 4; return new Di(this._m[t + 0], this._m[t + 1], this._m[t + 2], this._m[t + 3]); } /** * Gets specific row of the matrix to ref * Example Playground - https://playground.babylonjs.com/#AV9X17#36 * @param index defines the number of the row to get * @param rowVector vector to store the index-th row of the current matrix * @returns result input */ getRowToRef(e, t) { if (e >= 0 && e <= 3) { const i = e * 4; t.x = this._m[i + 0], t.y = this._m[i + 1], t.z = this._m[i + 2], t.w = this._m[i + 3]; } return t; } /** * Sets the index-th row of the current matrix to the vector4 values * Example Playground - https://playground.babylonjs.com/#AV9X17#36 * @param index defines the number of the row to set * @param row defines the target vector4 * @returns the updated current matrix */ setRow(e, t) { return this.setRowFromFloats(e, t.x, t.y, t.z, t.w); } /** * Compute the transpose of the matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#40 * @returns the new transposed matrix */ transpose() { const e = new this.constructor(); return Ae.TransposeToRef(this, e), e; } /** * Compute the transpose of the matrix and store it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#41 * @param result defines the target matrix * @returns result input */ transposeToRef(e) { return Ae.TransposeToRef(this, e), e; } /** * Sets the index-th row of the current matrix with the given 4 x float values * Example Playground - https://playground.babylonjs.com/#AV9X17#36 * @param index defines the row index * @param x defines the x component to set * @param y defines the y component to set * @param z defines the z component to set * @param w defines the w component to set * @returns the updated current matrix */ setRowFromFloats(e, t, i, r, s) { if (e < 0 || e > 3) return this; const n = e * 4; return this._m[n + 0] = t, this._m[n + 1] = i, this._m[n + 2] = r, this._m[n + 3] = s, this.markAsUpdated(), this; } /** * 
Compute a new matrix set with the current matrix values multiplied by scale (float) * @param scale defines the scale factor * @returns a new matrix */ scale(e) { const t = new this.constructor(); return this.scaleToRef(e, t), t; } /** * Scale the current matrix values by a factor to a given result matrix * @param scale defines the scale factor * @param result defines the matrix to store the result * @returns result input */ scaleToRef(e, t) { for (let i = 0; i < 16; i++) t._m[i] = this._m[i] * e; return t.markAsUpdated(), t; } /** * Scale the current matrix values by a factor and add the result to a given matrix * @param scale defines the scale factor * @param result defines the Matrix to store the result * @returns result input */ scaleAndAddToRef(e, t) { for (let i = 0; i < 16; i++) t._m[i] += this._m[i] * e; return t.markAsUpdated(), t; } /** * Writes to the given matrix a normal matrix, computed from this one (using values from identity matrix for fourth row and column). * Example Playground - https://playground.babylonjs.com/#AV9X17#17 * @param ref matrix to store the result */ toNormalMatrix(e) { const t = Yi.Matrix[0]; this.invertToRef(t), t.transposeToRef(e); const i = e._m; return Ae.FromValuesToRef(i[0], i[1], i[2], 0, i[4], i[5], i[6], 0, i[8], i[9], i[10], 0, 0, 0, 0, 1, e), e; } /** * Gets only rotation part of the current matrix * @returns a new matrix sets to the extracted rotation matrix from the current one */ getRotationMatrix() { const e = new this.constructor(); return this.getRotationMatrixToRef(e), e; } /** * Extracts the rotation matrix from the current one and sets it as the given "result" * @param result defines the target matrix to store data to * @returns result input */ getRotationMatrixToRef(e) { const t = Yi.Vector3[0]; if (!this.decompose(t)) return Ae.IdentityToRef(e), e; const i = this._m, r = 1 / t._x, s = 1 / t._y, n = 1 / t._z; return Ae.FromValuesToRef(i[0] * r, i[1] * r, i[2] * r, 0, i[4] * s, i[5] * s, i[6] * s, 0, i[8] * n, i[9] * n, i[10] * n, 0, 0, 0, 0, 1, e), e; } /** * Toggles model matrix from being right handed to left handed in place and vice versa */ toggleModelMatrixHandInPlace() { const e = this._m; return e[2] *= -1, e[6] *= -1, e[8] *= -1, e[9] *= -1, e[14] *= -1, this.markAsUpdated(), this; } /** * Toggles projection matrix from being right handed to left handed in place and vice versa */ toggleProjectionMatrixHandInPlace() { const e = this._m; return e[8] *= -1, e[9] *= -1, e[10] *= -1, e[11] *= -1, this.markAsUpdated(), this; } // Statics /** * Creates a matrix from an array * Example Playground - https://playground.babylonjs.com/#AV9X17#42 * @param array defines the source array * @param offset defines an offset in the source array * @returns a new Matrix set from the starting index of the given array */ static FromArray(e, t = 0) { const i = new Ae(); return Ae.FromArrayToRef(e, t, i), i; } /** * Copy the content of an array into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#43 * @param array defines the source array * @param offset defines an offset in the source array * @param result defines the target matrix * @returns result input */ static FromArrayToRef(e, t, i) { for (let r = 0; r < 16; r++) i._m[r] = e[r + t]; return i.markAsUpdated(), i; } /** * Stores an array into a matrix after having multiplied each component by a given factor * Example Playground - https://playground.babylonjs.com/#AV9X17#50 * @param array defines the source array * @param offset defines the offset in the source array * 
@param scale defines the scaling factor * @param result defines the target matrix * @returns result input */ static FromFloat32ArrayToRefScaled(e, t, i, r) { for (let s = 0; s < 16; s++) r._m[s] = e[s + t] * i; return r.markAsUpdated(), r; } /** * Gets an identity matrix that must not be updated */ static get IdentityReadOnly() { return Ae._IdentityReadOnly; } /** * Stores a list of values (16) inside a given matrix * @param initialM11 defines 1st value of 1st row * @param initialM12 defines 2nd value of 1st row * @param initialM13 defines 3rd value of 1st row * @param initialM14 defines 4th value of 1st row * @param initialM21 defines 1st value of 2nd row * @param initialM22 defines 2nd value of 2nd row * @param initialM23 defines 3rd value of 2nd row * @param initialM24 defines 4th value of 2nd row * @param initialM31 defines 1st value of 3rd row * @param initialM32 defines 2nd value of 3rd row * @param initialM33 defines 3rd value of 3rd row * @param initialM34 defines 4th value of 3rd row * @param initialM41 defines 1st value of 4th row * @param initialM42 defines 2nd value of 4th row * @param initialM43 defines 3rd value of 4th row * @param initialM44 defines 4th value of 4th row * @param result defines the target matrix * @returns result input */ static FromValuesToRef(e, t, i, r, s, n, a, l, o, u, h, d, f, p, m, _, v) { const C = v._m; C[0] = e, C[1] = t, C[2] = i, C[3] = r, C[4] = s, C[5] = n, C[6] = a, C[7] = l, C[8] = o, C[9] = u, C[10] = h, C[11] = d, C[12] = f, C[13] = p, C[14] = m, C[15] = _, v.markAsUpdated(); } /** * Creates new matrix from a list of values (16) * @param initialM11 defines 1st value of 1st row * @param initialM12 defines 2nd value of 1st row * @param initialM13 defines 3rd value of 1st row * @param initialM14 defines 4th value of 1st row * @param initialM21 defines 1st value of 2nd row * @param initialM22 defines 2nd value of 2nd row * @param initialM23 defines 3rd value of 2nd row * @param initialM24 defines 4th value of 2nd row * @param initialM31 defines 1st value of 3rd row * @param initialM32 defines 2nd value of 3rd row * @param initialM33 defines 3rd value of 3rd row * @param initialM34 defines 4th value of 3rd row * @param initialM41 defines 1st value of 4th row * @param initialM42 defines 2nd value of 4th row * @param initialM43 defines 3rd value of 4th row * @param initialM44 defines 4th value of 4th row * @returns the new matrix */ static FromValues(e, t, i, r, s, n, a, l, o, u, h, d, f, p, m, _) { const v = new Ae(), C = v._m; return C[0] = e, C[1] = t, C[2] = i, C[3] = r, C[4] = s, C[5] = n, C[6] = a, C[7] = l, C[8] = o, C[9] = u, C[10] = h, C[11] = d, C[12] = f, C[13] = p, C[14] = m, C[15] = _, v.markAsUpdated(), v; } /** * Creates a new matrix composed by merging scale (vector3), rotation (quaternion) and translation (vector3) * Example Playground - https://playground.babylonjs.com/#AV9X17#24 * @param scale defines the scale vector3 * @param rotation defines the rotation quaternion * @param translation defines the translation vector3 * @returns a new matrix */ static Compose(e, t, i) { const r = new Ae(); return Ae.ComposeToRef(e, t, i, r), r; } /** * Sets a matrix to a value composed by merging scale (vector3), rotation (quaternion) and translation (vector3) * Example Playground - https://playground.babylonjs.com/#AV9X17#25 * @param scale defines the scale vector3 * @param rotation defines the rotation quaternion * @param translation defines the translation vector3 * @param result defines the target matrix * @returns result input */ static 
ComposeToRef(e, t, i, r) { const s = r._m, n = t._x, a = t._y, l = t._z, o = t._w, u = n + n, h = a + a, d = l + l, f = n * u, p = n * h, m = n * d, _ = a * h, v = a * d, C = l * d, x = o * u, b = o * h, S = o * d, M = e._x, R = e._y, w = e._z; return s[0] = (1 - (_ + C)) * M, s[1] = (p + S) * M, s[2] = (m - b) * M, s[3] = 0, s[4] = (p - S) * R, s[5] = (1 - (f + C)) * R, s[6] = (v + x) * R, s[7] = 0, s[8] = (m + b) * w, s[9] = (v - x) * w, s[10] = (1 - (f + _)) * w, s[11] = 0, s[12] = i._x, s[13] = i._y, s[14] = i._z, s[15] = 1, r.markAsUpdated(), r; } /** * Creates a new identity matrix * @returns a new identity matrix */ static Identity() { const e = Ae.FromValues(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1); return e._updateIdentityStatus(!0), e; } /** * Creates a new identity matrix and stores the result in a given matrix * @param result defines the target matrix * @returns result input */ static IdentityToRef(e) { return Ae.FromValuesToRef(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, e), e._updateIdentityStatus(!0), e; } /** * Creates a new zero matrix * @returns a new zero matrix */ static Zero() { const e = Ae.FromValues(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); return e._updateIdentityStatus(!1), e; } /** * Creates a new rotation matrix for "angle" radians around the X axis * Example Playground - https://playground.babylonjs.com/#AV9X17#97 * @param angle defines the angle (in radians) to use * @returns the new matrix */ static RotationX(e) { const t = new Ae(); return Ae.RotationXToRef(e, t), t; } /** * Creates a new matrix as the invert of a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#124 * @param source defines the source matrix * @returns the new matrix */ static Invert(e) { const t = new e.constructor(); return e.invertToRef(t), t; } /** * Creates a new rotation matrix for "angle" radians around the X axis and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#98 * @param angle defines the angle (in radians) to use * @param result defines the target matrix * @returns result input */ static RotationXToRef(e, t) { const i = Math.sin(e), r = Math.cos(e); return Ae.FromValuesToRef(1, 0, 0, 0, 0, r, i, 0, 0, -i, r, 0, 0, 0, 0, 1, t), t._updateIdentityStatus(r === 1 && i === 0), t; } /** * Creates a new rotation matrix for "angle" radians around the Y axis * Example Playground - https://playground.babylonjs.com/#AV9X17#99 * @param angle defines the angle (in radians) to use * @returns the new matrix */ static RotationY(e) { const t = new Ae(); return Ae.RotationYToRef(e, t), t; } /** * Creates a new rotation matrix for "angle" radians around the Y axis and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#100 * @param angle defines the angle (in radians) to use * @param result defines the target matrix * @returns result input */ static RotationYToRef(e, t) { const i = Math.sin(e), r = Math.cos(e); return Ae.FromValuesToRef(r, 0, -i, 0, 0, 1, 0, 0, i, 0, r, 0, 0, 0, 0, 1, t), t._updateIdentityStatus(r === 1 && i === 0), t; } /** * Creates a new rotation matrix for "angle" radians around the Z axis * Example Playground - https://playground.babylonjs.com/#AV9X17#101 * @param angle defines the angle (in radians) to use * @returns the new matrix */ static RotationZ(e) { const t = new Ae(); return Ae.RotationZToRef(e, t), t; } /** * Creates a new rotation matrix for "angle" radians around the Z axis and stores it in a given matrix * Example Playground - 
https://playground.babylonjs.com/#AV9X17#102 * @param angle defines the angle (in radians) to use * @param result defines the target matrix * @returns result input */ static RotationZToRef(e, t) { const i = Math.sin(e), r = Math.cos(e); return Ae.FromValuesToRef(r, i, 0, 0, -i, r, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, t), t._updateIdentityStatus(r === 1 && i === 0), t; } /** * Creates a new rotation matrix for "angle" radians around the given axis * Example Playground - https://playground.babylonjs.com/#AV9X17#96 * @param axis defines the axis to use * @param angle defines the angle (in radians) to use * @returns the new matrix */ static RotationAxis(e, t) { const i = new Ae(); return Ae.RotationAxisToRef(e, t, i), i; } /** * Creates a new rotation matrix for "angle" radians around the given axis and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#94 * @param axis defines the axis to use * @param angle defines the angle (in radians) to use * @param result defines the target matrix * @returns result input */ static RotationAxisToRef(e, t, i) { const r = Math.sin(-t), s = Math.cos(-t), n = 1 - s; e.normalize(); const a = i._m; return a[0] = e._x * e._x * n + s, a[1] = e._x * e._y * n - e._z * r, a[2] = e._x * e._z * n + e._y * r, a[3] = 0, a[4] = e._y * e._x * n + e._z * r, a[5] = e._y * e._y * n + s, a[6] = e._y * e._z * n - e._x * r, a[7] = 0, a[8] = e._z * e._x * n - e._y * r, a[9] = e._z * e._y * n + e._x * r, a[10] = e._z * e._z * n + s, a[11] = 0, a[12] = 0, a[13] = 0, a[14] = 0, a[15] = 1, i.markAsUpdated(), i; } /** * Takes normalised vectors and returns a rotation matrix to align "from" with "to". * Taken from http://www.iquilezles.org/www/articles/noacos/noacos.htm * Example Playground - https://playground.babylonjs.com/#AV9X17#93 * @param from defines the vector to align * @param to defines the vector to align to * @param result defines the target matrix * @param useYAxisForCoplanar defines a boolean indicating that we should favor Y axis for coplanar vectors (default is false) * @returns result input */ static RotationAlignToRef(e, t, i, r = !1) { const s = D.Dot(t, e), n = i._m; if (s < -1 + Sr) n[0] = -1, n[1] = 0, n[2] = 0, n[3] = 0, n[4] = 0, n[5] = r ? 1 : -1, n[6] = 0, n[7] = 0, n[8] = 0, n[9] = 0, n[10] = r ? 
-1 : 1, n[11] = 0; else { const a = D.Cross(t, e), l = 1 / (1 + s); n[0] = a._x * a._x * l + s, n[1] = a._y * a._x * l - a._z, n[2] = a._z * a._x * l + a._y, n[3] = 0, n[4] = a._x * a._y * l + a._z, n[5] = a._y * a._y * l + s, n[6] = a._z * a._y * l - a._x, n[7] = 0, n[8] = a._x * a._z * l - a._y, n[9] = a._y * a._z * l + a._x, n[10] = a._z * a._z * l + s, n[11] = 0; } return n[12] = 0, n[13] = 0, n[14] = 0, n[15] = 1, i.markAsUpdated(), i; } /** * Creates a rotation matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#103 * Example Playground - https://playground.babylonjs.com/#AV9X17#105 * @param yaw defines the yaw angle in radians (Y axis) * @param pitch defines the pitch angle in radians (X axis) * @param roll defines the roll angle in radians (Z axis) * @returns the new rotation matrix */ static RotationYawPitchRoll(e, t, i) { const r = new Ae(); return Ae.RotationYawPitchRollToRef(e, t, i, r), r; } /** * Creates a rotation matrix and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#104 * @param yaw defines the yaw angle in radians (Y axis) * @param pitch defines the pitch angle in radians (X axis) * @param roll defines the roll angle in radians (Z axis) * @param result defines the target matrix * @returns result input */ static RotationYawPitchRollToRef(e, t, i, r) { return Ze.RotationYawPitchRollToRef(e, t, i, Yi.Quaternion[0]), Yi.Quaternion[0].toRotationMatrix(r), r; } /** * Creates a scaling matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#107 * @param x defines the scale factor on X axis * @param y defines the scale factor on Y axis * @param z defines the scale factor on Z axis * @returns the new matrix */ static Scaling(e, t, i) { const r = new Ae(); return Ae.ScalingToRef(e, t, i, r), r; } /** * Creates a scaling matrix and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#108 * @param x defines the scale factor on X axis * @param y defines the scale factor on Y axis * @param z defines the scale factor on Z axis * @param result defines the target matrix * @returns result input */ static ScalingToRef(e, t, i, r) { return Ae.FromValuesToRef(e, 0, 0, 0, 0, t, 0, 0, 0, 0, i, 0, 0, 0, 0, 1, r), r._updateIdentityStatus(e === 1 && t === 1 && i === 1), r; } /** * Creates a translation matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#109 * @param x defines the translation on X axis * @param y defines the translation on Y axis * @param z defines the translationon Z axis * @returns the new matrix */ static Translation(e, t, i) { const r = new Ae(); return Ae.TranslationToRef(e, t, i, r), r; } /** * Creates a translation matrix and stores it in a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#110 * @param x defines the translation on X axis * @param y defines the translation on Y axis * @param z defines the translationon Z axis * @param result defines the target matrix * @returns result input */ static TranslationToRef(e, t, i, r) { return Ae.FromValuesToRef(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, e, t, i, 1, r), r._updateIdentityStatus(e === 0 && t === 0 && i === 0), r; } /** * Returns a new Matrix whose values are the interpolated values for "gradient" (float) between the ones of the matrices "startValue" and "endValue". 
* Example Playground - https://playground.babylonjs.com/#AV9X17#55 * @param startValue defines the start value * @param endValue defines the end value * @param gradient defines the gradient factor * @returns the new matrix */ static Lerp(e, t, i) { const r = new e.constructor(); return Ae.LerpToRef(e, t, i, r), r; } /** * Set the given matrix "result" as the interpolated values for "gradient" (float) between the ones of the matrices "startValue" and "endValue". * Example Playground - https://playground.babylonjs.com/#AV9X17#54 * @param startValue defines the start value * @param endValue defines the end value * @param gradient defines the gradient factor * @param result defines the Matrix object where to store data * @returns result input */ static LerpToRef(e, t, i, r) { const s = r._m, n = e.m, a = t.m; for (let l = 0; l < 16; l++) s[l] = n[l] * (1 - i) + a[l] * i; return r.markAsUpdated(), r; } /** * Builds a new matrix whose values are computed by: * * decomposing the "startValue" and "endValue" matrices into their respective scale, rotation and translation matrices * * interpolating for "gradient" (float) the values between each of these decomposed matrices between the start and the end * * recomposing a new matrix from these 3 interpolated scale, rotation and translation matrices * Example Playground - https://playground.babylonjs.com/#AV9X17#22 * Example Playground - https://playground.babylonjs.com/#AV9X17#51 * @param startValue defines the first matrix * @param endValue defines the second matrix * @param gradient defines the gradient between the two matrices * @returns the new matrix */ static DecomposeLerp(e, t, i) { const r = new e.constructor(); return Ae.DecomposeLerpToRef(e, t, i, r), r; } /** * Update a matrix to values which are computed by: * * decomposing the "startValue" and "endValue" matrices into their respective scale, rotation and translation matrices * * interpolating for "gradient" (float) the values between each of these decomposed matrices between the start and the end * * recomposing a new matrix from these 3 interpolated scale, rotation and translation matrices * Example Playground - https://playground.babylonjs.com/#AV9X17#23 * Example Playground - https://playground.babylonjs.com/#AV9X17#53 * @param startValue defines the first matrix * @param endValue defines the second matrix * @param gradient defines the gradient between the two matrices * @param result defines the target matrix * @returns result input */ static DecomposeLerpToRef(e, t, i, r) { const s = Yi.Vector3[0], n = Yi.Quaternion[0], a = Yi.Vector3[1]; e.decompose(s, n, a); const l = Yi.Vector3[2], o = Yi.Quaternion[1], u = Yi.Vector3[3]; t.decompose(l, o, u); const h = Yi.Vector3[4]; D.LerpToRef(s, l, i, h); const d = Yi.Quaternion[2]; Ze.SlerpToRef(n, o, i, d); const f = Yi.Vector3[5]; return D.LerpToRef(a, u, i, f), Ae.ComposeToRef(h, d, f, r), r; } /** * Creates a new matrix that transforms vertices from world space to camera space. It takes three vectors as arguments that together describe the position and orientation of the camera. 
* This function generates a matrix suitable for a left handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#58 * Example Playground - https://playground.babylonjs.com/#AV9X17#59 * @param eye defines the final position of the entity * @param target defines where the entity should look at * @param up defines the up vector for the entity * @returns the new matrix */ static LookAtLH(e, t, i) { const r = new Ae(); return Ae.LookAtLHToRef(e, t, i, r), r; } /** * Sets the given "result" Matrix to a matrix that transforms vertices from world space to camera space. It takes three vectors as arguments that together describe the position and orientation of the camera. * This function generates a matrix suitable for a left handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#60 * Example Playground - https://playground.babylonjs.com/#AV9X17#61 * @param eye defines the final position of the entity * @param target defines where the entity should look at * @param up defines the up vector for the entity * @param result defines the target matrix * @returns result input */ static LookAtLHToRef(e, t, i, r) { const s = Yi.Vector3[0], n = Yi.Vector3[1], a = Yi.Vector3[2]; t.subtractToRef(e, a), a.normalize(), D.CrossToRef(i, a, s); const l = s.lengthSquared(); l === 0 ? s.x = 1 : s.normalizeFromLength(Math.sqrt(l)), D.CrossToRef(a, s, n), n.normalize(); const o = -D.Dot(s, e), u = -D.Dot(n, e), h = -D.Dot(a, e); return Ae.FromValuesToRef(s._x, n._x, a._x, 0, s._y, n._y, a._y, 0, s._z, n._z, a._z, 0, o, u, h, 1, r), r; } /** * Creates a new matrix that transforms vertices from world space to camera space. It takes three vectors as arguments that together describe the position and orientation of the camera. * This function generates a matrix suitable for a right handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#62 * Example Playground - https://playground.babylonjs.com/#AV9X17#63 * @param eye defines the final position of the entity * @param target defines where the entity should look at * @param up defines the up vector for the entity * @returns the new matrix */ static LookAtRH(e, t, i) { const r = new Ae(); return Ae.LookAtRHToRef(e, t, i, r), r; } /** * Sets the given "result" Matrix to a matrix that transforms vertices from world space to camera space. It takes three vectors as arguments that together describe the position and orientation of the camera. * This function generates a matrix suitable for a right handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#64 * Example Playground - https://playground.babylonjs.com/#AV9X17#65 * @param eye defines the final position of the entity * @param target defines where the entity should look at * @param up defines the up vector for the entity * @param result defines the target matrix * @returns result input */ static LookAtRHToRef(e, t, i, r) { const s = Yi.Vector3[0], n = Yi.Vector3[1], a = Yi.Vector3[2]; e.subtractToRef(t, a), a.normalize(), D.CrossToRef(i, a, s); const l = s.lengthSquared(); l === 0 ? s.x = 1 : s.normalizeFromLength(Math.sqrt(l)), D.CrossToRef(a, s, n), n.normalize(); const o = -D.Dot(s, e), u = -D.Dot(n, e), h = -D.Dot(a, e); return Ae.FromValuesToRef(s._x, n._x, a._x, 0, s._y, n._y, a._y, 0, s._z, n._z, a._z, 0, o, u, h, 1, r), r; } /** * Creates a new matrix that transforms vertices from world space to camera space. 
It takes two vectors as arguments that together describe the orientation of the camera. The position is assumed to be at the origin (0,0,0) * This function generates a matrix suitable for a left handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#66 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @returns the new matrix */ static LookDirectionLH(e, t) { const i = new Ae(); return Ae.LookDirectionLHToRef(e, t, i), i; } /** * Sets the given "result" Matrix to a matrix that transforms vertices from world space to camera space. It takes two vectors as arguments that together describe the orientation of the camera. The position is assumed to be at the origin (0,0,0) * This function generates a matrix suitable for a left handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#67 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @param result defines the target matrix * @returns result input */ static LookDirectionLHToRef(e, t, i) { const r = Yi.Vector3[0]; r.copyFrom(e), r.scaleInPlace(-1); const s = Yi.Vector3[1]; return D.CrossToRef(t, r, s), Ae.FromValuesToRef(s._x, s._y, s._z, 0, t._x, t._y, t._z, 0, r._x, r._y, r._z, 0, 0, 0, 0, 1, i), i; } /** * Creates a new matrix that transforms vertices from world space to camera space. It takes two vectors as arguments that together describe the orientation of the camera. The position is assumed to be at the origin (0,0,0) * This function generates a matrix suitable for a right handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#68 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. * @returns the new matrix */ static LookDirectionRH(e, t) { const i = new Ae(); return Ae.LookDirectionRHToRef(e, t, i), i; } /** * Sets the given "result" Matrix to a matrix that transforms vertices from world space to camera space. It takes two vectors as arguments that together describe the orientation of the camera. The position is assumed to be at the origin (0,0,0) * This function generates a matrix suitable for a right handed coordinate system * Example Playground - https://playground.babylonjs.com/#AV9X17#69 * @param forward defines the forward direction - Must be normalized and orthogonal to up. * @param up defines the up vector for the entity - Must be normalized and orthogonal to forward. 
* @param result defines the target matrix * @returns result input */ static LookDirectionRHToRef(e, t, i) { const r = Yi.Vector3[2]; return D.CrossToRef(t, e, r), Ae.FromValuesToRef(r._x, r._y, r._z, 0, t._x, t._y, t._z, 0, e._x, e._y, e._z, 0, 0, 0, 0, 1, i), i; } /** * Create a left-handed orthographic projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#70 * @param width defines the viewport width * @param height defines the viewport height * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns a new matrix as a left-handed orthographic projection matrix */ static OrthoLH(e, t, i, r, s) { const n = new Ae(); return Ae.OrthoLHToRef(e, t, i, r, n, s), n; } /** * Store a left-handed orthographic projection to a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#71 * @param width defines the viewport width * @param height defines the viewport height * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param result defines the target matrix * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns result input */ static OrthoLHToRef(e, t, i, r, s, n) { const a = i, l = r, o = 2 / e, u = 2 / t, h = 2 / (l - a), d = -(l + a) / (l - a); return Ae.FromValuesToRef(o, 0, 0, 0, 0, u, 0, 0, 0, 0, h, 0, 0, 0, d, 1, s), n && s.multiplyToRef(UR, s), s._updateIdentityStatus(o === 1 && u === 1 && h === 1 && d === 0), s; } /** * Create a left-handed orthographic projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#72 * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns a new matrix as a left-handed orthographic projection matrix */ static OrthoOffCenterLH(e, t, i, r, s, n, a) { const l = new Ae(); return Ae.OrthoOffCenterLHToRef(e, t, i, r, s, n, l, a), l; } /** * Stores a left-handed orthographic projection into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#73 * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param result defines the target matrix * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns result input */ static OrthoOffCenterLHToRef(e, t, i, r, s, n, a, l) { const o = s, u = n, h = 2 / (t - e), d = 2 / (r - i), f = 2 / (u - o), p = -(u + o) / (u - o), m = (e + t) / (e - t), _ = (r + i) / (i - r); return Ae.FromValuesToRef(h, 0, 0, 0, 0, d, 0, 0, 0, 0, f, 0, m, _, p, 1, a), l && a.multiplyToRef(UR, a), a.markAsUpdated(), a; } /** * Stores a left-handed oblique projection into a given matrix * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top 
coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param angle Angle (along X/Y Plane) to apply shear * @param length Length of the shear * @param distance Distance from shear point * @param result defines the target matrix * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns result input */ static ObliqueOffCenterLHToRef(e, t, i, r, s, n, a, l, o, u, h) { const d = -a * Math.cos(l), f = -a * Math.sin(l); return Ae.TranslationToRef(0, 0, -o, Yi.Matrix[1]), Ae.FromValuesToRef(1, 0, 0, 0, 0, 1, 0, 0, d, f, 1, 0, 0, 0, 0, 1, Yi.Matrix[0]), Yi.Matrix[1].multiplyToRef(Yi.Matrix[0], Yi.Matrix[0]), Ae.TranslationToRef(0, 0, o, Yi.Matrix[1]), Yi.Matrix[0].multiplyToRef(Yi.Matrix[1], Yi.Matrix[0]), Ae.OrthoOffCenterLHToRef(e, t, i, r, s, n, u, h), Yi.Matrix[0].multiplyToRef(u, u), u; } /** * Creates a right-handed orthographic projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#76 * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns a new matrix as a right-handed orthographic projection matrix */ static OrthoOffCenterRH(e, t, i, r, s, n, a) { const l = new Ae(); return Ae.OrthoOffCenterRHToRef(e, t, i, r, s, n, l, a), l; } /** * Stores a right-handed orthographic projection into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#77 * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param result defines the target matrix * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns result input */ static OrthoOffCenterRHToRef(e, t, i, r, s, n, a, l) { return Ae.OrthoOffCenterLHToRef(e, t, i, r, s, n, a, l), a._m[10] *= -1, a; } /** * Stores a right-handed oblique projection into a given matrix * @param left defines the viewport left coordinate * @param right defines the viewport right coordinate * @param bottom defines the viewport bottom coordinate * @param top defines the viewport top coordinate * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param angle Angle (along X/Y Plane) to apply shear * @param length Length of the shear * @param distance Distance from shear point * @param result defines the target matrix * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @returns result input */ static ObliqueOffCenterRHToRef(e, t, i, r, s, n, a, l, o, u, h) { const d = a * Math.cos(l), f = a * Math.sin(l); return Ae.TranslationToRef(0, 0, o, Yi.Matrix[1]), Ae.FromValuesToRef(1, 0, 0, 0, 0, 1, 0, 0, d, f, 1, 0, 0, 0, 0, 1, Yi.Matrix[0]), Yi.Matrix[1].multiplyToRef(Yi.Matrix[0], Yi.Matrix[0]), Ae.TranslationToRef(0, 0, -o, Yi.Matrix[1]), Yi.Matrix[0].multiplyToRef(Yi.Matrix[1], Yi.Matrix[0]), Ae.OrthoOffCenterRHToRef(e, t, i, r, s, n, u, h), Yi.Matrix[0].multiplyToRef(u, u), u; } /** * Creates a 
left-handed perspective projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#85 * @param width defines the viewport width * @param height defines the viewport height * @param znear defines the near clip plane * @param zfar defines the far clip plane * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @returns a new matrix as a left-handed perspective projection matrix */ static PerspectiveLH(e, t, i, r, s, n = 0) { const a = new Ae(), l = i, o = r, u = 2 * l / e, h = 2 * l / t, d = (o + l) / (o - l), f = -2 * o * l / (o - l), p = Math.tan(n); return Ae.FromValuesToRef(u, 0, 0, 0, 0, h, 0, p, 0, 0, d, 1, 0, 0, f, 0, a), s && a.multiplyToRef(UR, a), a._updateIdentityStatus(!1), a; } /** * Creates a left-handed perspective projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#78 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar defines the far clip plane. If 0, assume we are in "infinite zfar" mode * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @param reverseDepthBufferMode true to indicate that we are in a reverse depth buffer mode (meaning znear and zfar have been inverted when calling the function) * @returns a new matrix as a left-handed perspective projection matrix */ static PerspectiveFovLH(e, t, i, r, s, n = 0, a = !1) { const l = new Ae(); return Ae.PerspectiveFovLHToRef(e, t, i, r, l, !0, s, n, a), l; } /** * Stores a left-handed perspective projection into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#81 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar defines the far clip plane. If 0, assume we are in "infinite zfar" mode * @param result defines the target matrix * @param isVerticalFovFixed defines it the fov is vertically fixed (default) or horizontally * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @param reverseDepthBufferMode true to indicate that we are in a reverse depth buffer mode (meaning znear and zfar have been inverted when calling the function) * @returns result input */ static PerspectiveFovLHToRef(e, t, i, r, s, n = !0, a, l = 0, o = !1) { const u = i, h = r, d = 1 / Math.tan(e * 0.5), f = n ? d / t : d, p = n ? d : d * t, m = o && u === 0 ? -1 : h !== 0 ? (h + u) / (h - u) : 1, _ = o && u === 0 ? 2 * h : h !== 0 ? 
-2 * h * u / (h - u) : -2 * u, v = Math.tan(l); return Ae.FromValuesToRef(f, 0, 0, 0, 0, p, 0, v, 0, 0, m, 1, 0, 0, _, 0, s), a && s.multiplyToRef(UR, s), s._updateIdentityStatus(!1), s; } /** * Stores a left-handed perspective projection into a given matrix with depth reversed * Example Playground - https://playground.babylonjs.com/#AV9X17#89 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar not used as infinity is used as far clip * @param result defines the target matrix * @param isVerticalFovFixed defines it the fov is vertically fixed (default) or horizontally * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @returns result input */ static PerspectiveFovReverseLHToRef(e, t, i, r, s, n = !0, a, l = 0) { const o = 1 / Math.tan(e * 0.5), u = n ? o / t : o, h = n ? o : o * t, d = Math.tan(l); return Ae.FromValuesToRef(u, 0, 0, 0, 0, h, 0, d, 0, 0, -i, 1, 0, 0, 1, 0, s), a && s.multiplyToRef(UR, s), s._updateIdentityStatus(!1), s; } /** * Creates a right-handed perspective projection matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#83 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar defines the far clip plane. If 0, assume we are in "infinite zfar" mode * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @param reverseDepthBufferMode true to indicate that we are in a reverse depth buffer mode (meaning znear and zfar have been inverted when calling the function) * @returns a new matrix as a right-handed perspective projection matrix */ static PerspectiveFovRH(e, t, i, r, s, n = 0, a = !1) { const l = new Ae(); return Ae.PerspectiveFovRHToRef(e, t, i, r, l, !0, s, n, a), l; } /** * Stores a right-handed perspective projection into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#84 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar defines the far clip plane. If 0, assume we are in "infinite zfar" mode * @param result defines the target matrix * @param isVerticalFovFixed defines it the fov is vertically fixed (default) or horizontally * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @param reverseDepthBufferMode true to indicate that we are in a reverse depth buffer mode (meaning znear and zfar have been inverted when calling the function) * @returns result input */ static PerspectiveFovRHToRef(e, t, i, r, s, n = !0, a, l = 0, o = !1) { const u = i, h = r, d = 1 / Math.tan(e * 0.5), f = n ? d / t : d, p = n ? d : d * t, m = o && u === 0 ? 1 : h !== 0 ? -(h + u) / (h - u) : -1, _ = o && u === 0 ? 2 * h : h !== 0 ? 
-2 * h * u / (h - u) : -2 * u, v = Math.tan(l); return Ae.FromValuesToRef(f, 0, 0, 0, 0, p, 0, v, 0, 0, m, -1, 0, 0, _, 0, s), a && s.multiplyToRef(UR, s), s._updateIdentityStatus(!1), s; } /** * Stores a right-handed perspective projection into a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#90 * @param fov defines the horizontal field of view * @param aspect defines the aspect ratio * @param znear defines the near clip plane * @param zfar not used as infinity is used as far clip * @param result defines the target matrix * @param isVerticalFovFixed defines it the fov is vertically fixed (default) or horizontally * @param halfZRange true to generate NDC coordinates between 0 and 1 instead of -1 and 1 (default: false) * @param projectionPlaneTilt optional tilt angle of the projection plane around the X axis (horizontal) * @returns result input */ static PerspectiveFovReverseRHToRef(e, t, i, r, s, n = !0, a, l = 0) { const o = 1 / Math.tan(e * 0.5), u = n ? o / t : o, h = n ? o : o * t, d = Math.tan(l); return Ae.FromValuesToRef(u, 0, 0, 0, 0, h, 0, d, 0, 0, -i, -1, 0, 0, -1, 0, s), a && s.multiplyToRef(UR, s), s._updateIdentityStatus(!1), s; } /** * Computes a complete transformation matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#113 * @param viewport defines the viewport to use * @param world defines the world matrix * @param view defines the view matrix * @param projection defines the projection matrix * @param zmin defines the near clip plane * @param zmax defines the far clip plane * @returns the transformation matrix */ static GetFinalMatrix(e, t, i, r, s, n) { const a = e.width, l = e.height, o = e.x, u = e.y, h = Ae.FromValues(a / 2, 0, 0, 0, 0, -l / 2, 0, 0, 0, 0, n - s, 0, o + a / 2, l / 2 + u, s, 1), d = new t.constructor(); return t.multiplyToRef(i, d), d.multiplyToRef(r, d), d.multiplyToRef(h, d); } /** * Extracts a 2x2 matrix from a given matrix and store the result in a Float32Array * @param matrix defines the matrix to use * @returns a new Float32Array array with 4 elements : the 2x2 matrix extracted from the given matrix */ static GetAsMatrix2x2(e) { const t = e.m, i = [t[0], t[1], t[4], t[5]]; return Uu.MatrixUse64Bits ? i : new Float32Array(i); } /** * Extracts a 3x3 matrix from a given matrix and store the result in a Float32Array * @param matrix defines the matrix to use * @returns a new Float32Array array with 9 elements : the 3x3 matrix extracted from the given matrix */ static GetAsMatrix3x3(e) { const t = e.m, i = [t[0], t[1], t[2], t[4], t[5], t[6], t[8], t[9], t[10]]; return Uu.MatrixUse64Bits ? 
i : new Float32Array(i); } /** * Compute the transpose of a given matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#111 * @param matrix defines the matrix to transpose * @returns the new matrix */ static Transpose(e) { const t = new e.constructor(); return Ae.TransposeToRef(e, t), t; } /** * Compute the transpose of a matrix and store it in a target matrix * Example Playground - https://playground.babylonjs.com/#AV9X17#112 * @param matrix defines the matrix to transpose * @param result defines the target matrix * @returns result input */ static TransposeToRef(e, t) { const i = e.m, r = i[0], s = i[4], n = i[8], a = i[12], l = i[1], o = i[5], u = i[9], h = i[13], d = i[2], f = i[6], p = i[10], m = i[14], _ = i[3], v = i[7], C = i[11], x = i[15], b = t._m; return b[0] = r, b[1] = s, b[2] = n, b[3] = a, b[4] = l, b[5] = o, b[6] = u, b[7] = h, b[8] = d, b[9] = f, b[10] = p, b[11] = m, b[12] = _, b[13] = v, b[14] = C, b[15] = x, t.markAsUpdated(), t._updateIdentityStatus(e._isIdentity, e._isIdentityDirty), t; } /** * Computes a reflection matrix from a plane * Example Playground - https://playground.babylonjs.com/#AV9X17#87 * @param plane defines the reflection plane * @returns a new matrix */ static Reflection(e) { const t = new Ae(); return Ae.ReflectionToRef(e, t), t; } /** * Computes a reflection matrix from a plane * Example Playground - https://playground.babylonjs.com/#AV9X17#88 * @param plane defines the reflection plane * @param result defines the target matrix * @returns result input */ static ReflectionToRef(e, t) { e.normalize(); const i = e.normal.x, r = e.normal.y, s = e.normal.z, n = -2 * i, a = -2 * r, l = -2 * s; return Ae.FromValuesToRef(n * i + 1, a * i, l * i, 0, n * r, a * r + 1, l * r, 0, n * s, a * s, l * s + 1, 0, n * e.d, a * e.d, l * e.d, 1, t), t; } /** * Sets the given matrix as a rotation matrix composed from the 3 left handed axes * @param xaxis defines the value of the 1st axis * @param yaxis defines the value of the 2nd axis * @param zaxis defines the value of the 3rd axis * @param result defines the target matrix * @returns result input */ static FromXYZAxesToRef(e, t, i, r) { return Ae.FromValuesToRef(e._x, e._y, e._z, 0, t._x, t._y, t._z, 0, i._x, i._y, i._z, 0, 0, 0, 0, 1, r), r; } /** * Creates a rotation matrix from a quaternion and stores it in a target matrix * @param quat defines the quaternion to use * @param result defines the target matrix * @returns result input */ static FromQuaternionToRef(e, t) { const i = e._x * e._x, r = e._y * e._y, s = e._z * e._z, n = e._x * e._y, a = e._z * e._w, l = e._z * e._x, o = e._y * e._w, u = e._y * e._z, h = e._x * e._w; return t._m[0] = 1 - 2 * (r + s), t._m[1] = 2 * (n + a), t._m[2] = 2 * (l - o), t._m[3] = 0, t._m[4] = 2 * (n - a), t._m[5] = 1 - 2 * (s + i), t._m[6] = 2 * (u + h), t._m[7] = 0, t._m[8] = 2 * (l + o), t._m[9] = 2 * (u - h), t._m[10] = 1 - 2 * (r + i), t._m[11] = 0, t._m[12] = 0, t._m[13] = 0, t._m[14] = 0, t._m[15] = 1, t.markAsUpdated(), t; } } Ae._UpdateFlagSeed = 0; Ae._IdentityReadOnly = Ae.Identity(); class Yi { } Yi.Vector3 = kc.BuildTuple(11, D.Zero); Yi.Matrix = kc.BuildTuple(2, Ae.Identity); Yi.Quaternion = kc.BuildTuple(3, Ze.Zero); class de { } de.Vector2 = kc.BuildTuple(3, at.Zero); de.Vector3 = kc.BuildTuple(13, D.Zero); de.Vector4 = kc.BuildTuple(3, Di.Zero); de.Quaternion = kc.BuildTuple(2, Ze.Zero); de.Matrix = kc.BuildTuple(8, Ae.Identity); Be("BABYLON.Vector2", at); Be("BABYLON.Vector3", D); Be("BABYLON.Vector4", Di); Be("BABYLON.Matrix", Ae); const UR = 
Ae.FromValues(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0.5, 0, 0, 0, 0.5, 1); function aO(c) { return Math.pow(c, V9); } function oO(c) { return c <= 0.04045 ? 0.0773993808 * c : Math.pow(0.947867299 * (c + 0.055), 2.4); } function lO(c) { return Math.pow(c, nO); } function cO(c) { return c <= 31308e-7 ? 12.92 * c : 1.055 * Math.pow(c, 0.41666) - 0.055; } class ze { /** * Creates a new Color3 object from red, green, blue values, all between 0 and 1 * @param r defines the red component (between 0 and 1, default is 0) * @param g defines the green component (between 0 and 1, default is 0) * @param b defines the blue component (between 0 and 1, default is 0) */ constructor(e = 0, t = 0, i = 0) { this.r = e, this.g = t, this.b = i; } /** * Creates a string with the Color3 current values * @returns the string representation of the Color3 object */ toString() { return "{R: " + this.r + " G:" + this.g + " B:" + this.b + "}"; } /** * Returns the string "Color3" * @returns "Color3" */ getClassName() { return "Color3"; } /** * Compute the Color3 hash code * @returns an unique number that can be used to hash Color3 objects */ getHashCode() { let e = this.r * 255 | 0; return e = e * 397 ^ (this.g * 255 | 0), e = e * 397 ^ (this.b * 255 | 0), e; } // Operators /** * Stores in the given array from the given starting index the red, green, blue values as successive elements * @param array defines the array where to store the r,g,b components * @param index defines an optional index in the target array to define where to start storing values * @returns the current Color3 object */ toArray(e, t = 0) { return e[t] = this.r, e[t + 1] = this.g, e[t + 2] = this.b, this; } /** * Update the current color with values stored in an array from the starting index of the given array * @param array defines the source array * @param offset defines an offset in the source array * @returns the current Color3 object */ fromArray(e, t = 0) { return ze.FromArrayToRef(e, t, this), this; } /** * Returns a new Color4 object from the current Color3 and the given alpha * @param alpha defines the alpha component on the new Color4 object (default is 1) * @returns a new Color4 object */ toColor4(e = 1) { return new Et(this.r, this.g, this.b, e); } /** * Returns a new array populated with 3 numeric elements : red, green and blue values * @returns the new array */ asArray() { return [this.r, this.g, this.b]; } /** * Returns the luminance value * @returns a float value */ toLuminance() { return this.r * 0.3 + this.g * 0.59 + this.b * 0.11; } /** * Multiply each Color3 rgb values by the given Color3 rgb values in a new Color3 object * @param otherColor defines the second operand * @returns the new Color3 object */ multiply(e) { return new ze(this.r * e.r, this.g * e.g, this.b * e.b); } /** * Multiply the rgb values of the Color3 and the given Color3 and stores the result in the object "result" * @param otherColor defines the second operand * @param result defines the Color3 object where to store the result * @returns the current Color3 */ multiplyToRef(e, t) { return t.r = this.r * e.r, t.g = this.g * e.g, t.b = this.b * e.b, this; } /** * Determines equality between Color3 objects * @param otherColor defines the second operand * @returns true if the rgb values are equal to the given ones */ equals(e) { return e && this.r === e.r && this.g === e.g && this.b === e.b; } /** * Determines equality between the current Color3 object and a set of r,b,g values * @param r defines the red component to check * @param g defines the green component to check * 
@param b defines the blue component to check * @returns true if the rgb values are equal to the given ones */ equalsFloats(e, t, i) { return this.r === e && this.g === t && this.b === i; } /** * Creates a new Color3 with the current Color3 values multiplied by scale * @param scale defines the scaling factor to apply * @returns a new Color3 object */ scale(e) { return new ze(this.r * e, this.g * e, this.b * e); } /** * Multiplies the Color3 values by the float "scale" * @param scale defines the scaling factor to apply * @returns the current updated Color3 */ scaleInPlace(e) { return this.r *= e, this.g *= e, this.b *= e, this; } /** * Multiplies the rgb values by scale and stores the result into "result" * @param scale defines the scaling factor * @param result defines the Color3 object where to store the result * @returns the unmodified current Color3 */ scaleToRef(e, t) { return t.r = this.r * e, t.g = this.g * e, t.b = this.b * e, this; } /** * Scale the current Color3 values by a factor and add the result to a given Color3 * @param scale defines the scale factor * @param result defines color to store the result into * @returns the unmodified current Color3 */ scaleAndAddToRef(e, t) { return t.r += this.r * e, t.g += this.g * e, t.b += this.b * e, this; } /** * Clamps the rgb values by the min and max values and stores the result into "result" * @param min defines minimum clamping value (default is 0) * @param max defines maximum clamping value (default is 1) * @param result defines color to store the result into * @returns the original Color3 */ clampToRef(e = 0, t = 1, i) { return i.r = yt.Clamp(this.r, e, t), i.g = yt.Clamp(this.g, e, t), i.b = yt.Clamp(this.b, e, t), this; } /** * Creates a new Color3 set with the added values of the current Color3 and of the given one * @param otherColor defines the second operand * @returns the new Color3 */ add(e) { return new ze(this.r + e.r, this.g + e.g, this.b + e.b); } /** * Stores the result of the addition of the current Color3 and given one rgb values into "result" * @param otherColor defines the second operand * @param result defines Color3 object to store the result into * @returns the unmodified current Color3 */ addToRef(e, t) { return t.r = this.r + e.r, t.g = this.g + e.g, t.b = this.b + e.b, this; } /** * Returns a new Color3 set with the subtracted values of the given one from the current Color3 * @param otherColor defines the second operand * @returns the new Color3 */ subtract(e) { return new ze(this.r - e.r, this.g - e.g, this.b - e.b); } /** * Stores the result of the subtraction of given one from the current Color3 rgb values into "result" * @param otherColor defines the second operand * @param result defines Color3 object to store the result into * @returns the unmodified current Color3 */ subtractToRef(e, t) { return t.r = this.r - e.r, t.g = this.g - e.g, t.b = this.b - e.b, this; } /** * Copy the current object * @returns a new Color3 copied the current one */ clone() { return new ze(this.r, this.g, this.b); } /** * Copies the rgb values from the source in the current Color3 * @param source defines the source Color3 object * @returns the updated Color3 object */ copyFrom(e) { return this.r = e.r, this.g = e.g, this.b = e.b, this; } /** * Updates the Color3 rgb values from the given floats * @param r defines the red component to read from * @param g defines the green component to read from * @param b defines the blue component to read from * @returns the current Color3 object */ copyFromFloats(e, t, i) { return this.r = e, 
this.g = t, this.b = i, this; } /** * Updates the Color3 rgb values from the given floats * @param r defines the red component to read from * @param g defines the green component to read from * @param b defines the blue component to read from * @returns the current Color3 object */ set(e, t, i) { return this.copyFromFloats(e, t, i); } /** * Compute the Color3 hexadecimal code as a string * @returns a string containing the hexadecimal representation of the Color3 object */ toHexString() { const e = Math.round(this.r * 255), t = Math.round(this.g * 255), i = Math.round(this.b * 255); return "#" + yt.ToHex(e) + yt.ToHex(t) + yt.ToHex(i); } /** * Converts current color in rgb space to HSV values * @returns a new color3 representing the HSV values */ toHSV() { const e = new ze(); return this.toHSVToRef(e), e; } /** * Converts current color in rgb space to HSV values * @param result defines the Color3 where to store the HSV values */ toHSVToRef(e) { const t = this.r, i = this.g, r = this.b, s = Math.max(t, i, r), n = Math.min(t, i, r); let a = 0, l = 0; const o = s, u = s - n; s !== 0 && (l = u / s), s != n && (s == t ? (a = (i - r) / u, i < r && (a += 6)) : s == i ? a = (r - t) / u + 2 : s == r && (a = (t - i) / u + 4), a *= 60), e.r = a, e.g = l, e.b = o; } /** * Computes a new Color3 converted from the current one to linear space * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns a new Color3 object */ toLinearSpace(e = !1) { const t = new ze(); return this.toLinearSpaceToRef(t, e), t; } /** * Converts the Color3 values to linear space and stores the result in "convertedColor" * @param convertedColor defines the Color3 object where to store the linear space version * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns the unmodified Color3 */ toLinearSpaceToRef(e, t = !1) { return t ? (e.r = oO(this.r), e.g = oO(this.g), e.b = oO(this.b)) : (e.r = aO(this.r), e.g = aO(this.g), e.b = aO(this.b)), this; } /** * Computes a new Color3 converted from the current one to gamma space * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns a new Color3 object */ toGammaSpace(e = !1) { const t = new ze(); return this.toGammaSpaceToRef(t, e), t; } /** * Converts the Color3 values to gamma space and stores the result in "convertedColor" * @param convertedColor defines the Color3 object where to store the gamma space version * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns the unmodified Color3 */ toGammaSpaceToRef(e, t = !1) { return t ? (e.r = cO(this.r), e.g = cO(this.g), e.b = cO(this.b)) : (e.r = lO(this.r), e.g = lO(this.g), e.b = lO(this.b)), this; } /** * Converts Hue, saturation and value to a Color3 (RGB) * @param hue defines the hue (value between 0 and 360) * @param saturation defines the saturation (value between 0 and 1) * @param value defines the value (value between 0 and 1) * @param result defines the Color3 where to store the RGB values */ static HSVtoRGBToRef(e, t, i, r) { const s = i * t, n = e / 60, a = s * (1 - Math.abs(n % 2 - 1)); let l = 0, o = 0, u = 0; n >= 0 && n <= 1 ? (l = s, o = a) : n >= 1 && n <= 2 ? (l = a, o = s) : n >= 2 && n <= 3 ? (o = s, u = a) : n >= 3 && n <= 4 ? (o = a, u = s) : n >= 4 && n <= 5 ? 
(l = a, u = s) : n >= 5 && n <= 6 && (l = s, u = a); const h = i - s; r.set(l + h, o + h, u + h); } /** * Converts Hue, saturation and value to a new Color3 (RGB) * @param hue defines the hue (value between 0 and 360) * @param saturation defines the saturation (value between 0 and 1) * @param value defines the value (value between 0 and 1) * @returns a new Color3 object */ static FromHSV(e, t, i) { const r = new ze(0, 0, 0); return ze.HSVtoRGBToRef(e, t, i, r), r; } /** * Creates a new Color3 from the string containing valid hexadecimal values * @param hex defines a string containing valid hexadecimal values * @returns a new Color3 object */ static FromHexString(e) { if (e.substring(0, 1) !== "#" || e.length !== 7) return new ze(0, 0, 0); const t = parseInt(e.substring(1, 3), 16), i = parseInt(e.substring(3, 5), 16), r = parseInt(e.substring(5, 7), 16); return ze.FromInts(t, i, r); } /** * Creates a new Color3 from the starting index of the given array * @param array defines the source array * @param offset defines an offset in the source array * @returns a new Color3 object */ static FromArray(e, t = 0) { return new ze(e[t], e[t + 1], e[t + 2]); } /** * Creates a new Color3 from the starting index element of the given array * @param array defines the source array to read from * @param offset defines the offset in the source array * @param result defines the target Color3 object */ static FromArrayToRef(e, t = 0, i) { i.r = e[t], i.g = e[t + 1], i.b = e[t + 2]; } /** * Creates a new Color3 from integer values (< 256) * @param r defines the red component to read from (value between 0 and 255) * @param g defines the green component to read from (value between 0 and 255) * @param b defines the blue component to read from (value between 0 and 255) * @returns a new Color3 object */ static FromInts(e, t, i) { return new ze(e / 255, t / 255, i / 255); } /** * Creates a new Color3 with values linearly interpolated of "amount" between the start Color3 and the end Color3 * @param start defines the start Color3 value * @param end defines the end Color3 value * @param amount defines the gradient value between start and end * @returns a new Color3 object */ static Lerp(e, t, i) { const r = new ze(0, 0, 0); return ze.LerpToRef(e, t, i, r), r; } /** * Creates a new Color3 with values linearly interpolated of "amount" between the start Color3 and the end Color3 * @param left defines the start value * @param right defines the end value * @param amount defines the gradient factor * @param result defines the Color3 object where to store the result */ static LerpToRef(e, t, i, r) { r.r = e.r + (t.r - e.r) * i, r.g = e.g + (t.g - e.g) * i, r.b = e.b + (t.b - e.b) * i; } /** * Returns a new Color3 located for "amount" (float) on the Hermite interpolation spline defined by the vectors "value1", "tangent1", "value2", "tangent2" * @param value1 defines the first control point * @param tangent1 defines the first tangent Color3 * @param value2 defines the second control point * @param tangent2 defines the second tangent Color3 * @param amount defines the amount on the interpolation spline (between 0 and 1) * @returns the new Color3 */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n, d = e.r * l + i.r * o + t.r * u + r.r * h, f = e.g * l + i.g * o + t.g * u + r.g * h, p = e.b * l + i.b * o + t.b * u + r.b * h; return new ze(d, f, p); } /** * Returns a new Color3 which is the 1st derivative of the Hermite spline defined by the colors 
"value1", "value2", "tangent1", "tangent2". * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = ze.Black(); return this.Hermite1stDerivativeToRef(e, t, i, r, s, n), n; } /** * Returns a new Color3 which is the 1st derivative of the Hermite spline defined by the colors "value1", "value2", "tangent1", "tangent2". * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @param result define where to store the derivative */ static Hermite1stDerivativeToRef(e, t, i, r, s, n) { const a = s * s; n.r = (a - s) * 6 * e.r + (3 * a - 4 * s + 1) * t.r + (-a + s) * 6 * i.r + (3 * a - 2 * s) * r.r, n.g = (a - s) * 6 * e.g + (3 * a - 4 * s + 1) * t.g + (-a + s) * 6 * i.g + (3 * a - 2 * s) * r.g, n.b = (a - s) * 6 * e.b + (3 * a - 4 * s + 1) * t.b + (-a + s) * 6 * i.b + (3 * a - 2 * s) * r.b; } /** * Returns a Color3 value containing a red color * @returns a new Color3 object */ static Red() { return new ze(1, 0, 0); } /** * Returns a Color3 value containing a green color * @returns a new Color3 object */ static Green() { return new ze(0, 1, 0); } /** * Returns a Color3 value containing a blue color * @returns a new Color3 object */ static Blue() { return new ze(0, 0, 1); } /** * Returns a Color3 value containing a black color * @returns a new Color3 object */ static Black() { return new ze(0, 0, 0); } /** * Gets a Color3 value containing a black color that must not be updated */ static get BlackReadOnly() { return ze._BlackReadOnly; } /** * Returns a Color3 value containing a white color * @returns a new Color3 object */ static White() { return new ze(1, 1, 1); } /** * Returns a Color3 value containing a purple color * @returns a new Color3 object */ static Purple() { return new ze(0.5, 0, 0.5); } /** * Returns a Color3 value containing a magenta color * @returns a new Color3 object */ static Magenta() { return new ze(1, 0, 1); } /** * Returns a Color3 value containing a yellow color * @returns a new Color3 object */ static Yellow() { return new ze(1, 1, 0); } /** * Returns a Color3 value containing a gray color * @returns a new Color3 object */ static Gray() { return new ze(0.5, 0.5, 0.5); } /** * Returns a Color3 value containing a teal color * @returns a new Color3 object */ static Teal() { return new ze(0, 1, 1); } /** * Returns a Color3 value containing a random color * @returns a new Color3 object */ static Random() { return new ze(Math.random(), Math.random(), Math.random()); } } ze._BlackReadOnly = ze.Black(); class Et { /** * Creates a new Color4 object from red, green, blue values, all between 0 and 1 * @param r defines the red component (between 0 and 1, default is 0) * @param g defines the green component (between 0 and 1, default is 0) * @param b defines the blue component (between 0 and 1, default is 0) * @param a defines the alpha component (between 0 and 1, default is 1) */ constructor(e = 0, t = 0, i = 0, r = 1) { this.r = e, this.g = t, this.b = i, this.a = r; } // Operators /** * Adds in place the given Color4 values to the current Color4 object * @param right defines the second operand * @returns the current updated Color4 object */ 
addInPlace(e) { return this.r += e.r, this.g += e.g, this.b += e.b, this.a += e.a, this; } /** * Creates a new array populated with 4 numeric elements : red, green, blue, alpha values * @returns the new array */ asArray() { return [this.r, this.g, this.b, this.a]; } /** * Stores from the starting index in the given array the Color4 successive values * @param array defines the array where to store the r,g,b components * @param index defines an optional index in the target array to define where to start storing values * @returns the current Color4 object */ toArray(e, t = 0) { return e[t] = this.r, e[t + 1] = this.g, e[t + 2] = this.b, e[t + 3] = this.a, this; } /** * Update the current color with values stored in an array from the starting index of the given array * @param array defines the source array * @param offset defines an offset in the source array * @returns the current Color4 object */ fromArray(e, t = 0) { return Et.FromArrayToRef(e, t, this), this; } /** * Determines equality between Color4 objects * @param otherColor defines the second operand * @returns true if the rgba values are equal to the given ones */ equals(e) { return e && this.r === e.r && this.g === e.g && this.b === e.b && this.a === e.a; } /** * Creates a new Color4 set with the added values of the current Color4 and of the given one * @param right defines the second operand * @returns a new Color4 object */ add(e) { return new Et(this.r + e.r, this.g + e.g, this.b + e.b, this.a + e.a); } /** * Creates a new Color4 set with the subtracted values of the given one from the current Color4 * @param right defines the second operand * @returns a new Color4 object */ subtract(e) { return new Et(this.r - e.r, this.g - e.g, this.b - e.b, this.a - e.a); } /** * Subtracts the given ones from the current Color4 values and stores the results in "result" * @param right defines the second operand * @param result defines the Color4 object where to store the result * @returns the current Color4 object */ subtractToRef(e, t) { return t.r = this.r - e.r, t.g = this.g - e.g, t.b = this.b - e.b, t.a = this.a - e.a, this; } /** * Creates a new Color4 with the current Color4 values multiplied by scale * @param scale defines the scaling factor to apply * @returns a new Color4 object */ scale(e) { return new Et(this.r * e, this.g * e, this.b * e, this.a * e); } /** * Multiplies the Color4 values by the float "scale" * @param scale defines the scaling factor to apply * @returns the current updated Color4 */ scaleInPlace(e) { return this.r *= e, this.g *= e, this.b *= e, this.a *= e, this; } /** * Multiplies the current Color4 values by scale and stores the result in "result" * @param scale defines the scaling factor to apply * @param result defines the Color4 object where to store the result * @returns the current unmodified Color4 */ scaleToRef(e, t) { return t.r = this.r * e, t.g = this.g * e, t.b = this.b * e, t.a = this.a * e, this; } /** * Scale the current Color4 values by a factor and add the result to a given Color4 * @param scale defines the scale factor * @param result defines the Color4 object where to store the result * @returns the unmodified current Color4 */ scaleAndAddToRef(e, t) { return t.r += this.r * e, t.g += this.g * e, t.b += this.b * e, t.a += this.a * e, this; } /** * Clamps the rgb values by the min and max values and stores the result into "result" * @param min defines minimum clamping value (default is 0) * @param max defines maximum clamping value (default is 1) * @param result defines color to store the result 
into. * @returns the current Color4 */ clampToRef(e = 0, t = 1, i) { return i.r = yt.Clamp(this.r, e, t), i.g = yt.Clamp(this.g, e, t), i.b = yt.Clamp(this.b, e, t), i.a = yt.Clamp(this.a, e, t), this; } /** * Multiply an Color4 value by another and return a new Color4 object * @param color defines the Color4 value to multiply by * @returns a new Color4 object */ multiply(e) { return new Et(this.r * e.r, this.g * e.g, this.b * e.b, this.a * e.a); } /** * Multiply a Color4 value by another and push the result in a reference value * @param color defines the Color4 value to multiply by * @param result defines the Color4 to fill the result in * @returns the result Color4 */ multiplyToRef(e, t) { return t.r = this.r * e.r, t.g = this.g * e.g, t.b = this.b * e.b, t.a = this.a * e.a, t; } /** * Creates a string with the Color4 current values * @returns the string representation of the Color4 object */ toString() { return "{R: " + this.r + " G:" + this.g + " B:" + this.b + " A:" + this.a + "}"; } /** * Returns the string "Color4" * @returns "Color4" */ getClassName() { return "Color4"; } /** * Compute the Color4 hash code * @returns an unique number that can be used to hash Color4 objects */ getHashCode() { let e = this.r * 255 | 0; return e = e * 397 ^ (this.g * 255 | 0), e = e * 397 ^ (this.b * 255 | 0), e = e * 397 ^ (this.a * 255 | 0), e; } /** * Creates a new Color4 copied from the current one * @returns a new Color4 object */ clone() { return new Et(this.r, this.g, this.b, this.a); } /** * Copies the given Color4 values into the current one * @param source defines the source Color4 object * @returns the current updated Color4 object */ copyFrom(e) { return this.r = e.r, this.g = e.g, this.b = e.b, this.a = e.a, this; } /** * Copies the given float values into the current one * @param r defines the red component to read from * @param g defines the green component to read from * @param b defines the blue component to read from * @param a defines the alpha component to read from * @returns the current updated Color4 object */ copyFromFloats(e, t, i, r) { return this.r = e, this.g = t, this.b = i, this.a = r, this; } /** * Copies the given float values into the current one * @param r defines the red component to read from * @param g defines the green component to read from * @param b defines the blue component to read from * @param a defines the alpha component to read from * @returns the current updated Color4 object */ set(e, t, i, r) { return this.copyFromFloats(e, t, i, r); } /** * Compute the Color4 hexadecimal code as a string * @param returnAsColor3 defines if the string should only contains RGB values (off by default) * @returns a string containing the hexadecimal representation of the Color4 object */ toHexString(e = !1) { const t = Math.round(this.r * 255), i = Math.round(this.g * 255), r = Math.round(this.b * 255); if (e) return "#" + yt.ToHex(t) + yt.ToHex(i) + yt.ToHex(r); const s = Math.round(this.a * 255); return "#" + yt.ToHex(t) + yt.ToHex(i) + yt.ToHex(r) + yt.ToHex(s); } /** * Computes a new Color4 converted from the current one to linear space * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns a new Color4 object */ toLinearSpace(e = !1) { const t = new Et(); return this.toLinearSpaceToRef(t, e), t; } /** * Converts the Color4 values to linear space and stores the result in "convertedColor" * @param convertedColor defines the Color4 object where to store the linear space version * @param exact 
defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns the unmodified Color4 */ toLinearSpaceToRef(e, t = !1) { return t ? (e.r = oO(this.r), e.g = oO(this.g), e.b = oO(this.b)) : (e.r = aO(this.r), e.g = aO(this.g), e.b = aO(this.b)), e.a = this.a, this; } /** * Computes a new Color4 converted from the current one to gamma space * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns a new Color4 object */ toGammaSpace(e = !1) { const t = new Et(); return this.toGammaSpaceToRef(t, e), t; } /** * Converts the Color4 values to gamma space and stores the result in "convertedColor" * @param convertedColor defines the Color4 object where to store the gamma space version * @param exact defines if the conversion will be done in an exact way which is slower but more accurate (default is false) * @returns the unmodified Color4 */ toGammaSpaceToRef(e, t = !1) { return t ? (e.r = cO(this.r), e.g = cO(this.g), e.b = cO(this.b)) : (e.r = lO(this.r), e.g = lO(this.g), e.b = lO(this.b)), e.a = this.a, this; } // Statics /** * Creates a new Color4 from the string containing valid hexadecimal values. * * A valid hex string is either in the format #RRGGBB or #RRGGBBAA. * * When a hex string without alpha is passed, the resulting Color4 has * its alpha value set to 1.0. * * An invalid string results in a Color with all its channels set to 0.0, * i.e. "transparent black". * * @param hex defines a string containing valid hexadecimal values * @returns a new Color4 object */ static FromHexString(e) { if (e.substring(0, 1) !== "#" || e.length !== 9 && e.length !== 7) return new Et(0, 0, 0, 0); const t = parseInt(e.substring(1, 3), 16), i = parseInt(e.substring(3, 5), 16), r = parseInt(e.substring(5, 7), 16), s = e.length === 9 ? 
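/* Illustrative usage sketch (editor's addition, kept inside a comment so the bundle's behaviour
   is unchanged): the Color3 class above (`ze`, registered further below as "BABYLON.Color3")
   parses hex strings, round-trips HSV and interpolates linearly. In un-minified application
   code you would reach these helpers through the public BABYLON namespace rather than the
   minified identifier.

   const base = ze.FromHexString("#ff8000");          // r/g/b stored in [0, 1]
   const hsv = base.toHSV();                          // hue in .r (0..360), saturation in .g, value in .b
   const roundTrip = ze.FromHSV(hsv.r, hsv.g, hsv.b); // back to RGB
   const mixed = ze.Lerp(ze.Red(), ze.Blue(), 0.5);
   console.log(mixed.toHexString());                  // "#800080"
*/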
parseInt(e.substring(7, 9), 16) : 255; return Et.FromInts(t, i, r, s); } /** * Creates a new Color4 object set with the linearly interpolated values of "amount" between the left Color4 object and the right Color4 object * @param left defines the start value * @param right defines the end value * @param amount defines the gradient factor * @returns a new Color4 object */ static Lerp(e, t, i) { const r = new Et(0, 0, 0, 0); return Et.LerpToRef(e, t, i, r), r; } /** * Set the given "result" with the linearly interpolated values of "amount" between the left Color4 object and the right Color4 object * @param left defines the start value * @param right defines the end value * @param amount defines the gradient factor * @param result defines the Color4 object where to store data */ static LerpToRef(e, t, i, r) { r.r = e.r + (t.r - e.r) * i, r.g = e.g + (t.g - e.g) * i, r.b = e.b + (t.b - e.b) * i, r.a = e.a + (t.a - e.a) * i; } /** * Interpolate between two Color4 using Hermite interpolation * @param value1 defines first Color4 * @param tangent1 defines the incoming tangent * @param value2 defines second Color4 * @param tangent2 defines the outgoing tangent * @param amount defines the target Color4 * @returns the new interpolated Color4 */ static Hermite(e, t, i, r, s) { const n = s * s, a = s * n, l = 2 * a - 3 * n + 1, o = -2 * a + 3 * n, u = a - 2 * n + s, h = a - n, d = e.r * l + i.r * o + t.r * u + r.r * h, f = e.g * l + i.g * o + t.g * u + r.g * h, p = e.b * l + i.b * o + t.b * u + r.b * h, m = e.a * l + i.a * o + t.a * u + r.a * h; return new Et(d, f, p, m); } /** * Returns a new Color4 which is the 1st derivative of the Hermite spline defined by the colors "value1", "value2", "tangent1", "tangent2". * @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @returns 1st derivative */ static Hermite1stDerivative(e, t, i, r, s) { const n = new Et(); return this.Hermite1stDerivativeToRef(e, t, i, r, s, n), n; } /** * Update a Color4 with the 1st derivative of the Hermite spline defined by the colors "value1", "value2", "tangent1", "tangent2". 
* @param value1 defines the first control point * @param tangent1 defines the first tangent * @param value2 defines the second control point * @param tangent2 defines the second tangent * @param time define where the derivative must be done * @param result define where to store the derivative */ static Hermite1stDerivativeToRef(e, t, i, r, s, n) { const a = s * s; n.r = (a - s) * 6 * e.r + (3 * a - 4 * s + 1) * t.r + (-a + s) * 6 * i.r + (3 * a - 2 * s) * r.r, n.g = (a - s) * 6 * e.g + (3 * a - 4 * s + 1) * t.g + (-a + s) * 6 * i.g + (3 * a - 2 * s) * r.g, n.b = (a - s) * 6 * e.b + (3 * a - 4 * s + 1) * t.b + (-a + s) * 6 * i.b + (3 * a - 2 * s) * r.b, n.a = (a - s) * 6 * e.a + (3 * a - 4 * s + 1) * t.a + (-a + s) * 6 * i.a + (3 * a - 2 * s) * r.a; } /** * Creates a new Color4 from a Color3 and an alpha value * @param color3 defines the source Color3 to read from * @param alpha defines the alpha component (1.0 by default) * @returns a new Color4 object */ static FromColor3(e, t = 1) { return new Et(e.r, e.g, e.b, t); } /** * Creates a new Color4 from the starting index element of the given array * @param array defines the source array to read from * @param offset defines the offset in the source array * @returns a new Color4 object */ static FromArray(e, t = 0) { return new Et(e[t], e[t + 1], e[t + 2], e[t + 3]); } /** * Creates a new Color4 from the starting index element of the given array * @param array defines the source array to read from * @param offset defines the offset in the source array * @param result defines the target Color4 object */ static FromArrayToRef(e, t = 0, i) { i.r = e[t], i.g = e[t + 1], i.b = e[t + 2], i.a = e[t + 3]; } /** * Creates a new Color3 from integer values (< 256) * @param r defines the red component to read from (value between 0 and 255) * @param g defines the green component to read from (value between 0 and 255) * @param b defines the blue component to read from (value between 0 and 255) * @param a defines the alpha component to read from (value between 0 and 255) * @returns a new Color3 object */ static FromInts(e, t, i, r) { return new Et(e / 255, t / 255, i / 255, r / 255); } /** * Check the content of a given array and convert it to an array containing RGBA data * If the original array was already containing count * 4 values then it is returned directly * @param colors defines the array to check * @param count defines the number of RGBA data to expect * @returns an array containing count * 4 values (RGBA) */ static CheckColors4(e, t) { if (e.length === t * 3) { const i = []; for (let r = 0; r < e.length; r += 3) { const s = r / 3 * 4; i[s] = e[r], i[s + 1] = e[r + 1], i[s + 2] = e[r + 2], i[s + 3] = 1; } return i; } return e; } } class mn { } mn.Color3 = kc.BuildArray(3, ze.Black); mn.Color4 = kc.BuildArray(3, () => new Et(0, 0, 0, 0)); Be("BABYLON.Color3", ze); Be("BABYLON.Color4", Et); class Sa { /** * Creates a new Action * @param triggerOptions the trigger, with or without parameters, for the action * @param condition an optional determinant of action */ constructor(e, t) { this.triggerOptions = e, this.onBeforeExecuteObservable = new Fe(), e.parameter ? (this.trigger = e.trigger, this._triggerParameter = e.parameter) : e.trigger ? 
this.trigger = e.trigger : this.trigger = e, this._nextActiveAction = this, this._condition = t; } /** * Internal only * @internal */ _prepare() { } /** * Gets the trigger parameter * @returns the trigger parameter */ getTriggerParameter() { return this._triggerParameter; } /** * Sets the trigger parameter * @param value defines the new trigger parameter */ setTriggerParameter(e) { this._triggerParameter = e; } /** * Internal only - Returns if the current condition allows to run the action * @internal */ _evaluateConditionForCurrentFrame() { const e = this._condition; if (!e) return !0; const t = this._actionManager.getScene().getRenderId(); return e._evaluationId !== t && (e._evaluationId = t, e._currentResult = e.isValid()), e._currentResult; } /** * Internal only - executes current action event * @internal */ _executeCurrent(e) { this._evaluateConditionForCurrentFrame() && (this.onBeforeExecuteObservable.notifyObservers(this), this._nextActiveAction.execute(e), this.skipToNextActiveAction()); } /** * Execute placeholder for child classes * @param evt optional action event */ // eslint-disable-next-line @typescript-eslint/no-unused-vars execute(e) { } /** * Skips to next active action */ skipToNextActiveAction() { this._nextActiveAction._child ? (this._nextActiveAction._child._actionManager || (this._nextActiveAction._child._actionManager = this._actionManager), this._nextActiveAction = this._nextActiveAction._child) : this._nextActiveAction = this; } /** * Adds action to chain of actions, may be a DoNothingAction * @param action defines the next action to execute * @returns The action passed in * @see https://www.babylonjs-playground.com/#1T30HR#0 */ then(e) { return this._child = e, e._actionManager = this._actionManager, e._prepare(), e; } /** * Internal only * @internal */ _getProperty(e) { return this._actionManager._getProperty(e); } /** * @internal */ _getEffectiveTarget(e, t) { return this._actionManager._getEffectiveTarget(e, t); } /** * Serialize placeholder for child classes * @param parent of child * @returns the serialized object */ // eslint-disable-next-line @typescript-eslint/no-unused-vars serialize(e) { } /** * Internal only called by serialize * @internal */ _serialize(e, t) { const i = { type: 1, children: [], name: e.name, properties: e.properties || [] }; if (this._child && this._child.serialize(i), this._condition) { const r = this._condition.serialize(); return r.children.push(i), t && t.children.push(r), r; } return t && t.children.push(i), i; } } Sa._SerializeValueAsString = (c) => typeof c == "number" ? c.toString() : typeof c == "boolean" ? c ? "true" : "false" : c instanceof at ? c.x + ", " + c.y : c instanceof D ? c.x + ", " + c.y + ", " + c.z : c instanceof ze ? c.r + ", " + c.g + ", " + c.b : c instanceof Et ? c.r + ", " + c.g + ", " + c.b + ", " + c.a : c; Sa._GetTargetProperty = (c) => ({ name: "target", targetType: c._isMesh ? "MeshProperties" : c._isLight ? "LightProperties" : c._isCamera ? "CameraProperties" : c._isMaterial ? "MaterialProperties" : "SceneProperties", value: c._isScene ? 
"Scene" : c.name }); Be("BABYLON.Action", Sa); class Ro { /** * Creates a new ActionEvent * @param source The mesh or sprite that triggered the action * @param pointerX The X mouse cursor position at the time of the event * @param pointerY The Y mouse cursor position at the time of the event * @param meshUnderPointer The mesh that is currently pointed at (can be null) * @param sourceEvent the original (browser) event that triggered the ActionEvent * @param additionalData additional data for the event */ constructor(e, t, i, r, s, n) { this.source = e, this.pointerX = t, this.pointerY = i, this.meshUnderPointer = r, this.sourceEvent = s, this.additionalData = n; } /** * Helper function to auto-create an ActionEvent from a source mesh. * @param source The source mesh that triggered the event * @param evt The original (browser) event * @param additionalData additional data for the event * @returns the new ActionEvent */ static CreateNew(e, t, i) { const r = e.getScene(); return new Ro(e, r.pointerX, r.pointerY, r.meshUnderPointer || e, t, i); } /** * Helper function to auto-create an ActionEvent from a source sprite * @param source The source sprite that triggered the event * @param scene Scene associated with the sprite * @param evt The original (browser) event * @param additionalData additional data for the event * @returns the new ActionEvent */ static CreateNewFromSprite(e, t, i, r) { return new Ro(e, t.pointerX, t.pointerY, t.meshUnderPointer, i, r); } /** * Helper function to auto-create an ActionEvent from a scene. If triggered by a mesh use ActionEvent.CreateNew * @param scene the scene where the event occurred * @param evt The original (browser) event * @returns the new ActionEvent */ static CreateNewFromScene(e, t) { return new Ro(null, e.pointerX, e.pointerY, e.meshUnderPointer, t); } /** * Helper function to auto-create an ActionEvent from a primitive * @param prim defines the target primitive * @param pointerPos defines the pointer position * @param evt The original (browser) event * @param additionalData additional data for the event * @returns the new ActionEvent */ static CreateNewFromPrimitive(e, t, i, r) { return new Ro(e, t.x, t.y, null, i, r); } } class NO { /** * Creates a new Condition * @param actionManager the manager of the action the condition is applied to */ constructor(e) { this._actionManager = e; } /** * Check if the current condition is valid * @returns a boolean */ isValid() { return !0; } /** * @internal */ _getProperty(e) { return this._actionManager._getProperty(e); } /** * @internal */ _getEffectiveTarget(e, t) { return this._actionManager._getEffectiveTarget(e, t); } /** * Serialize placeholder for child classes * @returns the serialized object */ serialize() { } /** * @internal */ _serialize(e) { return { type: 2, children: [], name: e.name, properties: e.properties }; } } class Nu extends NO { /** * returns the number for IsEqual */ static get IsEqual() { return Nu._IsEqual; } /** * Returns the number for IsDifferent */ static get IsDifferent() { return Nu._IsDifferent; } /** * Returns the number for IsGreater */ static get IsGreater() { return Nu._IsGreater; } /** * Returns the number for IsLesser */ static get IsLesser() { return Nu._IsLesser; } /** * Creates a new ValueCondition * @param actionManager manager for the action the condition applies to * @param target for the action * @param propertyPath path to specify the property of the target the conditional operator uses * @param value the value compared by the conditional operator against the 
current value of the property * @param operator the conditional operator, default ValueCondition.IsEqual */ constructor(e, t, i, r, s = Nu.IsEqual) { super(e), this.propertyPath = i, this.value = r, this.operator = s, this._target = t, this._effectiveTarget = this._getEffectiveTarget(t, this.propertyPath), this._property = this._getProperty(this.propertyPath); } /** * Compares the given value with the property value for the specified conditional operator * @returns the result of the comparison */ isValid() { switch (this.operator) { case Nu.IsGreater: return this._effectiveTarget[this._property] > this.value; case Nu.IsLesser: return this._effectiveTarget[this._property] < this.value; case Nu.IsEqual: case Nu.IsDifferent: { let e; return this.value.equals ? e = this.value.equals(this._effectiveTarget[this._property]) : e = this.value === this._effectiveTarget[this._property], this.operator === Nu.IsEqual ? e : !e; } } return !1; } /** * Serialize the ValueCondition into a JSON compatible object * @returns serialization object */ serialize() { return this._serialize({ name: "ValueCondition", properties: [ Sa._GetTargetProperty(this._target), { name: "propertyPath", value: this.propertyPath }, { name: "value", value: Sa._SerializeValueAsString(this.value) }, { name: "operator", value: Nu.GetOperatorName(this.operator) } ] }); } /** * Gets the name of the conditional operator for the ValueCondition * @param operator the conditional operator * @returns the name */ static GetOperatorName(e) { switch (e) { case Nu._IsEqual: return "IsEqual"; case Nu._IsDifferent: return "IsDifferent"; case Nu._IsGreater: return "IsGreater"; case Nu._IsLesser: return "IsLesser"; default: return ""; } } } Nu._IsEqual = 0; Nu._IsDifferent = 1; Nu._IsGreater = 2; Nu._IsLesser = 3; class kee extends NO { /** * Creates a new PredicateCondition * @param actionManager manager for the action the condition applies to * @param predicate defines the predicate function used to validate the condition */ constructor(e, t) { super(e), this.predicate = t; } /** * @returns the validity of the predicate condition */ isValid() { return this.predicate(); } } class zee extends NO { /** * Creates a new StateCondition * @param actionManager manager for the action the condition applies to * @param target of the condition * @param value to compare with target state */ constructor(e, t, i) { super(e), this.value = i, this._target = t; } /** * Gets a boolean indicating if the current condition is met * @returns the validity of the state */ isValid() { return this._target.state === this.value; } /** * Serialize the StateCondition into a JSON compatible object * @returns serialization object */ serialize() { return this._serialize({ name: "StateCondition", properties: [Sa._GetTargetProperty(this._target), { name: "value", value: this.value }] }); } } Be("BABYLON.ValueCondition", Nu); Be("BABYLON.PredicateCondition", kee); Be("BABYLON.StateCondition", zee); class Ce { static _CheckLimit(e, t) { let i = Ce._LogLimitOutputs[e]; return i ? i.current++ : (i = { limit: t, current: 1 }, Ce._LogLimitOutputs[e] = i), i.current <= i.limit; } static _GenerateLimitMessage(e, t = 1) { var i; const r = Ce._LogLimitOutputs[e]; if (!r || !Ce.MessageLimitReached) return; const s = this._Levels[t]; r.current === r.limit && Ce[s.name](Ce.MessageLimitReached.replace(/%LIMIT%/g, "" + r.limit).replace(/%TYPE%/g, (i = s.name) !== null && i !== void 0 ? 
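/* Illustrative sketch (editor's addition, commented out): ValueCondition (`Nu`, registered
   above as "BABYLON.ValueCondition") gates an action on a comparison against a property of
   its target. `manager` (an ActionManager instance) and `light` are hypothetical placeholders.

   const condition = new Nu(manager, light, "intensity", 0.5, Nu.IsGreater);
   condition.isValid(); // true while light.intensity > 0.5
*/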
i : "")); } static _AddLogEntry(e) { Ce._LogCache = e + Ce._LogCache, Ce.OnNewCacheEntry && Ce.OnNewCacheEntry(e); } static _FormatMessage(e) { const t = (r) => r < 10 ? "0" + r : "" + r, i = /* @__PURE__ */ new Date(); return "[" + t(i.getHours()) + ":" + t(i.getMinutes()) + ":" + t(i.getSeconds()) + "]: " + e; } // eslint-disable-next-line @typescript-eslint/no-unused-vars static _LogDisabled(e, t) { } static _LogEnabled(e = 1, t, i) { const r = Array.isArray(t) ? t[0] : t; if (i !== void 0 && !Ce._CheckLimit(r, i)) return; const s = Ce._FormatMessage(r), n = this._Levels[e], a = Array.isArray(t) ? t.slice(1) : []; n.logFunc && n.logFunc("BJS - " + s, ...a); const l = `
${s}

`; Ce._AddLogEntry(l), Ce._GenerateLimitMessage(r, e); } /** * Gets current log cache (list of logs) */ static get LogCache() { return Ce._LogCache; } /** * Clears the log cache */ static ClearLogCache() { Ce._LogCache = "", Ce._LogLimitOutputs = {}, Ce.errorsCount = 0; } /** * Sets the current log level (MessageLogLevel / WarningLogLevel / ErrorLogLevel) */ static set LogLevels(e) { Ce.Log = Ce._LogDisabled, Ce.Warn = Ce._LogDisabled, Ce.Error = Ce._LogDisabled, [Ce.MessageLogLevel, Ce.WarningLogLevel, Ce.ErrorLogLevel].forEach((t) => { if ((e & t) === t) { const i = this._Levels[t]; Ce[i.name] = Ce._LogEnabled.bind(Ce, t); } }); } } Ce.NoneLogLevel = 0; Ce.MessageLogLevel = 1; Ce.WarningLogLevel = 2; Ce.ErrorLogLevel = 4; Ce.AllLogLevel = 7; Ce.MessageLimitReached = "Too many %TYPE%s (%LIMIT%), no more %TYPE%s will be reported for this message."; Ce._LogCache = ""; Ce._LogLimitOutputs = {}; Ce._Levels = [ {}, { color: "white", logFunc: console.log, name: "Log" }, { color: "orange", logFunc: console.warn, name: "Warn" }, {}, { color: "red", logFunc: console.error, name: "Error" } ]; Ce.errorsCount = 0; Ce.Log = Ce._LogEnabled.bind(Ce, Ce.MessageLogLevel); Ce.Warn = Ce._LogEnabled.bind(Ce, Ce.WarningLogLevel); Ce.Error = Ce._LogEnabled.bind(Ce, Ce.ErrorLogLevel); class Hee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the object containing the boolean * @param propertyPath defines the path to the boolean property in the target object * @param condition defines the trigger related conditions */ constructor(e, t, i, r) { super(e, r), this.propertyPath = i, this._target = this._effectiveTarget = t; } /** @internal */ _prepare() { this._effectiveTarget = this._getEffectiveTarget(this._effectiveTarget, this.propertyPath), this._property = this._getProperty(this.propertyPath); } /** * Execute the action toggle the boolean value. */ execute() { this._effectiveTarget[this._property] = !this._effectiveTarget[this._property]; } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "SwitchBooleanAction", properties: [Sa._GetTargetProperty(this._target), { name: "propertyPath", value: this.propertyPath }] }, e); } } class Gee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the object containing the state property * @param value defines the value to store in the state field * @param condition defines the trigger related conditions */ constructor(e, t, i, r) { super(e, r), this.value = i, this._target = t; } /** * Execute the action and store the value on the target state property. */ execute() { this._target.state = this.value; } /** * Serializes the actions and its related information. 
* @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "SetStateAction", properties: [Sa._GetTargetProperty(this._target), { name: "value", value: this.value }] }, e); } } class Kee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the object containing the property * @param propertyPath defines the path of the property to set in the target * @param value defines the value to set in the property * @param condition defines the trigger related conditions */ constructor(e, t, i, r, s) { super(e, s), this.propertyPath = i, this.value = r, this._target = this._effectiveTarget = t; } /** @internal */ _prepare() { this._effectiveTarget = this._getEffectiveTarget(this._effectiveTarget, this.propertyPath), this._property = this._getProperty(this.propertyPath); } /** * Execute the action and set the targeted property to the desired value. */ execute() { this._effectiveTarget[this._property] = this.value, this._target.markAsDirty && this._target.markAsDirty(this._property); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "SetValueAction", properties: [ Sa._GetTargetProperty(this._target), { name: "propertyPath", value: this.propertyPath }, { name: "value", value: Sa._SerializeValueAsString(this.value) } ] }, e); } } class Wee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the object containing the property * @param propertyPath defines the path of the property to increment in the target * @param value defines the value value we should increment the property by * @param condition defines the trigger related conditions */ constructor(e, t, i, r, s) { super(e, s), this.propertyPath = i, this.value = r, this._target = this._effectiveTarget = t; } /** @internal */ _prepare() { this._effectiveTarget = this._getEffectiveTarget(this._effectiveTarget, this.propertyPath), this._property = this._getProperty(this.propertyPath), typeof this._effectiveTarget[this._property] != "number" && Ce.Warn("Warning: IncrementValueAction can only be used with number values"); } /** * Execute the action and increment the target of the value amount. */ execute() { this._effectiveTarget[this._property] += this.value, this._target.markAsDirty && this._target.markAsDirty(this._property); } /** * Serializes the actions and its related information. 
* @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "IncrementValueAction", properties: [ Sa._GetTargetProperty(this._target), { name: "propertyPath", value: this.propertyPath }, { name: "value", value: Sa._SerializeValueAsString(this.value) } ] }, e); } } class jee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the target animation or animation name * @param from defines from where the animation should start (animation frame) * @param to defines where the animation should stop (animation frame) * @param loop defines if the animation should loop or stop after the first play * @param condition defines the trigger related conditions */ constructor(e, t, i, r, s, n) { super(e, n), this.from = i, this.to = r, this.loop = s, this._target = t; } /** @internal */ _prepare() { } /** * Execute the action and play the animation. */ execute() { this._actionManager.getScene().beginAnimation(this._target, this.from, this.to, this.loop); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "PlayAnimationAction", properties: [ Sa._GetTargetProperty(this._target), { name: "from", value: String(this.from) }, { name: "to", value: String(this.to) }, { name: "loop", value: Sa._SerializeValueAsString(this.loop) || !1 } ] }, e); } } class Xee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the target animation or animation name * @param condition defines the trigger related conditions */ constructor(e, t, i) { super(e, i), this._target = t; } /** @internal */ _prepare() { } /** * Execute the action and stop the animation. */ execute() { this._actionManager.getScene().stopAnimation(this._target); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "StopAnimationAction", properties: [Sa._GetTargetProperty(this._target)] }, e); } } class YG extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param condition defines the trigger related conditions */ constructor(e = 0, t) { super(e, t); } /** * Execute the action and do nothing. */ execute() { } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "DoNothingAction", properties: [] }, e); } } class Yee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param children defines the list of aggregated animations to run * @param condition defines the trigger related conditions * @param enableChildrenConditions defines if the children actions conditions should be check before execution */ constructor(e, t, i, r = !0) { super(e, i), this.children = t, this.enableChildrenConditions = r; } /** @internal */ _prepare() { for (let e = 0; e < this.children.length; e++) this.children[e]._actionManager = this._actionManager, this.children[e]._prepare(); } /** * Execute the action and executes all the aggregated actions. 
* @param evt */ execute(e) { for (const t of this.children) (!this.enableChildrenConditions || t._evaluateConditionForCurrentFrame()) && t.execute(e); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { const t = super._serialize({ name: "CombineAction", properties: [], combine: [] }, e); for (let i = 0; i < this.children.length; i++) t.combine.push(this.children[i].serialize(null)); return t; } } class Qee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param func defines the callback function to run * @param condition defines the trigger related conditions */ constructor(e, t, i) { super(e, i), this.func = t; } /** * Execute the action and run the attached code. * @param evt */ execute(e) { this.func(e); } } class QG extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the target containing the parent property * @param parent defines from where the animation should start (animation frame) * @param condition defines the trigger related conditions */ constructor(e, t, i, r) { super(e, r), this._target = t, this._parent = i; } /** @internal */ _prepare() { } /** * Execute the action and set the parent property. */ execute() { if (this._target.parent === this._parent) return; const e = this._parent.getWorldMatrix().clone(); e.invert(), this._target.position = D.TransformCoordinates(this._target.position, e), this._target.parent = this._parent; } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "SetParentAction", properties: [Sa._GetTargetProperty(this._target), Sa._GetTargetProperty(this._parent)] }, e); } } Be("BABYLON.SetParentAction", QG); Be("BABYLON.ExecuteCodeAction", Qee); Be("BABYLON.DoNothingAction", YG); Be("BABYLON.StopAnimationAction", Xee); Be("BABYLON.PlayAnimationAction", jee); Be("BABYLON.IncrementValueAction", Wee); Be("BABYLON.SetValueAction", Kee); Be("BABYLON.SetStateAction", Gee); Be("BABYLON.SetParentAction", QG); Be("BABYLON.SwitchBooleanAction", Hee); Be("BABYLON.CombineAction", Yee); const yZ = (c, e, t) => !c || c.getClassName && c.getClassName() === "Mesh" ? null : c.getClassName && (c.getClassName() === "SubMesh" || c.getClassName() === "PhysicsBody") ? c.clone(e) : c.clone ? c.clone() : Array.isArray(c) ? c.slice() : t && typeof c == "object" ? 
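/* Illustrative sketch (editor's addition, commented out): CombineAction (`Yee`, registered
   above as "BABYLON.CombineAction") runs several aggregated actions from a single trigger,
   here a SwitchBooleanAction (`Hee`) and an IncrementValueAction (`Wee`). `manager` and
   `mesh` are hypothetical; `Ln` is the ActionManager class defined further below, which
   supplies the trigger constants.

   manager.registerAction(new Yee(Ln.OnPickTrigger, [
     new Hee(Ln.NothingTrigger, mesh, "isVisible"),
     new Wee(Ln.NothingTrigger, mesh, "visibility", -0.1)
   ]));
*/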
Object.assign({}, c) : null; function wle(c) { const e = []; do Object.getOwnPropertyNames(c).forEach(function(t) { e.indexOf(t) === -1 && e.push(t); }); while (c = Object.getPrototypeOf(c)); return e; } class id { /** * Tries to copy an object by duplicating every property * @param source defines the source object * @param destination defines the target object * @param doNotCopyList defines a list of properties to avoid * @param mustCopyList defines a list of properties to copy (even if they start with _) * @param shallowCopyValues defines wether properties referencing objects (none cloneable) must be shallow copied (false by default) * @remarks shallowCopyValues will not instantite the copied values which makes it only usable for "JSON objects" */ static DeepCopy(e, t, i, r, s = !1) { const n = wle(e); for (const a of n) { if (a[0] === "_" && (!r || r.indexOf(a) === -1) || a.endsWith("Observable") || i && i.indexOf(a) !== -1) continue; const l = e[a], o = typeof l; if (o !== "function") try { if (o === "object") if (l instanceof Uint8Array) t[a] = Uint8Array.from(l); else if (l instanceof Array) { if (t[a] = [], l.length > 0) if (typeof l[0] == "object") for (let u = 0; u < l.length; u++) { const h = yZ(l[u], t, s); t[a].indexOf(h) === -1 && t[a].push(h); } else t[a] = l.slice(0); } else t[a] = yZ(l, t, s); else t[a] = l; } catch (u) { Ce.Warn(u.message); } } } } class Ln extends H_ { /** * Creates a new action manager * @param scene defines the hosting scene */ constructor(e) { super(), e = e || gi.LastCreatedScene, e && (this._scene = e, e.actionManagers.push(this)); } // Methods /** * Releases all associated resources */ dispose() { const e = this._scene.actionManagers.indexOf(this); for (let i = 0; i < this.actions.length; i++) { const r = this.actions[i]; Ln.Triggers[r.trigger]--, Ln.Triggers[r.trigger] === 0 && delete Ln.Triggers[r.trigger]; } this.actions.length = 0, e > -1 && this._scene.actionManagers.splice(e, 1); const t = this._scene.meshes.filter((i) => i.actionManager === this); for (const i of t) i.actionManager = null; } /** * Gets hosting scene * @returns the hosting scene */ getScene() { return this._scene; } /** * Does this action manager handles actions of any of the given triggers * @param triggers defines the triggers to be tested * @returns a boolean indicating whether one (or more) of the triggers is handled */ hasSpecificTriggers(e) { for (let t = 0; t < this.actions.length; t++) { const i = this.actions[t]; if (e.indexOf(i.trigger) > -1) return !0; } return !1; } /** * Does this action manager handles actions of any of the given triggers. This function takes two arguments for * speed. 
* @param triggerA defines the trigger to be tested * @param triggerB defines the trigger to be tested * @returns a boolean indicating whether one (or more) of the triggers is handled */ hasSpecificTriggers2(e, t) { for (let i = 0; i < this.actions.length; i++) { const r = this.actions[i]; if (e == r.trigger || t == r.trigger) return !0; } return !1; } /** * Does this action manager handles actions of a given trigger * @param trigger defines the trigger to be tested * @param parameterPredicate defines an optional predicate to filter triggers by parameter * @returns whether the trigger is handled */ hasSpecificTrigger(e, t) { for (let i = 0; i < this.actions.length; i++) { const r = this.actions[i]; if (r.trigger === e) if (t) { if (t(r.getTriggerParameter())) return !0; } else return !0; } return !1; } /** * Does this action manager has pointer triggers */ get hasPointerTriggers() { for (let e = 0; e < this.actions.length; e++) { const t = this.actions[e]; if (t.trigger >= Ln.OnPickTrigger && t.trigger <= Ln.OnPointerOutTrigger) return !0; } return !1; } /** * Does this action manager has pick triggers */ get hasPickTriggers() { for (let e = 0; e < this.actions.length; e++) { const t = this.actions[e]; if (t.trigger >= Ln.OnPickTrigger && t.trigger <= Ln.OnPickUpTrigger) return !0; } return !1; } /** * Registers an action to this action manager * @param action defines the action to be registered * @returns the action amended (prepared) after registration */ registerAction(e) { return e.trigger === Ln.OnEveryFrameTrigger && this.getScene().actionManager !== this ? (Ce.Warn("OnEveryFrameTrigger can only be used with scene.actionManager"), null) : (this.actions.push(e), this.getScene()._registeredActions++, Ln.Triggers[e.trigger] ? Ln.Triggers[e.trigger]++ : Ln.Triggers[e.trigger] = 1, e._actionManager = this, e._prepare(), e); } /** * Unregisters an action to this action manager * @param action defines the action to be unregistered * @returns a boolean indicating whether the action has been unregistered */ unregisterAction(e) { const t = this.actions.indexOf(e); return t !== -1 ? (this.actions.splice(t, 1), Ln.Triggers[e.trigger] -= 1, Ln.Triggers[e.trigger] === 0 && delete Ln.Triggers[e.trigger], e._actionManager = null, this.getScene()._registeredActions--, !0) : !1; } /** * Process a specific trigger * @param trigger defines the trigger to process * @param evt defines the event details to be processed */ processTrigger(e, t) { for (let i = 0; i < this.actions.length; i++) { const r = this.actions[i]; if (r.trigger === e) { if (t && (e === Ln.OnKeyUpTrigger || e === Ln.OnKeyDownTrigger)) { const s = r.getTriggerParameter(); if (typeof s == "function") { if (!s(t)) continue; } else if (s && s !== t.sourceEvent.keyCode) { if (!s.toLowerCase) continue; const n = s.toLowerCase(); if (n !== t.sourceEvent.key) { const a = t.sourceEvent.charCode ? 
t.sourceEvent.charCode : t.sourceEvent.keyCode; if (String.fromCharCode(a).toLowerCase() !== n) continue; } } } r._executeCurrent(t); } } } /** * @internal */ _getEffectiveTarget(e, t) { const i = t.split("."); for (let r = 0; r < i.length - 1; r++) e = e[i[r]]; return e; } /** * @internal */ _getProperty(e) { const t = e.split("."); return t[t.length - 1]; } /** * Serialize this manager to a JSON object * @param name defines the property name to store this manager * @returns a JSON representation of this manager */ serialize(e) { const t = { children: new Array(), name: e, type: 3, properties: new Array() // Empty for root but required }; for (let i = 0; i < this.actions.length; i++) { const r = { type: 0, children: new Array(), name: Ln.GetTriggerName(this.actions[i].trigger), properties: new Array() }, s = this.actions[i].triggerOptions; if (s && typeof s != "number") if (s.parameter instanceof Node) r.properties.push(Sa._GetTargetProperty(s.parameter)); else if (typeof s.parameter == "object") { const n = {}; id.DeepCopy(s.parameter, n, ["mesh"]), s.parameter && s.parameter.mesh && (n._meshId = s.parameter.mesh.id), r.properties.push({ name: "parameter", targetType: null, value: n }); } else r.properties.push({ name: "parameter", targetType: null, value: s.parameter }); this.actions[i].serialize(r), t.children.push(r); } return t; } /** * Creates a new ActionManager from a JSON data * @param parsedActions defines the JSON data to read from * @param object defines the hosting mesh * @param scene defines the hosting scene */ static Parse(e, t, i) { const r = new Ln(i); t === null ? i.actionManager = r : t.actionManager = r; const s = (l, o) => { const u = Qo("BABYLON." + l); return u && new u(...o); }, n = (l, o, u, h) => { if (h === null) { const m = parseFloat(o); return o === "true" || o === "false" ? o === "true" : isNaN(m) ? o : m; } const d = h.split("."), f = o.split(","); for (let m = 0; m < d.length; m++) u = u[d[m]]; if (typeof u == "boolean") return f[0] === "true"; if (typeof u == "string") return f[0]; const p = []; for (let m = 0; m < f.length; m++) p.push(parseFloat(f[m])); return u instanceof D ? D.FromArray(p) : u instanceof Di ? Di.FromArray(p) : u instanceof ze ? ze.FromArray(p) : u instanceof Et ? Et.FromArray(p) : parseFloat(f[0]); }, a = (l, o, u, h, d = null) => { if (l.detached) return; const f = []; let p = null, m = null; const _ = l.combine && l.combine.length > 0; if (l.type === 2 ? f.push(r) : f.push(o), _) { const C = []; for (let x = 0; x < l.combine.length; x++) a(l.combine[x], Ln.NothingTrigger, u, h, C); f.push(C); } else for (let C = 0; C < l.properties.length; C++) { let x = l.properties[C].value; const b = l.properties[C].name, S = l.properties[C].targetType; b === "target" ? S === "SceneProperties" ? x = p = i : S === "MaterialProperties" ? x = p = i.getMaterialByName(x) : x = p = i.getNodeByName(x) : b === "parent" ? x = i.getNodeByName(x) : b === "sound" ? i.getSoundByName && (x = i.getSoundByName(x)) : b !== "propertyPath" ? l.type === 2 && b === "operator" ? x = Nu[x] : x = n(b, x, p, b === "value" ? m : null) : m = x, f.push(x); } if (d === null ? f.push(u) : f.push(null), l.name === "InterpolateValueAction") { const C = f[f.length - 2]; f[f.length - 1] = C, f[f.length - 2] = u; } let v = s(l.name, f); if (v instanceof NO && u !== null) { const C = new YG(o, u); h ? h.then(C) : r.registerAction(C), h = C; } d === null ? v instanceof NO ? (u = v, v = h) : (u = null, h ? 
h.then(v) : r.registerAction(v)) : d.push(v); for (let C = 0; C < l.children.length; C++) a(l.children[C], o, u, v, null); }; for (let l = 0; l < e.children.length; l++) { let o; const u = e.children[l]; if (u.properties.length > 0) { const h = u.properties[0].value, d = u.properties[0].targetType === null ? h : i.getMeshByName(h); d._meshId && (d.mesh = i.getMeshById(d._meshId)), o = { trigger: Ln[u.name], parameter: d }; } else o = Ln[u.name]; for (let h = 0; h < u.children.length; h++) u.detached || a(u.children[h], o, null, null); } } /** * Get a trigger name by index * @param trigger defines the trigger index * @returns a trigger name */ static GetTriggerName(e) { switch (e) { case 0: return "NothingTrigger"; case 1: return "OnPickTrigger"; case 2: return "OnLeftPickTrigger"; case 3: return "OnRightPickTrigger"; case 4: return "OnCenterPickTrigger"; case 5: return "OnPickDownTrigger"; case 6: return "OnDoublePickTrigger"; case 7: return "OnPickUpTrigger"; case 8: return "OnLongPressTrigger"; case 9: return "OnPointerOverTrigger"; case 10: return "OnPointerOutTrigger"; case 11: return "OnEveryFrameTrigger"; case 12: return "OnIntersectionEnterTrigger"; case 13: return "OnIntersectionExitTrigger"; case 14: return "OnKeyDownTrigger"; case 15: return "OnKeyUpTrigger"; case 16: return "OnPickOutTrigger"; default: return ""; } } } Ln.NothingTrigger = 0; Ln.OnPickTrigger = 1; Ln.OnLeftPickTrigger = 2; Ln.OnRightPickTrigger = 3; Ln.OnCenterPickTrigger = 4; Ln.OnPickDownTrigger = 5; Ln.OnDoublePickTrigger = 6; Ln.OnPickUpTrigger = 7; Ln.OnPickOutTrigger = 16; Ln.OnLongPressTrigger = 8; Ln.OnPointerOverTrigger = 9; Ln.OnPointerOutTrigger = 10; Ln.OnEveryFrameTrigger = 11; Ln.OnIntersectionEnterTrigger = 12; Ln.OnIntersectionExitTrigger = 13; Ln.OnKeyDownTrigger = 14; Ln.OnKeyUpTrigger = 15; class $ee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param sound defines the sound to play * @param condition defines the trigger related conditions */ constructor(e, t, i) { super(e, i), this._sound = t; } /** @internal */ _prepare() { } /** * Execute the action and play the sound. */ execute() { this._sound !== void 0 && this._sound.play(); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "PlaySoundAction", properties: [{ name: "sound", value: this._sound.name }] }, e); } } class Zee extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param sound defines the sound to stop * @param condition defines the trigger related conditions */ constructor(e, t, i) { super(e, i), this._sound = t; } /** @internal */ _prepare() { } /** * Execute the action and stop the sound. */ execute() { this._sound !== void 0 && this._sound.stop(); } /** * Serializes the actions and its related information. * @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "StopSoundAction", properties: [{ name: "sound", value: this._sound.name }] }, e); } } Be("BABYLON.PlaySoundAction", $ee); Be("BABYLON.StopSoundAction", Zee); class K8 { /** * Evaluate a query * @param query defines the query to evaluate * @param evaluateCallback defines the callback used to filter result * @returns true if the query matches */ static Eval(e, t) { return e.match(/\([^()]*\)/g) ? 
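/* Illustrative sketch (editor's addition, commented out): wiring the action system above
   together. `Ln` is the ActionManager ("BABYLON.ActionManager"), `Qee` is ExecuteCodeAction
   and `Kee` is SetValueAction; `scene` and `mesh` are hypothetical placeholders.
   registerAction returns the prepared action, and then() chains a follow-up action onto it.

   mesh.actionManager = new Ln(scene);
   mesh.actionManager
     .registerAction(new Qee(Ln.OnPickTrigger, (evt) => console.log("picked", evt.source.name)))
     .then(new Kee(Ln.NothingTrigger, mesh.material, "alpha", 0.5));
*/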
e = e.replace(/\([^()]*\)/g, (i) => (i = i.slice(1, i.length - 1), K8._HandleParenthesisContent(i, t))) : e = K8._HandleParenthesisContent(e, t), e === "true" ? !0 : e === "false" ? !1 : K8.Eval(e, t); } static _HandleParenthesisContent(e, t) { t = t || ((s) => s === "true"); let i; const r = e.split("||"); for (const s in r) if (Object.prototype.hasOwnProperty.call(r, s)) { let n = K8._SimplifyNegation(r[s].trim()); const a = n.split("&&"); if (a.length > 1) for (let l = 0; l < a.length; ++l) { const o = K8._SimplifyNegation(a[l].trim()); if (o !== "true" && o !== "false" ? o[0] === "!" ? i = !t(o.substring(1)) : i = t(o) : i = o === "true", !i) { n = "false"; break; } } if (i || n === "true") { i = !0; break; } n !== "true" && n !== "false" ? n[0] === "!" ? i = !t(n.substring(1)) : i = t(n) : i = n === "true"; } return i ? "true" : "false"; } static _SimplifyNegation(e) { return e = e.replace(/^[\s!]+/, (t) => (t = t.replace(/[\s]/g, () => ""), t.length % 2 ? "!" : "")), e = e.trim(), e === "!true" ? e = "false" : e === "!false" && (e = "true"), e; } } class $s { /** * Adds support for tags on the given object * @param obj defines the object to use */ static EnableFor(e) { e._tags = e._tags || {}, e.hasTags = () => $s.HasTags(e), e.addTags = (t) => $s.AddTagsTo(e, t), e.removeTags = (t) => $s.RemoveTagsFrom(e, t), e.matchesTagsQuery = (t) => $s.MatchesQuery(e, t); } /** * Removes tags support * @param obj defines the object to use */ static DisableFor(e) { delete e._tags, delete e.hasTags, delete e.addTags, delete e.removeTags, delete e.matchesTagsQuery; } /** * Gets a boolean indicating if the given object has tags * @param obj defines the object to use * @returns a boolean */ static HasTags(e) { if (!e._tags) return !1; const t = e._tags; for (const i in t) if (Object.prototype.hasOwnProperty.call(t, i)) return !0; return !1; } /** * Gets the tags available on a given object * @param obj defines the object to use * @param asString defines if the tags must be returned as a string instead of an array of strings * @returns the tags */ static GetTags(e, t = !0) { if (!e._tags) return null; if (t) { const i = []; for (const r in e._tags) Object.prototype.hasOwnProperty.call(e._tags, r) && e._tags[r] === !0 && i.push(r); return i.join(" "); } else return e._tags; } /** * Adds tags to an object * @param obj defines the object to use * @param tagsString defines the tag string. The tags 'true' and 'false' are reserved and cannot be used as tags. * A tag cannot start with '||', '&&', and '!'. It cannot contain whitespaces */ static AddTagsTo(e, t) { if (!t || typeof t != "string") return; t.split(" ").forEach(function(r) { $s._AddTagTo(e, r); }); } /** * @internal */ static _AddTagTo(e, t) { t = t.trim(), !(t === "" || t === "true" || t === "false") && (t.match(/[\s]/) || t.match(/^([!]|([|]|[&]){2})/) || ($s.EnableFor(e), e._tags[t] = !0)); } /** * Removes specific tags from a specific object * @param obj defines the object to use * @param tagsString defines the tags to remove */ static RemoveTagsFrom(e, t) { if (!$s.HasTags(e)) return; const i = t.split(" "); for (const r in i) $s._RemoveTagFrom(e, i[r]); } /** * @internal */ static _RemoveTagFrom(e, t) { delete e._tags[t]; } /** * Defines if tags hosted on an object match a given query * @param obj defines the object to use * @param tagsQuery defines the tag query * @returns a boolean */ static MatchesQuery(e, t) { return t === void 0 ? !0 : t === "" ? 
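/* Illustrative sketch (editor's addition, commented out): the Tags helper (`$s`) together
   with the boolean query evaluator (`K8`) tags arbitrary objects and filters them with
   "&&", "||" and "!" queries. `box` is a hypothetical object.

   $s.AddTagsTo(box, "pickable ground");
   $s.MatchesQuery(box, "pickable && !enemy"); // true
   $s.GetTags(box);                            // "pickable ground"
*/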
$s.HasTags(e) : K8.Eval(t, (i) => $s.HasTags(e) && e._tags[i]); } } const CZ = {}; function yr(c, e = !1) { if (!(e && CZ[c])) return CZ[c] = !0, `${c} needs to be imported before as it contains a side-effect required by your code.`; } const UF = {}, EF = {}, xZ = function(c, e, t, i = {}) { const r = c(); $s && $s.HasTags(e) && $s.AddTagsTo(r, $s.GetTags(e, !0)); const s = aH(r), n = {}; for (const a in s) { const l = s[a], o = e[a], u = l.type; if (o != null && (a !== "uniqueId" || St.AllowLoadingUniqueId)) switch (u) { case 0: case 6: case 11: r[a] = o; break; case 1: i.cloneTexturesOnlyOnce && n[o.uniqueId] ? r[a] = n[o.uniqueId] : (r[a] = t || o.isRenderTarget ? o : o.clone(), n[o.uniqueId] = r[a]); break; case 2: case 3: case 4: case 5: case 7: case 10: case 12: r[a] = t ? o : o.clone(); break; } } return r; }; function Lle(c) { const e = c.getClassName(); return UF[e] || (UF[e] = {}), UF[e]; } function aH(c) { const e = c.getClassName(); if (EF[e]) return EF[e]; EF[e] = {}; const t = EF[e]; let i = c, r = e; for (; r; ) { const s = UF[r]; for (const l in s) t[l] = s[l]; let n, a = !1; do { if (n = Object.getPrototypeOf(i), !n.getClassName) { a = !0; break; } if (n.getClassName() !== r) break; i = n; } while (n); if (a) break; r = n.getClassName(), i = n; } return t; } function bg(c, e) { return (t, i) => { const r = Lle(t); r[i] || (r[i] = { type: c, sourceName: e }); }; } function Nle(c, e = null) { return (t, i) => { const r = e || "_" + i; Object.defineProperty(t, i, { get: function() { return this[r]; }, set: function(s) { typeof this.equals == "function" && this.equals(s) || this[r] !== s && (this[r] = s, t[c].apply(this)); }, enumerable: !0, configurable: !0 }); }; } function ct(c, e = null) { return Nle(c, e); } function W(c) { return bg(0, c); } function er(c) { return bg(1, c); } function Fs(c) { return bg(2, c); } function uw(c) { return bg(3, c); } function PL(c) { return bg(4, c); } function oo(c) { return bg(5, c); } function hw(c) { return bg(6, c); } function qee(c) { return bg(7, c); } function dw(c) { return bg(8, c); } function $G(c) { return bg(9, c); } function Jee(c) { return bg(10, c); } function VB(c) { return bg(12, c); } function ete(c) { return bg(11, c); } class St { /** * Appends the serialized animations from the source animations * @param source Source containing the animations * @param destination Target to store the animations */ static AppendSerializedAnimations(e, t) { if (e.animations) { t.animations = []; for (let i = 0; i < e.animations.length; i++) { const r = e.animations[i]; t.animations.push(r.serialize()); } } } /** * Static function used to serialized a specific entity * @param entity defines the entity to serialize * @param serializationObject defines the optional target object where serialization data will be stored * @returns a JSON compatible object representing the serialization of the entity */ static Serialize(e, t) { t || (t = {}), $s && (t.tags = $s.GetTags(e)); const i = aH(e); for (const r in i) { const s = i[r], n = s.sourceName || r, a = s.type, l = e[r]; if (l != null && (r !== "uniqueId" || St.AllowLoadingUniqueId)) switch (a) { case 0: t[n] = l; break; case 1: t[n] = l.serialize(); break; case 2: t[n] = l.asArray(); break; case 3: t[n] = l.serialize(); break; case 4: t[n] = l.asArray(); break; case 5: t[n] = l.asArray(); break; case 6: t[n] = l.id; break; case 7: t[n] = l.serialize(); break; case 8: t[n] = l.asArray(); break; case 9: t[n] = l.serialize(); break; case 10: t[n] = l.asArray(); break; case 11: t[n] = l.id; 
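/* Illustrative sketch (editor's addition, commented out): the decorator factories above record
   per-property metadata (W for plain values, er for textures, Fs for Color3, ...) which `St`,
   the serialization helper, walks in Serialize/Parse. Applied as plain calls rather than
   decorator syntax, on a hypothetical class:

   class Marker { constructor() { this.label = "hi"; this.color = new ze(1, 0, 0); } getClassName() { return "Marker"; } }
   W()(Marker.prototype, "label");   // metadata type 0: copied as-is
   Fs()(Marker.prototype, "color");  // metadata type 2: stored via asArray(), parsed with ze.FromArray
   St.Serialize(new Marker());       // expected: { tags: null, label: "hi", color: [1, 0, 0] }
*/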
break; case 12: t[n] = l.asArray(); break; } } return t; } /** * Given a source json and a destination object in a scene, this function will parse the source and will try to apply its content to the destination object * @param source the source json data * @param destination the destination object * @param scene the scene where the object is * @param rootUrl root url to use to load assets */ static ParseProperties(e, t, i, r) { r || (r = ""); const s = aH(t); for (const n in s) { const a = s[n], l = e[a.sourceName || n], o = a.type; if (l != null && (n !== "uniqueId" || St.AllowLoadingUniqueId)) { const u = t; switch (o) { case 0: u[n] = l; break; case 1: i && (u[n] = St._TextureParser(l, i, r)); break; case 2: u[n] = ze.FromArray(l); break; case 3: u[n] = St._FresnelParametersParser(l); break; case 4: u[n] = at.FromArray(l); break; case 5: u[n] = D.FromArray(l); break; case 6: i && (u[n] = i.getLastMeshById(l)); break; case 7: u[n] = St._ColorCurvesParser(l); break; case 8: u[n] = Et.FromArray(l); break; case 9: u[n] = St._ImageProcessingConfigurationParser(l); break; case 10: u[n] = Ze.FromArray(l); break; case 11: i && (u[n] = i.getCameraById(l)); break; case 12: u[n] = Ae.FromArray(l); break; } } } } /** * Creates a new entity from a serialization data object * @param creationFunction defines a function used to instanciated the new entity * @param source defines the source serialization data * @param scene defines the hosting scene * @param rootUrl defines the root url for resources * @returns a new entity */ static Parse(e, t, i, r = null) { const s = e(); return $s && $s.AddTagsTo(s, t.tags), St.ParseProperties(t, s, i, r), s; } /** * Clones an object * @param creationFunction defines the function used to instanciate the new object * @param source defines the source object * @returns the cloned object */ static Clone(e, t, i = {}) { return xZ(e, t, !1, i); } /** * Instanciates a new object based on a source one (some data will be shared between both object) * @param creationFunction defines the function used to instanciate the new object * @param source defines the source object * @returns the new object */ static Instanciate(e, t) { return xZ(e, t, !0); } } St.AllowLoadingUniqueId = !1; St._ImageProcessingConfigurationParser = (c) => { throw yr("ImageProcessingConfiguration"); }; St._FresnelParametersParser = (c) => { throw yr("FresnelParameters"); }; St._ColorCurvesParser = (c) => { throw yr("ColorCurves"); }; St._TextureParser = (c, e, t) => { throw yr("Texture"); }; function gT(c, e, t, i) { const r = t.value; t.value = (...s) => { let n = r; if (typeof _native < "u" && _native[e]) { const a = _native[e]; i ? n = (...l) => i(...l) ? a(...l) : r(...l) : n = a; } return c[e] = n, n(...s); }; } gT.filter = function(c) { return (e, t, i) => gT(e, t, i, c); }; var $9; (function(c) { c[c.NONE = 0] = "NONE", c[c.STEP = 1] = "STEP"; })($9 || ($9 = {})); class hP { /** * Initializes the range of an animation * @param name The name of the animation range * @param from The starting frame of the animation * @param to The ending frame of the animation */ constructor(e, t, i) { this.name = e, this.from = t, this.to = i; } /** * Makes a copy of the animation range * @returns A copy of the animation range */ clone() { return new hP(this.name, this.from, this.to); } } function F(c, e, t, i) { var r = arguments.length, s = r < 3 ? e : i === null ? 
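/*
 * Usage sketch for the serialization layer above: St appears to correspond to
 * Babylon.js's SerializationHelper, and the small factories (bg / W / er / Fs ...)
 * to its @serialize-style property decorators, which record which properties of a
 * class should be serialized, cloned or parsed. Illustrative only, assuming the
 * public "@babylonjs/core" names and an existing Scene as `scene`.
 *
 *   import { SerializationHelper, StandardMaterial } from "@babylonjs/core";
 *
 *   const source = new StandardMaterial("mat", scene);
 *   // Serialize only the properties flagged by the decorators on StandardMaterial.
 *   const json = SerializationHelper.Serialize(source);
 *   // Rebuild an equivalent object from that JSON in the same scene.
 *   const restored = SerializationHelper.Parse(() => new StandardMaterial("mat2", scene), json, scene);
 *   // Or clone directly; property types decide whether sub-resources are shared or duplicated.
 *   const copy = SerializationHelper.Clone(() => new StandardMaterial("mat3", scene), source);
 */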
i = Object.getOwnPropertyDescriptor(e, t) : i, n; if (typeof Reflect == "object" && typeof Reflect.decorate == "function") s = Reflect.decorate(c, e, t, i); else for (var a = c.length - 1; a >= 0; a--) (n = c[a]) && (s = (r < 3 ? n(s) : r > 3 ? n(e, t, s) : n(e, t)) || s); return r > 3 && s && Object.defineProperty(e, t, s), s; } class Fle { constructor() { this._doNotSerialize = !1, this._isDisposed = !1, this._sceneRootNodesIndex = -1, this._isEnabled = !0, this._isParentEnabled = !0, this._isReady = !0, this._onEnabledStateChangedObservable = new Fe(), this._onClonedObservable = new Fe(); } } let In = class oH { /** * Add a new node constructor * @param type defines the type name of the node to construct * @param constructorFunc defines the constructor function */ static AddNodeConstructor(e, t) { this._NodeConstructors[e] = t; } /** * Returns a node constructor based on type name * @param type defines the type name * @param name defines the new node name * @param scene defines the hosting scene * @param options defines optional options to transmit to constructors * @returns the new constructor or null */ static Construct(e, t, i, r) { const s = this._NodeConstructors[e]; return s ? s(t, i, r) : null; } /** * Gets or sets the accessibility tag to describe the node for accessibility purpose. */ set accessibilityTag(e) { this._accessibilityTag = e, this.onAccessibilityTagChangedObservable.notifyObservers(e); } get accessibilityTag() { return this._accessibilityTag; } /** * Gets or sets a boolean used to define if the node must be serialized */ get doNotSerialize() { return this._nodeDataStorage._doNotSerialize ? !0 : this._parentNode ? this._parentNode.doNotSerialize : !1; } set doNotSerialize(e) { this._nodeDataStorage._doNotSerialize = e; } /** * Gets a boolean indicating if the node has been disposed * @returns true if the node was disposed */ isDisposed() { return this._nodeDataStorage._isDisposed; } /** * Gets or sets the parent of the node (without keeping the current position in the scene) * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/parent_pivot/parent */ set parent(e) { if (this._parentNode === e) return; const t = this._parentNode; if (this._parentNode && this._parentNode._children !== void 0 && this._parentNode._children !== null) { const i = this._parentNode._children.indexOf(this); i !== -1 && this._parentNode._children.splice(i, 1), !e && !this._nodeDataStorage._isDisposed && this._addToSceneRootNodes(); } this._parentNode = e, this._parentNode && ((this._parentNode._children === void 0 || this._parentNode._children === null) && (this._parentNode._children = new Array()), this._parentNode._children.push(this), t || this._removeFromSceneRootNodes()), this._syncParentEnabledState(); } get parent() { return this._parentNode; } /** * @internal */ _serializeAsParent(e) { e.parentId = this.uniqueId; } /** @internal */ _addToSceneRootNodes() { this._nodeDataStorage._sceneRootNodesIndex === -1 && (this._nodeDataStorage._sceneRootNodesIndex = this._scene.rootNodes.length, this._scene.rootNodes.push(this)); } /** @internal */ _removeFromSceneRootNodes() { if (this._nodeDataStorage._sceneRootNodesIndex !== -1) { const e = this._scene.rootNodes, t = e.length - 1; e[this._nodeDataStorage._sceneRootNodesIndex] = e[t], e[this._nodeDataStorage._sceneRootNodesIndex]._nodeDataStorage._sceneRootNodesIndex = this._nodeDataStorage._sceneRootNodesIndex, this._scene.rootNodes.pop(), this._nodeDataStorage._sceneRootNodesIndex = -1; } } /** * Gets or sets the animation 
properties override */ get animationPropertiesOverride() { return this._animationPropertiesOverride ? this._animationPropertiesOverride : this._scene.animationPropertiesOverride; } set animationPropertiesOverride(e) { this._animationPropertiesOverride = e; } /** * Gets a string identifying the name of the class * @returns "Node" string */ getClassName() { return "Node"; } /** * Sets a callback that will be raised when the node will be disposed */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * An event triggered when the enabled state of the node changes */ get onEnabledStateChangedObservable() { return this._nodeDataStorage._onEnabledStateChangedObservable; } /** * An event triggered when the node is cloned */ get onClonedObservable() { return this._nodeDataStorage._onClonedObservable; } /** * Creates a new Node * @param name the name and id to be given to this node * @param scene the scene this node will be added to */ constructor(e, t = null) { this._isDirty = !1, this._nodeDataStorage = new Fle(), this.state = "", this.metadata = null, this.reservedDataStore = null, this._accessibilityTag = null, this.onAccessibilityTagChangedObservable = new Fe(), this._parentContainer = null, this.animations = [], this._ranges = {}, this.onReady = null, this._currentRenderId = -1, this._parentUpdateId = -1, this._childUpdateId = -1, this._waitingParentId = null, this._waitingParentInstanceIndex = null, this._waitingParsedUniqueId = null, this._cache = {}, this._parentNode = null, this._children = null, this._worldMatrix = Ae.Identity(), this._worldMatrixDeterminant = 0, this._worldMatrixDeterminantIsDirty = !0, this._animationPropertiesOverride = null, this._isNode = !0, this.onDisposeObservable = new Fe(), this._onDisposeObserver = null, this._behaviors = new Array(), this.name = e, this.id = e, this._scene = t || gi.LastCreatedScene, this.uniqueId = this._scene.getUniqueId(), this._initCache(); } /** * Gets the scene of the node * @returns a scene */ getScene() { return this._scene; } /** * Gets the engine of the node * @returns a Engine */ getEngine() { return this._scene.getEngine(); } /** * Attach a behavior to the node * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors * @param behavior defines the behavior to attach * @param attachImmediately defines that the behavior must be attached even if the scene is still loading * @returns the current Node */ addBehavior(e, t = !1) { return this._behaviors.indexOf(e) !== -1 ? this : (e.init(), this._scene.isLoading && !t ? this._scene.onDataLoadedObservable.addOnce(() => { e.attach(this); }) : e.attach(this), this._behaviors.push(e), this); } /** * Remove an attached behavior * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors * @param behavior defines the behavior to attach * @returns the current Node */ removeBehavior(e) { const t = this._behaviors.indexOf(e); return t === -1 ? 
this : (this._behaviors[t].detach(), this._behaviors.splice(t, 1), this); } /** * Gets the list of attached behaviors * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors */ get behaviors() { return this._behaviors; } /** * Gets an attached behavior by name * @param name defines the name of the behavior to look for * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors * @returns null if behavior was not found else the requested behavior */ getBehaviorByName(e) { for (const t of this._behaviors) if (t.name === e) return t; return null; } /** * Returns the latest update of the World matrix * @returns a Matrix */ getWorldMatrix() { return this._currentRenderId !== this._scene.getRenderId() && this.computeWorldMatrix(), this._worldMatrix; } /** @internal */ _getWorldMatrixDeterminant() { return this._worldMatrixDeterminantIsDirty && (this._worldMatrixDeterminantIsDirty = !1, this._worldMatrixDeterminant = this._worldMatrix.determinant()), this._worldMatrixDeterminant; } /** * Returns directly the latest state of the mesh World matrix. * A Matrix is returned. */ get worldMatrixFromCache() { return this._worldMatrix; } // override it in derived class if you add new variables to the cache // and call the parent class method /** @internal */ _initCache() { this._cache = {}, this._cache.parent = void 0; } /** * @internal */ updateCache(e) { !e && this.isSynchronized() || (this._cache.parent = this.parent, this._updateCache()); } /** * @internal */ _getActionManagerForTrigger(e, t = !0) { return this.parent ? this.parent._getActionManagerForTrigger(e, !1) : null; } // override it in derived class if you add new variables to the cache // and call the parent class method if !ignoreParentClass /** * @internal */ _updateCache(e) { } // override it in derived class if you add new variables to the cache /** @internal */ _isSynchronized() { return !0; } /** @internal */ _markSyncedWithParent() { this._parentNode && (this._parentUpdateId = this._parentNode._childUpdateId); } /** @internal */ isSynchronizedWithParent() { return this._parentNode ? this._parentNode._isDirty || this._parentUpdateId !== this._parentNode._childUpdateId ? !1 : this._parentNode.isSynchronized() : !0; } /** @internal */ isSynchronized() { return this._cache.parent !== this._parentNode ? (this._cache.parent = this._parentNode, !1) : this._parentNode && !this.isSynchronizedWithParent() ? !1 : this._isSynchronized(); } /** * Is this node ready to be used/rendered * @param _completeCheck defines if a complete check (including materials and lights) has to be done (false by default) * @returns true if the node is ready */ isReady(e = !1) { return this._nodeDataStorage._isReady; } /** * Flag the node as dirty (Forcing it to update everything) * @param _property helps children apply precise "dirtyfication" * @returns this node */ markAsDirty(e) { return this._currentRenderId = Number.MAX_VALUE, this._isDirty = !0, this; } /** * Is this node enabled? * If the node has a parent, all ancestors will be checked and false will be returned if any are false (not enabled), otherwise will return true * @param checkAncestors indicates if this method should check the ancestors. The default is to check the ancestors. If set to false, the method will return the value of this node without checking ancestors * @returns whether this node (and its parent) is enabled */ isEnabled(e = !0) { return e === !1 ? this._nodeDataStorage._isEnabled : this._nodeDataStorage._isEnabled ? 
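/*
 * Usage sketch for the Node class above (apparently Babylon.js's Node base class,
 * shared by meshes, cameras, lights and transform nodes). Illustrative only,
 * assuming the public "@babylonjs/core" API and an existing Scene as `scene`.
 *
 *   import { MeshBuilder, TransformNode } from "@babylonjs/core";
 *
 *   const root = new TransformNode("root", scene);
 *   const child = MeshBuilder.CreateSphere("child", { diameter: 1 }, scene);
 *   child.parent = root;            // re-parents without preserving world position
 *   root.setEnabled(false);         // disables the whole hierarchy
 *   child.isEnabled();              // false: ancestors are checked by default
 *   child.isEnabled(false);         // true: only this node's own flag
 *   const meshes = root.getChildMeshes();   // direct and indirect child meshes
 */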
this._nodeDataStorage._isParentEnabled : !1; } /** @internal */ _syncParentEnabledState() { this._nodeDataStorage._isParentEnabled = this._parentNode ? this._parentNode.isEnabled() : !0, this._children && this._children.forEach((e) => { e._syncParentEnabledState(); }); } /** * Set the enabled state of this node * @param value defines the new enabled state */ setEnabled(e) { this._nodeDataStorage._isEnabled !== e && (this._nodeDataStorage._isEnabled = e, this._syncParentEnabledState(), this._nodeDataStorage._onEnabledStateChangedObservable.notifyObservers(e)); } /** * Is this node a descendant of the given node? * The function will iterate up the hierarchy until the ancestor was found or no more parents defined * @param ancestor defines the parent node to inspect * @returns a boolean indicating if this node is a descendant of the given node */ isDescendantOf(e) { return this.parent ? this.parent === e ? !0 : this.parent.isDescendantOf(e) : !1; } /** * @internal */ _getDescendants(e, t = !1, i) { if (this._children) for (let r = 0; r < this._children.length; r++) { const s = this._children[r]; (!i || i(s)) && e.push(s), t || s._getDescendants(e, !1, i); } } /** * Will return all nodes that have this node as ascendant * @param directDescendantsOnly defines if true only direct descendants of 'this' will be considered, if false direct and also indirect (children of children, an so on in a recursive manner) descendants of 'this' will be considered * @param predicate defines an optional predicate that will be called on every evaluated child, the predicate must return true for a given child to be part of the result, otherwise it will be ignored * @returns all children nodes of all types */ getDescendants(e, t) { const i = []; return this._getDescendants(i, e, t), i; } /** * Get all child-meshes of this node * @param directDescendantsOnly defines if true only direct descendants of 'this' will be considered, if false direct and also indirect (children of children, an so on in a recursive manner) descendants of 'this' will be considered (Default: false) * @param predicate defines an optional predicate that will be called on every evaluated child, the predicate must return true for a given child to be part of the result, otherwise it will be ignored * @returns an array of AbstractMesh */ getChildMeshes(e, t) { const i = []; return this._getDescendants(i, e, (r) => (!t || t(r)) && r.cullingStrategy !== void 0), i; } /** * Get all direct children of this node * @param predicate defines an optional predicate that will be called on every evaluated child, the predicate must return true for a given child to be part of the result, otherwise it will be ignored * @param directDescendantsOnly defines if true only direct descendants of 'this' will be considered, if false direct and also indirect (children of children, an so on in a recursive manner) descendants of 'this' will be considered (Default: true) * @returns an array of Node */ getChildren(e, t = !0) { return this.getDescendants(t, e); } /** * @internal */ _setReady(e) { if (e !== this._nodeDataStorage._isReady) { if (!e) { this._nodeDataStorage._isReady = !1; return; } this.onReady && this.onReady(this), this._nodeDataStorage._isReady = !0; } } /** * Get an animation by name * @param name defines the name of the animation to look for * @returns null if not found else the requested animation */ getAnimationByName(e) { for (let t = 0; t < this.animations.length; t++) { const i = this.animations[t]; if (i.name === e) return i; } return null; } /** * 
Creates an animation range for this node * @param name defines the name of the range * @param from defines the starting key * @param to defines the end key */ createAnimationRange(e, t, i) { if (!this._ranges[e]) { this._ranges[e] = oH._AnimationRangeFactory(e, t, i); for (let r = 0, s = this.animations.length; r < s; r++) this.animations[r] && this.animations[r].createRange(e, t, i); } } /** * Delete a specific animation range * @param name defines the name of the range to delete * @param deleteFrames defines if animation frames from the range must be deleted as well */ deleteAnimationRange(e, t = !0) { for (let i = 0, r = this.animations.length; i < r; i++) this.animations[i] && this.animations[i].deleteRange(e, t); this._ranges[e] = null; } /** * Get an animation range by name * @param name defines the name of the animation range to look for * @returns null if not found else the requested animation range */ getAnimationRange(e) { return this._ranges[e] || null; } /** * Clone the current node * @param name Name of the new clone * @param newParent New parent for the clone * @param doNotCloneChildren Do not clone children hierarchy * @returns the new transform node */ clone(e, t, i) { const r = St.Clone(() => new oH(e, this.getScene()), this); if (t && (r.parent = t), !i) { const s = this.getDescendants(!0); for (let n = 0; n < s.length; n++) { const a = s[n]; a.clone(e + "." + a.name, r); } } return r; } /** * Gets the list of all animation ranges defined on this node * @returns an array */ getAnimationRanges() { const e = []; let t; for (t in this._ranges) e.push(this._ranges[t]); return e; } /** * Will start the animation sequence * @param name defines the range frames for animation sequence * @param loop defines if the animation should loop (false by default) * @param speedRatio defines the speed factor in which to run the animation (1 by default) * @param onAnimationEnd defines a function to be executed when the animation ended (undefined by default) * @returns the object created for this animation. If range does not exist, it will return null */ beginAnimation(e, t, i, r) { const s = this.getAnimationRange(e); return s ? this._scene.beginAnimation(this, s.from, s.to, t, i, r) : null; } /** * Serialize animation ranges into a JSON compatible object * @returns serialization object */ serializeAnimationRanges() { const e = []; for (const t in this._ranges) { const i = this._ranges[t]; if (!i) continue; const r = {}; r.name = t, r.from = i.from, r.to = i.to, e.push(r); } return e; } /** * Computes the world matrix of the node * @param _force defines if the cache version should be invalidated forcing the world matrix to be created from scratch * @returns the world matrix */ computeWorldMatrix(e) { return this._worldMatrix || (this._worldMatrix = Ae.Identity()), this._worldMatrix; } /** * Releases resources associated with this node. * @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { if (this._nodeDataStorage._isDisposed = !0, !e) { const i = this.getDescendants(!0); for (const r of i) r.dispose(e, t); } this.parent ? 
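/*
 * Usage sketch for the animation-range methods above (createAnimationRange,
 * getAnimationRange, beginAnimation). Illustrative only, assuming the public
 * Babylon.js API; `character` is a hypothetical node whose animations already
 * have keys on frames 0..120.
 *
 *   character.createAnimationRange("walk", 0, 60);
 *   character.createAnimationRange("run", 61, 120);
 *   const walk = character.getAnimationRange("walk");   // AnimationRange or null
 *   character.beginAnimation("walk", true);             // loop the named range
 *   character.beginAnimation("run", false, 1.0, () => console.log("run finished"));
 */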
this.parent = null : this._removeFromSceneRootNodes(), this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.onEnabledStateChangedObservable.clear(), this.onClonedObservable.clear(); for (const i of this._behaviors) i.detach(); this._behaviors.length = 0, this.metadata = null; } /** * Parse animation range data from a serialization object and store them into a given node * @param node defines where to store the animation ranges * @param parsedNode defines the serialization object to read data from * @param _scene defines the hosting scene */ static ParseAnimationRanges(e, t, i) { if (t.ranges) for (let r = 0; r < t.ranges.length; r++) { const s = t.ranges[r]; e.createAnimationRange(s.name, s.from, s.to); } } /** * Return the minimum and maximum world vectors of the entire hierarchy under current node * @param includeDescendants Include bounding info from descendants as well (true by default) * @param predicate defines a callback function that can be customize to filter what meshes should be included in the list used to compute the bounding vectors * @returns the new bounding vectors */ getHierarchyBoundingVectors(e = !0, t = null) { this.getScene().incrementRenderId(), this.computeWorldMatrix(!0); let i, r; const s = this; if (s.getBoundingInfo && s.subMeshes) { const n = s.getBoundingInfo(); i = n.boundingBox.minimumWorld.clone(), r = n.boundingBox.maximumWorld.clone(); } else i = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), r = new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); if (e) { const n = this.getDescendants(!1); for (const a of n) { const l = a; if (l.computeWorldMatrix(!0), t && !t(l) || !l.getBoundingInfo || l.getTotalVertices() === 0) continue; const u = l.getBoundingInfo().boundingBox, h = u.minimumWorld, d = u.maximumWorld; D.CheckExtends(h, i, r), D.CheckExtends(d, i, r); } } return { min: i, max: r }; } }; In._AnimationRangeFactory = (c, e, t) => { throw yr("AnimationRange"); }; In._NodeConstructors = {}; F([ W() ], In.prototype, "name", void 0); F([ W() ], In.prototype, "id", void 0); F([ W() ], In.prototype, "uniqueId", void 0); F([ W() ], In.prototype, "state", void 0); F([ W() ], In.prototype, "metadata", void 0); class kf { /** * Creates a Size object from the given width and height (floats). * @param width width of the new size * @param height height of the new size */ constructor(e, t) { this.width = e, this.height = t; } /** * Returns a string with the Size width and height * @returns a string with the Size width and height */ toString() { return `{W: ${this.width}, H: ${this.height}}`; } /** * "Size" * @returns the string "Size" */ getClassName() { return "Size"; } /** * Returns the Size hash code. * @returns a hash code for a unique width and height */ getHashCode() { let e = this.width | 0; return e = e * 397 ^ (this.height | 0), e; } /** * Updates the current size from the given one. * @param src the given size */ copyFrom(e) { this.width = e.width, this.height = e.height; } /** * Updates in place the current Size from the given floats. * @param width width of the new size * @param height height of the new size * @returns the updated Size. */ copyFromFloats(e, t) { return this.width = e, this.height = t, this; } /** * Updates in place the current Size from the given floats. * @param width width to set * @param height height to set * @returns the updated Size. 
*/ set(e, t) { return this.copyFromFloats(e, t); } /** * Multiplies the width and height by numbers * @param w factor to multiple the width by * @param h factor to multiple the height by * @returns a new Size set with the multiplication result of the current Size and the given floats. */ multiplyByFloats(e, t) { return new kf(this.width * e, this.height * t); } /** * Clones the size * @returns a new Size copied from the given one. */ clone() { return new kf(this.width, this.height); } /** * True if the current Size and the given one width and height are strictly equal. * @param other the other size to compare against * @returns True if the current Size and the given one width and height are strictly equal. */ equals(e) { return e ? this.width === e.width && this.height === e.height : !1; } /** * The surface of the Size : width * height (float). */ get surface() { return this.width * this.height; } /** * Create a new size of zero * @returns a new Size set to (0.0, 0.0) */ static Zero() { return new kf(0, 0); } /** * Sums the width and height of two sizes * @param otherSize size to add to this size * @returns a new Size set as the addition result of the current Size and the given one. */ add(e) { return new kf(this.width + e.width, this.height + e.height); } /** * Subtracts the width and height of two * @param otherSize size to subtract to this size * @returns a new Size set as the subtraction result of the given one from the current Size. */ subtract(e) { return new kf(this.width - e.width, this.height - e.height); } /** * Scales the width and height * @param scale the scale to multiply the width and height by * @returns a new Size set with the multiplication result of the current Size and the given floats. */ scale(e) { return new kf(this.width * e, this.height * e); } /** * Creates a new Size set at the linear interpolation "amount" between "start" and "end" * @param start starting size to lerp between * @param end end size to lerp between * @param amount amount to lerp between the start and end values * @returns a new Size set at the linear interpolation "amount" between "start" and "end" */ static Lerp(e, t, i) { const r = e.width + (t.width - e.width) * i, s = e.height + (t.height - e.height) * i; return new kf(r, s); } } function Ble() { return typeof _native < "u" && _native.XMLHttpRequest ? 
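/*
 * Usage sketch for the Size class above (apparently Babylon.js's Size, a simple
 * width/height value type). Illustrative only, assuming the public "@babylonjs/core" API.
 *
 *   import { Size } from "@babylonjs/core";
 *
 *   const a = new Size(256, 128);
 *   const b = a.scale(2);                 // Size(512, 256), `a` is untouched
 *   const mid = Size.Lerp(a, b, 0.5);     // Size(384, 192)
 *   a.equals(new Size(256, 128));         // true: strict width/height comparison
 */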
new _native.XMLHttpRequest() : new XMLHttpRequest(); } class go { constructor() { this._xhr = Ble(), this._requestURL = ""; } /** * This function can be called to check if there are request modifiers for network requests * @returns true if there are any custom requests available */ static get IsCustomRequestAvailable() { return Object.keys(go.CustomRequestHeaders).length > 0 || go.CustomRequestModifiers.length > 0; } _injectCustomRequestHeaders() { if (!this._shouldSkipRequestModifications(this._requestURL)) for (const e in go.CustomRequestHeaders) { const t = go.CustomRequestHeaders[e]; t && this._xhr.setRequestHeader(e, t); } } _shouldSkipRequestModifications(e) { return go.SkipRequestModificationForBabylonCDN && (e.includes("preview.babylonjs.com") || e.includes("cdn.babylonjs.com")); } /** * Gets or sets a function to be called when loading progress changes */ get onprogress() { return this._xhr.onprogress; } set onprogress(e) { this._xhr.onprogress = e; } /** * Returns client's state */ get readyState() { return this._xhr.readyState; } /** * Returns client's status */ get status() { return this._xhr.status; } /** * Returns client's status as a text */ get statusText() { return this._xhr.statusText; } /** * Returns client's response */ get response() { return this._xhr.response; } /** * Returns client's response url */ get responseURL() { return this._xhr.responseURL; } /** * Returns client's response as text */ get responseText() { return this._xhr.responseText; } /** * Gets or sets the expected response type */ get responseType() { return this._xhr.responseType; } set responseType(e) { this._xhr.responseType = e; } /** * Gets or sets the timeout value in milliseconds */ get timeout() { return this._xhr.timeout; } set timeout(e) { this._xhr.timeout = e; } addEventListener(e, t, i) { this._xhr.addEventListener(e, t, i); } removeEventListener(e, t, i) { this._xhr.removeEventListener(e, t, i); } /** * Cancels any network activity */ abort() { this._xhr.abort(); } /** * Initiates the request. The optional argument provides the request body. The argument is ignored if request method is GET or HEAD * @param body defines an optional request body */ send(e) { go.CustomRequestHeaders && this._injectCustomRequestHeaders(), this._xhr.send(e); } /** * Sets the request method, request URL * @param method defines the method to use (GET, POST, etc..) * @param url defines the url to connect with */ open(e, t) { for (const i of go.CustomRequestModifiers) { if (this._shouldSkipRequestModifications(t)) return; i(this._xhr, t); } return t = t.replace("file:http:", "http:"), t = t.replace("file:https:", "https:"), this._requestURL = t, this._xhr.open(e, t, !0); } /** * Sets the value of a request header. * @param name The name of the header whose value is to be set * @param value The value to set as the body of the header */ setRequestHeader(e, t) { this._xhr.setRequestHeader(e, t); } /** * Get the string containing the text of a particular header's value. 
* @param name The name of the header * @returns The string containing the text of the given header name */ getResponseHeader(e) { return this._xhr.getResponseHeader(e); } } go.CustomRequestHeaders = {}; go.CustomRequestModifiers = new Array(); go.SkipRequestModificationForBabylonCDN = !0; const ZG = Object.freeze(new Ze(0, 0, 0, 0)), qG = Object.freeze(D.Zero()), JG = Object.freeze(at.Zero()), eK = Object.freeze(kf.Zero()), tK = Object.freeze(ze.Black()), iK = Object.freeze(new Et(0, 0, 0, 0)), FA = { key: 0, repeatCount: 0, loopMode: 2 }; class nt { /** * @internal Internal use */ static _PrepareAnimation(e, t, i, r, s, n, a, l) { let o; if (!isNaN(parseFloat(s)) && isFinite(s) ? o = nt.ANIMATIONTYPE_FLOAT : s instanceof Ze ? o = nt.ANIMATIONTYPE_QUATERNION : s instanceof D ? o = nt.ANIMATIONTYPE_VECTOR3 : s instanceof at ? o = nt.ANIMATIONTYPE_VECTOR2 : s instanceof ze ? o = nt.ANIMATIONTYPE_COLOR3 : s instanceof Et ? o = nt.ANIMATIONTYPE_COLOR4 : s instanceof kf && (o = nt.ANIMATIONTYPE_SIZE), o == null) return null; const u = new nt(e, t, i, o, a), h = [ { frame: 0, value: s }, { frame: r, value: n } ]; return u.setKeys(h), l !== void 0 && u.setEasingFunction(l), u; } /** * Sets up an animation * @param property The property to animate * @param animationType The animation type to apply * @param framePerSecond The frames per second of the animation * @param easingFunction The easing function used in the animation * @returns The created animation */ static CreateAnimation(e, t, i, r) { const s = new nt(e + "Animation", e, i, t, nt.ANIMATIONLOOPMODE_CONSTANT); return s.setEasingFunction(r), s; } /** * Create and start an animation on a node * @param name defines the name of the global animation that will be run on all nodes * @param target defines the target where the animation will take place * @param targetProperty defines property to animate * @param framePerSecond defines the number of frame per second yo use * @param totalFrame defines the number of frames in total * @param from defines the initial value * @param to defines the final value * @param loopMode defines which loop mode you want to use (off by default) * @param easingFunction defines the easing function to use (linear by default) * @param onAnimationEnd defines the callback to call when animation end * @param scene defines the hosting scene * @returns the animatable created for this animation */ static CreateAndStartAnimation(e, t, i, r, s, n, a, l, o, u, h) { const d = nt._PrepareAnimation(e, i, r, s, n, a, l, o); return !d || (t.getScene && (h = t.getScene()), !h) ? 
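/*
 * Usage sketch for the request wrapper above (apparently Babylon.js's WebRequest,
 * an XMLHttpRequest wrapper with global header and URL modifiers). Illustrative
 * only, assuming the public "@babylonjs/core" API; the token value is a placeholder.
 *
 *   import { WebRequest } from "@babylonjs/core";
 *
 *   // Attach an auth header to every asset request made through WebRequest.
 *   WebRequest.CustomRequestHeaders["Authorization"] = "Bearer <token>";
 *   // Inspect or adjust each request before it is opened.
 *   WebRequest.CustomRequestModifiers.push((xhr, url) => {
 *     console.log("loading", url);
 *   });
 *   // Requests to the Babylon CDN skip these modifiers unless this flag is set to false.
 *   WebRequest.SkipRequestModificationForBabylonCDN = true;
 */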
null : h.beginDirectAnimation(t, [d], 0, s, d.loopMode === 1, 1, u); } /** * Create and start an animation on a node and its descendants * @param name defines the name of the global animation that will be run on all nodes * @param node defines the root node where the animation will take place * @param directDescendantsOnly if true only direct descendants will be used, if false direct and also indirect (children of children, an so on in a recursive manner) descendants will be used * @param targetProperty defines property to animate * @param framePerSecond defines the number of frame per second to use * @param totalFrame defines the number of frames in total * @param from defines the initial value * @param to defines the final value * @param loopMode defines which loop mode you want to use (off by default) * @param easingFunction defines the easing function to use (linear by default) * @param onAnimationEnd defines the callback to call when an animation ends (will be called once per node) * @returns the list of animatables created for all nodes * @example https://www.babylonjs-playground.com/#MH0VLI */ static CreateAndStartHierarchyAnimation(e, t, i, r, s, n, a, l, o, u, h) { const d = nt._PrepareAnimation(e, r, s, n, a, l, o, u); return d ? t.getScene().beginDirectHierarchyAnimation(t, i, [d], 0, n, d.loopMode === 1, 1, h) : null; } /** * Creates a new animation, merges it with the existing animations and starts it * @param name Name of the animation * @param node Node which contains the scene that begins the animations * @param targetProperty Specifies which property to animate * @param framePerSecond The frames per second of the animation * @param totalFrame The total number of frames * @param from The frame at the beginning of the animation * @param to The frame at the end of the animation * @param loopMode Specifies the loop mode of the animation * @param easingFunction (Optional) The easing function of the animation, which allow custom mathematical formulas for animations * @param onAnimationEnd Callback to run once the animation is complete * @returns Nullable animation */ static CreateMergeAndStartAnimation(e, t, i, r, s, n, a, l, o, u) { const h = nt._PrepareAnimation(e, i, r, s, n, a, l, o); return h ? (t.animations.push(h), t.getScene().beginAnimation(t, 0, s, h.loopMode === 1, 1, u)) : null; } /** @internal */ static MakeAnimationAdditive(e, t, i, r = !1, s) { var n, a; let l; typeof t == "object" ? l = t : l = { referenceFrame: t ?? 0, range: i, cloneOriginalAnimation: r, clonedAnimationName: s }; let o = e; if (l.cloneOriginalAnimation && (o = e.clone(), o.name = l.clonedAnimationName || o.name), !o._keys.length) return o; const u = l.referenceFrame && l.referenceFrame >= 0 ? l.referenceFrame : 0; let h = 0; const d = o._keys[0]; let f = o._keys.length - 1; const p = o._keys[f], m = { referenceValue: d.value, referencePosition: de.Vector3[0], referenceQuaternion: de.Quaternion[0], referenceScaling: de.Vector3[1], keyPosition: de.Vector3[2], keyQuaternion: de.Quaternion[1], keyScaling: de.Vector3[3] }; let _ = d.frame, v = p.frame; if (l.range) { const b = o.getRange(l.range); b && (_ = b.from, v = b.to); } else _ = (n = l.fromFrame) !== null && n !== void 0 ? n : _, v = (a = l.toFrame) !== null && a !== void 0 ? a : v; if (_ !== d.frame && (h = o.createKeyForFrame(_)), v !== p.frame && (f = o.createKeyForFrame(v)), o._keys.length === 1) { const b = o._getKeyValue(o._keys[0]); m.referenceValue = b.clone ? 
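/*
 * Usage sketch for Animation.CreateAndStartAnimation above: it builds a two-key
 * animation between the `from` and `to` values and immediately starts it on the
 * target. Illustrative only, assuming the public "@babylonjs/core" API; `box` is a
 * hypothetical mesh attached to an existing scene.
 *
 *   import { Animation } from "@babylonjs/core";
 *
 *   Animation.CreateAndStartAnimation(
 *     "raiseY",            // name
 *     box,                 // target
 *     "position.y",        // property to animate
 *     30,                  // frames per second
 *     60,                  // total frames (2 seconds at 30 fps)
 *     box.position.y,      // from
 *     box.position.y + 2,  // to
 *     Animation.ANIMATIONLOOPMODE_CONSTANT
 *   );
 */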
b.clone() : b; } else if (u <= d.frame) { const b = o._getKeyValue(d.value); m.referenceValue = b.clone ? b.clone() : b; } else if (u >= p.frame) { const b = o._getKeyValue(p.value); m.referenceValue = b.clone ? b.clone() : b; } else { FA.key = 0; const b = o._interpolate(u, FA); m.referenceValue = b.clone ? b.clone() : b; } o.dataType === nt.ANIMATIONTYPE_QUATERNION ? m.referenceValue.normalize().conjugateInPlace() : o.dataType === nt.ANIMATIONTYPE_MATRIX && (m.referenceValue.decompose(m.referenceScaling, m.referenceQuaternion, m.referencePosition), m.referenceQuaternion.normalize().conjugateInPlace()); let C = Number.MAX_VALUE; const x = l.clipKeys ? [] : null; for (let b = h; b <= f; b++) { let S = o._keys[b]; if (x && (S = { frame: S.frame, value: S.value.clone ? S.value.clone() : S.value, inTangent: S.inTangent, outTangent: S.outTangent, interpolation: S.interpolation, lockedTangent: S.lockedTangent }, C === Number.MAX_VALUE && (C = S.frame), S.frame -= C, x.push(S)), !(b && o.dataType !== nt.ANIMATIONTYPE_FLOAT && S.value === d.value)) switch (o.dataType) { case nt.ANIMATIONTYPE_MATRIX: S.value.decompose(m.keyScaling, m.keyQuaternion, m.keyPosition), m.keyPosition.subtractInPlace(m.referencePosition), m.keyScaling.divideInPlace(m.referenceScaling), m.referenceQuaternion.multiplyToRef(m.keyQuaternion, m.keyQuaternion), Ae.ComposeToRef(m.keyScaling, m.keyQuaternion, m.keyPosition, S.value); break; case nt.ANIMATIONTYPE_QUATERNION: m.referenceValue.multiplyToRef(S.value, S.value); break; case nt.ANIMATIONTYPE_VECTOR2: case nt.ANIMATIONTYPE_VECTOR3: case nt.ANIMATIONTYPE_COLOR3: case nt.ANIMATIONTYPE_COLOR4: S.value.subtractToRef(m.referenceValue, S.value); break; case nt.ANIMATIONTYPE_SIZE: S.value.width -= m.referenceValue.width, S.value.height -= m.referenceValue.height; break; default: S.value -= m.referenceValue; } } return x && o.setKeys(x, !0), o; } /** * Transition property of an host to the target Value * @param property The property to transition * @param targetValue The target Value of the property * @param host The object where the property to animate belongs * @param scene Scene used to run the animation * @param frameRate Framerate (in frame/s) to use * @param transition The transition type we want to use * @param duration The duration of the animation, in milliseconds * @param onAnimationEnd Callback trigger at the end of the animation * @returns Nullable animation */ static TransitionTo(e, t, i, r, s, n, a, l = null) { if (a <= 0) return i[e] = t, l && l(), null; const o = s * (a / 1e3); n.setKeys([ { frame: 0, value: i[e].clone ? 
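/*
 * Usage sketch for Animation.MakeAnimationAdditive above: it rebases an animation's
 * keys against a reference frame so the result can be blended additively on top of
 * another animation. The options-object form shown here mirrors the branch in the
 * code above that accepts an object as the second argument; illustrative only, and
 * `idleAnim` is a hypothetical existing Animation.
 *
 *   import { Animation } from "@babylonjs/core";
 *
 *   const additive = Animation.MakeAnimationAdditive(idleAnim, {
 *     referenceFrame: 0,             // pose that every key is measured against
 *     cloneOriginalAnimation: true,  // keep idleAnim untouched
 *     clonedAnimationName: "idle_additive",
 *   });
 */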
i[e].clone() : i[e] }, { frame: o, value: t } ]), i.animations || (i.animations = []), i.animations.push(n); const u = r.beginAnimation(i, 0, o, !1); return u.onAnimationEnd = l, u; } /** * Return the array of runtime animations currently using this animation */ get runtimeAnimations() { return this._runtimeAnimations; } /** * Specifies if any of the runtime animations are currently running */ get hasRunningRuntimeAnimations() { for (const e of this._runtimeAnimations) if (!e.isStopped()) return !0; return !1; } /** * Initializes the animation * @param name Name of the animation * @param targetProperty Property to animate * @param framePerSecond The frames per second of the animation * @param dataType The data type of the animation * @param loopMode The loop mode of the animation * @param enableBlending Specifies if blending should be enabled */ constructor(e, t, i, r, s, n) { this.name = e, this.targetProperty = t, this.framePerSecond = i, this.dataType = r, this.loopMode = s, this.enableBlending = n, this._easingFunction = null, this._runtimeAnimations = new Array(), this._events = new Array(), this.blendingSpeed = 0.01, this._ranges = {}, this.targetPropertyPath = t.split("."), this.dataType = r, this.loopMode = s === void 0 ? nt.ANIMATIONLOOPMODE_CYCLE : s, this.uniqueId = nt._UniqueIdGenerator++; } // Methods /** * Converts the animation to a string * @param fullDetails support for multiple levels of logging within scene loading * @returns String form of the animation */ toString(e) { let t = "Name: " + this.name + ", property: " + this.targetProperty; if (t += ", datatype: " + ["Float", "Vector3", "Quaternion", "Matrix", "Color3", "Vector2"][this.dataType], t += ", nKeys: " + (this._keys ? this._keys.length : "none"), t += ", nRanges: " + (this._ranges ? 
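/*
 * Usage sketch for the Animation constructor above: an animation is created with a
 * name, target property, frame rate, data type and loop mode, then given key frames
 * via setKeys and started through the scene. Illustrative only, assuming the public
 * "@babylonjs/core" API; `box` is a hypothetical mesh and `scene` an existing Scene.
 *
 *   import { Animation } from "@babylonjs/core";
 *
 *   const spin = new Animation("spin", "rotation.y", 30,
 *     Animation.ANIMATIONTYPE_FLOAT, Animation.ANIMATIONLOOPMODE_CYCLE);
 *   spin.setKeys([
 *     { frame: 0, value: 0 },
 *     { frame: 60, value: Math.PI * 2 },
 *   ]);
 *   box.animations.push(spin);
 *   scene.beginAnimation(box, 0, 60, true);   // loop the full frame range
 */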
Object.keys(this._ranges).length : "none"), e) { t += ", Ranges: {"; let i = !0; for (const r in this._ranges) i && (t += ", ", i = !1), t += r; t += "}"; } return t; } /** * Add an event to this animation * @param event Event to add */ addEvent(e) { this._events.push(e), this._events.sort((t, i) => t.frame - i.frame); } /** * Remove all events found at the given frame * @param frame The frame to remove events from */ removeEvents(e) { for (let t = 0; t < this._events.length; t++) this._events[t].frame === e && (this._events.splice(t, 1), t--); } /** * Retrieves all the events from the animation * @returns Events from the animation */ getEvents() { return this._events; } /** * Creates an animation range * @param name Name of the animation range * @param from Starting frame of the animation range * @param to Ending frame of the animation */ createRange(e, t, i) { this._ranges[e] || (this._ranges[e] = new hP(e, t, i)); } /** * Deletes an animation range by name * @param name Name of the animation range to delete * @param deleteFrames Specifies if the key frames for the range should also be deleted (true) or not (false) */ deleteRange(e, t = !0) { const i = this._ranges[e]; if (i) { if (t) { const r = i.from, s = i.to; for (let n = this._keys.length - 1; n >= 0; n--) this._keys[n].frame >= r && this._keys[n].frame <= s && this._keys.splice(n, 1); } this._ranges[e] = null; } } /** * Gets the animation range by name, or null if not defined * @param name Name of the animation range * @returns Nullable animation range */ getRange(e) { return this._ranges[e]; } /** * Gets the key frames from the animation * @returns The key frames of the animation */ getKeys() { return this._keys; } /** * Gets the highest frame rate of the animation * @returns Highest frame rate of the animation */ getHighestFrame() { let e = 0; for (let t = 0, i = this._keys.length; t < i; t++) e < this._keys[t].frame && (e = this._keys[t].frame); return e; } /** * Gets the easing function of the animation * @returns Easing function of the animation */ getEasingFunction() { return this._easingFunction; } /** * Sets the easing function of the animation * @param easingFunction A custom mathematical formula for animation */ setEasingFunction(e) { this._easingFunction = e; } /** * Interpolates a scalar linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated scalar value */ floatInterpolateFunction(e, t, i) { return yt.Lerp(e, t, i); } /** * Interpolates a scalar cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation * @param endValue End value of the animation curve * @param inTangent Start tangent of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated scalar value */ floatInterpolateFunctionWithTangents(e, t, i, r, s) { return yt.Hermite(e, t, i, r, s); } /** * Interpolates a quaternion using a spherical linear interpolation * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated quaternion value */ quaternionInterpolateFunction(e, t, i) { return Ze.Slerp(e, t, i); } /** * Interpolates a quaternion cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation curve * @param endValue End value of the animation curve * @param inTangent Start 
tangent of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated quaternion value */ quaternionInterpolateFunctionWithTangents(e, t, i, r, s) { return Ze.Hermite(e, t, i, r, s).normalize(); } /** * Interpolates a Vector3 linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate (value between 0 and 1) * @returns Interpolated scalar value */ vector3InterpolateFunction(e, t, i) { return D.Lerp(e, t, i); } /** * Interpolates a Vector3 cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation * @param endValue End value of the animation curve * @param inTangent Start tangent of the animation curve * @param gradient Scalar amount to interpolate (value between 0 and 1) * @returns InterpolatedVector3 value */ vector3InterpolateFunctionWithTangents(e, t, i, r, s) { return D.Hermite(e, t, i, r, s); } /** * Interpolates a Vector2 linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate (value between 0 and 1) * @returns Interpolated Vector2 value */ vector2InterpolateFunction(e, t, i) { return at.Lerp(e, t, i); } /** * Interpolates a Vector2 cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation * @param endValue End value of the animation curve * @param inTangent Start tangent of the animation curve * @param gradient Scalar amount to interpolate (value between 0 and 1) * @returns Interpolated Vector2 value */ vector2InterpolateFunctionWithTangents(e, t, i, r, s) { return at.Hermite(e, t, i, r, s); } /** * Interpolates a size linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated Size value */ sizeInterpolateFunction(e, t, i) { return kf.Lerp(e, t, i); } /** * Interpolates a Color3 linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated Color3 value */ color3InterpolateFunction(e, t, i) { return ze.Lerp(e, t, i); } /** * Interpolates a Color3 cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation * @param endValue End value of the animation curve * @param inTangent Start tangent of the animation curve * @param gradient Scalar amount to interpolate * @returns interpolated value */ color3InterpolateFunctionWithTangents(e, t, i, r, s) { return ze.Hermite(e, t, i, r, s); } /** * Interpolates a Color4 linearly * @param startValue Start value of the animation curve * @param endValue End value of the animation curve * @param gradient Scalar amount to interpolate * @returns Interpolated Color3 value */ color4InterpolateFunction(e, t, i) { return Et.Lerp(e, t, i); } /** * Interpolates a Color4 cubically * @param startValue Start value of the animation curve * @param outTangent End tangent of the animation * @param endValue End value of the animation curve * @param inTangent Start tangent of the animation curve * @param gradient Scalar amount to interpolate * @returns interpolated value */ color4InterpolateFunctionWithTangents(e, t, i, r, s) { return Et.Hermite(e, t, i, r, s); } /** * @internal Internal use only */ _getKeyValue(e) { return 
typeof e == "function" ? e() : e; } /** * Evaluate the animation value at a given frame * @param currentFrame defines the frame where we want to evaluate the animation * @returns the animation value */ evaluate(e) { return FA.key = 0, this._interpolate(e, FA); } /** * @internal Internal use only */ _interpolate(e, t, i = !1) { var r; if (t.loopMode === nt.ANIMATIONLOOPMODE_CONSTANT && t.repeatCount > 0) return t.highLimitValue.clone ? t.highLimitValue.clone() : t.highLimitValue; const s = this._keys, n = s.length; let a = t.key; for (; a >= 0 && e < s[a].frame; ) --a; for (; a + 1 <= n - 1 && e >= s[a + 1].frame; ) ++a; if (t.key = a, a < 0) return i ? void 0 : this._getKeyValue(s[0].value); if (a + 1 > n - 1) return i ? void 0 : this._getKeyValue(s[n - 1].value); const l = s[a], o = s[a + 1]; if (i && (e === l.frame || e === o.frame)) return; const u = this._getKeyValue(l.value), h = this._getKeyValue(o.value); if (l.interpolation === $9.STEP) return o.frame > e ? u : h; const d = l.outTangent !== void 0 && o.inTangent !== void 0, f = o.frame - l.frame; let p = (e - l.frame) / f; const m = l.easingFunction || this.getEasingFunction(); switch (m !== null && (p = m.ease(p)), this.dataType) { case nt.ANIMATIONTYPE_FLOAT: { const _ = d ? this.floatInterpolateFunctionWithTangents(u, l.outTangent * f, h, o.inTangent * f, p) : this.floatInterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return ((r = t.offsetValue) !== null && r !== void 0 ? r : 0) * t.repeatCount + _; } break; } case nt.ANIMATIONTYPE_QUATERNION: { const _ = d ? this.quaternionInterpolateFunctionWithTangents(u, l.outTangent.scale(f), h, o.inTangent.scale(f), p) : this.quaternionInterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return _.addInPlace((t.offsetValue || ZG).scale(t.repeatCount)); } return _; } case nt.ANIMATIONTYPE_VECTOR3: { const _ = d ? this.vector3InterpolateFunctionWithTangents(u, l.outTangent.scale(f), h, o.inTangent.scale(f), p) : this.vector3InterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return _.add((t.offsetValue || qG).scale(t.repeatCount)); } break; } case nt.ANIMATIONTYPE_VECTOR2: { const _ = d ? 
this.vector2InterpolateFunctionWithTangents(u, l.outTangent.scale(f), h, o.inTangent.scale(f), p) : this.vector2InterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return _.add((t.offsetValue || JG).scale(t.repeatCount)); } break; } case nt.ANIMATIONTYPE_SIZE: { switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return this.sizeInterpolateFunction(u, h, p); case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return this.sizeInterpolateFunction(u, h, p).add((t.offsetValue || eK).scale(t.repeatCount)); } break; } case nt.ANIMATIONTYPE_COLOR3: { const _ = d ? this.color3InterpolateFunctionWithTangents(u, l.outTangent.scale(f), h, o.inTangent.scale(f), p) : this.color3InterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return _.add((t.offsetValue || tK).scale(t.repeatCount)); } break; } case nt.ANIMATIONTYPE_COLOR4: { const _ = d ? this.color4InterpolateFunctionWithTangents(u, l.outTangent.scale(f), h, o.inTangent.scale(f), p) : this.color4InterpolateFunction(u, h, p); switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return _; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return _.add((t.offsetValue || iK).scale(t.repeatCount)); } break; } case nt.ANIMATIONTYPE_MATRIX: { switch (t.loopMode) { case nt.ANIMATIONLOOPMODE_CYCLE: case nt.ANIMATIONLOOPMODE_CONSTANT: case nt.ANIMATIONLOOPMODE_YOYO: return nt.AllowMatricesInterpolation ? this.matrixInterpolateFunction(u, h, p, t.workValue) : u; case nt.ANIMATIONLOOPMODE_RELATIVE: case nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT: return u; } break; } } return 0; } /** * Defines the function to use to interpolate matrices * @param startValue defines the start matrix * @param endValue defines the end matrix * @param gradient defines the gradient between both matrices * @param result defines an optional target matrix where to store the interpolation * @returns the interpolated matrix */ matrixInterpolateFunction(e, t, i, r) { return nt.AllowMatrixDecomposeForInterpolation ? r ? (Ae.DecomposeLerpToRef(e, t, i, r), r) : Ae.DecomposeLerp(e, t, i) : r ? (Ae.LerpToRef(e, t, i, r), r) : Ae.Lerp(e, t, i); } /** * Makes a copy of the animation * @returns Cloned animation */ clone() { const e = new nt(this.name, this.targetPropertyPath.join("."), this.framePerSecond, this.dataType, this.loopMode); if (e.enableBlending = this.enableBlending, e.blendingSpeed = this.blendingSpeed, this._keys && e.setKeys(this._keys), this._ranges) { e._ranges = {}; for (const t in this._ranges) { const i = this._ranges[t]; i && (e._ranges[t] = i.clone()); } } return e; } /** * Sets the key frames of the animation * @param values The animation key frames to set * @param dontClone Whether to clone the keys or not (default is false, so the array of keys is cloned) */ setKeys(e, t = !1) { this._keys = t ? 
e : e.slice(0); } /** * Creates a key for the frame passed as a parameter and adds it to the animation IF a key doesn't already exist for that frame * @param frame Frame number * @returns The key index if the key was added or the index of the pre existing key if the frame passed as parameter already has a corresponding key */ createKeyForFrame(e) { FA.key = 0; const t = this._interpolate(e, FA, !0); if (!t) return this._keys[FA.key].frame === e ? FA.key : FA.key + 1; const i = { frame: e, value: t.clone ? t.clone() : t }; return this._keys.splice(FA.key + 1, 0, i), FA.key + 1; } /** * Serializes the animation to an object * @returns Serialized object */ serialize() { const e = {}; e.name = this.name, e.property = this.targetProperty, e.framePerSecond = this.framePerSecond, e.dataType = this.dataType, e.loopBehavior = this.loopMode, e.enableBlending = this.enableBlending, e.blendingSpeed = this.blendingSpeed; const t = this.dataType; e.keys = []; const i = this.getKeys(); for (let r = 0; r < i.length; r++) { const s = i[r], n = {}; switch (n.frame = s.frame, t) { case nt.ANIMATIONTYPE_FLOAT: n.values = [s.value], s.inTangent !== void 0 && n.values.push(s.inTangent), s.outTangent !== void 0 && (s.inTangent === void 0 && n.values.push(void 0), n.values.push(s.outTangent)), s.interpolation !== void 0 && (s.inTangent === void 0 && n.values.push(void 0), s.outTangent === void 0 && n.values.push(void 0), n.values.push(s.interpolation)); break; case nt.ANIMATIONTYPE_QUATERNION: case nt.ANIMATIONTYPE_MATRIX: case nt.ANIMATIONTYPE_VECTOR3: case nt.ANIMATIONTYPE_COLOR3: case nt.ANIMATIONTYPE_COLOR4: n.values = s.value.asArray(), s.inTangent != null && n.values.push(s.inTangent.asArray()), s.outTangent != null && (s.inTangent === void 0 && n.values.push(void 0), n.values.push(s.outTangent.asArray())), s.interpolation !== void 0 && (s.inTangent === void 0 && n.values.push(void 0), s.outTangent === void 0 && n.values.push(void 0), n.values.push(s.interpolation)); break; } e.keys.push(n); } e.ranges = []; for (const r in this._ranges) { const s = this._ranges[r]; if (!s) continue; const n = {}; n.name = r, n.from = s.from, n.to = s.to, e.ranges.push(n); } return e; } /** * @internal */ static _UniversalLerp(e, t, i) { const r = e.constructor; return r.Lerp ? r.Lerp(e, t, i) : r.Slerp ? r.Slerp(e, t, i) : e.toFixed ? 
e * (1 - i) + i * t : t; } /** * Parses an animation object and creates an animation * @param parsedAnimation Parsed animation object * @returns Animation object */ static Parse(e) { const t = new nt(e.name, e.property, e.framePerSecond, e.dataType, e.loopBehavior), i = e.dataType, r = []; let s, n; for (e.enableBlending && (t.enableBlending = e.enableBlending), e.blendingSpeed && (t.blendingSpeed = e.blendingSpeed), n = 0; n < e.keys.length; n++) { const a = e.keys[n]; let l, o, u; switch (i) { case nt.ANIMATIONTYPE_FLOAT: s = a.values[0], a.values.length >= 2 && (l = a.values[1]), a.values.length >= 3 && (o = a.values[2]), a.values.length >= 4 && (u = a.values[3]); break; case nt.ANIMATIONTYPE_QUATERNION: if (s = Ze.FromArray(a.values), a.values.length >= 8) { const d = Ze.FromArray(a.values.slice(4, 8)); d.equals(Ze.Zero()) || (l = d); } if (a.values.length >= 12) { const d = Ze.FromArray(a.values.slice(8, 12)); d.equals(Ze.Zero()) || (o = d); } a.values.length >= 13 && (u = a.values[12]); break; case nt.ANIMATIONTYPE_MATRIX: s = Ae.FromArray(a.values), a.values.length >= 17 && (u = a.values[16]); break; case nt.ANIMATIONTYPE_COLOR3: s = ze.FromArray(a.values), a.values[3] && (l = ze.FromArray(a.values[3])), a.values[4] && (o = ze.FromArray(a.values[4])), a.values[5] && (u = a.values[5]); break; case nt.ANIMATIONTYPE_COLOR4: s = Et.FromArray(a.values), a.values[4] && (l = Et.FromArray(a.values[4])), a.values[5] && (o = Et.FromArray(a.values[5])), a.values[6] && (u = Et.FromArray(a.values[6])); break; case nt.ANIMATIONTYPE_VECTOR3: default: s = D.FromArray(a.values), a.values[3] && (l = D.FromArray(a.values[3])), a.values[4] && (o = D.FromArray(a.values[4])), a.values[5] && (u = a.values[5]); break; } const h = {}; h.frame = a.frame, h.value = s, l != null && (h.inTangent = l), o != null && (h.outTangent = o), u != null && (h.interpolation = u), r.push(h); } if (t.setKeys(r), e.ranges) for (n = 0; n < e.ranges.length; n++) s = e.ranges[n], t.createRange(s.name, s.from, s.to); return t; } /** * Appends the serialized animations from the source animations * @param source Source containing the animations * @param destination Target to store the animations */ static AppendSerializedAnimations(e, t) { St.AppendSerializedAnimations(e, t); } /** * Creates a new animation or an array of animations from a snippet saved in a remote file * @param name defines the name of the animation to create (can be null or empty to use the one from the json data) * @param url defines the url to load from * @returns a promise that will resolve to the new animation or an array of animations */ static ParseFromFileAsync(e, t) { return new Promise((i, r) => { const s = new go(); s.addEventListener("readystatechange", () => { if (s.readyState == 4) if (s.status == 200) { let n = JSON.parse(s.responseText); if (n.animations && (n = n.animations), n.length) { const a = []; for (const l of n) a.push(this.Parse(l)); i(a); } else { const a = this.Parse(n); e && (a.name = e), i(a); } } else r("Unable to load the animation"); }), s.open("GET", t), s.send(); }); } /** * Creates an animation or an array of animations from a snippet saved by the Inspector * @param snippetId defines the snippet to load * @returns a promise that will resolve to the new animation or a new array of animations */ static ParseFromSnippetAsync(e) { return new Promise((t, i) => { const r = new go(); r.addEventListener("readystatechange", () => { if (r.readyState == 4) if (r.status == 200) { const s = 
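/*
 * Usage sketch for the loaders above: serialized animations can be re-created from
 * a JSON file or from a snippet id saved by the Inspector. Illustrative only,
 * assuming the public "@babylonjs/core" API; the URL, snippet id, `box` and `scene`
 * are placeholders.
 *
 *   import { Animation } from "@babylonjs/core";
 *
 *   Animation.ParseFromFileAsync("walk", "https://example.com/animations.json")
 *     .then((result) => console.log(result));
 *
 *   Animation.ParseFromSnippetAsync("ABC123").then((parsed) => {
 *     const animations = Array.isArray(parsed) ? parsed : [parsed];
 *     scene.beginDirectAnimation(box, animations, 0, 60, true);
 *   });
 */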
JSON.parse(JSON.parse(r.responseText).jsonPayload); if (s.animations) { const n = JSON.parse(s.animations), a = []; for (const l of n.animations) { const o = this.Parse(l); o.snippetId = e, a.push(o); } t(a); } else { const n = JSON.parse(s.animation), a = this.Parse(n); a.snippetId = e, t(a); } } else i("Unable to load the snippet " + e); }), r.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), r.send(); }); } } nt._UniqueIdGenerator = 0; nt.AllowMatricesInterpolation = !1; nt.AllowMatrixDecomposeForInterpolation = !0; nt.SnippetUrl = "https://snippet.babylonjs.com"; nt.ANIMATIONTYPE_FLOAT = 0; nt.ANIMATIONTYPE_VECTOR3 = 1; nt.ANIMATIONTYPE_QUATERNION = 2; nt.ANIMATIONTYPE_MATRIX = 3; nt.ANIMATIONTYPE_COLOR3 = 4; nt.ANIMATIONTYPE_COLOR4 = 7; nt.ANIMATIONTYPE_VECTOR2 = 5; nt.ANIMATIONTYPE_SIZE = 6; nt.ANIMATIONLOOPMODE_RELATIVE = 0; nt.ANIMATIONLOOPMODE_CYCLE = 1; nt.ANIMATIONLOOPMODE_CONSTANT = 2; nt.ANIMATIONLOOPMODE_YOYO = 4; nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT = 5; nt.CreateFromSnippetAsync = nt.ParseFromSnippetAsync; Be("BABYLON.Animation", nt); In._AnimationRangeFactory = (c, e, t) => new hP(c, e, t); class tte extends Sa { /** * Instantiate the action * @param triggerOptions defines the trigger options * @param target defines the object containing the value to interpolate * @param propertyPath defines the path to the property in the target object * @param value defines the target value at the end of the interpolation * @param duration defines the time it will take for the property to interpolate to the value. * @param condition defines the trigger related conditions * @param stopOtherAnimations defines if the other scene animations should be stopped when the action has been triggered * @param onInterpolationDone defines a callback raised once the interpolation animation has been done */ constructor(e, t, i, r, s = 1e3, n, a, l) { super(e, n), this.duration = 1e3, this.onInterpolationDoneObservable = new Fe(), this.propertyPath = i, this.value = r, this.duration = s, this.stopOtherAnimations = a, this.onInterpolationDone = l, this._target = this._effectiveTarget = t; } /** @internal */ _prepare() { this._effectiveTarget = this._getEffectiveTarget(this._effectiveTarget, this.propertyPath), this._property = this._getProperty(this.propertyPath); } /** * Execute the action starts the value interpolation. */ execute() { const e = this._actionManager.getScene(), t = [ { frame: 0, value: this._effectiveTarget[this._property] }, { frame: 100, value: this.value } ]; let i; if (typeof this.value == "number") i = nt.ANIMATIONTYPE_FLOAT; else if (this.value instanceof ze) i = nt.ANIMATIONTYPE_COLOR3; else if (this.value instanceof D) i = nt.ANIMATIONTYPE_VECTOR3; else if (this.value instanceof Ae) i = nt.ANIMATIONTYPE_MATRIX; else if (this.value instanceof Ze) i = nt.ANIMATIONTYPE_QUATERNION; else { Ce.Warn("InterpolateValueAction: Unsupported type (" + typeof this.value + ")"); return; } const r = new nt("InterpolateValueAction", this._property, 100 * (1e3 / this.duration), i, nt.ANIMATIONLOOPMODE_CONSTANT); r.setKeys(t), this.stopOtherAnimations && e.stopAnimation(this._effectiveTarget); const s = () => { this.onInterpolationDoneObservable.notifyObservers(this), this.onInterpolationDone && this.onInterpolationDone(); }; e.beginDirectAnimation(this._effectiveTarget, [r], 0, 100, !1, 1, s); } /** * Serializes the actions and its related information. 
* @param parent defines the object to serialize in * @returns the serialized object */ serialize(e) { return super._serialize({ name: "InterpolateValueAction", properties: [ Sa._GetTargetProperty(this._target), { name: "propertyPath", value: this.propertyPath }, { name: "value", value: Sa._SerializeValueAsString(this.value) }, { name: "duration", value: Sa._SerializeValueAsString(this.duration) }, { name: "stopOtherAnimations", value: Sa._SerializeValueAsString(this.stopOtherAnimations) || !1 } ] }, e); } } Be("BABYLON.InterpolateValueAction", tte); class ite { /** * Gets the current frame of the runtime animation */ get currentFrame() { return this._currentFrame; } /** * Gets the weight of the runtime animation */ get weight() { return this._weight; } /** * Gets the current value of the runtime animation */ get currentValue() { return this._currentValue; } /** * Gets or sets the target path of the runtime animation */ get targetPath() { return this._targetPath; } /** * Gets the actual target of the runtime animation */ get target() { return this._currentActiveTarget; } /** * Gets the additive state of the runtime animation */ get isAdditive() { return this._host && this._host.isAdditive; } /** * Create a new RuntimeAnimation object * @param target defines the target of the animation * @param animation defines the source animation object * @param scene defines the hosting scene * @param host defines the initiating Animatable */ constructor(e, t, i, r) { if (this._events = new Array(), this._currentFrame = 0, this._originalValue = new Array(), this._originalBlendValue = null, this._offsetsCache = {}, this._highLimitsCache = {}, this._stopped = !1, this._blendingFactor = 0, this._currentValue = null, this._currentActiveTarget = null, this._directTarget = null, this._targetPath = "", this._weight = 1, this._absoluteFrameOffset = 0, this._previousElapsedTime = 0, this._previousAbsoluteFrame = 0, this._targetIsArray = !1, this._animation = t, this._target = e, this._scene = i, this._host = r, this._activeTargets = [], t._runtimeAnimations.push(this), this._animationState = { key: 0, repeatCount: 0, loopMode: this._getCorrectLoopMode() }, this._animation.dataType === nt.ANIMATIONTYPE_MATRIX && (this._animationState.workValue = Ae.Zero()), this._keys = this._animation.getKeys(), this._minFrame = this._keys[0].frame, this._maxFrame = this._keys[this._keys.length - 1].frame, this._minValue = this._keys[0].value, this._maxValue = this._keys[this._keys.length - 1].value, this._minFrame !== 0) { const n = { frame: 0, value: this._minValue }; this._keys.splice(0, 0, n); } if (this._target instanceof Array) { let n = 0; for (const a of this._target) this._preparePath(a, n), this._getOriginalValues(n), n++; this._targetIsArray = !0; } else this._preparePath(this._target), this._getOriginalValues(), this._targetIsArray = !1, this._directTarget = this._activeTargets[0]; const s = t.getEvents(); s && s.length > 0 && s.forEach((n) => { this._events.push(n._clone()); }), this._enableBlending = e && e.animationPropertiesOverride ? 
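// A target-level animationPropertiesOverride takes precedence over the animation's own enableBlending setting.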
e.animationPropertiesOverride.enableBlending : this._animation.enableBlending; } _preparePath(e, t = 0) { const i = this._animation.targetPropertyPath; if (i.length > 1) { let r = e[i[0]]; for (let s = 1; s < i.length - 1; s++) r = r[i[s]]; this._targetPath = i[i.length - 1], this._activeTargets[t] = r; } else this._targetPath = i[0], this._activeTargets[t] = e; } /** * Gets the animation from the runtime animation */ get animation() { return this._animation; } /** * Resets the runtime animation to the beginning * @param restoreOriginal defines whether to restore the target property to the original value */ reset(e = !1) { if (e) if (this._target instanceof Array) { let t = 0; for (const i of this._target) this._originalValue[t] !== void 0 && this._setValue(i, this._activeTargets[t], this._originalValue[t], -1, t), t++; } else this._originalValue[0] !== void 0 && this._setValue(this._target, this._directTarget, this._originalValue[0], -1, 0); this._offsetsCache = {}, this._highLimitsCache = {}, this._currentFrame = 0, this._blendingFactor = 0; for (let t = 0; t < this._events.length; t++) this._events[t].isDone = !1; } /** * Specifies if the runtime animation is stopped * @returns Boolean specifying if the runtime animation is stopped */ isStopped() { return this._stopped; } /** * Disposes of the runtime animation */ dispose() { const e = this._animation.runtimeAnimations.indexOf(this); e > -1 && this._animation.runtimeAnimations.splice(e, 1); } /** * Apply the interpolated value to the target * @param currentValue defines the value computed by the animation * @param weight defines the weight to apply to this value (Defaults to 1.0) */ setValue(e, t) { if (this._targetIsArray) { for (let i = 0; i < this._target.length; i++) { const r = this._target[i]; this._setValue(r, this._activeTargets[i], e, t, i); } return; } this._setValue(this._target, this._directTarget, e, t, 0); } _getOriginalValues(e = 0) { let t; const i = this._activeTargets[e]; i.getLocalMatrix && this._targetPath === "_matrix" ? t = i.getLocalMatrix() : t = i[this._targetPath], t && t.clone ? this._originalValue[e] = t.clone() : this._originalValue[e] = t; } _setValue(e, t, i, r, s) { if (this._currentActiveTarget = t, this._weight = r, this._enableBlending && this._blendingFactor <= 1) { if (!this._originalBlendValue) { const a = t[this._targetPath]; a.clone ? this._originalBlendValue = a.clone() : this._originalBlendValue = a; } this._originalBlendValue.m ? nt.AllowMatrixDecomposeForInterpolation ? this._currentValue ? Ae.DecomposeLerpToRef(this._originalBlendValue, i, this._blendingFactor, this._currentValue) : this._currentValue = Ae.DecomposeLerp(this._originalBlendValue, i, this._blendingFactor) : this._currentValue ? Ae.LerpToRef(this._originalBlendValue, i, this._blendingFactor, this._currentValue) : this._currentValue = Ae.Lerp(this._originalBlendValue, i, this._blendingFactor) : this._currentValue = nt._UniversalLerp(this._originalBlendValue, i, this._blendingFactor); const n = e && e.animationPropertiesOverride ? e.animationPropertiesOverride.blendingSpeed : this._animation.blendingSpeed; this._blendingFactor += n; } else this._currentValue ? this._currentValue.copyFrom ? this._currentValue.copyFrom(i) : this._currentValue = i : i != null && i.clone ? this._currentValue = i.clone() : this._currentValue = i; r !== -1 ? this._scene._registerTargetForLateAnimationBinding(this, this._originalValue[s]) : this._animationState.loopMode === nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT ? this._currentValue.addToRef ? 
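// In ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT the interpolated value is added on top of the target's original value; other loop modes assign it directly, then markAsDirty is notified.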
this._currentValue.addToRef(this._originalValue[s], t[this._targetPath]) : t[this._targetPath] = this._originalValue[s] + this._currentValue : t[this._targetPath] = this._currentValue, e.markAsDirty && e.markAsDirty(this._animation.targetProperty); } /** * Gets the loop pmode of the runtime animation * @returns Loop Mode */ _getCorrectLoopMode() { return this._target && this._target.animationPropertiesOverride ? this._target.animationPropertiesOverride.loopMode : this._animation.loopMode; } /** * Move the current animation to a given frame * @param frame defines the frame to move to */ goToFrame(e) { const t = this._animation.getKeys(); e < t[0].frame ? e = t[0].frame : e > t[t.length - 1].frame && (e = t[t.length - 1].frame); const i = this._events; if (i.length) for (let s = 0; s < i.length; s++) i[s].onlyOnce || (i[s].isDone = i[s].frame < e); this._currentFrame = e; const r = this._animation._interpolate(e, this._animationState); this.setValue(r, -1); } /** * @internal Internal use only */ _prepareForSpeedRatioChange(e) { const t = this._previousElapsedTime * (this._animation.framePerSecond * e) / 1e3; this._absoluteFrameOffset = this._previousAbsoluteFrame - t; } /** * Execute the current animation * @param elapsedTimeSinceAnimationStart defines the elapsed time (in milliseconds) since the animation was started * @param from defines the lower frame of the animation range * @param to defines the upper frame of the animation range * @param loop defines if the current animation must loop * @param speedRatio defines the current speed ratio * @param weight defines the weight of the animation (default is -1 so no weight) * @returns a boolean indicating if the animation is running */ animate(e, t, i, r, s, n = -1) { const a = this._animation, l = a.targetPropertyPath; if (!l || l.length < 1) return this._stopped = !0, !1; let o = !0; (t < this._minFrame || t > this._maxFrame) && (t = this._minFrame), (i < this._minFrame || i > this._maxFrame) && (i = this._maxFrame); const u = i - t; let h, d = e * (a.framePerSecond * s) / 1e3 + this._absoluteFrameOffset, f = 0; if (r && this._animationState.loopMode === nt.ANIMATIONLOOPMODE_YOYO) { const v = (d - t) / u; d = Math.abs(Math.sin(v * Math.PI)) * u + t; } if (this._previousElapsedTime = e, this._previousAbsoluteFrame = d, !r && i >= t && d >= u) o = !1, f = a._getKeyValue(this._maxValue); else if (!r && t >= i && d <= u) o = !1, f = a._getKeyValue(this._minValue); else if (this._animationState.loopMode !== nt.ANIMATIONLOOPMODE_CYCLE) { const v = i.toString() + t.toString(); if (!this._offsetsCache[v]) { this._animationState.repeatCount = 0, this._animationState.loopMode = nt.ANIMATIONLOOPMODE_CYCLE; const C = a._interpolate(t, this._animationState), x = a._interpolate(i, this._animationState); switch (this._animationState.loopMode = this._getCorrectLoopMode(), a.dataType) { case nt.ANIMATIONTYPE_FLOAT: this._offsetsCache[v] = x - C; break; case nt.ANIMATIONTYPE_QUATERNION: this._offsetsCache[v] = x.subtract(C); break; case nt.ANIMATIONTYPE_VECTOR3: this._offsetsCache[v] = x.subtract(C); break; case nt.ANIMATIONTYPE_VECTOR2: this._offsetsCache[v] = x.subtract(C); break; case nt.ANIMATIONTYPE_SIZE: this._offsetsCache[v] = x.subtract(C); break; case nt.ANIMATIONTYPE_COLOR3: this._offsetsCache[v] = x.subtract(C); break; } this._highLimitsCache[v] = x; } f = this._highLimitsCache[v], h = this._offsetsCache[v]; } if (h === void 0) switch (a.dataType) { case nt.ANIMATIONTYPE_FLOAT: h = 0; break; case nt.ANIMATIONTYPE_QUATERNION: h = ZG; break; case 
nt.ANIMATIONTYPE_VECTOR3: h = qG; break; case nt.ANIMATIONTYPE_VECTOR2: h = JG; break; case nt.ANIMATIONTYPE_SIZE: h = eK; break; case nt.ANIMATIONTYPE_COLOR3: h = tK; break; case nt.ANIMATIONTYPE_COLOR4: h = iK; break; } let p; if (this._host && this._host.syncRoot) { const v = this._host.syncRoot, C = (v.masterFrame - v.fromFrame) / (v.toFrame - v.fromFrame); p = t + u * C; } else d > 0 && t > i || d < 0 && t < i ? p = o && u !== 0 ? i + d % u : t : p = o && u !== 0 ? t + d % u : i; const m = this._events; if (s > 0 && this.currentFrame > p || s < 0 && this.currentFrame < p) { this._onLoop(); for (let v = 0; v < m.length; v++) m[v].onlyOnce || (m[v].isDone = !1); this._animationState.key = s > 0 ? 0 : a.getKeys().length - 1; } this._currentFrame = p, this._animationState.repeatCount = u === 0 ? 0 : d / u >> 0, this._animationState.highLimitValue = f, this._animationState.offsetValue = h; const _ = a._interpolate(p, this._animationState); if (this.setValue(_, n), m.length) { for (let v = 0; v < m.length; v++) if (u > 0 && p >= m[v].frame && m[v].frame >= t || u < 0 && p <= m[v].frame && m[v].frame <= t) { const C = m[v]; C.isDone || (C.onlyOnce && (m.splice(v, 1), v--), C.isDone = !0, C.action(p)); } } return o || (this._stopped = !0), o; } } function cu() { return typeof window < "u"; } function GR() { return typeof navigator < "u"; } function qR() { return typeof document < "u"; } function IL(c) { let e = "", t = c.firstChild; for (; t; ) t.nodeType === 3 && (e += t.textContent), t = t.nextSibling; return e; } const Ule = { /** * Checks if the window object exists * @returns true if the window object exists */ IsWindowObjectExist: cu, /** * Checks if the navigator object exists * @returns true if the navigator object exists */ IsNavigatorAvailable: GR, /** * Check if the document object exists * @returns true if the document object exists */ IsDocumentAvailable: qR, /** * Extracts text content from a DOM element hierarchy * @param element defines the root element * @returns a string */ GetDOMTextContent: IL }; class Gs { /** * Gets either window.performance.now() if supported or Date.now() else */ static get Now() { return cu() && window.performance && window.performance.now ? window.performance.now() : Date.now(); } } class JR { } JR.FilesToLoad = {}; class rte { /** * Function used to defines an exponential back off strategy * @param maxRetries defines the maximum number of retries (3 by default) * @param baseInterval defines the interval between retries * @returns the strategy function to use */ static ExponentialBackoff(e = 3, t = 500) { return (i, r, s) => r.status !== 0 || s >= e || i.indexOf("file:") !== -1 ? -1 : Math.pow(2, s) * t; } } class dP extends Error { } dP._setPrototypeOf = Object.setPrototypeOf || ((c, e) => (c.__proto__ = e, c)); const $C = { // Mesh errors 0-999 /** Invalid or empty mesh vertex positions. */ MeshInvalidPositionsError: 0, // Texture errors 1000-1999 /** Unsupported texture found. */ UnsupportedTextureError: 1e3, // GLTFLoader errors 2000-2999 /** Unexpected magic number found in GLTF file header. */ GLTFLoaderUnexpectedMagicError: 2e3, // SceneLoader errors 3000-3999 /** SceneLoader generic error code. Ideally wraps the inner exception. 
*/ SceneLoaderError: 3e3, // File related errors 4000-4999 /** Load file error */ LoadFileError: 4e3, /** Request file error */ RequestFileError: 4001, /** Read file error */ ReadFileError: 4002 }; class F4 extends dP { /** * Creates a new RuntimeError * @param message defines the message of the error * @param errorCode the error code * @param innerError the error that caused the outer error */ constructor(e, t, i) { super(e), this.errorCode = t, this.innerError = i, this.name = "RuntimeError", dP._setPrototypeOf(this, F4.prototype); } } const ste = (c, e) => c.endsWith(e), nte = (c, e) => c ? c.startsWith(e) : !1, rK = (c) => { if (typeof TextDecoder < "u") return new TextDecoder().decode(c); let e = ""; for (let t = 0; t < c.byteLength; t++) e += String.fromCharCode(c[t]); return e; }, DL = (c) => { const e = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="; let t = "", i, r, s, n, a, l, o, u = 0; const h = ArrayBuffer.isView(c) ? new Uint8Array(c.buffer, c.byteOffset, c.byteLength) : new Uint8Array(c); for (; u < h.length; ) i = h[u++], r = u < h.length ? h[u++] : Number.NaN, s = u < h.length ? h[u++] : Number.NaN, n = i >> 2, a = (i & 3) << 4 | r >> 4, l = (r & 15) << 2 | s >> 6, o = s & 63, isNaN(r) ? l = o = 64 : isNaN(s) && (o = 64), t += e.charAt(n) + e.charAt(a) + e.charAt(l) + e.charAt(o); return t; }, kB = (c) => atob(c), OL = (c) => { const e = kB(c), t = e.length, i = new Uint8Array(new ArrayBuffer(t)); for (let r = 0; r < t; r++) i[r] = e.charCodeAt(r); return i.buffer; }, ate = (c, e) => { let t = String(c); for (; t.length < e; ) t = "0" + t; return t; }, Vle = { EndsWith: ste, StartsWith: nte, Decode: rK, EncodeArrayBufferToBase64: DL, DecodeBase64ToString: kB, DecodeBase64ToBinary: OL, PadNumber: ate }, kle = "attribute", zle = "varying"; class k9 { constructor() { this.children = []; } // eslint-disable-next-line @typescript-eslint/no-unused-vars isValid(e) { return !0; } process(e, t) { var i, r, s, n, a, l, o; let u = ""; if (this.line) { let h = this.line; const d = t.processor; if (d) { d.lineProcessor && (h = d.lineProcessor(h, t.isFragment, t.processingContext)); const f = (r = (i = t.processor) === null || i === void 0 ? void 0 : i.attributeKeywordName) !== null && r !== void 0 ? r : kle, p = t.isFragment && (!((s = t.processor) === null || s === void 0) && s.varyingFragmentKeywordName) ? (n = t.processor) === null || n === void 0 ? void 0 : n.varyingFragmentKeywordName : !t.isFragment && (!((a = t.processor) === null || a === void 0) && a.varyingVertexKeywordName) ? (l = t.processor) === null || l === void 0 ? void 0 : l.varyingVertexKeywordName : zle; !t.isFragment && d.attributeProcessor && this.line.startsWith(f) ? h = d.attributeProcessor(this.line, e, t.processingContext) : d.varyingProcessor && (!((o = d.varyingCheck) === null || o === void 0) && o.call(d, this.line, t.isFragment) || !d.varyingCheck && this.line.startsWith(p)) ? h = d.varyingProcessor(this.line, t.isFragment, e, t.processingContext) : d.uniformProcessor && d.uniformRegexp && d.uniformRegexp.test(this.line) ? t.lookForClosingBracketForUniformBuffer || (h = d.uniformProcessor(this.line, t.isFragment, e, t.processingContext)) : d.uniformBufferProcessor && d.uniformBufferRegexp && d.uniformBufferRegexp.test(this.line) ? t.lookForClosingBracketForUniformBuffer || (h = d.uniformBufferProcessor(this.line, t.isFragment, t.processingContext), t.lookForClosingBracketForUniformBuffer = !0) : d.textureProcessor && d.textureRegexp && d.textureRegexp.test(this.line) ? 
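// Lines matching the processor's textureRegexp are rewritten by its textureProcessor; raw "uniform ..." declarations are routed to uniformProcessor / uniformBufferProcessor below.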
h = d.textureProcessor(this.line, t.isFragment, e, t.processingContext) : (d.uniformProcessor || d.uniformBufferProcessor) && this.line.startsWith("uniform") && !t.lookForClosingBracketForUniformBuffer && (/uniform\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s+(\S+)\s*;/.test(this.line) ? d.uniformProcessor && (h = d.uniformProcessor(this.line, t.isFragment, e, t.processingContext)) : d.uniformBufferProcessor && (h = d.uniformBufferProcessor(this.line, t.isFragment, t.processingContext), t.lookForClosingBracketForUniformBuffer = !0)), t.lookForClosingBracketForUniformBuffer && this.line.indexOf("}") !== -1 && (t.lookForClosingBracketForUniformBuffer = !1, d.endOfUniformBufferProcessor && (h = d.endOfUniformBufferProcessor(this.line, t.isFragment, t.processingContext))); } u += h + ` `; } return this.children.forEach((h) => { u += h.process(e, t); }), this.additionalDefineKey && (e[this.additionalDefineKey] = this.additionalDefineValue || "true"), u; } } class Hle { constructor() { this._lines = []; } get currentLine() { return this._lines[this.lineIndex]; } get canRead() { return this.lineIndex < this._lines.length - 1; } set lines(e) { this._lines.length = 0; for (const t of e) { if (!t || t === "\r") continue; if (t[0] === "#") { this._lines.push(t); continue; } const i = t.trim(); if (!i) continue; if (i.startsWith("//")) { this._lines.push(t); continue; } const r = i.indexOf(";"); if (r === -1) this._lines.push(i); else if (r === i.length - 1) i.length > 1 && this._lines.push(i); else { const s = t.split(";"); for (let n = 0; n < s.length; n++) { let a = s[n]; a && (a = a.trim(), a && this._lines.push(a + (n !== s.length - 1 ? ";" : ""))); } } } } } class Ok extends k9 { process(e, t) { for (let i = 0; i < this.children.length; i++) { const r = this.children[i]; if (r.isValid(e)) return r.process(e, t); } return ""; } } class Gle extends k9 { isValid(e) { return this.testExpression.isTrue(e); } } class Wl { // eslint-disable-next-line @typescript-eslint/no-unused-vars isTrue(e) { return !0; } static postfixToInfix(e) { const t = []; for (const i of e) if (Wl._OperatorPriority[i] === void 0) t.push(i); else { const r = t[t.length - 1], s = t[t.length - 2]; t.length -= 2, t.push(`(${s}${i}${r})`); } return t[t.length - 1]; } /** * Converts an infix expression to a postfix expression. * * This method is used to transform infix expressions, which are more human-readable, * into postfix expressions, also known as Reverse Polish Notation (RPN), that can be * evaluated more efficiently by a computer. The conversion is based on the operator * priority defined in _OperatorPriority. * * The function employs a stack-based algorithm for the conversion and caches the result * to improve performance. The cache keeps track of each converted expression's access time * to manage the cache size and optimize memory usage. When the cache size exceeds a specified * limit, the least recently accessed items in the cache are deleted. * * The cache mechanism is particularly helpful for shader compilation, where the same infix * expressions might be encountered repeatedly, hence the caching can speed up the process. * * @param infix - The infix expression to be converted. * @returns The postfix expression as an array of strings. 
*/ static infixToPostfix(e) { const t = Wl._InfixToPostfixCache.get(e); if (t) return t.accessTime = Date.now(), t.result; if (!e.includes("&&") && !e.includes("||") && !e.includes(")") && !e.includes("(")) return [e]; const i = []; let r = -1; const s = () => { u = u.trim(), u !== "" && (i.push(u), u = ""); }, n = (h) => { r < Wl._Stack.length - 1 && (Wl._Stack[++r] = h); }, a = () => Wl._Stack[r], l = () => r === -1 ? "!!INVALID EXPRESSION!!" : Wl._Stack[r--]; let o = 0, u = ""; for (; o < e.length; ) { const h = e.charAt(o), d = o < e.length - 1 ? e.substr(o, 2) : ""; if (h === "(") u = "", n(h); else if (h === ")") { for (s(); r !== -1 && a() !== "("; ) i.push(l()); l(); } else if (Wl._OperatorPriority[d] > 1) { for (s(); r !== -1 && Wl._OperatorPriority[a()] >= Wl._OperatorPriority[d]; ) i.push(l()); n(d), o++; } else u += h; o++; } for (s(); r !== -1; ) a() === "(" ? l() : i.push(l()); return Wl._InfixToPostfixCache.size >= Wl.InfixToPostfixCacheLimitSize && Wl.ClearCache(), Wl._InfixToPostfixCache.set(e, { result: i, accessTime: Date.now() }), i; } static ClearCache() { const e = Array.from(Wl._InfixToPostfixCache.entries()).sort((t, i) => t[1].accessTime - i[1].accessTime); for (let t = 0; t < Wl.InfixToPostfixCacheCleanupSize; t++) Wl._InfixToPostfixCache.delete(e[t][0]); } } Wl.InfixToPostfixCacheLimitSize = 5e4; Wl.InfixToPostfixCacheCleanupSize = 25e3; Wl._InfixToPostfixCache = /* @__PURE__ */ new Map(); Wl._OperatorPriority = { ")": 0, "(": 1, "||": 2, "&&": 3 }; Wl._Stack = ["", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", ""]; class TF extends Wl { constructor(e, t = !1) { super(), this.define = e, this.not = t; } isTrue(e) { let t = e[this.define] !== void 0; return this.not && (t = !t), t; } } class Kle extends Wl { isTrue(e) { return this.leftOperand.isTrue(e) || this.rightOperand.isTrue(e); } } class Wle extends Wl { isTrue(e) { return this.leftOperand.isTrue(e) && this.rightOperand.isTrue(e); } } class jle extends Wl { constructor(e, t, i) { super(), this.define = e, this.operand = t, this.testValue = i; } isTrue(e) { let t = e[this.define]; t === void 0 && (t = this.define); let i = !1; const r = parseInt(t), s = parseInt(this.testValue); switch (this.operand) { case ">": i = r > s; break; case "<": i = r < s; break; case "<=": i = r <= s; break; case ">=": i = r >= s; break; case "==": i = r === s; break; case "!=": i = r !== s; break; } return i; } } var Xa; (function(c) { c[c.GLSL = 0] = "GLSL", c[c.WGSL = 1] = "WGSL"; })(Xa || (Xa = {})); const Xle = /defined\s*?\((.+?)\)/g, wk = /defined\s*?\[(.+?)\]/g, Yle = /#include\s?<(.+)>(\((.*)\))*(\[(.*)\])*/g, Qle = /__decl__/, bZ = /light\{X\}.(\w*)/g, EZ = /\{X\}/g, SF = []; class pg { static Initialize(e) { e.processor && e.processor.initializeShaders && e.processor.initializeShaders(e.processingContext); } static Process(e, t, i, r) { var s; !((s = t.processor) === null || s === void 0) && s.preProcessShaderCode && (e = t.processor.preProcessShaderCode(e, t.isFragment)), this._ProcessIncludes(e, t, (n) => { t.processCodeAfterIncludes && (n = t.processCodeAfterIncludes(t.isFragment ? "fragment" : "vertex", n)); const a = this._ProcessShaderConversion(n, t, r); i(a, n); }); } static PreProcess(e, t, i, r) { var s; !((s = t.processor) === null || s === void 0) && s.preProcessShaderCode && (e = t.processor.preProcessShaderCode(e, t.isFragment)), this._ProcessIncludes(e, t, (n) => { t.processCodeAfterIncludes && (n = t.processCodeAfterIncludes(t.isFragment ? 
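// PreProcess follows the same flow as Process but calls _ApplyPreProcessing instead of _ProcessShaderConversion, so only the define/preprocessor pass runs here.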
"fragment" : "vertex", n)); const a = this._ApplyPreProcessing(n, t, r); i(a, n); }); } static Finalize(e, t, i) { return !i.processor || !i.processor.finalizeShaders ? { vertexCode: e, fragmentCode: t } : i.processor.finalizeShaders(e, t, i.processingContext); } static _ProcessPrecision(e, t) { var i; if (!((i = t.processor) === null || i === void 0) && i.noPrecision) return e; const r = t.shouldUseHighPrecisionShader; return e.indexOf("precision highp float") === -1 ? r ? e = `precision highp float; ` + e : e = `precision mediump float; ` + e : r || (e = e.replace("precision highp float", "precision mediump float")), e; } static _ExtractOperation(e) { const i = /defined\((.+)\)/.exec(e); if (i && i.length) return new TF(i[1].trim(), e[0] === "!"); const r = ["==", "!=", ">=", "<=", "<", ">"]; let s = "", n = 0; for (s of r) if (n = e.indexOf(s), n > -1) break; if (n === -1) return new TF(e); const a = e.substring(0, n).trim(), l = e.substring(n + s.length).trim(); return new jle(a, s, l); } static _BuildSubExpression(e) { e = e.replace(Xle, "defined[$1]"); const t = Wl.infixToPostfix(e), i = []; for (const s of t) if (s !== "||" && s !== "&&") i.push(s); else if (i.length >= 2) { let n = i[i.length - 1], a = i[i.length - 2]; i.length -= 2; const l = s == "&&" ? new Wle() : new Kle(); typeof n == "string" && (n = n.replace(wk, "defined($1)")), typeof a == "string" && (a = a.replace(wk, "defined($1)")), l.leftOperand = typeof a == "string" ? this._ExtractOperation(a) : a, l.rightOperand = typeof n == "string" ? this._ExtractOperation(n) : n, i.push(l); } let r = i[i.length - 1]; return typeof r == "string" && (r = r.replace(wk, "defined($1)")), typeof r == "string" ? this._ExtractOperation(r) : r; } static _BuildExpression(e, t) { const i = new Gle(), r = e.substring(0, t); let s = e.substring(t); return s = s.substring(0, (s.indexOf("//") + 1 || s.length + 1) - 1).trim(), r === "#ifdef" ? i.testExpression = new TF(s) : r === "#ifndef" ? 
i.testExpression = new TF(s, !0) : i.testExpression = this._BuildSubExpression(s), i; } static _MoveCursorWithinIf(e, t, i) { let r = e.currentLine; for (; this._MoveCursor(e, i); ) { r = e.currentLine; const s = r.substring(0, 5).toLowerCase(); if (s === "#else") { const n = new k9(); t.children.push(n), this._MoveCursor(e, n); return; } else if (s === "#elif") { const n = this._BuildExpression(r, 5); t.children.push(n), i = n; } } } static _MoveCursor(e, t) { for (; e.canRead; ) { e.lineIndex++; const i = e.currentLine; if (i.indexOf("#") >= 0) { const s = pg._MoveCursorRegex.exec(i); if (s && s.length) { switch (s[0]) { case "#ifdef": { const a = new Ok(); t.children.push(a); const l = this._BuildExpression(i, 6); a.children.push(l), this._MoveCursorWithinIf(e, a, l); break; } case "#else": case "#elif": return !0; case "#endif": return !1; case "#ifndef": { const a = new Ok(); t.children.push(a); const l = this._BuildExpression(i, 7); a.children.push(l), this._MoveCursorWithinIf(e, a, l); break; } case "#if": { const a = new Ok(), l = this._BuildExpression(i, 3); t.children.push(a), a.children.push(l), this._MoveCursorWithinIf(e, a, l); break; } } continue; } } const r = new k9(); if (r.line = i, t.children.push(r), i[0] === "#" && i[1] === "d") { const s = i.replace(";", "").split(" "); r.additionalDefineKey = s[1], s.length === 3 && (r.additionalDefineValue = s[2]); } } return !1; } static _EvaluatePreProcessors(e, t, i) { const r = new k9(), s = new Hle(); return s.lineIndex = -1, s.lines = e.split(` `), this._MoveCursor(s, r), r.process(t, i); } static _PreparePreProcessors(e, t) { var i; const r = e.defines, s = {}; for (const n of r) { const l = n.replace("#define", "").replace(";", "").trim().split(" "); s[l[0]] = l.length > 1 ? l[1] : ""; } return ((i = e.processor) === null || i === void 0 ? 
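// GLSL targets additionally receive GL_ES; every target gets __VERSION__, its platform-name define and the engine's global defines.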
void 0 : i.shaderLanguage) === Xa.GLSL && (s.GL_ES = "true"), s.__VERSION__ = e.version, s[e.platformName] = "true", t._getGlobalDefines(s), s; } static _ProcessShaderConversion(e, t, i) { let r = this._ProcessPrecision(e, t); if (!t.processor || t.processor.shaderLanguage === Xa.GLSL && r.indexOf("#version 3") !== -1 && (r = r.replace("#version 300 es", ""), !t.processor.parseGLES3)) return r; const s = t.defines, n = this._PreparePreProcessors(t, i); return t.processor.preProcessor && (r = t.processor.preProcessor(r, s, t.isFragment, t.processingContext)), r = this._EvaluatePreProcessors(r, n, t), t.processor.postProcessor && (r = t.processor.postProcessor(r, s, t.isFragment, t.processingContext, i)), i._features.needShaderCodeInlining && (r = i.inlineShaderCode(r)), r; } static _ApplyPreProcessing(e, t, i) { var r, s; let n = e; const a = t.defines, l = this._PreparePreProcessors(t, i); return !((r = t.processor) === null || r === void 0) && r.preProcessor && (n = t.processor.preProcessor(n, a, t.isFragment, t.processingContext)), n = this._EvaluatePreProcessors(n, l, t), !((s = t.processor) === null || s === void 0) && s.postProcessor && (n = t.processor.postProcessor(n, a, t.isFragment, t.processingContext, i)), i._features.needShaderCodeInlining && (n = i.inlineShaderCode(n)), n; } /** @internal */ static _ProcessIncludes(e, t, i) { SF.length = 0; let r; for (; (r = Yle.exec(e)) !== null; ) SF.push(r); let s = String(e), n = [e], a = !1; for (const l of SF) { let o = l[1]; if (o.indexOf("__decl__") !== -1 && (o = o.replace(Qle, ""), t.supportsUniformBuffers && (o = o.replace("Vertex", "Ubo").replace("Fragment", "Ubo")), o = o + "Declaration"), t.includesShadersStore[o]) { let u = t.includesShadersStore[o]; if (l[2]) { const d = l[3].split(","); for (let f = 0; f < d.length; f += 2) { const p = new RegExp(d[f], "g"), m = d[f + 1]; u = u.replace(p, m); } } if (l[4]) { const d = l[5]; if (d.indexOf("..") !== -1) { const f = d.split(".."), p = parseInt(f[0]); let m = parseInt(f[1]), _ = u.slice(0); u = "", isNaN(m) && (m = t.indexParameters[f[1]]); for (let v = p; v < m; v++) t.supportsUniformBuffers || (_ = _.replace(bZ, (C, x) => x + "{X}")), u += _.replace(EZ, v.toString()) + ` `; } else t.supportsUniformBuffers || (u = u.replace(bZ, (f, p) => p + "{X}")), u = u.replace(EZ, d); } const h = []; for (const d of n) { const f = d.split(l[0]); for (let p = 0; p < f.length - 1; p++) h.push(f[p]), h.push(u); h.push(f[f.length - 1]); } n = h, a = a || u.indexOf("#include<") >= 0 || u.indexOf("#include <") >= 0; } else { const u = t.shadersRepository + "ShadersInclude/" + o + ".fx"; pg._FileToolsLoadFile(u, (h) => { t.includesShadersStore[o] = h, this._ProcessIncludes(n.join(""), t, i); }); return; } } SF.length = 0, s = n.join(""), a ? 
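// Recurse while an inlined chunk still contains #include directives; otherwise hand the flattened source to the callback.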
this._ProcessIncludes(s.toString(), t, i) : i(s); } /** * Loads a file from a url * @param url url to load * @param onSuccess callback called when the file successfully loads * @param onProgress callback called while file is loading (if the server supports this mode) * @param offlineProvider defines the offline provider for caching * @param useArrayBuffer defines a boolean indicating that date must be returned as ArrayBuffer * @param onError callback called when the file fails to load * @returns a file request object * @internal */ static _FileToolsLoadFile(e, t, i, r, s, n) { throw yr("FileTools"); } } pg._MoveCursorRegex = /(#ifdef)|(#else)|(#elif)|(#endif)|(#ifndef)|(#if)/; class je { /** * Gets the shaders repository path for a given shader language * @param shaderLanguage the shader language * @returns the path to the shaders repository */ static GetShadersRepository(e = Xa.GLSL) { return e === Xa.GLSL ? je.ShadersRepository : je.ShadersRepositoryWGSL; } /** * Gets the shaders store of a given shader language * @param shaderLanguage the shader language * @returns the shaders store */ static GetShadersStore(e = Xa.GLSL) { return e === Xa.GLSL ? je.ShadersStore : je.ShadersStoreWGSL; } /** * Gets the include shaders store of a given shader language * @param shaderLanguage the shader language * @returns the include shaders store */ static GetIncludesShadersStore(e = Xa.GLSL) { return e === Xa.GLSL ? je.IncludesShadersStore : je.IncludesShadersStoreWGSL; } } je.ShadersRepository = "src/Shaders/"; je.ShadersStore = {}; je.IncludesShadersStore = {}; je.ShadersRepositoryWGSL = "src/ShadersWGSL/"; je.ShadersStoreWGSL = {}; je.IncludesShadersStoreWGSL = {}; class Cr { /** * Gets or sets the relative url used to load shaders if using the engine in non-minified mode */ static get ShadersRepository() { return je.ShadersRepository; } static set ShadersRepository(e) { je.ShadersRepository = e; } /** * Observable that will be called when effect is bound. */ get onBindObservable() { return this._onBindObservable || (this._onBindObservable = new Fe()), this._onBindObservable; } /** * Instantiates an effect. * An effect can be used to create/manage/execute vertex and fragment shaders. * @param baseName Name of the effect. * @param attributesNamesOrOptions List of attribute names that will be passed to the shader or set of all options to create the effect. * @param uniformsNamesOrEngine List of uniform variable names that will be passed to the shader or the engine that will be used to render effect. * @param samplers List of sampler variables that will be passed to the shader. * @param engine Engine to be used to render the effect * @param defines Define statements to be added to the shader. * @param fallbacks Possible fallbacks for this effect to improve performance when needed. * @param onCompiled Callback that will be called when the shader is compiled. * @param onError Callback that will be called if an error occurs during shader compilation. * @param indexParameters Parameters to be used with Babylons include syntax to iterate over an array (eg. 
\{lights: 10\}) * @param key Effect Key identifying uniquely compiled shader variants * @param shaderLanguage the language the shader is written in (default: GLSL) */ constructor(e, t, i, r = null, s, n = null, a = null, l = null, o = null, u, h = "", d = Xa.GLSL) { var f, p, m; if (this.name = null, this.defines = "", this.onCompiled = null, this.onError = null, this.onBind = null, this.uniqueId = 0, this.onCompileObservable = new Fe(), this.onErrorObservable = new Fe(), this._onBindObservable = null, this._wasPreviouslyReady = !1, this._forceRebindOnNextCall = !1, this._wasPreviouslyUsingInstances = null, this._isDisposed = !1, this._bonesComputationForcedToCPU = !1, this._uniformBuffersNames = {}, this._multiTarget = !1, this._samplers = {}, this._isReady = !1, this._compilationError = "", this._allFallbacksProcessed = !1, this._uniforms = {}, this._key = "", this._fallbacks = null, this._vertexSourceCodeOverride = "", this._fragmentSourceCodeOverride = "", this._transformFeedbackVaryings = null, this._pipelineContext = null, this._vertexSourceCode = "", this._fragmentSourceCode = "", this._vertexSourceCodeBeforeMigration = "", this._fragmentSourceCodeBeforeMigration = "", this._rawVertexSourceCode = "", this._rawFragmentSourceCode = "", this._processCodeAfterIncludes = void 0, this._processFinalCode = null, this.name = e, this._key = h, t.attributes) { const _ = t; if (this._engine = i, this._attributesNames = _.attributes, this._uniformsNames = _.uniformsNames.concat(_.samplers), this._samplerList = _.samplers.slice(), this.defines = _.defines, this.onError = _.onError, this.onCompiled = _.onCompiled, this._fallbacks = _.fallbacks, this._indexParameters = _.indexParameters, this._transformFeedbackVaryings = _.transformFeedbackVaryings || null, this._multiTarget = !!_.multiTarget, this._shaderLanguage = (f = _.shaderLanguage) !== null && f !== void 0 ? f : Xa.GLSL, _.uniformBuffersNames) { this._uniformBuffersNamesList = _.uniformBuffersNames.slice(); for (let v = 0; v < _.uniformBuffersNames.length; v++) this._uniformBuffersNames[_.uniformBuffersNames[v]] = v; } this._processFinalCode = (p = _.processFinalCode) !== null && p !== void 0 ? p : null, this._processCodeAfterIncludes = (m = _.processCodeAfterIncludes) !== null && m !== void 0 ? m : void 0; } else this._engine = s, this.defines = n ?? "", this._uniformsNames = i.concat(r), this._samplerList = r ? r.slice() : [], this._attributesNames = t, this._uniformBuffersNamesList = [], this._shaderLanguage = d, this.onError = o, this.onCompiled = l, this._indexParameters = u, this._fallbacks = a; this._attributeLocationByName = {}, this.uniqueId = Cr._UniqueIdSeed++, this._processShaderCode(); } /** @internal */ _processShaderCode(e = null, t = !1) { let i, r; const s = this.name, n = cu() ? this._engine.getHostDocument() : null; s.vertexSource ? i = "source:" + s.vertexSource : s.vertexElement ? (i = n ? n.getElementById(s.vertexElement) : null, i || (i = s.vertexElement)) : i = s.vertex || s, s.fragmentSource ? r = "source:" + s.fragmentSource : s.fragmentElement ? (r = n ? n.getElementById(s.fragmentElement) : null, r || (r = s.fragmentElement)) : r = s.fragment || s, this._processingContext = this._engine._getShaderProcessingContext(this._shaderLanguage); let a = { defines: this.defines.split(` `), indexParameters: this._indexParameters, isFragment: !1, shouldUseHighPrecisionShader: this._engine._shouldUseHighPrecisionShader, processor: e ?? 
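// No explicit processor supplied: fall back to the engine's shader processor for the current shader language.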
this._engine._getShaderProcessor(this._shaderLanguage), supportsUniformBuffers: this._engine.supportsUniformBuffers, shadersRepository: je.GetShadersRepository(this._shaderLanguage), includesShadersStore: je.GetIncludesShadersStore(this._shaderLanguage), version: (this._engine.version * 100).toString(), platformName: this._engine.shaderPlatformName, processingContext: this._processingContext, isNDCHalfZRange: this._engine.isNDCHalfZRange, useReverseDepthBuffer: this._engine.useReverseDepthBuffer, processCodeAfterIncludes: this._processCodeAfterIncludes }; const l = [void 0, void 0], o = () => { if (l[0] && l[1]) { a.isFragment = !0; const [u, h] = l; pg.Process(h, a, (d, f) => { this._fragmentSourceCodeBeforeMigration = f, this._processFinalCode && (d = this._processFinalCode("fragment", d)); const p = pg.Finalize(u, d, a); a = null, this._useFinalCode(p.vertexCode, p.fragmentCode, s, t); }, this._engine); } }; this._loadShader(i, "Vertex", "", (u) => { pg.Initialize(a), pg.Process(u, a, (h, d) => { this._rawVertexSourceCode = u, this._vertexSourceCodeBeforeMigration = d, this._processFinalCode && (h = this._processFinalCode("vertex", h)), l[0] = h, o(); }, this._engine); }), this._loadShader(r, "Fragment", "Pixel", (u) => { this._rawFragmentSourceCode = u, l[1] = u, o(); }); } _useFinalCode(e, t, i, r = !1) { if (i) { const s = i.vertexElement || i.vertex || i.spectorName || i, n = i.fragmentElement || i.fragment || i.spectorName || i; this._vertexSourceCode = (this._shaderLanguage === Xa.WGSL ? "//" : "") + "#define SHADER_NAME vertex:" + s + ` ` + e, this._fragmentSourceCode = (this._shaderLanguage === Xa.WGSL ? "//" : "") + "#define SHADER_NAME fragment:" + n + ` ` + t; } else this._vertexSourceCode = e, this._fragmentSourceCode = t; this._prepareEffect(r); } /** * Unique key for this effect */ get key() { return this._key; } /** * If the effect has been compiled and prepared. * @returns if the effect is compiled and prepared. */ isReady() { try { return this._isReadyInternal(); } catch { return !1; } } _isReadyInternal() { return this._isReady ? !0 : this._pipelineContext ? this._pipelineContext.isReady : !1; } /** * The engine the effect was initialized with. * @returns the engine. */ getEngine() { return this._engine; } /** * The pipeline context for this effect * @returns the associated pipeline context */ getPipelineContext() { return this._pipelineContext; } /** * The set of names of attribute variables for the shader. * @returns An array of attribute names. */ getAttributesNames() { return this._attributesNames; } /** * Returns the attribute at the given index. * @param index The index of the attribute. * @returns The location of the attribute. */ getAttributeLocation(e) { return this._attributes[e]; } /** * Returns the attribute based on the name of the variable. * @param name of the attribute to look up. * @returns the attribute location. */ getAttributeLocationByName(e) { return this._attributeLocationByName[e]; } /** * The number of attributes. * @returns the number of attributes. */ getAttributesCount() { return this._attributes.length; } /** * Gets the index of a uniform variable. * @param uniformName of the uniform to look up. * @returns the index. */ getUniformIndex(e) { return this._uniformsNames.indexOf(e); } /** * Returns the attribute based on the name of the variable. * @param uniformName of the uniform to look up. * @returns the location of the uniform. 
*/ getUniform(e) { return this._uniforms[e]; } /** * Returns an array of sampler variable names * @returns The array of sampler variable names. */ getSamplers() { return this._samplerList; } /** * Returns an array of uniform variable names * @returns The array of uniform variable names. */ getUniformNames() { return this._uniformsNames; } /** * Returns an array of uniform buffer variable names * @returns The array of uniform buffer variable names. */ getUniformBuffersNames() { return this._uniformBuffersNamesList; } /** * Returns the index parameters used to create the effect * @returns The index parameters object */ getIndexParameters() { return this._indexParameters; } /** * The error from the last compilation. * @returns the error string. */ getCompilationError() { return this._compilationError; } /** * Gets a boolean indicating that all fallbacks were used during compilation * @returns true if all fallbacks were used */ allFallbacksProcessed() { return this._allFallbacksProcessed; } /** * Adds a callback to the onCompiled observable and call the callback immediately if already ready. * @param func The callback to be used. */ executeWhenCompiled(e) { if (this.isReady()) { e(this); return; } this.onCompileObservable.add((t) => { e(t); }), (!this._pipelineContext || this._pipelineContext.isAsync) && setTimeout(() => { this._checkIsReady(null); }, 16); } _checkIsReady(e) { try { if (this._isReadyInternal()) return; } catch (t) { this._processCompilationErrors(t, e); return; } this._isDisposed || setTimeout(() => { this._checkIsReady(e); }, 16); } _loadShader(e, t, i, r) { if (typeof HTMLElement < "u" && e instanceof HTMLElement) { const a = IL(e); r(a); return; } if (e.substr(0, 7) === "source:") { r(e.substr(7)); return; } if (e.substr(0, 7) === "base64:") { const a = window.atob(e.substr(7)); r(a); return; } const s = je.GetShadersStore(this._shaderLanguage); if (s[e + t + "Shader"]) { r(s[e + t + "Shader"]); return; } if (i && s[e + i + "Shader"]) { r(s[e + i + "Shader"]); return; } let n; e[0] === "." || e[0] === "/" || e.indexOf("http") > -1 ? n = e : n = je.GetShadersRepository(this._shaderLanguage) + e, this._engine._loadFile(n + "." + t.toLowerCase() + ".fx", r); } /** * Gets the vertex shader source code of this effect * This is the final source code that will be compiled, after all the processing has been done (pre-processing applied, code injection/replacement, etc) */ get vertexSourceCode() { var e, t; return this._vertexSourceCodeOverride && this._fragmentSourceCodeOverride ? this._vertexSourceCodeOverride : (t = (e = this._pipelineContext) === null || e === void 0 ? void 0 : e._getVertexShaderCode()) !== null && t !== void 0 ? t : this._vertexSourceCode; } /** * Gets the fragment shader source code of this effect * This is the final source code that will be compiled, after all the processing has been done (pre-processing applied, code injection/replacement, etc) */ get fragmentSourceCode() { var e, t; return this._vertexSourceCodeOverride && this._fragmentSourceCodeOverride ? this._fragmentSourceCodeOverride : (t = (e = this._pipelineContext) === null || e === void 0 ? void 0 : e._getFragmentShaderCode()) !== null && t !== void 0 ? t : this._fragmentSourceCode; } /** * Gets the vertex shader source code before migration. * This is the source code after the include directives have been replaced by their contents but before the code is migrated, i.e. before ShaderProcess._ProcessShaderConversion is executed. 
* This method is, among other things, responsible for parsing #if/#define directives as well as converting GLES2 syntax to GLES3 (in the case of WebGL). */ get vertexSourceCodeBeforeMigration() { return this._vertexSourceCodeBeforeMigration; } /** * Gets the fragment shader source code before migration. * This is the source code after the include directives have been replaced by their contents but before the code is migrated, i.e. before ShaderProcess._ProcessShaderConversion is executed. * This method is, among other things, responsible for parsing #if/#define directives as well as converting GLES2 syntax to GLES3 (in the case of WebGL). */ get fragmentSourceCodeBeforeMigration() { return this._fragmentSourceCodeBeforeMigration; } /** * Gets the vertex shader source code before it has been modified by any processing */ get rawVertexSourceCode() { return this._rawVertexSourceCode; } /** * Gets the fragment shader source code before it has been modified by any processing */ get rawFragmentSourceCode() { return this._rawFragmentSourceCode; } /** * Recompiles the webGL program * @param vertexSourceCode The source code for the vertex shader. * @param fragmentSourceCode The source code for the fragment shader. * @param onCompiled Callback called when completed. * @param onError Callback called on error. * @internal */ _rebuildProgram(e, t, i, r) { this._isReady = !1, this._vertexSourceCodeOverride = e, this._fragmentSourceCodeOverride = t, this.onError = (s, n) => { r && r(n); }, this.onCompiled = () => { const s = this.getEngine().scenes; if (s) for (let n = 0; n < s.length; n++) s[n].markAllMaterialsAsDirty(63); this._pipelineContext._handlesSpectorRebuildCallback(i); }, this._fallbacks = null, this._prepareEffect(); } /** * Prepares the effect * @internal */ _prepareEffect(e = !1) { var t; const i = this._attributesNames, r = this.defines, s = this._pipelineContext; this._isReady = !1; try { const n = this._engine; this._pipelineContext = (t = e ? s : void 0) !== null && t !== void 0 ? t : n.createPipelineContext(this._processingContext), this._pipelineContext._name = this._key.replace(/\r/g, "").replace(/\n/g, "|"); const a = (l, o, u, h) => this._rebuildProgram(l, o, u, h); this._vertexSourceCodeOverride && this._fragmentSourceCodeOverride ? 
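// Overridden shader sources skip define-based processing here: the override branch passes null where the non-override branch passes this.defines.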
n._preparePipelineContext(this._pipelineContext, this._vertexSourceCodeOverride, this._fragmentSourceCodeOverride, !0, this._rawVertexSourceCode, this._rawFragmentSourceCode, a, null, this._transformFeedbackVaryings, this._key) : n._preparePipelineContext(this._pipelineContext, this._vertexSourceCode, this._fragmentSourceCode, !1, this._rawVertexSourceCode, this._rawFragmentSourceCode, a, r, this._transformFeedbackVaryings, this._key), n._executeWhenRenderingStateIsCompiled(this._pipelineContext, () => { if (this._attributes = [], this._pipelineContext._fillEffectInformation(this, this._uniformBuffersNames, this._uniformsNames, this._uniforms, this._samplerList, this._samplers, i, this._attributes), i) for (let l = 0; l < i.length; l++) { const o = i[l]; this._attributeLocationByName[o] = this._attributes[l]; } n.bindSamplers(this), this._compilationError = "", this._isReady = !0, this.onCompiled && this.onCompiled(this), this.onCompileObservable.notifyObservers(this), this.onCompileObservable.clear(), this._fallbacks && this._fallbacks.unBindMesh(), s && !e && this.getEngine()._deletePipelineContext(s); }), this._pipelineContext.isAsync && this._checkIsReady(s); } catch (n) { this._processCompilationErrors(n, s); } } _getShaderCodeAndErrorLine(e, t, i) { const r = i ? /FRAGMENT SHADER ERROR: 0:(\d+?):/ : /VERTEX SHADER ERROR: 0:(\d+?):/; let s = null; if (t && e) { const n = t.match(r); if (n && n.length === 2) { const a = parseInt(n[1]), l = e.split(` `, -1); l.length >= a && (s = `Offending line [${a}] in ${i ? "fragment" : "vertex"} code: ${l[a - 1]}`); } } return [e, s]; } _processCompilationErrors(e, t = null) { var i, r, s; this._compilationError = e.message; const n = this._attributesNames, a = this._fallbacks; if (Ce.Error("Unable to compile effect:"), Ce.Error("Uniforms: " + this._uniformsNames.map(function(o) { return " " + o; })), Ce.Error("Attributes: " + n.map(function(o) { return " " + o; })), Ce.Error(`Defines: ` + this.defines), Cr.LogShaderCodeOnCompilationError) { let o = null, u = null, h = null; !((i = this._pipelineContext) === null || i === void 0) && i._getVertexShaderCode() && ([h, o] = this._getShaderCodeAndErrorLine(this._pipelineContext._getVertexShaderCode(), this._compilationError, !1), h && (Ce.Error("Vertex code:"), Ce.Error(h))), !((r = this._pipelineContext) === null || r === void 0) && r._getFragmentShaderCode() && ([h, u] = this._getShaderCodeAndErrorLine((s = this._pipelineContext) === null || s === void 0 ? void 0 : s._getFragmentShaderCode(), this._compilationError, !0), h && (Ce.Error("Fragment code:"), Ce.Error(h))), o && Ce.Error(o), u && Ce.Error(u); } Ce.Error("Error: " + this._compilationError); const l = () => { this.onError && this.onError(this, this._compilationError), this.onErrorObservable.notifyObservers(this); }; t && (this._pipelineContext = t, this._isReady = !0, l()), a ? (this._pipelineContext = null, a.hasMoreFallbacks ? (this._allFallbacksProcessed = !1, Ce.Error("Trying next fallback."), this.defines = a.reduce(this.defines, this), this._prepareEffect()) : (this._allFallbacksProcessed = !0, l(), this.onErrorObservable.clear(), this._fallbacks && this._fallbacks.unBindMesh())) : (this._allFallbacksProcessed = !0, t || l()); } /** * Checks if the effect is supported. (Must be called after compilation) */ get isSupported() { return this._compilationError === ""; } /** * Binds a texture to the engine to be used as output of the shader. * @param channel Name of the output variable. * @param texture Texture to bind. 
* @internal */ _bindTexture(e, t) { this._engine._bindTexture(this._samplers[e], t, e); } /** * Sets a texture on the engine to be used in the shader. * @param channel Name of the sampler variable. * @param texture Texture to set. */ setTexture(e, t) { this._engine.setTexture(this._samplers[e], this._uniforms[e], t, e); } /** * Sets a depth stencil texture from a render target on the engine to be used in the shader. * @param channel Name of the sampler variable. * @param texture Texture to set. */ setDepthStencilTexture(e, t) { this._engine.setDepthStencilTexture(this._samplers[e], this._uniforms[e], t, e); } /** * Sets an array of textures on the engine to be used in the shader. * @param channel Name of the variable. * @param textures Textures to set. */ setTextureArray(e, t) { const i = e + "Ex"; if (this._samplerList.indexOf(i + "0") === -1) { const r = this._samplerList.indexOf(e); for (let n = 1; n < t.length; n++) { const a = i + (n - 1).toString(); this._samplerList.splice(r + n, 0, a); } let s = 0; for (const n of this._samplerList) this._samplers[n] = s, s += 1; } this._engine.setTextureArray(this._samplers[e], this._uniforms[e], t, e); } /** * Sets a texture to be the input of the specified post process. (To use the output, pass in the next post process in the pipeline) * @param channel Name of the sampler variable. * @param postProcess Post process to get the input texture from. */ setTextureFromPostProcess(e, t) { this._engine.setTextureFromPostProcess(this._samplers[e], t, e); } /** * (Warning! setTextureFromPostProcessOutput may be desired instead) * Sets the input texture of the passed in post process to be input of this effect. (To use the output of the passed in post process use setTextureFromPostProcessOutput) * @param channel Name of the sampler variable. * @param postProcess Post process to get the output texture from. */ setTextureFromPostProcessOutput(e, t) { this._engine.setTextureFromPostProcessOutput(this._samplers[e], t, e); } /** * Binds a buffer to a uniform. * @param buffer Buffer to bind. * @param name Name of the uniform variable to bind to. */ bindUniformBuffer(e, t) { const i = this._uniformBuffersNames[t]; i === void 0 || Cr._BaseCache[i] === e && this._engine._features.useUBOBindingCache || (Cr._BaseCache[i] = e, this._engine.bindUniformBufferBase(e, i, t)); } /** * Binds block to a uniform. * @param blockName Name of the block to bind. * @param index Index to bind. */ bindUniformBlock(e, t) { this._engine.bindUniformBlock(this._pipelineContext, e, t); } /** * Sets an integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. * @returns this effect. */ setInt(e, t) { return this._pipelineContext.setInt(e, t), this; } /** * Sets an int2 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int2. * @param y Second int in int2. * @returns this effect. */ setInt2(e, t, i) { return this._pipelineContext.setInt2(e, t, i), this; } /** * Sets an int3 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int3. * @param y Second int in int3. * @param z Third int in int3. * @returns this effect. */ setInt3(e, t, i, r) { return this._pipelineContext.setInt3(e, t, i, r), this; } /** * Sets an int4 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int4. * @param y Second int in int4. * @param z Third int in int4. * @param w Fourth int in int4. * @returns this effect. 
*/ setInt4(e, t, i, r, s) { return this._pipelineContext.setInt4(e, t, i, r, s), this; } /** * Sets an int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setIntArray(e, t) { return this._pipelineContext.setIntArray(e, t), this; } /** * Sets an int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setIntArray2(e, t) { return this._pipelineContext.setIntArray2(e, t), this; } /** * Sets an int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setIntArray3(e, t) { return this._pipelineContext.setIntArray3(e, t), this; } /** * Sets an int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setIntArray4(e, t) { return this._pipelineContext.setIntArray4(e, t), this; } /** * Sets an unsigned integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. * @returns this effect. */ setUInt(e, t) { return this._pipelineContext.setUInt(e, t), this; } /** * Sets an unsigned int2 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint2. * @param y Second unsigned int in uint2. * @returns this effect. */ setUInt2(e, t, i) { return this._pipelineContext.setUInt2(e, t, i), this; } /** * Sets an unsigned int3 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint3. * @param y Second unsigned int in uint3. * @param z Third unsigned int in uint3. * @returns this effect. */ setUInt3(e, t, i, r) { return this._pipelineContext.setUInt3(e, t, i, r), this; } /** * Sets an unsigned int4 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint4. * @param y Second unsigned int in uint4. * @param z Third unsigned int in uint4. * @param w Fourth unsigned int in uint4. * @returns this effect. */ setUInt4(e, t, i, r, s) { return this._pipelineContext.setUInt4(e, t, i, r, s), this; } /** * Sets an unsigned int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setUIntArray(e, t) { return this._pipelineContext.setUIntArray(e, t), this; } /** * Sets an unsigned int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setUIntArray2(e, t) { return this._pipelineContext.setUIntArray2(e, t), this; } /** * Sets an unsigned int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setUIntArray3(e, t) { return this._pipelineContext.setUIntArray3(e, t), this; } /** * Sets an unsigned int array 4 on a uniform variable. (Array is specified as single array eg. 
[1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setUIntArray4(e, t) { return this._pipelineContext.setUIntArray4(e, t), this; } /** * Sets an float array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setFloatArray(e, t) { return this._pipelineContext.setArray(e, t), this; } /** * Sets an float array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setFloatArray2(e, t) { return this._pipelineContext.setArray2(e, t), this; } /** * Sets an float array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setFloatArray3(e, t) { return this._pipelineContext.setArray3(e, t), this; } /** * Sets an float array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setFloatArray4(e, t) { return this._pipelineContext.setArray4(e, t), this; } /** * Sets an array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray(e, t) { return this._pipelineContext.setArray(e, t), this; } /** * Sets an array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray2(e, t) { return this._pipelineContext.setArray2(e, t), this; } /** * Sets an array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray3(e, t) { return this._pipelineContext.setArray3(e, t), this; } /** * Sets an array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray4(e, t) { return this._pipelineContext.setArray4(e, t), this; } /** * Sets matrices on a uniform variable. * @param uniformName Name of the variable. * @param matrices matrices to be set. * @returns this effect. */ setMatrices(e, t) { return this._pipelineContext.setMatrices(e, t), this; } /** * Sets matrix on a uniform variable. * @param uniformName Name of the variable. * @param matrix matrix to be set. * @returns this effect. */ setMatrix(e, t) { return this._pipelineContext.setMatrix(e, t), this; } /** * Sets a 3x3 matrix on a uniform variable. (Specified as [1,2,3,4,5,6,7,8,9] will result in [1,2,3][4,5,6][7,8,9] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. * @returns this effect. */ setMatrix3x3(e, t) { return this._pipelineContext.setMatrix3x3(e, t), this; } /** * Sets a 2x2 matrix on a uniform variable. (Specified as [1,2,3,4] will result in [1,2][3,4] matrix) * @param uniformName Name of the variable. 
* @param matrix matrix to be set. * @returns this effect. */ setMatrix2x2(e, t) { return this._pipelineContext.setMatrix2x2(e, t), this; } /** * Sets a float on a uniform variable. * @param uniformName Name of the variable. * @param value value to be set. * @returns this effect. */ setFloat(e, t) { return this._pipelineContext.setFloat(e, t), this; } /** * Sets a boolean on a uniform variable. * @param uniformName Name of the variable. * @param bool value to be set. * @returns this effect. */ setBool(e, t) { return this._pipelineContext.setInt(e, t ? 1 : 0), this; } /** * Sets a Vector2 on a uniform variable. * @param uniformName Name of the variable. * @param vector2 vector2 to be set. * @returns this effect. */ setVector2(e, t) { return this._pipelineContext.setVector2(e, t), this; } /** * Sets a float2 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float2. * @param y Second float in float2. * @returns this effect. */ setFloat2(e, t, i) { return this._pipelineContext.setFloat2(e, t, i), this; } /** * Sets a Vector3 on a uniform variable. * @param uniformName Name of the variable. * @param vector3 Value to be set. * @returns this effect. */ setVector3(e, t) { return this._pipelineContext.setVector3(e, t), this; } /** * Sets a float3 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float3. * @param y Second float in float3. * @param z Third float in float3. * @returns this effect. */ setFloat3(e, t, i, r) { return this._pipelineContext.setFloat3(e, t, i, r), this; } /** * Sets a Vector4 on a uniform variable. * @param uniformName Name of the variable. * @param vector4 Value to be set. * @returns this effect. */ setVector4(e, t) { return this._pipelineContext.setVector4(e, t), this; } /** * Sets a Quaternion on a uniform variable. * @param uniformName Name of the variable. * @param quaternion Value to be set. * @returns this effect. */ setQuaternion(e, t) { return this._pipelineContext.setQuaternion(e, t), this; } /** * Sets a float4 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float4. * @param y Second float in float4. * @param z Third float in float4. * @param w Fourth float in float4. * @returns this effect. */ setFloat4(e, t, i, r, s) { return this._pipelineContext.setFloat4(e, t, i, r, s), this; } /** * Sets a Color3 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. * @returns this effect. */ setColor3(e, t) { return this._pipelineContext.setColor3(e, t), this; } /** * Sets a Color4 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. * @param alpha Alpha value to be set. * @returns this effect. */ setColor4(e, t, i) { return this._pipelineContext.setColor4(e, t, i), this; } /** * Sets a Color4 on a uniform variable * @param uniformName defines the name of the variable * @param color4 defines the value to be set * @returns this effect. */ setDirectColor4(e, t) { return this._pipelineContext.setDirectColor4(e, t), this; } /** * Release all associated resources. 
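 * (Illustrative sketch, not from the library's documentation; `effect` is a hypothetical hand-managed Effect that no material references any more.)
 * @example
 * // releases the pipeline context and asks the engine to drop the compiled effect
 * effect.dispose();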
**/ dispose() { this._pipelineContext && this._pipelineContext.dispose(), this._engine._releaseEffect(this), this._isDisposed = !0; } /** * This function will add a new shader to the shader store * @param name the name of the shader * @param pixelShader optional pixel shader content * @param vertexShader optional vertex shader content * @param shaderLanguage the language the shader is written in (default: GLSL) */ static RegisterShader(e, t, i, r = Xa.GLSL) { t && (je.GetShadersStore(r)[`${e}PixelShader`] = t), i && (je.GetShadersStore(r)[`${e}VertexShader`] = i); } /** * Resets the cache of effects. */ static ResetCache() { Cr._BaseCache = {}; } } Cr.LogShaderCodeOnCompilationError = !0; Cr._UniqueIdSeed = 0; Cr._BaseCache = {}; Cr.ShadersStore = je.ShadersStore; Cr.IncludesShadersStore = je.IncludesShadersStore; class sK { /** * Initializes the state. * @param reset */ constructor(e = !0) { this._isDepthTestDirty = !1, this._isDepthMaskDirty = !1, this._isDepthFuncDirty = !1, this._isCullFaceDirty = !1, this._isCullDirty = !1, this._isZOffsetDirty = !1, this._isFrontFaceDirty = !1, e && this.reset(); } get isDirty() { return this._isDepthFuncDirty || this._isDepthTestDirty || this._isDepthMaskDirty || this._isCullFaceDirty || this._isCullDirty || this._isZOffsetDirty || this._isFrontFaceDirty; } get zOffset() { return this._zOffset; } set zOffset(e) { this._zOffset !== e && (this._zOffset = e, this._isZOffsetDirty = !0); } get zOffsetUnits() { return this._zOffsetUnits; } set zOffsetUnits(e) { this._zOffsetUnits !== e && (this._zOffsetUnits = e, this._isZOffsetDirty = !0); } get cullFace() { return this._cullFace; } set cullFace(e) { this._cullFace !== e && (this._cullFace = e, this._isCullFaceDirty = !0); } get cull() { return this._cull; } set cull(e) { this._cull !== e && (this._cull = e, this._isCullDirty = !0); } get depthFunc() { return this._depthFunc; } set depthFunc(e) { this._depthFunc !== e && (this._depthFunc = e, this._isDepthFuncDirty = !0); } get depthMask() { return this._depthMask; } set depthMask(e) { this._depthMask !== e && (this._depthMask = e, this._isDepthMaskDirty = !0); } get depthTest() { return this._depthTest; } set depthTest(e) { this._depthTest !== e && (this._depthTest = e, this._isDepthTestDirty = !0); } get frontFace() { return this._frontFace; } set frontFace(e) { this._frontFace !== e && (this._frontFace = e, this._isFrontFaceDirty = !0); } reset() { this._depthMask = !0, this._depthTest = !0, this._depthFunc = null, this._cullFace = null, this._cull = null, this._zOffset = 0, this._zOffsetUnits = 0, this._frontFace = null, this._isDepthTestDirty = !0, this._isDepthMaskDirty = !0, this._isDepthFuncDirty = !1, this._isCullFaceDirty = !1, this._isCullDirty = !1, this._isZOffsetDirty = !0, this._isFrontFaceDirty = !1; } apply(e) { this.isDirty && (this._isCullDirty && (this.cull ? e.enable(e.CULL_FACE) : e.disable(e.CULL_FACE), this._isCullDirty = !1), this._isCullFaceDirty && (e.cullFace(this.cullFace), this._isCullFaceDirty = !1), this._isDepthMaskDirty && (e.depthMask(this.depthMask), this._isDepthMaskDirty = !1), this._isDepthTestDirty && (this.depthTest ? e.enable(e.DEPTH_TEST) : e.disable(e.DEPTH_TEST), this._isDepthTestDirty = !1), this._isDepthFuncDirty && (e.depthFunc(this.depthFunc), this._isDepthFuncDirty = !1), this._isZOffsetDirty && (this.zOffset || this.zOffsetUnits ? 
(e.enable(e.POLYGON_OFFSET_FILL), e.polygonOffset(this.zOffset, this.zOffsetUnits)) : e.disable(e.POLYGON_OFFSET_FILL), this._isZOffsetDirty = !1), this._isFrontFaceDirty && (e.frontFace(this.frontFace), this._isFrontFaceDirty = !1)); } } class WC { constructor() { this.reset(); } reset() { this.enabled = !1, this.mask = 255, this.func = WC.ALWAYS, this.funcRef = 1, this.funcMask = 255, this.opStencilFail = WC.KEEP, this.opDepthFail = WC.KEEP, this.opStencilDepthPass = WC.REPLACE; } get stencilFunc() { return this.func; } set stencilFunc(e) { this.func = e; } get stencilFuncRef() { return this.funcRef; } set stencilFuncRef(e) { this.funcRef = e; } get stencilFuncMask() { return this.funcMask; } set stencilFuncMask(e) { this.funcMask = e; } get stencilOpStencilFail() { return this.opStencilFail; } set stencilOpStencilFail(e) { this.opStencilFail = e; } get stencilOpDepthFail() { return this.opDepthFail; } set stencilOpDepthFail(e) { this.opDepthFail = e; } get stencilOpStencilDepthPass() { return this.opStencilDepthPass; } set stencilOpStencilDepthPass(e) { this.opStencilDepthPass = e; } get stencilMask() { return this.mask; } set stencilMask(e) { this.mask = e; } get stencilTest() { return this.enabled; } set stencilTest(e) { this.enabled = e; } } WC.ALWAYS = 519; WC.KEEP = 7680; WC.REPLACE = 7681; class ote { /** * Initializes the state. */ constructor() { this._blendFunctionParameters = new Array(4), this._blendEquationParameters = new Array(2), this._blendConstants = new Array(4), this._isBlendConstantsDirty = !1, this._alphaBlend = !1, this._isAlphaBlendDirty = !1, this._isBlendFunctionParametersDirty = !1, this._isBlendEquationParametersDirty = !1, this.reset(); } get isDirty() { return this._isAlphaBlendDirty || this._isBlendFunctionParametersDirty || this._isBlendEquationParametersDirty; } get alphaBlend() { return this._alphaBlend; } set alphaBlend(e) { this._alphaBlend !== e && (this._alphaBlend = e, this._isAlphaBlendDirty = !0); } setAlphaBlendConstants(e, t, i, r) { this._blendConstants[0] === e && this._blendConstants[1] === t && this._blendConstants[2] === i && this._blendConstants[3] === r || (this._blendConstants[0] = e, this._blendConstants[1] = t, this._blendConstants[2] = i, this._blendConstants[3] = r, this._isBlendConstantsDirty = !0); } setAlphaBlendFunctionParameters(e, t, i, r) { this._blendFunctionParameters[0] === e && this._blendFunctionParameters[1] === t && this._blendFunctionParameters[2] === i && this._blendFunctionParameters[3] === r || (this._blendFunctionParameters[0] = e, this._blendFunctionParameters[1] = t, this._blendFunctionParameters[2] = i, this._blendFunctionParameters[3] = r, this._isBlendFunctionParametersDirty = !0); } setAlphaEquationParameters(e, t) { this._blendEquationParameters[0] === e && this._blendEquationParameters[1] === t || (this._blendEquationParameters[0] = e, this._blendEquationParameters[1] = t, this._isBlendEquationParametersDirty = !0); } reset() { this._alphaBlend = !1, this._blendFunctionParameters[0] = null, this._blendFunctionParameters[1] = null, this._blendFunctionParameters[2] = null, this._blendFunctionParameters[3] = null, this._blendEquationParameters[0] = null, this._blendEquationParameters[1] = null, this._blendConstants[0] = null, this._blendConstants[1] = null, this._blendConstants[2] = null, this._blendConstants[3] = null, this._isAlphaBlendDirty = !0, this._isBlendFunctionParametersDirty = !1, this._isBlendEquationParametersDirty = !1, this._isBlendConstantsDirty = !1; } apply(e) { this.isDirty && 
(this._isAlphaBlendDirty && (this._alphaBlend ? e.enable(e.BLEND) : e.disable(e.BLEND), this._isAlphaBlendDirty = !1), this._isBlendFunctionParametersDirty && (e.blendFuncSeparate(this._blendFunctionParameters[0], this._blendFunctionParameters[1], this._blendFunctionParameters[2], this._blendFunctionParameters[3]), this._isBlendFunctionParametersDirty = !1), this._isBlendEquationParametersDirty && (e.blendEquationSeparate(this._blendEquationParameters[0], this._blendEquationParameters[1]), this._isBlendEquationParametersDirty = !1), this._isBlendConstantsDirty && (e.blendColor(this._blendConstants[0], this._blendConstants[1], this._blendConstants[2], this._blendConstants[3]), this._isBlendConstantsDirty = !1)); } } class nK { /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapU() { return this._cachedWrapU; } set wrapU(e) { this._cachedWrapU = e; } /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapV() { return this._cachedWrapV; } set wrapV(e) { this._cachedWrapV = e; } /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapR() { return this._cachedWrapR; } set wrapR(e) { this._cachedWrapR = e; } /** * With compliant hardware and browser (supporting anisotropic filtering) * this defines the level of anisotropic filtering in the texture. * The higher the better but the slower. */ get anisotropicFilteringLevel() { return this._cachedAnisotropicFilteringLevel; } set anisotropicFilteringLevel(e) { this._cachedAnisotropicFilteringLevel = e; } /** * Gets or sets the comparison function (513, 514, etc). Set 0 to not use a comparison function */ get comparisonFunction() { return this._comparisonFunction; } set comparisonFunction(e) { this._comparisonFunction = e; } /** * Indicates to use the mip maps (if available on the texture). * Thanks to this flag, you can instruct the sampler to not sample the mipmaps even if they exist (and if the sampling mode is set to a value that normally samples the mipmaps!) 
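 * (Illustrative sketch, not from the library's documentation; `nK` is this bundle's minified name for the sampler class, which the library normally exposes under a friendlier name.)
 * @example
 * const sampler = new nK();
 * sampler.setParameters(1, 1, 1, 4, 3); // wrap addressing on U/V/R, anisotropy level 4, sampling mode 3 (trilinear)
 * sampler.useMipMaps = false;           // keep the texture's mipmaps but skip them when sampling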
*/ get useMipMaps() { return this._useMipMaps; } set useMipMaps(e) { this._useMipMaps = e; } /** * Creates a Sampler instance */ constructor() { this.samplingMode = -1, this._useMipMaps = !0, this._cachedWrapU = null, this._cachedWrapV = null, this._cachedWrapR = null, this._cachedAnisotropicFilteringLevel = null, this._comparisonFunction = 0; } /** * Sets all the parameters of the sampler * @param wrapU u address mode (default: TEXTURE_WRAP_ADDRESSMODE) * @param wrapV v address mode (default: TEXTURE_WRAP_ADDRESSMODE) * @param wrapR r address mode (default: TEXTURE_WRAP_ADDRESSMODE) * @param anisotropicFilteringLevel anisotropic level (default: 1) * @param samplingMode sampling mode (default: 2) * @param comparisonFunction comparison function (default: 0 - no comparison function) * @returns the current sampler instance */ setParameters(e = 1, t = 1, i = 1, r = 1, s = 2, n = 0) { return this._cachedWrapU = e, this._cachedWrapV = t, this._cachedWrapR = i, this._cachedAnisotropicFilteringLevel = r, this.samplingMode = s, this._comparisonFunction = n, this; } /** * Compares this sampler with another one * @param other sampler to compare with * @returns true if the samplers have the same parametres, else false */ compareSampler(e) { return this._cachedWrapU === e._cachedWrapU && this._cachedWrapV === e._cachedWrapV && this._cachedWrapR === e._cachedWrapR && this._cachedAnisotropicFilteringLevel === e._cachedAnisotropicFilteringLevel && this.samplingMode === e.samplingMode && this._comparisonFunction === e._comparisonFunction && this._useMipMaps === e._useMipMaps; } } var ts; (function(c) { c[c.Unknown = 0] = "Unknown", c[c.Url = 1] = "Url", c[c.Temp = 2] = "Temp", c[c.Raw = 3] = "Raw", c[c.Dynamic = 4] = "Dynamic", c[c.RenderTarget = 5] = "RenderTarget", c[c.MultiRenderTarget = 6] = "MultiRenderTarget", c[c.Cube = 7] = "Cube", c[c.CubeRaw = 8] = "CubeRaw", c[c.CubePrefiltered = 9] = "CubePrefiltered", c[c.Raw3D = 10] = "Raw3D", c[c.Raw2DArray = 11] = "Raw2DArray", c[c.DepthStencil = 12] = "DepthStencil", c[c.CubeRawRGBD = 13] = "CubeRawRGBD", c[c.Depth = 14] = "Depth"; })(ts || (ts = {})); class ln extends nK { /** * Gets a boolean indicating if the texture uses mipmaps * TODO implements useMipMaps as a separate setting from generateMipMaps */ get useMipMaps() { return this.generateMipMaps; } set useMipMaps(e) { this.generateMipMaps = e; } /** Gets the unique id of the internal texture */ get uniqueId() { return this._uniqueId; } /** @internal */ _setUniqueId(e) { this._uniqueId = e; } /** * Gets the Engine the texture belongs to. 
* @returns The babylon engine */ getEngine() { return this._engine; } /** * Gets the data source type of the texture */ get source() { return this._source; } /** * Creates a new InternalTexture * @param engine defines the engine to use * @param source defines the type of data that will be used * @param delayAllocation if the texture allocation should be delayed (default: false) */ constructor(e, t, i = !1) { super(), this.isReady = !1, this.isCube = !1, this.is3D = !1, this.is2DArray = !1, this.isMultiview = !1, this.url = "", this.generateMipMaps = !1, this.samples = 0, this.type = -1, this.format = -1, this.onLoadedObservable = new Fe(), this.onErrorObservable = new Fe(), this.onRebuildCallback = null, this.width = 0, this.height = 0, this.depth = 0, this.baseWidth = 0, this.baseHeight = 0, this.baseDepth = 0, this.invertY = !1, this._invertVScale = !1, this._associatedChannel = -1, this._source = ts.Unknown, this._buffer = null, this._bufferView = null, this._bufferViewArray = null, this._bufferViewArrayArray = null, this._size = 0, this._extension = "", this._files = null, this._workingCanvas = null, this._workingContext = null, this._cachedCoordinatesMode = null, this._isDisabled = !1, this._compression = null, this._sphericalPolynomial = null, this._sphericalPolynomialPromise = null, this._sphericalPolynomialComputed = !1, this._lodGenerationScale = 0, this._lodGenerationOffset = 0, this._useSRGBBuffer = !1, this._lodTextureHigh = null, this._lodTextureMid = null, this._lodTextureLow = null, this._isRGBD = !1, this._linearSpecularLOD = !1, this._irradianceTexture = null, this._hardwareTexture = null, this._maxLodLevel = null, this._references = 1, this._gammaSpace = null, this._premulAlpha = !1, this._dynamicTextureSource = null, this._engine = e, this._source = t, this._uniqueId = ln._Counter++, i || (this._hardwareTexture = e._createHardwareTexture()); } /** * Increments the number of references (ie. the number of Texture that point to it) */ incrementReferences() { this._references++; } /** * Change the size of the texture (not the size of the content) * @param width defines the new width * @param height defines the new height * @param depth defines the new depth (1 by default) */ updateSize(e, t, i = 1) { this._engine.updateTextureDimensions(this, e, t, i), this.width = e, this.height = t, this.depth = i, this.baseWidth = e, this.baseHeight = t, this.baseDepth = i, this._size = e * t * i; } /** @internal */ _rebuild() { var e; if (this.isReady = !1, this._cachedCoordinatesMode = null, this._cachedWrapU = null, this._cachedWrapV = null, this._cachedWrapR = null, this._cachedAnisotropicFilteringLevel = null, this.onRebuildCallback) { const i = this.onRebuildCallback(this), r = (s) => { s._swapAndDie(this, !1), this.isReady = i.isReady; }; i.isAsync ? i.proxy.then(r) : r(i.proxy); return; } let t; switch (this.source) { case ts.Temp: break; case ts.Url: t = this._engine.createTexture( (e = this._originalUrl) !== null && e !== void 0 ? e : this.url, !this.generateMipMaps, this.invertY, null, this.samplingMode, // Do not use Proxy here as it could be fully synchronous // and proxy would be undefined. 
(i) => { i._swapAndDie(this, !1), this.isReady = !0; }, null, this._buffer, void 0, this.format, this._extension, void 0, void 0, void 0, this._useSRGBBuffer ); return; case ts.Raw: t = this._engine.createRawTexture(this._bufferView, this.baseWidth, this.baseHeight, this.format, this.generateMipMaps, this.invertY, this.samplingMode, this._compression, this.type, void 0, this._useSRGBBuffer), t._swapAndDie(this, !1), this.isReady = !0; break; case ts.Raw3D: t = this._engine.createRawTexture3D(this._bufferView, this.baseWidth, this.baseHeight, this.baseDepth, this.format, this.generateMipMaps, this.invertY, this.samplingMode, this._compression, this.type), t._swapAndDie(this, !1), this.isReady = !0; break; case ts.Raw2DArray: t = this._engine.createRawTexture2DArray(this._bufferView, this.baseWidth, this.baseHeight, this.baseDepth, this.format, this.generateMipMaps, this.invertY, this.samplingMode, this._compression, this.type), t._swapAndDie(this, !1), this.isReady = !0; break; case ts.Dynamic: t = this._engine.createDynamicTexture(this.baseWidth, this.baseHeight, this.generateMipMaps, this.samplingMode), t._swapAndDie(this, !1), this._dynamicTextureSource && this._engine.updateDynamicTexture(this, this._dynamicTextureSource, this.invertY, this._premulAlpha, this.format, !0); break; case ts.Cube: t = this._engine.createCubeTexture(this.url, null, this._files, !this.generateMipMaps, () => { t._swapAndDie(this, !1), this.isReady = !0; }, null, this.format, this._extension, !1, 0, 0, null, void 0, this._useSRGBBuffer); return; case ts.CubeRaw: t = this._engine.createRawCubeTexture(this._bufferViewArray, this.width, this.format, this.type, this.generateMipMaps, this.invertY, this.samplingMode, this._compression), t._swapAndDie(this, !1), this.isReady = !0; break; case ts.CubeRawRGBD: return; case ts.CubePrefiltered: t = this._engine.createPrefilteredCubeTexture(this.url, null, this._lodGenerationScale, this._lodGenerationOffset, (i) => { i && i._swapAndDie(this, !1), this.isReady = !0; }, null, this.format, this._extension), t._sphericalPolynomial = this._sphericalPolynomial; return; } } /** * @internal */ _swapAndDie(e, t = !0) { var i; (i = this._hardwareTexture) === null || i === void 0 || i.setUsage(e._source, this.generateMipMaps, this.isCube, this.width, this.height), e._hardwareTexture = this._hardwareTexture, t && (e._isRGBD = this._isRGBD), this._lodTextureHigh && (e._lodTextureHigh && e._lodTextureHigh.dispose(), e._lodTextureHigh = this._lodTextureHigh), this._lodTextureMid && (e._lodTextureMid && e._lodTextureMid.dispose(), e._lodTextureMid = this._lodTextureMid), this._lodTextureLow && (e._lodTextureLow && e._lodTextureLow.dispose(), e._lodTextureLow = this._lodTextureLow), this._irradianceTexture && (e._irradianceTexture && e._irradianceTexture.dispose(), e._irradianceTexture = this._irradianceTexture); const r = this._engine.getLoadedTexturesCache(); let s = r.indexOf(this); s !== -1 && r.splice(s, 1), s = r.indexOf(e), s === -1 && r.push(e); } /** * Dispose the current allocated resources */ dispose() { this._references--, this.onLoadedObservable.clear(), this.onErrorObservable.clear(), this._references === 0 && (this._engine._releaseTexture(this), this._hardwareTexture = null, this._dynamicTextureSource = null); } } ln._Counter = 0; class $le { constructor() { this.shaderLanguage = Xa.GLSL; } postProcessor(e, t, i, r, s) { if (!s.getCaps().drawBuffersExtension) { const n = /#extension.+GL_EXT_draw_buffers.+(enable|require)/g; e = e.replace(n, ""); } return e; } } const Zle = 
/(flat\s)?\s*varying\s*.*/; class aK { constructor() { this.shaderLanguage = Xa.GLSL; } attributeProcessor(e) { return e.replace("attribute", "in"); } varyingCheck(e, t) { return Zle.test(e); } varyingProcessor(e, t) { return e.replace("varying", t ? "in" : "out"); } postProcessor(e, t, i) { const r = e.search(/#extension.+GL_EXT_draw_buffers.+require/) !== -1, s = /#extension.+(GL_OVR_multiview2|GL_OES_standard_derivatives|GL_EXT_shader_texture_lod|GL_EXT_frag_depth|GL_EXT_draw_buffers).+(enable|require)/g; if (e = e.replace(s, ""), e = e.replace(/texture2D\s*\(/g, "texture("), i) { const n = e.search(/layout *\(location *= *0\) *out/g) !== -1; e = e.replace(/texture2DLodEXT\s*\(/g, "textureLod("), e = e.replace(/textureCubeLodEXT\s*\(/g, "textureLod("), e = e.replace(/textureCube\s*\(/g, "texture("), e = e.replace(/gl_FragDepthEXT/g, "gl_FragDepth"), e = e.replace(/gl_FragColor/g, "glFragColor"), e = e.replace(/gl_FragData/g, "glFragData"), e = e.replace(/void\s+?main\s*\(/g, (r || n ? "" : `layout(location = 0) out vec4 glFragColor; `) + "void main("); } else if (t.indexOf("#define MULTIVIEW") !== -1) return `#extension GL_OVR_multiview2 : require layout (num_views = 2) in; ` + e; return e; } } class JA { /** * Gets the underlying buffer */ get underlyingResource() { return null; } /** * Constructs the buffer */ constructor() { this.references = 0, this.capacity = 0, this.is32Bits = !1, this.uniqueId = JA._Counter++; } } JA._Counter = 0; class FO extends JA { constructor(e) { super(), this._buffer = e; } get underlyingResource() { return this._buffer; } } class lte { constructor() { this._valueCache = {}, this.vertexCompilationError = null, this.fragmentCompilationError = null, this.programLinkError = null, this.programValidationError = null, this._isDisposed = !1; } get isAsync() { return this.isParallelCompiled; } get isReady() { return this.program ? this.isParallelCompiled ? this.engine._isRenderingStateCompiled(this) : !0 : !1; } _handlesSpectorRebuildCallback(e) { e && this.program && e(this.program); } _fillEffectInformation(e, t, i, r, s, n, a, l) { const o = this.engine; if (o.supportsUniformBuffers) for (const d in t) e.bindUniformBlock(d, t[d]); this.engine.getUniforms(this, i).forEach((d, f) => { r[i[f]] = d; }), this._uniforms = r; let h; for (h = 0; h < s.length; h++) e.getUniform(s[h]) == null && (s.splice(h, 1), h--); s.forEach((d, f) => { n[d] = f; }); for (const d of o.getAttributes(this, a)) l.push(d); } /** * Release all associated resources. **/ dispose() { this._uniforms = {}, this._isDisposed = !0; } /** * @internal */ _cacheMatrix(e, t) { const i = this._valueCache[e], r = t.updateFlag; return i !== void 0 && i === r ? 
!1 : (this._valueCache[e] = r, !0); } /** * @internal */ _cacheFloat2(e, t, i) { let r = this._valueCache[e]; if (!r || r.length !== 2) return r = [t, i], this._valueCache[e] = r, !0; let s = !1; return r[0] !== t && (r[0] = t, s = !0), r[1] !== i && (r[1] = i, s = !0), s; } /** * @internal */ _cacheFloat3(e, t, i, r) { let s = this._valueCache[e]; if (!s || s.length !== 3) return s = [t, i, r], this._valueCache[e] = s, !0; let n = !1; return s[0] !== t && (s[0] = t, n = !0), s[1] !== i && (s[1] = i, n = !0), s[2] !== r && (s[2] = r, n = !0), n; } /** * @internal */ _cacheFloat4(e, t, i, r, s) { let n = this._valueCache[e]; if (!n || n.length !== 4) return n = [t, i, r, s], this._valueCache[e] = n, !0; let a = !1; return n[0] !== t && (n[0] = t, a = !0), n[1] !== i && (n[1] = i, a = !0), n[2] !== r && (n[2] = r, a = !0), n[3] !== s && (n[3] = s, a = !0), a; } /** * Sets an integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. */ setInt(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this.engine.setInt(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets a int2 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int2. * @param y Second int in int2. */ setInt2(e, t, i) { this._cacheFloat2(e, t, i) && (this.engine.setInt2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets a int3 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int3. * @param y Second int in int3. * @param z Third int in int3. */ setInt3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this.engine.setInt3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets a int4 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int4. * @param y Second int in int4. * @param z Third int in int4. * @param w Fourth int in int4. */ setInt4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this.engine.setInt4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets an int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray(e, t) { this._valueCache[e] = null, this.engine.setIntArray(this._uniforms[e], t); } /** * Sets an int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray2(e, t) { this._valueCache[e] = null, this.engine.setIntArray2(this._uniforms[e], t); } /** * Sets an int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray3(e, t) { this._valueCache[e] = null, this.engine.setIntArray3(this._uniforms[e], t); } /** * Sets an int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray4(e, t) { this._valueCache[e] = null, this.engine.setIntArray4(this._uniforms[e], t); } /** * Sets an unsigned integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. 
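 * (Illustrative note and sketch, not from the library's documentation: these pipeline-context setters are normally reached through the matching Effect methods rather than called directly, and the value cache means a repeated identical value issues no extra WebGL call. `effect` and `uSampleCount` are hypothetical.)
 * @example
 * effect.setUInt("uSampleCount", 8); // forwarded to the pipeline context and sent to the GPU
 * effect.setUInt("uSampleCount", 8); // cache hit inside the pipeline context, no GL call issued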
*/ setUInt(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this.engine.setUInt(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets an unsigned int2 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint2. * @param y Second unsigned int in uint2. */ setUInt2(e, t, i) { this._cacheFloat2(e, t, i) && (this.engine.setUInt2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets an unsigned int3 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint3. * @param y Second unsigned int in uint3. * @param z Third unsigned int in uint3. */ setUInt3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this.engine.setUInt3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets an unsigned int4 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint4. * @param y Second unsigned int in uint4. * @param z Third unsigned int in uint4. * @param w Fourth unsigned int in uint4. */ setUInt4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this.engine.setUInt4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets an unsigned int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray(e, t) { this._valueCache[e] = null, this.engine.setUIntArray(this._uniforms[e], t); } /** * Sets an unsigned int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray2(e, t) { this._valueCache[e] = null, this.engine.setUIntArray2(this._uniforms[e], t); } /** * Sets an unsigned int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray3(e, t) { this._valueCache[e] = null, this.engine.setUIntArray3(this._uniforms[e], t); } /** * Sets an unsigned int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray4(e, t) { this._valueCache[e] = null, this.engine.setUIntArray4(this._uniforms[e], t); } /** * Sets an array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setArray(e, t) { this._valueCache[e] = null, this.engine.setArray(this._uniforms[e], t); } /** * Sets an array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray2(e, t) { this._valueCache[e] = null, this.engine.setArray2(this._uniforms[e], t); } /** * Sets an array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray3(e, t) { this._valueCache[e] = null, this.engine.setArray3(this._uniforms[e], t); } /** * Sets an array 4 on a uniform variable. (Array is specified as single array eg. 
[1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray4(e, t) { this._valueCache[e] = null, this.engine.setArray4(this._uniforms[e], t); } /** * Sets matrices on a uniform variable. * @param uniformName Name of the variable. * @param matrices matrices to be set. */ setMatrices(e, t) { t && (this._valueCache[e] = null, this.engine.setMatrices(this._uniforms[e], t)); } /** * Sets matrix on a uniform variable. * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix(e, t) { this._cacheMatrix(e, t) && (this.engine.setMatrices(this._uniforms[e], t.toArray()) || (this._valueCache[e] = null)); } /** * Sets a 3x3 matrix on a uniform variable. (Specified as [1,2,3,4,5,6,7,8,9] will result in [1,2,3][4,5,6][7,8,9] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix3x3(e, t) { this._valueCache[e] = null, this.engine.setMatrix3x3(this._uniforms[e], t); } /** * Sets a 2x2 matrix on a uniform variable. (Specified as [1,2,3,4] will result in [1,2][3,4] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix2x2(e, t) { this._valueCache[e] = null, this.engine.setMatrix2x2(this._uniforms[e], t); } /** * Sets a float on a uniform variable. * @param uniformName Name of the variable. * @param value value to be set. * @returns this effect. */ setFloat(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this.engine.setFloat(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets a Vector2 on a uniform variable. * @param uniformName Name of the variable. * @param vector2 vector2 to be set. */ setVector2(e, t) { this._cacheFloat2(e, t.x, t.y) && (this.engine.setFloat2(this._uniforms[e], t.x, t.y) || (this._valueCache[e] = null)); } /** * Sets a float2 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float2. * @param y Second float in float2. */ setFloat2(e, t, i) { this._cacheFloat2(e, t, i) && (this.engine.setFloat2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets a Vector3 on a uniform variable. * @param uniformName Name of the variable. * @param vector3 Value to be set. */ setVector3(e, t) { this._cacheFloat3(e, t.x, t.y, t.z) && (this.engine.setFloat3(this._uniforms[e], t.x, t.y, t.z) || (this._valueCache[e] = null)); } /** * Sets a float3 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float3. * @param y Second float in float3. * @param z Third float in float3. */ setFloat3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this.engine.setFloat3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets a Vector4 on a uniform variable. * @param uniformName Name of the variable. * @param vector4 Value to be set. */ setVector4(e, t) { this._cacheFloat4(e, t.x, t.y, t.z, t.w) && (this.engine.setFloat4(this._uniforms[e], t.x, t.y, t.z, t.w) || (this._valueCache[e] = null)); } /** * Sets a Quaternion on a uniform variable. * @param uniformName Name of the variable. * @param quaternion Value to be set. */ setQuaternion(e, t) { this._cacheFloat4(e, t.x, t.y, t.z, t.w) && (this.engine.setFloat4(this._uniforms[e], t.x, t.y, t.z, t.w) || (this._valueCache[e] = null)); } /** * Sets a float4 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float4. * @param y Second float in float4. * @param z Third float in float4. 
* @param w Fourth float in float4. * @returns this effect. */ setFloat4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this.engine.setFloat4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets a Color3 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. */ setColor3(e, t) { this._cacheFloat3(e, t.r, t.g, t.b) && (this.engine.setFloat3(this._uniforms[e], t.r, t.g, t.b) || (this._valueCache[e] = null)); } /** * Sets a Color4 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. * @param alpha Alpha value to be set. */ setColor4(e, t, i) { this._cacheFloat4(e, t.r, t.g, t.b, i) && (this.engine.setFloat4(this._uniforms[e], t.r, t.g, t.b, i) || (this._valueCache[e] = null)); } /** * Sets a Color4 on a uniform variable * @param uniformName defines the name of the variable * @param color4 defines the value to be set */ setDirectColor4(e, t) { this._cacheFloat4(e, t.r, t.g, t.b, t.a) && (this.engine.setFloat4(this._uniforms[e], t.r, t.g, t.b, t.a) || (this._valueCache[e] = null)); } _getVertexShaderCode() { return this.vertexShader ? this.engine._getShaderSource(this.vertexShader) : null; } _getFragmentShaderCode() { return this.fragmentShader ? this.engine._getShaderSource(this.fragmentShader) : null; } } class BI { get underlyingResource() { return this._webGLTexture; } constructor(e = null, t) { if (this._MSAARenderBuffers = null, this._context = t, !e && (e = t.createTexture(), !e)) throw new Error("Unable to create webGL texture"); this.set(e); } setUsage() { } set(e) { this._webGLTexture = e; } reset() { this._webGLTexture = null, this._MSAARenderBuffers = null; } addMSAARenderBuffer(e) { this._MSAARenderBuffers || (this._MSAARenderBuffers = []), this._MSAARenderBuffers.push(e); } releaseMSAARenderBuffers() { if (this._MSAARenderBuffers) { for (const e of this._MSAARenderBuffers) this._context.deleteRenderbuffer(e); this._MSAARenderBuffers = null; } } getMSAARenderBuffer(e = 0) { var t, i; return (i = (t = this._MSAARenderBuffers) === null || t === void 0 ? void 0 : t[e]) !== null && i !== void 0 ? i : null; } release() { this.releaseMSAARenderBuffers(), this._webGLTexture && this._context.deleteTexture(this._webGLTexture), this.reset(); } } class $o { static IsWrapper(e) { return e.getPipelineContext === void 0; } static GetEffect(e) { return e.getPipelineContext === void 0 ? 
e.effect : e; } constructor(e, t = !0) { this.effect = null, this.defines = null, this.drawContext = e.createDrawContext(), t && (this.materialContext = e.createMaterialContext()); } setEffect(e, t, i = !0) { var r; this.effect = e, t !== void 0 && (this.defines = t), i && ((r = this.drawContext) === null || r === void 0 || r.reset()); } dispose() { var e; (e = this.drawContext) === null || e === void 0 || e.dispose(); } } class oK { get isDirty() { return this._isStencilTestDirty || this._isStencilMaskDirty || this._isStencilFuncDirty || this._isStencilOpDirty; } get func() { return this._func; } set func(e) { this._func !== e && (this._func = e, this._isStencilFuncDirty = !0); } get funcRef() { return this._funcRef; } set funcRef(e) { this._funcRef !== e && (this._funcRef = e, this._isStencilFuncDirty = !0); } get funcMask() { return this._funcMask; } set funcMask(e) { this._funcMask !== e && (this._funcMask = e, this._isStencilFuncDirty = !0); } get opStencilFail() { return this._opStencilFail; } set opStencilFail(e) { this._opStencilFail !== e && (this._opStencilFail = e, this._isStencilOpDirty = !0); } get opDepthFail() { return this._opDepthFail; } set opDepthFail(e) { this._opDepthFail !== e && (this._opDepthFail = e, this._isStencilOpDirty = !0); } get opStencilDepthPass() { return this._opStencilDepthPass; } set opStencilDepthPass(e) { this._opStencilDepthPass !== e && (this._opStencilDepthPass = e, this._isStencilOpDirty = !0); } get mask() { return this._mask; } set mask(e) { this._mask !== e && (this._mask = e, this._isStencilMaskDirty = !0); } get enabled() { return this._enabled; } set enabled(e) { this._enabled !== e && (this._enabled = e, this._isStencilTestDirty = !0); } constructor(e = !0) { this._isStencilTestDirty = !1, this._isStencilMaskDirty = !1, this._isStencilFuncDirty = !1, this._isStencilOpDirty = !1, this.useStencilGlobalOnly = !1, e && this.reset(); } reset() { var e; this.stencilMaterial = void 0, (e = this.stencilGlobal) === null || e === void 0 || e.reset(), this._isStencilTestDirty = !0, this._isStencilMaskDirty = !0, this._isStencilFuncDirty = !0, this._isStencilOpDirty = !0; } apply(e) { var t; if (!e) return; const i = !this.useStencilGlobalOnly && !!(!((t = this.stencilMaterial) === null || t === void 0) && t.enabled); this.enabled = i ? this.stencilMaterial.enabled : this.stencilGlobal.enabled, this.func = i ? this.stencilMaterial.func : this.stencilGlobal.func, this.funcRef = i ? this.stencilMaterial.funcRef : this.stencilGlobal.funcRef, this.funcMask = i ? this.stencilMaterial.funcMask : this.stencilGlobal.funcMask, this.opStencilFail = i ? this.stencilMaterial.opStencilFail : this.stencilGlobal.opStencilFail, this.opDepthFail = i ? this.stencilMaterial.opDepthFail : this.stencilGlobal.opDepthFail, this.opStencilDepthPass = i ? this.stencilMaterial.opStencilDepthPass : this.stencilGlobal.opStencilDepthPass, this.mask = i ? this.stencilMaterial.mask : this.stencilGlobal.mask, this.isDirty && (this._isStencilTestDirty && (this.enabled ? 
e.enable(e.STENCIL_TEST) : e.disable(e.STENCIL_TEST), this._isStencilTestDirty = !1), this._isStencilMaskDirty && (e.stencilMask(this.mask), this._isStencilMaskDirty = !1), this._isStencilFuncDirty && (e.stencilFunc(this.func, this.funcRef, this.funcMask), this._isStencilFuncDirty = !1), this._isStencilOpDirty && (e.stencilOp(this.opStencilFail, this.opDepthFail, this.opStencilDepthPass), this._isStencilOpDirty = !1)); } } class qle { } class mi { /** * Returns the current npm package of the sdk */ // Not mixed with Version for tooling purpose. static get NpmPackage() { return "babylonjs@6.35.0"; } /** * Returns the current version of the framework */ static get Version() { return "6.35.0"; } /** * Returns a string describing the current engine */ get description() { let e = this.name + this.webGLVersion; return this._caps.parallelShaderCompile && (e += " - Parallel shader compilation"), e; } /** * Gets or sets the name of the engine */ get name() { return this._name; } set name(e) { this._name = e; } /** * Returns the version of the engine */ get version() { return this._webGLVersion; } get isDisposed() { return this._isDisposed; } /** * Gets or sets the relative url used to load shaders if using the engine in non-minified mode */ static get ShadersRepository() { return Cr.ShadersRepository; } static set ShadersRepository(e) { Cr.ShadersRepository = e; } /** * @internal */ _getShaderProcessor(e) { return this._shaderProcessor; } /** * Gets or sets a boolean indicating if depth buffer should be reverse, going from far to near. * This can provide greater z depth for distant objects. */ get useReverseDepthBuffer() { return this._useReverseDepthBuffer; } set useReverseDepthBuffer(e) { e !== this._useReverseDepthBuffer && (this._useReverseDepthBuffer = e, e ? 
this._depthCullingState.depthFunc = 518 : this._depthCullingState.depthFunc = 515); } /** * Gets the current frame id */ get frameId() { return this._frameId; } /** * Gets a boolean indicating that the engine supports uniform buffers * @see https://doc.babylonjs.com/setup/support/webGL2#uniform-buffer-objets */ get supportsUniformBuffers() { return this.webGLVersion > 1 && !this.disableUniformBuffers; } /** * Gets the options used for engine creation * @returns EngineOptions object */ getCreationOptions() { return this._creationOptions; } /** @internal */ get _shouldUseHighPrecisionShader() { return !!(this._caps.highPrecisionShaderSupported && this._highPrecisionShadersAllowed); } /** * Gets a boolean indicating that only power of 2 textures are supported * Please note that you can still use non power of 2 textures but in this case the engine will forcefully convert them */ get needPOTTextures() { return this._webGLVersion < 2 || this.forcePOTTextures; } /** * Gets the list of current active render loop functions * @returns an array with the current render loop functions */ get activeRenderLoops() { return this._activeRenderLoops; } /** * Gets or sets a boolean indicating if resources should be retained to be able to handle context lost events * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#handling-webgl-context-lost */ get doNotHandleContextLost() { return this._doNotHandleContextLost; } set doNotHandleContextLost(e) { this._doNotHandleContextLost = e; } get _supportsHardwareTextureRescaling() { return !1; } /** * sets the object from which width and height will be taken from when getting render width and height * Will fallback to the gl object * @param dimensions the framebuffer width and height that will be used. */ set framebufferDimensionsObject(e) { this._framebufferDimensionsObject = e; } /** * Gets the current viewport */ get currentViewport() { return this._cachedViewport; } /** * Gets the default empty texture */ get emptyTexture() { return this._emptyTexture || (this._emptyTexture = this.createRawTexture(new Uint8Array(4), 1, 1, 5, !1, !1, 1)), this._emptyTexture; } /** * Gets the default empty 3D texture */ get emptyTexture3D() { return this._emptyTexture3D || (this._emptyTexture3D = this.createRawTexture3D(new Uint8Array(4), 1, 1, 1, 5, !1, !1, 1)), this._emptyTexture3D; } /** * Gets the default empty 2D array texture */ get emptyTexture2DArray() { return this._emptyTexture2DArray || (this._emptyTexture2DArray = this.createRawTexture2DArray(new Uint8Array(4), 1, 1, 1, 5, !1, !1, 1)), this._emptyTexture2DArray; } /** * Gets the default empty cube texture */ get emptyCubeTexture() { if (!this._emptyCubeTexture) { const e = new Uint8Array(4), t = [e, e, e, e, e, e]; this._emptyCubeTexture = this.createRawCubeTexture(t, 1, 5, 0, !1, !1, 1); } return this._emptyCubeTexture; } /** * Gets a boolean indicating if the engine runs in WebGPU or not. */ get isWebGPU() { return this._isWebGPU; } /** * Gets the shader platform name used by the effects. 
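 * (Illustrative sketch, not from the library's documentation; `engine` is assumed to be an instance of this WebGL engine class created elsewhere.)
 * @example
 * console.log(engine.description);        // "WebGL2" plus a parallel-shader-compilation note when supported
 * console.log(engine.shaderPlatformName); // "WEBGL2" or "WEBGL1"
 * engine.useReverseDepthBuffer = true;    // switches the cached depth function from 515 (LEQUAL) to 518 (GEQUAL)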
*/ get shaderPlatformName() { return this._shaderPlatformName; } /** * Enables or disables the snapshot rendering mode * Note that the WebGL engine does not support snapshot rendering so setting the value won't have any effect for this engine */ get snapshotRendering() { return !1; } set snapshotRendering(e) { } /** * Gets or sets the snapshot rendering mode */ get snapshotRenderingMode() { return this._snapshotRenderingMode; } set snapshotRenderingMode(e) { this._snapshotRenderingMode = e; } /** * Creates a new snapshot at the next frame using the current snapshotRenderingMode */ snapshotRenderingReset() { this.snapshotRendering = !1; } static _CreateCanvas(e, t) { if (typeof document > "u") return new OffscreenCanvas(e, t); const i = document.createElement("canvas"); return i.width = e, i.height = t, i; } /** * Create a canvas. This method is overridden by other engines * @param width width * @param height height * @returns ICanvas interface */ createCanvas(e, t) { return mi._CreateCanvas(e, t); } /** * Create an image to use with canvas * @returns IImage interface */ createCanvasImage() { return document.createElement("img"); } /** * Creates a new engine * @param canvasOrContext defines the canvas or WebGL context to use for rendering. If you provide a WebGL context, Babylon.js will not hook events on the canvas (like pointers, keyboards, etc...) so no event observables will be available. This is mostly used when Babylon.js is used as a plugin on a system which already used the WebGL context * @param antialias defines enable antialiasing (default: false) * @param options defines further options to be sent to the getContext() function * @param adaptToDeviceRatio defines whether to adapt to the device's viewport characteristics (default: false) */ constructor(e, t, i, r) { var s, n, a, l, o, u, h, d, f, p, m; this._name = "WebGL", this._isDisposed = !1, this.forcePOTTextures = !1, this.isFullscreen = !1, this.cullBackFaces = null, this.renderEvenInBackground = !0, this.preventCacheWipeBetweenFrames = !1, this.validateShaderPrograms = !1, this._useReverseDepthBuffer = !1, this.isNDCHalfZRange = !1, this.hasOriginBottomLeft = !0, this.disableUniformBuffers = !1, this.onDisposeObservable = new Fe(), this._frameId = 0, this._uniformBuffers = new Array(), this._storageBuffers = new Array(), this._webGLVersion = 1, this._windowIsBackground = !1, this._highPrecisionShadersAllowed = !0, this._badOS = !1, this._badDesktopOS = !1, this._renderingQueueLaunched = !1, this._activeRenderLoops = new Array(), this.onContextLostObservable = new Fe(), this.onContextRestoredObservable = new Fe(), this._contextWasLost = !1, this._doNotHandleContextLost = !1, this.disableVertexArrayObjects = !1, this._colorWrite = !0, this._colorWriteChanged = !0, this._depthCullingState = new sK(), this._stencilStateComposer = new oK(), this._stencilState = new WC(), this._alphaState = new ote(), this._alphaMode = 1, this._alphaEquation = 0, this._internalTexturesCache = new Array(), this._renderTargetWrapperCache = new Array(), this._activeChannel = 0, this._currentTextureChannel = -1, this._boundTexturesCache = {}, this._compiledEffects = {}, this._vertexAttribArraysEnabled = [], this._currentRenderTarget = null, this._uintIndicesCurrentlySet = !1, this._currentBoundBuffer = new Array(), this._currentFramebuffer = null, this._dummyFramebuffer = null, this._currentBufferPointers = new Array(), this._currentInstanceLocations = new Array(), this._currentInstanceBuffers = new Array(), this._vaoRecordInProgress = !1, 
this._mustWipeVertexAttributes = !1, this._nextFreeTextureSlots = new Array(), this._maxSimultaneousTextures = 0, this._maxMSAASamplesOverride = null, this._activeRequests = new Array(), this.adaptToDeviceRatio = !1, this._lastDevicePixelRatio = 1, this._transformTextureUrl = null, this.hostInformation = { isMobile: !1 }, this.premultipliedAlpha = !0, this.onBeforeTextureInitObservable = new Fe(), this._isWebGPU = !1, this._snapshotRenderingMode = 0, this._viewportCached = { x: 0, y: 0, z: 0, w: 0 }, this._unpackFlipYCached = null, this.enableUnpackFlipYCached = !0, this._boundUniforms = {}, this.startTime = Gs.Now; let _ = null; i = i || {}, this._creationOptions = i, this.adaptToDeviceRatio = r ?? !1, this._stencilStateComposer.stencilGlobal = this._stencilState, Uu.SetMatrixPrecision(!!i.useHighPrecisionMatrix), i.antialias = t ?? i.antialias, i.deterministicLockstep = (s = i.deterministicLockstep) !== null && s !== void 0 ? s : !1, i.lockstepMaxSteps = (n = i.lockstepMaxSteps) !== null && n !== void 0 ? n : 4, i.timeStep = (a = i.timeStep) !== null && a !== void 0 ? a : 1 / 60, i.audioEngine = (l = i.audioEngine) !== null && l !== void 0 ? l : !0, i.stencil = (o = i.stencil) !== null && o !== void 0 ? o : !0, this._audioContext = (h = (u = i.audioEngineOptions) === null || u === void 0 ? void 0 : u.audioContext) !== null && h !== void 0 ? h : null, this._audioDestination = (f = (d = i.audioEngineOptions) === null || d === void 0 ? void 0 : d.audioDestination) !== null && f !== void 0 ? f : null, this.premultipliedAlpha = (p = i.premultipliedAlpha) !== null && p !== void 0 ? p : !0, this.useExactSrgbConversions = (m = i.useExactSrgbConversions) !== null && m !== void 0 ? m : !1, this._doNotHandleContextLost = !!i.doNotHandleContextLost, this._isStencilEnable = !!i.stencil, r = r || i.adaptToDeviceRatio || !1; const v = cu() && window.devicePixelRatio || 1, C = i.limitDeviceRatio || v; if (this._hardwareScalingLevel = r ? 
1 / Math.min(C, v) : 1, this._lastDevicePixelRatio = v, !e) return; if (e.getContext) { if (_ = e, this._renderingCanvas = _, i.preserveDrawingBuffer === void 0 && (i.preserveDrawingBuffer = !1), i.xrCompatible === void 0 && (i.xrCompatible = !0), navigator && navigator.userAgent) { this._setupMobileChecks(); const b = navigator.userAgent; for (const S of mi.ExceptionList) { const M = S.key, R = S.targets; if (new RegExp(M).test(b)) { if (S.capture && S.captureConstraint) { const V = S.capture, k = S.captureConstraint, B = new RegExp(V).exec(b); if (B && B.length > 0 && parseInt(B[B.length - 1]) >= k) continue; } for (const V of R) switch (V) { case "uniformBuffer": this.disableUniformBuffers = !0; break; case "vao": this.disableVertexArrayObjects = !0; break; case "antialias": i.antialias = !1; break; case "maxMSAASamples": this._maxMSAASamplesOverride = 1; break; } } } } if (this._doNotHandleContextLost || (this._onContextLost = (b) => { b.preventDefault(), this._contextWasLost = !0, Ce.Warn("WebGL context lost."), this.onContextLostObservable.notifyObservers(this); }, this._onContextRestored = () => { this._restoreEngineAfterContextLost(() => this._initGLContext()); }, _.addEventListener("webglcontextlost", this._onContextLost, !1), _.addEventListener("webglcontextrestored", this._onContextRestored, !1), i.powerPreference = i.powerPreference || "high-performance"), this._badDesktopOS = /^((?!chrome|android).)*safari/i.test(navigator.userAgent), this._badDesktopOS && (i.xrCompatible = !1), !i.disableWebGL2Support) try { this._gl = _.getContext("webgl2", i) || _.getContext("experimental-webgl2", i), this._gl && (this._webGLVersion = 2, this._shaderPlatformName = "WEBGL2", this._gl.deleteQuery || (this._webGLVersion = 1, this._shaderPlatformName = "WEBGL1")); } catch { } if (!this._gl) { if (!_) throw new Error("The provided canvas is null or undefined."); try { this._gl = _.getContext("webgl", i) || _.getContext("experimental-webgl", i); } catch { throw new Error("WebGL not supported"); } } if (!this._gl) throw new Error("WebGL not supported"); } else { this._gl = e, this._renderingCanvas = this._gl.canvas, this._gl.renderbufferStorageMultisample ? (this._webGLVersion = 2, this._shaderPlatformName = "WEBGL2") : this._shaderPlatformName = "WEBGL1"; const b = this._gl.getContextAttributes(); b && (i.stencil = b.stencil); } this._gl.pixelStorei(this._gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, this._gl.NONE), i.useHighPrecisionFloats !== void 0 && (this._highPrecisionShadersAllowed = i.useHighPrecisionFloats), this.resize(), this._initGLContext(), this._initFeatures(); for (let b = 0; b < this._caps.maxVertexAttribs; b++) this._currentBufferPointers[b] = new qle(); this._shaderProcessor = this.webGLVersion > 1 ? 
new aK() : new $le(), this._badOS = /iPad/i.test(navigator.userAgent) || /iPhone/i.test(navigator.userAgent); const x = `Babylon.js v${mi.Version}`; Ce.Log(x + ` - ${this.description}`), this._renderingCanvas && this._renderingCanvas.setAttribute && this._renderingCanvas.setAttribute("data-engine", x); } _setupMobileChecks() { navigator && navigator.userAgent && (this._checkForMobile = () => { const e = navigator.userAgent; this.hostInformation.isMobile = e.indexOf("Mobile") !== -1 || // Needed for iOS 13+ detection on iPad (inspired by solution from https://stackoverflow.com/questions/9038625/detect-if-device-is-ios) e.indexOf("Mac") !== -1 && qR() && "ontouchend" in document; }, this._checkForMobile(), cu() && window.addEventListener("resize", this._checkForMobile)); } _restoreEngineAfterContextLost(e) { setTimeout(async () => { var t; this._dummyFramebuffer = null; const i = this._depthCullingState.depthTest, r = this._depthCullingState.depthFunc, s = this._depthCullingState.depthMask, n = this._stencilState.stencilTest; await e(), this.wipeCaches(!0), this._rebuildEffects(), (t = this._rebuildComputeEffects) === null || t === void 0 || t.call(this), this._rebuildBuffers(), this._rebuildInternalTextures(), this._rebuildRenderTargetWrappers(), this.wipeCaches(!0), this._depthCullingState.depthTest = i, this._depthCullingState.depthFunc = r, this._depthCullingState.depthMask = s, this._stencilState.stencilTest = n, Ce.Warn(this.name + " context successfully restored."), this.onContextRestoredObservable.notifyObservers(this), this._contextWasLost = !1; }, 0); } /** * Shared initialization across engines types. * @param canvas The canvas associated with this instance of the engine. */ _sharedInit(e) { this._renderingCanvas = e; } /** * @internal */ _getShaderProcessingContext(e) { return null; } _rebuildInternalTextures() { const e = this._internalTexturesCache.slice(); for (const t of e) t._rebuild(); } _rebuildRenderTargetWrappers() { const e = this._renderTargetWrapperCache.slice(); for (const t of e) t._rebuild(); } _rebuildEffects() { for (const e in this._compiledEffects) { const t = this._compiledEffects[e]; t._pipelineContext = null, t._wasPreviouslyReady = !1, t._prepareEffect(); } Cr.ResetCache(); } /** * Gets a boolean indicating if all created effects are ready * @returns true if all effects are ready */ areAllEffectsReady() { for (const e in this._compiledEffects) if (!this._compiledEffects[e].isReady()) return !1; return !0; } _rebuildBuffers() { for (const e of this._uniformBuffers) e._rebuild(); for (const e of this._storageBuffers) e._rebuild(); } _initGLContext() { var e; this._caps = { maxTexturesImageUnits: this._gl.getParameter(this._gl.MAX_TEXTURE_IMAGE_UNITS), maxCombinedTexturesImageUnits: this._gl.getParameter(this._gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS), maxVertexTextureImageUnits: this._gl.getParameter(this._gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS), maxTextureSize: this._gl.getParameter(this._gl.MAX_TEXTURE_SIZE), maxSamples: this._webGLVersion > 1 ? 
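/*
 * Usage sketch: _setupMobileChecks() keeps hostInformation.isMobile current, and
 * areAllEffectsReady() reports whether every compiled effect is ready. Assuming `engine`:
 *
 *   if (engine.hostInformation.isMobile) {
 *     engine.setHardwareScalingLevel(2); // render at half resolution on mobile devices
 *   }
 *   const ready = engine.areAllEffectsReady();
 */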
this._gl.getParameter(this._gl.MAX_SAMPLES) : 1, maxCubemapTextureSize: this._gl.getParameter(this._gl.MAX_CUBE_MAP_TEXTURE_SIZE), maxRenderTextureSize: this._gl.getParameter(this._gl.MAX_RENDERBUFFER_SIZE), maxVertexAttribs: this._gl.getParameter(this._gl.MAX_VERTEX_ATTRIBS), maxVaryingVectors: this._gl.getParameter(this._gl.MAX_VARYING_VECTORS), maxFragmentUniformVectors: this._gl.getParameter(this._gl.MAX_FRAGMENT_UNIFORM_VECTORS), maxVertexUniformVectors: this._gl.getParameter(this._gl.MAX_VERTEX_UNIFORM_VECTORS), parallelShaderCompile: this._gl.getExtension("KHR_parallel_shader_compile") || void 0, standardDerivatives: this._webGLVersion > 1 || this._gl.getExtension("OES_standard_derivatives") !== null, maxAnisotropy: 1, astc: this._gl.getExtension("WEBGL_compressed_texture_astc") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_astc"), bptc: this._gl.getExtension("EXT_texture_compression_bptc") || this._gl.getExtension("WEBKIT_EXT_texture_compression_bptc"), s3tc: this._gl.getExtension("WEBGL_compressed_texture_s3tc") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc"), // eslint-disable-next-line @typescript-eslint/naming-convention s3tc_srgb: this._gl.getExtension("WEBGL_compressed_texture_s3tc_srgb") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc_srgb"), pvrtc: this._gl.getExtension("WEBGL_compressed_texture_pvrtc") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc"), etc1: this._gl.getExtension("WEBGL_compressed_texture_etc1") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_etc1"), etc2: this._gl.getExtension("WEBGL_compressed_texture_etc") || this._gl.getExtension("WEBKIT_WEBGL_compressed_texture_etc") || this._gl.getExtension("WEBGL_compressed_texture_es3_0"), textureAnisotropicFilterExtension: this._gl.getExtension("EXT_texture_filter_anisotropic") || this._gl.getExtension("WEBKIT_EXT_texture_filter_anisotropic") || this._gl.getExtension("MOZ_EXT_texture_filter_anisotropic"), uintIndices: this._webGLVersion > 1 || this._gl.getExtension("OES_element_index_uint") !== null, fragmentDepthSupported: this._webGLVersion > 1 || this._gl.getExtension("EXT_frag_depth") !== null, highPrecisionShaderSupported: !1, timerQuery: this._gl.getExtension("EXT_disjoint_timer_query_webgl2") || this._gl.getExtension("EXT_disjoint_timer_query"), supportOcclusionQuery: this._webGLVersion > 1, canUseTimestampForTimerQuery: !1, drawBuffersExtension: !1, maxMSAASamples: 1, colorBufferFloat: !!(this._webGLVersion > 1 && this._gl.getExtension("EXT_color_buffer_float")), supportFloatTexturesResolve: !1, colorBufferHalfFloat: !!(this._webGLVersion > 1 && this._gl.getExtension("EXT_color_buffer_half_float")), textureFloat: !!(this._webGLVersion > 1 || this._gl.getExtension("OES_texture_float")), textureHalfFloat: !!(this._webGLVersion > 1 || this._gl.getExtension("OES_texture_half_float")), textureHalfFloatRender: !1, textureFloatLinearFiltering: !1, textureFloatRender: !1, textureHalfFloatLinearFiltering: !1, vertexArrayObject: !1, instancedArrays: !1, textureLOD: !!(this._webGLVersion > 1 || this._gl.getExtension("EXT_shader_texture_lod")), texelFetch: this._webGLVersion !== 1, blendMinMax: !1, multiview: this._gl.getExtension("OVR_multiview2"), oculusMultiview: this._gl.getExtension("OCULUS_multiview"), depthTextureExtension: !1, canUseGLInstanceID: this._webGLVersion > 1, canUseGLVertexID: this._webGLVersion > 1, supportComputeShaders: !1, supportSRGBBuffers: !1, supportTransformFeedbacks: this._webGLVersion > 1, textureMaxLevel: 
this._webGLVersion > 1, texture2DArrayMaxLayerCount: this._webGLVersion > 1 ? this._gl.getParameter(this._gl.MAX_ARRAY_TEXTURE_LAYERS) : 128, disableMorphTargetTexture: !1 }, this._caps.supportFloatTexturesResolve = this._caps.colorBufferFloat, this._glVersion = this._gl.getParameter(this._gl.VERSION); const t = this._gl.getExtension("WEBGL_debug_renderer_info"); if (t != null && (this._glRenderer = this._gl.getParameter(t.UNMASKED_RENDERER_WEBGL), this._glVendor = this._gl.getParameter(t.UNMASKED_VENDOR_WEBGL)), this._glVendor || (this._glVendor = this._gl.getParameter(this._gl.VENDOR) || "Unknown vendor"), this._glRenderer || (this._glRenderer = this._gl.getParameter(this._gl.RENDERER) || "Unknown renderer"), this._gl.HALF_FLOAT_OES !== 36193 && (this._gl.HALF_FLOAT_OES = 36193), this._gl.RGBA16F !== 34842 && (this._gl.RGBA16F = 34842), this._gl.RGBA32F !== 34836 && (this._gl.RGBA32F = 34836), this._gl.DEPTH24_STENCIL8 !== 35056 && (this._gl.DEPTH24_STENCIL8 = 35056), this._caps.timerQuery && (this._webGLVersion === 1 && (this._gl.getQuery = this._caps.timerQuery.getQueryEXT.bind(this._caps.timerQuery)), this._caps.canUseTimestampForTimerQuery = ((e = this._gl.getQuery(this._caps.timerQuery.TIMESTAMP_EXT, this._caps.timerQuery.QUERY_COUNTER_BITS_EXT)) !== null && e !== void 0 ? e : 0) > 0), this._caps.maxAnisotropy = this._caps.textureAnisotropicFilterExtension ? this._gl.getParameter(this._caps.textureAnisotropicFilterExtension.MAX_TEXTURE_MAX_ANISOTROPY_EXT) : 0, this._caps.textureFloatLinearFiltering = !!(this._caps.textureFloat && this._gl.getExtension("OES_texture_float_linear")), this._caps.textureFloatRender = !!(this._caps.textureFloat && this._canRenderToFloatFramebuffer()), this._caps.textureHalfFloatLinearFiltering = !!(this._webGLVersion > 1 || this._caps.textureHalfFloat && this._gl.getExtension("OES_texture_half_float_linear")), this._caps.astc && (this._gl.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR = this._caps.astc.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR), this._caps.bptc && (this._gl.COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT = this._caps.bptc.COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT), this._caps.s3tc_srgb && (this._gl.COMPRESSED_SRGB_S3TC_DXT1_EXT = this._caps.s3tc_srgb.COMPRESSED_SRGB_S3TC_DXT1_EXT, this._gl.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT = this._caps.s3tc_srgb.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT, this._gl.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT = this._caps.s3tc_srgb.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT), this._caps.etc2 && (this._gl.COMPRESSED_SRGB8_ETC2 = this._caps.etc2.COMPRESSED_SRGB8_ETC2, this._gl.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC = this._caps.etc2.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC), this._webGLVersion > 1 && this._gl.HALF_FLOAT_OES !== 5131 && (this._gl.HALF_FLOAT_OES = 5131), this._caps.textureHalfFloatRender = this._caps.textureHalfFloat && this._canRenderToHalfFloatFramebuffer(), this._webGLVersion > 1) this._caps.drawBuffersExtension = !0, this._caps.maxMSAASamples = this._maxMSAASamplesOverride !== null ? 
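/*
 * Usage sketch: the derived flags above (maxAnisotropy, textureFloatRender,
 * textureHalfFloatRender, supportSRGBBuffers, ...) are exposed via getCaps().
 * `myTexture` is a placeholder for an existing texture:
 *
 *   const caps = engine.getCaps();
 *   const canUseFloatRTT = caps.textureFloatRender && caps.textureFloatLinearFiltering;
 *   myTexture.anisotropicFilteringLevel = Math.min(4, caps.maxAnisotropy);
 */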
this._maxMSAASamplesOverride : this._gl.getParameter(this._gl.MAX_SAMPLES); else { const i = this._gl.getExtension("WEBGL_draw_buffers"); if (i !== null) { this._caps.drawBuffersExtension = !0, this._gl.drawBuffers = i.drawBuffersWEBGL.bind(i), this._gl.DRAW_FRAMEBUFFER = this._gl.FRAMEBUFFER; for (let r = 0; r < 16; r++) this._gl["COLOR_ATTACHMENT" + r + "_WEBGL"] = i["COLOR_ATTACHMENT" + r + "_WEBGL"]; } } if (this._webGLVersion > 1) this._caps.depthTextureExtension = !0; else { const i = this._gl.getExtension("WEBGL_depth_texture"); i != null && (this._caps.depthTextureExtension = !0, this._gl.UNSIGNED_INT_24_8 = i.UNSIGNED_INT_24_8_WEBGL); } if (this.disableVertexArrayObjects) this._caps.vertexArrayObject = !1; else if (this._webGLVersion > 1) this._caps.vertexArrayObject = !0; else { const i = this._gl.getExtension("OES_vertex_array_object"); i != null && (this._caps.vertexArrayObject = !0, this._gl.createVertexArray = i.createVertexArrayOES.bind(i), this._gl.bindVertexArray = i.bindVertexArrayOES.bind(i), this._gl.deleteVertexArray = i.deleteVertexArrayOES.bind(i)); } if (this._webGLVersion > 1) this._caps.instancedArrays = !0; else { const i = this._gl.getExtension("ANGLE_instanced_arrays"); i != null ? (this._caps.instancedArrays = !0, this._gl.drawArraysInstanced = i.drawArraysInstancedANGLE.bind(i), this._gl.drawElementsInstanced = i.drawElementsInstancedANGLE.bind(i), this._gl.vertexAttribDivisor = i.vertexAttribDivisorANGLE.bind(i)) : this._caps.instancedArrays = !1; } if (this._gl.getShaderPrecisionFormat) { const i = this._gl.getShaderPrecisionFormat(this._gl.VERTEX_SHADER, this._gl.HIGH_FLOAT), r = this._gl.getShaderPrecisionFormat(this._gl.FRAGMENT_SHADER, this._gl.HIGH_FLOAT); i && r && (this._caps.highPrecisionShaderSupported = i.precision !== 0 && r.precision !== 0); } if (this._webGLVersion > 1) this._caps.blendMinMax = !0; else { const i = this._gl.getExtension("EXT_blend_minmax"); i != null && (this._caps.blendMinMax = !0, this._gl.MAX = i.MAX_EXT, this._gl.MIN = i.MIN_EXT); } if (!this._caps.supportSRGBBuffers) { if (this._webGLVersion > 1) this._caps.supportSRGBBuffers = !0, this._glSRGBExtensionValues = { SRGB: WebGL2RenderingContext.SRGB, SRGB8: WebGL2RenderingContext.SRGB8, SRGB8_ALPHA8: WebGL2RenderingContext.SRGB8_ALPHA8 }; else { const i = this._gl.getExtension("EXT_sRGB"); i != null && (this._caps.supportSRGBBuffers = !0, this._glSRGBExtensionValues = { SRGB: i.SRGB_EXT, SRGB8: i.SRGB_ALPHA_EXT, SRGB8_ALPHA8: i.SRGB_ALPHA_EXT }); } this._caps.supportSRGBBuffers = this._caps.supportSRGBBuffers && !!(this._creationOptions && this._creationOptions.forceSRGBBufferSupportState); } this._depthCullingState.depthTest = !0, this._depthCullingState.depthFunc = this._gl.LEQUAL, this._depthCullingState.depthMask = !0, this._maxSimultaneousTextures = this._caps.maxCombinedTexturesImageUnits; for (let i = 0; i < this._maxSimultaneousTextures; i++) this._nextFreeTextureSlots.push(i); this._glRenderer === "Mali-G72" && (this._caps.disableMorphTargetTexture = !0); } _initFeatures() { this._features = { forceBitmapOverHTMLImageElement: !1, supportRenderAndCopyToLodForFloatTextures: this._webGLVersion !== 1, supportDepthStencilTexture: this._webGLVersion !== 1, supportShadowSamplers: this._webGLVersion !== 1, uniformBufferHardCheckMatrix: !1, allowTexturePrefiltering: this._webGLVersion !== 1, trackUbosInFrame: !1, checkUbosContentBeforeUpload: !1, supportCSM: this._webGLVersion !== 1, basisNeedsPOT: this._webGLVersion === 1, support3DTextures: this._webGLVersion !== 1, 
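/*
 * Usage sketch: _initGLContext() / _initFeatures() populate the capability and feature
 * tables read back through getCaps(). Assuming an `engine` instance:
 *
 *   const caps = engine.getCaps();
 *   console.log(caps.maxTextureSize, caps.maxMSAASamples);
 *   if (!caps.instancedArrays) {
 *     // WebGL1 without ANGLE_instanced_arrays: avoid instanced rendering paths
 *   }
 */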
needTypeSuffixInShaderConstants: this._webGLVersion !== 1, supportMSAA: this._webGLVersion !== 1, supportSSAO2: this._webGLVersion !== 1, supportExtendedTextureFormats: this._webGLVersion !== 1, supportSwitchCaseInShader: this._webGLVersion !== 1, supportSyncTextureRead: !0, needsInvertingBitmap: !0, useUBOBindingCache: !0, needShaderCodeInlining: !1, needToAlwaysBindUniformBuffers: !1, supportRenderPasses: !1, supportSpriteInstancing: !0, forceVertexBufferStrideMultiple4Bytes: !1, _collectUbosUpdatedInFrame: !1 }; } /** * Gets version of the current webGL context * Keep it for back compat - use version instead */ get webGLVersion() { return this._webGLVersion; } /** * Gets a string identifying the name of the class * @returns "Engine" string */ getClassName() { return "ThinEngine"; } /** * Returns true if the stencil buffer has been enabled through the creation option of the context. */ get isStencilEnable() { return this._isStencilEnable; } /** @internal */ _prepareWorkingCanvas() { if (this._workingCanvas) return; this._workingCanvas = this.createCanvas(1, 1); const e = this._workingCanvas.getContext("2d"); e && (this._workingContext = e); } /** * Reset the texture cache to empty state */ resetTextureCache() { for (const e in this._boundTexturesCache) Object.prototype.hasOwnProperty.call(this._boundTexturesCache, e) && (this._boundTexturesCache[e] = null); this._currentTextureChannel = -1; } /** * Gets an object containing information about the current engine context * @returns an object containing the vendor, the renderer and the version of the current engine context */ getInfo() { return this.getGlInfo(); } /** * Gets an object containing information about the current webGL context * @returns an object containing the vendor, the renderer and the version of the current webGL context */ getGlInfo() { return { vendor: this._glVendor, renderer: this._glRenderer, version: this._glVersion }; } /** * Defines the hardware scaling level. * By default the hardware scaling level is computed from the window device ratio. * if level = 1 then the engine will render at the exact resolution of the canvas. If level = 0.5 then the engine will render at twice the size of the canvas. * @param level defines the level to use */ setHardwareScalingLevel(e) { this._hardwareScalingLevel = e, this.resize(); } /** * Gets the current hardware scaling level. * By default the hardware scaling level is computed from the window device ratio. * if level = 1 then the engine will render at the exact resolution of the canvas. If level = 0.5 then the engine will render at twice the size of the canvas. * @returns a number indicating the current hardware scaling level */ getHardwareScalingLevel() { return this._hardwareScalingLevel; } /** * Gets the list of loaded textures * @returns an array containing all loaded textures */ getLoadedTexturesCache() { return this._internalTexturesCache; } /** * Gets the object containing all engine capabilities * @returns the EngineCapabilities object */ getCaps() { return this._caps; } /** * stop executing a render loop function and remove it from the execution array * @param renderFunction defines the function to be removed. If not provided all functions will be removed. 
*/ stopRenderLoop(e) { if (!e) { this._activeRenderLoops.length = 0, this._cancelFrame(); return; } const t = this._activeRenderLoops.indexOf(e); t >= 0 && (this._activeRenderLoops.splice(t, 1), this._activeRenderLoops.length == 0 && this._cancelFrame()); } _cancelFrame() { if (this._renderingQueueLaunched && this._frameHandler) { if (this._renderingQueueLaunched = !1, cu()) { const { cancelAnimationFrame: e } = this.getHostWindow() || window; if (typeof e == "function") return e(this._frameHandler); } else if (typeof cancelAnimationFrame == "function") return cancelAnimationFrame(this._frameHandler); return clearTimeout(this._frameHandler); } } /** @internal */ _renderLoop() { if (!this._contextWasLost) { let e = !0; if ((this._isDisposed || !this.renderEvenInBackground && this._windowIsBackground) && (e = !1), e) { this.beginFrame(); for (let t = 0; t < this._activeRenderLoops.length; t++) { const i = this._activeRenderLoops[t]; i(); } this.endFrame(); } } this._activeRenderLoops.length > 0 ? this._frameHandler = this._queueNewFrame(this._boundRenderFunction, this.getHostWindow()) : this._renderingQueueLaunched = !1; } /** * Gets the HTML canvas attached with the current webGL context * @returns a HTML canvas */ getRenderingCanvas() { return this._renderingCanvas; } /** * Gets the audio context specified in engine initialization options * @returns an Audio Context */ getAudioContext() { return this._audioContext; } /** * Gets the audio destination specified in engine initialization options * @returns an audio destination node */ getAudioDestination() { return this._audioDestination; } /** * Gets host window * @returns the host window object */ getHostWindow() { return cu() ? this._renderingCanvas && this._renderingCanvas.ownerDocument && this._renderingCanvas.ownerDocument.defaultView ? this._renderingCanvas.ownerDocument.defaultView : window : null; } /** * Gets the current render width * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render width */ getRenderWidth(e = !1) { return !e && this._currentRenderTarget ? this._currentRenderTarget.width : this._framebufferDimensionsObject ? this._framebufferDimensionsObject.framebufferWidth : this._gl.drawingBufferWidth; } /** * Gets the current render height * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render height */ getRenderHeight(e = !1) { return !e && this._currentRenderTarget ? this._currentRenderTarget.height : this._framebufferDimensionsObject ? this._framebufferDimensionsObject.framebufferHeight : this._gl.drawingBufferHeight; } /** * Can be used to override the current requestAnimationFrame requester. * @internal */ _queueNewFrame(e, t) { return mi.QueueNewFrame(e, t); } /** * Register and execute a render loop. 
The engine can have more than one render function * @param renderFunction defines the function to continuously execute */ runRenderLoop(e) { this._activeRenderLoops.indexOf(e) === -1 && (this._activeRenderLoops.push(e), this._renderingQueueLaunched || (this._renderingQueueLaunched = !0, this._boundRenderFunction = () => this._renderLoop(), this._frameHandler = this._queueNewFrame(this._boundRenderFunction, this.getHostWindow()))); } /** * Clear the current render buffer or the current render target (if any is set up) * @param color defines the color to use * @param backBuffer defines if the back buffer must be cleared * @param depth defines if the depth buffer must be cleared * @param stencil defines if the stencil buffer must be cleared */ clear(e, t, i, r = !1) { var s, n; const a = this.stencilStateComposer.useStencilGlobalOnly; this.stencilStateComposer.useStencilGlobalOnly = !0, this.applyStates(), this.stencilStateComposer.useStencilGlobalOnly = a; let l = 0; if (t && e) { let o = !0; if (this._currentRenderTarget) { const u = (s = this._currentRenderTarget.texture) === null || s === void 0 ? void 0 : s.format; if (u === 8 || u === 9 || u === 10 || u === 11) { const h = (n = this._currentRenderTarget.texture) === null || n === void 0 ? void 0 : n.type; h === 7 || h === 5 ? (mi._TempClearColorUint32[0] = e.r * 255, mi._TempClearColorUint32[1] = e.g * 255, mi._TempClearColorUint32[2] = e.b * 255, mi._TempClearColorUint32[3] = e.a * 255, this._gl.clearBufferuiv(this._gl.COLOR, 0, mi._TempClearColorUint32), o = !1) : (mi._TempClearColorInt32[0] = e.r * 255, mi._TempClearColorInt32[1] = e.g * 255, mi._TempClearColorInt32[2] = e.b * 255, mi._TempClearColorInt32[3] = e.a * 255, this._gl.clearBufferiv(this._gl.COLOR, 0, mi._TempClearColorInt32), o = !1); } } o && (this._gl.clearColor(e.r, e.g, e.b, e.a !== void 0 ? e.a : 1), l |= this._gl.COLOR_BUFFER_BIT); } i && (this.useReverseDepthBuffer ? (this._depthCullingState.depthFunc = this._gl.GEQUAL, this._gl.clearDepth(0)) : this._gl.clearDepth(1), l |= this._gl.DEPTH_BUFFER_BIT), r && (this._gl.clearStencil(0), l |= this._gl.STENCIL_BUFFER_BIT), this._gl.clear(l); } /** * @internal */ _viewport(e, t, i, r) { (e !== this._viewportCached.x || t !== this._viewportCached.y || i !== this._viewportCached.z || r !== this._viewportCached.w) && (this._viewportCached.x = e, this._viewportCached.y = t, this._viewportCached.z = i, this._viewportCached.w = r, this._gl.viewport(e, t, i, r)); } /** * Set the WebGL's viewport * @param viewport defines the viewport element to be used * @param requiredWidth defines the width required for rendering. If not provided the rendering canvas' width is used * @param requiredHeight defines the height required for rendering. 
If not provided the rendering canvas' height is used */ setViewport(e, t, i) { const r = t || this.getRenderWidth(), s = i || this.getRenderHeight(), n = e.x || 0, a = e.y || 0; this._cachedViewport = e, this._viewport(n * r, a * s, r * e.width, s * e.height); } /** * Begin a new frame */ beginFrame() { } /** * Enf the current frame */ endFrame() { this._badOS && this.flushFramebuffer(), this._frameId++; } /** * Resize the view according to the canvas' size * @param forceSetSize true to force setting the sizes of the underlying canvas */ resize(e = !1) { let t, i; if (this.adaptToDeviceRatio) { const r = cu() && window.devicePixelRatio || 1, s = this._lastDevicePixelRatio / r; this._lastDevicePixelRatio = r, this._hardwareScalingLevel *= s; } if (cu() && qR()) if (this._renderingCanvas) { const r = this._renderingCanvas.getBoundingClientRect ? this._renderingCanvas.getBoundingClientRect() : { // fallback to last solution in case the function doesn't exist width: this._renderingCanvas.width * this._hardwareScalingLevel, height: this._renderingCanvas.height * this._hardwareScalingLevel }; t = this._renderingCanvas.clientWidth || r.width || this._renderingCanvas.width || 100, i = this._renderingCanvas.clientHeight || r.height || this._renderingCanvas.height || 100; } else t = window.innerWidth, i = window.innerHeight; else t = this._renderingCanvas ? this._renderingCanvas.width : 100, i = this._renderingCanvas ? this._renderingCanvas.height : 100; this.setSize(t / this._hardwareScalingLevel, i / this._hardwareScalingLevel, e); } /** * Force a specific size of the canvas * @param width defines the new canvas' width * @param height defines the new canvas' height * @param forceSetSize true to force setting the sizes of the underlying canvas * @returns true if the size was changed */ setSize(e, t, i = !1) { return !this._renderingCanvas || (e = e | 0, t = t | 0, !i && this._renderingCanvas.width === e && this._renderingCanvas.height === t) ? !1 : (this._renderingCanvas.width = e, this._renderingCanvas.height = t, !0); } /** * Binds the frame buffer to the specified texture. * @param rtWrapper The render target wrapper to render to * @param faceIndex The face of the texture to render to in case of cube texture and if the render target wrapper is not a multi render target * @param requiredWidth The width of the target to render to * @param requiredHeight The height of the target to render to * @param forceFullscreenViewport Forces the viewport to be the entire texture/screen if true * @param lodLevel Defines the lod level to bind to the frame buffer * @param layer Defines the 2d array index to bind to the frame buffer if the render target wrapper is not a multi render target */ bindFramebuffer(e, t = 0, i, r, s, n = 0, a = 0) { var l, o, u, h, d, f; const p = e; this._currentRenderTarget && this.unBindFramebuffer(this._currentRenderTarget), this._currentRenderTarget = e, this._bindUnboundFramebuffer(p._MSAAFramebuffer ? p._MSAAFramebuffer : p._framebuffer); const m = this._gl; e.isMulti || (e.is2DArray ? m.framebufferTextureLayer(m.FRAMEBUFFER, m.COLOR_ATTACHMENT0, (l = e.texture._hardwareTexture) === null || l === void 0 ? void 0 : l.underlyingResource, n, a) : e.isCube ? m.framebufferTexture2D(m.FRAMEBUFFER, m.COLOR_ATTACHMENT0, m.TEXTURE_CUBE_MAP_POSITIVE_X + t, (o = e.texture._hardwareTexture) === null || o === void 0 ? 
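/*
 * Usage sketch: runRenderLoop()/stopRenderLoop() (documented above) drive per-frame
 * rendering, and resize() re-derives the drawing-buffer size from the canvas and the
 * hardware scaling level. Assuming `engine` and a `scene`:
 *
 *   const render = () => scene.render();
 *   engine.runRenderLoop(render);
 *   window.addEventListener("resize", () => engine.resize());
 *   // later:
 *   engine.stopRenderLoop(render);
 */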
void 0 : o.underlyingResource, n) : p._currentLOD !== n && (m.framebufferTexture2D(m.FRAMEBUFFER, m.COLOR_ATTACHMENT0, m.TEXTURE_2D, (u = e.texture._hardwareTexture) === null || u === void 0 ? void 0 : u.underlyingResource, n), p._currentLOD = n)); const _ = e._depthStencilTexture; if (_) { const v = e._depthStencilTextureWithStencil ? m.DEPTH_STENCIL_ATTACHMENT : m.DEPTH_ATTACHMENT; e.is2DArray ? m.framebufferTextureLayer(m.FRAMEBUFFER, v, (h = _._hardwareTexture) === null || h === void 0 ? void 0 : h.underlyingResource, n, a) : e.isCube ? m.framebufferTexture2D(m.FRAMEBUFFER, v, m.TEXTURE_CUBE_MAP_POSITIVE_X + t, (d = _._hardwareTexture) === null || d === void 0 ? void 0 : d.underlyingResource, n) : m.framebufferTexture2D(m.FRAMEBUFFER, v, m.TEXTURE_2D, (f = _._hardwareTexture) === null || f === void 0 ? void 0 : f.underlyingResource, n); } this._cachedViewport && !s ? this.setViewport(this._cachedViewport, i, r) : (i || (i = e.width, n && (i = i / Math.pow(2, n))), r || (r = e.height, n && (r = r / Math.pow(2, n))), this._viewport(0, 0, i, r)), this.wipeCaches(); } /** * Set various states to the webGL context * @param culling defines culling state: true to enable culling, false to disable it * @param zOffset defines the value to apply to zOffset (0 by default) * @param force defines if states must be applied even if cache is up to date * @param reverseSide defines if culling must be reversed (CCW if false, CW if true) * @param cullBackFaces true to cull back faces, false to cull front faces (if culling is enabled) * @param stencil stencil states to set * @param zOffsetUnits defines the value to apply to zOffsetUnits (0 by default) */ setState(e, t = 0, i, r = !1, s, n, a = 0) { var l, o; (this._depthCullingState.cull !== e || i) && (this._depthCullingState.cull = e); const u = !((o = (l = this.cullBackFaces) !== null && l !== void 0 ? l : s) !== null && o !== void 0) || o ? this._gl.BACK : this._gl.FRONT; (this._depthCullingState.cullFace !== u || i) && (this._depthCullingState.cullFace = u), this.setZOffset(t), this.setZOffsetUnits(a); const h = r ? this._gl.CW : this._gl.CCW; (this._depthCullingState.frontFace !== h || i) && (this._depthCullingState.frontFace = h), this._stencilStateComposer.stencilMaterial = n; } /** * Gets a boolean indicating if depth testing is enabled * @returns the current state */ getDepthBuffer() { return this._depthCullingState.depthTest; } /** * Enable or disable depth buffering * @param enable defines the state to set */ setDepthBuffer(e) { this._depthCullingState.depthTest = e; } /** * Set the z offset Factor to apply to current rendering * @param value defines the offset to apply */ setZOffset(e) { this._depthCullingState.zOffset = this.useReverseDepthBuffer ? -e : e; } /** * Gets the current value of the zOffset Factor * @returns the current zOffset Factor state */ getZOffset() { const e = this._depthCullingState.zOffset; return this.useReverseDepthBuffer ? -e : e; } /** * Set the z offset Units to apply to current rendering * @param value defines the offset to apply */ setZOffsetUnits(e) { this._depthCullingState.zOffsetUnits = this.useReverseDepthBuffer ? -e : e; } /** * Gets the current value of the zOffset Units * @returns the current zOffset Units state */ getZOffsetUnits() { const e = this._depthCullingState.zOffsetUnits; return this.useReverseDepthBuffer ? 
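/*
 * Usage sketch: the depth/culling setters above only update cached state; the values
 * are pushed to WebGL lazily by applyStates(). Assuming `engine`:
 *
 *   engine.setState(true, 0, false, false, true); // culling on, cull back faces
 *   engine.setDepthBuffer(false);                 // disable depth testing
 *   engine.setZOffset(1);                         // polygon-offset factor (negated with useReverseDepthBuffer)
 *   engine.setDepthBuffer(true);
 */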
-e : e; } /** * @internal */ _bindUnboundFramebuffer(e) { this._currentFramebuffer !== e && (this._gl.bindFramebuffer(this._gl.FRAMEBUFFER, e), this._currentFramebuffer = e); } /** @internal */ _currentFrameBufferIsDefaultFrameBuffer() { return this._currentFramebuffer === null; } /** * Generates the mipmaps for a texture * @param texture texture to generate the mipmaps for */ generateMipmaps(e) { this._bindTextureDirectly(this._gl.TEXTURE_2D, e, !0), this._gl.generateMipmap(this._gl.TEXTURE_2D), this._bindTextureDirectly(this._gl.TEXTURE_2D, null); } /** * Unbind the current render target texture from the webGL context * @param texture defines the render target wrapper to unbind * @param disableGenerateMipMaps defines a boolean indicating that mipmaps must not be generated * @param onBeforeUnbind defines a function which will be called before the effective unbind */ unBindFramebuffer(e, t = !1, i) { var r; const s = e; this._currentRenderTarget = null; const n = this._gl; if (s._MSAAFramebuffer) { if (e.isMulti) { this.unBindMultiColorAttachmentFramebuffer(e, t, i); return; } n.bindFramebuffer(n.READ_FRAMEBUFFER, s._MSAAFramebuffer), n.bindFramebuffer(n.DRAW_FRAMEBUFFER, s._framebuffer), n.blitFramebuffer(0, 0, e.width, e.height, 0, 0, e.width, e.height, n.COLOR_BUFFER_BIT, n.NEAREST); } !((r = e.texture) === null || r === void 0) && r.generateMipMaps && !t && !e.isCube && this.generateMipmaps(e.texture), i && (s._MSAAFramebuffer && this._bindUnboundFramebuffer(s._framebuffer), i()), this._bindUnboundFramebuffer(null); } /** * Force a webGL flush (ie. a flush of all waiting webGL commands) */ flushFramebuffer() { this._gl.flush(); } /** * Unbind the current render target and bind the default framebuffer */ restoreDefaultFramebuffer() { this._currentRenderTarget ? this.unBindFramebuffer(this._currentRenderTarget) : this._bindUnboundFramebuffer(null), this._cachedViewport && this.setViewport(this._cachedViewport), this.wipeCaches(); } // VBOs /** @internal */ _resetVertexBufferBinding() { this.bindArrayBuffer(null), this._cachedVertexBuffers = null; } /** * Creates a vertex buffer * @param data the data for the vertex buffer * @param _updatable whether the buffer should be created as updatable * @param _label defines the label of the buffer (for debug purpose) * @returns the new WebGL static buffer */ createVertexBuffer(e, t, i) { return this._createVertexBuffer(e, this._gl.STATIC_DRAW); } _createVertexBuffer(e, t) { const i = this._gl.createBuffer(); if (!i) throw new Error("Unable to create vertex buffer"); const r = new FO(i); return this.bindArrayBuffer(r), e instanceof Array ? 
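/*
 * Usage sketch: a typical render-target pass with the framebuffer helpers above.
 * `rtWrapper` is a placeholder for a render target wrapper created elsewhere:
 *
 *   engine.bindFramebuffer(rtWrapper);
 *   // ... issue draw calls ...
 *   engine.unBindFramebuffer(rtWrapper);   // resolves MSAA / regenerates mipmaps when requested
 *   engine.restoreDefaultFramebuffer();    // back to the canvas backbuffer
 *   engine.flushFramebuffer();             // optional explicit flush
 */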
this._gl.bufferData(this._gl.ARRAY_BUFFER, new Float32Array(e), t) : this._gl.bufferData(this._gl.ARRAY_BUFFER, e, t), this._resetVertexBufferBinding(), r.references = 1, r; } /** * Creates a dynamic vertex buffer * @param data the data for the dynamic vertex buffer * @param _label defines the label of the buffer (for debug purpose) * @returns the new WebGL dynamic buffer */ createDynamicVertexBuffer(e, t) { return this._createVertexBuffer(e, this._gl.DYNAMIC_DRAW); } _resetIndexBufferBinding() { this.bindIndexBuffer(null), this._cachedIndexBuffer = null; } /** * Creates a new index buffer * @param indices defines the content of the index buffer * @param updatable defines if the index buffer must be updatable * @param _label defines the label of the buffer (for debug purpose) * @returns a new webGL buffer */ createIndexBuffer(e, t, i) { const r = this._gl.createBuffer(), s = new FO(r); if (!r) throw new Error("Unable to create index buffer"); this.bindIndexBuffer(s); const n = this._normalizeIndexData(e); return this._gl.bufferData(this._gl.ELEMENT_ARRAY_BUFFER, n, t ? this._gl.DYNAMIC_DRAW : this._gl.STATIC_DRAW), this._resetIndexBufferBinding(), s.references = 1, s.is32Bits = n.BYTES_PER_ELEMENT === 4, s; } _normalizeIndexData(e) { if (e.BYTES_PER_ELEMENT === 2) return e; if (this._caps.uintIndices) { if (e instanceof Uint32Array) return e; for (let i = 0; i < e.length; i++) if (e[i] >= 65535) return new Uint32Array(e); return new Uint16Array(e); } return new Uint16Array(e); } /** * Bind a webGL buffer to the webGL context * @param buffer defines the buffer to bind */ bindArrayBuffer(e) { this._vaoRecordInProgress || this._unbindVertexArrayObject(), this._bindBuffer(e, this._gl.ARRAY_BUFFER); } /** * Bind a specific block at a given index in a specific shader program * @param pipelineContext defines the pipeline context to use * @param blockName defines the block name * @param index defines the index where to bind the block */ bindUniformBlock(e, t, i) { const r = e.program, s = this._gl.getUniformBlockIndex(r, t); this._gl.uniformBlockBinding(r, s, i); } // eslint-disable-next-line @typescript-eslint/naming-convention bindIndexBuffer(e) { this._vaoRecordInProgress || this._unbindVertexArrayObject(), this._bindBuffer(e, this._gl.ELEMENT_ARRAY_BUFFER); } _bindBuffer(e, t) { (this._vaoRecordInProgress || this._currentBoundBuffer[t] !== e) && (this._gl.bindBuffer(t, e ? e.underlyingResource : null), this._currentBoundBuffer[t] = e); } /** * update the bound buffer with the given data * @param data defines the data to update */ updateArrayBuffer(e) { this._gl.bufferSubData(this._gl.ARRAY_BUFFER, 0, e); } _vertexAttribPointer(e, t, i, r, s, n, a) { const l = this._currentBufferPointers[t]; if (!l) return; let o = !1; l.active ? (l.buffer !== e && (l.buffer = e, o = !0), l.size !== i && (l.size = i, o = !0), l.type !== r && (l.type = r, o = !0), l.normalized !== s && (l.normalized = s, o = !0), l.stride !== n && (l.stride = n, o = !0), l.offset !== a && (l.offset = a, o = !0)) : (o = !0, l.active = !0, l.index = t, l.size = i, l.type = r, l.normalized = s, l.stride = n, l.offset = a, l.buffer = e), (o || this._vaoRecordInProgress) && (this.bindArrayBuffer(e), r === this._gl.UNSIGNED_INT || r === this._gl.INT ? 
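/*
 * Usage sketch: creating GPU buffers with the helpers above (static vertex data, a
 * dynamic buffer updated later, and an index buffer whose 16/32-bit type is picked by
 * _normalizeIndexData). Assuming `engine`:
 *
 *   const positions = engine.createVertexBuffer(new Float32Array([0, 0, 0, 1, 0, 0, 0, 1, 0]));
 *   const dynamic = engine.createDynamicVertexBuffer(new Float32Array(9));
 *   const indices = engine.createIndexBuffer([0, 1, 2]);
 *   engine.bindArrayBuffer(dynamic);
 *   engine.updateArrayBuffer(new Float32Array(9)); // writes into the currently bound ARRAY_BUFFER
 */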
this._gl.vertexAttribIPointer(t, i, r, n, a) : this._gl.vertexAttribPointer(t, i, r, s, n, a)); } /** * @internal */ _bindIndexBufferWithCache(e) { e != null && this._cachedIndexBuffer !== e && (this._cachedIndexBuffer = e, this.bindIndexBuffer(e), this._uintIndicesCurrentlySet = e.is32Bits); } _bindVertexBuffersAttributes(e, t, i) { const r = t.getAttributesNames(); this._vaoRecordInProgress || this._unbindVertexArrayObject(), this.unbindAllAttributes(); for (let s = 0; s < r.length; s++) { const n = t.getAttributeLocation(s); if (n >= 0) { const a = r[s]; let l = null; if (i && (l = i[a]), l || (l = e[a]), !l) continue; this._gl.enableVertexAttribArray(n), this._vaoRecordInProgress || (this._vertexAttribArraysEnabled[n] = !0); const o = l.getBuffer(); o && (this._vertexAttribPointer(o, n, l.getSize(), l.type, l.normalized, l.byteStride, l.byteOffset), l.getIsInstanced() && (this._gl.vertexAttribDivisor(n, l.getInstanceDivisor()), this._vaoRecordInProgress || (this._currentInstanceLocations.push(n), this._currentInstanceBuffers.push(o)))); } } } /** * Records a vertex array object * @see https://doc.babylonjs.com/setup/support/webGL2#vertex-array-objects * @param vertexBuffers defines the list of vertex buffers to store * @param indexBuffer defines the index buffer to store * @param effect defines the effect to store * @param overrideVertexBuffers defines optional list of avertex buffers that overrides the entries in vertexBuffers * @returns the new vertex array object */ recordVertexArrayObject(e, t, i, r) { const s = this._gl.createVertexArray(); if (!s) throw new Error("Unable to create VAO"); return this._vaoRecordInProgress = !0, this._gl.bindVertexArray(s), this._mustWipeVertexAttributes = !0, this._bindVertexBuffersAttributes(e, i, r), this.bindIndexBuffer(t), this._vaoRecordInProgress = !1, this._gl.bindVertexArray(null), s; } /** * Bind a specific vertex array object * @see https://doc.babylonjs.com/setup/support/webGL2#vertex-array-objects * @param vertexArrayObject defines the vertex array object to bind * @param indexBuffer defines the index buffer to bind */ bindVertexArrayObject(e, t) { this._cachedVertexArrayObject !== e && (this._cachedVertexArrayObject = e, this._gl.bindVertexArray(e), this._cachedVertexBuffers = null, this._cachedIndexBuffer = null, this._uintIndicesCurrentlySet = t != null && t.is32Bits, this._mustWipeVertexAttributes = !0); } /** * Bind webGl buffers directly to the webGL context * @param vertexBuffer defines the vertex buffer to bind * @param indexBuffer defines the index buffer to bind * @param vertexDeclaration defines the vertex declaration to use with the vertex buffer * @param vertexStrideSize defines the vertex stride of the vertex buffer * @param effect defines the effect associated with the vertex buffer */ bindBuffersDirectly(e, t, i, r, s) { if (this._cachedVertexBuffers !== e || this._cachedEffectForVertexBuffers !== s) { this._cachedVertexBuffers = e, this._cachedEffectForVertexBuffers = s; const n = s.getAttributesCount(); this._unbindVertexArrayObject(), this.unbindAllAttributes(); let a = 0; for (let l = 0; l < n; l++) if (l < i.length) { const o = s.getAttributeLocation(l); o >= 0 && (this._gl.enableVertexAttribArray(o), this._vertexAttribArraysEnabled[o] = !0, this._vertexAttribPointer(e, o, i[l], this._gl.FLOAT, !1, r, a)), a += i[l] * 4; } } this._bindIndexBufferWithCache(t); } _unbindVertexArrayObject() { this._cachedVertexArrayObject && (this._cachedVertexArrayObject = null, this._gl.bindVertexArray(null)); } /** * Bind a list of 
vertex buffers to the webGL context * @param vertexBuffers defines the list of vertex buffers to bind * @param indexBuffer defines the index buffer to bind * @param effect defines the effect associated with the vertex buffers * @param overrideVertexBuffers defines optional list of avertex buffers that overrides the entries in vertexBuffers */ bindBuffers(e, t, i, r) { (this._cachedVertexBuffers !== e || this._cachedEffectForVertexBuffers !== i) && (this._cachedVertexBuffers = e, this._cachedEffectForVertexBuffers = i, this._bindVertexBuffersAttributes(e, i, r)), this._bindIndexBufferWithCache(t); } /** * Unbind all instance attributes */ unbindInstanceAttributes() { let e; for (let t = 0, i = this._currentInstanceLocations.length; t < i; t++) { const r = this._currentInstanceBuffers[t]; e != r && r.references && (e = r, this.bindArrayBuffer(r)); const s = this._currentInstanceLocations[t]; this._gl.vertexAttribDivisor(s, 0); } this._currentInstanceBuffers.length = 0, this._currentInstanceLocations.length = 0; } /** * Release and free the memory of a vertex array object * @param vao defines the vertex array object to delete */ releaseVertexArrayObject(e) { this._gl.deleteVertexArray(e); } /** * @internal */ _releaseBuffer(e) { return e.references--, e.references === 0 ? (this._deleteBuffer(e), !0) : !1; } _deleteBuffer(e) { this._gl.deleteBuffer(e.underlyingResource); } /** * Update the content of a webGL buffer used with instantiation and bind it to the webGL context * @param instancesBuffer defines the webGL buffer to update and bind * @param data defines the data to store in the buffer * @param offsetLocations defines the offsets or attributes information used to determine where data must be stored in the buffer */ updateAndBindInstancesBuffer(e, t, i) { if (this.bindArrayBuffer(e), t && this._gl.bufferSubData(this._gl.ARRAY_BUFFER, 0, t), i[0].index !== void 0) this.bindInstancesBuffer(e, i, !0); else for (let r = 0; r < 4; r++) { const s = i[r]; this._vertexAttribArraysEnabled[s] || (this._gl.enableVertexAttribArray(s), this._vertexAttribArraysEnabled[s] = !0), this._vertexAttribPointer(e, s, 4, this._gl.FLOAT, !1, 64, r * 16), this._gl.vertexAttribDivisor(s, 1), this._currentInstanceLocations.push(s), this._currentInstanceBuffers.push(e); } } /** * Bind the content of a webGL buffer used with instantiation * @param instancesBuffer defines the webGL buffer to bind * @param attributesInfo defines the offsets or attributes information used to determine where data must be stored in the buffer * @param computeStride defines Whether to compute the strides from the info or use the default 0 */ bindInstancesBuffer(e, t, i = !0) { this.bindArrayBuffer(e); let r = 0; if (i) for (let s = 0; s < t.length; s++) { const n = t[s]; r += n.attributeSize * 4; } for (let s = 0; s < t.length; s++) { const n = t[s]; n.index === void 0 && (n.index = this._currentEffect.getAttributeLocationByName(n.attributeName)), !(n.index < 0) && (this._vertexAttribArraysEnabled[n.index] || (this._gl.enableVertexAttribArray(n.index), this._vertexAttribArraysEnabled[n.index] = !0), this._vertexAttribPointer(e, n.index, n.attributeSize, n.attributeType || this._gl.FLOAT, n.normalized || !1, r, n.offset), this._gl.vertexAttribDivisor(n.index, n.divisor === void 0 ? 
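/*
 * Usage sketch: recordVertexArrayObject() captures the current vertex layout into a VAO
 * that can later be rebound cheaply. `vertexBuffers` (an attribute-name to VertexBuffer
 * map), `indexBuffer` and `effect` are placeholders:
 *
 *   const vao = engine.recordVertexArrayObject(vertexBuffers, indexBuffer, effect);
 *   // per frame:
 *   engine.bindVertexArrayObject(vao, indexBuffer);
 *   // cleanup:
 *   engine.releaseVertexArrayObject(vao);
 */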
1 : n.divisor), this._currentInstanceLocations.push(n.index), this._currentInstanceBuffers.push(e)); } } /** * Disable the instance attribute corresponding to the name in parameter * @param name defines the name of the attribute to disable */ disableInstanceAttributeByName(e) { if (!this._currentEffect) return; const t = this._currentEffect.getAttributeLocationByName(e); this.disableInstanceAttribute(t); } /** * Disable the instance attribute corresponding to the location in parameter * @param attributeLocation defines the attribute location of the attribute to disable */ disableInstanceAttribute(e) { let t = !1, i; for (; (i = this._currentInstanceLocations.indexOf(e)) !== -1; ) this._currentInstanceLocations.splice(i, 1), this._currentInstanceBuffers.splice(i, 1), t = !0, i = this._currentInstanceLocations.indexOf(e); t && (this._gl.vertexAttribDivisor(e, 0), this.disableAttributeByIndex(e)); } /** * Disable the attribute corresponding to the location in parameter * @param attributeLocation defines the attribute location of the attribute to disable */ disableAttributeByIndex(e) { this._gl.disableVertexAttribArray(e), this._vertexAttribArraysEnabled[e] = !1, this._currentBufferPointers[e].active = !1; } /** * Send a draw order * @param useTriangles defines if triangles must be used to draw (else wireframe will be used) * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ draw(e, t, i, r) { this.drawElementsType(e ? 0 : 1, t, i, r); } /** * Draw a list of points * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawPointClouds(e, t, i) { this.drawArraysType(2, e, t, i); } /** * Draw a list of unindexed primitives * @param useTriangles defines if triangles must be used to draw (else wireframe will be used) * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawUnIndexed(e, t, i, r) { this.drawArraysType(e ? 0 : 1, t, i, r); } /** * Draw a list of indexed primitives * @param fillMode defines the primitive to use * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawElementsType(e, t, i, r) { this.applyStates(), this._reportDrawCall(); const s = this._drawMode(e), n = this._uintIndicesCurrentlySet ? this._gl.UNSIGNED_INT : this._gl.UNSIGNED_SHORT, a = this._uintIndicesCurrentlySet ? 4 : 2; r ? this._gl.drawElementsInstanced(s, i, n, t * a, r) : this._gl.drawElements(s, i, n, t * a); } /** * Draw a list of unindexed primitives * @param fillMode defines the primitive to use * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawArraysType(e, t, i, r) { this.applyStates(), this._reportDrawCall(); const s = this._drawMode(e); r ? 
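/*
 * Usage sketch: the draw helpers above map fill-mode constants to GL primitives
 * (0 = triangles, 1 = wireframe, 2 = points; see _drawMode). Assuming buffers and an
 * effect are already bound, with `indexCount` / `vertexCount` as placeholders:
 *
 *   engine.drawElementsType(0, 0, indexCount);        // indexed triangles
 *   engine.drawElementsType(0, 0, indexCount, 100);   // 100 instances
 *   engine.drawUnIndexed(true, 0, vertexCount);       // non-indexed triangles
 */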
this._gl.drawArraysInstanced(s, t, i, r) : this._gl.drawArrays(s, t, i); } _drawMode(e) { switch (e) { case 0: return this._gl.TRIANGLES; case 2: return this._gl.POINTS; case 1: return this._gl.LINES; case 3: return this._gl.POINTS; case 4: return this._gl.LINES; case 5: return this._gl.LINE_LOOP; case 6: return this._gl.LINE_STRIP; case 7: return this._gl.TRIANGLE_STRIP; case 8: return this._gl.TRIANGLE_FAN; default: return this._gl.TRIANGLES; } } /** @internal */ _reportDrawCall() { } // Shaders /** * @internal */ _releaseEffect(e) { this._compiledEffects[e._key] && delete this._compiledEffects[e._key]; const t = e.getPipelineContext(); t && this._deletePipelineContext(t); } /** * @internal */ _deletePipelineContext(e) { const t = e; t && t.program && (t.program.__SPECTOR_rebuildProgram = null, this._gl.deleteProgram(t.program)); } /** @internal */ _getGlobalDefines(e) { if (e) { this.isNDCHalfZRange ? e.IS_NDC_HALF_ZRANGE = "" : delete e.IS_NDC_HALF_ZRANGE, this.useReverseDepthBuffer ? e.USE_REVERSE_DEPTHBUFFER = "" : delete e.USE_REVERSE_DEPTHBUFFER, this.useExactSrgbConversions ? e.USE_EXACT_SRGB_CONVERSIONS = "" : delete e.USE_EXACT_SRGB_CONVERSIONS; return; } else { let t = ""; return this.isNDCHalfZRange && (t += "#define IS_NDC_HALF_ZRANGE"), this.useReverseDepthBuffer && (t && (t += ` `), t += "#define USE_REVERSE_DEPTHBUFFER"), this.useExactSrgbConversions && (t && (t += ` `), t += "#define USE_EXACT_SRGB_CONVERSIONS"), t; } } /** * Create a new effect (used to store vertex/fragment shaders) * @param baseName defines the base name of the effect (The name of file without .fragment.fx or .vertex.fx) * @param attributesNamesOrOptions defines either a list of attribute names or an IEffectCreationOptions object * @param uniformsNamesOrEngine defines either a list of uniform names or the engine to use * @param samplers defines an array of string used to represent textures * @param defines defines the string containing the defines to use to compile the shaders * @param fallbacks defines the list of potential fallbacks to use if shader compilation fails * @param onCompiled defines a function to call when the effect creation is successful * @param onError defines a function to call when the effect creation has failed * @param indexParameters defines an object containing the index values to use to compile shaders (like the maximum number of simultaneous lights) * @param shaderLanguage the language the shader is written in (default: GLSL) * @returns the new Effect */ createEffect(e, t, i, r, s, n, a, l, o, u = Xa.GLSL) { var h; const d = e.vertexElement || e.vertex || e.vertexToken || e.vertexSource || e, f = e.fragmentElement || e.fragment || e.fragmentToken || e.fragmentSource || e, p = this._getGlobalDefines(); let m = (h = s ?? t.defines) !== null && h !== void 0 ? h : ""; p && (m += p); const _ = d + "+" + f + "@" + m; if (this._compiledEffects[_]) { const C = this._compiledEffects[_]; return a && C.isReady() && a(C), C; } const v = new Cr(e, t, i, r, this, s, n, a, l, o, _, u); return this._compiledEffects[_] = v, v; } // eslint-disable-next-line @typescript-eslint/naming-convention static _ConcatenateShader(e, t, i = "") { return i + (t ? t + ` ` : "") + e; } _compileShader(e, t, i, r) { return this._compileRawShader(mi._ConcatenateShader(e, i, r), t); } _compileRawShader(e, t) { const i = this._gl, r = i.createShader(t === "vertex" ? 
i.VERTEX_SHADER : i.FRAGMENT_SHADER); if (!r) { let s = i.NO_ERROR, n = i.NO_ERROR; for (; (n = i.getError()) !== i.NO_ERROR; ) s = n; throw new Error(`Something went wrong while creating a gl ${t} shader object. gl error=${s}, gl isContextLost=${i.isContextLost()}, _contextWasLost=${this._contextWasLost}`); } return i.shaderSource(r, e), i.compileShader(r), r; } /** * @internal */ _getShaderSource(e) { return this._gl.getShaderSource(e); } /** * Directly creates a webGL program * @param pipelineContext defines the pipeline context to attach to * @param vertexCode defines the vertex shader code to use * @param fragmentCode defines the fragment shader code to use * @param context defines the webGL context to use (if not set, the current one will be used) * @param transformFeedbackVaryings defines the list of transform feedback varyings to use * @returns the new webGL program */ createRawShaderProgram(e, t, i, r, s = null) { r = r || this._gl; const n = this._compileRawShader(t, "vertex"), a = this._compileRawShader(i, "fragment"); return this._createShaderProgram(e, n, a, r, s); } /** * Creates a webGL program * @param pipelineContext defines the pipeline context to attach to * @param vertexCode defines the vertex shader code to use * @param fragmentCode defines the fragment shader code to use * @param defines defines the string containing the defines to use to compile the shaders * @param context defines the webGL context to use (if not set, the current one will be used) * @param transformFeedbackVaryings defines the list of transform feedback varyings to use * @returns the new webGL program */ createShaderProgram(e, t, i, r, s, n = null) { s = s || this._gl; const a = this._webGLVersion > 1 ? `#version 300 es #define WEBGL2 ` : "", l = this._compileShader(t, "vertex", r, a), o = this._compileShader(i, "fragment", r, a); return this._createShaderProgram(e, l, o, s, n); } /** * Inline functions in shader code that are marked to be inlined * @param code code to inline * @returns inlined code */ inlineShaderCode(e) { return e; } /** * Creates a new pipeline context * @param shaderProcessingContext defines the shader processing context used during the processing if available * @returns the new pipeline */ createPipelineContext(e) { const t = new lte(); return t.engine = this, this._caps.parallelShaderCompile && (t.isParallelCompiled = !0), t; } /** * Creates a new material context * @returns the new context */ createMaterialContext() { } /** * Creates a new draw context * @returns the new context */ createDrawContext() { } _createShaderProgram(e, t, i, r, s = null) { const n = r.createProgram(); if (e.program = n, !n) throw new Error("Unable to create program"); return r.attachShader(n, t), r.attachShader(n, i), r.linkProgram(n), e.context = r, e.vertexShader = t, e.fragmentShader = i, e.isParallelCompiled || this._finalizePipelineContext(e), n; } _finalizePipelineContext(e) { const t = e.context, i = e.vertexShader, r = e.fragmentShader, s = e.program; if (!t.getProgramParameter(s, t.LINK_STATUS)) { if (!this._gl.getShaderParameter(i, this._gl.COMPILE_STATUS)) { const l = this._gl.getShaderInfoLog(i); if (l) throw e.vertexCompilationError = l, new Error("VERTEX SHADER " + l); } if (!this._gl.getShaderParameter(r, this._gl.COMPILE_STATUS)) { const l = this._gl.getShaderInfoLog(r); if (l) throw e.fragmentCompilationError = l, new Error("FRAGMENT SHADER " + l); } const a = t.getProgramInfoLog(s); if (a) throw e.programLinkError = a, new Error(a); } if (this.validateShaderPrograms && 
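/*
 * Usage sketch: createEffect() compiles (or returns a cached) effect keyed by
 * "<vertex>+<fragment>@<defines>". A hedged example using shader code registered in
 * BABYLON.Effect.ShadersStore under a hypothetical name "myShader":
 *
 *   BABYLON.Effect.ShadersStore["myShaderVertexShader"] = vertexSource;
 *   BABYLON.Effect.ShadersStore["myShaderFragmentShader"] = fragmentSource;
 *   const effect = engine.createEffect("myShader",
 *     ["position"],                        // attributes
 *     ["worldViewProjection", "color"],    // uniforms
 *     []);                                 // samplers
 *   if (effect.isReady()) { engine.enableEffect(effect); }
 */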
(t.validateProgram(s), !t.getProgramParameter(s, t.VALIDATE_STATUS))) { const l = t.getProgramInfoLog(s); if (l) throw e.programValidationError = l, new Error(l); } t.deleteShader(i), t.deleteShader(r), e.vertexShader = void 0, e.fragmentShader = void 0, e.onCompiled && (e.onCompiled(), e.onCompiled = void 0); } /** * @internal */ _preparePipelineContext(e, t, i, r, s, n, a, l, o, u) { const h = e; r ? h.program = this.createRawShaderProgram(h, t, i, void 0, o) : h.program = this.createShaderProgram(h, t, i, l, void 0, o), h.program.__SPECTOR_rebuildProgram = a; } /** * @internal */ _isRenderingStateCompiled(e) { const t = e; return this._isDisposed || t._isDisposed ? !1 : this._gl.getProgramParameter(t.program, this._caps.parallelShaderCompile.COMPLETION_STATUS_KHR) ? (this._finalizePipelineContext(t), !0) : !1; } /** * @internal */ _executeWhenRenderingStateIsCompiled(e, t) { const i = e; if (!i.isParallelCompiled) { t(); return; } const r = i.onCompiled; r ? i.onCompiled = () => { r(), t(); } : i.onCompiled = t; } /** * Gets the list of webGL uniform locations associated with a specific program based on a list of uniform names * @param pipelineContext defines the pipeline context to use * @param uniformsNames defines the list of uniform names * @returns an array of webGL uniform locations */ getUniforms(e, t) { const i = new Array(), r = e; for (let s = 0; s < t.length; s++) i.push(this._gl.getUniformLocation(r.program, t[s])); return i; } /** * Gets the list of active attributes for a given webGL program * @param pipelineContext defines the pipeline context to use * @param attributesNames defines the list of attribute names to get * @returns an array of indices indicating the offset of each attribute */ getAttributes(e, t) { const i = [], r = e; for (let s = 0; s < t.length; s++) try { i.push(this._gl.getAttribLocation(r.program, t[s])); } catch { i.push(-1); } return i; } /** * Activates an effect, making it the current one (ie. the one used for rendering) * @param effect defines the effect to activate */ enableEffect(e) { e = e !== null && $o.IsWrapper(e) ? e.effect : e, !(!e || e === this._currentEffect) && (this._stencilStateComposer.stencilMaterial = void 0, e = e, this.bindSamplers(e), this._currentEffect = e, e.onBind && e.onBind(e), e._onBindObservable && e._onBindObservable.notifyObservers(e)); } /** * Set the value of an uniform to a number (int) * @param uniform defines the webGL uniform location where to store the value * @param value defines the int number to store * @returns true if the value was set */ setInt(e, t) { return e ? (this._gl.uniform1i(e, t), !0) : !1; } /** * Set the value of an uniform to a int2 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @returns true if the value was set */ setInt2(e, t, i) { return e ? (this._gl.uniform2i(e, t, i), !0) : !1; } /** * Set the value of an uniform to a int3 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @returns true if the value was set */ setInt3(e, t, i, r) { return e ? 
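/*
 * Usage sketch: getUniforms()/getAttributes() resolve locations from a pipeline context,
 * and the setInt / setFloat family writes values straight to a WebGLUniformLocation
 * (the Effect class normally wraps these calls). `effect` is assumed to come from
 * createEffect(), and the uniform names are placeholders:
 *
 *   const [stepLoc, sizeLoc] = engine.getUniforms(effect.getPipelineContext(), ["step", "texSize"]);
 *   engine.enableEffect(effect);
 *   engine.setInt(stepLoc, 0);
 *   engine.setInt2(sizeLoc, 512, 512);
 */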
(this._gl.uniform3i(e, t, i, r), !0) : !1; } /** * Set the value of an uniform to a int4 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @param w defines the 4th component of the value * @returns true if the value was set */ setInt4(e, t, i, r, s) { return e ? (this._gl.uniform4i(e, t, i, r, s), !0) : !1; } /** * Set the value of an uniform to an array of int32 * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if the value was set */ setIntArray(e, t) { return e ? (this._gl.uniform1iv(e, t), !0) : !1; } /** * Set the value of an uniform to an array of int32 (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if the value was set */ setIntArray2(e, t) { return !e || t.length % 2 !== 0 ? !1 : (this._gl.uniform2iv(e, t), !0); } /** * Set the value of an uniform to an array of int32 (stored as vec3) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if the value was set */ setIntArray3(e, t) { return !e || t.length % 3 !== 0 ? !1 : (this._gl.uniform3iv(e, t), !0); } /** * Set the value of an uniform to an array of int32 (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if the value was set */ setIntArray4(e, t) { return !e || t.length % 4 !== 0 ? !1 : (this._gl.uniform4iv(e, t), !0); } /** * Set the value of an uniform to a number (unsigned int) * @param uniform defines the webGL uniform location where to store the value * @param value defines the unsigned int number to store * @returns true if the value was set */ setUInt(e, t) { return e ? (this._gl.uniform1ui(e, t), !0) : !1; } /** * Set the value of an uniform to a unsigned int2 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @returns true if the value was set */ setUInt2(e, t, i) { return e ? (this._gl.uniform2ui(e, t, i), !0) : !1; } /** * Set the value of an uniform to a unsigned int3 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @returns true if the value was set */ setUInt3(e, t, i, r) { return e ? (this._gl.uniform3ui(e, t, i, r), !0) : !1; } /** * Set the value of an uniform to a unsigned int4 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @param w defines the 4th component of the value * @returns true if the value was set */ setUInt4(e, t, i, r, s) { return e ? 
(this._gl.uniform4ui(e, t, i, r, s), !0) : !1; } /** * Set the value of an uniform to an array of unsigned int32 * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of unsigned int32 to store * @returns true if the value was set */ setUIntArray(e, t) { return e ? (this._gl.uniform1uiv(e, t), !0) : !1; } /** * Set the value of an uniform to an array of unsigned int32 (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of unsigned int32 to store * @returns true if the value was set */ setUIntArray2(e, t) { return !e || t.length % 2 !== 0 ? !1 : (this._gl.uniform2uiv(e, t), !0); } /** * Set the value of an uniform to an array of unsigned int32 (stored as vec3) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of unsigned int32 to store * @returns true if the value was set */ setUIntArray3(e, t) { return !e || t.length % 3 !== 0 ? !1 : (this._gl.uniform3uiv(e, t), !0); } /** * Set the value of an uniform to an array of unsigned int32 (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of unsigned int32 to store * @returns true if the value was set */ setUIntArray4(e, t) { return !e || t.length % 4 !== 0 ? !1 : (this._gl.uniform4uiv(e, t), !0); } /** * Set the value of an uniform to an array of number * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if the value was set */ setArray(e, t) { return !e || t.length < 1 ? !1 : (this._gl.uniform1fv(e, t), !0); } /** * Set the value of an uniform to an array of number (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if the value was set */ setArray2(e, t) { return !e || t.length % 2 !== 0 ? !1 : (this._gl.uniform2fv(e, t), !0); } /** * Set the value of an uniform to an array of number (stored as vec3) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if the value was set */ setArray3(e, t) { return !e || t.length % 3 !== 0 ? !1 : (this._gl.uniform3fv(e, t), !0); } /** * Set the value of an uniform to an array of number (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if the value was set */ setArray4(e, t) { return !e || t.length % 4 !== 0 ? !1 : (this._gl.uniform4fv(e, t), !0); } /** * Set the value of an uniform to an array of float32 (stored as matrices) * @param uniform defines the webGL uniform location where to store the value * @param matrices defines the array of float32 to store * @returns true if the value was set */ setMatrices(e, t) { return e ? (this._gl.uniformMatrix4fv(e, !1, t), !0) : !1; } /** * Set the value of an uniform to a matrix (3x3) * @param uniform defines the webGL uniform location where to store the value * @param matrix defines the Float32Array representing the 3x3 matrix to store * @returns true if the value was set */ setMatrix3x3(e, t) { return e ? 
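/* Usage sketch for the float and matrix setters (placeholder names). setMatrices() uploads a flat
   Float32Array of 16 floats per matrix and, since the uniformMatrix4fv call above passes false for
   transpose, the data is expected in WebGL's column-major layout:
     engine.setArray4(colorLocation, [1, 0, 0, 1]);                   // one vec4
     engine.setMatrices(worldLocation, new Float32Array(16 * count)); // `count` matrices back to back
*/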
(this._gl.uniformMatrix3fv(e, !1, t), !0) : !1; } /** * Set the value of an uniform to a matrix (2x2) * @param uniform defines the webGL uniform location where to store the value * @param matrix defines the Float32Array representing the 2x2 matrix to store * @returns true if the value was set */ setMatrix2x2(e, t) { return e ? (this._gl.uniformMatrix2fv(e, !1, t), !0) : !1; } /** * Set the value of an uniform to a number (float) * @param uniform defines the webGL uniform location where to store the value * @param value defines the float number to store * @returns true if the value was transferred */ setFloat(e, t) { return e ? (this._gl.uniform1f(e, t), !0) : !1; } /** * Set the value of an uniform to a vec2 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @returns true if the value was set */ setFloat2(e, t, i) { return e ? (this._gl.uniform2f(e, t, i), !0) : !1; } /** * Set the value of an uniform to a vec3 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @returns true if the value was set */ setFloat3(e, t, i, r) { return e ? (this._gl.uniform3f(e, t, i, r), !0) : !1; } /** * Set the value of an uniform to a vec4 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @param w defines the 4th component of the value * @returns true if the value was set */ setFloat4(e, t, i, r, s) { return e ? (this._gl.uniform4f(e, t, i, r, s), !0) : !1; } // States /** * Apply all cached states (depth, culling, stencil and alpha) */ applyStates() { if (this._depthCullingState.apply(this._gl), this._stencilStateComposer.apply(this._gl), this._alphaState.apply(this._gl), this._colorWriteChanged) { this._colorWriteChanged = !1; const e = this._colorWrite; this._gl.colorMask(e, e, e, e); } } /** * Enable or disable color writing * @param enable defines the state to set */ setColorWrite(e) { e !== this._colorWrite && (this._colorWriteChanged = !0, this._colorWrite = e); } /** * Gets a boolean indicating if color writing is enabled * @returns the current color writing state */ getColorWrite() { return this._colorWrite; } /** * Gets the depth culling state manager */ get depthCullingState() { return this._depthCullingState; } /** * Gets the alpha state manager */ get alphaState() { return this._alphaState; } /** * Gets the stencil state manager */ get stencilState() { return this._stencilState; } /** * Gets the stencil state composer */ get stencilStateComposer() { return this._stencilStateComposer; } // Textures /** * Clears the list of texture accessible through engine. * This can help preventing texture load conflict due to name collision. 
*/ clearInternalTexturesCache() { this._internalTexturesCache.length = 0; } /** * Force the entire cache to be cleared * You should not have to use this function unless your engine needs to share the webGL context with another engine * @param bruteForce defines a boolean to force clearing ALL caches (including stencil, detoh and alpha states) */ wipeCaches(e) { this.preventCacheWipeBetweenFrames && !e || (this._currentEffect = null, this._viewportCached.x = 0, this._viewportCached.y = 0, this._viewportCached.z = 0, this._viewportCached.w = 0, this._unbindVertexArrayObject(), e && (this._currentProgram = null, this.resetTextureCache(), this._stencilStateComposer.reset(), this._depthCullingState.reset(), this._depthCullingState.depthFunc = this._gl.LEQUAL, this._alphaState.reset(), this._alphaMode = 1, this._alphaEquation = 0, this._colorWrite = !0, this._colorWriteChanged = !0, this._unpackFlipYCached = null, this._gl.pixelStorei(this._gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, this._gl.NONE), this._gl.pixelStorei(this._gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, 0), this._mustWipeVertexAttributes = !0, this.unbindAllAttributes()), this._resetVertexBufferBinding(), this._cachedIndexBuffer = null, this._cachedEffectForVertexBuffers = null, this.bindIndexBuffer(null)); } /** * @internal */ _getSamplingParameters(e, t) { const i = this._gl; let r = i.NEAREST, s = i.NEAREST; switch (e) { case 11: r = i.LINEAR, t ? s = i.LINEAR_MIPMAP_NEAREST : s = i.LINEAR; break; case 3: r = i.LINEAR, t ? s = i.LINEAR_MIPMAP_LINEAR : s = i.LINEAR; break; case 8: r = i.NEAREST, t ? s = i.NEAREST_MIPMAP_LINEAR : s = i.NEAREST; break; case 4: r = i.NEAREST, t ? s = i.NEAREST_MIPMAP_NEAREST : s = i.NEAREST; break; case 5: r = i.NEAREST, t ? s = i.LINEAR_MIPMAP_NEAREST : s = i.LINEAR; break; case 6: r = i.NEAREST, t ? s = i.LINEAR_MIPMAP_LINEAR : s = i.LINEAR; break; case 7: r = i.NEAREST, s = i.LINEAR; break; case 1: r = i.NEAREST, s = i.NEAREST; break; case 9: r = i.LINEAR, t ? s = i.NEAREST_MIPMAP_NEAREST : s = i.NEAREST; break; case 10: r = i.LINEAR, t ? s = i.NEAREST_MIPMAP_LINEAR : s = i.NEAREST; break; case 2: r = i.LINEAR, s = i.LINEAR; break; case 12: r = i.LINEAR, s = i.NEAREST; break; } return { min: s, mag: r }; } /** @internal */ _createTexture() { const e = this._gl.createTexture(); if (!e) throw new Error("Unable to create texture"); return e; } /** @internal */ _createHardwareTexture() { return new BI(this._createTexture(), this._gl); } /** * Creates an internal texture without binding it to a framebuffer * @internal * @param size defines the size of the texture * @param options defines the options used to create the texture * @param delayGPUTextureCreation true to delay the texture creation the first time it is really needed. false to create it right away * @param source source type of the texture * @returns a new internal texture */ _createInternalTexture(e, t, i = !0, r = ts.Unknown) { var s; let n = !1, a = 0, l = 3, o = 5, u = !1, h = 1, d; t !== void 0 && typeof t == "object" ? (n = !!t.generateMipMaps, a = t.type === void 0 ? 0 : t.type, l = t.samplingMode === void 0 ? 3 : t.samplingMode, o = t.format === void 0 ? 5 : t.format, u = t.useSRGBBuffer === void 0 ? !1 : t.useSRGBBuffer, h = (s = t.samples) !== null && s !== void 0 ? 
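/* Usage sketch for _createInternalTexture (internal API; the option names match the destructuring in
   this function, and the numeric enum values are assumptions matching the defaults used here):
     const scratch = engine._createInternalTexture(
       { width: 256, height: 256 },
       { generateMipMaps: true, samplingMode: 3, format: 5, type: 0, useSRGBBuffer: false, label: "scratch" }
     );
   The texture is allocated immediately (texImage2D with null data) and tracked in _internalTexturesCache.
*/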
s : 1, d = t.label) : n = !!t, u && (u = this._caps.supportSRGBBuffers && (this.webGLVersion > 1 || this.isWebGPU)), (a === 1 && !this._caps.textureFloatLinearFiltering || a === 2 && !this._caps.textureHalfFloatLinearFiltering) && (l = 1), a === 1 && !this._caps.textureFloat && (a = 0, Ce.Warn("Float textures are not supported. Type forced to TEXTURETYPE_UNSIGNED_BYTE")); const f = this._gl, p = new ln(this, r), m = e.width || e, _ = e.height || e, v = e.layers || 0, C = this._getSamplingParameters(l, n), x = v !== 0 ? f.TEXTURE_2D_ARRAY : f.TEXTURE_2D, b = this._getRGBABufferInternalSizedFormat(a, o, u), S = this._getInternalFormat(o), M = this._getWebGLTextureType(a); return this._bindTextureDirectly(x, p), v !== 0 ? (p.is2DArray = !0, f.texImage3D(x, 0, b, m, _, v, 0, S, M, null)) : f.texImage2D(x, 0, b, m, _, 0, S, M, null), f.texParameteri(x, f.TEXTURE_MAG_FILTER, C.mag), f.texParameteri(x, f.TEXTURE_MIN_FILTER, C.min), f.texParameteri(x, f.TEXTURE_WRAP_S, f.CLAMP_TO_EDGE), f.texParameteri(x, f.TEXTURE_WRAP_T, f.CLAMP_TO_EDGE), n && this._gl.generateMipmap(x), this._bindTextureDirectly(x, null), p._useSRGBBuffer = u, p.baseWidth = m, p.baseHeight = _, p.width = m, p.height = _, p.depth = v, p.isReady = !0, p.samples = h, p.generateMipMaps = n, p.samplingMode = l, p.type = a, p.format = o, p.label = d, this._internalTexturesCache.push(p), p; } /** * @internal */ _getUseSRGBBuffer(e, t) { return e && this._caps.supportSRGBBuffers && (this.webGLVersion > 1 || this.isWebGPU || t); } _createTextureBase(e, t, i, r, s = 3, n = null, a = null, l, o, u = null, h = null, d = null, f = null, p, m, _) { e = e || ""; const v = e.substr(0, 5) === "data:", C = e.substr(0, 5) === "blob:", x = v && e.indexOf(";base64,") !== -1, b = h || new ln(this, ts.Url); b !== h && (b.label = e.substring(0, 60)); const S = e; this._transformTextureUrl && !x && !h && !u && (e = this._transformTextureUrl(e)), S !== e && (b._originalUrl = S); const M = e.lastIndexOf("."); let R = f || (M > -1 ? e.substring(M).toLowerCase() : ""), w = null; R.indexOf("?") > -1 && (R = R.split("?")[0]); for (const B of mi._TextureLoaders) if (B.canLoad(R, p)) { w = B; break; } r && r.addPendingData(b), b.url = e, b.generateMipMaps = !t, b.samplingMode = s, b.invertY = i, b._useSRGBBuffer = this._getUseSRGBBuffer(!!_, t), this._doNotHandleContextLost || (b._buffer = u); let k = null; n && !h && (k = b.onLoadedObservable.add(n)), h || this._internalTexturesCache.push(b); const L = (B, U) => { r && r.removePendingData(b), e === S ? (k && b.onLoadedObservable.remove(k), gi.UseFallbackTexture && this._createTextureBase(gi.FallbackTexture, t, b.invertY, r, s, null, a, l, o, u, b), B = (B || "Unknown error") + (gi.UseFallbackTexture ? " - Fallback texture was used" : ""), b.onErrorObservable.notifyObservers({ message: B, exception: U }), a && a(B, U)) : (Ce.Warn(`Failed to load ${e}, falling back to ${S}`), this._createTextureBase(S, t, b.invertY, r, s, n, a, l, o, u, b, d, f, p, m, _)); }; if (w) { const B = (U) => { w.loadData(U, b, (K, ee, Z, q, le, ie) => { ie ? L("TextureLoader failed to load data") : l(b, R, r, { width: K, height: ee }, b.invertY, !Z, q, () => (le(), !1), s); }, m); }; u ? u instanceof ArrayBuffer ? B(new Uint8Array(u)) : ArrayBuffer.isView(u) ? B(u) : a && a("Unable to load: only ArrayBuffer or ArrayBufferView is supported", null) : this._loadFile(e, (U) => B(new Uint8Array(U)), void 0, r ? 
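/* Usage sketch for the createTexture entry point defined below (placeholder names). Per its JSDoc, the url
   argument accepts a regular URL, a "data:...;base64," string, or a "data:name" marker when the actual bytes
   are supplied through the buffer parameter:
     const tex = engine.createTexture("textures/albedo.jpg", false, true, scene);
     const raw = engine.createTexture("data:myTexture.jpg", true, true, scene, 3, null, null, arrayBuffer);
*/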
r.offlineProvider : void 0, !0, (U, K) => { L("Unable to load " + (U && U.responseURL, K)); }); } else { const B = (U) => { C && !this._doNotHandleContextLost && (b._buffer = U), l(b, R, r, U, b.invertY, t, !1, o, s); }; !v || x ? u && (typeof u.decoding == "string" || u.close) ? B(u) : mi._FileToolsLoadImage(e, B, L, r ? r.offlineProvider : null, p, b.invertY && this._features.needsInvertingBitmap ? { imageOrientation: "flipY" } : void 0) : typeof u == "string" || u instanceof ArrayBuffer || ArrayBuffer.isView(u) || u instanceof Blob ? mi._FileToolsLoadImage(u, B, L, r ? r.offlineProvider : null, p, b.invertY && this._features.needsInvertingBitmap ? { imageOrientation: "flipY" } : void 0) : u && B(u); } return b; } /** * Usually called from Texture.ts. * Passed information to create a WebGLTexture * @param url defines a value which contains one of the following: * * A conventional http URL, e.g. 'http://...' or 'file://...' * * A base64 string of in-line texture data, e.g. 'data:image/jpg;base64,/...' * * An indicator that data being passed using the buffer parameter, e.g. 'data:mytexture.jpg' * @param noMipmap defines a boolean indicating that no mipmaps shall be generated. Ignored for compressed textures. They must be in the file * @param invertY when true, image is flipped when loaded. You probably want true. Certain compressed textures may invert this if their default is inverted (eg. ktx) * @param scene needed for loading to the correct scene * @param samplingMode mode with should be used sample / access the texture (Default: Texture.TRILINEAR_SAMPLINGMODE) * @param onLoad optional callback to be called upon successful completion * @param onError optional callback to be called upon failure * @param buffer a source of a file previously fetched as either a base64 string, an ArrayBuffer (compressed or image format), HTMLImageElement (image format), or a Blob * @param fallback an internal argument in case the function must be called again, due to etc1 not having alpha capabilities * @param format internal format. Default: RGB when extension is '.jpg' else RGBA. Ignored for compressed textures * @param forcedExtension defines the extension to use to pick the right loader * @param mimeType defines an optional mime type * @param loaderOptions options to be passed to the loader * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). 
* @returns a InternalTexture for assignment back into BABYLON.Texture */ createTexture(e, t, i, r, s = 3, n = null, a = null, l = null, o = null, u = null, h = null, d, f, p, m) { return this._createTextureBase(e, t, i, r, s, n, a, this._prepareWebGLTexture.bind(this), (_, v, C, x, b, S) => { const M = this._gl, R = C.width === _ && C.height === v, w = this._getTexImageParametersForCreateTexture(u, x, b._useSRGBBuffer); if (R) return M.texImage2D(M.TEXTURE_2D, 0, w.internalFormat, w.format, w.type, C), !1; const V = this._caps.maxTextureSize; if (C.width > V || C.height > V || !this._supportsHardwareTextureRescaling) return this._prepareWorkingCanvas(), !this._workingCanvas || !this._workingContext || (this._workingCanvas.width = _, this._workingCanvas.height = v, this._workingContext.drawImage(C, 0, 0, C.width, C.height, 0, 0, _, v), M.texImage2D(M.TEXTURE_2D, 0, w.internalFormat, w.format, w.type, this._workingCanvas), b.width = _, b.height = v), !1; { const k = new ln(this, ts.Temp); this._bindTextureDirectly(M.TEXTURE_2D, k, !0), M.texImage2D(M.TEXTURE_2D, 0, w.internalFormat, w.format, w.type, C), this._rescaleTexture(k, b, r, w.format, () => { this._releaseTexture(k), this._bindTextureDirectly(M.TEXTURE_2D, b, !0), S(); }); } return !0; }, l, o, u, h, d, f, m); } /** * Calls to the GL texImage2D and texImage3D functions require three arguments describing the pixel format of the texture. * createTexture derives these from the babylonFormat and useSRGBBuffer arguments and also the file extension of the URL it's working with. * This function encapsulates that derivation for easy unit testing. * @param babylonFormat Babylon's format enum, as specified in ITextureCreationOptions. * @param fileExtension The file extension including the dot, e.g. .jpg. * @param useSRGBBuffer Use SRGB not linear. * @returns The options to pass to texImage2D or texImage3D calls. * @internal */ _getTexImageParametersForCreateTexture(e, t, i) { e == null && (e = t === ".jpg" && !i ? 4 : 5); let r, s; return this.webGLVersion === 1 ? (r = this._getInternalFormat(e, i), s = r) : (r = this._getInternalFormat(e, !1), s = this._getRGBABufferInternalSizedFormat(0, e, i)), { internalFormat: s, format: r, type: this._gl.UNSIGNED_BYTE }; } /** * Loads an image as an HTMLImageElement. 
* @param input url string, ArrayBuffer, or Blob to load * @param onLoad callback called when the image successfully loads * @param onError callback called when the image fails to load * @param offlineProvider offline provider for caching * @param mimeType optional mime type * @param imageBitmapOptions optional the options to use when creating an ImageBitmap * @returns the HTMLImageElement of the loaded image * @internal */ static _FileToolsLoadImage(e, t, i, r, s, n) { throw yr("FileTools"); } /** * @internal */ _rescaleTexture(e, t, i, r, s) { } /** * Creates a raw texture * @param data defines the data to store in the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param format defines the format of the data * @param generateMipMaps defines if the engine should generate the mip levels * @param invertY defines if data must be stored with Y axis inverted * @param samplingMode defines the required sampling mode (Texture.NEAREST_SAMPLINGMODE by default) * @param compression defines the compression used (null by default) * @param type defines the type fo the data (Engine.TEXTURETYPE_UNSIGNED_INT by default) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns the raw texture inside an InternalTexture */ createRawTexture(e, t, i, r, s, n, a, l = null, o = 0, u = 0, h = !1) { throw yr("Engine.RawTexture"); } /** * Creates a new raw cube texture * @param data defines the array of data to use to create each face * @param size defines the size of the textures * @param format defines the format of the data * @param type defines the type of the data (like Engine.TEXTURETYPE_UNSIGNED_INT) * @param generateMipMaps defines if the engine should generate the mip levels * @param invertY defines if data must be stored with Y axis inverted * @param samplingMode defines the required sampling mode (like Texture.NEAREST_SAMPLINGMODE) * @param compression defines the compression used (null by default) * @returns the cube texture as an InternalTexture */ createRawCubeTexture(e, t, i, r, s, n, a, l = null) { throw yr("Engine.RawTexture"); } /** * Creates a new raw 3D texture * @param data defines the data used to create the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param depth defines the depth of the texture * @param format defines the format of the texture * @param generateMipMaps defines if the engine must generate mip levels * @param invertY defines if data must be stored with Y axis inverted * @param samplingMode defines the required sampling mode (like Texture.NEAREST_SAMPLINGMODE) * @param compression defines the compressed used (can be null) * @param textureType defines the compressed used (can be null) * @returns a new raw 3D texture (stored in an InternalTexture) */ createRawTexture3D(e, t, i, r, s, n, a, l, o = null, u = 0) { throw yr("Engine.RawTexture"); } /** * Creates a new raw 2D array texture * @param data defines the data used to create the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param depth defines the number of layers of the texture * @param format defines the format of the texture * @param generateMipMaps defines if the engine must generate mip levels * @param invertY defines if data must be stored with Y axis inverted * @param 
samplingMode defines the required sampling mode (like Texture.NEAREST_SAMPLINGMODE) * @param compression defines the compressed used (can be null) * @param textureType defines the compressed used (can be null) * @returns a new raw 2D array texture (stored in an InternalTexture) */ createRawTexture2DArray(e, t, i, r, s, n, a, l, o = null, u = 0) { throw yr("Engine.RawTexture"); } /** * @internal */ _unpackFlipY(e) { this._unpackFlipYCached !== e && (this._gl.pixelStorei(this._gl.UNPACK_FLIP_Y_WEBGL, e ? 1 : 0), this.enableUnpackFlipYCached && (this._unpackFlipYCached = e)); } /** @internal */ _getUnpackAlignement() { return this._gl.getParameter(this._gl.UNPACK_ALIGNMENT); } _getTextureTarget(e) { return e.isCube ? this._gl.TEXTURE_CUBE_MAP : e.is3D ? this._gl.TEXTURE_3D : e.is2DArray || e.isMultiview ? this._gl.TEXTURE_2D_ARRAY : this._gl.TEXTURE_2D; } /** * Update the sampling mode of a given texture * @param samplingMode defines the required sampling mode * @param texture defines the texture to update * @param generateMipMaps defines whether to generate mipmaps for the texture */ updateTextureSamplingMode(e, t, i = !1) { const r = this._getTextureTarget(t), s = this._getSamplingParameters(e, t.useMipMaps || i); this._setTextureParameterInteger(r, this._gl.TEXTURE_MAG_FILTER, s.mag, t), this._setTextureParameterInteger(r, this._gl.TEXTURE_MIN_FILTER, s.min), i && (t.generateMipMaps = !0, this._gl.generateMipmap(r)), this._bindTextureDirectly(r, null), t.samplingMode = e; } /** * Update the dimensions of a texture * @param texture texture to update * @param width new width of the texture * @param height new height of the texture * @param depth new depth of the texture */ updateTextureDimensions(e, t, i, r = 1) { } /** * Update the sampling mode of a given texture * @param texture defines the texture to update * @param wrapU defines the texture wrap mode of the u coordinates * @param wrapV defines the texture wrap mode of the v coordinates * @param wrapR defines the texture wrap mode of the r coordinates */ updateTextureWrappingMode(e, t, i = null, r = null) { const s = this._getTextureTarget(e); t !== null && (this._setTextureParameterInteger(s, this._gl.TEXTURE_WRAP_S, this._getTextureWrapMode(t), e), e._cachedWrapU = t), i !== null && (this._setTextureParameterInteger(s, this._gl.TEXTURE_WRAP_T, this._getTextureWrapMode(i), e), e._cachedWrapV = i), (e.is2DArray || e.is3D) && r !== null && (this._setTextureParameterInteger(s, this._gl.TEXTURE_WRAP_R, this._getTextureWrapMode(r), e), e._cachedWrapR = r), this._bindTextureDirectly(s, null); } /** * @internal */ _setupDepthStencilTexture(e, t, i, r, s, n = 1) { const a = t.width || t, l = t.height || t, o = t.layers || 0; e.baseWidth = a, e.baseHeight = l, e.width = a, e.height = l, e.is2DArray = o > 0, e.depth = o, e.isReady = !0, e.samples = n, e.generateMipMaps = !1, e.samplingMode = r ? 2 : 1, e.type = 0, e._comparisonFunction = s; const u = this._gl, h = this._getTextureTarget(e), d = this._getSamplingParameters(e.samplingMode, !1); u.texParameteri(h, u.TEXTURE_MAG_FILTER, d.mag), u.texParameteri(h, u.TEXTURE_MIN_FILTER, d.min), u.texParameteri(h, u.TEXTURE_WRAP_S, u.CLAMP_TO_EDGE), u.texParameteri(h, u.TEXTURE_WRAP_T, u.CLAMP_TO_EDGE), this.webGLVersion > 1 && (s === 0 ? 
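/* Usage sketch for the sampling/wrapping updaters above (placeholder names). Wrap values map to GL constants
   via _getTextureWrapMode (0 -> CLAMP_TO_EDGE, 1 -> REPEAT, 2 -> MIRRORED_REPEAT), and sampling modes are the
   numeric constants handled by _getSamplingParameters (e.g. 3 gives LINEAR / LINEAR_MIPMAP_LINEAR, i.e. trilinear):
     engine.updateTextureSamplingMode(3, internalTexture, true); // switch to trilinear and build mipmaps
     engine.updateTextureWrappingMode(internalTexture, 1, 0);    // wrapU = REPEAT, wrapV = CLAMP_TO_EDGE
*/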
(u.texParameteri(h, u.TEXTURE_COMPARE_FUNC, 515), u.texParameteri(h, u.TEXTURE_COMPARE_MODE, u.NONE)) : (u.texParameteri(h, u.TEXTURE_COMPARE_FUNC, s), u.texParameteri(h, u.TEXTURE_COMPARE_MODE, u.COMPARE_REF_TO_TEXTURE))); } /** * @internal */ _uploadCompressedDataToTextureDirectly(e, t, i, r, s, n = 0, a = 0) { const l = this._gl; let o = l.TEXTURE_2D; if (e.isCube && (o = l.TEXTURE_CUBE_MAP_POSITIVE_X + n), e._useSRGBBuffer) switch (t) { case 37492: case 36196: this._caps.etc2 ? t = l.COMPRESSED_SRGB8_ETC2 : e._useSRGBBuffer = !1; break; case 37496: this._caps.etc2 ? t = l.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC : e._useSRGBBuffer = !1; break; case 36492: t = l.COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT; break; case 37808: t = l.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR; break; case 33776: this._caps.s3tc_srgb ? t = l.COMPRESSED_SRGB_S3TC_DXT1_EXT : e._useSRGBBuffer = !1; break; case 33777: this._caps.s3tc_srgb ? t = l.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT : e._useSRGBBuffer = !1; break; case 33779: this._caps.s3tc_srgb ? t = l.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT : e._useSRGBBuffer = !1; break; default: e._useSRGBBuffer = !1; break; } this._gl.compressedTexImage2D(o, a, t, i, r, 0, s); } /** * @internal */ _uploadDataToTextureDirectly(e, t, i = 0, r = 0, s, n = !1) { const a = this._gl, l = this._getWebGLTextureType(e.type), o = this._getInternalFormat(e.format), u = s === void 0 ? this._getRGBABufferInternalSizedFormat(e.type, e.format, e._useSRGBBuffer) : this._getInternalFormat(s, e._useSRGBBuffer); this._unpackFlipY(e.invertY); let h = a.TEXTURE_2D; e.isCube && (h = a.TEXTURE_CUBE_MAP_POSITIVE_X + i); const d = Math.round(Math.log(e.width) * Math.LOG2E), f = Math.round(Math.log(e.height) * Math.LOG2E), p = n ? e.width : Math.pow(2, Math.max(d - r, 0)), m = n ? e.height : Math.pow(2, Math.max(f - r, 0)); a.texImage2D(h, r, u, p, m, 0, o, l, t); } /** * Update a portion of an internal texture * @param texture defines the texture to update * @param imageData defines the data to store into the texture * @param xOffset defines the x coordinates of the update rectangle * @param yOffset defines the y coordinates of the update rectangle * @param width defines the width of the update rectangle * @param height defines the height of the update rectangle * @param faceIndex defines the face index if texture is a cube (0 by default) * @param lod defines the lod level to update (0 by default) * @param generateMipMaps defines whether to generate mipmaps or not */ updateTextureData(e, t, i, r, s, n, a = 0, l = 0, o = !1) { const u = this._gl, h = this._getWebGLTextureType(e.type), d = this._getInternalFormat(e.format); this._unpackFlipY(e.invertY); let f = u.TEXTURE_2D, p = u.TEXTURE_2D; e.isCube && (p = u.TEXTURE_CUBE_MAP_POSITIVE_X + a, f = u.TEXTURE_CUBE_MAP), this._bindTextureDirectly(f, e, !0), u.texSubImage2D(p, l, i, r, s, n, d, h, t), o && this._gl.generateMipmap(p), this._bindTextureDirectly(f, null); } /** * @internal */ _uploadArrayBufferViewToTexture(e, t, i = 0, r = 0) { const s = this._gl, n = e.isCube ? 
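/* Usage sketch for updateTextureData above (placeholder names). Only the given rectangle of the
   already-allocated texture is re-uploaded via texSubImage2D; face index and LOD default to 0:
     engine.updateTextureData(internalTexture, new Uint8Array(16 * 16 * 4), 0, 0, 16, 16);
     engine.updateTextureData(cubeTexture, faceData, 0, 0, 64, 64, 2, 0, true); // face index 2, then rebuild mips
*/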
s.TEXTURE_CUBE_MAP : s.TEXTURE_2D; this._bindTextureDirectly(n, e, !0), this._uploadDataToTextureDirectly(e, t, i, r), this._bindTextureDirectly(n, null, !0); } _prepareWebGLTextureContinuation(e, t, i, r, s) { const n = this._gl; if (!n) return; const a = this._getSamplingParameters(s, !i); n.texParameteri(n.TEXTURE_2D, n.TEXTURE_MAG_FILTER, a.mag), n.texParameteri(n.TEXTURE_2D, n.TEXTURE_MIN_FILTER, a.min), !i && !r && n.generateMipmap(n.TEXTURE_2D), this._bindTextureDirectly(n.TEXTURE_2D, null), t && t.removePendingData(e), e.onLoadedObservable.notifyObservers(e), e.onLoadedObservable.clear(); } _prepareWebGLTexture(e, t, i, r, s, n, a, l, o = 3) { const u = this.getCaps().maxTextureSize, h = Math.min(u, this.needPOTTextures ? mi.GetExponentOfTwo(r.width, u) : r.width), d = Math.min(u, this.needPOTTextures ? mi.GetExponentOfTwo(r.height, u) : r.height), f = this._gl; if (f) { if (!e._hardwareTexture) { i && i.removePendingData(e); return; } this._bindTextureDirectly(f.TEXTURE_2D, e, !0), this._unpackFlipY(s === void 0 ? !0 : !!s), e.baseWidth = r.width, e.baseHeight = r.height, e.width = h, e.height = d, e.isReady = !0, e.type = e.type !== -1 ? e.type : 0, e.format = e.format !== -1 ? e.format : t === ".jpg" && !e._useSRGBBuffer ? 4 : 5, !l(h, d, r, t, e, () => { this._prepareWebGLTextureContinuation(e, i, n, a, o); }) && this._prepareWebGLTextureContinuation(e, i, n, a, o); } } /** * @internal */ _setupFramebufferDepthAttachments(e, t, i, r, s = 1) { const n = this._gl; if (e && t) return this._createRenderBuffer(i, r, s, n.DEPTH_STENCIL, n.DEPTH24_STENCIL8, n.DEPTH_STENCIL_ATTACHMENT); if (t) { let a = n.DEPTH_COMPONENT16; return this._webGLVersion > 1 && (a = n.DEPTH_COMPONENT32F), this._createRenderBuffer(i, r, s, a, a, n.DEPTH_ATTACHMENT); } return e ? this._createRenderBuffer(i, r, s, n.STENCIL_INDEX8, n.STENCIL_INDEX8, n.STENCIL_ATTACHMENT) : null; } /** * @internal */ _createRenderBuffer(e, t, i, r, s, n, a = !0) { const o = this._gl.createRenderbuffer(); return this._updateRenderBuffer(o, e, t, i, r, s, n, a); } _updateRenderBuffer(e, t, i, r, s, n, a, l = !0) { const o = this._gl; return o.bindRenderbuffer(o.RENDERBUFFER, e), r > 1 && o.renderbufferStorageMultisample ? o.renderbufferStorageMultisample(o.RENDERBUFFER, r, n, t, i) : o.renderbufferStorage(o.RENDERBUFFER, s, t, i), o.framebufferRenderbuffer(o.FRAMEBUFFER, a, o.RENDERBUFFER, e), l && o.bindRenderbuffer(o.RENDERBUFFER, null), e; } /** * @internal */ _releaseTexture(e) { var t; this._deleteTexture((t = e._hardwareTexture) === null || t === void 0 ? 
void 0 : t.underlyingResource), this.unbindAllTextures(); const i = this._internalTexturesCache.indexOf(e); i !== -1 && this._internalTexturesCache.splice(i, 1), e._lodTextureHigh && e._lodTextureHigh.dispose(), e._lodTextureMid && e._lodTextureMid.dispose(), e._lodTextureLow && e._lodTextureLow.dispose(), e._irradianceTexture && e._irradianceTexture.dispose(); } /** * @internal */ _releaseRenderTargetWrapper(e) { const t = this._renderTargetWrapperCache.indexOf(e); t !== -1 && this._renderTargetWrapperCache.splice(t, 1); } _deleteTexture(e) { e && this._gl.deleteTexture(e); } _setProgram(e) { this._currentProgram !== e && (this._gl.useProgram(e), this._currentProgram = e); } /** * Binds an effect to the webGL context * @param effect defines the effect to bind */ bindSamplers(e) { const t = e.getPipelineContext(); this._setProgram(t.program); const i = e.getSamplers(); for (let r = 0; r < i.length; r++) { const s = e.getUniform(i[r]); s && (this._boundUniforms[r] = s); } this._currentEffect = null; } _activateCurrentTexture() { this._currentTextureChannel !== this._activeChannel && (this._gl.activeTexture(this._gl.TEXTURE0 + this._activeChannel), this._currentTextureChannel = this._activeChannel); } /** * @internal */ _bindTextureDirectly(e, t, i = !1, r = !1) { var s, n; let a = !1; const l = t && t._associatedChannel > -1; if (i && l && (this._activeChannel = t._associatedChannel), this._boundTexturesCache[this._activeChannel] !== t || r) { if (this._activateCurrentTexture(), t && t.isMultiview) throw Ce.Error(["_bindTextureDirectly called with a multiview texture!", e, t]), "_bindTextureDirectly called with a multiview texture!"; this._gl.bindTexture(e, (n = (s = t == null ? void 0 : t._hardwareTexture) === null || s === void 0 ? void 0 : s.underlyingResource) !== null && n !== void 0 ? n : null), this._boundTexturesCache[this._activeChannel] = t, t && (t._associatedChannel = this._activeChannel); } else i && (a = !0, this._activateCurrentTexture()); return l && !i && this._bindSamplerUniformToChannel(t._associatedChannel, this._activeChannel), a; } /** * @internal */ _bindTexture(e, t, i) { if (e === void 0) return; t && (t._associatedChannel = e), this._activeChannel = e; const r = t ? this._getTextureTarget(t) : this._gl.TEXTURE_2D; this._bindTextureDirectly(r, t); } /** * Unbind all textures from the webGL context */ unbindAllTextures() { for (let e = 0; e < this._maxSimultaneousTextures; e++) this._activeChannel = e, this._bindTextureDirectly(this._gl.TEXTURE_2D, null), this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, null), this.webGLVersion > 1 && (this._bindTextureDirectly(this._gl.TEXTURE_3D, null), this._bindTextureDirectly(this._gl.TEXTURE_2D_ARRAY, null)); } /** * Sets a texture to the according uniform. 
* @param channel The texture channel * @param uniform The uniform to set * @param texture The texture to apply * @param name The name of the uniform in the effect */ setTexture(e, t, i, r) { e !== void 0 && (t && (this._boundUniforms[e] = t), this._setTexture(e, i)); } _bindSamplerUniformToChannel(e, t) { const i = this._boundUniforms[e]; !i || i._currentState === t || (this._gl.uniform1i(i, t), i._currentState = t); } _getTextureWrapMode(e) { switch (e) { case 1: return this._gl.REPEAT; case 0: return this._gl.CLAMP_TO_EDGE; case 2: return this._gl.MIRRORED_REPEAT; } return this._gl.REPEAT; } _setTexture(e, t, i = !1, r = !1, s = "") { if (!t) return this._boundTexturesCache[e] != null && (this._activeChannel = e, this._bindTextureDirectly(this._gl.TEXTURE_2D, null), this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, null), this.webGLVersion > 1 && (this._bindTextureDirectly(this._gl.TEXTURE_3D, null), this._bindTextureDirectly(this._gl.TEXTURE_2D_ARRAY, null))), !1; if (t.video) { this._activeChannel = e; const o = t.getInternalTexture(); o && (o._associatedChannel = e), t.update(); } else if (t.delayLoadState === 4) return t.delayLoad(), !1; let n; r ? n = t.depthStencilTexture : t.isReady() ? n = t.getInternalTexture() : t.isCube ? n = this.emptyCubeTexture : t.is3D ? n = this.emptyTexture3D : t.is2DArray ? n = this.emptyTexture2DArray : n = this.emptyTexture, !i && n && (n._associatedChannel = e); let a = !0; this._boundTexturesCache[e] === n && (i || this._bindSamplerUniformToChannel(n._associatedChannel, e), a = !1), this._activeChannel = e; const l = this._getTextureTarget(n); if (a && this._bindTextureDirectly(l, n, i), n && !n.isMultiview) { if (n.isCube && n._cachedCoordinatesMode !== t.coordinatesMode) { n._cachedCoordinatesMode = t.coordinatesMode; const o = t.coordinatesMode !== 3 && t.coordinatesMode !== 5 ? 1 : 0; t.wrapU = o, t.wrapV = o; } n._cachedWrapU !== t.wrapU && (n._cachedWrapU = t.wrapU, this._setTextureParameterInteger(l, this._gl.TEXTURE_WRAP_S, this._getTextureWrapMode(t.wrapU), n)), n._cachedWrapV !== t.wrapV && (n._cachedWrapV = t.wrapV, this._setTextureParameterInteger(l, this._gl.TEXTURE_WRAP_T, this._getTextureWrapMode(t.wrapV), n)), n.is3D && n._cachedWrapR !== t.wrapR && (n._cachedWrapR = t.wrapR, this._setTextureParameterInteger(l, this._gl.TEXTURE_WRAP_R, this._getTextureWrapMode(t.wrapR), n)), this._setAnisotropicLevel(l, n, t.anisotropicFilteringLevel); } return !0; } /** * Sets an array of texture to the webGL context * @param channel defines the channel where the texture array must be set * @param uniform defines the associated uniform location * @param textures defines the array of textures to bind * @param name name of the channel */ setTextureArray(e, t, i, r) { if (!(e === void 0 || !t)) { (!this._textureUnits || this._textureUnits.length !== i.length) && (this._textureUnits = new Int32Array(i.length)); for (let s = 0; s < i.length; s++) { const n = i[s].getInternalTexture(); n ? 
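/* Usage sketch for the texture binding helpers in this class (placeholder names). setTexture() resolves the
   bound InternalTexture (falling back to the engine's empty textures while the source is still loading), and
   setTextureArray() assigns consecutive channels starting at the given one:
     engine.setTexture(0, samplerLocation, diffuseTexture, "diffuseSampler");
     engine.setTextureArray(1, shadowSamplersLocation, shadowTextures, "shadowSamplers");
*/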
(this._textureUnits[s] = e + s, n._associatedChannel = e + s) : this._textureUnits[s] = -1; } this._gl.uniform1iv(t, this._textureUnits); for (let s = 0; s < i.length; s++) this._setTexture(this._textureUnits[s], i[s], !0); } } /** * @internal */ _setAnisotropicLevel(e, t, i) { const r = this._caps.textureAnisotropicFilterExtension; t.samplingMode !== 11 && t.samplingMode !== 3 && t.samplingMode !== 2 && (i = 1), r && t._cachedAnisotropicFilteringLevel !== i && (this._setTextureParameterFloat(e, r.TEXTURE_MAX_ANISOTROPY_EXT, Math.min(i, this._caps.maxAnisotropy), t), t._cachedAnisotropicFilteringLevel = i); } _setTextureParameterFloat(e, t, i, r) { this._bindTextureDirectly(e, r, !0, !0), this._gl.texParameterf(e, t, i); } _setTextureParameterInteger(e, t, i, r) { r && this._bindTextureDirectly(e, r, !0, !0), this._gl.texParameteri(e, t, i); } /** * Unbind all vertex attributes from the webGL context */ unbindAllAttributes() { if (this._mustWipeVertexAttributes) { this._mustWipeVertexAttributes = !1; for (let e = 0; e < this._caps.maxVertexAttribs; e++) this.disableAttributeByIndex(e); return; } for (let e = 0, t = this._vertexAttribArraysEnabled.length; e < t; e++) e >= this._caps.maxVertexAttribs || !this._vertexAttribArraysEnabled[e] || this.disableAttributeByIndex(e); } /** * Force the engine to release all cached effects. This means that next effect compilation will have to be done completely even if a similar effect was already compiled */ releaseEffects() { for (const e in this._compiledEffects) { const t = this._compiledEffects[e].getPipelineContext(); this._deletePipelineContext(t); } this._compiledEffects = {}; } /** * Dispose and release all associated resources */ dispose() { var e, t; this._isDisposed = !0, this.stopRenderLoop(), this.onBeforeTextureInitObservable && this.onBeforeTextureInitObservable.clear(), this._emptyTexture && (this._releaseTexture(this._emptyTexture), this._emptyTexture = null), this._emptyCubeTexture && (this._releaseTexture(this._emptyCubeTexture), this._emptyCubeTexture = null), this._dummyFramebuffer && this._gl.deleteFramebuffer(this._dummyFramebuffer), this.releaseEffects(), (e = this.releaseComputeEffects) === null || e === void 0 || e.call(this), this.unbindAllAttributes(), this._boundUniforms = {}, cu() && this._renderingCanvas && (this._doNotHandleContextLost || (this._renderingCanvas.removeEventListener("webglcontextlost", this._onContextLost), this._renderingCanvas.removeEventListener("webglcontextrestored", this._onContextRestored)), window.removeEventListener("resize", this._checkForMobile)), this._workingCanvas = null, this._workingContext = null, this._currentBufferPointers.length = 0, this._renderingCanvas = null, this._currentProgram = null, this._boundRenderFunction = null, Cr.ResetCache(); for (const i of this._activeRequests) i.abort(); this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this._creationOptions.loseContextOnDispose && ((t = this._gl.getExtension("WEBGL_lose_context")) === null || t === void 0 || t.loseContext()); } /** * Attach a new callback raised when context lost event is fired * @param callback defines the callback to call */ attachContextLostEvent(e) { this._renderingCanvas && this._renderingCanvas.addEventListener("webglcontextlost", e, !1); } /** * Attach a new callback raised when context restored event is fired * @param callback defines the callback to call */ attachContextRestoredEvent(e) { this._renderingCanvas && this._renderingCanvas.addEventListener("webglcontextrestored", 
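/* Usage sketch for the context-loss hooks in this class (placeholder names). Both helpers simply add a
   listener on the rendering canvas, so they are no-ops when the engine has no canvas attached:
     engine.attachContextLostEvent(() => console.warn("WebGL context lost"));
     engine.attachContextRestoredEvent(() => console.log("WebGL context restored"));
*/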
e, !1); } /** * Get the current error code of the webGL context * @returns the error code * @see https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/getError */ getError() { return this._gl.getError(); } _canRenderToFloatFramebuffer() { return this._webGLVersion > 1 ? this._caps.colorBufferFloat : this._canRenderToFramebuffer(1); } _canRenderToHalfFloatFramebuffer() { return this._webGLVersion > 1 ? this._caps.colorBufferFloat : this._canRenderToFramebuffer(2); } // Thank you : http://stackoverflow.com/questions/28827511/webgl-ios-render-to-floating-point-texture _canRenderToFramebuffer(e) { const t = this._gl; for (; t.getError() !== t.NO_ERROR; ) ; let i = !0; const r = t.createTexture(); t.bindTexture(t.TEXTURE_2D, r), t.texImage2D(t.TEXTURE_2D, 0, this._getRGBABufferInternalSizedFormat(e), 1, 1, 0, t.RGBA, this._getWebGLTextureType(e), null), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_MIN_FILTER, t.NEAREST), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_MAG_FILTER, t.NEAREST); const s = t.createFramebuffer(); t.bindFramebuffer(t.FRAMEBUFFER, s), t.framebufferTexture2D(t.FRAMEBUFFER, t.COLOR_ATTACHMENT0, t.TEXTURE_2D, r, 0); const n = t.checkFramebufferStatus(t.FRAMEBUFFER); if (i = i && n === t.FRAMEBUFFER_COMPLETE, i = i && t.getError() === t.NO_ERROR, i && (t.clear(t.COLOR_BUFFER_BIT), i = i && t.getError() === t.NO_ERROR), i) { t.bindFramebuffer(t.FRAMEBUFFER, null); const a = t.RGBA, l = t.UNSIGNED_BYTE, o = new Uint8Array(4); t.readPixels(0, 0, 1, 1, a, l, o), i = i && t.getError() === t.NO_ERROR; } for (t.deleteTexture(r), t.deleteFramebuffer(s), t.bindFramebuffer(t.FRAMEBUFFER, null); !i && t.getError() !== t.NO_ERROR; ) ; return i; } /** * @internal */ _getWebGLTextureType(e) { if (this._webGLVersion === 1) { switch (e) { case 1: return this._gl.FLOAT; case 2: return this._gl.HALF_FLOAT_OES; case 0: return this._gl.UNSIGNED_BYTE; case 8: return this._gl.UNSIGNED_SHORT_4_4_4_4; case 9: return this._gl.UNSIGNED_SHORT_5_5_5_1; case 10: return this._gl.UNSIGNED_SHORT_5_6_5; } return this._gl.UNSIGNED_BYTE; } switch (e) { case 3: return this._gl.BYTE; case 0: return this._gl.UNSIGNED_BYTE; case 4: return this._gl.SHORT; case 5: return this._gl.UNSIGNED_SHORT; case 6: return this._gl.INT; case 7: return this._gl.UNSIGNED_INT; case 1: return this._gl.FLOAT; case 2: return this._gl.HALF_FLOAT; case 8: return this._gl.UNSIGNED_SHORT_4_4_4_4; case 9: return this._gl.UNSIGNED_SHORT_5_5_5_1; case 10: return this._gl.UNSIGNED_SHORT_5_6_5; case 11: return this._gl.UNSIGNED_INT_2_10_10_10_REV; case 12: return this._gl.UNSIGNED_INT_24_8; case 13: return this._gl.UNSIGNED_INT_10F_11F_11F_REV; case 14: return this._gl.UNSIGNED_INT_5_9_9_9_REV; case 15: return this._gl.FLOAT_32_UNSIGNED_INT_24_8_REV; } return this._gl.UNSIGNED_BYTE; } /** * @internal */ _getInternalFormat(e, t = !1) { let i = t ? this._glSRGBExtensionValues.SRGB8_ALPHA8 : this._gl.RGBA; switch (e) { case 0: i = this._gl.ALPHA; break; case 1: i = this._gl.LUMINANCE; break; case 2: i = this._gl.LUMINANCE_ALPHA; break; case 6: i = this._gl.RED; break; case 7: i = this._gl.RG; break; case 4: i = t ? this._glSRGBExtensionValues.SRGB : this._gl.RGB; break; case 5: i = t ? 
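/* Sketch of the probing pattern used by _canRenderToFramebuffer above (raw WebGL, placeholder `gl` and
   `probeTexture`): allocate a 1x1 texture of the candidate type, attach it to a framebuffer, and treat the
   format as renderable only if the framebuffer is complete and no GL error was raised:
     const fb = gl.createFramebuffer();
     gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
     gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, probeTexture, 0);
     const ok = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE && gl.getError() === gl.NO_ERROR;
*/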
this._glSRGBExtensionValues.SRGB8_ALPHA8 : this._gl.RGBA; break; } if (this._webGLVersion > 1) switch (e) { case 8: i = this._gl.RED_INTEGER; break; case 9: i = this._gl.RG_INTEGER; break; case 10: i = this._gl.RGB_INTEGER; break; case 11: i = this._gl.RGBA_INTEGER; break; } return i; } /** * @internal */ _getRGBABufferInternalSizedFormat(e, t, i = !1) { if (this._webGLVersion === 1) { if (t !== void 0) switch (t) { case 0: return this._gl.ALPHA; case 1: return this._gl.LUMINANCE; case 2: return this._gl.LUMINANCE_ALPHA; case 4: return i ? this._glSRGBExtensionValues.SRGB : this._gl.RGB; } return this._gl.RGBA; } switch (e) { case 3: switch (t) { case 6: return this._gl.R8_SNORM; case 7: return this._gl.RG8_SNORM; case 4: return this._gl.RGB8_SNORM; case 8: return this._gl.R8I; case 9: return this._gl.RG8I; case 10: return this._gl.RGB8I; case 11: return this._gl.RGBA8I; default: return this._gl.RGBA8_SNORM; } case 0: switch (t) { case 6: return this._gl.R8; case 7: return this._gl.RG8; case 4: return i ? this._glSRGBExtensionValues.SRGB8 : this._gl.RGB8; case 5: return i ? this._glSRGBExtensionValues.SRGB8_ALPHA8 : this._gl.RGBA8; case 8: return this._gl.R8UI; case 9: return this._gl.RG8UI; case 10: return this._gl.RGB8UI; case 11: return this._gl.RGBA8UI; case 0: return this._gl.ALPHA; case 1: return this._gl.LUMINANCE; case 2: return this._gl.LUMINANCE_ALPHA; default: return this._gl.RGBA8; } case 4: switch (t) { case 8: return this._gl.R16I; case 9: return this._gl.RG16I; case 10: return this._gl.RGB16I; case 11: return this._gl.RGBA16I; default: return this._gl.RGBA16I; } case 5: switch (t) { case 8: return this._gl.R16UI; case 9: return this._gl.RG16UI; case 10: return this._gl.RGB16UI; case 11: return this._gl.RGBA16UI; default: return this._gl.RGBA16UI; } case 6: switch (t) { case 8: return this._gl.R32I; case 9: return this._gl.RG32I; case 10: return this._gl.RGB32I; case 11: return this._gl.RGBA32I; default: return this._gl.RGBA32I; } case 7: switch (t) { case 8: return this._gl.R32UI; case 9: return this._gl.RG32UI; case 10: return this._gl.RGB32UI; case 11: return this._gl.RGBA32UI; default: return this._gl.RGBA32UI; } case 1: switch (t) { case 6: return this._gl.R32F; case 7: return this._gl.RG32F; case 4: return this._gl.RGB32F; case 5: return this._gl.RGBA32F; default: return this._gl.RGBA32F; } case 2: switch (t) { case 6: return this._gl.R16F; case 7: return this._gl.RG16F; case 4: return this._gl.RGB16F; case 5: return this._gl.RGBA16F; default: return this._gl.RGBA16F; } case 10: return this._gl.RGB565; case 13: return this._gl.R11F_G11F_B10F; case 14: return this._gl.RGB9_E5; case 8: return this._gl.RGBA4; case 9: return this._gl.RGB5_A1; case 11: switch (t) { case 5: return this._gl.RGB10_A2; case 11: return this._gl.RGB10_A2UI; default: return this._gl.RGB10_A2; } } return i ? 
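/* Illustration of _getRGBABufferInternalSizedFormat above (numeric enums as used throughout this file).
   The (textureType, format) pair picks the WebGL2 sized format, e.g. type 1 (float) + format 5 (RGBA) gives
   gl.RGBA32F and type 2 (half float) + format 6 (RED) gives gl.R16F, while the WebGL1 branch falls back to
   unsized formats such as gl.RGBA:
     const sized = engine._getRGBABufferInternalSizedFormat(1, 5); // gl.RGBA32F on WebGL2
*/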
this._glSRGBExtensionValues.SRGB8_ALPHA8 : this._gl.RGBA8; } /** * @internal */ _loadFile(e, t, i, r, s, n) { const a = mi._FileToolsLoadFile(e, t, i, r, s, n); return this._activeRequests.push(a), a.onCompleteObservable.add((l) => { this._activeRequests.splice(this._activeRequests.indexOf(l), 1); }), a; } /** * Loads a file from a url * @param url url to load * @param onSuccess callback called when the file successfully loads * @param onProgress callback called while file is loading (if the server supports this mode) * @param offlineProvider defines the offline provider for caching * @param useArrayBuffer defines a boolean indicating that date must be returned as ArrayBuffer * @param onError callback called when the file fails to load * @returns a file request object * @internal */ static _FileToolsLoadFile(e, t, i, r, s, n) { throw yr("FileTools"); } /** * Reads pixels from the current frame buffer. Please note that this function can be slow * @param x defines the x coordinate of the rectangle where pixels must be read * @param y defines the y coordinate of the rectangle where pixels must be read * @param width defines the width of the rectangle where pixels must be read * @param height defines the height of the rectangle where pixels must be read * @param hasAlpha defines whether the output should have alpha or not (defaults to true) * @param flushRenderer true to flush the renderer from the pending commands before reading the pixels * @returns a ArrayBufferView promise (Uint8Array) containing RGBA colors */ readPixels(e, t, i, r, s = !0, n = !0) { const a = s ? 4 : 3, l = s ? this._gl.RGBA : this._gl.RGB, o = new Uint8Array(r * i * a); return n && this.flushFramebuffer(), this._gl.readPixels(e, t, i, r, l, this._gl.UNSIGNED_BYTE, o), Promise.resolve(o); } /** * Gets a Promise indicating if the engine can be instantiated (ie. if a webGL context can be found) */ static get IsSupportedAsync() { return Promise.resolve(this.isSupported()); } /** * Gets a boolean indicating if the engine can be instantiated (ie. if a webGL context can be found) */ static get IsSupported() { return this.isSupported(); } /** * Gets a boolean indicating if the engine can be instantiated (ie. if a webGL context can be found) * @returns true if the engine can be created * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention static isSupported() { if (this._HasMajorPerformanceCaveat !== null) return !this._HasMajorPerformanceCaveat; if (this._IsSupported === null) try { const e = this._CreateCanvas(1, 1), t = e.getContext("webgl") || e.getContext("experimental-webgl"); this._IsSupported = t != null && !!window.WebGLRenderingContext; } catch { this._IsSupported = !1; } return this._IsSupported; } /** * Gets a boolean indicating if the engine can be instantiated on a performant device (ie. if a webGL context can be found and it does not use a slow implementation) */ static get HasMajorPerformanceCaveat() { if (this._HasMajorPerformanceCaveat === null) try { const e = this._CreateCanvas(1, 1), t = e.getContext("webgl", { failIfMajorPerformanceCaveat: !0 }) || e.getContext("experimental-webgl", { failIfMajorPerformanceCaveat: !0 }); this._HasMajorPerformanceCaveat = !t; } catch { this._HasMajorPerformanceCaveat = !1; } return this._HasMajorPerformanceCaveat; } /** * Find the next highest power of two. * @param x Number to start search from. * @returns Next highest power of two. 
*/ static CeilingPOT(e) { return e--, e |= e >> 1, e |= e >> 2, e |= e >> 4, e |= e >> 8, e |= e >> 16, e++, e; } /** * Find the next lowest power of two. * @param x Number to start search from. * @returns Next lowest power of two. */ static FloorPOT(e) { return e = e | e >> 1, e = e | e >> 2, e = e | e >> 4, e = e | e >> 8, e = e | e >> 16, e - (e >> 1); } /** * Find the nearest power of two. * @param x Number to start search from. * @returns Next nearest power of two. */ static NearestPOT(e) { const t = mi.CeilingPOT(e), i = mi.FloorPOT(e); return t - e > e - i ? i : t; } /** * Get the closest exponent of two * @param value defines the value to approximate * @param max defines the maximum value to return * @param mode defines how to define the closest value * @returns closest exponent of two of the given value */ static GetExponentOfTwo(e, t, i = 2) { let r; switch (i) { case 1: r = mi.FloorPOT(e); break; case 2: r = mi.NearestPOT(e); break; case 3: default: r = mi.CeilingPOT(e); break; } return Math.min(r, t); } /** * Queue a new function into the requested animation frame pool (ie. this function will be executed by the browser (or the javascript engine) for the next frame) * @param func - the function to be called * @param requester - the object that will request the next frame. Falls back to window. * @returns frame number */ static QueueNewFrame(e, t) { if (cu()) { const { requestAnimationFrame: i } = t || window; if (typeof i == "function") return i(e); } else if (typeof requestAnimationFrame == "function") return requestAnimationFrame(e); return setTimeout(e, 16); } /** * Gets host document * @returns the host document object */ getHostDocument() { return this._renderingCanvas && this._renderingCanvas.ownerDocument ? this._renderingCanvas.ownerDocument : qR() ? document : null; } } mi._TempClearColorUint32 = new Uint32Array(4); mi._TempClearColorInt32 = new Int32Array(4); mi.ExceptionList = [ { key: "Chrome/63.0", capture: "63\\.0\\.3239\\.(\\d+)", captureConstraint: 108, targets: ["uniformBuffer"] }, { key: "Firefox/58", capture: null, captureConstraint: null, targets: ["uniformBuffer"] }, { key: "Firefox/59", capture: null, captureConstraint: null, targets: ["uniformBuffer"] }, { key: "Chrome/72.+?Mobile", capture: null, captureConstraint: null, targets: ["vao"] }, { key: "Chrome/73.+?Mobile", capture: null, captureConstraint: null, targets: ["vao"] }, { key: "Chrome/74.+?Mobile", capture: null, captureConstraint: null, targets: ["vao"] }, { key: "Mac OS.+Chrome/71", capture: null, captureConstraint: null, targets: ["vao"] }, { key: "Mac OS.+Chrome/72", capture: null, captureConstraint: null, targets: ["vao"] }, { key: "Mac OS.+Chrome", capture: null, captureConstraint: null, targets: ["uniformBuffer"] }, { key: "Chrome/12\\d\\..+?Mobile", capture: null, captureConstraint: null, targets: ["uniformBuffer"] }, // desktop osx safari 15.4 { key: ".*AppleWebKit.*(15.4).*Safari", capture: null, captureConstraint: null, targets: ["antialias", "maxMSAASamples"] }, // mobile browsers using safari 15.4 on ios { key: ".*(15.4).*AppleWebKit.*Safari", capture: null, captureConstraint: null, targets: ["antialias", "maxMSAASamples"] } ]; mi._TextureLoaders = []; mi.CollisionsEpsilon = 1e-3; mi._IsSupported = null; mi._HasMajorPerformanceCaveat = null; class BO { /** * Polyfill for setImmediate * @param action defines the action to execute after the current execution block */ static SetImmediate(e) { cu() && window.setImmediate ? 
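/* Usage sketch for the power-of-two helpers above (`mi` is the minified engine class name in this bundle;
   the results follow directly from the bit tricks shown):
     mi.CeilingPOT(900);            // 1024
     mi.FloorPOT(900);              // 512
     mi.NearestPOT(900);            // 1024
     mi.GetExponentOfTwo(900, 512); // nearest POT clamped to the max: 512
*/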
window.setImmediate(e) : setTimeout(e, 1); } } const cte = new RegExp(/^data:([^,]+\/[^,]+)?;base64,/i); class UO extends F4 { /** * Creates a new LoadFileError * @param message defines the message of the error * @param object defines the optional web request */ constructor(e, t) { super(e, $C.LoadFileError), this.name = "LoadFileError", dP._setPrototypeOf(this, UO.prototype), t instanceof go ? this.request = t : this.file = t; } } class Z9 extends F4 { /** * Creates a new LoadFileError * @param message defines the message of the error * @param request defines the optional web request */ constructor(e, t) { super(e, $C.RequestFileError), this.request = t, this.name = "RequestFileError", dP._setPrototypeOf(this, Z9.prototype); } } class zB extends F4 { /** * Creates a new ReadFileError * @param message defines the message of the error * @param file defines the optional file */ constructor(e, t) { super(e, $C.ReadFileError), this.file = t, this.name = "ReadFileError", dP._setPrototypeOf(this, zB.prototype); } } const ou = { /** * Gets or sets the retry strategy to apply when an error happens while loading an asset. * When defining this function, return the wait time before trying again or return -1 to * stop retrying and error out. */ DefaultRetryStrategy: rte.ExponentialBackoff(), /** * Gets or sets the base URL to use to load assets */ BaseUrl: "", /** * Default behaviour for cors in the application. * It can be a string if the expected behavior is identical in the entire app. * Or a callback to be able to set it per url or on a group of them (in case of Video source for instance) */ CorsBehavior: "anonymous", /** * Gets or sets a function used to pre-process url before using them to load assets * @param url */ PreprocessUrl: (c) => c, /** * Gets or sets the base URL to use to load scripts * Used for both JS and WASM */ ScriptBaseUrl: "", /** * Gets or sets a function used to pre-process script url before using them to load. * Used for both JS and WASM * @param url defines the url to process */ ScriptPreprocessUrl: (c) => c }, ute = (c) => (c = c.replace(/#/gm, "%23"), c), HB = (c, e) => { if (!(c && c.indexOf("data:") === 0) && ou.CorsBehavior) if (typeof ou.CorsBehavior == "string" || ou.CorsBehavior instanceof String) e.crossOrigin = ou.CorsBehavior; else { const t = ou.CorsBehavior(c); t && (e.crossOrigin = t); } }, fw = (c, e, t, i, r = "", s) => { const n = gi.LastCreatedEngine; if (typeof HTMLImageElement > "u" && !(n != null && n._features.forceBitmapOverHTMLImageElement)) return t("LoadImage is only supported in web or BabylonNative environments."), null; let a, l = !1; c instanceof ArrayBuffer || ArrayBuffer.isView(c) ? typeof Blob < "u" && typeof URL < "u" ? (a = URL.createObjectURL(new Blob([c], { type: r })), l = !0) : a = `data:${r};base64,` + DL(c) : c instanceof Blob ? (a = URL.createObjectURL(c), l = !0) : (a = ute(c), a = ou.PreprocessUrl(c)); const o = (S) => { if (t) { const M = a || c.toString(); t(`Error while trying to load image: ${M.indexOf("http") === 0 || M.length <= 128 ? 
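/* Usage sketch for the file-tools options object above (minified here as `ou`; the URL is a placeholder).
   For example, routing asset requests through a CDN and forcing https could look like:
     ou.BaseUrl = "https://cdn.example.com/assets/";
     ou.CorsBehavior = "anonymous";                          // or a (url) => string callback
     ou.PreprocessUrl = (url) => url.replace(/^http:/, "https:");
*/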
M : M.slice(0, 128) + "..."}`, S); } }; if (n != null && n._features.forceBitmapOverHTMLImageElement) return vT(a, (S) => { n.createImageBitmap(new Blob([S], { type: r }), Object.assign({ premultiplyAlpha: "none" }, s)).then((M) => { e(M), l && URL.revokeObjectURL(a); }).catch((M) => { t && t("Error while trying to load image: " + c, M); }); }, void 0, i || void 0, !0, (S, M) => { o(M); }), null; const u = new Image(); HB(a, u); const h = [], d = () => { h.forEach((S) => { S.target.addEventListener(S.name, S.handler); }); }, f = () => { h.forEach((S) => { S.target.removeEventListener(S.name, S.handler); }), h.length = 0; }, p = () => { f(), e(u), l && u.src && URL.revokeObjectURL(u.src); }, m = (S) => { f(), o(S), l && u.src && URL.revokeObjectURL(u.src); }, _ = (S) => { if (S.blockedURI !== u.src) return; f(); const M = new Error(`CSP violation of policy ${S.effectiveDirective} ${S.blockedURI}. Current policy is ${S.originalPolicy}`); gi.UseFallbackTexture = !1, o(M), l && u.src && URL.revokeObjectURL(u.src), u.src = ""; }; h.push({ target: u, name: "load", handler: p }), h.push({ target: u, name: "error", handler: m }), h.push({ target: document, name: "securitypolicyviolation", handler: _ }), d(); const v = a.substring(0, 5) === "blob:", C = a.substring(0, 5) === "data:", x = () => { v || C || !go.IsCustomRequestAvailable ? u.src = a : vT(a, (S, M, R) => { const w = !r && R ? R : r, V = new Blob([S], { type: w }), k = URL.createObjectURL(V); l = !0, u.src = k; }, void 0, i || void 0, !0, (S, M) => { o(M); }); }, b = () => { i && i.loadImage(a, u); }; if (!v && !C && i && i.enableTexturesOffline) i.open(b, x); else { if (a.indexOf("file:") !== -1) { const S = decodeURIComponent(a.substring(5).toLowerCase()); if (JR.FilesToLoad[S] && typeof URL < "u") { try { let M; try { M = URL.createObjectURL(JR.FilesToLoad[S]); } catch { M = URL.createObjectURL(JR.FilesToLoad[S]); } u.src = M, l = !0; } catch { u.src = ""; } return u; } } x(); } return u; }, VO = (c, e, t, i, r) => { const s = new FileReader(), n = { onCompleteObservable: new Fe(), abort: () => s.abort() }; return s.onloadend = () => n.onCompleteObservable.notifyObservers(n), r && (s.onerror = () => { r(new zB(`Unable to read ${c.name}`, c)); }), s.onload = (a) => { e(a.target.result); }, t && (s.onprogress = t), i ? s.readAsArrayBuffer(c) : s.readAsText(c), n; }, vT = (c, e, t, i, r, s, n) => { if (c.name) return VO(c, e, t, r, s ? (u) => { s(void 0, u); } : void 0); const a = c; if (a.indexOf("file:") !== -1) { let u = decodeURIComponent(a.substring(5).toLowerCase()); u.indexOf("./") === 0 && (u = u.substring(2)); const h = JR.FilesToLoad[u]; if (h) return VO(h, e, t, r, s ? (d) => s(void 0, new UO(d.message, d.file)) : void 0); } const { match: l, type: o } = hte(a); if (l) { const u = { onCompleteObservable: new Fe(), abort: () => () => { } }; try { const h = r ? pw(a) : cK(a); e(h, void 0, o); } catch (h) { s ? s(void 0, h) : Ce.Error(h.message || "Failed to parse the Data URL"); } return BO.SetImmediate(() => { u.onCompleteObservable.notifyObservers(u); }), u; } return GB(a, (u, h) => { e(u, h == null ? void 0 : h.responseURL, h == null ? void 0 : h.getResponseHeader("content-type")); }, t, i, r, s ? 
(u) => { s(u.request, new UO(u.message, u.request)); } : void 0, n); }, GB = (c, e, t, i, r, s, n) => { c = ute(c), c = ou.PreprocessUrl(c); const a = ou.BaseUrl + c; let l = !1; const o = { onCompleteObservable: new Fe(), abort: () => l = !0 }, u = () => { let h = new go(), d = null, f; const p = () => { h && (t && h.removeEventListener("progress", t), f && h.removeEventListener("readystatechange", f), h.removeEventListener("loadend", m)); }; let m = () => { p(), o.onCompleteObservable.notifyObservers(o), o.onCompleteObservable.clear(), t = void 0, f = null, m = null, s = void 0, n = void 0, e = void 0; }; o.abort = () => { l = !0, m && m(), h && h.readyState !== (XMLHttpRequest.DONE || 4) && h.abort(), d !== null && (clearTimeout(d), d = null), h = null; }; const _ = (C) => { const x = C.message || "Unknown error"; s && h ? s(new Z9(x, h)) : Ce.Error(x); }, v = (C) => { if (h) { if (h.open("GET", a), n) try { n(h); } catch (x) { _(x); return; } r && (h.responseType = "arraybuffer"), t && h.addEventListener("progress", t), m && h.addEventListener("loadend", m), f = () => { if (!(l || !h) && h.readyState === (XMLHttpRequest.DONE || 4)) { if (f && h.removeEventListener("readystatechange", f), h.status >= 200 && h.status < 300 || h.status === 0 && (!cu() || lK())) { try { e && e(r ? h.response : h.responseText, h); } catch (S) { _(S); } return; } const x = ou.DefaultRetryStrategy; if (x) { const S = x(a, h, C); if (S !== -1) { p(), h = new go(), d = setTimeout(() => v(C + 1), S); return; } } const b = new Z9("Error status: " + h.status + " " + h.statusText + " - Unable to load " + a, h); s && s(b); } }, h.addEventListener("readystatechange", f), h.send(); } }; v(0); }; if (i && i.enableSceneOffline) { const h = (f) => { f && f.status > 400 ? s && s(f) : u(); }, d = () => { i && i.loadFile(ou.BaseUrl + c, (f) => { !l && e && e(f), o.onCompleteObservable.notifyObservers(o); }, t ? (f) => { !l && t && t(f); } : void 0, h, r); }; i.open(d, h); } else u(); return o; }, lK = () => typeof location < "u" && location.protocol === "file:", wL = (c) => cte.test(c), hte = (c) => { const e = cte.exec(c); return e === null || e.length === 0 ? 
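/*
 * Editor's note (illustrative, not part of the bundle): the XHR loader above consults
 * ou.DefaultRetryStrategy with (url, request, retryCount) and expects the wait time in
 * milliseconds before the next attempt, or -1 to stop retrying. A custom strategy could
 * be installed through the Tools wrapper (`Ve`) defined further below, for example:
 *
 *   // hedged sketch — caps retries at 3, with exponential backoff between attempts
 *   Ve.DefaultRetryStrategy = (url, request, retryCount) =>
 *     retryCount >= 3 ? -1 : 500 * Math.pow(2, retryCount);
 */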
{ match: !1, type: "" } : { match: !0, type: e[0].replace("data:", "").replace("base64,", "") }; }; function pw(c) { return OL(c.split(",")[1]); } const cK = (c) => kB(c.split(",")[1]), Jle = () => { mi._FileToolsLoadImage = fw, mi._FileToolsLoadFile = vT, pg._FileToolsLoadFile = vT; }; Jle(); let ZD; const dte = (c, e, t, i, r, s, n, a, l, o) => { ZD = { DecodeBase64UrlToBinary: c, DecodeBase64UrlToString: e, DefaultRetryStrategy: t.DefaultRetryStrategy, BaseUrl: t.BaseUrl, CorsBehavior: t.CorsBehavior, PreprocessUrl: t.PreprocessUrl, IsBase64DataUrl: i, IsFileURL: r, LoadFile: s, LoadImage: n, ReadFile: a, RequestFile: l, SetCorsBehavior: o }, Object.defineProperty(ZD, "DefaultRetryStrategy", { get: function() { return t.DefaultRetryStrategy; }, set: function(u) { t.DefaultRetryStrategy = u; } }), Object.defineProperty(ZD, "BaseUrl", { get: function() { return t.BaseUrl; }, set: function(u) { t.BaseUrl = u; } }), Object.defineProperty(ZD, "PreprocessUrl", { get: function() { return t.PreprocessUrl; }, set: function(u) { t.PreprocessUrl = u; } }), Object.defineProperty(ZD, "CorsBehavior", { get: function() { return t.CorsBehavior; }, set: function(u) { t.CorsBehavior = u; } }); }; dte(pw, cK, ou, wL, lK, vT, fw, VO, GB, HB); class z9 { /** * Tries to instantiate a new object from a given class name * @param className defines the class name to instantiate * @returns the new object or null if the system was not able to do the instantiation */ static Instantiate(e) { if (this.RegisteredExternalClasses && this.RegisteredExternalClasses[e]) return this.RegisteredExternalClasses[e]; const t = Qo(e); if (t) return t; Ce.Warn(e + " not found, you may have missed an import."); const i = e.split("."); let r = window || this; for (let s = 0, n = i.length; s < n; s++) r = r[i[s]]; return typeof r != "function" ? null : r; } } z9.RegisteredExternalClasses = {}; function G_() { return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => { const e = Math.random() * 16 | 0; return (c === "x" ? e : e & 3 | 8).toString(16); }); } const ece = { /** * Implementation from http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/2117523#answer-2117523 * Be aware Math.random() could cause collisions, but: * "All but 6 of the 128 bits of the ID are randomly generated, which means that for any two ids, there's a 1 in 2^^122 (or 5.3x10^^36) chance they'll collide" * @returns a pseudo random id */ // eslint-disable-next-line @typescript-eslint/naming-convention RandomId: G_ }; class Ve { /** * Gets or sets the base URL to use to load assets */ static get BaseUrl() { return ou.BaseUrl; } static set BaseUrl(e) { ou.BaseUrl = e; } /** * This function checks whether a URL is absolute or not. * It will also detect data and blob URLs * @param url the url to check * @returns is the url absolute or relative */ static IsAbsoluteUrl(e) { return e.indexOf("//") === 0 ? !0 : e.indexOf("://") === -1 || e.indexOf(".") === -1 || e.indexOf("/") === -1 || e.indexOf(":") > e.indexOf("/") ? 
!1 : e.indexOf("://") < e.indexOf(".") || e.indexOf("data:") === 0 || e.indexOf("blob:") === 0; } /** * Sets the base URL to use to load scripts */ static set ScriptBaseUrl(e) { ou.ScriptBaseUrl = e; } static get ScriptBaseUrl() { return ou.ScriptBaseUrl; } /** * Sets a preprocessing function to run on a source URL before importing it * Note that this function will execute AFTER the base URL is appended to the URL */ static set ScriptPreprocessUrl(e) { ou.ScriptPreprocessUrl = e; } static get ScriptPreprocessUrl() { return ou.ScriptPreprocessUrl; } /** * Gets or sets the retry strategy to apply when an error happens while loading an asset */ static get DefaultRetryStrategy() { return ou.DefaultRetryStrategy; } static set DefaultRetryStrategy(e) { ou.DefaultRetryStrategy = e; } /** * Default behavior for cors in the application. * It can be a string if the expected behavior is identical in the entire app. * Or a callback to be able to set it per url or on a group of them (in case of Video source for instance) */ static get CorsBehavior() { return ou.CorsBehavior; } static set CorsBehavior(e) { ou.CorsBehavior = e; } /** * Gets or sets a global variable indicating if fallback texture must be used when a texture cannot be loaded * @ignorenaming */ static get UseFallbackTexture() { return gi.UseFallbackTexture; } static set UseFallbackTexture(e) { gi.UseFallbackTexture = e; } /** * Use this object to register external classes like custom textures or material * to allow the loaders to instantiate them */ static get RegisteredExternalClasses() { return z9.RegisteredExternalClasses; } static set RegisteredExternalClasses(e) { z9.RegisteredExternalClasses = e; } /** * Texture content used if a texture cannot loaded * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention static get fallbackTexture() { return gi.FallbackTexture; } // eslint-disable-next-line @typescript-eslint/naming-convention static set fallbackTexture(e) { gi.FallbackTexture = e; } /** * Read the content of a byte array at a specified coordinates (taking in account wrapping) * @param u defines the coordinate on X axis * @param v defines the coordinate on Y axis * @param width defines the width of the source data * @param height defines the height of the source data * @param pixels defines the source byte array * @param color defines the output color */ static FetchToRef(e, t, i, r, s, n) { const a = Math.abs(e) * i % i | 0, l = Math.abs(t) * r % r | 0, o = (a + l * i) * 4; n.r = s[o] / 255, n.g = s[o + 1] / 255, n.b = s[o + 2] / 255, n.a = s[o + 3] / 255; } /** * Interpolates between a and b via alpha * @param a The lower value (returned when alpha = 0) * @param b The upper value (returned when alpha = 1) * @param alpha The interpolation-factor * @returns The mixed value */ static Mix(e, t, i) { return e * (1 - i) + t * i; } /** * Tries to instantiate a new object from a given class name * @param className defines the class name to instantiate * @returns the new object or null if the system was not able to do the instantiation */ static Instantiate(e) { return z9.Instantiate(e); } /** * Polyfill for setImmediate * @param action defines the action to execute after the current execution block */ static SetImmediate(e) { BO.SetImmediate(e); } /** * Function indicating if a number is an exponent of 2 * @param value defines the value to test * @returns true if the value is an exponent of 2 */ static IsExponentOfTwo(e) { let t = 1; do t *= 2; while (t < e); return t === e; } /** * Returns the nearest 32-bit 
single precision float representation of a Number * @param value A Number. If the parameter is of a different type, it will get converted * to a number or to NaN if it cannot be converted * @returns number */ static FloatRound(e) { return Math.fround(e); } /** * Extracts the filename from a path * @param path defines the path to use * @returns the filename */ static GetFilename(e) { const t = e.lastIndexOf("/"); return t < 0 ? e : e.substring(t + 1); } /** * Extracts the "folder" part of a path (everything before the filename). * @param uri The URI to extract the info from * @param returnUnchangedIfNoSlash Do not touch the URI if no slashes are present * @returns The "folder" part of the path */ static GetFolderPath(e, t = !1) { const i = e.lastIndexOf("/"); return i < 0 ? t ? e : "" : e.substring(0, i + 1); } /** * Convert an angle in radians to degrees * @param angle defines the angle to convert * @returns the angle in degrees */ static ToDegrees(e) { return e * 180 / Math.PI; } /** * Convert an angle in degrees to radians * @param angle defines the angle to convert * @returns the angle in radians */ static ToRadians(e) { return e * Math.PI / 180; } /** * Smooth angle changes (kind of low-pass filter), in particular for device orientation "shaking" * Use trigonometric functions to avoid discontinuity (0/360, -180/180) * @param previousAngle defines last angle value, in degrees * @param newAngle defines new angle value, in degrees * @param smoothFactor defines smoothing sensitivity; min 0: no smoothing, max 1: new data ignored * @returns the angle in degrees */ static SmoothAngleChange(e, t, i = 0.9) { const r = this.ToRadians(e), s = this.ToRadians(t); return this.ToDegrees(Math.atan2((1 - i) * Math.sin(s) + i * Math.sin(r), (1 - i) * Math.cos(s) + i * Math.cos(r))); } /** * Returns an array if obj is not an array * @param obj defines the object to evaluate as an array * @param allowsNullUndefined defines a boolean indicating if obj is allowed to be null or undefined * @returns either obj directly if obj is an array or a new array containing obj */ static MakeArray(e, t) { return t !== !0 && (e === void 0 || e == null) ? null : Array.isArray(e) ? e : [e]; } /** * Gets the pointer prefix to use * @param engine defines the engine we are finding the prefix for * @returns "pointer" if touch is enabled. Else returns "mouse" */ static GetPointerPrefix(e) { let t = "pointer"; return cu() && !window.PointerEvent && (t = "mouse"), e._badDesktopOS && !e._badOS && // And not ipad pros who claim to be macs... !(document && "ontouchend" in document) && (t = "mouse"), t; } /** * Sets the cors behavior on a dom element. This will add the required Tools.CorsBehavior to the element. * @param url define the url we are trying * @param element define the dom element where to configure the cors policy * @param element.crossOrigin */ static SetCorsBehavior(e, t) { HB(e, t); } /** * Sets the referrerPolicy behavior on a dom element. 
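* Illustrative usage (editor's sketch, not part of the original docs): `Ve` is the minified
* name of this Tools class; the created element and `videoUrl` are assumptions of the example.
* @example
* const video = document.createElement("video");
* Ve.SetReferrerPolicyBehavior("no-referrer", video);
* Ve.SetCorsBehavior(videoUrl, video); // see SetCorsBehavior above — applies the configured CorsBehavior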
* @param referrerPolicy define the referrer policy to use * @param element define the dom element where to configure the referrer policy * @param element.referrerPolicy */ static SetReferrerPolicyBehavior(e, t) { t.referrerPolicy = e; } // External files /** * Removes unwanted characters from an url * @param url defines the url to clean * @returns the cleaned url */ static CleanUrl(e) { return e = e.replace(/#/gm, "%23"), e; } /** * Gets or sets a function used to pre-process url before using them to load assets */ static get PreprocessUrl() { return ou.PreprocessUrl; } static set PreprocessUrl(e) { ou.PreprocessUrl = e; } /** * Loads an image as an HTMLImageElement. * @param input url string, ArrayBuffer, or Blob to load * @param onLoad callback called when the image successfully loads * @param onError callback called when the image fails to load * @param offlineProvider offline provider for caching * @param mimeType optional mime type * @param imageBitmapOptions optional the options to use when creating an ImageBitmap * @returns the HTMLImageElement of the loaded image */ static LoadImage(e, t, i, r, s, n) { return fw(e, t, i, r, s, n); } /** * Loads a file from a url * @param url url string, ArrayBuffer, or Blob to load * @param onSuccess callback called when the file successfully loads * @param onProgress callback called while file is loading (if the server supports this mode) * @param offlineProvider defines the offline provider for caching * @param useArrayBuffer defines a boolean indicating that data must be returned as ArrayBuffer * @param onError callback called when the file fails to load * @returns a file request object */ static LoadFile(e, t, i, r, s, n) { return vT(e, t, i, r, s, n); } /** * Loads a file from a url * @param url the file url to load * @param useArrayBuffer defines a boolean indicating that data must be returned as ArrayBuffer * @returns a promise containing an ArrayBuffer corresponding to the loaded file */ static LoadFileAsync(e, t = !0) { return new Promise((i, r) => { vT(e, (s) => { i(s); }, void 0, void 0, t, (s, n) => { r(n); }); }); } /** * Get a script URL including preprocessing * @param scriptUrl the script Url to process * @returns a modified URL to use */ static GetBabylonScriptURL(e, t) { if (!e) return ""; if (Ve.ScriptBaseUrl && e.startsWith(Ve._DefaultCdnUrl)) { const i = Ve.ScriptBaseUrl[Ve.ScriptBaseUrl.length - 1] === "/" ? Ve.ScriptBaseUrl.substring(0, Ve.ScriptBaseUrl.length - 1) : Ve.ScriptBaseUrl; e = e.replace(Ve._DefaultCdnUrl, i); } return e = Ve.ScriptPreprocessUrl(e), t && (e = Ve.GetAbsoluteUrl(e)), e; } /** * This function is used internally by babylon components to load a script (identified by an url). When the url returns, the * content of this file is added into a new script element, attached to the DOM (body element) * @param scriptUrl defines the url of the script to load * @param onSuccess defines the callback called when the script is loaded * @param onError defines the callback to call if an error occurs * @param scriptId defines the id of the script element */ static LoadBabylonScript(e, t, i, r) { e = Ve.GetBabylonScriptURL(e), Ve.LoadScript(e, t, i); } /** * Load an asynchronous script (identified by an url).
When the url returns, the * content of this file is added into a new script element, attached to the DOM (body element) * @param scriptUrl defines the url of the script to load * @returns a promise request object */ static LoadBabylonScriptAsync(e) { return e = Ve.GetBabylonScriptURL(e), Ve.LoadScriptAsync(e); } /** * This function is used internally by babylon components to load a script (identified by an url). When the url returns, the * content of this file is added into a new script element, attached to the DOM (body element) * @param scriptUrl defines the url of the script to load * @param onSuccess defines the callback called when the script is loaded * @param onError defines the callback to call if an error occurs * @param scriptId defines the id of the script element */ static LoadScript(e, t, i, r) { if (typeof importScripts == "function") { try { importScripts(e), t(); } catch (a) { i == null || i(`Unable to load script '${e}' in worker`, a); } return; } else if (!cu()) { i == null || i(`Cannot load script '${e}' outside of a window or a worker`); return; } const s = document.getElementsByTagName("head")[0], n = document.createElement("script"); n.setAttribute("type", "text/javascript"), n.setAttribute("src", e), r && (n.id = r), n.onload = () => { t && t(); }, n.onerror = (a) => { i && i(`Unable to load script '${e}'`, a); }, s.appendChild(n); } /** * Load an asynchronous script (identified by an url). When the url returns, the * content of this file is added into a new script element, attached to the DOM (body element) * @param scriptUrl defines the url of the script to load * @returns a promise request object */ static LoadScriptAsync(e) { return new Promise((t, i) => { this.LoadScript(e, () => { t(); }, (r, s) => { i(s || new Error(r)); }); }); } /** * Loads a file from a blob * @param fileToLoad defines the blob to use * @param callback defines the callback to call when data is loaded * @param progressCallback defines the callback to call during loading process * @returns a file request object */ static ReadFileAsDataURL(e, t, i) { const r = new FileReader(), s = { onCompleteObservable: new Fe(), abort: () => r.abort() }; return r.onloadend = () => { s.onCompleteObservable.notifyObservers(s); }, r.onload = (n) => { t(n.target.result); }, r.onprogress = i, r.readAsDataURL(e), s; } /** * Reads a file from a File object * @param file defines the file to load * @param onSuccess defines the callback to call when data is loaded * @param onProgress defines the callback to call during loading process * @param useArrayBuffer defines a boolean indicating that data must be returned as an ArrayBuffer * @param onError defines the callback to call when an error occurs * @returns a file request object */ static ReadFile(e, t, i, r, s) { return VO(e, t, i, r, s); } /** * Creates a data url from a given string content * @param content defines the content to convert * @returns the new data url link */ static FileAsURL(e) { const t = new Blob([e]); return window.URL.createObjectURL(t); } /** * Format the given number to a specific decimal format * @param value defines the number to format * @param decimals defines the number of decimals to use * @returns the formatted string */ static Format(e, t = 2) { return e.toFixed(t); } /** * Tries to copy an object by duplicating every property * @param source defines the source object * @param destination defines the target object * @param doNotCopyList defines a list of properties to avoid * @param mustCopyList defines a list of properties to copy
(even if they start with _) */ static DeepCopy(e, t, i, r) { id.DeepCopy(e, t, i, r); } /** * Gets a boolean indicating if the given object has no own property * @param obj defines the object to test * @returns true if object has no own property */ static IsEmpty(e) { for (const t in e) if (Object.prototype.hasOwnProperty.call(e, t)) return !1; return !0; } /** * Function used to register events at window level * @param windowElement defines the Window object to use * @param events defines the events to register */ static RegisterTopRootEvents(e, t) { for (let i = 0; i < t.length; i++) { const r = t[i]; e.addEventListener(r.name, r.handler, !1); try { window.parent && window.parent.addEventListener(r.name, r.handler, !1); } catch { } } } /** * Function used to unregister events from window level * @param windowElement defines the Window object to use * @param events defines the events to unregister */ static UnregisterTopRootEvents(e, t) { for (let i = 0; i < t.length; i++) { const r = t[i]; e.removeEventListener(r.name, r.handler); try { e.parent && e.parent.removeEventListener(r.name, r.handler); } catch { } } } /** * Dumps the current bound framebuffer * @param width defines the rendering width * @param height defines the rendering height * @param engine defines the hosting engine * @param successCallback defines the callback triggered once the data are available * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns a void promise */ static async DumpFramebuffer(e, t, i, r, s = "image/png", n, a) { throw yr("DumpTools"); } /** * Dumps an array buffer * @param width defines the rendering width * @param height defines the rendering height * @param data the data array * @param successCallback defines the callback triggered once the data are available * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param invertY true to invert the picture in the Y dimension * @param toArrayBuffer true to convert the data to an ArrayBuffer (encoded as `mimeType`) instead of a base64 string * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ static DumpData(e, t, i, r, s = "image/png", n, a = !1, l = !1, o) { throw yr("DumpTools"); } /** * Dumps an array buffer * @param width defines the rendering width * @param height defines the rendering height * @param data the data array * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param invertY true to invert the picture in the Y dimension * @param toArrayBuffer true to convert the data to an ArrayBuffer (encoded as `mimeType`) instead of a base64 string * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. 
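* Editor's note (hedged): in the upstream library these Dump* statics appear to be stubs that
* throw an import warning until a companion DumpTools module has been loaded for its side
* effects; assuming it is present, a call might look like the sketch below, where `width`,
* `height` and `pixels` are values supplied by the caller.
* @example
* const base64 = await Ve.DumpDataAsync(width, height, pixels); // mimeType defaults to "image/png"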
* @returns a promise that resolve to the final data */ static DumpDataAsync(e, t, i, r = "image/png", s, n = !1, a = !1, l) { throw yr("DumpTools"); } static _IsOffScreenCanvas(e) { return e.convertToBlob !== void 0; } /** * Converts the canvas data to blob. * This acts as a polyfill for browsers not supporting the to blob function. * @param canvas Defines the canvas to extract the data from (can be an offscreen canvas) * @param successCallback Defines the callback triggered once the data are available * @param mimeType Defines the mime type of the result * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ static ToBlob(e, t, i = "image/png", r) { !Ve._IsOffScreenCanvas(e) && !e.toBlob && (e.toBlob = function(s, n, a) { setTimeout(() => { const l = atob(this.toDataURL(n, a).split(",")[1]), o = l.length, u = new Uint8Array(o); for (let h = 0; h < o; h++) u[h] = l.charCodeAt(h); s(new Blob([u])); }); }), Ve._IsOffScreenCanvas(e) ? e.convertToBlob({ type: i, quality: r }).then((s) => t(s)) : e.toBlob(function(s) { t(s); }, i, r); } /** * Download a Blob object * @param blob the Blob object * @param fileName the file name to download * @returns */ static DownloadBlob(e, t) { if ("download" in document.createElement("a")) { if (!t) { const i = /* @__PURE__ */ new Date(); t = "screenshot_" + ((i.getFullYear() + "-" + (i.getMonth() + 1)).slice(2) + "-" + i.getDate() + "_" + i.getHours() + "-" + ("0" + i.getMinutes()).slice(-2)) + ".png"; } Ve.Download(e, t); } else if (e && typeof URL < "u") { const i = URL.createObjectURL(e), r = window.open(""); if (!r) return; const s = r.document.createElement("img"); s.onload = function() { URL.revokeObjectURL(i); }, s.src = i, r.document.body.appendChild(s); } } /** * Encodes the canvas data to base 64, or automatically downloads the result if `fileName` is defined. * @param canvas The canvas to get the data from, which can be an offscreen canvas. * @param successCallback The callback which is triggered once the data is available. If `fileName` is defined, the callback will be invoked after the download occurs, and the `data` argument will be an empty string. * @param mimeType The mime type of the result. * @param fileName The name of the file to download. If defined, the result will automatically be downloaded. If not defined, and `successCallback` is also not defined, the result will automatically be downloaded with an auto-generated file name. * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. 
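* Illustrative usage (editor's sketch, not from the original docs): `Ve` is the minified name of
* this Tools class and `engine` stands for an existing Engine instance supplied by the caller.
* @example
* Ve.EncodeScreenshotCanvasData(engine.getRenderingCanvas(), (data) => {
*   console.log(data.substring(0, 30)); // "data:image/png;base64,..."
* }, "image/png");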
*/ static EncodeScreenshotCanvasData(e, t, i = "image/png", r, s) { if (typeof r == "string" || !t) this.ToBlob(e, function(n) { n && Ve.DownloadBlob(n, r), t && t(""); }, i, s); else if (t) { if (Ve._IsOffScreenCanvas(e)) { e.convertToBlob({ type: i, quality: s }).then((a) => { const l = new FileReader(); l.readAsDataURL(a), l.onloadend = () => { const o = l.result; t(o); }; }); return; } const n = e.toDataURL(i, s); t(n); } } /** * Downloads a blob in the browser * @param blob defines the blob to download * @param fileName defines the name of the downloaded file */ static Download(e, t) { if (typeof URL > "u") return; const i = window.URL.createObjectURL(e), r = document.createElement("a"); document.body.appendChild(r), r.style.display = "none", r.href = i, r.download = t, r.addEventListener("click", () => { r.parentElement && r.parentElement.removeChild(r); }), r.click(), window.URL.revokeObjectURL(i); } /** * Will return the right value of the noPreventDefault variable * Needed to keep backwards compatibility to the old API. * * @param args arguments passed to the attachControl function * @returns the correct value for noPreventDefault */ static BackCompatCameraNoPreventDefault(e) { return typeof e[0] == "boolean" ? e[0] : typeof e[1] == "boolean" ? e[1] : !1; } /** * Captures a screenshot of the current rendering * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine defines the rendering engine * @param camera defines the source camera * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param successCallback defines the callback receives a single parameter which contains the * screenshot as a string of base64-encoded characters. This string can be assigned to the * src parameter of an to display it * @param mimeType defines the MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param forceDownload force the system to download the image even if a successCallback is provided * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static CreateScreenshot(e, t, i, r, s = "image/png", n = !1, a) { throw yr("ScreenshotTools"); } /** * Captures a screenshot of the current rendering * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine defines the rendering engine * @param camera defines the source camera * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param mimeType defines the MIME type of the screenshot image (default: image/png). 
* Check your browser for supported MIME types * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns screenshot as a string of base64-encoded characters. This string can be assigned * to the src parameter of an to display it */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static CreateScreenshotAsync(e, t, i, r = "image/png", s) { throw yr("ScreenshotTools"); } /** * Generates an image screenshot from the specified camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine The engine to use for rendering * @param camera The camera to use for rendering * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param successCallback The callback receives a single parameter which contains the * screenshot as a string of base64-encoded characters. This string can be assigned to the * src parameter of an to display it * @param mimeType The MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param samples Texture samples (default: 1) * @param antialiasing Whether antialiasing should be turned on or not (default: false) * @param fileName A name for for the downloaded file. * @param renderSprites Whether the sprites should be rendered or not (default: false) * @param enableStencilBuffer Whether the stencil buffer should be enabled or not (default: false) * @param useLayerMask if the camera's layer mask should be used to filter what should be rendered (default: true) * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static CreateScreenshotUsingRenderTarget(e, t, i, r, s = "image/png", n = 1, a = !1, l, o = !1, u = !1, h = !0, d) { throw yr("ScreenshotTools"); } /** * Generates an image screenshot from the specified camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine The engine to use for rendering * @param camera The camera to use for rendering * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param mimeType The MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param samples Texture samples (default: 1) * @param antialiasing Whether antialiasing should be turned on or not (default: false) * @param fileName A name for for the downloaded file. * @returns screenshot as a string of base64-encoded characters. 
This string can be assigned * @param renderSprites Whether the sprites should be rendered or not (default: false) * @param enableStencilBuffer Whether the stencil buffer should be enabled or not (default: false) * @param useLayerMask if the camera's layer mask should be used to filter what should be rendered (default: true) * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * to the src parameter of an to display it */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static CreateScreenshotUsingRenderTargetAsync(e, t, i, r = "image/png", s = 1, n = !1, a, l = !1, o = !1, u = !0, h) { throw yr("ScreenshotTools"); } /** * Implementation from http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/2117523#answer-2117523 * Be aware Math.random() could cause collisions, but: * "All but 6 of the 128 bits of the ID are randomly generated, which means that for any two ids, there's a 1 in 2^^122 (or 5.3x10^^36) chance they'll collide" * @returns a pseudo random id */ static RandomId() { return G_(); } /** * Test if the given uri is a base64 string * @deprecated Please use FileTools.IsBase64DataUrl instead. * @param uri The uri to test * @returns True if the uri is a base64 string or false otherwise */ static IsBase64(e) { return wL(e); } /** * Decode the given base64 uri. * @deprecated Please use FileTools.DecodeBase64UrlToBinary instead. * @param uri The uri to decode * @returns The decoded base64 data. */ static DecodeBase64(e) { return pw(e); } /** * Gets a value indicating the number of loading errors * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention static get errorsCount() { return Ce.errorsCount; } /** * Log a message to the console * @param message defines the message to log */ static Log(e) { Ce.Log(e); } /** * Write a warning message to the console * @param message defines the message to log */ static Warn(e) { Ce.Warn(e); } /** * Write an error message to the console * @param message defines the message to log */ static Error(e) { Ce.Error(e); } /** * Gets current log cache (list of logs) */ static get LogCache() { return Ce.LogCache; } /** * Clears the log cache */ static ClearLogCache() { Ce.ClearLogCache(); } /** * Sets the current log level (MessageLogLevel / WarningLogLevel / ErrorLogLevel) */ static set LogLevels(e) { Ce.LogLevels = e; } /** * Sets the current performance log level */ static set PerformanceLogLevel(e) { if ((e & Ve.PerformanceUserMarkLogLevel) === Ve.PerformanceUserMarkLogLevel) { Ve.StartPerformanceCounter = Ve._StartUserMark, Ve.EndPerformanceCounter = Ve._EndUserMark; return; } if ((e & Ve.PerformanceConsoleLogLevel) === Ve.PerformanceConsoleLogLevel) { Ve.StartPerformanceCounter = Ve._StartPerformanceConsole, Ve.EndPerformanceCounter = Ve._EndPerformanceConsole; return; } Ve.StartPerformanceCounter = Ve._StartPerformanceCounterDisabled, Ve.EndPerformanceCounter = Ve._EndPerformanceCounterDisabled; } // eslint-disable-next-line @typescript-eslint/no-unused-vars static _StartPerformanceCounterDisabled(e, t) { } // eslint-disable-next-line @typescript-eslint/no-unused-vars static _EndPerformanceCounterDisabled(e, t) { } static _StartUserMark(e, t = !0) { if (!Ve._Performance) { if (!cu()) return; Ve._Performance = window.performance; } !t || !Ve._Performance.mark || Ve._Performance.mark(e + "-Begin"); } static 
_EndUserMark(e, t = !0) { !t || !Ve._Performance.mark || (Ve._Performance.mark(e + "-End"), Ve._Performance.measure(e, e + "-Begin", e + "-End")); } static _StartPerformanceConsole(e, t = !0) { t && (Ve._StartUserMark(e, t), console.time && console.time(e)); } static _EndPerformanceConsole(e, t = !0) { t && (Ve._EndUserMark(e, t), console.timeEnd(e)); } /** * Gets either window.performance.now() if supported or Date.now() else */ static get Now() { return Gs.Now; } /** * This method will return the name of the class used to create the instance of the given object. * It will works only on Javascript basic data types (number, string, ...) and instance of class declared with the @className decorator. * @param object the object to get the class name from * @param isType defines if the object is actually a type * @returns the name of the class, will be "object" for a custom data type not using the @className decorator */ static GetClassName(e, t = !1) { let i = null; return !t && e.getClassName ? i = e.getClassName() : (e instanceof Object && (i = (t ? e : Object.getPrototypeOf(e)).constructor.__bjsclassName__), i || (i = typeof e)), i; } /** * Gets the first element of an array satisfying a given predicate * @param array defines the array to browse * @param predicate defines the predicate to use * @returns null if not found or the element */ static First(e, t) { for (const i of e) if (t(i)) return i; return null; } /** * This method will return the name of the full name of the class, including its owning module (if any). * It will works only on Javascript basic data types (number, string, ...) and instance of class declared with the @className decorator or implementing a method getClassName():string (in which case the module won't be specified). * @param object the object to get the class name from * @param isType defines if the object is actually a type * @returns a string that can have two forms: "moduleName.className" if module was specified when the class' Name was registered or "className" if there was not module specified. * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention static getFullClassName(e, t = !1) { let i = null, r = null; if (!t && e.getClassName) i = e.getClassName(); else { if (e instanceof Object) { const s = t ? e : Object.getPrototypeOf(e); i = s.constructor.__bjsclassName__, r = s.constructor.__bjsmoduleName__; } i || (i = typeof e); } return i ? (r != null ? r + "." : "") + i : null; } /** * Returns a promise that resolves after the given amount of time. * @param delay Number of milliseconds to delay * @returns Promise that resolves after the given amount of time */ static DelayAsync(e) { return new Promise((t) => { setTimeout(() => { t(); }, e); }); } /** * Utility function to detect if the current user agent is Safari * @returns whether or not the current user agent is safari */ static IsSafari() { return GR() ? /^((?!chrome|android).)*safari/i.test(navigator.userAgent) : !1; } } Ve.UseCustomRequestHeaders = !1; Ve.CustomRequestHeaders = go.CustomRequestHeaders; Ve.GetDOMTextContent = IL; Ve._DefaultCdnUrl = "https://cdn.babylonjs.com"; Ve.GetAbsoluteUrl = typeof document == "object" ? (c) => { const e = document.createElement("a"); return e.href = c, e.href; } : typeof URL == "function" && typeof location == "object" ? (c) => new URL(c, location.origin).href : () => { throw new Error("Unable to get absolute URL. 
Override BABYLON.Tools.GetAbsoluteUrl to a custom implementation for the current context."); }; Ve.NoneLogLevel = Ce.NoneLogLevel; Ve.MessageLogLevel = Ce.MessageLogLevel; Ve.WarningLogLevel = Ce.WarningLogLevel; Ve.ErrorLogLevel = Ce.ErrorLogLevel; Ve.AllLogLevel = Ce.AllLogLevel; Ve.IsWindowObjectExist = cu; Ve.PerformanceNoneLogLevel = 0; Ve.PerformanceUserMarkLogLevel = 1; Ve.PerformanceConsoleLogLevel = 2; Ve.StartPerformanceCounter = Ve._StartPerformanceCounterDisabled; Ve.EndPerformanceCounter = Ve._EndPerformanceCounterDisabled; function tce(c, e) { return (t) => { t.__bjsclassName__ = c, t.__bjsmoduleName__ = e ?? null; }; } class ug { /** * Constructor. * @param iterations the number of iterations. * @param func the function to run each iteration * @param successCallback the callback that will be called upon successful execution * @param offset starting offset. */ constructor(e, t, i, r = 0) { this.iterations = e, this.index = r - 1, this._done = !1, this._fn = t, this._successCallback = i; } /** * Execute the next iteration. Must be called after the last iteration was finished. */ executeNext() { this._done || (this.index + 1 < this.iterations ? (++this.index, this._fn(this)) : this.breakLoop()); } /** * Break the loop and run the success callback. */ breakLoop() { this._done = !0, this._successCallback(); } /** * Create and run an async loop. * @param iterations the number of iterations. * @param fn the function to run each iteration * @param successCallback the callback that will be called upon successful execution * @param offset starting offset. * @returns the created async loop object */ static Run(e, t, i, r = 0) { const s = new ug(e, t, i, r); return s.executeNext(), s; } /** * A for-loop that will run a given number of iterations synchronous and the rest async. * @param iterations total number of iterations * @param syncedIterations number of synchronous iterations in each async iteration. * @param fn the function to call each iteration. * @param callback a success call back that will be called when iterating stops. * @param breakFunction a break condition (optional) * @param timeout timeout settings for the setTimeout function. default - 0. * @returns the created async loop object */ static SyncAsyncForLoop(e, t, i, r, s, n = 0) { return ug.Run(Math.ceil(e / t), (a) => { s && s() ? 
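/*
 * Editor's note (illustrative, not part of the bundle): `ug` appears to be the library's
 * AsyncLoop helper. The surrounding SyncAsyncForLoop slices the work into batches of
 * `syncedIterations` synchronous steps separated by setTimeout, so long loops do not block
 * the UI thread. A hedged usage sketch, where `processItem` is a hypothetical per-item function:
 *
 *   ug.SyncAsyncForLoop(10000, 100, (i) => processItem(i), () => console.log("done"));
 */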
a.breakLoop() : setTimeout(() => { for (let l = 0; l < t; ++l) { const o = a.index * t + l; if (o >= e) break; if (i(o), s && s()) { a.breakLoop(); break; } } a.executeNext(); }, n); }, r); } } gi.FallbackTexture = "data:image/jpg;base64,/9j/4AAQSkZJRgABAQEAYABgAAD/4QBmRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAAQAAAATgAAAAAAAABgAAAAAQAAAGAAAAABcGFpbnQubmV0IDQuMC41AP/bAEMABAIDAwMCBAMDAwQEBAQFCQYFBQUFCwgIBgkNCw0NDQsMDA4QFBEODxMPDAwSGBITFRYXFxcOERkbGRYaFBYXFv/bAEMBBAQEBQUFCgYGChYPDA8WFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFv/AABEIAQABAAMBIgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/APH6KKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FCiiigD6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++gooooA+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gUKKKKAPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76CiiigD5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BQooooA+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/voKKKKAPl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FCiiigD6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++gooooA+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gUKKKKAPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76CiiigD5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BQooooA+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/voKKKKAPl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FCiiigD6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76P
l+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++gooooA+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gUKKKKAPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76Pl+iiivuj+BT6gooor4U/vo+X6KKK+6P4FPqCiiivhT++j5fooor7o/gU+oKKKK+FP76P//Z"; class xc { /** * Instantiates a Smart Array. * @param capacity defines the default capacity of the array. */ constructor(e) { this.length = 0, this.data = new Array(e), this._id = xc._GlobalId++; } /** * Pushes a value at the end of the active data. * @param value defines the object to push in the array. */ push(e) { this.data[this.length++] = e, this.length > this.data.length && (this.data.length *= 2); } /** * Iterates over the active data and apply the lambda to them. * @param func defines the action to apply on each value. */ forEach(e) { for (let t = 0; t < this.length; t++) e(this.data[t]); } /** * Sorts the full sets of data. * @param compareFn defines the comparison function to apply. */ sort(e) { this.data.sort(e); } /** * Resets the active data to an empty array. */ reset() { this.length = 0; } /** * Releases all the data from the array as well as the array. */ dispose() { this.reset(), this.data && (this.data.length = 0); } /** * Concats the active data with a given array. * @param array defines the data to concatenate with. */ concat(e) { if (e.length !== 0) { this.length + e.length > this.data.length && (this.data.length = (this.length + e.length) * 2); for (let t = 0; t < e.length; t++) this.data[this.length++] = (e.data || e)[t]; } } /** * Returns the position of a value in the active data. * @param value defines the value to find the index for * @returns the index if found in the active data otherwise -1 */ indexOf(e) { const t = this.data.indexOf(e); return t >= this.length ? -1 : t; } /** * Returns whether an element is part of the active data. * @param value defines the value to look for * @returns true if found in the active data otherwise false */ contains(e) { return this.indexOf(e) !== -1; } } xc._GlobalId = 0; class XE extends xc { constructor() { super(...arguments), this._duplicateId = 0; } /** * Pushes a value at the end of the active data. * THIS DOES NOT PREVENT DUPPLICATE DATA * @param value defines the object to push in the array. */ push(e) { super.push(e), e.__smartArrayFlags || (e.__smartArrayFlags = {}), e.__smartArrayFlags[this._id] = this._duplicateId; } /** * Pushes a value at the end of the active data. * If the data is already present, it won t be added again * @param value defines the object to push in the array. * @returns true if added false if it was already present */ pushNoDuplicate(e) { return e.__smartArrayFlags && e.__smartArrayFlags[this._id] === this._duplicateId ? !1 : (this.push(e), !0); } /** * Resets the active data to an empty array. */ reset() { super.reset(), this._duplicateId++; } /** * Concats the active data with a given array. * This ensures no duplicate will be present in the result. * @param array defines the data to concatenate with. 
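* Illustrative usage (editor's sketch, not from the original docs): `XE` appears to be the
* minified SmartArrayNoDuplicate; `mesh` and `otherArray` are placeholders supplied by the caller.
* @example
* const renderList = new XE(16);
* renderList.pushNoDuplicate(mesh);        // returns false if `mesh` was already pushed
* renderList.concatWithNoDuplicate(otherArray);
* renderList.reset();                      // cheap: bumps an internal duplicate id instead of clearing flags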
*/ concatWithNoDuplicate(e) { if (e.length !== 0) { this.length + e.length > this.data.length && (this.data.length = (this.length + e.length) * 2); for (let t = 0; t < e.length; t++) { const i = (e.data || e)[t]; this.pushNoDuplicate(i); } } } } class iB { constructor() { this._count = 0, this._data = {}; } /** * This will clear this dictionary and copy the content from the 'source' one. * If the T value is a custom object, it won't be copied/cloned, the same object will be used * @param source the dictionary to take the content from and copy to this dictionary */ copyFrom(e) { this.clear(), e.forEach((t, i) => this.add(t, i)); } /** * Get a value based from its key * @param key the given key to get the matching value from * @returns the value if found, otherwise undefined is returned */ get(e) { const t = this._data[e]; if (t !== void 0) return t; } /** * Get a value from its key or add it if it doesn't exist. * This method will ensure you that a given key/data will be present in the dictionary. * @param key the given key to get the matching value from * @param factory the factory that will create the value if the key is not present in the dictionary. * The factory will only be invoked if there's no data for the given key. * @returns the value corresponding to the key. */ getOrAddWithFactory(e, t) { let i = this.get(e); return i !== void 0 || (i = t(e), i && this.add(e, i)), i; } /** * Get a value from its key if present in the dictionary otherwise add it * @param key the key to get the value from * @param val if there's no such key/value pair in the dictionary add it with this value * @returns the value corresponding to the key */ getOrAdd(e, t) { const i = this.get(e); return i !== void 0 ? i : (this.add(e, t), t); } /** * Check if there's a given key in the dictionary * @param key the key to check for * @returns true if the key is present, false otherwise */ contains(e) { return this._data[e] !== void 0; } /** * Add a new key and its corresponding value * @param key the key to add * @param value the value corresponding to the key * @returns true if the operation completed successfully, false if we couldn't insert the key/value because there was already this key in the dictionary */ add(e, t) { return this._data[e] !== void 0 ? !1 : (this._data[e] = t, ++this._count, !0); } /** * Update a specific value associated to a key * @param key defines the key to use * @param value defines the value to store * @returns true if the value was updated (or false if the key was not found) */ set(e, t) { return this._data[e] === void 0 ? !1 : (this._data[e] = t, !0); } /** * Get the element of the given key and remove it from the dictionary * @param key defines the key to search * @returns the value associated with the key or null if not found */ getAndRemove(e) { const t = this.get(e); return t !== void 0 ? (delete this._data[e], --this._count, t) : null; } /** * Remove a key/value from the dictionary. * @param key the key to remove * @returns true if the item was successfully deleted, false if no item with such key exist in the dictionary */ remove(e) { return this.contains(e) ? (delete this._data[e], --this._count, !0) : !1; } /** * Clear the whole content of the dictionary */ clear() { this._data = {}, this._count = 0; } /** * Gets the current count */ get count() { return this._count; } /** * Execute a callback on each key/val of the dictionary. 
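* Illustrative usage of the surrounding dictionary class (editor's sketch, not from the original
* docs): `iB` appears to be the minified StringDictionary; `texture` and `createTexture` are
* placeholders supplied by the caller.
* @example
* const cache = new iB();
* cache.add("skybox", texture);                          // returns false if the key already exists
* const ground = cache.getOrAddWithFactory("ground", (key) => createTexture(key));
* cache.forEach((key, value) => console.log(key, value));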
* Note that you can remove any element in this dictionary in the callback implementation * @param callback the callback to execute on a given key/value pair */ forEach(e) { for (const t in this._data) { const i = this._data[t]; e(t, i); } } /** * Execute a callback on every occurrence of the dictionary until it returns a valid TRes object. * If the callback returns null or undefined the method will iterate to the next key/value pair * Note that you can remove any element in this dictionary in the callback implementation * @param callback the callback to execute, if it return a valid T instanced object the enumeration will stop and the object will be returned * @returns the first item */ first(e) { for (const t in this._data) { const i = this._data[t], r = e(t, i); if (r) return r; } return null; } } class sa { /** * Creates a new instance * @param externalProperties list of external properties to inject into the object */ constructor(e) { if (this._keys = [], this._isDirty = !0, this._areLightsDirty = !0, this._areLightsDisposed = !1, this._areAttributesDirty = !0, this._areTexturesDirty = !0, this._areFresnelDirty = !0, this._areMiscDirty = !0, this._arePrePassDirty = !0, this._areImageProcessingDirty = !0, this._normals = !1, this._uvs = !1, this._needNormals = !1, this._needUVs = !1, this._externalProperties = e, e) for (const t in e) Object.prototype.hasOwnProperty.call(e, t) && this._setDefaultValue(t); } /** * Specifies if the material needs to be re-calculated */ get isDirty() { return this._isDirty; } /** * Marks the material to indicate that it has been re-calculated */ markAsProcessed() { this._isDirty = !1, this._areAttributesDirty = !1, this._areTexturesDirty = !1, this._areFresnelDirty = !1, this._areLightsDirty = !1, this._areLightsDisposed = !1, this._areMiscDirty = !1, this._arePrePassDirty = !1, this._areImageProcessingDirty = !1; } /** * Marks the material to indicate that it needs to be re-calculated */ markAsUnprocessed() { this._isDirty = !0; } /** * Marks the material to indicate all of its defines need to be re-calculated */ markAllAsDirty() { this._areTexturesDirty = !0, this._areAttributesDirty = !0, this._areLightsDirty = !0, this._areFresnelDirty = !0, this._areMiscDirty = !0, this._arePrePassDirty = !1, this._areImageProcessingDirty = !0, this._isDirty = !0; } /** * Marks the material to indicate that image processing needs to be re-calculated */ markAsImageProcessingDirty() { this._areImageProcessingDirty = !0, this._isDirty = !0; } /** * Marks the material to indicate the lights need to be re-calculated * @param disposed Defines whether the light is dirty due to dispose or not */ markAsLightDirty(e = !1) { this._areLightsDirty = !0, this._areLightsDisposed = this._areLightsDisposed || e, this._isDirty = !0; } /** * Marks the attribute state as changed */ markAsAttributesDirty() { this._areAttributesDirty = !0, this._isDirty = !0; } /** * Marks the texture state as changed */ markAsTexturesDirty() { this._areTexturesDirty = !0, this._isDirty = !0; } /** * Marks the fresnel state as changed */ markAsFresnelDirty() { this._areFresnelDirty = !0, this._isDirty = !0; } /** * Marks the misc state as changed */ markAsMiscDirty() { this._areMiscDirty = !0, this._isDirty = !0; } /** * Marks the prepass state as changed */ markAsPrePassDirty() { this._arePrePassDirty = !0, this._isDirty = !0; } /** * Rebuilds the material defines */ rebuild() { this._keys.length = 0; for (const e of Object.keys(this)) e[0] !== "_" && this._keys.push(e); if (this._externalProperties) for 
(const e in this._externalProperties) this._keys.indexOf(e) === -1 && this._keys.push(e); } /** * Specifies if two material defines are equal * @param other - A material define instance to compare to * @returns - Boolean indicating if the material defines are equal (true) or not (false) */ isEqual(e) { if (this._keys.length !== e._keys.length) return !1; for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t]; if (this[i] !== e[i]) return !1; } return !0; } /** * Clones this instance's defines to another instance * @param other - material defines to clone values to */ cloneTo(e) { this._keys.length !== e._keys.length && (e._keys = this._keys.slice(0)); for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t]; e[i] = this[i]; } } /** * Resets the material define values */ reset() { this._keys.forEach((e) => this._setDefaultValue(e)); } _setDefaultValue(e) { var t, i, r, s, n; const a = (r = (i = (t = this._externalProperties) === null || t === void 0 ? void 0 : t[e]) === null || i === void 0 ? void 0 : i.type) !== null && r !== void 0 ? r : typeof this[e], l = (n = (s = this._externalProperties) === null || s === void 0 ? void 0 : s[e]) === null || n === void 0 ? void 0 : n.default; switch (a) { case "number": this[e] = l ?? 0; break; case "string": this[e] = l ?? ""; break; default: this[e] = l ?? !1; break; } } /** * Converts the material define values to a string * @returns - String of material define information */ toString() { let e = ""; for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t], r = this[i]; switch (typeof r) { case "number": case "string": e += "#define " + i + " " + r + ` `; break; default: r && (e += "#define " + i + ` `); break; } } return e; } } class so { constructor() { this._dirty = !0, this._tempColor = new Et(0, 0, 0, 0), this._globalCurve = new Et(0, 0, 0, 0), this._highlightsCurve = new Et(0, 0, 0, 0), this._midtonesCurve = new Et(0, 0, 0, 0), this._shadowsCurve = new Et(0, 0, 0, 0), this._positiveCurve = new Et(0, 0, 0, 0), this._negativeCurve = new Et(0, 0, 0, 0), this._globalHue = 30, this._globalDensity = 0, this._globalSaturation = 0, this._globalExposure = 0, this._highlightsHue = 30, this._highlightsDensity = 0, this._highlightsSaturation = 0, this._highlightsExposure = 0, this._midtonesHue = 30, this._midtonesDensity = 0, this._midtonesSaturation = 0, this._midtonesExposure = 0, this._shadowsHue = 30, this._shadowsDensity = 0, this._shadowsSaturation = 0, this._shadowsExposure = 0; } /** * Gets the global Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ get globalHue() { return this._globalHue; } /** * Sets the global Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ set globalHue(e) { this._globalHue = e, this._dirty = !0; } /** * Gets the global Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ get globalDensity() { return this._globalDensity; } /** * Sets the global Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. 
*/ set globalDensity(e) { this._globalDensity = e, this._dirty = !0; } /** * Gets the global Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ get globalSaturation() { return this._globalSaturation; } /** * Sets the global Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ set globalSaturation(e) { this._globalSaturation = e, this._dirty = !0; } /** * Gets the global Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ get globalExposure() { return this._globalExposure; } /** * Sets the global Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ set globalExposure(e) { this._globalExposure = e, this._dirty = !0; } /** * Gets the highlights Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ get highlightsHue() { return this._highlightsHue; } /** * Sets the highlights Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ set highlightsHue(e) { this._highlightsHue = e, this._dirty = !0; } /** * Gets the highlights Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ get highlightsDensity() { return this._highlightsDensity; } /** * Sets the highlights Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ set highlightsDensity(e) { this._highlightsDensity = e, this._dirty = !0; } /** * Gets the highlights Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ get highlightsSaturation() { return this._highlightsSaturation; } /** * Sets the highlights Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ set highlightsSaturation(e) { this._highlightsSaturation = e, this._dirty = !0; } /** * Gets the highlights Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ get highlightsExposure() { return this._highlightsExposure; } /** * Sets the highlights Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. 
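* Usage sketch (hedged): in the unminified Babylon.js sources this class is ColorCurves; it is
* consumed through the image processing configuration defined further below (assumed to be
* reachable as scene.imageProcessingConfiguration, which is not part of this excerpt):
* @example
* const curves = new BABYLON.ColorCurves();
* curves.globalHue = 200;         // cool the overall hue
* curves.globalSaturation = 20;   // mild saturation boost
* curves.highlightsExposure = 10; // lift the highlights only
* scene.imageProcessingConfiguration.colorCurvesEnabled = true;
* scene.imageProcessingConfiguration.colorCurves = curves;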
*/ set highlightsExposure(e) { this._highlightsExposure = e, this._dirty = !0; } /** * Gets the midtones Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ get midtonesHue() { return this._midtonesHue; } /** * Sets the midtones Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ set midtonesHue(e) { this._midtonesHue = e, this._dirty = !0; } /** * Gets the midtones Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ get midtonesDensity() { return this._midtonesDensity; } /** * Sets the midtones Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ set midtonesDensity(e) { this._midtonesDensity = e, this._dirty = !0; } /** * Gets the midtones Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ get midtonesSaturation() { return this._midtonesSaturation; } /** * Sets the midtones Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ set midtonesSaturation(e) { this._midtonesSaturation = e, this._dirty = !0; } /** * Gets the midtones Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ get midtonesExposure() { return this._midtonesExposure; } /** * Sets the midtones Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ set midtonesExposure(e) { this._midtonesExposure = e, this._dirty = !0; } /** * Gets the shadows Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ get shadowsHue() { return this._shadowsHue; } /** * Sets the shadows Hue value. * The hue value is a standard HSB hue in the range [0,360] where 0=red, 120=green and 240=blue. The default value is 30 degrees (orange). */ set shadowsHue(e) { this._shadowsHue = e, this._dirty = !0; } /** * Gets the shadows Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ get shadowsDensity() { return this._shadowsDensity; } /** * Sets the shadows Density value. * The density value is in range [-100,+100] where 0 means the color filter has no effect and +100 means the color filter has maximum effect. * Values less than zero provide a filter of opposite hue. */ set shadowsDensity(e) { this._shadowsDensity = e, this._dirty = !0; } /** * Gets the shadows Saturation value. 
* This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ get shadowsSaturation() { return this._shadowsSaturation; } /** * Sets the shadows Saturation value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase saturation and negative values decrease saturation. */ set shadowsSaturation(e) { this._shadowsSaturation = e, this._dirty = !0; } /** * Gets the shadows Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ get shadowsExposure() { return this._shadowsExposure; } /** * Sets the shadows Exposure value. * This is an adjustment value in the range [-100,+100], where the default value of 0.0 makes no adjustment, positive values increase exposure and negative values decrease exposure. */ set shadowsExposure(e) { this._shadowsExposure = e, this._dirty = !0; } /** * Returns the class name * @returns The class name */ getClassName() { return "ColorCurves"; } /** * Binds the color curves to the shader. * @param colorCurves The color curve to bind * @param effect The effect to bind to * @param positiveUniform The positive uniform shader parameter * @param neutralUniform The neutral uniform shader parameter * @param negativeUniform The negative uniform shader parameter */ static Bind(e, t, i = "vCameraColorCurvePositive", r = "vCameraColorCurveNeutral", s = "vCameraColorCurveNegative") { e._dirty && (e._dirty = !1, e._getColorGradingDataToRef(e._globalHue, e._globalDensity, e._globalSaturation, e._globalExposure, e._globalCurve), e._getColorGradingDataToRef(e._highlightsHue, e._highlightsDensity, e._highlightsSaturation, e._highlightsExposure, e._tempColor), e._tempColor.multiplyToRef(e._globalCurve, e._highlightsCurve), e._getColorGradingDataToRef(e._midtonesHue, e._midtonesDensity, e._midtonesSaturation, e._midtonesExposure, e._tempColor), e._tempColor.multiplyToRef(e._globalCurve, e._midtonesCurve), e._getColorGradingDataToRef(e._shadowsHue, e._shadowsDensity, e._shadowsSaturation, e._shadowsExposure, e._tempColor), e._tempColor.multiplyToRef(e._globalCurve, e._shadowsCurve), e._highlightsCurve.subtractToRef(e._midtonesCurve, e._positiveCurve), e._midtonesCurve.subtractToRef(e._shadowsCurve, e._negativeCurve)), t && (t.setFloat4(i, e._positiveCurve.r, e._positiveCurve.g, e._positiveCurve.b, e._positiveCurve.a), t.setFloat4(r, e._midtonesCurve.r, e._midtonesCurve.g, e._midtonesCurve.b, e._midtonesCurve.a), t.setFloat4(s, e._negativeCurve.r, e._negativeCurve.g, e._negativeCurve.b, e._negativeCurve.a)); } /** * Prepare the list of uniforms associated with the ColorCurves effects. * @param uniformsList The list of uniforms used in the effect */ static PrepareUniforms(e) { e.push("vCameraColorCurveNeutral", "vCameraColorCurvePositive", "vCameraColorCurveNegative"); } /** * Returns color grading data based on a hue, density, saturation and exposure value. * @param hue * @param density * @param saturation The saturation. * @param exposure The exposure. * @param result The result data container. 
*/ _getColorGradingDataToRef(e, t, i, r, s) { e != null && (e = so._Clamp(e, 0, 360), t = so._Clamp(t, -100, 100), i = so._Clamp(i, -100, 100), r = so._Clamp(r, -100, 100), t = so._ApplyColorGradingSliderNonlinear(t), t *= 0.5, r = so._ApplyColorGradingSliderNonlinear(r), t < 0 && (t *= -1, e = (e + 180) % 360), so._FromHSBToRef(e, t, 50 + 0.25 * r, s), s.scaleToRef(2, s), s.a = 1 + 0.01 * i); } /** * Takes an input slider value and returns an adjusted value that provides extra control near the centre. * @param value The input slider value in range [-100,100]. * @returns Adjusted value. */ static _ApplyColorGradingSliderNonlinear(e) { e /= 100; let t = Math.abs(e); return t = Math.pow(t, 2), e < 0 && (t *= -1), t *= 100, t; } /** * Returns an RGBA Color4 based on Hue, Saturation and Brightness (also referred to as value, HSV). * @param hue The hue (H) input. * @param saturation The saturation (S) input. * @param brightness The brightness (B) input. * @param result * @result An RGBA color represented as Vector4. */ static _FromHSBToRef(e, t, i, r) { let s = so._Clamp(e, 0, 360); const n = so._Clamp(t / 100, 0, 1), a = so._Clamp(i / 100, 0, 1); if (n === 0) r.r = a, r.g = a, r.b = a; else { s /= 60; const l = Math.floor(s), o = s - l, u = a * (1 - n), h = a * (1 - n * o), d = a * (1 - n * (1 - o)); switch (l) { case 0: r.r = a, r.g = d, r.b = u; break; case 1: r.r = h, r.g = a, r.b = u; break; case 2: r.r = u, r.g = a, r.b = d; break; case 3: r.r = u, r.g = h, r.b = a; break; case 4: r.r = d, r.g = u, r.b = a; break; default: r.r = a, r.g = u, r.b = h; break; } } r.a = 1; } /** * Returns a value clamped between min and max * @param value The value to clamp * @param min The minimum of value * @param max The maximum of value * @returns The clamped value. */ static _Clamp(e, t, i) { return Math.min(Math.max(e, t), i); } /** * Clones the current color curve instance. * @returns The cloned curves */ clone() { return St.Clone(() => new so(), this); } /** * Serializes the current color curve instance to a json representation. * @returns a JSON representation */ serialize() { return St.Serialize(this); } /** * Parses the color curve from a json representation. 
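* Round-trip sketch (hedged — assumes the public ColorCurves name for this minified class):
* @example
* const curves = new BABYLON.ColorCurves();
* const json = curves.serialize();                 // plain object built from the decorated fields
* const restored = BABYLON.ColorCurves.Parse(json);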
* @param source the JSON source to parse * @returns The parsed curves */ static Parse(e) { return St.Parse(() => new so(), e, null, null); } } F([ W() ], so.prototype, "_globalHue", void 0); F([ W() ], so.prototype, "_globalDensity", void 0); F([ W() ], so.prototype, "_globalSaturation", void 0); F([ W() ], so.prototype, "_globalExposure", void 0); F([ W() ], so.prototype, "_highlightsHue", void 0); F([ W() ], so.prototype, "_highlightsDensity", void 0); F([ W() ], so.prototype, "_highlightsSaturation", void 0); F([ W() ], so.prototype, "_highlightsExposure", void 0); F([ W() ], so.prototype, "_midtonesHue", void 0); F([ W() ], so.prototype, "_midtonesDensity", void 0); F([ W() ], so.prototype, "_midtonesSaturation", void 0); F([ W() ], so.prototype, "_midtonesExposure", void 0); St._ColorCurvesParser = so.Parse; class fte extends sa { constructor() { super(), this.IMAGEPROCESSING = !1, this.VIGNETTE = !1, this.VIGNETTEBLENDMODEMULTIPLY = !1, this.VIGNETTEBLENDMODEOPAQUE = !1, this.TONEMAPPING = !1, this.TONEMAPPING_ACES = !1, this.CONTRAST = !1, this.COLORCURVES = !1, this.COLORGRADING = !1, this.COLORGRADING3D = !1, this.SAMPLER3DGREENDEPTH = !1, this.SAMPLER3DBGRMAP = !1, this.DITHER = !1, this.IMAGEPROCESSINGPOSTPROCESS = !1, this.EXPOSURE = !1, this.SKIPFINALCOLORCLAMP = !1, this.rebuild(); } } class Ds { constructor() { this.colorCurves = new so(), this._colorCurvesEnabled = !1, this._colorGradingEnabled = !1, this._colorGradingWithGreenDepth = !0, this._colorGradingBGR = !0, this._exposure = 1, this._toneMappingEnabled = !1, this._toneMappingType = Ds.TONEMAPPING_STANDARD, this._contrast = 1, this.vignetteStretch = 0, this.vignetteCenterX = 0, this.vignetteCenterY = 0, this.vignetteWeight = 1.5, this.vignetteColor = new Et(0, 0, 0, 0), this.vignetteCameraFov = 0.5, this._vignetteBlendMode = Ds.VIGNETTEMODE_MULTIPLY, this._vignetteEnabled = !1, this._ditheringEnabled = !1, this._ditheringIntensity = 1 / 255, this._skipFinalColorClamp = !1, this._applyByPostProcess = !1, this._isEnabled = !0, this.onUpdateParameters = new Fe(); } /** * Gets whether the color curves effect is enabled. */ get colorCurvesEnabled() { return this._colorCurvesEnabled; } /** * Sets whether the color curves effect is enabled. */ set colorCurvesEnabled(e) { this._colorCurvesEnabled !== e && (this._colorCurvesEnabled = e, this._updateParameters()); } /** * Color grading LUT texture used in the effect if colorGradingEnabled is set to true */ get colorGradingTexture() { return this._colorGradingTexture; } /** * Color grading LUT texture used in the effect if colorGradingEnabled is set to true */ set colorGradingTexture(e) { this._colorGradingTexture !== e && (this._colorGradingTexture = e, this._updateParameters()); } /** * Gets whether the color grading effect is enabled. */ get colorGradingEnabled() { return this._colorGradingEnabled; } /** * Sets whether the color grading effect is enabled. */ set colorGradingEnabled(e) { this._colorGradingEnabled !== e && (this._colorGradingEnabled = e, this._updateParameters()); } /** * Gets whether the color grading effect is using a green depth for the 3d Texture. */ get colorGradingWithGreenDepth() { return this._colorGradingWithGreenDepth; } /** * Sets whether the color grading effect is using a green depth for the 3d Texture. */ set colorGradingWithGreenDepth(e) { this._colorGradingWithGreenDepth !== e && (this._colorGradingWithGreenDepth = e, this._updateParameters()); } /** * Gets whether the color grading texture contains BGR values. 
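* Usage sketch (hedged): colorGradingTexture, colorGradingEnabled and colorGradingBGR are the
* properties defined on this class; ColorGradingTexture and scene.imageProcessingConfiguration
* are assumed from the public Babylon.js API and are not part of this excerpt:
* @example
* const ipc = scene.imageProcessingConfiguration;
* ipc.colorGradingTexture = new BABYLON.ColorGradingTexture("textures/lut.3dl", scene);
* ipc.colorGradingEnabled = true;
* ipc.colorGradingBGR = true; // set to false if the LUT stores RGB rather than BGR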
*/ get colorGradingBGR() { return this._colorGradingBGR; } /** * Sets whether the color grading texture contains BGR values. */ set colorGradingBGR(e) { this._colorGradingBGR !== e && (this._colorGradingBGR = e, this._updateParameters()); } /** * Gets the Exposure used in the effect. */ get exposure() { return this._exposure; } /** * Sets the Exposure used in the effect. */ set exposure(e) { this._exposure !== e && (this._exposure = e, this._updateParameters()); } /** * Gets whether the tone mapping effect is enabled. */ get toneMappingEnabled() { return this._toneMappingEnabled; } /** * Sets whether the tone mapping effect is enabled. */ set toneMappingEnabled(e) { this._toneMappingEnabled !== e && (this._toneMappingEnabled = e, this._updateParameters()); } /** * Gets the type of tone mapping effect. */ get toneMappingType() { return this._toneMappingType; } /** * Sets the type of tone mapping effect used in BabylonJS. */ set toneMappingType(e) { this._toneMappingType !== e && (this._toneMappingType = e, this._updateParameters()); } /** * Gets the contrast used in the effect. */ get contrast() { return this._contrast; } /** * Sets the contrast used in the effect. */ set contrast(e) { this._contrast !== e && (this._contrast = e, this._updateParameters()); } /** * Back Compat: Vignette center Y Offset. * @deprecated use vignetteCenterY instead */ get vignetteCentreY() { return this.vignetteCenterY; } set vignetteCentreY(e) { this.vignetteCenterY = e; } /** * Back Compat: Vignette center X Offset. * @deprecated use vignetteCenterX instead */ get vignetteCentreX() { return this.vignetteCenterX; } set vignetteCentreX(e) { this.vignetteCenterX = e; } /** * Gets the vignette blend mode allowing different kind of effect. */ get vignetteBlendMode() { return this._vignetteBlendMode; } /** * Sets the vignette blend mode allowing different kind of effect. */ set vignetteBlendMode(e) { this._vignetteBlendMode !== e && (this._vignetteBlendMode = e, this._updateParameters()); } /** * Gets whether the vignette effect is enabled. */ get vignetteEnabled() { return this._vignetteEnabled; } /** * Sets whether the vignette effect is enabled. */ set vignetteEnabled(e) { this._vignetteEnabled !== e && (this._vignetteEnabled = e, this._updateParameters()); } /** * Gets whether the dithering effect is enabled. * The dithering effect can be used to reduce banding. */ get ditheringEnabled() { return this._ditheringEnabled; } /** * Sets whether the dithering effect is enabled. * The dithering effect can be used to reduce banding. */ set ditheringEnabled(e) { this._ditheringEnabled !== e && (this._ditheringEnabled = e, this._updateParameters()); } /** * Gets the dithering intensity. 0 is no dithering. Default is 1.0 / 255.0. */ get ditheringIntensity() { return this._ditheringIntensity; } /** * Sets the dithering intensity. 0 is no dithering. Default is 1.0 / 255.0. */ set ditheringIntensity(e) { this._ditheringIntensity !== e && (this._ditheringIntensity = e, this._updateParameters()); } /** * If apply by post process is set to true, setting this to true will skip the final color clamp step in the fragment shader * Applies to PBR materials. */ get skipFinalColorClamp() { return this._skipFinalColorClamp; } /** * If apply by post process is set to true, setting this to true will skip the final color clamp step in the fragment shader * Applies to PBR materials. 
*/ set skipFinalColorClamp(e) { this._skipFinalColorClamp !== e && (this._skipFinalColorClamp = e, this._updateParameters()); } /** * Gets whether the image processing is applied through a post process or not. */ get applyByPostProcess() { return this._applyByPostProcess; } /** * Sets whether the image processing is applied through a post process or not. */ set applyByPostProcess(e) { this._applyByPostProcess !== e && (this._applyByPostProcess = e, this._updateParameters()); } /** * Gets whether the image processing is enabled or not. */ get isEnabled() { return this._isEnabled; } /** * Sets whether the image processing is enabled or not. */ set isEnabled(e) { this._isEnabled !== e && (this._isEnabled = e, this._updateParameters()); } /** * Method called each time the image processing information changes requires to recompile the effect. */ _updateParameters() { this.onUpdateParameters.notifyObservers(this); } /** * Gets the current class name. * @returns "ImageProcessingConfiguration" */ getClassName() { return "ImageProcessingConfiguration"; } /** * Prepare the list of uniforms associated with the Image Processing effects. * @param uniforms The list of uniforms used in the effect * @param defines the list of defines currently in use */ static PrepareUniforms(e, t) { t.EXPOSURE && e.push("exposureLinear"), t.CONTRAST && e.push("contrast"), t.COLORGRADING && e.push("colorTransformSettings"), (t.VIGNETTE || t.DITHER) && e.push("vInverseScreenSize"), t.VIGNETTE && (e.push("vignetteSettings1"), e.push("vignetteSettings2")), t.COLORCURVES && so.PrepareUniforms(e), t.DITHER && e.push("ditherIntensity"); } /** * Prepare the list of samplers associated with the Image Processing effects. * @param samplersList The list of uniforms used in the effect * @param defines the list of defines currently in use */ static PrepareSamplers(e, t) { t.COLORGRADING && e.push("txColorTransform"); } /** * Prepare the list of defines associated to the shader. * @param defines the list of defines to complete * @param forPostProcess Define if we are currently in post process mode or not */ prepareDefines(e, t = !1) { if (t !== this.applyByPostProcess || !this._isEnabled) { e.VIGNETTE = !1, e.TONEMAPPING = !1, e.TONEMAPPING_ACES = !1, e.CONTRAST = !1, e.EXPOSURE = !1, e.COLORCURVES = !1, e.COLORGRADING = !1, e.COLORGRADING3D = !1, e.DITHER = !1, e.IMAGEPROCESSING = !1, e.SKIPFINALCOLORCLAMP = this.skipFinalColorClamp, e.IMAGEPROCESSINGPOSTPROCESS = this.applyByPostProcess && this._isEnabled; return; } switch (e.VIGNETTE = this.vignetteEnabled, e.VIGNETTEBLENDMODEMULTIPLY = this.vignetteBlendMode === Ds._VIGNETTEMODE_MULTIPLY, e.VIGNETTEBLENDMODEOPAQUE = !e.VIGNETTEBLENDMODEMULTIPLY, e.TONEMAPPING = this.toneMappingEnabled, this._toneMappingType) { case Ds.TONEMAPPING_ACES: e.TONEMAPPING_ACES = !0; break; default: e.TONEMAPPING_ACES = !1; break; } e.CONTRAST = this.contrast !== 1, e.EXPOSURE = this.exposure !== 1, e.COLORCURVES = this.colorCurvesEnabled && !!this.colorCurves, e.COLORGRADING = this.colorGradingEnabled && !!this.colorGradingTexture, e.COLORGRADING ? 
e.COLORGRADING3D = this.colorGradingTexture.is3D : e.COLORGRADING3D = !1, e.SAMPLER3DGREENDEPTH = this.colorGradingWithGreenDepth, e.SAMPLER3DBGRMAP = this.colorGradingBGR, e.DITHER = this._ditheringEnabled, e.IMAGEPROCESSINGPOSTPROCESS = this.applyByPostProcess, e.SKIPFINALCOLORCLAMP = this.skipFinalColorClamp, e.IMAGEPROCESSING = e.VIGNETTE || e.TONEMAPPING || e.CONTRAST || e.EXPOSURE || e.COLORCURVES || e.COLORGRADING || e.DITHER; } /** * Returns true if all the image processing information is ready. * @returns True if ready, otherwise, false */ isReady() { return !this.colorGradingEnabled || !this.colorGradingTexture || this.colorGradingTexture.isReady(); } /** * Binds the image processing to the shader. * @param effect The effect to bind to * @param overrideAspectRatio Override the aspect ratio of the effect */ bind(e, t) { if (this._colorCurvesEnabled && this.colorCurves && so.Bind(this.colorCurves, e), this._vignetteEnabled || this._ditheringEnabled) { const i = 1 / e.getEngine().getRenderWidth(), r = 1 / e.getEngine().getRenderHeight(); if (e.setFloat2("vInverseScreenSize", i, r), this._ditheringEnabled && e.setFloat("ditherIntensity", 0.5 * this._ditheringIntensity), this._vignetteEnabled) { const s = t ?? r / i; let n = Math.tan(this.vignetteCameraFov * 0.5), a = n * s; const l = Math.sqrt(a * n); a = Ve.Mix(a, l, this.vignetteStretch), n = Ve.Mix(n, l, this.vignetteStretch), e.setFloat4("vignetteSettings1", a, n, -a * this.vignetteCenterX, -n * this.vignetteCenterY); const o = -2 * this.vignetteWeight; e.setFloat4("vignetteSettings2", this.vignetteColor.r, this.vignetteColor.g, this.vignetteColor.b, o); } } if (e.setFloat("exposureLinear", this.exposure), e.setFloat("contrast", this.contrast), this.colorGradingTexture) { e.setTexture("txColorTransform", this.colorGradingTexture); const i = this.colorGradingTexture.getSize().height; e.setFloat4( "colorTransformSettings", (i - 1) / i, // textureScale
0.5 / i, // textureOffset
i, // textureSize
this.colorGradingTexture.level // weight
); } } /** * Clones the current image processing instance. * @returns The cloned image processing */ clone() { return St.Clone(() => new Ds(), this); } /** * Serializes the current image processing instance to a json representation. * @returns a JSON representation */ serialize() { return St.Serialize(this); } /** * Parses the image processing from a json representation. * @param source the JSON source to parse * @returns The parsed image processing */ static Parse(e) { const t = St.Parse(() => new Ds(), e, null, null); return e.vignetteCentreX !== void 0 && (t.vignetteCenterX = e.vignetteCentreX), e.vignetteCentreY !== void 0 && (t.vignetteCenterY = e.vignetteCentreY), t; } /** * Used to apply the vignette as a mix with the pixel color. */ static get VIGNETTEMODE_MULTIPLY() { return this._VIGNETTEMODE_MULTIPLY; } /** * Used to apply the vignette as a replacement of the pixel color. 
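* Usage sketch (hedged — assumes this minified class is ImageProcessingConfiguration and is
* reachable as scene.imageProcessingConfiguration):
* @example
* const ipc = scene.imageProcessingConfiguration;
* ipc.toneMappingEnabled = true;
* ipc.toneMappingType = BABYLON.ImageProcessingConfiguration.TONEMAPPING_ACES;
* ipc.vignetteEnabled = true;
* ipc.vignetteWeight = 2;
* ipc.vignetteBlendMode = BABYLON.ImageProcessingConfiguration.VIGNETTEMODE_MULTIPLY;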
*/ static get VIGNETTEMODE_OPAQUE() { return this._VIGNETTEMODE_OPAQUE; } } Ds.TONEMAPPING_STANDARD = 0; Ds.TONEMAPPING_ACES = 1; Ds._VIGNETTEMODE_MULTIPLY = 0; Ds._VIGNETTEMODE_OPAQUE = 1; F([ qee() ], Ds.prototype, "colorCurves", void 0); F([ W() ], Ds.prototype, "_colorCurvesEnabled", void 0); F([ er("colorGradingTexture") ], Ds.prototype, "_colorGradingTexture", void 0); F([ W() ], Ds.prototype, "_colorGradingEnabled", void 0); F([ W() ], Ds.prototype, "_colorGradingWithGreenDepth", void 0); F([ W() ], Ds.prototype, "_colorGradingBGR", void 0); F([ W() ], Ds.prototype, "_exposure", void 0); F([ W() ], Ds.prototype, "_toneMappingEnabled", void 0); F([ W() ], Ds.prototype, "_toneMappingType", void 0); F([ W() ], Ds.prototype, "_contrast", void 0); F([ W() ], Ds.prototype, "vignetteStretch", void 0); F([ W() ], Ds.prototype, "vignetteCenterX", void 0); F([ W() ], Ds.prototype, "vignetteCenterY", void 0); F([ W() ], Ds.prototype, "vignetteWeight", void 0); F([ dw() ], Ds.prototype, "vignetteColor", void 0); F([ W() ], Ds.prototype, "vignetteCameraFov", void 0); F([ W() ], Ds.prototype, "_vignetteBlendMode", void 0); F([ W() ], Ds.prototype, "_vignetteEnabled", void 0); F([ W() ], Ds.prototype, "_ditheringEnabled", void 0); F([ W() ], Ds.prototype, "_ditheringIntensity", void 0); F([ W() ], Ds.prototype, "_skipFinalColorClamp", void 0); F([ W() ], Ds.prototype, "_applyByPostProcess", void 0); F([ W() ], Ds.prototype, "_isEnabled", void 0); St._ImageProcessingConfigurationParser = Ds.Parse; mi.prototype.createUniformBuffer = function(c, e) { const t = this._gl.createBuffer(); if (!t) throw new Error("Unable to create uniform buffer"); const i = new FO(t); return this.bindUniformBuffer(i), c instanceof Float32Array ? this._gl.bufferData(this._gl.UNIFORM_BUFFER, c, this._gl.STATIC_DRAW) : this._gl.bufferData(this._gl.UNIFORM_BUFFER, new Float32Array(c), this._gl.STATIC_DRAW), this.bindUniformBuffer(null), i.references = 1, i; }; mi.prototype.createDynamicUniformBuffer = function(c, e) { const t = this._gl.createBuffer(); if (!t) throw new Error("Unable to create dynamic uniform buffer"); const i = new FO(t); return this.bindUniformBuffer(i), c instanceof Float32Array ? this._gl.bufferData(this._gl.UNIFORM_BUFFER, c, this._gl.DYNAMIC_DRAW) : this._gl.bufferData(this._gl.UNIFORM_BUFFER, new Float32Array(c), this._gl.DYNAMIC_DRAW), this.bindUniformBuffer(null), i.references = 1, i; }; mi.prototype.updateUniformBuffer = function(c, e, t, i) { this.bindUniformBuffer(c), t === void 0 && (t = 0), i === void 0 ? e instanceof Float32Array ? this._gl.bufferSubData(this._gl.UNIFORM_BUFFER, t, e) : this._gl.bufferSubData(this._gl.UNIFORM_BUFFER, t, new Float32Array(e)) : e instanceof Float32Array ? this._gl.bufferSubData(this._gl.UNIFORM_BUFFER, 0, e.subarray(t, t + i)) : this._gl.bufferSubData(this._gl.UNIFORM_BUFFER, 0, new Float32Array(e).subarray(t, t + i)), this.bindUniformBuffer(null); }; mi.prototype.bindUniformBuffer = function(c) { this._gl.bindBuffer(this._gl.UNIFORM_BUFFER, c ? c.underlyingResource : null); }; mi.prototype.bindUniformBufferBase = function(c, e, t) { this._gl.bindBufferBase(this._gl.UNIFORM_BUFFER, e, c ? c.underlyingResource : null); }; mi.prototype.bindUniformBlock = function(c, e, t) { const i = c.program, r = this._gl.getUniformBlockIndex(i, e); r !== 4294967295 && this._gl.uniformBlockBinding(i, r, t); }; class Vi { /** * Instantiates a new Uniform buffer objects. * * Handles blocks of uniform on the GPU. 
* * If WebGL 2 is not available, this class falls back on traditional setUniformXXX calls. * * For more information, please refer to : * @see https://www.khronos.org/opengl/wiki/Uniform_Buffer_Object * @param engine Define the engine the buffer is associated with * @param data Define the data contained in the buffer * @param dynamic Define if the buffer is updatable * @param name to assign to the buffer (debugging purpose) * @param forceNoUniformBuffer define that this object must not rely on UBO objects */ constructor(e, t, i, r, s = !1) { this._valueCache = {}, this._engine = e, this._noUBO = !e.supportsUniformBuffers || s, this._dynamic = i, this._name = r ?? "no-name", this._data = t || [], this._uniformLocations = {}, this._uniformSizes = {}, this._uniformArraySizes = {}, this._uniformLocationPointer = 0, this._needSync = !1, this._engine._features.trackUbosInFrame && (this._buffers = [], this._bufferIndex = -1, this._createBufferOnWrite = !1, this._currentFrameId = 0), this._noUBO ? (this.updateMatrix3x3 = this._updateMatrix3x3ForEffect, this.updateMatrix2x2 = this._updateMatrix2x2ForEffect, this.updateFloat = this._updateFloatForEffect, this.updateFloat2 = this._updateFloat2ForEffect, this.updateFloat3 = this._updateFloat3ForEffect, this.updateFloat4 = this._updateFloat4ForEffect, this.updateFloatArray = this._updateFloatArrayForEffect, this.updateArray = this._updateArrayForEffect, this.updateIntArray = this._updateIntArrayForEffect, this.updateUIntArray = this._updateUIntArrayForEffect, this.updateMatrix = this._updateMatrixForEffect, this.updateMatrices = this._updateMatricesForEffect, this.updateVector3 = this._updateVector3ForEffect, this.updateVector4 = this._updateVector4ForEffect, this.updateColor3 = this._updateColor3ForEffect, this.updateColor4 = this._updateColor4ForEffect, this.updateDirectColor4 = this._updateDirectColor4ForEffect, this.updateInt = this._updateIntForEffect, this.updateInt2 = this._updateInt2ForEffect, this.updateInt3 = this._updateInt3ForEffect, this.updateInt4 = this._updateInt4ForEffect, this.updateUInt = this._updateUIntForEffect, this.updateUInt2 = this._updateUInt2ForEffect, this.updateUInt3 = this._updateUInt3ForEffect, this.updateUInt4 = this._updateUInt4ForEffect) : (this._engine._uniformBuffers.push(this), this.updateMatrix3x3 = this._updateMatrix3x3ForUniform, this.updateMatrix2x2 = this._updateMatrix2x2ForUniform, this.updateFloat = this._updateFloatForUniform, this.updateFloat2 = this._updateFloat2ForUniform, this.updateFloat3 = this._updateFloat3ForUniform, this.updateFloat4 = this._updateFloat4ForUniform, this.updateFloatArray = this._updateFloatArrayForUniform, this.updateArray = this._updateArrayForUniform, this.updateIntArray = this._updateIntArrayForUniform, this.updateUIntArray = this._updateUIntArrayForUniform, this.updateMatrix = this._updateMatrixForUniform, this.updateMatrices = this._updateMatricesForUniform, this.updateVector3 = this._updateVector3ForUniform, this.updateVector4 = this._updateVector4ForUniform, this.updateColor3 = this._updateColor3ForUniform, this.updateColor4 = this._updateColor4ForUniform, this.updateDirectColor4 = this._updateDirectColor4ForUniform, this.updateInt = this._updateIntForUniform, this.updateInt2 = this._updateInt2ForUniform, this.updateInt3 = this._updateInt3ForUniform, this.updateInt4 = this._updateInt4ForUniform, this.updateUInt = this._updateUIntForUniform, this.updateUInt2 = this._updateUInt2ForUniform, this.updateUInt3 = this._updateUInt3ForUniform, this.updateUInt4 = 
this._updateUInt4ForUniform); } /** * Indicates if the buffer is using the WebGL2 UBO implementation, * or just falling back on setUniformXXX calls. */ get useUbo() { return !this._noUBO; } /** * Indicates if the WebGL underlying uniform buffer is in sync * with the javascript cache data. */ get isSync() { return !this._needSync; } /** * Indicates if the WebGL underlying uniform buffer is dynamic. * Also, a dynamic UniformBuffer will disable cache verification and always * update the underlying WebGL uniform buffer to the GPU. * @returns true if dynamic, otherwise false */ isDynamic() { return this._dynamic !== void 0; } /** * The data cache on JS side. * @returns the underlying data as a float array */ getData() { return this._bufferData; } /** * The underlying WebGL Uniform buffer. * @returns the webgl buffer */ getBuffer() { return this._buffer; } /** * std140 layout specifies how to align data within an UBO structure. * See https://khronos.org/registry/OpenGL/specs/gl/glspec45.core.pdf#page=159 * for specs. * @param size */ _fillAlignment(e) { let t; if (e <= 2 ? t = e : t = 4, this._uniformLocationPointer % t !== 0) { const i = this._uniformLocationPointer; this._uniformLocationPointer += t - this._uniformLocationPointer % t; const r = this._uniformLocationPointer - i; for (let s = 0; s < r; s++) this._data.push(0); } } /** * Adds a uniform in the buffer. * Warning: the subsequent calls of this function must be in the same order as declared in the shader * for the layout to be correct! The addUniform function only handles types like float, vec2, vec3, vec4, mat4, * meaning size=1,2,3,4 or 16. It does not handle struct types. * @param name Name of the uniform, as used in the uniform block in the shader. * @param size Data size, or data directly. * @param arraySize The number of elements in the array, 0 if not an array. */ addUniform(e, t, i = 0) { if (this._noUBO || this._uniformLocations[e] !== void 0) return; let r; if (i > 0) { if (t instanceof Array) throw "addUniform should not be used with Array in UBO: " + e; if (this._fillAlignment(4), this._uniformArraySizes[e] = { strideSize: t, arraySize: i }, t == 16) t = t * i; else { const n = (4 - t) * i; t = t * i + n; } r = []; for (let s = 0; s < t; s++) r.push(0); } else { if (t instanceof Array) r = t, t = r.length; else { t = t, r = []; for (let s = 0; s < t; s++) r.push(0); } this._fillAlignment(t); } this._uniformSizes[e] = t, this._uniformLocations[e] = this._uniformLocationPointer, this._uniformLocationPointer += t; for (let s = 0; s < t; s++) this._data.push(r[s]); this._needSync = !0; } /** * Adds a Matrix 4x4 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. * @param mat A 4x4 matrix. */ addMatrix(e, t) { this.addUniform(e, Array.prototype.slice.call(t.toArray())); } /** * Adds a vec2 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. * @param x Define the x component value of the vec2 * @param y Define the y component value of the vec2 */ addFloat2(e, t, i) { const r = [t, i]; this.addUniform(e, r); } /** * Adds a vec3 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. * @param x Define the x component value of the vec3 * @param y Define the y component value of the vec3 * @param z Define the z component value of the vec3 */ addFloat3(e, t, i, r) { const s = [t, i, r]; this.addUniform(e, s); } /** * Adds a vec3 to the uniform buffer. 
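* Layout sketch (hedged — assumes this minified class is UniformBuffer; the block and member
* names below are illustrative only, and the add* calls must follow the declaration order of
* the uniform block in the shader):
* @example
* // GLSL side:  uniform Mesh { mat4 world; vec3 tint; float visibility; };
* const ubo = new BABYLON.UniformBuffer(engine, undefined, undefined, "Mesh");
* ubo.addMatrix("world", BABYLON.Matrix.Identity());
* ubo.addColor3("tint", new BABYLON.Color3(1, 1, 1));
* ubo.addUniform("visibility", 1);
* ubo.create(); // allocates the underlying WebGL buffer once the layout is complete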
* @param name Name of the uniform, as used in the uniform block in the shader. * @param color Define the vec3 from a Color */ addColor3(e, t) { const i = [t.r, t.g, t.b]; this.addUniform(e, i); } /** * Adds a vec4 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. * @param color Define the rgb components from a Color * @param alpha Define the a component of the vec4 */ addColor4(e, t, i) { const r = [t.r, t.g, t.b, i]; this.addUniform(e, r); } /** * Adds a vec3 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. * @param vector Define the vec3 components from a Vector */ addVector3(e, t) { const i = [t.x, t.y, t.z]; this.addUniform(e, i); } /** * Adds a Matrix 3x3 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. */ addMatrix3x3(e) { this.addUniform(e, 12); } /** * Adds a Matrix 2x2 to the uniform buffer. * @param name Name of the uniform, as used in the uniform block in the shader. */ addMatrix2x2(e) { this.addUniform(e, 8); } /** * Effectively creates the WebGL Uniform Buffer, once layout is completed with `addUniform`. */ create() { this._noUBO || this._buffer || (this._fillAlignment(4), this._bufferData = new Float32Array(this._data), this._rebuild(), this._needSync = !0); } _getNames() { const e = []; for (const t in this._uniformLocations) e.push(t); return e.join(","); } /** @internal */ _rebuild() { this._noUBO || !this._bufferData || (this._dynamic ? this._buffer = this._engine.createDynamicUniformBuffer(this._bufferData, this._name + "_UniformList:" + this._getNames()) : this._buffer = this._engine.createUniformBuffer(this._bufferData, this._name + "_UniformList:" + this._getNames()), this._engine._features.trackUbosInFrame && (this._buffers.push([this._buffer, this._engine._features.checkUbosContentBeforeUpload ? this._bufferData.slice() : void 0]), this._bufferIndex = this._buffers.length - 1, this._createBufferOnWrite = !1)); } /** @internal */ get _numBuffers() { return this._buffers.length; } /** @internal */ get _indexBuffer() { return this._bufferIndex; } /** Gets the name of this buffer */ get name() { return this._name; } /** Gets the current effect */ get currentEffect() { return this._currentEffect; } _buffersEqual(e, t) { for (let i = 0; i < e.length; ++i) if (e[i] !== t[i]) return !1; return !0; } _copyBuffer(e, t) { for (let i = 0; i < e.length; ++i) t[i] = e[i]; } /** * Updates the WebGL Uniform Buffer on the GPU. * If the `dynamic` flag is set to true, no cache comparison is done. * Otherwise, the buffer will be updated only if the cache differs. 
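* Per-frame sketch (hedged — assumes the public UniformBuffer name; effect and mesh come from
* the surrounding application code):
* @example
* ubo.bindToEffect(effect, "Mesh");                 // name of the uniform block in the shader
* ubo.updateMatrix("world", mesh.getWorldMatrix());
* ubo.updateFloat("visibility", 1);
* ubo.update();                                     // uploads only if the cached data changed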
*/ update() { if (!this._noUBO) { if (this.bindUniformBuffer(), !this._buffer) { this.create(); return; } if (!this._dynamic && !this._needSync) { this._createBufferOnWrite = this._engine._features.trackUbosInFrame; return; } if (this._buffers && this._buffers.length > 1 && this._buffers[this._bufferIndex][1]) if (this._buffersEqual(this._bufferData, this._buffers[this._bufferIndex][1])) { this._needSync = !1, this._createBufferOnWrite = this._engine._features.trackUbosInFrame; return; } else this._copyBuffer(this._bufferData, this._buffers[this._bufferIndex][1]); this._engine.updateUniformBuffer(this._buffer, this._bufferData), this._engine._features._collectUbosUpdatedInFrame && (Vi._UpdatedUbosInFrame[this._name] || (Vi._UpdatedUbosInFrame[this._name] = 0), Vi._UpdatedUbosInFrame[this._name]++), this._needSync = !1, this._createBufferOnWrite = this._engine._features.trackUbosInFrame; } } _createNewBuffer() { this._bufferIndex + 1 < this._buffers.length ? (this._bufferIndex++, this._buffer = this._buffers[this._bufferIndex][0], this._createBufferOnWrite = !1, this._needSync = !0) : this._rebuild(); } _checkNewFrame() { this._engine._features.trackUbosInFrame && this._currentFrameId !== this._engine.frameId && (this._currentFrameId = this._engine.frameId, this._createBufferOnWrite = !1, this._buffers && this._buffers.length > 0 ? (this._needSync = this._bufferIndex !== 0, this._bufferIndex = 0, this._buffer = this._buffers[this._bufferIndex][0]) : this._bufferIndex = -1); } /** * Updates the value of an uniform. The `update` method must be called afterwards to make it effective in the GPU. * @param uniformName Define the name of the uniform, as used in the uniform block in the shader. * @param data Define the flattened data * @param size Define the size of the data. */ updateUniform(e, t, i) { this._checkNewFrame(); let r = this._uniformLocations[e]; if (r === void 0) { if (this._buffer) { Ce.Error("Cannot add an uniform after UBO has been created."); return; } this.addUniform(e, i), r = this._uniformLocations[e]; } if (this._buffer || this.create(), this._dynamic) for (let s = 0; s < i; s++) this._bufferData[r + s] = t[s]; else { let s = !1; for (let n = 0; n < i; n++) (i === 16 && !this._engine._features.uniformBufferHardCheckMatrix || this._bufferData[r + n] !== Math.fround(t[n])) && (s = !0, this._createBufferOnWrite && this._createNewBuffer(), this._bufferData[r + n] = t[n]); this._needSync = this._needSync || s; } } /** * Updates the value of an uniform. The `update` method must be called afterwards to make it effective in the GPU. * @param uniformName Define the name of the uniform, as used in the uniform block in the shader. * @param data Define the flattened data * @param size Define the size of the data. */ updateUniformArray(e, t, i) { this._checkNewFrame(); const r = this._uniformLocations[e]; if (r === void 0) { Ce.Error("Cannot add an uniform Array dynamically. 
Please, add it using addUniform and make sure that uniform buffers are supported by the current engine."); return; } this._buffer || this.create(); const s = this._uniformArraySizes[e]; if (this._dynamic) for (let n = 0; n < i; n++) this._bufferData[r + n] = t[n]; else { let n = !1, a = 0, l = 0; for (let o = 0; o < i; o++) if (this._bufferData[r + l * 4 + a] !== Ve.FloatRound(t[o]) && (n = !0, this._createBufferOnWrite && this._createNewBuffer(), this._bufferData[r + l * 4 + a] = t[o]), a++, a === s.strideSize) { for (; a < 4; a++) this._bufferData[r + l * 4 + a] = 0; a = 0, l++; } this._needSync = this._needSync || n; } } _cacheMatrix(e, t) { this._checkNewFrame(); const i = this._valueCache[e], r = t.updateFlag; return i !== void 0 && i === r ? !1 : (this._valueCache[e] = r, !0); } // Update methods
_updateMatrix3x3ForUniform(e, t) { for (let i = 0; i < 3; i++) Vi._TempBuffer[i * 4] = t[i * 3], Vi._TempBuffer[i * 4 + 1] = t[i * 3 + 1], Vi._TempBuffer[i * 4 + 2] = t[i * 3 + 2], Vi._TempBuffer[i * 4 + 3] = 0; this.updateUniform(e, Vi._TempBuffer, 12); } _updateMatrix3x3ForEffect(e, t) { this._currentEffect.setMatrix3x3(e, t); } _updateMatrix2x2ForEffect(e, t) { this._currentEffect.setMatrix2x2(e, t); } _updateMatrix2x2ForUniform(e, t) { for (let i = 0; i < 2; i++) Vi._TempBuffer[i * 4] = t[i * 2], Vi._TempBuffer[i * 4 + 1] = t[i * 2 + 1], Vi._TempBuffer[i * 4 + 2] = 0, Vi._TempBuffer[i * 4 + 3] = 0; this.updateUniform(e, Vi._TempBuffer, 8); } _updateFloatForEffect(e, t) { this._currentEffect.setFloat(e, t); } _updateFloatForUniform(e, t) { Vi._TempBuffer[0] = t, this.updateUniform(e, Vi._TempBuffer, 1); } _updateFloat2ForEffect(e, t, i, r = "") { this._currentEffect.setFloat2(e + r, t, i); } _updateFloat2ForUniform(e, t, i) { Vi._TempBuffer[0] = t, Vi._TempBuffer[1] = i, this.updateUniform(e, Vi._TempBuffer, 2); } _updateFloat3ForEffect(e, t, i, r, s = "") { this._currentEffect.setFloat3(e + s, t, i, r); } _updateFloat3ForUniform(e, t, i, r) { Vi._TempBuffer[0] = t, Vi._TempBuffer[1] = i, Vi._TempBuffer[2] = r, this.updateUniform(e, Vi._TempBuffer, 3); } _updateFloat4ForEffect(e, t, i, r, s, n = "") { this._currentEffect.setFloat4(e + n, t, i, r, s); } _updateFloat4ForUniform(e, t, i, r, s) { Vi._TempBuffer[0] = t, Vi._TempBuffer[1] = i, Vi._TempBuffer[2] = r, Vi._TempBuffer[3] = s, this.updateUniform(e, Vi._TempBuffer, 4); } _updateFloatArrayForEffect(e, t) { this._currentEffect.setFloatArray(e, t); } _updateFloatArrayForUniform(e, t) { this.updateUniformArray(e, t, t.length); } _updateArrayForEffect(e, t) { this._currentEffect.setArray(e, t); } _updateArrayForUniform(e, t) { this.updateUniformArray(e, t, t.length); } _updateIntArrayForEffect(e, t) { this._currentEffect.setIntArray(e, t); } _updateIntArrayForUniform(e, t) { Vi._TempBufferInt32View.set(t), this.updateUniformArray(e, Vi._TempBuffer, t.length); } _updateUIntArrayForEffect(e, t) { this._currentEffect.setUIntArray(e, t); } _updateUIntArrayForUniform(e, t) { Vi._TempBufferUInt32View.set(t), this.updateUniformArray(e, Vi._TempBuffer, t.length); } _updateMatrixForEffect(e, t) { this._currentEffect.setMatrix(e, t); } _updateMatrixForUniform(e, t) { this._cacheMatrix(e, t) && this.updateUniform(e, t.toArray(), 16); } _updateMatricesForEffect(e, t) { this._currentEffect.setMatrices(e, t); } _updateMatricesForUniform(e, t) { this.updateUniform(e, t, t.length); } _updateVector3ForEffect(e, t) { this._currentEffect.setVector3(e, t); } _updateVector3ForUniform(e, t) { Vi._TempBuffer[0] = t.x, Vi._TempBuffer[1] = t.y, Vi._TempBuffer[2] 
= t.z, this.updateUniform(e, Vi._TempBuffer, 3); } _updateVector4ForEffect(e, t) { this._currentEffect.setVector4(e, t); } _updateVector4ForUniform(e, t) { Vi._TempBuffer[0] = t.x, Vi._TempBuffer[1] = t.y, Vi._TempBuffer[2] = t.z, Vi._TempBuffer[3] = t.w, this.updateUniform(e, Vi._TempBuffer, 4); } _updateColor3ForEffect(e, t, i = "") { this._currentEffect.setColor3(e + i, t); } _updateColor3ForUniform(e, t) { Vi._TempBuffer[0] = t.r, Vi._TempBuffer[1] = t.g, Vi._TempBuffer[2] = t.b, this.updateUniform(e, Vi._TempBuffer, 3); } _updateColor4ForEffect(e, t, i, r = "") { this._currentEffect.setColor4(e + r, t, i); } _updateDirectColor4ForEffect(e, t, i = "") { this._currentEffect.setDirectColor4(e + i, t); } _updateColor4ForUniform(e, t, i) { Vi._TempBuffer[0] = t.r, Vi._TempBuffer[1] = t.g, Vi._TempBuffer[2] = t.b, Vi._TempBuffer[3] = i, this.updateUniform(e, Vi._TempBuffer, 4); } _updateDirectColor4ForUniform(e, t) { Vi._TempBuffer[0] = t.r, Vi._TempBuffer[1] = t.g, Vi._TempBuffer[2] = t.b, Vi._TempBuffer[3] = t.a, this.updateUniform(e, Vi._TempBuffer, 4); } _updateIntForEffect(e, t, i = "") { this._currentEffect.setInt(e + i, t); } _updateIntForUniform(e, t) { Vi._TempBufferInt32View[0] = t, this.updateUniform(e, Vi._TempBuffer, 1); } _updateInt2ForEffect(e, t, i, r = "") { this._currentEffect.setInt2(e + r, t, i); } _updateInt2ForUniform(e, t, i) { Vi._TempBufferInt32View[0] = t, Vi._TempBufferInt32View[1] = i, this.updateUniform(e, Vi._TempBuffer, 2); } _updateInt3ForEffect(e, t, i, r, s = "") { this._currentEffect.setInt3(e + s, t, i, r); } _updateInt3ForUniform(e, t, i, r) { Vi._TempBufferInt32View[0] = t, Vi._TempBufferInt32View[1] = i, Vi._TempBufferInt32View[2] = r, this.updateUniform(e, Vi._TempBuffer, 3); } _updateInt4ForEffect(e, t, i, r, s, n = "") { this._currentEffect.setInt4(e + n, t, i, r, s); } _updateInt4ForUniform(e, t, i, r, s) { Vi._TempBufferInt32View[0] = t, Vi._TempBufferInt32View[1] = i, Vi._TempBufferInt32View[2] = r, Vi._TempBufferInt32View[3] = s, this.updateUniform(e, Vi._TempBuffer, 4); } _updateUIntForEffect(e, t, i = "") { this._currentEffect.setUInt(e + i, t); } _updateUIntForUniform(e, t) { Vi._TempBufferUInt32View[0] = t, this.updateUniform(e, Vi._TempBuffer, 1); } _updateUInt2ForEffect(e, t, i, r = "") { this._currentEffect.setUInt2(e + r, t, i); } _updateUInt2ForUniform(e, t, i) { Vi._TempBufferUInt32View[0] = t, Vi._TempBufferUInt32View[1] = i, this.updateUniform(e, Vi._TempBuffer, 2); } _updateUInt3ForEffect(e, t, i, r, s = "") { this._currentEffect.setUInt3(e + s, t, i, r); } _updateUInt3ForUniform(e, t, i, r) { Vi._TempBufferUInt32View[0] = t, Vi._TempBufferUInt32View[1] = i, Vi._TempBufferUInt32View[2] = r, this.updateUniform(e, Vi._TempBuffer, 3); } _updateUInt4ForEffect(e, t, i, r, s, n = "") { this._currentEffect.setUInt4(e + n, t, i, r, s); } _updateUInt4ForUniform(e, t, i, r, s) { Vi._TempBufferUInt32View[0] = t, Vi._TempBufferUInt32View[1] = i, Vi._TempBufferUInt32View[2] = r, Vi._TempBufferUInt32View[3] = s, this.updateUniform(e, Vi._TempBuffer, 4); } /** * Sets a sampler uniform on the effect. * @param name Define the name of the sampler. * @param texture Define the texture to set in the sampler */ setTexture(e, t) { this._currentEffect.setTexture(e, t); } /** * Sets a sampler uniform on the effect. * @param name Define the name of the sampler. 
* @param texture Define the (internal) texture to set in the sampler */ bindTexture(e, t) { this._currentEffect._bindTexture(e, t); } /** * Directly updates the value of the uniform in the cache AND on the GPU. * @param uniformName Define the name of the uniform, as used in the uniform block in the shader. * @param data Define the flattened data */ updateUniformDirectly(e, t) { this.updateUniform(e, t, t.length), this.update(); } /** * Associates an effect to this uniform buffer * @param effect Define the effect to associate the buffer to * @param name Name of the uniform block in the shader. */ bindToEffect(e, t) { this._currentEffect = e, this._currentEffectName = t; } /** * Binds the current (GPU) buffer to the effect */ bindUniformBuffer() { !this._noUBO && this._buffer && this._currentEffect && this._currentEffect.bindUniformBuffer(this._buffer, this._currentEffectName); } /** * Dissociates the current effect from this uniform buffer */ unbindEffect() { this._currentEffect = void 0, this._currentEffectName = void 0; } /** * Sets the current state of the class (_bufferIndex, _buffer) to point to the data buffer passed in parameter if this buffer is one of the buffers handled by the class (meaning if it can be found in the _buffers array) * This method is meant to be able to update a buffer at any time: just call setDataBuffer to set the class in the right state, call some updateXXX methods and then call update() => that will update the GPU buffer on the graphics card * @param dataBuffer buffer to look for * @returns true if the buffer has been found and the class internal state points to it, else false */ setDataBuffer(e) { if (!this._buffers) return this._buffer === e; for (let t = 0; t < this._buffers.length; ++t) if (this._buffers[t][0] === e) return this._bufferIndex = t, this._buffer = e, this._createBufferOnWrite = !1, this._currentEffect = void 0, !0; return !1; } /** * Disposes the uniform buffer. */ dispose() { if (this._noUBO) return; const e = this._engine._uniformBuffers, t = e.indexOf(this); if (t !== -1 && (e[t] = e[e.length - 1], e.pop()), this._engine._features.trackUbosInFrame && this._buffers) for (let i = 0; i < this._buffers.length; ++i) { const r = this._buffers[i][0]; this._engine._releaseBuffer(r); } else this._buffer && this._engine._releaseBuffer(this._buffer) && (this._buffer = null); } } Vi._UpdatedUbosInFrame = {}; Vi._MAX_UNIFORM_SIZE = 256; Vi._TempBuffer = new Float32Array(Vi._MAX_UNIFORM_SIZE); Vi._TempBufferInt32View = new Int32Array(Vi._TempBuffer.buffer); Vi._TempBufferUInt32View = new Uint32Array(Vi._TempBuffer.buffer); let hu = class { /** * Gets a boolean indicating if the Buffer is disposed */ get isDisposed() { return this._isDisposed; } /** * Constructor * @param engine the engine * @param data the data to use for this buffer * @param updatable whether the data is updatable * @param stride the stride (optional) * @param postponeInternalCreation whether to postpone creating the internal WebGL buffer (optional) * @param instanced whether the buffer is instanced (optional) * @param useBytes set to true if the stride is in bytes (optional) * @param divisor sets an optional divisor for instances (1 by default) * @param label defines the label of the buffer (for debug purpose) */ constructor(e, t, i, r = 0, s = !1, n = !1, a = !1, l, o) { this._isAlreadyOwned = !1, this._isDisposed = !1, e && e.getScene ? 
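/* Usage sketch (hedged): in the unminified sources this class is Buffer and the class Y below is
   VertexBuffer; mesh.setVerticesBuffer is assumed from the public Mesh API and is not part of
   this excerpt.
   const interleaved = new BABYLON.Buffer(engine, data, true, 6);  // 6 floats per vertex
   const positions = interleaved.createVertexBuffer(BABYLON.VertexBuffer.PositionKind, 0, 3);
   const normals = interleaved.createVertexBuffer(BABYLON.VertexBuffer.NormalKind, 3, 3);
   mesh.setVerticesBuffer(positions);
   mesh.setVerticesBuffer(normals); */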
this._engine = e.getScene().getEngine() : this._engine = e, this._updatable = i, this._instanced = n, this._divisor = l || 1, this._label = o, t instanceof JA ? (this._data = null, this._buffer = t) : (this._data = t, this._buffer = null), this.byteStride = a ? r : r * Float32Array.BYTES_PER_ELEMENT, s || this.create(); } /** * Create a new VertexBuffer based on the current buffer * @param kind defines the vertex buffer kind (position, normal, etc.) * @param offset defines offset in the buffer (0 by default) * @param size defines the size in floats of attributes (position is 3 for instance) * @param stride defines the stride size in floats in the buffer (the offset to apply to reach next value when data is interleaved) * @param instanced defines if the vertex buffer contains indexed data * @param useBytes defines if the offset and stride are in bytes * * @param divisor sets an optional divisor for instances (1 by default) * @returns the new vertex buffer */ createVertexBuffer(e, t, i, r, s, n = !1, a) { const l = n ? t : t * Float32Array.BYTES_PER_ELEMENT, o = r ? n ? r : r * Float32Array.BYTES_PER_ELEMENT : this.byteStride; return new Y(this._engine, this, e, this._updatable, !0, o, s === void 0 ? this._instanced : s, l, i, void 0, void 0, !0, this._divisor || a); } // Properties /** * Gets a boolean indicating if the Buffer is updatable? * @returns true if the buffer is updatable */ isUpdatable() { return this._updatable; } /** * Gets current buffer's data * @returns a DataArray or null */ getData() { return this._data; } /** * Gets underlying native buffer * @returns underlying native buffer */ getBuffer() { return this._buffer; } /** * Gets the stride in float32 units (i.e. byte stride / 4). * May not be an integer if the byte stride is not divisible by 4. * @returns the stride in float32 units * @deprecated Please use byteStride instead. */ getStrideSize() { return this.byteStride / Float32Array.BYTES_PER_ELEMENT; } // Methods /** * Store data into the buffer. Creates the buffer if not used already. * If the buffer was already used, it will be updated only if it is updatable, otherwise it will do nothing. * @param data defines the data to store */ create(e = null) { !e && this._buffer || (e = e || this._data, e && (this._buffer ? this._updatable && (this._engine.updateDynamicVertexBuffer(this._buffer, e), this._data = e) : this._updatable ? (this._buffer = this._engine.createDynamicVertexBuffer(e, this._label), this._data = e) : this._buffer = this._engine.createVertexBuffer(e, void 0, this._label))); } /** @internal */ _rebuild() { this._buffer = null, this.create(this._data); } /** * Update current buffer data * @param data defines the data to store */ update(e) { this.create(e); } /** * Updates the data directly. * @param data the new data * @param offset the new offset * @param vertexCount the vertex count (optional) * @param useBytes set to true if the offset is in bytes */ updateDirectly(e, t, i, r = !1) { this._buffer && this._updatable && (this._engine.updateDynamicVertexBuffer(this._buffer, e, r ? t : t * Float32Array.BYTES_PER_ELEMENT, i ? i * this.byteStride : void 0), t === 0 && i === void 0 ? 
this._data = e : this._data = null); } /** @internal */ _increaseReferences() { if (this._buffer) { if (!this._isAlreadyOwned) { this._isAlreadyOwned = !0; return; } this._buffer.references++; } } /** * Release all resources */ dispose() { this._buffer && this._engine._releaseBuffer(this._buffer) && (this._isDisposed = !0, this._data = null, this._buffer = null); } }; class Y { /** * Gets a boolean indicating if the Buffer is disposed */ get isDisposed() { return this._isDisposed; } /** * Gets or sets the instance divisor when in instanced mode */ get instanceDivisor() { return this._instanceDivisor; } set instanceDivisor(e) { const t = e != 0; this._instanceDivisor = e, t !== this._instanced && (this._instanced = t, this._computeHashCode()); } /** * Gets the number of vertices in the buffer */ get totalVertices() { const e = this.getData(); return e ? Array.isArray(e) ? e.length / (this.byteStride / 4) - this.byteOffset / 4 : (e.byteLength - this.byteOffset) / this.byteStride : 0; } /** @internal */ constructor(e, t, i, r, s, n, a, l, o, u, h = !1, d = !1, f = 1, p = !1) { var m, _, v, C, x; this._isDisposed = !1; let b = !1; if (this.engine = e, typeof r == "object" && r !== null ? (b = (m = r.updatable) !== null && m !== void 0 ? m : !1, s = r.postponeInternalCreation, n = r.stride, a = r.instanced, l = r.offset, o = r.size, u = r.type, h = (_ = r.normalized) !== null && _ !== void 0 ? _ : !1, d = (v = r.useBytes) !== null && v !== void 0 ? v : !1, f = (C = r.divisor) !== null && C !== void 0 ? C : 1, p = (x = r.takeBufferOwnership) !== null && x !== void 0 ? x : !1, this._label = r.label) : b = !!r, t instanceof hu ? (this._buffer = t, this._ownsBuffer = p) : (this._buffer = new hu(e, t, b, n, s, a, d, f, this._label), this._ownsBuffer = !0), this.uniqueId = Y._Counter++, this._kind = i, u === void 0) { const M = this.getData(); this.type = M ? Y.GetDataType(M) : Y.FLOAT; } else this.type = u; const S = Y.GetTypeByteLength(this.type); d ? (this._size = o || (n ? n / S : Y.DeduceStride(i)), this.byteStride = n || this._buffer.byteStride || this._size * S, this.byteOffset = l || 0) : (this._size = o || n || Y.DeduceStride(i), this.byteStride = n ? n * S : this._buffer.byteStride || this._size * S, this.byteOffset = (l || 0) * S), this.normalized = h, this._instanced = a !== void 0 ? a : !1, this._instanceDivisor = a ? f : 0, this._alignBuffer(), this._computeHashCode(); } _computeHashCode() { this.hashCode = (this.type - 5120 << 0) + ((this.normalized ? 1 : 0) << 3) + (this._size << 4) + ((this._instanced ? 1 : 0) << 6) + /* keep 5 bits free */ (this.byteStride << 12); } /** @internal */ _rebuild() { var e; (e = this._buffer) === null || e === void 0 || e._rebuild(); } /** * Returns the kind of the VertexBuffer (string) * @returns a string */ getKind() { return this._kind; } // Properties /** * Gets a boolean indicating if the VertexBuffer is updatable? * @returns true if the buffer is updatable */ isUpdatable() { return this._buffer.isUpdatable(); } /** * Gets current buffer's data * @returns a DataArray or null */ getData() { return this._buffer.getData(); } /** * Gets current buffer's data as a float array. Float data is constructed if the vertex buffer data cannot be returned directly. * @param totalVertices number of vertices in the buffer to take into account * @param forceCopy defines a boolean indicating that the returned array must be cloned upon returning it * @returns a float array containing vertex data */ getFloatData(e, t) { const i = this.getData(); return i ? (e = e ?? 
this.totalVertices, Y.GetFloatData(i, this._size, this.type, this.byteOffset, this.byteStride, this.normalized, e, t)) : null; } /** * Gets underlying native buffer * @returns underlying native buffer */ getBuffer() { return this._buffer.getBuffer(); } /** * Gets the stride in float32 units (i.e. byte stride / 4). * May not be an integer if the byte stride is not divisible by 4. * @returns the stride in float32 units * @deprecated Please use byteStride instead. */ getStrideSize() { return this.byteStride / Y.GetTypeByteLength(this.type); } /** * Returns the offset as a multiple of the type byte length. * @returns the offset in bytes * @deprecated Please use byteOffset instead. */ getOffset() { return this.byteOffset / Y.GetTypeByteLength(this.type); } /** * Returns the number of components or the byte size per vertex attribute * @param sizeInBytes If true, returns the size in bytes or else the size in number of components of the vertex attribute (default: false) * @returns the number of components */ getSize(e = !1) { return e ? this._size * Y.GetTypeByteLength(this.type) : this._size; } /** * Gets a boolean indicating is the internal buffer of the VertexBuffer is instanced * @returns true if this buffer is instanced */ getIsInstanced() { return this._instanced; } /** * Returns the instancing divisor, zero for non-instanced (integer). * @returns a number */ getInstanceDivisor() { return this._instanceDivisor; } // Methods /** * Store data into the buffer. If the buffer was already used it will be either recreated or updated depending on isUpdatable property * @param data defines the data to store */ create(e) { this._buffer.create(e), this._alignBuffer(); } /** * Updates the underlying buffer according to the passed numeric array or Float32Array. * This function will create a new buffer if the current one is not updatable * @param data defines the data to store */ update(e) { this._buffer.update(e), this._alignBuffer(); } /** * Updates directly the underlying WebGLBuffer according to the passed numeric array or Float32Array. * Returns the directly updated WebGLBuffer. * @param data the new data * @param offset the new offset * @param useBytes set to true if the offset is in bytes */ updateDirectly(e, t, i = !1) { this._buffer.updateDirectly(e, t, void 0, i), this._alignBuffer(); } /** * Disposes the VertexBuffer and the underlying WebGLBuffer. */ dispose() { this._ownsBuffer && this._buffer.dispose(), this._isDisposed = !0; } /** * Enumerates each value of this vertex buffer as numbers. * @param count the number of values to enumerate * @param callback the callback function called for each value */ forEach(e, t) { Y.ForEach(this._buffer.getData(), this.byteOffset, this.byteStride, this._size, this.type, e, this.normalized, t); } /** @internal */ _alignBuffer() { } /** * Deduces the stride given a kind. * @param kind The kind string to deduce * @returns The deduced stride */ static DeduceStride(e) { switch (e) { case Y.UVKind: case Y.UV2Kind: case Y.UV3Kind: case Y.UV4Kind: case Y.UV5Kind: case Y.UV6Kind: return 2; case Y.NormalKind: case Y.PositionKind: return 3; case Y.ColorKind: case Y.ColorInstanceKind: case Y.MatricesIndicesKind: case Y.MatricesIndicesExtraKind: case Y.MatricesWeightsKind: case Y.MatricesWeightsExtraKind: case Y.TangentKind: return 4; default: throw new Error("Invalid kind '" + e + "'"); } } /** * Gets the vertex buffer type of the given data array. * @param data the data array * @returns the vertex buffer type */ static GetDataType(e) { return e instanceof Int8Array ? 
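/* GetDataType: map typed-array constructors to the matching GL component-type constants; anything else (e.g. a plain number array) is treated as FLOAT. */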
Y.BYTE : e instanceof Uint8Array ? Y.UNSIGNED_BYTE : e instanceof Int16Array ? Y.SHORT : e instanceof Uint16Array ? Y.UNSIGNED_SHORT : e instanceof Int32Array ? Y.INT : e instanceof Uint32Array ? Y.UNSIGNED_INT : Y.FLOAT; } /** * Gets the byte length of the given type. * @param type the type * @returns the number of bytes */ static GetTypeByteLength(e) { switch (e) { case Y.BYTE: case Y.UNSIGNED_BYTE: return 1; case Y.SHORT: case Y.UNSIGNED_SHORT: return 2; case Y.INT: case Y.UNSIGNED_INT: case Y.FLOAT: return 4; default: throw new Error(`Invalid type '${e}'`); } } /** * Enumerates each value of the given parameters as numbers. * @param data the data to enumerate * @param byteOffset the byte offset of the data * @param byteStride the byte stride of the data * @param componentCount the number of components per element * @param componentType the type of the component * @param count the number of values to enumerate * @param normalized whether the data is normalized * @param callback the callback function called for each value */ static ForEach(e, t, i, r, s, n, a, l) { if (e instanceof Array) { let o = t / 4; const u = i / 4; for (let h = 0; h < n; h += r) { for (let d = 0; d < r; d++) l(e[o + d], h + d); o += u; } } else { const o = e instanceof ArrayBuffer ? new DataView(e) : new DataView(e.buffer, e.byteOffset, e.byteLength), u = Y.GetTypeByteLength(s); for (let h = 0; h < n; h += r) { let d = t; for (let f = 0; f < r; f++) { const p = Y._GetFloatValue(o, s, d, a); l(p, h + f), d += u; } t += i; } } } static _GetFloatValue(e, t, i, r) { switch (t) { case Y.BYTE: { let s = e.getInt8(i); return r && (s = Math.max(s / 127, -1)), s; } case Y.UNSIGNED_BYTE: { let s = e.getUint8(i); return r && (s = s / 255), s; } case Y.SHORT: { let s = e.getInt16(i, !0); return r && (s = Math.max(s / 32767, -1)), s; } case Y.UNSIGNED_SHORT: { let s = e.getUint16(i, !0); return r && (s = s / 65535), s; } case Y.INT: return e.getInt32(i, !0); case Y.UNSIGNED_INT: return e.getUint32(i, !0); case Y.FLOAT: return e.getFloat32(i, !0); default: throw new Error(`Invalid component type ${t}`); } } /** * Gets the given data array as a float array. Float data is constructed if the data array cannot be returned directly. * @param data the input data array * @param size the number of components * @param type the component type * @param byteOffset the byte offset of the data * @param byteStride the byte stride of the data * @param normalized whether the data is normalized * @param totalVertices number of vertices in the buffer to take into account * @param forceCopy defines a boolean indicating that the returned array must be cloned upon returning it * @returns a float array containing vertex data */ static GetFloatData(e, t, i, r, s, n, a, l) { const o = t * Y.GetTypeByteLength(i), u = a * t; if (i !== Y.FLOAT || s !== o) { const h = new Float32Array(u); return Y.ForEach(e, r, s, t, i, u, n, (d, f) => h[f] = d), h; } if (!(e instanceof Array || e instanceof Float32Array) || r !== 0 || e.length !== u) if (e instanceof Array) { const h = r / 4; return e.slice(h, h + u); } else { if (e instanceof ArrayBuffer) return new Float32Array(e, r, u); { let h = e.byteOffset + r; if (l) { const f = new Float32Array(u), p = new Float32Array(e.buffer, h, u); return f.set(p), f; } const d = h % 4; return d && (h = Math.max(0, h - d)), new Float32Array(e.buffer, h, u); } } return l ? 
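/* GetFloatData: at this point the input is already a float array with the requested offset and length, so return a copy only when forceCopy is set. */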
e.slice() : e; } } Y._Counter = 0; Y.BYTE = 5120; Y.UNSIGNED_BYTE = 5121; Y.SHORT = 5122; Y.UNSIGNED_SHORT = 5123; Y.INT = 5124; Y.UNSIGNED_INT = 5125; Y.FLOAT = 5126; Y.PositionKind = "position"; Y.NormalKind = "normal"; Y.TangentKind = "tangent"; Y.UVKind = "uv"; Y.UV2Kind = "uv2"; Y.UV3Kind = "uv3"; Y.UV4Kind = "uv4"; Y.UV5Kind = "uv5"; Y.UV6Kind = "uv6"; Y.ColorKind = "color"; Y.ColorInstanceKind = "instanceColor"; Y.MatricesIndicesKind = "matricesIndices"; Y.MatricesWeightsKind = "matricesWeights"; Y.MatricesIndicesExtraKind = "matricesIndicesExtra"; Y.MatricesWeightsExtraKind = "matricesWeightsExtra"; class ku { constructor() { this.hit = !1, this.distance = 0, this.pickedPoint = null, this.pickedMesh = null, this.bu = 0, this.bv = 0, this.faceId = -1, this.subMeshFaceId = -1, this.subMeshId = 0, this.pickedSprite = null, this.thinInstanceIndex = -1, this.ray = null, this.originMesh = null, this.aimTransform = null, this.gripTransform = null; } /** * Gets the normal corresponding to the face the pick collided with * @param useWorldCoordinates If the resulting normal should be relative to the world (default: false) * @param useVerticesNormals If the vertices normals should be used to calculate the normal instead of the normal map (default: true) * @returns The normal corresponding to the face the pick collided with * @remarks Note that the returned normal will always point towards the picking ray. */ getNormal(e = !1, t = !0) { if (!this.pickedMesh || t && !this.pickedMesh.isVerticesDataPresent(Y.NormalKind)) return null; let i = this.pickedMesh.getIndices(); (i == null ? void 0 : i.length) === 0 && (i = null); let r; const s = de.Vector3[0], n = de.Vector3[1], a = de.Vector3[2]; if (t) { const o = this.pickedMesh.getVerticesData(Y.NormalKind); let u = i ? D.FromArrayToRef(o, i[this.faceId * 3] * 3, s) : s.copyFromFloats(o[this.faceId * 3 * 3], o[this.faceId * 3 * 3 + 1], o[this.faceId * 3 * 3 + 2]), h = i ? D.FromArrayToRef(o, i[this.faceId * 3 + 1] * 3, n) : n.copyFromFloats(o[(this.faceId * 3 + 1) * 3], o[(this.faceId * 3 + 1) * 3 + 1], o[(this.faceId * 3 + 1) * 3 + 2]), d = i ? D.FromArrayToRef(o, i[this.faceId * 3 + 2] * 3, a) : a.copyFromFloats(o[(this.faceId * 3 + 2) * 3], o[(this.faceId * 3 + 2) * 3 + 1], o[(this.faceId * 3 + 2) * 3 + 2]); u = u.scale(this.bu), h = h.scale(this.bv), d = d.scale(1 - this.bu - this.bv), r = new D(u.x + h.x + d.x, u.y + h.y + d.y, u.z + h.z + d.z); } else { const o = this.pickedMesh.getVerticesData(Y.PositionKind), u = i ? D.FromArrayToRef(o, i[this.faceId * 3] * 3, s) : s.copyFromFloats(o[this.faceId * 3 * 3], o[this.faceId * 3 * 3 + 1], o[this.faceId * 3 * 3 + 2]), h = i ? D.FromArrayToRef(o, i[this.faceId * 3 + 1] * 3, n) : n.copyFromFloats(o[(this.faceId * 3 + 1) * 3], o[(this.faceId * 3 + 1) * 3 + 1], o[(this.faceId * 3 + 1) * 3 + 2]), d = i ? 
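/* getNormal, flat-shaded fallback: fetch the three face positions and use the cross product of two edges as the face normal. */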
D.FromArrayToRef(o, i[this.faceId * 3 + 2] * 3, a) : a.copyFromFloats(o[(this.faceId * 3 + 2) * 3], o[(this.faceId * 3 + 2) * 3 + 1], o[(this.faceId * 3 + 2) * 3 + 2]), f = u.subtract(h), p = d.subtract(h); r = D.Cross(f, p); } const l = (o, u) => { let h = o.getWorldMatrix(); o.nonUniformScaling && (de.Matrix[0].copyFrom(h), h = de.Matrix[0], h.setTranslationFromFloats(0, 0, 0), h.invert(), h.transposeToRef(de.Matrix[1]), h = de.Matrix[1]), D.TransformNormalToRef(u, h, u); }; if (e && l(this.pickedMesh, r), this.ray) { const o = de.Vector3[0].copyFrom(r); e || l(this.pickedMesh, o), D.Dot(o, this.ray.direction) > 0 && r.negateInPlace(); } return r.normalize(), r; } /** * Gets the texture coordinates of where the pick occurred * @param uvSet The UV set to use to calculate the texture coordinates (default: VertexBuffer.UVKind) * @returns The vector containing the coordinates of the texture */ getTextureCoordinates(e = Y.UVKind) { if (!this.pickedMesh || !this.pickedMesh.isVerticesDataPresent(e)) return null; const t = this.pickedMesh.getIndices(); if (!t) return null; const i = this.pickedMesh.getVerticesData(e); if (!i) return null; let r = at.FromArray(i, t[this.faceId * 3] * 2), s = at.FromArray(i, t[this.faceId * 3 + 1] * 2), n = at.FromArray(i, t[this.faceId * 3 + 2] * 2); return r = r.scale(this.bu), s = s.scale(this.bv), n = n.scale(1 - this.bu - this.bv), new at(r.x + s.x + n.x, r.y + s.y + n.y); } } class q9 { /** * Creates a new instance PostProcess * @param scene The scene that the post process is associated with. */ constructor(e) { this._vertexBuffers = {}, this._scene = e; } _prepareBuffers() { if (this._vertexBuffers[Y.PositionKind]) return; const e = []; e.push(1, 1), e.push(-1, 1), e.push(-1, -1), e.push(1, -1), this._vertexBuffers[Y.PositionKind] = new Y(this._scene.getEngine(), e, Y.PositionKind, !1, !1, 2), this._buildIndexBuffer(); } _buildIndexBuffer() { const e = []; e.push(0), e.push(1), e.push(2), e.push(0), e.push(2), e.push(3), this._indexBuffer = this._scene.getEngine().createIndexBuffer(e); } /** * Rebuilds the vertex buffers of the manager. * @internal */ _rebuild() { const e = this._vertexBuffers[Y.PositionKind]; e && (e._rebuild(), this._buildIndexBuffer()); } // Methods /** * Prepares a frame to be run through a post process. * @param sourceTexture The input texture to the post processes. (default: null) * @param postProcesses An array of post processes to be run. (default: null) * @returns True if the post processes were able to be run. * @internal */ _prepareFrame(e = null, t = null) { const i = this._scene.activeCamera; return !i || (t = t || i._postProcesses.filter((r) => r != null), !t || t.length === 0 || !this._scene.postProcessesEnabled) ? !1 : (t[0].activate(i, e, t != null), !0); } /** * Manually render a set of post processes to a texture. * Please note, the frame buffer won't be unbound after the call in case you have more render to do. * @param postProcesses An array of post processes to be run. * @param targetTexture The render target wrapper to render to. * @param forceFullscreenViewport force gl.viewport to be full screen eg. 
0,0,textureWidth,textureHeight * @param faceIndex defines the face to render to if a cubemap is defined as the target * @param lodLevel defines which lod of the texture to render to * @param doNotBindFrambuffer If set to true, assumes that the framebuffer has been bound previously */ directRender(e, t = null, i = !1, r = 0, s = 0, n = !1) { var a; const l = this._scene.getEngine(); for (let o = 0; o < e.length; o++) { o < e.length - 1 ? e[o + 1].activate(this._scene.activeCamera, t == null ? void 0 : t.texture) : (t ? l.bindFramebuffer(t, r, void 0, void 0, i, s) : n || l.restoreDefaultFramebuffer(), (a = l._debugInsertMarker) === null || a === void 0 || a.call(l, `post process ${e[o].name} output`)); const u = e[o], h = u.apply(); h && (u.onBeforeRenderObservable.notifyObservers(h), this._prepareBuffers(), l.bindBuffers(this._vertexBuffers, this._indexBuffer, h), l.drawElementsType(0, 0, 6), u.onAfterRenderObservable.notifyObservers(h)); } l.setDepthBuffer(!0), l.setDepthWrite(!0); } /** * Finalize the result of the output of the postprocesses. * @param doNotPresent If true the result will not be displayed to the screen. * @param targetTexture The render target wrapper to render to. * @param faceIndex The index of the face to bind the target texture to. * @param postProcesses The array of post processes to render. * @param forceFullscreenViewport force gl.viewport to be full screen eg. 0,0,textureWidth,textureHeight (default: false) * @internal */ _finalizeFrame(e, t, i, r, s = !1) { var n; const a = this._scene.activeCamera; if (!a || (r = r || a._postProcesses.filter((o) => o != null), r.length === 0 || !this._scene.postProcessesEnabled)) return; const l = this._scene.getEngine(); for (let o = 0, u = r.length; o < u; o++) { const h = r[o]; if (o < u - 1 ? h._outputTexture = r[o + 1].activate(a, t == null ? void 0 : t.texture) : (t ? (l.bindFramebuffer(t, i, void 0, void 0, s), h._outputTexture = t) : (l.restoreDefaultFramebuffer(), h._outputTexture = null), (n = l._debugInsertMarker) === null || n === void 0 || n.call(l, `post process ${r[o].name} output`)), e) break; const d = h.apply(); d && (h.onBeforeRenderObservable.notifyObservers(d), this._prepareBuffers(), l.bindBuffers(this._vertexBuffers, this._indexBuffer, d), l.drawElementsType(0, 0, 6), h.onAfterRenderObservable.notifyObservers(d)); } l.setDepthBuffer(!0), l.setDepthWrite(!0), l.setAlphaMode(0); } /** * Disposes of the post process manager. */ dispose() { const e = this._vertexBuffers[Y.PositionKind]; e && (e.dispose(), this._vertexBuffers[Y.PositionKind] = null), this._indexBuffer && (this._scene.getEngine()._releaseBuffer(this._indexBuffer), this._indexBuffer = null); } } class b4 { /** * Set the opaque sort comparison function. * If null the sub meshes will be render in the order they were created */ set opaqueSortCompareFn(e) { e ? this._opaqueSortCompareFn = e : this._opaqueSortCompareFn = b4.PainterSortCompare, this._renderOpaque = this._renderOpaqueSorted; } /** * Set the alpha test sort comparison function. * If null the sub meshes will be render in the order they were created */ set alphaTestSortCompareFn(e) { e ? this._alphaTestSortCompareFn = e : this._alphaTestSortCompareFn = b4.PainterSortCompare, this._renderAlphaTest = this._renderAlphaTestSorted; } /** * Set the transparent sort comparison function. * If null the sub meshes will be render in the order they were created */ set transparentSortCompareFn(e) { e ? 
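/* A null comparator falls back to the default transparent sort (alpha index, then back-to-front). Hedged usage sketch, assuming the unminified names of the rendering manager (Zh) and rendering group (b4) in this bundle: renderingManager.setRenderingOrder(0, RenderingGroup.PainterSortCompare, null, RenderingGroup.backToFrontSortCompare). */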
this._transparentSortCompareFn = e : this._transparentSortCompareFn = b4.defaultTransparentSortCompare, this._renderTransparent = this._renderTransparentSorted; } /** * Creates a new rendering group. * @param index The rendering group index * @param scene * @param opaqueSortCompareFn The opaque sort comparison function. If null no order is applied * @param alphaTestSortCompareFn The alpha test sort comparison function. If null no order is applied * @param transparentSortCompareFn The transparent sort comparison function. If null back to front + alpha index sort is applied */ constructor(e, t, i = null, r = null, s = null) { this.index = e, this._opaqueSubMeshes = new xc(256), this._transparentSubMeshes = new xc(256), this._alphaTestSubMeshes = new xc(256), this._depthOnlySubMeshes = new xc(256), this._particleSystems = new xc(256), this._spriteManagers = new xc(256), this._empty = !0, this._edgesRenderers = new XE(16), this._scene = t, this.opaqueSortCompareFn = i, this.alphaTestSortCompareFn = r, this.transparentSortCompareFn = s; } /** * Render all the sub meshes contained in the group. * @param customRenderFunction Used to override the default render behaviour of the group. * @param renderSprites * @param renderParticles * @param activeMeshes * @returns true if rendered some submeshes. */ render(e, t, i, r) { if (e) { e(this._opaqueSubMeshes, this._alphaTestSubMeshes, this._transparentSubMeshes, this._depthOnlySubMeshes); return; } const s = this._scene.getEngine(); this._depthOnlySubMeshes.length !== 0 && (s.setColorWrite(!1), this._renderAlphaTest(this._depthOnlySubMeshes), s.setColorWrite(!0)), this._opaqueSubMeshes.length !== 0 && this._renderOpaque(this._opaqueSubMeshes), this._alphaTestSubMeshes.length !== 0 && this._renderAlphaTest(this._alphaTestSubMeshes); const n = s.getStencilBuffer(); if (s.setStencilBuffer(!1), t && this._renderSprites(), i && this._renderParticles(r), this.onBeforeTransparentRendering && this.onBeforeTransparentRendering(), this._transparentSubMeshes.length !== 0 || this._scene.useOrderIndependentTransparency) { if (s.setStencilBuffer(n), this._scene.useOrderIndependentTransparency) { const a = this._scene.depthPeelingRenderer.render(this._transparentSubMeshes); a.length && this._renderTransparent(a); } else this._renderTransparent(this._transparentSubMeshes); s.setAlphaMode(0); } if (s.setStencilBuffer(!1), this._edgesRenderers.length) { for (let a = 0; a < this._edgesRenderers.length; a++) this._edgesRenderers.data[a].render(); s.setAlphaMode(0); } s.setStencilBuffer(n); } /** * Renders the opaque submeshes in the order from the opaqueSortCompareFn. * @param subMeshes The submeshes to render */ _renderOpaqueSorted(e) { return b4._RenderSorted(e, this._opaqueSortCompareFn, this._scene.activeCamera, !1); } /** * Renders the opaque submeshes in the order from the alphatestSortCompareFn. * @param subMeshes The submeshes to render */ _renderAlphaTestSorted(e) { return b4._RenderSorted(e, this._alphaTestSortCompareFn, this._scene.activeCamera, !1); } /** * Renders the opaque submeshes in the order from the transparentSortCompareFn. * @param subMeshes The submeshes to render */ _renderTransparentSorted(e) { return b4._RenderSorted(e, this._transparentSortCompareFn, this._scene.activeCamera, !0); } /** * Renders the submeshes in a specified order. 
* @param subMeshes The submeshes to sort before render * @param sortCompareFn The comparison function use to sort * @param camera The camera position use to preprocess the submeshes to help sorting * @param transparent Specifies to activate blending if true */ static _RenderSorted(e, t, i, r) { let s = 0, n; const a = i ? i.globalPosition : b4._ZeroVector; if (r) for (; s < e.length; s++) n = e.data[s], n._alphaIndex = n.getMesh().alphaIndex, n._distanceToCamera = D.Distance(n.getBoundingInfo().boundingSphere.centerWorld, a); const l = e.length === e.data.length ? e.data : e.data.slice(0, e.length); t && l.sort(t); const o = l[0].getMesh().getScene(); for (s = 0; s < l.length; s++) if (n = l[s], !(o._activeMeshesFrozenButKeepClipping && !n.isInFrustum(o._frustumPlanes))) { if (r) { const u = n.getMaterial(); if (u && u.needDepthPrePass) { const h = u.getScene().getEngine(); h.setColorWrite(!1), h.setAlphaMode(0), n.render(!1), h.setColorWrite(!0); } } n.render(r); } } /** * Build in function which can be applied to ensure meshes of a special queue (opaque, alpha test, transparent) * are rendered back to front if in the same alpha index. * * @param a The first submesh * @param b The second submesh * @returns The result of the comparison */ // eslint-disable-next-line @typescript-eslint/naming-convention static defaultTransparentSortCompare(e, t) { return e._alphaIndex > t._alphaIndex ? 1 : e._alphaIndex < t._alphaIndex ? -1 : b4.backToFrontSortCompare(e, t); } /** * Build in function which can be applied to ensure meshes of a special queue (opaque, alpha test, transparent) * are rendered back to front. * * @param a The first submesh * @param b The second submesh * @returns The result of the comparison */ // eslint-disable-next-line @typescript-eslint/naming-convention static backToFrontSortCompare(e, t) { return e._distanceToCamera < t._distanceToCamera ? 1 : e._distanceToCamera > t._distanceToCamera ? -1 : 0; } /** * Build in function which can be applied to ensure meshes of a special queue (opaque, alpha test, transparent) * are rendered front to back (prevent overdraw). * * @param a The first submesh * @param b The second submesh * @returns The result of the comparison */ // eslint-disable-next-line @typescript-eslint/naming-convention static frontToBackSortCompare(e, t) { return e._distanceToCamera < t._distanceToCamera ? -1 : e._distanceToCamera > t._distanceToCamera ? 1 : 0; } /** * Build in function which can be applied to ensure meshes of a special queue (opaque, alpha test, transparent) * are grouped by material then geometry. * * @param a The first submesh * @param b The second submesh * @returns The result of the comparison */ static PainterSortCompare(e, t) { const i = e.getMesh(), r = t.getMesh(); return i.material && r.material ? i.material.uniqueId - r.material.uniqueId : i.uniqueId - r.uniqueId; } /** * Resets the different lists of submeshes to prepare a new frame. */ prepare() { this._opaqueSubMeshes.reset(), this._transparentSubMeshes.reset(), this._alphaTestSubMeshes.reset(), this._depthOnlySubMeshes.reset(), this._particleSystems.reset(), this.prepareSprites(), this._edgesRenderers.reset(), this._empty = !0; } /** * Resets the different lists of sprites to prepare a new frame. 
*/ prepareSprites() { this._spriteManagers.reset(); } dispose() { this._opaqueSubMeshes.dispose(), this._transparentSubMeshes.dispose(), this._alphaTestSubMeshes.dispose(), this._depthOnlySubMeshes.dispose(), this._particleSystems.dispose(), this._spriteManagers.dispose(), this._edgesRenderers.dispose(); } /** * Inserts the submesh in its correct queue depending on its material. * @param subMesh The submesh to dispatch * @param [mesh] Optional reference to the submeshes's mesh. Provide if you have an exiting reference to improve performance. * @param [material] Optional reference to the submeshes's material. Provide if you have an exiting reference to improve performance. */ dispatch(e, t, i) { t === void 0 && (t = e.getMesh()), i === void 0 && (i = e.getMaterial()), i != null && (i.needAlphaBlendingForMesh(t) ? this._transparentSubMeshes.push(e) : i.needAlphaTesting() ? (i.needDepthPrePass && this._depthOnlySubMeshes.push(e), this._alphaTestSubMeshes.push(e)) : (i.needDepthPrePass && this._depthOnlySubMeshes.push(e), this._opaqueSubMeshes.push(e)), t._renderingGroup = this, t._edgesRenderer && t._edgesRenderer.isEnabled && this._edgesRenderers.pushNoDuplicate(t._edgesRenderer), this._empty = !1); } dispatchSprites(e) { this._spriteManagers.push(e), this._empty = !1; } dispatchParticles(e) { this._particleSystems.push(e), this._empty = !1; } _renderParticles(e) { if (this._particleSystems.length === 0) return; const t = this._scene.activeCamera; this._scene.onBeforeParticlesRenderingObservable.notifyObservers(this._scene); for (let i = 0; i < this._particleSystems.length; i++) { const r = this._particleSystems.data[i]; if ((t && t.layerMask & r.layerMask) === 0) continue; const s = r.emitter; (!s.position || !e || e.indexOf(s) !== -1) && this._scene._activeParticles.addCount(r.render(), !1); } this._scene.onAfterParticlesRenderingObservable.notifyObservers(this._scene); } _renderSprites() { if (!this._scene.spritesEnabled || this._spriteManagers.length === 0) return; const e = this._scene.activeCamera; this._scene.onBeforeSpritesRenderingObservable.notifyObservers(this._scene); for (let t = 0; t < this._spriteManagers.length; t++) { const i = this._spriteManagers.data[t]; (e && e.layerMask & i.layerMask) !== 0 && i.render(); } this._scene.onAfterSpritesRenderingObservable.notifyObservers(this._scene); } } b4._ZeroVector = D.Zero(); class pte { } class Zh { /** * Gets or sets a boolean indicating that the manager will not reset between frames. * This means that if a mesh becomes invisible or transparent it will not be visible until this boolean is set to false again. * By default, the rendering manager will dispatch all active meshes per frame (moving them to the transparent, opaque or alpha testing lists). * By turning this property on, you will accelerate the rendering by keeping all these lists unchanged between frames. */ get maintainStateBetweenFrames() { return this._maintainStateBetweenFrames; } set maintainStateBetweenFrames(e) { e !== this._maintainStateBetweenFrames && (this._maintainStateBetweenFrames = e, this._maintainStateBetweenFrames || this.restoreDispachedFlags()); } /** * Restore wasDispatched flags on the lists of elements to render. 
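 * Invoked automatically when maintainStateBetweenFrames is switched back off, so that every submesh, sprite manager and particle system is dispatched again on the next frame.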
*/ restoreDispachedFlags() { for (const e of this._scene.meshes) if (e.subMeshes) for (const t of e.subMeshes) t._wasDispatched = !1; if (this._scene.spriteManagers) for (const e of this._scene.spriteManagers) e._wasDispatched = !1; for (const e of this._scene.particleSystems) e._wasDispatched = !1; } /** * Instantiates a new rendering group for a particular scene * @param scene Defines the scene the groups belongs to */ constructor(e) { this._useSceneAutoClearSetup = !1, this._renderingGroups = new Array(), this._autoClearDepthStencil = {}, this._customOpaqueSortCompareFn = {}, this._customAlphaTestSortCompareFn = {}, this._customTransparentSortCompareFn = {}, this._renderingGroupInfo = new pte(), this._maintainStateBetweenFrames = !1, this._scene = e; for (let t = Zh.MIN_RENDERINGGROUPS; t < Zh.MAX_RENDERINGGROUPS; t++) this._autoClearDepthStencil[t] = { autoClear: !0, depth: !0, stencil: !0 }; } /** * Gets the rendering group with the specified id. */ getRenderingGroup(e) { const t = e || 0; return this._prepareRenderingGroup(t), this._renderingGroups[t]; } _clearDepthStencilBuffer(e = !0, t = !0) { this._depthStencilBufferAlreadyCleaned || (this._scene.getEngine().clear(null, !1, e, t), this._depthStencilBufferAlreadyCleaned = !0); } /** * Renders the entire managed groups. This is used by the scene or the different render targets. * @internal */ render(e, t, i, r) { const s = this._renderingGroupInfo; if (s.scene = this._scene, s.camera = this._scene.activeCamera, this._scene.spriteManagers && r) for (let n = 0; n < this._scene.spriteManagers.length; n++) { const a = this._scene.spriteManagers[n]; this.dispatchSprites(a); } for (let n = Zh.MIN_RENDERINGGROUPS; n < Zh.MAX_RENDERINGGROUPS; n++) { this._depthStencilBufferAlreadyCleaned = n === Zh.MIN_RENDERINGGROUPS; const a = this._renderingGroups[n]; if (!a || a._empty) continue; const l = Math.pow(2, n); if (s.renderingGroupId = n, this._scene.onBeforeRenderingGroupObservable.notifyObservers(s, l), Zh.AUTOCLEAR) { const o = this._useSceneAutoClearSetup ? this._scene.getAutoClearDepthStencilSetup(n) : this._autoClearDepthStencil[n]; o && o.autoClear && this._clearDepthStencilBuffer(o.depth, o.stencil); } for (const o of this._scene._beforeRenderingGroupDrawStage) o.action(n); a.render(e, r, i, t); for (const o of this._scene._afterRenderingGroupDrawStage) o.action(n); this._scene.onAfterRenderingGroupObservable.notifyObservers(s, l); } } /** * Resets the different information of the group to prepare a new frame * @internal */ reset() { if (!this.maintainStateBetweenFrames) for (let e = Zh.MIN_RENDERINGGROUPS; e < Zh.MAX_RENDERINGGROUPS; e++) { const t = this._renderingGroups[e]; t && t.prepare(); } } /** * Resets the sprites information of the group to prepare a new frame * @internal */ resetSprites() { if (!this.maintainStateBetweenFrames) for (let e = Zh.MIN_RENDERINGGROUPS; e < Zh.MAX_RENDERINGGROUPS; e++) { const t = this._renderingGroups[e]; t && t.prepareSprites(); } } /** * Dispose and release the group and its associated resources. * @internal */ dispose() { this.freeRenderingGroups(), this._renderingGroups.length = 0, this._renderingGroupInfo = null; } /** * Clear the info related to rendering groups preventing retention points during dispose. 
*/ freeRenderingGroups() { for (let e = Zh.MIN_RENDERINGGROUPS; e < Zh.MAX_RENDERINGGROUPS; e++) { const t = this._renderingGroups[e]; t && t.dispose(); } } _prepareRenderingGroup(e) { this._renderingGroups[e] === void 0 && (this._renderingGroups[e] = new b4(e, this._scene, this._customOpaqueSortCompareFn[e], this._customAlphaTestSortCompareFn[e], this._customTransparentSortCompareFn[e])); } /** * Add a sprite manager to the rendering manager in order to render it this frame. * @param spriteManager Define the sprite manager to render */ dispatchSprites(e) { this.maintainStateBetweenFrames && e._wasDispatched || (e._wasDispatched = !0, this.getRenderingGroup(e.renderingGroupId).dispatchSprites(e)); } /** * Add a particle system to the rendering manager in order to render it this frame. * @param particleSystem Define the particle system to render */ dispatchParticles(e) { this.maintainStateBetweenFrames && e._wasDispatched || (e._wasDispatched = !0, this.getRenderingGroup(e.renderingGroupId).dispatchParticles(e)); } /** * Add a submesh to the manager in order to render it this frame * @param subMesh The submesh to dispatch * @param mesh Optional reference to the submeshes's mesh. Provide if you have an exiting reference to improve performance. * @param material Optional reference to the submeshes's material. Provide if you have an exiting reference to improve performance. */ dispatch(e, t, i) { t === void 0 && (t = e.getMesh()), !(this.maintainStateBetweenFrames && e._wasDispatched) && (e._wasDispatched = !0, this.getRenderingGroup(t.renderingGroupId).dispatch(e, t, i)); } /** * Overrides the default sort function applied in the rendering group to prepare the meshes. * This allowed control for front to back rendering or reversely depending of the special needs. * * @param renderingGroupId The rendering group id corresponding to its index * @param opaqueSortCompareFn The opaque queue comparison function use to sort. * @param alphaTestSortCompareFn The alpha test queue comparison function use to sort. * @param transparentSortCompareFn The transparent queue comparison function use to sort. */ setRenderingOrder(e, t = null, i = null, r = null) { if (this._customOpaqueSortCompareFn[e] = t, this._customAlphaTestSortCompareFn[e] = i, this._customTransparentSortCompareFn[e] = r, this._renderingGroups[e]) { const s = this._renderingGroups[e]; s.opaqueSortCompareFn = this._customOpaqueSortCompareFn[e], s.alphaTestSortCompareFn = this._customAlphaTestSortCompareFn[e], s.transparentSortCompareFn = this._customTransparentSortCompareFn[e]; } } /** * Specifies whether or not the stencil and depth buffer are cleared between two rendering groups. * * @param renderingGroupId The rendering group id corresponding to its index * @param autoClearDepthStencil Automatically clears depth and stencil between groups if true. * @param depth Automatically clears depth between groups if true and autoClear is true. * @param stencil Automatically clears stencil between groups if true and autoClear is true. */ setRenderingAutoClearDepthStencil(e, t, i = !0, r = !0) { this._autoClearDepthStencil[e] = { autoClear: t, depth: i, stencil: r }; } /** * Gets the current auto clear configuration for one rendering group of the rendering * manager. 
* @param index the rendering group index to get the information for * @returns The auto clear setup for the requested rendering group */ getAutoClearDepthStencilSetup(e) { return this._autoClearDepthStencil[e]; } } Zh.MAX_RENDERINGGROUPS = 4; Zh.MIN_RENDERINGGROUPS = 0; Zh.AUTOCLEAR = !0; class Bt { } Bt.NAME_EFFECTLAYER = "EffectLayer"; Bt.NAME_LAYER = "Layer"; Bt.NAME_LENSFLARESYSTEM = "LensFlareSystem"; Bt.NAME_BOUNDINGBOXRENDERER = "BoundingBoxRenderer"; Bt.NAME_PARTICLESYSTEM = "ParticleSystem"; Bt.NAME_GAMEPAD = "Gamepad"; Bt.NAME_SIMPLIFICATIONQUEUE = "SimplificationQueue"; Bt.NAME_GEOMETRYBUFFERRENDERER = "GeometryBufferRenderer"; Bt.NAME_PREPASSRENDERER = "PrePassRenderer"; Bt.NAME_DEPTHRENDERER = "DepthRenderer"; Bt.NAME_DEPTHPEELINGRENDERER = "DepthPeelingRenderer"; Bt.NAME_POSTPROCESSRENDERPIPELINEMANAGER = "PostProcessRenderPipelineManager"; Bt.NAME_SPRITE = "Sprite"; Bt.NAME_SUBSURFACE = "SubSurface"; Bt.NAME_OUTLINERENDERER = "Outline"; Bt.NAME_PROCEDURALTEXTURE = "ProceduralTexture"; Bt.NAME_SHADOWGENERATOR = "ShadowGenerator"; Bt.NAME_OCTREE = "Octree"; Bt.NAME_PHYSICSENGINE = "PhysicsEngine"; Bt.NAME_AUDIO = "Audio"; Bt.NAME_FLUIDRENDERER = "FluidRenderer"; Bt.STEP_ISREADYFORMESH_EFFECTLAYER = 0; Bt.STEP_BEFOREEVALUATEACTIVEMESH_BOUNDINGBOXRENDERER = 0; Bt.STEP_EVALUATESUBMESH_BOUNDINGBOXRENDERER = 0; Bt.STEP_PREACTIVEMESH_BOUNDINGBOXRENDERER = 0; Bt.STEP_CAMERADRAWRENDERTARGET_EFFECTLAYER = 1; Bt.STEP_BEFORECAMERADRAW_PREPASS = 0; Bt.STEP_BEFORECAMERADRAW_EFFECTLAYER = 1; Bt.STEP_BEFORECAMERADRAW_LAYER = 2; Bt.STEP_BEFORERENDERTARGETDRAW_PREPASS = 0; Bt.STEP_BEFORERENDERTARGETDRAW_LAYER = 1; Bt.STEP_BEFORERENDERINGMESH_PREPASS = 0; Bt.STEP_BEFORERENDERINGMESH_OUTLINE = 1; Bt.STEP_AFTERRENDERINGMESH_PREPASS = 0; Bt.STEP_AFTERRENDERINGMESH_OUTLINE = 1; Bt.STEP_AFTERRENDERINGGROUPDRAW_EFFECTLAYER_DRAW = 0; Bt.STEP_AFTERRENDERINGGROUPDRAW_BOUNDINGBOXRENDERER = 1; Bt.STEP_BEFORECAMERAUPDATE_SIMPLIFICATIONQUEUE = 0; Bt.STEP_BEFORECAMERAUPDATE_GAMEPAD = 1; Bt.STEP_BEFORECLEAR_PROCEDURALTEXTURE = 0; Bt.STEP_BEFORECLEAR_PREPASS = 1; Bt.STEP_BEFORERENDERTARGETCLEAR_PREPASS = 0; Bt.STEP_AFTERRENDERTARGETDRAW_PREPASS = 0; Bt.STEP_AFTERRENDERTARGETDRAW_LAYER = 1; Bt.STEP_AFTERCAMERADRAW_PREPASS = 0; Bt.STEP_AFTERCAMERADRAW_EFFECTLAYER = 1; Bt.STEP_AFTERCAMERADRAW_LENSFLARESYSTEM = 2; Bt.STEP_AFTERCAMERADRAW_EFFECTLAYER_DRAW = 3; Bt.STEP_AFTERCAMERADRAW_LAYER = 4; Bt.STEP_AFTERCAMERADRAW_FLUIDRENDERER = 5; Bt.STEP_AFTERCAMERAPOSTPROCESS_LAYER = 0; Bt.STEP_AFTERRENDERTARGETPOSTPROCESS_LAYER = 0; Bt.STEP_AFTERRENDER_AUDIO = 0; Bt.STEP_GATHERRENDERTARGETS_DEPTHRENDERER = 0; Bt.STEP_GATHERRENDERTARGETS_GEOMETRYBUFFERRENDERER = 1; Bt.STEP_GATHERRENDERTARGETS_SHADOWGENERATOR = 2; Bt.STEP_GATHERRENDERTARGETS_POSTPROCESSRENDERPIPELINEMANAGER = 3; Bt.STEP_GATHERACTIVECAMERARENDERTARGETS_DEPTHRENDERER = 0; Bt.STEP_GATHERACTIVECAMERARENDERTARGETS_FLUIDRENDERER = 1; Bt.STEP_POINTERMOVE_SPRITE = 0; Bt.STEP_POINTERDOWN_SPRITE = 0; Bt.STEP_POINTERUP_SPRITE = 0; class Kl extends Array { /** * Hide ctor from the rest of the world. * @param items The items to add. */ constructor(e) { super(...e); } /** * Creates a new Stage. * @returns A new instance of a Stage */ static Create() { return Object.create(Kl.prototype); } /** * Registers a step in an ordered way in the targeted stage. 
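 * Steps are kept sorted by index: the new step is spliced in before the first existing step whose index is strictly greater.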
* @param index Defines the position to register the step in * @param component Defines the component attached to the step * @param action Defines the action to launch during the step */ registerStep(e, t, i) { let r = 0, s = Number.MAX_VALUE; for (; r < this.length && (s = this[r].index, !(e < s)); r++) ; this.splice(r, 0, { index: e, component: t, action: i.bind(t) }); } /** * Clears all the steps from the stage. */ clear() { this.length = 0; } } class si { } si.POINTERDOWN = 1; si.POINTERUP = 2; si.POINTERMOVE = 4; si.POINTERWHEEL = 8; si.POINTERPICK = 16; si.POINTERTAP = 32; si.POINTERDOUBLETAP = 64; class uK { /** * Instantiates the base class of pointers info. * @param type Defines the type of event (PointerEventTypes) * @param event Defines the related dom event */ constructor(e, t) { this.type = e, this.event = t; } } class _te extends uK { /** * Instantiates a PointerInfoPre to store pointer related info to the onPrePointerObservable event. * @param type Defines the type of event (PointerEventTypes) * @param event Defines the related dom event * @param localX Defines the local x coordinates of the pointer when the event occured * @param localY Defines the local y coordinates of the pointer when the event occured */ constructor(e, t, i, r) { super(e, t), this.ray = null, this.originalPickingInfo = null, this.skipOnPointerObservable = !1, this.localPosition = new at(i, r); } } class cg extends uK { /** * Defines the picking info associated with this PointerInfo object (if applicable) */ get pickInfo() { return this._pickInfo || this._generatePickInfo(), this._pickInfo; } /** * Instantiates a PointerInfo to store pointer related info to the onPointerObservable event. * @param type Defines the type of event (PointerEventTypes) * @param event Defines the related dom event * @param pickInfo Defines the picking info associated to the info (if any) * @param inputManager Defines the InputManager to use if there is no pickInfo */ constructor(e, t, i, r = null) { super(e, t), this._pickInfo = i, this._inputManager = r; } /** * Generates the picking info if needed */ /** @internal */ _generatePickInfo() { this._inputManager && (this._pickInfo = this._inputManager._pickMove(this.event), this._inputManager._setRayOnPointerInfo(this._pickInfo, this.event), this._inputManager = null); } } class rx { } rx.KEYDOWN = 1; rx.KEYUP = 2; class rB { /** * Instantiates a new keyboard info. * This class is used to store keyboard related info for the onKeyboardObservable event. * @param type Defines the type of event (KeyboardEventTypes) * @param event Defines the related dom event */ constructor(e, t) { this.type = e, this.event = t; } } class lH extends rB { /** * Defines whether the engine should skip the next onKeyboardObservable associated to this pre. * @deprecated use skipOnKeyboardObservable property instead */ get skipOnPointerObservable() { return this.skipOnKeyboardObservable; } set skipOnPointerObservable(e) { this.skipOnKeyboardObservable = e; } /** * Instantiates a new keyboard pre info. * This class is used to store keyboard related info for the onPreKeyboardObservable event. 
* @param type Defines the type of event (KeyboardEventTypes) * @param event Defines the related dom event */ constructor(e, t) { super(e, t), this.type = e, this.event = t, this.skipOnKeyboardObservable = !1; } } var cr; (function(c) { c[c.Generic = 0] = "Generic", c[c.Keyboard = 1] = "Keyboard", c[c.Mouse = 2] = "Mouse", c[c.Touch = 3] = "Touch", c[c.DualShock = 4] = "DualShock", c[c.Xbox = 5] = "Xbox", c[c.Switch = 6] = "Switch", c[c.DualSense = 7] = "DualSense"; })(cr || (cr = {})); var Gr; (function(c) { c[c.Horizontal = 0] = "Horizontal", c[c.Vertical = 1] = "Vertical", c[c.LeftClick = 2] = "LeftClick", c[c.MiddleClick = 3] = "MiddleClick", c[c.RightClick = 4] = "RightClick", c[c.BrowserBack = 5] = "BrowserBack", c[c.BrowserForward = 6] = "BrowserForward", c[c.MouseWheelX = 7] = "MouseWheelX", c[c.MouseWheelY = 8] = "MouseWheelY", c[c.MouseWheelZ = 9] = "MouseWheelZ", c[c.Move = 12] = "Move"; })(Gr || (Gr = {})); var J9; (function(c) { c[c.Horizontal = 0] = "Horizontal", c[c.Vertical = 1] = "Vertical", c[c.LeftClick = 2] = "LeftClick", c[c.MiddleClick = 3] = "MiddleClick", c[c.RightClick = 4] = "RightClick", c[c.BrowserBack = 5] = "BrowserBack", c[c.BrowserForward = 6] = "BrowserForward", c[c.MouseWheelX = 7] = "MouseWheelX", c[c.MouseWheelY = 8] = "MouseWheelY", c[c.MouseWheelZ = 9] = "MouseWheelZ", c[c.DeltaHorizontal = 10] = "DeltaHorizontal", c[c.DeltaVertical = 11] = "DeltaVertical"; })(J9 || (J9 = {})); var cH; (function(c) { c[c.Cross = 0] = "Cross", c[c.Circle = 1] = "Circle", c[c.Square = 2] = "Square", c[c.Triangle = 3] = "Triangle", c[c.L1 = 4] = "L1", c[c.R1 = 5] = "R1", c[c.L2 = 6] = "L2", c[c.R2 = 7] = "R2", c[c.Share = 8] = "Share", c[c.Options = 9] = "Options", c[c.L3 = 10] = "L3", c[c.R3 = 11] = "R3", c[c.DPadUp = 12] = "DPadUp", c[c.DPadDown = 13] = "DPadDown", c[c.DPadLeft = 14] = "DPadLeft", c[c.DPadRight = 15] = "DPadRight", c[c.Home = 16] = "Home", c[c.TouchPad = 17] = "TouchPad", c[c.LStickXAxis = 18] = "LStickXAxis", c[c.LStickYAxis = 19] = "LStickYAxis", c[c.RStickXAxis = 20] = "RStickXAxis", c[c.RStickYAxis = 21] = "RStickYAxis"; })(cH || (cH = {})); var uH; (function(c) { c[c.Cross = 0] = "Cross", c[c.Circle = 1] = "Circle", c[c.Square = 2] = "Square", c[c.Triangle = 3] = "Triangle", c[c.L1 = 4] = "L1", c[c.R1 = 5] = "R1", c[c.L2 = 6] = "L2", c[c.R2 = 7] = "R2", c[c.Create = 8] = "Create", c[c.Options = 9] = "Options", c[c.L3 = 10] = "L3", c[c.R3 = 11] = "R3", c[c.DPadUp = 12] = "DPadUp", c[c.DPadDown = 13] = "DPadDown", c[c.DPadLeft = 14] = "DPadLeft", c[c.DPadRight = 15] = "DPadRight", c[c.Home = 16] = "Home", c[c.TouchPad = 17] = "TouchPad", c[c.LStickXAxis = 18] = "LStickXAxis", c[c.LStickYAxis = 19] = "LStickYAxis", c[c.RStickXAxis = 20] = "RStickXAxis", c[c.RStickYAxis = 21] = "RStickYAxis"; })(uH || (uH = {})); var hH; (function(c) { c[c.A = 0] = "A", c[c.B = 1] = "B", c[c.X = 2] = "X", c[c.Y = 3] = "Y", c[c.LB = 4] = "LB", c[c.RB = 5] = "RB", c[c.LT = 6] = "LT", c[c.RT = 7] = "RT", c[c.Back = 8] = "Back", c[c.Start = 9] = "Start", c[c.LS = 10] = "LS", c[c.RS = 11] = "RS", c[c.DPadUp = 12] = "DPadUp", c[c.DPadDown = 13] = "DPadDown", c[c.DPadLeft = 14] = "DPadLeft", c[c.DPadRight = 15] = "DPadRight", c[c.Home = 16] = "Home", c[c.LStickXAxis = 17] = "LStickXAxis", c[c.LStickYAxis = 18] = "LStickYAxis", c[c.RStickXAxis = 19] = "RStickXAxis", c[c.RStickYAxis = 20] = "RStickYAxis"; })(hH || (hH = {})); var dH; (function(c) { c[c.B = 0] = "B", c[c.A = 1] = "A", c[c.Y = 2] = "Y", c[c.X = 3] = "X", c[c.L = 4] = "L", c[c.R = 5] = "R", c[c.ZL = 6] = "ZL", 
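/* Nintendo Switch-style controller mapping continues below: ZR, Minus/Plus, stick clicks, D-pad, Home, Capture and the stick axes. */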
c[c.ZR = 7] = "ZR", c[c.Minus = 8] = "Minus", c[c.Plus = 9] = "Plus", c[c.LS = 10] = "LS", c[c.RS = 11] = "RS", c[c.DPadUp = 12] = "DPadUp", c[c.DPadDown = 13] = "DPadDown", c[c.DPadLeft = 14] = "DPadLeft", c[c.DPadRight = 15] = "DPadRight", c[c.Home = 16] = "Home", c[c.Capture = 17] = "Capture", c[c.LStickXAxis = 18] = "LStickXAxis", c[c.LStickYAxis = 19] = "LStickYAxis", c[c.RStickXAxis = 20] = "RStickXAxis", c[c.RStickYAxis = 21] = "RStickYAxis"; })(dH || (dH = {})); var fH; (function(c) { c[c.PointerMove = 0] = "PointerMove", c[c.PointerDown = 1] = "PointerDown", c[c.PointerUp = 2] = "PointerUp"; })(fH || (fH = {})); class UI { } UI.DOM_DELTA_PIXEL = 0; UI.DOM_DELTA_LINE = 1; UI.DOM_DELTA_PAGE = 2; class kR { /** * Create device input events based on provided type and slot * * @param deviceType Type of device * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @param currentState Current value for given input * @param deviceInputSystem Reference to DeviceInputSystem * @param elementToAttachTo HTMLElement to reference as target for inputs * @returns IUIEvent object */ static CreateDeviceEvent(e, t, i, r, s, n, a) { switch (e) { case cr.Keyboard: return this._CreateKeyboardEvent(i, r, s, n); case cr.Mouse: if (i === Gr.MouseWheelX || i === Gr.MouseWheelY || i === Gr.MouseWheelZ) return this._CreateWheelEvent(e, t, i, r, s, n); case cr.Touch: return this._CreatePointerEvent(e, t, i, r, s, n, a); default: throw `Unable to generate event for device ${cr[e]}`; } } /** * Creates pointer event * * @param deviceType Type of device * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @param currentState Current value for given input * @param deviceInputSystem Reference to DeviceInputSystem * @param elementToAttachTo HTMLElement to reference as target for inputs * @returns IUIEvent object (Pointer) */ static _CreatePointerEvent(e, t, i, r, s, n, a) { const l = this._CreateMouseEvent(e, t, i, r, s, n); e === cr.Mouse ? (l.deviceType = cr.Mouse, l.pointerId = 1, l.pointerType = "mouse") : (l.deviceType = cr.Touch, l.pointerId = a ?? t, l.pointerType = "touch"); let o = 0; return o += s.pollInput(e, t, Gr.LeftClick), o += s.pollInput(e, t, Gr.RightClick) * 2, o += s.pollInput(e, t, Gr.MiddleClick) * 4, l.buttons = o, i === Gr.Move ? l.type = "pointermove" : i >= Gr.LeftClick && i <= Gr.RightClick && (l.type = r === 1 ? 
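/* Pointer button events: a state of 1 maps to "pointerdown", otherwise "pointerup"; the DOM button index is the input index minus 2 (LeftClick -> button 0). */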
"pointerdown" : "pointerup", l.button = i - 2), l; } /** * Create Mouse Wheel Event * @param deviceType Type of device * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @param currentState Current value for given input * @param deviceInputSystem Reference to DeviceInputSystem * @param elementToAttachTo HTMLElement to reference as target for inputs * @returns IUIEvent object (Wheel) */ static _CreateWheelEvent(e, t, i, r, s, n) { const a = this._CreateMouseEvent(e, t, i, r, s, n); switch (a.pointerId = 1, a.type = "wheel", a.deltaMode = UI.DOM_DELTA_PIXEL, a.deltaX = 0, a.deltaY = 0, a.deltaZ = 0, i) { case Gr.MouseWheelX: a.deltaX = r; break; case Gr.MouseWheelY: a.deltaY = r; break; case Gr.MouseWheelZ: a.deltaZ = r; break; } return a; } /** * Create Mouse Event * @param deviceType Type of device * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @param currentState Current value for given input * @param deviceInputSystem Reference to DeviceInputSystem * @param elementToAttachTo HTMLElement to reference as target for inputs * @returns IUIEvent object (Mouse) */ static _CreateMouseEvent(e, t, i, r, s, n) { const a = this._CreateEvent(n), l = s.pollInput(e, t, Gr.Horizontal), o = s.pollInput(e, t, Gr.Vertical); return n ? (a.movementX = 0, a.movementY = 0, a.offsetX = a.movementX - n.getBoundingClientRect().x, a.offsetY = a.movementY - n.getBoundingClientRect().y) : (a.movementX = s.pollInput(e, t, J9.DeltaHorizontal), a.movementY = s.pollInput(e, t, J9.DeltaVertical), a.offsetX = 0, a.offsetY = 0), this._CheckNonCharacterKeys(a, s), a.clientX = l, a.clientY = o, a.x = l, a.y = o, a.deviceType = e, a.deviceSlot = t, a.inputIndex = i, a; } /** * Create Keyboard Event * @param inputIndex Id of input to be checked * @param currentState Current value for given input * @param deviceInputSystem Reference to DeviceInputSystem * @param elementToAttachTo HTMLElement to reference as target for inputs * @returns IEvent object (Keyboard) */ static _CreateKeyboardEvent(e, t, i, r) { const s = this._CreateEvent(r); return this._CheckNonCharacterKeys(s, i), s.deviceType = cr.Keyboard, s.deviceSlot = 0, s.inputIndex = e, s.type = t === 1 ? "keydown" : "keyup", s.key = String.fromCharCode(e), s.keyCode = e, s; } /** * Add parameters for non-character keys (Ctrl, Alt, Meta, Shift) * @param evt Event object to add parameters to * @param deviceInputSystem DeviceInputSystem to pull values from */ static _CheckNonCharacterKeys(e, t) { const i = t.isDeviceAvailable(cr.Keyboard), r = i && t.pollInput(cr.Keyboard, 0, 18) === 1, s = i && t.pollInput(cr.Keyboard, 0, 17) === 1, n = i && (t.pollInput(cr.Keyboard, 0, 91) === 1 || t.pollInput(cr.Keyboard, 0, 92) === 1 || t.pollInput(cr.Keyboard, 0, 93) === 1), a = i && t.pollInput(cr.Keyboard, 0, 16) === 1; e.altKey = r, e.ctrlKey = s, e.metaKey = n, e.shiftKey = a; } /** * Create base event object * @param elementToAttachTo Value to use as event target * @returns */ static _CreateEvent(e) { const t = {}; return t.preventDefault = () => { }, t.target = e, t; } } class ice { constructor(e, t, i) { this._nativeInput = _native.DeviceInputSystem ? new _native.DeviceInputSystem(e, t, (r, s, n, a) => { const l = kR.CreateDeviceEvent(r, s, n, a, this); i(r, s, l); }) : this._createDummyNativeInput(); } // Public functions /** * Checks for current device input value, given an id and input index. Throws exception if requested device not initialized. 
* @param deviceType Enum specifying device type * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @returns Current value of input */ pollInput(e, t, i) { return this._nativeInput.pollInput(e, t, i); } /** * Check for a specific device in the DeviceInputSystem * @param deviceType Type of device to check for * @returns bool with status of device's existence */ isDeviceAvailable(e) { return e === cr.Mouse || e === cr.Touch; } /** * Dispose of all the observables */ dispose() { this._nativeInput.dispose(); } /** * For versions of BabylonNative that don't have the NativeInput plugin initialized, create a dummy version * @returns Object with dummy functions */ _createDummyNativeInput() { return { pollInput: () => 0, isDeviceAvailable: () => !1, dispose: () => { } }; } } const TZ = 255, SZ = Object.keys(Gr).length / 2; class rce { /** * Constructor for the WebDeviceInputSystem * @param engine Engine to reference * @param onDeviceConnected Callback to execute when device is connected * @param onDeviceDisconnected Callback to execute when device is disconnected * @param onInputChanged Callback to execute when input changes on device */ constructor(e, t, i, r) { this._inputs = [], this._keyboardActive = !1, this._pointerActive = !1, this._usingSafari = Ve.IsSafari(), this._usingMacOS = GR() && /(Mac|iPhone|iPod|iPad)/i.test(navigator.platform), this._keyboardDownEvent = (s) => { }, this._keyboardUpEvent = (s) => { }, this._keyboardBlurEvent = (s) => { }, this._pointerMoveEvent = (s) => { }, this._pointerDownEvent = (s) => { }, this._pointerUpEvent = (s) => { }, this._pointerCancelEvent = (s) => { }, this._pointerWheelEvent = (s) => { }, this._pointerBlurEvent = (s) => { }, this._pointerMacOSChromeOutEvent = (s) => { }, this._eventsAttached = !1, this._mouseId = -1, this._isUsingFirefox = GR() && navigator.userAgent && navigator.userAgent.indexOf("Firefox") !== -1, this._isUsingChromium = GR() && navigator.userAgent && navigator.userAgent.indexOf("Chrome") !== -1, this._maxTouchPoints = 0, this._pointerInputClearObserver = null, this._gamepadConnectedEvent = (s) => { }, this._gamepadDisconnectedEvent = (s) => { }, this._eventPrefix = Ve.GetPointerPrefix(e), this._engine = e, this._onDeviceConnected = t, this._onDeviceDisconnected = i, this._onInputChanged = r, this._mouseId = this._isUsingFirefox ? 0 : 1, this._enableEvents(), this._usingMacOS && (this._metaKeys = []), this._engine._onEngineViewChanged || (this._engine._onEngineViewChanged = () => { this._enableEvents(); }); } // Public functions /** * Checks for current device input value, given an id and input index. Throws exception if requested device not initialized. * @param deviceType Enum specifying device type * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked * @returns Current value of input */ pollInput(e, t, i) { const r = this._inputs[e][t]; if (!r) throw `Unable to find device ${cr[e]}`; e >= cr.DualShock && e <= cr.DualSense && this._updateDevice(e, t, i); const s = r[i]; if (s === void 0) throw `Unable to find input ${i} for device ${cr[e]} in slot ${t}`; return i === Gr.Move && Ve.Warn("Unable to provide information for PointerInput.Move. 
Try using PointerInput.Horizontal or PointerInput.Vertical for move data."), s; } /** * Check for a specific device in the DeviceInputSystem * @param deviceType Type of device to check for * @returns bool with status of device's existence */ isDeviceAvailable(e) { return this._inputs[e] !== void 0; } /** * Dispose of all the eventlisteners */ dispose() { this._onDeviceConnected = () => { }, this._onDeviceDisconnected = () => { }, this._onInputChanged = () => { }, delete this._engine._onEngineViewChanged, this._elementToAttachTo && this._disableEvents(); } /** * Enable listening for user input events */ _enableEvents() { const e = this === null || this === void 0 ? void 0 : this._engine.getInputElement(); if (e && (!this._eventsAttached || this._elementToAttachTo !== e)) { if (this._disableEvents(), this._inputs) { for (const t of this._inputs) if (t) for (const i in t) { const r = +i, s = t[r]; if (s) for (let n = 0; n < s.length; n++) s[n] = 0; } } this._elementToAttachTo = e, this._elementToAttachTo.tabIndex = this._elementToAttachTo.tabIndex !== -1 ? this._elementToAttachTo.tabIndex : this._engine.canvasTabIndex, this._handleKeyActions(), this._handlePointerActions(), this._handleGamepadActions(), this._eventsAttached = !0, this._checkForConnectedDevices(); } } /** * Disable listening for user input events */ _disableEvents() { this._elementToAttachTo && (this._elementToAttachTo.removeEventListener("blur", this._keyboardBlurEvent), this._elementToAttachTo.removeEventListener("blur", this._pointerBlurEvent), this._elementToAttachTo.removeEventListener("keydown", this._keyboardDownEvent), this._elementToAttachTo.removeEventListener("keyup", this._keyboardUpEvent), this._elementToAttachTo.removeEventListener(this._eventPrefix + "move", this._pointerMoveEvent), this._elementToAttachTo.removeEventListener(this._eventPrefix + "down", this._pointerDownEvent), this._elementToAttachTo.removeEventListener(this._eventPrefix + "up", this._pointerUpEvent), this._elementToAttachTo.removeEventListener(this._eventPrefix + "cancel", this._pointerCancelEvent), this._elementToAttachTo.removeEventListener(this._wheelEventName, this._pointerWheelEvent), this._usingMacOS && this._isUsingChromium && this._elementToAttachTo.removeEventListener("lostpointercapture", this._pointerMacOSChromeOutEvent), window.removeEventListener("gamepadconnected", this._gamepadConnectedEvent), window.removeEventListener("gamepaddisconnected", this._gamepadDisconnectedEvent)), this._pointerInputClearObserver && this._engine.onEndFrameObservable.remove(this._pointerInputClearObserver), this._eventsAttached = !1; } /** * Checks for existing connections to devices and register them, if necessary * Currently handles gamepads and mouse */ _checkForConnectedDevices() { if (navigator.getGamepads) { const e = navigator.getGamepads(); for (const t of e) t && this._addGamePad(t); } typeof matchMedia == "function" && matchMedia("(pointer:fine)").matches && this._addPointerDevice(cr.Mouse, 0, 0, 0); } // Private functions /** * Add a gamepad to the DeviceInputSystem * @param gamepad A single DOM Gamepad object */ _addGamePad(e) { const t = this._getGamepadDeviceType(e.id), i = e.index; this._gamepads = this._gamepads || new Array(e.index + 1), this._registerDevice(t, i, e.buttons.length + e.axes.length), this._gamepads[i] = t; } /** * Add pointer device to DeviceInputSystem * @param deviceType Type of Pointer to add * @param deviceSlot Pointer ID (0 for mouse, pointerId for Touch) * @param currentX Current X at point of adding * @param 
currentY Current Y at point of adding */ _addPointerDevice(e, t, i, r) { this._pointerActive || (this._pointerActive = !0), this._registerDevice(e, t, SZ); const s = this._inputs[e][t]; s[0] = i, s[1] = r; } /** * Add device and inputs to device array * @param deviceType Enum specifying device type * @param deviceSlot "Slot" or index that device is referenced in * @param numberOfInputs Number of input entries to create for given device */ _registerDevice(e, t, i) { if (t === void 0) throw `Unable to register device ${cr[e]} to undefined slot.`; if (this._inputs[e] || (this._inputs[e] = {}), !this._inputs[e][t]) { const r = new Array(i); r.fill(0), this._inputs[e][t] = r, this._onDeviceConnected(e, t); } } /** * Given a specific device name, remove that device from the device map * @param deviceType Enum specifying device type * @param deviceSlot "Slot" or index that device is referenced in */ _unregisterDevice(e, t) { this._inputs[e][t] && (delete this._inputs[e][t], this._onDeviceDisconnected(e, t)); } /** * Handle all actions that come from keyboard interaction */ _handleKeyActions() { this._keyboardDownEvent = (e) => { this._keyboardActive || (this._keyboardActive = !0, this._registerDevice(cr.Keyboard, 0, TZ)); const t = this._inputs[cr.Keyboard][0]; if (t) { t[e.keyCode] = 1; const i = e; i.inputIndex = e.keyCode, this._usingMacOS && e.metaKey && e.key !== "Meta" && (this._metaKeys.includes(e.keyCode) || this._metaKeys.push(e.keyCode)), this._onInputChanged(cr.Keyboard, 0, i); } }, this._keyboardUpEvent = (e) => { this._keyboardActive || (this._keyboardActive = !0, this._registerDevice(cr.Keyboard, 0, TZ)); const t = this._inputs[cr.Keyboard][0]; if (t) { t[e.keyCode] = 0; const i = e; if (i.inputIndex = e.keyCode, this._usingMacOS && e.key === "Meta" && this._metaKeys.length > 0) { for (const r of this._metaKeys) { const s = kR.CreateDeviceEvent(cr.Keyboard, 0, r, 0, this, this._elementToAttachTo); t[r] = 0, this._onInputChanged(cr.Keyboard, 0, s); } this._metaKeys.splice(0, this._metaKeys.length); } this._onInputChanged(cr.Keyboard, 0, i); } }, this._keyboardBlurEvent = () => { if (this._keyboardActive) { const e = this._inputs[cr.Keyboard][0]; for (let t = 0; t < e.length; t++) if (e[t] !== 0) { e[t] = 0; const i = kR.CreateDeviceEvent(cr.Keyboard, 0, t, 0, this, this._elementToAttachTo); this._onInputChanged(cr.Keyboard, 0, i); } this._usingMacOS && this._metaKeys.splice(0, this._metaKeys.length); } }, this._elementToAttachTo.addEventListener("keydown", this._keyboardDownEvent), this._elementToAttachTo.addEventListener("keyup", this._keyboardUpEvent), this._elementToAttachTo.addEventListener("blur", this._keyboardBlurEvent); } /** * Handle all actions that come from pointer interaction */ _handlePointerActions() { this._maxTouchPoints = GR() && navigator.maxTouchPoints || 2, this._activeTouchIds || (this._activeTouchIds = new Array(this._maxTouchPoints)); for (let i = 0; i < this._maxTouchPoints; i++) this._activeTouchIds[i] = -1; this._pointerMoveEvent = (i) => { const r = this._getPointerType(i); let s = r === cr.Mouse ? 0 : this._activeTouchIds.indexOf(i.pointerId); if (r === cr.Touch && s === -1) { const a = this._activeTouchIds.indexOf(-1); if (a >= 0) s = a, this._activeTouchIds[a] = i.pointerId, this._onDeviceConnected(r, s); else { Ve.Warn(`Max number of touches exceeded. 
Ignoring touches in excess of ${this._maxTouchPoints}`); return; } } this._inputs[r] || (this._inputs[r] = {}), this._inputs[r][s] || this._addPointerDevice(r, s, i.clientX, i.clientY); const n = this._inputs[r][s]; if (n) { const a = i; a.inputIndex = Gr.Move, n[Gr.Horizontal] = i.clientX, n[Gr.Vertical] = i.clientY, r === cr.Touch && n[Gr.LeftClick] === 0 && (n[Gr.LeftClick] = 1), i.pointerId === void 0 && (i.pointerId = this._mouseId), this._onInputChanged(r, s, a), !this._usingSafari && i.button !== -1 && (a.inputIndex = i.button + 2, n[i.button + 2] = n[i.button + 2] ? 0 : 1, this._onInputChanged(r, s, a)); } }, this._pointerDownEvent = (i) => { const r = this._getPointerType(i); let s = r === cr.Mouse ? 0 : i.pointerId; if (r === cr.Touch) { const a = this._activeTouchIds.indexOf(-1); if (a >= 0) s = a, this._activeTouchIds[a] = i.pointerId; else { Ve.Warn(`Max number of touches exceeded. Ignoring touches in excess of ${this._maxTouchPoints}`); return; } } this._inputs[r] || (this._inputs[r] = {}), this._inputs[r][s] ? r === cr.Touch && this._onDeviceConnected(r, s) : this._addPointerDevice(r, s, i.clientX, i.clientY); const n = this._inputs[r][s]; if (n) { const a = n[Gr.Horizontal], l = n[Gr.Vertical]; if (r === cr.Mouse) { if (i.pointerId === void 0 && (i.pointerId = this._mouseId), !document.pointerLockElement) try { this._elementToAttachTo.setPointerCapture(this._mouseId); } catch { } } else if (i.pointerId && !document.pointerLockElement) try { this._elementToAttachTo.setPointerCapture(i.pointerId); } catch { } n[Gr.Horizontal] = i.clientX, n[Gr.Vertical] = i.clientY, n[i.button + 2] = 1; const o = i; o.inputIndex = i.button + 2, this._onInputChanged(r, s, o), (a !== i.clientX || l !== i.clientY) && (o.inputIndex = Gr.Move, this._onInputChanged(r, s, o)); } }, this._pointerUpEvent = (i) => { var r, s, n, a, l; const o = this._getPointerType(i), u = o === cr.Mouse ? 0 : this._activeTouchIds.indexOf(i.pointerId); if (o === cr.Touch) { if (u === -1) return; this._activeTouchIds[u] = -1; } const h = (r = this._inputs[o]) === null || r === void 0 ? void 0 : r[u]; if (h && h[i.button + 2] !== 0) { const d = h[Gr.Horizontal], f = h[Gr.Vertical]; h[Gr.Horizontal] = i.clientX, h[Gr.Vertical] = i.clientY, h[i.button + 2] = 0; const p = i; i.pointerId === void 0 && (i.pointerId = this._mouseId), (d !== i.clientX || f !== i.clientY) && (p.inputIndex = Gr.Move, this._onInputChanged(o, u, p)), p.inputIndex = i.button + 2, o === cr.Mouse && this._mouseId >= 0 && (!((n = (s = this._elementToAttachTo).hasPointerCapture) === null || n === void 0) && n.call(s, this._mouseId)) ? 
this._elementToAttachTo.releasePointerCapture(this._mouseId) : i.pointerId && (!((l = (a = this._elementToAttachTo).hasPointerCapture) === null || l === void 0) && l.call(a, i.pointerId)) && this._elementToAttachTo.releasePointerCapture(i.pointerId), this._onInputChanged(o, u, p), o === cr.Touch && this._onDeviceDisconnected(o, u); } }, this._pointerCancelEvent = (i) => { var r, s, n, a; if (i.pointerType === "mouse") { const l = this._inputs[cr.Mouse][0]; this._mouseId >= 0 && (!((s = (r = this._elementToAttachTo).hasPointerCapture) === null || s === void 0) && s.call(r, this._mouseId)) && this._elementToAttachTo.releasePointerCapture(this._mouseId); for (let o = Gr.LeftClick; o <= Gr.BrowserForward; o++) if (l[o] === 1) { l[o] = 0; const u = kR.CreateDeviceEvent(cr.Mouse, 0, o, 0, this, this._elementToAttachTo); this._onInputChanged(cr.Mouse, 0, u); } } else { const l = this._activeTouchIds.indexOf(i.pointerId); if (l === -1) return; !((a = (n = this._elementToAttachTo).hasPointerCapture) === null || a === void 0) && a.call(n, i.pointerId) && this._elementToAttachTo.releasePointerCapture(i.pointerId), this._inputs[cr.Touch][l][Gr.LeftClick] = 0; const o = kR.CreateDeviceEvent(cr.Touch, l, Gr.LeftClick, 0, this, this._elementToAttachTo, i.pointerId); this._onInputChanged(cr.Touch, l, o), this._activeTouchIds[l] = -1, this._onDeviceDisconnected(cr.Touch, l); } }, this._wheelEventName = "onwheel" in document.createElement("div") ? "wheel" : document.onmousewheel !== void 0 ? "mousewheel" : "DOMMouseScroll"; let e = !1; const t = function() { }; try { const i = Object.defineProperty({}, "passive", { get: function() { e = !0; } }); this._elementToAttachTo.addEventListener("test", t, i), this._elementToAttachTo.removeEventListener("test", t, i); } catch { } this._pointerBlurEvent = () => { var i, r, s, n, a; if (this.isDeviceAvailable(cr.Mouse)) { const l = this._inputs[cr.Mouse][0]; this._mouseId >= 0 && (!((r = (i = this._elementToAttachTo).hasPointerCapture) === null || r === void 0) && r.call(i, this._mouseId)) && this._elementToAttachTo.releasePointerCapture(this._mouseId); for (let o = Gr.LeftClick; o <= Gr.BrowserForward; o++) if (l[o] === 1) { l[o] = 0; const u = kR.CreateDeviceEvent(cr.Mouse, 0, o, 0, this, this._elementToAttachTo); this._onInputChanged(cr.Mouse, 0, u); } } if (this.isDeviceAvailable(cr.Touch)) { const l = this._inputs[cr.Touch]; for (let o = 0; o < this._activeTouchIds.length; o++) { const u = this._activeTouchIds[o]; if (!((n = (s = this._elementToAttachTo).hasPointerCapture) === null || n === void 0) && n.call(s, u) && this._elementToAttachTo.releasePointerCapture(u), u !== -1 && ((a = l[o]) === null || a === void 0 ? 
void 0 : a[Gr.LeftClick]) === 1) { l[o][Gr.LeftClick] = 0; const h = kR.CreateDeviceEvent(cr.Touch, o, Gr.LeftClick, 0, this, this._elementToAttachTo, u); this._onInputChanged(cr.Touch, o, h), this._activeTouchIds[o] = -1, this._onDeviceDisconnected(cr.Touch, o); } } } }, this._pointerWheelEvent = (i) => { const r = cr.Mouse, s = 0; this._inputs[r] || (this._inputs[r] = []), this._inputs[r][s] || (this._pointerActive = !0, this._registerDevice(r, s, SZ)); const n = this._inputs[r][s]; if (n) { n[Gr.MouseWheelX] = i.deltaX || 0, n[Gr.MouseWheelY] = i.deltaY || i.wheelDelta || 0, n[Gr.MouseWheelZ] = i.deltaZ || 0; const a = i; i.pointerId === void 0 && (i.pointerId = this._mouseId), n[Gr.MouseWheelX] !== 0 && (a.inputIndex = Gr.MouseWheelX, this._onInputChanged(r, s, a)), n[Gr.MouseWheelY] !== 0 && (a.inputIndex = Gr.MouseWheelY, this._onInputChanged(r, s, a)), n[Gr.MouseWheelZ] !== 0 && (a.inputIndex = Gr.MouseWheelZ, this._onInputChanged(r, s, a)); } }, this._usingMacOS && this._isUsingChromium && (this._pointerMacOSChromeOutEvent = (i) => { i.buttons > 1 && this._pointerCancelEvent(i); }, this._elementToAttachTo.addEventListener("lostpointercapture", this._pointerMacOSChromeOutEvent)), this._elementToAttachTo.addEventListener(this._eventPrefix + "move", this._pointerMoveEvent), this._elementToAttachTo.addEventListener(this._eventPrefix + "down", this._pointerDownEvent), this._elementToAttachTo.addEventListener(this._eventPrefix + "up", this._pointerUpEvent), this._elementToAttachTo.addEventListener(this._eventPrefix + "cancel", this._pointerCancelEvent), this._elementToAttachTo.addEventListener("blur", this._pointerBlurEvent), this._elementToAttachTo.addEventListener(this._wheelEventName, this._pointerWheelEvent, e ? { passive: !1 } : !1), this._pointerInputClearObserver = this._engine.onEndFrameObservable.add(() => { if (this.isDeviceAvailable(cr.Mouse)) { const i = this._inputs[cr.Mouse][0]; i[Gr.MouseWheelX] = 0, i[Gr.MouseWheelY] = 0, i[Gr.MouseWheelZ] = 0; } }); } /** * Handle all actions that come from gamepad interaction */ _handleGamepadActions() { this._gamepadConnectedEvent = (e) => { this._addGamePad(e.gamepad); }, this._gamepadDisconnectedEvent = (e) => { if (this._gamepads) { const t = this._getGamepadDeviceType(e.gamepad.id), i = e.gamepad.index; this._unregisterDevice(t, i), delete this._gamepads[i]; } }, window.addEventListener("gamepadconnected", this._gamepadConnectedEvent), window.addEventListener("gamepaddisconnected", this._gamepadDisconnectedEvent); } /** * Update all non-event based devices with each frame * @param deviceType Enum specifying device type * @param deviceSlot "Slot" or index that device is referenced in * @param inputIndex Id of input to be checked */ _updateDevice(e, t, i) { const r = navigator.getGamepads()[t]; if (r && e === this._gamepads[t]) { const s = this._inputs[e][t]; i >= r.buttons.length ? s[i] = r.axes[i - r.buttons.length].valueOf() : s[i] = r.buttons[i].value; } } /** * Gets DeviceType from the device name * @param deviceName Name of Device from DeviceInputSystem * @returns DeviceType enum value */ _getGamepadDeviceType(e) { return e.indexOf("054c") !== -1 ? e.indexOf("0ce6") !== -1 ? cr.DualSense : cr.DualShock : e.indexOf("Xbox One") !== -1 || e.search("Xbox 360") !== -1 || e.search("xinput") !== -1 ? cr.Xbox : e.indexOf("057e") !== -1 ? cr.Switch : cr.Generic; } /** * Get DeviceType from a given pointer/mouse/touch event. 
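* For example (an illustrative note mirroring the check in the implementation below, not library documentation): an event whose pointerType is "touch" or "pen", or that carries a touches list, is reported as DeviceType.Touch; anything else is reported as DeviceType.Mouse.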
* @param evt PointerEvent to evaluate * @returns DeviceType interpreted from event */ _getPointerType(e) { let t = cr.Mouse; return (e.pointerType === "touch" || e.pointerType === "pen" || e.touches) && (t = cr.Touch), t; } } class pH { /** * Default Constructor * @param deviceInputSystem - Reference to DeviceInputSystem * @param deviceType - Type of device * @param deviceSlot - "Slot" or index that device is referenced in */ constructor(e, t, i = 0) { this.deviceType = t, this.deviceSlot = i, this.onInputChangedObservable = new Fe(), this._deviceInputSystem = e; } /** * Get input for specific input * @param inputIndex - index of specific input on device * @returns Input value from DeviceInputSystem */ getInput(e) { return this._deviceInputSystem.pollInput(this.deviceType, this.deviceSlot, e); } } class sce { constructor(e) { this._registeredManagers = new Array(), this._refCount = 0, this.registerManager = (n) => { for (let a = 0; a < this._devices.length; a++) { const l = this._devices[a]; for (const o in l) { const u = +o; n._addDevice(new pH(this._deviceInputSystem, a, u)); } } this._registeredManagers.push(n); }, this.unregisterManager = (n) => { const a = this._registeredManagers.indexOf(n); a > -1 && this._registeredManagers.splice(a, 1); }; const t = Object.keys(cr).length / 2; this._devices = new Array(t); const i = (n, a) => { this._devices[n] || (this._devices[n] = new Array()), this._devices[n][a] || (this._devices[n][a] = a); for (const l of this._registeredManagers) { const o = new pH(this._deviceInputSystem, n, a); l._addDevice(o); } }, r = (n, a) => { var l; !((l = this._devices[n]) === null || l === void 0) && l[a] && delete this._devices[n][a]; for (const o of this._registeredManagers) o._removeDevice(n, a); }, s = (n, a, l) => { if (l) for (const o of this._registeredManagers) o._onInputChanged(n, a, l); }; typeof _native < "u" ? this._deviceInputSystem = new ice(i, r, s) : this._deviceInputSystem = new rce(e, i, r, s); } dispose() { this._deviceInputSystem.dispose(); } } class mte { // Public Functions /** * Gets a DeviceSource, given a type and slot * @param deviceType - Type of Device * @param deviceSlot - Slot or ID of device * @returns DeviceSource */ getDeviceSource(e, t) { if (t === void 0) { if (this._firstDevice[e] === void 0) return null; t = this._firstDevice[e]; } return !this._devices[e] || this._devices[e][t] === void 0 ? null : this._devices[e][t]; } /** * Gets an array of DeviceSource objects for a given device type * @param deviceType - Type of Device * @returns All available DeviceSources of a given type */ getDeviceSources(e) { return this._devices[e] ? 
this._devices[e].filter((t) => !!t) : []; } /** * Default constructor * @param engine - Used to get canvas (if applicable) */ constructor(e) { const t = Object.keys(cr).length / 2; this._devices = new Array(t), this._firstDevice = new Array(t), this._engine = e, this._engine._deviceSourceManager || (this._engine._deviceSourceManager = new sce(e)), this._engine._deviceSourceManager._refCount++, this.onDeviceConnectedObservable = new Fe((i) => { for (const r of this._devices) if (r) for (const s of r) s && this.onDeviceConnectedObservable.notifyObserver(i, s); }), this.onDeviceDisconnectedObservable = new Fe(), this._engine._deviceSourceManager.registerManager(this), this._onDisposeObserver = e.onDisposeObservable.add(() => { this.dispose(); }); } /** * Dispose of DeviceSourceManager */ dispose() { this.onDeviceConnectedObservable.clear(), this.onDeviceDisconnectedObservable.clear(), this._engine._deviceSourceManager && (this._engine._deviceSourceManager.unregisterManager(this), --this._engine._deviceSourceManager._refCount < 1 && (this._engine._deviceSourceManager.dispose(), delete this._engine._deviceSourceManager)), this._engine.onDisposeObservable.remove(this._onDisposeObserver); } // Hidden Functions /** * @param deviceSource - Source to add * @internal */ _addDevice(e) { this._devices[e.deviceType] || (this._devices[e.deviceType] = new Array()), this._devices[e.deviceType][e.deviceSlot] || (this._devices[e.deviceType][e.deviceSlot] = e, this._updateFirstDevices(e.deviceType)), this.onDeviceConnectedObservable.notifyObservers(e); } /** * @param deviceType - DeviceType * @param deviceSlot - DeviceSlot * @internal */ _removeDevice(e, t) { var i, r; const s = (i = this._devices[e]) === null || i === void 0 ? void 0 : i[t]; this.onDeviceDisconnectedObservable.notifyObservers(s), !((r = this._devices[e]) === null || r === void 0) && r[t] && delete this._devices[e][t], this._updateFirstDevices(e); } /** * @param deviceType - DeviceType * @param deviceSlot - DeviceSlot * @param eventData - Event * @internal */ _onInputChanged(e, t, i) { var r, s; (s = (r = this._devices[e]) === null || r === void 0 ? 
void 0 : r[t]) === null || s === void 0 || s.onInputChangedObservable.notifyObservers(i); } // Private Functions _updateFirstDevices(e) { switch (e) { case cr.Keyboard: case cr.Mouse: this._firstDevice[e] = 0; break; case cr.Touch: case cr.DualSense: case cr.DualShock: case cr.Xbox: case cr.Switch: case cr.Generic: { delete this._firstDevice[e]; const t = this._devices[e]; if (t) { for (let i = 0; i < t.length; i++) if (t[i]) { this._firstDevice[e] = i; break; } } break; } } } } class MZ { constructor() { this._singleClick = !1, this._doubleClick = !1, this._hasSwiped = !1, this._ignore = !1; } get singleClick() { return this._singleClick; } get doubleClick() { return this._doubleClick; } get hasSwiped() { return this._hasSwiped; } get ignore() { return this._ignore; } set singleClick(e) { this._singleClick = e; } set doubleClick(e) { this._doubleClick = e; } set hasSwiped(e) { this._hasSwiped = e; } set ignore(e) { this._ignore = e; } } class Ac { /** * Creates a new InputManager * @param scene - defines the hosting scene */ constructor(e) { this._alreadyAttached = !1, this._meshPickProceed = !1, this._currentPickResult = null, this._previousPickResult = null, this._totalPointersPressed = 0, this._doubleClickOccured = !1, this._isSwiping = !1, this._swipeButtonPressed = -1, this._skipPointerTap = !1, this._isMultiTouchGesture = !1, this._pointerX = 0, this._pointerY = 0, this._startingPointerPosition = new at(0, 0), this._previousStartingPointerPosition = new at(0, 0), this._startingPointerTime = 0, this._previousStartingPointerTime = 0, this._pointerCaptures = {}, this._meshUnderPointerId = {}, this._movePointerInfo = null, this._cameraObserverCount = 0, this._delayedClicks = [null, null, null, null, null], this._deviceSourceManager = null, this._scene = e || gi.LastCreatedScene, this._scene; } /** * Gets the mesh that is currently under the pointer * @returns Mesh that the pointer is hovering over */ get meshUnderPointer() { return this._movePointerInfo && (this._movePointerInfo._generatePickInfo(), this._movePointerInfo = null), this._pointerOverMesh; } /** * When using more than one pointer (for example in XR) you can get the mesh under the specific pointer * @param pointerId - the pointer id to use * @returns The mesh under this pointer id or null if not found */ getMeshUnderPointerByPointerId(e) { return this._meshUnderPointerId[e] || null; } /** * Gets the pointer coordinates in 2D without any translation (ie. 
straight out of the pointer event) * @returns Vector with X/Y values directly from pointer event */ get unTranslatedPointer() { return new at(this._unTranslatedPointerX, this._unTranslatedPointerY); } /** * Gets or sets the current on-screen X position of the pointer * @returns Translated X with respect to screen */ get pointerX() { return this._pointerX; } set pointerX(e) { this._pointerX = e; } /** * Gets or sets the current on-screen Y position of the pointer * @returns Translated Y with respect to screen */ get pointerY() { return this._pointerY; } set pointerY(e) { this._pointerY = e; } _updatePointerPosition(e) { const t = this._scene.getEngine().getInputElementClientRect(); t && (this._pointerX = e.clientX - t.left, this._pointerY = e.clientY - t.top, this._unTranslatedPointerX = this._pointerX, this._unTranslatedPointerY = this._pointerY); } _processPointerMove(e, t) { const i = this._scene, r = i.getEngine(), s = r.getInputElement(); s && (s.tabIndex = r.canvasTabIndex, i.doNotHandleCursors || (s.style.cursor = i.defaultCursor)), this._setCursorAndPointerOverMesh(e, t, i); for (const l of i._pointerMoveStage) { e = e || this._pickMove(t); const o = !!(e != null && e.pickedMesh); e = l.action(this._unTranslatedPointerX, this._unTranslatedPointerY, e, o, s); } const n = t.inputIndex >= Gr.MouseWheelX && t.inputIndex <= Gr.MouseWheelZ ? si.POINTERWHEEL : si.POINTERMOVE; i.onPointerMove && (e = e || this._pickMove(t), i.onPointerMove(t, e, n)); let a; e ? (a = new cg(n, t, e), this._setRayOnPointerInfo(e, t)) : (a = new cg(n, t, null, this), this._movePointerInfo = a), i.onPointerObservable.hasObservers() && i.onPointerObservable.notifyObservers(a, n); } // Pointers handling /** @internal */ _setRayOnPointerInfo(e, t) { const i = this._scene; e && i._pickingAvailable && (e.ray || (e.ray = i.createPickingRay(t.offsetX, t.offsetY, Ae.Identity(), i.activeCamera))); } /** @internal */ _addCameraPointerObserver(e, t) { return this._cameraObserverCount++, this._scene.onPointerObservable.add(e, t); } /** @internal */ _removeCameraPointerObserver(e) { return this._cameraObserverCount--, this._scene.onPointerObservable.remove(e); } _checkForPicking() { return !!(this._scene.onPointerObservable.observers.length > this._cameraObserverCount || this._scene.onPointerPick); } _checkPrePointerObservable(e, t, i) { const r = this._scene, s = new _te(i, t, this._unTranslatedPointerX, this._unTranslatedPointerY); return e && (s.originalPickingInfo = e, s.ray = e.ray, e.originMesh && (s.nearInteractionPickingInfo = e)), r.onPrePointerObservable.notifyObservers(s, i), !!s.skipOnPointerObservable; } /** @internal */ _pickMove(e) { const t = this._scene, i = t.pick(this._unTranslatedPointerX, this._unTranslatedPointerY, t.pointerMovePredicate, t.pointerMoveFastCheck, t.cameraToUseForPointers, t.pointerMoveTrianglePredicate); return this._setCursorAndPointerOverMesh(i, e, t), i; } _setCursorAndPointerOverMesh(e, t, i) { const s = i.getEngine().getInputElement(); if (e != null && e.pickedMesh) { if (this.setPointerOverMesh(e.pickedMesh, t.pointerId, e, t), !i.doNotHandleCursors && s && this._pointerOverMesh) { const n = this._pointerOverMesh._getActionManagerForTrigger(); n && n.hasPointerTriggers && (s.style.cursor = n.hoverCursor || i.hoverCursor); } } else this.setPointerOverMesh(null, t.pointerId, e, t); } /** * Use this method to simulate a pointer move on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult - pickingInfo of the object wished to 
simulate pointer event on * @param pointerEventInit - pointer event state to be used when simulating the pointer event (eg. pointer id for multitouch) */ simulatePointerMove(e, t) { const i = new PointerEvent("pointermove", t); i.inputIndex = Gr.Move, !this._checkPrePointerObservable(e, i, si.POINTERMOVE) && this._processPointerMove(e, i); } /** * Use this method to simulate a pointer down on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult - pickingInfo of the object wished to simulate pointer event on * @param pointerEventInit - pointer event state to be used when simulating the pointer event (eg. pointer id for multitouch) */ simulatePointerDown(e, t) { const i = new PointerEvent("pointerdown", t); i.inputIndex = i.button + 2, !this._checkPrePointerObservable(e, i, si.POINTERDOWN) && this._processPointerDown(e, i); } _processPointerDown(e, t) { const i = this._scene; if (e != null && e.pickedMesh) { this._pickedDownMesh = e.pickedMesh; const n = e.pickedMesh._getActionManagerForTrigger(); if (n) { if (n.hasPickTriggers) switch (n.processTrigger(5, Ro.CreateNew(e.pickedMesh, t, e)), t.button) { case 0: n.processTrigger(2, Ro.CreateNew(e.pickedMesh, t, e)); break; case 1: n.processTrigger(4, Ro.CreateNew(e.pickedMesh, t, e)); break; case 2: n.processTrigger(3, Ro.CreateNew(e.pickedMesh, t, e)); break; } n.hasSpecificTrigger(8) && window.setTimeout(() => { const a = i.pick(this._unTranslatedPointerX, this._unTranslatedPointerY, (l) => l.isPickable && l.isVisible && l.isReady() && l.actionManager && l.actionManager.hasSpecificTrigger(8) && l === this._pickedDownMesh, !1, i.cameraToUseForPointers); a != null && a.pickedMesh && n && this._totalPointersPressed !== 0 && Date.now() - this._startingPointerTime > Ac.LongPressDelay && !this._isPointerSwiping() && (this._startingPointerTime = 0, n.processTrigger(8, Ro.CreateNew(a.pickedMesh, t))); }, Ac.LongPressDelay); } } else for (const n of i._pointerDownStage) e = n.action(this._unTranslatedPointerX, this._unTranslatedPointerY, e, t, !1); let r; const s = si.POINTERDOWN; e ? (i.onPointerDown && i.onPointerDown(t, e, s), r = new cg(s, t, e), this._setRayOnPointerInfo(e, t)) : r = new cg(s, t, null, this), i.onPointerObservable.hasObservers() && i.onPointerObservable.notifyObservers(r, s); } /** * @internal * @internals Boolean if delta for pointer exceeds drag movement threshold */ _isPointerSwiping() { return this._isSwiping; } /** * Use this method to simulate a pointer up on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult - pickingInfo of the object wished to simulate pointer event on * @param pointerEventInit - pointer event state to be used when simulating the pointer event (eg. pointer id for multitouch) * @param doubleTap - indicates that the pointer up event should be considered as part of a double click (false by default) */ simulatePointerUp(e, t, i) { const r = new PointerEvent("pointerup", t); r.inputIndex = Gr.Move; const s = new MZ(); i ? 
s.doubleClick = !0 : s.singleClick = !0, !this._checkPrePointerObservable(e, r, si.POINTERUP) && this._processPointerUp(e, r, s); } _processPointerUp(e, t, i) { const r = this._scene; if (e != null && e.pickedMesh) { if (this._pickedUpMesh = e.pickedMesh, this._pickedDownMesh === this._pickedUpMesh && (r.onPointerPick && r.onPointerPick(t, e), i.singleClick && !i.ignore && r.onPointerObservable.observers.length > this._cameraObserverCount)) { const n = si.POINTERPICK, a = new cg(n, t, e); this._setRayOnPointerInfo(e, t), r.onPointerObservable.notifyObservers(a, n); } const s = e.pickedMesh._getActionManagerForTrigger(); if (s && !i.ignore) { s.processTrigger(7, Ro.CreateNew(e.pickedMesh, t, e)), !i.hasSwiped && i.singleClick && s.processTrigger(1, Ro.CreateNew(e.pickedMesh, t, e)); const n = e.pickedMesh._getActionManagerForTrigger(6); i.doubleClick && n && n.processTrigger(6, Ro.CreateNew(e.pickedMesh, t, e)); } } else if (!i.ignore) for (const s of r._pointerUpStage) e = s.action(this._unTranslatedPointerX, this._unTranslatedPointerY, e, t, i.doubleClick); if (this._pickedDownMesh && this._pickedDownMesh !== this._pickedUpMesh) { const s = this._pickedDownMesh._getActionManagerForTrigger(16); s && s.processTrigger(16, Ro.CreateNew(this._pickedDownMesh, t)); } if (!i.ignore) { const s = new cg(si.POINTERUP, t, e); if (this._setRayOnPointerInfo(e, t), r.onPointerObservable.notifyObservers(s, si.POINTERUP), r.onPointerUp && r.onPointerUp(t, e, si.POINTERUP), !i.hasSwiped && !this._skipPointerTap && !this._isMultiTouchGesture) { let n = 0; if (i.singleClick ? n = si.POINTERTAP : i.doubleClick && (n = si.POINTERDOUBLETAP), n) { const a = new cg(n, t, e); r.onPointerObservable.hasObservers() && r.onPointerObservable.hasSpecificMask(n) && r.onPointerObservable.notifyObservers(a, n); } } } } /** * Gets a boolean indicating if the current pointer event is captured (meaning that the scene has already handled the pointer down) * @param pointerId - defines the pointer id to use in a multi-touch scenario (0 by default) * @returns true if the pointer was captured */ isPointerCaptured(e = 0) { return this._pointerCaptures[e]; } /** * Attach events to the canvas (To handle actionManagers triggers and raise onPointerMove, onPointerDown and onPointerUp * @param attachUp - defines if you want to attach events to pointerup * @param attachDown - defines if you want to attach events to pointerdown * @param attachMove - defines if you want to attach events to pointermove * @param elementToAttachTo - defines the target DOM element to attach to (will use the canvas by default) */ attachControl(e = !0, t = !0, i = !0, r = null) { const s = this._scene, n = s.getEngine(); r || (r = n.getInputElement()), this._alreadyAttached && this.detachControl(), r && (this._alreadyAttachedTo = r), this._deviceSourceManager = new mte(n), this._initActionManager = (a) => { if (!this._meshPickProceed) { const l = s.skipPointerUpPicking || s._registeredActions === 0 && !this._checkForPicking() && !s.onPointerUp ? null : s.pick(this._unTranslatedPointerX, this._unTranslatedPointerY, s.pointerUpPredicate, s.pointerUpFastCheck, s.cameraToUseForPointers); this._currentPickResult = l, l && (a = l.hit && l.pickedMesh ? 
l.pickedMesh._getActionManagerForTrigger() : null), this._meshPickProceed = !0; } return a; }, this._delayedSimpleClick = (a, l, o) => { if ((Date.now() - this._previousStartingPointerTime > Ac.DoubleClickDelay && !this._doubleClickOccured || a !== this._previousButtonPressed) && (this._doubleClickOccured = !1, l.singleClick = !0, l.ignore = !1, this._delayedClicks[a])) { const u = this._delayedClicks[a].evt, h = si.POINTERTAP, d = new cg(h, u, this._currentPickResult); s.onPointerObservable.hasObservers() && s.onPointerObservable.hasSpecificMask(h) && s.onPointerObservable.notifyObservers(d, h), this._delayedClicks[a] = null; } }, this._initClickEvent = (a, l, o, u) => { var h, d; const f = new MZ(); this._currentPickResult = null; let p = null, m = a.hasSpecificMask(si.POINTERPICK) || l.hasSpecificMask(si.POINTERPICK) || a.hasSpecificMask(si.POINTERTAP) || l.hasSpecificMask(si.POINTERTAP) || a.hasSpecificMask(si.POINTERDOUBLETAP) || l.hasSpecificMask(si.POINTERDOUBLETAP); !m && H_ && (p = this._initActionManager(p, f), p && (m = p.hasPickTriggers)); let _ = !1; if (m) { const v = o.button; if (f.hasSwiped = this._isPointerSwiping(), !f.hasSwiped) { let C = !Ac.ExclusiveDoubleClickMode; if (C || (C = !a.hasSpecificMask(si.POINTERDOUBLETAP) && !l.hasSpecificMask(si.POINTERDOUBLETAP), C && !H_.HasSpecificTrigger(6) && (p = this._initActionManager(p, f), p && (C = !p.hasSpecificTrigger(6)))), C) (Date.now() - this._previousStartingPointerTime > Ac.DoubleClickDelay || v !== this._previousButtonPressed) && (f.singleClick = !0, u(f, this._currentPickResult), _ = !0); else { const b = { evt: o, clickInfo: f, timeoutId: window.setTimeout(this._delayedSimpleClick.bind(this, v, f, u), Ac.DoubleClickDelay) }; this._delayedClicks[v] = b; } let x = a.hasSpecificMask(si.POINTERDOUBLETAP) || l.hasSpecificMask(si.POINTERDOUBLETAP); !x && H_.HasSpecificTrigger(6) && (p = this._initActionManager(p, f), p && (x = p.hasSpecificTrigger(6))), x && (v === this._previousButtonPressed && Date.now() - this._previousStartingPointerTime < Ac.DoubleClickDelay && !this._doubleClickOccured ? (!f.hasSwiped && !this._isPointerSwiping() ? (this._previousStartingPointerTime = 0, this._doubleClickOccured = !0, f.doubleClick = !0, f.ignore = !1, Ac.ExclusiveDoubleClickMode && this._delayedClicks[v] && (clearTimeout((h = this._delayedClicks[v]) === null || h === void 0 ? void 0 : h.timeoutId), this._delayedClicks[v] = null), u(f, this._currentPickResult)) : (this._doubleClickOccured = !1, this._previousStartingPointerTime = this._startingPointerTime, this._previousStartingPointerPosition.x = this._startingPointerPosition.x, this._previousStartingPointerPosition.y = this._startingPointerPosition.y, this._previousButtonPressed = v, Ac.ExclusiveDoubleClickMode ? (this._delayedClicks[v] && (clearTimeout((d = this._delayedClicks[v]) === null || d === void 0 ? 
void 0 : d.timeoutId), this._delayedClicks[v] = null), u(f, this._previousPickResult)) : u(f, this._currentPickResult)), _ = !0) : (this._doubleClickOccured = !1, this._previousStartingPointerTime = this._startingPointerTime, this._previousStartingPointerPosition.x = this._startingPointerPosition.x, this._previousStartingPointerPosition.y = this._startingPointerPosition.y, this._previousButtonPressed = v)); } } _ || u(f, this._currentPickResult); }, this._onPointerMove = (a) => { if (this._updatePointerPosition(a), !this._isSwiping && this._swipeButtonPressed !== -1 && (this._isSwiping = Math.abs(this._startingPointerPosition.x - this._pointerX) > Ac.DragMovementThreshold || Math.abs(this._startingPointerPosition.y - this._pointerY) > Ac.DragMovementThreshold), n.isPointerLock && n._verifyPointerLock(), this._checkPrePointerObservable(null, a, a.inputIndex >= Gr.MouseWheelX && a.inputIndex <= Gr.MouseWheelZ ? si.POINTERWHEEL : si.POINTERMOVE) || !s.cameraToUseForPointers && !s.activeCamera) return; if (s.skipPointerMovePicking) { this._processPointerMove(new ku(), a); return; } s.pointerMovePredicate || (s.pointerMovePredicate = (o) => o.isPickable && o.isVisible && o.isReady() && o.isEnabled() && (o.enablePointerMoveEvents || s.constantlyUpdateMeshUnderPointer || o._getActionManagerForTrigger() !== null) && (!s.cameraToUseForPointers || (s.cameraToUseForPointers.layerMask & o.layerMask) !== 0)); const l = s._registeredActions > 0 || s.constantlyUpdateMeshUnderPointer ? this._pickMove(a) : null; this._processPointerMove(l, a); }, this._onPointerDown = (a) => { var l; if (this._totalPointersPressed++, this._pickedDownMesh = null, this._meshPickProceed = !1, Ac.ExclusiveDoubleClickMode) { for (let u = 0; u < this._delayedClicks.length; u++) if (this._delayedClicks[u]) if (a.button === u) clearTimeout((l = this._delayedClicks[u]) === null || l === void 0 ? void 0 : l.timeoutId); else { const h = this._delayedClicks[u].clickInfo; this._doubleClickOccured = !1, h.singleClick = !0, h.ignore = !1; const d = this._delayedClicks[u].evt, f = si.POINTERTAP, p = new cg(f, d, this._currentPickResult); s.onPointerObservable.hasObservers() && s.onPointerObservable.hasSpecificMask(f) && s.onPointerObservable.notifyObservers(p, f), this._delayedClicks[u] = null; } } if (this._updatePointerPosition(a), this._swipeButtonPressed === -1 && (this._swipeButtonPressed = a.button), s.preventDefaultOnPointerDown && r && (a.preventDefault(), r.focus()), this._startingPointerPosition.x = this._pointerX, this._startingPointerPosition.y = this._pointerY, this._startingPointerTime = Date.now(), this._checkPrePointerObservable(null, a, si.POINTERDOWN) || !s.cameraToUseForPointers && !s.activeCamera) return; this._pointerCaptures[a.pointerId] = !0, s.pointerDownPredicate || (s.pointerDownPredicate = (u) => u.isPickable && u.isVisible && u.isReady() && u.isEnabled() && (!s.cameraToUseForPointers || (s.cameraToUseForPointers.layerMask & u.layerMask) !== 0)), this._pickedDownMesh = null; let o; s.skipPointerDownPicking || s._registeredActions === 0 && !this._checkForPicking() && !s.onPointerDown ? 
o = new ku() : o = s.pick(this._unTranslatedPointerX, this._unTranslatedPointerY, s.pointerDownPredicate, s.pointerDownFastCheck, s.cameraToUseForPointers), this._processPointerDown(o, a); }, this._onPointerUp = (a) => { this._totalPointersPressed !== 0 && (this._totalPointersPressed--, this._pickedUpMesh = null, this._meshPickProceed = !1, this._updatePointerPosition(a), s.preventDefaultOnPointerUp && r && (a.preventDefault(), r.focus()), this._initClickEvent(s.onPrePointerObservable, s.onPointerObservable, a, (l, o) => { if (s.onPrePointerObservable.hasObservers() && (this._skipPointerTap = !1, !l.ignore)) { if (this._checkPrePointerObservable(null, a, si.POINTERUP)) { this._swipeButtonPressed === a.button && (this._isSwiping = !1, this._swipeButtonPressed = -1), a.buttons === 0 && (this._pointerCaptures[a.pointerId] = !1); return; } l.hasSwiped || (l.singleClick && s.onPrePointerObservable.hasSpecificMask(si.POINTERTAP) && this._checkPrePointerObservable(null, a, si.POINTERTAP) && (this._skipPointerTap = !0), l.doubleClick && s.onPrePointerObservable.hasSpecificMask(si.POINTERDOUBLETAP) && this._checkPrePointerObservable(null, a, si.POINTERDOUBLETAP) && (this._skipPointerTap = !0)); } if (!this._pointerCaptures[a.pointerId]) { this._swipeButtonPressed === a.button && (this._isSwiping = !1, this._swipeButtonPressed = -1); return; } a.buttons === 0 && (this._pointerCaptures[a.pointerId] = !1), !(!s.cameraToUseForPointers && !s.activeCamera) && (s.pointerUpPredicate || (s.pointerUpPredicate = (u) => u.isPickable && u.isVisible && u.isReady() && u.isEnabled() && (!s.cameraToUseForPointers || (s.cameraToUseForPointers.layerMask & u.layerMask) !== 0)), !this._meshPickProceed && (H_ && H_.HasTriggers || this._checkForPicking() || s.onPointerUp) && this._initActionManager(null, l), o || (o = this._currentPickResult), this._processPointerUp(o, a, l), this._previousPickResult = this._currentPickResult, this._swipeButtonPressed === a.button && (this._isSwiping = !1, this._swipeButtonPressed = -1)); })); }, this._onKeyDown = (a) => { const l = rx.KEYDOWN; if (s.onPreKeyboardObservable.hasObservers()) { const o = new lH(l, a); if (s.onPreKeyboardObservable.notifyObservers(o, l), o.skipOnKeyboardObservable) return; } if (s.onKeyboardObservable.hasObservers()) { const o = new rB(l, a); s.onKeyboardObservable.notifyObservers(o, l); } s.actionManager && s.actionManager.processTrigger(14, Ro.CreateNewFromScene(s, a)); }, this._onKeyUp = (a) => { const l = rx.KEYUP; if (s.onPreKeyboardObservable.hasObservers()) { const o = new lH(l, a); if (s.onPreKeyboardObservable.notifyObservers(o, l), o.skipOnKeyboardObservable) return; } if (s.onKeyboardObservable.hasObservers()) { const o = new rB(l, a); s.onKeyboardObservable.notifyObservers(o, l); } s.actionManager && s.actionManager.processTrigger(15, Ro.CreateNewFromScene(s, a)); }, this._deviceSourceManager.onDeviceConnectedObservable.add((a) => { a.deviceType === cr.Mouse ? a.onInputChangedObservable.add((l) => { l.inputIndex === Gr.LeftClick || l.inputIndex === Gr.MiddleClick || l.inputIndex === Gr.RightClick || l.inputIndex === Gr.BrowserBack || l.inputIndex === Gr.BrowserForward ? t && a.getInput(l.inputIndex) === 1 ? this._onPointerDown(l) : e && a.getInput(l.inputIndex) === 0 && this._onPointerUp(l) : i && (l.inputIndex === Gr.Move ? this._onPointerMove(l) : (l.inputIndex === Gr.MouseWheelX || l.inputIndex === Gr.MouseWheelY || l.inputIndex === Gr.MouseWheelZ) && this._onPointerMove(l)); }) : a.deviceType === cr.Touch ? 
a.onInputChangedObservable.add((l) => { l.inputIndex === Gr.LeftClick && (t && a.getInput(l.inputIndex) === 1 ? (this._onPointerDown(l), this._totalPointersPressed > 1 && (this._isMultiTouchGesture = !0)) : e && a.getInput(l.inputIndex) === 0 && (this._onPointerUp(l), this._totalPointersPressed === 0 && (this._isMultiTouchGesture = !1))), i && l.inputIndex === Gr.Move && this._onPointerMove(l); }) : a.deviceType === cr.Keyboard && a.onInputChangedObservable.add((l) => { l.type === "keydown" ? this._onKeyDown(l) : l.type === "keyup" && this._onKeyUp(l); }); }), this._alreadyAttached = !0; } /** * Detaches all event handlers */ detachControl() { this._alreadyAttached && (this._deviceSourceManager.dispose(), this._deviceSourceManager = null, this._alreadyAttachedTo && !this._scene.doNotHandleCursors && (this._alreadyAttachedTo.style.cursor = this._scene.defaultCursor), this._alreadyAttached = !1, this._alreadyAttachedTo = null); } /** * Force the value of meshUnderPointer * @param mesh - defines the mesh to use * @param pointerId - optional pointer id when using more than one pointer. Defaults to 0 * @param pickResult - optional pickingInfo data used to find mesh * @param evt - optional pointer event */ setPointerOverMesh(e, t = 0, i, r) { if (this._meshUnderPointerId[t] === e && (!e || !e._internalAbstractMeshDataInfo._pointerOverDisableMeshTesting)) return; const s = this._meshUnderPointerId[t]; let n; s && (n = s._getActionManagerForTrigger(10), n && n.processTrigger(10, Ro.CreateNew(s, r, { pointerId: t }))), e ? (this._meshUnderPointerId[t] = e, this._pointerOverMesh = e, n = e._getActionManagerForTrigger(9), n && n.processTrigger(9, Ro.CreateNew(e, r, { pointerId: t, pickResult: i }))) : (delete this._meshUnderPointerId[t], this._pointerOverMesh = null); } /** * Gets the mesh under the pointer * @returns a Mesh or null if no mesh is under the pointer */ getPointerOverMesh() { return this.meshUnderPointer; } /** * @param mesh - Mesh to invalidate * @internal */ _invalidateMesh(e) { this._pointerOverMesh === e && (this._pointerOverMesh = null), this._pickedDownMesh === e && (this._pickedDownMesh = null), this._pickedUpMesh === e && (this._pickedUpMesh = null); for (const t in this._meshUnderPointerId) this._meshUnderPointerId[t] === e && delete this._meshUnderPointerId[t]; } } Ac.DragMovementThreshold = 10; Ac.LongPressDelay = 500; Ac.DoubleClickDelay = 300; Ac.ExclusiveDoubleClickMode = !1; class Vc { /** * Returns the smallest value ever */ get min() { return this._min; } /** * Returns the biggest value ever */ get max() { return this._max; } /** * Returns the average value since the performance counter is running */ get average() { return this._average; } /** * Returns the average value of the last second the counter was monitored */ get lastSecAverage() { return this._lastSecAverage; } /** * Returns the current value */ get current() { return this._current; } /** * Gets the accumulated total */ get total() { return this._totalAccumulated; } /** * Gets the total value count */ get count() { return this._totalValueCount; } /** * Creates a new counter */ constructor() { this._startMonitoringTime = 0, this._min = 0, this._max = 0, this._average = 0, this._lastSecAverage = 0, this._current = 0, this._totalValueCount = 0, this._totalAccumulated = 0, this._lastSecAccumulated = 0, this._lastSecTime = 0, this._lastSecValueCount = 0; } /** * Call this method to start monitoring a new frame. 
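* A minimal sketch of the accumulation pattern described below (hedged: `counter` is a hypothetical PerfCounter instance, not a name from this bundle):
* @example
* counter.fetchNewFrame();      // once, at the start of the frame
* counter.beginMonitoring();    // before each block to be measured
* counter.endMonitoring(false); // accumulate the recorded time into the current frame
* counter.endFrame();           // once, at the end of the frame; updates min/max/average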
* This scenario is typically used when you accumulate monitoring time many times for a single frame: you call this method at the start of the frame, then beginMonitoring to start recording and endMonitoring(false) to accumulate the recorded time into the PerfCounter, or addCount() to accumulate a monitored count. */ fetchNewFrame() { this._totalValueCount++, this._current = 0, this._lastSecValueCount++; } /** * Call this method to monitor a count of something (e.g. mesh drawn in viewport count) * @param newCount the count value to add to the monitored count * @param fetchResult true when it's the last time in the frame you add to the counter and you wish to update the statistics properties (min/max/average), false if you only want to accumulate the count without updating the statistics yet. */ addCount(e, t) { Vc.Enabled && (this._current += e, t && this._fetchResult()); } /** * Start monitoring this performance counter */ beginMonitoring() { Vc.Enabled && (this._startMonitoringTime = Gs.Now); } /** * Compute the time elapsed since the previous beginMonitoring() call. * @param newFrame true by default to fetch the result and monitor a new frame, if false the time monitored will be added to the current frame counter */ endMonitoring(e = !0) { if (!Vc.Enabled) return; e && this.fetchNewFrame(); const t = Gs.Now; this._current = t - this._startMonitoringTime, e && this._fetchResult(); } /** * Call this method to end the monitoring of a frame. * This scenario is typically used when you accumulate monitoring time many times for a single frame: you call this method at the end of the frame, after beginMonitoring to start recording and endMonitoring(false) to accumulate the recorded time into the PerfCounter or addCount() to accumulate a monitored count. */ endFrame() { this._fetchResult(); } /** @internal */ _fetchResult() { this._totalAccumulated += this._current, this._lastSecAccumulated += this._current, this._min = Math.min(this._min, this._current), this._max = Math.max(this._max, this._current), this._average = this._totalAccumulated / this._totalValueCount; const e = Gs.Now; e - this._lastSecTime > 1e3 && (this._lastSecAverage = this._lastSecAccumulated / this._lastSecValueCount, this._lastSecTime = e, this._lastSecAccumulated = 0, this._lastSecValueCount = 0); } } Vc.Enabled = !0; class Sd { /** * Creates a Plane object according to the given floats a, b, c, d and the plane equation: ax + by + cz + d = 0 * @param a a component of the plane * @param b b component of the plane * @param c c component of the plane * @param d d component of the plane */ constructor(e, t, i, r) { this.normal = new D(e, t, i), this.d = r; } /** * @returns the plane coordinates as a new array of 4 elements [a, b, c, d]. */ asArray() { return [this.normal.x, this.normal.y, this.normal.z, this.d]; } // Methods /** * @returns a new plane copied from the current Plane. */ clone() { return new Sd(this.normal.x, this.normal.y, this.normal.z, this.d); } /** * @returns the string "Plane". */ getClassName() { return "Plane"; } /** * @returns the Plane hash code. */ getHashCode() { let e = this.normal.getHashCode(); return e = e * 397 ^ (this.d | 0), e; } /** * Normalize the current Plane in place. * @returns the updated Plane. 
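* @example
* // Hedged sketch using the public (un-minified) class name; values chosen for illustration:
* // const p = new Plane(0, 3, 0, 6);
* // p.normalize(); // normal becomes (0, 1, 0) and d becomes 2, describing the same plane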
*/ normalize() { const e = Math.sqrt(this.normal.x * this.normal.x + this.normal.y * this.normal.y + this.normal.z * this.normal.z); let t = 0; return e !== 0 && (t = 1 / e), this.normal.x *= t, this.normal.y *= t, this.normal.z *= t, this.d *= t, this; } /** * Applies a transformation the plane and returns the result * @param transformation the transformation matrix to be applied to the plane * @returns a new Plane as the result of the transformation of the current Plane by the given matrix. */ transform(e) { const t = Sd._TmpMatrix; e.invertToRef(t); const i = t.m, r = this.normal.x, s = this.normal.y, n = this.normal.z, a = this.d, l = r * i[0] + s * i[1] + n * i[2] + a * i[3], o = r * i[4] + s * i[5] + n * i[6] + a * i[7], u = r * i[8] + s * i[9] + n * i[10] + a * i[11], h = r * i[12] + s * i[13] + n * i[14] + a * i[15]; return new Sd(l, o, u, h); } /** * Compute the dot product between the point and the plane normal * @param point point to calculate the dot product with * @returns the dot product (float) of the point coordinates and the plane normal. */ dotCoordinate(e) { return this.normal.x * e.x + this.normal.y * e.y + this.normal.z * e.z + this.d; } /** * Updates the current Plane from the plane defined by the three given points. * @param point1 one of the points used to construct the plane * @param point2 one of the points used to construct the plane * @param point3 one of the points used to construct the plane * @returns the updated Plane. */ copyFromPoints(e, t, i) { const r = t.x - e.x, s = t.y - e.y, n = t.z - e.z, a = i.x - e.x, l = i.y - e.y, o = i.z - e.z, u = s * o - n * l, h = n * a - r * o, d = r * l - s * a, f = Math.sqrt(u * u + h * h + d * d); let p; return f !== 0 ? p = 1 / f : p = 0, this.normal.x = u * p, this.normal.y = h * p, this.normal.z = d * p, this.d = -(this.normal.x * e.x + this.normal.y * e.y + this.normal.z * e.z), this; } /** * Checks if the plane is facing a given direction (meaning if the plane's normal is pointing in the opposite direction of the given vector). * Note that for this function to work as expected you should make sure that: * - direction and the plane normal are normalized * - epsilon is a number just bigger than -1, something like -0.99 for eg * @param direction the direction to check if the plane is facing * @param epsilon value the dot product is compared against (returns true if dot <= epsilon) * @returns True if the plane is facing the given direction */ isFrontFacingTo(e, t) { return D.Dot(this.normal, e) <= t; } /** * Calculates the distance to a point * @param point point to calculate distance to * @returns the signed distance (float) from the given point to the Plane. */ signedDistanceTo(e) { return D.Dot(e, this.normal) + this.d; } // Statics /** * Creates a plane from an array * @param array the array to create a plane from * @returns a new Plane from the given array. */ static FromArray(e) { return new Sd(e[0], e[1], e[2], e[3]); } /** * Creates a plane from three points * @param point1 point used to create the plane * @param point2 point used to create the plane * @param point3 point used to create the plane * @returns a new Plane defined by the three given points. */ static FromPoints(e, t, i) { const r = new Sd(0, 0, 0, 0); return r.copyFromPoints(e, t, i), r; } /** * Creates a plane from an origin point and a normal * @param origin origin of the plane to be constructed * @param normal normal of the plane to be constructed * @returns a new Plane the normal vector to this plane at the given origin point. 
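* @example
* // Hedged sketch with the public (un-minified) names; a plane through (0, 1, 0) facing up:
* // const ground = Plane.FromPositionAndNormal(new Vector3(0, 1, 0), new Vector3(0, 1, 0));
* // ground.signedDistanceTo(new Vector3(0, 3, 0)); // 2: the point lies 2 units along the normal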
*/ static FromPositionAndNormal(e, t) { const i = new Sd(0, 0, 0, 0); return this.FromPositionAndNormalToRef(e, t, i); } /** * Updates the given Plane "result" from an origin point and a normal. * @param origin origin of the plane to be constructed * @param normal the normalized normals of the plane to be constructed * @param result defines the Plane where to store the result * @returns result input */ static FromPositionAndNormalToRef(e, t, i) { return i.normal.copyFrom(t), i.normal.normalize(), i.d = -e.dot(i.normal), i; } /** * Calculates the distance from a plane and a point * @param origin origin of the plane to be constructed * @param normal normal of the plane to be constructed * @param point point to calculate distance to * @returns the signed distance between the plane defined by the normal vector at the "origin"" point and the given other point. */ static SignedDistanceToPlaneFromPositionAndNormal(e, t, i) { const r = -(t.x * e.x + t.y * e.y + t.z * e.z); return D.Dot(i, t) + r; } } Sd._TmpMatrix = Ae.Identity(); class gm { /** * Gets the planes representing the frustum * @param transform matrix to be applied to the returned planes * @returns a new array of 6 Frustum planes computed by the given transformation matrix. */ static GetPlanes(e) { const t = []; for (let i = 0; i < 6; i++) t.push(new Sd(0, 0, 0, 0)); return gm.GetPlanesToRef(e, t), t; } /** * Gets the near frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetNearPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] + i[2], t.normal.y = i[7] + i[6], t.normal.z = i[11] + i[10], t.d = i[15] + i[14], t.normalize(); } /** * Gets the far frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetFarPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] - i[2], t.normal.y = i[7] - i[6], t.normal.z = i[11] - i[10], t.d = i[15] - i[14], t.normalize(); } /** * Gets the left frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetLeftPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] + i[0], t.normal.y = i[7] + i[4], t.normal.z = i[11] + i[8], t.d = i[15] + i[12], t.normalize(); } /** * Gets the right frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetRightPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] - i[0], t.normal.y = i[7] - i[4], t.normal.z = i[11] - i[8], t.d = i[15] - i[12], t.normalize(); } /** * Gets the top frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetTopPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] - i[1], t.normal.y = i[7] - i[5], t.normal.z = i[11] - i[9], t.d = i[15] - i[13], t.normalize(); } /** * Gets the bottom frustum plane transformed by the transform matrix * @param transform transformation matrix to be applied to the resulting frustum plane * @param frustumPlane the resulting frustum plane */ static GetBottomPlaneToRef(e, t) { const i = e.m; t.normal.x = i[3] + i[1], t.normal.y = i[7] 
+ i[5], t.normal.z = i[11] + i[9], t.d = i[15] + i[13], t.normalize(); } /** * Sets the given array "frustumPlanes" with the 6 Frustum planes computed by the given transformation matrix. * @param transform transformation matrix to be applied to the resulting frustum planes * @param frustumPlanes the resulting frustum planes */ static GetPlanesToRef(e, t) { gm.GetNearPlaneToRef(e, t[0]), gm.GetFarPlaneToRef(e, t[1]), gm.GetLeftPlaneToRef(e, t[2]), gm.GetRightPlaneToRef(e, t[3]), gm.GetTopPlaneToRef(e, t[4]), gm.GetBottomPlaneToRef(e, t[5]); } /** * Tests if a point is located between the frustum planes. * @param point defines the point to test * @param frustumPlanes defines the frustum planes to test * @returns true if the point is located between the frustum planes */ static IsPointInFrustum(e, t) { for (let i = 0; i < 6; i++) if (t[i].dotCoordinate(e) < 0) return !1; return !0; } } class LL { /** * Gets a unique (relative to the current scene) Id */ static get UniqueId() { const e = this._UniqueIdCounter; return this._UniqueIdCounter++, e; } } LL._UniqueIdCounter = 1; class ia { /** * Sort function to order lights for rendering. * @param a First Light object to compare to second. * @param b Second Light object to compare to the first. * @returns -1 to decrease a's index relative to b, 0 for no change, 1 to increase a's index relative to b. */ static CompareLightsPriority(e, t) { return e.shadowEnabled !== t.shadowEnabled ? (t.shadowEnabled ? 1 : 0) - (e.shadowEnabled ? 1 : 0) : t.renderPriority - e.renderPriority; } } ia.FALLOFF_DEFAULT = 0; ia.FALLOFF_PHYSICAL = 1; ia.FALLOFF_GLTF = 2; ia.FALLOFF_STANDARD = 3; ia.LIGHTMAP_DEFAULT = 0; ia.LIGHTMAP_SPECULAR = 1; ia.LIGHTMAP_SHADOWSONLY = 2; ia.INTENSITYMODE_AUTOMATIC = 0; ia.INTENSITYMODE_LUMINOUSPOWER = 1; ia.INTENSITYMODE_LUMINOUSINTENSITY = 2; ia.INTENSITYMODE_ILLUMINANCE = 3; ia.INTENSITYMODE_LUMINANCE = 4; ia.LIGHTTYPEID_POINTLIGHT = 0; ia.LIGHTTYPEID_DIRECTIONALLIGHT = 1; ia.LIGHTTYPEID_SPOTLIGHT = 2; ia.LIGHTTYPEID_HEMISPHERICLIGHT = 3; class nce { constructor() { this.pointerDownFastCheck = !1, this.pointerUpFastCheck = !1, this.pointerMoveFastCheck = !1, this.skipPointerMovePicking = !1, this.skipPointerDownPicking = !1, this.skipPointerUpPicking = !1; } } var $A; (function(c) { c[c.BackwardCompatible = 0] = "BackwardCompatible", c[c.Intermediate = 1] = "Intermediate", c[c.Aggressive = 2] = "Aggressive"; })($A || ($A = {})); class ii extends Yl { /** * Factory used to create the default material. * @param scene The scene to create the material for * @returns The default material */ static DefaultMaterialFactory(e) { throw yr("StandardMaterial"); } /** * Factory used to create the collision coordinator. * @returns The collision coordinator */ static CollisionCoordinatorFactory() { throw yr("DefaultCollisionCoordinator"); } /** * Texture used in all pbr material as the reflection texture. * As in the majority of the scene they are the same (exception for multi room and so on), * this is easier to reference from here than from all the materials. */ get environmentTexture() { return this._environmentTexture; } /** * Texture used in all pbr material as the reflection texture. * As in the majority of the scene they are the same (exception for multi room and so on), * this is easier to set here than in all the materials. 
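* @example
* // Hedged sketch with the public API names (the .env URL is a placeholder, not a real asset):
* // scene.environmentTexture = CubeTexture.CreateFromPrefilteredData("env/environment.env", scene);
* // Per the setter below, assigning a new texture marks all materials dirty so they pick up the new reflection source.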
*/ set environmentTexture(e) { this._environmentTexture !== e && (this._environmentTexture = e, this.markAllMaterialsAsDirty(1)); } /** * Default image processing configuration used either in the rendering * Forward main pass or through the imageProcessingPostProcess if present. * As in the majority of the scene they are the same (exception for multi camera), * this is easier to reference from here than from all the materials and post process. * * No setter as it is a shared configuration; you can set the values instead. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Gets or sets a value indicating how to treat performance relative to ease of use and backward compatibility */ get performancePriority() { return this._performancePriority; } set performancePriority(e) { if (e !== this._performancePriority) { switch (this._performancePriority = e, e) { case $A.BackwardCompatible: this.skipFrustumClipping = !1, this._renderingManager.maintainStateBetweenFrames = !1, this.skipPointerMovePicking = !1, this.autoClear = !0; break; case $A.Intermediate: this.skipFrustumClipping = !1, this._renderingManager.maintainStateBetweenFrames = !1, this.skipPointerMovePicking = !0, this.autoClear = !1; break; case $A.Aggressive: this.skipFrustumClipping = !0, this._renderingManager.maintainStateBetweenFrames = !0, this.skipPointerMovePicking = !0, this.autoClear = !1; break; } this.onScenePerformancePriorityChangedObservable.notifyObservers(e); } } /** * Gets or sets a boolean indicating if all rendering must be done in wireframe */ set forceWireframe(e) { this._forceWireframe !== e && (this._forceWireframe = e, this.markAllMaterialsAsDirty(16)); } get forceWireframe() { return this._forceWireframe; } /** * Gets or sets a boolean indicating if we should skip the frustum clipping part of the active meshes selection */ set skipFrustumClipping(e) { this._skipFrustumClipping !== e && (this._skipFrustumClipping = e); } get skipFrustumClipping() { return this._skipFrustumClipping; } /** * Gets or sets a boolean indicating if all rendering must be done in point cloud */ set forcePointsCloud(e) { this._forcePointsCloud !== e && (this._forcePointsCloud = e, this.markAllMaterialsAsDirty(16)); } get forcePointsCloud() { return this._forcePointsCloud; } /** * Gets or sets the animation properties override */ get animationPropertiesOverride() { return this._animationPropertiesOverride; } set animationPropertiesOverride(e) { this._animationPropertiesOverride = e; } /** Sets a function to be executed when this scene is disposed. 
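* @example
* // Hedged sketch: per the setter below, this simply re-registers an observer on onDisposeObservable
* // scene.onDispose = () => console.log("scene disposed");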
*/ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** Sets a function to be executed before rendering this scene */ set beforeRender(e) { this._onBeforeRenderObserver && this.onBeforeRenderObservable.remove(this._onBeforeRenderObserver), e && (this._onBeforeRenderObserver = this.onBeforeRenderObservable.add(e)); } /** Sets a function to be executed after rendering this scene */ set afterRender(e) { this._onAfterRenderObserver && this.onAfterRenderObservable.remove(this._onAfterRenderObserver), e && (this._onAfterRenderObserver = this.onAfterRenderObservable.add(e)); } /** Sets a function to be executed before rendering a camera*/ set beforeCameraRender(e) { this._onBeforeCameraRenderObserver && this.onBeforeCameraRenderObservable.remove(this._onBeforeCameraRenderObserver), this._onBeforeCameraRenderObserver = this.onBeforeCameraRenderObservable.add(e); } /** Sets a function to be executed after rendering a camera*/ set afterCameraRender(e) { this._onAfterCameraRenderObserver && this.onAfterCameraRenderObservable.remove(this._onAfterCameraRenderObserver), this._onAfterCameraRenderObserver = this.onAfterCameraRenderObservable.add(e); } /** * Gets or sets a predicate used to select candidate meshes for a pointer down event */ get pointerDownPredicate() { return this._pointerPickingConfiguration.pointerDownPredicate; } set pointerDownPredicate(e) { this._pointerPickingConfiguration.pointerDownPredicate = e; } /** * Gets or sets a predicate used to select candidate meshes for a pointer up event */ get pointerUpPredicate() { return this._pointerPickingConfiguration.pointerUpPredicate; } set pointerUpPredicate(e) { this._pointerPickingConfiguration.pointerUpPredicate = e; } /** * Gets or sets a predicate used to select candidate meshes for a pointer move event */ get pointerMovePredicate() { return this._pointerPickingConfiguration.pointerMovePredicate; } set pointerMovePredicate(e) { this._pointerPickingConfiguration.pointerMovePredicate = e; } /** * Gets or sets a predicate used to select candidate meshes for a pointer down event */ get pointerDownFastCheck() { return this._pointerPickingConfiguration.pointerDownFastCheck; } set pointerDownFastCheck(e) { this._pointerPickingConfiguration.pointerDownFastCheck = e; } /** * Gets or sets a predicate used to select candidate meshes for a pointer up event */ get pointerUpFastCheck() { return this._pointerPickingConfiguration.pointerUpFastCheck; } set pointerUpFastCheck(e) { this._pointerPickingConfiguration.pointerUpFastCheck = e; } /** * Gets or sets a predicate used to select candidate meshes for a pointer move event */ get pointerMoveFastCheck() { return this._pointerPickingConfiguration.pointerMoveFastCheck; } set pointerMoveFastCheck(e) { this._pointerPickingConfiguration.pointerMoveFastCheck = e; } /** * Gets or sets a boolean indicating if the user want to entirely skip the picking phase when a pointer move event occurs. */ get skipPointerMovePicking() { return this._pointerPickingConfiguration.skipPointerMovePicking; } set skipPointerMovePicking(e) { this._pointerPickingConfiguration.skipPointerMovePicking = e; } /** * Gets or sets a boolean indicating if the user want to entirely skip the picking phase when a pointer down event occurs. 
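* @example
* // Hedged sketch: opting out of picking on pointer down for performance
* // scene.skipPointerDownPicking = true; // POINTERDOWN observers then receive an empty pick info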
*/ get skipPointerDownPicking() { return this._pointerPickingConfiguration.skipPointerDownPicking; } set skipPointerDownPicking(e) { this._pointerPickingConfiguration.skipPointerDownPicking = e; } /** * Gets or sets a boolean indicating if the user want to entirely skip the picking phase when a pointer up event occurs. Off by default. */ get skipPointerUpPicking() { return this._pointerPickingConfiguration.skipPointerUpPicking; } set skipPointerUpPicking(e) { this._pointerPickingConfiguration.skipPointerUpPicking = e; } /** * Gets the pointer coordinates without any translation (ie. straight out of the pointer event) */ get unTranslatedPointer() { return this._inputManager.unTranslatedPointer; } /** * Gets or sets the distance in pixel that you have to move to prevent some events. Default is 10 pixels */ static get DragMovementThreshold() { return Ac.DragMovementThreshold; } static set DragMovementThreshold(e) { Ac.DragMovementThreshold = e; } /** * Time in milliseconds to wait to raise long press events if button is still pressed. Default is 500 ms */ static get LongPressDelay() { return Ac.LongPressDelay; } static set LongPressDelay(e) { Ac.LongPressDelay = e; } /** * Time in milliseconds to wait to raise long press events if button is still pressed. Default is 300 ms */ static get DoubleClickDelay() { return Ac.DoubleClickDelay; } static set DoubleClickDelay(e) { Ac.DoubleClickDelay = e; } /** If you need to check double click without raising a single click at first click, enable this flag */ static get ExclusiveDoubleClickMode() { return Ac.ExclusiveDoubleClickMode; } static set ExclusiveDoubleClickMode(e) { Ac.ExclusiveDoubleClickMode = e; } /** * Bind the current view position to an effect. * @param effect The effect to be bound * @param variableName name of the shader variable that will hold the eye position * @param isVector3 true to indicates that variableName is a Vector3 and not a Vector4 * @returns the computed eye position */ bindEyePosition(e, t = "vEyePosition", i = !1) { const r = this._forcedViewPosition ? this._forcedViewPosition : this._mirroredCameraPosition ? this._mirroredCameraPosition : this.activeCamera.globalPosition, s = this.useRightHandedSystem === (this._mirroredCameraPosition != null); return de.Vector4[0].set(r.x, r.y, r.z, s ? -1 : 1), e && (i ? 
e.setFloat3(t, de.Vector4[0].x, de.Vector4[0].y, de.Vector4[0].z) : e.setVector4(t, de.Vector4[0])), de.Vector4[0]; } /** * Update the scene ubo before it can be used in rendering processing * @returns the scene UniformBuffer */ finalizeSceneUbo() { const e = this.getSceneUniformBuffer(), t = this.bindEyePosition(null); return e.updateFloat4("vEyePosition", t.x, t.y, t.z, t.w), e.update(), e; } /** * Gets or sets a boolean indicating if the scene must use right-handed coordinates system */ set useRightHandedSystem(e) { this._useRightHandedSystem !== e && (this._useRightHandedSystem = e, this.markAllMaterialsAsDirty(16)); } get useRightHandedSystem() { return this._useRightHandedSystem; } /** * Sets the step Id used by deterministic lock step * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @param newStepId defines the step Id */ setStepId(e) { this._currentStepId = e; } /** * Gets the step Id used by deterministic lock step * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns the step Id */ getStepId() { return this._currentStepId; } /** * Gets the internal step used by deterministic lock step * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns the internal step */ getInternalStep() { return this._currentInternalStep; } /** * Gets or sets a boolean indicating if fog is enabled on this scene * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/environment_introduction#fog * (Default is true) */ set fogEnabled(e) { this._fogEnabled !== e && (this._fogEnabled = e, this.markAllMaterialsAsDirty(16)); } get fogEnabled() { return this._fogEnabled; } /** * Gets or sets the fog mode to use * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/environment_introduction#fog * | mode | value | * | --- | --- | * | FOGMODE_NONE | 0 | * | FOGMODE_EXP | 1 | * | FOGMODE_EXP2 | 2 | * | FOGMODE_LINEAR | 3 | */ set fogMode(e) { this._fogMode !== e && (this._fogMode = e, this.markAllMaterialsAsDirty(16)); } get fogMode() { return this._fogMode; } /** * Flag indicating that the frame buffer binding is handled by another component */ get prePass() { return !!this.prePassRenderer && this.prePassRenderer.defaultRT.enabled; } /** * Gets or sets a boolean indicating if shadows are enabled on this scene */ set shadowsEnabled(e) { this._shadowsEnabled !== e && (this._shadowsEnabled = e, this.markAllMaterialsAsDirty(2)); } get shadowsEnabled() { return this._shadowsEnabled; } /** * Gets or sets a boolean indicating if lights are enabled on this scene */ set lightsEnabled(e) { this._lightsEnabled !== e && (this._lightsEnabled = e, this.markAllMaterialsAsDirty(2)); } get lightsEnabled() { return this._lightsEnabled; } /** All of the active cameras added to this scene. 
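* Usage sketch (the `leftCamera` and `rightCamera` instances are hypothetical, created elsewhere):
* @example
* scene.activeCameras = [leftCamera, rightCamera];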
*/ get activeCameras() { return this._activeCameras; } set activeCameras(e) { this._unObserveActiveCameras && (this._unObserveActiveCameras(), this._unObserveActiveCameras = null), e && (this._unObserveActiveCameras = Uee(e, () => { this.onActiveCamerasChanged.notifyObservers(this); })), this._activeCameras = e; } /** Gets or sets the current active camera */ get activeCamera() { return this._activeCamera; } set activeCamera(e) { e !== this._activeCamera && (this._activeCamera = e, this.onActiveCameraChanged.notifyObservers(this)); } /** The default material used on meshes when no material is affected */ get defaultMaterial() { return this._defaultMaterial || (this._defaultMaterial = ii.DefaultMaterialFactory(this)), this._defaultMaterial; } /** The default material used on meshes when no material is affected */ set defaultMaterial(e) { this._defaultMaterial = e; } /** * Gets or sets a boolean indicating if textures are enabled on this scene */ set texturesEnabled(e) { this._texturesEnabled !== e && (this._texturesEnabled = e, this.markAllMaterialsAsDirty(1)); } get texturesEnabled() { return this._texturesEnabled; } /** * Gets or sets a boolean indicating if skeletons are enabled on this scene */ set skeletonsEnabled(e) { this._skeletonsEnabled !== e && (this._skeletonsEnabled = e, this.markAllMaterialsAsDirty(8)); } get skeletonsEnabled() { return this._skeletonsEnabled; } /** @internal */ get collisionCoordinator() { return this._collisionCoordinator || (this._collisionCoordinator = ii.CollisionCoordinatorFactory(), this._collisionCoordinator.init(this)), this._collisionCoordinator; } /** * Gets the scene's rendering manager */ get renderingManager() { return this._renderingManager; } /** * Gets the list of frustum planes (built from the active camera) */ get frustumPlanes() { return this._frustumPlanes; } /** * Registers the transient components if needed. */ _registerTransientComponents() { if (this._transientComponents.length > 0) { for (const e of this._transientComponents) e.register(); this._transientComponents.length = 0; } } /** * @internal * Add a component to the scene. * Note that the ccomponent could be registered on th next frame if this is called after * the register component stage. * @param component Defines the component to add to the scene */ _addComponent(e) { this._components.push(e), this._transientComponents.push(e); const t = e; t.addFromContainer && t.serialize && this._serializableComponents.push(t); } /** * @internal * Gets a component from the scene. 
* @param name defines the name of the component to retrieve * @returns the component or null if not present */ _getComponent(e) { for (const t of this._components) if (t.name === e) return t; return null; } /** * Creates a new Scene * @param engine defines the engine to use to render this scene * @param options defines the scene options */ constructor(e, t) { super(), this._inputManager = new Ac(this), this.cameraToUseForPointers = null, this._isScene = !0, this._blockEntityCollection = !1, this.autoClear = !0, this.autoClearDepthAndStencil = !0, this.clearColor = new Et(0.2, 0.2, 0.3, 1), this.ambientColor = new ze(0, 0, 0), this.environmentIntensity = 1, this._performancePriority = $A.BackwardCompatible, this.onScenePerformancePriorityChangedObservable = new Fe(), this._forceWireframe = !1, this._skipFrustumClipping = !1, this._forcePointsCloud = !1, this.animationsEnabled = !0, this._animationPropertiesOverride = null, this.useConstantAnimationDeltaTime = !1, this.constantlyUpdateMeshUnderPointer = !1, this.hoverCursor = "pointer", this.defaultCursor = "", this.doNotHandleCursors = !1, this.preventDefaultOnPointerDown = !0, this.preventDefaultOnPointerUp = !0, this.metadata = null, this.reservedDataStore = null, this.disableOfflineSupportExceptionRules = [], this.onDisposeObservable = new Fe(), this._onDisposeObserver = null, this.onBeforeRenderObservable = new Fe(), this._onBeforeRenderObserver = null, this.onAfterRenderObservable = new Fe(), this.onAfterRenderCameraObservable = new Fe(), this._onAfterRenderObserver = null, this.onBeforeAnimationsObservable = new Fe(), this.onAfterAnimationsObservable = new Fe(), this.onBeforeDrawPhaseObservable = new Fe(), this.onAfterDrawPhaseObservable = new Fe(), this.onReadyObservable = new Fe(), this.onBeforeCameraRenderObservable = new Fe(), this._onBeforeCameraRenderObserver = null, this.onAfterCameraRenderObservable = new Fe(), this._onAfterCameraRenderObserver = null, this.onBeforeActiveMeshesEvaluationObservable = new Fe(), this.onAfterActiveMeshesEvaluationObservable = new Fe(), this.onBeforeParticlesRenderingObservable = new Fe(), this.onAfterParticlesRenderingObservable = new Fe(), this.onDataLoadedObservable = new Fe(), this.onNewCameraAddedObservable = new Fe(), this.onCameraRemovedObservable = new Fe(), this.onNewLightAddedObservable = new Fe(), this.onLightRemovedObservable = new Fe(), this.onNewGeometryAddedObservable = new Fe(), this.onGeometryRemovedObservable = new Fe(), this.onNewTransformNodeAddedObservable = new Fe(), this.onTransformNodeRemovedObservable = new Fe(), this.onNewMeshAddedObservable = new Fe(), this.onMeshRemovedObservable = new Fe(), this.onNewSkeletonAddedObservable = new Fe(), this.onSkeletonRemovedObservable = new Fe(), this.onNewMaterialAddedObservable = new Fe(), this.onNewMultiMaterialAddedObservable = new Fe(), this.onMaterialRemovedObservable = new Fe(), this.onMultiMaterialRemovedObservable = new Fe(), this.onNewTextureAddedObservable = new Fe(), this.onTextureRemovedObservable = new Fe(), this.onBeforeRenderTargetsRenderObservable = new Fe(), this.onAfterRenderTargetsRenderObservable = new Fe(), this.onBeforeStepObservable = new Fe(), this.onAfterStepObservable = new Fe(), this.onActiveCameraChanged = new Fe(), this.onActiveCamerasChanged = new Fe(), this.onBeforeRenderingGroupObservable = new Fe(), this.onAfterRenderingGroupObservable = new Fe(), this.onMeshImportedObservable = new Fe(), this.onAnimationFileImportedObservable = new Fe(), this._registeredForLateAnimationBindings = new XE(256), 
this._pointerPickingConfiguration = new nce(), this.onPrePointerObservable = new Fe(), this.onPointerObservable = new Fe(), this.onPreKeyboardObservable = new Fe(), this.onKeyboardObservable = new Fe(), this._useRightHandedSystem = !1, this._timeAccumulator = 0, this._currentStepId = 0, this._currentInternalStep = 0, this._fogEnabled = !0, this._fogMode = ii.FOGMODE_NONE, this.fogColor = new ze(0.2, 0.2, 0.3), this.fogDensity = 0.1, this.fogStart = 0, this.fogEnd = 1e3, this.needsPreviousWorldMatrices = !1, this._shadowsEnabled = !0, this._lightsEnabled = !0, this._unObserveActiveCameras = null, this._texturesEnabled = !0, this.physicsEnabled = !0, this.particlesEnabled = !0, this.spritesEnabled = !0, this._skeletonsEnabled = !0, this.lensFlaresEnabled = !0, this.collisionsEnabled = !0, this.gravity = new D(0, -9.807, 0), this.postProcessesEnabled = !0, this.renderTargetsEnabled = !0, this.dumpNextRenderTargets = !1, this.customRenderTargets = [], this.importedMeshesFiles = [], this.probesEnabled = !0, this._meshesForIntersections = new XE(256), this.proceduralTexturesEnabled = !0, this._totalVertices = new Vc(), this._activeIndices = new Vc(), this._activeParticles = new Vc(), this._activeBones = new Vc(), this._animationTime = 0, this.animationTimeScale = 1, this._renderId = 0, this._frameId = 0, this._executeWhenReadyTimeoutId = null, this._intermediateRendering = !1, this._defaultFrameBufferCleared = !1, this._viewUpdateFlag = -1, this._projectionUpdateFlag = -1, this._toBeDisposed = new Array(256), this._activeRequests = new Array(), this._pendingData = new Array(), this._isDisposed = !1, this.dispatchAllSubMeshesOfActiveMeshes = !1, this._activeMeshes = new xc(256), this._processedMaterials = new xc(256), this._renderTargets = new XE(256), this._materialsRenderTargets = new XE(256), this._activeParticleSystems = new xc(256), this._activeSkeletons = new XE(32), this._softwareSkinnedMeshes = new XE(32), this._activeAnimatables = new Array(), this._transformMatrix = Ae.Zero(), this.requireLightSorting = !1, this._components = [], this._serializableComponents = [], this._transientComponents = [], this._beforeCameraUpdateStage = Kl.Create(), this._beforeClearStage = Kl.Create(), this._beforeRenderTargetClearStage = Kl.Create(), this._gatherRenderTargetsStage = Kl.Create(), this._gatherActiveCameraRenderTargetsStage = Kl.Create(), this._isReadyForMeshStage = Kl.Create(), this._beforeEvaluateActiveMeshStage = Kl.Create(), this._evaluateSubMeshStage = Kl.Create(), this._preActiveMeshStage = Kl.Create(), this._cameraDrawRenderTargetStage = Kl.Create(), this._beforeCameraDrawStage = Kl.Create(), this._beforeRenderTargetDrawStage = Kl.Create(), this._beforeRenderingGroupDrawStage = Kl.Create(), this._beforeRenderingMeshStage = Kl.Create(), this._afterRenderingMeshStage = Kl.Create(), this._afterRenderingGroupDrawStage = Kl.Create(), this._afterCameraDrawStage = Kl.Create(), this._afterCameraPostProcessStage = Kl.Create(), this._afterRenderTargetDrawStage = Kl.Create(), this._afterRenderTargetPostProcessStage = Kl.Create(), this._afterRenderStage = Kl.Create(), this._pointerMoveStage = Kl.Create(), this._pointerDownStage = Kl.Create(), this._pointerUpStage = Kl.Create(), this._geometriesByUniqueId = null, this._defaultMeshCandidates = { data: [], length: 0 }, this._defaultSubMeshCandidates = { data: [], length: 0 }, this._preventFreeActiveMeshesAndRenderingGroups = !1, this._activeMeshesFrozen = !1, this._activeMeshesFrozenButKeepClipping = !1, this._skipEvaluateActiveMeshesCompletely = !1, 
this._allowPostProcessClearColor = !0, this.getDeterministicFrameTime = () => this._engine.getTimeStep(), this._registeredActions = 0, this._blockMaterialDirtyMechanism = !1, this._perfCollector = null, this.activeCameras = []; const i = Object.assign({ useGeometryUniqueIdsMap: !0, useMaterialMeshMap: !0, useClonedMeshMap: !0, virtual: !1 }, t); e = this._engine = e || gi.LastCreatedEngine, i.virtual ? e._virtualScenes.push(this) : (gi._LastCreatedScene = this, e.scenes.push(this)), this._uid = null, this._renderingManager = new Zh(this), q9 && (this.postProcessManager = new q9(this)), cu() && this.attachControl(), this._createUbo(), Ds && (this._imageProcessingConfiguration = new Ds()), this.setDefaultCandidateProviders(), i.useGeometryUniqueIdsMap && (this._geometriesByUniqueId = {}), this.useMaterialMeshMap = i.useMaterialMeshMap, this.useClonedMeshMap = i.useClonedMeshMap, (!t || !t.virtual) && e.onNewSceneAddedObservable.notifyObservers(this); } /** * Gets a string identifying the name of the class * @returns "Scene" string */ getClassName() { return "Scene"; } /** * @internal */ _getDefaultMeshCandidates() { return this._defaultMeshCandidates.data = this.meshes, this._defaultMeshCandidates.length = this.meshes.length, this._defaultMeshCandidates; } /** * @internal */ _getDefaultSubMeshCandidates(e) { return this._defaultSubMeshCandidates.data = e.subMeshes, this._defaultSubMeshCandidates.length = e.subMeshes.length, this._defaultSubMeshCandidates; } /** * Sets the default candidate providers for the scene. * This sets the getActiveMeshCandidates, getActiveSubMeshCandidates, getIntersectingSubMeshCandidates * and getCollidingSubMeshCandidates to their default function */ setDefaultCandidateProviders() { this.getActiveMeshCandidates = () => this._getDefaultMeshCandidates(), this.getActiveSubMeshCandidates = (e) => this._getDefaultSubMeshCandidates(e), this.getIntersectingSubMeshCandidates = (e, t) => this._getDefaultSubMeshCandidates(e), this.getCollidingSubMeshCandidates = (e, t) => this._getDefaultSubMeshCandidates(e); } /** * Gets the mesh that is currently under the pointer */ get meshUnderPointer() { return this._inputManager.meshUnderPointer; } /** * Gets or sets the current on-screen X position of the pointer */ get pointerX() { return this._inputManager.pointerX; } set pointerX(e) { this._inputManager.pointerX = e; } /** * Gets or sets the current on-screen Y position of the pointer */ get pointerY() { return this._inputManager.pointerY; } set pointerY(e) { this._inputManager.pointerY = e; } /** * Gets the cached material (ie. the latest rendered one) * @returns the cached material */ getCachedMaterial() { return this._cachedMaterial; } /** * Gets the cached effect (ie. the latest rendered one) * @returns the cached effect */ getCachedEffect() { return this._cachedEffect; } /** * Gets the cached visibility state (ie. 
the latest rendered one) * @returns the cached visibility state */ getCachedVisibility() { return this._cachedVisibility; } /** * Gets a boolean indicating if the current material / effect / visibility must be bind again * @param material defines the current material * @param effect defines the current effect * @param visibility defines the current visibility state * @returns true if one parameter is not cached */ isCachedMaterialInvalid(e, t, i = 1) { return this._cachedEffect !== t || this._cachedMaterial !== e || this._cachedVisibility !== i; } /** * Gets the engine associated with the scene * @returns an Engine */ getEngine() { return this._engine; } /** * Gets the total number of vertices rendered per frame * @returns the total number of vertices rendered per frame */ getTotalVertices() { return this._totalVertices.current; } /** * Gets the performance counter for total vertices * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#instrumentation */ get totalVerticesPerfCounter() { return this._totalVertices; } /** * Gets the total number of active indices rendered per frame (You can deduce the number of rendered triangles by dividing this number by 3) * @returns the total number of active indices rendered per frame */ getActiveIndices() { return this._activeIndices.current; } /** * Gets the performance counter for active indices * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#instrumentation */ get totalActiveIndicesPerfCounter() { return this._activeIndices; } /** * Gets the total number of active particles rendered per frame * @returns the total number of active particles rendered per frame */ getActiveParticles() { return this._activeParticles.current; } /** * Gets the performance counter for active particles * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#instrumentation */ get activeParticlesPerfCounter() { return this._activeParticles; } /** * Gets the total number of active bones rendered per frame * @returns the total number of active bones rendered per frame */ getActiveBones() { return this._activeBones.current; } /** * Gets the performance counter for active bones * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#instrumentation */ get activeBonesPerfCounter() { return this._activeBones; } /** * Gets the array of active meshes * @returns an array of AbstractMesh */ getActiveMeshes() { return this._activeMeshes; } /** * Gets the animation ratio (which is 1.0 is the scene renders at 60fps and 2 if the scene renders at 30fps, etc.) * @returns a number */ getAnimationRatio() { return this._animationRatio !== void 0 ? this._animationRatio : 1; } /** * Gets an unique Id for the current render phase * @returns a number */ getRenderId() { return this._renderId; } /** * Gets an unique Id for the current frame * @returns a number */ getFrameId() { return this._frameId; } /** Call this function if you want to manually increment the render Id*/ incrementRenderId() { this._renderId++; } _createUbo() { this.setSceneUniformBuffer(this.createSceneUniformBuffer()); } /** * Use this method to simulate a pointer move on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult pickingInfo of the object wished to simulate pointer event on * @param pointerEventInit pointer event state to be used when simulating the pointer event (eg. 
pointer id for multitouch) * @returns the current scene */ simulatePointerMove(e, t) { return this._inputManager.simulatePointerMove(e, t), this; } /** * Use this method to simulate a pointer down on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult pickingInfo of the object wished to simulate pointer event on * @param pointerEventInit pointer event state to be used when simulating the pointer event (eg. pointer id for multitouch) * @returns the current scene */ simulatePointerDown(e, t) { return this._inputManager.simulatePointerDown(e, t), this; } /** * Use this method to simulate a pointer up on a mesh * The pickResult parameter can be obtained from a scene.pick or scene.pickWithRay * @param pickResult pickingInfo of the object wished to simulate pointer event on * @param pointerEventInit pointer event state to be used when simulating the pointer event (eg. pointer id for multitouch) * @param doubleTap indicates that the pointer up event should be considered as part of a double click (false by default) * @returns the current scene */ simulatePointerUp(e, t, i) { return this._inputManager.simulatePointerUp(e, t, i), this; } /** * Gets a boolean indicating if the current pointer event is captured (meaning that the scene has already handled the pointer down) * @param pointerId defines the pointer id to use in a multi-touch scenario (0 by default) * @returns true if the pointer was captured */ isPointerCaptured(e = 0) { return this._inputManager.isPointerCaptured(e); } /** * Attach events to the canvas (To handle actionManagers triggers and raise onPointerMove, onPointerDown and onPointerUp * @param attachUp defines if you want to attach events to pointerup * @param attachDown defines if you want to attach events to pointerdown * @param attachMove defines if you want to attach events to pointermove */ attachControl(e = !0, t = !0, i = !0) { this._inputManager.attachControl(e, t, i); } /** Detaches all event handlers*/ detachControl() { this._inputManager.detachControl(); } /** * This function will check if the scene can be rendered (textures are loaded, shaders are compiled) * Delay loaded resources are not taking in account * @param checkRenderTargets true to also check that the meshes rendered as part of a render target are ready (default: true) * @returns true if all required resources are ready */ isReady(e = !0) { var t, i, r; if (this._isDisposed) return !1; let s; const n = this.getEngine(), a = n.currentRenderPassId; n.currentRenderPassId = (i = (t = this.activeCamera) === null || t === void 0 ? void 0 : t.renderPassId) !== null && i !== void 0 ? 
i : a; let l = !0; for (this._pendingData.length > 0 && (l = !1), (r = this.prePassRenderer) === null || r === void 0 || r.update(), this.useOrderIndependentTransparency && this.depthPeelingRenderer && l && (l = this.depthPeelingRenderer.isReady()), e && (this._processedMaterials.reset(), this._materialsRenderTargets.reset()), s = 0; s < this.meshes.length; s++) { const o = this.meshes[s]; if (!o.subMeshes || o.subMeshes.length === 0) continue; if (!o.isReady(!0)) { l = !1; continue; } const u = o.hasThinInstances || o.getClassName() === "InstancedMesh" || o.getClassName() === "InstancedLinesMesh" || n.getCaps().instancedArrays && o.instances.length > 0; for (const d of this._isReadyForMeshStage) d.action(o, u) || (l = !1); if (!e) continue; const h = o.material || this.defaultMaterial; if (h) if (h._storeEffectOnSubMeshes) for (const d of o.subMeshes) { const f = d.getMaterial(); f && f.hasRenderTargetTextures && f.getRenderTargetTextures != null && this._processedMaterials.indexOf(f) === -1 && (this._processedMaterials.push(f), this._materialsRenderTargets.concatWithNoDuplicate(f.getRenderTargetTextures())); } else h.hasRenderTargetTextures && h.getRenderTargetTextures != null && this._processedMaterials.indexOf(h) === -1 && (this._processedMaterials.push(h), this._materialsRenderTargets.concatWithNoDuplicate(h.getRenderTargetTextures())); } if (e) for (s = 0; s < this._materialsRenderTargets.length; ++s) this._materialsRenderTargets.data[s].isReadyForRendering() || (l = !1); for (s = 0; s < this.geometries.length; s++) this.geometries[s].delayLoadState === 2 && (l = !1); if (this.activeCameras && this.activeCameras.length > 0) for (const o of this.activeCameras) o.isReady(!0) || (l = !1); else this.activeCamera && (this.activeCamera.isReady(!0) || (l = !1)); for (const o of this.particleSystems) o.isReady() || (l = !1); if (this.layers) for (const o of this.layers) o.isReady() || (l = !1); return n.areAllEffectsReady() || (l = !1), n.currentRenderPassId = a, l; } /** Resets all cached information relative to material (including effect and visibility) */ resetCachedMaterial() { this._cachedMaterial = null, this._cachedEffect = null, this._cachedVisibility = null; } /** * Registers a function to be called before every frame render * @param func defines the function to register */ registerBeforeRender(e) { this.onBeforeRenderObservable.add(e); } /** * Unregisters a function called before every frame render * @param func defines the function to unregister */ unregisterBeforeRender(e) { this.onBeforeRenderObservable.removeCallback(e); } /** * Registers a function to be called after every frame render * @param func defines the function to register */ registerAfterRender(e) { this.onAfterRenderObservable.add(e); } /** * Unregisters a function called after every frame render * @param func defines the function to unregister */ unregisterAfterRender(e) { this.onAfterRenderObservable.removeCallback(e); } _executeOnceBeforeRender(e) { const t = () => { e(), setTimeout(() => { this.unregisterBeforeRender(t); }); }; this.registerBeforeRender(t); } /** * The provided function will run before render once and will be disposed afterwards. * A timeout delay can be provided so that the function will be executed in N ms. * The timeout is using the browser's native setTimeout so time percision cannot be guaranteed. * @param func The function to be executed. * @param timeout optional delay in ms */ executeOnceBeforeRender(e, t) { t !== void 0 ? 
setTimeout(() => { this._executeOnceBeforeRender(e); }, t) : this._executeOnceBeforeRender(e); } /** * This function can help adding any object to the list of data awaited to be ready in order to check for a complete scene loading. * @param data defines the object to wait for */ addPendingData(e) { this._pendingData.push(e); } /** * Remove a pending data from the loading list which has previously been added with addPendingData. * @param data defines the object to remove from the pending list */ removePendingData(e) { const t = this.isLoading, i = this._pendingData.indexOf(e); i !== -1 && this._pendingData.splice(i, 1), t && !this.isLoading && this.onDataLoadedObservable.notifyObservers(this); } /** * Returns the number of items waiting to be loaded * @returns the number of items waiting to be loaded */ getWaitingItemsCount() { return this._pendingData.length; } /** * Returns a boolean indicating if the scene is still loading data */ get isLoading() { return this._pendingData.length > 0; } /** * Registers a function to be executed when the scene is ready * @param func - the function to be executed * @param checkRenderTargets true to also check that the meshes rendered as part of a render target are ready (default: false) */ executeWhenReady(e, t = !1) { this.onReadyObservable.addOnce(e), this._executeWhenReadyTimeoutId === null && this._checkIsReady(t); } /** * Returns a promise that resolves when the scene is ready * @param checkRenderTargets true to also check that the meshes rendered as part of a render target are ready (default: false) * @returns A promise that resolves when the scene is ready */ whenReadyAsync(e = !1) { return new Promise((t) => { this.executeWhenReady(() => { t(); }, e); }); } /** * @internal */ _checkIsReady(e = !1) { if (this._registerTransientComponents(), this.isReady(e)) { this.onReadyObservable.notifyObservers(this), this.onReadyObservable.clear(), this._executeWhenReadyTimeoutId = null; return; } if (this._isDisposed) { this.onReadyObservable.clear(), this._executeWhenReadyTimeoutId = null; return; } this._executeWhenReadyTimeoutId = setTimeout(() => { this.incrementRenderId(), this._checkIsReady(e); }, 100); } /** * Gets all animatable attached to the scene */ get animatables() { return this._activeAnimatables; } /** * Resets the last animation time frame. * Useful to override when animations start running when loading a scene for the first time. 
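* Usage sketch (assumes an existing `scene`; typically called once loading completes):
* @example
* scene.executeWhenReady(() => {
*     scene.resetLastAnimationTimeFrame();
* });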
*/ resetLastAnimationTimeFrame() { this._animationTimeLast = Gs.Now; } // Matrix /** * Gets the current view matrix * @returns a Matrix */ getViewMatrix() { return this._viewMatrix; } /** * Gets the current projection matrix * @returns a Matrix */ getProjectionMatrix() { return this._projectionMatrix; } /** * Gets the current transform matrix * @returns a Matrix made of View * Projection */ getTransformMatrix() { return this._transformMatrix; } /** * Sets the current transform matrix * @param viewL defines the View matrix to use * @param projectionL defines the Projection matrix to use * @param viewR defines the right View matrix to use (if provided) * @param projectionR defines the right Projection matrix to use (if provided) */ setTransformMatrix(e, t, i, r) { !i && !r && this._multiviewSceneUbo && (this._multiviewSceneUbo.dispose(), this._multiviewSceneUbo = null), !(this._viewUpdateFlag === e.updateFlag && this._projectionUpdateFlag === t.updateFlag) && (this._viewUpdateFlag = e.updateFlag, this._projectionUpdateFlag = t.updateFlag, this._viewMatrix = e, this._projectionMatrix = t, this._viewMatrix.multiplyToRef(this._projectionMatrix, this._transformMatrix), this._frustumPlanes ? gm.GetPlanesToRef(this._transformMatrix, this._frustumPlanes) : this._frustumPlanes = gm.GetPlanes(this._transformMatrix), this._multiviewSceneUbo && this._multiviewSceneUbo.useUbo ? this._updateMultiviewUbo(i, r) : this._sceneUbo.useUbo && (this._sceneUbo.updateMatrix("viewProjection", this._transformMatrix), this._sceneUbo.updateMatrix("view", this._viewMatrix), this._sceneUbo.updateMatrix("projection", this._projectionMatrix))); } /** * Gets the uniform buffer used to store scene data * @returns a UniformBuffer */ getSceneUniformBuffer() { return this._multiviewSceneUbo ? this._multiviewSceneUbo : this._sceneUbo; } /** * Creates a scene UBO * @param name name of the uniform buffer (optional, for debugging purpose only) * @returns a new ubo */ createSceneUniformBuffer(e) { const t = new Vi(this._engine, void 0, !1, e ?? 
"scene"); return t.addUniform("viewProjection", 16), t.addUniform("view", 16), t.addUniform("projection", 16), t.addUniform("vEyePosition", 4), t; } /** * Sets the scene ubo * @param ubo the ubo to set for the scene */ setSceneUniformBuffer(e) { this._sceneUbo = e, this._viewUpdateFlag = -1, this._projectionUpdateFlag = -1; } /** * Gets an unique (relatively to the current scene) Id * @returns an unique number for the scene */ getUniqueId() { return LL.UniqueId; } /** * Add a mesh to the list of scene's meshes * @param newMesh defines the mesh to add * @param recursive if all child meshes should also be added to the scene */ addMesh(e, t = !1) { this._blockEntityCollection || (this.meshes.push(e), e._resyncLightSources(), e.parent || e._addToSceneRootNodes(), this.onNewMeshAddedObservable.notifyObservers(e), t && e.getChildMeshes().forEach((i) => { this.addMesh(i); })); } /** * Remove a mesh for the list of scene's meshes * @param toRemove defines the mesh to remove * @param recursive if all child meshes should also be removed from the scene * @returns the index where the mesh was in the mesh list */ removeMesh(e, t = !1) { const i = this.meshes.indexOf(e); return i !== -1 && (this.meshes[i] = this.meshes[this.meshes.length - 1], this.meshes.pop(), e.parent || e._removeFromSceneRootNodes()), this._inputManager._invalidateMesh(e), this.onMeshRemovedObservable.notifyObservers(e), t && e.getChildMeshes().forEach((r) => { this.removeMesh(r); }), i; } /** * Add a transform node to the list of scene's transform nodes * @param newTransformNode defines the transform node to add */ addTransformNode(e) { this._blockEntityCollection || e.getScene() === this && e._indexInSceneTransformNodesArray !== -1 || (e._indexInSceneTransformNodesArray = this.transformNodes.length, this.transformNodes.push(e), e.parent || e._addToSceneRootNodes(), this.onNewTransformNodeAddedObservable.notifyObservers(e)); } /** * Remove a transform node for the list of scene's transform nodes * @param toRemove defines the transform node to remove * @returns the index where the transform node was in the transform node list */ removeTransformNode(e) { const t = e._indexInSceneTransformNodesArray; if (t !== -1) { if (t !== this.transformNodes.length - 1) { const i = this.transformNodes[this.transformNodes.length - 1]; this.transformNodes[t] = i, i._indexInSceneTransformNodesArray = t; } e._indexInSceneTransformNodesArray = -1, this.transformNodes.pop(), e.parent || e._removeFromSceneRootNodes(); } return this.onTransformNodeRemovedObservable.notifyObservers(e), t; } /** * Remove a skeleton for the list of scene's skeletons * @param toRemove defines the skeleton to remove * @returns the index where the skeleton was in the skeleton list */ removeSkeleton(e) { const t = this.skeletons.indexOf(e); return t !== -1 && (this.skeletons.splice(t, 1), this.onSkeletonRemovedObservable.notifyObservers(e), this._executeActiveContainerCleanup(this._activeSkeletons)), t; } /** * Remove a morph target for the list of scene's morph targets * @param toRemove defines the morph target to remove * @returns the index where the morph target was in the morph target list */ removeMorphTargetManager(e) { const t = this.morphTargetManagers.indexOf(e); return t !== -1 && this.morphTargetManagers.splice(t, 1), t; } /** * Remove a light for the list of scene's lights * @param toRemove defines the light to remove * @returns the index where the light was in the light list */ removeLight(e) { const t = this.lights.indexOf(e); if (t !== -1) { for (const i of 
this.meshes) i._removeLightSource(e, !1); this.lights.splice(t, 1), this.sortLightsByPriority(), e.parent || e._removeFromSceneRootNodes(); } return this.onLightRemovedObservable.notifyObservers(e), t; } /** * Remove a camera for the list of scene's cameras * @param toRemove defines the camera to remove * @returns the index where the camera was in the camera list */ removeCamera(e) { const t = this.cameras.indexOf(e); if (t !== -1 && (this.cameras.splice(t, 1), e.parent || e._removeFromSceneRootNodes()), this.activeCameras) { const i = this.activeCameras.indexOf(e); i !== -1 && this.activeCameras.splice(i, 1); } return this.activeCamera === e && (this.cameras.length > 0 ? this.activeCamera = this.cameras[0] : this.activeCamera = null), this.onCameraRemovedObservable.notifyObservers(e), t; } /** * Remove a particle system for the list of scene's particle systems * @param toRemove defines the particle system to remove * @returns the index where the particle system was in the particle system list */ removeParticleSystem(e) { const t = this.particleSystems.indexOf(e); return t !== -1 && (this.particleSystems.splice(t, 1), this._executeActiveContainerCleanup(this._activeParticleSystems)), t; } /** * Remove a animation for the list of scene's animations * @param toRemove defines the animation to remove * @returns the index where the animation was in the animation list */ removeAnimation(e) { const t = this.animations.indexOf(e); return t !== -1 && this.animations.splice(t, 1), t; } /** * Will stop the animation of the given target * @param target - the target * @param animationName - the name of the animation to stop (all animations will be stopped if both this and targetMask are empty) * @param targetMask - a function that determines if the animation should be stopped based on its target (all animations will be stopped if both this and animationName are empty) */ stopAnimation(e, t, i) { } /** * Removes the given animation group from this scene. * @param toRemove The animation group to remove * @returns The index of the removed animation group */ removeAnimationGroup(e) { const t = this.animationGroups.indexOf(e); return t !== -1 && this.animationGroups.splice(t, 1), t; } /** * Removes the given multi-material from this scene. * @param toRemove The multi-material to remove * @returns The index of the removed multi-material */ removeMultiMaterial(e) { const t = this.multiMaterials.indexOf(e); return t !== -1 && this.multiMaterials.splice(t, 1), this.onMultiMaterialRemovedObservable.notifyObservers(e), t; } /** * Removes the given material from this scene. * @param toRemove The material to remove * @returns The index of the removed material */ removeMaterial(e) { const t = e._indexInSceneMaterialArray; if (t !== -1 && t < this.materials.length) { if (t !== this.materials.length - 1) { const i = this.materials[this.materials.length - 1]; this.materials[t] = i, i._indexInSceneMaterialArray = t; } e._indexInSceneMaterialArray = -1, this.materials.pop(); } return this.onMaterialRemovedObservable.notifyObservers(e), t; } /** * Removes the given action manager from this scene. * @deprecated * @param toRemove The action manager to remove * @returns The index of the removed action manager */ removeActionManager(e) { const t = this.actionManagers.indexOf(e); return t !== -1 && this.actionManagers.splice(t, 1), t; } /** * Removes the given texture from this scene. 
* @param toRemove The texture to remove * @returns The index of the removed texture */ removeTexture(e) { const t = this.textures.indexOf(e); return t !== -1 && this.textures.splice(t, 1), this.onTextureRemovedObservable.notifyObservers(e), t; } /** * Adds the given light to this scene * @param newLight The light to add */ addLight(e) { if (!this._blockEntityCollection) { this.lights.push(e), this.sortLightsByPriority(), e.parent || e._addToSceneRootNodes(); for (const t of this.meshes) t.lightSources.indexOf(e) === -1 && (t.lightSources.push(e), t._resyncLightSources()); this.onNewLightAddedObservable.notifyObservers(e); } } /** * Sorts the list list based on light priorities */ sortLightsByPriority() { this.requireLightSorting && this.lights.sort(ia.CompareLightsPriority); } /** * Adds the given camera to this scene * @param newCamera The camera to add */ addCamera(e) { this._blockEntityCollection || (this.cameras.push(e), this.onNewCameraAddedObservable.notifyObservers(e), e.parent || e._addToSceneRootNodes()); } /** * Adds the given skeleton to this scene * @param newSkeleton The skeleton to add */ addSkeleton(e) { this._blockEntityCollection || (this.skeletons.push(e), this.onNewSkeletonAddedObservable.notifyObservers(e)); } /** * Adds the given particle system to this scene * @param newParticleSystem The particle system to add */ addParticleSystem(e) { this._blockEntityCollection || this.particleSystems.push(e); } /** * Adds the given animation to this scene * @param newAnimation The animation to add */ addAnimation(e) { this._blockEntityCollection || this.animations.push(e); } /** * Adds the given animation group to this scene. * @param newAnimationGroup The animation group to add */ addAnimationGroup(e) { this._blockEntityCollection || this.animationGroups.push(e); } /** * Adds the given multi-material to this scene * @param newMultiMaterial The multi-material to add */ addMultiMaterial(e) { this._blockEntityCollection || (this.multiMaterials.push(e), this.onNewMultiMaterialAddedObservable.notifyObservers(e)); } /** * Adds the given material to this scene * @param newMaterial The material to add */ addMaterial(e) { this._blockEntityCollection || e.getScene() === this && e._indexInSceneMaterialArray !== -1 || (e._indexInSceneMaterialArray = this.materials.length, this.materials.push(e), this.onNewMaterialAddedObservable.notifyObservers(e)); } /** * Adds the given morph target to this scene * @param newMorphTargetManager The morph target to add */ addMorphTargetManager(e) { this._blockEntityCollection || this.morphTargetManagers.push(e); } /** * Adds the given geometry to this scene * @param newGeometry The geometry to add */ addGeometry(e) { this._blockEntityCollection || (this._geometriesByUniqueId && (this._geometriesByUniqueId[e.uniqueId] = this.geometries.length), this.geometries.push(e)); } /** * Adds the given action manager to this scene * @deprecated * @param newActionManager The action manager to add */ addActionManager(e) { this.actionManagers.push(e); } /** * Adds the given texture to this scene. 
* @param newTexture The texture to add */ addTexture(e) { this._blockEntityCollection || (this.textures.push(e), this.onNewTextureAddedObservable.notifyObservers(e)); } /** * Switch active camera * @param newCamera defines the new active camera * @param attachControl defines if attachControl must be called for the new active camera (default: true) */ switchActiveCamera(e, t = !0) { this._engine.getInputElement() && (this.activeCamera && this.activeCamera.detachControl(), this.activeCamera = e, t && e.attachControl()); } /** * sets the active camera of the scene using its Id * @param id defines the camera's Id * @returns the new active camera or null if none found. */ setActiveCameraById(e) { const t = this.getCameraById(e); return t ? (this.activeCamera = t, t) : null; } /** * sets the active camera of the scene using its name * @param name defines the camera's name * @returns the new active camera or null if none found. */ setActiveCameraByName(e) { const t = this.getCameraByName(e); return t ? (this.activeCamera = t, t) : null; } /** * get an animation group using its name * @param name defines the material's name * @returns the animation group or null if none found. */ getAnimationGroupByName(e) { for (let t = 0; t < this.animationGroups.length; t++) if (this.animationGroups[t].name === e) return this.animationGroups[t]; return null; } _getMaterial(e, t) { for (let i = 0; i < this.materials.length; i++) { const r = this.materials[i]; if (t(r)) return r; } if (e) for (let i = 0; i < this.multiMaterials.length; i++) { const r = this.multiMaterials[i]; if (t(r)) return r; } return null; } /** * Get a material using its unique id * @param uniqueId defines the material's unique id * @param allowMultiMaterials determines whether multimaterials should be considered * @returns the material or null if none found. */ getMaterialByUniqueID(e, t = !1) { return this._getMaterial(t, (i) => i.uniqueId === e); } /** * get a material using its id * @param id defines the material's Id * @param allowMultiMaterials determines whether multimaterials should be considered * @returns the material or null if none found. */ getMaterialById(e, t = !1) { return this._getMaterial(t, (i) => i.id === e); } /** * Gets a material using its name * @param name defines the material's name * @param allowMultiMaterials determines whether multimaterials should be considered * @returns the material or null if none found. */ getMaterialByName(e, t = !1) { return this._getMaterial(t, (i) => i.name === e); } /** * Gets a last added material using a given id * @param id defines the material's id * @param allowMultiMaterials determines whether multimaterials should be considered * @returns the last material with the given id or null if none found. */ getLastMaterialById(e, t = !1) { for (let i = this.materials.length - 1; i >= 0; i--) if (this.materials[i].id === e) return this.materials[i]; if (t) { for (let i = this.multiMaterials.length - 1; i >= 0; i--) if (this.multiMaterials[i].id === e) return this.multiMaterials[i]; } return null; } /** * Get a texture using its unique id * @param uniqueId defines the texture's unique id * @returns the texture or null if none found. */ getTextureByUniqueId(e) { for (let t = 0; t < this.textures.length; t++) if (this.textures[t].uniqueId === e) return this.textures[t]; return null; } /** * Gets a texture using its name * @param name defines the texture's name * @returns the texture or null if none found. 
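* Usage sketch (the texture name "groundDiffuse" is hypothetical):
* @example
* const groundTexture = scene.getTextureByName("groundDiffuse");
* if (groundTexture) {
*     // texture found, safe to use
* }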
*/ getTextureByName(e) { for (let t = 0; t < this.textures.length; t++) if (this.textures[t].name === e) return this.textures[t]; return null; } /** * Gets a camera using its Id * @param id defines the Id to look for * @returns the camera or null if not found */ getCameraById(e) { for (let t = 0; t < this.cameras.length; t++) if (this.cameras[t].id === e) return this.cameras[t]; return null; } /** * Gets a camera using its unique Id * @param uniqueId defines the unique Id to look for * @returns the camera or null if not found */ getCameraByUniqueId(e) { for (let t = 0; t < this.cameras.length; t++) if (this.cameras[t].uniqueId === e) return this.cameras[t]; return null; } /** * Gets a camera using its name * @param name defines the camera's name * @returns the camera or null if none found. */ getCameraByName(e) { for (let t = 0; t < this.cameras.length; t++) if (this.cameras[t].name === e) return this.cameras[t]; return null; } /** * Gets a bone using its Id * @param id defines the bone's Id * @returns the bone or null if not found */ getBoneById(e) { for (let t = 0; t < this.skeletons.length; t++) { const i = this.skeletons[t]; for (let r = 0; r < i.bones.length; r++) if (i.bones[r].id === e) return i.bones[r]; } return null; } /** * Gets a bone using its id * @param name defines the bone's name * @returns the bone or null if not found */ getBoneByName(e) { for (let t = 0; t < this.skeletons.length; t++) { const i = this.skeletons[t]; for (let r = 0; r < i.bones.length; r++) if (i.bones[r].name === e) return i.bones[r]; } return null; } /** * Gets a light node using its name * @param name defines the light's name * @returns the light or null if none found. */ getLightByName(e) { for (let t = 0; t < this.lights.length; t++) if (this.lights[t].name === e) return this.lights[t]; return null; } /** * Gets a light node using its Id * @param id defines the light's Id * @returns the light or null if none found. */ getLightById(e) { for (let t = 0; t < this.lights.length; t++) if (this.lights[t].id === e) return this.lights[t]; return null; } /** * Gets a light node using its scene-generated unique Id * @param uniqueId defines the light's unique Id * @returns the light or null if none found. */ getLightByUniqueId(e) { for (let t = 0; t < this.lights.length; t++) if (this.lights[t].uniqueId === e) return this.lights[t]; return null; } /** * Gets a particle system by Id * @param id defines the particle system Id * @returns the corresponding system or null if none found */ getParticleSystemById(e) { for (let t = 0; t < this.particleSystems.length; t++) if (this.particleSystems[t].id === e) return this.particleSystems[t]; return null; } /** * Gets a geometry using its Id * @param id defines the geometry's Id * @returns the geometry or null if none found. */ getGeometryById(e) { for (let t = 0; t < this.geometries.length; t++) if (this.geometries[t].id === e) return this.geometries[t]; return null; } _getGeometryByUniqueId(e) { if (this._geometriesByUniqueId) { const t = this._geometriesByUniqueId[e]; if (t !== void 0) return this.geometries[t]; } else for (let t = 0; t < this.geometries.length; t++) if (this.geometries[t].uniqueId === e) return this.geometries[t]; return null; } /** * Add a new geometry to this scene * @param geometry defines the geometry to be added to the scene. 
* @param force defines if the geometry must be pushed even if a geometry with this id already exists * @returns a boolean defining if the geometry was added or not */ pushGeometry(e, t) { return !t && this._getGeometryByUniqueId(e.uniqueId) ? !1 : (this.addGeometry(e), this.onNewGeometryAddedObservable.notifyObservers(e), !0); } /** * Removes an existing geometry * @param geometry defines the geometry to be removed from the scene * @returns a boolean defining if the geometry was removed or not */ removeGeometry(e) { let t; if (this._geometriesByUniqueId) { if (t = this._geometriesByUniqueId[e.uniqueId], t === void 0) return !1; } else if (t = this.geometries.indexOf(e), t < 0) return !1; if (t !== this.geometries.length - 1) { const i = this.geometries[this.geometries.length - 1]; i && (this.geometries[t] = i, this._geometriesByUniqueId && (this._geometriesByUniqueId[i.uniqueId] = t)); } return this._geometriesByUniqueId && (this._geometriesByUniqueId[e.uniqueId] = void 0), this.geometries.pop(), this.onGeometryRemovedObservable.notifyObservers(e), !0; } /** * Gets the list of geometries attached to the scene * @returns an array of Geometry */ getGeometries() { return this.geometries; } /** * Gets the first added mesh found of a given Id * @param id defines the Id to search for * @returns the mesh found or null if not found at all */ getMeshById(e) { for (let t = 0; t < this.meshes.length; t++) if (this.meshes[t].id === e) return this.meshes[t]; return null; } /** * Gets a list of meshes using their Id * @param id defines the Id to search for * @returns a list of meshes */ getMeshesById(e) { return this.meshes.filter(function(t) { return t.id === e; }); } /** * Gets the first added transform node found of a given Id * @param id defines the Id to search for * @returns the found transform node or null if not found at all. */ getTransformNodeById(e) { for (let t = 0; t < this.transformNodes.length; t++) if (this.transformNodes[t].id === e) return this.transformNodes[t]; return null; } /** * Gets a transform node with its auto-generated unique Id * @param uniqueId defines the unique Id to search for * @returns the found transform node or null if not found at all. */ getTransformNodeByUniqueId(e) { for (let t = 0; t < this.transformNodes.length; t++) if (this.transformNodes[t].uniqueId === e) return this.transformNodes[t]; return null; } /** * Gets a list of transform nodes using their Id * @param id defines the Id to search for * @returns a list of transform nodes */ getTransformNodesById(e) { return this.transformNodes.filter(function(t) { return t.id === e; }); } /** * Gets a mesh with its auto-generated unique Id * @param uniqueId defines the unique Id to search for * @returns the found mesh or null if not found at all. */ getMeshByUniqueId(e) { for (let t = 0; t < this.meshes.length; t++) if (this.meshes[t].uniqueId === e) return this.meshes[t]; return null; } /** * Gets a the last added mesh using a given Id * @param id defines the Id to search for * @returns the found mesh or null if not found at all. */ getLastMeshById(e) { for (let t = this.meshes.length - 1; t >= 0; t--) if (this.meshes[t].id === e) return this.meshes[t]; return null; } /** * Gets a the last transform node using a given Id * @param id defines the Id to search for * @returns the found mesh or null if not found at all. 
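* Usage sketch (the id "rootNode" is hypothetical):
* @example
* const lastNode = scene.getLastTransformNodeById("rootNode"); // null when no match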
*/ getLastTransformNodeById(e) { for (let t = this.transformNodes.length - 1; t >= 0; t--) if (this.transformNodes[t].id === e) return this.transformNodes[t]; return null; } /** * Gets a the last added node (Mesh, Camera, Light) using a given Id * @param id defines the Id to search for * @returns the found node or null if not found at all */ getLastEntryById(e) { let t; for (t = this.meshes.length - 1; t >= 0; t--) if (this.meshes[t].id === e) return this.meshes[t]; for (t = this.transformNodes.length - 1; t >= 0; t--) if (this.transformNodes[t].id === e) return this.transformNodes[t]; for (t = this.cameras.length - 1; t >= 0; t--) if (this.cameras[t].id === e) return this.cameras[t]; for (t = this.lights.length - 1; t >= 0; t--) if (this.lights[t].id === e) return this.lights[t]; return null; } /** * Gets a node (Mesh, Camera, Light) using a given Id * @param id defines the Id to search for * @returns the found node or null if not found at all */ getNodeById(e) { const t = this.getMeshById(e); if (t) return t; const i = this.getTransformNodeById(e); if (i) return i; const r = this.getLightById(e); if (r) return r; const s = this.getCameraById(e); if (s) return s; const n = this.getBoneById(e); return n || null; } /** * Gets a node (Mesh, Camera, Light) using a given name * @param name defines the name to search for * @returns the found node or null if not found at all. */ getNodeByName(e) { const t = this.getMeshByName(e); if (t) return t; const i = this.getTransformNodeByName(e); if (i) return i; const r = this.getLightByName(e); if (r) return r; const s = this.getCameraByName(e); if (s) return s; const n = this.getBoneByName(e); return n || null; } /** * Gets a mesh using a given name * @param name defines the name to search for * @returns the found mesh or null if not found at all. */ getMeshByName(e) { for (let t = 0; t < this.meshes.length; t++) if (this.meshes[t].name === e) return this.meshes[t]; return null; } /** * Gets a transform node using a given name * @param name defines the name to search for * @returns the found transform node or null if not found at all. */ getTransformNodeByName(e) { for (let t = 0; t < this.transformNodes.length; t++) if (this.transformNodes[t].name === e) return this.transformNodes[t]; return null; } /** * Gets a skeleton using a given Id (if many are found, this function will pick the last one) * @param id defines the Id to search for * @returns the found skeleton or null if not found at all. */ getLastSkeletonById(e) { for (let t = this.skeletons.length - 1; t >= 0; t--) if (this.skeletons[t].id === e) return this.skeletons[t]; return null; } /** * Gets a skeleton using a given auto generated unique id * @param uniqueId defines the unique id to search for * @returns the found skeleton or null if not found at all. */ getSkeletonByUniqueId(e) { for (let t = 0; t < this.skeletons.length; t++) if (this.skeletons[t].uniqueId === e) return this.skeletons[t]; return null; } /** * Gets a skeleton using a given id (if many are found, this function will pick the first one) * @param id defines the id to search for * @returns the found skeleton or null if not found at all. */ getSkeletonById(e) { for (let t = 0; t < this.skeletons.length; t++) if (this.skeletons[t].id === e) return this.skeletons[t]; return null; } /** * Gets a skeleton using a given name * @param name defines the name to search for * @returns the found skeleton or null if not found at all. 
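* Usage sketch (the skeleton name "heroSkeleton" is hypothetical):
* @example
* const skeleton = scene.getSkeletonByName("heroSkeleton"); // null when no match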
*/ getSkeletonByName(e) { for (let t = 0; t < this.skeletons.length; t++) if (this.skeletons[t].name === e) return this.skeletons[t]; return null; } /** * Gets a morph target manager using a given id (if many are found, this function will pick the last one) * @param id defines the id to search for * @returns the found morph target manager or null if not found at all. */ getMorphTargetManagerById(e) { for (let t = 0; t < this.morphTargetManagers.length; t++) if (this.morphTargetManagers[t].uniqueId === e) return this.morphTargetManagers[t]; return null; } /** * Gets a morph target using a given id (if many are found, this function will pick the first one) * @param id defines the id to search for * @returns the found morph target or null if not found at all. */ getMorphTargetById(e) { for (let t = 0; t < this.morphTargetManagers.length; ++t) { const i = this.morphTargetManagers[t]; for (let r = 0; r < i.numTargets; ++r) { const s = i.getTarget(r); if (s.id === e) return s; } } return null; } /** * Gets a morph target using a given name (if many are found, this function will pick the first one) * @param name defines the name to search for * @returns the found morph target or null if not found at all. */ getMorphTargetByName(e) { for (let t = 0; t < this.morphTargetManagers.length; ++t) { const i = this.morphTargetManagers[t]; for (let r = 0; r < i.numTargets; ++r) { const s = i.getTarget(r); if (s.name === e) return s; } } return null; } /** * Gets a post process using a given name (if many are found, this function will pick the first one) * @param name defines the name to search for * @returns the found post process or null if not found at all. */ getPostProcessByName(e) { for (let t = 0; t < this.postProcesses.length; ++t) { const i = this.postProcesses[t]; if (i.name === e) return i; } return null; } /** * Gets a boolean indicating if the given mesh is active * @param mesh defines the mesh to look for * @returns true if the mesh is in the active list */ isActiveMesh(e) { return this._activeMeshes.indexOf(e) !== -1; } /** * Return a unique id as a string which can serve as an identifier for the scene */ get uid() { return this._uid || (this._uid = Ve.RandomId()), this._uid; } /** * Add an externally attached data from its key. * This method call will fail and return false, if such key already exists. * If you don't care and just want to get the data no matter what, use the more convenient getOrAddExternalDataWithFactory() method. * @param key the unique key that identifies the data * @param data the data object to associate to the key for this Engine instance * @returns true if no such key were already present and the data was added successfully, false otherwise */ addExternalData(e, t) { return this._externalData || (this._externalData = new iB()), this._externalData.add(e, t); } /** * Get an externally attached data from its key * @param key the unique key that identifies the data * @returns the associated data, if present (can be null), or undefined if not present */ getExternalData(e) { return this._externalData ? this._externalData.get(e) : null; } /** * Get an externally attached data from its key, create it using a factory if it's not already present * @param key the unique key that identifies the data * @param factory the factory that will be called to create the instance if and only if it doesn't exists * @returns the associated data, can be null if the factory returned null. 
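* Usage sketch (the key "perSceneCache" and the Map-based factory are hypothetical; the factory only runs when the key is absent):
* @example
* const cache = scene.getOrAddExternalDataWithFactory("perSceneCache", () => new Map());
* cache.set("lastPickedMeshName", "ground");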
*/ getOrAddExternalDataWithFactory(e, t) { return this._externalData || (this._externalData = new iB()), this._externalData.getOrAddWithFactory(e, t); } /** * Remove an externally attached data from the Engine instance * @param key the unique key that identifies the data * @returns true if the data was successfully removed, false if it doesn't exist */ removeExternalData(e) { return this._externalData.remove(e); } _evaluateSubMesh(e, t, i, r) { if (r || e.isInFrustum(this._frustumPlanes)) { for (const n of this._evaluateSubMeshStage) n.action(t, e); const s = e.getMaterial(); s != null && (s.hasRenderTargetTextures && s.getRenderTargetTextures != null && this._processedMaterials.indexOf(s) === -1 && (this._processedMaterials.push(s), this._materialsRenderTargets.concatWithNoDuplicate(s.getRenderTargetTextures())), this._renderingManager.dispatch(e, t, s)); } } /** * Clear the processed materials smart array, preventing a retention point in material dispose. */ freeProcessedMaterials() { this._processedMaterials.dispose(); } /** Gets or sets a boolean blocking all the calls to freeActiveMeshes and freeRenderingGroups * It can be used in order to prevent going through methods freeRenderingGroups and freeActiveMeshes several times to improve performance * when disposing several meshes in a row or a hierarchy of meshes. * When used, it is the responsibility of the user to set blockfreeActiveMeshesAndRenderingGroups back to false. */ get blockfreeActiveMeshesAndRenderingGroups() { return this._preventFreeActiveMeshesAndRenderingGroups; } set blockfreeActiveMeshesAndRenderingGroups(e) { this._preventFreeActiveMeshesAndRenderingGroups !== e && (e && (this.freeActiveMeshes(), this.freeRenderingGroups()), this._preventFreeActiveMeshesAndRenderingGroups = e); } /** * Clear the active meshes smart array, preventing a retention point in mesh dispose. */ freeActiveMeshes() { if (!this.blockfreeActiveMeshesAndRenderingGroups && (this._activeMeshes.dispose(), this.activeCamera && this.activeCamera._activeMeshes && this.activeCamera._activeMeshes.dispose(), this.activeCameras)) for (let e = 0; e < this.activeCameras.length; e++) { const t = this.activeCameras[e]; t && t._activeMeshes && t._activeMeshes.dispose(); } } /** * Clear the info related to rendering groups, preventing retention points during dispose. */ freeRenderingGroups() { if (!this.blockfreeActiveMeshesAndRenderingGroups && (this._renderingManager && this._renderingManager.freeRenderingGroups(), this.textures)) for (let e = 0; e < this.textures.length; e++) { const t = this.textures[e]; t && t.renderList && t.freeRenderingGroups(); } } /** @internal */ _isInIntermediateRendering() { return this._intermediateRendering; } /** * Use this function to stop evaluating active meshes.
The current list will be kept alive between frames * @param skipEvaluateActiveMeshes defines an optional boolean indicating that the evaluate active meshes step must be completely skipped * @param onSuccess optional success callback * @param onError optional error callback * @param freezeMeshes defines if meshes should be frozen (true by default) * @param keepFrustumCulling defines if you want to keep running the frustum clipping (false by default) * @returns the current scene */ freezeActiveMeshes(e = !1, t, i, r = !0, s = !1) { return this.executeWhenReady(() => { if (!this.activeCamera) { i && i("No active camera found"); return; } if (this._frustumPlanes || this.updateTransformMatrix(), this._evaluateActiveMeshes(), this._activeMeshesFrozen = !0, this._activeMeshesFrozenButKeepClipping = s, this._skipEvaluateActiveMeshesCompletely = e, r) for (let n = 0; n < this._activeMeshes.length; n++) this._activeMeshes.data[n]._freeze(); t && t(); }), this; } /** * Use this function to restart evaluating active meshes on every frame * @returns the current scene */ unfreezeActiveMeshes() { for (let e = 0; e < this.meshes.length; e++) { const t = this.meshes[e]; t._internalAbstractMeshDataInfo && (t._internalAbstractMeshDataInfo._isActive = !1); } for (let e = 0; e < this._activeMeshes.length; e++) this._activeMeshes.data[e]._unFreeze(); return this._activeMeshesFrozen = !1, this; } _executeActiveContainerCleanup(e) { !(this._engine.snapshotRendering && this._engine.snapshotRenderingMode === 1) && this._activeMeshesFrozen && this._activeMeshes.length || this.onBeforeRenderObservable.addOnce(() => e.dispose()); } _evaluateActiveMeshes() { var e; if (this._engine.snapshotRendering && this._engine.snapshotRenderingMode === 1) { this._activeMeshes.length > 0 && ((e = this.activeCamera) === null || e === void 0 || e._activeMeshes.reset(), this._activeMeshes.reset(), this._renderingManager.reset(), this._processedMaterials.reset(), this._activeParticleSystems.reset(), this._activeSkeletons.reset(), this._softwareSkinnedMeshes.reset()); return; } if (this._activeMeshesFrozen && this._activeMeshes.length) { if (!this._skipEvaluateActiveMeshesCompletely) { const r = this._activeMeshes.length; for (let s = 0; s < r; s++) this._activeMeshes.data[s].computeWorldMatrix(); } if (this._activeParticleSystems) { const r = this._activeParticleSystems.length; for (let s = 0; s < r; s++) this._activeParticleSystems.data[s].animate(); } this._renderingManager.resetSprites(); return; } if (!this.activeCamera) return; this.onBeforeActiveMeshesEvaluationObservable.notifyObservers(this), this.activeCamera._activeMeshes.reset(), this._activeMeshes.reset(), this._renderingManager.reset(), this._processedMaterials.reset(), this._activeParticleSystems.reset(), this._activeSkeletons.reset(), this._softwareSkinnedMeshes.reset(), this._materialsRenderTargets.reset(); for (const r of this._beforeEvaluateActiveMeshStage) r.action(); const t = this.getActiveMeshCandidates(), i = t.length; for (let r = 0; r < i; r++) { const s = t.data[r]; if (s._internalAbstractMeshDataInfo._currentLODIsUpToDate = !1, s.isBlocked || (this._totalVertices.addCount(s.getTotalVertices(), !1), !s.isReady() || !s.isEnabled() || s.scaling.hasAZeroComponent)) continue; s.computeWorldMatrix(), s.actionManager && s.actionManager.hasSpecificTriggers2(12, 13) && this._meshesForIntersections.pushNoDuplicate(s); let n = this.customLODSelector ?
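/*
 * At this point of the active-mesh evaluation the LOD to render is chosen: a user-provided
 * `customLODSelector(mesh, camera)` takes precedence over the mesh's own `getLOD`. A minimal,
 * hypothetical override (the `scene` variable and the 10-unit threshold are illustrative assumptions):
 *
 *   scene.customLODSelector = (mesh, camera) => {
 *       // keep the full-resolution mesh when the camera is close, otherwise defer to the mesh's LOD chain
 *       const distance = camera.globalPosition.subtract(mesh.getAbsolutePosition()).length();
 *       return distance < 10 ? mesh : mesh.getLOD(camera);
 *   };
 */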
this.customLODSelector(s, this.activeCamera) : s.getLOD(this.activeCamera); if (s._internalAbstractMeshDataInfo._currentLOD = n, s._internalAbstractMeshDataInfo._currentLODIsUpToDate = !0, n != null && (n !== s && n.billboardMode !== 0 && n.computeWorldMatrix(), s._preActivate(), s.isVisible && s.visibility > 0 && s.layerMask & this.activeCamera.layerMask && (this._skipFrustumClipping || s.alwaysSelectAsActiveMesh || s.isInFrustum(this._frustumPlanes)))) { this._activeMeshes.push(s), this.activeCamera._activeMeshes.push(s), n !== s && n._activate(this._renderId, !1); for (const a of this._preActiveMeshStage) a.action(s); s._activate(this._renderId, !1) && (s.isAnInstance ? s._internalAbstractMeshDataInfo._actAsRegularMesh && (n = s) : n._internalAbstractMeshDataInfo._onlyForInstances = !1, n._internalAbstractMeshDataInfo._isActive = !0, this._activeMesh(s, n)), s._postActivate(); } } if (this.onAfterActiveMeshesEvaluationObservable.notifyObservers(this), this.particlesEnabled) { this.onBeforeParticlesRenderingObservable.notifyObservers(this); for (let r = 0; r < this.particleSystems.length; r++) { const s = this.particleSystems[r]; if (!s.isStarted() || !s.emitter) continue; const n = s.emitter; (!n.position || n.isEnabled()) && (this._activeParticleSystems.push(s), s.animate(), this._renderingManager.dispatchParticles(s)); } this.onAfterParticlesRenderingObservable.notifyObservers(this); } } _activeMesh(e, t) { this._skeletonsEnabled && t.skeleton !== null && t.skeleton !== void 0 && (this._activeSkeletons.pushNoDuplicate(t.skeleton) && (t.skeleton.prepare(), this._activeBones.addCount(t.skeleton.bones.length, !1)), t.computeBonesUsingShaders || this._softwareSkinnedMeshes.pushNoDuplicate(t)); let i = e.hasInstances || e.isAnInstance || this.dispatchAllSubMeshesOfActiveMeshes || this._skipFrustumClipping || t.alwaysSelectAsActiveMesh; if (t && t.subMeshes && t.subMeshes.length > 0) { const r = this.getActiveSubMeshCandidates(t), s = r.length; i = i || s === 1; for (let n = 0; n < s; n++) { const a = r.data[n]; this._evaluateSubMesh(a, t, e, i); } } } /** * Update the transform matrix to update from the current active camera * @param force defines a boolean used to force the update even if cache is up to date */ updateTransformMatrix(e) { const t = this.activeCamera; if (t) if (t._renderingMultiview) { const i = t._rigCameras[0], r = t._rigCameras[1]; this.setTransformMatrix(i.getViewMatrix(), i.getProjectionMatrix(e), r.getViewMatrix(), r.getProjectionMatrix(e)); } else this.setTransformMatrix(t.getViewMatrix(), t.getProjectionMatrix(e)); } _bindFrameBuffer(e, t = !0) { e && e._multiviewTexture ? e._multiviewTexture._bindFrameBuffer() : e && e.outputRenderTarget ? e.outputRenderTarget._bindFrameBuffer() : this._engine._currentFrameBufferIsDefaultFrameBuffer() || this._engine.restoreDefaultFramebuffer(), t && this._clearFrameBuffer(e); } _clearFrameBuffer(e) { if (!(e && e._multiviewTexture)) if (e && e.outputRenderTarget && !e._renderingMultiview) { const t = e.outputRenderTarget; t.onClearObservable.hasObservers() ? t.onClearObservable.notifyObservers(this._engine) : t.skipInitialClear || (this.autoClear && this._engine.clear(t.clearColor || this.clearColor, !t._cleared, !0, !0), t._cleared = !0); } else this._defaultFrameBufferCleared ? 
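/*
 * Default-framebuffer branch: if the default framebuffer has already been cleared this frame, only the
 * depth and stencil attachments are cleared again (the color buffer is kept); otherwise the flag is set
 * and the full auto-clear path (_clear) runs once for the frame.
 */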
this._engine.clear(null, !1, !0, !0) : (this._defaultFrameBufferCleared = !0, this._clear()); } /** * @internal */ _renderForCamera(e, t, i = !0) { var r, s, n; if (e && e._skipRendering) return; const a = this._engine; if (this._activeCamera = e, !this.activeCamera) throw new Error("Active camera not set"); if (a.setViewport(this.activeCamera.viewport), this.resetCachedMaterial(), this._renderId++, !this.prePass && i) { let o = !0; e._renderingMultiview && e.outputRenderTarget && (o = e.outputRenderTarget.skipInitialClear, this.autoClear && (this._defaultFrameBufferCleared = !1, e.outputRenderTarget.skipInitialClear = !1)), this._bindFrameBuffer(this._activeCamera), e._renderingMultiview && e.outputRenderTarget && (e.outputRenderTarget.skipInitialClear = o); } this.updateTransformMatrix(), this.onBeforeCameraRenderObservable.notifyObservers(this.activeCamera), this._evaluateActiveMeshes(); for (let o = 0; o < this._softwareSkinnedMeshes.length; o++) { const u = this._softwareSkinnedMeshes.data[o]; u.applySkeleton(u.skeleton); } this.onBeforeRenderTargetsRenderObservable.notifyObservers(this), this._renderTargets.concatWithNoDuplicate(this._materialsRenderTargets), e.customRenderTargets && e.customRenderTargets.length > 0 && this._renderTargets.concatWithNoDuplicate(e.customRenderTargets), t && t.customRenderTargets && t.customRenderTargets.length > 0 && this._renderTargets.concatWithNoDuplicate(t.customRenderTargets), this.environmentTexture && this.environmentTexture.isRenderTarget && this._renderTargets.pushNoDuplicate(this.environmentTexture); for (const o of this._gatherActiveCameraRenderTargetsStage) o.action(this._renderTargets); let l = !1; if (this.renderTargetsEnabled) { if (this._intermediateRendering = !0, this._renderTargets.length > 0) { Ve.StartPerformanceCounter("Render targets", this._renderTargets.length > 0); for (let o = 0; o < this._renderTargets.length; o++) { const u = this._renderTargets.data[o]; if (u._shouldRender()) { this._renderId++; const h = u.activeCamera && u.activeCamera !== this.activeCamera; u.render(h, this.dumpNextRenderTargets), l = !0; } } Ve.EndPerformanceCounter("Render targets", this._renderTargets.length > 0), this._renderId++; } for (const o of this._cameraDrawRenderTargetStage) l = o.action(this.activeCamera) || l; this._intermediateRendering = !1; } this._engine.currentRenderPassId = (n = (s = (r = e.outputRenderTarget) === null || r === void 0 ? void 0 : r.renderPassId) !== null && s !== void 0 ? s : e.renderPassId) !== null && n !== void 0 ? n : 0, l && !this.prePass && (this._bindFrameBuffer(this._activeCamera, !1), this.updateTransformMatrix()), this.onAfterRenderTargetsRenderObservable.notifyObservers(this), this.postProcessManager && !e._multiviewTexture && !this.prePass && this.postProcessManager._prepareFrame(); for (const o of this._beforeCameraDrawStage) o.action(this.activeCamera); this.onBeforeDrawPhaseObservable.notifyObservers(this), a.snapshotRendering && a.snapshotRenderingMode === 1 && this.finalizeSceneUbo(), this._renderingManager.render(null, null, !0, !0), this.onAfterDrawPhaseObservable.notifyObservers(this); for (const o of this._afterCameraDrawStage) o.action(this.activeCamera); if (this.postProcessManager && !e._multiviewTexture) { const o = e.outputRenderTarget ? 
e.outputRenderTarget.renderTarget : void 0; this.postProcessManager._finalizeFrame(e.isIntermediate, o); } for (const o of this._afterCameraPostProcessStage) o.action(this.activeCamera); this._renderTargets.reset(), this.onAfterCameraRenderObservable.notifyObservers(this.activeCamera); } _processSubCameras(e, t = !0) { if (e.cameraRigMode === 0 || e._renderingMultiview) { e._renderingMultiview && !this._multiviewSceneUbo && this._createMultiviewUbo(), this._renderForCamera(e, void 0, t), this.onAfterRenderCameraObservable.notifyObservers(e); return; } if (e._useMultiviewToSingleView) this._renderMultiviewToSingleView(e); else { this.onBeforeCameraRenderObservable.notifyObservers(e); for (let i = 0; i < e._rigCameras.length; i++) this._renderForCamera(e._rigCameras[i], e); } this._activeCamera = e, this.updateTransformMatrix(), this.onAfterRenderCameraObservable.notifyObservers(e); } _checkIntersections() { for (let e = 0; e < this._meshesForIntersections.length; e++) { const t = this._meshesForIntersections.data[e]; if (t.actionManager) for (let i = 0; t.actionManager && i < t.actionManager.actions.length; i++) { const r = t.actionManager.actions[i]; if (r.trigger === 12 || r.trigger === 13) { const s = r.getTriggerParameter(), n = s.mesh ? s.mesh : s, a = n.intersectsMesh(t, s.usePreciseIntersection), l = t._intersectionsInProgress.indexOf(n); a && l === -1 ? r.trigger === 12 ? (r._executeCurrent(Ro.CreateNew(t, void 0, n)), t._intersectionsInProgress.push(n)) : r.trigger === 13 && t._intersectionsInProgress.push(n) : !a && l > -1 && (r.trigger === 13 && r._executeCurrent(Ro.CreateNew(t, void 0, n)), (!t.actionManager.hasSpecificTrigger(13, (o) => { const u = o.mesh ? o.mesh : o; return n === u; }) || r.trigger === 13) && t._intersectionsInProgress.splice(l, 1)); } } } } /** * @internal */ _advancePhysicsEngineStep(e) { } /** @internal */ _animate() { } /** Execute all animations (for a frame) */ animate() { if (this._engine.isDeterministicLockStep()) { let e = Math.max(ii.MinDeltaTime, Math.min(this._engine.getDeltaTime(), ii.MaxDeltaTime)) + this._timeAccumulator; const t = this._engine.getTimeStep(), i = 1e3 / t / 1e3; let r = 0; const s = this._engine.getLockstepMaxSteps(); let n = Math.floor(e / t); for (n = Math.min(n, s); e > 0 && r < n; ) this.onBeforeStepObservable.notifyObservers(this), this._animationRatio = t * i, this._animate(), this.onAfterAnimationsObservable.notifyObservers(this), this.physicsEnabled && this._advancePhysicsEngineStep(t), this.onAfterStepObservable.notifyObservers(this), this._currentStepId++, r++, e -= t; this._timeAccumulator = e < 0 ? 0 : e; } else { const e = this.useConstantAnimationDeltaTime ? 16 : Math.max(ii.MinDeltaTime, Math.min(this._engine.getDeltaTime(), ii.MaxDeltaTime)); this._animationRatio = e * (60 / 1e3), this._animate(), this.onAfterAnimationsObservable.notifyObservers(this), this.physicsEnabled && this._advancePhysicsEngineStep(e); } } _clear() { (this.autoClearDepthAndStencil || this.autoClear) && this._engine.clear(this.clearColor, this.autoClear || this.forceWireframe || this.forcePointsCloud, this.autoClearDepthAndStencil, this.autoClearDepthAndStencil); } _checkCameraRenderTarget(e) { var t; if (e != null && e.outputRenderTarget && !(e != null && e.isRigCamera) && (e.outputRenderTarget._cleared = !1), !((t = e == null ? 
void 0 : e.rigCameras) === null || t === void 0) && t.length) for (let i = 0; i < e.rigCameras.length; ++i) { const r = e.rigCameras[i].outputRenderTarget; r && (r._cleared = !1); } } /** * Resets the draw wrappers cache of all meshes * @param passId If provided, releases only the draw wrapper corresponding to this render pass id */ resetDrawCache(e) { if (this.meshes) for (const t of this.meshes) t.resetDrawCache(e); } /** * Render the scene * @param updateCameras defines a boolean indicating if cameras must update according to their inputs (true by default) * @param ignoreAnimations defines a boolean indicating if animations should not be executed (false by default) */ render(e = !0, t = !1) { var i, r, s; if (this.isDisposed) return; this.onReadyObservable.hasObservers() && this._executeWhenReadyTimeoutId === null && this._checkIsReady(), this._frameId++, this._defaultFrameBufferCleared = !1, this._checkCameraRenderTarget(this.activeCamera), !((i = this.activeCameras) === null || i === void 0) && i.length && this.activeCameras.forEach(this._checkCameraRenderTarget), this._registerTransientComponents(), this._activeParticles.fetchNewFrame(), this._totalVertices.fetchNewFrame(), this._activeIndices.fetchNewFrame(), this._activeBones.fetchNewFrame(), this._meshesForIntersections.reset(), this.resetCachedMaterial(), this.onBeforeAnimationsObservable.notifyObservers(this), this.actionManager && this.actionManager.processTrigger(11), t || this.animate(); for (const l of this._beforeCameraUpdateStage) l.action(); if (e) { if (this.activeCameras && this.activeCameras.length > 0) for (let l = 0; l < this.activeCameras.length; l++) { const o = this.activeCameras[l]; if (o.update(), o.cameraRigMode !== 0) for (let u = 0; u < o._rigCameras.length; u++) o._rigCameras[u].update(); } else if (this.activeCamera && (this.activeCamera.update(), this.activeCamera.cameraRigMode !== 0)) for (let l = 0; l < this.activeCamera._rigCameras.length; l++) this.activeCamera._rigCameras[l].update(); } this.onBeforeRenderObservable.notifyObservers(this); const n = this.getEngine(); this.onBeforeRenderTargetsRenderObservable.notifyObservers(this); const a = !((r = this.activeCameras) === null || r === void 0) && r.length ? this.activeCameras[0] : this.activeCamera; if (this.renderTargetsEnabled) { Ve.StartPerformanceCounter("Custom render targets", this.customRenderTargets.length > 0), this._intermediateRendering = !0; for (let l = 0; l < this.customRenderTargets.length; l++) { const o = this.customRenderTargets[l]; if (o._shouldRender()) { if (this._renderId++, this.activeCamera = o.activeCamera || this.activeCamera, !this.activeCamera) throw new Error("Active camera not set"); n.setViewport(this.activeCamera.viewport), this.updateTransformMatrix(), o.render(a !== this.activeCamera, this.dumpNextRenderTargets); } } Ve.EndPerformanceCounter("Custom render targets", this.customRenderTargets.length > 0), this._intermediateRendering = !1, this._renderId++; } this._engine.currentRenderPassId = (s = a == null ? void 0 : a.renderPassId) !== null && s !== void 0 ? 
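/*
 * render() is normally driven once per frame by the engine's render loop. Hedged sketch, not part of
 * this bundle (the `engine` and `scene` variables are illustrative assumptions):
 *
 *   engine.runRenderLoop(() => {
 *       scene.render(); // updateCameras defaults to true, ignoreAnimations to false
 *   });
 */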
s : 0, this.activeCamera = a, this._activeCamera && this._activeCamera.cameraRigMode !== 22 && !this.prePass && this._bindFrameBuffer(this._activeCamera, !1), this.onAfterRenderTargetsRenderObservable.notifyObservers(this); for (const l of this._beforeClearStage) l.action(); this._clearFrameBuffer(this.activeCamera); for (const l of this._gatherRenderTargetsStage) l.action(this._renderTargets); if (this.activeCameras && this.activeCameras.length > 0) for (let l = 0; l < this.activeCameras.length; l++) this._processSubCameras(this.activeCameras[l], l > 0); else { if (!this.activeCamera) throw new Error("No camera defined"); this._processSubCameras(this.activeCamera, !!this.activeCamera.outputRenderTarget); } this._checkIntersections(); for (const l of this._afterRenderStage) l.action(); if (this.afterRender && this.afterRender(), this.onAfterRenderObservable.notifyObservers(this), this._toBeDisposed.length) { for (let l = 0; l < this._toBeDisposed.length; l++) { const o = this._toBeDisposed[l]; o && o.dispose(); } this._toBeDisposed.length = 0; } this.dumpNextRenderTargets && (this.dumpNextRenderTargets = !1), this._activeBones.addCount(0, !0), this._activeIndices.addCount(0, !0), this._activeParticles.addCount(0, !0), this._engine.restoreDefaultFramebuffer(); } /** * Freeze all materials * A frozen material will not be updatable but should be faster to render * Note: multimaterials will not be frozen, but their submaterials will */ freezeMaterials() { for (let e = 0; e < this.materials.length; e++) this.materials[e].freeze(); } /** * Unfreeze all materials * A frozen material will not be updatable but should be faster to render */ unfreezeMaterials() { for (let e = 0; e < this.materials.length; e++) this.materials[e].unfreeze(); } /** * Releases all held resources */ dispose() { if (this.isDisposed) return; this.beforeRender = null, this.afterRender = null, this.metadata = null, this.skeletons.length = 0, this.morphTargetManagers.length = 0, this._transientComponents.length = 0, this._isReadyForMeshStage.clear(), this._beforeEvaluateActiveMeshStage.clear(), this._evaluateSubMeshStage.clear(), this._preActiveMeshStage.clear(), this._cameraDrawRenderTargetStage.clear(), this._beforeCameraDrawStage.clear(), this._beforeRenderTargetDrawStage.clear(), this._beforeRenderingGroupDrawStage.clear(), this._beforeRenderingMeshStage.clear(), this._afterRenderingMeshStage.clear(), this._afterRenderingGroupDrawStage.clear(), this._afterCameraDrawStage.clear(), this._afterRenderTargetDrawStage.clear(), this._afterRenderStage.clear(), this._beforeCameraUpdateStage.clear(), this._beforeClearStage.clear(), this._gatherRenderTargetsStage.clear(), this._gatherActiveCameraRenderTargetsStage.clear(), this._pointerMoveStage.clear(), this._pointerDownStage.clear(), this._pointerUpStage.clear(), this.importedMeshesFiles = [], this.stopAllAnimations && (this._activeAnimatables.forEach((s) => { s.onAnimationEndObservable.clear(), s.onAnimationEnd = null; }), this.stopAllAnimations()), this.resetCachedMaterial(), this.activeCamera && (this.activeCamera._activeMeshes.dispose(), this.activeCamera = null), this.activeCameras = null, this._activeMeshes.dispose(), this._renderingManager.dispose(), this._processedMaterials.dispose(), this._activeParticleSystems.dispose(), this._activeSkeletons.dispose(), this._softwareSkinnedMeshes.dispose(), this._renderTargets.dispose(), this._materialsRenderTargets.dispose(), this._registeredForLateAnimationBindings.dispose(), this._meshesForIntersections.dispose(), 
this._toBeDisposed.length = 0; const e = this._activeRequests.slice(); for (const s of e) s.abort(); this._activeRequests.length = 0; try { this.onDisposeObservable.notifyObservers(this); } catch (s) { Ce.Error("An error occurred while calling onDisposeObservable!", s); } if (this.detachControl(), this._engine.getInputElement()) for (let s = 0; s < this.cameras.length; s++) this.cameras[s].detachControl(); this._disposeList(this.animationGroups), this._disposeList(this.lights), this._disposeList(this.meshes, (s) => s.dispose(!0)), this._disposeList(this.transformNodes, (s) => s.dispose(!0)); const i = this.cameras; this._disposeList(i), this._defaultMaterial && this._defaultMaterial.dispose(), this._disposeList(this.multiMaterials), this._disposeList(this.materials), this._disposeList(this.particleSystems), this._disposeList(this.postProcesses), this._disposeList(this.textures), this._disposeList(this.morphTargetManagers), this._sceneUbo.dispose(), this._multiviewSceneUbo && this._multiviewSceneUbo.dispose(), this.postProcessManager.dispose(), this._disposeList(this._components); let r = this._engine.scenes.indexOf(this); r > -1 && this._engine.scenes.splice(r, 1), gi._LastCreatedScene === this && (this._engine.scenes.length > 0 ? gi._LastCreatedScene = this._engine.scenes[this._engine.scenes.length - 1] : gi._LastCreatedScene = null), r = this._engine._virtualScenes.indexOf(this), r > -1 && this._engine._virtualScenes.splice(r, 1), this._engine.wipeCaches(!0), this.onDisposeObservable.clear(), this.onBeforeRenderObservable.clear(), this.onAfterRenderObservable.clear(), this.onBeforeRenderTargetsRenderObservable.clear(), this.onAfterRenderTargetsRenderObservable.clear(), this.onAfterStepObservable.clear(), this.onBeforeStepObservable.clear(), this.onBeforeActiveMeshesEvaluationObservable.clear(), this.onAfterActiveMeshesEvaluationObservable.clear(), this.onBeforeParticlesRenderingObservable.clear(), this.onAfterParticlesRenderingObservable.clear(), this.onBeforeDrawPhaseObservable.clear(), this.onAfterDrawPhaseObservable.clear(), this.onBeforeAnimationsObservable.clear(), this.onAfterAnimationsObservable.clear(), this.onDataLoadedObservable.clear(), this.onBeforeRenderingGroupObservable.clear(), this.onAfterRenderingGroupObservable.clear(), this.onMeshImportedObservable.clear(), this.onBeforeCameraRenderObservable.clear(), this.onAfterCameraRenderObservable.clear(), this.onAfterRenderCameraObservable.clear(), this.onReadyObservable.clear(), this.onNewCameraAddedObservable.clear(), this.onCameraRemovedObservable.clear(), this.onNewLightAddedObservable.clear(), this.onLightRemovedObservable.clear(), this.onNewGeometryAddedObservable.clear(), this.onGeometryRemovedObservable.clear(), this.onNewTransformNodeAddedObservable.clear(), this.onTransformNodeRemovedObservable.clear(), this.onNewMeshAddedObservable.clear(), this.onMeshRemovedObservable.clear(), this.onNewSkeletonAddedObservable.clear(), this.onSkeletonRemovedObservable.clear(), this.onNewMaterialAddedObservable.clear(), this.onNewMultiMaterialAddedObservable.clear(), this.onMaterialRemovedObservable.clear(), this.onMultiMaterialRemovedObservable.clear(), this.onNewTextureAddedObservable.clear(), this.onTextureRemovedObservable.clear(), this.onPrePointerObservable.clear(), this.onPointerObservable.clear(), this.onPreKeyboardObservable.clear(), this.onKeyboardObservable.clear(), this.onActiveCameraChanged.clear(), this.onScenePerformancePriorityChangedObservable.clear(), this._isDisposed = !0; } _disposeList(e, t) { const i = 
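/*
 * _disposeList iterates over a copy of the array so dispose callbacks that mutate the original list
 * cannot skip entries; the default callback simply calls dispose() on each item before the list is emptied.
 * Hedged teardown sketch (the `scene` and `engine` variables are illustrative assumptions):
 *
 *   scene.dispose();  // releases meshes, materials, textures and clears every observable
 *   engine.dispose(); // optionally release the engine and its GPU context afterwards
 */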
e.slice(0); t = t ?? ((r) => r.dispose()); for (const r of i) t(r); e.length = 0; } /** * Gets if the scene is already disposed */ get isDisposed() { return this._isDisposed; } /** * Call this function to reduce the memory footprint of the scene. * Vertex buffers will not store CPU data anymore (this will prevent picking, collisions or physics from working correctly) */ clearCachedVertexData() { for (let e = 0; e < this.meshes.length; e++) { const i = this.meshes[e].geometry; i && i.clearCachedData(); } } /** * This function will remove the local cached buffer data from textures. * It will save memory but will prevent the texture from being rebuilt */ cleanCachedTextureBuffer() { for (const e of this.textures) e._buffer && (e._buffer = null); } /** * Get the world extent vectors with an optional filter * * @param filterPredicate the predicate - which meshes should be included when calculating the world size * @returns {{ min: Vector3; max: Vector3 }} min and max vectors */ getWorldExtends(e) { const t = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), i = new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); return e = e || (() => !0), this.meshes.filter(e).forEach((r) => { if (r.computeWorldMatrix(!0), !r.subMeshes || r.subMeshes.length === 0 || r.infiniteDistance) return; const s = r.getBoundingInfo(), n = s.boundingBox.minimumWorld, a = s.boundingBox.maximumWorld; D.CheckExtends(n, t, i), D.CheckExtends(a, t, i); }), { min: t, max: i }; } // Picking /** * Creates a ray that can be used to pick in the scene * @param x defines the x coordinate of the origin (on-screen) * @param y defines the y coordinate of the origin (on-screen) * @param world defines the world matrix to use if you want to pick in object space (instead of world space) * @param camera defines the camera to use for the picking * @param cameraViewSpace defines if picking will be done in view space (false by default) * @returns a Ray */ createPickingRay(e, t, i, r, s = !1) { throw yr("Ray"); } /** * Creates a ray that can be used to pick in the scene * @param x defines the x coordinate of the origin (on-screen) * @param y defines the y coordinate of the origin (on-screen) * @param world defines the world matrix to use if you want to pick in object space (instead of world space) * @param result defines the ray where to store the picking ray * @param camera defines the camera to use for the picking * @param cameraViewSpace defines if picking will be done in view space (false by default) * @param enableDistantPicking defines if picking should handle large values for mesh position/scaling (false by default) * @returns the current scene */ createPickingRayToRef(e, t, i, r, s, n = !1, a = !1) { throw yr("Ray"); } /** * Creates a ray that can be used to pick in the scene * @param x defines the x coordinate of the origin (on-screen) * @param y defines the y coordinate of the origin (on-screen) * @param camera defines the camera to use for the picking * @returns a Ray */ createPickingRayInCameraSpace(e, t, i) { throw yr("Ray"); } /** * Creates a ray that can be used to pick in the scene * @param x defines the x coordinate of the origin (on-screen) * @param y defines the y coordinate of the origin (on-screen) * @param result defines the ray where to store the picking ray * @param camera defines the camera to use for the picking * @returns the current scene */ createPickingRayInCameraSpaceToRef(e, t, i, r) { throw yr("Ray"); } /** @internal */ get _pickingAvailable() { return !1; } /** Launch a ray to try to pick a mesh in
the scene * @param x position on screen * @param y position on screen * @param predicate Predicate function used to determine eligible meshes. Can be set to null. In this case, a mesh must be enabled, visible and with isPickable set to true * @param fastCheck defines if the first intersection will be used (and not the closest) * @param camera to use for computing the picking ray. Can be set to null. In this case, the scene.activeCamera will be used * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @returns a PickingInfo */ pick(e, t, i, r, s, n) { const a = yr("Ray", !0); return a && Ce.Warn(a), new ku(); } /** Launch a ray to try to pick a mesh in the scene using only bounding information of the main mesh (not using submeshes) * @param x position on screen * @param y position on screen * @param predicate Predicate function used to determine eligible meshes. Can be set to null. In this case, a mesh must be enabled, visible and with isPickable set to true * @param fastCheck defines if the first intersection will be used (and not the closest) * @param camera to use for computing the picking ray. Can be set to null. In this case, the scene.activeCamera will be used * @returns a PickingInfo (Please note that some info will not be set like distance, bv, bu and everything that cannot be capture by only using bounding infos) */ pickWithBoundingInfo(e, t, i, r, s) { const n = yr("Ray", !0); return n && Ce.Warn(n), new ku(); } /** * Use the given ray to pick a mesh in the scene. A mesh triangle can be picked both from its front and back sides, * irrespective of orientation. * @param ray The ray to use to pick meshes * @param predicate Predicate function used to determine eligible meshes. Can be set to null. In this case, a mesh must have isPickable set to true * @param fastCheck defines if the first intersection will be used (and not the closest) * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @returns a PickingInfo */ pickWithRay(e, t, i, r) { throw yr("Ray"); } /** * Launch a ray to try to pick a mesh in the scene. A mesh triangle can be picked both from its front and back sides, * irrespective of orientation. * @param x X position on screen * @param y Y position on screen * @param predicate Predicate function used to determine eligible meshes. Can be set to null. In this case, a mesh must be enabled, visible and with isPickable set to true * @param camera camera to use for computing the picking ray. Can be set to null. In this case, the scene.activeCamera will be used * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @returns an array of PickingInfo */ multiPick(e, t, i, r, s) { throw yr("Ray"); } /** * Launch a ray to try to pick a mesh in the scene * @param ray Ray to use * @param predicate Predicate function used to determine eligible meshes. Can be set to null. 
In this case, a mesh must be enabled, visible and with isPickable set to true * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @returns an array of PickingInfo */ multiPickWithRay(e, t, i) { throw yr("Ray"); } /** * Force the value of meshUnderPointer * @param mesh defines the mesh to use * @param pointerId optional pointer id when using more than one pointer * @param pickResult optional pickingInfo data used to find mesh */ setPointerOverMesh(e, t, i) { this._inputManager.setPointerOverMesh(e, t, i); } /** * Gets the mesh under the pointer * @returns a Mesh or null if no mesh is under the pointer */ getPointerOverMesh() { return this._inputManager.getPointerOverMesh(); } // Misc. /** @internal */ _rebuildGeometries() { for (const e of this.geometries) e._rebuild(); for (const e of this.meshes) e._rebuild(); this.postProcessManager && this.postProcessManager._rebuild(); for (const e of this._components) e.rebuild(); for (const e of this.particleSystems) e.rebuild(); if (this.spriteManagers) for (const e of this.spriteManagers) e.rebuild(); } /** @internal */ _rebuildTextures() { for (const e of this.textures) e._rebuild(); this.markAllMaterialsAsDirty(1); } /** * Get objects from a list by tags * @param list the list of objects to use * @param tagsQuery the query to use * @param filter a predicate to filter for tags * @returns the filtered list of objects */ _getByTags(e, t, i) { if (t === void 0) return e; const r = []; for (const s in e) { const n = e[s]; $s && $s.MatchesQuery(n, t) && (!i || i(n)) && r.push(n); } return r; } /** * Get a list of meshes by tags * @param tagsQuery defines the tags query to use * @param filter defines a predicate used to filter results * @returns an array of Mesh */ getMeshesByTags(e, t) { return this._getByTags(this.meshes, e, t); } /** * Get a list of cameras by tags * @param tagsQuery defines the tags query to use * @param filter defines a predicate used to filter results * @returns an array of Camera */ getCamerasByTags(e, t) { return this._getByTags(this.cameras, e, t); } /** * Get a list of lights by tags * @param tagsQuery defines the tags query to use * @param filter defines a predicate used to filter results * @returns an array of Light */ getLightsByTags(e, t) { return this._getByTags(this.lights, e, t); } /** * Get a list of materials by tags * @param tagsQuery defines the tags query to use * @param filter defines a predicate used to filter results * @returns an array of Material */ getMaterialByTags(e, t) { return this._getByTags(this.materials, e, t).concat(this._getByTags(this.multiMaterials, e, t)); } /** * Get a list of transform nodes by tags * @param tagsQuery defines the tags query to use * @param filter defines a predicate used to filter results * @returns an array of TransformNode */ getTransformNodesByTags(e, t) { return this._getByTags(this.transformNodes, e, t); } /** * Overrides the default sort function applied in the rendering group to prepare the meshes. * This allows control over front-to-back rendering, or the reverse, depending on the special needs. * * @param renderingGroupId The rendering group id corresponding to its index * @param opaqueSortCompareFn The opaque queue comparison function used to sort. * @param alphaTestSortCompareFn The alpha test queue comparison function used to sort. * @param transparentSortCompareFn The transparent queue comparison function used to sort.
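* @example
* // Hedged sketch (assumes Babylon.js-style RenderingGroup sort helpers are available; `scene` is illustrative):
* // sort opaque meshes of the default rendering group front-to-back to reduce overdraw.
* scene.setRenderingOrder(0, BABYLON.RenderingGroup.frontToBackSortCompare);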
*/ setRenderingOrder(e, t = null, i = null, r = null) { this._renderingManager.setRenderingOrder(e, t, i, r); } /** * Specifies whether or not the stencil and depth buffer are cleared between two rendering groups. * * @param renderingGroupId The rendering group id corresponding to its index * @param autoClearDepthStencil Automatically clears depth and stencil between groups if true. * @param depth Automatically clears depth between groups if true and autoClear is true. * @param stencil Automatically clears stencil between groups if true and autoClear is true. */ setRenderingAutoClearDepthStencil(e, t, i = !0, r = !0) { this._renderingManager.setRenderingAutoClearDepthStencil(e, t, i, r); } /** * Gets the current auto clear configuration for one rendering group of the rendering * manager. * @param index the rendering group index to get the information for * @returns The auto clear setup for the requested rendering group */ getAutoClearDepthStencilSetup(e) { return this._renderingManager.getAutoClearDepthStencilSetup(e); } /** @internal */ _forceBlockMaterialDirtyMechanism(e) { this._blockMaterialDirtyMechanism = e; } /** Gets or sets a boolean blocking all the calls to markAllMaterialsAsDirty (ie. the materials won't be updated if they are out of sync) */ get blockMaterialDirtyMechanism() { return this._blockMaterialDirtyMechanism; } set blockMaterialDirtyMechanism(e) { this._blockMaterialDirtyMechanism !== e && (this._blockMaterialDirtyMechanism = e, e || this.markAllMaterialsAsDirty(63)); } /** * Will flag all materials as dirty to trigger new shader compilation * @param flag defines the flag used to specify which material part must be marked as dirty * @param predicate If not null, it will be used to specify if a material has to be marked as dirty */ markAllMaterialsAsDirty(e, t) { if (!this._blockMaterialDirtyMechanism) for (const i of this.materials) t && !t(i) || i.markAsDirty(e); } /** * @internal */ _loadFile(e, t, i, r, s, n, a) { const l = vT(e, t, i, r ? this.offlineProvider : void 0, s, n, a); return this._activeRequests.push(l), l.onCompleteObservable.add((o) => { this._activeRequests.splice(this._activeRequests.indexOf(o), 1); }), l; } /** * @internal */ _loadFileAsync(e, t, i, r, s) { return new Promise((n, a) => { this._loadFile(e, (l) => { n(l); }, t, i, r, (l, o) => { a(o); }, s); }); } /** * @internal */ _requestFile(e, t, i, r, s, n, a) { const l = GB(e, t, i, r ? this.offlineProvider : void 0, s, n, a); return this._activeRequests.push(l), l.onCompleteObservable.add((o) => { this._activeRequests.splice(this._activeRequests.indexOf(o), 1); }), l; } /** * @internal */ _requestFileAsync(e, t, i, r, s) { return new Promise((n, a) => { this._requestFile(e, (l) => { n(l); }, t, i, r, (l) => { a(l); }, s); }); } /** * @internal */ _readFile(e, t, i, r, s) { const n = VO(e, t, i, r, s); return this._activeRequests.push(n), n.onCompleteObservable.add((a) => { this._activeRequests.splice(this._activeRequests.indexOf(a), 1); }), n; } /** * @internal */ _readFileAsync(e, t, i) { return new Promise((r, s) => { this._readFile(e, (n) => { r(n); }, t, i, (n) => { s(n); }); }); } /** * This method gets the performance collector belonging to the scene, which is generally shared with the inspector. * @returns the perf collector belonging to the scene. */ getPerfCollector() { throw yr("performanceViewerSceneExtension"); } // deprecated /** * Sets the active camera of the scene using its Id * @param id defines the camera's Id * @returns the new active camera or null if none found. 
* @deprecated Please use setActiveCameraById instead */ setActiveCameraByID(e) { return this.setActiveCameraById(e); } /** * Get a material using its id * @param id defines the material's Id * @returns the material or null if none found. * @deprecated Please use getMaterialById instead */ getMaterialByID(e) { return this.getMaterialById(e); } /** * Gets the last added material using a given id * @param id defines the material's Id * @returns the last material with the given id or null if none found. * @deprecated Please use getLastMaterialById instead */ getLastMaterialByID(e) { return this.getLastMaterialById(e); } /** * Get a texture using its unique id * @param uniqueId defines the texture's unique id * @returns the texture or null if none found. * @deprecated Please use getTextureByUniqueId instead */ getTextureByUniqueID(e) { return this.getTextureByUniqueId(e); } /** * Gets a camera using its Id * @param id defines the Id to look for * @returns the camera or null if not found * @deprecated Please use getCameraById instead */ getCameraByID(e) { return this.getCameraById(e); } /** * Gets a camera using its unique Id * @param uniqueId defines the unique Id to look for * @returns the camera or null if not found * @deprecated Please use getCameraByUniqueId instead */ getCameraByUniqueID(e) { return this.getCameraByUniqueId(e); } /** * Gets a bone using its Id * @param id defines the bone's Id * @returns the bone or null if not found * @deprecated Please use getBoneById instead */ getBoneByID(e) { return this.getBoneById(e); } /** * Gets a light node using its Id * @param id defines the light's Id * @returns the light or null if none found. * @deprecated Please use getLightById instead */ getLightByID(e) { return this.getLightById(e); } /** * Gets a light node using its scene-generated unique Id * @param uniqueId defines the light's unique Id * @returns the light or null if none found. * @deprecated Please use getLightByUniqueId instead */ getLightByUniqueID(e) { return this.getLightByUniqueId(e); } /** * Gets a particle system by Id * @param id defines the particle system Id * @returns the corresponding system or null if none found * @deprecated Please use getParticleSystemById instead */ getParticleSystemByID(e) { return this.getParticleSystemById(e); } /** * Gets a geometry using its Id * @param id defines the geometry's Id * @returns the geometry or null if none found. * @deprecated Please use getGeometryById instead */ getGeometryByID(e) { return this.getGeometryById(e); } /** * Gets the first added mesh found of a given Id * @param id defines the Id to search for * @returns the mesh found or null if not found at all * @deprecated Please use getMeshById instead */ getMeshByID(e) { return this.getMeshById(e); } /** * Gets a mesh with its auto-generated unique Id * @param uniqueId defines the unique Id to search for * @returns the found mesh or null if not found at all. * @deprecated Please use getMeshByUniqueId instead */ getMeshByUniqueID(e) { return this.getMeshByUniqueId(e); } /** * Gets the last added mesh using a given Id * @param id defines the Id to search for * @returns the found mesh or null if not found at all.
* @deprecated Please use getLastMeshById instead */ getLastMeshByID(e) { return this.getLastMeshById(e); } /** * Gets a list of meshes using their Id * @param id defines the Id to search for * @returns a list of meshes * @deprecated Please use getMeshesById instead */ getMeshesByID(e) { return this.getMeshesById(e); } /** * Gets the first added transform node found of a given Id * @param id defines the Id to search for * @returns the found transform node or null if not found at all. * @deprecated Please use getTransformNodeById instead */ getTransformNodeByID(e) { return this.getTransformNodeById(e); } /** * Gets a transform node with its auto-generated unique Id * @param uniqueId defines the unique Id to search for * @returns the found transform node or null if not found at all. * @deprecated Please use getTransformNodeByUniqueId instead */ getTransformNodeByUniqueID(e) { return this.getTransformNodeByUniqueId(e); } /** * Gets a list of transform nodes using their Id * @param id defines the Id to search for * @returns a list of transform nodes * @deprecated Please use getTransformNodesById instead */ getTransformNodesByID(e) { return this.getTransformNodesById(e); } /** * Gets a node (Mesh, Camera, Light) using a given Id * @param id defines the Id to search for * @returns the found node or null if not found at all * @deprecated Please use getNodeById instead */ getNodeByID(e) { return this.getNodeById(e); } /** * Gets the last added node (Mesh, Camera, Light) using a given Id * @param id defines the Id to search for * @returns the found node or null if not found at all * @deprecated Please use getLastEntryById instead */ getLastEntryByID(e) { return this.getLastEntryById(e); } /** * Gets a skeleton using a given Id (if many are found, this function will pick the last one) * @param id defines the Id to search for * @returns the found skeleton or null if not found at all.
* @deprecated Please use getLastSkeletonById instead */ getLastSkeletonByID(e) { return this.getLastSkeletonById(e); } } ii.FOGMODE_NONE = 0; ii.FOGMODE_EXP = 1; ii.FOGMODE_EXP2 = 2; ii.FOGMODE_LINEAR = 3; ii.MinDeltaTime = 1; ii.MaxDeltaTime = 1e3; var qr; (function(c) { c[c.LOCAL = 0] = "LOCAL", c[c.WORLD = 1] = "WORLD", c[c.BONE = 2] = "BONE"; })(qr || (qr = {})); class bl { } bl.X = new D(1, 0, 0); bl.Y = new D(0, 1, 0); bl.Z = new D(0, 0, 1); var W8; (function(c) { c[c.X = 0] = "X", c[c.Y = 1] = "Y", c[c.Z = 2] = "Z"; })(W8 || (W8 = {})); class ha extends In { /** @internal */ get _matrix() { return this._compose(), this._localMatrix; } /** @internal */ set _matrix(e) { e.updateFlag === this._localMatrix.updateFlag && !this._needToCompose || (this._needToCompose = !1, this._localMatrix.copyFrom(e), this._markAsDirtyAndDecompose()); } /** * Create a new bone * @param name defines the bone name * @param skeleton defines the parent skeleton * @param parentBone defines the parent (can be null if the bone is the root) * @param localMatrix defines the local matrix (default: identity) * @param restMatrix defines the rest matrix (default: localMatrix) * @param bindMatrix defines the bind matrix (default: localMatrix) * @param index defines index of the bone in the hierarchy (default: null) */ constructor(e, t, i = null, r = null, s = null, n = null, a = null) { var l; super(e, t.getScene()), this.name = e, this.children = [], this.animations = [], this._index = null, this._scalingDeterminant = 1, this._needToDecompose = !0, this._needToCompose = !1, this._linkedTransformNode = null, this._waitingTransformNodeId = null, this._skeleton = t, this._localMatrix = (l = r == null ? void 0 : r.clone()) !== null && l !== void 0 ? l : Ae.Identity(), this._restMatrix = s ?? this._localMatrix.clone(), this._bindMatrix = n ?? this._localMatrix.clone(), this._index = a, this._absoluteMatrix = new Ae(), this._absoluteBindMatrix = new Ae(), this._absoluteInverseBindMatrix = new Ae(), this._finalMatrix = new Ae(), t.bones.push(this), this.setParent(i, !1), this._updateAbsoluteBindMatrices(); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "Bone"; } // Members /** * Gets the parent skeleton * @returns a skeleton */ getSkeleton() { return this._skeleton; } get parent() { return this._parentNode; } /** * Gets parent bone * @returns a bone or null if the bone is the root of the bone hierarchy */ getParent() { return this.parent; } /** * Returns an array containing the children of the bone * @returns an array containing the children of the bone (can be empty if the bone has no children) */ getChildren() { return this.children; } /** * Gets the node index in matrix array generated for rendering * @returns the node index */ getIndex() { return this._index === null ? 
this.getSkeleton().bones.indexOf(this) : this._index; } set parent(e) { this.setParent(e); } /** * Sets the parent bone * @param parent defines the parent (can be null if the bone is the root) * @param updateAbsoluteBindMatrices defines if the absolute bind and absolute inverse bind matrices must be updated */ setParent(e, t = !0) { if (this.parent !== e) { if (this.parent) { const i = this.parent.children.indexOf(this); i !== -1 && this.parent.children.splice(i, 1); } this._parentNode = e, this.parent && this.parent.children.push(this), t && this._updateAbsoluteBindMatrices(), this.markAsDirty(); } } /** * Gets the local matrix * @returns the local matrix */ getLocalMatrix() { return this._compose(), this._localMatrix; } /** * Gets the bind matrix * @returns the bind matrix */ getBindMatrix() { return this._bindMatrix; } /** * Gets the bind matrix. * @returns the bind matrix * @deprecated Please use getBindMatrix instead */ getBaseMatrix() { return this.getBindMatrix(); } /** * Gets the rest matrix * @returns the rest matrix */ getRestMatrix() { return this._restMatrix; } /** * Gets the rest matrix * @returns the rest matrix * @deprecated Please use getRestMatrix instead */ getRestPose() { return this.getRestMatrix(); } /** * Sets the rest matrix * @param matrix the local-space rest matrix to set for this bone */ setRestMatrix(e) { this._restMatrix.copyFrom(e); } /** * Sets the rest matrix * @param matrix the local-space rest to set for this bone * @deprecated Please use setRestMatrix instead */ setRestPose(e) { this.setRestMatrix(e); } /** * Gets the bind matrix * @returns the bind matrix * @deprecated Please use getBindMatrix instead */ getBindPose() { return this.getBindMatrix(); } /** * Sets the bind matrix * This will trigger a recomputation of the absolute bind and absolute inverse bind matrices for this bone and its children * Note that the local matrix will also be set with the matrix passed in parameter! * @param matrix the local-space bind matrix to set for this bone */ setBindMatrix(e) { this.updateMatrix(e); } /** * Sets the bind matrix * @param matrix the local-space bind to set for this bone * @deprecated Please use setBindMatrix instead */ setBindPose(e) { this.setBindMatrix(e); } /** * Gets the matrix used to store the final world transformation of the bone (ie. the matrix sent to shaders) */ getFinalMatrix() { return this._finalMatrix; } /** * Gets the matrix used to store the final world transformation of the bone (ie. the matrix sent to shaders) * @deprecated Please use getFinalMatrix instead */ getWorldMatrix() { return this.getFinalMatrix(); } /** * Sets the local matrix to the rest matrix */ returnToRest() { var e; if (this._linkedTransformNode) { const t = de.Vector3[0], i = de.Quaternion[0], r = de.Vector3[1]; this.getRestMatrix().decompose(t, i, r), this._linkedTransformNode.position.copyFrom(r), this._linkedTransformNode.rotationQuaternion = (e = this._linkedTransformNode.rotationQuaternion) !== null && e !== void 0 ? 
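/*
 * returnToRest, linked-node branch: when a TransformNode drives this bone the rest matrix is decomposed
 * and its translation, rotation and scaling are written back to that node (allocating a rotation
 * quaternion if the node has none yet); otherwise the bone's local matrix is reset to the rest matrix.
 */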
e : Ze.Identity(), this._linkedTransformNode.rotationQuaternion.copyFrom(i), this._linkedTransformNode.scaling.copyFrom(t); } else this._matrix = this._restMatrix; } /** * Gets the inverse of the bind matrix, in world space (relative to the skeleton root) * @returns the inverse bind matrix, in world space */ getAbsoluteInverseBindMatrix() { return this._absoluteInverseBindMatrix; } /** * Gets the inverse of the bind matrix, in world space (relative to the skeleton root) * @returns the inverse bind matrix, in world space * @deprecated Please use getAbsoluteInverseBindMatrix instead */ getInvertedAbsoluteTransform() { return this.getAbsoluteInverseBindMatrix(); } /** * Gets the bone matrix, in world space (relative to the skeleton root) * @returns the bone matrix, in world space */ getAbsoluteMatrix() { return this._absoluteMatrix; } /** * Gets the bone matrix, in world space (relative to the skeleton root) * @returns the bone matrix, in world space * @deprecated Please use getAbsoluteMatrix instead */ getAbsoluteTransform() { return this._absoluteMatrix; } /** * Links with the given transform node. * The local matrix of this bone is overwritten by the transform of the node every frame. * @param transformNode defines the transform node to link to */ linkTransformNode(e) { this._linkedTransformNode && this._skeleton._numBonesWithLinkedTransformNode--, this._linkedTransformNode = e, this._linkedTransformNode && this._skeleton._numBonesWithLinkedTransformNode++; } // Properties (matches TransformNode properties) /** * Gets the node used to drive the bone's transformation * @returns a transform node or null */ getTransformNode() { return this._linkedTransformNode; } /** Gets or sets current position (in local space) */ get position() { return this._decompose(), this._localPosition; } set position(e) { this._decompose(), this._localPosition.copyFrom(e), this._markAsDirtyAndCompose(); } /** Gets or sets current rotation (in local space) */ get rotation() { return this.getRotation(); } set rotation(e) { this.setRotation(e); } /** Gets or sets current rotation quaternion (in local space) */ get rotationQuaternion() { return this._decompose(), this._localRotation; } set rotationQuaternion(e) { this.setRotationQuaternion(e); } /** Gets or sets current scaling (in local space) */ get scaling() { return this.getScale(); } set scaling(e) { this.setScale(e); } /** * Gets the animation properties override */ get animationPropertiesOverride() { return this._skeleton.animationPropertiesOverride; } // Methods _decompose() { this._needToDecompose && (this._needToDecompose = !1, this._localScaling || (this._localScaling = D.Zero(), this._localRotation = Ze.Zero(), this._localPosition = D.Zero()), this._localMatrix.decompose(this._localScaling, this._localRotation, this._localPosition)); } _compose() { if (this._needToCompose) { if (!this._localScaling) { this._needToCompose = !1; return; } this._needToCompose = !1, Ae.ComposeToRef(this._localScaling, this._localRotation, this._localPosition, this._localMatrix); } } /** * Update the bind (and optionally the local) matrix * @param bindMatrix defines the new matrix to set to the bind/local matrix, in local space * @param updateAbsoluteBindMatrices defines if the absolute bind and absolute inverse bind matrices must be recomputed (default: true) * @param updateLocalMatrix defines if the local matrix should also be updated with the matrix passed in parameter (default: true) */ updateMatrix(e, t = !0, i = !0) { this._bindMatrix.copyFrom(e), t && 
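/*
 * updateMatrix: the incoming matrix becomes the new bind matrix; when updateAbsoluteBindMatrices is true
 * the absolute bind / inverse bind matrices of this bone and its children are recomputed, and when
 * updateLocalMatrix is true the local matrix is overwritten as well (otherwise the bone is only marked dirty).
 * Hedged sketch (`bone` and `newBindMatrix` are illustrative assumptions):
 *
 *   bone.updateMatrix(newBindMatrix); // also updates the local matrix and the absolute bind chain
 */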
this._updateAbsoluteBindMatrices(), i ? this._matrix = e : this.markAsDirty(); } /** * @internal */ _updateAbsoluteBindMatrices(e, t = !0) { if (e || (e = this._bindMatrix), this.parent ? e.multiplyToRef(this.parent._absoluteBindMatrix, this._absoluteBindMatrix) : this._absoluteBindMatrix.copyFrom(e), this._absoluteBindMatrix.invertToRef(this._absoluteInverseBindMatrix), t) for (let i = 0; i < this.children.length; i++) this.children[i]._updateAbsoluteBindMatrices(); this._scalingDeterminant = this._absoluteBindMatrix.determinant() < 0 ? -1 : 1; } /** * Flag the bone as dirty (Forcing it to update everything) * @returns this bone */ markAsDirty() { return this._currentRenderId++, this._childUpdateId++, this._skeleton._markAsDirty(), this; } /** @internal */ _markAsDirtyAndCompose() { this.markAsDirty(), this._needToCompose = !0; } _markAsDirtyAndDecompose() { this.markAsDirty(), this._needToDecompose = !0; } _updatePosition(e, t = qr.LOCAL, i, r = !0) { const s = this.getLocalMatrix(); if (t == qr.LOCAL) r ? (s.addAtIndex(12, e.x), s.addAtIndex(13, e.y), s.addAtIndex(14, e.z)) : s.setTranslationFromFloats(e.x, e.y, e.z); else { let n = null; i && (n = i.getWorldMatrix()), this._skeleton.computeAbsoluteMatrices(); const a = ha._TmpMats[0], l = ha._TmpVecs[0]; this.parent ? i && n ? (a.copyFrom(this.parent.getAbsoluteMatrix()), a.multiplyToRef(n, a)) : a.copyFrom(this.parent.getAbsoluteMatrix()) : Ae.IdentityToRef(a), r && a.setTranslationFromFloats(0, 0, 0), a.invert(), D.TransformCoordinatesToRef(e, a, l), r ? (s.addAtIndex(12, l.x), s.addAtIndex(13, l.y), s.addAtIndex(14, l.z)) : s.setTranslationFromFloats(l.x, l.y, l.z); } this._markAsDirtyAndDecompose(); } /** * Translate the bone in local or world space * @param vec The amount to translate the bone * @param space The space that the translation is in (default: Space.LOCAL) * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ translate(e, t = qr.LOCAL, i) { this._updatePosition(e, t, i, !0); } /** * Set the position of the bone in local or world space * @param position The position to set the bone * @param space The space that the position is in (default: Space.LOCAL) * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ setPosition(e, t = qr.LOCAL, i) { this._updatePosition(e, t, i, !1); } /** * Set the absolute position of the bone (world space) * @param position The position to set the bone * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. 
Used only when space=Space.WORLD */ setAbsolutePosition(e, t) { this.setPosition(e, qr.WORLD, t); } /** * Scale the bone on the x, y and z axes (in local space) * @param x The amount to scale the bone on the x axis * @param y The amount to scale the bone on the y axis * @param z The amount to scale the bone on the z axis * @param scaleChildren sets this to true if children of the bone should be scaled as well (false by default) */ scale(e, t, i, r = !1) { const s = this.getLocalMatrix(), n = ha._TmpMats[0]; Ae.ScalingToRef(e, t, i, n), n.multiplyToRef(s, s), n.invert(); for (const a of this.children) { const l = a.getLocalMatrix(); l.multiplyToRef(n, l), l.multiplyAtIndex(12, e), l.multiplyAtIndex(13, t), l.multiplyAtIndex(14, i), a._markAsDirtyAndDecompose(); } if (this._markAsDirtyAndDecompose(), r) for (const a of this.children) a.scale(e, t, i, r); } /** * Set the bone scaling in local space * @param scale defines the scaling vector */ setScale(e) { this._decompose(), this._localScaling.copyFrom(e), this._markAsDirtyAndCompose(); } /** * Gets the current scaling in local space * @returns the current scaling vector */ getScale() { return this._decompose(), this._localScaling; } /** * Gets the current scaling in local space and stores it in a target vector * @param result defines the target vector */ getScaleToRef(e) { this._decompose(), e.copyFrom(this._localScaling); } /** * Set the yaw, pitch, and roll of the bone in local or world space * @param yaw The rotation of the bone on the y axis * @param pitch The rotation of the bone on the x axis * @param roll The rotation of the bone on the z axis * @param space The space that the axes of rotation are in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ setYawPitchRoll(e, t, i, r = qr.LOCAL, s) { if (r === qr.LOCAL) { const l = ha._TmpQuat; Ze.RotationYawPitchRollToRef(e, t, i, l), this.setRotationQuaternion(l, r, s); return; } const n = ha._TmpMats[0]; if (!this._getAbsoluteInverseMatrixUnscaledToRef(n, s)) return; const a = ha._TmpMats[1]; Ae.RotationYawPitchRollToRef(e, t, i, a), n.multiplyToRef(a, a), this._rotateWithMatrix(a, r, s); } /** * Add a rotation to the bone on an axis in local or world space * @param axis The axis to rotate the bone on * @param amount The amount to rotate the bone * @param space The space that the axis is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ rotate(e, t, i = qr.LOCAL, r) { const s = ha._TmpMats[0]; s.setTranslationFromFloats(0, 0, 0), Ae.RotationAxisToRef(e, t, s), this._rotateWithMatrix(s, i, r); } /** * Set the rotation of the bone to a particular axis angle in local or world space * @param axis The axis to rotate the bone on * @param angle The angle that the bone should be rotated to * @param space The space that the axis is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. 
Used only when space=Space.WORLD */ setAxisAngle(e, t, i = qr.LOCAL, r) { if (i === qr.LOCAL) { const a = ha._TmpQuat; Ze.RotationAxisToRef(e, t, a), this.setRotationQuaternion(a, i, r); return; } const s = ha._TmpMats[0]; if (!this._getAbsoluteInverseMatrixUnscaledToRef(s, r)) return; const n = ha._TmpMats[1]; Ae.RotationAxisToRef(e, t, n), s.multiplyToRef(n, n), this._rotateWithMatrix(n, i, r); } /** * Set the euler rotation of the bone in local or world space * @param rotation The euler rotation that the bone should be set to * @param space The space that the rotation is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ setRotation(e, t = qr.LOCAL, i) { this.setYawPitchRoll(e.y, e.x, e.z, t, i); } /** * Set the quaternion rotation of the bone in local or world space * @param quat The quaternion rotation that the bone should be set to * @param space The space that the rotation is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ setRotationQuaternion(e, t = qr.LOCAL, i) { if (t === qr.LOCAL) { this._decompose(), this._localRotation.copyFrom(e), this._markAsDirtyAndCompose(); return; } const r = ha._TmpMats[0]; if (!this._getAbsoluteInverseMatrixUnscaledToRef(r, i)) return; const s = ha._TmpMats[1]; Ae.FromQuaternionToRef(e, s), r.multiplyToRef(s, s), this._rotateWithMatrix(s, t, i); } /** * Set the rotation matrix of the bone in local or world space * @param rotMat The rotation matrix that the bone should be set to * @param space The space that the rotation is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD */ setRotationMatrix(e, t = qr.LOCAL, i) { if (t === qr.LOCAL) { const n = ha._TmpQuat; Ze.FromRotationMatrixToRef(e, n), this.setRotationQuaternion(n, t, i); return; } const r = ha._TmpMats[0]; if (!this._getAbsoluteInverseMatrixUnscaledToRef(r, i)) return; const s = ha._TmpMats[1]; s.copyFrom(e), r.multiplyToRef(e, s), this._rotateWithMatrix(s, t, i); } _rotateWithMatrix(e, t = qr.LOCAL, i) { const r = this.getLocalMatrix(), s = r.m[12], n = r.m[13], a = r.m[14], l = this.getParent(), o = ha._TmpMats[3], u = ha._TmpMats[4]; l && t == qr.WORLD ? (i ? (o.copyFrom(i.getWorldMatrix()), l.getAbsoluteMatrix().multiplyToRef(o, o)) : o.copyFrom(l.getAbsoluteMatrix()), u.copyFrom(o), u.invert(), r.multiplyToRef(o, r), r.multiplyToRef(e, r), r.multiplyToRef(u, r)) : t == qr.WORLD && i ? (o.copyFrom(i.getWorldMatrix()), u.copyFrom(o), u.invert(), r.multiplyToRef(o, r), r.multiplyToRef(e, r), r.multiplyToRef(u, r)) : r.multiplyToRef(e, r), r.setTranslationFromFloats(s, n, a), this.computeAbsoluteMatrices(), this._markAsDirtyAndDecompose(); } _getAbsoluteInverseMatrixUnscaledToRef(e, t) { const i = ha._TmpMats[2]; return e.copyFrom(this.getAbsoluteMatrix()), t ? (e.multiplyToRef(t.getWorldMatrix(), e), Ae.ScalingToRef(t.scaling.x, t.scaling.y, t.scaling.z, i)) : Ae.IdentityToRef(i), e.invert(), isNaN(e.m[0]) ? 
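/*
  Usage sketch (not part of the bundle). The rotation helpers above (setYawPitchRoll, rotate,
  setAxisAngle, setRotationQuaternion, setRotationMatrix) take a Space and an optional
  TransformNode; for Space.WORLD you normally pass the skinned mesh so its world matrix is
  taken into account. The bone name and angles below are illustrative assumptions.

  const bone = skeleton.bones[skeleton.getBoneIndexByName("upperArm_R")];
  bone.rotate(BABYLON.Axis.Y, Math.PI / 6, BABYLON.Space.WORLD, mesh);  // add 30 deg around world Y
  bone.setYawPitchRoll(Math.PI / 2, 0, 0, BABYLON.Space.WORLD, mesh);   // set an absolute world orientation
*/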
!1 : (i.multiplyAtIndex(0, this._scalingDeterminant), e.multiplyToRef(i, e), !0); } /** * Get the position of the bone in local or world space * @param space The space that the returned position is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The position of the bone */ getPosition(e = qr.LOCAL, t = null) { const i = D.Zero(); return this.getPositionToRef(e, t, i), i; } /** * Copy the position of the bone to a vector3 in local or world space * @param space The space that the returned position is in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 to copy the position to */ getPositionToRef(e = qr.LOCAL, t, i) { if (e == qr.LOCAL) { const r = this.getLocalMatrix(); i.x = r.m[12], i.y = r.m[13], i.z = r.m[14]; } else { let r = null; t && (r = t.getWorldMatrix()), this._skeleton.computeAbsoluteMatrices(); let s = ha._TmpMats[0]; t && r ? (s.copyFrom(this.getAbsoluteMatrix()), s.multiplyToRef(r, s)) : s = this.getAbsoluteMatrix(), i.x = s.m[12], i.y = s.m[13], i.z = s.m[14]; } } /** * Get the absolute position of the bone (world space) * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The absolute position of the bone */ getAbsolutePosition(e = null) { const t = D.Zero(); return this.getPositionToRef(qr.WORLD, e, t), t; } /** * Copy the absolute position of the bone (world space) to the result param * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 to copy the absolute position to */ getAbsolutePositionToRef(e, t) { this.getPositionToRef(qr.WORLD, e, t); } /** * Compute the absolute matrices of this bone and its children */ computeAbsoluteMatrices() { if (this._compose(), this.parent) this._localMatrix.multiplyToRef(this.parent._absoluteMatrix, this._absoluteMatrix); else { this._absoluteMatrix.copyFrom(this._localMatrix); const i = this._skeleton.getPoseMatrix(); i && this._absoluteMatrix.multiplyToRef(i, this._absoluteMatrix); } const e = this.children, t = e.length; for (let i = 0; i < t; i++) e[i].computeAbsoluteMatrices(); } /** * Compute the absolute matrices of this bone and its children * @deprecated Please use computeAbsoluteMatrices instead */ computeAbsoluteTransforms() { this.computeAbsoluteMatrices(); } /** * Get the world direction from an axis that is in the local space of the bone * @param localAxis The local direction that is used to compute the world direction * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. 
Used only when space=Space.WORLD * @returns The world direction */ getDirection(e, t = null) { const i = D.Zero(); return this.getDirectionToRef(e, t, i), i; } /** * Copy the world direction to a vector3 from an axis that is in the local space of the bone * @param localAxis The local direction that is used to compute the world direction * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 that the world direction will be copied to */ getDirectionToRef(e, t = null, i) { let r = null; t && (r = t.getWorldMatrix()), this._skeleton.computeAbsoluteMatrices(); const s = ha._TmpMats[0]; s.copyFrom(this.getAbsoluteMatrix()), t && r && s.multiplyToRef(r, s), D.TransformNormalToRef(e, s, i), i.normalize(); } /** * Get the euler rotation of the bone in local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The euler rotation */ getRotation(e = qr.LOCAL, t = null) { const i = D.Zero(); return this.getRotationToRef(e, t, i), i; } /** * Copy the euler rotation of the bone to a vector3. The rotation can be in either local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 that the rotation should be copied to */ getRotationToRef(e = qr.LOCAL, t = null, i) { const r = ha._TmpQuat; this.getRotationQuaternionToRef(e, t, r), r.toEulerAnglesToRef(i); } /** * Get the quaternion rotation of the bone in either local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The quaternion rotation */ getRotationQuaternion(e = qr.LOCAL, t = null) { const i = Ze.Identity(); return this.getRotationQuaternionToRef(e, t, i), i; } /** * Copy the quaternion rotation of the bone to a quaternion. The rotation can be in either local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The quaternion that the rotation should be copied to */ getRotationQuaternionToRef(e = qr.LOCAL, t = null, i) { if (e == qr.LOCAL) this._decompose(), i.copyFrom(this._localRotation); else { const r = ha._TmpMats[0], s = this.getAbsoluteMatrix(); t ? 
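/*
  Usage sketch (not part of the bundle). The getters above return bone data in local or world
  space; as before, the skinned mesh is passed for world-space queries. `bone` and `mesh` are
  assumed from the previous sketches.

  const worldPos = bone.getAbsolutePosition(mesh);            // world-space bone position
  const forward = bone.getDirection(BABYLON.Axis.Z, mesh);    // world direction of the bone's local +Z
  const tmp = BABYLON.Vector3.Zero();
  bone.getPositionToRef(BABYLON.Space.WORLD, mesh, tmp);      // allocation-free variant
*/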
s.multiplyToRef(t.getWorldMatrix(), r) : r.copyFrom(s), r.multiplyAtIndex(0, this._scalingDeterminant), r.multiplyAtIndex(1, this._scalingDeterminant), r.multiplyAtIndex(2, this._scalingDeterminant), r.decompose(void 0, i, void 0); } } /** * Get the rotation matrix of the bone in local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The rotation matrix */ getRotationMatrix(e = qr.LOCAL, t) { const i = Ae.Identity(); return this.getRotationMatrixToRef(e, t, i), i; } /** * Copy the rotation matrix of the bone to a matrix. The rotation can be in either local or world space * @param space The space that the rotation should be in * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The quaternion that the rotation should be copied to */ getRotationMatrixToRef(e = qr.LOCAL, t, i) { if (e == qr.LOCAL) this.getLocalMatrix().getRotationMatrixToRef(i); else { const r = ha._TmpMats[0], s = this.getAbsoluteMatrix(); t ? s.multiplyToRef(t.getWorldMatrix(), r) : r.copyFrom(s), r.multiplyAtIndex(0, this._scalingDeterminant), r.multiplyAtIndex(1, this._scalingDeterminant), r.multiplyAtIndex(2, this._scalingDeterminant), r.getRotationMatrixToRef(i); } } /** * Get the world position of a point that is in the local space of the bone * @param position The local position * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @returns The world position */ getAbsolutePositionFromLocal(e, t = null) { const i = D.Zero(); return this.getAbsolutePositionFromLocalToRef(e, t, i), i; } /** * Get the world position of a point that is in the local space of the bone and copy it to the result param * @param position The local position * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 that the world position should be copied to */ getAbsolutePositionFromLocalToRef(e, t = null, i) { let r = null; t && (r = t.getWorldMatrix()), this._skeleton.computeAbsoluteMatrices(); const s = ha._TmpMats[0]; s.copyFrom(this.getAbsoluteMatrix()), t && r && s.multiplyToRef(r, s), D.TransformCoordinatesToRef(e, s, i); } /** * Get the local position of a point that is in world space * @param position The world position * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. 
Used only when space=Space.WORLD * @returns The local position */ getLocalPositionFromAbsolute(e, t = null) { const i = D.Zero(); return this.getLocalPositionFromAbsoluteToRef(e, t, i), i; } /** * Get the local position of a point that is in world space and copy it to the result param * @param position The world position * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD * @param result The vector3 that the local position should be copied to */ getLocalPositionFromAbsoluteToRef(e, t = null, i) { let r = null; t && (r = t.getWorldMatrix()), this._skeleton.computeAbsoluteMatrices(); const s = ha._TmpMats[0]; s.copyFrom(this.getAbsoluteMatrix()), t && r && s.multiplyToRef(r, s), s.invert(), D.TransformCoordinatesToRef(e, s, i); } /** * Set the current local matrix as the restMatrix for this bone. */ setCurrentPoseAsRest() { this.setRestMatrix(this.getLocalMatrix()); } } ha._TmpVecs = kc.BuildArray(2, D.Zero); ha._TmpQuat = Ze.Identity(); ha._TmpMats = kc.BuildArray(5, Ae.Identity); class hK { /** * Gets the root Animatable used to synchronize and normalize animations */ get syncRoot() { return this._syncRoot; } /** * Gets the current frame of the first RuntimeAnimation * Used to synchronize Animatables */ get masterFrame() { return this._runtimeAnimations.length === 0 ? 0 : this._runtimeAnimations[0].currentFrame; } /** * Gets or sets the animatable weight (-1.0 by default meaning not weighted) */ get weight() { return this._weight; } set weight(e) { if (e === -1) { this._weight = -1; return; } this._weight = Math.min(Math.max(e, 0), 1); } /** * Gets or sets the speed ratio to apply to the animatable (1.0 by default) */ get speedRatio() { return this._speedRatio; } set speedRatio(e) { for (let t = 0; t < this._runtimeAnimations.length; t++) this._runtimeAnimations[t]._prepareForSpeedRatioChange(e); this._speedRatio = e, this._goToFrame !== null && this.goToFrame(this._goToFrame); } /** * Gets the elapsed time since the animatable started in milliseconds */ get elapsedTime() { return this._localDelayOffset === null ? 
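/*
  Usage sketch (not part of the bundle). getAbsolutePositionFromLocal and
  getLocalPositionFromAbsolute convert points between the bone's local space and world space,
  and setCurrentPoseAsRest freezes the current local matrix as the rest pose. The input point
  is an illustrative assumption.

  const worldPoint = new BABYLON.Vector3(0, 1.5, 0.2);
  const localPoint = bone.getLocalPositionFromAbsolute(worldPoint, mesh);
  const backToWorld = bone.getAbsolutePositionFromLocal(localPoint, mesh);
  bone.setCurrentPoseAsRest();
*/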
0 : this._scene._animationTime - this._localDelayOffset; } /** * Creates a new Animatable * @param scene defines the hosting scene * @param target defines the target object * @param fromFrame defines the starting frame number (default is 0) * @param toFrame defines the ending frame number (default is 100) * @param loopAnimation defines if the animation must loop (default is false) * @param speedRatio defines the factor to apply to animation speed (default is 1) * @param onAnimationEnd defines a callback to call when animation ends if it is not looping * @param animations defines a group of animation to add to the new Animatable * @param onAnimationLoop defines a callback to call when animation loops * @param isAdditive defines whether the animation should be evaluated additively * @param playOrder defines the order in which this animatable should be processed in the list of active animatables (default: 0) */ constructor(e, t, i = 0, r = 100, s = !1, n = 1, a, l, o, u = !1, h = 0) { this.target = t, this.fromFrame = i, this.toFrame = r, this.loopAnimation = s, this.onAnimationEnd = a, this.onAnimationLoop = o, this.isAdditive = u, this.playOrder = h, this._localDelayOffset = null, this._pausedDelay = null, this._manualJumpDelay = null, this._runtimeAnimations = new Array(), this._paused = !1, this._speedRatio = 1, this._weight = -1, this._syncRoot = null, this._frameToSyncFromJump = null, this._goToFrame = null, this.disposeOnEnd = !0, this.animationStarted = !1, this.onAnimationEndObservable = new Fe(), this.onAnimationLoopObservable = new Fe(), this._scene = e, l && this.appendAnimations(t, l), this._speedRatio = n, e._activeAnimatables.push(this); } // Methods /** * Synchronize and normalize current Animatable with a source Animatable * This is useful when using animation weights and when animations are not of the same length * @param root defines the root Animatable to synchronize with (null to stop synchronizing) * @returns the current Animatable */ syncWith(e) { if (this._syncRoot = e, e) { const t = this._scene._activeAnimatables.indexOf(this); t > -1 && (this._scene._activeAnimatables.splice(t, 1), this._scene._activeAnimatables.push(this)); } return this; } /** * Gets the list of runtime animations * @returns an array of RuntimeAnimation */ getAnimations() { return this._runtimeAnimations; } /** * Adds more animations to the current animatable * @param target defines the target of the animations * @param animations defines the new animations to add */ appendAnimations(e, t) { for (let i = 0; i < t.length; i++) { const r = t[i], s = new ite(e, r, this._scene, this); s._onLoop = () => { this.onAnimationLoopObservable.notifyObservers(this), this.onAnimationLoop && this.onAnimationLoop(); }, this._runtimeAnimations.push(s); } } /** * Gets the source animation for a specific property * @param property defines the property to look for * @returns null or the source animation for the given property */ getAnimationByTargetProperty(e) { const t = this._runtimeAnimations; for (let i = 0; i < t.length; i++) if (t[i].animation.targetProperty === e) return t[i].animation; return null; } /** * Gets the runtime animation for a specific property * @param property defines the property to look for * @returns null or the runtime animation for the given property */ getRuntimeAnimationByTargetProperty(e) { const t = this._runtimeAnimations; for (let i = 0; i < t.length; i++) if (t[i].animation.targetProperty === e) return t[i]; return null; } /** * Resets the animatable to its original state */ reset() 
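/*
  Usage sketch (not part of the bundle). The Animatable class above supports per-animatable
  weights (clamped to [0, 1]; -1 means "not weighted") and synchronization via syncWith, which
  is the basis of animation blending. Frame ranges and targets are illustrative assumptions.

  const walk = scene.beginWeightedAnimation(skeleton, 0, 89, 1.0, true);
  const run = scene.beginWeightedAnimation(skeleton, 90, 124, 0.0, true);
  run.syncWith(walk);   // keep both animatables in phase even though their lengths differ
  walk.weight = 0.3;
  run.weight = 0.7;
*/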
{ const e = this._runtimeAnimations; for (let t = 0; t < e.length; t++) e[t].reset(!0); this._localDelayOffset = null, this._pausedDelay = null; } /** * Allows the animatable to blend with current running animations * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#animation-blending * @param blendingSpeed defines the blending speed to use */ enableBlending(e) { const t = this._runtimeAnimations; for (let i = 0; i < t.length; i++) t[i].animation.enableBlending = !0, t[i].animation.blendingSpeed = e; } /** * Disable animation blending * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#animation-blending */ disableBlending() { const e = this._runtimeAnimations; for (let t = 0; t < e.length; t++) e[t].animation.enableBlending = !1; } /** * Jump directly to a given frame * @param frame defines the frame to jump to */ goToFrame(e) { var t; const i = this._runtimeAnimations; if (i[0]) { const r = i[0].animation.framePerSecond; this._frameToSyncFromJump = (t = this._frameToSyncFromJump) !== null && t !== void 0 ? t : i[0].currentFrame; const s = this.speedRatio === 0 ? 0 : (e - this._frameToSyncFromJump) / r * 1e3 / this.speedRatio; this._manualJumpDelay = -s; } for (let r = 0; r < i.length; r++) i[r].goToFrame(e); this._goToFrame = e; } /** * Returns true if the animations for this animatable are paused */ get paused() { return this._paused; } /** * Pause the animation */ pause() { this._paused || (this._paused = !0); } /** * Restart the animation */ restart() { this._paused = !1; } _raiseOnAnimationEnd() { this.onAnimationEnd && this.onAnimationEnd(), this.onAnimationEndObservable.notifyObservers(this); } /** * Stop and delete the current animation * @param animationName defines a string used to only stop some of the runtime animations instead of all * @param targetMask a function that determines if the animation should be stopped based on its target (all animations will be stopped if both this and animationName are empty) * @param useGlobalSplice if true, the animatables will be removed by the caller of this function (false by default) */ stop(e, t, i = !1) { if (e || t) { const r = this._scene._activeAnimatables.indexOf(this); if (r > -1) { const s = this._runtimeAnimations; for (let n = s.length - 1; n >= 0; n--) { const a = s[n]; e && a.animation.name != e || t && !t(a.target) || (a.dispose(), s.splice(n, 1)); } s.length == 0 && (i || this._scene._activeAnimatables.splice(r, 1), this._raiseOnAnimationEnd()); } } else { const r = this._scene._activeAnimatables.indexOf(this); if (r > -1) { i || this._scene._activeAnimatables.splice(r, 1); const s = this._runtimeAnimations; for (let n = 0; n < s.length; n++) s[n].dispose(); this._runtimeAnimations.length = 0, this._raiseOnAnimationEnd(); } } } /** * Wait asynchronously for the animation to end * @returns a promise which will be fulfilled when the animation ends */ waitAsync() { return new Promise((e) => { this.onAnimationEndObservable.add(() => { e(this); }, void 0, void 0, this, !0); }); } /** * @internal */ _animate(e) { if (this._paused) return this.animationStarted = !1, this._pausedDelay === null && (this._pausedDelay = e), !0; if (this._localDelayOffset === null ? 
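/*
  Usage sketch (not part of the bundle). An Animatable returned by Scene.beginAnimation can be
  paused, restarted, scrubbed with goToFrame, awaited with waitAsync, or stopped. `mesh` and
  the frame range are illustrative assumptions.

  const anim = scene.beginAnimation(mesh, 0, 100, false);
  anim.goToFrame(50);                                             // jump to a specific frame
  anim.pause();
  anim.restart();
  anim.waitAsync().then(() => console.log("animation finished"));
  // anim.stop();                                                 // or stop (and dispose) explicitly
*/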
(this._localDelayOffset = e, this._pausedDelay = null) : this._pausedDelay !== null && (this._localDelayOffset += e - this._pausedDelay, this._pausedDelay = null), this._manualJumpDelay !== null && (this._localDelayOffset += this._manualJumpDelay, this._manualJumpDelay = null, this._frameToSyncFromJump = null), this._goToFrame = null, this._weight === 0) return !0; let t = !1; const i = this._runtimeAnimations; let r; for (r = 0; r < i.length; r++) { const n = i[r].animate(e - this._localDelayOffset, this.fromFrame, this.toFrame, this.loopAnimation, this._speedRatio, this._weight); t = t || n; } if (this.animationStarted = t, !t) { if (this.disposeOnEnd) for (r = this._scene._activeAnimatables.indexOf(this), this._scene._activeAnimatables.splice(r, 1), r = 0; r < i.length; r++) i[r].dispose(); this._raiseOnAnimationEnd(), this.disposeOnEnd && (this.onAnimationEnd = null, this.onAnimationLoop = null, this.onAnimationLoopObservable.clear(), this.onAnimationEndObservable.clear()); } return t; } } ii.prototype._animate = function() { if (!this.animationsEnabled) return; const c = Gs.Now; if (!this._animationTimeLast) { if (this._pendingData.length > 0) return; this._animationTimeLast = c; } this.deltaTime = this.useConstantAnimationDeltaTime ? 16 : (c - this._animationTimeLast) * this.animationTimeScale, this._animationTimeLast = c; const e = this._activeAnimatables; if (e.length === 0) return; this._animationTime += this.deltaTime; const t = this._animationTime; for (let i = 0; i < e.length; i++) { const r = e[i]; !r._animate(t) && r.disposeOnEnd && i--; } this._processLateAnimationBindings(); }; ii.prototype.sortActiveAnimatables = function() { this._activeAnimatables.sort((c, e) => c.playOrder - e.playOrder); }; ii.prototype.beginWeightedAnimation = function(c, e, t, i = 1, r, s = 1, n, a, l, o, u = !1) { const h = this.beginAnimation(c, e, t, r, s, n, a, !1, l, o, u); return h.weight = i, h; }; ii.prototype.beginAnimation = function(c, e, t, i, r = 1, s, n, a = !0, l, o, u = !1) { e > t && r > 0 && (r *= -1), a && this.stopAnimation(c, void 0, l), n || (n = new hK(this, c, e, t, i, r, s, void 0, o, u)); const h = l ? 
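/*
  Usage sketch (not part of the bundle). Scene.beginAnimation drives the animations already
  attached to a target; when `from` is greater than `to` the speed ratio is negated so the clip
  plays backwards. Only the documented (target, from, to, loop, speedRatio, onAnimationEnd)
  prefix is shown; values are illustrative assumptions.

  scene.beginAnimation(mesh, 0, 100, false, 1.5, () => console.log("ended"));
  scene.beginAnimation(mesh, 100, 0, false);   // plays the range backwards
  scene.stopAnimation(mesh);                   // stops every animatable targeting `mesh`
*/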
l(c) : !0; if (c.animations && h && n.appendAnimations(c, c.animations), c.getAnimatables) { const d = c.getAnimatables(); for (let f = 0; f < d.length; f++) this.beginAnimation(d[f], e, t, i, r, s, n, a, l, o); } return n.reset(), n; }; ii.prototype.beginHierarchyAnimation = function(c, e, t, i, r, s = 1, n, a, l = !0, o, u, h = !1) { const d = c.getDescendants(e), f = []; f.push(this.beginAnimation(c, t, i, r, s, n, a, l, o, void 0, h)); for (const p of d) f.push(this.beginAnimation(p, t, i, r, s, n, a, l, o, void 0, h)); return f; }; ii.prototype.beginDirectAnimation = function(c, e, t, i, r, s, n, a, l = !1) { if (s === void 0 && (s = 1), t > i && s > 0) s *= -1; else if (i > t && s < 0) { const u = i; i = t, t = u; } return new hK(this, c, t, i, r, s, n, e, a, l); }; ii.prototype.beginDirectHierarchyAnimation = function(c, e, t, i, r, s, n, a, l, o = !1) { const u = c.getDescendants(e), h = []; h.push(this.beginDirectAnimation(c, t, i, r, s, n, a, l, o)); for (const d of u) h.push(this.beginDirectAnimation(d, t, i, r, s, n, a, l, o)); return h; }; ii.prototype.getAnimatableByTarget = function(c) { for (let e = 0; e < this._activeAnimatables.length; e++) if (this._activeAnimatables[e].target === c) return this._activeAnimatables[e]; return null; }; ii.prototype.getAllAnimatablesByTarget = function(c) { const e = []; for (let t = 0; t < this._activeAnimatables.length; t++) this._activeAnimatables[t].target === c && e.push(this._activeAnimatables[t]); return e; }; ii.prototype.stopAnimation = function(c, e, t) { const i = this.getAllAnimatablesByTarget(c); for (const r of i) r.stop(e, t); }; ii.prototype.stopAllAnimations = function() { if (this._activeAnimatables) { for (let c = 0; c < this._activeAnimatables.length; c++) this._activeAnimatables[c].stop(void 0, void 0, !0); this._activeAnimatables.length = 0; } for (const c of this.animationGroups) c.stop(); }; ii.prototype._registerTargetForLateAnimationBinding = function(c, e) { const t = c.target; this._registeredForLateAnimationBindings.pushNoDuplicate(t), t._lateAnimationHolders || (t._lateAnimationHolders = {}), t._lateAnimationHolders[c.targetPath] || (t._lateAnimationHolders[c.targetPath] = { totalWeight: 0, totalAdditiveWeight: 0, animations: [], additiveAnimations: [], originalValue: e }), c.isAdditive ? (t._lateAnimationHolders[c.targetPath].additiveAnimations.push(c), t._lateAnimationHolders[c.targetPath].totalAdditiveWeight += c.weight) : (t._lateAnimationHolders[c.targetPath].animations.push(c), t._lateAnimationHolders[c.targetPath].totalWeight += c.weight); }; ii.prototype._processLateAnimationBindingsForMatrices = function(c) { if (c.totalWeight === 0 && c.totalAdditiveWeight === 0) return c.originalValue; let e = 1; const t = de.Vector3[0], i = de.Vector3[1], r = de.Quaternion[0]; let s = 0; const n = c.animations[0], a = c.originalValue; let l = 1, o = !1; if (c.totalWeight < 1) l = 1 - c.totalWeight, a.decompose(i, r, t); else { if (s = 1, e = c.totalWeight, l = n.weight / e, l == 1) if (c.totalAdditiveWeight) o = !0; else return n.currentValue; n.currentValue.decompose(i, r, t); } if (!o) { i.scaleInPlace(l), t.scaleInPlace(l), r.scaleInPlace(l); for (let h = s; h < c.animations.length; h++) { const d = c.animations[h]; if (d.weight === 0) continue; l = d.weight / e; const f = de.Vector3[2], p = de.Vector3[3], m = de.Quaternion[1]; d.currentValue.decompose(p, m, f), p.scaleAndAddToRef(l, i), m.scaleAndAddToRef(Ze.Dot(r, m) > 0 ? 
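/*
  Usage sketch (not part of the bundle). beginDirectAnimation animates a target with an
  explicit list of Animation objects instead of target.animations. The property path and key
  values below are illustrative assumptions.

  const slide = new BABYLON.Animation("slide", "position.x", 60,
    BABYLON.Animation.ANIMATIONTYPE_FLOAT, BABYLON.Animation.ANIMATIONLOOPMODE_CYCLE);
  slide.setKeys([{ frame: 0, value: 0 }, { frame: 60, value: 4 }]);
  scene.beginDirectAnimation(mesh, [slide], 0, 60, true);   // (target, animations, from, to, loop, ...)
*/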
l : -l, r), f.scaleAndAddToRef(l, t); } r.normalize(); } for (let h = 0; h < c.additiveAnimations.length; h++) { const d = c.additiveAnimations[h]; if (d.weight === 0) continue; const f = de.Vector3[2], p = de.Vector3[3], m = de.Quaternion[1]; d.currentValue.decompose(p, m, f), p.multiplyToRef(i, p), D.LerpToRef(i, p, d.weight, i), r.multiplyToRef(m, m), Ze.SlerpToRef(r, m, d.weight, r), f.scaleAndAddToRef(d.weight, t); } const u = n ? n._animationState.workValue : de.Matrix[0].clone(); return Ae.ComposeToRef(i, r, t, u), u; }; ii.prototype._processLateAnimationBindingsForQuaternions = function(c, e) { if (c.totalWeight === 0 && c.totalAdditiveWeight === 0) return e; const t = c.animations[0], i = c.originalValue; let r = e; if (c.totalWeight === 0 && c.totalAdditiveWeight > 0) r.copyFrom(i); else if (c.animations.length === 1) { if (Ze.SlerpToRef(i, t.currentValue, Math.min(1, c.totalWeight), r), c.totalAdditiveWeight === 0) return r; } else if (c.animations.length > 1) { let s = 1, n, a; if (c.totalWeight < 1) { const o = 1 - c.totalWeight; n = [], a = [], n.push(i), a.push(o); } else { if (c.animations.length === 2 && (Ze.SlerpToRef(c.animations[0].currentValue, c.animations[1].currentValue, c.animations[1].weight / c.totalWeight, e), c.totalAdditiveWeight === 0)) return e; n = [], a = [], s = c.totalWeight; } for (let o = 0; o < c.animations.length; o++) { const u = c.animations[o]; n.push(u.currentValue), a.push(u.weight / s); } let l = 0; for (let o = 0; o < n.length; ) { if (!o) { Ze.SlerpToRef(n[o], n[o + 1], a[o + 1] / (a[o] + a[o + 1]), e), r = e, l = a[o] + a[o + 1], o += 2; continue; } l += a[o], Ze.SlerpToRef(r, n[o], a[o] / l, r), o++; } } for (let s = 0; s < c.additiveAnimations.length; s++) { const n = c.additiveAnimations[s]; n.weight !== 0 && (r.multiplyToRef(n.currentValue, de.Quaternion[0]), Ze.SlerpToRef(r, de.Quaternion[0], n.weight, r)); } return r; }; ii.prototype._processLateAnimationBindings = function() { if (this._registeredForLateAnimationBindings.length) { for (let c = 0; c < this._registeredForLateAnimationBindings.length; c++) { const e = this._registeredForLateAnimationBindings.data[c]; for (const t in e._lateAnimationHolders) { const i = e._lateAnimationHolders[t], r = i.animations[0], s = i.originalValue; if (s == null) continue; const n = nt.AllowMatrixDecomposeForInterpolation && s.m; let a = e[t]; if (n) a = this._processLateAnimationBindingsForMatrices(i); else if (s.w !== void 0) a = this._processLateAnimationBindingsForQuaternions(i, a || Ze.Identity()); else { let o = 0, u = 1; const h = r && r._animationState.loopMode === nt.ANIMATIONLOOPMODE_RELATIVE_FROM_CURRENT; if (i.totalWeight < 1) h ? a = s.clone ? s.clone() : s : r && s.scale ? a = s.scale(1 - i.totalWeight) : r ? a = s * (1 - i.totalWeight) : s.clone ? a = s.clone() : a = s; else if (r) { u = i.totalWeight; const d = r.weight / u; d !== 1 ? r.currentValue.scale ? a = r.currentValue.scale(d) : a = r.currentValue * d : a = r.currentValue, h && (a.addToRef ? a.addToRef(s, a) : a += s), o = 1; } for (let d = o; d < i.animations.length; d++) { const f = i.animations[d], p = f.weight / u; if (p) f.currentValue.scaleAndAddToRef ? f.currentValue.scaleAndAddToRef(p, a) : a += f.currentValue * p; else continue; } for (let d = 0; d < i.additiveAnimations.length; d++) { const f = i.additiveAnimations[d], p = f.weight; if (p) f.currentValue.scaleAndAddToRef ? 
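/*
  Usage sketch (not part of the bundle). The late-binding code above first blends all weighted
  animatables targeting a property, then layers additive animatables on top of that result. An
  additive layer can be requested through the trailing isAdditive flag of
  beginWeightedAnimation; the frame range and the positional `undefined`s are illustrative
  assumptions.

  // (target, from, to, weight, loop, speedRatio, onAnimationEnd, animatable, targetMask, onAnimationLoop, isAdditive)
  const lean = scene.beginWeightedAnimation(skeleton, 125, 150, 1.0, true, 1,
    undefined, undefined, undefined, undefined, true);
  lean.weight = 0.5;   // scales how strongly the additive layer is applied
*/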
f.currentValue.scaleAndAddToRef(p, a) : a += f.currentValue * p; else continue; } } e[t] = a; } e._lateAnimationHolders = {}; } this._registeredForLateAnimationBindings.reset(); } }; ha.prototype.copyAnimationRange = function(c, e, t, i = !1, r = null) { this.animations.length === 0 && (this.animations.push(new nt(this.name, "_matrix", c.animations[0].framePerSecond, nt.ANIMATIONTYPE_MATRIX, 0)), this.animations[0].setKeys([])); const s = c.animations[0].getRange(e); if (!s) return !1; const n = s.from, a = s.to, l = c.animations[0].getKeys(), o = c.length, u = c.getParent(), h = this.getParent(), d = i && u && o && this.length && o !== this.length, f = d && h && u ? h.length / u.length : 1, p = i && !h && r && (r.x !== 1 || r.y !== 1 || r.z !== 1), m = this.animations[0].getKeys(); let _, v, C; for (let x = 0, b = l.length; x < b; x++) _ = l[x], _.frame >= n && _.frame <= a && (i ? (C = _.value.clone(), d ? (v = C.getTranslation(), C.setTranslation(v.scaleInPlace(f))) : p && r ? (v = C.getTranslation(), C.setTranslation(v.multiplyInPlace(r))) : C = _.value) : C = _.value, m.push({ frame: _.frame + t, value: C })); return this.animations[0].createRange(e, n + t, a + t), !0; }; class ace { constructor() { this.enableBlending = !1, this.blendingSpeed = 0.01, this.loopMode = nt.ANIMATIONLOOPMODE_CYCLE; } } var eP; (function(c) { c[c.CW = 0] = "CW", c[c.CCW = 1] = "CCW"; })(eP || (eP = {})); class gte { /** * Returns the cubic Bezier interpolated value (float) at "t" (float) from the given x1, y1, x2, y2 floats * @param t defines the time * @param x1 defines the left coordinate on X axis * @param y1 defines the left coordinate on Y axis * @param x2 defines the right coordinate on X axis * @param y2 defines the right coordinate on Y axis * @returns the interpolated value */ static Interpolate(e, t, i, r, s) { const n = 1 - 3 * r + 3 * t, a = 3 * r - 6 * t, l = 3 * t; let o = e; for (let u = 0; u < 5; u++) { const h = o * o, d = h * o, f = n * d + a * h + l * o, p = 1 / (3 * n * h + 2 * a * o + l); o -= (f - e) * p, o = Math.min(1, Math.max(0, o)); } return 3 * Math.pow(1 - o, 2) * o * i + 3 * (1 - o) * Math.pow(o, 2) * s + Math.pow(o, 3); } } class WA { /** * Creates an Angle object of "radians" radians (float). 
* @param radians the angle in radians */ constructor(e) { this._radians = e, this._radians < 0 && (this._radians += 2 * Math.PI); } /** * Get value in degrees * @returns the Angle value in degrees (float) */ degrees() { return this._radians * 180 / Math.PI; } /** * Get value in radians * @returns the Angle value in radians (float) */ radians() { return this._radians; } /** * Gets a new Angle object with a value of the angle (in radians) between the line connecting the two points and the x-axis * @param a defines first point as the origin * @param b defines point * @returns a new Angle */ static BetweenTwoPoints(e, t) { const i = t.subtract(e), r = Math.atan2(i.y, i.x); return new WA(r); } /** * Gets the angle between the two vectors * @param a defines first vector * @param b defines vector * @returns Returns an new Angle between 0 and PI */ static BetweenTwoVectors(e, t) { let i = e.lengthSquared() * t.lengthSquared(); if (i === 0) return new WA(Math.PI / 2); i = Math.sqrt(i); let r = e.dot(t) / i; r = yt.Clamp(r, -1, 1); const s = Math.acos(r); return new WA(s); } /** * Gets a new Angle object from the given float in radians * @param radians defines the angle value in radians * @returns a new Angle */ static FromRadians(e) { return new WA(e); } /** * Gets a new Angle object from the given float in degrees * @param degrees defines the angle value in degrees * @returns a new Angle */ static FromDegrees(e) { return new WA(e * Math.PI / 180); } } class vte { /** * Creates an Arc object from the three given points : start, middle and end. * @param startPoint Defines the start point of the arc * @param midPoint Defines the middle point of the arc * @param endPoint Defines the end point of the arc */ constructor(e, t, i) { this.startPoint = e, this.midPoint = t, this.endPoint = i; const r = Math.pow(t.x, 2) + Math.pow(t.y, 2), s = (Math.pow(e.x, 2) + Math.pow(e.y, 2) - r) / 2, n = (r - Math.pow(i.x, 2) - Math.pow(i.y, 2)) / 2, a = (e.x - t.x) * (t.y - i.y) - (t.x - i.x) * (e.y - t.y); this.centerPoint = new at((s * (t.y - i.y) - n * (e.y - t.y)) / a, ((e.x - t.x) * n - (t.x - i.x) * s) / a), this.radius = this.centerPoint.subtract(this.startPoint).length(), this.startAngle = WA.BetweenTwoPoints(this.centerPoint, this.startPoint); const l = this.startAngle.degrees(); let o = WA.BetweenTwoPoints(this.centerPoint, this.midPoint).degrees(), u = WA.BetweenTwoPoints(this.centerPoint, this.endPoint).degrees(); o - l > 180 && (o -= 360), o - l < -180 && (o += 360), u - o > 180 && (u -= 360), u - o < -180 && (u += 360), this.orientation = o - l < 0 ? eP.CW : eP.CCW, this.angle = WA.FromDegrees(this.orientation === eP.CW ? l - u : u - l); } } class _w { /** * Creates a Path2 object from the starting 2D coordinates x and y. * @param x the starting points x value * @param y the starting points y value */ constructor(e, t) { this._points = new Array(), this._length = 0, this.closed = !1, this._points.push(new at(e, t)); } /** * Adds a new segment until the given coordinates (x, y) to the current Path2. * @param x the added points x value * @param y the added points y value * @returns the updated Path2. */ addLineTo(e, t) { if (this.closed) return this; const i = new at(e, t), r = this._points[this._points.length - 1]; return this._points.push(i), this._length += i.subtract(r).length(), this; } /** * Adds _numberOfSegments_ segments according to the arc definition (middle point coordinates, end point coordinates, the arc start point being the current Path2 last point) to the current Path2. 
* @param midX middle point x value * @param midY middle point y value * @param endX end point x value * @param endY end point y value * @param numberOfSegments (default: 36) * @returns the updated Path2. */ addArcTo(e, t, i, r, s = 36) { if (this.closed) return this; const n = this._points[this._points.length - 1], a = new at(e, t), l = new at(i, r), o = new vte(n, a, l); let u = o.angle.radians() / s; o.orientation === eP.CW && (u *= -1); let h = o.startAngle.radians() + u; for (let d = 0; d < s; d++) { const f = Math.cos(h) * o.radius + o.centerPoint.x, p = Math.sin(h) * o.radius + o.centerPoint.y; this.addLineTo(f, p), h += u; } return this; } /** * Adds _numberOfSegments_ segments according to the quadratic curve definition to the current Path2. * @param controlX control point x value * @param controlY control point y value * @param endX end point x value * @param endY end point y value * @param numberOfSegments (default: 36) * @returns the updated Path2. */ addQuadraticCurveTo(e, t, i, r, s = 36) { if (this.closed) return this; const n = (l, o, u, h) => (1 - l) * (1 - l) * o + 2 * l * (1 - l) * u + l * l * h, a = this._points[this._points.length - 1]; for (let l = 0; l <= s; l++) { const o = l / s, u = n(o, a.x, e, i), h = n(o, a.y, t, r); this.addLineTo(u, h); } return this; } /** * Adds _numberOfSegments_ segments according to the bezier curve definition to the current Path2. * @param originTangentX tangent vector at the origin point x value * @param originTangentY tangent vector at the origin point y value * @param destinationTangentX tangent vector at the destination point x value * @param destinationTangentY tangent vector at the destination point y value * @param endX end point x value * @param endY end point y value * @param numberOfSegments (default: 36) * @returns the updated Path2. */ addBezierCurveTo(e, t, i, r, s, n, a = 36) { if (this.closed) return this; const l = (u, h, d, f, p) => (1 - u) * (1 - u) * (1 - u) * h + 3 * u * (1 - u) * (1 - u) * d + 3 * u * u * (1 - u) * f + u * u * u * p, o = this._points[this._points.length - 1]; for (let u = 0; u <= a; u++) { const h = u / a, d = l(h, o.x, e, i, s), f = l(h, o.y, t, r, n); this.addLineTo(d, f); } return this; } /** * Defines if a given point is inside the polygon defines by the path * @param point defines the point to test * @returns true if the point is inside */ isPointInside(e) { let t = !1; const i = this._points.length; for (let r = i - 1, s = 0; s < i; r = s++) { let n = this._points[r], a = this._points[s], l = a.x - n.x, o = a.y - n.y; if (Math.abs(o) > Number.EPSILON) { if (o < 0 && (n = this._points[s], l = -l, a = this._points[r], o = -o), e.y < n.y || e.y > a.y) continue; if (e.y === n.y && e.x === n.x) return !0; { const u = o * (e.x - n.x) - l * (e.y - n.y); if (u === 0) return !0; if (u < 0) continue; t = !t; } } else { if (e.y !== n.y) continue; if (a.x <= e.x && e.x <= n.x || n.x <= e.x && e.x <= a.x) return !0; } } return t; } /** * Closes the Path2. * @returns the Path2. */ close() { return this.closed = !0, this; } /** * Gets the sum of the distance between each sequential point in the path * @returns the Path2 total length (float). 
*/ length() { let e = this._length; if (this.closed) { const t = this._points[this._points.length - 1], i = this._points[0]; e += i.subtract(t).length(); } return e; } /** * Gets the area of the polygon defined by the path * @returns area value */ area() { const e = this._points.length; let t = 0; for (let i = e - 1, r = 0; r < e; i = r++) t += this._points[i].x * this._points[r].y - this._points[r].x * this._points[i].y; return t * 0.5; } /** * Gets the points which construct the path * @returns the Path2 internal array of points. */ getPoints() { return this._points; } /** * Retrieves the point at the distance aways from the starting point * @param normalizedLengthPosition the length along the path to retrieve the point from * @returns a new Vector2 located at a percentage of the Path2 total length on this path. */ getPointAtLengthPosition(e) { if (e < 0 || e > 1) return at.Zero(); const t = e * this.length(); let i = 0; for (let r = 0; r < this._points.length; r++) { const s = (r + 1) % this._points.length, n = this._points[r], l = this._points[s].subtract(n), o = l.length() + i; if (t >= i && t <= o) { const u = l.normalize(), h = t - i; return new at(n.x + u.x * h, n.y + u.y * h); } i = o; } return at.Zero(); } /** * Creates a new path starting from an x and y position * @param x starting x value * @param y starting y value * @returns a new Path2 starting at the coordinates (x, y). */ static StartingAt(e, t) { return new _w(e, t); } } class fP { /** * new Path3D(path, normal, raw) * Creates a Path3D. A Path3D is a logical math object, so not a mesh. * please read the description in the tutorial : https://doc.babylonjs.com/features/featuresDeepDive/mesh/path3D * @param path an array of Vector3, the curve axis of the Path3D * @param firstNormal (options) Vector3, the first wanted normal to the curve. Ex (0, 1, 0) for a vertical normal. * @param raw (optional, default false) : boolean, if true the returned Path3D isn't normalized. Useful to depict path acceleration or speed. * @param alignTangentsWithPath (optional, default false) : boolean, if true the tangents will be aligned with the path. */ constructor(e, t = null, i, r = !1) { this.path = e, this._curve = new Array(), this._distances = new Array(), this._tangents = new Array(), this._normals = new Array(), this._binormals = new Array(), this._pointAtData = { id: 0, point: D.Zero(), previousPointArrayIndex: 0, position: 0, subPosition: 0, interpolateReady: !1, interpolationMatrix: Ae.Identity() }; for (let s = 0; s < e.length; s++) this._curve[s] = e[s].clone(); this._raw = i || !1, this._alignTangentsWithPath = r, this._compute(t, r); } /** * Returns the Path3D array of successive Vector3 designing its curve. * @returns the Path3D array of successive Vector3 designing its curve. */ getCurve() { return this._curve; } /** * Returns the Path3D array of successive Vector3 designing its curve. * @returns the Path3D array of successive Vector3 designing its curve. */ getPoints() { return this._curve; } /** * @returns the computed length (float) of the path. */ length() { return this._distances[this._distances.length - 1]; } /** * Returns an array populated with tangent vectors on each Path3D curve point. * @returns an array populated with tangent vectors on each Path3D curve point. */ getTangents() { return this._tangents; } /** * Returns an array populated with normal vectors on each Path3D curve point. * @returns an array populated with normal vectors on each Path3D curve point. 
*/ getNormals() { return this._normals; } /** * Returns an array populated with binormal vectors on each Path3D curve point. * @returns an array populated with binormal vectors on each Path3D curve point. */ getBinormals() { return this._binormals; } /** * Returns an array populated with distances (float) of the i-th point from the first curve point. * @returns an array populated with distances (float) of the i-th point from the first curve point. */ getDistances() { return this._distances; } /** * Returns an interpolated point along this path * @param position the position of the point along this path, from 0.0 to 1.0 * @returns a new Vector3 as the point */ getPointAt(e) { return this._updatePointAtData(e).point; } /** * Returns the tangent vector of an interpolated Path3D curve point at the specified position along this path. * @param position the position of the point along this path, from 0.0 to 1.0 * @param interpolated (optional, default false) : boolean, if true returns an interpolated tangent instead of the tangent of the previous path point. * @returns a tangent vector corresponding to the interpolated Path3D curve point, if not interpolated, the tangent is taken from the precomputed tangents array. */ getTangentAt(e, t = !1) { return this._updatePointAtData(e, t), t ? D.TransformCoordinates(D.Forward(), this._pointAtData.interpolationMatrix) : this._tangents[this._pointAtData.previousPointArrayIndex]; } /** * Returns the tangent vector of an interpolated Path3D curve point at the specified position along this path. * @param position the position of the point along this path, from 0.0 to 1.0 * @param interpolated (optional, default false) : boolean, if true returns an interpolated normal instead of the normal of the previous path point. * @returns a normal vector corresponding to the interpolated Path3D curve point, if not interpolated, the normal is taken from the precomputed normals array. */ getNormalAt(e, t = !1) { return this._updatePointAtData(e, t), t ? D.TransformCoordinates(D.Right(), this._pointAtData.interpolationMatrix) : this._normals[this._pointAtData.previousPointArrayIndex]; } /** * Returns the binormal vector of an interpolated Path3D curve point at the specified position along this path. * @param position the position of the point along this path, from 0.0 to 1.0 * @param interpolated (optional, default false) : boolean, if true returns an interpolated binormal instead of the binormal of the previous path point. * @returns a binormal vector corresponding to the interpolated Path3D curve point, if not interpolated, the binormal is taken from the precomputed binormals array. */ getBinormalAt(e, t = !1) { return this._updatePointAtData(e, t), t ? D.TransformCoordinates(D.UpReadOnly, this._pointAtData.interpolationMatrix) : this._binormals[this._pointAtData.previousPointArrayIndex]; } /** * Returns the distance (float) of an interpolated Path3D curve point at the specified position along this path. * @param position the position of the point along this path, from 0.0 to 1.0 * @returns the distance of the interpolated Path3D curve point at the specified position along this path. 
*/ getDistanceAt(e) { return this.length() * e; } /** * Returns the array index of the previous point of an interpolated point along this path * @param position the position of the point to interpolate along this path, from 0.0 to 1.0 * @returns the array index */ getPreviousPointIndexAt(e) { return this._updatePointAtData(e), this._pointAtData.previousPointArrayIndex; } /** * Returns the position of an interpolated point relative to the two path points it lies between, from 0.0 (point A) to 1.0 (point B) * @param position the position of the point to interpolate along this path, from 0.0 to 1.0 * @returns the sub position */ getSubPositionAt(e) { return this._updatePointAtData(e), this._pointAtData.subPosition; } /** * Returns the position of the closest virtual point on this path to an arbitrary Vector3, from 0.0 to 1.0 * @param target the vector of which to get the closest position to * @returns the position of the closest virtual point on this path to the target vector */ getClosestPositionTo(e) { let t = Number.MAX_VALUE, i = 0; for (let r = 0; r < this._curve.length - 1; r++) { const s = this._curve[r + 0], n = this._curve[r + 1].subtract(s).normalize(), a = this._distances[r + 1] - this._distances[r + 0], l = Math.min(Math.max(D.Dot(n, e.subtract(s).normalize()), 0) * D.Distance(s, e) / a, 1), o = D.Distance(s.add(n.scale(l * a)), e); o < t && (t = o, i = (this._distances[r + 0] + a * l) / this.length()); } return i; } /** * Returns a sub path (slice) of this path * @param start the position of the fist path point, from 0.0 to 1.0, or a negative value, which will get wrapped around from the end of the path to 0.0 to 1.0 values * @param end the position of the last path point, from 0.0 to 1.0, or a negative value, which will get wrapped around from the end of the path to 0.0 to 1.0 values * @returns a sub path (slice) of this path */ slice(e = 0, t = 1) { if (e < 0 && (e = 1 - e * -1 % 1), t < 0 && (t = 1 - t * -1 % 1), e > t) { const o = e; e = t, t = o; } const i = this.getCurve(), r = this.getPointAt(e); let s = this.getPreviousPointIndexAt(e); const n = this.getPointAt(t), a = this.getPreviousPointIndexAt(t) + 1, l = []; return e !== 0 && (s++, l.push(r)), l.push(...i.slice(s, a)), (t !== 1 || e === 1) && l.push(n), new fP(l, this.getNormalAt(e), this._raw, this._alignTangentsWithPath); } /** * Forces the Path3D tangent, normal, binormal and distance recomputation. * @param path path which all values are copied into the curves points * @param firstNormal which should be projected onto the curve * @param alignTangentsWithPath (optional, default false) : boolean, if true the tangents will be aligned with the path * @returns the same object updated. 
*/ update(e, t = null, i = !1) { for (let r = 0; r < e.length; r++) this._curve[r].x = e[r].x, this._curve[r].y = e[r].y, this._curve[r].z = e[r].z; return this._compute(t, i), this; } // private function compute() : computes tangents, normals and binormals _compute(e, t = !1) { const i = this._curve.length; if (i < 2) return; this._tangents[0] = this._getFirstNonNullVector(0), this._raw || this._tangents[0].normalize(), this._tangents[i - 1] = this._curve[i - 1].subtract(this._curve[i - 2]), this._raw || this._tangents[i - 1].normalize(); const r = this._tangents[0], s = this._normalVector(r, e); this._normals[0] = s, this._raw || this._normals[0].normalize(), this._binormals[0] = D.Cross(r, this._normals[0]), this._raw || this._binormals[0].normalize(), this._distances[0] = 0; let n, a, l, o, u; for (let h = 1; h < i; h++) n = this._getLastNonNullVector(h), h < i - 1 && (a = this._getFirstNonNullVector(h), this._tangents[h] = t ? a : n.add(a), this._tangents[h].normalize()), this._distances[h] = this._distances[h - 1] + this._curve[h].subtract(this._curve[h - 1]).length(), l = this._tangents[h], u = this._binormals[h - 1], this._normals[h] = D.Cross(u, l), this._raw || (this._normals[h].length() === 0 ? (o = this._normals[h - 1], this._normals[h] = o.clone()) : this._normals[h].normalize()), this._binormals[h] = D.Cross(l, this._normals[h]), this._raw || this._binormals[h].normalize(); this._pointAtData.id = NaN; } // private function getFirstNonNullVector(index) // returns the first non null vector from index : curve[index + N].subtract(curve[index]) _getFirstNonNullVector(e) { let t = 1, i = this._curve[e + t].subtract(this._curve[e]); for (; i.length() === 0 && e + t + 1 < this._curve.length; ) t++, i = this._curve[e + t].subtract(this._curve[e]); return i; } // private function getLastNonNullVector(index) // returns the last non null vector from index : curve[index].subtract(curve[index - N]) _getLastNonNullVector(e) { let t = 1, i = this._curve[e].subtract(this._curve[e - t]); for (; i.length() === 0 && e > t + 1; ) t++, i = this._curve[e].subtract(this._curve[e - t]); return i; } // private function normalVector(v0, vt, va) : // returns an arbitrary point in the plane defined by the point v0 and the vector vt orthogonal to this plane // if va is passed, it returns the va projection on the plane orthogonal to vt at the point v0 _normalVector(e, t) { let i, r = e.length(); if (r === 0 && (r = 1), t == null) { let s; yt.WithinEpsilon(Math.abs(e.y) / r, 1, Sr) ? yt.WithinEpsilon(Math.abs(e.x) / r, 1, Sr) ? yt.WithinEpsilon(Math.abs(e.z) / r, 1, Sr) ? 
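/*
  Usage sketch (not part of the bundle). The Path2 and Path3D classes above are pure math
  helpers (no meshes): Path2 accumulates 2D segments, Path3D precomputes tangents, normals and
  binormals along a Vector3 curve. Coordinates are illustrative assumptions.

  const outline = new BABYLON.Path2(0, 0)
    .addLineTo(2, 0)
    .addArcTo(3, 1, 2, 2)   // arc through mid point (3, 1) to end point (2, 2)
    .addLineTo(0, 2)
    .close();
  const inside = outline.isPointInside(new BABYLON.Vector2(1, 1));

  const points = [new BABYLON.Vector3(0, 0, 0), new BABYLON.Vector3(1, 1, 0), new BABYLON.Vector3(2, 1, 1)];
  const path3d = new BABYLON.Path3D(points);
  const mid = path3d.getPointAt(0.5);               // interpolated point halfway along the path
  const tangent = path3d.getTangentAt(0.5, true);   // interpolated tangent at the same position
*/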
s = D.Zero() : s = new D(0, 0, 1) : s = new D(1, 0, 0) : s = new D(0, -1, 0), i = D.Cross(e, s); } else i = D.Cross(e, t), D.CrossToRef(i, e, i); return i.normalize(), i; } /** * Updates the point at data for an interpolated point along this curve * @param position the position of the point along this curve, from 0.0 to 1.0 * @param interpolateTNB * @interpolateTNB whether to compute the interpolated tangent, normal and binormal * @returns the (updated) point at data */ _updatePointAtData(e, t = !1) { if (this._pointAtData.id === e) return this._pointAtData.interpolateReady || this._updateInterpolationMatrix(), this._pointAtData; this._pointAtData.id = e; const i = this.getPoints(); if (e <= 0) return this._setPointAtData(0, 0, i[0], 0, t); if (e >= 1) return this._setPointAtData(1, 1, i[i.length - 1], i.length - 1, t); let r = i[0], s, n = 0; const a = e * this.length(); for (let l = 1; l < i.length; l++) { s = i[l]; const o = D.Distance(r, s); if (n += o, n === a) return this._setPointAtData(e, 1, s, l, t); if (n > a) { const h = (n - a) / o, d = r.subtract(s), f = s.add(d.scaleInPlace(h)); return this._setPointAtData(e, 1 - h, f, l - 1, t); } r = s; } return this._pointAtData; } /** * Updates the point at data from the specified parameters * @param position where along the path the interpolated point is, from 0.0 to 1.0 * @param subPosition * @param point the interpolated point * @param parentIndex the index of an existing curve point that is on, or else positionally the first behind, the interpolated point * @param interpolateTNB */ _setPointAtData(e, t, i, r, s) { return this._pointAtData.point = i, this._pointAtData.position = e, this._pointAtData.subPosition = t, this._pointAtData.previousPointArrayIndex = r, this._pointAtData.interpolateReady = s, s && this._updateInterpolationMatrix(), this._pointAtData; } /** * Updates the point at interpolation matrix for the tangents, normals and binormals */ _updateInterpolationMatrix() { this._pointAtData.interpolationMatrix = Ae.Identity(); const e = this._pointAtData.previousPointArrayIndex; if (e !== this._tangents.length - 1) { const t = e + 1, i = this._tangents[e].clone(), r = this._normals[e].clone(), s = this._binormals[e].clone(), n = this._tangents[t].clone(), a = this._normals[t].clone(), l = this._binormals[t].clone(), o = Ze.RotationQuaternionFromAxis(r, s, i), u = Ze.RotationQuaternionFromAxis(a, l, n); Ze.Slerp(o, u, this._pointAtData.subPosition).toRotationMatrix(this._pointAtData.interpolationMatrix); } } } class T4 { /** * Returns a Curve3 object along a Quadratic Bezier curve : https://doc.babylonjs.com/features/featuresDeepDive/mesh/drawCurves#quadratic-bezier-curve * @param v0 (Vector3) the origin point of the Quadratic Bezier * @param v1 (Vector3) the control point * @param v2 (Vector3) the end point of the Quadratic Bezier * @param nbPoints (integer) the wanted number of points in the curve * @returns the created Curve3 */ static CreateQuadraticBezier(e, t, i, r) { r = r > 2 ? 
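/*
  Usage sketch (not part of the bundle). Curve3 builds an array of Vector3 along parametric
  curves (quadratic/cubic Bezier, Hermite, Catmull-Rom, arcs); the resulting points are
  typically fed to a lines mesh. Values are illustrative assumptions.

  const bezier = BABYLON.Curve3.CreateQuadraticBezier(
    new BABYLON.Vector3(0, 0, 0),   // origin
    new BABYLON.Vector3(1, 2, 0),   // control point
    new BABYLON.Vector3(2, 0, 0),   // end point
    25);                            // number of points
  const lines = BABYLON.MeshBuilder.CreateLines("bezier", { points: bezier.getPoints() }, scene);
*/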
r : 3; const s = [], n = (a, l, o, u) => (1 - a) * (1 - a) * l + 2 * a * (1 - a) * o + a * a * u; for (let a = 0; a <= r; a++) s.push(new D(n(a / r, e.x, t.x, i.x), n(a / r, e.y, t.y, i.y), n(a / r, e.z, t.z, i.z))); return new T4(s); } /** * Returns a Curve3 object along a Cubic Bezier curve : https://doc.babylonjs.com/features/featuresDeepDive/mesh/drawCurves#cubic-bezier-curve * @param v0 (Vector3) the origin point of the Cubic Bezier * @param v1 (Vector3) the first control point * @param v2 (Vector3) the second control point * @param v3 (Vector3) the end point of the Cubic Bezier * @param nbPoints (integer) the wanted number of points in the curve * @returns the created Curve3 */ static CreateCubicBezier(e, t, i, r, s) { s = s > 3 ? s : 4; const n = [], a = (l, o, u, h, d) => (1 - l) * (1 - l) * (1 - l) * o + 3 * l * (1 - l) * (1 - l) * u + 3 * l * l * (1 - l) * h + l * l * l * d; for (let l = 0; l <= s; l++) n.push(new D(a(l / s, e.x, t.x, i.x, r.x), a(l / s, e.y, t.y, i.y, r.y), a(l / s, e.z, t.z, i.z, r.z))); return new T4(n); } /** * Returns a Curve3 object along a Hermite Spline curve : https://doc.babylonjs.com/features/featuresDeepDive/mesh/drawCurves#hermite-spline * @param p1 (Vector3) the origin point of the Hermite Spline * @param t1 (Vector3) the tangent vector at the origin point * @param p2 (Vector3) the end point of the Hermite Spline * @param t2 (Vector3) the tangent vector at the end point * @param nSeg (integer) the number of curve segments or nSeg + 1 points in the array * @returns the created Curve3 */ static CreateHermiteSpline(e, t, i, r, s) { const n = [], a = 1 / s; for (let l = 0; l <= s; l++) n.push(D.Hermite(e, t, i, r, l * a)); return new T4(n); } /** * Returns a Curve3 object along a CatmullRom Spline curve : * @param points (array of Vector3) the points the spline must pass through. At least, four points required * @param nbPoints (integer) the wanted number of points between each curve control points * @param closed (boolean) optional with default false, when true forms a closed loop from the points * @returns the created Curve3 */ static CreateCatmullRomSpline(e, t, i) { const r = [], s = 1 / t; let n = 0; if (i) { const a = e.length; for (let l = 0; l < a; l++) { n = 0; for (let o = 0; o < t; o++) r.push(D.CatmullRom(e[l % a], e[(l + 1) % a], e[(l + 2) % a], e[(l + 3) % a], n)), n += s; } r.push(r[0]); } else { const a = []; a.push(e[0].clone()), Array.prototype.push.apply(a, e), a.push(e[e.length - 1].clone()); let l = 0; for (; l < a.length - 3; l++) { n = 0; for (let o = 0; o < t; o++) r.push(D.CatmullRom(a[l], a[l + 1], a[l + 2], a[l + 3], n)), n += s; } l--, r.push(D.CatmullRom(a[l], a[l + 1], a[l + 2], a[l + 3], n)); } return new T4(r); } /** * Returns a Curve3 object along an arc through three vector3 points: * The three points should not be colinear. When they are the Curve3 is empty. * @param first (Vector3) the first point the arc must pass through. * @param second (Vector3) the second point the arc must pass through. * @param third (Vector3) the third point the arc must pass through. * @param steps (number) the larger the number of steps the more detailed the arc. 
* @param closed (boolean) optional with default false, when true forms the chord from the first and third point * @param fullCircle Circle (boolean) optional with default false, when true forms the complete circle through the three points * @returns the created Curve3 */ static ArcThru3Points(e, t, i, r = 32, s = !1, n = !1) { const a = [], l = t.subtract(e), o = i.subtract(t), u = e.subtract(i), h = D.Cross(l, o), d = h.length(); if (d < Math.pow(10, -8)) return new T4(a); const f = l.lengthSquared(), p = o.lengthSquared(), m = u.lengthSquared(), _ = h.lengthSquared(), v = l.length(), C = o.length(), x = u.length(), b = 0.5 * v * C * x / d, S = D.Dot(l, u), M = D.Dot(l, o), R = D.Dot(o, u), w = -0.5 * p * S / _, V = -0.5 * m * M / _, k = -0.5 * f * R / _, L = e.scale(w).add(t.scale(V)).add(i.scale(k)), U = e.subtract(L).normalize(), K = D.Cross(h, U).normalize(); if (n) { const ee = 2 * Math.PI / r; for (let Z = 0; Z <= 2 * Math.PI; Z += ee) a.push(L.add(U.scale(b * Math.cos(Z)).add(K.scale(b * Math.sin(Z))))); a.push(e); } else { const ee = 1 / r; let Z = 0, q = D.Zero(); do q = L.add(U.scale(b * Math.cos(Z)).add(K.scale(b * Math.sin(Z)))), a.push(q), Z += ee; while (!q.equalsWithEpsilon(i, b * ee * 1.1)); a.push(i), s && a.push(e); } return new T4(a); } /** * A Curve3 object is a logical object, so not a mesh, to handle curves in the 3D geometric space. * A Curve3 is designed from a series of successive Vector3. * Tuto : https://doc.babylonjs.com/features/featuresDeepDive/mesh/drawCurves#curve3-object * @param points points which make up the curve */ constructor(e) { this._length = 0, this._points = e, this._length = this._computeLength(e); } /** * @returns the Curve3 stored array of successive Vector3 */ getPoints() { return this._points; } /** * @returns the computed length (float) of the curve. */ length() { return this._length; } /** * Returns a new instance of Curve3 object : var curve = curveA.continue(curveB); * This new Curve3 is built by translating and sticking the curveB at the end of the curveA. * curveA and curveB keep unchanged. * @param curve the curve to continue from this curve * @returns the newly constructed curve */ continue(e) { const t = this._points[this._points.length - 1], i = this._points.slice(), r = e.getPoints(); for (let n = 1; n < r.length; n++) i.push(r[n].subtract(r[0]).add(t)); return new T4(i); } _computeLength(e) { let t = 0; for (let i = 1; i < e.length; i++) t += e[i].subtract(e[i - 1]).length(); return t; } } class hl { constructor() { this._easingMode = hl.EASINGMODE_EASEIN; } /** * Sets the easing mode of the current function. * @param easingMode Defines the willing mode (EASINGMODE_EASEIN, EASINGMODE_EASEOUT or EASINGMODE_EASEINOUT) */ setEasingMode(e) { const t = Math.min(Math.max(e, 0), 2); this._easingMode = t; } /** * Gets the current easing mode. * @returns the easing mode */ getEasingMode() { return this._easingMode; } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars easeInCore(e) { throw new Error("You must implement this method"); } /** * Given an input gradient between 0 and 1, this returns the corresponding value * of the easing function. * @param gradient Defines the value between 0 and 1 we want the easing value for * @returns the corresponding value on the curve defined by the easing function */ ease(e) { switch (this._easingMode) { case hl.EASINGMODE_EASEIN: return this.easeInCore(e); case hl.EASINGMODE_EASEOUT: return 1 - this.easeInCore(1 - e); } return e >= 0.5 ? 
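/* Worked example of the three easing modes (illustrative; uses the quadratic ease,
   easeInCore(g) = g * g, defined a little further below as class dK):
     EASEIN:    ease(g) = easeInCore(g)          -> ease(0.5) = 0.25
     EASEOUT:   ease(g) = 1 - easeInCore(1 - g)  -> ease(0.5) = 0.75
     EASEINOUT mirrors around g = 0.5:
       ease(0.25) = easeInCore(0.5) * 0.5             = 0.125
       ease(0.75) = (1 - easeInCore(0.5)) * 0.5 + 0.5 = 0.875
   setEasingMode() clamps its argument to the [0, 2] range of these three modes. */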
(1 - this.easeInCore((1 - e) * 2)) * 0.5 + 0.5 : this.easeInCore(e * 2) * 0.5; } } hl.EASINGMODE_EASEIN = 0; hl.EASINGMODE_EASEOUT = 1; hl.EASINGMODE_EASEINOUT = 2; class Ate extends hl { /** * @internal */ easeInCore(e) { return e = Math.max(0, Math.min(1, e)), 1 - Math.sqrt(1 - e * e); } } class yte extends hl { /** * Instantiates a back ease easing * @see https://easings.net/#easeInBack * @param amplitude Defines the amplitude of the function */ constructor(e = 1) { super(), this.amplitude = e; } /** * @internal */ easeInCore(e) { const t = Math.max(0, this.amplitude); return Math.pow(e, 3) - e * t * Math.sin(3.141592653589793 * e); } } class oce extends hl { /** * Instantiates a bounce easing * @see https://easings.net/#easeInBounce * @param bounces Defines the number of bounces * @param bounciness Defines the amplitude of the bounce */ constructor(e = 3, t = 2) { super(), this.bounces = e, this.bounciness = t; } /** * @internal */ easeInCore(e) { const t = Math.max(0, this.bounces); let i = this.bounciness; i <= 1 && (i = 1.001); const r = Math.pow(i, t), s = 1 - i, n = (1 - r) / s + r * 0.5, a = e * n, l = Math.log(-a * (1 - i) + 1) / Math.log(i), o = Math.floor(l), u = o + 1, h = (1 - Math.pow(i, o)) / (s * n), d = (1 - Math.pow(i, u)) / (s * n), f = (h + d) * 0.5, p = e - f, m = f - h; return -Math.pow(1 / i, t - o) / (m * m) * (p - m) * (p + m); } } class lce extends hl { /** * @internal */ easeInCore(e) { return e * e * e; } } class cce extends hl { /** * Instantiates an elastic easing function * @see https://easings.net/#easeInElastic * @param oscillations Defines the number of oscillations * @param springiness Defines the amplitude of the oscillations */ constructor(e = 3, t = 3) { super(), this.oscillations = e, this.springiness = t; } /** * @internal */ easeInCore(e) { let t; const i = Math.max(0, this.oscillations), r = Math.max(0, this.springiness); return r == 0 ? t = e : t = (Math.exp(r * e) - 1) / (Math.exp(r) - 1), t * Math.sin((6.283185307179586 * i + 1.5707963267948966) * e); } } class Cte extends hl { /** * Instantiates an exponential easing function * @see https://easings.net/#easeInExpo * @param exponent Defines the exponent of the function */ constructor(e = 2) { super(), this.exponent = e; } /** * @internal */ easeInCore(e) { return this.exponent <= 0 ? 
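/* Usage sketch for the concrete easing functions above (illustrative only; yte is the back ease
   and oce the bounce ease in this bundle — the parameter values are made up):
     const back = new yte(0.7);                    // amplitude of the overshoot
     back.setEasingMode(hl.EASINGMODE_EASEOUT);
     const eased = back.ease(0.5);                 // value on the eased curve at gradient 0.5
     const bounce = new oce(4, 1.5);               // 4 bounces, bounciness 1.5
     bounce.setEasingMode(hl.EASINGMODE_EASEINOUT);
   Each subclass only implements easeInCore(); ease() applies the selected mode on top of it. */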
e : (Math.exp(this.exponent * e) - 1) / (Math.exp(this.exponent) - 1); } } class uce extends hl { /** * Instantiates an power base easing function * @see https://easings.net/#easeInQuad * @param power Defines the power of the function */ constructor(e = 2) { super(), this.power = e; } /** * @internal */ easeInCore(e) { const t = Math.max(0, this.power); return Math.pow(e, t); } } class dK extends hl { /** * @internal */ easeInCore(e) { return e * e; } } class hce extends hl { /** * @internal */ easeInCore(e) { return e * e * e * e; } } class dce extends hl { /** * @internal */ easeInCore(e) { return e * e * e * e * e; } } class fK extends hl { /** * @internal */ easeInCore(e) { return 1 - Math.sin(1.5707963267948966 * (1 - e)); } } class fce extends hl { /** * Instantiates a bezier function * @see http://cubic-bezier.com/#.17,.67,.83,.67 * @param x1 Defines the x component of the start tangent in the bezier curve * @param y1 Defines the y component of the start tangent in the bezier curve * @param x2 Defines the x component of the end tangent in the bezier curve * @param y2 Defines the y component of the end tangent in the bezier curve */ constructor(e = 0, t = 0, i = 1, r = 1) { super(), this.x1 = e, this.y1 = t, this.x2 = i, this.y2 = r; } /** * @internal */ easeInCore(e) { return gte.Interpolate(e, this.x1, this.y1, this.x2, this.y2); } } class KB { /** * Initializes the animation event * @param frame The frame for which the event is triggered * @param action The event to perform when triggered * @param onlyOnce Specifies if the event should be triggered only once */ constructor(e, t, i) { this.frame = e, this.action = t, this.onlyOnce = i, this.isDone = !1; } /** @internal */ _clone() { return new KB(this.frame, this.action, this.onlyOnce); } } class xte { /** * Returns the string "TargetedAnimation" * @returns "TargetedAnimation" */ getClassName() { return "TargetedAnimation"; } /** * Serialize the object * @returns the JSON object representing the current entity */ serialize() { const e = {}; return e.animation = this.animation.serialize(), e.targetId = this.target.id, e; } } class S4 { /** * Makes sure that the animations are either played or stopped according to the animation group mask. * Note however that the call won't have any effect if the animation group has not been started yet. * You should call this function if you modify the mask after the animation group has been started. */ syncWithMask() { if (!this.mask) { this._numActiveAnimatables = this._targetedAnimations.length; return; } this._numActiveAnimatables = 0; for (let e = 0; e < this._animatables.length; ++e) { const t = this._animatables[e]; this.mask.disabled || this.mask.retainsTarget(t.target.name) ? (this._numActiveAnimatables++, t.paused && t.restart()) : t.paused || t.pause(); } } /** * Removes all animations for the targets not retained by the animation group mask. * Use this function if you know you won't need those animations anymore and if you want to free memory. 
*/ removeUnmaskedAnimations() { if (!(!this.mask || this.mask.disabled)) { for (let e = 0; e < this._animatables.length; ++e) { const t = this._animatables[e]; this.mask.retainsTarget(t.target.name) || (t.stop(), this._animatables.splice(e, 1), --e); } for (let e = 0; e < this._targetedAnimations.length; e++) { const t = this._targetedAnimations[e]; this.mask.retainsTarget(t.target.name) || (this._targetedAnimations.splice(e, 1), --e); } } } /** * Gets the first frame */ get from() { return this._from; } /** * Gets the last frame */ get to() { return this._to; } /** * Define if the animations are started */ get isStarted() { return this._isStarted; } /** * Gets a value indicating that the current group is playing */ get isPlaying() { return this._isStarted && !this._isPaused; } /** * Gets or sets the speed ratio to use for all animations */ get speedRatio() { return this._speedRatio; } /** * Gets or sets the speed ratio to use for all animations */ set speedRatio(e) { if (this._speedRatio !== e) { this._speedRatio = e; for (let t = 0; t < this._animatables.length; t++) { const i = this._animatables[t]; i.speedRatio = this._speedRatio; } } } /** * Gets or sets if all animations should loop or not */ get loopAnimation() { return this._loopAnimation; } set loopAnimation(e) { if (this._loopAnimation !== e) { this._loopAnimation = e; for (let t = 0; t < this._animatables.length; t++) { const i = this._animatables[t]; i.loopAnimation = this._loopAnimation; } } } /** * Gets or sets if all animations should be evaluated additively */ get isAdditive() { return this._isAdditive; } set isAdditive(e) { if (this._isAdditive !== e) { this._isAdditive = e; for (let t = 0; t < this._animatables.length; t++) { const i = this._animatables[t]; i.isAdditive = this._isAdditive; } } } /** * Gets or sets the weight to apply to all animations of the group */ get weight() { return this._weight; } set weight(e) { this._weight !== e && (this._weight = e, this.setWeightForAllAnimatables(this._weight)); } /** * Gets the targeted animations for this animation group */ get targetedAnimations() { return this._targetedAnimations; } /** * returning the list of animatables controlled by this animation group. 
*/ get animatables() { return this._animatables; } /** * Gets the list of target animations */ get children() { return this._targetedAnimations; } /** * Gets or sets the order of play of the animation group (default: 0) */ get playOrder() { return this._playOrder; } set playOrder(e) { if (this._playOrder !== e && (this._playOrder = e, this._animatables.length > 0)) { for (let t = 0; t < this._animatables.length; t++) this._animatables[t].playOrder = this._playOrder; this._scene.sortActiveAnimatables(); } } /** * Allows the animations of the animation group to blend with current running animations * Note that a null value means that each animation will use their own existing blending configuration (Animation.enableBlending) */ get enableBlending() { return this._enableBlending; } set enableBlending(e) { if (this._enableBlending !== e && (this._enableBlending = e, e !== null)) for (let t = 0; t < this._targetedAnimations.length; ++t) this._targetedAnimations[t].animation.enableBlending = e; } /** * Gets or sets the animation blending speed * Note that a null value means that each animation will use their own existing blending configuration (Animation.blendingSpeed) */ get blendingSpeed() { return this._blendingSpeed; } set blendingSpeed(e) { if (this._blendingSpeed !== e && (this._blendingSpeed = e, e !== null)) for (let t = 0; t < this._targetedAnimations.length; ++t) this._targetedAnimations[t].animation.blendingSpeed = e; } /** * Gets the length (in seconds) of the animation group * This function assumes that all animations are played at the same framePerSecond speed! * Note: you can only call this method after you've added at least one targeted animation! * @param from Starting frame range (default is AnimationGroup.from) * @param to Ending frame range (default is AnimationGroup.to) * @returns The length in seconds */ getLength(e, t) { e = e ?? this._from, t = t ?? this._to; const i = this.targetedAnimations[0].animation.framePerSecond * this._speedRatio; return (t - e) / i; } /** * Merge the array of animation groups into a new animation group * @param animationGroups List of animation groups to merge * @param disposeSource If true, animation groups will be disposed after being merged (default: true) * @param normalize If true, animation groups will be normalized before being merged, so that all animations have the same "from" and "to" frame (default: false) * @param weight Weight for the new animation group. If not provided, it will inherit the weight from the first animation group of the array * @returns The new animation group or null if no animation groups were passed */ static MergeAnimationGroups(e, t = !0, i = !1, r) { if (e.length === 0) return null; r = r ?? e[0].weight; let s = Number.MAX_VALUE, n = -Number.MAX_VALUE; if (i) for (const l of e) l.from < s && (s = l.from), l.to > n && (n = l.to); const a = new S4(e[0].name + "_merged", e[0]._scene, r); for (const l of e) { i && l.normalize(s, n); for (const o of l.targetedAnimations) a.addTargetedAnimation(o.animation, o.target); t && l.dispose(); } return a; } /** * Instantiates a new Animation Group. * This helps managing several animations at once. 
* @see https://doc.babylonjs.com/features/featuresDeepDive/animation/groupAnimations * @param name Defines the name of the group * @param scene Defines the scene the group belongs to * @param weight Defines the weight to use for animations in the group (-1.0 by default, meaning "no weight") * @param playOrder Defines the order of play of the animation group (default is 0) */ constructor(e, t = null, i = -1, r = 0) { this.name = e, this._targetedAnimations = new Array(), this._animatables = new Array(), this._from = Number.MAX_VALUE, this._to = -Number.MAX_VALUE, this._speedRatio = 1, this._loopAnimation = !1, this._isAdditive = !1, this._weight = -1, this._playOrder = 0, this._enableBlending = null, this._blendingSpeed = null, this._numActiveAnimatables = 0, this._parentContainer = null, this.onAnimationEndObservable = new Fe(), this.onAnimationLoopObservable = new Fe(), this.onAnimationGroupLoopObservable = new Fe(), this.onAnimationGroupEndObservable = new Fe(), this.onAnimationGroupPauseObservable = new Fe(), this.onAnimationGroupPlayObservable = new Fe(), this.metadata = null, this._animationLoopFlags = [], this._scene = t || gi.LastCreatedScene, this._weight = i, this._playOrder = r, this.uniqueId = this._scene.getUniqueId(), this._scene.addAnimationGroup(this); } /** * Add an animation (with its target) in the group * @param animation defines the animation we want to add * @param target defines the target of the animation * @returns the TargetedAnimation object */ addTargetedAnimation(e, t) { const i = new xte(); i.animation = e, i.target = t; const r = e.getKeys(); return this._from > r[0].frame && (this._from = r[0].frame), this._to < r[r.length - 1].frame && (this._to = r[r.length - 1].frame), this._enableBlending !== null && (e.enableBlending = this._enableBlending), this._blendingSpeed !== null && (e.blendingSpeed = this._blendingSpeed), this._targetedAnimations.push(i), i; } /** * Remove an animation from the group * @param animation defines the animation we want to remove */ removeTargetedAnimation(e) { for (let t = this._targetedAnimations.length - 1; t > -1; t--) this._targetedAnimations[t].animation === e && this._targetedAnimations.splice(t, 1); } /** * This function will normalize every animation in the group to make sure they all go from beginFrame to endFrame * It can add constant keys at begin or end * @param beginFrame defines the new begin frame for all animations or the smallest begin frame of all animations if null (defaults to null) * @param endFrame defines the new end frame for all animations or the largest end frame of all animations if null (defaults to null) * @returns the animation group */ normalize(e = null, t = null) { e == null && (e = this._from), t == null && (t = this._to); for (let i = 0; i < this._targetedAnimations.length; i++) { const s = this._targetedAnimations[i].animation.getKeys(), n = s[0], a = s[s.length - 1]; if (n.frame > e) { const l = { frame: e, value: n.value, inTangent: n.inTangent, outTangent: n.outTangent, interpolation: n.interpolation }; s.splice(0, 0, l); } if (a.frame < t) { const l = { frame: t, value: a.value, inTangent: a.inTangent, outTangent: a.outTangent, interpolation: a.interpolation }; s.push(l); } } return this._from = e, this._to = t, this; } _processLoop(e, t, i) { e.onAnimationLoop = () => { this.onAnimationLoopObservable.notifyObservers(t), !this._animationLoopFlags[i] && (this._animationLoopFlags[i] = !0, this._animationLoopCount++, this._animationLoopCount === this._numActiveAnimatables && 
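/* Usage sketch for building an animation group (illustrative only; S4 is the AnimationGroup
   class in this bundle — scene, walkAnimation and characterMesh are assumed to already exist):
     const group = new S4("walk", scene);
     group.addTargetedAnimation(walkAnimation, characterMesh);
     // from/to are widened automatically to cover the keys of every added animation
     const seconds = group.getLength();  // (to - from) / (framePerSecond * speedRatio)
     group.normalize(0, 120);            // pad every animation to the frame range [0, 120]
   MergeAnimationGroups([a, b]) combines several groups into one and can normalize them to a
   common frame range first. */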
(this.onAnimationGroupLoopObservable.notifyObservers(this), this._animationLoopCount = 0, this._animationLoopFlags.length = 0)); }; } /** * Start all animations on given targets * @param loop defines if animations must loop * @param speedRatio defines the ratio to apply to animation speed (1 by default) * @param from defines the from key (optional) * @param to defines the to key (optional) * @param isAdditive defines the additive state for the resulting animatables (optional) * @returns the current animation group */ start(e = !1, t = 1, i, r, s) { if (this._isStarted || this._targetedAnimations.length === 0) return this; this._loopAnimation = e, this._animationLoopCount = 0, this._animationLoopFlags.length = 0; for (let n = 0; n < this._targetedAnimations.length; n++) { const a = this._targetedAnimations[n], l = this._scene.beginDirectAnimation(a.target, [a.animation], i !== void 0 ? i : this._from, r !== void 0 ? r : this._to, e, t, void 0, void 0, s !== void 0 ? s : this._isAdditive); l.weight = this._weight, l.playOrder = this._playOrder, l.onAnimationEnd = () => { this.onAnimationEndObservable.notifyObservers(a), this._checkAnimationGroupEnded(l); }, this._processLoop(l, a, n), this._animatables.push(l); } return this.syncWithMask(), this._scene.sortActiveAnimatables(), this._speedRatio = t, this._isStarted = !0, this._isPaused = !1, this.onAnimationGroupPlayObservable.notifyObservers(this), this; } /** * Pause all animations * @returns the animation group */ pause() { if (!this._isStarted) return this; this._isPaused = !0; for (let e = 0; e < this._animatables.length; e++) this._animatables[e].pause(); return this.onAnimationGroupPauseObservable.notifyObservers(this), this; } /** * Play all animations to initial state * This function will start() the animations if they were not started or will restart() them if they were paused * @param loop defines if animations must loop * @returns the animation group */ play(e) { return this.isStarted && this._animatables.length === this._targetedAnimations.length ? 
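/* Playback sketch (illustrative only; continues the hypothetical "group" from the example above):
     group.start(true, 1.0);   // loop, speedRatio 1 — begins all targeted animations
     group.pause();            // pauses every animatable in the group
     group.play();             // restart()s a started group, otherwise stop() + start()
     group.goToFrame(30);      // jumps every animatable to frame 30
     group.stop();             // stops and clears the group's animatables
   start(loop, speedRatio, from, to, isAdditive) is a no-op if the group is already started or
   has no targeted animations. */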
(e !== void 0 && (this.loopAnimation = e), this.restart()) : (this.stop(), this.start(e, this._speedRatio)), this._isPaused = !1, this; } /** * Reset all animations to initial state * @returns the animation group */ reset() { if (!this._isStarted) return this.play(), this.goToFrame(0), this.stop(), this; for (let e = 0; e < this._animatables.length; e++) this._animatables[e].reset(); return this; } /** * Restart animations from key 0 * @returns the animation group */ restart() { if (!this._isStarted) return this; for (let e = 0; e < this._animatables.length; e++) this._animatables[e].restart(); return this.onAnimationGroupPlayObservable.notifyObservers(this), this; } /** * Stop all animations * @returns the animation group */ stop() { if (!this._isStarted) return this; const e = this._animatables.slice(); for (let i = 0; i < e.length; i++) e[i].stop(void 0, void 0, !0); let t = 0; for (let i = 0; i < this._scene._activeAnimatables.length; i++) { const r = this._scene._activeAnimatables[i]; r._runtimeAnimations.length > 0 && (this._scene._activeAnimatables[t++] = r); } return this._scene._activeAnimatables.length = t, this._isStarted = !1, this; } /** * Set animation weight for all animatables * * @since 6.12.4 * You can pass the weight to the AnimationGroup constructor, or use the weight property to set it after the group has been created, * making it easier to define the overall animation weight than calling setWeightForAllAnimatables() after the animation group has been started * @param weight defines the weight to use * @returns the animationGroup * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#animation-weights */ setWeightForAllAnimatables(e) { for (let t = 0; t < this._animatables.length; t++) { const i = this._animatables[t]; i.weight = e; } return this; } /** * Synchronize and normalize all animatables with a source animatable * @param root defines the root animatable to synchronize with (null to stop synchronizing) * @returns the animationGroup * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#animation-weights */ syncAllAnimationsWith(e) { for (let t = 0; t < this._animatables.length; t++) this._animatables[t].syncWith(e); return this; } /** * Goes to a specific frame in this animation group * @param frame the frame number to go to * @returns the animationGroup */ goToFrame(e) { if (!this._isStarted) return this; for (let t = 0; t < this._animatables.length; t++) this._animatables[t].goToFrame(e); return this; } /** * Dispose all associated resources */ dispose() { this._targetedAnimations.length = 0, this._animatables.length = 0; const e = this._scene.animationGroups.indexOf(this); if (e > -1 && this._scene.animationGroups.splice(e, 1), this._parentContainer) { const t = this._parentContainer.animationGroups.indexOf(this); t > -1 && this._parentContainer.animationGroups.splice(t, 1), this._parentContainer = null; } this.onAnimationEndObservable.clear(), this.onAnimationGroupEndObservable.clear(), this.onAnimationGroupPauseObservable.clear(), this.onAnimationGroupPlayObservable.clear(), this.onAnimationLoopObservable.clear(), this.onAnimationGroupLoopObservable.clear(); } _checkAnimationGroupEnded(e) { const t = this._animatables.indexOf(e); t > -1 && this._animatables.splice(t, 1), this._animatables.length === 0 && (this._isStarted = !1, this.onAnimationGroupEndObservable.notifyObservers(this)); } /** * Clone the current animation group and returns a copy * @param newName defines the name of the new 
group * @param targetConverter defines an optional function used to convert current animation targets to new ones * @param cloneAnimations defines if the animations should be cloned or referenced * @returns the new animation group */ clone(e, t, i = !1) { const r = new S4(e || this.name, this._scene, this._weight, this._playOrder); r._from = this.from, r._to = this.to, r._speedRatio = this.speedRatio, r._loopAnimation = this.loopAnimation, r._isAdditive = this.isAdditive, r._enableBlending = this.enableBlending, r._blendingSpeed = this.blendingSpeed, r.metadata = this.metadata, r.mask = this.mask; for (const s of this._targetedAnimations) r.addTargetedAnimation(i ? s.animation.clone() : s.animation, t ? t(s.target) : s.target); return r; } /** * Serializes the animationGroup to an object * @returns Serialized object */ serialize() { const e = {}; e.name = this.name, e.from = this.from, e.to = this.to, e.speedRatio = this.speedRatio, e.loopAnimation = this.loopAnimation, e.isAdditive = this.isAdditive, e.weight = this.weight, e.playOrder = this.playOrder, e.enableBlending = this.enableBlending, e.blendingSpeed = this.blendingSpeed, e.targetedAnimations = []; for (let t = 0; t < this.targetedAnimations.length; t++) { const i = this.targetedAnimations[t]; e.targetedAnimations[t] = i.serialize(); } return $s && $s.HasTags(this) && (e.tags = $s.GetTags(this)), this.metadata && (e.metadata = this.metadata), e; } // Statics /** * Returns a new AnimationGroup object parsed from the source provided. * @param parsedAnimationGroup defines the source * @param scene defines the scene that will receive the animationGroup * @returns a new AnimationGroup */ static Parse(e, t) { const i = new S4(e.name, t, e.weight, e.playOrder); for (let r = 0; r < e.targetedAnimations.length; r++) { const s = e.targetedAnimations[r], n = nt.Parse(s.animation), a = s.targetId; if (s.animation.property === "influence") { const l = t.getMorphTargetById(a); l && i.addTargetedAnimation(n, l); } else { const l = t.getNodeById(a); l != null && i.addTargetedAnimation(n, l); } } return $s && $s.AddTagsTo(i, e.tags), e.from !== null && e.to !== null && i.normalize(e.from, e.to), e.speedRatio !== void 0 && (i._speedRatio = e.speedRatio), e.loopAnimation !== void 0 && (i._loopAnimation = e.loopAnimation), e.isAdditive !== void 0 && (i._isAdditive = e.isAdditive), e.weight !== void 0 && (i._weight = e.weight), e.playOrder !== void 0 && (i._playOrder = e.playOrder), e.enableBlending !== void 0 && (i._enableBlending = e.enableBlending), e.blendingSpeed !== void 0 && (i._blendingSpeed = e.blendingSpeed), e.metadata !== void 0 && (i.metadata = e.metadata), i; } /** @internal */ static MakeAnimationAdditive(e, t, i, r = !1, s) { let n; typeof t == "object" ? 
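/* Weight-blending sketch (illustrative only; "idleGroup" and "runGroup" are hypothetical
   AnimationGroup (S4) instances):
     idleGroup.start(true);
     runGroup.start(true);
     idleGroup.weight = 1;                      // or setWeightForAllAnimatables(1)
     runGroup.weight = 0;
     // ...later, cross-fade by moving weight from one group to the other
     idleGroup.setWeightForAllAnimatables(0.25);
     runGroup.setWeightForAllAnimatables(0.75);
   The weight can also be passed to the constructor (third argument, -1 meaning "no weight")
   and is applied to every animatable of the group. */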
n = t : n = { referenceFrame: t, range: i, cloneOriginalAnimationGroup: r, clonedAnimationName: s }; let a = e; n.cloneOriginalAnimationGroup && (a = e.clone(n.clonedAnimationGroupName || a.name)); const l = a.targetedAnimations; for (let o = 0; o < l.length; o++) { const u = l[o]; u.animation = nt.MakeAnimationAdditive(u.animation, n); } if (a.isAdditive = !0, n.clipKeys) { let o = Number.MAX_VALUE, u = -Number.MAX_VALUE; const h = a.targetedAnimations; for (let d = 0; d < h.length; d++) { const m = h[d].animation.getKeys(); o > m[0].frame && (o = m[0].frame), u < m[m.length - 1].frame && (u = m[m.length - 1].frame); } a._from = o, a._to = u; } return a; } /** * Creates a new animation, keeping only the keys that are inside a given key range * @param sourceAnimationGroup defines the animation group on which to operate * @param fromKey defines the lower bound of the range * @param toKey defines the upper bound of the range * @param name defines the name of the new animation group. If not provided, use the same name as animationGroup * @param dontCloneAnimations defines whether or not the animations should be cloned before clipping the keys. Default is false, so animations will be cloned * @returns a new animation group stripped from all the keys outside the given range */ static ClipKeys(e, t, i, r, s) { const n = e.clone(r || e.name); return S4.ClipKeysInPlace(n, t, i, s); } /** * Updates an existing animation, keeping only the keys that are inside a given key range * @param animationGroup defines the animation group on which to operate * @param fromKey defines the lower bound of the range * @param toKey defines the upper bound of the range * @param dontCloneAnimations defines whether or not the animations should be cloned before clipping the keys. Default is false, so animations will be cloned * @returns the animationGroup stripped from all the keys outside the given range */ static ClipKeysInPlace(e, t, i, r) { return S4.ClipInPlace(e, t, i, r, !1); } /** * Creates a new animation, keeping only the frames that are inside a given frame range * @param sourceAnimationGroup defines the animation group on which to operate * @param fromFrame defines the lower bound of the range * @param toFrame defines the upper bound of the range * @param name defines the name of the new animation group. If not provided, use the same name as animationGroup * @param dontCloneAnimations defines whether or not the animations should be cloned before clipping the frames. Default is false, so animations will be cloned * @returns a new animation group stripped from all the frames outside the given range */ static ClipFrames(e, t, i, r, s) { const n = e.clone(r || e.name); return S4.ClipFramesInPlace(n, t, i, s); } /** * Updates an existing animation, keeping only the frames that are inside a given frame range * @param animationGroup defines the animation group on which to operate * @param fromFrame defines the lower bound of the range * @param toFrame defines the upper bound of the range * @param dontCloneAnimations defines whether or not the animations should be cloned before clipping the frames. 
Default is false, so animations will be cloned * @returns the animationGroup stripped from all the frames outside the given range */ static ClipFramesInPlace(e, t, i, r) { return S4.ClipInPlace(e, t, i, r, !0); } /** * Updates an existing animation, keeping only the keys that are inside a given key or frame range * @param animationGroup defines the animation group on which to operate * @param start defines the lower bound of the range * @param end defines the upper bound of the range * @param dontCloneAnimations defines whether or not the animations should be cloned before clipping the keys. Default is false, so animations will be cloned * @param useFrame defines if the range is defined by frame numbers or key indices (default is false which means use key indices) * @returns the animationGroup stripped from all the keys outside the given range */ static ClipInPlace(e, t, i, r, s = !1) { let n = Number.MAX_VALUE, a = -Number.MAX_VALUE; const l = e.targetedAnimations; for (let o = 0; o < l.length; o++) { const u = l[o], h = r ? u.animation : u.animation.clone(); s && (h.createKeyForFrame(t), h.createKeyForFrame(i)); const d = h.getKeys(), f = []; let p = Number.MAX_VALUE; for (let m = 0; m < d.length; m++) { const _ = d[m]; if (!s && m >= t && m <= i || s && _.frame >= t && _.frame <= i) { const v = { frame: _.frame, value: _.value.clone ? _.value.clone() : _.value, inTangent: _.inTangent, outTangent: _.outTangent, interpolation: _.interpolation, lockedTangent: _.lockedTangent }; p === Number.MAX_VALUE && (p = v.frame), v.frame -= p, f.push(v); } } if (f.length === 0) { l.splice(o, 1), o--; continue; } n > f[0].frame && (n = f[0].frame), a < f[f.length - 1].frame && (a = f[f.length - 1].frame), h.setKeys(f, !0), u.animation = h; } return e._from = n, e._to = a, e; } /** * Returns the string "AnimationGroup" * @returns "AnimationGroup" */ getClassName() { return "AnimationGroup"; } /** * Creates a detailed string about the object * @param fullDetails defines if the output string will support multiple levels of logging within scene loading * @returns a string representing the object */ toString(e) { let t = "Name: " + this.name; return t += ", type: " + this.getClassName(), e && (t += ", from: " + this._from, t += ", to: " + this._to, t += ", isStarted: " + this._isStarted, t += ", speedRatio: " + this._speedRatio, t += ", targetedAnimations length: " + this._targetedAnimations.length, t += ", animatables length: " + this._animatables), t; } } class pce { /** * Initializes the path cursor * @param _path The path to track */ constructor(e) { this._path = e, this._onchange = new Array(), this.value = 0, this.animations = []; } /** * Gets the cursor point on the path * @returns A point on the path cursor at the cursor location */ getPoint() { const e = this._path.getPointAtLengthPosition(this.value); return new D(e.x, 0, e.y); } /** * Moves the cursor ahead by the step amount * @param step The amount to move the cursor forward * @returns This path cursor */ moveAhead(e = 2e-3) { return this.move(e), this; } /** * Moves the cursor behind by the step amount * @param step The amount to move the cursor back * @returns This path cursor */ moveBack(e = 2e-3) { return this.move(-e), this; } /** * Moves the cursor by the step amount * If the step amount is greater than one, an exception is thrown * @param step The amount to move the cursor * @returns This path cursor */ move(e) { if (Math.abs(e) > 1) throw "step size should be less than 1."; return this.value += e, this._ensureLimits(), 
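/* PathCursor sketch (illustrative only; pce is the path-cursor class in this bundle and
   "path2d" stands for a Path2D-like object providing getPointAtLengthPosition):
     const cursor = new pce(path2d);
     cursor.onchange((c) => console.log(c.value)); // runs after every move
     cursor.moveAhead(0.01);   // value goes from 0 to 0.01
     cursor.moveBack(0.05);    // wraps: 0.01 - 0.05 -> 0.96 after _ensureLimits()
     const p = cursor.getPoint(); // Vector3 in the XZ plane (y is always 0)
   move() throws if the absolute step size is greater than 1. */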
this._raiseOnChange(), this; } /** * Ensures that the value is limited between zero and one * @returns This path cursor */ _ensureLimits() { for (; this.value > 1; ) this.value -= 1; for (; this.value < 0; ) this.value += 1; return this; } /** * Runs onchange callbacks on change (used by the animation engine) * @returns This path cursor */ _raiseOnChange() { return this._onchange.forEach((e) => e(this)), this; } /** * Executes a function on change * @param f A path cursor onchange callback * @returns This path cursor */ onchange(e) { return this._onchange.push(e), this; } } var eL; (function(c) { c[c.Include = 0] = "Include", c[c.Exclude = 1] = "Exclude"; })(eL || (eL = {})); class _ce { /** * Creates a new mask * @param names The list of target names to add to the mask (optional) * @param mode Defines the mode for the mask (default: AnimationGroupMaskMode.Include) */ constructor(e, t = eL.Include) { this.mode = t, this.disabled = !1, this._targetNames = /* @__PURE__ */ new Set(), e && this.addTargetName(e); } /** * Adds one or several target names to the mask * @param name The name(s) to add to the mask */ addTargetName(e) { if (Array.isArray(e)) { for (const t of e) this._targetNames.add(t); return; } this._targetNames.add(e); } /** * Removes one or several target names from the mask * @param name The name(s) to remove from the mask */ removeTargetName(e) { if (Array.isArray(e)) { for (const t of e) this._targetNames.delete(t); return; } this._targetNames.delete(e); } /** * Checks if the mask includes a target name. * This method is intended to know if a given target name is included in the mask, not if the name is actually retained by the mask (see retainsTarget() instead). * @param name The name to check with the mask * @returns True if the mask includes the name, false otherwise */ hasTarget(e) { return this._targetNames.has(e); } /** * Checks if the mask retains a target name. * Note that in the "Exclude" mode, this will return false if the mask includes the name, and true otherwise! * This method is intended to know if a given target name is retained by the mask, not if the name is in the list of target names. * @param name The name to check with the mask * @returns True if the mask retains the name, false otherwise */ retainsTarget(e) { return this._targetNames.has(e) === (this.mode === eL.Include); } } function tL(c, e, t) { try { const i = c.next(); i.done ? e(i) : i.value ? i.value.then(() => { i.value = void 0, e(i); }, t) : e(i); } catch (i) { t(i); } } function bte(c = 25) { let e; return (t, i, r) => { const s = performance.now(); e === void 0 || s - e > c ? (e = s, setTimeout(() => { tL(t, i, r); }, 0)) : tL(t, i, r); }; } function pK(c, e, t, i, r) { const s = () => { let n; const a = (l) => { l.done ? t(l.value) : n === void 0 ? n = !0 : s(); }; do n = void 0, !r || !r.aborted ? 
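/* Animation-group mask sketch (illustrative only; _ce is the mask class and eL the
   Include/Exclude mode enum in this bundle; the target names are made up):
     const mask = new _ce(["head", "spine"], eL.Include);
     mask.retainsTarget("head");    // true  — name is in the list, mode is Include
     mask.retainsTarget("legs");    // false
     const exclude = new _ce("legs", eL.Exclude);
     exclude.retainsTarget("legs"); // false — in Exclude mode listed names are dropped
     exclude.retainsTarget("head"); // true
   Assigning such a mask to an animation group and calling syncWithMask() pauses the
   animatables whose target name is not retained. */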
e(c, a, i) : i(new Error("Aborted")), n === void 0 && (n = !1); while (n); }; s(); } function WB(c, e) { let t; return pK(c, tL, (i) => t = i, (i) => { throw i; }, e), t; } function jB(c, e, t) { return new Promise((i, r) => { pK(c, e, i, r, t); }); } function Ete(c, e) { return (...t) => WB(c(...t), e); } function mce(c, e, t) { return (...i) => jB(c(...i), e, t); } class Md { /** * Creates a Viewport object located at (x, y) and sized (width, height) * @param x defines viewport left coordinate * @param y defines viewport top coordinate * @param width defines the viewport width * @param height defines the viewport height */ constructor(e, t, i, r) { this.x = e, this.y = t, this.width = i, this.height = r; } /** * Creates a new viewport using absolute sizing (from 0-> width, 0-> height instead of 0->1) * @param renderWidth defines the rendering width * @param renderHeight defines the rendering height * @returns a new Viewport */ toGlobal(e, t) { return new Md(this.x * e, this.y * t, this.width * e, this.height * t); } /** * Stores absolute viewport value into a target viewport (from 0-> width, 0-> height instead of 0->1) * @param renderWidth defines the rendering width * @param renderHeight defines the rendering height * @param ref defines the target viewport * @returns the current viewport */ toGlobalToRef(e, t, i) { return i.x = this.x * e, i.y = this.y * t, i.width = this.width * e, i.height = this.height * t, this; } /** * Returns a new Viewport copied from the current one * @returns a new Viewport */ clone() { return new Md(this.x, this.y, this.width, this.height); } } class Ai extends In { /** * Define the current local position of the camera in the scene */ get position() { return this._position; } set position(e) { this._position = e; } /** * The vector the camera should consider as up. * (default is Vector3(0, 1, 0) aka Vector3.Up()) */ set upVector(e) { this._upVector = e; } get upVector() { return this._upVector; } /** * The screen area in scene units squared */ get screenArea() { var e, t, i, r; let s = 0, n = 0; if (this.mode === Ai.PERSPECTIVE_CAMERA) this.fovMode === Ai.FOVMODE_VERTICAL_FIXED ? (n = this.minZ * 2 * Math.tan(this.fov / 2), s = this.getEngine().getAspectRatio(this) * n) : (s = this.minZ * 2 * Math.tan(this.fov / 2), n = s / this.getEngine().getAspectRatio(this)); else { const a = this.getEngine().getRenderWidth() / 2, l = this.getEngine().getRenderHeight() / 2; s = ((e = this.orthoRight) !== null && e !== void 0 ? e : a) - ((t = this.orthoLeft) !== null && t !== void 0 ? t : -a), n = ((i = this.orthoTop) !== null && i !== void 0 ? i : l) - ((r = this.orthoBottom) !== null && r !== void 0 ? r : -l); } return s * n; } set orthoLeft(e) { this._orthoLeft = e; for (const t of this._rigCameras) t.orthoLeft = e; } get orthoLeft() { return this._orthoLeft; } set orthoRight(e) { this._orthoRight = e; for (const t of this._rigCameras) t.orthoRight = e; } get orthoRight() { return this._orthoRight; } set orthoBottom(e) { this._orthoBottom = e; for (const t of this._rigCameras) t.orthoBottom = e; } get orthoBottom() { return this._orthoBottom; } set orthoTop(e) { this._orthoTop = e; for (const t of this._rigCameras) t.orthoTop = e; } get orthoTop() { return this._orthoTop; } set mode(e) { this._mode = e; for (const t of this._rigCameras) t.mode = e; } get mode() { return this._mode; } /** * Instantiates a new camera object. * This should not be used directly but through the inherited cameras: ArcRotate, Free... 
* @see https://doc.babylonjs.com/features/featuresDeepDive/cameras * @param name Defines the name of the camera in the scene * @param position Defines the position of the camera * @param scene Defines the scene the camera belongs too * @param setActiveOnSceneIfNoneActive Defines if the camera should be set as active after creation if no other camera have been defined in the scene */ constructor(e, t, i, r = !0) { super(e, i), this._position = D.Zero(), this._upVector = D.Up(), this.oblique = null, this._orthoLeft = null, this._orthoRight = null, this._orthoBottom = null, this._orthoTop = null, this.fov = 0.8, this.projectionPlaneTilt = 0, this.minZ = 1, this.maxZ = 1e4, this.inertia = 0.9, this._mode = Ai.PERSPECTIVE_CAMERA, this.isIntermediate = !1, this.viewport = new Md(0, 0, 1, 1), this.layerMask = 268435455, this.fovMode = Ai.FOVMODE_VERTICAL_FIXED, this.cameraRigMode = Ai.RIG_MODE_NONE, this.customRenderTargets = [], this.outputRenderTarget = null, this.onViewMatrixChangedObservable = new Fe(), this.onProjectionMatrixChangedObservable = new Fe(), this.onAfterCheckInputsObservable = new Fe(), this.onRestoreStateObservable = new Fe(), this.isRigCamera = !1, this._rigCameras = new Array(), this._skipRendering = !1, this._projectionMatrix = new Ae(), this._postProcesses = new Array(), this._activeMeshes = new xc(256), this._globalPosition = D.Zero(), this._computedViewMatrix = Ae.Identity(), this._doNotComputeProjectionMatrix = !1, this._transformMatrix = Ae.Zero(), this._refreshFrustumPlanes = !0, this._absoluteRotation = Ze.Identity(), this._isCamera = !0, this._isLeftCamera = !1, this._isRightCamera = !1, this.getScene().addCamera(this), r && !this.getScene().activeCamera && (this.getScene().activeCamera = this), this.position = t, this.renderPassId = this.getScene().getEngine().createRenderPassId(`Camera ${e}`); } /** * Store current camera state (fov, position, etc..) * @returns the camera */ storeState() { return this._stateStored = !0, this._storedFov = this.fov, this; } /** * Restores the camera state values if it has been stored. You must call storeState() first */ _restoreStateValues() { return this._stateStored ? (this.fov = this._storedFov, !0) : !1; } /** * Restored camera state. You must call storeState() first. * @returns true if restored and false otherwise */ restoreState() { return this._restoreStateValues() ? (this.onRestoreStateObservable.notifyObservers(this), !0) : !1; } /** * Gets the class name of the camera. * @returns the class name */ getClassName() { return "Camera"; } /** * Gets a string representation of the camera useful for debug purpose. * @param fullDetails Defines that a more verbose level of logging is required * @returns the string representation */ toString(e) { let t = "Name: " + this.name; if (t += ", type: " + this.getClassName(), this.animations) for (let i = 0; i < this.animations.length; i++) t += ", animation[0]: " + this.animations[i].toString(e); return t; } /** * Automatically tilts the projection plane, using `projectionPlaneTilt`, to correct the perspective effect on vertical lines. */ applyVerticalCorrection() { const e = this.absoluteRotation.toEulerAngles(); this.projectionPlaneTilt = this._scene.useRightHandedSystem ? -e.x : e.x; } /** * Gets the current world space position of the camera. 
*/ get globalPosition() { return this._globalPosition; } /** * Gets the list of active meshes this frame (meshes no culled or excluded by lod s in the frame) * @returns the active meshe list */ getActiveMeshes() { return this._activeMeshes; } /** * Check whether a mesh is part of the current active mesh list of the camera * @param mesh Defines the mesh to check * @returns true if active, false otherwise */ isActiveMesh(e) { return this._activeMeshes.indexOf(e) !== -1; } /** * Is this camera ready to be used/rendered * @param completeCheck defines if a complete check (including post processes) has to be done (false by default) * @returns true if the camera is ready */ isReady(e = !1) { if (e) { for (const t of this._postProcesses) if (t && !t.isReady()) return !1; } return super.isReady(e); } /** @internal */ _initCache() { super._initCache(), this._cache.position = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cache.upVector = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cache.mode = void 0, this._cache.minZ = void 0, this._cache.maxZ = void 0, this._cache.fov = void 0, this._cache.fovMode = void 0, this._cache.aspectRatio = void 0, this._cache.orthoLeft = void 0, this._cache.orthoRight = void 0, this._cache.orthoBottom = void 0, this._cache.orthoTop = void 0, this._cache.obliqueAngle = void 0, this._cache.obliqueLength = void 0, this._cache.obliqueOffset = void 0, this._cache.renderWidth = void 0, this._cache.renderHeight = void 0; } /** * @internal */ _updateCache(e) { e || super._updateCache(), this._cache.position.copyFrom(this.position), this._cache.upVector.copyFrom(this.upVector); } /** @internal */ _isSynchronized() { return this._isSynchronizedViewMatrix() && this._isSynchronizedProjectionMatrix(); } /** @internal */ _isSynchronizedViewMatrix() { return super._isSynchronized() ? this._cache.position.equals(this.position) && this._cache.upVector.equals(this.upVector) && this.isSynchronizedWithParent() : !1; } /** @internal */ _isSynchronizedProjectionMatrix() { let e = this._cache.mode === this.mode && this._cache.minZ === this.minZ && this._cache.maxZ === this.maxZ; if (!e) return !1; const t = this.getEngine(); return this.mode === Ai.PERSPECTIVE_CAMERA ? e = this._cache.fov === this.fov && this._cache.fovMode === this.fovMode && this._cache.aspectRatio === t.getAspectRatio(this) && this._cache.projectionPlaneTilt === this.projectionPlaneTilt : (e = this._cache.orthoLeft === this.orthoLeft && this._cache.orthoRight === this.orthoRight && this._cache.orthoBottom === this.orthoBottom && this._cache.orthoTop === this.orthoTop && this._cache.renderWidth === t.getRenderWidth() && this._cache.renderHeight === t.getRenderHeight(), this.oblique && (e = e && this._cache.obliqueAngle === this.oblique.angle && this._cache.obliqueLength === this.oblique.length && this._cache.obliqueOffset === this.oblique.offset)), e; } /** * Attach the input controls to a specific dom element to get the input from. * This function is here because typescript removes the typing of the last function. * @param _ignored defines an ignored parameter kept for backward compatibility. * @param _noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e, t) { } /** * Detach the current controls from the specified dom element. * This function is here because typescript removes the typing of the last function. 
* @param _ignored defines an ignored parameter kept for backward compatibility. */ detachControl(e) { } /** * Update the camera state according to the different inputs gathered during the frame. */ update() { this._checkInputs(), this.cameraRigMode !== Ai.RIG_MODE_NONE && this._updateRigCameras(), this.getViewMatrix(), this.getProjectionMatrix(); } /** @internal */ _checkInputs() { this.onAfterCheckInputsObservable.notifyObservers(this); } /** @internal */ get rigCameras() { return this._rigCameras; } /** * Gets the post process used by the rig cameras */ get rigPostProcess() { return this._rigPostProcess; } /** * Internal, gets the first post process. * @returns the first post process to be run on this camera. */ _getFirstPostProcess() { for (let e = 0; e < this._postProcesses.length; e++) if (this._postProcesses[e] !== null) return this._postProcesses[e]; return null; } _cascadePostProcessesToRigCams() { const e = this._getFirstPostProcess(); e && e.markTextureDirty(); for (let t = 0, i = this._rigCameras.length; t < i; t++) { const r = this._rigCameras[t], s = r._rigPostProcess; s ? (s.getEffectName() === "pass" && (r.isIntermediate = this._postProcesses.length === 0), r._postProcesses = this._postProcesses.slice(0).concat(s), s.markTextureDirty()) : r._postProcesses = this._postProcesses.slice(0); } } /** * Attach a post process to the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#attach-postprocess * @param postProcess The post process to attach to the camera * @param insertAt The position of the post process in case several of them are in use in the scene * @returns the position the post process has been inserted at */ attachPostProcess(e, t = null) { return !e.isReusable() && this._postProcesses.indexOf(e) > -1 ? (Ce.Error("You're trying to reuse a post process not defined as reusable."), 0) : (t == null || t < 0 ? this._postProcesses.push(e) : this._postProcesses[t] === null ? this._postProcesses[t] = e : this._postProcesses.splice(t, 0, e), this._cascadePostProcessesToRigCams(), this._scene.prePassRenderer && this._scene.prePassRenderer.markAsDirty(), this._postProcesses.indexOf(e)); } /** * Detach a post process to the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#attach-postprocess * @param postProcess The post process to detach from the camera */ detachPostProcess(e) { const t = this._postProcesses.indexOf(e); t !== -1 && (this._postProcesses[t] = null), this._scene.prePassRenderer && this._scene.prePassRenderer.markAsDirty(), this._cascadePostProcessesToRigCams(); } /** * Gets the current world matrix of the camera */ getWorldMatrix() { return this._isSynchronizedViewMatrix() ? this._worldMatrix : (this.getViewMatrix(), this._worldMatrix); } /** @internal */ _getViewMatrix() { return Ae.Identity(); } /** * Gets the current view matrix of the camera. * @param force forces the camera to recompute the matrix without looking at the cached state * @returns the view matrix */ getViewMatrix(e) { return !e && this._isSynchronizedViewMatrix() ? 
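/* Camera state / post-process sketch (illustrative only; Ai is the camera base class in this
   bundle; "camera" and "blurPostProcess" are assumed to already exist):
     camera.storeState();              // remembers the current fov
     camera.fov = 1.2;
     camera.restoreState();            // fov is back to the stored value, returns true
     const index = camera.attachPostProcess(blurPostProcess); // appended at the end
     camera.detachPostProcess(blurPostProcess);
   attachPostProcess(postProcess, insertAt) returns the position the post process was inserted
   at, and logs an error (returning 0) if a non-reusable post process is attached twice. */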
this._computedViewMatrix : (this.updateCache(), this._computedViewMatrix = this._getViewMatrix(), this._currentRenderId = this.getScene().getRenderId(), this._childUpdateId++, this._refreshFrustumPlanes = !0, this._cameraRigParams && this._cameraRigParams.vrPreViewMatrix && this._computedViewMatrix.multiplyToRef(this._cameraRigParams.vrPreViewMatrix, this._computedViewMatrix), this.parent && this.parent.onViewMatrixChangedObservable && this.parent.onViewMatrixChangedObservable.notifyObservers(this.parent), this.onViewMatrixChangedObservable.notifyObservers(this), this._computedViewMatrix.invertToRef(this._worldMatrix), this._computedViewMatrix); } /** * Freeze the projection matrix. * It will prevent the cache check of the camera projection compute and can speed up perf * if no parameter of the camera are meant to change * @param projection Defines manually a projection if necessary */ freezeProjectionMatrix(e) { this._doNotComputeProjectionMatrix = !0, e !== void 0 && (this._projectionMatrix = e); } /** * Unfreeze the projection matrix if it has previously been freezed by freezeProjectionMatrix. */ unfreezeProjectionMatrix() { this._doNotComputeProjectionMatrix = !1; } /** * Gets the current projection matrix of the camera. * @param force forces the camera to recompute the matrix without looking at the cached state * @returns the projection matrix */ getProjectionMatrix(e) { var t, i, r, s, n, a, l, o, u, h, d, f, p, m, _, v, C, x, b; if (this._doNotComputeProjectionMatrix || !e && this._isSynchronizedProjectionMatrix()) return this._projectionMatrix; this._cache.mode = this.mode, this._cache.minZ = this.minZ, this._cache.maxZ = this.maxZ, this._refreshFrustumPlanes = !0; const S = this.getEngine(), M = this.getScene(), R = S.useReverseDepthBuffer; if (this.mode === Ai.PERSPECTIVE_CAMERA) { this._cache.fov = this.fov, this._cache.fovMode = this.fovMode, this._cache.aspectRatio = S.getAspectRatio(this), this._cache.projectionPlaneTilt = this.projectionPlaneTilt, this.minZ <= 0 && (this.minZ = 0.1); let w; M.useRightHandedSystem ? w = Ae.PerspectiveFovRHToRef : w = Ae.PerspectiveFovLHToRef, w(this.fov, S.getAspectRatio(this), R ? this.maxZ : this.minZ, R ? this.minZ : this.maxZ, this._projectionMatrix, this.fovMode === Ai.FOVMODE_VERTICAL_FIXED, S.isNDCHalfZRange, this.projectionPlaneTilt, R); } else { const w = S.getRenderWidth() / 2, V = S.getRenderHeight() / 2; M.useRightHandedSystem ? this.oblique ? Ae.ObliqueOffCenterRHToRef((t = this.orthoLeft) !== null && t !== void 0 ? t : -w, (i = this.orthoRight) !== null && i !== void 0 ? i : w, (r = this.orthoBottom) !== null && r !== void 0 ? r : -V, (s = this.orthoTop) !== null && s !== void 0 ? s : V, R ? this.maxZ : this.minZ, R ? this.minZ : this.maxZ, this.oblique.length, this.oblique.angle, this._computeObliqueDistance(this.oblique.offset), this._projectionMatrix, S.isNDCHalfZRange) : Ae.OrthoOffCenterRHToRef((n = this.orthoLeft) !== null && n !== void 0 ? n : -w, (a = this.orthoRight) !== null && a !== void 0 ? a : w, (l = this.orthoBottom) !== null && l !== void 0 ? l : -V, (o = this.orthoTop) !== null && o !== void 0 ? o : V, R ? this.maxZ : this.minZ, R ? this.minZ : this.maxZ, this._projectionMatrix, S.isNDCHalfZRange) : this.oblique ? Ae.ObliqueOffCenterLHToRef((u = this.orthoLeft) !== null && u !== void 0 ? u : -w, (h = this.orthoRight) !== null && h !== void 0 ? h : w, (d = this.orthoBottom) !== null && d !== void 0 ? d : -V, (f = this.orthoTop) !== null && f !== void 0 ? f : V, R ? this.maxZ : this.minZ, R ? 
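/* Projection-matrix freezing sketch (illustrative only; "camera" is a hypothetical instance of
   the camera class Ai above):
     camera.freezeProjectionMatrix();      // skip cache checks and recomputation
     // ...render many frames with unchanged camera parameters...
     camera.unfreezeProjectionMatrix();    // projection is recomputed again
     const proj     = camera.getProjectionMatrix();      // cached matrix while frozen
     const viewProj = camera.getTransformationMatrix();  // view multiplied by projection
   freezeProjectionMatrix(matrix) can also receive an explicit matrix to use as-is. */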
this.minZ : this.maxZ, this.oblique.length, this.oblique.angle, this._computeObliqueDistance(this.oblique.offset), this._projectionMatrix, S.isNDCHalfZRange) : Ae.OrthoOffCenterLHToRef((p = this.orthoLeft) !== null && p !== void 0 ? p : -w, (m = this.orthoRight) !== null && m !== void 0 ? m : w, (_ = this.orthoBottom) !== null && _ !== void 0 ? _ : -V, (v = this.orthoTop) !== null && v !== void 0 ? v : V, R ? this.maxZ : this.minZ, R ? this.minZ : this.maxZ, this._projectionMatrix, S.isNDCHalfZRange), this._cache.orthoLeft = this.orthoLeft, this._cache.orthoRight = this.orthoRight, this._cache.orthoBottom = this.orthoBottom, this._cache.orthoTop = this.orthoTop, this._cache.obliqueAngle = (C = this.oblique) === null || C === void 0 ? void 0 : C.angle, this._cache.obliqueLength = (x = this.oblique) === null || x === void 0 ? void 0 : x.length, this._cache.obliqueOffset = (b = this.oblique) === null || b === void 0 ? void 0 : b.offset, this._cache.renderWidth = S.getRenderWidth(), this._cache.renderHeight = S.getRenderHeight(); } return this.onProjectionMatrixChangedObservable.notifyObservers(this), this._projectionMatrix; } /** * Gets the transformation matrix (ie. the multiplication of view by projection matrices) * @returns a Matrix */ getTransformationMatrix() { return this._computedViewMatrix.multiplyToRef(this._projectionMatrix, this._transformMatrix), this._transformMatrix; } _computeObliqueDistance(e) { const t = this, i = this; return (t.radius || (i.target ? D.Distance(this.position, i.target) : this.position.length())) + e; } _updateFrustumPlanes() { this._refreshFrustumPlanes && (this.getTransformationMatrix(), this._frustumPlanes ? gm.GetPlanesToRef(this._transformMatrix, this._frustumPlanes) : this._frustumPlanes = gm.GetPlanes(this._transformMatrix), this._refreshFrustumPlanes = !1); } /** * Checks if a cullable object (mesh...) is in the camera frustum * This checks the bounding box center. See isCompletelyInFrustum for a full bounding check * @param target The object to check * @param checkRigCameras If the rig cameras should be checked (eg. with VR camera both eyes should be checked) (Default: false) * @returns true if the object is in frustum otherwise false */ isInFrustum(e, t = !1) { if (this._updateFrustumPlanes(), t && this.rigCameras.length > 0) { let i = !1; return this.rigCameras.forEach((r) => { r._updateFrustumPlanes(), i = i || e.isInFrustum(r._frustumPlanes); }), i; } else return e.isInFrustum(this._frustumPlanes); } /** * Checks if a cullable object (mesh...) is in the camera frustum * Unlike isInFrustum this checks the full bounding box * @param target The object to check * @returns true if the object is in frustum otherwise false */ isCompletelyInFrustum(e) { return this._updateFrustumPlanes(), e.isCompletelyInFrustum(this._frustumPlanes); } /** * Gets a ray in the forward direction from the camera. * @param length Defines the length of the ray to create * @param transform Defines the transform to apply to the ray, by default the world matrix is used to create a workd space ray * @param origin Defines the start point of the ray which defaults to the camera position * @returns the forward ray */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getForwardRay(e = 100, t, i) { throw yr("Ray"); } /** * Gets a ray in the forward direction from the camera. 
* @param refRay the ray to (re)use when setting the values * @param length Defines the length of the ray to create * @param transform Defines the transform to apply to the ray, by default the world matrx is used to create a workd space ray * @param origin Defines the start point of the ray which defaults to the camera position * @returns the forward ray */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getForwardRayToRef(e, t = 100, i, r) { throw yr("Ray"); } /** * Releases resources associated with this node. * @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { for (this.onViewMatrixChangedObservable.clear(), this.onProjectionMatrixChangedObservable.clear(), this.onAfterCheckInputsObservable.clear(), this.onRestoreStateObservable.clear(), this.inputs && this.inputs.clear(), this.getScene().stopAnimation(this), this.getScene().removeCamera(this); this._rigCameras.length > 0; ) { const r = this._rigCameras.pop(); r && r.dispose(); } if (this._parentContainer) { const r = this._parentContainer.cameras.indexOf(this); r > -1 && this._parentContainer.cameras.splice(r, 1), this._parentContainer = null; } if (this._rigPostProcess) this._rigPostProcess.dispose(this), this._rigPostProcess = null, this._postProcesses.length = 0; else if (this.cameraRigMode !== Ai.RIG_MODE_NONE) this._rigPostProcess = null, this._postProcesses.length = 0; else { let r = this._postProcesses.length; for (; --r >= 0; ) { const s = this._postProcesses[r]; s && s.dispose(this); } } let i = this.customRenderTargets.length; for (; --i >= 0; ) this.customRenderTargets[i].dispose(); this.customRenderTargets.length = 0, this._activeMeshes.dispose(), this.getScene().getEngine().releaseRenderPassId(this.renderPassId), super.dispose(e, t); } /** * Gets the left camera of a rig setup in case of Rigged Camera */ get isLeftCamera() { return this._isLeftCamera; } /** * Gets the right camera of a rig setup in case of Rigged Camera */ get isRightCamera() { return this._isRightCamera; } /** * Gets the left camera of a rig setup in case of Rigged Camera */ get leftCamera() { return this._rigCameras.length < 1 ? null : this._rigCameras[0]; } /** * Gets the right camera of a rig setup in case of Rigged Camera */ get rightCamera() { return this._rigCameras.length < 2 ? null : this._rigCameras[1]; } /** * Gets the left camera target of a rig setup in case of Rigged Camera * @returns the target position */ getLeftTarget() { return this._rigCameras.length < 1 ? null : this._rigCameras[0].getTarget(); } /** * Gets the right camera target of a rig setup in case of Rigged Camera * @returns the target position */ getRightTarget() { return this._rigCameras.length < 2 ? 
null : this._rigCameras[1].getTarget(); } /** * @internal */ setCameraRigMode(e, t) { if (this.cameraRigMode !== e) { for (; this._rigCameras.length > 0; ) { const i = this._rigCameras.pop(); i && i.dispose(); } if (this.cameraRigMode = e, this._cameraRigParams = {}, this._cameraRigParams.interaxialDistance = t.interaxialDistance || 0.0637, this._cameraRigParams.stereoHalfAngle = Ve.ToRadians(this._cameraRigParams.interaxialDistance / 0.0637), this.cameraRigMode !== Ai.RIG_MODE_NONE) { const i = this.createRigCamera(this.name + "_L", 0); i && (i._isLeftCamera = !0); const r = this.createRigCamera(this.name + "_R", 1); r && (r._isRightCamera = !0), i && r && (this._rigCameras.push(i), this._rigCameras.push(r)); } this._setRigMode(t), this._cascadePostProcessesToRigCams(), this.update(); } } // eslint-disable-next-line @typescript-eslint/no-unused-vars _setRigMode(e) { } /** @internal */ _getVRProjectionMatrix() { return Ae.PerspectiveFovLHToRef(this._cameraRigParams.vrMetrics.aspectRatioFov, this._cameraRigParams.vrMetrics.aspectRatio, this.minZ, this.maxZ, this._cameraRigParams.vrWorkMatrix, !0, this.getEngine().isNDCHalfZRange), this._cameraRigParams.vrWorkMatrix.multiplyToRef(this._cameraRigParams.vrHMatrix, this._projectionMatrix), this._projectionMatrix; } /** * @internal */ setCameraRigParameter(e, t) { this._cameraRigParams || (this._cameraRigParams = {}), this._cameraRigParams[e] = t, e === "interaxialDistance" && (this._cameraRigParams.stereoHalfAngle = Ve.ToRadians(t / 0.0637)); } /** * needs to be overridden by children so sub has required properties to be copied * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars createRigCamera(e, t) { return null; } /** * May need to be overridden by children * @internal */ _updateRigCameras() { for (let e = 0; e < this._rigCameras.length; e++) this._rigCameras[e].minZ = this.minZ, this._rigCameras[e].maxZ = this.maxZ, this._rigCameras[e].fov = this.fov, this._rigCameras[e].upVector.copyFrom(this.upVector); this.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH && (this._rigCameras[0].viewport = this._rigCameras[1].viewport = this.viewport); } /** @internal */ _setupInputs() { } /** * Serialiaze the camera setup to a json representation * @returns the JSON representation */ serialize() { const e = St.Serialize(this); return e.uniqueId = this.uniqueId, e.type = this.getClassName(), this.parent && this.parent._serializeAsParent(e), this.inputs && this.inputs.serialize(e), St.AppendSerializedAnimations(this, e), e.ranges = this.serializeAnimationRanges(), e.isEnabled = this.isEnabled(), e; } /** * Clones the current camera. * @param name The cloned camera name * @param newParent The cloned camera's new parent (none by default) * @returns the cloned camera */ clone(e, t = null) { const i = St.Clone(Ai.GetConstructorFromName(this.getClassName(), e, this.getScene(), this.interaxialDistance, this.isStereoscopicSideBySide), this); return i.name = e, i.parent = t, this.onClonedObservable.notifyObservers(i), i; } /** * Gets the direction of the camera relative to a given local axis. * @param localAxis Defines the reference axis to provide a relative direction. 
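* A minimal usage sketch, assuming `camera` and a `targetMesh` already exist and using the
* public BABYLON.* names rather than the minified identifiers in this bundle:
* @example
* // World-space direction of the camera's local +Z axis (i.e. where the camera is facing):
* const forward = camera.getDirection(BABYLON.Axis.Z);
* // Place a mesh one unit in front of the camera:
* targetMesh.position = camera.position.add(forward);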
* @returns the direction */ getDirection(e) { const t = D.Zero(); return this.getDirectionToRef(e, t), t; } /** * Returns the current camera absolute rotation */ get absoluteRotation() { return this.getWorldMatrix().decompose(void 0, this._absoluteRotation), this._absoluteRotation; } /** * Gets the direction of the camera relative to a given local axis into a passed vector. * @param localAxis Defines the reference axis to provide a relative direction. * @param result Defines the vector to store the result in */ getDirectionToRef(e, t) { D.TransformNormalToRef(e, this.getWorldMatrix(), t); } /** * Gets a camera constructor for a given camera type * @param type The type of the camera to construct (should be equal to one of the camera class name) * @param name The name of the camera the result will be able to instantiate * @param scene The scene the result will construct the camera in * @param interaxial_distance In case of stereoscopic setup, the distance between both eyes * @param isStereoscopicSideBySide In case of stereoscopic setup, should the sereo be side b side * @returns a factory method to construct the camera */ // eslint-disable-next-line @typescript-eslint/naming-convention static GetConstructorFromName(e, t, i, r = 0, s = !0) { const n = In.Construct(e, t, i, { // eslint-disable-next-line @typescript-eslint/naming-convention interaxial_distance: r, isStereoscopicSideBySide: s }); return n || (() => Ai._CreateDefaultParsedCamera(t, i)); } /** * Compute the world matrix of the camera. * @returns the camera world matrix */ computeWorldMatrix() { return this.getWorldMatrix(); } /** * Parse a JSON and creates the camera from the parsed information * @param parsedCamera The JSON to parse * @param scene The scene to instantiate the camera in * @returns the newly constructed camera */ static Parse(e, t) { const i = e.type, r = Ai.GetConstructorFromName(i, e.name, t, e.interaxial_distance, e.isStereoscopicSideBySide), s = St.Parse(r, e, t); if (e.parentId !== void 0 && (s._waitingParentId = e.parentId), e.parentInstanceIndex !== void 0 && (s._waitingParentInstanceIndex = e.parentInstanceIndex), s.inputs && (s.inputs.parse(e), s._setupInputs()), e.upVector && (s.upVector = D.FromArray(e.upVector)), s.setPosition && (s.position.copyFromFloats(0, 0, 0), s.setPosition(D.FromArray(e.position))), e.target && s.setTarget && s.setTarget(D.FromArray(e.target)), e.cameraRigMode) { const n = e.interaxial_distance ? { interaxialDistance: e.interaxial_distance } : {}; s.setCameraRigMode(e.cameraRigMode, n); } if (e.animations) { for (let n = 0; n < e.animations.length; n++) { const a = e.animations[n], l = Qo("BABYLON.Animation"); l && s.animations.push(l.Parse(a)); } In.ParseAnimationRanges(s, e, t); } return e.autoAnimate && t.beginAnimation(s, e.autoAnimateFrom, e.autoAnimateTo, e.autoAnimateLoop, e.autoAnimateSpeed || 1), e.isEnabled !== void 0 && s.setEnabled(e.isEnabled), s; } /** @internal */ _calculateHandednessMultiplier() { let e = this.getScene().useRightHandedSystem ? 
-1 : 1; return this.parent && this.parent._getWorldMatrixDeterminant() < 0 && (e *= -1), e; } } Ai._CreateDefaultParsedCamera = (c, e) => { throw yr("UniversalCamera"); }; Ai.PERSPECTIVE_CAMERA = 0; Ai.ORTHOGRAPHIC_CAMERA = 1; Ai.FOVMODE_VERTICAL_FIXED = 0; Ai.FOVMODE_HORIZONTAL_FIXED = 1; Ai.RIG_MODE_NONE = 0; Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH = 10; Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL = 11; Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED = 12; Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER = 13; Ai.RIG_MODE_STEREOSCOPIC_INTERLACED = 14; Ai.RIG_MODE_VR = 20; Ai.RIG_MODE_CUSTOM = 22; Ai.ForceAttachControlToAlwaysPreventDefault = !1; F([ oo("position") ], Ai.prototype, "_position", void 0); F([ oo("upVector") ], Ai.prototype, "_upVector", void 0); F([ W() ], Ai.prototype, "orthoLeft", null); F([ W() ], Ai.prototype, "orthoRight", null); F([ W() ], Ai.prototype, "orthoBottom", null); F([ W() ], Ai.prototype, "orthoTop", null); F([ W() ], Ai.prototype, "fov", void 0); F([ W() ], Ai.prototype, "projectionPlaneTilt", void 0); F([ W() ], Ai.prototype, "minZ", void 0); F([ W() ], Ai.prototype, "maxZ", void 0); F([ W() ], Ai.prototype, "inertia", void 0); F([ W() ], Ai.prototype, "mode", null); F([ W() ], Ai.prototype, "layerMask", void 0); F([ W() ], Ai.prototype, "fovMode", void 0); F([ W() ], Ai.prototype, "cameraRigMode", void 0); F([ W() ], Ai.prototype, "interaxialDistance", void 0); F([ W() ], Ai.prototype, "isStereoscopicSideBySide", void 0); class sB { constructor(e, t, i) { this.bu = e, this.bv = t, this.distance = i, this.faceId = 0, this.subMeshId = 0; } } class fg { /** * Creates a new bounding box * @param min defines the minimum vector (in local space) * @param max defines the maximum vector (in local space) * @param worldMatrix defines the new world matrix */ constructor(e, t, i) { this.vectors = kc.BuildArray(8, D.Zero), this.center = D.Zero(), this.centerWorld = D.Zero(), this.extendSize = D.Zero(), this.extendSizeWorld = D.Zero(), this.directions = kc.BuildArray(3, D.Zero), this.vectorsWorld = kc.BuildArray(8, D.Zero), this.minimumWorld = D.Zero(), this.maximumWorld = D.Zero(), this.minimum = D.Zero(), this.maximum = D.Zero(), this._drawWrapperFront = null, this._drawWrapperBack = null, this.reConstruct(e, t, i); } // Methods /** * Recreates the entire bounding box from scratch as if we call the constructor in place * @param min defines the new minimum vector (in local space) * @param max defines the new maximum vector (in local space) * @param worldMatrix defines the new world matrix */ reConstruct(e, t, i) { const r = e.x, s = e.y, n = e.z, a = t.x, l = t.y, o = t.z, u = this.vectors; this.minimum.copyFromFloats(r, s, n), this.maximum.copyFromFloats(a, l, o), u[0].copyFromFloats(r, s, n), u[1].copyFromFloats(a, l, o), u[2].copyFromFloats(a, s, n), u[3].copyFromFloats(r, l, n), u[4].copyFromFloats(r, s, o), u[5].copyFromFloats(a, l, n), u[6].copyFromFloats(r, l, o), u[7].copyFromFloats(a, s, o), t.addToRef(e, this.center).scaleInPlace(0.5), t.subtractToRef(e, this.extendSize).scaleInPlace(0.5), this._worldMatrix = i || Ae.IdentityReadOnly, this._update(this._worldMatrix); } /** * Scale the current bounding box by applying a scale factor * @param factor defines the scale factor to apply * @returns the current bounding box */ scale(e) { const t = fg._TmpVector3, i = this.maximum.subtractToRef(this.minimum, t[0]), r = i.length(); i.normalizeFromLength(r); const s = r * e, n = i.scaleInPlace(s * 0.5), a = this.center.subtractToRef(n, t[1]), l = this.center.addToRef(n, t[2]); return 
this.reConstruct(a, l, this._worldMatrix), this; } /** * Gets the world matrix of the bounding box * @returns a matrix */ getWorldMatrix() { return this._worldMatrix; } /** * @internal */ _update(e) { const t = this.minimumWorld, i = this.maximumWorld, r = this.directions, s = this.vectorsWorld, n = this.vectors; if (e.isIdentity()) { t.copyFrom(this.minimum), i.copyFrom(this.maximum); for (let a = 0; a < 8; ++a) s[a].copyFrom(n[a]); this.extendSizeWorld.copyFrom(this.extendSize), this.centerWorld.copyFrom(this.center); } else { t.setAll(Number.MAX_VALUE), i.setAll(-Number.MAX_VALUE); for (let a = 0; a < 8; ++a) { const l = s[a]; D.TransformCoordinatesToRef(n[a], e, l), t.minimizeInPlace(l), i.maximizeInPlace(l); } i.subtractToRef(t, this.extendSizeWorld).scaleInPlace(0.5), i.addToRef(t, this.centerWorld).scaleInPlace(0.5); } D.FromArrayToRef(e.m, 0, r[0]), D.FromArrayToRef(e.m, 4, r[1]), D.FromArrayToRef(e.m, 8, r[2]), this._worldMatrix = e; } /** * Tests if the bounding box is intersecting the frustum planes * @param frustumPlanes defines the frustum planes to test * @returns true if there is an intersection */ isInFrustum(e) { return fg.IsInFrustum(this.vectorsWorld, e); } /** * Tests if the bounding box is entirely inside the frustum planes * @param frustumPlanes defines the frustum planes to test * @returns true if there is an inclusion */ isCompletelyInFrustum(e) { return fg.IsCompletelyInFrustum(this.vectorsWorld, e); } /** * Tests if a point is inside the bounding box * @param point defines the point to test * @returns true if the point is inside the bounding box */ intersectsPoint(e) { const t = this.minimumWorld, i = this.maximumWorld, r = t.x, s = t.y, n = t.z, a = i.x, l = i.y, o = i.z, u = e.x, h = e.y, d = e.z, f = -Sr; return !(a - u < f || f > u - r || l - h < f || f > h - s || o - d < f || f > d - n); } /** * Tests if the bounding box intersects with a bounding sphere * @param sphere defines the sphere to test * @returns true if there is an intersection */ intersectsSphere(e) { return fg.IntersectsSphere(this.minimumWorld, this.maximumWorld, e.centerWorld, e.radiusWorld); } /** * Tests if the bounding box intersects with a box defined by a min and max vectors * @param min defines the min vector to use * @param max defines the max vector to use * @returns true if there is an intersection */ intersectsMinMax(e, t) { const i = this.minimumWorld, r = this.maximumWorld, s = i.x, n = i.y, a = i.z, l = r.x, o = r.y, u = r.z, h = e.x, d = e.y, f = e.z, p = t.x, m = t.y, _ = t.z; return !(l < h || s > p || o < d || n > m || u < f || a > _); } /** * Disposes the resources of the class */ dispose() { var e, t; (e = this._drawWrapperFront) === null || e === void 0 || e.dispose(), (t = this._drawWrapperBack) === null || t === void 0 || t.dispose(); } // Statics /** * Tests if two bounding boxes are intersections * @param box0 defines the first box to test * @param box1 defines the second box to test * @returns true if there is an intersection */ static Intersects(e, t) { return e.intersectsMinMax(t.minimumWorld, t.maximumWorld); } /** * Tests if a bounding box defines by a min/max vectors intersects a sphere * @param minPoint defines the minimum vector of the bounding box * @param maxPoint defines the maximum vector of the bounding box * @param sphereCenter defines the sphere center * @param sphereRadius defines the sphere radius * @returns true if there is an intersection */ static IntersectsSphere(e, t, i, r) { const s = fg._TmpVector3[0]; return D.ClampToRef(i, e, t, s), 
D.DistanceSquared(i, s) <= r * r; } /** * Tests if a bounding box defined with 8 vectors is entirely inside frustum planes * @param boundingVectors defines an array of 8 vectors representing a bounding box * @param frustumPlanes defines the frustum planes to test * @returns true if there is an inclusion */ static IsCompletelyInFrustum(e, t) { for (let i = 0; i < 6; ++i) { const r = t[i]; for (let s = 0; s < 8; ++s) if (r.dotCoordinate(e[s]) < 0) return !1; } return !0; } /** * Tests if a bounding box defined with 8 vectors intersects frustum planes * @param boundingVectors defines an array of 8 vectors representing a bounding box * @param frustumPlanes defines the frustum planes to test * @returns true if there is an intersection */ static IsInFrustum(e, t) { for (let i = 0; i < 6; ++i) { let r = !0; const s = t[i]; for (let n = 0; n < 8; ++n) if (s.dotCoordinate(e[n]) >= 0) { r = !1; break; } if (r) return !1; } return !0; } } fg._TmpVector3 = kc.BuildArray(3, D.Zero); class e6 { /** * Creates a new bounding sphere * @param min defines the minimum vector (in local space) * @param max defines the maximum vector (in local space) * @param worldMatrix defines the new world matrix */ constructor(e, t, i) { this.center = D.Zero(), this.centerWorld = D.Zero(), this.minimum = D.Zero(), this.maximum = D.Zero(), this.reConstruct(e, t, i); } /** * Recreates the entire bounding sphere from scratch as if we call the constructor in place * @param min defines the new minimum vector (in local space) * @param max defines the new maximum vector (in local space) * @param worldMatrix defines the new world matrix */ reConstruct(e, t, i) { this.minimum.copyFrom(e), this.maximum.copyFrom(t); const r = D.Distance(e, t); t.addToRef(e, this.center).scaleInPlace(0.5), this.radius = r * 0.5, this._update(i || Ae.IdentityReadOnly); } /** * Scale the current bounding sphere by applying a scale factor * @param factor defines the scale factor to apply * @returns the current bounding box */ scale(e) { const t = this.radius * e, i = e6._TmpVector3, r = i[0].setAll(t), s = this.center.subtractToRef(r, i[1]), n = this.center.addToRef(r, i[2]); return this.reConstruct(s, n, this._worldMatrix), this; } /** * Gets the world matrix of the bounding box * @returns a matrix */ getWorldMatrix() { return this._worldMatrix; } // Methods /** * @internal */ _update(e) { if (e.isIdentity()) this.centerWorld.copyFrom(this.center), this.radiusWorld = this.radius; else { D.TransformCoordinatesToRef(this.center, e, this.centerWorld); const t = e6._TmpVector3[0]; D.TransformNormalFromFloatsToRef(1, 1, 1, e, t), this.radiusWorld = Math.max(Math.abs(t.x), Math.abs(t.y), Math.abs(t.z)) * this.radius; } } /** * Tests if the bounding sphere is intersecting the frustum planes * @param frustumPlanes defines the frustum planes to test * @returns true if there is an intersection */ isInFrustum(e) { const t = this.centerWorld, i = this.radiusWorld; for (let r = 0; r < 6; r++) if (e[r].dotCoordinate(t) <= -i) return !1; return !0; } /** * Tests if the bounding sphere center is in between the frustum planes. * Used for optimistic fast inclusion. 
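* A minimal usage sketch, assuming `scene` and `mesh` already exist and that BABYLON.Frustum
* is the public counterpart of the minified frustum helper used in this bundle:
* @example
* const planes = BABYLON.Frustum.GetPlanes(scene.getTransformMatrix());
* const sphere = mesh.getBoundingInfo().boundingSphere;
* // Optimistic fast path: if the center passes every plane, the object is certainly visible.
* if (sphere.isCenterInFrustum(planes)) console.log("sphere center is inside the frustum");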
* @param frustumPlanes defines the frustum planes to test * @returns true if the sphere center is in between the frustum planes */ isCenterInFrustum(e) { const t = this.centerWorld; for (let i = 0; i < 6; i++) if (e[i].dotCoordinate(t) < 0) return !1; return !0; } /** * Tests if a point is inside the bounding sphere * @param point defines the point to test * @returns true if the point is inside the bounding sphere */ intersectsPoint(e) { const t = D.DistanceSquared(this.centerWorld, e); return !(this.radiusWorld * this.radiusWorld < t); } // Statics /** * Checks if two sphere intersect * @param sphere0 sphere 0 * @param sphere1 sphere 1 * @returns true if the spheres intersect */ static Intersects(e, t) { const i = D.DistanceSquared(e.centerWorld, t.centerWorld), r = e.radiusWorld + t.radiusWorld; return !(r * r < i); } /** * Creates a sphere from a center and a radius * @param center The center * @param radius radius * @param matrix Optional worldMatrix * @returns The sphere */ static CreateFromCenterAndRadius(e, t, i) { this._TmpVector3[0].copyFrom(e), this._TmpVector3[1].copyFromFloats(0, 0, t), this._TmpVector3[2].copyFrom(e), this._TmpVector3[0].addInPlace(this._TmpVector3[1]), this._TmpVector3[2].subtractInPlace(this._TmpVector3[1]); const r = new e6(this._TmpVector3[0], this._TmpVector3[2]); return i ? r._worldMatrix = i : r._worldMatrix = Ae.Identity(), r; } } e6._TmpVector3 = kc.BuildArray(3, D.Zero); const Lk = { min: 0, max: 0 }, Nk = { min: 0, max: 0 }, RZ = (c, e, t) => { const i = D.Dot(e.centerWorld, c), r = Math.abs(D.Dot(e.directions[0], c)) * e.extendSize.x, s = Math.abs(D.Dot(e.directions[1], c)) * e.extendSize.y, n = Math.abs(D.Dot(e.directions[2], c)) * e.extendSize.z, a = r + s + n; t.min = i - a, t.max = i + a; }, w_ = (c, e, t) => (RZ(c, e, Lk), RZ(c, t, Nk), !(Lk.min > Nk.max || Nk.min > Lk.max)); class zf { /** * Constructs bounding info * @param minimum min vector of the bounding box/sphere * @param maximum max vector of the bounding box/sphere * @param worldMatrix defines the new world matrix */ constructor(e, t, i) { this._isLocked = !1, this.boundingBox = new fg(e, t, i), this.boundingSphere = new e6(e, t, i); } /** * Recreates the entire bounding info from scratch as if we call the constructor in place * @param min defines the new minimum vector (in local space) * @param max defines the new maximum vector (in local space) * @param worldMatrix defines the new world matrix */ reConstruct(e, t, i) { this.boundingBox.reConstruct(e, t, i), this.boundingSphere.reConstruct(e, t, i); } /** * min vector of the bounding box/sphere */ get minimum() { return this.boundingBox.minimum; } /** * max vector of the bounding box/sphere */ get maximum() { return this.boundingBox.maximum; } /** * If the info is locked and won't be updated to avoid perf overhead */ get isLocked() { return this._isLocked; } set isLocked(e) { this._isLocked = e; } // Methods /** * Updates the bounding sphere and box * @param world world matrix to be used to update */ update(e) { this._isLocked || (this.boundingBox._update(e), this.boundingSphere._update(e)); } /** * Recreate the bounding info to be centered around a specific point given a specific extend. 
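* A minimal usage sketch, assuming `mesh` already exists; the extend argument is a half-size
* per axis, so the call below recenters the volumes on the local origin with a 2x2x2 box:
* @example
* const info = mesh.getBoundingInfo();
* // Mutates the bounding box and bounding sphere of `info` in place:
* info.centerOn(BABYLON.Vector3.Zero(), new BABYLON.Vector3(1, 1, 1));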
* @param center New center of the bounding info * @param extend New extend of the bounding info * @returns the current bounding info */ centerOn(e, t) { const i = zf._TmpVector3[0].copyFrom(e).subtractInPlace(t), r = zf._TmpVector3[1].copyFrom(e).addInPlace(t); return this.boundingBox.reConstruct(i, r, this.boundingBox.getWorldMatrix()), this.boundingSphere.reConstruct(i, r, this.boundingBox.getWorldMatrix()), this; } /** * Grows the bounding info to include the given point. * @param point The point that will be included in the current bounding info (in local space) * @returns the current bounding info */ encapsulate(e) { const t = D.Minimize(this.minimum, e), i = D.Maximize(this.maximum, e); return this.reConstruct(t, i, this.boundingBox.getWorldMatrix()), this; } /** * Grows the bounding info to encapsulate the given bounding info. * @param toEncapsulate The bounding info that will be encapsulated in the current bounding info * @returns the current bounding info */ encapsulateBoundingInfo(e) { const t = de.Matrix[0]; this.boundingBox.getWorldMatrix().invertToRef(t); const i = de.Vector3[0]; return D.TransformCoordinatesToRef(e.boundingBox.minimumWorld, t, i), this.encapsulate(i), D.TransformCoordinatesToRef(e.boundingBox.maximumWorld, t, i), this.encapsulate(i), this; } /** * Scale the current bounding info by applying a scale factor * @param factor defines the scale factor to apply * @returns the current bounding info */ scale(e) { return this.boundingBox.scale(e), this.boundingSphere.scale(e), this; } /** * Returns `true` if the bounding info is within the frustum defined by the passed array of planes. * @param frustumPlanes defines the frustum to test * @param strategy defines the strategy to use for the culling (default is BABYLON.AbstractMesh.CULLINGSTRATEGY_STANDARD) * The different strategies available are: * * BABYLON.AbstractMesh.CULLINGSTRATEGY_STANDARD most accurate but slower @see https://doc.babylonjs.com/typedoc/classes/BABYLON.AbstractMesh#CULLINGSTRATEGY_STANDARD * * BABYLON.AbstractMesh.CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY faster but less accurate @see https://doc.babylonjs.com/typedoc/classes/BABYLON.AbstractMesh#CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY * * BABYLON.AbstractMesh.CULLINGSTRATEGY_OPTIMISTIC_INCLUSION can be faster if always visible @see https://doc.babylonjs.com/typedoc/classes/BABYLON.AbstractMesh#CULLINGSTRATEGY_OPTIMISTIC_INCLUSION * * BABYLON.AbstractMesh.CULLINGSTRATEGY_OPTIMISTIC_INCLUSION_THEN_BSPHERE_ONLY can be faster if always visible @see https://doc.babylonjs.com/typedoc/classes/BABYLON.AbstractMesh#CULLINGSTRATEGY_OPTIMISTIC_INCLUSION_THEN_BSPHERE_ONLY * @returns true if the bounding info is in the frustum planes */ isInFrustum(e, t = 0) { return (t === 2 || t === 3) && this.boundingSphere.isCenterInFrustum(e) ? !0 : this.boundingSphere.isInFrustum(e) ? t === 1 || t === 3 ? !0 : this.boundingBox.isInFrustum(e) : !1; } /** * Gets the world distance between the min and max points of the bounding box */ get diagonalLength() { const e = this.boundingBox; return e.maximumWorld.subtractToRef(e.minimumWorld, zf._TmpVector3[0]).length(); } /** * Checks if a cullable object (mesh...) 
is in the camera frustum * Unlike isInFrustum this checks the full bounding box * @param frustumPlanes Camera near/planes * @returns true if the object is in frustum otherwise false */ isCompletelyInFrustum(e) { return this.boundingBox.isCompletelyInFrustum(e); } /** * @internal */ _checkCollision(e) { return e._canDoCollision(this.boundingSphere.centerWorld, this.boundingSphere.radiusWorld, this.boundingBox.minimumWorld, this.boundingBox.maximumWorld); } /** * Checks if a point is inside the bounding box and bounding sphere or the mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/interactions/mesh_intersect * @param point the point to check intersection with * @returns if the point intersects */ intersectsPoint(e) { return !(!this.boundingSphere.centerWorld || !this.boundingSphere.intersectsPoint(e) || !this.boundingBox.intersectsPoint(e)); } /** * Checks if another bounding info intersects the bounding box and bounding sphere or the mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/interactions/mesh_intersect * @param boundingInfo the bounding info to check intersection with * @param precise if the intersection should be done using OBB * @returns if the bounding info intersects */ intersects(e, t) { if (!e6.Intersects(this.boundingSphere, e.boundingSphere) || !fg.Intersects(this.boundingBox, e.boundingBox)) return !1; if (!t) return !0; const i = this.boundingBox, r = e.boundingBox; return !(!w_(i.directions[0], i, r) || !w_(i.directions[1], i, r) || !w_(i.directions[2], i, r) || !w_(r.directions[0], i, r) || !w_(r.directions[1], i, r) || !w_(r.directions[2], i, r) || !w_(D.Cross(i.directions[0], r.directions[0]), i, r) || !w_(D.Cross(i.directions[0], r.directions[1]), i, r) || !w_(D.Cross(i.directions[0], r.directions[2]), i, r) || !w_(D.Cross(i.directions[1], r.directions[0]), i, r) || !w_(D.Cross(i.directions[1], r.directions[1]), i, r) || !w_(D.Cross(i.directions[1], r.directions[2]), i, r) || !w_(D.Cross(i.directions[2], r.directions[0]), i, r) || !w_(D.Cross(i.directions[2], r.directions[1]), i, r) || !w_(D.Cross(i.directions[2], r.directions[2]), i, r)); } } zf._TmpVector3 = kc.BuildArray(2, D.Zero); class XB { static extractMinAndMaxIndexed(e, t, i, r, s, n) { for (let a = i; a < i + r; a++) { const l = t[a] * 3, o = e[l], u = e[l + 1], h = e[l + 2]; s.minimizeInPlaceFromFloats(o, u, h), n.maximizeInPlaceFromFloats(o, u, h); } } static extractMinAndMax(e, t, i, r, s, n) { for (let a = t, l = t * r; a < t + i; a++, l += r) { const o = e[l], u = e[l + 1], h = e[l + 2]; s.minimizeInPlaceFromFloats(o, u, h), n.maximizeInPlaceFromFloats(o, u, h); } } } F([ gT.filter((...[c, e]) => !Array.isArray(c) && !Array.isArray(e)) // eslint-disable-next-line @typescript-eslint/naming-convention ], XB, "extractMinAndMaxIndexed", null); F([ gT.filter((...[c]) => !Array.isArray(c)) // eslint-disable-next-line @typescript-eslint/naming-convention ], XB, "extractMinAndMax", null); function Tte(c, e, t, i, r = null) { const s = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), n = new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); return XB.extractMinAndMaxIndexed(c, e, t, i, s, n), r && (s.x -= s.x * r.x + r.y, s.y -= s.y * r.x + r.y, s.z -= s.z * r.x + r.y, n.x += n.x * r.x + r.y, n.y += n.y * r.x + r.y, n.z += n.z * r.x + r.y), { minimum: s, maximum: n }; } function kO(c, e, t, i = null, r) { const s = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), n = new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); return r 
|| (r = 3), XB.extractMinAndMax(c, e, t, r, s, n), i && (s.x -= s.x * i.x + i.y, s.y -= s.y * i.x + i.y, s.z -= s.z * i.x + i.y, n.x += n.x * i.x + i.y, n.y += n.y * i.x + i.y, n.z += n.z * i.x + i.y), { minimum: s, maximum: n }; } class ed { /** * Gets material defines used by the effect associated to the sub mesh */ get materialDefines() { var e; return this._mainDrawWrapperOverride ? this._mainDrawWrapperOverride.defines : (e = this._getDrawWrapper()) === null || e === void 0 ? void 0 : e.defines; } /** * Sets material defines used by the effect associated to the sub mesh */ set materialDefines(e) { var t; const i = (t = this._mainDrawWrapperOverride) !== null && t !== void 0 ? t : this._getDrawWrapper(void 0, !0); i.defines = e; } /** * @internal */ _getDrawWrapper(e, t = !1) { e = e ?? this._engine.currentRenderPassId; let i = this._drawWrappers[e]; return !i && t && (this._drawWrappers[e] = i = new $o(this._mesh.getScene().getEngine())), i; } /** * @internal */ _removeDrawWrapper(e, t = !0) { var i; t && ((i = this._drawWrappers[e]) === null || i === void 0 || i.dispose()), this._drawWrappers[e] = void 0; } /** * Gets associated (main) effect (possibly the effect override if defined) */ get effect() { var e, t; return this._mainDrawWrapperOverride ? this._mainDrawWrapperOverride.effect : (t = (e = this._getDrawWrapper()) === null || e === void 0 ? void 0 : e.effect) !== null && t !== void 0 ? t : null; } /** @internal */ get _drawWrapper() { var e; return (e = this._mainDrawWrapperOverride) !== null && e !== void 0 ? e : this._getDrawWrapper(void 0, !0); } /** @internal */ get _drawWrapperOverride() { return this._mainDrawWrapperOverride; } /** * @internal */ _setMainDrawWrapperOverride(e) { this._mainDrawWrapperOverride = e; } /** * Sets associated effect (effect used to render this submesh) * @param effect defines the effect to associate with * @param defines defines the set of defines used to compile this effect * @param materialContext material context associated to the effect * @param resetContext true to reset the draw context */ setEffect(e, t = null, i, r = !0) { const s = this._drawWrapper; s.setEffect(e, t, r), i !== void 0 && (s.materialContext = i), e || (s.defines = null, s.materialContext = void 0); } /** * Resets the draw wrappers cache * @param passId If provided, releases only the draw wrapper corresponding to this render pass id */ resetDrawCache(e) { if (this._drawWrappers) if (e !== void 0) { this._removeDrawWrapper(e); return; } else for (const t of this._drawWrappers) t == null || t.dispose(); this._drawWrappers = []; } /** * Add a new submesh to a mesh * @param materialIndex defines the material index to use * @param verticesStart defines vertex index start * @param verticesCount defines vertices count * @param indexStart defines index start * @param indexCount defines indices count * @param mesh defines the parent mesh * @param renderingMesh defines an optional rendering mesh * @param createBoundingBox defines if bounding box should be created for this submesh * @returns the new submesh */ static AddToMesh(e, t, i, r, s, n, a, l = !0) { return new ed(e, t, i, r, s, n, a, l); } /** * Creates a new submesh * @param materialIndex defines the material index to use * @param verticesStart defines vertex index start * @param verticesCount defines vertices count * @param indexStart defines index start * @param indexCount defines indices count * @param mesh defines the parent mesh * @param renderingMesh defines an optional rendering mesh * @param createBoundingBox 
defines if bounding box should be created for this submesh * @param addToMesh defines a boolean indicating that the submesh must be added to the mesh.subMeshes array (true by default) */ constructor(e, t, i, r, s, n, a, l = !0, o = !0) { this.materialIndex = e, this.verticesStart = t, this.verticesCount = i, this.indexStart = r, this.indexCount = s, this._mainDrawWrapperOverride = null, this._linesIndexCount = 0, this._linesIndexBuffer = null, this._lastColliderWorldVertices = null, this._lastColliderTransformMatrix = null, this._wasDispatched = !1, this._renderId = 0, this._alphaIndex = 0, this._distanceToCamera = 0, this._currentMaterial = null, this._mesh = n, this._renderingMesh = a || n, o && n.subMeshes.push(this), this._engine = this._mesh.getScene().getEngine(), this.resetDrawCache(), this._trianglePlanes = [], this._id = n.subMeshes.length - 1, l && (this.refreshBoundingInfo(), n.computeWorldMatrix(!0)); } /** * Returns true if this submesh covers the entire parent mesh * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention get IsGlobal() { return this.verticesStart === 0 && this.verticesCount === this._mesh.getTotalVertices() && this.indexStart === 0 && this.indexCount === this._mesh.getTotalIndices(); } /** * Returns the submesh BoundingInfo object * @returns current bounding info (or mesh's one if the submesh is global) */ getBoundingInfo() { return this.IsGlobal || this._mesh.hasThinInstances ? this._mesh.getBoundingInfo() : this._boundingInfo; } /** * Sets the submesh BoundingInfo * @param boundingInfo defines the new bounding info to use * @returns the SubMesh */ setBoundingInfo(e) { return this._boundingInfo = e, this; } /** * Returns the mesh of the current submesh * @returns the parent mesh */ getMesh() { return this._mesh; } /** * Returns the rendering mesh of the submesh * @returns the rendering mesh (could be different from parent mesh) */ getRenderingMesh() { return this._renderingMesh; } /** * Returns the replacement mesh of the submesh * @returns the replacement mesh (could be different from parent mesh) */ getReplacementMesh() { return this._mesh._internalAbstractMeshDataInfo._actAsRegularMesh ? this._mesh : null; } /** * Returns the effective mesh of the submesh * @returns the effective mesh (could be different from parent mesh) */ getEffectiveMesh() { const e = this._mesh._internalAbstractMeshDataInfo._actAsRegularMesh ? this._mesh : null; return e || this._renderingMesh; } /** * Returns the submesh material * @param getDefaultMaterial Defines whether or not to get the default material if nothing has been defined. * @returns null or the current material */ getMaterial(e = !0) { var t; const i = (t = this._renderingMesh.getMaterialForRenderPass(this._engine.currentRenderPassId)) !== null && t !== void 0 ? t : this._renderingMesh.material; if (i) { if (this._isMultiMaterial(i)) { const r = i.getSubMaterial(this.materialIndex); return this._currentMaterial !== r && (this._currentMaterial = r, this.resetDrawCache()), r; } } else return e ? 
this._mesh.getScene().defaultMaterial : null; return i; } _isMultiMaterial(e) { return e.getSubMaterial !== void 0; } // Methods /** * Sets a new updated BoundingInfo object to the submesh * @param data defines an optional position array to use to determine the bounding info * @returns the SubMesh */ refreshBoundingInfo(e = null) { if (this._lastColliderWorldVertices = null, this.IsGlobal || !this._renderingMesh || !this._renderingMesh.geometry) return this; if (e || (e = this._renderingMesh.getVerticesData(Y.PositionKind)), !e) return this._boundingInfo = this._mesh.getBoundingInfo(), this; const t = this._renderingMesh.getIndices(); let i; if (this.indexStart === 0 && this.indexCount === t.length) { const r = this._renderingMesh.getBoundingInfo(); i = { minimum: r.minimum.clone(), maximum: r.maximum.clone() }; } else i = Tte(e, t, this.indexStart, this.indexCount, this._renderingMesh.geometry.boundingBias); return this._boundingInfo ? this._boundingInfo.reConstruct(i.minimum, i.maximum) : this._boundingInfo = new zf(i.minimum, i.maximum), this; } /** * @internal */ _checkCollision(e) { return this.getBoundingInfo()._checkCollision(e); } /** * Updates the submesh BoundingInfo * @param world defines the world matrix to use to update the bounding info * @returns the submesh */ updateBoundingInfo(e) { let t = this.getBoundingInfo(); return t || (this.refreshBoundingInfo(), t = this.getBoundingInfo()), t && t.update(e), this; } /** * True is the submesh bounding box intersects the frustum defined by the passed array of planes. * @param frustumPlanes defines the frustum planes * @returns true if the submesh is intersecting with the frustum */ isInFrustum(e) { const t = this.getBoundingInfo(); return t ? t.isInFrustum(e, this._mesh.cullingStrategy) : !1; } /** * True is the submesh bounding box is completely inside the frustum defined by the passed array of planes * @param frustumPlanes defines the frustum planes * @returns true if the submesh is inside the frustum */ isCompletelyInFrustum(e) { const t = this.getBoundingInfo(); return t ? t.isCompletelyInFrustum(e) : !1; } /** * Renders the submesh * @param enableAlphaMode defines if alpha needs to be used * @returns the submesh */ render(e) { return this._renderingMesh.render(this, e, this._mesh._internalAbstractMeshDataInfo._actAsRegularMesh ? this._mesh : void 0), this; } /** * @internal */ _getLinesIndexBuffer(e, t) { if (!this._linesIndexBuffer) { const i = []; for (let r = this.indexStart; r < this.indexStart + this.indexCount; r += 3) i.push(e[r], e[r + 1], e[r + 1], e[r + 2], e[r + 2], e[r]); this._linesIndexBuffer = t.createIndexBuffer(i), this._linesIndexCount = i.length; } return this._linesIndexBuffer; } /** * Checks if the submesh intersects with a ray * @param ray defines the ray to test * @returns true is the passed ray intersects the submesh bounding box */ canIntersects(e) { const t = this.getBoundingInfo(); return t ? 
e.intersectsBox(t.boundingBox) : !1; } /** * Intersects current submesh with a ray * @param ray defines the ray to test * @param positions defines mesh's positions array * @param indices defines mesh's indices array * @param fastCheck defines if the first intersection will be used (and not the closest) * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @returns intersection info or null if no intersection */ intersects(e, t, i, r, s) { const n = this.getMaterial(); if (!n) return null; let a = 3, l = !1; switch (n.fillMode) { case 3: case 5: case 6: case 8: return null; case 7: a = 1, l = !0; break; } return n.fillMode === 4 ? i.length ? this._intersectLines(e, t, i, this._mesh.intersectionThreshold, r) : this._intersectUnIndexedLines(e, t, i, this._mesh.intersectionThreshold, r) : !i.length && this._mesh._unIndexed ? this._intersectUnIndexedTriangles(e, t, i, r, s) : this._intersectTriangles(e, t, i, a, l, r, s); } /** * @internal */ _intersectLines(e, t, i, r, s) { let n = null; for (let a = this.indexStart; a < this.indexStart + this.indexCount; a += 2) { const l = t[i[a]], o = t[i[a + 1]], u = e.intersectionSegment(l, o, r); if (!(u < 0) && (s || !n || u < n.distance) && (n = new sB(null, null, u), n.faceId = a / 2, s)) break; } return n; } /** * @internal */ _intersectUnIndexedLines(e, t, i, r, s) { let n = null; for (let a = this.verticesStart; a < this.verticesStart + this.verticesCount; a += 2) { const l = t[a], o = t[a + 1], u = e.intersectionSegment(l, o, r); if (!(u < 0) && (s || !n || u < n.distance) && (n = new sB(null, null, u), n.faceId = a / 2, s)) break; } return n; } /** * @internal */ _intersectTriangles(e, t, i, r, s, n, a) { let l = null, o = -1; for (let u = this.indexStart; u < this.indexStart + this.indexCount - (3 - r); u += r) { o++; const h = i[u], d = i[u + 1], f = i[u + 2]; if (s && f === 4294967295) { u += 2; continue; } const p = t[h], m = t[d], _ = t[f]; if (!p || !m || !_ || a && !a(p, m, _, e, h, d, f)) continue; const v = e.intersectsTriangle(p, m, _); if (v) { if (v.distance < 0) continue; if ((n || !l || v.distance < l.distance) && (l = v, l.faceId = o, n)) break; } } return l; } /** * @internal */ _intersectUnIndexedTriangles(e, t, i, r, s) { let n = null; for (let a = this.verticesStart; a < this.verticesStart + this.verticesCount; a += 3) { const l = t[a], o = t[a + 1], u = t[a + 2]; if (s && !s(l, o, u, e, -1, -1, -1)) continue; const h = e.intersectsTriangle(l, o, u); if (h) { if (h.distance < 0) continue; if ((r || !n || h.distance < n.distance) && (n = h, n.faceId = a / 3, r)) break; } } return n; } /** @internal */ _rebuild() { this._linesIndexBuffer && (this._linesIndexBuffer = null); } // Clone /** * Creates a new submesh from the passed mesh * @param newMesh defines the new hosting mesh * @param newRenderingMesh defines an optional rendering mesh * @returns the new submesh */ clone(e, t) { const i = new ed(this.materialIndex, this.verticesStart, this.verticesCount, this.indexStart, this.indexCount, e, t, !1); if (!this.IsGlobal) { const r = this.getBoundingInfo(); if (!r) return i; i._boundingInfo = new zf(r.minimum, r.maximum); } return i; } // Dispose /** * Release associated resources */ dispose() { this._linesIndexBuffer && (this._mesh.getScene().getEngine()._releaseBuffer(this._linesIndexBuffer), this._linesIndexBuffer = null); const e = this._mesh.subMeshes.indexOf(this); this._mesh.subMeshes.splice(e, 1), this.resetDrawCache(); } /** * Gets the class name * @returns the 
string "SubMesh". */ getClassName() { return "SubMesh"; } // Statics /** * Creates a new submesh from indices data * @param materialIndex the index of the main mesh material * @param startIndex the index where to start the copy in the mesh indices array * @param indexCount the number of indices to copy then from the startIndex * @param mesh the main mesh to create the submesh from * @param renderingMesh the optional rendering mesh * @param createBoundingBox defines if bounding box should be created for this submesh * @returns a new submesh */ static CreateFromIndices(e, t, i, r, s, n = !0) { let a = Number.MAX_VALUE, l = -Number.MAX_VALUE; const u = (s || r).getIndices(); for (let h = t; h < t + i; h++) { const d = u[h]; d < a && (a = d), d > l && (l = d); } return new ed(e, a, l - a + 1, t, i, r, s, n); } } class H9 { } class Ot { /** * Creates a new VertexData */ constructor() { this.uniqueId = 0, this.metadata = {}, this._applyTo = Ete(this._applyToCoroutine.bind(this)), this.uniqueId = Ot._UniqueIDGenerator, Ot._UniqueIDGenerator++; } /** * Uses the passed data array to set the set the values for the specified kind of data * @param data a linear array of floating numbers * @param kind the type of data that is being set, eg positions, colors etc */ set(e, t) { switch (e.length || Ce.Warn(`Setting vertex data kind '${t}' with an empty array`), t) { case Y.PositionKind: this.positions = e; break; case Y.NormalKind: this.normals = e; break; case Y.TangentKind: this.tangents = e; break; case Y.UVKind: this.uvs = e; break; case Y.UV2Kind: this.uvs2 = e; break; case Y.UV3Kind: this.uvs3 = e; break; case Y.UV4Kind: this.uvs4 = e; break; case Y.UV5Kind: this.uvs5 = e; break; case Y.UV6Kind: this.uvs6 = e; break; case Y.ColorKind: this.colors = e; break; case Y.MatricesIndicesKind: this.matricesIndices = e; break; case Y.MatricesWeightsKind: this.matricesWeights = e; break; case Y.MatricesIndicesExtraKind: this.matricesIndicesExtra = e; break; case Y.MatricesWeightsExtraKind: this.matricesWeightsExtra = e; break; } } /** * Associates the vertexData to the passed Mesh. * Sets it as updatable or not (default `false`) * @param mesh the mesh the vertexData is applied to * @param updatable when used and having the value true allows new data to update the vertexData * @returns the VertexData */ applyToMesh(e, t) { return this._applyTo(e, t, !1), this; } /** * Associates the vertexData to the passed Geometry. * Sets it as updatable or not (default `false`) * @param geometry the geometry the vertexData is applied to * @param updatable when used and having the value true allows new data to update the vertexData * @returns VertexData */ applyToGeometry(e, t) { return this._applyTo(e, t, !1), this; } /** * Updates the associated mesh * @param mesh the mesh to be updated * @returns VertexData */ updateMesh(e) { return this._update(e), this; } /** * Updates the associated geometry * @param geometry the geometry to be updated * @returns VertexData. 
*/ updateGeometry(e) { return this._update(e), this; } /** * @internal */ *_applyToCoroutine(e, t = !1, i) { if (this.positions && (e.setVerticesData(Y.PositionKind, this.positions, t), i && (yield)), this.normals && (e.setVerticesData(Y.NormalKind, this.normals, t), i && (yield)), this.tangents && (e.setVerticesData(Y.TangentKind, this.tangents, t), i && (yield)), this.uvs && (e.setVerticesData(Y.UVKind, this.uvs, t), i && (yield)), this.uvs2 && (e.setVerticesData(Y.UV2Kind, this.uvs2, t), i && (yield)), this.uvs3 && (e.setVerticesData(Y.UV3Kind, this.uvs3, t), i && (yield)), this.uvs4 && (e.setVerticesData(Y.UV4Kind, this.uvs4, t), i && (yield)), this.uvs5 && (e.setVerticesData(Y.UV5Kind, this.uvs5, t), i && (yield)), this.uvs6 && (e.setVerticesData(Y.UV6Kind, this.uvs6, t), i && (yield)), this.colors && (e.setVerticesData(Y.ColorKind, this.colors, t), i && (yield)), this.matricesIndices && (e.setVerticesData(Y.MatricesIndicesKind, this.matricesIndices, t), i && (yield)), this.matricesWeights && (e.setVerticesData(Y.MatricesWeightsKind, this.matricesWeights, t), i && (yield)), this.matricesIndicesExtra && (e.setVerticesData(Y.MatricesIndicesExtraKind, this.matricesIndicesExtra, t), i && (yield)), this.matricesWeightsExtra && (e.setVerticesData(Y.MatricesWeightsExtraKind, this.matricesWeightsExtra, t), i && (yield)), this.indices ? (e.setIndices(this.indices, null, t), i && (yield)) : e.setIndices([], null), e.subMeshes && this.materialInfos && this.materialInfos.length > 1) { const r = e; r.subMeshes = []; for (const s of this.materialInfos) new ed(s.materialIndex, s.verticesStart, s.verticesCount, s.indexStart, s.indexCount, r); } return this; } _update(e, t, i) { return this.positions && e.updateVerticesData(Y.PositionKind, this.positions, t, i), this.normals && e.updateVerticesData(Y.NormalKind, this.normals, t, i), this.tangents && e.updateVerticesData(Y.TangentKind, this.tangents, t, i), this.uvs && e.updateVerticesData(Y.UVKind, this.uvs, t, i), this.uvs2 && e.updateVerticesData(Y.UV2Kind, this.uvs2, t, i), this.uvs3 && e.updateVerticesData(Y.UV3Kind, this.uvs3, t, i), this.uvs4 && e.updateVerticesData(Y.UV4Kind, this.uvs4, t, i), this.uvs5 && e.updateVerticesData(Y.UV5Kind, this.uvs5, t, i), this.uvs6 && e.updateVerticesData(Y.UV6Kind, this.uvs6, t, i), this.colors && e.updateVerticesData(Y.ColorKind, this.colors, t, i), this.matricesIndices && e.updateVerticesData(Y.MatricesIndicesKind, this.matricesIndices, t, i), this.matricesWeights && e.updateVerticesData(Y.MatricesWeightsKind, this.matricesWeights, t, i), this.matricesIndicesExtra && e.updateVerticesData(Y.MatricesIndicesExtraKind, this.matricesIndicesExtra, t, i), this.matricesWeightsExtra && e.updateVerticesData(Y.MatricesWeightsExtraKind, this.matricesWeightsExtra, t, i), this.indices && e.setIndices(this.indices, null), this; } static _TransformVector3Coordinates(e, t, i = 0, r = e.length) { const s = de.Vector3[0], n = de.Vector3[1]; for (let a = i; a < i + r; a += 3) D.FromArrayToRef(e, a, s), D.TransformCoordinatesToRef(s, t, n), e[a] = n.x, e[a + 1] = n.y, e[a + 2] = n.z; } static _TransformVector3Normals(e, t, i = 0, r = e.length) { const s = de.Vector3[0], n = de.Vector3[1]; for (let a = i; a < i + r; a += 3) D.FromArrayToRef(e, a, s), D.TransformNormalToRef(s, t, n), e[a] = n.x, e[a + 1] = n.y, e[a + 2] = n.z; } static _TransformVector4Normals(e, t, i = 0, r = e.length) { const s = de.Vector4[0], n = de.Vector4[1]; for (let a = i; a < i + r; a += 4) Di.FromArrayToRef(e, a, s), Di.TransformNormalToRef(s, t, n), 
e[a] = n.x, e[a + 1] = n.y, e[a + 2] = n.z, e[a + 3] = n.w; } static _FlipFaces(e, t = 0, i = e.length) { for (let r = t; r < t + i; r += 3) { const s = e[r + 1]; e[r + 1] = e[r + 2], e[r + 2] = s; } } /** * Transforms each position and each normal of the vertexData according to the passed Matrix * @param matrix the transforming matrix * @returns the VertexData */ transform(e) { const t = e.determinant() < 0; return this.positions && Ot._TransformVector3Coordinates(this.positions, e), this.normals && Ot._TransformVector3Normals(this.normals, e), this.tangents && Ot._TransformVector4Normals(this.tangents, e), t && this.indices && Ot._FlipFaces(this.indices), this; } /** * Generates an array of vertex data where each vertex data only has one material info * @returns An array of VertexData */ splitBasedOnMaterialID() { if (!this.materialInfos || this.materialInfos.length < 2) return [this]; const e = []; for (const t of this.materialInfos) { const i = new Ot(); if (this.positions && (i.positions = this.positions.slice(t.verticesStart * 3, (t.verticesCount + t.verticesStart) * 3)), this.normals && (i.normals = this.normals.slice(t.verticesStart * 3, (t.verticesCount + t.verticesStart) * 3)), this.tangents && (i.tangents = this.tangents.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.colors && (i.colors = this.colors.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.uvs && (i.uvs = this.uvs.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.uvs2 && (i.uvs2 = this.uvs2.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.uvs3 && (i.uvs3 = this.uvs3.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.uvs4 && (i.uvs4 = this.uvs4.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.uvs5 && (i.uvs5 = this.uvs5.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.uvs6 && (i.uvs6 = this.uvs6.slice(t.verticesStart * 2, (t.verticesCount + t.verticesStart) * 2)), this.matricesIndices && (i.matricesIndices = this.matricesIndices.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.matricesIndicesExtra && (i.matricesIndicesExtra = this.matricesIndicesExtra.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.matricesWeights && (i.matricesWeights = this.matricesWeights.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.matricesWeightsExtra && (i.matricesWeightsExtra = this.matricesWeightsExtra.slice(t.verticesStart * 4, (t.verticesCount + t.verticesStart) * 4)), this.indices) { i.indices = []; for (let s = t.indexStart; s < t.indexStart + t.indexCount; s++) i.indices.push(this.indices[s] - t.verticesStart); } const r = new H9(); r.indexStart = 0, r.indexCount = i.indices ? i.indices.length : 0, r.materialIndex = t.materialIndex, r.verticesStart = 0, r.verticesCount = (i.positions ? 
i.positions.length : 0) / 3, i.materialInfos = [r], e.push(i); } return e; } /** * Merges the passed VertexData into the current one * @param others the VertexData to be merged into the current one * @param use32BitsIndices defines a boolean indicating if indices must be store in a 32 bits array * @param forceCloneIndices defines a boolean indicating if indices are forced to be cloned * @param mergeMaterialIds defines a boolean indicating if we need to merge the material infos * @param enableCompletion defines a boolean indicating if the vertex data should be completed to be compatible * @returns the modified VertexData */ merge(e, t = !1, i = !1, r = !1, s = !1) { const n = Array.isArray(e) ? e.map((a) => ({ vertexData: a })) : [{ vertexData: e }]; return WB(this._mergeCoroutine(void 0, n, t, !1, i, r, s)); } /** * @internal */ *_mergeCoroutine(e, t, i = !1, r, s, n = !1, a = !1) { var l, o, u, h; this._validate(); let d = t.map((v) => v.vertexData), f = this; if (a) for (const v of d) v && (v._validate(), !this.normals && v.normals && (this.normals = new Float32Array(this.positions.length)), !this.tangents && v.tangents && (this.tangents = new Float32Array(this.positions.length / 3 * 4)), !this.uvs && v.uvs && (this.uvs = new Float32Array(this.positions.length / 3 * 2)), !this.uvs2 && v.uvs2 && (this.uvs2 = new Float32Array(this.positions.length / 3 * 2)), !this.uvs3 && v.uvs3 && (this.uvs3 = new Float32Array(this.positions.length / 3 * 2)), !this.uvs4 && v.uvs4 && (this.uvs4 = new Float32Array(this.positions.length / 3 * 2)), !this.uvs5 && v.uvs5 && (this.uvs5 = new Float32Array(this.positions.length / 3 * 2)), !this.uvs6 && v.uvs6 && (this.uvs6 = new Float32Array(this.positions.length / 3 * 2)), !this.colors && v.colors && (this.colors = new Float32Array(this.positions.length / 3 * 4), this.colors.fill(1)), !this.matricesIndices && v.matricesIndices && (this.matricesIndices = new Float32Array(this.positions.length / 3 * 4)), !this.matricesWeights && v.matricesWeights && (this.matricesWeights = new Float32Array(this.positions.length / 3 * 4)), !this.matricesIndicesExtra && v.matricesIndicesExtra && (this.matricesIndicesExtra = new Float32Array(this.positions.length / 3 * 4)), !this.matricesWeightsExtra && v.matricesWeightsExtra && (this.matricesWeightsExtra = new Float32Array(this.positions.length / 3 * 4))); for (const v of d) if (v) { if (a) this.normals && !v.normals && (v.normals = new Float32Array(v.positions.length)), this.tangents && !v.tangents && (v.tangents = new Float32Array(v.positions.length / 3 * 4)), this.uvs && !v.uvs && (v.uvs = new Float32Array(v.positions.length / 3 * 2)), this.uvs2 && !v.uvs2 && (v.uvs2 = new Float32Array(v.positions.length / 3 * 2)), this.uvs3 && !v.uvs3 && (v.uvs3 = new Float32Array(v.positions.length / 3 * 2)), this.uvs4 && !v.uvs4 && (v.uvs4 = new Float32Array(v.positions.length / 3 * 2)), this.uvs5 && !v.uvs5 && (v.uvs5 = new Float32Array(v.positions.length / 3 * 2)), this.uvs6 && !v.uvs6 && (v.uvs6 = new Float32Array(v.positions.length / 3 * 2)), this.colors && !v.colors && (v.colors = new Float32Array(v.positions.length / 3 * 4), v.colors.fill(1)), this.matricesIndices && !v.matricesIndices && (v.matricesIndices = new Float32Array(v.positions.length / 3 * 4)), this.matricesWeights && !v.matricesWeights && (v.matricesWeights = new Float32Array(v.positions.length / 3 * 4)), this.matricesIndicesExtra && !v.matricesIndicesExtra && (v.matricesIndicesExtra = new Float32Array(v.positions.length / 3 * 4)), this.matricesWeightsExtra && 
!v.matricesWeightsExtra && (v.matricesWeightsExtra = new Float32Array(v.positions.length / 3 * 4)); else if (v._validate(), !this.normals != !v.normals || !this.tangents != !v.tangents || !this.uvs != !v.uvs || !this.uvs2 != !v.uvs2 || !this.uvs3 != !v.uvs3 || !this.uvs4 != !v.uvs4 || !this.uvs5 != !v.uvs5 || !this.uvs6 != !v.uvs6 || !this.colors != !v.colors || !this.matricesIndices != !v.matricesIndices || !this.matricesWeights != !v.matricesWeights || !this.matricesIndicesExtra != !v.matricesIndicesExtra || !this.matricesWeightsExtra != !v.matricesWeightsExtra) throw new Error("Cannot merge vertex data that do not have the same set of attributes"); } if (n) { let v = 0, C = 0, x = 0; const b = []; let S = null; const M = []; for (const w of this.splitBasedOnMaterialID()) M.push({ vertexData: w, transform: e }); for (const w of t) if (w.vertexData) for (const V of w.vertexData.splitBasedOnMaterialID()) M.push({ vertexData: V, transform: w.transform }); M.sort((w, V) => { const k = w.vertexData.materialInfos ? w.vertexData.materialInfos[0].materialIndex : 0, L = V.vertexData.materialInfos ? V.vertexData.materialInfos[0].materialIndex : 0; return k > L ? 1 : k === L ? 0 : -1; }); for (const w of M) { const V = w.vertexData; if (V.materialInfos ? v = V.materialInfos[0].materialIndex : v = 0, S && S.materialIndex === v) S.indexCount += V.indices.length, S.verticesCount += V.positions.length / 3; else { const k = new H9(); k.materialIndex = v, k.indexStart = C, k.indexCount = V.indices.length, k.verticesStart = x, k.verticesCount = V.positions.length / 3, b.push(k), S = k; } C += V.indices.length, x += V.positions.length / 3; } const R = M.splice(0, 1)[0]; f = R.vertexData, e = R.transform, d = M.map((w) => w.vertexData), t = M, this.materialInfos = b; } const p = d.reduce((v, C) => { var x, b; return v + ((b = (x = C.indices) === null || x === void 0 ? void 0 : x.length) !== null && b !== void 0 ? b : 0); }, (o = (l = f.indices) === null || l === void 0 ? void 0 : l.length) !== null && o !== void 0 ? o : 0); let _ = s || d.some((v) => v.indices === f.indices) ? (u = f.indices) === null || u === void 0 ? void 0 : u.slice() : f.indices; if (p > 0) { let v = (h = _ == null ? void 0 : _.length) !== null && h !== void 0 ? h : 0; if (_ || (_ = new Array(p)), _.length !== p) { if (Array.isArray(_)) _.length = p; else { const x = i || _ instanceof Uint32Array ? new Uint32Array(p) : new Uint16Array(p); x.set(_), _ = x; } e && e.determinant() < 0 && Ot._FlipFaces(_, 0, v); } let C = f.positions ? 
f.positions.length / 3 : 0; for (const { vertexData: x, transform: b } of t) if (x.indices) { for (let S = 0; S < x.indices.length; S++) _[v + S] = x.indices[S] + C; b && b.determinant() < 0 && Ot._FlipFaces(_, v, x.indices.length), C += x.positions.length / 3, v += x.indices.length, r && (yield); } } return this.indices = _, this.positions = Ot._MergeElement(Y.PositionKind, f.positions, e, t.map((v) => [v.vertexData.positions, v.transform])), r && (yield), f.normals && (this.normals = Ot._MergeElement(Y.NormalKind, f.normals, e, t.map((v) => [v.vertexData.normals, v.transform])), r && (yield)), f.tangents && (this.tangents = Ot._MergeElement(Y.TangentKind, f.tangents, e, t.map((v) => [v.vertexData.tangents, v.transform])), r && (yield)), f.uvs && (this.uvs = Ot._MergeElement(Y.UVKind, f.uvs, e, t.map((v) => [v.vertexData.uvs, v.transform])), r && (yield)), f.uvs2 && (this.uvs2 = Ot._MergeElement(Y.UV2Kind, f.uvs2, e, t.map((v) => [v.vertexData.uvs2, v.transform])), r && (yield)), f.uvs3 && (this.uvs3 = Ot._MergeElement(Y.UV3Kind, f.uvs3, e, t.map((v) => [v.vertexData.uvs3, v.transform])), r && (yield)), f.uvs4 && (this.uvs4 = Ot._MergeElement(Y.UV4Kind, f.uvs4, e, t.map((v) => [v.vertexData.uvs4, v.transform])), r && (yield)), f.uvs5 && (this.uvs5 = Ot._MergeElement(Y.UV5Kind, f.uvs5, e, t.map((v) => [v.vertexData.uvs5, v.transform])), r && (yield)), f.uvs6 && (this.uvs6 = Ot._MergeElement(Y.UV6Kind, f.uvs6, e, t.map((v) => [v.vertexData.uvs6, v.transform])), r && (yield)), f.colors && (this.colors = Ot._MergeElement(Y.ColorKind, f.colors, e, t.map((v) => [v.vertexData.colors, v.transform])), r && (yield)), f.matricesIndices && (this.matricesIndices = Ot._MergeElement(Y.MatricesIndicesKind, f.matricesIndices, e, t.map((v) => [v.vertexData.matricesIndices, v.transform])), r && (yield)), f.matricesWeights && (this.matricesWeights = Ot._MergeElement(Y.MatricesWeightsKind, f.matricesWeights, e, t.map((v) => [v.vertexData.matricesWeights, v.transform])), r && (yield)), f.matricesIndicesExtra && (this.matricesIndicesExtra = Ot._MergeElement(Y.MatricesIndicesExtraKind, f.matricesIndicesExtra, e, t.map((v) => [v.vertexData.matricesIndicesExtra, v.transform])), r && (yield)), f.matricesWeightsExtra && (this.matricesWeightsExtra = Ot._MergeElement(Y.MatricesWeightsExtraKind, f.matricesWeightsExtra, e, t.map((v) => [v.vertexData.matricesWeightsExtra, v.transform]))), this; } static _MergeElement(e, t, i, r) { const s = r.filter((l) => l[0] !== null && l[0] !== void 0); if (!t && s.length == 0) return t; if (!t) return this._MergeElement(e, s[0][0], s[0][1], s.slice(1)); const n = s.reduce((l, o) => l + o[0].length, t.length), a = e === Y.PositionKind ? Ot._TransformVector3Coordinates : e === Y.NormalKind ? Ot._TransformVector3Normals : e === Y.TangentKind ? 
Ot._TransformVector4Normals : () => { }; if (t instanceof Float32Array) { const l = new Float32Array(n); l.set(t), i && a(l, i, 0, t.length); let o = t.length; for (const [u, h] of s) l.set(u, o), h && a(l, h, o, u.length), o += u.length; return l; } else { const l = new Array(n); for (let u = 0; u < t.length; u++) l[u] = t[u]; i && a(l, i, 0, t.length); let o = t.length; for (const [u, h] of s) { for (let d = 0; d < u.length; d++) l[o + d] = u[d]; h && a(l, h, o, u.length), o += u.length; } return l; } } _validate() { if (!this.positions) throw new F4("Positions are required", $C.MeshInvalidPositionsError); const e = (r, s) => { const n = Y.DeduceStride(r); if (s.length % n !== 0) throw new Error("The " + r + "s array count must be a multiple of " + n); return s.length / n; }, t = e(Y.PositionKind, this.positions), i = (r, s) => { const n = e(r, s); if (n !== t) throw new Error("The " + r + "s element count (" + n + ") does not match the positions count (" + t + ")"); }; this.normals && i(Y.NormalKind, this.normals), this.tangents && i(Y.TangentKind, this.tangents), this.uvs && i(Y.UVKind, this.uvs), this.uvs2 && i(Y.UV2Kind, this.uvs2), this.uvs3 && i(Y.UV3Kind, this.uvs3), this.uvs4 && i(Y.UV4Kind, this.uvs4), this.uvs5 && i(Y.UV5Kind, this.uvs5), this.uvs6 && i(Y.UV6Kind, this.uvs6), this.colors && i(Y.ColorKind, this.colors), this.matricesIndices && i(Y.MatricesIndicesKind, this.matricesIndices), this.matricesWeights && i(Y.MatricesWeightsKind, this.matricesWeights), this.matricesIndicesExtra && i(Y.MatricesIndicesExtraKind, this.matricesIndicesExtra), this.matricesWeightsExtra && i(Y.MatricesWeightsExtraKind, this.matricesWeightsExtra); } /** * Clone the current vertex data * @returns a copy of the current data */ clone() { const e = this.serialize(); return Ot.Parse(e); } /** * Serializes the VertexData * @returns a serialized object */ serialize() { const e = {}; if (this.positions && (e.positions = Array.from(this.positions)), this.normals && (e.normals = Array.from(this.normals)), this.tangents && (e.tangents = Array.from(this.tangents)), this.uvs && (e.uvs = Array.from(this.uvs)), this.uvs2 && (e.uvs2 = Array.from(this.uvs2)), this.uvs3 && (e.uvs3 = Array.from(this.uvs3)), this.uvs4 && (e.uvs4 = Array.from(this.uvs4)), this.uvs5 && (e.uvs5 = Array.from(this.uvs5)), this.uvs6 && (e.uvs6 = Array.from(this.uvs6)), this.colors && (e.colors = Array.from(this.colors)), this.matricesIndices && (e.matricesIndices = Array.from(this.matricesIndices), e.matricesIndices._isExpanded = !0), this.matricesWeights && (e.matricesWeights = Array.from(this.matricesWeights)), this.matricesIndicesExtra && (e.matricesIndicesExtra = Array.from(this.matricesIndicesExtra), e.matricesIndicesExtra._isExpanded = !0), this.matricesWeightsExtra && (e.matricesWeightsExtra = Array.from(this.matricesWeightsExtra)), e.indices = Array.from(this.indices), this.materialInfos) { e.materialInfos = []; for (const t of this.materialInfos) { const i = { indexStart: t.indexStart, indexCount: t.indexCount, materialIndex: t.materialIndex, verticesStart: t.verticesStart, verticesCount: t.verticesCount }; e.materialInfos.push(i); } } return e; } // Statics /** * Extracts the vertexData from a mesh * @param mesh the mesh from which to extract the VertexData * @param copyWhenShared defines if the VertexData must be cloned when shared between multiple meshes, optional, default false * @param forceCopy indicating that the VertexData must be cloned, optional, default false * @returns the object VertexData associated to the passed 
mesh */ static ExtractFromMesh(e, t, i) { return Ot._ExtractFrom(e, t, i); } /** * Extracts the vertexData from the geometry * @param geometry the geometry from which to extract the VertexData * @param copyWhenShared defines if the VertexData must be cloned when the geometry is shared between multiple meshes, optional, default false * @param forceCopy indicating that the VertexData must be cloned, optional, default false * @returns the object VertexData associated to the passed mesh */ static ExtractFromGeometry(e, t, i) { return Ot._ExtractFrom(e, t, i); } static _ExtractFrom(e, t, i) { const r = new Ot(); return e.isVerticesDataPresent(Y.PositionKind) && (r.positions = e.getVerticesData(Y.PositionKind, t, i)), e.isVerticesDataPresent(Y.NormalKind) && (r.normals = e.getVerticesData(Y.NormalKind, t, i)), e.isVerticesDataPresent(Y.TangentKind) && (r.tangents = e.getVerticesData(Y.TangentKind, t, i)), e.isVerticesDataPresent(Y.UVKind) && (r.uvs = e.getVerticesData(Y.UVKind, t, i)), e.isVerticesDataPresent(Y.UV2Kind) && (r.uvs2 = e.getVerticesData(Y.UV2Kind, t, i)), e.isVerticesDataPresent(Y.UV3Kind) && (r.uvs3 = e.getVerticesData(Y.UV3Kind, t, i)), e.isVerticesDataPresent(Y.UV4Kind) && (r.uvs4 = e.getVerticesData(Y.UV4Kind, t, i)), e.isVerticesDataPresent(Y.UV5Kind) && (r.uvs5 = e.getVerticesData(Y.UV5Kind, t, i)), e.isVerticesDataPresent(Y.UV6Kind) && (r.uvs6 = e.getVerticesData(Y.UV6Kind, t, i)), e.isVerticesDataPresent(Y.ColorKind) && (r.colors = e.getVerticesData(Y.ColorKind, t, i)), e.isVerticesDataPresent(Y.MatricesIndicesKind) && (r.matricesIndices = e.getVerticesData(Y.MatricesIndicesKind, t, i)), e.isVerticesDataPresent(Y.MatricesWeightsKind) && (r.matricesWeights = e.getVerticesData(Y.MatricesWeightsKind, t, i)), e.isVerticesDataPresent(Y.MatricesIndicesExtraKind) && (r.matricesIndicesExtra = e.getVerticesData(Y.MatricesIndicesExtraKind, t, i)), e.isVerticesDataPresent(Y.MatricesWeightsExtraKind) && (r.matricesWeightsExtra = e.getVerticesData(Y.MatricesWeightsExtraKind, t, i)), r.indices = e.getIndices(t, i), r; } /** * Creates the VertexData for a Ribbon * @param options an object used to set the following optional parameters for the ribbon, required but can be empty * * pathArray array of paths, each of which an array of successive Vector3 * * closeArray creates a seam between the first and the last paths of the pathArray, optional, default false * * closePath creates a seam between the first and the last points of each path of the path array, optional, default false * * offset a positive integer, only used when pathArray contains a single path (offset = 10 means the point 1 is joined to the point 11), default rounded half size of the pathArray length * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * * invertUV swaps in the U and V coordinates when applying a texture, optional, default false * * uvs a linear array, of length 2 * number of vertices, of custom UV values, optional * * colors a linear array, of length 4 * number of vertices, of custom color values, optional * @param options.pathArray * @param options.closeArray * @param options.closePath * 
@param options.offset * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @param options.invertUV * @param options.uvs * @param options.colors * @returns the VertexData of the ribbon * @deprecated use CreateRibbonVertexData instead */ static CreateRibbon(e) { throw yr("ribbonBuilder"); } /** * Creates the VertexData for a box * @param options an object used to set the following optional parameters for the box, required but can be empty * * size sets the width, height and depth of the box to the value of size, optional default 1 * * width sets the width (x direction) of the box, overwrites the width set by size, optional, default size * * height sets the height (y direction) of the box, overwrites the height set by size, optional, default size * * depth sets the depth (z direction) of the box, overwrites the depth set by size, optional, default size * * faceUV an array of 6 Vector4 elements used to set different images to each box side * * faceColors an array of 6 Color3 elements used to set different colors to each box side * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.size * @param options.width * @param options.height * @param options.depth * @param options.faceUV * @param options.faceColors * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the box * @deprecated Please use CreateBoxVertexData from the BoxBuilder file instead */ static CreateBox(e) { throw yr("boxBuilder"); } /** * Creates the VertexData for a tiled box * @param options an object used to set the following optional parameters for the box, required but can be empty * * faceTiles sets the pattern, tile size and number of tiles for a face * * faceUV an array of 6 Vector4 elements used to set different images to each box side * * faceColors an array of 6 Color3 elements used to set different colors to each box side * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * @param options.pattern * @param options.width * @param options.height * @param options.depth * @param options.tileSize * @param options.tileWidth * @param options.tileHeight * @param options.alignHorizontal * @param options.alignVertical * @param options.faceUV * @param options.faceColors * @param options.sideOrientation * @returns the VertexData of the box * @deprecated Please use CreateTiledBoxVertexData instead */ static CreateTiledBox(e) { throw yr("tiledBoxBuilder"); } /** * Creates the VertexData for a tiled plane * @param options an object used to set the following optional parameters for the box, required but can be empty * * pattern a limited pattern arrangement depending on the number * * tileSize sets the width, height and depth of the tile to the value of size, optional default 1 * * tileWidth sets the width (x direction) of the tile, overwrites the width set by size, optional, default size * * tileHeight sets the height (y direction) of the tile, overwrites the height set by size, optional, default size * * sideOrientation optional and takes the values : 
Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.pattern * @param options.tileSize * @param options.tileWidth * @param options.tileHeight * @param options.size * @param options.width * @param options.height * @param options.alignHorizontal * @param options.alignVertical * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the tiled plane * @deprecated use CreateTiledPlaneVertexData instead */ static CreateTiledPlane(e) { throw yr("tiledPlaneBuilder"); } /** * Creates the VertexData for an ellipsoid, defaults to a sphere * @param options an object used to set the following optional parameters for the ellipsoid, required but can be empty * * segments sets the number of horizontal strips, optional, default 32 * * diameter sets the axes dimensions, diameterX, diameterY and diameterZ to the value of diameter, optional default 1 * * diameterX sets the diameterX (x direction) of the ellipsoid, overwrites the diameterX set by diameter, optional, default diameter * * diameterY sets the diameterY (y direction) of the ellipsoid, overwrites the diameterY set by diameter, optional, default diameter * * diameterZ sets the diameterZ (z direction) of the ellipsoid, overwrites the diameterZ set by diameter, optional, default diameter * * arc a number from 0 to 1, to create an unclosed ellipsoid based on the fraction of the circumference (latitude) given by the arc value, optional, default 1 * * slice a number from 0 to 1, to create an unclosed ellipsoid based on the fraction of the height (latitude) given by the arc value, optional, default 1 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.segments * @param options.diameter * @param options.diameterX * @param options.diameterY * @param options.diameterZ * @param options.arc * @param options.slice * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the ellipsoid * @deprecated use CreateSphereVertexData instead */ static CreateSphere(e) { throw yr("sphereBuilder"); } /** * Creates the VertexData for a cylinder, cone or prism * @param options an object used to set the following optional parameters for the cylinder, required but can be empty * * height sets the height (y direction) of the cylinder, optional, default 2 * * diameterTop sets the diameter of the top of the cone, overwrites diameter, optional, default diameter * * diameterBottom sets the diameter of the bottom of the cone, overwrites diameter, optional, default diameter * * diameter sets the diameter of the top and bottom of the cone, optional default 1 * * tessellation the number of prism sides, 3 for a triangular prism, optional, default 24 * * subdivisions the 
number of rings along the cylinder height, optional, default 1 * * arc a number from 0 to 1, to create an unclosed cylinder based on the fraction of the circumference given by the arc value, optional, default 1 * * faceColors an array of Color3 elements used to set different colors to the top, rings and bottom respectively * * faceUV an array of Vector4 elements used to set different images to the top, rings and bottom respectively * * hasRings when true makes each subdivision independently treated as a face for faceUV and faceColors, optional, default false * * enclose when true closes an open cylinder by adding extra flat faces between the height axis and vertical edges, think cut cake * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.height * @param options.diameterTop * @param options.diameterBottom * @param options.diameter * @param options.tessellation * @param options.subdivisions * @param options.arc * @param options.faceColors * @param options.faceUV * @param options.hasRings * @param options.enclose * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the cylinder, cone or prism * @deprecated please use CreateCylinderVertexData instead */ static CreateCylinder(e) { throw yr("cylinderBuilder"); } /** * Creates the VertexData for a torus * @param options an object used to set the following optional parameters for the box, required but can be empty * * diameter the diameter of the torus, optional default 1 * * thickness the diameter of the tube forming the torus, optional default 0.5 * * tessellation the number of prism sides, 3 for a triangular prism, optional, default 24 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.diameter * @param options.thickness * @param options.tessellation * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the torus * @deprecated use CreateTorusVertexData instead */ static CreateTorus(e) { throw yr("torusBuilder"); } /** * Creates the VertexData of the LineSystem * @param options an object used to set the following optional parameters for the LineSystem, required but can be empty * - lines an array of lines, each line being an array of successive Vector3 * - colors an array of line colors, each of the line colors being an array of successive Color4, one per line point * @param options.lines * @param options.colors * @returns the VertexData of the LineSystem * @deprecated use CreateLineSystemVertexData instead */ static CreateLineSystem(e) { throw yr("linesBuilder"); } /** * Create the VertexData for a DashedLines * @param options an object used to set the 
following optional parameters for the DashedLines, required but can be empty * - points an array of successive Vector3 * - dashSize the size of the dashes relative to the dash number, optional, default 3 * - gapSize the size of the gap between two successive dashes relative to the dash number, optional, default 1 * - dashNb the intended total number of dashes, optional, default 200 * @param options.points * @param options.dashSize * @param options.gapSize * @param options.dashNb * @returns the VertexData for the DashedLines * @deprecated use CreateDashedLinesVertexData instead */ static CreateDashedLines(e) { throw yr("linesBuilder"); } /** * Creates the VertexData for a Ground * @param options an object used to set the following optional parameters for the Ground, required but can be empty * - width the width (x direction) of the ground, optional, default 1 * - height the height (z direction) of the ground, optional, default 1 * - subdivisions the number of subdivisions per side, optional, default 1 * @param options.width * @param options.height * @param options.subdivisions * @param options.subdivisionsX * @param options.subdivisionsY * @returns the VertexData of the Ground * @deprecated Please use CreateGroundVertexData instead */ static CreateGround(e) { throw yr("groundBuilder"); } /** * Creates the VertexData for a TiledGround by subdividing the ground into tiles * @param options an object used to set the following optional parameters for the TiledGround, required but can be empty * * xmin the ground minimum X coordinate, optional, default -1 * * zmin the ground minimum Z coordinate, optional, default -1 * * xmax the ground maximum X coordinate, optional, default 1 * * zmax the ground maximum Z coordinate, optional, default 1 * * subdivisions a javascript object {w: positive integer, h: positive integer}, `w` and `h` are the numbers of subdivisions on the ground width and height creating 'tiles', default {w: 6, h: 6} * * precision a javascript object {w: positive integer, h: positive integer}, `w` and `h` are the numbers of subdivisions on the tile width and height, default {w: 2, h: 2} * @param options.xmin * @param options.zmin * @param options.xmax * @param options.zmax * @param options.subdivisions * @param options.subdivisions.w * @param options.subdivisions.h * @param options.precision * @param options.precision.w * @param options.precision.h * @returns the VertexData of the TiledGround * @deprecated use CreateTiledGroundVertexData instead */ static CreateTiledGround(e) { throw yr("groundBuilder"); } /** * Creates the VertexData of the Ground designed from a heightmap * @param options an object used to set the following parameters for the Ground, required and provided by CreateGroundFromHeightMap * * width the width (x direction) of the ground * * height the height (z direction) of the ground * * subdivisions the number of subdivisions per side * * minHeight the minimum altitude on the ground, optional, default 0 * * maxHeight the maximum altitude on the ground, optional default 1 * * colorFilter the filter to apply to the image pixel colors to compute the height, optional Color3, default (0.3, 0.59, 0.11) * * buffer the array holding the image color data * * bufferWidth the width of the image * * bufferHeight the height of the image * * alphaFilter remove any data where the alpha channel is below this value, default 0 (all data visible) * @param options.width * @param options.height * @param options.subdivisions * @param options.minHeight * @param options.maxHeight * @param 
options.colorFilter * @param options.buffer * @param options.bufferWidth * @param options.bufferHeight * @param options.alphaFilter * @returns the VertexData of the Ground designed from a heightmap * @deprecated use CreateGroundFromHeightMapVertexData instead */ static CreateGroundFromHeightMap(e) { throw yr("groundBuilder"); } /** * Creates the VertexData for a Plane * @param options an object used to set the following optional parameters for the plane, required but can be empty * * size sets the width and height of the plane to the value of size, optional default 1 * * width sets the width (x direction) of the plane, overwrites the width set by size, optional, default size * * height sets the height (y direction) of the plane, overwrites the height set by size, optional, default size * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.size * @param options.width * @param options.height * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the plane * @deprecated use CreatePlaneVertexData instead */ static CreatePlane(e) { throw yr("planeBuilder"); } /** * Creates the VertexData of the Disc or regular Polygon * @param options an object used to set the following optional parameters for the disc, required but can be empty * * radius the radius of the disc, optional default 0.5 * * tessellation the number of polygon sides, optional, default 64 * * arc a number from 0 to 1, to create an unclosed polygon based on the fraction of the circumference given by the arc value, optional, default 1 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.radius * @param options.tessellation * @param options.arc * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the disc * @deprecated use CreateDiscVertexData instead */ static CreateDisc(e) { throw yr("discBuilder"); } /** * Creates the VertexData for an irregular Polygon in the XoZ plane using a mesh built by polygonTriangulation.build() * All parameters are provided by CreatePolygon as needed * @param polygon a mesh built from polygonTriangulation.build() * @param sideOrientation takes the values Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * @param fUV an array of Vector4 elements used to set different images to the top, rings and bottom respectively * @param fColors an array of Color3 elements used to set different colors to the top, rings and bottom respectively * @param frontUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * 
@param backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param wrap a boolean, default false, when true and fUVs used texture is wrapped around all sides, when false the texture is applied per side * @returns the VertexData of the Polygon * @deprecated use CreatePolygonVertexData instead */ static CreatePolygon(e, t, i, r, s, n, a) { throw yr("polygonBuilder"); } /** * Creates the VertexData of the IcoSphere * @param options an object used to set the following optional parameters for the IcoSphere, required but can be empty * * radius the radius of the IcoSphere, optional default 1 * * radiusX allows stretching in the x direction, optional, default radius * * radiusY allows stretching in the y direction, optional, default radius * * radiusZ allows stretching in the z direction, optional, default radius * * flat when true creates a flat shaded mesh, optional, default true * * subdivisions increasing the subdivisions increases the number of faces, optional, default 4 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.radius * @param options.radiusX * @param options.radiusY * @param options.radiusZ * @param options.flat * @param options.subdivisions * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the IcoSphere * @deprecated use CreateIcoSphereVertexData instead */ static CreateIcoSphere(e) { throw yr("icoSphereBuilder"); } // inspired from // http://stemkoski.github.io/Three.js/Polyhedra.html /** * Creates the VertexData for a Polyhedron * @param options an object used to set the following optional parameters for the polyhedron, required but can be empty * * type provided types are: * * 0 : Tetrahedron, 1 : Octahedron, 2 : Dodecahedron, 3 : Icosahedron, 4 : Rhombicuboctahedron, 5 : Triangular Prism, 6 : Pentagonal Prism, 7 : Hexagonal Prism, 8 : Square Pyramid (J1) * * 9 : Pentagonal Pyramid (J2), 10 : Triangular Dipyramid (J12), 11 : Pentagonal Dipyramid (J13), 12 : Elongated Square Dipyramid (J15), 13 : Elongated Pentagonal Dipyramid (J16), 14 : Elongated Pentagonal Cupola (J20) * * size the size of the Polyhedron, optional default 1 * * sizeX allows stretching in the x direction, optional, default size * * sizeY allows stretching in the y direction, optional, default size * * sizeZ allows stretching in the z direction, optional, default size * * custom a number that overwrites the type to create from an extended set of polyhedron from https://www.babylonjs-playground.com/#21QRSK#15 with minimised editor * * faceUV an array of Vector4 elements used to set different images to the top, rings and bottom respectively * * faceColors an array of Color3 elements used to set different colors to the top, rings and bottom respectively * * flat when true creates a flat shaded mesh, optional, default true * * subdivisions increasing the subdivisions increases the number of faces, optional, default 4 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), 
Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.type * @param options.size * @param options.sizeX * @param options.sizeY * @param options.sizeZ * @param options.custom * @param options.faceUV * @param options.faceColors * @param options.flat * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the Polyhedron * @deprecated use CreatePolyhedronVertexData instead */ static CreatePolyhedron(e) { throw yr("polyhedronBuilder"); } /** * Creates the VertexData for a Capsule, inspired from https://github.com/maximeq/three-js-capsule-geometry/blob/master/src/CapsuleBufferGeometry.js * @param options an object used to set the following optional parameters for the capsule, required but can be empty * @returns the VertexData of the Capsule * @deprecated Please use CreateCapsuleVertexData from the capsuleBuilder file instead */ static CreateCapsule(e = { orientation: D.Up(), subdivisions: 2, tessellation: 16, height: 1, radius: 0.25, capSubdivisions: 6 }) { throw yr("capsuleBuilder"); } // based on http://code.google.com/p/away3d/source/browse/trunk/fp10/Away3D/src/away3d/primitives/TorusKnot.as?spec=svn2473&r=2473 /** * Creates the VertexData for a TorusKnot * @param options an object used to set the following optional parameters for the TorusKnot, required but can be empty * * radius the radius of the torus knot, optional, default 2 * * tube the thickness of the tube, optional, default 0.5 * * radialSegments the number of sides on each tube segments, optional, default 32 * * tubularSegments the number of tubes to decompose the knot into, optional, default 32 * * p the number of windings around the z axis, optional, default 2 * * q the number of windings around the x axis, optional, default 3 * * sideOrientation optional and takes the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * frontUvs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the front side, optional, default vector4 (0, 0, 1, 1) * * backUVs only usable when you create a double-sided mesh, used to choose what parts of the texture image to crop and apply on the back side, optional, default vector4 (0, 0, 1, 1) * @param options.radius * @param options.tube * @param options.radialSegments * @param options.tubularSegments * @param options.p * @param options.q * @param options.sideOrientation * @param options.frontUVs * @param options.backUVs * @returns the VertexData of the Torus Knot * @deprecated use CreateTorusKnotVertexData instead */ static CreateTorusKnot(e) { throw yr("torusKnotBuilder"); } // Tools /** * Compute normals for given positions and indices * @param positions an array of vertex positions, [...., x, y, z, ......] * @param indices an array of indices in groups of three for each triangular facet, [...., i, j, k, ......] * @param normals an array of vertex normals, [...., x, y, z, ......] 
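 * @example
 * // Editorial usage sketch, not part of the original library source: it assumes the
 * // public (unminified) Babylon.js name VertexData for this class and a hypothetical
 * // instance `vertexData` whose positions and indices are already populated.
 * const normals = new Float32Array(vertexData.positions.length); // one x, y, z per vertex
 * VertexData.ComputeNormals(vertexData.positions, vertexData.indices, normals); // fills `normals` in place
 * vertexData.normals = normals;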
* @param options an object used to set the following optional parameters for the TorusKnot, optional * * facetNormals : optional array of facet normals (vector3) * * facetPositions : optional array of facet positions (vector3) * * facetPartitioning : optional partitioning array. facetPositions is required for facetPartitioning computation * * ratio : optional partitioning ratio / bounding box, required for facetPartitioning computation * * bInfo : optional bounding info, required for facetPartitioning computation * * bbSize : optional bounding box size data, required for facetPartitioning computation * * subDiv : optional partitioning data about subdivisions on each axis (int), required for facetPartitioning computation * * useRightHandedSystem: optional boolean to for right handed system computation * * depthSort : optional boolean to enable the facet depth sort computation * * distanceTo : optional Vector3 to compute the facet depth from this location * * depthSortedFacets : optional array of depthSortedFacets to store the facet distances from the reference location * @param options.facetNormals * @param options.facetPositions * @param options.facetPartitioning * @param options.ratio * @param options.bInfo * @param options.bbSize * @param options.subDiv * @param options.useRightHandedSystem * @param options.depthSort * @param options.distanceTo * @param options.depthSortedFacets */ static ComputeNormals(e, t, i, r) { let s = 0, n = 0, a = 0, l = 0, o = 0, u = 0, h = 0, d = 0, f = 0, p = 0, m = 0, _ = 0, v = 0, C = 0, x = 0, b = 0, S = 0, M = 0, R = 0, w = 0, V = !1, k = !1, L = !1, B = !1, U = 1, K = 0, ee = null; r && (V = !!r.facetNormals, k = !!r.facetPositions, L = !!r.facetPartitioning, U = r.useRightHandedSystem === !0 ? -1 : 1, K = r.ratio || 0, B = !!r.depthSort, ee = r.distanceTo, B && ee === void 0 && (ee = D.Zero())); let Z = 0, q = 0, le = 0, ie = 0; for (L && r && r.bbSize && (Z = r.subDiv.X * K / r.bbSize.x, q = r.subDiv.Y * K / r.bbSize.y, le = r.subDiv.Z * K / r.bbSize.z, ie = r.subDiv.max * r.subDiv.max, r.facetPartitioning.length = 0), s = 0; s < e.length; s++) i[s] = 0; const $ = t.length / 3 | 0; for (s = 0; s < $; s++) { if (_ = t[s * 3] * 3, v = _ + 1, C = _ + 2, x = t[s * 3 + 1] * 3, b = x + 1, S = x + 2, M = t[s * 3 + 2] * 3, R = M + 1, w = M + 2, n = e[_] - e[x], a = e[v] - e[b], l = e[C] - e[S], o = e[M] - e[x], u = e[R] - e[b], h = e[w] - e[S], d = U * (a * h - l * u), f = U * (l * o - n * h), p = U * (n * u - a * o), m = Math.sqrt(d * d + f * f + p * p), m = m === 0 ? 
1 : m, d /= m, f /= m, p /= m, V && r && (r.facetNormals[s].x = d, r.facetNormals[s].y = f, r.facetNormals[s].z = p), k && r && (r.facetPositions[s].x = (e[_] + e[x] + e[M]) / 3, r.facetPositions[s].y = (e[v] + e[b] + e[R]) / 3, r.facetPositions[s].z = (e[C] + e[S] + e[w]) / 3), L && r) { const j = Math.floor((r.facetPositions[s].x - r.bInfo.minimum.x * K) * Z), J = Math.floor((r.facetPositions[s].y - r.bInfo.minimum.y * K) * q), ne = Math.floor((r.facetPositions[s].z - r.bInfo.minimum.z * K) * le), pe = Math.floor((e[_] - r.bInfo.minimum.x * K) * Z), ge = Math.floor((e[v] - r.bInfo.minimum.y * K) * q), Ie = Math.floor((e[C] - r.bInfo.minimum.z * K) * le), ye = Math.floor((e[x] - r.bInfo.minimum.x * K) * Z), Se = Math.floor((e[b] - r.bInfo.minimum.y * K) * q), re = Math.floor((e[S] - r.bInfo.minimum.z * K) * le), te = Math.floor((e[M] - r.bInfo.minimum.x * K) * Z), he = Math.floor((e[R] - r.bInfo.minimum.y * K) * q), be = Math.floor((e[w] - r.bInfo.minimum.z * K) * le), Ue = pe + r.subDiv.max * ge + ie * Ie, Ee = ye + r.subDiv.max * Se + ie * re, He = te + r.subDiv.max * he + ie * be, Xe = j + r.subDiv.max * J + ie * ne; r.facetPartitioning[Xe] = r.facetPartitioning[Xe] ? r.facetPartitioning[Xe] : new Array(), r.facetPartitioning[Ue] = r.facetPartitioning[Ue] ? r.facetPartitioning[Ue] : new Array(), r.facetPartitioning[Ee] = r.facetPartitioning[Ee] ? r.facetPartitioning[Ee] : new Array(), r.facetPartitioning[He] = r.facetPartitioning[He] ? r.facetPartitioning[He] : new Array(), r.facetPartitioning[Ue].push(s), Ee != Ue && r.facetPartitioning[Ee].push(s), He == Ee || He == Ue || r.facetPartitioning[He].push(s), Xe == Ue || Xe == Ee || Xe == He || r.facetPartitioning[Xe].push(s); } if (B && r && r.facetPositions) { const j = r.depthSortedFacets[s]; j.ind = s * 3, j.sqDistance = D.DistanceSquared(r.facetPositions[s], ee); } i[_] += d, i[v] += f, i[C] += p, i[x] += d, i[b] += f, i[S] += p, i[M] += d, i[R] += f, i[w] += p; } for (s = 0; s < i.length / 3; s++) d = i[s * 3], f = i[s * 3 + 1], p = i[s * 3 + 2], m = Math.sqrt(d * d + f * f + p * p), m = m === 0 ? 
1 : m, d /= m, f /= m, p /= m, i[s * 3] = d, i[s * 3 + 1] = f, i[s * 3 + 2] = p; } /** * @internal */ static _ComputeSides(e, t, i, r, s, n, a) { const l = i.length, o = r.length; let u, h; switch (e = e || Ot.DEFAULTSIDE, e) { case Ot.FRONTSIDE: break; case Ot.BACKSIDE: for (u = 0; u < l; u += 3) { const d = i[u]; i[u] = i[u + 2], i[u + 2] = d; } for (h = 0; h < o; h++) r[h] = -r[h]; break; case Ot.DOUBLESIDE: { const d = t.length, f = d / 3; for (let _ = 0; _ < d; _++) t[d + _] = t[_]; for (u = 0; u < l; u += 3) i[u + l] = i[u + 2] + f, i[u + 1 + l] = i[u + 1] + f, i[u + 2 + l] = i[u] + f; for (h = 0; h < o; h++) r[o + h] = -r[h]; const p = s.length; let m = 0; for (m = 0; m < p; m++) s[m + p] = s[m]; for (n = n || new Di(0, 0, 1, 1), a = a || new Di(0, 0, 1, 1), m = 0, u = 0; u < p / 2; u++) s[m] = n.x + (n.z - n.x) * s[m], s[m + 1] = n.y + (n.w - n.y) * s[m + 1], s[m + p] = a.x + (a.z - a.x) * s[m + p], s[m + p + 1] = a.y + (a.w - a.y) * s[m + p + 1], m += 2; break; } } } /** * Creates a VertexData from serialized data * @param parsedVertexData the parsed data from an imported file * @returns a VertexData */ static Parse(e) { const t = new Ot(), i = e.positions; i && t.set(i, Y.PositionKind); const r = e.normals; r && t.set(r, Y.NormalKind); const s = e.tangents; s && t.set(s, Y.TangentKind); const n = e.uvs; n && t.set(n, Y.UVKind); const a = e.uvs2; a && t.set(a, Y.UV2Kind); const l = e.uvs3; l && t.set(l, Y.UV3Kind); const o = e.uvs4; o && t.set(o, Y.UV4Kind); const u = e.uvs5; u && t.set(u, Y.UV5Kind); const h = e.uvs6; h && t.set(h, Y.UV6Kind); const d = e.colors; d && t.set(Et.CheckColors4(d, i.length / 3), Y.ColorKind); const f = e.matricesIndices; f && t.set(f, Y.MatricesIndicesKind); const p = e.matricesWeights; p && t.set(p, Y.MatricesWeightsKind); const m = e.indices; m && (t.indices = m); const _ = e.materialInfos; if (_) { t.materialInfos = []; for (const v of _) { const C = new H9(); C.indexCount = v.indexCount, C.indexStart = v.indexStart, C.verticesCount = v.verticesCount, C.verticesStart = v.verticesStart, C.materialIndex = v.materialIndex, t.materialInfos.push(C); } } return t; } /** * Applies VertexData created from the imported parameters to the geometry * @param parsedVertexData the parsed data from an imported file * @param geometry the geometry to apply the VertexData to */ static ImportVertexData(e, t) { const i = Ot.Parse(e); t.setAllVerticesData(i, e.updatable); } } Ot.FRONTSIDE = 0; Ot.BACKSIDE = 1; Ot.DOUBLESIDE = 2; Ot.DEFAULTSIDE = 0; Ot._UniqueIDGenerator = 0; F([ gT.filter((...[c]) => !Array.isArray(c)) ], Ot, "_TransformVector3Coordinates", null); F([ gT.filter((...[c]) => !Array.isArray(c)) ], Ot, "_TransformVector3Normals", null); F([ gT.filter((...[c]) => !Array.isArray(c)) ], Ot, "_TransformVector4Normals", null); F([ gT.filter((...[c]) => !Array.isArray(c)) ], Ot, "_FlipFaces", null); class uu { /** * Gets or sets a boolean indicating if entire scene must be loaded even if scene contains incremental data */ static get ForceFullSceneLoadingForIncremental() { return uu._ForceFullSceneLoadingForIncremental; } static set ForceFullSceneLoadingForIncremental(e) { uu._ForceFullSceneLoadingForIncremental = e; } /** * Gets or sets a boolean indicating if loading screen must be displayed while loading a scene */ static get ShowLoadingScreen() { return uu._ShowLoadingScreen; } static set ShowLoadingScreen(e) { uu._ShowLoadingScreen = e; } /** * Defines the current logging level (while loading the scene) * @ignorenaming */ // eslint-disable-next-line 
@typescript-eslint/naming-convention static get loggingLevel() { return uu._LoggingLevel; } // eslint-disable-next-line @typescript-eslint/naming-convention static set loggingLevel(e) { uu._LoggingLevel = e; } /** * Gets or set a boolean indicating if matrix weights must be cleaned upon loading */ static get CleanBoneMatrixWeights() { return uu._CleanBoneMatrixWeights; } static set CleanBoneMatrixWeights(e) { uu._CleanBoneMatrixWeights = e; } } uu._ForceFullSceneLoadingForIncremental = !1; uu._ShowLoadingScreen = !0; uu._CleanBoneMatrixWeights = !1; uu._LoggingLevel = 0; class hn { } hn.UseOpenGLOrientationForUV = !1; class yc { /** * Gets or sets the Bias Vector to apply on the bounding elements (box/sphere), the max extend is computed as v += v * bias.x + bias.y, the min is computed as v -= v * bias.x + bias.y */ get boundingBias() { return this._boundingBias; } /** * Gets or sets the Bias Vector to apply on the bounding elements (box/sphere), the max extend is computed as v += v * bias.x + bias.y, the min is computed as v -= v * bias.x + bias.y */ set boundingBias(e) { this._boundingBias ? this._boundingBias.copyFrom(e) : this._boundingBias = e.clone(), this._updateBoundingInfo(!0, null); } /** * Static function used to attach a new empty geometry to a mesh * @param mesh defines the mesh to attach the geometry to * @returns the new Geometry */ static CreateGeometryForMesh(e) { const t = new yc(yc.RandomId(), e.getScene()); return t.applyToMesh(e), t; } /** Get the list of meshes using this geometry */ get meshes() { return this._meshes; } /** * Creates a new geometry * @param id defines the unique ID * @param scene defines the hosting scene * @param vertexData defines the VertexData used to get geometry data * @param updatable defines if geometry must be updatable (false by default) * @param mesh defines the mesh that will be associated with the geometry */ constructor(e, t, i, r = !1, s = null) { this.delayLoadState = 0, this._totalVertices = 0, this._isDisposed = !1, this._indexBufferIsUpdatable = !1, this._positionsCache = [], this._parentContainer = null, this.useBoundingInfoFromGeometry = !1, this._scene = t || gi.LastCreatedScene, this._scene && (this.id = e, this.uniqueId = this._scene.getUniqueId(), this._engine = this._scene.getEngine(), this._meshes = [], this._vertexBuffers = {}, this._indices = [], this._updatable = r, i ? 
this.setAllVerticesData(i, r) : this._totalVertices = 0, this._engine.getCaps().vertexArrayObject && (this._vertexArrayObjects = {}), s && (this.applyToMesh(s), s.computeWorldMatrix(!0))); } /** * Gets the current extend of the geometry */ get extend() { return this._extend; } /** * Gets the hosting scene * @returns the hosting Scene */ getScene() { return this._scene; } /** * Gets the hosting engine * @returns the hosting Engine */ getEngine() { return this._engine; } /** * Defines if the geometry is ready to use * @returns true if the geometry is ready to be used */ isReady() { return this.delayLoadState === 1 || this.delayLoadState === 0; } /** * Gets a value indicating that the geometry should not be serialized */ get doNotSerialize() { for (let e = 0; e < this._meshes.length; e++) if (!this._meshes[e].doNotSerialize) return !1; return !0; } /** @internal */ _rebuild() { this._vertexArrayObjects && (this._vertexArrayObjects = {}), this._meshes.length !== 0 && this._indices && (this._indexBuffer = this._engine.createIndexBuffer(this._indices, this._updatable)); for (const e in this._vertexBuffers) this._vertexBuffers[e]._rebuild(); } /** * Affects all geometry data in one call * @param vertexData defines the geometry data * @param updatable defines if the geometry must be flagged as updatable (false as default) */ setAllVerticesData(e, t) { e.applyToGeometry(this, t), this._notifyUpdate(); } /** * Set specific vertex data * @param kind defines the data kind (Position, normal, etc...) * @param data defines the vertex data to use * @param updatable defines if the vertex must be flagged as updatable (false as default) * @param stride defines the stride to use (0 by default). This value is deduced from the kind value if not specified */ setVerticesData(e, t, i = !1, r) { i && Array.isArray(t) && (t = new Float32Array(t)); const s = new Y(this._engine, t, e, { updatable: i, postponeInternalCreation: this._meshes.length === 0, stride: r, label: "Geometry_" + this.id + "_" + e }); this.setVerticesBuffer(s); } /** * Removes a specific vertex data * @param kind defines the data kind (Position, normal, etc...) */ removeVerticesData(e) { this._vertexBuffers[e] && (this._vertexBuffers[e].dispose(), delete this._vertexBuffers[e]), this._vertexArrayObjects && this._disposeVertexArrayObjects(); } /** * Affect a vertex buffer to the geometry. the vertexBuffer.getKind() function is used to determine where to store the data * @param buffer defines the vertex buffer to use * @param totalVertices defines the total number of vertices for position kind (could be null) * @param disposeExistingBuffer disposes the existing buffer, if any (default: true) */ setVerticesBuffer(e, t = null, i = !0) { const r = e.getKind(); this._vertexBuffers[r] && i && this._vertexBuffers[r].dispose(), e._buffer && e._buffer._increaseReferences(), this._vertexBuffers[r] = e; const s = this._meshes, n = s.length; if (r === Y.PositionKind) { this._totalVertices = t ?? 
e.totalVertices, this._updateExtend(e.getFloatData()), this._resetPointsArrayCache(); const a = this._extend && this._extend.minimum || new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE), l = this._extend && this._extend.maximum || new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE); for (let o = 0; o < n; o++) { const u = s[o]; u.buildBoundingInfo(a, l), u._createGlobalSubMesh(u.isUnIndexed), u.computeWorldMatrix(!0), u.synchronizeInstances(); } } this._notifyUpdate(r); } /** * Update a specific vertex buffer * This function will directly update the underlying DataBuffer according to the passed numeric array or Float32Array * It will do nothing if the buffer is not updatable * @param kind defines the data kind (Position, normal, etc...) * @param data defines the data to use * @param offset defines the offset in the target buffer where to store the data * @param useBytes set to true if the offset is in bytes */ updateVerticesDataDirectly(e, t, i, r = !1) { const s = this.getVertexBuffer(e); s && (s.updateDirectly(t, i, r), this._notifyUpdate(e)); } /** * Update a specific vertex buffer * This function will create a new buffer if the current one is not updatable * @param kind defines the data kind (Position, normal, etc...) * @param data defines the data to use * @param updateExtends defines if the geometry extends must be recomputed (false by default) */ updateVerticesData(e, t, i = !1) { const r = this.getVertexBuffer(e); r && (r.update(t), e === Y.PositionKind && this._updateBoundingInfo(i, t), this._notifyUpdate(e)); } _updateBoundingInfo(e, t) { if (e && this._updateExtend(t), this._resetPointsArrayCache(), e) { const i = this._meshes; for (const r of i) { r.hasBoundingInfo ? r.getBoundingInfo().reConstruct(this._extend.minimum, this._extend.maximum) : r.buildBoundingInfo(this._extend.minimum, this._extend.maximum); const s = r.subMeshes; for (const n of s) n.refreshBoundingInfo(); } } } /** * @internal */ _bind(e, t, i, r) { if (!e) return; t === void 0 && (t = this._indexBuffer); const s = this.getVertexBuffers(); if (!s) return; if (t != this._indexBuffer || !this._vertexArrayObjects && !r) { this._engine.bindBuffers(s, t, e, i); return; } const n = r || this._vertexArrayObjects; n[e.key] || (n[e.key] = this._engine.recordVertexArrayObject(s, t, e, i)), this._engine.bindVertexArrayObject(n[e.key], t); } /** * Gets total number of vertices * @returns the total number of vertices */ getTotalVertices() { return this.isReady() ? this._totalVertices : 0; } /** * Gets a specific vertex data attached to this geometry. Float data is constructed if the vertex buffer data cannot be returned directly. * @param kind defines the data kind (Position, normal, etc...) * @param copyWhenShared defines if the returned array must be cloned upon returning it if the current geometry is shared between multiple meshes * @param forceCopy defines a boolean indicating that the returned array must be cloned upon returning it * @returns a float array containing vertex data */ getVerticesData(e, t, i) { const r = this.getVertexBuffer(e); return r ? r.getFloatData(this._totalVertices, i || t && this._meshes.length !== 1) : null; } /** * Returns a boolean defining if the vertex data for the requested `kind` is updatable * @param kind defines the data kind (Position, normal, etc...) * @returns true if the vertex buffer with the specified kind is updatable */ isVertexBufferUpdatable(e) { const t = this._vertexBuffers[e]; return t ? 
t.isUpdatable() : !1; } /** * Gets a specific vertex buffer * @param kind defines the data kind (Position, normal, etc...) * @returns a VertexBuffer */ getVertexBuffer(e) { return this.isReady() ? this._vertexBuffers[e] : null; } /** * Returns all vertex buffers * @returns an object holding all vertex buffers indexed by kind */ getVertexBuffers() { return this.isReady() ? this._vertexBuffers : null; } /** * Gets a boolean indicating if specific vertex buffer is present * @param kind defines the data kind (Position, normal, etc...) * @returns true if data is present */ isVerticesDataPresent(e) { return this._vertexBuffers ? this._vertexBuffers[e] !== void 0 : this._delayInfo ? this._delayInfo.indexOf(e) !== -1 : !1; } /** * Gets a list of all attached data kinds (Position, normal, etc...) * @returns a list of string containing all kinds */ getVerticesDataKinds() { const e = []; let t; if (!this._vertexBuffers && this._delayInfo) for (t in this._delayInfo) e.push(t); else for (t in this._vertexBuffers) e.push(t); return e; } /** * Update index buffer * @param indices defines the indices to store in the index buffer * @param offset defines the offset in the target buffer where to store the data * @param gpuMemoryOnly defines a boolean indicating that only the GPU memory must be updated leaving the CPU version of the indices unchanged (false by default) */ updateIndices(e, t, i = !1) { if (this._indexBuffer) if (!this._indexBufferIsUpdatable) this.setIndices(e, null, !0); else { const r = e.length !== this._indices.length; if (i || (this._indices = e.slice()), this._engine.updateDynamicIndexBuffer(this._indexBuffer, e, t), r) for (const s of this._meshes) s._createGlobalSubMesh(!0); } } /** * Sets the index buffer for this geometry. * @param indexBuffer Defines the index buffer to use for this geometry * @param totalVertices Defines the total number of vertices used by the buffer * @param totalIndices Defines the total number of indices in the index buffer */ setIndexBuffer(e, t, i) { this._indices = [], this._indexBufferIsUpdatable = !1, this._indexBuffer = e, this._totalVertices = t, this._totalIndices = i, e.is32Bits || (e.is32Bits = this._totalIndices > 65535); for (const r of this._meshes) r._createGlobalSubMesh(!0), r.synchronizeInstances(); this._notifyUpdate(); } /** * Creates a new index buffer * @param indices defines the indices to store in the index buffer * @param totalVertices defines the total number of vertices (could be null) * @param updatable defines if the index buffer must be flagged as updatable (false by default) */ setIndices(e, t = null, i = !1) { this._indexBuffer && this._engine._releaseBuffer(this._indexBuffer), this._indices = e, this._indexBufferIsUpdatable = i, this._meshes.length !== 0 && this._indices && (this._indexBuffer = this._engine.createIndexBuffer(this._indices, i)), t != null && (this._totalVertices = t); for (const r of this._meshes) r._createGlobalSubMesh(!0), r.synchronizeInstances(); this._notifyUpdate(); } /** * Return the total number of indices * @returns the total number of indices */ getTotalIndices() { return this.isReady() ? this._totalIndices !== void 0 ? 
this._totalIndices : this._indices.length : 0; } /** * Gets the index buffer array * @param copyWhenShared defines if the returned array must be cloned upon returning it if the current geometry is shared between multiple meshes * @param forceCopy defines a boolean indicating that the returned array must be cloned upon returning it * @returns the index buffer array */ getIndices(e, t) { if (!this.isReady()) return null; const i = this._indices; return !t && (!e || this._meshes.length === 1) ? i : i.slice(); } /** * Gets the index buffer * @returns the index buffer */ getIndexBuffer() { return this.isReady() ? this._indexBuffer : null; } /** * @internal */ _releaseVertexArrayObject(e = null) { !e || !this._vertexArrayObjects || this._vertexArrayObjects[e.key] && (this._engine.releaseVertexArrayObject(this._vertexArrayObjects[e.key]), delete this._vertexArrayObjects[e.key]); } /** * Release the associated resources for a specific mesh * @param mesh defines the source mesh * @param shouldDispose defines if the geometry must be disposed if there is no more mesh pointing to it */ releaseForMesh(e, t) { const i = this._meshes, r = i.indexOf(e); r !== -1 && (i.splice(r, 1), this._vertexArrayObjects && e._invalidateInstanceVertexArrayObject(), e._geometry = null, i.length === 0 && t && this.dispose()); } /** * Apply current geometry to a given mesh * @param mesh defines the mesh to apply geometry to */ applyToMesh(e) { if (e._geometry === this) return; const t = e._geometry; t && t.releaseForMesh(e), this._vertexArrayObjects && e._invalidateInstanceVertexArrayObject(); const i = this._meshes; e._geometry = this, e._internalAbstractMeshDataInfo._positions = null, this._scene.pushGeometry(this), i.push(e), this.isReady() ? this._applyToMesh(e) : this._boundingInfo && e.setBoundingInfo(this._boundingInfo); } _updateExtend(e = null) { if (this.useBoundingInfoFromGeometry && this._boundingInfo) this._extend = { minimum: this._boundingInfo.minimum.clone(), maximum: this._boundingInfo.maximum.clone() }; else { if (!e && (e = this.getVerticesData(Y.PositionKind), !e)) return; this._extend = kO(e, 0, this._totalVertices, this.boundingBias, 3); } } _applyToMesh(e) { const t = this._meshes.length; for (const i in this._vertexBuffers) t === 1 && this._vertexBuffers[i].create(), i === Y.PositionKind && (this._extend || this._updateExtend(), e.buildBoundingInfo(this._extend.minimum, this._extend.maximum), e._createGlobalSubMesh(e.isUnIndexed), e._updateBoundingInfo()); t === 1 && this._indices && this._indices.length > 0 && (this._indexBuffer = this._engine.createIndexBuffer(this._indices, this._updatable)), e._syncGeometryWithMorphTargetManager(), e.synchronizeInstances(); } _notifyUpdate(e) { this.onGeometryUpdated && this.onGeometryUpdated(this, e), this._vertexArrayObjects && this._disposeVertexArrayObjects(); for (const t of this._meshes) t._markSubMeshesAsAttributesDirty(); } /** * Load the geometry if it was flagged as delay loaded * @param scene defines the hosting scene * @param onLoaded defines a callback called when the geometry is loaded */ load(e, t) { if (this.delayLoadState !== 2) { if (this.isReady()) { t && t(); return; } this.delayLoadState = 2, this._queueLoad(e, t); } } _queueLoad(e, t) { this.delayLoadingFile && (e.addPendingData(this), e._loadFile(this.delayLoadingFile, (i) => { if (!this._delayLoadingFunction) return; this._delayLoadingFunction(JSON.parse(i), this), this.delayLoadState = 1, this._delayInfo = [], e.removePendingData(this); const r = this._meshes, s = r.length; for (let n = 
0; n < s; n++) this._applyToMesh(r[n]); t && t(); }, void 0, !0)); } /** * Invert the geometry to move from a right handed system to a left handed one. */ toLeftHanded() { const e = this.getIndices(!1); if (e != null && e.length > 0) { for (let r = 0; r < e.length; r += 3) { const s = e[r + 0]; e[r + 0] = e[r + 2], e[r + 2] = s; } this.setIndices(e); } const t = this.getVerticesData(Y.PositionKind, !1); if (t != null && t.length > 0) { for (let r = 0; r < t.length; r += 3) t[r + 2] = -t[r + 2]; this.setVerticesData(Y.PositionKind, t, !1); } const i = this.getVerticesData(Y.NormalKind, !1); if (i != null && i.length > 0) { for (let r = 0; r < i.length; r += 3) i[r + 2] = -i[r + 2]; this.setVerticesData(Y.NormalKind, i, !1); } } // Cache /** @internal */ _resetPointsArrayCache() { this._positions = null; } /** @internal */ _generatePointsArray() { if (this._positions) return !0; const e = this.getVerticesData(Y.PositionKind); if (!e || e.length === 0) return !1; for (let t = this._positionsCache.length * 3, i = this._positionsCache.length; t < e.length; t += 3, ++i) this._positionsCache[i] = D.FromArray(e, t); for (let t = 0, i = 0; t < e.length; t += 3, ++i) this._positionsCache[i].set(e[0 + t], e[1 + t], e[2 + t]); return this._positionsCache.length = e.length / 3, this._positions = this._positionsCache, !0; } /** * Gets a value indicating if the geometry is disposed * @returns true if the geometry was disposed */ isDisposed() { return this._isDisposed; } _disposeVertexArrayObjects() { if (this._vertexArrayObjects) { for (const i in this._vertexArrayObjects) this._engine.releaseVertexArrayObject(this._vertexArrayObjects[i]); this._vertexArrayObjects = {}; const e = this._meshes, t = e.length; for (let i = 0; i < t; i++) e[i]._invalidateInstanceVertexArrayObject(); } } /** * Free all associated resources */ dispose() { const e = this._meshes, t = e.length; let i; for (i = 0; i < t; i++) this.releaseForMesh(e[i]); this._meshes.length = 0, this._disposeVertexArrayObjects(); for (const r in this._vertexBuffers) this._vertexBuffers[r].dispose(); if (this._vertexBuffers = {}, this._totalVertices = 0, this._indexBuffer && this._engine._releaseBuffer(this._indexBuffer), this._indexBuffer = null, this._indices = [], this.delayLoadState = 0, this.delayLoadingFile = null, this._delayLoadingFunction = null, this._delayInfo = [], this._boundingInfo = null, this._scene.removeGeometry(this), this._parentContainer) { const r = this._parentContainer.geometries.indexOf(this); r > -1 && this._parentContainer.geometries.splice(r, 1), this._parentContainer = null; } this._isDisposed = !0; } /** * Clone the current geometry into a new geometry * @param id defines the unique ID of the new geometry * @returns a new geometry object */ copy(e) { const t = new Ot(); t.indices = []; const i = this.getIndices(); if (i) for (let l = 0; l < i.length; l++) t.indices.push(i[l]); let r = !1, s = !1, n; for (n in this._vertexBuffers) { const l = this.getVerticesData(n); if (l && (l instanceof Float32Array ? 
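/*
 * The toLeftHanded() conversion above flips triangle winding by swapping the
 * first and third index of each face and negates the z component of positions
 * and normals. The same transform on raw arrays (hypothetical standalone helper,
 * not part of the library):
 *
 *   function toLeftHandedArrays(indices, positions, normals) {
 *     for (let i = 0; i < indices.length; i += 3) {
 *       const tmp = indices[i];          // swap v0 and v2 to reverse winding
 *       indices[i] = indices[i + 2];
 *       indices[i + 2] = tmp;
 *     }
 *     for (let i = 2; i < positions.length; i += 3) positions[i] = -positions[i]; // negate z
 *     for (let i = 2; i < normals.length; i += 3) normals[i] = -normals[i];
 *     return { indices, positions, normals };
 *   }
 */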
t.set(new Float32Array(l), n) : t.set(l.slice(0), n), !s)) { const o = this.getVertexBuffer(n); o && (r = o.isUpdatable(), s = !r); } } const a = new yc(e, this._scene, t, r); a.delayLoadState = this.delayLoadState, a.delayLoadingFile = this.delayLoadingFile, a._delayLoadingFunction = this._delayLoadingFunction; for (n in this._delayInfo) a._delayInfo = a._delayInfo || [], a._delayInfo.push(n); return a._boundingInfo = new zf(this._extend.minimum, this._extend.maximum), a; } /** * Serialize the current geometry info (and not the vertices data) into a JSON object * @returns a JSON representation of the current geometry data (without the vertices data) */ serialize() { const e = {}; return e.id = this.id, e.uniqueId = this.uniqueId, e.updatable = this._updatable, $s && $s.HasTags(this) && (e.tags = $s.GetTags(this)), e; } _toNumberArray(e) { return Array.isArray(e) ? e : Array.prototype.slice.call(e); } /** * Release any memory retained by the cached data on the Geometry. * * Call this function to reduce memory footprint of the mesh. * Vertex buffers will not store CPU data anymore (this will prevent picking, collisions or physics to work correctly) */ clearCachedData() { this._indices = [], this._resetPointsArrayCache(); for (const e in this._vertexBuffers) Object.prototype.hasOwnProperty.call(this._vertexBuffers, e) && (this._vertexBuffers[e]._buffer._data = null); } /** * Serialize all vertices data into a JSON object * @returns a JSON representation of the current geometry data */ serializeVerticeData() { const e = this.serialize(); return this.isVerticesDataPresent(Y.PositionKind) && (e.positions = this._toNumberArray(this.getVerticesData(Y.PositionKind)), this.isVertexBufferUpdatable(Y.PositionKind) && (e.positions._updatable = !0)), this.isVerticesDataPresent(Y.NormalKind) && (e.normals = this._toNumberArray(this.getVerticesData(Y.NormalKind)), this.isVertexBufferUpdatable(Y.NormalKind) && (e.normals._updatable = !0)), this.isVerticesDataPresent(Y.TangentKind) && (e.tangents = this._toNumberArray(this.getVerticesData(Y.TangentKind)), this.isVertexBufferUpdatable(Y.TangentKind) && (e.tangents._updatable = !0)), this.isVerticesDataPresent(Y.UVKind) && (e.uvs = this._toNumberArray(this.getVerticesData(Y.UVKind)), this.isVertexBufferUpdatable(Y.UVKind) && (e.uvs._updatable = !0)), this.isVerticesDataPresent(Y.UV2Kind) && (e.uvs2 = this._toNumberArray(this.getVerticesData(Y.UV2Kind)), this.isVertexBufferUpdatable(Y.UV2Kind) && (e.uvs2._updatable = !0)), this.isVerticesDataPresent(Y.UV3Kind) && (e.uvs3 = this._toNumberArray(this.getVerticesData(Y.UV3Kind)), this.isVertexBufferUpdatable(Y.UV3Kind) && (e.uvs3._updatable = !0)), this.isVerticesDataPresent(Y.UV4Kind) && (e.uvs4 = this._toNumberArray(this.getVerticesData(Y.UV4Kind)), this.isVertexBufferUpdatable(Y.UV4Kind) && (e.uvs4._updatable = !0)), this.isVerticesDataPresent(Y.UV5Kind) && (e.uvs5 = this._toNumberArray(this.getVerticesData(Y.UV5Kind)), this.isVertexBufferUpdatable(Y.UV5Kind) && (e.uvs5._updatable = !0)), this.isVerticesDataPresent(Y.UV6Kind) && (e.uvs6 = this._toNumberArray(this.getVerticesData(Y.UV6Kind)), this.isVertexBufferUpdatable(Y.UV6Kind) && (e.uvs6._updatable = !0)), this.isVerticesDataPresent(Y.ColorKind) && (e.colors = this._toNumberArray(this.getVerticesData(Y.ColorKind)), this.isVertexBufferUpdatable(Y.ColorKind) && (e.colors._updatable = !0)), this.isVerticesDataPresent(Y.MatricesIndicesKind) && (e.matricesIndices = this._toNumberArray(this.getVerticesData(Y.MatricesIndicesKind)), e.matricesIndices._isExpanded = 
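/*
 * Round-trip sketch for the serializers above (illustrative; assumes the public
 * Geometry API and an existing `geometry` instance):
 *
 *   const meta = geometry.serialize();            // id, uniqueId, updatable, tags only
 *   const full = geometry.serializeVerticeData(); // also embeds positions, normals, uvs, indices
 *   const json = JSON.stringify(full);
 *   // After persisting, the CPU-side copies can be dropped to shrink memory,
 *   // at the cost of picking, collisions and physics on this geometry.
 *   geometry.clearCachedData();
 */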
!0, this.isVertexBufferUpdatable(Y.MatricesIndicesKind) && (e.matricesIndices._updatable = !0)), this.isVerticesDataPresent(Y.MatricesWeightsKind) && (e.matricesWeights = this._toNumberArray(this.getVerticesData(Y.MatricesWeightsKind)), this.isVertexBufferUpdatable(Y.MatricesWeightsKind) && (e.matricesWeights._updatable = !0)), e.indices = this._toNumberArray(this.getIndices()), e; } // Statics /** * Extracts a clone of a mesh geometry * @param mesh defines the source mesh * @param id defines the unique ID of the new geometry object * @returns the new geometry object */ static ExtractFromMesh(e, t) { const i = e._geometry; return i ? i.copy(t) : null; } /** * You should now use Tools.RandomId(), this method is still here for legacy reasons. * Implementation from http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/2117523#answer-2117523 * Be aware Math.random() could cause collisions, but: * "All but 6 of the 128 bits of the ID are randomly generated, which means that for any two ids, there's a 1 in 2^^122 (or 5.3x10^^36) chance they'll collide" * @returns a string containing a new GUID */ static RandomId() { return Ve.RandomId(); } static _GetGeometryByLoadedUniqueId(e, t) { for (let i = 0; i < t.geometries.length; i++) if (t.geometries[i]._loadedUniqueId === e) return t.geometries[i]; return null; } /** * @internal */ static _ImportGeometry(e, t) { const i = t.getScene(), r = e.geometryUniqueId, s = e.geometryId; if (r || s) { const n = r ? this._GetGeometryByLoadedUniqueId(r, i) : i.getGeometryById(s); n && n.applyToMesh(t); } else if (e instanceof ArrayBuffer) { const n = t._binaryInfo; if (n.positionsAttrDesc && n.positionsAttrDesc.count > 0) { const a = new Float32Array(e, n.positionsAttrDesc.offset, n.positionsAttrDesc.count); t.setVerticesData(Y.PositionKind, a, !1); } if (n.normalsAttrDesc && n.normalsAttrDesc.count > 0) { const a = new Float32Array(e, n.normalsAttrDesc.offset, n.normalsAttrDesc.count); t.setVerticesData(Y.NormalKind, a, !1); } if (n.tangetsAttrDesc && n.tangetsAttrDesc.count > 0) { const a = new Float32Array(e, n.tangetsAttrDesc.offset, n.tangetsAttrDesc.count); t.setVerticesData(Y.TangentKind, a, !1); } if (n.uvsAttrDesc && n.uvsAttrDesc.count > 0) { const a = new Float32Array(e, n.uvsAttrDesc.offset, n.uvsAttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UVKind, a, !1); } if (n.uvs2AttrDesc && n.uvs2AttrDesc.count > 0) { const a = new Float32Array(e, n.uvs2AttrDesc.offset, n.uvs2AttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UV2Kind, a, !1); } if (n.uvs3AttrDesc && n.uvs3AttrDesc.count > 0) { const a = new Float32Array(e, n.uvs3AttrDesc.offset, n.uvs3AttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UV3Kind, a, !1); } if (n.uvs4AttrDesc && n.uvs4AttrDesc.count > 0) { const a = new Float32Array(e, n.uvs4AttrDesc.offset, n.uvs4AttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UV4Kind, a, !1); } if (n.uvs5AttrDesc && n.uvs5AttrDesc.count > 0) { const a = new Float32Array(e, n.uvs5AttrDesc.offset, n.uvs5AttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UV5Kind, a, !1); } if (n.uvs6AttrDesc && n.uvs6AttrDesc.count > 0) { const a = new Float32Array(e, 
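/*
 * RandomId() above delegates to Tools.RandomId. The Math.random based UUID v4
 * pattern referenced in its comment (from the linked Stack Overflow answer)
 * looks like this; as that comment notes, collisions are theoretically possible:
 *
 *   function randomId() {
 *     return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
 *       const r = (Math.random() * 16) | 0;         // random nibble
 *       const v = c === "x" ? r : (r & 0x3) | 0x8;  // RFC 4122 variant bits for "y"
 *       return v.toString(16);
 *     });
 *   }
 */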
n.uvs6AttrDesc.offset, n.uvs6AttrDesc.count); if (hn.UseOpenGLOrientationForUV) for (let l = 1; l < a.length; l += 2) a[l] = 1 - a[l]; t.setVerticesData(Y.UV6Kind, a, !1); } if (n.colorsAttrDesc && n.colorsAttrDesc.count > 0) { const a = new Float32Array(e, n.colorsAttrDesc.offset, n.colorsAttrDesc.count); t.setVerticesData(Y.ColorKind, a, !1, n.colorsAttrDesc.stride); } if (n.matricesIndicesAttrDesc && n.matricesIndicesAttrDesc.count > 0) { const a = new Int32Array(e, n.matricesIndicesAttrDesc.offset, n.matricesIndicesAttrDesc.count), l = []; for (let o = 0; o < a.length; o++) { const u = a[o]; l.push(u & 255), l.push((u & 65280) >> 8), l.push((u & 16711680) >> 16), l.push(u >> 24 & 255); } t.setVerticesData(Y.MatricesIndicesKind, l, !1); } if (n.matricesIndicesExtraAttrDesc && n.matricesIndicesExtraAttrDesc.count > 0) { const a = new Int32Array(e, n.matricesIndicesExtraAttrDesc.offset, n.matricesIndicesExtraAttrDesc.count), l = []; for (let o = 0; o < a.length; o++) { const u = a[o]; l.push(u & 255), l.push((u & 65280) >> 8), l.push((u & 16711680) >> 16), l.push(u >> 24 & 255); } t.setVerticesData(Y.MatricesIndicesExtraKind, l, !1); } if (n.matricesWeightsAttrDesc && n.matricesWeightsAttrDesc.count > 0) { const a = new Float32Array(e, n.matricesWeightsAttrDesc.offset, n.matricesWeightsAttrDesc.count); t.setVerticesData(Y.MatricesWeightsKind, a, !1); } if (n.indicesAttrDesc && n.indicesAttrDesc.count > 0) { const a = new Int32Array(e, n.indicesAttrDesc.offset, n.indicesAttrDesc.count); t.setIndices(a, null); } if (n.subMeshesAttrDesc && n.subMeshesAttrDesc.count > 0) { const a = new Int32Array(e, n.subMeshesAttrDesc.offset, n.subMeshesAttrDesc.count * 5); t.subMeshes = []; for (let l = 0; l < n.subMeshesAttrDesc.count; l++) { const o = a[l * 5 + 0], u = a[l * 5 + 1], h = a[l * 5 + 2], d = a[l * 5 + 3], f = a[l * 5 + 4]; ed.AddToMesh(o, u, h, d, f, t); } } } else if (e.positions && e.normals && e.indices) { if (t.setVerticesData(Y.PositionKind, e.positions, e.positions._updatable), t.setVerticesData(Y.NormalKind, e.normals, e.normals._updatable), e.tangents && t.setVerticesData(Y.TangentKind, e.tangents, e.tangents._updatable), e.uvs && t.setVerticesData(Y.UVKind, e.uvs, e.uvs._updatable), e.uvs2 && t.setVerticesData(Y.UV2Kind, e.uvs2, e.uvs2._updatable), e.uvs3 && t.setVerticesData(Y.UV3Kind, e.uvs3, e.uvs3._updatable), e.uvs4 && t.setVerticesData(Y.UV4Kind, e.uvs4, e.uvs4._updatable), e.uvs5 && t.setVerticesData(Y.UV5Kind, e.uvs5, e.uvs5._updatable), e.uvs6 && t.setVerticesData(Y.UV6Kind, e.uvs6, e.uvs6._updatable), e.colors && t.setVerticesData(Y.ColorKind, Et.CheckColors4(e.colors, e.positions.length / 3), e.colors._updatable), e.matricesIndices) if (e.matricesIndices._isExpanded) delete e.matricesIndices._isExpanded, t.setVerticesData(Y.MatricesIndicesKind, e.matricesIndices, e.matricesIndices._updatable); else { const n = []; for (let a = 0; a < e.matricesIndices.length; a++) { const l = e.matricesIndices[a]; n.push(l & 255), n.push((l & 65280) >> 8), n.push((l & 16711680) >> 16), n.push(l >> 24 & 255); } t.setVerticesData(Y.MatricesIndicesKind, n, e.matricesIndices._updatable); } if (e.matricesIndicesExtra) if (e.matricesIndicesExtra._isExpanded) delete e.matricesIndices._isExpanded, t.setVerticesData(Y.MatricesIndicesExtraKind, e.matricesIndicesExtra, e.matricesIndicesExtra._updatable); else { const n = []; for (let a = 0; a < e.matricesIndicesExtra.length; a++) { const l = e.matricesIndicesExtra[a]; n.push(l & 255), n.push((l & 65280) >> 8), n.push((l & 16711680) >> 16), n.push(l 
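/*
 * In the binary import above, the four bone indices of each vertex arrive packed
 * into a single 32-bit integer and are unpacked one byte at a time. A symmetric
 * pack/unpack pair (hypothetical helpers, matching the bit masks used above):
 *
 *   function packBoneIndices(i0, i1, i2, i3) {   // each index must fit in 0..255
 *     return (i0 & 0xff) | ((i1 & 0xff) << 8) | ((i2 & 0xff) << 16) | ((i3 & 0xff) << 24);
 *   }
 *   function unpackBoneIndices(packed) {
 *     return [
 *       packed & 0xff,
 *       (packed & 0xff00) >> 8,
 *       (packed & 0xff0000) >> 16,
 *       (packed >> 24) & 0xff,
 *     ];
 *   }
 */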
>> 24 & 255); } t.setVerticesData(Y.MatricesIndicesExtraKind, n, e.matricesIndicesExtra._updatable); } e.matricesWeights && (yc._CleanMatricesWeights(e, t), t.setVerticesData(Y.MatricesWeightsKind, e.matricesWeights, e.matricesWeights._updatable)), e.matricesWeightsExtra && t.setVerticesData(Y.MatricesWeightsExtraKind, e.matricesWeightsExtra, e.matricesWeights._updatable), t.setIndices(e.indices, null); } if (e.subMeshes) { t.subMeshes = []; for (let n = 0; n < e.subMeshes.length; n++) { const a = e.subMeshes[n]; ed.AddToMesh(a.materialIndex, a.verticesStart, a.verticesCount, a.indexStart, a.indexCount, t); } } t._shouldGenerateFlatShading && (t.convertToFlatShadedMesh(), t._shouldGenerateFlatShading = !1), t.computeWorldMatrix(!0), i.onMeshImportedObservable.notifyObservers(t); } static _CleanMatricesWeights(e, t) { if (!uu.CleanBoneMatrixWeights) return; let r = 0; if (e.skeletonId > -1) { const h = t.getScene().getLastSkeletonById(e.skeletonId); if (!h) return; r = h.bones.length; } else return; const s = t.getVerticesData(Y.MatricesIndicesKind), n = t.getVerticesData(Y.MatricesIndicesExtraKind), a = e.matricesWeights, l = e.matricesWeightsExtra, o = e.numBoneInfluencer, u = a.length; for (let h = 0; h < u; h += 4) { let d = 0, f = -1; for (let p = 0; p < 4; p++) { const m = a[h + p]; d += m, m < 1e-3 && f < 0 && (f = p); } if (l) for (let p = 0; p < 4; p++) { const m = l[h + p]; d += m, m < 1e-3 && f < 0 && (f = p + 4); } if ((f < 0 || f > o - 1) && (f = o - 1), d > 1e-3) { const p = 1 / d; for (let m = 0; m < 4; m++) a[h + m] *= p; if (l) for (let m = 0; m < 4; m++) l[h + m] *= p; } else f >= 4 ? (l[h + f - 4] = 1 - d, n[h + f - 4] = r) : (a[h + f] = 1 - d, s[h + f] = r); } t.setVerticesData(Y.MatricesIndicesKind, s), e.matricesWeightsExtra && t.setVerticesData(Y.MatricesIndicesExtraKind, n); } /** * Create a new geometry from persisted data (Using .babylon file format) * @param parsedVertexData defines the persisted data * @param scene defines the hosting scene * @param rootUrl defines the root url to use to load assets (like delayed data) * @returns the new geometry object */ static Parse(e, t, i) { const r = new yc(e.id, t, void 0, e.updatable); return r._loadedUniqueId = e.uniqueId, $s && $s.AddTagsTo(r, e.tags), e.delayLoadingFile ? 
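/*
 * _CleanMatricesWeights above makes the bone influences of every vertex sum to 1:
 * it totals the (up to 8) weights, rescales them when the total is significant,
 * and otherwise assigns the missing weight to a spare slot. A reduced sketch for
 * one vertex with 4 influences (hypothetical helper, not part of the library):
 *
 *   function normalizeVertexWeights(weights, epsilon = 1e-3) { // weights: [w0, w1, w2, w3]
 *     const sum = weights.reduce((acc, w) => acc + w, 0);
 *     if (sum > epsilon) {
 *       const scale = 1 / sum;
 *       return weights.map((w) => w * scale);            // rescale so the sum is exactly 1
 *     }
 *     const fixed = weights.slice();
 *     const spare = fixed.findIndex((w) => w < epsilon);  // first negligible slot
 *     fixed[spare < 0 ? fixed.length - 1 : spare] = 1 - sum;
 *     return fixed;
 *   }
 */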
(r.delayLoadState = 4, r.delayLoadingFile = i + e.delayLoadingFile, r._boundingInfo = new zf(D.FromArray(e.boundingBoxMinimum), D.FromArray(e.boundingBoxMaximum)), r._delayInfo = [], e.hasUVs && r._delayInfo.push(Y.UVKind), e.hasUVs2 && r._delayInfo.push(Y.UV2Kind), e.hasUVs3 && r._delayInfo.push(Y.UV3Kind), e.hasUVs4 && r._delayInfo.push(Y.UV4Kind), e.hasUVs5 && r._delayInfo.push(Y.UV5Kind), e.hasUVs6 && r._delayInfo.push(Y.UV6Kind), e.hasColors && r._delayInfo.push(Y.ColorKind), e.hasMatricesIndices && r._delayInfo.push(Y.MatricesIndicesKind), e.hasMatricesWeights && r._delayInfo.push(Y.MatricesWeightsKind), r._delayLoadingFunction = Ot.ImportVertexData) : Ot.ImportVertexData(e, r), t.pushGeometry(r, !0), r; } } class Ste { /** * constructor * @param frameSampleSize The number of samples required to saturate the sliding window */ constructor(e = 30) { this._enabled = !0, this._rollingFrameTime = new Mte(e); } /** * Samples current frame * @param timeMs A timestamp in milliseconds of the current frame to compare with other frames */ sampleFrame(e = Gs.Now) { if (this._enabled) { if (this._lastFrameTimeMs != null) { const t = e - this._lastFrameTimeMs; this._rollingFrameTime.add(t); } this._lastFrameTimeMs = e; } } /** * Returns the average frame time in milliseconds over the sliding window (or the subset of frames sampled so far) */ get averageFrameTime() { return this._rollingFrameTime.average; } /** * Returns the variance frame time in milliseconds over the sliding window (or the subset of frames sampled so far) */ get averageFrameTimeVariance() { return this._rollingFrameTime.variance; } /** * Returns the frame time of the most recent frame */ get instantaneousFrameTime() { return this._rollingFrameTime.history(0); } /** * Returns the average framerate in frames per second over the sliding window (or the subset of frames sampled so far) */ get averageFPS() { return 1e3 / this._rollingFrameTime.average; } /** * Returns the average framerate in frames per second using the most recent frame time */ get instantaneousFPS() { const e = this._rollingFrameTime.history(0); return e === 0 ? 
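/*
 * The performance monitor above keeps a sliding window of frame times, so the
 * average FPS is simply 1000 / averageFrameTime. Usage sketch (illustrative;
 * assumes the public PerformanceMonitor name and existing engine/scene objects):
 *
 *   const monitor = new BABYLON.PerformanceMonitor(60);   // 60-sample window
 *   engine.runRenderLoop(() => {
 *     monitor.sampleFrame();                               // timestamp this frame
 *     scene.render();
 *     if (monitor.isSaturated) {
 *       console.log(monitor.averageFPS.toFixed(1) + " fps, " +
 *                   Math.sqrt(monitor.averageFrameTimeVariance).toFixed(2) + " ms jitter");
 *     }
 *   });
 */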
0 : 1e3 / e; } /** * Returns true if enough samples have been taken to completely fill the sliding window */ get isSaturated() { return this._rollingFrameTime.isSaturated(); } /** * Enables contributions to the sliding window sample set */ enable() { this._enabled = !0; } /** * Disables contributions to the sliding window sample set * Samples will not be interpolated over the disabled period */ disable() { this._enabled = !1, this._lastFrameTimeMs = null; } /** * Returns true if sampling is enabled */ get isEnabled() { return this._enabled; } /** * Resets performance monitor */ reset() { this._lastFrameTimeMs = null, this._rollingFrameTime.reset(); } } class Mte { /** * constructor * @param length The number of samples required to saturate the sliding window */ constructor(e) { this._samples = new Array(e), this.reset(); } /** * Adds a sample to the sample set * @param v The sample value */ add(e) { let t; if (this.isSaturated()) { const i = this._samples[this._pos]; t = i - this.average, this.average -= t / (this._sampleCount - 1), this._m2 -= t * (i - this.average); } else this._sampleCount++; t = e - this.average, this.average += t / this._sampleCount, this._m2 += t * (e - this.average), this.variance = this._m2 / (this._sampleCount - 1), this._samples[this._pos] = e, this._pos++, this._pos %= this._samples.length; } /** * Returns previously added values or null if outside of history or outside the sliding window domain * @param i Index in history. For example, pass 0 for the most recent value and 1 for the value before that * @returns Value previously recorded with add() or null if outside of range */ history(e) { if (e >= this._sampleCount || e >= this._samples.length) return 0; const t = this._wrapPosition(this._pos - 1); return this._samples[this._wrapPosition(t - e)]; } /** * Returns true if enough samples have been taken to completely fill the sliding window * @returns true if sample-set saturated */ isSaturated() { return this._sampleCount >= this._samples.length; } /** * Resets the rolling average (equivalent to 0 samples taken so far) */ reset() { this.average = 0, this.variance = 0, this._sampleCount = 0, this._pos = 0, this._m2 = 0; } /** * Wraps a value around the sample range boundaries * @param i Position in sample range, for example if the sample length is 5, and i is -3, then 2 will be returned. 
* @returns Wrapped position in sample range */ _wrapPosition(e) { const t = this._samples.length; return (e % t + t) % t; } } mi.prototype.setAlphaConstants = function(c, e, t, i) { this._alphaState.setAlphaBlendConstants(c, e, t, i); }; mi.prototype.setAlphaMode = function(c, e = !1) { if (this._alphaMode === c) { if (!e) { const t = c === 0; this.depthCullingState.depthMask !== t && (this.depthCullingState.depthMask = t); } return; } switch (c) { case 0: this._alphaState.alphaBlend = !1; break; case 7: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 8: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA, this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA), this._alphaState.alphaBlend = !0; break; case 2: this._alphaState.setAlphaBlendFunctionParameters(this._gl.SRC_ALPHA, this._gl.ONE_MINUS_SRC_ALPHA, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 6: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE, this._gl.ZERO, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 1: this._alphaState.setAlphaBlendFunctionParameters(this._gl.SRC_ALPHA, this._gl.ONE, this._gl.ZERO, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 3: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ZERO, this._gl.ONE_MINUS_SRC_COLOR, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 4: this._alphaState.setAlphaBlendFunctionParameters(this._gl.DST_COLOR, this._gl.ZERO, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 5: this._alphaState.setAlphaBlendFunctionParameters(this._gl.SRC_ALPHA, this._gl.ONE_MINUS_SRC_COLOR, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 9: this._alphaState.setAlphaBlendFunctionParameters(this._gl.CONSTANT_COLOR, this._gl.ONE_MINUS_CONSTANT_COLOR, this._gl.CONSTANT_ALPHA, this._gl.ONE_MINUS_CONSTANT_ALPHA), this._alphaState.alphaBlend = !0; break; case 10: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE_MINUS_SRC_COLOR, this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA), this._alphaState.alphaBlend = !0; break; case 11: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE, this._gl.ONE, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 12: this._alphaState.setAlphaBlendFunctionParameters(this._gl.DST_ALPHA, this._gl.ONE, this._gl.ZERO, this._gl.ZERO), this._alphaState.alphaBlend = !0; break; case 13: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE_MINUS_DST_COLOR, this._gl.ONE_MINUS_SRC_COLOR, this._gl.ONE_MINUS_DST_ALPHA, this._gl.ONE_MINUS_SRC_ALPHA), this._alphaState.alphaBlend = !0; break; case 14: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA, this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA), this._alphaState.alphaBlend = !0; break; case 15: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE, this._gl.ONE, this._gl.ONE, this._gl.ZERO), this._alphaState.alphaBlend = !0; break; case 16: this._alphaState.setAlphaBlendFunctionParameters(this._gl.ONE_MINUS_DST_COLOR, this._gl.ONE_MINUS_SRC_COLOR, this._gl.ZERO, this._gl.ONE), this._alphaState.alphaBlend = !0; break; case 17: this._alphaState.setAlphaBlendFunctionParameters(this._gl.SRC_ALPHA, this._gl.ONE_MINUS_SRC_ALPHA, this._gl.ONE, this._gl.ONE_MINUS_SRC_ALPHA), this._alphaState.alphaBlend = !0; break; } e || 
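/*
 * setAlphaMode above maps Babylon's alpha-mode constants onto WebGL blend factors;
 * mode 2 (ALPHA_COMBINE), for example, is the classic SRC_ALPHA, ONE_MINUS_SRC_ALPHA
 * blend. Sketch of bracketing a transparent draw (public Engine and Constants
 * names assumed):
 *
 *   const previous = engine.getAlphaMode();
 *   engine.setAlphaMode(BABYLON.Constants.ALPHA_COMBINE); // SRC_ALPHA, ONE_MINUS_SRC_ALPHA
 *   // ... issue the transparent draw calls ...
 *   engine.setAlphaMode(previous);   // ALPHA_DISABLE (0) also re-enables depth writes
 */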
(this.depthCullingState.depthMask = c === 0), this._alphaMode = c; }; mi.prototype.getAlphaMode = function() { return this._alphaMode; }; mi.prototype.setAlphaEquation = function(c) { if (this._alphaEquation !== c) { switch (c) { case 0: this._alphaState.setAlphaEquationParameters(32774, 32774); break; case 1: this._alphaState.setAlphaEquationParameters(32778, 32778); break; case 2: this._alphaState.setAlphaEquationParameters(32779, 32779); break; case 3: this._alphaState.setAlphaEquationParameters(32776, 32776); break; case 4: this._alphaState.setAlphaEquationParameters(32775, 32775); break; case 5: this._alphaState.setAlphaEquationParameters(32775, 32774); break; } this._alphaEquation = c; } }; mi.prototype.getAlphaEquation = function() { return this._alphaEquation; }; function nB(c, e, t = !1, i) { switch (c) { case 3: { const s = e instanceof ArrayBuffer ? new Int8Array(e) : new Int8Array(e); return i && s.set(new Int8Array(i)), s; } case 0: { const s = e instanceof ArrayBuffer ? new Uint8Array(e) : new Uint8Array(e); return i && s.set(new Uint8Array(i)), s; } case 4: { const s = e instanceof ArrayBuffer ? new Int16Array(e) : new Int16Array(t ? e / 2 : e); return i && s.set(new Int16Array(i)), s; } case 5: case 8: case 9: case 10: case 2: { const s = e instanceof ArrayBuffer ? new Uint16Array(e) : new Uint16Array(t ? e / 2 : e); return i && s.set(new Uint16Array(i)), s; } case 6: { const s = e instanceof ArrayBuffer ? new Int32Array(e) : new Int32Array(t ? e / 4 : e); return i && s.set(new Int32Array(i)), s; } case 7: case 11: case 12: case 13: case 14: case 15: { const s = e instanceof ArrayBuffer ? new Uint32Array(e) : new Uint32Array(t ? e / 4 : e); return i && s.set(new Uint32Array(i)), s; } case 1: { const s = e instanceof ArrayBuffer ? new Float32Array(e) : new Float32Array(t ? e / 4 : e); return i && s.set(new Float32Array(i)), s; } } const r = e instanceof ArrayBuffer ? new Uint8Array(e) : new Uint8Array(e); return i && r.set(new Uint8Array(i)), r; } mi.prototype._readTexturePixelsSync = function(c, e, t, i = -1, r = 0, s = null, n = !0, a = !1, l = 0, o = 0) { var u, h; const d = this._gl; if (!d) throw new Error("Engine does not have gl rendering context."); if (!this._dummyFramebuffer) { const p = d.createFramebuffer(); if (!p) throw new Error("Unable to create dummy framebuffer"); this._dummyFramebuffer = p; } d.bindFramebuffer(d.FRAMEBUFFER, this._dummyFramebuffer), i > -1 ? d.framebufferTexture2D(d.FRAMEBUFFER, d.COLOR_ATTACHMENT0, d.TEXTURE_CUBE_MAP_POSITIVE_X + i, (u = c._hardwareTexture) === null || u === void 0 ? void 0 : u.underlyingResource, r) : d.framebufferTexture2D(d.FRAMEBUFFER, d.COLOR_ATTACHMENT0, d.TEXTURE_2D, (h = c._hardwareTexture) === null || h === void 0 ? void 0 : h.underlyingResource, r); let f = c.type !== void 0 ? 
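/*
 * _readTexturePixelsSync above binds the texture to a throwaway framebuffer and
 * calls readPixels into a typed array chosen from the texture type. At the public
 * API level the same readback is usually reached through a texture's readPixels
 * helper (assumed BaseTexture API, promise-returning in recent versions):
 *
 *   texture.readPixels().then((buffer) => {
 *     // buffer holds RGBA values, 4 components per pixel
 *     console.log("first pixel:", buffer[0], buffer[1], buffer[2], buffer[3]);
 *   });
 */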
this._getWebGLTextureType(c.type) : d.UNSIGNED_BYTE; if (a) s || (s = nB(c.type, 4 * e * t)); else switch (f) { case d.UNSIGNED_BYTE: s || (s = new Uint8Array(4 * e * t)), f = d.UNSIGNED_BYTE; break; default: s || (s = new Float32Array(4 * e * t)), f = d.FLOAT; break; } return n && this.flushFramebuffer(), d.readPixels(l, o, e, t, d.RGBA, f, s), d.bindFramebuffer(d.FRAMEBUFFER, this._currentFramebuffer), s; }; mi.prototype._readTexturePixels = function(c, e, t, i = -1, r = 0, s = null, n = !0, a = !1, l = 0, o = 0) { return Promise.resolve(this._readTexturePixelsSync(c, e, t, i, r, s, n, a, l, o)); }; mi.prototype.updateDynamicIndexBuffer = function(c, e, t = 0) { this._currentBoundBuffer[this._gl.ELEMENT_ARRAY_BUFFER] = null, this.bindIndexBuffer(c); let i; c.is32Bits ? i = e instanceof Uint32Array ? e : new Uint32Array(e) : i = e instanceof Uint16Array ? e : new Uint16Array(e), this._gl.bufferData(this._gl.ELEMENT_ARRAY_BUFFER, i, this._gl.DYNAMIC_DRAW), this._resetIndexBufferBinding(); }; mi.prototype.updateDynamicVertexBuffer = function(c, e, t, i) { this.bindArrayBuffer(c), t === void 0 && (t = 0); const r = e.byteLength || e.length; i === void 0 || i >= r && t === 0 ? e instanceof Array ? this._gl.bufferSubData(this._gl.ARRAY_BUFFER, t, new Float32Array(e)) : this._gl.bufferSubData(this._gl.ARRAY_BUFFER, t, e) : e instanceof Array ? this._gl.bufferSubData(this._gl.ARRAY_BUFFER, 0, new Float32Array(e).subarray(t, t + i)) : (e instanceof ArrayBuffer ? e = new Uint8Array(e, t, i) : e = new Uint8Array(e.buffer, e.byteOffset + t, i), this._gl.bufferSubData(this._gl.ARRAY_BUFFER, 0, e)), this._resetVertexBufferBinding(); }; class $e extends mi { /** * Returns the current npm package of the sdk */ // Not mixed with Version for tooling purpose. static get NpmPackage() { return mi.NpmPackage; } /** * Returns the current version of the framework */ static get Version() { return mi.Version; } /** Gets the list of created engines */ static get Instances() { return gi.Instances; } /** * Gets the latest created engine */ static get LastCreatedEngine() { return gi.LastCreatedEngine; } /** * Gets the latest created scene */ static get LastCreatedScene() { return gi.LastCreatedScene; } /** @internal */ /** * Engine abstraction for loading and creating an image bitmap from a given source string. * @param imageSource source to load the image from. * @param options An object that sets options for the image's extraction. * @returns ImageBitmap. */ _createImageBitmapFromSource(e, t) { return new Promise((r, s) => { const n = new Image(); n.onload = () => { n.decode().then(() => { this.createImageBitmap(n, t).then((a) => { r(a); }); }); }, n.onerror = () => { s(`Error loading image ${n.src}`); }, n.src = e; }); } /** * Engine abstraction for createImageBitmap * @param image source for image * @param options An object that sets options for the image's extraction. 
* @returns ImageBitmap */ createImageBitmap(e, t) { return createImageBitmap(e, t); } /** * Resize an image and returns the image data as an uint8array * @param image image to resize * @param bufferWidth destination buffer width * @param bufferHeight destination buffer height * @returns an uint8array containing RGBA values of bufferWidth * bufferHeight size */ resizeImageBitmap(e, t, i) { const s = this.createCanvas(t, i).getContext("2d"); if (!s) throw new Error("Unable to get 2d context for resizeImageBitmap"); return s.drawImage(e, 0, 0), s.getImageData(0, 0, t, i).data; } /** * Will flag all materials in all scenes in all engines as dirty to trigger new shader compilation * @param flag defines which part of the materials must be marked as dirty * @param predicate defines a predicate used to filter which materials should be affected */ static MarkAllMaterialsAsDirty(e, t) { for (let i = 0; i < $e.Instances.length; i++) { const r = $e.Instances[i]; for (let s = 0; s < r.scenes.length; s++) r.scenes[s].markAllMaterialsAsDirty(e, t); } } /** * Method called to create the default loading screen. * This can be overridden in your own app. * @param canvas The rendering canvas element * @returns The loading screen */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static DefaultLoadingScreenFactory(e) { throw yr("LoadingScreen"); } get _supportsHardwareTextureRescaling() { return !!$e._RescalePostProcessFactory; } /** * Gets the performance monitor attached to this engine * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#engineinstrumentation */ get performanceMonitor() { return this._performanceMonitor; } /** * (WebGPU only) True (default) to be in compatibility mode, meaning rendering all existing scenes without artifacts (same rendering than WebGL). * Setting the property to false will improve performances but may not work in some scenes if some precautions are not taken. * See https://doc.babylonjs.com/setup/support/webGPU/webGPUOptimization/webGPUNonCompatibilityMode for more details */ get compatibilityMode() { return this._compatibilityMode; } set compatibilityMode(e) { this._compatibilityMode = !0; } // Events /** * Gets the HTML element used to attach event listeners * @returns a HTML element */ getInputElement() { return this._renderingCanvas; } /** * Creates a new engine * @param canvasOrContext defines the canvas or WebGL context to use for rendering. If you provide a WebGL context, Babylon.js will not hook events on the canvas (like pointers, keyboards, etc...) so no event observables will be available. 
This is mostly used when Babylon.js is used as a plugin on a system which already used the WebGL context * @param antialias defines enable antialiasing (default: false) * @param options defines further options to be sent to the getContext() function * @param adaptToDeviceRatio defines whether to adapt to the device's viewport characteristics (default: false) */ constructor(e, t, i, r = !1) { if (super(e, t, i, r), this.enableOfflineSupport = !1, this.disableManifestCheck = !1, this.disableContextMenu = !0, this.scenes = [], this._virtualScenes = new Array(), this.onNewSceneAddedObservable = new Fe(), this.postProcesses = [], this.isPointerLock = !1, this.onResizeObservable = new Fe(), this.onCanvasBlurObservable = new Fe(), this.onCanvasFocusObservable = new Fe(), this.onCanvasPointerOutObservable = new Fe(), this.onBeginFrameObservable = new Fe(), this.customAnimationFrameRequester = null, this.onEndFrameObservable = new Fe(), this.onBeforeShaderCompilationObservable = new Fe(), this.onAfterShaderCompilationObservable = new Fe(), this._deterministicLockstep = !1, this._lockstepMaxSteps = 4, this._timeStep = 1 / 60, this._fps = 60, this._deltaTime = 0, this._drawCalls = new Vc(), this.canvasTabIndex = 1, this.disablePerformanceMonitorInBackground = !1, this._performanceMonitor = new Ste(), this._compatibilityMode = !0, this.currentRenderPassId = 0, this._renderPassNames = ["main"], $e.Instances.push(this), !!e && (this._features.supportRenderPasses = !0, i = this._creationOptions, e.getContext)) { const s = e; this._sharedInit(s); } } _initGLContext() { super._initGLContext(), this._rescalePostProcess = null; } /** * Shared initialization across engines types. * @param canvas The canvas associated with this instance of the engine. */ _sharedInit(e) { super._sharedInit(e), this._onCanvasFocus = () => { this.onCanvasFocusObservable.notifyObservers(this); }, this._onCanvasBlur = () => { this.onCanvasBlurObservable.notifyObservers(this); }, this._onCanvasContextMenu = (i) => { this.disableContextMenu && i.preventDefault(); }, e.addEventListener("focus", this._onCanvasFocus), e.addEventListener("blur", this._onCanvasBlur), e.addEventListener("contextmenu", this._onCanvasContextMenu), this._onBlur = () => { this.disablePerformanceMonitorInBackground && this._performanceMonitor.disable(), this._windowIsBackground = !0; }, this._onFocus = () => { this.disablePerformanceMonitorInBackground && this._performanceMonitor.enable(), this._windowIsBackground = !1; }, this._onCanvasPointerOut = (i) => { document.elementFromPoint(i.clientX, i.clientY) !== e && this.onCanvasPointerOutObservable.notifyObservers(i); }; const t = this.getHostWindow(); t && typeof t.addEventListener == "function" && (t.addEventListener("blur", this._onBlur), t.addEventListener("focus", this._onFocus)), e.addEventListener("pointerout", this._onCanvasPointerOut), this._creationOptions.doNotHandleTouchAction || this._disableTouchAction(), !$e.audioEngine && this._creationOptions.audioEngine && $e.AudioEngineFactory && ($e.audioEngine = $e.AudioEngineFactory(this.getRenderingCanvas(), this.getAudioContext(), this.getAudioDestination())), qR() && (this._onFullscreenChange = () => { this.isFullscreen = !!document.fullscreenElement, this.isFullscreen && this._pointerLockRequested && e && $e._RequestPointerlock(e); }, document.addEventListener("fullscreenchange", this._onFullscreenChange, !1), document.addEventListener("webkitfullscreenchange", this._onFullscreenChange, !1), this._onPointerLockChange = () => { this.isPointerLock = 
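/*
 * Typical bootstrapping of the Engine whose constructor appears above (standard
 * public Babylon.js usage; the deterministic lockstep options mirror the
 * _deterministicLockstep / _lockstepMaxSteps fields read from the creation options):
 *
 *   const canvas = document.getElementById("renderCanvas");
 *   const engine = new BABYLON.Engine(canvas, true, {
 *     deterministicLockstep: true,   // fixed-step animation/physics updates
 *     lockstepMaxSteps: 4,
 *   });
 *   const scene = new BABYLON.Scene(engine);
 *   engine.runRenderLoop(() => scene.render());
 *   window.addEventListener("resize", () => engine.resize());
 */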
document.pointerLockElement === e; }, document.addEventListener("pointerlockchange", this._onPointerLockChange, !1), document.addEventListener("webkitpointerlockchange", this._onPointerLockChange, !1)), this.enableOfflineSupport = $e.OfflineProviderFactory !== void 0, this._deterministicLockstep = !!this._creationOptions.deterministicLockstep, this._lockstepMaxSteps = this._creationOptions.lockstepMaxSteps || 0, this._timeStep = this._creationOptions.timeStep || 1 / 60; } /** @internal */ _verifyPointerLock() { var e; (e = this._onPointerLockChange) === null || e === void 0 || e.call(this); } /** * Gets current aspect ratio * @param viewportOwner defines the camera to use to get the aspect ratio * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the aspect ratio */ getAspectRatio(e, t = !1) { const i = e.viewport; return this.getRenderWidth(t) * i.width / (this.getRenderHeight(t) * i.height); } /** * Gets current screen aspect ratio * @returns a number defining the aspect ratio */ getScreenAspectRatio() { return this.getRenderWidth(!0) / this.getRenderHeight(!0); } /** * Gets the client rect of the HTML canvas attached with the current webGL context * @returns a client rectangle */ getRenderingCanvasClientRect() { return this._renderingCanvas ? this._renderingCanvas.getBoundingClientRect() : null; } /** * Gets the client rect of the HTML element used for events * @returns a client rectangle */ getInputElementClientRect() { return this._renderingCanvas ? this.getInputElement().getBoundingClientRect() : null; } /** * Gets a boolean indicating that the engine is running in deterministic lock step mode * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns true if engine is in deterministic lock step mode */ isDeterministicLockStep() { return this._deterministicLockstep; } /** * Gets the max steps when engine is running in deterministic lock step * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns the max steps */ getLockstepMaxSteps() { return this._lockstepMaxSteps; } /** * Returns the time in ms between steps when using deterministic lock step. * @returns time step in (ms) */ getTimeStep() { return this._timeStep * 1e3; } /** * Force the mipmap generation for the given render target texture * @param texture defines the render target texture to use * @param unbind defines whether or not to unbind the texture after generation. Defaults to true. 
*/ generateMipMapsForCubemap(e, t = !0) { if (e.generateMipMaps) { const i = this._gl; this._bindTextureDirectly(i.TEXTURE_CUBE_MAP, e, !0), i.generateMipmap(i.TEXTURE_CUBE_MAP), t && this._bindTextureDirectly(i.TEXTURE_CUBE_MAP, null); } } /** States */ /** * Gets a boolean indicating if depth writing is enabled * @returns the current depth writing state */ getDepthWrite() { return this._depthCullingState.depthMask; } /** * Enable or disable depth writing * @param enable defines the state to set */ setDepthWrite(e) { this._depthCullingState.depthMask = e; } /** * Gets a boolean indicating if stencil buffer is enabled * @returns the current stencil buffer state */ getStencilBuffer() { return this._stencilState.stencilTest; } /** * Enable or disable the stencil buffer * @param enable defines if the stencil buffer must be enabled or disabled */ setStencilBuffer(e) { this._stencilState.stencilTest = e; } /** * Gets the current stencil mask * @returns a number defining the new stencil mask to use */ getStencilMask() { return this._stencilState.stencilMask; } /** * Sets the current stencil mask * @param mask defines the new stencil mask to use */ setStencilMask(e) { this._stencilState.stencilMask = e; } /** * Gets the current stencil function * @returns a number defining the stencil function to use */ getStencilFunction() { return this._stencilState.stencilFunc; } /** * Gets the current stencil reference value * @returns a number defining the stencil reference value to use */ getStencilFunctionReference() { return this._stencilState.stencilFuncRef; } /** * Gets the current stencil mask * @returns a number defining the stencil mask to use */ getStencilFunctionMask() { return this._stencilState.stencilFuncMask; } /** * Sets the current stencil function * @param stencilFunc defines the new stencil function to use */ setStencilFunction(e) { this._stencilState.stencilFunc = e; } /** * Sets the current stencil reference * @param reference defines the new stencil reference to use */ setStencilFunctionReference(e) { this._stencilState.stencilFuncRef = e; } /** * Sets the current stencil mask * @param mask defines the new stencil mask to use */ setStencilFunctionMask(e) { this._stencilState.stencilFuncMask = e; } /** * Gets the current stencil operation when stencil fails * @returns a number defining stencil operation to use when stencil fails */ getStencilOperationFail() { return this._stencilState.stencilOpStencilFail; } /** * Gets the current stencil operation when depth fails * @returns a number defining stencil operation to use when depth fails */ getStencilOperationDepthFail() { return this._stencilState.stencilOpDepthFail; } /** * Gets the current stencil operation when stencil passes * @returns a number defining stencil operation to use when stencil passes */ getStencilOperationPass() { return this._stencilState.stencilOpStencilDepthPass; } /** * Sets the stencil operation to use when stencil fails * @param operation defines the stencil operation to use when stencil fails */ setStencilOperationFail(e) { this._stencilState.stencilOpStencilFail = e; } /** * Sets the stencil operation to use when depth fails * @param operation defines the stencil operation to use when depth fails */ setStencilOperationDepthFail(e) { this._stencilState.stencilOpDepthFail = e; } /** * Sets the stencil operation to use when stencil passes * @param operation defines the stencil operation to use when stencil passes */ setStencilOperationPass(e) { this._stencilState.stencilOpStencilDepthPass = e; } /** * Sets a boolean 
indicating if the dithering state is enabled or disabled * @param value defines the dithering state */ setDitheringState(e) { e ? this._gl.enable(this._gl.DITHER) : this._gl.disable(this._gl.DITHER); } /** * Sets a boolean indicating if the rasterizer state is enabled or disabled * @param value defines the rasterizer state */ setRasterizerState(e) { e ? this._gl.disable(this._gl.RASTERIZER_DISCARD) : this._gl.enable(this._gl.RASTERIZER_DISCARD); } /** * Gets the current depth function * @returns a number defining the depth function */ getDepthFunction() { return this._depthCullingState.depthFunc; } /** * Sets the current depth function * @param depthFunc defines the function to use */ setDepthFunction(e) { this._depthCullingState.depthFunc = e; } /** * Sets the current depth function to GREATER */ setDepthFunctionToGreater() { this.setDepthFunction(516); } /** * Sets the current depth function to GEQUAL */ setDepthFunctionToGreaterOrEqual() { this.setDepthFunction(518); } /** * Sets the current depth function to LESS */ setDepthFunctionToLess() { this.setDepthFunction(513); } /** * Sets the current depth function to LEQUAL */ setDepthFunctionToLessOrEqual() { this.setDepthFunction(515); } /** * Caches the state of the stencil buffer */ cacheStencilState() { this._cachedStencilBuffer = this.getStencilBuffer(), this._cachedStencilFunction = this.getStencilFunction(), this._cachedStencilMask = this.getStencilMask(), this._cachedStencilOperationPass = this.getStencilOperationPass(), this._cachedStencilOperationFail = this.getStencilOperationFail(), this._cachedStencilOperationDepthFail = this.getStencilOperationDepthFail(), this._cachedStencilReference = this.getStencilFunctionReference(); } /** * Restores the state of the stencil buffer */ restoreStencilState() { this.setStencilFunction(this._cachedStencilFunction), this.setStencilMask(this._cachedStencilMask), this.setStencilBuffer(this._cachedStencilBuffer), this.setStencilOperationPass(this._cachedStencilOperationPass), this.setStencilOperationFail(this._cachedStencilOperationFail), this.setStencilOperationDepthFail(this._cachedStencilOperationDepthFail), this.setStencilFunctionReference(this._cachedStencilReference); } /** * Directly set the WebGL Viewport * @param x defines the x coordinate of the viewport (in screen space) * @param y defines the y coordinate of the viewport (in screen space) * @param width defines the width of the viewport (in screen space) * @param height defines the height of the viewport (in screen space) * @returns the current viewport Object (if any) that is being replaced by this call. You can restore this viewport later on to go back to the original state */ setDirectViewport(e, t, i, r) { const s = this._cachedViewport; return this._cachedViewport = null, this._viewport(e, t, i, r), s; } /** * Executes a scissor clear (ie. a clear on a specific portion of the screen) * @param x defines the x-coordinate of the bottom left corner of the clear rectangle * @param y defines the y-coordinate of the corner of the clear rectangle * @param width defines the width of the clear rectangle * @param height defines the height of the clear rectangle * @param clearColor defines the clear color */ scissorClear(e, t, i, r, s) { this.enableScissor(e, t, i, r), this.clear(s, !0, !0, !0), this.disableScissor(); } /** * Enable scissor test on a specific rectangle (ie. 
render will only be executed on a specific portion of the screen) * @param x defines the x-coordinate of the bottom left corner of the clear rectangle * @param y defines the y-coordinate of the corner of the clear rectangle * @param width defines the width of the clear rectangle * @param height defines the height of the clear rectangle */ enableScissor(e, t, i, r) { const s = this._gl; s.enable(s.SCISSOR_TEST), s.scissor(e, t, i, r); } /** * Disable previously set scissor test rectangle */ disableScissor() { const e = this._gl; e.disable(e.SCISSOR_TEST); } /** * @internal */ _reportDrawCall(e = 1) { this._drawCalls.addCount(e, !1); } /** * @internal */ _loadFileAsync(e, t, i) { return new Promise((r, s) => { this._loadFile(e, (n) => { r(n); }, void 0, t, i, (n, a) => { s(a); }); }); } /** * Gets the source code of the vertex shader associated with a specific webGL program * @param program defines the program to use * @returns a string containing the source code of the vertex shader associated with the program */ getVertexShaderSource(e) { const t = this._gl.getAttachedShaders(e); return t ? this._gl.getShaderSource(t[0]) : null; } /** * Gets the source code of the fragment shader associated with a specific webGL program * @param program defines the program to use * @returns a string containing the source code of the fragment shader associated with the program */ getFragmentShaderSource(e) { const t = this._gl.getAttachedShaders(e); return t ? this._gl.getShaderSource(t[1]) : null; } /** * Sets a depth stencil texture from a render target to the according uniform. * @param channel The texture channel * @param uniform The uniform to set * @param texture The render target texture containing the depth stencil texture to apply * @param name The texture name */ setDepthStencilTexture(e, t, i, r) { e !== void 0 && (t && (this._boundUniforms[e] = t), !i || !i.depthStencilTexture ? this._setTexture(e, null, void 0, void 0, r) : this._setTexture(e, i, !1, !0, r)); } /** * Sets a texture to the webGL context from a postprocess * @param channel defines the channel to use * @param postProcess defines the source postprocess * @param name name of the channel */ setTextureFromPostProcess(e, t, i) { var r; let s = null; t && (t._forcedOutputTexture ? s = t._forcedOutputTexture : t._textures.data[t._currentRenderTextureInd] && (s = t._textures.data[t._currentRenderTextureInd])), this._bindTexture(e, (r = s == null ? void 0 : s.texture) !== null && r !== void 0 ? r : null, i); } /** * Binds the output of the passed in post process to the texture channel specified * @param channel The channel the texture should be bound to * @param postProcess The post process which's output should be bound * @param name name of the channel */ setTextureFromPostProcessOutput(e, t, i) { var r, s; this._bindTexture(e, (s = (r = t == null ? void 0 : t._outputTexture) === null || r === void 0 ? void 0 : r.texture) !== null && s !== void 0 ? 
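/*
 * scissorClear above enables the scissor test, clears only that rectangle, then
 * disables the test again; cacheStencilState / restoreStencilState bracket
 * temporary stencil changes in the same spirit. Usage sketch (public Engine and
 * Constants names assumed):
 *
 *   // Clear only a 100x100 region in the bottom-left corner to opaque red.
 *   engine.scissorClear(0, 0, 100, 100, new BABYLON.Color4(1, 0, 0, 1));
 *
 *   // Temporarily tweak stencil settings, then put everything back.
 *   engine.cacheStencilState();
 *   engine.setStencilBuffer(true);
 *   engine.setStencilFunction(BABYLON.Constants.ALWAYS);
 *   // ... draw the masked geometry ...
 *   engine.restoreStencilState();
 */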
s : null, i); } _rebuildBuffers() { for (const e of this.scenes) e.resetCachedMaterial(), e._rebuildGeometries(), e._rebuildTextures(); for (const e of this._virtualScenes) e.resetCachedMaterial(), e._rebuildGeometries(), e._rebuildTextures(); super._rebuildBuffers(); } /** @internal */ _renderFrame() { for (let e = 0; e < this._activeRenderLoops.length; e++) { const t = this._activeRenderLoops[e]; t(); } } _cancelFrame() { if (this._renderingQueueLaunched && this.customAnimationFrameRequester) { this._renderingQueueLaunched = !1; const { cancelAnimationFrame: e } = this.customAnimationFrameRequester; e && e(this.customAnimationFrameRequester.requestID); } else super._cancelFrame(); } _renderLoop() { if (!this._contextWasLost) { let e = !0; (this.isDisposed || !this.renderEvenInBackground && this._windowIsBackground) && (e = !1), e && (this.beginFrame(), this._renderViews() || this._renderFrame(), this.endFrame()); } this._activeRenderLoops.length > 0 ? this.customAnimationFrameRequester ? (this.customAnimationFrameRequester.requestID = this._queueNewFrame(this.customAnimationFrameRequester.renderFunction || this._boundRenderFunction, this.customAnimationFrameRequester), this._frameHandler = this.customAnimationFrameRequester.requestID) : this._frameHandler = this._queueNewFrame(this._boundRenderFunction, this.getHostWindow()) : this._renderingQueueLaunched = !1; } /** @internal */ _renderViews() { return !1; } /** * Toggle full screen mode * @param requestPointerLock defines if a pointer lock should be requested from the user */ switchFullscreen(e) { this.isFullscreen ? this.exitFullscreen() : this.enterFullscreen(e); } /** * Enters full screen mode * @param requestPointerLock defines if a pointer lock should be requested from the user */ enterFullscreen(e) { this.isFullscreen || (this._pointerLockRequested = e, this._renderingCanvas && $e._RequestFullscreen(this._renderingCanvas)); } /** * Exits full screen mode */ exitFullscreen() { this.isFullscreen && $e._ExitFullscreen(); } /** * Enters Pointerlock mode */ enterPointerlock() { this._renderingCanvas && $e._RequestPointerlock(this._renderingCanvas); } /** * Exits Pointerlock mode */ exitPointerlock() { $e._ExitPointerlock(); } /** * Begin a new frame */ beginFrame() { this._measureFps(), this.onBeginFrameObservable.notifyObservers(this), super.beginFrame(); } /** * End the current frame */ endFrame() { super.endFrame(), this.onEndFrameObservable.notifyObservers(this); } /** * Force a specific size of the canvas * @param width defines the new canvas' width * @param height defines the new canvas' height * @param forceSetSize true to force setting the sizes of the underlying canvas * @returns true if the size was changed */ setSize(e, t, i = !1) { if (!this._renderingCanvas || !super.setSize(e, t, i)) return !1; if (this.scenes) { for (let r = 0; r < this.scenes.length; r++) { const s = this.scenes[r]; for (let n = 0; n < s.cameras.length; n++) { const a = s.cameras[n]; a._currentRenderId = 0; } } this.onResizeObservable.hasObservers() && this.onResizeObservable.notifyObservers(this); } return !0; } _deletePipelineContext(e) { const t = e; t && t.program && t.transformFeedback && (this.deleteTransformFeedback(t.transformFeedback), t.transformFeedback = null), super._deletePipelineContext(e); } createShaderProgram(e, t, i, r, s, n = null) { s = s || this._gl, this.onBeforeShaderCompilationObservable.notifyObservers(this); const a = super.createShaderProgram(e, t, i, r, s, n); return 
this.onAfterShaderCompilationObservable.notifyObservers(this), a; } _createShaderProgram(e, t, i, r, s = null) { const n = r.createProgram(); if (e.program = n, !n) throw new Error("Unable to create program"); if (r.attachShader(n, t), r.attachShader(n, i), this.webGLVersion > 1 && s) { const a = this.createTransformFeedback(); this.bindTransformFeedback(a), this.setTranformFeedbackVaryings(n, s), e.transformFeedback = a; } return r.linkProgram(n), this.webGLVersion > 1 && s && this.bindTransformFeedback(null), e.context = r, e.vertexShader = t, e.fragmentShader = i, e.isParallelCompiled || this._finalizePipelineContext(e), n; } /** * @internal */ _releaseTexture(e) { super._releaseTexture(e); } /** * @internal */ _releaseRenderTargetWrapper(e) { super._releaseRenderTargetWrapper(e), this.scenes.forEach((t) => { t.postProcesses.forEach((i) => { i._outputTexture === e && (i._outputTexture = null); }), t.cameras.forEach((i) => { i._postProcesses.forEach((r) => { r && r._outputTexture === e && (r._outputTexture = null); }); }); }); } /** * Gets the names of the render passes that are currently created * @returns list of the render pass names */ getRenderPassNames() { return this._renderPassNames; } /** * Gets the name of the current render pass * @returns name of the current render pass */ getCurrentRenderPassName() { return this._renderPassNames[this.currentRenderPassId]; } /** * Creates a render pass id * @param name Name of the render pass (for debug purpose only) * @returns the id of the new render pass */ createRenderPassId(e) { const t = ++$e._RenderPassIdCounter; return this._renderPassNames[t] = e ?? "NONAME", t; } /** * Releases a render pass id * @param id id of the render pass to release */ releaseRenderPassId(e) { this._renderPassNames[e] = void 0; for (let t = 0; t < this.scenes.length; ++t) { const i = this.scenes[t]; for (let r = 0; r < i.meshes.length; ++r) { const s = i.meshes[r]; if (s.subMeshes) for (let n = 0; n < s.subMeshes.length; ++n) s.subMeshes[n]._removeDrawWrapper(e); } } } /** * @internal * Rescales a texture * @param source input texture * @param destination destination texture * @param scene scene to use to render the resize * @param internalFormat format to use when resizing * @param onComplete callback to be called when resize has completed */ _rescaleTexture(e, t, i, r, s) { this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_MAG_FILTER, this._gl.LINEAR), this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_MIN_FILTER, this._gl.LINEAR), this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_WRAP_S, this._gl.CLAMP_TO_EDGE), this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_WRAP_T, this._gl.CLAMP_TO_EDGE); const n = this.createRenderTargetTexture({ width: t.width, height: t.height }, { generateMipMaps: !1, type: 0, samplingMode: 2, generateDepthBuffer: !1, generateStencilBuffer: !1 }); !this._rescalePostProcess && $e._RescalePostProcessFactory && (this._rescalePostProcess = $e._RescalePostProcessFactory(this)), this._rescalePostProcess && (this._rescalePostProcess.externalTextureSamplerBinding = !0, this._rescalePostProcess.getEffect().executeWhenCompiled(() => { this._rescalePostProcess.onApply = function(l) { l._bindTexture("textureSampler", e); }; let a = i; a || (a = this.scenes[this.scenes.length - 1]), a.postProcessManager.directRender([this._rescalePostProcess], n, !0), this._bindTextureDirectly(this._gl.TEXTURE_2D, t, !0), this._gl.copyTexImage2D(this._gl.TEXTURE_2D, 0, r, 0, 0, t.width, t.height, 0), 
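/*
 * createRenderPassId / releaseRenderPassId above register a pass name and, on
 * release, drop the per-pass draw wrappers from every submesh. Sketch of running
 * work under a dedicated pass id (illustrative; currentRenderPassId is the public
 * property shown above, and the render-target plumbing is elided):
 *
 *   const myPassId = engine.createRenderPassId("myEffectPass");
 *   const previousPassId = engine.currentRenderPassId;
 *   engine.currentRenderPassId = myPassId;   // effects are now cached per this pass
 *   // ... render the scene or a render target for this pass ...
 *   engine.currentRenderPassId = previousPassId;
 *   engine.releaseRenderPassId(myPassId);    // frees the per-submesh draw wrappers
 */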
this.unBindFramebuffer(n), n.dispose(), s && s(); })); } // FPS /** * Gets the current framerate * @returns a number representing the framerate */ getFps() { return this._fps; } /** * Gets the time spent between current and previous frame * @returns a number representing the delta time in ms */ getDeltaTime() { return this._deltaTime; } _measureFps() { this._performanceMonitor.sampleFrame(), this._fps = this._performanceMonitor.averageFPS, this._deltaTime = this._performanceMonitor.instantaneousFrameTime || 0; } /** * Wraps an external web gl texture in a Babylon texture. * @param texture defines the external texture * @param hasMipMaps defines whether the external texture has mip maps (default: false) * @param samplingMode defines the sampling mode for the external texture (default: 3) * @param width defines the width for the external texture (default: 0) * @param height defines the height for the external texture (default: 0) * @returns the babylon internal texture */ wrapWebGLTexture(e, t = !1, i = 3, r = 0, s = 0) { const n = new BI(e, this._gl), a = new ln(this, ts.Unknown, !0); return a._hardwareTexture = n, a.baseWidth = r, a.baseHeight = s, a.width = r, a.height = s, a.isReady = !0, a.useMipMaps = t, this.updateTextureSamplingMode(i, a), a; } /** * @internal */ _uploadImageToTexture(e, t, i = 0, r = 0) { const s = this._gl, n = this._getWebGLTextureType(e.type), a = this._getInternalFormat(e.format), l = this._getRGBABufferInternalSizedFormat(e.type, a), o = e.isCube ? s.TEXTURE_CUBE_MAP : s.TEXTURE_2D; this._bindTextureDirectly(o, e, !0), this._unpackFlipY(e.invertY); let u = s.TEXTURE_2D; e.isCube && (u = s.TEXTURE_CUBE_MAP_POSITIVE_X + i), s.texImage2D(u, r, l, a, n, t), this._bindTextureDirectly(o, null, !0); } /** * Updates a depth texture Comparison Mode and Function. * If the comparison Function is equal to 0, the mode will be set to none. * Otherwise, this only works in webgl 2 and requires a shadow sampler in the shader. * @param texture The texture to set the comparison function for * @param comparisonFunction The comparison function to set, 0 if no comparison required */ updateTextureComparisonFunction(e, t) { if (this.webGLVersion === 1) { Ce.Error("WebGL 1 does not support texture comparison."); return; } const i = this._gl; e.isCube ? (this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, e, !0), t === 0 ? (i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_COMPARE_FUNC, 515), i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_COMPARE_MODE, i.NONE)) : (i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_COMPARE_FUNC, t), i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_COMPARE_MODE, i.COMPARE_REF_TO_TEXTURE)), this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, null)) : (this._bindTextureDirectly(this._gl.TEXTURE_2D, e, !0), t === 0 ? 
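/*
 * getFps / getDeltaTime above expose the values sampled by _measureFps each frame.
 * Frame-rate independent animation sketch (illustrative; assumes an existing
 * scene, mesh and a DOM element `fpsLabel`):
 *
 *   scene.onBeforeRenderObservable.add(() => {
 *     const dt = engine.getDeltaTime() / 1000;  // seconds since the previous frame
 *     mesh.rotation.y += dt * Math.PI * 0.25;   // quarter turn per second
 *     fpsLabel.textContent = engine.getFps().toFixed(0) + " fps";
 *   });
 */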
(i.texParameteri(i.TEXTURE_2D, i.TEXTURE_COMPARE_FUNC, 515), i.texParameteri(i.TEXTURE_2D, i.TEXTURE_COMPARE_MODE, i.NONE)) : (i.texParameteri(i.TEXTURE_2D, i.TEXTURE_COMPARE_FUNC, t), i.texParameteri(i.TEXTURE_2D, i.TEXTURE_COMPARE_MODE, i.COMPARE_REF_TO_TEXTURE)), this._bindTextureDirectly(this._gl.TEXTURE_2D, null)), e._comparisonFunction = t; } /** * Creates a webGL buffer to use with instantiation * @param capacity defines the size of the buffer * @returns the webGL buffer */ createInstancesBuffer(e) { const t = this._gl.createBuffer(); if (!t) throw new Error("Unable to create instance buffer"); const i = new FO(t); return i.capacity = e, this.bindArrayBuffer(i), this._gl.bufferData(this._gl.ARRAY_BUFFER, e, this._gl.DYNAMIC_DRAW), i.references = 1, i; } /** * Delete a webGL buffer used with instantiation * @param buffer defines the webGL buffer to delete */ deleteInstancesBuffer(e) { this._gl.deleteBuffer(e); } _clientWaitAsync(e, t = 0, i = 10) { const r = this._gl; return new Promise((s, n) => { const a = () => { const l = r.clientWaitSync(e, t, 0); if (l == r.WAIT_FAILED) { n(); return; } if (l == r.TIMEOUT_EXPIRED) { setTimeout(a, i); return; } s(); }; a(); }); } /** * @internal */ _readPixelsAsync(e, t, i, r, s, n, a) { if (this._webGLVersion < 2) throw new Error("_readPixelsAsync only work on WebGL2+"); const l = this._gl, o = l.createBuffer(); l.bindBuffer(l.PIXEL_PACK_BUFFER, o), l.bufferData(l.PIXEL_PACK_BUFFER, a.byteLength, l.STREAM_READ), l.readPixels(e, t, i, r, s, n, 0), l.bindBuffer(l.PIXEL_PACK_BUFFER, null); const u = l.fenceSync(l.SYNC_GPU_COMMANDS_COMPLETE, 0); return u ? (l.flush(), this._clientWaitAsync(u, 0, 10).then(() => (l.deleteSync(u), l.bindBuffer(l.PIXEL_PACK_BUFFER, o), l.getBufferSubData(l.PIXEL_PACK_BUFFER, 0, a), l.bindBuffer(l.PIXEL_PACK_BUFFER, null), l.deleteBuffer(o), a))) : null; } dispose() { for (this.hideLoadingUI(), this.onNewSceneAddedObservable.clear(); this.postProcesses.length; ) this.postProcesses[0].dispose(); for (this._rescalePostProcess && this._rescalePostProcess.dispose(); this.scenes.length; ) this.scenes[0].dispose(); for (; this._virtualScenes.length; ) this._virtualScenes[0].dispose(); gi.Instances.length === 1 && $e.audioEngine && ($e.audioEngine.dispose(), $e.audioEngine = null); const e = this.getHostWindow(); e && typeof e.removeEventListener == "function" && (e.removeEventListener("blur", this._onBlur), e.removeEventListener("focus", this._onFocus)), this._renderingCanvas && (this._renderingCanvas.removeEventListener("focus", this._onCanvasFocus), this._renderingCanvas.removeEventListener("blur", this._onCanvasBlur), this._renderingCanvas.removeEventListener("pointerout", this._onCanvasPointerOut), this._renderingCanvas.removeEventListener("contextmenu", this._onCanvasContextMenu)), qR() && (document.removeEventListener("fullscreenchange", this._onFullscreenChange), document.removeEventListener("mozfullscreenchange", this._onFullscreenChange), document.removeEventListener("webkitfullscreenchange", this._onFullscreenChange), document.removeEventListener("msfullscreenchange", this._onFullscreenChange), document.removeEventListener("pointerlockchange", this._onPointerLockChange), document.removeEventListener("mspointerlockchange", this._onPointerLockChange), document.removeEventListener("mozpointerlockchange", this._onPointerLockChange), document.removeEventListener("webkitpointerlockchange", this._onPointerLockChange)), super.dispose(); const t = gi.Instances.indexOf(this); t >= 0 && gi.Instances.splice(t, 1), 
$e.Instances.length || gi.OnEnginesDisposedObservable.notifyObservers(this), this.onResizeObservable.clear(), this.onCanvasBlurObservable.clear(), this.onCanvasFocusObservable.clear(), this.onCanvasPointerOutObservable.clear(), this.onBeginFrameObservable.clear(), this.onEndFrameObservable.clear(); } _disableTouchAction() { !this._renderingCanvas || !this._renderingCanvas.setAttribute || (this._renderingCanvas.setAttribute("touch-action", "none"), this._renderingCanvas.style.touchAction = "none", this._renderingCanvas.style.webkitTapHighlightColor = "transparent"); } // Loading screen /** * Display the loading screen * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ displayLoadingUI() { if (!cu()) return; const e = this.loadingScreen; e && e.displayLoadingUI(); } /** * Hide the loading screen * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ hideLoadingUI() { if (!cu()) return; const e = this._loadingScreen; e && e.hideLoadingUI(); } /** * Gets the current loading screen object * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ get loadingScreen() { return !this._loadingScreen && this._renderingCanvas && (this._loadingScreen = $e.DefaultLoadingScreenFactory(this._renderingCanvas)), this._loadingScreen; } /** * Sets the current loading screen object * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ set loadingScreen(e) { this._loadingScreen = e; } /** * Sets the current loading screen text * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ set loadingUIText(e) { this.loadingScreen.loadingUIText = e; } /** * Sets the current loading screen background color * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/customLoadingScreen */ set loadingUIBackgroundColor(e) { this.loadingScreen.loadingUIBackgroundColor = e; } /** * creates and returns a new video element * @param constraints video constraints * @returns video element */ // eslint-disable-next-line @typescript-eslint/no-unused-vars createVideoElement(e) { return document.createElement("video"); } /** Pointerlock and fullscreen */ /** * Ask the browser to promote the current element to pointerlock mode * @param element defines the DOM element to promote */ static _RequestPointerlock(e) { if (e.requestPointerLock) { const t = e.requestPointerLock(); t instanceof Promise ? t.then(() => { e.focus(); }).catch(() => { }) : e.focus(); } } /** * Asks the browser to exit pointerlock mode */ static _ExitPointerlock() { document.exitPointerLock && document.exitPointerLock(); } /** * Ask the browser to promote the current element to fullscreen rendering mode * @param element defines the DOM element to promote */ static _RequestFullscreen(e) { const t = e.requestFullscreen || e.webkitRequestFullscreen; t && t.call(e); } /** * Asks the browser to exit fullscreen mode */ static _ExitFullscreen() { const e = document; document.exitFullscreen ? 
document.exitFullscreen() : e.webkitCancelFullScreen && e.webkitCancelFullScreen(); } /** * Get Font size information * @param font font name * @returns an object containing ascent, height and descent */ getFontOffset(e) { const t = document.createElement("span"); t.innerHTML = "Hg", t.setAttribute("style", `font: ${e} !important`); const i = document.createElement("div"); i.style.display = "inline-block", i.style.width = "1px", i.style.height = "0px", i.style.verticalAlign = "bottom"; const r = document.createElement("div"); r.style.whiteSpace = "nowrap", r.appendChild(t), r.appendChild(i), document.body.appendChild(r); let s = 0, n = 0; try { n = i.getBoundingClientRect().top - t.getBoundingClientRect().top, i.style.verticalAlign = "baseline", s = i.getBoundingClientRect().top - t.getBoundingClientRect().top; } finally { document.body.removeChild(r); } return { ascent: s, height: n, descent: n - s }; } } $e.ALPHA_DISABLE = 0; $e.ALPHA_ADD = 1; $e.ALPHA_COMBINE = 2; $e.ALPHA_SUBTRACT = 3; $e.ALPHA_MULTIPLY = 4; $e.ALPHA_MAXIMIZED = 5; $e.ALPHA_ONEONE = 6; $e.ALPHA_PREMULTIPLIED = 7; $e.ALPHA_PREMULTIPLIED_PORTERDUFF = 8; $e.ALPHA_INTERPOLATE = 9; $e.ALPHA_SCREENMODE = 10; $e.DELAYLOADSTATE_NONE = 0; $e.DELAYLOADSTATE_LOADED = 1; $e.DELAYLOADSTATE_LOADING = 2; $e.DELAYLOADSTATE_NOTLOADED = 4; $e.NEVER = 512; $e.ALWAYS = 519; $e.LESS = 513; $e.EQUAL = 514; $e.LEQUAL = 515; $e.GREATER = 516; $e.GEQUAL = 518; $e.NOTEQUAL = 517; $e.KEEP = 7680; $e.REPLACE = 7681; $e.INCR = 7682; $e.DECR = 7683; $e.INVERT = 5386; $e.INCR_WRAP = 34055; $e.DECR_WRAP = 34056; $e.TEXTURE_CLAMP_ADDRESSMODE = 0; $e.TEXTURE_WRAP_ADDRESSMODE = 1; $e.TEXTURE_MIRROR_ADDRESSMODE = 2; $e.TEXTUREFORMAT_ALPHA = 0; $e.TEXTUREFORMAT_LUMINANCE = 1; $e.TEXTUREFORMAT_LUMINANCE_ALPHA = 2; $e.TEXTUREFORMAT_RGB = 4; $e.TEXTUREFORMAT_RGBA = 5; $e.TEXTUREFORMAT_RED = 6; $e.TEXTUREFORMAT_R = 6; $e.TEXTUREFORMAT_RG = 7; $e.TEXTUREFORMAT_RED_INTEGER = 8; $e.TEXTUREFORMAT_R_INTEGER = 8; $e.TEXTUREFORMAT_RG_INTEGER = 9; $e.TEXTUREFORMAT_RGB_INTEGER = 10; $e.TEXTUREFORMAT_RGBA_INTEGER = 11; $e.TEXTURETYPE_UNSIGNED_BYTE = 0; $e.TEXTURETYPE_UNSIGNED_INT = 0; $e.TEXTURETYPE_FLOAT = 1; $e.TEXTURETYPE_HALF_FLOAT = 2; $e.TEXTURETYPE_BYTE = 3; $e.TEXTURETYPE_SHORT = 4; $e.TEXTURETYPE_UNSIGNED_SHORT = 5; $e.TEXTURETYPE_INT = 6; $e.TEXTURETYPE_UNSIGNED_INTEGER = 7; $e.TEXTURETYPE_UNSIGNED_SHORT_4_4_4_4 = 8; $e.TEXTURETYPE_UNSIGNED_SHORT_5_5_5_1 = 9; $e.TEXTURETYPE_UNSIGNED_SHORT_5_6_5 = 10; $e.TEXTURETYPE_UNSIGNED_INT_2_10_10_10_REV = 11; $e.TEXTURETYPE_UNSIGNED_INT_24_8 = 12; $e.TEXTURETYPE_UNSIGNED_INT_10F_11F_11F_REV = 13; $e.TEXTURETYPE_UNSIGNED_INT_5_9_9_9_REV = 14; $e.TEXTURETYPE_FLOAT_32_UNSIGNED_INT_24_8_REV = 15; $e.TEXTURE_NEAREST_SAMPLINGMODE = 1; $e.TEXTURE_BILINEAR_SAMPLINGMODE = 2; $e.TEXTURE_TRILINEAR_SAMPLINGMODE = 3; $e.TEXTURE_NEAREST_NEAREST_MIPLINEAR = 8; $e.TEXTURE_LINEAR_LINEAR_MIPNEAREST = 11; $e.TEXTURE_LINEAR_LINEAR_MIPLINEAR = 3; $e.TEXTURE_NEAREST_NEAREST_MIPNEAREST = 4; $e.TEXTURE_NEAREST_LINEAR_MIPNEAREST = 5; $e.TEXTURE_NEAREST_LINEAR_MIPLINEAR = 6; $e.TEXTURE_NEAREST_LINEAR = 7; $e.TEXTURE_NEAREST_NEAREST = 1; $e.TEXTURE_LINEAR_NEAREST_MIPNEAREST = 9; $e.TEXTURE_LINEAR_NEAREST_MIPLINEAR = 10; $e.TEXTURE_LINEAR_LINEAR = 2; $e.TEXTURE_LINEAR_NEAREST = 12; $e.TEXTURE_EXPLICIT_MODE = 0; $e.TEXTURE_SPHERICAL_MODE = 1; $e.TEXTURE_PLANAR_MODE = 2; $e.TEXTURE_CUBIC_MODE = 3; $e.TEXTURE_PROJECTION_MODE = 4; $e.TEXTURE_SKYBOX_MODE = 5; $e.TEXTURE_INVCUBIC_MODE = 6; $e.TEXTURE_EQUIRECTANGULAR_MODE = 7; 
$e.TEXTURE_FIXED_EQUIRECTANGULAR_MODE = 8; $e.TEXTURE_FIXED_EQUIRECTANGULAR_MIRRORED_MODE = 9; $e.SCALEMODE_FLOOR = 1; $e.SCALEMODE_NEAREST = 2; $e.SCALEMODE_CEILING = 3; $e._RescalePostProcessFactory = null; $e._RenderPassIdCounter = 0; const gce = Ae.Compose(D.One(), Ze.FromEulerAngles(0, Math.PI, 0), D.Zero()); class xi extends In { /** * Gets or sets the billboard mode. Default is 0. * * | Value | Type | Description | * | --- | --- | --- | * | 0 | BILLBOARDMODE_NONE | | * | 1 | BILLBOARDMODE_X | | * | 2 | BILLBOARDMODE_Y | | * | 4 | BILLBOARDMODE_Z | | * | 7 | BILLBOARDMODE_ALL | | * */ get billboardMode() { return this._billboardMode; } set billboardMode(e) { this._billboardMode !== e && (this._billboardMode = e, this._cache.useBillboardPosition = (this._billboardMode & xi.BILLBOARDMODE_USE_POSITION) !== 0, this._computeUseBillboardPath()); } /** * Gets or sets a boolean indicating that parent rotation should be preserved when using billboards. * This could be useful for glTF objects where parent rotation helps converting from right handed to left handed */ get preserveParentRotationForBillboard() { return this._preserveParentRotationForBillboard; } set preserveParentRotationForBillboard(e) { e !== this._preserveParentRotationForBillboard && (this._preserveParentRotationForBillboard = e, this._computeUseBillboardPath()); } _computeUseBillboardPath() { this._cache.useBillboardPath = this._billboardMode !== xi.BILLBOARDMODE_NONE && !this.preserveParentRotationForBillboard; } /** * Gets or sets the distance of the object to max, often used by skybox */ get infiniteDistance() { return this._infiniteDistance; } set infiniteDistance(e) { this._infiniteDistance !== e && (this._infiniteDistance = e); } constructor(e, t = null, i = !0) { super(e, t), this._forward = new D(0, 0, 1), this._up = new D(0, 1, 0), this._right = new D(1, 0, 0), this._position = D.Zero(), this._rotation = D.Zero(), this._rotationQuaternion = null, this._scaling = D.One(), this._transformToBoneReferal = null, this._isAbsoluteSynced = !1, this._billboardMode = xi.BILLBOARDMODE_NONE, this._preserveParentRotationForBillboard = !1, this.scalingDeterminant = 1, this._infiniteDistance = !1, this.ignoreNonUniformScaling = !1, this.reIntegrateRotationIntoRotationQuaternion = !1, this._poseMatrix = null, this._localMatrix = Ae.Zero(), this._usePivotMatrix = !1, this._absolutePosition = D.Zero(), this._absoluteScaling = D.Zero(), this._absoluteRotationQuaternion = Ze.Identity(), this._pivotMatrix = Ae.Identity(), this._postMultiplyPivotMatrix = !1, this._isWorldMatrixFrozen = !1, this._indexInSceneTransformNodesArray = -1, this.onAfterWorldMatrixUpdateObservable = new Fe(), this._nonUniformScaling = !1, i && this.getScene().addTransformNode(this); } /** * Gets a string identifying the name of the class * @returns "TransformNode" string */ getClassName() { return "TransformNode"; } /** * Gets or set the node position (default is (0.0, 0.0, 0.0)) */ get position() { return this._position; } set position(e) { this._position = e, this._isDirty = !0; } /** * return true if a pivot has been set * @returns true if a pivot matrix is used */ isUsingPivotMatrix() { return this._usePivotMatrix; } /** * return true if pivot matrix must be cancelled in the world matrix. When this parameter is set to true (default), the inverse of the pivot matrix is also applied at the end to cancel the transformation effect. 
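* A minimal usage sketch of this flag, assuming a transform node named `node` and the public `BABYLON.Matrix` API:
* ```javascript
* // setPivotMatrix defaults its second argument (postMultiplyPivotMatrix) to true,
* // so the inverse pivot is applied afterwards and the offset is cancelled in the world matrix
* node.setPivotMatrix(BABYLON.Matrix.Translation(-1, 0, 0));
* node.isUsingPostMultiplyPivotMatrix(); // true
* node.setPivotMatrix(BABYLON.Matrix.Translation(-1, 0, 0), false);
* node.isUsingPostMultiplyPivotMatrix(); // false
* ```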
*/ isUsingPostMultiplyPivotMatrix() { return this._postMultiplyPivotMatrix; } /** * Gets or sets the rotation property : a Vector3 defining the rotation value in radians around each local axis X, Y, Z (default is (0.0, 0.0, 0.0)). * If rotation quaternion is set, this Vector3 will be ignored and copy from the quaternion */ get rotation() { return this._rotation; } set rotation(e) { this._rotation = e, this._rotationQuaternion = null, this._isDirty = !0; } /** * Gets or sets the scaling property : a Vector3 defining the node scaling along each local axis X, Y, Z (default is (1.0, 1.0, 1.0)). */ get scaling() { return this._scaling; } set scaling(e) { this._scaling = e, this._isDirty = !0; } /** * Gets or sets the rotation Quaternion property : this a Quaternion object defining the node rotation by using a unit quaternion (undefined by default, but can be null). * If set, only the rotationQuaternion is then used to compute the node rotation (ie. node.rotation will be ignored) */ get rotationQuaternion() { return this._rotationQuaternion; } set rotationQuaternion(e) { this._rotationQuaternion = e, e && this._rotation.setAll(0), this._isDirty = !0; } /** * The forward direction of that transform in world space. */ get forward() { return D.TransformNormalFromFloatsToRef(0, 0, this.getScene().useRightHandedSystem ? -1 : 1, this.getWorldMatrix(), this._forward), this._forward.normalize(); } /** * The up direction of that transform in world space. */ get up() { return D.TransformNormalFromFloatsToRef(0, 1, 0, this.getWorldMatrix(), this._up), this._up.normalize(); } /** * The right direction of that transform in world space. */ get right() { return D.TransformNormalFromFloatsToRef(this.getScene().useRightHandedSystem ? -1 : 1, 0, 0, this.getWorldMatrix(), this._right), this._right.normalize(); } /** * Copies the parameter passed Matrix into the mesh Pose matrix. * @param matrix the matrix to copy the pose from * @returns this TransformNode. */ updatePoseMatrix(e) { return this._poseMatrix ? (this._poseMatrix.copyFrom(e), this) : (this._poseMatrix = e.clone(), this); } /** * Returns the mesh Pose matrix. * @returns the pose matrix */ getPoseMatrix() { return this._poseMatrix || (this._poseMatrix = Ae.Identity()), this._poseMatrix; } /** @internal */ _isSynchronized() { const e = this._cache; return !(this._billboardMode !== e.billboardMode || this._billboardMode !== xi.BILLBOARDMODE_NONE || e.pivotMatrixUpdated || this._infiniteDistance || this._position._isDirty || this._scaling._isDirty || this._rotationQuaternion && this._rotationQuaternion._isDirty || this._rotation._isDirty); } /** @internal */ _initCache() { super._initCache(); const e = this._cache; e.localMatrixUpdated = !1, e.billboardMode = -1, e.infiniteDistance = !1, e.useBillboardPosition = !1, e.useBillboardPath = !1; } /** * Returns the current mesh absolute position. * Returns a Vector3. */ get absolutePosition() { return this.getAbsolutePosition(); } /** * Returns the current mesh absolute scaling. * Returns a Vector3. */ get absoluteScaling() { return this._syncAbsoluteScalingAndRotation(), this._absoluteScaling; } /** * Returns the current mesh absolute rotation. * Returns a Quaternion. 
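* A short sketch of reading the world-space rotation, assuming `parent` and `child` transform nodes in the same scene:
* ```javascript
* child.parent = parent;
* parent.rotation.y = Math.PI / 2;
* child.computeWorldMatrix(true);
* const worldRotation = child.absoluteRotationQuaternion; // includes the parent's rotation
* ```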
*/ get absoluteRotationQuaternion() { return this._syncAbsoluteScalingAndRotation(), this._absoluteRotationQuaternion; } /** * Sets a new matrix to apply before all other transformation * @param matrix defines the transform matrix * @returns the current TransformNode */ setPreTransformMatrix(e) { return this.setPivotMatrix(e, !1); } /** * Sets a new pivot matrix to the current node * @param matrix defines the new pivot matrix to use * @param postMultiplyPivotMatrix defines if the pivot matrix must be cancelled in the world matrix. When this parameter is set to true (default), the inverse of the pivot matrix is also applied at the end to cancel the transformation effect * @returns the current TransformNode */ setPivotMatrix(e, t = !0) { return this._pivotMatrix.copyFrom(e), this._usePivotMatrix = !this._pivotMatrix.isIdentity(), this._cache.pivotMatrixUpdated = !0, this._postMultiplyPivotMatrix = t, this._postMultiplyPivotMatrix && (this._pivotMatrixInverse ? this._pivotMatrix.invertToRef(this._pivotMatrixInverse) : this._pivotMatrixInverse = Ae.Invert(this._pivotMatrix)), this; } /** * Returns the mesh pivot matrix. * Default : Identity. * @returns the matrix */ getPivotMatrix() { return this._pivotMatrix; } /** * Instantiate (when possible) or clone that node with its hierarchy * @param newParent defines the new parent to use for the instance (or clone) * @param options defines options to configure how copy is done * @param options.doNotInstantiate defines if the model must be instantiated or just cloned * @param onNewNodeCreated defines an option callback to call when a clone or an instance is created * @returns an instance (or a clone) of the current node with its hierarchy */ instantiateHierarchy(e = null, t, i) { const r = this.clone("Clone of " + (this.name || this.id), e || this.parent, !0); r && i && i(this, r); for (const s of this.getChildTransformNodes(!0)) s.instantiateHierarchy(r, t, i); return r; } /** * Prevents the World matrix to be computed any longer * @param newWorldMatrix defines an optional matrix to use as world matrix * @param decompose defines whether to decompose the given newWorldMatrix or directly assign * @returns the TransformNode. */ freezeWorldMatrix(e = null, t = !1) { return e ? t ? (this._rotation.setAll(0), this._rotationQuaternion = this._rotationQuaternion || Ze.Identity(), e.decompose(this._scaling, this._rotationQuaternion, this._position), this.computeWorldMatrix(!0)) : (this._worldMatrix = e, this._absolutePosition.copyFromFloats(this._worldMatrix.m[12], this._worldMatrix.m[13], this._worldMatrix.m[14]), this._afterComputeWorldMatrix()) : (this._isWorldMatrixFrozen = !1, this.computeWorldMatrix(!0)), this._isDirty = !1, this._isWorldMatrixFrozen = !0, this; } /** * Allows back the World matrix computation. * @returns the TransformNode. */ unfreezeWorldMatrix() { return this._isWorldMatrixFrozen = !1, this.computeWorldMatrix(!0), this; } /** * True if the World matrix has been frozen. */ get isWorldMatrixFrozen() { return this._isWorldMatrixFrozen; } /** * Returns the mesh absolute position in the World. * @returns a Vector3. */ getAbsolutePosition() { return this.computeWorldMatrix(), this._absolutePosition; } /** * Sets the mesh absolute position in the World from a Vector3 or an Array(3). * @param absolutePosition the absolute position to set * @returns the TransformNode. 
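* A minimal usage sketch, assuming a node named `node` and the public `BABYLON.Vector3` type:
* ```javascript
* // place the node at (0, 2, 0) in world space, whatever its parent's transform is
* node.setAbsolutePosition(new BABYLON.Vector3(0, 2, 0));
* node.getAbsolutePosition(); // (0, 2, 0)
* ```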
*/ setAbsolutePosition(e) { if (!e) return this; let t, i, r; if (e.x === void 0) { if (arguments.length < 3) return this; t = arguments[0], i = arguments[1], r = arguments[2]; } else t = e.x, i = e.y, r = e.z; if (this.parent) { const s = de.Matrix[0]; this.parent.getWorldMatrix().invertToRef(s), D.TransformCoordinatesFromFloatsToRef(t, i, r, s, this.position); } else this.position.x = t, this.position.y = i, this.position.z = r; return this._absolutePosition.copyFrom(e), this; } /** * Sets the mesh position in its local space. * @param vector3 the position to set in localspace * @returns the TransformNode. */ setPositionWithLocalVector(e) { return this.computeWorldMatrix(), this.position = D.TransformNormal(e, this._localMatrix), this; } /** * Returns the mesh position in the local space from the current World matrix values. * @returns a new Vector3. */ getPositionExpressedInLocalSpace() { this.computeWorldMatrix(); const e = de.Matrix[0]; return this._localMatrix.invertToRef(e), D.TransformNormal(this.position, e); } /** * Translates the mesh along the passed Vector3 in its local space. * @param vector3 the distance to translate in localspace * @returns the TransformNode. */ locallyTranslate(e) { return this.computeWorldMatrix(!0), this.position = D.TransformCoordinates(e, this._localMatrix), this; } /** * Orients a mesh towards a target point. Mesh must be drawn facing user. * @param targetPoint the position (must be in same space as current mesh) to look at * @param yawCor optional yaw (y-axis) correction in radians * @param pitchCor optional pitch (x-axis) correction in radians * @param rollCor optional roll (z-axis) correction in radians * @param space the chosen space of the target * @returns the TransformNode. */ lookAt(e, t = 0, i = 0, r = 0, s = qr.LOCAL) { const n = xi._LookAtVectorCache, a = s === qr.LOCAL ? this.position : this.getAbsolutePosition(); if (e.subtractToRef(a, n), this.setDirection(n, t, i, r), s === qr.WORLD && this.parent) if (this.rotationQuaternion) { const l = de.Matrix[0]; this.rotationQuaternion.toRotationMatrix(l); const o = de.Matrix[1]; this.parent.getWorldMatrix().getRotationMatrixToRef(o), o.invert(), l.multiplyToRef(o, l), this.rotationQuaternion.fromRotationMatrix(l); } else { const l = de.Quaternion[0]; Ze.FromEulerVectorToRef(this.rotation, l); const o = de.Matrix[0]; l.toRotationMatrix(o); const u = de.Matrix[1]; this.parent.getWorldMatrix().getRotationMatrixToRef(u), u.invert(), o.multiplyToRef(u, o), l.fromRotationMatrix(o), l.toEulerAnglesToRef(this.rotation); } return this; } /** * Returns a new Vector3 that is the localAxis, expressed in the mesh local space, rotated like the mesh. * This Vector3 is expressed in the World space. * @param localAxis axis to rotate * @returns a new Vector3 that is the localAxis, expressed in the mesh local space, rotated like the mesh. */ getDirection(e) { const t = D.Zero(); return this.getDirectionToRef(e, t), t; } /** * Sets the Vector3 "result" as the rotated Vector3 "localAxis" in the same rotation than the mesh. * localAxis is expressed in the mesh local space. * result is computed in the World space from the mesh World matrix. * @param localAxis axis to rotate * @param result the resulting transformnode * @returns this TransformNode. */ getDirectionToRef(e, t) { return D.TransformNormalToRef(e, this.getWorldMatrix(), t), this; } /** * Sets this transform node rotation to the given local axis. 
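* A brief sketch, assuming a node named `node` and the public `BABYLON.Vector3` type:
* ```javascript
* // orient the node so its local forward points along world +X
* node.setDirection(new BABYLON.Vector3(1, 0, 0));
* ```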
* @param localAxis the axis in local space * @param yawCor optional yaw (y-axis) correction in radians * @param pitchCor optional pitch (x-axis) correction in radians * @param rollCor optional roll (z-axis) correction in radians * @returns this TransformNode */ setDirection(e, t = 0, i = 0, r = 0) { const s = -Math.atan2(e.z, e.x) + Math.PI / 2, n = Math.sqrt(e.x * e.x + e.z * e.z), a = -Math.atan2(e.y, n); return this.rotationQuaternion ? Ze.RotationYawPitchRollToRef(s + t, a + i, r, this.rotationQuaternion) : (this.rotation.x = a + i, this.rotation.y = s + t, this.rotation.z = r), this; } /** * Sets a new pivot point to the current node * @param point defines the new pivot point to use * @param space defines if the point is in world or local space (local by default) * @returns the current TransformNode */ setPivotPoint(e, t = qr.LOCAL) { this.getScene().getRenderId() == 0 && this.computeWorldMatrix(!0); const i = this.getWorldMatrix(); if (t == qr.WORLD) { const r = de.Matrix[0]; i.invertToRef(r), e = D.TransformCoordinates(e, r); } return this.setPivotMatrix(Ae.Translation(-e.x, -e.y, -e.z), !0); } /** * Returns a new Vector3 set with the mesh pivot point coordinates in the local space. * @returns the pivot point */ getPivotPoint() { const e = D.Zero(); return this.getPivotPointToRef(e), e; } /** * Sets the passed Vector3 "result" with the coordinates of the mesh pivot point in the local space. * @param result the vector3 to store the result * @returns this TransformNode. */ getPivotPointToRef(e) { return e.x = -this._pivotMatrix.m[12], e.y = -this._pivotMatrix.m[13], e.z = -this._pivotMatrix.m[14], this; } /** * Returns a new Vector3 set with the mesh pivot point World coordinates. * @returns a new Vector3 set with the mesh pivot point World coordinates. */ getAbsolutePivotPoint() { const e = D.Zero(); return this.getAbsolutePivotPointToRef(e), e; } /** * Sets the Vector3 "result" coordinates with the mesh pivot point World coordinates. * @param result vector3 to store the result * @returns this TransformNode. */ getAbsolutePivotPointToRef(e) { return this.getPivotPointToRef(e), D.TransformCoordinatesToRef(e, this.getWorldMatrix(), e), this; } /** * Flag the transform node as dirty (Forcing it to update everything) * @param property if set to "rotation" the objects rotationQuaternion will be set to null * @returns this node */ markAsDirty(e) { if (this._isDirty) return this; if (this._children) for (const t of this._children) t.markAsDirty(e); return super.markAsDirty(e); } /** * Defines the passed node as the parent of the current node. * The node will remain exactly where it is and its position / rotation will be updated accordingly. * Note that if the mesh has a pivot matrix / point defined it will be applied after the parent was updated. * In that case the node will not remain in the same space as it is, as the pivot will be applied. * To avoid this, you can set updatePivot to true and the pivot will be updated to identity * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/parent_pivot/parent * @param node the node ot set as the parent * @param preserveScalingSign if true, keep scaling sign of child. Otherwise, scaling sign might change. * @param updatePivot if true, update the pivot matrix to keep the node in the same space as before * @returns this TransformNode. 
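* A minimal sketch, assuming `parent` and `child` transform nodes in the same scene:
* ```javascript
* // re-parent while keeping the child exactly where it is in world space
* child.setParent(parent);
* // detach again, still preserving the current world transform
* child.setParent(null);
* ```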
*/ setParent(e, t = !1, i = !1) { if (!e && !this.parent) return this; const r = de.Quaternion[0], s = de.Vector3[0], n = de.Vector3[1], a = de.Matrix[1]; Ae.IdentityToRef(a); const l = de.Matrix[0]; this.computeWorldMatrix(!0); let o = this.rotationQuaternion; return o || (o = xi._TmpRotation, Ze.RotationYawPitchRollToRef(this._rotation.y, this._rotation.x, this._rotation.z, o)), Ae.ComposeToRef(this.scaling, o, this.position, l), this.parent && l.multiplyToRef(this.parent.computeWorldMatrix(!0), l), e && (e.computeWorldMatrix(!0).invertToRef(a), l.multiplyToRef(a, l)), l.decompose(n, r, s, t ? this : void 0), this.rotationQuaternion ? this.rotationQuaternion.copyFrom(r) : r.toEulerAnglesToRef(this.rotation), this.scaling.copyFrom(n), this.position.copyFrom(s), this.parent = e, i && this.setPivotMatrix(Ae.Identity()), this; } /** * True if the scaling property of this object is non uniform eg. (1,2,1) */ get nonUniformScaling() { return this._nonUniformScaling; } /** * @internal */ _updateNonUniformScalingState(e) { return this._nonUniformScaling === e ? !1 : (this._nonUniformScaling = e, !0); } /** * Attach the current TransformNode to another TransformNode associated with a bone * @param bone Bone affecting the TransformNode * @param affectedTransformNode TransformNode associated with the bone * @returns this object */ attachToBone(e, t) { return this._currentParentWhenAttachingToBone = this.parent, this._transformToBoneReferal = t, this.parent = e, e.getSkeleton().prepare(!0), e.getFinalMatrix().determinant() < 0 && (this.scalingDeterminant *= -1), this; } /** * Detach the transform node if its associated with a bone * @param resetToPreviousParent Indicates if the parent that was in effect when attachToBone was called should be set back or if we should set parent to null instead (defaults to the latter) * @returns this object */ detachFromBone(e = !1) { return this.parent ? (this.parent.getWorldMatrix().determinant() < 0 && (this.scalingDeterminant *= -1), this._transformToBoneReferal = null, e ? this.parent = this._currentParentWhenAttachingToBone : this.parent = null, this) : (e && (this.parent = this._currentParentWhenAttachingToBone), this); } /** * Rotates the mesh around the axis vector for the passed angle (amount) expressed in radians, in the given space. * space (default LOCAL) can be either Space.LOCAL, either Space.WORLD. * Note that the property `rotationQuaternion` is then automatically updated and the property `rotation` is set to (0,0,0) and no longer used. * The passed axis is also normalized. * @param axis the axis to rotate around * @param amount the amount to rotate in radians * @param space Space to rotate in (Default: local) * @returns the TransformNode. */ rotate(e, t, i) { e.normalize(), this.rotationQuaternion || (this.rotationQuaternion = this.rotation.toQuaternion(), this.rotation.setAll(0)); let r; if (!i || i === qr.LOCAL) r = Ze.RotationAxisToRef(e, t, xi._RotationAxisCache), this.rotationQuaternion.multiplyToRef(r, this.rotationQuaternion); else { if (this.parent) { const s = this.parent.getWorldMatrix(), n = de.Matrix[0]; s.invertToRef(n), e = D.TransformNormal(e, n), s.determinant() < 0 && (t *= -1); } r = Ze.RotationAxisToRef(e, t, xi._RotationAxisCache), r.multiplyToRef(this.rotationQuaternion, this.rotationQuaternion); } return this; } /** * Rotates the mesh around the axis vector for the passed angle (amount) expressed in radians, in world space. 
* Note that the property `rotationQuaternion` is then automatically updated and the property `rotation` is set to (0,0,0) and no longer used. * The passed axis is also normalized. . * Method is based on http://www.euclideanspace.com/maths/geometry/affine/aroundPoint/index.htm * @param point the point to rotate around * @param axis the axis to rotate around * @param amount the amount to rotate in radians * @returns the TransformNode */ rotateAround(e, t, i) { t.normalize(), this.rotationQuaternion || (this.rotationQuaternion = Ze.RotationYawPitchRoll(this.rotation.y, this.rotation.x, this.rotation.z), this.rotation.setAll(0)); const r = de.Vector3[0], s = de.Vector3[1], n = de.Vector3[2], a = de.Quaternion[0], l = de.Matrix[0], o = de.Matrix[1], u = de.Matrix[2], h = de.Matrix[3]; return e.subtractToRef(this.position, r), Ae.TranslationToRef(r.x, r.y, r.z, l), Ae.TranslationToRef(-r.x, -r.y, -r.z, o), Ae.RotationAxisToRef(t, i, u), o.multiplyToRef(u, h), h.multiplyToRef(l, h), h.decompose(s, a, n), this.position.addInPlace(n), a.multiplyToRef(this.rotationQuaternion, this.rotationQuaternion), this; } /** * Translates the mesh along the axis vector for the passed distance in the given space. * space (default LOCAL) can be either Space.LOCAL, either Space.WORLD. * @param axis the axis to translate in * @param distance the distance to translate * @param space Space to rotate in (Default: local) * @returns the TransformNode. */ translate(e, t, i) { const r = e.scale(t); if (!i || i === qr.LOCAL) { const s = this.getPositionExpressedInLocalSpace().add(r); this.setPositionWithLocalVector(s); } else this.setAbsolutePosition(this.getAbsolutePosition().add(r)); return this; } /** * Adds a rotation step to the mesh current rotation. * x, y, z are Euler angles expressed in radians. * This methods updates the current mesh rotation, either mesh.rotation, either mesh.rotationQuaternion if it's set. * This means this rotation is made in the mesh local space only. * It's useful to set a custom rotation order different from the BJS standard one YXZ. * Example : this rotates the mesh first around its local X axis, then around its local Z axis, finally around its local Y axis. * ```javascript * mesh.addRotation(x1, 0, 0).addRotation(0, 0, z2).addRotation(0, 0, y3); * ``` * Note that `addRotation()` accumulates the passed rotation values to the current ones and computes the .rotation or .rotationQuaternion updated values. * Under the hood, only quaternions are used. So it's a little faster is you use .rotationQuaternion because it doesn't need to translate them back to Euler angles. * @param x Rotation to add * @param y Rotation to add * @param z Rotation to add * @returns the TransformNode. */ addRotation(e, t, i) { let r; this.rotationQuaternion ? r = this.rotationQuaternion : (r = de.Quaternion[1], Ze.RotationYawPitchRollToRef(this.rotation.y, this.rotation.x, this.rotation.z, r)); const s = de.Quaternion[0]; return Ze.RotationYawPitchRollToRef(t, e, i, s), r.multiplyInPlace(s), this.rotationQuaternion || r.toEulerAnglesToRef(this.rotation), this; } /** * @internal */ _getEffectiveParent() { return this.parent; } /** * Returns whether the transform node world matrix computation needs the camera information to be computed. * This is the case when the node is a billboard or has an infinite distance for instance. 
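* A short sketch of the two cases, assuming a transform node named `node`:
* ```javascript
* node.billboardMode = BABYLON.TransformNode.BILLBOARDMODE_ALL;
* node.isWorldMatrixCameraDependent(); // true (billboard orientation follows the camera)
* node.billboardMode = BABYLON.TransformNode.BILLBOARDMODE_NONE;
* node.infiniteDistance = true; // with no parent, the position follows the camera
* node.isWorldMatrixCameraDependent(); // true as well
* ```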
* @returns true if the world matrix computation needs the camera information to be computed */ isWorldMatrixCameraDependent() { return this._infiniteDistance && !this.parent || this._billboardMode !== xi.BILLBOARDMODE_NONE && !this.preserveParentRotationForBillboard; } /** * Computes the world matrix of the node * @param force defines if the cache version should be invalidated forcing the world matrix to be created from scratch * @param camera defines the camera used if different from the scene active camera (This is used with modes like Billboard or infinite distance) * @returns the world matrix */ computeWorldMatrix(e = !1, t = null) { if (this._isWorldMatrixFrozen && !this._isDirty) return this._worldMatrix; const i = this.getScene().getRenderId(); if (!this._isDirty && !e && (this._currentRenderId === i || this.isSynchronized())) return this._currentRenderId = i, this._worldMatrix; t = t || this.getScene().activeCamera, this._updateCache(); const r = this._cache; r.pivotMatrixUpdated = !1, r.billboardMode = this.billboardMode, r.infiniteDistance = this.infiniteDistance, r.parent = this._parentNode, this._currentRenderId = i, this._childUpdateId += 1, this._isDirty = !1, this._position._isDirty = !1, this._rotation._isDirty = !1, this._scaling._isDirty = !1; const s = this._getEffectiveParent(), n = xi._TmpScaling; let a = this._position; if (this._infiniteDistance && !this.parent && t) { const o = t.getWorldMatrix(), u = new D(o.m[12], o.m[13], o.m[14]); a = xi._TmpTranslation, a.copyFromFloats(this._position.x + u.x, this._position.y + u.y, this._position.z + u.z); } n.copyFromFloats(this._scaling.x * this.scalingDeterminant, this._scaling.y * this.scalingDeterminant, this._scaling.z * this.scalingDeterminant); let l; if (this._rotationQuaternion ? 
(this._rotationQuaternion._isDirty = !1, l = this._rotationQuaternion, this.reIntegrateRotationIntoRotationQuaternion && this.rotation.lengthSquared() && (this._rotationQuaternion.multiplyInPlace(Ze.RotationYawPitchRoll(this._rotation.y, this._rotation.x, this._rotation.z)), this._rotation.copyFromFloats(0, 0, 0))) : (l = xi._TmpRotation, Ze.RotationYawPitchRollToRef(this._rotation.y, this._rotation.x, this._rotation.z, l)), this._usePivotMatrix) { const o = de.Matrix[1]; Ae.ScalingToRef(n.x, n.y, n.z, o); const u = de.Matrix[0]; l.toRotationMatrix(u), this._pivotMatrix.multiplyToRef(o, de.Matrix[4]), de.Matrix[4].multiplyToRef(u, this._localMatrix), this._postMultiplyPivotMatrix && this._localMatrix.multiplyToRef(this._pivotMatrixInverse, this._localMatrix), this._localMatrix.addTranslationFromFloats(a.x, a.y, a.z); } else Ae.ComposeToRef(n, l, a, this._localMatrix); if (s && s.getWorldMatrix) { if (e && s.computeWorldMatrix(e), r.useBillboardPath) { if (this._transformToBoneReferal) { const d = this.parent; d.getSkeleton().prepare(), d.getFinalMatrix().multiplyToRef(this._transformToBoneReferal.getWorldMatrix(), de.Matrix[7]); } else de.Matrix[7].copyFrom(s.getWorldMatrix()); const o = de.Vector3[5], u = de.Vector3[6], h = de.Quaternion[0]; de.Matrix[7].decompose(u, h, o), Ae.ScalingToRef(u.x, u.y, u.z, de.Matrix[7]), de.Matrix[7].setTranslation(o), xi.BillboardUseParentOrientation && (this._position.applyRotationQuaternionToRef(h, o), this._localMatrix.setTranslation(o)), this._localMatrix.multiplyToRef(de.Matrix[7], this._worldMatrix); } else if (this._transformToBoneReferal) { const o = this.parent; o.getSkeleton().prepare(), this._localMatrix.multiplyToRef(o.getFinalMatrix(), de.Matrix[6]), de.Matrix[6].multiplyToRef(this._transformToBoneReferal.getWorldMatrix(), this._worldMatrix); } else this._localMatrix.multiplyToRef(s.getWorldMatrix(), this._worldMatrix); this._markSyncedWithParent(); } else this._worldMatrix.copyFrom(this._localMatrix); if (r.useBillboardPath && t && this.billboardMode && !r.useBillboardPosition) { const o = de.Vector3[0]; if (this._worldMatrix.getTranslationToRef(o), de.Matrix[1].copyFrom(t.getViewMatrix()), this._scene.useRightHandedSystem && de.Matrix[1].multiplyToRef(gce, de.Matrix[1]), de.Matrix[1].setTranslationFromFloats(0, 0, 0), de.Matrix[1].invertToRef(de.Matrix[0]), (this.billboardMode & xi.BILLBOARDMODE_ALL) !== xi.BILLBOARDMODE_ALL) { de.Matrix[0].decompose(void 0, de.Quaternion[0], void 0); const u = de.Vector3[1]; de.Quaternion[0].toEulerAnglesToRef(u), (this.billboardMode & xi.BILLBOARDMODE_X) !== xi.BILLBOARDMODE_X && (u.x = 0), (this.billboardMode & xi.BILLBOARDMODE_Y) !== xi.BILLBOARDMODE_Y && (u.y = 0), (this.billboardMode & xi.BILLBOARDMODE_Z) !== xi.BILLBOARDMODE_Z && (u.z = 0), Ae.RotationYawPitchRollToRef(u.y, u.x, u.z, de.Matrix[0]); } this._worldMatrix.setTranslationFromFloats(0, 0, 0), this._worldMatrix.multiplyToRef(de.Matrix[0], this._worldMatrix), this._worldMatrix.setTranslation(de.Vector3[0]); } else if (r.useBillboardPath && t && r.useBillboardPosition) { const o = de.Vector3[0]; this._worldMatrix.getTranslationToRef(o); const u = t.globalPosition; this._worldMatrix.invertToRef(de.Matrix[1]); const h = de.Vector3[1]; D.TransformCoordinatesToRef(u, de.Matrix[1], h), h.normalize(); const d = -Math.atan2(h.z, h.x) + Math.PI / 2, f = Math.sqrt(h.x * h.x + h.z * h.z), p = -Math.atan2(h.y, f); if (Ze.RotationYawPitchRollToRef(d, p, 0, de.Quaternion[0]), (this.billboardMode & xi.BILLBOARDMODE_ALL) !== xi.BILLBOARDMODE_ALL) { const m = 
de.Vector3[1]; de.Quaternion[0].toEulerAnglesToRef(m), (this.billboardMode & xi.BILLBOARDMODE_X) !== xi.BILLBOARDMODE_X && (m.x = 0), (this.billboardMode & xi.BILLBOARDMODE_Y) !== xi.BILLBOARDMODE_Y && (m.y = 0), (this.billboardMode & xi.BILLBOARDMODE_Z) !== xi.BILLBOARDMODE_Z && (m.z = 0), Ae.RotationYawPitchRollToRef(m.y, m.x, m.z, de.Matrix[0]); } else Ae.FromQuaternionToRef(de.Quaternion[0], de.Matrix[0]); this._worldMatrix.setTranslationFromFloats(0, 0, 0), this._worldMatrix.multiplyToRef(de.Matrix[0], this._worldMatrix), this._worldMatrix.setTranslation(de.Vector3[0]); } return this.ignoreNonUniformScaling ? this._updateNonUniformScalingState(!1) : this._scaling.isNonUniformWithinEpsilon(1e-6) ? this._updateNonUniformScalingState(!0) : s && s._nonUniformScaling ? this._updateNonUniformScalingState(s._nonUniformScaling) : this._updateNonUniformScalingState(!1), this._afterComputeWorldMatrix(), this._absolutePosition.copyFromFloats(this._worldMatrix.m[12], this._worldMatrix.m[13], this._worldMatrix.m[14]), this._isAbsoluteSynced = !1, this.onAfterWorldMatrixUpdateObservable.notifyObservers(this), this._poseMatrix || (this._poseMatrix = Ae.Invert(this._worldMatrix)), this._worldMatrixDeterminantIsDirty = !0, this._worldMatrix; } /** * Resets this nodeTransform's local matrix to Matrix.Identity(). * @param independentOfChildren indicates if all child nodeTransform's world-space transform should be preserved. */ resetLocalMatrix(e = !0) { if (this.computeWorldMatrix(), e) { const t = this.getChildren(); for (let i = 0; i < t.length; ++i) { const r = t[i]; if (r) { r.computeWorldMatrix(); const s = de.Matrix[0]; r._localMatrix.multiplyToRef(this._localMatrix, s); const n = de.Quaternion[0]; s.decompose(r.scaling, n, r.position), r.rotationQuaternion ? r.rotationQuaternion.copyFrom(n) : n.toEulerAnglesToRef(r.rotation); } } } this.scaling.copyFromFloats(1, 1, 1), this.position.copyFromFloats(0, 0, 0), this.rotation.copyFromFloats(0, 0, 0), this.rotationQuaternion && (this.rotationQuaternion = Ze.Identity()), this._worldMatrix = Ae.Identity(); } _afterComputeWorldMatrix() { } /** * If you'd like to be called back after the mesh position, rotation or scaling has been updated. * @param func callback function to add * * @returns the TransformNode. */ registerAfterWorldMatrixUpdate(e) { return this.onAfterWorldMatrixUpdateObservable.add(e), this; } /** * Removes a registered callback function. * @param func callback function to remove * @returns the TransformNode. 
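* A minimal register/unregister sketch, assuming a transform node named `node`:
* ```javascript
* const onUpdated = (n) => console.log(n.name, "world matrix updated");
* node.registerAfterWorldMatrixUpdate(onUpdated);
* // later, stop listening with the same function reference
* node.unregisterAfterWorldMatrixUpdate(onUpdated);
* ```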
*/ unregisterAfterWorldMatrixUpdate(e) { return this.onAfterWorldMatrixUpdateObservable.removeCallback(e), this; } /** * Gets the position of the current mesh in camera space * @param camera defines the camera to use * @returns a position */ getPositionInCameraSpace(e = null) { return e || (e = this.getScene().activeCamera), D.TransformCoordinates(this.getAbsolutePosition(), e.getViewMatrix()); } /** * Returns the distance from the mesh to the active camera * @param camera defines the camera to use * @returns the distance */ getDistanceToCamera(e = null) { return e || (e = this.getScene().activeCamera), this.getAbsolutePosition().subtract(e.globalPosition).length(); } /** * Clone the current transform node * @param name Name of the new clone * @param newParent New parent for the clone * @param doNotCloneChildren Do not clone children hierarchy * @returns the new transform node */ clone(e, t, i) { const r = St.Clone(() => new xi(e, this.getScene()), this); if (r.name = e, r.id = e, t && (r.parent = t), !i) { const s = this.getDescendants(!0); for (let n = 0; n < s.length; n++) { const a = s[n]; a.clone && a.clone(e + "." + a.name, r); } } return r; } /** * Serializes the objects information. * @param currentSerializationObject defines the object to serialize in * @returns the serialized object */ serialize(e) { const t = St.Serialize(this, e); return t.type = this.getClassName(), t.uniqueId = this.uniqueId, this.parent && this.parent._serializeAsParent(t), t.localMatrix = this.getPivotMatrix().asArray(), t.isEnabled = this.isEnabled(), t; } // Statics /** * Returns a new TransformNode object parsed from the source provided. * @param parsedTransformNode is the source. * @param scene the scene the object belongs to * @param rootUrl is a string, it's the root URL to prefix the `delayLoadingFile` property with * @returns a new TransformNode object parsed from the source provided. */ static Parse(e, t, i) { const r = St.Parse(() => new xi(e.name, t), e, t, i); return e.localMatrix ? r.setPreTransformMatrix(Ae.FromArray(e.localMatrix)) : e.pivotMatrix && r.setPivotMatrix(Ae.FromArray(e.pivotMatrix)), r.setEnabled(e.isEnabled), r._waitingParsedUniqueId = e.uniqueId, e.parentId !== void 0 && (r._waitingParentId = e.parentId), e.parentInstanceIndex !== void 0 && (r._waitingParentInstanceIndex = e.parentInstanceIndex), r; } /** * Get all child-transformNodes of this node * @param directDescendantsOnly defines if true only direct descendants of 'this' will be considered, if false direct and also indirect (children of children, an so on in a recursive manner) descendants of 'this' will be considered * @param predicate defines an optional predicate that will be called on every evaluated child, the predicate must return true for a given child to be part of the result, otherwise it will be ignored * @returns an array of TransformNode */ getChildTransformNodes(e, t) { const i = []; return this._getDescendants(i, e, (r) => (!t || t(r)) && r instanceof xi), i; } /** * Releases resources associated with this transform node. 
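* A brief sketch of the options, assuming a transform node named `node`:
* ```javascript
* node.dispose();            // default: also disposes the node's children
* node.dispose(true);        // keep the children, they are simply un-parented
* node.dispose(false, true); // also dispose referenced materials and textures
* ```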
* @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { if (this.getScene().stopAnimation(this), this.getScene().removeTransformNode(this), this._parentContainer) { const i = this._parentContainer.transformNodes.indexOf(this); i > -1 && this._parentContainer.transformNodes.splice(i, 1), this._parentContainer = null; } if (this.onAfterWorldMatrixUpdateObservable.clear(), e) { const i = this.getChildTransformNodes(!0); for (const r of i) r.parent = null, r.computeWorldMatrix(!0); } super.dispose(e, t); } /** * Uniformly scales the mesh to fit inside of a unit cube (1 X 1 X 1 units) * @param includeDescendants Use the hierarchy's bounding box instead of the mesh's bounding box. Default is false * @param ignoreRotation ignore rotation when computing the scale (ie. object will be axis aligned). Default is false * @param predicate predicate that is passed in to getHierarchyBoundingVectors when selecting which object should be included when scaling * @returns the current mesh */ normalizeToUnitCube(e = !0, t = !1, i) { let r = null, s = null; t && (this.rotationQuaternion ? (s = this.rotationQuaternion.clone(), this.rotationQuaternion.copyFromFloats(0, 0, 0, 1)) : this.rotation && (r = this.rotation.clone(), this.rotation.copyFromFloats(0, 0, 0))); const n = this.getHierarchyBoundingVectors(e, i), a = n.max.subtract(n.min), l = Math.max(a.x, a.y, a.z); if (l === 0) return this; const o = 1 / l; return this.scaling.scaleInPlace(o), t && (this.rotationQuaternion && s ? this.rotationQuaternion.copyFrom(s) : this.rotation && r && this.rotation.copyFrom(r)), this; } _syncAbsoluteScalingAndRotation() { this._isAbsoluteSynced || (this._worldMatrix.decompose(this._absoluteScaling, this._absoluteRotationQuaternion), this._isAbsoluteSynced = !0); } } xi.BILLBOARDMODE_NONE = 0; xi.BILLBOARDMODE_X = 1; xi.BILLBOARDMODE_Y = 2; xi.BILLBOARDMODE_Z = 4; xi.BILLBOARDMODE_ALL = 7; xi.BILLBOARDMODE_USE_POSITION = 128; xi.BillboardUseParentOrientation = !1; xi._TmpRotation = Ze.Zero(); xi._TmpScaling = D.Zero(); xi._TmpTranslation = D.Zero(); xi._LookAtVectorCache = new D(0, 0, 0); xi._RotationAxisCache = new Ze(); F([ oo("position") ], xi.prototype, "_position", void 0); F([ oo("rotation") ], xi.prototype, "_rotation", void 0); F([ Jee("rotationQuaternion") ], xi.prototype, "_rotationQuaternion", void 0); F([ oo("scaling") ], xi.prototype, "_scaling", void 0); F([ W("billboardMode") ], xi.prototype, "_billboardMode", void 0); F([ W() ], xi.prototype, "scalingDeterminant", void 0); F([ W("infiniteDistance") ], xi.prototype, "_infiniteDistance", void 0); F([ W() ], xi.prototype, "ignoreNonUniformScaling", void 0); F([ W() ], xi.prototype, "reIntegrateRotationIntoRotationQuaternion", void 0); class Rte { constructor() { this._checkCollisions = !1, this._collisionMask = -1, this._collisionGroup = -1, this._surroundingMeshes = null, this._collider = null, this._oldPositionForCollisions = new D(0, 0, 0), this._diffPositionForCollisions = new D(0, 0, 0), this._collisionResponse = !0; } } class vce { constructor() { this.facetNb = 0, this.partitioningSubdivisions = 10, this.partitioningBBoxRatio = 1.01, this.facetDataEnabled = !1, this.facetParameters = {}, this.bbSize = D.Zero(), this.subDiv = { // actual number of subdivisions per axis for ComputeNormals() max: 1, // eslint-disable-next-line @typescript-eslint/naming-convention X: 
1, // eslint-disable-next-line @typescript-eslint/naming-convention Y: 1, // eslint-disable-next-line @typescript-eslint/naming-convention Z: 1 }, this.facetDepthSort = !1, this.facetDepthSortEnabled = !1; } } class Ace { constructor() { this._hasVertexAlpha = !1, this._useVertexColors = !0, this._numBoneInfluencers = 4, this._applyFog = !0, this._receiveShadows = !1, this._facetData = new vce(), this._visibility = 1, this._skeleton = null, this._layerMask = 268435455, this._computeBonesUsingShaders = !0, this._isActive = !1, this._onlyForInstances = !1, this._isActiveIntermediate = !1, this._onlyForInstancesIntermediate = !1, this._actAsRegularMesh = !1, this._currentLOD = null, this._currentLODIsUpToDate = !1, this._collisionRetryCount = 3, this._morphTargetManager = null, this._renderingGroupId = 0, this._bakedVertexAnimationManager = null, this._material = null, this._positions = null, this._pointerOverDisableMeshTesting = !1, this._meshCollisionData = new Rte(), this._enableDistantPicking = !1, this._rawBoundingInfo = null; } } class xr extends xi { /** * No billboard */ static get BILLBOARDMODE_NONE() { return xi.BILLBOARDMODE_NONE; } /** Billboard on X axis */ static get BILLBOARDMODE_X() { return xi.BILLBOARDMODE_X; } /** Billboard on Y axis */ static get BILLBOARDMODE_Y() { return xi.BILLBOARDMODE_Y; } /** Billboard on Z axis */ static get BILLBOARDMODE_Z() { return xi.BILLBOARDMODE_Z; } /** Billboard on all axes */ static get BILLBOARDMODE_ALL() { return xi.BILLBOARDMODE_ALL; } /** Billboard on using position instead of orientation */ static get BILLBOARDMODE_USE_POSITION() { return xi.BILLBOARDMODE_USE_POSITION; } /** * Gets the number of facets in the mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#what-is-a-mesh-facet */ get facetNb() { return this._internalAbstractMeshDataInfo._facetData.facetNb; } /** * Gets or set the number (integer) of subdivisions per axis in the partitioning space * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#tweaking-the-partitioning */ get partitioningSubdivisions() { return this._internalAbstractMeshDataInfo._facetData.partitioningSubdivisions; } set partitioningSubdivisions(e) { this._internalAbstractMeshDataInfo._facetData.partitioningSubdivisions = e; } /** * The ratio (float) to apply to the bounding box size to set to the partitioning space. * Ex : 1.01 (default) the partitioning space is 1% bigger than the bounding box * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#tweaking-the-partitioning */ get partitioningBBoxRatio() { return this._internalAbstractMeshDataInfo._facetData.partitioningBBoxRatio; } set partitioningBBoxRatio(e) { this._internalAbstractMeshDataInfo._facetData.partitioningBBoxRatio = e; } /** * Gets or sets a boolean indicating that the facets must be depth sorted on next call to `updateFacetData()`. * Works only for updatable meshes. * Doesn't work with multi-materials * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#facet-depth-sort */ get mustDepthSortFacets() { return this._internalAbstractMeshDataInfo._facetData.facetDepthSort; } set mustDepthSortFacets(e) { this._internalAbstractMeshDataInfo._facetData.facetDepthSort = e; } /** * The location (Vector3) where the facet depth sort must be computed from. * By default, the active camera position. 
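* A small sketch, assuming an updatable mesh named `mesh` and the public `BABYLON.Vector3` type:
* ```javascript
* mesh.mustDepthSortFacets = true;
* mesh.facetDepthSortFrom = new BABYLON.Vector3(0, 10, 0); // sort from this point instead of the camera
* mesh.updateFacetData(); // facets are re-sorted on this call
* ```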
* Used only when facet depth sort is enabled * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#facet-depth-sort */ get facetDepthSortFrom() { return this._internalAbstractMeshDataInfo._facetData.facetDepthSortFrom; } set facetDepthSortFrom(e) { this._internalAbstractMeshDataInfo._facetData.facetDepthSortFrom = e; } /** number of collision detection tries. Change this value if not all collisions are detected and handled properly */ get collisionRetryCount() { return this._internalAbstractMeshDataInfo._collisionRetryCount; } set collisionRetryCount(e) { this._internalAbstractMeshDataInfo._collisionRetryCount = e; } /** * gets a boolean indicating if facetData is enabled * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData#what-is-a-mesh-facet */ get isFacetDataEnabled() { return this._internalAbstractMeshDataInfo._facetData.facetDataEnabled; } /** * Gets or sets the morph target manager * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/morphTargets */ get morphTargetManager() { return this._internalAbstractMeshDataInfo._morphTargetManager; } set morphTargetManager(e) { this._internalAbstractMeshDataInfo._morphTargetManager !== e && (this._internalAbstractMeshDataInfo._morphTargetManager = e, this._syncGeometryWithMorphTargetManager()); } /** * Gets or sets the baked vertex animation manager * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/baked_texture_animations */ get bakedVertexAnimationManager() { return this._internalAbstractMeshDataInfo._bakedVertexAnimationManager; } set bakedVertexAnimationManager(e) { this._internalAbstractMeshDataInfo._bakedVertexAnimationManager !== e && (this._internalAbstractMeshDataInfo._bakedVertexAnimationManager = e, this._markSubMeshesAsAttributesDirty()); } /** @internal */ _syncGeometryWithMorphTargetManager() { } /** * @internal */ _updateNonUniformScalingState(e) { return super._updateNonUniformScalingState(e) ? 
(this._markSubMeshesAsMiscDirty(), !0) : !1; } /** @internal */ get rawBoundingInfo() { return this._internalAbstractMeshDataInfo._rawBoundingInfo; } set rawBoundingInfo(e) { this._internalAbstractMeshDataInfo._rawBoundingInfo = e; } /** Set a function to call when this mesh collides with another one */ set onCollide(e) { this._internalAbstractMeshDataInfo._meshCollisionData._onCollideObserver && this.onCollideObservable.remove(this._internalAbstractMeshDataInfo._meshCollisionData._onCollideObserver), this._internalAbstractMeshDataInfo._meshCollisionData._onCollideObserver = this.onCollideObservable.add(e); } /** Set a function to call when the collision's position changes */ set onCollisionPositionChange(e) { this._internalAbstractMeshDataInfo._meshCollisionData._onCollisionPositionChangeObserver && this.onCollisionPositionChangeObservable.remove(this._internalAbstractMeshDataInfo._meshCollisionData._onCollisionPositionChangeObserver), this._internalAbstractMeshDataInfo._meshCollisionData._onCollisionPositionChangeObserver = this.onCollisionPositionChangeObservable.add(e); } /** * Gets or sets mesh visibility between 0 and 1 (default is 1) */ get visibility() { return this._internalAbstractMeshDataInfo._visibility; } /** * Gets or sets mesh visibility between 0 and 1 (default is 1) */ set visibility(e) { if (this._internalAbstractMeshDataInfo._visibility === e) return; const t = this._internalAbstractMeshDataInfo._visibility; this._internalAbstractMeshDataInfo._visibility = e, (t === 1 && e !== 1 || t !== 1 && e === 1) && this._markSubMeshesAsDirty((i) => { i.markAsMiscDirty(), i.markAsPrePassDirty(); }); } /** * Gets or sets the property which disables the test that is checking that the mesh under the pointer is the same than the previous time we tested for it (default: false). * Set this property to true if you want thin instances picking to be reported accurately when moving over the mesh. * Note that setting this property to true will incur some performance penalties when dealing with pointer events for this mesh so use it sparingly. */ get pointerOverDisableMeshTesting() { return this._internalAbstractMeshDataInfo._pointerOverDisableMeshTesting; } set pointerOverDisableMeshTesting(e) { this._internalAbstractMeshDataInfo._pointerOverDisableMeshTesting = e; } /** * Specifies the rendering group id for this mesh (0 by default) * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/advanced/transparent_rendering#rendering-groups */ get renderingGroupId() { return this._internalAbstractMeshDataInfo._renderingGroupId; } set renderingGroupId(e) { this._internalAbstractMeshDataInfo._renderingGroupId = e; } /** Gets or sets current material */ get material() { return this._internalAbstractMeshDataInfo._material; } set material(e) { this._internalAbstractMeshDataInfo._material !== e && (this._internalAbstractMeshDataInfo._material && this._internalAbstractMeshDataInfo._material.meshMap && (this._internalAbstractMeshDataInfo._material.meshMap[this.uniqueId] = void 0), this._internalAbstractMeshDataInfo._material = e, e && e.meshMap && (e.meshMap[this.uniqueId] = this), this.onMaterialChangedObservable.hasObservers() && this.onMaterialChangedObservable.notifyObservers(this), this.subMeshes && (this.resetDrawCache(), this._unBindEffect())); } /** * Gets the material used to render the mesh in a specific render pass * @param renderPassId render pass id * @returns material used for the render pass. 
If no specific material is used for this render pass, undefined is returned (meaning mesh.material is used for this pass) */ getMaterialForRenderPass(e) { var t; return (t = this._internalAbstractMeshDataInfo._materialForRenderPass) === null || t === void 0 ? void 0 : t[e]; } /** * Sets the material to be used to render the mesh in a specific render pass * @param renderPassId render pass id * @param material material to use for this render pass. If undefined is passed, no specific material will be used for this render pass but the regular material will be used instead (mesh.material) */ setMaterialForRenderPass(e, t) { this.resetDrawCache(e), this._internalAbstractMeshDataInfo._materialForRenderPass || (this._internalAbstractMeshDataInfo._materialForRenderPass = []), this._internalAbstractMeshDataInfo._materialForRenderPass[e] = t; } /** * Gets or sets a boolean indicating that this mesh can receive realtime shadows * @see https://doc.babylonjs.com/features/featuresDeepDive/lights/shadows */ get receiveShadows() { return this._internalAbstractMeshDataInfo._receiveShadows; } set receiveShadows(e) { this._internalAbstractMeshDataInfo._receiveShadows !== e && (this._internalAbstractMeshDataInfo._receiveShadows = e, this._markSubMeshesAsLightDirty()); } /** Gets or sets a boolean indicating that this mesh contains vertex color data with alpha values */ get hasVertexAlpha() { return this._internalAbstractMeshDataInfo._hasVertexAlpha; } set hasVertexAlpha(e) { this._internalAbstractMeshDataInfo._hasVertexAlpha !== e && (this._internalAbstractMeshDataInfo._hasVertexAlpha = e, this._markSubMeshesAsAttributesDirty(), this._markSubMeshesAsMiscDirty()); } /** Gets or sets a boolean indicating that this mesh needs to use vertex color data to render (if this kind of vertex data is available in the geometry) */ get useVertexColors() { return this._internalAbstractMeshDataInfo._useVertexColors; } set useVertexColors(e) { this._internalAbstractMeshDataInfo._useVertexColors !== e && (this._internalAbstractMeshDataInfo._useVertexColors = e, this._markSubMeshesAsAttributesDirty()); } /** * Gets or sets a boolean indicating that bone animations must be computed by the GPU (true by default) */ get computeBonesUsingShaders() { return this._internalAbstractMeshDataInfo._computeBonesUsingShaders; } set computeBonesUsingShaders(e) { this._internalAbstractMeshDataInfo._computeBonesUsingShaders !== e && (this._internalAbstractMeshDataInfo._computeBonesUsingShaders = e, this._markSubMeshesAsAttributesDirty()); } /** Gets or sets the number of allowed bone influences per vertex (4 by default) */ get numBoneInfluencers() { return this._internalAbstractMeshDataInfo._numBoneInfluencers; } set numBoneInfluencers(e) { this._internalAbstractMeshDataInfo._numBoneInfluencers !== e && (this._internalAbstractMeshDataInfo._numBoneInfluencers = e, this._markSubMeshesAsAttributesDirty()); } /** Gets or sets a boolean indicating that this mesh will allow fog to be rendered on it (true by default) */ get applyFog() { return this._internalAbstractMeshDataInfo._applyFog; } set applyFog(e) { this._internalAbstractMeshDataInfo._applyFog !== e && (this._internalAbstractMeshDataInfo._applyFog = e, this._markSubMeshesAsMiscDirty()); } /** When enabled, decompose picking matrices for better precision with large values for mesh position and scling */ get enableDistantPicking() { return this._internalAbstractMeshDataInfo._enableDistantPicking; } set enableDistantPicking(e) { this._internalAbstractMeshDataInfo._enableDistantPicking = e; } /** 
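* (Usage sketch, illustrative: a camera renders a mesh only when `camera.layerMask & mesh.layerMask` is non-zero.
* For example `mesh.layerMask = 0x10000000; camera.layerMask = 0x10000000;` restricts the mesh to that camera,
* while other meshes, whose default mask is 0x0FFFFFFF, stay invisible to it.)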
* Gets or sets the current layer mask (default is 0x0FFFFFFF) * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/layerMasksAndMultiCam */ get layerMask() { return this._internalAbstractMeshDataInfo._layerMask; } set layerMask(e) { e !== this._internalAbstractMeshDataInfo._layerMask && (this._internalAbstractMeshDataInfo._layerMask = e, this._resyncLightSources()); } /** * Gets or sets a collision mask used to mask collisions (default is -1). * A collision between A and B will happen if A.collisionGroup & b.collisionMask !== 0 */ get collisionMask() { return this._internalAbstractMeshDataInfo._meshCollisionData._collisionMask; } set collisionMask(e) { this._internalAbstractMeshDataInfo._meshCollisionData._collisionMask = isNaN(e) ? -1 : e; } /** * Gets or sets a collision response flag (default is true). * when collisionResponse is false, events are still triggered but colliding entity has no response * This helps creating trigger volume when user wants collision feedback events but not position/velocity * to respond to the collision. */ get collisionResponse() { return this._internalAbstractMeshDataInfo._meshCollisionData._collisionResponse; } set collisionResponse(e) { this._internalAbstractMeshDataInfo._meshCollisionData._collisionResponse = e; } /** * Gets or sets the current collision group mask (-1 by default). * A collision between A and B will happen if A.collisionGroup & b.collisionMask !== 0 */ get collisionGroup() { return this._internalAbstractMeshDataInfo._meshCollisionData._collisionGroup; } set collisionGroup(e) { this._internalAbstractMeshDataInfo._meshCollisionData._collisionGroup = isNaN(e) ? -1 : e; } /** * Gets or sets current surrounding meshes (null by default). * * By default collision detection is tested against every mesh in the scene. * It is possible to set surroundingMeshes to a defined list of meshes and then only these specified * meshes will be tested for the collision. * * Note: if set to an empty array no collision will happen when this mesh is moved. 
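* @example
* // Usage sketch (illustrative, public Babylon.js names; `wallA` and `wallB` are other meshes in the scene):
* mesh.checkCollisions = true;
* mesh.collisionMask = 0x0002;             // only colliders whose collisionGroup overlaps this mask are considered
* mesh.surroundingMeshes = [wallA, wallB]; // restrict collision tests to this explicit list
* mesh.collisionResponse = false;          // collision events still fire, but the position is no longer corrected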
*/ get surroundingMeshes() { return this._internalAbstractMeshDataInfo._meshCollisionData._surroundingMeshes; } set surroundingMeshes(e) { this._internalAbstractMeshDataInfo._meshCollisionData._surroundingMeshes = e; } /** Gets the list of lights affecting that mesh */ get lightSources() { return this._lightSources; } /** @internal */ get _positions() { return null; } /** * Gets or sets a skeleton to apply skinning transformations * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/bonesSkeletons */ set skeleton(e) { const t = this._internalAbstractMeshDataInfo._skeleton; t && t.needInitialSkinMatrix && t._unregisterMeshWithPoseMatrix(this), e && e.needInitialSkinMatrix && e._registerMeshWithPoseMatrix(this), this._internalAbstractMeshDataInfo._skeleton = e, this._internalAbstractMeshDataInfo._skeleton || (this._bonesTransformMatrices = null), this._markSubMeshesAsAttributesDirty(); } get skeleton() { return this._internalAbstractMeshDataInfo._skeleton; } // Constructor /** * Creates a new AbstractMesh * @param name defines the name of the mesh * @param scene defines the hosting scene */ constructor(e, t = null) { switch (super(e, t, !1), this._internalAbstractMeshDataInfo = new Ace(), this._waitingMaterialId = null, this.cullingStrategy = xr.CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY, this.onCollideObservable = new Fe(), this.onCollisionPositionChangeObservable = new Fe(), this.onMaterialChangedObservable = new Fe(), this.definedFacingForward = !0, this._occlusionQuery = null, this._renderingGroup = null, this.alphaIndex = Number.MAX_VALUE, this.isVisible = !0, this.isPickable = !0, this.isNearPickable = !1, this.isNearGrabbable = !1, this.showSubMeshesBoundingBox = !1, this.isBlocker = !1, this.enablePointerMoveEvents = !1, this.outlineColor = ze.Red(), this.outlineWidth = 0.02, this.overlayColor = ze.Red(), this.overlayAlpha = 0.5, this.useOctreeForRenderingSelection = !0, this.useOctreeForPicking = !0, this.useOctreeForCollisions = !0, this.alwaysSelectAsActiveMesh = !1, this.doNotSyncBoundingInfo = !1, this.actionManager = null, this.ellipsoid = new D(0.5, 1, 0.5), this.ellipsoidOffset = new D(0, 0, 0), this.edgesWidth = 1, this.edgesColor = new Et(1, 0, 0, 1), this._edgesRenderer = null, this._masterMesh = null, this._boundingInfo = null, this._boundingInfoIsDirty = !0, this._renderId = 0, this._intersectionsInProgress = new Array(), this._unIndexed = !1, this._lightSources = new Array(), this._waitingData = { lods: null, actions: null, freezeWorldMatrix: null }, this._bonesTransformMatrices = null, this._transformMatrixTexture = null, this.onRebuildObservable = new Fe(), this._onCollisionPositionChange = (i, r, s = null) => { r.subtractToRef(this._internalAbstractMeshDataInfo._meshCollisionData._oldPositionForCollisions, this._internalAbstractMeshDataInfo._meshCollisionData._diffPositionForCollisions), this._internalAbstractMeshDataInfo._meshCollisionData._diffPositionForCollisions.length() > $e.CollisionsEpsilon && this.position.addInPlace(this._internalAbstractMeshDataInfo._meshCollisionData._diffPositionForCollisions), s && this.onCollideObservable.notifyObservers(s), this.onCollisionPositionChangeObservable.notifyObservers(this.position); }, t = this.getScene(), t.addMesh(this), this._resyncLightSources(), this._uniformBuffer = new Vi(this.getScene().getEngine(), void 0, void 0, e, !this.getScene().getEngine().isWebGPU), this._buildUniformLayout(), t.performancePriority) { case $A.Aggressive: this.doNotSyncBoundingInfo = !0; case $A.Intermediate: 
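/* Usage sketch (illustrative, public names): the switch above propagates the scene's performance
   priority to each newly created mesh. Assuming a `scene`:

   scene.performancePriority = BABYLON.ScenePerformancePriority.Intermediate;
   const box = BABYLON.MeshBuilder.CreateBox("box", {}, scene);
   // In this mode the new mesh gets alwaysSelectAsActiveMesh = true and isPickable = false.
*/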
this.alwaysSelectAsActiveMesh = !0, this.isPickable = !1; break; } } _buildUniformLayout() { this._uniformBuffer.addUniform("world", 16), this._uniformBuffer.addUniform("visibility", 1), this._uniformBuffer.create(); } /** * Transfer the mesh values to its UBO. * @param world The world matrix associated with the mesh */ transferToEffect(e) { const t = this._uniformBuffer; t.updateMatrix("world", e), t.updateFloat("visibility", this._internalAbstractMeshDataInfo._visibility), t.update(); } /** * Gets the mesh uniform buffer. * @returns the uniform buffer of the mesh. */ getMeshUniformBuffer() { return this._uniformBuffer; } /** * Returns the string "AbstractMesh" * @returns "AbstractMesh" */ getClassName() { return "AbstractMesh"; } /** * Gets a string representation of the current mesh * @param fullDetails defines a boolean indicating if full details must be included * @returns a string representation of the current mesh */ toString(e) { let t = "Name: " + this.name + ", isInstance: " + (this.getClassName() !== "InstancedMesh" ? "YES" : "NO"); t += ", # of submeshes: " + (this.subMeshes ? this.subMeshes.length : 0); const i = this._internalAbstractMeshDataInfo._skeleton; return i && (t += ", skeleton: " + i.name), e && (t += ", billboard mode: " + ["NONE", "X", "Y", null, "Z", null, null, "ALL"][this.billboardMode], t += ", freeze wrld mat: " + (this._isWorldMatrixFrozen || this._waitingData.freezeWorldMatrix ? "YES" : "NO")), t; } /** * @internal */ _getEffectiveParent() { return this._masterMesh && this.billboardMode !== xi.BILLBOARDMODE_NONE ? this._masterMesh : super._getEffectiveParent(); } /** * @internal */ _getActionManagerForTrigger(e, t = !0) { if (this.actionManager && (t || this.actionManager.isRecursive)) if (e) { if (this.actionManager.hasSpecificTrigger(e)) return this.actionManager; } else return this.actionManager; return this.parent ? 
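/* Usage sketch (illustrative, public names): the lookup above walks up the parent chain, so an
   ActionManager registered on a parent with `isRecursive = true` also serves its children.
   Assuming a `scene` and a `mesh`:

   mesh.actionManager = new BABYLON.ActionManager(scene);
   mesh.actionManager.registerAction(
     new BABYLON.ExecuteCodeAction(BABYLON.ActionManager.OnPickTrigger, () => console.log("picked"))
   );
*/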
this.parent._getActionManagerForTrigger(e, !1) : null; } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _rebuild(e = !1) { if (this.onRebuildObservable.notifyObservers(this), this._occlusionQuery !== null && (this._occlusionQuery = null), !!this.subMeshes) for (const t of this.subMeshes) t._rebuild(); } /** @internal */ _resyncLightSources() { this._lightSources.length = 0; for (const e of this.getScene().lights) e.isEnabled() && e.canAffectMesh(this) && this._lightSources.push(e); this._markSubMeshesAsLightDirty(); } /** * @internal */ _resyncLightSource(e) { const t = e.isEnabled() && e.canAffectMesh(this), i = this._lightSources.indexOf(e); let r = !1; if (i === -1) { if (!t) return; this._lightSources.push(e); } else { if (t) return; r = !0, this._lightSources.splice(i, 1); } this._markSubMeshesAsLightDirty(r); } /** @internal */ _unBindEffect() { for (const e of this.subMeshes) e.setEffect(null); } /** * @internal */ _removeLightSource(e, t) { const i = this._lightSources.indexOf(e); i !== -1 && (this._lightSources.splice(i, 1), this._markSubMeshesAsLightDirty(t)); } _markSubMeshesAsDirty(e) { if (this.subMeshes) for (const t of this.subMeshes) for (let i = 0; i < t._drawWrappers.length; ++i) { const r = t._drawWrappers[i]; !r || !r.defines || !r.defines.markAllAsDirty || e(r.defines); } } /** * @internal */ _markSubMeshesAsLightDirty(e = !1) { this._markSubMeshesAsDirty((t) => t.markAsLightDirty(e)); } /** @internal */ _markSubMeshesAsAttributesDirty() { this._markSubMeshesAsDirty((e) => e.markAsAttributesDirty()); } /** @internal */ _markSubMeshesAsMiscDirty() { this._markSubMeshesAsDirty((e) => e.markAsMiscDirty()); } /** * Flag the AbstractMesh as dirty (Forcing it to update everything) * @param property if set to "rotation" the objects rotationQuaternion will be set to null * @returns this AbstractMesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars markAsDirty(e) { return this._currentRenderId = Number.MAX_VALUE, this._isDirty = !0, this; } /** * Resets the draw wrappers cache for all submeshes of this abstract mesh * @param passId If provided, releases only the draw wrapper corresponding to this render pass id */ resetDrawCache(e) { if (this.subMeshes) for (const t of this.subMeshes) t.resetDrawCache(e); } // Methods /** * Returns true if the mesh is blocked. Implemented by child classes */ get isBlocked() { return !1; } /** * Returns the mesh itself by default. Implemented by child classes * @param camera defines the camera to use to pick the right LOD level * @returns the currentAbstractMesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getLOD(e) { return this; } /** * Returns 0 by default. Implemented by child classes * @returns an integer */ getTotalVertices() { return 0; } /** * Returns a positive integer : the total number of indices in this mesh geometry. * @returns the number of indices or zero if the mesh has no geometry. */ getTotalIndices() { return 0; } /** * Returns null by default. Implemented by child classes * @returns null */ getIndices() { return null; } /** * Returns the array of the requested vertex data kind. Implemented by child classes * @param kind defines the vertex data kind to use * @returns null */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getVerticesData(e) { return null; } /** * Sets the vertex data of the mesh geometry for the requested `kind`. * If the mesh has no geometry, a new Geometry object is set to the mesh and then passed this vertex data. 
* Note that a new underlying VertexBuffer object is created each call. * If the `kind` is the `PositionKind`, the mesh BoundingInfo is renewed, so the bounding box and sphere, and the mesh World Matrix is recomputed. * @param kind defines vertex data kind: * * VertexBuffer.PositionKind * * VertexBuffer.UVKind * * VertexBuffer.UV2Kind * * VertexBuffer.UV3Kind * * VertexBuffer.UV4Kind * * VertexBuffer.UV5Kind * * VertexBuffer.UV6Kind * * VertexBuffer.ColorKind * * VertexBuffer.MatricesIndicesKind * * VertexBuffer.MatricesIndicesExtraKind * * VertexBuffer.MatricesWeightsKind * * VertexBuffer.MatricesWeightsExtraKind * @param data defines the data source * @param updatable defines if the data must be flagged as updatable (or static) * @param stride defines the vertex stride (size of an entire vertex). Can be null and in this case will be deduced from vertex data kind * @returns the current mesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars setVerticesData(e, t, i, r) { return this; } /** * Updates the existing vertex data of the mesh geometry for the requested `kind`. * If the mesh has no geometry, it is simply returned as it is. * @param kind defines vertex data kind: * * VertexBuffer.PositionKind * * VertexBuffer.UVKind * * VertexBuffer.UV2Kind * * VertexBuffer.UV3Kind * * VertexBuffer.UV4Kind * * VertexBuffer.UV5Kind * * VertexBuffer.UV6Kind * * VertexBuffer.ColorKind * * VertexBuffer.MatricesIndicesKind * * VertexBuffer.MatricesIndicesExtraKind * * VertexBuffer.MatricesWeightsKind * * VertexBuffer.MatricesWeightsExtraKind * @param data defines the data source * @param updateExtends If `kind` is `PositionKind` and if `updateExtends` is true, the mesh BoundingInfo is renewed, so the bounding box and sphere, and the mesh World Matrix is recomputed * @param makeItUnique If true, a new global geometry is created from this data and is set to the mesh * @returns the current mesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars updateVerticesData(e, t, i, r) { return this; } /** * Sets the mesh indices, * If the mesh has no geometry, a new Geometry object is created and set to the mesh. * @param indices Expects an array populated with integers or a typed array (Int32Array, Uint32Array, Uint16Array) * @param totalVertices Defines the total number of vertices * @returns the current mesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars setIndices(e, t) { return this; } /** * Gets a boolean indicating if specific vertex data is present * @param kind defines the vertex data kind to use * @returns true is data kind is present */ // eslint-disable-next-line @typescript-eslint/no-unused-vars isVerticesDataPresent(e) { return !1; } /** * Returns the mesh BoundingInfo object or creates a new one and returns if it was undefined. * Note that it returns a shallow bounding of the mesh (i.e. it does not include children). * However, if the mesh contains thin instances, it will be expanded to include them. If you want the "raw" bounding data instead, then use `getRawBoundingInfo()`. * To get the full bounding of all children, call `getHierarchyBoundingVectors` instead. * @returns a BoundingInfo */ getBoundingInfo() { return this._masterMesh ? this._masterMesh.getBoundingInfo() : (this._boundingInfoIsDirty && (this._boundingInfoIsDirty = !1, this._updateBoundingInfo()), this._boundingInfo); } /** * Returns the bounding info unnafected by instance data. * @returns the bounding info of the mesh unaffected by instance data. 
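* @example
* // Usage sketch (illustrative, public names; assumes thin instances were added to `mesh`):
* const withInstances = mesh.getBoundingInfo();   // bounds expanded to include thin instances
* const geometryOnly = mesh.getRawBoundingInfo(); // raw geometry bounds, unaffected by instance data
* console.log(withInstances.boundingBox.maximumWorld, geometryOnly.boundingBox.maximumWorld);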
*/ getRawBoundingInfo() { var e; return (e = this.rawBoundingInfo) !== null && e !== void 0 ? e : this.getBoundingInfo(); } /** * Overwrite the current bounding info * @param boundingInfo defines the new bounding info * @returns the current mesh */ setBoundingInfo(e) { return this._boundingInfo = e, this; } /** * Returns true if there is already a bounding info */ get hasBoundingInfo() { return this._boundingInfo !== null; } /** * Creates a new bounding info for the mesh * @param minimum min vector of the bounding box/sphere * @param maximum max vector of the bounding box/sphere * @param worldMatrix defines the new world matrix * @returns the new bounding info */ buildBoundingInfo(e, t, i) { return this._boundingInfo = new zf(e, t, i), this._boundingInfo; } /** * Uniformly scales the mesh to fit inside of a unit cube (1 X 1 X 1 units) * @param includeDescendants Use the hierarchy's bounding box instead of the mesh's bounding box. Default is false * @param ignoreRotation ignore rotation when computing the scale (ie. object will be axis aligned). Default is false * @param predicate predicate that is passed in to getHierarchyBoundingVectors when selecting which object should be included when scaling * @returns the current mesh */ normalizeToUnitCube(e = !0, t = !1, i) { return super.normalizeToUnitCube(e, t, i); } /** Gets a boolean indicating if this mesh has skinning data and an attached skeleton */ get useBones() { return this.skeleton && this.getScene().skeletonsEnabled && this.isVerticesDataPresent(Y.MatricesIndicesKind) && this.isVerticesDataPresent(Y.MatricesWeightsKind); } /** @internal */ _preActivate() { } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _preActivateForIntermediateRendering(e) { } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _activate(e, t) { return this._renderId = e, !0; } /** @internal */ _postActivate() { } /** @internal */ _freeze() { } /** @internal */ _unFreeze() { } /** * Gets the current world matrix * @returns a Matrix */ getWorldMatrix() { return this._masterMesh && this.billboardMode === xi.BILLBOARDMODE_NONE ? this._masterMesh.getWorldMatrix() : super.getWorldMatrix(); } /** @internal */ _getWorldMatrixDeterminant() { return this._masterMesh ? this._masterMesh._getWorldMatrixDeterminant() : super._getWorldMatrixDeterminant(); } /** * Gets a boolean indicating if this mesh is an instance or a regular mesh */ get isAnInstance() { return !1; } /** * Gets a boolean indicating if this mesh has instances */ get hasInstances() { return !1; } /** * Gets a boolean indicating if this mesh has thin instances */ get hasThinInstances() { return !1; } // ================================== Point of View Movement ================================= /** * Perform relative position change from the point of view of behind the front of the mesh. * This is performed taking into account the meshes current rotation, so you do not have to care. * Supports definition of mesh facing forward or backward {@link definedFacingForwardSearch | See definedFacingForwardSearch }. * @param amountRight defines the distance on the right axis * @param amountUp defines the distance on the up axis * @param amountForward defines the distance on the forward axis * @returns the current mesh */ movePOV(e, t, i) { return this.position.addInPlace(this.calcMovePOV(e, t, i)), this; } /** * Calculate relative position change from the point of view of behind the front of the mesh. 
* This is performed taking into account the meshes current rotation, so you do not have to care. * Supports definition of mesh facing forward or backward {@link definedFacingForwardSearch | See definedFacingForwardSearch }. * @param amountRight defines the distance on the right axis * @param amountUp defines the distance on the up axis * @param amountForward defines the distance on the forward axis * @returns the new displacement vector */ calcMovePOV(e, t, i) { const r = new Ae(); (this.rotationQuaternion ? this.rotationQuaternion : Ze.RotationYawPitchRoll(this.rotation.y, this.rotation.x, this.rotation.z)).toRotationMatrix(r); const n = D.Zero(), a = this.definedFacingForward ? -1 : 1; return D.TransformCoordinatesFromFloatsToRef(e * a, t, i * a, r, n), n; } // ================================== Point of View Rotation ================================= /** * Perform relative rotation change from the point of view of behind the front of the mesh. * Supports definition of mesh facing forward or backward {@link definedFacingForwardSearch | See definedFacingForwardSearch }. * @param flipBack defines the flip * @param twirlClockwise defines the twirl * @param tiltRight defines the tilt * @returns the current mesh */ rotatePOV(e, t, i) { return this.rotation.addInPlace(this.calcRotatePOV(e, t, i)), this; } /** * Calculate relative rotation change from the point of view of behind the front of the mesh. * Supports definition of mesh facing forward or backward {@link definedFacingForwardSearch | See definedFacingForwardSearch }. * @param flipBack defines the flip * @param twirlClockwise defines the twirl * @param tiltRight defines the tilt * @returns the new rotation vector */ calcRotatePOV(e, t, i) { const r = this.definedFacingForward ? 1 : -1; return new D(e * r, t, i * r); } /** * This method recomputes and sets a new BoundingInfo to the mesh unless it is locked. * This means the mesh underlying bounding box and sphere are recomputed. * @param applySkeleton defines whether to apply the skeleton before computing the bounding info * @param applyMorph defines whether to apply the morph target before computing the bounding info * @returns the current mesh */ refreshBoundingInfo(e = !1, t = !1) { return this._boundingInfo && this._boundingInfo.isLocked ? this : (this._refreshBoundingInfo(this._getPositionData(e, t), null), this); } /** * @internal */ _refreshBoundingInfo(e, t) { if (e) { const i = kO(e, 0, this.getTotalVertices(), t); this._boundingInfo ? this._boundingInfo.reConstruct(i.minimum, i.maximum) : this._boundingInfo = new zf(i.minimum, i.maximum); } if (this.subMeshes) for (let i = 0; i < this.subMeshes.length; i++) this.subMeshes[i].refreshBoundingInfo(e); this._updateBoundingInfo(); } /** * Internal function to get buffer data and possibly apply morphs and normals * @param applySkeleton * @param applyMorph * @param data * @param kind the kind of data you want. Can be Normal or Position */ _getData(e = !1, t = !1, i, r = Y.PositionKind) { if (i = i ?? 
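/* Usage sketch (illustrative, public names): the POV helpers above move or rotate a mesh relative
   to its own facing direction, and refreshBoundingInfo can bake skinning/morphing into the bounds.
   Assuming a `mesh` with the default `definedFacingForward`:

   mesh.movePOV(0, 0, 1);                  // move one unit "forward" relative to the mesh's own orientation
   mesh.rotatePOV(0, Math.PI / 2, 0);      // twirl a quarter turn around the up axis
   mesh.refreshBoundingInfo(true, true);   // recompute bounds with skeleton and morph targets applied
*/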
this.getVerticesData(r).slice(), i && t && this.morphTargetManager) { let s = 0, n = 0; for (let a = 0; a < i.length; a++) { let l = i[a]; for (let o = 0; o < this.morphTargetManager.numTargets; o++) { const u = this.morphTargetManager.getTarget(o), h = u.influence; if (h !== 0) { let d = null; switch (r) { case Y.PositionKind: d = u.getPositions(); break; case Y.NormalKind: d = u.getNormals(); break; case Y.TangentKind: d = u.getTangents(); break; case Y.UVKind: d = u.getUVs(); break; } d && (l += (d[a] - i[a]) * h); } } if (i[a] = l, s++, r === Y.PositionKind && this._positions && s === 3) { s = 0; const o = n * 3; this._positions[n++].copyFromFloats(i[o], i[o + 1], i[o + 2]); } } } if (i && e && this.skeleton) { const s = this.getVerticesData(Y.MatricesIndicesKind), n = this.getVerticesData(Y.MatricesWeightsKind); if (n && s) { const a = this.numBoneInfluencers > 4, l = a ? this.getVerticesData(Y.MatricesIndicesExtraKind) : null, o = a ? this.getVerticesData(Y.MatricesWeightsExtraKind) : null, u = this.skeleton.getTransformMatrices(this), h = de.Vector3[0], d = de.Matrix[0], f = de.Matrix[1]; let p = 0; for (let m = 0; m < i.length; m += 3, p += 4) { d.reset(); let _, v; for (_ = 0; _ < 4; _++) v = n[p + _], v > 0 && (Ae.FromFloat32ArrayToRefScaled(u, Math.floor(s[p + _] * 16), v, f), d.addToSelf(f)); if (a) for (_ = 0; _ < 4; _++) v = o[p + _], v > 0 && (Ae.FromFloat32ArrayToRefScaled(u, Math.floor(l[p + _] * 16), v, f), d.addToSelf(f)); r === Y.NormalKind ? D.TransformNormalFromFloatsToRef(i[m], i[m + 1], i[m + 2], d, h) : D.TransformCoordinatesFromFloatsToRef(i[m], i[m + 1], i[m + 2], d, h), h.toArray(i, m), r === Y.PositionKind && this._positions && this._positions[m / 3].copyFrom(h); } } } return i; } /** * Get the normals vertex data and optionally apply skeleton and morphing. * @param applySkeleton defines whether to apply the skeleton * @param applyMorph defines whether to apply the morph target * @returns the normals data */ getNormalsData(e = !1, t = !1) { return this._getData(e, t, null, Y.NormalKind); } /** * Get the position vertex data and optionally apply skeleton and morphing. * @param applySkeleton defines whether to apply the skeleton * @param applyMorph defines whether to apply the morph target * @param data defines the position data to apply the skeleton and morph to * @returns the position data */ getPositionData(e = !1, t = !1, i) { return this._getData(e, t, i, Y.PositionKind); } /** * @internal */ _getPositionData(e, t) { var i; let r = this.getVerticesData(Y.PositionKind); if (this._internalAbstractMeshDataInfo._positions && (this._internalAbstractMeshDataInfo._positions = null), r && (e && this.skeleton || t && this.morphTargetManager)) { if (r = r.slice(), this._generatePointsArray(), this._positions) { const s = this._positions; this._internalAbstractMeshDataInfo._positions = new Array(s.length); for (let n = 0; n < s.length; n++) this._internalAbstractMeshDataInfo._positions[n] = ((i = s[n]) === null || i === void 0 ? void 0 : i.clone()) || new D(); } return this.getPositionData(e, t, r); } return r; } /** @internal */ _updateBoundingInfo() { return this._boundingInfo ? 
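/* Usage sketch (illustrative, public names): getPositionData/getNormalsData return vertex data with
   skinning and/or morphing optionally baked in, which is handy for CPU-side queries on animated meshes:

   const skinnedPositions = mesh.getPositionData(true, true); // apply skeleton + morph targets
   const rawNormals = mesh.getNormalsData(false, false);      // plain normals straight from the geometry
*/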
this._boundingInfo.update(this.worldMatrixFromCache) : this._boundingInfo = new zf(D.Zero(), D.Zero(), this.worldMatrixFromCache), this._updateSubMeshesBoundingInfo(this.worldMatrixFromCache), this; } /** * @internal */ _updateSubMeshesBoundingInfo(e) { if (!this.subMeshes) return this; const t = this.subMeshes.length; for (let i = 0; i < t; i++) { const r = this.subMeshes[i]; (t > 1 || !r.IsGlobal) && r.updateBoundingInfo(e); } return this; } /** @internal */ _afterComputeWorldMatrix() { this.doNotSyncBoundingInfo || (this._boundingInfoIsDirty = !0); } /** * Returns `true` if the mesh is within the frustum defined by the passed array of planes. * A mesh is in the frustum if its bounding box intersects the frustum * @param frustumPlanes defines the frustum to test * @returns true if the mesh is in the frustum planes */ isInFrustum(e) { return this.getBoundingInfo().isInFrustum(e, this.cullingStrategy); } /** * Returns `true` if the mesh is completely in the frustum defined be the passed array of planes. * A mesh is completely in the frustum if its bounding box it completely inside the frustum. * @param frustumPlanes defines the frustum to test * @returns true if the mesh is completely in the frustum planes */ isCompletelyInFrustum(e) { return this.getBoundingInfo().isCompletelyInFrustum(e); } /** * True if the mesh intersects another mesh or a SolidParticle object * @param mesh defines a target mesh or SolidParticle to test * @param precise Unless the parameter `precise` is set to `true` the intersection is computed according to Axis Aligned Bounding Boxes (AABB), else according to OBB (Oriented BBoxes) * @param includeDescendants Can be set to true to test if the mesh defined in parameters intersects with the current mesh or any child meshes * @returns true if there is an intersection */ intersectsMesh(e, t = !1, i) { const r = this.getBoundingInfo(), s = e.getBoundingInfo(); if (r.intersects(s, t)) return !0; if (i) { for (const n of this.getChildMeshes()) if (n.intersectsMesh(e, t, !0)) return !0; } return !1; } /** * Returns true if the passed point (Vector3) is inside the mesh bounding box * @param point defines the point to test * @returns true if there is an intersection */ intersectsPoint(e) { return this.getBoundingInfo().intersectsPoint(e); } // Collisions /** * Gets or sets a boolean indicating that this mesh can be used in the collision engine * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_collisions */ get checkCollisions() { return this._internalAbstractMeshDataInfo._meshCollisionData._checkCollisions; } set checkCollisions(e) { this._internalAbstractMeshDataInfo._meshCollisionData._checkCollisions = e; } /** * Gets Collider object used to compute collisions (not physics) * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_collisions */ get collider() { return this._internalAbstractMeshDataInfo._meshCollisionData._collider; } /** * Move the mesh using collision engine * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_collisions * @param displacement defines the requested displacement vector * @returns the current mesh */ moveWithCollisions(e) { this.getAbsolutePosition().addToRef(this.ellipsoidOffset, this._internalAbstractMeshDataInfo._meshCollisionData._oldPositionForCollisions); const i = this.getScene().collisionCoordinator; return this._internalAbstractMeshDataInfo._meshCollisionData._collider || (this._internalAbstractMeshDataInfo._meshCollisionData._collider = i.createCollider()), 
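/* Usage sketch (illustrative, public names): moveWithCollisions displaces the mesh through the
   collision engine using its ellipsoid, so it slides along obstacles instead of passing through them.
   Assuming a `scene` whose obstacle meshes have checkCollisions = true:

   mesh.checkCollisions = true;
   mesh.ellipsoid = new BABYLON.Vector3(0.5, 1, 0.5);
   scene.onBeforeRenderObservable.add(() => {
     mesh.moveWithCollisions(new BABYLON.Vector3(0, -0.05, 0)); // simple gravity-like displacement
   });
*/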
this._internalAbstractMeshDataInfo._meshCollisionData._collider._radius = this.ellipsoid, i.getNewPosition(this._internalAbstractMeshDataInfo._meshCollisionData._oldPositionForCollisions, e, this._internalAbstractMeshDataInfo._meshCollisionData._collider, this.collisionRetryCount, this, this._onCollisionPositionChange, this.uniqueId), this; } // Collisions /** * @internal */ _collideForSubMesh(e, t, i) { var r; if (this._generatePointsArray(), !this._positions) return this; if (!e._lastColliderWorldVertices || !e._lastColliderTransformMatrix.equals(t)) { e._lastColliderTransformMatrix = t.clone(), e._lastColliderWorldVertices = [], e._trianglePlanes = []; const s = e.verticesStart, n = e.verticesStart + e.verticesCount; for (let a = s; a < n; a++) e._lastColliderWorldVertices.push(D.TransformCoordinates(this._positions[a], t)); } return i._collide(e._trianglePlanes, e._lastColliderWorldVertices, this.getIndices(), e.indexStart, e.indexStart + e.indexCount, e.verticesStart, !!e.getMaterial(), this, this._shouldConvertRHS(), ((r = e.getMaterial()) === null || r === void 0 ? void 0 : r.fillMode) === 7), this; } /** * @internal */ _processCollisionsForSubMeshes(e, t) { const i = this._scene.getCollidingSubMeshCandidates(this, e), r = i.length; for (let s = 0; s < r; s++) { const n = i.data[s]; r > 1 && !n._checkCollision(e) || this._collideForSubMesh(n, t, e); } return this; } /** @internal */ _shouldConvertRHS() { return !1; } /** * @internal */ _checkCollision(e) { if (!this.getBoundingInfo()._checkCollision(e)) return this; const t = de.Matrix[0], i = de.Matrix[1]; return Ae.ScalingToRef(1 / e._radius.x, 1 / e._radius.y, 1 / e._radius.z, t), this.worldMatrixFromCache.multiplyToRef(t, i), this._processCollisionsForSubMeshes(e, i), this; } // Picking /** @internal */ _generatePointsArray() { return !1; } /** * Checks if the passed Ray intersects with the mesh. A mesh triangle can be picked both from its front and back sides, * irrespective of orientation. * @param ray defines the ray to use. It should be in the mesh's LOCAL coordinate space. * @param fastCheck defines if fast mode (but less precise) must be used (false by default) * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @param onlyBoundingInfo defines a boolean indicating if picking should only happen using bounding info (false by default) * @param worldToUse defines the world matrix to use to get the world coordinate of the intersection point * @param skipBoundingInfo a boolean indicating if we should skip the bounding info check * @returns the picking info * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/interactions/mesh_intersect */ intersects(e, t, i, r = !1, s, n = !1) { const a = new ku(), l = this.getClassName(), o = l === "InstancedLinesMesh" || l === "LinesMesh" || l === "GreasedLineMesh" ? this.intersectionThreshold : 0, u = this.getBoundingInfo(); if (!this.subMeshes || !n && (!e.intersectsSphere(u.boundingSphere, o) || !e.intersectsBox(u.boundingBox, o))) return a; if (r) return a.hit = !n, a.pickedMesh = n ? null : this, a.distance = n ? 
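/* Usage sketch (illustrative, public names): per the doc above, intersects() expects the ray in the
   mesh's LOCAL space; the scene-level picking helpers take care of that transform. Assuming a `scene`:

   const ray = scene.createPickingRay(scene.pointerX, scene.pointerY, BABYLON.Matrix.Identity(), scene.activeCamera);
   const pick = scene.pickWithRay(ray, (m) => m === mesh); // predicate restricts picking to this mesh
   if (pick && pick.hit) console.log(pick.pickedPoint, pick.faceId);
*/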
0 : D.Distance(e.origin, u.boundingSphere.center), a.subMeshId = 0, a; if (!this._generatePointsArray()) return a; let h = null; const d = this._scene.getIntersectingSubMeshCandidates(this, e), f = d.length; let p = !1; for (let m = 0; m < f; m++) { const v = d.data[m].getMaterial(); if (v && (v.fillMode == 7 || v.fillMode == 0 || v.fillMode == 1 || v.fillMode == 2 || v.fillMode == 4)) { p = !0; break; } } if (!p) return a.hit = !0, a.pickedMesh = this, a.distance = D.Distance(e.origin, u.boundingSphere.center), a.subMeshId = -1, a; for (let m = 0; m < f; m++) { const _ = d.data[m]; if (f > 1 && !n && !_.canIntersects(e)) continue; const v = _.intersects(e, this._positions, this.getIndices(), t, i); if (v && (t || !h || v.distance < h.distance) && (h = v, h.subMeshId = m, t)) break; } if (h) { const m = s ?? this.getWorldMatrix(), _ = de.Vector3[0], v = de.Vector3[1]; D.TransformCoordinatesToRef(e.origin, m, _), e.direction.scaleToRef(h.distance, v); const x = D.TransformNormal(v, m).addInPlace(_); return a.hit = !0, a.distance = D.Distance(_, x), a.pickedPoint = x, a.pickedMesh = this, a.bu = h.bu || 0, a.bv = h.bv || 0, a.subMeshFaceId = h.faceId, a.faceId = h.faceId + d.data[h.subMeshId].indexStart / (this.getClassName().indexOf("LinesMesh") !== -1 ? 2 : 3), a.subMeshId = h.subMeshId, a; } return a; } /** * Clones the current mesh * @param name defines the mesh name * @param newParent defines the new mesh parent * @param doNotCloneChildren defines a boolean indicating that children must not be cloned (false by default) * @returns the new mesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars clone(e, t, i) { return null; } /** * Disposes all the submeshes of the current meshnp * @returns the current mesh */ releaseSubMeshes() { if (this.subMeshes) for (; this.subMeshes.length; ) this.subMeshes[0].dispose(); else this.subMeshes = []; return this; } /** * Releases resources associated with this abstract mesh. 
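* @example
* // Usage sketch (illustrative, public names):
* mesh.dispose(false, true); // recurse into children and also dispose the material and its textures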
* @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { let i; const r = this.getScene(); for (this._scene.useMaterialMeshMap && this._internalAbstractMeshDataInfo._material && this._internalAbstractMeshDataInfo._material.meshMap && (this._internalAbstractMeshDataInfo._material.meshMap[this.uniqueId] = void 0), r.freeActiveMeshes(), r.freeRenderingGroups(), r.renderingManager.maintainStateBetweenFrames && r.renderingManager.restoreDispachedFlags(), this.actionManager !== void 0 && this.actionManager !== null && (this._scene.meshes.some((a) => a !== this && a.actionManager === this.actionManager) || this.actionManager.dispose(), this.actionManager = null), this._internalAbstractMeshDataInfo._skeleton = null, this._transformMatrixTexture && (this._transformMatrixTexture.dispose(), this._transformMatrixTexture = null), i = 0; i < this._intersectionsInProgress.length; i++) { const a = this._intersectionsInProgress[i], l = a._intersectionsInProgress.indexOf(this); a._intersectionsInProgress.splice(l, 1); } this._intersectionsInProgress.length = 0, r.lights.forEach((a) => { let l = a.includedOnlyMeshes.indexOf(this); l !== -1 && a.includedOnlyMeshes.splice(l, 1), l = a.excludedMeshes.indexOf(this), l !== -1 && a.excludedMeshes.splice(l, 1); const o = a.getShadowGenerators(); if (o) { const u = o.values(); for (let h = u.next(); h.done !== !0; h = u.next()) { const f = h.value.getShadowMap(); f && f.renderList && (l = f.renderList.indexOf(this), l !== -1 && f.renderList.splice(l, 1)); } } }), (this.getClassName() !== "InstancedMesh" || this.getClassName() !== "InstancedLinesMesh") && this.releaseSubMeshes(); const n = r.getEngine(); if (this._occlusionQuery !== null && (this.isOcclusionQueryInProgress = !1, n.deleteQuery(this._occlusionQuery), this._occlusionQuery = null), n.wipeCaches(), r.removeMesh(this), this._parentContainer) { const a = this._parentContainer.meshes.indexOf(this); a > -1 && this._parentContainer.meshes.splice(a, 1), this._parentContainer = null; } if (t && this.material && (this.material.getClassName() === "MultiMaterial" ? this.material.dispose(!1, !0, !0) : this.material.dispose(!1, !0)), !e) for (i = 0; i < r.particleSystems.length; i++) r.particleSystems[i].emitter === this && (r.particleSystems[i].dispose(), i--); this._internalAbstractMeshDataInfo._facetData.facetDataEnabled && this.disableFacetData(), this._uniformBuffer.dispose(), this.onAfterWorldMatrixUpdateObservable.clear(), this.onCollideObservable.clear(), this.onCollisionPositionChangeObservable.clear(), this.onRebuildObservable.clear(), super.dispose(e, t); } /** * Adds the passed mesh as a child to the current mesh * @param mesh defines the child mesh * @param preserveScalingSign if true, keep scaling sign of child. Otherwise, scaling sign might change. * @returns the current mesh */ addChild(e, t = !1) { return e.setParent(this, t), this; } /** * Removes the passed mesh from the current mesh children list * @param mesh defines the child mesh * @param preserveScalingSign if true, keep scaling sign of child. Otherwise, scaling sign might change. 
* @returns the current mesh */ removeChild(e, t = !1) { return e.setParent(null, t), this; } // Facet data /** @internal */ _initFacetData() { const e = this._internalAbstractMeshDataInfo._facetData; e.facetNormals || (e.facetNormals = []), e.facetPositions || (e.facetPositions = []), e.facetPartitioning || (e.facetPartitioning = new Array()), e.facetNb = this.getIndices().length / 3 | 0, e.partitioningSubdivisions = e.partitioningSubdivisions ? e.partitioningSubdivisions : 10, e.partitioningBBoxRatio = e.partitioningBBoxRatio ? e.partitioningBBoxRatio : 1.01; for (let t = 0; t < e.facetNb; t++) e.facetNormals[t] = D.Zero(), e.facetPositions[t] = D.Zero(); return e.facetDataEnabled = !0, this; } /** * Updates the mesh facetData arrays and the internal partitioning when the mesh is morphed or updated. * This method can be called within the render loop. * You don't need to call this method by yourself in the render loop when you update/morph a mesh with the methods CreateXXX() as they automatically manage this computation * @returns the current mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ updateFacetData() { const e = this._internalAbstractMeshDataInfo._facetData; e.facetDataEnabled || this._initFacetData(); const t = this.getVerticesData(Y.PositionKind), i = this.getIndices(), r = this.getVerticesData(Y.NormalKind), s = this.getBoundingInfo(); if (e.facetDepthSort && !e.facetDepthSortEnabled) { if (e.facetDepthSortEnabled = !0, i instanceof Uint16Array) e.depthSortedIndices = new Uint16Array(i); else if (i instanceof Uint32Array) e.depthSortedIndices = new Uint32Array(i); else { let a = !1; for (let l = 0; l < i.length; l++) if (i[l] > 65535) { a = !0; break; } a ? e.depthSortedIndices = new Uint32Array(i) : e.depthSortedIndices = new Uint16Array(i); } if (e.facetDepthSortFunction = function(a, l) { return l.sqDistance - a.sqDistance; }, !e.facetDepthSortFrom) { const a = this.getScene().activeCamera; e.facetDepthSortFrom = a ? a.position : D.Zero(); } e.depthSortedFacets = []; for (let a = 0; a < e.facetNb; a++) { const l = { ind: a * 3, sqDistance: 0 }; e.depthSortedFacets.push(l); } e.invertedMatrix = Ae.Identity(), e.facetDepthSortOrigin = D.Zero(); } e.bbSize.x = s.maximum.x - s.minimum.x > Sr ? s.maximum.x - s.minimum.x : Sr, e.bbSize.y = s.maximum.y - s.minimum.y > Sr ? s.maximum.y - s.minimum.y : Sr, e.bbSize.z = s.maximum.z - s.minimum.z > Sr ? s.maximum.z - s.minimum.z : Sr; let n = e.bbSize.x > e.bbSize.y ? e.bbSize.x : e.bbSize.y; if (n = n > e.bbSize.z ? n : e.bbSize.z, e.subDiv.max = e.partitioningSubdivisions, e.subDiv.X = Math.floor(e.subDiv.max * e.bbSize.x / n), e.subDiv.Y = Math.floor(e.subDiv.max * e.bbSize.y / n), e.subDiv.Z = Math.floor(e.subDiv.max * e.bbSize.z / n), e.subDiv.X = e.subDiv.X < 1 ? 1 : e.subDiv.X, e.subDiv.Y = e.subDiv.Y < 1 ? 1 : e.subDiv.Y, e.subDiv.Z = e.subDiv.Z < 1 ? 
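/* Usage sketch (illustrative, public names): facet data exposes per-triangle positions and normals plus
   a spatial partitioning, e.g. to snap objects onto the nearest facet of a mesh:

   mesh.updateFacetData();                                        // build or refresh the facet arrays
   const worldNormal = mesh.getFacetNormal(0);                    // world-space normal of facet 0 (allocates)
   const closest = mesh.getClosestFacetAtCoordinates(1, 0.5, 2);  // nearest facet index to a world point, or null
   mesh.disableFacetData();                                       // free the facet arrays when done
*/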
1 : e.subDiv.Z, e.facetParameters.facetNormals = this.getFacetLocalNormals(), e.facetParameters.facetPositions = this.getFacetLocalPositions(), e.facetParameters.facetPartitioning = this.getFacetLocalPartitioning(), e.facetParameters.bInfo = s, e.facetParameters.bbSize = e.bbSize, e.facetParameters.subDiv = e.subDiv, e.facetParameters.ratio = this.partitioningBBoxRatio, e.facetParameters.depthSort = e.facetDepthSort, e.facetDepthSort && e.facetDepthSortEnabled && (this.computeWorldMatrix(!0), this._worldMatrix.invertToRef(e.invertedMatrix), D.TransformCoordinatesToRef(e.facetDepthSortFrom, e.invertedMatrix, e.facetDepthSortOrigin), e.facetParameters.distanceTo = e.facetDepthSortOrigin), e.facetParameters.depthSortedFacets = e.depthSortedFacets, r && Ot.ComputeNormals(t, i, r, e.facetParameters), e.facetDepthSort && e.facetDepthSortEnabled) { e.depthSortedFacets.sort(e.facetDepthSortFunction); const a = e.depthSortedIndices.length / 3 | 0; for (let l = 0; l < a; l++) { const o = e.depthSortedFacets[l].ind; e.depthSortedIndices[l * 3] = i[o], e.depthSortedIndices[l * 3 + 1] = i[o + 1], e.depthSortedIndices[l * 3 + 2] = i[o + 2]; } this.updateIndices(e.depthSortedIndices, void 0, !0); } return this; } /** * Returns the facetLocalNormals array. * The normals are expressed in the mesh local spac * @returns an array of Vector3 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetLocalNormals() { const e = this._internalAbstractMeshDataInfo._facetData; return e.facetNormals || this.updateFacetData(), e.facetNormals; } /** * Returns the facetLocalPositions array. * The facet positions are expressed in the mesh local space * @returns an array of Vector3 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetLocalPositions() { const e = this._internalAbstractMeshDataInfo._facetData; return e.facetPositions || this.updateFacetData(), e.facetPositions; } /** * Returns the facetLocalPartitioning array * @returns an array of array of numbers * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetLocalPartitioning() { const e = this._internalAbstractMeshDataInfo._facetData; return e.facetPartitioning || this.updateFacetData(), e.facetPartitioning; } /** * Returns the i-th facet position in the world system. * This method allocates a new Vector3 per call * @param i defines the facet index * @returns a new Vector3 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetPosition(e) { const t = D.Zero(); return this.getFacetPositionToRef(e, t), t; } /** * Sets the reference Vector3 with the i-th facet position in the world system * @param i defines the facet index * @param ref defines the target vector * @returns the current mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetPositionToRef(e, t) { const i = this.getFacetLocalPositions()[e], r = this.getWorldMatrix(); return D.TransformCoordinatesToRef(i, r, t), this; } /** * Returns the i-th facet normal in the world system. 
* This method allocates a new Vector3 per call * @param i defines the facet index * @returns a new Vector3 * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetNormal(e) { const t = D.Zero(); return this.getFacetNormalToRef(e, t), t; } /** * Sets the reference Vector3 with the i-th facet normal in the world system * @param i defines the facet index * @param ref defines the target vector * @returns the current mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetNormalToRef(e, t) { const i = this.getFacetLocalNormals()[e]; return D.TransformNormalToRef(i, this.getWorldMatrix(), t), this; } /** * Returns the facets (in an array) in the same partitioning block than the one the passed coordinates are located (expressed in the mesh local system) * @param x defines x coordinate * @param y defines y coordinate * @param z defines z coordinate * @returns the array of facet indexes * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetsAtLocalCoordinates(e, t, i) { const r = this.getBoundingInfo(), s = this._internalAbstractMeshDataInfo._facetData, n = Math.floor((e - r.minimum.x * s.partitioningBBoxRatio) * s.subDiv.X * s.partitioningBBoxRatio / s.bbSize.x), a = Math.floor((t - r.minimum.y * s.partitioningBBoxRatio) * s.subDiv.Y * s.partitioningBBoxRatio / s.bbSize.y), l = Math.floor((i - r.minimum.z * s.partitioningBBoxRatio) * s.subDiv.Z * s.partitioningBBoxRatio / s.bbSize.z); return n < 0 || n > s.subDiv.max || a < 0 || a > s.subDiv.max || l < 0 || l > s.subDiv.max ? null : s.facetPartitioning[n + s.subDiv.max * a + s.subDiv.max * s.subDiv.max * l]; } /** * Returns the closest mesh facet index at (x,y,z) World coordinates, null if not found * @param x defines x coordinate * @param y defines y coordinate * @param z defines z coordinate * @param projected sets as the (x,y,z) world projection on the facet * @param checkFace if true (default false), only the facet "facing" to (x,y,z) or only the ones "turning their backs", according to the parameter "facing" are returned * @param facing if facing and checkFace are true, only the facet "facing" to (x, y, z) are returned : positive dot (x, y, z) * facet position. If facing si false and checkFace is true, only the facet "turning their backs" to (x, y, z) are returned : negative dot (x, y, z) * facet position * @returns the face index if found (or null instead) * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getClosestFacetAtCoordinates(e, t, i, r, s = !1, n = !0) { const a = this.getWorldMatrix(), l = de.Matrix[5]; a.invertToRef(l); const o = de.Vector3[8]; D.TransformCoordinatesFromFloatsToRef(e, t, i, l, o); const u = this.getClosestFacetAtLocalCoordinates(o.x, o.y, o.z, r, s, n); return r && D.TransformCoordinatesFromFloatsToRef(r.x, r.y, r.z, a, r), u; } /** * Returns the closest mesh facet index at (x,y,z) local coordinates, null if not found * @param x defines x coordinate * @param y defines y coordinate * @param z defines z coordinate * @param projected sets as the (x,y,z) local projection on the facet * @param checkFace if true (default false), only the facet "facing" to (x,y,z) or only the ones "turning their backs", according to the parameter "facing" are returned * @param facing if facing and checkFace are true, only the facet "facing" to (x, y, z) are returned : positive dot (x, y, z) * facet position. 
If facing si false and checkFace is true, only the facet "turning their backs" to (x, y, z) are returned : negative dot (x, y, z) * facet position * @returns the face index if found (or null instead) * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getClosestFacetAtLocalCoordinates(e, t, i, r, s = !1, n = !0) { let a = null, l = 0, o = 0, u = 0, h = 0, d = 0, f = 0, p = 0, m = 0; const _ = this.getFacetLocalPositions(), v = this.getFacetLocalNormals(), C = this.getFacetsAtLocalCoordinates(e, t, i); if (!C) return null; let x = Number.MAX_VALUE, b = x, S, M, R; for (let w = 0; w < C.length; w++) S = C[w], M = v[S], R = _[S], h = (e - R.x) * M.x + (t - R.y) * M.y + (i - R.z) * M.z, (!s || s && n && h >= 0 || s && !n && h <= 0) && (h = M.x * R.x + M.y * R.y + M.z * R.z, d = -(M.x * e + M.y * t + M.z * i - h) / (M.x * M.x + M.y * M.y + M.z * M.z), f = e + M.x * d, p = t + M.y * d, m = i + M.z * d, l = f - e, o = p - t, u = m - i, b = l * l + o * o + u * u, b < x && (x = b, a = S, r && (r.x = f, r.y = p, r.z = m))); return a; } /** * Returns the object "parameter" set with all the expected parameters for facetData computation by ComputeNormals() * @returns the parameters * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ getFacetDataParameters() { return this._internalAbstractMeshDataInfo._facetData.facetParameters; } /** * Disables the feature FacetData and frees the related memory * @returns the current mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/facetData */ disableFacetData() { const e = this._internalAbstractMeshDataInfo._facetData; return e.facetDataEnabled && (e.facetDataEnabled = !1, e.facetPositions = [], e.facetNormals = [], e.facetPartitioning = new Array(), e.facetParameters = null, e.depthSortedIndices = new Uint32Array(0)), this; } /** * Updates the AbstractMesh indices array * @param indices defines the data source * @param offset defines the offset in the index buffer where to store the new data (can be null) * @param gpuMemoryOnly defines a boolean indicating that only the GPU memory must be updated leaving the CPU version of the indices unchanged (false by default) * @returns the current mesh */ // eslint-disable-next-line @typescript-eslint/no-unused-vars updateIndices(e, t, i = !1) { return this; } /** * Creates new normals data for the mesh * @param updatable defines if the normal vertex buffer must be flagged as updatable * @returns the current mesh */ createNormals(e) { const t = this.getVerticesData(Y.PositionKind), i = this.getIndices(); let r; return this.isVerticesDataPresent(Y.NormalKind) ? r = this.getVerticesData(Y.NormalKind) : r = [], Ot.ComputeNormals(t, i, r, { useRightHandedSystem: this.getScene().useRightHandedSystem }), this.setVerticesData(Y.NormalKind, r, e), this; } /** * Align the mesh with a normal * @param normal defines the normal to use * @param upDirection can be used to redefined the up vector to use (will use the (0, 1, 0) by default) * @returns the current mesh */ alignWithNormal(e, t) { t || (t = bl.Y); const i = de.Vector3[0], r = de.Vector3[1]; return D.CrossToRef(t, e, r), D.CrossToRef(e, r, i), this.rotationQuaternion ? 
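/* Usage sketch (illustrative, public names): alignWithNormal orients the mesh so that its local up axis
   follows the given normal, e.g. to rest a marker on a picked surface. Assuming a successful `pick`
   result from scene.pick and a `marker` mesh acting as the indicator:

   const normal = pick.getNormal(true);        // world-space normal at the picked point
   marker.position.copyFrom(pick.pickedPoint);
   marker.alignWithNormal(normal);
*/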
Ze.RotationQuaternionFromAxisToRef(i, e, r, this.rotationQuaternion) : D.RotationFromAxisToRef(i, e, r, this.rotation), this; } /** @internal */ _checkOcclusionQuery() { return !1; } /** * Disables the mesh edge rendering mode * @returns the currentAbstractMesh */ disableEdgesRendering() { throw yr("EdgesRenderer"); } /** * Enables the edge rendering mode on the mesh. * This mode makes the mesh edges visible * @param epsilon defines the maximal distance between two angles to detect a face * @param checkVerticesInsteadOfIndices indicates that we should check vertex list directly instead of faces * @param options options to the edge renderer * @returns the currentAbstractMesh * @see https://www.babylonjs-playground.com/#19O9TU#0 */ // eslint-disable-next-line @typescript-eslint/no-unused-vars enableEdgesRendering(e, t, i) { throw yr("EdgesRenderer"); } /** * This function returns all of the particle systems in the scene that use the mesh as an emitter. * @returns an array of particle systems in the scene that use the mesh as an emitter */ getConnectedParticleSystems() { return this._scene.particleSystems.filter((e) => e.emitter === this); } } xr.OCCLUSION_TYPE_NONE = 0; xr.OCCLUSION_TYPE_OPTIMISTIC = 1; xr.OCCLUSION_TYPE_STRICT = 2; xr.OCCLUSION_ALGORITHM_TYPE_ACCURATE = 0; xr.OCCLUSION_ALGORITHM_TYPE_CONSERVATIVE = 1; xr.CULLINGSTRATEGY_STANDARD = 0; xr.CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY = 1; xr.CULLINGSTRATEGY_OPTIMISTIC_INCLUSION = 2; xr.CULLINGSTRATEGY_OPTIMISTIC_INCLUSION_THEN_BSPHERE_ONLY = 3; Be("BABYLON.AbstractMesh", xr); function Gc(c) { c.indexOf("vClipPlane") === -1 && c.push("vClipPlane"), c.indexOf("vClipPlane2") === -1 && c.push("vClipPlane2"), c.indexOf("vClipPlane3") === -1 && c.push("vClipPlane3"), c.indexOf("vClipPlane4") === -1 && c.push("vClipPlane4"), c.indexOf("vClipPlane5") === -1 && c.push("vClipPlane5"), c.indexOf("vClipPlane6") === -1 && c.push("vClipPlane6"); } function bT(c, e, t) { var i, r, s, n, a, l; const o = !!((i = c.clipPlane) !== null && i !== void 0 ? i : e.clipPlane), u = !!((r = c.clipPlane2) !== null && r !== void 0 ? r : e.clipPlane2), h = !!((s = c.clipPlane3) !== null && s !== void 0 ? s : e.clipPlane3), d = !!((n = c.clipPlane4) !== null && n !== void 0 ? n : e.clipPlane4), f = !!((a = c.clipPlane5) !== null && a !== void 0 ? a : e.clipPlane5), p = !!((l = c.clipPlane6) !== null && l !== void 0 ? l : e.clipPlane6); o && t.push("#define CLIPPLANE"), u && t.push("#define CLIPPLANE2"), h && t.push("#define CLIPPLANE3"), d && t.push("#define CLIPPLANE4"), f && t.push("#define CLIPPLANE5"), p && t.push("#define CLIPPLANE6"); } function Pte(c, e, t) { var i, r, s, n, a, l; let o = !1; const u = !!((i = c.clipPlane) !== null && i !== void 0 ? i : e.clipPlane), h = !!((r = c.clipPlane2) !== null && r !== void 0 ? r : e.clipPlane2), d = !!((s = c.clipPlane3) !== null && s !== void 0 ? s : e.clipPlane3), f = !!((n = c.clipPlane4) !== null && n !== void 0 ? n : e.clipPlane4), p = !!((a = c.clipPlane5) !== null && a !== void 0 ? a : e.clipPlane5), m = !!((l = c.clipPlane6) !== null && l !== void 0 ? l : e.clipPlane6); return t.CLIPPLANE !== u && (t.CLIPPLANE = u, o = !0), t.CLIPPLANE2 !== h && (t.CLIPPLANE2 = h, o = !0), t.CLIPPLANE3 !== d && (t.CLIPPLANE3 = d, o = !0), t.CLIPPLANE4 !== f && (t.CLIPPLANE4 = f, o = !0), t.CLIPPLANE5 !== p && (t.CLIPPLANE5 = p, o = !0), t.CLIPPLANE6 !== m && (t.CLIPPLANE6 = m, o = !0), o; } function Ec(c, e, t) { var i, r, s, n, a, l; let o = (i = e.clipPlane) !== null && i !== void 0 ? 
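/* Usage sketch (illustrative, public names): these helpers emit the CLIPPLANE defines and bind the plane
   uniforms; application code usually just assigns the planes on the scene (or a material). Assuming a `scene`:

   scene.clipPlane = new BABYLON.Plane(0, 1, 0, -1); // fragments on the positive side of the plane (here y > 1) are discarded
   // ...later:
   scene.clipPlane = null;                           // disable clipping again
*/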
i : t.clipPlane; VD(c, "vClipPlane", o), o = (r = e.clipPlane2) !== null && r !== void 0 ? r : t.clipPlane2, VD(c, "vClipPlane2", o), o = (s = e.clipPlane3) !== null && s !== void 0 ? s : t.clipPlane3, VD(c, "vClipPlane3", o), o = (n = e.clipPlane4) !== null && n !== void 0 ? n : t.clipPlane4, VD(c, "vClipPlane4", o), o = (a = e.clipPlane5) !== null && a !== void 0 ? a : t.clipPlane5, VD(c, "vClipPlane5", o), o = (l = e.clipPlane6) !== null && l !== void 0 ? l : t.clipPlane6, VD(c, "vClipPlane6", o); } function VD(c, e, t) { t && c.setFloat4(e, t.normal.x, t.normal.y, t.normal.z, t.d); } class Ke { /** * Binds the scene's uniform buffer to the effect. * @param effect defines the effect to bind to the scene uniform buffer * @param sceneUbo defines the uniform buffer storing scene data */ static BindSceneUniformBuffer(e, t) { t.bindToEffect(e, "Scene"); } /** * Helps preparing the defines values about the UVs in used in the effect. * UVs are shared as much as we can across channels in the shaders. * @param texture The texture we are preparing the UVs for * @param defines The defines to update * @param key The channel key "diffuse", "specular"... used in the shader */ static PrepareDefinesForMergedUV(e, t, i) { t._needUVs = !0, t[i] = !0, e.optimizeUVAllocation && e.getTextureMatrix().isIdentityAs3x2() ? (t[i + "DIRECTUV"] = e.coordinatesIndex + 1, t["MAINUV" + (e.coordinatesIndex + 1)] = !0) : t[i + "DIRECTUV"] = 0; } /** * Binds a texture matrix value to its corresponding uniform * @param texture The texture to bind the matrix for * @param uniformBuffer The uniform buffer receiving the data * @param key The channel key "diffuse", "specular"... used in the shader */ static BindTextureMatrix(e, t, i) { const r = e.getTextureMatrix(); t.updateMatrix(i + "Matrix", r); } /** * Gets the current status of the fog (should it be enabled?) * @param mesh defines the mesh to evaluate for fog support * @param scene defines the hosting scene * @returns true if fog must be enabled */ static GetFogState(e, t) { return t.fogEnabled && e.applyFog && t.fogMode !== ii.FOGMODE_NONE; } /** * Helper used to prepare the list of defines associated with misc. values for shader compilation * @param mesh defines the current mesh * @param scene defines the current scene * @param useLogarithmicDepth defines if logarithmic depth has to be turned on * @param pointsCloud defines if point cloud rendering has to be turned on * @param fogEnabled defines if fog has to be turned on * @param alphaTest defines if alpha testing has to be turned on * @param defines defines the current list of defines * @param applyDecalAfterDetail Defines if the decal is applied after or before the detail */ static PrepareDefinesForMisc(e, t, i, r, s, n, a, l = !1) { a._areMiscDirty && (a.LOGARITHMICDEPTH = i, a.POINTSIZE = r, a.FOG = s && this.GetFogState(e, t), a.NONUNIFORMSCALING = e.nonUniformScaling, a.ALPHATEST = n, a.DECAL_AFTER_DETAIL = l); } /** * Helper used to prepare the defines relative to the active camera * @param scene defines the current scene * @param defines specifies the list of active defines * @returns true if the defines have been updated, else false */ static PrepareDefinesForCamera(e, t) { let i = !1; if (e.activeCamera) { const r = t.CAMERA_ORTHOGRAPHIC ? 1 : 0, s = t.CAMERA_PERSPECTIVE ? 1 : 0, n = e.activeCamera.mode === Ai.ORTHOGRAPHIC_CAMERA ? 1 : 0, a = e.activeCamera.mode === Ai.PERSPECTIVE_CAMERA ? 
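/* Usage sketch (illustrative): this static helper corresponds to the public BABYLON.MaterialHelper class
   that custom materials call while building their shader defines, with the argument order shown above
   (`defines` being the material's MaterialDefines instance):

   BABYLON.MaterialHelper.PrepareDefinesForMisc(mesh, scene, false, false, true, false, defines);
   BABYLON.MaterialHelper.PrepareDefinesForAttributes(mesh, defines, true, true, false);
*/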
1 : 0; (r ^ n || s ^ a) && (t.CAMERA_ORTHOGRAPHIC = n === 1, t.CAMERA_PERSPECTIVE = a === 1, i = !0); } return i; } /** * Helper used to prepare the list of defines associated with frame values for shader compilation * @param scene defines the current scene * @param engine defines the current engine * @param material defines the material we are compiling the shader for * @param defines specifies the list of active defines * @param useInstances defines if instances have to be turned on * @param useClipPlane defines if clip plane have to be turned on * @param useThinInstances defines if thin instances have to be turned on */ static PrepareDefinesForFrameBoundValues(e, t, i, r, s, n = null, a = !1) { let l = Ke.PrepareDefinesForCamera(e, r); n !== !1 && (l = Pte(i, e, r)), r.DEPTHPREPASS !== !t.getColorWrite() && (r.DEPTHPREPASS = !r.DEPTHPREPASS, l = !0), r.INSTANCES !== s && (r.INSTANCES = s, l = !0), r.THIN_INSTANCES !== a && (r.THIN_INSTANCES = a, l = !0), l && r.markAsUnprocessed(); } /** * Prepares the defines for bones * @param mesh The mesh containing the geometry data we will draw * @param defines The defines to update */ static PrepareDefinesForBones(e, t) { if (e.useBones && e.computeBonesUsingShaders && e.skeleton) { t.NUM_BONE_INFLUENCERS = e.numBoneInfluencers; const i = t.BONETEXTURE !== void 0; if (e.skeleton.isUsingTextureForMatrices && i) t.BONETEXTURE = !0; else { t.BonesPerMesh = e.skeleton.bones.length + 1, t.BONETEXTURE = i ? !1 : void 0; const r = e.getScene().prePassRenderer; if (r && r.enabled) { const s = r.excludedSkinnedMesh.indexOf(e) === -1; t.BONES_VELOCITY_ENABLED = s; } } } else t.NUM_BONE_INFLUENCERS = 0, t.BonesPerMesh = 0, t.BONETEXTURE !== void 0 && (t.BONETEXTURE = !1); } /** * Prepares the defines for morph targets * @param mesh The mesh containing the geometry data we will draw * @param defines The defines to update */ static PrepareDefinesForMorphTargets(e, t) { const i = e.morphTargetManager; i ? 
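// Sketch of the bone defines produced by PrepareDefinesForBones above (comment only,
// placeholder names): for a skinned mesh with computeBonesUsingShaders enabled it fills the
// bone defines, otherwise it zeroes them out.
//
//   Ke.PrepareDefinesForBones(mesh, defines);
//   // defines.NUM_BONE_INFLUENCERS -> mesh.numBoneInfluencers
//   // defines.BONETEXTURE          -> true when the skeleton stores matrices in a texture
//   //                                 (and the material declares the define)
//   // defines.BonesPerMesh         -> skeleton.bones.length + 1 otherwise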
(t.MORPHTARGETS_UV = i.supportsUVs && t.UV1, t.MORPHTARGETS_TANGENT = i.supportsTangents && t.TANGENT, t.MORPHTARGETS_NORMAL = i.supportsNormals && t.NORMAL, t.MORPHTARGETS = i.numInfluencers > 0, t.NUM_MORPH_INFLUENCERS = i.numInfluencers, t.MORPHTARGETS_TEXTURE = i.isUsingTextureForTargets) : (t.MORPHTARGETS_UV = !1, t.MORPHTARGETS_TANGENT = !1, t.MORPHTARGETS_NORMAL = !1, t.MORPHTARGETS = !1, t.NUM_MORPH_INFLUENCERS = 0); } /** * Prepares the defines for baked vertex animation * @param mesh The mesh containing the geometry data we will draw * @param defines The defines to update */ static PrepareDefinesForBakedVertexAnimation(e, t) { const i = e.bakedVertexAnimationManager; t.BAKED_VERTEX_ANIMATION_TEXTURE = !!(i && i.isEnabled); } /** * Prepares the defines used in the shader depending on the attributes data available in the mesh * @param mesh The mesh containing the geometry data we will draw * @param defines The defines to update * @param useVertexColor Precise whether vertex colors should be used or not (override mesh info) * @param useBones Precise whether bones should be used or not (override mesh info) * @param useMorphTargets Precise whether morph targets should be used or not (override mesh info) * @param useVertexAlpha Precise whether vertex alpha should be used or not (override mesh info) * @param useBakedVertexAnimation Precise whether baked vertex animation should be used or not (override mesh info) * @returns false if defines are considered not dirty and have not been checked */ static PrepareDefinesForAttributes(e, t, i, r, s = !1, n = !0, a = !0) { if (!t._areAttributesDirty && t._needNormals === t._normals && t._needUVs === t._uvs) return !1; t._normals = t._needNormals, t._uvs = t._needUVs, t.NORMAL = t._needNormals && e.isVerticesDataPresent(Y.NormalKind), t._needNormals && e.isVerticesDataPresent(Y.TangentKind) && (t.TANGENT = !0); for (let l = 1; l <= 6; ++l) t["UV" + l] = t._needUVs ? e.isVerticesDataPresent(`uv${l === 1 ? 
"" : l}`) : !1; if (i) { const l = e.useVertexColors && e.isVerticesDataPresent(Y.ColorKind); t.VERTEXCOLOR = l, t.VERTEXALPHA = e.hasVertexAlpha && l && n; } return e.isVerticesDataPresent(Y.ColorInstanceKind) && (e.hasInstances || e.hasThinInstances) && (t.INSTANCESCOLOR = !0), r && this.PrepareDefinesForBones(e, t), s && this.PrepareDefinesForMorphTargets(e, t), a && this.PrepareDefinesForBakedVertexAnimation(e, t), !0; } /** * Prepares the defines related to multiview * @param scene The scene we are intending to draw * @param defines The defines to update */ static PrepareDefinesForMultiview(e, t) { if (e.activeCamera) { const i = t.MULTIVIEW; t.MULTIVIEW = e.activeCamera.outputRenderTarget !== null && e.activeCamera.outputRenderTarget.getViewCount() > 1, t.MULTIVIEW != i && t.markAsUnprocessed(); } } /** * Prepares the defines related to order independant transparency * @param scene The scene we are intending to draw * @param defines The defines to update * @param needAlphaBlending Determines if the material needs alpha blending */ static PrepareDefinesForOIT(e, t, i) { const r = t.ORDER_INDEPENDENT_TRANSPARENCY, s = t.ORDER_INDEPENDENT_TRANSPARENCY_16BITS; t.ORDER_INDEPENDENT_TRANSPARENCY = e.useOrderIndependentTransparency && i, t.ORDER_INDEPENDENT_TRANSPARENCY_16BITS = !e.getEngine().getCaps().textureFloatLinearFiltering, (r !== t.ORDER_INDEPENDENT_TRANSPARENCY || s !== t.ORDER_INDEPENDENT_TRANSPARENCY_16BITS) && t.markAsUnprocessed(); } /** * Prepares the defines related to the prepass * @param scene The scene we are intending to draw * @param defines The defines to update * @param canRenderToMRT Indicates if this material renders to several textures in the prepass */ static PrepareDefinesForPrePass(e, t, i) { const r = t.PREPASS; if (!t._arePrePassDirty) return; const s = [ { type: 1, define: "PREPASS_POSITION", index: "PREPASS_POSITION_INDEX" }, { type: 2, define: "PREPASS_VELOCITY", index: "PREPASS_VELOCITY_INDEX" }, { type: 3, define: "PREPASS_REFLECTIVITY", index: "PREPASS_REFLECTIVITY_INDEX" }, { type: 0, define: "PREPASS_IRRADIANCE", index: "PREPASS_IRRADIANCE_INDEX" }, { type: 7, define: "PREPASS_ALBEDO_SQRT", index: "PREPASS_ALBEDO_SQRT_INDEX" }, { type: 5, define: "PREPASS_DEPTH", index: "PREPASS_DEPTH_INDEX" }, { type: 6, define: "PREPASS_NORMAL", index: "PREPASS_NORMAL_INDEX" } ]; if (e.prePassRenderer && e.prePassRenderer.enabled && i) { t.PREPASS = !0, t.SCENE_MRT_COUNT = e.prePassRenderer.mrtCount, t.PREPASS_NORMAL_WORLDSPACE = e.prePassRenderer.generateNormalsInWorldSpace; for (let n = 0; n < s.length; n++) { const a = e.prePassRenderer.getIndex(s[n].type); a !== -1 ? (t[s[n].define] = !0, t[s[n].index] = a) : t[s[n].define] = !1; } } else { t.PREPASS = !1; for (let n = 0; n < s.length; n++) t[s[n].define] = !1; } t.PREPASS != r && (t.markAsUnprocessed(), t.markAsImageProcessingDirty()); } /** * Prepares the defines related to the light information passed in parameter * @param scene The scene we are intending to draw * @param mesh The mesh the effect is compiling for * @param light The light the effect is compiling for * @param lightIndex The index of the light * @param defines The defines to update * @param specularSupported Specifies whether specular is supported or not (override lights data) * @param state Defines the current state regarding what is needed (normals, etc...) 
* @param state.needNormals * @param state.needRebuild * @param state.shadowEnabled * @param state.specularEnabled * @param state.lightmapMode */ static PrepareDefinesForLight(e, t, i, r, s, n, a) { var l; switch (a.needNormals = !0, s["LIGHT" + r] === void 0 && (a.needRebuild = !0), s["LIGHT" + r] = !0, s["SPOTLIGHT" + r] = !1, s["HEMILIGHT" + r] = !1, s["POINTLIGHT" + r] = !1, s["DIRLIGHT" + r] = !1, i.prepareLightSpecificDefines(s, r), s["LIGHT_FALLOFF_PHYSICAL" + r] = !1, s["LIGHT_FALLOFF_GLTF" + r] = !1, s["LIGHT_FALLOFF_STANDARD" + r] = !1, i.falloffType) { case ia.FALLOFF_GLTF: s["LIGHT_FALLOFF_GLTF" + r] = !0; break; case ia.FALLOFF_PHYSICAL: s["LIGHT_FALLOFF_PHYSICAL" + r] = !0; break; case ia.FALLOFF_STANDARD: s["LIGHT_FALLOFF_STANDARD" + r] = !0; break; } if (n && !i.specular.equalsFloats(0, 0, 0) && (a.specularEnabled = !0), s["SHADOW" + r] = !1, s["SHADOWCSM" + r] = !1, s["SHADOWCSMDEBUG" + r] = !1, s["SHADOWCSMNUM_CASCADES" + r] = !1, s["SHADOWCSMUSESHADOWMAXZ" + r] = !1, s["SHADOWCSMNOBLEND" + r] = !1, s["SHADOWCSM_RIGHTHANDED" + r] = !1, s["SHADOWPCF" + r] = !1, s["SHADOWPCSS" + r] = !1, s["SHADOWPOISSON" + r] = !1, s["SHADOWESM" + r] = !1, s["SHADOWCLOSEESM" + r] = !1, s["SHADOWCUBE" + r] = !1, s["SHADOWLOWQUALITY" + r] = !1, s["SHADOWMEDIUMQUALITY" + r] = !1, t && t.receiveShadows && e.shadowsEnabled && i.shadowEnabled) { const o = (l = i.getShadowGenerator(e.activeCamera)) !== null && l !== void 0 ? l : i.getShadowGenerator(); if (o) { const u = o.getShadowMap(); u && u.renderList && u.renderList.length > 0 && (a.shadowEnabled = !0, o.prepareDefines(s, r)); } } i.lightmapMode != ia.LIGHTMAP_DEFAULT ? (a.lightmapMode = !0, s["LIGHTMAPEXCLUDED" + r] = !0, s["LIGHTMAPNOSPECULAR" + r] = i.lightmapMode == ia.LIGHTMAP_SHADOWSONLY) : (s["LIGHTMAPEXCLUDED" + r] = !1, s["LIGHTMAPNOSPECULAR" + r] = !1); } /** * Prepares the defines related to the light information passed in parameter * @param scene The scene we are intending to draw * @param mesh The mesh the effect is compiling for * @param defines The defines to update * @param specularSupported Specifies whether specular is supported or not (override lights data) * @param maxSimultaneousLights Specifies how manuy lights can be added to the effect at max * @param disableLighting Specifies whether the lighting is disabled (override scene and light) * @returns true if normals will be required for the rest of the effect */ static PrepareDefinesForLights(e, t, i, r, s = 4, n = !1) { if (!i._areLightsDirty) return i._needNormals; let a = 0; const l = { needNormals: i._needNormals, needRebuild: !1, lightmapMode: !1, shadowEnabled: !1, specularEnabled: !1 }; if (e.lightsEnabled && !n) { for (const u of t.lightSources) if (this.PrepareDefinesForLight(e, t, u, a, i, r, l), a++, a === s) break; } i.SPECULARTERM = l.specularEnabled, i.SHADOWS = l.shadowEnabled; for (let u = a; u < s; u++) i["LIGHT" + u] !== void 0 && (i["LIGHT" + u] = !1, i["HEMILIGHT" + u] = !1, i["POINTLIGHT" + u] = !1, i["DIRLIGHT" + u] = !1, i["SPOTLIGHT" + u] = !1, i["SHADOW" + u] = !1, i["SHADOWCSM" + u] = !1, i["SHADOWCSMDEBUG" + u] = !1, i["SHADOWCSMNUM_CASCADES" + u] = !1, i["SHADOWCSMUSESHADOWMAXZ" + u] = !1, i["SHADOWCSMNOBLEND" + u] = !1, i["SHADOWCSM_RIGHTHANDED" + u] = !1, i["SHADOWPCF" + u] = !1, i["SHADOWPCSS" + u] = !1, i["SHADOWPOISSON" + u] = !1, i["SHADOWESM" + u] = !1, i["SHADOWCLOSEESM" + u] = !1, i["SHADOWCUBE" + u] = !1, i["SHADOWLOWQUALITY" + u] = !1, i["SHADOWMEDIUMQUALITY" + u] = !1); const o = e.getEngine().getCaps(); return i.SHADOWFLOAT === 
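// Sketch of the light-define pass above (comment only, placeholder names): the helper walks
// mesh.lightSources, fills LIGHTn / SPOTLIGHTn / SHADOWn... for up to maxSimultaneousLights
// entries, clears the unused slots, and returns whether the shader will need normals.
//
//   const needNormals = Ke.PrepareDefinesForLights(scene, mesh, defines,
//       /* specularSupported */ true, /* maxSimultaneousLights */ 4);
//   // defines.SPECULARTERM and defines.SHADOWS are updated as a side effect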
void 0 && (l.needRebuild = !0), i.SHADOWFLOAT = l.shadowEnabled && (o.textureFloatRender && o.textureFloatLinearFiltering || o.textureHalfFloatRender && o.textureHalfFloatLinearFiltering), i.LIGHTMAPEXCLUDED = l.lightmapMode, l.needRebuild && i.rebuild(), l.needNormals; } /** * Prepares the uniforms and samplers list to be used in the effect (for a specific light) * @param lightIndex defines the light index * @param uniformsList The uniform list * @param samplersList The sampler list * @param projectedLightTexture defines if projected texture must be used * @param uniformBuffersList defines an optional list of uniform buffers * @param updateOnlyBuffersList True to only update the uniformBuffersList array */ static PrepareUniformsAndSamplersForLight(e, t, i, r, s = null, n = !1) { s && s.push("Light" + e), !n && (t.push("vLightData" + e, "vLightDiffuse" + e, "vLightSpecular" + e, "vLightDirection" + e, "vLightFalloff" + e, "vLightGround" + e, "lightMatrix" + e, "shadowsInfo" + e, "depthValues" + e), i.push("shadowSampler" + e), i.push("depthSampler" + e), t.push("viewFrustumZ" + e, "cascadeBlendFactor" + e, "lightSizeUVCorrection" + e, "depthCorrection" + e, "penumbraDarkness" + e, "frustumLengths" + e), r && (i.push("projectionLightSampler" + e), t.push("textureProjectionMatrix" + e))); } /** * Prepares the uniforms and samplers list to be used in the effect * @param uniformsListOrOptions The uniform names to prepare or an EffectCreationOptions containing the list and extra information * @param samplersList The sampler list * @param defines The defines helping in the list generation * @param maxSimultaneousLights The maximum number of simultaneous light allowed in the effect */ static PrepareUniformsAndSamplersList(e, t, i, r = 4) { let s, n = null; if (e.uniformsNames) { const a = e; s = a.uniformsNames, n = a.uniformBuffersNames, t = a.samplers, i = a.defines, r = a.maxSimultaneousLights || 0; } else s = e, t || (t = []); for (let a = 0; a < r && i["LIGHT" + a]; a++) this.PrepareUniformsAndSamplersForLight(a, s, t, i["PROJECTEDLIGHTTEXTURE" + a], n); i.NUM_MORPH_INFLUENCERS && s.push("morphTargetInfluences"), i.BAKED_VERTEX_ANIMATION_TEXTURE && (s.push("bakedVertexAnimationSettings"), s.push("bakedVertexAnimationTextureSizeInverted"), s.push("bakedVertexAnimationTime"), t.push("bakedVertexAnimationTexture")); } /** * This helps decreasing rank by rank the shadow quality (0 being the highest rank and quality) * @param defines The defines to update while falling back * @param fallbacks The authorized effect fallbacks * @param maxSimultaneousLights The maximum number of lights allowed * @param rank the current rank of the Effect * @returns The newly affected rank */ static HandleFallbacksForShadows(e, t, i = 4, r = 0) { let s = 0; for (let n = 0; n < i && e["LIGHT" + n]; n++) n > 0 && (s = r + n, t.addFallback(s, "LIGHT" + n)), e.SHADOWS || (e["SHADOW" + n] && t.addFallback(r, "SHADOW" + n), e["SHADOWPCF" + n] && t.addFallback(r, "SHADOWPCF" + n), e["SHADOWPCSS" + n] && t.addFallback(r, "SHADOWPCSS" + n), e["SHADOWPOISSON" + n] && t.addFallback(r, "SHADOWPOISSON" + n), e["SHADOWESM" + n] && t.addFallback(r, "SHADOWESM" + n), e["SHADOWCLOSEESM" + n] && t.addFallback(r, "SHADOWCLOSEESM" + n)); return s++; } /** * Prepares the list of attributes required for morph targets according to the effect defines. 
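 * @example
 * // hedged sketch, not taken from the library docs; `mesh` is a placeholder and Y.PositionKind
 * // is the bundled vertex-buffer kind constant used throughout this file:
 * const attribs = [Y.PositionKind, Y.NormalKind];
 * Ke.PrepareAttributesForMorphTargetsInfluencers(attribs, mesh, 4);
 * // attribs now also lists "position0".."position3" (skipped entirely when the morph target
 * // manager stores its targets in a texture)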
* @param attribs The current list of supported attribs * @param mesh The mesh to prepare the morph targets attributes for * @param influencers The number of influencers */ static PrepareAttributesForMorphTargetsInfluencers(e, t, i) { this._TmpMorphInfluencers.NUM_MORPH_INFLUENCERS = i, this.PrepareAttributesForMorphTargets(e, t, this._TmpMorphInfluencers); } /** * Prepares the list of attributes required for morph targets according to the effect defines. * @param attribs The current list of supported attribs * @param mesh The mesh to prepare the morph targets attributes for * @param defines The current Defines of the effect */ static PrepareAttributesForMorphTargets(e, t, i) { const r = i.NUM_MORPH_INFLUENCERS; if (r > 0 && gi.LastCreatedEngine) { const s = gi.LastCreatedEngine.getCaps().maxVertexAttribs, n = t.morphTargetManager; if (n != null && n.isUsingTextureForTargets) return; const a = n && n.supportsNormals && i.NORMAL, l = n && n.supportsTangents && i.TANGENT, o = n && n.supportsUVs && i.UV1; for (let u = 0; u < r; u++) e.push(Y.PositionKind + u), a && e.push(Y.NormalKind + u), l && e.push(Y.TangentKind + u), o && e.push(Y.UVKind + "_" + u), e.length > s && Ce.Error("Cannot add more vertex attributes for mesh " + t.name); } } /** * Prepares the list of attributes required for baked vertex animations according to the effect defines. * @param attribs The current list of supported attribs * @param mesh The mesh to prepare the morph targets attributes for * @param defines The current Defines of the effect */ static PrepareAttributesForBakedVertexAnimation(e, t, i) { i.BAKED_VERTEX_ANIMATION_TEXTURE && i.INSTANCES && e.push("bakedVertexAnimationSettingsInstanced"); } /** * Prepares the list of attributes required for bones according to the effect defines. * @param attribs The current list of supported attribs * @param mesh The mesh to prepare the bones attributes for * @param defines The current Defines of the effect * @param fallbacks The current effect fallback strategy */ static PrepareAttributesForBones(e, t, i, r) { i.NUM_BONE_INFLUENCERS > 0 && (r.addCPUSkinningFallback(0, t), e.push(Y.MatricesIndicesKind), e.push(Y.MatricesWeightsKind), i.NUM_BONE_INFLUENCERS > 4 && (e.push(Y.MatricesIndicesExtraKind), e.push(Y.MatricesWeightsExtraKind))); } /** * Check and prepare the list of attributes required for instances according to the effect defines. * @param attribs The current list of supported attribs * @param defines The current MaterialDefines of the effect */ static PrepareAttributesForInstances(e, t) { (t.INSTANCES || t.THIN_INSTANCES) && this.PushAttributesForInstances(e, !!t.PREPASS_VELOCITY), t.INSTANCESCOLOR && e.push(Y.ColorInstanceKind); } /** * Add the list of attributes required for instances to the attribs array. * @param attribs The current list of supported attribs * @param needsPreviousMatrices If the shader needs previous matrices */ static PushAttributesForInstances(e, t = !1) { e.push("world0"), e.push("world1"), e.push("world2"), e.push("world3"), t && (e.push("previousWorld0"), e.push("previousWorld1"), e.push("previousWorld2"), e.push("previousWorld3")); } /** * Binds the light information to the effect. * @param light The light containing the generator * @param effect The effect we are binding the data to * @param lightIndex The light index in the effect used to render */ static BindLightProperties(e, t, i) { e.transferToEffect(t, i + ""); } /** * Binds the lights information from the scene to the effect for the given mesh. 
* @param light Light to bind * @param lightIndex Light index * @param scene The scene where the light belongs to * @param effect The effect we are binding the data to * @param useSpecular Defines if specular is supported * @param receiveShadows Defines if the effect (mesh) we bind the light for receives shadows */ static BindLight(e, t, i, r, s, n = !0) { e._bindLight(t, i, r, s, n); } /** * Binds the lights information from the scene to the effect for the given mesh. * @param scene The scene the lights belongs to * @param mesh The mesh we are binding the information to render * @param effect The effect we are binding the data to * @param defines The generated defines for the effect * @param maxSimultaneousLights The maximum number of light that can be bound to the effect */ static BindLights(e, t, i, r, s = 4) { const n = Math.min(t.lightSources.length, s); for (let a = 0; a < n; a++) { const l = t.lightSources[a]; this.BindLight(l, a, e, i, typeof r == "boolean" ? r : r.SPECULARTERM, t.receiveShadows); } } /** * Binds the fog information from the scene to the effect for the given mesh. * @param scene The scene the lights belongs to * @param mesh The mesh we are binding the information to render * @param effect The effect we are binding the data to * @param linearSpace Defines if the fog effect is applied in linear space */ static BindFogParameters(e, t, i, r = !1) { e.fogEnabled && t.applyFog && e.fogMode !== ii.FOGMODE_NONE && (i.setFloat4("vFogInfos", e.fogMode, e.fogStart, e.fogEnd, e.fogDensity), r ? (e.fogColor.toLinearSpaceToRef(this._TempFogColor, e.getEngine().useExactSrgbConversions), i.setColor3("vFogColor", this._TempFogColor)) : i.setColor3("vFogColor", e.fogColor)); } /** * Binds the bones information from the mesh to the effect. * @param mesh The mesh we are binding the information to render * @param effect The effect we are binding the data to * @param prePassConfiguration Configuration for the prepass, in case prepass is activated */ static BindBonesParameters(e, t, i) { if (!(!t || !e) && (e.computeBonesUsingShaders && t._bonesComputationForcedToCPU && (e.computeBonesUsingShaders = !1), e.useBones && e.computeBonesUsingShaders && e.skeleton)) { const r = e.skeleton; if (r.isUsingTextureForMatrices && t.getUniformIndex("boneTextureWidth") > -1) { const s = r.getTransformMatrixTexture(e); t.setTexture("boneSampler", s), t.setFloat("boneTextureWidth", 4 * (r.bones.length + 1)); } else { const s = r.getTransformMatrices(e); s && (t.setMatrices("mBones", s), i && e.getScene().prePassRenderer && e.getScene().prePassRenderer.getIndex(2) && (i.previousBones[e.uniqueId] || (i.previousBones[e.uniqueId] = s.slice()), t.setMatrices("mPreviousBones", i.previousBones[e.uniqueId]), Ke._CopyBonesTransformationMatrices(s, i.previousBones[e.uniqueId]))); } } } // Copies the bones transformation matrices into the target array and returns the target's reference static _CopyBonesTransformationMatrices(e, t) { return t.set(e), t; } /** * Binds the morph targets information from the mesh to the effect. * @param abstractMesh The mesh we are binding the information to render * @param effect The effect we are binding the data to */ static BindMorphTargetParameters(e, t) { const i = e.morphTargetManager; !e || !i || t.setFloatArray("morphTargetInfluences", i.influences); } /** * Binds the logarithmic depth information from the scene to the effect for the given defines. 
* @param defines The generated defines used in the effect * @param effect The effect we are binding the data to * @param scene The scene we are willing to render with logarithmic scale for */ static BindLogDepth(e, t, i) { if (!e || e.LOGARITHMICDEPTH || e.indexOf && e.indexOf("LOGARITHMICDEPTH") >= 0) { const r = i.activeCamera; r.mode === Ai.ORTHOGRAPHIC_CAMERA && Ce.Error("Logarithmic depth is not compatible with orthographic cameras!", 20), t.setFloat("logarithmicDepthConstant", 2 / (Math.log(r.maxZ + 1) / Math.LN2)); } } } Ke._TmpMorphInfluencers = { NUM_MORPH_INFLUENCERS: 0 }; Ke._TempFogColor = ze.Black(); class ET { /** * Creates a material stencil state instance */ constructor() { this.reset(); } /** * Resets all the stencil states to default values */ reset() { this.enabled = !1, this.mask = 255, this.func = 519, this.funcRef = 1, this.funcMask = 255, this.opStencilFail = 7680, this.opDepthFail = 7680, this.opStencilDepthPass = 7681; } /** * Gets or sets the stencil function */ get func() { return this._func; } set func(e) { this._func = e; } /** * Gets or sets the stencil function reference */ get funcRef() { return this._funcRef; } set funcRef(e) { this._funcRef = e; } /** * Gets or sets the stencil function mask */ get funcMask() { return this._funcMask; } set funcMask(e) { this._funcMask = e; } /** * Gets or sets the operation when the stencil test fails */ get opStencilFail() { return this._opStencilFail; } set opStencilFail(e) { this._opStencilFail = e; } /** * Gets or sets the operation when the depth test fails */ get opDepthFail() { return this._opDepthFail; } set opDepthFail(e) { this._opDepthFail = e; } /** * Gets or sets the operation when the stencil+depth test succeeds */ get opStencilDepthPass() { return this._opStencilDepthPass; } set opStencilDepthPass(e) { this._opStencilDepthPass = e; } /** * Gets or sets the stencil mask */ get mask() { return this._mask; } set mask(e) { this._mask = e; } /** * Enables or disables the stencil test */ get enabled() { return this._enabled; } set enabled(e) { this._enabled = e; } /** * Get the current class name, useful for serialization or dynamic coding. * @returns "MaterialStencilState" */ getClassName() { return "MaterialStencilState"; } /** * Makes a duplicate of the current configuration into another one. * @param stencilState defines stencil state where to copy the info */ copyTo(e) { St.Clone(() => e, this); } /** * Serializes this stencil configuration. * @returns - An object with the serialized config. */ serialize() { return St.Serialize(this); } /** * Parses a stencil state configuration from a serialized object. * @param source - Serialized object. 
* @param scene Defines the scene we are parsing for * @param rootUrl Defines the rootUrl to load from */ parse(e, t, i) { St.Parse(() => this, e, t, i); } } F([ W() ], ET.prototype, "func", null); F([ W() ], ET.prototype, "funcRef", null); F([ W() ], ET.prototype, "funcMask", null); F([ W() ], ET.prototype, "opStencilFail", null); F([ W() ], ET.prototype, "opDepthFail", null); F([ W() ], ET.prototype, "opStencilDepthPass", null); F([ W() ], ET.prototype, "mask", null); F([ W() ], ET.prototype, "enabled", null); var xh; (function(c) { c[c.Created = 1] = "Created", c[c.Disposed = 2] = "Disposed", c[c.GetDefineNames = 4] = "GetDefineNames", c[c.PrepareUniformBuffer = 8] = "PrepareUniformBuffer", c[c.IsReadyForSubMesh = 16] = "IsReadyForSubMesh", c[c.PrepareDefines = 32] = "PrepareDefines", c[c.BindForSubMesh = 64] = "BindForSubMesh", c[c.PrepareEffect = 128] = "PrepareEffect", c[c.GetAnimatables = 256] = "GetAnimatables", c[c.GetActiveTextures = 512] = "GetActiveTextures", c[c.HasTexture = 1024] = "HasTexture", c[c.FillRenderTargetTextures = 2048] = "FillRenderTargetTextures", c[c.HasRenderTargetTextures = 4096] = "HasRenderTargetTextures", c[c.HardBindForSubMesh = 8192] = "HardBindForSubMesh"; })(xh || (xh = {})); class At { /** * If the material can be rendered to several textures with MRT extension */ get canRenderToMRT() { return !1; } /** * Sets the alpha value of the material */ set alpha(e) { if (this._alpha === e) return; const t = this._alpha; this._alpha = e, (t === 1 || e === 1) && this.markAsDirty(At.MiscDirtyFlag + At.PrePassDirtyFlag); } /** * Gets the alpha value of the material */ get alpha() { return this._alpha; } /** * Sets the culling state (true to enable culling, false to disable) */ set backFaceCulling(e) { this._backFaceCulling !== e && (this._backFaceCulling = e, this.markAsDirty(At.TextureDirtyFlag)); } /** * Gets the culling state */ get backFaceCulling() { return this._backFaceCulling; } /** * Sets the type of faces that should be culled (true for back faces, false for front faces) */ set cullBackFaces(e) { this._cullBackFaces !== e && (this._cullBackFaces = e, this.markAsDirty(At.TextureDirtyFlag)); } /** * Gets the type of faces that should be culled */ get cullBackFaces() { return this._cullBackFaces; } /** * Block the dirty-mechanism for this specific material * When set to false after being true the material will be marked as dirty. */ get blockDirtyMechanism() { return this._blockDirtyMechanism; } set blockDirtyMechanism(e) { this._blockDirtyMechanism !== e && (this._blockDirtyMechanism = e, e || this.markDirty()); } /** * This allows you to modify the material without marking it as dirty after every change. * This function should be used if you need to make more than one dirty-enabling change to the material - adding a texture, setting a new fill mode and so on. * The callback will pass the material as an argument, so you can make your changes to it. 
* @param callback the callback to be executed that will update the material */ atomicMaterialsUpdate(e) { this.blockDirtyMechanism = !0; try { e(this); } finally { this.blockDirtyMechanism = !1; } } /** * Gets a boolean indicating that current material needs to register RTT */ get hasRenderTargetTextures() { return this._eventInfo.hasRenderTargetTextures = !1, this._callbackPluginEventHasRenderTargetTextures(this._eventInfo), this._eventInfo.hasRenderTargetTextures; } /** * Called during a dispose event */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * An event triggered when the material is bound */ get onBindObservable() { return this._onBindObservable || (this._onBindObservable = new Fe()), this._onBindObservable; } /** * Called during a bind event */ set onBind(e) { this._onBindObserver && this.onBindObservable.remove(this._onBindObserver), this._onBindObserver = this.onBindObservable.add(e); } /** * An event triggered when the material is unbound */ get onUnBindObservable() { return this._onUnBindObservable || (this._onUnBindObservable = new Fe()), this._onUnBindObservable; } /** * An event triggered when the effect is (re)created */ get onEffectCreatedObservable() { return this._onEffectCreatedObservable || (this._onEffectCreatedObservable = new Fe()), this._onEffectCreatedObservable; } /** * Sets the value of the alpha mode. * * | Value | Type | Description | * | --- | --- | --- | * | 0 | ALPHA_DISABLE | | * | 1 | ALPHA_ADD | | * | 2 | ALPHA_COMBINE | | * | 3 | ALPHA_SUBTRACT | | * | 4 | ALPHA_MULTIPLY | | * | 5 | ALPHA_MAXIMIZED | | * | 6 | ALPHA_ONEONE | | * | 7 | ALPHA_PREMULTIPLIED | | * | 8 | ALPHA_PREMULTIPLIED_PORTERDUFF | | * | 9 | ALPHA_INTERPOLATE | | * | 10 | ALPHA_SCREENMODE | | * */ set alphaMode(e) { this._alphaMode !== e && (this._alphaMode = e, this.markAsDirty(At.TextureDirtyFlag)); } /** * Gets the value of the alpha mode */ get alphaMode() { return this._alphaMode; } /** * Sets the need depth pre-pass value */ set needDepthPrePass(e) { this._needDepthPrePass !== e && (this._needDepthPrePass = e, this._needDepthPrePass && (this.checkReadyOnEveryCall = !0)); } /** * Gets the depth pre-pass value */ get needDepthPrePass() { return this._needDepthPrePass; } /** * Can this material render to prepass */ get isPrePassCapable() { return !1; } /** * Sets the state for enabling fog */ set fogEnabled(e) { this._fogEnabled !== e && (this._fogEnabled = e, this.markAsDirty(At.MiscDirtyFlag)); } /** * Gets the value of the fog enabled state */ get fogEnabled() { return this._fogEnabled; } get wireframe() { switch (this._fillMode) { case At.WireFrameFillMode: case At.LineListDrawMode: case At.LineLoopDrawMode: case At.LineStripDrawMode: return !0; } return this._scene.forceWireframe; } /** * Sets the state of wireframe mode */ set wireframe(e) { this.fillMode = e ? At.WireFrameFillMode : At.TriangleFillMode; } /** * Gets the value specifying if point clouds are enabled */ get pointsCloud() { switch (this._fillMode) { case At.PointFillMode: case At.PointListDrawMode: return !0; } return this._scene.forcePointsCloud; } /** * Sets the state of point cloud mode */ set pointsCloud(e) { this.fillMode = e ? 
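// Usage sketch for atomicMaterialsUpdate, documented above (comment only, `material` is a
// placeholder): several dirty-flagging changes are batched and the material is marked dirty
// once, when blockDirtyMechanism is switched back off in the finally clause.
//
//   material.atomicMaterialsUpdate((mat) => {
//     mat.alpha = 0.5;
//     mat.alphaMode = 2;       // ALPHA_COMBINE, see the table above
//     mat.fogEnabled = false;
//   });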
At.PointFillMode : At.TriangleFillMode; } /** * Gets the material fill mode */ get fillMode() { return this._fillMode; } /** * Sets the material fill mode */ set fillMode(e) { this._fillMode !== e && (this._fillMode = e, this.markAsDirty(At.MiscDirtyFlag)); } /** * In case the depth buffer does not allow enough depth precision for your scene (might be the case in large scenes) * You can try switching to logarithmic depth. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/advanced/logarithmicDepthBuffer */ get useLogarithmicDepth() { return this._useLogarithmicDepth; } set useLogarithmicDepth(e) { const t = this.getScene().getEngine().getCaps().fragmentDepthSupported; e && !t && Ce.Warn("Logarithmic depth has been requested for a material on a device that doesn't support it."), this._useLogarithmicDepth = e && t, this._markAllSubMeshesAsMiscDirty(); } /** @internal */ _getDrawWrapper() { return this._drawWrapper; } /** * @internal */ _setDrawWrapper(e) { this._drawWrapper = e; } /** * Creates a material instance * @param name defines the name of the material * @param scene defines the scene to reference * @param doNotAdd specifies if the material should be added to the scene */ constructor(e, t, i) { this.shadowDepthWrapper = null, this.allowShaderHotSwapping = !0, this.metadata = null, this.reservedDataStore = null, this.checkReadyOnEveryCall = !1, this.checkReadyOnlyOnce = !1, this.state = "", this._alpha = 1, this._backFaceCulling = !0, this._cullBackFaces = !0, this._blockDirtyMechanism = !1, this.onCompiled = null, this.onError = null, this.getRenderTargetTextures = null, this.doNotSerialize = !1, this._storeEffectOnSubMeshes = !1, this.animations = null, this.onDisposeObservable = new Fe(), this._onDisposeObserver = null, this._onUnBindObservable = null, this._onBindObserver = null, this._alphaMode = 2, this._needDepthPrePass = !1, this.disableDepthWrite = !1, this.disableColorWrite = !1, this.forceDepthWrite = !1, this.depthFunction = 0, this.separateCullingPass = !1, this._fogEnabled = !0, this.pointSize = 1, this.zOffset = 0, this.zOffsetUnits = 0, this.stencil = new ET(), this._useUBO = !1, this._fillMode = At.TriangleFillMode, this._cachedDepthWriteState = !1, this._cachedColorWriteState = !1, this._cachedDepthFunctionState = 0, this._indexInSceneMaterialArray = -1, this.meshMap = null, this._parentContainer = null, this._uniformBufferLayoutBuilt = !1, this._eventInfo = {}, this._callbackPluginEventGeneric = () => { }, this._callbackPluginEventIsReadyForSubMesh = () => { }, this._callbackPluginEventPrepareDefines = () => { }, this._callbackPluginEventPrepareDefinesBeforeAttributes = () => { }, this._callbackPluginEventHardBindForSubMesh = () => { }, this._callbackPluginEventBindForSubMesh = () => { }, this._callbackPluginEventHasRenderTargetTextures = () => { }, this._callbackPluginEventFillRenderTargetTextures = () => { }, this._forceAlphaTest = !1, this._transparencyMode = null, this.name = e; const r = t || gi.LastCreatedScene; r && (this._scene = r, this._dirtyCallbacks = {}, this._dirtyCallbacks[1] = this._markAllSubMeshesAsTexturesDirty.bind(this), this._dirtyCallbacks[2] = this._markAllSubMeshesAsLightsDirty.bind(this), this._dirtyCallbacks[4] = this._markAllSubMeshesAsFresnelDirty.bind(this), this._dirtyCallbacks[8] = this._markAllSubMeshesAsAttributesDirty.bind(this), this._dirtyCallbacks[16] = this._markAllSubMeshesAsMiscDirty.bind(this), this._dirtyCallbacks[32] = this._markAllSubMeshesAsPrePassDirty.bind(this), this._dirtyCallbacks[63] = 
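// Brief sketch for the fill-mode and depth accessors above (comment only, placeholder name):
//
//   material.wireframe = true;             // shorthand for fillMode = WireFrameFillMode
//   material.useLogarithmicDepth = true;   // silently stays false (with a warning) when the
//                                          // engine lacks fragment-depth support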
this._markAllSubMeshesAsAllDirty.bind(this), this.id = e || Ve.RandomId(), this.uniqueId = this._scene.getUniqueId(), this._materialContext = this._scene.getEngine().createMaterialContext(), this._drawWrapper = new $o(this._scene.getEngine(), !1), this._drawWrapper.materialContext = this._materialContext, this._scene.useRightHandedSystem ? this.sideOrientation = At.ClockWiseSideOrientation : this.sideOrientation = At.CounterClockWiseSideOrientation, this._uniformBuffer = new Vi(this._scene.getEngine(), void 0, void 0, e), this._useUBO = this.getScene().getEngine().supportsUniformBuffers, i || this._scene.addMaterial(this), this._scene.useMaterialMeshMap && (this.meshMap = {}), At.OnEventObservable.notifyObservers(this, xh.Created)); } /** * Returns a string representation of the current material * @param fullDetails defines a boolean indicating which levels of logging is desired * @returns a string with material information */ // eslint-disable-next-line @typescript-eslint/no-unused-vars toString(e) { return "Name: " + this.name; } /** * Gets the class name of the material * @returns a string with the class name of the material */ getClassName() { return "Material"; } /** @internal */ get _isMaterial() { return !0; } /** * Specifies if updates for the material been locked */ get isFrozen() { return this.checkReadyOnlyOnce; } /** * Locks updates for the material */ freeze() { this.markDirty(), this.checkReadyOnlyOnce = !0; } /** * Unlocks updates for the material */ unfreeze() { this.markDirty(), this.checkReadyOnlyOnce = !1; } /** * Specifies if the material is ready to be used * @param mesh defines the mesh to check * @param useInstances specifies if instances should be used * @returns a boolean indicating if the material is ready to be used */ // eslint-disable-next-line @typescript-eslint/no-unused-vars isReady(e, t) { return !0; } /** * Specifies that the submesh is ready to be used * @param mesh defines the mesh to check * @param subMesh defines which submesh to check * @param useInstances specifies that instances should be used * @returns a boolean indicating that the submesh is ready or not */ // eslint-disable-next-line @typescript-eslint/no-unused-vars isReadyForSubMesh(e, t, i) { const r = t.materialDefines; return r ? (this._eventInfo.isReadyForSubMesh = !0, this._eventInfo.defines = r, this._callbackPluginEventIsReadyForSubMesh(this._eventInfo), this._eventInfo.isReadyForSubMesh) : !1; } /** * Returns the material effect * @returns the effect associated with the material */ getEffect() { return this._drawWrapper.effect; } /** * Returns the current scene * @returns a Scene */ getScene() { return this._scene; } /** * Gets the current transparency mode. */ get transparencyMode() { return this._transparencyMode; } /** * Sets the transparency mode of the material. * * | Value | Type | Description | * | ----- | ----------------------------------- | ----------- | * | 0 | OPAQUE | | * | 1 | ALPHATEST | | * | 2 | ALPHABLEND | | * | 3 | ALPHATESTANDBLEND | | * */ set transparencyMode(e) { this._transparencyMode !== e && (this._transparencyMode = e, this._forceAlphaTest = e === At.MATERIAL_ALPHATESTANDBLEND, this._markAllSubMeshesAsTexturesAndMiscDirty()); } /** * Returns true if alpha blending should be disabled. */ get _disableAlphaBlending() { return this._transparencyMode === At.MATERIAL_OPAQUE || this._transparencyMode === At.MATERIAL_ALPHATEST; } /** * Specifies whether or not this material should be rendered in alpha blend mode. 
* @returns a boolean specifying if alpha blending is needed */ needAlphaBlending() { return this._disableAlphaBlending ? !1 : this.alpha < 1; } /** * Specifies if the mesh will require alpha blending * @param mesh defines the mesh to check * @returns a boolean specifying if alpha blending is needed for the mesh */ needAlphaBlendingForMesh(e) { return e.visibility < 1 ? !0 : this._disableAlphaBlending ? !1 : e.hasVertexAlpha || this.needAlphaBlending(); } /** * Specifies whether or not this material should be rendered in alpha test mode. * @returns a boolean specifying if an alpha test is needed. */ needAlphaTesting() { return !!this._forceAlphaTest; } /** * Specifies if material alpha testing should be turned on for the mesh * @param mesh defines the mesh to check */ _shouldTurnAlphaTestOn(e) { return !this.needAlphaBlendingForMesh(e) && this.needAlphaTesting(); } /** * Gets the texture used for the alpha test * @returns the texture to use for alpha testing */ getAlphaTestTexture() { return null; } /** * Marks the material to indicate that it needs to be re-calculated * @param forceMaterialDirty - Forces the material to be marked as dirty for all components (same as this.markAsDirty(Material.AllDirtyFlag)). You should use this flag if the material is frozen and you want to force a recompilation. */ markDirty(e = !1) { const t = this.getScene().meshes; for (const i of t) if (i.subMeshes) for (const r of i.subMeshes) r.getMaterial() === this && r.effect && (r.effect._wasPreviouslyReady = !1, r.effect._wasPreviouslyUsingInstances = null, r.effect._forceRebindOnNextCall = e); e && this.markAsDirty(At.AllDirtyFlag); } /** * @internal */ _preBind(e, t = null) { const i = this._scene.getEngine(), s = (t ?? this.sideOrientation) === At.ClockWiseSideOrientation; return i.enableEffect(e || this._getDrawWrapper()), i.setState(this.backFaceCulling, this.zOffset, !1, s, this._scene._mirroredCameraPosition ? !this.cullBackFaces : this.cullBackFaces, this.stencil, this.zOffsetUnits), s; } /** * Binds the material to the mesh * @param world defines the world transformation matrix * @param mesh defines the mesh to bind the material to */ // eslint-disable-next-line @typescript-eslint/no-unused-vars bind(e, t) { } /** * Initializes the uniform buffer layout for the shader. */ buildUniformLayout() { const e = this._uniformBuffer; this._eventInfo.ubo = e, this._callbackPluginEventGeneric(xh.PrepareUniformBuffer, this._eventInfo), e.create(), this._uniformBufferLayoutBuilt = !0; } /** * Binds the submesh to the material * @param world defines the world transformation matrix * @param mesh defines the mesh containing the submesh * @param subMesh defines the submesh to bind the material to */ bindForSubMesh(e, t, i) { const r = i.effect; r && (this._eventInfo.subMesh = i, this._callbackPluginEventBindForSubMesh(this._eventInfo), r._forceRebindOnNextCall = !1); } /** * Binds the world matrix to the material * @param world defines the world transformation matrix */ // eslint-disable-next-line @typescript-eslint/no-unused-vars bindOnlyWorldMatrix(e) { } /** * Binds the view matrix to the effect * @param effect defines the effect to bind the view matrix to */ bindView(e) { this._useUBO ? this._needToBindSceneUbo = !0 : e.setMatrix("view", this.getScene().getViewMatrix()); } /** * Binds the view projection and projection matrices to the effect * @param effect defines the effect to bind the view projection and projection matrices to */ bindViewProjection(e) { this._useUBO ? 
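// Sketch of how transparencyMode interacts with the alpha queries above (comment only,
// placeholder names): MATERIAL_ALPHATESTANDBLEND forces the alpha test on while still
// allowing blending.
//
//   material.transparencyMode = At.MATERIAL_ALPHATESTANDBLEND;   // 3
//   material.needAlphaTesting();              // true, forced by the mode
//   material.needAlphaBlendingForMesh(mesh);  // true when mesh.visibility < 1, the mesh has
//                                             // vertex alpha, or material.alpha < 1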
this._needToBindSceneUbo = !0 : (e.setMatrix("viewProjection", this.getScene().getTransformMatrix()), e.setMatrix("projection", this.getScene().getProjectionMatrix())); } /** * Binds the view matrix to the effect * @param effect defines the effect to bind the view matrix to * @param variableName name of the shader variable that will hold the eye position */ bindEyePosition(e, t) { this._useUBO ? this._needToBindSceneUbo = !0 : this._scene.bindEyePosition(e, t); } /** * Processes to execute after binding the material to a mesh * @param mesh defines the rendered mesh * @param effect */ _afterBind(e, t = null) { if (this._scene._cachedMaterial = this, this._needToBindSceneUbo && t && (this._needToBindSceneUbo = !1, Ke.BindSceneUniformBuffer(t, this.getScene().getSceneUniformBuffer()), this._scene.finalizeSceneUbo()), e ? this._scene._cachedVisibility = e.visibility : this._scene._cachedVisibility = 1, this._onBindObservable && e && this._onBindObservable.notifyObservers(e), this.disableDepthWrite) { const i = this._scene.getEngine(); this._cachedDepthWriteState = i.getDepthWrite(), i.setDepthWrite(!1); } if (this.disableColorWrite) { const i = this._scene.getEngine(); this._cachedColorWriteState = i.getColorWrite(), i.setColorWrite(!1); } if (this.depthFunction !== 0) { const i = this._scene.getEngine(); this._cachedDepthFunctionState = i.getDepthFunction() || 0, i.setDepthFunction(this.depthFunction); } } /** * Unbinds the material from the mesh */ unbind() { this._onUnBindObservable && this._onUnBindObservable.notifyObservers(this), this.depthFunction !== 0 && this._scene.getEngine().setDepthFunction(this._cachedDepthFunctionState), this.disableDepthWrite && this._scene.getEngine().setDepthWrite(this._cachedDepthWriteState), this.disableColorWrite && this._scene.getEngine().setColorWrite(this._cachedColorWriteState); } /** * Returns the animatable textures. * @returns - Array of animatable textures. 
*/ getAnimatables() { return this._eventInfo.animatables = [], this._callbackPluginEventGeneric(xh.GetAnimatables, this._eventInfo), this._eventInfo.animatables; } /** * Gets the active textures from the material * @returns an array of textures */ getActiveTextures() { return this._eventInfo.activeTextures = [], this._callbackPluginEventGeneric(xh.GetActiveTextures, this._eventInfo), this._eventInfo.activeTextures; } /** * Specifies if the material uses a texture * @param texture defines the texture to check against the material * @returns a boolean specifying if the material uses the texture */ hasTexture(e) { return this._eventInfo.hasTexture = !1, this._eventInfo.texture = e, this._callbackPluginEventGeneric(xh.HasTexture, this._eventInfo), this._eventInfo.hasTexture; } /** * Makes a duplicate of the material, and gives it a new name * @param name defines the new name for the duplicated material * @returns the cloned material */ // eslint-disable-next-line @typescript-eslint/no-unused-vars clone(e) { return null; } _clonePlugins(e, t) { const i = {}; if (this._serializePlugins(i), At._parsePlugins(i, e, this._scene, t), this.pluginManager) for (const r of this.pluginManager._plugins) { const s = e.pluginManager.getPlugin(r.name); r.copyTo(s); } } /** * Gets the meshes bound to the material * @returns an array of meshes bound to the material */ getBindedMeshes() { if (this.meshMap) { const e = []; for (const t in this.meshMap) { const i = this.meshMap[t]; i && e.push(i); } return e; } else return this._scene.meshes.filter((t) => t.material === this); } /** * Force shader compilation * @param mesh defines the mesh associated with this material * @param onCompiled defines a function to execute once the material is compiled * @param options defines the options to configure the compilation * @param onError defines a function to execute if the material fails compiling */ forceCompilation(e, t, i, r) { const s = Object.assign({ clipPlane: !1, useInstances: !1 }, i), n = this.getScene(), a = this.allowShaderHotSwapping; this.allowShaderHotSwapping = !1; const l = () => { if (!this._scene || !this._scene.getEngine()) return; const o = n.clipPlane; if (s.clipPlane && (n.clipPlane = new Sd(0, 0, 0, 1)), this._storeEffectOnSubMeshes) { let u = !0, h = null; if (e.subMeshes) { const d = new ed(0, 0, 0, 0, 0, e, void 0, !1, !1); d.materialDefines && (d.materialDefines._renderId = -1), this.isReadyForSubMesh(e, d, s.useInstances) || (d.effect && d.effect.getCompilationError() && d.effect.allFallbacksProcessed() ? h = d.effect.getCompilationError() : (u = !1, setTimeout(l, 16))); } u && (this.allowShaderHotSwapping = a, h && r && r(h), t && t(this)); } else this.isReady() ? 
(this.allowShaderHotSwapping = a, t && t(this)) : setTimeout(l, 16); s.clipPlane && (n.clipPlane = o); }; l(); } /** * Force shader compilation * @param mesh defines the mesh that will use this material * @param options defines additional options for compiling the shaders * @returns a promise that resolves when the compilation completes */ forceCompilationAsync(e, t) { return new Promise((i, r) => { this.forceCompilation(e, () => { i(); }, t, (s) => { r(s); }); }); } /** * Marks a define in the material to indicate that it needs to be re-computed * @param flag defines a flag used to determine which parts of the material have to be marked as dirty */ markAsDirty(e) { this.getScene().blockMaterialDirtyMechanism || this._blockDirtyMechanism || (At._DirtyCallbackArray.length = 0, e & At.TextureDirtyFlag && At._DirtyCallbackArray.push(At._TextureDirtyCallBack), e & At.LightDirtyFlag && At._DirtyCallbackArray.push(At._LightsDirtyCallBack), e & At.FresnelDirtyFlag && At._DirtyCallbackArray.push(At._FresnelDirtyCallBack), e & At.AttributesDirtyFlag && At._DirtyCallbackArray.push(At._AttributeDirtyCallBack), e & At.MiscDirtyFlag && At._DirtyCallbackArray.push(At._MiscDirtyCallBack), e & At.PrePassDirtyFlag && At._DirtyCallbackArray.push(At._PrePassDirtyCallBack), At._DirtyCallbackArray.length && this._markAllSubMeshesAsDirty(At._RunDirtyCallBacks), this.getScene().resetCachedMaterial()); } /** * Resets the draw wrappers cache for all submeshes that are using this material */ resetDrawCache() { const e = this.getScene().meshes; for (const t of e) if (t.subMeshes) for (const i of t.subMeshes) i.getMaterial() === this && i.resetDrawCache(); } /** * Marks all submeshes of a material to indicate that their material defines need to be re-calculated * @param func defines a function which checks material defines against the submeshes */ _markAllSubMeshesAsDirty(e) { if (this.getScene().blockMaterialDirtyMechanism || this._blockDirtyMechanism) return; const t = this.getScene().meshes; for (const i of t) if (i.subMeshes) { for (const r of i.subMeshes) if (r.getMaterial(!1) === this) for (const s of r._drawWrappers) !s || !s.defines || !s.defines.markAllAsDirty || this._materialContext === s.materialContext && e(s.defines); } } /** * Indicates that the scene should check if the rendering now needs a prepass */ _markScenePrePassDirty() { if (this.getScene().blockMaterialDirtyMechanism || this._blockDirtyMechanism) return; const e = this.getScene().enablePrePassRenderer(); e && e.markAsDirty(); } /** * Indicates that we need to re-calculated for all submeshes */ _markAllSubMeshesAsAllDirty() { this._markAllSubMeshesAsDirty(At._AllDirtyCallBack); } /** * Indicates that image processing needs to be re-calculated for all submeshes */ _markAllSubMeshesAsImageProcessingDirty() { this._markAllSubMeshesAsDirty(At._ImageProcessingDirtyCallBack); } /** * Indicates that textures need to be re-calculated for all submeshes */ _markAllSubMeshesAsTexturesDirty() { this._markAllSubMeshesAsDirty(At._TextureDirtyCallBack); } /** * Indicates that fresnel needs to be re-calculated for all submeshes */ _markAllSubMeshesAsFresnelDirty() { this._markAllSubMeshesAsDirty(At._FresnelDirtyCallBack); } /** * Indicates that fresnel and misc need to be re-calculated for all submeshes */ _markAllSubMeshesAsFresnelAndMiscDirty() { this._markAllSubMeshesAsDirty(At._FresnelAndMiscDirtyCallBack); } /** * Indicates that lights need to be re-calculated for all submeshes */ _markAllSubMeshesAsLightsDirty() { 
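// Sketch for forceCompilationAsync and markAsDirty above (comment only, placeholder names,
// to be run inside an async function):
//
//   await material.forceCompilationAsync(mesh, { useInstances: true, clipPlane: false });
//   material.markAsDirty(At.TextureDirtyFlag | At.MiscDirtyFlag);  // fans out per-flag callbacks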
this._markAllSubMeshesAsDirty(At._LightsDirtyCallBack); } /** * Indicates that attributes need to be re-calculated for all submeshes */ _markAllSubMeshesAsAttributesDirty() { this._markAllSubMeshesAsDirty(At._AttributeDirtyCallBack); } /** * Indicates that misc needs to be re-calculated for all submeshes */ _markAllSubMeshesAsMiscDirty() { this._markAllSubMeshesAsDirty(At._MiscDirtyCallBack); } /** * Indicates that prepass needs to be re-calculated for all submeshes */ _markAllSubMeshesAsPrePassDirty() { this._markAllSubMeshesAsDirty(At._MiscDirtyCallBack); } /** * Indicates that textures and misc need to be re-calculated for all submeshes */ _markAllSubMeshesAsTexturesAndMiscDirty() { this._markAllSubMeshesAsDirty(At._TextureAndMiscDirtyCallBack); } _checkScenePerformancePriority() { if (this._scene.performancePriority !== $A.BackwardCompatible) { this.checkReadyOnlyOnce = !0; const e = this._scene.onScenePerformancePriorityChangedObservable.addOnce(() => { this.checkReadyOnlyOnce = !1; }); this.onDisposeObservable.add(() => { this._scene.onScenePerformancePriorityChangedObservable.remove(e); }); } } /** * Sets the required values to the prepass renderer. * @param prePassRenderer defines the prepass renderer to setup. * @returns true if the pre pass is needed. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars setPrePassRenderer(e) { return !1; } /** * Disposes the material * @param forceDisposeEffect specifies if effects should be forcefully disposed * @param forceDisposeTextures specifies if textures should be forcefully disposed * @param notBoundToMesh specifies if the material that is being disposed is known to be not bound to any mesh */ dispose(e, t, i) { const r = this.getScene(); if (r.stopAnimation(this), r.freeProcessedMaterials(), r.removeMaterial(this), this._eventInfo.forceDisposeTextures = t, this._callbackPluginEventGeneric(xh.Disposed, this._eventInfo), this._parentContainer) { const s = this._parentContainer.materials.indexOf(this); s > -1 && this._parentContainer.materials.splice(s, 1), this._parentContainer = null; } if (i !== !0) if (this.meshMap) for (const s in this.meshMap) { const n = this.meshMap[s]; n && (n.material = null, this.releaseVertexArrayObject(n, e)); } else { const s = r.meshes; for (const n of s) n.material === this && !n.sourceMesh && (n.material = null, this.releaseVertexArrayObject(n, e)); } this._uniformBuffer.dispose(), e && this._drawWrapper.effect && (this._storeEffectOnSubMeshes || this._drawWrapper.effect.dispose(), this._drawWrapper.effect = null), this.metadata = null, this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this._onBindObservable && this._onBindObservable.clear(), this._onUnBindObservable && this._onUnBindObservable.clear(), this._onEffectCreatedObservable && this._onEffectCreatedObservable.clear(), this._eventInfo && (this._eventInfo = {}); } /** * @internal */ // eslint-disable-next-line @typescript-eslint/naming-convention releaseVertexArrayObject(e, t) { const i = e.geometry; if (i) if (this._storeEffectOnSubMeshes) { if (e.subMeshes) for (const r of e.subMeshes) i._releaseVertexArrayObject(r.effect), t && r.effect && r.effect.dispose(); } else i._releaseVertexArrayObject(this._drawWrapper.effect); } /** * Serializes this material * @returns the serialized material object */ serialize() { const e = St.Serialize(this); return e.stencil = this.stencil.serialize(), e.uniqueId = this.uniqueId, this._serializePlugins(e), e; } _serializePlugins(e) { if (e.plugins = {}, 
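// Sketch for getBindedMeshes / dispose above (comment only, placeholder name):
//
//   material.getBindedMeshes();     // meshes currently pointing at this material
//   material.dispose(/* forceDisposeEffect */ true, /* forceDisposeTextures */ false);
//   // dispose also nulls out the material on those meshes and clears its observables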
this.pluginManager) for (const t of this.pluginManager._plugins) e.plugins[t.getClassName()] = t.serialize(); } /** * Creates a material from parsed material data * @param parsedMaterial defines parsed material data * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures * @returns a new material */ static Parse(e, t, i) { if (!e.customType) e.customType = "BABYLON.StandardMaterial"; else if (e.customType === "BABYLON.PBRMaterial" && e.overloadedAlbedo && (e.customType = "BABYLON.LegacyPBRMaterial", !BABYLON.LegacyPBRMaterial)) return Ce.Error("Your scene is trying to load a legacy version of the PBRMaterial, please, include it from the materials library."), null; const s = Ve.Instantiate(e.customType).Parse(e, t, i); return s._loadedUniqueId = e.uniqueId, s; } static _parsePlugins(e, t, i, r) { var s; if (e.plugins) for (const n in e.plugins) { const a = e.plugins[n]; let l = (s = t.pluginManager) === null || s === void 0 ? void 0 : s.getPlugin(a.name); if (!l) { const o = Ve.Instantiate("BABYLON." + n); o && (l = new o(t)); } l == null || l.parse(a, i, r); } } } At.TriangleFillMode = 0; At.WireFrameFillMode = 1; At.PointFillMode = 2; At.PointListDrawMode = 3; At.LineListDrawMode = 4; At.LineLoopDrawMode = 5; At.LineStripDrawMode = 6; At.TriangleStripDrawMode = 7; At.TriangleFanDrawMode = 8; At.ClockWiseSideOrientation = 0; At.CounterClockWiseSideOrientation = 1; At.TextureDirtyFlag = 1; At.LightDirtyFlag = 2; At.FresnelDirtyFlag = 4; At.AttributesDirtyFlag = 8; At.MiscDirtyFlag = 16; At.PrePassDirtyFlag = 32; At.AllDirtyFlag = 63; At.MATERIAL_OPAQUE = 0; At.MATERIAL_ALPHATEST = 1; At.MATERIAL_ALPHABLEND = 2; At.MATERIAL_ALPHATESTANDBLEND = 3; At.MATERIAL_NORMALBLENDMETHOD_WHITEOUT = 0; At.MATERIAL_NORMALBLENDMETHOD_RNM = 1; At.OnEventObservable = new Fe(); At._AllDirtyCallBack = (c) => c.markAllAsDirty(); At._ImageProcessingDirtyCallBack = (c) => c.markAsImageProcessingDirty(); At._TextureDirtyCallBack = (c) => c.markAsTexturesDirty(); At._FresnelDirtyCallBack = (c) => c.markAsFresnelDirty(); At._MiscDirtyCallBack = (c) => c.markAsMiscDirty(); At._PrePassDirtyCallBack = (c) => c.markAsPrePassDirty(); At._LightsDirtyCallBack = (c) => c.markAsLightDirty(); At._AttributeDirtyCallBack = (c) => c.markAsAttributesDirty(); At._FresnelAndMiscDirtyCallBack = (c) => { At._FresnelDirtyCallBack(c), At._MiscDirtyCallBack(c); }; At._TextureAndMiscDirtyCallBack = (c) => { At._TextureDirtyCallBack(c), At._MiscDirtyCallBack(c); }; At._DirtyCallbackArray = []; At._RunDirtyCallBacks = (c) => { for (const e of At._DirtyCallbackArray) e(c); }; F([ W() ], At.prototype, "id", void 0); F([ W() ], At.prototype, "uniqueId", void 0); F([ W() ], At.prototype, "name", void 0); F([ W() ], At.prototype, "metadata", void 0); F([ W() ], At.prototype, "checkReadyOnEveryCall", void 0); F([ W() ], At.prototype, "checkReadyOnlyOnce", void 0); F([ W() ], At.prototype, "state", void 0); F([ W("alpha") ], At.prototype, "_alpha", void 0); F([ W("backFaceCulling") ], At.prototype, "_backFaceCulling", void 0); F([ W("cullBackFaces") ], At.prototype, "_cullBackFaces", void 0); F([ W() ], At.prototype, "sideOrientation", void 0); F([ W("alphaMode") ], At.prototype, "_alphaMode", void 0); F([ W() ], At.prototype, "_needDepthPrePass", void 0); F([ W() ], At.prototype, "disableDepthWrite", void 0); F([ W() ], At.prototype, "disableColorWrite", void 0); F([ W() ], At.prototype, "forceDepthWrite", void 0); F([ W() ], At.prototype, "depthFunction", void 0); F([ W() ], At.prototype, 
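// Sketch of the serialize / Parse round-trip above (comment only, placeholder names):
// Parse dispatches on json.customType, falling back to "BABYLON.StandardMaterial".
//
//   const json = material.serialize();
//   const restored = At.Parse(json, scene, /* rootUrl */ "");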
"separateCullingPass", void 0); F([ W("fogEnabled") ], At.prototype, "_fogEnabled", void 0); F([ W() ], At.prototype, "pointSize", void 0); F([ W() ], At.prototype, "zOffset", void 0); F([ W() ], At.prototype, "zOffsetUnits", void 0); F([ W() ], At.prototype, "pointsCloud", null); F([ W() ], At.prototype, "fillMode", null); F([ W() ], At.prototype, "useLogarithmicDepth", null); F([ W() ], At.prototype, "transparencyMode", null); class xm extends At { /** * Gets or Sets the list of Materials used within the multi material. * They need to be ordered according to the submeshes order in the associated mesh */ get subMaterials() { return this._subMaterials; } set subMaterials(e) { this._subMaterials = e, this._hookArray(e); } /** * Function used to align with Node.getChildren() * @returns the list of Materials used within the multi material */ getChildren() { return this.subMaterials; } /** * Instantiates a new Multi Material * A multi-material is used to apply different materials to different parts of the same object without the need of * separate meshes. This can be use to improve performances. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/multiMaterials * @param name Define the name in the scene * @param scene Define the scene the material belongs to */ constructor(e, t) { super(e, t, !0), this._waitingSubMaterialsUniqueIds = [], this.getScene().addMultiMaterial(this), this.subMaterials = [], this._storeEffectOnSubMeshes = !0; } _hookArray(e) { const t = e.push; e.push = (...r) => { const s = t.apply(e, r); return this._markAllSubMeshesAsTexturesDirty(), s; }; const i = e.splice; e.splice = (r, s) => { const n = i.apply(e, [r, s]); return this._markAllSubMeshesAsTexturesDirty(), n; }; } /** * Get one of the submaterial by its index in the submaterials array * @param index The index to look the sub material at * @returns The Material if the index has been defined */ getSubMaterial(e) { return e < 0 || e >= this.subMaterials.length ? this.getScene().defaultMaterial : this.subMaterials[e]; } /** * Get the list of active textures for the whole sub materials list. * @returns All the textures that will be used during the rendering */ getActiveTextures() { return super.getActiveTextures().concat(...this.subMaterials.map((e) => e ? e.getActiveTextures() : [])); } /** * Specifies if any sub-materials of this multi-material use a given texture. * @param texture Defines the texture to check against this multi-material's sub-materials. * @returns A boolean specifying if any sub-material of this multi-material uses the texture. */ hasTexture(e) { var t; if (super.hasTexture(e)) return !0; for (let i = 0; i < this.subMaterials.length; i++) if (!((t = this.subMaterials[i]) === null || t === void 0) && t.hasTexture(e)) return !0; return !1; } /** * Gets the current class name of the material e.g. "MultiMaterial" * Mainly use in serialization. 
* @returns the class name */ getClassName() { return "MultiMaterial"; } /** * Checks if the material is ready to render the requested sub mesh * @param mesh Define the mesh the submesh belongs to * @param subMesh Define the sub mesh to look readiness for * @param useInstances Define whether or not the material is used with instances * @returns true if ready, otherwise false */ isReadyForSubMesh(e, t, i) { for (let r = 0; r < this.subMaterials.length; r++) { const s = this.subMaterials[r]; if (s) { if (s._storeEffectOnSubMeshes) { if (!s.isReadyForSubMesh(e, t, i)) return !1; continue; } if (!s.isReady(e)) return !1; } } return !0; } /** * Clones the current material and its related sub materials * @param name Define the name of the newly cloned material * @param cloneChildren Define if submaterial will be cloned or shared with the parent instance * @returns the cloned material */ clone(e, t) { const i = new xm(e, this.getScene()); for (let r = 0; r < this.subMaterials.length; r++) { let s = null; const n = this.subMaterials[r]; t && n ? s = n.clone(e + "-" + n.name) : s = this.subMaterials[r], i.subMaterials.push(s); } return i; } /** * Serializes the materials into a JSON representation. * @returns the JSON representation */ serialize() { const e = {}; e.name = this.name, e.id = this.id, e.uniqueId = this.uniqueId, $s && (e.tags = $s.GetTags(this)), e.materialsUniqueIds = [], e.materials = []; for (let t = 0; t < this.subMaterials.length; t++) { const i = this.subMaterials[t]; i ? (e.materialsUniqueIds.push(i.uniqueId), e.materials.push(i.id)) : (e.materialsUniqueIds.push(null), e.materials.push(null)); } return e; } /** * Dispose the material and release its associated resources * @param forceDisposeEffect Define if we want to force disposing the associated effect (if false the shader is not released and could be reuse later on) * @param forceDisposeTextures Define if we want to force disposing the associated textures (if false, they will not be disposed and can still be use elsewhere in the app) * @param forceDisposeChildren Define if we want to force disposing the associated submaterials (if false, they will not be disposed and can still be use elsewhere in the app) */ dispose(e, t, i) { const r = this.getScene(); if (!r) return; if (i) for (let n = 0; n < this.subMaterials.length; n++) { const a = this.subMaterials[n]; a && a.dispose(e, t); } const s = r.multiMaterials.indexOf(this); s >= 0 && r.multiMaterials.splice(s, 1), super.dispose(e, t); } /** * Creates a MultiMaterial from parsed MultiMaterial data. * @param parsedMultiMaterial defines parsed MultiMaterial data. * @param scene defines the hosting scene * @returns a new MultiMaterial */ static ParseMultiMaterial(e, t) { const i = new xm(e.name, t); return i.id = e.id, i._loadedUniqueId = e.uniqueId, $s && $s.AddTagsTo(i, e.tags), e.materialsUniqueIds ? 
i._waitingSubMaterialsUniqueIds = e.materialsUniqueIds : e.materials.forEach((r) => i.subMaterials.push(t.getLastMaterialById(r))), i; } } Be("BABYLON.MultiMaterial", xm); class Ite { /** * Creates a new LOD level * @param distanceOrScreenCoverage defines either the distance or the screen coverage where this level should start being displayed * @param mesh defines the mesh to use to render this level */ constructor(e, t) { this.distanceOrScreenCoverage = e, this.mesh = t; } } class _K { } class yce { constructor() { this.visibleInstances = {}, this.batchCache = new _H(), this.batchCacheReplacementModeInFrozenMode = new _H(), this.instancesBufferSize = 32 * 16 * 4; } } class _H { constructor() { this.mustReturn = !1, this.visibleInstances = new Array(), this.renderSelf = [], this.hardwareInstancedRendering = []; } } class Cce { constructor() { this.instancesCount = 0, this.matrixBuffer = null, this.previousMatrixBuffer = null, this.matrixBufferSize = 32 * 16, this.matrixData = null, this.boundingVectors = [], this.worldMatrices = null; } } class xce { constructor() { this._areNormalsFrozen = !1, this._source = null, this.meshMap = null, this._preActivateId = -1, this._LODLevels = new Array(), this._useLODScreenCoverage = !1, this._effectiveMaterial = null, this._forcedInstanceCount = 0, this._overrideRenderingFillMode = null; } } class ke extends xr { /** * Gets the default side orientation. * @param orientation the orientation to value to attempt to get * @returns the default orientation * @internal */ static _GetDefaultSideOrientation(e) { return e || ke.FRONTSIDE; } /** * Determines if the LOD levels are intended to be calculated using screen coverage (surface area ratio) instead of distance. */ get useLODScreenCoverage() { return this._internalMeshDataInfo._useLODScreenCoverage; } set useLODScreenCoverage(e) { this._internalMeshDataInfo._useLODScreenCoverage = e, this._sortLODLevels(); } get computeBonesUsingShaders() { return this._internalAbstractMeshDataInfo._computeBonesUsingShaders; } set computeBonesUsingShaders(e) { this._internalAbstractMeshDataInfo._computeBonesUsingShaders !== e && (e && this._internalMeshDataInfo._sourcePositions && (this.setVerticesData(Y.PositionKind, this._internalMeshDataInfo._sourcePositions, !0), this._internalMeshDataInfo._sourceNormals && this.setVerticesData(Y.NormalKind, this._internalMeshDataInfo._sourceNormals, !0), this._internalMeshDataInfo._sourcePositions = null, this._internalMeshDataInfo._sourceNormals = null), this._internalAbstractMeshDataInfo._computeBonesUsingShaders = e, this._markSubMeshesAsAttributesDirty()); } /** * An event triggered before rendering the mesh */ get onBeforeRenderObservable() { return this._internalMeshDataInfo._onBeforeRenderObservable || (this._internalMeshDataInfo._onBeforeRenderObservable = new Fe()), this._internalMeshDataInfo._onBeforeRenderObservable; } /** * An event triggered before binding the mesh */ get onBeforeBindObservable() { return this._internalMeshDataInfo._onBeforeBindObservable || (this._internalMeshDataInfo._onBeforeBindObservable = new Fe()), this._internalMeshDataInfo._onBeforeBindObservable; } /** * An event triggered after rendering the mesh */ get onAfterRenderObservable() { return this._internalMeshDataInfo._onAfterRenderObservable || (this._internalMeshDataInfo._onAfterRenderObservable = new Fe()), this._internalMeshDataInfo._onAfterRenderObservable; } /** * An event triggeredbetween rendering pass when using separateCullingPass = true */ get onBetweenPassObservable() { return 
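/* Hedged sketch of the per-mesh render observables documented above (assumes an
   existing `mesh`; the add/remove API is the Observable class bundled earlier in
   this file):
   const observer = mesh.onBeforeRenderObservable.add((m) => {
     // runs just before `m` is drawn each frame
   });
   mesh.onBeforeRenderObservable.remove(observer); // detach when no longer needed */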
this._internalMeshDataInfo._onBetweenPassObservable || (this._internalMeshDataInfo._onBetweenPassObservable = new Fe()), this._internalMeshDataInfo._onBetweenPassObservable; } /** * An event triggered before drawing the mesh */ get onBeforeDrawObservable() { return this._internalMeshDataInfo._onBeforeDrawObservable || (this._internalMeshDataInfo._onBeforeDrawObservable = new Fe()), this._internalMeshDataInfo._onBeforeDrawObservable; } /** * Sets a callback to call before drawing the mesh. It is recommended to use onBeforeDrawObservable instead */ set onBeforeDraw(e) { this._onBeforeDrawObserver && this.onBeforeDrawObservable.remove(this._onBeforeDrawObserver), this._onBeforeDrawObserver = this.onBeforeDrawObservable.add(e); } get hasInstances() { return this.instances.length > 0; } get hasThinInstances() { return (this.forcedInstanceCount || this._thinInstanceDataStorage.instancesCount || 0) > 0; } /** * Gets or sets the forced number of instances to display. * If 0 (default value), the number of instances is not forced and depends on the draw type * (regular / instance / thin instances mesh) */ get forcedInstanceCount() { return this._internalMeshDataInfo._forcedInstanceCount; } set forcedInstanceCount(e) { this._internalMeshDataInfo._forcedInstanceCount = e; } /** * Use this property to override the Material's fillMode value */ get overrideRenderingFillMode() { return this._internalMeshDataInfo._overrideRenderingFillMode; } set overrideRenderingFillMode(e) { this._internalMeshDataInfo._overrideRenderingFillMode = e; } /** * Gets the source mesh (the one used to clone this one from) */ get source() { return this._internalMeshDataInfo._source; } /** * Gets the list of clones of this mesh * The scene must have been constructed with useClonedMeshMap=true for this to work! 
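* @example
* // Hedged sketch (assumes an existing `mesh` in a scene created with the
* // default useClonedMeshMap = true):
* const copy = mesh.clone("copy");
* console.log(copy.source === mesh); // true: a clone keeps a link to its source
* console.log(mesh.cloneMeshMap);    // uniqueId -> clone map, including `copy`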
* Note that useClonedMeshMap=true is the default setting */ get cloneMeshMap() { return this._internalMeshDataInfo.meshMap; } /** * Gets or sets a boolean indicating that this mesh does not use index buffer */ get isUnIndexed() { return this._unIndexed; } set isUnIndexed(e) { this._unIndexed !== e && (this._unIndexed = e, this._markSubMeshesAsAttributesDirty()); } /** Gets the array buffer used to store the instanced buffer used for instances' world matrices */ get worldMatrixInstancedBuffer() { return this._instanceDataStorage.instancesData; } /** Gets the array buffer used to store the instanced buffer used for instances' previous world matrices */ get previousWorldMatrixInstancedBuffer() { return this._instanceDataStorage.instancesPreviousData; } /** Gets or sets a boolean indicating that the update of the instance buffer of the world matrices is manual */ get manualUpdateOfWorldMatrixInstancedBuffer() { return this._instanceDataStorage.manualUpdate; } set manualUpdateOfWorldMatrixInstancedBuffer(e) { this._instanceDataStorage.manualUpdate = e; } /** Gets or sets a boolean indicating that the update of the instance buffer of the world matrices is manual */ get manualUpdateOfPreviousWorldMatrixInstancedBuffer() { return this._instanceDataStorage.previousManualUpdate; } set manualUpdateOfPreviousWorldMatrixInstancedBuffer(e) { this._instanceDataStorage.previousManualUpdate = e; } /** Gets or sets a boolean indicating that the update of the instance buffer of the world matrices must be performed in all cases (and notably even in frozen mode) */ get forceWorldMatrixInstancedBufferUpdate() { return this._instanceDataStorage.forceMatrixUpdates; } set forceWorldMatrixInstancedBufferUpdate(e) { this._instanceDataStorage.forceMatrixUpdates = e; } /** * @constructor * @param name The value used by scene.getMeshByName() to do a lookup. * @param scene The scene to add this mesh to. * @param parent The parent of this mesh, if it has one * @param source An optional Mesh from which geometry is shared, cloned. * @param doNotCloneChildren When cloning, skip cloning child meshes of source, default False. * When false, achieved by calling a clone(), also passing False. * This will make creation of children, recursive. * @param clonePhysicsImpostor When cloning, include cloning mesh physics impostor, default True. */ constructor(e, t = null, i = null, r = null, s, n = !0) { if (super(e, t), this._internalMeshDataInfo = new xce(), this.delayLoadState = 0, this.instances = [], this._creationDataStorage = null, this._geometry = null, this._instanceDataStorage = new yce(), this._thinInstanceDataStorage = new Cce(), this._shouldGenerateFlatShading = !1, this._originalBuilderSideOrientation = ke.DEFAULTSIDE, this.overrideMaterialSideOrientation = null, this.ignoreCameraMaxZ = !1, t = this.getScene(), this._onBeforeDraw = (a, l, o) => { a && o && (this._uniformBuffer ? 
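/* Hedged sketch of the manual world-matrix instance-buffer update described by
   the properties documented above (assumes an existing `mesh` with instances and
   an existing `scene`; the buffer is a Float32Array of 16 floats per instance and
   may be null until the instances have been rendered at least once):
   mesh.manualUpdateOfWorldMatrixInstancedBuffer = true;
   scene.onBeforeRenderObservable.add(() => {
     const buffer = mesh.worldMatrixInstancedBuffer;
     if (buffer) {
       // write each instance's world matrix into `buffer` here
     }
   }); */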
this.transferToEffect(l) : o.bindOnlyWorldMatrix(l)); }, r) { if (r._geometry && r._geometry.applyToMesh(this), id.DeepCopy(r, this, [ "name", "material", "skeleton", "instances", "parent", "uniqueId", "source", "metadata", "morphTargetManager", "hasInstances", "worldMatrixInstancedBuffer", "previousWorldMatrixInstancedBuffer", "hasLODLevels", "geometry", "isBlocked", "areNormalsFrozen", "facetNb", "isFacetDataEnabled", "lightSources", "useBones", "isAnInstance", "collider", "edgesRenderer", "forward", "up", "right", "absolutePosition", "absoluteScaling", "absoluteRotationQuaternion", "isWorldMatrixFrozen", "nonUniformScaling", "behaviors", "worldMatrixFromCache", "hasThinInstances", "cloneMeshMap", "hasBoundingInfo", "physicsBody", "physicsImpostor" ], ["_poseMatrix"]), this._internalMeshDataInfo._source = r, t.useClonedMeshMap && (r._internalMeshDataInfo.meshMap || (r._internalMeshDataInfo.meshMap = {}), r._internalMeshDataInfo.meshMap[this.uniqueId] = this), this._originalBuilderSideOrientation = r._originalBuilderSideOrientation, this._creationDataStorage = r._creationDataStorage, r._ranges) { const a = r._ranges; for (const l in a) Object.prototype.hasOwnProperty.call(a, l) && a[l] && this.createAnimationRange(l, a[l].from, a[l].to); } if (r.metadata && r.metadata.clone ? this.metadata = r.metadata.clone() : this.metadata = r.metadata, this._internalMetadata = r._internalMetadata, $s && $s.HasTags(r) && $s.AddTagsTo(this, $s.GetTags(r, !0)), this.setEnabled(r.isEnabled(!1)), this.parent = r.parent, this.setPivotMatrix(r.getPivotMatrix()), this.id = e + "." + r.id, this.material = r.material, !s) { const a = r.getDescendants(!0); for (let l = 0; l < a.length; l++) { const o = a[l]; o.clone && o.clone(e + "." + o.name, this); } } if (r.morphTargetManager && (this.morphTargetManager = r.morphTargetManager), t.getPhysicsEngine) { const a = t.getPhysicsEngine(); if (n && a) if (a.getPluginVersion() === 1) { const l = a.getImpostorForPhysicsObject(r); l && (this.physicsImpostor = l.clone(this)); } else a.getPluginVersion() === 2 && r.physicsBody && r.physicsBody.clone(this); } for (let a = 0; a < t.particleSystems.length; a++) { const l = t.particleSystems[a]; l.emitter === r && l.clone(l.name, this); } this.skeleton = r.skeleton, this.refreshBoundingInfo(!0, !0), this.computeWorldMatrix(!0); } i !== null && (this.parent = i), this._instanceDataStorage.hardwareInstancedRendering = this.getEngine().getCaps().instancedArrays, this._internalMeshDataInfo._onMeshReadyObserverAdded = (a) => { a.unregisterOnNextCall = !0, this.isReady(!0) ? this.onMeshReadyObservable.notifyObservers(this) : this._internalMeshDataInfo._checkReadinessObserver || (this._internalMeshDataInfo._checkReadinessObserver = this._scene.onBeforeRenderObservable.add(() => { this.isReady(!0) && (this._scene.onBeforeRenderObservable.remove(this._internalMeshDataInfo._checkReadinessObserver), this._internalMeshDataInfo._checkReadinessObserver = null, this.onMeshReadyObservable.notifyObservers(this)); })); }, this.onMeshReadyObservable = new Fe(this._internalMeshDataInfo._onMeshReadyObserverAdded), r && r.onClonedObservable.notifyObservers(this); } instantiateHierarchy(e = null, t, i) { const r = this.getTotalVertices() === 0 || t && t.doNotInstantiate && (t.doNotInstantiate === !0 || t.doNotInstantiate(this)) ? 
this.clone("Clone of " + (this.name || this.id), e || this.parent, !0) : this.createInstance("instance of " + (this.name || this.id)); r.parent = e || this.parent, r.position = this.position.clone(), r.scaling = this.scaling.clone(), this.rotationQuaternion ? r.rotationQuaternion = this.rotationQuaternion.clone() : r.rotation = this.rotation.clone(), i && i(this, r); for (const s of this.getChildTransformNodes(!0)) s.getClassName() === "InstancedMesh" && r.getClassName() === "Mesh" && s.sourceMesh === this ? s.instantiateHierarchy(r, { doNotInstantiate: t && t.doNotInstantiate || !1, newSourcedMesh: r }, i) : s.instantiateHierarchy(r, t, i); return r; } /** * Gets the class name * @returns the string "Mesh". */ getClassName() { return "Mesh"; } /** @internal */ get _isMesh() { return !0; } /** * Returns a description of this mesh * @param fullDetails define if full details about this mesh must be used * @returns a descriptive string representing this mesh */ toString(e) { let t = super.toString(e); if (t += ", n vertices: " + this.getTotalVertices(), t += ", parent: " + (this._waitingParentId ? this._waitingParentId : this.parent ? this.parent.name : "NONE"), this.animations) for (let i = 0; i < this.animations.length; i++) t += ", animation[0]: " + this.animations[i].toString(e); if (e) if (this._geometry) { const i = this.getIndices(), r = this.getVerticesData(Y.PositionKind); r && i && (t += ", flat shading: " + (r.length / 3 === i.length ? "YES" : "NO")); } else t += ", flat shading: UNKNOWN"; return t; } /** @internal */ _unBindEffect() { super._unBindEffect(); for (const e of this.instances) e._unBindEffect(); } /** * Gets a boolean indicating if this mesh has LOD */ get hasLODLevels() { return this._internalMeshDataInfo._LODLevels.length > 0; } /** * Gets the list of MeshLODLevel associated with the current mesh * @returns an array of MeshLODLevel */ getLODLevels() { return this._internalMeshDataInfo._LODLevels; } _sortLODLevels() { const e = this._internalMeshDataInfo._useLODScreenCoverage ? -1 : 1; this._internalMeshDataInfo._LODLevels.sort((t, i) => t.distanceOrScreenCoverage < i.distanceOrScreenCoverage ? e : t.distanceOrScreenCoverage > i.distanceOrScreenCoverage ? -e : 0); } /** * Add a mesh as LOD level triggered at the given distance. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/LOD * @param distanceOrScreenCoverage Either distance from the center of the object to show this level or the screen coverage if `useScreenCoverage` is set to `true`. * If screen coverage, value is a fraction of the screen's total surface, between 0 and 1. * Example Playground for distance https://playground.babylonjs.com/#QE7KM#197 * Example Playground for screen coverage https://playground.babylonjs.com/#QE7KM#196 * @param mesh The mesh to be added as LOD level (can be null) * @returns This mesh (for chaining) */ addLODLevel(e, t) { if (t && t._masterMesh) return Ce.Warn("You cannot use a mesh as LOD level twice"), this; const i = new Ite(e, t); return this._internalMeshDataInfo._LODLevels.push(i), t && (t._masterMesh = this), this._sortLODLevels(), this; } /** * Returns the LOD level mesh at the passed distance or null if not found. 
* @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/LOD * @param distance The distance from the center of the object to show this level * @returns a Mesh or `null` */ getLODLevelAtDistance(e) { const t = this._internalMeshDataInfo; for (let i = 0; i < t._LODLevels.length; i++) { const r = t._LODLevels[i]; if (r.distanceOrScreenCoverage === e) return r.mesh; } return null; } /** * Remove a mesh from the LOD array * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/LOD * @param mesh defines the mesh to be removed * @returns This mesh (for chaining) */ removeLODLevel(e) { const t = this._internalMeshDataInfo; for (let i = 0; i < t._LODLevels.length; i++) t._LODLevels[i].mesh === e && (t._LODLevels.splice(i, 1), e && (e._masterMesh = null)); return this._sortLODLevels(), this; } /** * Returns the registered LOD mesh distant from the parameter `camera` position if any, else returns the current mesh. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/LOD * @param camera defines the camera to use to compute distance * @param boundingSphere defines a custom bounding sphere to use instead of the one from this mesh * @returns This mesh (for chaining) */ getLOD(e, t) { const i = this._internalMeshDataInfo; if (!i._LODLevels || i._LODLevels.length === 0) return this; const r = t || this.getBoundingInfo().boundingSphere, s = e.mode === Ai.ORTHOGRAPHIC_CAMERA ? e.minZ : r.centerWorld.subtract(e.globalPosition).length(); let n = s, a = 1; if (i._useLODScreenCoverage) { const l = e.screenArea; let o = r.radiusWorld * e.minZ / s; o = o * o * Math.PI, n = o / l, a = -1; } if (a * i._LODLevels[i._LODLevels.length - 1].distanceOrScreenCoverage > a * n) return this.onLODLevelSelection && this.onLODLevelSelection(n, this, this), this; for (let l = 0; l < i._LODLevels.length; l++) { const o = i._LODLevels[l]; if (a * o.distanceOrScreenCoverage < a * n) { if (o.mesh) { if (o.mesh.delayLoadState === 4) return o.mesh._checkDelayState(), this; if (o.mesh.delayLoadState === 2) return this; o.mesh._preActivate(), o.mesh._updateSubMeshesBoundingInfo(this.worldMatrixFromCache); } return this.onLODLevelSelection && this.onLODLevelSelection(n, this, o.mesh), o.mesh; } } return this.onLODLevelSelection && this.onLODLevelSelection(n, this, this), this; } /** * Gets the mesh internal Geometry object */ get geometry() { return this._geometry; } /** * Returns the total number of vertices within the mesh geometry or zero if the mesh has no geometry. * @returns the total number of vertices */ getTotalVertices() { return this._geometry === null || this._geometry === void 0 ? 0 : this._geometry.getTotalVertices(); } /** * Returns the content of an associated vertex buffer * @param kind defines which buffer to read from (positions, indices, normals, etc). 
Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param copyWhenShared defines a boolean indicating that if the mesh geometry is shared among some other meshes, the returned array is a copy of the internal one * @param forceCopy defines a boolean forcing the copy of the buffer no matter what the value of copyWhenShared is * @param bypassInstanceData defines a boolean indicating that the function should not take into account the instance data (applies only if the mesh has instances). Default: false * @returns a FloatArray or null if the mesh has no geometry or no vertex buffer for this kind. */ getVerticesData(e, t, i, r) { var s, n; if (!this._geometry) return null; let a = r || (n = (s = this._userInstancedBuffersStorage) === null || s === void 0 ? void 0 : s.vertexBuffers[e]) === null || n === void 0 ? void 0 : n.getFloatData( this.instances.length + 1, // +1 because the master mesh is not included in the instances array i || t && this._geometry.meshes.length !== 1 ); return a || (a = this._geometry.getVerticesData(e, t, i)), a; } /** * Returns the mesh VertexBuffer object from the requested `kind` * @param kind defines which buffer to read from (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.NormalKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param bypassInstanceData defines a boolean indicating that the function should not take into account the instance data (applies only if the mesh has instances). Default: false * @returns a FloatArray or null if the mesh has no vertex buffer for this kind. */ getVertexBuffer(e, t) { var i, r; return this._geometry ? (r = t || (i = this._userInstancedBuffersStorage) === null || i === void 0 ? void 0 : i.vertexBuffers[e]) !== null && r !== void 0 ? r : this._geometry.getVertexBuffer(e) : null; } /** * Tests if a specific vertex buffer is associated with this mesh * @param kind defines which buffer to check (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.NormalKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param bypassInstanceData defines a boolean indicating that the function should not take into account the instance data (applies only if the mesh has instances). Default: false * @returns a boolean */ isVerticesDataPresent(e, t) { var i; return this._geometry ? !t && ((i = this._userInstancedBuffersStorage) === null || i === void 0 ? void 0 : i.vertexBuffers[e]) !== void 0 || this._geometry.isVerticesDataPresent(e) : this._delayInfo ? 
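/* Hedged sketch of reading geometry data through the accessors documented above
   (assumes an existing `mesh`; BABYLON.VertexBuffer.PositionKind / NormalKind are
   the public names of the kind constants listed above):
   const positions = mesh.getVerticesData(BABYLON.VertexBuffer.PositionKind);
   if (mesh.isVerticesDataPresent(BABYLON.VertexBuffer.NormalKind)) {
     const normals = mesh.getVerticesData(BABYLON.VertexBuffer.NormalKind);
   } */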
this._delayInfo.indexOf(e) !== -1 : !1; } /** * Returns a boolean defining if the vertex data for the requested `kind` is updatable. * @param kind defines which buffer to check (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param bypassInstanceData defines a boolean indicating that the function should not take into account the instance data (applies only if the mesh has instances). Default: false * @returns a boolean */ isVertexBufferUpdatable(e, t) { var i; if (!this._geometry) return this._delayInfo ? this._delayInfo.indexOf(e) !== -1 : !1; if (!t) { const r = (i = this._userInstancedBuffersStorage) === null || i === void 0 ? void 0 : i.vertexBuffers[e]; if (r) return r.isUpdatable(); } return this._geometry.isVertexBufferUpdatable(e); } /** * Returns a string which contains the list of existing `kinds` of Vertex Data associated with this mesh. * @param bypassInstanceData defines a boolean indicating that the function should not take into account the instance data (applies only if the mesh has instances). Default: false * @returns an array of strings */ getVerticesDataKinds(e) { if (!this._geometry) { const i = []; return this._delayInfo && this._delayInfo.forEach(function(r) { i.push(r); }), i; } const t = this._geometry.getVerticesDataKinds(); if (!e && this._userInstancedBuffersStorage) for (const i in this._userInstancedBuffersStorage.vertexBuffers) t.indexOf(i) === -1 && t.push(i); return t; } /** * Returns a positive integer : the total number of indices in this mesh geometry. * @returns the numner of indices or zero if the mesh has no geometry. */ getTotalIndices() { return this._geometry ? this._geometry.getTotalIndices() : 0; } /** * Returns an array of integers or a typed array (Int32Array, Uint32Array, Uint16Array) populated with the mesh indices. * @param copyWhenShared If true (default false) and and if the mesh geometry is shared among some other meshes, the returned array is a copy of the internal one. * @param forceCopy defines a boolean indicating that the returned array must be cloned upon returning it * @returns the indices array or an empty array if the mesh has no geometry */ getIndices(e, t) { return this._geometry ? 
this._geometry.getIndices(e, t) : []; } get isBlocked() { return this._masterMesh !== null && this._masterMesh !== void 0; } /** * Determine if the current mesh is ready to be rendered * @param completeCheck defines if a complete check (including materials and lights) has to be done (false by default) * @param forceInstanceSupport will check if the mesh will be ready when used with instances (false by default) * @returns true if all associated assets are ready (material, textures, shaders) */ isReady(e = !1, t = !1) { var i, r, s, n, a, l, o; if (this.delayLoadState === 2 || !super.isReady(e)) return !1; if (!this.subMeshes || this.subMeshes.length === 0 || !e) return !0; const u = this.getEngine(), h = this.getScene(), d = t || u.getCaps().instancedArrays && (this.instances.length > 0 || this.hasThinInstances); this.computeWorldMatrix(); const f = this.material || h.defaultMaterial; if (f) { if (f._storeEffectOnSubMeshes) for (const m of this.subMeshes) { const _ = m.getMaterial(); if (_) { if (_._storeEffectOnSubMeshes) { if (!_.isReadyForSubMesh(this, m, d)) return !1; } else if (!_.isReady(this, d)) return !1; } } else if (!f.isReady(this, d)) return !1; } const p = u.currentRenderPassId; for (const m of this.lightSources) { const _ = m.getShadowGenerators(); if (!_) continue; const v = _.values(); for (let C = v.next(); C.done !== !0; C = v.next()) { const x = C.value; if (x && (!(!((i = x.getShadowMap()) === null || i === void 0) && i.renderList) || !((r = x.getShadowMap()) === null || r === void 0) && r.renderList && ((n = (s = x.getShadowMap()) === null || s === void 0 ? void 0 : s.renderList) === null || n === void 0 ? void 0 : n.indexOf(this)) !== -1)) { const S = (a = x.getShadowMap().renderPassIds) !== null && a !== void 0 ? a : [u.currentRenderPassId]; for (let M = 0; M < S.length; ++M) { u.currentRenderPassId = S[M]; for (const R of this.subMeshes) if (!x.isReady(R, d, (o = (l = R.getMaterial()) === null || l === void 0 ? void 0 : l.needAlphaBlendingForMesh(this)) !== null && o !== void 0 ? o : !1)) return u.currentRenderPassId = p, !1; } u.currentRenderPassId = p; } } } for (const m of this._internalMeshDataInfo._LODLevels) if (m.mesh && !m.mesh.isReady(d)) return !1; return !0; } /** * Gets a boolean indicating if the normals aren't to be recomputed on next mesh `positions` array update. This property is pertinent only for updatable parametric shapes. */ get areNormalsFrozen() { return this._internalMeshDataInfo._areNormalsFrozen; } /** * This function affects parametric shapes on vertex position update only : ribbons, tubes, etc. It has no effect at all on other shapes. It prevents the mesh normals from being recomputed on next `positions` array update. * @returns the current mesh */ freezeNormals() { return this._internalMeshDataInfo._areNormalsFrozen = !0, this; } /** * This function affects parametric shapes on vertex position update only : ribbons, tubes, etc. It has no effect at all on other shapes. It reactivates the mesh normals computation if it was previously frozen * @returns the current mesh */ unfreezeNormals() { return this._internalMeshDataInfo._areNormalsFrozen = !1, this; } /** * Sets a value overriding the instance count. Only applicable when custom instanced InterleavedVertexBuffer are used rather than InstancedMeshs */ set overridenInstanceCount(e) { this._instanceDataStorage.overridenInstanceCount = e; } // Methods /** @internal */ _preActivate() { const e = this._internalMeshDataInfo, t = this.getScene().getRenderId(); return e._preActivateId === t ? 
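/* Hedged sketch of the freezeNormals()/unfreezeNormals() pair documented above,
   for updatable parametric shapes (ribbons, tubes, ...) that are rebuilt many
   times (assumes an existing updatable `ribbon` mesh):
   ribbon.freezeNormals();   // skip normal recomputation while updating positions
   // ...perform several positions updates...
   ribbon.unfreezeNormals(); // normals are recomputed on the next positions update */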
this : (e._preActivateId = t, this._instanceDataStorage.visibleInstances = null, this); } /** * @internal */ _preActivateForIntermediateRendering(e) { return this._instanceDataStorage.visibleInstances && (this._instanceDataStorage.visibleInstances.intermediateDefaultRenderId = e), this; } /** * @internal */ _registerInstanceForRenderId(e, t) { return this._instanceDataStorage.visibleInstances || (this._instanceDataStorage.visibleInstances = { defaultRenderId: t, selfDefaultRenderId: this._renderId }), this._instanceDataStorage.visibleInstances[t] || (this._instanceDataStorage.previousRenderId !== void 0 && this._instanceDataStorage.isFrozen && (this._instanceDataStorage.visibleInstances[this._instanceDataStorage.previousRenderId] = null), this._instanceDataStorage.previousRenderId = t, this._instanceDataStorage.visibleInstances[t] = new Array()), this._instanceDataStorage.visibleInstances[t].push(e), this; } _afterComputeWorldMatrix() { super._afterComputeWorldMatrix(), this.hasThinInstances && (this.doNotSyncBoundingInfo || this.thinInstanceRefreshBoundingInfo(!1)); } /** @internal */ _postActivate() { this.edgesShareWithInstances && this.edgesRenderer && this.edgesRenderer.isEnabled && this._renderingGroup && (this._renderingGroup._edgesRenderers.pushNoDuplicate(this.edgesRenderer), this.edgesRenderer.customInstances.push(this.getWorldMatrix())); } /** * This method recomputes and sets a new BoundingInfo to the mesh unless it is locked. * This means the mesh underlying bounding box and sphere are recomputed. * @param applySkeleton defines whether to apply the skeleton before computing the bounding info * @param applyMorph defines whether to apply the morph target before computing the bounding info * @returns the current mesh */ refreshBoundingInfo(e = !1, t = !1) { if (this.hasBoundingInfo && this.getBoundingInfo().isLocked) return this; const i = this.geometry ? this.geometry.boundingBias : null; return this._refreshBoundingInfo(this._getPositionData(e, t), i), this; } /** * @internal */ _createGlobalSubMesh(e) { const t = this.getTotalVertices(); if (!t || !this.getIndices()) return null; if (this.subMeshes && this.subMeshes.length > 0) { const i = this.getIndices(); if (!i) return null; const r = i.length; let s = !1; if (e) s = !0; else for (const n of this.subMeshes) { if (n.indexStart + n.indexCount > r) { s = !0; break; } if (n.verticesStart + n.verticesCount > t) { s = !0; break; } } if (!s) return this.subMeshes[0]; } return this.releaseSubMeshes(), new ed(0, 0, t, 0, this.getTotalIndices(), this); } /** * This function will subdivide the mesh into multiple submeshes * @param count defines the expected number of submeshes */ subdivide(e) { if (e < 1) return; const t = this.getTotalIndices(); let i = t / e | 0, r = 0; for (; i % 3 !== 0; ) i++; this.releaseSubMeshes(); for (let s = 0; s < e && !(r >= t); s++) ed.CreateFromIndices(0, r, s === e - 1 ? t - r : i, this, void 0, !1), r += i; this.refreshBoundingInfo(), this.synchronizeInstances(); } /** * Copy a FloatArray into a specific associated vertex buffer * @param kind defines which buffer to write to (positions, indices, normals, etc). 
Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param data defines the data source * @param updatable defines if the updated vertex buffer must be flagged as updatable * @param stride defines the data stride size (can be null) * @returns the current mesh */ setVerticesData(e, t, i = !1, r) { if (this._geometry) this._geometry.setVerticesData(e, t, i, r); else { const s = new Ot(); s.set(t, e); const n = this.getScene(); new yc(yc.RandomId(), n, s, i, this); } return this; } /** * Delete a vertex buffer associated with this mesh * @param kind defines which buffer to delete (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind */ removeVerticesData(e) { this._geometry && this._geometry.removeVerticesData(e); } /** * Flags an associated vertex buffer as updatable * @param kind defines which buffer to use (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param updatable defines if the updated vertex buffer must be flagged as updatable */ markVerticesDataAsUpdatable(e, t = !0) { const i = this.getVertexBuffer(e); !i || i.isUpdatable() === t || this.setVerticesData(e, this.getVerticesData(e), t); } /** * Sets the mesh global Vertex Buffer * @param buffer defines the buffer to use * @param disposeExistingBuffer disposes the existing buffer, if any (default: true) * @returns the current mesh */ setVerticesBuffer(e, t = !0) { return this._geometry || (this._geometry = yc.CreateGeometryForMesh(this)), this._geometry.setVerticesBuffer(e, null, t), this; } /** * Update a specific associated vertex buffer * @param kind defines which buffer to write to (positions, indices, normals, etc). Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * @param data defines the data source * @param updateExtends defines if extends info of the mesh must be updated (can be null). This is mostly useful for "position" kind * @param makeItUnique defines if the geometry associated with the mesh must be cloned to make the change only for this mesh (and not all meshes associated with the same geometry) * @returns the current mesh */ updateVerticesData(e, t, i, r) { return this._geometry ? (r ? 
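/* Hedged sketch of writing vertex data with the methods documented above (assumes
   an existing `mesh` and a Float32Array `positions` of xyz triplets; the trailing
   `true` flags the buffer as updatable):
   mesh.setVerticesData(BABYLON.VertexBuffer.PositionKind, positions, true);
   mesh.markVerticesDataAsUpdatable(BABYLON.VertexBuffer.PositionKind, true); // no-op if already updatable */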
(this.makeGeometryUnique(), this.updateVerticesData(e, t, i, !1)) : this._geometry.updateVerticesData(e, t, i), this) : this; } /** * This method updates the vertex positions of an updatable mesh according to the `positionFunction` returned values. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/dynamicMeshMorph#other-shapes-updatemeshpositions * @param positionFunction is a simple JS function what is passed the mesh `positions` array. It doesn't need to return anything * @param computeNormals is a boolean (default true) to enable/disable the mesh normal recomputation after the vertex position update * @returns the current mesh */ updateMeshPositions(e, t = !0) { const i = this.getVerticesData(Y.PositionKind); if (!i) return this; if (e(i), this.updateVerticesData(Y.PositionKind, i, !1, !1), t) { const r = this.getIndices(), s = this.getVerticesData(Y.NormalKind); if (!s) return this; Ot.ComputeNormals(i, r, s), this.updateVerticesData(Y.NormalKind, s, !1, !1); } return this; } /** * Creates a un-shared specific occurence of the geometry for the mesh. * @returns the current mesh */ makeGeometryUnique() { if (!this._geometry) return this; if (this._geometry.meshes.length === 1) return this; const e = this._geometry, t = this._geometry.copy(yc.RandomId()); return e.releaseForMesh(this, !0), t.applyToMesh(this), this; } /** * Sets the index buffer of this mesh. * @param indexBuffer Defines the index buffer to use for this mesh * @param totalVertices Defines the total number of vertices used by the buffer * @param totalIndices Defines the total number of indices in the index buffer */ setIndexBuffer(e, t, i) { let r = this._geometry; r || (r = new yc(yc.RandomId(), this.getScene(), void 0, void 0, this)), r.setIndexBuffer(e, t, i); } /** * Set the index buffer of this mesh * @param indices defines the source data * @param totalVertices defines the total number of vertices referenced by this index data (can be null) * @param updatable defines if the updated index buffer must be flagged as updatable (default is false) * @returns the current mesh */ setIndices(e, t = null, i = !1) { if (this._geometry) this._geometry.setIndices(e, t, i); else { const r = new Ot(); r.indices = e; const s = this.getScene(); new yc(yc.RandomId(), s, r, i, this); } return this; } /** * Update the current index buffer * @param indices defines the source data * @param offset defines the offset in the index buffer where to store the new data (can be null) * @param gpuMemoryOnly defines a boolean indicating that only the GPU memory must be updated leaving the CPU version of the indices unchanged (false by default) * @returns the current mesh */ updateIndices(e, t, i = !1) { return this._geometry ? (this._geometry.updateIndices(e, t, i), this) : this; } /** * Invert the geometry to move from a right handed system to a left handed one. * @returns the current mesh */ toLeftHanded() { return this._geometry ? 
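/* Hedged sketch of updateMeshPositions() as documented above (assumes an existing
   updatable `mesh`; the callback mutates the positions array in place and the
   second argument asks for the normals to be recomputed afterwards):
   mesh.updateMeshPositions((positions) => {
     for (let i = 0; i < positions.length; i += 3) {
       positions[i + 1] += 0.1; // raise every vertex along Y
     }
   }, true); */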
(this._geometry.toLeftHanded(), this) : this; } /** * @internal */ _bind(e, t, i, r = !0) { if (!this._geometry) return this; const s = this.getScene().getEngine(); this.morphTargetManager && this.morphTargetManager.isUsingTextureForTargets && this.morphTargetManager._bind(t); let n; if (this._unIndexed) n = null; else switch (this._getRenderingFillMode(i)) { case At.PointFillMode: n = null; break; case At.WireFrameFillMode: n = e._getLinesIndexBuffer(this.getIndices(), s); break; default: case At.TriangleFillMode: n = this._geometry.getIndexBuffer(); break; } return !r || !this._userInstancedBuffersStorage || this.hasThinInstances ? this._geometry._bind(t, n) : this._geometry._bind(t, n, this._userInstancedBuffersStorage.vertexBuffers, this._userInstancedBuffersStorage.vertexArrayObjects), this; } /** * @internal */ _draw(e, t, i) { if (!this._geometry || !this._geometry.getVertexBuffers() || !this._unIndexed && !this._geometry.getIndexBuffer()) return this; this._internalMeshDataInfo._onBeforeDrawObservable && this._internalMeshDataInfo._onBeforeDrawObservable.notifyObservers(this); const s = this.getScene().getEngine(); return this._unIndexed || t == At.PointFillMode ? s.drawArraysType(t, e.verticesStart, e.verticesCount, this.forcedInstanceCount || i) : t == At.WireFrameFillMode ? s.drawElementsType(t, 0, e._linesIndexCount, this.forcedInstanceCount || i) : s.drawElementsType(t, e.indexStart, e.indexCount, this.forcedInstanceCount || i), this; } /** * Registers for this mesh a javascript function called just before the rendering process * @param func defines the function to call before rendering this mesh * @returns the current mesh */ registerBeforeRender(e) { return this.onBeforeRenderObservable.add(e), this; } /** * Disposes a previously registered javascript function called before the rendering * @param func defines the function to remove * @returns the current mesh */ unregisterBeforeRender(e) { return this.onBeforeRenderObservable.removeCallback(e), this; } /** * Registers for this mesh a javascript function called just after the rendering is complete * @param func defines the function to call after rendering this mesh * @returns the current mesh */ registerAfterRender(e) { return this.onAfterRenderObservable.add(e), this; } /** * Disposes a previously registered javascript function called after the rendering. * @param func defines the function to remove * @returns the current mesh */ unregisterAfterRender(e) { return this.onAfterRenderObservable.removeCallback(e), this; } /** * @internal */ _getInstancesRenderList(e, t = !1) { if (this._instanceDataStorage.isFrozen) { if (t) return this._instanceDataStorage.batchCacheReplacementModeInFrozenMode.hardwareInstancedRendering[e] = !1, this._instanceDataStorage.batchCacheReplacementModeInFrozenMode.renderSelf[e] = !0, this._instanceDataStorage.batchCacheReplacementModeInFrozenMode; if (this._instanceDataStorage.previousBatch) return this._instanceDataStorage.previousBatch; } const i = this.getScene(), r = i._isInIntermediateRendering(), s = r ? this._internalAbstractMeshDataInfo._onlyForInstancesIntermediate : this._internalAbstractMeshDataInfo._onlyForInstances, n = this._instanceDataStorage.batchCache; if (n.mustReturn = !1, n.renderSelf[e] = t || !s && this.isEnabled() && this.isVisible, n.visibleInstances[e] = null, this._instanceDataStorage.visibleInstances && !t) { const a = this._instanceDataStorage.visibleInstances, l = i.getRenderId(), o = r ? 
a.intermediateDefaultRenderId : a.defaultRenderId; n.visibleInstances[e] = a[l], !n.visibleInstances[e] && o && (n.visibleInstances[e] = a[o]); } return n.hardwareInstancedRendering[e] = !t && this._instanceDataStorage.hardwareInstancedRendering && n.visibleInstances[e] !== null && n.visibleInstances[e] !== void 0, this._instanceDataStorage.previousBatch = n, n; } /** * @internal */ _renderWithInstances(e, t, i, r, s) { var n; const a = i.visibleInstances[e._id], l = a ? a.length : 0, o = this._instanceDataStorage, u = o.instancesBufferSize; let h = o.instancesBuffer, d = o.instancesPreviousBuffer; const p = (l + 1) * 16 * 4; for (; o.instancesBufferSize < p; ) o.instancesBufferSize *= 2; (!o.instancesData || u != o.instancesBufferSize) && (o.instancesData = new Float32Array(o.instancesBufferSize / 4)), (this._scene.needsPreviousWorldMatrices && !o.instancesPreviousData || u != o.instancesBufferSize) && (o.instancesPreviousData = new Float32Array(o.instancesBufferSize / 4)); let m = 0, _ = 0; const v = i.renderSelf[e._id], C = !h || u !== o.instancesBufferSize || this._scene.needsPreviousWorldMatrices && !o.instancesPreviousBuffer; if (!this._instanceDataStorage.manualUpdate && (!o.isFrozen || C)) { const x = this.getWorldMatrix(); if (v && (this._scene.needsPreviousWorldMatrices && (o.masterMeshPreviousWorldMatrix ? (o.masterMeshPreviousWorldMatrix.copyToArray(o.instancesPreviousData, m), o.masterMeshPreviousWorldMatrix.copyFrom(x)) : (o.masterMeshPreviousWorldMatrix = x.clone(), o.masterMeshPreviousWorldMatrix.copyToArray(o.instancesPreviousData, m))), x.copyToArray(o.instancesData, m), m += 16, _++), a) { if (ke.INSTANCEDMESH_SORT_TRANSPARENT && this._scene.activeCamera && (!((n = e.getMaterial()) === null || n === void 0) && n.needAlphaBlendingForMesh(e.getRenderingMesh()))) { const b = this._scene.activeCamera.globalPosition; for (let S = 0; S < a.length; S++) { const M = a[S]; M._distanceToCamera = D.Distance(M.getBoundingInfo().boundingSphere.centerWorld, b); } a.sort((S, M) => S._distanceToCamera > M._distanceToCamera ? -1 : S._distanceToCamera < M._distanceToCamera ? 1 : 0); } for (let b = 0; b < a.length; b++) { const S = a[b], M = S.getWorldMatrix(); M.copyToArray(o.instancesData, m), this._scene.needsPreviousWorldMatrices && (S._previousWorldMatrix ? (S._previousWorldMatrix.copyToArray(o.instancesPreviousData, m), S._previousWorldMatrix.copyFrom(M)) : (S._previousWorldMatrix = M.clone(), S._previousWorldMatrix.copyToArray(o.instancesPreviousData, m))), m += 16, _++; } } } else _ = (v ? 1 : 0) + l; return C ? (h && h.dispose(), d && d.dispose(), h = new hu(s, o.instancesData, !0, 16, !1, !0), o.instancesBuffer = h, this._userInstancedBuffersStorage || (this._userInstancedBuffersStorage = { data: {}, vertexBuffers: {}, strides: {}, sizes: {}, vertexArrayObjects: this.getEngine().getCaps().vertexArrayObject ? 
{} : void 0 }), this._userInstancedBuffersStorage.vertexBuffers.world0 = h.createVertexBuffer("world0", 0, 4), this._userInstancedBuffersStorage.vertexBuffers.world1 = h.createVertexBuffer("world1", 4, 4), this._userInstancedBuffersStorage.vertexBuffers.world2 = h.createVertexBuffer("world2", 8, 4), this._userInstancedBuffersStorage.vertexBuffers.world3 = h.createVertexBuffer("world3", 12, 4), this._scene.needsPreviousWorldMatrices && (d = new hu(s, o.instancesPreviousData, !0, 16, !1, !0), o.instancesPreviousBuffer = d, this._userInstancedBuffersStorage.vertexBuffers.previousWorld0 = d.createVertexBuffer("previousWorld0", 0, 4), this._userInstancedBuffersStorage.vertexBuffers.previousWorld1 = d.createVertexBuffer("previousWorld1", 4, 4), this._userInstancedBuffersStorage.vertexBuffers.previousWorld2 = d.createVertexBuffer("previousWorld2", 8, 4), this._userInstancedBuffersStorage.vertexBuffers.previousWorld3 = d.createVertexBuffer("previousWorld3", 12, 4)), this._invalidateInstanceVertexArrayObject()) : (!this._instanceDataStorage.isFrozen || this._instanceDataStorage.forceMatrixUpdates) && (h.updateDirectly(o.instancesData, 0, _), this._scene.needsPreviousWorldMatrices && (!this._instanceDataStorage.manualUpdate || this._instanceDataStorage.previousManualUpdate) && d.updateDirectly(o.instancesPreviousData, 0, _)), this._processInstancedBuffers(a, v), this.getScene()._activeIndices.addCount(e.indexCount * _, !1), s._currentDrawContext && (s._currentDrawContext.useInstancing = !0), this._bind(e, r, t), this._draw(e, t, _), this._scene.needsPreviousWorldMatrices && !C && this._instanceDataStorage.manualUpdate && (!this._instanceDataStorage.isFrozen || this._instanceDataStorage.forceMatrixUpdates) && !this._instanceDataStorage.previousManualUpdate && d.updateDirectly(o.instancesData, 0, _), s.unbindInstanceAttributes(), this; } /** * @internal */ _renderWithThinInstances(e, t, i, r) { var s, n; const a = (n = (s = this._thinInstanceDataStorage) === null || s === void 0 ? void 0 : s.instancesCount) !== null && n !== void 0 ? n : 0; this.getScene()._activeIndices.addCount(e.indexCount * a, !1), r._currentDrawContext && (r._currentDrawContext.useInstancing = !0), this._bind(e, i, t), this._draw(e, t, a), this._scene.needsPreviousWorldMatrices && !this._thinInstanceDataStorage.previousMatrixData && this._thinInstanceDataStorage.matrixData && (this._thinInstanceDataStorage.previousMatrixBuffer ? 
this._thinInstanceDataStorage.previousMatrixBuffer.updateDirectly(this._thinInstanceDataStorage.matrixData, 0, a) : this._thinInstanceDataStorage.previousMatrixBuffer = this._thinInstanceCreateMatrixBuffer("previousWorld", this._thinInstanceDataStorage.matrixData, !1)), r.unbindInstanceAttributes(); } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _processInstancedBuffers(e, t) { } /** * @internal */ _processRendering(e, t, i, r, s, n, a, l) { const o = this.getScene(), u = o.getEngine(); if (r = this._getRenderingFillMode(r), n && t.getRenderingMesh().hasThinInstances) return this._renderWithThinInstances(t, r, i, u), this; if (n) this._renderWithInstances(t, r, s, i, u); else { u._currentDrawContext && (u._currentDrawContext.useInstancing = !1); let h = 0; s.renderSelf[t._id] && (a && a(!1, e.getWorldMatrix(), l), h++, this._draw(t, r, this._instanceDataStorage.overridenInstanceCount)); const d = s.visibleInstances[t._id]; if (d) { const f = d.length; h += f; for (let p = 0; p < f; p++) { const _ = d[p].getWorldMatrix(); a && a(!0, _, l), this._draw(t, r); } } o._activeIndices.addCount(t.indexCount * h, !1); } return this; } /** * @internal */ _rebuild(e = !1) { if (this._instanceDataStorage.instancesBuffer && (e && this._instanceDataStorage.instancesBuffer.dispose(), this._instanceDataStorage.instancesBuffer = null), this._userInstancedBuffersStorage) { for (const t in this._userInstancedBuffersStorage.vertexBuffers) { const i = this._userInstancedBuffersStorage.vertexBuffers[t]; i && (e && i.dispose(), this._userInstancedBuffersStorage.vertexBuffers[t] = null); } this._userInstancedBuffersStorage.vertexArrayObjects && (this._userInstancedBuffersStorage.vertexArrayObjects = {}); } this._internalMeshDataInfo._effectiveMaterial = null, super._rebuild(e); } /** @internal */ _freeze() { if (this.subMeshes) { for (let e = 0; e < this.subMeshes.length; e++) this._getInstancesRenderList(e); this._internalMeshDataInfo._effectiveMaterial = null, this._instanceDataStorage.isFrozen = !0; } } /** @internal */ _unFreeze() { this._instanceDataStorage.isFrozen = !1, this._instanceDataStorage.previousBatch = null; } /** * Triggers the draw call for the mesh (or a submesh), for a specific render pass id * @param renderPassId defines the render pass id to use to draw the mesh / submesh. If not provided, use the current renderPassId of the engine. * @param enableAlphaMode defines if alpha mode can be changed (default: false) * @param effectiveMeshReplacement defines an optional mesh used to provide info for the rendering (default: undefined) * @param subMesh defines the subMesh to render. If not provided, draw all mesh submeshes (default: undefined) * @param checkFrustumCulling defines if frustum culling must be checked (default: true). If you know the mesh is in the frustum (or if you don't care!), you can pass false to optimize. * @returns the current mesh */ renderWithRenderPassId(e, t, i, r, s = !0) { const n = this._scene.getEngine(), a = n.currentRenderPassId; if (e !== void 0 && (n.currentRenderPassId = e), r) (!s || s && r.isInFrustum(this._scene._frustumPlanes)) && this.render(r, !!t, i); else for (let l = 0; l < this.subMeshes.length; l++) { const o = this.subMeshes[l]; (!s || s && o.isInFrustum(this._scene._frustumPlanes)) && this.render(o, !!t, i); } return e !== void 0 && (n.currentRenderPassId = a), this; } /** * Triggers the draw call for the mesh. 
Usually, you don't need to call this method by your own because the mesh rendering is handled by the scene rendering manager * @param subMesh defines the subMesh to render * @param enableAlphaMode defines if alpha mode can be changed * @param effectiveMeshReplacement defines an optional mesh used to provide info for the rendering * @returns the current mesh */ render(e, t, i) { var r, s, n, a, l; const o = this.getScene(); this._internalAbstractMeshDataInfo._isActiveIntermediate ? this._internalAbstractMeshDataInfo._isActiveIntermediate = !1 : this._internalAbstractMeshDataInfo._isActive = !1; const u = (s = (r = o.activeCameras) === null || r === void 0 ? void 0 : r.length) !== null && s !== void 0 ? s : 0; if ((u > 1 && o.activeCamera === o.activeCameras[0] || u <= 1) && this._checkOcclusionQuery() && !this._occlusionDataStorage.forceRenderingWhenOccluded) return this; const d = this._getInstancesRenderList(e._id, !!i); if (d.mustReturn) return this; if (!this._geometry || !this._geometry.getVertexBuffers() || !this._unIndexed && !this._geometry.getIndexBuffer()) return this; const f = o.getEngine(); let p = 0, m = null; this.ignoreCameraMaxZ && o.activeCamera && !o._isInIntermediateRendering() && (p = o.activeCamera.maxZ, m = o.activeCamera, o.activeCamera.maxZ = 0, o.updateTransformMatrix(!0)), this._internalMeshDataInfo._onBeforeRenderObservable && this._internalMeshDataInfo._onBeforeRenderObservable.notifyObservers(this); const _ = e.getRenderingMesh(), v = d.hardwareInstancedRendering[e._id] || _.hasThinInstances || !!this._userInstancedBuffersStorage && !e.getMesh()._internalAbstractMeshDataInfo._actAsRegularMesh, C = this._instanceDataStorage, x = e.getMaterial(); if (!x) return m && (m.maxZ = p, o.updateTransformMatrix(!0)), this; if (!C.isFrozen || !this._internalMeshDataInfo._effectiveMaterial || this._internalMeshDataInfo._effectiveMaterial !== x) { if (x._storeEffectOnSubMeshes) { if (!x.isReadyForSubMesh(this, e, v)) return m && (m.maxZ = p, o.updateTransformMatrix(!0)), this; } else if (!x.isReady(this, v)) return m && (m.maxZ = p, o.updateTransformMatrix(!0)), this; this._internalMeshDataInfo._effectiveMaterial = x; } else if (x._storeEffectOnSubMeshes && !(!((n = e.effect) === null || n === void 0) && n._wasPreviouslyReady) || !x._storeEffectOnSubMeshes && !(!((a = x.getEffect()) === null || a === void 0) && a._wasPreviouslyReady)) return m && (m.maxZ = p, o.updateTransformMatrix(!0)), this; t && f.setAlphaMode(this._internalMeshDataInfo._effectiveMaterial.alphaMode); let b; this._internalMeshDataInfo._effectiveMaterial._storeEffectOnSubMeshes ? b = e._drawWrapper : b = this._internalMeshDataInfo._effectiveMaterial._getDrawWrapper(); const S = (l = b == null ? void 0 : b.effect) !== null && l !== void 0 ? l : null; for (const B of o._beforeRenderingMeshStage) B.action(this, e, d, S); if (!b || !S) return m && (m.maxZ = p, o.updateTransformMatrix(!0)), this; const M = i || this; let R; if (!C.isFrozen && (this._internalMeshDataInfo._effectiveMaterial.backFaceCulling || this.overrideMaterialSideOrientation !== null || this._internalMeshDataInfo._effectiveMaterial.twoSidedLighting)) { const B = M._getWorldMatrixDeterminant(); R = this.overrideMaterialSideOrientation, R == null && (R = this._internalMeshDataInfo._effectiveMaterial.sideOrientation), B < 0 && (R = R === At.ClockWiseSideOrientation ? 
At.CounterClockWiseSideOrientation : At.ClockWiseSideOrientation), C.sideOrientation = R; } else R = C.sideOrientation; const w = this._internalMeshDataInfo._effectiveMaterial._preBind(b, R); this._internalMeshDataInfo._effectiveMaterial.forceDepthWrite && f.setDepthWrite(!0); const V = this._internalMeshDataInfo._effectiveMaterial, k = V.fillMode; this._internalMeshDataInfo._onBeforeBindObservable && this._internalMeshDataInfo._onBeforeBindObservable.notifyObservers(this), v || this._bind(e, S, k, !1); const L = M.getWorldMatrix(); V._storeEffectOnSubMeshes ? V.bindForSubMesh(L, this, e) : V.bind(L, this), !V.backFaceCulling && V.separateCullingPass && (f.setState(!0, V.zOffset, !1, !w, V.cullBackFaces, V.stencil, V.zOffsetUnits), this._processRendering(this, e, S, k, d, v, this._onBeforeDraw, this._internalMeshDataInfo._effectiveMaterial), f.setState(!0, V.zOffset, !1, w, V.cullBackFaces, V.stencil, V.zOffsetUnits), this._internalMeshDataInfo._onBetweenPassObservable && this._internalMeshDataInfo._onBetweenPassObservable.notifyObservers(e)), this._processRendering(this, e, S, k, d, v, this._onBeforeDraw, this._internalMeshDataInfo._effectiveMaterial), this._internalMeshDataInfo._effectiveMaterial.unbind(); for (const B of o._afterRenderingMeshStage) B.action(this, e, d, S); return this._internalMeshDataInfo._onAfterRenderObservable && this._internalMeshDataInfo._onAfterRenderObservable.notifyObservers(this), m && (m.maxZ = p, o.updateTransformMatrix(!0)), o.performancePriority === $A.Aggressive && !C.isFrozen && this._freeze(), this; } /** * Renormalize the mesh and patch it up if there are no weights * Similar to normalization by adding the weights compute the reciprocal and multiply all elements, this wil ensure that everything adds to 1. * However in the case of zero weights then we set just a single influence to 1. * We check in the function for extra's present and if so we use the normalizeSkinWeightsWithExtras rather than the FourWeights version. */ cleanMatrixWeights() { this.isVerticesDataPresent(Y.MatricesWeightsKind) && (this.isVerticesDataPresent(Y.MatricesWeightsExtraKind) ? this._normalizeSkinWeightsAndExtra() : this._normalizeSkinFourWeights()); } // faster 4 weight version. _normalizeSkinFourWeights() { const e = this.getVerticesData(Y.MatricesWeightsKind), t = e.length; for (let i = 0; i < t; i += 4) { const r = e[i] + e[i + 1] + e[i + 2] + e[i + 3]; if (r === 0) e[i] = 1; else { const s = 1 / r; e[i] *= s, e[i + 1] *= s, e[i + 2] *= s, e[i + 3] *= s; } } this.setVerticesData(Y.MatricesWeightsKind, e); } // handle special case of extra verts. 
// (in theory gltf can handle 12 influences)
_normalizeSkinWeightsAndExtra() { const e = this.getVerticesData(Y.MatricesWeightsExtraKind), t = this.getVerticesData(Y.MatricesWeightsKind), i = t.length; for (let r = 0; r < i; r += 4) { let s = t[r] + t[r + 1] + t[r + 2] + t[r + 3]; if (s += e[r] + e[r + 1] + e[r + 2] + e[r + 3], s === 0) t[r] = 1; else { const n = 1 / s; t[r] *= n, t[r + 1] *= n, t[r + 2] *= n, t[r + 3] *= n, e[r] *= n, e[r + 1] *= n, e[r + 2] *= n, e[r + 3] *= n; } } this.setVerticesData(Y.MatricesWeightsKind, t), this.setVerticesData(Y.MatricesWeightsExtraKind, e); } /** * ValidateSkinning is used to determine whether a mesh has valid skinning data, along with skin metrics. * If weights are missing or not normalized, the mesh is reported as invalid; the report string can be used for console logs * or on-screen messages to let the user know there was an issue when importing the mesh. * @returns a validation object with skinned, valid and report string */ validateSkinning() { const e = this.getVerticesData(Y.MatricesWeightsExtraKind), t = this.getVerticesData(Y.MatricesWeightsKind); if (t === null || this.skeleton == null) return { skinned: !1, valid: !0, report: "not skinned" }; const i = t.length; let r = 0, s = 0, n = 0, a = 0; const l = e === null ? 4 : 8, o = []; for (let _ = 0; _ <= l; _++) o[_] = 0; const u = 1e-3; for (let _ = 0; _ < i; _ += 4) { let v = t[_], C = v, x = C === 0 ? 0 : 1; for (let b = 1; b < l; b++) { const S = b < 4 ? t[_ + b] : e[_ + b - 4]; S > v && r++, S !== 0 && x++, C += S, v = S; } if (o[x]++, x > n && (n = x), C === 0) s++; else { const b = 1 / C; let S = 0; for (let M = 0; M < l; M++) M < 4 ? S += Math.abs(t[_ + M] - t[_ + M] * b) : S += Math.abs(e[_ + M - 4] - e[_ + M - 4] * b); S > u && a++; } } const h = this.skeleton.bones.length, d = this.getVerticesData(Y.MatricesIndicesKind), f = this.getVerticesData(Y.MatricesIndicesExtraKind); let p = 0; for (let _ = 0; _ < i; _ += 4) for (let v = 0; v < l; v++) { const C = v < 4 ? d[_ + v] : f[_ + v - 4]; (C >= h || C < 0) && p++; } const m = "Number of Weights = " + i / 4 + ` Maximum influences = ` + n + ` Missing Weights = ` + s + ` Not Sorted = ` + r + ` Not Normalized = ` + a + ` WeightCounts = [` + o + `] Number of bones = ` + h + ` Bad Bone Indices = ` + p; return { skinned: !0, valid: s === 0 && a === 0 && p === 0, report: m }; } /** @internal */ _checkDelayState() { const e = this.getScene(); return this._geometry ? this._geometry.load(e) : this.delayLoadState === 4 && (this.delayLoadState = 2, this._queueLoad(e)), this; } _queueLoad(e) { e.addPendingData(this); const t = this.delayLoadingFile.indexOf(".babylonbinarymeshdata") !== -1; return Ve.LoadFile(this.delayLoadingFile, (i) => { i instanceof ArrayBuffer ? this._delayLoadingFunction(i, this) : this._delayLoadingFunction(JSON.parse(i), this), this.instances.forEach((r) => { r.refreshBoundingInfo(), r._syncSubMeshes(); }), this.delayLoadState = 1, e.removePendingData(this); }, () => { }, e.offlineProvider, t), this; } /** * Returns `true` if the mesh is within the frustum defined by the passed array of planes. * A mesh is in the frustum if its bounding box intersects the frustum * @param frustumPlanes defines the frustum to test * @returns true if the mesh is in the frustum planes */ isInFrustum(e) { return this.delayLoadState === 2 || !super.isInFrustum(e) ?
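/*
 * Sketch showing how the skinning validation documented above can be used (illustrative
 * mesh name; assumes a skinned mesh already imported into `scene`):
 *
 *   const hero = scene.getMeshByName("hero");
 *   const result = hero.validateSkinning(); // { skinned, valid, report }
 *   if (result.skinned && !result.valid) {
 *     console.warn(result.report);          // e.g. missing, unsorted or unnormalized weights
 *     hero.cleanMatrixWeights();            // renormalize weights so each vertex sums to 1
 *   }
 */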
!1 : (this._checkDelayState(), !0); } /** * Sets the mesh material by the material or multiMaterial `id` property * @param id is a string identifying the material or the multiMaterial * @returns the current mesh */ setMaterialById(e) { const t = this.getScene().materials; let i; for (i = t.length - 1; i > -1; i--) if (t[i].id === e) return this.material = t[i], this; const r = this.getScene().multiMaterials; for (i = r.length - 1; i > -1; i--) if (r[i].id === e) return this.material = r[i], this; return this; } /** * Returns as a new array populated with the mesh material and/or skeleton, if any. * @returns an array of IAnimatable */ getAnimatables() { const e = []; return this.material && e.push(this.material), this.skeleton && e.push(this.skeleton), e; } /** * Modifies the mesh geometry according to the passed transformation matrix. * This method returns nothing, but it really modifies the mesh even if it's originally not set as updatable. * The mesh normals are modified using the same transformation. * Note that, under the hood, this method sets a new VertexBuffer each call. * @param transform defines the transform matrix to use * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/bakingTransforms * @returns the current mesh */ bakeTransformIntoVertices(e) { if (!this.isVerticesDataPresent(Y.PositionKind)) return this; const t = this.subMeshes.splice(0); this._resetPointsArrayCache(); let i = this.getVerticesData(Y.PositionKind); const r = D.Zero(); let s; for (s = 0; s < i.length; s += 3) D.TransformCoordinatesFromFloatsToRef(i[s], i[s + 1], i[s + 2], e, r).toArray(i, s); if (this.setVerticesData(Y.PositionKind, i, this.getVertexBuffer(Y.PositionKind).isUpdatable()), this.isVerticesDataPresent(Y.NormalKind)) { for (i = this.getVerticesData(Y.NormalKind), s = 0; s < i.length; s += 3) D.TransformNormalFromFloatsToRef(i[s], i[s + 1], i[s + 2], e, r).normalize().toArray(i, s); this.setVerticesData(Y.NormalKind, i, this.getVertexBuffer(Y.NormalKind).isUpdatable()); } return e.determinant() < 0 && this.flipFaces(), this.releaseSubMeshes(), this.subMeshes = t, this; } /** * Modifies the mesh geometry according to its own current World Matrix. * The mesh World Matrix is then reset. * This method returns nothing but really modifies the mesh even if it's originally not set as updatable. * Note that, under the hood, this method sets a new VertexBuffer each call. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/bakingTransforms * @param bakeIndependentlyOfChildren indicates whether to preserve all child nodes' World Matrix during baking * @returns the current mesh */ bakeCurrentTransformIntoVertices(e = !0) { return this.bakeTransformIntoVertices(this.computeWorldMatrix(!0)), this.resetLocalMatrix(e), this; } // Cache /** @internal */ get _positions() { return this._internalAbstractMeshDataInfo._positions ? this._internalAbstractMeshDataInfo._positions : this._geometry ? this._geometry._positions : null; } /** @internal */ _resetPointsArrayCache() { return this._geometry && this._geometry._resetPointsArrayCache(), this; } /** @internal */ _generatePointsArray() { return this._geometry ? this._geometry._generatePointsArray() : !1; } /** * Returns a new Mesh object generated from the current mesh properties. 
* This method must not get confused with createInstance() * @param name is a string, the name given to the new mesh * @param newParent can be any Node object (default `null`) * @param doNotCloneChildren allows/denies the recursive cloning of the original mesh children if any (default `false`) * @param clonePhysicsImpostor allows/denies the cloning in the same time of the original mesh `body` used by the physics engine, if any (default `true`) * @returns a new mesh */ clone(e = "", t = null, i, r = !0) { return new ke(e, this.getScene(), t, this, i, r); } /** * Releases resources associated with this mesh. * @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { this.morphTargetManager = null, this._geometry && this._geometry.releaseForMesh(this, !0); const i = this._internalMeshDataInfo; if (i._onBeforeDrawObservable && i._onBeforeDrawObservable.clear(), i._onBeforeBindObservable && i._onBeforeBindObservable.clear(), i._onBeforeRenderObservable && i._onBeforeRenderObservable.clear(), i._onAfterRenderObservable && i._onAfterRenderObservable.clear(), i._onBetweenPassObservable && i._onBetweenPassObservable.clear(), this._scene.useClonedMeshMap) { if (i.meshMap) for (const r in i.meshMap) { const s = i.meshMap[r]; s && (s._internalMeshDataInfo._source = null, i.meshMap[r] = void 0); } i._source && i._source._internalMeshDataInfo.meshMap && (i._source._internalMeshDataInfo.meshMap[this.uniqueId] = void 0); } else { const r = this.getScene().meshes; for (const s of r) { const n = s; n._internalMeshDataInfo && n._internalMeshDataInfo._source && n._internalMeshDataInfo._source === this && (n._internalMeshDataInfo._source = null); } } i._source = null, this._instanceDataStorage.visibleInstances = {}, this._disposeInstanceSpecificData(), this._disposeThinInstanceSpecificData(), this._internalMeshDataInfo._checkReadinessObserver && this._scene.onBeforeRenderObservable.remove(this._internalMeshDataInfo._checkReadinessObserver), super.dispose(e, t); } /** @internal */ _disposeInstanceSpecificData() { } /** @internal */ _disposeThinInstanceSpecificData() { } /** @internal */ _invalidateInstanceVertexArrayObject() { } /** * Modifies the mesh geometry according to a displacement map. * A displacement map is a colored image. Each pixel color value (actually a gradient computed from red, green, blue values) will give the displacement to apply to each mesh vertex. * The mesh must be set as updatable. Its internal geometry is directly modified, no new buffer are allocated. * @param url is a string, the URL from the image file is to be downloaded. * @param minHeight is the lower limit of the displacement. * @param maxHeight is the upper limit of the displacement. * @param onSuccess is an optional Javascript function to be called just after the mesh is modified. It is passed the modified mesh and must return nothing. * @param uvOffset is an optional vector2 used to offset UV. * @param uvScale is an optional vector2 used to scale UV. * @param forceUpdate defines whether or not to force an update of the generated buffers. This is useful to apply on a deserialized model for instance. * @param onError defines a callback called when an error occurs during the processing of the request. * @returns the Mesh. 
*/ applyDisplacementMap(e, t, i, r, s, n, a = !1, l) { const o = this.getScene(), u = (h) => { const d = h.width, f = h.height, m = this.getEngine().createCanvas(d, f).getContext("2d"); m.drawImage(h, 0, 0); const _ = m.getImageData(0, 0, d, f).data; this.applyDisplacementMapFromBuffer(_, d, f, t, i, s, n, a), r && r(this); }; return Ve.LoadImage(e, u, l || (() => { }), o.offlineProvider), this; } /** * Modifies the mesh geometry according to a displacementMap buffer. * A displacement map is a colored image. Each pixel color value (actually a gradient computed from red, green, blue values) will give the displacement to apply to each mesh vertex. * The mesh must be set as updatable. Its internal geometry is directly modified, no new buffer are allocated. * @param buffer is a `Uint8Array` buffer containing series of `Uint8` lower than 255, the red, green, blue and alpha values of each successive pixel. * @param heightMapWidth is the width of the buffer image. * @param heightMapHeight is the height of the buffer image. * @param minHeight is the lower limit of the displacement. * @param maxHeight is the upper limit of the displacement. * @param uvOffset is an optional vector2 used to offset UV. * @param uvScale is an optional vector2 used to scale UV. * @param forceUpdate defines whether or not to force an update of the generated buffers. This is useful to apply on a deserialized model for instance. * @returns the Mesh. */ applyDisplacementMapFromBuffer(e, t, i, r, s, n, a, l = !1) { if (!this.isVerticesDataPresent(Y.PositionKind) || !this.isVerticesDataPresent(Y.NormalKind) || !this.isVerticesDataPresent(Y.UVKind)) return Ce.Warn("Cannot call applyDisplacementMap: Given mesh is not complete. Position, Normal or UV are missing"), this; const o = this.getVerticesData(Y.PositionKind, !0, !0), u = this.getVerticesData(Y.NormalKind), h = this.getVerticesData(Y.UVKind); let d = D.Zero(); const f = D.Zero(), p = at.Zero(); n = n || at.Zero(), a = a || new at(1, 1); for (let m = 0; m < o.length; m += 3) { D.FromArrayToRef(o, m, d), D.FromArrayToRef(u, m, f), at.FromArrayToRef(h, m / 3 * 2, p); const _ = Math.abs(p.x * a.x + n.x % 1) * (t - 1) % t | 0, v = Math.abs(p.y * a.y + n.y % 1) * (i - 1) % i | 0, C = (_ + v * t) * 4, x = e[C] / 255, b = e[C + 1] / 255, S = e[C + 2] / 255, M = x * 0.3 + b * 0.59 + S * 0.11; f.normalize(), f.scaleInPlace(r + (s - r) * M), d = d.add(f), d.toArray(o, m); } return Ot.ComputeNormals(o, this.getIndices(), u), l ? (this.setVerticesData(Y.PositionKind, o), this.setVerticesData(Y.NormalKind, u), this.setVerticesData(Y.UVKind, h)) : (this.updateVerticesData(Y.PositionKind, o), this.updateVerticesData(Y.NormalKind, u)), this; } _getFlattenedNormals(e, t) { const i = new Float32Array(e.length * 3); let r = 0; const s = this.overrideMaterialSideOrientation === (this._scene.useRightHandedSystem ? 1 : 0); for (let n = 0; n < e.length; n += 3) { const a = D.FromArray(t, e[n] * 3), l = D.FromArray(t, e[n + 1] * 3), o = D.FromArray(t, e[n + 2] * 3), u = a.subtract(l), h = o.subtract(l), d = D.Normalize(D.Cross(u, h)); s && d.scaleInPlace(-1); for (let f = 0; f < 3; f++) i[r++] = d.x, i[r++] = d.y, i[r++] = d.z; } return i; } _convertToUnIndexedMesh(e = !1) { const t = this.getVerticesDataKinds(), i = this.getIndices(), r = {}, s = (a, l) => { const o = new Float32Array(i.length * l); let u = 0; for (let h = 0; h < i.length; h++) for (let d = 0; d < l; d++) o[u++] = a[i[h] * l + d]; return o; }, n = this.geometry ? 
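/*
 * Sketch of the displacement-map flow documented above (the texture URL and ground parameters
 * are illustrative; the mesh must be created as updatable and have positions, normals and UVs):
 *
 *   const ground = BABYLON.MeshBuilder.CreateGround("ground",
 *     { width: 10, height: 10, subdivisions: 100, updatable: true }, scene);
 *   ground.applyDisplacementMap("textures/heightMap.png", 0, 2); // minHeight = 0, maxHeight = 2
 */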
this.subMeshes.slice(0) : []; for (const a of t) r[a] = this.getVerticesData(a); for (const a of t) { const l = this.getVertexBuffer(a), o = l.getStrideSize(); if (e && a === Y.NormalKind) { const u = this._getFlattenedNormals(i, r[Y.PositionKind]); this.setVerticesData(Y.NormalKind, u, l.isUpdatable(), o); } else this.setVerticesData(a, s(r[a], o), l.isUpdatable(), o); } if (this.morphTargetManager) { for (let a = 0; a < this.morphTargetManager.numTargets; a++) { const l = this.morphTargetManager.getTarget(a), o = l.getPositions(); l.setPositions(s(o, 3)); const u = l.getNormals(); u && l.setNormals(e ? this._getFlattenedNormals(i, o) : s(u, 3)); const h = l.getTangents(); h && l.setTangents(s(h, 3)); const d = l.getUVs(); d && l.setUVs(s(d, 2)); } this.morphTargetManager.synchronize(); } for (let a = 0; a < i.length; a++) i[a] = a; this.setIndices(i), this._unIndexed = !0, this.releaseSubMeshes(); for (const a of n) ed.AddToMesh(a.materialIndex, a.indexStart, a.indexCount, a.indexStart, a.indexCount, this); return this.synchronizeInstances(), this; } /** * Modify the mesh to get a flat shading rendering. * This means each mesh facet will then have its own normals. Usually new vertices are added in the mesh geometry to get this result. * Warning : the mesh is really modified even if not set originally as updatable and, under the hood, a new VertexBuffer is allocated. * @returns current mesh */ convertToFlatShadedMesh() { return this._convertToUnIndexedMesh(!0); } /** * This method removes all the mesh indices and add new vertices (duplication) in order to unfold facets into buffers. * In other words, more vertices, no more indices and a single bigger VBO. * The mesh is really modified even if not set originally as updatable. Under the hood, a new VertexBuffer is allocated. * @returns current mesh */ convertToUnIndexedMesh() { return this._convertToUnIndexedMesh(); } /** * Inverses facet orientations. * Warning : the mesh is really modified even if not set originally as updatable. A new VertexBuffer is created under the hood each call. * @param flipNormals will also inverts the normals * @returns current mesh */ flipFaces(e = !1) { const t = Ot.ExtractFromMesh(this); let i; if (e && this.isVerticesDataPresent(Y.NormalKind) && t.normals) for (i = 0; i < t.normals.length; i++) t.normals[i] *= -1; if (t.indices) { let r; for (i = 0; i < t.indices.length; i += 3) r = t.indices[i + 1], t.indices[i + 1] = t.indices[i + 2], t.indices[i + 2] = r; } return t.applyToMesh(this, this.isVertexBufferUpdatable(Y.PositionKind)), this; } /** * Increase the number of facets and hence vertices in a mesh * Vertex normals are interpolated from existing vertex normals * Warning : the mesh is really modified even if not set originally as updatable. A new VertexBuffer is created under the hood each call. * @param numberPerEdge the number of new vertices to add to each edge of a facet, optional default 1 */ increaseVertices(e = 1) { const t = Ot.ExtractFromMesh(this), i = t.indices && !Array.isArray(t.indices) && Array.from ? Array.from(t.indices) : t.indices, r = t.positions && !Array.isArray(t.positions) && Array.from ? Array.from(t.positions) : t.positions, s = t.uvs && !Array.isArray(t.uvs) && Array.from ? Array.from(t.uvs) : t.uvs, n = t.normals && !Array.isArray(t.normals) && Array.from ? 
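/*
 * Sketch of the re-indexing helpers documented above (illustrative mesh; note that these calls
 * rebuild vertex buffers even on meshes that were not created as updatable):
 *
 *   mesh.convertToFlatShadedMesh();       // duplicate vertices so each facet gets its own normal
 *   // or: mesh.convertToUnIndexedMesh(); // drop indices, one bigger vertex buffer
 *   // or: mesh.flipFaces(true);          // invert winding order and also flip normals
 */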
Array.from(t.normals) : t.normals; if (!i || !r) Ce.Warn("Couldn't increase number of vertices : VertexData must contain at least indices and positions"); else { t.indices = i, t.positions = r, s && (t.uvs = s), n && (t.normals = n); const a = e + 1, l = new Array(); for (let S = 0; S < a + 1; S++) l[S] = new Array(); let o, u; const h = new D(0, 0, 0), d = new D(0, 0, 0), f = new at(0, 0), p = new Array(), m = new Array(), _ = new Array(); let v, C = r.length, x; s && (x = s.length); let b; n && (b = n.length); for (let S = 0; S < i.length; S += 3) { m[0] = i[S], m[1] = i[S + 1], m[2] = i[S + 2]; for (let M = 0; M < 3; M++) if (o = m[M], u = m[(M + 1) % 3], _[o] === void 0 && _[u] === void 0 ? (_[o] = new Array(), _[u] = new Array()) : (_[o] === void 0 && (_[o] = new Array()), _[u] === void 0 && (_[u] = new Array())), _[o][u] === void 0 && _[u][o] === void 0) { _[o][u] = [], h.x = (r[3 * u] - r[3 * o]) / a, h.y = (r[3 * u + 1] - r[3 * o + 1]) / a, h.z = (r[3 * u + 2] - r[3 * o + 2]) / a, n && (d.x = (n[3 * u] - n[3 * o]) / a, d.y = (n[3 * u + 1] - n[3 * o + 1]) / a, d.z = (n[3 * u + 2] - n[3 * o + 2]) / a), s && (f.x = (s[2 * u] - s[2 * o]) / a, f.y = (s[2 * u + 1] - s[2 * o + 1]) / a), _[o][u].push(o); for (let R = 1; R < a; R++) _[o][u].push(r.length / 3), r[C++] = r[3 * o] + R * h.x, r[C++] = r[3 * o + 1] + R * h.y, r[C++] = r[3 * o + 2] + R * h.z, n && (n[b++] = n[3 * o] + R * d.x, n[b++] = n[3 * o + 1] + R * d.y, n[b++] = n[3 * o + 2] + R * d.z), s && (s[x++] = s[2 * o] + R * f.x, s[x++] = s[2 * o + 1] + R * f.y); _[o][u].push(u), _[u][o] = new Array(), v = _[o][u].length; for (let R = 0; R < v; R++) _[u][o][R] = _[o][u][v - 1 - R]; } l[0][0] = i[S], l[1][0] = _[i[S]][i[S + 1]][1], l[1][1] = _[i[S]][i[S + 2]][1]; for (let M = 2; M < a; M++) { l[M][0] = _[i[S]][i[S + 1]][M], l[M][M] = _[i[S]][i[S + 2]][M], h.x = (r[3 * l[M][M]] - r[3 * l[M][0]]) / M, h.y = (r[3 * l[M][M] + 1] - r[3 * l[M][0] + 1]) / M, h.z = (r[3 * l[M][M] + 2] - r[3 * l[M][0] + 2]) / M, n && (d.x = (n[3 * l[M][M]] - n[3 * l[M][0]]) / M, d.y = (n[3 * l[M][M] + 1] - n[3 * l[M][0] + 1]) / M, d.z = (n[3 * l[M][M] + 2] - n[3 * l[M][0] + 2]) / M), s && (f.x = (s[2 * l[M][M]] - s[2 * l[M][0]]) / M, f.y = (s[2 * l[M][M] + 1] - s[2 * l[M][0] + 1]) / M); for (let R = 1; R < M; R++) l[M][R] = r.length / 3, r[C++] = r[3 * l[M][0]] + R * h.x, r[C++] = r[3 * l[M][0] + 1] + R * h.y, r[C++] = r[3 * l[M][0] + 2] + R * h.z, n && (n[b++] = n[3 * l[M][0]] + R * d.x, n[b++] = n[3 * l[M][0] + 1] + R * d.y, n[b++] = n[3 * l[M][0] + 2] + R * d.z), s && (s[x++] = s[2 * l[M][0]] + R * f.x, s[x++] = s[2 * l[M][0] + 1] + R * f.y); } l[a] = _[i[S + 1]][i[S + 2]], p.push(l[0][0], l[1][0], l[1][1]); for (let M = 1; M < a; M++) { let R; for (R = 0; R < M; R++) p.push(l[M][R], l[M + 1][R], l[M + 1][R + 1]), p.push(l[M][R], l[M + 1][R + 1], l[M][R + 1]); p.push(l[M][R], l[M + 1][R], l[M + 1][R + 1]); } } t.indices = p, t.applyToMesh(this, this.isVertexBufferUpdatable(Y.PositionKind)); } } /** * Force adjacent facets to share vertices and remove any facets that have all vertices in a line * This will undo any application of covertToFlatShadedMesh * Warning : the mesh is really modified even if not set originally as updatable. A new VertexBuffer is created under the hood each call. 
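 * Usage sketch (illustrative), e.g. to undo an earlier convertToFlatShadedMesh() and re-weld vertices that share a position:
 *   mesh.forceSharedVertices();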
*/ forceSharedVertices() { const e = Ot.ExtractFromMesh(this), t = e.uvs, i = e.indices, r = e.positions, s = e.colors, n = e.matricesIndices, a = e.matricesWeights, l = e.matricesIndicesExtra, o = e.matricesWeightsExtra; if (i === void 0 || r === void 0 || i === null || r === null) Ce.Warn("VertexData contains empty entries"); else { const u = new Array(), h = new Array(), d = new Array(), f = new Array(), p = new Array(), m = new Array(), _ = new Array(), v = new Array(); let C = new Array(), x = 0; const b = {}; let S, M; for (let w = 0; w < i.length; w += 3) { M = [i[w], i[w + 1], i[w + 2]], C = []; for (let V = 0; V < 3; V++) { C[V] = ""; for (let k = 0; k < 3; k++) Math.abs(r[3 * M[V] + k]) < 1e-8 && (r[3 * M[V] + k] = 0), C[V] += r[3 * M[V] + k] + "|"; } if (!(C[0] == C[1] || C[0] == C[2] || C[1] == C[2])) for (let V = 0; V < 3; V++) { if (S = b[C[V]], S === void 0) { b[C[V]] = x, S = x++; for (let k = 0; k < 3; k++) u.push(r[3 * M[V] + k]); if (s != null) for (let k = 0; k < 4; k++) f.push(s[4 * M[V] + k]); if (t != null) for (let k = 0; k < 2; k++) d.push(t[2 * M[V] + k]); if (n != null) for (let k = 0; k < 4; k++) p.push(n[4 * M[V] + k]); if (a != null) for (let k = 0; k < 4; k++) m.push(a[4 * M[V] + k]); if (l != null) for (let k = 0; k < 4; k++) _.push(l[4 * M[V] + k]); if (o != null) for (let k = 0; k < 4; k++) v.push(o[4 * M[V] + k]); } h.push(S); } } const R = new Array(); Ot.ComputeNormals(u, h, R), e.positions = u, e.indices = h, e.normals = R, t != null && (e.uvs = d), s != null && (e.colors = f), n != null && (e.matricesIndices = p), a != null && (e.matricesWeights = m), l != null && (e.matricesIndicesExtra = _), a != null && (e.matricesWeightsExtra = v), e.applyToMesh(this, this.isVertexBufferUpdatable(Y.PositionKind)); } } // Instances /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/naming-convention static _instancedMeshFactory(e, t) { throw yr("InstancedMesh"); } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static _PhysicsImpostorParser(e, t, i) { throw yr("PhysicsImpostor"); } /** * Creates a new InstancedMesh object from the mesh model. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/copies/instances * @param name defines the name of the new instance * @returns a new InstancedMesh */ createInstance(e) { return ke._instancedMeshFactory(e, this); } /** * Synchronises all the mesh instance submeshes to the current mesh submeshes, if any. * After this call, all the mesh instances have the same submeshes than the current mesh. * @returns the current mesh */ synchronizeInstances() { for (let e = 0; e < this.instances.length; e++) this.instances[e]._syncSubMeshes(); return this; } /** * Optimization of the mesh's indices, in case a mesh has duplicated vertices. * The function will only reorder the indices and will not remove unused vertices to avoid problems with submeshes. * This should be used together with the simplification to avoid disappearing triangles. * @param successCallback an optional success callback to be called after the optimization finished. 
* @returns the current mesh */ optimizeIndices(e) { const t = this.getIndices(), i = this.getVerticesData(Y.PositionKind); if (!i || !t) return this; const r = []; for (let n = 0; n < i.length; n = n + 3) r.push(D.FromArray(i, n)); const s = []; return ug.SyncAsyncForLoop(r.length, 40, (n) => { const a = r.length - 1 - n, l = r[a]; for (let o = 0; o < a; ++o) { const u = r[o]; if (l.equals(u)) { s[a] = o; break; } } }, () => { for (let a = 0; a < t.length; ++a) t[a] = s[t[a]] || t[a]; const n = this.subMeshes.slice(0); this.setIndices(t), this.subMeshes = n, e && e(this); }), this; } /** * Serialize current mesh * @param serializationObject defines the object which will receive the serialization data */ serialize(e = {}) { e.name = this.name, e.id = this.id, e.uniqueId = this.uniqueId, e.type = this.getClassName(), $s && $s.HasTags(this) && (e.tags = $s.GetTags(this)), e.position = this.position.asArray(), this.rotationQuaternion ? e.rotationQuaternion = this.rotationQuaternion.asArray() : this.rotation && (e.rotation = this.rotation.asArray()), e.scaling = this.scaling.asArray(), this._postMultiplyPivotMatrix ? e.pivotMatrix = this.getPivotMatrix().asArray() : e.localMatrix = this.getPivotMatrix().asArray(), e.isEnabled = this.isEnabled(!1), e.isVisible = this.isVisible, e.infiniteDistance = this.infiniteDistance, e.pickable = this.isPickable, e.receiveShadows = this.receiveShadows, e.billboardMode = this.billboardMode, e.visibility = this.visibility, e.checkCollisions = this.checkCollisions, e.isBlocker = this.isBlocker, e.overrideMaterialSideOrientation = this.overrideMaterialSideOrientation, this.parent && this.parent._serializeAsParent(e), e.isUnIndexed = this.isUnIndexed; const t = this._geometry; if (t && this.subMeshes) { e.geometryUniqueId = t.uniqueId, e.geometryId = t.id, e.subMeshes = []; for (let i = 0; i < this.subMeshes.length; i++) { const r = this.subMeshes[i]; e.subMeshes.push({ materialIndex: r.materialIndex, verticesStart: r.verticesStart, verticesCount: r.verticesCount, indexStart: r.indexStart, indexCount: r.indexCount }); } } if (this.material ? this.material.doNotSerialize || (e.materialUniqueId = this.material.uniqueId, e.materialId = this.material.id) : (this.material = null, e.materialUniqueId = this._scene.defaultMaterial.uniqueId, e.materialId = this._scene.defaultMaterial.id), this.morphTargetManager && (e.morphTargetManagerId = this.morphTargetManager.uniqueId), this.skeleton && (e.skeletonId = this.skeleton.id, e.numBoneInfluencers = this.numBoneInfluencers), this.getScene()._getComponent(Bt.NAME_PHYSICSENGINE)) { const i = this.getPhysicsImpostor(); i && (e.physicsMass = i.getParam("mass"), e.physicsFriction = i.getParam("friction"), e.physicsRestitution = i.getParam("mass"), e.physicsImpostor = i.type); } this.metadata && (e.metadata = this.metadata), e.instances = []; for (let i = 0; i < this.instances.length; i++) { const r = this.instances[i]; if (r.doNotSerialize) continue; const s = { name: r.name, id: r.id, isEnabled: r.isEnabled(!1), isVisible: r.isVisible, isPickable: r.isPickable, checkCollisions: r.checkCollisions, position: r.position.asArray(), scaling: r.scaling.asArray() }; if (r.parent && r.parent._serializeAsParent(s), r.rotationQuaternion ? 
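/*
 * Sketch of mesh serialization as documented above (assumes `mesh` belongs to a scene; the
 * returned plain object follows the .babylon mesh layout, including instances and thin instances):
 *
 *   const data = mesh.serialize();
 *   const json = JSON.stringify(data); // persist or transmit
 */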
s.rotationQuaternion = r.rotationQuaternion.asArray() : r.rotation && (s.rotation = r.rotation.asArray()), this.getScene()._getComponent(Bt.NAME_PHYSICSENGINE)) { const n = r.getPhysicsImpostor(); n && (s.physicsMass = n.getParam("mass"), s.physicsFriction = n.getParam("friction"), s.physicsRestitution = n.getParam("mass"), s.physicsImpostor = n.type); } r.metadata && (s.metadata = r.metadata), r.actionManager && (s.actions = r.actionManager.serialize(r.name)), e.instances.push(s), St.AppendSerializedAnimations(r, s), s.ranges = r.serializeAnimationRanges(); } if (this._thinInstanceDataStorage.instancesCount && this._thinInstanceDataStorage.matrixData && (e.thinInstances = { instancesCount: this._thinInstanceDataStorage.instancesCount, matrixData: Array.from(this._thinInstanceDataStorage.matrixData), matrixBufferSize: this._thinInstanceDataStorage.matrixBufferSize, enablePicking: this.thinInstanceEnablePicking }, this._userThinInstanceBuffersStorage)) { const i = { data: {}, sizes: {}, strides: {} }; for (const r in this._userThinInstanceBuffersStorage.data) i.data[r] = Array.from(this._userThinInstanceBuffersStorage.data[r]), i.sizes[r] = this._userThinInstanceBuffersStorage.sizes[r], i.strides[r] = this._userThinInstanceBuffersStorage.strides[r]; e.thinInstances.userThinInstance = i; } return St.AppendSerializedAnimations(this, e), e.ranges = this.serializeAnimationRanges(), e.layerMask = this.layerMask, e.alphaIndex = this.alphaIndex, e.hasVertexAlpha = this.hasVertexAlpha, e.overlayAlpha = this.overlayAlpha, e.overlayColor = this.overlayColor.asArray(), e.renderOverlay = this.renderOverlay, e.applyFog = this.applyFog, this.actionManager && (e.actions = this.actionManager.serialize(this.name)), e; } /** @internal */ _syncGeometryWithMorphTargetManager() { if (!this.geometry) return; this._markSubMeshesAsAttributesDirty(); const e = this._internalAbstractMeshDataInfo._morphTargetManager; if (e && e.vertexCount) { if (e.vertexCount !== this.getTotalVertices()) { Ce.Error("Mesh is incompatible with morph targets. Targets and mesh must all have the same vertices count."), this.morphTargetManager = null; return; } if (e.isUsingTextureForTargets) return; for (let t = 0; t < e.numInfluencers; t++) { const i = e.getActiveTarget(t), r = i.getPositions(); if (!r) { Ce.Error("Invalid morph target. Target must have positions."); return; } this.geometry.setVerticesData(Y.PositionKind + t, r, !1, 3); const s = i.getNormals(); s && this.geometry.setVerticesData(Y.NormalKind + t, s, !1, 3); const n = i.getTangents(); n && this.geometry.setVerticesData(Y.TangentKind + t, n, !1, 3); const a = i.getUVs(); a && this.geometry.setVerticesData(Y.UVKind + "_" + t, a, !1, 2); } } else { let t = 0; for (; this.geometry.isVerticesDataPresent(Y.PositionKind + t); ) this.geometry.removeVerticesData(Y.PositionKind + t), this.geometry.isVerticesDataPresent(Y.NormalKind + t) && this.geometry.removeVerticesData(Y.NormalKind + t), this.geometry.isVerticesDataPresent(Y.TangentKind + t) && this.geometry.removeVerticesData(Y.TangentKind + t), this.geometry.isVerticesDataPresent(Y.UVKind + t) && this.geometry.removeVerticesData(Y.UVKind + "_" + t), t++; } } /** * Returns a new Mesh object parsed from the source provided. * @param parsedMesh is the source * @param scene defines the hosting scene * @param rootUrl is the root URL to prefix the `delayLoadingFile` property with * @returns a new Mesh */ static Parse(e, t, i) { let r; if (e.type && e.type === "LinesMesh" ? 
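/*
 * Sketch of the static Parse entry point documented above (normally driven by the .babylon file
 * loader; `parsedMesh` is assumed to be one entry of a .babylon file's "meshes" array and
 * `rootUrl` the base URL for any delay-loaded geometry files):
 *
 *   const restored = BABYLON.Mesh.Parse(parsedMesh, scene, rootUrl);
 */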
r = ke._LinesMeshParser(e, t) : e.type && e.type === "GroundMesh" ? r = ke._GroundMeshParser(e, t) : e.type && e.type === "GoldbergMesh" ? r = ke._GoldbergMeshParser(e, t) : e.type && e.type === "GreasedLineMesh" ? r = ke._GreasedLineMeshParser(e, t) : e.type && e.type === "TrailMesh" ? r = ke._TrailMeshParser(e, t) : r = new ke(e.name, t), r.id = e.id, r._waitingParsedUniqueId = e.uniqueId, $s && $s.AddTagsTo(r, e.tags), r.position = D.FromArray(e.position), e.metadata !== void 0 && (r.metadata = e.metadata), e.rotationQuaternion ? r.rotationQuaternion = Ze.FromArray(e.rotationQuaternion) : e.rotation && (r.rotation = D.FromArray(e.rotation)), r.scaling = D.FromArray(e.scaling), e.localMatrix ? r.setPreTransformMatrix(Ae.FromArray(e.localMatrix)) : e.pivotMatrix && r.setPivotMatrix(Ae.FromArray(e.pivotMatrix)), r.setEnabled(e.isEnabled), r.isVisible = e.isVisible, r.infiniteDistance = e.infiniteDistance, r.showBoundingBox = e.showBoundingBox, r.showSubMeshesBoundingBox = e.showSubMeshesBoundingBox, e.applyFog !== void 0 && (r.applyFog = e.applyFog), e.pickable !== void 0 && (r.isPickable = e.pickable), e.alphaIndex !== void 0 && (r.alphaIndex = e.alphaIndex), r.receiveShadows = e.receiveShadows, e.billboardMode !== void 0 && (r.billboardMode = e.billboardMode), e.visibility !== void 0 && (r.visibility = e.visibility), r.checkCollisions = e.checkCollisions, e.overrideMaterialSideOrientation !== void 0 && (r.overrideMaterialSideOrientation = e.overrideMaterialSideOrientation), e.isBlocker !== void 0 && (r.isBlocker = e.isBlocker), r._shouldGenerateFlatShading = e.useFlatShading, e.freezeWorldMatrix && (r._waitingData.freezeWorldMatrix = e.freezeWorldMatrix), e.parentId !== void 0 && (r._waitingParentId = e.parentId), e.parentInstanceIndex !== void 0 && (r._waitingParentInstanceIndex = e.parentInstanceIndex), e.actions !== void 0 && (r._waitingData.actions = e.actions), e.overlayAlpha !== void 0 && (r.overlayAlpha = e.overlayAlpha), e.overlayColor !== void 0 && (r.overlayColor = ze.FromArray(e.overlayColor)), e.renderOverlay !== void 0 && (r.renderOverlay = e.renderOverlay), r.isUnIndexed = !!e.isUnIndexed, r.hasVertexAlpha = e.hasVertexAlpha, e.delayLoadingFile ? (r.delayLoadState = 4, r.delayLoadingFile = i + e.delayLoadingFile, r.buildBoundingInfo(D.FromArray(e.boundingBoxMinimum), D.FromArray(e.boundingBoxMaximum)), e._binaryInfo && (r._binaryInfo = e._binaryInfo), r._delayInfo = [], e.hasUVs && r._delayInfo.push(Y.UVKind), e.hasUVs2 && r._delayInfo.push(Y.UV2Kind), e.hasUVs3 && r._delayInfo.push(Y.UV3Kind), e.hasUVs4 && r._delayInfo.push(Y.UV4Kind), e.hasUVs5 && r._delayInfo.push(Y.UV5Kind), e.hasUVs6 && r._delayInfo.push(Y.UV6Kind), e.hasColors && r._delayInfo.push(Y.ColorKind), e.hasMatricesIndices && r._delayInfo.push(Y.MatricesIndicesKind), e.hasMatricesWeights && r._delayInfo.push(Y.MatricesWeightsKind), r._delayLoadingFunction = yc._ImportGeometry, uu.ForceFullSceneLoadingForIncremental && r._checkDelayState()) : yc._ImportGeometry(e, r), e.materialUniqueId ? 
r._waitingMaterialId = e.materialUniqueId : e.materialId && (r._waitingMaterialId = e.materialId), e.morphTargetManagerId > -1 && (r.morphTargetManager = t.getMorphTargetManagerById(e.morphTargetManagerId)), e.skeletonId !== void 0 && e.skeletonId !== null && (r.skeleton = t.getLastSkeletonById(e.skeletonId), e.numBoneInfluencers && (r.numBoneInfluencers = e.numBoneInfluencers)), e.animations) { for (let s = 0; s < e.animations.length; s++) { const n = e.animations[s], a = Qo("BABYLON.Animation"); a && r.animations.push(a.Parse(n)); } In.ParseAnimationRanges(r, e, t); } if (e.autoAnimate && t.beginAnimation(r, e.autoAnimateFrom, e.autoAnimateTo, e.autoAnimateLoop, e.autoAnimateSpeed || 1), e.layerMask && !isNaN(e.layerMask) ? r.layerMask = Math.abs(parseInt(e.layerMask)) : r.layerMask = 268435455, e.physicsImpostor && ke._PhysicsImpostorParser(t, r, e), e.lodMeshIds && (r._waitingData.lods = { ids: e.lodMeshIds, distances: e.lodDistances ? e.lodDistances : null, coverages: e.lodCoverages ? e.lodCoverages : null }), e.instances) for (let s = 0; s < e.instances.length; s++) { const n = e.instances[s], a = r.createInstance(n.name); if (n.id && (a.id = n.id), $s && (n.tags ? $s.AddTagsTo(a, n.tags) : $s.AddTagsTo(a, e.tags)), a.position = D.FromArray(n.position), n.metadata !== void 0 && (a.metadata = n.metadata), n.parentId !== void 0 && (a._waitingParentId = n.parentId), n.parentInstanceIndex !== void 0 && (a._waitingParentInstanceIndex = n.parentInstanceIndex), n.isEnabled !== void 0 && n.isEnabled !== null && a.setEnabled(n.isEnabled), n.isVisible !== void 0 && n.isVisible !== null && (a.isVisible = n.isVisible), n.isPickable !== void 0 && n.isPickable !== null && (a.isPickable = n.isPickable), n.rotationQuaternion ? a.rotationQuaternion = Ze.FromArray(n.rotationQuaternion) : n.rotation && (a.rotation = D.FromArray(n.rotation)), a.scaling = D.FromArray(n.scaling), n.checkCollisions != null && n.checkCollisions != null && (a.checkCollisions = n.checkCollisions), n.pickable != null && n.pickable != null && (a.isPickable = n.pickable), n.showBoundingBox != null && n.showBoundingBox != null && (a.showBoundingBox = n.showBoundingBox), n.showSubMeshesBoundingBox != null && n.showSubMeshesBoundingBox != null && (a.showSubMeshesBoundingBox = n.showSubMeshesBoundingBox), n.alphaIndex != null && n.showSubMeshesBoundingBox != null && (a.alphaIndex = n.alphaIndex), n.physicsImpostor && ke._PhysicsImpostorParser(t, a, n), n.actions !== void 0 && (a._waitingData.actions = n.actions), n.animations) { for (let l = 0; l < n.animations.length; l++) { const o = n.animations[l], u = Qo("BABYLON.Animation"); u && a.animations.push(u.Parse(o)); } In.ParseAnimationRanges(a, n, t), n.autoAnimate && t.beginAnimation(a, n.autoAnimateFrom, n.autoAnimateTo, n.autoAnimateLoop, n.autoAnimateSpeed || 1); } } if (e.thinInstances) { const s = e.thinInstances; if (r.thinInstanceEnablePicking = !!s.enablePicking, s.matrixData ? 
(r.thinInstanceSetBuffer("matrix", new Float32Array(s.matrixData), 16, !1), r._thinInstanceDataStorage.matrixBufferSize = s.matrixBufferSize, r._thinInstanceDataStorage.instancesCount = s.instancesCount) : r._thinInstanceDataStorage.matrixBufferSize = s.matrixBufferSize, e.thinInstances.userThinInstance) { const n = e.thinInstances.userThinInstance; for (const a in n.data) r.thinInstanceSetBuffer(a, new Float32Array(n.data[a]), n.strides[a], !1), r._userThinInstanceBuffersStorage.sizes[a] = n.sizes[a]; } } return r; } // Skeletons /** * Prepare internal position array for software CPU skinning * @returns original positions used for CPU skinning. Useful for integrating Morphing with skeletons in same mesh */ setPositionsForCPUSkinning() { const e = this._internalMeshDataInfo; if (!e._sourcePositions) { const t = this.getVerticesData(Y.PositionKind); if (!t) return e._sourcePositions; e._sourcePositions = new Float32Array(t), this.isVertexBufferUpdatable(Y.PositionKind) || this.setVerticesData(Y.PositionKind, t, !0); } return e._sourcePositions; } /** * Prepare internal normal array for software CPU skinning * @returns original normals used for CPU skinning. Useful for integrating Morphing with skeletons in same mesh. */ setNormalsForCPUSkinning() { const e = this._internalMeshDataInfo; if (!e._sourceNormals) { const t = this.getVerticesData(Y.NormalKind); if (!t) return e._sourceNormals; e._sourceNormals = new Float32Array(t), this.isVertexBufferUpdatable(Y.NormalKind) || this.setVerticesData(Y.NormalKind, t, !0); } return e._sourceNormals; } /** * Updates the vertex buffer by applying transformation from the bones * @param skeleton defines the skeleton to apply to current mesh * @returns the current mesh */ applySkeleton(e) { if (!this.geometry) return this; if (this.geometry._softwareSkinningFrameId == this.getScene().getFrameId()) return this; if (this.geometry._softwareSkinningFrameId = this.getScene().getFrameId(), !this.isVerticesDataPresent(Y.PositionKind)) return this; if (!this.isVerticesDataPresent(Y.MatricesIndicesKind)) return this; if (!this.isVerticesDataPresent(Y.MatricesWeightsKind)) return this; const t = this.isVerticesDataPresent(Y.NormalKind), i = this._internalMeshDataInfo; if (!i._sourcePositions) { const v = this.subMeshes.slice(); this.setPositionsForCPUSkinning(), this.subMeshes = v; } t && !i._sourceNormals && this.setNormalsForCPUSkinning(); let r = this.getVerticesData(Y.PositionKind); if (!r) return this; r instanceof Float32Array || (r = new Float32Array(r)); let s = this.getVerticesData(Y.NormalKind); if (t) { if (!s) return this; s instanceof Float32Array || (s = new Float32Array(s)); } const n = this.getVerticesData(Y.MatricesIndicesKind), a = this.getVerticesData(Y.MatricesWeightsKind); if (!a || !n) return this; const l = this.numBoneInfluencers > 4, o = l ? this.getVerticesData(Y.MatricesIndicesExtraKind) : null, u = l ? 
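/*
 * Sketch of the CPU-skinning helpers documented above (illustrative; the GPU normally applies the
 * skeleton, but applySkeleton bakes the bone transforms into the vertex buffers on the CPU):
 *
 *   mesh.setPositionsForCPUSkinning();
 *   mesh.setNormalsForCPUSkinning();
 *   scene.registerBeforeRender(() => mesh.applySkeleton(mesh.skeleton));
 */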
this.getVerticesData(Y.MatricesWeightsExtraKind) : null, h = e.getTransformMatrices(this), d = D.Zero(), f = new Ae(), p = new Ae(); let m = 0, _; for (let v = 0; v < r.length; v += 3, m += 4) { let C; for (_ = 0; _ < 4; _++) C = a[m + _], C > 0 && (Ae.FromFloat32ArrayToRefScaled(h, Math.floor(n[m + _] * 16), C, p), f.addToSelf(p)); if (l) for (_ = 0; _ < 4; _++) C = u[m + _], C > 0 && (Ae.FromFloat32ArrayToRefScaled(h, Math.floor(o[m + _] * 16), C, p), f.addToSelf(p)); D.TransformCoordinatesFromFloatsToRef(i._sourcePositions[v], i._sourcePositions[v + 1], i._sourcePositions[v + 2], f, d), d.toArray(r, v), t && (D.TransformNormalFromFloatsToRef(i._sourceNormals[v], i._sourceNormals[v + 1], i._sourceNormals[v + 2], f, d), d.toArray(s, v)), f.reset(); } return this.updateVerticesData(Y.PositionKind, r), t && this.updateVerticesData(Y.NormalKind, s), this; } // Tools /** * Returns an object containing a min and max Vector3 which are the minimum and maximum vectors of each mesh bounding box from the passed array, in the world coordinates * @param meshes defines the list of meshes to scan * @returns an object `{min:` Vector3`, max:` Vector3`}` */ static MinMax(e) { let t = null, i = null; return e.forEach(function(r) { const n = r.getBoundingInfo().boundingBox; !t || !i ? (t = n.minimumWorld, i = n.maximumWorld) : (t.minimizeInPlace(n.minimumWorld), i.maximizeInPlace(n.maximumWorld)); }), !t || !i ? { min: D.Zero(), max: D.Zero() } : { min: t, max: i }; } /** * Returns the center of the `{min:` Vector3`, max:` Vector3`}` or the center of MinMax vector3 computed from a mesh array * @param meshesOrMinMaxVector could be an array of meshes or a `{min:` Vector3`, max:` Vector3`}` object * @returns a vector3 */ static Center(e) { const t = e instanceof Array ? ke.MinMax(e) : e; return D.Center(t.min, t.max); } /** * Merge the array of meshes into a single mesh for performance reasons. * @param meshes array of meshes with the vertices to merge. Entries cannot be empty meshes. * @param disposeSource when true (default), dispose of the vertices from the source meshes. * @param allow32BitsIndices when the sum of the vertices > 64k, this must be set to true. * @param meshSubclass (optional) can be set to a Mesh where the merged vertices will be inserted. * @param subdivideWithSubMeshes when true (false default), subdivide mesh into subMeshes. * @param multiMultiMaterials when true (false default), subdivide mesh into subMeshes with multiple materials, ignores subdivideWithSubMeshes. * @returns a new mesh */ static MergeMeshes(e, t = !0, i, r, s, n) { return WB(ke._MergeMeshesCoroutine(e, t, i, r, s, n, !1)); } /** * Merge the array of meshes into a single mesh for performance reasons. * @param meshes array of meshes with the vertices to merge. Entries cannot be empty meshes. * @param disposeSource when true (default), dispose of the vertices from the source meshes. * @param allow32BitsIndices when the sum of the vertices > 64k, this must be set to true. * @param meshSubclass (optional) can be set to a Mesh where the merged vertices will be inserted. * @param subdivideWithSubMeshes when true (false default), subdivide mesh into subMeshes. * @param multiMultiMaterials when true (false default), subdivide mesh into subMeshes with multiple materials, ignores subdivideWithSubMeshes. 
* @returns a new mesh */ static MergeMeshesAsync(e, t = !0, i, r, s, n) { return jB(ke._MergeMeshesCoroutine(e, t, i, r, s, n, !0), bte()); } static *_MergeMeshesCoroutine(e, t = !0, i, r, s, n, a) { if (e = e.filter(Boolean), e.length === 0) return null; let l; if (!i) { let R = 0; for (l = 0; l < e.length; l++) if (R += e[l].getTotalVertices(), R >= 65536) return Ce.Warn("Cannot merge meshes because resulting mesh will have more than 65536 vertices. Please use allow32BitsIndices = true to use 32 bits indices"), null; } n && (s = !1); const o = new Array(), u = new Array(), h = new Array(), d = e[0].overrideMaterialSideOrientation; for (l = 0; l < e.length; l++) { const R = e[l]; if (R.isAnInstance) return Ce.Warn("Cannot merge instance meshes."), null; if (d !== R.overrideMaterialSideOrientation) return Ce.Warn("Cannot merge meshes with different overrideMaterialSideOrientation values."), null; if (s && h.push(R.getTotalIndices()), n) if (R.material) { const w = R.material; if (w instanceof xm) { for (let V = 0; V < w.subMaterials.length; V++) o.indexOf(w.subMaterials[V]) < 0 && o.push(w.subMaterials[V]); for (let V = 0; V < R.subMeshes.length; V++) u.push(o.indexOf(w.subMaterials[R.subMeshes[V].materialIndex])), h.push(R.subMeshes[V].indexCount); } else { o.indexOf(w) < 0 && o.push(w); for (let V = 0; V < R.subMeshes.length; V++) u.push(o.indexOf(w)), h.push(R.subMeshes[V].indexCount); } } else for (let w = 0; w < R.subMeshes.length; w++) u.push(0), h.push(R.subMeshes[w].indexCount); } const f = e[0], p = (R) => { const w = R.computeWorldMatrix(!0); return { vertexData: Ot.ExtractFromMesh(R, !1, !1), transform: w }; }, { vertexData: m, transform: _ } = p(f); a && (yield); const v = new Array(e.length - 1); for (let R = 1; R < e.length; R++) v[R - 1] = p(e[R]), a && (yield); const C = m._mergeCoroutine(_, v, i, a, !t); let x = C.next(); for (; !x.done; ) a && (yield), x = C.next(); const b = x.value; r || (r = new ke(f.name + "_merged", f.getScene())); const S = b._applyToCoroutine(r, void 0, a); let M = S.next(); for (; !M.done; ) a && (yield), M = S.next(); if (r.checkCollisions = f.checkCollisions, r.overrideMaterialSideOrientation = f.overrideMaterialSideOrientation, t) for (l = 0; l < e.length; l++) e[l].dispose(); if (s || n) { r.releaseSubMeshes(), l = 0; let R = 0; for (; l < h.length; ) ed.CreateFromIndices(0, R, h[l], r, void 0, !1), R += h[l], l++; for (const w of r.subMeshes) w.refreshBoundingInfo(); r.computeWorldMatrix(!0); } if (n) { const R = new xm(f.name + "_merged", f.getScene()); R.subMaterials = o; for (let w = 0; w < r.subMeshes.length; w++) r.subMeshes[w].materialIndex = u[w]; r.material = R; } else r.material = f.material; return r; } /** * @internal */ addInstance(e) { e._indexInSourceMeshInstanceArray = this.instances.length, this.instances.push(e); } /** * @internal */ removeInstance(e) { const t = e._indexInSourceMeshInstanceArray; if (t != -1) { if (t !== this.instances.length - 1) { const i = this.instances[this.instances.length - 1]; this.instances[t] = i, i._indexInSourceMeshInstanceArray = t; } e._indexInSourceMeshInstanceArray = -1, this.instances.pop(); } } /** @internal */ _shouldConvertRHS() { return this.overrideMaterialSideOrientation === At.CounterClockWiseSideOrientation; } /** @internal */ _getRenderingFillMode(e) { var t; const i = this.getScene(); return i.forcePointsCloud ? At.PointFillMode : i.forceWireframe ? At.WireFrameFillMode : (t = this.overrideRenderingFillMode) !== null && t !== void 0 ? 
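/*
 * Sketch of mesh merging as documented above (illustrative meshes; allow32BitsIndices is needed
 * once the combined vertex count exceeds 65536):
 *
 *   const merged = BABYLON.Mesh.MergeMeshes([m1, m2, m3], true, true, undefined, false, true);
 *   // disposeSource = true, allow32BitsIndices = true, subdivideWithSubMeshes = false, multiMultiMaterials = true
 */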
t : e; } // deprecated methods /** * Sets the mesh material by the material or multiMaterial `id` property * @param id is a string identifying the material or the multiMaterial * @returns the current mesh * @deprecated Please use setMaterialById instead */ setMaterialByID(e) { return this.setMaterialById(e); } /** * Creates a ribbon mesh. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param * @param name defines the name of the mesh to create * @param pathArray is a required array of paths, each of which is an array of successive Vector3. The pathArray parameter depicts the ribbon geometry. * @param closeArray creates a seam between the first and the last paths of the path array (default is false) * @param closePath creates a seam between the first and the last points of each path of the path array * @param offset is taken into account only if the `pathArray` contains a single path * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param instance defines an instance of an existing Ribbon object to be updated with the passed `pathArray` parameter (https://doc.babylonjs.com/how_to/How_to_dynamically_morph_a_mesh#ribbon) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateRibbon(e, t, i, r, s, n, a, l, o) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a plane polygonal mesh. By default, this is a disc. * @param name defines the name of the mesh to create * @param radius sets the radius size (float) of the polygon (default 0.5) * @param tessellation sets the number of polygon sides (positive integer, default 64). So a tessellation value of 3 will build a triangle, 4 a square, etc * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateDisc(e, t, i, r, s, n) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a box mesh. * @param name defines the name of the mesh to create * @param size sets the size (float) of each box side (default 1) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateBox(e, t, i, r, s) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a sphere mesh.
* @param name defines the name of the mesh to create * @param segments sets the sphere number of horizontal stripes (positive integer, default 32) * @param diameter sets the diameter size (float) of the sphere (default 1) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateSphere(e, t, i, r, s, n) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a hemisphere mesh. * @param name defines the name of the mesh to create * @param segments sets the sphere number of horizontal stripes (positive integer, default 32) * @param diameter sets the diameter size (float) of the sphere (default 1) * @param scene defines the hosting scene * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateHemisphere(e, t, i, r) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a cylinder or a cone mesh. * @param name defines the name of the mesh to create * @param height sets the height size (float) of the cylinder/cone (float, default 2) * @param diameterTop set the top cap diameter (floats, default 1) * @param diameterBottom set the bottom cap diameter (floats, default 1). This value can't be zero * @param tessellation sets the number of cylinder sides (positive integer, default 24). Set it to 3 to get a prism for instance * @param subdivisions sets the number of rings along the cylinder height (positive integer, default 1) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateCylinder(e, t, i, r, s, n, a, l, o) { throw new Error("Import MeshBuilder to populate this function"); } // Torus (Code from SharpDX.org) /** * Creates a torus mesh. * @param name defines the name of the mesh to create * @param diameter sets the diameter size (float) of the torus (default 1) * @param thickness sets the diameter size of the tube of the torus (float, default 0.5) * @param tessellation sets the number of torus sides (positive integer, default 16) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateTorus(e, t, i, r, s, n, a) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a torus knot mesh. 
* @param name defines the name of the mesh to create * @param radius sets the global radius size (float) of the torus knot (default 2) * @param tube sets the diameter size of the tube of the torus (float, default 0.5) * @param radialSegments sets the number of sides on each tube segments (positive integer, default 32) * @param tubularSegments sets the number of tubes to decompose the knot into (positive integer, default 32) * @param p the number of windings on X axis (positive integers, default 2) * @param q the number of windings on Y axis (positive integers, default 3) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateTorusKnot(e, t, i, r, s, n, a, l, o, u) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a line mesh.. * @param name defines the name of the mesh to create * @param points is an array successive Vector3 * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param instance is an instance of an existing LineMesh object to be updated with the passed `points` parameter (https://doc.babylonjs.com/how_to/How_to_dynamically_morph_a_mesh#lines-and-dashedlines). * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateLines(e, t, i, r, s) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a dashed line mesh. * @param name defines the name of the mesh to create * @param points is an array successive Vector3 * @param dashSize is the size of the dashes relatively the dash number (positive float, default 3) * @param gapSize is the size of the gap between two successive dashes relatively the dash number (positive float, default 1) * @param dashNb is the intended total number of dashes (positive integer, default 200) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param instance is an instance of an existing LineMesh object to be updated with the passed `points` parameter (https://doc.babylonjs.com/how_to/How_to_dynamically_morph_a_mesh#lines-and-dashedlines) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateDashedLines(e, t, i, r, s, n, a, l) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a polygon mesh.Please consider using the same method from the MeshBuilder class instead * The polygon's shape will depend on the input parameters and is constructed parallel to a ground mesh. * The parameter `shape` is a required array of successive Vector3 representing the corners of the polygon in th XoZ plane, that is y = 0 for all vectors. * You can set the mesh side orientation with the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * The mesh can be set to updatable with the boolean parameter `updatable` (default false) if its internal geometry is supposed to change once created. * Remember you can only change the shape positions, not their number when updating a polygon. 
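 * Since this method is deprecated, the MeshBuilder equivalent is typically used instead (sketch; `corners` is an
 * illustrative array of Vector3 in the XoZ plane and an earcut implementation must be supplied):
 *   const poly = BABYLON.MeshBuilder.CreatePolygon("poly", { shape: corners }, scene, earcut);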
* @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param#non-regular-polygon * @param name defines the name of the mesh to create * @param shape is a required array of successive Vector3 representing the corners of the polygon in th XoZ plane, that is y = 0 for all vectors * @param scene defines the hosting scene * @param holes is a required array of arrays of successive Vector3 used to defines holes in the polygon * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param earcutInjection can be used to inject your own earcut reference * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreatePolygon(e, t, i, r, s, n, a) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates an extruded polygon mesh, with depth in the Y direction.. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param#extruded-non-regular-polygon * @param name defines the name of the mesh to create * @param shape is a required array of successive Vector3 representing the corners of the polygon in th XoZ plane, that is y = 0 for all vectors * @param depth defines the height of extrusion * @param scene defines the hosting scene * @param holes is a required array of arrays of successive Vector3 used to defines holes in the polygon * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param earcutInjection can be used to inject your own earcut reference * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static ExtrudePolygon(e, t, i, r, s, n, a, l) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates an extruded shape mesh. * The extrusion is a parametric shape. It has no predefined shape. Its final shape will depend on the input parameters. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param#extruded-shapes * @param name defines the name of the mesh to create * @param shape is a required array of successive Vector3. This array depicts the shape to be extruded in its local space : the shape must be designed in the xOy plane and will be extruded along the Z axis * @param path is a required array of successive Vector3. This is the axis curve the shape is extruded along * @param scale is the value to scale the shape * @param rotation is the angle value to rotate the shape each step (each path point), from the former step (so rotation added each step) along the curve * @param cap sets the way the extruded shape is capped. 
Possible values : Mesh.NO_CAP (default), Mesh.CAP_START, Mesh.CAP_END, Mesh.CAP_ALL * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param instance is an instance of an existing ExtrudedShape object to be updated with the passed `shape`, `path`, `scale` or `rotation` parameters (https://doc.babylonjs.com/how_to/How_to_dynamically_morph_a_mesh#extruded-shape) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static ExtrudeShape(e, t, i, r, s, n, a, l, o, u) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates an custom extruded shape mesh. * The custom extrusion is a parametric shape. * It has no predefined shape. Its final shape will depend on the input parameters. * * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param#extruded-shapes * @param name defines the name of the mesh to create * @param shape is a required array of successive Vector3. This array depicts the shape to be extruded in its local space : the shape must be designed in the xOy plane and will be extruded along the Z axis * @param path is a required array of successive Vector3. This is the axis curve the shape is extruded along * @param scaleFunction is a custom Javascript function called on each path point * @param rotationFunction is a custom Javascript function called on each path point * @param ribbonCloseArray forces the extrusion underlying ribbon to close all the paths in its `pathArray` * @param ribbonClosePath forces the extrusion underlying ribbon to close its `pathArray` * @param cap sets the way the extruded shape is capped. Possible values : Mesh.NO_CAP (default), Mesh.CAP_START, Mesh.CAP_END, Mesh.CAP_ALL * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param instance is an instance of an existing ExtrudedShape object to be updated with the passed `shape`, `path`, `scale` or `rotation` parameters (https://doc.babylonjs.com/features/featuresDeepDive/mesh/dynamicMeshMorph#extruded-shape) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static ExtrudeShapeCustom(e, t, i, r, s, n, a, l, o, u, h, d) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates lathe mesh. * The lathe is a shape with a symmetry axis : a 2D model shape is rotated around this axis to design the lathe. * @param name defines the name of the mesh to create * @param shape is a required array of successive Vector3. This array depicts the shape to be rotated in its local space : the shape must be designed in the xOy plane and will be rotated around the Y axis. It's usually a 2D shape, so the Vector3 z coordinates are often set to zero * @param radius is the radius value of the lathe * @param tessellation is the side number of the lathe. 
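 *
 * A hedged sketch of the non-deprecated path through MeshBuilder.CreateLathe; the `profile` points are an assumption, drawn in the xOy plane (z = 0) as required, and `scene` is assumed to exist.
 * @example
 * const profile = [
 *     new BABYLON.Vector3(0.5, 0, 0),
 *     new BABYLON.Vector3(0.8, 1, 0),
 *     new BABYLON.Vector3(0.3, 2, 0)
 * ];
 * const vase = BABYLON.MeshBuilder.CreateLathe("vase", { shape: profile, tessellation: 32 }, scene);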
* @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateLathe(e, t, i, r, s, n, a) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a plane mesh. * @param name defines the name of the mesh to create * @param size sets the size (float) of both sides of the plane at once (default 1) * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreatePlane(e, t, i, r, s) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a ground mesh. * @param name defines the name of the mesh to create * @param width set the width of the ground * @param height set the height of the ground * @param subdivisions sets the number of subdivisions per side * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateGround(e, t, i, r, s, n) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a tiled ground mesh. * @param name defines the name of the mesh to create * @param xmin set the ground minimum X coordinate * @param zmin set the ground minimum Y coordinate * @param xmax set the ground maximum X coordinate * @param zmax set the ground maximum Z coordinate * @param subdivisions is an object `{w: positive integer, h: positive integer}` (default `{w: 6, h: 6}`). `w` and `h` are the numbers of subdivisions on the ground width and height. Each subdivision is called a tile * @param precision is an object `{w: positive integer, h: positive integer}` (default `{w: 2, h: 2}`). `w` and `h` are the numbers of subdivisions on the ground width and height of each tile * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateTiledGround(e, t, i, r, s, n, a, l, o) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a ground mesh from a height map. 
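 *
 * A hedged usage sketch via MeshBuilder, the replacement recommended by the deprecation notes; the URL and sizes are placeholders and `scene` is assumed to exist.
 * @example
 * const ground = BABYLON.MeshBuilder.CreateGroundFromHeightMap("terrain", "textures/heightMap.png", {
 *     width: 100, height: 100, subdivisions: 64,
 *     minHeight: 0, maxHeight: 10,
 *     onReady: () => console.log("height map applied") // fired once the image is downloaded and applied
 * }, scene);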
* @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set/height_map * @param name defines the name of the mesh to create * @param url sets the URL of the height map image resource * @param width set the ground width size * @param height set the ground height size * @param subdivisions sets the number of subdivision per side * @param minHeight is the minimum altitude on the ground * @param maxHeight is the maximum altitude on the ground * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param onReady is a callback function that will be called once the mesh is built (the height map download can last some time) * @param alphaFilter will filter any data where the alpha channel is below this value, defaults 0 (all data visible) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateGroundFromHeightMap(e, t, i, r, s, n, a, l, o, u, h) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a tube mesh. * The tube is a parametric shape. * It has no predefined shape. Its final shape will depend on the input parameters. * * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/param * @param name defines the name of the mesh to create * @param path is a required array of successive Vector3. It is the curve used as the axis of the tube * @param radius sets the tube radius size * @param tessellation is the number of sides on the tubular surface * @param radiusFunction is a custom function. If it is not null, it overrides the parameter `radius`. This function is called on each point of the tube path and is passed the index `i` of the i-th point and the distance of this point from the first point of the path * @param cap sets the way the extruded shape is capped. Possible values : Mesh.NO_CAP (default), Mesh.CAP_START, Mesh.CAP_END, Mesh.CAP_ALL * @param scene defines the hosting scene * @param updatable defines if the mesh must be flagged as updatable * @param sideOrientation defines the mesh side orientation (https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation) * @param instance is an instance of an existing Tube object to be updated with the passed `pathArray` parameter (https://doc.babylonjs.com/how_to/How_to_dynamically_morph_a_mesh#tube) * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateTube(e, t, i, r, s, n, a, l, o, u) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a polyhedron mesh. *. * * The parameter `type` (positive integer, max 14, default 0) sets the polyhedron type to build among the 15 embedded types. Please refer to the type sheet in the tutorial to choose the wanted type * * The parameter `size` (positive float, default 1) sets the polygon size * * You can overwrite the `size` on each dimension bu using the parameters `sizeX`, `sizeY` or `sizeZ` (positive floats, default to `size` value) * * You can build other polyhedron types than the 15 embbeded ones by setting the parameter `custom` (`polyhedronObject`, default null). If you set the parameter `custom`, this overwrittes the parameter `type` * * A `polyhedronObject` is a formatted javascript object. 
You'll find a full file with pre-set polyhedra here : https://github.com/BabylonJS/Extensions/tree/master/Polyhedron * * You can set the color and the UV of each side of the polyhedron with the parameters `faceColors` (Color4, default `(1, 1, 1, 1)`) and faceUV (Vector4, default `(0, 0, 1, 1)`) * * To understand how to set `faceUV` or `faceColors`, please read this by considering the right number of faces of your polyhedron, instead of only 6 for the box : https://doc.babylonjs.com/features/featuresDeepDive/materials/using/texturePerBoxFace * * The parameter `flat` (boolean, default true). If set to false, it gives the polyhedron a single global face, so less vertices and shared normals. In this case, `faceColors` and `faceUV` are ignored * * You can also set the mesh side orientation with the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * If you create a double-sided mesh, you can choose what parts of the texture image to crop and stick respectively on the front and the back sides with the parameters `frontUVs` and `backUVs` (Vector4). Detail here : https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation * * The mesh can be set to updatable with the boolean parameter `updatable` (default false) if its internal geometry is supposed to change once created * @param name defines the name of the mesh to create * @param options defines the options used to create the mesh * @param scene defines the hosting scene * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreatePolyhedron(e, t, i) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a sphere based upon an icosahedron with 20 triangular faces which can be subdivided * * The parameter `radius` sets the radius size (float) of the icosphere (default 1) * * You can set some different icosphere dimensions, for instance to build an ellipsoid, by using the parameters `radiusX`, `radiusY` and `radiusZ` (all by default have the same value than `radius`) * * The parameter `subdivisions` sets the number of subdivisions (positive integer, default 4). The more subdivisions, the more faces on the icosphere whatever its size * * The parameter `flat` (boolean, default true) gives each side its own normals. Set it to false to get a smooth continuous light reflection on the surface * * You can also set the mesh side orientation with the values : Mesh.FRONTSIDE (default), Mesh.BACKSIDE or Mesh.DOUBLESIDE * * If you create a double-sided mesh, you can choose what parts of the texture image to crop and stick respectively on the front and the back sides with the parameters `frontUVs` and `backUVs` (Vector4). Detail here : https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/set#side-orientation * * The mesh can be set to updatable with the boolean parameter `updatable` (default false) if its internal geometry is supposed to change once created * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/creation/polyhedra#icosphere * @param name defines the name of the mesh * @param options defines the options used to create the mesh * @param scene defines the hosting scene * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateIcoSphere(e, t, i) { throw new Error("Import MeshBuilder to populate this function"); } /** * Creates a decal mesh. *. 
* A decal is a mesh usually applied as a model onto the surface of another mesh * @param name defines the name of the mesh * @param sourceMesh defines the mesh receiving the decal * @param position sets the position of the decal in world coordinates * @param normal sets the normal of the mesh where the decal is applied onto in world coordinates * @param size sets the decal scaling * @param angle sets the angle to rotate the decal * @returns a new Mesh * @deprecated Please use MeshBuilder instead */ static CreateDecal(e, t, i, r, s, n) { throw new Error("Import MeshBuilder to populate this function"); } /** Creates a Capsule Mesh * @param name defines the name of the mesh. * @param options the constructors options used to shape the mesh. * @param scene defines the scene the mesh is scoped to. * @returns the capsule mesh * @see https://doc.babylonjs.com/how_to/capsule_shape * @deprecated Please use MeshBuilder instead */ static CreateCapsule(e, t, i) { throw new Error("Import MeshBuilder to populate this function"); } /** * Extends a mesh to a Goldberg mesh * Warning the mesh to convert MUST be an import of a perviously exported Goldberg mesh * @param mesh the mesh to convert * @returns the extended mesh * @deprecated Please use ExtendMeshToGoldberg instead */ static ExtendToGoldberg(e) { throw new Error("Import MeshBuilder to populate this function"); } } ke.FRONTSIDE = Ot.FRONTSIDE; ke.BACKSIDE = Ot.BACKSIDE; ke.DOUBLESIDE = Ot.DOUBLESIDE; ke.DEFAULTSIDE = Ot.DEFAULTSIDE; ke.NO_CAP = 0; ke.CAP_START = 1; ke.CAP_END = 2; ke.CAP_ALL = 3; ke.NO_FLIP = 0; ke.FLIP_TILE = 1; ke.ROTATE_TILE = 2; ke.FLIP_ROW = 3; ke.ROTATE_ROW = 4; ke.FLIP_N_ROTATE_TILE = 5; ke.FLIP_N_ROTATE_ROW = 6; ke.CENTER = 0; ke.LEFT = 1; ke.RIGHT = 2; ke.TOP = 3; ke.BOTTOM = 4; ke.INSTANCEDMESH_SORT_TRANSPARENT = !1; ke._GroundMeshParser = (c, e) => { throw yr("GroundMesh"); }; ke._GoldbergMeshParser = (c, e) => { throw yr("GoldbergMesh"); }; ke._LinesMeshParser = (c, e) => { throw yr("LinesMesh"); }; ke._GreasedLineMeshParser = (c, e) => { throw yr("GreasedLineMesh"); }; ke._GreasedLineRibbonMeshParser = (c, e) => { throw yr("GreasedLineRibbonMesh"); }; ke._TrailMeshParser = (c, e) => { throw yr("TrailMesh"); }; Be("BABYLON.Mesh", ke); ke._instancedMeshFactory = (c, e) => { const t = new Cg(c, e); if (e.instancedBuffers) { t.instancedBuffers = {}; for (const i in e.instancedBuffers) t.instancedBuffers[i] = e.instancedBuffers[i]; } return t; }; class Cg extends xr { /** * Creates a new InstancedMesh object from the mesh source. * @param name defines the name of the instance * @param source the mesh to create the instance from */ constructor(e, t) { super(e, t.getScene()), this._indexInSourceMeshInstanceArray = -1, this._distanceToCamera = 0, t.addInstance(this), this._sourceMesh = t, this._unIndexed = t._unIndexed, this.position.copyFrom(t.position), this.rotation.copyFrom(t.rotation), this.scaling.copyFrom(t.scaling), t.rotationQuaternion && (this.rotationQuaternion = t.rotationQuaternion.clone()), this.animations = t.animations.slice(); for (const i of t.getAnimationRanges()) i != null && this.createAnimationRange(i.name, i.from, i.to); this.infiniteDistance = t.infiniteDistance, this.setPivotMatrix(t.getPivotMatrix()), this.refreshBoundingInfo(!0, !0), this._syncSubMeshes(); } /** * Returns the string "InstancedMesh". 
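 *
 * Instancing sketch (illustrative, not from the original source): an instance shares the geometry and material of its source mesh, so only per-instance transforms differ. Assumes `BABYLON` and an existing `scene`.
 * @example
 * const box = BABYLON.MeshBuilder.CreateBox("box", { size: 1 }, scene);
 * const inst = box.createInstance("box_instance"); // returns an InstancedMesh
 * inst.position.x = 3;                             // per-instance transform is independent
 * console.log(inst.getClassName());                // "InstancedMesh"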
*/ getClassName() { return "InstancedMesh"; } /** Gets the list of lights affecting that mesh */ get lightSources() { return this._sourceMesh._lightSources; } _resyncLightSources() { } _resyncLightSource() { } _removeLightSource() { } // Methods /** * If the source mesh receives shadows */ get receiveShadows() { return this._sourceMesh.receiveShadows; } set receiveShadows(e) { var t; ((t = this._sourceMesh) === null || t === void 0 ? void 0 : t.receiveShadows) !== e && Ve.Warn("Setting receiveShadows on an instanced mesh has no effect"); } /** * The material of the source mesh */ get material() { return this._sourceMesh.material; } set material(e) { var t; ((t = this._sourceMesh) === null || t === void 0 ? void 0 : t.material) !== e && Ve.Warn("Setting material on an instanced mesh has no effect"); } /** * Visibility of the source mesh */ get visibility() { return this._sourceMesh.visibility; } set visibility(e) { var t; ((t = this._sourceMesh) === null || t === void 0 ? void 0 : t.visibility) !== e && Ve.Warn("Setting visibility on an instanced mesh has no effect"); } /** * Skeleton of the source mesh */ get skeleton() { return this._sourceMesh.skeleton; } set skeleton(e) { var t; ((t = this._sourceMesh) === null || t === void 0 ? void 0 : t.skeleton) !== e && Ve.Warn("Setting skeleton on an instanced mesh has no effect"); } /** * Rendering ground id of the source mesh */ get renderingGroupId() { return this._sourceMesh.renderingGroupId; } set renderingGroupId(e) { !this._sourceMesh || e === this._sourceMesh.renderingGroupId || Ce.Warn("Note - setting renderingGroupId of an instanced mesh has no effect on the scene"); } /** * Returns the total number of vertices (integer). */ getTotalVertices() { return this._sourceMesh ? this._sourceMesh.getTotalVertices() : 0; } /** * Returns a positive integer : the total number of indices in this mesh geometry. * @returns the number of indices or zero if the mesh has no geometry. */ getTotalIndices() { return this._sourceMesh.getTotalIndices(); } /** * The source mesh of the instance */ get sourceMesh() { return this._sourceMesh; } /** * Creates a new InstancedMesh object from the mesh model. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/copies/instances * @param name defines the name of the new instance * @returns a new InstancedMesh */ createInstance(e) { return this._sourceMesh.createInstance(e); } /** * Is this node ready to be used/rendered * @param completeCheck defines if a complete check (including materials and lights) has to be done (false by default) * @returns {boolean} is it ready */ isReady(e = !1) { return this._sourceMesh.isReady(e, !0); } /** * Returns an array of integers or a typed array (Int32Array, Uint32Array, Uint16Array) populated with the mesh indices. * @param kind kind of verticies to retrieve (eg. positions, normals, uvs, etc.) * @param copyWhenShared If true (default false) and and if the mesh geometry is shared among some other meshes, the returned array is a copy of the internal one. * @param forceCopy defines a boolean forcing the copy of the buffer no matter what the value of copyWhenShared is * @returns a float array or a Float32Array of the requested kind of data : positions, normals, uvs, etc. */ getVerticesData(e, t, i) { return this._sourceMesh.getVerticesData(e, t, i); } /** * Sets the vertex data of the mesh geometry for the requested `kind`. * If the mesh has no geometry, a new Geometry object is set to the mesh and then passed this vertex data. 
* The `data` are either a numeric array or a Float32Array. * The parameter `updatable` is passed as is to the underlying Geometry object constructor (if initially none) or updater. * The parameter `stride` is an optional positive integer; it is usually automatically deduced from the `kind` (3 for positions or normals, 2 for UV, etc). * Note that a new underlying VertexBuffer object is created each call. * If the `kind` is the `PositionKind`, the mesh BoundingInfo is renewed, so the bounding box and sphere, and the mesh World Matrix is recomputed. * * Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * * Returns the Mesh. * @param kind * @param data * @param updatable * @param stride */ setVerticesData(e, t, i, r) { return this.sourceMesh && this.sourceMesh.setVerticesData(e, t, i, r), this.sourceMesh; } /** * Updates the existing vertex data of the mesh geometry for the requested `kind`. * If the mesh has no geometry, it is simply returned as it is. * The `data` are either a numeric array or a Float32Array. * No new underlying VertexBuffer object is created. * If the `kind` is the `PositionKind` and if `updateExtends` is true, the mesh BoundingInfo is renewed, so the bounding box and sphere, and the mesh World Matrix is recomputed. * If the parameter `makeItUnique` is true, a new global geometry is created from these positions and is set to the mesh. * * Possible `kind` values : * - VertexBuffer.PositionKind * - VertexBuffer.UVKind * - VertexBuffer.UV2Kind * - VertexBuffer.UV3Kind * - VertexBuffer.UV4Kind * - VertexBuffer.UV5Kind * - VertexBuffer.UV6Kind * - VertexBuffer.ColorKind * - VertexBuffer.MatricesIndicesKind * - VertexBuffer.MatricesIndicesExtraKind * - VertexBuffer.MatricesWeightsKind * - VertexBuffer.MatricesWeightsExtraKind * * Returns the Mesh. * @param kind * @param data * @param updateExtends * @param makeItUnique */ updateVerticesData(e, t, i, r) { return this.sourceMesh && this.sourceMesh.updateVerticesData(e, t, i, r), this.sourceMesh; } /** * Sets the mesh indices. * Expects an array populated with integers or a typed array (Int32Array, Uint32Array, Uint16Array). * If the mesh has no geometry, a new Geometry object is created and set to the mesh. * This method creates a new index buffer each call. * Returns the Mesh. * @param indices * @param totalVertices */ setIndices(e, t = null) { return this.sourceMesh && this.sourceMesh.setIndices(e, t), this.sourceMesh; } /** * Boolean : True if the mesh owns the requested kind of data. * @param kind */ isVerticesDataPresent(e) { return this._sourceMesh.isVerticesDataPresent(e); } /** * Returns an array of indices (IndicesArray). */ getIndices() { return this._sourceMesh.getIndices(); } get _positions() { return this._sourceMesh._positions; } /** * This method recomputes and sets a new BoundingInfo to the mesh unless it is locked. * This means the mesh underlying bounding box and sphere are recomputed.
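 *
 * Minimal sketch (assumes `inst` is an InstancedMesh whose source mesh is skinned or morphed): passing true for both flags bakes the skeleton and morph targets into the refreshed bounds.
 * @example
 * inst.refreshBoundingInfo(true, true);
 * const radius = inst.getBoundingInfo().boundingSphere.radiusWorld;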
* @param applySkeleton defines whether to apply the skeleton before computing the bounding info * @param applyMorph defines whether to apply the morph target before computing the bounding info * @returns the current mesh */ refreshBoundingInfo(e = !1, t = !1) { if (this.hasBoundingInfo && this.getBoundingInfo().isLocked) return this; const i = this._sourceMesh.geometry ? this._sourceMesh.geometry.boundingBias : null; return this._refreshBoundingInfo(this._sourceMesh._getPositionData(e, t), i), this; } /** @internal */ _preActivate() { return this._currentLOD && this._currentLOD._preActivate(), this; } /** * @internal */ _activate(e, t) { if (super._activate(e, t), this._sourceMesh.subMeshes || Ce.Warn("Instances should only be created for meshes with geometry."), this._currentLOD) { if (this._currentLOD._getWorldMatrixDeterminant() >= 0 != this._getWorldMatrixDeterminant() >= 0) return this._internalAbstractMeshDataInfo._actAsRegularMesh = !0, !0; if (this._internalAbstractMeshDataInfo._actAsRegularMesh = !1, this._currentLOD._registerInstanceForRenderId(this, e), t) { if (!this._currentLOD._internalAbstractMeshDataInfo._isActiveIntermediate) return this._currentLOD._internalAbstractMeshDataInfo._onlyForInstancesIntermediate = !0, !0; } else if (!this._currentLOD._internalAbstractMeshDataInfo._isActive) return this._currentLOD._internalAbstractMeshDataInfo._onlyForInstances = !0, !0; } return !1; } /** @internal */ _postActivate() { this._sourceMesh.edgesShareWithInstances && this._sourceMesh._edgesRenderer && this._sourceMesh._edgesRenderer.isEnabled && this._sourceMesh._renderingGroup ? (this._sourceMesh._renderingGroup._edgesRenderers.pushNoDuplicate(this._sourceMesh._edgesRenderer), this._sourceMesh._edgesRenderer.customInstances.push(this.getWorldMatrix())) : this._edgesRenderer && this._edgesRenderer.isEnabled && this._sourceMesh._renderingGroup && this._sourceMesh._renderingGroup._edgesRenderers.push(this._edgesRenderer); } getWorldMatrix() { if (this._currentLOD && this._currentLOD.billboardMode !== xi.BILLBOARDMODE_NONE && this._currentLOD._masterMesh !== this) { this._billboardWorldMatrix || (this._billboardWorldMatrix = new Ae()); const e = this._currentLOD._masterMesh; return this._currentLOD._masterMesh = this, de.Vector3[7].copyFrom(this._currentLOD.position), this._currentLOD.position.set(0, 0, 0), this._billboardWorldMatrix.copyFrom(this._currentLOD.computeWorldMatrix(!0)), this._currentLOD.position.copyFrom(de.Vector3[7]), this._currentLOD._masterMesh = e, this._billboardWorldMatrix; } return super.getWorldMatrix(); } get isAnInstance() { return !0; } /** * Returns the current associated LOD AbstractMesh. * @param camera */ getLOD(e) { if (!e) return this; const t = this.sourceMesh.getLODLevels(); if (!t || t.length === 0) this._currentLOD = this.sourceMesh; else { const i = this.getBoundingInfo(); this._currentLOD = this.sourceMesh.getLOD(e, i.boundingSphere); } return this._currentLOD; } /** * @internal */ _preActivateForIntermediateRendering(e) { return this.sourceMesh._preActivateForIntermediateRendering(e); } /** @internal */ _syncSubMeshes() { if (this.releaseSubMeshes(), this._sourceMesh.subMeshes) for (let e = 0; e < this._sourceMesh.subMeshes.length; e++) this._sourceMesh.subMeshes[e].clone(this, this._sourceMesh); return this; } /** @internal */ _generatePointsArray() { return this._sourceMesh._generatePointsArray(); } /** @internal */ _updateBoundingInfo() { return this.hasBoundingInfo ? 
this.getBoundingInfo().update(this.worldMatrixFromCache) : this.buildBoundingInfo(this.absolutePosition, this.absolutePosition, this.worldMatrixFromCache), this._updateSubMeshesBoundingInfo(this.worldMatrixFromCache), this; } /** * Creates a new InstancedMesh from the current mesh. * * Returns the clone. * @param name the cloned mesh name * @param newParent the optional Node to parent the clone to. * @param doNotCloneChildren if `true` the model children aren't cloned. * @param newSourceMesh if set this mesh will be used as the source mesh instead of ths instance's one * @returns the clone */ clone(e, t = null, i, r) { const s = (r || this._sourceMesh).createInstance(e); if (id.DeepCopy(this, s, [ "name", "subMeshes", "uniqueId", "parent", "lightSources", "receiveShadows", "material", "visibility", "skeleton", "sourceMesh", "isAnInstance", "facetNb", "isFacetDataEnabled", "isBlocked", "useBones", "hasInstances", "collider", "edgesRenderer", "forward", "up", "right", "absolutePosition", "absoluteScaling", "absoluteRotationQuaternion", "isWorldMatrixFrozen", "nonUniformScaling", "behaviors", "worldMatrixFromCache", "hasThinInstances", "hasBoundingInfo" ], []), this.refreshBoundingInfo(), t && (s.parent = t), !i) for (let n = 0; n < this.getScene().meshes.length; n++) { const a = this.getScene().meshes[n]; a.parent === this && a.clone(a.name, s); } return s.computeWorldMatrix(!0), this.onClonedObservable.notifyObservers(s), s; } /** * Disposes the InstancedMesh. * Returns nothing. * @param doNotRecurse * @param disposeMaterialAndTextures */ dispose(e, t = !1) { this._sourceMesh.removeInstance(this), super.dispose(e, t); } /** * @internal */ _serializeAsParent(e) { super._serializeAsParent(e), e.parentId = this._sourceMesh.uniqueId, e.parentInstanceIndex = this._indexInSourceMeshInstanceArray; } /** * Instantiate (when possible) or clone that node with its hierarchy * @param newParent defines the new parent to use for the instance (or clone) * @param options defines options to configure how copy is done * @param options.doNotInstantiate defines if the model must be instantiated or just cloned * @param options.newSourcedMesh newSourcedMesh the new source mesh for the instance (or clone) * @param onNewNodeCreated defines an option callback to call when a clone or an instance is created * @returns an instance (or a clone) of the current node with its hierarchy */ instantiateHierarchy(e = null, t, i) { const r = this.clone("Clone of " + (this.name || this.id), e || this.parent, !0, t && t.newSourcedMesh); r && i && i(this, r); for (const s of this.getChildTransformNodes(!0)) s.instantiateHierarchy(r, t, i); return r; } } ke.prototype.registerInstancedBuffer = function(c, e) { var t, i; if ((i = (t = this._userInstancedBuffersStorage) === null || t === void 0 ? void 0 : t.vertexBuffers[c]) === null || i === void 0 || i.dispose(), !this.instancedBuffers) { this.instancedBuffers = {}; for (const r of this.instances) r.instancedBuffers = {}; } this._userInstancedBuffersStorage || (this._userInstancedBuffersStorage = { data: {}, vertexBuffers: {}, strides: {}, sizes: {}, vertexArrayObjects: this.getEngine().getCaps().vertexArrayObject ? 
{} : void 0 }), this.instancedBuffers[c] = null, this._userInstancedBuffersStorage.strides[c] = e, this._userInstancedBuffersStorage.sizes[c] = e * 32, this._userInstancedBuffersStorage.data[c] = new Float32Array(this._userInstancedBuffersStorage.sizes[c]), this._userInstancedBuffersStorage.vertexBuffers[c] = new Y(this.getEngine(), this._userInstancedBuffersStorage.data[c], c, !0, !1, e, !0); for (const r of this.instances) r.instancedBuffers[c] = null; this._invalidateInstanceVertexArrayObject(), this._markSubMeshesAsAttributesDirty(); }; ke.prototype._processInstancedBuffers = function(c, e) { const t = c ? c.length : 0; for (const i in this.instancedBuffers) { let r = this._userInstancedBuffersStorage.sizes[i]; const s = this._userInstancedBuffersStorage.strides[i], n = (t + 1) * s; for (; r < n; ) r *= 2; this._userInstancedBuffersStorage.data[i].length != r && (this._userInstancedBuffersStorage.data[i] = new Float32Array(r), this._userInstancedBuffersStorage.sizes[i] = r, this._userInstancedBuffersStorage.vertexBuffers[i] && (this._userInstancedBuffersStorage.vertexBuffers[i].dispose(), this._userInstancedBuffersStorage.vertexBuffers[i] = null)); const a = this._userInstancedBuffersStorage.data[i]; let l = 0; if (e) { const o = this.instancedBuffers[i]; o.toArray ? o.toArray(a, l) : o.copyToArray ? o.copyToArray(a, l) : a[l] = o, l += s; } for (let o = 0; o < t; o++) { const h = c[o].instancedBuffers[i]; h.toArray ? h.toArray(a, l) : h.copyToArray ? h.copyToArray(a, l) : a[l] = h, l += s; } this._userInstancedBuffersStorage.vertexBuffers[i] ? this._userInstancedBuffersStorage.vertexBuffers[i].updateDirectly(a, 0) : (this._userInstancedBuffersStorage.vertexBuffers[i] = new Y(this.getEngine(), this._userInstancedBuffersStorage.data[i], i, !0, !1, s, !0), this._invalidateInstanceVertexArrayObject()); } }; ke.prototype._invalidateInstanceVertexArrayObject = function() { if (!(!this._userInstancedBuffersStorage || this._userInstancedBuffersStorage.vertexArrayObjects === void 0)) { for (const c in this._userInstancedBuffersStorage.vertexArrayObjects) this.getEngine().releaseVertexArrayObject(this._userInstancedBuffersStorage.vertexArrayObjects[c]); this._userInstancedBuffersStorage.vertexArrayObjects = {}; } }; ke.prototype._disposeInstanceSpecificData = function() { for (this._instanceDataStorage.instancesBuffer && (this._instanceDataStorage.instancesBuffer.dispose(), this._instanceDataStorage.instancesBuffer = null); this.instances.length; ) this.instances[0].dispose(); for (const c in this.instancedBuffers) this._userInstancedBuffersStorage.vertexBuffers[c] && this._userInstancedBuffersStorage.vertexBuffers[c].dispose(); this._invalidateInstanceVertexArrayObject(), this.instancedBuffers = {}; }; class hs extends In { /** * Defines how far from the source the light is impacting in scene units. * Note: Unused in PBR material as the distance light falloff is defined following the inverse squared falloff. */ get range() { return this._range; } /** * Defines how far from the source the light is impacting in scene units. * Note: Unused in PBR material as the distance light falloff is defined following the inverse squared falloff. */ set range(e) { this._range = e, this._inverseSquaredRange = 1 / (this.range * this.range); } /** * Gets the photometric scale used to interpret the intensity. * This is only relevant with PBR Materials where the light intensity can be defined in a physical way. 
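 *
 * A hedged example of the photometric units (assumes `lamp` is an existing BABYLON.PointLight): the chosen mode changes how `intensity` is interpreted when PBR materials are lit.
 * @example
 * lamp.intensityMode = BABYLON.Light.INTENSITYMODE_LUMINOUSPOWER; // intensity read as luminous power (lumens)
 * lamp.intensity = 800;  // roughly a household bulb
 * lamp.radius = 0.05;    // small emitter radius, used by PBR to soften highlights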
*/ get intensityMode() { return this._intensityMode; } /** * Sets the photometric scale used to interpret the intensity. * This is only relevant with PBR Materials where the light intensity can be defined in a physical way. */ set intensityMode(e) { this._intensityMode = e, this._computePhotometricScale(); } /** * Gets the light radius used by PBR Materials to simulate soft area lights. */ get radius() { return this._radius; } /** * sets the light radius used by PBR Materials to simulate soft area lights. */ set radius(e) { this._radius = e, this._computePhotometricScale(); } /** * Gets whether or not the shadows are enabled for this light. This can help turning off/on shadow without detaching * the current shadow generator. */ get shadowEnabled() { return this._shadowEnabled; } /** * Sets whether or not the shadows are enabled for this light. This can help turning off/on shadow without detaching * the current shadow generator. */ set shadowEnabled(e) { this._shadowEnabled !== e && (this._shadowEnabled = e, this._markMeshesAsLightDirty()); } /** * Gets the only meshes impacted by this light. */ get includedOnlyMeshes() { return this._includedOnlyMeshes; } /** * Sets the only meshes impacted by this light. */ set includedOnlyMeshes(e) { this._includedOnlyMeshes = e, this._hookArrayForIncludedOnly(e); } /** * Gets the meshes not impacted by this light. */ get excludedMeshes() { return this._excludedMeshes; } /** * Sets the meshes not impacted by this light. */ set excludedMeshes(e) { this._excludedMeshes = e, this._hookArrayForExcluded(e); } /** * Gets the layer id use to find what meshes are not impacted by the light. * Inactive if 0 */ get excludeWithLayerMask() { return this._excludeWithLayerMask; } /** * Sets the layer id use to find what meshes are not impacted by the light. * Inactive if 0 */ set excludeWithLayerMask(e) { this._excludeWithLayerMask = e, this._resyncMeshes(); } /** * Gets the layer id use to find what meshes are impacted by the light. * Inactive if 0 */ get includeOnlyWithLayerMask() { return this._includeOnlyWithLayerMask; } /** * Sets the layer id use to find what meshes are impacted by the light. * Inactive if 0 */ set includeOnlyWithLayerMask(e) { this._includeOnlyWithLayerMask = e, this._resyncMeshes(); } /** * Gets the lightmap mode of this light (should be one of the constants defined by Light.LIGHTMAP_x) */ get lightmapMode() { return this._lightmapMode; } /** * Sets the lightmap mode of this light (should be one of the constants defined by Light.LIGHTMAP_x) */ set lightmapMode(e) { this._lightmapMode !== e && (this._lightmapMode = e, this._markMeshesAsLightDirty()); } /** * Returns the view matrix. * @param _faceIndex The index of the face for which we want to extract the view matrix. Only used for point light types. * @returns The view matrix. Can be null, if a view matrix cannot be defined for the type of light considered (as for a hemispherical light, for example). */ getViewMatrix(e) { return null; } /** * Returns the projection matrix. * Note that viewMatrix and renderList are optional and are only used by lights that calculate the projection matrix from a list of meshes (e.g. directional lights with automatic extents calculation). * @param _viewMatrix The view transform matrix of the light (optional). * @param _renderList The list of meshes to take into account when calculating the projection matrix (optional). * @returns The projection matrix. 
Can be null, if a projection matrix cannot be defined for the type of light considered (as for a hemispherical light, for example). */ getProjectionMatrix(e, t) { return null; } /** * Creates a Light object in the scene. * Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/lights_introduction * @param name The friendly name of the light * @param scene The scene the light belongs too */ constructor(e, t) { super(e, t), this.diffuse = new ze(1, 1, 1), this.specular = new ze(1, 1, 1), this.falloffType = hs.FALLOFF_DEFAULT, this.intensity = 1, this._range = Number.MAX_VALUE, this._inverseSquaredRange = 0, this._photometricScale = 1, this._intensityMode = hs.INTENSITYMODE_AUTOMATIC, this._radius = 1e-5, this.renderPriority = 0, this._shadowEnabled = !0, this._excludeWithLayerMask = 0, this._includeOnlyWithLayerMask = 0, this._lightmapMode = 0, this._shadowGenerators = null, this._excludedMeshesIds = new Array(), this._includedOnlyMeshesIds = new Array(), this._isLight = !0, this.getScene().addLight(this), this._uniformBuffer = new Vi(this.getScene().getEngine(), void 0, void 0, e), this._buildUniformLayout(), this.includedOnlyMeshes = [], this.excludedMeshes = [], this._resyncMeshes(); } /** * Sets the passed Effect "effect" with the Light textures. * @param effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The light */ // eslint-disable-next-line @typescript-eslint/no-unused-vars transferTexturesToEffect(e, t) { return this; } /** * Binds the lights information from the scene to the effect for the given mesh. * @param lightIndex Light index * @param scene The scene where the light belongs to * @param effect The effect we are binding the data to * @param useSpecular Defines if specular is supported * @param receiveShadows Defines if the effect (mesh) we bind the light for receives shadows */ _bindLight(e, t, i, r, s = !0) { var n; const a = e.toString(); let l = !1; if (this._uniformBuffer.bindToEffect(i, "Light" + a), this._renderId !== t.getRenderId() || this._lastUseSpecular !== r || !this._uniformBuffer.useUbo) { this._renderId = t.getRenderId(), this._lastUseSpecular = r; const o = this.getScaledIntensity(); this.transferToEffect(i, a), this.diffuse.scaleToRef(o, mn.Color3[0]), this._uniformBuffer.updateColor4("vLightDiffuse", mn.Color3[0], this.range, a), r && (this.specular.scaleToRef(o, mn.Color3[1]), this._uniformBuffer.updateColor4("vLightSpecular", mn.Color3[1], this.radius, a)), l = !0; } if (this.transferTexturesToEffect(i, a), t.shadowsEnabled && this.shadowEnabled && s) { const o = (n = this.getShadowGenerator(t.activeCamera)) !== null && n !== void 0 ? n : this.getShadowGenerator(); o && (o.bindShadowLight(a, i), l = !0); } l ? this._uniformBuffer.update() : this._uniformBuffer.bindUniformBuffer(); } /** * Returns the string "Light". * @returns the class name */ getClassName() { return "Light"; } /** * Converts the light information to a readable string for debug purpose. 
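 *
 * Debug-print sketch (assumes `lamp` is an existing point light):
 * @example
 * console.log(lamp.toString()); // e.g. "Name: lamp, type: Point"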
* @param fullDetails Supports for multiple levels of logging within scene loading * @returns the human readable light info */ toString(e) { let t = "Name: " + this.name; if (t += ", type: " + ["Point", "Directional", "Spot", "Hemispheric"][this.getTypeID()], this.animations) for (let i = 0; i < this.animations.length; i++) t += ", animation[0]: " + this.animations[i].toString(e); return t; } /** @internal */ _syncParentEnabledState() { super._syncParentEnabledState(), this.isDisposed() || this._resyncMeshes(); } /** * Set the enabled state of this node. * @param value - the new enabled state */ setEnabled(e) { super.setEnabled(e), this._resyncMeshes(); } /** * Returns the Light associated shadow generator if any. * @param camera Camera for which the shadow generator should be retrieved (default: null). If null, retrieves the default shadow generator * @returns the associated shadow generator. */ getShadowGenerator(e = null) { var t; return this._shadowGenerators === null ? null : (t = this._shadowGenerators.get(e)) !== null && t !== void 0 ? t : null; } /** * Returns all the shadow generators associated to this light * @returns */ getShadowGenerators() { return this._shadowGenerators; } /** * Returns a Vector3, the absolute light position in the World. * @returns the world space position of the light */ getAbsolutePosition() { return D.Zero(); } /** * Specifies if the light will affect the passed mesh. * @param mesh The mesh to test against the light * @returns true the mesh is affected otherwise, false. */ canAffectMesh(e) { return e ? !(this.includedOnlyMeshes && this.includedOnlyMeshes.length > 0 && this.includedOnlyMeshes.indexOf(e) === -1 || this.excludedMeshes && this.excludedMeshes.length > 0 && this.excludedMeshes.indexOf(e) !== -1 || this.includeOnlyWithLayerMask !== 0 && !(this.includeOnlyWithLayerMask & e.layerMask) || this.excludeWithLayerMask !== 0 && this.excludeWithLayerMask & e.layerMask) : !0; } /** * Releases resources associated with this node. * @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { if (this._shadowGenerators) { const i = this._shadowGenerators.values(); for (let r = i.next(); r.done !== !0; r = i.next()) r.value.dispose(); this._shadowGenerators = null; } if (this.getScene().stopAnimation(this), this._parentContainer) { const i = this._parentContainer.lights.indexOf(this); i > -1 && this._parentContainer.lights.splice(i, 1), this._parentContainer = null; } for (const i of this.getScene().meshes) i._removeLightSource(this, !0); this._uniformBuffer.dispose(), this.getScene().removeLight(this), super.dispose(e, t); } /** * Returns the light type ID (integer). * @returns The light Type id as a constant defines in Light.LIGHTTYPEID_x */ getTypeID() { return 0; } /** * Returns the intensity scaled by the Photometric Scale according to the light type and intensity mode. * @returns the scaled intensity in intensity mode unit */ getScaledIntensity() { return this._photometricScale * this.intensity; } /** * Returns a new Light object, named "name", from the current one. 
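 *
 * Cloning sketch (illustrative; assumes `sun` is an existing light such as a DirectionalLight): the clone copies serializable properties and may be null if no constructor is registered for the light type.
 * @example
 * const sun2 = sun.clone("sun2");
 * if (sun2) { sun2.intensity = 0.5; }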
* @param name The name of the cloned light * @param newParent The parent of this light, if it has one * @returns the new created light */ clone(e, t = null) { const i = hs.GetConstructorFromName(this.getTypeID(), e, this.getScene()); if (!i) return null; const r = St.Clone(i, this); return e && (r.name = e), t && (r.parent = t), r.setEnabled(this.isEnabled()), this.onClonedObservable.notifyObservers(r), r; } /** * Serializes the current light into a Serialization object. * @returns the serialized object. */ serialize() { const e = St.Serialize(this); return e.uniqueId = this.uniqueId, e.type = this.getTypeID(), this.parent && this.parent._serializeAsParent(e), this.excludedMeshes.length > 0 && (e.excludedMeshesIds = [], this.excludedMeshes.forEach((t) => { e.excludedMeshesIds.push(t.id); })), this.includedOnlyMeshes.length > 0 && (e.includedOnlyMeshesIds = [], this.includedOnlyMeshes.forEach((t) => { e.includedOnlyMeshesIds.push(t.id); })), St.AppendSerializedAnimations(this, e), e.ranges = this.serializeAnimationRanges(), e.isEnabled = this.isEnabled(), e; } /** * Creates a new typed light from the passed type (integer) : point light = 0, directional light = 1, spot light = 2, hemispheric light = 3. * This new light is named "name" and added to the passed scene. * @param type Type according to the types available in Light.LIGHTTYPEID_x * @param name The friendly name of the light * @param scene The scene the new light will belong to * @returns the constructor function */ static GetConstructorFromName(e, t, i) { const r = In.Construct("Light_Type_" + e, t, i); return r || null; } /** * Parses the passed "parsedLight" and returns a new instanced Light from this parsing. * @param parsedLight The JSON representation of the light * @param scene The scene to create the parsed light in * @returns the created light after parsing */ static Parse(e, t) { const i = hs.GetConstructorFromName(e.type, e.name, t); if (!i) return null; const r = St.Parse(i, e, t); if (e.excludedMeshesIds && (r._excludedMeshesIds = e.excludedMeshesIds), e.includedOnlyMeshesIds && (r._includedOnlyMeshesIds = e.includedOnlyMeshesIds), e.parentId !== void 0 && (r._waitingParentId = e.parentId), e.parentInstanceIndex !== void 0 && (r._waitingParentInstanceIndex = e.parentInstanceIndex), e.falloffType !== void 0 && (r.falloffType = e.falloffType), e.lightmapMode !== void 0 && (r.lightmapMode = e.lightmapMode), e.animations) { for (let s = 0; s < e.animations.length; s++) { const n = e.animations[s], a = Qo("BABYLON.Animation"); a && r.animations.push(a.Parse(n)); } In.ParseAnimationRanges(r, e, t); } return e.autoAnimate && t.beginAnimation(r, e.autoAnimateFrom, e.autoAnimateTo, e.autoAnimateLoop, e.autoAnimateSpeed || 1), e.isEnabled !== void 0 && r.setEnabled(e.isEnabled), r; } _hookArrayForExcluded(e) { const t = e.push; e.push = (...r) => { const s = t.apply(e, r); for (const n of r) n._resyncLightSource(this); return s; }; const i = e.splice; e.splice = (r, s) => { const n = i.apply(e, [r, s]); for (const a of n) a._resyncLightSource(this); return n; }; for (const r of e) r._resyncLightSource(this); } _hookArrayForIncludedOnly(e) { const t = e.push; e.push = (...r) => { const s = t.apply(e, r); return this._resyncMeshes(), s; }; const i = e.splice; e.splice = (r, s) => { const n = i.apply(e, [r, s]); return this._resyncMeshes(), n; }, this._resyncMeshes(); } _resyncMeshes() { for (const e of this.getScene().meshes) e._resyncLightSource(this); } /** * Forces the meshes to update their light related information in their 
rendering used effects * @internal Internal Use Only */ _markMeshesAsLightDirty() { for (const e of this.getScene().meshes) e.lightSources.indexOf(this) !== -1 && e._markSubMeshesAsLightDirty(); } /** * Recomputes the cached photometric scale if needed. */ _computePhotometricScale() { this._photometricScale = this._getPhotometricScale(), this.getScene().resetCachedMaterial(); } /** * Returns the Photometric Scale according to the light type and intensity mode. */ _getPhotometricScale() { let e = 0; const t = this.getTypeID(); let i = this.intensityMode; switch (i === hs.INTENSITYMODE_AUTOMATIC && (t === hs.LIGHTTYPEID_DIRECTIONALLIGHT ? i = hs.INTENSITYMODE_ILLUMINANCE : i = hs.INTENSITYMODE_LUMINOUSINTENSITY), t) { case hs.LIGHTTYPEID_POINTLIGHT: case hs.LIGHTTYPEID_SPOTLIGHT: switch (i) { case hs.INTENSITYMODE_LUMINOUSPOWER: e = 1 / (4 * Math.PI); break; case hs.INTENSITYMODE_LUMINOUSINTENSITY: e = 1; break; case hs.INTENSITYMODE_LUMINANCE: e = this.radius * this.radius; break; } break; case hs.LIGHTTYPEID_DIRECTIONALLIGHT: switch (i) { case hs.INTENSITYMODE_ILLUMINANCE: e = 1; break; case hs.INTENSITYMODE_LUMINANCE: { let r = this.radius; r = Math.max(r, 1e-3), e = 2 * Math.PI * (1 - Math.cos(r)); break; } } break; case hs.LIGHTTYPEID_HEMISPHERICLIGHT: e = 1; break; } return e; } /** * Reorder the light in the scene according to their defined priority. * @internal Internal Use Only */ _reorderLightsInScene() { const e = this.getScene(); this._renderPriority != 0 && (e.requireLightSorting = !0), this.getScene().sortLightsByPriority(); } } hs.FALLOFF_DEFAULT = ia.FALLOFF_DEFAULT; hs.FALLOFF_PHYSICAL = ia.FALLOFF_PHYSICAL; hs.FALLOFF_GLTF = ia.FALLOFF_GLTF; hs.FALLOFF_STANDARD = ia.FALLOFF_STANDARD; hs.LIGHTMAP_DEFAULT = ia.LIGHTMAP_DEFAULT; hs.LIGHTMAP_SPECULAR = ia.LIGHTMAP_SPECULAR; hs.LIGHTMAP_SHADOWSONLY = ia.LIGHTMAP_SHADOWSONLY; hs.INTENSITYMODE_AUTOMATIC = ia.INTENSITYMODE_AUTOMATIC; hs.INTENSITYMODE_LUMINOUSPOWER = ia.INTENSITYMODE_LUMINOUSPOWER; hs.INTENSITYMODE_LUMINOUSINTENSITY = ia.INTENSITYMODE_LUMINOUSINTENSITY; hs.INTENSITYMODE_ILLUMINANCE = ia.INTENSITYMODE_ILLUMINANCE; hs.INTENSITYMODE_LUMINANCE = ia.INTENSITYMODE_LUMINANCE; hs.LIGHTTYPEID_POINTLIGHT = ia.LIGHTTYPEID_POINTLIGHT; hs.LIGHTTYPEID_DIRECTIONALLIGHT = ia.LIGHTTYPEID_DIRECTIONALLIGHT; hs.LIGHTTYPEID_SPOTLIGHT = ia.LIGHTTYPEID_SPOTLIGHT; hs.LIGHTTYPEID_HEMISPHERICLIGHT = ia.LIGHTTYPEID_HEMISPHERICLIGHT; F([ Fs() ], hs.prototype, "diffuse", void 0); F([ Fs() ], hs.prototype, "specular", void 0); F([ W() ], hs.prototype, "falloffType", void 0); F([ W() ], hs.prototype, "intensity", void 0); F([ W() ], hs.prototype, "range", null); F([ W() ], hs.prototype, "intensityMode", null); F([ W() ], hs.prototype, "radius", null); F([ W() ], hs.prototype, "_renderPriority", void 0); F([ ct("_reorderLightsInScene") ], hs.prototype, "renderPriority", void 0); F([ W("shadowEnabled") ], hs.prototype, "_shadowEnabled", void 0); F([ W("excludeWithLayerMask") ], hs.prototype, "_excludeWithLayerMask", void 0); F([ W("includeOnlyWithLayerMask") ], hs.prototype, "_includeOnlyWithLayerMask", void 0); F([ W("lightmapMode") ], hs.prototype, "_lightmapMode", void 0); class Dte extends Yl { } class Ote { constructor() { this.rootNodes = [], this.skeletons = [], this.animationGroups = []; } /** * Disposes the instantiated entries from the scene */ dispose() { this.rootNodes.slice(0).forEach((e) => { e.dispose(); }), this.rootNodes.length = 0, this.skeletons.slice(0).forEach((e) => { e.dispose(); }), this.skeletons.length = 0, 
this.animationGroups.slice(0).forEach((e) => { e.dispose(); }), this.animationGroups.length = 0; } } class NL extends Yl { /** * Instantiates an AssetContainer. * @param scene The scene the AssetContainer belongs to. */ constructor(e) { super(), this._wasAddedToScene = !1, e = e || gi.LastCreatedScene, e && (this.scene = e, this.sounds = [], this.effectLayers = [], this.layers = [], this.lensFlareSystems = [], this.proceduralTextures = [], this.reflectionProbes = [], e.onDisposeObservable.add(() => { this._wasAddedToScene || this.dispose(); }), this._onContextRestoredObserver = e.getEngine().onContextRestoredObservable.add(() => { for (const t of this.geometries) t._rebuild(); for (const t of this.meshes) t._rebuild(); for (const t of this.particleSystems) t.rebuild(); for (const t of this.textures) t._rebuild(); })); } /** * Given a list of nodes, return a topological sorting of them. * @param nodes */ _topologicalSort(e) { const t = /* @__PURE__ */ new Map(); for (const a of e) t.set(a.uniqueId, a); const i = { dependsOn: /* @__PURE__ */ new Map(), dependedBy: /* @__PURE__ */ new Map() // given a node id, what are the ids of the nodes that depend on it }; for (const a of e) { const l = a.uniqueId; i.dependsOn.set(l, /* @__PURE__ */ new Set()), i.dependedBy.set(l, /* @__PURE__ */ new Set()); } for (const a of e) { const l = a.uniqueId, o = i.dependsOn.get(l); if (a instanceof Cg) { const h = a.sourceMesh; t.has(h.uniqueId) && (o.add(h.uniqueId), i.dependedBy.get(h.uniqueId).add(l)); } const u = i.dependedBy.get(l); for (const h of a.getDescendants()) { const d = h.uniqueId; t.has(d) && (u.add(d), i.dependsOn.get(d).add(l)); } } const r = [], s = []; for (const a of e) { const l = a.uniqueId; i.dependsOn.get(l).size === 0 && (s.push(a), t.delete(l)); } const n = s; for (; n.length > 0; ) { const a = n.shift(); r.push(a); const l = i.dependedBy.get(a.uniqueId); for (const o of Array.from(l.values())) { const u = i.dependsOn.get(o); u.delete(a.uniqueId), u.size === 0 && t.get(o) && (n.push(t.get(o)), t.delete(o)); } } return t.size > 0 && (Ce.Error("SceneSerializer._topologicalSort: There were unvisited nodes:"), t.forEach((a) => Ce.Error(a.name))), r; } _addNodeAndDescendantsToList(e, t, i, r) { if (!(!i || r && !r(i) || t.has(i.uniqueId))) { e.push(i), t.add(i.uniqueId); for (const s of i.getDescendants(!0)) this._addNodeAndDescendantsToList(e, t, s, r); } } /** * Check if a specific node is contained in this asset container. * @param node */ _isNodeInContainer(e) { return e instanceof ke && this.meshes.indexOf(e) !== -1 || e instanceof xi && this.transformNodes.indexOf(e) !== -1 || e instanceof hs && this.lights.indexOf(e) !== -1 || e instanceof Ai && this.cameras.indexOf(e) !== -1; } /** * For every node in the scene, check if its parent node is also in the scene. 
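 *
 * Loading sketch (illustrative, to be run inside an async function; the path and file name are placeholders): containers typically come from SceneLoader before being validated and added.
 * @example
 * const container = await BABYLON.SceneLoader.LoadAssetContainerAsync("models/", "city.glb", scene);
 * container.addAllToScene(); // logs a warning if a node's parent is missing from the container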
*/ _isValidHierarchy() { for (const e of this.meshes) if (e.parent && !this._isNodeInContainer(e.parent)) return Ce.Warn(`Node ${e.name} has a parent that is not in the container.`), !1; for (const e of this.transformNodes) if (e.parent && !this._isNodeInContainer(e.parent)) return Ce.Warn(`Node ${e.name} has a parent that is not in the container.`), !1; for (const e of this.lights) if (e.parent && !this._isNodeInContainer(e.parent)) return Ce.Warn(`Node ${e.name} has a parent that is not in the container.`), !1; for (const e of this.cameras) if (e.parent && !this._isNodeInContainer(e.parent)) return Ce.Warn(`Node ${e.name} has a parent that is not in the container.`), !1; return !0; } /** * Instantiate or clone all meshes and add the new ones to the scene. * Skeletons and animation groups will all be cloned * @param nameFunction defines an optional function used to get new names for clones * @param cloneMaterials defines an optional boolean that defines if materials must be cloned as well (false by default) * @param options defines an optional list of options to control how to instantiate / clone models * @param options.doNotInstantiate defines if the model must be instantiated or just cloned * @param options.predicate defines a predicate used to filter whih mesh to instantiate/clone * @returns a list of rootNodes, skeletons and animation groups that were duplicated */ instantiateModelsToScene(e, t = !1, i) { this._isValidHierarchy() || Ve.Warn("SceneSerializer.InstantiateModelsToScene: The Asset Container hierarchy is not valid."); const r = {}, s = {}, n = new Ote(), a = [], l = [], o = Object.assign({ doNotInstantiate: !0 }, i), u = (m, _) => { if (r[m.uniqueId] = _.uniqueId, s[_.uniqueId] = _, e && (_.name = e(m.name)), _ instanceof ke) { const v = _; if (v.morphTargetManager) { const C = m.morphTargetManager; v.morphTargetManager = C.clone(); for (let x = 0; x < C.numTargets; x++) { const b = C.getTarget(x), S = v.morphTargetManager.getTarget(x); r[b.uniqueId] = S.uniqueId, s[S.uniqueId] = S; } } } }, h = [], d = /* @__PURE__ */ new Set(); for (const m of this.transformNodes) m.parent === null && this._addNodeAndDescendantsToList(h, d, m, o.predicate); for (const m of this.meshes) m.parent === null && this._addNodeAndDescendantsToList(h, d, m, o.predicate); const f = this._topologicalSort(h), p = (m, _) => { if (u(m, _), m.parent) { const v = r[m.parent.uniqueId], C = s[v]; C ? _.parent = C : _.parent = m.parent; } if (_.position && m.position && _.position.copyFrom(m.position), _.rotationQuaternion && m.rotationQuaternion && _.rotationQuaternion.copyFrom(m.rotationQuaternion), _.rotation && m.rotation && _.rotation.copyFrom(m.rotation), _.scaling && m.scaling && _.scaling.copyFrom(m.scaling), _.material) { const v = _; if (v.material) if (t) { const C = m.material; if (l.indexOf(C) === -1) { let x = C.clone(e ? e(C.name) : "Clone of " + C.name); if (l.push(C), r[C.uniqueId] = x.uniqueId, s[x.uniqueId] = x, C.getClassName() === "MultiMaterial") { const b = C; for (const S of b.subMaterials) S && (x = S.clone(e ? e(S.name) : "Clone of " + S.name), l.push(S), r[S.uniqueId] = x.uniqueId, s[x.uniqueId] = x); b.subMaterials = b.subMaterials.map((S) => S && s[r[S.uniqueId]]); } } v.getClassName() !== "InstancedMesh" && (v.material = s[r[C.uniqueId]]); } else v.material.getClassName() === "MultiMaterial" ? 
this.scene.multiMaterials.indexOf(v.material) === -1 && this.scene.addMultiMaterial(v.material) : this.scene.materials.indexOf(v.material) === -1 && this.scene.addMaterial(v.material); } _.parent === null && n.rootNodes.push(_); }; return f.forEach((m) => { if (m.getClassName() === "InstancedMesh") { const _ = m, v = _.sourceMesh, C = r[v.uniqueId], b = (typeof C == "number" ? s[C] : v).createInstance(_.name); p(_, b); } else { let _ = !0; m.getClassName() === "TransformNode" || m.getClassName() === "Node" || m.skeleton || !m.getTotalVertices || m.getTotalVertices() === 0 ? _ = !1 : o.doNotInstantiate && (typeof o.doNotInstantiate == "function" ? _ = !o.doNotInstantiate(m) : _ = !o.doNotInstantiate); const v = _ ? m.createInstance(`instance of ${m.name}`) : m.clone(`Clone of ${m.name}`, null, !0); if (!v) throw new Error(`Could not clone or instantiate node on Asset Container ${m.name}`); p(m, v); } }), this.skeletons.forEach((m) => { if (o.predicate && !o.predicate(m)) return; const _ = m.clone(e ? e(m.name) : "Clone of " + m.name); for (const v of this.meshes) if (v.skeleton === m && !v.isAnInstance) { const C = s[r[v.uniqueId]]; if (!C || C.isAnInstance || (C.skeleton = _, a.indexOf(_) !== -1)) continue; a.push(_); for (const x of _.bones) x._linkedTransformNode && (x._linkedTransformNode = s[r[x._linkedTransformNode.uniqueId]]); } n.skeletons.push(_); }), this.animationGroups.forEach((m) => { if (o.predicate && !o.predicate(m)) return; const _ = m.clone(e ? e(m.name) : "Clone of " + m.name, (v) => s[r[v.uniqueId]] || v); n.animationGroups.push(_); }), n; } /** * Adds all the assets from the container to the scene. */ addAllToScene() { if (!this._wasAddedToScene) { this._isValidHierarchy() || Ve.Warn("SceneSerializer.addAllToScene: The Asset Container hierarchy is not valid."), this._wasAddedToScene = !0, this.addToScene(null), this.environmentTexture && (this.scene.environmentTexture = this.environmentTexture); for (const e of this.scene._serializableComponents) e.addFromContainer(this); this.scene.getEngine().onContextRestoredObservable.remove(this._onContextRestoredObserver), this._onContextRestoredObserver = null; } } /** * Adds assets from the container to the scene. 
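 *
 * Selective-add sketch (illustrative; assumes `container` is a populated AssetContainer): the predicate filters which entities are added, here only cameras and lights.
 * @example
 * container.addToScene((entity) => entity instanceof BABYLON.Camera || entity instanceof BABYLON.Light);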
* @param predicate defines a predicate used to select which entity will be added (can be null) */ addToScene(e = null) { const t = []; this.cameras.forEach((i) => { e && !e(i) || (this.scene.addCamera(i), t.push(i)); }), this.lights.forEach((i) => { e && !e(i) || (this.scene.addLight(i), t.push(i)); }), this.meshes.forEach((i) => { e && !e(i) || (this.scene.addMesh(i), t.push(i)); }), this.skeletons.forEach((i) => { e && !e(i) || this.scene.addSkeleton(i); }), this.animations.forEach((i) => { e && !e(i) || this.scene.addAnimation(i); }), this.animationGroups.forEach((i) => { e && !e(i) || this.scene.addAnimationGroup(i); }), this.multiMaterials.forEach((i) => { e && !e(i) || this.scene.addMultiMaterial(i); }), this.materials.forEach((i) => { e && !e(i) || this.scene.addMaterial(i); }), this.morphTargetManagers.forEach((i) => { e && !e(i) || this.scene.addMorphTargetManager(i); }), this.geometries.forEach((i) => { e && !e(i) || this.scene.addGeometry(i); }), this.transformNodes.forEach((i) => { e && !e(i) || (this.scene.addTransformNode(i), t.push(i)); }), this.actionManagers.forEach((i) => { e && !e(i) || this.scene.addActionManager(i); }), this.textures.forEach((i) => { e && !e(i) || this.scene.addTexture(i); }), this.reflectionProbes.forEach((i) => { e && !e(i) || this.scene.addReflectionProbe(i); }); for (const i of t) i.parent && this.scene.getNodes().indexOf(i.parent) === -1 && (i.setParent ? i.setParent(null) : i.parent = null); } /** * Removes all the assets in the container from the scene */ removeAllFromScene() { this._isValidHierarchy() || Ve.Warn("SceneSerializer.removeAllFromScene: The Asset Container hierarchy is not valid."), this._wasAddedToScene = !1, this.removeFromScene(null), this.environmentTexture === this.scene.environmentTexture && (this.scene.environmentTexture = null); for (const e of this.scene._serializableComponents) e.removeFromContainer(this); } /** * Removes assets in the container from the scene * @param predicate defines a predicate used to select which entity will be added (can be null) */ removeFromScene(e = null) { this.cameras.forEach((t) => { e && !e(t) || this.scene.removeCamera(t); }), this.lights.forEach((t) => { e && !e(t) || this.scene.removeLight(t); }), this.meshes.forEach((t) => { e && !e(t) || this.scene.removeMesh(t, !0); }), this.skeletons.forEach((t) => { e && !e(t) || this.scene.removeSkeleton(t); }), this.animations.forEach((t) => { e && !e(t) || this.scene.removeAnimation(t); }), this.animationGroups.forEach((t) => { e && !e(t) || this.scene.removeAnimationGroup(t); }), this.multiMaterials.forEach((t) => { e && !e(t) || this.scene.removeMultiMaterial(t); }), this.materials.forEach((t) => { e && !e(t) || this.scene.removeMaterial(t); }), this.morphTargetManagers.forEach((t) => { e && !e(t) || this.scene.removeMorphTargetManager(t); }), this.geometries.forEach((t) => { e && !e(t) || this.scene.removeGeometry(t); }), this.transformNodes.forEach((t) => { e && !e(t) || this.scene.removeTransformNode(t); }), this.actionManagers.forEach((t) => { e && !e(t) || this.scene.removeActionManager(t); }), this.textures.forEach((t) => { e && !e(t) || this.scene.removeTexture(t); }), this.reflectionProbes.forEach((t) => { e && !e(t) || this.scene.removeReflectionProbe(t); }); } /** * Disposes all the assets in the container */ dispose() { this.cameras.slice(0).forEach((e) => { e.dispose(); }), this.cameras.length = 0, this.lights.slice(0).forEach((e) => { e.dispose(); }), this.lights.length = 0, this.meshes.slice(0).forEach((e) => { e.dispose(); }), 
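/* Usage sketch (hedged): addToScene/removeFromScene take an optional predicate to filter
   which assets are transferred; `container` is an assumed AssetContainer (e.g. returned by
   SceneLoader.LoadAssetContainerAsync).

   container.addAllToScene();                                      // add every asset
   container.removeFromScene((asset) => asset.name === "skybox");  // remove selectively
   container.addToScene((asset) => asset.name === "skybox");       // add it back later
*/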
this.meshes.length = 0, this.skeletons.slice(0).forEach((e) => { e.dispose(); }), this.skeletons.length = 0, this.animationGroups.slice(0).forEach((e) => { e.dispose(); }), this.animationGroups.length = 0, this.multiMaterials.slice(0).forEach((e) => { e.dispose(); }), this.multiMaterials.length = 0, this.materials.slice(0).forEach((e) => { e.dispose(); }), this.materials.length = 0, this.geometries.slice(0).forEach((e) => { e.dispose(); }), this.geometries.length = 0, this.transformNodes.slice(0).forEach((e) => { e.dispose(); }), this.transformNodes.length = 0, this.actionManagers.slice(0).forEach((e) => { e.dispose(); }), this.actionManagers.length = 0, this.textures.slice(0).forEach((e) => { e.dispose(); }), this.textures.length = 0, this.reflectionProbes.slice(0).forEach((e) => { e.dispose(); }), this.reflectionProbes.length = 0, this.morphTargetManagers.slice(0).forEach((e) => { e.dispose(); }), this.morphTargetManagers.length = 0, this.environmentTexture && (this.environmentTexture.dispose(), this.environmentTexture = null); for (const e of this.scene._serializableComponents) e.removeFromContainer(this, !0); this._onContextRestoredObserver && (this.scene.getEngine().onContextRestoredObservable.remove(this._onContextRestoredObserver), this._onContextRestoredObserver = null); } _moveAssets(e, t, i) { if (!(!e || !t)) for (const r of e) { let s = !0; if (i) { for (const n of i) if (r === n) { s = !1; break; } } s && (t.push(r), r._parentContainer = this); } } /** * Removes all the assets contained in the scene and adds them to the container. * @param keepAssets Set of assets to keep in the scene. (default: empty) */ moveAllFromScene(e) { this._wasAddedToScene = !1, e === void 0 && (e = new Dte()); for (const t in this) Object.prototype.hasOwnProperty.call(this, t) && (this[t] = this[t] || (t === "_environmentTexture" ? null : []), this._moveAssets(this.scene[t], this[t], e[t])); this.environmentTexture = this.scene.environmentTexture, this.removeAllFromScene(); } /** * Adds all meshes in the asset container to a root mesh that can be used to position all the contained meshes. The root mesh is then added to the front of the meshes in the assetContainer. * @returns the root mesh */ createRootMesh() { const e = new ke("assetContainerRootMesh", this.scene); return this.meshes.forEach((t) => { t.parent || e.addChild(t); }), this.meshes.unshift(e), e; } /** * Merge animations (direct and animation groups) from this asset container into a scene * @param scene is the instance of BABYLON.Scene to append to (default: last created scene) * @param animatables set of animatables to retarget to a node from the scene * @param targetConverter defines a function used to convert animation targets from the asset container to the scene (default: search node by name) * @returns an array of the new AnimationGroup added to the scene (empty array if none) */ mergeAnimationsTo(e = gi.LastCreatedScene, t, i = null) { if (!e) return Ce.Error("No scene available to merge animations to"), []; const r = i || ((a) => { let l = null; const o = a.animations.length ? 
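/* Usage sketch (hedged): mergeAnimationsTo retargets this container's animations onto the
   nodes of an existing scene, matched by name unless a targetConverter is supplied.
   `animContainer` and `scene` are assumed to exist in the host application.

   const newGroups = animContainer.mergeAnimationsTo(scene, scene.animatables);
   newGroups.forEach((group) => group.play(true)); // play the retargeted groups in a loop
*/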
a.animations[0].targetProperty : "", u = a.name.split(".").join("").split("_primitive")[0]; switch (o) { case "position": case "rotationQuaternion": l = e.getTransformNodeByName(a.name) || e.getTransformNodeByName(u); break; case "influence": l = e.getMorphTargetByName(a.name) || e.getMorphTargetByName(u); break; default: l = e.getNodeByName(a.name) || e.getNodeByName(u); } return l; }); this.getNodes().forEach((a) => { const l = r(a); if (l !== null) { for (const o of a.animations) { const u = l.animations.filter((h) => h.targetProperty === o.targetProperty); for (const h of u) { const d = l.animations.indexOf(h, 0); d > -1 && l.animations.splice(d, 1); } } l.animations = l.animations.concat(a.animations); } }); const n = []; return this.animationGroups.slice().forEach((a) => { n.push(a.clone(a.name, r)), a.animatables.forEach((l) => { l.stop(); }); }), t.forEach((a) => { const l = r(a.target); l && (e.beginAnimation(l, a.fromFrame, a.toFrame, a.loopAnimation, a.speedRatio, a.onAnimationEnd ? a.onAnimationEnd : void 0, void 0, !0, void 0, a.onAnimationLoop ? a.onAnimationLoop : void 0), e.stopAnimation(a.target)); }), n; } /** * @since 6.15.0 * This method checks for any node that has no parent * and is not in the rootNodes array, and adds the node * there, if so. */ populateRootNodes() { this.rootNodes.length = 0, this.meshes.forEach((e) => { !e.parent && this.rootNodes.indexOf(e) === -1 && this.rootNodes.push(e); }), this.transformNodes.forEach((e) => { !e.parent && this.rootNodes.indexOf(e) === -1 && this.rootNodes.push(e); }), this.lights.forEach((e) => { !e.parent && this.rootNodes.indexOf(e) === -1 && this.rootNodes.push(e); }), this.cameras.forEach((e) => { !e.parent && this.rootNodes.indexOf(e) === -1 && this.rootNodes.push(e); }); } /** * @since 6.26.0 * Given a root asset, this method will traverse its hierarchy and add it, its children and any materials/skeletons/animation groups to the container. * @param root */ addAllAssetsToContainer(e) { if (!e) return; const t = [], i = /* @__PURE__ */ new Set(); for (t.push(e); t.length > 0; ) { const r = t.pop(); if (r instanceof ke ? (r.geometry && this.geometries.indexOf(r.geometry) === -1 && this.geometries.push(r.geometry), this.meshes.push(r)) : r instanceof xi ? this.transformNodes.push(r) : r instanceof hs ? 
this.lights.push(r) : r instanceof Ai && this.cameras.push(r), r instanceof xr) { if (r.material && this.materials.indexOf(r.material) === -1) { this.materials.push(r.material); for (const s of r.material.getActiveTextures()) this.textures.indexOf(s) === -1 && this.textures.push(s); } r.skeleton && this.skeletons.indexOf(r.skeleton) === -1 && this.skeletons.push(r.skeleton), r.morphTargetManager && this.morphTargetManagers.indexOf(r.morphTargetManager) === -1 && this.morphTargetManagers.push(r.morphTargetManager); } for (const s of r.getChildren()) i.has(s) || t.push(s); i.add(r); } this.populateRootNodes(); } } class bce { /** * Creates a new analyser * @param scene defines hosting scene */ constructor(e) { if (this.SMOOTHING = 0.75, this.FFT_SIZE = 512, this.BARGRAPHAMPLITUDE = 256, this.DEBUGCANVASPOS = { x: 20, y: 20 }, this.DEBUGCANVASSIZE = { width: 320, height: 200 }, e = e || gi.LastCreatedScene, !!e) { if (this._scene = e, !$e.audioEngine) { Ve.Warn("No audio engine initialized, failed to create an audio analyser"); return; } this._audioEngine = $e.audioEngine, this._audioEngine.canUseWebAudio && this._audioEngine.audioContext && (this._webAudioAnalyser = this._audioEngine.audioContext.createAnalyser(), this._webAudioAnalyser.minDecibels = -140, this._webAudioAnalyser.maxDecibels = 0, this._byteFreqs = new Uint8Array(this._webAudioAnalyser.frequencyBinCount), this._byteTime = new Uint8Array(this._webAudioAnalyser.frequencyBinCount), this._floatFreqs = new Float32Array(this._webAudioAnalyser.frequencyBinCount)); } } /** * Get the number of data values you will have to play with for the visualization * @see https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/frequencyBinCount * @returns a number */ getFrequencyBinCount() { return this._audioEngine.canUseWebAudio ? 
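/* Usage sketch (hedged): the class above is the public BABYLON.Analyser. A typical
   frequency-visualization loop, assuming an existing `scene` with an active audio engine:

   const analyser = new BABYLON.Analyser(scene);
   BABYLON.Engine.audioEngine.connectToAnalyser(analyser);
   analyser.FFT_SIZE = 64;
   analyser.SMOOTHING = 0.9;
   scene.registerBeforeRender(() => {
     const freqs = analyser.getByteFrequencyData(); // Uint8Array, one value per frequency bin
     // ...drive bar heights, light intensity, etc. from freqs...
   });
*/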
this._webAudioAnalyser.frequencyBinCount : 0; } /** * Gets the current frequency data as a byte array * @see https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/getByteFrequencyData * @returns a Uint8Array */ getByteFrequencyData() { return this._audioEngine.canUseWebAudio && (this._webAudioAnalyser.smoothingTimeConstant = this.SMOOTHING, this._webAudioAnalyser.fftSize = this.FFT_SIZE, this._webAudioAnalyser.getByteFrequencyData(this._byteFreqs)), this._byteFreqs; } /** * Gets the current waveform as a byte array * @see https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/getByteTimeDomainData * @returns a Uint8Array */ getByteTimeDomainData() { return this._audioEngine.canUseWebAudio && (this._webAudioAnalyser.smoothingTimeConstant = this.SMOOTHING, this._webAudioAnalyser.fftSize = this.FFT_SIZE, this._webAudioAnalyser.getByteTimeDomainData(this._byteTime)), this._byteTime; } /** * Gets the current frequency data as a float array * @see https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/getByteFrequencyData * @returns a Float32Array */ getFloatFrequencyData() { return this._audioEngine.canUseWebAudio && (this._webAudioAnalyser.smoothingTimeConstant = this.SMOOTHING, this._webAudioAnalyser.fftSize = this.FFT_SIZE, this._webAudioAnalyser.getFloatFrequencyData(this._floatFreqs)), this._floatFreqs; } /** * Renders the debug canvas */ drawDebugCanvas() { if (this._audioEngine.canUseWebAudio && (this._debugCanvas || (this._debugCanvas = document.createElement("canvas"), this._debugCanvas.width = this.DEBUGCANVASSIZE.width, this._debugCanvas.height = this.DEBUGCANVASSIZE.height, this._debugCanvas.style.position = "absolute", this._debugCanvas.style.top = this.DEBUGCANVASPOS.y + "px", this._debugCanvas.style.left = this.DEBUGCANVASPOS.x + "px", this._debugCanvasContext = this._debugCanvas.getContext("2d"), document.body.appendChild(this._debugCanvas), this._registerFunc = () => { this.drawDebugCanvas(); }, this._scene.registerBeforeRender(this._registerFunc)), this._registerFunc && this._debugCanvasContext)) { const e = this.getByteFrequencyData(); this._debugCanvasContext.fillStyle = "rgb(0, 0, 0)", this._debugCanvasContext.fillRect(0, 0, this.DEBUGCANVASSIZE.width, this.DEBUGCANVASSIZE.height); for (let t = 0; t < this.getFrequencyBinCount(); t++) { const r = e[t] / this.BARGRAPHAMPLITUDE, s = this.DEBUGCANVASSIZE.height * r, n = this.DEBUGCANVASSIZE.height - s - 1, a = this.DEBUGCANVASSIZE.width / this.getFrequencyBinCount(), l = t / this.getFrequencyBinCount() * 360; this._debugCanvasContext.fillStyle = "hsl(" + l + ", 100%, 50%)", this._debugCanvasContext.fillRect(t * a, n, a, s); } } } /** * Stops rendering the debug canvas and removes it */ stopDebugCanvas() { this._debugCanvas && (this._registerFunc && (this._scene.unregisterBeforeRender(this._registerFunc), this._registerFunc = null), document.body.removeChild(this._debugCanvas), this._debugCanvas = null, this._debugCanvasContext = null); } /** * Connects two audio nodes * @param inputAudioNode defines first node to connect * @param outputAudioNode defines second node to connect */ connectAudioNodes(e, t) { this._audioEngine.canUseWebAudio && (e.connect(this._webAudioAnalyser), this._webAudioAnalyser.connect(t)); } /** * Releases all associated resources */ dispose() { this._audioEngine.canUseWebAudio && this._webAudioAnalyser.disconnect(); } } $e.AudioEngineFactory = (c, e, t) => new wte(c, e, t); class wte { /** * Gets the current AudioContext if available. 
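* Reading the getter lazily initializes the underlying WebAudio context if needed.
* @example
* // Hedged sketch, assuming the public accessor BABYLON.Engine.audioEngine:
* const ctx = BABYLON.Engine.audioEngine && BABYLON.Engine.audioEngine.audioContext;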
*/ get audioContext() { return this._audioContextInitialized || this._initializeAudioContext(), this._audioContext; } /** * Instantiates a new audio engine. * * There should be only one per page as some browsers restrict the number * of audio contexts you can create. * @param hostElement defines the host element where to display the mute icon if necessary * @param audioContext defines the audio context to be used by the audio engine * @param audioDestination defines the audio destination node to be used by audio engine */ constructor(e = null, t = null, i = null) { if (this._audioContext = null, this._audioContextInitialized = !1, this._muteButton = null, this._audioDestination = null, this.canUseWebAudio = !1, this.WarnedWebAudioUnsupported = !1, this.isMP3supported = !1, this.isOGGsupported = !1, this.unlocked = !1, this.useCustomUnlockedButton = !1, this.onAudioUnlockedObservable = new Fe(), this.onAudioLockedObservable = new Fe(), this._tryToRun = !1, this._onResize = () => { this._moveButtonToTopLeft(); }, !cu()) return; typeof window.AudioContext < "u" && (this.canUseWebAudio = !0); const r = document.createElement("audio"); this._hostElement = e, this._audioContext = t, this._audioDestination = i; try { r && r.canPlayType && (r.canPlayType('audio/mpeg; codecs="mp3"').replace(/^no$/, "") || r.canPlayType("audio/mp3").replace(/^no$/, "")) && (this.isMP3supported = !0); } catch { } try { r && r.canPlayType && r.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/, "") && (this.isOGGsupported = !0); } catch { } } /** * Flags the audio engine in Locked state. * This happens due to new browser policies preventing audio to autoplay. */ lock() { this._triggerSuspendedState(); } /** * Unlocks the audio engine once a user action has been done on the dom. * This is helpful to resume play once browser policies have been satisfied. */ unlock() { var e, t; if (((e = this._audioContext) === null || e === void 0 ? void 0 : e.state) === "running") { this._hideMuteButton(); return; } this._tryToRun ? (t = this._audioContext) === null || t === void 0 || t.suspend().then(() => { this._tryToRun = !1, this._triggerRunningState(); }) : this._triggerRunningState(); } _resumeAudioContext() { var e; return !((e = this._audioContext) === null || e === void 0) && e.resume ? 
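/* Usage sketch (hedged): browsers block audio until a user gesture, so the engine may start
   locked. A custom unlock flow, assuming the public BABYLON.Engine.audioEngine accessor and
   an app-provided `playButton` element:

   BABYLON.Engine.audioEngine.useCustomUnlockedButton = true;   // hide the default mute icon
   playButton.addEventListener("click", () => {
     BABYLON.Engine.audioEngine.unlock();                       // resume after the gesture
   });
   BABYLON.Engine.audioEngine.onAudioUnlockedObservable.addOnce(() => console.log("audio unlocked"));
*/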
this._audioContext.resume() : Promise.resolve(); } _initializeAudioContext() { try { this.canUseWebAudio && (this._audioContext || (this._audioContext = new AudioContext()), this.masterGain = this._audioContext.createGain(), this.masterGain.gain.value = 1, this._audioDestination || (this._audioDestination = this._audioContext.destination), this.masterGain.connect(this._audioDestination), this._audioContextInitialized = !0, this._audioContext.state === "running" && this._triggerRunningState()); } catch (e) { this.canUseWebAudio = !1, Ce.Error("Web Audio: " + e.message); } } _triggerRunningState() { this._tryToRun || (this._tryToRun = !0, this._resumeAudioContext().then(() => { this._tryToRun = !1, this._muteButton && this._hideMuteButton(), this.unlocked = !0, this.onAudioUnlockedObservable.notifyObservers(this); }).catch(() => { this._tryToRun = !1, this.unlocked = !1; })); } _triggerSuspendedState() { this.unlocked = !1, this.onAudioLockedObservable.notifyObservers(this), this._displayMuteButton(); } _displayMuteButton() { if (this.useCustomUnlockedButton || this._muteButton) return; this._muteButton = document.createElement("BUTTON"), this._muteButton.className = "babylonUnmuteIcon", this._muteButton.id = "babylonUnmuteIconBtn", this._muteButton.title = "Unmute"; const t = ".babylonUnmuteIcon { position: absolute; left: 20px; top: 20px; height: 40px; width: 60px; background-color: rgba(51,51,51,0.7); background-image: url(" + (window.SVGSVGElement ? "data:image/svg+xml;charset=UTF-8,%3Csvg%20version%3D%221.1%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20width%3D%2239%22%20height%3D%2232%22%20viewBox%3D%220%200%2039%2032%22%3E%3Cpath%20fill%3D%22white%22%20d%3D%22M9.625%2018.938l-0.031%200.016h-4.953q-0.016%200-0.031-0.016v-12.453q0-0.016%200.031-0.016h4.953q0.031%200%200.031%200.016v12.453zM12.125%207.688l8.719-8.703v27.453l-8.719-8.719-0.016-0.047v-9.938zM23.359%207.875l1.406-1.406%204.219%204.203%204.203-4.203%201.422%201.406-4.219%204.219%204.219%204.203-1.484%201.359-4.141-4.156-4.219%204.219-1.406-1.422%204.219-4.203z%22%3E%3C%2Fpath%3E%3C%2Fsvg%3E" : "https://cdn.babylonjs.com/Assets/audio.png") + "); background-size: 80%; background-repeat:no-repeat; background-position: center; background-position-y: 4px; border: none; outline: none; transition: transform 0.125s ease-out; cursor: pointer; z-index: 9999; } .babylonUnmuteIcon:hover { transform: scale(1.05) } .babylonUnmuteIcon:active { background-color: rgba(51,51,51,1) }", i = document.createElement("style"); i.appendChild(document.createTextNode(t)), document.getElementsByTagName("head")[0].appendChild(i), document.body.appendChild(this._muteButton), this._moveButtonToTopLeft(), this._muteButton.addEventListener("touchend", () => { this._triggerRunningState(); }, !0), this._muteButton.addEventListener("click", () => { this.unlock(); }, !0), window.addEventListener("resize", this._onResize); } _moveButtonToTopLeft() { this._hostElement && this._muteButton && (this._muteButton.style.top = this._hostElement.offsetTop + 20 + "px", this._muteButton.style.left = this._hostElement.offsetLeft + 20 + "px"); } _hideMuteButton() { this._muteButton && (document.body.removeChild(this._muteButton), this._muteButton = null); } /** * Destroy and release the resources associated with the audio context. 
*/ dispose() { this.canUseWebAudio && this._audioContextInitialized && (this._connectedAnalyser && this._audioContext && (this._connectedAnalyser.stopDebugCanvas(), this._connectedAnalyser.dispose(), this.masterGain.disconnect(), this.masterGain.connect(this._audioContext.destination), this._connectedAnalyser = null), this.masterGain.gain.value = 1), this.WarnedWebAudioUnsupported = !1, this._hideMuteButton(), window.removeEventListener("resize", this._onResize), this.onAudioUnlockedObservable.clear(), this.onAudioLockedObservable.clear(); } /** * Gets the global volume sets on the master gain. * @returns the global volume if set or -1 otherwise */ getGlobalVolume() { return this.canUseWebAudio && this._audioContextInitialized ? this.masterGain.gain.value : -1; } /** * Sets the global volume of your experience (sets on the master gain). * @param newVolume Defines the new global volume of the application */ setGlobalVolume(e) { this.canUseWebAudio && this._audioContextInitialized && (this.masterGain.gain.value = e); } /** * Connect the audio engine to an audio analyser allowing some amazing * synchronization between the sounds/music and your visualization (VuMeter for instance). * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#using-the-analyser * @param analyser The analyser to connect to the engine */ connectToAnalyser(e) { this._connectedAnalyser && this._connectedAnalyser.stopDebugCanvas(), this.canUseWebAudio && this._audioContextInitialized && this._audioContext && (this._connectedAnalyser = e, this.masterGain.disconnect(), this._connectedAnalyser.connectAudioNodes(this.masterGain, this._audioContext.destination)); } } class I4 { /** * Does the sound loop after it finishes playing once. */ get loop() { return this._loop; } set loop(e) { e !== this._loop && (this._loop = e, this.updateOptions({ loop: e })); } /** * Gets the current time for the sound. */ get currentTime() { var e; if (this._htmlAudioElement) return this._htmlAudioElement.currentTime; if (!((e = $e.audioEngine) === null || e === void 0) && e.audioContext && (this.isPlaying || this.isPaused)) { const t = this.isPaused ? 0 : $e.audioEngine.audioContext.currentTime - this._startTime; return this._currentTime + t; } return 0; } /** * Does this sound enables spatial sound. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ get spatialSound() { return this._spatialSound; } /** * Does this sound enables spatial sound. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ set spatialSound(e) { if (e == this._spatialSound) return; const t = this.isPlaying; this.pause(), e ? 
(this._spatialSound = e, this._updateSpatialParameters()) : this._disableSpatialSound(), t && this.play(); } /** * Create a sound and attach it to a scene * @param name Name of your sound * @param urlOrArrayBuffer Url to the sound to load async or ArrayBuffer, it also works with MediaStreams and AudioBuffers * @param scene defines the scene the sound belongs to * @param readyToPlayCallback Provide a callback function if you'd like to load your code once the sound is ready to be played * @param options Objects to provide with the current available options: autoplay, loop, volume, spatialSound, maxDistance, rolloffFactor, refDistance, distanceModel, panningModel, streaming */ constructor(e, t, i, r = null, s) { var n, a, l, o, u; if (this.autoplay = !1, this._loop = !1, this.useCustomAttenuation = !1, this.isPlaying = !1, this.isPaused = !1, this.refDistance = 1, this.rolloffFactor = 1, this.maxDistance = 100, this.distanceModel = "linear", this.metadata = null, this.onEndedObservable = new Fe(), this._spatialSound = !1, this._panningModel = "equalpower", this._playbackRate = 1, this._streaming = !1, this._startTime = 0, this._currentTime = 0, this._position = D.Zero(), this._localDirection = new D(1, 0, 0), this._volume = 1, this._isReadyToPlay = !1, this._isDirectional = !1, this._coneInnerAngle = 360, this._coneOuterAngle = 360, this._coneOuterGain = 0, this._isOutputConnected = !1, this._urlType = "Unknown", this.name = e, i = i || gi.LastCreatedScene, !!i) if (this._scene = i, I4._SceneComponentInitialization(i), this._readyToPlayCallback = r, this._customAttenuationFunction = (h, d, f, p, m) => d < f ? h * (1 - d / f) : 0, s && (this.autoplay = s.autoplay || !1, this._loop = s.loop || !1, s.volume !== void 0 && (this._volume = s.volume), this._spatialSound = (n = s.spatialSound) !== null && n !== void 0 ? n : !1, this.maxDistance = (a = s.maxDistance) !== null && a !== void 0 ? a : 100, this.useCustomAttenuation = (l = s.useCustomAttenuation) !== null && l !== void 0 ? l : !1, this.rolloffFactor = s.rolloffFactor || 1, this.refDistance = s.refDistance || 1, this.distanceModel = s.distanceModel || "linear", this._playbackRate = s.playbackRate || 1, this._streaming = (o = s.streaming) !== null && o !== void 0 ? o : !1, this._length = s.length, this._offset = s.offset), !((u = $e.audioEngine) === null || u === void 0) && u.canUseWebAudio && $e.audioEngine.audioContext) { this._soundGain = $e.audioEngine.audioContext.createGain(), this._soundGain.gain.value = this._volume, this._inputAudioNode = this._soundGain, this._outputAudioNode = this._soundGain, this._spatialSound && this._createSpatialParameters(), this._scene.mainSoundTrack.addSound(this); let h = !0; if (t) try { typeof t == "string" ? (this._urlType = "String", this._url = t) : t instanceof ArrayBuffer ? this._urlType = "ArrayBuffer" : t instanceof HTMLMediaElement ? this._urlType = "MediaElement" : t instanceof MediaStream ? this._urlType = "MediaStream" : t instanceof AudioBuffer ? 
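/* Usage sketch (hedged): the class above is the public BABYLON.Sound. Creating a spatial
   sound; "sounds/gunshot.wav" and `scene` are placeholder values for illustration:

   const gunshot = new BABYLON.Sound("gunshot", "sounds/gunshot.wav", scene,
     () => gunshot.play(),                                          // readyToPlay callback
     { loop: false, spatialSound: true, maxDistance: 25, volume: 0.8 });
*/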
this._urlType = "AudioBuffer" : Array.isArray(t) && (this._urlType = "Array"); let d = [], f = !1; switch (this._urlType) { case "MediaElement": this._streaming = !0, this._isReadyToPlay = !0, this._streamingSource = $e.audioEngine.audioContext.createMediaElementSource(t), this.autoplay && this.play(0, this._offset, this._length), this._readyToPlayCallback && this._readyToPlayCallback(); break; case "MediaStream": this._streaming = !0, this._isReadyToPlay = !0, this._streamingSource = $e.audioEngine.audioContext.createMediaStreamSource(t), this.autoplay && this.play(0, this._offset, this._length), this._readyToPlayCallback && this._readyToPlayCallback(); break; case "ArrayBuffer": t.byteLength > 0 && (f = !0, this._soundLoaded(t)); break; case "AudioBuffer": this._audioBufferLoaded(t); break; case "String": d.push(t); case "Array": d.length === 0 && (d = t); for (let p = 0; p < d.length; p++) { const m = d[p]; if (f = s && s.skipCodecCheck || m.indexOf(".mp3", m.length - 4) !== -1 && $e.audioEngine.isMP3supported || m.indexOf(".ogg", m.length - 4) !== -1 && $e.audioEngine.isOGGsupported || m.indexOf(".wav", m.length - 4) !== -1 || m.indexOf(".m4a", m.length - 4) !== -1 || m.indexOf(".mp4", m.length - 4) !== -1 || m.indexOf("blob:") !== -1, f) { this._streaming ? (this._htmlAudioElement = new Audio(m), this._htmlAudioElement.controls = !1, this._htmlAudioElement.loop = this.loop, Ve.SetCorsBehavior(m, this._htmlAudioElement), this._htmlAudioElement.preload = "auto", this._htmlAudioElement.addEventListener("canplaythrough", () => { this._isReadyToPlay = !0, this.autoplay && this.play(0, this._offset, this._length), this._readyToPlayCallback && this._readyToPlayCallback(); }), document.body.appendChild(this._htmlAudioElement), this._htmlAudioElement.load()) : this._scene._loadFile(m, (_) => { this._soundLoaded(_); }, void 0, !0, !0, (_) => { _ && Ce.Error("XHR " + _.status + " error on: " + m + "."), Ce.Error("Sound creation aborted."), this._scene.mainSoundTrack.removeSound(this); }); break; } } break; default: h = !1; break; } h ? f || (this._isReadyToPlay = !0, this._readyToPlayCallback && setTimeout(() => { this._readyToPlayCallback && this._readyToPlayCallback(); }, 1e3)) : Ce.Error("Parameter must be a URL to the sound, an Array of URLs (.mp3 & .ogg) or an ArrayBuffer of the sound."); } catch { Ce.Error("Unexpected error. Sound creation aborted."), this._scene.mainSoundTrack.removeSound(this); } } else this._scene.mainSoundTrack.addSound(this), $e.audioEngine && !$e.audioEngine.WarnedWebAudioUnsupported && (Ce.Error("Web Audio is not supported by your browser."), $e.audioEngine.WarnedWebAudioUnsupported = !0), this._readyToPlayCallback && setTimeout(() => { this._readyToPlayCallback && this._readyToPlayCallback(); }, 1e3); } /** * Release the sound and its associated resources */ dispose() { var e; !((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio && (this.isPlaying && this.stop(), this._isReadyToPlay = !1, this.soundTrackId === -1 ? 
this._scene.mainSoundTrack.removeSound(this) : this._scene.soundTracks && this._scene.soundTracks[this.soundTrackId].removeSound(this), this._soundGain && (this._soundGain.disconnect(), this._soundGain = null), this._soundPanner && (this._soundPanner.disconnect(), this._soundPanner = null), this._soundSource && (this._soundSource.disconnect(), this._soundSource = null), this._audioBuffer = null, this._htmlAudioElement && (this._htmlAudioElement.pause(), this._htmlAudioElement.src = "", document.body.removeChild(this._htmlAudioElement)), this._streamingSource && this._streamingSource.disconnect(), this._connectedTransformNode && this._registerFunc && (this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc), this._connectedTransformNode = null), this._clearTimeoutsAndObservers()); } /** * Gets if the sounds is ready to be played or not. * @returns true if ready, otherwise false */ isReady() { return this._isReadyToPlay; } /** * Get the current class name. * @returns current class name */ getClassName() { return "Sound"; } _audioBufferLoaded(e) { var t; !((t = $e.audioEngine) === null || t === void 0) && t.audioContext && (this._audioBuffer = e, this._isReadyToPlay = !0, this.autoplay && this.play(0, this._offset, this._length), this._readyToPlayCallback && this._readyToPlayCallback()); } _soundLoaded(e) { var t; !((t = $e.audioEngine) === null || t === void 0) && t.audioContext && $e.audioEngine.audioContext.decodeAudioData(e, (i) => { this._audioBufferLoaded(i); }, (i) => { Ce.Error("Error while decoding audio data for: " + this.name + " / Error: " + i); }); } /** * Sets the data of the sound from an audiobuffer * @param audioBuffer The audioBuffer containing the data */ setAudioBuffer(e) { var t; !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && (this._audioBuffer = e, this._isReadyToPlay = !0); } /** * Updates the current sounds options such as maxdistance, loop... * @param options A JSON object containing values named as the object properties */ updateOptions(e) { var t, i, r, s, n, a, l, o, u, h, d; e && (this.loop = (t = e.loop) !== null && t !== void 0 ? t : this.loop, this.maxDistance = (i = e.maxDistance) !== null && i !== void 0 ? i : this.maxDistance, this.useCustomAttenuation = (r = e.useCustomAttenuation) !== null && r !== void 0 ? r : this.useCustomAttenuation, this.rolloffFactor = (s = e.rolloffFactor) !== null && s !== void 0 ? s : this.rolloffFactor, this.refDistance = (n = e.refDistance) !== null && n !== void 0 ? n : this.refDistance, this.distanceModel = (a = e.distanceModel) !== null && a !== void 0 ? a : this.distanceModel, this._playbackRate = (l = e.playbackRate) !== null && l !== void 0 ? l : this._playbackRate, this._length = (o = e.length) !== null && o !== void 0 ? o : void 0, this.spatialSound = (u = e.spatialSound) !== null && u !== void 0 ? u : this._spatialSound, this._setOffset((h = e.offset) !== null && h !== void 0 ? h : void 0), this.setVolume((d = e.volume) !== null && d !== void 0 ? d : this._volume), this._updateSpatialParameters(), this.isPlaying && (this._streaming && this._htmlAudioElement ? 
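/* Usage sketch (hedged): updateOptions changes an existing sound in place; only the keys
   you pass are touched. `music` is an assumed BABYLON.Sound instance:

   music.updateOptions({ loop: true, volume: 0.5, playbackRate: 1.25 });
*/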
(this._htmlAudioElement.playbackRate = this._playbackRate, this._htmlAudioElement.loop !== this.loop && (this._htmlAudioElement.loop = this.loop)) : this._soundSource && (this._soundSource.playbackRate.value = this._playbackRate, this._soundSource.loop !== this.loop && (this._soundSource.loop = this.loop), this._offset !== void 0 && this._soundSource.loopStart !== this._offset && (this._soundSource.loopStart = this._offset), this._length !== void 0 && this._length !== this._soundSource.loopEnd && (this._soundSource.loopEnd = (this._offset | 0) + this._length)))); } _createSpatialParameters() { var e, t; !((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio && $e.audioEngine.audioContext && (this._scene.headphone && (this._panningModel = "HRTF"), this._soundPanner = (t = this._soundPanner) !== null && t !== void 0 ? t : $e.audioEngine.audioContext.createPanner(), this._soundPanner && this._outputAudioNode && (this._updateSpatialParameters(), this._soundPanner.connect(this._outputAudioNode), this._inputAudioNode = this._soundPanner)); } _disableSpatialSound() { var e; this._spatialSound && (this._inputAudioNode = this._soundGain, (e = this._soundPanner) === null || e === void 0 || e.disconnect(), this._soundPanner = null, this._spatialSound = !1); } _updateSpatialParameters() { this._spatialSound && (this._soundPanner ? this.useCustomAttenuation ? (this._soundPanner.distanceModel = "linear", this._soundPanner.maxDistance = Number.MAX_VALUE, this._soundPanner.refDistance = 1, this._soundPanner.rolloffFactor = 1, this._soundPanner.panningModel = this._panningModel) : (this._soundPanner.distanceModel = this.distanceModel, this._soundPanner.maxDistance = this.maxDistance, this._soundPanner.refDistance = this.refDistance, this._soundPanner.rolloffFactor = this.rolloffFactor, this._soundPanner.panningModel = this._panningModel) : this._createSpatialParameters()); } /** * Switch the panning model to HRTF: * Renders a stereo output of higher quality than equalpower — it uses a convolution with measured impulse responses from human subjects. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ switchPanningModelToHRTF() { this._panningModel = "HRTF", this._switchPanningModel(); } /** * Switch the panning model to Equal Power: * Represents the equal-power panning algorithm, generally regarded as simple and efficient. equalpower is the default value. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ switchPanningModelToEqualPower() { this._panningModel = "equalpower", this._switchPanningModel(); } _switchPanningModel() { var e; !((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio && this._spatialSound && this._soundPanner && (this._soundPanner.panningModel = this._panningModel); } /** * Connect this sound to a sound track audio node like gain... 
* @param soundTrackAudioNode the sound track audio node to connect to */ connectToSoundTrackAudioNode(e) { var t; !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._outputAudioNode && (this._isOutputConnected && this._outputAudioNode.disconnect(), this._outputAudioNode.connect(e), this._isOutputConnected = !0); } /** * Transform this sound into a directional source * @param coneInnerAngle Size of the inner cone in degree * @param coneOuterAngle Size of the outer cone in degree * @param coneOuterGain Volume of the sound outside the outer cone (between 0.0 and 1.0) */ setDirectionalCone(e, t, i) { if (t < e) { Ce.Error("setDirectionalCone(): outer angle of the cone must be superior or equal to the inner angle."); return; } this._coneInnerAngle = e, this._coneOuterAngle = t, this._coneOuterGain = i, this._isDirectional = !0, this.isPlaying && this.loop && (this.stop(), this.play(0, this._offset, this._length)); } /** * Gets or sets the inner angle for the directional cone. */ get directionalConeInnerAngle() { return this._coneInnerAngle; } /** * Gets or sets the inner angle for the directional cone. */ set directionalConeInnerAngle(e) { var t; if (e != this._coneInnerAngle) { if (this._coneOuterAngle < e) { Ce.Error("directionalConeInnerAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } this._coneInnerAngle = e, !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._spatialSound && this._soundPanner && (this._soundPanner.coneInnerAngle = this._coneInnerAngle); } } /** * Gets or sets the outer angle for the directional cone. */ get directionalConeOuterAngle() { return this._coneOuterAngle; } /** * Gets or sets the outer angle for the directional cone. */ set directionalConeOuterAngle(e) { var t; if (e != this._coneOuterAngle) { if (e < this._coneInnerAngle) { Ce.Error("directionalConeOuterAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } this._coneOuterAngle = e, !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._spatialSound && this._soundPanner && (this._soundPanner.coneOuterAngle = this._coneOuterAngle); } } /** * Sets the position of the emitter if spatial sound is enabled * @param newPosition Defines the new position */ setPosition(e) { var t; e.equals(this._position) || (this._position.copyFrom(e), !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._spatialSound && this._soundPanner && !isNaN(this._position.x) && !isNaN(this._position.y) && !isNaN(this._position.z) && (this._soundPanner.positionX.value = this._position.x, this._soundPanner.positionY.value = this._position.y, this._soundPanner.positionZ.value = this._position.z)); } /** * Sets the local direction of the emitter if spatial sound is enabled * @param newLocalDirection Defines the new local direction */ setLocalDirectionToMesh(e) { var t; this._localDirection = e, !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._connectedTransformNode && this.isPlaying && this._updateDirection(); } _updateDirection() { if (!this._connectedTransformNode || !this._soundPanner) return; const e = this._connectedTransformNode.getWorldMatrix(), t = D.TransformNormal(this._localDirection, e); t.normalize(), this._soundPanner.orientationX.value = t.x, this._soundPanner.orientationY.value = t.y, this._soundPanner.orientationZ.value = t.z; } /** @internal */ updateDistanceFromListener() { var e; if (!((e = $e.audioEngine) === null || e === void 
0) && e.canUseWebAudio && this._connectedTransformNode && this.useCustomAttenuation && this._soundGain && this._scene.activeCamera) { const t = this._scene.audioListenerPositionProvider ? this._connectedTransformNode.position.subtract(this._scene.audioListenerPositionProvider()).length() : this._connectedTransformNode.getDistanceToCamera(this._scene.activeCamera); this._soundGain.gain.value = this._customAttenuationFunction(this._volume, t, this.maxDistance, this.refDistance, this.rolloffFactor); } } /** * Sets a new custom attenuation function for the sound. * @param callback Defines the function used for the attenuation * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-your-own-custom-attenuation-function */ setAttenuationFunction(e) { this._customAttenuationFunction = e; } /** * Play the sound * @param time (optional) Start the sound after X seconds. Start immediately (0) by default. * @param offset (optional) Start the sound at a specific time in seconds * @param length (optional) Sound duration (in seconds) */ play(e, t, i) { var r, s, n, a; if (this._isReadyToPlay && this._scene.audioEnabled && (!((r = $e.audioEngine) === null || r === void 0) && r.audioContext)) try { this._clearTimeoutsAndObservers(); let l = e ? ((s = $e.audioEngine) === null || s === void 0 ? void 0 : s.audioContext.currentTime) + e : (n = $e.audioEngine) === null || n === void 0 ? void 0 : n.audioContext.currentTime; if ((!this._soundSource || !this._streamingSource) && this._spatialSound && this._soundPanner && (!isNaN(this._position.x) && !isNaN(this._position.y) && !isNaN(this._position.z) && (this._soundPanner.positionX.value = this._position.x, this._soundPanner.positionY.value = this._position.y, this._soundPanner.positionZ.value = this._position.z), this._isDirectional && (this._soundPanner.coneInnerAngle = this._coneInnerAngle, this._soundPanner.coneOuterAngle = this._coneOuterAngle, this._soundPanner.coneOuterGain = this._coneOuterGain, this._connectedTransformNode ? this._updateDirection() : this._soundPanner.setOrientation(this._localDirection.x, this._localDirection.y, this._localDirection.z))), this._streaming) { if (this._streamingSource || (this._streamingSource = $e.audioEngine.audioContext.createMediaElementSource(this._htmlAudioElement), this._htmlAudioElement.onended = () => { this._onended(); }, this._htmlAudioElement.playbackRate = this._playbackRate), this._streamingSource.disconnect(), this._inputAudioNode && this._streamingSource.connect(this._inputAudioNode), this._htmlAudioElement) { const o = () => { var u, h; if (!((u = $e.audioEngine) === null || u === void 0) && u.unlocked) { const d = this._htmlAudioElement.play(); d !== void 0 && d.catch(() => { var f, p; (f = $e.audioEngine) === null || f === void 0 || f.lock(), (this.loop || this.autoplay) && (this._audioUnlockedObserver = (p = $e.audioEngine) === null || p === void 0 ? void 0 : p.onAudioUnlockedObservable.addOnce(() => { o(); })); }); } else (this.loop || this.autoplay) && (this._audioUnlockedObserver = (h = $e.audioEngine) === null || h === void 0 ? void 0 : h.onAudioUnlockedObservable.addOnce(() => { o(); })); }; o(); } } else { const o = () => { var u, h, d, f; if (!((u = $e.audioEngine) === null || u === void 0) && u.audioContext) { if (i = i || this._length, t !== void 0 && this._setOffset(t), this._soundSource) { const p = this._soundSource; p.onended = () => { p.disconnect(); }; } if (this._soundSource = (h = $e.audioEngine) === null || h === void 0 ? 
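/* Usage sketch (hedged): play(time, offset, length): all arguments are optional and in
   seconds. `music` is an assumed BABYLON.Sound instance that has finished loading:

   music.play();         // play now
   music.play(2);        // start after a 2 s delay
   music.play(0, 10, 5); // start immediately, 10 s into the buffer, for 5 s
*/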
void 0 : h.audioContext.createBufferSource(), this._soundSource && this._inputAudioNode) { this._soundSource.buffer = this._audioBuffer, this._soundSource.connect(this._inputAudioNode), this._soundSource.loop = this.loop, t !== void 0 && (this._soundSource.loopStart = t), i !== void 0 && (this._soundSource.loopEnd = (t | 0) + i), this._soundSource.playbackRate.value = this._playbackRate, this._soundSource.onended = () => { this._onended(); }, l = e ? ((d = $e.audioEngine) === null || d === void 0 ? void 0 : d.audioContext.currentTime) + e : $e.audioEngine.audioContext.currentTime; const p = ((this.isPaused ? this.currentTime : 0) + ((f = this._offset) !== null && f !== void 0 ? f : 0)) % this._soundSource.buffer.duration; this._soundSource.start(l, p, this.loop ? void 0 : i); } } }; ((a = $e.audioEngine) === null || a === void 0 ? void 0 : a.audioContext.state) === "suspended" ? this._tryToPlayTimeout = setTimeout(() => { var u; ((u = $e.audioEngine) === null || u === void 0 ? void 0 : u.audioContext.state) === "suspended" ? ($e.audioEngine.lock(), (this.loop || this.autoplay) && (this._audioUnlockedObserver = $e.audioEngine.onAudioUnlockedObservable.addOnce(() => { o(); }))) : o(); }, 500) : o(); } this._startTime = l, this.isPlaying = !0, this.isPaused = !1; } catch (l) { Ce.Error("Error while trying to play audio: " + this.name + ", " + l.message); } } _onended() { this.isPlaying = !1, this._startTime = 0, this._currentTime = 0, this.onended && this.onended(), this.onEndedObservable.notifyObservers(this); } /** * Stop the sound * @param time (optional) Stop the sound after X seconds. Stop immediately (0) by default. */ stop(e) { var t; if (this.isPlaying) if (this._clearTimeoutsAndObservers(), this._streaming) this._htmlAudioElement ? (this._htmlAudioElement.pause(), this._htmlAudioElement.currentTime > 0 && (this._htmlAudioElement.currentTime = 0)) : this._streamingSource.disconnect(), this.isPlaying = !1; else if (!((t = $e.audioEngine) === null || t === void 0) && t.audioContext && this._soundSource) { const i = e ? $e.audioEngine.audioContext.currentTime + e : void 0; this._soundSource.onended = () => { this.isPlaying = !1, this.isPaused = !1, this._startTime = 0, this._currentTime = 0, this._soundSource && (this._soundSource.onended = () => { }), this._onended(); }, this._soundSource.stop(i); } else this.isPlaying = !1; else this.isPaused && (this.isPaused = !1, this._startTime = 0, this._currentTime = 0); } /** * Put the sound in pause */ pause() { var e; this.isPlaying && (this._clearTimeoutsAndObservers(), this._streaming ? (this._htmlAudioElement ? this._htmlAudioElement.pause() : this._streamingSource.disconnect(), this.isPlaying = !1, this.isPaused = !0) : !((e = $e.audioEngine) === null || e === void 0) && e.audioContext && this._soundSource && (this._soundSource.onended = () => { }, this._soundSource.stop(), this.isPlaying = !1, this.isPaused = !0, this._currentTime += $e.audioEngine.audioContext.currentTime - this._startTime)); } /** * Sets a dedicated volume for this sounds * @param newVolume Define the new volume of the sound * @param time Define time for gradual change to new volume */ setVolume(e, t) { var i; !((i = $e.audioEngine) === null || i === void 0) && i.canUseWebAudio && this._soundGain && (t && $e.audioEngine.audioContext ? 
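/* Usage sketch (hedged): the second argument of setVolume ramps the gain linearly over the
   given number of seconds. `music` is an assumed BABYLON.Sound instance:

   music.setVolume(0, 3); // fade out over 3 seconds
*/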
(this._soundGain.gain.cancelScheduledValues($e.audioEngine.audioContext.currentTime), this._soundGain.gain.setValueAtTime(this._soundGain.gain.value, $e.audioEngine.audioContext.currentTime), this._soundGain.gain.linearRampToValueAtTime(e, $e.audioEngine.audioContext.currentTime + t)) : this._soundGain.gain.value = e), this._volume = e; } /** * Set the sound play back rate * @param newPlaybackRate Define the playback rate the sound should be played at */ setPlaybackRate(e) { this._playbackRate = e, this.isPlaying && (this._streaming && this._htmlAudioElement ? this._htmlAudioElement.playbackRate = this._playbackRate : this._soundSource && (this._soundSource.playbackRate.value = this._playbackRate)); } /** * Gets the sound play back rate. * @returns the play back rate of the sound */ getPlaybackRate() { return this._playbackRate; } /** * Gets the volume of the sound. * @returns the volume of the sound */ getVolume() { return this._volume; } /** * Attach the sound to a dedicated mesh * @param transformNode The transform node to connect the sound with * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#attaching-a-sound-to-a-mesh */ attachToMesh(e) { this._connectedTransformNode && this._registerFunc && (this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc), this._registerFunc = null), this._connectedTransformNode = e, this._spatialSound || (this._spatialSound = !0, this._createSpatialParameters(), this.isPlaying && this.loop && (this.stop(), this.play(0, this._offset, this._length))), this._onRegisterAfterWorldMatrixUpdate(this._connectedTransformNode), this._registerFunc = (t) => this._onRegisterAfterWorldMatrixUpdate(t), this._connectedTransformNode.registerAfterWorldMatrixUpdate(this._registerFunc); } /** * Detach the sound from the previously attached mesh * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#attaching-a-sound-to-a-mesh */ detachFromMesh() { this._connectedTransformNode && this._registerFunc && (this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc), this._registerFunc = null, this._connectedTransformNode = null); } _onRegisterAfterWorldMatrixUpdate(e) { var t; if (!e.getBoundingInfo) this.setPosition(e.absolutePosition); else { const r = e.getBoundingInfo(); this.setPosition(r.boundingSphere.centerWorld); } !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._isDirectional && this.isPlaying && this._updateDirection(); } /** * Clone the current sound in the scene. * @returns the new sound clone */ clone() { if (this._streaming) return null; { const e = () => { this._isReadyToPlay ? 
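/* Usage sketch (hedged): attaching a sound to a mesh turns it into a spatial emitter that
   follows the mesh's world position. `music` and `box` are assumed to exist:

   music.attachToMesh(box);
   music.setDirectionalCone(90, 180, 0);                         // inner/outer angle, outer gain
   music.setLocalDirectionToMesh(new BABYLON.Vector3(1, 0, 0));  // emit along +X of the mesh
*/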
(i._audioBuffer = this.getAudioBuffer(), i._isReadyToPlay = !0, i.autoplay && i.play(0, this._offset, this._length)) : setTimeout(e, 300); }, t = { autoplay: this.autoplay, loop: this.loop, volume: this._volume, spatialSound: this._spatialSound, maxDistance: this.maxDistance, useCustomAttenuation: this.useCustomAttenuation, rolloffFactor: this.rolloffFactor, refDistance: this.refDistance, distanceModel: this.distanceModel }, i = new I4(this.name + "_cloned", new ArrayBuffer(0), this._scene, null, t); return this.useCustomAttenuation && i.setAttenuationFunction(this._customAttenuationFunction), i.setPosition(this._position), i.setPlaybackRate(this._playbackRate), e(), i; } } /** * Gets the current underlying audio buffer containing the data * @returns the audio buffer */ getAudioBuffer() { return this._audioBuffer; } /** * Gets the WebAudio AudioBufferSourceNode, lets you keep track of and stop instances of this Sound. * @returns the source node */ getSoundSource() { return this._soundSource; } /** * Gets the WebAudio GainNode, gives you precise control over the gain of instances of this Sound. * @returns the gain node */ getSoundGain() { return this._soundGain; } /** * Serializes the Sound in a JSON representation * @returns the JSON representation of the sound */ serialize() { const e = { name: this.name, url: this._url, autoplay: this.autoplay, loop: this.loop, volume: this._volume, spatialSound: this._spatialSound, maxDistance: this.maxDistance, rolloffFactor: this.rolloffFactor, refDistance: this.refDistance, distanceModel: this.distanceModel, playbackRate: this._playbackRate, panningModel: this._panningModel, soundTrackId: this.soundTrackId, metadata: this.metadata }; return this._spatialSound && (this._connectedTransformNode && (e.connectedMeshId = this._connectedTransformNode.id), e.position = this._position.asArray(), e.refDistance = this.refDistance, e.distanceModel = this.distanceModel, e.isDirectional = this._isDirectional, e.localDirectionToMesh = this._localDirection.asArray(), e.coneInnerAngle = this._coneInnerAngle, e.coneOuterAngle = this._coneOuterAngle, e.coneOuterGain = this._coneOuterGain), e; } /** * Parse a JSON representation of a sound to instantiate in a given scene * @param parsedSound Define the JSON representation of the sound (usually coming from the serialize method) * @param scene Define the scene the new parsed sound should be created in * @param rootUrl Define the rooturl of the load in case we need to fetch relative dependencies * @param sourceSound Define a sound place holder if do not need to instantiate a new one * @returns the newly parsed sound */ static Parse(e, t, i, r) { const s = e.name; let n; e.url ? n = i + e.url : n = i + s; const a = { autoplay: e.autoplay, loop: e.loop, volume: e.volume, spatialSound: e.spatialSound, maxDistance: e.maxDistance, rolloffFactor: e.rolloffFactor, refDistance: e.refDistance, distanceModel: e.distanceModel, playbackRate: e.playbackRate }; let l; if (!r) l = new I4(s, n, t, () => { t.removePendingData(l); }, a), t.addPendingData(l); else { const o = () => { r._isReadyToPlay ? 
(l._audioBuffer = r.getAudioBuffer(), l._isReadyToPlay = !0, l.autoplay && l.play(0, l._offset, l._length)) : setTimeout(o, 300); }; l = new I4(s, new ArrayBuffer(0), t, null, a), o(); } if (e.position) { const o = D.FromArray(e.position); l.setPosition(o); } if (e.isDirectional && (l.setDirectionalCone(e.coneInnerAngle || 360, e.coneOuterAngle || 360, e.coneOuterGain || 0), e.localDirectionToMesh)) { const o = D.FromArray(e.localDirectionToMesh); l.setLocalDirectionToMesh(o); } if (e.connectedMeshId) { const o = t.getMeshById(e.connectedMeshId); o && l.attachToMesh(o); } return e.metadata && (l.metadata = e.metadata), l; } _setOffset(e) { this._offset !== e && (this.isPaused && (this.stop(), this.isPaused = !1), this._offset = e); } _clearTimeoutsAndObservers() { var e; this._tryToPlayTimeout && (clearTimeout(this._tryToPlayTimeout), this._tryToPlayTimeout = null), this._audioUnlockedObserver && ((e = $e.audioEngine) === null || e === void 0 || e.onAudioUnlockedObservable.remove(this._audioUnlockedObserver), this._audioUnlockedObserver = null); } } I4._SceneComponentInitialization = (c) => { throw yr("AudioSceneComponent"); }; class Lte { /** * Creates a new sound track. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#using-sound-tracks * @param scene Define the scene the sound track belongs to * @param options */ constructor(e, t = {}) { this.id = -1, this._isInitialized = !1, e = e || gi.LastCreatedScene, e && (this._scene = e, this.soundCollection = [], this._options = t, !this._options.mainTrack && this._scene.soundTracks && (this._scene.soundTracks.push(this), this.id = this._scene.soundTracks.length - 1)); } _initializeSoundTrackAudioGraph() { var e; !((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio && $e.audioEngine.audioContext && (this._outputAudioNode = $e.audioEngine.audioContext.createGain(), this._outputAudioNode.connect($e.audioEngine.masterGain), this._options && this._options.volume && (this._outputAudioNode.gain.value = this._options.volume), this._isInitialized = !0); } /** * Release the sound track and its associated resources */ dispose() { if ($e.audioEngine && $e.audioEngine.canUseWebAudio) { for (this._connectedAnalyser && this._connectedAnalyser.stopDebugCanvas(); this.soundCollection.length; ) this.soundCollection[0].dispose(); this._outputAudioNode && this._outputAudioNode.disconnect(), this._outputAudioNode = null; } } /** * Adds a sound to this sound track * @param sound define the sound to add * @ignoreNaming */ addSound(e) { var t; this._isInitialized || this._initializeSoundTrackAudioGraph(), !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._outputAudioNode && e.connectToSoundTrackAudioNode(this._outputAudioNode), e.soundTrackId !== void 0 && (e.soundTrackId === -1 ? this._scene.mainSoundTrack.removeSound(e) : this._scene.soundTracks && this._scene.soundTracks[e.soundTrackId].removeSound(e)), this.soundCollection.push(e), e.soundTrackId = this.id; } /** * Removes a sound to this sound track * @param sound define the sound to remove * @ignoreNaming */ removeSound(e) { const t = this.soundCollection.indexOf(e); t !== -1 && this.soundCollection.splice(t, 1); } /** * Set a global volume for the full sound track. 
* @param newVolume Define the new volume of the sound track */ setVolume(e) { var t; !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._outputAudioNode && (this._outputAudioNode.gain.value = e); } /** * Switch the panning model to HRTF: * Renders a stereo output of higher quality than equalpower — it uses a convolution with measured impulse responses from human subjects. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ switchPanningModelToHRTF() { var e; if (!((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio) for (let t = 0; t < this.soundCollection.length; t++) this.soundCollection[t].switchPanningModelToHRTF(); } /** * Switch the panning model to Equal Power: * Represents the equal-power panning algorithm, generally regarded as simple and efficient. equalpower is the default value. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ switchPanningModelToEqualPower() { var e; if (!((e = $e.audioEngine) === null || e === void 0) && e.canUseWebAudio) for (let t = 0; t < this.soundCollection.length; t++) this.soundCollection[t].switchPanningModelToEqualPower(); } /** * Connect the sound track to an audio analyser allowing some amazing * synchronization between the sounds/music and your visualization (VuMeter for instance). * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#using-the-analyser * @param analyser The analyser to connect to the engine */ connectToAnalyser(e) { var t; this._connectedAnalyser && this._connectedAnalyser.stopDebugCanvas(), this._connectedAnalyser = e, !((t = $e.audioEngine) === null || t === void 0) && t.canUseWebAudio && this._outputAudioNode && (this._outputAudioNode.disconnect(), this._connectedAnalyser.connectAudioNodes(this._outputAudioNode, $e.audioEngine.masterGain)); } } Yl.AddParser(Bt.NAME_AUDIO, (c, e, t, i) => { var r; let s = [], n; if (t.sounds = t.sounds || [], c.sounds !== void 0 && c.sounds !== null) for (let a = 0, l = c.sounds.length; a < l; a++) { const o = c.sounds[a]; !((r = $e.audioEngine) === null || r === void 0) && r.canUseWebAudio ? (o.url || (o.url = o.name), s[o.url] ? t.sounds.push(I4.Parse(o, e, i, s[o.url])) : (n = I4.Parse(o, e, i), s[o.url] = n, t.sounds.push(n))) : t.sounds.push(new I4(o.name, null, e)); } s = []; }); Object.defineProperty(ii.prototype, "mainSoundTrack", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), this._mainSoundTrack || (this._mainSoundTrack = new Lte(this, { mainTrack: !0 })), this._mainSoundTrack; }, enumerable: !0, configurable: !0 }); ii.prototype.getSoundByName = function(c) { let e; for (e = 0; e < this.mainSoundTrack.soundCollection.length; e++) if (this.mainSoundTrack.soundCollection[e].name === c) return this.mainSoundTrack.soundCollection[e]; if (this.soundTracks) { for (let t = 0; t < this.soundTracks.length; t++) for (e = 0; e < this.soundTracks[t].soundCollection.length; e++) if (this.soundTracks[t].soundCollection[e].name === c) return this.soundTracks[t].soundCollection[e]; } return null; }; Object.defineProperty(ii.prototype, "audioEnabled", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), c.audioEnabled; }, set: function(c) { let e = this._getComponent(Bt.NAME_AUDIO); e || (e = new L1(this), this._addComponent(e)), c ? 
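/* Usage sketch (hedged): sound tracks group sounds under a single gain node, and the scene
   exposes global audio switches. `scene` and `music` are assumed to exist:

   const voices = new BABYLON.SoundTrack(scene, { volume: 0.5 });
   voices.addSound(music);
   voices.setVolume(0.25);                      // affects every sound in this track
   scene.headphone = true;                      // switch panning to HRTF
   scene.audioEnabled = false;                  // suspend all scene audio
   const found = scene.getSoundByName("music"); // searches the main track, then the others
*/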
e.enableAudio() : e.disableAudio(); }, enumerable: !0, configurable: !0 }); Object.defineProperty(ii.prototype, "headphone", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), c.headphone; }, set: function(c) { let e = this._getComponent(Bt.NAME_AUDIO); e || (e = new L1(this), this._addComponent(e)), c ? e.switchAudioModeForHeadphones() : e.switchAudioModeForNormalSpeakers(); }, enumerable: !0, configurable: !0 }); Object.defineProperty(ii.prototype, "audioListenerPositionProvider", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), c.audioListenerPositionProvider; }, set: function(c) { let e = this._getComponent(Bt.NAME_AUDIO); if (e || (e = new L1(this), this._addComponent(e)), c && typeof c != "function") throw new Error("The value passed to [Scene.audioListenerPositionProvider] must be a function that returns a Vector3"); e.audioListenerPositionProvider = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(ii.prototype, "audioListenerRotationProvider", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), c.audioListenerRotationProvider; }, set: function(c) { let e = this._getComponent(Bt.NAME_AUDIO); if (e || (e = new L1(this), this._addComponent(e)), c && typeof c != "function") throw new Error("The value passed to [Scene.audioListenerRotationProvider] must be a function that returns a Vector3"); e.audioListenerRotationProvider = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(ii.prototype, "audioPositioningRefreshRate", { get: function() { let c = this._getComponent(Bt.NAME_AUDIO); return c || (c = new L1(this), this._addComponent(c)), c.audioPositioningRefreshRate; }, set: function(c) { let e = this._getComponent(Bt.NAME_AUDIO); e || (e = new L1(this), this._addComponent(e)), e.audioPositioningRefreshRate = c; }, enumerable: !0, configurable: !0 }); class L1 { /** * Gets whether audio is enabled or not. * Please use related enable/disable method to switch state. */ get audioEnabled() { return this._audioEnabled; } /** * Gets whether audio is outputting to headphone or not. * Please use the according Switch methods to change output. */ get headphone() { return this._headphone; } /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_AUDIO, this._audioEnabled = !0, this._headphone = !1, this.audioPositioningRefreshRate = 500, this.audioListenerPositionProvider = null, this.audioListenerRotationProvider = null, this._cachedCameraDirection = new D(), this._cachedCameraPosition = new D(), this._lastCheck = 0, this._invertMatrixTemp = new Ae(), this._cameraDirectionTemp = new D(), e = e || gi.LastCreatedScene, e && (this.scene = e, e.soundTracks = [], e.sounds = []); } /** * Registers the component in a given scene */ register() { this.scene._afterRenderStage.registerStep(Bt.STEP_AFTERRENDER_AUDIO, this, this._afterRender); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
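* In practice this component is created lazily by the Scene audio accessors
* defined above rather than instantiated directly. Illustrative sketch of those
* scene-level switches (assumed public BABYLON.* names, existing `scene`):
* @example
* scene.audioEnabled = false;   // suspends the audio context and pauses all sounds
* scene.audioEnabled = true;    // resumes previously paused sounds
* scene.headphone = true;       // switches every track to the HRTF panning model
* scene.audioListenerPositionProvider = () => new BABYLON.Vector3(0, 2, 0); // must return a Vector3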
*/ rebuild() { } /** * Serializes the component data to the specified json object * @param serializationObject The object to serialize to */ serialize(e) { if (e.sounds = [], this.scene.soundTracks) for (let t = 0; t < this.scene.soundTracks.length; t++) { const i = this.scene.soundTracks[t]; for (let r = 0; r < i.soundCollection.length; r++) e.sounds.push(i.soundCollection[r].serialize()); } } /** * Adds all the elements from the container to the scene * @param container the container holding the elements */ addFromContainer(e) { e.sounds && e.sounds.forEach((t) => { t.play(), t.autoplay = !0, this.scene.mainSoundTrack.addSound(t); }); } /** * Removes all the elements in the container from the scene * @param container contains the elements to remove * @param dispose if the removed element should be disposed (default: false) */ removeFromContainer(e, t = !1) { e.sounds && e.sounds.forEach((i) => { i.stop(), i.autoplay = !1, this.scene.mainSoundTrack.removeSound(i), t && i.dispose(); }); } /** * Disposes the component and the associated resources. */ dispose() { const e = this.scene; if (e._mainSoundTrack && e.mainSoundTrack.dispose(), e.soundTracks) for (let t = 0; t < e.soundTracks.length; t++) e.soundTracks[t].dispose(); } /** * Disables audio in the associated scene. */ disableAudio() { const e = this.scene; this._audioEnabled = !1, $e.audioEngine && $e.audioEngine.audioContext && $e.audioEngine.audioContext.suspend(); let t; for (t = 0; t < e.mainSoundTrack.soundCollection.length; t++) e.mainSoundTrack.soundCollection[t].pause(); if (e.soundTracks) for (t = 0; t < e.soundTracks.length; t++) for (let i = 0; i < e.soundTracks[t].soundCollection.length; i++) e.soundTracks[t].soundCollection[i].pause(); } /** * Enables audio in the associated scene. */ enableAudio() { const e = this.scene; this._audioEnabled = !0, $e.audioEngine && $e.audioEngine.audioContext && $e.audioEngine.audioContext.resume(); let t; for (t = 0; t < e.mainSoundTrack.soundCollection.length; t++) e.mainSoundTrack.soundCollection[t].isPaused && e.mainSoundTrack.soundCollection[t].play(); if (e.soundTracks) for (t = 0; t < e.soundTracks.length; t++) for (let i = 0; i < e.soundTracks[t].soundCollection.length; i++) e.soundTracks[t].soundCollection[i].isPaused && e.soundTracks[t].soundCollection[i].play(); } /** * Switch audio to headphone output. */ switchAudioModeForHeadphones() { const e = this.scene; if (this._headphone = !0, e.mainSoundTrack.switchPanningModelToHRTF(), e.soundTracks) for (let t = 0; t < e.soundTracks.length; t++) e.soundTracks[t].switchPanningModelToHRTF(); } /** * Switch audio to normal speakers. */ switchAudioModeForNormalSpeakers() { const e = this.scene; if (this._headphone = !1, e.mainSoundTrack.switchPanningModelToEqualPower(), e.soundTracks) for (let t = 0; t < e.soundTracks.length; t++) e.soundTracks[t].switchPanningModelToEqualPower(); } _afterRender() { const e = Gs.Now; if (this._lastCheck && e - this._lastCheck < this.audioPositioningRefreshRate) return; this._lastCheck = e; const t = this.scene; if (!this._audioEnabled || !t._mainSoundTrack || !t.soundTracks || t._mainSoundTrack.soundCollection.length === 0 && t.soundTracks.length === 1) return; const i = $e.audioEngine; if (i && i.audioContext) { let r = t.activeCamera; if (t.activeCameras && t.activeCameras.length > 0 && (r = t.activeCameras[0]), this.audioListenerPositionProvider) { const n = this.audioListenerPositionProvider(); i.audioContext.listener.setPosition(n.x || 0, n.y || 0, n.z || 0); } else r ? 
this._cachedCameraPosition.equals(r.globalPosition) || (this._cachedCameraPosition.copyFrom(r.globalPosition), i.audioContext.listener.setPosition(r.globalPosition.x, r.globalPosition.y, r.globalPosition.z)) : i.audioContext.listener.setPosition(0, 0, 0); if (this.audioListenerRotationProvider) { const n = this.audioListenerRotationProvider(); i.audioContext.listener.setOrientation(n.x || 0, n.y || 0, n.z || 0, 0, 1, 0); } else r ? (r.rigCameras && r.rigCameras.length > 0 && (r = r.rigCameras[0]), r.getViewMatrix().invertToRef(this._invertMatrixTemp), D.TransformNormalToRef(L1._CameraDirection, this._invertMatrixTemp, this._cameraDirectionTemp), this._cameraDirectionTemp.normalize(), !isNaN(this._cameraDirectionTemp.x) && !isNaN(this._cameraDirectionTemp.y) && !isNaN(this._cameraDirectionTemp.z) && (this._cachedCameraDirection.equals(this._cameraDirectionTemp) || (this._cachedCameraDirection.copyFrom(this._cameraDirectionTemp), i.audioContext.listener.setOrientation(this._cameraDirectionTemp.x, this._cameraDirectionTemp.y, this._cameraDirectionTemp.z, 0, 1, 0)))) : i.audioContext.listener.setOrientation(0, 0, 0, 0, 1, 0); let s; for (s = 0; s < t.mainSoundTrack.soundCollection.length; s++) { const n = t.mainSoundTrack.soundCollection[s]; n.useCustomAttenuation && n.updateDistanceFromListener(); } if (t.soundTracks) for (s = 0; s < t.soundTracks.length; s++) for (let n = 0; n < t.soundTracks[s].soundCollection.length; n++) { const a = t.soundTracks[s].soundCollection[n]; a.useCustomAttenuation && a.updateDistanceFromListener(); } } } } L1._CameraDirection = new D(0, 0, -1); I4._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_AUDIO); e || (e = new L1(c), c._addComponent(e)); }; class Nte { /** * Creates a new WeightedSound from the list of sounds given. * @param loop When true a Sound will be selected and played when the current playing Sound completes. * @param sounds Array of Sounds that will be selected from. * @param weights Array of number values for selection weights; length must equal sounds, values will be normalized to 1 */ constructor(e, t, i) { if (this.loop = !1, this._coneInnerAngle = 360, this._coneOuterAngle = 360, this._volume = 1, this.isPlaying = !1, this.isPaused = !1, this._sounds = [], this._weights = [], t.length !== i.length) throw new Error("Sounds length does not equal weights length"); this.loop = e, this._weights = i; let r = 0; for (const n of i) r += n; const s = r > 0 ? 1 / r : 0; for (let n = 0; n < this._weights.length; n++) this._weights[n] *= s; this._sounds = t; for (const n of this._sounds) n.onEndedObservable.add(() => { this._onended(); }); } /** * The size of cone in degrees for a directional sound in which there will be no attenuation. */ get directionalConeInnerAngle() { return this._coneInnerAngle; } /** * The size of cone in degrees for a directional sound in which there will be no attenuation. */ set directionalConeInnerAngle(e) { if (e !== this._coneInnerAngle) { if (this._coneOuterAngle < e) { Ce.Error("directionalConeInnerAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } this._coneInnerAngle = e; for (const t of this._sounds) t.directionalConeInnerAngle = e; } } /** * Size of cone in degrees for a directional sound outside of which there will be no sound. * Listener angles between innerAngle and outerAngle will falloff linearly. 
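* Illustrative sketch (not part of the library source): a WeightedSound picks one
* of several Sounds at random according to the weights, which are normalized
* internally so any positive total works. Assumed public BABYLON.* names, an
* existing `scene`, and placeholder file names.
* @example
* const s1 = new BABYLON.Sound("step1", "sounds/step1.wav", scene);
* const s2 = new BABYLON.Sound("step2", "sounds/step2.wav", scene);
* const steps = new BABYLON.WeightedSound(false, [s1, s2], [0.7, 0.3]);
* steps.directionalConeInnerAngle = 90;   // full volume inside this cone
* steps.directionalConeOuterAngle = 180;  // silent outside; linear falloff in between
* steps.play();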
*/ get directionalConeOuterAngle() { return this._coneOuterAngle; } /** * Size of cone in degrees for a directional sound outside of which there will be no sound. * Listener angles between innerAngle and outerAngle will falloff linearly. */ set directionalConeOuterAngle(e) { if (e !== this._coneOuterAngle) { if (e < this._coneInnerAngle) { Ce.Error("directionalConeOuterAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } this._coneOuterAngle = e; for (const t of this._sounds) t.directionalConeOuterAngle = e; } } /** * Playback volume. */ get volume() { return this._volume; } /** * Playback volume. */ set volume(e) { if (e !== this._volume) for (const t of this._sounds) t.setVolume(e); } _onended() { this._currentIndex !== void 0 && (this._sounds[this._currentIndex].autoplay = !1), this.loop && this.isPlaying ? this.play() : this.isPlaying = !1; } /** * Suspend playback */ pause() { this.isPaused = !0, this._currentIndex !== void 0 && this._sounds[this._currentIndex].pause(); } /** * Stop playback */ stop() { this.isPlaying = !1, this._currentIndex !== void 0 && this._sounds[this._currentIndex].stop(); } /** * Start playback. * @param startOffset Position the clip head at a specific time in seconds. */ play(e) { if (!this.isPaused) { this.stop(); const i = Math.random(); let r = 0; for (let s = 0; s < this._weights.length; s++) if (r += this._weights[s], i <= r) { this._currentIndex = s; break; } } const t = this._sounds[this._currentIndex]; t.isReady() ? t.play(0, this.isPaused ? void 0 : e) : t.autoplay = !0, this.isPlaying = !0, this.isPaused = !1; } } class VI { /** * Creates a new BakedVertexAnimationManager * @param scene defines the current scene */ constructor(e) { this._texture = null, this._isEnabled = !0, this.isEnabled = !0, this.time = 0, e = e || gi.LastCreatedScene, e && (this._scene = e, this.animationParameters = new Di(0, 0, 0, 30)); } /** @internal */ _markSubMeshesAsAttributesDirty() { for (const e of this._scene.meshes) e.bakedVertexAnimationManager === this && e._markSubMeshesAsAttributesDirty(); } /** * Binds to the effect. * @param effect The effect to bind to. * @param useInstances True when it's an instance. */ bind(e, t = !1) { if (!this._texture || !this._isEnabled) return; const i = this._texture.getSize(); e.setFloat2("bakedVertexAnimationTextureSizeInverted", 1 / i.width, 1 / i.height), e.setFloat("bakedVertexAnimationTime", this.time), t || e.setVector4("bakedVertexAnimationSettings", this.animationParameters), e.setTexture("bakedVertexAnimationTexture", this._texture); } /** * Clone the current manager * @returns a new BakedVertexAnimationManager */ clone() { const e = new VI(this._scene); return this.copyTo(e), e; } /** * Sets animation parameters. * @param startFrame The first frame of the animation. * @param endFrame The last frame of the animation. * @param offset The offset when starting the animation. * @param speedFramesPerSecond The frame rate. */ setAnimationParameters(e, t, i = 0, r = 30) { this.animationParameters = new Di(e, t, i, r); } /** * Disposes the resources of the manager. * @param forceDisposeTextures - Forces the disposal of all textures. */ dispose(e) { var t; e && ((t = this._texture) === null || t === void 0 || t.dispose()); } /** * Get the current class name useful for serialization or dynamic coding. * @returns "BakedVertexAnimationManager" */ getClassName() { return "BakedVertexAnimationManager"; } /** * Makes a duplicate of the current instance into another one. 
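* Illustrative sketch (not part of the library source): a typical setup assigns the
* manager to a mesh, points it at a baked vertex animation texture and advances
* `time` every frame; `clone()` relies on this method internally. Assumed public
* BABYLON.* names and pre-existing `scene`, `mesh` and `vatTexture` variables.
* @example
* const manager = new BABYLON.BakedVertexAnimationManager(scene);
* manager.texture = vatTexture;                    // texture produced by a VAT baking step
* manager.setAnimationParameters(0, 100, 0, 30);   // frames 0..100 at 30 FPS
* mesh.bakedVertexAnimationManager = manager;
* scene.registerBeforeRender(() => {
*   manager.time += scene.getEngine().getDeltaTime() / 1000;
* });
* const copy = manager.clone();                    // duplicates the settings via copyTo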
* @param vatMap define the instance where to copy the info */ copyTo(e) { St.Clone(() => e, this); } /** * Serializes this vertex animation instance * @returns - An object with the serialized instance. */ serialize() { return St.Serialize(this); } /** * Parses a vertex animation setting from a serialized object. * @param source - Serialized object. * @param scene Defines the scene we are parsing for * @param rootUrl Defines the rootUrl to load from */ parse(e, t, i) { St.Parse(() => this, e, t, i); } } F([ er(), ct("_markSubMeshesAsAttributesDirty") ], VI.prototype, "texture", void 0); F([ W(), ct("_markSubMeshesAsAttributesDirty") ], VI.prototype, "isEnabled", void 0); F([ W() ], VI.prototype, "animationParameters", void 0); F([ W() ], VI.prototype, "time", void 0); class rT { /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapU() { return this._wrapU; } set wrapU(e) { this._wrapU = e; } /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapV() { return this._wrapV; } set wrapV(e) { this._wrapV = e; } /** * How a texture is mapped. * Unused in thin texture mode. */ get coordinatesMode() { return 0; } /** * Define if the texture is a cube texture or if false a 2d texture. */ get isCube() { return this._texture ? this._texture.isCube : !1; } set isCube(e) { this._texture && (this._texture.isCube = e); } /** * Define if the texture is a 3d texture (webgl 2) or if false a 2d texture. */ get is3D() { return this._texture ? this._texture.is3D : !1; } set is3D(e) { this._texture && (this._texture.is3D = e); } /** * Define if the texture is a 2d array texture (webgl 2) or if false a 2d texture. */ get is2DArray() { return this._texture ? this._texture.is2DArray : !1; } set is2DArray(e) { this._texture && (this._texture.is2DArray = e); } /** * Get the class name of the texture. * @returns "ThinTexture" */ getClassName() { return "ThinTexture"; } static _IsRenderTargetWrapper(e) { return (e == null ? void 0 : e._shareDepth) !== void 0; } /** * Instantiates a new ThinTexture. * Base class of all the textures in babylon. * This can be used as an internal texture wrapper in ThinEngine to benefit from the cache * @param internalTexture Define the internalTexture to wrap. You can also pass a RenderTargetWrapper, in which case the texture will be the render target's texture */ constructor(e) { this._wrapU = 1, this._wrapV = 1, this.wrapR = 1, this.anisotropicFilteringLevel = 4, this.delayLoadState = 0, this._texture = null, this._engine = null, this._cachedSize = kf.Zero(), this._cachedBaseSize = kf.Zero(), this._initialSamplingMode = 2, this._texture = rT._IsRenderTargetWrapper(e) ? e.texture : e, this._texture && (this._engine = this._texture.getEngine()); } /** * Get if the texture is ready to be used (downloaded, converted, mip mapped...). * @returns true if fully ready */ isReady() { return this.delayLoadState === 4 ? (this.delayLoad(), !1) : this._texture ? this._texture.isReady : !1; } /** * Triggers the load sequence in delayed load mode. */ delayLoad() { } /** * Get the underlying lower level texture from Babylon. * @returns the internal texture */ getInternalTexture() { return this._texture; } /** * Get the size of the texture. * @returns the texture size. 
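* Illustrative sketch (not part of the library source): `getSize()` returns a
* cached width/height object that is refreshed from the underlying internal
* texture on each call. Assumes an existing `texture` instance.
* @example
* const size = texture.getSize();
* console.log(`texture is ${size.width} x ${size.height}`);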
*/ getSize() { if (this._texture) { if (this._texture.width) return this._cachedSize.width = this._texture.width, this._cachedSize.height = this._texture.height, this._cachedSize; if (this._texture._size) return this._cachedSize.width = this._texture._size, this._cachedSize.height = this._texture._size, this._cachedSize; } return this._cachedSize; } /** * Get the base size of the texture. * It can be different from the size if the texture has been resized for POT for instance * @returns the base size */ getBaseSize() { return !this.isReady() || !this._texture ? (this._cachedBaseSize.width = 0, this._cachedBaseSize.height = 0, this._cachedBaseSize) : this._texture._size ? (this._cachedBaseSize.width = this._texture._size, this._cachedBaseSize.height = this._texture._size, this._cachedBaseSize) : (this._cachedBaseSize.width = this._texture.baseWidth, this._cachedBaseSize.height = this._texture.baseHeight, this._cachedBaseSize); } /** * Get the current sampling mode associated with the texture. */ get samplingMode() { return this._texture ? this._texture.samplingMode : this._initialSamplingMode; } /** * Update the sampling mode of the texture. * Default is Trilinear mode. * * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 1 | NEAREST_SAMPLINGMODE or NEAREST_NEAREST_MIPLINEAR | Nearest is: mag = nearest, min = nearest, mip = linear | * | 2 | BILINEAR_SAMPLINGMODE or LINEAR_LINEAR_MIPNEAREST | Bilinear is: mag = linear, min = linear, mip = nearest | * | 3 | TRILINEAR_SAMPLINGMODE or LINEAR_LINEAR_MIPLINEAR | Trilinear is: mag = linear, min = linear, mip = linear | * | 4 | NEAREST_NEAREST_MIPNEAREST | | * | 5 | NEAREST_LINEAR_MIPNEAREST | | * | 6 | NEAREST_LINEAR_MIPLINEAR | | * | 7 | NEAREST_LINEAR | | * | 8 | NEAREST_NEAREST | | * | 9 | LINEAR_NEAREST_MIPNEAREST | | * | 10 | LINEAR_NEAREST_MIPLINEAR | | * | 11 | LINEAR_LINEAR | | * | 12 | LINEAR_NEAREST | | * * > _mag_: magnification filter (close to the viewer) * > _min_: minification filter (far from the viewer) * > _mip_: filter used between mip map levels *@param samplingMode Define the new sampling mode of the texture */ updateSamplingMode(e) { this._texture && this._engine && this._engine.updateTextureSamplingMode(e, this._texture); } /** * Release and destroy the underlying lower level texture aka internalTexture. */ releaseInternalTexture() { this._texture && (this._texture.dispose(), this._texture = null); } /** * Dispose the texture and release its associated resources. */ dispose() { this._texture && (this.releaseInternalTexture(), this._engine = null); } } class dn extends rT { /** * Define if the texture is having a usable alpha value (can be use for transparency or glossiness for instance). */ set hasAlpha(e) { this._hasAlpha !== e && (this._hasAlpha = e, this._scene && this._scene.markAllMaterialsAsDirty(1, (t) => t.hasTexture(this))); } get hasAlpha() { return this._hasAlpha; } /** * Defines if the alpha value should be determined via the rgb values. * If true the luminance of the pixel might be used to find the corresponding alpha value. */ set getAlphaFromRGB(e) { this._getAlphaFromRGB !== e && (this._getAlphaFromRGB = e, this._scene && this._scene.markAllMaterialsAsDirty(1, (t) => t.hasTexture(this))); } get getAlphaFromRGB() { return this._getAlphaFromRGB; } /** * Define the UV channel to use starting from 0 and defaulting to 0. * This is part of the texture as textures usually maps to one uv set. 
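* Illustrative sketch (not part of the library source): pointing a texture at a
* mesh's second UV set. Assumed public BABYLON.* names, an existing `scene` and
* `material`; "lightmap.png" is a placeholder URL.
* @example
* const lightmap = new BABYLON.Texture("textures/lightmap.png", scene);
* lightmap.coordinatesIndex = 1;        // sample from the mesh's uv2 channel
* material.lightmapTexture = lightmap;  // e.g. on a StandardMaterial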
*/ set coordinatesIndex(e) { this._coordinatesIndex !== e && (this._coordinatesIndex = e, this._scene && this._scene.markAllMaterialsAsDirty(1, (t) => t.hasTexture(this))); } get coordinatesIndex() { return this._coordinatesIndex; } /** * How a texture is mapped. * * | Value | Type | Description | * | ----- | ----------------------------------- | ----------- | * | 0 | EXPLICIT_MODE | | * | 1 | SPHERICAL_MODE | | * | 2 | PLANAR_MODE | | * | 3 | CUBIC_MODE | | * | 4 | PROJECTION_MODE | | * | 5 | SKYBOX_MODE | | * | 6 | INVCUBIC_MODE | | * | 7 | EQUIRECTANGULAR_MODE | | * | 8 | FIXED_EQUIRECTANGULAR_MODE | | * | 9 | FIXED_EQUIRECTANGULAR_MIRRORED_MODE | | */ set coordinatesMode(e) { this._coordinatesMode !== e && (this._coordinatesMode = e, this._scene && this._scene.markAllMaterialsAsDirty(1, (t) => t.hasTexture(this))); } get coordinatesMode() { return this._coordinatesMode; } /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapU() { return this._wrapU; } set wrapU(e) { this._wrapU = e; } /** * | Value | Type | Description | * | ----- | ------------------ | ----------- | * | 0 | CLAMP_ADDRESSMODE | | * | 1 | WRAP_ADDRESSMODE | | * | 2 | MIRROR_ADDRESSMODE | | */ get wrapV() { return this._wrapV; } set wrapV(e) { this._wrapV = e; } /** * Define if the texture is a cube texture or if false a 2d texture. */ get isCube() { return this._texture ? this._texture.isCube : this._isCube; } set isCube(e) { this._texture ? this._texture.isCube = e : this._isCube = e; } /** * Define if the texture is a 3d texture (webgl 2) or if false a 2d texture. */ get is3D() { return this._texture ? this._texture.is3D : !1; } set is3D(e) { this._texture && (this._texture.is3D = e); } /** * Define if the texture is a 2d array texture (webgl 2) or if false a 2d texture. */ get is2DArray() { return this._texture ? this._texture.is2DArray : !1; } set is2DArray(e) { this._texture && (this._texture.is2DArray = e); } /** * Define if the texture contains data in gamma space (most of the png/jpg aside bump). * HDR texture are usually stored in linear space. * This only impacts the PBR and Background materials */ get gammaSpace() { if (this._texture) this._texture._gammaSpace === null && (this._texture._gammaSpace = this._gammaSpace); else return this._gammaSpace; return this._texture._gammaSpace && !this._texture._useSRGBBuffer; } set gammaSpace(e) { var t; if (this._texture) { if (this._texture._gammaSpace === e) return; this._texture._gammaSpace = e; } else { if (this._gammaSpace === e) return; this._gammaSpace = e; } (t = this.getScene()) === null || t === void 0 || t.markAllMaterialsAsDirty(1, (i) => i.hasTexture(this)); } /** * Gets or sets whether or not the texture contains RGBD data. */ get isRGBD() { return this._texture != null && this._texture._isRGBD; } set isRGBD(e) { var t; e !== this.isRGBD && (this._texture && (this._texture._isRGBD = e), (t = this.getScene()) === null || t === void 0 || t.markAllMaterialsAsDirty(1, (i) => i.hasTexture(this))); } /** * Are mip maps generated for this texture or not. */ get noMipmap() { return !1; } /** * With prefiltered texture, defined the offset used during the prefiltering steps. */ get lodGenerationOffset() { return this._texture ? 
this._texture._lodGenerationOffset : 0; } set lodGenerationOffset(e) { this._texture && (this._texture._lodGenerationOffset = e); } /** * With prefiltered texture, defined the scale used during the prefiltering steps. */ get lodGenerationScale() { return this._texture ? this._texture._lodGenerationScale : 0; } set lodGenerationScale(e) { this._texture && (this._texture._lodGenerationScale = e); } /** * With prefiltered texture, defined if the specular generation is based on a linear ramp. * By default we are using a log2 of the linear roughness helping to keep a better resolution for * average roughness values. */ get linearSpecularLOD() { return this._texture ? this._texture._linearSpecularLOD : !1; } set linearSpecularLOD(e) { this._texture && (this._texture._linearSpecularLOD = e); } /** * In case a better definition than spherical harmonics is required for the diffuse part of the environment. * You can set the irradiance texture to rely on a texture instead of the spherical approach. * This texture need to have the same characteristics than its parent (Cube vs 2d, coordinates mode, Gamma/Linear, RGBD). */ get irradianceTexture() { return this._texture ? this._texture._irradianceTexture : null; } set irradianceTexture(e) { this._texture && (this._texture._irradianceTexture = e); } /** * Define the unique id of the texture in the scene. */ get uid() { return this._uid || (this._uid = G_()), this._uid; } /** * Return a string representation of the texture. * @returns the texture as a string */ toString() { return this.name; } /** * Get the class name of the texture. * @returns "BaseTexture" */ getClassName() { return "BaseTexture"; } /** * Callback triggered when the texture has been disposed. * Kept for back compatibility, you can use the onDisposeObservable instead. */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * Define if the texture is preventing a material to render or not. * If not and the texture is not ready, the engine will use a default black texture instead. */ get isBlocking() { return !0; } /** * Was there any loading error? */ get loadingError() { return this._loadingError; } /** * If a loading error occurred this object will be populated with information about the error. */ get errorObject() { return this._errorObject; } /** * Instantiates a new BaseTexture. * Base class of all the textures in babylon. * It groups all the common properties the materials, post process, lights... might need * in order to make a correct use of the texture. * @param sceneOrEngine Define the scene or engine the texture belongs to * @param internalTexture Define the internal texture associated with the texture */ constructor(e, t = null) { super(null), this.metadata = null, this.reservedDataStore = null, this._hasAlpha = !1, this._getAlphaFromRGB = !1, this.level = 1, this._coordinatesIndex = 0, this.optimizeUVAllocation = !0, this._coordinatesMode = 0, this.wrapR = 1, this.anisotropicFilteringLevel = dn.DEFAULT_ANISOTROPIC_FILTERING_LEVEL, this._isCube = !1, this._gammaSpace = !0, this.invertZ = !1, this.lodLevelInAlpha = !1, this.isRenderTarget = !1, this._prefiltered = !1, this._forceSerialize = !1, this.animations = [], this.onDisposeObservable = new Fe(), this._onDisposeObserver = null, this._scene = null, this._uid = null, this._parentContainer = null, this._loadingError = !1, e ? dn._IsScene(e) ? 
this._scene = e : this._engine = e : this._scene = gi.LastCreatedScene, this._scene && (this.uniqueId = this._scene.getUniqueId(), this._scene.addTexture(this), this._engine = this._scene.getEngine()), this._texture = t, this._uid = null; } /** * Get the scene the texture belongs to. * @returns the scene or null if undefined */ getScene() { return this._scene; } /** @internal */ _getEngine() { return this._engine; } /** * Checks if the texture has the same transform matrix than another texture * @param texture texture to check against * @returns true if the transforms are the same, else false */ checkTransformsAreIdentical(e) { return e !== null; } /** * Get the texture transform matrix used to offset tile the texture for instance. * @returns the transformation matrix */ getTextureMatrix() { return Ae.IdentityReadOnly; } /** * Get the texture reflection matrix used to rotate/transform the reflection. * @returns the reflection matrix */ getReflectionTextureMatrix() { return Ae.IdentityReadOnly; } /** * Gets a suitable rotate/transform matrix when the texture is used for refraction. * There's a separate function from getReflectionTextureMatrix because refraction requires a special configuration of the matrix in right-handed mode. * @returns The refraction matrix */ getRefractionTextureMatrix() { return this.getReflectionTextureMatrix(); } /** * Get if the texture is ready to be consumed (either it is ready or it is not blocking) * @returns true if ready, not blocking or if there was an error loading the texture */ isReadyOrNotBlocking() { return !this.isBlocking || this.isReady() || this.loadingError; } /** * Scales the texture if is `canRescale()` * @param ratio the resize factor we want to use to rescale */ // eslint-disable-next-line @typescript-eslint/no-unused-vars scale(e) { } /** * Get if the texture can rescale. */ get canRescale() { return !1; } /** * @internal */ _getFromCache(e, t, i, r, s, n) { const a = this._getEngine(); if (!a) return null; const l = a._getUseSRGBBuffer(!!s, t), o = a.getLoadedTexturesCache(); for (let u = 0; u < o.length; u++) { const h = o[u]; if ((s === void 0 || l === h._useSRGBBuffer) && (r === void 0 || r === h.invertY) && h.url === e && h.generateMipMaps === !t && (!i || i === h.samplingMode) && (n === void 0 || n === h.isCube)) return h.incrementReferences(), h; } return null; } /** @internal */ _rebuild() { } /** * Clones the texture. * @returns the cloned texture */ clone() { return null; } /** * Get the texture underlying type (INT, FLOAT...) */ get textureType() { return this._texture && this._texture.type !== void 0 ? this._texture.type : 0; } /** * Get the texture underlying format (RGB, RGBA...) */ get textureFormat() { return this._texture && this._texture.format !== void 0 ? this._texture.format : 5; } /** * Indicates that textures need to be re-calculated for all materials */ _markAllSubMeshesAsTexturesDirty() { const e = this.getScene(); e && e.markAllMaterialsAsDirty(1); } /** * Reads the pixels stored in the webgl texture and returns them as an ArrayBuffer. * This will returns an RGBA array buffer containing either in values (0-255) or * float values (0-1) depending of the underlying buffer type. 
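* Illustrative sketch (not part of the library source): reading back a 4x4 region
* of mip level 0. The call is asynchronous and resolves with a typed array
* (Uint8Array or Float32Array depending on the texture type). Assumes an existing
* `texture` whose underlying internal texture is ready.
* @example
* const pixels = await texture.readPixels(0, 0, null, true, false, 0, 0, 4, 4);
* if (pixels) {
*   console.log("first texel RGBA:", pixels[0], pixels[1], pixels[2], pixels[3]);
* }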
* @param faceIndex defines the face of the texture to read (in case of cube texture) * @param level defines the LOD level of the texture to read (in case of Mip Maps) * @param buffer defines a user defined buffer to fill with data (can be null) * @param flushRenderer true to flush the renderer from the pending commands before reading the pixels * @param noDataConversion false to convert the data to Uint8Array (if texture type is UNSIGNED_BYTE) or to Float32Array (if texture type is anything but UNSIGNED_BYTE). If true, the type of the generated buffer (if buffer==null) will depend on the type of the texture * @param x defines the region x coordinates to start reading from (default to 0) * @param y defines the region y coordinates to start reading from (default to 0) * @param width defines the region width to read from (default to the texture size at level) * @param height defines the region width to read from (default to the texture size at level) * @returns The Array buffer promise containing the pixels data. */ readPixels(e = 0, t = 0, i = null, r = !0, s = !1, n = 0, a = 0, l = Number.MAX_VALUE, o = Number.MAX_VALUE) { if (!this._texture) return null; const u = this._getEngine(); if (!u) return null; const h = this.getSize(); let d = h.width, f = h.height; t !== 0 && (d = d / Math.pow(2, t), f = f / Math.pow(2, t), d = Math.round(d), f = Math.round(f)), l = Math.min(d, l), o = Math.min(f, o); try { return this._texture.isCube ? u._readTexturePixels(this._texture, l, o, e, t, i, r, s, n, a) : u._readTexturePixels(this._texture, l, o, -1, t, i, r, s, n, a); } catch { return null; } } /** * @internal */ _readPixelsSync(e = 0, t = 0, i = null, r = !0, s = !1) { if (!this._texture) return null; const n = this.getSize(); let a = n.width, l = n.height; const o = this._getEngine(); if (!o) return null; t != 0 && (a = a / Math.pow(2, t), l = l / Math.pow(2, t), a = Math.round(a), l = Math.round(l)); try { return this._texture.isCube ? o._readTexturePixelsSync(this._texture, a, l, e, t, i, r, s) : o._readTexturePixelsSync(this._texture, a, l, -1, t, i, r, s); } catch { return null; } } /** @internal */ get _lodTextureHigh() { return this._texture ? this._texture._lodTextureHigh : null; } /** @internal */ get _lodTextureMid() { return this._texture ? this._texture._lodTextureMid : null; } /** @internal */ get _lodTextureLow() { return this._texture ? this._texture._lodTextureLow : null; } /** * Dispose the texture and release its associated resources. */ dispose() { if (this._scene) { this._scene.stopAnimation && this._scene.stopAnimation(this), this._scene.removePendingData(this); const e = this._scene.textures.indexOf(this); if (e >= 0 && this._scene.textures.splice(e, 1), this._scene.onTextureRemovedObservable.notifyObservers(this), this._scene = null, this._parentContainer) { const t = this._parentContainer.textures.indexOf(this); t > -1 && this._parentContainer.textures.splice(t, 1), this._parentContainer = null; } } this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.metadata = null, super.dispose(); } /** * Serialize the texture into a JSON representation that can be parsed later on. * @param allowEmptyName True to force serialization even if name is empty. Default: false * @returns the JSON representation of the texture */ serialize(e = !1) { if (!this.name && !e) return null; const t = St.Serialize(this); return St.AppendSerializedAnimations(this, t), t; } /** * Helper function to be called back once a list of texture contains only ready textures. 
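* Illustrative sketch (not part of the library source): waiting for several
* textures before building a material. Assumed public BABYLON.* names, an
* existing `scene`, and placeholder URLs.
* @example
* const albedo = new BABYLON.Texture("textures/albedo.png", scene);
* const normal = new BABYLON.Texture("textures/normal.png", scene);
* BABYLON.BaseTexture.WhenAllReady([albedo, normal], () => {
*   console.log("both textures are ready");
* });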
* @param textures Define the list of textures to wait for * @param callback Define the callback triggered once the entire list will be ready */ static WhenAllReady(e, t) { let i = e.length; if (i === 0) { t(); return; } for (let r = 0; r < e.length; r++) { const s = e[r]; if (s.isReady()) --i === 0 && t(); else { const n = s.onLoadObservable; n ? n.addOnce(() => { --i === 0 && t(); }) : --i === 0 && t(); } } } static _IsScene(e) { return e.getClassName() === "Scene"; } } dn.DEFAULT_ANISOTROPIC_FILTERING_LEVEL = 4; F([ W() ], dn.prototype, "uniqueId", void 0); F([ W() ], dn.prototype, "name", void 0); F([ W() ], dn.prototype, "metadata", void 0); F([ W("hasAlpha") ], dn.prototype, "_hasAlpha", void 0); F([ W("getAlphaFromRGB") ], dn.prototype, "_getAlphaFromRGB", void 0); F([ W() ], dn.prototype, "level", void 0); F([ W("coordinatesIndex") ], dn.prototype, "_coordinatesIndex", void 0); F([ W() ], dn.prototype, "optimizeUVAllocation", void 0); F([ W("coordinatesMode") ], dn.prototype, "_coordinatesMode", void 0); F([ W() ], dn.prototype, "wrapU", null); F([ W() ], dn.prototype, "wrapV", null); F([ W() ], dn.prototype, "wrapR", void 0); F([ W() ], dn.prototype, "anisotropicFilteringLevel", void 0); F([ W() ], dn.prototype, "isCube", null); F([ W() ], dn.prototype, "is3D", null); F([ W() ], dn.prototype, "is2DArray", null); F([ W() ], dn.prototype, "gammaSpace", null); F([ W() ], dn.prototype, "invertZ", void 0); F([ W() ], dn.prototype, "lodLevelInAlpha", void 0); F([ W() ], dn.prototype, "lodGenerationOffset", null); F([ W() ], dn.prototype, "lodGenerationScale", null); F([ W() ], dn.prototype, "linearSpecularLOD", null); F([ er() ], dn.prototype, "irradianceTexture", null); F([ W() ], dn.prototype, "isRenderTarget", void 0); function YB(c, e, t = !1) { const i = e.width, r = e.height; if (c instanceof Float32Array) { let o = c.byteLength / c.BYTES_PER_ELEMENT; const u = new Uint8Array(o); for (; --o >= 0; ) { let h = c[o]; h < 0 ? h = 0 : h > 1 && (h = 1), u[o] = h * 255; } c = u; } const s = document.createElement("canvas"); s.width = i, s.height = r; const n = s.getContext("2d"); if (!n) return null; const a = n.createImageData(i, r); if (a.data.set(c), n.putImageData(a, 0, 0), t) { const o = document.createElement("canvas"); o.width = i, o.height = r; const u = o.getContext("2d"); return u ? (u.translate(0, r), u.scale(1, -1), u.drawImage(s, 0, 0), o.toDataURL("image/png")) : null; } return s.toDataURL("image/png"); } function mK(c, e = 0, t = 0) { const i = c.getInternalTexture(); if (!i) return null; const r = c._readPixelsSync(e, t); return r ? YB(r, c.getSize(), i.invertY) : null; } async function gK(c, e = 0, t = 0) { const i = c.getInternalTexture(); if (!i) return null; const r = await c.readPixels(e, t); return r ? 
YB(r, c.getSize(), i.invertY) : null; } const Ece = { /** * Transform some pixel data to a base64 string * @param pixels defines the pixel data to transform to base64 * @param size defines the width and height of the (texture) data * @param invertY true if the data must be inverted for the Y coordinate during the conversion * @returns The base64 encoded string or null */ GenerateBase64StringFromPixelData: YB, /** * Reads the pixels stored in the webgl texture and returns them as a base64 string * @param texture defines the texture to read pixels from * @param faceIndex defines the face of the texture to read (in case of cube texture) * @param level defines the LOD level of the texture to read (in case of Mip Maps) * @returns The base64 encoded string or null */ GenerateBase64StringFromTexture: mK, /** * Reads the pixels stored in the webgl texture and returns them as a base64 string * @param texture defines the texture to read pixels from * @param faceIndex defines the face of the texture to read (in case of cube texture) * @param level defines the LOD level of the texture to read (in case of Mip Maps) * @returns The base64 encoded string or null wrapped in a promise */ GenerateBase64StringFromTextureAsync: gK }; class De extends dn { static _CreateVideoTexture(e, t, i, r = !1, s = !1, n = De.TRILINEAR_SAMPLINGMODE, a = {}, l, o = 5) { throw yr("VideoTexture"); } /** * Are mip maps generated for this texture or not. */ get noMipmap() { return this._noMipmap; } /** Returns the texture mime type if it was defined by a loader (undefined else) */ get mimeType() { return this._mimeType; } /** * Is the texture preventing material to render while loading. * If false, a default texture will be used instead of the loading one during the preparation step. */ set isBlocking(e) { this._isBlocking = e; } get isBlocking() { return this._isBlocking; } /** * Gets a boolean indicating if the texture needs to be inverted on the y axis during loading */ get invertY() { return this._invertY; } /** * Instantiates a new texture. * This represents a texture in babylon. It can be easily loaded from a network, base64 or html input. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/materials_introduction#texture * @param url defines the url of the picture to load as a texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param noMipmapOrOptions defines if the texture will require mip maps or not or set of all options to create the texture * @param invertY defines if the texture needs to be inverted on the y axis during loading * @param samplingMode defines the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...) * @param onLoad defines a callback triggered when the texture has been loaded * @param onError defines a callback triggered when an error occurred during the loading session * @param buffer defines the buffer to load the texture from in case the texture is loaded from a buffer representation * @param deleteBuffer defines if the buffer we are loading the texture from should be deleted after load * @param format defines the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...) 
* @param mimeType defines an optional mime type information * @param loaderOptions options to be passed to the loader * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param forcedExtension defines the extension to use to pick the right loader */ constructor(e, t, i, r, s = De.TRILINEAR_SAMPLINGMODE, n = null, a = null, l = null, o = !1, u, h, d, f, p) { var m, _, v, C, x, b, S, M, R, w; super(t), this.url = null, this.uOffset = 0, this.vOffset = 0, this.uScale = 1, this.vScale = 1, this.uAng = 0, this.vAng = 0, this.wAng = 0, this.uRotationCenter = 0.5, this.vRotationCenter = 0.5, this.wRotationCenter = 0.5, this.homogeneousRotationInUVTransform = !1, this.inspectableCustomProperties = null, this._noMipmap = !1, this._invertY = !1, this._rowGenerationMatrix = null, this._cachedTextureMatrix = null, this._projectionModeMatrix = null, this._t0 = null, this._t1 = null, this._t2 = null, this._cachedUOffset = -1, this._cachedVOffset = -1, this._cachedUScale = 0, this._cachedVScale = 0, this._cachedUAng = -1, this._cachedVAng = -1, this._cachedWAng = -1, this._cachedReflectionProjectionMatrixId = -1, this._cachedURotationCenter = -1, this._cachedVRotationCenter = -1, this._cachedWRotationCenter = -1, this._cachedHomogeneousRotationInUVTransform = !1, this._cachedReflectionTextureMatrix = null, this._cachedReflectionUOffset = -1, this._cachedReflectionVOffset = -1, this._cachedReflectionUScale = 0, this._cachedReflectionVScale = 0, this._cachedReflectionCoordinatesMode = -1, this._buffer = null, this._deleteBuffer = !1, this._format = null, this._delayedOnLoad = null, this._delayedOnError = null, this.onLoadObservable = new Fe(), this._isBlocking = !0, this.name = e || "", this.url = e; let V, k = !1, L = null, B = !0; typeof i == "object" && i !== null ? (V = (m = i.noMipmap) !== null && m !== void 0 ? m : !1, r = (_ = i.invertY) !== null && _ !== void 0 ? _ : !hn.UseOpenGLOrientationForUV, s = (v = i.samplingMode) !== null && v !== void 0 ? v : De.TRILINEAR_SAMPLINGMODE, n = (C = i.onLoad) !== null && C !== void 0 ? C : null, a = (x = i.onError) !== null && x !== void 0 ? x : null, l = (b = i.buffer) !== null && b !== void 0 ? b : null, o = (S = i.deleteBuffer) !== null && S !== void 0 ? S : !1, u = i.format, h = i.mimeType, d = i.loaderOptions, f = i.creationFlags, k = (M = i.useSRGBBuffer) !== null && M !== void 0 ? M : !1, L = (R = i.internalTexture) !== null && R !== void 0 ? R : null, B = (w = i.gammaSpace) !== null && w !== void 0 ? w : B) : V = !!i, this._gammaSpace = B, this._noMipmap = V, this._invertY = r === void 0 ? 
!hn.UseOpenGLOrientationForUV : r, this._initialSamplingMode = s, this._buffer = l, this._deleteBuffer = o, this._mimeType = h, this._loaderOptions = d, this._creationFlags = f, this._useSRGBBuffer = k, this._forcedExtension = p, u && (this._format = u); const U = this.getScene(), K = this._getEngine(); if (!K) return; K.onBeforeTextureInitObservable.notifyObservers(this); const ee = () => { this._texture && (this._texture._invertVScale && (this.vScale *= -1, this.vOffset += 1), this._texture._cachedWrapU !== null && (this.wrapU = this._texture._cachedWrapU, this._texture._cachedWrapU = null), this._texture._cachedWrapV !== null && (this.wrapV = this._texture._cachedWrapV, this._texture._cachedWrapV = null), this._texture._cachedWrapR !== null && (this.wrapR = this._texture._cachedWrapR, this._texture._cachedWrapR = null)), this.onLoadObservable.hasObservers() && this.onLoadObservable.notifyObservers(this), n && n(), !this.isBlocking && U && U.resetCachedMaterial(); }, Z = (q, le) => { this._loadingError = !0, this._errorObject = { message: q, exception: le }, a && a(q, le), De.OnTextureLoadErrorObservable.notifyObservers(this); }; if (!this.url && !L) { this._delayedOnLoad = ee, this._delayedOnError = Z; return; } if (this._texture = L ?? this._getFromCache(this.url, V, s, this._invertY, k, this.isCube), this._texture) if (this._texture.isReady) BO.SetImmediate(() => ee()); else { const q = this._texture.onLoadedObservable.add(ee); this._texture.onErrorObservable.add((le) => { var ie; Z(le.message, le.exception), (ie = this._texture) === null || ie === void 0 || ie.onLoadedObservable.remove(q); }); } else if (!U || !U.useDelayedTextureLoading) { try { this._texture = K.createTexture(this.url, V, this._invertY, U, s, ee, Z, this._buffer, void 0, this._format, this._forcedExtension, h, d, f, k); } catch (q) { throw Z("error loading", q), q; } o && (this._buffer = null); } else this.delayLoadState = 4, this._delayedOnLoad = ee, this._delayedOnError = Z; } /** * Update the url (and optional buffer) of this texture if url was null during construction. * @param url the url of the texture * @param buffer the buffer of the texture (defaults to null) * @param onLoad callback called when the texture is loaded (defaults to null) * @param forcedExtension defines the extension to use to pick the right loader */ updateURL(e, t = null, i, r) { this.url && (this.releaseInternalTexture(), this.getScene().markAllMaterialsAsDirty(1, (s) => s.hasTexture(this))), (!this.name || this.name.startsWith("data:")) && (this.name = e), this.url = e, this._buffer = t, this._forcedExtension = r, this.delayLoadState = 4, i && (this._delayedOnLoad = i), this.delayLoad(); } /** * Finish the loading sequence of a texture flagged as delayed load. * @internal */ delayLoad() { if (this.delayLoadState !== 4) return; const e = this.getScene(); e && (this.delayLoadState = 1, this._texture = this._getFromCache(this.url, this._noMipmap, this.samplingMode, this._invertY, this._useSRGBBuffer, this.isCube), this._texture ? this._delayedOnLoad && (this._texture.isReady ? 
BO.SetImmediate(this._delayedOnLoad) : this._texture.onLoadedObservable.add(this._delayedOnLoad)) : (this._texture = e.getEngine().createTexture(this.url, this._noMipmap, this._invertY, e, this.samplingMode, this._delayedOnLoad, this._delayedOnError, this._buffer, null, this._format, this._forcedExtension, this._mimeType, this._loaderOptions, this._creationFlags, this._useSRGBBuffer), this._deleteBuffer && (this._buffer = null)), this._delayedOnLoad = null, this._delayedOnError = null); } _prepareRowForTextureGeneration(e, t, i, r) { e *= this._cachedUScale, t *= this._cachedVScale, e -= this.uRotationCenter * this._cachedUScale, t -= this.vRotationCenter * this._cachedVScale, i -= this.wRotationCenter, D.TransformCoordinatesFromFloatsToRef(e, t, i, this._rowGenerationMatrix, r), r.x += this.uRotationCenter * this._cachedUScale + this._cachedUOffset, r.y += this.vRotationCenter * this._cachedVScale + this._cachedVOffset, r.z += this.wRotationCenter; } /** * Checks if the texture has the same transform matrix than another texture * @param texture texture to check against * @returns true if the transforms are the same, else false */ checkTransformsAreIdentical(e) { return e !== null && this.uOffset === e.uOffset && this.vOffset === e.vOffset && this.uScale === e.uScale && this.vScale === e.vScale && this.uAng === e.uAng && this.vAng === e.vAng && this.wAng === e.wAng; } /** * Get the current texture matrix which includes the requested offsetting, tiling and rotation components. * @param uBase * @returns the transform matrix of the texture. */ getTextureMatrix(e = 1) { if (this.uOffset === this._cachedUOffset && this.vOffset === this._cachedVOffset && this.uScale * e === this._cachedUScale && this.vScale === this._cachedVScale && this.uAng === this._cachedUAng && this.vAng === this._cachedVAng && this.wAng === this._cachedWAng && this.uRotationCenter === this._cachedURotationCenter && this.vRotationCenter === this._cachedVRotationCenter && this.wRotationCenter === this._cachedWRotationCenter && this.homogeneousRotationInUVTransform === this._cachedHomogeneousRotationInUVTransform) return this._cachedTextureMatrix; this._cachedUOffset = this.uOffset, this._cachedVOffset = this.vOffset, this._cachedUScale = this.uScale * e, this._cachedVScale = this.vScale, this._cachedUAng = this.uAng, this._cachedVAng = this.vAng, this._cachedWAng = this.wAng, this._cachedURotationCenter = this.uRotationCenter, this._cachedVRotationCenter = this.vRotationCenter, this._cachedWRotationCenter = this.wRotationCenter, this._cachedHomogeneousRotationInUVTransform = this.homogeneousRotationInUVTransform, (!this._cachedTextureMatrix || !this._rowGenerationMatrix) && (this._cachedTextureMatrix = Ae.Zero(), this._rowGenerationMatrix = new Ae(), this._t0 = D.Zero(), this._t1 = D.Zero(), this._t2 = D.Zero()), Ae.RotationYawPitchRollToRef(this.vAng, this.uAng, this.wAng, this._rowGenerationMatrix), this.homogeneousRotationInUVTransform ? 
(Ae.TranslationToRef(-this._cachedURotationCenter, -this._cachedVRotationCenter, -this._cachedWRotationCenter, de.Matrix[0]), Ae.TranslationToRef(this._cachedURotationCenter, this._cachedVRotationCenter, this._cachedWRotationCenter, de.Matrix[1]), Ae.ScalingToRef(this._cachedUScale, this._cachedVScale, 0, de.Matrix[2]), Ae.TranslationToRef(this._cachedUOffset, this._cachedVOffset, 0, de.Matrix[3]), de.Matrix[0].multiplyToRef(this._rowGenerationMatrix, this._cachedTextureMatrix), this._cachedTextureMatrix.multiplyToRef(de.Matrix[1], this._cachedTextureMatrix), this._cachedTextureMatrix.multiplyToRef(de.Matrix[2], this._cachedTextureMatrix), this._cachedTextureMatrix.multiplyToRef(de.Matrix[3], this._cachedTextureMatrix), this._cachedTextureMatrix.setRowFromFloats(2, this._cachedTextureMatrix.m[12], this._cachedTextureMatrix.m[13], this._cachedTextureMatrix.m[14], 1)) : (this._prepareRowForTextureGeneration(0, 0, 0, this._t0), this._prepareRowForTextureGeneration(1, 0, 0, this._t1), this._prepareRowForTextureGeneration(0, 1, 0, this._t2), this._t1.subtractInPlace(this._t0), this._t2.subtractInPlace(this._t0), Ae.FromValuesToRef(this._t1.x, this._t1.y, this._t1.z, 0, this._t2.x, this._t2.y, this._t2.z, 0, this._t0.x, this._t0.y, this._t0.z, 0, 0, 0, 0, 1, this._cachedTextureMatrix)); const t = this.getScene(); return t ? (this.optimizeUVAllocation && t.markAllMaterialsAsDirty(1, (i) => i.hasTexture(this)), this._cachedTextureMatrix) : this._cachedTextureMatrix; } /** * Get the current matrix used to apply reflection. This is useful to rotate an environment texture for instance. * @returns The reflection texture transform */ getReflectionTextureMatrix() { const e = this.getScene(); if (!e) return this._cachedReflectionTextureMatrix; if (this.uOffset === this._cachedReflectionUOffset && this.vOffset === this._cachedReflectionVOffset && this.uScale === this._cachedReflectionUScale && this.vScale === this._cachedReflectionVScale && this.coordinatesMode === this._cachedReflectionCoordinatesMode) if (this.coordinatesMode === De.PROJECTION_MODE) { if (this._cachedReflectionProjectionMatrixId === e.getProjectionMatrix().updateFlag) return this._cachedReflectionTextureMatrix; } else return this._cachedReflectionTextureMatrix; this._cachedReflectionTextureMatrix || (this._cachedReflectionTextureMatrix = Ae.Zero()), this._projectionModeMatrix || (this._projectionModeMatrix = Ae.Zero()); const t = this._cachedReflectionCoordinatesMode !== this.coordinatesMode; switch (this._cachedReflectionUOffset = this.uOffset, this._cachedReflectionVOffset = this.vOffset, this._cachedReflectionUScale = this.uScale, this._cachedReflectionVScale = this.vScale, this._cachedReflectionCoordinatesMode = this.coordinatesMode, this.coordinatesMode) { case De.PLANAR_MODE: { Ae.IdentityToRef(this._cachedReflectionTextureMatrix), this._cachedReflectionTextureMatrix[0] = this.uScale, this._cachedReflectionTextureMatrix[5] = this.vScale, this._cachedReflectionTextureMatrix[12] = this.uOffset, this._cachedReflectionTextureMatrix[13] = this.vOffset; break; } case De.PROJECTION_MODE: { Ae.FromValuesToRef(0.5, 0, 0, 0, 0, -0.5, 0, 0, 0, 0, 0, 0, 0.5, 0.5, 1, 1, this._projectionModeMatrix); const i = e.getProjectionMatrix(); this._cachedReflectionProjectionMatrixId = i.updateFlag, i.multiplyToRef(this._projectionModeMatrix, this._cachedReflectionTextureMatrix); break; } default: Ae.IdentityToRef(this._cachedReflectionTextureMatrix); break; } return t && e.markAllMaterialsAsDirty(1, (i) => i.hasTexture(this)), 
this._cachedReflectionTextureMatrix; } /** * Clones the texture. * @returns the cloned texture */ clone() { const e = { noMipmap: this._noMipmap, invertY: this._invertY, samplingMode: this.samplingMode, onLoad: void 0, onError: void 0, buffer: this._texture ? this._texture._buffer : void 0, deleteBuffer: this._deleteBuffer, format: this.textureFormat, mimeType: this.mimeType, loaderOptions: this._loaderOptions, creationFlags: this._creationFlags, useSRGBBuffer: this._useSRGBBuffer }; return St.Clone(() => new De(this._texture ? this._texture.url : null, this.getScene(), e), this); } /** * Serialize the texture to a JSON representation we can easily use in the respective Parse function. * @returns The JSON representation of the texture */ serialize() { var e, t; const i = this.name; De.SerializeBuffers || this.name.startsWith("data:") && (this.name = ""), this.name.startsWith("data:") && this.url === this.name && (this.url = ""); const r = super.serialize(De._SerializeInternalTextureUniqueId); return r ? ((De.SerializeBuffers || De.ForceSerializeBuffers) && (typeof this._buffer == "string" && this._buffer.substr(0, 5) === "data:" ? (r.base64String = this._buffer, r.name = r.name.replace("data:", "")) : this.url && this.url.startsWith("data:") && this._buffer instanceof Uint8Array ? r.base64String = "data:image/png;base64," + DL(this._buffer) : (De.ForceSerializeBuffers || this.url && this.url.startsWith("blob:") || this._forceSerialize) && (r.base64String = !this._engine || this._engine._features.supportSyncTextureRead ? mK(this) : gK(this))), r.invertY = this._invertY, r.samplingMode = this.samplingMode, r._creationFlags = this._creationFlags, r._useSRGBBuffer = this._useSRGBBuffer, De._SerializeInternalTextureUniqueId && (r.internalTextureUniqueId = (t = (e = this._texture) === null || e === void 0 ? void 0 : e.uniqueId) !== null && t !== void 0 ? t : void 0), r.noMipmap = this._noMipmap, this.name = i, r) : null; } /** * Get the current class name of the texture useful for serialization or dynamic coding. * @returns "Texture" */ getClassName() { return "Texture"; } /** * Dispose the texture and release its associated resources. */ dispose() { super.dispose(), this.onLoadObservable.clear(), this._delayedOnLoad = null, this._delayedOnError = null, this._buffer = null; } /** * Parse the JSON representation of a texture in order to recreate the texture in the given scene. 
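* Illustrative sketch (not part of the library source): a serialize / Parse round
* trip. Assumed public BABYLON.* names, an existing `scene`, and placeholder URLs;
* rootUrl is the base path used to resolve relative texture names.
* @example
* const original = new BABYLON.Texture("textures/wood.png", scene);
* const json = original.serialize();   // plain JSON object
* const restored = BABYLON.Texture.Parse(json, scene, "https://example.com/assets/");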
* @param parsedTexture Define the JSON representation of the texture * @param scene Define the scene the parsed texture should be instantiated in * @param rootUrl Define the root url of the parsing sequence in the case of relative dependencies * @returns The parsed texture if successful */ static Parse(e, t, i) { if (e.customType) { const o = z9.Instantiate(e.customType).Parse(e, t, i); return e.samplingMode && o.updateSamplingMode && o._samplingMode && o._samplingMode !== e.samplingMode && o.updateSamplingMode(e.samplingMode), o; } if (e.isCube && !e.isRenderTarget) return De._CubeTextureParser(e, t, i); const r = e.internalTextureUniqueId !== void 0; if (!e.name && !e.isRenderTarget && !r) return null; let s; if (r) { const l = t.getEngine().getLoadedTexturesCache(); for (const o of l) if (o.uniqueId === e.internalTextureUniqueId) { s = o; break; } } const n = (l) => { var o; if (l && l._texture && (l._texture._cachedWrapU = null, l._texture._cachedWrapV = null, l._texture._cachedWrapR = null), e.samplingMode) { const u = e.samplingMode; l && l.samplingMode !== u && l.updateSamplingMode(u); } if (l && e.animations) for (let u = 0; u < e.animations.length; u++) { const h = e.animations[u], d = Qo("BABYLON.Animation"); d && l.animations.push(d.Parse(h)); } r && !s && ((o = l == null ? void 0 : l._texture) === null || o === void 0 || o._setUniqueId(e.internalTextureUniqueId)); }; return St.Parse(() => { var l, o, u; let h = !0; if (e.noMipmap && (h = !1), e.mirrorPlane) { const d = De._CreateMirror(e.name, e.renderTargetSize, t, h); return d._waitingRenderList = e.renderList, d.mirrorPlane = Sd.FromArray(e.mirrorPlane), n(d), d; } else if (e.isRenderTarget) { let d = null; if (e.isCube) { if (t.reflectionProbes) for (let f = 0; f < t.reflectionProbes.length; f++) { const p = t.reflectionProbes[f]; if (p.name === e.name) return p.cubeTexture; } } else d = De._CreateRenderTargetTexture(e.name, e.renderTargetSize, t, h, (l = e._creationFlags) !== null && l !== void 0 ? l : 0), d._waitingRenderList = e.renderList; return n(d), d; } else if (e.isVideo) { const d = De._CreateVideoTexture(i + (e.url || e.name), i + (e.src || e.url), t, h, e.invertY, e.samplingMode, e.settings || {}); return n(d), d; } else { let d; if (e.base64String && !s) d = De.CreateFromBase64String(e.base64String, e.base64String, t, !h, e.invertY, e.samplingMode, () => { n(d); }, (o = e._creationFlags) !== null && o !== void 0 ? o : 0, (u = e._useSRGBBuffer) !== null && u !== void 0 ? u : !1), d.name = e.name; else { let f; e.name && (e.name.indexOf("://") > 0 || e.name.startsWith("data:")) ? f = e.name : f = i + e.name, e.url && (e.url.startsWith("data:") || De.UseSerializedUrlIfAny) && (f = e.url); const p = { noMipmap: !h, invertY: e.invertY, samplingMode: e.samplingMode, onLoad: () => { n(d); }, internalTexture: s }; d = new De(f, t, p); } return d; } }, e, t); } /** * Creates a texture from its base 64 representation. * @param data Define the base64 payload without the data: prefix * @param name Define the name of the texture in the scene useful fo caching purpose for instance * @param scene Define the scene the texture should belong to * @param noMipmapOrOptions defines if the texture will require mip maps or not or set of all options to create the texture * @param invertY define if the texture needs to be inverted on the y axis during loading * @param samplingMode define the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...) 
* @param onLoad define a callback triggered when the texture has been loaded * @param onError define a callback triggered when an error occurred during the loading session * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @returns the created texture */ static CreateFromBase64String(e, t, i, r, s, n = De.TRILINEAR_SAMPLINGMODE, a = null, l = null, o = 5, u) { return new De("data:" + t, i, r, s, n, a, l, e, !1, o, void 0, void 0, u); } /** * Creates a texture from its data: representation. (data: will be added in case only the payload has been passed in) * @param name Define the name of the texture in the scene useful fo caching purpose for instance * @param buffer define the buffer to load the texture from in case the texture is loaded from a buffer representation * @param scene Define the scene the texture should belong to * @param deleteBuffer define if the buffer we are loading the texture from should be deleted after load * @param noMipmapOrOptions defines if the texture will require mip maps or not or set of all options to create the texture * @param invertY define if the texture needs to be inverted on the y axis during loading * @param samplingMode define the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...) * @param onLoad define a callback triggered when the texture has been loaded * @param onError define a callback triggered when an error occurred during the loading session * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @returns the created texture */ static LoadFromDataString(e, t, i, r = !1, s, n = !0, a = De.TRILINEAR_SAMPLINGMODE, l = null, o = null, u = 5, h) { return e.substr(0, 5) !== "data:" && (e = "data:" + e), new De(e, i, s, n, a, l, o, t, r, u, void 0, void 0, h); } } De.SerializeBuffers = !0; De.ForceSerializeBuffers = !1; De.OnTextureLoadErrorObservable = new Fe(); De._SerializeInternalTextureUniqueId = !1; De._CubeTextureParser = (c, e, t) => { throw yr("CubeTexture"); }; De._CreateMirror = (c, e, t, i) => { throw yr("MirrorTexture"); }; De._CreateRenderTargetTexture = (c, e, t, i, r) => { throw yr("RenderTargetTexture"); }; De.NEAREST_SAMPLINGMODE = 1; De.NEAREST_NEAREST_MIPLINEAR = 8; De.BILINEAR_SAMPLINGMODE = 2; De.LINEAR_LINEAR_MIPNEAREST = 11; De.TRILINEAR_SAMPLINGMODE = 3; De.LINEAR_LINEAR_MIPLINEAR = 3; De.NEAREST_NEAREST_MIPNEAREST = 4; De.NEAREST_LINEAR_MIPNEAREST = 5; De.NEAREST_LINEAR_MIPLINEAR = 6; De.NEAREST_LINEAR = 7; De.NEAREST_NEAREST = 1; De.LINEAR_NEAREST_MIPNEAREST = 9; De.LINEAR_NEAREST_MIPLINEAR = 10; De.LINEAR_LINEAR = 2; De.LINEAR_NEAREST = 12; De.EXPLICIT_MODE = 0; De.SPHERICAL_MODE = 1; De.PLANAR_MODE = 2; De.CUBIC_MODE = 3; De.PROJECTION_MODE = 4; De.SKYBOX_MODE = 5; De.INVCUBIC_MODE = 6; De.EQUIRECTANGULAR_MODE = 7; De.FIXED_EQUIRECTANGULAR_MODE = 8; De.FIXED_EQUIRECTANGULAR_MIRRORED_MODE = 9; De.CLAMP_ADDRESSMODE = 0; De.WRAP_ADDRESSMODE = 1; De.MIRROR_ADDRESSMODE = 2; De.UseSerializedUrlIfAny = !1; F([ W() ], De.prototype, "url", void 0); F([ W() ], De.prototype, "uOffset", void 0); F([ W() ], De.prototype, "vOffset", void 0); F([ W() ], De.prototype, "uScale", void 0); F([ W() ], De.prototype, "vScale", void 0); F([ W() ], De.prototype, "uAng", void 0); F([ W() ], 
De.prototype, "vAng", void 0); F([ W() ], De.prototype, "wAng", void 0); F([ W() ], De.prototype, "uRotationCenter", void 0); F([ W() ], De.prototype, "vRotationCenter", void 0); F([ W() ], De.prototype, "wRotationCenter", void 0); F([ W() ], De.prototype, "homogeneousRotationInUVTransform", void 0); F([ W() ], De.prototype, "isBlocking", null); Be("BABYLON.Texture", De); St._TextureParser = De.Parse; mi.prototype.updateRawTexture = function(c, e, t, i, r = null, s = 0, n = !1) { if (!c) return; const a = this._getRGBABufferInternalSizedFormat(s, t, n), l = this._getInternalFormat(t), o = this._getWebGLTextureType(s); this._bindTextureDirectly(this._gl.TEXTURE_2D, c, !0), this._unpackFlipY(i === void 0 ? !0 : !!i), this._doNotHandleContextLost || (c._bufferView = e, c.format = t, c.type = s, c.invertY = i, c._compression = r), c.width % 4 !== 0 && this._gl.pixelStorei(this._gl.UNPACK_ALIGNMENT, 1), r && e ? this._gl.compressedTexImage2D(this._gl.TEXTURE_2D, 0, this.getCaps().s3tc[r], c.width, c.height, 0, e) : this._gl.texImage2D(this._gl.TEXTURE_2D, 0, a, c.width, c.height, 0, l, o, e), c.generateMipMaps && this._gl.generateMipmap(this._gl.TEXTURE_2D), this._bindTextureDirectly(this._gl.TEXTURE_2D, null), c.isReady = !0; }; mi.prototype.createRawTexture = function(c, e, t, i, r, s, n, a = null, l = 0, o = 0, u = !1) { const h = new ln(this, ts.Raw); h.baseWidth = e, h.baseHeight = t, h.width = e, h.height = t, h.format = i, h.generateMipMaps = r, h.samplingMode = n, h.invertY = s, h._compression = a, h.type = l, h._useSRGBBuffer = this._getUseSRGBBuffer(u, !r), this._doNotHandleContextLost || (h._bufferView = c), this.updateRawTexture(h, c, i, s, a, l, h._useSRGBBuffer), this._bindTextureDirectly(this._gl.TEXTURE_2D, h, !0); const d = this._getSamplingParameters(n, r); return this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_MAG_FILTER, d.mag), this._gl.texParameteri(this._gl.TEXTURE_2D, this._gl.TEXTURE_MIN_FILTER, d.min), r && this._gl.generateMipmap(this._gl.TEXTURE_2D), this._bindTextureDirectly(this._gl.TEXTURE_2D, null), this._internalTexturesCache.push(h), h; }; mi.prototype.createRawCubeTexture = function(c, e, t, i, r, s, n, a = null) { const l = this._gl, o = new ln(this, ts.CubeRaw); o.isCube = !0, o.format = t, o.type = i, this._doNotHandleContextLost || (o._bufferViewArray = c); const u = this._getWebGLTextureType(i); let h = this._getInternalFormat(t); h === l.RGB && (h = l.RGBA), u === l.FLOAT && !this._caps.textureFloatLinearFiltering ? (r = !1, n = 1, Ce.Warn("Float texture filtering is not supported. Mipmap generation and sampling mode are forced to false and TEXTURE_NEAREST_SAMPLINGMODE, respectively.")) : u === this._gl.HALF_FLOAT_OES && !this._caps.textureHalfFloatLinearFiltering ? (r = !1, n = 1, Ce.Warn("Half float texture filtering is not supported. Mipmap generation and sampling mode are forced to false and TEXTURE_NEAREST_SAMPLINGMODE, respectively.")) : u === l.FLOAT && !this._caps.textureFloatRender ? (r = !1, Ce.Warn("Render to float textures is not supported. Mipmap generation forced to false.")) : u === l.HALF_FLOAT && !this._caps.colorBufferFloat && (r = !1, Ce.Warn("Render to half float textures is not supported. 
Mipmap generation forced to false.")); const d = e, f = d; if (o.width = d, o.height = f, o.invertY = s, o._compression = a, !this.needPOTTextures || Ve.IsExponentOfTwo(o.width) && Ve.IsExponentOfTwo(o.height) || (r = !1), c) this.updateRawCubeTexture(o, c, t, i, s, a); else { const _ = this._getRGBABufferInternalSizedFormat(i), v = 0; this._bindTextureDirectly(l.TEXTURE_CUBE_MAP, o, !0); for (let C = 0; C < 6; C++) a ? l.compressedTexImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X + C, v, this.getCaps().s3tc[a], o.width, o.height, 0, void 0) : l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X + C, v, _, o.width, o.height, 0, h, u, null); this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, null); } this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, o, !0), c && r && this._gl.generateMipmap(this._gl.TEXTURE_CUBE_MAP); const m = this._getSamplingParameters(n, r); return l.texParameteri(l.TEXTURE_CUBE_MAP, l.TEXTURE_MAG_FILTER, m.mag), l.texParameteri(l.TEXTURE_CUBE_MAP, l.TEXTURE_MIN_FILTER, m.min), l.texParameteri(l.TEXTURE_CUBE_MAP, l.TEXTURE_WRAP_S, l.CLAMP_TO_EDGE), l.texParameteri(l.TEXTURE_CUBE_MAP, l.TEXTURE_WRAP_T, l.CLAMP_TO_EDGE), this._bindTextureDirectly(l.TEXTURE_CUBE_MAP, null), o.generateMipMaps = r, o.samplingMode = n, o.isReady = !0, o; }; mi.prototype.updateRawCubeTexture = function(c, e, t, i, r, s = null, n = 0) { c._bufferViewArray = e, c.format = t, c.type = i, c.invertY = r, c._compression = s; const a = this._gl, l = this._getWebGLTextureType(i); let o = this._getInternalFormat(t); const u = this._getRGBABufferInternalSizedFormat(i); let h = !1; o === a.RGB && (o = a.RGBA, h = !0), this._bindTextureDirectly(a.TEXTURE_CUBE_MAP, c, !0), this._unpackFlipY(r === void 0 ? !0 : !!r), c.width % 4 !== 0 && a.pixelStorei(a.UNPACK_ALIGNMENT, 1); for (let f = 0; f < 6; f++) { let p = e[f]; s ? a.compressedTexImage2D(a.TEXTURE_CUBE_MAP_POSITIVE_X + f, n, this.getCaps().s3tc[s], c.width, c.height, 0, p) : (h && (p = Fte(p, c.width, c.height, i)), a.texImage2D(a.TEXTURE_CUBE_MAP_POSITIVE_X + f, n, u, c.width, c.height, 0, o, l, p)); } (!this.needPOTTextures || Ve.IsExponentOfTwo(c.width) && Ve.IsExponentOfTwo(c.height)) && c.generateMipMaps && n === 0 && this._gl.generateMipmap(this._gl.TEXTURE_CUBE_MAP), this._bindTextureDirectly(this._gl.TEXTURE_CUBE_MAP, null), c.isReady = !0; }; mi.prototype.createRawCubeTextureFromUrl = function(c, e, t, i, r, s, n, a, l = null, o = null, u = 3, h = !1) { const d = this._gl, f = this.createRawCubeTexture(null, t, i, r, !s, h, u, null); e == null || e.addPendingData(f), f.url = c, f.isReady = !1, this._internalTexturesCache.push(f); const p = (_, v) => { e == null || e.removePendingData(f), o && _ && o(_.status + " " + _.statusText, v); }, m = (_) => { const v = f.width, C = n(_); if (C) { if (a) { const x = this._getWebGLTextureType(r); let b = this._getInternalFormat(i); const S = this._getRGBABufferInternalSizedFormat(r); let M = !1; b === d.RGB && (b = d.RGBA, M = !0), this._bindTextureDirectly(d.TEXTURE_CUBE_MAP, f, !0), this._unpackFlipY(!1); const R = a(C); for (let w = 0; w < R.length; w++) { const V = v >> w; for (let k = 0; k < 6; k++) { let L = R[w][k]; M && (L = Fte(L, V, V, r)), d.texImage2D(k, w, S, V, V, 0, b, x, L); } } this._bindTextureDirectly(d.TEXTURE_CUBE_MAP, null); } else this.updateRawCubeTexture(f, C, i, r, h); f.isReady = !0, e == null || e.removePendingData(f), f.onLoadedObservable.notifyObservers(f), f.onLoadedObservable.clear(), l && l(); } }; return this._loadFile(c, (_) => { m(_); }, void 0, e == null ? 
void 0 : e.offlineProvider, !0, p), f; }; function Fte(c, e, t, i) { let r, s = 1; i === 1 ? r = new Float32Array(e * t * 4) : i === 2 ? (r = new Uint16Array(e * t * 4), s = 15360) : i === 7 ? r = new Uint32Array(e * t * 4) : r = new Uint8Array(e * t * 4); for (let n = 0; n < e; n++) for (let a = 0; a < t; a++) { const l = (a * e + n) * 3, o = (a * e + n) * 4; r[o + 0] = c[l + 0], r[o + 1] = c[l + 1], r[o + 2] = c[l + 2], r[o + 3] = s; } return r; } function Bte(c) { return function(e, t, i, r, s, n, a, l, o = null, u = 0) { const h = c ? this._gl.TEXTURE_3D : this._gl.TEXTURE_2D_ARRAY, d = c ? ts.Raw3D : ts.Raw2DArray, f = new ln(this, d); f.baseWidth = t, f.baseHeight = i, f.baseDepth = r, f.width = t, f.height = i, f.depth = r, f.format = s, f.type = u, f.generateMipMaps = n, f.samplingMode = l, c ? f.is3D = !0 : f.is2DArray = !0, this._doNotHandleContextLost || (f._bufferView = e), c ? this.updateRawTexture3D(f, e, s, a, o, u) : this.updateRawTexture2DArray(f, e, s, a, o, u), this._bindTextureDirectly(h, f, !0); const p = this._getSamplingParameters(l, n); return this._gl.texParameteri(h, this._gl.TEXTURE_MAG_FILTER, p.mag), this._gl.texParameteri(h, this._gl.TEXTURE_MIN_FILTER, p.min), n && this._gl.generateMipmap(h), this._bindTextureDirectly(h, null), this._internalTexturesCache.push(f), f; }; } mi.prototype.createRawTexture2DArray = Bte(!1); mi.prototype.createRawTexture3D = Bte(!0); function Ute(c) { return function(e, t, i, r, s = null, n = 0) { const a = c ? this._gl.TEXTURE_3D : this._gl.TEXTURE_2D_ARRAY, l = this._getWebGLTextureType(n), o = this._getInternalFormat(i), u = this._getRGBABufferInternalSizedFormat(n, i); this._bindTextureDirectly(a, e, !0), this._unpackFlipY(r === void 0 ? !0 : !!r), this._doNotHandleContextLost || (e._bufferView = t, e.format = i, e.invertY = r, e._compression = s), e.width % 4 !== 0 && this._gl.pixelStorei(this._gl.UNPACK_ALIGNMENT, 1), s && t ? this._gl.compressedTexImage3D(a, 0, this.getCaps().s3tc[s], e.width, e.height, e.depth, 0, t) : this._gl.texImage3D(a, 0, u, e.width, e.height, e.depth, 0, o, l, t), e.generateMipMaps && this._gl.generateMipmap(a), this._bindTextureDirectly(a, null), e.isReady = !0; }; } mi.prototype.updateRawTexture2DArray = Ute(!1); mi.prototype.updateRawTexture3D = Ute(!0); class Po extends De { /** * Instantiates a new RawTexture. * Raw texture can help creating a texture directly from an array of data. * This can be super useful if you either get the data from an uncompressed source or * if you wish to create your texture pixel by pixel. * @param data define the array of data to use to create the texture (null to create an empty texture) * @param width define the width of the texture * @param height define the height of the texture * @param format define the format of the data (RGB, RGBA... Engine.TEXTUREFORMAT_xxx) * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps define whether mip maps should be generated or not * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). 
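* A small illustrative sketch, assuming `scene` already exists; the format value 5 matches the RGBA
* format used by CreateRGBATexture below, and Texture.NEAREST_SAMPLINGMODE keeps the pixels unfiltered:
* @example
* const pixels = new Uint8Array([255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255, 255, 255, 255, 255]);
* const rawTex = new RawTexture(pixels, 2, 2, 5, scene, false, false, Texture.NEAREST_SAMPLINGMODE);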
*/ constructor(e, t, i, r, s, n = !0, a = !1, l = 3, o = 0, u, h) { super(null, s, !n, a, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, u), this.format = r, this._engine && (!this._engine._caps.textureFloatLinearFiltering && o === 1 && (l = 1), !this._engine._caps.textureHalfFloatLinearFiltering && o === 2 && (l = 1), this._texture = this._engine.createRawTexture(e, t, i, r, n, a, l, null, o, u ?? 0, h ?? !1), this.wrapU = De.CLAMP_ADDRESSMODE, this.wrapV = De.CLAMP_ADDRESSMODE); } /** * Updates the texture underlying data. * @param data Define the new data of the texture */ update(e) { this._getEngine().updateRawTexture(this._texture, e, this._texture.format, this._texture.invertY, null, this._texture.type, this._texture._useSRGBBuffer); } /** * Creates a luminance texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @returns the luminance texture */ static CreateLuminanceTexture(e, t, i, r, s = !0, n = !1, a = 3) { return new Po(e, t, i, 1, r, s, n, a); } /** * Creates a luminance alpha texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @returns the luminance alpha texture */ static CreateLuminanceAlphaTexture(e, t, i, r, s = !0, n = !1, a = 3) { return new Po(e, t, i, 2, r, s, n, a); } /** * Creates an alpha texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @returns the alpha texture */ static CreateAlphaTexture(e, t, i, r, s = !0, n = !1, a = 3) { return new Po(e, t, i, 0, r, s, n, a); } /** * Creates a RGB texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). 
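* Sketch, assuming `scene` exists; three bytes per pixel (no alpha channel) for a 2x2 texture:
* @example
* const rgb = new Uint8Array([255, 0, 0, 0, 255, 0, 0, 0, 255, 255, 255, 255]);
* const tex = RawTexture.CreateRGBTexture(rgb, 2, 2, scene);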
* @returns the RGB alpha texture */ static CreateRGBTexture(e, t, i, r, s = !0, n = !1, a = 3, l = 0, o = 0, u = !1) { return new Po(e, t, i, 4, r, s, n, a, l, o, u); } /** * Creates a RGBA texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns the RGBA texture */ static CreateRGBATexture(e, t, i, r, s = !0, n = !1, a = 3, l = 0, o = 0, u = !1) { return new Po(e, t, i, 5, r, s, n, a, l, o, u); } /** * Creates a RGBA storage texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns the RGBA texture */ static CreateRGBAStorageTexture(e, t, i, r, s = !0, n = !1, a = 3, l = 0, o = !1) { return new Po(e, t, i, 5, r, s, n, a, l, 1, o); } /** * Creates a R texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @returns the R texture */ static CreateRTexture(e, t, i, r, s = !0, n = !1, a = De.TRILINEAR_SAMPLINGMODE, l = 1) { return new Po(e, t, i, 6, r, s, n, a, l); } /** * Creates a R storage texture from some data. * @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param sceneOrEngine defines the scene or engine the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... 
Engine.TEXTURETYPE_xxx) * @returns the R texture */ static CreateRStorageTexture(e, t, i, r, s = !0, n = !1, a = De.TRILINEAR_SAMPLINGMODE, l = 1) { return new Po(e, t, i, 6, r, s, n, a, l, 1); } } class Tce { /** * Create a new VertexAnimationBaker object which can help baking animations into a texture. * @param scene Defines the scene the VAT belongs to * @param mesh Defines the mesh the VAT belongs to */ constructor(e, t) { this._scene = e, this._mesh = t; } /** * Bakes the animation into the texture. This should be called once, when the * scene starts, so the VAT is generated and associated to the mesh. * @param ranges Defines the ranges in the animation that will be baked. * @returns The array of matrix transforms for each vertex (columns) and frame (rows), as a Float32Array. */ async bakeVertexData(e) { if (!this._mesh.skeleton) throw new Error("No skeleton in this mesh."); const t = this._mesh.skeleton.bones.length, i = e.reduce((a, l) => a + l.to - l.from + 1, 0); if (isNaN(i)) throw new Error("Invalid animation ranges."); let r = 0; const s = (t + 1) * 4 * 4 * i, n = new Float32Array(s); this._scene.stopAnimation(this._mesh), this._mesh.skeleton.returnToRest(); for (const a of e) for (let l = a.from; l <= a.to; l++) await this._executeAnimationFrame(n, l, r++); return n; } /** * Runs an animation frame and stores its vertex data * * @param vertexData The array to save data to. * @param frameIndex Current frame in the skeleton animation to render. * @param textureIndex Current index of the texture data. */ async _executeAnimationFrame(e, t, i) { return new Promise((r, s) => { this._scene.beginAnimation(this._mesh.skeleton, t, t, !1, 1, () => { const n = this._mesh.skeleton.getTransformMatrices(this._mesh); e.set(n, i * n.length), r(); }); }); } /** * Builds a vertex animation texture given the vertexData in an array. * @param vertexData The vertex animation data. You can generate it with bakeVertexData(). * @returns The vertex animation texture to be used with BakedVertexAnimationManager. */ textureFromBakedVertexData(e) { if (!this._mesh.skeleton) throw new Error("No skeleton in this mesh."); const t = this._mesh.skeleton.bones.length, i = Po.CreateRGBATexture(e, (t + 1) * 4, e.length / ((t + 1) * 4 * 4), this._scene, !1, !1, De.NEAREST_NEAREST, 1); return i.name = "VAT" + this._mesh.skeleton.name, i; } /** * Serializes our vertexData to an object, with a nice string for the vertexData. * @param vertexData The vertex array data. * @returns This object serialized to a JS dict. */ serializeBakedVertexDataToObject(e) { if (!this._mesh.skeleton) throw new Error("No skeleton in this mesh."); const t = this._mesh.skeleton.bones.length, i = (t + 1) * 4, r = e.length / ((t + 1) * 4 * 4); return { vertexData: DL(e), width: i, height: r }; } /** * Loads previously baked data. * @param data The object as serialized by serializeBakedVertexDataToObject() * @returns The array of matrix transforms for each vertex (columns) and frame (rows), as a Float32Array. */ loadBakedVertexDataFromObject(e) { return new Float32Array(OL(e.vertexData)); } /** * Serializes our vertexData to a JSON string, with a nice string for the vertexData. * Should be called right after bakeVertexData(). * @param vertexData The vertex array data. * @returns This object serialized to a safe string. */ serializeBakedVertexDataToJSON(e) { return JSON.stringify(this.serializeBakedVertexDataToObject(e)); } /** * Loads previously baked data in string format. 
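* Round-trip sketch, assuming an async context, an existing `scene`, a skinned `mesh`, and a
* placeholder frame range (a plain { from, to } object is enough for baking):
* @example
* const baker = new VertexAnimationBaker(scene, mesh);
* const vertexData = await baker.bakeVertexData([{ name: "walk", from: 0, to: 100 }]);
* const json = baker.serializeBakedVertexDataToJSON(vertexData);
* const restored = baker.loadBakedVertexDataFromJSON(json);
* const vatTexture = baker.textureFromBakedVertexData(restored);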
* @param json The json string as serialized by serializeBakedVertexDataToJSON(). * @returns The array of matrix transforms for each vertex (columns) and frame (rows), as a Float32Array. */ loadBakedVertexDataFromJSON(e) { return this.loadBakedVertexDataFromObject(JSON.parse(e)); } } class Vte { constructor() { this._zoomStopsAnimation = !1, this._idleRotationSpeed = 0.05, this._idleRotationWaitTime = 2e3, this._idleRotationSpinupTime = 2e3, this.targetAlpha = null, this._isPointerDown = !1, this._lastFrameTime = null, this._lastInteractionTime = -1 / 0, this._cameraRotationSpeed = 0, this._lastFrameRadius = 0; } /** * Gets the name of the behavior. */ get name() { return "AutoRotation"; } /** * Sets the flag that indicates if user zooming should stop animation. */ set zoomStopsAnimation(e) { this._zoomStopsAnimation = e; } /** * Gets the flag that indicates if user zooming should stop animation. */ get zoomStopsAnimation() { return this._zoomStopsAnimation; } /** * Sets the default speed at which the camera rotates around the model. */ set idleRotationSpeed(e) { this._idleRotationSpeed = e; } /** * Gets the default speed at which the camera rotates around the model. */ get idleRotationSpeed() { return this._idleRotationSpeed; } /** * Sets the time (in milliseconds) to wait after user interaction before the camera starts rotating. */ set idleRotationWaitTime(e) { this._idleRotationWaitTime = e; } /** * Gets the time (milliseconds) to wait after user interaction before the camera starts rotating. */ get idleRotationWaitTime() { return this._idleRotationWaitTime; } /** * Sets the time (milliseconds) to take to spin up to the full idle rotation speed. */ set idleRotationSpinupTime(e) { this._idleRotationSpinupTime = e; } /** * Gets the time (milliseconds) to take to spin up to the full idle rotation speed. */ get idleRotationSpinupTime() { return this._idleRotationSpinupTime; } /** * Gets a value indicating if the camera is currently rotating because of this behavior */ get rotationInProgress() { return Math.abs(this._cameraRotationSpeed) > 0; } /** * Initializes the behavior. */ init() { } /** * Attaches the behavior to its arc rotate camera. * @param camera Defines the camera to attach the behavior to */ attach(e) { this._attachedCamera = e; const t = this._attachedCamera.getScene(); this._onPrePointerObservableObserver = t.onPrePointerObservable.add((i) => { if (i.type === si.POINTERDOWN) { this._isPointerDown = !0; return; } i.type === si.POINTERUP && (this._isPointerDown = !1); }), this._onAfterCheckInputsObserver = e.onAfterCheckInputsObservable.add(() => { if (this._reachTargetAlpha()) return; const i = Gs.Now; let r = 0; this._lastFrameTime != null && (r = i - this._lastFrameTime), this._lastFrameTime = i, this._applyUserInteraction(); const s = i - this._lastInteractionTime - this._idleRotationWaitTime, n = Math.max(Math.min(s / this._idleRotationSpinupTime, 1), 0); this._cameraRotationSpeed = this._idleRotationSpeed * n, this._attachedCamera && (this._attachedCamera.alpha -= this._cameraRotationSpeed * (r / 1e3)); }); } /** * Detaches the behavior from its current arc rotate camera. 
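* Typical attach/detach sketch, assuming `camera` is an existing ArcRotateCamera and that
* AutoRotationBehavior is the un-minified name of this class:
* @example
* const autoRotate = new AutoRotationBehavior();
* autoRotate.idleRotationSpeed = 0.1;
* autoRotate.idleRotationWaitTime = 3000;
* autoRotate.attach(camera);
* // later, to stop the idle rotation:
* autoRotate.detach();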
*/ detach() { if (!this._attachedCamera) return; const e = this._attachedCamera.getScene(); this._onPrePointerObservableObserver && e.onPrePointerObservable.remove(this._onPrePointerObservableObserver), this._attachedCamera.onAfterCheckInputsObservable.remove(this._onAfterCheckInputsObserver), this._attachedCamera = null; } /** * Force-reset the last interaction time * @param customTime an optional time that will be used instead of the current last interaction time. For example `Date.now()` */ resetLastInteractionTime(e) { this._lastInteractionTime = e ?? Gs.Now; } /** * Returns true if camera alpha reaches the target alpha * @returns true if camera alpha reaches the target alpha */ _reachTargetAlpha() { return this._attachedCamera && this.targetAlpha ? Math.abs(this._attachedCamera.alpha - this.targetAlpha) < Sr : !1; } /** * Returns true if user is scrolling. * @returns true if user is scrolling. */ _userIsZooming() { return this._attachedCamera ? this._attachedCamera.inertialRadiusOffset !== 0 : !1; } _shouldAnimationStopForInteraction() { if (!this._attachedCamera) return !1; let e = !1; return this._lastFrameRadius === this._attachedCamera.radius && this._attachedCamera.inertialRadiusOffset !== 0 && (e = !0), this._lastFrameRadius = this._attachedCamera.radius, this._zoomStopsAnimation ? e : this._userIsZooming(); } /** * Applies any current user interaction to the camera. Takes into account maximum alpha rotation. */ _applyUserInteraction() { this._userIsMoving() && !this._shouldAnimationStopForInteraction() && (this._lastInteractionTime = Gs.Now); } // Tools _userIsMoving() { return this._attachedCamera ? this._attachedCamera.inertialAlphaOffset !== 0 || this._attachedCamera.inertialBetaOffset !== 0 || this._attachedCamera.inertialRadiusOffset !== 0 || this._attachedCamera.inertialPanningX !== 0 || this._attachedCamera.inertialPanningY !== 0 || this._isPointerDown : !1; } } class a5 { constructor() { this.transitionDuration = 450, this.lowerRadiusTransitionRange = 2, this.upperRadiusTransitionRange = -2, this._autoTransitionRange = !1, this._radiusIsAnimating = !1, this._radiusBounceTransition = null, this._animatables = new Array(); } /** * Gets the name of the behavior. */ get name() { return "Bouncing"; } /** * Gets a value indicating if the lowerRadiusTransitionRange and upperRadiusTransitionRange are defined automatically */ get autoTransitionRange() { return this._autoTransitionRange; } /** * Sets a value indicating if the lowerRadiusTransitionRange and upperRadiusTransitionRange are defined automatically * Transition ranges will be set to 5% of the bounding box diagonal in world space */ set autoTransitionRange(e) { if (this._autoTransitionRange === e) return; this._autoTransitionRange = e; const t = this._attachedCamera; t && (e ? this._onMeshTargetChangedObserver = t.onMeshTargetChangedObservable.add((i) => { if (!i) return; i.computeWorldMatrix(!0); const r = i.getBoundingInfo().diagonalLength; this.lowerRadiusTransitionRange = r * 0.05, this.upperRadiusTransitionRange = r * 0.05; }) : this._onMeshTargetChangedObserver && t.onMeshTargetChangedObservable.remove(this._onMeshTargetChangedObserver)); } /** * Initializes the behavior. */ init() { } /** * Attaches the behavior to its arc rotate camera. 
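* Sketch, assuming `camera` is an ArcRotateCamera with lower/upper radius limits set and that
* BouncingBehavior is the un-minified name of this class:
* @example
* const bouncing = new BouncingBehavior();
* bouncing.autoTransitionRange = true; // derive transition ranges from the target mesh bounding box
* bouncing.attach(camera);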
* @param camera Defines the camera to attach the behavior to */ attach(e) { this._attachedCamera = e, this._onAfterCheckInputsObserver = e.onAfterCheckInputsObservable.add(() => { this._attachedCamera && (this._isRadiusAtLimit(this._attachedCamera.lowerRadiusLimit) && this._applyBoundRadiusAnimation(this.lowerRadiusTransitionRange), this._isRadiusAtLimit(this._attachedCamera.upperRadiusLimit) && this._applyBoundRadiusAnimation(this.upperRadiusTransitionRange)); }); } /** * Detaches the behavior from its current arc rotate camera. */ detach() { this._attachedCamera && (this._onAfterCheckInputsObserver && this._attachedCamera.onAfterCheckInputsObservable.remove(this._onAfterCheckInputsObserver), this._onMeshTargetChangedObserver && this._attachedCamera.onMeshTargetChangedObservable.remove(this._onMeshTargetChangedObserver), this._attachedCamera = null); } /** * Checks if the camera radius is at the specified limit. Takes into account animation locks. * @param radiusLimit The limit to check against. * @returns Bool to indicate if at limit. */ _isRadiusAtLimit(e) { return this._attachedCamera ? this._attachedCamera.radius === e && !this._radiusIsAnimating : !1; } /** * Applies an animation to the radius of the camera, extending by the radiusDelta. * @param radiusDelta The delta by which to animate to. Can be negative. */ _applyBoundRadiusAnimation(e) { if (!this._attachedCamera) return; this._radiusBounceTransition || (a5.EasingFunction.setEasingMode(a5.EasingMode), this._radiusBounceTransition = nt.CreateAnimation("radius", nt.ANIMATIONTYPE_FLOAT, 60, a5.EasingFunction)), this._cachedWheelPrecision = this._attachedCamera.wheelPrecision, this._attachedCamera.wheelPrecision = 1 / 0, this._attachedCamera.inertialRadiusOffset = 0, this.stopAllAnimations(), this._radiusIsAnimating = !0; const t = nt.TransitionTo("radius", this._attachedCamera.radius + e, this._attachedCamera, this._attachedCamera.getScene(), 60, this._radiusBounceTransition, this.transitionDuration, () => this._clearAnimationLocks()); t && this._animatables.push(t); } /** * Removes all animation locks. Allows new animations to be added to any of the camera properties. */ _clearAnimationLocks() { this._radiusIsAnimating = !1, this._attachedCamera && (this._attachedCamera.wheelPrecision = this._cachedWheelPrecision); } /** * Stops and removes all animations that have been applied to the camera */ stopAllAnimations() { for (this._attachedCamera && (this._attachedCamera.animations = []); this._animatables.length; ) this._animatables[0].onAnimationEnd = null, this._animatables[0].stop(), this._animatables.shift(); } } a5.EasingFunction = new yte(0.3); a5.EasingMode = hl.EASINGMODE_EASEOUT; class I1 { constructor() { this.onTargetFramingAnimationEndObservable = new Fe(), this._mode = I1.FitFrustumSidesMode, this._radiusScale = 1, this._positionScale = 0.5, this._defaultElevation = 0.3, this._elevationReturnTime = 1500, this._elevationReturnWaitTime = 1e3, this._zoomStopsAnimation = !1, this._framingTime = 1500, this.autoCorrectCameraLimitsAndSensibility = !0, this._isPointerDown = !1, this._lastInteractionTime = -1 / 0, this._animatables = new Array(), this._betaIsAnimating = !1; } /** * Gets the name of the behavior. */ get name() { return "Framing"; } /** * Sets the current mode used by the behavior */ set mode(e) { this._mode = e; } /** * Gets current mode used by the behavior. 
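* The two supported values are FramingBehavior.FitFrustumSidesMode (the default) and
* FramingBehavior.IgnoreBoundsSizeMode, assuming FramingBehavior is the un-minified name of this class
* and `framingBehavior` is an existing instance:
* @example
* framingBehavior.mode = FramingBehavior.IgnoreBoundsSizeMode;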
*/ get mode() { return this._mode; } /** * Sets the scale applied to the radius (1 by default) */ set radiusScale(e) { this._radiusScale = e; } /** * Gets the scale applied to the radius */ get radiusScale() { return this._radiusScale; } /** * Sets the scale to apply on Y axis to position camera focus. 0.5 by default which means the center of the bounding box. */ set positionScale(e) { this._positionScale = e; } /** * Gets the scale to apply on Y axis to position camera focus. 0.5 by default which means the center of the bounding box. */ get positionScale() { return this._positionScale; } /** * Sets the angle above/below the horizontal plane to return to when the return to default elevation idle * behaviour is triggered, in radians. */ set defaultElevation(e) { this._defaultElevation = e; } /** * Gets the angle above/below the horizontal plane to return to when the return to default elevation idle * behaviour is triggered, in radians. */ get defaultElevation() { return this._defaultElevation; } /** * Sets the time (in milliseconds) taken to return to the default beta position. * Negative value indicates camera should not return to default. */ set elevationReturnTime(e) { this._elevationReturnTime = e; } /** * Gets the time (in milliseconds) taken to return to the default beta position. * Negative value indicates camera should not return to default. */ get elevationReturnTime() { return this._elevationReturnTime; } /** * Sets the delay (in milliseconds) taken before the camera returns to the default beta position. */ set elevationReturnWaitTime(e) { this._elevationReturnWaitTime = e; } /** * Gets the delay (in milliseconds) taken before the camera returns to the default beta position. */ get elevationReturnWaitTime() { return this._elevationReturnWaitTime; } /** * Sets the flag that indicates if user zooming should stop animation. */ set zoomStopsAnimation(e) { this._zoomStopsAnimation = e; } /** * Gets the flag that indicates if user zooming should stop animation. */ get zoomStopsAnimation() { return this._zoomStopsAnimation; } /** * Sets the transition time when framing the mesh, in milliseconds */ set framingTime(e) { this._framingTime = e; } /** * Gets the transition time when framing the mesh, in milliseconds */ get framingTime() { return this._framingTime; } /** * Initializes the behavior. */ init() { } /** * Attaches the behavior to its arc rotate camera. * @param camera Defines the camera to attach the behavior to */ attach(e) { this._attachedCamera = e; const t = this._attachedCamera.getScene(); I1.EasingFunction.setEasingMode(I1.EasingMode), this._onPrePointerObservableObserver = t.onPrePointerObservable.add((i) => { if (i.type === si.POINTERDOWN) { this._isPointerDown = !0; return; } i.type === si.POINTERUP && (this._isPointerDown = !1); }), this._onMeshTargetChangedObserver = e.onMeshTargetChangedObservable.add((i) => { i && this.zoomOnMesh(i, void 0, () => { this.onTargetFramingAnimationEndObservable.notifyObservers(); }); }), this._onAfterCheckInputsObserver = e.onAfterCheckInputsObservable.add(() => { this._applyUserInteraction(), this._maintainCameraAboveGround(); }); } /** * Detaches the behavior from its current arc rotate camera. 
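* Typical usage sketch, assuming `camera` is an ArcRotateCamera and `mesh` is the mesh to frame:
* @example
* const framing = new FramingBehavior();
* framing.framingTime = 1000;
* framing.attach(camera);
* framing.zoomOnMesh(mesh, true, () => console.log("framing finished"));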
*/ detach() { if (!this._attachedCamera) return; const e = this._attachedCamera.getScene(); this._onPrePointerObservableObserver && e.onPrePointerObservable.remove(this._onPrePointerObservableObserver), this._onAfterCheckInputsObserver && this._attachedCamera.onAfterCheckInputsObservable.remove(this._onAfterCheckInputsObserver), this._onMeshTargetChangedObserver && this._attachedCamera.onMeshTargetChangedObservable.remove(this._onMeshTargetChangedObserver), this._attachedCamera = null; } /** * Targets the given mesh and updates zoom level accordingly. * @param mesh The mesh to target. * @param focusOnOriginXZ Determines if the camera should focus on 0 in the X and Z axis instead of the mesh * @param onAnimationEnd Callback triggered at the end of the framing animation */ zoomOnMesh(e, t = !1, i = null) { e.computeWorldMatrix(!0); const r = e.getBoundingInfo().boundingBox; this.zoomOnBoundingInfo(r.minimumWorld, r.maximumWorld, t, i); } /** * Targets the given mesh with its children and updates zoom level accordingly. * @param mesh The mesh to target. * @param focusOnOriginXZ Determines if the camera should focus on 0 in the X and Z axis instead of the mesh * @param onAnimationEnd Callback triggered at the end of the framing animation */ zoomOnMeshHierarchy(e, t = !1, i = null) { e.computeWorldMatrix(!0); const r = e.getHierarchyBoundingVectors(!0); this.zoomOnBoundingInfo(r.min, r.max, t, i); } /** * Targets the given meshes with their children and updates zoom level accordingly. * @param meshes The mesh to target. * @param focusOnOriginXZ Determines if the camera should focus on 0 in the X and Z axis instead of the mesh * @param onAnimationEnd Callback triggered at the end of the framing animation */ zoomOnMeshesHierarchy(e, t = !1, i = null) { const r = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), s = new D(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); for (let n = 0; n < e.length; n++) { const a = e[n].getHierarchyBoundingVectors(!0); D.CheckExtends(a.min, r, s), D.CheckExtends(a.max, r, s); } this.zoomOnBoundingInfo(r, s, t, i); } /** * Targets the bounding box info defined by its extends and updates zoom level accordingly. 
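* Sketch working from raw world-space bounds, assuming `min` and `max` are Vector3 extremes already
* computed (for example from getHierarchyBoundingVectors) and `framing` is an attached instance:
* @example
* framing.zoomOnBoundingInfo(min, max, false, () => console.log("done"));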
* @param minimumWorld Determines the smaller position of the bounding box extend * @param maximumWorld Determines the bigger position of the bounding box extend * @param focusOnOriginXZ Determines if the camera should focus on 0 in the X and Z axis instead of the mesh * @param onAnimationEnd Callback triggered at the end of the framing animation * @returns true if the zoom was done */ zoomOnBoundingInfo(e, t, i = !1, r = null) { let s; if (!this._attachedCamera) return !1; const n = e.y, a = t.y, l = n + (a - n) * this._positionScale, o = t.subtract(e).scale(0.5); if (i) s = new D(0, l, 0); else { const d = e.add(o); s = new D(d.x, l, d.z); } this._vectorTransition || (this._vectorTransition = nt.CreateAnimation("target", nt.ANIMATIONTYPE_VECTOR3, 60, I1.EasingFunction)), this._betaIsAnimating = !0; let u = nt.TransitionTo("target", s, this._attachedCamera, this._attachedCamera.getScene(), 60, this._vectorTransition, this._framingTime); u && this._animatables.push(u); let h = 0; if (this._mode === I1.FitFrustumSidesMode) { const d = this._calculateLowerRadiusFromModelBoundingSphere(e, t); this.autoCorrectCameraLimitsAndSensibility && (this._attachedCamera.lowerRadiusLimit = o.length() + this._attachedCamera.minZ), h = d; } else this._mode === I1.IgnoreBoundsSizeMode && (h = this._calculateLowerRadiusFromModelBoundingSphere(e, t), this.autoCorrectCameraLimitsAndSensibility && this._attachedCamera.lowerRadiusLimit === null && (this._attachedCamera.lowerRadiusLimit = this._attachedCamera.minZ)); if (this.autoCorrectCameraLimitsAndSensibility) { const d = t.subtract(e).length(); this._attachedCamera.panningSensibility = 5e3 / d, this._attachedCamera.wheelPrecision = 100 / h; } return this._radiusTransition || (this._radiusTransition = nt.CreateAnimation("radius", nt.ANIMATIONTYPE_FLOAT, 60, I1.EasingFunction)), u = nt.TransitionTo("radius", h, this._attachedCamera, this._attachedCamera.getScene(), 60, this._radiusTransition, this._framingTime, () => { this.stopAllAnimations(), r && r(), this._attachedCamera && this._attachedCamera.useInputToRestoreState && this._attachedCamera.storeState(); }), u && this._animatables.push(u), !0; } /** * Calculates the lowest radius for the camera based on the bounding box of the mesh. * @param minimumWorld * @param maximumWorld * @returns The minimum distance from the primary mesh's center point at which the camera must be kept in order * to fully enclose the mesh in the viewing frustum. */ _calculateLowerRadiusFromModelBoundingSphere(e, t) { const i = this._attachedCamera; if (!i) return 0; let r = i._calculateLowerRadiusFromModelBoundingSphere(e, t, this._radiusScale); return i.lowerRadiusLimit && this._mode === I1.IgnoreBoundsSizeMode && (r = r < i.lowerRadiusLimit ? i.lowerRadiusLimit : r), i.upperRadiusLimit && (r = r > i.upperRadiusLimit ? i.upperRadiusLimit : r), r; } /** * Keeps the camera above the ground plane. If the user pulls the camera below the ground plane, the camera * is automatically returned to its default position (expected to be above ground plane). 
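* Setting elevationReturnTime to a negative value disables this return-to-default-elevation behaviour,
* e.g. on a `framing` instance:
* @example
* framing.elevationReturnTime = -1;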
*/ _maintainCameraAboveGround() { if (this._elevationReturnTime < 0) return; const e = Gs.Now - this._lastInteractionTime, t = Math.PI * 0.5 - this._defaultElevation, i = Math.PI * 0.5; if (this._attachedCamera && !this._betaIsAnimating && this._attachedCamera.beta > i && e >= this._elevationReturnWaitTime) { this._betaIsAnimating = !0, this.stopAllAnimations(), this._betaTransition || (this._betaTransition = nt.CreateAnimation("beta", nt.ANIMATIONTYPE_FLOAT, 60, I1.EasingFunction)); const r = nt.TransitionTo("beta", t, this._attachedCamera, this._attachedCamera.getScene(), 60, this._betaTransition, this._elevationReturnTime, () => { this._clearAnimationLocks(), this.stopAllAnimations(); }); r && this._animatables.push(r); } } /** * Removes all animation locks. Allows new animations to be added to any of the arcCamera properties. */ _clearAnimationLocks() { this._betaIsAnimating = !1; } /** * Applies any current user interaction to the camera. Takes into account maximum alpha rotation. */ _applyUserInteraction() { this.isUserIsMoving && (this._lastInteractionTime = Gs.Now, this.stopAllAnimations(), this._clearAnimationLocks()); } /** * Stops and removes all animations that have been applied to the camera */ stopAllAnimations() { for (this._attachedCamera && (this._attachedCamera.animations = []); this._animatables.length; ) this._animatables[0] && (this._animatables[0].onAnimationEnd = null, this._animatables[0].stop()), this._animatables.shift(); } /** * Gets a value indicating if the user is moving the camera */ get isUserIsMoving() { return this._attachedCamera ? this._attachedCamera.inertialAlphaOffset !== 0 || this._attachedCamera.inertialBetaOffset !== 0 || this._attachedCamera.inertialRadiusOffset !== 0 || this._attachedCamera.inertialPanningX !== 0 || this._attachedCamera.inertialPanningY !== 0 || this._isPointerDown : !1; } } I1.EasingFunction = new Cte(); I1.EasingMode = hl.EASINGMODE_EASEINOUT; I1.IgnoreBoundsSizeMode = 0; I1.FitFrustumSidesMode = 1; class kD { constructor(e, t = new D(), i = 0, r = !1) { this.direction = e, this.rotatedDirection = t, this.diff = i, this.ignore = r; } } class Sce { /** * Creates the AttachToBoxBehavior, used to attach UI to the closest face of the box to a camera * @param _ui The transform node that should be attached to the mesh */ constructor(e) { this._ui = e, this.name = "AttachToBoxBehavior", this.distanceAwayFromFace = 0.15, this.distanceAwayFromBottomOfFace = 0.15, this._faceVectors = [ new kD(D.Up()), new kD(D.Down()), new kD(D.Left()), new kD(D.Right()), new kD(D.Forward()), new kD(D.Forward().scaleInPlace(-1)) ], this._tmpMatrix = new Ae(), this._tmpVector = new D(), this._zeroVector = D.Zero(), this._lookAtTmpMatrix = new Ae(); } /** * Initializes the behavior */ init() { } _closestFace(e) { return this._faceVectors.forEach((t) => { this._target.rotationQuaternion || (this._target.rotationQuaternion = Ze.RotationYawPitchRoll(this._target.rotation.y, this._target.rotation.x, this._target.rotation.z)), this._target.rotationQuaternion.toRotationMatrix(this._tmpMatrix), D.TransformCoordinatesToRef(t.direction, this._tmpMatrix, t.rotatedDirection), t.diff = D.GetAngleBetweenVectors(t.rotatedDirection, e, D.Cross(t.rotatedDirection, e)); }), this._faceVectors.reduce((t, i) => t.ignore ? i : i.ignore || t.diff < i.diff ? 
t : i, this._faceVectors[0]); } _lookAtToRef(e, t = new D(0, 1, 0), i) { Ae.LookAtLHToRef(this._zeroVector, e, t, this._lookAtTmpMatrix), this._lookAtTmpMatrix.invert(), Ze.FromRotationMatrixToRef(this._lookAtTmpMatrix, i); } /** * Attaches the AttachToBoxBehavior to the passed in mesh * @param target The mesh that the specified node will be attached to */ attach(e) { this._target = e, this._scene = this._target.getScene(), this._onRenderObserver = this._scene.onBeforeRenderObservable.add(() => { if (!this._scene.activeCamera) return; let t = this._scene.activeCamera.position; this._scene.activeCamera.devicePosition && (t = this._scene.activeCamera.devicePosition); const i = this._closestFace(t.subtract(e.position)); this._scene.activeCamera.leftCamera ? this._scene.activeCamera.leftCamera.computeWorldMatrix().getRotationMatrixToRef(this._tmpMatrix) : this._scene.activeCamera.computeWorldMatrix().getRotationMatrixToRef(this._tmpMatrix), D.TransformCoordinatesToRef(D.Up(), this._tmpMatrix, this._tmpVector), this._faceVectors.forEach((s) => { i.direction.x && s.direction.x && (s.ignore = !0), i.direction.y && s.direction.y && (s.ignore = !0), i.direction.z && s.direction.z && (s.ignore = !0); }); const r = this._closestFace(this._tmpVector); this._faceVectors.forEach((s) => { s.ignore = !1; }), this._ui.position.copyFrom(e.position), i.direction.x && (i.rotatedDirection.scaleToRef(e.scaling.x / 2 + this.distanceAwayFromFace, this._tmpVector), this._ui.position.addInPlace(this._tmpVector)), i.direction.y && (i.rotatedDirection.scaleToRef(e.scaling.y / 2 + this.distanceAwayFromFace, this._tmpVector), this._ui.position.addInPlace(this._tmpVector)), i.direction.z && (i.rotatedDirection.scaleToRef(e.scaling.z / 2 + this.distanceAwayFromFace, this._tmpVector), this._ui.position.addInPlace(this._tmpVector)), this._ui.rotationQuaternion || (this._ui.rotationQuaternion = Ze.RotationYawPitchRoll(this._ui.rotation.y, this._ui.rotation.x, this._ui.rotation.z)), i.rotatedDirection.scaleToRef(-1, this._tmpVector), this._lookAtToRef(this._tmpVector, r.rotatedDirection, this._ui.rotationQuaternion), r.direction.x && this._ui.up.scaleToRef(this.distanceAwayFromBottomOfFace - e.scaling.x / 2, this._tmpVector), r.direction.y && this._ui.up.scaleToRef(this.distanceAwayFromBottomOfFace - e.scaling.y / 2, this._tmpVector), r.direction.z && this._ui.up.scaleToRef(this.distanceAwayFromBottomOfFace - e.scaling.z / 2, this._tmpVector), this._ui.position.addInPlace(this._tmpVector); }); } /** * Detaches the behavior from the mesh */ detach() { this._scene.onBeforeRenderObservable.remove(this._onRenderObserver); } } class Mce { /** * Time in milliseconds to delay before fading in (Default: 0) * Will set both fade in and out delay to the same value */ get delay() { return this.fadeInDelay; } set delay(e) { this.fadeInDelay = e, this.fadeOutDelay = e; } /** * Instantiates the FadeInOutBehavior */ constructor() { this.fadeInDelay = 0, this.fadeOutDelay = 0, this.fadeInTime = 300, this.fadeOutTime = 300, this._millisecondsPerFrame = 1e3 / 60, this._hovered = !1, this._hoverValue = 0, this._ownerNode = null, this._delay = 0, this._time = 300, this._update = () => { if (this._ownerNode) { if (this._hoverValue += this._hovered ? 
this._millisecondsPerFrame : -this._millisecondsPerFrame, this._setAllVisibility(this._ownerNode, (this._hoverValue - this._delay) / this._time), this._ownerNode.visibility > 1) { if (this._setAllVisibility(this._ownerNode, 1), this._hoverValue > this._time) { this._hoverValue = this._time, this._detachObserver(); return; } } else if (this._ownerNode.visibility < 0 && (this._setAllVisibility(this._ownerNode, 0), this._hoverValue < 0)) { this._hoverValue = 0, this._detachObserver(); return; } this._attachObserver(); } }; } /** * The name of the behavior */ get name() { return "FadeInOut"; } /** * Initializes the behavior */ init() { } /** * Attaches the fade behavior on the passed in mesh * @param ownerNode The mesh that will be faded in/out once attached */ attach(e) { this._ownerNode = e, this._setAllVisibility(this._ownerNode, 0); } /** * Detaches the behavior from the mesh */ detach() { this._ownerNode = null; } /** * Triggers the mesh to begin fading in (or out) * @param fadeIn if the object should fade in or out (true to fade in) */ fadeIn(e = !0) { this._delay = e ? this.fadeInDelay : this.fadeOutDelay, this._time = e ? this.fadeInTime : this.fadeOutTime, this._detachObserver(), !(this._ownerNode && (e && this._ownerNode.visibility >= 1 || !e && this._ownerNode.visibility <= 0)) && (this._hovered = e, this._hovered || (this._delay *= -1), this._ownerNode.visibility >= 1 ? this._hoverValue = this._time : this._ownerNode.visibility <= 0 && (this._hoverValue = 0), this._update()); } /** * Triggers the mesh to begin fading out */ fadeOut() { this.fadeIn(!1); } _setAllVisibility(e, t) { e.visibility = t, e.getChildMeshes().forEach((i) => { this._setAllVisibility(i, t); }); } _attachObserver() { var e; this._onBeforeRenderObserver || (this._onBeforeRenderObserver = (e = this._ownerNode) === null || e === void 0 ? void 0 : e.getScene().onBeforeRenderObservable.add(this._update)); } _detachObserver() { var e; this._onBeforeRenderObserver && ((e = this._ownerNode) === null || e === void 0 || e.getScene().onBeforeRenderObservable.remove(this._onBeforeRenderObserver), this._onBeforeRenderObserver = null); } } class gs { /** * Creates a new ray * @param origin origin point * @param direction direction * @param length length of the ray */ constructor(e, t, i = Number.MAX_VALUE) { this.origin = e, this.direction = t, this.length = i; } // Methods /** * Clone the current ray * @returns a new ray */ clone() { return new gs(this.origin.clone(), this.direction.clone(), this.length); } /** * Checks if the ray intersects a box * This does not account for the ray length by design to improve perfs. 
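* Sketch, assuming Vector3 is the un-minified name of the vector class used throughout: a ray fired
* from the origin along +Z against a unit box centered at (0, 0, 5):
* @example
* const ray = new Ray(Vector3.Zero(), new Vector3(0, 0, 1), 100);
* const hit = ray.intersectsBoxMinMax(new Vector3(-0.5, -0.5, 4.5), new Vector3(0.5, 0.5, 5.5)); // true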
* @param minimum bound of the box * @param maximum bound of the box * @param intersectionTreshold extra extend to be added to the box in all direction * @returns if the box was hit */ intersectsBoxMinMax(e, t, i = 0) { const r = gs._TmpVector3[0].copyFromFloats(e.x - i, e.y - i, e.z - i), s = gs._TmpVector3[1].copyFromFloats(t.x + i, t.y + i, t.z + i); let n = 0, a = Number.MAX_VALUE, l, o, u, h; if (Math.abs(this.direction.x) < 1e-7) { if (this.origin.x < r.x || this.origin.x > s.x) return !1; } else if (l = 1 / this.direction.x, o = (r.x - this.origin.x) * l, u = (s.x - this.origin.x) * l, u === -1 / 0 && (u = 1 / 0), o > u && (h = o, o = u, u = h), n = Math.max(o, n), a = Math.min(u, a), n > a) return !1; if (Math.abs(this.direction.y) < 1e-7) { if (this.origin.y < r.y || this.origin.y > s.y) return !1; } else if (l = 1 / this.direction.y, o = (r.y - this.origin.y) * l, u = (s.y - this.origin.y) * l, u === -1 / 0 && (u = 1 / 0), o > u && (h = o, o = u, u = h), n = Math.max(o, n), a = Math.min(u, a), n > a) return !1; if (Math.abs(this.direction.z) < 1e-7) { if (this.origin.z < r.z || this.origin.z > s.z) return !1; } else if (l = 1 / this.direction.z, o = (r.z - this.origin.z) * l, u = (s.z - this.origin.z) * l, u === -1 / 0 && (u = 1 / 0), o > u && (h = o, o = u, u = h), n = Math.max(o, n), a = Math.min(u, a), n > a) return !1; return !0; } /** * Checks if the ray intersects a box * This does not account for the ray lenght by design to improve perfs. * @param box the bounding box to check * @param intersectionTreshold extra extend to be added to the BoundingBox in all direction * @returns if the box was hit */ intersectsBox(e, t = 0) { return this.intersectsBoxMinMax(e.minimum, e.maximum, t); } /** * If the ray hits a sphere * @param sphere the bounding sphere to check * @param intersectionTreshold extra extend to be added to the BoundingSphere in all direction * @returns true if it hits the sphere */ intersectsSphere(e, t = 0) { const i = e.center.x - this.origin.x, r = e.center.y - this.origin.y, s = e.center.z - this.origin.z, n = i * i + r * r + s * s, a = e.radius + t, l = a * a; if (n <= l) return !0; const o = i * this.direction.x + r * this.direction.y + s * this.direction.z; return o < 0 ? !1 : n - o * o <= l; } /** * If the ray hits a triange * @param vertex0 triangle vertex * @param vertex1 triangle vertex * @param vertex2 triangle vertex * @returns intersection information if hit */ intersectsTriangle(e, t, i) { const r = gs._TmpVector3[0], s = gs._TmpVector3[1], n = gs._TmpVector3[2], a = gs._TmpVector3[3], l = gs._TmpVector3[4]; t.subtractToRef(e, r), i.subtractToRef(e, s), D.CrossToRef(this.direction, s, n); const o = D.Dot(r, n); if (o === 0) return null; const u = 1 / o; this.origin.subtractToRef(e, a); const h = D.Dot(a, n) * u; if (h < 0 || h > 1) return null; D.CrossToRef(a, r, l); const d = D.Dot(this.direction, l) * u; if (d < 0 || h + d > 1) return null; const f = D.Dot(s, l) * u; return f > this.length ? null : new sB(1 - h - d, h, f); } /** * Checks if ray intersects a plane * @param plane the plane to check * @returns the distance away it was hit */ intersectsPlane(e) { let t; const i = D.Dot(e.normal, this.direction); if (Math.abs(i) < 999999997475243e-21) return null; { const r = D.Dot(e.normal, this.origin); return t = (-e.d - r) / i, t < 0 ? t < -999999997475243e-21 ? null : 0 : t; } } /** * Calculate the intercept of a ray on a given axis * @param axis to check 'x' | 'y' | 'z' * @param offset from axis interception (i.e. 
an offset of 1y is intercepted above ground) * @returns a vector containing the coordinates where 'axis' is equal to zero (else offset), or null if there is no intercept. */ intersectsAxis(e, t = 0) { switch (e) { case "y": { const i = (this.origin.y - t) / this.direction.y; return i > 0 ? null : new D(this.origin.x + this.direction.x * -i, t, this.origin.z + this.direction.z * -i); } case "x": { const i = (this.origin.x - t) / this.direction.x; return i > 0 ? null : new D(t, this.origin.y + this.direction.y * -i, this.origin.z + this.direction.z * -i); } case "z": { const i = (this.origin.z - t) / this.direction.z; return i > 0 ? null : new D(this.origin.x + this.direction.x * -i, this.origin.y + this.direction.y * -i, t); } default: return null; } } /** * Checks if ray intersects a mesh. The ray is defined in WORLD space. A mesh triangle can be picked both from its front and back sides, * irrespective of orientation. * @param mesh the mesh to check * @param fastCheck defines if the first intersection will be used (and not the closest) * @param trianglePredicate defines an optional predicate used to select faces when a mesh intersection is detected * @param onlyBoundingInfo defines a boolean indicating if picking should only happen using bounding info (false by default) * @param worldToUse defines the world matrix to use to get the world coordinate of the intersection point * @param skipBoundingInfo a boolean indicating if we should skip the bounding info check * @returns picking info of the intersection */ intersectsMesh(e, t, i, r = !1, s, n = !1) { const a = de.Matrix[0]; return e.getWorldMatrix().invertToRef(a), this._tmpRay ? gs.TransformToRef(this, a, this._tmpRay) : this._tmpRay = gs.Transform(this, a), e.intersects(this._tmpRay, t, i, r, s, n); } /** * Checks if ray intersects a mesh * @param meshes the meshes to check * @param fastCheck defines if the first intersection will be used (and not the closest) * @param results array to store result in * @returns Array of picking infos */ intersectsMeshes(e, t, i) { i ? i.length = 0 : i = []; for (let r = 0; r < e.length; r++) { const s = this.intersectsMesh(e[r], t); s.hit && i.push(s); } return i.sort(this._comparePickingInfo), i; } _comparePickingInfo(e, t) { return e.distance < t.distance ? -1 : e.distance > t.distance ? 1 : 0; } /** * Intersection test between the ray and a given segment within a given tolerance (threshold) * @param sega the first point of the segment to test the intersection against * @param segb the second point of the segment to test the intersection against * @param threshold the tolerance margin, if the ray doesn't intersect the segment but is close to the given threshold, the intersection is successful * @returns the distance from the ray origin to the intersection point if there's intersection, or -1 if there's no intersection */ intersectionSegment(e, t, i) { const r = this.origin, s = de.Vector3[0], n = de.Vector3[1], a = de.Vector3[2], l = de.Vector3[3]; t.subtractToRef(e, s), this.direction.scaleToRef(gs._Rayl, a), r.addToRef(a, n), e.subtractToRef(r, l); const o = D.Dot(s, s), u = D.Dot(s, a), h = D.Dot(a, a), d = D.Dot(s, l), f = D.Dot(a, l), p = o * h - u * u; let m, _ = p, v, C = p; p < gs._Smallnum ? (m = 0, _ = 1, v = f, C = h) : (m = u * f - h * d, v = o * f - u * d, m < 0 ? (m = 0, v = f, C = h) : m > _ && (m = _, v = f + u, C = h)), v < 0 ? (v = 0, -d < 0 ? m = 0 : -d > o ? m = _ : (m = -d, _ = o)) : v > C && (v = C, -d + u < 0 ? m = 0 : -d + u > o ? 
m = _ : (m = -d + u, _ = o)); const x = Math.abs(m) < gs._Smallnum ? 0 : m / _, b = Math.abs(v) < gs._Smallnum ? 0 : v / C, S = de.Vector3[4]; a.scaleToRef(b, S); const M = de.Vector3[5]; s.scaleToRef(x, M), M.addInPlace(l); const R = de.Vector3[6]; return M.subtractToRef(S, R), b > 0 && b <= this.length && R.lengthSquared() < i * i ? M.length() : -1; } /** * Update the ray from viewport position * @param x position * @param y y position * @param viewportWidth viewport width * @param viewportHeight viewport height * @param world world matrix * @param view view matrix * @param projection projection matrix * @param enableDistantPicking defines if picking should handle large values for mesh position/scaling (false by default) * @returns this ray updated */ update(e, t, i, r, s, n, a, l = !1) { if (l) { gs._RayDistant || (gs._RayDistant = gs.Zero()), gs._RayDistant.unprojectRayToRef(e, t, i, r, Ae.IdentityReadOnly, n, a); const o = de.Matrix[0]; s.invertToRef(o), gs.TransformToRef(gs._RayDistant, o, this); } else this.unprojectRayToRef(e, t, i, r, s, n, a); return this; } // Statics /** * Creates a ray with origin and direction of 0,0,0 * @returns the new ray */ static Zero() { return new gs(D.Zero(), D.Zero()); } /** * Creates a new ray from screen space and viewport * @param x position * @param y y position * @param viewportWidth viewport width * @param viewportHeight viewport height * @param world world matrix * @param view view matrix * @param projection projection matrix * @returns new ray */ static CreateNew(e, t, i, r, s, n, a) { return gs.Zero().update(e, t, i, r, s, n, a); } /** * Function will create a new transformed ray starting from origin and ending at the end point. Ray's length will be set, and ray will be * transformed to the given world matrix. * @param origin The origin point * @param end The end point * @param world a matrix to transform the ray to. Default is the identity matrix. 
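* @example
* // Illustrative sketch only (assumes the public Babylon.js names Ray and Vector3 for the minified gs and D here,
* // as exposed on the BABYLON namespace of a standard build); someMesh is a hypothetical mesh in the scene.
* const from = new BABYLON.Vector3(0, 1, 0);
* const to = new BABYLON.Vector3(0, 1, 10);
* const ray = BABYLON.Ray.CreateNewFromTo(from, to); // length becomes the distance between the points (10), direction is normalized
* const pick = ray.intersectsMesh(someMesh);
* if (pick.hit) { console.log(pick.distance); }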
* @returns the new ray */ static CreateNewFromTo(e, t, i = Ae.IdentityReadOnly) { const r = t.subtract(e), s = Math.sqrt(r.x * r.x + r.y * r.y + r.z * r.z); return r.normalize(), gs.Transform(new gs(e, r, s), i); } /** * Transforms a ray by a matrix * @param ray ray to transform * @param matrix matrix to apply * @returns the resulting new ray */ static Transform(e, t) { const i = new gs(new D(0, 0, 0), new D(0, 0, 0)); return gs.TransformToRef(e, t, i), i; } /** * Transforms a ray by a matrix * @param ray ray to transform * @param matrix matrix to apply * @param result ray to store result in */ static TransformToRef(e, t, i) { D.TransformCoordinatesToRef(e.origin, t, i.origin), D.TransformNormalToRef(e.direction, t, i.direction), i.length = e.length; const r = i.direction, s = r.length(); if (!(s === 0 || s === 1)) { const n = 1 / s; r.x *= n, r.y *= n, r.z *= n, i.length *= s; } } /** * Unproject a ray from screen space to object space * @param sourceX defines the screen space x coordinate to use * @param sourceY defines the screen space y coordinate to use * @param viewportWidth defines the current width of the viewport * @param viewportHeight defines the current height of the viewport * @param world defines the world matrix to use (can be set to Identity to go to world space) * @param view defines the view matrix to use * @param projection defines the projection matrix to use */ unprojectRayToRef(e, t, i, r, s, n, a) { const l = de.Matrix[0]; s.multiplyToRef(n, l), l.multiplyToRef(a, l), l.invert(); const o = gi.LastCreatedEngine, u = de.Vector3[0]; u.x = e / i * 2 - 1, u.y = -(t / r * 2 - 1), u.z = o != null && o.useReverseDepthBuffer ? 1 : o != null && o.isNDCHalfZRange ? 0 : -1; const h = de.Vector3[1].copyFromFloats(u.x, u.y, 1 - 1e-8), d = de.Vector3[2], f = de.Vector3[3]; D._UnprojectFromInvertedMatrixToRef(u, l, d), D._UnprojectFromInvertedMatrixToRef(h, l, f), this.origin.copyFrom(d), f.subtractToRef(d, this.direction), this.direction.normalize(); } } gs._TmpVector3 = kc.BuildArray(6, D.Zero); gs._RayDistant = gs.Zero(); gs._Smallnum = 1e-8; gs._Rayl = 1e9; ii.prototype.createPickingRay = function(c, e, t, i, r = !1) { const s = gs.Zero(); return this.createPickingRayToRef(c, e, t, s, i, r), s; }; ii.prototype.createPickingRayToRef = function(c, e, t, i, r, s = !1, n = !1) { const a = this.getEngine(); if (!r && !(r = this.activeCamera)) return this; const l = r.viewport, o = a.getRenderHeight(), { x: u, y: h, width: d, height: f } = l.toGlobal(a.getRenderWidth(), o), p = 1 / a.getHardwareScalingLevel(); return c = c * p - u, e = e * p - (o - h - f), i.update(c, e, d, f, t || Ae.IdentityReadOnly, s ? 
Ae.IdentityReadOnly : r.getViewMatrix(), r.getProjectionMatrix(), n), this; }; ii.prototype.createPickingRayInCameraSpace = function(c, e, t) { const i = gs.Zero(); return this.createPickingRayInCameraSpaceToRef(c, e, i, t), i; }; ii.prototype.createPickingRayInCameraSpaceToRef = function(c, e, t, i) { if (!ku) return this; const r = this.getEngine(); if (!i && !(i = this.activeCamera)) throw new Error("Active camera not set"); const s = i.viewport, n = r.getRenderHeight(), { x: a, y: l, width: o, height: u } = s.toGlobal(r.getRenderWidth(), n), h = Ae.Identity(), d = 1 / r.getHardwareScalingLevel(); return c = c * d - a, e = e * d - (n - l - u), t.update(c, e, o, u, h, h, i.getProjectionMatrix()), this; }; ii.prototype._internalPickForMesh = function(c, e, t, i, r, s, n, a) { const l = e(i, t.enableDistantPicking), o = t.intersects(l, r, n, s, i, a); return !o || !o.hit || !r && c != null && o.distance >= c.distance ? null : o; }; ii.prototype._internalPick = function(c, e, t, i, r) { let s = null; const n = !!(this.activeCameras && this.activeCameras.length > 1 && this.cameraToUseForPointers !== this.activeCamera), a = this.cameraToUseForPointers || this.activeCamera; for (let l = 0; l < this.meshes.length; l++) { const o = this.meshes[l]; if (e) { if (!e(o)) continue; } else if (!o.isEnabled() || !o.isVisible || !o.isPickable) continue; const u = n && o.isWorldMatrixCameraDependent(), h = o.computeWorldMatrix(u, a); if (o.hasThinInstances && o.thinInstanceEnablePicking) { const d = this._internalPickForMesh(s, c, o, h, !0, !0, r); if (d) { if (i) return d; const f = de.Matrix[1], p = o.thinInstanceGetWorldMatrices(); for (let m = 0; m < p.length; m++) { p[m].multiplyToRef(h, f); const v = this._internalPickForMesh(s, c, o, f, t, i, r, !0); if (v && (s = v, s.thinInstanceIndex = m, t)) return s; } } } else { const d = this._internalPickForMesh(s, c, o, h, t, i, r); if (d && (s = d, t)) return s; } } return s || new ku(); }; ii.prototype._internalMultiPick = function(c, e, t) { if (!ku) return null; const i = [], r = !!(this.activeCameras && this.activeCameras.length > 1 && this.cameraToUseForPointers !== this.activeCamera), s = this.cameraToUseForPointers || this.activeCamera; for (let n = 0; n < this.meshes.length; n++) { const a = this.meshes[n]; if (e) { if (!e(a)) continue; } else if (!a.isEnabled() || !a.isVisible || !a.isPickable) continue; const l = r && a.isWorldMatrixCameraDependent(), o = a.computeWorldMatrix(l, s); if (a.hasThinInstances && a.thinInstanceEnablePicking) { if (this._internalPickForMesh(null, c, a, o, !0, !0, t)) { const h = de.Matrix[1], d = a.thinInstanceGetWorldMatrices(); for (let f = 0; f < d.length; f++) { d[f].multiplyToRef(o, h); const m = this._internalPickForMesh(null, c, a, h, !1, !1, t, !0); m && (m.thinInstanceIndex = f, i.push(m)); } } } else { const u = this._internalPickForMesh(null, c, a, o, !1, !1, t); u && i.push(u); } } return i; }; ii.prototype.pickWithBoundingInfo = function(c, e, t, i, r) { if (!ku) return null; const s = this._internalPick((n) => (this._tempPickingRay || (this._tempPickingRay = gs.Zero()), this.createPickingRayToRef(c, e, n, this._tempPickingRay, r || null), this._tempPickingRay), t, i, !0); return s && (s.ray = this.createPickingRay(c, e, Ae.Identity(), r || null)), s; }; Object.defineProperty(ii.prototype, "_pickingAvailable", { get: () => !0, enumerable: !1, configurable: !1 }); ii.prototype.pick = function(c, e, t, i, r, s, n = !1) { const a = this._internalPick((l, o) => (this._tempPickingRay || (this._tempPickingRay 
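/* Usage sketch, illustrative only: the minified ii.prototype.pick / ii.prototype.pickWithRay defined here are
   assumed to correspond to the public Babylon.js Scene.pick / Scene.pickWithRay API. `scene` below is a
   hypothetical Scene instance and the predicate simply filters candidate meshes:
     const hit = scene.pick(scene.pointerX, scene.pointerY, (mesh) => mesh.isPickable);
     if (hit && hit.hit) {
       console.log(hit.pickedMesh && hit.pickedMesh.name, hit.pickedPoint);
     } */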
= gs.Zero()), this.createPickingRayToRef(c, e, l, this._tempPickingRay, r || null, !1, o), this._tempPickingRay), t, i, !1, s); return a && (a.ray = this.createPickingRay(c, e, Ae.Identity(), r || null)), a; }; ii.prototype.pickWithRay = function(c, e, t, i) { const r = this._internalPick((s) => (this._pickWithRayInverseMatrix || (this._pickWithRayInverseMatrix = Ae.Identity()), s.invertToRef(this._pickWithRayInverseMatrix), this._cachedRayForTransform || (this._cachedRayForTransform = gs.Zero()), gs.TransformToRef(c, this._pickWithRayInverseMatrix, this._cachedRayForTransform), this._cachedRayForTransform), e, t, !1, i); return r && (r.ray = c), r; }; ii.prototype.multiPick = function(c, e, t, i, r) { return this._internalMultiPick((s) => this.createPickingRay(c, e, s, i || null), t, r); }; ii.prototype.multiPickWithRay = function(c, e, t) { return this._internalMultiPick((i) => (this._pickWithRayInverseMatrix || (this._pickWithRayInverseMatrix = Ae.Identity()), i.invertToRef(this._pickWithRayInverseMatrix), this._cachedRayForTransform || (this._cachedRayForTransform = gs.Zero()), gs.TransformToRef(c, this._pickWithRayInverseMatrix, this._cachedRayForTransform), this._cachedRayForTransform), e, t); }; Ai.prototype.getForwardRay = function(c = 100, e, t) { return this.getForwardRayToRef(new gs(D.Zero(), D.Zero(), c), c, e, t); }; Ai.prototype.getForwardRayToRef = function(c, e = 100, t, i) { t || (t = this.getWorldMatrix()), c.length = e, i ? c.origin.copyFrom(i) : c.origin.copyFrom(this.position); const r = de.Vector3[2]; r.set(0, 0, this._scene.useRightHandedSystem ? -1 : 1); const s = de.Vector3[3]; return D.TransformNormalToRef(r, t, s), D.NormalizeToRef(s, c.direction), c; }; class Nn { /** * @internal */ static _RemoveAndStorePivotPoint(e) { e && Nn._PivotCached === 0 && (e.getPivotPointToRef(Nn._OldPivotPoint), Nn._PivotPostMultiplyPivotMatrix = e._postMultiplyPivotMatrix, Nn._OldPivotPoint.equalsToFloats(0, 0, 0) || (e.setPivotMatrix(Ae.IdentityReadOnly), Nn._OldPivotPoint.subtractToRef(e.getPivotPoint(), Nn._PivotTranslation), Nn._PivotTmpVector.copyFromFloats(1, 1, 1), Nn._PivotTmpVector.subtractInPlace(e.scaling), Nn._PivotTmpVector.multiplyInPlace(Nn._PivotTranslation), e.position.addInPlace(Nn._PivotTmpVector))), Nn._PivotCached++; } /** * @internal */ static _RestorePivotPoint(e) { e && !Nn._OldPivotPoint.equalsToFloats(0, 0, 0) && Nn._PivotCached === 1 && (e.setPivotPoint(Nn._OldPivotPoint), e._postMultiplyPivotMatrix = Nn._PivotPostMultiplyPivotMatrix, Nn._PivotTmpVector.copyFromFloats(1, 1, 1), Nn._PivotTmpVector.subtractInPlace(e.scaling), Nn._PivotTmpVector.multiplyInPlace(Nn._PivotTranslation), e.position.subtractInPlace(Nn._PivotTmpVector)), this._PivotCached--; } } Nn._PivotCached = 0; Nn._OldPivotPoint = new D(); Nn._PivotTranslation = new D(); Nn._PivotTmpVector = new D(); Nn._PivotPostMultiplyPivotMatrix = !1; function QB(c) { const e = [], t = [], i = [], r = [], s = c.width || c.size || 1, n = c.height || c.size || 1, a = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, l = s / 2, o = n / 2; t.push(-l, -o, 0), i.push(0, 0, -1), r.push(0, hn.UseOpenGLOrientationForUV ? 1 : 0), t.push(l, -o, 0), i.push(0, 0, -1), r.push(1, hn.UseOpenGLOrientationForUV ? 1 : 0), t.push(l, o, 0), i.push(0, 0, -1), r.push(1, hn.UseOpenGLOrientationForUV ? 0 : 1), t.push(-l, o, 0), i.push(0, 0, -1), r.push(0, hn.UseOpenGLOrientationForUV ? 
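/* Usage sketch, illustrative only: the plane vertex-data builder being defined here (minified QB) is wrapped by
   the CreatePlane factory that follows (minified hx), assumed to be the public BABYLON.MeshBuilder.CreatePlane.
   `scene` is a hypothetical Scene instance:
     const panel = BABYLON.MeshBuilder.CreatePlane("panel", { width: 2, height: 1, sideOrientation: BABYLON.Mesh.DOUBLESIDE }, scene);
   A sourcePlane option can also be passed to translate and orient the mesh along a mathematical plane. */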
0 : 1), e.push(0), e.push(1), e.push(2), e.push(0), e.push(2), e.push(3), Ot._ComputeSides(a, t, e, i, r, c.frontUVs, c.backUVs); const u = new Ot(); return u.indices = e, u.positions = t, u.normals = i, u.uvs = r, u; } function hx(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, QB(e).applyToMesh(i, e.updatable), e.sourcePlane && (i.translate(e.sourcePlane.normal, -e.sourcePlane.d), i.setDirection(e.sourcePlane.normal.scale(-1))), i; } const Rce = { // eslint-disable-next-line @typescript-eslint/naming-convention CreatePlane: hx }; Ot.CreatePlane = QB; ke.CreatePlane = (c, e, t, i, r) => hx(c, { size: e, width: e, height: e, sideOrientation: r, updatable: i }, t); class Fu { /** * Get or set the currentDraggingPointerId * @deprecated Please use currentDraggingPointerId instead */ get currentDraggingPointerID() { return this.currentDraggingPointerId; } set currentDraggingPointerID(e) { this.currentDraggingPointerId = e; } /** * If the drag behavior will react to drag events (Default: true) */ set enabled(e) { e != this._enabled && this.onEnabledObservable.notifyObservers(e), this._enabled = e; } get enabled() { return this._enabled; } /** * Gets the options used by the behavior */ get options() { return this._options; } /** * Sets the options used by the behavior */ set options(e) { this._options = e; } /** * Creates a pointer drag behavior that can be attached to a mesh * @param options The drag axis or normal of the plane that will be dragged across. If no options are specified the drag plane will always face the ray's origin (eg. camera) * @param options.dragAxis * @param options.dragPlaneNormal */ constructor(e) { this._useAlternatePickedPointAboveMaxDragAngleDragSpeed = -1.1, this._activeDragButton = -1, this.maxDragAngle = 0, this.dragButtons = [0, 1, 2], this._useAlternatePickedPointAboveMaxDragAngle = !1, this.currentDraggingPointerId = -1, this.dragging = !1, this.dragDeltaRatio = 0.2, this.updateDragPlane = !0, this._debugMode = !1, this._moving = !1, this.onDragObservable = new Fe(), this.onDragStartObservable = new Fe(), this.onDragEndObservable = new Fe(), this.onEnabledObservable = new Fe(), this.moveAttached = !0, this._enabled = !0, this.startAndReleaseDragOnPointerEvents = !0, this.detachCameraControls = !0, this.useObjectOrientationForDragging = !0, this.validateDrag = (i) => !0, this._tmpVector = new D(0, 0, 0), this._alternatePickedPoint = new D(0, 0, 0), this._worldDragAxis = new D(0, 0, 0), this._targetPosition = new D(0, 0, 0), this._attachedToElement = !1, this._startDragRay = new gs(new D(), new D()), this._lastPointerRay = {}, this._dragDelta = new D(), this._pointA = new D(0, 0, 0), this._pointC = new D(0, 0, 0), this._localAxis = new D(0, 0, 0), this._lookAt = new D(0, 0, 0), this._options = e || {}; let t = 0; if (this._options.dragAxis && t++, this._options.dragPlaneNormal && t++, t > 1) throw "Multiple drag modes specified in dragBehavior options. Only one expected"; } /** * The name of the behavior */ get name() { return "PointerDrag"; } /** * Initializes the behavior */ init() { } /** * Attaches the drag behavior the passed in mesh * @param ownerNode The mesh that will be dragged around once attached * @param predicate Predicate to use for pick filtering */ attach(e, t) { this._scene = e.getScene(), e.isNearGrabbable = !0, this.attachedNode = e, Fu._PlaneScene || (this._debugMode ? 
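/* Usage sketch, illustrative only: the class Fu being defined here is assumed to be the public
   BABYLON.PointerDragBehavior. It is constructed with either a dragAxis or a dragPlaneNormal option
   (passing both throws, as enforced in the constructor above); `mesh` is a hypothetical mesh:
     const dragBehavior = new BABYLON.PointerDragBehavior({ dragPlaneNormal: new BABYLON.Vector3(0, 1, 0) });
     dragBehavior.useObjectOrientationForDragging = false;
     mesh.addBehavior(dragBehavior); // addBehavior calls attach() for you */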
Fu._PlaneScene = this._scene : (Fu._PlaneScene = new ii(this._scene.getEngine(), { virtual: !0 }), Fu._PlaneScene.detachControl(), this._scene.onDisposeObservable.addOnce(() => { Fu._PlaneScene.dispose(), Fu._PlaneScene = null; }))), this._dragPlane = hx("pointerDragPlane", { size: this._debugMode ? 1 : 1e4, updatable: !1, sideOrientation: ke.DOUBLESIDE }, Fu._PlaneScene), this.lastDragPosition = new D(0, 0, 0); const i = t || ((r) => this.attachedNode == r || r.isDescendantOf(this.attachedNode)); this._pointerObserver = this._scene.onPointerObservable.add((r) => { if (!this.enabled) { this._attachedToElement && this.releaseDrag(); return; } if (r.type == si.POINTERDOWN) this.startAndReleaseDragOnPointerEvents && !this.dragging && r.pickInfo && r.pickInfo.hit && r.pickInfo.pickedMesh && r.pickInfo.pickedPoint && r.pickInfo.ray && i(r.pickInfo.pickedMesh) && this._activeDragButton === -1 && this.dragButtons.indexOf(r.event.button) !== -1 && (this._activeDragButton = r.event.button, this._activePointerInfo = r, this._startDrag(r.event.pointerId, r.pickInfo.ray, r.pickInfo.pickedPoint)); else if (r.type == si.POINTERUP) this.startAndReleaseDragOnPointerEvents && this.currentDraggingPointerId == r.event.pointerId && (this._activeDragButton === r.event.button || this._activeDragButton === -1) && this.releaseDrag(); else if (r.type == si.POINTERMOVE) { const s = r.event.pointerId; if (this.currentDraggingPointerId === Fu._AnyMouseId && s !== Fu._AnyMouseId) { const n = r.event; (n.pointerType === "mouse" || !this._scene.getEngine().hostInformation.isMobile && n instanceof MouseEvent) && (this._lastPointerRay[this.currentDraggingPointerId] && (this._lastPointerRay[s] = this._lastPointerRay[this.currentDraggingPointerId], delete this._lastPointerRay[this.currentDraggingPointerId]), this.currentDraggingPointerId = s); } this._lastPointerRay[s] || (this._lastPointerRay[s] = new gs(new D(), new D())), r.pickInfo && r.pickInfo.ray && (this._lastPointerRay[s].origin.copyFrom(r.pickInfo.ray.origin), this._lastPointerRay[s].direction.copyFrom(r.pickInfo.ray.direction), this.currentDraggingPointerId == s && this.dragging && this._moveDrag(r.pickInfo.ray)); } }), this._beforeRenderObserver = this._scene.onBeforeRenderObservable.add(() => { if (this._moving && this.moveAttached) { let r = !1; Nn._RemoveAndStorePivotPoint(this.attachedNode), this._targetPosition.subtractToRef(this.attachedNode.absolutePosition, this._tmpVector), this._tmpVector.scaleInPlace(this.dragDeltaRatio), this.attachedNode.getAbsolutePosition().addToRef(this._tmpVector, this._tmpVector), this.validateDrag(this._tmpVector) && (this.attachedNode.setAbsolutePosition(this._tmpVector), r = !0), Nn._RestorePivotPoint(this.attachedNode), r && this.attachedNode.computeWorldMatrix(); } }); } /** * Force release the drag action by code. */ releaseDrag() { if (this.dragging && (this.dragging = !1, this.onDragEndObservable.notifyObservers({ dragPlanePoint: this.lastDragPosition, pointerId: this.currentDraggingPointerId, pointerInfo: this._activePointerInfo })), this.currentDraggingPointerId = -1, this._activeDragButton = -1, this._activePointerInfo = null, this._moving = !1, this.detachCameraControls && this._attachedToElement && this._scene.activeCamera && !this._scene.activeCamera.leftCamera) { if (this._scene.activeCamera.getClassName() === "ArcRotateCamera") { const e = this._scene.activeCamera; e.attachControl(e.inputs ? 
e.inputs.noPreventDefault : !0, e._useCtrlForPanning, e._panningMouseButton); } else this._scene.activeCamera.attachControl(this._scene.activeCamera.inputs ? this._scene.activeCamera.inputs.noPreventDefault : !0); this._attachedToElement = !1; } } /** * Simulates the start of a pointer drag event on the behavior * @param pointerId pointerID of the pointer that should be simulated (Default: Any mouse pointer ID) * @param fromRay initial ray of the pointer to be simulated (Default: Ray from camera to attached mesh) * @param startPickedPoint picked point of the pointer to be simulated (Default: attached mesh position) */ startDrag(e = Fu._AnyMouseId, t, i) { this._startDrag(e, t, i); let r = this._lastPointerRay[e]; e === Fu._AnyMouseId && (r = this._lastPointerRay[Object.keys(this._lastPointerRay)[0]]), r && this._moveDrag(r); } _startDrag(e, t, i) { if (!this._scene.activeCamera || this.dragging || !this.attachedNode) return; Nn._RemoveAndStorePivotPoint(this.attachedNode), t ? (this._startDragRay.direction.copyFrom(t.direction), this._startDragRay.origin.copyFrom(t.origin)) : (this._startDragRay.origin.copyFrom(this._scene.activeCamera.position), this.attachedNode.getWorldMatrix().getTranslationToRef(this._tmpVector), this._tmpVector.subtractToRef(this._scene.activeCamera.position, this._startDragRay.direction)), this._updateDragPlanePosition(this._startDragRay, i || this._tmpVector); const r = this._pickWithRayOnDragPlane(this._startDragRay); r ? (this.dragging = !0, this.currentDraggingPointerId = e, this.lastDragPosition.copyFrom(r), this.onDragStartObservable.notifyObservers({ dragPlanePoint: r, pointerId: this.currentDraggingPointerId, pointerInfo: this._activePointerInfo }), this._targetPosition.copyFrom(this.attachedNode.getAbsolutePosition()), this.detachCameraControls && this._scene.activeCamera && this._scene.activeCamera.inputs && !this._scene.activeCamera.leftCamera && (this._scene.activeCamera.inputs.attachedToElement ? (this._scene.activeCamera.detachControl(), this._attachedToElement = !0) : this._attachedToElement = !1)) : this.releaseDrag(), Nn._RestorePivotPoint(this.attachedNode); } _moveDrag(e) { this._moving = !0; const t = this._pickWithRayOnDragPlane(e); if (t) { Nn._RemoveAndStorePivotPoint(this.attachedNode), this.updateDragPlane && this._updateDragPlanePosition(e, t); let i = 0; this._options.dragAxis ? (this.useObjectOrientationForDragging ? 
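/* Usage sketch, illustrative only: the drag lifecycle observables notified by this behavior
   (onDragStartObservable, onDragObservable, onDragEndObservable; public Babylon.js names assumed for the
   minified members above). `dragBehavior` is the hypothetical PointerDragBehavior instance from the earlier sketch:
     dragBehavior.onDragStartObservable.add((event) => console.log("start at", event.dragPlanePoint));
     dragBehavior.onDragObservable.add((event) => console.log("delta", event.delta, "distance", event.dragDistance));
     dragBehavior.onDragEndObservable.add(() => console.log("end")); */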
D.TransformCoordinatesToRef(this._options.dragAxis, this.attachedNode.getWorldMatrix().getRotationMatrix(), this._worldDragAxis) : this._worldDragAxis.copyFrom(this._options.dragAxis), t.subtractToRef(this.lastDragPosition, this._tmpVector), i = D.Dot(this._tmpVector, this._worldDragAxis), this._worldDragAxis.scaleToRef(i, this._dragDelta)) : (i = this._dragDelta.length(), t.subtractToRef(this.lastDragPosition, this._dragDelta)), this._targetPosition.addInPlace(this._dragDelta), this.onDragObservable.notifyObservers({ dragDistance: i, delta: this._dragDelta, dragPlanePoint: t, dragPlaneNormal: this._dragPlane.forward, pointerId: this.currentDraggingPointerId, pointerInfo: this._activePointerInfo }), this.lastDragPosition.copyFrom(t), Nn._RestorePivotPoint(this.attachedNode); } } _pickWithRayOnDragPlane(e) { if (!e) return null; let t = Math.acos(D.Dot(this._dragPlane.forward, e.direction)); if (t > Math.PI / 2 && (t = Math.PI - t), this.maxDragAngle > 0 && t > this.maxDragAngle) if (this._useAlternatePickedPointAboveMaxDragAngle) { this._tmpVector.copyFrom(e.direction), this.attachedNode.absolutePosition.subtractToRef(e.origin, this._alternatePickedPoint), this._alternatePickedPoint.normalize(), this._alternatePickedPoint.scaleInPlace(this._useAlternatePickedPointAboveMaxDragAngleDragSpeed * D.Dot(this._alternatePickedPoint, this._tmpVector)), this._tmpVector.addInPlace(this._alternatePickedPoint); const l = D.Dot(this._dragPlane.forward, this._tmpVector); return this._dragPlane.forward.scaleToRef(-l, this._alternatePickedPoint), this._alternatePickedPoint.addInPlace(this._tmpVector), this._alternatePickedPoint.addInPlace(this.attachedNode.absolutePosition), this._alternatePickedPoint; } else return null; const i = this._dragPlane.forward, r = this._dragPlane.position, s = e.direction.dot(i); if (Math.abs(s) < Sr) return null; r.subtractToRef(e.origin, de.Vector3[0]); const n = de.Vector3[0].dot(i) / s; return n < 0 ? null : (e.direction.scaleToRef(n, de.Vector3[0]), e.origin.add(de.Vector3[0])); } // Position the drag plane based on the attached mesh position, for single axis rotate the plane along the axis to face the camera _updateDragPlanePosition(e, t) { this._pointA.copyFrom(t), this._options.dragAxis ? (this.useObjectOrientationForDragging ? D.TransformCoordinatesToRef(this._options.dragAxis, this.attachedNode.getWorldMatrix().getRotationMatrix(), this._localAxis) : this._localAxis.copyFrom(this._options.dragAxis), e.origin.subtractToRef(this._pointA, this._pointC), this._pointC.normalize(), Math.abs(D.Dot(this._localAxis, this._pointC)) > 0.999 ? Math.abs(D.Dot(D.UpReadOnly, this._pointC)) > 0.999 ? this._lookAt.copyFrom(D.Right()) : this._lookAt.copyFrom(D.UpReadOnly) : (D.CrossToRef(this._localAxis, this._pointC, this._lookAt), D.CrossToRef(this._localAxis, this._lookAt, this._lookAt), this._lookAt.normalize()), this._dragPlane.position.copyFrom(this._pointA), this._pointA.addToRef(this._lookAt, this._lookAt), this._dragPlane.lookAt(this._lookAt)) : this._options.dragPlaneNormal ? (this.useObjectOrientationForDragging ? 
D.TransformCoordinatesToRef(this._options.dragPlaneNormal, this.attachedNode.getWorldMatrix().getRotationMatrix(), this._localAxis) : this._localAxis.copyFrom(this._options.dragPlaneNormal), this._dragPlane.position.copyFrom(this._pointA), this._pointA.addToRef(this._localAxis, this._lookAt), this._dragPlane.lookAt(this._lookAt)) : (this._dragPlane.position.copyFrom(this._pointA), this._dragPlane.lookAt(e.origin)), this._dragPlane.position.copyFrom(this.attachedNode.getAbsolutePosition()), this._dragPlane.computeWorldMatrix(!0); } /** * Detaches the behavior from the mesh */ detach() { this._lastPointerRay = {}, this.attachedNode && (this.attachedNode.isNearGrabbable = !1), this._pointerObserver && this._scene.onPointerObservable.remove(this._pointerObserver), this._beforeRenderObserver && this._scene.onBeforeRenderObservable.remove(this._beforeRenderObserver), this._dragPlane && this._dragPlane.dispose(), this.releaseDrag(); } } Fu._AnyMouseId = -2; class Pce { /** * Instantiate a new behavior that when attached to a mesh will allow the mesh to be scaled */ constructor() { this._startDistance = 0, this._initialScale = new D(0, 0, 0), this._targetScale = new D(0, 0, 0), this._sceneRenderObserver = null, this._dragBehaviorA = new Fu({}), this._dragBehaviorA.moveAttached = !1, this._dragBehaviorB = new Fu({}), this._dragBehaviorB.moveAttached = !1; } /** * The name of the behavior */ get name() { return "MultiPointerScale"; } /** * Initializes the behavior */ init() { } _getCurrentDistance() { return this._dragBehaviorA.lastDragPosition.subtract(this._dragBehaviorB.lastDragPosition).length(); } /** * Attaches the scale behavior the passed in mesh * @param ownerNode The mesh that will be scaled around once attached */ attach(e) { this._ownerNode = e, this._dragBehaviorA.onDragStartObservable.add(() => { this._dragBehaviorA.dragging && this._dragBehaviorB.dragging && (this._dragBehaviorA.currentDraggingPointerId == this._dragBehaviorB.currentDraggingPointerId ? this._dragBehaviorA.releaseDrag() : (this._initialScale.copyFrom(e.scaling), this._startDistance = this._getCurrentDistance())); }), this._dragBehaviorB.onDragStartObservable.add(() => { this._dragBehaviorA.dragging && this._dragBehaviorB.dragging && (this._dragBehaviorA.currentDraggingPointerId == this._dragBehaviorB.currentDraggingPointerId ? 
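/* Usage sketch, illustrative only: the class Pce being defined here is assumed to be the public
   BABYLON.MultiPointerScaleBehavior. It composes two PointerDragBehavior instances and, while both pointers
   are dragging, interpolates the mesh scaling toward the ratio of the current to the initial pointer distance.
   `mesh` is a hypothetical mesh:
     const scaleBehavior = new BABYLON.MultiPointerScaleBehavior();
     mesh.addBehavior(scaleBehavior); */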
this._dragBehaviorB.releaseDrag() : (this._initialScale.copyFrom(e.scaling), this._startDistance = this._getCurrentDistance())); }), [this._dragBehaviorA, this._dragBehaviorB].forEach((t) => { t.onDragObservable.add(() => { if (this._dragBehaviorA.dragging && this._dragBehaviorB.dragging) { const i = this._getCurrentDistance() / this._startDistance; this._initialScale.scaleToRef(i, this._targetScale); } }); }), e.addBehavior(this._dragBehaviorA), e.addBehavior(this._dragBehaviorB), this._sceneRenderObserver = e.getScene().onBeforeRenderObservable.add(() => { if (this._dragBehaviorA.dragging && this._dragBehaviorB.dragging) { const t = this._targetScale.subtract(e.scaling).scaleInPlace(0.1); t.length() > 0.01 && e.scaling.addInPlace(t); } }); } /** * Detaches the behavior from the mesh */ detach() { this._ownerNode.getScene().onBeforeRenderObservable.remove(this._sceneRenderObserver), [this._dragBehaviorA, this._dragBehaviorB].forEach((e) => { e.onDragStartObservable.clear(), e.onDragObservable.clear(), this._ownerNode.removeBehavior(e); }); } } class NC { constructor() { this._attachedToElement = !1, this._virtualMeshesInfo = {}, this._tmpVector = new D(), this._tmpQuaternion = new Ze(), this._dragType = { NONE: 0, DRAG: 1, DRAG_WITH_CONTROLLER: 2, NEAR_DRAG: 3 }, this._moving = !1, this._dragging = this._dragType.NONE, this.draggableMeshes = null, this.zDragFactor = 3, this.currentDraggingPointerIds = [], this.detachCameraControls = !0, this.onDragStartObservable = new Fe(), this.onDragObservable = new Fe(), this.onDragEndObservable = new Fe(), this.allowMultiPointer = !0; } /** * The id of the pointer that is currently interacting with the behavior (-1 when no pointer is active) */ get currentDraggingPointerId() { return this.currentDraggingPointerIds[0] !== void 0 ? this.currentDraggingPointerIds[0] : -1; } set currentDraggingPointerId(e) { this.currentDraggingPointerIds[0] = e; } /** * Get or set the currentDraggingPointerId * @deprecated Please use currentDraggingPointerId instead */ get currentDraggingPointerID() { return this.currentDraggingPointerId; } set currentDraggingPointerID(e) { this.currentDraggingPointerId = e; } /** * The name of the behavior */ get name() { return "BaseSixDofDrag"; } /** * Returns true if the attached mesh is currently moving with this behavior */ get isMoving() { return this._moving; } /** * Initializes the behavior */ init() { } /** * In the case of multiple active cameras, the cameraToUseForPointers should be used if set instead of active camera */ get _pointerCamera() { return this._scene.cameraToUseForPointers ? 
this._scene.cameraToUseForPointers : this._scene.activeCamera; } _createVirtualMeshInfo() { const e = new xr("", NC._virtualScene); e.rotationQuaternion = new Ze(); const t = new xr("", NC._virtualScene); t.rotationQuaternion = new Ze(); const i = new xr("", NC._virtualScene); return i.rotationQuaternion = new Ze(), { dragging: !1, moving: !1, dragMesh: e, originMesh: t, pivotMesh: i, startingPivotPosition: new D(), startingPivotOrientation: new Ze(), startingPosition: new D(), startingOrientation: new Ze(), lastOriginPosition: new D(), lastDragPosition: new D() }; } _resetVirtualMeshesPosition() { for (let e = 0; e < this.currentDraggingPointerIds.length; e++) this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].pivotMesh.position.copyFrom(this._ownerNode.getAbsolutePivotPoint()), this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].pivotMesh.rotationQuaternion.copyFrom(this._ownerNode.rotationQuaternion), this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].startingPivotPosition.copyFrom(this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].pivotMesh.position), this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].startingPivotOrientation.copyFrom(this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].pivotMesh.rotationQuaternion), this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].startingPosition.copyFrom(this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].dragMesh.position), this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].startingOrientation.copyFrom(this._virtualMeshesInfo[this.currentDraggingPointerIds[e]].dragMesh.rotationQuaternion); } _pointerUpdate2D(e, t, i) { this._pointerCamera && this._pointerCamera.cameraRigMode == Ai.RIG_MODE_NONE && !this._pointerCamera._isLeftCamera && !this._pointerCamera._isRightCamera && (e.origin.copyFrom(this._pointerCamera.globalPosition), i = 0); const r = this._virtualMeshesInfo[t], s = de.Vector3[0]; e.origin.subtractToRef(r.lastOriginPosition, s), r.lastOriginPosition.copyFrom(e.origin); const n = -D.Dot(s, e.direction); r.originMesh.addChild(r.dragMesh), r.originMesh.addChild(r.pivotMesh), this._applyZOffset(r.dragMesh, n, i), this._applyZOffset(r.pivotMesh, n, i), r.originMesh.position.copyFrom(e.origin); const a = de.Vector3[0]; e.origin.addToRef(e.direction, a), r.originMesh.lookAt(a), r.originMesh.removeChild(r.dragMesh), r.originMesh.removeChild(r.pivotMesh); } _pointerUpdateXR(e, t, i, r) { const s = this._virtualMeshesInfo[i]; if (s.originMesh.position.copyFrom(e.position), this._dragging === this._dragType.NEAR_DRAG && t ? 
s.originMesh.rotationQuaternion.copyFrom(t.rotationQuaternion) : s.originMesh.rotationQuaternion.copyFrom(e.rotationQuaternion), s.pivotMesh.computeWorldMatrix(!0), s.dragMesh.computeWorldMatrix(!0), r !== 0) { const n = de.Vector3[0], a = de.Vector3[1]; n.copyFrom(this._pointerCamera.getForwardRay().direction), s.originMesh.position.subtractToRef(s.lastOriginPosition, a), s.lastOriginPosition.copyFrom(s.originMesh.position); const l = a.length(); a.normalize(); const o = de.Vector3[2], u = de.Vector3[3]; s.dragMesh.absolutePosition.subtractToRef(this._pointerCamera.globalPosition, o), s.dragMesh.absolutePosition.subtractToRef(s.originMesh.position, u); const h = u.length(); o.normalize(), u.normalize(); let f = Math.abs(D.Dot(a, u)) * D.Dot(a, n) * r * l * h; const p = 0.01; f < 0 && p - h > f && (f = Math.min(p - h, 0)), u.scaleInPlace(f), u.addToRef(s.pivotMesh.absolutePosition, this._tmpVector), s.pivotMesh.setAbsolutePosition(this._tmpVector), u.addToRef(s.dragMesh.absolutePosition, this._tmpVector), s.dragMesh.setAbsolutePosition(this._tmpVector); } } /** * Attaches the scale behavior the passed in mesh * @param ownerNode The mesh that will be scaled around once attached */ attach(e) { this._ownerNode = e, this._scene = this._ownerNode.getScene(), NC._virtualScene || (NC._virtualScene = new ii(this._scene.getEngine(), { virtual: !0 }), NC._virtualScene.detachControl()); const t = (i) => this._ownerNode === i || i.isDescendantOf(this._ownerNode) && (!this.draggableMeshes || this.draggableMeshes.indexOf(i) !== -1); this._pointerObserver = this._scene.onPointerObservable.add((i) => { const r = i.event.pointerId; this._virtualMeshesInfo[r] || (this._virtualMeshesInfo[r] = this._createVirtualMeshInfo()); const s = this._virtualMeshesInfo[r], n = i.event.pointerType === "xr-near"; if (i.type == si.POINTERDOWN) { if (!s.dragging && i.pickInfo && i.pickInfo.hit && i.pickInfo.pickedMesh && i.pickInfo.pickedPoint && i.pickInfo.ray && (!n || i.pickInfo.aimTransform) && t(i.pickInfo.pickedMesh)) { if (!this.allowMultiPointer && this.currentDraggingPointerIds.length > 0) return; this._pointerCamera && this._pointerCamera.cameraRigMode === Ai.RIG_MODE_NONE && !this._pointerCamera._isLeftCamera && !this._pointerCamera._isRightCamera && i.pickInfo.ray.origin.copyFrom(this._pointerCamera.globalPosition), this._ownerNode.computeWorldMatrix(!0); const a = this._virtualMeshesInfo[r]; n ? (this._dragging = i.pickInfo.originMesh ? this._dragType.NEAR_DRAG : this._dragType.DRAG_WITH_CONTROLLER, a.originMesh.position.copyFrom(i.pickInfo.aimTransform.position), this._dragging === this._dragType.NEAR_DRAG && i.pickInfo.gripTransform ? a.originMesh.rotationQuaternion.copyFrom(i.pickInfo.gripTransform.rotationQuaternion) : a.originMesh.rotationQuaternion.copyFrom(i.pickInfo.aimTransform.rotationQuaternion)) : (this._dragging = this._dragType.DRAG, a.originMesh.position.copyFrom(i.pickInfo.ray.origin)), a.lastOriginPosition.copyFrom(a.originMesh.position), a.dragMesh.position.copyFrom(i.pickInfo.pickedPoint), a.lastDragPosition.copyFrom(i.pickInfo.pickedPoint), a.pivotMesh.position.copyFrom(this._ownerNode.getAbsolutePivotPoint()), a.pivotMesh.rotationQuaternion.copyFrom(this._ownerNode.absoluteRotationQuaternion), a.startingPosition.copyFrom(a.dragMesh.position), a.startingPivotPosition.copyFrom(a.pivotMesh.position), a.startingOrientation.copyFrom(a.dragMesh.rotationQuaternion), a.startingPivotOrientation.copyFrom(a.pivotMesh.rotationQuaternion), n ? 
(a.originMesh.addChild(a.dragMesh), a.originMesh.addChild(a.pivotMesh)) : a.originMesh.lookAt(a.dragMesh.position), a.dragging = !0, this.currentDraggingPointerIds.indexOf(r) === -1 && this.currentDraggingPointerIds.push(r), this.detachCameraControls && this._pointerCamera && !this._pointerCamera.leftCamera && (this._pointerCamera.inputs && this._pointerCamera.inputs.attachedToElement ? (this._pointerCamera.detachControl(), this._attachedToElement = !0) : this._attachedToElement = !1), this._targetDragStart(a.pivotMesh.position, a.pivotMesh.rotationQuaternion, r), this.onDragStartObservable.notifyObservers({ position: a.pivotMesh.position }); } } else if (i.type == si.POINTERUP || i.type == si.POINTERDOUBLETAP) { const a = this.currentDraggingPointerIds.indexOf(r); s.dragging = !1, a !== -1 && (this.currentDraggingPointerIds.splice(a, 1), this.currentDraggingPointerIds.length === 0 && (this._moving = !1, this._dragging = this._dragType.NONE, this.detachCameraControls && this._attachedToElement && this._pointerCamera && !this._pointerCamera.leftCamera && (this._reattachCameraControls(), this._attachedToElement = !1)), s.originMesh.removeChild(s.dragMesh), s.originMesh.removeChild(s.pivotMesh), this._targetDragEnd(r), this.onDragEndObservable.notifyObservers({})); } else if (i.type == si.POINTERMOVE && this.currentDraggingPointerIds.indexOf(r) !== -1 && s.dragging && i.pickInfo && (i.pickInfo.ray || i.pickInfo.aimTransform)) { let l = this.zDragFactor; (this.currentDraggingPointerIds.length > 1 || i.pickInfo.originMesh) && (l = 0), this._ownerNode.computeWorldMatrix(!0), n ? this._pointerUpdateXR(i.pickInfo.aimTransform, i.pickInfo.gripTransform, r, l) : this._pointerUpdate2D(i.pickInfo.ray, r, l), this._tmpQuaternion.copyFrom(s.startingPivotOrientation), this._tmpQuaternion.x = -this._tmpQuaternion.x, this._tmpQuaternion.y = -this._tmpQuaternion.y, this._tmpQuaternion.z = -this._tmpQuaternion.z, s.pivotMesh.absoluteRotationQuaternion.multiplyToRef(this._tmpQuaternion, this._tmpQuaternion), s.pivotMesh.absolutePosition.subtractToRef(s.startingPivotPosition, this._tmpVector), this.onDragObservable.notifyObservers({ delta: this._tmpVector, position: s.pivotMesh.position, pickInfo: i.pickInfo }), this._targetDrag(this._tmpVector, this._tmpQuaternion, r), s.lastDragPosition.copyFrom(s.dragMesh.absolutePosition), this._moving = !0; } }); } _applyZOffset(e, t, i) { e.position.z -= e.position.z < 1 ? t * i : t * i * e.position.z, e.position.z < 0 && (e.position.z = 0); } // eslint-disable-next-line @typescript-eslint/no-unused-vars _targetDragStart(e, t, i) { } _targetDrag(e, t, i) { } _targetDragEnd(e) { } _reattachCameraControls() { if (this._pointerCamera) if (this._pointerCamera.getClassName() === "ArcRotateCamera") { const e = this._pointerCamera; e.attachControl(e.inputs ? e.inputs.noPreventDefault : !0, e._useCtrlForPanning, e._panningMouseButton); } else this._pointerCamera.attachControl(this._pointerCamera.inputs ? 
this._pointerCamera.inputs.noPreventDefault : !0); } /** * Detaches the behavior from the mesh */ detach() { this._scene && (this.detachCameraControls && this._attachedToElement && this._pointerCamera && !this._pointerCamera.leftCamera && (this._reattachCameraControls(), this._attachedToElement = !1), this._scene.onPointerObservable.remove(this._pointerObserver)); for (const e in this._virtualMeshesInfo) this._virtualMeshesInfo[e].originMesh.dispose(), this._virtualMeshesInfo[e].dragMesh.dispose(); this.onDragEndObservable.clear(), this.onDragObservable.clear(), this.onDragStartObservable.clear(); } } class kte extends NC { constructor() { super(...arguments), this._sceneRenderObserver = null, this._targetPosition = new D(0, 0, 0), this._targetOrientation = new Ze(), this._targetScaling = new D(1, 1, 1), this._startingPosition = new D(0, 0, 0), this._startingOrientation = new Ze(), this._startingScaling = new D(1, 1, 1), this.onPositionChangedObservable = new Fe(), this.dragDeltaRatio = 0.2, this.rotateDraggedObject = !0, this.rotateAroundYOnly = !1, this.rotateWithMotionController = !0, this.disableMovement = !1, this.faceCameraOnDragStart = !1; } /** * The name of the behavior */ get name() { return "SixDofDrag"; } /** * Attaches the six DoF drag behavior * @param ownerNode The mesh that will be dragged around once attached */ attach(e) { super.attach(e), e.isNearGrabbable = !0, this._virtualTransformNode = new xi("virtual_sixDof", NC._virtualScene), this._virtualTransformNode.rotationQuaternion = Ze.Identity(), this._sceneRenderObserver = e.getScene().onBeforeRenderObservable.add(() => { if (this.currentDraggingPointerIds.length === 1 && this._moving && !this.disableMovement) { const t = e.parent; e.setParent(null), e.position.addInPlace(this._targetPosition.subtract(e.position).scale(this.dragDeltaRatio)), this.onPositionChangedObservable.notifyObservers({ position: e.absolutePosition }), (!t || t.scaling && !t.scaling.isNonUniformWithinEpsilon(1e-3)) && Ze.SlerpToRef(e.rotationQuaternion, this._targetOrientation, this.dragDeltaRatio, e.rotationQuaternion), e.setParent(t); } }); } _getPositionOffsetAround(e, t, i) { const r = de.Matrix[0], s = de.Matrix[1], n = de.Matrix[2], a = de.Matrix[3], l = de.Matrix[4]; return Ae.TranslationToRef(e.x, e.y, e.z, r), Ae.TranslationToRef(-e.x, -e.y, -e.z, s), Ae.FromQuaternionToRef(i, n), Ae.ScalingToRef(t, t, t, a), s.multiplyToRef(n, l), l.multiplyToRef(a, l), l.multiplyToRef(r, l), l.getTranslation(); } _onePointerPositionUpdated(e, t) { de.Vector3[0].setAll(0), this._dragging === this._dragType.DRAG ? this.rotateDraggedObject && (this.rotateAroundYOnly ? 
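/* Usage sketch, illustrative only: the class kte being defined here (extending the minified base class NC above)
   is assumed to be the public BABYLON.SixDofDragBehavior. `mesh` is a hypothetical mesh:
     const sixDofDrag = new BABYLON.SixDofDragBehavior();
     sixDofDrag.rotateDraggedObject = true; // rotate the mesh to follow the pointer or controller
     sixDofDrag.dragDeltaRatio = 0.2;       // per-frame smoothing factor (the default shown above)
     mesh.addBehavior(sixDofDrag);
   With two active pointers the behavior also scales and rotates the mesh around the midpoint between them. */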
Ze.RotationYawPitchRollToRef(t.toEulerAngles().y, 0, 0, de.Quaternion[0]) : de.Quaternion[0].copyFrom(t), de.Quaternion[0].multiplyToRef(this._startingOrientation, this._targetOrientation)) : (this._dragging === this._dragType.NEAR_DRAG || this._dragging === this._dragType.DRAG_WITH_CONTROLLER && this.rotateWithMotionController) && t.multiplyToRef(this._startingOrientation, this._targetOrientation), this._targetPosition.copyFrom(this._startingPosition).addInPlace(e); } _twoPointersPositionUpdated() { const e = this._virtualMeshesInfo[this.currentDraggingPointerIds[0]].startingPosition, t = this._virtualMeshesInfo[this.currentDraggingPointerIds[1]].startingPosition, i = de.Vector3[0]; e.addToRef(t, i), i.scaleInPlace(0.5); const r = de.Vector3[1]; t.subtractToRef(e, r); const s = this._virtualMeshesInfo[this.currentDraggingPointerIds[0]].dragMesh.absolutePosition, n = this._virtualMeshesInfo[this.currentDraggingPointerIds[1]].dragMesh.absolutePosition, a = de.Vector3[2]; s.addToRef(n, a), a.scaleInPlace(0.5); const l = de.Vector3[3]; n.subtractToRef(s, l); const o = l.length() / r.length(), u = a.subtract(i), h = Ze.FromEulerAngles(0, D.GetAngleBetweenVectorsOnPlane(r.normalize(), l.normalize(), D.UpReadOnly), 0), d = this._ownerNode.parent; this._ownerNode.setParent(null); const f = this._getPositionOffsetAround(i.subtract(this._virtualTransformNode.getAbsolutePivotPoint()), o, h); this._virtualTransformNode.rotationQuaternion.multiplyToRef(h, this._ownerNode.rotationQuaternion), this._virtualTransformNode.scaling.scaleToRef(o, this._ownerNode.scaling), this._virtualTransformNode.position.addToRef(u.addInPlace(f), this._ownerNode.position), this.onPositionChangedObservable.notifyObservers({ position: this._ownerNode.position }), this._ownerNode.setParent(d); } _targetDragStart() { const e = this.currentDraggingPointerIds.length, t = this._ownerNode.parent; this._ownerNode.rotationQuaternion || (this._ownerNode.rotationQuaternion = Ze.RotationYawPitchRoll(this._ownerNode.rotation.y, this._ownerNode.rotation.x, this._ownerNode.rotation.z)); const i = this._ownerNode.getAbsolutePivotPoint(); if (this._ownerNode.setParent(null), e === 1) { if (this._targetPosition.copyFrom(this._ownerNode.position), this._targetOrientation.copyFrom(this._ownerNode.rotationQuaternion), this._targetScaling.copyFrom(this._ownerNode.scaling), this.faceCameraOnDragStart && this._scene.activeCamera) { const r = de.Vector3[0]; this._scene.activeCamera.position.subtractToRef(i, r), r.normalize(); const s = de.Quaternion[0]; this._scene.useRightHandedSystem ? Ze.FromLookDirectionRHToRef(r, new D(0, 1, 0), s) : Ze.FromLookDirectionLHToRef(r, new D(0, 1, 0), s), s.normalize(), Ze.RotationYawPitchRollToRef(s.toEulerAngles().y, 0, 0, de.Quaternion[0]), this._targetOrientation.copyFrom(de.Quaternion[0]); } this._startingPosition.copyFrom(this._targetPosition), this._startingOrientation.copyFrom(this._targetOrientation), this._startingScaling.copyFrom(this._targetScaling); } else e === 2 && (this._virtualTransformNode.setPivotPoint(new D(0, 0, 0), qr.LOCAL), this._virtualTransformNode.position.copyFrom(this._ownerNode.position), this._virtualTransformNode.scaling.copyFrom(this._ownerNode.scaling), this._virtualTransformNode.rotationQuaternion.copyFrom(this._ownerNode.rotationQuaternion), this._virtualTransformNode.setPivotPoint(i, qr.WORLD), this._resetVirtualMeshesPosition()); this._ownerNode.setParent(t); } _targetDrag(e, t) { this.currentDraggingPointerIds.length === 1 ? 
this._onePointerPositionUpdated(e, t) : this.currentDraggingPointerIds.length === 2 && this._twoPointersPositionUpdated(); } _targetDragEnd() { if (this.currentDraggingPointerIds.length === 1) { this._resetVirtualMeshesPosition(); const e = this.faceCameraOnDragStart; this.faceCameraOnDragStart = !1, this._targetDragStart(), this.faceCameraOnDragStart = e; } } /** * Detaches the behavior from the mesh */ detach() { super.detach(), this._ownerNode && (this._ownerNode.isNearGrabbable = !1, this._ownerNode.getScene().onBeforeRenderObservable.remove(this._sceneRenderObserver)), this._virtualTransformNode && this._virtualTransformNode.dispose(); } } class Ice { constructor() { this._attachPointLocalOffset = new D(), this._workingPosition = new D(), this._workingQuaternion = new Ze(), this._lastTick = -1, this._hit = !1, this.hitNormalOffset = 0.05, this.meshes = [], this.interpolatePose = !0, this.lerpTime = 250, this.keepOrientationVertical = !0, this.enabled = !0, this.maxStickingDistance = 0.8; } /** * Name of the behavior */ get name() { return "SurfaceMagnetism"; } /** * Function called when the behavior needs to be initialized (after attaching it to a target) */ init() { } /** * Attaches the behavior to a transform node * @param target defines the target where the behavior is attached to * @param scene the scene */ attach(e, t) { this._attachedMesh = e, this._scene = t || e.getScene(), this._attachedMesh.rotationQuaternion || (this._attachedMesh.rotationQuaternion = Ze.RotationYawPitchRoll(this._attachedMesh.rotation.y, this._attachedMesh.rotation.x, this._attachedMesh.rotation.z)), this.updateAttachPoint(), this._workingPosition.copyFrom(this._attachedMesh.position), this._workingQuaternion.copyFrom(this._attachedMesh.rotationQuaternion), this._addObservables(); } /** * Detaches the behavior */ detach() { this._attachedMesh = null, this._removeObservables(); } _getTargetPose(e) { if (!this._attachedMesh) return null; if (e && e.hit) { const t = e.getNormal(!0, !0), i = e.pickedPoint; if (!t || !i) return null; t.normalize(); const r = de.Vector3[0]; return r.copyFrom(t), r.scaleInPlace(this.hitNormalOffset), r.addInPlace(i), this._attachedMesh.parent && (de.Matrix[0].copyFrom(this._attachedMesh.parent.getWorldMatrix()).invert(), D.TransformNormalToRef(r, de.Matrix[0], r)), { position: r, quaternion: Ze.RotationYawPitchRoll(-Math.atan2(t.x, -t.z), this.keepOrientationVertical ? 0 : Math.atan2(t.y, Math.sqrt(t.z * t.z + t.x * t.x)), 0) }; } return null; } /** * Updates the attach point with the current geometry extents of the attached mesh */ updateAttachPoint() { this._getAttachPointOffsetToRef(this._attachPointLocalOffset); } /** * Finds the intersection point of the given ray onto the meshes and updates the target. * Transformation will be interpolated according to `interpolatePose` and `lerpTime` properties. * If no mesh of `meshes` are hit, this does nothing. 
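* @example
* // Illustrative sketch only; assumes this class (minified Ice here) is the public BABYLON.SurfaceMagnetismBehavior
* // and that wallMesh, floorMesh and panelMesh are hypothetical meshes.
* const magnetism = new BABYLON.SurfaceMagnetismBehavior();
* magnetism.meshes = [wallMesh, floorMesh]; // surfaces the attached mesh may snap to
* panelMesh.addBehavior(magnetism);
* // The pointer-move observer wired up in _addObservables then feeds each pickInfo into this method.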
* @param pickInfo The input pickingInfo that will be used to intersect the meshes * @returns a boolean indicating if we found a hit to stick to */ findAndUpdateTarget(e) { if (this._hit = !1, !e.ray) return !1; const t = e.ray.intersectsMeshes(this.meshes)[0]; if (this._attachedMesh && t && t.hit && t.pickedMesh) { const i = this._getTargetPose(t); i && D.Distance(this._attachedMesh.position, i.position) < this.maxStickingDistance && (this._workingPosition.copyFrom(i.position), this._workingQuaternion.copyFrom(i.quaternion), this._hit = !0); } return this._hit; } _getAttachPointOffsetToRef(e) { if (!this._attachedMesh) { e.setAll(0); return; } const t = de.Quaternion[0]; t.copyFrom(this._attachedMesh.rotationQuaternion), this._attachedMesh.rotationQuaternion.copyFromFloats(0, 0, 0, 1), this._attachedMesh.computeWorldMatrix(); const i = this._attachedMesh.getHierarchyBoundingVectors(), r = de.Vector3[0]; i.max.addToRef(i.min, r), r.scaleInPlace(0.5), r.z = i.max.z; const s = de.Matrix[0]; this._attachedMesh.getWorldMatrix().invertToRef(s), D.TransformCoordinatesToRef(r, s, e), this._attachedMesh.rotationQuaternion.copyFrom(t); } _updateTransformToGoal(e) { if (!this._attachedMesh || !this._hit) return; const t = this._attachedMesh.parent; this._attachedMesh.setParent(null); const i = de.Vector3[0]; if (D.TransformNormalToRef(this._attachPointLocalOffset, this._attachedMesh.getWorldMatrix(), i), !this.interpolatePose) { this._attachedMesh.position.copyFrom(this._workingPosition).subtractInPlace(i), this._attachedMesh.rotationQuaternion.copyFrom(this._workingQuaternion); return; } const r = new D(); D.SmoothToRef(this._attachedMesh.position, this._workingPosition, e, this.lerpTime, r), this._attachedMesh.position.copyFrom(r); const s = new Ze(); s.copyFrom(this._attachedMesh.rotationQuaternion), Ze.SmoothToRef(s, this._workingQuaternion, e, this.lerpTime, this._attachedMesh.rotationQuaternion), this._attachedMesh.setParent(t); } _addObservables() { this._pointerObserver = this._scene.onPointerObservable.add((e) => { this.enabled && e.type == si.POINTERMOVE && e.pickInfo && this.findAndUpdateTarget(e.pickInfo); }), this._lastTick = Date.now(), this._onBeforeRender = this._scene.onBeforeRenderObservable.add(() => { const e = Date.now(); this._updateTransformToGoal(e - this._lastTick), this._lastTick = e; }); } _removeObservables() { this._scene.onPointerObservable.remove(this._pointerObserver), this._scene.onBeforeRenderObservable.remove(this._onBeforeRender), this._pointerObserver = null, this._onBeforeRender = null; } } class Dce { constructor() { this._tmpQuaternion = new Ze(), this._tmpVectors = [new D(), new D(), new D(), new D(), new D(), new D(), new D()], this._tmpMatrix = new Ae(), this._tmpInvertView = new Ae(), this._tmpForward = new D(), this._tmpNodeForward = new D(), this._tmpPosition = new D(), this._workingPosition = new D(), this._workingQuaternion = new Ze(), this._lastTick = -1, this._recenterNextUpdate = !0, this.interpolatePose = !0, this.lerpTime = 500, this.ignoreCameraPitchAndRoll = !1, this.pitchOffset = 15, this.maxViewVerticalDegrees = 30, this.maxViewHorizontalDegrees = 30, this.orientToCameraDeadzoneDegrees = 60, this.ignoreDistanceClamp = !1, this.ignoreAngleClamp = !1, this.verticalMaxDistance = 0, this.defaultDistance = 0.8, this.maximumDistance = 2, this.minimumDistance = 0.3, this.useFixedVerticalOffset = !1, this.fixedVerticalOffset = 0, this._enabled = !0; } /** * The camera that should be followed by this behavior */ get followedCamera() { return 
this._followedCamera || this._scene.activeCamera; } set followedCamera(e) { this._followedCamera = e; } /** * The name of the behavior */ get name() { return "Follow"; } /** * Initializes the behavior */ init() { } /** * Attaches the follow behavior * @param ownerNode The mesh that will be following once attached * @param followedCamera The camera that should be followed by the node */ attach(e, t) { this._scene = e.getScene(), this.attachedNode = e, t && (this.followedCamera = t), this._addObservables(); } /** * Detaches the behavior from the mesh */ detach() { this.attachedNode = null, this._removeObservables(); } /** * Recenters the attached node in front of the camera on the next update */ recenter() { this._recenterNextUpdate = !0; } _angleBetweenVectorAndPlane(e, t) { return this._tmpVectors[0].copyFrom(e), e = this._tmpVectors[0], this._tmpVectors[1].copyFrom(t), t = this._tmpVectors[1], e.normalize(), t.normalize(), Math.PI / 2 - Math.acos(D.Dot(e, t)); } _length2D(e) { return Math.sqrt(e.x * e.x + e.z * e.z); } _distanceClamp(e, t = !1) { let i = this.minimumDistance, r = this.maximumDistance; const s = this.defaultDistance, n = this._tmpVectors[0]; n.copyFrom(e); let a = n.length(); if (n.normalizeFromLength(a), this.ignoreCameraPitchAndRoll) { i = this._length2D(n) * i, r = this._length2D(n) * r; const o = this._length2D(e); n.scaleInPlace(a / o), a = o; } let l = a; return t ? l = s : l = yt.Clamp(a, i, r), e.copyFrom(n).scaleInPlace(l), a !== l; } _applyVerticalClamp(e) { this.verticalMaxDistance !== 0 && (e.y = yt.Clamp(e.y, -this.verticalMaxDistance, this.verticalMaxDistance)); } _toOrientationQuatToRef(e, t) { Ze.RotationYawPitchRollToRef(Math.atan2(e.x, e.z), Math.atan2(e.y, Math.sqrt(e.z * e.z + e.x * e.x)), 0, t); } _applyPitchOffset(e) { const t = this._tmpVectors[0], i = this._tmpVectors[1]; t.copyFromFloats(0, 0, this._scene.useRightHandedSystem ? -1 : 1), i.copyFromFloats(1, 0, 0), D.TransformNormalToRef(t, e, t), t.y = 0, t.normalize(), D.TransformNormalToRef(i, e, i), Ze.RotationAxisToRef(i, this.pitchOffset * Math.PI / 180, this._tmpQuaternion), t.rotateByQuaternionToRef(this._tmpQuaternion, t), this._toOrientationQuatToRef(t, this._tmpQuaternion), this._tmpQuaternion.toRotationMatrix(this._tmpMatrix), e.copyFrom(this._tmpMatrix); } _angularClamp(e, t) { const i = this._tmpVectors[5]; i.copyFromFloats(0, 0, this._scene.useRightHandedSystem ? -1 : 1); const r = this._tmpVectors[6]; r.copyFromFloats(1, 0, 0), D.TransformNormalToRef(i, e, i), D.TransformNormalToRef(r, e, r); const s = D.UpReadOnly; if (t.length() < Sr) return !1; let a = !1; const l = this._tmpQuaternion; if (this.ignoreCameraPitchAndRoll) { const h = D.GetAngleBetweenVectorsOnPlane(t, i, r); Ze.RotationAxisToRef(r, h, l), t.rotateByQuaternionToRef(l, t); } else { const h = -D.GetAngleBetweenVectorsOnPlane(t, i, r), d = this.maxViewVerticalDegrees * Math.PI / 180 * 0.5; h < -d ? (Ze.RotationAxisToRef(r, -h - d, l), t.rotateByQuaternionToRef(l, t), a = !0) : h > d && (Ze.RotationAxisToRef(r, -h + d, l), t.rotateByQuaternionToRef(l, t), a = !0); } const o = this._angleBetweenVectorAndPlane(t, r) * (this._scene.useRightHandedSystem ? -1 : 1), u = this.maxViewHorizontalDegrees * Math.PI / 180 * 0.5; return o < -u ? 
(Ze.RotationAxisToRef(s, -o - u, l), t.rotateByQuaternionToRef(l, t), a = !0) : o > u && (Ze.RotationAxisToRef(s, -o + u, l), t.rotateByQuaternionToRef(l, t), a = !0), a; } _orientationClamp(e, t) { var i; const r = this._tmpVectors[0]; r.copyFrom(e).scaleInPlace(-1).normalize(); const s = this._tmpVectors[1], n = this._tmpVectors[2]; s.copyFromFloats(0, 1, 0), D.CrossToRef(r, s, n); const a = n.length(); a < Sr || (n.normalizeFromLength(a), D.CrossToRef(n, r, s), !((i = this.attachedNode) === null || i === void 0) && i.getScene().useRightHandedSystem ? Ze.FromLookDirectionRHToRef(r, s, t) : Ze.FromLookDirectionLHToRef(r, s, t)); } _passedOrientationDeadzone(e, t) { const i = this._tmpVectors[5]; return i.copyFrom(e), i.normalize(), Math.abs(D.GetAngleBetweenVectorsOnPlane(t, i, D.UpReadOnly)) * 180 / Math.PI > this.orientToCameraDeadzoneDegrees; } _updateLeashing(e) { if (this.attachedNode && this._enabled) { const t = this.attachedNode.parent; this.attachedNode.setParent(null); const i = this.attachedNode.getWorldMatrix(), r = this._workingPosition, s = this._workingQuaternion, n = this.attachedNode.getPivotPoint(), a = this._tmpInvertView; a.copyFrom(e.getViewMatrix()), a.invert(), D.TransformCoordinatesToRef(n, i, r); const l = this._tmpPosition; l.copyFromFloats(0, 0, 0), D.TransformCoordinatesToRef(l, i, l), l.scaleInPlace(-1).subtractInPlace(n), r.subtractInPlace(e.globalPosition), this.ignoreCameraPitchAndRoll && this._applyPitchOffset(a); let o = !1; const u = this._tmpForward; u.copyFromFloats(0, 0, this._scene.useRightHandedSystem ? -1 : 1), D.TransformNormalToRef(u, a, u); const h = this._tmpNodeForward; if (h.copyFromFloats(0, 0, this._scene.useRightHandedSystem ? -1 : 1), D.TransformNormalToRef(h, i, h), this._recenterNextUpdate) r.copyFrom(u).scaleInPlace(this.defaultDistance); else if (this.ignoreAngleClamp) { const f = r.length(); r.copyFrom(u).scaleInPlace(f); } else o = this._angularClamp(a, r); let d = !1; this.ignoreDistanceClamp || (d = this._distanceClamp(r, o), this._applyVerticalClamp(r)), this.useFixedVerticalOffset && (r.y = l.y - e.globalPosition.y + this.fixedVerticalOffset), (o || d || this._passedOrientationDeadzone(r, h) || this._recenterNextUpdate) && this._orientationClamp(r, s), this._workingPosition.subtractInPlace(n), this._recenterNextUpdate = !1, this.attachedNode.setParent(t); } } _updateTransformToGoal(e) { if (!this.attachedNode || !this.followedCamera || !this._enabled) return; this.attachedNode.rotationQuaternion || (this.attachedNode.rotationQuaternion = Ze.Identity()); const t = this.attachedNode.parent; if (this.attachedNode.setParent(null), !this.interpolatePose) { this.attachedNode.position.copyFrom(this.followedCamera.globalPosition).addInPlace(this._workingPosition), this.attachedNode.rotationQuaternion.copyFrom(this._workingQuaternion); return; } const i = new D(); i.copyFrom(this.attachedNode.position).subtractInPlace(this.followedCamera.globalPosition), D.SmoothToRef(i, this._workingPosition, e, this.lerpTime, i), i.addInPlace(this.followedCamera.globalPosition), this.attachedNode.position.copyFrom(i); const r = new Ze(); r.copyFrom(this.attachedNode.rotationQuaternion), Ze.SmoothToRef(r, this._workingQuaternion, e, this.lerpTime, this.attachedNode.rotationQuaternion), this.attachedNode.setParent(t); } _addObservables() { this._lastTick = Date.now(), this._onBeforeRender = this._scene.onBeforeRenderObservable.add(() => { if (!this.followedCamera) return; const e = Date.now(); this._updateLeashing(this.followedCamera), 
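/* Usage sketch, illustrative only: the class Dce being defined here is assumed to be the public
   BABYLON.FollowBehavior, which keeps a node leashed in front of the followed camera.
   `panelMesh` is a hypothetical mesh; followedCamera falls back to scene.activeCamera when not set:
     const follow = new BABYLON.FollowBehavior();
     follow.defaultDistance = 0.8;           // metres in front of the camera (the default shown above)
     follow.ignoreCameraPitchAndRoll = true;
     panelMesh.addBehavior(follow);
   Call follow.recenter() to snap the node back in front of the camera on the next update. */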
this._updateTransformToGoal(e - this._lastTick), this._lastTick = e; }); } _removeObservables() { this._onBeforeRender && this._scene.onBeforeRenderObservable.remove(this._onBeforeRender); } } class Qs { } Qs.ANCHOR_SYSTEM = "xr-anchor-system"; Qs.BACKGROUND_REMOVER = "xr-background-remover"; Qs.HIT_TEST = "xr-hit-test"; Qs.MESH_DETECTION = "xr-mesh-detection"; Qs.PHYSICS_CONTROLLERS = "xr-physics-controller"; Qs.PLANE_DETECTION = "xr-plane-detection"; Qs.POINTER_SELECTION = "xr-controller-pointer-selection"; Qs.TELEPORTATION = "xr-controller-teleportation"; Qs.FEATURE_POINTS = "xr-feature-points"; Qs.HAND_TRACKING = "xr-hand-tracking"; Qs.IMAGE_TRACKING = "xr-image-tracking"; Qs.NEAR_INTERACTION = "xr-near-interaction"; Qs.DOM_OVERLAY = "xr-dom-overlay"; Qs.MOVEMENT = "xr-controller-movement"; Qs.LIGHT_ESTIMATION = "xr-light-estimation"; Qs.EYE_TRACKING = "xr-eye-tracking"; Qs.WALKING_LOCOMOTION = "xr-walking-locomotion"; Qs.LAYERS = "xr-layers"; Qs.DEPTH_SENSING = "xr-depth-sensing"; Qs.SPACE_WARP = "xr-space-warp"; Qs.RAW_CAMERA_ACCESS = "xr-raw-camera-access"; class Oo { /** * constructs a new features manages. * * @param _xrSessionManager an instance of WebXRSessionManager */ constructor(e) { this._xrSessionManager = e, this._features = {}, this._xrSessionManager.onXRSessionInit.add(() => { this.getEnabledFeatures().forEach((t) => { const i = this._features[t]; i.enabled && !i.featureImplementation.attached && !i.featureImplementation.disableAutoAttach && this.attachFeature(t); }); }), this._xrSessionManager.onXRSessionEnded.add(() => { this.getEnabledFeatures().forEach((t) => { const i = this._features[t]; i.enabled && i.featureImplementation.attached && this.detachFeature(t); }); }); } /** * Used to register a module. After calling this function a developer can use this feature in the scene. * Mainly used internally. * * @param featureName the name of the feature to register * @param constructorFunction the function used to construct the module * @param version the (babylon) version of the module * @param stable is that a stable version of this module */ static AddWebXRFeature(e, t, i = 1, r = !1) { this._AvailableFeatures[e] = this._AvailableFeatures[e] || { latest: i }, i > this._AvailableFeatures[e].latest && (this._AvailableFeatures[e].latest = i), r && (this._AvailableFeatures[e].stable = i), this._AvailableFeatures[e][i] = t; } /** * Returns a constructor of a specific feature. * * @param featureName the name of the feature to construct * @param version the version of the feature to load * @param xrSessionManager the xrSessionManager. Used to construct the module * @param options optional options provided to the module. * @returns a function that, when called, will return a new instance of this feature */ static ConstructFeature(e, t = 1, i, r) { const s = this._AvailableFeatures[e][t]; if (!s) throw new Error("feature not found"); return s(i, r); } /** * Can be used to return the list of features currently registered * * @returns an Array of available features */ static GetAvailableFeatures() { return Object.keys(this._AvailableFeatures); } /** * Gets the versions available for a specific feature * @param featureName the name of the feature * @returns an array with the available versions */ static GetAvailableVersions(e) { return Object.keys(this._AvailableFeatures[e]); } /** * Return the latest unstable version of this feature * @param featureName the name of the feature to search * @returns the version number. 
if not found will return -1 */ static GetLatestVersionOfFeature(e) { return this._AvailableFeatures[e] && this._AvailableFeatures[e].latest || -1; } /** * Return the latest stable version of this feature * @param featureName the name of the feature to search * @returns the version number. if not found will return -1 */ static GetStableVersionOfFeature(e) { return this._AvailableFeatures[e] && this._AvailableFeatures[e].stable || -1; } /** * Attach a feature to the current session. Mainly used when session started to start the feature effect. * Can be used during a session to start a feature * @param featureName the name of feature to attach */ attachFeature(e) { const t = this._features[e]; t && t.enabled && !t.featureImplementation.attached && t.featureImplementation.attach(); } /** * Can be used inside a session or when the session ends to detach a specific feature * @param featureName the name of the feature to detach */ detachFeature(e) { const t = this._features[e]; t && t.featureImplementation.attached && t.featureImplementation.detach(); } /** * Used to disable an already-enabled feature * The feature will be disposed and will be recreated once enabled. * @param featureName the feature to disable * @returns true if disable was successful */ // eslint-disable-next-line @typescript-eslint/naming-convention disableFeature(e) { const t = typeof e == "string" ? e : e.Name, i = this._features[t]; return i && i.enabled ? (i.enabled = !1, this.detachFeature(t), i.featureImplementation.dispose(), delete this._features[t], !0) : !1; } /** * dispose this features manager */ dispose() { this.getEnabledFeatures().forEach((e) => { this.disableFeature(e); }); } /** * Enable a feature using its name and a version. This will enable it in the scene, and will be responsible to attach it when the session starts. * If used twice, the old version will be disposed and a new one will be constructed. This way you can re-enable with different configuration. * * @param featureName the name of the feature to load or the class of the feature * @param version optional version to load. if not provided the latest version will be enabled * @param moduleOptions options provided to the module. Ses the module documentation / constructor * @param attachIfPossible if set to true (default) the feature will be automatically attached, if it is currently possible * @param required is this feature required to the app. If set to true the session init will fail if the feature is not available. * @returns a new constructed feature or throws an error if feature not found or conflicts with another enabled feature. */ enableFeature(e, t = "latest", i = {}, r = !0, s = !0) { const n = typeof e == "string" ? e : e.Name; let a = 0; if (typeof t == "string") { if (!t) throw new Error(`Error in provided version - ${n} (${t})`); if (t === "stable" ? a = Oo.GetStableVersionOfFeature(n) : t === "latest" ? a = Oo.GetLatestVersionOfFeature(n) : a = +t, a === -1 || isNaN(a)) throw new Error(`feature not found - ${n} (${t})`); } else a = t; const l = Oo._ConflictingFeatures[n]; if (l !== void 0 && this.getEnabledFeatures().indexOf(l) !== -1) throw new Error(`Feature ${n} cannot be enabled while ${l} is enabled.`); const o = this._features[n], u = Oo.ConstructFeature(n, a, this._xrSessionManager, i); if (!u) throw new Error(`feature not found - ${n}`); o && this.disableFeature(n); const h = u(); if (h.dependsOn && !h.dependsOn.every((f) => !!this._features[f])) throw new Error(`Dependant features missing. 
Make sure the following features are enabled - ${h.dependsOn.join(", ")}`); if (h.isCompatible()) return this._features[n] = { featureImplementation: h, enabled: !0, version: a, required: s }, r ? this._xrSessionManager.session && !this._features[n].featureImplementation.attached && this.attachFeature(n) : this._features[n].featureImplementation.disableAutoAttach = !0, this._features[n].featureImplementation; if (s) throw new Error("required feature not compatible"); return Ve.Warn(`Feature ${n} not compatible with the current environment/browser and was not enabled.`), h; } /** * get the implementation of an enabled feature. * @param featureName the name of the feature to load * @returns the feature class, if found */ getEnabledFeature(e) { return this._features[e] && this._features[e].featureImplementation; } /** * Get the list of enabled features * @returns an array of enabled features */ getEnabledFeatures() { return Object.keys(this._features); } /** * This function will extend the session creation configuration object with enabled features. * If, for example, the anchors feature is enabled, it will be automatically added to the optional or required features list, * according to the defined "required" variable, provided during enableFeature call * @param xrSessionInit the xr Session init object to extend * * @returns an extended XRSessionInit object */ async _extendXRSessionInitObject(e) { const t = this.getEnabledFeatures(); for (const i of t) { const r = this._features[i], s = r.featureImplementation.xrNativeFeatureName; if (s && (r.required ? (e.requiredFeatures = e.requiredFeatures || [], e.requiredFeatures.indexOf(s) === -1 && e.requiredFeatures.push(s)) : (e.optionalFeatures = e.optionalFeatures || [], e.optionalFeatures.indexOf(s) === -1 && e.optionalFeatures.push(s))), r.featureImplementation.getXRSessionInitExtension) { const n = await r.featureImplementation.getXRSessionInitExtension(); e = Object.assign(Object.assign({}, e), n); } } return e; } } Oo._AvailableFeatures = {}; Oo._ConflictingFeatures = { [Qs.TELEPORTATION]: Qs.MOVEMENT, [Qs.MOVEMENT]: Qs.TELEPORTATION }; class Ku { /** * Construct a new (abstract) WebXR feature * @param _xrSessionManager the xr session manager for this feature */ constructor(e) { this._xrSessionManager = e, this._attached = !1, this._removeOnDetach = [], this.isDisposed = !1, this.disableAutoAttach = !1, this.xrNativeFeatureName = "", this.onFeatureAttachObservable = new Fe(), this.onFeatureDetachObservable = new Fe(); } /** * Is this feature attached */ get attached() { return this._attached; } /** * attach this feature * * @param force should attachment be forced (even when already attached) * @returns true if successful, false is failed or already attached */ attach(e) { if (this.isDisposed) return !1; if (e) this.attached && this.detach(); else if (this.attached) return !1; return this._attached = !0, this._addNewAttachObserver(this._xrSessionManager.onXRFrameObservable, (t) => this._onXRFrame(t)), this.onFeatureAttachObservable.notifyObservers(this), !0; } /** * detach this feature. * * @returns true if successful, false if failed or already detached */ detach() { return this._attached ? 
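/* A minimal usage sketch for the features manager above (Babylon.js's WebXRFeaturesManager),
   assuming `featuresManager` is an instance of this class -- either constructed from an existing
   WebXR session manager with `new Oo(sessionManager)` or obtained from an XR experience helper:

   console.log(Oo.GetAvailableFeatures());        // everything registered via AddWebXRFeature

   // Enable hit testing at its latest registered version. Because attachIfPossible defaults to
   // true, the feature auto-attaches once the XR session starts.
   const hitTest = featuresManager.enableFeature(Qs.HIT_TEST, "latest");

   // TELEPORTATION and MOVEMENT are declared as conflicting above, so enabling one while the
   // other is active throws.
   featuresManager.disableFeature(Qs.HIT_TEST);    // dispose and forget the feature
*/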
(this._attached = !1, this._removeOnDetach.forEach((e) => { e.observable.remove(e.observer); }), this.onFeatureDetachObservable.notifyObservers(this), !0) : (this.disableAutoAttach = !0, !1); } /** * Dispose this feature and all of the resources attached */ dispose() { this.detach(), this.isDisposed = !0, this.onFeatureAttachObservable.clear(), this.onFeatureDetachObservable.clear(); } /** * This function will be executed during before enabling the feature and can be used to not-allow enabling it. * Note that at this point the session has NOT started, so this is purely checking if the browser supports it * * @returns whether or not the feature is compatible in this environment */ isCompatible() { return !0; } /** * This is used to register callbacks that will automatically be removed when detach is called. * @param observable the observable to which the observer will be attached * @param callback the callback to register */ _addNewAttachObserver(e, t) { this._removeOnDetach.push({ observable: e, observer: e.add(t) }); } } class ta { /** * Initializes the physics joint * @param type The type of the physics joint * @param jointData The data for the physics joint */ constructor(e, t) { this.type = e, this.jointData = t, t.nativeParams = t.nativeParams || {}; } /** * Gets the physics joint */ get physicsJoint() { return this._physicsJoint; } /** * Sets the physics joint */ set physicsJoint(e) { this._physicsJoint, this._physicsJoint = e; } /** * Sets the physics plugin */ set physicsPlugin(e) { this._physicsPlugin = e; } /** * Execute a function that is physics-plugin specific. * @param {Function} func the function that will be executed. * It accepts two parameters: the physics world and the physics joint */ executeNativeFunction(e) { e(this._physicsPlugin.world, this._physicsJoint); } } ta.DistanceJoint = 0; ta.HingeJoint = 1; ta.BallAndSocketJoint = 2; ta.WheelJoint = 3; ta.SliderJoint = 4; ta.PrismaticJoint = 5; ta.UniversalJoint = 6; ta.Hinge2Joint = ta.WheelJoint; ta.PointToPointJoint = 8; ta.SpringJoint = 9; ta.LockJoint = 10; class Oce extends ta { /** * * @param jointData The data for the Distance-Joint */ constructor(e) { super(ta.DistanceJoint, e); } /** * Update the predefined distance. * @param maxDistance The maximum preferred distance * @param minDistance The minimum preferred distance */ updateDistance(e, t) { this._physicsPlugin.updateDistanceJoint(this, e, t); } } class vK extends ta { /** * Initializes the Motor-Enabled Joint * @param type The type of the joint * @param jointData The physical joint data for the joint */ constructor(e, t) { super(e, t); } /** * Set the motor values. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param force the force to apply * @param maxForce max force for this motor. */ setMotor(e, t) { this._physicsPlugin.setMotor(this, e || 0, t); } /** * Set the motor's limits. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param upperLimit The upper limit of the motor * @param lowerLimit The lower limit of the motor */ setLimit(e, t) { this._physicsPlugin.setLimit(this, e, t); } } class wce extends vK { /** * Initializes the Hinge-Joint * @param jointData The joint data for the Hinge-Joint */ constructor(e) { super(ta.HingeJoint, e); } /** * Set the motor values. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param {number} force the force to apply * @param {number} maxForce max force for this motor. 
*/ setMotor(e, t) { this._physicsPlugin.setMotor(this, e || 0, t); } /** * Set the motor's limits. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param upperLimit The upper limit of the motor * @param lowerLimit The lower limit of the motor */ setLimit(e, t) { this._physicsPlugin.setLimit(this, e, t); } } class Lce extends vK { /** * Initializes the Hinge2-Joint * @param jointData The joint data for the Hinge2-Joint */ constructor(e) { super(ta.Hinge2Joint, e); } /** * Set the motor values. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param targetSpeed the speed the motor is to reach * @param maxForce max force for this motor. * @param motorIndex motor's index, 0 or 1. */ setMotor(e, t, i = 0) { this._physicsPlugin.setMotor(this, e || 0, t, i); } /** * Set the motor limits. * Attention, this function is plugin specific. Engines won't react 100% the same. * @param upperLimit the upper limit * @param lowerLimit lower limit * @param motorIndex the motor's index, 0 or 1. */ setLimit(e, t, i = 0) { this._physicsPlugin.setLimit(this, e, t, i); } } ke._PhysicsImpostorParser = function(c, e, t) { return new tr(e, t.physicsImpostor, { mass: t.physicsMass, friction: t.physicsFriction, restitution: t.physicsRestitution }, c); }; class tr { /** * Specifies if the physics imposter is disposed */ get isDisposed() { return this._isDisposed; } /** * Gets the mass of the physics imposter */ get mass() { return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyMass(this) : 0; } set mass(e) { this.setMass(e); } /** * Gets the coefficient of friction */ get friction() { return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyFriction(this) : 0; } /** * Sets the coefficient of friction */ set friction(e) { this._physicsEngine && this._physicsEngine.getPhysicsPlugin().setBodyFriction(this, e); } /** * Gets the coefficient of restitution */ get restitution() { return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyRestitution(this) : 0; } /** * Sets the coefficient of restitution */ set restitution(e) { this._physicsEngine && this._physicsEngine.getPhysicsPlugin().setBodyRestitution(this, e); } /** * Gets the pressure of a soft body; only supported by the AmmoJSPlugin */ get pressure() { if (!this._physicsEngine) return 0; const e = this._physicsEngine.getPhysicsPlugin(); return e.setBodyPressure ? e.getBodyPressure(this) : 0; } /** * Sets the pressure of a soft body; only supported by the AmmoJSPlugin */ set pressure(e) { if (!this._physicsEngine) return; const t = this._physicsEngine.getPhysicsPlugin(); t.setBodyPressure && t.setBodyPressure(this, e); } /** * Gets the stiffness of a soft body; only supported by the AmmoJSPlugin */ get stiffness() { if (!this._physicsEngine) return 0; const e = this._physicsEngine.getPhysicsPlugin(); return e.getBodyStiffness ? e.getBodyStiffness(this) : 0; } /** * Sets the stiffness of a soft body; only supported by the AmmoJSPlugin */ set stiffness(e) { if (!this._physicsEngine) return; const t = this._physicsEngine.getPhysicsPlugin(); t.setBodyStiffness && t.setBodyStiffness(this, e); } /** * Gets the velocityIterations of a soft body; only supported by the AmmoJSPlugin */ get velocityIterations() { if (!this._physicsEngine) return 0; const e = this._physicsEngine.getPhysicsPlugin(); return e.getBodyVelocityIterations ? 
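/* A rough usage sketch for the joint classes above, assuming `anchorImpostor` and `boxImpostor`
   are two physics impostors (see the impostor class further down) in a scene with physics enabled.
   The joint-data fields used here (mainPivot, connectedPivot, mainAxis, connectedAxis) are the
   usual Babylon.js PhysicsJointData fields, and their exact interpretation is plugin-dependent:

   const hinge = new wce({
     mainPivot: new D(0, 0, 0),
     connectedPivot: new D(0, -1, 0),
     mainAxis: new D(0, 0, 1),
     connectedAxis: new D(0, 0, 1)
   });
   anchorImpostor.addJoint(boxImpostor, hinge);   // registers the joint with the physics engine
   hinge.setMotor(3, 20);                         // plugin specific: motor force / max force
   hinge.setLimit(Math.PI / 2, -Math.PI / 2);     // upper limit first, then lower
*/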
e.getBodyVelocityIterations(this) : 0; } /** * Sets the velocityIterations of a soft body; only supported by the AmmoJSPlugin */ set velocityIterations(e) { if (!this._physicsEngine) return; const t = this._physicsEngine.getPhysicsPlugin(); t.setBodyVelocityIterations && t.setBodyVelocityIterations(this, e); } /** * Gets the positionIterations of a soft body; only supported by the AmmoJSPlugin */ get positionIterations() { if (!this._physicsEngine) return 0; const e = this._physicsEngine.getPhysicsPlugin(); return e.getBodyPositionIterations ? e.getBodyPositionIterations(this) : 0; } /** * Sets the positionIterations of a soft body; only supported by the AmmoJSPlugin */ set positionIterations(e) { if (!this._physicsEngine) return; const t = this._physicsEngine.getPhysicsPlugin(); t.setBodyPositionIterations && t.setBodyPositionIterations(this, e); } /** * Initializes the physics imposter * @param object The physics-enabled object used as the physics imposter * @param type The type of the physics imposter. Types are available as static members of this class. * @param _options The options for the physics imposter * @param _scene The Babylon scene */ constructor(e, t, i = { mass: 0 }, r) { if (this.object = e, this.type = t, this._options = i, this._scene = r, this._pluginData = {}, this._bodyUpdateRequired = !1, this._onBeforePhysicsStepCallbacks = new Array(), this._onAfterPhysicsStepCallbacks = new Array(), this._onPhysicsCollideCallbacks = [], this._deltaPosition = D.Zero(), this._isDisposed = !1, this.soft = !1, this.segments = 0, this._tmpQuat = new Ze(), this._tmpQuat2 = new Ze(), this.beforeStep = () => { this._physicsEngine && (this.object.translate(this._deltaPosition, -1), this._deltaRotationConjugated && this.object.rotationQuaternion && this.object.rotationQuaternion.multiplyToRef(this._deltaRotationConjugated, this.object.rotationQuaternion), this.object.computeWorldMatrix(!1), this.object.parent && this.object.rotationQuaternion ? (this.getParentsRotation(), this._tmpQuat.multiplyToRef(this.object.rotationQuaternion, this._tmpQuat)) : this._tmpQuat.copyFrom(this.object.rotationQuaternion || new Ze()), this._options.disableBidirectionalTransformation || this.object.rotationQuaternion && this._physicsEngine.getPhysicsPlugin().setPhysicsBodyTransformation( this, /*bInfo.boundingBox.centerWorld*/ this.object.getAbsolutePosition(), this._tmpQuat ), this._onBeforePhysicsStepCallbacks.forEach((s) => { s(this); })); }, this.afterStep = () => { this._physicsEngine && (this._onAfterPhysicsStepCallbacks.forEach((s) => { s(this); }), this._physicsEngine.getPhysicsPlugin().setTransformationFromPhysicsBody(this), this.object.parent && this.object.rotationQuaternion && (this.getParentsRotation(), this._tmpQuat.conjugateInPlace(), this._tmpQuat.multiplyToRef(this.object.rotationQuaternion, this.object.rotationQuaternion)), this.object.setAbsolutePosition(this.object.position), this._deltaRotation ? 
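/* A minimal usage sketch for the impostor constructor above, assuming `scene` already has a physics
   engine (scene.enablePhysics(...) was called with some plugin -- otherwise the constructor logs
   "Physics not enabled" and returns) and `sphere` / `ground` are meshes of that scene:

   sphere.physicsImpostor = new tr(sphere, tr.SphereImpostor, { mass: 1, restitution: 0.7 }, scene);
   ground.physicsImpostor = new tr(ground, tr.BoxImpostor, { mass: 0, friction: 0.5 }, scene);

   // mass 0 makes the ground static; friction and restitution default to 0.2 when omitted.
   // Note the warning above: impostors on child meshes must be created before the parent's impostor.
*/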
(this.object.rotationQuaternion && this.object.rotationQuaternion.multiplyToRef(this._deltaRotation, this.object.rotationQuaternion), this._deltaPosition.applyRotationQuaternionToRef(this._deltaRotation, tr._TmpVecs[0]), this.object.translate(tr._TmpVecs[0], 1)) : this.object.translate(this._deltaPosition, 1), this.object.computeWorldMatrix(!0)); }, this.onCollideEvent = null, this.onCollide = (s) => { if (!this._onPhysicsCollideCallbacks.length && !this.onCollideEvent || !this._physicsEngine) return; const n = this._physicsEngine.getImpostorWithPhysicsBody(s.body); n && (this.onCollideEvent && this.onCollideEvent(this, n), this._onPhysicsCollideCallbacks.filter((a) => a.otherImpostors.indexOf(n) !== -1).forEach((a) => { a.callback(this, n, s.point, s.distance, s.impulse, s.normal); })); }, !this.object) { Ce.Error("No object was provided. A physics object is obligatory"); return; } this.object.parent && i.mass !== 0 && Ce.Warn("A physics impostor has been created for an object which has a parent. Babylon physics currently works in local space so unexpected issues may occur."), !this._scene && e.getScene && (this._scene = e.getScene()), this._scene && (this.type > 100 && (this.soft = !0), this._physicsEngine = this._scene.getPhysicsEngine(), this._physicsEngine ? (this.object.rotationQuaternion || (this.object.rotation ? this.object.rotationQuaternion = Ze.RotationYawPitchRoll(this.object.rotation.y, this.object.rotation.x, this.object.rotation.z) : this.object.rotationQuaternion = new Ze()), this._options.mass = i.mass === void 0 ? 0 : i.mass, this._options.friction = i.friction === void 0 ? 0.2 : i.friction, this._options.restitution = i.restitution === void 0 ? 0.2 : i.restitution, this.soft && (this._options.mass = this._options.mass > 0 ? this._options.mass : 1, this._options.pressure = i.pressure === void 0 ? 200 : i.pressure, this._options.stiffness = i.stiffness === void 0 ? 1 : i.stiffness, this._options.velocityIterations = i.velocityIterations === void 0 ? 20 : i.velocityIterations, this._options.positionIterations = i.positionIterations === void 0 ? 20 : i.positionIterations, this._options.fixedPoints = i.fixedPoints === void 0 ? 0 : i.fixedPoints, this._options.margin = i.margin === void 0 ? 0 : i.margin, this._options.damping = i.damping === void 0 ? 0 : i.damping, this._options.path = i.path === void 0 ? null : i.path, this._options.shape = i.shape === void 0 ? null : i.shape), this._joints = [], !this.object.parent || this._options.ignoreParent ? this._init() : this.object.parent.physicsImpostor && Ce.Warn("You must affect impostors to children before affecting impostor to parent.")) : Ce.Error("Physics not enabled. Please use scene.enablePhysics(...) before creating impostors.")); } /** * This function will completely initialize this impostor. * It will create a new body - but only if this mesh has no parent. * If it has, this impostor will not be used other than to define the impostor * of the child mesh. * @internal */ _init() { this._physicsEngine && (this._physicsEngine.removeImpostor(this), this.physicsBody = null, this._parent = this._parent || this._getPhysicsParent(), !this._isDisposed && (!this.parent || this._options.ignoreParent) && this._physicsEngine.addImpostor(this)); } _getPhysicsParent() { return this.object.parent instanceof xr ? this.object.parent.physicsImpostor : null; } /** * Should a new body be generated. 
* @returns boolean specifying if body initialization is required */ isBodyInitRequired() { return this._bodyUpdateRequired || !this._physicsBody && (!this._parent || !!this._options.ignoreParent); } /** * Sets the updated scaling */ setScalingUpdated() { this.forceUpdate(); } /** * Force a regeneration of this or the parent's impostor's body. * Use with caution - This will remove all previously-instantiated joints. */ forceUpdate() { this._init(), this.parent && !this._options.ignoreParent && this.parent.forceUpdate(); } /*public get mesh(): AbstractMesh { return this._mesh; }*/ /** * Gets the body that holds this impostor. Either its own, or its parent. */ get physicsBody() { return this._parent && !this._options.ignoreParent ? this._parent.physicsBody : this._physicsBody; } /** * Get the parent of the physics imposter * @returns Physics imposter or null */ get parent() { return !this._options.ignoreParent && this._parent ? this._parent : null; } /** * Sets the parent of the physics imposter */ set parent(e) { this._parent = e; } /** * Set the physics body. Used mainly by the physics engine/plugin */ set physicsBody(e) { this._physicsBody && this._physicsEngine && this._physicsEngine.getPhysicsPlugin().removePhysicsBody(this), this._physicsBody = e, this.resetUpdateFlags(); } /** * Resets the update flags */ resetUpdateFlags() { this._bodyUpdateRequired = !1; } /** * Gets the object extents * @returns the object extents */ getObjectExtents() { if (this.object.getBoundingInfo) { const e = this.object.rotationQuaternion, t = this.object.scaling.clone(); this.object.rotationQuaternion = tr.IDENTITY_QUATERNION; const i = this.object.computeWorldMatrix && this.object.computeWorldMatrix(!0); i && i.decompose(t, void 0, void 0); const s = this.object.getBoundingInfo().boundingBox.extendSize.scale(2).multiplyInPlace(t); return s.x = Math.abs(s.x), s.y = Math.abs(s.y), s.z = Math.abs(s.z), this.object.rotationQuaternion = e, this.object.computeWorldMatrix && this.object.computeWorldMatrix(!0), s; } else return tr.DEFAULT_OBJECT_SIZE; } /** * Gets the object center * @returns The object center */ getObjectCenter() { return this.object.getBoundingInfo ? this.object.getBoundingInfo().boundingBox.centerWorld : this.object.position; } /** * Get a specific parameter from the options parameters * @param paramName The object parameter name * @returns The object parameter */ getParam(e) { return this._options[e]; } /** * Sets a specific parameter in the options given to the physics plugin * @param paramName The parameter name * @param value The value of the parameter */ setParam(e, t) { this._options[e] = t, this._bodyUpdateRequired = !0; } /** * Specifically change the body's mass. Won't recreate the physics body object * @param mass The mass of the physics imposter */ setMass(e) { this.getParam("mass") !== e && this.setParam("mass", e), this._physicsEngine && this._physicsEngine.getPhysicsPlugin().setBodyMass(this, e); } /** * Gets the linear velocity * @returns linear velocity or null */ getLinearVelocity() { return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getLinearVelocity(this) : D.Zero(); } /** * Sets the linear velocity * @param velocity linear velocity or null */ setLinearVelocity(e) { this._physicsEngine && this._physicsEngine.getPhysicsPlugin().setLinearVelocity(this, e); } /** * Gets the angular velocity * @returns angular velocity or null */ getAngularVelocity() { return this._physicsEngine ? 
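/* A minimal sketch of runtime tweaks on an existing impostor, continuing the previous example
   (D is the bundled Vector3 class):

   const imp = sphere.physicsImpostor;
   imp.setMass(2);                          // equivalent to imp.mass = 2; the body is not recreated
   imp.setLinearVelocity(new D(0, 5, 0));
   imp.friction = 0.1;                      // forwarded to the plugin's setBodyFriction
   console.log(imp.getObjectExtents());     // scaled bounding-box extents used to size the body
*/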
this._physicsEngine.getPhysicsPlugin().getAngularVelocity(this) : D.Zero(); } /** * Sets the angular velocity * @param velocity The velocity or null */ setAngularVelocity(e) { this._physicsEngine && this._physicsEngine.getPhysicsPlugin().setAngularVelocity(this, e); } /** * Execute a function with the physics plugin native code * Provide a function the will have two variables - the world object and the physics body object * @param func The function to execute with the physics plugin native code */ executeNativeFunction(e) { this._physicsEngine && e(this._physicsEngine.getPhysicsPlugin().world, this.physicsBody); } /** * Register a function that will be executed before the physics world is stepping forward * @param func The function to execute before the physics world is stepped forward */ registerBeforePhysicsStep(e) { this._onBeforePhysicsStepCallbacks.push(e); } /** * Unregister a function that will be executed before the physics world is stepping forward * @param func The function to execute before the physics world is stepped forward */ unregisterBeforePhysicsStep(e) { const t = this._onBeforePhysicsStepCallbacks.indexOf(e); t > -1 ? this._onBeforePhysicsStepCallbacks.splice(t, 1) : Ce.Warn("Function to remove was not found"); } /** * Register a function that will be executed after the physics step * @param func The function to execute after physics step */ registerAfterPhysicsStep(e) { this._onAfterPhysicsStepCallbacks.push(e); } /** * Unregisters a function that will be executed after the physics step * @param func The function to execute after physics step */ unregisterAfterPhysicsStep(e) { const t = this._onAfterPhysicsStepCallbacks.indexOf(e); t > -1 ? this._onAfterPhysicsStepCallbacks.splice(t, 1) : Ce.Warn("Function to remove was not found"); } /** * register a function that will be executed when this impostor collides against a different body * @param collideAgainst Physics imposter, or array of physics imposters to collide against * @param func Callback that is executed on collision */ registerOnPhysicsCollide(e, t) { const i = e instanceof Array ? e : [e]; this._onPhysicsCollideCallbacks.push({ callback: t, otherImpostors: i }); } /** * Unregisters the physics imposter's collision callback * @param collideAgainst The physics object to collide against * @param func Callback to execute on collision */ unregisterOnPhysicsCollide(e, t) { const i = e instanceof Array ? e : [e]; let r = -1; this._onPhysicsCollideCallbacks.some((n, a) => { if (n.callback === t && n.otherImpostors.length === i.length) { const l = n.otherImpostors.every((o) => i.indexOf(o) > -1); return l && (r = a), l; } return !1; }) ? this._onPhysicsCollideCallbacks.splice(r, 1) : Ce.Warn("Function to remove was not found"); } /** * Get the parent rotation * @returns The parent rotation */ getParentsRotation() { let e = this.object.parent; for (this._tmpQuat.copyFromFloats(0, 0, 0, 1); e; ) e.rotationQuaternion ? 
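/* A minimal sketch of the step and collision hooks documented above, continuing the earlier
   impostor example:

   const onHit = (main, collidedAgainst, point) => {
     console.log("collision at", point);
   };
   sphere.physicsImpostor.registerOnPhysicsCollide(ground.physicsImpostor, onHit);

   sphere.physicsImpostor.registerBeforePhysicsStep((imp) => {
     // runs every frame before the physics world is stepped forward
   });

   // ...later:
   sphere.physicsImpostor.unregisterOnPhysicsCollide(ground.physicsImpostor, onHit);
*/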
this._tmpQuat2.copyFrom(e.rotationQuaternion) : Ze.RotationYawPitchRollToRef(e.rotation.y, e.rotation.x, e.rotation.z, this._tmpQuat2), this._tmpQuat.multiplyToRef(this._tmpQuat2, this._tmpQuat), e = e.parent; return this._tmpQuat; } /** * Apply a force * @param force The force to apply * @param contactPoint The contact point for the force * @returns The physics imposter */ applyForce(e, t) { return this._physicsEngine && this._physicsEngine.getPhysicsPlugin().applyForce(this, e, t), this; } /** * Apply an impulse * @param force The impulse force * @param contactPoint The contact point for the impulse force * @returns The physics imposter */ applyImpulse(e, t) { return this._physicsEngine && this._physicsEngine.getPhysicsPlugin().applyImpulse(this, e, t), this; } /** * A help function to create a joint * @param otherImpostor A physics imposter used to create a joint * @param jointType The type of joint * @param jointData The data for the joint * @returns The physics imposter */ createJoint(e, t, i) { const r = new ta(t, i); return this.addJoint(e, r), this; } /** * Add a joint to this impostor with a different impostor * @param otherImpostor A physics imposter used to add a joint * @param joint The joint to add * @returns The physics imposter */ addJoint(e, t) { return this._joints.push({ otherImpostor: e, joint: t }), this._physicsEngine && this._physicsEngine.addJoint(this, e, t), this; } /** * Add an anchor to a cloth impostor * @param otherImpostor rigid impostor to anchor to * @param width ratio across width from 0 to 1 * @param height ratio up height from 0 to 1 * @param influence the elasticity between cloth impostor and anchor from 0, very stretchy to 1, little stretch * @param noCollisionBetweenLinkedBodies when true collisions between cloth impostor and anchor are ignored; default false * @returns impostor the soft imposter */ addAnchor(e, t, i, r, s) { if (!this._physicsEngine) return this; const n = this._physicsEngine.getPhysicsPlugin(); return n.appendAnchor ? (this._physicsEngine && n.appendAnchor(this, e, t, i, r, s), this) : this; } /** * Add a hook to a rope impostor * @param otherImpostor rigid impostor to anchor to * @param length ratio across rope from 0 to 1 * @param influence the elasticity between rope impostor and anchor from 0, very stretchy to 1, little stretch * @param noCollisionBetweenLinkedBodies when true collisions between soft impostor and anchor are ignored; default false * @returns impostor the rope imposter */ addHook(e, t, i, r) { if (!this._physicsEngine) return this; const s = this._physicsEngine.getPhysicsPlugin(); return s.appendAnchor ? (this._physicsEngine && s.appendHook(this, e, t, i, r), this) : this; } /** * Will keep this body still, in a sleep mode. * @returns the physics imposter */ sleep() { return this._physicsEngine && this._physicsEngine.getPhysicsPlugin().sleepBody(this), this; } /** * Wake the body up. * @returns The physics imposter */ wakeUp() { return this._physicsEngine && this._physicsEngine.getPhysicsPlugin().wakeUpBody(this), this; } /** * Clones the physics imposter * @param newObject The physics imposter clones to this physics-enabled object * @returns A nullable physics imposter */ clone(e) { return e ? 
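/* A minimal sketch for forces and sleep state, continuing the earlier impostor example:

   const imp = sphere.physicsImpostor;
   imp.applyImpulse(new D(0, 3, 0), imp.getObjectCenter());   // instantaneous kick through the center
   imp.applyForce(new D(5, 0, 0), imp.getObjectCenter());     // force applied for the current step
   imp.sleep();     // keep the body still...
   imp.wakeUp();    // ...until it is explicitly woken again
*/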
new tr(e, this.type, this._options, this._scene) : null; } /** * Disposes the physics imposter */ dispose() { this._physicsEngine && (this._joints.forEach((e) => { this._physicsEngine && this._physicsEngine.removeJoint(this, e.otherImpostor, e.joint); }), this._physicsEngine.removeImpostor(this), this.parent && this.parent.forceUpdate(), this._isDisposed = !0); } /** * Sets the delta position * @param position The delta position amount */ setDeltaPosition(e) { this._deltaPosition.copyFrom(e); } /** * Sets the delta rotation * @param rotation The delta rotation amount */ setDeltaRotation(e) { this._deltaRotation || (this._deltaRotation = new Ze()), this._deltaRotation.copyFrom(e), this._deltaRotationConjugated = this._deltaRotation.conjugate(); } /** * Gets the box size of the physics imposter and stores the result in the input parameter * @param result Stores the box size * @returns The physics imposter */ getBoxSizeToRef(e) { return this._physicsEngine && this._physicsEngine.getPhysicsPlugin().getBoxSizeToRef(this, e), this; } /** * Gets the radius of the physics imposter * @returns Radius of the physics imposter */ getRadius() { return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getRadius(this) : 0; } /** * Sync a bone with this impostor * @param bone The bone to sync to the impostor. * @param boneMesh The mesh that the bone is influencing. * @param jointPivot The pivot of the joint / bone in local space. * @param distToJoint Optional distance from the impostor to the joint. * @param adjustRotation Optional quaternion for adjusting the local rotation of the bone. */ syncBoneWithImpostor(e, t, i, r, s) { const n = tr._TmpVecs[0], a = this.object; if (a.rotationQuaternion) if (s) { const l = tr._TmpQuat; a.rotationQuaternion.multiplyToRef(s, l), e.setRotationQuaternion(l, qr.WORLD, t); } else e.setRotationQuaternion(a.rotationQuaternion, qr.WORLD, t); n.x = 0, n.y = 0, n.z = 0, i && (n.x = i.x, n.y = i.y, n.z = i.z, e.getDirectionToRef(n, t, n), r == null && (r = i.length()), n.x *= r, n.y *= r, n.z *= r), e.getParent() ? (n.addInPlace(a.getAbsolutePosition()), e.setAbsolutePosition(n, t)) : (t.setAbsolutePosition(a.getAbsolutePosition()), t.position.x -= n.x, t.position.y -= n.y, t.position.z -= n.z); } /** * Sync impostor to a bone * @param bone The bone that the impostor will be synced to. * @param boneMesh The mesh that the bone is influencing. * @param jointPivot The pivot of the joint / bone in local space. * @param distToJoint Optional distance from the impostor to the joint. * @param adjustRotation Optional quaternion for adjusting the local rotation of the bone. 
* @param boneAxis Optional vector3 axis the bone is aligned with */ syncImpostorWithBone(e, t, i, r, s, n) { const a = this.object; if (a.rotationQuaternion) if (s) { const u = tr._TmpQuat; e.getRotationQuaternionToRef(qr.WORLD, t, u), u.multiplyToRef(s, a.rotationQuaternion); } else e.getRotationQuaternionToRef(qr.WORLD, t, a.rotationQuaternion); const l = tr._TmpVecs[0], o = tr._TmpVecs[1]; n || (n = tr._TmpVecs[2], n.x = 0, n.y = 1, n.z = 0), e.getDirectionToRef(n, t, o), e.getAbsolutePositionToRef(t, l), r == null && i && (r = i.length()), r != null && (l.x += o.x * r, l.y += o.y * r, l.z += o.z * r), a.setAbsolutePosition(l); } } tr.DEFAULT_OBJECT_SIZE = new D(1, 1, 1); tr.IDENTITY_QUATERNION = Ze.Identity(); tr._TmpVecs = kc.BuildArray(3, D.Zero); tr._TmpQuat = Ze.Identity(); tr.NoImpostor = 0; tr.SphereImpostor = 1; tr.BoxImpostor = 2; tr.PlaneImpostor = 3; tr.MeshImpostor = 4; tr.CapsuleImpostor = 6; tr.CylinderImpostor = 7; tr.ParticleImpostor = 8; tr.HeightmapImpostor = 9; tr.ConvexHullImpostor = 10; tr.CustomImpostor = 100; tr.RopeImpostor = 101; tr.ClothImpostor = 102; tr.SoftbodyImpostor = 103; var FC; (function(c) { c[c.Clean = 0] = "Clean", c[c.Stop = 1] = "Stop", c[c.Sync = 2] = "Sync", c[c.NoSync = 3] = "NoSync"; })(FC || (FC = {})); class fr { /** * Gets or sets a boolean indicating if entire scene must be loaded even if scene contains incremental data */ static get ForceFullSceneLoadingForIncremental() { return uu.ForceFullSceneLoadingForIncremental; } static set ForceFullSceneLoadingForIncremental(e) { uu.ForceFullSceneLoadingForIncremental = e; } /** * Gets or sets a boolean indicating if loading screen must be displayed while loading a scene */ static get ShowLoadingScreen() { return uu.ShowLoadingScreen; } static set ShowLoadingScreen(e) { uu.ShowLoadingScreen = e; } /** * Defines the current logging level (while loading the scene) * @ignorenaming */ // eslint-disable-next-line @typescript-eslint/naming-convention static get loggingLevel() { return uu.loggingLevel; } // eslint-disable-next-line @typescript-eslint/naming-convention static set loggingLevel(e) { uu.loggingLevel = e; } /** * Gets or set a boolean indicating if matrix weights must be cleaned upon loading */ static get CleanBoneMatrixWeights() { return uu.CleanBoneMatrixWeights; } static set CleanBoneMatrixWeights(e) { uu.CleanBoneMatrixWeights = e; } /** * Gets the default plugin (used to load Babylon files) * @returns the .babylon plugin */ static GetDefaultPlugin() { return fr._RegisteredPlugins[".babylon"]; } static _GetPluginForExtension(e) { const t = fr._RegisteredPlugins[e]; return t || (Ce.Warn("Unable to find a plugin to load " + e + " files. Trying to use .babylon default plugin. To load from a specific filetype (eg. gltf) see: https://doc.babylonjs.com/features/featuresDeepDive/importers/loadingFileTypes"), fr.GetDefaultPlugin()); } static _GetPluginForDirectLoad(e) { for (const t in fr._RegisteredPlugins) { const i = fr._RegisteredPlugins[t].plugin; if (i.canDirectLoad && i.canDirectLoad(e)) return fr._RegisteredPlugins[t]; } return fr.GetDefaultPlugin(); } static _GetPluginForFilename(e) { const t = e.indexOf("?"); t !== -1 && (e = e.substring(0, t)); const i = e.lastIndexOf("."), r = e.substring(i, e.length).toLowerCase(); return fr._GetPluginForExtension(r); } static _GetDirectLoad(e) { return e.substr(0, 5) === "data:" ? e.substr(5) : null; } static _FormatErrorMessage(e, t, i) { let s = "Unable to load from " + (e.rawData ? "binary data" : e.url); return t ? 
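/* A minimal sketch of the static loader switches above (fr corresponds to Babylon.js's SceneLoader);
   these are typically set once, before any load call:

   fr.ShowLoadingScreen = false;            // skip the engine's loading UI while appending scenes
   fr.loggingLevel = fr.DETAILED_LOGGING;   // the 0..3 logging constants are assigned after the class
   fr.OnPluginActivatedObservable.add((plugin) => {
     console.log("loader plugin in use:", plugin.name);
   });
*/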
s += `: ${t}` : i && (s += `: ${i}`), s; } static _LoadData(e, t, i, r, s, n, a, l) { const o = fr._GetDirectLoad(e.url); if (e.rawData && !a) throw "When using ArrayBufferView to load data the file extension must be provided."; const u = a ? fr._GetPluginForExtension(a) : o ? fr._GetPluginForDirectLoad(e.url) : fr._GetPluginForFilename(e.url); if (e.rawData && !u.isBinary) throw "Loading from ArrayBufferView can not be used with plugins that don't support binary loading."; let h; if (u.plugin.createPlugin !== void 0 ? h = u.plugin.createPlugin() : h = u.plugin, !h) throw "The loader plugin corresponding to the file type you are trying to load has not been found. If using es6, please import the plugin you wish to use before."; if (fr.OnPluginActivatedObservable.notifyObservers(h), o && (h.canDirectLoad && h.canDirectLoad(e.url) || !wL(e.url))) { if (h.directLoad) { const b = h.directLoad(t, o); b.then ? b.then((S) => { i(h, S); }).catch((S) => { s("Error in directLoad of _loadData: " + S, S); }) : i(h, b); } else i(h, o); return h; } const d = u.isBinary, f = (b, S) => { if (t.isDisposed) { s("Scene has been disposed"); return; } i(h, b, S); }; let p = null, m = !1; const _ = h.onDisposeObservable; _ && _.add(() => { m = !0, p && (p.abort(), p = null), n(); }); const v = () => { if (m) return; const b = (S, M) => { s(S == null ? void 0 : S.statusText, M); }; if (!h.loadFile && e.rawData) throw "Plugin does not support loading ArrayBufferView."; p = h.loadFile ? h.loadFile(t, e.rawData || e.file || e.url, e.rootUrl, f, r, d, b, l) : t._loadFile(e.file || e.url, f, r, !0, d, b); }, C = t.getEngine(); let x = C.enableOfflineSupport; if (x) { let b = !1; for (const S of t.disableOfflineSupportExceptionRules) if (S.test(e.url)) { b = !0; break; } x = !b; } return x && $e.OfflineProviderFactory ? 
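/* How the plugin resolution above behaves in practice (a small sketch; the public wrappers
   GetPluginForExtension and IsPluginForExtensionAvailable are defined just below):

   fr.IsPluginForExtensionAvailable(".babylon");   // true once the built-in .babylon plugin has registered
   fr.IsPluginForExtensionAvailable(".gltf");      // false unless a glTF loader plugin was registered
   fr.GetPluginForExtension(".gltf");              // falls back to the .babylon plugin and logs a warning
*/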
t.offlineProvider = $e.OfflineProviderFactory(e.url, v, C.disableManifestCheck) : v(), h; } static _GetFileInfo(e, t) { let i, r, s = null, n = null; if (!t) i = e, r = Ve.GetFilename(e), e = Ve.GetFolderPath(e); else if (t.name) { const a = t; i = `file:${a.name}`, r = a.name, s = a; } else if (ArrayBuffer.isView(t)) i = "", r = "arrayBuffer", n = t; else if (typeof t == "string" && t.startsWith("data:")) i = t, r = ""; else { const a = t; if (a.substr(0, 1) === "/") return Ve.Error("Wrong sceneFilename parameter"), null; i = e + a, r = a; } return { url: i, rootUrl: e, name: r, file: s, rawData: n }; } // Public functions /** * Gets a plugin that can load the given extension * @param extension defines the extension to load * @returns a plugin or null if none works */ static GetPluginForExtension(e) { return fr._GetPluginForExtension(e).plugin; } /** * Gets a boolean indicating that the given extension can be loaded * @param extension defines the extension to load * @returns true if the extension is supported */ static IsPluginForExtensionAvailable(e) { return !!fr._RegisteredPlugins[e]; } /** * Adds a new plugin to the list of registered plugins * @param plugin defines the plugin to add */ static RegisterPlugin(e) { if (typeof e.extensions == "string") { const t = e.extensions; fr._RegisteredPlugins[t.toLowerCase()] = { plugin: e, isBinary: !1 }; } else { const t = e.extensions; Object.keys(t).forEach((i) => { fr._RegisteredPlugins[i.toLowerCase()] = { plugin: e, isBinary: t[i].isBinary }; }); } } /** * Import meshes into a scene * @param meshNames an array of mesh names, a single mesh name, or empty string for all meshes that filter what meshes are imported * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene the instance of BABYLON.Scene to append to * @param onSuccess a callback with a list of imported meshes, particleSystems, skeletons, and animationGroups when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin * @returns The loaded plugin */ static ImportMesh(e, t, i = "", r = gi.LastCreatedScene, s = null, n = null, a = null, l = null, o = "") { if (!r) return Ce.Error("No scene available to import mesh to"), null; const u = fr._GetFileInfo(t, i); if (!u) return null; const h = {}; r.addPendingData(h); const d = () => { r.removePendingData(h); }, f = (_, v) => { const C = fr._FormatErrorMessage(u, _, v); a ? a(r, C, new F4(C, $C.SceneLoaderError, v)) : Ce.Error(C), d(); }, p = n ? 
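/* A minimal sketch for the mesh import entry point documented above, assuming `scene` is an
   existing scene and the URL is a hypothetical, reachable asset location; the promise-based
   ImportMeshAsync defined just below wraps this same call:

   fr.ImportMesh(
     "",                                // empty string -> import every mesh in the file
     "https://example.com/assets/",     // hypothetical rootUrl
     "model.babylon",
     scene,
     (meshes, particleSystems, skeletons, animationGroups) => {
       console.log("loaded " + meshes.length + " meshes");
     },
     null,                              // onProgress
     (sc, message) => console.error(message)
   );
*/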
(_) => { try { n(_); } catch (v) { f("Error in onProgress callback: " + v, v); } } : void 0, m = (_, v, C, x, b, S, M) => { if (r.importedMeshesFiles.push(u.url), s) try { s(_, v, C, x, b, S, M); } catch (R) { f("Error in onSuccess callback: " + R, R); } r.removePendingData(h); }; return fr._LoadData(u, r, (_, v, C) => { if (_.rewriteRootURL && (u.rootUrl = _.rewriteRootURL(u.rootUrl, C)), _.importMesh) { const x = _, b = [], S = [], M = []; if (!x.importMesh(e, r, v, u.rootUrl, b, S, M, f)) return; r.loadingPluginName = _.name, m(b, S, M, [], [], [], []); } else _.importMeshAsync(e, r, v, u.rootUrl, p, u.name).then((b) => { r.loadingPluginName = _.name, m(b.meshes, b.particleSystems, b.skeletons, b.animationGroups, b.transformNodes, b.geometries, b.lights); }).catch((b) => { f(b.message, b); }); }, p, f, d, l, o); } /** * Import meshes into a scene * @param meshNames an array of mesh names, a single mesh name, or empty string for all meshes that filter what meshes are imported * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene the instance of BABYLON.Scene to append to * @param onProgress a callback with a progress event for each file being loaded * @param pluginExtension the extension used to determine the plugin * @returns The loaded list of imported meshes, particle systems, skeletons, and animation groups */ static ImportMeshAsync(e, t, i = "", r = gi.LastCreatedScene, s = null, n = null, a = "") { return new Promise((l, o) => { fr.ImportMesh(e, t, i, r, (u, h, d, f, p, m, _) => { l({ meshes: u, particleSystems: h, skeletons: d, animationGroups: f, transformNodes: p, geometries: m, lights: _ }); }, s, (u, h, d) => { o(d || new Error(h)); }, n, a); }); } /** * Load a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param engine is the instance of BABYLON.Engine to use to create the scene * @param onSuccess a callback with the scene when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin * @returns The loaded plugin */ static Load(e, t = "", i = gi.LastCreatedEngine, r = null, s = null, n = null, a = null, l = "") { return i ? fr.Append(e, t, new ii(i), r, s, n, a, l) : (Ve.Error("No engine available"), null); } /** * Load a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. 
http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param engine is the instance of BABYLON.Engine to use to create the scene * @param onProgress a callback with a progress event for each file being loaded * @param pluginExtension the extension used to determine the plugin * @returns The loaded scene */ static LoadAsync(e, t = "", i = gi.LastCreatedEngine, r = null, s = null, n = "") { return new Promise((a, l) => { fr.Load(e, t, i, (o) => { a(o); }, r, (o, u, h) => { l(h || new Error(u)); }, s, n); }); } /** * Append a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene is the instance of BABYLON.Scene to append to * @param onSuccess a callback with the scene when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin * @returns The loaded plugin */ static Append(e, t = "", i = gi.LastCreatedScene, r = null, s = null, n = null, a = null, l = "") { if (!i) return Ce.Error("No scene available to append to"), null; const o = fr._GetFileInfo(e, t); if (!o) return null; const u = {}; i.addPendingData(u); const h = () => { i.removePendingData(u); }; fr.ShowLoadingScreen && !this._ShowingLoadingScreen && (this._ShowingLoadingScreen = !0, i.getEngine().displayLoadingUI(), i.executeWhenReady(() => { i.getEngine().hideLoadingUI(), this._ShowingLoadingScreen = !1; })); const d = (m, _) => { const v = fr._FormatErrorMessage(o, m, _); n ? n(i, v, new F4(v, $C.SceneLoaderError, _)) : Ce.Error(v), h(); }, f = s ? (m) => { try { s(m); } catch (_) { d("Error in onProgress callback", _); } } : void 0, p = () => { if (r) try { r(i); } catch (m) { d("Error in onSuccess callback", m); } i.removePendingData(u); }; return fr._LoadData(o, i, (m, _) => { if (m.load) { if (!m.load(i, _, o.rootUrl, d)) return; i.loadingPluginName = m.name, p(); } else m.loadAsync(i, _, o.rootUrl, f, o.name).then(() => { i.loadingPluginName = m.name, p(); }).catch((C) => { d(C.message, C); }); }, f, d, h, a, l); } /** * Append a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. 
http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene is the instance of BABYLON.Scene to append to * @param onProgress a callback with a progress event for each file being loaded * @param pluginExtension the extension used to determine the plugin * @returns The given scene */ static AppendAsync(e, t = "", i = gi.LastCreatedScene, r = null, s = null, n = "") { return new Promise((a, l) => { fr.Append(e, t, i, (o) => { a(o); }, r, (o, u, h) => { l(h || new Error(u)); }, s, n); }); } /** * Load a scene into an asset container * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene is the instance of BABYLON.Scene to append to (default: last created scene) * @param onSuccess a callback with the scene when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin * @returns The loaded plugin */ static LoadAssetContainer(e, t = "", i = gi.LastCreatedScene, r = null, s = null, n = null, a = null, l = "") { if (!i) return Ce.Error("No scene available to load asset container to"), null; const o = fr._GetFileInfo(e, t); if (!o) return null; const u = {}; i.addPendingData(u); const h = () => { i.removePendingData(u); }, d = (m, _) => { const v = fr._FormatErrorMessage(o, m, _); n ? n(i, v, new F4(v, $C.SceneLoaderError, _)) : Ce.Error(v), h(); }, f = s ? (m) => { try { s(m); } catch (_) { d("Error in onProgress callback", _); } } : void 0, p = (m) => { if (r) try { r(m); } catch (_) { d("Error in onSuccess callback", _); } i.removePendingData(u); }; return fr._LoadData(o, i, (m, _) => { if (m.loadAssetContainer) { const C = m.loadAssetContainer(i, _, o.rootUrl, d); if (!C) return; C.populateRootNodes(), i.loadingPluginName = m.name, p(C); } else m.loadAssetContainerAsync ? m.loadAssetContainerAsync(i, _, o.rootUrl, f, o.name).then((C) => { C.populateRootNodes(), i.loadingPluginName = m.name, p(C); }).catch((C) => { d(C.message, C); }) : d("LoadAssetContainer is not supported by this plugin. Plugin did not provide a loadAssetContainer or loadAssetContainerAsync method."); }, f, d, h, a, l); } /** * Load a scene into an asset container * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. 
http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene (default: empty string) * @param scene is the instance of Scene to append to * @param onProgress a callback with a progress event for each file being loaded * @param pluginExtension the extension used to determine the plugin * @returns The loaded asset container */ static LoadAssetContainerAsync(e, t = "", i = gi.LastCreatedScene, r = null, s = null) { return new Promise((n, a) => { fr.LoadAssetContainer(e, t, i, (l) => { n(l); }, r, (l, o, u) => { a(u || new Error(o)); }, s); }); } /** * Import animations from a file into a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene is the instance of BABYLON.Scene to append to (default: last created scene) * @param overwriteAnimations when true, animations are cleaned before importing new ones. Animations are appended otherwise * @param animationGroupLoadingMode defines how to handle old animations groups before importing new ones * @param targetConverter defines a function used to convert animation targets from loaded scene to current scene (default: search node by name) * @param onSuccess a callback with the scene when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin */ static ImportAnimations(e, t = "", i = gi.LastCreatedScene, r = !0, s = FC.Clean, n = null, a = null, l = null, o = null, u = null) { if (!i) { Ce.Error("No scene available to load animations to"); return; } if (r) { for (const p of i.animatables) p.reset(); i.stopAllAnimations(), i.animationGroups.slice().forEach((p) => { p.dispose(); }), i.getNodes().forEach((p) => { p.animations && (p.animations = []); }); } else switch (s) { case FC.Clean: i.animationGroups.slice().forEach((f) => { f.dispose(); }); break; case FC.Stop: i.animationGroups.forEach((f) => { f.stop(); }); break; case FC.Sync: i.animationGroups.forEach((f) => { f.reset(), f.restart(); }); break; case FC.NoSync: break; default: Ce.Error("Unknown animation group loading mode value '" + s + "'"); return; } const h = i.animatables.length, d = (f) => { f.mergeAnimationsTo(i, i.animatables.slice(h), n), f.dispose(), i.onAnimationFileImportedObservable.notifyObservers(i), a && a(i); }; this.LoadAssetContainer(e, t, i, d, l, o, u); } /** * Import animations from a file into a scene * @param rootUrl a string that defines the root url for the scene and resources or the concatenation of rootURL and filename (e.g. http://example.com/test.glb) * @param sceneFilename a string that defines the name of the scene file or starts with "data:" following by the stringified version of the scene or a File object (default: empty string) * @param scene is the instance of BABYLON.Scene to append to (default: last created scene) * @param overwriteAnimations when true, animations are cleaned before importing new ones. 
Animations are appended otherwise * @param animationGroupLoadingMode defines how to handle old animations groups before importing new ones * @param targetConverter defines a function used to convert animation targets from loaded scene to current scene (default: search node by name) * @param onSuccess a callback with the scene when import succeeds * @param onProgress a callback with a progress event for each file being loaded * @param onError a callback with the scene, a message, and possibly an exception when import fails * @param pluginExtension the extension used to determine the plugin * @returns the updated scene with imported animations */ static ImportAnimationsAsync(e, t = "", i = gi.LastCreatedScene, r = !0, s = FC.Clean, n = null, a = null, l = null, o = null, u = null) { return new Promise((h, d) => { fr.ImportAnimations(e, t, i, r, s, n, (f) => { h(f); }, l, (f, p, m) => { d(m || new Error(p)); }, u); }); } } fr.NO_LOGGING = 0; fr.MINIMAL_LOGGING = 1; fr.SUMMARY_LOGGING = 2; fr.DETAILED_LOGGING = 3; fr.OnPluginActivatedObservable = new Fe(); fr._RegisteredPlugins = {}; fr._ShowingLoadingScreen = !1; class fl extends At { constructor(e, t, i = !0) { super(e, t), this._normalMatrix = new Ae(), this._storeEffectOnSubMeshes = i; } getEffect() { return this._storeEffectOnSubMeshes ? this._activeEffect : super.getEffect(); } isReady(e, t) { return e ? !this._storeEffectOnSubMeshes || !e.subMeshes || e.subMeshes.length === 0 ? !0 : this.isReadyForSubMesh(e, e.subMeshes[0], t) : !1; } _isReadyForSubMesh(e) { const t = e.materialDefines; return !!(!this.checkReadyOnEveryCall && e.effect && t && t._renderId === this.getScene().getRenderId()); } /** * Binds the given world matrix to the active effect * * @param world the matrix to bind */ bindOnlyWorldMatrix(e) { this._activeEffect.setMatrix("world", e); } /** * Binds the given normal matrix to the active effect * * @param normalMatrix the matrix to bind */ bindOnlyNormalMatrix(e) { this._activeEffect.setMatrix("normalMatrix", e); } bind(e, t) { t && this.bindForSubMesh(e, t, t.subMeshes[0]); } _afterBind(e, t = null) { super._afterBind(e, t), this.getScene()._cachedEffect = t, t && (t._forceRebindOnNextCall = !1); } _mustRebind(e, t, i = 1) { return e.isCachedMaterialInvalid(this, t, i); } dispose(e, t, i) { this._activeEffect = void 0, super.dispose(e, t, i); } } var ue; (function(c) { c[c.Float = 1] = "Float", c[c.Int = 2] = "Int", c[c.Vector2 = 4] = "Vector2", c[c.Vector3 = 8] = "Vector3", c[c.Vector4 = 16] = "Vector4", c[c.Color3 = 32] = "Color3", c[c.Color4 = 64] = "Color4", c[c.Matrix = 128] = "Matrix", c[c.Object = 256] = "Object", c[c.AutoDetect = 1024] = "AutoDetect", c[c.BasedOnInput = 2048] = "BasedOnInput", c[c.All = 4095] = "All"; })(ue || (ue = {})); var Le; (function(c) { c[c.Vertex = 1] = "Vertex", c[c.Fragment = 2] = "Fragment", c[c.Neutral = 4] = "Neutral", c[c.VertexAndFragment = 3] = "VertexAndFragment"; })(Le || (Le = {})); class PZ { constructor() { this.supportUniformBuffers = !1, this.attributes = [], this.uniforms = [], this.constants = [], this.samplers = [], this.functions = {}, this.extensions = {}, this.prePassOutput = {}, this.counters = {}, this._attributeDeclaration = "", this._uniformDeclaration = "", this._constantDeclaration = "", this._samplerDeclaration = "", this._varyingTransfer = "", this._injectAtEnd = "", this._repeatableContentAnchorIndex = 0, this._builtCompilationString = "", this.compilationString = ""; } /** * Finalize the compilation strings * @param state defines the current compilation 
state */ finalize(e) { const t = e.sharedData.emitComments, i = this.target === Le.Fragment; this.compilationString = ` ${t ? `//Entry point ` : ""}void main(void) { ${this.compilationString}`, this._constantDeclaration && (this.compilationString = ` ${t ? `//Constants ` : ""}${this._constantDeclaration} ${this.compilationString}`); let r = ""; for (const s in this.functions) r += this.functions[s] + ` `; this.compilationString = ` ${r} ${this.compilationString}`, !i && this._varyingTransfer && (this.compilationString = `${this.compilationString} ${this._varyingTransfer}`), this._injectAtEnd && (this.compilationString = `${this.compilationString} ${this._injectAtEnd}`), this.compilationString = `${this.compilationString} }`, this.sharedData.varyingDeclaration && (this.compilationString = ` ${t ? `//Varyings ` : ""}${this.sharedData.varyingDeclaration} ${this.compilationString}`), this._samplerDeclaration && (this.compilationString = ` ${t ? `//Samplers ` : ""}${this._samplerDeclaration} ${this.compilationString}`), this._uniformDeclaration && (this.compilationString = ` ${t ? `//Uniforms ` : ""}${this._uniformDeclaration} ${this.compilationString}`), this._attributeDeclaration && !i && (this.compilationString = ` ${t ? `//Attributes ` : ""}${this._attributeDeclaration} ${this.compilationString}`), this.compilationString = `precision highp float; ` + this.compilationString, this.compilationString = `#if defined(WEBGL2) || defines(WEBGPU) precision highp sampler2DArray; #endif ` + this.compilationString, i && (this.compilationString = `#if defined(PREPASS)\r #extension GL_EXT_draw_buffers : require\r layout(location = 0) out highp vec4 glFragData[SCENE_MRT_COUNT];\r highp vec4 gl_FragColor;\r #endif\r ` + this.compilationString); for (const s in this.extensions) { const n = this.extensions[s]; this.compilationString = ` ${n} ${this.compilationString}`; } this._builtCompilationString = this.compilationString; } /** @internal */ get _repeatableContentAnchor() { return `###___ANCHOR${this._repeatableContentAnchorIndex++}___###`; } /** * @internal */ _getFreeVariableName(e) { return e = e.replace(/[^a-zA-Z_]+/g, ""), this.sharedData.variableNames[e] === void 0 ? (this.sharedData.variableNames[e] = 0, e === "output" || e === "texture" ? e + this.sharedData.variableNames[e] : e) : (this.sharedData.variableNames[e]++, e + this.sharedData.variableNames[e]); } /** * @internal */ _getFreeDefineName(e) { return this.sharedData.defineNames[e] === void 0 ? 
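/* first use of a name seeds its counter at 0, later uses bump it; the returned define is always the name suffixed with the current counter, so every generated define stays unique */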
this.sharedData.defineNames[e] = 0 : this.sharedData.defineNames[e]++, e + this.sharedData.defineNames[e]; } /** * @internal */ _excludeVariableName(e) { this.sharedData.variableNames[e] = 0; } /** * @internal */ _emit2DSampler(e) { this.samplers.indexOf(e) < 0 && (this._samplerDeclaration += `uniform sampler2D ${e}; `, this.samplers.push(e)); } /** * @internal */ _emit2DArraySampler(e) { this.samplers.indexOf(e) < 0 && (this._samplerDeclaration += `uniform sampler2DArray ${e}; `, this.samplers.push(e)); } /** * @internal */ _getGLType(e) { switch (e) { case ue.Float: return "float"; case ue.Int: return "int"; case ue.Vector2: return "vec2"; case ue.Color3: case ue.Vector3: return "vec3"; case ue.Color4: case ue.Vector4: return "vec4"; case ue.Matrix: return "mat4"; } return ""; } /** * @internal */ _emitExtension(e, t, i = "") { this.extensions[e] || (i && (t = `#if ${i} ${t} #endif`), this.extensions[e] = t); } /** * @internal */ _emitFunction(e, t, i) { this.functions[e] || (this.sharedData.emitComments && (t = i + ` ` + t), this.functions[e] = t); } /** * @internal */ _emitCodeFromInclude(e, t, i) { if (i && i.repeatKey) return `#include<${e}>${i.substitutionVars ? "(" + i.substitutionVars + ")" : ""}[0..${i.repeatKey}] `; let r = Cr.IncludesShadersStore[e] + ` `; if (this.sharedData.emitComments && (r = t + ` ` + r), !i) return r; if (i.replaceStrings) for (let s = 0; s < i.replaceStrings.length; s++) { const n = i.replaceStrings[s]; r = r.replace(n.search, n.replace); } return r; } /** * @internal */ _emitFunctionFromInclude(e, t, i, r = "") { const s = e + r; if (!this.functions[s]) { if (!i || !i.removeAttributes && !i.removeUniforms && !i.removeVaryings && !i.removeIfDef && !i.replaceStrings) { i && i.repeatKey ? this.functions[s] = `#include<${e}>${i.substitutionVars ? "(" + i.substitutionVars + ")" : ""}[0..${i.repeatKey}] ` : this.functions[s] = `#include<${e}>${i != null && i.substitutionVars ? "(" + (i == null ? void 0 : i.substitutionVars) + ")" : ""} `, this.sharedData.emitComments && (this.functions[s] = t + ` ` + this.functions[s]); return; } if (this.functions[s] = Cr.IncludesShadersStore[e], this.sharedData.emitComments && (this.functions[s] = t + ` ` + this.functions[s]), i.removeIfDef && (this.functions[s] = this.functions[s].replace(/^\s*?#ifdef.+$/gm, ""), this.functions[s] = this.functions[s].replace(/^\s*?#endif.*$/gm, ""), this.functions[s] = this.functions[s].replace(/^\s*?#else.*$/gm, ""), this.functions[s] = this.functions[s].replace(/^\s*?#elif.*$/gm, "")), i.removeAttributes && (this.functions[s] = this.functions[s].replace(/\s*?attribute .+?;/g, ` `)), i.removeUniforms && (this.functions[s] = this.functions[s].replace(/\s*?uniform .*?;/g, ` `)), i.removeVaryings && (this.functions[s] = this.functions[s].replace(/\s*?(varying|in) .+?;/g, ` `)), i.replaceStrings) for (let n = 0; n < i.replaceStrings.length; n++) { const a = i.replaceStrings[n]; this.functions[s] = this.functions[s].replace(a.search, a.replace); } } } /** * @internal */ _registerTempVariable(e) { return this.sharedData.temps.indexOf(e) !== -1 ? !1 : (this.sharedData.temps.push(e), !0); } /** * @internal */ _emitVaryingFromString(e, t, i = "", r = !1) { return this.sharedData.varyings.indexOf(e) !== -1 ? !1 : (this.sharedData.varyings.push(e), i && (i.startsWith("defined(") ? this.sharedData.varyingDeclaration += `#if ${i} ` : this.sharedData.varyingDeclaration += `${r ? 
"#ifndef" : "#ifdef"} ${i} `), this.sharedData.varyingDeclaration += `varying ${t} ${e}; `, i && (this.sharedData.varyingDeclaration += `#endif `), !0); } /** * @internal */ _emitUniformFromString(e, t, i = "", r = !1) { this.uniforms.indexOf(e) === -1 && (this.uniforms.push(e), i && (i.startsWith("defined(") ? this._uniformDeclaration += `#if ${i} ` : this._uniformDeclaration += `${r ? "#ifndef" : "#ifdef"} ${i} `), this._uniformDeclaration += `uniform ${t} ${e}; `, i && (this._uniformDeclaration += `#endif `)); } /** * @internal */ _emitFloat(e) { return e.toString() === e.toFixed(0) ? `${e}.0` : e.toString(); } } class Nce { /** Creates a new shared data */ constructor() { this.temps = [], this.varyings = [], this.varyingDeclaration = "", this.inputBlocks = [], this.textureBlocks = [], this.bindableBlocks = [], this.forcedBindableBlocks = [], this.blocksWithFallbacks = [], this.blocksWithDefines = [], this.repeatableContentBlocks = [], this.dynamicUniformBlocks = [], this.blockingBlocks = [], this.animatedInputs = [], this.variableNames = {}, this.defineNames = {}, this.hints = { needWorldViewMatrix: !1, needWorldViewProjectionMatrix: !1, needAlphaBlending: !1, needAlphaTesting: !1 }, this.checks = { emitVertex: !1, emitFragment: !1, notConnectedNonOptionalInputs: new Array() }, this.allowEmptyVertexProgram = !1, this.variableNames.position = 0, this.variableNames.normal = 0, this.variableNames.tangent = 0, this.variableNames.uv = 0, this.variableNames.uv2 = 0, this.variableNames.uv3 = 0, this.variableNames.uv4 = 0, this.variableNames.uv5 = 0, this.variableNames.uv6 = 0, this.variableNames.color = 0, this.variableNames.matricesIndices = 0, this.variableNames.matricesWeights = 0, this.variableNames.matricesIndicesExtra = 0, this.variableNames.matricesWeightsExtra = 0, this.variableNames.diffuseBase = 0, this.variableNames.specularBase = 0, this.variableNames.worldPos = 0, this.variableNames.shadow = 0, this.variableNames.view = 0, this.variableNames.vTBN = 0, this.defineNames.MAINUV0 = 0, this.defineNames.MAINUV1 = 0, this.defineNames.MAINUV2 = 0, this.defineNames.MAINUV3 = 0, this.defineNames.MAINUV4 = 0, this.defineNames.MAINUV5 = 0, this.defineNames.MAINUV6 = 0, this.defineNames.MAINUV7 = 0; } /** * Emits console errors and exceptions if there is a failing check */ emitErrors() { let e = ""; !this.checks.emitVertex && !this.allowEmptyVertexProgram && (e += `NodeMaterial does not have a vertex output. You need to at least add a block that generates a glPosition value. `), this.checks.emitFragment || (e += `NodeMaterial does not have a fragment output. You need to at least add a block that generates a glFragColor value. `); for (const t of this.checks.notConnectedNonOptionalInputs) e += `input ${t.name} from block ${t.ownerBlock.name}[${t.ownerBlock.getClassName()}] is not connected and is not optional. 
`; if (e) throw `Build of NodeMaterial failed: ` + e; } } var fm; (function(c) { c[c.Compatible = 0] = "Compatible", c[c.TypeIncompatible = 1] = "TypeIncompatible", c[c.TargetIncompatible = 2] = "TargetIncompatible", c[c.HierarchyIssue = 3] = "HierarchyIssue"; })(fm || (fm = {})); var no; (function(c) { c[c.Input = 0] = "Input", c[c.Output = 1] = "Output"; })(no || (no = {})); class pP { /** * Checks if two types are equivalent * @param type1 type 1 to check * @param type2 type 2 to check * @returns true if both types are equivalent, else false */ static AreEquivalentTypes(e, t) { switch (e) { case ue.Vector3: { if (t === ue.Color3) return !0; break; } case ue.Vector4: { if (t === ue.Color4) return !0; break; } case ue.Color3: { if (t === ue.Vector3) return !0; break; } case ue.Color4: { if (t === ue.Vector4) return !0; break; } } return !1; } /** Gets the direction of the point */ get direction() { return this._direction; } /** * Gets or sets the associated variable name in the shader */ get associatedVariableName() { return this._ownerBlock.isInput ? this._ownerBlock.associatedVariableName : (!this._enforceAssociatedVariableName || !this._associatedVariableName) && this._connectedPoint ? this._connectedPoint.associatedVariableName : this._associatedVariableName; } set associatedVariableName(e) { this._associatedVariableName = e; } /** Get the inner type (ie AutoDetect for instance instead of the inferred one) */ get innerType() { return this._linkedConnectionSource && this._linkedConnectionSource.isConnected ? this.type : this._type; } /** * Gets or sets the connection point type (default is float) */ get type() { if (this._type === ue.AutoDetect) { if (this._ownerBlock.isInput) return this._ownerBlock.type; if (this._connectedPoint) return this._connectedPoint.type; if (this._linkedConnectionSource && this._linkedConnectionSource.isConnected) return this._linkedConnectionSource.type; } if (this._type === ue.BasedOnInput) { if (this._typeConnectionSource) return !this._typeConnectionSource.isConnected && this._defaultConnectionPointType ? this._defaultConnectionPointType : this._typeConnectionSource.type; if (this._defaultConnectionPointType) return this._defaultConnectionPointType; } return this._type; } set type(e) { this._type = e; } /** Gets or sets the target of that connection point */ get target() { return !this._prioritizeVertex || !this._ownerBlock ? this._target : this._target !== Le.VertexAndFragment ? this._target : this._ownerBlock.target === Le.Fragment ? Le.Fragment : Le.Vertex; } set target(e) { this._target = e; } /** * Gets a boolean indicating that the current point is connected to another NodeMaterialBlock */ get isConnected() { return this.connectedPoint !== null || this.hasEndpoints; } /** * Gets a boolean indicating that the current point is connected to an input block */ get isConnectedToInputBlock() { return this.connectedPoint !== null && this.connectedPoint.ownerBlock.isInput; } /** * Gets a the connected input block (if any) */ get connectInputBlock() { return this.isConnectedToInputBlock ? this.connectedPoint.ownerBlock : null; } /** Get the other side of the connection (if any) */ get connectedPoint() { return this._connectedPoint; } /** Get the block that owns this connection point */ get ownerBlock() { return this._ownerBlock; } /** Get the block connected on the other side of this connection (if any) */ get sourceBlock() { return this._connectedPoint ? 
this._connectedPoint.ownerBlock : null; } /** Get the block connected on the endpoints of this connection (if any) */ get connectedBlocks() { return this._endpoints.length === 0 ? [] : this._endpoints.map((e) => e.ownerBlock); } /** Gets the list of connected endpoints */ get endpoints() { return this._endpoints; } /** Gets a boolean indicating if that output point is connected to at least one input */ get hasEndpoints() { return this._endpoints && this._endpoints.length > 0; } /** Gets a boolean indicating that this connection has a path to the vertex output*/ get isDirectlyConnectedToVertexOutput() { if (!this.hasEndpoints) return !1; for (const e of this._endpoints) if (e.ownerBlock.target === Le.Vertex || (e.ownerBlock.target === Le.Neutral || e.ownerBlock.target === Le.VertexAndFragment) && e.ownerBlock.outputs.some((t) => t.isDirectlyConnectedToVertexOutput)) return !0; return !1; } /** Gets a boolean indicating that this connection will be used in the vertex shader */ get isConnectedInVertexShader() { if (this.target === Le.Vertex) return !0; if (!this.hasEndpoints) return !1; for (const e of this._endpoints) if (e.ownerBlock.target === Le.Vertex || e.target === Le.Vertex || (e.ownerBlock.target === Le.Neutral || e.ownerBlock.target === Le.VertexAndFragment) && e.ownerBlock.outputs.some((t) => t.isConnectedInVertexShader)) return !0; return !1; } /** Gets a boolean indicating that this connection will be used in the fragment shader */ get isConnectedInFragmentShader() { if (this.target === Le.Fragment) return !0; if (!this.hasEndpoints) return !1; for (const e of this._endpoints) if (e.ownerBlock.target === Le.Fragment || (e.ownerBlock.target === Le.Neutral || e.ownerBlock.target === Le.VertexAndFragment) && e.ownerBlock.isConnectedInFragmentShader()) return !0; return !1; } /** * Creates a block suitable to be used as an input for this input point. * If null is returned, a block based on the point type will be created. * @returns The returned string parameter is the name of the output point of NodeMaterialBlock (first parameter of the returned array) that can be connected to the input */ createCustomInputBlock() { return null; } /** * Creates a new connection point * @param name defines the connection point name * @param ownerBlock defines the block hosting this connection point * @param direction defines the direction of the connection point */ constructor(e, t, i) { this._connectedPoint = null, this._endpoints = new Array(), this._typeConnectionSource = null, this._defaultConnectionPointType = null, this._linkedConnectionSource = null, this._acceptedConnectionPointType = null, this._type = ue.Float, this._enforceAssociatedVariableName = !1, this.needDualDirectionValidation = !1, this.acceptedConnectionPointTypes = [], this.excludedConnectionPointTypes = [], this.onConnectionObservable = new Fe(), this.isExposedOnFrame = !1, this.exposedPortPosition = -1, this._prioritizeVertex = !1, this._target = Le.VertexAndFragment, this._ownerBlock = t, this.name = e, this._direction = i; } /** * Gets the current class name e.g. 
"NodeMaterialConnectionPoint" * @returns the class name */ getClassName() { return "NodeMaterialConnectionPoint"; } /** * Gets a boolean indicating if the current point can be connected to another point * @param connectionPoint defines the other connection point * @returns a boolean */ canConnectTo(e) { return this.checkCompatibilityState(e) === fm.Compatible; } /** * Gets a number indicating if the current point can be connected to another point * @param connectionPoint defines the other connection point * @returns a number defining the compatibility state */ checkCompatibilityState(e) { const t = this._ownerBlock, i = e.ownerBlock; if (t.target === Le.Fragment) { if (i.target === Le.Vertex) return fm.TargetIncompatible; for (const n of i.outputs) if (n.ownerBlock.target != Le.Neutral && n.isConnectedInVertexShader) return fm.TargetIncompatible; } if (this.type !== e.type && e.innerType !== ue.AutoDetect) return pP.AreEquivalentTypes(this.type, e.type) || e.acceptedConnectionPointTypes && e.acceptedConnectionPointTypes.indexOf(this.type) !== -1 || e._acceptedConnectionPointType && pP.AreEquivalentTypes(e._acceptedConnectionPointType.type, this.type) ? fm.Compatible : fm.TypeIncompatible; if (e.excludedConnectionPointTypes && e.excludedConnectionPointTypes.indexOf(this.type) !== -1) return fm.TypeIncompatible; let r = i, s = t; return this.direction === no.Input && (r = t, s = i), r.isAnAncestorOf(s) ? fm.HierarchyIssue : fm.Compatible; } /** * Connect this point to another connection point * @param connectionPoint defines the other connection point * @param ignoreConstraints defines if the system will ignore connection type constraints (default is false) * @returns the current connection point */ connectTo(e, t = !1) { if (!t && !this.canConnectTo(e)) throw "Cannot connect these two connectors."; return this._endpoints.push(e), e._connectedPoint = this, this._enforceAssociatedVariableName = !1, this.onConnectionObservable.notifyObservers(e), e.onConnectionObservable.notifyObservers(this), this; } /** * Disconnect this point from one of his endpoint * @param endpoint defines the other connection point * @returns the current connection point */ disconnectFrom(e) { const t = this._endpoints.indexOf(e); return t === -1 ? 
this : (this._endpoints.splice(t, 1), e._connectedPoint = null, this._enforceAssociatedVariableName = !1, e._enforceAssociatedVariableName = !1, this); } /** * Fill the list of excluded connection point types with all types other than those passed in the parameter * @param mask Types (ORed values of NodeMaterialBlockConnectionPointTypes) that are allowed, and thus will not be pushed to the excluded list */ addExcludedConnectionPointFromAllowedTypes(e) { let t = 1; for (; t < ue.All; ) e & t || this.excludedConnectionPointTypes.push(t), t = t << 1; } /** * Serializes this point in a JSON representation * @param isInput defines if the connection point is an input (default is true) * @returns the serialized point object */ serialize(e = !0) { const t = {}; return t.name = this.name, t.displayName = this.displayName, e && this.connectedPoint && (t.inputName = this.name, t.targetBlockId = this.connectedPoint.ownerBlock.uniqueId, t.targetConnectionName = this.connectedPoint.name, t.isExposedOnFrame = !0, t.exposedPortPosition = this.exposedPortPosition), (this.isExposedOnFrame || this.exposedPortPosition >= 0) && (t.isExposedOnFrame = !0, t.exposedPortPosition = this.exposedPortPosition), t; } /** * Release resources */ dispose() { this.onConnectionObservable.clear(); } } class Wi { /** * Gets the name of the block */ get name() { return this._name; } /** * Sets the name of the block. Will check if the name is valid. */ set name(e) { this.validateBlockName(e) && (this._name = e); } /** * Gets a boolean indicating that this block can only be used once per NodeMaterial */ get isUnique() { return this._isUnique; } /** * Gets a boolean indicating that this block is an end block (e.g. it is generating a system value) */ get isFinalMerger() { return this._isFinalMerger; } /** * Gets a boolean indicating that this block is an input (e.g. it sends data to the shader) */ get isInput() { return this._isInput; } /** * Gets a boolean indicating if this block is a teleport out */ get isTeleportOut() { return this._isTeleportOut; } /** * Gets a boolean indicating if this block is a teleport in */ get isTeleportIn() { return this._isTeleportIn; } /** * Gets or sets the build Id */ get buildId() { return this._buildId; } set buildId(e) { this._buildId = e; } /** * Gets or sets the target of the block */ get target() { return this._target; } set target(e) { this._target & e || (this._target = e); } /** * Gets the list of input points */ get inputs() { return this._inputs; } /** Gets the list of output points */ get outputs() { return this._outputs; } /** * Find an input by its name * @param name defines the name of the input to look for * @returns the input or null if not found */ getInputByName(e) { const t = this._inputs.filter((i) => i.name === e); return t.length ? t[0] : null; } /** * Find an output by its name * @param name defines the name of the output to look for * @returns the output or null if not found */ getOutputByName(e) { const t = this._outputs.filter((i) => i.name === e); return t.length ? t[0] : null; } /** * Creates a new NodeMaterialBlock * @param name defines the block name * @param target defines the target of that block (Vertex by default) * @param isFinalMerger defines a boolean indicating that this block is an end block (e.g. it is generating a system value). 
Default is false */ constructor(e, t = Le.Vertex, i = !1) { this._isFinalMerger = !1, this._isInput = !1, this._isTeleportOut = !1, this._isTeleportIn = !1, this._name = "", this._isUnique = !1, this.inputsAreExclusive = !1, this._codeVariableName = "", this._inputs = new Array(), this._outputs = new Array(), this.comments = "", this.visibleInInspector = !1, this.visibleOnFrame = !1, this._target = t, this._originalTargetIsNeutral = t === Le.Neutral, this._isFinalMerger = i, this._isInput = this.getClassName() === "InputBlock", this._isTeleportOut = this.getClassName() === "NodeMaterialTeleportOutBlock", this._isTeleportIn = this.getClassName() === "NodeMaterialTeleportInBlock", this._name = e, this.uniqueId = LL.UniqueId; } /** @internal */ _setInitialTarget(e) { this._target = e, this._originalTargetIsNeutral = e === Le.Neutral; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ // eslint-disable-next-line @typescript-eslint/no-unused-vars initialize(e) { } /** * Bind data to effect. Will only be called for blocks with isBindable === true * @param effect defines the effect to bind data to * @param nodeMaterial defines the hosting NodeMaterial * @param mesh defines the mesh that will be rendered * @param subMesh defines the submesh that will be rendered */ // eslint-disable-next-line @typescript-eslint/no-unused-vars bind(e, t, i, r) { } _declareOutput(e, t) { return `${t._getGLType(e.type)} ${e.associatedVariableName}`; } _writeVariable(e) { return e.connectedPoint ? `${e.associatedVariableName}` : "0."; } _writeFloat(e) { let t = e.toString(); return t.indexOf(".") === -1 && (t += ".0"), `${t}`; } /** * Gets the current class name e.g. "NodeMaterialBlock" * @returns the class name */ getClassName() { return "NodeMaterialBlock"; } /** Gets a boolean indicating that this connection will be used in the fragment shader */ isConnectedInFragmentShader() { return this.outputs.some((e) => e.isConnectedInFragmentShader); } /** * Register a new input. Must be called inside a block constructor * @param name defines the connection point name * @param type defines the connection point type * @param isOptional defines a boolean indicating that this input can be omitted * @param target defines the target to use to limit the connection point (will be VertexAndFragment by default) * @param point an already created connection point. If not provided, create a new one * @returns the current block */ registerInput(e, t, i = !1, r, s) { return s = s ?? new pP(e, this, no.Input), s.type = t, s.isOptional = i, r && (s.target = r), this._inputs.push(s), this; } /** * Register a new output. Must be called inside a block constructor * @param name defines the connection point name * @param type defines the connection point type * @param target defines the target to use to limit the connection point (will be VertexAndFragment by default) * @param point an already created connection point. If not provided, create a new one * @returns the current block */ registerOutput(e, t, i, r) { return r = r ?? new pP(e, this, no.Output), r.type = t, i && (r.target = i), this._outputs.push(r), this; } /** * Will return the first available input e.g. 
the first one which is not an uniform or an attribute * @param forOutput defines an optional connection point to check compatibility with * @returns the first available input or null */ getFirstAvailableInput(e = null) { for (const t of this._inputs) if (!t.connectedPoint && (!e || e.type === t.type || t.type === ue.AutoDetect)) return t; return null; } /** * Will return the first available output e.g. the first one which is not yet connected and not a varying * @param forBlock defines an optional block to check compatibility with * @returns the first available input or null */ getFirstAvailableOutput(e = null) { for (const t of this._outputs) if (!e || !e.target || e.target === Le.Neutral || e.target & t.target) return t; return null; } /** * Gets the sibling of the given output * @param current defines the current output * @returns the next output in the list or null */ getSiblingOutput(e) { const t = this._outputs.indexOf(e); return t === -1 || t >= this._outputs.length ? null : this._outputs[t + 1]; } /** * Checks if the current block is an ancestor of a given block * @param block defines the potential descendant block to check * @returns true if block is a descendant */ isAnAncestorOf(e) { for (const t of this._outputs) if (t.hasEndpoints) { for (const i of t.endpoints) if (i.ownerBlock === e || i.ownerBlock.isAnAncestorOf(e)) return !0; } return !1; } /** * Connect current block with another block * @param other defines the block to connect with * @param options define the various options to help pick the right connections * @param options.input * @param options.output * @param options.outputSwizzle * @returns the current block */ connectTo(e, t) { if (this._outputs.length === 0) return; let i = t && t.output ? this.getOutputByName(t.output) : this.getFirstAvailableOutput(e), r = !0; for (; r; ) { const s = t && t.input ? 
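/* pick the requested input when options.input is provided, otherwise the first free compatible input; if the current output cannot connect, try the next sibling output, and throw when no output/input pair matches */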
e.getInputByName(t.input) : e.getFirstAvailableInput(i); if (i && s && i.canConnectTo(s)) i.connectTo(s), r = !1; else if (i) i = this.getSiblingOutput(i); else throw "Unable to find a compatible match"; } return this; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _buildBlock(e) { } /** * Add uniforms, samplers and uniform buffers at compilation time * @param state defines the state to update * @param nodeMaterial defines the node material requesting the update * @param defines defines the material defines to update * @param uniformBuffers defines the list of uniform buffer names */ // eslint-disable-next-line @typescript-eslint/no-unused-vars updateUniformsAndSamples(e, t, i, r) { } /** * Add potential fallbacks if shader compilation fails * @param mesh defines the mesh to be rendered * @param fallbacks defines the current prioritized list of fallbacks */ // eslint-disable-next-line @typescript-eslint/no-unused-vars provideFallbacks(e, t) { } /** * Initialize defines for shader compilation * @param mesh defines the mesh to be rendered * @param nodeMaterial defines the node material requesting the update * @param defines defines the material defines to update * @param useInstances specifies that instances should be used */ // eslint-disable-next-line @typescript-eslint/no-unused-vars initializeDefines(e, t, i, r = !1) { } /** * Update defines for shader compilation * @param mesh defines the mesh to be rendered * @param nodeMaterial defines the node material requesting the update * @param defines defines the material defines to update * @param useInstances specifies that instances should be used * @param subMesh defines which submesh to render */ // eslint-disable-next-line @typescript-eslint/no-unused-vars prepareDefines(e, t, i, r = !1, s) { } /** * Lets the block try to connect some inputs automatically * @param material defines the hosting NodeMaterial * @param additionalFilteringInfo optional additional filtering condition when looking for compatible blocks */ // eslint-disable-next-line @typescript-eslint/no-unused-vars autoConfigure(e, t = () => !0) { } /** * Function called when a block is declared as repeatable content generator * @param vertexShaderState defines the current compilation state for the vertex shader * @param fragmentShaderState defines the current compilation state for the fragment shader * @param mesh defines the mesh to be rendered * @param defines defines the material defines to update */ // eslint-disable-next-line @typescript-eslint/no-unused-vars replaceRepeatableContent(e, t, i, r) { } /** Gets a boolean indicating that the code of this block will be promoted to vertex shader even if connected to fragment output */ get willBeGeneratedIntoVertexShaderFromFragmentShader() { return this.isInput || this.isFinalMerger || this._outputs.some((e) => e.isDirectlyConnectedToVertexOutput) || this.target === Le.Vertex ? !1 : !!((this.target === Le.VertexAndFragment || this.target === Le.Neutral) && this._outputs.some((e) => e.isConnectedInVertexShader)); } /** * Checks if the block is ready * @param mesh defines the mesh to be rendered * @param nodeMaterial defines the node material requesting the update * @param defines defines the material defines to update * @param useInstances specifies that instances should be used * @returns true if the block is ready */ // eslint-disable-next-line @typescript-eslint/no-unused-vars isReady(e, t, i, r = !1) { return !0; } _linkConnectionTypes(e, t, i = !1) { i ? 
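/* when the last argument is true, the second input only accepts the first input's type; otherwise the two inputs are linked as connection sources so their resolved types stay in sync */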
this._inputs[t]._acceptedConnectionPointType = this._inputs[e] : this._inputs[e]._linkedConnectionSource = this._inputs[t], this._inputs[t]._linkedConnectionSource = this._inputs[e]; } _processBuild(e, t, i, r) { e.build(t, r); const s = t._vertexState != null, n = e._buildTarget === Le.Vertex && e.target !== Le.VertexAndFragment; if (s && (!(e.target & e._buildTarget) || !(e.target & i.target) || this.target !== Le.VertexAndFragment && n) && (!e.isInput && t.target !== e._buildTarget || // block was already emitted by vertex shader e.isInput && e.isAttribute && !e._noContextSwitch)) { const a = i.connectedPoint; t._vertexState._emitVaryingFromString("v_" + a.associatedVariableName, t._getGLType(a.type)) && (t._vertexState.compilationString += `${"v_" + a.associatedVariableName} = ${a.associatedVariableName}; `), i.associatedVariableName = "v_" + a.associatedVariableName, i._enforceAssociatedVariableName = !0; } } /** * Validates the new name for the block node. * @param newName the new name to be given to the node. * @returns false if the name is a reserve word, else true. */ validateBlockName(e) { const t = [ "position", "normal", "tangent", "particle_positionw", "uv", "uv2", "uv3", "uv4", "uv5", "uv6", "position2d", "particle_uv", "matricesIndices", "matricesWeights", "world0", "world1", "world2", "world3", "particle_color", "particle_texturemask" ]; for (const i of t) if (e === i) return !1; return !0; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _customBuildStep(e, t) { } /** * Compile the current node and generate the shader code * @param state defines the current compilation state (uniforms, samplers, current string) * @param activeBlocks defines the list of active blocks (i.e. blocks to compile) * @returns true if already built */ build(e, t) { if (this._buildId === e.sharedData.buildId) return !0; if (!this.isInput) for (const i of this._outputs) i.associatedVariableName || (i.associatedVariableName = e._getFreeVariableName(i.name)); for (const i of this._inputs) { if (!i.connectedPoint) { i.isOptional || e.sharedData.checks.notConnectedNonOptionalInputs.push(i); continue; } if (this.target !== Le.Neutral && (!(i.target & this.target) || !(i.target & e.target))) continue; const r = i.connectedPoint.ownerBlock; r && r !== this && this._processBuild(r, e, i, t); } if (this._customBuildStep(e, t), this._buildId === e.sharedData.buildId) return !0; if (e.sharedData.verbose && Ce.Log(`${e.target === Le.Vertex ? 
"Vertex shader" : "Fragment shader"}: Building ${this.name} [${this.getClassName()}]`), this.isFinalMerger) switch (e.target) { case Le.Vertex: e.sharedData.checks.emitVertex = !0; break; case Le.Fragment: e.sharedData.checks.emitFragment = !0; break; } !this.isInput && e.sharedData.emitComments && (e.compilationString += ` //${this.name} `), this._buildBlock(e), this._buildId = e.sharedData.buildId, this._buildTarget = e.target; for (const i of this._outputs) if (i.target & e.target) for (const r of i.endpoints) { const s = r.ownerBlock; s && s.target & e.target && t.indexOf(s) !== -1 && this._processBuild(s, e, r, t); } return !1; } _inputRename(e) { return e; } _outputRename(e) { return e; } _dumpPropertiesCode() { const e = this._codeVariableName; return `${e}.visibleInInspector = ${this.visibleInInspector}; ${e}.visibleOnFrame = ${this.visibleOnFrame}; ${e}.target = ${this.target}; `; } /** * @internal */ _dumpCode(e, t) { t.push(this); const i = this.name.replace(/[^A-Za-z_]+/g, ""); if (this._codeVariableName = i || `${this.getClassName()}_${this.uniqueId}`, e.indexOf(this._codeVariableName) !== -1) { let s = 0; do s++, this._codeVariableName = i + s; while (e.indexOf(this._codeVariableName) !== -1); } e.push(this._codeVariableName); let r = ` // ${this.getClassName()} `; this.comments && (r += `// ${this.comments} `), r += `var ${this._codeVariableName} = new BABYLON.${this.getClassName()}("${this.name}"); `, r += this._dumpPropertiesCode(); for (const s of this.inputs) { if (!s.isConnected) continue; const a = s.connectedPoint.ownerBlock; t.indexOf(a) === -1 && (r += a._dumpCode(e, t)); } for (const s of this.outputs) if (s.hasEndpoints) for (const n of s.endpoints) { const a = n.ownerBlock; a && t.indexOf(a) === -1 && (r += a._dumpCode(e, t)); } return r; } /** * @internal */ _dumpCodeForOutputConnections(e) { let t = ""; if (e.indexOf(this) !== -1) return t; e.push(this); for (const i of this.inputs) { if (!i.isConnected) continue; const r = i.connectedPoint, s = r.ownerBlock; t += s._dumpCodeForOutputConnections(e), t += `${s._codeVariableName}.${s._outputRename(r.name)}.connectTo(${this._codeVariableName}.${this._inputRename(i.name)}); `; } return t; } /** * Clone the current block to a new identical block * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a copy of the current block */ clone(e, t = "") { const i = this.serialize(), r = Qo(i.customType); if (r) { const s = new r(); return s._deserialize(i, e, t), s; } return null; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = {}; e.customType = "BABYLON." + this.getClassName(), e.id = this.uniqueId, e.name = this.name, e.comments = this.comments, e.visibleInInspector = this.visibleInInspector, e.visibleOnFrame = this.visibleOnFrame, e.target = this.target, e.inputs = [], e.outputs = []; for (const t of this.inputs) e.inputs.push(t.serialize()); for (const t of this.outputs) e.outputs.push(t.serialize(!1)); return e; } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _deserialize(e, t, i) { var r; this.name = e.name, this.comments = e.comments, this.visibleInInspector = !!e.visibleInInspector, this.visibleOnFrame = !!e.visibleOnFrame, this._target = (r = e.target) !== null && r !== void 0 ? 
r : this.target, this._deserializePortDisplayNamesAndExposedOnFrame(e); } _deserializePortDisplayNamesAndExposedOnFrame(e) { const t = e.inputs, i = e.outputs; t && t.forEach((r, s) => { r.displayName && (this.inputs[s].displayName = r.displayName), r.isExposedOnFrame && (this.inputs[s].isExposedOnFrame = r.isExposedOnFrame, this.inputs[s].exposedPortPosition = r.exposedPortPosition); }), i && i.forEach((r, s) => { r.displayName && (this.outputs[s].displayName = r.displayName), r.isExposedOnFrame && (this.outputs[s].isExposedOnFrame = r.isExposedOnFrame, this.outputs[s].exposedPortPosition = r.exposedPortPosition); }); } /** * Release resources */ dispose() { for (const e of this.inputs) e.dispose(); for (const e of this.outputs) e.dispose(); } } class aB extends Wi { /** * Creates a new TransformBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.complementW = 1, this.complementZ = 0, this.target = Le.Vertex, this.registerInput("vector", ue.AutoDetect), this.registerInput("transform", ue.Matrix), this.registerOutput("output", ue.Vector4), this.registerOutput("xyz", ue.Vector3), this._inputs[0].onConnectionObservable.add((t) => { if (t.ownerBlock.isInput) { const i = t.ownerBlock; (i.name === "normal" || i.name === "tangent") && (this.complementW = 0); } }); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TransformBlock"; } /** * Gets the vector input */ get vector() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the xyz output component */ get xyz() { return this._outputs[1]; } /** * Gets the matrix transform input */ get transform() { return this._inputs[1]; } _buildBlock(e) { super._buildBlock(e); const t = this.vector, i = this.transform; if (t.connectedPoint) { if (this.complementW === 0) { const r = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", r), e.sharedData.blocksWithDefines.push(this); const s = e._getFreeVariableName(`${i.associatedVariableName}_NUS`); switch (e.compilationString += `mat3 ${s} = mat3(${i.associatedVariableName}); `, e.compilationString += `#ifdef NONUNIFORMSCALING `, e.compilationString += `${s} = transposeMat3(inverseMat3(${s})); `, e.compilationString += `#endif `, t.connectedPoint.type) { case ue.Vector2: e.compilationString += this._declareOutput(this.output, e) + ` = vec4(${s} * vec3(${t.associatedVariableName}, ${this._writeFloat(this.complementZ)}), ${this._writeFloat(this.complementW)}); `; break; case ue.Vector3: case ue.Color3: e.compilationString += this._declareOutput(this.output, e) + ` = vec4(${s} * ${t.associatedVariableName}, ${this._writeFloat(this.complementW)}); `; break; default: e.compilationString += this._declareOutput(this.output, e) + ` = vec4(${s} * ${t.associatedVariableName}.xyz, ${this._writeFloat(this.complementW)}); `; break; } } else { const r = i.associatedVariableName; switch (t.connectedPoint.type) { case ue.Vector2: e.compilationString += this._declareOutput(this.output, e) + ` = ${r} * vec4(${t.associatedVariableName}, ${this._writeFloat(this.complementZ)}, ${this._writeFloat(this.complementW)}); `; break; case ue.Vector3: case ue.Color3: e.compilationString += this._declareOutput(this.output, e) + ` = ${r} * vec4(${t.associatedVariableName}, ${this._writeFloat(this.complementW)}); `; break; default: e.compilationString += this._declareOutput(this.output, e) + ` = ${r} * ${t.associatedVariableName}; `; break; } } this.xyz.hasEndpoints && (e.compilationString += 
this._declareOutput(this.xyz, e) + ` = ${this.output.associatedVariableName}.xyz; `); } return this; } /** * Update defines for shader compilation * @param mesh defines the mesh to be rendered * @param nodeMaterial defines the node material requesting the update * @param defines defines the material defines to update */ prepareDefines(e, t, i) { e.nonUniformScaling && i.setValue("NONUNIFORMSCALING", !0); } serialize() { const e = super.serialize(); return e.complementZ = this.complementZ, e.complementW = this.complementW, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.complementZ = e.complementZ !== void 0 ? e.complementZ : 0, this.complementW = e.complementW !== void 0 ? e.complementW : 1; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.complementZ = ${this.complementZ}; `; return e += `${this._codeVariableName}.complementW = ${this.complementW}; `, e; } } Be("BABYLON.TransformBlock", aB); class G9 extends Wi { /** * Creates a new VertexOutputBlock * @param name defines the block name */ constructor(e) { super(e, Le.Vertex, !0), this.registerInput("vector", ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "VertexOutputBlock"; } /** * Gets the vector input component */ get vector() { return this._inputs[0]; } _isLogarithmicDepthEnabled(e, t) { if (t) return !0; for (const i of e) if (i.useLogarithmicDepth) return !0; return !1; } _buildBlock(e) { super._buildBlock(e); const t = this.vector; return e.compilationString += `gl_Position = ${t.associatedVariableName}; `, this._isLogarithmicDepthEnabled(e.sharedData.fragmentOutputNodes, e.sharedData.nodeMaterial.useLogarithmicDepth) && (e._emitUniformFromString("logarithmicDepthConstant", "float"), e._emitVaryingFromString("vFragmentDepth", "float"), e.compilationString += `vFragmentDepth = 1.0 + gl_Position.w; `, e.compilationString += `gl_Position.z = log2(max(0.000001, vFragmentDepth)) * logarithmicDepthConstant; `), this; } } Be("BABYLON.VertexOutputBlock", G9); var $i; (function(c) { c[c.Boolean = 0] = "Boolean", c[c.Float = 1] = "Float", c[c.Int = 2] = "Int", c[c.Vector2 = 3] = "Vector2", c[c.List = 4] = "List"; })($i || ($i = {})); function ir(c, e = $i.Boolean, t = "PROPERTIES", i) { return (r, s) => { let n = r._propStore; n || (n = [], r._propStore = n), n.push({ propertyName: s, displayName: c, type: e, groupName: t, options: i ?? 
{} }); }; } class sT extends Wi { /** * Create a new FragmentOutputBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment, !0), this.convertToGammaSpace = !1, this.convertToLinearSpace = !1, this.useLogarithmicDepth = !1, this.registerInput("rgba", ue.Color4, !0), this.registerInput("rgb", ue.AutoDetect, !0), this.registerInput("a", ue.Float, !0), this.rgb.addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FragmentOutputBlock"; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("logarithmicDepthConstant"), e._excludeVariableName("vFragmentDepth"); } /** * Gets the rgba input component */ get rgba() { return this._inputs[0]; } /** * Gets the rgb input component */ get rgb() { return this._inputs[1]; } /** * Gets the a input component */ get a() { return this._inputs[2]; } prepareDefines(e, t, i) { i.setValue(this._linearDefineName, this.convertToLinearSpace, !0), i.setValue(this._gammaDefineName, this.convertToGammaSpace, !0); } bind(e, t, i) { (this.useLogarithmicDepth || t.useLogarithmicDepth) && i && Ke.BindLogDepth(void 0, e, i.getScene()); } _buildBlock(e) { super._buildBlock(e); const t = this.rgba, i = this.rgb, r = this.a; e.sharedData.hints.needAlphaBlending = t.isConnected || r.isConnected, e.sharedData.blocksWithDefines.push(this), (this.useLogarithmicDepth || e.sharedData.nodeMaterial.useLogarithmicDepth) && (e._emitUniformFromString("logarithmicDepthConstant", "float"), e._emitVaryingFromString("vFragmentDepth", "float"), e.sharedData.bindableBlocks.push(this)), this._linearDefineName = e._getFreeDefineName("CONVERTTOLINEAR"), this._gammaDefineName = e._getFreeDefineName("CONVERTTOGAMMA"); const s = `//${this.name}`; if (e._emitFunctionFromInclude("helperFunctions", s), t.connectedPoint) r.isConnected ? e.compilationString += `gl_FragColor = vec4(${t.associatedVariableName}.rgb, ${r.associatedVariableName}); ` : e.compilationString += `gl_FragColor = ${t.associatedVariableName}; `; else if (i.connectedPoint) { let n = "1.0"; r.connectedPoint && (n = r.associatedVariableName), i.connectedPoint.type === ue.Float ? 
e.compilationString += `gl_FragColor = vec4(${i.associatedVariableName}, ${i.associatedVariableName}, ${i.associatedVariableName}, ${n}); ` : e.compilationString += `gl_FragColor = vec4(${i.associatedVariableName}, ${n}); `; } else e.sharedData.checks.notConnectedNonOptionalInputs.push(t); return e.compilationString += `#ifdef ${this._linearDefineName} `, e.compilationString += `gl_FragColor = toLinearSpace(gl_FragColor); `, e.compilationString += `#endif `, e.compilationString += `#ifdef ${this._gammaDefineName} `, e.compilationString += `gl_FragColor = toGammaSpace(gl_FragColor); `, e.compilationString += `#endif `, (this.useLogarithmicDepth || e.sharedData.nodeMaterial.useLogarithmicDepth) && (e.compilationString += `gl_FragDepthEXT = log2(vFragmentDepth) * logarithmicDepthConstant * 0.5; `), e.compilationString += `#if defined(PREPASS)\r `, e.compilationString += `gl_FragData[0] = gl_FragColor;\r `, e.compilationString += `#endif\r `, this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.convertToGammaSpace = ${this.convertToGammaSpace}; `, e += `${this._codeVariableName}.convertToLinearSpace = ${this.convertToLinearSpace}; `, e += `${this._codeVariableName}.useLogarithmicDepth = ${this.useLogarithmicDepth}; `, e; } serialize() { const e = super.serialize(); return e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, e.useLogarithmicDepth = this.useLogarithmicDepth, e; } _deserialize(e, t, i) { var r; super._deserialize(e, t, i), this.convertToGammaSpace = e.convertToGammaSpace, this.convertToLinearSpace = e.convertToLinearSpace, this.useLogarithmicDepth = (r = e.useLogarithmicDepth) !== null && r !== void 0 ? r : !1; } } F([ ir("Convert to gamma space", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], sT.prototype, "convertToGammaSpace", void 0); F([ ir("Convert to linear space", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], sT.prototype, "convertToLinearSpace", void 0); F([ ir("Use logarithmic depth", $i.Boolean, "PROPERTIES") ], sT.prototype, "useLogarithmicDepth", void 0); Be("BABYLON.FragmentOutputBlock", sT); var bd; (function(c) { c[c.Uniform = 0] = "Uniform", c[c.Attribute = 1] = "Attribute", c[c.Varying = 2] = "Varying", c[c.Undefined = 3] = "Undefined"; })(bd || (bd = {})); var Ms; (function(c) { c[c.World = 1] = "World", c[c.View = 2] = "View", c[c.Projection = 3] = "Projection", c[c.ViewProjection = 4] = "ViewProjection", c[c.WorldView = 5] = "WorldView", c[c.WorldViewProjection = 6] = "WorldViewProjection", c[c.CameraPosition = 7] = "CameraPosition", c[c.FogColor = 8] = "FogColor", c[c.DeltaTime = 9] = "DeltaTime", c[c.CameraParameters = 10] = "CameraParameters", c[c.MaterialAlpha = 11] = "MaterialAlpha"; })(Ms || (Ms = {})); class AK { /** * Creates a PositionNormalVertex * @param position the position of the vertex (defaut: 0,0,0) * @param normal the normal of the vertex (defaut: 0,1,0) */ constructor(e = D.Zero(), t = D.Up()) { this.position = e, this.normal = t; } /** * Clones the PositionNormalVertex * @returns the cloned PositionNormalVertex */ clone() { return new AK(this.position.clone(), this.normal.clone()); } } class yK { /** * Creates a PositionNormalTextureVertex * @param position the position of the vertex (defaut: 0,0,0) * @param normal the normal of the vertex (defaut: 0,1,0) * @param uv the uv of the vertex (default: 0,0) */ constructor(e = D.Zero(), t = D.Up(), i = at.Zero()) { this.position = e, this.normal = t, this.uv = i; } 
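/*
 * Usage sketch for the node-material blocks defined in this bundle. It assumes the minified
 * classes correspond to the public Babylon.js API (InputBlock, TransformBlock, VertexOutputBlock,
 * FragmentOutputBlock, NodeMaterialSystemValues) and mirrors the documented pattern for building
 * a material graph in code; it is illustrative only and is not executed here.
 *
 *   const positionInput = new BABYLON.InputBlock("position");
 *   positionInput.setAsAttribute("position");
 *   const worldInput = new BABYLON.InputBlock("world");
 *   worldInput.setAsSystemValue(BABYLON.NodeMaterialSystemValues.World);
 *   const worldPos = new BABYLON.TransformBlock("worldPos");
 *   positionInput.connectTo(worldPos);            // fills the "vector" input
 *   worldInput.connectTo(worldPos);               // fills the "transform" input
 *
 *   const viewProjectionInput = new BABYLON.InputBlock("viewProjection");
 *   viewProjectionInput.setAsSystemValue(BABYLON.NodeMaterialSystemValues.ViewProjection);
 *   const clipPos = new BABYLON.TransformBlock("worldPos * viewProjection");
 *   worldPos.connectTo(clipPos);
 *   viewProjectionInput.connectTo(clipPos);
 *   const vertexOutput = new BABYLON.VertexOutputBlock("vertexOutput");
 *   clipPos.connectTo(vertexOutput);
 *
 *   const colorInput = new BABYLON.InputBlock("color");
 *   colorInput.value = new BABYLON.Color4(0.8, 0.8, 0.8, 1);
 *   const fragmentOutput = new BABYLON.FragmentOutputBlock("fragmentOutput");
 *   colorInput.connectTo(fragmentOutput);         // fills the "rgba" input
 */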
/** * Clones the PositionNormalTextureVertex * @returns the cloned PositionNormalTextureVertex */ clone() { return new yK(this.position.clone(), this.normal.clone(), this.uv.clone()); } } var tT; (function(c) { c[c.None = 0] = "None", c[c.Time = 1] = "Time", c[c.RealTime = 2] = "RealTime"; })(tT || (tT = {})); const Fce = { position2d: "position", particle_uv: "vUV", particle_color: "vColor", particle_texturemask: "textureMask", particle_positionw: "vPositionW" }, Fk = { particle_uv: !0, particle_color: !0, particle_texturemask: !0, particle_positionw: !0 }, IZ = { particle_texturemask: !0 }; class vs extends Wi { /** * Gets or sets the connection point type (default is float) */ get type() { if (this._type === ue.AutoDetect) { if (this.isUniform && this.value != null) { if (!isNaN(this.value)) return this._type = ue.Float, this._type; switch (this.value.getClassName()) { case "Vector2": return this._type = ue.Vector2, this._type; case "Vector3": return this._type = ue.Vector3, this._type; case "Vector4": return this._type = ue.Vector4, this._type; case "Color3": return this._type = ue.Color3, this._type; case "Color4": return this._type = ue.Color4, this._type; case "Matrix": return this._type = ue.Matrix, this._type; } } if (this.isAttribute) switch (this.name) { case "position": case "normal": case "particle_positionw": return this._type = ue.Vector3, this._type; case "uv": case "uv2": case "uv3": case "uv4": case "uv5": case "uv6": case "position2d": case "particle_uv": return this._type = ue.Vector2, this._type; case "matricesIndices": case "matricesWeights": case "matricesIndicesExtra": case "matricesWeightsExtra": case "world0": case "world1": case "world2": case "world3": case "tangent": return this._type = ue.Vector4, this._type; case "color": case "instanceColor": case "particle_color": case "particle_texturemask": return this._type = ue.Color4, this._type; } if (this.isSystemValue) switch (this._systemValue) { case Ms.World: case Ms.WorldView: case Ms.WorldViewProjection: case Ms.View: case Ms.ViewProjection: case Ms.Projection: return this._type = ue.Matrix, this._type; case Ms.CameraPosition: return this._type = ue.Vector3, this._type; case Ms.FogColor: return this._type = ue.Color3, this._type; case Ms.DeltaTime: case Ms.MaterialAlpha: return this._type = ue.Float, this._type; case Ms.CameraParameters: return this._type = ue.Vector4, this._type; } } return this._type; } /** * Creates a new InputBlock * @param name defines the block name * @param target defines the target of that block (Vertex by default) * @param type defines the type of the input (can be set to NodeMaterialBlockConnectionPointTypes.AutoDetect) */ constructor(e, t = Le.Vertex, i = ue.AutoDetect) { super(e, t, !1), this._mode = bd.Undefined, this._animationType = tT.None, this.min = 0, this.max = 0, this.isBoolean = !1, this.matrixMode = 0, this._systemValue = null, this.isConstant = !1, this.groupInInspector = "", this.onValueChangedObservable = new Fe(), this.convertToGammaSpace = !1, this.convertToLinearSpace = !1, this._type = i, this.setDefaultValue(), this.registerOutput("output", i); } /** * Validates if a name is a reserve word. * @param newName the new name to be given to the node. * @returns false if the name is a reserve word, else true. */ validateBlockName(e) { return this.isAttribute ? 
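/* attribute inputs are allowed to use reserved names such as "position" or "uv", so the base-class name check is skipped for them */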
!0 : super.validateBlockName(e); } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Set the source of this connection point to a vertex attribute * @param attributeName defines the attribute name (position, uv, normal, etc...). If not specified it will take the connection point name * @returns the current connection point */ setAsAttribute(e) { return this._mode = bd.Attribute, e && (this.name = e), this; } /** * Set the source of this connection point to a system value * @param value define the system value to use (world, view, etc...) or null to switch to manual value * @returns the current connection point */ setAsSystemValue(e) { return this.systemValue = e, this; } /** * Gets or sets the value of that point. * Please note that this value will be ignored if valueCallback is defined */ get value() { return this._storedValue; } set value(e) { this.type === ue.Float && (this.isBoolean ? e = e ? 1 : 0 : this.min !== this.max && (e = Math.max(this.min, e), e = Math.min(this.max, e))), this._storedValue = e, this._mode = bd.Uniform, this.onValueChangedObservable.notifyObservers(this); } /** * Gets or sets a callback used to get the value of that point. * Please note that setting this value will force the connection point to ignore the value property */ get valueCallback() { return this._valueCallback; } set valueCallback(e) { this._valueCallback = e, this._mode = bd.Uniform; } /** * Gets or sets the associated variable name in the shader */ get associatedVariableName() { return this._associatedVariableName; } set associatedVariableName(e) { this._associatedVariableName = e; } /** Gets or sets the type of animation applied to the input */ get animationType() { return this._animationType; } set animationType(e) { this._animationType = e; } /** * Gets a boolean indicating that this connection point not defined yet */ get isUndefined() { return this._mode === bd.Undefined; } /** * Gets or sets a boolean indicating that this connection point is coming from an uniform. * In this case the connection point name must be the name of the uniform to use. * Can only be set on inputs */ get isUniform() { return this._mode === bd.Uniform; } set isUniform(e) { this._mode = e ? bd.Uniform : bd.Undefined, this.associatedVariableName = ""; } /** * Gets or sets a boolean indicating that this connection point is coming from an attribute. * In this case the connection point name must be the name of the attribute to use * Can only be set on inputs */ get isAttribute() { return this._mode === bd.Attribute; } set isAttribute(e) { this._mode = e ? bd.Attribute : bd.Undefined, this.associatedVariableName = ""; } /** * Gets or sets a boolean indicating that this connection point is generating a varying variable. * Can only be set on exit points */ get isVarying() { return this._mode === bd.Varying; } set isVarying(e) { this._mode = e ? 
bd.Varying : bd.Undefined, this.associatedVariableName = ""; } /** * Gets a boolean indicating that the current connection point is a system value */ get isSystemValue() { return this._systemValue != null; } /** * Gets or sets the current well known value or null if not defined as a system value */ get systemValue() { return this._systemValue; } set systemValue(e) { this._mode = bd.Uniform, this.associatedVariableName = "", this._systemValue = e; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InputBlock"; } /** * Animate the input if animationType !== None * @param scene defines the rendering scene */ animate(e) { switch (this._animationType) { case tT.Time: { this.type === ue.Float && (this.value += e.getAnimationRatio() * 0.01); break; } case tT.RealTime: { this.type === ue.Float && (this.value = (Gs.Now - e.getEngine().startTime) / 1e3); break; } } } _emitDefine(e) { return e[0] === "!" ? `#ifndef ${e.substring(1)} ` : `#ifdef ${e} `; } initialize() { this.associatedVariableName = ""; } /** * Set the input block to its default value (based on its type) */ setDefaultValue() { switch (this.type) { case ue.Float: this.value = 0; break; case ue.Vector2: this.value = at.Zero(); break; case ue.Vector3: this.value = D.Zero(); break; case ue.Vector4: this.value = Di.Zero(); break; case ue.Color3: this.value = ze.White(); break; case ue.Color4: this.value = new Et(1, 1, 1, 1); break; case ue.Matrix: this.value = Ae.Identity(); break; } } _emitConstant(e) { switch (this.type) { case ue.Float: return `${e._emitFloat(this.value)}`; case ue.Vector2: return `vec2(${this.value.x}, ${this.value.y})`; case ue.Vector3: return `vec3(${this.value.x}, ${this.value.y}, ${this.value.z})`; case ue.Vector4: return `vec4(${this.value.x}, ${this.value.y}, ${this.value.z}, ${this.value.w})`; case ue.Color3: return mn.Color3[0].set(this.value.r, this.value.g, this.value.b), this.convertToGammaSpace && mn.Color3[0].toGammaSpaceToRef(mn.Color3[0], e.sharedData.scene.getEngine().useExactSrgbConversions), this.convertToLinearSpace && mn.Color3[0].toLinearSpaceToRef(mn.Color3[0], e.sharedData.scene.getEngine().useExactSrgbConversions), `vec3(${mn.Color3[0].r}, ${mn.Color3[0].g}, ${mn.Color3[0].b})`; case ue.Color4: return mn.Color4[0].set(this.value.r, this.value.g, this.value.b, this.value.a), this.convertToGammaSpace && mn.Color4[0].toGammaSpaceToRef(mn.Color4[0], e.sharedData.scene.getEngine().useExactSrgbConversions), this.convertToLinearSpace && mn.Color4[0].toLinearSpaceToRef(mn.Color4[0], e.sharedData.scene.getEngine().useExactSrgbConversions), `vec4(${mn.Color4[0].r}, ${mn.Color4[0].g}, ${mn.Color4[0].b}, ${mn.Color4[0].a})`; } return ""; } /** @internal */ get _noContextSwitch() { return Fk[this.name]; } _emit(e, t) { var i; if (this.isUniform) { if (this.associatedVariableName || (this.associatedVariableName = e._getFreeVariableName("u_" + this.name)), this.isConstant) { if (e.constants.indexOf(this.associatedVariableName) !== -1) return; e.constants.push(this.associatedVariableName), e._constantDeclaration += this._declareOutput(this.output, e) + ` = ${this._emitConstant(e)}; `; return; } if (e.uniforms.indexOf(this.associatedVariableName) !== -1) return; e.uniforms.push(this.associatedVariableName), t && (e._uniformDeclaration += this._emitDefine(t)), e._uniformDeclaration += `uniform ${e._getGLType(this.type)} ${this.associatedVariableName}; `, t && (e._uniformDeclaration += `#endif `); const r = e.sharedData.hints; if (this._systemValue !== null && 
this._systemValue !== void 0) switch (this._systemValue) { case Ms.WorldView: r.needWorldViewMatrix = !0; break; case Ms.WorldViewProjection: r.needWorldViewProjectionMatrix = !0; break; } else this._animationType !== tT.None && e.sharedData.animatedInputs.push(this); return; } if (this.isAttribute) { if (this.associatedVariableName = (i = Fce[this.name]) !== null && i !== void 0 ? i : this.name, this.target === Le.Vertex && e._vertexState) { Fk[this.name] ? IZ[this.name] ? e._emitUniformFromString(this.associatedVariableName, e._getGLType(this.type), t) : e._emitVaryingFromString(this.associatedVariableName, e._getGLType(this.type), t) : this._emit(e._vertexState, t); return; } if (e.attributes.indexOf(this.associatedVariableName) !== -1) return; e.attributes.push(this.associatedVariableName), Fk[this.name] ? IZ[this.name] ? e._emitUniformFromString(this.associatedVariableName, e._getGLType(this.type), t) : e._emitVaryingFromString(this.associatedVariableName, e._getGLType(this.type), t) : (t && (e._attributeDeclaration += this._emitDefine(t)), e._attributeDeclaration += `attribute ${e._getGLType(this.type)} ${this.associatedVariableName}; `, t && (e._attributeDeclaration += `#endif `)); } } /** * @internal */ _transmitWorld(e, t, i, r) { if (!this._systemValue) return; const s = this.associatedVariableName; switch (this._systemValue) { case Ms.World: e.setMatrix(s, t); break; case Ms.WorldView: e.setMatrix(s, i); break; case Ms.WorldViewProjection: e.setMatrix(s, r); break; } } /** * @internal */ _transmit(e, t, i) { if (this.isAttribute) return; const r = this.associatedVariableName; if (this._systemValue) { switch (this._systemValue) { case Ms.World: case Ms.WorldView: case Ms.WorldViewProjection: return; case Ms.View: e.setMatrix(r, t.getViewMatrix()); break; case Ms.Projection: e.setMatrix(r, t.getProjectionMatrix()); break; case Ms.ViewProjection: e.setMatrix(r, t.getTransformMatrix()); break; case Ms.CameraPosition: t.bindEyePosition(e, r, !0); break; case Ms.FogColor: e.setColor3(r, t.fogColor); break; case Ms.DeltaTime: e.setFloat(r, t.deltaTime / 1e3); break; case Ms.CameraParameters: t.activeCamera && e.setFloat4(r, t.getEngine().hasOriginBottomLeft ? -1 : 1, t.activeCamera.minZ, t.activeCamera.maxZ, 1 / t.activeCamera.maxZ); break; case Ms.MaterialAlpha: e.setFloat(r, i.alpha); break; } return; } const s = this._valueCallback ? 
this._valueCallback() : this._storedValue; if (s !== null) switch (this.type) { case ue.Float: e.setFloat(r, s); break; case ue.Int: e.setInt(r, s); break; case ue.Color3: mn.Color3[0].set(this.value.r, this.value.g, this.value.b), this.convertToGammaSpace && mn.Color3[0].toGammaSpaceToRef(mn.Color3[0], t.getEngine().useExactSrgbConversions), this.convertToLinearSpace && mn.Color3[0].toLinearSpaceToRef(mn.Color3[0], t.getEngine().useExactSrgbConversions), e.setColor3(r, mn.Color3[0]); break; case ue.Color4: mn.Color4[0].set(this.value.r, this.value.g, this.value.b, this.value.a), this.convertToGammaSpace && mn.Color4[0].toGammaSpaceToRef(mn.Color4[0], t.getEngine().useExactSrgbConversions), this.convertToLinearSpace && mn.Color4[0].toLinearSpaceToRef(mn.Color4[0], t.getEngine().useExactSrgbConversions), e.setDirectColor4(r, mn.Color4[0]); break; case ue.Vector2: e.setVector2(r, s); break; case ue.Vector3: e.setVector3(r, s); break; case ue.Vector4: e.setVector4(r, s); break; case ue.Matrix: e.setMatrix(r, s); break; } } _buildBlock(e) { super._buildBlock(e), (this.isUniform || this.isSystemValue) && e.sharedData.inputBlocks.push(this), this._emit(e); } _dumpPropertiesCode() { const e = this._codeVariableName; if (this.isAttribute) return super._dumpPropertiesCode() + `${e}.setAsAttribute("${this.name}"); `; if (this.isSystemValue) return super._dumpPropertiesCode() + `${e}.setAsSystemValue(BABYLON.NodeMaterialSystemValues.${Ms[this._systemValue]}); `; if (this.isUniform) { const t = []; let i = ""; switch (this.type) { case ue.Float: i = `${this.value}`; break; case ue.Vector2: i = `new BABYLON.Vector2(${this.value.x}, ${this.value.y})`; break; case ue.Vector3: i = `new BABYLON.Vector3(${this.value.x}, ${this.value.y}, ${this.value.z})`; break; case ue.Vector4: i = `new BABYLON.Vector4(${this.value.x}, ${this.value.y}, ${this.value.z}, ${this.value.w})`; break; case ue.Color3: i = `new BABYLON.Color3(${this.value.r}, ${this.value.g}, ${this.value.b})`, this.convertToGammaSpace && (i += ".toGammaSpace()"), this.convertToLinearSpace && (i += ".toLinearSpace()"); break; case ue.Color4: i = `new BABYLON.Color4(${this.value.r}, ${this.value.g}, ${this.value.b}, ${this.value.a})`, this.convertToGammaSpace && (i += ".toGammaSpace()"), this.convertToLinearSpace && (i += ".toLinearSpace()"); break; case ue.Matrix: i = `BABYLON.Matrix.FromArray([${this.value.m}])`; break; } return t.push(`${e}.value = ${i}`), this.type === ue.Float && t.push(`${e}.min = ${this.min}`, `${e}.max = ${this.max}`, `${e}.isBoolean = ${this.isBoolean}`, `${e}.matrixMode = ${this.matrixMode}`, `${e}.animationType = BABYLON.AnimatedInputBlockTypes.${tT[this.animationType]}`), t.push(`${e}.isConstant = ${this.isConstant}`), t.push(""), super._dumpPropertiesCode() + t.join(`; `); } return super._dumpPropertiesCode(); } dispose() { this.onValueChangedObservable.clear(), super.dispose(); } serialize() { const e = super.serialize(); return e.type = this.type, e.mode = this._mode, e.systemValue = this._systemValue, e.animationType = this._animationType, e.min = this.min, e.max = this.max, e.isBoolean = this.isBoolean, e.matrixMode = this.matrixMode, e.isConstant = this.isConstant, e.groupInInspector = this.groupInInspector, e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, this._storedValue != null && this._mode === bd.Uniform && (this._storedValue.asArray ? (e.valueType = "BABYLON." 
+ this._storedValue.getClassName(), e.value = this._storedValue.asArray()) : (e.valueType = "number", e.value = this._storedValue)), e; } _deserialize(e, t, i) { if (this._mode = e.mode, super._deserialize(e, t, i), this._type = e.type, this._systemValue = e.systemValue || e.wellKnownValue, this._animationType = e.animationType, this.min = e.min || 0, this.max = e.max || 0, this.isBoolean = !!e.isBoolean, this.matrixMode = e.matrixMode || 0, this.isConstant = !!e.isConstant, this.groupInInspector = e.groupInInspector || "", this.convertToGammaSpace = !!e.convertToGammaSpace, this.convertToLinearSpace = !!e.convertToLinearSpace, e.name === "tangent" && e.mode === bd.Attribute && e.type === ue.Vector3 && (this._type = ue.Vector4), !!e.valueType) if (e.valueType === "number") this._storedValue = e.value; else { const r = Qo(e.valueType); r && (this._storedValue = r.FromArray(e.value)); } } } Be("BABYLON.InputBlock", vs); class CK extends Wi { /** * Create a new CurrentScreenBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this._samplerName = "textureSampler", this.convertToGammaSpace = !1, this.convertToLinearSpace = !1, this._isUnique = !1, this.registerInput("uv", ue.AutoDetect, !1, Le.VertexAndFragment), this.registerOutput("rgba", ue.Color4, Le.Neutral), this.registerOutput("rgb", ue.Color3, Le.Neutral), this.registerOutput("r", ue.Float, Le.Neutral), this.registerOutput("g", ue.Float, Le.Neutral), this.registerOutput("b", ue.Float, Le.Neutral), this.registerOutput("a", ue.Float, Le.Neutral), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector2 | ue.Vector3 | ue.Vector4), this._inputs[0]._prioritizeVertex = !1; } /** * Gets the current class name * @returns the class name */ getClassName() { return "CurrentScreenBlock"; } /** * Gets the uv input component */ get uv() { return this._inputs[0]; } /** * Gets the rgba output component */ get rgba() { return this._outputs[0]; } /** * Gets the rgb output component */ get rgb() { return this._outputs[1]; } /** * Gets the r output component */ get r() { return this._outputs[2]; } /** * Gets the g output component */ get g() { return this._outputs[3]; } /** * Gets the b output component */ get b() { return this._outputs[4]; } /** * Gets the a output component */ get a() { return this._outputs[5]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("textureSampler"); } get target() { return !this.uv.isConnected || this.uv.sourceBlock.isInput ? 
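/* Usage sketch (editor's addition, not part of the bundled source): wiring the CurrentScreenBlock
   whose inputs and outputs are declared above. The uv attribute name and fragOut (assumed to be a
   BABYLON.FragmentOutputBlock) are illustrative; the exact uv source depends on how the node
   material is used.

   const uvInput = new BABYLON.InputBlock("uv");
   uvInput.setAsAttribute("uv");
   const screen = new BABYLON.CurrentScreenBlock("currentScreen");
   uvInput.output.connectTo(screen.uv);
   screen.rgba.connectTo(fragOut.rgba);   // or use the rgb / r / g / b / a outputs individually
*/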
Le.VertexAndFragment : Le.Fragment; } prepareDefines(e, t, i) { i.setValue(this._linearDefineName, this.convertToGammaSpace, !0), i.setValue(this._gammaDefineName, this.convertToLinearSpace, !0); } isReady() { return !(this.texture && !this.texture.isReadyOrNotBlocking()); } _injectVertexCode(e) { const t = this.uv; if (t.connectedPoint.ownerBlock.isInput && (t.connectedPoint.ownerBlock.isAttribute || e._emitUniformFromString(t.associatedVariableName, "vec2")), this._mainUVName = "vMain" + t.associatedVariableName, e._emitVaryingFromString(this._mainUVName, "vec2"), e.compilationString += `${this._mainUVName} = ${t.associatedVariableName}.xy; `, !!this._outputs.some((i) => i.isConnectedInVertexShader)) { this._writeTextureRead(e, !0); for (const i of this._outputs) i.hasEndpoints && this._writeOutput(e, i, i.name, !0); } } _writeTextureRead(e, t = !1) { const i = this.uv; if (t) { if (e.target === Le.Fragment) return; e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${i.associatedVariableName}); `; return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${i.associatedVariableName}); `; return; } e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${this._mainUVName}); `; } _writeOutput(e, t, i, r = !1) { if (r) { if (e.target === Le.Fragment) return; e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `; return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `; return; } e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `, e.compilationString += `#ifdef ${this._linearDefineName} `, e.compilationString += `${t.associatedVariableName} = toGammaSpace(${t.associatedVariableName}); `, e.compilationString += `#endif `, e.compilationString += `#ifdef ${this._gammaDefineName} `, e.compilationString += `${t.associatedVariableName} = toLinearSpace(${t.associatedVariableName}); `, e.compilationString += `#endif `; } _buildBlock(e) { if (super._buildBlock(e), this._tempTextureRead = e._getFreeVariableName("tempTextureRead"), e.sharedData.blockingBlocks.indexOf(this) < 0 && e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.indexOf(this) < 0 && e.sharedData.textureBlocks.push(this), e.sharedData.blocksWithDefines.indexOf(this) < 0 && e.sharedData.blocksWithDefines.push(this), e.target !== Le.Fragment) { e._emit2DSampler(this._samplerName), this._injectVertexCode(e); return; } if (!this._outputs.some((i) => i.isConnectedInFragmentShader)) return; e._emit2DSampler(this._samplerName), this._linearDefineName = e._getFreeDefineName("ISLINEAR"), this._gammaDefineName = e._getFreeDefineName("ISGAMMA"); const t = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", t), this._writeTextureRead(e); for (const i of this._outputs) i.hasEndpoints && this._writeOutput(e, i, i.name); return this; } serialize() { const e = super.serialize(); return e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, this.texture && !this.texture.isRenderTarget && (e.texture = this.texture.serialize()), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.convertToGammaSpace = e.convertToGammaSpace, this.convertToLinearSpace = !!e.convertToLinearSpace, e.texture && (i = e.texture.url.indexOf("data:") === 0 ? 
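/* Usage sketch (editor's addition, not part of the bundled source): the ParticleTextureBlock
   defined just below auto-configures its uv input from the "particle_uv" attribute; the manual
   equivalent looks like this. Block and variable names are illustrative.

   const particleUV = new BABYLON.InputBlock("uv");
   particleUV.setAsAttribute("particle_uv");
   const particleTexture = new BABYLON.ParticleTextureBlock("diffuse");
   particleUV.output.connectTo(particleTexture.uv);
   // particleTexture.texture is assumed to be assigned elsewhere; isReady() waits on it.
*/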
"" : i, this.texture = De.Parse(e.texture, t, i)); } } Be("BABYLON.CurrentScreenBlock", CK); class xK extends Wi { /** * Create a new ParticleTextureBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._samplerName = "diffuseSampler", this.convertToGammaSpace = !1, this.convertToLinearSpace = !1, this._isUnique = !1, this.registerInput("uv", ue.AutoDetect, !1, Le.VertexAndFragment), this.registerOutput("rgba", ue.Color4, Le.Neutral), this.registerOutput("rgb", ue.Color3, Le.Neutral), this.registerOutput("r", ue.Float, Le.Neutral), this.registerOutput("g", ue.Float, Le.Neutral), this.registerOutput("b", ue.Float, Le.Neutral), this.registerOutput("a", ue.Float, Le.Neutral), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector2 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ParticleTextureBlock"; } /** * Gets the uv input component */ get uv() { return this._inputs[0]; } /** * Gets the rgba output component */ get rgba() { return this._outputs[0]; } /** * Gets the rgb output component */ get rgb() { return this._outputs[1]; } /** * Gets the r output component */ get r() { return this._outputs[2]; } /** * Gets the g output component */ get g() { return this._outputs[3]; } /** * Gets the b output component */ get b() { return this._outputs[4]; } /** * Gets the a output component */ get a() { return this._outputs[5]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("diffuseSampler"); } autoConfigure(e, t = () => !0) { if (!this.uv.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "particle_uv" && t(r)); i || (i = new vs("uv"), i.setAsAttribute("particle_uv")), i.output.connectTo(this.uv); } } prepareDefines(e, t, i) { i.setValue(this._linearDefineName, this.convertToGammaSpace, !0), i.setValue(this._gammaDefineName, this.convertToLinearSpace, !0); } isReady() { return !(this.texture && !this.texture.isReadyOrNotBlocking()); } _writeOutput(e, t, i) { e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `, e.compilationString += `#ifdef ${this._linearDefineName} `, e.compilationString += `${t.associatedVariableName} = toGammaSpace(${t.associatedVariableName}); `, e.compilationString += `#endif `, e.compilationString += `#ifdef ${this._gammaDefineName} `, e.compilationString += `${t.associatedVariableName} = toLinearSpace(${t.associatedVariableName}); `, e.compilationString += `#endif `; } _buildBlock(e) { if (super._buildBlock(e), e.target === Le.Vertex) return; this._tempTextureRead = e._getFreeVariableName("tempTextureRead"), e._emit2DSampler(this._samplerName), e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), e.sharedData.blocksWithDefines.push(this), this._linearDefineName = e._getFreeDefineName("ISLINEAR"), this._gammaDefineName = e._getFreeDefineName("ISGAMMA"); const t = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", t), e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${this.uv.associatedVariableName}); `; for (const i of this._outputs) i.hasEndpoints && this._writeOutput(e, i, i.name); return this; } serialize() { const e = super.serialize(); return e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, this.texture && !this.texture.isRenderTarget && 
(e.texture = this.texture.serialize()), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.convertToGammaSpace = e.convertToGammaSpace, this.convertToLinearSpace = !!e.convertToLinearSpace, e.texture && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, this.texture = De.Parse(e.texture, t, i)); } } Be("BABYLON.ParticleTextureBlock", xK); class bK extends Wi { /** * Create a new ParticleRampGradientBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._isUnique = !0, this.registerInput("color", ue.Color4, !1, Le.Fragment), this.registerOutput("rampColor", ue.Color4, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ParticleRampGradientBlock"; } /** * Gets the color input component */ get color() { return this._inputs[0]; } /** * Gets the rampColor output component */ get rampColor() { return this._outputs[0]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("remapRanges"), e._excludeVariableName("rampSampler"), e._excludeVariableName("baseColor"), e._excludeVariableName("alpha"), e._excludeVariableName("remappedColorIndex"), e._excludeVariableName("rampColor"), e._excludeVariableName("finalAlpha"); } _buildBlock(e) { if (super._buildBlock(e), e.target !== Le.Vertex) return e._emit2DSampler("rampSampler"), e._emitVaryingFromString("remapRanges", "vec4", "RAMPGRADIENT"), e.compilationString += ` #ifdef RAMPGRADIENT vec4 baseColor = ${this.color.associatedVariableName}; float alpha = ${this.color.associatedVariableName}.a; float remappedColorIndex = clamp((alpha - remapRanges.x) / remapRanges.y, 0.0, 1.0); vec4 rampColor = texture2D(rampSampler, vec2(1.0 - remappedColorIndex, 0.)); baseColor.rgb *= rampColor.rgb; // Remapped alpha float finalAlpha = baseColor.a; baseColor.a = clamp((alpha * rampColor.a - remapRanges.z) / remapRanges.w, 0.0, 1.0); ${this._declareOutput(this.rampColor, e)} = baseColor; #else ${this._declareOutput(this.rampColor, e)} = ${this.color.associatedVariableName}; #endif `, this; } } Be("BABYLON.ParticleRampGradientBlock", bK); class EK extends Wi { /** * Create a new ParticleBlendMultiplyBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._isUnique = !0, this.registerInput("color", ue.Color4, !1, Le.Fragment), this.registerInput("alphaTexture", ue.Float, !1, Le.Fragment), this.registerInput("alphaColor", ue.Float, !1, Le.Fragment), this.registerOutput("blendColor", ue.Color4, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ParticleBlendMultiplyBlock"; } /** * Gets the color input component */ get color() { return this._inputs[0]; } /** * Gets the alphaTexture input component */ get alphaTexture() { return this._inputs[1]; } /** * Gets the alphaColor input component */ get alphaColor() { return this._inputs[2]; } /** * Gets the blendColor output component */ get blendColor() { return this._outputs[0]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("sourceAlpha"); } _buildBlock(e) { if (super._buildBlock(e), e.target !== Le.Vertex) return e.compilationString += ` #ifdef BLENDMULTIPLYMODE ${this._declareOutput(this.blendColor, e)}; float sourceAlpha = ${this.alphaColor.associatedVariableName} * 
${this.alphaTexture.associatedVariableName}; ${this.blendColor.associatedVariableName}.rgb = ${this.color.associatedVariableName}.rgb * sourceAlpha + vec3(1.0) * (1.0 - sourceAlpha); ${this.blendColor.associatedVariableName}.a = ${this.color.associatedVariableName}.a; #else ${this._declareOutput(this.blendColor, e)} = ${this.color.associatedVariableName}; #endif `, this; } } Be("BABYLON.ParticleBlendMultiplyBlock", EK); class pl { constructor() { this._defines = {}, this._currentRank = 32, this._maxRank = -1, this._mesh = null; } /** * Removes the fallback from the bound mesh. */ unBindMesh() { this._mesh = null; } /** * Adds a fallback on the specified property. * @param rank The rank of the fallback (Lower ranks will be fallbacked to first) * @param define The name of the define in the shader */ addFallback(e, t) { this._defines[e] || (e < this._currentRank && (this._currentRank = e), e > this._maxRank && (this._maxRank = e), this._defines[e] = new Array()), this._defines[e].push(t); } /** * Sets the mesh to use CPU skinning when needing to fallback. * @param rank The rank of the fallback (Lower ranks will be fallbacked to first) * @param mesh The mesh to use the fallbacks. */ addCPUSkinningFallback(e, t) { this._mesh = t, e < this._currentRank && (this._currentRank = e), e > this._maxRank && (this._maxRank = e); } /** * Checks to see if more fallbacks are still available. */ get hasMoreFallbacks() { return this._currentRank <= this._maxRank; } /** * Removes the defines that should be removed when falling back. * @param currentDefines defines the current define statements for the shader. * @param effect defines the current effect we try to compile * @returns The resulting defines with defines of the current rank removed. */ reduce(e, t) { if (this._mesh && this._mesh.computeBonesUsingShaders && this._mesh.numBoneInfluencers > 0) { this._mesh.computeBonesUsingShaders = !1, e = e.replace("#define NUM_BONE_INFLUENCERS " + this._mesh.numBoneInfluencers, "#define NUM_BONE_INFLUENCERS 0"), t._bonesComputationForcedToCPU = !0; const i = this._mesh.getScene(); for (let r = 0; r < i.meshes.length; r++) { const s = i.meshes[r]; if (!s.material) { !this._mesh.material && s.computeBonesUsingShaders && s.numBoneInfluencers > 0 && (s.computeBonesUsingShaders = !1); continue; } if (!(!s.computeBonesUsingShaders || s.numBoneInfluencers === 0)) { if (s.material.getEffect() === t) s.computeBonesUsingShaders = !1; else if (s.subMeshes) { for (const n of s.subMeshes) if (n.effect === t) { s.computeBonesUsingShaders = !1; break; } } } } } else { const i = this._defines[this._currentRank]; if (i) for (let r = 0; r < i.length; r++) e = e.replace("#define " + i[r], ""); this._currentRank++; } return e; } } const Bce = "postprocessVertexShader", Uce = `attribute vec2 position;uniform vec2 scale;varying vec2 vUV;const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vUV=(position*madd+madd)*scale;gl_Position=vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[Bce] = Uce; class FL { /** * Gets the depth/stencil texture (if created by a createDepthStencilTexture() call) */ get depthStencilTexture() { return this._depthStencilTexture; } /** * Indicates if the depth/stencil texture has a stencil aspect */ get depthStencilTextureWithStencil() { return this._depthStencilTextureWithStencil; } /** * Defines if the render target wrapper is for a cube texture or if false a 2d texture */ get isCube() { return this._isCube; } /** * Defines if 
the render target wrapper is for a single or multi target render wrapper */ get isMulti() { return this._isMulti; } /** * Defines if the render target wrapper is for a single or an array of textures */ get is2DArray() { return this.layers > 0; } /** * Gets the size of the render target wrapper (used for cubes, as width=height in this case) */ get size() { return this.width; } /** * Gets the width of the render target wrapper */ get width() { return this._size.width || this._size; } /** * Gets the height of the render target wrapper */ get height() { return this._size.height || this._size; } /** * Gets the number of layers of the render target wrapper (only used if is2DArray is true and wrapper is not a multi render target) */ get layers() { return this._size.layers || 0; } /** * Gets the render texture. If this is a multi render target, gets the first texture */ get texture() { var e, t; return (t = (e = this._textures) === null || e === void 0 ? void 0 : e[0]) !== null && t !== void 0 ? t : null; } /** * Gets the list of render textures. If we are not in a multi render target, the list will be null (use the texture getter instead) */ get textures() { return this._textures; } /** * Gets the face indices that correspond to the list of render textures. If we are not in a multi render target, the list will be null */ get faceIndices() { return this._faceIndices; } /** * Gets the layer indices that correspond to the list of render textures. If we are not in a multi render target, the list will be null */ get layerIndices() { return this._layerIndices; } /** * Gets the sample count of the render target */ get samples() { return this._samples; } /** * Sets the sample count of the render target * @param value sample count * @param initializeBuffers If set to true, the engine will make an initializing call to drawBuffers (only used when isMulti=true). * @param force true to force calling the update sample count engine function even if the current sample count is equal to value * @returns the sample count that has been set */ setSamples(e, t = !0, i = !1) { if (this.samples === e && !i) return e; const r = this._isMulti ? this._engine.updateMultipleRenderTargetTextureSampleCount(this, e, t) : this._engine.updateRenderTargetTextureSampleCount(this, e); return this._samples = e, r; } /** * Initializes the render target wrapper * @param isMulti true if the wrapper is a multi render target * @param isCube true if the wrapper should render to a cube texture * @param size size of the render target (width/height/layers) * @param engine engine used to create the render target * @param label defines the label to use for the wrapper (for debugging purpose only) */ constructor(e, t, i, r, s) { this._textures = null, this._faceIndices = null, this._layerIndices = null, this._samples = 1, this._attachments = null, this._generateStencilBuffer = !1, this._generateDepthBuffer = !1, this._depthStencilTextureWithStencil = !1, this._isMulti = e, this._isCube = t, this._size = i, this._engine = r, this._depthStencilTexture = null, this.label = s; } /** * Sets the render target texture(s) * @param textures texture(s) to set */ setTextures(e) { Array.isArray(e) ? this._textures = e : e ? 
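/* Usage sketch (editor's addition, not part of the bundled source): the fallback helper defined
   earlier in this bundle (assumed to correspond to BABYLON.EffectFallbacks). Lower ranks are
   dropped first when an effect fails to compile; reduce() strips the defines of the current rank
   and then moves on to the next one.

   const fallbacks = new BABYLON.EffectFallbacks();
   fallbacks.addFallback(0, "FOG");            // removed first
   fallbacks.addFallback(1, "SHADOWS");        // removed next
   fallbacks.addCPUSkinningFallback(0, mesh);  // mesh: an AbstractMesh switched to CPU skinning if needed
   // The instance is then passed as the `fallbacks` option when the effect is created.
*/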
this._textures = [e] : this._textures = null; } /** * Set a texture in the textures array * @param texture The texture to set * @param index The index in the textures array to set * @param disposePrevious If this function should dispose the previous texture */ setTexture(e, t = 0, i = !0) { this._textures || (this._textures = []), this._textures[t] !== e && (this._textures[t] && i && this._textures[t].dispose(), this._textures[t] = e); } /** * Sets the layer and face indices of every render target texture bound to each color attachment * @param layers The layers of each texture to be set * @param faces The faces of each texture to be set */ setLayerAndFaceIndices(e, t) { this._layerIndices = e, this._faceIndices = t; } /** * Sets the layer and face indices of a texture in the textures array that should be bound to each color attachment * @param index The index of the texture in the textures array to modify * @param layer The layer of the texture to be set * @param face The face of the texture to be set */ setLayerAndFaceIndex(e = 0, t, i) { this._layerIndices || (this._layerIndices = []), this._faceIndices || (this._faceIndices = []), t !== void 0 && t >= 0 && (this._layerIndices[e] = t), i !== void 0 && i >= 0 && (this._faceIndices[e] = i); } /** * Creates the depth/stencil texture * @param comparisonFunction Comparison function to use for the texture * @param bilinearFiltering true if bilinear filtering should be used when sampling the texture * @param generateStencil true if the stencil aspect should also be created * @param samples sample count to use when creating the texture * @param format format of the depth texture * @param label defines the label to use for the texture (for debugging purpose only) * @returns the depth/stencil created texture */ createDepthStencilTexture(e = 0, t = !0, i = !1, r = 1, s = 14, n) { var a; return (a = this._depthStencilTexture) === null || a === void 0 || a.dispose(), this._depthStencilTextureWithStencil = i, this._depthStencilTexture = this._engine.createDepthStencilTexture(this._size, { bilinearFiltering: t, comparisonFunction: e, generateStencil: i, isCube: this._isCube, samples: r, depthTextureFormat: s, label: n }, this), this._depthStencilTexture; } /** * Shares the depth buffer of this render target with another render target. * @internal * @param renderTarget Destination renderTarget */ _shareDepth(e) { this._depthStencilTexture && (e._depthStencilTexture && e._depthStencilTexture.dispose(), e._depthStencilTexture = this._depthStencilTexture, this._depthStencilTexture.incrementReferences()); } /** * @internal */ _swapAndDie(e) { this.texture && this.texture._swapAndDie(e), this._textures = null, this.dispose(!0); } _cloneRenderTargetWrapper() { var e, t, i, r, s, n, a, l; let o = null; if (this._isMulti) { const u = this.textures; if (u && u.length > 0) { let h = !1, d = u.length; const f = u[u.length - 1]._source; (f === ts.Depth || f === ts.DepthStencil) && (h = !0, d--); const p = [], m = [], _ = [], v = [], C = [], x = [], b = [], S = {}; for (let w = 0; w < d; ++w) { const V = u[w]; p.push(V.samplingMode), m.push(V.type), _.push(V.format), S[V.uniqueId] !== void 0 ? (v.push(-1), b.push(0)) : (S[V.uniqueId] = w, V.is2DArray ? (v.push(35866), b.push(V.depth)) : V.isCube ? (v.push(34067), b.push(0)) : V.is3D ? (v.push(32879), b.push(V.depth)) : (v.push(3553), b.push(0))), this._faceIndices && C.push((e = this._faceIndices[w]) !== null && e !== void 0 ? 
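/* Usage sketch (editor's addition, not part of the bundled source): RenderTargetWrapper helpers
   documented above. `wrapper` stands for a wrapper returned by the engine (see the
   createRenderTargetTexture sketch further down); argument values are illustrative.

   wrapper.createDepthStencilTexture(
       0,      // comparisonFunction: 0 = no comparison
       true,   // bilinearFiltering
       false,  // generateStencil
       1,      // samples
       14      // depth texture format (the default used in the code above)
   );
   wrapper.setSamples(4);                 // switch the target to 4x MSAA
   const colorTexture = wrapper.texture;  // first (or only) render texture
*/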
e : 0), this._layerIndices && x.push((t = this._layerIndices[w]) !== null && t !== void 0 ? t : 0); } const M = { samplingModes: p, generateMipMaps: u[0].generateMipMaps, generateDepthBuffer: this._generateDepthBuffer, generateStencilBuffer: this._generateStencilBuffer, generateDepthTexture: h, types: m, formats: _, textureCount: d, targetTypes: v, faceIndex: C, layerIndex: x, layerCounts: b }, R = { width: this.width, height: this.height }; o = this._engine.createMultipleRenderTarget(R, M); for (let w = 0; w < d; ++w) { if (v[w] !== -1) continue; const V = S[u[w].uniqueId]; o.setTexture(o.textures[V], w); } } } else { const u = {}; if (u.generateDepthBuffer = this._generateDepthBuffer, u.generateMipMaps = (r = (i = this.texture) === null || i === void 0 ? void 0 : i.generateMipMaps) !== null && r !== void 0 ? r : !1, u.generateStencilBuffer = this._generateStencilBuffer, u.samplingMode = (s = this.texture) === null || s === void 0 ? void 0 : s.samplingMode, u.type = (n = this.texture) === null || n === void 0 ? void 0 : n.type, u.format = (a = this.texture) === null || a === void 0 ? void 0 : a.format, this.isCube) o = this._engine.createRenderTargetCubeTexture(this.width, u); else { const h = { width: this.width, height: this.height, layers: this.is2DArray ? (l = this.texture) === null || l === void 0 ? void 0 : l.depth : void 0 }; o = this._engine.createRenderTargetTexture(h, u); } o.texture.isReady = !0; } return o; } _swapRenderTargetWrapper(e) { if (this._textures && e._textures) for (let t = 0; t < this._textures.length; ++t) this._textures[t]._swapAndDie(e._textures[t], !1), e._textures[t].isReady = !0; this._depthStencilTexture && e._depthStencilTexture && (this._depthStencilTexture._swapAndDie(e._depthStencilTexture), e._depthStencilTexture.isReady = !0), this._textures = null, this._depthStencilTexture = null; } /** @internal */ _rebuild() { const e = this._cloneRenderTargetWrapper(); if (e) { if (this._depthStencilTexture) { const t = this._depthStencilTexture.samplingMode, i = t === 2 || t === 3 || t === 11; e.createDepthStencilTexture(this._depthStencilTexture._comparisonFunction, i, this._depthStencilTextureWithStencil, this._depthStencilTexture.samples); } this.samples > 1 && e.setSamples(this.samples), e._swapRenderTargetWrapper(this), e.dispose(); } } /** * Releases the internal render textures */ releaseTextures() { var e, t; if (this._textures) for (let i = 0; (t = i < ((e = this._textures) === null || e === void 0 ? void 0 : e.length)) !== null && t !== void 0 && t; ++i) this._textures[i].dispose(); this._textures = null; } /** * Disposes the whole render target wrapper * @param disposeOnlyFramebuffers true if only the frame buffers should be released (used for the WebGL engine). If false, all the textures will also be released */ dispose(e = !1) { var t; e || ((t = this._depthStencilTexture) === null || t === void 0 || t.dispose(), this._depthStencilTexture = null, this.releaseTextures()), this._engine._releaseRenderTargetWrapper(this); } } class Vce extends FL { constructor(e, t, i, r, s) { super(e, t, i, r), this._framebuffer = null, this._depthStencilBuffer = null, this._MSAAFramebuffer = null, this._colorTextureArray = null, this._depthStencilTextureArray = null, this._disposeOnlyFramebuffers = !1, this._currentLOD = 0, this._context = s; } _cloneRenderTargetWrapper() { let e = null; return this._colorTextureArray && this._depthStencilTextureArray ? 
(e = this._engine.createMultiviewRenderTargetTexture(this.width, this.height), e.texture.isReady = !0) : e = super._cloneRenderTargetWrapper(), e; } _swapRenderTargetWrapper(e) { super._swapRenderTargetWrapper(e), e._framebuffer = this._framebuffer, e._depthStencilBuffer = this._depthStencilBuffer, e._MSAAFramebuffer = this._MSAAFramebuffer, e._colorTextureArray = this._colorTextureArray, e._depthStencilTextureArray = this._depthStencilTextureArray, this._framebuffer = this._depthStencilBuffer = this._MSAAFramebuffer = this._colorTextureArray = this._depthStencilTextureArray = null; } /** * Shares the depth buffer of this render target with another render target. * @internal * @param renderTarget Destination renderTarget */ _shareDepth(e) { super._shareDepth(e); const t = this._context, i = this._depthStencilBuffer, r = e._MSAAFramebuffer || e._framebuffer; e._depthStencilBuffer && e._depthStencilBuffer !== i && t.deleteRenderbuffer(e._depthStencilBuffer), e._depthStencilBuffer = i; const s = e._generateStencilBuffer ? t.DEPTH_STENCIL_ATTACHMENT : t.DEPTH_ATTACHMENT; this._engine._bindUnboundFramebuffer(r), t.framebufferRenderbuffer(t.FRAMEBUFFER, s, t.RENDERBUFFER, i), this._engine._bindUnboundFramebuffer(null); } /** * Binds a texture to this render target on a specific attachment * @param texture The texture to bind to the framebuffer * @param attachmentIndex Index of the attachment * @param faceIndexOrLayer The face or layer of the texture to render to in case of cube texture or array texture * @param lodLevel defines the lod level to bind to the frame buffer */ _bindTextureRenderTarget(e, t = 0, i, r = 0) { var s, n, a, l; if (!e._hardwareTexture) return; const o = this._framebuffer, u = this._engine._currentFramebuffer; if (this._engine._bindUnboundFramebuffer(o), this._engine.webGLVersion > 1) { const h = this._context, d = h["COLOR_ATTACHMENT" + t]; e.is2DArray || e.is3D ? (i = (n = i ?? ((s = this.layerIndices) === null || s === void 0 ? void 0 : s[t])) !== null && n !== void 0 ? n : 0, h.framebufferTextureLayer(h.FRAMEBUFFER, d, e._hardwareTexture.underlyingResource, r, i)) : e.isCube ? (i = (l = i ?? ((a = this.faceIndices) === null || a === void 0 ? void 0 : a[t])) !== null && l !== void 0 ? l : 0, h.framebufferTexture2D(h.FRAMEBUFFER, d, h.TEXTURE_CUBE_MAP_POSITIVE_X + i, e._hardwareTexture.underlyingResource, r)) : h.framebufferTexture2D(h.FRAMEBUFFER, d, h.TEXTURE_2D, e._hardwareTexture.underlyingResource, r); } else { const h = this._context, d = h["COLOR_ATTACHMENT" + t + "_WEBGL"], f = i !== void 0 ? h.TEXTURE_CUBE_MAP_POSITIVE_X + i : h.TEXTURE_2D; h.framebufferTexture2D(h.FRAMEBUFFER, d, f, e._hardwareTexture.underlyingResource, r); } this._engine._bindUnboundFramebuffer(u); } /** * Set a texture in the textures array * @param texture the texture to set * @param index the index in the textures array to set * @param disposePrevious If this function should dispose the previous texture */ setTexture(e, t = 0, i = !0) { super.setTexture(e, t, i), this._bindTextureRenderTarget(e, t); } /** * Sets the layer and face indices of every render target texture * @param layers The layer of the texture to be set (make negative to not modify) * @param faces The face of the texture to be set (make negative to not modify) */ setLayerAndFaceIndices(e, t) { var i, r; if (super.setLayerAndFaceIndices(e, t), !this.textures || !this.layerIndices || !this.faceIndices) return; const s = (r = (i = this._attachments) === null || i === void 0 ? void 0 : i.length) !== null && r !== void 0 ? 
r : this.textures.length; for (let n = 0; n < s; n++) { const a = this.textures[n]; a && (a.is2DArray || a.is3D ? this._bindTextureRenderTarget(a, n, this.layerIndices[n]) : a.isCube ? this._bindTextureRenderTarget(a, n, this.faceIndices[n]) : this._bindTextureRenderTarget(a, n)); } } /** * Set the face and layer indices of a texture in the textures array * @param index The index of the texture in the textures array to modify * @param layer The layer of the texture to be set * @param face The face of the texture to be set */ setLayerAndFaceIndex(e = 0, t, i) { if (super.setLayerAndFaceIndex(e, t, i), !this.textures || !this.layerIndices || !this.faceIndices) return; const r = this.textures[e]; r.is2DArray || r.is3D ? this._bindTextureRenderTarget(this.textures[e], e, this.layerIndices[e]) : r.isCube && this._bindTextureRenderTarget(this.textures[e], e, this.faceIndices[e]); } dispose(e = this._disposeOnlyFramebuffers) { const t = this._context; e || (this._colorTextureArray && (this._context.deleteTexture(this._colorTextureArray), this._colorTextureArray = null), this._depthStencilTextureArray && (this._context.deleteTexture(this._depthStencilTextureArray), this._depthStencilTextureArray = null)), this._framebuffer && (t.deleteFramebuffer(this._framebuffer), this._framebuffer = null), this._depthStencilBuffer && (t.deleteRenderbuffer(this._depthStencilBuffer), this._depthStencilBuffer = null), this._MSAAFramebuffer && (t.deleteFramebuffer(this._MSAAFramebuffer), this._MSAAFramebuffer = null), super.dispose(e); } } mi.prototype._createHardwareRenderTargetWrapper = function(c, e, t) { const i = new Vce(c, e, t, this, this._gl); return this._renderTargetWrapperCache.push(i), i; }; mi.prototype.createRenderTargetTexture = function(c, e) { var t, i; const r = this._createHardwareRenderTargetWrapper(!1, !1, c); let s = !0, n = !1, a = !1, l, o = 1; e !== void 0 && typeof e == "object" && (s = (t = e.generateDepthBuffer) !== null && t !== void 0 ? t : !0, n = !!e.generateStencilBuffer, a = !!e.noColorAttachment, l = e.colorAttachment, o = (i = e.samples) !== null && i !== void 0 ? i : 1); const u = l || (a ? null : this._createInternalTexture(c, e, !0, ts.RenderTarget)), h = c.width || c, d = c.height || c, f = this._currentFramebuffer, p = this._gl, m = p.createFramebuffer(); return this._bindUnboundFramebuffer(m), r._depthStencilBuffer = this._setupFramebufferDepthAttachments(n, s, h, d), u && !u.is2DArray && p.framebufferTexture2D(p.FRAMEBUFFER, p.COLOR_ATTACHMENT0, p.TEXTURE_2D, u._hardwareTexture.underlyingResource, 0), this._bindUnboundFramebuffer(f), r._framebuffer = m, r._generateDepthBuffer = s, r._generateStencilBuffer = n, r.setTextures(u), this.updateRenderTargetTextureSampleCount(r, o), r; }; mi.prototype.createDepthStencilTexture = function(c, e, t) { if (e.isCube) { const i = c.width || c; return this._createDepthStencilCubeTexture(i, e, t); } else return this._createDepthStencilTexture(c, e, t); }; mi.prototype._createDepthStencilTexture = function(c, e, t) { const i = this._gl, r = c.layers || 0, s = r !== 0 ? i.TEXTURE_2D_ARRAY : i.TEXTURE_2D, n = new ln(this, ts.DepthStencil); if (!this._caps.depthTextureExtension) return Ce.Error("Depth texture is not supported by your browser or hardware."), n; const a = Object.assign({ bilinearFiltering: !1, comparisonFunction: 0, generateStencil: !1 }, e); if (this._bindTextureDirectly(s, n, !0), this._setupDepthStencilTexture(n, c, a.generateStencil, a.comparisonFunction === 0 ? 
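/* Usage sketch (editor's addition, not part of the bundled source): the engine-level
   createRenderTargetTexture helper patched onto the engine prototype above. Option names come
   from that code; `engine` is assumed to be a WebGL engine instance, and most applications use
   the higher-level RenderTargetTexture class instead of calling this directly.

   const rtWrapper = engine.createRenderTargetTexture(
       { width: 512, height: 512 },
       {
           generateDepthBuffer: true,
           generateStencilBuffer: false,
           samples: 1
       }
   );
   // rtWrapper is the RenderTargetWrapper shown earlier (rtWrapper.texture holds the color texture).
*/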
!1 : a.bilinearFiltering, a.comparisonFunction, a.samples), a.depthTextureFormat !== void 0) { if (a.depthTextureFormat !== 15 && a.depthTextureFormat !== 16 && a.depthTextureFormat !== 17 && a.depthTextureFormat !== 13 && a.depthTextureFormat !== 14 && a.depthTextureFormat !== 18) return Ce.Error("Depth texture format is not supported."), n; n.format = a.depthTextureFormat; } else n.format = a.generateStencil ? 13 : 16; const l = n.format === 17 || n.format === 13 || n.format === 18; t._depthStencilTexture = n, t._depthStencilTextureWithStencil = l; let o = i.UNSIGNED_INT; n.format === 15 ? o = i.UNSIGNED_SHORT : n.format === 17 || n.format === 13 ? o = i.UNSIGNED_INT_24_8 : n.format === 14 ? o = i.FLOAT : n.format === 18 && (o = i.FLOAT_32_UNSIGNED_INT_24_8_REV); const u = l ? i.DEPTH_STENCIL : i.DEPTH_COMPONENT; let h = u; this.webGLVersion > 1 && (n.format === 15 ? h = i.DEPTH_COMPONENT16 : n.format === 16 ? h = i.DEPTH_COMPONENT24 : n.format === 17 || n.format === 13 ? h = i.DEPTH24_STENCIL8 : n.format === 14 ? h = i.DEPTH_COMPONENT32F : n.format === 18 && (h = i.DEPTH32F_STENCIL8)), n.is2DArray ? i.texImage3D(s, 0, h, n.width, n.height, r, 0, u, o, null) : i.texImage2D(s, 0, h, n.width, n.height, 0, u, o, null), this._bindTextureDirectly(s, null), this._internalTexturesCache.push(n); const d = t; if (d._depthStencilBuffer) { const f = this._currentFramebuffer; this._bindUnboundFramebuffer(d._framebuffer), i.framebufferRenderbuffer(i.FRAMEBUFFER, i.DEPTH_STENCIL_ATTACHMENT, i.RENDERBUFFER, null), i.framebufferRenderbuffer(i.FRAMEBUFFER, i.DEPTH_ATTACHMENT, i.RENDERBUFFER, null), i.framebufferRenderbuffer(i.FRAMEBUFFER, i.STENCIL_ATTACHMENT, i.RENDERBUFFER, null), this._bindUnboundFramebuffer(f), i.deleteRenderbuffer(d._depthStencilBuffer), d._depthStencilBuffer = null; } return n; }; mi.prototype.updateRenderTargetTextureSampleCount = function(c, e) { if (this.webGLVersion < 2 || !c || !c.texture) return 1; if (c.samples === e) return e; const t = this._gl; e = Math.min(e, this.getCaps().maxMSAASamples), c._depthStencilBuffer && (t.deleteRenderbuffer(c._depthStencilBuffer), c._depthStencilBuffer = null), c._MSAAFramebuffer && (t.deleteFramebuffer(c._MSAAFramebuffer), c._MSAAFramebuffer = null); const i = c.texture._hardwareTexture; if (i.releaseMSAARenderBuffers(), e > 1 && typeof t.renderbufferStorageMultisample == "function") { const r = t.createFramebuffer(); if (!r) throw new Error("Unable to create multi sampled framebuffer"); c._MSAAFramebuffer = r, this._bindUnboundFramebuffer(c._MSAAFramebuffer); const s = this._createRenderBuffer(c.texture.width, c.texture.height, e, -1, this._getRGBABufferInternalSizedFormat(c.texture.type, c.texture.format, c.texture._useSRGBBuffer), t.COLOR_ATTACHMENT0, !1); if (!s) throw new Error("Unable to create multi sampled framebuffer"); i.addMSAARenderBuffer(s); } else this._bindUnboundFramebuffer(c._framebuffer); return c.texture.samples = e, c._samples = e, c._depthStencilBuffer = this._setupFramebufferDepthAttachments(c._generateStencilBuffer, c._generateDepthBuffer, c.texture.width, c.texture.height, e), this._bindUnboundFramebuffer(null), e; }; class Bi { /** * Registers a shader code processing with a post process name. * @param postProcessName name of the post process. Use null for the fallback shader code processing. 
This is the shader code processing that will be used in case no specific shader code processing has been associated to a post process name * @param customShaderCodeProcessing shader code processing to associate to the post process name * @returns */ static RegisterShaderCodeProcessing(e, t) { if (!t) { delete Bi._CustomShaderCodeProcessing[e ?? ""]; return; } Bi._CustomShaderCodeProcessing[e ?? ""] = t; } static _GetShaderCodeProcessing(e) { var t; return (t = Bi._CustomShaderCodeProcessing[e]) !== null && t !== void 0 ? t : Bi._CustomShaderCodeProcessing[""]; } /** * Number of sample textures (default: 1) */ get samples() { return this._samples; } set samples(e) { this._samples = Math.min(e, this._engine.getCaps().maxMSAASamples), this._textures.forEach((t) => { t.setSamples(this._samples); }); } /** * Returns the fragment url or shader name used in the post process. * @returns the fragment url or name in the shader store. */ getEffectName() { return this._fragmentUrl; } /** * A function that is added to the onActivateObservable */ set onActivate(e) { this._onActivateObserver && this.onActivateObservable.remove(this._onActivateObserver), e && (this._onActivateObserver = this.onActivateObservable.add(e)); } /** * A function that is added to the onSizeChangedObservable */ set onSizeChanged(e) { this._onSizeChangedObserver && this.onSizeChangedObservable.remove(this._onSizeChangedObserver), this._onSizeChangedObserver = this.onSizeChangedObservable.add(e); } /** * A function that is added to the onApplyObservable */ set onApply(e) { this._onApplyObserver && this.onApplyObservable.remove(this._onApplyObserver), this._onApplyObserver = this.onApplyObservable.add(e); } /** * A function that is added to the onBeforeRenderObservable */ set onBeforeRender(e) { this._onBeforeRenderObserver && this.onBeforeRenderObservable.remove(this._onBeforeRenderObserver), this._onBeforeRenderObserver = this.onBeforeRenderObservable.add(e); } /** * A function that is added to the onAfterRenderObservable */ set onAfterRender(e) { this._onAfterRenderObserver && this.onAfterRenderObservable.remove(this._onAfterRenderObserver), this._onAfterRenderObserver = this.onAfterRenderObservable.add(e); } /** * The input texture for this post process and the output texture of the previous post process. When added to a pipeline the previous post process will * render it's output into this texture and this texture will be used as textureSampler in the fragment shader of this post process. */ get inputTexture() { return this._textures.data[this._currentRenderTextureInd]; } set inputTexture(e) { this._forcedOutputTexture = e; } /** * Since inputTexture should always be defined, if we previously manually set `inputTexture`, * the only way to unset it is to use this function to restore its internal state */ restoreDefaultInputTexture() { this._forcedOutputTexture && (this._forcedOutputTexture = null, this.markTextureDirty()); } /** * Gets the camera which post process is applied to. * @returns The camera the post process is applied to. */ getCamera() { return this._camera; } /** * Gets the texel size of the postprocess. * See https://en.wikipedia.org/wiki/Texel_(graphics) */ get texelSize() { return this._shareOutputWithPostProcess ? 
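/* Usage sketch (editor's addition, not part of the bundled source): registering custom shader code
   processing for a post process by name, matching the static RegisterShaderCodeProcessing above.
   The callback signatures follow how _GetShaderCodeProcessing results are used below; the "tint"
   uniform is purely illustrative.

   BABYLON.PostProcess.RegisterShaderCodeProcessing("myPostProcess", {
       defineCustomBindings: (postProcessName, defines, uniforms, samplers) => {
           uniforms.push("tint");   // expose an extra uniform to the effect
           return defines;
       },
       bindCustomBindings: (postProcessName, effect) => {
           effect.setColor3("tint", BABYLON.Color3.Red());
       }
   });
   // Passing null instead of the object unregisters; a null name installs the fallback processing.
*/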
this._shareOutputWithPostProcess.texelSize : (this._forcedOutputTexture && this._texelSize.copyFromFloats(1 / this._forcedOutputTexture.width, 1 / this._forcedOutputTexture.height), this._texelSize); } /** @internal */ constructor(e, t, i, r, s, n, a = 1, l, o, u = null, h = 0, d = "postprocess", f, p = !1, m = 5, _ = Xa.GLSL) { var v, C, x, b, S, M, R, w, V, k, L, B; this._parentContainer = null, this.width = -1, this.height = -1, this.nodeMaterialSource = null, this._outputTexture = null, this.autoClear = !0, this.forceAutoClearInAlphaMode = !1, this.alphaMode = 0, this.animations = [], this.enablePixelPerfectMode = !1, this.forceFullscreenViewport = !0, this.scaleMode = 1, this.alwaysForcePOT = !1, this._samples = 1, this.adaptScaleToCurrentViewport = !1, this._reusable = !1, this._renderId = 0, this.externalTextureSamplerBinding = !1, this._textures = new xc(2), this._textureCache = [], this._currentRenderTextureInd = 0, this._scaleRatio = new at(1, 1), this._texelSize = at.Zero(), this.onActivateObservable = new Fe(), this.onSizeChangedObservable = new Fe(), this.onApplyObservable = new Fe(), this.onBeforeRenderObservable = new Fe(), this.onAfterRenderObservable = new Fe(), this.name = e; let U = 1, K = null; if (i && !Array.isArray(i)) { const ee = i; i = (v = ee.uniforms) !== null && v !== void 0 ? v : null, r = (C = ee.samplers) !== null && C !== void 0 ? C : null, U = (x = ee.size) !== null && x !== void 0 ? x : 1, n = (b = ee.camera) !== null && b !== void 0 ? b : null, a = (S = ee.samplingMode) !== null && S !== void 0 ? S : 1, l = ee.engine, o = ee.reusable, u = (M = ee.defines) !== null && M !== void 0 ? M : null, h = (R = ee.textureType) !== null && R !== void 0 ? R : 0, d = (w = ee.vertexUrl) !== null && w !== void 0 ? w : "postprocess", f = ee.indexParameters, p = (V = ee.blockCompilation) !== null && V !== void 0 ? V : !1, m = (k = ee.textureFormat) !== null && k !== void 0 ? k : 5, _ = (L = ee.shaderLanguage) !== null && L !== void 0 ? L : Xa.GLSL, K = (B = ee.uniformBuffers) !== null && B !== void 0 ? B : null; } else s && (typeof s == "number" ? U = s : U = { width: s.width, height: s.height }); n != null ? (this._camera = n, this._scene = n.getScene(), n.attachPostProcess(this), this._engine = this._scene.getEngine(), this._scene.postProcesses.push(this), this.uniqueId = this._scene.getUniqueId()) : l && (this._engine = l, this._engine.postProcesses.push(this)), this._options = U, this.renderTargetSamplingMode = a || 1, this._reusable = o || !1, this._textureType = h, this._textureFormat = m, this._shaderLanguage = _, this._samplers = r || [], this._samplers.push("textureSampler"), this._fragmentUrl = t, this._vertexUrl = d, this._parameters = i || [], this._parameters.push("scale"), this._uniformBuffers = K || [], this._indexParameters = f, this._drawWrapper = new $o(this._engine), p || this.updateEffect(u); } /** * Gets a string identifying the name of the class * @returns "PostProcess" string */ getClassName() { return "PostProcess"; } /** * Gets the engine which this post process belongs to. * @returns The engine the post process was enabled with. */ getEngine() { return this._engine; } /** * The effect that is created when initializing the post process. * @returns The created effect corresponding the postprocess. */ getEffect() { return this._drawWrapper.effect; } /** * To avoid multiple redundant textures for multiple post process, the output the output texture for this post process can be shared with another. 
* @param postProcess The post process to share the output with. * @returns This post process. */ shareOutputWith(e) { return this._disposeTextures(), this._shareOutputWithPostProcess = e, this; } /** * Reverses the effect of calling shareOutputWith and returns the post process back to its original state. * This should be called if the post process that shares output with this post process is disabled/disposed. */ useOwnOutput() { this._textures.length == 0 && (this._textures = new xc(2)), this._shareOutputWithPostProcess = null; } /** * Updates the effect with the current post process compile time values and recompiles the shader. * @param defines Define statements that should be added at the beginning of the shader. (default: null) * @param uniforms Set of uniform variables that will be passed to the shader. (default: null) * @param samplers Set of Texture2D variables that will be passed to the shader. (default: null) * @param indexParameters The index parameters to be used for babylons include syntax "#include[0..varyingCount]". (default: undefined) See usage in babylon.blurPostProcess.ts and kernelBlur.vertex.fx * @param onCompiled Called when the shader has been compiled. * @param onError Called if there is an error when compiling a shader. * @param vertexUrl The url of the vertex shader to be used (default: the one given at construction time) * @param fragmentUrl The url of the fragment shader to be used (default: the one given at construction time) */ updateEffect(e = null, t = null, i = null, r, s, n, a, l) { var o, u; const h = Bi._GetShaderCodeProcessing(this.name); if (h != null && h.defineCustomBindings) { const d = (o = t == null ? void 0 : t.slice()) !== null && o !== void 0 ? o : []; d.push(...this._parameters); const f = (u = i == null ? void 0 : i.slice()) !== null && u !== void 0 ? u : []; f.push(...this._samplers), e = h.defineCustomBindings(this.name, e, d, f), t = d, i = f; } this._postProcessDefines = e, this._drawWrapper.effect = this._engine.createEffect({ vertex: a ?? this._vertexUrl, fragment: l ?? this._fragmentUrl }, { attributes: ["position"], uniformsNames: t || this._parameters, uniformBuffersNames: this._uniformBuffers, samplers: i || this._samplers, defines: e !== null ? e : "", fallbacks: null, onCompiled: s ?? null, onError: n ?? null, indexParameters: r || this._indexParameters, processCodeAfterIncludes: h != null && h.processCodeAfterIncludes ? (d, f) => h.processCodeAfterIncludes(this.name, d, f) : null, processFinalCode: h != null && h.processFinalCode ? (d, f) => h.processFinalCode(this.name, d, f) : null, shaderLanguage: this._shaderLanguage }, this._engine); } /** * The post process is reusable if it can be used multiple times within one frame. 
* @returns If the post process is reusable */ isReusable() { return this._reusable; } /** invalidate frameBuffer to hint the postprocess to create a depth buffer */ markTextureDirty() { this.width = -1; } _createRenderTargetTexture(e, t, i = 0) { for (let s = 0; s < this._textureCache.length; s++) if (this._textureCache[s].texture.width === e.width && this._textureCache[s].texture.height === e.height && this._textureCache[s].postProcessChannel === i && this._textureCache[s].texture._generateDepthBuffer === t.generateDepthBuffer && this._textureCache[s].texture.samples === t.samples) return this._textureCache[s].texture; const r = this._engine.createRenderTargetTexture(e, t); return this._textureCache.push({ texture: r, postProcessChannel: i, lastUsedRenderId: -1 }), r; } _flushTextureCache() { const e = this._renderId; for (let t = this._textureCache.length - 1; t >= 0; t--) if (e - this._textureCache[t].lastUsedRenderId > 100) { let i = !1; for (let r = 0; r < this._textures.length; r++) if (this._textures.data[r] === this._textureCache[t].texture) { i = !0; break; } i || (this._textureCache[t].texture.dispose(), this._textureCache.splice(t, 1)); } } /** * Resizes the post-process texture * @param width Width of the texture * @param height Height of the texture * @param camera The camera this post-process is applied to. Pass null if the post-process is used outside the context of a camera post-process chain (default: null) * @param needMipMaps True if mip maps need to be generated after render (default: false) * @param forceDepthStencil True to force post-process texture creation with stencil depth and buffer (default: false) */ resize(e, t, i = null, r = !1, s = !1) { this._textures.length > 0 && this._textures.reset(), this.width = e, this.height = t; let n = null; if (i) { for (let o = 0; o < i._postProcesses.length; o++) if (i._postProcesses[o] !== null) { n = i._postProcesses[o]; break; } } const a = { width: this.width, height: this.height }, l = { generateMipMaps: r, generateDepthBuffer: s || n === this, generateStencilBuffer: (s || n === this) && this._engine.isStencilEnable, samplingMode: this.renderTargetSamplingMode, type: this._textureType, format: this._textureFormat, samples: this._samples, label: "PostProcessRTT-" + this.name }; this._textures.push(this._createRenderTargetTexture(a, l, 0)), this._reusable && this._textures.push(this._createRenderTargetTexture(a, l, 1)), this._texelSize.copyFromFloats(1 / this.width, 1 / this.height), this.onSizeChangedObservable.notifyObservers(this); } _getTarget() { let e; if (this._shareOutputWithPostProcess) e = this._shareOutputWithPostProcess.inputTexture; else if (this._forcedOutputTexture) e = this._forcedOutputTexture, this.width = this._forcedOutputTexture.width, this.height = this._forcedOutputTexture.height; else { e = this.inputTexture; let t; for (let i = 0; i < this._textureCache.length; i++) if (this._textureCache[i].texture === e) { t = this._textureCache[i]; break; } t && (t.lastUsedRenderId = this._renderId); } return e; } /** * Activates the post process by intializing the textures to be used when executed. Notifies onActivateObservable. * When this post process is used in a pipeline, this is call will bind the input texture of this post process to the output of the previous. * @param camera The camera that will be used in the post process. This camera will be used when calling onActivateObservable. 
* @param sourceTexture The source texture to be inspected to get the width and height if not specified in the post process constructor. (default: null) * @param forceDepthStencil If true, a depth and stencil buffer will be generated. (default: false) * @returns The render target wrapper that was bound to be written to. */ activate(e, t = null, i) { var r, s; e = e || this._camera; const n = e.getScene(), a = n.getEngine(), l = a.getCaps().maxTextureSize, o = (t ? t.width : this._engine.getRenderWidth(!0)) * this._options | 0, u = (t ? t.height : this._engine.getRenderHeight(!0)) * this._options | 0; let h = this._options.width || o, d = this._options.height || u; const f = this.renderTargetSamplingMode !== 7 && this.renderTargetSamplingMode !== 1 && this.renderTargetSamplingMode !== 2; let p = null; if (!this._shareOutputWithPostProcess && !this._forcedOutputTexture) { if (this.adaptScaleToCurrentViewport) { const m = a.currentViewport; m && (h *= m.width, d *= m.height); } (f || this.alwaysForcePOT) && (this._options.width || (h = a.needPOTTextures ? $e.GetExponentOfTwo(h, l, this.scaleMode) : h), this._options.height || (d = a.needPOTTextures ? $e.GetExponentOfTwo(d, l, this.scaleMode) : d)), (this.width !== h || this.height !== d || !(p = this._getTarget())) && this.resize(h, d, e, f, i), this._textures.forEach((m) => { m.samples !== this.samples && this._engine.updateRenderTargetTextureSampleCount(m, this.samples); }), this._flushTextureCache(), this._renderId++; } return p || (p = this._getTarget()), this.enablePixelPerfectMode ? (this._scaleRatio.copyFromFloats(o / h, u / d), this._engine.bindFramebuffer(p, 0, o, u, this.forceFullscreenViewport)) : (this._scaleRatio.copyFromFloats(1, 1), this._engine.bindFramebuffer(p, 0, void 0, void 0, this.forceFullscreenViewport)), (s = (r = this._engine)._debugInsertMarker) === null || s === void 0 || s.call(r, `post process ${this.name} input`), this.onActivateObservable.notifyObservers(e), this.autoClear && (this.alphaMode === 0 || this.forceAutoClearInAlphaMode) && this._engine.clear(this.clearColor ? this.clearColor : n.clearColor, n._allowPostProcessClearColor, !0, !0), this._reusable && (this._currentRenderTextureInd = (this._currentRenderTextureInd + 1) % 2), p; } /** * If the post process is supported. */ get isSupported() { return this._drawWrapper.effect.isSupported; } /** * The aspect ratio of the output texture. */ get aspectRatio() { return this._shareOutputWithPostProcess ? this._shareOutputWithPostProcess.aspectRatio : this._forcedOutputTexture ? this._forcedOutputTexture.width / this._forcedOutputTexture.height : this.width / this.height; } /** * Get a value indicating if the post-process is ready to be used * @returns true if the post-process is ready (shader is compiled) */ isReady() { var e, t; return (t = (e = this._drawWrapper.effect) === null || e === void 0 ? void 0 : e.isReady()) !== null && t !== void 0 ? t : !1; } /** * Binds all textures and uniforms to the shader, this will be run on every pass. * @returns the effect corresponding to this post process. Null if not compiled or not ready. 
*/ apply() { var e, t, i; if (!(!((e = this._drawWrapper.effect) === null || e === void 0) && e.isReady())) return null; this._engine.enableEffect(this._drawWrapper), this._engine.setState(!1), this._engine.setDepthBuffer(!1), this._engine.setDepthWrite(!1), this._engine.setAlphaMode(this.alphaMode), this.alphaConstants && this.getEngine().setAlphaConstants(this.alphaConstants.r, this.alphaConstants.g, this.alphaConstants.b, this.alphaConstants.a); let r; return this._shareOutputWithPostProcess ? r = this._shareOutputWithPostProcess.inputTexture : this._forcedOutputTexture ? r = this._forcedOutputTexture : r = this.inputTexture, this.externalTextureSamplerBinding || this._drawWrapper.effect._bindTexture("textureSampler", r == null ? void 0 : r.texture), this._drawWrapper.effect.setVector2("scale", this._scaleRatio), this.onApplyObservable.notifyObservers(this._drawWrapper.effect), (i = (t = Bi._GetShaderCodeProcessing(this.name)) === null || t === void 0 ? void 0 : t.bindCustomBindings) === null || i === void 0 || i.call(t, this.name, this._drawWrapper.effect), this._drawWrapper.effect; } _disposeTextures() { if (this._shareOutputWithPostProcess || this._forcedOutputTexture) { this._disposeTextureCache(); return; } this._disposeTextureCache(), this._textures.dispose(); } _disposeTextureCache() { for (let e = this._textureCache.length - 1; e >= 0; e--) this._textureCache[e].texture.dispose(); this._textureCache.length = 0; } /** * Sets the required values to the prepass renderer. * @param prePassRenderer defines the prepass renderer to setup. * @returns true if the pre pass is needed. */ setPrePassRenderer(e) { return this._prePassEffectConfiguration ? (this._prePassEffectConfiguration = e.addEffectConfiguration(this._prePassEffectConfiguration), this._prePassEffectConfiguration.enabled = !0, !0) : !1; } /** * Disposes the post process. * @param camera The camera to dispose the post process on. */ dispose(e) { e = e || this._camera, this._disposeTextures(); let t; if (this._scene && (t = this._scene.postProcesses.indexOf(this), t !== -1 && this._scene.postProcesses.splice(t, 1)), this._parentContainer) { const i = this._parentContainer.postProcesses.indexOf(this); i > -1 && this._parentContainer.postProcesses.splice(i, 1), this._parentContainer = null; } if (t = this._engine.postProcesses.indexOf(this), t !== -1 && this._engine.postProcesses.splice(t, 1), !!e) { if (e.detachPostProcess(this), t = e._postProcesses.indexOf(this), t === 0 && e._postProcesses.length > 0) { const i = this._camera._getFirstPostProcess(); i && i.markTextureDirty(); } this.onActivateObservable.clear(), this.onAfterRenderObservable.clear(), this.onApplyObservable.clear(), this.onBeforeRenderObservable.clear(), this.onSizeChangedObservable.clear(); } } /** * Serializes the post process to a JSON object * @returns the JSON object */ serialize() { const e = St.Serialize(this), t = this.getCamera() || this._scene && this._scene.activeCamera; return e.customType = "BABYLON." + this.getClassName(), e.cameraId = t ? 
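// Round-trip sketch (hypothetical existing `postProcess` and `scene`): serialize() produces the JSON
// consumed by the static Parse() defined just below.
//   const data = postProcess.serialize();
//   const restored = BABYLON.PostProcess.Parse(data, scene, "");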
t.id : null, e.reusable = this._reusable, e.textureType = this._textureType, e.fragmentUrl = this._fragmentUrl, e.parameters = this._parameters, e.samplers = this._samplers, e.options = this._options, e.defines = this._postProcessDefines, e.textureFormat = this._textureFormat, e.vertexUrl = this._vertexUrl, e.indexParameters = this._indexParameters, e; } /** * Clones this post process * @returns a new post process similar to this one */ clone() { const e = this.serialize(); e._engine = this._engine, e.cameraId = null; const t = Bi.Parse(e, this._scene, ""); return t ? (t.onActivateObservable = this.onActivateObservable.clone(), t.onSizeChangedObservable = this.onSizeChangedObservable.clone(), t.onApplyObservable = this.onApplyObservable.clone(), t.onBeforeRenderObservable = this.onBeforeRenderObservable.clone(), t.onAfterRenderObservable = this.onAfterRenderObservable.clone(), t._prePassEffectConfiguration = this._prePassEffectConfiguration, t) : null; } /** * Creates a material from parsed material data * @param parsedPostProcess defines parsed post process data * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures * @returns a new post process */ static Parse(e, t, i) { const r = Qo(e.customType); if (!r || !r._Parse) return null; const s = t ? t.getCameraById(e.cameraId) : null; return r._Parse(e, s, t, i); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new Bi(e.name, e.fragmentUrl, e.parameters, e.samplers, e.options, t, e.renderTargetSamplingMode, e._engine, e.reusable, e.defines, e.textureType, e.vertexUrl, e.indexParameters, !1, e.textureFormat), e, i, r); } } Bi._CustomShaderCodeProcessing = {}; F([ W() ], Bi.prototype, "uniqueId", void 0); F([ W() ], Bi.prototype, "name", void 0); F([ W() ], Bi.prototype, "width", void 0); F([ W() ], Bi.prototype, "height", void 0); F([ W() ], Bi.prototype, "renderTargetSamplingMode", void 0); F([ dw() ], Bi.prototype, "clearColor", void 0); F([ W() ], Bi.prototype, "autoClear", void 0); F([ W() ], Bi.prototype, "forceAutoClearInAlphaMode", void 0); F([ W() ], Bi.prototype, "alphaMode", void 0); F([ W() ], Bi.prototype, "alphaConstants", void 0); F([ W() ], Bi.prototype, "enablePixelPerfectMode", void 0); F([ W() ], Bi.prototype, "forceFullscreenViewport", void 0); F([ W() ], Bi.prototype, "scaleMode", void 0); F([ W() ], Bi.prototype, "alwaysForcePOT", void 0); F([ W("samples") ], Bi.prototype, "_samples", void 0); F([ W() ], Bi.prototype, "adaptScaleToCurrentViewport", void 0); Be("BABYLON.PostProcess", Bi); class K9 extends Wi { /** * Create a new VectorMergerBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.xSwizzle = "x", this.ySwizzle = "y", this.zSwizzle = "z", this.wSwizzle = "w", this.registerInput("xyzw ", ue.Vector4, !0), this.registerInput("xyz ", ue.Vector3, !0), this.registerInput("xy ", ue.Vector2, !0), this.registerInput("zw ", ue.Vector2, !0), this.registerInput("x", ue.Float, !0), this.registerInput("y", ue.Float, !0), this.registerInput("z", ue.Float, !0), this.registerInput("w", ue.Float, !0), this.registerOutput("xyzw", ue.Vector4), this.registerOutput("xyz", ue.Vector3), this.registerOutput("xy", ue.Vector2), this.registerOutput("zw", ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "VectorMergerBlock"; } /** * Gets the xyzw component (input) */ get xyzwIn() { return this._inputs[0]; } /** * Gets the xyz component (input) */ get xyzIn() { return this._inputs[1]; 
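// Node-material sketch (assumes BABYLON.InputBlock and the connectTo API; block and variable names are
// illustrative): the swizzle properties reorder components on every output of the merger.
//   const merge = new BABYLON.VectorMergerBlock("uv");
//   merge.xSwizzle = "y"; merge.ySwizzle = "x";   // emits ".yx" instead of ".xy"
//   uInput.output.connectTo(merge.x);
//   vInput.output.connectTo(merge.y);
//   merge.xyOut.connectTo(textureBlock.uv);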
} /** * Gets the xy component (input) */ get xyIn() { return this._inputs[2]; } /** * Gets the zw component (input) */ get zwIn() { return this._inputs[3]; } /** * Gets the x component (input) */ get x() { return this._inputs[4]; } /** * Gets the y component (input) */ get y() { return this._inputs[5]; } /** * Gets the z component (input) */ get z() { return this._inputs[6]; } /** * Gets the w component (input) */ get w() { return this._inputs[7]; } /** * Gets the xyzw component (output) */ get xyzw() { return this._outputs[0]; } /** * Gets the xyz component (output) */ get xyzOut() { return this._outputs[1]; } /** * Gets the xy component (output) */ get xyOut() { return this._outputs[2]; } /** * Gets the zw component (output) */ get zwOut() { return this._outputs[3]; } /** * Gets the xy component (output) * @deprecated Please use xyOut instead. */ get xy() { return this.xyOut; } /** * Gets the xyz component (output) * @deprecated Please use xyzOut instead. */ get xyz() { return this.xyzOut; } _inputRename(e) { return e === "xyzw " ? "xyzwIn" : e === "xyz " ? "xyzIn" : e === "xy " ? "xyIn" : e === "zw " ? "zwIn" : e; } _buildSwizzle(e) { return "." + (this.xSwizzle + this.ySwizzle + this.zSwizzle + this.wSwizzle).substr(0, e); } _buildBlock(e) { super._buildBlock(e); const t = this.x, i = this.y, r = this.z, s = this.w, n = this.xyIn, a = this.zwIn, l = this.xyzIn, o = this.xyzwIn, u = this._outputs[0], h = this._outputs[1], d = this._outputs[2], f = this._outputs[3]; return o.isConnected ? (u.hasEndpoints && (e.compilationString += this._declareOutput(u, e) + ` = ${o.associatedVariableName}${this._buildSwizzle(4)}; `), h.hasEndpoints && (e.compilationString += this._declareOutput(h, e) + ` = ${o.associatedVariableName}${this._buildSwizzle(3)}; `), d.hasEndpoints && (e.compilationString += this._declareOutput(d, e) + ` = ${o.associatedVariableName}${this._buildSwizzle(2)}; `)) : l.isConnected ? (u.hasEndpoints && (e.compilationString += this._declareOutput(u, e) + ` = vec4(${l.associatedVariableName}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(4)}; `), h.hasEndpoints && (e.compilationString += this._declareOutput(h, e) + ` = ${l.associatedVariableName}${this._buildSwizzle(3)}; `), d.hasEndpoints && (e.compilationString += this._declareOutput(d, e) + ` = ${l.associatedVariableName}${this._buildSwizzle(2)}; `)) : n.isConnected ? (u.hasEndpoints && (a.isConnected ? e.compilationString += this._declareOutput(u, e) + ` = vec4(${n.associatedVariableName}, ${a.associatedVariableName})${this._buildSwizzle(4)}; ` : e.compilationString += this._declareOutput(u, e) + ` = vec4(${n.associatedVariableName}, ${r.isConnected ? this._writeVariable(r) : "0.0"}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(4)}; `), h.hasEndpoints && (e.compilationString += this._declareOutput(h, e) + ` = vec3(${n.associatedVariableName}, ${r.isConnected ? this._writeVariable(r) : "0.0"})${this._buildSwizzle(3)}; `), d.hasEndpoints && (e.compilationString += this._declareOutput(d, e) + ` = ${n.associatedVariableName}${this._buildSwizzle(2)}; `), f.hasEndpoints && (a.isConnected ? e.compilationString += this._declareOutput(f, e) + ` = ${a.associatedVariableName}${this._buildSwizzle(2)}; ` : e.compilationString += this._declareOutput(f, e) + ` = vec2(${r.isConnected ? this._writeVariable(r) : "0.0"}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(2)}; `)) : (u.hasEndpoints && (a.isConnected ? 
e.compilationString += this._declareOutput(u, e) + ` = vec4(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"}, ${a.associatedVariableName})${this._buildSwizzle(4)}; ` : e.compilationString += this._declareOutput(u, e) + ` = vec4(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"}, ${r.isConnected ? this._writeVariable(r) : "0.0"}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(4)}; `), h.hasEndpoints && (e.compilationString += this._declareOutput(h, e) + ` = vec3(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"}, ${r.isConnected ? this._writeVariable(r) : "0.0"})${this._buildSwizzle(3)}; `), d.hasEndpoints && (e.compilationString += this._declareOutput(d, e) + ` = vec2(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"})${this._buildSwizzle(2)}; `), f.hasEndpoints && (a.isConnected ? e.compilationString += this._declareOutput(f, e) + ` = ${a.associatedVariableName}${this._buildSwizzle(2)}; ` : e.compilationString += this._declareOutput(f, e) + ` = vec2(${r.isConnected ? this._writeVariable(r) : "0.0"}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(2)}; `)), this; } serialize() { const e = super.serialize(); return e.xSwizzle = this.xSwizzle, e.ySwizzle = this.ySwizzle, e.zSwizzle = this.zSwizzle, e.wSwizzle = this.wSwizzle, e; } _deserialize(e, t, i) { var r, s, n, a; super._deserialize(e, t, i), this.xSwizzle = (r = e.xSwizzle) !== null && r !== void 0 ? r : "x", this.ySwizzle = (s = e.ySwizzle) !== null && s !== void 0 ? s : "y", this.zSwizzle = (n = e.zSwizzle) !== null && n !== void 0 ? n : "z", this.wSwizzle = (a = e.wSwizzle) !== null && a !== void 0 ? a : "w"; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.xSwizzle = "${this.xSwizzle}"; `, e += `${this._codeVariableName}.ySwizzle = "${this.ySwizzle}"; `, e += `${this._codeVariableName}.zSwizzle = "${this.zSwizzle}"; `, e += `${this._codeVariableName}.wSwizzle = "${this.wSwizzle}"; `, e; } } Be("BABYLON.VectorMergerBlock", K9); class BL extends Wi { /** * Creates a new RemapBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.sourceRange = new at(-1, 1), this.targetRange = new at(0, 1), this.registerInput("input", ue.AutoDetect), this.registerInput("sourceMin", ue.Float, !0), this.registerInput("sourceMax", ue.Float, !0), this.registerInput("targetMin", ue.Float, !0), this.registerInput("targetMax", ue.Float, !0), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "RemapBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the source min input component */ get sourceMin() { return this._inputs[1]; } /** * Gets the source max input component */ get sourceMax() { return this._inputs[2]; } /** * Gets the target min input component */ get targetMin() { return this._inputs[3]; } /** * Gets the target max input component */ get targetMax() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this.sourceMin.isConnected ? 
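// Remap sketch: the GLSL emitted below computes out = targetMin + (in - sourceMin) * (targetMax - targetMin) / (sourceMax - sourceMin).
// Worked example: remapping -1..1 noise into 0..1, an input of 0.5 gives 0 + (0.5 - (-1)) * 1 / 2 = 0.75.
// Usage (assumes the node-material connectTo API; the source block name is illustrative):
//   const remap = new BABYLON.RemapBlock("remap");
//   remap.sourceRange = new BABYLON.Vector2(-1, 1);
//   remap.targetRange = new BABYLON.Vector2(0, 1);
//   noiseBlock.output.connectTo(remap.input);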
this.sourceMin.associatedVariableName : this._writeFloat(this.sourceRange.x), r = this.sourceMax.isConnected ? this.sourceMax.associatedVariableName : this._writeFloat(this.sourceRange.y), s = this.targetMin.isConnected ? this.targetMin.associatedVariableName : this._writeFloat(this.targetRange.x), n = this.targetMax.isConnected ? this.targetMax.associatedVariableName : this._writeFloat(this.targetRange.y); return e.compilationString += this._declareOutput(t, e) + ` = ${s} + (${this._inputs[0].associatedVariableName} - ${i}) * (${n} - ${s}) / (${r} - ${i}); `, this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.sourceRange = new BABYLON.Vector2(${this.sourceRange.x}, ${this.sourceRange.y}); `; return e += `${this._codeVariableName}.targetRange = new BABYLON.Vector2(${this.targetRange.x}, ${this.targetRange.y}); `, e; } serialize() { const e = super.serialize(); return e.sourceRange = this.sourceRange.asArray(), e.targetRange = this.targetRange.asArray(), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.sourceRange = at.FromArray(e.sourceRange), this.targetRange = at.FromArray(e.targetRange); } } F([ ir("From", $i.Vector2) ], BL.prototype, "sourceRange", void 0); F([ ir("To", $i.Vector2) ], BL.prototype, "targetRange", void 0); Be("BABYLON.RemapBlock", BL); class oB extends Wi { /** * Creates a new MultiplyBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].acceptedConnectionPointTypes.push(ue.Float), this._inputs[1].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MultiplyBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${this.left.associatedVariableName} * ${this.right.associatedVariableName}; `, this; } } Be("BABYLON.MultiplyBlock", oB); var Ip; (function(c) { c[c.Material = 0] = "Material", c[c.PostProcess = 1] = "PostProcess", c[c.Particle = 2] = "Particle", c[c.ProceduralTexture = 3] = "ProceduralTexture"; })(Ip || (Ip = {})); class o5 { /** * Creates a new instance BoxParticleEmitter */ constructor() { this.direction1 = new D(0, 1, 0), this.direction2 = new D(0, 1, 0), this.minEmitBox = new D(-0.5, -0.5, -0.5), this.maxEmitBox = new D(0.5, 0.5, 0.5); } /** * Called by the particle System when the direction is computed for the created particle. 
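// Usage sketch (assumes the public BABYLON.ParticleSystem API and a hypothetical `scene`): the box
// emitter picks directions between direction1/direction2 and positions inside min/maxEmitBox.
//   const ps = new BABYLON.ParticleSystem("boxPs", 1000, scene);
//   ps.createBoxEmitter(new BABYLON.Vector3(-1, 1, -1), new BABYLON.Vector3(1, 1, 1),
//                       new BABYLON.Vector3(-0.5, 0, -0.5), new BABYLON.Vector3(0.5, 0, 0.5));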
* @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { const s = yt.RandomRange(this.direction1.x, this.direction2.x), n = yt.RandomRange(this.direction1.y, this.direction2.y), a = yt.RandomRange(this.direction1.z, this.direction2.z); if (r) { t.x = s, t.y = n, t.z = a; return; } D.TransformNormalFromFloatsToRef(s, n, a, e, t); } /** * Called by the particle System when the position is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = yt.RandomRange(this.minEmitBox.x, this.maxEmitBox.x), n = yt.RandomRange(this.minEmitBox.y, this.maxEmitBox.y), a = yt.RandomRange(this.minEmitBox.z, this.maxEmitBox.z); if (r) { t.x = s, t.y = n, t.z = a; return; } D.TransformCoordinatesFromFloatsToRef(s, n, a, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new o5(); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setVector3("direction1", this.direction1), e.setVector3("direction2", this.direction2), e.setVector3("minEmitBox", this.minEmitBox), e.setVector3("maxEmitBox", this.maxEmitBox); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("direction1", 3), e.addUniform("direction2", 3), e.addUniform("minEmitBox", 3), e.addUniform("maxEmitBox", 3); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define BOXEMITTER"; } /** * Returns the string "BoxParticleEmitter" * @returns a string containing the class name */ getClassName() { return "BoxParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.direction1 = this.direction1.asArray(), e.direction2 = this.direction2.asArray(), e.minEmitBox = this.minEmitBox.asArray(), e.maxEmitBox = this.maxEmitBox.asArray(), e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { D.FromArrayToRef(e.direction1, 0, this.direction1), D.FromArrayToRef(e.direction2, 0, this.direction2), D.FromArrayToRef(e.minEmitBox, 0, this.minEmitBox), D.FromArrayToRef(e.maxEmitBox, 0, this.maxEmitBox); } } class UL { /** * Gets or sets the radius of the emission cone */ get radius() { return this._radius; } set radius(e) { this._radius = e, this._buildHeight(); } /** * Gets or sets the angle of the emission cone */ get angle() { return this._angle; } set angle(e) { this._angle = e, this._buildHeight(); } _buildHeight() { this._angle !== 0 ? 
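// Geometry note: for a non-zero cone angle the emitter height is derived as radius / tan(angle / 2)
// (see the expression that follows), e.g. radius 1 with angle PI/2 gives a height of 1 / tan(PI/4) = 1.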
this._height = this._radius / Math.tan(this._angle / 2) : this._height = 1; } /** * Creates a new instance ConeParticleEmitter * @param radius the radius of the emission cone (1 by default) * @param angle the cone base angle (PI by default) * @param directionRandomizer defines how much to randomize the particle direction [0-1] (default is 0) */ constructor(e = 1, t = Math.PI, i = 0) { this.directionRandomizer = i, this.radiusRange = 1, this.heightRange = 1, this.emitFromSpawnPointOnly = !1, this.angle = t, this.radius = e; } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { r ? de.Vector3[0].copyFrom(i._localPosition).normalize() : i.position.subtractToRef(e.getTranslation(), de.Vector3[0]).normalize(); const s = yt.RandomRange(0, this.directionRandomizer), n = yt.RandomRange(0, this.directionRandomizer), a = yt.RandomRange(0, this.directionRandomizer); t.x = de.Vector3[0].x + s, t.y = de.Vector3[0].y + n, t.z = de.Vector3[0].z + a, t.normalize(); } /** * Called by the particle System when the position is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = yt.RandomRange(0, Math.PI * 2); let n; this.emitFromSpawnPointOnly ? n = 1e-4 : (n = yt.RandomRange(0, this.heightRange), n = 1 - n * n); let a = this._radius - yt.RandomRange(0, this._radius * this.radiusRange); a = a * n; const l = a * Math.sin(s), o = a * Math.cos(s), u = n * this._height; if (r) { t.x = l, t.y = u, t.z = o; return; } D.TransformCoordinatesFromFloatsToRef(l, u, o, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new UL(this._radius, this._angle, this.directionRandomizer); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat2("radius", this._radius, this.radiusRange), e.setFloat("coneAngle", this._angle), e.setFloat2("height", this._height, this.heightRange), e.setFloat("directionRandomizer", this.directionRandomizer); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 2), e.addUniform("coneAngle", 1), e.addUniform("height", 2), e.addUniform("directionRandomizer", 1); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { let e = "#define CONEEMITTER"; return this.emitFromSpawnPointOnly && (e += ` #define CONEEMITTERSPAWNPOINT`), e; } /** * Returns the string "ConeParticleEmitter" * @returns a string containing the class name */ getClassName() { return "ConeParticleEmitter"; } /** * Serializes the particle system to a JSON object. 
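// Usage sketch (hypothetical particle system `ps`, using the createConeEmitter helper defined later in
// this bundle):
//   const cone = ps.createConeEmitter(2, Math.PI / 3);
//   cone.radiusRange = 0;               // 0 = emit from the cone surface only, 1 = anywhere inside
//   cone.emitFromSpawnPointOnly = false;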
* @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.radius = this._radius, e.angle = this._angle, e.directionRandomizer = this.directionRandomizer, e.radiusRange = this.radiusRange, e.heightRange = this.heightRange, e.emitFromSpawnPointOnly = this.emitFromSpawnPointOnly, e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { this.radius = e.radius, this.angle = e.angle, this.directionRandomizer = e.directionRandomizer, this.radiusRange = e.radiusRange !== void 0 ? e.radiusRange : 1, this.heightRange = e.radiusRange !== void 0 ? e.heightRange : 1, this.emitFromSpawnPointOnly = e.emitFromSpawnPointOnly !== void 0 ? e.emitFromSpawnPointOnly : !1; } } class mw { /** * Creates a new instance CylinderParticleEmitter * @param radius the radius of the emission cylinder (1 by default) * @param height the height of the emission cylinder (1 by default) * @param radiusRange the range of the emission cylinder [0-1] 0 Surface only, 1 Entire Radius (1 by default) * @param directionRandomizer defines how much to randomize the particle direction [0-1] */ constructor(e = 1, t = 1, i = 1, r = 0) { this.radius = e, this.height = t, this.radiusRange = i, this.directionRandomizer = r, this._tempVector = D.Zero(); } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space * @param inverseWorldMatrix defines the inverted world matrix to use if isLocal is false */ startDirectionFunction(e, t, i, r, s) { i.position.subtractToRef(e.getTranslation(), this._tempVector), this._tempVector.normalize(), D.TransformNormalToRef(this._tempVector, s, this._tempVector); const n = yt.RandomRange(-this.directionRandomizer / 2, this.directionRandomizer / 2); let a = Math.atan2(this._tempVector.x, this._tempVector.z); if (a += yt.RandomRange(-Math.PI / 2, Math.PI / 2) * this.directionRandomizer, this._tempVector.y = n, this._tempVector.x = Math.sin(a), this._tempVector.z = Math.cos(a), this._tempVector.normalize(), r) { t.copyFrom(this._tempVector); return; } D.TransformNormalFromFloatsToRef(this._tempVector.x, this._tempVector.y, this._tempVector.z, e, t); } /** * Called by the particle System when the position is computed for the created particle. 
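// Usage sketch (hypothetical `ps`): radius 0.5, height 2, full radius range, no direction randomization.
//   const cylinder = ps.createCylinderEmitter(0.5, 2, 1, 0);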
* @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = yt.RandomRange(-this.height / 2, this.height / 2), n = yt.RandomRange(0, 2 * Math.PI), a = yt.RandomRange((1 - this.radiusRange) * (1 - this.radiusRange), 1), l = Math.sqrt(a) * this.radius, o = l * Math.cos(n), u = l * Math.sin(n); if (r) { t.copyFromFloats(o, s, u); return; } D.TransformCoordinatesFromFloatsToRef(o, s, u, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new mw(this.radius, this.directionRandomizer); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat("radius", this.radius), e.setFloat("height", this.height), e.setFloat("radiusRange", this.radiusRange), e.setFloat("directionRandomizer", this.directionRandomizer); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 1), e.addUniform("height", 1), e.addUniform("radiusRange", 1), e.addUniform("directionRandomizer", 1); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define CYLINDEREMITTER"; } /** * Returns the string "CylinderParticleEmitter" * @returns a string containing the class name */ getClassName() { return "CylinderParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.radius = this.radius, e.height = this.height, e.radiusRange = this.radiusRange, e.directionRandomizer = this.directionRandomizer, e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { this.radius = e.radius, this.height = e.height, this.radiusRange = e.radiusRange, this.directionRandomizer = e.directionRandomizer; } } class VL extends mw { /** * Creates a new instance CylinderDirectedParticleEmitter * @param radius the radius of the emission cylinder (1 by default) * @param height the height of the emission cylinder (1 by default) * @param radiusRange the range of the emission cylinder [0-1] 0 Surface only, 1 Entire Radius (1 by default) * @param direction1 the min limit of the emission direction (up vector by default) * @param direction2 the max limit of the emission direction (up vector by default) */ constructor(e = 1, t = 1, i = 1, r = new D(0, 1, 0), s = new D(0, 1, 0)) { super(e, t, i), this.direction1 = r, this.direction2 = s; } /** * Called by the particle System when the direction is computed for the created particle. 
* @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result */ startDirectionFunction(e, t) { const i = yt.RandomRange(this.direction1.x, this.direction2.x), r = yt.RandomRange(this.direction1.y, this.direction2.y), s = yt.RandomRange(this.direction1.z, this.direction2.z); D.TransformNormalFromFloatsToRef(i, r, s, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new VL(this.radius, this.height, this.radiusRange, this.direction1, this.direction2); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat("radius", this.radius), e.setFloat("height", this.height), e.setFloat("radiusRange", this.radiusRange), e.setVector3("direction1", this.direction1), e.setVector3("direction2", this.direction2); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 1), e.addUniform("height", 1), e.addUniform("radiusRange", 1), e.addUniform("direction1", 3), e.addUniform("direction2", 3); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return `#define CYLINDEREMITTER #define DIRECTEDCYLINDEREMITTER`; } /** * Returns the string "CylinderDirectedParticleEmitter" * @returns a string containing the class name */ getClassName() { return "CylinderDirectedParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = super.serialize(); return e.direction1 = this.direction1.asArray(), e.direction2 = this.direction2.asArray(), e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { super.parse(e), this.direction1.copyFrom(e.direction1), this.direction2.copyFrom(e.direction2); } } class kL { /** * Creates a new instance HemisphericParticleEmitter * @param radius the radius of the emission hemisphere (1 by default) * @param radiusRange the range of the emission hemisphere [0-1] 0 Surface only, 1 Entire Radius (1 by default) * @param directionRandomizer defines how much to randomize the particle direction [0-1] */ constructor(e = 1, t = 1, i = 0) { this.radius = e, this.radiusRange = t, this.directionRandomizer = i; } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { const s = i.position.subtract(e.getTranslation()).normalize(), n = yt.RandomRange(0, this.directionRandomizer), a = yt.RandomRange(0, this.directionRandomizer), l = yt.RandomRange(0, this.directionRandomizer); if (s.x += n, s.y += a, s.z += l, s.normalize(), r) { t.copyFrom(s); return; } D.TransformNormalFromFloatsToRef(s.x, s.y, s.z, e, t); } /** * Called by the particle System when the position is computed for the created particle. 
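// Usage sketch (hypothetical `ps`): emit from the surface of a hemisphere of radius 1.
//   const hemisphere = ps.createHemisphericEmitter(1, 0);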
* @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = this.radius - yt.RandomRange(0, this.radius * this.radiusRange), n = yt.RandomRange(0, 1), a = yt.RandomRange(0, 2 * Math.PI), l = Math.acos(2 * n - 1), o = s * Math.cos(a) * Math.sin(l), u = s * Math.cos(l), h = s * Math.sin(a) * Math.sin(l); if (r) { t.copyFromFloats(o, Math.abs(u), h); return; } D.TransformCoordinatesFromFloatsToRef(o, Math.abs(u), h, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new kL(this.radius, this.directionRandomizer); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat("radius", this.radius), e.setFloat("radiusRange", this.radiusRange), e.setFloat("directionRandomizer", this.directionRandomizer); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 1), e.addUniform("radiusRange", 1), e.addUniform("directionRandomizer", 1); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define HEMISPHERICEMITTER"; } /** * Returns the string "HemisphericParticleEmitter" * @returns a string containing the class name */ getClassName() { return "HemisphericParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.radius = this.radius, e.radiusRange = this.radiusRange, e.directionRandomizer = this.directionRandomizer, e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { this.radius = e.radius, this.radiusRange = e.radiusRange, this.directionRandomizer = e.directionRandomizer; } } class zL { /** * Creates a new instance PointParticleEmitter */ constructor() { this.direction1 = new D(0, 1, 0), this.direction2 = new D(0, 1, 0); } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { const s = yt.RandomRange(this.direction1.x, this.direction2.x), n = yt.RandomRange(this.direction1.y, this.direction2.y), a = yt.RandomRange(this.direction1.z, this.direction2.z); if (r) { t.copyFromFloats(s, n, a); return; } D.TransformNormalFromFloatsToRef(s, n, a, e, t); } /** * Called by the particle System when the position is computed for the created particle. 
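// Usage sketch (hypothetical `ps`): particles spawn at the emitter position with directions picked
// between the two vectors.
//   ps.createPointEmitter(new BABYLON.Vector3(-0.5, 1, 0), new BABYLON.Vector3(0.5, 1, 0));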
* @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { if (r) { t.copyFromFloats(0, 0, 0); return; } D.TransformCoordinatesFromFloatsToRef(0, 0, 0, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new zL(); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setVector3("direction1", this.direction1), e.setVector3("direction2", this.direction2); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("direction1", 3), e.addUniform("direction2", 3); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define POINTEMITTER"; } /** * Returns the string "PointParticleEmitter" * @returns a string containing the class name */ getClassName() { return "PointParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.direction1 = this.direction1.asArray(), e.direction2 = this.direction2.asArray(), e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { D.FromArrayToRef(e.direction1, 0, this.direction1), D.FromArrayToRef(e.direction2, 0, this.direction2); } } class gw { /** * Creates a new instance SphereParticleEmitter * @param radius the radius of the emission sphere (1 by default) * @param radiusRange the range of the emission sphere [0-1] 0 Surface only, 1 Entire Radius (1 by default) * @param directionRandomizer defines how much to randomize the particle direction [0-1] */ constructor(e = 1, t = 1, i = 0) { this.radius = e, this.radiusRange = t, this.directionRandomizer = i; } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { const s = i.position.subtract(e.getTranslation()).normalize(), n = yt.RandomRange(0, this.directionRandomizer), a = yt.RandomRange(0, this.directionRandomizer), l = yt.RandomRange(0, this.directionRandomizer); if (s.x += n, s.y += a, s.z += l, s.normalize(), r) { t.copyFrom(s); return; } D.TransformNormalFromFloatsToRef(s.x, s.y, s.z, e, t); } /** * Called by the particle System when the position is computed for the created particle. 
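// Usage sketch (hypothetical `ps`): radius 1.2, positions picked in the outer 30% of the radius.
//   const sphere = ps.createSphereEmitter(1.2, 0.3);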
* @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = this.radius - yt.RandomRange(0, this.radius * this.radiusRange), n = yt.RandomRange(0, 1), a = yt.RandomRange(0, 2 * Math.PI), l = Math.acos(2 * n - 1), o = s * Math.cos(a) * Math.sin(l), u = s * Math.cos(l), h = s * Math.sin(a) * Math.sin(l); if (r) { t.copyFromFloats(o, u, h); return; } D.TransformCoordinatesFromFloatsToRef(o, u, h, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new gw(this.radius, this.directionRandomizer); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat("radius", this.radius), e.setFloat("radiusRange", this.radiusRange), e.setFloat("directionRandomizer", this.directionRandomizer); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 1), e.addUniform("radiusRange", 1), e.addUniform("directionRandomizer", 1); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define SPHEREEMITTER"; } /** * Returns the string "SphereParticleEmitter" * @returns a string containing the class name */ getClassName() { return "SphereParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e.radius = this.radius, e.radiusRange = this.radiusRange, e.directionRandomizer = this.directionRandomizer, e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { this.radius = e.radius, this.radiusRange = e.radiusRange, this.directionRandomizer = e.directionRandomizer; } } class HL extends gw { /** * Creates a new instance SphereDirectedParticleEmitter * @param radius the radius of the emission sphere (1 by default) * @param direction1 the min limit of the emission direction (up vector by default) * @param direction2 the max limit of the emission direction (up vector by default) */ constructor(e = 1, t = new D(0, 1, 0), i = new D(0, 1, 0)) { super(e), this.direction1 = t, this.direction2 = i; } /** * Called by the particle System when the direction is computed for the created particle. 
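// Usage sketch (hypothetical `ps`): like the sphere emitter, but directions are interpolated between two
// fixed vectors instead of pointing away from the center.
//   ps.createDirectedSphereEmitter(1, new BABYLON.Vector3(-0.3, 1, -0.3), new BABYLON.Vector3(0.3, 1, 0.3));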
* @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result */ startDirectionFunction(e, t) { const i = yt.RandomRange(this.direction1.x, this.direction2.x), r = yt.RandomRange(this.direction1.y, this.direction2.y), s = yt.RandomRange(this.direction1.z, this.direction2.z); D.TransformNormalFromFloatsToRef(i, r, s, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new HL(this.radius, this.direction1, this.direction2); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setFloat("radius", this.radius), e.setFloat("radiusRange", this.radiusRange), e.setVector3("direction1", this.direction1), e.setVector3("direction2", this.direction2); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("radius", 1), e.addUniform("radiusRange", 1), e.addUniform("direction1", 3), e.addUniform("direction2", 3); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return `#define SPHEREEMITTER #define DIRECTEDSPHEREEMITTER`; } /** * Returns the string "SphereDirectedParticleEmitter" * @returns a string containing the class name */ getClassName() { return "SphereDirectedParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = super.serialize(); return e.direction1 = this.direction1.asArray(), e.direction2 = this.direction2.asArray(), e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ parse(e) { super.parse(e), this.direction1.copyFrom(e.direction1), this.direction2.copyFrom(e.direction2); } } class l5 { /** * Creates a new instance CustomParticleEmitter */ constructor() { this.particlePositionGenerator = () => { }, this.particleDestinationGenerator = () => { }; } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { const s = de.Vector3[0]; if (this.particleDestinationGenerator) { this.particleDestinationGenerator(-1, i, s); const n = de.Vector3[1]; s.subtractToRef(i.position, n), n.scaleToRef(1 / i.lifeTime, s); } else s.set(0, 0, 0); if (r) { t.copyFrom(s); return; } D.TransformNormalToRef(s, e, t); } /** * Called by the particle System when the position is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { const s = de.Vector3[0]; if (this.particlePositionGenerator ? 
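// Usage sketch (hypothetical `ps`): the generators receive (index, particle, outVector) and write their
// result into the provided vector.
//   const custom = new BABYLON.CustomParticleEmitter();
//   custom.particlePositionGenerator = (index, particle, out) => out.copyFromFloats(Math.random(), 0, Math.random());
//   ps.particleEmitterType = custom;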
this.particlePositionGenerator(-1, i, s) : s.set(0, 0, 0), r) { t.copyFrom(s); return; } D.TransformCoordinatesToRef(s, e, t); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new l5(); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ // eslint-disable-next-line @typescript-eslint/no-unused-vars applyToShader(e) { } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ // eslint-disable-next-line @typescript-eslint/no-unused-vars buildUniformLayout(e) { } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return "#define CUSTOMEMITTER"; } /** * Returns the string "PointParticleEmitter" * @returns a string containing the class name */ getClassName() { return "CustomParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { const e = {}; return e.type = this.getClassName(), e; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object */ // eslint-disable-next-line @typescript-eslint/no-unused-vars parse(e) { } } class $B { /** Defines the mesh to use as source */ get mesh() { return this._mesh; } set mesh(e) { this._mesh !== e && (this._mesh = e, e ? (this._indices = e.getIndices(), this._positions = e.getVerticesData(Y.PositionKind), this._normals = e.getVerticesData(Y.NormalKind)) : (this._indices = null, this._positions = null, this._normals = null)); } /** * Creates a new instance MeshParticleEmitter * @param mesh defines the mesh to use as source */ constructor(e = null) { this._indices = null, this._positions = null, this._normals = null, this._storedNormal = D.Zero(), this._mesh = null, this.direction1 = new D(0, 1, 0), this.direction2 = new D(0, 1, 0), this.useMeshNormalsForDirection = !0, this.mesh = e; } /** * Called by the particle System when the direction is computed for the created particle. * @param worldMatrix is the world matrix of the particle system * @param directionToUpdate is the direction vector to update with the result * @param particle is the particle we are computed the direction for * @param isLocal defines if the direction should be set in local space */ startDirectionFunction(e, t, i, r) { if (this.useMeshNormalsForDirection && this._normals) { D.TransformNormalToRef(this._storedNormal, e, t); return; } const s = yt.RandomRange(this.direction1.x, this.direction2.x), n = yt.RandomRange(this.direction1.y, this.direction2.y), a = yt.RandomRange(this.direction1.z, this.direction2.z); if (r) { t.copyFromFloats(s, n, a); return; } D.TransformNormalFromFloatsToRef(s, n, a, e, t); } /** * Called by the particle System when the position is computed for the created particle. 
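// Usage sketch (hypothetical `ps` and `sourceMesh`): positions are sampled on the mesh triangles and, by
// default, directions follow the interpolated mesh normals.
//   const meshEmitter = new BABYLON.MeshParticleEmitter(sourceMesh);
//   meshEmitter.useMeshNormalsForDirection = true;
//   ps.particleEmitterType = meshEmitter;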
* @param worldMatrix is the world matrix of the particle system * @param positionToUpdate is the position vector to update with the result * @param particle is the particle we are computed the position for * @param isLocal defines if the position should be set in local space */ startPositionFunction(e, t, i, r) { if (!this._indices || !this._positions) return; const s = 3 * Math.random() * (this._indices.length / 3) | 0, n = Math.random(), a = Math.random() * (1 - n), l = 1 - n - a, o = this._indices[s], u = this._indices[s + 1], h = this._indices[s + 2], d = de.Vector3[0], f = de.Vector3[1], p = de.Vector3[2], m = de.Vector3[3]; D.FromArrayToRef(this._positions, o * 3, d), D.FromArrayToRef(this._positions, u * 3, f), D.FromArrayToRef(this._positions, h * 3, p), m.x = n * d.x + a * f.x + l * p.x, m.y = n * d.y + a * f.y + l * p.y, m.z = n * d.z + a * f.z + l * p.z, r ? t.copyFromFloats(m.x, m.y, m.z) : D.TransformCoordinatesFromFloatsToRef(m.x, m.y, m.z, e, t), this.useMeshNormalsForDirection && this._normals && (D.FromArrayToRef(this._normals, o * 3, d), D.FromArrayToRef(this._normals, u * 3, f), D.FromArrayToRef(this._normals, h * 3, p), this._storedNormal.x = n * d.x + a * f.x + l * p.x, this._storedNormal.y = n * d.y + a * f.y + l * p.y, this._storedNormal.z = n * d.z + a * f.z + l * p.z); } /** * Clones the current emitter and returns a copy of it * @returns the new emitter */ clone() { const e = new $B(this.mesh); return id.DeepCopy(this, e), e; } /** * Called by the GPUParticleSystem to setup the update shader * @param uboOrEffect defines the update shader */ applyToShader(e) { e.setVector3("direction1", this.direction1), e.setVector3("direction2", this.direction2); } /** * Creates the structure of the ubo for this particle emitter * @param ubo ubo to create the structure for */ buildUniformLayout(e) { e.addUniform("direction1", 3), e.addUniform("direction2", 3); } /** * Returns a string to use to update the GPU particles update shader * @returns a string containing the defines string */ getEffectDefines() { return ""; } /** * Returns the string "BoxParticleEmitter" * @returns a string containing the class name */ getClassName() { return "MeshParticleEmitter"; } /** * Serializes the particle system to a JSON object. * @returns the JSON object */ serialize() { var e; const t = {}; return t.type = this.getClassName(), t.direction1 = this.direction1.asArray(), t.direction2 = this.direction2.asArray(), t.meshId = (e = this.mesh) === null || e === void 0 ? 
void 0 : e.id, t.useMeshNormalsForDirection = this.useMeshNormalsForDirection, t; } /** * Parse properties from a JSON object * @param serializationObject defines the JSON object * @param scene defines the hosting scene */ parse(e, t) { D.FromArrayToRef(e.direction1, 0, this.direction1), D.FromArrayToRef(e.direction2, 0, this.direction2), e.meshId && t && (this.mesh = t.getLastMeshById(e.meshId)), this.useMeshNormalsForDirection = e.useMeshNormalsForDirection; } } class V4 { /** * Gets or sets a texture used to add random noise to particle positions */ get noiseTexture() { return this._noiseTexture; } set noiseTexture(e) { this._noiseTexture !== e && (this._noiseTexture = e, this._reset()); } /** * Gets or sets whether an animation sprite sheet is enabled or not on the particle system */ get isAnimationSheetEnabled() { return this._isAnimationSheetEnabled; } set isAnimationSheetEnabled(e) { this._isAnimationSheetEnabled != e && (this._isAnimationSheetEnabled = e, this._reset()); } /** * Gets or sets a boolean enabling the use of logarithmic depth buffers, which is good for wide depth buffers. */ get useLogarithmicDepth() { return this._useLogarithmicDepth; } set useLogarithmicDepth(e) { this._useLogarithmicDepth = e && this.getScene().getEngine().getCaps().fragmentDepthSupported; } /** * Get hosting scene * @returns the scene */ getScene() { return this._scene; } _hasTargetStopDurationDependantGradient() { return this._startSizeGradients && this._startSizeGradients.length > 0 || this._emitRateGradients && this._emitRateGradients.length > 0 || this._lifeTimeGradients && this._lifeTimeGradients.length > 0; } /** * Gets the current list of drag gradients. * You must use addDragGradient and removeDragGradient to update this list * @returns the list of drag gradients */ getDragGradients() { return this._dragGradients; } /** * Gets the current list of limit velocity gradients. * You must use addLimitVelocityGradient and removeLimitVelocityGradient to update this list * @returns the list of limit velocity gradients */ getLimitVelocityGradients() { return this._limitVelocityGradients; } /** * Gets the current list of color gradients. * You must use addColorGradient and removeColorGradient to update this list * @returns the list of color gradients */ getColorGradients() { return this._colorGradients; } /** * Gets the current list of size gradients. * You must use addSizeGradient and removeSizeGradient to update this list * @returns the list of size gradients */ getSizeGradients() { return this._sizeGradients; } /** * Gets the current list of color remap gradients. * You must use addColorRemapGradient and removeColorRemapGradient to update this list * @returns the list of color remap gradients */ getColorRemapGradients() { return this._colorRemapGradients; } /** * Gets the current list of alpha remap gradients. * You must use addAlphaRemapGradient and removeAlphaRemapGradient to update this list * @returns the list of alpha remap gradients */ getAlphaRemapGradients() { return this._alphaRemapGradients; } /** * Gets the current list of life time gradients. * You must use addLifeTimeGradient and removeLifeTimeGradient to update this list * @returns the list of life time gradients */ getLifeTimeGradients() { return this._lifeTimeGradients; } /** * Gets the current list of angular speed gradients. 
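// Usage sketch (assumes the add*/remove* gradient methods exposed by the concrete particle systems such
// as BABYLON.ParticleSystem; `ps` is hypothetical): the getters above return lists built this way.
//   ps.addColorGradient(0, new BABYLON.Color4(1, 1, 1, 0));
//   ps.addColorGradient(1, new BABYLON.Color4(1, 0.2, 0.2, 1));
//   ps.addSizeGradient(0, 0.5);
//   ps.addSizeGradient(1, 3);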
* You must use addAngularSpeedGradient and removeAngularSpeedGradient to update this list * @returns the list of angular speed gradients */ getAngularSpeedGradients() { return this._angularSpeedGradients; } /** * Gets the current list of velocity gradients. * You must use addVelocityGradient and removeVelocityGradient to update this list * @returns the list of velocity gradients */ getVelocityGradients() { return this._velocityGradients; } /** * Gets the current list of start size gradients. * You must use addStartSizeGradient and removeStartSizeGradient to update this list * @returns the list of start size gradients */ getStartSizeGradients() { return this._startSizeGradients; } /** * Gets the current list of emit rate gradients. * You must use addEmitRateGradient and removeEmitRateGradient to update this list * @returns the list of emit rate gradients */ getEmitRateGradients() { return this._emitRateGradients; } /** * Random direction of each particle after it has been emitted, between direction1 and direction2 vectors. * This only works when particleEmitterTyps is a BoxParticleEmitter */ get direction1() { return this.particleEmitterType.direction1 ? this.particleEmitterType.direction1 : D.Zero(); } set direction1(e) { this.particleEmitterType.direction1 && (this.particleEmitterType.direction1 = e); } /** * Random direction of each particle after it has been emitted, between direction1 and direction2 vectors. * This only works when particleEmitterTyps is a BoxParticleEmitter */ get direction2() { return this.particleEmitterType.direction2 ? this.particleEmitterType.direction2 : D.Zero(); } set direction2(e) { this.particleEmitterType.direction2 && (this.particleEmitterType.direction2 = e); } /** * Minimum box point around our emitter. Our emitter is the center of particles source, but if you want your particles to emit from more than one point, then you can tell it to do so. * This only works when particleEmitterTyps is a BoxParticleEmitter */ get minEmitBox() { return this.particleEmitterType.minEmitBox ? this.particleEmitterType.minEmitBox : D.Zero(); } set minEmitBox(e) { this.particleEmitterType.minEmitBox && (this.particleEmitterType.minEmitBox = e); } /** * Maximum box point around our emitter. Our emitter is the center of particles source, but if you want your particles to emit from more than one point, then you can tell it to do so. * This only works when particleEmitterTyps is a BoxParticleEmitter */ get maxEmitBox() { return this.particleEmitterType.maxEmitBox ? this.particleEmitterType.maxEmitBox : D.Zero(); } set maxEmitBox(e) { this.particleEmitterType.maxEmitBox && (this.particleEmitterType.maxEmitBox = e); } /** * Gets or sets the billboard mode to use when isBillboardBased = true. * Value can be: ParticleSystem.BILLBOARDMODE_ALL, ParticleSystem.BILLBOARDMODE_Y, ParticleSystem.BILLBOARDMODE_STRETCHED */ get billboardMode() { return this._billboardMode; } set billboardMode(e) { this._billboardMode !== e && (this._billboardMode = e, this._reset()); } /** * Gets or sets a boolean indicating if the particles must be rendered as billboard or aligned with the direction */ get isBillboardBased() { return this._isBillboardBased; } set isBillboardBased(e) { this._isBillboardBased !== e && (this._isBillboardBased = e, this._reset()); } /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. 
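// Setup sketch (assumes the public BABYLON.ParticleSystem / BABYLON.Texture API and a hypothetical
// `scene`; the texture URL is illustrative): these fields are among the defaults initialized in the
// constructor below.
//   const ps = new BABYLON.ParticleSystem("smoke", 1500, scene);
//   ps.particleTexture = new BABYLON.Texture("textures/flare.png", scene);
//   ps.emitter = new BABYLON.Vector3(0, 0.5, 0);
//   ps.emitRate = 40;
//   ps.minLifeTime = 1; ps.maxLifeTime = 3;
//   ps.blendMode = BABYLON.ParticleSystem.BLENDMODE_STANDARD;
//   ps.start();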
* * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { this._attachImageProcessingConfiguration(e); } /** * Attaches a new image processing configuration to the Standard Material. * @param configuration */ _attachImageProcessingConfiguration(e) { e !== this._imageProcessingConfiguration && (!e && this._scene ? this._imageProcessingConfiguration = this._scene.imageProcessingConfiguration : this._imageProcessingConfiguration = e); } /** @internal */ _reset() { } /** * @internal */ _removeGradientAndTexture(e, t, i) { if (!t) return this; let r = 0; for (const s of t) { if (s.gradient === e) { t.splice(r, 1); break; } r++; } return i && i.dispose(), this; } /** * Instantiates a particle system. * Particles are often small sprites used to simulate hard-to-reproduce phenomena like fire, smoke, water, or abstract visual effects like magic glitter and faery dust. * @param name The name of the particle system */ constructor(e) { this.animations = [], this.renderingGroupId = 0, this.emitter = D.Zero(), this.emitRate = 10, this.manualEmitCount = -1, this.updateSpeed = 0.01, this.targetStopDuration = 0, this.disposeOnStop = !1, this.minEmitPower = 1, this.maxEmitPower = 1, this.minLifeTime = 1, this.maxLifeTime = 1, this.minSize = 1, this.maxSize = 1, this.minScaleX = 1, this.maxScaleX = 1, this.minScaleY = 1, this.maxScaleY = 1, this.minInitialRotation = 0, this.maxInitialRotation = 0, this.minAngularSpeed = 0, this.maxAngularSpeed = 0, this.layerMask = 268435455, this.customShader = null, this.preventAutoStart = !1, this._wasDispatched = !1, this._rootUrl = "", this.noiseStrength = new D(10, 10, 10), this.onAnimationEnd = null, this.blendMode = V4.BLENDMODE_ONEONE, this.forceDepthWrite = !1, this.preWarmCycles = 0, this.preWarmStepOffset = 1, this.spriteCellChangeSpeed = 1, this.startSpriteCellID = 0, this.endSpriteCellID = 0, this.spriteCellWidth = 0, this.spriteCellHeight = 0, this.spriteCellLoop = !0, this.spriteRandomStartCell = !1, this.translationPivot = new at(0, 0), this.beginAnimationOnStart = !1, this.beginAnimationFrom = 0, this.beginAnimationTo = 60, this.beginAnimationLoop = !1, this.worldOffset = new D(0, 0, 0), this._useLogarithmicDepth = !1, this.gravity = D.Zero(), this._colorGradients = null, this._sizeGradients = null, this._lifeTimeGradients = null, this._angularSpeedGradients = null, this._velocityGradients = null, this._limitVelocityGradients = null, this._dragGradients = null, this._emitRateGradients = null, this._startSizeGradients = null, this._rampGradients = null, this._colorRemapGradients = null, this._alphaRemapGradients = null, this.startDelay = 0, this.limitVelocityDamping = 0.4, this.color1 = new Et(1, 1, 1, 1), this.color2 = new Et(1, 1, 1, 1), this.colorDead = new Et(0, 0, 0, 1), this.textureMask = new Et(1, 1, 1, 1), this._isSubEmitter = !1, this._billboardMode = 7, this._isBillboardBased = !0, this._imageProcessingConfigurationDefines = new fte(), this.id = e, this.name = e; } /** * Creates a Point Emitter for the particle system (emits directly from the emitter position) * @param direction1 Particles are emitted between the direction1 and direction2 from within the box * @param direction2 Particles are emitted between the direction1 and direction2 from within the box * @returns the emitter */ createPointEmitter(e, t) { const i = new zL(); return i.direction1 = e, i.direction2 = t, this.particleEmitterType = i, i; } /** * Creates a Hemisphere Emitter for the particle system (emits along the hemisphere radius) * @param radius The 
radius of the hemisphere to emit from * @param radiusRange The range of the hemisphere to emit from [0-1] 0 Surface Only, 1 Entire Radius * @returns the emitter */ createHemisphericEmitter(e = 1, t = 1) { const i = new kL(e, t); return this.particleEmitterType = i, i; } /** * Creates a Sphere Emitter for the particle system (emits along the sphere radius) * @param radius The radius of the sphere to emit from * @param radiusRange The range of the sphere to emit from [0-1] 0 Surface Only, 1 Entire Radius * @returns the emitter */ createSphereEmitter(e = 1, t = 1) { const i = new gw(e, t); return this.particleEmitterType = i, i; } /** * Creates a Directed Sphere Emitter for the particle system (emits between direction1 and direction2) * @param radius The radius of the sphere to emit from * @param direction1 Particles are emitted between the direction1 and direction2 from within the sphere * @param direction2 Particles are emitted between the direction1 and direction2 from within the sphere * @returns the emitter */ createDirectedSphereEmitter(e = 1, t = new D(0, 1, 0), i = new D(0, 1, 0)) { const r = new HL(e, t, i); return this.particleEmitterType = r, r; } /** * Creates a Cylinder Emitter for the particle system (emits from the cylinder to the particle position) * @param radius The radius of the emission cylinder * @param height The height of the emission cylinder * @param radiusRange The range of emission [0-1] 0 Surface only, 1 Entire Radius * @param directionRandomizer How much to randomize the particle direction [0-1] * @returns the emitter */ createCylinderEmitter(e = 1, t = 1, i = 1, r = 0) { const s = new mw(e, t, i, r); return this.particleEmitterType = s, s; } /** * Creates a Directed Cylinder Emitter for the particle system (emits between direction1 and direction2) * @param radius The radius of the cylinder to emit from * @param height The height of the emission cylinder * @param radiusRange the range of the emission cylinder [0-1] 0 Surface only, 1 Entire Radius (1 by default) * @param direction1 Particles are emitted between the direction1 and direction2 from within the cylinder * @param direction2 Particles are emitted between the direction1 and direction2 from within the cylinder * @returns the emitter */ createDirectedCylinderEmitter(e = 1, t = 1, i = 1, r = new D(0, 1, 0), s = new D(0, 1, 0)) { const n = new VL(e, t, i, r, s); return this.particleEmitterType = n, n; } /** * Creates a Cone Emitter for the particle system (emits from the cone to the particle position) * @param radius The radius of the cone to emit from * @param angle The base angle of the cone * @returns the emitter */ createConeEmitter(e = 1, t = Math.PI / 4) { const i = new UL(e, t); return this.particleEmitterType = i, i; } /** * Creates a Box Emitter for the particle system. 
(emits between direction1 and direction2 from withing the box defined by minEmitBox and maxEmitBox) * @param direction1 Particles are emitted between the direction1 and direction2 from within the box * @param direction2 Particles are emitted between the direction1 and direction2 from within the box * @param minEmitBox Particles are emitted from the box between minEmitBox and maxEmitBox * @param maxEmitBox Particles are emitted from the box between minEmitBox and maxEmitBox * @returns the emitter */ createBoxEmitter(e, t, i, r) { const s = new o5(); return this.particleEmitterType = s, this.direction1 = e, this.direction2 = t, this.minEmitBox = i, this.maxEmitBox = r, s; } } V4.BLENDMODE_ONEONE = 0; V4.BLENDMODE_STANDARD = 1; V4.BLENDMODE_ADD = 2; V4.BLENDMODE_MULTIPLY = 3; V4.BLENDMODE_MULTIPLYADD = 4; class TK extends Wi { /** * Create a new ColorSplitterBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("rgba", ue.Color4, !0), this.registerInput("rgb ", ue.Color3, !0), this.registerOutput("rgb", ue.Color3), this.registerOutput("r", ue.Float), this.registerOutput("g", ue.Float), this.registerOutput("b", ue.Float), this.registerOutput("a", ue.Float), this.inputsAreExclusive = !0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ColorSplitterBlock"; } /** * Gets the rgba component (input) */ get rgba() { return this._inputs[0]; } /** * Gets the rgb component (input) */ get rgbIn() { return this._inputs[1]; } /** * Gets the rgb component (output) */ get rgbOut() { return this._outputs[0]; } /** * Gets the r component (output) */ get r() { return this._outputs[1]; } /** * Gets the g component (output) */ get g() { return this._outputs[2]; } /** * Gets the b component (output) */ get b() { return this._outputs[3]; } /** * Gets the a component (output) */ get a() { return this._outputs[4]; } _inputRename(e) { return e === "rgb " ? "rgbIn" : e; } _outputRename(e) { return e === "rgb" ? "rgbOut" : e; } _buildBlock(e) { super._buildBlock(e); const t = this.rgba.isConnected ? 
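/* prefer the rgba (Color4) input when it is connected, otherwise fall back to the separate rgbIn (Color3) input */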
this.rgba : this.rgbIn; if (!t.isConnected) return; const i = this._outputs[0], r = this._outputs[1], s = this._outputs[2], n = this._outputs[3], a = this._outputs[4]; return i.hasEndpoints && (e.compilationString += this._declareOutput(i, e) + ` = ${t.associatedVariableName}.rgb; `), r.hasEndpoints && (e.compilationString += this._declareOutput(r, e) + ` = ${t.associatedVariableName}.r; `), s.hasEndpoints && (e.compilationString += this._declareOutput(s, e) + ` = ${t.associatedVariableName}.g; `), n.hasEndpoints && (e.compilationString += this._declareOutput(n, e) + ` = ${t.associatedVariableName}.b; `), a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = ${t.associatedVariableName}.a; `), this; } } Be("BABYLON.ColorSplitterBlock", TK); mi.prototype.createRenderTargetCubeTexture = function(c, e) { const t = this._createHardwareRenderTargetWrapper(!1, !0, c), i = Object.assign({ generateMipMaps: !0, generateDepthBuffer: !0, generateStencilBuffer: !1, type: 0, samplingMode: 3, format: 5 }, e); i.generateStencilBuffer = i.generateDepthBuffer && i.generateStencilBuffer, (i.type === 1 && !this._caps.textureFloatLinearFiltering || i.type === 2 && !this._caps.textureHalfFloatLinearFiltering) && (i.samplingMode = 1); const r = this._gl, s = new ln(this, ts.RenderTarget); this._bindTextureDirectly(r.TEXTURE_CUBE_MAP, s, !0); const n = this._getSamplingParameters(i.samplingMode, i.generateMipMaps); i.type === 1 && !this._caps.textureFloat && (i.type = 0, Ce.Warn("Float textures are not supported. Cube render target forced to TEXTURETYPE_UNESIGNED_BYTE type")), r.texParameteri(r.TEXTURE_CUBE_MAP, r.TEXTURE_MAG_FILTER, n.mag), r.texParameteri(r.TEXTURE_CUBE_MAP, r.TEXTURE_MIN_FILTER, n.min), r.texParameteri(r.TEXTURE_CUBE_MAP, r.TEXTURE_WRAP_S, r.CLAMP_TO_EDGE), r.texParameteri(r.TEXTURE_CUBE_MAP, r.TEXTURE_WRAP_T, r.CLAMP_TO_EDGE); for (let l = 0; l < 6; l++) r.texImage2D(r.TEXTURE_CUBE_MAP_POSITIVE_X + l, 0, this._getRGBABufferInternalSizedFormat(i.type, i.format), c, c, 0, this._getInternalFormat(i.format), this._getWebGLTextureType(i.type), null); const a = r.createFramebuffer(); return this._bindUnboundFramebuffer(a), t._depthStencilBuffer = this._setupFramebufferDepthAttachments(i.generateStencilBuffer, i.generateDepthBuffer, c, c), i.generateMipMaps && r.generateMipmap(r.TEXTURE_CUBE_MAP), this._bindTextureDirectly(r.TEXTURE_CUBE_MAP, null), this._bindUnboundFramebuffer(null), t._framebuffer = a, t._generateDepthBuffer = i.generateDepthBuffer, t._generateStencilBuffer = i.generateStencilBuffer, s.width = c, s.height = c, s.isReady = !0, s.isCube = !0, s.samples = 1, s.generateMipMaps = i.generateMipMaps, s.samplingMode = i.samplingMode, s.type = i.type, s.format = i.format, this._internalTexturesCache.push(s), t.setTextures(s), t; }; const Bk = { positions: [1, 1, -1, 1, -1, -1, 1, -1], indices: [0, 1, 2, 0, 2, 3] }; class vw { /** * Creates an effect renderer * @param engine the engine to use for rendering * @param options defines the options of the effect renderer */ constructor(e, t = Bk) { var i, r; this._fullscreenViewport = new Md(0, 0, 1, 1); const s = (i = t.positions) !== null && i !== void 0 ? i : Bk.positions, n = (r = t.indices) !== null && r !== void 0 ? 
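/* positions/indices fall back to the shared full-screen quad geometry when the options omit them */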
r : Bk.indices; this.engine = e, this._vertexBuffers = { [Y.PositionKind]: new Y(e, s, Y.PositionKind, !1, !1, 2) }, this._indexBuffer = e.createIndexBuffer(n), this._onContextRestoredObserver = e.onContextRestoredObservable.add(() => { this._indexBuffer = e.createIndexBuffer(n); for (const a in this._vertexBuffers) this._vertexBuffers[a]._rebuild(); }); } /** * Sets the current viewport in normalized coordinates 0-1 * @param viewport Defines the viewport to set (defaults to 0 0 1 1) */ setViewport(e = this._fullscreenViewport) { this.engine.setViewport(e); } /** * Binds the embedded attributes buffer to the effect. * @param effect Defines the effect to bind the attributes for */ bindBuffers(e) { this.engine.bindBuffers(this._vertexBuffers, this._indexBuffer, e); } /** * Sets the current effect wrapper to use during draw. * The effect needs to be ready before calling this api. * This also sets the default full screen position attribute. * @param effectWrapper Defines the effect to draw with */ applyEffectWrapper(e) { this.engine.setState(!0), this.engine.depthCullingState.depthTest = !1, this.engine.stencilState.stencilTest = !1, this.engine.enableEffect(e._drawWrapper), this.bindBuffers(e.effect), e.onApplyObservable.notifyObservers({}); } /** * Saves engine states */ saveStates() { this._savedStateDepthTest = this.engine.depthCullingState.depthTest, this._savedStateStencilTest = this.engine.stencilState.stencilTest; } /** * Restores engine states */ restoreStates() { this.engine.depthCullingState.depthTest = this._savedStateDepthTest, this.engine.stencilState.stencilTest = this._savedStateStencilTest; } /** * Draws a full screen quad. */ draw() { this.engine.drawElementsType(0, 0, 6); } _isRenderTargetTexture(e) { return e.renderTarget !== void 0; } /** * renders one or more effects to a specified texture * @param effectWrapper the effect to renderer * @param outputTexture texture to draw to, if null it will render to the screen. */ render(e, t = null) { if (!e.effect.isReady()) return; this.saveStates(), this.setViewport(); const i = t === null ? null : this._isRenderTargetTexture(t) ? t.renderTarget : t; i && this.engine.bindFramebuffer(i), this.applyEffectWrapper(e), this.draw(), i && this.engine.unBindFramebuffer(i), this.restoreStates(); } /** * Disposes of the effect renderer */ dispose() { const e = this._vertexBuffers[Y.PositionKind]; e && (e.dispose(), delete this._vertexBuffers[Y.PositionKind]), this._indexBuffer && this.engine._releaseBuffer(this._indexBuffer), this._onContextRestoredObserver && (this.engine.onContextRestoredObservable.remove(this._onContextRestoredObserver), this._onContextRestoredObserver = null); } } class t6 { /** * The underlying effect */ get effect() { return this._drawWrapper.effect; } set effect(e) { this._drawWrapper.effect = e; } /** * Creates an effect to be renderer * @param creationOptions options to create the effect */ constructor(e) { this.onApplyObservable = new Fe(); let t; const i = e.uniformNames || []; e.vertexShader ? t = { fragmentSource: e.fragmentShader, vertexSource: e.vertexShader, spectorName: e.name || "effectWrapper" } : (i.push("scale"), t = { fragmentSource: e.fragmentShader, vertex: "postprocess", spectorName: e.name || "effectWrapper" }, this.onApplyObservable.add(() => { this.effect.setFloat2("scale", 1, 1); })); const r = e.defines ? e.defines.join(` `) : ""; this._drawWrapper = new $o(e.engine), e.useShaderStore ? 
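/* with useShaderStore, compile through engine.createEffect (shader store lookup); otherwise build the Effect directly and re-prepare it whenever the rendering context is restored */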
(t.fragment = t.fragmentSource, t.vertex || (t.vertex = t.vertexSource), delete t.fragmentSource, delete t.vertexSource, this.effect = e.engine.createEffect(t, e.attributeNames || ["position"], i, e.samplerNames, r, void 0, e.onCompiled, void 0, void 0, e.shaderLanguage)) : (this.effect = new Cr(t, e.attributeNames || ["position"], i, e.samplerNames, e.engine, r, void 0, e.onCompiled, void 0, void 0, void 0, e.shaderLanguage), this._onContextRestoredObserver = e.engine.onContextRestoredObservable.add(() => { this.effect._pipelineContext = null, this.effect._wasPreviouslyReady = !1, this.effect._prepareEffect(); })); } /** * Disposes of the effect wrapper */ dispose() { this._onContextRestoredObserver && (this.effect.getEngine().onContextRestoredObservable.remove(this._onContextRestoredObserver), this._onContextRestoredObserver = null), this.effect.dispose(); } } const zte = "passPixelShader", Hte = `varying vec2 vUV;uniform sampler2D textureSampler; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(textureSampler,vUV);}`; je.ShadersStore[zte] = Hte; const DZ = { name: zte, shader: Hte }; class qh { static _CreateDumpRenderer() { if (!qh._DumpToolsEngine) { let e, t = null; const i = { preserveDrawingBuffer: !0, depth: !1, stencil: !1, alpha: !0, premultipliedAlpha: !1, antialias: !1, failIfMajorPerformanceCaveat: !1 }; try { e = new OffscreenCanvas(100, 100), t = new mi(e, !1, i); } catch { e = document.createElement("canvas"), t = new mi(e, !1, i); } t.getCaps().parallelShaderCompile = void 0; const r = new vw(t), s = new t6({ engine: t, name: DZ.name, fragmentShader: DZ.shader, samplerNames: ["textureSampler"] }); qh._DumpToolsEngine = { canvas: e, engine: t, renderer: r, wrapper: s }; } return qh._DumpToolsEngine; } /** * Dumps the current bound framebuffer * @param width defines the rendering width * @param height defines the rendering height * @param engine defines the hosting engine * @param successCallback defines the callback triggered once the data are available * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns a void promise */ static async DumpFramebuffer(e, t, i, r, s = "image/png", n, a) { const l = await i.readPixels(0, 0, e, t), o = new Uint8Array(l.buffer); qh.DumpData(e, t, o, r, s, n, !0, void 0, a); } /** * Dumps an array buffer * @param width defines the rendering width * @param height defines the rendering height * @param data the data array * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param invertY true to invert the picture in the Y dimension * @param toArrayBuffer true to convert the data to an ArrayBuffer (encoded as `mimeType`) instead of a base64 string * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. 
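* (Illustrative usage sketch, not part of the original docs: "DumpTools" is an assumed public
* name for this minified class, and width/height/pixels are placeholders.)
* @example
* const base64 = await DumpTools.DumpDataAsync(width, height, pixels); // pixels: RGBA Uint8Array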
* @returns a promise that resolve to the final data */ static DumpDataAsync(e, t, i, r = "image/png", s, n = !1, a = !1, l) { return new Promise((o) => { qh.DumpData(e, t, i, (u) => o(u), r, s, n, a, l); }); } /** * Dumps an array buffer * @param width defines the rendering width * @param height defines the rendering height * @param data the data array * @param successCallback defines the callback triggered once the data are available * @param mimeType defines the mime type of the result * @param fileName defines the filename to download. If present, the result will automatically be downloaded * @param invertY true to invert the picture in the Y dimension * @param toArrayBuffer true to convert the data to an ArrayBuffer (encoded as `mimeType`) instead of a base64 string * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ static DumpData(e, t, i, r, s = "image/png", n, a = !1, l = !1, o) { const u = qh._CreateDumpRenderer(); if (u.engine.setSize(e, t, !0), i instanceof Float32Array) { const d = new Uint8Array(i.length); let f = i.length; for (; f--; ) { const p = i[f]; d[f] = Math.round(yt.Clamp(p) * 255); } i = d; } const h = u.engine.createRawTexture(i, e, t, 5, !1, !a, 1); u.renderer.setViewport(), u.renderer.applyEffectWrapper(u.wrapper), u.wrapper.effect._bindTexture("textureSampler", h), u.renderer.draw(), l ? Ve.ToBlob(u.canvas, (d) => { const f = new FileReader(); f.onload = (p) => { const m = p.target.result; r && r(m); }, f.readAsArrayBuffer(d); }, s, o) : Ve.EncodeScreenshotCanvasData(u.canvas, r, s, n, o), h.dispose(); } /** * Dispose the dump tools associated resources */ static Dispose() { qh._DumpToolsEngine && (qh._DumpToolsEngine.wrapper.dispose(), qh._DumpToolsEngine.renderer.dispose(), qh._DumpToolsEngine.engine.dispose()), qh._DumpToolsEngine = null; } } const kce = () => { Ve.DumpData = qh.DumpData, Ve.DumpDataAsync = qh.DumpDataAsync, Ve.DumpFramebuffer = qh.DumpFramebuffer; }; kce(); class ra extends De { /** * Use this list to define the list of mesh you want to render. */ get renderList() { return this._renderList; } set renderList(e) { this._unObserveRenderList && (this._unObserveRenderList(), this._unObserveRenderList = null), e && (this._unObserveRenderList = Uee(e, this._renderListHasChanged)), this._renderList = e; } /** * Post-processes for this render target */ get postProcesses() { return this._postProcesses; } get _prePassEnabled() { return !!this._prePassRenderTarget && this._prePassRenderTarget.enabled; } /** * Set a after unbind callback in the texture. * This has been kept for backward compatibility and use of onAfterUnbindObservable is recommended. */ set onAfterUnbind(e) { this._onAfterUnbindObserver && this.onAfterUnbindObservable.remove(this._onAfterUnbindObserver), this._onAfterUnbindObserver = this.onAfterUnbindObservable.add(e); } /** * Set a before render callback in the texture. * This has been kept for backward compatibility and use of onBeforeRenderObservable is recommended. */ set onBeforeRender(e) { this._onBeforeRenderObserver && this.onBeforeRenderObservable.remove(this._onBeforeRenderObserver), this._onBeforeRenderObserver = this.onBeforeRenderObservable.add(e); } /** * Set a after render callback in the texture. * This has been kept for backward compatibility and use of onAfterRenderObservable is recommended. 
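* (Hedged sketch of the recommended observable-based form; names assume the public
* RenderTargetTexture API exposed by this bundle.)
* @example
* renderTargetTexture.onAfterRenderObservable.add((faceIndex) => {
*     // called once per rendered face (cube) or layer (2D texture array)
* });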
*/ set onAfterRender(e) { this._onAfterRenderObserver && this.onAfterRenderObservable.remove(this._onAfterRenderObserver), this._onAfterRenderObserver = this.onAfterRenderObservable.add(e); } /** * Set a clear callback in the texture. * This has been kept for backward compatibility and use of onClearObservable is recommended. */ set onClear(e) { this._onClearObserver && this.onClearObservable.remove(this._onClearObserver), this._onClearObserver = this.onClearObservable.add(e); } /** * Gets the render pass ids used by the render target texture. For a single render target the array length will be 1, for a cube texture it will be 6 and for * a 2D texture array it will return an array of ids the size of the 2D texture array */ get renderPassIds() { return this._renderPassIds; } /** * Gets the current value of the refreshId counter */ get currentRefreshId() { return this._currentRefreshId; } /** * Sets a specific material to be used to render a mesh/a list of meshes in this render target texture * @param mesh mesh or array of meshes * @param material material or array of materials to use for this render pass. If undefined is passed, no specific material will be used but the regular material instead (mesh.material). It's possible to provide an array of materials to use a different material for each rendering in the case of a cube texture (6 rendering) and a 2D texture array (as many rendering as the length of the array) */ setMaterialForRendering(e, t) { let i; Array.isArray(e) ? i = e : i = [e]; for (let r = 0; r < i.length; ++r) for (let s = 0; s < this._renderPassIds.length; ++s) i[r].setMaterialForRenderPass(this._renderPassIds[s], t !== void 0 ? Array.isArray(t) ? t[s] : t : void 0); } /** * Define if the texture has multiple draw buffers or if false a single draw buffer. */ get isMulti() { var e, t; return (t = (e = this._renderTarget) === null || e === void 0 ? void 0 : e.isMulti) !== null && t !== void 0 ? t : !1; } /** * Gets render target creation options that were used. */ get renderTargetOptions() { return this._renderTargetOptions; } /** * Gets the render target wrapper associated with this render target */ get renderTarget() { return this._renderTarget; } _onRatioRescale() { this._sizeRatio && this.resize(this._initialSizeParameter); } /** * Gets or sets the size of the bounding box associated with the texture (when in cube mode) * When defined, the cubemap will switch to local mode * @see https://community.arm.com/graphics/b/blog/posts/reflections-based-on-local-cubemaps-in-unity * @example https://www.babylonjs-playground.com/#RNASML */ set boundingBoxSize(e) { if (this._boundingBoxSize && this._boundingBoxSize.equals(e)) return; this._boundingBoxSize = e; const t = this.getScene(); t && t.markAllMaterialsAsDirty(1); } get boundingBoxSize() { return this._boundingBoxSize; } /** * In case the RTT has been created with a depth texture, get the associated * depth texture. * Otherwise, return null. */ get depthStencilTexture() { var e, t; return (t = (e = this._renderTarget) === null || e === void 0 ? void 0 : e._depthStencilTexture) !== null && t !== void 0 ? t : null; } /** @internal */ constructor(e, t, i, r = !1, s = !0, n = 0, a = !1, l = De.TRILINEAR_SAMPLINGMODE, o = !0, u = !1, h = !1, d = 5, f = !1, p, m, _ = !1, v = !1) { var C, x, b, S, M, R, w; let V, k = !0; if (typeof r == "object") { const B = r; r = !!B.generateMipMaps, s = (C = B.doNotChangeAspectRatio) !== null && C !== void 0 ? C : !0, n = (x = B.type) !== null && x !== void 0 ? 
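/* when an options object was passed (handled in this branch), each creation option unpacked from it falls back to its legacy positional default, as with the type fallback here */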
x : 0, a = !!B.isCube, l = (b = B.samplingMode) !== null && b !== void 0 ? b : De.TRILINEAR_SAMPLINGMODE, o = (S = B.generateDepthBuffer) !== null && S !== void 0 ? S : !0, u = !!B.generateStencilBuffer, h = !!B.isMulti, d = (M = B.format) !== null && M !== void 0 ? M : 5, f = !!B.delayAllocation, p = B.samples, m = B.creationFlags, _ = !!B.noColorAttachment, v = !!B.useSRGBBuffer, V = B.colorAttachment, k = (R = B.gammaSpace) !== null && R !== void 0 ? R : k; } if (super(null, i, !r, void 0, l, void 0, void 0, void 0, void 0, d), this._unObserveRenderList = null, this._renderListHasChanged = (B, U) => { var K; const ee = this._renderList ? this._renderList.length : 0; (U === 0 && ee > 0 || ee === 0) && ((K = this.getScene()) === null || K === void 0 || K.meshes.forEach((Z) => { Z._markSubMeshesAsLightDirty(); })); }, this.renderParticles = !0, this.renderSprites = !1, this.forceLayerMaskCheck = !1, this.ignoreCameraViewport = !1, this.onBeforeBindObservable = new Fe(), this.onAfterUnbindObservable = new Fe(), this.onBeforeRenderObservable = new Fe(), this.onAfterRenderObservable = new Fe(), this.onClearObservable = new Fe(), this.onResizeObservable = new Fe(), this._cleared = !1, this.skipInitialClear = !1, this._currentRefreshId = -1, this._refreshRate = 1, this._samples = 1, this._canRescale = !0, this._renderTarget = null, this.boundingBoxPosition = D.Zero(), i = this.getScene(), !i) return; const L = this.getScene().getEngine(); this._gammaSpace = k, this._coordinatesMode = De.PROJECTION_MODE, this.renderList = [], this.name = e, this.isRenderTarget = !0, this._initialSizeParameter = t, this._renderPassIds = [], this._isCubeData = a, this._processSizeParameter(t), this.renderPassId = this._renderPassIds[0], this._resizeObserver = L.onResizeObservable.add(() => { }), this._generateMipMaps = !!r, this._doNotChangeAspectRatio = s, this._renderingManager = new Zh(i), this._renderingManager._useSceneAutoClearSetup = !0, !h && (this._renderTargetOptions = { generateMipMaps: r, type: n, format: (w = this._format) !== null && w !== void 0 ? w : void 0, samplingMode: this.samplingMode, generateDepthBuffer: o, generateStencilBuffer: u, samples: p, creationFlags: m, noColorAttachment: _, useSRGBBuffer: v, colorAttachment: V, label: this.name }, this.samplingMode === De.NEAREST_SAMPLINGMODE && (this.wrapU = De.CLAMP_ADDRESSMODE, this.wrapV = De.CLAMP_ADDRESSMODE), f || (a ? (this._renderTarget = i.getEngine().createRenderTargetCubeTexture(this.getRenderSize(), this._renderTargetOptions), this.coordinatesMode = De.INVCUBIC_MODE, this._textureMatrix = Ae.Identity()) : this._renderTarget = i.getEngine().createRenderTargetTexture(this._size, this._renderTargetOptions), this._texture = this._renderTarget.texture, p !== void 0 && (this.samples = p))); } /** * Creates a depth stencil texture. * This is only available in WebGL 2 or with the depth texture extension available. * @param comparisonFunction Specifies the comparison function to set on the texture. 
If 0 or undefined, the texture is not in comparison mode (default: 0) * @param bilinearFiltering Specifies whether or not bilinear filtering is enable on the texture (default: true) * @param generateStencil Specifies whether or not a stencil should be allocated in the texture (default: false) * @param samples sample count of the depth/stencil texture (default: 1) * @param format format of the depth texture (default: 14) */ createDepthStencilTexture(e = 0, t = !0, i = !1, r = 1, s = 14) { var n; (n = this._renderTarget) === null || n === void 0 || n.createDepthStencilTexture(e, t, i, r, s); } _releaseRenderPassId() { if (this._scene) { const e = this._scene.getEngine(); for (let t = 0; t < this._renderPassIds.length; ++t) e.releaseRenderPassId(this._renderPassIds[t]); } this._renderPassIds = []; } _createRenderPassId() { this._releaseRenderPassId(); const e = this._scene.getEngine(), t = this._isCubeData ? 6 : this.getRenderLayers() || 1; for (let i = 0; i < t; ++i) this._renderPassIds[i] = e.createRenderPassId(`RenderTargetTexture - ${this.name}#${i}`); } _processSizeParameter(e, t = !0) { if (e.ratio) { this._sizeRatio = e.ratio; const i = this._getEngine(); this._size = { width: this._bestReflectionRenderTargetDimension(i.getRenderWidth(), this._sizeRatio), height: this._bestReflectionRenderTargetDimension(i.getRenderHeight(), this._sizeRatio) }; } else this._size = e; t && this._createRenderPassId(); } /** * Define the number of samples to use in case of MSAA. * It defaults to one meaning no MSAA has been enabled. */ get samples() { var e, t; return (t = (e = this._renderTarget) === null || e === void 0 ? void 0 : e.samples) !== null && t !== void 0 ? t : this._samples; } set samples(e) { this._renderTarget && (this._samples = this._renderTarget.setSamples(e)); } /** * Resets the refresh counter of the texture and start bak from scratch. * Could be useful to regenerate the texture if it is setup to render only once. */ resetRefreshCounter() { this._currentRefreshId = -1; } /** * Define the refresh rate of the texture or the rendering frequency. * Use 0 to render just once, 1 to render on every frame, 2 to render every two frames and so on... */ get refreshRate() { return this._refreshRate; } set refreshRate(e) { this._refreshRate = e, this.resetRefreshCounter(); } /** * Adds a post process to the render target rendering passes. * @param postProcess define the post process to add */ addPostProcess(e) { if (!this._postProcessManager) { const t = this.getScene(); if (!t) return; this._postProcessManager = new q9(t), this._postProcesses = new Array(); } this._postProcesses.push(e), this._postProcesses[0].autoClear = !1; } /** * Clear all the post processes attached to the render target * @param dispose define if the cleared post processes should also be disposed (false by default) */ clearPostProcesses(e = !1) { if (this._postProcesses) { if (e) for (const t of this._postProcesses) t.dispose(); this._postProcesses = []; } } /** * Remove one of the post process from the list of attached post processes to the texture * @param postProcess define the post process to remove from the list */ removePostProcess(e) { if (!this._postProcesses) return; const t = this._postProcesses.indexOf(e); t !== -1 && (this._postProcesses.splice(t, 1), this._postProcesses.length > 0 && (this._postProcesses[0].autoClear = !1)); } /** @internal */ _shouldRender() { return this._currentRefreshId === -1 ? (this._currentRefreshId = 1, !0) : this.refreshRate === this._currentRefreshId ? 
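/* the very first call (refreshId === -1, handled above) always renders; here we render again only when the counter reaches refreshRate, otherwise advance it and skip this frame */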
(this._currentRefreshId = 1, !0) : (this._currentRefreshId++, !1); } /** * Gets the actual render size of the texture. * @returns the width of the render size */ getRenderSize() { return this.getRenderWidth(); } /** * Gets the actual render width of the texture. * @returns the width of the render size */ getRenderWidth() { return this._size.width ? this._size.width : this._size; } /** * Gets the actual render height of the texture. * @returns the height of the render size */ getRenderHeight() { return this._size.width ? this._size.height : this._size; } /** * Gets the actual number of layers of the texture. * @returns the number of layers */ getRenderLayers() { const e = this._size.layers; return e || 0; } /** * Don't allow this render target texture to rescale. Mainly used to prevent rescaling by the scene optimizer. */ disableRescaling() { this._canRescale = !1; } /** * Get if the texture can be rescaled or not. */ get canRescale() { return this._canRescale; } /** * Resize the texture using a ratio. * @param ratio the ratio to apply to the texture size in order to compute the new target size */ scale(e) { const t = Math.max(1, this.getRenderSize() * e); this.resize(t); } /** * Get the texture reflection matrix used to rotate/transform the reflection. * @returns the reflection matrix */ getReflectionTextureMatrix() { return this.isCube ? this._textureMatrix : super.getReflectionTextureMatrix(); } /** * Resize the texture to a new desired size. * Be careful as it will recreate all the data in the new texture. * @param size Define the new size. It can be: * - a number for squared texture, * - an object containing { width: number, height: number } * - or an object containing a ratio { ratio: number } */ resize(e) { var t; const i = this.isCube; (t = this._renderTarget) === null || t === void 0 || t.dispose(), this._renderTarget = null; const r = this.getScene(); r && (this._processSizeParameter(e, !1), i ? this._renderTarget = r.getEngine().createRenderTargetCubeTexture(this.getRenderSize(), this._renderTargetOptions) : this._renderTarget = r.getEngine().createRenderTargetTexture(this._size, this._renderTargetOptions), this._texture = this._renderTarget.texture, this._renderTargetOptions.samples !== void 0 && (this.samples = this._renderTargetOptions.samples), this.onResizeObservable.hasObservers() && this.onResizeObservable.notifyObservers(this)); } /** * Renders all the objects from the render list into the texture. * @param useCameraPostProcess Define if camera post processes should be used during the rendering * @param dumpForDebug Define if the rendering result should be dumped (copied) for debugging purpose */ render(e = !1, t = !1) { this._render(e, t); } /** * This function will check if the render target texture can be rendered (textures are loaded, shaders are compiled) * @returns true if all required resources are ready */ isReadyForRendering() { return this._render(!1, !1, !0); } _render(e = !1, t = !1, i = !1) { var r; const s = this.getScene(); if (!s) return i; const n = s.getEngine(); if (this.useCameraPostProcesses !== void 0 && (e = this.useCameraPostProcesses), this._waitingRenderList) { this.renderList = []; for (let h = 0; h < this._waitingRenderList.length; h++) { const d = this._waitingRenderList[h], f = s.getMeshById(d); f && this.renderList.push(f); } this._waitingRenderList = void 0; } if (this.renderListPredicate) { this.renderList ? 
this.renderList.length = 0 : this.renderList = []; const h = this.getScene(); if (!h) return i; const d = h.meshes; for (let f = 0; f < d.length; f++) { const p = d[f]; this.renderListPredicate(p) && this.renderList.push(p); } } const a = n.currentRenderPassId; this.onBeforeBindObservable.notifyObservers(this); const l = (r = this.activeCamera) !== null && r !== void 0 ? r : s.activeCamera, o = s.activeCamera; l && (l !== s.activeCamera && (s.setTransformMatrix(l.getViewMatrix(), l.getProjectionMatrix(!0)), s.activeCamera = l), n.setViewport(l.rigParent ? l.rigParent.viewport : l.viewport, this.getRenderWidth(), this.getRenderHeight())), this._defaultRenderListPrepared = !1; let u = i; if (i) { s.getViewMatrix() || s.updateTransformMatrix(); const h = this.is2DArray ? this.getRenderLayers() : this.isCube ? 6 : 1; for (let d = 0; d < h && u; d++) { let f = null; const p = this.renderList ? this.renderList : s.getActiveMeshes().data, m = this.renderList ? this.renderList.length : s.getActiveMeshes().length; n.currentRenderPassId = this._renderPassIds[d], this.onBeforeRenderObservable.notifyObservers(d), this.getCustomRenderList && (f = this.getCustomRenderList(d, p, m)), f || (f = p), this._doNotChangeAspectRatio || s.updateTransformMatrix(!0); for (let _ = 0; _ < f.length && u; ++_) { const v = f[_]; if (!(!v.isEnabled() || v.isBlocked || !v.isVisible || !v.subMeshes)) { if (this.customIsReadyFunction) { if (!this.customIsReadyFunction(v, this.refreshRate, i)) { u = !1; continue; } } else if (!v.isReady(!0)) { u = !1; continue; } } } this.onAfterRenderObservable.notifyObservers(d), (this.is2DArray || this.isCube) && (s.incrementRenderId(), s.resetCachedMaterial()); } } else if (this.is2DArray && !this.isMulti) for (let h = 0; h < this.getRenderLayers(); h++) this._renderToTarget(0, e, t, h, l), s.incrementRenderId(), s.resetCachedMaterial(); else if (this.isCube && !this.isMulti) for (let h = 0; h < 6; h++) this._renderToTarget(h, e, t, void 0, l), s.incrementRenderId(), s.resetCachedMaterial(); else this._renderToTarget(0, e, t, void 0, l); return this.onAfterUnbindObservable.notifyObservers(this), n.currentRenderPassId = a, o && (s.activeCamera = o, this.activeCamera && this.activeCamera !== s.activeCamera && s.setTransformMatrix(s.activeCamera.getViewMatrix(), s.activeCamera.getProjectionMatrix(!0)), n.setViewport(s.activeCamera.viewport)), s.resetCachedMaterial(), u; } _bestReflectionRenderTargetDimension(e, t) { const r = e * t, s = $e.NearestPOT(r + 128 * 128 / (128 + r)); return Math.min($e.FloorPOT(e), s); } _prepareRenderingManager(e, t, i, r) { const s = this.getScene(); if (!s) return; this._renderingManager.reset(); const n = s.getRenderId(); for (let a = 0; a < t; a++) { const l = e[a]; if (l && !l.isBlocked) { if (this.customIsReadyFunction) { if (!this.customIsReadyFunction(l, this.refreshRate, !1)) { this.resetRefreshCounter(); continue; } } else if (!l.isReady(this.refreshRate === 0)) { this.resetRefreshCounter(); continue; } if (!l._internalAbstractMeshDataInfo._currentLODIsUpToDate && s.activeCamera && (l._internalAbstractMeshDataInfo._currentLOD = s.customLODSelector ? s.customLODSelector(l, this.activeCamera || s.activeCamera) : l.getLOD(this.activeCamera || s.activeCamera), l._internalAbstractMeshDataInfo._currentLODIsUpToDate = !0), !l._internalAbstractMeshDataInfo._currentLOD) continue; let o = l._internalAbstractMeshDataInfo._currentLOD; o._preActivateForIntermediateRendering(n); let u; if (r && i ? 
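/* u flags meshes to skip: when layer-mask checking is requested and a camera is available, exclude meshes whose layerMask does not intersect the camera's */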
u = (l.layerMask & i.layerMask) === 0 : u = !1, l.isEnabled() && l.isVisible && l.subMeshes && !u && (o !== l && o._activate(n, !0), l._activate(n, !0) && l.subMeshes.length)) { l.isAnInstance ? l._internalAbstractMeshDataInfo._actAsRegularMesh && (o = l) : o._internalAbstractMeshDataInfo._onlyForInstancesIntermediate = !1, o._internalAbstractMeshDataInfo._isActiveIntermediate = !0; for (let h = 0; h < o.subMeshes.length; h++) { const d = o.subMeshes[h]; this._renderingManager.dispatch(d, o); } } } } for (let a = 0; a < s.particleSystems.length; a++) { const l = s.particleSystems[a], o = l.emitter; !l.isStarted() || !o || o.position && !o.isEnabled() || this._renderingManager.dispatchParticles(l); } } /** * @internal * @param faceIndex face index to bind to if this is a cubetexture * @param layer defines the index of the texture to bind in the array */ _bindFrameBuffer(e = 0, t = 0) { const i = this.getScene(); if (!i) return; const r = i.getEngine(); this._renderTarget && r.bindFramebuffer(this._renderTarget, this.isCube ? e : void 0, void 0, void 0, this.ignoreCameraViewport, 0, t); } _unbindFrameBuffer(e, t) { this._renderTarget && e.unBindFramebuffer(this._renderTarget, this.isCube, () => { this.onAfterRenderObservable.notifyObservers(t); }); } /** * @internal */ _prepareFrame(e, t, i, r) { this._postProcessManager ? this._prePassEnabled || this._postProcessManager._prepareFrame(this._texture, this._postProcesses) : (!r || !e.postProcessManager._prepareFrame(this._texture)) && this._bindFrameBuffer(t, i); } _renderToTarget(e, t, i, r = 0, s = null) { var n, a, l, o, u, h; const d = this.getScene(); if (!d) return; const f = d.getEngine(); if ((n = f._debugPushGroup) === null || n === void 0 || n.call(f, `render to face #${e} layer #${r}`, 1), this._prepareFrame(d, e, r, t), this.is2DArray ? (f.currentRenderPassId = this._renderPassIds[r], this.onBeforeRenderObservable.notifyObservers(r)) : (f.currentRenderPassId = this._renderPassIds[e], this.onBeforeRenderObservable.notifyObservers(e)), f.snapshotRendering && f.snapshotRenderingMode === 1) this.onClearObservable.hasObservers() ? this.onClearObservable.notifyObservers(f) : this.skipInitialClear || f.clear(this.clearColor || d.clearColor, !0, !0, !0); else { let m = null; const _ = this.renderList ? this.renderList : d.getActiveMeshes().data, v = this.renderList ? this.renderList.length : d.getActiveMeshes().length; this.getCustomRenderList && (m = this.getCustomRenderList(this.is2DArray ? r : e, _, v)), m ? this._prepareRenderingManager(m, m.length, s, this.forceLayerMaskCheck) : (this._defaultRenderListPrepared || (this._prepareRenderingManager(_, v, s, !this.renderList || this.forceLayerMaskCheck), this._defaultRenderListPrepared = !0), m = _); for (const x of d._beforeRenderTargetClearStage) x.action(this, e, r); this.onClearObservable.hasObservers() ? this.onClearObservable.notifyObservers(f) : this.skipInitialClear || f.clear(this.clearColor || d.clearColor, !0, !0, !0), this._doNotChangeAspectRatio || d.updateTransformMatrix(!0); for (const x of d._beforeRenderTargetDrawStage) x.action(this, e, r); this._renderingManager.render(this.customRenderFunction, m, this.renderParticles, this.renderSprites); for (const x of d._afterRenderTargetDrawStage) x.action(this, e, r); const C = (l = (a = this._texture) === null || a === void 0 ? void 0 : a.generateMipMaps) !== null && l !== void 0 ? l : !1; this._texture && (this._texture.generateMipMaps = !1), this._postProcessManager ? 
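/* finalize through the render target's own post-process chain when it exists; otherwise, if camera post-processes are allowed, let the scene's post-process manager finalize the frame */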
this._postProcessManager._finalizeFrame(!1, (o = this._renderTarget) !== null && o !== void 0 ? o : void 0, e, this._postProcesses, this.ignoreCameraViewport) : t && d.postProcessManager._finalizeFrame(!1, (u = this._renderTarget) !== null && u !== void 0 ? u : void 0, e); for (const x of d._afterRenderTargetPostProcessStage) x.action(this, e, r); this._texture && (this._texture.generateMipMaps = C), this._doNotChangeAspectRatio || d.updateTransformMatrix(!0), i && qh.DumpFramebuffer(this.getRenderWidth(), this.getRenderHeight(), f); } this._unbindFrameBuffer(f, e), this._texture && this.isCube && e === 5 && f.generateMipMapsForCubemap(this._texture), (h = f._debugPopGroup) === null || h === void 0 || h.call(f, 1); } /** * Overrides the default sort function applied in the rendering group to prepare the meshes. * This allowed control for front to back rendering or reversely depending of the special needs. * * @param renderingGroupId The rendering group id corresponding to its index * @param opaqueSortCompareFn The opaque queue comparison function use to sort. * @param alphaTestSortCompareFn The alpha test queue comparison function use to sort. * @param transparentSortCompareFn The transparent queue comparison function use to sort. */ setRenderingOrder(e, t = null, i = null, r = null) { this._renderingManager.setRenderingOrder(e, t, i, r); } /** * Specifies whether or not the stencil and depth buffer are cleared between two rendering groups. * * @param renderingGroupId The rendering group id corresponding to its index * @param autoClearDepthStencil Automatically clears depth and stencil between groups if true. */ setRenderingAutoClearDepthStencil(e, t) { this._renderingManager.setRenderingAutoClearDepthStencil(e, t), this._renderingManager._useSceneAutoClearSetup = !1; } /** * Clones the texture. * @returns the cloned texture */ clone() { const e = this.getSize(), t = new ra(this.name, e, this.getScene(), this._renderTargetOptions.generateMipMaps, this._doNotChangeAspectRatio, this._renderTargetOptions.type, this.isCube, this._renderTargetOptions.samplingMode, this._renderTargetOptions.generateDepthBuffer, this._renderTargetOptions.generateStencilBuffer, void 0, this._renderTargetOptions.format, void 0, this._renderTargetOptions.samples); return t.hasAlpha = this.hasAlpha, t.level = this.level, t.coordinatesMode = this.coordinatesMode, this.renderList && (t.renderList = this.renderList.slice(0)), t; } /** * Serialize the texture to a JSON representation we can easily use in the respective Parse function. * @returns The JSON representation of the texture */ serialize() { if (!this.name) return null; const e = super.serialize(); if (e.renderTargetSize = this.getRenderSize(), e.renderList = [], this.renderList) for (let t = 0; t < this.renderList.length; t++) e.renderList.push(this.renderList[t].id); return e; } /** * This will remove the attached framebuffer objects. The texture will not be able to be used as render target anymore */ disposeFramebufferObjects() { var e; (e = this._renderTarget) === null || e === void 0 || e.dispose(!0); } /** * Release and destroy the underlying lower level texture aka internalTexture. */ releaseInternalTexture() { var e; (e = this._renderTarget) === null || e === void 0 || e.releaseTextures(), this._texture = null; } /** * Dispose the texture and release its associated resources. 
*/ dispose() { var e; this.onResizeObservable.clear(), this.onClearObservable.clear(), this.onAfterRenderObservable.clear(), this.onAfterUnbindObservable.clear(), this.onBeforeBindObservable.clear(), this.onBeforeRenderObservable.clear(), this._postProcessManager && (this._postProcessManager.dispose(), this._postProcessManager = null), this._prePassRenderTarget && this._prePassRenderTarget.dispose(), this._releaseRenderPassId(), this.clearPostProcesses(!0), this._resizeObserver && (this.getScene().getEngine().onResizeObservable.remove(this._resizeObserver), this._resizeObserver = null), this.renderList = null; const t = this.getScene(); if (!t) return; let i = t.customRenderTargets.indexOf(this); i >= 0 && t.customRenderTargets.splice(i, 1); for (const r of t.cameras) i = r.customRenderTargets.indexOf(this), i >= 0 && r.customRenderTargets.splice(i, 1); (e = this._renderTarget) === null || e === void 0 || e.dispose(), this._renderTarget = null, this._texture = null, super.dispose(); } /** @internal */ _rebuild() { this.refreshRate === ra.REFRESHRATE_RENDER_ONCE && (this.refreshRate = ra.REFRESHRATE_RENDER_ONCE), this._postProcessManager && this._postProcessManager._rebuild(); } /** * Clear the info related to rendering groups preventing retention point in material dispose. */ freeRenderingGroups() { this._renderingManager && this._renderingManager.freeRenderingGroups(); } /** * Gets the number of views the corresponding to the texture (eg. a MultiviewRenderTarget will have > 1) * @returns the view count */ getViewCount() { return 1; } } ra.REFRESHRATE_RENDER_ONCE = 0; ra.REFRESHRATE_RENDER_ONEVERYFRAME = 1; ra.REFRESHRATE_RENDER_ONEVERYTWOFRAMES = 2; De._CreateRenderTargetTexture = (c, e, t, i, r) => new ra(c, e, t, i); class Gte { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_PROCEDURALTEXTURE, this.scene = e, this.scene.proceduralTextures = []; } /** * Registers the component in a given scene */ register() { this.scene._beforeClearStage.registerStep(Bt.STEP_BEFORECLEAR_PROCEDURALTEXTURE, this, this._beforeClear); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources. */ dispose() { } _beforeClear() { if (this.scene.proceduralTexturesEnabled) { Ve.StartPerformanceCounter("Procedural textures", this.scene.proceduralTextures.length > 0); for (let e = 0; e < this.scene.proceduralTextures.length; e++) { const t = this.scene.proceduralTextures[e]; t._shouldRender() && t.render(); } Ve.EndPerformanceCounter("Procedural textures", this.scene.proceduralTextures.length > 0); } } } const zce = "proceduralVertexShader", Hce = `attribute vec2 position;varying vec2 vPosition;varying vec2 vUV;const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vPosition=position;vUV=position*madd+madd;gl_Position=vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[zce] = Hce; class z4 extends De { /** * Instantiates a new procedural texture. * Procedural texturing is a way to programmatically create a texture. There are 2 types of procedural textures: code-only, and code that references some classic 2D images, sometimes called 'refMaps' or 'sampler' images. * This is the base class of any Procedural texture and contains most of the shareable code. 
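* (Illustrative sketch, not part of the original docs: assumes the public ProceduralTexture
* constructor registered below as "BABYLON.ProceduralTexture"; "myShader" and scene are
* placeholders, with the fragment expected in Effect.ShadersStore["myShaderFragmentShader"].)
* @example
* const noise = new ProceduralTexture("noise", 256, "myShader", scene);
* noise.setFloat("time", 0);
* noise.refreshRate = 1; // re-render every frame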
* @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/proceduralTextures * @param name Define the name of the texture * @param size Define the size of the texture to create * @param fragment Define the fragment shader to use to generate the texture or null if it is defined later: * * object: \{ fragmentElement: "fragmentShaderCode" \}, used with shader code in script tags * * object: \{ fragmentSource: "fragment shader code string" \}, the string contains the shader code * * string: the string contains a name "XXX" to lookup in Effect.ShadersStore["XXXFragmentShader"] * @param scene Define the scene the texture belongs to * @param fallbackTexture Define a fallback texture in case there were issues to create the custom texture * @param generateMipMaps Define if the texture should creates mip maps or not * @param isCube Define if the texture is a cube texture or not (this will render each faces of the cube) * @param textureType The FBO internal texture type */ constructor(e, t, i, r, s = null, n = !0, a = !1, l = 0) { super(null, r, !n), this.isEnabled = !0, this.autoClear = !0, this.onGeneratedObservable = new Fe(), this.onBeforeGenerationObservable = new Fe(), this.nodeMaterialSource = null, this._textures = {}, this._currentRefreshId = -1, this._frameId = -1, this._refreshRate = 1, this._vertexBuffers = {}, this._uniforms = new Array(), this._samplers = new Array(), this._floats = {}, this._ints = {}, this._floatsArrays = {}, this._colors3 = {}, this._colors4 = {}, this._vectors2 = {}, this._vectors3 = {}, this._matrices = {}, this._fallbackTextureUsed = !1, this._cachedDefines = null, this._contentUpdateId = -1, this._rtWrapper = null, r = this.getScene() || gi.LastCreatedScene; let o = r._getComponent(Bt.NAME_PROCEDURALTEXTURE); o || (o = new Gte(r), r._addComponent(o)), r.proceduralTextures.push(this), this._fullEngine = r.getEngine(), this.name = e, this.isRenderTarget = !0, this._size = t, this._textureType = l, this._generateMipMaps = n, this._drawWrapper = new $o(this._fullEngine), this.setFragment(i), this._fallbackTexture = s; const u = this._createRtWrapper(a, t, n, l); this._texture = u.texture; const h = []; h.push(1, 1), h.push(-1, 1), h.push(-1, -1), h.push(1, -1), this._vertexBuffers[Y.PositionKind] = new Y(this._fullEngine, h, Y.PositionKind, !1, !1, 2), this._createIndexBuffer(); } _createRtWrapper(e, t, i, r) { return e ? (this._rtWrapper = this._fullEngine.createRenderTargetCubeTexture(t, { generateMipMaps: i, generateDepthBuffer: !1, generateStencilBuffer: !1, type: r }), this.setFloat("face", 0)) : this._rtWrapper = this._fullEngine.createRenderTargetTexture(t, { generateMipMaps: i, generateDepthBuffer: !1, generateStencilBuffer: !1, type: r }), this._rtWrapper; } /** * The effect that is created when initializing the post process. * @returns The created effect corresponding the postprocess. */ getEffect() { return this._drawWrapper.effect; } /** * @internal* */ _setEffect(e) { this._drawWrapper.effect = e; } /** * Gets texture content (Use this function wisely as reading from a texture can be slow) * @returns an ArrayBufferView promise (Uint8Array or Float32Array) */ getContent() { return this._contentData && this._frameId === this._contentUpdateId ? this._contentData : (this._contentData ? 
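/* the cached data is reused when it was produced for the current frame (checked above); here we either chain a fresh readPixels onto the pending promise or start a new one, and record the frame id */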
this._contentData.then((e) => { this._contentData = this.readPixels(0, 0, e), this._contentUpdateId = this._frameId; }) : (this._contentData = this.readPixels(0, 0), this._contentUpdateId = this._frameId), this._contentData); } _createIndexBuffer() { const e = this._fullEngine, t = []; t.push(0), t.push(1), t.push(2), t.push(0), t.push(2), t.push(3), this._indexBuffer = e.createIndexBuffer(t); } /** @internal */ _rebuild() { const e = this._vertexBuffers[Y.PositionKind]; e && e._rebuild(), this._createIndexBuffer(), this.refreshRate === ra.REFRESHRATE_RENDER_ONCE && (this.refreshRate = ra.REFRESHRATE_RENDER_ONCE); } /** * Resets the texture in order to recreate its associated resources. * This can be called in case of context loss or if you change the shader code and need to regenerate the texture with the new code */ reset() { var e; (e = this._drawWrapper.effect) === null || e === void 0 || e.dispose(), this._drawWrapper.effect = null, this._cachedDefines = null; } _getDefines() { return ""; } /** * Executes a function when the texture will be ready to be drawn. * @param func The callback to be used. */ executeWhenReady(e) { if (this.isReady()) { e(this); return; } const t = this.getEffect(); t && t.executeWhenCompiled(() => { e(this); }); } /** * Is the texture ready to be used ? (rendered at least once) * @returns true if ready, otherwise, false. */ isReady() { const e = this._fullEngine; if (this.nodeMaterialSource) return this._drawWrapper.effect.isReady(); if (!this._fragment) return !1; if (this._fallbackTextureUsed) return !0; if (!this._texture) return !1; const t = this._getDefines(); if (this._drawWrapper.effect && t === this._cachedDefines && this._drawWrapper.effect.isReady()) return !0; const i = { vertex: "procedural", fragmentElement: this._fragment.fragmentElement, fragmentSource: this._fragment.fragmentSource, fragment: typeof this._fragment == "string" ? this._fragment : void 0 }; return this._cachedDefines !== t && (this._cachedDefines = t, this._drawWrapper.effect = e.createEffect(i, [Y.PositionKind], this._uniforms, this._samplers, t, void 0, void 0, () => { var r; (r = this._rtWrapper) === null || r === void 0 || r.dispose(), this._rtWrapper = this._texture = null, this._fallbackTexture && (this._texture = this._fallbackTexture._texture, this._texture && this._texture.incrementReferences()), this._fallbackTextureUsed = !0; })), this._drawWrapper.effect.isReady(); } /** * Resets the refresh counter of the texture and start bak from scratch. * Could be useful to regenerate the texture if it is setup to render only once. */ resetRefreshCounter() { this._currentRefreshId = -1; } /** * Set the fragment shader to use in order to render the texture. * @param fragment This can be set to a path (into the shader store) or to a json object containing a fragmentElement property. */ setFragment(e) { this._fragment = e; } /** * Define the refresh rate of the texture or the rendering frequency. * Use 0 to render just once, 1 to render on every frame, 2 to render every two frames and so on... */ get refreshRate() { return this._refreshRate; } set refreshRate(e) { this._refreshRate = e, this.resetRefreshCounter(); } /** @internal */ _shouldRender() { return !this.isEnabled || !this.isReady() || !this._texture ? (this._texture && (this._texture.isReady = !1), !1) : this._fallbackTextureUsed ? !1 : this._currentRefreshId === -1 ? (this._currentRefreshId = 1, this._frameId++, !0) : this.refreshRate === this._currentRefreshId ? 
(this._currentRefreshId = 1, this._frameId++, !0) : (this._currentRefreshId++, !1); } /** * Get the size the texture is rendering at. * @returns the size (on cube texture it is always squared) */ getRenderSize() { return this._size; } /** * Resize the texture to new value. * @param size Define the new size the texture should have * @param generateMipMaps Define whether the new texture should create mip maps */ resize(e, t) { if (this._fallbackTextureUsed || !this._rtWrapper || !this._texture) return; const i = this._texture.isCube; this._rtWrapper.dispose(); const r = this._createRtWrapper(i, e, t, this._textureType); this._texture = r.texture, this._size = e, this._generateMipMaps = t; } _checkUniform(e) { this._uniforms.indexOf(e) === -1 && this._uniforms.push(e); } /** * Set a texture in the shader program used to render. * @param name Define the name of the uniform samplers as defined in the shader * @param texture Define the texture to bind to this sampler * @returns the texture itself allowing "fluent" like uniform updates */ setTexture(e, t) { return this._samplers.indexOf(e) === -1 && this._samplers.push(e), this._textures[e] = t, this; } /** * Set a float in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setFloat(e, t) { return this._checkUniform(e), this._floats[e] = t, this; } /** * Set a int in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setInt(e, t) { return this._checkUniform(e), this._ints[e] = t, this; } /** * Set an array of floats in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setFloats(e, t) { return this._checkUniform(e), this._floatsArrays[e] = t, this; } /** * Set a vec3 in the shader from a Color3. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setColor3(e, t) { return this._checkUniform(e), this._colors3[e] = t, this; } /** * Set a vec4 in the shader from a Color4. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setColor4(e, t) { return this._checkUniform(e), this._colors4[e] = t, this; } /** * Set a vec2 in the shader from a Vector2. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setVector2(e, t) { return this._checkUniform(e), this._vectors2[e] = t, this; } /** * Set a vec3 in the shader from a Vector3. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setVector3(e, t) { return this._checkUniform(e), this._vectors3[e] = t, this; } /** * Set a mat4 in the shader from a MAtrix. 
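* (Hedged sketch of the "fluent" uniform updates mentioned throughout these setters; the
* uniform names are placeholders and Matrix/Vector3 refer to the public math classes that are
* minified elsewhere in this bundle.)
* @example
* texture.setFloat("intensity", 0.5).setVector3("lightDir", new Vector3(0, 1, 0)).setMatrix("world", Matrix.Identity());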
* @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the texture itself allowing "fluent" like uniform updates */ setMatrix(e, t) { return this._checkUniform(e), this._matrices[e] = t, this; } /** * Render the texture to its associated render target. * @param useCameraPostProcess Define if camera post process should be applied to the texture */ // eslint-disable-next-line @typescript-eslint/no-unused-vars render(e) { var t, i; const r = this.getScene(); if (!r) return; const s = this._fullEngine; if (s.enableEffect(this._drawWrapper), this.onBeforeGenerationObservable.notifyObservers(this), s.setState(!1), !this.nodeMaterialSource) { for (const a in this._textures) this._drawWrapper.effect.setTexture(a, this._textures[a]); for (const a in this._ints) this._drawWrapper.effect.setInt(a, this._ints[a]); for (const a in this._floats) this._drawWrapper.effect.setFloat(a, this._floats[a]); for (const a in this._floatsArrays) this._drawWrapper.effect.setArray(a, this._floatsArrays[a]); for (const a in this._colors3) this._drawWrapper.effect.setColor3(a, this._colors3[a]); for (const a in this._colors4) { const l = this._colors4[a]; this._drawWrapper.effect.setFloat4(a, l.r, l.g, l.b, l.a); } for (const a in this._vectors2) this._drawWrapper.effect.setVector2(a, this._vectors2[a]); for (const a in this._vectors3) this._drawWrapper.effect.setVector3(a, this._vectors3[a]); for (const a in this._matrices) this._drawWrapper.effect.setMatrix(a, this._matrices[a]); } if (!this._texture || !this._rtWrapper) return; (t = s._debugPushGroup) === null || t === void 0 || t.call(s, `procedural texture generation for ${this.name}`, 1); const n = s.currentViewport; if (this.isCube) for (let a = 0; a < 6; a++) s.bindFramebuffer(this._rtWrapper, a, void 0, void 0, !0), s.bindBuffers(this._vertexBuffers, this._indexBuffer, this._drawWrapper.effect), this._drawWrapper.effect.setFloat("face", a), this.autoClear && s.clear(r.clearColor, !0, !1, !1), s.drawElementsType(At.TriangleFillMode, 0, 6); else s.bindFramebuffer(this._rtWrapper, 0, void 0, void 0, !0), s.bindBuffers(this._vertexBuffers, this._indexBuffer, this._drawWrapper.effect), this.autoClear && s.clear(r.clearColor, !0, !1, !1), s.drawElementsType(At.TriangleFillMode, 0, 6); s.unBindFramebuffer(this._rtWrapper, this.isCube), n && s.setViewport(n), this.isCube && s.generateMipMapsForCubemap(this._texture), (i = s._debugPopGroup) === null || i === void 0 || i.call(s, 1), this.onGenerated && this.onGenerated(), this.onGeneratedObservable.notifyObservers(this); } /** * Clone the texture. * @returns the cloned texture */ clone() { const e = this.getSize(), t = new z4(this.name, e.width, this._fragment, this.getScene(), this._fallbackTexture, this._generateMipMaps); return t.hasAlpha = this.hasAlpha, t.level = this.level, t.coordinatesMode = this.coordinatesMode, t; } /** * Dispose the texture and release its associated resources. 
*/ dispose() { const e = this.getScene(); if (!e) return; const t = e.proceduralTextures.indexOf(this); t >= 0 && e.proceduralTextures.splice(t, 1); const i = this._vertexBuffers[Y.PositionKind]; i && (i.dispose(), this._vertexBuffers[Y.PositionKind] = null), this._indexBuffer && this._fullEngine._releaseBuffer(this._indexBuffer) && (this._indexBuffer = null), this.onGeneratedObservable.clear(), this.onBeforeGenerationObservable.clear(), super.dispose(); } } F([ W() ], z4.prototype, "isEnabled", void 0); F([ W() ], z4.prototype, "autoClear", void 0); F([ W() ], z4.prototype, "_generateMipMaps", void 0); F([ W() ], z4.prototype, "_size", void 0); F([ W() ], z4.prototype, "refreshRate", null); Be("BABYLON.ProceduralTexture", z4); var nu; (function(c) { c[c.Cos = 0] = "Cos", c[c.Sin = 1] = "Sin", c[c.Abs = 2] = "Abs", c[c.Exp = 3] = "Exp", c[c.Exp2 = 4] = "Exp2", c[c.Round = 5] = "Round", c[c.Floor = 6] = "Floor", c[c.Ceiling = 7] = "Ceiling", c[c.Sqrt = 8] = "Sqrt", c[c.Log = 9] = "Log", c[c.Tan = 10] = "Tan", c[c.ArcTan = 11] = "ArcTan", c[c.ArcCos = 12] = "ArcCos", c[c.ArcSin = 13] = "ArcSin", c[c.Fract = 14] = "Fract", c[c.Sign = 15] = "Sign", c[c.Radians = 16] = "Radians", c[c.Degrees = 17] = "Degrees"; })(nu || (nu = {})); class SK extends Wi { /** * Creates a new TrigonometryBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.operation = nu.Cos, this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "TrigonometryBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; let i = ""; switch (this.operation) { case nu.Cos: { i = "cos"; break; } case nu.Sin: { i = "sin"; break; } case nu.Abs: { i = "abs"; break; } case nu.Exp: { i = "exp"; break; } case nu.Exp2: { i = "exp2"; break; } case nu.Round: { i = "round"; break; } case nu.Floor: { i = "floor"; break; } case nu.Ceiling: { i = "ceil"; break; } case nu.Sqrt: { i = "sqrt"; break; } case nu.Log: { i = "log"; break; } case nu.Tan: { i = "tan"; break; } case nu.ArcTan: { i = "atan"; break; } case nu.ArcCos: { i = "acos"; break; } case nu.ArcSin: { i = "asin"; break; } case nu.Fract: { i = "fract"; break; } case nu.Sign: { i = "sign"; break; } case nu.Radians: { i = "radians"; break; } case nu.Degrees: { i = "degrees"; break; } } return e.compilationString += this._declareOutput(t, e) + ` = ${i}(${this.input.associatedVariableName}); `, this; } serialize() { const e = super.serialize(); return e.operation = this.operation, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.operation = e.operation; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.operation = BABYLON.TrigonometryBlockOperations.${nu[this.operation]}; `; } } Be("BABYLON.TrigonometryBlock", SK); const Uk = { effect: null, subMesh: null }; class D9 extends sa { constructor() { super(), this.NORMAL = !1, this.TANGENT = !1, this.VERTEXCOLOR_NME = !1, this.UV1 = !1, this.UV2 = !1, this.UV3 = !1, this.UV4 = !1, this.UV5 = !1, this.UV6 = !1, this.PREPASS = !1, this.PREPASS_NORMAL = !1, this.PREPASS_NORMAL_INDEX = -1, this.PREPASS_POSITION = !1, this.PREPASS_POSITION_INDEX = -1, this.PREPASS_DEPTH = !1, this.PREPASS_DEPTH_INDEX = -1, 
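/*
 * Hedged sketch for the TrigonometryBlock registered above as "BABYLON.TrigonometryBlock":
 * pick an operation, then wire the block between two connection points. "timeInput" and
 * "nextBlock" are illustrative; connectTo is the same connection API used later in this bundle.
 * const trig = new BABYLON.TrigonometryBlock("sinOfTime");
 * trig.operation = BABYLON.TrigonometryBlockOperations.Sin;
 * timeInput.output.connectTo(trig.input);
 * trig.output.connectTo(nextBlock.input);
 */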
this.SCENE_MRT_COUNT = 0, this.NUM_BONE_INFLUENCERS = 0, this.BonesPerMesh = 0, this.BONETEXTURE = !1, this.MORPHTARGETS = !1, this.MORPHTARGETS_NORMAL = !1, this.MORPHTARGETS_TANGENT = !1, this.MORPHTARGETS_UV = !1, this.NUM_MORPH_INFLUENCERS = 0, this.MORPHTARGETS_TEXTURE = !1, this.IMAGEPROCESSING = !1, this.VIGNETTE = !1, this.VIGNETTEBLENDMODEMULTIPLY = !1, this.VIGNETTEBLENDMODEOPAQUE = !1, this.TONEMAPPING = !1, this.TONEMAPPING_ACES = !1, this.CONTRAST = !1, this.EXPOSURE = !1, this.COLORCURVES = !1, this.COLORGRADING = !1, this.COLORGRADING3D = !1, this.SAMPLER3DGREENDEPTH = !1, this.SAMPLER3DBGRMAP = !1, this.DITHER = !1, this.IMAGEPROCESSINGPOSTPROCESS = !1, this.SKIPFINALCOLORCLAMP = !1, this.BUMPDIRECTUV = 0, this.CAMERA_ORTHOGRAPHIC = !1, this.CAMERA_PERSPECTIVE = !1, this.rebuild(); } setValue(e, t, i = !1) { this[e] === void 0 && this._keys.push(e), i && this[e] !== t && this.markAsUnprocessed(), this[e] = t; } } class Ta extends fl { /** * Checks if a block is a texture block * @param block The block to check * @returns True if the block is a texture block */ static _BlockIsTextureBlock(e) { return e.getClassName() === "TextureBlock" || e.getClassName() === "ReflectionTextureBaseBlock" || e.getClassName() === "RefractionBlock" || e.getClassName() === "CurrentScreenBlock" || e.getClassName() === "ParticleTextureBlock" || e.getClassName() === "ImageSourceBlock" || e.getClassName() === "TriPlanarBlock" || e.getClassName() === "BiPlanarBlock" || e.getClassName() === "PrePassTextureBlock"; } /** Get the inspector from bundle or global */ _getGlobalNodeMaterialEditor() { if (typeof NODEEDITOR < "u") return NODEEDITOR; if (typeof BABYLON < "u" && typeof BABYLON.NodeEditor < "u") return BABYLON; } /** Gets or sets options to control the node material overall behavior */ get options() { return this._options; } set options(e) { this._options = e; } /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. * * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { this._attachImageProcessingConfiguration(e), this._markAllSubMeshesAsTexturesDirty(); } /** * Gets or sets the mode property */ get mode() { return this._mode; } set mode(e) { this._mode = e; } /** Gets or sets the unique identifier used to identified the effect associated with the material */ get buildId() { return this._buildId; } set buildId(e) { this._buildId = e; } /** * Create a new node based material * @param name defines the material name * @param scene defines the hosting scene * @param options defines creation option */ constructor(e, t, i = {}) { super(e, t || gi.LastCreatedScene), this._buildId = Ta._BuildIdGenerator++, this._buildWasSuccessful = !1, this._cachedWorldViewMatrix = new Ae(), this._cachedWorldViewProjectionMatrix = new Ae(), this._optimizers = new Array(), this._animationFrame = -1, this.BJSNODEMATERIALEDITOR = this._getGlobalNodeMaterialEditor(), this.editorData = null, this.ignoreAlpha = !1, this.maxSimultaneousLights = 4, this.onBuildObservable = new Fe(), this._vertexOutputNodes = new Array(), this._fragmentOutputNodes = new Array(), this.attachedBlocks = [], this._mode = Ip.Material, this.forceAlphaBlending = !1, this._options = Object.assign({ emitComments: !1 }, i), this._attachImageProcessingConfiguration(null); } /** * Gets the current class name of the material e.g. 
"NodeMaterial" * @returns the class name */ getClassName() { return "NodeMaterial"; } /** * Attaches a new image processing configuration to the Standard Material. * @param configuration */ _attachImageProcessingConfiguration(e) { e !== this._imageProcessingConfiguration && (this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), e ? this._imageProcessingConfiguration = e : this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration, this._imageProcessingConfiguration && (this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => { this._markAllSubMeshesAsImageProcessingDirty(); }))); } /** * Get a block by its name * @param name defines the name of the block to retrieve * @returns the required block or null if not found */ getBlockByName(e) { let t = null; for (const i of this.attachedBlocks) if (i.name === e) if (!t) t = i; else return Ve.Warn("More than one block was found with the name `" + e + "`"), t; return t; } /** * Get a block by its name * @param predicate defines the predicate used to find the good candidate * @returns the required block or null if not found */ getBlockByPredicate(e) { for (const t of this.attachedBlocks) if (e(t)) return t; return null; } /** * Get an input block by its name * @param predicate defines the predicate used to find the good candidate * @returns the required input block or null if not found */ getInputBlockByPredicate(e) { for (const t of this.attachedBlocks) if (t.isInput && e(t)) return t; return null; } /** * Gets the list of input blocks attached to this material * @returns an array of InputBlocks */ getInputBlocks() { const e = []; for (const t of this.attachedBlocks) t.isInput && e.push(t); return e; } /** * Adds a new optimizer to the list of optimizers * @param optimizer defines the optimizers to add * @returns the current material */ registerOptimizer(e) { if (!(this._optimizers.indexOf(e) > -1)) return this._optimizers.push(e), this; } /** * Remove an optimizer from the list of optimizers * @param optimizer defines the optimizers to remove * @returns the current material */ unregisterOptimizer(e) { const t = this._optimizers.indexOf(e); if (t !== -1) return this._optimizers.splice(t, 1), this; } /** * Add a new block to the list of output nodes * @param node defines the node to add * @returns the current material */ addOutputNode(e) { if (e.target === null) throw "This node is not meant to be an output node. You may want to explicitly set its target value."; return e.target & Le.Vertex && this._addVertexOutputNode(e), e.target & Le.Fragment && this._addFragmentOutputNode(e), this; } /** * Remove a block from the list of root nodes * @param node defines the node to remove * @returns the current material */ removeOutputNode(e) { return e.target === null ? 
this : (e.target & Le.Vertex && this._removeVertexOutputNode(e), e.target & Le.Fragment && this._removeFragmentOutputNode(e), this); } _addVertexOutputNode(e) { if (this._vertexOutputNodes.indexOf(e) === -1) return e.target = Le.Vertex, this._vertexOutputNodes.push(e), this; } _removeVertexOutputNode(e) { const t = this._vertexOutputNodes.indexOf(e); if (t !== -1) return this._vertexOutputNodes.splice(t, 1), this; } _addFragmentOutputNode(e) { if (this._fragmentOutputNodes.indexOf(e) === -1) return e.target = Le.Fragment, this._fragmentOutputNodes.push(e), this; } _removeFragmentOutputNode(e) { const t = this._fragmentOutputNodes.indexOf(e); if (t !== -1) return this._fragmentOutputNodes.splice(t, 1), this; } /** * Specifies if the material will require alpha blending * @returns a boolean specifying if alpha blending is needed */ needAlphaBlending() { return this.ignoreAlpha ? !1 : this.forceAlphaBlending || this.alpha < 1 || this._sharedData && this._sharedData.hints.needAlphaBlending; } /** * Specifies if this material should be rendered in alpha test mode * @returns a boolean specifying if an alpha test is needed. */ needAlphaTesting() { return this._sharedData && this._sharedData.hints.needAlphaTesting; } _processInitializeOnLink(e, t, i, r = !0) { (e.target === Le.VertexAndFragment || t.target === Le.Fragment && e.target === Le.Vertex && e._preparationId !== this._buildId) && i.push(e), this._initializeBlock(e, t, i, r); } _initializeBlock(e, t, i, r = !0) { if (e.initialize(t), r && e.autoConfigure(this), e._preparationId = this._buildId, this.attachedBlocks.indexOf(e) === -1) { if (e.isUnique) { const s = e.getClassName(); for (const n of this.attachedBlocks) if (n.getClassName() === s) throw `Cannot have multiple blocks of type ${s} in the same NodeMaterial`; } this.attachedBlocks.push(e); } for (const s of e.inputs) { s.associatedVariableName = ""; const n = s.connectedPoint; if (n) { const a = n.ownerBlock; a !== e && this._processInitializeOnLink(a, t, i, r); } } if (e.isTeleportOut) { const s = e; s.entryPoint && this._processInitializeOnLink(s.entryPoint, t, i, r); } for (const s of e.outputs) s.associatedVariableName = ""; } _resetDualBlocks(e, t) { e.target === Le.VertexAndFragment && (e.buildId = t); for (const i of e.inputs) { const r = i.connectedPoint; if (r) { const s = r.ownerBlock; s !== e && this._resetDualBlocks(s, t); } } if (e.isTeleportOut) { const i = e; i.entryPoint && this._resetDualBlocks(i.entryPoint, t); } } /** * Remove a block from the current node material * @param block defines the block to remove */ removeBlock(e) { const t = this.attachedBlocks.indexOf(e); t > -1 && this.attachedBlocks.splice(t, 1), e.isFinalMerger && this.removeOutputNode(e); } /** * Build the material and generates the inner effect * @param verbose defines if the build should log activity * @param updateBuildId defines if the internal build Id should be updated (default is true) * @param autoConfigure defines if the autoConfigure method should be called when initializing blocks (default is false) */ build(e = !1, t = !0, i = !1) { !this._vertexCompilationState && !i && (i = !0), this._buildWasSuccessful = !1; const r = this.getScene().getEngine(), s = this._mode === Ip.Particle; if (this._vertexOutputNodes.length === 0 && !s) throw "You must define at least one vertexOutputNode"; if (this._fragmentOutputNodes.length === 0) throw "You must define at least one fragmentOutputNode"; this._vertexCompilationState = new PZ(), this._vertexCompilationState.supportUniformBuffers = 
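/*
 * Hedged sketch for the graph helpers and build step defined above; "baseColor" is an
 * illustrative block name and nodeMaterial is assumed to hold a fully connected graph.
 * const block = nodeMaterial.getBlockByName("baseColor"); // null when absent, warns on duplicate names
 * const inputs = nodeMaterial.getInputBlocks();
 * nodeMaterial.onBuildObservable.add((mat) => console.log(mat.compiledShaders));
 * nodeMaterial.build(true); // verbose: logs the generated vertex and fragment shaders
 */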
r.supportsUniformBuffers, this._vertexCompilationState.target = Le.Vertex, this._fragmentCompilationState = new PZ(), this._fragmentCompilationState.supportUniformBuffers = r.supportsUniformBuffers, this._fragmentCompilationState.target = Le.Fragment, this._sharedData = new Nce(), this._sharedData.nodeMaterial = this, this._sharedData.fragmentOutputNodes = this._fragmentOutputNodes, this._vertexCompilationState.sharedData = this._sharedData, this._fragmentCompilationState.sharedData = this._sharedData, this._sharedData.buildId = this._buildId, this._sharedData.emitComments = this._options.emitComments, this._sharedData.verbose = e, this._sharedData.scene = this.getScene(), this._sharedData.allowEmptyVertexProgram = s; const n = [], a = []; for (const u of this._vertexOutputNodes) n.push(u), this._initializeBlock(u, this._vertexCompilationState, a, i); for (const u of this._fragmentOutputNodes) a.push(u), this._initializeBlock(u, this._fragmentCompilationState, n, i); this.optimize(); for (const u of n) u.build(this._vertexCompilationState, n); this._fragmentCompilationState.uniforms = this._vertexCompilationState.uniforms.slice(0), this._fragmentCompilationState._uniformDeclaration = this._vertexCompilationState._uniformDeclaration, this._fragmentCompilationState._constantDeclaration = this._vertexCompilationState._constantDeclaration, this._fragmentCompilationState._vertexState = this._vertexCompilationState; for (const u of a) this._resetDualBlocks(u, this._buildId - 1); for (const u of a) u.build(this._fragmentCompilationState, a); this._vertexCompilationState.finalize(this._vertexCompilationState), this._fragmentCompilationState.finalize(this._fragmentCompilationState), t && (this._buildId = Ta._BuildIdGenerator++), this._sharedData.emitErrors(), e && (Ce.Log("Vertex shader:"), Ce.Log(this._vertexCompilationState.compilationString), Ce.Log("Fragment shader:"), Ce.Log(this._fragmentCompilationState.compilationString)), this._buildWasSuccessful = !0, this.onBuildObservable.notifyObservers(this); const l = this.getScene().meshes; for (const u of l) if (u.subMeshes) for (const h of u.subMeshes) { if (h.getMaterial() !== this || !h.materialDefines) continue; const d = h.materialDefines; d.markAllAsDirty(), d.reset(); } this.prePassTextureInputs.length && this.getScene().enablePrePassRenderer(); const o = this.getScene().prePassRenderer; o && o.markAsDirty(); } /** * Runs an otpimization phase to try to improve the shader code */ optimize() { for (const e of this._optimizers) e.optimize(this._vertexOutputNodes, this._fragmentOutputNodes); } _prepareDefinesForAttributes(e, t) { const i = t.NORMAL, r = t.TANGENT, s = t.VERTEXCOLOR_NME; t.NORMAL = e.isVerticesDataPresent(Y.NormalKind), t.TANGENT = e.isVerticesDataPresent(Y.TangentKind); const n = e.useVertexColors && e.isVerticesDataPresent(Y.ColorKind); t.VERTEXCOLOR_NME = n; let a = !1; for (let o = 1; o <= 6; ++o) { const u = t["UV" + o]; t["UV" + o] = e.isVerticesDataPresent(`uv${o === 1 ? 
"" : o}`), a = a || t["UV" + o] !== u; } const l = this.needAlphaBlendingForMesh(e) && this.getScene().useOrderIndependentTransparency; Ke.PrepareDefinesForPrePass(this.getScene(), t, !l), (i !== t.NORMAL || r !== t.TANGENT || s !== t.VERTEXCOLOR_NME || a) && t.markAsAttributesDirty(); } /** * Can this material render to prepass */ get isPrePassCapable() { return !0; } /** * Outputs written to the prepass */ get prePassTextureOutputs() { const e = this.getBlockByPredicate((i) => i.getClassName() === "PrePassOutputBlock"), t = [4]; return !e || this.prePassTextureInputs.length || (e.viewDepth.isConnected && t.push(5), e.viewNormal.isConnected && t.push(6), e.worldPosition.isConnected && t.push(1)), t; } /** * Gets the list of prepass texture required */ get prePassTextureInputs() { const e = this.getAllTextureBlocks().filter((i) => i.getClassName() === "PrePassTextureBlock"), t = []; for (const i of e) i.position.isConnected && !t.includes(1) && t.push(1), i.depth.isConnected && !t.includes(5) && t.push(5), i.normal.isConnected && !t.includes(6) && t.push(6); return t; } /** * Sets the required values to the prepass renderer. */ setPrePassRenderer(e) { const t = this.prePassTextureInputs.concat(this.prePassTextureOutputs); if (e && t.length > 1) { let i = e.getEffectConfiguration("nodeMaterial"); i || (i = e.addEffectConfiguration({ enabled: !0, needsImageProcessing: !1, name: "nodeMaterial", texturesRequired: [] })); for (const r of t) i.texturesRequired.includes(r) || i.texturesRequired.push(r); i.enabled = !0; } return t.length > 1; } /** * Create a post process from the material * @param camera The camera to apply the render pass to. * @param options The required width/height ratio to downsize to before computing the render pass. (Use 1.0 for full size) * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param textureFormat Format of textures used when performing the post process. (default: TEXTUREFORMAT_RGBA) * @returns the post process created */ createPostProcess(e, t = 1, i = 1, r, s, n = 0, a = 5) { return this.mode !== Ip.PostProcess ? (Ce.Log("Incompatible material mode"), null) : this._createEffectForPostProcess(null, e, t, i, r, s, n, a); } /** * Create the post process effect from the material * @param postProcess The post process to create the effect for */ createEffectForPostProcess(e) { this._createEffectForPostProcess(e); } _createEffectForPostProcess(e, t, i = 1, r = 1, s, n, a = 0, l = 5) { let o = this.name + this._buildId; const u = new D9(), h = new xr(o + "PostProcess", this.getScene()); let d = this._buildId; return this._processDefines(h, u), Cr.RegisterShader(o, this._fragmentCompilationState._builtCompilationString, this._vertexCompilationState._builtCompilationString), e ? 
e.updateEffect(u.toString(), this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, { maxSimultaneousLights: this.maxSimultaneousLights }, void 0, void 0, o, o) : e = new Bi(this.name + "PostProcess", o, this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, i, t, r, s, n, u.toString(), a, o, { maxSimultaneousLights: this.maxSimultaneousLights }, !1, l), e.nodeMaterialSource = this, e.onApplyObservable.add((f) => { d !== this._buildId && (delete Cr.ShadersStore[o + "VertexShader"], delete Cr.ShadersStore[o + "PixelShader"], o = this.name + this._buildId, u.markAllAsDirty(), d = this._buildId), this._processDefines(h, u) && (Cr.RegisterShader(o, this._fragmentCompilationState._builtCompilationString, this._vertexCompilationState._builtCompilationString), BO.SetImmediate(() => e.updateEffect(u.toString(), this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, { maxSimultaneousLights: this.maxSimultaneousLights }, void 0, void 0, o, o))), this._checkInternals(f); }), e; } /** * Create a new procedural texture based on this node material * @param size defines the size of the texture * @param scene defines the hosting scene * @returns the new procedural texture attached to this node material */ createProceduralTexture(e, t) { if (this.mode !== Ip.ProceduralTexture) return Ce.Log("Incompatible material mode"), null; let i = this.name + this._buildId; const r = new z4(i, e, null, t), s = new xr(i + "Procedural", this.getScene()); s.reservedDataStore = { hidden: !0 }; const n = new D9(), a = this._processDefines(s, n); Cr.RegisterShader(i, this._fragmentCompilationState._builtCompilationString, this._vertexCompilationState._builtCompilationString); let l = this.getScene().getEngine().createEffect({ vertexElement: i, fragmentElement: i }, [Y.PositionKind], this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, n.toString(), a == null ? void 0 : a.fallbacks, void 0); r.nodeMaterialSource = this, r._setEffect(l); let o = this._buildId; return r.onBeforeGenerationObservable.add(() => { o !== this._buildId && (delete Cr.ShadersStore[i + "VertexShader"], delete Cr.ShadersStore[i + "PixelShader"], i = this.name + this._buildId, n.markAllAsDirty(), o = this._buildId); const u = this._processDefines(s, n); u && (Cr.RegisterShader(i, this._fragmentCompilationState._builtCompilationString, this._vertexCompilationState._builtCompilationString), BO.SetImmediate(() => { l = this.getScene().getEngine().createEffect({ vertexElement: i, fragmentElement: i }, [Y.PositionKind], this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, n.toString(), u == null ? void 0 : u.fallbacks, void 0), r._setEffect(l); })), this._checkInternals(l); }), r; } _createEffectForParticles(e, t, i, r, s, n, a, l = "") { let o = this.name + this._buildId + "_" + t; n || (n = new D9()), a || (a = this.getScene().getMeshByName(this.name + "Particle"), a || (a = new xr(this.name + "Particle", this.getScene()), a.reservedDataStore = { hidden: !0 })); let u = this._buildId; const h = []; let d = l; if (!s) { const f = this._processDefines(a, n); Cr.RegisterShader(o, this._fragmentCompilationState._builtCompilationString), e.fillDefines(h, t), d = h.join(` `), s = this.getScene().getEngine().createEffectForParticles(o, this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, n.toString() + ` ` + d, f == null ? 
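/*
 * Hedged sketches for the post-process and procedural-texture entry points above. "camera" is
 * an illustrative camera, and the material is assumed to be in the matching mode (for example
 * via setToDefaultPostProcess or setToDefaultProceduralTexture); otherwise both calls return
 * null and log "Incompatible material mode".
 * const pp = nodeMaterial.createPostProcess(camera, 1.0);
 * const procTex = nodeMaterial.createProceduralTexture(256, scene);
 */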
void 0 : f.fallbacks, i, r, e), e.setCustomEffect(s, t); } s.onBindObservable.add((f) => { u !== this._buildId && (delete Cr.ShadersStore[o + "PixelShader"], o = this.name + this._buildId + "_" + t, n.markAllAsDirty(), u = this._buildId), h.length = 0, e.fillDefines(h, t); const p = h.join(` `); p !== d && (n.markAllAsDirty(), d = p); const m = this._processDefines(a, n); if (m) { Cr.RegisterShader(o, this._fragmentCompilationState._builtCompilationString), f = this.getScene().getEngine().createEffectForParticles(o, this._fragmentCompilationState.uniforms, this._fragmentCompilationState.samplers, n.toString() + ` ` + d, m == null ? void 0 : m.fallbacks, i, r, e), e.setCustomEffect(f, t), this._createEffectForParticles(e, t, i, r, f, n, a, l); return; } this._checkInternals(f); }); } _checkInternals(e) { if (this._sharedData.animatedInputs) { const t = this.getScene(), i = t.getFrameId(); if (this._animationFrame !== i) { for (const r of this._sharedData.animatedInputs) r.animate(t); this._animationFrame = i; } } for (const t of this._sharedData.bindableBlocks) t.bind(e, this); for (const t of this._sharedData.inputBlocks) t._transmit(e, this.getScene(), this); } /** * Create the effect to be used as the custom effect for a particle system * @param particleSystem Particle system to create the effect for * @param onCompiled defines a function to call when the effect creation is successful * @param onError defines a function to call when the effect creation has failed */ createEffectForParticles(e, t, i) { if (this.mode !== Ip.Particle) { Ce.Log("Incompatible material mode"); return; } this._createEffectForParticles(e, V4.BLENDMODE_ONEONE, t, i), this._createEffectForParticles(e, V4.BLENDMODE_MULTIPLY, t, i); } /** * Use this material as the shadow depth wrapper of a target material * @param targetMaterial defines the target material */ createAsShadowDepthWrapper(e) { if (this.mode !== Ip.Material) { Ce.Log("Incompatible material mode"); return; } e.shadowDepthWrapper = new BABYLON.ShadowDepthWrapper(this, this.getScene()); } _processDefines(e, t, i = !1, r) { let s = null; const n = this.getScene(); if (Ke.PrepareDefinesForCamera(n, t) && t.markAsMiscDirty(), this._sharedData.blocksWithDefines.forEach((a) => { a.initializeDefines(e, this, t, i); }), this._sharedData.blocksWithDefines.forEach((a) => { a.prepareDefines(e, this, t, i, r); }), t.isDirty) { const a = t._areLightsDisposed; t.markAsProcessed(), this._vertexCompilationState.compilationString = this._vertexCompilationState._builtCompilationString, this._fragmentCompilationState.compilationString = this._fragmentCompilationState._builtCompilationString, this._sharedData.repeatableContentBlocks.forEach((d) => { d.replaceRepeatableContent(this._vertexCompilationState, this._fragmentCompilationState, e, t); }); const l = []; this._sharedData.dynamicUniformBlocks.forEach((d) => { d.updateUniformsAndSamples(this._vertexCompilationState, this, t, l); }); const o = this._vertexCompilationState.uniforms; this._fragmentCompilationState.uniforms.forEach((d) => { o.indexOf(d) === -1 && o.push(d); }); const u = this._vertexCompilationState.samplers; this._fragmentCompilationState.samplers.forEach((d) => { u.indexOf(d) === -1 && u.push(d); }); const h = new pl(); this._sharedData.blocksWithFallbacks.forEach((d) => { d.provideFallbacks(e, h); }), s = { lightDisposed: a, uniformBuffers: l, mergedUniforms: o, mergedSamplers: u, fallbacks: h }; } return s; } /** * Get if the submesh is ready to be used and all its information available. 
* Child classes can use it to update shaders * @param mesh defines the mesh to check * @param subMesh defines which submesh to check * @param useInstances specifies that instances should be used * @returns a boolean indicating that the submesh is ready or not */ isReadyForSubMesh(e, t, i = !1) { if (!this._buildWasSuccessful) return !1; const r = this.getScene(); if (this._sharedData.animatedInputs) { const l = r.getFrameId(); if (this._animationFrame !== l) { for (const o of this._sharedData.animatedInputs) o.animate(r); this._animationFrame = l; } } if (t.effect && this.isFrozen && t.effect._wasPreviouslyReady && t.effect._wasPreviouslyUsingInstances === i) return !0; t.materialDefines || (t.materialDefines = new D9()); const s = t.materialDefines; if (this._isReadyForSubMesh(t)) return !0; const n = r.getEngine(); if (this._prepareDefinesForAttributes(e, s), this._sharedData.blockingBlocks.some((l) => !l.isReady(e, this, s, i))) return !1; const a = this._processDefines(e, s, i, t); if (a) { const l = t.effect, o = s.toString(); let u = n.createEffect({ vertex: "nodeMaterial" + this._buildId, fragment: "nodeMaterial" + this._buildId, vertexSource: this._vertexCompilationState.compilationString, fragmentSource: this._fragmentCompilationState.compilationString }, { attributes: this._vertexCompilationState.attributes, uniformsNames: a.mergedUniforms, uniformBuffersNames: a.uniformBuffers, samplers: a.mergedSamplers, defines: o, fallbacks: a.fallbacks, onCompiled: this.onCompiled, onError: this.onError, multiTarget: s.PREPASS, indexParameters: { maxSimultaneousLights: this.maxSimultaneousLights, maxSimultaneousMorphTargets: s.NUM_MORPH_INFLUENCERS } }, n); if (u) if (this._onEffectCreatedObservable && (Uk.effect = u, Uk.subMesh = t, this._onEffectCreatedObservable.notifyObservers(Uk)), this.allowShaderHotSwapping && l && !u.isReady()) { if (u = l, s.markAsUnprocessed(), a.lightDisposed) return s._areLightsDisposed = !0, !1; } else r.resetCachedMaterial(), t.setEffect(u, s, this._materialContext); } return !t.effect || !t.effect.isReady() ? 
!1 : (s._renderId = r.getRenderId(), t.effect._wasPreviouslyReady = !0, t.effect._wasPreviouslyUsingInstances = i, this._checkScenePerformancePriority(), !0); } /** * Get a string representing the shaders built by the current node graph */ get compiledShaders() { return `// Vertex shader ${this._vertexCompilationState.compilationString} // Fragment shader ${this._fragmentCompilationState.compilationString}`; } /** * Binds the world matrix to the material * @param world defines the world transformation matrix */ bindOnlyWorldMatrix(e) { const t = this.getScene(); if (!this._activeEffect) return; const i = this._sharedData.hints; i.needWorldViewMatrix && e.multiplyToRef(t.getViewMatrix(), this._cachedWorldViewMatrix), i.needWorldViewProjectionMatrix && e.multiplyToRef(t.getTransformMatrix(), this._cachedWorldViewProjectionMatrix); for (const r of this._sharedData.inputBlocks) r._transmitWorld(this._activeEffect, e, this._cachedWorldViewMatrix, this._cachedWorldViewProjectionMatrix); } /** * Binds the submesh to this material by preparing the effect and shader to draw * @param world defines the world transformation matrix * @param mesh defines the mesh containing the submesh * @param subMesh defines the submesh to bind the material to */ bindForSubMesh(e, t, i) { const r = this.getScene(), s = i.effect; if (!s) return; this._activeEffect = s, this.bindOnlyWorldMatrix(e); const n = this._mustRebind(r, s, t.visibility), a = this._sharedData; if (n) { for (const l of a.bindableBlocks) l.bind(s, this, t, i); for (const l of a.forcedBindableBlocks) l.bind(s, this, t, i); for (const l of a.inputBlocks) l._transmit(s, r, this); } else if (!this.isFrozen) for (const l of a.forcedBindableBlocks) l.bind(s, this, t, i); this._afterBind(t, this._activeEffect); } /** * Gets the active textures from the material * @returns an array of textures */ getActiveTextures() { const e = super.getActiveTextures(); return this._sharedData && e.push(...this._sharedData.textureBlocks.filter((t) => t.texture).map((t) => t.texture)), e; } /** * Gets the list of texture blocks * Note that this method will only return blocks that are reachable from the final block(s) and only after the material has been built! * @returns an array of texture blocks */ getTextureBlocks() { return this._sharedData ? 
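/*
 * Hedged sketch for the texture queries here; getActiveTextures and getTextureBlocks rely on
 * the material having been built, while getAllTextureBlocks scans every attached block.
 * const activeTextures = nodeMaterial.getActiveTextures();
 * const usedBlocks = nodeMaterial.getTextureBlocks();   // only blocks reachable from the outputs
 * const allBlocks = nodeMaterial.getAllTextureBlocks(); // every attached texture-like block
 */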
this._sharedData.textureBlocks : []; } /** * Gets the list of all texture blocks * Note that this method will scan all attachedBlocks and return blocks that are texture blocks * @returns */ getAllTextureBlocks() { const e = []; for (const t of this.attachedBlocks) Ta._BlockIsTextureBlock(t) && e.push(t); return e; } /** * Specifies if the material uses a texture * @param texture defines the texture to check against the material * @returns a boolean specifying if the material uses the texture */ hasTexture(e) { if (super.hasTexture(e)) return !0; if (!this._sharedData) return !1; for (const t of this._sharedData.textureBlocks) if (t.texture === e) return !0; return !1; } /** * Disposes the material * @param forceDisposeEffect specifies if effects should be forcefully disposed * @param forceDisposeTextures specifies if textures should be forcefully disposed * @param notBoundToMesh specifies if the material that is being disposed is known to be not bound to any mesh */ dispose(e, t, i) { if (t) for (const r of this.getTextureBlocks().filter((s) => s.texture).map((s) => s.texture)) r.dispose(); for (const r of this.attachedBlocks) r.dispose(); this.attachedBlocks.length = 0, this._sharedData = null, this._vertexCompilationState = null, this._fragmentCompilationState = null, this.onBuildObservable.clear(), this._imageProcessingObserver && (this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), this._imageProcessingObserver = null), super.dispose(e, t, i); } /** Creates the node editor window. */ _createNodeEditor(e) { const t = Object.assign({ nodeMaterial: this }, e); this.BJSNODEMATERIALEDITOR.NodeEditor.Show(t); } /** * Launch the node material editor * @param config Define the configuration of the editor * @returns a promise fulfilled when the node editor is visible */ edit(e) { return new Promise((t) => { if (this.BJSNODEMATERIALEDITOR = this.BJSNODEMATERIALEDITOR || this._getGlobalNodeMaterialEditor(), typeof this.BJSNODEMATERIALEDITOR > "u") { const i = e && e.editorURL ? e.editorURL : Ta.EditorURL; Ve.LoadBabylonScript(i, () => { this.BJSNODEMATERIALEDITOR = this.BJSNODEMATERIALEDITOR || this._getGlobalNodeMaterialEditor(), this._createNodeEditor(e == null ? void 0 : e.nodeEditorConfig), t(); }); } else this._createNodeEditor(e == null ? 
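/*
 * Hedged sketch for edit() above: opens the Node Material Editor, loading the editor bundle
 * from NodeMaterial.EditorURL when it is not already present on the page.
 * await nodeMaterial.edit();
 */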
void 0 : e.nodeEditorConfig), t(); }); } /** * Clear the current material */ clear() { this._vertexOutputNodes.length = 0, this._fragmentOutputNodes.length = 0, this.attachedBlocks.length = 0; } /** * Clear the current material and set it to a default state */ setToDefault() { this.clear(), this.editorData = null; const e = new vs("Position"); e.setAsAttribute("position"); const t = new vs("World"); t.setAsSystemValue(Ms.World); const i = new aB("WorldPos"); e.connectTo(i), t.connectTo(i); const r = new vs("ViewProjection"); r.setAsSystemValue(Ms.ViewProjection); const s = new aB("WorldPos * ViewProjectionTransform"); i.connectTo(s), r.connectTo(s); const n = new G9("VertexOutput"); s.connectTo(n); const a = new vs("color"); a.value = new Et(0.8, 0.8, 0.8, 1); const l = new sT("FragmentOutput"); a.connectTo(l), this.addOutputNode(n), this.addOutputNode(l), this._mode = Ip.Material; } /** * Clear the current material and set it to a default state for post process */ setToDefaultPostProcess() { this.clear(), this.editorData = null; const e = new vs("Position"); e.setAsAttribute("position2d"); const t = new vs("Constant1"); t.isConstant = !0, t.value = 1; const i = new K9("Position3D"); e.connectTo(i), t.connectTo(i, { input: "w" }); const r = new G9("VertexOutput"); i.connectTo(r); const s = new vs("Scale"); s.visibleInInspector = !0, s.value = new at(1, 1); const n = new BL("uv0"); e.connectTo(n); const a = new oB("UV scale"); n.connectTo(a), s.connectTo(a); const l = new CK("CurrentScreen"); a.connectTo(l), l.texture = new De("https://assets.babylonjs.com/nme/currentScreenPostProcess.png", this.getScene()); const o = new sT("FragmentOutput"); l.connectTo(o, { output: "rgba" }), this.addOutputNode(r), this.addOutputNode(o), this._mode = Ip.PostProcess; } /** * Clear the current material and set it to a default state for procedural texture */ setToDefaultProceduralTexture() { this.clear(), this.editorData = null; const e = new vs("Position"); e.setAsAttribute("position2d"); const t = new vs("Constant1"); t.isConstant = !0, t.value = 1; const i = new K9("Position3D"); e.connectTo(i), t.connectTo(i, { input: "w" }); const r = new G9("VertexOutput"); i.connectTo(r); const s = new vs("Time"); s.value = 0, s.min = 0, s.max = 0, s.isBoolean = !1, s.matrixMode = 0, s.animationType = tT.Time, s.isConstant = !1; const n = new vs("Color3"); n.value = new ze(1, 1, 1), n.isConstant = !1; const a = new sT("FragmentOutput"), l = new K9("VectorMerger"); l.visibleInInspector = !1; const o = new SK("Cos"); o.operation = nu.Cos, e.connectTo(l), s.output.connectTo(o.input), o.output.connectTo(l.z), l.xyzOut.connectTo(a.rgb), this.addOutputNode(r), this.addOutputNode(a), this._mode = Ip.ProceduralTexture; } /** * Clear the current material and set it to a default state for particle */ setToDefaultParticle() { this.clear(), this.editorData = null; const e = new vs("uv"); e.setAsAttribute("particle_uv"); const t = new xK("ParticleTexture"); e.connectTo(t); const i = new vs("Color"); i.setAsAttribute("particle_color"); const r = new oB("Texture * Color"); t.connectTo(r), i.connectTo(r); const s = new bK("ParticleRampGradient"); r.connectTo(s); const n = new TK("ColorSplitter"); i.connectTo(n); const a = new EK("ParticleBlendMultiply"); s.connectTo(a), t.connectTo(a, { output: "a" }), n.connectTo(a, { output: "a" }); const l = new sT("FragmentOutput"); a.connectTo(l), this.addOutputNode(l), this._mode = Ip.Particle; } /** * Loads the current Node Material from a url pointing to a file save by the Node Material 
Editor * @deprecated Please use NodeMaterial.ParseFromFileAsync instead * @param url defines the url to load from * @param rootUrl defines the root URL for nested url in the node material * @returns a promise that will fulfil when the material is fully loaded */ async loadAsync(e, t = "") { return Ta.ParseFromFileAsync("", e, this.getScene(), t, !0, this); } _gatherBlocks(e, t) { if (t.indexOf(e) === -1) { t.push(e); for (const i of e.inputs) { const r = i.connectedPoint; if (r) { const s = r.ownerBlock; s !== e && this._gatherBlocks(s, t); } } if (e.isTeleportOut) { const i = e; i.entryPoint && this._gatherBlocks(i.entryPoint, t); } } } /** * Generate a string containing the code declaration required to create an equivalent of this material * @returns a string */ generateCode() { let e = []; const t = [], i = ["const", "var", "let"]; for (const n of this._vertexOutputNodes) this._gatherBlocks(n, t); const r = []; for (const n of this._fragmentOutputNodes) this._gatherBlocks(n, r); let s = `var nodeMaterial = new BABYLON.NodeMaterial("${this.name || "node material"}"); `; s += `nodeMaterial.mode = BABYLON.NodeMaterialModes.${Ip[this.mode]}; `; for (const n of t) n.isInput && e.indexOf(n) === -1 && (s += n._dumpCode(i, e)); for (const n of r) n.isInput && e.indexOf(n) === -1 && (s += n._dumpCode(i, e)); e = [], s += ` // Connections `; for (const n of this._vertexOutputNodes) s += n._dumpCodeForOutputConnections(e); for (const n of this._fragmentOutputNodes) s += n._dumpCodeForOutputConnections(e); s += ` // Output nodes `; for (const n of this._vertexOutputNodes) s += `nodeMaterial.addOutputNode(${n._codeVariableName}); `; for (const n of this._fragmentOutputNodes) s += `nodeMaterial.addOutputNode(${n._codeVariableName}); `; return s += `nodeMaterial.build(); `, s; } /** * Serializes this material in a JSON representation * @param selectedBlocks * @returns the serialized material object */ serialize(e) { const t = e ? 
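/*
 * Hedged sketch: round-tripping a material through the JSON form produced by serialize().
 * The empty root URL is illustrative; pass the base URL used to resolve relative textures.
 * const json = nodeMaterial.serialize();
 * const restored = BABYLON.NodeMaterial.Parse(json, scene, "");
 */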
{} : St.Serialize(this); t.editorData = JSON.parse(JSON.stringify(this.editorData)); let i = []; if (e) i = e; else { t.customType = "BABYLON.NodeMaterial", t.outputNodes = []; for (const r of this._vertexOutputNodes) this._gatherBlocks(r, i), t.outputNodes.push(r.uniqueId); for (const r of this._fragmentOutputNodes) this._gatherBlocks(r, i), t.outputNodes.indexOf(r.uniqueId) === -1 && t.outputNodes.push(r.uniqueId); } t.blocks = []; for (const r of i) t.blocks.push(r.serialize()); if (!e) for (const r of this.attachedBlocks) i.indexOf(r) === -1 && t.blocks.push(r.serialize()); return t; } _restoreConnections(e, t, i) { for (const r of e.outputs) for (const s of t.blocks) { const n = i[s.id]; if (n) { for (const a of s.inputs) if (i[a.targetBlockId] === e && a.targetConnectionName === r.name) { const l = n.getInputByName(a.inputName); if (!l || l.isConnected) continue; r.connectTo(l, !0), this._restoreConnections(n, t, i); continue; } } } } /** * Clear the current graph and load a new one from a serialization object * @param source defines the JSON representation of the material * @param rootUrl defines the root URL to use to load textures and relative dependencies * @param merge defines whether or not the source must be merged or replace the current content */ parseSerializedObject(e, t = "", i = !1) { var r; i || this.clear(); const s = {}; for (const n of e.blocks) { const a = Qo(n.customType); if (a) { const l = new a(); l._deserialize(n, this.getScene(), t), s[n.id] = l, this.attachedBlocks.push(l); } } for (const n of this.attachedBlocks) if (n.isTeleportOut) { const a = n, l = a._tempEntryPointUniqueId; l && s[l].attachToEndpoint(a); } for (let n = 0; n < e.blocks.length; n++) { const a = e.blocks[n], l = s[a.id]; l && (l.inputs.length && !i || this._restoreConnections(l, e, s)); } if (e.outputNodes) for (const n of e.outputNodes) this.addOutputNode(s[n]); if (e.locations || e.editorData && e.editorData.locations) { const n = e.locations || e.editorData.locations; for (const l of n) s[l.blockId] && (l.blockId = s[l.blockId].uniqueId); i && this.editorData && this.editorData.locations && n.concat(this.editorData.locations), e.locations ? this.editorData = { locations: n } : (this.editorData = e.editorData, this.editorData.locations = n); const a = []; for (const l in s) a[l] = s[l].uniqueId; this.editorData.map = a; } this.comment = e.comment, e.forceAlphaBlending !== void 0 && (this.forceAlphaBlending = e.forceAlphaBlending), i || (this._mode = (r = e.mode) !== null && r !== void 0 ? r : Ip.Material); } /** * Clear the current graph and load a new one from a serialization object * @param source defines the JSON representation of the material * @param rootUrl defines the root URL to use to load textures and relative dependencies * @param merge defines whether or not the source must be merged or replace the current content * @deprecated Please use the parseSerializedObject method instead */ loadFromSerialization(e, t = "", i = !1) { this.parseSerializedObject(e, t, i); } /** * Makes a duplicate of the current material. 
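* @example
* // Hedged sketch: duplicate the material; pass true as the second argument to share the compiled effect.
* const copy = nodeMaterial.clone("myMaterialClone", true);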
* @param name defines the name to use for the new material * @param shareEffect defines if the clone material should share the same effect (default is false) */ clone(e, t = !1) { const i = this.serialize(), r = St.Clone(() => new Ta(e, this.getScene(), this.options), this); return r.id = e, r.name = e, r.parseSerializedObject(i), r._buildId = this._buildId, r.build(!1, !t), r; } /** * Awaits for all the material textures to be ready before resolving the returned promise. */ whenTexturesReadyAsync() { const e = []; return this.getActiveTextures().forEach((t) => { const i = t.getInternalTexture(); i && !i.isReady && e.push(new Promise((r, s) => { i.onLoadedObservable.addOnce(() => { r(); }), i.onErrorObservable.addOnce((n) => { s(n); }); })); }), Promise.all(e); } /** * Creates a node material from parsed material data * @param source defines the JSON representation of the material * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a new node material */ static Parse(e, t, i = "") { const r = St.Parse(() => new Ta(e.name, t), e, t, i); return r.parseSerializedObject(e, i), r.build(), r; } /** * Creates a node material from a snippet saved in a remote file * @param name defines the name of the material to create * @param url defines the url to load from * @param scene defines the hosting scene * @param rootUrl defines the root URL for nested url in the node material * @param skipBuild defines whether to build the node material * @param targetMaterial defines a material to use instead of creating a new one * @returns a promise that will resolve to the new node material */ static async ParseFromFileAsync(e, t, i, r = "", s = !1, n) { const a = n ?? new Ta(e, i), l = await i._loadFileAsync(t), o = JSON.parse(l); return a.parseSerializedObject(o, r), s || a.build(), a; } /** * Creates a node material from a snippet saved by the node material editor * @param snippetId defines the snippet to load * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @param nodeMaterial defines a node material to update (instead of creating a new one) * @param skipBuild defines whether to build the node material * @param waitForTextureReadyness defines whether to wait for texture readiness resolving the promise (default: false) * @returns a promise that will resolve to the new node material */ static ParseFromSnippetAsync(e, t = gi.LastCreatedScene, i = "", r, s = !1, n = !1) { return e === "_BLANK" ? Promise.resolve(Ta.CreateDefault("blank", t)) : new Promise((a, l) => { const o = new go(); o.addEventListener("readystatechange", () => { if (o.readyState == 4) if (o.status == 200) { const u = JSON.parse(JSON.parse(o.responseText).jsonPayload), h = JSON.parse(u.nodeMaterial); r || (r = St.Parse(() => new Ta(e, t), h, t, i), r.uniqueId = t.getUniqueId()), r.parseSerializedObject(h), r.snippetId = e; try { s || r.build(); } catch (d) { l(d); } n ? 
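/*
 * Hedged sketch for the static loaders above; the snippet id and file URL are placeholders.
 * const fromSnippet = await BABYLON.NodeMaterial.ParseFromSnippetAsync("#SNIPPETID", scene);
 * const fromFile = await BABYLON.NodeMaterial.ParseFromFileAsync("myMat", "materials/myMat.json", scene);
 */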
r.whenTexturesReadyAsync().then(() => { a(r); }).catch((d) => { l(d); }) : a(r); } else l("Unable to load the snippet " + e); }), o.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), o.send(); }); } /** * Creates a new node material set to default basic configuration * @param name defines the name of the material * @param scene defines the hosting scene * @returns a new NodeMaterial */ static CreateDefault(e, t) { const i = new Ta(e, t); return i.setToDefault(), i.build(), i; } } Ta._BuildIdGenerator = 0; Ta.EditorURL = `${Ve._DefaultCdnUrl}/v${$e.Version}/nodeEditor/babylon.nodeEditor.js`; Ta.SnippetUrl = "https://snippet.babylonjs.com"; Ta.IgnoreTexturesAtLoadTime = !1; F([ W() ], Ta.prototype, "ignoreAlpha", void 0); F([ W() ], Ta.prototype, "maxSimultaneousLights", void 0); F([ W("mode") ], Ta.prototype, "_mode", void 0); F([ W("comment") ], Ta.prototype, "comment", void 0); F([ W() ], Ta.prototype, "forceAlphaBlending", void 0); Be("BABYLON.NodeMaterial", Ta); function ZB(c) { const e = c.sideOrientation || Ot.DEFAULTSIDE, t = c.radius || 1, i = c.flat === void 0 ? !0 : c.flat, r = (c.subdivisions || 4) | 0, s = c.radiusX || t, n = c.radiusY || t, a = c.radiusZ || t, l = (1 + Math.sqrt(5)) / 2, o = [ -1, l, -0, 1, l, 0, -1, -l, 0, 1, -l, 0, 0, -1, -l, 0, 1, -l, 0, -1, l, 0, 1, l, l, 0, 1, l, 0, -1, -l, 0, 1, -l, 0, -1 // v8-11 ], u = [ 0, 11, 5, 0, 5, 1, 0, 1, 7, 0, 7, 10, 12, 22, 23, 1, 5, 20, 5, 11, 4, 23, 22, 13, 22, 18, 6, 7, 1, 8, 14, 21, 4, 14, 4, 2, 16, 13, 6, 15, 6, 19, 3, 8, 9, 4, 21, 5, 13, 17, 23, 6, 13, 22, 19, 6, 18, 9, 8, 1 ], h = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, // vertex alias 0, 2, 3, 3, 3, 4, 7, 8, 9, 9, 10, 11 // 23: B + 12 ], d = [ 5, 1, 3, 1, 6, 4, 0, 0, 5, 3, 4, 2, 2, 2, 4, 0, 2, 0, 1, 1, 6, 0, 6, 2, // vertex alias (for same vertex on different faces) 0, 4, 3, 3, 4, 4, 3, 1, 4, 2, 4, 4, 0, 2, 1, 1, 2, 2, 3, 3, 1, 3, 2, 4 // 23: B + 12 ], f = 138 / 1024, p = 239 / 1024, m = 60 / 1024, _ = 26 / 1024, v = -40 / 1024, C = 20 / 1024, x = [ 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0 // 15 - 19 ], b = [], S = [], M = [], R = []; let w = 0; const V = new Array(3), k = new Array(3); let L; for (L = 0; L < 3; L++) V[L] = D.Zero(), k[L] = at.Zero(); for (let U = 0; U < 20; U++) { for (L = 0; L < 3; L++) { const ee = u[3 * U + L]; V[L].copyFromFloats(o[3 * h[ee]], o[3 * h[ee] + 1], o[3 * h[ee] + 2]), V[L].normalize(), k[L].copyFromFloats(d[2 * ee] * f + m + x[U] * v, d[2 * ee + 1] * p + _ + x[U] * C); } const K = (ee, Z, q, le) => { const ie = D.Lerp(V[0], V[2], Z / r), $ = D.Lerp(V[1], V[2], Z / r), j = r === Z ? V[2] : D.Lerp(ie, $, ee / (r - Z)); j.normalize(); let J; if (i) { const Ie = D.Lerp(V[0], V[2], le / r), ye = D.Lerp(V[1], V[2], le / r); J = D.Lerp(Ie, ye, q / (r - le)); } else J = new D(j.x, j.y, j.z); J.x /= s, J.y /= n, J.z /= a, J.normalize(); const ne = at.Lerp(k[0], k[2], Z / r), pe = at.Lerp(k[1], k[2], Z / r), ge = r === Z ? k[2] : at.Lerp(ne, pe, ee / (r - Z)); S.push(j.x * s, j.y * n, j.z * a), M.push(J.x, J.y, J.z), R.push(ge.x, hn.UseOpenGLOrientationForUV ? 
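/*
 * Hedged sketch of the ico sphere options consumed by this builder (radius, radiusX/Y/Z,
 * flat, subdivisions); MeshBuilder.CreateIcoSphere is the usual public entry point.
 * const ico = BABYLON.MeshBuilder.CreateIcoSphere("ico", { radius: 2, subdivisions: 4, flat: true }, scene);
 */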
1 - ge.y : ge.y), b.push(w), w++; }; for (let ee = 0; ee < r; ee++) for (let Z = 0; Z + ee < r; Z++) K(Z, ee, Z + 1 / 3, ee + 1 / 3), K(Z + 1, ee, Z + 1 / 3, ee + 1 / 3), K(Z, ee + 1, Z + 1 / 3, ee + 1 / 3), Z + ee + 1 < r && (K(Z + 1, ee, Z + 2 / 3, ee + 2 / 3), K(Z + 1, ee + 1, Z + 2 / 3, ee + 2 / 3), K(Z, ee + 1, Z + 2 / 3, ee + 2 / 3)); } Ot._ComputeSides(e, S, b, M, R, c.frontUVs, c.backUVs); const B = new Ot(); return B.indices = b, B.positions = S, B.normals = M, B.uvs = R, B; } function GL(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, ZB(e).applyToMesh(i, e.updatable), i; } const Gce = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateIcoSphere: GL }; Ot.CreateIcoSphere = ZB; ke.CreateIcoSphere = (c, e, t) => GL(c, e, t); var YE; (function(c) { c.WRIST = "wrist", c.THUMB = "thumb", c.INDEX = "index", c.MIDDLE = "middle", c.RING = "ring", c.LITTLE = "little"; })(YE || (YE = {})); var gr; (function(c) { c.WRIST = "wrist", c.THUMB_METACARPAL = "thumb-metacarpal", c.THUMB_PHALANX_PROXIMAL = "thumb-phalanx-proximal", c.THUMB_PHALANX_DISTAL = "thumb-phalanx-distal", c.THUMB_TIP = "thumb-tip", c.INDEX_FINGER_METACARPAL = "index-finger-metacarpal", c.INDEX_FINGER_PHALANX_PROXIMAL = "index-finger-phalanx-proximal", c.INDEX_FINGER_PHALANX_INTERMEDIATE = "index-finger-phalanx-intermediate", c.INDEX_FINGER_PHALANX_DISTAL = "index-finger-phalanx-distal", c.INDEX_FINGER_TIP = "index-finger-tip", c.MIDDLE_FINGER_METACARPAL = "middle-finger-metacarpal", c.MIDDLE_FINGER_PHALANX_PROXIMAL = "middle-finger-phalanx-proximal", c.MIDDLE_FINGER_PHALANX_INTERMEDIATE = "middle-finger-phalanx-intermediate", c.MIDDLE_FINGER_PHALANX_DISTAL = "middle-finger-phalanx-distal", c.MIDDLE_FINGER_TIP = "middle-finger-tip", c.RING_FINGER_METACARPAL = "ring-finger-metacarpal", c.RING_FINGER_PHALANX_PROXIMAL = "ring-finger-phalanx-proximal", c.RING_FINGER_PHALANX_INTERMEDIATE = "ring-finger-phalanx-intermediate", c.RING_FINGER_PHALANX_DISTAL = "ring-finger-phalanx-distal", c.RING_FINGER_TIP = "ring-finger-tip", c.PINKY_FINGER_METACARPAL = "pinky-finger-metacarpal", c.PINKY_FINGER_PHALANX_PROXIMAL = "pinky-finger-phalanx-proximal", c.PINKY_FINGER_PHALANX_INTERMEDIATE = "pinky-finger-phalanx-intermediate", c.PINKY_FINGER_PHALANX_DISTAL = "pinky-finger-phalanx-distal", c.PINKY_FINGER_TIP = "pinky-finger-tip"; })(gr || (gr = {})); const VA = [ gr.WRIST, gr.THUMB_METACARPAL, gr.THUMB_PHALANX_PROXIMAL, gr.THUMB_PHALANX_DISTAL, gr.THUMB_TIP, gr.INDEX_FINGER_METACARPAL, gr.INDEX_FINGER_PHALANX_PROXIMAL, gr.INDEX_FINGER_PHALANX_INTERMEDIATE, gr.INDEX_FINGER_PHALANX_DISTAL, gr.INDEX_FINGER_TIP, gr.MIDDLE_FINGER_METACARPAL, gr.MIDDLE_FINGER_PHALANX_PROXIMAL, gr.MIDDLE_FINGER_PHALANX_INTERMEDIATE, gr.MIDDLE_FINGER_PHALANX_DISTAL, gr.MIDDLE_FINGER_TIP, gr.RING_FINGER_METACARPAL, gr.RING_FINGER_PHALANX_PROXIMAL, gr.RING_FINGER_PHALANX_INTERMEDIATE, gr.RING_FINGER_PHALANX_DISTAL, gr.RING_FINGER_TIP, gr.PINKY_FINGER_METACARPAL, gr.PINKY_FINGER_PHALANX_PROXIMAL, gr.PINKY_FINGER_PHALANX_INTERMEDIATE, gr.PINKY_FINGER_PHALANX_DISTAL, gr.PINKY_FINGER_TIP ], Kce = { [YE.WRIST]: [gr.WRIST], [YE.THUMB]: [gr.THUMB_METACARPAL, gr.THUMB_PHALANX_PROXIMAL, gr.THUMB_PHALANX_DISTAL, gr.THUMB_TIP], [YE.INDEX]: [ gr.INDEX_FINGER_METACARPAL, gr.INDEX_FINGER_PHALANX_PROXIMAL, gr.INDEX_FINGER_PHALANX_INTERMEDIATE, gr.INDEX_FINGER_PHALANX_DISTAL, gr.INDEX_FINGER_TIP ], [YE.MIDDLE]: [ 
gr.MIDDLE_FINGER_METACARPAL, gr.MIDDLE_FINGER_PHALANX_PROXIMAL, gr.MIDDLE_FINGER_PHALANX_INTERMEDIATE, gr.MIDDLE_FINGER_PHALANX_DISTAL, gr.MIDDLE_FINGER_TIP ], [YE.RING]: [ gr.RING_FINGER_METACARPAL, gr.RING_FINGER_PHALANX_PROXIMAL, gr.RING_FINGER_PHALANX_INTERMEDIATE, gr.RING_FINGER_PHALANX_DISTAL, gr.RING_FINGER_TIP ], [YE.LITTLE]: [ gr.PINKY_FINGER_METACARPAL, gr.PINKY_FINGER_PHALANX_PROXIMAL, gr.PINKY_FINGER_PHALANX_INTERMEDIATE, gr.PINKY_FINGER_PHALANX_DISTAL, gr.PINKY_FINGER_TIP ] }; class Kte { /** * Get the hand mesh. */ get handMesh() { return this._handMesh; } /** * Get meshes of part of the hand. * @param part The part of hand to get. * @returns An array of meshes that correlate to the hand part requested. */ getHandPartMeshes(e) { return Kce[e].map((t) => this._jointMeshes[VA.indexOf(t)]); } /** * Retrieves a mesh linked to a named joint in the hand. * @param jointName The name of the joint. * @returns An AbstractMesh whose position corresponds with the joint position. */ getJointMesh(e) { return this._jointMeshes[VA.indexOf(e)]; } /** * Construct a new hand object * @param xrController The controller to which the hand correlates. * @param _jointMeshes The meshes to be used to track the hand joints. * @param _handMesh An optional hand mesh. * @param rigMapping An optional rig mapping for the hand mesh. * If not provided (but a hand mesh is provided), * it will be assumed that the hand mesh's bones are named * directly after the WebXR bone names. * @param _leftHandedMeshes Are the hand meshes left-handed-system meshes * @param _jointsInvisible Are the tracked joint meshes visible * @param _jointScaleFactor Scale factor for all joint meshes */ constructor(e, t, i, r, s = !1, n = !1, a = 1) { this.xrController = e, this._jointMeshes = t, this._handMesh = i, this.rigMapping = r, this._leftHandedMeshes = s, this._jointsInvisible = n, this._jointScaleFactor = a, this._jointTransforms = new Array(VA.length), this._jointTransformMatrices = new Float32Array(VA.length * 16), this._tempJointMatrix = new Ae(), this._jointRadii = new Float32Array(VA.length), this._scene = t[0].getScene(); for (let l = 0; l < this._jointTransforms.length; l++) { const o = this._jointTransforms[l] = new xi(VA[l], this._scene); o.rotationQuaternion = new Ze(), t[l].rotationQuaternion = new Ze(); } i && this.setHandMesh(i, r), this.xrController.motionController && (this.xrController.motionController.rootMesh ? this.xrController.motionController.rootMesh.setEnabled(!1) : this.xrController.motionController.onModelLoadedObservable.add((l) => { l.rootMesh && l.rootMesh.setEnabled(!1); })), this.xrController.onMotionControllerInitObservable.add((l) => { l.onModelLoadedObservable.add((o) => { o.rootMesh && o.rootMesh.setEnabled(!1); }), l.rootMesh && l.rootMesh.setEnabled(!1); }); } /** * Sets the current hand mesh to render for the WebXRHand. * @param handMesh The rigged hand mesh that will be tracked to the user's hand. * @param rigMapping The mapping from XRHandJoint to bone names to use with the mesh. */ setHandMesh(e, t) { if (this._handMesh = e, e.alwaysSelectAsActiveMesh = !0, e.getChildMeshes().forEach((i) => i.alwaysSelectAsActiveMesh = !0), this._handMesh.skeleton) { const i = this._handMesh.skeleton; VA.forEach((r, s) => { const n = i.getBoneIndexByName(t ? t[r] : r); n !== -1 && i.bones[n].linkTransformNode(this._jointTransforms[s]); }); } } /** * Update this hand from the latest xr frame. * @param xrFrame The latest frame received from WebXR. * @param referenceSpace The current viewer reference space. 
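* @example
* // Hedged sketch: reading a tracked joint from a WebXRHand instance ("hand" is illustrative);
* // joint names follow the WebXR Hand Input module, e.g. "index-finger-tip".
* const tip = hand.getJointMesh("index-finger-tip");
* console.log(tip.position);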
*/ updateFromXRFrame(e, t) { const i = this.xrController.inputSource.hand; if (!i) return; const r = i, s = VA.map((a) => r[a] || i.get(a)); let n = !1; if (e.fillPoses && e.fillJointRadii) n = e.fillPoses(s, t, this._jointTransformMatrices) && e.fillJointRadii(s, this._jointRadii); else if (e.getJointPose) { n = !0; for (let a = 0; a < s.length; a++) { const l = e.getJointPose(s[a], t); if (l) this._jointTransformMatrices.set(l.transform.matrix, a * 16), this._jointRadii[a] = l.radius || 8e-3; else { n = !1; break; } } } n && (VA.forEach((a, l) => { const o = this._jointTransforms[l]; Ae.FromArrayToRef(this._jointTransformMatrices, l * 16, this._tempJointMatrix), this._tempJointMatrix.decompose(void 0, o.rotationQuaternion, o.position); const u = this._jointRadii[l] * this._jointScaleFactor, h = this._jointMeshes[l]; h.isVisible = !this._handMesh && !this._jointsInvisible, h.position.copyFrom(o.position), h.rotationQuaternion.copyFrom(o.rotationQuaternion), h.scaling.setAll(u), this._scene.useRightHandedSystem || (h.position.z *= -1, h.rotationQuaternion.z *= -1, h.rotationQuaternion.w *= -1, this._leftHandedMeshes && this._handMesh && (o.position.z *= -1, o.rotationQuaternion.z *= -1, o.rotationQuaternion.w *= -1)); }), this._handMesh && (this._handMesh.isVisible = !0)); } /** * Dispose this Hand object */ dispose() { this._handMesh && (this._handMesh.isVisible = !1); } } class da extends Ku { static _GenerateTrackedJointMeshes(e) { const t = {}; return ["left", "right"].map((i) => { var r, s, n, a, l; const o = [], u = ((r = e.jointMeshes) === null || r === void 0 ? void 0 : r.sourceMesh) || GL("jointParent", da._ICOSPHERE_PARAMS); u.isVisible = !!(!((s = e.jointMeshes) === null || s === void 0) && s.keepOriginalVisible); for (let h = 0; h < VA.length; ++h) { let d = u.createInstance(`${i}-handJoint-${h}`); if (!((n = e.jointMeshes) === null || n === void 0) && n.onHandJointMeshGenerated) { const f = e.jointMeshes.onHandJointMeshGenerated(d, h, i); f && f !== d && (d.dispose(), d = f); } if (d.isPickable = !1, !((a = e.jointMeshes) === null || a === void 0) && a.enablePhysics) { const f = ((l = e.jointMeshes) === null || l === void 0 ? void 0 : l.physicsProps) || {}; d.scaling.setAll(0.02); const p = f.impostorType !== void 0 ? f.impostorType : tr.SphereImpostor; d.physicsImpostor = new tr(d, p, Object.assign({ mass: 0 }, f)); } d.rotationQuaternion = new Ze(), d.isVisible = !1, o.push(d); } t[i] = o; }), { left: t.left, right: t.right }; } static _GenerateDefaultHandMeshesAsync(e, t) { return new Promise(async (i) => { var r, s, n, a, l; const o = {}; !((s = (r = da._RightHandGLB) === null || r === void 0 ? void 0 : r.meshes[1]) === null || s === void 0) && s.isDisposed() && (da._RightHandGLB = null), !((a = (n = da._LeftHandGLB) === null || n === void 0 ? 
void 0 : n.meshes[1]) === null || a === void 0) && a.isDisposed() && (da._LeftHandGLB = null); const u = !!(da._RightHandGLB && da._LeftHandGLB), h = await Promise.all([ da._RightHandGLB || fr.ImportMeshAsync("", da.DEFAULT_HAND_MODEL_BASE_URL, da.DEFAULT_HAND_MODEL_RIGHT_FILENAME, e), da._LeftHandGLB || fr.ImportMeshAsync("", da.DEFAULT_HAND_MODEL_BASE_URL, da.DEFAULT_HAND_MODEL_LEFT_FILENAME, e) ]); da._RightHandGLB = h[0], da._LeftHandGLB = h[1]; const d = new Ta("handShader", e, { emitComments: !1 }); await d.loadAsync(da.DEFAULT_HAND_MODEL_SHADER_URL), d.needDepthPrePass = !0, d.transparencyMode = At.MATERIAL_ALPHABLEND, d.alphaMode = 2, d.build(!1); const f = Object.assign({ base: ze.FromInts(116, 63, 203), fresnel: ze.FromInts(149, 102, 229), fingerColor: ze.FromInts(177, 130, 255), tipFresnel: ze.FromInts(220, 200, 255) }, (l = t == null ? void 0 : t.handMeshes) === null || l === void 0 ? void 0 : l.customColors), p = { base: d.getBlockByName("baseColor"), fresnel: d.getBlockByName("fresnelColor"), fingerColor: d.getBlockByName("fingerColor"), tipFresnel: d.getBlockByName("tipFresnelColor") }; p.base.value = f.base, p.fresnel.value = f.fresnel, p.fingerColor.value = f.fingerColor, p.tipFresnel.value = f.tipFresnel, ["left", "right"].forEach((m) => { const _ = m == "left" ? da._LeftHandGLB : da._RightHandGLB; if (!_) throw new Error("Could not load hand model"); const v = _.meshes[1]; v._internalAbstractMeshDataInfo._computeBonesUsingShaders = !0, v.material = d.clone(`${m}HandShaderClone`, !0), v.isVisible = !1, o[m] = v, !u && !e.useRightHandedSystem && _.meshes[1].rotate(bl.Y, Math.PI); }), d.dispose(), i({ left: o.left, right: o.right }); }); } /** * Generates a mapping from XRHandJoint to bone name for the default hand mesh. * @param handedness The handedness being mapped for. */ static _GenerateDefaultHandMeshRigMapping(e) { const t = e == "right" ? "R" : "L"; return { [gr.WRIST]: `wrist_${t}`, [gr.THUMB_METACARPAL]: `thumb_metacarpal_${t}`, [gr.THUMB_PHALANX_PROXIMAL]: `thumb_proxPhalanx_${t}`, [gr.THUMB_PHALANX_DISTAL]: `thumb_distPhalanx_${t}`, [gr.THUMB_TIP]: `thumb_tip_${t}`, [gr.INDEX_FINGER_METACARPAL]: `index_metacarpal_${t}`, [gr.INDEX_FINGER_PHALANX_PROXIMAL]: `index_proxPhalanx_${t}`, [gr.INDEX_FINGER_PHALANX_INTERMEDIATE]: `index_intPhalanx_${t}`, [gr.INDEX_FINGER_PHALANX_DISTAL]: `index_distPhalanx_${t}`, [gr.INDEX_FINGER_TIP]: `index_tip_${t}`, [gr.MIDDLE_FINGER_METACARPAL]: `middle_metacarpal_${t}`, [gr.MIDDLE_FINGER_PHALANX_PROXIMAL]: `middle_proxPhalanx_${t}`, [gr.MIDDLE_FINGER_PHALANX_INTERMEDIATE]: `middle_intPhalanx_${t}`, [gr.MIDDLE_FINGER_PHALANX_DISTAL]: `middle_distPhalanx_${t}`, [gr.MIDDLE_FINGER_TIP]: `middle_tip_${t}`, [gr.RING_FINGER_METACARPAL]: `ring_metacarpal_${t}`, [gr.RING_FINGER_PHALANX_PROXIMAL]: `ring_proxPhalanx_${t}`, [gr.RING_FINGER_PHALANX_INTERMEDIATE]: `ring_intPhalanx_${t}`, [gr.RING_FINGER_PHALANX_DISTAL]: `ring_distPhalanx_${t}`, [gr.RING_FINGER_TIP]: `ring_tip_${t}`, [gr.PINKY_FINGER_METACARPAL]: `little_metacarpal_${t}`, [gr.PINKY_FINGER_PHALANX_PROXIMAL]: `little_proxPhalanx_${t}`, [gr.PINKY_FINGER_PHALANX_INTERMEDIATE]: `little_intPhalanx_${t}`, [gr.PINKY_FINGER_PHALANX_DISTAL]: `little_distPhalanx_${t}`, [gr.PINKY_FINGER_TIP]: `little_tip_${t}` }; } /** * Check if the needed objects are defined. * This does not mean that the feature is enabled, but that the objects needed are well defined. 
*/ isCompatible() { return typeof XRHand < "u"; } /** * Get the hand object according to the controller id * @param controllerId the controller id to which we want to get the hand * @returns null if not found or the WebXRHand object if found */ getHandByControllerId(e) { return this._attachedHands[e]; } /** * Get a hand object according to the requested handedness * @param handedness the handedness to request * @returns null if not found or the WebXRHand object if found */ getHandByHandedness(e) { return e == "none" ? null : this._trackingHands[e]; } /** * Creates a new instance of the XR hand tracking feature. * @param _xrSessionManager An instance of WebXRSessionManager. * @param options Options to use when constructing this feature. */ constructor(e, t) { super(e), this.options = t, this._attachedHands = {}, this._trackingHands = { left: null, right: null }, this._handResources = { jointMeshes: null, handMeshes: null, rigMappings: null }, this.onHandAddedObservable = new Fe(), this.onHandRemovedObservable = new Fe(), this._attachHand = (s) => { var n, a, l; if (!s.inputSource.hand || s.inputSource.handedness == "none" || !this._handResources.jointMeshes) return; const o = s.inputSource.handedness, u = new Kte(s, this._handResources.jointMeshes[o], this._handResources.handMeshes && this._handResources.handMeshes[o], this._handResources.rigMappings && this._handResources.rigMappings[o], (n = this.options.handMeshes) === null || n === void 0 ? void 0 : n.meshesUseLeftHandedCoordinates, (a = this.options.jointMeshes) === null || a === void 0 ? void 0 : a.invisible, (l = this.options.jointMeshes) === null || l === void 0 ? void 0 : l.scaleFactor); this._attachedHands[s.uniqueId] = u, this._trackingHands[o] = u, this.onHandAddedObservable.notifyObservers(u); }, this._detachHand = (s) => { this._detachHandById(s.uniqueId); }, this.xrNativeFeatureName = "hand-tracking"; const r = t.jointMeshes; if (r && (typeof r.disableDefaultHandMesh < "u" && (t.handMeshes = t.handMeshes || {}, t.handMeshes.disableDefaultMeshes = r.disableDefaultHandMesh), typeof r.handMeshes < "u" && (t.handMeshes = t.handMeshes || {}, t.handMeshes.customMeshes = r.handMeshes), typeof r.leftHandedSystemMeshes < "u" && (t.handMeshes = t.handMeshes || {}, t.handMeshes.meshesUseLeftHandedCoordinates = r.leftHandedSystemMeshes), typeof r.rigMapping < "u")) { t.handMeshes = t.handMeshes || {}; const s = {}, n = {}; [ [r.rigMapping.left, s], [r.rigMapping.right, n] ].forEach((a) => { const l = a[0], o = a[1]; l.forEach((u, h) => { o[VA[h]] = u; }); }), t.handMeshes.customRigMappings = { left: s, right: n }; } } /** * Attach this feature. * Will usually be called by the features manager. * * @returns true if successful. */ attach() { var e, t, i, r; return super.attach() ? (this._handResources = { jointMeshes: da._GenerateTrackedJointMeshes(this.options), handMeshes: ((e = this.options.handMeshes) === null || e === void 0 ? void 0 : e.customMeshes) || null, rigMappings: ((t = this.options.handMeshes) === null || t === void 0 ? 
void 0 : t.customRigMappings) || null }, !(!((i = this.options.handMeshes) === null || i === void 0) && i.customMeshes) && !(!((r = this.options.handMeshes) === null || r === void 0) && r.disableDefaultMeshes) && da._GenerateDefaultHandMeshesAsync(gi.LastCreatedScene, this.options).then((s) => { var n, a; this._handResources.handMeshes = s, this._handResources.rigMappings = { left: da._GenerateDefaultHandMeshRigMapping("left"), right: da._GenerateDefaultHandMeshRigMapping("right") }, (n = this._trackingHands.left) === null || n === void 0 || n.setHandMesh(this._handResources.handMeshes.left, this._handResources.rigMappings.left), (a = this._trackingHands.right) === null || a === void 0 || a.setHandMesh(this._handResources.handMeshes.right, this._handResources.rigMappings.right); }), this.options.xrInput.controllers.forEach(this._attachHand), this._addNewAttachObserver(this.options.xrInput.onControllerAddedObservable, this._attachHand), this._addNewAttachObserver(this.options.xrInput.onControllerRemovedObservable, this._detachHand), !0) : !1; } _onXRFrame(e) { var t, i; (t = this._trackingHands.left) === null || t === void 0 || t.updateFromXRFrame(e, this._xrSessionManager.referenceSpace), (i = this._trackingHands.right) === null || i === void 0 || i.updateFromXRFrame(e, this._xrSessionManager.referenceSpace); } _detachHandById(e) { var t; const i = this.getHandByControllerId(e); if (i) { const r = i.xrController.inputSource.handedness == "left" ? "left" : "right"; ((t = this._trackingHands[r]) === null || t === void 0 ? void 0 : t.xrController.uniqueId) === e && (this._trackingHands[r] = null), this.onHandRemovedObservable.notifyObservers(i), i.dispose(), delete this._attachedHands[e]; } } /** * Detach this feature. * Will usually be called by the features manager. * * @returns true if successful. */ detach() { return super.detach() ? (Object.keys(this._attachedHands).forEach((e) => this._detachHandById(e)), !0) : !1; } /** * Dispose this feature and all of the resources attached. 
*/ dispose() { var e; super.dispose(), this.onHandAddedObservable.clear(), this.onHandRemovedObservable.clear(), this._handResources.handMeshes && !(!((e = this.options.handMeshes) === null || e === void 0) && e.customMeshes) && (this._handResources.handMeshes.left.dispose(), this._handResources.handMeshes.right.dispose(), da._RightHandGLB = null, da._LeftHandGLB = null), this._handResources.jointMeshes && (this._handResources.jointMeshes.left.forEach((t) => t.dispose()), this._handResources.jointMeshes.right.forEach((t) => t.dispose())); } } da.Name = Qs.HAND_TRACKING; da.Version = 1; da.DEFAULT_HAND_MODEL_BASE_URL = "https://assets.babylonjs.com/meshes/HandMeshes/"; da.DEFAULT_HAND_MODEL_RIGHT_FILENAME = "r_hand_rhs.glb"; da.DEFAULT_HAND_MODEL_LEFT_FILENAME = "l_hand_rhs.glb"; da.DEFAULT_HAND_MODEL_SHADER_URL = "https://assets.babylonjs.com/meshes/HandMeshes/handsShader.json"; da._ICOSPHERE_PARAMS = { radius: 0.5, flat: !1, subdivisions: 2 }; da._RightHandGLB = null; da._LeftHandGLB = null; Oo.AddWebXRFeature(da.Name, (c, e) => () => new da(c, e), da.Version, !1); var j8; (function(c) { c[c.ABOVE_FINGER_TIPS = 0] = "ABOVE_FINGER_TIPS", c[c.RADIAL_SIDE = 1] = "RADIAL_SIDE", c[c.ULNAR_SIDE = 2] = "ULNAR_SIDE", c[c.BELOW_WRIST = 3] = "BELOW_WRIST"; })(j8 || (j8 = {})); var QE; (function(c) { c[c.LOOK_AT_CAMERA = 0] = "LOOK_AT_CAMERA", c[c.HAND_ROTATION = 1] = "HAND_ROTATION"; })(QE || (QE = {})); var X8; (function(c) { c[c.ALWAYS_VISIBLE = 0] = "ALWAYS_VISIBLE", c[c.PALM_UP = 1] = "PALM_UP", c[c.GAZE_FOCUS = 2] = "GAZE_FOCUS", c[c.PALM_AND_GAZE = 3] = "PALM_AND_GAZE"; })(X8 || (X8 = {})); class Wce { /** * Builds a hand constraint behavior */ constructor() { this._sceneRenderObserver = null, this._zoneAxis = {}, this.handConstraintVisibility = X8.PALM_AND_GAZE, this.palmUpStrictness = 0.95, this.gazeProximityRadius = 0.15, this.targetOffset = 0.1, this.targetZone = j8.ULNAR_SIDE, this.zoneOrientationMode = QE.HAND_ROTATION, this.nodeOrientationMode = QE.HAND_ROTATION, this.handedness = "none", this.lerpTime = 100, this._zoneAxis[j8.ABOVE_FINGER_TIPS] = new D(0, 1, 0), this._zoneAxis[j8.RADIAL_SIDE] = new D(-1, 0, 0), this._zoneAxis[j8.ULNAR_SIDE] = new D(1, 0, 0), this._zoneAxis[j8.BELOW_WRIST] = new D(0, -1, 0); } /** gets or sets behavior's name */ get name() { return "HandConstraint"; } /** Enable the behavior */ enable() { this._node.setEnabled(!0); } /** Disable the behavior */ disable() { this._node.setEnabled(!1); } _getHandPose() { if (!this._handTracking) return null; let e; if (this.handedness === "none" ? 
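// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The feature class registered just above as Qs.HAND_TRACKING corresponds to
// Babylon.js's WebXR hand-tracking feature. Assuming the standard public names
// (the bundle only exposes minified identifiers), enabling it typically looks
// like this; scene is a placeholder for an existing Babylon scene:
//
//   const xr = await scene.createDefaultXRExperienceAsync();
//   const handTracking = xr.baseExperience.featuresManager.enableFeature(
//     BABYLON.WebXRFeatureName.HAND_TRACKING, "latest", { xrInput: xr.input });
//   handTracking.onHandAddedObservable.add((hand) => {
//     const indexTip = hand.getJointMesh("index-finger-tip");
//     console.log("hand attached", hand.xrController.uniqueId, indexTip.position);
//   });
//
// "index-finger-tip" is a WebXR joint name; getJointMesh, getHandPartMeshes and
// the onHandAdded/onHandRemoved observables match the members documented above.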
e = this._handTracking.getHandByHandedness("left") || this._handTracking.getHandByHandedness("right") : e = this._handTracking.getHandByHandedness(this.handedness), e) { const t = e.getJointMesh(gr.PINKY_FINGER_METACARPAL), i = e.getJointMesh(gr.MIDDLE_FINGER_METACARPAL), r = e.getJointMesh(gr.WRIST); if (r && i && t) { const s = { position: i.absolutePosition, quaternion: new Ze(), id: e.xrController.uniqueId }, n = de.Vector3[0], a = de.Vector3[1], l = de.Vector3[2]; return n.copyFrom(i.absolutePosition).subtractInPlace(r.absolutePosition).normalize(), a.copyFrom(t.absolutePosition).subtractInPlace(i.absolutePosition).normalize(), D.CrossToRef(n, a, a), D.CrossToRef(a, n, l), Ze.FromLookDirectionLHToRef(a, n, s.quaternion), s; } } return null; } /** * Initializes the hand constraint behavior */ init() { } /** * Attaches the hand constraint to a `TransformNode` * @param node defines the node to attach the behavior to */ attach(e) { this._node = e, this._scene = e.getScene(), this._node.rotationQuaternion || (this._node.rotationQuaternion = Ze.RotationYawPitchRoll(this._node.rotation.y, this._node.rotation.x, this._node.rotation.z)); let t = Date.now(); this._sceneRenderObserver = this._scene.onBeforeRenderObservable.add(() => { const i = this._getHandPose(); if (this._node.reservedDataStore = this._node.reservedDataStore || {}, this._node.reservedDataStore.nearInteraction = this._node.reservedDataStore.nearInteraction || {}, this._node.reservedDataStore.nearInteraction.excludedControllerId = null, i) { const r = de.Vector3[0], s = this._scene.activeCamera; r.copyFrom(this._zoneAxis[this.targetZone]); const n = de.Quaternion[0]; if (s && (this.zoneOrientationMode === QE.LOOK_AT_CAMERA || this.nodeOrientationMode === QE.LOOK_AT_CAMERA)) { const u = de.Vector3[1]; u.copyFrom(s.position).subtractInPlace(i.position).normalize(), this._scene.useRightHandedSystem ? Ze.FromLookDirectionRHToRef(u, D.UpReadOnly, n) : Ze.FromLookDirectionLHToRef(u, D.UpReadOnly, n); } this.zoneOrientationMode === QE.HAND_ROTATION ? i.quaternion.toRotationMatrix(de.Matrix[0]) : n.toRotationMatrix(de.Matrix[0]), D.TransformNormalToRef(r, de.Matrix[0], r), r.scaleInPlace(this.targetOffset); const a = de.Vector3[2], l = de.Quaternion[1]; a.copyFrom(i.position).addInPlace(r), this.nodeOrientationMode === QE.HAND_ROTATION ? l.copyFrom(i.quaternion) : l.copyFrom(n); const o = Date.now() - t; D.SmoothToRef(this._node.position, a, o, this.lerpTime, this._node.position), Ze.SmoothToRef(this._node.rotationQuaternion, l, o, this.lerpTime, this._node.rotationQuaternion), this._node.reservedDataStore.nearInteraction.excludedControllerId = i.id; } this._setVisibility(i), t = Date.now(); }); } _setVisibility(e) { let t = !0, i = !0; const r = this._scene.activeCamera; if (r) { const s = r.getForwardRay(); if (this.handConstraintVisibility === X8.GAZE_FOCUS || this.handConstraintVisibility === X8.PALM_AND_GAZE) { i = !1; let n; this._eyeTracking && (n = this._eyeTracking.getEyeGaze()), n = n || s; const a = de.Vector3[0]; e ? 
e.position.subtractToRef(n.origin, a) : this._node.getAbsolutePosition().subtractToRef(n.origin, a); const l = D.Dot(a, n.direction), o = l * l; l > 0 && a.lengthSquared() - o < this.gazeProximityRadius * this.gazeProximityRadius && (i = !0); } if ((this.handConstraintVisibility === X8.PALM_UP || this.handConstraintVisibility === X8.PALM_AND_GAZE) && (t = !1, e)) { const n = de.Vector3[0]; D.LeftHandedForwardReadOnly.rotateByQuaternionToRef(e.quaternion, n), D.Dot(n, s.direction) > this.palmUpStrictness * 2 - 1 && (t = !0); } } this._node.setEnabled(t && i); } /** * Detaches the behavior from the `TransformNode` */ detach() { this._scene.onBeforeRenderObservable.remove(this._sceneRenderObserver); } /** * Links the behavior to the XR experience in which to retrieve hand transform information. * @param xr xr experience */ linkToXRExperience(e) { const t = e.featuresManager ? e.featuresManager : e; if (!t) Ve.Error("XR features manager must be available or provided directly for the Hand Menu to work"); else { try { this._eyeTracking = t.getEnabledFeature(Qs.EYE_TRACKING); } catch { } try { this._handTracking = t.getEnabledFeature(Qs.HAND_TRACKING); } catch { Ve.Error("Hand tracking must be enabled for the Hand Menu to work"); } } } } class pm { /** * Gets or sets maximum allowed angle */ get maxAngle() { return this._maxAngle; } set maxAngle(e) { this._setMaxAngle(e); } /** * Creates a new BoneIKController * @param mesh defines the TransformNode to control * @param bone defines the bone to control. The bone needs to have a parent bone. It also needs to have a length greater than 0 or a children we can use to infer its length. * @param options defines options to set up the controller * @param options.targetMesh * @param options.poleTargetMesh * @param options.poleTargetBone * @param options.poleTargetLocalOffset * @param options.poleAngle * @param options.bendAxis * @param options.maxAngle * @param options.slerpAmount */ constructor(e, t, i) { this.targetPosition = D.Zero(), this.poleTargetPosition = D.Zero(), this.poleTargetLocalOffset = D.Zero(), this.poleAngle = 0, this.slerpAmount = 1, this._bone1Quat = Ze.Identity(), this._bone1Mat = Ae.Identity(), this._bone2Ang = Math.PI, this._maxAngle = Math.PI, this._rightHandedSystem = !1, this._bendAxis = D.Right(), this._slerping = !1, this._adjustRoll = 0, this._notEnoughInformation = !1, this._bone2 = t; const r = t.getParent(); if (!r) { this._notEnoughInformation = !0, Ce.Error("BoneIKController: bone must have a parent for IK to work."); return; } if (this._bone1 = r, this._bone2.children.length === 0 && !this._bone2.length) { this._notEnoughInformation = !0, Ce.Error("BoneIKController: bone must not be a leaf or it should have a length for IK to work."); return; } this.mesh = e, t.getSkeleton().computeAbsoluteMatrices(); const s = t.getPosition(); if (t.getAbsoluteMatrix().determinant() > 0 && (this._rightHandedSystem = !0, this._bendAxis.x = 0, this._bendAxis.y = 0, this._bendAxis.z = -1, s.x > s.y && s.x > s.z && (this._adjustRoll = Math.PI * 0.5, this._bendAxis.z = 1)), this._bone1.length && this._bone2.length) { const n = this._bone1.getScale(), a = this._bone2.getScale(); this._bone1Length = this._bone1.length * n.y * this.mesh.scaling.y, this._bone2Length = this._bone2.length * a.y * this.mesh.scaling.y; } else if (this._bone2.children[0]) { e.computeWorldMatrix(!0); const n = this._bone2.children[0].getAbsolutePosition(e), a = this._bone2.getAbsolutePosition(e), l = this._bone1.getAbsolutePosition(e); this._bone2Length = D.Distance(n, 
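// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The behavior class above corresponds to Babylon.js's HandConstraintBehavior.
// Assuming the public names, anchoring a menu node to the tracked hand would
// look roughly like this (menuNode and xr are placeholders):
//
//   const behavior = new BABYLON.HandConstraintBehavior();
//   behavior.handedness = "left";
//   behavior.targetZone = BABYLON.HandConstraintZone.ULNAR_SIDE;
//   behavior.linkToXRExperience(xr.baseExperience);
//   menuNode.addBehavior(behavior);
//
// As the error paths above indicate, linkToXRExperience expects the hand-tracking
// feature (and optionally eye tracking) to already be enabled on the experience.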
a), this._bone1Length = D.Distance(a, l); } else { e.computeWorldMatrix(!0); const n = this._bone2.getScale(); this._bone2Length = this._bone2.length * n.y * this.mesh.scaling.y; const a = this._bone2.getAbsolutePosition(e), l = this._bone1.getAbsolutePosition(e); this._bone1Length = D.Distance(a, l); } this._bone1.getRotationMatrixToRef(qr.WORLD, e, this._bone1Mat), this.maxAngle = Math.PI, i && (i.targetMesh && (this.targetMesh = i.targetMesh, this.targetMesh.computeWorldMatrix(!0)), i.poleTargetMesh ? (this.poleTargetMesh = i.poleTargetMesh, this.poleTargetMesh.computeWorldMatrix(!0)) : i.poleTargetBone ? this.poleTargetBone = i.poleTargetBone : this._bone1.getParent() && (this.poleTargetBone = this._bone1.getParent()), i.poleTargetLocalOffset && this.poleTargetLocalOffset.copyFrom(i.poleTargetLocalOffset), i.poleAngle && (this.poleAngle = i.poleAngle), i.bendAxis && this._bendAxis.copyFrom(i.bendAxis), i.maxAngle && (this.maxAngle = i.maxAngle), i.slerpAmount && (this.slerpAmount = i.slerpAmount)); } _setMaxAngle(e) { e < 0 && (e = 0), (e > Math.PI || e == null) && (e = Math.PI), this._maxAngle = e; const t = this._bone1Length, i = this._bone2Length; this._maxReach = Math.sqrt(t * t + i * i - 2 * t * i * Math.cos(e)); } /** * Force the controller to update the bones */ update() { if (this._notEnoughInformation) return; const e = this.targetPosition, t = this.poleTargetPosition, i = pm._TmpMats[0], r = pm._TmpMats[1]; this.targetMesh && e.copyFrom(this.targetMesh.getAbsolutePosition()), this.poleTargetBone ? this.poleTargetBone.getAbsolutePositionFromLocalToRef(this.poleTargetLocalOffset, this.mesh, t) : this.poleTargetMesh && D.TransformCoordinatesToRef(this.poleTargetLocalOffset, this.poleTargetMesh.getWorldMatrix(), t); const s = pm._TmpVecs[0], n = pm._TmpVecs[1], a = pm._TmpVecs[2], l = pm._TmpVecs[3], o = pm._TmpVecs[4], u = pm._TmpQuat; this._bone1.getAbsolutePositionToRef(this.mesh, s), t.subtractToRef(s, o), o.x == 0 && o.y == 0 && o.z == 0 ? o.y = 1 : o.normalize(), e.subtractToRef(s, l), l.normalize(), D.CrossToRef(l, o, n), n.normalize(), D.CrossToRef(l, n, a), a.normalize(), Ae.FromXYZAxesToRef(a, l, n, i); const h = this._bone1Length, d = this._bone2Length; let f = D.Distance(s, e); this._maxReach > 0 && (f = Math.min(this._maxReach, f)); let p = (d * d + f * f - h * h) / (2 * d * f), m = (f * f + h * h - d * d) / (2 * f * h); p > 1 && (p = 1), m > 1 && (m = 1), p < -1 && (p = -1), m < -1 && (m = -1); const _ = Math.acos(p), v = Math.acos(m); let C = -_ - v; if (this._rightHandedSystem) Ae.RotationYawPitchRollToRef(0, 0, this._adjustRoll, r), r.multiplyToRef(i, i), Ae.RotationAxisToRef(this._bendAxis, v, r), r.multiplyToRef(i, i); else { const x = pm._TmpVecs[5]; x.copyFrom(this._bendAxis), x.x *= -1, Ae.RotationAxisToRef(x, -v, r), r.multiplyToRef(i, i); } this.poleAngle && (Ae.RotationAxisToRef(l, this.poleAngle, r), i.multiplyToRef(r, i)), this._bone1 && (this.slerpAmount < 1 ? 
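// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The class above is Babylon.js's BoneIKController (two-bone IK). Assuming the
// public name, a typical setup drives an arm toward a target mesh every frame;
// characterMesh, forearmBone, targetSphere and poleSphere are placeholders:
//
//   const ik = new BABYLON.BoneIKController(characterMesh, forearmBone, {
//     targetMesh: targetSphere,
//     poleTargetMesh: poleSphere,
//     slerpAmount: 0.5
//   });
//   scene.registerBeforeRender(() => ik.update());
//
// As documented above, the controlled bone must have a parent and either a
// length or a child bone so both segment lengths can be inferred.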
(this._slerping || Ze.FromRotationMatrixToRef(this._bone1Mat, this._bone1Quat), Ze.FromRotationMatrixToRef(i, u), Ze.SlerpToRef(this._bone1Quat, u, this.slerpAmount, this._bone1Quat), C = this._bone2Ang * (1 - this.slerpAmount) + C * this.slerpAmount, this._bone1.setRotationQuaternion(this._bone1Quat, qr.WORLD, this.mesh), this._slerping = !0) : (this._bone1.setRotationMatrix(i, qr.WORLD, this.mesh), this._bone1Mat.copyFrom(i), this._slerping = !1), this._updateLinkedTransformRotation(this._bone1)), this._bone2.setAxisAngle(this._bendAxis, C, qr.LOCAL), this._updateLinkedTransformRotation(this._bone2), this._bone2Ang = C; } _updateLinkedTransformRotation(e) { e._linkedTransformNode && (e._linkedTransformNode.rotationQuaternion || (e._linkedTransformNode.rotationQuaternion = new Ze()), e.getRotationQuaternionToRef(qr.LOCAL, null, e._linkedTransformNode.rotationQuaternion)); } } pm._TmpVecs = [D.Zero(), D.Zero(), D.Zero(), D.Zero(), D.Zero(), D.Zero()]; pm._TmpQuat = Ze.Identity(); pm._TmpMats = [Ae.Identity(), Ae.Identity()]; class Ed { /** * Gets or sets the minimum yaw angle that the bone can look to */ get minYaw() { return this._minYaw; } set minYaw(e) { this._minYaw = e, this._minYawSin = Math.sin(e), this._minYawCos = Math.cos(e), this._maxYaw != null && (this._midYawConstraint = this._getAngleDiff(this._minYaw, this._maxYaw) * 0.5 + this._minYaw, this._yawRange = this._maxYaw - this._minYaw); } /** * Gets or sets the maximum yaw angle that the bone can look to */ get maxYaw() { return this._maxYaw; } set maxYaw(e) { this._maxYaw = e, this._maxYawSin = Math.sin(e), this._maxYawCos = Math.cos(e), this._minYaw != null && (this._midYawConstraint = this._getAngleDiff(this._minYaw, this._maxYaw) * 0.5 + this._minYaw, this._yawRange = this._maxYaw - this._minYaw); } /** * Gets or sets the minimum pitch angle that the bone can look to */ get minPitch() { return this._minPitch; } set minPitch(e) { this._minPitch = e, this._minPitchTan = Math.tan(e); } /** * Gets or sets the maximum pitch angle that the bone can look to */ get maxPitch() { return this._maxPitch; } set maxPitch(e) { this._maxPitch = e, this._maxPitchTan = Math.tan(e); } /** * Create a BoneLookController * @param mesh the TransformNode that the bone belongs to * @param bone the bone that will be looking to the target * @param target the target Vector3 to look at * @param options optional settings: * * maxYaw: the maximum angle the bone will yaw to * * minYaw: the minimum angle the bone will yaw to * * maxPitch: the maximum angle the bone will pitch to * * minPitch: the minimum angle the bone will yaw to * * slerpAmount: set the between 0 and 1 to make the bone slerp to the target. * * upAxis: the up axis of the coordinate system * * upAxisSpace: the space that the up axis is in - Space.BONE, Space.LOCAL (default), or Space.WORLD. 
* * yawAxis: set yawAxis if the bone does not yaw on the y axis * * pitchAxis: set pitchAxis if the bone does not pitch on the x axis * * adjustYaw: used to make an adjustment to the yaw of the bone * * adjustPitch: used to make an adjustment to the pitch of the bone * * adjustRoll: used to make an adjustment to the roll of the bone * @param options.maxYaw * @param options.minYaw * @param options.maxPitch * @param options.minPitch * @param options.slerpAmount * @param options.upAxis * @param options.upAxisSpace * @param options.yawAxis * @param options.pitchAxis * @param options.adjustYaw * @param options.adjustPitch * @param options.adjustRoll **/ constructor(e, t, i, r) { if (this.upAxis = D.Up(), this.upAxisSpace = qr.LOCAL, this.adjustYaw = 0, this.adjustPitch = 0, this.adjustRoll = 0, this.slerpAmount = 1, this._boneQuat = Ze.Identity(), this._slerping = !1, this._firstFrameSkipped = !1, this._fowardAxis = D.Forward(), this.useAbsoluteValueForYaw = !1, this.mesh = e, this.bone = t, this.target = i, r) { if (r.adjustYaw && (this.adjustYaw = r.adjustYaw), r.adjustPitch && (this.adjustPitch = r.adjustPitch), r.adjustRoll && (this.adjustRoll = r.adjustRoll), r.maxYaw != null ? this.maxYaw = r.maxYaw : this.maxYaw = Math.PI, r.minYaw != null ? this.minYaw = r.minYaw : this.minYaw = -Math.PI, r.maxPitch != null ? this.maxPitch = r.maxPitch : this.maxPitch = Math.PI, r.minPitch != null ? this.minPitch = r.minPitch : this.minPitch = -Math.PI, r.slerpAmount != null && (this.slerpAmount = r.slerpAmount), r.upAxis != null && (this.upAxis = r.upAxis), r.upAxisSpace != null && (this.upAxisSpace = r.upAxisSpace), r.yawAxis != null || r.pitchAxis != null) { let s = bl.Y, n = bl.X; r.yawAxis != null && (s = r.yawAxis.clone(), s.normalize()), r.pitchAxis != null && (n = r.pitchAxis.clone(), n.normalize()); const a = D.Cross(n, s); this._transformYawPitch = Ae.Identity(), Ae.FromXYZAxesToRef(n, s, a, this._transformYawPitch), this._transformYawPitchInv = this._transformYawPitch.clone(), this._transformYawPitch.invert(); } r.useAbsoluteValueForYaw !== void 0 && (this.useAbsoluteValueForYaw = r.useAbsoluteValueForYaw); } !t.getParent() && this.upAxisSpace == qr.BONE && (this.upAxisSpace = qr.LOCAL); } /** * Update the bone to look at the target. This should be called before the scene is rendered (use scene.registerBeforeRender()) */ update() { if (this.slerpAmount < 1 && !this._firstFrameSkipped) { this._firstFrameSkipped = !0; return; } const e = this.bone, t = Ed._TmpVecs[0]; e.getAbsolutePositionToRef(this.mesh, t); let i = this.target; const r = Ed._TmpMats[0], s = Ed._TmpMats[1], n = this.mesh, a = e.getParent(), l = Ed._TmpVecs[1]; l.copyFrom(this.upAxis), this.upAxisSpace == qr.BONE && a ? 
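// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The class above is Babylon.js's BoneLookController. Assuming the public name,
// making a head bone track a moving target (characterMesh, headBone and
// targetMesh are placeholders) looks roughly like this:
//
//   const look = new BABYLON.BoneLookController(characterMesh, headBone,
//     targetMesh.position,
//     { slerpAmount: 0.3, maxYaw: Math.PI / 3, minYaw: -Math.PI / 3 });
//   scene.registerBeforeRender(() => look.update());
//
// Per the constructor documentation above, yaw and pitch limits default to
// +/- PI, and update() is meant to run before each render
// (registerBeforeRender / onBeforeRenderObservable).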
(this._transformYawPitch && D.TransformCoordinatesToRef(l, this._transformYawPitchInv, l), a.getDirectionToRef(l, this.mesh, l)) : this.upAxisSpace == qr.LOCAL && (n.getDirectionToRef(l, l), (n.scaling.x != 1 || n.scaling.y != 1 || n.scaling.z != 1) && l.normalize()); let o = !1, u = !1; if ((this._maxYaw != Math.PI || this._minYaw != -Math.PI) && (o = !0), (this._maxPitch != Math.PI || this._minPitch != -Math.PI) && (u = !0), o || u) { const m = Ed._TmpMats[2], _ = Ed._TmpMats[3]; if (this.upAxisSpace == qr.BONE && l.y == 1 && a) a.getRotationMatrixToRef(qr.WORLD, this.mesh, m); else if (this.upAxisSpace == qr.LOCAL && l.y == 1 && !a) m.copyFrom(n.getWorldMatrix()); else { let C = Ed._TmpVecs[2]; C.copyFrom(this._fowardAxis), this._transformYawPitch && D.TransformCoordinatesToRef(C, this._transformYawPitchInv, C), a ? a.getDirectionToRef(C, this.mesh, C) : n.getDirectionToRef(C, C); const x = D.Cross(l, C); x.normalize(), C = D.Cross(x, l), Ae.FromXYZAxesToRef(x, l, C, m); } m.invertToRef(_); let v = null; if (u) { const C = Ed._TmpVecs[3]; i.subtractToRef(t, C), D.TransformCoordinatesToRef(C, _, C), v = Math.sqrt(C.x * C.x + C.z * C.z); const x = Math.atan2(C.y, v); let b = x; x > this._maxPitch ? (C.y = this._maxPitchTan * v, b = this._maxPitch) : x < this._minPitch && (C.y = this._minPitchTan * v, b = this._minPitch), x != b && (D.TransformCoordinatesToRef(C, m, C), C.addInPlace(t), i = C); } if (o) { const C = Ed._TmpVecs[4]; i.subtractToRef(t, C), D.TransformCoordinatesToRef(C, _, C); const x = Math.atan2(C.x, C.z), b = this.useAbsoluteValueForYaw ? Math.abs(x) : x; let S = x; if ((b > this._maxYaw || b < this._minYaw) && (v == null && (v = Math.sqrt(C.x * C.x + C.z * C.z)), this._yawRange > Math.PI ? this._isAngleBetween(x, this._maxYaw, this._midYawConstraint) ? (C.z = this._maxYawCos * v, C.x = this._maxYawSin * v, S = this._maxYaw) : this._isAngleBetween(x, this._midYawConstraint, this._minYaw) && (C.z = this._minYawCos * v, C.x = this._minYawSin * v, S = this._minYaw) : b > this._maxYaw ? (C.z = this._maxYawCos * v, C.x = this._maxYawSin * v, x < 0 && this.useAbsoluteValueForYaw && (C.x *= -1), S = this._maxYaw) : b < this._minYaw && (C.z = this._minYawCos * v, C.x = this._minYawSin * v, x < 0 && this.useAbsoluteValueForYaw && (C.x *= -1), S = this._minYaw)), this._slerping && this._yawRange > Math.PI) { const M = Ed._TmpVecs[8]; M.copyFrom(bl.Z), this._transformYawPitch && D.TransformCoordinatesToRef(M, this._transformYawPitchInv, M); const R = Ed._TmpMats[4]; this._boneQuat.toRotationMatrix(R), this.mesh.getWorldMatrix().multiplyToRef(R, R), D.TransformCoordinatesToRef(M, R, M), D.TransformCoordinatesToRef(M, _, M); const w = Math.atan2(M.x, M.z), V = this._getAngleBetween(w, x), k = this._getAngleBetween(w, this._midYawConstraint); if (V > k) { v == null && (v = Math.sqrt(C.x * C.x + C.z * C.z)); const L = this._getAngleBetween(w, this._maxYaw); this._getAngleBetween(w, this._minYaw) < L ? 
(S = w + Math.PI * 0.75, C.z = Math.cos(S) * v, C.x = Math.sin(S) * v) : (S = w - Math.PI * 0.75, C.z = Math.cos(S) * v, C.x = Math.sin(S) * v); } } x != S && (D.TransformCoordinatesToRef(C, m, C), C.addInPlace(t), i = C); } } const h = Ed._TmpVecs[5], d = Ed._TmpVecs[6], f = Ed._TmpVecs[7], p = Ed._TmpQuat; i.subtractToRef(t, h), h.normalize(), D.CrossToRef(l, h, d), d.normalize(), D.CrossToRef(h, d, f), f.normalize(), Ae.FromXYZAxesToRef(d, f, h, r), !(d.x === 0 && d.y === 0 && d.z === 0) && (f.x === 0 && f.y === 0 && f.z === 0 || h.x === 0 && h.y === 0 && h.z === 0 || ((this.adjustYaw || this.adjustPitch || this.adjustRoll) && (Ae.RotationYawPitchRollToRef(this.adjustYaw, this.adjustPitch, this.adjustRoll, s), s.multiplyToRef(r, r)), this.slerpAmount < 1 ? (this._slerping || this.bone.getRotationQuaternionToRef(qr.WORLD, this.mesh, this._boneQuat), this._transformYawPitch && this._transformYawPitch.multiplyToRef(r, r), Ze.FromRotationMatrixToRef(r, p), Ze.SlerpToRef(this._boneQuat, p, this.slerpAmount, this._boneQuat), this.bone.setRotationQuaternion(this._boneQuat, qr.WORLD, this.mesh), this._slerping = !0) : (this._transformYawPitch && this._transformYawPitch.multiplyToRef(r, r), this.bone.setRotationMatrix(r, qr.WORLD, this.mesh), this._slerping = !1), this._updateLinkedTransformRotation())); } _getAngleDiff(e, t) { let i = t - e; return i %= Math.PI * 2, i > Math.PI ? i -= Math.PI * 2 : i < -Math.PI && (i += Math.PI * 2), i; } _getAngleBetween(e, t) { e %= 2 * Math.PI, e = e < 0 ? e + 2 * Math.PI : e, t %= 2 * Math.PI, t = t < 0 ? t + 2 * Math.PI : t; let i = 0; return e < t ? i = t - e : i = e - t, i > Math.PI && (i = Math.PI * 2 - i), i; } _isAngleBetween(e, t, i) { if (e %= 2 * Math.PI, e = e < 0 ? e + 2 * Math.PI : e, t %= 2 * Math.PI, t = t < 0 ? t + 2 * Math.PI : t, i %= 2 * Math.PI, i = i < 0 ? i + 2 * Math.PI : i, t < i) { if (e > t && e < i) return !0; } else if (e > i && e < t) return !0; return !1; } _updateLinkedTransformRotation() { const e = this.bone; e._linkedTransformNode && (e._linkedTransformNode.rotationQuaternion || (e._linkedTransformNode.rotationQuaternion = new Ze()), e.getRotationQuaternionToRef(qr.LOCAL, null, e._linkedTransformNode.rotationQuaternion)); } } Ed._TmpVecs = kc.BuildArray(10, D.Zero); Ed._TmpQuat = Ze.Identity(); Ed._TmpMats = kc.BuildArray(5, Ae.Identity); class sx { /** * Gets or sets a boolean indicating that bone matrices should be stored as a texture instead of using shader uniforms (default is true). * Please note that this option is not available if the hardware does not support it */ get useTextureToStoreBoneMatrices() { return this._useTextureToStoreBoneMatrices; } set useTextureToStoreBoneMatrices(e) { this._useTextureToStoreBoneMatrices = e, this._markAsDirty(); } /** * Gets or sets the animation properties override */ get animationPropertiesOverride() { return this._animationPropertiesOverride ? 
this._animationPropertiesOverride : this._scene.animationPropertiesOverride; } set animationPropertiesOverride(e) { this._animationPropertiesOverride = e; } /** * Gets a boolean indicating that the skeleton effectively stores matrices into a texture */ get isUsingTextureForMatrices() { return this.useTextureToStoreBoneMatrices && this._canUseTextureForBones; } /** * Gets the unique ID of this skeleton */ get uniqueId() { return this._uniqueId; } /** * Creates a new skeleton * @param name defines the skeleton name * @param id defines the skeleton Id * @param scene defines the hosting scene */ constructor(e, t, i) { this.name = e, this.id = t, this.bones = [], this.needInitialSkinMatrix = !1, this._isDirty = !0, this._meshesWithPoseMatrix = new Array(), this._identity = Ae.Identity(), this._currentRenderId = -1, this._ranges = {}, this._absoluteTransformIsDirty = !0, this._canUseTextureForBones = !1, this._uniqueId = 0, this._numBonesWithLinkedTransformNode = 0, this._hasWaitingData = null, this._parentContainer = null, this.doNotSerialize = !1, this._useTextureToStoreBoneMatrices = !0, this._animationPropertiesOverride = null, this.onBeforeComputeObservable = new Fe(), this.bones = [], this._scene = i || gi.LastCreatedScene, this._uniqueId = this._scene.getUniqueId(), this._scene.addSkeleton(this), this._isDirty = !0; const r = this._scene.getEngine().getCaps(); this._canUseTextureForBones = r.textureFloat && r.maxVertexTextureImageUnits > 0; } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "Skeleton"; } /** * Returns an array containing the root bones * @returns an array containing the root bones */ getChildren() { return this.bones.filter((e) => !e.getParent()); } // Members /** * Gets the list of transform matrices to send to shaders (one matrix per bone) * @param mesh defines the mesh to use to get the root matrix (if needInitialSkinMatrix === true) * @returns a Float32Array containing matrices data */ getTransformMatrices(e) { return this.needInitialSkinMatrix ? (e._bonesTransformMatrices || this.prepare(), e._bonesTransformMatrices) : ((!this._transformMatrices || this._isDirty) && this.prepare(), this._transformMatrices); } /** * Gets the list of transform matrices to send to shaders inside a texture (one matrix per bone) * @param mesh defines the mesh to use to get the root matrix (if needInitialSkinMatrix === true) * @returns a raw texture containing the data */ getTransformMatrixTexture(e) { return this.needInitialSkinMatrix && e._transformMatrixTexture ? e._transformMatrixTexture : this._transformMatrixTexture; } /** * Gets the current hosting scene * @returns a scene object */ getScene() { return this._scene; } // Methods /** * Gets a string representing the current skeleton data * @param fullDetails defines a boolean indicating if we want a verbose version * @returns a string representing the current skeleton data */ toString(e) { let t = `Name: ${this.name}, nBones: ${this.bones.length}`; if (t += `, nAnimationRanges: ${this._ranges ? Object.keys(this._ranges).length : "none"}`, e) { t += ", Ranges: {"; let i = !0; for (const r in this._ranges) i && (t += ", ", i = !1), t += r; t += "}"; } return t; } /** * Get bone's index searching by name * @param name defines bone's name to search for * @returns the indice of the bone. 
Returns -1 if not found */ getBoneIndexByName(e) { for (let t = 0, i = this.bones.length; t < i; t++) if (this.bones[t].name === e) return t; return -1; } /** * Create a new animation range * @param name defines the name of the range * @param from defines the start key * @param to defines the end key */ createAnimationRange(e, t, i) { if (!this._ranges[e]) { this._ranges[e] = new hP(e, t, i); for (let r = 0, s = this.bones.length; r < s; r++) this.bones[r].animations[0] && this.bones[r].animations[0].createRange(e, t, i); } } /** * Delete a specific animation range * @param name defines the name of the range * @param deleteFrames defines if frames must be removed as well */ deleteAnimationRange(e, t = !0) { for (let i = 0, r = this.bones.length; i < r; i++) this.bones[i].animations[0] && this.bones[i].animations[0].deleteRange(e, t); this._ranges[e] = null; } /** * Gets a specific animation range * @param name defines the name of the range to look for * @returns the requested animation range or null if not found */ getAnimationRange(e) { return this._ranges[e] || null; } /** * Gets the list of all animation ranges defined on this skeleton * @returns an array */ getAnimationRanges() { const e = []; let t; for (t in this._ranges) e.push(this._ranges[t]); return e; } /** * Copy animation range from a source skeleton. * This is not for a complete retargeting, only between very similar skeleton's with only possible bone length differences * @param source defines the source skeleton * @param name defines the name of the range to copy * @param rescaleAsRequired defines if rescaling must be applied if required * @returns true if operation was successful */ copyAnimationRange(e, t, i = !1) { if (this._ranges[t] || !e.getAnimationRange(t)) return !1; let r = !0; const s = this._getHighestAnimationFrame() + 1, n = {}, a = e.bones; let l, o; for (o = 0, l = a.length; o < l; o++) n[a[o].name] = a[o]; this.bones.length !== a.length && (Ce.Warn(`copyAnimationRange: this rig has ${this.bones.length} bones, while source as ${a.length}`), r = !1); const u = i && this.dimensionsAtRest && e.dimensionsAtRest ? this.dimensionsAtRest.divide(e.dimensionsAtRest) : null; for (o = 0, l = this.bones.length; o < l; o++) { const d = this.bones[o].name, f = n[d]; f ? r = r && this.bones[o].copyAnimationRange(f, t, s, i, u) : (Ce.Warn("copyAnimationRange: not same rig, missing source bone " + d), r = !1); } const h = e.getAnimationRange(t); return h && (this._ranges[t] = new hP(t, h.from + s, h.to + s)), r; } /** * Forces the skeleton to go to rest pose */ returnToRest() { for (const e of this.bones) e._index !== -1 && e.returnToRest(); } _getHighestAnimationFrame() { let e = 0; for (let t = 0, i = this.bones.length; t < i; t++) if (this.bones[t].animations[0]) { const r = this.bones[t].animations[0].getHighestFrame(); e < r && (e = r); } return e; } /** * Begin a specific animation range * @param name defines the name of the range to start * @param loop defines if looping must be turned on (false by default) * @param speedRatio defines the speed ratio to apply (1 by default) * @param onAnimationEnd defines a callback which will be called when animation will end * @returns a new animatable */ beginAnimation(e, t, i, r) { const s = this.getAnimationRange(e); return s ? this._scene.beginAnimation(this, s.from, s.to, t, i, r) : null; } /** * Convert the keyframes for a range of animation on a skeleton to be relative to a given reference frame. 
* @param skeleton defines the Skeleton containing the animation range to convert * @param referenceFrame defines the frame that keyframes in the range will be relative to * @param range defines the name of the AnimationRange belonging to the Skeleton to convert * @returns the original skeleton */ static MakeAnimationAdditive(e, t = 0, i) { const r = e.getAnimationRange(i); if (!r) return null; const s = e._scene.getAllAnimatablesByTarget(e); let n = null; for (let l = 0; l < s.length; l++) { const o = s[l]; if (o.fromFrame === (r == null ? void 0 : r.from) && o.toFrame === (r == null ? void 0 : r.to)) { n = o; break; } } const a = e.getAnimatables(); for (let l = 0; l < a.length; l++) { const u = a[l].animations; if (u) for (let h = 0; h < u.length; h++) nt.MakeAnimationAdditive(u[h], t, i); } return n && (n.isAdditive = !0), e; } /** @internal */ _markAsDirty() { this._isDirty = !0, this._absoluteTransformIsDirty = !0; } /** * @internal */ _registerMeshWithPoseMatrix(e) { this._meshesWithPoseMatrix.push(e); } /** * @internal */ _unregisterMeshWithPoseMatrix(e) { const t = this._meshesWithPoseMatrix.indexOf(e); t > -1 && this._meshesWithPoseMatrix.splice(t, 1); } _computeTransformMatrices(e, t) { this.onBeforeComputeObservable.notifyObservers(this); for (let i = 0; i < this.bones.length; i++) { const r = this.bones[i]; r._childUpdateId++; const s = r.getParent(); if (s ? r.getLocalMatrix().multiplyToRef(s.getFinalMatrix(), r.getFinalMatrix()) : t ? r.getLocalMatrix().multiplyToRef(t, r.getFinalMatrix()) : r.getFinalMatrix().copyFrom(r.getLocalMatrix()), r._index !== -1) { const n = r._index === null ? i : r._index; r.getAbsoluteInverseBindMatrix().multiplyToArray(r.getFinalMatrix(), e, n * 16); } } this._identity.copyToArray(e, this.bones.length * 16); } /** * Build all resources required to render a skeleton * @param dontCheckFrameId defines a boolean indicating if prepare should be run without checking first the current frame id (default: false) */ prepare(e = !1) { if (!e) { const t = this.getScene().getRenderId(); if (this._currentRenderId === t) return; this._currentRenderId = t; } if (this._numBonesWithLinkedTransformNode > 0) { for (const t of this.bones) if (t._linkedTransformNode) { const i = t._linkedTransformNode; t.position = i.position, i.rotationQuaternion ? 
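// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The class above is Babylon.js's Skeleton. Assuming the public names, the
// animation-range API documented above is typically used like this (the mesh
// and frame numbers are placeholders):
//
//   const skeleton = characterMesh.skeleton;
//   skeleton.createAnimationRange("walk", 0, 30);
//   skeleton.enableBlending(0.05);
//   skeleton.beginAnimation("walk", true, 1.0);
//
// beginAnimation resolves the named range and forwards it to scene.beginAnimation,
// returning the resulting Animatable (or null if the range does not exist), as
// the implementation above shows.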
t.rotationQuaternion = i.rotationQuaternion : t.rotation = i.rotation, t.scaling = i.scaling; } } if (this.needInitialSkinMatrix) for (const t of this._meshesWithPoseMatrix) { const i = t.getPoseMatrix(); let r = this._isDirty; if ((!t._bonesTransformMatrices || t._bonesTransformMatrices.length !== 16 * (this.bones.length + 1)) && (t._bonesTransformMatrices = new Float32Array(16 * (this.bones.length + 1)), r = !0), !!r) { if (this._synchronizedWithMesh !== t) { this._synchronizedWithMesh = t; for (const s of this.bones) s.getParent() || (s.getBindMatrix().multiplyToRef(i, de.Matrix[1]), s._updateAbsoluteBindMatrices(de.Matrix[1])); if (this.isUsingTextureForMatrices) { const s = (this.bones.length + 1) * 4; (!t._transformMatrixTexture || t._transformMatrixTexture.getSize().width !== s) && (t._transformMatrixTexture && t._transformMatrixTexture.dispose(), t._transformMatrixTexture = Po.CreateRGBATexture(t._bonesTransformMatrices, (this.bones.length + 1) * 4, 1, this._scene, !1, !1, 1, 1)); } } this._computeTransformMatrices(t._bonesTransformMatrices, i), this.isUsingTextureForMatrices && t._transformMatrixTexture && t._transformMatrixTexture.update(t._bonesTransformMatrices); } } else { if (!this._isDirty) return; (!this._transformMatrices || this._transformMatrices.length !== 16 * (this.bones.length + 1)) && (this._transformMatrices = new Float32Array(16 * (this.bones.length + 1)), this.isUsingTextureForMatrices && (this._transformMatrixTexture && this._transformMatrixTexture.dispose(), this._transformMatrixTexture = Po.CreateRGBATexture(this._transformMatrices, (this.bones.length + 1) * 4, 1, this._scene, !1, !1, 1, 1))), this._computeTransformMatrices(this._transformMatrices, null), this.isUsingTextureForMatrices && this._transformMatrixTexture && this._transformMatrixTexture.update(this._transformMatrices); } this._isDirty = !1; } /** * Gets the list of animatables currently running for this skeleton * @returns an array of animatables */ getAnimatables() { if (!this._animatables || this._animatables.length !== this.bones.length) { this._animatables = []; for (let e = 0; e < this.bones.length; e++) this._animatables.push(this.bones[e]); } return this._animatables; } /** * Clone the current skeleton * @param name defines the name of the new skeleton * @param id defines the id of the new skeleton * @returns the new skeleton */ clone(e, t) { const i = new sx(e, t || e, this._scene); i.needInitialSkinMatrix = this.needInitialSkinMatrix; for (let r = 0; r < this.bones.length; r++) { const s = this.bones[r]; let n = null; const a = s.getParent(); if (a) { const o = this.bones.indexOf(a); n = i.bones[o]; } const l = new ha(s.name, i, n, s.getBindMatrix().clone(), s.getRestMatrix().clone()); l._index = s._index, s._linkedTransformNode && l.linkTransformNode(s._linkedTransformNode), id.DeepCopy(s.animations, l.animations); } if (this._ranges) { i._ranges = {}; for (const r in this._ranges) { const s = this._ranges[r]; s && (i._ranges[r] = s.clone()); } } return this._isDirty = !0, i.prepare(!0), i; } /** * Enable animation blending for this skeleton * @param blendingSpeed defines the blending speed to apply * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#animation-blending */ enableBlending(e = 0.01) { this.bones.forEach((t) => { t.animations.forEach((i) => { i.enableBlending = !0, i.blendingSpeed = e; }); }); } /** * Releases all resources associated with the current skeleton */ dispose() { if (this._meshesWithPoseMatrix.length = 0, 
this.getScene().stopAnimation(this), this.getScene().removeSkeleton(this), this._parentContainer) { const e = this._parentContainer.skeletons.indexOf(this); e > -1 && this._parentContainer.skeletons.splice(e, 1), this._parentContainer = null; } this._transformMatrixTexture && (this._transformMatrixTexture.dispose(), this._transformMatrixTexture = null); } /** * Serialize the skeleton in a JSON object * @returns a JSON object */ serialize() { var e; const t = {}; t.name = this.name, t.id = this.id, this.dimensionsAtRest && (t.dimensionsAtRest = this.dimensionsAtRest.asArray()), t.bones = [], t.needInitialSkinMatrix = this.needInitialSkinMatrix; for (let i = 0; i < this.bones.length; i++) { const r = this.bones[i], s = r.getParent(), n = { parentBoneIndex: s ? this.bones.indexOf(s) : -1, index: r.getIndex(), name: r.name, id: r.id, matrix: r.getBindMatrix().toArray(), rest: r.getRestMatrix().toArray(), linkedTransformNodeId: (e = r.getTransformNode()) === null || e === void 0 ? void 0 : e.id }; t.bones.push(n), r.length && (n.length = r.length), r.metadata && (n.metadata = r.metadata), r.animations && r.animations.length > 0 && (n.animation = r.animations[0].serialize()), t.ranges = []; for (const a in this._ranges) { const l = this._ranges[a]; if (!l) continue; const o = {}; o.name = a, o.from = l.from, o.to = l.to, t.ranges.push(o); } } return t; } /** * Creates a new skeleton from serialized data * @param parsedSkeleton defines the serialized data * @param scene defines the hosting scene * @returns a new skeleton */ static Parse(e, t) { const i = new sx(e.name, e.id, t); e.dimensionsAtRest && (i.dimensionsAtRest = D.FromArray(e.dimensionsAtRest)), i.needInitialSkinMatrix = e.needInitialSkinMatrix; let r; for (r = 0; r < e.bones.length; r++) { const s = e.bones[r], n = e.bones[r].index; let a = null; s.parentBoneIndex > -1 && (a = i.bones[s.parentBoneIndex]); const l = s.rest ? 
Ae.FromArray(s.rest) : null, o = new ha(s.name, i, a, Ae.FromArray(s.matrix), l, null, n); s.id !== void 0 && s.id !== null && (o.id = s.id), s.length && (o.length = s.length), s.metadata && (o.metadata = s.metadata), s.animation && o.animations.push(nt.Parse(s.animation)), s.linkedTransformNodeId !== void 0 && s.linkedTransformNodeId !== null && (i._hasWaitingData = !0, o._waitingTransformNodeId = s.linkedTransformNodeId); } if (e.ranges) for (r = 0; r < e.ranges.length; r++) { const s = e.ranges[r]; i.createAnimationRange(s.name, s.from, s.to); } return i; } /** * Compute all node absolute matrices * @param forceUpdate defines if computation must be done even if cache is up to date */ computeAbsoluteMatrices(e = !1) { (this._absoluteTransformIsDirty || e) && (this.bones[0].computeAbsoluteMatrices(), this._absoluteTransformIsDirty = !1); } /** * Compute all node absolute matrices * @param forceUpdate defines if computation must be done even if cache is up to date * @deprecated Please use computeAbsoluteMatrices instead */ computeAbsoluteTransforms(e = !1) { this.computeAbsoluteMatrices(e); } /** * Gets the root pose matrix * @returns a matrix */ getPoseMatrix() { let e = null; return this._meshesWithPoseMatrix.length > 0 && (e = this._meshesWithPoseMatrix[0].getPoseMatrix()), e; } /** * Sorts bones per internal index */ sortBones() { const e = [], t = new Array(this.bones.length); for (let i = 0; i < this.bones.length; i++) this._sortBones(i, e, t); this.bones = e; } _sortBones(e, t, i) { if (i[e]) return; i[e] = !0; const r = this.bones[e]; if (!r) return; r._index === void 0 && (r._index = e); const s = r.getParent(); s && this._sortBones(this.bones.indexOf(s), t, i), t.push(r); } /** * Set the current local matrix as the restPose for all bones in the skeleton. */ setCurrentPoseAsRest() { this.bones.forEach((e) => { e.setCurrentPoseAsRest(); }); } } class Wte { /** * Creates a new storage buffer instance * @param engine The engine the buffer will be created inside * @param size The size of the buffer in bytes * @param creationFlags flags to use when creating the buffer (see undefined). The BUFFER_CREATIONFLAG_STORAGE flag will be automatically added. * @param label defines the label of the buffer (for debug purpose) */ constructor(e, t, i = 3, r) { this._engine = e, this._label = r, this._engine._storageBuffers.push(this), this._create(t, i); } _create(e, t) { this._bufferSize = e, this._creationFlags = t, this._buffer = this._engine.createStorageBuffer(e, t, this._label); } /** @internal */ _rebuild() { this._create(this._bufferSize, this._creationFlags); } /** * Gets underlying native buffer * @returns underlying native buffer */ getBuffer() { return this._buffer; } /** * Updates the storage buffer * @param data the data used to update the storage buffer * @param byteOffset the byte offset of the data (optional) * @param byteLength the byte length of the data (optional) */ update(e, t, i) { this._buffer && this._engine.updateStorageBuffer(this._buffer, e, t, i); } /** * Reads data from the storage buffer * @param offset The offset in the storage buffer to start reading from (default: 0) * @param size The number of bytes to read from the storage buffer (default: capacity of the buffer) * @param buffer The buffer to write the data we have read from the storage buffer to (optional) * @param noDelay If true, a call to flushFramebuffer will be issued so that the data can be read back immediately. This can speed up data retrieval, at the cost of a small perf penalty (default: false). 
* @returns If not undefined, returns the (promise) buffer (as provided by the 4th parameter) filled with the data, else it returns a (promise) Uint8Array with the data read from the storage buffer */ read(e, t, i, r) { return this._engine.readFromStorageBuffer(this._buffer, e, t, i, r); } /** * Disposes the storage buffer */ dispose() { const e = this._engine._storageBuffers, t = e.indexOf(this); t !== -1 && (e[t] = e[e.length - 1], e.pop()), this._engine._releaseBuffer(this._buffer), this._buffer = null; } } const x9 = (() => { const c = new Uint8Array(4), e = new Uint32Array(c.buffer); return !!((e[0] = 1) & c[0]); })(); Object.defineProperty(Y.prototype, "effectiveByteStride", { get: function() { return this._alignedBuffer && this._alignedBuffer.byteStride || this.byteStride; }, enumerable: !0, configurable: !0 }); Object.defineProperty(Y.prototype, "effectiveByteOffset", { get: function() { return this._alignedBuffer ? 0 : this.byteOffset; }, enumerable: !0, configurable: !0 }); Object.defineProperty(Y.prototype, "effectiveBuffer", { get: function() { return this._alignedBuffer && this._alignedBuffer.getBuffer() || this._buffer.getBuffer(); }, enumerable: !0, configurable: !0 }); Y.prototype._rebuild = function() { var c, e; (c = this._buffer) === null || c === void 0 || c._rebuild(), (e = this._alignedBuffer) === null || e === void 0 || e._rebuild(); }; Y.prototype.dispose = function() { var c; this._ownsBuffer && this._buffer.dispose(), (c = this._alignedBuffer) === null || c === void 0 || c.dispose(), this._alignedBuffer = void 0, this._isDisposed = !0; }; Y.prototype._alignBuffer = function() { var c, e; const t = this._buffer.getData(); if (!this.engine._features.forceVertexBufferStrideMultiple4Bytes || this.byteStride % 4 === 0 || !t) return; const i = Y.GetTypeByteLength(this.type), r = this.byteStride + 3 & -4, s = r / i, n = this.totalVertices, l = n * r / i; let o; if (Array.isArray(t)) { const f = new Float32Array(t); o = new DataView(f.buffer, f.byteOffset, f.byteLength); } else t instanceof ArrayBuffer ? o = new DataView(t, 0, t.byteLength) : o = new DataView(t.buffer, t.byteOffset, t.byteLength); let u; this.type === Y.BYTE ? u = new Int8Array(l) : this.type === Y.UNSIGNED_BYTE ? u = new Uint8Array(l) : this.type === Y.SHORT ? u = new Int16Array(l) : this.type === Y.UNSIGNED_SHORT ? u = new Uint16Array(l) : this.type === Y.INT ? u = new Int32Array(l) : this.type === Y.UNSIGNED_INT ? u = new Uint32Array(l) : u = new Float32Array(l); const h = this.getSize(); let d = this.byteOffset; for (let f = 0; f < n; ++f) { for (let p = 0; p < h; ++p) switch (this.type) { case Y.BYTE: u[f * s + p] = o.getInt8(d + p); break; case Y.UNSIGNED_BYTE: u[f * s + p] = o.getUint8(d + p); break; case Y.SHORT: u[f * s + p] = o.getInt16(d + p * 2, x9); break; case Y.UNSIGNED_SHORT: u[f * s + p] = o.getUint16(d + p * 2, x9); break; case Y.INT: u[f * s + p] = o.getInt32(d + p * 4, x9); break; case Y.UNSIGNED_INT: u[f * s + p] = o.getUint32(d + p * 4, x9); break; case Y.FLOAT: u[f * s + p] = o.getFloat32(d + p * 4, x9); break; } d += this.byteStride; } (c = this._alignedBuffer) === null || c === void 0 || c.dispose(), this._alignedBuffer = new hu(this.engine, u, !1, r, !1, this.getIsInstanced(), !0, this.instanceDivisor, ((e = this._label) !== null && e !== void 0 ? 
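// --- Illustrative usage (editor's sketch, not part of the original bundle) ---
// The class above is Babylon.js's StorageBuffer (WebGPU only). Assuming the
// public name, a small write/read round-trip looks roughly like this; engine
// must be a WebGPU-capable engine and the sizes are placeholders:
//
//   const storage = new BABYLON.StorageBuffer(engine, 16 * 4);
//   storage.update(new Float32Array(16));
//   const raw = await storage.read();   // Uint8Array unless a buffer is passed in
//   console.log(new Float32Array(raw.buffer));
//   storage.dispose();
//
// read() accepts optional offset/size/buffer/noDelay arguments as documented
// above and returns a promise, so it must be awaited.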
e : "VertexBuffer") + "_aligned"); }; class KL { constructor() { this.wheelPrecisionX = 3, this.wheelPrecisionY = 3, this.wheelPrecisionZ = 3, this.onChangedObservable = new Fe(), this._wheelDeltaX = 0, this._wheelDeltaY = 0, this._wheelDeltaZ = 0, this._ffMultiplier = 12, this._normalize = 120; } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls * should call preventdefault(). * (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), this._wheel = (t) => { if (t.type !== si.POINTERWHEEL) return; const i = t.event, r = i.deltaMode === UI.DOM_DELTA_LINE ? this._ffMultiplier : 1; this._wheelDeltaX += this.wheelPrecisionX * r * i.deltaX / this._normalize, this._wheelDeltaY -= this.wheelPrecisionY * r * i.deltaY / this._normalize, this._wheelDeltaZ += this.wheelPrecisionZ * r * i.deltaZ / this._normalize, i.preventDefault && (e || i.preventDefault()); }, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._wheel, si.POINTERWHEEL); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._observer && (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._observer = null, this._wheel = null), this.onChangedObservable && this.onChangedObservable.clear(); } /** * Called for each rendered frame. */ checkInputs() { this.onChangedObservable.notifyObservers({ wheelDeltaX: this._wheelDeltaX, wheelDeltaY: this._wheelDeltaY, wheelDeltaZ: this._wheelDeltaZ }), this._wheelDeltaX = 0, this._wheelDeltaY = 0, this._wheelDeltaZ = 0; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "BaseCameraMouseWheelInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "mousewheel"; } } F([ W() ], KL.prototype, "wheelPrecisionX", void 0); F([ W() ], KL.prototype, "wheelPrecisionY", void 0); F([ W() ], KL.prototype, "wheelPrecisionZ", void 0); class qB { constructor() { this._currentActiveButton = -1, this.buttons = [0, 1, 2]; } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments); const t = this.camera.getEngine(), i = t.getInputElement(); let r = 0, s = null; this._pointA = null, this._pointB = null, this._altKey = !1, this._ctrlKey = !1, this._metaKey = !1, this._shiftKey = !1, this._buttonsPressed = 0, this._pointerInput = (a) => { var l, o; const u = a.event, h = u.pointerType === "touch"; if (a.type !== si.POINTERMOVE && this.buttons.indexOf(u.button) === -1) return; const d = u.target; if (this._altKey = u.altKey, this._ctrlKey = u.ctrlKey, this._metaKey = u.metaKey, this._shiftKey = u.shiftKey, this._buttonsPressed = u.buttons, t.isPointerLock) { const f = u.movementX, p = u.movementY; this.onTouch(null, f, p), this._pointA = null, this._pointB = null; } else { if (a.type !== si.POINTERDOWN && h && ((l = this._pointA) === null || l === void 0 ? void 0 : l.pointerId) !== u.pointerId && ((o = this._pointB) === null || o === void 0 ? 
void 0 : o.pointerId) !== u.pointerId) return; if (a.type === si.POINTERDOWN && (this._currentActiveButton === -1 || h)) { try { d == null || d.setPointerCapture(u.pointerId); } catch { } if (this._pointA === null) this._pointA = { x: u.clientX, y: u.clientY, pointerId: u.pointerId, type: u.pointerType }; else if (this._pointB === null) this._pointB = { x: u.clientX, y: u.clientY, pointerId: u.pointerId, type: u.pointerType }; else return; this._currentActiveButton === -1 && !h && (this._currentActiveButton = u.button), this.onButtonDown(u), e || (u.preventDefault(), i && i.focus()); } else if (a.type === si.POINTERDOUBLETAP) this.onDoubleTap(u.pointerType); else if (a.type === si.POINTERUP && (this._currentActiveButton === u.button || h)) { try { d == null || d.releasePointerCapture(u.pointerId); } catch { } h || (this._pointB = null), t._badOS ? this._pointA = this._pointB = null : this._pointB && this._pointA && this._pointA.pointerId == u.pointerId ? (this._pointA = this._pointB, this._pointB = null) : this._pointA && this._pointB && this._pointB.pointerId == u.pointerId ? this._pointB = null : this._pointA = this._pointB = null, (r !== 0 || s) && (this.onMultiTouch( this._pointA, this._pointB, r, 0, // pinchSquaredDistance s, null // multiTouchPanPosition ), r = 0, s = null), this._currentActiveButton = -1, this.onButtonUp(u), e || u.preventDefault(); } else if (a.type === si.POINTERMOVE) { if (e || u.preventDefault(), this._pointA && this._pointB === null) { const f = u.clientX - this._pointA.x, p = u.clientY - this._pointA.y; this.onTouch(this._pointA, f, p), this._pointA.x = u.clientX, this._pointA.y = u.clientY; } else if (this._pointA && this._pointB) { const f = this._pointA.pointerId === u.pointerId ? this._pointA : this._pointB; f.x = u.clientX, f.y = u.clientY; const p = this._pointA.x - this._pointB.x, m = this._pointA.y - this._pointB.y, _ = p * p + m * m, v = { x: (this._pointA.x + this._pointB.x) / 2, y: (this._pointA.y + this._pointB.y) / 2, pointerId: u.pointerId, type: a.type }; this.onMultiTouch(this._pointA, this._pointB, r, _, s, v), s = v, r = _; } } } }, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._pointerInput, si.POINTERDOWN | si.POINTERUP | si.POINTERMOVE | si.POINTERDOUBLETAP), this._onLostFocus = () => { this._pointA = this._pointB = null, r = 0, s = null, this.onLostFocus(); }, this._contextMenuBind = (a) => this.onContextMenu(a), i && i.addEventListener("contextmenu", this._contextMenuBind, !1); const n = this.camera.getScene().getEngine().getHostWindow(); n && Ve.RegisterTopRootEvents(n, [{ name: "blur", handler: this._onLostFocus }]); } /** * Detach the current controls from the specified dom element. */ detachControl() { if (this._onLostFocus) { const e = this.camera.getScene().getEngine().getHostWindow(); e && Ve.UnregisterTopRootEvents(e, [{ name: "blur", handler: this._onLostFocus }]); } if (this._observer) { if (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._observer = null, this._contextMenuBind) { const e = this.camera.getScene().getEngine().getInputElement(); e && e.removeEventListener("contextmenu", this._contextMenuBind); } this._onLostFocus = null; } this._altKey = !1, this._ctrlKey = !1, this._metaKey = !1, this._shiftKey = !1, this._buttonsPressed = 0, this._currentActiveButton = -1; } /** * Gets the class name of the current input. 
* @returns the class name */ getClassName() { return "BaseCameraPointersInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "pointers"; } /** * Called on pointer POINTERDOUBLETAP event. * Override this method to provide functionality on POINTERDOUBLETAP event. * @param type */ // eslint-disable-next-line @typescript-eslint/no-unused-vars onDoubleTap(e) { } /** * Called on pointer POINTERMOVE event if only a single touch is active. * Override this method to provide functionality. * @param point * @param offsetX * @param offsetY */ // eslint-disable-next-line @typescript-eslint/no-unused-vars onTouch(e, t, i) { } /** * Called on pointer POINTERMOVE event if multiple touches are active. * Override this method to provide functionality. * @param _pointA * @param _pointB * @param previousPinchSquaredDistance * @param pinchSquaredDistance * @param previousMultiTouchPanPosition * @param multiTouchPanPosition */ // eslint-disable-next-line @typescript-eslint/no-unused-vars onMultiTouch(e, t, i, r, s, n) { } /** * Called on JS contextmenu event. * Override this method to provide functionality. * @param evt */ onContextMenu(e) { e.preventDefault(); } /** * Called each time a new POINTERDOWN event occurs. Ie, for each button * press. * Override this method to provide functionality. * @param _evt Defines the event to track */ onButtonDown(e) { } /** * Called each time a new POINTERUP event occurs. Ie, for each button * release. * Override this method to provide functionality. * @param _evt Defines the event to track */ onButtonUp(e) { } /** * Called when window becomes inactive. * Override this method to provide functionality. */ onLostFocus() { } } F([ W() ], qB.prototype, "buttons", void 0); var Dd = {}; class WL { /** * Instantiate a new Camera Input Manager. * @param camera Defines the camera the input manager belongs to */ constructor(e) { this.attachedToElement = !1, this.attached = {}, this.camera = e, this.checkInputs = () => { }; } /** * Add an input method to a camera * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/customizingCameraInputs * @param input Camera input method */ add(e) { const t = e.getSimpleName(); if (this.attached[t]) { Ce.Warn("camera input of type " + t + " already exists on camera"); return; } this.attached[t] = e, e.camera = this.camera, e.checkInputs && (this.checkInputs = this._addCheckInputs(e.checkInputs.bind(e))), this.attachedToElement && e.attachControl(this.noPreventDefault); } /** * Remove a specific input method from a camera * example: camera.inputs.remove(camera.inputs.attached.mouse); * @param inputToRemove camera input method */ remove(e) { for (const t in this.attached) { const i = this.attached[t]; if (i === e) { i.detachControl(), i.camera = null, delete this.attached[t], this.rebuildInputCheck(); return; } } } /** * Remove a specific input type from a camera * example: camera.inputs.remove("ArcRotateCameraGamepadInput"); * @param inputType the type of the input to remove */ removeByType(e) { for (const t in this.attached) { const i = this.attached[t]; i.getClassName() === e && (i.detachControl(), i.camera = null, delete this.attached[t], this.rebuildInputCheck()); } } _addCheckInputs(e) { const t = this.checkInputs; return () => { t(), e(); }; } /** * Attach the input controls to the currently attached dom element to listen the events from. 
* @param input Defines the input to attach */ attachInput(e) { this.attachedToElement && e.attachControl(this.noPreventDefault); } /** * Attach the current manager's input controls to a specific dom element to listen to events from it. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachElement(e = !1) { if (!this.attachedToElement) { e = Ai.ForceAttachControlToAlwaysPreventDefault ? !1 : e, this.attachedToElement = !0, this.noPreventDefault = e; for (const t in this.attached) this.attached[t].attachControl(e); } } /** * Detach the current manager's input controls from a specific dom element. * @param disconnect Defines whether the input should be removed from the current list of attached inputs */ detachElement(e = !1) { for (const t in this.attached) this.attached[t].detachControl(), e && (this.attached[t].camera = null); this.attachedToElement = !1; } /** * Rebuild the dynamic inputCheck function from the current list of * defined inputs in the manager. */ rebuildInputCheck() { this.checkInputs = () => { }; for (const e in this.attached) { const t = this.attached[e]; t.checkInputs && (this.checkInputs = this._addCheckInputs(t.checkInputs.bind(t))); } } /** * Remove all attached input methods from a camera */ clear() { this.attachedToElement && this.detachElement(!0), this.attached = {}, this.attachedToElement = !1, this.checkInputs = () => { }; } /** * Serialize the current input manager attached to a camera. * This ensures that, once parsed, * the inputs associated with the camera will be identical to the current ones * @param serializedCamera Defines the camera serialization JSON the input serialization should write to */ serialize(e) { const t = {}; for (const i in this.attached) { const r = this.attached[i], s = St.Serialize(r); t[r.getClassName()] = s; } e.inputsmgr = t; } /** * Parses an input manager serialized JSON to restore the previous list of inputs * and states associated with a camera. 
* @param parsedCamera Defines the JSON to parse */ parse(e) { const t = e.inputsmgr; if (t) { this.clear(); for (const i in t) { const r = Dd[i]; if (r) { const s = t[i], n = St.Parse(() => new r(), s, null); this.add(n); } } } else for (const i in this.attached) { const r = Dd[this.attached[i].getClassName()]; if (r) { const s = St.Parse(() => new r(), e, null); this.remove(this.attached[i]), this.add(s); } } } } class jce { /** * Initializes the gamepad x and y control stick values * @param x The x component of the gamepad control stick value * @param y The y component of the gamepad control stick value */ constructor(e, t) { this.x = e, this.y = t; } } class zu { /** * Specifies if the gamepad has been connected */ get isConnected() { return this._isConnected; } /** * Initializes the gamepad * @param id The id of the gamepad * @param index The index of the gamepad * @param browserGamepad The browser gamepad * @param leftStickX The x component of the left joystick * @param leftStickY The y component of the left joystick * @param rightStickX The x component of the right joystick * @param rightStickY The y component of the right joystick */ constructor(e, t, i, r = 0, s = 1, n = 2, a = 3) { this.id = e, this.index = t, this.browserGamepad = i, this._leftStick = { x: 0, y: 0 }, this._rightStick = { x: 0, y: 0 }, this._isConnected = !0, this._invertLeftStickY = !1, this.type = zu.GAMEPAD, this._leftStickAxisX = r, this._leftStickAxisY = s, this._rightStickAxisX = n, this._rightStickAxisY = a, this.browserGamepad.axes.length >= 2 && (this._leftStick = { x: this.browserGamepad.axes[this._leftStickAxisX], y: this.browserGamepad.axes[this._leftStickAxisY] }), this.browserGamepad.axes.length >= 4 && (this._rightStick = { x: this.browserGamepad.axes[this._rightStickAxisX], y: this.browserGamepad.axes[this._rightStickAxisY] }); } /** * Callback triggered when the left joystick has changed * @param callback */ onleftstickchanged(e) { this._onleftstickchanged = e; } /** * Callback triggered when the right joystick has changed * @param callback */ onrightstickchanged(e) { this._onrightstickchanged = e; } /** * Gets the left joystick */ get leftStick() { return this._leftStick; } /** * Sets the left joystick values */ set leftStick(e) { this._onleftstickchanged && (this._leftStick.x !== e.x || this._leftStick.y !== e.y) && this._onleftstickchanged(e), this._leftStick = e; } /** * Gets the right joystick */ get rightStick() { return this._rightStick; } /** * Sets the right joystick value */ set rightStick(e) { this._onrightstickchanged && (this._rightStick.x !== e.x || this._rightStick.y !== e.y) && this._onrightstickchanged(e), this._rightStick = e; } /** * Updates the gamepad joystick positions */ update() { this._leftStick && (this.leftStick = { x: this.browserGamepad.axes[this._leftStickAxisX], y: this.browserGamepad.axes[this._leftStickAxisY] }, this._invertLeftStickY && (this.leftStick.y *= -1)), this._rightStick && (this.rightStick = { x: this.browserGamepad.axes[this._rightStickAxisX], y: this.browserGamepad.axes[this._rightStickAxisY] }); } /** * Disposes the gamepad */ dispose() { } } zu.GAMEPAD = 0; zu.GENERIC = 1; zu.XBOX = 2; zu.POSE_ENABLED = 3; zu.DUALSHOCK = 4; class jte extends zu { /** * Callback triggered when a button has been pressed * @param callback Called when a button has been pressed */ onbuttondown(e) { this._onbuttondown = e; } /** * Callback triggered when a button has been released * @param callback Called when a button has been released */ onbuttonup(e) { this._onbuttonup 
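/* The Gamepad wrapper above mirrors browserGamepad.axes into leftStick / rightStick and only invokes the registered callbacks when a stick value actually changes. A usage sketch, assuming the scene-level gamepadManager referenced by the camera inputs below (scene is an existing Babylon.js scene):
   scene.gamepadManager.onGamepadConnectedObservable.add((gp) => {
     gp.onleftstickchanged((values) => {
       console.log("left stick:", values.x, values.y);
     });
   });
*/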
= e; } /** * Initializes the generic gamepad * @param id The id of the generic gamepad * @param index The index of the generic gamepad * @param browserGamepad The browser gamepad */ constructor(e, t, i) { super(e, t, i), this.onButtonDownObservable = new Fe(), this.onButtonUpObservable = new Fe(), this.type = zu.GENERIC, this._buttons = new Array(i.buttons.length); } _setButtonValue(e, t, i) { return e !== t && (e === 1 && (this._onbuttondown && this._onbuttondown(i), this.onButtonDownObservable.notifyObservers(i)), e === 0 && (this._onbuttonup && this._onbuttonup(i), this.onButtonUpObservable.notifyObservers(i))), e; } /** * Updates the generic gamepad */ update() { super.update(); for (let e = 0; e < this._buttons.length; e++) this._buttons[e] = this._setButtonValue(this.browserGamepad.buttons[e].value, this._buttons[e], e); } /** * Disposes the generic gamepad */ dispose() { super.dispose(), this.onButtonDownObservable.clear(), this.onButtonUpObservable.clear(); } } class jL { constructor() { this.gamepadRotationSensibility = 80, this.gamepadMoveSensibility = 40, this._yAxisScale = 1; } /** * Gets or sets a boolean indicating that the Y axis (for the right stick) should be inverted */ get invertYAxis() { return this._yAxisScale !== 1; } set invertYAxis(e) { this._yAxisScale = e ? -1 : 1; } /** * Attach the input controls to a specific dom element to get the input from. */ attachControl() { const e = this.camera.getScene().gamepadManager; this._onGamepadConnectedObserver = e.onGamepadConnectedObservable.add((t) => { t.type !== zu.POSE_ENABLED && (!this.gamepad || t.type === zu.XBOX) && (this.gamepad = t); }), this._onGamepadDisconnectedObserver = e.onGamepadDisconnectedObservable.add((t) => { this.gamepad === t && (this.gamepad = null); }), this.gamepad = e.getGamepadByType(zu.XBOX); } /** * Detach the current controls from the specified dom element. */ detachControl() { this.camera.getScene().gamepadManager.onGamepadConnectedObservable.remove(this._onGamepadConnectedObserver), this.camera.getScene().gamepadManager.onGamepadDisconnectedObservable.remove(this._onGamepadDisconnectedObserver), this.gamepad = null; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this.gamepad) { const e = this.camera, t = this.gamepad.rightStick; if (t) { if (t.x != 0) { const r = t.x / this.gamepadRotationSensibility; r != 0 && Math.abs(r) > 5e-3 && (e.inertialAlphaOffset += r); } if (t.y != 0) { const r = t.y / this.gamepadRotationSensibility * this._yAxisScale; r != 0 && Math.abs(r) > 5e-3 && (e.inertialBetaOffset += r); } } const i = this.gamepad.leftStick; if (i && i.y != 0) { const r = i.y / this.gamepadMoveSensibility; r != 0 && Math.abs(r) > 5e-3 && (this.camera.inertialRadiusOffset -= r); } } } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraGamepadInput"; } /** * Get the friendly name associated with the input class. 
* @returns the input friendly name */ getSimpleName() { return "gamepad"; } } F([ W() ], jL.prototype, "gamepadRotationSensibility", void 0); F([ W() ], jL.prototype, "gamepadMoveSensibility", void 0); Dd.ArcRotateCameraGamepadInput = jL; class H4 { constructor() { this.keysUp = [38], this.keysDown = [40], this.keysLeft = [37], this.keysRight = [39], this.keysReset = [220], this.panningSensibility = 50, this.zoomingSensibility = 25, this.useAltToZoom = !0, this.angularSpeed = 0.01, this._keys = new Array(); } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), !this._onCanvasBlurObserver && (this._scene = this.camera.getScene(), this._engine = this._scene.getEngine(), this._onCanvasBlurObserver = this._engine.onCanvasBlurObservable.add(() => { this._keys.length = 0; }), this._onKeyboardObserver = this._scene.onKeyboardObservable.add((t) => { const i = t.event; if (!i.metaKey) { if (t.type === rx.KEYDOWN) this._ctrlPressed = i.ctrlKey, this._altPressed = i.altKey, (this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1 || this.keysReset.indexOf(i.keyCode) !== -1) && (this._keys.indexOf(i.keyCode) === -1 && this._keys.push(i.keyCode), i.preventDefault && (e || i.preventDefault())); else if (this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1 || this.keysReset.indexOf(i.keyCode) !== -1) { const r = this._keys.indexOf(i.keyCode); r >= 0 && this._keys.splice(r, 1), i.preventDefault && (e || i.preventDefault()); } } })); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._scene && (this._onKeyboardObserver && this._scene.onKeyboardObservable.remove(this._onKeyboardObserver), this._onCanvasBlurObserver && this._engine.onCanvasBlurObservable.remove(this._onCanvasBlurObserver), this._onKeyboardObserver = null, this._onCanvasBlurObserver = null), this._keys.length = 0; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this._onKeyboardObserver) { const e = this.camera; for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t]; this.keysLeft.indexOf(i) !== -1 ? this._ctrlPressed && this.camera._useCtrlForPanning ? e.inertialPanningX -= 1 / this.panningSensibility : e.inertialAlphaOffset -= this.angularSpeed : this.keysUp.indexOf(i) !== -1 ? this._ctrlPressed && this.camera._useCtrlForPanning ? e.inertialPanningY += 1 / this.panningSensibility : this._altPressed && this.useAltToZoom ? e.inertialRadiusOffset += 1 / this.zoomingSensibility : e.inertialBetaOffset -= this.angularSpeed : this.keysRight.indexOf(i) !== -1 ? this._ctrlPressed && this.camera._useCtrlForPanning ? e.inertialPanningX += 1 / this.panningSensibility : e.inertialAlphaOffset += this.angularSpeed : this.keysDown.indexOf(i) !== -1 ? this._ctrlPressed && this.camera._useCtrlForPanning ? e.inertialPanningY -= 1 / this.panningSensibility : this._altPressed && this.useAltToZoom ? 
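/* ArcRotateCameraKeyboardMoveInput stores plain keyCodes, so remapping it is just editing the arrays on the attached instance. A sketch, assuming an ArcRotateCamera whose inputs are already attached ("keyboard" is the simple name this input registers under; 87/83 are the W/S keyCodes):
   const kb = camera.inputs.attached.keyboard;
   if (kb) {
     kb.keysUp.push(87);           // rotate up with W as well as ArrowUp
     kb.keysDown.push(83);         // rotate down with S as well as ArrowDown
     kb.panningSensibility = 25;   // lower value = stronger Ctrl+arrow panning
   }
*/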
e.inertialRadiusOffset -= 1 / this.zoomingSensibility : e.inertialBetaOffset += this.angularSpeed : this.keysReset.indexOf(i) !== -1 && e.useInputToRestoreState && e.restoreState(); } } } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraKeyboardMoveInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "keyboard"; } } F([ W() ], H4.prototype, "keysUp", void 0); F([ W() ], H4.prototype, "keysDown", void 0); F([ W() ], H4.prototype, "keysLeft", void 0); F([ W() ], H4.prototype, "keysRight", void 0); F([ W() ], H4.prototype, "keysReset", void 0); F([ W() ], H4.prototype, "panningSensibility", void 0); F([ W() ], H4.prototype, "zoomingSensibility", void 0); F([ W() ], H4.prototype, "useAltToZoom", void 0); F([ W() ], H4.prototype, "angularSpeed", void 0); Dd.ArcRotateCameraKeyboardMoveInput = H4; const Xce = 40; class Aw { constructor() { this.wheelPrecision = 3, this.zoomToMouseLocation = !1, this.wheelDeltaPercentage = 0, this.customComputeDeltaFromMouseWheel = null, this._viewOffset = new D(0, 0, 0), this._globalOffset = new D(0, 0, 0), this._inertialPanning = D.Zero(); } _computeDeltaFromMouseWheelLegacyEvent(e, t) { let i = 0; const r = e * 0.01 * this.wheelDeltaPercentage * t; return e > 0 ? i = r / (1 + this.wheelDeltaPercentage) : i = r * (1 + this.wheelDeltaPercentage), i; } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), this._wheel = (t) => { if (t.type !== si.POINTERWHEEL) return; const i = t.event; let r = 0; const s = i.deltaMode === UI.DOM_DELTA_LINE ? Xce : 1, n = -(i.deltaY * s); if (this.customComputeDeltaFromMouseWheel) r = this.customComputeDeltaFromMouseWheel(n, this, i); else if (this.wheelDeltaPercentage) { if (r = this._computeDeltaFromMouseWheelLegacyEvent(n, this.camera.radius), r > 0) { let a = this.camera.radius, l = this.camera.inertialRadiusOffset + r; for (let o = 0; o < 20 && Math.abs(l) > 1e-3; o++) a -= l, l *= this.camera.inertia; a = yt.Clamp(a, 0, Number.MAX_VALUE), r = this._computeDeltaFromMouseWheelLegacyEvent(n, a); } } else r = n / (this.wheelPrecision * 40); r && (this.zoomToMouseLocation ? (this._hitPlane || this._updateHitPlane(), this._zoomToMouse(r)) : this.camera.inertialRadiusOffset += r), i.preventDefault && (e || i.preventDefault()); }, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._wheel, si.POINTERWHEEL), this.zoomToMouseLocation && this._inertialPanning.setAll(0); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._observer && (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._observer = null, this._wheel = null); } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. 
*/ checkInputs() { if (!this.zoomToMouseLocation) return; const e = this.camera; 0 + e.inertialAlphaOffset + e.inertialBetaOffset + e.inertialRadiusOffset && (this._updateHitPlane(), e.target.addInPlace(this._inertialPanning), this._inertialPanning.scaleInPlace(e.inertia), this._zeroIfClose(this._inertialPanning)); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraMouseWheelInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "mousewheel"; } _updateHitPlane() { const e = this.camera, t = e.target.subtract(e.position); this._hitPlane = Sd.FromPositionAndNormal(e.target, t); } // Get position on the hit plane _getPosition() { var e; const t = this.camera, i = t.getScene(), r = i.createPickingRay(i.pointerX, i.pointerY, Ae.Identity(), t, !1); (t.targetScreenOffset.x !== 0 || t.targetScreenOffset.y !== 0) && (this._viewOffset.set(t.targetScreenOffset.x, t.targetScreenOffset.y, 0), t.getViewMatrix().invertToRef(t._cameraTransformMatrix), this._globalOffset = D.TransformNormal(this._viewOffset, t._cameraTransformMatrix), r.origin.addInPlace(this._globalOffset)); let s = 0; return this._hitPlane && (s = (e = r.intersectsPlane(this._hitPlane)) !== null && e !== void 0 ? e : 0), r.origin.addInPlace(r.direction.scaleInPlace(s)); } _zoomToMouse(e) { var t, i; const r = this.camera, s = 1 - r.inertia; if (r.lowerRadiusLimit) { const u = (t = r.lowerRadiusLimit) !== null && t !== void 0 ? t : 0; r.radius - (r.inertialRadiusOffset + e) / s < u && (e = (r.radius - u) * s - r.inertialRadiusOffset); } if (r.upperRadiusLimit) { const u = (i = r.upperRadiusLimit) !== null && i !== void 0 ? i : 0; r.radius - (r.inertialRadiusOffset + e) / s > u && (e = (r.radius - u) * s - r.inertialRadiusOffset); } const a = e / s / r.radius, l = this._getPosition(), o = de.Vector3[6]; l.subtractToRef(r.target, o), o.scaleInPlace(a), o.scaleInPlace(s), this._inertialPanning.addInPlace(o), r.inertialRadiusOffset += e; } // Sets x y or z of passed in vector to zero if less than Epsilon. _zeroIfClose(e) { Math.abs(e.x) < Sr && (e.x = 0), Math.abs(e.y) < Sr && (e.y = 0), Math.abs(e.z) < Sr && (e.z = 0); } } F([ W() ], Aw.prototype, "wheelPrecision", void 0); F([ W() ], Aw.prototype, "zoomToMouseLocation", void 0); F([ W() ], Aw.prototype, "wheelDeltaPercentage", void 0); Dd.ArcRotateCameraMouseWheelInput = Aw; class N1 extends qB { constructor() { super(...arguments), this.buttons = [0, 1, 2], this.angularSensibilityX = 1e3, this.angularSensibilityY = 1e3, this.pinchPrecision = 12, this.pinchDeltaPercentage = 0, this.useNaturalPinchZoom = !1, this.pinchZoom = !0, this.panningSensibility = 1e3, this.multiTouchPanning = !0, this.multiTouchPanAndZoom = !0, this.pinchInwards = !0, this._isPanClick = !1, this._twoFingerActivityCount = 0, this._isPinching = !1; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraPointersInput"; } /** * Move camera from multi touch panning positions. * @param previousMultiTouchPanPosition * @param multiTouchPanPosition */ _computeMultiTouchPanning(e, t) { if (this.panningSensibility !== 0 && e && t) { const i = t.x - e.x, r = t.y - e.y; this.camera.inertialPanningX += -i / this.panningSensibility, this.camera.inertialPanningY += r / this.panningSensibility; } } /** * Move camera from pinch zoom distances. 
* @param previousPinchSquaredDistance * @param pinchSquaredDistance */ _computePinchZoom(e, t) { const i = this.camera.radius || N1.MinimumRadiusForPinch; this.useNaturalPinchZoom ? this.camera.radius = i * Math.sqrt(e) / Math.sqrt(t) : this.pinchDeltaPercentage ? this.camera.inertialRadiusOffset += (t - e) * 1e-3 * i * this.pinchDeltaPercentage : this.camera.inertialRadiusOffset += (t - e) / (this.pinchPrecision * (this.pinchInwards ? 1 : -1) * (this.angularSensibilityX + this.angularSensibilityY) / 2); } /** * Called on pointer POINTERMOVE event if only a single touch is active. * @param point * @param offsetX * @param offsetY */ onTouch(e, t, i) { this.panningSensibility !== 0 && (this._ctrlKey && this.camera._useCtrlForPanning || this._isPanClick) ? (this.camera.inertialPanningX += -t / this.panningSensibility, this.camera.inertialPanningY += i / this.panningSensibility) : (this.camera.inertialAlphaOffset -= t / this.angularSensibilityX, this.camera.inertialBetaOffset -= i / this.angularSensibilityY); } /** * Called on pointer POINTERDOUBLETAP event. */ onDoubleTap() { this.camera.useInputToRestoreState && this.camera.restoreState(); } /** * Called on pointer POINTERMOVE event if multiple touches are active. * @param pointA * @param pointB * @param previousPinchSquaredDistance * @param pinchSquaredDistance * @param previousMultiTouchPanPosition * @param multiTouchPanPosition */ onMultiTouch(e, t, i, r, s, n) { i === 0 && s === null || r === 0 && n === null || (this.multiTouchPanAndZoom ? (this._computePinchZoom(i, r), this._computeMultiTouchPanning(s, n)) : this.multiTouchPanning && this.pinchZoom ? (this._twoFingerActivityCount++, this._isPinching || this._twoFingerActivityCount < 20 && Math.abs(Math.sqrt(r) - Math.sqrt(i)) > this.camera.pinchToPanMaxDistance ? (this._computePinchZoom(i, r), this._isPinching = !0) : this._computeMultiTouchPanning(s, n)) : this.multiTouchPanning ? this._computeMultiTouchPanning(s, n) : this.pinchZoom && this._computePinchZoom(i, r)); } /** * Called each time a new POINTERDOWN event occurs. Ie, for each button * press. * @param evt Defines the event to track */ onButtonDown(e) { this._isPanClick = e.button === this.camera._panningMouseButton; } /** * Called each time a new POINTERUP event occurs. Ie, for each button * release. * @param _evt Defines the event to track */ onButtonUp(e) { this._twoFingerActivityCount = 0, this._isPinching = !1; } /** * Called when window becomes inactive. */ onLostFocus() { this._isPanClick = !1, this._twoFingerActivityCount = 0, this._isPinching = !1; } } N1.MinimumRadiusForPinch = 1e-3; F([ W() ], N1.prototype, "buttons", void 0); F([ W() ], N1.prototype, "angularSensibilityX", void 0); F([ W() ], N1.prototype, "angularSensibilityY", void 0); F([ W() ], N1.prototype, "pinchPrecision", void 0); F([ W() ], N1.prototype, "pinchDeltaPercentage", void 0); F([ W() ], N1.prototype, "useNaturalPinchZoom", void 0); F([ W() ], N1.prototype, "pinchZoom", void 0); F([ W() ], N1.prototype, "panningSensibility", void 0); F([ W() ], N1.prototype, "multiTouchPanning", void 0); F([ W() ], N1.prototype, "multiTouchPanAndZoom", void 0); Dd.ArcRotateCameraPointersInput = N1; class JB extends WL { /** * Instantiates a new ArcRotateCameraInputsManager. * @param camera Defines the camera the inputs belong to */ constructor(e) { super(e); } /** * Add mouse wheel input support to the input manager. * @returns the current input manager */ addMouseWheel() { return this.add(new Aw()), this; } /** * Add pointers input support to the input manager. 
* @returns the current input manager */ addPointers() { return this.add(new N1()), this; } /** * Add keyboard input support to the input manager. * @returns the current input manager */ addKeyboard() { return this.add(new H4()), this; } } JB.prototype.addVRDeviceOrientation = function() { return this.add(new MK()), this; }; class MK { /** * Instantiate a new ArcRotateCameraVRDeviceOrientationInput. */ constructor() { this.alphaCorrection = 1, this.gammaCorrection = 1, this._alpha = 0, this._gamma = 0, this._dirty = !1, this._deviceOrientationHandler = (e) => this._onOrientationEvent(e); } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), this.camera.attachControl(e); const t = this.camera.getScene().getEngine().getHostWindow(); t && (typeof DeviceOrientationEvent < "u" && typeof DeviceOrientationEvent.requestPermission == "function" ? DeviceOrientationEvent.requestPermission().then((i) => { i === "granted" ? t.addEventListener("deviceorientation", this._deviceOrientationHandler) : Ve.Warn("Permission not granted."); }).catch((i) => { Ve.Error(i); }) : t.addEventListener("deviceorientation", this._deviceOrientationHandler)); } /** * @internal */ _onOrientationEvent(e) { e.alpha !== null && (this._alpha = (+e.alpha | 0) * this.alphaCorrection), e.gamma !== null && (this._gamma = (+e.gamma | 0) * this.gammaCorrection), this._dirty = !0; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { this._dirty && (this._dirty = !1, this._gamma < 0 && (this._gamma = 180 + this._gamma), this.camera.alpha = -this._alpha / 180 * Math.PI % Math.PI * 2, this.camera.beta = this._gamma / 180 * Math.PI); } /** * Detach the current controls from the specified dom element. */ detachControl() { window.removeEventListener("deviceorientation", this._deviceOrientationHandler); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraVRDeviceOrientationInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "VRDeviceOrientation"; } } Dd.ArcRotateCameraVRDeviceOrientationInput = MK; class TT { constructor() { this.keysForward = [87], this.keysBackward = [83], this.keysUp = [69], this.keysDown = [81], this.keysRight = [68], this.keysLeft = [65], this._keys = new Array(); } /** * Attach the input controls to a specific dom element to get the input from. 
* @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), !this._onCanvasBlurObserver && (this._scene = this.camera.getScene(), this._engine = this._scene.getEngine(), this._onCanvasBlurObserver = this._engine.onCanvasBlurObservable.add(() => { this._keys.length = 0; }), this._onKeyboardObserver = this._scene.onKeyboardObservable.add((t) => { const i = t.event; if (t.type === rx.KEYDOWN) (this.keysForward.indexOf(i.keyCode) !== -1 || this.keysBackward.indexOf(i.keyCode) !== -1 || this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1) && (this._keys.indexOf(i.keyCode) === -1 && this._keys.push(i.keyCode), e || i.preventDefault()); else if (this.keysForward.indexOf(i.keyCode) !== -1 || this.keysBackward.indexOf(i.keyCode) !== -1 || this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1) { const r = this._keys.indexOf(i.keyCode); r >= 0 && this._keys.splice(r, 1), e || i.preventDefault(); } })); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._scene && (this._onKeyboardObserver && this._scene.onKeyboardObservable.remove(this._onKeyboardObserver), this._onCanvasBlurObserver && this._engine.onCanvasBlurObservable.remove(this._onCanvasBlurObserver), this._onKeyboardObserver = null, this._onCanvasBlurObserver = null), this._keys.length = 0; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FlyCameraKeyboardInput"; } /** * @internal */ _onLostFocus() { this._keys.length = 0; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "keyboard"; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this._onKeyboardObserver) { const e = this.camera; for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t], r = e._computeLocalCameraSpeed(); this.keysForward.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, 0, r) : this.keysBackward.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, 0, -r) : this.keysUp.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, r, 0) : this.keysDown.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, -r, 0) : this.keysRight.indexOf(i) !== -1 ? 
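/* FlyCameraKeyboardInput defaults to W/S for forward/back, E/Q for up/down and D/A for right/left; each frame checkInputs turns the pressed keys into a camera-local direction, rotates it into world space through the inverted view matrix and adds it to cameraDirection. A remap sketch, assuming a FlyCamera with its default inputs attached ("keyboard" is this input's simple name; 38/40 are the arrow-key codes):
   const keys = flyCamera.inputs.attached.keyboard;
   if (keys) {
     keys.keysForward = [87, 38];   // W or ArrowUp
     keys.keysBackward = [83, 40];  // S or ArrowDown
   }
*/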
e._localDirection.copyFromFloats(r, 0, 0) : this.keysLeft.indexOf(i) !== -1 && e._localDirection.copyFromFloats(-r, 0, 0), e.getScene().useRightHandedSystem && (e._localDirection.z *= -1), e.getViewMatrix().invertToRef(e._cameraTransformMatrix), D.TransformNormalToRef(e._localDirection, e._cameraTransformMatrix, e._transformedDirection), e.cameraDirection.addInPlace(e._transformedDirection); } } } } F([ W() ], TT.prototype, "keysForward", void 0); F([ W() ], TT.prototype, "keysBackward", void 0); F([ W() ], TT.prototype, "keysUp", void 0); F([ W() ], TT.prototype, "keysDown", void 0); F([ W() ], TT.prototype, "keysRight", void 0); F([ W() ], TT.prototype, "keysLeft", void 0); Dd.FlyCameraKeyboardInput = TT; class XL { /** * Listen to mouse events to control the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/customizingCameraInputs */ constructor() { this.buttons = [0, 1, 2], this.buttonsYaw = [-1, 0, 1], this.buttonsPitch = [-1, 0, 1], this.buttonsRoll = [2], this.activeButton = -1, this.angularSensibility = 1e3, this._previousPosition = null; } /** * Attach the mouse control to the HTML DOM element. * @param noPreventDefault Defines whether events caught by the controls should call preventdefault(). */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), this._noPreventDefault = e, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver((t) => { this._pointerInput(t); }, si.POINTERDOWN | si.POINTERUP | si.POINTERMOVE), this._rollObserver = this.camera.getScene().onBeforeRenderObservable.add(() => { this.camera.rollCorrect && this.camera.restoreRoll(this.camera.rollCorrect); }); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._observer && (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this.camera.getScene().onBeforeRenderObservable.remove(this._rollObserver), this._observer = null, this._rollObserver = null, this._previousPosition = null, this._noPreventDefault = void 0); } /** * Gets the class name of the current input. * @returns the class name. */ getClassName() { return "FlyCameraMouseInput"; } /** * Get the friendly name associated with the input class. * @returns the input's friendly name. */ getSimpleName() { return "mouse"; } // Track mouse movement, when the pointer is not locked. _pointerInput(e) { const t = e.event, r = this.camera.getEngine(); if (!this.touchEnabled && t.pointerType === "touch" || e.type !== si.POINTERMOVE && this.buttons.indexOf(t.button) === -1) return; const s = t.target; if (e.type === si.POINTERDOWN) { try { s == null || s.setPointerCapture(t.pointerId); } catch { } this._previousPosition = { x: t.clientX, y: t.clientY }, this.activeButton = t.button, this._noPreventDefault || (t.preventDefault(), this._element.focus()), r.isPointerLock && this._onMouseMove(e.event); } else if (e.type === si.POINTERUP) { try { s == null || s.releasePointerCapture(t.pointerId); } catch { } this.activeButton = -1, this._previousPosition = null, this._noPreventDefault || t.preventDefault(); } else if (e.type === si.POINTERMOVE) { if (!this._previousPosition) { r.isPointerLock && this._onMouseMove(e.event); return; } const n = t.clientX - this._previousPosition.x, a = t.clientY - this._previousPosition.y; this._rotateCamera(n, a), this._previousPosition = { x: t.clientX, y: t.clientY }, this._noPreventDefault || t.preventDefault(); } } // Track mouse movement, when pointer is locked. 
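/* While the engine is in pointer lock there are no absolute client coordinates, so the handler below reads event.movementX / movementY directly instead of diffing _previousPosition. A usage sketch, assuming the standard Babylon.js Engine.enterPointerlock helper and Scene.onPointerDown callback (neither is defined in this chunk) and a FlyCamera with its default "mouse" input attached:
   scene.onPointerDown = () => engine.enterPointerlock();
   const mouse = flyCamera.inputs.attached.mouse;
   if (mouse) {
     mouse.angularSensibility = 2000; // default is 1000; higher values rotate more slowly
   }
*/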
_onMouseMove(e) { if (!this.camera.getEngine().isPointerLock) return; const r = e.movementX, s = e.movementY; this._rotateCamera(r, s), this._previousPosition = null, this._noPreventDefault || e.preventDefault(); } /** * Rotate camera by mouse offset. * @param offsetX * @param offsetY */ _rotateCamera(e, t) { const i = this.camera, r = i._calculateHandednessMultiplier(); e *= r; const s = e / this.angularSensibility, n = t / this.angularSensibility, a = Ze.RotationYawPitchRoll(i.rotation.y, i.rotation.x, i.rotation.z); let l; if (this.buttonsPitch.some((o) => o === this.activeButton) && (l = Ze.RotationAxis(bl.X, n), a.multiplyInPlace(l)), this.buttonsYaw.some((o) => o === this.activeButton)) { l = Ze.RotationAxis(bl.Y, s), a.multiplyInPlace(l); const o = i.bankedTurnLimit + i._trackRoll; if (i.bankedTurn && -o < i.rotation.z && i.rotation.z < o) { const u = i.bankedTurnMultiplier * -s; l = Ze.RotationAxis(bl.Z, u), a.multiplyInPlace(l); } } this.buttonsRoll.some((o) => o === this.activeButton) && (l = Ze.RotationAxis(bl.Z, -s), i._trackRoll -= s, a.multiplyInPlace(l)), a.toEulerAnglesToRef(i.rotation); } } F([ W() ], XL.prototype, "buttons", void 0); F([ W() ], XL.prototype, "angularSensibility", void 0); Dd.FlyCameraMouseInput = XL; class rd { constructor() { this.keysHeightOffsetIncr = [38], this.keysHeightOffsetDecr = [40], this.keysHeightOffsetModifierAlt = !1, this.keysHeightOffsetModifierCtrl = !1, this.keysHeightOffsetModifierShift = !1, this.keysRotationOffsetIncr = [37], this.keysRotationOffsetDecr = [39], this.keysRotationOffsetModifierAlt = !1, this.keysRotationOffsetModifierCtrl = !1, this.keysRotationOffsetModifierShift = !1, this.keysRadiusIncr = [40], this.keysRadiusDecr = [38], this.keysRadiusModifierAlt = !0, this.keysRadiusModifierCtrl = !1, this.keysRadiusModifierShift = !1, this.heightSensibility = 1, this.rotationSensibility = 1, this.radiusSensibility = 1, this._keys = new Array(); } /** * Attach the input controls to a specific dom element to get the input from. 
* @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), !this._onCanvasBlurObserver && (this._scene = this.camera.getScene(), this._engine = this._scene.getEngine(), this._onCanvasBlurObserver = this._engine.onCanvasBlurObservable.add(() => { this._keys.length = 0; }), this._onKeyboardObserver = this._scene.onKeyboardObservable.add((t) => { const i = t.event; if (!i.metaKey) { if (t.type === rx.KEYDOWN) this._ctrlPressed = i.ctrlKey, this._altPressed = i.altKey, this._shiftPressed = i.shiftKey, (this.keysHeightOffsetIncr.indexOf(i.keyCode) !== -1 || this.keysHeightOffsetDecr.indexOf(i.keyCode) !== -1 || this.keysRotationOffsetIncr.indexOf(i.keyCode) !== -1 || this.keysRotationOffsetDecr.indexOf(i.keyCode) !== -1 || this.keysRadiusIncr.indexOf(i.keyCode) !== -1 || this.keysRadiusDecr.indexOf(i.keyCode) !== -1) && (this._keys.indexOf(i.keyCode) === -1 && this._keys.push(i.keyCode), i.preventDefault && (e || i.preventDefault())); else if (this.keysHeightOffsetIncr.indexOf(i.keyCode) !== -1 || this.keysHeightOffsetDecr.indexOf(i.keyCode) !== -1 || this.keysRotationOffsetIncr.indexOf(i.keyCode) !== -1 || this.keysRotationOffsetDecr.indexOf(i.keyCode) !== -1 || this.keysRadiusIncr.indexOf(i.keyCode) !== -1 || this.keysRadiusDecr.indexOf(i.keyCode) !== -1) { const r = this._keys.indexOf(i.keyCode); r >= 0 && this._keys.splice(r, 1), i.preventDefault && (e || i.preventDefault()); } } })); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._scene && (this._onKeyboardObserver && this._scene.onKeyboardObservable.remove(this._onKeyboardObserver), this._onCanvasBlurObserver && this._engine.onCanvasBlurObservable.remove(this._onCanvasBlurObserver), this._onKeyboardObserver = null, this._onCanvasBlurObserver = null), this._keys.length = 0; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { this._onKeyboardObserver && this._keys.forEach((e) => { this.keysHeightOffsetIncr.indexOf(e) !== -1 && this._modifierHeightOffset() ? this.camera.heightOffset += this.heightSensibility : this.keysHeightOffsetDecr.indexOf(e) !== -1 && this._modifierHeightOffset() ? this.camera.heightOffset -= this.heightSensibility : this.keysRotationOffsetIncr.indexOf(e) !== -1 && this._modifierRotationOffset() ? (this.camera.rotationOffset += this.rotationSensibility, this.camera.rotationOffset %= 360) : this.keysRotationOffsetDecr.indexOf(e) !== -1 && this._modifierRotationOffset() ? (this.camera.rotationOffset -= this.rotationSensibility, this.camera.rotationOffset %= 360) : this.keysRadiusIncr.indexOf(e) !== -1 && this._modifierRadius() ? this.camera.radius += this.radiusSensibility : this.keysRadiusDecr.indexOf(e) !== -1 && this._modifierRadius() && (this.camera.radius -= this.radiusSensibility); }); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FollowCameraKeyboardMoveInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "keyboard"; } /** * Check if the pressed modifier keys (Alt/Ctrl/Shift) match those configured to * allow modification of the heightOffset value. 
*/ _modifierHeightOffset() { return this.keysHeightOffsetModifierAlt === this._altPressed && this.keysHeightOffsetModifierCtrl === this._ctrlPressed && this.keysHeightOffsetModifierShift === this._shiftPressed; } /** * Check if the pressed modifier keys (Alt/Ctrl/Shift) match those configured to * allow modification of the rotationOffset value. */ _modifierRotationOffset() { return this.keysRotationOffsetModifierAlt === this._altPressed && this.keysRotationOffsetModifierCtrl === this._ctrlPressed && this.keysRotationOffsetModifierShift === this._shiftPressed; } /** * Check if the pressed modifier keys (Alt/Ctrl/Shift) match those configured to * allow modification of the radius value. */ _modifierRadius() { return this.keysRadiusModifierAlt === this._altPressed && this.keysRadiusModifierCtrl === this._ctrlPressed && this.keysRadiusModifierShift === this._shiftPressed; } } F([ W() ], rd.prototype, "keysHeightOffsetIncr", void 0); F([ W() ], rd.prototype, "keysHeightOffsetDecr", void 0); F([ W() ], rd.prototype, "keysHeightOffsetModifierAlt", void 0); F([ W() ], rd.prototype, "keysHeightOffsetModifierCtrl", void 0); F([ W() ], rd.prototype, "keysHeightOffsetModifierShift", void 0); F([ W() ], rd.prototype, "keysRotationOffsetIncr", void 0); F([ W() ], rd.prototype, "keysRotationOffsetDecr", void 0); F([ W() ], rd.prototype, "keysRotationOffsetModifierAlt", void 0); F([ W() ], rd.prototype, "keysRotationOffsetModifierCtrl", void 0); F([ W() ], rd.prototype, "keysRotationOffsetModifierShift", void 0); F([ W() ], rd.prototype, "keysRadiusIncr", void 0); F([ W() ], rd.prototype, "keysRadiusDecr", void 0); F([ W() ], rd.prototype, "keysRadiusModifierAlt", void 0); F([ W() ], rd.prototype, "keysRadiusModifierCtrl", void 0); F([ W() ], rd.prototype, "keysRadiusModifierShift", void 0); F([ W() ], rd.prototype, "heightSensibility", void 0); F([ W() ], rd.prototype, "rotationSensibility", void 0); F([ W() ], rd.prototype, "radiusSensibility", void 0); Dd.FollowCameraKeyboardMoveInput = rd; class C5 { constructor() { this.axisControlRadius = !0, this.axisControlHeight = !1, this.axisControlRotation = !1, this.wheelPrecision = 3, this.wheelDeltaPercentage = 0; } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), this._wheel = (t) => { if (t.type !== si.POINTERWHEEL) return; const i = t.event; let r = 0; const s = Math.max(-1, Math.min(1, i.deltaY)); this.wheelDeltaPercentage ? (+this.axisControlRadius + +this.axisControlHeight + +this.axisControlRotation && Ce.Warn("wheelDeltaPercentage only usable when mouse wheel controls ONE axis. Currently enabled: axisControlRadius: " + this.axisControlRadius + ", axisControlHeightOffset: " + this.axisControlHeight + ", axisControlRotationOffset: " + this.axisControlRotation), this.axisControlRadius ? r = s * 0.01 * this.wheelDeltaPercentage * this.camera.radius : this.axisControlHeight ? r = s * 0.01 * this.wheelDeltaPercentage * this.camera.heightOffset : this.axisControlRotation && (r = s * 0.01 * this.wheelDeltaPercentage * this.camera.rotationOffset)) : r = s * this.wheelPrecision, r && (this.axisControlRadius ? this.camera.radius += r : this.axisControlHeight ? 
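/* FollowCameraMouseWheelInput can route the wheel to radius, heightOffset or rotationOffset, and wheelDeltaPercentage is only meaningful when exactly one of the three axis flags is enabled (the warning built in attachControl above calls this out). A configuration sketch, assuming a FollowCamera with its default inputs attached ("mousewheel" is the simple name returned below):
   const wheel = followCamera.inputs.attached.mousewheel;
   if (wheel) {
     wheel.axisControlRadius = true;     // the wheel zooms...
     wheel.axisControlHeight = false;    // ...and leaves height
     wheel.axisControlRotation = false;  // ...and rotation alone
     wheel.wheelDeltaPercentage = 3;     // roughly 3% of the current radius per wheel event
   }
*/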
this.camera.heightOffset -= r : this.axisControlRotation && (this.camera.rotationOffset -= r)), i.preventDefault && (e || i.preventDefault()); }, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._wheel, si.POINTERWHEEL); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._observer && (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._observer = null, this._wheel = null); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "ArcRotateCameraMouseWheelInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "mousewheel"; } } F([ W() ], C5.prototype, "axisControlRadius", void 0); F([ W() ], C5.prototype, "axisControlHeight", void 0); F([ W() ], C5.prototype, "axisControlRotation", void 0); F([ W() ], C5.prototype, "wheelPrecision", void 0); F([ W() ], C5.prototype, "wheelDeltaPercentage", void 0); Dd.FollowCameraMouseWheelInput = C5; class F1 extends qB { constructor() { super(...arguments), this.angularSensibilityX = 1, this.angularSensibilityY = 1, this.pinchPrecision = 1e4, this.pinchDeltaPercentage = 0, this.axisXControlRadius = !1, this.axisXControlHeight = !1, this.axisXControlRotation = !0, this.axisYControlRadius = !1, this.axisYControlHeight = !0, this.axisYControlRotation = !1, this.axisPinchControlRadius = !0, this.axisPinchControlHeight = !1, this.axisPinchControlRotation = !1, this.warningEnable = !0, this._warningCounter = 0; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FollowCameraPointersInput"; } onTouch(e, t, i) { this._warning(), this.axisXControlRotation ? this.camera.rotationOffset += t / this.angularSensibilityX : this.axisYControlRotation && (this.camera.rotationOffset += i / this.angularSensibilityX), this.axisXControlHeight ? this.camera.heightOffset += t / this.angularSensibilityY : this.axisYControlHeight && (this.camera.heightOffset += i / this.angularSensibilityY), this.axisXControlRadius ? this.camera.radius -= t / this.angularSensibilityY : this.axisYControlRadius && (this.camera.radius -= i / this.angularSensibilityY); } onMultiTouch(e, t, i, r, s, n) { if (i === 0 && s === null || r === 0 && n === null) return; let a = (r - i) / (this.pinchPrecision * (this.angularSensibilityX + this.angularSensibilityY) / 2); this.pinchDeltaPercentage ? (a *= 0.01 * this.pinchDeltaPercentage, this.axisPinchControlRotation && (this.camera.rotationOffset += a * this.camera.rotationOffset), this.axisPinchControlHeight && (this.camera.heightOffset += a * this.camera.heightOffset), this.axisPinchControlRadius && (this.camera.radius -= a * this.camera.radius)) : (this.axisPinchControlRotation && (this.camera.rotationOffset += a), this.axisPinchControlHeight && (this.camera.heightOffset += a), this.axisPinchControlRadius && (this.camera.radius -= a)); } _warning() { if (!this.warningEnable || this._warningCounter++ % 100 !== 0) return; const e = "It probably only makes sense to control ONE camera property with each pointer axis. Set 'warningEnable = false' if you are sure. 
Currently enabled: "; +this.axisXControlRotation + +this.axisXControlHeight + +this.axisXControlRadius <= 1 && Ce.Warn(e + "axisXControlRotation: " + this.axisXControlRotation + ", axisXControlHeight: " + this.axisXControlHeight + ", axisXControlRadius: " + this.axisXControlRadius), +this.axisYControlRotation + +this.axisYControlHeight + +this.axisYControlRadius <= 1 && Ce.Warn(e + "axisYControlRotation: " + this.axisYControlRotation + ", axisYControlHeight: " + this.axisYControlHeight + ", axisYControlRadius: " + this.axisYControlRadius), +this.axisPinchControlRotation + +this.axisPinchControlHeight + +this.axisPinchControlRadius <= 1 && Ce.Warn(e + "axisPinchControlRotation: " + this.axisPinchControlRotation + ", axisPinchControlHeight: " + this.axisPinchControlHeight + ", axisPinchControlRadius: " + this.axisPinchControlRadius); } } F([ W() ], F1.prototype, "angularSensibilityX", void 0); F([ W() ], F1.prototype, "angularSensibilityY", void 0); F([ W() ], F1.prototype, "pinchPrecision", void 0); F([ W() ], F1.prototype, "pinchDeltaPercentage", void 0); F([ W() ], F1.prototype, "axisXControlRadius", void 0); F([ W() ], F1.prototype, "axisXControlHeight", void 0); F([ W() ], F1.prototype, "axisXControlRotation", void 0); F([ W() ], F1.prototype, "axisYControlRadius", void 0); F([ W() ], F1.prototype, "axisYControlHeight", void 0); F([ W() ], F1.prototype, "axisYControlRotation", void 0); F([ W() ], F1.prototype, "axisPinchControlRadius", void 0); F([ W() ], F1.prototype, "axisPinchControlHeight", void 0); F([ W() ], F1.prototype, "axisPinchControlRotation", void 0); Dd.FollowCameraPointersInput = F1; class Em { constructor() { this.keysUp = [38], this.keysUpward = [33], this.keysDown = [40], this.keysDownward = [34], this.keysLeft = [37], this.keysRight = [39], this.rotationSpeed = 0.5, this.keysRotateLeft = [], this.keysRotateRight = [], this.keysRotateUp = [], this.keysRotateDown = [], this._keys = new Array(); } /** * Attach the input controls to a specific dom element to get the input from. 
* @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments), !this._onCanvasBlurObserver && (this._scene = this.camera.getScene(), this._engine = this._scene.getEngine(), this._onCanvasBlurObserver = this._engine.onCanvasBlurObservable.add(() => { this._keys.length = 0; }), this._onKeyboardObserver = this._scene.onKeyboardObservable.add((t) => { const i = t.event; if (!i.metaKey) { if (t.type === rx.KEYDOWN) (this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1 || this.keysUpward.indexOf(i.keyCode) !== -1 || this.keysDownward.indexOf(i.keyCode) !== -1 || this.keysRotateLeft.indexOf(i.keyCode) !== -1 || this.keysRotateRight.indexOf(i.keyCode) !== -1 || this.keysRotateUp.indexOf(i.keyCode) !== -1 || this.keysRotateDown.indexOf(i.keyCode) !== -1) && (this._keys.indexOf(i.keyCode) === -1 && this._keys.push(i.keyCode), e || i.preventDefault()); else if (this.keysUp.indexOf(i.keyCode) !== -1 || this.keysDown.indexOf(i.keyCode) !== -1 || this.keysLeft.indexOf(i.keyCode) !== -1 || this.keysRight.indexOf(i.keyCode) !== -1 || this.keysUpward.indexOf(i.keyCode) !== -1 || this.keysDownward.indexOf(i.keyCode) !== -1 || this.keysRotateLeft.indexOf(i.keyCode) !== -1 || this.keysRotateRight.indexOf(i.keyCode) !== -1 || this.keysRotateUp.indexOf(i.keyCode) !== -1 || this.keysRotateDown.indexOf(i.keyCode) !== -1) { const r = this._keys.indexOf(i.keyCode); r >= 0 && this._keys.splice(r, 1), e || i.preventDefault(); } } })); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._scene && (this._onKeyboardObserver && this._scene.onKeyboardObservable.remove(this._onKeyboardObserver), this._onCanvasBlurObserver && this._engine.onCanvasBlurObservable.remove(this._onCanvasBlurObserver), this._onKeyboardObserver = null, this._onCanvasBlurObserver = null), this._keys.length = 0; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this._onKeyboardObserver) { const e = this.camera; for (let t = 0; t < this._keys.length; t++) { const i = this._keys[t], r = e._computeLocalCameraSpeed(); this.keysLeft.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(-r, 0, 0) : this.keysUp.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, 0, r) : this.keysRight.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(r, 0, 0) : this.keysDown.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, 0, -r) : this.keysUpward.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, r, 0) : this.keysDownward.indexOf(i) !== -1 ? e._localDirection.copyFromFloats(0, -r, 0) : this.keysRotateLeft.indexOf(i) !== -1 ? (e._localDirection.copyFromFloats(0, 0, 0), e.cameraRotation.y -= this._getLocalRotation()) : this.keysRotateRight.indexOf(i) !== -1 ? (e._localDirection.copyFromFloats(0, 0, 0), e.cameraRotation.y += this._getLocalRotation()) : this.keysRotateUp.indexOf(i) !== -1 ? 
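/* FreeCameraKeyboardMoveInput ships with movement keys bound (arrows plus PageUp/PageDown) but with the keysRotate arrays empty, so keyboard rotation is opt-in. A sketch, assuming a FreeCamera or UniversalCamera with its default "keyboard" input attached (81/69 are the Q/E keyCodes):
   const kb = freeCamera.inputs.attached.keyboard;
   if (kb) {
     kb.keysRotateLeft = [81];   // Q
     kb.keysRotateRight = [69];  // E
     kb.rotationSpeed = 0.75;    // applied as rotationSpeed * deltaTime / 1000 each frame
   }
*/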
(e._localDirection.copyFromFloats(0, 0, 0), e.cameraRotation.x -= this._getLocalRotation()) : this.keysRotateDown.indexOf(i) !== -1 && (e._localDirection.copyFromFloats(0, 0, 0), e.cameraRotation.x += this._getLocalRotation()), e.getScene().useRightHandedSystem && (e._localDirection.z *= -1), e.getViewMatrix().invertToRef(e._cameraTransformMatrix), D.TransformNormalToRef(e._localDirection, e._cameraTransformMatrix, e._transformedDirection), e.cameraDirection.addInPlace(e._transformedDirection); } } } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraKeyboardMoveInput"; } /** @internal */ _onLostFocus() { this._keys.length = 0; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "keyboard"; } _getLocalRotation() { const e = this.camera._calculateHandednessMultiplier(); return this.rotationSpeed * this._engine.getDeltaTime() / 1e3 * e; } } F([ W() ], Em.prototype, "keysUp", void 0); F([ W() ], Em.prototype, "keysUpward", void 0); F([ W() ], Em.prototype, "keysDown", void 0); F([ W() ], Em.prototype, "keysDownward", void 0); F([ W() ], Em.prototype, "keysLeft", void 0); F([ W() ], Em.prototype, "keysRight", void 0); F([ W() ], Em.prototype, "rotationSpeed", void 0); F([ W() ], Em.prototype, "keysRotateLeft", void 0); F([ W() ], Em.prototype, "keysRotateRight", void 0); F([ W() ], Em.prototype, "keysRotateUp", void 0); F([ W() ], Em.prototype, "keysRotateDown", void 0); Dd.FreeCameraKeyboardMoveInput = Em; class YL { /** * Manage the mouse inputs to control the movement of a free camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/customizingCameraInputs * @param touchEnabled Defines if touch is enabled or not */ constructor(e = !0) { this.touchEnabled = e, this.buttons = [0, 1, 2], this.angularSensibility = 2e3, this._previousPosition = null, this.onPointerMovedObservable = new Fe(), this._allowCameraRotation = !0, this._currentActiveButton = -1, this._activePointerId = -1; } /** * Attach the input controls to a specific dom element to get the input from. 
* @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments); const t = this.camera.getEngine(), i = t.getInputElement(); this._pointerInput || (this._pointerInput = (r) => { const s = r.event, n = s.pointerType === "touch"; if (!this.touchEnabled && n || r.type !== si.POINTERMOVE && this.buttons.indexOf(s.button) === -1) return; const a = s.target; if (r.type === si.POINTERDOWN) { if (n && this._activePointerId !== -1 || !n && this._currentActiveButton !== -1) return; this._activePointerId = s.pointerId; try { a == null || a.setPointerCapture(s.pointerId); } catch { } this._currentActiveButton === -1 && (this._currentActiveButton = s.button), this._previousPosition = { x: s.clientX, y: s.clientY }, e || (s.preventDefault(), i && i.focus()), t.isPointerLock && this._onMouseMove && this._onMouseMove(r.event); } else if (r.type === si.POINTERUP) { if (n && this._activePointerId !== s.pointerId || !n && this._currentActiveButton !== s.button) return; try { a == null || a.releasePointerCapture(s.pointerId); } catch { } this._currentActiveButton = -1, this._previousPosition = null, e || s.preventDefault(), this._activePointerId = -1; } else if (r.type === si.POINTERMOVE && (this._activePointerId === s.pointerId || !n)) { if (t.isPointerLock && this._onMouseMove) this._onMouseMove(r.event); else if (this._previousPosition) { const l = this.camera._calculateHandednessMultiplier(), o = (s.clientX - this._previousPosition.x) * l, u = s.clientY - this._previousPosition.y; this._allowCameraRotation && (this.camera.cameraRotation.y += o / this.angularSensibility, this.camera.cameraRotation.x += u / this.angularSensibility), this.onPointerMovedObservable.notifyObservers({ offsetX: o, offsetY: u }), this._previousPosition = { x: s.clientX, y: s.clientY }, e || s.preventDefault(); } } }), this._onMouseMove = (r) => { if (!t.isPointerLock) return; const s = this.camera._calculateHandednessMultiplier(), n = r.movementX * s; this.camera.cameraRotation.y += n / this.angularSensibility; const a = r.movementY; this.camera.cameraRotation.x += a / this.angularSensibility, this._previousPosition = null, e || r.preventDefault(); }, this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._pointerInput, si.POINTERDOWN | si.POINTERUP | si.POINTERMOVE), i && (this._contextMenuBind = (r) => this.onContextMenu(r), i.addEventListener("contextmenu", this._contextMenuBind, !1)); } /** * Called on JS contextmenu event. * Override this method to provide functionality. * @param evt */ onContextMenu(e) { e.preventDefault(); } /** * Detach the current controls from the specified dom element. */ detachControl() { if (this._observer) { if (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._contextMenuBind) { const t = this.camera.getEngine().getInputElement(); t && t.removeEventListener("contextmenu", this._contextMenuBind); } this.onPointerMovedObservable && this.onPointerMovedObservable.clear(), this._observer = null, this._onMouseMove = null, this._previousPosition = null; } this._activePointerId = -1, this._currentActiveButton = -1; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraMouseInput"; } /** * Get the friendly name associated with the input class. 
* @returns the input friendly name */ getSimpleName() { return "mouse"; } } F([ W() ], YL.prototype, "buttons", void 0); F([ W() ], YL.prototype, "angularSensibility", void 0); Dd.FreeCameraMouseInput = YL; var ja; (function(c) { c[c.MoveRelative = 0] = "MoveRelative", c[c.RotateRelative = 1] = "RotateRelative", c[c.MoveScene = 2] = "MoveScene"; })(ja || (ja = {})); class G4 extends KL { constructor() { super(...arguments), this._moveRelative = D.Zero(), this._rotateRelative = D.Zero(), this._moveScene = D.Zero(), this._wheelXAction = ja.MoveRelative, this._wheelXActionCoordinate = W8.X, this._wheelYAction = ja.MoveRelative, this._wheelYActionCoordinate = W8.Z, this._wheelZAction = null, this._wheelZActionCoordinate = null; } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraMouseWheelInput"; } /** * Set which movement axis (relative to camera's orientation) the mouse * wheel's X axis controls. * @param axis The axis to be moved. Set null to clear. */ set wheelXMoveRelative(e) { e === null && this._wheelXAction !== ja.MoveRelative || (this._wheelXAction = ja.MoveRelative, this._wheelXActionCoordinate = e); } /** * Get the configured movement axis (relative to camera's orientation) the * mouse wheel's X axis controls. * @returns The configured axis or null if none. */ get wheelXMoveRelative() { return this._wheelXAction !== ja.MoveRelative ? null : this._wheelXActionCoordinate; } /** * Set which movement axis (relative to camera's orientation) the mouse * wheel's Y axis controls. * @param axis The axis to be moved. Set null to clear. */ set wheelYMoveRelative(e) { e === null && this._wheelYAction !== ja.MoveRelative || (this._wheelYAction = ja.MoveRelative, this._wheelYActionCoordinate = e); } /** * Get the configured movement axis (relative to camera's orientation) the * mouse wheel's Y axis controls. * @returns The configured axis or null if none. */ get wheelYMoveRelative() { return this._wheelYAction !== ja.MoveRelative ? null : this._wheelYActionCoordinate; } /** * Set which movement axis (relative to camera's orientation) the mouse * wheel's Z axis controls. * @param axis The axis to be moved. Set null to clear. */ set wheelZMoveRelative(e) { e === null && this._wheelZAction !== ja.MoveRelative || (this._wheelZAction = ja.MoveRelative, this._wheelZActionCoordinate = e); } /** * Get the configured movement axis (relative to camera's orientation) the * mouse wheel's Z axis controls. * @returns The configured axis or null if none. */ get wheelZMoveRelative() { return this._wheelZAction !== ja.MoveRelative ? null : this._wheelZActionCoordinate; } /** * Set which rotation axis (relative to camera's orientation) the mouse * wheel's X axis controls. * @param axis The axis to be moved. Set null to clear. */ set wheelXRotateRelative(e) { e === null && this._wheelXAction !== ja.RotateRelative || (this._wheelXAction = ja.RotateRelative, this._wheelXActionCoordinate = e); } /** * Get the configured rotation axis (relative to camera's orientation) the * mouse wheel's X axis controls. * @returns The configured axis or null if none. */ get wheelXRotateRelative() { return this._wheelXAction !== ja.RotateRelative ? null : this._wheelXActionCoordinate; } /** * Set which rotation axis (relative to camera's orientation) the mouse * wheel's Y axis controls. * @param axis The axis to be moved. Set null to clear. 
*/ set wheelYRotateRelative(e) { e === null && this._wheelYAction !== ja.RotateRelative || (this._wheelYAction = ja.RotateRelative, this._wheelYActionCoordinate = e); } /** * Get the configured rotation axis (relative to camera's orientation) the * mouse wheel's Y axis controls. * @returns The configured axis or null if none. */ get wheelYRotateRelative() { return this._wheelYAction !== ja.RotateRelative ? null : this._wheelYActionCoordinate; } /** * Set which rotation axis (relative to camera's orientation) the mouse * wheel's Z axis controls. * @param axis The axis to be moved. Set null to clear. */ set wheelZRotateRelative(e) { e === null && this._wheelZAction !== ja.RotateRelative || (this._wheelZAction = ja.RotateRelative, this._wheelZActionCoordinate = e); } /** * Get the configured rotation axis (relative to camera's orientation) the * mouse wheel's Z axis controls. * @returns The configured axis or null if none. */ get wheelZRotateRelative() { return this._wheelZAction !== ja.RotateRelative ? null : this._wheelZActionCoordinate; } /** * Set which movement axis (relative to the scene) the mouse wheel's X axis * controls. * @param axis The axis to be moved. Set null to clear. */ set wheelXMoveScene(e) { e === null && this._wheelXAction !== ja.MoveScene || (this._wheelXAction = ja.MoveScene, this._wheelXActionCoordinate = e); } /** * Get the configured movement axis (relative to the scene) the mouse wheel's * X axis controls. * @returns The configured axis or null if none. */ get wheelXMoveScene() { return this._wheelXAction !== ja.MoveScene ? null : this._wheelXActionCoordinate; } /** * Set which movement axis (relative to the scene) the mouse wheel's Y axis * controls. * @param axis The axis to be moved. Set null to clear. */ set wheelYMoveScene(e) { e === null && this._wheelYAction !== ja.MoveScene || (this._wheelYAction = ja.MoveScene, this._wheelYActionCoordinate = e); } /** * Get the configured movement axis (relative to the scene) the mouse wheel's * Y axis controls. * @returns The configured axis or null if none. */ get wheelYMoveScene() { return this._wheelYAction !== ja.MoveScene ? null : this._wheelYActionCoordinate; } /** * Set which movement axis (relative to the scene) the mouse wheel's Z axis * controls. * @param axis The axis to be moved. Set null to clear. */ set wheelZMoveScene(e) { e === null && this._wheelZAction !== ja.MoveScene || (this._wheelZAction = ja.MoveScene, this._wheelZActionCoordinate = e); } /** * Get the configured movement axis (relative to the scene) the mouse wheel's * Z axis controls. * @returns The configured axis or null if none. */ get wheelZMoveScene() { return this._wheelZAction !== ja.MoveScene ? null : this._wheelZActionCoordinate; } /** * Called for each rendered frame. */ checkInputs() { if (this._wheelDeltaX === 0 && this._wheelDeltaY === 0 && this._wheelDeltaZ == 0) return; this._moveRelative.setAll(0), this._rotateRelative.setAll(0), this._moveScene.setAll(0), this._updateCamera(), this.camera.getScene().useRightHandedSystem && (this._moveRelative.z *= -1); const e = Ae.Zero(); this.camera.getViewMatrix().invertToRef(e); const t = D.Zero(); D.TransformNormalToRef(this._moveRelative, e, t), this.camera.cameraRotation.x += this._rotateRelative.x / 200, this.camera.cameraRotation.y += this._rotateRelative.y / 200, this.camera.cameraDirection.addInPlace(t), this.camera.cameraDirection.addInPlace(this._moveScene), super.checkInputs(); } /** * Update the camera according to any configured properties for the 3 * mouse-wheel axis. 
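* @example
* // A hedged configuration sketch (assumes the public Babylon.js names; the "mousewheel" key
* // under inputs.attached is an assumption of this sketch, not taken from the bundle):
* camera.inputs.addMouseWheel();
* const wheel = camera.inputs.attached.mousewheel;
* if (wheel) {
*     wheel.wheelYMoveScene = BABYLON.Coordinate.Y; // wheel scroll moves the camera along scene Y
* }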
*/ _updateCamera() { this._updateCameraProperty(this._wheelDeltaX, this._wheelXAction, this._wheelXActionCoordinate), this._updateCameraProperty(this._wheelDeltaY, this._wheelYAction, this._wheelYActionCoordinate), this._updateCameraProperty(this._wheelDeltaZ, this._wheelZAction, this._wheelZActionCoordinate); } /** * Update one property of the camera. * @param value * @param cameraProperty * @param coordinate */ _updateCameraProperty(e, t, i) { if (e === 0 || t === null || i === null) return; let r = null; switch (t) { case ja.MoveRelative: r = this._moveRelative; break; case ja.RotateRelative: r = this._rotateRelative; break; case ja.MoveScene: r = this._moveScene; break; } switch (i) { case W8.X: r.set(e, 0, 0); break; case W8.Y: r.set(0, e, 0); break; case W8.Z: r.set(0, 0, e); break; } } } F([ W() ], G4.prototype, "wheelXMoveRelative", null); F([ W() ], G4.prototype, "wheelYMoveRelative", null); F([ W() ], G4.prototype, "wheelZMoveRelative", null); F([ W() ], G4.prototype, "wheelXRotateRelative", null); F([ W() ], G4.prototype, "wheelYRotateRelative", null); F([ W() ], G4.prototype, "wheelZRotateRelative", null); F([ W() ], G4.prototype, "wheelXMoveScene", null); F([ W() ], G4.prototype, "wheelYMoveScene", null); F([ W() ], G4.prototype, "wheelZMoveScene", null); Dd.FreeCameraMouseWheelInput = G4; class QL { /** * Manage the touch inputs to control the movement of a free camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/customizingCameraInputs * @param allowMouse Defines if mouse events can be treated as touch events */ constructor(e = !1) { this.allowMouse = e, this.touchAngularSensibility = 2e5, this.touchMoveSensibility = 250, this.singleFingerRotate = !1, this._offsetX = null, this._offsetY = null, this._pointerPressed = new Array(), this._isSafari = Ve.IsSafari(); } /** * Attach the input controls to a specific dom element to get the input from. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e) { e = Ve.BackCompatCameraNoPreventDefault(arguments); let t = null; if (this._pointerInput === void 0 && (this._onLostFocus = () => { this._offsetX = null, this._offsetY = null; }, this._pointerInput = (i) => { const r = i.event, s = r.pointerType === "mouse" || this._isSafari && typeof r.pointerType > "u"; if (!(!this.allowMouse && s)) { if (i.type === si.POINTERDOWN) { if (e || r.preventDefault(), this._pointerPressed.push(r.pointerId), this._pointerPressed.length !== 1) return; t = { x: r.clientX, y: r.clientY }; } else if (i.type === si.POINTERUP) { e || r.preventDefault(); const n = this._pointerPressed.indexOf(r.pointerId); if (n === -1 || (this._pointerPressed.splice(n, 1), n != 0)) return; t = null, this._offsetX = null, this._offsetY = null; } else if (i.type === si.POINTERMOVE) { if (e || r.preventDefault(), !t || this._pointerPressed.indexOf(r.pointerId) != 0) return; this._offsetX = r.clientX - t.x, this._offsetY = -(r.clientY - t.y); } } }), this._observer = this.camera.getScene()._inputManager._addCameraPointerObserver(this._pointerInput, si.POINTERDOWN | si.POINTERUP | si.POINTERMOVE), this._onLostFocus) { const r = this.camera.getEngine().getInputElement(); r && r.addEventListener("blur", this._onLostFocus); } } /** * Detach the current controls from the specified dom element. 
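* @example
* // A minimal sketch (assumes "canvas" is the rendering canvas and "camera" the owning camera;
* // not taken from this bundle). Controls can be released and re-attached at any time:
* camera.detachControl();
* camera.attachControl(canvas, true); // true = do not call preventDefault() on captured events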
*/ detachControl() { if (this._pointerInput) { if (this._observer && (this.camera.getScene()._inputManager._removeCameraPointerObserver(this._observer), this._observer = null), this._onLostFocus) { const t = this.camera.getEngine().getInputElement(); t && t.removeEventListener("blur", this._onLostFocus), this._onLostFocus = null; } this._pointerPressed.length = 0, this._offsetX = null, this._offsetY = null; } } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this._offsetX === null || this._offsetY === null || this._offsetX === 0 && this._offsetY === 0) return; const e = this.camera, t = e._calculateHandednessMultiplier(); if (e.cameraRotation.y = t * this._offsetX / this.touchAngularSensibility, this.singleFingerRotate && this._pointerPressed.length === 1 || !this.singleFingerRotate && this._pointerPressed.length > 1) e.cameraRotation.x = -this._offsetY / this.touchAngularSensibility; else { const r = e._computeLocalCameraSpeed(), s = new D(0, 0, this.touchMoveSensibility !== 0 ? r * this._offsetY / this.touchMoveSensibility : 0); Ae.RotationYawPitchRollToRef(e.rotation.y, e.rotation.x, 0, e._cameraRotationMatrix), e.cameraDirection.addInPlace(D.TransformCoordinates(s, e._cameraRotationMatrix)); } } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraTouchInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "touch"; } } F([ W() ], QL.prototype, "touchAngularSensibility", void 0); F([ W() ], QL.prototype, "touchMoveSensibility", void 0); Dd.FreeCameraTouchInput = QL; class $L extends WL { /** * Instantiates a new FreeCameraInputsManager. * @param camera Defines the camera the inputs belong to */ constructor(e) { super(e), this._mouseInput = null, this._mouseWheelInput = null; } /** * Add keyboard input support to the input manager. * @returns the current input manager */ addKeyboard() { return this.add(new Em()), this; } /** * Add mouse input support to the input manager. * @param touchEnabled if the FreeCameraMouseInput should support touch (default: true) * @returns the current input manager */ addMouse(e = !0) { return this._mouseInput || (this._mouseInput = new YL(e), this.add(this._mouseInput)), this; } /** * Removes the mouse input support from the manager * @returns the current input manager */ removeMouse() { return this._mouseInput && this.remove(this._mouseInput), this; } /** * Add mouse wheel input support to the input manager. * @returns the current input manager */ addMouseWheel() { return this._mouseWheelInput || (this._mouseWheelInput = new G4(), this.add(this._mouseWheelInput)), this; } /** * Removes the mouse wheel input support from the manager * @returns the current input manager */ removeMouseWheel() { return this._mouseWheelInput && this.remove(this._mouseWheelInput), this; } /** * Add touch input support to the input manager. 
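* @example
* // A minimal sketch (assumes an existing camera and the public Babylon.js names). The add*
* // methods return the manager, so inputs can be composed fluently:
* camera.inputs.clear();
* camera.inputs.addKeyboard().addMouse(true).addMouseWheel().addTouch();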
* @returns the current input manager */ addTouch() { return this.add(new QL()), this; } /** * Remove all attached input methods from a camera */ clear() { super.clear(), this._mouseInput = null; } } $L.prototype.addDeviceOrientation = function(c) { return this._deviceOrientationInput || (this._deviceOrientationInput = new RK(), c && (this._deviceOrientationInput.smoothFactor = c), this.add(this._deviceOrientationInput)), this; }; class RK { /** * Can be used to detect if a device orientation sensor is available on a device * @param timeout amount of time in milliseconds to wait for a response from the sensor (default: infinite) * @returns a promise that will resolve on orientation change */ static WaitForOrientationChangeAsync(e) { return new Promise((t, i) => { let r = !1; const s = () => { window.removeEventListener("deviceorientation", s), r = !0, t(); }; e && setTimeout(() => { r || (window.removeEventListener("deviceorientation", s), i("WaitForOrientationChangeAsync timed out")); }, e), typeof DeviceOrientationEvent < "u" && typeof DeviceOrientationEvent.requestPermission == "function" ? DeviceOrientationEvent.requestPermission().then((n) => { n == "granted" ? window.addEventListener("deviceorientation", s) : Ve.Warn("Permission not granted."); }).catch((n) => { Ve.Error(n); }) : window.addEventListener("deviceorientation", s); }); } /** * Instantiates a new input * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/customizingCameraInputs */ constructor() { this._screenOrientationAngle = 0, this._screenQuaternion = new Ze(), this._alpha = 0, this._beta = 0, this._gamma = 0, this.smoothFactor = 0, this._onDeviceOrientationChangedObservable = new Fe(), this._orientationChanged = () => { this._screenOrientationAngle = window.orientation !== void 0 ? +window.orientation : window.screen.orientation && window.screen.orientation.angle ? window.screen.orientation.angle : 0, this._screenOrientationAngle = -Ve.ToRadians(this._screenOrientationAngle / 2), this._screenQuaternion.copyFromFloats(0, Math.sin(this._screenOrientationAngle), 0, Math.cos(this._screenOrientationAngle)); }, this._deviceOrientation = (e) => { this.smoothFactor ? (this._alpha = e.alpha !== null ? Ve.SmoothAngleChange(this._alpha, e.alpha, this.smoothFactor) : 0, this._beta = e.beta !== null ? Ve.SmoothAngleChange(this._beta, e.beta, this.smoothFactor) : 0, this._gamma = e.gamma !== null ? Ve.SmoothAngleChange(this._gamma, e.gamma, this.smoothFactor) : 0) : (this._alpha = e.alpha !== null ? e.alpha : 0, this._beta = e.beta !== null ? e.beta : 0, this._gamma = e.gamma !== null ? e.gamma : 0), e.alpha !== null && this._onDeviceOrientationChangedObservable.notifyObservers(); }, this._constantTranform = new Ze(-Math.sqrt(0.5), 0, 0, Math.sqrt(0.5)), this._orientationChanged(); } /** * Define the camera controlled by the input. */ get camera() { return this._camera; } set camera(e) { this._camera = e, this._camera != null && !this._camera.rotationQuaternion && (this._camera.rotationQuaternion = new Ze()), this._camera && this._camera.onDisposeObservable.add(() => { this._onDeviceOrientationChangedObservable.clear(); }); } /** * Attach the input controls to a specific dom element to get the input from. 
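* @example
* // A hedged sketch (assumes the public Babylon.js names). Optionally wait for a first
* // orientation event (timeout in milliseconds) before adding the input:
* await BABYLON.FreeCameraDeviceOrientationInput.WaitForOrientationChangeAsync(2000);
* camera.inputs.addDeviceOrientation();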
*/ attachControl() { const e = this.camera.getScene().getEngine().getHostWindow(); if (e) { const t = () => { e.addEventListener("orientationchange", this._orientationChanged), e.addEventListener("deviceorientation", this._deviceOrientation), this._orientationChanged(); }; typeof DeviceOrientationEvent < "u" && typeof DeviceOrientationEvent.requestPermission == "function" ? DeviceOrientationEvent.requestPermission().then((i) => { i === "granted" ? t() : Ve.Warn("Permission not granted."); }).catch((i) => { Ve.Error(i); }) : t(); } } /** * Detach the current controls from the specified dom element. */ detachControl() { window.removeEventListener("orientationchange", this._orientationChanged), window.removeEventListener("deviceorientation", this._deviceOrientation), this._alpha = 0; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { this._alpha && (Ze.RotationYawPitchRollToRef(Ve.ToRadians(this._alpha), Ve.ToRadians(this._beta), -Ve.ToRadians(this._gamma), this.camera.rotationQuaternion), this._camera.rotationQuaternion.multiplyInPlace(this._screenQuaternion), this._camera.rotationQuaternion.multiplyInPlace(this._constantTranform), this._camera.rotationQuaternion.z *= -1, this._camera.rotationQuaternion.w *= -1); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraDeviceOrientationInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "deviceOrientation"; } } Dd.FreeCameraDeviceOrientationInput = RK; class ZL { constructor() { this.gamepadAngularSensibility = 200, this.gamepadMoveSensibility = 40, this.deadzoneDelta = 0.1, this._yAxisScale = 1, this._cameraTransform = Ae.Identity(), this._deltaTransform = D.Zero(), this._vector3 = D.Zero(), this._vector2 = at.Zero(); } /** * Gets or sets a boolean indicating that Yaxis (for right stick) should be inverted */ get invertYAxis() { return this._yAxisScale !== 1; } set invertYAxis(e) { this._yAxisScale = e ? -1 : 1; } /** * Attach the input controls to a specific dom element to get the input from. */ attachControl() { const e = this.camera.getScene().gamepadManager; this._onGamepadConnectedObserver = e.onGamepadConnectedObservable.add((t) => { t.type !== zu.POSE_ENABLED && (!this.gamepad || t.type === zu.XBOX) && (this.gamepad = t); }), this._onGamepadDisconnectedObserver = e.onGamepadDisconnectedObservable.add((t) => { this.gamepad === t && (this.gamepad = null); }), this.gamepad = e.getGamepadByType(zu.XBOX), !this.gamepad && e.gamepads.length && (this.gamepad = e.gamepads[0]); } /** * Detach the current controls from the specified dom element. */ detachControl() { this.camera.getScene().gamepadManager.onGamepadConnectedObservable.remove(this._onGamepadConnectedObserver), this.camera.getScene().gamepadManager.onGamepadDisconnectedObservable.remove(this._onGamepadDisconnectedObserver), this.gamepad = null; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this.gamepad && this.gamepad.leftStick) { const e = this.camera, t = this.gamepad.leftStick; this.gamepadMoveSensibility !== 0 && (t.x = Math.abs(t.x) > this.deadzoneDelta ? 
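// Stick values whose magnitude is within deadzoneDelta are clamped to 0; otherwise they are
// scaled by 1 / gamepadMoveSensibility (left stick) or 1 / gamepadAngularSensibility
// (right stick, with optional Y-axis inversion) in the expressions below.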
t.x / this.gamepadMoveSensibility : 0, t.y = Math.abs(t.y) > this.deadzoneDelta ? t.y / this.gamepadMoveSensibility : 0); let i = this.gamepad.rightStick; i && this.gamepadAngularSensibility !== 0 ? (i.x = Math.abs(i.x) > this.deadzoneDelta ? i.x / this.gamepadAngularSensibility : 0, i.y = (Math.abs(i.y) > this.deadzoneDelta ? i.y / this.gamepadAngularSensibility : 0) * this._yAxisScale) : i = { x: 0, y: 0 }, e.rotationQuaternion ? e.rotationQuaternion.toRotationMatrix(this._cameraTransform) : Ae.RotationYawPitchRollToRef(e.rotation.y, e.rotation.x, 0, this._cameraTransform); const r = e._computeLocalCameraSpeed() * 50; this._vector3.copyFromFloats(t.x * r, 0, -t.y * r), D.TransformCoordinatesToRef(this._vector3, this._cameraTransform, this._deltaTransform), e.cameraDirection.addInPlace(this._deltaTransform), this._vector2.copyFromFloats(i.y, i.x), e.cameraRotation.addInPlace(this._vector2); } } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraGamepadInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "gamepad"; } } F([ W() ], ZL.prototype, "gamepadAngularSensibility", void 0); F([ W() ], ZL.prototype, "gamepadMoveSensibility", void 0); Dd.FreeCameraGamepadInput = ZL; var Lu; (function(c) { c[c.X = 0] = "X", c[c.Y = 1] = "Y", c[c.Z = 2] = "Z"; })(Lu || (Lu = {})); class Ji { static _GetDefaultOptions() { return { puckSize: 40, containerSize: 60, color: "cyan", puckImage: void 0, containerImage: void 0, position: void 0, alwaysVisible: !1, limitToContainer: !1 }; } /** * Creates a new virtual joystick * @param leftJoystick defines that the joystick is for left hand (false by default) * @param customizations Defines the options we want to customize the VirtualJoystick */ constructor(e, t) { this._released = !1; const i = Object.assign(Object.assign({}, Ji._GetDefaultOptions()), t); if (e ? 
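// The first constructor argument selects the left-hand joystick; the options object was merged
// over _GetDefaultOptions() above. A minimal usage sketch, assuming the public Babylon.js name:
// new BABYLON.VirtualJoystick(true, { color: "red", limitToContainer: true });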
this._leftJoystick = !0 : this._leftJoystick = !1, Ji._GlobalJoystickIndex++, this._axisTargetedByLeftAndRight = Lu.X, this._axisTargetedByUpAndDown = Lu.Y, this.reverseLeftRight = !1, this.reverseUpDown = !1, this._touches = new iB(), this.deltaPosition = D.Zero(), this._joystickSensibility = 25, this._inversedSensibility = 1 / (this._joystickSensibility / 1e3), this._onResize = () => { Ji._VJCanvasWidth = window.innerWidth, Ji._VJCanvasHeight = window.innerHeight, Ji.Canvas && (Ji.Canvas.width = Ji._VJCanvasWidth, Ji.Canvas.height = Ji._VJCanvasHeight), Ji._HalfWidth = Ji._VJCanvasWidth / 2; }, !Ji.Canvas) { window.addEventListener("resize", this._onResize, !1), Ji.Canvas = document.createElement("canvas"), Ji._VJCanvasWidth = window.innerWidth, Ji._VJCanvasHeight = window.innerHeight, Ji.Canvas.width = window.innerWidth, Ji.Canvas.height = window.innerHeight, Ji.Canvas.style.width = "100%", Ji.Canvas.style.height = "100%", Ji.Canvas.style.position = "absolute", Ji.Canvas.style.backgroundColor = "transparent", Ji.Canvas.style.top = "0px", Ji.Canvas.style.left = "0px", Ji.Canvas.style.zIndex = "5", Ji.Canvas.style.touchAction = "none", Ji.Canvas.setAttribute("touch-action", "none"); const r = Ji.Canvas.getContext("2d"); if (!r) throw new Error("Unable to create canvas for virtual joystick"); Ji._VJCanvasContext = r, Ji._VJCanvasContext.strokeStyle = "#ffffff", Ji._VJCanvasContext.lineWidth = 2, document.body.appendChild(Ji.Canvas); } Ji._HalfWidth = Ji.Canvas.width / 2, this.pressed = !1, this.limitToContainer = i.limitToContainer, this._joystickColor = i.color, this.containerSize = i.containerSize, this.puckSize = i.puckSize, i.position && this.setPosition(i.position.x, i.position.y), i.puckImage && this.setPuckImage(i.puckImage), i.containerImage && this.setContainerImage(i.containerImage), i.alwaysVisible && Ji._AlwaysVisibleSticks++, this.alwaysVisible = i.alwaysVisible, this._joystickPointerId = -1, this._joystickPointerPos = new at(0, 0), this._joystickPreviousPointerPos = new at(0, 0), this._joystickPointerStartPos = new at(0, 0), this._deltaJoystickVector = new at(0, 0), this._onPointerDownHandlerRef = (r) => { this._onPointerDown(r); }, this._onPointerMoveHandlerRef = (r) => { this._onPointerMove(r); }, this._onPointerUpHandlerRef = (r) => { this._onPointerUp(r); }, Ji.Canvas.addEventListener("pointerdown", this._onPointerDownHandlerRef, !1), Ji.Canvas.addEventListener("pointermove", this._onPointerMoveHandlerRef, !1), Ji.Canvas.addEventListener("pointerup", this._onPointerUpHandlerRef, !1), Ji.Canvas.addEventListener("pointerout", this._onPointerUpHandlerRef, !1), Ji.Canvas.addEventListener("contextmenu", (r) => { r.preventDefault(); }, !1), requestAnimationFrame(() => { this._drawVirtualJoystick(); }); } /** * Defines joystick sensibility (ie. the ratio between a physical move and virtual joystick position change) * @param newJoystickSensibility defines the new sensibility */ setJoystickSensibility(e) { this._joystickSensibility = e, this._inversedSensibility = 1 / (this._joystickSensibility / 1e3); } _onPointerDown(e) { let t; e.preventDefault(), this._leftJoystick === !0 ? t = e.clientX < Ji._HalfWidth : t = e.clientX > Ji._HalfWidth, t && this._joystickPointerId < 0 ? (this._joystickPointerId = e.pointerId, this._joystickPosition ? 
(this._joystickPointerStartPos = this._joystickPosition.clone(), this._joystickPointerPos = this._joystickPosition.clone(), this._joystickPreviousPointerPos = this._joystickPosition.clone(), this._onPointerMove(e)) : (this._joystickPointerStartPos.x = e.clientX, this._joystickPointerStartPos.y = e.clientY, this._joystickPointerPos = this._joystickPointerStartPos.clone(), this._joystickPreviousPointerPos = this._joystickPointerStartPos.clone()), this._deltaJoystickVector.x = 0, this._deltaJoystickVector.y = 0, this.pressed = !0, this._touches.add(e.pointerId.toString(), e)) : Ji._GlobalJoystickIndex < 2 && this._action && (this._action(), this._touches.add(e.pointerId.toString(), { x: e.clientX, y: e.clientY, prevX: e.clientX, prevY: e.clientY })); } _onPointerMove(e) { if (this._joystickPointerId == e.pointerId) { if (this.limitToContainer) { const n = new at(e.clientX - this._joystickPointerStartPos.x, e.clientY - this._joystickPointerStartPos.y), a = n.length(); a > this.containerSize && n.scaleInPlace(this.containerSize / a), this._joystickPointerPos.x = this._joystickPointerStartPos.x + n.x, this._joystickPointerPos.y = this._joystickPointerStartPos.y + n.y; } else this._joystickPointerPos.x = e.clientX, this._joystickPointerPos.y = e.clientY; this._deltaJoystickVector = this._joystickPointerPos.clone(), this._deltaJoystickVector = this._deltaJoystickVector.subtract(this._joystickPointerStartPos), 0 < Ji._AlwaysVisibleSticks && (this._leftJoystick ? this._joystickPointerPos.x = Math.min(Ji._HalfWidth, this._joystickPointerPos.x) : this._joystickPointerPos.x = Math.max(Ji._HalfWidth, this._joystickPointerPos.x)); const i = (this.reverseLeftRight ? -1 : 1) * this._deltaJoystickVector.x / this._inversedSensibility; switch (this._axisTargetedByLeftAndRight) { case Lu.X: this.deltaPosition.x = Math.min(1, Math.max(-1, i)); break; case Lu.Y: this.deltaPosition.y = Math.min(1, Math.max(-1, i)); break; case Lu.Z: this.deltaPosition.z = Math.min(1, Math.max(-1, i)); break; } const s = (this.reverseUpDown ? 
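// The pointer delta is divided by _inversedSensibility (i.e. 1000 / joystickSensibility) and
// clamped to [-1, 1] before being written into the deltaPosition axis selected by
// setAxisForUpDown() / setAxisForLeftRight().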
1 : -1) * this._deltaJoystickVector.y / this._inversedSensibility; switch (this._axisTargetedByUpAndDown) { case Lu.X: this.deltaPosition.x = Math.min(1, Math.max(-1, s)); break; case Lu.Y: this.deltaPosition.y = Math.min(1, Math.max(-1, s)); break; case Lu.Z: this.deltaPosition.z = Math.min(1, Math.max(-1, s)); break; } } else { const t = this._touches.get(e.pointerId.toString()); t && (t.x = e.clientX, t.y = e.clientY); } } _onPointerUp(e) { if (this._joystickPointerId == e.pointerId) this._clearPreviousDraw(), this._joystickPointerId = -1, this.pressed = !1; else { const t = this._touches.get(e.pointerId.toString()); t && Ji._VJCanvasContext.clearRect(t.prevX - 44, t.prevY - 44, 88, 88); } this._deltaJoystickVector.x = 0, this._deltaJoystickVector.y = 0, this._touches.remove(e.pointerId.toString()); } /** * Change the color of the virtual joystick * @param newColor a string that must be a CSS color value (like "red") or the hexa value (like "#FF0000") */ setJoystickColor(e) { this._joystickColor = e; } /** * Size of the joystick's container */ set containerSize(e) { this._joystickContainerSize = e, this._clearContainerSize = ~~(this._joystickContainerSize * 2.1), this._clearContainerSizeOffset = ~~(this._clearContainerSize / 2); } get containerSize() { return this._joystickContainerSize; } /** * Size of the joystick's puck */ set puckSize(e) { this._joystickPuckSize = e, this._clearPuckSize = ~~(this._joystickPuckSize * 2.1), this._clearPuckSizeOffset = ~~(this._clearPuckSize / 2); } get puckSize() { return this._joystickPuckSize; } /** * Clears the set position of the joystick */ clearPosition() { this.alwaysVisible = !1, this._joystickPosition = null; } /** * Defines whether or not the joystick container is always visible */ set alwaysVisible(e) { this._alwaysVisible !== e && (e && this._joystickPosition ? 
(Ji._AlwaysVisibleSticks++, this._alwaysVisible = !0) : (Ji._AlwaysVisibleSticks--, this._alwaysVisible = !1)); } get alwaysVisible() { return this._alwaysVisible; } /** * Sets the constant position of the Joystick container * @param x X axis coordinate * @param y Y axis coordinate */ setPosition(e, t) { this._joystickPointerStartPos && this._clearPreviousDraw(), this._joystickPosition = new at(e, t); } /** * Defines a callback to call when the joystick is touched * @param action defines the callback */ setActionOnTouch(e) { this._action = e; } /** * Defines which axis you'd like to control for left & right * @param axis defines the axis to use */ setAxisForLeftRight(e) { switch (e) { case Lu.X: case Lu.Y: case Lu.Z: this._axisTargetedByLeftAndRight = e; break; default: this._axisTargetedByLeftAndRight = Lu.X; break; } } /** * Defines which axis you'd like to control for up & down * @param axis defines the axis to use */ setAxisForUpDown(e) { switch (e) { case Lu.X: case Lu.Y: case Lu.Z: this._axisTargetedByUpAndDown = e; break; default: this._axisTargetedByUpAndDown = Lu.Y; break; } } /** * Clears the canvas from the previous puck / container draw */ _clearPreviousDraw() { const e = this._joystickPosition || this._joystickPointerStartPos; Ji._VJCanvasContext.clearRect(e.x - this._clearContainerSizeOffset, e.y - this._clearContainerSizeOffset, this._clearContainerSize, this._clearContainerSize), Ji._VJCanvasContext.clearRect(this._joystickPreviousPointerPos.x - this._clearPuckSizeOffset - 1, this._joystickPreviousPointerPos.y - this._clearPuckSizeOffset - 1, this._clearPuckSize + 2, this._clearPuckSize + 2); } /** * Loads `urlPath` to be used for the container's image * @param urlPath defines the urlPath of an image to use */ setContainerImage(e) { const t = new Image(); t.src = e, t.onload = () => this._containerImage = t; } /** * Loads `urlPath` to be used for the puck's image * @param urlPath defines the urlPath of an image to use */ setPuckImage(e) { const t = new Image(); t.src = e, t.onload = () => this._puckImage = t; } /** * Draws the Virtual Joystick's container */ _drawContainer() { const e = this._joystickPosition || this._joystickPointerStartPos; this._clearPreviousDraw(), this._containerImage ? Ji._VJCanvasContext.drawImage(this._containerImage, e.x - this.containerSize, e.y - this.containerSize, this.containerSize * 2, this.containerSize * 2) : (Ji._VJCanvasContext.beginPath(), Ji._VJCanvasContext.strokeStyle = this._joystickColor, Ji._VJCanvasContext.lineWidth = 2, Ji._VJCanvasContext.arc(e.x, e.y, this.containerSize, 0, Math.PI * 2, !0), Ji._VJCanvasContext.stroke(), Ji._VJCanvasContext.closePath(), Ji._VJCanvasContext.beginPath(), Ji._VJCanvasContext.lineWidth = 6, Ji._VJCanvasContext.strokeStyle = this._joystickColor, Ji._VJCanvasContext.arc(e.x, e.y, this.puckSize, 0, Math.PI * 2, !0), Ji._VJCanvasContext.stroke(), Ji._VJCanvasContext.closePath()); } /** * Draws the Virtual Joystick's puck */ _drawPuck() { this._puckImage ? 
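// The puck is drawn from the image set via setPuckImage() when available, otherwise as a
// stroked circle of radius puckSize in the configured joystick color.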
Ji._VJCanvasContext.drawImage(this._puckImage, this._joystickPointerPos.x - this.puckSize, this._joystickPointerPos.y - this.puckSize, this.puckSize * 2, this.puckSize * 2) : (Ji._VJCanvasContext.beginPath(), Ji._VJCanvasContext.strokeStyle = this._joystickColor, Ji._VJCanvasContext.lineWidth = 2, Ji._VJCanvasContext.arc(this._joystickPointerPos.x, this._joystickPointerPos.y, this.puckSize, 0, Math.PI * 2, !0), Ji._VJCanvasContext.stroke(), Ji._VJCanvasContext.closePath()); } _drawVirtualJoystick() { this._released || (this.alwaysVisible && this._drawContainer(), this.pressed && this._touches.forEach((e, t) => { t.pointerId === this._joystickPointerId ? (this.alwaysVisible || this._drawContainer(), this._drawPuck(), this._joystickPreviousPointerPos = this._joystickPointerPos.clone()) : (Ji._VJCanvasContext.clearRect(t.prevX - 44, t.prevY - 44, 88, 88), Ji._VJCanvasContext.beginPath(), Ji._VJCanvasContext.fillStyle = "white", Ji._VJCanvasContext.beginPath(), Ji._VJCanvasContext.strokeStyle = "red", Ji._VJCanvasContext.lineWidth = 6, Ji._VJCanvasContext.arc(t.x, t.y, 40, 0, Math.PI * 2, !0), Ji._VJCanvasContext.stroke(), Ji._VJCanvasContext.closePath(), t.prevX = t.x, t.prevY = t.y); }), requestAnimationFrame(() => { this._drawVirtualJoystick(); })); } /** * Release internal HTML canvas */ releaseCanvas() { Ji.Canvas && (Ji.Canvas.removeEventListener("pointerdown", this._onPointerDownHandlerRef), Ji.Canvas.removeEventListener("pointermove", this._onPointerMoveHandlerRef), Ji.Canvas.removeEventListener("pointerup", this._onPointerUpHandlerRef), Ji.Canvas.removeEventListener("pointerout", this._onPointerUpHandlerRef), window.removeEventListener("resize", this._onResize), document.body.removeChild(Ji.Canvas), Ji.Canvas = null), this._released = !0; } } Ji._GlobalJoystickIndex = 0; Ji._AlwaysVisibleSticks = 0; $L.prototype.addVirtualJoystick = function() { return this.add(new PK()), this; }; class PK { /** * Gets the left stick of the virtual joystick. * @returns The virtual Joystick */ getLeftJoystick() { return this._leftjoystick; } /** * Gets the right stick of the virtual joystick. * @returns The virtual Joystick */ getRightJoystick() { return this._rightjoystick; } /** * Update the current camera state depending on the inputs that have been used this frame. * This is a dynamically created lambda to avoid the performance penalty of looping for inputs in the render loop. */ checkInputs() { if (this._leftjoystick) { const e = this.camera, t = e._computeLocalCameraSpeed() * 50, i = Ae.RotationYawPitchRoll(e.rotation.y, e.rotation.x, 0), r = D.TransformCoordinates(new D(this._leftjoystick.deltaPosition.x * t, this._leftjoystick.deltaPosition.y * t, this._leftjoystick.deltaPosition.z * t), i); e.cameraDirection = e.cameraDirection.add(r), e.cameraRotation = e.cameraRotation.addVector3(this._rightjoystick.deltaPosition), this._leftjoystick.pressed || (this._leftjoystick.deltaPosition = this._leftjoystick.deltaPosition.scale(0.9)), this._rightjoystick.pressed || (this._rightjoystick.deltaPosition = this._rightjoystick.deltaPosition.scale(0.9)); } } /** * Attach the input controls to a specific dom element to get the input from. 
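* @example
* // A minimal sketch (assumes an existing camera and the public Babylon.js names):
* camera.inputs.addVirtualJoystick();
* // Once controls are attached, the sticks are reachable through the input's simple name:
* const vj = camera.inputs.attached.virtualJoystick;
* if (vj) {
*     vj.getRightJoystick().setJoystickColor("purple");
* }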
*/ attachControl() { this._leftjoystick = new Ji(!0), this._leftjoystick.setAxisForUpDown(Lu.Z), this._leftjoystick.setAxisForLeftRight(Lu.X), this._leftjoystick.setJoystickSensibility(0.15), this._rightjoystick = new Ji(!1), this._rightjoystick.setAxisForUpDown(Lu.X), this._rightjoystick.setAxisForLeftRight(Lu.Y), this._rightjoystick.reverseUpDown = !0, this._rightjoystick.setJoystickSensibility(0.05), this._rightjoystick.setJoystickColor("yellow"); } /** * Detach the current controls from the specified dom element. */ detachControl() { this._leftjoystick.releaseCanvas(), this._rightjoystick.releaseCanvas(); } /** * Gets the class name of the current input. * @returns the class name */ getClassName() { return "FreeCameraVirtualJoystickInput"; } /** * Get the friendly name associated with the input class. * @returns the input friendly name */ getSimpleName() { return "virtualJoystick"; } } Dd.FreeCameraVirtualJoystickInput = PK; class Cl extends Ai { /** * Instantiates a target camera that takes a mesh or position as a target and continues to look at it while it moves. * This is the base of the follow, arc rotate cameras and Free camera * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras * @param name Defines the name of the camera in the scene * @param position Defines the start position of the camera in the scene * @param scene Defines the scene the camera belongs to * @param setActiveOnSceneIfNoneActive Defines whether the camera should be marked as active if not other active cameras have been defined */ constructor(e, t, i, r = !0) { super(e, t, i, r), this._tmpUpVector = D.Zero(), this._tmpTargetVector = D.Zero(), this.cameraDirection = new D(0, 0, 0), this.cameraRotation = new at(0, 0), this.ignoreParentScaling = !1, this.updateUpVectorFromRotation = !1, this._tmpQuaternion = new Ze(), this.rotation = new D(0, 0, 0), this.speed = 2, this.noRotationConstraint = !1, this.invertRotation = !1, this.inverseRotationSpeed = 0.2, this.lockedTarget = null, this._currentTarget = D.Zero(), this._initialFocalDistance = 1, this._viewMatrix = Ae.Zero(), this._camMatrix = Ae.Zero(), this._cameraTransformMatrix = Ae.Zero(), this._cameraRotationMatrix = Ae.Zero(), this._referencePoint = new D(0, 0, 1), this._transformedReferencePoint = D.Zero(), this._deferredPositionUpdate = new D(), this._deferredRotationQuaternionUpdate = new Ze(), this._deferredRotationUpdate = new D(), this._deferredUpdated = !1, this._deferOnly = !1, this._defaultUp = D.Up(), this._cachedRotationZ = 0, this._cachedQuaternionRotationZ = 0; } /** * Gets the position in front of the camera at a given distance. * @param distance The distance from the camera we want the position to be * @returns the position */ getFrontPosition(e) { this.getWorldMatrix(); const t = this.getTarget().subtract(this.position); return t.normalize(), t.scaleInPlace(e), this.globalPosition.add(t); } /** @internal */ _getLockedTargetPosition() { if (!this.lockedTarget) return null; if (this.lockedTarget.absolutePosition) { const e = this.lockedTarget; e.computeWorldMatrix().getTranslationToRef(e.absolutePosition); } return this.lockedTarget.absolutePosition || this.lockedTarget; } /** * Store current camera state of the camera (fov, position, rotation, etc..) * @returns the camera */ storeState() { return this._storedPosition = this.position.clone(), this._storedRotation = this.rotation.clone(), this.rotationQuaternion && (this._storedRotationQuaternion = this.rotationQuaternion.clone()), super.storeState(); } /** * Restored camera state. 
You must call storeState() first * @returns whether it was successful or not * @internal */ _restoreStateValues() { return super._restoreStateValues() ? (this.position = this._storedPosition.clone(), this.rotation = this._storedRotation.clone(), this.rotationQuaternion && (this.rotationQuaternion = this._storedRotationQuaternion.clone()), this.cameraDirection.copyFromFloats(0, 0, 0), this.cameraRotation.copyFromFloats(0, 0), !0) : !1; } /** @internal */ _initCache() { super._initCache(), this._cache.lockedTarget = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cache.rotation = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cache.rotationQuaternion = new Ze(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE); } /** * @internal */ _updateCache(e) { e || super._updateCache(); const t = this._getLockedTargetPosition(); t ? this._cache.lockedTarget ? this._cache.lockedTarget.copyFrom(t) : this._cache.lockedTarget = t.clone() : this._cache.lockedTarget = null, this._cache.rotation.copyFrom(this.rotation), this.rotationQuaternion && this._cache.rotationQuaternion.copyFrom(this.rotationQuaternion); } // Synchronized /** @internal */ _isSynchronizedViewMatrix() { if (!super._isSynchronizedViewMatrix()) return !1; const e = this._getLockedTargetPosition(); return (this._cache.lockedTarget ? this._cache.lockedTarget.equals(e) : !e) && (this.rotationQuaternion ? this.rotationQuaternion.equals(this._cache.rotationQuaternion) : this._cache.rotation.equals(this.rotation)); } // Methods /** @internal */ _computeLocalCameraSpeed() { const e = this.getEngine(); return this.speed * Math.sqrt(e.getDeltaTime() / (e.getFps() * 100)); } // Target /** * Defines the target the camera should look at. * @param target Defines the new target as a Vector */ setTarget(e) { this.upVector.normalize(), this._initialFocalDistance = e.subtract(this.position).length(), this.position.z === e.z && (this.position.z += Sr), this._referencePoint.normalize().scaleInPlace(this._initialFocalDistance), Ae.LookAtLHToRef(this.position, e, this._defaultUp, this._camMatrix), this._camMatrix.invert(), this.rotation.x = Math.atan(this._camMatrix.m[6] / this._camMatrix.m[10]); const t = e.subtract(this.position); t.x >= 0 ? this.rotation.y = -Math.atan(t.z / t.x) + Math.PI / 2 : this.rotation.y = -Math.atan(t.z / t.x) - Math.PI / 2, this.rotation.z = 0, isNaN(this.rotation.x) && (this.rotation.x = 0), isNaN(this.rotation.y) && (this.rotation.y = 0), isNaN(this.rotation.z) && (this.rotation.z = 0), this.rotationQuaternion && Ze.RotationYawPitchRollToRef(this.rotation.y, this.rotation.x, this.rotation.z, this.rotationQuaternion); } /** * Defines the target point of the camera. * The camera looks towards it form the radius distance. */ get target() { return this.getTarget(); } set target(e) { this.setTarget(e); } /** * Return the current target position of the camera. This value is expressed in local space. * @returns the target position */ getTarget() { return this._currentTarget; } /** @internal */ _decideIfNeedsToMove() { return Math.abs(this.cameraDirection.x) > 0 || Math.abs(this.cameraDirection.y) > 0 || Math.abs(this.cameraDirection.z) > 0; } /** @internal */ _updatePosition() { if (this.parent) { this.parent.getWorldMatrix().invertToRef(de.Matrix[0]), D.TransformNormalToRef(this.cameraDirection, de.Matrix[0], de.Vector3[0]), this._deferredPositionUpdate.addInPlace(de.Vector3[0]), this._deferOnly ? 
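// cameraDirection was transformed into the parent's local space above; when _deferOnly is set,
// the new position is kept in _deferredPositionUpdate and _deferredUpdated is flagged instead of
// writing this.position directly.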
this._deferredUpdated = !0 : this.position.copyFrom(this._deferredPositionUpdate); return; } this._deferredPositionUpdate.addInPlace(this.cameraDirection), this._deferOnly ? this._deferredUpdated = !0 : this.position.copyFrom(this._deferredPositionUpdate); } /** @internal */ _checkInputs() { const e = this.invertRotation ? -this.inverseRotationSpeed : 1, t = this._decideIfNeedsToMove(), i = this.cameraRotation.x || this.cameraRotation.y; this._deferredUpdated = !1, this._deferredRotationUpdate.copyFrom(this.rotation), this._deferredPositionUpdate.copyFrom(this.position), this.rotationQuaternion && this._deferredRotationQuaternionUpdate.copyFrom(this.rotationQuaternion), t && this._updatePosition(), i && (this.rotationQuaternion && this.rotationQuaternion.toEulerAnglesToRef(this._deferredRotationUpdate), this._deferredRotationUpdate.x += this.cameraRotation.x * e, this._deferredRotationUpdate.y += this.cameraRotation.y * e, this.noRotationConstraint || (this._deferredRotationUpdate.x > 1.570796 && (this._deferredRotationUpdate.x = 1.570796), this._deferredRotationUpdate.x < -1.570796 && (this._deferredRotationUpdate.x = -1.570796)), this._deferOnly ? this._deferredUpdated = !0 : this.rotation.copyFrom(this._deferredRotationUpdate), this.rotationQuaternion && this._deferredRotationUpdate.lengthSquared() && (Ze.RotationYawPitchRollToRef(this._deferredRotationUpdate.y, this._deferredRotationUpdate.x, this._deferredRotationUpdate.z, this._deferredRotationQuaternionUpdate), this._deferOnly ? this._deferredUpdated = !0 : this.rotationQuaternion.copyFrom(this._deferredRotationQuaternionUpdate))), t && (Math.abs(this.cameraDirection.x) < this.speed * Sr && (this.cameraDirection.x = 0), Math.abs(this.cameraDirection.y) < this.speed * Sr && (this.cameraDirection.y = 0), Math.abs(this.cameraDirection.z) < this.speed * Sr && (this.cameraDirection.z = 0), this.cameraDirection.scaleInPlace(this.inertia)), i && (Math.abs(this.cameraRotation.x) < this.speed * Sr && (this.cameraRotation.x = 0), Math.abs(this.cameraRotation.y) < this.speed * Sr && (this.cameraRotation.y = 0), this.cameraRotation.scaleInPlace(this.inertia)), super._checkInputs(); } _updateCameraRotationMatrix() { this.rotationQuaternion ? this.rotationQuaternion.toRotationMatrix(this._cameraRotationMatrix) : Ae.RotationYawPitchRollToRef(this.rotation.y, this.rotation.x, this.rotation.z, this._cameraRotationMatrix); } /** * Update the up vector to apply the rotation of the camera (So if you changed the camera rotation.z this will let you update the up vector as well) * @returns the current camera */ _rotateUpVectorWithCameraRotationMatrix() { return D.TransformNormalToRef(this._defaultUp, this._cameraRotationMatrix, this.upVector), this; } /** @internal */ _getViewMatrix() { return this.lockedTarget && this.setTarget(this._getLockedTargetPosition()), this._updateCameraRotationMatrix(), this.rotationQuaternion && this._cachedQuaternionRotationZ != this.rotationQuaternion.z ? (this._rotateUpVectorWithCameraRotationMatrix(), this._cachedQuaternionRotationZ = this.rotationQuaternion.z) : this._cachedRotationZ !== this.rotation.z && (this._rotateUpVectorWithCameraRotationMatrix(), this._cachedRotationZ = this.rotation.z), D.TransformCoordinatesToRef(this._referencePoint, this._cameraRotationMatrix, this._transformedReferencePoint), this.position.addToRef(this._transformedReferencePoint, this._currentTarget), this.updateUpVectorFromRotation && (this.rotationQuaternion ? 
bl.Y.rotateByQuaternionToRef(this.rotationQuaternion, this.upVector) : (Ze.FromEulerVectorToRef(this.rotation, this._tmpQuaternion), bl.Y.rotateByQuaternionToRef(this._tmpQuaternion, this.upVector))), this._computeViewMatrix(this.position, this._currentTarget, this.upVector), this._viewMatrix; } _computeViewMatrix(e, t, i) { if (this.ignoreParentScaling) { if (this.parent) { const r = this.parent.getWorldMatrix(); D.TransformCoordinatesToRef(e, r, this._globalPosition), D.TransformCoordinatesToRef(t, r, this._tmpTargetVector), D.TransformNormalToRef(i, r, this._tmpUpVector), this._markSyncedWithParent(); } else this._globalPosition.copyFrom(e), this._tmpTargetVector.copyFrom(t), this._tmpUpVector.copyFrom(i); this.getScene().useRightHandedSystem ? Ae.LookAtRHToRef(this._globalPosition, this._tmpTargetVector, this._tmpUpVector, this._viewMatrix) : Ae.LookAtLHToRef(this._globalPosition, this._tmpTargetVector, this._tmpUpVector, this._viewMatrix); return; } if (this.getScene().useRightHandedSystem ? Ae.LookAtRHToRef(e, t, i, this._viewMatrix) : Ae.LookAtLHToRef(e, t, i, this._viewMatrix), this.parent) { const r = this.parent.getWorldMatrix(); this._viewMatrix.invert(), this._viewMatrix.multiplyToRef(r, this._viewMatrix), this._viewMatrix.getTranslationToRef(this._globalPosition), this._viewMatrix.invert(), this._markSyncedWithParent(); } else this._globalPosition.copyFrom(e); } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars createRigCamera(e, t) { if (this.cameraRigMode !== Ai.RIG_MODE_NONE) { const i = new Cl(e, this.position.clone(), this.getScene()); return i.isRigCamera = !0, i.rigParent = this, this.cameraRigMode === Ai.RIG_MODE_VR && (this.rotationQuaternion || (this.rotationQuaternion = new Ze()), i._cameraRigParams = {}, i.rotationQuaternion = new Ze()), i.mode = this.mode, i.orthoLeft = this.orthoLeft, i.orthoRight = this.orthoRight, i.orthoTop = this.orthoTop, i.orthoBottom = this.orthoBottom, i; } return null; } /** * @internal */ _updateRigCameras() { const e = this._rigCameras[0], t = this._rigCameras[1]; switch (this.computeWorldMatrix(), this.cameraRigMode) { case Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH: case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL: case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED: case Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER: case Ai.RIG_MODE_STEREOSCOPIC_INTERLACED: { const i = this.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED ? 1 : -1, r = this.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED ? -1 : 1; this._getRigCamPositionAndTarget(this._cameraRigParams.stereoHalfAngle * i, e), this._getRigCamPositionAndTarget(this._cameraRigParams.stereoHalfAngle * r, t); break; } case Ai.RIG_MODE_VR: e.rotationQuaternion ? 
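// In RIG_MODE_VR both rig cameras simply copy this camera's orientation (quaternion when
// available, Euler rotation otherwise) and its position every frame.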
(e.rotationQuaternion.copyFrom(this.rotationQuaternion), t.rotationQuaternion.copyFrom(this.rotationQuaternion)) : (e.rotation.copyFrom(this.rotation), t.rotation.copyFrom(this.rotation)), e.position.copyFrom(this.position), t.position.copyFrom(this.position); break; } super._updateRigCameras(); } _getRigCamPositionAndTarget(e, t) { this.getTarget().subtractToRef(this.position, Cl._TargetFocalPoint), Cl._TargetFocalPoint.normalize().scaleInPlace(this._initialFocalDistance); const r = Cl._TargetFocalPoint.addInPlace(this.position); Ae.TranslationToRef(-r.x, -r.y, -r.z, Cl._TargetTransformMatrix), Cl._TargetTransformMatrix.multiplyToRef(Ae.RotationAxis(t.upVector, e), Cl._RigCamTransformMatrix), Ae.TranslationToRef(r.x, r.y, r.z, Cl._TargetTransformMatrix), Cl._RigCamTransformMatrix.multiplyToRef(Cl._TargetTransformMatrix, Cl._RigCamTransformMatrix), D.TransformCoordinatesToRef(this.position, Cl._RigCamTransformMatrix, t.position), t.setTarget(r); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "TargetCamera"; } } Cl._RigCamTransformMatrix = new Ae(); Cl._TargetTransformMatrix = new Ae(); Cl._TargetFocalPoint = new D(); F([ oo() ], Cl.prototype, "rotation", void 0); F([ W() ], Cl.prototype, "speed", void 0); F([ hw("lockedTargetId") ], Cl.prototype, "lockedTarget", void 0); class du extends Cl { /** * Gets the input sensibility for a mouse input. (default is 2000.0) * Higher values reduce sensitivity. */ get angularSensibility() { const e = this.inputs.attached.mouse; return e ? e.angularSensibility : 0; } /** * Sets the input sensibility for a mouse input. (default is 2000.0) * Higher values reduce sensitivity. */ set angularSensibility(e) { const t = this.inputs.attached.mouse; t && (t.angularSensibility = e); } /** * Gets or Set the list of keyboard keys used to control the forward move of the camera. */ get keysUp() { const e = this.inputs.attached.keyboard; return e ? e.keysUp : []; } set keysUp(e) { const t = this.inputs.attached.keyboard; t && (t.keysUp = e); } /** * Gets or Set the list of keyboard keys used to control the upward move of the camera. */ get keysUpward() { const e = this.inputs.attached.keyboard; return e ? e.keysUpward : []; } set keysUpward(e) { const t = this.inputs.attached.keyboard; t && (t.keysUpward = e); } /** * Gets or Set the list of keyboard keys used to control the backward move of the camera. */ get keysDown() { const e = this.inputs.attached.keyboard; return e ? e.keysDown : []; } set keysDown(e) { const t = this.inputs.attached.keyboard; t && (t.keysDown = e); } /** * Gets or Set the list of keyboard keys used to control the downward move of the camera. */ get keysDownward() { const e = this.inputs.attached.keyboard; return e ? e.keysDownward : []; } set keysDownward(e) { const t = this.inputs.attached.keyboard; t && (t.keysDownward = e); } /** * Gets or Set the list of keyboard keys used to control the left strafe move of the camera. */ get keysLeft() { const e = this.inputs.attached.keyboard; return e ? e.keysLeft : []; } set keysLeft(e) { const t = this.inputs.attached.keyboard; t && (t.keysLeft = e); } /** * Gets or Set the list of keyboard keys used to control the right strafe move of the camera. */ get keysRight() { const e = this.inputs.attached.keyboard; return e ? e.keysRight : []; } set keysRight(e) { const t = this.inputs.attached.keyboard; t && (t.keysRight = e); } /** * Gets or Set the list of keyboard keys used to control the left rotation move of the camera. 
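* @example
* // A hedged remapping sketch (values are keyboard keyCodes; not taken from this bundle):
* camera.keysUp = [87];          // W
* camera.keysDown = [83];        // S
* camera.keysLeft = [65];        // A
* camera.keysRight = [68];       // D
* camera.keysRotateLeft = [81];  // Q
* camera.keysRotateRight = [69]; // E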
*/ get keysRotateLeft() { const e = this.inputs.attached.keyboard; return e ? e.keysRotateLeft : []; } set keysRotateLeft(e) { const t = this.inputs.attached.keyboard; t && (t.keysRotateLeft = e); } /** * Gets or Set the list of keyboard keys used to control the right rotation move of the camera. */ get keysRotateRight() { const e = this.inputs.attached.keyboard; return e ? e.keysRotateRight : []; } set keysRotateRight(e) { const t = this.inputs.attached.keyboard; t && (t.keysRotateRight = e); } /** * Gets or Set the list of keyboard keys used to control the up rotation move of the camera. */ get keysRotateUp() { const e = this.inputs.attached.keyboard; return e ? e.keysRotateUp : []; } set keysRotateUp(e) { const t = this.inputs.attached.keyboard; t && (t.keysRotateUp = e); } /** * Gets or Set the list of keyboard keys used to control the down rotation move of the camera. */ get keysRotateDown() { const e = this.inputs.attached.keyboard; return e ? e.keysRotateDown : []; } set keysRotateDown(e) { const t = this.inputs.attached.keyboard; t && (t.keysRotateDown = e); } /** * Instantiates a Free Camera. * This represents a free type of camera. It can be useful in First Person Shooter game for instance. * Please consider using the new UniversalCamera instead as it adds more functionality like touch to this camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#universal-camera * @param name Define the name of the camera in the scene * @param position Define the start position of the camera in the scene * @param scene Define the scene the camera belongs to * @param setActiveOnSceneIfNoneActive Defines whether the camera should be marked as active if not other active cameras have been defined */ constructor(e, t, i, r = !0) { super(e, t, i, r), this.ellipsoid = new D(0.5, 1, 0.5), this.ellipsoidOffset = new D(0, 0, 0), this.checkCollisions = !1, this.applyGravity = !1, this._needMoveForGravity = !1, this._oldPosition = D.Zero(), this._diffPosition = D.Zero(), this._newPosition = D.Zero(), this._collisionMask = -1, this._onCollisionPositionChange = (s, n, a = null) => { this._newPosition.copyFrom(n), this._newPosition.subtractToRef(this._oldPosition, this._diffPosition), this._diffPosition.length() > $e.CollisionsEpsilon && (this.position.addToRef(this._diffPosition, this._deferredPositionUpdate), this._deferOnly ? this._deferredUpdated = !0 : this.position.copyFrom(this._deferredPositionUpdate), this.onCollide && a && this.onCollide(a)); }, this.inputs = new $L(this), this.inputs.addKeyboard().addMouse(); } /** * Attached controls to the current camera. * @param ignored defines an ignored parameter kept for backward compatibility. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e, t) { t = Ve.BackCompatCameraNoPreventDefault(arguments), this.inputs.attachElement(t); } /** * Detach the current controls from the specified dom element. */ detachControl() { this.inputs.detachElement(), this.cameraDirection = new D(0, 0, 0), this.cameraRotation = new at(0, 0); } /** * Define a collision mask to limit the list of object the camera can collide with */ get collisionMask() { return this._collisionMask; } set collisionMask(e) { this._collisionMask = isNaN(e) ? -1 : e; } /** * @internal */ _collideWithWorld(e) { let t; this.parent ? 
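// The collision query starts from the camera's world position, lowered by ellipsoid.y and shifted
// by ellipsoidOffset, and is handed to the scene's collision coordinator; scene gravity is added
// to the displacement when applyGravity is set. A minimal setup sketch (public API names assumed):
// scene.collisionsEnabled = true; scene.gravity = new BABYLON.Vector3(0, -0.9, 0);
// camera.checkCollisions = true; camera.applyGravity = true;
// camera.ellipsoid = new BABYLON.Vector3(1, 1, 1);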
t = D.TransformCoordinates(this.position, this.parent.getWorldMatrix()) : t = this.position, t.subtractFromFloatsToRef(0, this.ellipsoid.y, 0, this._oldPosition), this._oldPosition.addInPlace(this.ellipsoidOffset); const i = this.getScene().collisionCoordinator; this._collider || (this._collider = i.createCollider()), this._collider._radius = this.ellipsoid, this._collider.collisionMask = this._collisionMask; let r = e; this.applyGravity && (r = e.add(this.getScene().gravity)), i.getNewPosition(this._oldPosition, r, this._collider, 3, null, this._onCollisionPositionChange, this.uniqueId); } /** @internal */ _checkInputs() { this._localDirection || (this._localDirection = D.Zero(), this._transformedDirection = D.Zero()), this.inputs.checkInputs(), super._checkInputs(); } /** * Enable movement without a user input. This allows gravity to always be applied. */ set needMoveForGravity(e) { this._needMoveForGravity = e; } /** * When true, gravity is applied whether there is user input or not. */ get needMoveForGravity() { return this._needMoveForGravity; } /** @internal */ _decideIfNeedsToMove() { return this._needMoveForGravity || Math.abs(this.cameraDirection.x) > 0 || Math.abs(this.cameraDirection.y) > 0 || Math.abs(this.cameraDirection.z) > 0; } /** @internal */ _updatePosition() { this.checkCollisions && this.getScene().collisionsEnabled ? this._collideWithWorld(this.cameraDirection) : super._updatePosition(); } /** * Destroy the camera and release the current resources hold by it. */ dispose() { this.inputs.clear(), super.dispose(); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "FreeCamera"; } } F([ oo() ], du.prototype, "ellipsoid", void 0); F([ oo() ], du.prototype, "ellipsoidOffset", void 0); F([ W() ], du.prototype, "checkCollisions", void 0); F([ W() ], du.prototype, "applyGravity", void 0); In.AddNodeConstructor("TouchCamera", (c, e) => () => new IK(c, D.Zero(), e)); class IK extends du { /** * Defines the touch sensibility for rotation. * The higher the faster. */ get touchAngularSensibility() { const e = this.inputs.attached.touch; return e ? e.touchAngularSensibility : 0; } set touchAngularSensibility(e) { const t = this.inputs.attached.touch; t && (t.touchAngularSensibility = e); } /** * Defines the touch sensibility for move. * The higher the faster. */ get touchMoveSensibility() { const e = this.inputs.attached.touch; return e ? e.touchMoveSensibility : 0; } set touchMoveSensibility(e) { const t = this.inputs.attached.touch; t && (t.touchMoveSensibility = e); } /** * Instantiates a new touch camera. * This represents a FPS type of camera controlled by touch. * This is like a universal camera minus the Gamepad controls. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#universal-camera * @param name Define the name of the camera in the scene * @param position Define the start position of the camera in the scene * @param scene Define the scene the camera belongs to */ constructor(e, t, i) { super(e, t, i), this.inputs.addTouch(), this._setupInputs(); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "TouchCamera"; } /** @internal */ _setupInputs() { const e = this.inputs.attached.touch, t = this.inputs.attached.mouse; t ? t.touchEnabled = !1 : e.allowMouse = !0; } } In.AddNodeConstructor("ArcRotateCamera", (c, e) => () => new Pn(c, 0, 0, 1, D.Zero(), e)); class Pn extends Cl { /** * Defines the target point of the camera. 
* The camera looks towards it from the radius distance. */ get target() { return this._target; } set target(e) { this.setTarget(e); } /** * Defines the target mesh of the camera. * The camera looks towards it from the radius distance. * Please note that setting a target host will disable panning. */ get targetHost() { return this._targetHost; } set targetHost(e) { e && this.setTarget(e); } /** * Return the current target position of the camera. This value is expressed in local space. * @returns the target position */ getTarget() { return this.target; } /** * Define the current local position of the camera in the scene */ get position() { return this._position; } set position(e) { this.setPosition(e); } /** * The vector the camera should consider as up. (default is Vector3(0, 1, 0) as returned by Vector3.Up()) * Setting this will copy the given vector to the camera's upVector, and set rotation matrices to and from Y up. * DO NOT set the up vector using copyFrom or copyFromFloats, as this bypasses setting the above matrices. */ set upVector(e) { this._upToYMatrix || (this._yToUpMatrix = new Ae(), this._upToYMatrix = new Ae(), this._upVector = D.Zero()), e.normalize(), this._upVector.copyFrom(e), this.setMatUp(); } get upVector() { return this._upVector; } /** * Sets the Y-up to camera up-vector rotation matrix, and the up-vector to Y-up rotation matrix. */ setMatUp() { Ae.RotationAlignToRef(D.UpReadOnly, this._upVector, this._yToUpMatrix), Ae.RotationAlignToRef(this._upVector, D.UpReadOnly, this._upToYMatrix); } //-- begin properties for backward compatibility for inputs /** * Gets or Set the pointer angular sensibility along the X axis or how fast is the camera rotating. */ get angularSensibilityX() { const e = this.inputs.attached.pointers; return e ? e.angularSensibilityX : 0; } set angularSensibilityX(e) { const t = this.inputs.attached.pointers; t && (t.angularSensibilityX = e); } /** * Gets or Set the pointer angular sensibility along the Y axis or how fast is the camera rotating. */ get angularSensibilityY() { const e = this.inputs.attached.pointers; return e ? e.angularSensibilityY : 0; } set angularSensibilityY(e) { const t = this.inputs.attached.pointers; t && (t.angularSensibilityY = e); } /** * Gets or Set the pointer pinch precision or how fast is the camera zooming. */ get pinchPrecision() { const e = this.inputs.attached.pointers; return e ? e.pinchPrecision : 0; } set pinchPrecision(e) { const t = this.inputs.attached.pointers; t && (t.pinchPrecision = e); } /** * Gets or Set the pointer pinch delta percentage or how fast is the camera zooming. * It will be used instead of pinchDeltaPrecision if different from 0. * It defines the percentage of current camera.radius to use as delta when pinch zoom is used. */ get pinchDeltaPercentage() { const e = this.inputs.attached.pointers; return e ? e.pinchDeltaPercentage : 0; } set pinchDeltaPercentage(e) { const t = this.inputs.attached.pointers; t && (t.pinchDeltaPercentage = e); } /** * Gets or Set the pointer use natural pinch zoom to override the pinch precision * and pinch delta percentage. * When useNaturalPinchZoom is true, multi touch zoom will zoom in such * that any object in the plane at the camera's target point will scale * perfectly with finger motion. */ get useNaturalPinchZoom() { const e = this.inputs.attached.pointers; return e ? 
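/* Illustrative sketch: tuning the up vector and pointer inputs documented above. `camera` is assumed
   to be an existing ArcRotateCamera (minified here as Pn) with its pointer input attached; the BABYLON
   namespace is assumed.

     // Assign a new vector so the setter normalizes it and rebuilds the up/Y rotation matrices;
     // do not mutate it with copyFrom/copyFromFloats (see the warning above).
     camera.upVector = new BABYLON.Vector3(0, 0, 1);
     camera.angularSensibilityX = 2000;           // higher values rotate more slowly
     camera.angularSensibilityY = 2000;
     camera.useNaturalPinchZoom = true;           // overrides pinchPrecision / pinchDeltaPercentage
*/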
e.useNaturalPinchZoom : !1; } set useNaturalPinchZoom(e) { const t = this.inputs.attached.pointers; t && (t.useNaturalPinchZoom = e); } /** * Gets or Set the pointer panning sensibility or how fast is the camera moving. */ get panningSensibility() { const e = this.inputs.attached.pointers; return e ? e.panningSensibility : 0; } set panningSensibility(e) { const t = this.inputs.attached.pointers; t && (t.panningSensibility = e); } /** * Gets or Set the list of keyboard keys used to control beta angle in a positive direction. */ get keysUp() { const e = this.inputs.attached.keyboard; return e ? e.keysUp : []; } set keysUp(e) { const t = this.inputs.attached.keyboard; t && (t.keysUp = e); } /** * Gets or Set the list of keyboard keys used to control beta angle in a negative direction. */ get keysDown() { const e = this.inputs.attached.keyboard; return e ? e.keysDown : []; } set keysDown(e) { const t = this.inputs.attached.keyboard; t && (t.keysDown = e); } /** * Gets or Set the list of keyboard keys used to control alpha angle in a negative direction. */ get keysLeft() { const e = this.inputs.attached.keyboard; return e ? e.keysLeft : []; } set keysLeft(e) { const t = this.inputs.attached.keyboard; t && (t.keysLeft = e); } /** * Gets or Set the list of keyboard keys used to control alpha angle in a positive direction. */ get keysRight() { const e = this.inputs.attached.keyboard; return e ? e.keysRight : []; } set keysRight(e) { const t = this.inputs.attached.keyboard; t && (t.keysRight = e); } /** * Gets or Set the mouse wheel precision or how fast is the camera zooming. */ get wheelPrecision() { const e = this.inputs.attached.mousewheel; return e ? e.wheelPrecision : 0; } set wheelPrecision(e) { const t = this.inputs.attached.mousewheel; t && (t.wheelPrecision = e); } /** * Gets or Set the boolean value that controls whether or not the mouse wheel * zooms to the location of the mouse pointer or not. The default is false. */ get zoomToMouseLocation() { const e = this.inputs.attached.mousewheel; return e ? e.zoomToMouseLocation : !1; } set zoomToMouseLocation(e) { const t = this.inputs.attached.mousewheel; t && (t.zoomToMouseLocation = e); } /** * Gets or Set the mouse wheel delta percentage or how fast is the camera zooming. * It will be used instead of pinchDeltaPrecision if different from 0. * It defines the percentage of current camera.radius to use as delta when pinch zoom is used. */ get wheelDeltaPercentage() { const e = this.inputs.attached.mousewheel; return e ? e.wheelDeltaPercentage : 0; } set wheelDeltaPercentage(e) { const t = this.inputs.attached.mousewheel; t && (t.wheelDeltaPercentage = e); } /** * Gets the bouncing behavior of the camera if it has been enabled. * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#bouncing-behavior */ get bouncingBehavior() { return this._bouncingBehavior; } /** * Defines if the bouncing behavior of the camera is enabled on the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#bouncing-behavior */ get useBouncingBehavior() { return this._bouncingBehavior != null; } set useBouncingBehavior(e) { e !== this.useBouncingBehavior && (e ? (this._bouncingBehavior = new a5(), this.addBehavior(this._bouncingBehavior)) : this._bouncingBehavior && (this.removeBehavior(this._bouncingBehavior), this._bouncingBehavior = null)); } /** * Gets the framing behavior of the camera if it has been enabled. 
* @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#framing-behavior */ get framingBehavior() { return this._framingBehavior; } /** * Defines if the framing behavior of the camera is enabled on the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#framing-behavior */ get useFramingBehavior() { return this._framingBehavior != null; } set useFramingBehavior(e) { e !== this.useFramingBehavior && (e ? (this._framingBehavior = new I1(), this.addBehavior(this._framingBehavior)) : this._framingBehavior && (this.removeBehavior(this._framingBehavior), this._framingBehavior = null)); } /** * Gets the auto rotation behavior of the camera if it has been enabled. * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#autorotation-behavior */ get autoRotationBehavior() { return this._autoRotationBehavior; } /** * Defines if the auto rotation behavior of the camera is enabled on the camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/behaviors/cameraBehaviors#autorotation-behavior */ get useAutoRotationBehavior() { return this._autoRotationBehavior != null; } set useAutoRotationBehavior(e) { e !== this.useAutoRotationBehavior && (e ? (this._autoRotationBehavior = new Vte(), this.addBehavior(this._autoRotationBehavior)) : this._autoRotationBehavior && (this.removeBehavior(this._autoRotationBehavior), this._autoRotationBehavior = null)); } /** * Instantiates a new ArcRotateCamera in a given scene * @param name Defines the name of the camera * @param alpha Defines the camera rotation along the longitudinal axis * @param beta Defines the camera rotation along the latitudinal axis * @param radius Defines the camera distance from its target * @param target Defines the camera target * @param scene Defines the scene the camera belongs to * @param setActiveOnSceneIfNoneActive Defines whether the camera should be marked as active if not other active cameras have been defined */ constructor(e, t, i, r, s, n, a = !0) { super(e, D.Zero(), n, a), this.inertialAlphaOffset = 0, this.inertialBetaOffset = 0, this.inertialRadiusOffset = 0, this.lowerAlphaLimit = null, this.upperAlphaLimit = null, this.lowerBetaLimit = 0.01, this.upperBetaLimit = Math.PI - 0.01, this.lowerRadiusLimit = null, this.upperRadiusLimit = null, this.inertialPanningX = 0, this.inertialPanningY = 0, this.pinchToPanMaxDistance = 20, this.panningDistanceLimit = null, this.panningOriginTarget = D.Zero(), this.panningInertia = 0.9, this.zoomOnFactor = 1, this.targetScreenOffset = at.Zero(), this.allowUpsideDown = !0, this.useInputToRestoreState = !0, this._viewMatrix = new Ae(), this.panningAxis = new D(1, 1, 0), this._transformedDirection = new D(), this.mapPanning = !1, this.onMeshTargetChangedObservable = new Fe(), this.checkCollisions = !1, this.collisionRadius = new D(0.5, 0.5, 0.5), this._previousPosition = D.Zero(), this._collisionVelocity = D.Zero(), this._newPosition = D.Zero(), this._computationVector = D.Zero(), this._onCollisionPositionChange = (l, o, u = null) => { u ? 
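/* Illustrative sketch of constructing the camera and enabling the behaviors described above. `scene`
   and `canvas` are assumed to exist; ArcRotateCamera and Vector3 are the conventional public
   Babylon.js names for the minified classes in this bundle.

     const camera = new BABYLON.ArcRotateCamera(
       "orbit", Math.PI / 2, Math.PI / 3, 10, BABYLON.Vector3.Zero(), scene);
     camera.attachControl(canvas, true);
     camera.wheelDeltaPercentage = 0.01;          // zoom speed proportional to the current radius
     camera.useBouncingBehavior = true;           // bounce back when hitting radius limits
     camera.useFramingBehavior = true;            // reframe when a mesh target is set
     camera.useAutoRotationBehavior = true;       // slowly orbit the target when idle
*/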
(this.setPosition(o), this.onCollide && this.onCollide(u)) : this._previousPosition.copyFrom(this._position); const h = Math.cos(this.alpha), d = Math.sin(this.alpha), f = Math.cos(this.beta); let p = Math.sin(this.beta); p === 0 && (p = 1e-4); const m = this._getTargetPosition(); this._computationVector.copyFromFloats(this.radius * h * p, this.radius * f, this.radius * d * p), m.addToRef(this._computationVector, this._newPosition), this._position.copyFrom(this._newPosition); let _ = this.upVector; this.allowUpsideDown && this.beta < 0 && (_ = _.clone(), _ = _.negate()), this._computeViewMatrix(this._position, m, _), this._viewMatrix.addAtIndex(12, this.targetScreenOffset.x), this._viewMatrix.addAtIndex(13, this.targetScreenOffset.y), this._collisionTriggered = !1; }, this._target = D.Zero(), s && this.setTarget(s), this.alpha = t, this.beta = i, this.radius = r, this.getViewMatrix(), this.inputs = new JB(this), this.inputs.addKeyboard().addMouseWheel().addPointers(); } // Cache /** @internal */ _initCache() { super._initCache(), this._cache._target = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cache.alpha = void 0, this._cache.beta = void 0, this._cache.radius = void 0, this._cache.targetScreenOffset = at.Zero(); } /** * @internal */ _updateCache(e) { e || super._updateCache(), this._cache._target.copyFrom(this._getTargetPosition()), this._cache.alpha = this.alpha, this._cache.beta = this.beta, this._cache.radius = this.radius, this._cache.targetScreenOffset.copyFrom(this.targetScreenOffset); } _getTargetPosition() { if (this._targetHost && this._targetHost.getAbsolutePosition) { const t = this._targetHost.getAbsolutePosition(); this._targetBoundingCenter ? t.addToRef(this._targetBoundingCenter, this._target) : this._target.copyFrom(t); } const e = this._getLockedTargetPosition(); return e || this._target; } /** * Stores the current state of the camera (alpha, beta, radius and target) * @returns the camera itself */ storeState() { return this._storedAlpha = this.alpha, this._storedBeta = this.beta, this._storedRadius = this.radius, this._storedTarget = this._getTargetPosition().clone(), this._storedTargetScreenOffset = this.targetScreenOffset.clone(), super.storeState(); } /** * @internal * Restored camera state. You must call storeState() first */ _restoreStateValues() { return super._restoreStateValues() ? (this.setTarget(this._storedTarget.clone()), this.alpha = this._storedAlpha, this.beta = this._storedBeta, this.radius = this._storedRadius, this.targetScreenOffset = this._storedTargetScreenOffset.clone(), this.inertialAlphaOffset = 0, this.inertialBetaOffset = 0, this.inertialRadiusOffset = 0, this.inertialPanningX = 0, this.inertialPanningY = 0, !0) : !1; } // Synchronized /** @internal */ _isSynchronizedViewMatrix() { return super._isSynchronizedViewMatrix() ? this._cache._target.equals(this._getTargetPosition()) && this._cache.alpha === this.alpha && this._cache.beta === this.beta && this._cache.radius === this.radius && this._cache.targetScreenOffset.equals(this.targetScreenOffset) : !1; } /** * Attached controls to the current camera. * @param ignored defines an ignored parameter kept for backward compatibility. 
* @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) * @param useCtrlForPanning Defines whether ctrl is used for panning within the controls * @param panningMouseButton Defines whether panning is allowed through mouse click button */ attachControl(e, t, i = !0, r = 2) { const s = arguments; t = Ve.BackCompatCameraNoPreventDefault(s), this._useCtrlForPanning = i, this._panningMouseButton = r, typeof s[0] == "boolean" && (s.length > 1 && (this._useCtrlForPanning = s[1]), s.length > 2 && (this._panningMouseButton = s[2])), this.inputs.attachElement(t), this._reset = () => { this.inertialAlphaOffset = 0, this.inertialBetaOffset = 0, this.inertialRadiusOffset = 0, this.inertialPanningX = 0, this.inertialPanningY = 0; }; } /** * Detach the current controls from the specified dom element. */ detachControl() { this.inputs.detachElement(), this._reset && this._reset(); } /** @internal */ _checkInputs() { if (!this._collisionTriggered) { if (this.inputs.checkInputs(), this.inertialAlphaOffset !== 0 || this.inertialBetaOffset !== 0 || this.inertialRadiusOffset !== 0) { const e = this.invertRotation ? -1 : 1, t = this._calculateHandednessMultiplier(); let i = this.inertialAlphaOffset * t; this.beta < 0 && (i *= -1), this.alpha += i * e, this.beta += this.inertialBetaOffset * e, this.radius -= this.inertialRadiusOffset, this.inertialAlphaOffset *= this.inertia, this.inertialBetaOffset *= this.inertia, this.inertialRadiusOffset *= this.inertia, Math.abs(this.inertialAlphaOffset) < Sr && (this.inertialAlphaOffset = 0), Math.abs(this.inertialBetaOffset) < Sr && (this.inertialBetaOffset = 0), Math.abs(this.inertialRadiusOffset) < this.speed * Sr && (this.inertialRadiusOffset = 0); } if (this.inertialPanningX !== 0 || this.inertialPanningY !== 0) { const e = new D(this.inertialPanningX, this.inertialPanningY, this.inertialPanningY); if (this._viewMatrix.invertToRef(this._cameraTransformMatrix), e.multiplyInPlace(this.panningAxis), D.TransformNormalToRef(e, this._cameraTransformMatrix, this._transformedDirection), this.mapPanning) { const t = this.upVector, i = D.CrossToRef(this._transformedDirection, t, this._transformedDirection); D.CrossToRef(t, i, this._transformedDirection); } else this.panningAxis.y || (this._transformedDirection.y = 0); this._targetHost || (this.panningDistanceLimit ? (this._transformedDirection.addInPlace(this._target), D.DistanceSquared(this._transformedDirection, this.panningOriginTarget) <= this.panningDistanceLimit * this.panningDistanceLimit && this._target.copyFrom(this._transformedDirection)) : this._target.addInPlace(this._transformedDirection)), this.inertialPanningX *= this.panningInertia, this.inertialPanningY *= this.panningInertia, Math.abs(this.inertialPanningX) < this.speed * Sr && (this.inertialPanningX = 0), Math.abs(this.inertialPanningY) < this.speed * Sr && (this.inertialPanningY = 0); } this._checkLimits(), super._checkInputs(); } } _checkLimits() { this.lowerBetaLimit === null || this.lowerBetaLimit === void 0 ? this.allowUpsideDown && this.beta > Math.PI && (this.beta = this.beta - 2 * Math.PI) : this.beta < this.lowerBetaLimit && (this.beta = this.lowerBetaLimit), this.upperBetaLimit === null || this.upperBetaLimit === void 0 ? 
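/* Illustrative sketch: the inertia/panning bookkeeping in _checkInputs above is driven by these public
   properties, and the angle/radius clamping happens in _checkLimits. `camera` is assumed to be an
   existing ArcRotateCamera.

     camera.panningSensibility = 50;                      // higher values pan more slowly
     camera.panningAxis = new BABYLON.Vector3(1, 0, 1);   // pan in the XZ plane only
     camera.panningInertia = 0.9;
     camera.panningDistanceLimit = 30;                    // keep the target near panningOriginTarget
     camera.lowerRadiusLimit = 2;                         // clamped by _checkLimits each frame
     camera.upperRadiusLimit = 50;
     camera.upperBetaLimit = Math.PI / 2.1;               // keep the camera above the ground plane
*/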
this.allowUpsideDown && this.beta < -Math.PI && (this.beta = this.beta + 2 * Math.PI) : this.beta > this.upperBetaLimit && (this.beta = this.upperBetaLimit), this.lowerAlphaLimit !== null && this.alpha < this.lowerAlphaLimit && (this.alpha = this.lowerAlphaLimit), this.upperAlphaLimit !== null && this.alpha > this.upperAlphaLimit && (this.alpha = this.upperAlphaLimit), this.lowerRadiusLimit !== null && this.radius < this.lowerRadiusLimit && (this.radius = this.lowerRadiusLimit, this.inertialRadiusOffset = 0), this.upperRadiusLimit !== null && this.radius > this.upperRadiusLimit && (this.radius = this.upperRadiusLimit, this.inertialRadiusOffset = 0); } /** * Rebuilds angles (alpha, beta) and radius from the give position and target */ rebuildAnglesAndRadius() { this._position.subtractToRef(this._getTargetPosition(), this._computationVector), (this._upVector.x !== 0 || this._upVector.y !== 1 || this._upVector.z !== 0) && D.TransformCoordinatesToRef(this._computationVector, this._upToYMatrix, this._computationVector), this.radius = this._computationVector.length(), this.radius === 0 && (this.radius = 1e-4); const e = this.alpha; this._computationVector.x === 0 && this._computationVector.z === 0 ? this.alpha = Math.PI / 2 : this.alpha = Math.acos(this._computationVector.x / Math.sqrt(Math.pow(this._computationVector.x, 2) + Math.pow(this._computationVector.z, 2))), this._computationVector.z < 0 && (this.alpha = 2 * Math.PI - this.alpha); const t = Math.round((e - this.alpha) / (2 * Math.PI)); this.alpha += t * 2 * Math.PI, this.beta = Math.acos(this._computationVector.y / this.radius), this._checkLimits(); } /** * Use a position to define the current camera related information like alpha, beta and radius * @param position Defines the position to set the camera at */ setPosition(e) { this._position.equals(e) || (this._position.copyFrom(e), this.rebuildAnglesAndRadius()); } /** * Defines the target the camera should look at. * This will automatically adapt alpha beta and radius to fit within the new target. * Please note that setting a target as a mesh will disable panning. * @param target Defines the new target as a Vector or a mesh * @param toBoundingCenter In case of a mesh target, defines whether to target the mesh position or its bounding information center * @param allowSamePosition If false, prevents reapplying the new computed position if it is identical to the current one (optim) * @param cloneAlphaBetaRadius If true, replicate the current setup (alpha, beta, radius) on the new target */ setTarget(e, t = !1, i = !1, r = !1) { var s; if (r = (s = this.overrideCloneAlphaBetaRadius) !== null && s !== void 0 ? s : r, e.getBoundingInfo) t ? 
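/* Illustrative sketch for setTarget/setPosition as documented above. `camera` is assumed to be an
   ArcRotateCamera and `playerMesh` an existing mesh; both names are placeholders.

     camera.setTarget(new BABYLON.Vector3(0, 1, 0));       // look at a point (panning stays enabled)
     camera.setTarget(playerMesh, true);                    // follow a mesh, centered on its bounding box
                                                            // (note: a mesh target disables panning)
     camera.setPosition(new BABYLON.Vector3(0, 5, -10));    // alpha/beta/radius rebuilt from this position
*/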
this._targetBoundingCenter = e.getBoundingInfo().boundingBox.centerWorld.clone() : this._targetBoundingCenter = null, e.computeWorldMatrix(), this._targetHost = e, this._target = this._getTargetPosition(), this.onMeshTargetChangedObservable.notifyObservers(this._targetHost); else { const n = e, a = this._getTargetPosition(); if (a && !i && a.equals(n)) return; this._targetHost = null, this._target = n, this._targetBoundingCenter = null, this.onMeshTargetChangedObservable.notifyObservers(null); } r || this.rebuildAnglesAndRadius(); } /** @internal */ _getViewMatrix() { const e = Math.cos(this.alpha), t = Math.sin(this.alpha), i = Math.cos(this.beta); let r = Math.sin(this.beta); r === 0 && (r = 1e-4), this.radius === 0 && (this.radius = 1e-4); const s = this._getTargetPosition(); if (this._computationVector.copyFromFloats(this.radius * e * r, this.radius * i, this.radius * t * r), (this._upVector.x !== 0 || this._upVector.y !== 1 || this._upVector.z !== 0) && D.TransformCoordinatesToRef(this._computationVector, this._yToUpMatrix, this._computationVector), s.addToRef(this._computationVector, this._newPosition), this.getScene().collisionsEnabled && this.checkCollisions) { const n = this.getScene().collisionCoordinator; this._collider || (this._collider = n.createCollider()), this._collider._radius = this.collisionRadius, this._newPosition.subtractToRef(this._position, this._collisionVelocity), this._collisionTriggered = !0, n.getNewPosition(this._position, this._collisionVelocity, this._collider, 3, null, this._onCollisionPositionChange, this.uniqueId); } else { this._position.copyFrom(this._newPosition); let n = this.upVector; this.allowUpsideDown && r < 0 && (n = n.negate()), this._computeViewMatrix(this._position, s, n), this._viewMatrix.addAtIndex(12, this.targetScreenOffset.x), this._viewMatrix.addAtIndex(13, this.targetScreenOffset.y); } return this._currentTarget = s, this._viewMatrix; } /** * Zooms on a mesh to be at the min distance where we could see it fully in the current viewport. * @param meshes Defines the mesh to zoom on * @param doNotUpdateMaxZ Defines whether or not maxZ should be updated whilst zooming on the mesh (this can happen if the mesh is big and the maxradius pretty small for instance) */ zoomOn(e, t = !1) { e = e || this.getScene().meshes; const i = ke.MinMax(e); let r = this._calculateLowerRadiusFromModelBoundingSphere(i.min, i.max); r = Math.max(Math.min(r, this.upperRadiusLimit || Number.MAX_VALUE), this.lowerRadiusLimit || 0), this.radius = r * this.zoomOnFactor, this.focusOn({ min: i.min, max: i.max, distance: r }, t); } /** * Focus on a mesh or a bounding box. This adapts the target and maxRadius if necessary but does not update the current radius. 
* The target will be changed but the radius * @param meshesOrMinMaxVectorAndDistance Defines the mesh or bounding info to focus on * @param doNotUpdateMaxZ Defines whether or not maxZ should be updated whilst zooming on the mesh (this can happen if the mesh is big and the maxradius pretty small for instance) */ focusOn(e, t = !1) { let i, r; if (e.min === void 0) { const s = e || this.getScene().meshes; i = ke.MinMax(s), r = D.Distance(i.min, i.max); } else { const s = e; i = s, r = s.distance; } this._target = ke.Center(i), t || (this.maxZ = r * 2); } /** * @override * Override Camera.createRigCamera */ createRigCamera(e, t) { let i = 0; switch (this.cameraRigMode) { case Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH: case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL: case Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER: case Ai.RIG_MODE_STEREOSCOPIC_INTERLACED: case Ai.RIG_MODE_VR: i = this._cameraRigParams.stereoHalfAngle * (t === 0 ? 1 : -1); break; case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED: i = this._cameraRigParams.stereoHalfAngle * (t === 0 ? -1 : 1); break; } const r = new Pn(e, this.alpha + i, this.beta, this.radius, this._target, this.getScene()); return r._cameraRigParams = {}, r.isRigCamera = !0, r.rigParent = this, r.upVector = this.upVector, r.mode = this.mode, r.orthoLeft = this.orthoLeft, r.orthoRight = this.orthoRight, r.orthoBottom = this.orthoBottom, r.orthoTop = this.orthoTop, r; } /** * @internal * @override * Override Camera._updateRigCameras */ _updateRigCameras() { const e = this._rigCameras[0], t = this._rigCameras[1]; switch (e.beta = t.beta = this.beta, this.cameraRigMode) { case Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH: case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL: case Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER: case Ai.RIG_MODE_STEREOSCOPIC_INTERLACED: case Ai.RIG_MODE_VR: e.alpha = this.alpha - this._cameraRigParams.stereoHalfAngle, t.alpha = this.alpha + this._cameraRigParams.stereoHalfAngle; break; case Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED: e.alpha = this.alpha + this._cameraRigParams.stereoHalfAngle, t.alpha = this.alpha - this._cameraRigParams.stereoHalfAngle; break; } super._updateRigCameras(); } /** * @internal */ _calculateLowerRadiusFromModelBoundingSphere(e, t, i = 1) { const r = D.Distance(e, t), n = this.getScene().getEngine().getAspectRatio(this), a = Math.tan(this.fov / 2), l = a * n, u = r * 0.5 * i, h = u * Math.sqrt(1 + 1 / (l * l)), d = u * Math.sqrt(1 + 1 / (a * a)); return Math.max(h, d); } /** * Destroy the camera and release the current resources hold by it. */ dispose() { this.inputs.clear(), super.dispose(); } /** * Gets the current object class name. 
* @returns the class name */ getClassName() { return "ArcRotateCamera"; } } F([ W() ], Pn.prototype, "alpha", void 0); F([ W() ], Pn.prototype, "beta", void 0); F([ W() ], Pn.prototype, "radius", void 0); F([ W() ], Pn.prototype, "overrideCloneAlphaBetaRadius", void 0); F([ oo("target") ], Pn.prototype, "_target", void 0); F([ hw("targetHost") ], Pn.prototype, "_targetHost", void 0); F([ W() ], Pn.prototype, "inertialAlphaOffset", void 0); F([ W() ], Pn.prototype, "inertialBetaOffset", void 0); F([ W() ], Pn.prototype, "inertialRadiusOffset", void 0); F([ W() ], Pn.prototype, "lowerAlphaLimit", void 0); F([ W() ], Pn.prototype, "upperAlphaLimit", void 0); F([ W() ], Pn.prototype, "lowerBetaLimit", void 0); F([ W() ], Pn.prototype, "upperBetaLimit", void 0); F([ W() ], Pn.prototype, "lowerRadiusLimit", void 0); F([ W() ], Pn.prototype, "upperRadiusLimit", void 0); F([ W() ], Pn.prototype, "inertialPanningX", void 0); F([ W() ], Pn.prototype, "inertialPanningY", void 0); F([ W() ], Pn.prototype, "pinchToPanMaxDistance", void 0); F([ W() ], Pn.prototype, "panningDistanceLimit", void 0); F([ oo() ], Pn.prototype, "panningOriginTarget", void 0); F([ W() ], Pn.prototype, "panningInertia", void 0); F([ W() ], Pn.prototype, "zoomToMouseLocation", null); F([ W() ], Pn.prototype, "zoomOnFactor", void 0); F([ PL() ], Pn.prototype, "targetScreenOffset", void 0); F([ W() ], Pn.prototype, "allowUpsideDown", void 0); F([ W() ], Pn.prototype, "useInputToRestoreState", void 0); In.AddNodeConstructor("DeviceOrientationCamera", (c, e) => () => new eU(c, D.Zero(), e)); class eU extends du { /** * Creates a new device orientation camera * @param name The name of the camera * @param position The start position camera * @param scene The scene the camera belongs to */ constructor(e, t, i) { super(e, t, i), this._tmpDragQuaternion = new Ze(), this._disablePointerInputWhenUsingDeviceOrientation = !0, this._dragFactor = 0, this._quaternionCache = new Ze(), this.inputs.addDeviceOrientation(), this.inputs._deviceOrientationInput && this.inputs._deviceOrientationInput._onDeviceOrientationChangedObservable.addOnce(() => { this._disablePointerInputWhenUsingDeviceOrientation && this.inputs._mouseInput && (this.inputs._mouseInput._allowCameraRotation = !1, this.inputs._mouseInput.onPointerMovedObservable.add((r) => { this._dragFactor != 0 && (this._initialQuaternion || (this._initialQuaternion = new Ze()), Ze.FromEulerAnglesToRef(0, r.offsetX * this._dragFactor, 0, this._tmpDragQuaternion), this._initialQuaternion.multiplyToRef(this._tmpDragQuaternion, this._initialQuaternion)); })); }); } /** * Gets or sets a boolean indicating that pointer input must be disabled on first orientation sensor update (Default: true) */ get disablePointerInputWhenUsingDeviceOrientation() { return this._disablePointerInputWhenUsingDeviceOrientation; } set disablePointerInputWhenUsingDeviceOrientation(e) { this._disablePointerInputWhenUsingDeviceOrientation = e; } /** * Enabled turning on the y axis when the orientation sensor is active * @param dragFactor the factor that controls the turn speed (default: 1/300) */ enableHorizontalDragging(e = 1 / 300) { this._dragFactor = e; } /** * Gets the current instance class name ("DeviceOrientationCamera"). * This helps avoiding instanceof at run time. * @returns the class name */ getClassName() { return "DeviceOrientationCamera"; } /** * @internal * Checks and applies the current values of the inputs to the camera. 
(Internal use only) */ _checkInputs() { super._checkInputs(), this._quaternionCache.copyFrom(this.rotationQuaternion), this._initialQuaternion && this._initialQuaternion.multiplyToRef(this.rotationQuaternion, this.rotationQuaternion); } /** * Reset the camera to its default orientation on the specified axis only. * @param axis The axis to reset */ resetToCurrentRotation(e = bl.Y) { this.rotationQuaternion && (this._initialQuaternion || (this._initialQuaternion = new Ze()), this._initialQuaternion.copyFrom(this._quaternionCache || this.rotationQuaternion), ["x", "y", "z"].forEach((t) => { e[t] ? this._initialQuaternion[t] *= -1 : this._initialQuaternion[t] = 0; }), this._initialQuaternion.normalize(), this._initialQuaternion.multiplyToRef(this.rotationQuaternion, this.rotationQuaternion)); } } class Xte extends WL { /** * Instantiates a new FlyCameraInputsManager. * @param camera Defines the camera the inputs belong to. */ constructor(e) { super(e); } /** * Add keyboard input support to the input manager. * @returns the new FlyCameraKeyboardMoveInput(). */ addKeyboard() { return this.add(new TT()), this; } /** * Add mouse input support to the input manager. * @returns the new FlyCameraMouseInput(). */ addMouse() { return this.add(new XL()), this; } } class qL extends Cl { /** * Gets the input sensibility for mouse input. * Higher values reduce sensitivity. */ get angularSensibility() { const e = this.inputs.attached.mouse; return e ? e.angularSensibility : 0; } /** * Sets the input sensibility for a mouse input. * Higher values reduce sensitivity. */ set angularSensibility(e) { const t = this.inputs.attached.mouse; t && (t.angularSensibility = e); } /** * Get the keys for camera movement forward. */ get keysForward() { const e = this.inputs.attached.keyboard; return e ? e.keysForward : []; } /** * Set the keys for camera movement forward. */ set keysForward(e) { const t = this.inputs.attached.keyboard; t && (t.keysForward = e); } /** * Get the keys for camera movement backward. */ get keysBackward() { const e = this.inputs.attached.keyboard; return e ? e.keysBackward : []; } set keysBackward(e) { const t = this.inputs.attached.keyboard; t && (t.keysBackward = e); } /** * Get the keys for camera movement up. */ get keysUp() { const e = this.inputs.attached.keyboard; return e ? e.keysUp : []; } /** * Set the keys for camera movement up. */ set keysUp(e) { const t = this.inputs.attached.keyboard; t && (t.keysUp = e); } /** * Get the keys for camera movement down. */ get keysDown() { const e = this.inputs.attached.keyboard; return e ? e.keysDown : []; } /** * Set the keys for camera movement down. */ set keysDown(e) { const t = this.inputs.attached.keyboard; t && (t.keysDown = e); } /** * Get the keys for camera movement left. */ get keysLeft() { const e = this.inputs.attached.keyboard; return e ? e.keysLeft : []; } /** * Set the keys for camera movement left. */ set keysLeft(e) { const t = this.inputs.attached.keyboard; t && (t.keysLeft = e); } /** * Set the keys for camera movement right. */ get keysRight() { const e = this.inputs.attached.keyboard; return e ? e.keysRight : []; } /** * Set the keys for camera movement right. */ set keysRight(e) { const t = this.inputs.attached.keyboard; t && (t.keysRight = e); } /** * Instantiates a FlyCamera. * This is a flying camera, designed for 3D movement and rotation in all directions, * such as in a 3D Space Shooter or a Flight Simulator. * @param name Define the name of the camera in the scene. 
* @param position Define the starting position of the camera in the scene. * @param scene Define the scene the camera belongs to. * @param setActiveOnSceneIfNoneActive Defines whether the camera should be marked as active, if no other camera has been defined as active. */ constructor(e, t, i, r = !0) { super(e, t, i, r), this.ellipsoid = new D(1, 1, 1), this.ellipsoidOffset = new D(0, 0, 0), this.checkCollisions = !1, this.applyGravity = !1, this.cameraDirection = D.Zero(), this._trackRoll = 0, this.rollCorrect = 100, this.bankedTurn = !1, this.bankedTurnLimit = Math.PI / 2, this.bankedTurnMultiplier = 1, this._needMoveForGravity = !1, this._oldPosition = D.Zero(), this._diffPosition = D.Zero(), this._newPosition = D.Zero(), this._collisionMask = -1, this._onCollisionPositionChange = (s, n, a = null) => { ((o) => { this._newPosition.copyFrom(o), this._newPosition.subtractToRef(this._oldPosition, this._diffPosition), this._diffPosition.length() > $e.CollisionsEpsilon && (this.position.addInPlace(this._diffPosition), this.onCollide && a && this.onCollide(a)); })(n); }, this.inputs = new Xte(this), this.inputs.addKeyboard().addMouse(); } /** * Attached controls to the current camera. * @param ignored defines an ignored parameter kept for backward compatibility. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e, t) { t = Ve.BackCompatCameraNoPreventDefault(arguments), this.inputs.attachElement(t); } /** * Detach a control from the HTML DOM element. * The camera will stop reacting to that input. */ detachControl() { this.inputs.detachElement(), this.cameraDirection = new D(0, 0, 0); } /** * Get the mask that the camera ignores in collision events. */ get collisionMask() { return this._collisionMask; } /** * Set the mask that the camera ignores in collision events. */ set collisionMask(e) { this._collisionMask = isNaN(e) ? -1 : e; } /** * @internal */ _collideWithWorld(e) { let t; this.parent ? t = D.TransformCoordinates(this.position, this.parent.getWorldMatrix()) : t = this.position, t.subtractFromFloatsToRef(0, this.ellipsoid.y, 0, this._oldPosition), this._oldPosition.addInPlace(this.ellipsoidOffset); const i = this.getScene().collisionCoordinator; this._collider || (this._collider = i.createCollider()), this._collider._radius = this.ellipsoid, this._collider.collisionMask = this._collisionMask; let r = e; this.applyGravity && (r = e.add(this.getScene().gravity)), i.getNewPosition(this._oldPosition, r, this._collider, 3, null, this._onCollisionPositionChange, this.uniqueId); } /** @internal */ _checkInputs() { this._localDirection || (this._localDirection = D.Zero(), this._transformedDirection = D.Zero()), this.inputs.checkInputs(), super._checkInputs(); } /** * Enable movement without a user input. This allows gravity to always be applied. */ set needMoveForGravity(e) { this._needMoveForGravity = e; } /** * When true, gravity is applied whether there is user input or not. */ get needMoveForGravity() { return this._needMoveForGravity; } /** @internal */ _decideIfNeedsToMove() { return this._needMoveForGravity || Math.abs(this.cameraDirection.x) > 0 || Math.abs(this.cameraDirection.y) > 0 || Math.abs(this.cameraDirection.z) > 0; } /** @internal */ _updatePosition() { this.checkCollisions && this.getScene().collisionsEnabled ? 
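/* Illustrative sketch for the FlyCamera class above. `scene` and `canvas` are assumed to exist; the
   BABYLON namespace stands in for however this bundle is actually imported.

     const camera = new BABYLON.FlyCamera("fly", new BABYLON.Vector3(0, 5, -10), scene);
     camera.attachControl(canvas, true);
     camera.keysForward = [87];                   // W
     camera.keysBackward = [83];                  // S
     camera.bankedTurn = true;                    // roll into turns
     camera.bankedTurnLimit = Math.PI / 2;
     camera.bankedTurnMultiplier = 1;
     camera.rollCorrect = 100;                    // higher = slower roll restore (see restoreRoll below)
*/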
this._collideWithWorld(this.cameraDirection) : super._updatePosition(); } /** * Restore the Roll to its target value at the rate specified. * @param rate - Higher means slower restoring. * @internal */ restoreRoll(e) { const t = this._trackRoll, i = this.rotation.z, r = t - i, s = 1e-3; Math.abs(r) >= s && (this.rotation.z += r / e, Math.abs(t - this.rotation.z) <= s && (this.rotation.z = t)); } /** * Destroy the camera and release the current resources held by it. */ dispose() { this.inputs.clear(), super.dispose(); } /** * Get the current object class name. * @returns the class name. */ getClassName() { return "FlyCamera"; } } F([ oo() ], qL.prototype, "ellipsoid", void 0); F([ oo() ], qL.prototype, "ellipsoidOffset", void 0); F([ W() ], qL.prototype, "checkCollisions", void 0); F([ W() ], qL.prototype, "applyGravity", void 0); class Yte extends WL { /** * Instantiates a new FollowCameraInputsManager. * @param camera Defines the camera the inputs belong to */ constructor(e) { super(e); } /** * Add keyboard input support to the input manager. * @returns the current input manager */ addKeyboard() { return this.add(new rd()), this; } /** * Add mouse wheel input support to the input manager. * @returns the current input manager */ addMouseWheel() { return this.add(new C5()), this; } /** * Add pointers input support to the input manager. * @returns the current input manager */ addPointers() { return this.add(new F1()), this; } /** * Add orientation input support to the input manager. * @returns the current input manager */ addVRDeviceOrientation() { return Ce.Warn("DeviceOrientation support not yet implemented for FollowCamera."), this; } } In.AddNodeConstructor("FollowCamera", (c, e) => () => new Fp(c, D.Zero(), e)); In.AddNodeConstructor("ArcFollowCamera", (c, e) => () => new Qte(c, 0, 0, 1, null, e)); class Fp extends Cl { /** * Instantiates the follow camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#followcamera * @param name Define the name of the camera in the scene * @param position Define the position of the camera * @param scene Define the scene the camera belong to * @param lockedTarget Define the target of the camera */ constructor(e, t, i, r = null) { super(e, t, i), this.radius = 12, this.lowerRadiusLimit = null, this.upperRadiusLimit = null, this.rotationOffset = 0, this.lowerRotationOffsetLimit = null, this.upperRotationOffsetLimit = null, this.heightOffset = 4, this.lowerHeightOffsetLimit = null, this.upperHeightOffsetLimit = null, this.cameraAcceleration = 0.05, this.maxCameraSpeed = 20, this.lockedTarget = r, this.inputs = new Yte(this), this.inputs.addKeyboard().addMouseWheel().addPointers(); } _follow(e) { if (!e) return; const t = de.Matrix[0]; e.absoluteRotationQuaternion.toRotationMatrix(t); const i = Math.atan2(t.m[8], t.m[10]), r = Ve.ToRadians(this.rotationOffset) + i, s = e.getAbsolutePosition(), n = s.x + Math.sin(r) * this.radius, a = s.z + Math.cos(r) * this.radius, l = n - this.position.x, o = s.y + this.heightOffset - this.position.y, u = a - this.position.z; let h = l * this.cameraAcceleration * 2, d = o * this.cameraAcceleration, f = u * this.cameraAcceleration * 2; (h > this.maxCameraSpeed || h < -this.maxCameraSpeed) && (h = h < 1 ? -this.maxCameraSpeed : this.maxCameraSpeed), (d > this.maxCameraSpeed || d < -this.maxCameraSpeed) && (d = d < 1 ? -this.maxCameraSpeed : this.maxCameraSpeed), (f > this.maxCameraSpeed || f < -this.maxCameraSpeed) && (f = f < 1 ? 
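/* Illustrative sketch for the FollowCamera above: it trails a locked target using the offsets and
   acceleration applied in _follow. `scene` and `playerMesh` are assumed to exist.

     const camera = new BABYLON.FollowCamera("chase", new BABYLON.Vector3(0, 10, -10), scene);
     camera.lockedTarget = playerMesh;            // mesh to follow
     camera.radius = 12;                          // distance kept from the target
     camera.heightOffset = 4;                     // height above the target
     camera.rotationOffset = 180;                 // degrees around the target (converted via ToRadians above)
     camera.cameraAcceleration = 0.05;            // how fast the camera catches up
     camera.maxCameraSpeed = 20;                  // speed clamp used in _follow
*/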
-this.maxCameraSpeed : this.maxCameraSpeed), this.position = new D(this.position.x + h, this.position.y + d, this.position.z + f), this.setTarget(s); } /** * Attached controls to the current camera. * @param ignored defines an ignored parameter kept for backward compatibility. * @param noPreventDefault Defines whether event caught by the controls should call preventdefault() (https://developer.mozilla.org/en-US/docs/Web/API/Event/preventDefault) */ attachControl(e, t) { t = Ve.BackCompatCameraNoPreventDefault(arguments), this.inputs.attachElement(t), this._reset = () => { }; } /** * Detach the current controls from the specified dom element. */ detachControl() { this.inputs.detachElement(), this._reset && this._reset(); } /** @internal */ _checkInputs() { this.inputs.checkInputs(), this._checkLimits(), super._checkInputs(), this.lockedTarget && this._follow(this.lockedTarget); } _checkLimits() { this.lowerRadiusLimit !== null && this.radius < this.lowerRadiusLimit && (this.radius = this.lowerRadiusLimit), this.upperRadiusLimit !== null && this.radius > this.upperRadiusLimit && (this.radius = this.upperRadiusLimit), this.lowerHeightOffsetLimit !== null && this.heightOffset < this.lowerHeightOffsetLimit && (this.heightOffset = this.lowerHeightOffsetLimit), this.upperHeightOffsetLimit !== null && this.heightOffset > this.upperHeightOffsetLimit && (this.heightOffset = this.upperHeightOffsetLimit), this.lowerRotationOffsetLimit !== null && this.rotationOffset < this.lowerRotationOffsetLimit && (this.rotationOffset = this.lowerRotationOffsetLimit), this.upperRotationOffsetLimit !== null && this.rotationOffset > this.upperRotationOffsetLimit && (this.rotationOffset = this.upperRotationOffsetLimit); } /** * Gets the camera class name. * @returns the class name */ getClassName() { return "FollowCamera"; } } F([ W() ], Fp.prototype, "radius", void 0); F([ W() ], Fp.prototype, "lowerRadiusLimit", void 0); F([ W() ], Fp.prototype, "upperRadiusLimit", void 0); F([ W() ], Fp.prototype, "rotationOffset", void 0); F([ W() ], Fp.prototype, "lowerRotationOffsetLimit", void 0); F([ W() ], Fp.prototype, "upperRotationOffsetLimit", void 0); F([ W() ], Fp.prototype, "heightOffset", void 0); F([ W() ], Fp.prototype, "lowerHeightOffsetLimit", void 0); F([ W() ], Fp.prototype, "upperHeightOffsetLimit", void 0); F([ W() ], Fp.prototype, "cameraAcceleration", void 0); F([ W() ], Fp.prototype, "maxCameraSpeed", void 0); F([ hw("lockedTargetId") ], Fp.prototype, "lockedTarget", void 0); class Qte extends Cl { /** * Instantiates a new ArcFollowCamera * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#followcamera * @param name Define the name of the camera * @param alpha Define the rotation angle of the camera around the longitudinal axis * @param beta Define the rotation angle of the camera around the elevation axis * @param radius Define the radius of the camera from its target point * @param target Define the target of the camera * @param scene Define the scene the camera belongs to */ constructor(e, t, i, r, s, n) { super(e, D.Zero(), n), this.alpha = t, this.beta = i, this.radius = r, this._cartesianCoordinates = D.Zero(), this.setMeshTarget(s); } /** * Sets the mesh to follow with this camera. 
* @param target the target to follow */ setMeshTarget(e) { this._meshTarget = e, this._follow(); } _follow() { if (!this._meshTarget) return; this._cartesianCoordinates.x = this.radius * Math.cos(this.alpha) * Math.cos(this.beta), this._cartesianCoordinates.y = this.radius * Math.sin(this.beta), this._cartesianCoordinates.z = this.radius * Math.sin(this.alpha) * Math.cos(this.beta); const e = this._meshTarget.getAbsolutePosition(); this.position = e.add(this._cartesianCoordinates), this.setTarget(e); } /** @internal */ _checkInputs() { super._checkInputs(), this._follow(); } /** * Returns the class name of the object. * It is mostly used internally for serialization purposes. */ getClassName() { return "ArcFollowCamera"; } } var F_; (function(c) { c[c.A = 0] = "A", c[c.B = 1] = "B", c[c.X = 2] = "X", c[c.Y = 3] = "Y", c[c.LB = 4] = "LB", c[c.RB = 5] = "RB", c[c.Back = 8] = "Back", c[c.Start = 9] = "Start", c[c.LeftStick = 10] = "LeftStick", c[c.RightStick = 11] = "RightStick"; })(F_ || (F_ = {})); var KR; (function(c) { c[c.Up = 12] = "Up", c[c.Down = 13] = "Down", c[c.Left = 14] = "Left", c[c.Right = 15] = "Right"; })(KR || (KR = {})); class $te extends zu { /** * Creates a new XBox360 gamepad object * @param id defines the id of this gamepad * @param index defines its index * @param gamepad defines the internal HTML gamepad object * @param xboxOne defines if it is a XBox One gamepad */ constructor(e, t, i, r = !1) { super(e, t, i, 0, 1, 2, 3), this._leftTrigger = 0, this._rightTrigger = 0, this.onButtonDownObservable = new Fe(), this.onButtonUpObservable = new Fe(), this.onPadDownObservable = new Fe(), this.onPadUpObservable = new Fe(), this._buttonA = 0, this._buttonB = 0, this._buttonX = 0, this._buttonY = 0, this._buttonBack = 0, this._buttonStart = 0, this._buttonLB = 0, this._buttonRB = 0, this._buttonLeftStick = 0, this._buttonRightStick = 0, this._dPadUp = 0, this._dPadDown = 0, this._dPadLeft = 0, this._dPadRight = 0, this._isXboxOnePad = !1, this.type = zu.XBOX, this._isXboxOnePad = r; } /** * Defines the callback to call when left trigger is pressed * @param callback defines the callback to use */ onlefttriggerchanged(e) { this._onlefttriggerchanged = e; } /** * Defines the callback to call when right trigger is pressed * @param callback defines the callback to use */ onrighttriggerchanged(e) { this._onrighttriggerchanged = e; } /** * Gets the left trigger value */ get leftTrigger() { return this._leftTrigger; } /** * Sets the left trigger value */ set leftTrigger(e) { this._onlefttriggerchanged && this._leftTrigger !== e && this._onlefttriggerchanged(e), this._leftTrigger = e; } /** * Gets the right trigger value */ get rightTrigger() { return this._rightTrigger; } /** * Sets the right trigger value */ set rightTrigger(e) { this._onrighttriggerchanged && this._rightTrigger !== e && this._onrighttriggerchanged(e), this._rightTrigger = e; } /** * Defines the callback to call when a button is pressed * @param callback defines the callback to use */ onbuttondown(e) { this._onbuttondown = e; } /** * Defines the callback to call when a button is released * @param callback defines the callback to use */ onbuttonup(e) { this._onbuttonup = e; } /** * Defines the callback to call when a pad is pressed * @param callback defines the callback to use */ ondpaddown(e) { this._ondpaddown = e; } /** * Defines the callback to call when a pad is released * @param callback defines the callback to use */ ondpadup(e) { this._ondpadup = e; } _setButtonValue(e, t, i) { return e !== t && (e === 1 && 
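/* Illustrative sketch: reacting to the Xbox gamepad class above (Xbox360Pad in the public API).
   `gamepad` is assumed to be an already-connected instance, for example obtained from a gamepad
   manager; the Xbox360Button enum corresponds to the button ids defined above.

     gamepad.onButtonDownObservable.add((button) => {
       if (button === BABYLON.Xbox360Button.A) {
         console.log("A pressed");
       }
     });
     gamepad.onbuttonup((button) => console.log("released", button));
     gamepad.onlefttriggerchanged((value) => console.log("LT", value));   // analog value in 0..1
*/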
(this._onbuttondown && this._onbuttondown(i), this.onButtonDownObservable.notifyObservers(i)), e === 0 && (this._onbuttonup && this._onbuttonup(i), this.onButtonUpObservable.notifyObservers(i))), e; } _setDPadValue(e, t, i) { return e !== t && (e === 1 && (this._ondpaddown && this._ondpaddown(i), this.onPadDownObservable.notifyObservers(i)), e === 0 && (this._ondpadup && this._ondpadup(i), this.onPadUpObservable.notifyObservers(i))), e; } /** * Gets the value of the `A` button */ get buttonA() { return this._buttonA; } /** * Sets the value of the `A` button */ set buttonA(e) { this._buttonA = this._setButtonValue(e, this._buttonA, F_.A); } /** * Gets the value of the `B` button */ get buttonB() { return this._buttonB; } /** * Sets the value of the `B` button */ set buttonB(e) { this._buttonB = this._setButtonValue(e, this._buttonB, F_.B); } /** * Gets the value of the `X` button */ get buttonX() { return this._buttonX; } /** * Sets the value of the `X` button */ set buttonX(e) { this._buttonX = this._setButtonValue(e, this._buttonX, F_.X); } /** * Gets the value of the `Y` button */ get buttonY() { return this._buttonY; } /** * Sets the value of the `Y` button */ set buttonY(e) { this._buttonY = this._setButtonValue(e, this._buttonY, F_.Y); } /** * Gets the value of the `Start` button */ get buttonStart() { return this._buttonStart; } /** * Sets the value of the `Start` button */ set buttonStart(e) { this._buttonStart = this._setButtonValue(e, this._buttonStart, F_.Start); } /** * Gets the value of the `Back` button */ get buttonBack() { return this._buttonBack; } /** * Sets the value of the `Back` button */ set buttonBack(e) { this._buttonBack = this._setButtonValue(e, this._buttonBack, F_.Back); } /** * Gets the value of the `Left` button */ get buttonLB() { return this._buttonLB; } /** * Sets the value of the `Left` button */ set buttonLB(e) { this._buttonLB = this._setButtonValue(e, this._buttonLB, F_.LB); } /** * Gets the value of the `Right` button */ get buttonRB() { return this._buttonRB; } /** * Sets the value of the `Right` button */ set buttonRB(e) { this._buttonRB = this._setButtonValue(e, this._buttonRB, F_.RB); } /** * Gets the value of the Left joystick */ get buttonLeftStick() { return this._buttonLeftStick; } /** * Sets the value of the Left joystick */ set buttonLeftStick(e) { this._buttonLeftStick = this._setButtonValue(e, this._buttonLeftStick, F_.LeftStick); } /** * Gets the value of the Right joystick */ get buttonRightStick() { return this._buttonRightStick; } /** * Sets the value of the Right joystick */ set buttonRightStick(e) { this._buttonRightStick = this._setButtonValue(e, this._buttonRightStick, F_.RightStick); } /** * Gets the value of D-pad up */ get dPadUp() { return this._dPadUp; } /** * Sets the value of D-pad up */ set dPadUp(e) { this._dPadUp = this._setDPadValue(e, this._dPadUp, KR.Up); } /** * Gets the value of D-pad down */ get dPadDown() { return this._dPadDown; } /** * Sets the value of D-pad down */ set dPadDown(e) { this._dPadDown = this._setDPadValue(e, this._dPadDown, KR.Down); } /** * Gets the value of D-pad left */ get dPadLeft() { return this._dPadLeft; } /** * Sets the value of D-pad left */ set dPadLeft(e) { this._dPadLeft = this._setDPadValue(e, this._dPadLeft, KR.Left); } /** * Gets the value of D-pad right */ get dPadRight() { return this._dPadRight; } /** * Sets the value of D-pad right */ set dPadRight(e) { this._dPadRight = this._setDPadValue(e, this._dPadRight, KR.Right); } /** * Force the gamepad to synchronize with device values 
*/ update() { super.update(), this._isXboxOnePad ? (this.buttonA = this.browserGamepad.buttons[0].value, this.buttonB = this.browserGamepad.buttons[1].value, this.buttonX = this.browserGamepad.buttons[2].value, this.buttonY = this.browserGamepad.buttons[3].value, this.buttonLB = this.browserGamepad.buttons[4].value, this.buttonRB = this.browserGamepad.buttons[5].value, this.leftTrigger = this.browserGamepad.buttons[6].value, this.rightTrigger = this.browserGamepad.buttons[7].value, this.buttonBack = this.browserGamepad.buttons[8].value, this.buttonStart = this.browserGamepad.buttons[9].value, this.buttonLeftStick = this.browserGamepad.buttons[10].value, this.buttonRightStick = this.browserGamepad.buttons[11].value, this.dPadUp = this.browserGamepad.buttons[12].value, this.dPadDown = this.browserGamepad.buttons[13].value, this.dPadLeft = this.browserGamepad.buttons[14].value, this.dPadRight = this.browserGamepad.buttons[15].value) : (this.buttonA = this.browserGamepad.buttons[0].value, this.buttonB = this.browserGamepad.buttons[1].value, this.buttonX = this.browserGamepad.buttons[2].value, this.buttonY = this.browserGamepad.buttons[3].value, this.buttonLB = this.browserGamepad.buttons[4].value, this.buttonRB = this.browserGamepad.buttons[5].value, this.leftTrigger = this.browserGamepad.buttons[6].value, this.rightTrigger = this.browserGamepad.buttons[7].value, this.buttonBack = this.browserGamepad.buttons[8].value, this.buttonStart = this.browserGamepad.buttons[9].value, this.buttonLeftStick = this.browserGamepad.buttons[10].value, this.buttonRightStick = this.browserGamepad.buttons[11].value, this.dPadUp = this.browserGamepad.buttons[12].value, this.dPadDown = this.browserGamepad.buttons[13].value, this.dPadLeft = this.browserGamepad.buttons[14].value, this.dPadRight = this.browserGamepad.buttons[15].value); } /** * Disposes the gamepad */ dispose() { super.dispose(), this.onButtonDownObservable.clear(), this.onButtonUpObservable.clear(), this.onPadDownObservable.clear(), this.onPadUpObservable.clear(); } } var lg; (function(c) { c[c.Cross = 0] = "Cross", c[c.Circle = 1] = "Circle", c[c.Square = 2] = "Square", c[c.Triangle = 3] = "Triangle", c[c.L1 = 4] = "L1", c[c.R1 = 5] = "R1", c[c.Share = 8] = "Share", c[c.Options = 9] = "Options", c[c.LeftStick = 10] = "LeftStick", c[c.RightStick = 11] = "RightStick"; })(lg || (lg = {})); var WR; (function(c) { c[c.Up = 12] = "Up", c[c.Down = 13] = "Down", c[c.Left = 14] = "Left", c[c.Right = 15] = "Right"; })(WR || (WR = {})); class Zte extends zu { /** * Creates a new DualShock gamepad object * @param id defines the id of this gamepad * @param index defines its index * @param gamepad defines the internal HTML gamepad object */ constructor(e, t, i) { super(e.replace("STANDARD GAMEPAD", "SONY PLAYSTATION DUALSHOCK"), t, i, 0, 1, 2, 3), this._leftTrigger = 0, this._rightTrigger = 0, this.onButtonDownObservable = new Fe(), this.onButtonUpObservable = new Fe(), this.onPadDownObservable = new Fe(), this.onPadUpObservable = new Fe(), this._buttonCross = 0, this._buttonCircle = 0, this._buttonSquare = 0, this._buttonTriangle = 0, this._buttonShare = 0, this._buttonOptions = 0, this._buttonL1 = 0, this._buttonR1 = 0, this._buttonLeftStick = 0, this._buttonRightStick = 0, this._dPadUp = 0, this._dPadDown = 0, this._dPadLeft = 0, this._dPadRight = 0, this.type = zu.DUALSHOCK; } /** * Defines the callback to call when left trigger is pressed * @param callback defines the callback to use */ onlefttriggerchanged(e) { this._onlefttriggerchanged = e; } /** * 
Defines the callback to call when right trigger is pressed * @param callback defines the callback to use */ onrighttriggerchanged(e) { this._onrighttriggerchanged = e; } /** * Gets the left trigger value */ get leftTrigger() { return this._leftTrigger; } /** * Sets the left trigger value */ set leftTrigger(e) { this._onlefttriggerchanged && this._leftTrigger !== e && this._onlefttriggerchanged(e), this._leftTrigger = e; } /** * Gets the right trigger value */ get rightTrigger() { return this._rightTrigger; } /** * Sets the right trigger value */ set rightTrigger(e) { this._onrighttriggerchanged && this._rightTrigger !== e && this._onrighttriggerchanged(e), this._rightTrigger = e; } /** * Defines the callback to call when a button is pressed * @param callback defines the callback to use */ onbuttondown(e) { this._onbuttondown = e; } /** * Defines the callback to call when a button is released * @param callback defines the callback to use */ onbuttonup(e) { this._onbuttonup = e; } /** * Defines the callback to call when a pad is pressed * @param callback defines the callback to use */ ondpaddown(e) { this._ondpaddown = e; } /** * Defines the callback to call when a pad is released * @param callback defines the callback to use */ ondpadup(e) { this._ondpadup = e; } _setButtonValue(e, t, i) { return e !== t && (e === 1 && (this._onbuttondown && this._onbuttondown(i), this.onButtonDownObservable.notifyObservers(i)), e === 0 && (this._onbuttonup && this._onbuttonup(i), this.onButtonUpObservable.notifyObservers(i))), e; } _setDPadValue(e, t, i) { return e !== t && (e === 1 && (this._ondpaddown && this._ondpaddown(i), this.onPadDownObservable.notifyObservers(i)), e === 0 && (this._ondpadup && this._ondpadup(i), this.onPadUpObservable.notifyObservers(i))), e; } /** * Gets the value of the `Cross` button */ get buttonCross() { return this._buttonCross; } /** * Sets the value of the `Cross` button */ set buttonCross(e) { this._buttonCross = this._setButtonValue(e, this._buttonCross, lg.Cross); } /** * Gets the value of the `Circle` button */ get buttonCircle() { return this._buttonCircle; } /** * Sets the value of the `Circle` button */ set buttonCircle(e) { this._buttonCircle = this._setButtonValue(e, this._buttonCircle, lg.Circle); } /** * Gets the value of the `Square` button */ get buttonSquare() { return this._buttonSquare; } /** * Sets the value of the `Square` button */ set buttonSquare(e) { this._buttonSquare = this._setButtonValue(e, this._buttonSquare, lg.Square); } /** * Gets the value of the `Triangle` button */ get buttonTriangle() { return this._buttonTriangle; } /** * Sets the value of the `Triangle` button */ set buttonTriangle(e) { this._buttonTriangle = this._setButtonValue(e, this._buttonTriangle, lg.Triangle); } /** * Gets the value of the `Options` button */ get buttonOptions() { return this._buttonOptions; } /** * Sets the value of the `Options` button */ set buttonOptions(e) { this._buttonOptions = this._setButtonValue(e, this._buttonOptions, lg.Options); } /** * Gets the value of the `Share` button */ get buttonShare() { return this._buttonShare; } /** * Sets the value of the `Share` button */ set buttonShare(e) { this._buttonShare = this._setButtonValue(e, this._buttonShare, lg.Share); } /** * Gets the value of the `L1` button */ get buttonL1() { return this._buttonL1; } /** * Sets the value of the `L1` button */ set buttonL1(e) { this._buttonL1 = this._setButtonValue(e, this._buttonL1, lg.L1); } /** * Gets the value of the `R1` button */ get buttonR1() { return 
this._buttonR1; } /** * Sets the value of the `R1` button */ set buttonR1(e) { this._buttonR1 = this._setButtonValue(e, this._buttonR1, lg.R1); } /** * Gets the value of the Left joystick */ get buttonLeftStick() { return this._buttonLeftStick; } /** * Sets the value of the Left joystick */ set buttonLeftStick(e) { this._buttonLeftStick = this._setButtonValue(e, this._buttonLeftStick, lg.LeftStick); } /** * Gets the value of the Right joystick */ get buttonRightStick() { return this._buttonRightStick; } /** * Sets the value of the Right joystick */ set buttonRightStick(e) { this._buttonRightStick = this._setButtonValue(e, this._buttonRightStick, lg.RightStick); } /** * Gets the value of D-pad up */ get dPadUp() { return this._dPadUp; } /** * Sets the value of D-pad up */ set dPadUp(e) { this._dPadUp = this._setDPadValue(e, this._dPadUp, WR.Up); } /** * Gets the value of D-pad down */ get dPadDown() { return this._dPadDown; } /** * Sets the value of D-pad down */ set dPadDown(e) { this._dPadDown = this._setDPadValue(e, this._dPadDown, WR.Down); } /** * Gets the value of D-pad left */ get dPadLeft() { return this._dPadLeft; } /** * Sets the value of D-pad left */ set dPadLeft(e) { this._dPadLeft = this._setDPadValue(e, this._dPadLeft, WR.Left); } /** * Gets the value of D-pad right */ get dPadRight() { return this._dPadRight; } /** * Sets the value of D-pad right */ set dPadRight(e) { this._dPadRight = this._setDPadValue(e, this._dPadRight, WR.Right); } /** * Force the gamepad to synchronize with device values */ update() { super.update(), this.buttonCross = this.browserGamepad.buttons[0].value, this.buttonCircle = this.browserGamepad.buttons[1].value, this.buttonSquare = this.browserGamepad.buttons[2].value, this.buttonTriangle = this.browserGamepad.buttons[3].value, this.buttonL1 = this.browserGamepad.buttons[4].value, this.buttonR1 = this.browserGamepad.buttons[5].value, this.leftTrigger = this.browserGamepad.buttons[6].value, this.rightTrigger = this.browserGamepad.buttons[7].value, this.buttonShare = this.browserGamepad.buttons[8].value, this.buttonOptions = this.browserGamepad.buttons[9].value, this.buttonLeftStick = this.browserGamepad.buttons[10].value, this.buttonRightStick = this.browserGamepad.buttons[11].value, this.dPadUp = this.browserGamepad.buttons[12].value, this.dPadDown = this.browserGamepad.buttons[13].value, this.dPadLeft = this.browserGamepad.buttons[14].value, this.dPadRight = this.browserGamepad.buttons[15].value; } /** * Disposes the gamepad */ dispose() { super.dispose(), this.onButtonDownObservable.clear(), this.onButtonUpObservable.clear(), this.onPadDownObservable.clear(), this.onPadUpObservable.clear(); } } class qte { /** * Initializes the gamepad manager * @param _scene BabylonJS scene */ constructor(e) { if (this._scene = e, this._babylonGamepads = [], this._oneGamepadConnected = !1, this._isMonitoring = !1, this.onGamepadDisconnectedObservable = new Fe(), cu() ? (this._gamepadEventSupported = "GamepadEvent" in window, this._gamepadSupport = navigator && navigator.getGamepads) : this._gamepadEventSupported = !1, this.onGamepadConnectedObservable = new Fe((t) => { for (const i in this._babylonGamepads) { const r = this._babylonGamepads[i]; r && r._isConnected && this.onGamepadConnectedObservable.notifyObserver(t, r); } }), this._onGamepadConnectedEvent = (t) => { const i = t.gamepad; if (i.index in this._babylonGamepads && this._babylonGamepads[i.index].isConnected) return; let r; this._babylonGamepads[i.index] ? 
(r = this._babylonGamepads[i.index], r.browserGamepad = i, r._isConnected = !0) : r = this._addNewGamepad(i), this.onGamepadConnectedObservable.notifyObservers(r), this._startMonitoringGamepads(); }, this._onGamepadDisconnectedEvent = (t) => { const i = t.gamepad; for (const r in this._babylonGamepads) if (this._babylonGamepads[r].index === i.index) { const s = this._babylonGamepads[r]; s._isConnected = !1, this.onGamepadDisconnectedObservable.notifyObservers(s), s.dispose && s.dispose(); break; } }, this._gamepadSupport) if (this._updateGamepadObjects(), this._babylonGamepads.length && this._startMonitoringGamepads(), this._gamepadEventSupported) { const t = this._scene ? this._scene.getEngine().getHostWindow() : window; t && (t.addEventListener("gamepadconnected", this._onGamepadConnectedEvent, !1), t.addEventListener("gamepaddisconnected", this._onGamepadDisconnectedEvent, !1)); } else this._startMonitoringGamepads(); } /** * The gamepads in the game pad manager */ get gamepads() { return this._babylonGamepads; } /** * Get the gamepad controllers based on type * @param type The type of gamepad controller * @returns Nullable gamepad */ getGamepadByType(e = zu.XBOX) { for (const t of this._babylonGamepads) if (t && t.type === e) return t; return null; } /** * Disposes the gamepad manager */ dispose() { this._gamepadEventSupported && (this._onGamepadConnectedEvent && window.removeEventListener("gamepadconnected", this._onGamepadConnectedEvent), this._onGamepadDisconnectedEvent && window.removeEventListener("gamepaddisconnected", this._onGamepadDisconnectedEvent), this._onGamepadConnectedEvent = null, this._onGamepadDisconnectedEvent = null), this._babylonGamepads.forEach((e) => { e.dispose(); }), this.onGamepadConnectedObservable.clear(), this.onGamepadDisconnectedObservable.clear(), this._oneGamepadConnected = !1, this._stopMonitoringGamepads(), this._babylonGamepads = []; } _addNewGamepad(e) { this._oneGamepadConnected || (this._oneGamepadConnected = !0); let t; const i = e.id.search("054c") !== -1 && e.id.search("0ce6") === -1, r = e.id.search("Xbox One") !== -1; return r || e.id.search("Xbox 360") !== -1 || e.id.search("xinput") !== -1 || e.id.search("045e") !== -1 && e.id.search("Surface Dock") === -1 ? t = new $te(e.id, e.index, e, r) : i ? t = new Zte(e.id, e.index, e) : t = new jte(e.id, e.index, e), this._babylonGamepads[t.index] = t, t; } _startMonitoringGamepads() { this._isMonitoring || (this._isMonitoring = !0, this._checkGamepadsStatus()); } _stopMonitoringGamepads() { this._isMonitoring = !1; } /** @internal */ _checkGamepadsStatus() { this._updateGamepadObjects(); for (const e in this._babylonGamepads) { const t = this._babylonGamepads[e]; if (!(!t || !t.isConnected)) try { t.update(); } catch { this._loggedErrors.indexOf(t.index) === -1 && (Ve.Warn(`Error updating gamepad ${t.id}`), this._loggedErrors.push(t.index)); } } this._isMonitoring && $e.QueueNewFrame(() => { this._checkGamepadsStatus(); }); } // This function is called only on Chrome, which does not properly support // connection/disconnection events and forces you to recopy again the gamepad object _updateGamepadObjects() { const e = navigator.getGamepads ? 
navigator.getGamepads() : []; for (let t = 0; t < e.length; t++) { const i = e[t]; if (i) if (this._babylonGamepads[i.index]) this._babylonGamepads[t].browserGamepad = i, this._babylonGamepads[t].isConnected || (this._babylonGamepads[t]._isConnected = !0, this.onGamepadConnectedObservable.notifyObservers(this._babylonGamepads[t])); else { const r = this._addNewGamepad(i); this.onGamepadConnectedObservable.notifyObservers(r); } } } } Object.defineProperty(ii.prototype, "gamepadManager", { get: function() { if (!this._gamepadManager) { this._gamepadManager = new qte(this); let c = this._getComponent(Bt.NAME_GAMEPAD); c || (c = new Jte(this), this._addComponent(c)); } return this._gamepadManager; }, enumerable: !0, configurable: !0 }); $L.prototype.addGamepad = function() { return this.add(new ZL()), this; }; JB.prototype.addGamepad = function() { return this.add(new jL()), this; }; class Jte { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_GAMEPAD, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._beforeCameraUpdateStage.registerStep(Bt.STEP_BEFORECAMERAUPDATE_GAMEPAD, this, this._beforeCameraUpdate); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { const e = this.scene._gamepadManager; e && (e.dispose(), this.scene._gamepadManager = null); } _beforeCameraUpdate() { const e = this.scene._gamepadManager; e && e._isMonitoring && e._checkGamepadsStatus(); } } In.AddNodeConstructor("FreeCamera", (c, e) => () => new x5(c, D.Zero(), e)); class x5 extends IK { /** * Defines the gamepad rotation sensibility. * This is the threshold from when rotation starts to be accounted for to prevent jittering. */ get gamepadAngularSensibility() { const e = this.inputs.attached.gamepad; return e ? e.gamepadAngularSensibility : 0; } set gamepadAngularSensibility(e) { const t = this.inputs.attached.gamepad; t && (t.gamepadAngularSensibility = e); } /** * Defines the gamepad move sensibility. * This is the threshold from when moving starts to be accounted for to prevent jittering. */ get gamepadMoveSensibility() { const e = this.inputs.attached.gamepad; return e ? e.gamepadMoveSensibility : 0; } set gamepadMoveSensibility(e) { const t = this.inputs.attached.gamepad; t && (t.gamepadMoveSensibility = e); } /** * The Universal Camera is the one to choose for first person shooter type games, and works with all the keyboard, mouse, touch and gamepads. This replaces the earlier Free Camera, * which still works and will still be found in many Playgrounds. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#universal-camera * @param name Define the name of the camera in the scene * @param position Define the start position of the camera in the scene * @param scene Define the scene the camera belongs to */ constructor(e, t, i) { super(e, t, i), this.inputs.addGamepad(); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "UniversalCamera"; } } Ai._CreateDefaultParsedCamera = (c, e) => new x5(c, D.Zero(), e); In.AddNodeConstructor("GamepadCamera", (c, e) => () => new tU(c, D.Zero(), e)); class tU extends x5 { /** * Instantiates a new Gamepad Camera * This represents a FPS type of camera. This is only here for back compat purpose. 
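* A minimal creation sketch (assuming the un-minified names BABYLON.UniversalCamera and BABYLON.Vector3, plus an existing scene and canvas; this bundle only exposes mangled identifiers):
* @example
* // The UniversalCamera adds gamepad input on top of keyboard/mouse/touch by default,
* // and connected pads can also be inspected through scene.gamepadManager.
* const camera = new BABYLON.UniversalCamera("player", new BABYLON.Vector3(0, 1.6, -5), scene);
* camera.attachControl(canvas, true);
* camera.gamepadAngularSensibility = 250; // gamepad rotation threshold (see the getter documented above)
* scene.gamepadManager.onGamepadConnectedObservable.add((pad) => console.log("connected:", pad.id));
*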
* Please use the UniversalCamera instead as both are identical. * @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#universal-camera * @param name Define the name of the camera in the scene * @param position Define the start position of the camera in the scene * @param scene Define the scene the camera belongs to */ constructor(e, t, i) { super(e, t, i); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "GamepadCamera"; } } const Yce = "passCubePixelShader", Qce = `varying vec2 vUV;uniform samplerCube textureSampler; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec2 uv=vUV*2.0-1.0; #ifdef POSITIVEX gl_FragColor=textureCube(textureSampler,vec3(1.001,uv.y,uv.x)); #endif #ifdef NEGATIVEX gl_FragColor=textureCube(textureSampler,vec3(-1.001,uv.y,uv.x)); #endif #ifdef POSITIVEY gl_FragColor=textureCube(textureSampler,vec3(uv.y,1.001,uv.x)); #endif #ifdef NEGATIVEY gl_FragColor=textureCube(textureSampler,vec3(uv.y,-1.001,uv.x)); #endif #ifdef POSITIVEZ gl_FragColor=textureCube(textureSampler,vec3(uv,1.001)); #endif #ifdef NEGATIVEZ gl_FragColor=textureCube(textureSampler,vec3(uv,-1.001)); #endif }`; je.ShadersStore[Yce] = Qce; class h6 extends Bi { /** * Gets a string identifying the name of the class * @returns "PassPostProcess" string */ getClassName() { return "PassPostProcess"; } /** * Creates the PassPostProcess * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType The type of texture to be used when performing the post processing. * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i = null, r, s, n, a = 0, l = !1) { super(e, "pass", null, null, t, i, r, s, n, void 0, a, void 0, null, l); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new h6(e.name, e.options, t, e.renderTargetSamplingMode, e._engine, e.reusable), e, i, r); } } Be("BABYLON.PassPostProcess", h6); class DK extends Bi { /** * Gets or sets the cube face to display. * * 0 is +X * * 1 is -X * * 2 is +Y * * 3 is -Y * * 4 is +Z * * 5 is -Z */ get face() { return this._face; } set face(e) { if (!(e < 0 || e > 5)) switch (this._face = e, this._face) { case 0: this.updateEffect("#define POSITIVEX"); break; case 1: this.updateEffect("#define NEGATIVEX"); break; case 2: this.updateEffect("#define POSITIVEY"); break; case 3: this.updateEffect("#define NEGATIVEY"); break; case 4: this.updateEffect("#define POSITIVEZ"); break; case 5: this.updateEffect("#define NEGATIVEZ"); break; } } /** * Gets a string identifying the name of the class * @returns "PassCubePostProcess" string */ getClassName() { return "PassCubePostProcess"; } /** * Creates the PassCubePostProcess * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. 
(default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType The type of texture to be used when performing the post processing. * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i = null, r, s, n, a = 0, l = !1) { super(e, "passCube", null, null, t, i, r, s, n, "#define POSITIVEX", a, void 0, null, l), this._face = 0; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new DK(e.name, e.options, t, e.renderTargetSamplingMode, e._engine, e.reusable), e, i, r); } } $e._RescalePostProcessFactory = (c) => new h6("rescale", 1, null, 2, c, !1, 0); const $ce = "anaglyphPixelShader", Zce = `varying vec2 vUV;uniform sampler2D textureSampler;uniform sampler2D leftSampler; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 leftFrag=texture2D(leftSampler,vUV);leftFrag=vec4(1.0,leftFrag.g,leftFrag.b,1.0);vec4 rightFrag=texture2D(textureSampler,vUV);rightFrag=vec4(rightFrag.r,1.0,1.0,1.0);gl_FragColor=vec4(rightFrag.rgb*leftFrag.rgb,1.0);}`; je.ShadersStore[$ce] = Zce; class OK extends Bi { /** * Gets a string identifying the name of the class * @returns "AnaglyphPostProcess" string */ getClassName() { return "AnaglyphPostProcess"; } /** * Creates a new AnaglyphPostProcess * @param name defines postprocess name * @param options defines creation options or target ratio scale * @param rigCameras defines cameras using this postprocess * @param samplingMode defines required sampling mode (BABYLON.Texture.NEAREST_SAMPLINGMODE by default) * @param engine defines hosting engine * @param reusable defines if the postprocess will be reused multiple times per frame */ constructor(e, t, i, r, s, n) { super(e, "anaglyph", null, ["leftSampler"], t, i[1], r, s, n), this._passedProcess = i[0]._rigPostProcess, this.onApplyObservable.add((a) => { a.setTextureFromPostProcess("leftSampler", this._passedProcess); }); } } Be("BABYLON.AnaglyphPostProcess", OK); function JL(c) { c._rigCameras[0]._rigPostProcess = new h6(c.name + "_passthru", 1, c._rigCameras[0]), c._rigCameras[1]._rigPostProcess = new OK(c.name + "_anaglyph", 1, c._rigCameras); } In.AddNodeConstructor("AnaglyphArcRotateCamera", (c, e, t) => () => new eie(c, 0, 0, 1, D.Zero(), t.interaxial_distance, e)); class eie extends Pn { /** * Creates a new AnaglyphArcRotateCamera * @param name defines camera name * @param alpha defines alpha angle (in radians) * @param beta defines beta angle (in radians) * @param radius defines radius * @param target defines camera target * @param interaxialDistance defines distance between each color axis * @param scene defines the hosting scene */ constructor(e, t, i, r, s, n, a) { super(e, t, i, r, s, a), this._setRigMode = () => JL(this), this.interaxialDistance = n, this.setCameraRigMode(Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH, { interaxialDistance: n }); } /** * Gets camera class name * @returns AnaglyphArcRotateCamera */ getClassName() { return "AnaglyphArcRotateCamera"; } } In.AddNodeConstructor("AnaglyphFreeCamera", (c, e, t) => () => new tie(c, D.Zero(), t.interaxial_distance, e)); class tie extends du { /** * Creates a new AnaglyphFreeCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param scene 
defines the hosting scene */ constructor(e, t, i, r) { super(e, t, r), this._setRigMode = () => JL(this), this.interaxialDistance = i, this.setCameraRigMode(Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH, { interaxialDistance: i }); } /** * Gets camera class name * @returns AnaglyphFreeCamera */ getClassName() { return "AnaglyphFreeCamera"; } } In.AddNodeConstructor("AnaglyphGamepadCamera", (c, e, t) => () => new iie(c, D.Zero(), t.interaxial_distance, e)); class iie extends tU { /** * Creates a new AnaglyphGamepadCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param scene defines the hosting scene */ constructor(e, t, i, r) { super(e, t, r), this._setRigMode = () => JL(this), this.interaxialDistance = i, this.setCameraRigMode(Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH, { interaxialDistance: i }); } /** * Gets camera class name * @returns AnaglyphGamepadCamera */ getClassName() { return "AnaglyphGamepadCamera"; } } In.AddNodeConstructor("AnaglyphUniversalCamera", (c, e, t) => () => new rie(c, D.Zero(), t.interaxial_distance, e)); class rie extends x5 { /** * Creates a new AnaglyphUniversalCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param scene defines the hosting scene */ constructor(e, t, i, r) { super(e, t, r), this._setRigMode = () => JL(this), this.interaxialDistance = i, this.setCameraRigMode(Ai.RIG_MODE_STEREOSCOPIC_ANAGLYPH, { interaxialDistance: i }); } /** * Gets camera class name * @returns AnaglyphUniversalCamera */ getClassName() { return "AnaglyphUniversalCamera"; } } const qce = "stereoscopicInterlacePixelShader", Jce = `const vec3 TWO=vec3(2.0,2.0,2.0);varying vec2 vUV;uniform sampler2D camASampler;uniform sampler2D textureSampler;uniform vec2 stepSize; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {bool useCamA;bool useCamB;vec2 texCoord1;vec2 texCoord2;vec3 frag1;vec3 frag2; #ifdef IS_STEREOSCOPIC_HORIZ useCamB=vUV.x>0.5;useCamA=!useCamB;texCoord1=vec2(useCamB ? (vUV.x-0.5)*2.0 : vUV.x*2.0,vUV.y);texCoord2=vec2(texCoord1.x+stepSize.x,vUV.y); #else #ifdef IS_STEREOSCOPIC_INTERLACED float rowNum=floor(vUV.y/stepSize.y);useCamA=mod(rowNum,2.0)==1.0;useCamB=mod(rowNum,2.0)==0.0;texCoord1=vec2(vUV.x,vUV.y);texCoord2=vec2(vUV.x,vUV.y); #else useCamB=vUV.y>0.5;useCamA=!useCamB;texCoord1=vec2(vUV.x,useCamB ? (vUV.y-0.5)*2.0 : vUV.y*2.0);texCoord2=vec2(vUV.x,texCoord1.y+stepSize.y); #endif #endif if (useCamB){frag1=texture2D(textureSampler,texCoord1).rgb;frag2=texture2D(textureSampler,texCoord2).rgb;}else if (useCamA){frag1=texture2D(camASampler ,texCoord1).rgb;frag2=texture2D(camASampler ,texCoord2).rgb;}else {discard;} gl_FragColor=vec4((frag1+frag2)/TWO,1.0);} `; je.ShadersStore[qce] = Jce; class sie extends Bi { /** * Gets a string identifying the name of the class * @returns "StereoscopicInterlacePostProcessI" string */ getClassName() { return "StereoscopicInterlacePostProcessI"; } /** * Initializes a StereoscopicInterlacePostProcessI * @param name The name of the effect. * @param rigCameras The rig cameras to be applied to the post process * @param isStereoscopicHoriz If the rendered results are horizontal or vertical * @param isStereoscopicInterlaced If the rendered results are alternate line interlaced * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. 
(default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) */ constructor(e, t, i, r, s, n, a) { super(e, "stereoscopicInterlace", ["stepSize"], ["camASampler"], 1, t[1], s, n, a, r ? "#define IS_STEREOSCOPIC_INTERLACED 1" : i ? "#define IS_STEREOSCOPIC_HORIZ 1" : void 0), this._passedProcess = t[0]._rigPostProcess, this._stepSize = new at(1 / this.width, 1 / this.height), this.onSizeChangedObservable.add(() => { this._stepSize = new at(1 / this.width, 1 / this.height); }), this.onApplyObservable.add((l) => { l.setTextureFromPostProcess("camASampler", this._passedProcess), l.setFloat2("stepSize", this._stepSize.x, this._stepSize.y); }); } } class eue extends Bi { /** * Gets a string identifying the name of the class * @returns "StereoscopicInterlacePostProcess" string */ getClassName() { return "StereoscopicInterlacePostProcess"; } /** * Initializes a StereoscopicInterlacePostProcess * @param name The name of the effect. * @param rigCameras The rig cameras to be applied to the post process * @param isStereoscopicHoriz If the rendered results are horizontal or vertical * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) */ constructor(e, t, i, r, s, n) { super(e, "stereoscopicInterlace", ["stepSize"], ["camASampler"], 1, t[1], r, s, n, i ? "#define IS_STEREOSCOPIC_HORIZ 1" : void 0), this._passedProcess = t[0]._rigPostProcess, this._stepSize = new at(1 / this.width, 1 / this.height), this.onSizeChangedObservable.add(() => { this._stepSize = new at(1 / this.width, 1 / this.height); }), this.onApplyObservable.add((a) => { a.setTextureFromPostProcess("camASampler", this._passedProcess), a.setFloat2("stepSize", this._stepSize.x, this._stepSize.y); }); } } function eN(c) { const e = c.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL || c.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED, t = c.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED; c.cameraRigMode === Ai.RIG_MODE_STEREOSCOPIC_INTERLACED ? (c._rigCameras[0]._rigPostProcess = new h6(c.name + "_passthru", 1, c._rigCameras[0]), c._rigCameras[1]._rigPostProcess = new sie(c.name + "_stereoInterlace", c._rigCameras, !1, !0)) : (c._rigCameras[t ? 1 : 0].viewport = new Md(0, 0, e ? 0.5 : 1, e ? 1 : 0.5), c._rigCameras[t ? 0 : 1].viewport = new Md(e ? 0.5 : 0, e ? 0 : 0.5, e ? 0.5 : 1, e ? 1 : 0.5)); } In.AddNodeConstructor("StereoscopicArcRotateCamera", (c, e, t) => () => new nie(c, 0, 0, 1, D.Zero(), t.interaxial_distance, t.isStereoscopicSideBySide, e)); class nie extends Pn { /** * Creates a new StereoscopicArcRotateCamera * @param name defines camera name * @param alpha defines alpha angle (in radians) * @param beta defines beta angle (in radians) * @param radius defines radius * @param target defines camera target * @param interaxialDistance defines distance between each color axis * @param isStereoscopicSideBySide defines is stereoscopic is done side by side or over under * @param scene defines the hosting scene */ constructor(e, t, i, r, s, n, a, l) { super(e, t, i, r, s, l), this._setRigMode = () => eN(this), this.interaxialDistance = n, this.isStereoscopicSideBySide = a, this.setCameraRigMode(a ? 
Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL : Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER, { interaxialDistance: n }); } /** * Gets camera class name * @returns StereoscopicArcRotateCamera */ getClassName() { return "StereoscopicArcRotateCamera"; } } In.AddNodeConstructor("StereoscopicFreeCamera", (c, e, t) => () => new aie(c, D.Zero(), t.interaxial_distance, t.isStereoscopicSideBySide, e)); class aie extends du { /** * Creates a new StereoscopicFreeCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param isStereoscopicSideBySide defines is stereoscopic is done side by side or over under * @param scene defines the hosting scene */ constructor(e, t, i, r, s) { super(e, t, s), this._setRigMode = () => eN(this), this.interaxialDistance = i, this.isStereoscopicSideBySide = r, this.setCameraRigMode(r ? Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL : Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER, { interaxialDistance: i }); } /** * Gets camera class name * @returns StereoscopicFreeCamera */ getClassName() { return "StereoscopicFreeCamera"; } } In.AddNodeConstructor("StereoscopicGamepadCamera", (c, e, t) => () => new oie(c, D.Zero(), t.interaxial_distance, t.isStereoscopicSideBySide, e)); class oie extends tU { /** * Creates a new StereoscopicGamepadCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param isStereoscopicSideBySide defines is stereoscopic is done side by side or over under * @param scene defines the hosting scene */ constructor(e, t, i, r, s) { super(e, t, s), this._setRigMode = () => eN(this), this.interaxialDistance = i, this.isStereoscopicSideBySide = r, this.setCameraRigMode(r ? Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL : Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER, { interaxialDistance: i }); } /** * Gets camera class name * @returns StereoscopicGamepadCamera */ getClassName() { return "StereoscopicGamepadCamera"; } } In.AddNodeConstructor("StereoscopicFreeCamera", (c, e, t) => () => new lie(c, D.Zero(), t.interaxial_distance, t.isStereoscopicSideBySide, e)); class lie extends x5 { /** * Creates a new StereoscopicUniversalCamera * @param name defines camera name * @param position defines initial position * @param interaxialDistance defines distance between each color axis * @param isStereoscopicSideBySide defines is stereoscopic is done side by side or over under * @param scene defines the hosting scene */ constructor(e, t, i, r, s) { super(e, t, s), this._setRigMode = () => eN(this), this.interaxialDistance = i, this.isStereoscopicSideBySide = r, this.setCameraRigMode(r ? 
Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL : Ai.RIG_MODE_STEREOSCOPIC_OVERUNDER, { interaxialDistance: i }); } /** * Gets camera class name * @returns StereoscopicUniversalCamera */ getClassName() { return "StereoscopicUniversalCamera"; } } class tue extends x5 { set distanceBetweenEyes(e) { this._distanceBetweenEyes = e; } /** * distance between the eyes */ get distanceBetweenEyes() { return this._distanceBetweenEyes; } set distanceToProjectionPlane(e) { this._distanceToProjectionPlane = e; } /** * Distance to projection plane (should be the same units the like distance between the eyes) */ get distanceToProjectionPlane() { return this._distanceToProjectionPlane; } /** * Creates a new StereoscopicScreenUniversalCamera * @param name defines camera name * @param position defines initial position * @param scene defines the hosting scene * @param distanceToProjectionPlane defines distance between each color axis. The rig cameras will receive this as their negative z position! * @param distanceBetweenEyes defines is stereoscopic is done side by side or over under */ constructor(e, t, i, r = 1, s = 0.065) { super(e, t, i), this._distanceBetweenEyes = s, this._distanceToProjectionPlane = r, this.setCameraRigMode(Ai.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL, { stereoHalfAngle: 0 }), this._cameraRigParams.stereoHalfAngle = 0, this._cameraRigParams.interaxialDistance = s; } /** * Gets camera class name * @returns StereoscopicScreenUniversalCamera */ getClassName() { return "StereoscopicUniversalCamera"; } /** * @internal */ createRigCamera(e) { const t = new Cl(e, D.Zero(), this.getScene()), i = new xi("tm_" + e, this.getScene()); return t.parent = i, i.setPivotMatrix(Ae.Identity(), !1), t.isRigCamera = !0, t.rigParent = this, t; } /** * @internal */ _updateRigCameras() { for (let e = 0; e < this._rigCameras.length; e++) { const t = this._rigCameras[e]; t.minZ = this.minZ, t.maxZ = this.maxZ, t.fov = this.fov, t.upVector.copyFrom(this.upVector), t.rotationQuaternion ? t.rotationQuaternion.copyFrom(this.rotationQuaternion) : t.rotation.copyFrom(this.rotation), this._updateCamera(this._rigCameras[e], e); } } _updateCamera(e, t) { const i = this.distanceBetweenEyes / 2, r = i / this.distanceToProjectionPlane; e.position.copyFrom(this.position), e.position.addInPlaceFromFloats(t === 0 ? -i : i, 0, -this._distanceToProjectionPlane); const s = e.parent, n = s.getPivotMatrix(); n.setTranslationFromFloats(t === 0 ? i : -i, 0, 0), n.setRowFromFloats(2, t === 0 ? r : -r, 0, 1, 0), s.setPivotMatrix(n, !1); } _setRigMode() { this._rigCameras[0].viewport = new Md(0, 0, 0.5, 1), this._rigCameras[1].viewport = new Md(0.5, 0, 0.5, 1); for (let e = 0; e < this._rigCameras.length; e++) this._updateCamera(this._rigCameras[e], e); } } In.AddNodeConstructor("VirtualJoysticksCamera", (c, e) => () => new cie(c, D.Zero(), e)); class cie extends du { /** * Instantiates a VirtualJoysticksCamera. It can be useful in First Person Shooter game for instance. * It is identical to the Free Camera and simply adds by default a virtual joystick. * Virtual Joysticks are on-screen 2D graphics that are used to control the camera or other scene items. 
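* A minimal creation sketch (assuming the un-minified names BABYLON.VirtualJoysticksCamera and BABYLON.Vector3; this bundle only exposes mangled identifiers):
* @example
* // Behaves like a FreeCamera, with a virtual-joystick input added by default.
* const camera = new BABYLON.VirtualJoysticksCamera("vjCamera", new BABYLON.Vector3(0, 1.6, -5), scene);
* camera.attachControl(canvas, true);
*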
* @see https://doc.babylonjs.com/features/featuresDeepDive/cameras/camera_introduction#virtual-joysticks-camera * @param name Define the name of the camera in the scene * @param position Define the start position of the camera in the scene * @param scene Define the scene the camera belongs to */ constructor(e, t, i) { super(e, t, i), this.inputs.addVirtualJoystick(); } /** * Gets the current object class name. * @returns the class name */ getClassName() { return "VirtualJoysticksCamera"; } } class kI { constructor() { this.compensateDistortion = !0, this.multiviewEnabled = !1; } /** * Gets the rendering aspect ratio based on the provided resolutions. */ get aspectRatio() { return this.hResolution / (2 * this.vResolution); } /** * Gets the aspect ratio based on the FOV, scale factors, and real screen sizes. */ get aspectRatioFov() { return 2 * Math.atan(this.postProcessScaleFactor * this.vScreenSize / (2 * this.eyeToScreenDistance)); } /** * @internal */ get leftHMatrix() { const t = 4 * (this.hScreenSize / 4 - this.lensSeparationDistance / 2) / this.hScreenSize; return Ae.Translation(t, 0, 0); } /** * @internal */ get rightHMatrix() { const t = 4 * (this.hScreenSize / 4 - this.lensSeparationDistance / 2) / this.hScreenSize; return Ae.Translation(-t, 0, 0); } /** * @internal */ get leftPreViewMatrix() { return Ae.Translation(0.5 * this.interpupillaryDistance, 0, 0); } /** * @internal */ get rightPreViewMatrix() { return Ae.Translation(-0.5 * this.interpupillaryDistance, 0, 0); } /** * Get the default VRMetrics based on the most generic setup. * @returns the default vr metrics */ static GetDefault() { const e = new kI(); return e.hResolution = 1280, e.vResolution = 800, e.hScreenSize = 0.149759993, e.vScreenSize = 0.0935999975, e.vScreenCenter = 0.0467999987, e.eyeToScreenDistance = 0.0410000011, e.lensSeparationDistance = 0.063500002, e.interpupillaryDistance = 0.064000003, e.distortionK = [1, 0.219999999, 0.239999995, 0], e.chromaAbCorrection = [0.995999992, -0.00400000019, 1.01400006, 0], e.postProcessScaleFactor = 1.714605507808412, e.lensCenterOffset = 0.151976421, e; } } const iue = "vrDistortionCorrectionPixelShader", rue = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec2 LensCenter;uniform vec2 Scale;uniform vec2 ScaleIn;uniform vec4 HmdWarpParam;vec2 HmdWarp(vec2 in01) {vec2 theta=(in01-LensCenter)*ScaleIn; float rSq=theta.x*theta.x+theta.y*theta.y;vec2 rvector=theta*(HmdWarpParam.x+HmdWarpParam.y*rSq+HmdWarpParam.z*rSq*rSq+HmdWarpParam.w*rSq*rSq*rSq);return LensCenter+Scale*rvector;} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec2 tc=HmdWarp(vUV);if (tc.x <0.0 || tc.x>1.0 || tc.y<0.0 || tc.y>1.0) gl_FragColor=vec4(0.0,0.0,0.0,0.0);else{gl_FragColor=texture2D(textureSampler,tc);}}`; je.ShadersStore[iue] = rue; class mH extends Bi { /** * Gets a string identifying the name of the class * @returns "VRDistortionCorrectionPostProcess" string */ getClassName() { return "VRDistortionCorrectionPostProcess"; } /** * Initializes the VRDistortionCorrectionPostProcess * @param name The name of the effect. * @param camera The camera to apply the render pass to. 
* @param isRightEye If this is for the right eye distortion * @param vrMetrics All the required metrics for the VR camera */ constructor(e, t, i, r) { super(e, "vrDistortionCorrection", ["LensCenter", "Scale", "ScaleIn", "HmdWarpParam"], null, r.postProcessScaleFactor, t, De.BILINEAR_SAMPLINGMODE), this._isRightEye = i, this._distortionFactors = r.distortionK, this._postProcessScaleFactor = r.postProcessScaleFactor, this._lensCenterOffset = r.lensCenterOffset, this.adaptScaleToCurrentViewport = !0, this.onSizeChangedObservable.add(() => { this._scaleIn = new at(2, 2 / this.aspectRatio), this._scaleFactor = new at(0.5 * (1 / this._postProcessScaleFactor), 0.5 * (1 / this._postProcessScaleFactor) * this.aspectRatio), this._lensCenter = new at(this._isRightEye ? 0.5 - this._lensCenterOffset * 0.5 : 0.5 + this._lensCenterOffset * 0.5, 0.5); }), this.onApplyObservable.add((s) => { s.setFloat2("LensCenter", this._lensCenter.x, this._lensCenter.y), s.setFloat2("Scale", this._scaleFactor.x, this._scaleFactor.y), s.setFloat2("ScaleIn", this._scaleIn.x, this._scaleIn.y), s.setFloat4("HmdWarpParam", this._distortionFactors[0], this._distortionFactors[1], this._distortionFactors[2], this._distortionFactors[3]); }); } } const sue = "vrMultiviewToSingleviewPixelShader", nue = `precision mediump sampler2DArray;varying vec2 vUV;uniform sampler2DArray multiviewSampler;uniform int imageIndex; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(multiviewSampler,vec3(vUV,imageIndex));}`; je.ShadersStore[sue] = nue; class gH extends ra { set samples(e) { this._samples = e; } get samples() { return this._samples; } /** * Creates a multiview render target * @param scene scene used with the render target * @param size the size of the render target (used for each view) */ constructor(e, t = 512) { super("multiview rtt", t, e, !1, !0, 0, !1, void 0, !1, !1, !0, void 0, !0), this._renderTarget = this.getScene().getEngine().createMultiviewRenderTargetTexture(this.getRenderWidth(), this.getRenderHeight()), this._texture = this._renderTarget.texture, this._texture.isMultiview = !0, this._texture.format = 5, this.samples = this._getEngine().getCaps().maxSamples || this.samples, this._texture.samples = this._samples; } /** * @internal */ _bindFrameBuffer() { this._renderTarget && this.getScene().getEngine().bindMultiviewFramebuffer(this._renderTarget); } /** * Gets the number of views the corresponding to the texture (eg. 
a MultiviewRenderTarget will have > 1) * @returns the view count */ getViewCount() { return 2; } } $e.prototype.createMultiviewRenderTargetTexture = function(c, e, t, i) { const r = this._gl; if (!this.getCaps().multiview) throw "Multiview is not supported"; const s = this._createHardwareRenderTargetWrapper(!1, !1, { width: c, height: e }); s._framebuffer = r.createFramebuffer(); const n = new ln(this, ts.Unknown, !0); return n.width = c, n.height = e, n.isMultiview = !0, t || (t = r.createTexture(), r.bindTexture(r.TEXTURE_2D_ARRAY, t), r.texStorage3D(r.TEXTURE_2D_ARRAY, 1, r.RGBA8, c, e, 2)), s._colorTextureArray = t, i || (i = r.createTexture(), r.bindTexture(r.TEXTURE_2D_ARRAY, i), r.texStorage3D(r.TEXTURE_2D_ARRAY, 1, r.DEPTH24_STENCIL8, c, e, 2)), s._depthStencilTextureArray = i, n.isReady = !0, s.setTextures(n), s._depthStencilTexture = n, s; }; $e.prototype.bindMultiviewFramebuffer = function(c) { const e = c, t = this._gl, i = this.getCaps().oculusMultiview || this.getCaps().multiview; if (this.bindFramebuffer(e, void 0, void 0, void 0, !0), t.bindFramebuffer(t.DRAW_FRAMEBUFFER, e._framebuffer), e._colorTextureArray && e._depthStencilTextureArray) this.getCaps().oculusMultiview ? (i.framebufferTextureMultisampleMultiviewOVR(t.DRAW_FRAMEBUFFER, t.COLOR_ATTACHMENT0, e._colorTextureArray, 0, e.samples, 0, 2), i.framebufferTextureMultisampleMultiviewOVR(t.DRAW_FRAMEBUFFER, t.DEPTH_STENCIL_ATTACHMENT, e._depthStencilTextureArray, 0, e.samples, 0, 2)) : (i.framebufferTextureMultiviewOVR(t.DRAW_FRAMEBUFFER, t.COLOR_ATTACHMENT0, e._colorTextureArray, 0, 0, 2), i.framebufferTextureMultiviewOVR(t.DRAW_FRAMEBUFFER, t.DEPTH_STENCIL_ATTACHMENT, e._depthStencilTextureArray, 0, 0, 2)); else throw "Invalid multiview frame buffer"; }; $e.prototype.bindSpaceWarpFramebuffer = function(c) { const e = c, t = this._gl, i = this.getCaps().oculusMultiview || this.getCaps().multiview; if (this.bindFramebuffer(e, void 0, void 0, void 0, !0), t.bindFramebuffer(t.DRAW_FRAMEBUFFER, e._framebuffer), e._colorTextureArray && e._depthStencilTextureArray) i.framebufferTextureMultiviewOVR(t.DRAW_FRAMEBUFFER, t.COLOR_ATTACHMENT0, e._colorTextureArray, 0, 0, 2), i.framebufferTextureMultiviewOVR(t.DRAW_FRAMEBUFFER, t.DEPTH_ATTACHMENT, e._depthStencilTextureArray, 0, 0, 2); else throw new Error("Invalid Space Warp framebuffer"); }; Ai.prototype._useMultiviewToSingleView = !1; Ai.prototype._multiviewTexture = null; Ai.prototype._resizeOrCreateMultiviewTexture = function(c, e) { this._multiviewTexture ? (this._multiviewTexture.getRenderWidth() != c || this._multiviewTexture.getRenderHeight() != e) && (this._multiviewTexture.dispose(), this._multiviewTexture = new gH(this.getScene(), { width: c, height: e })) : this._multiviewTexture = new gH(this.getScene(), { width: c, height: e }); }; function uie(c, e) { const t = new Vi(c, void 0, !0, e); return t.addUniform("viewProjection", 16), t.addUniform("viewProjectionR", 16), t.addUniform("view", 16), t.addUniform("projection", 16), t.addUniform("vEyePosition", 4), t; } const aue = ii.prototype.createSceneUniformBuffer; ii.prototype._transformMatrixR = Ae.Zero(); ii.prototype._multiviewSceneUbo = null; ii.prototype._createMultiviewUbo = function() { this._multiviewSceneUbo = uie(this.getEngine(), "scene_multiview"); }; ii.prototype.createSceneUniformBuffer = function(c) { return this._multiviewSceneUbo ? 
uie(this.getEngine(), c) : aue.bind(this)(c); }; ii.prototype._updateMultiviewUbo = function(c, e) { c && e && c.multiplyToRef(e, this._transformMatrixR), c && e && (c.multiplyToRef(e, de.Matrix[0]), gm.GetRightPlaneToRef(de.Matrix[0], this._frustumPlanes[3])), this._multiviewSceneUbo && (this._multiviewSceneUbo.updateMatrix("viewProjection", this.getTransformMatrix()), this._multiviewSceneUbo.updateMatrix("viewProjectionR", this._transformMatrixR), this._multiviewSceneUbo.updateMatrix("view", this._viewMatrix), this._multiviewSceneUbo.updateMatrix("projection", this._projectionMatrix)); }; ii.prototype._renderMultiviewToSingleView = function(c) { c._resizeOrCreateMultiviewTexture(c._rigPostProcess && c._rigPostProcess && c._rigPostProcess.width > 0 ? c._rigPostProcess.width : this.getEngine().getRenderWidth(!0), c._rigPostProcess && c._rigPostProcess && c._rigPostProcess.height > 0 ? c._rigPostProcess.height : this.getEngine().getRenderHeight(!0)), this._multiviewSceneUbo || this._createMultiviewUbo(), c.outputRenderTarget = c._multiviewTexture, this._renderForCamera(c), c.outputRenderTarget = null; for (let e = 0; e < c._rigCameras.length; e++) { const t = this.getEngine(); this._activeCamera = c._rigCameras[e], t.setViewport(this._activeCamera.viewport), this.postProcessManager && (this.postProcessManager._prepareFrame(), this.postProcessManager._finalizeFrame(this._activeCamera.isIntermediate)); } }; class hie extends Bi { /** * Gets a string identifying the name of the class * @returns "VRMultiviewToSingleviewPostProcess" string */ getClassName() { return "VRMultiviewToSingleviewPostProcess"; } /** * Initializes a VRMultiviewToSingleview * @param name name of the post process * @param camera camera to be applied to * @param scaleFactor scaling factor to the size of the output texture */ constructor(e, t, i) { super(e, "vrMultiviewToSingleview", ["imageIndex"], ["multiviewSampler"], i, t, De.BILINEAR_SAMPLINGMODE); const r = t ?? this.getCamera(); this.onSizeChangedObservable.add(() => { }), this.onApplyObservable.add((s) => { r._scene.activeCamera && r._scene.activeCamera.isLeftCamera ? s.setInt("imageIndex", 0) : s.setInt("imageIndex", 1), s.setTexture("multiviewSampler", r._multiviewTexture); }); } } function iU(c, e) { const t = e.vrCameraMetrics || kI.GetDefault(); c._rigCameras[0]._cameraRigParams.vrMetrics = t, c._rigCameras[0].viewport = new Md(0, 0, 0.5, 1), c._rigCameras[0]._cameraRigParams.vrWorkMatrix = new Ae(), c._rigCameras[0]._cameraRigParams.vrHMatrix = t.leftHMatrix, c._rigCameras[0]._cameraRigParams.vrPreViewMatrix = t.leftPreViewMatrix, c._rigCameras[0].getProjectionMatrix = c._rigCameras[0]._getVRProjectionMatrix, c._rigCameras[1]._cameraRigParams.vrMetrics = t, c._rigCameras[1].viewport = new Md(0.5, 0, 0.5, 1), c._rigCameras[1]._cameraRigParams.vrWorkMatrix = new Ae(), c._rigCameras[1]._cameraRigParams.vrHMatrix = t.rightHMatrix, c._rigCameras[1]._cameraRigParams.vrPreViewMatrix = t.rightPreViewMatrix, c._rigCameras[1].getProjectionMatrix = c._rigCameras[1]._getVRProjectionMatrix, t.multiviewEnabled && (c.getScene().getEngine().getCaps().multiview ? 
(c._useMultiviewToSingleView = !0, c._rigPostProcess = new hie("VRMultiviewToSingleview", c, t.postProcessScaleFactor)) : (Ce.Warn("Multiview is not supported, falling back to standard rendering"), t.multiviewEnabled = !1)), t.compensateDistortion && (c._rigCameras[0]._rigPostProcess = new mH("VR_Distort_Compensation_Left", c._rigCameras[0], !1, t), c._rigCameras[1]._rigPostProcess = new mH("VR_Distort_Compensation_Right", c._rigCameras[1], !0, t)); } In.AddNodeConstructor("VRDeviceOrientationArcRotateCamera", (c, e) => () => new die(c, 0, 0, 1, D.Zero(), e)); class die extends Pn { /** * Creates a new VRDeviceOrientationArcRotateCamera * @param name defines camera name * @param alpha defines the camera rotation along the longitudinal axis * @param beta defines the camera rotation along the latitudinal axis * @param radius defines the camera distance from its target * @param target defines the camera target * @param scene defines the scene the camera belongs to * @param compensateDistortion defines if the camera needs to compensate the lens distortion * @param vrCameraMetrics defines the vr metrics associated to the camera */ constructor(e, t, i, r, s, n, a = !0, l = kI.GetDefault()) { super(e, t, i, r, s, n), this._setRigMode = (o) => iU(this, o), l.compensateDistortion = a, this.setCameraRigMode(Ai.RIG_MODE_VR, { vrCameraMetrics: l }), this.inputs.addVRDeviceOrientation(); } /** * Gets camera class name * @returns VRDeviceOrientationArcRotateCamera */ getClassName() { return "VRDeviceOrientationArcRotateCamera"; } } In.AddNodeConstructor("VRDeviceOrientationFreeCamera", (c, e) => () => new rU(c, D.Zero(), e)); class rU extends eU { /** * Creates a new VRDeviceOrientationFreeCamera * @param name defines camera name * @param position defines the start position of the camera * @param scene defines the scene the camera belongs to * @param compensateDistortion defines if the camera needs to compensate the lens distortion * @param vrCameraMetrics defines the vr metrics associated to the camera */ constructor(e, t, i, r = !0, s = kI.GetDefault()) { super(e, t, i), this._setRigMode = (n) => iU(this, n), s.compensateDistortion = r, this.setCameraRigMode(Ai.RIG_MODE_VR, { vrCameraMetrics: s }); } /** * Gets camera class name * @returns VRDeviceOrientationFreeCamera */ getClassName() { return "VRDeviceOrientationFreeCamera"; } } In.AddNodeConstructor("VRDeviceOrientationGamepadCamera", (c, e) => () => new fie(c, D.Zero(), e)); class fie extends rU { /** * Creates a new VRDeviceOrientationGamepadCamera * @param name defines camera name * @param position defines the start position of the camera * @param scene defines the scene the camera belongs to * @param compensateDistortion defines if the camera needs to compensate the lens distortion * @param vrCameraMetrics defines the vr metrics associated to the camera */ constructor(e, t, i, r = !0, s = kI.GetDefault()) { super(e, t, i, r, s), this._setRigMode = (n) => iU(this, n), this.inputs.addGamepad(); } /** * Gets camera class name * @returns VRDeviceOrientationGamepadCamera */ getClassName() { return "VRDeviceOrientationGamepadCamera"; } } class lB { constructor() { this.previousWorldMatrices = {}, this.previousBones = {}; } /** * Add the required uniforms to the current list. * @param uniforms defines the current uniform list. */ static AddUniforms(e) { e.push("previousWorld", "previousViewProjection", "mPreviousBones"); } /** * Add the required samplers to the current list. * @param samplers defines the current sampler list. 
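* A minimal sketch (assuming the un-minified name BABYLON.PrePassConfiguration for this mangled class): these helpers are used when the prepass needs previous-frame transforms (e.g. for per-pixel velocity), pushing the required uniforms and samplers before an effect is compiled.
* @example
* // Hypothetical effect setup; AddUniforms appends previousWorld, previousViewProjection
* // and mPreviousBones, while AddSamplers is currently a no-op.
* const uniforms = ["world", "viewProjection"];
* const samplers = [];
* BABYLON.PrePassConfiguration.AddUniforms(uniforms);
* BABYLON.PrePassConfiguration.AddSamplers(samplers);
*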
*/ // eslint-disable-next-line @typescript-eslint/no-unused-vars static AddSamplers(e) { } /** * Binds the material data. * @param effect defines the effect to update * @param scene defines the scene the material belongs to. * @param mesh The mesh * @param world World matrix of this mesh * @param isFrozen Is the material frozen */ // eslint-disable-next-line @typescript-eslint/no-unused-vars bindForSubMesh(e, t, i, r, s) { if (t.prePassRenderer && t.prePassRenderer.enabled && t.prePassRenderer.currentRTisSceneRT && t.prePassRenderer.getIndex(2) !== -1) { this.previousWorldMatrices[i.uniqueId] || (this.previousWorldMatrices[i.uniqueId] = r.clone()), this.previousViewProjection || (this.previousViewProjection = t.getTransformMatrix().clone(), this.currentViewProjection = t.getTransformMatrix().clone()); const n = t.getEngine(); this.currentViewProjection.updateFlag !== t.getTransformMatrix().updateFlag ? (this._lastUpdateFrameId = n.frameId, this.previousViewProjection.copyFrom(this.currentViewProjection), this.currentViewProjection.copyFrom(t.getTransformMatrix())) : this._lastUpdateFrameId !== n.frameId && (this._lastUpdateFrameId = n.frameId, this.previousViewProjection.copyFrom(this.currentViewProjection)), e.setMatrix("previousWorld", this.previousWorldMatrices[i.uniqueId]), e.setMatrix("previousViewProjection", this.previousViewProjection), this.previousWorldMatrices[i.uniqueId] = r.clone(); } } } class Tt { /** * Are diffuse textures enabled in the application. */ static get DiffuseTextureEnabled() { return this._DiffuseTextureEnabled; } static set DiffuseTextureEnabled(e) { this._DiffuseTextureEnabled !== e && (this._DiffuseTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are detail textures enabled in the application. */ static get DetailTextureEnabled() { return this._DetailTextureEnabled; } static set DetailTextureEnabled(e) { this._DetailTextureEnabled !== e && (this._DetailTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are decal maps enabled in the application. */ static get DecalMapEnabled() { return this._DecalMapEnabled; } static set DecalMapEnabled(e) { this._DecalMapEnabled !== e && (this._DecalMapEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are ambient textures enabled in the application. */ static get AmbientTextureEnabled() { return this._AmbientTextureEnabled; } static set AmbientTextureEnabled(e) { this._AmbientTextureEnabled !== e && (this._AmbientTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are opacity textures enabled in the application. */ static get OpacityTextureEnabled() { return this._OpacityTextureEnabled; } static set OpacityTextureEnabled(e) { this._OpacityTextureEnabled !== e && (this._OpacityTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are reflection textures enabled in the application. */ static get ReflectionTextureEnabled() { return this._ReflectionTextureEnabled; } static set ReflectionTextureEnabled(e) { this._ReflectionTextureEnabled !== e && (this._ReflectionTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are emissive textures enabled in the application. */ static get EmissiveTextureEnabled() { return this._EmissiveTextureEnabled; } static set EmissiveTextureEnabled(e) { this._EmissiveTextureEnabled !== e && (this._EmissiveTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are specular textures enabled in the application. 
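* A minimal toggle sketch (assuming the un-minified name BABYLON.MaterialFlags for this mangled class): flipping any of these static flags marks all materials as dirty so their effects are rebuilt without the corresponding texture channel.
* @example
* // Hypothetical global switches, e.g. for debugging or a low-quality preset.
* BABYLON.MaterialFlags.SpecularTextureEnabled = false; // ignore specular maps everywhere
* BABYLON.MaterialFlags.BumpTextureEnabled = false;     // ignore bump/normal maps everywhere
*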
*/ static get SpecularTextureEnabled() { return this._SpecularTextureEnabled; } static set SpecularTextureEnabled(e) { this._SpecularTextureEnabled !== e && (this._SpecularTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are bump textures enabled in the application. */ static get BumpTextureEnabled() { return this._BumpTextureEnabled; } static set BumpTextureEnabled(e) { this._BumpTextureEnabled !== e && (this._BumpTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are lightmap textures enabled in the application. */ static get LightmapTextureEnabled() { return this._LightmapTextureEnabled; } static set LightmapTextureEnabled(e) { this._LightmapTextureEnabled !== e && (this._LightmapTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are refraction textures enabled in the application. */ static get RefractionTextureEnabled() { return this._RefractionTextureEnabled; } static set RefractionTextureEnabled(e) { this._RefractionTextureEnabled !== e && (this._RefractionTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are color grading textures enabled in the application. */ static get ColorGradingTextureEnabled() { return this._ColorGradingTextureEnabled; } static set ColorGradingTextureEnabled(e) { this._ColorGradingTextureEnabled !== e && (this._ColorGradingTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are fresnels enabled in the application. */ static get FresnelEnabled() { return this._FresnelEnabled; } static set FresnelEnabled(e) { this._FresnelEnabled !== e && (this._FresnelEnabled = e, $e.MarkAllMaterialsAsDirty(4)); } /** * Are clear coat textures enabled in the application. */ static get ClearCoatTextureEnabled() { return this._ClearCoatTextureEnabled; } static set ClearCoatTextureEnabled(e) { this._ClearCoatTextureEnabled !== e && (this._ClearCoatTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are clear coat bump textures enabled in the application. */ static get ClearCoatBumpTextureEnabled() { return this._ClearCoatBumpTextureEnabled; } static set ClearCoatBumpTextureEnabled(e) { this._ClearCoatBumpTextureEnabled !== e && (this._ClearCoatBumpTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are clear coat tint textures enabled in the application. */ static get ClearCoatTintTextureEnabled() { return this._ClearCoatTintTextureEnabled; } static set ClearCoatTintTextureEnabled(e) { this._ClearCoatTintTextureEnabled !== e && (this._ClearCoatTintTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are sheen textures enabled in the application. */ static get SheenTextureEnabled() { return this._SheenTextureEnabled; } static set SheenTextureEnabled(e) { this._SheenTextureEnabled !== e && (this._SheenTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are anisotropic textures enabled in the application. */ static get AnisotropicTextureEnabled() { return this._AnisotropicTextureEnabled; } static set AnisotropicTextureEnabled(e) { this._AnisotropicTextureEnabled !== e && (this._AnisotropicTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are thickness textures enabled in the application. */ static get ThicknessTextureEnabled() { return this._ThicknessTextureEnabled; } static set ThicknessTextureEnabled(e) { this._ThicknessTextureEnabled !== e && (this._ThicknessTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are refraction intensity textures enabled in the application. 
*/ static get RefractionIntensityTextureEnabled() { return this._ThicknessTextureEnabled; } static set RefractionIntensityTextureEnabled(e) { this._RefractionIntensityTextureEnabled !== e && (this._RefractionIntensityTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are translucency intensity textures enabled in the application. */ static get TranslucencyIntensityTextureEnabled() { return this._ThicknessTextureEnabled; } static set TranslucencyIntensityTextureEnabled(e) { this._TranslucencyIntensityTextureEnabled !== e && (this._TranslucencyIntensityTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } /** * Are translucency intensity textures enabled in the application. */ static get IridescenceTextureEnabled() { return this._IridescenceTextureEnabled; } static set IridescenceTextureEnabled(e) { this._IridescenceTextureEnabled !== e && (this._IridescenceTextureEnabled = e, $e.MarkAllMaterialsAsDirty(1)); } } Tt._DiffuseTextureEnabled = !0; Tt._DetailTextureEnabled = !0; Tt._DecalMapEnabled = !0; Tt._AmbientTextureEnabled = !0; Tt._OpacityTextureEnabled = !0; Tt._ReflectionTextureEnabled = !0; Tt._EmissiveTextureEnabled = !0; Tt._SpecularTextureEnabled = !0; Tt._BumpTextureEnabled = !0; Tt._LightmapTextureEnabled = !0; Tt._RefractionTextureEnabled = !0; Tt._ColorGradingTextureEnabled = !0; Tt._FresnelEnabled = !0; Tt._ClearCoatTextureEnabled = !0; Tt._ClearCoatBumpTextureEnabled = !0; Tt._ClearCoatTintTextureEnabled = !0; Tt._SheenTextureEnabled = !0; Tt._AnisotropicTextureEnabled = !0; Tt._ThicknessTextureEnabled = !0; Tt._RefractionIntensityTextureEnabled = !0; Tt._TranslucencyIntensityTextureEnabled = !0; Tt._IridescenceTextureEnabled = !0; const oue = "decalFragmentDeclaration", lue = `#ifdef DECAL uniform vec4 vDecalInfos; #endif `; je.IncludesShadersStore[oue] = lue; const cue = "defaultFragmentDeclaration", uue = `uniform vec4 vEyePosition;uniform vec4 vDiffuseColor; #ifdef SPECULARTERM uniform vec4 vSpecularColor; #endif uniform vec3 vEmissiveColor;uniform vec3 vAmbientColor;uniform float visibility; #ifdef DIFFUSE uniform vec2 vDiffuseInfos; #endif #ifdef AMBIENT uniform vec2 vAmbientInfos; #endif #ifdef OPACITY uniform vec2 vOpacityInfos; #endif #ifdef EMISSIVE uniform vec2 vEmissiveInfos; #endif #ifdef LIGHTMAP uniform vec2 vLightmapInfos; #endif #ifdef BUMP uniform vec3 vBumpInfos;uniform vec2 vTangentSpaceParams; #endif #ifdef ALPHATEST uniform float alphaCutOff; #endif #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(REFRACTION) || defined(PREPASS) uniform mat4 view; #endif #ifdef REFRACTION uniform vec4 vRefractionInfos; #ifndef REFRACTIONMAP_3D uniform mat4 refractionMatrix; #endif #ifdef REFRACTIONFRESNEL uniform vec4 refractionLeftColor;uniform vec4 refractionRightColor; #endif #if defined(USE_LOCAL_REFRACTIONMAP_CUBIC) && defined(REFRACTIONMAP_3D) uniform vec3 vRefractionPosition;uniform vec3 vRefractionSize; #endif #endif #if defined(SPECULAR) && defined(SPECULARTERM) uniform vec2 vSpecularInfos; #endif #ifdef DIFFUSEFRESNEL uniform vec4 diffuseLeftColor;uniform vec4 diffuseRightColor; #endif #ifdef OPACITYFRESNEL uniform vec4 opacityParts; #endif #ifdef EMISSIVEFRESNEL uniform vec4 emissiveLeftColor;uniform vec4 emissiveRightColor; #endif #ifdef REFLECTION uniform vec2 vReflectionInfos; #if defined(REFLECTIONMAP_PLANAR) || defined(REFLECTIONMAP_CUBIC) || defined(REFLECTIONMAP_PROJECTION) || defined(REFLECTIONMAP_EQUIRECTANGULAR) || defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_SKYBOX) uniform mat4 
reflectionMatrix; #endif #ifndef REFLECTIONMAP_SKYBOX #if defined(USE_LOCAL_REFLECTIONMAP_CUBIC) && defined(REFLECTIONMAP_CUBIC) uniform vec3 vReflectionPosition;uniform vec3 vReflectionSize; #endif #endif #ifdef REFLECTIONFRESNEL uniform vec4 reflectionLeftColor;uniform vec4 reflectionRightColor; #endif #endif #ifdef DETAIL uniform vec4 vDetailInfos; #endif #include #define ADDITIONAL_FRAGMENT_DECLARATION `; je.IncludesShadersStore[cue] = uue; const hue = "sceneUboDeclaration", due = `layout(std140,column_major) uniform;uniform Scene {mat4 viewProjection; #ifdef MULTIVIEW mat4 viewProjectionR; #endif mat4 view;mat4 projection;vec4 vEyePosition;}; `; je.IncludesShadersStore[hue] = due; const fue = "meshUboDeclaration", pue = `#ifdef WEBGL2 uniform mat4 world;uniform float visibility; #else layout(std140,column_major) uniform;uniform Mesh {mat4 world;float visibility;}; #endif #define WORLD_UBO `; je.IncludesShadersStore[fue] = pue; const _ue = "defaultUboDeclaration", mue = `layout(std140,column_major) uniform;uniform Material {vec4 diffuseLeftColor;vec4 diffuseRightColor;vec4 opacityParts;vec4 reflectionLeftColor;vec4 reflectionRightColor;vec4 refractionLeftColor;vec4 refractionRightColor;vec4 emissiveLeftColor;vec4 emissiveRightColor;vec2 vDiffuseInfos;vec2 vAmbientInfos;vec2 vOpacityInfos;vec2 vReflectionInfos;vec3 vReflectionPosition;vec3 vReflectionSize;vec2 vEmissiveInfos;vec2 vLightmapInfos;vec2 vSpecularInfos;vec3 vBumpInfos;mat4 diffuseMatrix;mat4 ambientMatrix;mat4 opacityMatrix;mat4 reflectionMatrix;mat4 emissiveMatrix;mat4 lightmapMatrix;mat4 specularMatrix;mat4 bumpMatrix;vec2 vTangentSpaceParams;float pointSize;float alphaCutOff;mat4 refractionMatrix;vec4 vRefractionInfos;vec3 vRefractionPosition;vec3 vRefractionSize;vec4 vSpecularColor;vec3 vEmissiveColor;vec4 vDiffuseColor;vec3 vAmbientColor; #define ADDITIONAL_UBO_DECLARATION }; #include #include `; je.IncludesShadersStore[_ue] = mue; const gue = "prePassDeclaration", vue = `#ifdef PREPASS #extension GL_EXT_draw_buffers : require layout(location=0) out highp vec4 glFragData[{X}];highp vec4 gl_FragColor; #ifdef PREPASS_DEPTH varying highp vec3 vViewPos; #endif #ifdef PREPASS_VELOCITY varying highp vec4 vCurrentPosition;varying highp vec4 vPreviousPosition; #endif #endif `; je.IncludesShadersStore[gue] = vue; const Aue = "oitDeclaration", yue = `#ifdef ORDER_INDEPENDENT_TRANSPARENCY #extension GL_EXT_draw_buffers : require layout(location=0) out vec2 depth; layout(location=1) out vec4 frontColor;layout(location=2) out vec4 backColor; #define MAX_DEPTH 99999.0 highp vec4 gl_FragColor;uniform sampler2D oitDepthSampler;uniform sampler2D oitFrontColorSampler; #endif `; je.IncludesShadersStore[Aue] = yue; const Cue = "mainUVVaryingDeclaration", xue = `#ifdef MAINUV{X} varying vec2 vMainUV{X}; #endif `; je.IncludesShadersStore[Cue] = xue; const bue = "helperFunctions", Eue = `const float PI=3.1415926535897932384626433832795;const float RECIPROCAL_PI=0.3183098861837907;const float RECIPROCAL_PI2=0.15915494309189535;const float HALF_MIN=5.96046448e-08; const float LinearEncodePowerApprox=2.2;const float GammaEncodePowerApprox=1.0/LinearEncodePowerApprox;const vec3 LuminanceEncodeApprox=vec3(0.2126,0.7152,0.0722);const float Epsilon=0.0000001; #define saturate(x) clamp(x,0.0,1.0) #define absEps(x) abs(x)+Epsilon #define maxEps(x) max(x,Epsilon) #define saturateEps(x) clamp(x,Epsilon,1.0) mat3 transposeMat3(mat3 inMatrix) {vec3 i0=inMatrix[0];vec3 i1=inMatrix[1];vec3 i2=inMatrix[2];mat3 outMatrix=mat3( vec3(i0.x,i1.x,i2.x), 
vec3(i0.y,i1.y,i2.y), vec3(i0.z,i1.z,i2.z) );return outMatrix;} mat3 inverseMat3(mat3 inMatrix) {float a00=inMatrix[0][0],a01=inMatrix[0][1],a02=inMatrix[0][2];float a10=inMatrix[1][0],a11=inMatrix[1][1],a12=inMatrix[1][2];float a20=inMatrix[2][0],a21=inMatrix[2][1],a22=inMatrix[2][2];float b01=a22*a11-a12*a21;float b11=-a22*a10+a12*a20;float b21=a21*a10-a11*a20;float det=a00*b01+a01*b11+a02*b21;return mat3(b01,(-a22*a01+a02*a21),(a12*a01-a02*a11), b11,(a22*a00-a02*a20),(-a12*a00+a02*a10), b21,(-a21*a00+a01*a20),(a11*a00-a01*a10))/det;} #if USE_EXACT_SRGB_CONVERSIONS vec3 toLinearSpaceExact(vec3 color) {vec3 nearZeroSection=0.0773993808*color;vec3 remainingSection=pow(0.947867299*(color+vec3(0.055)),vec3(2.4)); #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) return mix(remainingSection,nearZeroSection,lessThanEqual(color,vec3(0.04045))); #else return vec3( color.r<=0.04045 ? nearZeroSection.r : remainingSection.r, color.g<=0.04045 ? nearZeroSection.g : remainingSection.g, color.b<=0.04045 ? nearZeroSection.b : remainingSection.b); #endif } vec3 toGammaSpaceExact(vec3 color) {vec3 nearZeroSection=12.92*color;vec3 remainingSection=1.055*pow(color,vec3(0.41666))-vec3(0.055); #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) return mix(remainingSection,nearZeroSection,lessThanEqual(color,vec3(0.0031308))); #else return vec3( color.r<=0.0031308 ? nearZeroSection.r : remainingSection.r, color.g<=0.0031308 ? nearZeroSection.g : remainingSection.g, color.b<=0.0031308 ? nearZeroSection.b : remainingSection.b); #endif } #endif float toLinearSpace(float color) { #if USE_EXACT_SRGB_CONVERSIONS float nearZeroSection=0.0773993808*color;float remainingSection=pow(0.947867299*(color+0.055),2.4);return color<=0.04045 ? nearZeroSection : remainingSection; #else return pow(color,LinearEncodePowerApprox); #endif } vec3 toLinearSpace(vec3 color) { #if USE_EXACT_SRGB_CONVERSIONS return toLinearSpaceExact(color); #else return pow(color,vec3(LinearEncodePowerApprox)); #endif } vec4 toLinearSpace(vec4 color) { #if USE_EXACT_SRGB_CONVERSIONS return vec4(toLinearSpaceExact(color.rgb),color.a); #else return vec4(pow(color.rgb,vec3(LinearEncodePowerApprox)),color.a); #endif } float toGammaSpace(float color) { #if USE_EXACT_SRGB_CONVERSIONS float nearZeroSection=12.92*color;float remainingSection=1.055*pow(color,0.41666)-0.055;return color<=0.0031308 ? 
nearZeroSection : remainingSection; #else return pow(color,GammaEncodePowerApprox); #endif } vec3 toGammaSpace(vec3 color) { #if USE_EXACT_SRGB_CONVERSIONS return toGammaSpaceExact(color); #else return pow(color,vec3(GammaEncodePowerApprox)); #endif } vec4 toGammaSpace(vec4 color) { #if USE_EXACT_SRGB_CONVERSIONS return vec4(toGammaSpaceExact(color.rgb),color.a); #else return vec4(pow(color.rgb,vec3(GammaEncodePowerApprox)),color.a); #endif } float square(float value) {return value*value;} vec3 square(vec3 value) {return value*value;} float pow5(float value) {float sq=value*value;return sq*sq*value;} float getLuminance(vec3 color) {return clamp(dot(color,LuminanceEncodeApprox),0.,1.);} float getRand(vec2 seed) {return fract(sin(dot(seed.xy ,vec2(12.9898,78.233)))*43758.5453);} float dither(vec2 seed,float varianceAmount) {float rand=getRand(seed);float normVariance=varianceAmount/255.0;float dither=mix(-normVariance,normVariance,rand);return dither;} const float rgbdMaxRange=255.0;vec4 toRGBD(vec3 color) {float maxRGB=maxEps(max(color.r,max(color.g,color.b)));float D =max(rgbdMaxRange/maxRGB,1.);D =clamp(floor(D)/255.0,0.,1.);vec3 rgb=color.rgb*D;rgb=toGammaSpace(rgb);return vec4(clamp(rgb,0.,1.),D); } vec3 fromRGBD(vec4 rgbd) {rgbd.rgb=toLinearSpace(rgbd.rgb);return rgbd.rgb/rgbd.a;} vec3 parallaxCorrectNormal( vec3 vertexPos,vec3 origVec,vec3 cubeSize,vec3 cubePos ) {vec3 invOrigVec=vec3(1.0,1.0,1.0)/origVec;vec3 halfSize=cubeSize*0.5;vec3 intersecAtMaxPlane=(cubePos+halfSize-vertexPos)*invOrigVec;vec3 intersecAtMinPlane=(cubePos-halfSize-vertexPos)*invOrigVec;vec3 largestIntersec=max(intersecAtMaxPlane,intersecAtMinPlane);float distance=min(min(largestIntersec.x,largestIntersec.y),largestIntersec.z);vec3 intersectPositionWS=vertexPos+origVec*distance;return intersectPositionWS-cubePos;} `; je.IncludesShadersStore[bue] = Eue; const Tue = "lightFragmentDeclaration", Sue = `#ifdef LIGHT{X} uniform vec4 vLightData{X};uniform vec4 vLightDiffuse{X}; #ifdef SPECULARTERM uniform vec4 vLightSpecular{X}; #else vec4 vLightSpecular{X}=vec4(0.); #endif #ifdef SHADOW{X} #ifdef SHADOWCSM{X} uniform mat4 lightMatrix{X}[SHADOWCSMNUM_CASCADES{X}];uniform float viewFrustumZ{X}[SHADOWCSMNUM_CASCADES{X}];uniform float frustumLengths{X}[SHADOWCSMNUM_CASCADES{X}];uniform float cascadeBlendFactor{X};varying vec4 vPositionFromLight{X}[SHADOWCSMNUM_CASCADES{X}];varying float vDepthMetric{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromCamera{X}; #if defined(SHADOWPCSS{X}) uniform highp sampler2DArrayShadow shadowSampler{X};uniform highp sampler2DArray depthSampler{X};uniform vec2 lightSizeUVCorrection{X}[SHADOWCSMNUM_CASCADES{X}];uniform float depthCorrection{X}[SHADOWCSMNUM_CASCADES{X}];uniform float penumbraDarkness{X}; #elif defined(SHADOWPCF{X}) uniform highp sampler2DArrayShadow shadowSampler{X}; #else uniform highp sampler2DArray shadowSampler{X}; #endif #ifdef SHADOWCSMDEBUG{X} const vec3 vCascadeColorsMultiplier{X}[8]=vec3[8] ( vec3 ( 1.5,0.0,0.0 ), vec3 ( 0.0,1.5,0.0 ), vec3 ( 0.0,0.0,5.5 ), vec3 ( 1.5,0.0,5.5 ), vec3 ( 1.5,1.5,0.0 ), vec3 ( 1.0,1.0,1.0 ), vec3 ( 0.0,1.0,5.5 ), vec3 ( 0.5,3.5,0.75 ) );vec3 shadowDebug{X}; #endif #ifdef SHADOWCSMUSESHADOWMAXZ{X} int index{X}=-1; #else int index{X}=SHADOWCSMNUM_CASCADES{X}-1; #endif float diff{X}=0.; #elif defined(SHADOWCUBE{X}) uniform samplerCube shadowSampler{X}; #else varying vec4 vPositionFromLight{X};varying float vDepthMetric{X}; #if defined(SHADOWPCSS{X}) uniform highp sampler2DShadow shadowSampler{X};uniform highp sampler2D 
depthSampler{X}; #elif defined(SHADOWPCF{X}) uniform highp sampler2DShadow shadowSampler{X}; #else uniform sampler2D shadowSampler{X}; #endif uniform mat4 lightMatrix{X}; #endif uniform vec4 shadowsInfo{X};uniform vec2 depthValues{X}; #endif #ifdef SPOTLIGHT{X} uniform vec4 vLightDirection{X};uniform vec4 vLightFalloff{X}; #elif defined(POINTLIGHT{X}) uniform vec4 vLightFalloff{X}; #elif defined(HEMILIGHT{X}) uniform vec3 vLightGround{X}; #endif #ifdef PROJECTEDLIGHTTEXTURE{X} uniform mat4 textureProjectionMatrix{X};uniform sampler2D projectionLightSampler{X}; #endif #endif `; je.IncludesShadersStore[Tue] = Sue; const Mue = "lightUboDeclaration", Rue = `#ifdef LIGHT{X} uniform Light{X} {vec4 vLightData;vec4 vLightDiffuse;vec4 vLightSpecular; #ifdef SPOTLIGHT{X} vec4 vLightDirection;vec4 vLightFalloff; #elif defined(POINTLIGHT{X}) vec4 vLightFalloff; #elif defined(HEMILIGHT{X}) vec3 vLightGround; #endif vec4 shadowsInfo;vec2 depthValues;} light{X}; #ifdef PROJECTEDLIGHTTEXTURE{X} uniform mat4 textureProjectionMatrix{X};uniform sampler2D projectionLightSampler{X}; #endif #ifdef SHADOW{X} #ifdef SHADOWCSM{X} uniform mat4 lightMatrix{X}[SHADOWCSMNUM_CASCADES{X}];uniform float viewFrustumZ{X}[SHADOWCSMNUM_CASCADES{X}];uniform float frustumLengths{X}[SHADOWCSMNUM_CASCADES{X}];uniform float cascadeBlendFactor{X};varying vec4 vPositionFromLight{X}[SHADOWCSMNUM_CASCADES{X}];varying float vDepthMetric{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromCamera{X}; #if defined(SHADOWPCSS{X}) uniform highp sampler2DArrayShadow shadowSampler{X};uniform highp sampler2DArray depthSampler{X};uniform vec2 lightSizeUVCorrection{X}[SHADOWCSMNUM_CASCADES{X}];uniform float depthCorrection{X}[SHADOWCSMNUM_CASCADES{X}];uniform float penumbraDarkness{X}; #elif defined(SHADOWPCF{X}) uniform highp sampler2DArrayShadow shadowSampler{X}; #else uniform highp sampler2DArray shadowSampler{X}; #endif #ifdef SHADOWCSMDEBUG{X} const vec3 vCascadeColorsMultiplier{X}[8]=vec3[8] ( vec3 ( 1.5,0.0,0.0 ), vec3 ( 0.0,1.5,0.0 ), vec3 ( 0.0,0.0,5.5 ), vec3 ( 1.5,0.0,5.5 ), vec3 ( 1.5,1.5,0.0 ), vec3 ( 1.0,1.0,1.0 ), vec3 ( 0.0,1.0,5.5 ), vec3 ( 0.5,3.5,0.75 ) );vec3 shadowDebug{X}; #endif #ifdef SHADOWCSMUSESHADOWMAXZ{X} int index{X}=-1; #else int index{X}=SHADOWCSMNUM_CASCADES{X}-1; #endif float diff{X}=0.; #elif defined(SHADOWCUBE{X}) uniform samplerCube shadowSampler{X}; #else varying vec4 vPositionFromLight{X};varying float vDepthMetric{X}; #if defined(SHADOWPCSS{X}) uniform highp sampler2DShadow shadowSampler{X};uniform highp sampler2D depthSampler{X}; #elif defined(SHADOWPCF{X}) uniform highp sampler2DShadow shadowSampler{X}; #else uniform sampler2D shadowSampler{X}; #endif uniform mat4 lightMatrix{X}; #endif #endif #endif `; je.IncludesShadersStore[Mue] = Rue; const Pue = "lightsFragmentFunctions", Iue = `struct lightingInfo {vec3 diffuse; #ifdef SPECULARTERM vec3 specular; #endif #ifdef NDOTL float ndl; #endif };lightingInfo computeLighting(vec3 viewDirectionW,vec3 vNormal,vec4 lightData,vec3 diffuseColor,vec3 specularColor,float range,float glossiness) {lightingInfo result;vec3 lightVectorW;float attenuation=1.0;if (lightData.w==0.) 
{vec3 direction=lightData.xyz-vPositionW;attenuation=max(0.,1.0-length(direction)/range);lightVectorW=normalize(direction);} else {lightVectorW=normalize(-lightData.xyz);} float ndl=max(0.,dot(vNormal,lightVectorW)); #ifdef NDOTL result.ndl=ndl; #endif result.diffuse=ndl*diffuseColor*attenuation; #ifdef SPECULARTERM vec3 angleW=normalize(viewDirectionW+lightVectorW);float specComp=max(0.,dot(vNormal,angleW));specComp=pow(specComp,max(1.,glossiness));result.specular=specComp*specularColor*attenuation; #endif return result;} lightingInfo computeSpotLighting(vec3 viewDirectionW,vec3 vNormal,vec4 lightData,vec4 lightDirection,vec3 diffuseColor,vec3 specularColor,float range,float glossiness) {lightingInfo result;vec3 direction=lightData.xyz-vPositionW;vec3 lightVectorW=normalize(direction);float attenuation=max(0.,1.0-length(direction)/range);float cosAngle=max(0.,dot(lightDirection.xyz,-lightVectorW));if (cosAngle>=lightDirection.w) {cosAngle=max(0.,pow(cosAngle,lightData.w));attenuation*=cosAngle;float ndl=max(0.,dot(vNormal,lightVectorW)); #ifdef NDOTL result.ndl=ndl; #endif result.diffuse=ndl*diffuseColor*attenuation; #ifdef SPECULARTERM vec3 angleW=normalize(viewDirectionW+lightVectorW);float specComp=max(0.,dot(vNormal,angleW));specComp=pow(specComp,max(1.,glossiness));result.specular=specComp*specularColor*attenuation; #endif return result;} result.diffuse=vec3(0.); #ifdef SPECULARTERM result.specular=vec3(0.); #endif #ifdef NDOTL result.ndl=0.; #endif return result;} lightingInfo computeHemisphericLighting(vec3 viewDirectionW,vec3 vNormal,vec4 lightData,vec3 diffuseColor,vec3 specularColor,vec3 groundColor,float glossiness) {lightingInfo result;float ndl=dot(vNormal,lightData.xyz)*0.5+0.5; #ifdef NDOTL result.ndl=ndl; #endif result.diffuse=mix(groundColor,diffuseColor,ndl); #ifdef SPECULARTERM vec3 angleW=normalize(viewDirectionW+lightData.xyz);float specComp=max(0.,dot(vNormal,angleW));specComp=pow(specComp,max(1.,glossiness));result.specular=specComp*specularColor; #endif return result;} #define inline vec3 computeProjectionTextureDiffuseLighting(sampler2D projectionLightSampler,mat4 textureProjectionMatrix){vec4 strq=textureProjectionMatrix*vec4(vPositionW,1.0);strq/=strq.w;vec3 textureColor=texture2D(projectionLightSampler,strq.xy).rgb;return textureColor;}`; je.IncludesShadersStore[Pue] = Iue; const Due = "shadowsFragmentFunctions", Oue = `#ifdef SHADOWS #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,l) texture2DLodEXT(s,c,l) #else #define TEXTUREFUNC(s,c,b) texture2D(s,c,b) #endif #ifndef SHADOWFLOAT float unpack(vec4 color) {const vec4 bit_shift=vec4(1.0/(255.0*255.0*255.0),1.0/(255.0*255.0),1.0/255.0,1.0);return dot(color,bit_shift);} #endif float computeFallOff(float value,vec2 clipSpace,float frustumEdgeFalloff) {float mask=smoothstep(1.0-frustumEdgeFalloff,1.00000012,clamp(dot(clipSpace,clipSpace),0.,1.));return mix(value,1.0,mask);} #define inline float computeShadowCube(vec3 worldPos,vec3 lightPosition,samplerCube shadowSampler,float darkness,vec2 depthValues) {vec3 directionToLight=worldPos-lightPosition;float depth=length(directionToLight);depth=(depth+depthValues.x)/(depthValues.y);depth=clamp(depth,0.,1.0);directionToLight=normalize(directionToLight);directionToLight.y=-directionToLight.y; #ifndef SHADOWFLOAT float shadow=unpack(textureCube(shadowSampler,directionToLight)); #else float shadow=textureCube(shadowSampler,directionToLight).x; #endif return depth>shadow ? 
darkness : 1.0;} #define inline float computeShadowWithPoissonSamplingCube(vec3 worldPos,vec3 lightPosition,samplerCube shadowSampler,float mapSize,float darkness,vec2 depthValues) {vec3 directionToLight=worldPos-lightPosition;float depth=length(directionToLight);depth=(depth+depthValues.x)/(depthValues.y);depth=clamp(depth,0.,1.0);directionToLight=normalize(directionToLight);directionToLight.y=-directionToLight.y;float visibility=1.;vec3 poissonDisk[4];poissonDisk[0]=vec3(-1.0,1.0,-1.0);poissonDisk[1]=vec3(1.0,-1.0,-1.0);poissonDisk[2]=vec3(-1.0,-1.0,-1.0);poissonDisk[3]=vec3(1.0,-1.0,1.0); #ifndef SHADOWFLOAT if (unpack(textureCube(shadowSampler,directionToLight+poissonDisk[0]*mapSize))shadow ? computeFallOff(darkness,clipSpace.xy,frustumEdgeFalloff) : 1.;} #endif #define inline float computeShadow(vec4 vPositionFromLight,float depthMetric,sampler2D shadowSampler,float darkness,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec2 uv=0.5*clipSpace.xy+vec2(0.5);if (uv.x<0. || uv.x>1.0 || uv.y<0. || uv.y>1.0) {return 1.0;} else {float shadowPixelDepth=clamp(depthMetric,0.,1.0); #ifndef SHADOWFLOAT float shadow=unpack(TEXTUREFUNC(shadowSampler,uv,0.)); #else float shadow=TEXTUREFUNC(shadowSampler,uv,0.).x; #endif return shadowPixelDepth>shadow ? computeFallOff(darkness,clipSpace.xy,frustumEdgeFalloff) : 1.;}} #define inline float computeShadowWithPoissonSampling(vec4 vPositionFromLight,float depthMetric,sampler2D shadowSampler,float mapSize,float darkness,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec2 uv=0.5*clipSpace.xy+vec2(0.5);if (uv.x<0. || uv.x>1.0 || uv.y<0. || uv.y>1.0) {return 1.0;} else {float shadowPixelDepth=clamp(depthMetric,0.,1.0);float visibility=1.;vec2 poissonDisk[4];poissonDisk[0]=vec2(-0.94201624,-0.39906216);poissonDisk[1]=vec2(0.94558609,-0.76890725);poissonDisk[2]=vec2(-0.094184101,-0.92938870);poissonDisk[3]=vec2(0.34495938,0.29387760); #ifndef SHADOWFLOAT if (unpack(TEXTUREFUNC(shadowSampler,uv+poissonDisk[0]*mapSize,0.))1.0 || uv.y<0. || uv.y>1.0) {return 1.0;} else {float shadowPixelDepth=clamp(depthMetric,0.,1.0); #ifndef SHADOWFLOAT float shadowMapSample=unpack(TEXTUREFUNC(shadowSampler,uv,0.)); #else float shadowMapSample=TEXTUREFUNC(shadowSampler,uv,0.).x; #endif float esm=1.0-clamp(exp(min(87.,depthScale*shadowPixelDepth))*shadowMapSample,0.,1.-darkness);return computeFallOff(esm,clipSpace.xy,frustumEdgeFalloff);}} #define inline float computeShadowWithCloseESM(vec4 vPositionFromLight,float depthMetric,sampler2D shadowSampler,float darkness,float depthScale,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec2 uv=0.5*clipSpace.xy+vec2(0.5);if (uv.x<0. || uv.x>1.0 || uv.y<0. 
|| uv.y>1.0) {return 1.0;} else {float shadowPixelDepth=clamp(depthMetric,0.,1.0); #ifndef SHADOWFLOAT float shadowMapSample=unpack(TEXTUREFUNC(shadowSampler,uv,0.)); #else float shadowMapSample=TEXTUREFUNC(shadowSampler,uv,0.).x; #endif float esm=clamp(exp(min(87.,-depthScale*(shadowPixelDepth-shadowMapSample))),darkness,1.);return computeFallOff(esm,clipSpace.xy,frustumEdgeFalloff);}} #ifdef IS_NDC_HALF_ZRANGE #define ZINCLIP clipSpace.z #else #define ZINCLIP uvDepth.z #endif #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define GREATEST_LESS_THAN_ONE 0.99999994 /* disable_uniformity_analysis */ #define inline float computeShadowWithCSMPCF1(float layer,vec4 vPositionFromLight,float depthMetric,highp sampler2DArrayShadow shadowSampler,float darkness,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=clamp(ZINCLIP,0.,GREATEST_LESS_THAN_ONE);vec4 uvDepthLayer=vec4(uvDepth.x,uvDepth.y,layer,uvDepth.z);float shadow=texture2D(shadowSampler,uvDepthLayer);shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);} #define inline float computeShadowWithCSMPCF3(float layer,vec4 vPositionFromLight,float depthMetric,highp sampler2DArrayShadow shadowSampler,vec2 shadowMapSizeAndInverse,float darkness,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=clamp(ZINCLIP,0.,GREATEST_LESS_THAN_ONE);vec2 uv=uvDepth.xy*shadowMapSizeAndInverse.x; uv+=0.5; vec2 st=fract(uv); vec2 base_uv=floor(uv)-0.5; base_uv*=shadowMapSizeAndInverse.y; vec2 uvw0=3.-2.*st;vec2 uvw1=1.+2.*st;vec2 u=vec2((2.-st.x)/uvw0.x-1.,st.x/uvw1.x+1.)*shadowMapSizeAndInverse.y;vec2 v=vec2((2.-st.y)/uvw0.y-1.,st.y/uvw1.y+1.)*shadowMapSizeAndInverse.y;float shadow=0.;shadow+=uvw0.x*uvw0.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[0],v[0]),layer,uvDepth.z));shadow+=uvw1.x*uvw0.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[1],v[0]),layer,uvDepth.z));shadow+=uvw0.x*uvw1.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[0],v[1]),layer,uvDepth.z));shadow+=uvw1.x*uvw1.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[1],v[1]),layer,uvDepth.z));shadow=shadow/16.;shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);} #define inline float computeShadowWithCSMPCF5(float layer,vec4 vPositionFromLight,float depthMetric,highp sampler2DArrayShadow shadowSampler,vec2 shadowMapSizeAndInverse,float darkness,float frustumEdgeFalloff) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=clamp(ZINCLIP,0.,GREATEST_LESS_THAN_ONE);vec2 uv=uvDepth.xy*shadowMapSizeAndInverse.x; uv+=0.5; vec2 st=fract(uv); vec2 base_uv=floor(uv)-0.5; base_uv*=shadowMapSizeAndInverse.y; vec2 uvw0=4.-3.*st;vec2 uvw1=vec2(7.);vec2 uvw2=1.+3.*st;vec3 u=vec3((3.-2.*st.x)/uvw0.x-2.,(3.+st.x)/uvw1.x,st.x/uvw2.x+2.)*shadowMapSizeAndInverse.y;vec3 v=vec3((3.-2.*st.y)/uvw0.y-2.,(3.+st.y)/uvw1.y,st.y/uvw2.y+2.)*shadowMapSizeAndInverse.y;float 
shadow=0.;shadow+=uvw0.x*uvw0.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[0],v[0]),layer,uvDepth.z));shadow+=uvw1.x*uvw0.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[1],v[0]),layer,uvDepth.z));shadow+=uvw2.x*uvw0.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[2],v[0]),layer,uvDepth.z));shadow+=uvw0.x*uvw1.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[0],v[1]),layer,uvDepth.z));shadow+=uvw1.x*uvw1.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[1],v[1]),layer,uvDepth.z));shadow+=uvw2.x*uvw1.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[2],v[1]),layer,uvDepth.z));shadow+=uvw0.x*uvw2.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[0],v[2]),layer,uvDepth.z));shadow+=uvw1.x*uvw2.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[1],v[2]),layer,uvDepth.z));shadow+=uvw2.x*uvw2.y*texture2D(shadowSampler,vec4(base_uv.xy+vec2(u[2],v[2]),layer,uvDepth.z));shadow=shadow/144.;shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);} #define inline float computeShadowWithPCF1(vec4 vPositionFromLight,float depthMetric,highp sampler2DShadow shadowSampler,float darkness,float frustumEdgeFalloff) {if (depthMetric>1.0 || depthMetric<0.0) {return 1.0;} else {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=ZINCLIP;float shadow=TEXTUREFUNC(shadowSampler,uvDepth,0.);shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);}} #define inline float computeShadowWithPCF3(vec4 vPositionFromLight,float depthMetric,highp sampler2DShadow shadowSampler,vec2 shadowMapSizeAndInverse,float darkness,float frustumEdgeFalloff) {if (depthMetric>1.0 || depthMetric<0.0) {return 1.0;} else {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=ZINCLIP;vec2 uv=uvDepth.xy*shadowMapSizeAndInverse.x; uv+=0.5; vec2 st=fract(uv); vec2 base_uv=floor(uv)-0.5; base_uv*=shadowMapSizeAndInverse.y; vec2 uvw0=3.-2.*st;vec2 uvw1=1.+2.*st;vec2 u=vec2((2.-st.x)/uvw0.x-1.,st.x/uvw1.x+1.)*shadowMapSizeAndInverse.y;vec2 v=vec2((2.-st.y)/uvw0.y-1.,st.y/uvw1.y+1.)*shadowMapSizeAndInverse.y;float shadow=0.;shadow+=uvw0.x*uvw0.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[0],v[0]),uvDepth.z),0.);shadow+=uvw1.x*uvw0.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[1],v[0]),uvDepth.z),0.);shadow+=uvw0.x*uvw1.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[0],v[1]),uvDepth.z),0.);shadow+=uvw1.x*uvw1.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[1],v[1]),uvDepth.z),0.);shadow=shadow/16.;shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);}} #define inline float computeShadowWithPCF5(vec4 vPositionFromLight,float depthMetric,highp sampler2DShadow shadowSampler,vec2 shadowMapSizeAndInverse,float darkness,float frustumEdgeFalloff) {if (depthMetric>1.0 || depthMetric<0.0) {return 1.0;} else {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=ZINCLIP;vec2 uv=uvDepth.xy*shadowMapSizeAndInverse.x; uv+=0.5; vec2 st=fract(uv); vec2 base_uv=floor(uv)-0.5; base_uv*=shadowMapSizeAndInverse.y; vec2 uvw0=4.-3.*st;vec2 uvw1=vec2(7.);vec2 uvw2=1.+3.*st;vec3 u=vec3((3.-2.*st.x)/uvw0.x-2.,(3.+st.x)/uvw1.x,st.x/uvw2.x+2.)*shadowMapSizeAndInverse.y;vec3 v=vec3((3.-2.*st.y)/uvw0.y-2.,(3.+st.y)/uvw1.y,st.y/uvw2.y+2.)*shadowMapSizeAndInverse.y;float 
shadow=0.;shadow+=uvw0.x*uvw0.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[0],v[0]),uvDepth.z),0.);shadow+=uvw1.x*uvw0.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[1],v[0]),uvDepth.z),0.);shadow+=uvw2.x*uvw0.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[2],v[0]),uvDepth.z),0.);shadow+=uvw0.x*uvw1.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[0],v[1]),uvDepth.z),0.);shadow+=uvw1.x*uvw1.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[1],v[1]),uvDepth.z),0.);shadow+=uvw2.x*uvw1.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[2],v[1]),uvDepth.z),0.);shadow+=uvw0.x*uvw2.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[0],v[2]),uvDepth.z),0.);shadow+=uvw1.x*uvw2.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[1],v[2]),uvDepth.z),0.);shadow+=uvw2.x*uvw2.y*TEXTUREFUNC(shadowSampler,vec3(base_uv.xy+vec2(u[2],v[2]),uvDepth.z),0.);shadow=shadow/144.;shadow=mix(darkness,1.,shadow);return computeFallOff(shadow,clipSpace.xy,frustumEdgeFalloff);}} const vec3 PoissonSamplers32[64]=vec3[64]( vec3(0.06407013,0.05409927,0.), vec3(0.7366577,0.5789394,0.), vec3(-0.6270542,-0.5320278,0.), vec3(-0.4096107,0.8411095,0.), vec3(0.6849564,-0.4990818,0.), vec3(-0.874181,-0.04579735,0.), vec3(0.9989998,0.0009880066,0.), vec3(-0.004920578,-0.9151649,0.), vec3(0.1805763,0.9747483,0.), vec3(-0.2138451,0.2635818,0.), vec3(0.109845,0.3884785,0.), vec3(0.06876755,-0.3581074,0.), vec3(0.374073,-0.7661266,0.), vec3(0.3079132,-0.1216763,0.), vec3(-0.3794335,-0.8271583,0.), vec3(-0.203878,-0.07715034,0.), vec3(0.5912697,0.1469799,0.), vec3(-0.88069,0.3031784,0.), vec3(0.5040108,0.8283722,0.), vec3(-0.5844124,0.5494877,0.), vec3(0.6017799,-0.1726654,0.), vec3(-0.5554981,0.1559997,0.), vec3(-0.3016369,-0.3900928,0.), vec3(-0.5550632,-0.1723762,0.), vec3(0.925029,0.2995041,0.), vec3(-0.2473137,0.5538505,0.), vec3(0.9183037,-0.2862392,0.), vec3(0.2469421,0.6718712,0.), vec3(0.3916397,-0.4328209,0.), vec3(-0.03576927,-0.6220032,0.), vec3(-0.04661255,0.7995201,0.), vec3(0.4402924,0.3640312,0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.), vec3(0.) 
);const vec3 PoissonSamplers64[64]=vec3[64]( vec3(-0.613392,0.617481,0.), vec3(0.170019,-0.040254,0.), vec3(-0.299417,0.791925,0.), vec3(0.645680,0.493210,0.), vec3(-0.651784,0.717887,0.), vec3(0.421003,0.027070,0.), vec3(-0.817194,-0.271096,0.), vec3(-0.705374,-0.668203,0.), vec3(0.977050,-0.108615,0.), vec3(0.063326,0.142369,0.), vec3(0.203528,0.214331,0.), vec3(-0.667531,0.326090,0.), vec3(-0.098422,-0.295755,0.), vec3(-0.885922,0.215369,0.), vec3(0.566637,0.605213,0.), vec3(0.039766,-0.396100,0.), vec3(0.751946,0.453352,0.), vec3(0.078707,-0.715323,0.), vec3(-0.075838,-0.529344,0.), vec3(0.724479,-0.580798,0.), vec3(0.222999,-0.215125,0.), vec3(-0.467574,-0.405438,0.), vec3(-0.248268,-0.814753,0.), vec3(0.354411,-0.887570,0.), vec3(0.175817,0.382366,0.), vec3(0.487472,-0.063082,0.), vec3(-0.084078,0.898312,0.), vec3(0.488876,-0.783441,0.), vec3(0.470016,0.217933,0.), vec3(-0.696890,-0.549791,0.), vec3(-0.149693,0.605762,0.), vec3(0.034211,0.979980,0.), vec3(0.503098,-0.308878,0.), vec3(-0.016205,-0.872921,0.), vec3(0.385784,-0.393902,0.), vec3(-0.146886,-0.859249,0.), vec3(0.643361,0.164098,0.), vec3(0.634388,-0.049471,0.), vec3(-0.688894,0.007843,0.), vec3(0.464034,-0.188818,0.), vec3(-0.440840,0.137486,0.), vec3(0.364483,0.511704,0.), vec3(0.034028,0.325968,0.), vec3(0.099094,-0.308023,0.), vec3(0.693960,-0.366253,0.), vec3(0.678884,-0.204688,0.), vec3(0.001801,0.780328,0.), vec3(0.145177,-0.898984,0.), vec3(0.062655,-0.611866,0.), vec3(0.315226,-0.604297,0.), vec3(-0.780145,0.486251,0.), vec3(-0.371868,0.882138,0.), vec3(0.200476,0.494430,0.), vec3(-0.494552,-0.711051,0.), vec3(0.612476,0.705252,0.), vec3(-0.578845,-0.768792,0.), vec3(-0.772454,-0.090976,0.), vec3(0.504440,0.372295,0.), vec3(0.155736,0.065157,0.), vec3(0.391522,0.849605,0.), vec3(-0.620106,-0.328104,0.), vec3(0.789239,-0.419965,0.), vec3(-0.545396,0.538133,0.), vec3(-0.178564,-0.596057,0.) 
); #define inline float computeShadowWithCSMPCSS(float layer,vec4 vPositionFromLight,float depthMetric,highp sampler2DArray depthSampler,highp sampler2DArrayShadow shadowSampler,float shadowMapSizeInverse,float lightSizeUV,float darkness,float frustumEdgeFalloff,int searchTapCount,int pcfTapCount,vec3[64] poissonSamplers,vec2 lightSizeUVCorrection,float depthCorrection,float penumbraDarkness) {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=clamp(ZINCLIP,0.,GREATEST_LESS_THAN_ONE);vec4 uvDepthLayer=vec4(uvDepth.x,uvDepth.y,layer,uvDepth.z);float blockerDepth=0.0;float sumBlockerDepth=0.0;float numBlocker=0.0;for (int i=0; i1.0 || depthMetric<0.0) {return 1.0;} else {vec3 clipSpace=vPositionFromLight.xyz/vPositionFromLight.w;vec3 uvDepth=vec3(0.5*clipSpace.xyz+vec3(0.5));uvDepth.z=ZINCLIP;float blockerDepth=0.0;float sumBlockerDepth=0.0;float numBlocker=0.0;for (int i=0; i(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump,_SAMPLERNAME_,bump) #endif #if defined(DETAIL) #include(_DEFINENAME_,DETAIL,_VARYINGNAME_,Detail,_SAMPLERNAME_,detail) #endif #if defined(BUMP) && defined(PARALLAX) const float minSamples=4.;const float maxSamples=15.;const int iMaxSamples=15;vec2 parallaxOcclusion(vec3 vViewDirCoT,vec3 vNormalCoT,vec2 texCoord,float parallaxScale) {float parallaxLimit=length(vViewDirCoT.xy)/vViewDirCoT.z;parallaxLimit*=parallaxScale;vec2 vOffsetDir=normalize(vViewDirCoT.xy);vec2 vMaxOffset=vOffsetDir*parallaxLimit;float numSamples=maxSamples+(dot(vViewDirCoT,vNormalCoT)*(minSamples-maxSamples));float stepSize=1.0/numSamples;float currRayHeight=1.0;vec2 vCurrOffset=vec2(0,0);vec2 vLastOffset=vec2(0,0);float lastSampledHeight=1.0;float currSampledHeight=1.0;bool keepWorking=true;for (int i=0; icurrRayHeight) {float delta1=currSampledHeight-currRayHeight;float delta2=(currRayHeight+stepSize)-lastSampledHeight;float ratio=delta1/(delta1+delta2);vCurrOffset=(ratio)* vLastOffset+(1.0-ratio)*vCurrOffset;keepWorking=false;} else {currRayHeight-=stepSize;vLastOffset=vCurrOffset; #ifdef PARALLAX_RHS vCurrOffset-=stepSize*vMaxOffset; #else vCurrOffset+=stepSize*vMaxOffset; #endif lastSampledHeight=currSampledHeight;}} return vCurrOffset;} vec2 parallaxOffset(vec3 viewDir,float heightScale) {float height=texture2D(bumpSampler,vBumpUV).w;vec2 texCoordOffset=heightScale*viewDir.xy*height; #ifdef PARALLAX_RHS return texCoordOffset; #else return -texCoordOffset; #endif } #endif `; je.IncludesShadersStore[Wue] = jue; const Xue = "clipPlaneFragmentDeclaration", Yue = `#ifdef CLIPPLANE varying float fClipDistance; #endif #ifdef CLIPPLANE2 varying float fClipDistance2; #endif #ifdef CLIPPLANE3 varying float fClipDistance3; #endif #ifdef CLIPPLANE4 varying float fClipDistance4; #endif #ifdef CLIPPLANE5 varying float fClipDistance5; #endif #ifdef CLIPPLANE6 varying float fClipDistance6; #endif `; je.IncludesShadersStore[Xue] = Yue; const Que = "logDepthDeclaration", $ue = `#ifdef LOGARITHMICDEPTH uniform float logarithmicDepthConstant;varying float vFragmentDepth; #endif `; je.IncludesShadersStore[Que] = $ue; const Zue = "fogFragmentDeclaration", que = `#ifdef FOG #define FOGMODE_NONE 0. #define FOGMODE_EXP 1. #define FOGMODE_EXP2 2. #define FOGMODE_LINEAR 3. 
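// Assumed packing, as read by CalcFogFactor below: vFogInfos.x is the mode (one of the
// FOGMODE_* constants above), .y the fog start, .z the fog end and .w the density.
// For example, in EXP2 mode with distance d and density k the factor below works out to
// 1.0/pow(E, d*d*k*k), clamped to [0,1] before mixing toward vFogColor.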
#define E 2.71828 uniform vec4 vFogInfos;uniform vec3 vFogColor;varying vec3 vFogDistance;float CalcFogFactor() {float fogCoeff=1.0;float fogStart=vFogInfos.y;float fogEnd=vFogInfos.z;float fogDensity=vFogInfos.w;float fogDistance=length(vFogDistance);if (FOGMODE_LINEAR==vFogInfos.x) {fogCoeff=(fogEnd-fogDistance)/(fogEnd-fogStart);} else if (FOGMODE_EXP==vFogInfos.x) {fogCoeff=1.0/pow(E,fogDistance*fogDensity);} else if (FOGMODE_EXP2==vFogInfos.x) {fogCoeff=1.0/pow(E,fogDistance*fogDistance*fogDensity*fogDensity);} return clamp(fogCoeff,0.0,1.0);} #endif `; je.IncludesShadersStore[Zue] = que; const Jue = "clipPlaneFragment", ehe = `#if defined(CLIPPLANE) || defined(CLIPPLANE2) || defined(CLIPPLANE3) || defined(CLIPPLANE4) || defined(CLIPPLANE5) || defined(CLIPPLANE6) if (false) {} #endif #ifdef CLIPPLANE else if (fClipDistance>0.0) {discard;} #endif #ifdef CLIPPLANE2 else if (fClipDistance2>0.0) {discard;} #endif #ifdef CLIPPLANE3 else if (fClipDistance3>0.0) {discard;} #endif #ifdef CLIPPLANE4 else if (fClipDistance4>0.0) {discard;} #endif #ifdef CLIPPLANE5 else if (fClipDistance5>0.0) {discard;} #endif #ifdef CLIPPLANE6 else if (fClipDistance6>0.0) {discard;} #endif `; je.IncludesShadersStore[Jue] = ehe; const the = "bumpFragment", ihe = `vec2 uvOffset=vec2(0.0,0.0); #if defined(BUMP) || defined(PARALLAX) || defined(DETAIL) #ifdef NORMALXYSCALE float normalScale=1.0; #elif defined(BUMP) float normalScale=vBumpInfos.y; #else float normalScale=1.0; #endif #if defined(TANGENT) && defined(NORMAL) mat3 TBN=vTBN; #elif defined(BUMP) vec2 TBNUV=gl_FrontFacing ? vBumpUV : -vBumpUV;mat3 TBN=cotangent_frame(normalW*normalScale,vPositionW,TBNUV,vTangentSpaceParams); #else vec2 TBNUV=gl_FrontFacing ? vDetailUV : -vDetailUV;mat3 TBN=cotangent_frame(normalW*normalScale,vPositionW,TBNUV,vec2(1.,1.)); #endif #elif defined(ANISOTROPIC) #if defined(TANGENT) && defined(NORMAL) mat3 TBN=vTBN; #else vec2 TBNUV=gl_FrontFacing ? 
vMainUV1 : -vMainUV1;mat3 TBN=cotangent_frame(normalW,vPositionW,TBNUV,vec2(1.,1.)); #endif #endif #ifdef PARALLAX mat3 invTBN=transposeMat3(TBN); #ifdef PARALLAXOCCLUSION uvOffset=parallaxOcclusion(invTBN*-viewDirectionW,invTBN*normalW,vBumpUV,vBumpInfos.z); #else uvOffset=parallaxOffset(invTBN*viewDirectionW,vBumpInfos.z); #endif #endif #ifdef DETAIL vec4 detailColor=texture2D(detailSampler,vDetailUV+uvOffset);vec2 detailNormalRG=detailColor.wy*2.0-1.0;float detailNormalB=sqrt(1.-saturate(dot(detailNormalRG,detailNormalRG)));vec3 detailNormal=vec3(detailNormalRG,detailNormalB); #endif #ifdef BUMP #ifdef OBJECTSPACE_NORMALMAP #define CUSTOM_FRAGMENT_BUMP_FRAGMENT normalW=normalize(texture2D(bumpSampler,vBumpUV).xyz *2.0-1.0);normalW=normalize(mat3(normalMatrix)*normalW); #elif !defined(DETAIL) normalW=perturbNormal(TBN,texture2D(bumpSampler,vBumpUV+uvOffset).xyz,vBumpInfos.y); #else vec3 bumpNormal=texture2D(bumpSampler,vBumpUV+uvOffset).xyz*2.0-1.0; #if DETAIL_NORMALBLENDMETHOD==0 detailNormal.xy*=vDetailInfos.z;vec3 blendedNormal=normalize(vec3(bumpNormal.xy+detailNormal.xy,bumpNormal.z*detailNormal.z)); #elif DETAIL_NORMALBLENDMETHOD==1 detailNormal.xy*=vDetailInfos.z;bumpNormal+=vec3(0.0,0.0,1.0);detailNormal*=vec3(-1.0,-1.0,1.0);vec3 blendedNormal=bumpNormal*dot(bumpNormal,detailNormal)/bumpNormal.z-detailNormal; #endif normalW=perturbNormalBase(TBN,blendedNormal,vBumpInfos.y); #endif #elif defined(DETAIL) detailNormal.xy*=vDetailInfos.z;normalW=perturbNormalBase(TBN,detailNormal,vDetailInfos.z); #endif `; je.IncludesShadersStore[the] = ihe; const rhe = "decalFragment", she = `#ifdef DECAL #ifdef GAMMADECAL decalColor.rgb=toLinearSpace(decalColor.rgb); #endif #ifdef DECAL_SMOOTHALPHA decalColor.a*=decalColor.a; #endif surfaceAlbedo.rgb=mix(surfaceAlbedo.rgb,decalColor.rgb,decalColor.a); #endif `; je.IncludesShadersStore[rhe] = she; const nhe = "depthPrePass", ahe = `#ifdef DEPTHPREPASS gl_FragColor=vec4(0.,0.,0.,1.0);return; #endif `; je.IncludesShadersStore[nhe] = ahe; const ohe = "lightFragment", lhe = `#ifdef LIGHT{X} #if defined(SHADOWONLY) || defined(LIGHTMAP) && defined(LIGHTMAPEXCLUDED{X}) && defined(LIGHTMAPNOSPECULAR{X}) #else #ifdef PBR #ifdef SPOTLIGHT{X} preInfo=computePointAndSpotPreLightingInfo(light{X}.vLightData,viewDirectionW,normalW); #elif defined(POINTLIGHT{X}) preInfo=computePointAndSpotPreLightingInfo(light{X}.vLightData,viewDirectionW,normalW); #elif defined(HEMILIGHT{X}) preInfo=computeHemisphericPreLightingInfo(light{X}.vLightData,viewDirectionW,normalW); #elif defined(DIRLIGHT{X}) preInfo=computeDirectionalPreLightingInfo(light{X}.vLightData,viewDirectionW,normalW); #endif preInfo.NdotV=NdotV; #ifdef SPOTLIGHT{X} #ifdef LIGHT_FALLOFF_GLTF{X} preInfo.attenuation=computeDistanceLightFalloff_GLTF(preInfo.lightDistanceSquared,light{X}.vLightFalloff.y);preInfo.attenuation*=computeDirectionalLightFalloff_GLTF(light{X}.vLightDirection.xyz,preInfo.L,light{X}.vLightFalloff.z,light{X}.vLightFalloff.w); #elif defined(LIGHT_FALLOFF_PHYSICAL{X}) preInfo.attenuation=computeDistanceLightFalloff_Physical(preInfo.lightDistanceSquared);preInfo.attenuation*=computeDirectionalLightFalloff_Physical(light{X}.vLightDirection.xyz,preInfo.L,light{X}.vLightDirection.w); #elif defined(LIGHT_FALLOFF_STANDARD{X}) preInfo.attenuation=computeDistanceLightFalloff_Standard(preInfo.lightOffset,light{X}.vLightFalloff.x);preInfo.attenuation*=computeDirectionalLightFalloff_Standard(light{X}.vLightDirection.xyz,preInfo.L,light{X}.vLightDirection.w,light{X}.vLightData.w); #else 
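// Fallback falloff path (no LIGHT_FALLOFF_GLTF / PHYSICAL / STANDARD define): distance
// attenuation is taken from vLightFalloff.xy and combined with the spot cone term fed by
// vLightDirection.w, vLightData.w and vLightFalloff.zw, as passed to the helpers below.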
preInfo.attenuation=computeDistanceLightFalloff(preInfo.lightOffset,preInfo.lightDistanceSquared,light{X}.vLightFalloff.x,light{X}.vLightFalloff.y);preInfo.attenuation*=computeDirectionalLightFalloff(light{X}.vLightDirection.xyz,preInfo.L,light{X}.vLightDirection.w,light{X}.vLightData.w,light{X}.vLightFalloff.z,light{X}.vLightFalloff.w); #endif #elif defined(POINTLIGHT{X}) #ifdef LIGHT_FALLOFF_GLTF{X} preInfo.attenuation=computeDistanceLightFalloff_GLTF(preInfo.lightDistanceSquared,light{X}.vLightFalloff.y); #elif defined(LIGHT_FALLOFF_PHYSICAL{X}) preInfo.attenuation=computeDistanceLightFalloff_Physical(preInfo.lightDistanceSquared); #elif defined(LIGHT_FALLOFF_STANDARD{X}) preInfo.attenuation=computeDistanceLightFalloff_Standard(preInfo.lightOffset,light{X}.vLightFalloff.x); #else preInfo.attenuation=computeDistanceLightFalloff(preInfo.lightOffset,preInfo.lightDistanceSquared,light{X}.vLightFalloff.x,light{X}.vLightFalloff.y); #endif #else preInfo.attenuation=1.0; #endif #ifdef HEMILIGHT{X} preInfo.roughness=roughness; #else preInfo.roughness=adjustRoughnessFromLightProperties(roughness,light{X}.vLightSpecular.a,preInfo.lightDistance); #endif #ifdef IRIDESCENCE preInfo.iridescenceIntensity=iridescenceIntensity; #endif #ifdef HEMILIGHT{X} info.diffuse=computeHemisphericDiffuseLighting(preInfo,light{X}.vLightDiffuse.rgb,light{X}.vLightGround); #elif defined(SS_TRANSLUCENCY) info.diffuse=computeDiffuseAndTransmittedLighting(preInfo,light{X}.vLightDiffuse.rgb,subSurfaceOut.transmittance); #else info.diffuse=computeDiffuseLighting(preInfo,light{X}.vLightDiffuse.rgb); #endif #ifdef SPECULARTERM #ifdef ANISOTROPIC info.specular=computeAnisotropicSpecularLighting(preInfo,viewDirectionW,normalW,anisotropicOut.anisotropicTangent,anisotropicOut.anisotropicBitangent,anisotropicOut.anisotropy,clearcoatOut.specularEnvironmentR0,specularEnvironmentR90,AARoughnessFactors.x,light{X}.vLightDiffuse.rgb); #else info.specular=computeSpecularLighting(preInfo,normalW,clearcoatOut.specularEnvironmentR0,specularEnvironmentR90,AARoughnessFactors.x,light{X}.vLightDiffuse.rgb); #endif #endif #ifdef SHEEN #ifdef SHEEN_LINKWITHALBEDO preInfo.roughness=sheenOut.sheenIntensity; #else #ifdef HEMILIGHT{X} preInfo.roughness=sheenOut.sheenRoughness; #else preInfo.roughness=adjustRoughnessFromLightProperties(sheenOut.sheenRoughness,light{X}.vLightSpecular.a,preInfo.lightDistance); #endif #endif info.sheen=computeSheenLighting(preInfo,normalW,sheenOut.sheenColor,specularEnvironmentR90,AARoughnessFactors.x,light{X}.vLightDiffuse.rgb); #endif #ifdef CLEARCOAT #ifdef HEMILIGHT{X} preInfo.roughness=clearcoatOut.clearCoatRoughness; #else preInfo.roughness=adjustRoughnessFromLightProperties(clearcoatOut.clearCoatRoughness,light{X}.vLightSpecular.a,preInfo.lightDistance); #endif info.clearCoat=computeClearCoatLighting(preInfo,clearcoatOut.clearCoatNormalW,clearcoatOut.clearCoatAARoughnessFactors.x,clearcoatOut.clearCoatIntensity,light{X}.vLightDiffuse.rgb); #ifdef CLEARCOAT_TINT absorption=computeClearCoatLightingAbsorption(clearcoatOut.clearCoatNdotVRefract,preInfo.L,clearcoatOut.clearCoatNormalW,clearcoatOut.clearCoatColor,clearcoatOut.clearCoatThickness,clearcoatOut.clearCoatIntensity);info.diffuse*=absorption; #ifdef SPECULARTERM info.specular*=absorption; #endif #endif info.diffuse*=info.clearCoat.w; #ifdef SPECULARTERM info.specular*=info.clearCoat.w; #endif #ifdef SHEEN info.sheen*=info.clearCoat.w; #endif #endif #else #ifdef SPOTLIGHT{X} 
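// Non-PBR branch: the legacy helpers from lightsFragmentFunctions (computeSpotLighting,
// computeHemisphericLighting, computeLighting) evaluate Blinn-Phong style diffuse/specular
// per light type, with vLightDiffuse.a used as the range and glossiness as the exponent.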
info=computeSpotLighting(viewDirectionW,normalW,light{X}.vLightData,light{X}.vLightDirection,light{X}.vLightDiffuse.rgb,light{X}.vLightSpecular.rgb,light{X}.vLightDiffuse.a,glossiness); #elif defined(HEMILIGHT{X}) info=computeHemisphericLighting(viewDirectionW,normalW,light{X}.vLightData,light{X}.vLightDiffuse.rgb,light{X}.vLightSpecular.rgb,light{X}.vLightGround,glossiness); #elif defined(POINTLIGHT{X}) || defined(DIRLIGHT{X}) info=computeLighting(viewDirectionW,normalW,light{X}.vLightData,light{X}.vLightDiffuse.rgb,light{X}.vLightSpecular.rgb,light{X}.vLightDiffuse.a,glossiness); #endif #endif #ifdef PROJECTEDLIGHTTEXTURE{X} info.diffuse*=computeProjectionTextureDiffuseLighting(projectionLightSampler{X},textureProjectionMatrix{X}); #endif #endif #ifdef SHADOW{X} #ifdef SHADOWCSM{X} for (int i=0; i=0.) {index{X}=i;break;}} #ifdef SHADOWCSMUSESHADOWMAXZ{X} if (index{X}>=0) #endif { #if defined(SHADOWPCF{X}) #if defined(SHADOWLOWQUALITY{X}) shadow=computeShadowWithCSMPCF1(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #elif defined(SHADOWMEDIUMQUALITY{X}) shadow=computeShadowWithCSMPCF3(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #else shadow=computeShadowWithCSMPCF5(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWPCSS{X}) #if defined(SHADOWLOWQUALITY{X}) shadow=computeShadowWithCSMPCSS16(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #elif defined(SHADOWMEDIUMQUALITY{X}) shadow=computeShadowWithCSMPCSS32(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #else shadow=computeShadowWithCSMPCSS64(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #endif #else shadow=computeShadowCSM(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #ifdef SHADOWCSMDEBUG{X} shadowDebug{X}=vec3(shadow)*vCascadeColorsMultiplier{X}[index{X}]; #endif #ifndef SHADOWCSMNOBLEND{X} float frustumLength=frustumLengths{X}[index{X}];float diffRatio=clamp(diff{X}/frustumLength,0.,1.)*cascadeBlendFactor{X};if (index{X}<(SHADOWCSMNUM_CASCADES{X}-1) && diffRatio<1.) 
{index{X}+=1;float nextShadow=0.; #if defined(SHADOWPCF{X}) #if defined(SHADOWLOWQUALITY{X}) nextShadow=computeShadowWithCSMPCF1(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #elif defined(SHADOWMEDIUMQUALITY{X}) nextShadow=computeShadowWithCSMPCF3(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #else nextShadow=computeShadowWithCSMPCF5(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWPCSS{X}) #if defined(SHADOWLOWQUALITY{X}) nextShadow=computeShadowWithCSMPCSS16(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #elif defined(SHADOWMEDIUMQUALITY{X}) nextShadow=computeShadowWithCSMPCSS32(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #else nextShadow=computeShadowWithCSMPCSS64(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w,lightSizeUVCorrection{X}[index{X}],depthCorrection{X}[index{X}],penumbraDarkness{X}); #endif #else nextShadow=computeShadowCSM(float(index{X}),vPositionFromLight{X}[index{X}],vDepthMetric{X}[index{X}],shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif shadow=mix(nextShadow,shadow,diffRatio); #ifdef SHADOWCSMDEBUG{X} shadowDebug{X}=mix(vec3(nextShadow)*vCascadeColorsMultiplier{X}[index{X}],shadowDebug{X},diffRatio); #endif } #endif } #elif defined(SHADOWCLOSEESM{X}) #if defined(SHADOWCUBE{X}) shadow=computeShadowWithCloseESMCube(vPositionW,light{X}.vLightData.xyz,shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.z,light{X}.depthValues); #else shadow=computeShadowWithCloseESM(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.z,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWESM{X}) #if defined(SHADOWCUBE{X}) shadow=computeShadowWithESMCube(vPositionW,light{X}.vLightData.xyz,shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.z,light{X}.depthValues); #else shadow=computeShadowWithESM(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.z,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWPOISSON{X}) #if defined(SHADOWCUBE{X}) shadow=computeShadowWithPoissonSamplingCube(vPositionW,light{X}.vLightData.xyz,shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.x,light{X}.depthValues); #else shadow=computeShadowWithPoissonSampling(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWPCF{X}) #if defined(SHADOWLOWQUALITY{X}) shadow=computeShadowWithPCF1(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #elif 
defined(SHADOWMEDIUMQUALITY{X}) shadow=computeShadowWithPCF3(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #else shadow=computeShadowWithPCF5(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.yz,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #elif defined(SHADOWPCSS{X}) #if defined(SHADOWLOWQUALITY{X}) shadow=computeShadowWithPCSS16(vPositionFromLight{X},vDepthMetric{X},depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #elif defined(SHADOWMEDIUMQUALITY{X}) shadow=computeShadowWithPCSS32(vPositionFromLight{X},vDepthMetric{X},depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #else shadow=computeShadowWithPCSS64(vPositionFromLight{X},vDepthMetric{X},depthSampler{X},shadowSampler{X},light{X}.shadowsInfo.y,light{X}.shadowsInfo.z,light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #else #if defined(SHADOWCUBE{X}) shadow=computeShadowCube(vPositionW,light{X}.vLightData.xyz,shadowSampler{X},light{X}.shadowsInfo.x,light{X}.depthValues); #else shadow=computeShadow(vPositionFromLight{X},vDepthMetric{X},shadowSampler{X},light{X}.shadowsInfo.x,light{X}.shadowsInfo.w); #endif #endif #ifdef SHADOWONLY #ifndef SHADOWINUSE #define SHADOWINUSE #endif globalShadow+=shadow;shadowLightCount+=1.0; #endif #else shadow=1.; #endif aggShadow+=shadow;numLights+=1.0; #ifndef SHADOWONLY #ifdef CUSTOMUSERLIGHTING diffuseBase+=computeCustomDiffuseLighting(info,diffuseBase,shadow); #ifdef SPECULARTERM specularBase+=computeCustomSpecularLighting(info,specularBase,shadow); #endif #elif defined(LIGHTMAP) && defined(LIGHTMAPEXCLUDED{X}) diffuseBase+=lightmapColor.rgb*shadow; #ifdef SPECULARTERM #ifndef LIGHTMAPNOSPECULAR{X} specularBase+=info.specular*shadow*lightmapColor.rgb; #endif #endif #ifdef CLEARCOAT #ifndef LIGHTMAPNOSPECULAR{X} clearCoatBase+=info.clearCoat.rgb*shadow*lightmapColor.rgb; #endif #endif #ifdef SHEEN #ifndef LIGHTMAPNOSPECULAR{X} sheenBase+=info.sheen.rgb*shadow; #endif #endif #else #ifdef SHADOWCSMDEBUG{X} diffuseBase+=info.diffuse*shadowDebug{X}; #else diffuseBase+=info.diffuse*shadow; #endif #ifdef SPECULARTERM specularBase+=info.specular*shadow; #endif #ifdef CLEARCOAT clearCoatBase+=info.clearCoat.rgb*shadow; #endif #ifdef SHEEN sheenBase+=info.sheen.rgb*shadow; #endif #endif #endif #endif `; je.IncludesShadersStore[ohe] = lhe; const che = "logDepthFragment", uhe = `#ifdef LOGARITHMICDEPTH gl_FragDepthEXT=log2(vFragmentDepth)*logarithmicDepthConstant*0.5; #endif `; je.IncludesShadersStore[che] = uhe; const hhe = "fogFragment", dhe = `#ifdef FOG float fog=CalcFogFactor(); #ifdef PBR fog=toLinearSpace(fog); #endif color.rgb=mix(vFogColor,color.rgb,fog); #endif `; je.IncludesShadersStore[hhe] = dhe; const fhe = "oitFragment", phe = `#ifdef ORDER_INDEPENDENT_TRANSPARENCY float fragDepth=gl_FragCoord.z; #ifdef ORDER_INDEPENDENT_TRANSPARENCY_16BITS uint halfFloat=packHalf2x16(vec2(fragDepth));vec2 full=unpackHalf2x16(halfFloat);fragDepth=full.x; #endif ivec2 fragCoord=ivec2(gl_FragCoord.xy);vec2 lastDepth=texelFetch(oitDepthSampler,fragCoord,0).rg;vec4 lastFrontColor=texelFetch(oitFrontColorSampler,fragCoord,0);depth.rg=vec2(-MAX_DEPTH);frontColor=lastFrontColor;backColor=vec4(0.0); #ifdef USE_REVERSE_DEPTHBUFFER float furthestDepth=-lastDepth.x;float nearestDepth=lastDepth.y; #else float nearestDepth=-lastDepth.x;float 
furthestDepth=lastDepth.y; #endif float alphaMultiplier=1.0-lastFrontColor.a; #ifdef USE_REVERSE_DEPTHBUFFER if (fragDepth>nearestDepth || fragDepthfurthestDepth) { #endif return;} #ifdef USE_REVERSE_DEPTHBUFFER if (fragDepthfurthestDepth) { #else if (fragDepth>nearestDepth && fragDepth #if defined(BUMP) || !defined(NORMAL) #extension GL_OES_standard_derivatives : enable #endif #include[SCENE_MRT_COUNT] #include #define CUSTOM_FRAGMENT_BEGIN #ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif varying vec3 vPositionW; #ifdef NORMAL varying vec3 vNormalW; #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) varying vec4 vColor; #endif #include[1..7] #include #include<__decl__lightFragment>[0..maxSimultaneousLights] #include #include #include(_DEFINENAME_,DIFFUSE,_VARYINGNAME_,Diffuse,_SAMPLERNAME_,diffuse) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient,_SAMPLERNAME_,ambient) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity,_SAMPLERNAME_,opacity) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive,_SAMPLERNAME_,emissive) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap,_SAMPLERNAME_,lightmap) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal,_SAMPLERNAME_,decal) #ifdef REFRACTION #ifdef REFRACTIONMAP_3D uniform samplerCube refractionCubeSampler; #else uniform sampler2D refraction2DSampler; #endif #endif #if defined(SPECULARTERM) #include(_DEFINENAME_,SPECULAR,_VARYINGNAME_,Specular,_SAMPLERNAME_,specular) #endif #include #ifdef REFLECTION #ifdef REFLECTIONMAP_3D uniform samplerCube reflectionCubeSampler; #else uniform sampler2D reflection2DSampler; #endif #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #else #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #endif #include #endif #include #include #include #include #include #include #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include vec3 viewDirectionW=normalize(vEyePosition.xyz-vPositionW);vec4 baseColor=vec4(1.,1.,1.,1.);vec3 diffuseColor=vDiffuseColor.rgb;float alpha=vDiffuseColor.a; #ifdef NORMAL vec3 normalW=normalize(vNormalW); #else vec3 normalW=normalize(-cross(dFdx(vPositionW),dFdy(vPositionW))); #endif #include #ifdef TWOSIDEDLIGHTING normalW=gl_FrontFacing ? 
normalW : -normalW; #endif #ifdef DIFFUSE baseColor=texture2D(diffuseSampler,vDiffuseUV+uvOffset); #if defined(ALPHATEST) && !defined(ALPHATEST_AFTERALLALPHACOMPUTATIONS) if (baseColor.a(surfaceAlbedo,baseColor,GAMMADECAL,_GAMMADECAL_NOTUSED_) #endif #include #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) baseColor.rgb*=vColor.rgb; #endif #ifdef DETAIL baseColor.rgb=baseColor.rgb*2.0*mix(0.5,detailColor.r,vDetailInfos.y); #endif #if defined(DECAL) && defined(DECAL_AFTER_DETAIL) vec4 decalColor=texture2D(decalSampler,vDecalUV+uvOffset); #include(surfaceAlbedo,baseColor,GAMMADECAL,_GAMMADECAL_NOTUSED_) #endif #define CUSTOM_FRAGMENT_UPDATE_DIFFUSE vec3 baseAmbientColor=vec3(1.,1.,1.); #ifdef AMBIENT baseAmbientColor=texture2D(ambientSampler,vAmbientUV+uvOffset).rgb*vAmbientInfos.y; #endif #define CUSTOM_FRAGMENT_BEFORE_LIGHTS #ifdef SPECULARTERM float glossiness=vSpecularColor.a;vec3 specularColor=vSpecularColor.rgb; #ifdef SPECULAR vec4 specularMapColor=texture2D(specularSampler,vSpecularUV+uvOffset);specularColor=specularMapColor.rgb; #ifdef GLOSSINESS glossiness=glossiness*specularMapColor.a; #endif #endif #else float glossiness=0.; #endif vec3 diffuseBase=vec3(0.,0.,0.);lightingInfo info; #ifdef SPECULARTERM vec3 specularBase=vec3(0.,0.,0.); #endif float shadow=1.;float aggShadow=0.;float numLights=0.; #ifdef LIGHTMAP vec4 lightmapColor=texture2D(lightmapSampler,vLightmapUV+uvOffset); #ifdef RGBDLIGHTMAP lightmapColor.rgb=fromRGBD(lightmapColor); #endif lightmapColor.rgb*=vLightmapInfos.y; #endif #include[0..maxSimultaneousLights] aggShadow=aggShadow/numLights;vec4 refractionColor=vec4(0.,0.,0.,1.); #ifdef REFRACTION vec3 refractionVector=normalize(refract(-viewDirectionW,normalW,vRefractionInfos.y)); #ifdef REFRACTIONMAP_3D #ifdef USE_LOCAL_REFRACTIONMAP_CUBIC refractionVector=parallaxCorrectNormal(vPositionW,refractionVector,vRefractionSize,vRefractionPosition); #endif refractionVector.y=refractionVector.y*vRefractionInfos.w;vec4 refractionLookup=textureCube(refractionCubeSampler,refractionVector);if (dot(refractionVector,viewDirectionW)<1.0) {refractionColor=refractionLookup;} #else vec3 vRefractionUVW=vec3(refractionMatrix*(view*vec4(vPositionW+refractionVector*vRefractionInfos.z,1.0)));vec2 refractionCoords=vRefractionUVW.xy/vRefractionUVW.z;refractionCoords.y=1.0-refractionCoords.y;refractionColor=texture2D(refraction2DSampler,refractionCoords); #endif #ifdef RGBDREFRACTION refractionColor.rgb=fromRGBD(refractionColor); #endif #ifdef IS_REFRACTION_LINEAR refractionColor.rgb=toGammaSpace(refractionColor.rgb); #endif refractionColor.rgb*=vRefractionInfos.x; #endif vec4 reflectionColor=vec4(0.,0.,0.,1.); #ifdef REFLECTION vec3 vReflectionUVW=computeReflectionCoords(vec4(vPositionW,1.0),normalW); #ifdef REFLECTIONMAP_OPPOSITEZ vReflectionUVW.z*=-1.0; #endif #ifdef REFLECTIONMAP_3D #ifdef ROUGHNESS float bias=vReflectionInfos.y; #ifdef SPECULARTERM #ifdef SPECULAR #ifdef GLOSSINESS bias*=(1.0-specularMapColor.a); #endif #endif #endif reflectionColor=textureCube(reflectionCubeSampler,vReflectionUVW,bias); #else reflectionColor=textureCube(reflectionCubeSampler,vReflectionUVW); #endif #else vec2 coords=vReflectionUVW.xy; #ifdef REFLECTIONMAP_PROJECTION coords/=vReflectionUVW.z; #endif coords.y=1.0-coords.y;reflectionColor=texture2D(reflection2DSampler,coords); #endif #ifdef RGBDREFLECTION reflectionColor.rgb=fromRGBD(reflectionColor); #endif #ifdef IS_REFLECTION_LINEAR reflectionColor.rgb=toGammaSpace(reflectionColor.rgb); #endif 
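// vReflectionInfos.x scales the sampled reflection (reflection level), mirroring the
// vRefractionInfos.x scale applied to refractionColor above; both are applied after the
// optional RGBD decode and linear-to-gamma conversion.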
reflectionColor.rgb*=vReflectionInfos.x; #ifdef REFLECTIONFRESNEL float reflectionFresnelTerm=computeFresnelTerm(viewDirectionW,normalW,reflectionRightColor.a,reflectionLeftColor.a); #ifdef REFLECTIONFRESNELFROMSPECULAR #ifdef SPECULARTERM reflectionColor.rgb*=specularColor.rgb*(1.0-reflectionFresnelTerm)+reflectionFresnelTerm*reflectionRightColor.rgb; #else reflectionColor.rgb*=reflectionLeftColor.rgb*(1.0-reflectionFresnelTerm)+reflectionFresnelTerm*reflectionRightColor.rgb; #endif #else reflectionColor.rgb*=reflectionLeftColor.rgb*(1.0-reflectionFresnelTerm)+reflectionFresnelTerm*reflectionRightColor.rgb; #endif #endif #endif #ifdef REFRACTIONFRESNEL float refractionFresnelTerm=computeFresnelTerm(viewDirectionW,normalW,refractionRightColor.a,refractionLeftColor.a);refractionColor.rgb*=refractionLeftColor.rgb*(1.0-refractionFresnelTerm)+refractionFresnelTerm*refractionRightColor.rgb; #endif #ifdef OPACITY vec4 opacityMap=texture2D(opacitySampler,vOpacityUV+uvOffset); #ifdef OPACITYRGB opacityMap.rgb=opacityMap.rgb*vec3(0.3,0.59,0.11);alpha*=(opacityMap.x+opacityMap.y+opacityMap.z)* vOpacityInfos.y; #else alpha*=opacityMap.a*vOpacityInfos.y; #endif #endif #if defined(VERTEXALPHA) || defined(INSTANCESCOLOR) && defined(INSTANCES) alpha*=vColor.a; #endif #ifdef OPACITYFRESNEL float opacityFresnelTerm=computeFresnelTerm(viewDirectionW,normalW,opacityParts.z,opacityParts.w);alpha+=opacityParts.x*(1.0-opacityFresnelTerm)+opacityFresnelTerm*opacityParts.y; #endif #ifdef ALPHATEST #ifdef ALPHATEST_AFTERALLALPHACOMPUTATIONS if (alpha #include #ifdef IMAGEPROCESSINGPOSTPROCESS color.rgb=toLinearSpace(color.rgb); #else #ifdef IMAGEPROCESSING color.rgb=toLinearSpace(color.rgb);color=applyImageProcessing(color); #endif #endif color.a*=visibility; #ifdef PREMULTIPLYALPHA color.rgb*=color.a; #endif #define CUSTOM_FRAGMENT_BEFORE_FRAGCOLOR #ifdef PREPASS float writeGeometryInfo=color.a>0.4 ? 
1.0 : 0.0;gl_FragData[0]=color; #ifdef PREPASS_POSITION gl_FragData[PREPASS_POSITION_INDEX]=vec4(vPositionW,writeGeometryInfo); #endif #ifdef PREPASS_VELOCITY vec2 a=(vCurrentPosition.xy/vCurrentPosition.w)*0.5+0.5;vec2 b=(vPreviousPosition.xy/vPreviousPosition.w)*0.5+0.5;vec2 velocity=abs(a-b);velocity=vec2(pow(velocity.x,1.0/3.0),pow(velocity.y,1.0/3.0))*sign(a-b)*0.5+0.5;gl_FragData[PREPASS_VELOCITY_INDEX]=vec4(velocity,0.0,writeGeometryInfo); #endif #ifdef PREPASS_IRRADIANCE gl_FragData[PREPASS_IRRADIANCE_INDEX]=vec4(0.0,0.0,0.0,writeGeometryInfo); #endif #ifdef PREPASS_DEPTH gl_FragData[PREPASS_DEPTH_INDEX]=vec4(vViewPos.z,0.0,0.0,writeGeometryInfo); #endif #ifdef PREPASS_NORMAL #ifdef PREPASS_NORMAL_WORLDSPACE gl_FragData[PREPASS_NORMAL_INDEX]=vec4(normalW,writeGeometryInfo); #else gl_FragData[PREPASS_NORMAL_INDEX]=vec4(normalize((view*vec4(normalW,0.0)).rgb),writeGeometryInfo); #endif #endif #ifdef PREPASS_ALBEDO_SQRT gl_FragData[PREPASS_ALBEDO_SQRT_INDEX]=vec4(0.0,0.0,0.0,writeGeometryInfo); #endif #ifdef PREPASS_REFLECTIVITY #if defined(SPECULARTERM) #if defined(SPECULAR) gl_FragData[PREPASS_REFLECTIVITY_INDEX]=vec4(toLinearSpace(specularMapColor))*writeGeometryInfo; #else gl_FragData[PREPASS_REFLECTIVITY_INDEX]=vec4(toLinearSpace(specularColor),1.0)*writeGeometryInfo; #endif #else gl_FragData[PREPASS_REFLECTIVITY_INDEX]=vec4(0.0,0.0,0.0,1.0)*writeGeometryInfo; #endif #endif #endif #if !defined(PREPASS) || defined(WEBGL2) gl_FragColor=color; #endif #include #if ORDER_INDEPENDENT_TRANSPARENCY if (fragDepth==nearestDepth) {frontColor.rgb+=color.rgb*color.a*alphaMultiplier;frontColor.a=1.0-alphaMultiplier*(1.0-color.a);} else {backColor+=color;} #endif #define CUSTOM_FRAGMENT_MAIN_END } `; je.ShadersStore[_he] = mhe; const ghe = "decalVertexDeclaration", vhe = `#ifdef DECAL uniform vec4 vDecalInfos;uniform mat4 decalMatrix; #endif `; je.IncludesShadersStore[ghe] = vhe; const Ahe = "defaultVertexDeclaration", yhe = `uniform mat4 viewProjection;uniform mat4 view; #ifdef DIFFUSE uniform mat4 diffuseMatrix;uniform vec2 vDiffuseInfos; #endif #ifdef AMBIENT uniform mat4 ambientMatrix;uniform vec2 vAmbientInfos; #endif #ifdef OPACITY uniform mat4 opacityMatrix;uniform vec2 vOpacityInfos; #endif #ifdef EMISSIVE uniform vec2 vEmissiveInfos;uniform mat4 emissiveMatrix; #endif #ifdef LIGHTMAP uniform vec2 vLightmapInfos;uniform mat4 lightmapMatrix; #endif #if defined(SPECULAR) && defined(SPECULARTERM) uniform vec2 vSpecularInfos;uniform mat4 specularMatrix; #endif #ifdef BUMP uniform vec3 vBumpInfos;uniform mat4 bumpMatrix; #endif #ifdef REFLECTION uniform mat4 reflectionMatrix; #endif #ifdef POINTSIZE uniform float pointSize; #endif #ifdef DETAIL uniform vec4 vDetailInfos;uniform mat4 detailMatrix; #endif #include #define ADDITIONAL_VERTEX_DECLARATION `; je.IncludesShadersStore[Ahe] = yhe; const Che = "uvAttributeDeclaration", xhe = `#ifdef UV{X} attribute vec2 uv{X}; #endif `; je.IncludesShadersStore[Che] = xhe; const bhe = "bonesDeclaration", Ehe = `#if NUM_BONE_INFLUENCERS>0 attribute vec4 matricesIndices;attribute vec4 matricesWeights; #if NUM_BONE_INFLUENCERS>4 attribute vec4 matricesIndicesExtra;attribute vec4 matricesWeightsExtra; #endif #ifndef BAKED_VERTEX_ANIMATION_TEXTURE #ifdef BONETEXTURE uniform highp sampler2D boneSampler;uniform float boneTextureWidth; #else uniform mat4 mBones[BonesPerMesh]; #endif #ifdef BONES_VELOCITY_ENABLED uniform mat4 mPreviousBones[BonesPerMesh]; #endif #ifdef BONETEXTURE #define inline mat4 readMatrixFromRawSampler(sampler2D smp,float index) {float 
offset=index *4.0;float dx=1.0/boneTextureWidth;vec4 m0=texture2D(smp,vec2(dx*(offset+0.5),0.));vec4 m1=texture2D(smp,vec2(dx*(offset+1.5),0.));vec4 m2=texture2D(smp,vec2(dx*(offset+2.5),0.));vec4 m3=texture2D(smp,vec2(dx*(offset+3.5),0.));return mat4(m0,m1,m2,m3);} #endif #endif #endif `; je.IncludesShadersStore[bhe] = Ehe; const The = "bakedVertexAnimationDeclaration", She = `#ifdef BAKED_VERTEX_ANIMATION_TEXTURE uniform float bakedVertexAnimationTime;uniform vec2 bakedVertexAnimationTextureSizeInverted;uniform vec4 bakedVertexAnimationSettings;uniform sampler2D bakedVertexAnimationTexture; #ifdef INSTANCES attribute vec4 bakedVertexAnimationSettingsInstanced; #endif #define inline mat4 readMatrixFromRawSamplerVAT(sampler2D smp,float index,float frame) {float offset=index*4.0;float frameUV=(frame+0.5)*bakedVertexAnimationTextureSizeInverted.y;float dx=bakedVertexAnimationTextureSizeInverted.x;vec4 m0=texture2D(smp,vec2(dx*(offset+0.5),frameUV));vec4 m1=texture2D(smp,vec2(dx*(offset+1.5),frameUV));vec4 m2=texture2D(smp,vec2(dx*(offset+2.5),frameUV));vec4 m3=texture2D(smp,vec2(dx*(offset+3.5),frameUV));return mat4(m0,m1,m2,m3);} #endif `; je.IncludesShadersStore[The] = She; const Mhe = "instancesDeclaration", Rhe = `#ifdef INSTANCES attribute vec4 world0;attribute vec4 world1;attribute vec4 world2;attribute vec4 world3; #ifdef INSTANCESCOLOR attribute vec4 instanceColor; #endif #if defined(THIN_INSTANCES) && !defined(WORLD_UBO) uniform mat4 world; #endif #if defined(VELOCITY) || defined(PREPASS_VELOCITY) attribute vec4 previousWorld0;attribute vec4 previousWorld1;attribute vec4 previousWorld2;attribute vec4 previousWorld3; #ifdef THIN_INSTANCES uniform mat4 previousWorld; #endif #endif #else #if !defined(WORLD_UBO) uniform mat4 world; #endif #if defined(VELOCITY) || defined(PREPASS_VELOCITY) uniform mat4 previousWorld; #endif #endif `; je.IncludesShadersStore[Mhe] = Rhe; const Phe = "prePassVertexDeclaration", Ihe = `#ifdef PREPASS #ifdef PREPASS_DEPTH varying vec3 vViewPos; #endif #ifdef PREPASS_VELOCITY uniform mat4 previousViewProjection;varying vec4 vCurrentPosition;varying vec4 vPreviousPosition; #endif #endif `; je.IncludesShadersStore[Phe] = Ihe; const Dhe = "samplerVertexDeclaration", Ohe = `#if defined(_DEFINENAME_) && _DEFINENAME_DIRECTUV==0 varying vec2 v_VARYINGNAME_UV; #endif `; je.IncludesShadersStore[Dhe] = Ohe; const whe = "bumpVertexDeclaration", Lhe = `#if defined(BUMP) || defined(PARALLAX) || defined(CLEARCOAT_BUMP) || defined(ANISOTROPIC) #if defined(TANGENT) && defined(NORMAL) varying mat3 vTBN; #endif #endif `; je.IncludesShadersStore[whe] = Lhe; const Nhe = "clipPlaneVertexDeclaration", Fhe = `#ifdef CLIPPLANE uniform vec4 vClipPlane;varying float fClipDistance; #endif #ifdef CLIPPLANE2 uniform vec4 vClipPlane2;varying float fClipDistance2; #endif #ifdef CLIPPLANE3 uniform vec4 vClipPlane3;varying float fClipDistance3; #endif #ifdef CLIPPLANE4 uniform vec4 vClipPlane4;varying float fClipDistance4; #endif #ifdef CLIPPLANE5 uniform vec4 vClipPlane5;varying float fClipDistance5; #endif #ifdef CLIPPLANE6 uniform vec4 vClipPlane6;varying float fClipDistance6; #endif `; je.IncludesShadersStore[Nhe] = Fhe; const Bhe = "fogVertexDeclaration", Uhe = `#ifdef FOG varying vec3 vFogDistance; #endif `; je.IncludesShadersStore[Bhe] = Uhe; const Vhe = "lightVxFragmentDeclaration", khe = `#ifdef LIGHT{X} uniform vec4 vLightData{X};uniform vec4 vLightDiffuse{X}; #ifdef SPECULARTERM uniform vec4 vLightSpecular{X}; #else vec4 vLightSpecular{X}=vec4(0.); #endif #ifdef SHADOW{X} #ifdef 
SHADOWCSM{X} uniform mat4 lightMatrix{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromLight{X}[SHADOWCSMNUM_CASCADES{X}];varying float vDepthMetric{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromCamera{X}; #elif defined(SHADOWCUBE{X}) #else varying vec4 vPositionFromLight{X};varying float vDepthMetric{X};uniform mat4 lightMatrix{X}; #endif uniform vec4 shadowsInfo{X};uniform vec2 depthValues{X}; #endif #ifdef SPOTLIGHT{X} uniform vec4 vLightDirection{X};uniform vec4 vLightFalloff{X}; #elif defined(POINTLIGHT{X}) uniform vec4 vLightFalloff{X}; #elif defined(HEMILIGHT{X}) uniform vec3 vLightGround{X}; #endif #endif `; je.IncludesShadersStore[Vhe] = khe; const zhe = "lightVxUboDeclaration", Hhe = `#ifdef LIGHT{X} uniform Light{X} {vec4 vLightData;vec4 vLightDiffuse;vec4 vLightSpecular; #ifdef SPOTLIGHT{X} vec4 vLightDirection;vec4 vLightFalloff; #elif defined(POINTLIGHT{X}) vec4 vLightFalloff; #elif defined(HEMILIGHT{X}) vec3 vLightGround; #endif vec4 shadowsInfo;vec2 depthValues;} light{X}; #ifdef SHADOW{X} #ifdef SHADOWCSM{X} uniform mat4 lightMatrix{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromLight{X}[SHADOWCSMNUM_CASCADES{X}];varying float vDepthMetric{X}[SHADOWCSMNUM_CASCADES{X}];varying vec4 vPositionFromCamera{X}; #elif defined(SHADOWCUBE{X}) #else varying vec4 vPositionFromLight{X};varying float vDepthMetric{X};uniform mat4 lightMatrix{X}; #endif #endif #endif `; je.IncludesShadersStore[zhe] = Hhe; const Ghe = "morphTargetsVertexGlobalDeclaration", Khe = `#ifdef MORPHTARGETS uniform float morphTargetInfluences[NUM_MORPH_INFLUENCERS]; #ifdef MORPHTARGETS_TEXTURE uniform float morphTargetTextureIndices[NUM_MORPH_INFLUENCERS];uniform vec3 morphTargetTextureInfo;uniform highp sampler2DArray morphTargets;vec3 readVector3FromRawSampler(int targetIndex,float vertexIndex) { float y=floor(vertexIndex/morphTargetTextureInfo.y);float x=vertexIndex-y*morphTargetTextureInfo.y;vec3 textureUV=vec3((x+0.5)/morphTargetTextureInfo.y,(y+0.5)/morphTargetTextureInfo.z,morphTargetTextureIndices[targetIndex]);return texture(morphTargets,textureUV).xyz;} #endif #endif `; je.IncludesShadersStore[Ghe] = Khe; const Whe = "morphTargetsVertexDeclaration", jhe = `#ifdef MORPHTARGETS #ifndef MORPHTARGETS_TEXTURE attribute vec3 position{X}; #ifdef MORPHTARGETS_NORMAL attribute vec3 normal{X}; #endif #ifdef MORPHTARGETS_TANGENT attribute vec3 tangent{X}; #endif #ifdef MORPHTARGETS_UV attribute vec2 uv_{X}; #endif #endif #endif `; je.IncludesShadersStore[Whe] = jhe; const Xhe = "morphTargetsVertexGlobal", Yhe = `#ifdef MORPHTARGETS #ifdef MORPHTARGETS_TEXTURE float vertexID; #endif #endif `; je.IncludesShadersStore[Xhe] = Yhe; const Qhe = "morphTargetsVertex", $he = `#ifdef MORPHTARGETS #ifdef MORPHTARGETS_TEXTURE vertexID=float(gl_VertexID)*morphTargetTextureInfo.x;positionUpdated+=(readVector3FromRawSampler({X},vertexID)-position)*morphTargetInfluences[{X}];vertexID+=1.0; #ifdef MORPHTARGETS_NORMAL normalUpdated+=(readVector3FromRawSampler({X},vertexID) -normal)*morphTargetInfluences[{X}];vertexID+=1.0; #endif #ifdef MORPHTARGETS_UV uvUpdated+=(readVector3FromRawSampler({X},vertexID).xy-uv)*morphTargetInfluences[{X}];vertexID+=1.0; #endif #ifdef MORPHTARGETS_TANGENT tangentUpdated.xyz+=(readVector3FromRawSampler({X},vertexID) -tangent.xyz)*morphTargetInfluences[{X}]; #endif #else positionUpdated+=(position{X}-position)*morphTargetInfluences[{X}]; #ifdef MORPHTARGETS_NORMAL normalUpdated+=(normal{X}-normal)*morphTargetInfluences[{X}]; #endif #ifdef MORPHTARGETS_TANGENT 
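// Attribute (non-texture) morph path: each target {X} contributes (targetAttribute-baseAttribute)*morphTargetInfluences[{X}] to the updated vertex; the tangent below accumulates only xyz so the base handedness stored in tangent.w is preserved.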
tangentUpdated.xyz+=(tangent{X}-tangent.xyz)*morphTargetInfluences[{X}]; #endif #ifdef MORPHTARGETS_UV uvUpdated+=(uv_{X}-uv)*morphTargetInfluences[{X}]; #endif #endif #endif `; je.IncludesShadersStore[Qhe] = $he; const Zhe = "instancesVertex", qhe = `#ifdef INSTANCES mat4 finalWorld=mat4(world0,world1,world2,world3); #if defined(PREPASS_VELOCITY) || defined(VELOCITY) mat4 finalPreviousWorld=mat4(previousWorld0,previousWorld1,previousWorld2,previousWorld3); #endif #ifdef THIN_INSTANCES finalWorld=world*finalWorld; #if defined(PREPASS_VELOCITY) || defined(VELOCITY) finalPreviousWorld=previousWorld*finalPreviousWorld; #endif #endif #else mat4 finalWorld=world; #if defined(PREPASS_VELOCITY) || defined(VELOCITY) mat4 finalPreviousWorld=previousWorld; #endif #endif `; je.IncludesShadersStore[Zhe] = qhe; const Jhe = "bonesVertex", ede = `#ifndef BAKED_VERTEX_ANIMATION_TEXTURE #if NUM_BONE_INFLUENCERS>0 mat4 influence; #ifdef BONETEXTURE influence=readMatrixFromRawSampler(boneSampler,matricesIndices[0])*matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 influence+=readMatrixFromRawSampler(boneSampler,matricesIndices[1])*matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 influence+=readMatrixFromRawSampler(boneSampler,matricesIndices[2])*matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 influence+=readMatrixFromRawSampler(boneSampler,matricesIndices[3])*matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 influence+=readMatrixFromRawSampler(boneSampler,matricesIndicesExtra[0])*matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 influence+=readMatrixFromRawSampler(boneSampler,matricesIndicesExtra[1])*matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 influence+=readMatrixFromRawSampler(boneSampler,matricesIndicesExtra[2])*matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 influence+=readMatrixFromRawSampler(boneSampler,matricesIndicesExtra[3])*matricesWeightsExtra[3]; #endif #else influence=mBones[int(matricesIndices[0])]*matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 influence+=mBones[int(matricesIndices[1])]*matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 influence+=mBones[int(matricesIndices[2])]*matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 influence+=mBones[int(matricesIndices[3])]*matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 influence+=mBones[int(matricesIndicesExtra[0])]*matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 influence+=mBones[int(matricesIndicesExtra[1])]*matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 influence+=mBones[int(matricesIndicesExtra[2])]*matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 influence+=mBones[int(matricesIndicesExtra[3])]*matricesWeightsExtra[3]; #endif #endif finalWorld=finalWorld*influence; #endif #endif `; je.IncludesShadersStore[Jhe] = ede; const tde = "bakedVertexAnimation", ide = `#ifdef BAKED_VERTEX_ANIMATION_TEXTURE { #ifdef INSTANCES #define BVASNAME bakedVertexAnimationSettingsInstanced #else #define BVASNAME bakedVertexAnimationSettings #endif float VATStartFrame=BVASNAME.x;float VATEndFrame=BVASNAME.y;float VATOffsetFrame=BVASNAME.z;float VATSpeed=BVASNAME.w;float totalFrames=VATEndFrame-VATStartFrame+1.0;float time=bakedVertexAnimationTime*VATSpeed/totalFrames;float frameCorrection=time<1.0 ? 
0.0 : 1.0;float numOfFrames=totalFrames-frameCorrection;float VATFrameNum=fract(time)*numOfFrames;VATFrameNum=mod(VATFrameNum+VATOffsetFrame,numOfFrames);VATFrameNum=floor(VATFrameNum);VATFrameNum+=VATStartFrame+frameCorrection;mat4 VATInfluence;VATInfluence=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndices[0],VATFrameNum)*matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndices[1],VATFrameNum)*matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndices[2],VATFrameNum)*matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndices[3],VATFrameNum)*matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndicesExtra[0],VATFrameNum)*matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndicesExtra[1],VATFrameNum)*matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndicesExtra[2],VATFrameNum)*matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 VATInfluence+=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,matricesIndicesExtra[3],VATFrameNum)*matricesWeightsExtra[3]; #endif finalWorld=finalWorld*VATInfluence;} #endif `; je.IncludesShadersStore[tde] = ide; const rde = "prePassVertex", sde = `#ifdef PREPASS_DEPTH vViewPos=(view*worldPos).rgb; #endif #if defined(PREPASS_VELOCITY) && defined(BONES_VELOCITY_ENABLED) vCurrentPosition=viewProjection*worldPos; #if NUM_BONE_INFLUENCERS>0 mat4 previousInfluence;previousInfluence=mPreviousBones[int(matricesIndices[0])]*matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 previousInfluence+=mPreviousBones[int(matricesIndices[1])]*matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 previousInfluence+=mPreviousBones[int(matricesIndices[2])]*matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 previousInfluence+=mPreviousBones[int(matricesIndices[3])]*matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[0])]*matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[1])]*matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[2])]*matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[3])]*matricesWeightsExtra[3]; #endif vPreviousPosition=previousViewProjection*finalPreviousWorld*previousInfluence*vec4(positionUpdated,1.0); #else vPreviousPosition=previousViewProjection*finalPreviousWorld*vec4(positionUpdated,1.0); #endif #endif `; je.IncludesShadersStore[rde] = sde; const nde = "uvVariableDeclaration", ade = `#if !defined(UV{X}) && defined(MAINUV{X}) vec2 uv{X}=vec2(0.,0.); #endif #ifdef MAINUV{X} vMainUV{X}=uv{X}; #endif `; je.IncludesShadersStore[nde] = ade; const ode = "samplerVertexImplementation", lde = `#if defined(_DEFINENAME_) && _DEFINENAME_DIRECTUV==0 if (v_INFONAME_==0.) {v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uvUpdated,1.0,0.0));} #ifdef UV2 else if (v_INFONAME_==1.) {v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uv2,1.0,0.0));} #endif #ifdef UV3 else if (v_INFONAME_==2.) 
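// _INFONAME_ carries the texture's coordinatesIndex: value 2 selects the uv3 attribute, which the _MATRIXNAME_ texture matrix then transforms into this sampler's varying (same pattern as the uv/uv2 branches above).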
{v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uv3,1.0,0.0));} #endif #ifdef UV4 else if (v_INFONAME_==3.) {v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uv4,1.0,0.0));} #endif #ifdef UV5 else if (v_INFONAME_==4.) {v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uv5,1.0,0.0));} #endif #ifdef UV6 else if (v_INFONAME_==5.) {v_VARYINGNAME_UV=vec2(_MATRIXNAME_Matrix*vec4(uv6,1.0,0.0));} #endif #endif `; je.IncludesShadersStore[ode] = lde; const cde = "bumpVertex", ude = `#if defined(BUMP) || defined(PARALLAX) || defined(CLEARCOAT_BUMP) || defined(ANISOTROPIC) #if defined(TANGENT) && defined(NORMAL) vec3 tbnNormal=normalize(normalUpdated);vec3 tbnTangent=normalize(tangentUpdated.xyz);vec3 tbnBitangent=cross(tbnNormal,tbnTangent)*tangentUpdated.w;vTBN=mat3(finalWorld)*mat3(tbnTangent,tbnBitangent,tbnNormal); #endif #endif `; je.IncludesShadersStore[cde] = ude; const hde = "clipPlaneVertex", dde = `#ifdef CLIPPLANE fClipDistance=dot(worldPos,vClipPlane); #endif #ifdef CLIPPLANE2 fClipDistance2=dot(worldPos,vClipPlane2); #endif #ifdef CLIPPLANE3 fClipDistance3=dot(worldPos,vClipPlane3); #endif #ifdef CLIPPLANE4 fClipDistance4=dot(worldPos,vClipPlane4); #endif #ifdef CLIPPLANE5 fClipDistance5=dot(worldPos,vClipPlane5); #endif #ifdef CLIPPLANE6 fClipDistance6=dot(worldPos,vClipPlane6); #endif `; je.IncludesShadersStore[hde] = dde; const fde = "fogVertex", pde = `#ifdef FOG vFogDistance=(view*worldPos).xyz; #endif `; je.IncludesShadersStore[fde] = pde; const _de = "shadowsVertex", mde = `#ifdef SHADOWS #if defined(SHADOWCSM{X}) vPositionFromCamera{X}=view*worldPos;for (int i=0; i #define CUSTOM_VERTEX_BEGIN attribute vec3 position; #ifdef NORMAL attribute vec3 normal; #endif #ifdef TANGENT attribute vec4 tangent; #endif #ifdef UV1 attribute vec2 uv; #endif #include[2..7] #ifdef VERTEXCOLOR attribute vec4 color; #endif #include #include #include #include #include #include[1..7] #include(_DEFINENAME_,DIFFUSE,_VARYINGNAME_,Diffuse) #include(_DEFINENAME_,DETAIL,_VARYINGNAME_,Detail) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap) #if defined(SPECULARTERM) #include(_DEFINENAME_,SPECULAR,_VARYINGNAME_,Specular) #endif #include(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal) varying vec3 vPositionW; #ifdef NORMAL varying vec3 vNormalW; #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) varying vec4 vColor; #endif #include #include #include #include<__decl__lightVxFragment>[0..maxSimultaneousLights] #include #include[0..maxSimultaneousMorphTargets] #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #endif #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec3 positionUpdated=position; #ifdef NORMAL vec3 normalUpdated=normal; #endif #ifdef TANGENT vec4 tangentUpdated=tangent; #endif #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #ifdef REFLECTIONMAP_SKYBOX vPositionUVW=positionUpdated; #endif #define CUSTOM_VERTEX_UPDATE_POSITION #define CUSTOM_VERTEX_UPDATE_NORMAL #include #if defined(PREPASS) && defined(PREPASS_VELOCITY) && !defined(BONES_VELOCITY_ENABLED) 
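// Velocity prepass without skinning: project positionUpdated with both the current and the previous frame's world/viewProjection matrices so the prepass fragment stage can derive screen-space motion vectors from the two interpolated clip-space positions.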
vCurrentPosition=viewProjection*finalWorld*vec4(positionUpdated,1.0);vPreviousPosition=previousViewProjection*finalPreviousWorld*vec4(positionUpdated,1.0); #endif #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0); #ifdef NORMAL mat3 normalWorld=mat3(finalWorld); #if defined(INSTANCES) && defined(THIN_INSTANCES) vNormalW=normalUpdated/vec3(dot(normalWorld[0],normalWorld[0]),dot(normalWorld[1],normalWorld[1]),dot(normalWorld[2],normalWorld[2]));vNormalW=normalize(normalWorld*vNormalW); #else #ifdef NONUNIFORMSCALING normalWorld=transposeMat3(inverseMat3(normalWorld)); #endif vNormalW=normalize(normalWorld*normalUpdated); #endif #endif #define CUSTOM_VERTEX_UPDATE_WORLDPOS #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {gl_Position=viewProjection*worldPos;} else {gl_Position=viewProjectionR*worldPos;} #else gl_Position=viewProjection*worldPos; #endif vPositionW=vec3(worldPos); #include #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) vDirectionW=normalize(vec3(finalWorld*vec4(positionUpdated,0.0))); #endif #ifndef UV1 vec2 uvUpdated=vec2(0.,0.); #endif #ifdef MAINUV1 vMainUV1=uvUpdated; #endif #include[2..7] #include(_DEFINENAME_,DIFFUSE,_VARYINGNAME_,Diffuse,_MATRIXNAME_,diffuse,_INFONAME_,DiffuseInfos.x) #include(_DEFINENAME_,DETAIL,_VARYINGNAME_,Detail,_MATRIXNAME_,detail,_INFONAME_,DetailInfos.x) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient,_MATRIXNAME_,ambient,_INFONAME_,AmbientInfos.x) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity,_MATRIXNAME_,opacity,_INFONAME_,OpacityInfos.x) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive,_MATRIXNAME_,emissive,_INFONAME_,EmissiveInfos.x) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap,_MATRIXNAME_,lightmap,_INFONAME_,LightmapInfos.x) #if defined(SPECULARTERM) #include(_DEFINENAME_,SPECULAR,_VARYINGNAME_,Specular,_MATRIXNAME_,specular,_INFONAME_,SpecularInfos.x) #endif #include(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump,_MATRIXNAME_,bump,_INFONAME_,BumpInfos.x) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal,_MATRIXNAME_,decal,_INFONAME_,DecalInfos.x) #include #include #include #include[0..maxSimultaneousLights] #include #include #include #define CUSTOM_VERTEX_MAIN_END } `; je.ShadersStore[bde] = Ede; const Tde = new RegExp("^([gimus]+)!"); class nT { /** * Creates a new instance of the plugin manager * @param material material that this manager will manage the plugins for */ constructor(e) { this._plugins = [], this._activePlugins = [], this._activePluginsForExtraEvents = [], this._material = e, this._scene = e.getScene(), this._engine = this._scene.getEngine(); } /** * @internal */ _addPlugin(e) { for (let r = 0; r < this._plugins.length; ++r) if (this._plugins[r].name === e.name) return !1; if (this._material._uniformBufferLayoutBuilt) throw `The plugin "${e.name}" can't be added to the material "${this._material.name}" because this material has already been used for rendering! 
Please add plugins to materials before any rendering with this material occurs.`; const t = e.getClassName(); nT._MaterialPluginClassToMainDefine[t] || (nT._MaterialPluginClassToMainDefine[t] = "MATERIALPLUGIN_" + ++nT._MaterialPluginCounter), this._material._callbackPluginEventGeneric = (r, s) => this._handlePluginEvent(r, s), this._plugins.push(e), this._plugins.sort((r, s) => r.priority - s.priority), this._codeInjectionPoints = {}; const i = {}; i[nT._MaterialPluginClassToMainDefine[t]] = { type: "boolean", default: !0 }; for (const r of this._plugins) r.collectDefines(i), this._collectPointNames("vertex", r.getCustomCode("vertex")), this._collectPointNames("fragment", r.getCustomCode("fragment")); return this._defineNamesFromPlugins = i, !0; } /** * @internal */ _activatePlugin(e) { this._activePlugins.indexOf(e) === -1 && (this._activePlugins.push(e), this._activePlugins.sort((t, i) => t.priority - i.priority), this._material._callbackPluginEventIsReadyForSubMesh = this._handlePluginEventIsReadyForSubMesh.bind(this), this._material._callbackPluginEventPrepareDefinesBeforeAttributes = this._handlePluginEventPrepareDefinesBeforeAttributes.bind(this), this._material._callbackPluginEventPrepareDefines = this._handlePluginEventPrepareDefines.bind(this), this._material._callbackPluginEventBindForSubMesh = this._handlePluginEventBindForSubMesh.bind(this), e.registerForExtraEvents && (this._activePluginsForExtraEvents.push(e), this._activePluginsForExtraEvents.sort((t, i) => t.priority - i.priority), this._material._callbackPluginEventHasRenderTargetTextures = this._handlePluginEventHasRenderTargetTextures.bind(this), this._material._callbackPluginEventFillRenderTargetTextures = this._handlePluginEventFillRenderTargetTextures.bind(this), this._material._callbackPluginEventHardBindForSubMesh = this._handlePluginEventHardBindForSubMesh.bind(this))); } /** * Gets a plugin from the list of plugins managed by this manager * @param name name of the plugin * @returns the plugin if found, else null */ getPlugin(e) { for (let t = 0; t < this._plugins.length; ++t) if (this._plugins[t].name === e) return this._plugins[t]; return null; } _handlePluginEventIsReadyForSubMesh(e) { let t = !0; for (const i of this._activePlugins) t = t && i.isReadyForSubMesh(e.defines, this._scene, this._engine, e.subMesh); e.isReadyForSubMesh = t; } _handlePluginEventPrepareDefinesBeforeAttributes(e) { for (const t of this._activePlugins) t.prepareDefinesBeforeAttributes(e.defines, this._scene, e.mesh); } _handlePluginEventPrepareDefines(e) { for (const t of this._activePlugins) t.prepareDefines(e.defines, this._scene, e.mesh); } _handlePluginEventHardBindForSubMesh(e) { for (const t of this._activePluginsForExtraEvents) t.hardBindForSubMesh(this._material._uniformBuffer, this._scene, this._engine, e.subMesh); } _handlePluginEventBindForSubMesh(e) { for (const t of this._activePlugins) t.bindForSubMesh(this._material._uniformBuffer, this._scene, this._engine, e.subMesh); } _handlePluginEventHasRenderTargetTextures(e) { let t = !1; for (const i of this._activePluginsForExtraEvents) if (t = i.hasRenderTargetTextures(), t) break; e.hasRenderTargetTextures = t; } _handlePluginEventFillRenderTargetTextures(e) { for (const t of this._activePluginsForExtraEvents) t.fillRenderTargetTextures(e.renderTargets); } _handlePluginEvent(e, t) { var i; switch (e) { case xh.GetActiveTextures: { const r = t; for (const s of this._activePlugins) s.getActiveTextures(r.activeTextures); break; } case xh.GetAnimatables: { const r = t; for (const 
s of this._activePlugins) s.getAnimatables(r.animatables); break; } case xh.HasTexture: { const r = t; let s = !1; for (const n of this._activePlugins) if (s = n.hasTexture(r.texture), s) break; r.hasTexture = s; break; } case xh.Disposed: { const r = t; for (const s of this._plugins) s.dispose(r.forceDisposeTextures); break; } case xh.GetDefineNames: { const r = t; r.defineNames = this._defineNamesFromPlugins; break; } case xh.PrepareEffect: { const r = t; for (const s of this._activePlugins) r.fallbackRank = s.addFallbacks(r.defines, r.fallbacks, r.fallbackRank), s.getAttributes(r.attributes, this._scene, r.mesh); this._uniformList.length > 0 && r.uniforms.push(...this._uniformList), this._samplerList.length > 0 && r.samplers.push(...this._samplerList), this._uboList.length > 0 && r.uniformBuffersNames.push(...this._uboList), r.customCode = this._injectCustomCode(r, r.customCode); break; } case xh.PrepareUniformBuffer: { const r = t; this._uboDeclaration = "", this._vertexDeclaration = "", this._fragmentDeclaration = "", this._uniformList = [], this._samplerList = [], this._uboList = []; for (const s of this._plugins) { const n = s.getUniforms(); if (n) { if (n.ubo) for (const a of n.ubo) { if (a.size && a.type) { const l = (i = a.arraySize) !== null && i !== void 0 ? i : 0; r.ubo.addUniform(a.name, a.size, l), this._uboDeclaration += `${a.type} ${a.name}${l > 0 ? `[${l}]` : ""}; `; } this._uniformList.push(a.name); } n.vertex && (this._vertexDeclaration += n.vertex + ` `), n.fragment && (this._fragmentDeclaration += n.fragment + ` `); } s.getSamplers(this._samplerList), s.getUniformBuffersNames(this._uboList); } break; } } } _collectPointNames(e, t) { if (t) for (const i in t) this._codeInjectionPoints[e] || (this._codeInjectionPoints[e] = {}), this._codeInjectionPoints[e][i] = !0; } _injectCustomCode(e, t) { return (i, r) => { var s, n; t && (r = t(i, r)), this._uboDeclaration && (r = r.replace("#define ADDITIONAL_UBO_DECLARATION", this._uboDeclaration)), this._vertexDeclaration && (r = r.replace("#define ADDITIONAL_VERTEX_DECLARATION", this._vertexDeclaration)), this._fragmentDeclaration && (r = r.replace("#define ADDITIONAL_FRAGMENT_DECLARATION", this._fragmentDeclaration)); const a = (s = this._codeInjectionPoints) === null || s === void 0 ? void 0 : s[i]; if (!a) return r; let l = null; for (let o in a) { let u = ""; for (const h of this._activePlugins) { let d = (n = h.getCustomCode(i)) === null || n === void 0 ? 
void 0 : n[o]; if (d) { if (h.resolveIncludes) { if (l === null) { const f = Xa.GLSL; l = { defines: [], indexParameters: e.indexParameters, isFragment: !1, shouldUseHighPrecisionShader: this._engine._shouldUseHighPrecisionShader, processor: void 0, supportsUniformBuffers: this._engine.supportsUniformBuffers, shadersRepository: je.GetShadersRepository(f), includesShadersStore: je.GetIncludesShadersStore(f), version: void 0, platformName: this._engine.shaderPlatformName, processingContext: void 0, isNDCHalfZRange: this._engine.isNDCHalfZRange, useReverseDepthBuffer: this._engine.useReverseDepthBuffer, processCodeAfterIncludes: void 0 // not used by _ProcessIncludes }; } l.isFragment = i === "fragment", pg._ProcessIncludes(d, l, (f) => d = f); } u += d + ` `; } } if (u.length > 0) if (o.charAt(0) === "!") { o = o.substring(1); let h = "g"; if (o.charAt(0) === "!") h = "", o = o.substring(1); else { const m = Tde.exec(o); m && m.length >= 2 && (h = m[1], o = o.substring(h.length + 1)); } h.indexOf("g") < 0 && (h += "g"); const d = r, f = new RegExp(o, h); let p = f.exec(d); for (; p !== null; ) { let m = u; for (let _ = 0; _ < p.length; ++_) m = m.replace("$" + _, p[_]); r = r.replace(p[0], m), p = f.exec(d); } } else { const h = "#define " + o; r = r.replace(h, ` ` + u + ` ` + h); } } return r; }; } } nT._MaterialPluginClassToMainDefine = {}; nT._MaterialPluginCounter = 0; gi.OnEnginesDisposedObservable.add(() => { wK(); }); const J8 = []; let vH = !1, AH = null; function Sde(c, e) { vH || (AH = At.OnEventObservable.add((i) => { for (const [, r] of J8) r(i); }, xh.Created), vH = !0); const t = J8.filter(([i, r]) => i === c); t.length > 0 ? t[0][1] = e : J8.push([c, e]); } function Mde(c) { for (let e = 0; e < J8.length; ++e) if (J8[e][0] === c) return J8.splice(e, 1), J8.length === 0 && wK(), !0; return !1; } function wK() { J8.length = 0, vH = !1, At.OnEventObservable.remove(AH), AH = null; } class Q_ { _enable(e) { e && this._pluginManager._activatePlugin(this); } /** * Creates a new material plugin * @param material parent material of the plugin * @param name name of the plugin * @param priority priority of the plugin * @param defines list of defines used by the plugin. The value of the property is the default value for this property * @param addToPluginList true to add the plugin to the list of plugins managed by the material plugin manager of the material (default: true) * @param enable true to enable the plugin (it is handy if the plugin does not handle properties to switch its current activation) * @param resolveIncludes Indicates that any #include directive in the plugin code must be replaced by the corresponding code (default: false) */ constructor(e, t, i, r, s = !0, n = !1, a = !1) { this.priority = 500, this.resolveIncludes = !1, this.registerForExtraEvents = !1, this._material = e, this.name = t, this.priority = i, this.resolveIncludes = a, e.pluginManager || (e.pluginManager = new nT(e), e.onDisposeObservable.add(() => { e.pluginManager = void 0; })), this._pluginDefineNames = r, this._pluginManager = e.pluginManager, s && this._pluginManager._addPlugin(this), n && this._enable(!0), this.markAllDefinesAsDirty = e._dirtyCallbacks[63]; } /** * Gets the current class name useful for serialization or dynamic coding. * @returns The class name. */ getClassName() { return "MaterialPluginBase"; } /** * Specifies that the submesh is ready to be used. * @param defines the list of "defines" to update. * @param scene defines the scene the material belongs to. 
* @param engine the engine this scene belongs to. * @param subMesh the submesh to check for readiness * @returns - boolean indicating that the submesh is ready or not. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars isReadyForSubMesh(e, t, i, r) { return !0; } /** * Binds the material data (this function is called even if mustRebind() returns false) * @param uniformBuffer defines the Uniform buffer to fill in. * @param scene defines the scene the material belongs to. * @param engine defines the engine the material belongs to. * @param subMesh the submesh to bind data for */ // eslint-disable-next-line @typescript-eslint/no-unused-vars hardBindForSubMesh(e, t, i, r) { } /** * Binds the material data. * @param uniformBuffer defines the Uniform buffer to fill in. * @param scene defines the scene the material belongs to. * @param engine the engine this scene belongs to. * @param subMesh the submesh to bind data for */ // eslint-disable-next-line @typescript-eslint/no-unused-vars bindForSubMesh(e, t, i, r) { } /** * Disposes the resources of the material. * @param forceDisposeTextures - Forces the disposal of all textures. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars dispose(e) { } /** * Returns a list of custom shader code fragments to customize the shader. * @param shaderType "vertex" or "fragment" * @returns null if no code to be added, or a list of pointName =\> code. * Note that `pointName` can also be a regular expression if it starts with a `!`. * In that case, the string found by the regular expression (if any) will be * replaced by the code provided. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getCustomCode(e) { return null; } /** * Collects all defines. * @param defines The object to append to. */ collectDefines(e) { if (this._pluginDefineNames) for (const t of Object.keys(this._pluginDefineNames)) { if (t[0] === "_") continue; const i = typeof this._pluginDefineNames[t]; e[t] = { type: i === "number" ? "number" : i === "string" ? "string" : i === "boolean" ? "boolean" : "object", default: this._pluginDefineNames[t] }; } } /** * Sets the defines for the next rendering. Called before MaterialHelper.PrepareDefinesForAttributes is called. * @param defines the list of "defines" to update. * @param scene defines the scene to the material belongs to. * @param mesh the mesh being rendered */ // eslint-disable-next-line @typescript-eslint/no-unused-vars prepareDefinesBeforeAttributes(e, t, i) { } /** * Sets the defines for the next rendering * @param defines the list of "defines" to update. * @param scene defines the scene to the material belongs to. * @param mesh the mesh being rendered */ // eslint-disable-next-line @typescript-eslint/no-unused-vars prepareDefines(e, t, i) { } /** * Checks to see if a texture is used in the material. * @param texture - Base texture to use. * @returns - Boolean specifying if a texture is used in the material. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars hasTexture(e) { return !1; } /** * Gets a boolean indicating that current material needs to register RTT * @returns true if this uses a render target otherwise false. */ hasRenderTargetTextures() { return !1; } /** * Fills the list of render target textures. * @param renderTargets the list of render targets to update */ // eslint-disable-next-line @typescript-eslint/no-unused-vars fillRenderTargetTextures(e) { } /** * Returns an array of the actively used textures. 
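* @example
* // Illustrative sketch only (hypothetical plugin field `_myTexture`, not part of this bundle):
* // a plugin implementation pushes the textures it owns so they appear in scene texture enumeration, e.g.
* //   getActiveTextures(activeTextures) { if (this._myTexture) activeTextures.push(this._myTexture); }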
* @param activeTextures Array of BaseTextures */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getActiveTextures(e) { } /** * Returns the animatable textures. * @param animatables Array of animatable textures. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getAnimatables(e) { } /** * Add fallbacks to the effect fallbacks list. * @param defines defines the Base texture to use. * @param fallbacks defines the current fallback list. * @param currentRank defines the current fallback rank. * @returns the new fallback rank. */ addFallbacks(e, t, i) { return i; } /** * Gets the samplers used by the plugin. * @param samplers list that the sampler names should be added to. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getSamplers(e) { } /** * Gets the attributes used by the plugin. * @param attributes list that the attribute names should be added to. * @param scene the scene that the material belongs to. * @param mesh the mesh being rendered. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getAttributes(e, t, i) { } /** * Gets the uniform buffers names added by the plugin. * @param ubos list that the ubo names should be added to. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getUniformBuffersNames(e) { } /** * Gets the description of the uniforms to add to the ubo (if engine supports ubos) or to inject directly in the vertex/fragment shaders (if engine does not support ubos) * @returns the description of the uniforms */ getUniforms() { return {}; } /** * Makes a duplicate of the current configuration into another one. * @param plugin define the config where to copy the info */ copyTo(e) { St.Clone(() => e, this); } /** * Serializes this plugin configuration. * @returns - An object with the serialized config. */ serialize() { return St.Serialize(this); } /** * Parses a plugin configuration from a serialized object. * @param source - Serialized object. * @param scene Defines the scene we are parsing for * @param rootUrl Defines the rootUrl to load from */ parse(e, t, i) { St.Parse(() => this, e, t, i); } } F([ W() ], Q_.prototype, "name", void 0); F([ W() ], Q_.prototype, "priority", void 0); F([ W() ], Q_.prototype, "resolveIncludes", void 0); F([ W() ], Q_.prototype, "registerForExtraEvents", void 0); class pie extends sa { constructor() { super(...arguments), this.DETAIL = !1, this.DETAILDIRECTUV = 0, this.DETAIL_NORMALBLENDMETHOD = 0; } } class dx extends Q_ { /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } constructor(e, t = !0) { super(e, "DetailMap", 140, new pie(), t), this._texture = null, this.diffuseBlendLevel = 1, this.roughnessBlendLevel = 1, this.bumpLevel = 1, this._normalBlendMethod = At.MATERIAL_NORMALBLENDMETHOD_WHITEOUT, this._isEnabled = !1, this.isEnabled = !1, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1]; } isReadyForSubMesh(e, t, i) { return this._isEnabled ? !(e._areTexturesDirty && t.texturesEnabled && i.getCaps().standardDerivatives && this._texture && Tt.DetailTextureEnabled && !this._texture.isReady()) : !0; } prepareDefines(e, t) { if (this._isEnabled) { e.DETAIL_NORMALBLENDMETHOD = this._normalBlendMethod; const i = t.getEngine(); e._areTexturesDirty && (i.getCaps().standardDerivatives && this._texture && Tt.DetailTextureEnabled && this._isEnabled ? 
(Ke.PrepareDefinesForMergedUV(this._texture, e, "DETAIL"), e.DETAIL_NORMALBLENDMETHOD = this._normalBlendMethod) : e.DETAIL = !1); } else e.DETAIL = !1; } bindForSubMesh(e, t) { if (!this._isEnabled) return; const i = this._material.isFrozen; (!e.useUbo || !i || !e.isSync) && this._texture && Tt.DetailTextureEnabled && (e.updateFloat4("vDetailInfos", this._texture.coordinatesIndex, this.diffuseBlendLevel, this.bumpLevel, this.roughnessBlendLevel), Ke.BindTextureMatrix(this._texture, e, "detail")), t.texturesEnabled && this._texture && Tt.DetailTextureEnabled && e.setTexture("detailSampler", this._texture); } hasTexture(e) { return this._texture === e; } getActiveTextures(e) { this._texture && e.push(this._texture); } getAnimatables(e) { this._texture && this._texture.animations && this._texture.animations.length > 0 && e.push(this._texture); } dispose(e) { var t; e && ((t = this._texture) === null || t === void 0 || t.dispose()); } getClassName() { return "DetailMapConfiguration"; } getSamplers(e) { e.push("detailSampler"); } getUniforms() { return { ubo: [ { name: "vDetailInfos", size: 4, type: "vec4" }, { name: "detailMatrix", size: 16, type: "mat4" } ] }; } } F([ er("detailTexture"), ct("_markAllSubMeshesAsTexturesDirty") ], dx.prototype, "texture", void 0); F([ W() ], dx.prototype, "diffuseBlendLevel", void 0); F([ W() ], dx.prototype, "roughnessBlendLevel", void 0); F([ W() ], dx.prototype, "bumpLevel", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], dx.prototype, "normalBlendMethod", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], dx.prototype, "isEnabled", void 0); const Vk = { effect: null, subMesh: null }; class _ie extends sa { /** * Initializes the Standard Material defines. * @param externalProperties The external properties */ constructor(e) { super(e), this.MAINUV1 = !1, this.MAINUV2 = !1, this.MAINUV3 = !1, this.MAINUV4 = !1, this.MAINUV5 = !1, this.MAINUV6 = !1, this.DIFFUSE = !1, this.DIFFUSEDIRECTUV = 0, this.BAKED_VERTEX_ANIMATION_TEXTURE = !1, this.AMBIENT = !1, this.AMBIENTDIRECTUV = 0, this.OPACITY = !1, this.OPACITYDIRECTUV = 0, this.OPACITYRGB = !1, this.REFLECTION = !1, this.EMISSIVE = !1, this.EMISSIVEDIRECTUV = 0, this.SPECULAR = !1, this.SPECULARDIRECTUV = 0, this.BUMP = !1, this.BUMPDIRECTUV = 0, this.PARALLAX = !1, this.PARALLAX_RHS = !1, this.PARALLAXOCCLUSION = !1, this.SPECULAROVERALPHA = !1, this.CLIPPLANE = !1, this.CLIPPLANE2 = !1, this.CLIPPLANE3 = !1, this.CLIPPLANE4 = !1, this.CLIPPLANE5 = !1, this.CLIPPLANE6 = !1, this.ALPHATEST = !1, this.DEPTHPREPASS = !1, this.ALPHAFROMDIFFUSE = !1, this.POINTSIZE = !1, this.FOG = !1, this.SPECULARTERM = !1, this.DIFFUSEFRESNEL = !1, this.OPACITYFRESNEL = !1, this.REFLECTIONFRESNEL = !1, this.REFRACTIONFRESNEL = !1, this.EMISSIVEFRESNEL = !1, this.FRESNEL = !1, this.NORMAL = !1, this.TANGENT = !1, this.UV1 = !1, this.UV2 = !1, this.UV3 = !1, this.UV4 = !1, this.UV5 = !1, this.UV6 = !1, this.VERTEXCOLOR = !1, this.VERTEXALPHA = !1, this.NUM_BONE_INFLUENCERS = 0, this.BonesPerMesh = 0, this.BONETEXTURE = !1, this.BONES_VELOCITY_ENABLED = !1, this.INSTANCES = !1, this.THIN_INSTANCES = !1, this.INSTANCESCOLOR = !1, this.GLOSSINESS = !1, this.ROUGHNESS = !1, this.EMISSIVEASILLUMINATION = !1, this.LINKEMISSIVEWITHDIFFUSE = !1, this.REFLECTIONFRESNELFROMSPECULAR = !1, this.LIGHTMAP = !1, this.LIGHTMAPDIRECTUV = 0, this.OBJECTSPACE_NORMALMAP = !1, this.USELIGHTMAPASSHADOWMAP = !1, this.REFLECTIONMAP_3D = !1, this.REFLECTIONMAP_SPHERICAL = !1, this.REFLECTIONMAP_PLANAR = !1, this.REFLECTIONMAP_CUBIC 
= !1, this.USE_LOCAL_REFLECTIONMAP_CUBIC = !1, this.USE_LOCAL_REFRACTIONMAP_CUBIC = !1, this.REFLECTIONMAP_PROJECTION = !1, this.REFLECTIONMAP_SKYBOX = !1, this.REFLECTIONMAP_EXPLICIT = !1, this.REFLECTIONMAP_EQUIRECTANGULAR = !1, this.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, this.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, this.REFLECTIONMAP_OPPOSITEZ = !1, this.INVERTCUBICMAP = !1, this.LOGARITHMICDEPTH = !1, this.REFRACTION = !1, this.REFRACTIONMAP_3D = !1, this.REFLECTIONOVERALPHA = !1, this.TWOSIDEDLIGHTING = !1, this.SHADOWFLOAT = !1, this.MORPHTARGETS = !1, this.MORPHTARGETS_NORMAL = !1, this.MORPHTARGETS_TANGENT = !1, this.MORPHTARGETS_UV = !1, this.NUM_MORPH_INFLUENCERS = 0, this.MORPHTARGETS_TEXTURE = !1, this.NONUNIFORMSCALING = !1, this.PREMULTIPLYALPHA = !1, this.ALPHATEST_AFTERALLALPHACOMPUTATIONS = !1, this.ALPHABLEND = !0, this.PREPASS = !1, this.PREPASS_IRRADIANCE = !1, this.PREPASS_IRRADIANCE_INDEX = -1, this.PREPASS_ALBEDO_SQRT = !1, this.PREPASS_ALBEDO_SQRT_INDEX = -1, this.PREPASS_DEPTH = !1, this.PREPASS_DEPTH_INDEX = -1, this.PREPASS_NORMAL = !1, this.PREPASS_NORMAL_INDEX = -1, this.PREPASS_NORMAL_WORLDSPACE = !1, this.PREPASS_POSITION = !1, this.PREPASS_POSITION_INDEX = -1, this.PREPASS_VELOCITY = !1, this.PREPASS_VELOCITY_INDEX = -1, this.PREPASS_REFLECTIVITY = !1, this.PREPASS_REFLECTIVITY_INDEX = -1, this.SCENE_MRT_COUNT = 0, this.RGBDLIGHTMAP = !1, this.RGBDREFLECTION = !1, this.RGBDREFRACTION = !1, this.IMAGEPROCESSING = !1, this.VIGNETTE = !1, this.VIGNETTEBLENDMODEMULTIPLY = !1, this.VIGNETTEBLENDMODEOPAQUE = !1, this.TONEMAPPING = !1, this.TONEMAPPING_ACES = !1, this.CONTRAST = !1, this.COLORCURVES = !1, this.COLORGRADING = !1, this.COLORGRADING3D = !1, this.SAMPLER3DGREENDEPTH = !1, this.SAMPLER3DBGRMAP = !1, this.DITHER = !1, this.IMAGEPROCESSINGPOSTPROCESS = !1, this.SKIPFINALCOLORCLAMP = !1, this.MULTIVIEW = !1, this.ORDER_INDEPENDENT_TRANSPARENCY = !1, this.ORDER_INDEPENDENT_TRANSPARENCY_16BITS = !1, this.CAMERA_ORTHOGRAPHIC = !1, this.CAMERA_PERSPECTIVE = !1, this.IS_REFLECTION_LINEAR = !1, this.IS_REFRACTION_LINEAR = !1, this.EXPOSURE = !1, this.DECAL_AFTER_DETAIL = !1, this.rebuild(); } setReflectionMode(e) { const t = [ "REFLECTIONMAP_CUBIC", "REFLECTIONMAP_EXPLICIT", "REFLECTIONMAP_PLANAR", "REFLECTIONMAP_PROJECTION", "REFLECTIONMAP_PROJECTION", "REFLECTIONMAP_SKYBOX", "REFLECTIONMAP_SPHERICAL", "REFLECTIONMAP_EQUIRECTANGULAR", "REFLECTIONMAP_EQUIRECTANGULAR_FIXED", "REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED" ]; for (const i of t) this[i] = i === e; } } class Dt extends fl { /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. * * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { this._attachImageProcessingConfiguration(e), this._markAllSubMeshesAsTexturesDirty(); } /** * Attaches a new image processing configuration to the Standard Material. * @param configuration */ _attachImageProcessingConfiguration(e) { e !== this._imageProcessingConfiguration && (this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), e ? 
this._imageProcessingConfiguration = e : this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration, this._imageProcessingConfiguration && (this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => { this._markAllSubMeshesAsImageProcessingDirty(); }))); } /** * Can this material render to prepass */ get isPrePassCapable() { return !this.disableDepthWrite; } /** * Gets whether the color curves effect is enabled. */ get cameraColorCurvesEnabled() { return this.imageProcessingConfiguration.colorCurvesEnabled; } /** * Sets whether the color curves effect is enabled. */ set cameraColorCurvesEnabled(e) { this.imageProcessingConfiguration.colorCurvesEnabled = e; } /** * Gets whether the color grading effect is enabled. */ get cameraColorGradingEnabled() { return this.imageProcessingConfiguration.colorGradingEnabled; } /** * Gets whether the color grading effect is enabled. */ set cameraColorGradingEnabled(e) { this.imageProcessingConfiguration.colorGradingEnabled = e; } /** * Gets whether tonemapping is enabled or not. */ get cameraToneMappingEnabled() { return this._imageProcessingConfiguration.toneMappingEnabled; } /** * Sets whether tonemapping is enabled or not */ set cameraToneMappingEnabled(e) { this._imageProcessingConfiguration.toneMappingEnabled = e; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. */ get cameraExposure() { return this._imageProcessingConfiguration.exposure; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. */ set cameraExposure(e) { this._imageProcessingConfiguration.exposure = e; } /** * Gets The camera contrast used on this material. */ get cameraContrast() { return this._imageProcessingConfiguration.contrast; } /** * Sets The camera contrast used on this material. */ set cameraContrast(e) { this._imageProcessingConfiguration.contrast = e; } /** * Gets the Color Grading 2D Lookup Texture. */ get cameraColorGradingTexture() { return this._imageProcessingConfiguration.colorGradingTexture; } /** * Sets the Color Grading 2D Lookup Texture. */ set cameraColorGradingTexture(e) { this._imageProcessingConfiguration.colorGradingTexture = e; } /** * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. */ get cameraColorCurves() { return this._imageProcessingConfiguration.colorCurves; } /** * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. 
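* @example
* // Hypothetical usage sketch (public Babylon.js API names, not the minified identifiers in this bundle):
* //   const curves = new BABYLON.ColorCurves();
* //   curves.globalSaturation = 20;
* //   material.cameraColorCurves = curves;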
* These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. */ set cameraColorCurves(e) { this._imageProcessingConfiguration.colorCurves = e; } /** * Can this material render to several textures at once */ get canRenderToMRT() { return !0; } /** * Instantiates a new standard material. * This is the default material used in Babylon. It is the best trade off between quality * and performances. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/materials_introduction * @param name Define the name of the material in the scene * @param scene Define the scene the material belong to */ constructor(e, t) { super(e, t), this._diffuseTexture = null, this._ambientTexture = null, this._opacityTexture = null, this._reflectionTexture = null, this._emissiveTexture = null, this._specularTexture = null, this._bumpTexture = null, this._lightmapTexture = null, this._refractionTexture = null, this.ambientColor = new ze(0, 0, 0), this.diffuseColor = new ze(1, 1, 1), this.specularColor = new ze(1, 1, 1), this.emissiveColor = new ze(0, 0, 0), this.specularPower = 64, this._useAlphaFromDiffuseTexture = !1, this._useEmissiveAsIllumination = !1, this._linkEmissiveWithDiffuse = !1, this._useSpecularOverAlpha = !1, this._useReflectionOverAlpha = !1, this._disableLighting = !1, this._useObjectSpaceNormalMap = !1, this._useParallax = !1, this._useParallaxOcclusion = !1, this.parallaxScaleBias = 0.05, this._roughness = 0, this.indexOfRefraction = 0.98, this.invertRefractionY = !0, this.alphaCutOff = 0.4, this._useLightmapAsShadowmap = !1, this._useReflectionFresnelFromSpecular = !1, this._useGlossinessFromSpecularMapAlpha = !1, this._maxSimultaneousLights = 4, this._invertNormalMapX = !1, this._invertNormalMapY = !1, this._twoSidedLighting = !1, this._applyDecalMapAfterDetailMap = !1, this._renderTargets = new xc(16), this._worldViewProjectionMatrix = Ae.Zero(), this._globalAmbientColor = new ze(0, 0, 0), this._cacheHasRenderTargetTextures = !1, this.detailMap = new dx(this), this._attachImageProcessingConfiguration(null), this.prePassConfiguration = new lB(), this.getRenderTargetTextures = () => (this._renderTargets.reset(), Dt.ReflectionTextureEnabled && this._reflectionTexture && this._reflectionTexture.isRenderTarget && this._renderTargets.push(this._reflectionTexture), Dt.RefractionTextureEnabled && this._refractionTexture && this._refractionTexture.isRenderTarget && this._renderTargets.push(this._refractionTexture), this._eventInfo.renderTargets = this._renderTargets, this._callbackPluginEventFillRenderTargetTextures(this._eventInfo), this._renderTargets); } /** * Gets a boolean indicating that current material needs to register RTT */ get hasRenderTargetTextures() { return Dt.ReflectionTextureEnabled && this._reflectionTexture && this._reflectionTexture.isRenderTarget || Dt.RefractionTextureEnabled && this._refractionTexture && this._refractionTexture.isRenderTarget ? !0 : this._cacheHasRenderTargetTextures; } /** * Gets the current class name of the material e.g. "StandardMaterial" * Mainly use in serialization. 
* @returns the class name */ getClassName() { return "StandardMaterial"; } /** * Specifies if the material will require alpha blending * @returns a boolean specifying if alpha blending is needed */ needAlphaBlending() { return this._disableAlphaBlending ? !1 : this.alpha < 1 || this._opacityTexture != null || this._shouldUseAlphaFromDiffuseTexture() || this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled; } /** * Specifies if this material should be rendered in alpha test mode * @returns a boolean specifying if an alpha test is needed. */ needAlphaTesting() { return this._forceAlphaTest ? !0 : this._hasAlphaChannel() && (this._transparencyMode == null || this._transparencyMode === At.MATERIAL_ALPHATEST); } /** * Specifies whether or not the alpha value of the diffuse texture should be used for alpha blending. */ _shouldUseAlphaFromDiffuseTexture() { return this._diffuseTexture != null && this._diffuseTexture.hasAlpha && this._useAlphaFromDiffuseTexture && this._transparencyMode !== At.MATERIAL_OPAQUE; } /** * Specifies whether or not there is a usable alpha channel for transparency. */ _hasAlphaChannel() { return this._diffuseTexture != null && this._diffuseTexture.hasAlpha || this._opacityTexture != null; } /** * Get the texture used for alpha test purpose. * @returns the diffuse texture in case of the standard material. */ getAlphaTestTexture() { return this._diffuseTexture; } /** * Get if the submesh is ready to be used and all its information available. * Child classes can use it to update shaders * @param mesh defines the mesh to check * @param subMesh defines which submesh to check * @param useInstances specifies that instances should be used * @returns a boolean indicating that the submesh is ready or not */ isReadyForSubMesh(e, t, i = !1) { if (this._uniformBufferLayoutBuilt || this.buildUniformLayout(), t.effect && this.isFrozen && t.effect._wasPreviouslyReady && t.effect._wasPreviouslyUsingInstances === i) return !0; t.materialDefines || (this._callbackPluginEventGeneric(xh.GetDefineNames, this._eventInfo), t.materialDefines = new _ie(this._eventInfo.defineNames)); const r = this.getScene(), s = t.materialDefines; if (this._isReadyForSubMesh(t)) return !0; const n = r.getEngine(); s._needNormals = Ke.PrepareDefinesForLights(r, e, s, !0, this._maxSimultaneousLights, this._disableLighting), Ke.PrepareDefinesForMultiview(r, s); const a = this.needAlphaBlendingForMesh(e) && this.getScene().useOrderIndependentTransparency; if (Ke.PrepareDefinesForPrePass(r, s, this.canRenderToMRT && !a), Ke.PrepareDefinesForOIT(r, s, a), s._areTexturesDirty) { this._eventInfo.hasRenderTargetTextures = !1, this._callbackPluginEventHasRenderTargetTextures(this._eventInfo), this._cacheHasRenderTargetTextures = this._eventInfo.hasRenderTargetTextures, s._needUVs = !1; for (let o = 1; o <= 6; ++o) s["MAINUV" + o] = !1; if (r.texturesEnabled) { if (s.DIFFUSEDIRECTUV = 0, s.BUMPDIRECTUV = 0, s.AMBIENTDIRECTUV = 0, s.OPACITYDIRECTUV = 0, s.EMISSIVEDIRECTUV = 0, s.SPECULARDIRECTUV = 0, s.LIGHTMAPDIRECTUV = 0, this._diffuseTexture && Dt.DiffuseTextureEnabled) if (this._diffuseTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._diffuseTexture, s, "DIFFUSE"); else return !1; else s.DIFFUSE = !1; if (this._ambientTexture && Dt.AmbientTextureEnabled) if (this._ambientTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._ambientTexture, s, "AMBIENT"); else return !1; else s.AMBIENT = !1; if (this._opacityTexture && Dt.OpacityTextureEnabled) if 
(this._opacityTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._opacityTexture, s, "OPACITY"), s.OPACITYRGB = this._opacityTexture.getAlphaFromRGB; else return !1; else s.OPACITY = !1; if (this._reflectionTexture && Dt.ReflectionTextureEnabled) if (this._reflectionTexture.isReadyOrNotBlocking()) { switch (s._needNormals = !0, s.REFLECTION = !0, s.ROUGHNESS = this._roughness > 0, s.REFLECTIONOVERALPHA = this._useReflectionOverAlpha, s.INVERTCUBICMAP = this._reflectionTexture.coordinatesMode === De.INVCUBIC_MODE, s.REFLECTIONMAP_3D = this._reflectionTexture.isCube, s.REFLECTIONMAP_OPPOSITEZ = s.REFLECTIONMAP_3D && this.getScene().useRightHandedSystem ? !this._reflectionTexture.invertZ : this._reflectionTexture.invertZ, s.RGBDREFLECTION = this._reflectionTexture.isRGBD, this._reflectionTexture.coordinatesMode) { case De.EXPLICIT_MODE: s.setReflectionMode("REFLECTIONMAP_EXPLICIT"); break; case De.PLANAR_MODE: s.setReflectionMode("REFLECTIONMAP_PLANAR"); break; case De.PROJECTION_MODE: s.setReflectionMode("REFLECTIONMAP_PROJECTION"); break; case De.SKYBOX_MODE: s.setReflectionMode("REFLECTIONMAP_SKYBOX"); break; case De.SPHERICAL_MODE: s.setReflectionMode("REFLECTIONMAP_SPHERICAL"); break; case De.EQUIRECTANGULAR_MODE: s.setReflectionMode("REFLECTIONMAP_EQUIRECTANGULAR"); break; case De.FIXED_EQUIRECTANGULAR_MODE: s.setReflectionMode("REFLECTIONMAP_EQUIRECTANGULAR_FIXED"); break; case De.FIXED_EQUIRECTANGULAR_MIRRORED_MODE: s.setReflectionMode("REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED"); break; case De.CUBIC_MODE: case De.INVCUBIC_MODE: default: s.setReflectionMode("REFLECTIONMAP_CUBIC"); break; } s.USE_LOCAL_REFLECTIONMAP_CUBIC = !!this._reflectionTexture.boundingBoxSize; } else return !1; else s.REFLECTION = !1, s.REFLECTIONMAP_OPPOSITEZ = !1; if (this._emissiveTexture && Dt.EmissiveTextureEnabled) if (this._emissiveTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._emissiveTexture, s, "EMISSIVE"); else return !1; else s.EMISSIVE = !1; if (this._lightmapTexture && Dt.LightmapTextureEnabled) if (this._lightmapTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._lightmapTexture, s, "LIGHTMAP"), s.USELIGHTMAPASSHADOWMAP = this._useLightmapAsShadowmap, s.RGBDLIGHTMAP = this._lightmapTexture.isRGBD; else return !1; else s.LIGHTMAP = !1; if (this._specularTexture && Dt.SpecularTextureEnabled) if (this._specularTexture.isReadyOrNotBlocking()) Ke.PrepareDefinesForMergedUV(this._specularTexture, s, "SPECULAR"), s.GLOSSINESS = this._useGlossinessFromSpecularMapAlpha; else return !1; else s.SPECULAR = !1; if (r.getEngine().getCaps().standardDerivatives && this._bumpTexture && Dt.BumpTextureEnabled) { if (this._bumpTexture.isReady()) Ke.PrepareDefinesForMergedUV(this._bumpTexture, s, "BUMP"), s.PARALLAX = this._useParallax, s.PARALLAX_RHS = r.useRightHandedSystem, s.PARALLAXOCCLUSION = this._useParallaxOcclusion; else return !1; s.OBJECTSPACE_NORMALMAP = this._useObjectSpaceNormalMap; } else s.BUMP = !1, s.PARALLAX = !1, s.PARALLAX_RHS = !1, s.PARALLAXOCCLUSION = !1; if (this._refractionTexture && Dt.RefractionTextureEnabled) if (this._refractionTexture.isReadyOrNotBlocking()) s._needUVs = !0, s.REFRACTION = !0, s.REFRACTIONMAP_3D = this._refractionTexture.isCube, s.RGBDREFRACTION = this._refractionTexture.isRGBD, s.USE_LOCAL_REFRACTIONMAP_CUBIC = !!this._refractionTexture.boundingBoxSize; else return !1; else s.REFRACTION = !1; s.TWOSIDEDLIGHTING = !this._backFaceCulling && this._twoSidedLighting; } else s.DIFFUSE = !1, s.AMBIENT = !1, s.OPACITY = !1, 
s.REFLECTION = !1, s.EMISSIVE = !1, s.LIGHTMAP = !1, s.BUMP = !1, s.REFRACTION = !1; s.ALPHAFROMDIFFUSE = this._shouldUseAlphaFromDiffuseTexture(), s.EMISSIVEASILLUMINATION = this._useEmissiveAsIllumination, s.LINKEMISSIVEWITHDIFFUSE = this._linkEmissiveWithDiffuse, s.SPECULAROVERALPHA = this._useSpecularOverAlpha, s.PREMULTIPLYALPHA = this.alphaMode === 7 || this.alphaMode === 8, s.ALPHATEST_AFTERALLALPHACOMPUTATIONS = this.transparencyMode !== null, s.ALPHABLEND = this.transparencyMode === null || this.needAlphaBlendingForMesh(e); } if (this._eventInfo.isReadyForSubMesh = !0, this._eventInfo.defines = s, this._eventInfo.subMesh = t, this._callbackPluginEventIsReadyForSubMesh(this._eventInfo), !this._eventInfo.isReadyForSubMesh) return !1; if (s._areImageProcessingDirty && this._imageProcessingConfiguration) { if (!this._imageProcessingConfiguration.isReady()) return !1; this._imageProcessingConfiguration.prepareDefines(s), s.IS_REFLECTION_LINEAR = this.reflectionTexture != null && !this.reflectionTexture.gammaSpace, s.IS_REFRACTION_LINEAR = this.refractionTexture != null && !this.refractionTexture.gammaSpace; } s._areFresnelDirty && (Dt.FresnelEnabled ? (this._diffuseFresnelParameters || this._opacityFresnelParameters || this._emissiveFresnelParameters || this._refractionFresnelParameters || this._reflectionFresnelParameters) && (s.DIFFUSEFRESNEL = this._diffuseFresnelParameters && this._diffuseFresnelParameters.isEnabled, s.OPACITYFRESNEL = this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled, s.REFLECTIONFRESNEL = this._reflectionFresnelParameters && this._reflectionFresnelParameters.isEnabled, s.REFLECTIONFRESNELFROMSPECULAR = this._useReflectionFresnelFromSpecular, s.REFRACTIONFRESNEL = this._refractionFresnelParameters && this._refractionFresnelParameters.isEnabled, s.EMISSIVEFRESNEL = this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled, s._needNormals = !0, s.FRESNEL = !0) : s.FRESNEL = !1), Ke.PrepareDefinesForMisc(e, r, this._useLogarithmicDepth, this.pointsCloud, this.fogEnabled, this._shouldTurnAlphaTestOn(e) || this._forceAlphaTest, s, this._applyDecalMapAfterDetailMap), Ke.PrepareDefinesForFrameBoundValues(r, n, this, s, i, null, t.getRenderingMesh().hasThinInstances), this._eventInfo.defines = s, this._eventInfo.mesh = e, this._callbackPluginEventPrepareDefinesBeforeAttributes(this._eventInfo), Ke.PrepareDefinesForAttributes(e, s, !0, !0, !0), this._callbackPluginEventPrepareDefines(this._eventInfo); let l = !1; if (s.isDirty) { const o = s._areLightsDisposed; s.markAsProcessed(); const u = new pl(); s.REFLECTION && u.addFallback(0, "REFLECTION"), s.SPECULAR && u.addFallback(0, "SPECULAR"), s.BUMP && u.addFallback(0, "BUMP"), s.PARALLAX && u.addFallback(1, "PARALLAX"), s.PARALLAX_RHS && u.addFallback(1, "PARALLAX_RHS"), s.PARALLAXOCCLUSION && u.addFallback(0, "PARALLAXOCCLUSION"), s.SPECULAROVERALPHA && u.addFallback(0, "SPECULAROVERALPHA"), s.FOG && u.addFallback(1, "FOG"), s.POINTSIZE && u.addFallback(0, "POINTSIZE"), s.LOGARITHMICDEPTH && u.addFallback(0, "LOGARITHMICDEPTH"), Ke.HandleFallbacksForShadows(s, u, this._maxSimultaneousLights), s.SPECULARTERM && u.addFallback(0, "SPECULARTERM"), s.DIFFUSEFRESNEL && u.addFallback(1, "DIFFUSEFRESNEL"), s.OPACITYFRESNEL && u.addFallback(2, "OPACITYFRESNEL"), s.REFLECTIONFRESNEL && u.addFallback(3, "REFLECTIONFRESNEL"), s.EMISSIVEFRESNEL && u.addFallback(4, "EMISSIVEFRESNEL"), s.FRESNEL && u.addFallback(4, "FRESNEL"), s.MULTIVIEW && u.addFallback(0, "MULTIVIEW"); const h = 
[Y.PositionKind]; s.NORMAL && h.push(Y.NormalKind), s.TANGENT && h.push(Y.TangentKind); for (let S = 1; S <= 6; ++S) s["UV" + S] && h.push(`uv${S === 1 ? "" : S}`); s.VERTEXCOLOR && h.push(Y.ColorKind), Ke.PrepareAttributesForBones(h, e, s, u), Ke.PrepareAttributesForInstances(h, s), Ke.PrepareAttributesForMorphTargets(h, e, s), Ke.PrepareAttributesForBakedVertexAnimation(h, e, s); let d = "default"; const f = [ "world", "view", "viewProjection", "vEyePosition", "vLightsType", "vAmbientColor", "vDiffuseColor", "vSpecularColor", "vEmissiveColor", "visibility", "vFogInfos", "vFogColor", "pointSize", "vDiffuseInfos", "vAmbientInfos", "vOpacityInfos", "vReflectionInfos", "vEmissiveInfos", "vSpecularInfos", "vBumpInfos", "vLightmapInfos", "vRefractionInfos", "mBones", "diffuseMatrix", "ambientMatrix", "opacityMatrix", "reflectionMatrix", "emissiveMatrix", "specularMatrix", "bumpMatrix", "normalMatrix", "lightmapMatrix", "refractionMatrix", "diffuseLeftColor", "diffuseRightColor", "opacityParts", "reflectionLeftColor", "reflectionRightColor", "emissiveLeftColor", "emissiveRightColor", "refractionLeftColor", "refractionRightColor", "vReflectionPosition", "vReflectionSize", "vRefractionPosition", "vRefractionSize", "logarithmicDepthConstant", "vTangentSpaceParams", "alphaCutOff", "boneTextureWidth", "morphTargetTextureInfo", "morphTargetTextureIndices" ], p = [ "diffuseSampler", "ambientSampler", "opacitySampler", "reflectionCubeSampler", "reflection2DSampler", "emissiveSampler", "specularSampler", "bumpSampler", "lightmapSampler", "refractionCubeSampler", "refraction2DSampler", "boneSampler", "morphTargets", "oitDepthSampler", "oitFrontColorSampler" ], m = ["Material", "Scene", "Mesh"], _ = { maxSimultaneousLights: this._maxSimultaneousLights, maxSimultaneousMorphTargets: s.NUM_MORPH_INFLUENCERS }; this._eventInfo.fallbacks = u, this._eventInfo.fallbackRank = 0, this._eventInfo.defines = s, this._eventInfo.uniforms = f, this._eventInfo.attributes = h, this._eventInfo.samplers = p, this._eventInfo.uniformBuffersNames = m, this._eventInfo.customCode = void 0, this._eventInfo.mesh = e, this._eventInfo.indexParameters = _, this._callbackPluginEventGeneric(xh.PrepareEffect, this._eventInfo), lB.AddUniforms(f), Ds && (Ds.PrepareUniforms(f, s), Ds.PrepareSamplers(p, s)), Ke.PrepareUniformsAndSamplersList({ uniformsNames: f, uniformBuffersNames: m, samplers: p, defines: s, maxSimultaneousLights: this._maxSimultaneousLights }), Gc(f); const v = {}; this.customShaderNameResolve && (d = this.customShaderNameResolve(d, f, m, p, s, h, v)); const C = s.toString(), x = t.effect; let b = r.getEngine().createEffect(d, { attributes: h, uniformsNames: f, uniformBuffersNames: m, samplers: p, defines: C, fallbacks: u, onCompiled: this.onCompiled, onError: this.onError, indexParameters: _, processFinalCode: v.processFinalCode, processCodeAfterIncludes: this._eventInfo.customCode, multiTarget: s.PREPASS }, n); if (this._eventInfo.customCode = void 0, b) if (this._onEffectCreatedObservable && (Vk.effect = b, Vk.subMesh = t, this._onEffectCreatedObservable.notifyObservers(Vk)), this.allowShaderHotSwapping && x && !b.isReady()) { if (b = x, s.markAsUnprocessed(), l = this.isFrozen, o) return s._areLightsDisposed = !0, !1; } else r.resetCachedMaterial(), t.setEffect(b, s, this._materialContext); } return !t.effect || !t.effect.isReady() ? 
!1 : (s._renderId = r.getRenderId(), t.effect._wasPreviouslyReady = !l, t.effect._wasPreviouslyUsingInstances = i, this._checkScenePerformancePriority(), !0); } /** * Builds the material UBO layouts. * Used internally during the effect preparation. */ buildUniformLayout() { const e = this._uniformBuffer; e.addUniform("diffuseLeftColor", 4), e.addUniform("diffuseRightColor", 4), e.addUniform("opacityParts", 4), e.addUniform("reflectionLeftColor", 4), e.addUniform("reflectionRightColor", 4), e.addUniform("refractionLeftColor", 4), e.addUniform("refractionRightColor", 4), e.addUniform("emissiveLeftColor", 4), e.addUniform("emissiveRightColor", 4), e.addUniform("vDiffuseInfos", 2), e.addUniform("vAmbientInfos", 2), e.addUniform("vOpacityInfos", 2), e.addUniform("vReflectionInfos", 2), e.addUniform("vReflectionPosition", 3), e.addUniform("vReflectionSize", 3), e.addUniform("vEmissiveInfos", 2), e.addUniform("vLightmapInfos", 2), e.addUniform("vSpecularInfos", 2), e.addUniform("vBumpInfos", 3), e.addUniform("diffuseMatrix", 16), e.addUniform("ambientMatrix", 16), e.addUniform("opacityMatrix", 16), e.addUniform("reflectionMatrix", 16), e.addUniform("emissiveMatrix", 16), e.addUniform("lightmapMatrix", 16), e.addUniform("specularMatrix", 16), e.addUniform("bumpMatrix", 16), e.addUniform("vTangentSpaceParams", 2), e.addUniform("pointSize", 1), e.addUniform("alphaCutOff", 1), e.addUniform("refractionMatrix", 16), e.addUniform("vRefractionInfos", 4), e.addUniform("vRefractionPosition", 3), e.addUniform("vRefractionSize", 3), e.addUniform("vSpecularColor", 4), e.addUniform("vEmissiveColor", 3), e.addUniform("vDiffuseColor", 4), e.addUniform("vAmbientColor", 3), super.buildUniformLayout(); } /** * Binds the submesh to this material by preparing the effect and shader to draw * @param world defines the world transformation matrix * @param mesh defines the mesh containing the submesh * @param subMesh defines the submesh to bind the material to */ bindForSubMesh(e, t, i) { var r; const s = this.getScene(), n = i.materialDefines; if (!n) return; const a = i.effect; if (!a) return; this._activeEffect = a, t.getMeshUniformBuffer().bindToEffect(a, "Mesh"), t.transferToEffect(e), this._uniformBuffer.bindToEffect(a, "Material"), this.prePassConfiguration.bindForSubMesh(this._activeEffect, s, t, e, this.isFrozen), this._eventInfo.subMesh = i, this._callbackPluginEventHardBindForSubMesh(this._eventInfo), n.OBJECTSPACE_NORMALMAP && (e.toNormalMatrix(this._normalMatrix), this.bindOnlyNormalMatrix(this._normalMatrix)); const l = a._forceRebindOnNextCall || this._mustRebind(s, a, t.visibility); Ke.BindBonesParameters(t, a); const o = this._uniformBuffer; if (l) { if (this.bindViewProjection(a), !o.useUbo || !this.isFrozen || !o.isSync || a._forceRebindOnNextCall) { if (Dt.FresnelEnabled && n.FRESNEL && (this.diffuseFresnelParameters && this.diffuseFresnelParameters.isEnabled && (o.updateColor4("diffuseLeftColor", this.diffuseFresnelParameters.leftColor, this.diffuseFresnelParameters.power), o.updateColor4("diffuseRightColor", this.diffuseFresnelParameters.rightColor, this.diffuseFresnelParameters.bias)), this.opacityFresnelParameters && this.opacityFresnelParameters.isEnabled && o.updateColor4("opacityParts", new ze(this.opacityFresnelParameters.leftColor.toLuminance(), this.opacityFresnelParameters.rightColor.toLuminance(), this.opacityFresnelParameters.bias), this.opacityFresnelParameters.power), this.reflectionFresnelParameters && this.reflectionFresnelParameters.isEnabled && (o.updateColor4("reflectionLeftColor", 
this.reflectionFresnelParameters.leftColor, this.reflectionFresnelParameters.power), o.updateColor4("reflectionRightColor", this.reflectionFresnelParameters.rightColor, this.reflectionFresnelParameters.bias)), this.refractionFresnelParameters && this.refractionFresnelParameters.isEnabled && (o.updateColor4("refractionLeftColor", this.refractionFresnelParameters.leftColor, this.refractionFresnelParameters.power), o.updateColor4("refractionRightColor", this.refractionFresnelParameters.rightColor, this.refractionFresnelParameters.bias)), this.emissiveFresnelParameters && this.emissiveFresnelParameters.isEnabled && (o.updateColor4("emissiveLeftColor", this.emissiveFresnelParameters.leftColor, this.emissiveFresnelParameters.power), o.updateColor4("emissiveRightColor", this.emissiveFresnelParameters.rightColor, this.emissiveFresnelParameters.bias))), s.texturesEnabled) { if (this._diffuseTexture && Dt.DiffuseTextureEnabled && (o.updateFloat2("vDiffuseInfos", this._diffuseTexture.coordinatesIndex, this._diffuseTexture.level), Ke.BindTextureMatrix(this._diffuseTexture, o, "diffuse")), this._ambientTexture && Dt.AmbientTextureEnabled && (o.updateFloat2("vAmbientInfos", this._ambientTexture.coordinatesIndex, this._ambientTexture.level), Ke.BindTextureMatrix(this._ambientTexture, o, "ambient")), this._opacityTexture && Dt.OpacityTextureEnabled && (o.updateFloat2("vOpacityInfos", this._opacityTexture.coordinatesIndex, this._opacityTexture.level), Ke.BindTextureMatrix(this._opacityTexture, o, "opacity")), this._hasAlphaChannel() && o.updateFloat("alphaCutOff", this.alphaCutOff), this._reflectionTexture && Dt.ReflectionTextureEnabled && (o.updateFloat2("vReflectionInfos", this._reflectionTexture.level, this.roughness), o.updateMatrix("reflectionMatrix", this._reflectionTexture.getReflectionTextureMatrix()), this._reflectionTexture.boundingBoxSize)) { const u = this._reflectionTexture; o.updateVector3("vReflectionPosition", u.boundingBoxPosition), o.updateVector3("vReflectionSize", u.boundingBoxSize); } if (this._emissiveTexture && Dt.EmissiveTextureEnabled && (o.updateFloat2("vEmissiveInfos", this._emissiveTexture.coordinatesIndex, this._emissiveTexture.level), Ke.BindTextureMatrix(this._emissiveTexture, o, "emissive")), this._lightmapTexture && Dt.LightmapTextureEnabled && (o.updateFloat2("vLightmapInfos", this._lightmapTexture.coordinatesIndex, this._lightmapTexture.level), Ke.BindTextureMatrix(this._lightmapTexture, o, "lightmap")), this._specularTexture && Dt.SpecularTextureEnabled && (o.updateFloat2("vSpecularInfos", this._specularTexture.coordinatesIndex, this._specularTexture.level), Ke.BindTextureMatrix(this._specularTexture, o, "specular")), this._bumpTexture && s.getEngine().getCaps().standardDerivatives && Dt.BumpTextureEnabled && (o.updateFloat3("vBumpInfos", this._bumpTexture.coordinatesIndex, 1 / this._bumpTexture.level, this.parallaxScaleBias), Ke.BindTextureMatrix(this._bumpTexture, o, "bump"), s._mirroredCameraPosition ? o.updateFloat2("vTangentSpaceParams", this._invertNormalMapX ? 1 : -1, this._invertNormalMapY ? 1 : -1) : o.updateFloat2("vTangentSpaceParams", this._invertNormalMapX ? -1 : 1, this._invertNormalMapY ? 
-1 : 1)), this._refractionTexture && Dt.RefractionTextureEnabled) { let u = 1; if (this._refractionTexture.isCube || (o.updateMatrix("refractionMatrix", this._refractionTexture.getReflectionTextureMatrix()), this._refractionTexture.depth && (u = this._refractionTexture.depth)), o.updateFloat4("vRefractionInfos", this._refractionTexture.level, this.indexOfRefraction, u, this.invertRefractionY ? -1 : 1), this._refractionTexture.boundingBoxSize) { const h = this._refractionTexture; o.updateVector3("vRefractionPosition", h.boundingBoxPosition), o.updateVector3("vRefractionSize", h.boundingBoxSize); } } } this.pointsCloud && o.updateFloat("pointSize", this.pointSize), n.SPECULARTERM && o.updateColor4("vSpecularColor", this.specularColor, this.specularPower), o.updateColor3("vEmissiveColor", Dt.EmissiveTextureEnabled ? this.emissiveColor : ze.BlackReadOnly), o.updateColor4("vDiffuseColor", this.diffuseColor, this.alpha), s.ambientColor.multiplyToRef(this.ambientColor, this._globalAmbientColor), o.updateColor3("vAmbientColor", this._globalAmbientColor); } s.texturesEnabled && (this._diffuseTexture && Dt.DiffuseTextureEnabled && a.setTexture("diffuseSampler", this._diffuseTexture), this._ambientTexture && Dt.AmbientTextureEnabled && a.setTexture("ambientSampler", this._ambientTexture), this._opacityTexture && Dt.OpacityTextureEnabled && a.setTexture("opacitySampler", this._opacityTexture), this._reflectionTexture && Dt.ReflectionTextureEnabled && (this._reflectionTexture.isCube ? a.setTexture("reflectionCubeSampler", this._reflectionTexture) : a.setTexture("reflection2DSampler", this._reflectionTexture)), this._emissiveTexture && Dt.EmissiveTextureEnabled && a.setTexture("emissiveSampler", this._emissiveTexture), this._lightmapTexture && Dt.LightmapTextureEnabled && a.setTexture("lightmapSampler", this._lightmapTexture), this._specularTexture && Dt.SpecularTextureEnabled && a.setTexture("specularSampler", this._specularTexture), this._bumpTexture && s.getEngine().getCaps().standardDerivatives && Dt.BumpTextureEnabled && a.setTexture("bumpSampler", this._bumpTexture), this._refractionTexture && Dt.RefractionTextureEnabled && (this._refractionTexture.isCube ? a.setTexture("refractionCubeSampler", this._refractionTexture) : a.setTexture("refraction2DSampler", this._refractionTexture))), this.getScene().useOrderIndependentTransparency && this.needAlphaBlendingForMesh(t) && this.getScene().depthPeelingRenderer.bind(a), this._eventInfo.subMesh = i, this._callbackPluginEventBindForSubMesh(this._eventInfo), Ec(a, this, s), this.bindEyePosition(a); } else s.getEngine()._features.needToAlwaysBindUniformBuffers && (this._needToBindSceneUbo = !0); (l || !this.isFrozen) && (s.lightsEnabled && !this._disableLighting && Ke.BindLights(s, t, a, n, this._maxSimultaneousLights), (s.fogEnabled && t.applyFog && s.fogMode !== ii.FOGMODE_NONE || this._reflectionTexture || this._refractionTexture || t.receiveShadows || n.PREPASS) && this.bindView(a), Ke.BindFogParameters(s, t, a), n.NUM_MORPH_INFLUENCERS && Ke.BindMorphTargetParameters(t, a), n.BAKED_VERTEX_ANIMATION_TEXTURE && ((r = t.bakedVertexAnimationManager) === null || r === void 0 || r.bind(a, n.INSTANCES)), this.useLogarithmicDepth && Ke.BindLogDepth(n, a, s), this._imageProcessingConfiguration && !this._imageProcessingConfiguration.applyByPostProcess && this._imageProcessingConfiguration.bind(this._activeEffect)), this._afterBind(t, this._activeEffect), o.update(); } /** * Get the list of animatables in the material. 
* @returns the list of animatables object used in the material */ getAnimatables() { const e = super.getAnimatables(); return this._diffuseTexture && this._diffuseTexture.animations && this._diffuseTexture.animations.length > 0 && e.push(this._diffuseTexture), this._ambientTexture && this._ambientTexture.animations && this._ambientTexture.animations.length > 0 && e.push(this._ambientTexture), this._opacityTexture && this._opacityTexture.animations && this._opacityTexture.animations.length > 0 && e.push(this._opacityTexture), this._reflectionTexture && this._reflectionTexture.animations && this._reflectionTexture.animations.length > 0 && e.push(this._reflectionTexture), this._emissiveTexture && this._emissiveTexture.animations && this._emissiveTexture.animations.length > 0 && e.push(this._emissiveTexture), this._specularTexture && this._specularTexture.animations && this._specularTexture.animations.length > 0 && e.push(this._specularTexture), this._bumpTexture && this._bumpTexture.animations && this._bumpTexture.animations.length > 0 && e.push(this._bumpTexture), this._lightmapTexture && this._lightmapTexture.animations && this._lightmapTexture.animations.length > 0 && e.push(this._lightmapTexture), this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0 && e.push(this._refractionTexture), e; } /** * Gets the active textures from the material * @returns an array of textures */ getActiveTextures() { const e = super.getActiveTextures(); return this._diffuseTexture && e.push(this._diffuseTexture), this._ambientTexture && e.push(this._ambientTexture), this._opacityTexture && e.push(this._opacityTexture), this._reflectionTexture && e.push(this._reflectionTexture), this._emissiveTexture && e.push(this._emissiveTexture), this._specularTexture && e.push(this._specularTexture), this._bumpTexture && e.push(this._bumpTexture), this._lightmapTexture && e.push(this._lightmapTexture), this._refractionTexture && e.push(this._refractionTexture), e; } /** * Specifies if the material uses a texture * @param texture defines the texture to check against the material * @returns a boolean specifying if the material uses the texture */ hasTexture(e) { return !!(super.hasTexture(e) || this._diffuseTexture === e || this._ambientTexture === e || this._opacityTexture === e || this._reflectionTexture === e || this._emissiveTexture === e || this._specularTexture === e || this._bumpTexture === e || this._lightmapTexture === e || this._refractionTexture === e); } /** * Disposes the material * @param forceDisposeEffect specifies if effects should be forcefully disposed * @param forceDisposeTextures specifies if textures should be forcefully disposed */ dispose(e, t) { var i, r, s, n, a, l, o, u, h; t && ((i = this._diffuseTexture) === null || i === void 0 || i.dispose(), (r = this._ambientTexture) === null || r === void 0 || r.dispose(), (s = this._opacityTexture) === null || s === void 0 || s.dispose(), (n = this._reflectionTexture) === null || n === void 0 || n.dispose(), (a = this._emissiveTexture) === null || a === void 0 || a.dispose(), (l = this._specularTexture) === null || l === void 0 || l.dispose(), (o = this._bumpTexture) === null || o === void 0 || o.dispose(), (u = this._lightmapTexture) === null || u === void 0 || u.dispose(), (h = this._refractionTexture) === null || h === void 0 || h.dispose()), this._imageProcessingConfiguration && this._imageProcessingObserver && 
this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), super.dispose(e, t); } /** * Makes a duplicate of the material, and gives it a new name * @param name defines the new name for the duplicated material * @param cloneTexturesOnlyOnce - if a texture is used in more than one channel (e.g. diffuse and opacity), only clone it once and reuse it on the other channels. Default true. * @param rootUrl defines the root URL to use to load textures * @returns the cloned material */ clone(e, t = !0, i = "") { const r = St.Clone(() => new Dt(e, this.getScene()), this, { cloneTexturesOnlyOnce: t }); return r.name = e, r.id = e, this.stencil.copyTo(r.stencil), this._clonePlugins(r, i), r; } /** * Creates a standard material from parsed material data * @param source defines the JSON representation of the material * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a new standard material */ static Parse(e, t, i) { const r = St.Parse(() => new Dt(e.name, t), e, t, i); return e.stencil && r.stencil.parse(e.stencil, t, i), At._parsePlugins(e, r, t, i), r; } // Flags used to enable or disable a type of texture for all Standard Materials /** * Are diffuse textures enabled in the application. */ static get DiffuseTextureEnabled() { return Tt.DiffuseTextureEnabled; } static set DiffuseTextureEnabled(e) { Tt.DiffuseTextureEnabled = e; } /** * Are detail textures enabled in the application. */ static get DetailTextureEnabled() { return Tt.DetailTextureEnabled; } static set DetailTextureEnabled(e) { Tt.DetailTextureEnabled = e; } /** * Are ambient textures enabled in the application. */ static get AmbientTextureEnabled() { return Tt.AmbientTextureEnabled; } static set AmbientTextureEnabled(e) { Tt.AmbientTextureEnabled = e; } /** * Are opacity textures enabled in the application. */ static get OpacityTextureEnabled() { return Tt.OpacityTextureEnabled; } static set OpacityTextureEnabled(e) { Tt.OpacityTextureEnabled = e; } /** * Are reflection textures enabled in the application. */ static get ReflectionTextureEnabled() { return Tt.ReflectionTextureEnabled; } static set ReflectionTextureEnabled(e) { Tt.ReflectionTextureEnabled = e; } /** * Are emissive textures enabled in the application. */ static get EmissiveTextureEnabled() { return Tt.EmissiveTextureEnabled; } static set EmissiveTextureEnabled(e) { Tt.EmissiveTextureEnabled = e; } /** * Are specular textures enabled in the application. */ static get SpecularTextureEnabled() { return Tt.SpecularTextureEnabled; } static set SpecularTextureEnabled(e) { Tt.SpecularTextureEnabled = e; } /** * Are bump textures enabled in the application. */ static get BumpTextureEnabled() { return Tt.BumpTextureEnabled; } static set BumpTextureEnabled(e) { Tt.BumpTextureEnabled = e; } /** * Are lightmap textures enabled in the application. */ static get LightmapTextureEnabled() { return Tt.LightmapTextureEnabled; } static set LightmapTextureEnabled(e) { Tt.LightmapTextureEnabled = e; } /** * Are refraction textures enabled in the application. */ static get RefractionTextureEnabled() { return Tt.RefractionTextureEnabled; } static set RefractionTextureEnabled(e) { Tt.RefractionTextureEnabled = e; } /** * Are color grading textures enabled in the application. 
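* A hedged usage sketch for these global toggles and the material itself (here "Dt" is this bundle's mangled StandardMaterial identifier, "ze" its Color3, and "scene" an assumed existing scene):
*   const mat = new Dt("myMaterial", scene); // create a standard material
*   mat.diffuseColor = new ze(1, 0, 0);      // plain red diffuse
*   Dt.BumpTextureEnabled = false;           // globally ignore bump textures on standard materials
*   Dt.FresnelEnabled = false;               // globally disable fresnel terms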
*/ static get ColorGradingTextureEnabled() { return Tt.ColorGradingTextureEnabled; } static set ColorGradingTextureEnabled(e) { Tt.ColorGradingTextureEnabled = e; } /** * Are fresnels enabled in the application. */ static get FresnelEnabled() { return Tt.FresnelEnabled; } static set FresnelEnabled(e) { Tt.FresnelEnabled = e; } } F([ er("diffuseTexture") ], Dt.prototype, "_diffuseTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Dt.prototype, "diffuseTexture", void 0); F([ er("ambientTexture") ], Dt.prototype, "_ambientTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "ambientTexture", void 0); F([ er("opacityTexture") ], Dt.prototype, "_opacityTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Dt.prototype, "opacityTexture", void 0); F([ er("reflectionTexture") ], Dt.prototype, "_reflectionTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "reflectionTexture", void 0); F([ er("emissiveTexture") ], Dt.prototype, "_emissiveTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "emissiveTexture", void 0); F([ er("specularTexture") ], Dt.prototype, "_specularTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "specularTexture", void 0); F([ er("bumpTexture") ], Dt.prototype, "_bumpTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "bumpTexture", void 0); F([ er("lightmapTexture") ], Dt.prototype, "_lightmapTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "lightmapTexture", void 0); F([ er("refractionTexture") ], Dt.prototype, "_refractionTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "refractionTexture", void 0); F([ Fs("ambient") ], Dt.prototype, "ambientColor", void 0); F([ Fs("diffuse") ], Dt.prototype, "diffuseColor", void 0); F([ Fs("specular") ], Dt.prototype, "specularColor", void 0); F([ Fs("emissive") ], Dt.prototype, "emissiveColor", void 0); F([ W() ], Dt.prototype, "specularPower", void 0); F([ W("useAlphaFromDiffuseTexture") ], Dt.prototype, "_useAlphaFromDiffuseTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Dt.prototype, "useAlphaFromDiffuseTexture", void 0); F([ W("useEmissiveAsIllumination") ], Dt.prototype, "_useEmissiveAsIllumination", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useEmissiveAsIllumination", void 0); F([ W("linkEmissiveWithDiffuse") ], Dt.prototype, "_linkEmissiveWithDiffuse", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "linkEmissiveWithDiffuse", void 0); F([ W("useSpecularOverAlpha") ], Dt.prototype, "_useSpecularOverAlpha", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useSpecularOverAlpha", void 0); F([ W("useReflectionOverAlpha") ], Dt.prototype, "_useReflectionOverAlpha", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useReflectionOverAlpha", void 0); F([ W("disableLighting") ], Dt.prototype, "_disableLighting", void 0); F([ ct("_markAllSubMeshesAsLightsDirty") ], Dt.prototype, "disableLighting", void 0); F([ W("useObjectSpaceNormalMap") ], Dt.prototype, "_useObjectSpaceNormalMap", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useObjectSpaceNormalMap", void 0); F([ W("useParallax") ], Dt.prototype, "_useParallax", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useParallax", void 0); F([ W("useParallaxOcclusion") ], Dt.prototype, "_useParallaxOcclusion", 
void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useParallaxOcclusion", void 0); F([ W() ], Dt.prototype, "parallaxScaleBias", void 0); F([ W("roughness") ], Dt.prototype, "_roughness", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "roughness", void 0); F([ W() ], Dt.prototype, "indexOfRefraction", void 0); F([ W() ], Dt.prototype, "invertRefractionY", void 0); F([ W() ], Dt.prototype, "alphaCutOff", void 0); F([ W("useLightmapAsShadowmap") ], Dt.prototype, "_useLightmapAsShadowmap", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useLightmapAsShadowmap", void 0); F([ uw("diffuseFresnelParameters") ], Dt.prototype, "_diffuseFresnelParameters", void 0); F([ ct("_markAllSubMeshesAsFresnelDirty") ], Dt.prototype, "diffuseFresnelParameters", void 0); F([ uw("opacityFresnelParameters") ], Dt.prototype, "_opacityFresnelParameters", void 0); F([ ct("_markAllSubMeshesAsFresnelAndMiscDirty") ], Dt.prototype, "opacityFresnelParameters", void 0); F([ uw("reflectionFresnelParameters") ], Dt.prototype, "_reflectionFresnelParameters", void 0); F([ ct("_markAllSubMeshesAsFresnelDirty") ], Dt.prototype, "reflectionFresnelParameters", void 0); F([ uw("refractionFresnelParameters") ], Dt.prototype, "_refractionFresnelParameters", void 0); F([ ct("_markAllSubMeshesAsFresnelDirty") ], Dt.prototype, "refractionFresnelParameters", void 0); F([ uw("emissiveFresnelParameters") ], Dt.prototype, "_emissiveFresnelParameters", void 0); F([ ct("_markAllSubMeshesAsFresnelDirty") ], Dt.prototype, "emissiveFresnelParameters", void 0); F([ W("useReflectionFresnelFromSpecular") ], Dt.prototype, "_useReflectionFresnelFromSpecular", void 0); F([ ct("_markAllSubMeshesAsFresnelDirty") ], Dt.prototype, "useReflectionFresnelFromSpecular", void 0); F([ W("useGlossinessFromSpecularMapAlpha") ], Dt.prototype, "_useGlossinessFromSpecularMapAlpha", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "useGlossinessFromSpecularMapAlpha", void 0); F([ W("maxSimultaneousLights") ], Dt.prototype, "_maxSimultaneousLights", void 0); F([ ct("_markAllSubMeshesAsLightsDirty") ], Dt.prototype, "maxSimultaneousLights", void 0); F([ W("invertNormalMapX") ], Dt.prototype, "_invertNormalMapX", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "invertNormalMapX", void 0); F([ W("invertNormalMapY") ], Dt.prototype, "_invertNormalMapY", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "invertNormalMapY", void 0); F([ W("twoSidedLighting") ], Dt.prototype, "_twoSidedLighting", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Dt.prototype, "twoSidedLighting", void 0); F([ W("applyDecalMapAfterDetailMap") ], Dt.prototype, "_applyDecalMapAfterDetailMap", void 0); F([ ct("_markAllSubMeshesAsMiscDirty") ], Dt.prototype, "applyDecalMapAfterDetailMap", void 0); Be("BABYLON.StandardMaterial", Dt); ii.DefaultMaterialFactory = (c) => new Dt("default material", c); mi.prototype.createDynamicTexture = function(c, e, t, i) { const r = new ln(this, ts.Dynamic); return r.baseWidth = c, r.baseHeight = e, t && (c = this.needPOTTextures ? mi.GetExponentOfTwo(c, this._caps.maxTextureSize) : c, e = this.needPOTTextures ? 
mi.GetExponentOfTwo(e, this._caps.maxTextureSize) : e), r.width = c, r.height = e, r.isReady = !1, r.generateMipMaps = t, r.samplingMode = i, this.updateTextureSamplingMode(i, r), this._internalTexturesCache.push(r), r; }; mi.prototype.updateDynamicTexture = function(c, e, t, i = !1, r, s = !1, n = !1) { if (!c) return; const a = this._gl, l = a.TEXTURE_2D, o = this._bindTextureDirectly(l, c, !0, s); this._unpackFlipY(t === void 0 ? c.invertY : t), i && a.pixelStorei(a.UNPACK_PREMULTIPLY_ALPHA_WEBGL, 1); const u = this._getWebGLTextureType(c.type), h = this._getInternalFormat(r || c.format), d = this._getRGBABufferInternalSizedFormat(c.type, h); a.texImage2D(l, 0, d, h, u, e), c.generateMipMaps && a.generateMipmap(l), o || this._bindTextureDirectly(l, null), i && a.pixelStorei(a.UNPACK_PREMULTIPLY_ALPHA_WEBGL, 0), r && (c.format = r), c._dynamicTextureSource = e, c._premulAlpha = i, c.invertY = t || !1, c.isReady = !0; }; class gg extends De { /** * Creates a DynamicTexture * @param name defines the name of the texture * @param options provides 3 alternatives for the width and height of the texture: a canvas, an object with width and height properties, or a number used for both width and height * @param scene defines the scene where you want the texture * @param generateMipMaps defines the use of MipMaps or not (default is false) * @param samplingMode defines the sampling mode to use (default is Texture.TRILINEAR_SAMPLINGMODE) * @param format defines the texture format to use (default is Engine.TEXTUREFORMAT_RGBA) * @param invertY defines if the texture needs to be inverted on the y axis during loading */ constructor(e, t, i = null, r = !1, s = 3, n = 5, a) { super(null, i, !r, a, s, void 0, void 0, void 0, void 0, n), this.name = e, this.wrapU = De.CLAMP_ADDRESSMODE, this.wrapV = De.CLAMP_ADDRESSMODE, this._generateMipMaps = r; const l = this._getEngine(); if (!l) return; t.getContext ? (this._canvas = t, this._texture = l.createDynamicTexture(t.width, t.height, r, s)) : (this._canvas = l.createCanvas(1, 1), t.width || t.width === 0 ? this._texture = l.createDynamicTexture(t.width, t.height, r, s) : this._texture = l.createDynamicTexture(t, t, r, s)); const o = this.getSize(); this._canvas.width !== o.width && (this._canvas.width = o.width), this._canvas.height !== o.height && (this._canvas.height = o.height), this._context = this._canvas.getContext("2d"); } /** * Get the current class name of the texture useful for serialization or dynamic coding. 
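* A hedged usage sketch for the class above (here "gg" is this bundle's mangled DynamicTexture identifier and "scene" an assumed existing scene):
*   const tex = new gg("label", { width: 512, height: 256 }, scene, false);
*   const ctx = tex.getContext();  // 2D canvas context backing the texture, for custom drawing
*   tex.drawText("Hello", null, null, "bold 44px Arial", "white", "black"); // null x/y centers the text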
* @returns "DynamicTexture" */ getClassName() { return "DynamicTexture"; } /** * Gets the current state of canRescale */ get canRescale() { return !0; } _recreate(e) { this._canvas.width = e.width, this._canvas.height = e.height, this.releaseInternalTexture(), this._texture = this._getEngine().createDynamicTexture(e.width, e.height, this._generateMipMaps, this.samplingMode); } /** * Scales the texture * @param ratio the scale factor to apply to both width and height */ scale(e) { const t = this.getSize(); t.width *= e, t.height *= e, this._recreate(t); } /** * Resizes the texture * @param width the new width * @param height the new height */ scaleTo(e, t) { const i = this.getSize(); i.width = e, i.height = t, this._recreate(i); } /** * Gets the context of the canvas used by the texture * @returns the canvas context of the dynamic texture */ getContext() { return this._context; } /** * Clears the texture * @param clearColor Defines the clear color to use */ clear(e) { const t = this.getSize(); e && (this._context.fillStyle = e), this._context.clearRect(0, 0, t.width, t.height); } /** * Updates the texture * @param invertY defines the direction for the Y axis (default is true - y increases downwards) * @param premulAlpha defines if alpha is stored as premultiplied (default is false) * @param allowGPUOptimization true to allow some specific GPU optimizations (subject to engine feature "allowGPUOptimizationsForGUI" being true) */ update(e, t = !1, i = !1) { this._getEngine().updateDynamicTexture(this._texture, this._canvas, e === void 0 ? !0 : e, t, this._format || void 0, void 0, i); } /** * Draws text onto the texture * @param text defines the text to be drawn * @param x defines the placement of the text from the left * @param y defines the placement of the text from the top when invertY is true and from the bottom when false * @param font defines the font to be used with font-style, font-size, font-name * @param color defines the color used for the text * @param fillColor defines the color for the canvas, use null to not overwrite canvas (this bleands with the background to replace, use the clear function) * @param invertY defines the direction for the Y axis (default is true - y increases downwards) * @param update defines whether texture is immediately update (default is true) */ drawText(e, t, i, r, s, n, a, l = !0) { const o = this.getSize(); if (n && (this._context.fillStyle = n, this._context.fillRect(0, 0, o.width, o.height)), this._context.font = r, t == null) { const u = this._context.measureText(e); t = (o.width - u.width) / 2; } if (i == null) { const u = parseInt(r.replace(/\D/g, "")); i = o.height / 2 + u / 3.65; } this._context.fillStyle = s || "", this._context.fillText(e, t, i), l && this.update(a); } /** * Clones the texture * @returns the clone of the texture. */ clone() { const e = this.getScene(); if (!e) return this; const t = this.getSize(), i = new gg(this.name, t, e, this._generateMipMaps); return i.hasAlpha = this.hasAlpha, i.level = this.level, i.wrapU = this.wrapU, i.wrapV = this.wrapV, i; } /** * Serializes the dynamic texture. 
The scene should be ready before the dynamic texture is serialized * @returns a serialized dynamic texture object */ serialize() { const e = this.getScene(); e && !e.isReady() && Ce.Warn("The scene must be ready before serializing the dynamic texture"); const t = super.serialize(); return gg._IsCanvasElement(this._canvas) && (t.base64String = this._canvas.toDataURL()), t.invertY = this._invertY, t.samplingMode = this.samplingMode, t; } static _IsCanvasElement(e) { return e.toDataURL !== void 0; } /** @internal */ _rebuild() { this.update(); } } class LK { /** * Check if fixed foveation is supported on this device */ get isFixedFoveationSupported() { return this.layerType == "XRWebGLLayer" && typeof this.layer.fixedFoveation == "number"; } /** * Get the fixed foveation currently set, as specified by the webxr specs * If this returns null, then fixed foveation is not supported */ get fixedFoveation() { return this.isFixedFoveationSupported ? this.layer.fixedFoveation : null; } /** * Set the fixed foveation to the specified value, as specified by the webxr specs * This value will be normalized to be between 0 and 1, 1 being max foveation, 0 being no foveation */ set fixedFoveation(e) { if (this.isFixedFoveationSupported) { const t = Math.max(0, Math.min(1, e || 0)); this.layer.fixedFoveation = t; } } constructor(e, t, i, r, s) { this.getWidth = e, this.getHeight = t, this.layer = i, this.layerType = r, this.createRenderTargetTextureProvider = s; } } class NK { constructor(e, t) { this._scene = e, this.layerWrapper = t, this._renderTargetTextures = new Array(), this._engine = e.getEngine(); } _createInternalTexture(e, t) { const i = new ln(this._engine, ts.Unknown, !0); return i.width = e.width, i.height = e.height, i._hardwareTexture = new BI(t, this._engine._gl), i.isReady = !0, i; } _createRenderTargetTexture(e, t, i, r, s, n) { if (!this._engine) throw new Error("Engine is disposed"); const a = { width: e, height: t }, l = n ? new gH(this._scene, a) : new ra("XR renderTargetTexture", a, this._scene), o = l.renderTarget; if (o._samples = l.samples, (i || !r) && (o._framebuffer = i), r) if (n) o._colorTextureArray = r; else { const u = this._createInternalTexture(a, r); o.setTexture(u, 0), l._texture = u; } return s && (n ? o._depthStencilTextureArray = s : o._depthStencilTexture = this._createInternalTexture(a, s)), l.disableRescaling(), typeof XRWebGLBinding < "u" && (l.skipInitialClear = !0), this._renderTargetTextures.push(l), l; } _destroyRenderTargetTexture(e) { this._renderTargetTextures.splice(this._renderTargetTextures.indexOf(e), 1), e.dispose(); } getFramebufferDimensions() { return this._framebufferDimensions; } dispose() { this._renderTargetTextures.forEach((e) => e.dispose()), this._renderTargetTextures.length = 0; } } class FK extends LK { /** * @param layer is the layer to be wrapped. * @returns a new WebXRLayerWrapper wrapping the provided XRWebGLLayer. 
*/ constructor(e) { super(() => e.framebufferWidth, () => e.framebufferHeight, e, "XRWebGLLayer", (t) => new Rde(t.scene, this)), this.layer = e; } } class Rde extends NK { constructor(e, t) { super(e, t), this.layerWrapper = t, this._layer = t.layer, this._framebufferDimensions = { framebufferWidth: this._layer.framebufferWidth, framebufferHeight: this._layer.framebufferHeight }; } trySetViewportForView(e, t) { const i = this._layer.getViewport(t); if (!i) return !1; const r = this._framebufferDimensions.framebufferWidth, s = this._framebufferDimensions.framebufferHeight; return e.x = i.x / r, e.y = i.y / s, e.width = i.width / r, e.height = i.height / s, !0; } // eslint-disable-next-line @typescript-eslint/no-unused-vars getRenderTargetTextureForEye(e) { const t = this._layer.framebufferWidth, i = this._layer.framebufferHeight, r = this._layer.framebuffer; return (!this._rtt || t !== this._framebufferDimensions.framebufferWidth || i !== this._framebufferDimensions.framebufferHeight || r !== this._framebuffer) && (this._rtt = this._createRenderTargetTexture(t, i, r), this._framebufferDimensions.framebufferWidth = t, this._framebufferDimensions.framebufferHeight = i, this._framebuffer = r), this._rtt; } getRenderTargetTextureForView(e) { return this.getRenderTargetTextureForEye(e.eye); } } class tN { /** * Get the default values of the configuration object * @param engine defines the engine to use (can be null) * @returns default values of this configuration object */ static GetDefaults(e) { const t = new tN(); return t.canvasOptions = { antialias: !0, depth: !0, stencil: e ? e.isStencilEnable : !0, alpha: !0, framebufferScaleFactor: 1 }, t.newCanvasCssStyle = "position:absolute; bottom:0px;right:0px;z-index:10;width:90%;height:100%;background-color: #000000;", t; } } class mie { /** * Initializes the canvas to be added/removed upon entering/exiting xr * @param _xrSessionManager The XR Session manager * @param _options optional configuration for this canvas output. Defaults will be used if not provided */ constructor(e, t = tN.GetDefaults()) { if (this._options = t, this._canvas = null, this._engine = null, this.xrLayer = null, this._xrLayerWrapper = null, this.onXRLayerInitObservable = new Fe(), this._engine = e.scene.getEngine(), this._engine.onDisposeObservable.addOnce(() => { this._engine = null; }), t.canvasElement) this._setManagedOutputCanvas(t.canvasElement); else { const i = document.createElement("canvas"); i.style.cssText = this._options.newCanvasCssStyle || "position:absolute; bottom:0px;right:0px;", this._setManagedOutputCanvas(i); } e.onXRSessionInit.add(() => { this._addCanvas(); }), e.onXRSessionEnded.add(() => { this._removeCanvas(); }); } /** * Disposes of the object */ dispose() { this._removeCanvas(), this._setManagedOutputCanvas(null); } /** * Initializes an XRWebGLLayer to be used as the session's baseLayer. * @param xrSession xr session * @returns a promise that will resolve once the XR Layer has been created */ async initializeXRLayerAsync(e) { const t = () => (this.xrLayer = new XRWebGLLayer(e, this.canvasContext, this._options.canvasOptions), this._xrLayerWrapper = new FK(this.xrLayer), this.onXRLayerInitObservable.notifyObservers(this.xrLayer), this.xrLayer); return this.canvasContext.makeXRCompatible ? this.canvasContext.makeXRCompatible().then( // catch any error and continue. When using the emulator it throws this error for no apparent reason. () => { }, () => { Ve.Warn("Error executing makeXRCompatible. 
This does not mean that the session will work incorrectly."); } ).then(() => t()) : Promise.resolve(t()); } _addCanvas() { this._canvas && this._engine && this._canvas !== this._engine.getRenderingCanvas() && document.body.appendChild(this._canvas), this.xrLayer ? this._setCanvasSize(!0) : this.onXRLayerInitObservable.addOnce(() => { this._setCanvasSize(!0); }); } _removeCanvas() { this._canvas && this._engine && document.body.contains(this._canvas) && this._canvas !== this._engine.getRenderingCanvas() && document.body.removeChild(this._canvas), this._setCanvasSize(!1); } _setCanvasSize(e = !0, t = this._xrLayerWrapper) { !this._canvas || !this._engine || (e ? t && (this._canvas !== this._engine.getRenderingCanvas() ? (this._canvas.style.width = t.getWidth() + "px", this._canvas.style.height = t.getHeight() + "px") : this._engine.setSize(t.getWidth(), t.getHeight())) : this._originalCanvasSize && (this._canvas !== this._engine.getRenderingCanvas() ? (this._canvas.style.width = this._originalCanvasSize.width + "px", this._canvas.style.height = this._originalCanvasSize.height + "px") : this._engine.setSize(this._originalCanvasSize.width, this._originalCanvasSize.height))); } _setManagedOutputCanvas(e) { this._removeCanvas(), e ? (this._originalCanvasSize = { width: e.offsetWidth, height: e.offsetHeight }, this._canvas = e, this.canvasContext = this._canvas.getContext("webgl2"), this.canvasContext || (this.canvasContext = this._canvas.getContext("webgl"))) : (this._canvas = null, this.canvasContext = null); } } class gie extends LK { constructor(e) { super(() => e.framebufferWidth, () => e.framebufferHeight, e, "XRWebGLLayer", (t) => new vie(t, this)), this.layer = e; } } class vie extends NK { constructor(e, t) { super(e.scene, t), this.layerWrapper = t, this._nativeRTTProvider = navigator.xr.getNativeRenderTargetProvider(e.session, this._createRenderTargetTexture.bind(this), this._destroyRenderTargetTexture.bind(this)), this._nativeLayer = t.layer; } trySetViewportForView(e) { return e.x = 0, e.y = 0, e.width = 1, e.height = 1, !0; } getRenderTargetTextureForEye(e) { return this._nativeRTTProvider.getRenderTargetForEye(e); } getRenderTargetTextureForView(e) { return this._nativeRTTProvider.getRenderTargetForEye(e.eye); } getFramebufferDimensions() { return { framebufferWidth: this._nativeLayer.framebufferWidth, framebufferHeight: this._nativeLayer.framebufferHeight }; } } class Aie { constructor(e) { this._nativeRenderTarget = navigator.xr.getWebXRRenderTarget(e.scene.getEngine()); } async initializeXRLayerAsync(e) { return await this._nativeRenderTarget.initializeXRLayerAsync(e), this.xrLayer = this._nativeRenderTarget.xrLayer, this.xrLayer; } dispose() { } } class iN { /** * Constructs a WebXRSessionManager, this must be initialized within a user action before usage * @param scene The scene which the session should be created for */ constructor(e) { this.scene = e, this.currentTimestamp = -1, this.defaultHeightCompensation = 1.7, this.onXRFrameObservable = new Fe(), this.onXRReferenceSpaceChanged = new Fe(), this.onXRSessionEnded = new Fe(), this.onXRSessionInit = new Fe(), this.inXRFrameLoop = !1, this.inXRSession = !1, this._engine = e.getEngine(), this._onEngineDisposedObserver = this._engine.onDisposeObservable.addOnce(() => { this._engine = null; }), e.onDisposeObservable.addOnce(() => { this.dispose(); }); } /** * The current reference space used in this session. This reference space can constantly change! * It is mainly used to offset the camera's position. 
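* A hedged sketch of offsetting it (XRRigidTransform and getOffsetReferenceSpace are standard WebXR APIs; "sessionManager" is an assumed instance of this class):
*   const offset = new XRRigidTransform({ x: 0, y: -sessionManager.defaultHeightCompensation, z: 0 });
*   sessionManager.referenceSpace = sessionManager.referenceSpace.getOffsetReferenceSpace(offset);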
*/ get referenceSpace() { return this._referenceSpace; } /** * Set a new reference space and triggers the observable */ set referenceSpace(e) { this._referenceSpace = e, this.onXRReferenceSpaceChanged.notifyObservers(this._referenceSpace); } /** * The mode for the managed XR session */ get sessionMode() { return this._sessionMode; } /** * Disposes of the session manager * This should be called explicitly by the dev, if required. */ dispose() { var e; this.inXRSession && this.exitXRAsync(), this.onXRFrameObservable.clear(), this.onXRSessionEnded.clear(), this.onXRReferenceSpaceChanged.clear(), this.onXRSessionInit.clear(), (e = this._engine) === null || e === void 0 || e.onDisposeObservable.remove(this._onEngineDisposedObserver), this._engine = null; } /** * Stops the xrSession and restores the render loop * @returns Promise which resolves after it exits XR */ exitXRAsync() { return this.session && this.inXRSession ? (this.inXRSession = !1, this.session.end().catch(() => { Ce.Warn("Could not end XR session."); })) : Promise.resolve(); } /** * Attempts to set the framebuffer-size-normalized viewport to be rendered this frame for this view. * In the event of a failure, the supplied viewport is not updated. * @param viewport the viewport to which the view will be rendered * @param view the view for which to set the viewport * @returns whether the operation was successful */ trySetViewportForView(e, t) { var i; return ((i = this._baseLayerRTTProvider) === null || i === void 0 ? void 0 : i.trySetViewportForView(e, t)) || !1; } /** * Gets the correct render target texture to be rendered this frame for this eye * @param eye the eye for which to get the render target * @returns the render target for the specified eye or null if not available */ getRenderTargetTextureForEye(e) { var t; return ((t = this._baseLayerRTTProvider) === null || t === void 0 ? void 0 : t.getRenderTargetTextureForEye(e)) || null; } /** * Gets the correct render target texture to be rendered this frame for this view * @param view the view for which to get the render target * @returns the render target for the specified view or null if not available */ getRenderTargetTextureForView(e) { var t; return ((t = this._baseLayerRTTProvider) === null || t === void 0 ? void 0 : t.getRenderTargetTextureForView(e)) || null; } /** * Creates a WebXRRenderTarget object for the XR session * @param options optional options to provide when creating a new render target * @returns a WebXR render target to which the session can render */ getWebXRRenderTarget(e) { const t = this.scene.getEngine(); return this._xrNavigator.xr.native ? new Aie(this) : (e = e || tN.GetDefaults(t), e.canvasElement = e.canvasElement || t.getRenderingCanvas() || void 0, new mie(this, e)); } /** * Initializes the manager * After initialization enterXR can be called to start an XR session * @returns Promise which resolves after it is initialized */ initializeAsync() { return this._xrNavigator = navigator, this._xrNavigator.xr ? 
Promise.resolve() : Promise.reject("WebXR not available"); } /** * Initializes an xr session * @param xrSessionMode mode to initialize * @param xrSessionInit defines optional and required values to pass to the session builder * @returns a promise which will resolve once the session has been initialized */ initializeSessionAsync(e = "immersive-vr", t = {}) { return this._xrNavigator.xr.requestSession(e, t).then((i) => (this.session = i, this._sessionMode = e, this.onXRSessionInit.notifyObservers(i), this.inXRSession = !0, this.session.addEventListener("end", () => { var r; this.inXRSession = !1, this.onXRSessionEnded.notifyObservers(null), this._engine && (this._engine.framebufferDimensionsObject = null, this._engine.restoreDefaultFramebuffer(), this._engine.customAnimationFrameRequester = null, this._engine._renderLoop()), this.isNative && ((r = this._baseLayerRTTProvider) === null || r === void 0 || r.dispose()), this._baseLayerRTTProvider = null, this._baseLayerWrapper = null; }, { once: !0 }), this.session)); } /** * Checks if a session would be supported for the creation options specified * @param sessionMode session mode to check if supported eg. immersive-vr * @returns A Promise that resolves to true if supported and false if not */ isSessionSupportedAsync(e) { return iN.IsSessionSupportedAsync(e); } /** * Resets the reference space to the one started the session */ resetReferenceSpace() { this.referenceSpace = this.baseReferenceSpace; } /** * Starts rendering to the xr layer */ runXRRenderLoop() { var e; !this.inXRSession || !this._engine || (this._engine.customAnimationFrameRequester = { requestAnimationFrame: (t) => this.session.requestAnimationFrame(t), renderFunction: (t, i) => { var r; !this.inXRSession || !this._engine || (this.currentFrame = i, this.currentTimestamp = t, i && (this.inXRFrameLoop = !0, this._engine.framebufferDimensionsObject = ((r = this._baseLayerRTTProvider) === null || r === void 0 ? void 0 : r.getFramebufferDimensions()) || null, this.onXRFrameObservable.notifyObservers(i), this._engine._renderLoop(), this._engine.framebufferDimensionsObject = null, this.inXRFrameLoop = !1)); } }, this._engine.framebufferDimensionsObject = ((e = this._baseLayerRTTProvider) === null || e === void 0 ? void 0 : e.getFramebufferDimensions()) || null, typeof window < "u" && window.cancelAnimationFrame && window.cancelAnimationFrame(this._engine._frameHandler), this._engine._renderLoop()); } /** * Sets the reference space on the xr session * @param referenceSpaceType space to set * @returns a promise that will resolve once the reference space has been set */ setReferenceSpaceTypeAsync(e = "local-floor") { return this.session.requestReferenceSpace(e).then((t) => t, (t) => (Ce.Error("XR.requestReferenceSpace failed for the following reason: "), Ce.Error(t), Ce.Log('Defaulting to universally-supported "viewer" reference space type.'), this.session.requestReferenceSpace("viewer").then((i) => { const r = new XRRigidTransform({ x: 0, y: -this.defaultHeightCompensation, z: 0 }); return i.getOffsetReferenceSpace(r); }, (i) => { throw Ce.Error(i), 'XR initialization failed: required "viewer" reference space type not supported.'; }))).then((t) => this.session.requestReferenceSpace("viewer").then((i) => (this.viewerReferenceSpace = i, t))).then((t) => (this.referenceSpace = this.baseReferenceSpace = t, this.referenceSpace)); } /** * Updates the render state of the session. * Note that this is deprecated in favor of WebXRSessionManager.updateRenderState(). 
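* A hedged sketch of the non-deprecated path documented below (assumes "sessionManager" is an instance of this class and "xrLayer" was created via initializeXRLayerAsync):
*   sessionManager.updateRenderState({ baseLayer: xrLayer });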
* @param state state to set * @returns a promise that resolves once the render state has been updated * @deprecated */ updateRenderStateAsync(e) { return Promise.resolve(this.session.updateRenderState(e)); } /** * @internal */ _setBaseLayerWrapper(e) { var t, i; this.isNative && ((t = this._baseLayerRTTProvider) === null || t === void 0 || t.dispose()), this._baseLayerWrapper = e, this._baseLayerRTTProvider = ((i = this._baseLayerWrapper) === null || i === void 0 ? void 0 : i.createRenderTargetTextureProvider(this)) || null; } /** * @internal */ _getBaseLayerWrapper() { return this._baseLayerWrapper; } /** * Updates the render state of the session * @param state state to set */ updateRenderState(e) { e.baseLayer && this._setBaseLayerWrapper(this.isNative ? new gie(e.baseLayer) : new FK(e.baseLayer)), this.session.updateRenderState(e); } /** * Returns a promise that resolves with a boolean indicating if the provided session mode is supported by this browser * @param sessionMode defines the session to test * @returns a promise with boolean as final value */ static IsSessionSupportedAsync(e) { if (!navigator.xr) return Promise.resolve(!1); const t = navigator.xr.isSessionSupported || navigator.xr.supportsSession; return t ? t.call(navigator.xr, e).then((i) => { const r = typeof i > "u" ? !0 : i; return Promise.resolve(r); }).catch((i) => (Ce.Warn(i), Promise.resolve(!1))) : Promise.resolve(!1); } /** * Returns true if Babylon.js is using the BabylonNative backend, otherwise false */ get isNative() { var e; return (e = this._xrNavigator.xr.native) !== null && e !== void 0 ? e : !1; } /** * The current frame rate as reported by the device */ get currentFrameRate() { var e; return (e = this.session) === null || e === void 0 ? void 0 : e.frameRate; } /** * A list of supported frame rates (only available in-session!) */ get supportedFrameRates() { var e; return (e = this.session) === null || e === void 0 ? void 0 : e.supportedFrameRates; } /** * Set the framerate of the session. * @param rate the new framerate. This value needs to be in the supportedFrameRates array * @returns a promise that resolves once the framerate has been set */ updateTargetFrameRate(e) { return this.session.updateTargetFrameRate(e); } /** * Run a callback in the xr render loop * @param callback the callback to call when in XR Frame * @param ignoreIfNotInSession if no session is currently running, run it first thing on the next session */ runInXRFrame(e, t = !0) { this.inXRFrameLoop ? e() : (this.inXRSession || !t) && this.onXRFrameObservable.addOnce(e); } /** * Check if fixed foveation is supported on this device */ get isFixedFoveationSupported() { var e; return ((e = this._baseLayerWrapper) === null || e === void 0 ? void 0 : e.isFixedFoveationSupported) || !1; } /** * Get the fixed foveation currently set, as specified by the webxr specs * If this returns null, then fixed foveation is not supported */ get fixedFoveation() { var e; return ((e = this._baseLayerWrapper) === null || e === void 0 ? void 0 : e.fixedFoveation) || null; } /** * Set the fixed foveation to the specified value, as specified by the webxr specs * This value will be normalized to be between 0 and 1, 1 being max foveation, 0 being no foveation */ set fixedFoveation(e) { const t = Math.max(0, Math.min(1, e || 0)); this._baseLayerWrapper && (this._baseLayerWrapper.fixedFoveation = t); } /** * Get the features enabled on the current session * This is only available in-session! 
* @see https://www.w3.org/TR/webxr/#dom-xrsession-enabledfeatures */ get enabledFeatures() { var e, t; return (t = (e = this.session) === null || e === void 0 ? void 0 : e.enabledFeatures) !== null && t !== void 0 ? t : null; } } var lu; (function(c) { c[c.ENTERING_XR = 0] = "ENTERING_XR", c[c.EXITING_XR = 1] = "EXITING_XR", c[c.IN_XR = 2] = "IN_XR", c[c.NOT_IN_XR = 3] = "NOT_IN_XR"; })(lu || (lu = {})); var jR; (function(c) { c[c.NOT_TRACKING = 0] = "NOT_TRACKING", c[c.TRACKING_LOST = 1] = "TRACKING_LOST", c[c.TRACKING = 2] = "TRACKING"; })(jR || (jR = {})); ke._GroundMeshParser = (c, e) => yw.Parse(c, e); class yw extends ke { constructor(e, t) { super(e, t), this.generateOctree = !1; } /** * "GroundMesh" * @returns "GroundMesh" */ getClassName() { return "GroundMesh"; } /** * The minimum of x and y subdivisions */ get subdivisions() { return Math.min(this._subdivisionsX, this._subdivisionsY); } /** * X subdivisions */ get subdivisionsX() { return this._subdivisionsX; } /** * Y subdivisions */ get subdivisionsY() { return this._subdivisionsY; } /** * This function will divide the mesh into submeshes and update an octree to help to select the right submeshes * for rendering, picking and collision computations. Please note that you must have a decent number of submeshes * to get performance improvements when using an octree. * @param chunksCount the number of submeshes the mesh will be divided into * @param octreeBlocksSize the maximum size of the octree blocks (Default: 32) */ optimize(e, t = 32) { this._subdivisionsX = e, this._subdivisionsY = e, this.subdivide(e); const i = this; i.createOrUpdateSubmeshesOctree && i.createOrUpdateSubmeshesOctree(t); } /** * Returns a height (y) value in the World system : * the ground altitude at the coordinates (x, z) expressed in the World system. * @param x x coordinate * @param z z coordinate * @returns the ground y position at (x, z), or the mesh position y if (x, z) are outside the ground surface. */ getHeightAtCoordinates(e, t) { const i = this.getWorldMatrix(), r = de.Matrix[5]; i.invertToRef(r); const s = de.Vector3[8]; if (D.TransformCoordinatesFromFloatsToRef(e, 0, t, r, s), e = s.x, t = s.z, e < this._minX || e >= this._maxX || t <= this._minZ || t > this._maxZ) return this.position.y; (!this._heightQuads || this._heightQuads.length == 0) && (this._initHeightQuads(), this._computeHeightQuads()); const n = this._getFacetAt(e, t), a = -(n.x * e + n.z * t + n.w) / n.y; return D.TransformCoordinatesFromFloatsToRef(0, a, 0, i, s), s.y; } /** * Returns a normalized vector (Vector3) orthogonal to the ground * at the ground coordinates (x, z) expressed in the World system. * @param x x coordinate * @param z z coordinate * @returns the normal vector at (x, z), or Vector3(0.0, 1.0, 0.0) if (x, z) are outside the ground surface. */ getNormalAtCoordinates(e, t) { const i = new D(0, 1, 0); return this.getNormalAtCoordinatesToRef(e, t, i), i; } /** * Updates the Vector3 passed as a reference with a normalized vector orthogonal to the ground * at the ground coordinates (x, z) expressed in the World system. * Doesn't update the reference Vector3 if (x, z) are outside the ground surface. * @param x x coordinate * @param z z coordinate * @param ref vector to store the result * @returns the GroundMesh. 
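* A hedged query sketch for the ground API above (assumes "ground" is an existing GroundMesh; the coordinates are illustrative):
*   ground.updateCoordinateHeights();                       // refresh the cached height quads after the ground changes
*   const y = ground.getHeightAtCoordinates(10, -4);        // world-space altitude at (x, z)
*   const normal = ground.getNormalAtCoordinates(10, -4);   // unit normal at (x, z)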
*/ getNormalAtCoordinatesToRef(e, t, i) { const r = this.getWorldMatrix(), s = de.Matrix[5]; r.invertToRef(s); const n = de.Vector3[8]; if (D.TransformCoordinatesFromFloatsToRef(e, 0, t, s, n), e = n.x, t = n.z, e < this._minX || e > this._maxX || t < this._minZ || t > this._maxZ) return this; (!this._heightQuads || this._heightQuads.length == 0) && (this._initHeightQuads(), this._computeHeightQuads()); const a = this._getFacetAt(e, t); return D.TransformNormalFromFloatsToRef(a.x, a.y, a.z, r, i), this; } /** * Force the heights to be recomputed for getHeightAtCoordinates() or getNormalAtCoordinates() * if the ground has been updated. * This can be used in the render loop. * @returns the GroundMesh. */ updateCoordinateHeights() { return (!this._heightQuads || this._heightQuads.length == 0) && this._initHeightQuads(), this._computeHeightQuads(), this; } // Returns the element "facet" from the heightQuads array relative to (x, z) local coordinates _getFacetAt(e, t) { const i = Math.floor((e + this._maxX) * this._subdivisionsX / this._width), r = Math.floor(-(t + this._maxZ) * this._subdivisionsY / this._height + this._subdivisionsY), s = this._heightQuads[r * this._subdivisionsX + i]; let n; return t < s.slope.x * e + s.slope.y ? n = s.facet1 : n = s.facet2, n; } // Creates and populates the heightMap array with "facet" elements : // a quad is two triangular facets separated by a slope, so a "facet" element is 1 slope + 2 facets // slope : Vector2(c, h) = 2D diagonal line equation setting apart two triangular facets in a quad : z = cx + h // facet1 : Vector4(a, b, c, d) = first facet 3D plane equation : ax + by + cz + d = 0 // facet2 : Vector4(a, b, c, d) = second facet 3D plane equation : ax + by + cz + d = 0 // Returns the GroundMesh. _initHeightQuads() { const e = this._subdivisionsX, t = this._subdivisionsY; this._heightQuads = new Array(); for (let i = 0; i < t; i++) for (let r = 0; r < e; r++) { const s = { slope: at.Zero(), facet1: new Di(0, 0, 0, 0), facet2: new Di(0, 0, 0, 0) }; this._heightQuads[i * e + r] = s; } return this; } // Compute each quad element values and update the heightMap array : // slope : Vector2(c, h) = 2D diagonal line equation setting apart two triangular facets in a quad : z = cx + h // facet1 : Vector4(a, b, c, d) = first facet 3D plane equation : ax + by + cz + d = 0 // facet2 : Vector4(a, b, c, d) = second facet 3D plane equation : ax + by + cz + d = 0 // Returns the GroundMesh. 
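/*
 * A minimal standalone sketch of the lookup described by the comments above
 * (names are illustrative, not identifiers from this bundle): a quad stores the
 * diagonal line z = slope.x * x + slope.y separating its two triangular facets,
 * plus each facet's plane a*x + b*y + c*z + d = 0 packed into a Vector4.
 * The ground height at a local (x, z) is recovered by picking the facet on the
 * correct side of the diagonal and solving its plane equation for y.
 *
 * function heightFromQuad(quad, x, z) {
 *     const facet = z < quad.slope.x * x + quad.slope.y ? quad.facet1 : quad.facet2;
 *     // a*x + b*y + c*z + d = 0  =>  y = -(a*x + c*z + d) / b
 *     return -(facet.x * x + facet.z * z + facet.w) / facet.y;
 * }
 */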
_computeHeightQuads() { const e = this.getVerticesData(Y.PositionKind); if (!e) return this; const t = de.Vector3[3], i = de.Vector3[2], r = de.Vector3[1], s = de.Vector3[0], n = de.Vector3[4], a = de.Vector3[5], l = de.Vector3[6], o = de.Vector3[7], u = de.Vector3[8]; let h = 0, d = 0, f = 0, p = 0, m = 0, _ = 0, v = 0; const C = this._subdivisionsX, x = this._subdivisionsY; for (let b = 0; b < x; b++) for (let S = 0; S < C; S++) { h = S * 3, d = b * (C + 1) * 3, f = (b + 1) * (C + 1) * 3, t.x = e[d + h], t.y = e[d + h + 1], t.z = e[d + h + 2], i.x = e[d + h + 3], i.y = e[d + h + 4], i.z = e[d + h + 5], r.x = e[f + h], r.y = e[f + h + 1], r.z = e[f + h + 2], s.x = e[f + h + 3], s.y = e[f + h + 4], s.z = e[f + h + 5], p = (s.z - t.z) / (s.x - t.x), m = t.z - p * t.x, i.subtractToRef(t, n), r.subtractToRef(t, a), s.subtractToRef(t, l), D.CrossToRef(l, a, o), D.CrossToRef(n, l, u), o.normalize(), u.normalize(), _ = -(o.x * t.x + o.y * t.y + o.z * t.z), v = -(u.x * i.x + u.y * i.y + u.z * i.z); const M = this._heightQuads[b * C + S]; M.slope.copyFromFloats(p, m), M.facet1.copyFromFloats(o.x, o.y, o.z, _), M.facet2.copyFromFloats(u.x, u.y, u.z, v); } return this; } /** * Serializes this ground mesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.subdivisionsX = this._subdivisionsX, e.subdivisionsY = this._subdivisionsY, e.minX = this._minX, e.maxX = this._maxX, e.minZ = this._minZ, e.maxZ = this._maxZ, e.width = this._width, e.height = this._height; } /** * Parses a serialized ground mesh * @param parsedMesh the serialized mesh * @param scene the scene to create the ground mesh in * @returns the created ground mesh */ static Parse(e, t) { const i = new yw(e.name, t); return i._subdivisionsX = e.subdivisionsX || 1, i._subdivisionsY = e.subdivisionsY || 1, i._minX = e.minX, i._maxX = e.maxX, i._minZ = e.minZ, i._maxZ = e.maxZ, i._width = e.width, i._height = e.height, i; } } function BC(c) { const e = [], t = [], i = [], r = []; let s, n; const a = c.width || 1, l = c.height || 1, o = (c.subdivisionsX || c.subdivisions || 1) | 0, u = (c.subdivisionsY || c.subdivisions || 1) | 0; for (s = 0; s <= u; s++) for (n = 0; n <= o; n++) { const d = new D(n * a / o - a / 2, 0, (u - s) * l / u - l / 2), f = new D(0, 1, 0); t.push(d.x, d.y, d.z), i.push(f.x, f.y, f.z), r.push(n / o, hn.UseOpenGLOrientationForUV ? s / u : 1 - s / u); } for (s = 0; s < u; s++) for (n = 0; n < o; n++) e.push(n + 1 + (s + 1) * (o + 1)), e.push(n + 1 + s * (o + 1)), e.push(n + s * (o + 1)), e.push(n + (s + 1) * (o + 1)), e.push(n + 1 + (s + 1) * (o + 1)), e.push(n + s * (o + 1)); const h = new Ot(); return h.indices = e, h.positions = t, h.normals = i, h.uvs = r, h; } function BK(c) { const e = c.xmin !== void 0 && c.xmin !== null ? c.xmin : -1, t = c.zmin !== void 0 && c.zmin !== null ? c.zmin : -1, i = c.xmax !== void 0 && c.xmax !== null ? c.xmax : 1, r = c.zmax !== void 0 && c.zmax !== null ? c.zmax : 1, s = c.subdivisions || { w: 1, h: 1 }, n = c.precision || { w: 1, h: 1 }, a = [], l = [], o = [], u = []; let h, d, f, p; s.h = s.h < 1 ? 1 : s.h, s.w = s.w < 1 ? 1 : s.w, n.w = n.w < 1 ? 1 : n.w, n.h = n.h < 1 ? 
1 : n.h; const m = { w: (i - e) / s.w, h: (r - t) / s.h }; function _(C, x, b, S) { const M = l.length / 3, R = n.w + 1; for (h = 0; h < n.h; h++) for (d = 0; d < n.w; d++) { const k = [M + d + h * R, M + (d + 1) + h * R, M + (d + 1) + (h + 1) * R, M + d + (h + 1) * R]; a.push(k[1]), a.push(k[2]), a.push(k[3]), a.push(k[0]), a.push(k[1]), a.push(k[3]); } const w = D.Zero(), V = new D(0, 1, 0); for (h = 0; h <= n.h; h++) for (w.z = h * (S - x) / n.h + x, d = 0; d <= n.w; d++) w.x = d * (b - C) / n.w + C, w.y = 0, l.push(w.x, w.y, w.z), o.push(V.x, V.y, V.z), u.push(d / n.w, h / n.h); } for (f = 0; f < s.h; f++) for (p = 0; p < s.w; p++) _(e + p * m.w, t + f * m.h, e + (p + 1) * m.w, t + (f + 1) * m.h); const v = new Ot(); return v.indices = a, v.positions = l, v.normals = o, v.uvs = u, v; } function UK(c) { const e = [], t = [], i = [], r = []; let s, n; const a = c.colorFilter || new ze(0.3, 0.59, 0.11), l = c.alphaFilter || 0; let o = !1; if (c.minHeight > c.maxHeight) { o = !0; const h = c.maxHeight; c.maxHeight = c.minHeight, c.minHeight = h; } for (s = 0; s <= c.subdivisions; s++) for (n = 0; n <= c.subdivisions; n++) { const h = new D(n * c.width / c.subdivisions - c.width / 2, 0, (c.subdivisions - s) * c.height / c.subdivisions - c.height / 2), d = (h.x + c.width / 2) / c.width * (c.bufferWidth - 1) | 0, f = (1 - (h.z + c.height / 2) / c.height) * (c.bufferHeight - 1) | 0, p = (d + f * c.bufferWidth) * 4; let m = c.buffer[p] / 255, _ = c.buffer[p + 1] / 255, v = c.buffer[p + 2] / 255; const C = c.buffer[p + 3] / 255; o && (m = 1 - m, _ = 1 - _, v = 1 - v); const x = m * a.r + _ * a.g + v * a.b; C >= l ? h.y = c.minHeight + (c.maxHeight - c.minHeight) * x : h.y = c.minHeight - Sr, t.push(h.x, h.y, h.z), i.push(0, 0, 0), r.push(n / c.subdivisions, 1 - s / c.subdivisions); } for (s = 0; s < c.subdivisions; s++) for (n = 0; n < c.subdivisions; n++) { const h = n + 1 + (s + 1) * (c.subdivisions + 1), d = n + 1 + s * (c.subdivisions + 1), f = n + s * (c.subdivisions + 1), p = n + (s + 1) * (c.subdivisions + 1), m = t[h * 3 + 1] >= c.minHeight, _ = t[d * 3 + 1] >= c.minHeight, v = t[f * 3 + 1] >= c.minHeight; m && _ && v && (e.push(h), e.push(d), e.push(f)), t[p * 3 + 1] >= c.minHeight && m && v && (e.push(p), e.push(h), e.push(f)); } Ot.ComputeNormals(t, e, i); const u = new Ot(); return u.indices = e, u.positions = t, u.normals = i, u.uvs = r, u; } function zI(c, e = {}, t) { const i = new yw(c, t); return i._setReady(!1), i._subdivisionsX = e.subdivisionsX || e.subdivisions || 1, i._subdivisionsY = e.subdivisionsY || e.subdivisions || 1, i._width = e.width || 1, i._height = e.height || 1, i._maxX = i._width / 2, i._maxZ = i._height / 2, i._minX = -i._maxX, i._minZ = -i._maxZ, BC(e).applyToMesh(i, e.updatable), i._setReady(!0), i; } function sU(c, e, t = null) { const i = new ke(c, t); return BK(e).applyToMesh(i, e.updatable), i; } function nU(c, e, t = {}, i = null) { const r = t.width || 10, s = t.height || 10, n = t.subdivisions || 1, a = t.minHeight || 0, l = t.maxHeight || 1, o = t.colorFilter || new ze(0.3, 0.59, 0.11), u = t.alphaFilter || 0, h = t.updatable, d = t.onReady; i = i || gi.LastCreatedScene; const f = new yw(c, i); f._subdivisionsX = n, f._subdivisionsY = n, f._width = r, f._height = s, f._maxX = f._width / 2, f._maxZ = f._height / 2, f._minX = -f._maxX, f._minZ = -f._maxZ, f._setReady(!1); const p = (m, _, v) => { UK({ width: r, height: s, subdivisions: n, minHeight: a, maxHeight: l, colorFilter: o, buffer: m, bufferWidth: _, bufferHeight: v, alphaFilter: u 
}).applyToMesh(f, h), d && d(f), f._setReady(!0); }; if (typeof e == "string") { const m = (_) => { const v = _.width, C = _.height; if (i.isDisposed) return; const x = i == null ? void 0 : i.getEngine().resizeImageBitmap(_, v, C); p(x, v, C); }; Ve.LoadImage(e, m, t.onError ? t.onError : () => { }, i.offlineProvider); } else p(e.data, e.width, e.height); return f; } const Pde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateGround: zI, // eslint-disable-next-line @typescript-eslint/naming-convention CreateGroundFromHeightMap: nU, // eslint-disable-next-line @typescript-eslint/naming-convention CreateTiledGround: sU }; Ot.CreateGround = BC; Ot.CreateTiledGround = BK; Ot.CreateGroundFromHeightMap = UK; ke.CreateGround = (c, e, t, i, r, s) => zI(c, { width: e, height: t, subdivisions: i, updatable: s }, r); ke.CreateTiledGround = (c, e, t, i, r, s, n, a, l) => sU(c, { xmin: e, zmin: t, xmax: i, zmax: r, subdivisions: s, precision: n, updatable: l }, a); ke.CreateGroundFromHeightMap = (c, e, t, i, r, s, n, a, l, o, u) => nU(c, e, { width: t, height: i, subdivisions: r, minHeight: s, maxHeight: n, updatable: l, onReady: o, alphaFilter: u }, a); function aU(c) { const e = [], t = [], i = [], r = [], s = c.diameter || 1, n = c.thickness || 0.5, a = (c.tessellation || 16) | 0, l = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, o = a + 1; for (let h = 0; h <= a; h++) { const d = h / a, f = h * Math.PI * 2 / a - Math.PI / 2, p = Ae.Translation(s / 2, 0, 0).multiply(Ae.RotationY(f)); for (let m = 0; m <= a; m++) { const _ = 1 - m / a, v = m * Math.PI * 2 / a + Math.PI, C = Math.cos(v), x = Math.sin(v); let b = new D(C, x, 0), S = b.scale(n / 2); const M = new at(d, _); S = D.TransformCoordinates(S, p), b = D.TransformNormal(b, p), t.push(S.x, S.y, S.z), i.push(b.x, b.y, b.z), r.push(M.x, hn.UseOpenGLOrientationForUV ? 
1 - M.y : M.y); const R = (h + 1) % o, w = (m + 1) % o; e.push(h * o + m), e.push(h * o + w), e.push(R * o + m), e.push(h * o + w), e.push(R * o + w), e.push(R * o + m); } } Ot._ComputeSides(l, t, e, i, r, c.frontUVs, c.backUVs); const u = new Ot(); return u.indices = e, u.positions = t, u.normals = i, u.uvs = r, u; } function o6(c, e = {}, t) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, aU(e).applyToMesh(i, e.updatable), i; } const Ide = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateTorus: o6 }; Ot.CreateTorus = aU; ke.CreateTorus = (c, e, t, i, r, s, n) => o6(c, { diameter: e, thickness: t, tessellation: i, sideOrientation: n, updatable: s }, r); class oU { constructor(e, t = null) { if (this.scene = e, this._pointerDownOnMeshAsked = !1, this._isActionableMesh = !1, this._teleportationRequestInitiated = !1, this._teleportationBackRequestInitiated = !1, this._rotationRightAsked = !1, this._rotationLeftAsked = !1, this._dpadPressed = !0, this._activePointer = !1, this._id = oU._IdCounter++, t) this._gazeTracker = t.clone("gazeTracker"); else { this._gazeTracker = o6("gazeTracker", { diameter: 35e-4, thickness: 25e-4, tessellation: 20, updatable: !1 }, e), this._gazeTracker.bakeCurrentTransformIntoVertices(), this._gazeTracker.isPickable = !1, this._gazeTracker.isVisible = !1; const i = new Dt("targetMat", e); i.specularColor = ze.Black(), i.emissiveColor = new ze(0.7, 0.7, 0.7), i.backFaceCulling = !1, this._gazeTracker.material = i; } } /** * @internal */ _getForwardRay(e) { return new gs(D.Zero(), new D(0, 0, e)); } /** @internal */ _selectionPointerDown() { this._pointerDownOnMeshAsked = !0, this._currentHit && this.scene.simulatePointerDown(this._currentHit, { pointerId: this._id }); } /** @internal */ _selectionPointerUp() { this._currentHit && this.scene.simulatePointerUp(this._currentHit, { pointerId: this._id }), this._pointerDownOnMeshAsked = !1; } /** @internal */ _activatePointer() { this._activePointer = !0; } /** @internal */ _deactivatePointer() { this._activePointer = !1; } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _updatePointerDistance(e = 100) { } dispose() { this._interactionsEnabled = !1, this._teleportationEnabled = !1, this._gazeTracker && this._gazeTracker.dispose(); } } oU._IdCounter = 0; class OZ extends oU { constructor(e, t) { super(t), this._getCamera = e; } _getForwardRay(e) { const t = this._getCamera(); return t ? t.getForwardRay(e) : new gs(D.Zero(), D.Forward()); } } class Dde { } class _P { /** Return this.onEnteringVRObservable * Note: This one is for backward compatibility. Please use onEnteringVRObservable directly */ get onEnteringVR() { return this.onEnteringVRObservable; } /** Return this.onExitingVRObservable * Note: This one is for backward compatibility. Please use onExitingVRObservable directly */ get onExitingVR() { return this.onExitingVRObservable; } /** * The mesh used to display where the user is going to teleport. */ get teleportationTarget() { return this._teleportationTarget; } /** * Sets the mesh to be used to display where the user is going to teleport. 
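* @example
* // A minimal sketch, assuming the public BABYLON names for this minified bundle
* // (scene and ground are assumed to exist): use a custom marker mesh as the
* // teleportation indicator. Setting it before enableTeleportation() avoids
* // building the default circle/torus target.
* const vrHelper = scene.createDefaultVRExperience();
* vrHelper.teleportationTarget = BABYLON.MeshBuilder.CreateDisc("marker", { radius: 0.5 }, scene);
* vrHelper.enableTeleportation({ floorMeshes: [ground] });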
*/ set teleportationTarget(e) { e && (e.name = "teleportationTarget", this._isDefaultTeleportationTarget = !1, this._teleportationTarget = e); } /** * The mesh used to display where the user is selecting, this mesh will be cloned and set as the gazeTracker for the left and right controller * when set bakeCurrentTransformIntoVertices will be called on the mesh. * See https://doc.babylonjs.com/features/featuresDeepDive/mesh/transforms/center_origin/bakingTransforms */ get gazeTrackerMesh() { return this._cameraGazer._gazeTracker; } set gazeTrackerMesh(e) { e && (this._cameraGazer._gazeTracker && this._cameraGazer._gazeTracker.dispose(), this._cameraGazer._gazeTracker = e, this._cameraGazer._gazeTracker.bakeCurrentTransformIntoVertices(), this._cameraGazer._gazeTracker.isPickable = !1, this._cameraGazer._gazeTracker.isVisible = !1, this._cameraGazer._gazeTracker.name = "gazeTracker"); } /** * If the ray of the gaze should be displayed. */ get displayGaze() { return this._displayGaze; } /** * Sets if the ray of the gaze should be displayed. */ set displayGaze(e) { this._displayGaze = e, e || (this._cameraGazer._gazeTracker.isVisible = !1); } /** * If the ray of the LaserPointer should be displayed. */ get displayLaserPointer() { return this._displayLaserPointer; } /** * Sets if the ray of the LaserPointer should be displayed. */ set displayLaserPointer(e) { this._displayLaserPointer = e; } /** * The deviceOrientationCamera used as the camera when not in VR. */ get deviceOrientationCamera() { return this._deviceOrientationCamera; } /** * Based on the current WebVR support, returns the current VR camera used. */ get currentVRCamera() { return this._scene.activeCamera; } /** * The deviceOrientationCamera that is used as a fallback when vr device is not connected. */ get vrDeviceOrientationCamera() { return this._vrDeviceOrientationCamera; } /** * The html button that is used to trigger entering into VR. */ get vrButton() { return this._btnVR; } get _teleportationRequestInitiated() { return this._cameraGazer._teleportationRequestInitiated; } /** * Instantiates a VRExperienceHelper. * Helps to quickly add VR support to an existing scene. * @param scene The scene the VRExperienceHelper belongs to. * @param webVROptions Options to modify the vr experience helper's behavior. 
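* @example
* // A minimal sketch, assuming the public BABYLON names for this minified bundle
* // (scene and ground are assumed to exist). The options shown are among the ones
* // read by this constructor (useXR, floorMeshes, createDeviceOrientationCamera,
* // createFallbackVRDeviceOrientationFreeCamera).
* const vrHelper = new BABYLON.VRExperienceHelper(scene, {
*     useXR: true,                          // prefer WebXR when navigator.xr is available
*     createDeviceOrientationCamera: false, // keep the scene's existing camera
*     floorMeshes: [ground],                // meshes the user may teleport onto
* });
* vrHelper.onEnteringVRObservable.add(() => console.log("entered VR"));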
*/ constructor(e, t = {}) { if (this.webVROptions = t, this._fullscreenVRpresenting = !1, this.enableGazeEvenWhenNoPointerLock = !1, this.exitVROnDoubleTap = !0, this.onEnteringVRObservable = new Fe(), this.onAfterEnteringVRObservable = new Fe(), this.onExitingVRObservable = new Fe(), this._useCustomVRButton = !1, this._teleportActive = !1, this._floorMeshesCollection = [], this._teleportationMode = _P.TELEPORTATIONMODE_CONSTANTTIME, this._teleportationTime = 122, this._teleportationSpeed = 20, this._rotationAllowed = !0, this._teleportBackwardsVector = new D(0, -1, -1), this._isDefaultTeleportationTarget = !0, this._teleportationFillColor = "#444444", this._teleportationBorderColor = "#FFFFFF", this._rotationAngle = 0, this._haloCenter = new D(0, 0, 0), this._padSensibilityUp = 0.65, this._padSensibilityDown = 0.35, this._pickedLaserColor = new ze(0.2, 0.2, 1), this._pickedGazeColor = new ze(0, 0, 1), this.onNewMeshSelected = new Fe(), this.onNewMeshPicked = new Fe(), this.onBeforeCameraTeleport = new Fe(), this.onAfterCameraTeleport = new Fe(), this.onSelectedMeshUnselected = new Fe(), this.teleportationEnabled = !0, this._teleportationInitialized = !1, this._interactionsEnabled = !1, this._displayGaze = !0, this._displayLaserPointer = !0, this.updateGazeTrackerScale = !0, this.updateGazeTrackerColor = !0, this.updateControllerLaserColor = !0, this.requestPointerLockOnFullScreen = !0, this.xrTestDone = !1, this._onResize = () => { this._moveButtonToBottomRight(); }, this._onFullscreenChange = () => { this._fullscreenVRpresenting = !!document.fullscreenElement, !this._fullscreenVRpresenting && this._inputElement && (this.exitVR(), !this._useCustomVRButton && this._btnVR && (this._btnVR.style.top = this._inputElement.offsetTop + this._inputElement.offsetHeight - 70 + "px", this._btnVR.style.left = this._inputElement.offsetLeft + this._inputElement.offsetWidth - 100 + "px", this._updateButtonVisibility())); }, this._cachedAngularSensibility = { angularSensibilityX: null, angularSensibilityY: null, angularSensibility: null }, this._beforeRender = () => { this._scene.getEngine().isPointerLock || this.enableGazeEvenWhenNoPointerLock || (this._cameraGazer._gazeTracker.isVisible = !1); }, this._onNewGamepadConnected = (r) => { r.type !== zu.POSE_ENABLED && (r.leftStick && r.onleftstickchanged((s) => { this._teleportationInitialized && this.teleportationEnabled && (this._checkTeleportWithRay(s, this._cameraGazer), this._checkTeleportBackwards(s, this._cameraGazer)); }), r.rightStick && r.onrightstickchanged((s) => { this._teleportationInitialized && this._checkRotate(s, this._cameraGazer); }), r.type === zu.XBOX && (r.onbuttondown((s) => { this._interactionsEnabled && s === F_.A && this._cameraGazer._selectionPointerDown(); }), r.onbuttonup((s) => { this._interactionsEnabled && s === F_.A && this._cameraGazer._selectionPointerUp(); }))); }, this._workingVector = D.Zero(), this._workingQuaternion = Ze.Identity(), this._workingMatrix = Ae.Identity(), Ce.Warn("WebVR is deprecated. 
Please avoid using this experience helper and use the WebXR experience helper instead"), this._scene = e, this._inputElement = e.getEngine().getInputElement(), !("getVRDisplays" in navigator) && t.useXR === void 0 && (t.useXR = !0), t.createFallbackVRDeviceOrientationFreeCamera === void 0 && (t.createFallbackVRDeviceOrientationFreeCamera = !0), t.createDeviceOrientationCamera === void 0 && (t.createDeviceOrientationCamera = !0), t.laserToggle === void 0 && (t.laserToggle = !0), this._hasEnteredVR = !1, this._scene.activeCamera ? this._position = this._scene.activeCamera.position.clone() : this._position = new D(0, this._defaultHeight, 0), t.createDeviceOrientationCamera || !this._scene.activeCamera) { if (this._deviceOrientationCamera = new eU("deviceOrientationVRHelper", this._position.clone(), e), this._scene.activeCamera && (this._deviceOrientationCamera.minZ = this._scene.activeCamera.minZ, this._deviceOrientationCamera.maxZ = this._scene.activeCamera.maxZ, this._scene.activeCamera instanceof Cl && this._scene.activeCamera.rotation)) { const r = this._scene.activeCamera; r.rotationQuaternion ? this._deviceOrientationCamera.rotationQuaternion.copyFrom(r.rotationQuaternion) : this._deviceOrientationCamera.rotationQuaternion.copyFrom(Ze.RotationYawPitchRoll(r.rotation.y, r.rotation.x, r.rotation.z)), this._deviceOrientationCamera.rotation = r.rotation.clone(); } this._scene.activeCamera = this._deviceOrientationCamera, this._inputElement && this._scene.activeCamera.attachControl(); } else this._existingCamera = this._scene.activeCamera; this.webVROptions.useXR && navigator.xr ? iN.IsSessionSupportedAsync("immersive-vr").then((r) => { r ? (Ce.Log("Using WebXR. It is recommended to use the WebXRDefaultExperience directly"), e.createDefaultXRExperienceAsync({ floorMeshes: t.floorMeshes || [] }).then((s) => { this.xr = s, this.xrTestDone = !0, this._cameraGazer = new OZ(() => this.xr.baseExperience.camera, e), this.xr.baseExperience.onStateChangedObservable.add((n) => { switch (n) { case lu.ENTERING_XR: this.onEnteringVRObservable.notifyObservers(this), this._interactionsEnabled || this.xr.pointerSelection.detach(), this.xr.pointerSelection.displayLaserPointer = this._displayLaserPointer; break; case lu.EXITING_XR: this.onExitingVRObservable.notifyObservers(this), this._scene.getEngine().resize(); break; case lu.IN_XR: this._hasEnteredVR = !0; break; case lu.NOT_IN_XR: this._hasEnteredVR = !1; break; } }); })) : this._completeVRInit(e, t); }) : this._completeVRInit(e, t); } _completeVRInit(e, t) { if (this.xrTestDone = !0, t.createFallbackVRDeviceOrientationFreeCamera && (this._vrDeviceOrientationCamera = new rU("VRDeviceOrientationVRHelper", this._position, this._scene, !0, t.vrDeviceOrientationCameraMetrics), this._vrDeviceOrientationCamera.angularSensibility = Number.MAX_VALUE), this._cameraGazer = new OZ(() => this.currentVRCamera, e), !this._useCustomVRButton) { this._btnVR = document.createElement("BUTTON"), this._btnVR.className = "babylonVRicon", this._btnVR.id = "babylonVRiconbtn", this._btnVR.title = "Click to switch to VR"; let s = ".babylonVRicon { position: absolute; right: 20px; height: 50px; width: 80px; background-color: rgba(51,51,51,0.7); background-image: url(" + (window.SVGSVGElement ? 
"data:image/svg+xml;charset=UTF-8,%3Csvg%20xmlns%3D%22http%3A//www.w3.org/2000/svg%22%20width%3D%222048%22%20height%3D%221152%22%20viewBox%3D%220%200%202048%201152%22%20version%3D%221.1%22%3E%3Cpath%20transform%3D%22rotate%28180%201024%2C576.0000000000001%29%22%20d%3D%22m1109%2C896q17%2C0%2030%2C-12t13%2C-30t-12.5%2C-30.5t-30.5%2C-12.5l-170%2C0q-18%2C0%20-30.5%2C12.5t-12.5%2C30.5t13%2C30t30%2C12l170%2C0zm-85%2C256q59%2C0%20132.5%2C-1.5t154.5%2C-5.5t164.5%2C-11.5t163%2C-20t150%2C-30t124.5%2C-41.5q23%2C-11%2042%2C-24t38%2C-30q27%2C-25%2041%2C-61.5t14%2C-72.5l0%2C-257q0%2C-123%20-47%2C-232t-128%2C-190t-190%2C-128t-232%2C-47l-81%2C0q-37%2C0%20-68.5%2C14t-60.5%2C34.5t-55.5%2C45t-53%2C45t-53%2C34.5t-55.5%2C14t-55.5%2C-14t-53%2C-34.5t-53%2C-45t-55.5%2C-45t-60.5%2C-34.5t-68.5%2C-14l-81%2C0q-123%2C0%20-232%2C47t-190%2C128t-128%2C190t-47%2C232l0%2C257q0%2C68%2038%2C115t97%2C73q54%2C24%20124.5%2C41.5t150%2C30t163%2C20t164.5%2C11.5t154.5%2C5.5t132.5%2C1.5zm939%2C-298q0%2C39%20-24.5%2C67t-58.5%2C42q-54%2C23%20-122%2C39.5t-143.5%2C28t-155.5%2C19t-157%2C11t-148.5%2C5t-129.5%2C1.5q-59%2C0%20-130%2C-1.5t-148%2C-5t-157%2C-11t-155.5%2C-19t-143.5%2C-28t-122%2C-39.5q-34%2C-14%20-58.5%2C-42t-24.5%2C-67l0%2C-257q0%2C-106%2040.5%2C-199t110%2C-162.5t162.5%2C-109.5t199%2C-40l81%2C0q27%2C0%2052%2C14t50%2C34.5t51%2C44.5t55.5%2C44.5t63.5%2C34.5t74%2C14t74%2C-14t63.5%2C-34.5t55.5%2C-44.5t51%2C-44.5t50%2C-34.5t52%2C-14l14%2C0q37%2C0%2070%2C0.5t64.5%2C4.5t63.5%2C12t68%2C23q71%2C30%20128.5%2C78.5t98.5%2C110t63.5%2C133.5t22.5%2C149l0%2C257z%22%20fill%3D%22white%22%20/%3E%3C/svg%3E%0A" : "https://cdn.babylonjs.com/Assets/vrButton.png") + "); background-size: 80%; background-repeat:no-repeat; background-position: center; border: none; outline: none; transition: transform 0.125s ease-out } .babylonVRicon:hover { transform: scale(1.05) } .babylonVRicon:active {background-color: rgba(51,51,51,1) } .babylonVRicon:focus {background-color: rgba(51,51,51,1) }"; s += ".babylonVRicon.vrdisplaypresenting { display: none; }"; const n = document.createElement("style"); n.appendChild(document.createTextNode(s)), document.getElementsByTagName("head")[0].appendChild(n), this._moveButtonToBottomRight(); } this._btnVR && this._btnVR.addEventListener("click", () => { this.isInVRMode || this.enterVR(); }); const i = this._scene.getEngine().getHostWindow(); i && (i.addEventListener("resize", this._onResize), document.addEventListener("fullscreenchange", this._onFullscreenChange, !1), t.createFallbackVRDeviceOrientationFreeCamera && this._displayVRButton(), this._onKeyDown = (r) => { r.keyCode === 27 && this.isInVRMode && this.exitVR(); }, document.addEventListener("keydown", this._onKeyDown), this._scene.onPrePointerObservable.add(() => { this._hasEnteredVR && this.exitVROnDoubleTap && (this.exitVR(), this._fullscreenVRpresenting && this._scene.getEngine().exitFullscreen()); }, si.POINTERDOUBLETAP, !1), e.onDisposeObservable.add(() => { this.dispose(); }), this._updateButtonVisibility(), this._circleEase = new Ate(), this._circleEase.setEasingMode(hl.EASINGMODE_EASEINOUT), this._teleportationEasing = this._circleEase, e.onPointerObservable.add((r) => { this._interactionsEnabled && e.activeCamera === this.vrDeviceOrientationCamera && r.event.pointerType === "mouse" && (r.type === si.POINTERDOWN ? 
this._cameraGazer._selectionPointerDown() : r.type === si.POINTERUP && this._cameraGazer._selectionPointerUp()); }), this.webVROptions.floorMeshes && this.enableTeleportation({ floorMeshes: this.webVROptions.floorMeshes })); } /** * Gets a value indicating if we are currently in VR mode. */ get isInVRMode() { return this.xr && this.webVROptions.useXR && this.xr.baseExperience.state === lu.IN_XR || this._fullscreenVRpresenting; } _moveButtonToBottomRight() { if (this._inputElement && !this._useCustomVRButton && this._btnVR) { const e = this._inputElement.getBoundingClientRect(); this._btnVR.style.top = e.top + e.height - 70 + "px", this._btnVR.style.left = e.left + e.width - 100 + "px"; } } _displayVRButton() { !this._useCustomVRButton && !this._btnVRDisplayed && this._btnVR && (document.body.appendChild(this._btnVR), this._btnVRDisplayed = !0); } _updateButtonVisibility() { !this._btnVR || this._useCustomVRButton || (this._btnVR.className = "babylonVRicon", this.isInVRMode && (this._btnVR.className += " vrdisplaypresenting")); } /** * Attempt to enter VR. If a headset is connected and ready, will request present on that. * Otherwise, will use the fullscreen API. */ enterVR() { if (this.xr) { this.xr.baseExperience.enterXRAsync("immersive-vr", "local-floor", this.xr.renderTarget); return; } if (this.onEnteringVRObservable) try { this.onEnteringVRObservable.notifyObservers(this); } catch (e) { Ce.Warn("Error in your custom logic onEnteringVR: " + e); } this._scene.activeCamera && (this._position = this._scene.activeCamera.position.clone(), this.vrDeviceOrientationCamera && (this.vrDeviceOrientationCamera.rotation = Ze.FromRotationMatrix(this._scene.activeCamera.getWorldMatrix().getRotationMatrix()).toEulerAngles(), this.vrDeviceOrientationCamera.angularSensibility = 2e3), this._existingCamera = this._scene.activeCamera, this._existingCamera.angularSensibilityX && (this._cachedAngularSensibility.angularSensibilityX = this._existingCamera.angularSensibilityX, this._existingCamera.angularSensibilityX = Number.MAX_VALUE), this._existingCamera.angularSensibilityY && (this._cachedAngularSensibility.angularSensibilityY = this._existingCamera.angularSensibilityY, this._existingCamera.angularSensibilityY = Number.MAX_VALUE), this._existingCamera.angularSensibility && (this._cachedAngularSensibility.angularSensibility = this._existingCamera.angularSensibility, this._existingCamera.angularSensibility = Number.MAX_VALUE)), this._vrDeviceOrientationCamera && (this._vrDeviceOrientationCamera.position = this._position, this._scene.activeCamera && (this._vrDeviceOrientationCamera.minZ = this._scene.activeCamera.minZ), this._scene.activeCamera = this._vrDeviceOrientationCamera, this._scene.getEngine().enterFullscreen(this.requestPointerLockOnFullScreen), this._updateButtonVisibility(), this._vrDeviceOrientationCamera.onViewMatrixChangedObservable.addOnce(() => { this.onAfterEnteringVRObservable.notifyObservers({ success: !0 }); })), this._scene.activeCamera && this._inputElement && this._scene.activeCamera.attachControl(), this._interactionsEnabled && this._scene.registerBeforeRender(this._beforeRender), this._hasEnteredVR = !0; } /** * Attempt to exit VR, or fullscreen. 
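* @example
* // Sketch (public BABYLON names assumed, vrHelper created as above): leave VR from
* // application code, e.g. a HUD button, in addition to the built-in Escape and
* // double-tap handling.
* if (vrHelper.isInVRMode) {
*     vrHelper.exitVR();
* }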
*/ exitVR() { if (this.xr) { this.xr.baseExperience.exitXRAsync(); return; } if (this._hasEnteredVR) { if (this.onExitingVRObservable) try { this.onExitingVRObservable.notifyObservers(this); } catch (e) { Ce.Warn("Error in your custom logic onExitingVR: " + e); } this._scene.activeCamera && (this._position = this._scene.activeCamera.position.clone()), this.vrDeviceOrientationCamera && (this.vrDeviceOrientationCamera.angularSensibility = Number.MAX_VALUE), this._deviceOrientationCamera ? (this._deviceOrientationCamera.position = this._position, this._scene.activeCamera = this._deviceOrientationCamera, this._cachedAngularSensibility.angularSensibilityX && (this._deviceOrientationCamera.angularSensibilityX = this._cachedAngularSensibility.angularSensibilityX, this._cachedAngularSensibility.angularSensibilityX = null), this._cachedAngularSensibility.angularSensibilityY && (this._deviceOrientationCamera.angularSensibilityY = this._cachedAngularSensibility.angularSensibilityY, this._cachedAngularSensibility.angularSensibilityY = null), this._cachedAngularSensibility.angularSensibility && (this._deviceOrientationCamera.angularSensibility = this._cachedAngularSensibility.angularSensibility, this._cachedAngularSensibility.angularSensibility = null)) : this._existingCamera && (this._existingCamera.position = this._position, this._scene.activeCamera = this._existingCamera, this._inputElement && this._scene.activeCamera.attachControl(), this._cachedAngularSensibility.angularSensibilityX && (this._existingCamera.angularSensibilityX = this._cachedAngularSensibility.angularSensibilityX, this._cachedAngularSensibility.angularSensibilityX = null), this._cachedAngularSensibility.angularSensibilityY && (this._existingCamera.angularSensibilityY = this._cachedAngularSensibility.angularSensibilityY, this._cachedAngularSensibility.angularSensibilityY = null), this._cachedAngularSensibility.angularSensibility && (this._existingCamera.angularSensibility = this._cachedAngularSensibility.angularSensibility, this._cachedAngularSensibility.angularSensibility = null)), this._updateButtonVisibility(), this._interactionsEnabled && (this._scene.unregisterBeforeRender(this._beforeRender), this._cameraGazer._gazeTracker.isVisible = !1), this._scene.getEngine().resize(), this._hasEnteredVR = !1; } } /** * The position of the vr experience helper. */ get position() { return this._position; } /** * Sets the position of the vr experience helper. */ set position(e) { this._position = e, this._scene.activeCamera && (this._scene.activeCamera.position = e); } /** * Enables controllers and user interactions such as selecting and object or clicking on an object. */ enableInteractions() { if (!this._interactionsEnabled) { if (this.xr) { this.xr.baseExperience.state === lu.IN_XR && this.xr.pointerSelection.attach(); return; } this.raySelectionPredicate = (e) => e.isVisible && (e.isPickable || e.name === this._floorMeshName), this.meshSelectionPredicate = () => !0, this._raySelectionPredicate = (e) => this._isTeleportationFloor(e) || e.name.indexOf("gazeTracker") === -1 && e.name.indexOf("teleportationTarget") === -1 && e.name.indexOf("torusTeleportation") === -1 ? this.raySelectionPredicate(e) : !1, this._interactionsEnabled = !0; } } _isTeleportationFloor(e) { for (let t = 0; t < this._floorMeshesCollection.length; t++) if (this._floorMeshesCollection[t].id === e.id) return !0; return !!(this._floorMeshName && e.name === this._floorMeshName); } /** * Adds a floor mesh to be used for teleportation. 
* @param floorMesh the mesh to be used for teleportation. */ addFloorMesh(e) { this._floorMeshesCollection && (this._floorMeshesCollection.indexOf(e) > -1 || this._floorMeshesCollection.push(e)); } /** * Removes a floor mesh from being used for teleportation. * @param floorMesh the mesh to be removed. */ removeFloorMesh(e) { if (!this._floorMeshesCollection) return; const t = this._floorMeshesCollection.indexOf(e); t !== -1 && this._floorMeshesCollection.splice(t, 1); } /** * Enables interactions and teleportation using the VR controllers and gaze. * @param vrTeleportationOptions options to modify teleportation behavior. */ enableTeleportation(e = {}) { if (!this._teleportationInitialized) { if (this.enableInteractions(), this.webVROptions.useXR && (e.floorMeshes || e.floorMeshName)) { const i = e.floorMeshes || []; if (!i.length) { const r = this._scene.getMeshByName(e.floorMeshName); r && i.push(r); } if (this.xr) { i.forEach((r) => { this.xr.teleportation.addFloorMesh(r); }), this.xr.teleportation.attached || this.xr.teleportation.attach(); return; } else if (!this.xrTestDone) { const r = () => { this.xrTestDone && (this._scene.unregisterBeforeRender(r), this.xr ? this.xr.teleportation.attached || this.xr.teleportation.attach() : this.enableTeleportation(e)); }; this._scene.registerBeforeRender(r); return; } } e.floorMeshName && (this._floorMeshName = e.floorMeshName), e.floorMeshes && (this._floorMeshesCollection = e.floorMeshes), e.teleportationMode && (this._teleportationMode = e.teleportationMode), e.teleportationTime && e.teleportationTime > 0 && (this._teleportationTime = e.teleportationTime), e.teleportationSpeed && e.teleportationSpeed > 0 && (this._teleportationSpeed = e.teleportationSpeed), e.easingFunction !== void 0 && (this._teleportationEasing = e.easingFunction); const t = new Ds(); t.vignetteColor = new Et(0, 0, 0, 0), t.vignetteEnabled = !0, this._teleportationInitialized = !0, this._isDefaultTeleportationTarget && this._createTeleportationCircles(); } } _checkTeleportWithRay(e, t) { this._teleportationRequestInitiated && !t._teleportationRequestInitiated || (t._teleportationRequestInitiated ? Math.sqrt(e.y * e.y + e.x * e.x) < this._padSensibilityDown && (this._teleportActive && this.teleportCamera(this._haloCenter), t._teleportationRequestInitiated = !1) : e.y < -this._padSensibilityUp && t._dpadPressed && (t._activatePointer(), t._teleportationRequestInitiated = !0)); } _checkRotate(e, t) { t._teleportationRequestInitiated || (t._rotationLeftAsked ? e.x > -this._padSensibilityDown && (t._rotationLeftAsked = !1) : e.x < -this._padSensibilityUp && t._dpadPressed && (t._rotationLeftAsked = !0, this._rotationAllowed && this._rotateCamera(!1)), t._rotationRightAsked ? 
e.x < this._padSensibilityDown && (t._rotationRightAsked = !1) : e.x > this._padSensibilityUp && t._dpadPressed && (t._rotationRightAsked = !0, this._rotationAllowed && this._rotateCamera(!0))); } _checkTeleportBackwards(e, t) { if (!t._teleportationRequestInitiated) if (e.y > this._padSensibilityUp && t._dpadPressed) { if (!t._teleportationBackRequestInitiated) { if (!this.currentVRCamera) return; const i = Ze.FromRotationMatrix(this.currentVRCamera.getWorldMatrix().getRotationMatrix()), r = this.currentVRCamera.position; i.toEulerAnglesToRef(this._workingVector), this._workingVector.z = 0, this._workingVector.x = 0, Ze.RotationYawPitchRollToRef(this._workingVector.y, this._workingVector.x, this._workingVector.z, this._workingQuaternion), this._workingQuaternion.toRotationMatrix(this._workingMatrix), D.TransformCoordinatesToRef(this._teleportBackwardsVector, this._workingMatrix, this._workingVector); const s = new gs(r, this._workingVector), n = this._scene.pickWithRay(s, this._raySelectionPredicate); n && n.pickedPoint && n.pickedMesh && this._isTeleportationFloor(n.pickedMesh) && n.distance < 5 && this.teleportCamera(n.pickedPoint), t._teleportationBackRequestInitiated = !0; } } else t._teleportationBackRequestInitiated = !1; } _createTeleportationCircles() { this._teleportationTarget = zI("teleportationTarget", { width: 2, height: 2, subdivisions: 2 }, this._scene), this._teleportationTarget.isPickable = !1; const e = 512, t = new gg("DynamicTexture", e, this._scene, !0); t.hasAlpha = !0; const i = t.getContext(), r = e / 2, s = e / 2, n = 200; i.beginPath(), i.arc(r, s, n, 0, 2 * Math.PI, !1), i.fillStyle = this._teleportationFillColor, i.fill(), i.lineWidth = 10, i.strokeStyle = this._teleportationBorderColor, i.stroke(), i.closePath(), t.update(); const a = new Dt("TextPlaneMaterial", this._scene); a.diffuseTexture = t, this._teleportationTarget.material = a; const l = o6("torusTeleportation", { diameter: 0.75, thickness: 0.1, tessellation: 25, updatable: !1 }, this._scene); l.isPickable = !1, l.parent = this._teleportationTarget; const o = new nt("animationInnerCircle", "position.y", 30, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CYCLE), u = []; u.push({ frame: 0, value: 0 }), u.push({ frame: 30, value: 0.4 }), u.push({ frame: 60, value: 0 }), o.setKeys(u); const h = new fK(); h.setEasingMode(hl.EASINGMODE_EASEINOUT), o.setEasingFunction(h), l.animations = [], l.animations.push(o), this._scene.beginAnimation(l, 0, 60, !0), this._hideTeleportationTarget(); } _hideTeleportationTarget() { this._teleportActive = !1, this._teleportationInitialized && (this._teleportationTarget.isVisible = !1, this._isDefaultTeleportationTarget && (this._teleportationTarget.getChildren()[0].isVisible = !1)); } _rotateCamera(e) { if (!(this.currentVRCamera instanceof du)) return; e ? 
this._rotationAngle++ : this._rotationAngle--, this.currentVRCamera.animations = []; const t = Ze.FromRotationMatrix(Ae.RotationY(Math.PI / 4 * this._rotationAngle)), i = new nt("animationRotation", "rotationQuaternion", 90, nt.ANIMATIONTYPE_QUATERNION, nt.ANIMATIONLOOPMODE_CONSTANT), r = []; r.push({ frame: 0, value: this.currentVRCamera.rotationQuaternion }), r.push({ frame: 6, value: t }), i.setKeys(r), i.setEasingFunction(this._circleEase), this.currentVRCamera.animations.push(i), this._postProcessMove.animations = []; const s = new nt("animationPP", "vignetteWeight", 90, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CONSTANT), n = []; n.push({ frame: 0, value: 0 }), n.push({ frame: 3, value: 4 }), n.push({ frame: 6, value: 0 }), s.setKeys(n), s.setEasingFunction(this._circleEase), this._postProcessMove.animations.push(s); const a = new nt("animationPP2", "vignetteStretch", 90, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CONSTANT), l = []; l.push({ frame: 0, value: 0 }), l.push({ frame: 3, value: 10 }), l.push({ frame: 6, value: 0 }), a.setKeys(l), a.setEasingFunction(this._circleEase), this._postProcessMove.animations.push(a), this._postProcessMove.imageProcessingConfiguration.vignetteWeight = 0, this._postProcessMove.imageProcessingConfiguration.vignetteStretch = 0, this._postProcessMove.samples = 4, this._scene.beginAnimation(this.currentVRCamera, 0, 6, !1, 1); } /** * Teleports the users feet to the desired location * @param location The location where the user's feet should be placed */ teleportCamera(e) { if (!(this.currentVRCamera instanceof du)) return; this._workingVector.copyFrom(e), this.isInVRMode || (this._workingVector.y += this._defaultHeight), this.onBeforeCameraTeleport.notifyObservers(this._workingVector); const t = 90; let i, r; if (this._teleportationMode == _P.TELEPORTATIONMODE_CONSTANTSPEED) { r = t; const d = D.Distance(this.currentVRCamera.position, this._workingVector); i = this._teleportationSpeed / d; } else r = Math.round(this._teleportationTime * t / 1e3), i = 1; this.currentVRCamera.animations = []; const s = new nt("animationCameraTeleportation", "position", t, nt.ANIMATIONTYPE_VECTOR3, nt.ANIMATIONLOOPMODE_CONSTANT), n = [ { frame: 0, value: this.currentVRCamera.position }, { frame: r, value: this._workingVector } ]; s.setKeys(n), s.setEasingFunction(this._teleportationEasing), this.currentVRCamera.animations.push(s), this._postProcessMove.animations = []; const a = Math.round(r / 2), l = new nt("animationPP", "vignetteWeight", t, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CONSTANT), o = []; o.push({ frame: 0, value: 0 }), o.push({ frame: a, value: 8 }), o.push({ frame: r, value: 0 }), l.setKeys(o), this._postProcessMove.animations.push(l); const u = new nt("animationPP2", "vignetteStretch", t, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CONSTANT), h = []; h.push({ frame: 0, value: 0 }), h.push({ frame: a, value: 10 }), h.push({ frame: r, value: 0 }), u.setKeys(h), this._postProcessMove.animations.push(u), this._postProcessMove.imageProcessingConfiguration.vignetteWeight = 0, this._postProcessMove.imageProcessingConfiguration.vignetteStretch = 0, this._scene.beginAnimation(this.currentVRCamera, 0, r, !1, i, () => { this.onAfterCameraTeleport.notifyObservers(this._workingVector); }), this._hideTeleportationTarget(); } /** * Permanently set new colors for the laser pointer * @param color the new laser color * @param pickedColor the new laser color when picked mesh detected */ setLaserColor(e, t = this._pickedLaserColor) { this._pickedLaserColor = t; } 
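/*
 * A minimal sketch, assuming the public BABYLON names for this minified bundle
 * (vrHelper and ground are assumed to exist): teleportation can also be driven from
 * application code, e.g. to implement snap points. teleportCamera() animates the
 * camera position and the vignette post-process using the mode/time/speed configured
 * through enableTeleportation().
 *
 * vrHelper.enableTeleportation({
 *     floorMeshes: [ground],
 *     teleportationMode: BABYLON.VRExperienceHelper.TELEPORTATIONMODE_CONSTANTSPEED,
 *     teleportationSpeed: 20, // units per second, only used in constant-speed mode
 * });
 * vrHelper.teleportCamera(new BABYLON.Vector3(0, 0, 5));
 */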
/** * Set lighting enabled / disabled on the laser pointer of both controllers * @param enabled should the lighting be enabled on the laser pointer */ setLaserLightingState(e = !0) { } /** * Permanently set new colors for the gaze pointer * @param color the new gaze color * @param pickedColor the new gaze color when picked mesh detected */ setGazeColor(e, t = this._pickedGazeColor) { this._pickedGazeColor = t; } /** * Sets the color of the laser ray from the vr controllers. * @param color new color for the ray. */ changeLaserColor(e) { this.updateControllerLaserColor; } /** * Sets the color of the ray from the vr headsets gaze. * @param color new color for the ray. */ changeGazeColor(e) { this.updateGazeTrackerColor && this._cameraGazer._gazeTracker.material && (this._cameraGazer._gazeTracker.material.emissiveColor = e); } /** * Exits VR and disposes of the vr experience helper */ dispose() { this.isInVRMode && this.exitVR(), this._postProcessMove && this._postProcessMove.dispose(), this._vrDeviceOrientationCamera && this._vrDeviceOrientationCamera.dispose(), !this._useCustomVRButton && this._btnVR && this._btnVR.parentNode && document.body.removeChild(this._btnVR), this._deviceOrientationCamera && this._scene.activeCamera != this._deviceOrientationCamera && this._deviceOrientationCamera.dispose(), this._cameraGazer && this._cameraGazer.dispose(), this._teleportationTarget && this._teleportationTarget.dispose(), this.xr && this.xr.dispose(), this._floorMeshesCollection.length = 0, document.removeEventListener("keydown", this._onKeyDown), window.removeEventListener("vrdisplaypresentchange", this._onVrDisplayPresentChangeBind), window.removeEventListener("resize", this._onResize), document.removeEventListener("fullscreenchange", this._onFullscreenChange), this._scene.gamepadManager.onGamepadConnectedObservable.removeCallback(this._onNewGamepadConnected), this._scene.unregisterBeforeRender(this._beforeRender); } /** * Gets the name of the VRExperienceHelper class * @returns "VRExperienceHelper" */ getClassName() { return "VRExperienceHelper"; } } _P.TELEPORTATIONMODE_CONSTANTTIME = 0; _P.TELEPORTATIONMODE_CONSTANTSPEED = 1; const Ode = (c, e, t, i) => !(c.x > t.x + i || t.x - i > e.x || c.y > t.y + i || t.y - i > e.y || c.z > t.z + i || t.z - i > e.z), zD = /* @__PURE__ */ function() { const c = { root: 0, found: !1 }; return function(e, t, i, r) { c.root = 0, c.found = !1; const s = t * t - 4 * e * i; if (s < 0) return c; const n = Math.sqrt(s); let a = (-t - n) / (2 * e), l = (-t + n) / (2 * e); if (a > l) { const o = l; l = a, a = o; } return a > 0 && a < r ? (c.root = a, c.found = !0, c) : (l > 0 && l < r && (c.root = l, c.found = !0), c); }; }(); class rN { constructor() { this._collisionPoint = D.Zero(), this._planeIntersectionPoint = D.Zero(), this._tempVector = D.Zero(), this._tempVector2 = D.Zero(), this._tempVector3 = D.Zero(), this._tempVector4 = D.Zero(), this._edge = D.Zero(), this._baseToVertex = D.Zero(), this._destinationPoint = D.Zero(), this._slidePlaneNormal = D.Zero(), this._displacementVector = D.Zero(), this._radius = D.One(), this._retry = 0, this._basePointWorld = D.Zero(), this._velocityWorld = D.Zero(), this._normalizedVelocity = D.Zero(), this._collisionMask = -1; } get collisionMask() { return this._collisionMask; } set collisionMask(e) { this._collisionMask = isNaN(e) ? 
-1 : e; } /** * Gets the plane normal used to compute the sliding response (in local space) */ get slidePlaneNormal() { return this._slidePlaneNormal; } // Methods /** * @internal */ _initialize(e, t, i) { this._velocity = t, this._velocitySquaredLength = this._velocity.lengthSquared(); const r = Math.sqrt(this._velocitySquaredLength); r === 0 || r === 1 ? this._normalizedVelocity.copyFromFloats(t._x, t._y, t._z) : t.scaleToRef(1 / r, this._normalizedVelocity), this._basePoint = e, e.multiplyToRef(this._radius, this._basePointWorld), t.multiplyToRef(this._radius, this._velocityWorld), this._velocityWorldLength = this._velocityWorld.length(), this._epsilon = i, this.collisionFound = !1; } /** * @internal */ _checkPointInTriangle(e, t, i, r, s) { t.subtractToRef(e, this._tempVector), i.subtractToRef(e, this._tempVector2), D.CrossToRef(this._tempVector, this._tempVector2, this._tempVector4); let n = D.Dot(this._tempVector4, s); return n < 0 || (r.subtractToRef(e, this._tempVector3), D.CrossToRef(this._tempVector2, this._tempVector3, this._tempVector4), n = D.Dot(this._tempVector4, s), n < 0) ? !1 : (D.CrossToRef(this._tempVector3, this._tempVector, this._tempVector4), n = D.Dot(this._tempVector4, s), n >= 0); } /** * @internal */ _canDoCollision(e, t, i, r) { const s = D.Distance(this._basePointWorld, e), n = Math.max(this._radius.x, this._radius.y, this._radius.z); return !(s > this._velocityWorldLength + n + t || !Ode(i, r, this._basePointWorld, this._velocityWorldLength + n)); } /** * @internal */ _testTriangle(e, t, i, r, s, n, a) { let l, o = !1; t || (t = []), t[e] || (t[e] = new Sd(0, 0, 0, 0), t[e].copyFromPoints(i, r, s)); const u = t[e]; if (!n && !u.isFrontFacingTo(this._normalizedVelocity, 0)) return; const h = u.signedDistanceTo(this._basePoint), d = D.Dot(u.normal, this._velocity); if (rN.DoubleSidedCheck && d > 1e-4) return; if (d == 0) { if (Math.abs(h) >= 1) return; o = !0, l = 0; } else { l = (-1 - h) / d; let m = (1 - h) / d; if (l > m) { const _ = m; m = l, l = _; } if (l > 1 || m < 0) return; l < 0 && (l = 0), l > 1 && (l = 1); } this._collisionPoint.copyFromFloats(0, 0, 0); let f = !1, p = 1; if (o || (this._basePoint.subtractToRef(u.normal, this._planeIntersectionPoint), this._velocity.scaleToRef(l, this._tempVector), this._planeIntersectionPoint.addInPlace(this._tempVector), this._checkPointInTriangle(this._planeIntersectionPoint, i, r, s, u.normal) && (f = !0, p = l, this._collisionPoint.copyFrom(this._planeIntersectionPoint))), !f) { let m = this._velocitySquaredLength; this._basePoint.subtractToRef(i, this._tempVector); let _ = 2 * D.Dot(this._velocity, this._tempVector), v = this._tempVector.lengthSquared() - 1, C = zD(m, _, v, p); C.found && (p = C.root, f = !0, this._collisionPoint.copyFrom(i)), this._basePoint.subtractToRef(r, this._tempVector), _ = 2 * D.Dot(this._velocity, this._tempVector), v = this._tempVector.lengthSquared() - 1, C = zD(m, _, v, p), C.found && (p = C.root, f = !0, this._collisionPoint.copyFrom(r)), this._basePoint.subtractToRef(s, this._tempVector), _ = 2 * D.Dot(this._velocity, this._tempVector), v = this._tempVector.lengthSquared() - 1, C = zD(m, _, v, p), C.found && (p = C.root, f = !0, this._collisionPoint.copyFrom(s)), r.subtractToRef(i, this._edge), i.subtractToRef(this._basePoint, this._baseToVertex); let x = this._edge.lengthSquared(), b = D.Dot(this._edge, this._velocity), S = D.Dot(this._edge, this._baseToVertex); if (m = x * -this._velocitySquaredLength + b * b, _ = 2 * (x * D.Dot(this._velocity, this._baseToVertex) - b * S), v = 
x * (1 - this._baseToVertex.lengthSquared()) + S * S, C = zD(m, _, v, p), C.found) { const M = (b * C.root - S) / x; M >= 0 && M <= 1 && (p = C.root, f = !0, this._edge.scaleInPlace(M), i.addToRef(this._edge, this._collisionPoint)); } if (s.subtractToRef(r, this._edge), r.subtractToRef(this._basePoint, this._baseToVertex), x = this._edge.lengthSquared(), b = D.Dot(this._edge, this._velocity), S = D.Dot(this._edge, this._baseToVertex), m = x * -this._velocitySquaredLength + b * b, _ = 2 * (x * D.Dot(this._velocity, this._baseToVertex) - b * S), v = x * (1 - this._baseToVertex.lengthSquared()) + S * S, C = zD(m, _, v, p), C.found) { const M = (b * C.root - S) / x; M >= 0 && M <= 1 && (p = C.root, f = !0, this._edge.scaleInPlace(M), r.addToRef(this._edge, this._collisionPoint)); } if (i.subtractToRef(s, this._edge), s.subtractToRef(this._basePoint, this._baseToVertex), x = this._edge.lengthSquared(), b = D.Dot(this._edge, this._velocity), S = D.Dot(this._edge, this._baseToVertex), m = x * -this._velocitySquaredLength + b * b, _ = 2 * (x * D.Dot(this._velocity, this._baseToVertex) - b * S), v = x * (1 - this._baseToVertex.lengthSquared()) + S * S, C = zD(m, _, v, p), C.found) { const M = (b * C.root - S) / x; M >= 0 && M <= 1 && (p = C.root, f = !0, this._edge.scaleInPlace(M), s.addToRef(this._edge, this._collisionPoint)); } } if (f) { const m = p * p * this._velocitySquaredLength; (!this.collisionFound || m < this._nearestDistanceSquared) && (a.collisionResponse && (this.intersectionPoint ? this.intersectionPoint.copyFrom(this._collisionPoint) : this.intersectionPoint = this._collisionPoint.clone(), this._nearestDistanceSquared = m, this._nearestDistance = Math.sqrt(m), this.collisionFound = !0), this.collidedMesh = a); } } /** * @internal */ _collide(e, t, i, r, s, n, a, l, o, u = !1) { if (u) if (!i || i.length === 0) for (let h = 0; h < t.length - 2; h += 1) { const d = t[h], f = t[h + 1], p = t[h + 2]; !d || !f || !p || ((o ? 1 : 0) ^ h % 2 ? this._testTriangle(h, e, d, f, p, a, l) : this._testTriangle(h, e, f, d, p, a, l)); } else for (let h = r; h < s - 2; h += 1) { const d = i[h], f = i[h + 1], p = i[h + 2]; if (p === 4294967295) { h += 2; continue; } const m = t[d], _ = t[f], v = t[p]; !m || !_ || !v || ((o ? 1 : 0) ^ h % 2 ? this._testTriangle(h, e, m, _, v, a, l) : this._testTriangle(h, e, _, m, v, a, l)); } else if (!i || i.length === 0) for (let h = 0; h < t.length; h += 3) { const d = t[h], f = t[h + 1], p = t[h + 2]; o ? this._testTriangle(h, e, d, f, p, a, l) : this._testTriangle(h, e, p, f, d, a, l); } else for (let h = r; h < s; h += 3) { const d = t[i[h] - n], f = t[i[h + 1] - n], p = t[i[h + 2] - n]; o ? 
this._testTriangle(h, e, d, f, p, a, l) : this._testTriangle(h, e, p, f, d, a, l); } } /** * @internal */ _getResponse(e, t) { e.addToRef(t, this._destinationPoint), t.scaleInPlace(this._nearestDistance / t.length()), this._basePoint.addToRef(t, e), e.subtractToRef(this.intersectionPoint, this._slidePlaneNormal), this._slidePlaneNormal.normalize(), this._slidePlaneNormal.scaleToRef(this._epsilon, this._displacementVector), e.addInPlace(this._displacementVector), this.intersectionPoint.addInPlace(this._displacementVector), this._slidePlaneNormal.scaleInPlace(Sd.SignedDistanceToPlaneFromPositionAndNormal(this.intersectionPoint, this._slidePlaneNormal, this._destinationPoint)), this._destinationPoint.subtractInPlace(this._slidePlaneNormal), this._destinationPoint.subtractToRef(this.intersectionPoint, t); } } rN.DoubleSidedCheck = !1; class yie { constructor() { this._scaledPosition = D.Zero(), this._scaledVelocity = D.Zero(), this._finalPosition = D.Zero(); } getNewPosition(e, t, i, r, s, n, a) { e.divideToRef(i._radius, this._scaledPosition), t.divideToRef(i._radius, this._scaledVelocity), i.collidedMesh = null, i._retry = 0, i._initialVelocity = this._scaledVelocity, i._initialPosition = this._scaledPosition, this._collideWithWorld(this._scaledPosition, this._scaledVelocity, i, r, this._finalPosition, s), this._finalPosition.multiplyInPlace(i._radius), n(a, this._finalPosition, i.collidedMesh); } createCollider() { return new rN(); } init(e) { this._scene = e; } _collideWithWorld(e, t, i, r, s, n = null) { const a = $e.CollisionsEpsilon * 10; if (i._retry >= r) { s.copyFrom(e); return; } const l = n ? n.collisionMask : i.collisionMask; i._initialize(e, t, a); const o = n && n.surroundingMeshes || this._scene.meshes; for (let u = 0; u < o.length; u++) { const h = o[u]; h.isEnabled() && h.checkCollisions && h.subMeshes && h !== n && l & h.collisionGroup && h._checkCollision(i); } if (!i.collisionFound) { e.addToRef(t, s); return; } if ((t.x !== 0 || t.y !== 0 || t.z !== 0) && i._getResponse(e, t), t.length() <= a) { s.copyFrom(e); return; } i._retry++, this._collideWithWorld(e, t, i, r, s, n); } } ii.CollisionCoordinatorFactory = () => new yie(); class mP { /** * Creates a compute effect that can be used to execute a compute shader * @param baseName Name of the effect * @param options Set of all options to create the effect * @param engine The engine the effect is created for * @param key Effect Key identifying uniquely compiled shader variants */ constructor(e, t, i, r = "") { var s, n; this.name = null, this.defines = "", this.onCompiled = null, this.onError = null, this.uniqueId = 0, this.onCompileObservable = new Fe(), this.onErrorObservable = new Fe(), this.onBindObservable = new Fe(), this._wasPreviouslyReady = !1, this._isReady = !1, this._compilationError = "", this._key = "", this._computeSourceCodeOverride = "", this._pipelineContext = null, this._computeSourceCode = "", this._rawComputeSourceCode = "", this._shaderLanguage = Xa.WGSL, this.name = e, this._key = r, this._engine = i, this.uniqueId = mP._UniqueIdSeed++, this.defines = (s = t.defines) !== null && s !== void 0 ? s : "", this.onError = t.onError, this.onCompiled = t.onCompiled, this._entryPoint = (n = t.entryPoint) !== null && n !== void 0 ? n : "main", this._shaderStore = je.GetShadersStore(this._shaderLanguage), this._shaderRepository = je.GetShadersRepository(this._shaderLanguage), this._includeShaderStore = je.GetIncludesShadersStore(this._shaderLanguage); let a; const l = cu() ? 
this._engine.getHostDocument() : null; e.computeSource ? a = "source:" + e.computeSource : e.computeElement ? (a = l ? l.getElementById(e.computeElement) : null, a || (a = e.computeElement)) : a = e.compute || e; const o = { defines: this.defines.split(` `), indexParameters: void 0, isFragment: !1, shouldUseHighPrecisionShader: !1, processor: null, supportsUniformBuffers: this._engine.supportsUniformBuffers, shadersRepository: this._shaderRepository, includesShadersStore: this._includeShaderStore, version: (this._engine.version * 100).toString(), platformName: this._engine.shaderPlatformName, processingContext: null, isNDCHalfZRange: this._engine.isNDCHalfZRange, useReverseDepthBuffer: this._engine.useReverseDepthBuffer }; this._loadShader(a, "Compute", "", (u) => { pg.Initialize(o), pg.PreProcess(u, o, (h) => { this._rawComputeSourceCode = u, t.processFinalCode && (h = t.processFinalCode(h)); const d = pg.Finalize(h, "", o); this._useFinalCode(d.vertexCode, e); }, this._engine); }); } _useFinalCode(e, t) { if (t) { const i = t.computeElement || t.compute || t.spectorName || t; this._computeSourceCode = "//#define SHADER_NAME compute:" + i + ` ` + e; } else this._computeSourceCode = e; this._prepareEffect(); } /** * Unique key for this effect */ get key() { return this._key; } /** * If the effect has been compiled and prepared. * @returns if the effect is compiled and prepared. */ isReady() { try { return this._isReadyInternal(); } catch { return !1; } } _isReadyInternal() { return this._isReady ? !0 : this._pipelineContext ? this._pipelineContext.isReady : !1; } /** * The engine the effect was initialized with. * @returns the engine. */ getEngine() { return this._engine; } /** * The pipeline context for this effect * @returns the associated pipeline context */ getPipelineContext() { return this._pipelineContext; } /** * The error from the last compilation. * @returns the error string. */ getCompilationError() { return this._compilationError; } /** * Adds a callback to the onCompiled observable and call the callback immediately if already ready. * @param func The callback to be used. */ executeWhenCompiled(e) { if (this.isReady()) { e(this); return; } this.onCompileObservable.add((t) => { e(t); }), (!this._pipelineContext || this._pipelineContext.isAsync) && setTimeout(() => { this._checkIsReady(null); }, 16); } _checkIsReady(e) { try { if (this._isReadyInternal()) return; } catch (t) { this._processCompilationErrors(t, e); return; } setTimeout(() => { this._checkIsReady(e); }, 16); } _loadShader(e, t, i, r) { if (typeof HTMLElement < "u" && e instanceof HTMLElement) { const n = IL(e); r(n); return; } if (e.substr(0, 7) === "source:") { r(e.substr(7)); return; } if (e.substr(0, 7) === "base64:") { const n = window.atob(e.substr(7)); r(n); return; } if (this._shaderStore[e + t + "Shader"]) { r(this._shaderStore[e + t + "Shader"]); return; } if (i && this._shaderStore[e + i + "Shader"]) { r(this._shaderStore[e + i + "Shader"]); return; } let s; e[0] === "." || e[0] === "/" || e.indexOf("http") > -1 ? s = e : s = this._shaderRepository + e, this._engine._loadFile(s + "." + t.toLowerCase() + ".fx", r); } /** * Gets the compute shader source code of this effect */ get computeSourceCode() { var e, t; return this._computeSourceCodeOverride ? this._computeSourceCodeOverride : (t = (e = this._pipelineContext) === null || e === void 0 ? void 0 : e._getComputeShaderCode()) !== null && t !== void 0 ? 
t : this._computeSourceCode; } /** * Gets the compute shader source code before it has been processed by the preprocessor */ get rawComputeSourceCode() { return this._rawComputeSourceCode; } /** * Prepares the effect * @internal */ _prepareEffect() { const e = this.defines, t = this._pipelineContext; this._isReady = !1; try { const i = this._engine; this._pipelineContext = i.createComputePipelineContext(), this._pipelineContext._name = this._key, i._prepareComputePipelineContext(this._pipelineContext, this._computeSourceCodeOverride ? this._computeSourceCodeOverride : this._computeSourceCode, this._rawComputeSourceCode, this._computeSourceCodeOverride ? null : e, this._entryPoint), i._executeWhenComputeStateIsCompiled(this._pipelineContext, () => { this._compilationError = "", this._isReady = !0, this.onCompiled && this.onCompiled(this), this.onCompileObservable.notifyObservers(this), this.onCompileObservable.clear(), t && this.getEngine()._deleteComputePipelineContext(t); }), this._pipelineContext.isAsync && this._checkIsReady(t); } catch (i) { this._processCompilationErrors(i, t); } } _getShaderCodeAndErrorLine(e, t) { const i = /COMPUTE SHADER ERROR: 0:(\d+?):/; let r = null; if (t && e) { const s = t.match(i); if (s && s.length === 2) { const n = parseInt(s[1]), a = e.split(` `, -1); a.length >= n && (r = `Offending line [${n}] in compute code: ${a[n - 1]}`); } } return [e, r]; } _processCompilationErrors(e, t = null) { var i; if (this._compilationError = e.message, Ce.Error("Unable to compile compute effect:"), Ce.Error(`Defines: ` + this.defines), mP.LogShaderCodeOnCompilationError) { let r = null, s = null; !((i = this._pipelineContext) === null || i === void 0) && i._getComputeShaderCode() && ([s, r] = this._getShaderCodeAndErrorLine(this._pipelineContext._getComputeShaderCode(), this._compilationError), s && (Ce.Error("Compute code:"), Ce.Error(s))), r && Ce.Error(r); } Ce.Error("Error: " + this._compilationError), t && (this._pipelineContext = t, this._isReady = !0, this.onError && this.onError(this, this._compilationError), this.onErrorObservable.notifyObservers(this)); } /** * Release all associated resources. 
**/ dispose() { this._pipelineContext && this._pipelineContext.dispose(), this._engine._releaseComputeEffect(this); } /** * This function will add a new compute shader to the shader store * @param name the name of the shader * @param computeShader compute shader content */ static RegisterShader(e, t) { je.GetShadersStore(Xa.WGSL)[`${e}ComputeShader`] = t; } } mP._UniqueIdSeed = 0; mP.LogShaderCodeOnCompilationError = !0; var ro; (function(c) { c[c.Texture = 0] = "Texture", c[c.StorageTexture = 1] = "StorageTexture", c[c.UniformBuffer = 2] = "UniformBuffer", c[c.StorageBuffer = 3] = "StorageBuffer", c[c.TextureWithoutSampler = 4] = "TextureWithoutSampler", c[c.Sampler = 5] = "Sampler", c[c.ExternalTexture = 6] = "ExternalTexture"; })(ro || (ro = {})); mi.prototype.createComputeEffect = function(c, e) { throw new Error("createComputeEffect: This engine does not support compute shaders!"); }; mi.prototype.createComputePipelineContext = function() { throw new Error("createComputePipelineContext: This engine does not support compute shaders!"); }; mi.prototype.createComputeContext = function() { }; mi.prototype.computeDispatch = function(c, e, t, i, r, s, n) { throw new Error("computeDispatch: This engine does not support compute shaders!"); }; mi.prototype.areAllComputeEffectsReady = function() { return !0; }; mi.prototype.releaseComputeEffects = function() { }; mi.prototype._prepareComputePipelineContext = function(c, e, t, i, r) { }; mi.prototype._rebuildComputeEffects = function() { }; mi.prototype._executeWhenComputeStateIsCompiled = function(c, e) { e(); }; mi.prototype._releaseComputeEffect = function(c) { }; mi.prototype._deleteComputePipelineContext = function(c) { }; class VK { constructor() { this._gpuTimeInFrameId = -1, this.counter = new Vc(); } /** * @internal */ _addDuration(e, t) { e < this._gpuTimeInFrameId || (this._gpuTimeInFrameId !== e ? (this.counter._fetchResult(), this.counter.fetchNewFrame(), this.counter.addCount(t, !1), this._gpuTimeInFrameId = e) : this.counter.addCount(t, !1)); } } class HI { /** * The options used to create the shader */ get options() { return this._options; } /** * The shaderPath used to create the shader */ get shaderPath() { return this._shaderPath; } /** * Instantiates a new compute shader. * @param name Defines the name of the compute shader in the scene * @param engine Defines the engine the compute shader belongs to * @param shaderPath Defines the route to the shader code in one of three ways: * * object: \{ compute: "custom" \}, used with ShaderStore.ShadersStoreWGSL["customComputeShader"] * * object: \{ computeElement: "HTMLElementId" \}, used with shader code in script tags * * object: \{ computeSource: "compute shader code string" \}, where the string contains the shader code * * string: try first to find the code in ShaderStore.ShadersStoreWGSL[shaderPath + "ComputeShader"]. If not, assumes it is a file with name shaderPath.compute.fx in index.html folder. 
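 * A minimal usage sketch, assuming a WebGPU-capable engine; the source string "src", the texture "tex" and the binding name "dest" are illustrative names, not defined in this bundle:
 * @example
 * const cs = new BABYLON.ComputeShader("copy", engine, { computeSource: src }, {
 *     bindingsMapping: { "dest": { group: 0, binding: 0 } }
 * });
 * cs.setStorageTexture("dest", tex);
 * // Workgroup size of 8x8 is assumed by this illustrative shader source.
 * cs.dispatchWhenReady(Math.ceil(tex.getSize().width / 8), Math.ceil(tex.getSize().height / 8), 1);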
* @param options Define the options used to create the shader */ constructor(e, t, i, r = {}) { if (this._bindings = {}, this._samplers = {}, this._contextIsDirty = !1, this.fastMode = !1, this.onCompiled = null, this.onError = null, this.name = e, this._engine = t, this.uniqueId = LL.UniqueId, t.enableGPUTimingMeasurements && (this.gpuTimeInFrame = new VK()), !this._engine.getCaps().supportComputeShaders) { Ce.Error("This engine does not support compute shaders!"); return; } if (!r.bindingsMapping) { Ce.Error("You must provide the binding mappings as browsers don't support reflection for wgsl shaders yet!"); return; } this._context = t.createComputeContext(), this._shaderPath = i, this._options = Object.assign({ bindingsMapping: {}, defines: [] }, r); } /** * Gets the current class name of the material e.g. "ComputeShader" * Mainly use in serialization. * @returns the class name */ getClassName() { return "ComputeShader"; } /** * Binds a texture to the shader * @param name Binding name of the texture * @param texture Texture to bind * @param bindSampler Bind the sampler corresponding to the texture (default: true). The sampler will be bound just before the binding index of the texture */ setTexture(e, t, i = !0) { const r = this._bindings[e]; this._bindings[e] = { type: i ? ro.Texture : ro.TextureWithoutSampler, object: t, indexInGroupEntries: r == null ? void 0 : r.indexInGroupEntries }, this._contextIsDirty || (this._contextIsDirty = !r || r.object !== t || r.type !== this._bindings[e].type); } /** * Binds a storage texture to the shader * @param name Binding name of the texture * @param texture Texture to bind */ setStorageTexture(e, t) { const i = this._bindings[e]; this._contextIsDirty || (this._contextIsDirty = !i || i.object !== t), this._bindings[e] = { type: ro.StorageTexture, object: t, indexInGroupEntries: i == null ? void 0 : i.indexInGroupEntries }; } /** * Binds an external texture to the shader * @param name Binding name of the texture * @param texture Texture to bind */ setExternalTexture(e, t) { const i = this._bindings[e]; this._contextIsDirty || (this._contextIsDirty = !i || i.object !== t), this._bindings[e] = { type: ro.ExternalTexture, object: t, indexInGroupEntries: i == null ? void 0 : i.indexInGroupEntries }; } /** * Binds a video texture to the shader (by binding the external texture attached to this video) * @param name Binding name of the texture * @param texture Texture to bind * @returns true if the video texture was successfully bound, else false. false will be returned if the current engine does not support external textures */ setVideoTexture(e, t) { return t.externalTexture ? (this.setExternalTexture(e, t.externalTexture), !0) : !1; } /** * Binds a uniform buffer to the shader * @param name Binding name of the buffer * @param buffer Buffer to bind */ setUniformBuffer(e, t) { const i = this._bindings[e]; this._contextIsDirty || (this._contextIsDirty = !i || i.object !== t), this._bindings[e] = { type: ro.UniformBuffer, object: t, indexInGroupEntries: i == null ? void 0 : i.indexInGroupEntries }; } /** * Binds a storage buffer to the shader * @param name Binding name of the buffer * @param buffer Buffer to bind */ setStorageBuffer(e, t) { const i = this._bindings[e]; this._contextIsDirty || (this._contextIsDirty = !i || i.object !== t), this._bindings[e] = { type: ro.StorageBuffer, object: t, indexInGroupEntries: i == null ? 
void 0 : i.indexInGroupEntries }; } /** * Binds a texture sampler to the shader * @param name Binding name of the sampler * @param sampler Sampler to bind */ setTextureSampler(e, t) { const i = this._bindings[e]; this._contextIsDirty || (this._contextIsDirty = !i || !t.compareSampler(i.object)), this._bindings[e] = { type: ro.Sampler, object: t, indexInGroupEntries: i == null ? void 0 : i.indexInGroupEntries }; } /** * Specifies that the compute shader is ready to be executed (the compute effect and all the resources are ready) * @returns true if the compute shader is ready to be executed */ isReady() { let e = this._effect; for (const s in this._bindings) { const n = this._bindings[s], a = n.type, l = n.object; switch (a) { case ro.Texture: case ro.TextureWithoutSampler: case ro.StorageTexture: { if (!l.isReady()) return !1; break; } case ro.ExternalTexture: { if (!l.isReady()) return !1; break; } } } const t = [], i = this._shaderPath; if (this._options.defines) for (let s = 0; s < this._options.defines.length; s++) t.push(this._options.defines[s]); const r = t.join(` `); return this._cachedDefines !== r && (this._cachedDefines = r, e = this._engine.createComputeEffect(i, { defines: r, entryPoint: this._options.entryPoint, onCompiled: this.onCompiled, onError: this.onError }), this._effect = e), !!e.isReady(); } /** * Dispatches (executes) the compute shader * @param x Number of workgroups to execute on the X dimension * @param y Number of workgroups to execute on the Y dimension (default: 1) * @param z Number of workgroups to execute on the Z dimension (default: 1) * @returns True if the dispatch could be done, else false (meaning either the compute effect or at least one of the bound resources was not ready) */ dispatch(e, t, i) { var r; if (!this.fastMode) { if (!this.isReady()) return !1; for (const s in this._bindings) { const n = this._bindings[s]; if (!this._options.bindingsMapping[s]) throw new Error("ComputeShader ('" + this.name + "'): No binding mapping has been provided for the property '" + s + "'"); switch (n.type) { case ro.Texture: { const a = this._samplers[s], l = n.object; (!a || !l._texture || !a.compareSampler(l._texture)) && (this._samplers[s] = new nK().setParameters(l.wrapU, l.wrapV, l.wrapR, l.anisotropicFilteringLevel, l._texture.samplingMode, (r = l._texture) === null || r === void 0 ? void 0 : r._comparisonFunction), this._contextIsDirty = !0); break; } case ro.ExternalTexture: { this._contextIsDirty = !0; break; } case ro.UniformBuffer: { const a = n.object; a.getBuffer() !== n.buffer && (n.buffer = a.getBuffer(), this._contextIsDirty = !0); break; } } } this._contextIsDirty && (this._contextIsDirty = !1, this._context.clear()); } return this._engine.computeDispatch(this._effect, this._context, this._bindings, e, t, i, this._options.bindingsMapping, this.gpuTimeInFrame), !0; } /** * Waits for the compute shader to be ready and executes it * @param x Number of workgroups to execute on the X dimension * @param y Number of workgroups to execute on the Y dimension (default: 1) * @param z Number of workgroups to execute on the Z dimension (default: 1) * @param delay Delay between the retries while the shader is not ready (in milliseconds - 10 by default) * @returns A promise that is resolved once the shader has been sent to the GPU. Note that it does not mean that the shader execution itself is finished! */ dispatchWhenReady(e, t, i, r = 10) { return new Promise((s) => { const n = () => { this.dispatch(e, t, i) ? 
s() : setTimeout(n, r); }; n(); }); } /** * Serializes this compute shader in a JSON representation * @returns the serialized compute shader object */ serialize() { const e = St.Serialize(this); e.options = this._options, e.shaderPath = this._shaderPath, e.bindings = {}, e.textures = {}; for (const t in this._bindings) { const i = this._bindings[t], r = i.object; switch (i.type) { case ro.Texture: case ro.TextureWithoutSampler: case ro.StorageTexture: { const s = r.serialize(); s && (e.textures[t] = s, e.bindings[t] = { type: i.type }); break; } case ro.UniformBuffer: break; } } return e; } /** * Creates a compute shader from parsed compute shader data * @param source defines the JSON representation of the compute shader * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a new compute shader */ static Parse(e, t, i) { const r = St.Parse(() => new HI(e.name, t.getEngine(), e.shaderPath, e.options), e, t, i); for (const s in e.textures) { const n = e.bindings[s], a = De.Parse(e.textures[s], t, i); n.type === ro.Texture ? r.setTexture(s, a) : n.type === ro.TextureWithoutSampler ? r.setTexture(s, a, !1) : r.setStorageTexture(s, a); } return r; } } F([ W() ], HI.prototype, "name", void 0); F([ W() ], HI.prototype, "fastMode", void 0); Be("BABYLON.ComputeShader", HI); class iL { /** * Creates a new block * @param minPoint defines the minimum vector (in world space) of the block's bounding box * @param maxPoint defines the maximum vector (in world space) of the block's bounding box * @param capacity defines the maximum capacity of this block (if capacity is reached the block will be split into sub blocks) * @param depth defines the current depth of this block in the octree * @param maxDepth defines the maximal depth allowed (beyond this value, the capacity is ignored) * @param creationFunc defines a callback to call when an element is added to the block */ constructor(e, t, i, r, s, n) { this.entries = [], this._boundingVectors = new Array(), this._capacity = i, this._depth = r, this._maxDepth = s, this._creationFunc = n, this._minPoint = e, this._maxPoint = t, this._boundingVectors.push(e.clone()), this._boundingVectors.push(t.clone()), this._boundingVectors.push(e.clone()), this._boundingVectors[2].x = t.x, this._boundingVectors.push(e.clone()), this._boundingVectors[3].y = t.y, this._boundingVectors.push(e.clone()), this._boundingVectors[4].z = t.z, this._boundingVectors.push(t.clone()), this._boundingVectors[5].z = e.z, this._boundingVectors.push(t.clone()), this._boundingVectors[6].x = e.x, this._boundingVectors.push(t.clone()), this._boundingVectors[7].y = e.y; } // Property /** * Gets the maximum capacity of this block (if capacity is reached the block will be split into sub blocks) */ get capacity() { return this._capacity; } /** * Gets the minimum vector (in world space) of the block's bounding box */ get minPoint() { return this._minPoint; } /** * Gets the maximum vector (in world space) of the block's bounding box */ get maxPoint() { return this._maxPoint; } // Methods /** * Add a new element to this block * @param entry defines the element to add */ addEntry(e) { if (this.blocks) { for (let t = 0; t < this.blocks.length; t++) this.blocks[t].addEntry(e); return; } this._creationFunc(e, this), this.entries.length > this.capacity && this._depth < this._maxDepth && this.createInnerBlocks(); } /** * Remove an element from this block * @param entry defines the element to remove */ removeEntry(e) { if 
(this.blocks) { for (let i = 0; i < this.blocks.length; i++) this.blocks[i].removeEntry(e); return; } const t = this.entries.indexOf(e); t > -1 && this.entries.splice(t, 1); } /** * Add an array of elements to this block * @param entries defines the array of elements to add */ addEntries(e) { for (let t = 0; t < e.length; t++) { const i = e[t]; this.addEntry(i); } } /** * Test if the current block intersects the frustum planes and if yes, then add its content to the selection array * @param frustumPlanes defines the frustum planes to test * @param selection defines the array to store current content if selection is positive * @param allowDuplicate defines if the selection array can contains duplicated entries */ select(e, t, i) { if (fg.IsInFrustum(this._boundingVectors, e)) { if (this.blocks) { for (let r = 0; r < this.blocks.length; r++) this.blocks[r].select(e, t, i); return; } i ? t.concat(this.entries) : t.concatWithNoDuplicate(this.entries); } } /** * Test if the current block intersect with the given bounding sphere and if yes, then add its content to the selection array * @param sphereCenter defines the bounding sphere center * @param sphereRadius defines the bounding sphere radius * @param selection defines the array to store current content if selection is positive * @param allowDuplicate defines if the selection array can contains duplicated entries */ intersects(e, t, i, r) { if (fg.IntersectsSphere(this._minPoint, this._maxPoint, e, t)) { if (this.blocks) { for (let s = 0; s < this.blocks.length; s++) this.blocks[s].intersects(e, t, i, r); return; } r ? i.concat(this.entries) : i.concatWithNoDuplicate(this.entries); } } /** * Test if the current block intersect with the given ray and if yes, then add its content to the selection array * @param ray defines the ray to test with * @param selection defines the array to store current content if selection is positive */ intersectsRay(e, t) { if (e.intersectsBoxMinMax(this._minPoint, this._maxPoint)) { if (this.blocks) { for (let i = 0; i < this.blocks.length; i++) this.blocks[i].intersectsRay(e, t); return; } t.concatWithNoDuplicate(this.entries); } } /** * Subdivide the content into child blocks (this block will then be empty) */ createInnerBlocks() { iL._CreateBlocks(this._minPoint, this._maxPoint, this.entries, this._capacity, this._depth, this._maxDepth, this, this._creationFunc), this.entries.splice(0); } /** * @internal */ static _CreateBlocks(e, t, i, r, s, n, a, l) { a.blocks = new Array(); const o = new D((t.x - e.x) / 2, (t.y - e.y) / 2, (t.z - e.z) / 2); for (let u = 0; u < 2; u++) for (let h = 0; h < 2; h++) for (let d = 0; d < 2; d++) { const f = e.add(o.multiplyByFloats(u, h, d)), p = e.add(o.multiplyByFloats(u + 1, h + 1, d + 1)), m = new iL(f, p, r, s + 1, n, l); m.addEntries(i), a.blocks.push(m); } } } class gP { /** * Creates a octree * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimizeOctrees * @param creationFunc function to be used to instantiate the octree * @param maxBlockCapacity defines the maximum number of meshes you want on your octree's leaves (default: 64) * @param maxDepth defines the maximum depth (sub-levels) for your octree. Default value is 2, which means 8 8 8 = 512 blocks :) (This parameter takes precedence over capacity.) 
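 * A short usage sketch: for scene-level mesh selection the octree is usually obtained through the scene helper defined later in this file rather than constructed directly; "movingMesh" is an illustrative mesh reference:
 * @example
 * const octree = scene.createOrUpdateSelectionOctree(64, 2);
 * // Meshes expected to move frequently can be tracked in dynamicContent instead of the static blocks.
 * octree.dynamicContent.push(movingMesh);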
*/ constructor(e, t, i = 2) { this.maxDepth = i, this.dynamicContent = [], this._maxBlockCapacity = t || 64, this._selectionContent = new XE(1024), this._creationFunc = e; } // Methods /** * Updates the octree by adding blocks for the passed in meshes within the min and max world parameters * @param worldMin worldMin for the octree blocks var blockSize = new Vector3((worldMax.x - worldMin.x) / 2, (worldMax.y - worldMin.y) / 2, (worldMax.z - worldMin.z) / 2); * @param worldMax worldMax for the octree blocks var blockSize = new Vector3((worldMax.x - worldMin.x) / 2, (worldMax.y - worldMin.y) / 2, (worldMax.z - worldMin.z) / 2); * @param entries meshes to be added to the octree blocks */ update(e, t, i) { iL._CreateBlocks(e, t, i, this._maxBlockCapacity, 0, this.maxDepth, this, this._creationFunc); } /** * Adds a mesh to the octree * @param entry Mesh to add to the octree */ addMesh(e) { for (let t = 0; t < this.blocks.length; t++) this.blocks[t].addEntry(e); } /** * Remove an element from the octree * @param entry defines the element to remove */ removeMesh(e) { for (let t = 0; t < this.blocks.length; t++) this.blocks[t].removeEntry(e); } /** * Selects an array of meshes within the frustum * @param frustumPlanes The frustum planes to use which will select all meshes within it * @param allowDuplicate If duplicate objects are allowed in the resulting object array * @returns array of meshes within the frustum */ select(e, t) { this._selectionContent.reset(); for (let i = 0; i < this.blocks.length; i++) this.blocks[i].select(e, this._selectionContent, t); return t ? this._selectionContent.concat(this.dynamicContent) : this._selectionContent.concatWithNoDuplicate(this.dynamicContent), this._selectionContent; } /** * Test if the octree intersect with the given bounding sphere and if yes, then add its content to the selection array * @param sphereCenter defines the bounding sphere center * @param sphereRadius defines the bounding sphere radius * @param allowDuplicate defines if the selection array can contains duplicated entries * @returns an array of objects that intersect the sphere */ intersects(e, t, i) { this._selectionContent.reset(); for (let r = 0; r < this.blocks.length; r++) this.blocks[r].intersects(e, t, this._selectionContent, i); return i ? 
this._selectionContent.concat(this.dynamicContent) : this._selectionContent.concatWithNoDuplicate(this.dynamicContent), this._selectionContent; } /** * Test if the octree intersect with the given ray and if yes, then add its content to resulting array * @param ray defines the ray to test with * @returns array of intersected objects */ intersectsRay(e) { this._selectionContent.reset(); for (let t = 0; t < this.blocks.length; t++) this.blocks[t].intersectsRay(e, this._selectionContent); return this._selectionContent.concatWithNoDuplicate(this.dynamicContent), this._selectionContent; } } gP.CreationFuncForMeshes = (c, e) => { const t = c.getBoundingInfo(); !c.isBlocked && t.boundingBox.intersectsMinMax(e.minPoint, e.maxPoint) && e.entries.push(c); }; gP.CreationFuncForSubMeshes = (c, e) => { c.getBoundingInfo().boundingBox.intersectsMinMax(e.minPoint, e.maxPoint) && e.entries.push(c); }; ii.prototype.createOrUpdateSelectionOctree = function(c = 64, e = 2) { let t = this._getComponent(Bt.NAME_OCTREE); t || (t = new kK(this), this._addComponent(t)), this._selectionOctree || (this._selectionOctree = new gP(gP.CreationFuncForMeshes, c, e)); const i = this.getWorldExtends(); return this._selectionOctree.update(i.min, i.max, this.meshes), this._selectionOctree; }; Object.defineProperty(ii.prototype, "selectionOctree", { get: function() { return this._selectionOctree; }, enumerable: !0, configurable: !0 }); xr.prototype.createOrUpdateSubmeshesOctree = function(c = 64, e = 2) { const t = this.getScene(); let i = t._getComponent(Bt.NAME_OCTREE); i || (i = new kK(t), t._addComponent(i)), this._submeshesOctree || (this._submeshesOctree = new gP(gP.CreationFuncForSubMeshes, c, e)), this.computeWorldMatrix(!0); const s = this.getBoundingInfo().boundingBox; return this._submeshesOctree.update(s.minimumWorld, s.maximumWorld, this.subMeshes), this._submeshesOctree; }; class kK { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_OCTREE, this.checksIsEnabled = !0, this._tempRay = new gs(D.Zero(), new D(1, 1, 1)), e = e || gi.LastCreatedScene, e && (this.scene = e, this.scene.getActiveMeshCandidates = () => this.getActiveMeshCandidates(), this.scene.getActiveSubMeshCandidates = (t) => this.getActiveSubMeshCandidates(t), this.scene.getCollidingSubMeshCandidates = (t, i) => this.getCollidingSubMeshCandidates(t, i), this.scene.getIntersectingSubMeshCandidates = (t, i) => this.getIntersectingSubMeshCandidates(t, i)); } /** * Registers the component in a given scene */ register() { this.scene.onMeshRemovedObservable.add((e) => { const t = this.scene.selectionOctree; if (t != null) { const i = t.dynamicContent.indexOf(e); i !== -1 && t.dynamicContent.splice(i, 1); } }), this.scene.onMeshImportedObservable.add((e) => { const t = this.scene.selectionOctree; t != null && t.addMesh(e); }); } /** * Return the list of active meshes * @returns the list of active meshes */ getActiveMeshCandidates() { var e; return ((e = this.scene._selectionOctree) === null || e === void 0 ? void 0 : e.select(this.scene.frustumPlanes)) || this.scene._getDefaultMeshCandidates(); } /** * Return the list of active sub meshes * @param mesh The mesh to get the candidates sub meshes from * @returns the list of active sub meshes */ getActiveSubMeshCandidates(e) { return e._submeshesOctree && e.useOctreeForRenderingSelection ? 
e._submeshesOctree.select(this.scene.frustumPlanes) : this.scene._getDefaultSubMeshCandidates(e); } /** * Return the list of sub meshes intersecting with a given local ray * @param mesh defines the mesh to find the submesh for * @param localRay defines the ray in local space * @returns the list of intersecting sub meshes */ getIntersectingSubMeshCandidates(e, t) { return e._submeshesOctree && e.useOctreeForPicking ? (gs.TransformToRef(t, e.getWorldMatrix(), this._tempRay), e._submeshesOctree.intersectsRay(this._tempRay)) : this.scene._getDefaultSubMeshCandidates(e); } /** * Return the list of sub meshes colliding with a collider * @param mesh defines the mesh to find the submesh for * @param collider defines the collider to evaluate the collision against * @returns the list of colliding sub meshes */ getCollidingSubMeshCandidates(e, t) { if (e._submeshesOctree && e.useOctreeForCollisions) { const i = t._velocityWorldLength + Math.max(t._radius.x, t._radius.y, t._radius.z); return e._submeshesOctree.intersects(t._basePointWorld, i); } return this.scene._getDefaultSubMeshCandidates(e); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources. */ dispose() { } } function lU(c) { const e = c.height || 2; let t = c.diameterTop === 0 ? 0 : c.diameterTop || c.diameter || 1, i = c.diameterBottom === 0 ? 0 : c.diameterBottom || c.diameter || 1; t = t || 1e-5, i = i || 1e-5; const r = (c.tessellation || 24) | 0, s = (c.subdivisions || 1) | 0, n = !!c.hasRings, a = !!c.enclose, l = c.cap === 0 ? 0 : c.cap || ke.CAP_ALL, o = c.arc && (c.arc <= 0 || c.arc > 1) ? 1 : c.arc || 1, u = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, h = c.faceUV || new Array(3), d = c.faceColors, f = o !== 1 && a ? 2 : 0, p = n ? s : 1, m = 2 + (1 + f) * p; let _; for (_ = 0; _ < m; _++) d && d[_] === void 0 && (d[_] = new Et(1, 1, 1, 1)); for (_ = 0; _ < m; _++) h && h[_] === void 0 && (h[_] = new Di(0, 0, 1, 1)); const v = [], C = [], x = [], b = [], S = [], M = Math.PI * 2 * o / r; let R, w, V; const k = (i - t) / 2 / e, L = D.Zero(), B = D.Zero(), U = D.Zero(), K = D.Zero(), ee = D.Zero(), Z = bl.Y; let q, le, ie, $ = 1, j = 1, J = 0, ne = 0; for (q = 0; q <= s; q++) for (w = q / s, V = (w * (t - i) + i) / 2, $ = n && q !== 0 && q !== s ? 2 : 1, ie = 0; ie < $; ie++) { for (n && (j += ie), a && (j += 2 * ie), le = 0; le <= r; le++) R = le * M, L.x = Math.cos(-R) * V, L.y = -e / 2 + w * e, L.z = Math.sin(-R) * V, t === 0 && q === s ? (B.x = x[x.length - (r + 1) * 3], B.y = x[x.length - (r + 1) * 3 + 1], B.z = x[x.length - (r + 1) * 3 + 2]) : (B.x = L.x, B.z = L.z, B.y = Math.sqrt(B.x * B.x + B.z * B.z) * k, B.normalize()), le === 0 && (U.copyFrom(L), K.copyFrom(B)), C.push(L.x, L.y, L.z), x.push(B.x, B.y, B.z), n ? ne = J !== j ? h[j].y : h[j].w : ne = h[j].y + (h[j].w - h[j].y) * w, b.push(h[j].x + (h[j].z - h[j].x) * le / r, hn.UseOpenGLOrientationForUV ? 1 - ne : ne), d && S.push(d[j].r, d[j].g, d[j].b, d[j].a); o !== 1 && a && (C.push(L.x, L.y, L.z), C.push(0, L.y, 0), C.push(0, L.y, 0), C.push(U.x, U.y, U.z), D.CrossToRef(Z, B, ee), ee.normalize(), x.push(ee.x, ee.y, ee.z, ee.x, ee.y, ee.z), D.CrossToRef(K, Z, ee), ee.normalize(), x.push(ee.x, ee.y, ee.z, ee.x, ee.y, ee.z), n ? ne = J !== j ? h[j + 1].y : h[j + 1].w : ne = h[j + 1].y + (h[j + 1].w - h[j + 1].y) * w, b.push(h[j + 1].x, hn.UseOpenGLOrientationForUV ? 1 - ne : ne), b.push(h[j + 1].z, hn.UseOpenGLOrientationForUV ? 
1 - ne : ne), n ? ne = J !== j ? h[j + 2].y : h[j + 2].w : ne = h[j + 2].y + (h[j + 2].w - h[j + 2].y) * w, b.push(h[j + 2].x, hn.UseOpenGLOrientationForUV ? 1 - ne : ne), b.push(h[j + 2].z, hn.UseOpenGLOrientationForUV ? 1 - ne : ne), d && (S.push(d[j + 1].r, d[j + 1].g, d[j + 1].b, d[j + 1].a), S.push(d[j + 1].r, d[j + 1].g, d[j + 1].b, d[j + 1].a), S.push(d[j + 2].r, d[j + 2].g, d[j + 2].b, d[j + 2].a), S.push(d[j + 2].r, d[j + 2].g, d[j + 2].b, d[j + 2].a))), J !== j && (J = j); } const pe = o !== 1 && a ? r + 4 : r; for (q = 0, j = 0; j < s; j++) { let ye = 0, Se = 0, re = 0, te = 0; for (le = 0; le < r; le++) ye = q * (pe + 1) + le, Se = (q + 1) * (pe + 1) + le, re = q * (pe + 1) + (le + 1), te = (q + 1) * (pe + 1) + (le + 1), v.push(ye, Se, re), v.push(te, re, Se); o !== 1 && a && (v.push(ye + 2, Se + 2, re + 2), v.push(te + 2, re + 2, Se + 2), v.push(ye + 4, Se + 4, re + 4), v.push(te + 4, re + 4, Se + 4)), q = n ? q + 2 : q + 1; } const ge = (ye) => { const Se = ye ? t / 2 : i / 2; if (Se === 0) return; let re, te, he; const be = ye ? h[m - 1] : h[0]; let Ue = null; d && (Ue = ye ? d[m - 1] : d[0]); const Ee = C.length / 3, He = ye ? e / 2 : -e / 2, Xe = new D(0, He, 0); C.push(Xe.x, Xe.y, Xe.z), x.push(0, ye ? 1 : -1, 0); const rt = be.y + (be.w - be.y) * 0.5; b.push(be.x + (be.z - be.x) * 0.5, hn.UseOpenGLOrientationForUV ? 1 - rt : rt), Ue && S.push(Ue.r, Ue.g, Ue.b, Ue.a); const dt = new at(0.5, 0.5); for (he = 0; he <= r; he++) { re = Math.PI * 2 * he * o / r; const bt = Math.cos(-re), Mt = Math.sin(-re); te = new D(bt * Se, He, Mt * Se); const Ct = new at(bt * dt.x + 0.5, Mt * dt.y + 0.5); C.push(te.x, te.y, te.z), x.push(0, ye ? 1 : -1, 0); const di = be.y + (be.w - be.y) * Ct.y; b.push(be.x + (be.z - be.x) * Ct.x, hn.UseOpenGLOrientationForUV ? 1 - di : di), Ue && S.push(Ue.r, Ue.g, Ue.b, Ue.a); } for (he = 0; he < r; he++) ye ? (v.push(Ee), v.push(Ee + (he + 2)), v.push(Ee + (he + 1))) : (v.push(Ee), v.push(Ee + (he + 1)), v.push(Ee + (he + 2))); }; (l === ke.CAP_START || l === ke.CAP_ALL) && ge(!1), (l === ke.CAP_END || l === ke.CAP_ALL) && ge(!0), Ot._ComputeSides(u, C, v, x, b, c.frontUVs, c.backUVs); const Ie = new Ot(); return Ie.indices = v, Ie.positions = C, Ie.normals = x, Ie.uvs = b, d && (Ie.colors = S), Ie; } function Hf(c, e = {}, t) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, lU(e).applyToMesh(i, e.updatable), i; } const wde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateCylinder: Hf }; Ot.CreateCylinder = lU; ke.CreateCylinder = (c, e, t, i, r, s, n, a, l) => ((n === void 0 || !(n instanceof ii)) && (n !== void 0 && (l = a || ke.DEFAULTSIDE, a = n), n = s, s = 1), Hf(c, { height: e, diameterTop: t, diameterBottom: i, tessellation: r, subdivisions: s, sideOrientation: l, updatable: a }, n)); In.AddNodeConstructor("Light_Type_3", (c, e) => () => new vg(c, D.Zero(), e)); class vg extends hs { /** * Creates a HemisphericLight object in the scene according to the passed direction (Vector3). * The HemisphericLight simulates the ambient environment light, so the passed direction is the light reflection direction, not the incoming direction. * The HemisphericLight can't cast shadows. 
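 * A minimal usage sketch ("scene" is whatever scene the light should belong to):
 * @example
 * const light = new BABYLON.HemisphericLight("hemi", new BABYLON.Vector3(0, 1, 0), scene);
 * light.intensity = 0.7;
 * // groundColor shades the hemisphere facing away from the light direction.
 * light.groundColor = new BABYLON.Color3(0.2, 0.2, 0.2);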
* Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/lights_introduction * @param name The friendly name of the light * @param direction The direction of the light reflection * @param scene The scene the light belongs to */ constructor(e, t, i) { super(e, i), this.groundColor = new ze(0, 0, 0), this.direction = t || D.Up(); } _buildUniformLayout() { this._uniformBuffer.addUniform("vLightData", 4), this._uniformBuffer.addUniform("vLightDiffuse", 4), this._uniformBuffer.addUniform("vLightSpecular", 4), this._uniformBuffer.addUniform("vLightGround", 3), this._uniformBuffer.addUniform("shadowsInfo", 3), this._uniformBuffer.addUniform("depthValues", 2), this._uniformBuffer.create(); } /** * Returns the string "HemisphericLight". * @returns The class name */ getClassName() { return "HemisphericLight"; } /** * Sets the HemisphericLight direction towards the passed target (Vector3). * Returns the updated direction. * @param target The target the direction should point to * @returns The computed direction */ setDirectionToTarget(e) { return this.direction = D.Normalize(e.subtract(D.Zero())), this.direction; } /** * Returns the shadow generator associated to the light. * @returns Always null for hemispheric lights because it does not support shadows. */ getShadowGenerator() { return null; } /** * Sets the passed Effect object with the HemisphericLight normalized direction and color and the passed name (string). * @param _effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The hemispheric light */ transferToEffect(e, t) { const i = D.Normalize(this.direction); return this._uniformBuffer.updateFloat4("vLightData", i.x, i.y, i.z, 0, t), this._uniformBuffer.updateColor3("vLightGround", this.groundColor.scale(this.intensity), t), this; } transferToNodeMaterialEffect(e, t) { const i = D.Normalize(this.direction); return e.setFloat3(t, i.x, i.y, i.z), this; } /** * Computes the world matrix of the node * @returns the world matrix */ computeWorldMatrix() { return this._worldMatrix || (this._worldMatrix = Ae.Identity()), this._worldMatrix; } /** * Returns the integer 3. * @returns The light Type id as a constant defines in Light.LIGHTTYPEID_x */ getTypeID() { return hs.LIGHTTYPEID_HEMISPHERICLIGHT; } /** * Prepares the list of defines specific to the light type. * @param defines the list of defines * @param lightIndex defines the index of the light for the effect */ prepareLightSpecificDefines(e, t) { e["HEMILIGHT" + t] = !0; } } F([ Fs() ], vg.prototype, "groundColor", void 0); F([ oo() ], vg.prototype, "direction", void 0); class bn { /** * Gets the camera that is used to render the utility layer (when not set, this will be the last active camera) * @param getRigParentIfPossible if the current active camera is a rig camera, should its parent camera be returned * @returns the camera that is used when rendering the utility layer */ getRenderCamera(e) { if (this._renderCamera) return this._renderCamera; { let t; return this.originalScene.activeCameras && this.originalScene.activeCameras.length > 1 ? t = this.originalScene.activeCameras[this.originalScene.activeCameras.length - 1] : t = this.originalScene.activeCamera, e && t && t.isRigCamera ? 
t.rigParent : t; } } /** * Sets the camera that should be used when rendering the utility layer (If set to null the last active camera will be used) * @param cam the camera that should be used when rendering the utility layer */ setRenderCamera(e) { this._renderCamera = e; } /** * @internal * Light which used by gizmos to get light shading */ _getSharedGizmoLight() { return this._sharedGizmoLight || (this._sharedGizmoLight = new vg("shared gizmo light", new D(0, 1, 0), this.utilityLayerScene), this._sharedGizmoLight.intensity = 2, this._sharedGizmoLight.groundColor = ze.Gray()), this._sharedGizmoLight; } /** * A shared utility layer that can be used to overlay objects into a scene (Depth map of the previous scene is cleared before drawing on top of it) */ static get DefaultUtilityLayer() { return bn._DefaultUtilityLayer == null ? bn._CreateDefaultUtilityLayerFromScene(gi.LastCreatedScene) : bn._DefaultUtilityLayer; } /** * Creates an utility layer, and set it as a default utility layer * @param scene associated scene * @internal */ static _CreateDefaultUtilityLayerFromScene(e) { return bn._DefaultUtilityLayer = new bn(e), bn._DefaultUtilityLayer.originalScene.onDisposeObservable.addOnce(() => { bn._DefaultUtilityLayer = null; }), bn._DefaultUtilityLayer; } /** * A shared utility layer that can be used to embed objects into a scene (Depth map of the previous scene is not cleared before drawing on top of it) */ static get DefaultKeepDepthUtilityLayer() { return bn._DefaultKeepDepthUtilityLayer == null && (bn._DefaultKeepDepthUtilityLayer = new bn(gi.LastCreatedScene), bn._DefaultKeepDepthUtilityLayer.utilityLayerScene.autoClearDepthAndStencil = !1, bn._DefaultKeepDepthUtilityLayer.originalScene.onDisposeObservable.addOnce(() => { bn._DefaultKeepDepthUtilityLayer = null; })), bn._DefaultKeepDepthUtilityLayer; } /** * Instantiates a UtilityLayerRenderer * @param originalScene the original scene that will be rendered on top of * @param handleEvents boolean indicating if the utility layer should handle events */ constructor(e, t = !0) { this.originalScene = e, this._pointerCaptures = {}, this._lastPointerEvents = {}, this._sharedGizmoLight = null, this._renderCamera = null, this.pickUtilitySceneFirst = !0, this.shouldRender = !0, this.onlyCheckPointerDownEvents = !0, this.processAllEvents = !1, this.pickingEnabled = !0, this.onPointerOutObservable = new Fe(), this.utilityLayerScene = new ii(e.getEngine(), { virtual: !0 }), this.utilityLayerScene.useRightHandedSystem = e.useRightHandedSystem, this.utilityLayerScene._allowPostProcessClearColor = !1, this.utilityLayerScene.postProcessesEnabled = !1, this.utilityLayerScene.detachControl(), t && (this._originalPointerObserver = e.onPrePointerObservable.add((i) => { if (!this.utilityLayerScene.activeCamera || !this.pickingEnabled || !this.processAllEvents && i.type !== si.POINTERMOVE && i.type !== si.POINTERUP && i.type !== si.POINTERDOWN && i.type !== si.POINTERDOUBLETAP) return; this.utilityLayerScene.pointerX = e.pointerX, this.utilityLayerScene.pointerY = e.pointerY; const r = i.event; if (e.isPointerCaptured(r.pointerId)) { this._pointerCaptures[r.pointerId] = !1; return; } const s = (a) => { let l = null; if (i.nearInteractionPickingInfo) i.nearInteractionPickingInfo.pickedMesh.getScene() == a ? 
l = i.nearInteractionPickingInfo : l = new ku(); else if (a !== this.utilityLayerScene && i.originalPickingInfo) l = i.originalPickingInfo; else { let o = null; this._renderCamera && (o = a._activeCamera, a._activeCamera = this._renderCamera, i.ray = null), l = i.ray ? a.pickWithRay(i.ray) : a.pick(e.pointerX, e.pointerY), o && (a._activeCamera = o); } return l; }, n = s(this.utilityLayerScene); if (!i.ray && n && (i.ray = n.ray), this.utilityLayerScene.onPrePointerObservable.notifyObservers(i), this.onlyCheckPointerDownEvents && i.type != si.POINTERDOWN) { i.skipOnPointerObservable || this.utilityLayerScene.onPointerObservable.notifyObservers(new cg(i.type, i.event, n), i.type), i.type === si.POINTERUP && this._pointerCaptures[r.pointerId] && (this._pointerCaptures[r.pointerId] = !1); return; } if (this.utilityLayerScene.autoClearDepthAndStencil || this.pickUtilitySceneFirst) n && n.hit && (i.skipOnPointerObservable || this.utilityLayerScene.onPointerObservable.notifyObservers(new cg(i.type, i.event, n), i.type), i.skipOnPointerObservable = !0); else { const a = s(e), l = i.event; a && n && (n.distance === 0 && a.pickedMesh ? this.mainSceneTrackerPredicate && this.mainSceneTrackerPredicate(a.pickedMesh) ? (this._notifyObservers(i, a, l), i.skipOnPointerObservable = !0) : i.type === si.POINTERDOWN ? this._pointerCaptures[l.pointerId] = !0 : (i.type === si.POINTERMOVE || i.type === si.POINTERUP) && (this._lastPointerEvents[l.pointerId] && (this.onPointerOutObservable.notifyObservers(l.pointerId), delete this._lastPointerEvents[l.pointerId]), this._notifyObservers(i, a, l)) : !this._pointerCaptures[l.pointerId] && (n.distance < a.distance || a.distance === 0) ? (this._notifyObservers(i, n, l), i.skipOnPointerObservable || (i.skipOnPointerObservable = n.distance > 0)) : !this._pointerCaptures[l.pointerId] && n.distance >= a.distance && (this.mainSceneTrackerPredicate && this.mainSceneTrackerPredicate(a.pickedMesh) ? 
(this._notifyObservers(i, a, l), i.skipOnPointerObservable = !0) : ((i.type === si.POINTERMOVE || i.type === si.POINTERUP) && this._lastPointerEvents[l.pointerId] && (this.onPointerOutObservable.notifyObservers(l.pointerId), delete this._lastPointerEvents[l.pointerId]), this._notifyObservers(i, n, l))), i.type === si.POINTERUP && this._pointerCaptures[l.pointerId] && (this._pointerCaptures[l.pointerId] = !1)); } }), this._originalPointerObserver && e.onPrePointerObservable.makeObserverTopPriority(this._originalPointerObserver)), this.utilityLayerScene.autoClear = !1, this._afterRenderObserver = this.originalScene.onAfterRenderCameraObservable.add((i) => { this.shouldRender && i == this.getRenderCamera() && this.render(); }), this._sceneDisposeObserver = this.originalScene.onDisposeObservable.add(() => { this.dispose(); }), this._updateCamera(); } _notifyObservers(e, t, i) { e.skipOnPointerObservable || (this.utilityLayerScene.onPointerObservable.notifyObservers(new cg(e.type, e.event, t), e.type), this._lastPointerEvents[i.pointerId] = !0); } /** * Renders the utility layers scene on top of the original scene */ render() { if (this._updateCamera(), this.utilityLayerScene.activeCamera) { const e = this.utilityLayerScene.activeCamera.getScene(), t = this.utilityLayerScene.activeCamera; t._scene = this.utilityLayerScene, t.leftCamera && (t.leftCamera._scene = this.utilityLayerScene), t.rightCamera && (t.rightCamera._scene = this.utilityLayerScene), this.utilityLayerScene.render(!1), t._scene = e, t.leftCamera && (t.leftCamera._scene = e), t.rightCamera && (t.rightCamera._scene = e); } } /** * Disposes of the renderer */ dispose() { this.onPointerOutObservable.clear(), this._afterRenderObserver && this.originalScene.onAfterCameraRenderObservable.remove(this._afterRenderObserver), this._sceneDisposeObserver && this.originalScene.onDisposeObservable.remove(this._sceneDisposeObserver), this._originalPointerObserver && this.originalScene.onPrePointerObservable.remove(this._originalPointerObserver), this.utilityLayerScene.dispose(); } _updateCamera() { this.utilityLayerScene.cameraToUseForPointers = this.getRenderCamera(), this.utilityLayerScene.activeCamera = this.getRenderCamera(); } } bn._DefaultUtilityLayer = null; bn._DefaultKeepDepthUtilityLayer = null; var rL; (function(c) { c[c.Origin = 0] = "Origin", c[c.Pivot = 1] = "Pivot"; })(rL || (rL = {})); var p5; (function(c) { c[c.World = 0] = "World", c[c.Local = 1] = "Local"; })(p5 || (p5 = {})); class Do { /** * Ratio for the scale of the gizmo (Default: 1) */ set scaleRatio(e) { this._scaleRatio = e; } get scaleRatio() { return this._scaleRatio; } /** * True when the mouse pointer is hovered a gizmo mesh */ get isHovered() { return this._isHovered; } /** * Mesh that the gizmo will be attached to. (eg. on a drag gizmo the mesh that will be dragged) * * When set, interactions will be enabled */ get attachedMesh() { return this._attachedMesh; } set attachedMesh(e) { this._attachedMesh = e, e && (this._attachedNode = e), this._rootMesh.setEnabled(!!e), this._attachedNodeChanged(e); } /** * Node that the gizmo will be attached to. (eg. 
on a drag gizmo the mesh, bone or NodeTransform that will be dragged) * * When set, interactions will be enabled */ get attachedNode() { return this._attachedNode; } set attachedNode(e) { this._attachedNode = e, this._attachedMesh = null, this._rootMesh.setEnabled(!!e), this._attachedNodeChanged(e); } /** * Disposes and replaces the current meshes in the gizmo with the specified mesh * @param mesh The mesh to replace the default mesh of the gizmo */ setCustomMesh(e) { if (e.getScene() != this.gizmoLayer.utilityLayerScene) throw "When setting a custom mesh on a gizmo, the custom meshes scene must be the same as the gizmos (eg. gizmo.gizmoLayer.utilityLayerScene)"; this._rootMesh.getChildMeshes().forEach((t) => { t.dispose(); }), e.parent = this._rootMesh, this._customMeshSet = !0; } /** * If set the gizmo's rotation will be updated to match the attached mesh each frame (Default: true) * NOTE: This is only possible for meshes with uniform scaling, as otherwise it's not possible to decompose the rotation */ set updateGizmoRotationToMatchAttachedMesh(e) { this._updateGizmoRotationToMatchAttachedMesh = e; } get updateGizmoRotationToMatchAttachedMesh() { return this._updateGizmoRotationToMatchAttachedMesh; } /** * If set the gizmo's position will be updated to match the attached mesh each frame (Default: true) */ set updateGizmoPositionToMatchAttachedMesh(e) { this._updateGizmoPositionToMatchAttachedMesh = e; } get updateGizmoPositionToMatchAttachedMesh() { return this._updateGizmoPositionToMatchAttachedMesh; } /** * Defines where the gizmo will be positioned if `updateGizmoPositionToMatchAttachedMesh` is enabled. * (Default: GizmoAnchorPoint.Origin) */ set anchorPoint(e) { this._anchorPoint = e; } get anchorPoint() { return this._anchorPoint; } /** * Set the coordinate system to use. By default it's local. * But it's possible for a user to tweak so its local for translation and world for rotation. 
* In that case, setting the coordinate system will change `updateGizmoRotationToMatchAttachedMesh` and `updateGizmoPositionToMatchAttachedMesh` */ set coordinatesMode(e) { this._coordinatesMode = e; const t = e == p5.Local; this.updateGizmoRotationToMatchAttachedMesh = t, this.updateGizmoPositionToMatchAttachedMesh = !0; } get coordinatesMode() { return this._coordinatesMode; } /** * When set, the gizmo will always appear the same size no matter where the camera is (default: true) */ set updateScale(e) { this._updateScale = e; } get updateScale() { return this._updateScale; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _attachedNodeChanged(e) { } /** * Creates a gizmo * @param gizmoLayer The utility layer the gizmo will be added to */ constructor(e = bn.DefaultUtilityLayer) { this.gizmoLayer = e, this._attachedMesh = null, this._attachedNode = null, this._customRotationQuaternion = null, this._scaleRatio = 1, this._isHovered = !1, this._customMeshSet = !1, this._updateGizmoRotationToMatchAttachedMesh = !0, this._updateGizmoPositionToMatchAttachedMesh = !0, this._anchorPoint = rL.Origin, this._updateScale = !0, this._coordinatesMode = p5.Local, this._interactionsEnabled = !0, this._rightHandtoLeftHandMatrix = Ae.RotationY(Math.PI), this._rootMesh = new ke("gizmoRootNode", e.utilityLayerScene), this._rootMesh.rotationQuaternion = Ze.Identity(), this._beforeRenderObserver = this.gizmoLayer.utilityLayerScene.onBeforeRenderObservable.add(() => { this._update(); }); } /** * posture that the gizmo will be display * When set null, default value will be used (Quaternion(0, 0, 0, 1)) */ get customRotationQuaternion() { return this._customRotationQuaternion; } set customRotationQuaternion(e) { this._customRotationQuaternion = e; } /** * Updates the gizmo to match the attached mesh's position/rotation */ _update() { if (this.attachedNode) { let e = this.attachedNode; if (this.attachedMesh && (e = this.attachedMesh || this.attachedNode), this.updateGizmoPositionToMatchAttachedMesh) if (this.anchorPoint == rL.Pivot && e.getAbsolutePivotPoint) { const t = e.getAbsolutePivotPoint(); this._rootMesh.position.copyFrom(t); } else { const t = e.getWorldMatrix().getRow(3), i = t ? t.toVector3() : new D(0, 0, 0); this._rootMesh.position.copyFrom(i); } if (this.updateGizmoRotationToMatchAttachedMesh) { const i = e._isMesh || e.getClassName() === "AbstractMesh" || e.getClassName() === "TransformNode" || e.getClassName() === "InstancedMesh" ? e : void 0; e.getWorldMatrix().decompose(void 0, this._rootMesh.rotationQuaternion, void 0, Do.PreserveScaling ? i : void 0), this._rootMesh.rotationQuaternion.normalize(); } else this._customRotationQuaternion ? this._rootMesh.rotationQuaternion.copyFrom(this._customRotationQuaternion) : this._rootMesh.rotationQuaternion.set(0, 0, 0, 1); if (this.updateScale) { const t = this.gizmoLayer.utilityLayerScene.activeCamera, i = t.globalPosition; this._rootMesh.position.subtractToRef(i, de.Vector3[0]); let r = this.scaleRatio; if (t.mode == Ai.ORTHOGRAPHIC_CAMERA) { if (t.orthoTop && t.orthoBottom) { const s = t.orthoTop - t.orthoBottom; r *= s; } } else { const s = t.getScene().useRightHandedSystem ? 
D.RightHandedForwardReadOnly : D.LeftHandedForwardReadOnly, n = t.getDirection(s); r *= D.Dot(de.Vector3[0], n); } this._rootMesh.scaling.setAll(r), e._getWorldMatrixDeterminant() < 0 && !Do.PreserveScaling && (this._rootMesh.scaling.y *= -1); } else this._rootMesh.scaling.setAll(this.scaleRatio); } } /** * if transform has a pivot and is not using PostMultiplyPivotMatrix, then the worldMatrix contains the pivot matrix (it's not cancelled at the end) * so, when extracting the world matrix component, the translation (and other components) is containing the pivot translation. * And the pivot is applied each frame. Removing it anyway here makes it applied only in computeWorldMatrix. * @param transform local transform that needs to be transform by the pivot inverse matrix * @param localMatrix local matrix that needs to be transform by the pivot inverse matrix * @param result resulting matrix transformed by pivot inverse if the transform node is using pivot without using post Multiply Pivot Matrix */ _handlePivotMatrixInverse(e, t, i) { if (e.isUsingPivotMatrix() && !e.isUsingPostMultiplyPivotMatrix()) { e.getPivotMatrix().invertToRef(de.Matrix[5]), de.Matrix[5].multiplyToRef(t, i); return; } i.copyFrom(t); } /** * computes the rotation/scaling/position of the transform once the Node world matrix has changed. */ _matrixChanged() { if (this._attachedNode) if (this._attachedNode._isCamera) { const e = this._attachedNode; let t, i; if (e.parent) { const s = de.Matrix[1]; e.parent._worldMatrix.invertToRef(s), this._attachedNode._worldMatrix.multiplyToRef(s, de.Matrix[0]), t = de.Matrix[0]; } else t = this._attachedNode._worldMatrix; if (e.getScene().useRightHandedSystem ? (this._rightHandtoLeftHandMatrix.multiplyToRef(t, de.Matrix[1]), i = de.Matrix[1]) : i = t, i.decompose(de.Vector3[1], de.Quaternion[0], de.Vector3[0]), this._attachedNode.getClassName() === "FreeCamera" || this._attachedNode.getClassName() === "FlyCamera" || this._attachedNode.getClassName() === "ArcFollowCamera" || this._attachedNode.getClassName() === "TargetCamera" || this._attachedNode.getClassName() === "TouchCamera" || this._attachedNode.getClassName() === "UniversalCamera") { const s = this._attachedNode; s.rotation = de.Quaternion[0].toEulerAngles(), s.rotationQuaternion && (s.rotationQuaternion.copyFrom(de.Quaternion[0]), s.rotationQuaternion.normalize()); } e.position.copyFrom(de.Vector3[0]); } else if (this._attachedNode._isMesh || this._attachedNode.getClassName() === "AbstractMesh" || this._attachedNode.getClassName() === "TransformNode" || this._attachedNode.getClassName() === "InstancedMesh") { const e = this._attachedNode; if (e.parent) { const t = de.Matrix[0], i = de.Matrix[1]; e.parent.getWorldMatrix().invertToRef(t), this._attachedNode.getWorldMatrix().multiplyToRef(t, i); const r = de.Matrix[4]; if (this._handlePivotMatrixInverse(e, i, r), r.decompose(de.Vector3[0], de.Quaternion[0], e.position, Do.PreserveScaling ? 
e : void 0, Do.UseAbsoluteScaling), de.Quaternion[0].normalize(), e.isUsingPivotMatrix()) { const s = de.Quaternion[1]; Ze.RotationYawPitchRollToRef(e.rotation.y, e.rotation.x, e.rotation.z, s); const n = de.Matrix[2]; Ae.ScalingToRef(e.scaling.x, e.scaling.y, e.scaling.z, n); const a = de.Matrix[2]; s.toRotationMatrix(a); const l = e.getPivotMatrix(), o = de.Matrix[3]; l.invertToRef(o), l.multiplyToRef(n, de.Matrix[4]), de.Matrix[4].multiplyToRef(a, de.Matrix[5]), de.Matrix[5].multiplyToRef(o, de.Matrix[6]), de.Matrix[6].getTranslationToRef(de.Vector3[1]), e.position.subtractInPlace(de.Vector3[1]); } } else { const t = de.Matrix[4]; this._handlePivotMatrixInverse(e, this._attachedNode._worldMatrix, t), t.decompose(de.Vector3[0], de.Quaternion[0], e.position, Do.PreserveScaling ? e : void 0, Do.UseAbsoluteScaling); } de.Vector3[0].scaleInPlace(1 / e.scalingDeterminant), e.scaling.copyFrom(de.Vector3[0]), e.billboardMode || (e.rotationQuaternion ? (e.rotationQuaternion.copyFrom(de.Quaternion[0]), e.rotationQuaternion.normalize()) : e.rotation = de.Quaternion[0].toEulerAngles()); } else if (this._attachedNode.getClassName() === "Bone") { const e = this._attachedNode, t = e.getParent(); if (t) { const i = de.Matrix[0], r = de.Matrix[1]; t.getFinalMatrix().invertToRef(i), e.getFinalMatrix().multiplyToRef(i, r), e.getLocalMatrix().copyFrom(r); } else e.getLocalMatrix().copyFrom(e.getFinalMatrix()); e.markAsDirty(); } else { const e = this._attachedNode; if (e.getTypeID) { const t = e.getTypeID(); if (t === hs.LIGHTTYPEID_DIRECTIONALLIGHT || t === hs.LIGHTTYPEID_SPOTLIGHT || t === hs.LIGHTTYPEID_POINTLIGHT) { const i = e.parent; if (i) { const r = de.Matrix[0], s = de.Matrix[1]; i.getWorldMatrix().invertToRef(r), e.getWorldMatrix().multiplyToRef(r, s), s.decompose(void 0, de.Quaternion[0], de.Vector3[0]); } else this._attachedNode._worldMatrix.decompose(void 0, de.Quaternion[0], de.Vector3[0]); e.position = new D(de.Vector3[0].x, de.Vector3[0].y, de.Vector3[0].z), e.direction && (e.direction = new D(e.direction.x, e.direction.y, e.direction.z)); } } } } /** * refresh gizmo mesh material * @param gizmoMeshes * @param material material to apply */ _setGizmoMeshMaterial(e, t) { e && e.forEach((i) => { i.material = t, i.color && (i.color = t.diffuseColor); }); } /** * Subscribes to pointer up, down, and hover events. Used for responsive gizmos. * @param gizmoLayer The utility layer the gizmo will be added to * @param gizmoAxisCache Gizmo axis definition used for reactive gizmo UI * @returns {Observer} pointerObserver */ static GizmoAxisPointerObserver(e, t) { let i = !1; return e.utilityLayerScene.onPointerObservable.add((s) => { var n, a; if (s.pickInfo) { if (s.type === si.POINTERMOVE) { if (i) return; t.forEach((l) => { var o, u; if (l.colliderMeshes && l.gizmoMeshes) { const h = ((o = l.colliderMeshes) === null || o === void 0 ? void 0 : o.indexOf((u = s == null ? void 0 : s.pickInfo) === null || u === void 0 ? void 0 : u.pickedMesh)) != -1, d = l.dragBehavior.enabled ? h || l.active ? l.hoverMaterial : l.material : l.disableMaterial; l.gizmoMeshes.forEach((f) => { f.material = d, f.color && (f.color = d.diffuseColor); }); } }); } if (s.type === si.POINTERDOWN && t.has((n = s.pickInfo.pickedMesh) === null || n === void 0 ? void 0 : n.parent)) { i = !0; const l = t.get((a = s.pickInfo.pickedMesh) === null || a === void 0 ? void 0 : a.parent); l.active = !0, t.forEach((o) => { var u, h; const f = (((u = o.colliderMeshes) === null || u === void 0 ? void 0 : u.indexOf((h = s == null ? 
void 0 : s.pickInfo) === null || h === void 0 ? void 0 : h.pickedMesh)) != -1 || o.active) && o.dragBehavior.enabled ? o.hoverMaterial : o.disableMaterial; o.gizmoMeshes.forEach((p) => { p.material = f, p.color && (p.color = f.diffuseColor); }); }); } s.type === si.POINTERUP && t.forEach((l) => { l.active = !1, i = !1, l.gizmoMeshes.forEach((o) => { o.material = l.dragBehavior.enabled ? l.material : l.disableMaterial, o.color && (o.color = l.material.diffuseColor); }); }); } }); } /** * Disposes of the gizmo */ dispose() { this._rootMesh.dispose(), this._beforeRenderObserver && this.gizmoLayer.utilityLayerScene.onBeforeRenderObservable.remove(this._beforeRenderObserver); } } Do.PreserveScaling = !1; Do.UseAbsoluteScaling = !0; class hg extends Do { /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse*/ get hoverMaterial() { return this._hoverMaterial; } /** Material used to render when gizmo is disabled. typically grey.*/ get disableMaterial() { return this._disableMaterial; } /** * @internal */ static _CreateArrow(e, t, i = 1, r = !1) { const s = new xi("arrow", e), n = Hf("cylinder", { diameterTop: 0, height: 0.075, diameterBottom: 0.0375 * (1 + (i - 1) / 4), tessellation: 96 }, e), a = Hf("cylinder", { diameterTop: 5e-3 * i, height: 0.275, diameterBottom: 5e-3 * i, tessellation: 96 }, e); return n.parent = s, n.material = t, n.rotation.x = Math.PI / 2, n.position.z += 0.3, a.parent = s, a.material = t, a.position.z += 0.275 / 2, a.rotation.x = Math.PI / 2, r && (a.visibility = 0, n.visibility = 0), s; } /** * @internal */ static _CreateArrowInstance(e, t) { const i = new xi("arrow", e); for (const r of t.getChildMeshes()) { const s = r.createInstance(r.name); s.parent = i; } return i; } /** * Creates an AxisDragGizmo * @param dragAxis The axis which the gizmo will be able to drag on * @param color The color of the gizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param parent * @param thickness display gizmo axis thickness * @param hoverColor The color of the gizmo when hovering over and dragging * @param disableColor The Color of the gizmo when its disabled */ constructor(e, t = ze.Gray(), i = bn.DefaultUtilityLayer, r = null, s = 1, n = ze.Yellow(), a = ze.Gray()) { var l; super(i), this._pointerObserver = null, this.snapDistance = 0, this.onSnapObservable = new Fe(), this._isEnabled = !0, this._parent = null, this._dragging = !1, this._parent = r, this._coloredMaterial = new Dt("", i.utilityLayerScene), this._coloredMaterial.diffuseColor = t, this._coloredMaterial.specularColor = t.subtract(new ze(0.1, 0.1, 0.1)), this._hoverMaterial = new Dt("", i.utilityLayerScene), this._hoverMaterial.diffuseColor = n, this._disableMaterial = new Dt("", i.utilityLayerScene), this._disableMaterial.diffuseColor = a, this._disableMaterial.alpha = 0.4; const o = hg._CreateArrow(i.utilityLayerScene, this._coloredMaterial, s), u = hg._CreateArrow(i.utilityLayerScene, this._coloredMaterial, s + 4, !0); this._gizmoMesh = new ke("", i.utilityLayerScene), this._gizmoMesh.addChild(o), this._gizmoMesh.addChild(u), this._gizmoMesh.lookAt(this._rootMesh.position.add(e)), this._gizmoMesh.scaling.scaleInPlace(1 / 3), this._gizmoMesh.parent = this._rootMesh; let h = 0; const d = { snapDistance: 0 }; this.dragBehavior = new Fu({ dragAxis: e }), this.dragBehavior.moveAttached = !1, this.dragBehavior.updateDragPlane = !1, 
this._rootMesh.addBehavior(this.dragBehavior), this.dragBehavior.onDragObservable.add((m) => { if (this.attachedNode) { let _ = !1; if (this.snapDistance == 0) this.attachedNode.getWorldMatrix().getTranslationToRef(de.Vector3[2]), de.Vector3[2].addInPlace(m.delta), this.dragBehavior.validateDrag(de.Vector3[2]) && (this.attachedNode.position && this.attachedNode.position.addInPlaceFromFloats(m.delta.x, m.delta.y, m.delta.z), this.attachedNode.getWorldMatrix().addTranslationFromFloats(m.delta.x, m.delta.y, m.delta.z), this.attachedNode.updateCache(), _ = !0); else if (h += m.dragDistance, Math.abs(h) > this.snapDistance) { const v = Math.floor(Math.abs(h) / this.snapDistance); h = h % this.snapDistance, m.delta.normalizeToRef(de.Vector3[1]), de.Vector3[1].scaleInPlace(this.snapDistance * v), this.attachedNode.getWorldMatrix().getTranslationToRef(de.Vector3[2]), de.Vector3[2].addInPlace(de.Vector3[1]), this.dragBehavior.validateDrag(de.Vector3[2]) && (this.attachedNode.getWorldMatrix().addTranslationFromFloats(de.Vector3[1].x, de.Vector3[1].y, de.Vector3[1].z), this.attachedNode.updateCache(), d.snapDistance = this.snapDistance * v * Math.sign(h), this.onSnapObservable.notifyObservers(d), _ = !0); } _ && this._matrixChanged(); } }), this.dragBehavior.onDragStartObservable.add(() => { this._dragging = !0; }), this.dragBehavior.onDragEndObservable.add(() => { this._dragging = !1; }); const f = i._getSharedGizmoLight(); f.includedOnlyMeshes = f.includedOnlyMeshes.concat(this._rootMesh.getChildMeshes(!1)); const p = { gizmoMeshes: o.getChildMeshes(), colliderMeshes: u.getChildMeshes(), material: this._coloredMaterial, hoverMaterial: this._hoverMaterial, disableMaterial: this._disableMaterial, active: !1, dragBehavior: this.dragBehavior }; (l = this._parent) === null || l === void 0 || l.addToAxisCache(u, p), this._pointerObserver = i.utilityLayerScene.onPointerObservable.add((m) => { var _; if (!this._customMeshSet && (this._isHovered = p.colliderMeshes.indexOf((_ = m == null ? void 0 : m.pickInfo) === null || _ === void 0 ? void 0 : _.pickedMesh) != -1, !this._parent)) { const v = this.dragBehavior.enabled ? this._isHovered || this._dragging ? this._hoverMaterial : this._coloredMaterial : this._disableMaterial; this._setGizmoMeshMaterial(p.gizmoMeshes, v); } }), this.dragBehavior.onEnabledObservable.add((m) => { this._setGizmoMeshMaterial(p.gizmoMeshes, m ? p.material : p.disableMaterial); }); } _attachedNodeChanged(e) { this.dragBehavior && (this.dragBehavior.enabled = !!e); } /** * If the gizmo is enabled */ set isEnabled(e) { this._isEnabled = e, e ? 
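/* Illustrative usage sketch (not part of the bundle). Per the JSDoc above, this class is
   AxisDragGizmo; a minimal setup with snapping, assuming the public BABYLON namespace,
   an existing `scene`, and a hypothetical `targetMesh`:

     const utilityLayer = new BABYLON.UtilityLayerRenderer(scene);
     const xGizmo = new BABYLON.AxisDragGizmo(
       new BABYLON.Vector3(1, 0, 0),   // drag axis
       BABYLON.Color3.Red(),           // idle color
       utilityLayer
     );
     xGizmo.attachedMesh = targetMesh;
     xGizmo.snapDistance = 0.25;       // 0 keeps the default free (unsnapped) drag
     xGizmo.onSnapObservable.add((evt) => console.log("snapped by", evt.snapDistance));
*/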
this._parent && (this.attachedMesh = this._parent.attachedMesh, this.attachedNode = this._parent.attachedNode) : (this.attachedMesh = null, this.attachedNode = null); } get isEnabled() { return this._isEnabled; } /** * Disposes of the gizmo */ dispose() { this.onSnapObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this.dragBehavior.detach(), this._gizmoMesh && this._gizmoMesh.dispose(), [this._coloredMaterial, this._hoverMaterial, this._disableMaterial].forEach((e) => { e && e.dispose(); }), super.dispose(); } } class aT { /** * Gets or sets a number used to scale line length */ get scaleLines() { return this._scaleLines; } set scaleLines(e) { this._scaleLines = e, this._xAxis.scaling.setAll(this._scaleLines * this._scaleLinesFactor), this._yAxis.scaling.setAll(this._scaleLines * this._scaleLinesFactor), this._zAxis.scaling.setAll(this._scaleLines * this._scaleLinesFactor); } /** Gets the node hierarchy used to render x-axis */ get xAxis() { return this._xAxis; } /** Gets the node hierarchy used to render y-axis */ get yAxis() { return this._yAxis; } /** Gets the node hierarchy used to render z-axis */ get zAxis() { return this._zAxis; } /** * Creates a new AxesViewer * @param scene defines the hosting scene * @param scaleLines defines a number used to scale line length (1 by default) * @param renderingGroupId defines a number used to set the renderingGroupId of the meshes (2 by default) * @param xAxis defines the node hierarchy used to render the x-axis * @param yAxis defines the node hierarchy used to render the y-axis * @param zAxis defines the node hierarchy used to render the z-axis * @param lineThickness The line thickness to use when creating the arrow. defaults to 1. */ constructor(e, t = 1, i = 2, r, s, n, a = 1) { if (this._scaleLinesFactor = 4, this._instanced = !1, this.scene = null, this._scaleLines = 1, e = e || gi.LastCreatedScene, !!e) { if (!r) { const l = new Dt("xAxisMaterial", e); l.disableLighting = !0, l.emissiveColor = ze.Red().scale(0.5), r = hg._CreateArrow(e, l, a); } if (!s) { const l = new Dt("yAxisMaterial", e); l.disableLighting = !0, l.emissiveColor = ze.Green().scale(0.5), s = hg._CreateArrow(e, l, a); } if (!n) { const l = new Dt("zAxisMaterial", e); l.disableLighting = !0, l.emissiveColor = ze.Blue().scale(0.5), n = hg._CreateArrow(e, l, a); } this._xAxis = r, this._yAxis = s, this._zAxis = n, this.scaleLines = t, i != null && (aT._SetRenderingGroupId(this._xAxis, i), aT._SetRenderingGroupId(this._yAxis, i), aT._SetRenderingGroupId(this._zAxis, i)), this.scene = e, this.update(new D(), D.Right(), D.Up(), D.Forward()); } } /** * Force the viewer to update * @param position defines the position of the viewer * @param xaxis defines the x axis of the viewer * @param yaxis defines the y axis of the viewer * @param zaxis defines the z axis of the viewer */ update(e, t, i, r) { this._xAxis.position.copyFrom(e), this._xAxis.setDirection(t), this._yAxis.position.copyFrom(e), this._yAxis.setDirection(i), this._zAxis.position.copyFrom(e), this._zAxis.setDirection(r); } /** * Creates an instance of this axes viewer. 
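* Illustrative usage sketch (not part of the bundle; assumes this class is the public
* AxesViewer, reachable as BABYLON.Debug.AxesViewer or BABYLON.AxesViewer depending on
* the build). `node`, `xDir`, `yDir` and `zDir` are hypothetical:
*   const viewer = new BABYLON.Debug.AxesViewer(scene, 2);   // scaleLines = 2
*   viewer.update(node.position, xDir, yDir, zDir);          // reposition and orient the three arrows
*   const copy = viewer.createInstance();                    // instanced copy sharing the arrow geometry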
* @returns a new axes viewer with instanced meshes */ createInstance() { const e = hg._CreateArrowInstance(this.scene, this._xAxis), t = hg._CreateArrowInstance(this.scene, this._yAxis), i = hg._CreateArrowInstance(this.scene, this._zAxis), r = new aT(this.scene, this.scaleLines, null, e, t, i); return r._instanced = !0, r; } /** Releases resources */ dispose() { this._xAxis && this._xAxis.dispose(!1, !this._instanced), this._yAxis && this._yAxis.dispose(!1, !this._instanced), this._zAxis && this._zAxis.dispose(!1, !this._instanced), this.scene = null; } static _SetRenderingGroupId(e, t) { e.getChildMeshes().forEach((i) => { i.renderingGroupId = t; }); } } class Lde extends aT { /** * Creates a new BoneAxesViewer * @param scene defines the hosting scene * @param bone defines the target bone * @param mesh defines the target mesh * @param scaleLines defines a scaling factor for line length (1 by default) */ constructor(e, t, i, r = 1) { super(e, r), this.pos = D.Zero(), this.xaxis = D.Zero(), this.yaxis = D.Zero(), this.zaxis = D.Zero(), this.mesh = i, this.bone = t; } /** * Force the viewer to update */ update() { if (!this.mesh || !this.bone) return; const e = this.bone; e.getAbsolutePositionToRef(this.mesh, this.pos), e.getDirectionToRef(bl.X, this.mesh, this.xaxis), e.getDirectionToRef(bl.Y, this.mesh, this.yaxis), e.getDirectionToRef(bl.Z, this.mesh, this.zaxis), super.update(this.pos, this.xaxis, this.yaxis, this.zaxis); } /** Releases resources */ dispose() { this.mesh && (this.mesh = null, this.bone = null, super.dispose()); } } Object.defineProperty(ii.prototype, "debugLayer", { get: function() { return this._debugLayer || (this._debugLayer = new vP(this)), this._debugLayer; }, enumerable: !0, configurable: !0 }); var yH; (function(c) { c[c.Properties = 0] = "Properties", c[c.Debug = 1] = "Debug", c[c.Statistics = 2] = "Statistics", c[c.Tools = 3] = "Tools", c[c.Settings = 4] = "Settings"; })(yH || (yH = {})); class vP { /** * Observable triggered when a property is changed through the inspector. */ get onPropertyChangedObservable() { return this.BJSINSPECTOR && this.BJSINSPECTOR.Inspector ? this.BJSINSPECTOR.Inspector.OnPropertyChangedObservable : (this._onPropertyChangedObservable || (this._onPropertyChangedObservable = new Fe()), this._onPropertyChangedObservable); } /** * Observable triggered when the selection is changed through the inspector. */ get onSelectionChangedObservable() { return this.BJSINSPECTOR && this.BJSINSPECTOR.Inspector ? this.BJSINSPECTOR.Inspector.OnSelectionChangeObservable : (this._onSelectionChangedObservable || (this._onSelectionChangedObservable = new Fe()), this._onSelectionChangedObservable); } /** * Instantiates a new debug layer. * The debug layer (aka Inspector) is the go to tool in order to better understand * what is happening in your scene * @see https://doc.babylonjs.com/toolsAndResources/inspector * @param scene Defines the scene to inspect */ constructor(e) { this.BJSINSPECTOR = this._getGlobalInspector(), this._scene = e || gi.LastCreatedScene, this._scene && this._scene.onDisposeObservable.add(() => { this._scene._debugLayer && this._scene._debugLayer.hide(); }); } /** * Creates the inspector window. 
* @param config */ _createInspector(e) { if (this.isVisible()) return; if (this._onPropertyChangedObservable) { for (const i of this._onPropertyChangedObservable.observers) this.BJSINSPECTOR.Inspector.OnPropertyChangedObservable.add(i); this._onPropertyChangedObservable.clear(), this._onPropertyChangedObservable = void 0; } if (this._onSelectionChangedObservable) { for (const i of this._onSelectionChangedObservable.observers) this.BJSINSPECTOR.Inspector.OnSelectionChangedObservable.add(i); this._onSelectionChangedObservable.clear(), this._onSelectionChangedObservable = void 0; } const t = Object.assign(Object.assign({}, vP.Config), e); this.BJSINSPECTOR = this.BJSINSPECTOR || this._getGlobalInspector(), this.BJSINSPECTOR.Inspector.Show(this._scene, t); } /** * Select a specific entity in the scene explorer and highlight a specific block in that entity property grid * @param entity defines the entity to select * @param lineContainerTitles defines the specific blocks to highlight (could be a string or an array of strings) */ select(e, t) { this.BJSINSPECTOR && (t && (Object.prototype.toString.call(t) == "[object String]" ? this.BJSINSPECTOR.Inspector.MarkLineContainerTitleForHighlighting(t) : this.BJSINSPECTOR.Inspector.MarkMultipleLineContainerTitlesForHighlighting(t)), this.BJSINSPECTOR.Inspector.OnSelectionChangeObservable.notifyObservers(e)); } /** Get the inspector from bundle or global */ _getGlobalInspector() { if (typeof INSPECTOR < "u") return INSPECTOR; if (typeof BABYLON < "u" && typeof BABYLON.Inspector < "u") return BABYLON; } /** * Get if the inspector is visible or not. * @returns true if visible otherwise, false */ isVisible() { return this.BJSINSPECTOR && this.BJSINSPECTOR.Inspector.IsVisible; } /** * Hide the inspector and close its window. */ hide() { this.BJSINSPECTOR && this.BJSINSPECTOR.Inspector.Hide(); } /** * Update the scene in the inspector */ setAsActiveScene() { this.BJSINSPECTOR && this.BJSINSPECTOR.Inspector._SetNewScene(this._scene); } /** * Launch the debugLayer. * @param config Define the configuration of the inspector * @returns a promise fulfilled when the debug layer is visible */ show(e) { return new Promise((t) => { if (typeof this.BJSINSPECTOR > "u") { const i = e && e.inspectorURL ? e.inspectorURL : vP.InspectorURL; Ve.LoadBabylonScript(i, () => { this._createInspector(e), t(this); }); } else this._createInspector(e), t(this); }); } } vP.InspectorURL = `${Ve._DefaultCdnUrl}/v${$e.Version}/inspector/babylon.inspector.bundle.js`; vP.Config = { overlay: !1, showExplorer: !0, showInspector: !0, embedMode: !1, handleResize: !0, enablePopup: !0 }; function cU(c) { let t = [0, 1, 2, 0, 2, 3, 4, 5, 6, 4, 6, 7, 8, 9, 10, 8, 10, 11, 12, 13, 14, 12, 14, 15, 16, 17, 18, 16, 18, 19, 20, 21, 22, 20, 22, 23]; const i = [ 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0 ], r = []; let s = []; const n = c.width || c.size || 1, a = c.height || c.size || 1, l = c.depth || c.size || 1, o = c.wrap || !1; let u = c.topBaseAt === void 0 ? 1 : c.topBaseAt, h = c.bottomBaseAt === void 0 ? 
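/* Illustrative usage sketch (not part of the bundle). The class above is the DebugLayer
   (Inspector); typical usage, assuming the inspector bundle is reachable at the
   configured InspectorURL and `someMesh` is a mesh in the scene:

     await scene.debugLayer.show({ embedMode: true, overlay: true });
     scene.debugLayer.select(someMesh, "TRANSFORMS");   // block title is illustrative
     if (scene.debugLayer.isVisible()) {
       scene.debugLayer.hide();
     }
*/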
0 : c.bottomBaseAt; u = (u + 4) % 4, h = (h + 4) % 4; const d = [2, 0, 3, 1], f = [2, 0, 1, 3]; let p = d[u], m = f[h], _ = [ 1, -1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1 ]; if (o) { t = [2, 3, 0, 2, 0, 1, 4, 5, 6, 4, 6, 7, 9, 10, 11, 9, 11, 8, 12, 14, 15, 12, 13, 14], _ = [ -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, -1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1 ]; let R = [ [1, 1, 1], [-1, 1, 1], [-1, 1, -1], [1, 1, -1] ], w = [ [-1, -1, 1], [1, -1, 1], [1, -1, -1], [-1, -1, -1] ]; const V = [17, 18, 19, 16], k = [22, 23, 20, 21]; for (; p > 0; ) R.unshift(R.pop()), V.unshift(V.pop()), p--; for (; m > 0; ) w.unshift(w.pop()), k.unshift(k.pop()), m--; R = R.flat(), w = w.flat(), _ = _.concat(R).concat(w), t.push(V[0], V[2], V[3], V[0], V[1], V[2]), t.push(k[0], k[2], k[3], k[0], k[1], k[2]); } const v = [n / 2, a / 2, l / 2]; s = _.reduce((R, w, V) => R.concat(w * v[V % 3]), []); const C = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, x = c.faceUV || new Array(6), b = c.faceColors, S = []; for (let R = 0; R < 6; R++) x[R] === void 0 && (x[R] = new Di(0, 0, 1, 1)), b && b[R] === void 0 && (b[R] = new Et(1, 1, 1, 1)); for (let R = 0; R < 6; R++) if (r.push(x[R].z, hn.UseOpenGLOrientationForUV ? 1 - x[R].w : x[R].w), r.push(x[R].x, hn.UseOpenGLOrientationForUV ? 1 - x[R].w : x[R].w), r.push(x[R].x, hn.UseOpenGLOrientationForUV ? 1 - x[R].y : x[R].y), r.push(x[R].z, hn.UseOpenGLOrientationForUV ? 1 - x[R].y : x[R].y), b) for (let w = 0; w < 4; w++) S.push(b[R].r, b[R].g, b[R].b, b[R].a); Ot._ComputeSides(C, s, t, i, r, c.frontUVs, c.backUVs); const M = new Ot(); if (M.indices = t, M.positions = s, M.normals = i, M.uvs = r, b) { const R = C === Ot.DOUBLESIDE ? 
S.concat(S) : S; M.colors = R; } return M; } function Cie(c) { const e = c.width || c.size || 1, t = c.height || c.size || 1, i = c.depth || c.size || 1, r = (c.widthSegments || c.segments || 1) | 0, s = (c.heightSegments || c.segments || 1) | 0, n = (c.depthSegments || c.segments || 1) | 0, a = new Ae(), l = new Ae(), o = new Ae(), u = BC({ width: e, height: i, subdivisionsX: r, subdivisionsY: n }); Ae.TranslationToRef(0, -t / 2, 0, l), Ae.RotationZToRef(Math.PI, a), a.multiplyToRef(l, o), u.transform(o); const h = BC({ width: e, height: i, subdivisionsX: r, subdivisionsY: n }); Ae.TranslationToRef(0, t / 2, 0, o), h.transform(o); const d = BC({ width: t, height: i, subdivisionsX: s, subdivisionsY: n }); Ae.TranslationToRef(-e / 2, 0, 0, l), Ae.RotationZToRef(Math.PI / 2, a), a.multiplyToRef(l, o), d.transform(o); const f = BC({ width: t, height: i, subdivisionsX: s, subdivisionsY: n }); Ae.TranslationToRef(e / 2, 0, 0, l), Ae.RotationZToRef(-Math.PI / 2, a), a.multiplyToRef(l, o), f.transform(o); const p = BC({ width: e, height: t, subdivisionsX: r, subdivisionsY: s }); Ae.TranslationToRef(0, 0, -i / 2, l), Ae.RotationXToRef(-Math.PI / 2, a), a.multiplyToRef(l, o), p.transform(o); const m = BC({ width: e, height: t, subdivisionsX: r, subdivisionsY: s }); return Ae.TranslationToRef(0, 0, i / 2, l), Ae.RotationXToRef(Math.PI / 2, a), a.multiplyToRef(l, o), m.transform(o), u.merge([h, f, d, p, m], !0), u; } function B4(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, cU(e).applyToMesh(i, e.updatable), i; } const Nde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateBox: B4 }; Ot.CreateBox = cU; ke.CreateBox = (c, e, t = null, i, r) => B4(c, { size: e, sideOrientation: r, updatable: i }, t); function uU(c) { const e = (c.segments || 32) | 0, t = c.diameterX || c.diameter || 1, i = c.diameterY || c.diameter || 1, r = c.diameterZ || c.diameter || 1, s = c.arc && (c.arc <= 0 || c.arc > 1) ? 1 : c.arc || 1, n = c.slice && c.slice <= 0 ? 1 : c.slice || 1, a = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, l = !!c.dedupTopBottomIndices, o = new D(t / 2, i / 2, r / 2), u = 2 + e, h = 2 * u, d = [], f = [], p = [], m = []; for (let v = 0; v <= u; v++) { const C = v / u, x = C * Math.PI * n; for (let b = 0; b <= h; b++) { const S = b / h, M = S * Math.PI * 2 * s, R = Ae.RotationZ(-x), w = Ae.RotationY(M), V = D.TransformCoordinates(D.Up(), R), k = D.TransformCoordinates(V, w), L = k.multiply(o), B = k.divide(o).normalize(); f.push(L.x, L.y, L.z), p.push(B.x, B.y, B.z), m.push(S, hn.UseOpenGLOrientationForUV ? 1 - C : C); } if (v > 0) { const b = f.length / 3; for (let S = b - 2 * (h + 1); S + h + 2 < b; S++) l ? 
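/* Illustrative usage sketch (not part of the bundle). The builders above are registered as
   CreateBox and exposed as MeshBuilder.CreateBox in the unminified library; the per-face
   options handled by the vertex-data function can be used like this, with `scene` assumed
   to exist:

     const faceColors = new Array(6).fill(new BABYLON.Color4(1, 1, 1, 1));
     faceColors[0] = new BABYLON.Color4(1, 0, 0, 1);            // tint face 0 red
     const box = BABYLON.MeshBuilder.CreateBox("box", {
       width: 2, height: 1, depth: 1,
       faceColors,
       faceUV: [new BABYLON.Vector4(0, 0, 0.5, 0.5)],           // custom UVs for face 0; the rest default to (0, 0, 1, 1)
       wrap: true                                               // wrap the texture around the side faces
     }, scene);
*/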
(v > 1 && (d.push(S), d.push(S + 1), d.push(S + h + 1)), (v < u || n < 1) && (d.push(S + h + 1), d.push(S + 1), d.push(S + h + 2))) : (d.push(S), d.push(S + 1), d.push(S + h + 1), d.push(S + h + 1), d.push(S + 1), d.push(S + h + 2)); } } Ot._ComputeSides(a, f, d, p, m, c.frontUVs, c.backUVs); const _ = new Ot(); return _.indices = d, _.positions = f, _.normals = p, _.uvs = m, _; } function Rd(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, uU(e).applyToMesh(i, e.updatable), i; } const Fde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateSphere: Rd }; Ot.CreateSphere = uU; ke.CreateSphere = (c, e, t, i, r, s) => Rd(c, { segments: e, diameterX: t, diameterY: t, diameterZ: t, sideOrientation: s, updatable: r }, i); function hU(c = { subdivisions: 2, tessellation: 16, height: 1, radius: 0.25, capSubdivisions: 6 }) { const e = Math.max(c.subdivisions ? c.subdivisions : 2, 1) | 0, t = Math.max(c.tessellation ? c.tessellation : 16, 3) | 0, i = Math.max(c.height ? c.height : 1, 0), r = Math.max(c.radius ? c.radius : 0.25, 0), s = Math.max(c.capSubdivisions ? c.capSubdivisions : 6, 1) | 0, n = t, a = e, l = Math.max(c.radiusTop ? c.radiusTop : r, 0), o = Math.max(c.radiusBottom ? c.radiusBottom : r, 0), u = i - (l + o), h = 0, d = 2 * Math.PI, f = Math.max(c.topCapSubdivisions ? c.topCapSubdivisions : s, 1), p = Math.max(c.bottomCapSubdivisions ? c.bottomCapSubdivisions : s, 1), m = Math.acos((o - l) / i); let _ = []; const v = [], C = [], x = []; let b = 0; const S = [], M = u * 0.5, R = Math.PI * 0.5; let w, V; const k = D.Zero(), L = D.Zero(), B = Math.cos(m), U = Math.sin(m), K = new at(l * U, M + l * B).subtract(new at(o * U, -M + o * B)).length(), ee = l * m + K + o * (R - m); let Z = 0; for (V = 0; V <= f; V++) { const $ = [], j = R - m * (V / f); Z += l * m / f; const J = Math.cos(j), ne = Math.sin(j), pe = J * l; for (w = 0; w <= n; w++) { const ge = w / n, Ie = ge * d + h, ye = Math.sin(Ie), Se = Math.cos(Ie); L.x = pe * ye, L.y = M + ne * l, L.z = pe * Se, v.push(L.x, L.y, L.z), k.set(J * ye, ne, J * Se), C.push(k.x, k.y, k.z), x.push(ge, hn.UseOpenGLOrientationForUV ? Z / ee : 1 - Z / ee), $.push(b), b++; } S.push($); } const q = i - l - o + B * l - B * o, le = U * (o - l) / q; for (V = 1; V <= a; V++) { const $ = []; Z += K / a; const j = U * (V * (o - l) / a + l); for (w = 0; w <= n; w++) { const J = w / n, ne = J * d + h, pe = Math.sin(ne), ge = Math.cos(ne); L.x = j * pe, L.y = M + B * l - V * q / a, L.z = j * ge, v.push(L.x, L.y, L.z), k.set(pe, le, ge).normalize(), C.push(k.x, k.y, k.z), x.push(J, hn.UseOpenGLOrientationForUV ? Z / ee : 1 - Z / ee), $.push(b), b++; } S.push($); } for (V = 1; V <= p; V++) { const $ = [], j = R - m - (Math.PI - m) * (V / p); Z += o * m / p; const J = Math.cos(j), ne = Math.sin(j), pe = J * o; for (w = 0; w <= n; w++) { const ge = w / n, Ie = ge * d + h, ye = Math.sin(Ie), Se = Math.cos(Ie); L.x = pe * ye, L.y = -M + ne * o, L.z = pe * Se, v.push(L.x, L.y, L.z), k.set(J * ye, ne, J * Se), C.push(k.x, k.y, k.z), x.push(ge, hn.UseOpenGLOrientationForUV ? 
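/* Illustrative usage sketch (not part of the bundle). The builder above is exposed as
   CreateSphere (MeshBuilder.CreateSphere in the unminified library); `arc` and `slice`
   are clamped to (0, 1] by the vertex-data function:

     const partSphere = BABYLON.MeshBuilder.CreateSphere("part", {
       segments: 32,
       diameterX: 2, diameterY: 1, diameterZ: 2,     // differing diameters give an ellipsoid
       slice: 0.5,                                   // keep only the upper half (latitude)
       arc: 0.75,                                    // sweep 3/4 of the way around (longitude)
       sideOrientation: BABYLON.Mesh.DOUBLESIDE      // visible from inside the openings
     }, scene);
*/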
Z / ee : 1 - Z / ee), $.push(b), b++; } S.push($); } for (w = 0; w < n; w++) for (V = 0; V < f + a + p; V++) { const $ = S[V][w], j = S[V + 1][w], J = S[V + 1][w + 1], ne = S[V][w + 1]; _.push($), _.push(j), _.push(ne), _.push(j), _.push(J), _.push(ne); } if (_ = _.reverse(), c.orientation && !c.orientation.equals(D.Up())) { const $ = new Ae(); c.orientation.clone().scale(Math.PI * 0.5).cross(D.Up()).toQuaternion().toRotationMatrix($); const j = D.Zero(); for (let J = 0; J < v.length; J += 3) j.set(v[J], v[J + 1], v[J + 2]), D.TransformCoordinatesToRef(j.clone(), $, j), v[J] = j.x, v[J + 1] = j.y, v[J + 2] = j.z; } const ie = new Ot(); return ie.positions = v, ie.normals = C, ie.uvs = x, ie.indices = _, ie; } function sN(c, e = { orientation: D.Up(), subdivisions: 2, tessellation: 16, height: 1, radius: 0.25, capSubdivisions: 6, updatable: !1 }, t = null) { const i = new ke(c, t); return hU(e).applyToMesh(i, e.updatable), i; } const Bde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateCapsule: sN }; ke.CreateCapsule = (c, e, t) => sN(c, e, t); Ot.CreateCapsule = hU; function zK(c) { let e = c.pathArray; const t = c.closeArray || !1, i = c.closePath || !1, r = c.invertUV || !1, s = Math.floor(e[0].length / 2); let n = c.offset || s; n = n > s ? s : Math.floor(n); const a = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, l = c.uvs, o = c.colors, u = [], h = [], d = [], f = [], p = [], m = [], _ = [], v = []; let C; const x = [], b = []; let S, M, R; if (e.length < 2) { const he = [], be = []; for (M = 0; M < e[0].length - n; M++) he.push(e[0][M]), be.push(e[0][M + n]); e = [he, be]; } let w = 0; const V = i ? 1 : 0; let k, L; C = e[0].length; let B, U; for (S = 0; S < e.length; S++) { for (_[S] = 0, p[S] = [0], k = e[S], L = k.length, C = C < L ? C : L, R = 0; R < L; ) u.push(k[R].x, k[R].y, k[R].z), R > 0 && (B = k[R].subtract(k[R - 1]).length(), U = B + _[S], p[S].push(U), _[S] = U), R++; i && (R--, u.push(k[0].x, k[0].y, k[0].z), B = k[R].subtract(k[0]).length(), U = B + _[S], p[S].push(U), _[S] = U), x[S] = L + V, b[S] = w, w += L + V; } let K, ee, Z = null, q = null; for (M = 0; M < C + V; M++) { for (v[M] = 0, m[M] = [0], S = 0; S < e.length - 1; S++) K = e[S], ee = e[S + 1], M === C ? (Z = K[0], q = ee[0]) : (Z = K[M], q = ee[M]), B = q.subtract(Z).length(), U = B + v[M], m[M].push(U), v[M] = U; t && q && Z && (K = e[S], ee = e[0], M === C && (q = ee[0]), B = q.subtract(Z).length(), U = B + v[M], v[M] = U); } let le, ie; if (l) for (S = 0; S < l.length; S++) f.push(l[S].x, hn.UseOpenGLOrientationForUV ? 1 - l[S].y : l[S].y); else for (S = 0; S < e.length; S++) for (M = 0; M < C + V; M++) le = _[S] != 0 ? p[S][M] / _[S] : 0, ie = v[M] != 0 ? m[M][S] / v[M] : 0, r ? f.push(ie, le) : f.push(le, hn.UseOpenGLOrientationForUV ? 1 - ie : ie); S = 0; let $ = 0, j = x[S] - 1, J = x[S + 1] - 1, ne = j < J ? j : J, pe = b[1] - b[0]; const ge = t ? x.length : x.length - 1; for (; $ <= ne && S < ge; ) h.push($, $ + pe, $ + 1), h.push($ + pe + 1, $ + 1, $ + pe), $ += 1, $ === ne && (S++, S === x.length - 1 ? (pe = b[0] - b[S], j = x[S] - 1, J = x[0] - 1) : (pe = b[S + 1] - b[S], j = x[S] - 1, J = x[S + 1] - 1), $ = b[S], ne = j < J ? j + $ : J + $); if (Ot.ComputeNormals(u, h, d), i) { let he = 0, be = 0; for (S = 0; S < e.length; S++) he = b[S] * 3, S + 1 < e.length ? 
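/* Illustrative usage sketch (not part of the bundle). The builder above is exposed as
   CreateCapsule (MeshBuilder.CreateCapsule in the unminified library); asymmetric caps
   and a custom long-axis orientation are both supported:

     const capsule = BABYLON.MeshBuilder.CreateCapsule("capsule", {
       height: 2,
       radius: 0.4,
       radiusTop: 0.25,                           // taper the top cap
       tessellation: 24,
       capSubdivisions: 8,
       orientation: BABYLON.Vector3.Forward()     // long axis along +Z instead of the default +Y
     }, scene);
*/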
be = (b[S + 1] - 1) * 3 : be = d.length - 3, d[he] = (d[he] + d[be]) * 0.5, d[he + 1] = (d[he + 1] + d[be + 1]) * 0.5, d[he + 2] = (d[he + 2] + d[be + 2]) * 0.5, d[be] = d[he], d[be + 1] = d[he + 1], d[be + 2] = d[he + 2]; } Ot._ComputeSides(a, u, h, d, f, c.frontUVs, c.backUVs); let Ie = null; if (o) { Ie = new Float32Array(o.length * 4); for (let he = 0; he < o.length; he++) Ie[he * 4] = o[he].r, Ie[he * 4 + 1] = o[he].g, Ie[he * 4 + 2] = o[he].b, Ie[he * 4 + 3] = o[he].a; } const ye = new Ot(), Se = new Float32Array(u), re = new Float32Array(d), te = new Float32Array(f); return ye.indices = h, ye.positions = Se, ye.normals = re, ye.uvs = te, Ie && ye.set(Ie, Y.ColorKind), i && (ye._idx = b), ye; } function nx(c, e, t = null) { const i = e.pathArray, r = e.closeArray, s = e.closePath, n = ke._GetDefaultSideOrientation(e.sideOrientation), a = e.instance, l = e.updatable; if (a) { const o = de.Vector3[0].setAll(Number.MAX_VALUE), u = de.Vector3[1].setAll(-Number.MAX_VALUE), h = (f) => { let p = i[0].length; const m = a; let _ = 0; const v = m._originalBuilderSideOrientation === ke.DOUBLESIDE ? 2 : 1; for (let C = 1; C <= v; ++C) for (let x = 0; x < i.length; ++x) { const b = i[x], S = b.length; p = p < S ? p : S; for (let M = 0; M < p; ++M) { const R = b[M]; f[_] = R.x, f[_ + 1] = R.y, f[_ + 2] = R.z, o.minimizeInPlaceFromFloats(R.x, R.y, R.z), u.maximizeInPlaceFromFloats(R.x, R.y, R.z), _ += 3; } if (m._creationDataStorage && m._creationDataStorage.closePath) { const M = b[0]; f[_] = M.x, f[_ + 1] = M.y, f[_ + 2] = M.z, _ += 3; } } }, d = a.getVerticesData(Y.PositionKind); if (h(d), a.hasBoundingInfo ? a.getBoundingInfo().reConstruct(o, u, a._worldMatrix) : a.buildBoundingInfo(o, u, a._worldMatrix), a.updateVerticesData(Y.PositionKind, d, !1, !1), e.colors) { const f = a.getVerticesData(Y.ColorKind); for (let p = 0, m = 0; p < e.colors.length; p++, m += 4) { const _ = e.colors[p]; f[m] = _.r, f[m + 1] = _.g, f[m + 2] = _.b, f[m + 3] = _.a; } a.updateVerticesData(Y.ColorKind, f, !1, !1); } if (e.uvs) { const f = a.getVerticesData(Y.UVKind); for (let p = 0; p < e.uvs.length; p++) f[p * 2] = e.uvs[p].x, f[p * 2 + 1] = hn.UseOpenGLOrientationForUV ? 1 - e.uvs[p].y : e.uvs[p].y; a.updateVerticesData(Y.UVKind, f, !1, !1); } if (!a.areNormalsFrozen || a.isFacetDataEnabled) { const f = a.getIndices(), p = a.getVerticesData(Y.NormalKind), m = a.isFacetDataEnabled ? a.getFacetDataParameters() : null; if (Ot.ComputeNormals(d, f, p, m), a._creationDataStorage && a._creationDataStorage.closePath) { let _ = 0, v = 0; for (let C = 0; C < i.length; C++) _ = a._creationDataStorage.idx[C] * 3, C + 1 < i.length ? 
v = (a._creationDataStorage.idx[C + 1] - 1) * 3 : v = p.length - 3, p[_] = (p[_] + p[v]) * 0.5, p[_ + 1] = (p[_ + 1] + p[v + 1]) * 0.5, p[_ + 2] = (p[_ + 2] + p[v + 2]) * 0.5, p[v] = p[_], p[v + 1] = p[_ + 1], p[v + 2] = p[_ + 2]; } a.areNormalsFrozen || a.updateVerticesData(Y.NormalKind, p, !1, !1); } return a; } else { const o = new ke(c, t); o._originalBuilderSideOrientation = n, o._creationDataStorage = new _K(); const u = zK(e); return s && (o._creationDataStorage.idx = u._idx), o._creationDataStorage.closePath = s, o._creationDataStorage.closeArray = r, u.applyToMesh(o, l), o; } } const Ude = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateRibbon: nx }; Ot.CreateRibbon = zK; ke.CreateRibbon = (c, e, t = !1, i, r, s, n = !1, a, l) => nx(c, { pathArray: e, closeArray: t, closePath: i, offset: r, updatable: n, sideOrientation: a, instance: l }, s); function dU(c) { const e = [], t = [], i = [], r = [], s = c.radius || 0.5, n = c.tessellation || 64, a = c.arc && (c.arc <= 0 || c.arc > 1) ? 1 : c.arc || 1, l = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE; e.push(0, 0, 0), r.push(0.5, 0.5); const o = Math.PI * 2 * a, u = a === 1 ? o / n : o / (n - 1); let h = 0; for (let p = 0; p < n; p++) { const m = Math.cos(h), _ = Math.sin(h), v = (m + 1) / 2, C = (1 - _) / 2; e.push(s * m, s * _, 0), r.push(v, hn.UseOpenGLOrientationForUV ? 1 - C : C), h += u; } a === 1 && (e.push(e[3], e[4], e[5]), r.push(r[2], hn.UseOpenGLOrientationForUV ? 1 - r[3] : r[3])); const d = e.length / 3; for (let p = 1; p < d - 1; p++) t.push(p + 1, 0, p); Ot.ComputeNormals(e, t, i), Ot._ComputeSides(l, e, t, i, r, c.frontUVs, c.backUVs); const f = new Ot(); return f.indices = t, f.positions = e, f.normals = i, f.uvs = r, f; } function Cw(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, dU(e).applyToMesh(i, e.updatable), i; } const Vde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateDisc: Cw }; Ot.CreateDisc = dU; ke.CreateDisc = (c, e, t, i = null, r, s) => Cw(c, { radius: e, tessellation: t, sideOrientation: s, updatable: r }, i); function uO(c) { const e = c.pattern || ke.NO_FLIP, t = c.tileWidth || c.tileSize || 1, i = c.tileHeight || c.tileSize || 1, r = c.alignHorizontal || 0, s = c.alignVertical || 0, n = c.width || c.size || 1, a = Math.floor(n / t); let l = n - a * t; const o = c.height || c.size || 1, u = Math.floor(o / i); let h = o - u * i; const d = t * a / 2, f = i * u / 2; let p = 0, m = 0, _ = 0, v = 0, C = 0, x = 0; if (l > 0 || h > 0) { switch (_ = -d, v = -f, C = d, x = f, r) { case ke.CENTER: l /= 2, _ -= l, C += l; break; case ke.LEFT: C += l, p = -l / 2; break; case ke.RIGHT: _ -= l, p = l / 2; break; } switch (s) { case ke.CENTER: h /= 2, v -= h, x += h; break; case ke.BOTTOM: x += h, m = -h / 2; break; case ke.TOP: v -= h, m = h / 2; break; } } const b = [], S = [], M = []; M[0] = [0, 0, 1, 0, 1, 1, 0, 1], M[1] = [0, 0, 1, 0, 1, 1, 0, 1], (e === ke.ROTATE_TILE || e === ke.ROTATE_ROW) && (M[1] = [1, 1, 0, 1, 0, 0, 1, 0]), (e === ke.FLIP_TILE || e === ke.FLIP_ROW) && (M[1] = [1, 0, 0, 0, 0, 1, 1, 1]), (e === ke.FLIP_N_ROTATE_TILE || e === ke.FLIP_N_ROTATE_ROW) && (M[1] = [0, 1, 1, 1, 1, 0, 0, 0]); let R = []; const w = [], V = []; let k = 0; for (let K = 0; K < u; K++) for (let ee = 0; ee < a; ee++) b.push(-d + ee * t + p, -f + K * i + m, 0), b.push(-d + (ee + 1) * t + p, -f + K * i + m, 0), 
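/* Illustrative usage sketch (not part of the bundle). The ribbon builder above is exposed
   as CreateRibbon (MeshBuilder.CreateRibbon in the unminified library); its `instance`
   branch updates an existing ribbon in place, so a deforming ribbon can be refreshed every
   frame without reallocation, provided it was created `updatable` and the path topology
   (number of paths and points per path) does not change. `path0..path2` and `deformPaths`
   are hypothetical:

     const paths = [path0, path1, path2];          // arrays of Vector3
     let ribbon = BABYLON.MeshBuilder.CreateRibbon("ribbon", {
       pathArray: paths, closePath: true, updatable: true
     }, scene);
     scene.onBeforeRenderObservable.add(() => {
       deformPaths(paths);                          // mutate the Vector3s in place
       ribbon = BABYLON.MeshBuilder.CreateRibbon("ribbon", { pathArray: paths, instance: ribbon });
     });
*/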
b.push(-d + (ee + 1) * t + p, -f + (K + 1) * i + m, 0), b.push(-d + ee * t + p, -f + (K + 1) * i + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), e === ke.FLIP_TILE || e === ke.ROTATE_TILE || e === ke.FLIP_N_ROTATE_TILE ? R = R.concat(M[(ee % 2 + K % 2) % 2]) : e === ke.FLIP_ROW || e === ke.ROTATE_ROW || e === ke.FLIP_N_ROTATE_ROW ? R = R.concat(M[K % 2]) : R = R.concat(M[0]), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1), k += 4; if (l > 0 || h > 0) { const K = h > 0 && (s === ke.CENTER || s === ke.TOP), ee = h > 0 && (s === ke.CENTER || s === ke.BOTTOM), Z = l > 0 && (r === ke.CENTER || r === ke.RIGHT), q = l > 0 && (r === ke.CENTER || r === ke.LEFT); let le = [], ie, $, j, J; if (K && Z && (b.push(_ + p, v + m, 0), b.push(-d + p, v + m, 0), b.push(-d + p, v + h + m, 0), b.push(_ + p, v + h + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, ie = 1 - l / t, $ = 1 - h / i, j = 1, J = 1, le = [ie, $, j, $, j, J, ie, J], e === ke.ROTATE_ROW && (le = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), e === ke.FLIP_ROW && (le = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), e === ke.FLIP_N_ROTATE_ROW && (le = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]), R = R.concat(le), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1)), K && q && (b.push(d + p, v + m, 0), b.push(C + p, v + m, 0), b.push(C + p, v + h + m, 0), b.push(d + p, v + h + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, ie = 0, $ = 1 - h / i, j = l / t, J = 1, le = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_ROW || e === ke.ROTATE_TILE && a % 2 === 0) && (le = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_ROW || e === ke.FLIP_TILE && a % 2 === 0) && (le = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_ROW || e === ke.FLIP_N_ROTATE_TILE && a % 2 === 0) && (le = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]), R = R.concat(le), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1)), ee && Z && (b.push(_ + p, f + m, 0), b.push(-d + p, f + m, 0), b.push(-d + p, x + m, 0), b.push(_ + p, x + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, ie = 1 - l / t, $ = 0, j = 1, J = h / i, le = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_ROW && u % 2 === 1 || e === ke.ROTATE_TILE && u % 1 === 0) && (le = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_ROW && u % 2 === 1 || e === ke.FLIP_TILE && u % 2 === 0) && (le = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_ROW && u % 2 === 1 || e === ke.FLIP_N_ROTATE_TILE && u % 2 === 0) && (le = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]), R = R.concat(le), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1)), ee && q && (b.push(d + p, f + m, 0), b.push(C + p, f + m, 0), b.push(C + p, x + m, 0), b.push(d + p, x + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, ie = 0, $ = 0, j = l / t, J = h / i, le = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_ROW && u % 2 === 1 || e === ke.ROTATE_TILE && (u + a) % 2 === 1) && (le = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_ROW && u % 2 === 1 || e === ke.FLIP_TILE && (u + a) % 2 === 1) && (le = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_ROW && u % 2 === 1 || e === ke.FLIP_N_ROTATE_TILE && (u + a) % 2 === 1) && (le = [ie, 1 - $, j, 1 - $, j, 1 - J, 
ie, 1 - J]), R = R.concat(le), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1)), K) { const ne = []; ie = 0, $ = 1 - h / i, j = 1, J = 1, ne[0] = [ie, $, j, $, j, J, ie, J], ne[1] = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_TILE || e === ke.ROTATE_ROW) && (ne[1] = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_TILE || e === ke.FLIP_ROW) && (ne[1] = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_TILE || e === ke.FLIP_N_ROTATE_ROW) && (ne[1] = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]); for (let pe = 0; pe < a; pe++) b.push(-d + pe * t + p, v + m, 0), b.push(-d + (pe + 1) * t + p, v + m, 0), b.push(-d + (pe + 1) * t + p, v + h + m, 0), b.push(-d + pe * t + p, v + h + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, e === ke.FLIP_TILE || e === ke.ROTATE_TILE || e === ke.FLIP_N_ROTATE_TILE ? R = R.concat(ne[(pe + 1) % 2]) : e === ke.FLIP_ROW || e === ke.ROTATE_ROW || e === ke.FLIP_N_ROTATE_ROW ? R = R.concat(ne[1]) : R = R.concat(ne[0]), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1); } if (ee) { const ne = []; ie = 0, $ = 0, j = 1, J = h / i, ne[0] = [ie, $, j, $, j, J, ie, J], ne[1] = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_TILE || e === ke.ROTATE_ROW) && (ne[1] = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_TILE || e === ke.FLIP_ROW) && (ne[1] = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_TILE || e === ke.FLIP_N_ROTATE_ROW) && (ne[1] = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]); for (let pe = 0; pe < a; pe++) b.push(-d + pe * t + p, x - h + m, 0), b.push(-d + (pe + 1) * t + p, x - h + m, 0), b.push(-d + (pe + 1) * t + p, x + m, 0), b.push(-d + pe * t + p, x + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, e === ke.FLIP_TILE || e === ke.ROTATE_TILE || e === ke.FLIP_N_ROTATE_TILE ? R = R.concat(ne[(pe + u) % 2]) : e === ke.FLIP_ROW || e === ke.ROTATE_ROW || e === ke.FLIP_N_ROTATE_ROW ? R = R.concat(ne[u % 2]) : R = R.concat(ne[0]), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1); } if (Z) { const ne = []; ie = 1 - l / t, $ = 0, j = 1, J = 1, ne[0] = [ie, $, j, $, j, J, ie, J], ne[1] = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_TILE || e === ke.ROTATE_ROW) && (ne[1] = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_TILE || e === ke.FLIP_ROW) && (ne[1] = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_TILE || e === ke.FLIP_N_ROTATE_ROW) && (ne[1] = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]); for (let pe = 0; pe < u; pe++) b.push(_ + p, -f + pe * i + m, 0), b.push(_ + l + p, -f + pe * i + m, 0), b.push(_ + l + p, -f + (pe + 1) * i + m, 0), b.push(_ + p, -f + (pe + 1) * i + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, e === ke.FLIP_TILE || e === ke.ROTATE_TILE || e === ke.FLIP_N_ROTATE_TILE ? R = R.concat(ne[(pe + 1) % 2]) : e === ke.FLIP_ROW || e === ke.ROTATE_ROW || e === ke.FLIP_N_ROTATE_ROW ? 
R = R.concat(ne[pe % 2]) : R = R.concat(ne[0]), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1); } if (q) { const ne = []; ie = 0, $ = 0, j = l / i, J = 1, ne[0] = [ie, $, j, $, j, J, ie, J], ne[1] = [ie, $, j, $, j, J, ie, J], (e === ke.ROTATE_TILE || e === ke.ROTATE_ROW) && (ne[1] = [1 - ie, 1 - $, 1 - j, 1 - $, 1 - j, 1 - J, 1 - ie, 1 - J]), (e === ke.FLIP_TILE || e === ke.FLIP_ROW) && (ne[1] = [1 - ie, $, 1 - j, $, 1 - j, J, 1 - ie, J]), (e === ke.FLIP_N_ROTATE_TILE || e === ke.FLIP_N_ROTATE_ROW) && (ne[1] = [ie, 1 - $, j, 1 - $, j, 1 - J, ie, 1 - J]); for (let pe = 0; pe < u; pe++) b.push(C - l + p, -f + pe * i + m, 0), b.push(C + p, -f + pe * i + m, 0), b.push(C + p, -f + (pe + 1) * i + m, 0), b.push(C - l + p, -f + (pe + 1) * i + m, 0), V.push(k, k + 1, k + 3, k + 1, k + 2, k + 3), k += 4, e === ke.FLIP_TILE || e === ke.ROTATE_TILE || e === ke.FLIP_N_ROTATE_TILE ? R = R.concat(ne[(pe + a) % 2]) : e === ke.FLIP_ROW || e === ke.ROTATE_ROW || e === ke.FLIP_N_ROTATE_ROW ? R = R.concat(ne[pe % 2]) : R = R.concat(ne[0]), w.push(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), S.push(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1); } } const L = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE; Ot._ComputeSides(L, b, V, S, R, c.frontUVs, c.backUVs); const B = new Ot(); B.indices = V, B.positions = b, B.normals = S, B.uvs = R; const U = L === Ot.DOUBLESIDE ? w.concat(w) : w; return B.colors = U, B; } function GK(c, e, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, uO(e).applyToMesh(i, e.updatable), i; } const kde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateTiledPlane: GK }; Ot.CreateTiledPlane = uO; function KK(c) { const t = c.faceUV || new Array(6), i = c.faceColors, r = c.pattern || ke.NO_FLIP, s = c.width || c.size || 1, n = c.height || c.size || 1, a = c.depth || c.size || 1, l = c.tileWidth || c.tileSize || 1, o = c.tileHeight || c.tileSize || 1, u = c.alignHorizontal || 0, h = c.alignVertical || 0, d = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE; for (let j = 0; j < 6; j++) t[j] === void 0 && (t[j] = new Di(0, 0, 1, 1)), i && i[j] === void 0 && (i[j] = new Et(1, 1, 1, 1)); const f = s / 2, p = n / 2, m = a / 2, _ = []; for (let j = 0; j < 2; j++) _[j] = uO({ pattern: r, tileWidth: l, tileHeight: o, width: s, height: n, alignVertical: h, alignHorizontal: u, sideOrientation: d }); for (let j = 2; j < 4; j++) _[j] = uO({ pattern: r, tileWidth: l, tileHeight: o, width: a, height: n, alignVertical: h, alignHorizontal: u, sideOrientation: d }); let v = h; h === ke.BOTTOM ? 
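/* Illustrative usage sketch (not part of the bundle). The builder above is exposed as
   CreateTiledPlane (MeshBuilder.CreateTiledPlane in the unminified library); the pattern
   and alignment values are the Mesh constants the vertex-data function checks:

     const wall = BABYLON.MeshBuilder.CreateTiledPlane("wall", {
       width: 8, height: 3,
       tileWidth: 1, tileHeight: 1,
       pattern: BABYLON.Mesh.FLIP_ROW,             // mirror the tile UVs on alternate rows
       alignHorizontal: BABYLON.Mesh.LEFT,         // controls where any partial tiles end up
       alignVertical: BABYLON.Mesh.TOP,
       sideOrientation: BABYLON.Mesh.DOUBLESIDE
     }, scene);
*/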
v = ke.TOP : h === ke.TOP && (v = ke.BOTTOM); for (let j = 4; j < 6; j++) _[j] = uO({ pattern: r, tileWidth: l, tileHeight: o, width: s, height: a, alignVertical: v, alignHorizontal: u, sideOrientation: d }); let C = [], x = [], b = [], S = []; const M = [], R = [], w = [], V = []; let k = 0, L = 0; for (let j = 0; j < 6; j++) { const J = _[j].positions.length; R[j] = [], w[j] = []; for (let ne = 0; ne < J / 3; ne++) R[j].push(new D(_[j].positions[3 * ne], _[j].positions[3 * ne + 1], _[j].positions[3 * ne + 2])), w[j].push(new D(_[j].normals[3 * ne], _[j].normals[3 * ne + 1], _[j].normals[3 * ne + 2])); k = _[j].uvs.length, V[j] = []; for (let ne = 0; ne < k; ne += 2) V[j][ne] = t[j].x + (t[j].z - t[j].x) * _[j].uvs[ne], V[j][ne + 1] = t[j].y + (t[j].w - t[j].y) * _[j].uvs[ne + 1], hn.UseOpenGLOrientationForUV && (V[j][ne + 1] = 1 - V[j][ne + 1]); if (b = b.concat(V[j]), S = S.concat(_[j].indices.map((ne) => ne + L)), L += R[j].length, i) for (let ne = 0; ne < 4; ne++) M.push(i[j].r, i[j].g, i[j].b, i[j].a); } const B = new D(0, 0, m), U = Ae.RotationY(Math.PI); C = R[0].map((j) => D.TransformNormal(j, U).add(B)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), []), x = w[0].map((j) => D.TransformNormal(j, U)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), []), C = C.concat(R[1].map((j) => j.subtract(B)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), x = x.concat(w[1].map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])); const K = new D(f, 0, 0), ee = Ae.RotationY(-Math.PI / 2); C = C.concat(R[2].map((j) => D.TransformNormal(j, ee).add(K)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), x = x.concat(w[2].map((j) => D.TransformNormal(j, ee)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])); const Z = Ae.RotationY(Math.PI / 2); C = C.concat(R[3].map((j) => D.TransformNormal(j, Z).subtract(K)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), x = x.concat(w[3].map((j) => D.TransformNormal(j, Z)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])); const q = new D(0, p, 0), le = Ae.RotationX(Math.PI / 2); C = C.concat(R[4].map((j) => D.TransformNormal(j, le).add(q)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), x = x.concat(w[4].map((j) => D.TransformNormal(j, le)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])); const ie = Ae.RotationX(-Math.PI / 2); C = C.concat(R[5].map((j) => D.TransformNormal(j, ie).subtract(q)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), x = x.concat(w[5].map((j) => D.TransformNormal(j, ie)).map((j) => [j.x, j.y, j.z]).reduce((j, J) => j.concat(J), [])), Ot._ComputeSides(d, C, S, x, b); const $ = new Ot(); if ($.indices = S, $.positions = C, $.normals = x, $.uvs = b, i) { const j = d === Ot.DOUBLESIDE ? M.concat(M) : M; $.colors = j; } return $; } function WK(c, e, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, KK(e).applyToMesh(i, e.updatable), i; } const zde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateTiledBox: WK }; Ot.CreateTiledBox = KK; function jK(c) { const e = [], t = [], i = [], r = [], s = c.radius || 2, n = c.tube || 0.5, a = c.radialSegments || 32, l = c.tubularSegments || 32, o = c.p || 2, u = c.q || 3, h = c.sideOrientation === 0 ? 
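/* Illustrative usage sketch (not part of the bundle). The builder above is exposed as
   CreateTiledBox (MeshBuilder.CreateTiledBox in the unminified library); it reuses the
   tiled-plane builder for each face, so the same tile and pattern options apply:

     const crate = BABYLON.MeshBuilder.CreateTiledBox("crate", {
       size: 3,
       tileSize: 1,
       pattern: BABYLON.Mesh.FLIP_TILE,
       alignHorizontal: BABYLON.Mesh.CENTER,
       alignVertical: BABYLON.Mesh.CENTER
     }, scene);
*/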
0 : c.sideOrientation || Ot.DEFAULTSIDE, d = (_) => { const v = Math.cos(_), C = Math.sin(_), x = u / o * _, b = Math.cos(x), S = s * (2 + b) * 0.5 * v, M = s * (2 + b) * C * 0.5, R = s * Math.sin(x) * 0.5; return new D(S, M, R); }; let f, p; for (f = 0; f <= a; f++) { const v = f % a / a * 2 * o * Math.PI, C = d(v), x = d(v + 0.01), b = x.subtract(C); let S = x.add(C); const M = D.Cross(b, S); for (S = D.Cross(M, b), M.normalize(), S.normalize(), p = 0; p < l; p++) { const w = p % l / l * 2 * Math.PI, V = -n * Math.cos(w), k = n * Math.sin(w); t.push(C.x + V * S.x + k * M.x), t.push(C.y + V * S.y + k * M.y), t.push(C.z + V * S.z + k * M.z), r.push(f / a), r.push(hn.UseOpenGLOrientationForUV ? 1 - p / l : p / l); } } for (f = 0; f < a; f++) for (p = 0; p < l; p++) { const _ = (p + 1) % l, v = f * l + p, C = (f + 1) * l + p, x = (f + 1) * l + _, b = f * l + _; e.push(b), e.push(C), e.push(v), e.push(b), e.push(x), e.push(C); } Ot.ComputeNormals(t, e, i), Ot._ComputeSides(h, t, e, i, r, c.frontUVs, c.backUVs); const m = new Ot(); return m.indices = e, m.positions = t, m.normals = i, m.uvs = r, m; } function fU(c, e = {}, t) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, jK(e).applyToMesh(i, e.updatable), i; } const Hde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateTorusKnot: fU }; Ot.CreateTorusKnot = jK; ke.CreateTorusKnot = (c, e, t, i, r, s, n, a, l, o) => fU(c, { radius: e, tube: t, radialSegments: i, tubularSegments: r, p: s, q: n, sideOrientation: o, updatable: l }, a); const kk = { effect: null, subMesh: null }; class Lo extends fl { /** * Instantiate a new shader material. * The ShaderMaterial object has the necessary methods to pass data from your scene to the Vertex and Fragment Shaders and returns a material that can be applied to any mesh. * This returned material effects how the mesh will look based on the code in the shaders. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/shaders/shaderMaterial * @param name Define the name of the material in the scene * @param scene Define the scene the material belongs to * @param shaderPath Defines the route to the shader code in one of three ways: * * object: \{ vertex: "custom", fragment: "custom" \}, used with Effect.ShadersStore["customVertexShader"] and Effect.ShadersStore["customFragmentShader"] * * object: \{ vertexElement: "vertexShaderCode", fragmentElement: "fragmentShaderCode" \}, used with shader code in script tags * * object: \{ vertexSource: "vertex shader code string", fragmentSource: "fragment shader code string" \} using with strings containing the shaders code * * string: "./COMMON_NAME", used with external files COMMON_NAME.vertex.fx and COMMON_NAME.fragment.fx in index.html folder. * @param options Define the options used to create the shader * @param storeEffectOnSubMeshes true to store effect on submeshes, false to store the effect directly in the material class. 
*/ constructor(e, t, i, r = {}, s = !0) { super(e, t, s), this._textures = {}, this._textureArrays = {}, this._externalTextures = {}, this._floats = {}, this._ints = {}, this._uints = {}, this._floatsArrays = {}, this._colors3 = {}, this._colors3Arrays = {}, this._colors4 = {}, this._colors4Arrays = {}, this._vectors2 = {}, this._vectors3 = {}, this._vectors4 = {}, this._quaternions = {}, this._quaternionsArrays = {}, this._matrices = {}, this._matrixArrays = {}, this._matrices3x3 = {}, this._matrices2x2 = {}, this._vectors2Arrays = {}, this._vectors3Arrays = {}, this._vectors4Arrays = {}, this._uniformBuffers = {}, this._textureSamplers = {}, this._storageBuffers = {}, this._cachedWorldViewMatrix = new Ae(), this._cachedWorldViewProjectionMatrix = new Ae(), this._multiview = !1, this._materialHelperNeedsPreviousMatrices = !1, this._shaderPath = i, this._options = Object.assign({ needAlphaBlending: !1, needAlphaTesting: !1, attributes: ["position", "normal", "uv"], uniforms: ["worldViewProjection"], uniformBuffers: [], samplers: [], externalTextures: [], samplerObjects: [], storageBuffers: [], defines: [], useClipPlane: !1 }, r); } /** * Gets the shader path used to define the shader code * It can be modified to trigger a new compilation */ get shaderPath() { return this._shaderPath; } /** * Sets the shader path used to define the shader code * It can be modified to trigger a new compilation */ set shaderPath(e) { this._shaderPath = e; } /** * Gets the options used to compile the shader. * They can be modified to trigger a new compilation */ get options() { return this._options; } /** * is multiview set to true? */ get isMultiview() { return this._multiview; } /** * Gets the current class name of the material e.g. "ShaderMaterial" * Mainly use in serialization. * @returns the class name */ getClassName() { return "ShaderMaterial"; } /** * Specifies if the material will require alpha blending * @returns a boolean specifying if alpha blending is needed */ needAlphaBlending() { return this.alpha < 1 || this._options.needAlphaBlending; } /** * Specifies if this material should be rendered in alpha test mode * @returns a boolean specifying if an alpha test is needed. */ needAlphaTesting() { return this._options.needAlphaTesting; } _checkUniform(e) { this._options.uniforms.indexOf(e) === -1 && this._options.uniforms.push(e); } /** * Set a texture in the shader. * @param name Define the name of the uniform samplers as defined in the shader * @param texture Define the texture to bind to this sampler * @returns the material itself allowing "fluent" like uniform updates */ setTexture(e, t) { return this._options.samplers.indexOf(e) === -1 && this._options.samplers.push(e), this._textures[e] = t, this; } /** * Set a texture array in the shader. * @param name Define the name of the uniform sampler array as defined in the shader * @param textures Define the list of textures to bind to this sampler * @returns the material itself allowing "fluent" like uniform updates */ setTextureArray(e, t) { return this._options.samplers.indexOf(e) === -1 && this._options.samplers.push(e), this._checkUniform(e), this._textureArrays[e] = t, this; } /** * Set an internal texture in the shader. 
* @param name Define the name of the uniform samplers as defined in the shader * @param texture Define the texture to bind to this sampler * @returns the material itself allowing "fluent" like uniform updates */ setExternalTexture(e, t) { return this._options.externalTextures.indexOf(e) === -1 && this._options.externalTextures.push(e), this._externalTextures[e] = t, this; } /** * Set a float in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setFloat(e, t) { return this._checkUniform(e), this._floats[e] = t, this; } /** * Set a int in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setInt(e, t) { return this._checkUniform(e), this._ints[e] = t, this; } /** * Set a unsigned int in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setUInt(e, t) { return this._checkUniform(e), this._uints[e] = t, this; } /** * Set an array of floats in the shader. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setFloats(e, t) { return this._checkUniform(e), this._floatsArrays[e] = t, this; } /** * Set a vec3 in the shader from a Color3. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setColor3(e, t) { return this._checkUniform(e), this._colors3[e] = t, this; } /** * Set a vec3 array in the shader from a Color3 array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setColor3Array(e, t) { return this._checkUniform(e), this._colors3Arrays[e] = t.reduce((i, r) => (r.toArray(i, i.length), i), []), this; } /** * Set a vec4 in the shader from a Color4. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setColor4(e, t) { return this._checkUniform(e), this._colors4[e] = t, this; } /** * Set a vec4 array in the shader from a Color4 array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setColor4Array(e, t) { return this._checkUniform(e), this._colors4Arrays[e] = t.reduce((i, r) => (r.toArray(i, i.length), i), []), this; } /** * Set a vec2 in the shader from a Vector2. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setVector2(e, t) { return this._checkUniform(e), this._vectors2[e] = t, this; } /** * Set a vec3 in the shader from a Vector3. 
* @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setVector3(e, t) { return this._checkUniform(e), this._vectors3[e] = t, this; } /** * Set a vec4 in the shader from a Vector4. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setVector4(e, t) { return this._checkUniform(e), this._vectors4[e] = t, this; } /** * Set a vec4 in the shader from a Quaternion. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setQuaternion(e, t) { return this._checkUniform(e), this._quaternions[e] = t, this; } /** * Set a vec4 array in the shader from a Quaternion array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setQuaternionArray(e, t) { return this._checkUniform(e), this._quaternionsArrays[e] = t.reduce((i, r) => (r.toArray(i, i.length), i), []), this; } /** * Set a mat4 in the shader from a Matrix. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setMatrix(e, t) { return this._checkUniform(e), this._matrices[e] = t, this; } /** * Set a float32Array in the shader from a matrix array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setMatrices(e, t) { this._checkUniform(e); const i = new Float32Array(t.length * 16); for (let r = 0; r < t.length; r++) t[r].copyToArray(i, r * 16); return this._matrixArrays[e] = i, this; } /** * Set a mat3 in the shader from a Float32Array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setMatrix3x3(e, t) { return this._checkUniform(e), this._matrices3x3[e] = t, this; } /** * Set a mat2 in the shader from a Float32Array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setMatrix2x2(e, t) { return this._checkUniform(e), this._matrices2x2[e] = t, this; } /** * Set a vec2 array in the shader from a number array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setArray2(e, t) { return this._checkUniform(e), this._vectors2Arrays[e] = t, this; } /** * Set a vec3 array in the shader from a number array. * @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setArray3(e, t) { return this._checkUniform(e), this._vectors3Arrays[e] = t, this; } /** * Set a vec4 array in the shader from a number array. 
* @param name Define the name of the uniform as defined in the shader * @param value Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setArray4(e, t) { return this._checkUniform(e), this._vectors4Arrays[e] = t, this; } /** * Set a uniform buffer in the shader * @param name Define the name of the uniform as defined in the shader * @param buffer Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setUniformBuffer(e, t) { return this._options.uniformBuffers.indexOf(e) === -1 && this._options.uniformBuffers.push(e), this._uniformBuffers[e] = t, this; } /** * Set a texture sampler in the shader * @param name Define the name of the uniform as defined in the shader * @param sampler Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setTextureSampler(e, t) { return this._options.samplerObjects.indexOf(e) === -1 && this._options.samplerObjects.push(e), this._textureSamplers[e] = t, this; } /** * Set a storage buffer in the shader * @param name Define the name of the storage buffer as defined in the shader * @param buffer Define the value to give to the uniform * @returns the material itself allowing "fluent" like uniform updates */ setStorageBuffer(e, t) { return this._options.storageBuffers.indexOf(e) === -1 && this._options.storageBuffers.push(e), this._storageBuffers[e] = t, this; } /** * Adds, removes, or replaces the specified shader define and value. * * setDefine("MY_DEFINE", true); // enables a boolean define * * setDefine("MY_DEFINE", "0.5"); // adds "#define MY_DEFINE 0.5" to the shader (or sets and replaces the value of any existing define with that name) * * setDefine("MY_DEFINE", false); // disables and removes the define * Note if the active defines do change, the shader will be recompiled and this can be expensive. * @param define the define name e.g., "OUTPUT_TO_SRGB" or "#define OUTPUT_TO_SRGB". If the define was passed into the constructor already, the version used should match that, and in either case, it should not include any appended value. * @param value either the value of the define (e.g. 
a numerical value) or for booleans, true if the define should be enabled or false if it should be disabled * @returns the material itself allowing "fluent" like uniform updates */ setDefine(e, t) { const i = e.trimEnd() + " ", r = this.options.defines.findIndex((s) => s === e || s.startsWith(i)); return r >= 0 && this.options.defines.splice(r, 1), (typeof t != "boolean" || t) && this.options.defines.push(i + t), this; } /** * Specifies that the submesh is ready to be used * @param mesh defines the mesh to check * @param subMesh defines which submesh to check * @param useInstances specifies that instances should be used * @returns a boolean indicating that the submesh is ready or not */ isReadyForSubMesh(e, t, i) { return this.isReady(e, i, t); } /** * Checks if the material is ready to render the requested mesh * @param mesh Define the mesh to render * @param useInstances Define whether or not the material is used with instances * @param subMesh defines which submesh to render * @returns true if ready, otherwise false */ isReady(e, t, i) { var r, s, n, a; const l = i && this._storeEffectOnSubMeshes; if (this.isFrozen) if (l) { if (i.effect && i.effect._wasPreviouslyReady) return !0; } else { const V = this._drawWrapper.effect; if (V && V._wasPreviouslyReady && V._wasPreviouslyUsingInstances === t) return !0; } const o = this.getScene(), u = o.getEngine(), h = [], d = [], f = new pl(); let p = this._shaderPath, m = this._options.uniforms, _ = this._options.uniformBuffers, v = this._options.samplers; u.getCaps().multiview && o.activeCamera && o.activeCamera.outputRenderTarget && o.activeCamera.outputRenderTarget.getViewCount() > 1 && (this._multiview = !0, h.push("#define MULTIVIEW"), this._options.uniforms.indexOf("viewProjection") !== -1 && this._options.uniforms.indexOf("viewProjectionR") === -1 && this._options.uniforms.push("viewProjectionR")); for (let V = 0; V < this._options.defines.length; V++) { const k = this._options.defines[V].indexOf("#define") === 0 ? this._options.defines[V] : `#define ${this._options.defines[V]}`; h.push(k); } for (let V = 0; V < this._options.attributes.length; V++) d.push(this._options.attributes[V]); if (e && e.isVerticesDataPresent(Y.ColorKind) && (d.indexOf(Y.ColorKind) === -1 && d.push(Y.ColorKind), h.push("#define VERTEXCOLOR")), t && (h.push("#define INSTANCES"), Ke.PushAttributesForInstances(d, this._materialHelperNeedsPreviousMatrices), e != null && e.hasThinInstances && (h.push("#define THIN_INSTANCES"), e && e.isVerticesDataPresent(Y.ColorInstanceKind) && (d.push(Y.ColorInstanceKind), h.push("#define INSTANCESCOLOR")))), e && e.useBones && e.computeBonesUsingShaders && e.skeleton) { d.push(Y.MatricesIndicesKind), d.push(Y.MatricesWeightsKind), e.numBoneInfluencers > 4 && (d.push(Y.MatricesIndicesExtraKind), d.push(Y.MatricesWeightsExtraKind)); const V = e.skeleton; h.push("#define NUM_BONE_INFLUENCERS " + e.numBoneInfluencers), f.addCPUSkinningFallback(0, e), V.isUsingTextureForMatrices ? (h.push("#define BONETEXTURE"), this._options.uniforms.indexOf("boneTextureWidth") === -1 && this._options.uniforms.push("boneTextureWidth"), this._options.samplers.indexOf("boneSampler") === -1 && this._options.samplers.push("boneSampler")) : (h.push("#define BonesPerMesh " + (V.bones.length + 1)), this._options.uniforms.indexOf("mBones") === -1 && this._options.uniforms.push("mBones")); } else h.push("#define NUM_BONE_INFLUENCERS 0"); let C = 0; const x = e ? 
e.morphTargetManager : null; if (x) { const V = x.supportsUVs && h.indexOf("#define UV1") !== -1, k = x.supportsTangents && h.indexOf("#define TANGENT") !== -1, L = x.supportsNormals && h.indexOf("#define NORMAL") !== -1; C = x.numInfluencers, V && h.push("#define MORPHTARGETS_UV"), k && h.push("#define MORPHTARGETS_TANGENT"), L && h.push("#define MORPHTARGETS_NORMAL"), C > 0 && h.push("#define MORPHTARGETS"), x.isUsingTextureForTargets && (h.push("#define MORPHTARGETS_TEXTURE"), this._options.uniforms.indexOf("morphTargetTextureIndices") === -1 && this._options.uniforms.push("morphTargetTextureIndices"), this._options.samplers.indexOf("morphTargets") === -1 && this._options.samplers.push("morphTargets")), h.push("#define NUM_MORPH_INFLUENCERS " + C); for (let B = 0; B < C; B++) d.push(Y.PositionKind + B), L && d.push(Y.NormalKind + B), k && d.push(Y.TangentKind + B), V && d.push(Y.UVKind + "_" + B); C > 0 && (m = m.slice(), m.push("morphTargetInfluences"), m.push("morphTargetTextureInfo"), m.push("morphTargetTextureIndices")); } else h.push("#define NUM_MORPH_INFLUENCERS 0"); if (e) { const V = e.bakedVertexAnimationManager; V && V.isEnabled && (h.push("#define BAKED_VERTEX_ANIMATION_TEXTURE"), this._options.uniforms.indexOf("bakedVertexAnimationSettings") === -1 && this._options.uniforms.push("bakedVertexAnimationSettings"), this._options.uniforms.indexOf("bakedVertexAnimationTextureSizeInverted") === -1 && this._options.uniforms.push("bakedVertexAnimationTextureSizeInverted"), this._options.uniforms.indexOf("bakedVertexAnimationTime") === -1 && this._options.uniforms.push("bakedVertexAnimationTime"), this._options.samplers.indexOf("bakedVertexAnimationTexture") === -1 && this._options.samplers.push("bakedVertexAnimationTexture")), Ke.PrepareAttributesForBakedVertexAnimation(d, e, h); } for (const V in this._textures) if (!this._textures[V].isReady()) return !1; e && this._shouldTurnAlphaTestOn(e) && h.push("#define ALPHATEST"), this._options.useClipPlane !== !1 && (Gc(m), bT(this, o, h)), this._useLogarithmicDepth && (h.push("#define LOGARITHMICDEPTH"), this._options.uniforms.indexOf("logarithmicDepthConstant") === -1 && this._options.uniforms.push("logarithmicDepthConstant")), this.customShaderNameResolve && (m = m.slice(), _ = _.slice(), v = v.slice(), p = this.customShaderNameResolve(p, m, _, v, h, d)); const b = l ? i._getDrawWrapper() : this._drawWrapper, S = (r = b == null ? void 0 : b.effect) !== null && r !== void 0 ? r : null, M = (s = b == null ? void 0 : b.defines) !== null && s !== void 0 ? s : null, R = h.join(` `); let w = S; return M !== R && (w = u.createEffect(p, { attributes: d, uniformsNames: m, uniformBuffersNames: _, samplers: v, defines: R, fallbacks: f, onCompiled: this.onCompiled, onError: this.onError, indexParameters: { maxSimultaneousMorphTargets: C }, shaderLanguage: this._options.shaderLanguage }, u), l ? i.setEffect(w, R, this._materialContext) : b && b.setEffect(w, R), this._onEffectCreatedObservable && (kk.effect = w, kk.subMesh = (n = i ?? (e == null ? void 0 : e.subMeshes[0])) !== null && n !== void 0 ? n : null, this._onEffectCreatedObservable.notifyObservers(kk))), w._wasPreviouslyUsingInstances = !!t, !((a = !(w != null && w.isReady())) !== null && a !== void 0) || a ? 
!1 : (S !== w && o.resetCachedMaterial(), w._wasPreviouslyReady = !0, !0); } /** * Binds the world matrix to the material * @param world defines the world transformation matrix * @param effectOverride - If provided, use this effect instead of internal effect */ bindOnlyWorldMatrix(e, t) { const i = this.getScene(), r = t ?? this.getEffect(); r && (this._options.uniforms.indexOf("world") !== -1 && r.setMatrix("world", e), this._options.uniforms.indexOf("worldView") !== -1 && (e.multiplyToRef(i.getViewMatrix(), this._cachedWorldViewMatrix), r.setMatrix("worldView", this._cachedWorldViewMatrix)), this._options.uniforms.indexOf("worldViewProjection") !== -1 && (e.multiplyToRef(i.getTransformMatrix(), this._cachedWorldViewProjectionMatrix), r.setMatrix("worldViewProjection", this._cachedWorldViewProjectionMatrix))); } /** * Binds the submesh to this material by preparing the effect and shader to draw * @param world defines the world transformation matrix * @param mesh defines the mesh containing the submesh * @param subMesh defines the submesh to bind the material to */ bindForSubMesh(e, t, i) { var r; this.bind(e, t, (r = i._drawWrapperOverride) === null || r === void 0 ? void 0 : r.effect, i); } /** * Binds the material to the mesh * @param world defines the world transformation matrix * @param mesh defines the mesh to bind the material to * @param effectOverride - If provided, use this effect instead of internal effect * @param subMesh defines the submesh to bind the material to */ bind(e, t, i, r) { var s; const n = r && this._storeEffectOnSubMeshes, a = i ?? (n ? r.effect : this.getEffect()); if (!a) return; const l = this.getScene(); this._activeEffect = a, this.bindOnlyWorldMatrix(e, i); const o = this._options.uniformBuffers; let u = !1; if (a && o && o.length > 0 && l.getEngine().supportsUniformBuffers) for (let d = 0; d < o.length; ++d) switch (o[d]) { case "Mesh": t && (t.getMeshUniformBuffer().bindToEffect(a, "Mesh"), t.transferToEffect(e)); break; case "Scene": Ke.BindSceneUniformBuffer(a, l.getSceneUniformBuffer()), l.finalizeSceneUbo(), u = !0; break; } const h = t && n ? this._mustRebind(l, a, t.visibility) : l.getCachedMaterial() !== this; if (a && h) { !u && this._options.uniforms.indexOf("view") !== -1 && a.setMatrix("view", l.getViewMatrix()), !u && this._options.uniforms.indexOf("projection") !== -1 && a.setMatrix("projection", l.getProjectionMatrix()), !u && this._options.uniforms.indexOf("viewProjection") !== -1 && (a.setMatrix("viewProjection", l.getTransformMatrix()), this._multiview && a.setMatrix("viewProjectionR", l._transformMatrixR)), l.activeCamera && this._options.uniforms.indexOf("cameraPosition") !== -1 && a.setVector3("cameraPosition", l.activeCamera.globalPosition), Ke.BindBonesParameters(t, a), Ec(a, this, l), this._useLogarithmicDepth && Ke.BindLogDepth(n ? 
r.materialDefines : a.defines, a, l); let d; for (d in this._textures) a.setTexture(d, this._textures[d]); for (d in this._textureArrays) a.setTextureArray(d, this._textureArrays[d]); for (d in this._externalTextures) a.setExternalTexture(d, this._externalTextures[d]); for (d in this._ints) a.setInt(d, this._ints[d]); for (d in this._uints) a.setUInt(d, this._uints[d]); for (d in this._floats) a.setFloat(d, this._floats[d]); for (d in this._floatsArrays) a.setArray(d, this._floatsArrays[d]); for (d in this._colors3) a.setColor3(d, this._colors3[d]); for (d in this._colors3Arrays) a.setArray3(d, this._colors3Arrays[d]); for (d in this._colors4) { const f = this._colors4[d]; a.setFloat4(d, f.r, f.g, f.b, f.a); } for (d in this._colors4Arrays) a.setArray4(d, this._colors4Arrays[d]); for (d in this._vectors2) a.setVector2(d, this._vectors2[d]); for (d in this._vectors3) a.setVector3(d, this._vectors3[d]); for (d in this._vectors4) a.setVector4(d, this._vectors4[d]); for (d in this._quaternions) a.setQuaternion(d, this._quaternions[d]); for (d in this._matrices) a.setMatrix(d, this._matrices[d]); for (d in this._matrixArrays) a.setMatrices(d, this._matrixArrays[d]); for (d in this._matrices3x3) a.setMatrix3x3(d, this._matrices3x3[d]); for (d in this._matrices2x2) a.setMatrix2x2(d, this._matrices2x2[d]); for (d in this._vectors2Arrays) a.setArray2(d, this._vectors2Arrays[d]); for (d in this._vectors3Arrays) a.setArray3(d, this._vectors3Arrays[d]); for (d in this._vectors4Arrays) a.setArray4(d, this._vectors4Arrays[d]); for (d in this._quaternionsArrays) a.setArray4(d, this._quaternionsArrays[d]); for (d in this._uniformBuffers) { const f = this._uniformBuffers[d].getBuffer(); f && a.bindUniformBuffer(f, d); } for (d in this._textureSamplers) a.setTextureSampler(d, this._textureSamplers[d]); for (d in this._storageBuffers) a.setStorageBuffer(d, this._storageBuffers[d]); } if (a && t && (h || !this.isFrozen)) { const d = t.morphTargetManager; d && d.numInfluencers > 0 && Ke.BindMorphTargetParameters(t, a); const f = t.bakedVertexAnimationManager; f && f.isEnabled && ((s = t.bakedVertexAnimationManager) === null || s === void 0 || s.bind(a, !!a._wasPreviouslyUsingInstances)); } this._afterBind(t, a); } /** * Gets the active textures from the material * @returns an array of textures */ getActiveTextures() { const e = super.getActiveTextures(); for (const t in this._textures) e.push(this._textures[t]); for (const t in this._textureArrays) { const i = this._textureArrays[t]; for (let r = 0; r < i.length; r++) e.push(i[r]); } return e; } /** * Specifies if the material uses a texture * @param texture defines the texture to check against the material * @returns a boolean specifying if the material uses the texture */ hasTexture(e) { if (super.hasTexture(e)) return !0; for (const t in this._textures) if (this._textures[t] === e) return !0; for (const t in this._textureArrays) { const i = this._textureArrays[t]; for (let r = 0; r < i.length; r++) if (i[r] === e) return !0; } return !1; } /** * Makes a duplicate of the material, and gives it a new name * @param name defines the new name for the duplicated material * @returns the cloned material */ clone(e) { const t = St.Clone(() => new Lo(e, this.getScene(), this._shaderPath, this._options, this._storeEffectOnSubMeshes), this); t.name = e, t.id = e, typeof t._shaderPath == "object" && (t._shaderPath = Object.assign({}, t._shaderPath)), this._options = Object.assign({}, this._options), Object.keys(this._options).forEach((i) => { const r = this._options[i]; 
Array.isArray(r) && (this._options[i] = r.slice(0)); }), this.stencil.copyTo(t.stencil); for (const i in this._textures) t.setTexture(i, this._textures[i]); for (const i in this._textureArrays) t.setTextureArray(i, this._textureArrays[i]); for (const i in this._externalTextures) t.setExternalTexture(i, this._externalTextures[i]); for (const i in this._ints) t.setInt(i, this._ints[i]); for (const i in this._uints) t.setUInt(i, this._uints[i]); for (const i in this._floats) t.setFloat(i, this._floats[i]); for (const i in this._floatsArrays) t.setFloats(i, this._floatsArrays[i]); for (const i in this._colors3) t.setColor3(i, this._colors3[i]); for (const i in this._colors3Arrays) t._colors3Arrays[i] = this._colors3Arrays[i]; for (const i in this._colors4) t.setColor4(i, this._colors4[i]); for (const i in this._colors4Arrays) t._colors4Arrays[i] = this._colors4Arrays[i]; for (const i in this._vectors2) t.setVector2(i, this._vectors2[i]); for (const i in this._vectors3) t.setVector3(i, this._vectors3[i]); for (const i in this._vectors4) t.setVector4(i, this._vectors4[i]); for (const i in this._quaternions) t.setQuaternion(i, this._quaternions[i]); for (const i in this._quaternionsArrays) t._quaternionsArrays[i] = this._quaternionsArrays[i]; for (const i in this._matrices) t.setMatrix(i, this._matrices[i]); for (const i in this._matrixArrays) t._matrixArrays[i] = this._matrixArrays[i].slice(); for (const i in this._matrices3x3) t.setMatrix3x3(i, this._matrices3x3[i]); for (const i in this._matrices2x2) t.setMatrix2x2(i, this._matrices2x2[i]); for (const i in this._vectors2Arrays) t.setArray2(i, this._vectors2Arrays[i]); for (const i in this._vectors3Arrays) t.setArray3(i, this._vectors3Arrays[i]); for (const i in this._vectors4Arrays) t.setArray4(i, this._vectors4Arrays[i]); for (const i in this._uniformBuffers) t.setUniformBuffer(i, this._uniformBuffers[i]); for (const i in this._textureSamplers) t.setTextureSampler(i, this._textureSamplers[i]); for (const i in this._storageBuffers) t.setStorageBuffer(i, this._storageBuffers[i]); return t; } /** * Disposes the material * @param forceDisposeEffect specifies if effects should be forcefully disposed * @param forceDisposeTextures specifies if textures should be forcefully disposed * @param notBoundToMesh specifies if the material that is being disposed is known to be not bound to any mesh */ dispose(e, t, i) { if (t) { let r; for (r in this._textures) this._textures[r].dispose(); for (r in this._textureArrays) { const s = this._textureArrays[r]; for (let n = 0; n < s.length; n++) s[n].dispose(); } } this._textures = {}, super.dispose(e, t, i); } /** * Serializes this material in a JSON representation * @returns the serialized material object */ serialize() { const e = St.Serialize(this); e.customType = "BABYLON.ShaderMaterial", e.uniqueId = this.uniqueId, e.options = this._options, e.shaderPath = this._shaderPath, e.storeEffectOnSubMeshes = this._storeEffectOnSubMeshes; let t; e.stencil = this.stencil.serialize(), e.textures = {}; for (t in this._textures) e.textures[t] = this._textures[t].serialize(); e.textureArrays = {}; for (t in this._textureArrays) { e.textureArrays[t] = []; const i = this._textureArrays[t]; for (let r = 0; r < i.length; r++) e.textureArrays[t].push(i[r].serialize()); } e.ints = {}; for (t in this._ints) e.ints[t] = this._ints[t]; e.uints = {}; for (t in this._uints) e.uints[t] = this._uints[t]; e.floats = {}; for (t in this._floats) e.floats[t] = this._floats[t]; e.FloatArrays = {}; for (t in this._floatsArrays) 
e.FloatArrays[t] = this._floatsArrays[t]; e.colors3 = {}; for (t in this._colors3) e.colors3[t] = this._colors3[t].asArray(); e.colors3Arrays = {}; for (t in this._colors3Arrays) e.colors3Arrays[t] = this._colors3Arrays[t]; e.colors4 = {}; for (t in this._colors4) e.colors4[t] = this._colors4[t].asArray(); e.colors4Arrays = {}; for (t in this._colors4Arrays) e.colors4Arrays[t] = this._colors4Arrays[t]; e.vectors2 = {}; for (t in this._vectors2) e.vectors2[t] = this._vectors2[t].asArray(); e.vectors3 = {}; for (t in this._vectors3) e.vectors3[t] = this._vectors3[t].asArray(); e.vectors4 = {}; for (t in this._vectors4) e.vectors4[t] = this._vectors4[t].asArray(); e.quaternions = {}; for (t in this._quaternions) e.quaternions[t] = this._quaternions[t].asArray(); e.matrices = {}; for (t in this._matrices) e.matrices[t] = this._matrices[t].asArray(); e.matrixArray = {}; for (t in this._matrixArrays) e.matrixArray[t] = this._matrixArrays[t]; e.matrices3x3 = {}; for (t in this._matrices3x3) e.matrices3x3[t] = this._matrices3x3[t]; e.matrices2x2 = {}; for (t in this._matrices2x2) e.matrices2x2[t] = this._matrices2x2[t]; e.vectors2Arrays = {}; for (t in this._vectors2Arrays) e.vectors2Arrays[t] = this._vectors2Arrays[t]; e.vectors3Arrays = {}; for (t in this._vectors3Arrays) e.vectors3Arrays[t] = this._vectors3Arrays[t]; e.vectors4Arrays = {}; for (t in this._vectors4Arrays) e.vectors4Arrays[t] = this._vectors4Arrays[t]; e.quaternionsArrays = {}; for (t in this._quaternionsArrays) e.quaternionsArrays[t] = this._quaternionsArrays[t]; return e; } /** * Creates a shader material from parsed shader material data * @param source defines the JSON representation of the material * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a new material */ static Parse(e, t, i) { const r = St.Parse(() => new Lo(e.name, t, e.shaderPath, e.options, e.storeEffectOnSubMeshes), e, t, i); let s; e.stencil && r.stencil.parse(e.stencil, t, i); for (s in e.textures) r.setTexture(s, De.Parse(e.textures[s], t, i)); for (s in e.textureArrays) { const n = e.textureArrays[s], a = []; for (let l = 0; l < n.length; l++) a.push(De.Parse(n[l], t, i)); r.setTextureArray(s, a); } for (s in e.ints) r.setInt(s, e.ints[s]); for (s in e.uints) r.setUInt(s, e.uints[s]); for (s in e.floats) r.setFloat(s, e.floats[s]); for (s in e.floatsArrays) r.setFloats(s, e.floatsArrays[s]); for (s in e.colors3) r.setColor3(s, ze.FromArray(e.colors3[s])); for (s in e.colors3Arrays) { const n = e.colors3Arrays[s].reduce((a, l, o) => (o % 3 === 0 ? a.push([l]) : a[a.length - 1].push(l), a), []).map((a) => ze.FromArray(a)); r.setColor3Array(s, n); } for (s in e.colors4) r.setColor4(s, Et.FromArray(e.colors4[s])); for (s in e.colors4Arrays) { const n = e.colors4Arrays[s].reduce((a, l, o) => (o % 4 === 0 ? 
a.push([l]) : a[a.length - 1].push(l), a), []).map((a) => Et.FromArray(a)); r.setColor4Array(s, n); } for (s in e.vectors2) r.setVector2(s, at.FromArray(e.vectors2[s])); for (s in e.vectors3) r.setVector3(s, D.FromArray(e.vectors3[s])); for (s in e.vectors4) r.setVector4(s, Di.FromArray(e.vectors4[s])); for (s in e.quaternions) r.setQuaternion(s, Ze.FromArray(e.quaternions[s])); for (s in e.matrices) r.setMatrix(s, Ae.FromArray(e.matrices[s])); for (s in e.matrixArray) r._matrixArrays[s] = new Float32Array(e.matrixArray[s]); for (s in e.matrices3x3) r.setMatrix3x3(s, e.matrices3x3[s]); for (s in e.matrices2x2) r.setMatrix2x2(s, e.matrices2x2[s]); for (s in e.vectors2Arrays) r.setArray2(s, e.vectors2Arrays[s]); for (s in e.vectors3Arrays) r.setArray3(s, e.vectors3Arrays[s]); for (s in e.vectors4Arrays) r.setArray4(s, e.vectors4Arrays[s]); for (s in e.quaternionsArrays) r.setArray4(s, e.quaternionsArrays[s]); return r; } /** * Creates a new ShaderMaterial from a snippet saved in a remote file * @param name defines the name of the ShaderMaterial to create (can be null or empty to use the one from the json data) * @param url defines the url to load from * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a promise that will resolve to the new ShaderMaterial */ static ParseFromFileAsync(e, t, i, r = "") { return new Promise((s, n) => { const a = new go(); a.addEventListener("readystatechange", () => { if (a.readyState == 4) if (a.status == 200) { const l = JSON.parse(a.responseText), o = this.Parse(l, i || gi.LastCreatedScene, r); e && (o.name = e), s(o); } else n("Unable to load the ShaderMaterial"); }), a.open("GET", t), a.send(); }); } /** * Creates a ShaderMaterial from a snippet saved by the Inspector * @param snippetId defines the snippet to load * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a promise that will resolve to the new ShaderMaterial */ static ParseFromSnippetAsync(e, t, i = "") { return new Promise((r, s) => { const n = new go(); n.addEventListener("readystatechange", () => { if (n.readyState == 4) if (n.status == 200) { const a = JSON.parse(JSON.parse(n.responseText).jsonPayload), l = JSON.parse(a.shaderMaterial), o = this.Parse(l, t || gi.LastCreatedScene, i); o.snippetId = e, r(o); } else s("Unable to load the snippet " + e); }), n.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), n.send(); }); } } Lo.SnippetUrl = "https://snippet.babylonjs.com"; Lo.CreateFromSnippetAsync = Lo.ParseFromSnippetAsync; Be("BABYLON.ShaderMaterial", Lo); const Gde = "colorPixelShader", Kde = `#if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) #define VERTEXCOLOR varying vec4 vColor; #else uniform vec4 color; #endif #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) gl_FragColor=vColor; #else gl_FragColor=color; #endif #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[Gde] = Kde; const Wde = "colorVertexShader", jde = `attribute vec3 position; #ifdef VERTEXCOLOR attribute vec4 color; #endif #include #include #include #include uniform mat4 viewProjection; #ifdef MULTIVIEW uniform mat4 viewProjectionR; #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) varying vec4 vColor; #endif #define CUSTOM_VERTEX_DEFINITIONS void main(void) 
{ #define CUSTOM_VERTEX_MAIN_BEGIN #include #include #include vec4 worldPos=finalWorld*vec4(position,1.0); #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {gl_Position=viewProjection*worldPos;} else {gl_Position=viewProjectionR*worldPos;} #else gl_Position=viewProjection*worldPos; #endif #include #include #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[Wde] = jde; ke._LinesMeshParser = (c, e) => Ag.Parse(c, e); class Ag extends ke { _isShaderMaterial(e) { return e.getClassName() === "ShaderMaterial"; } /** * Creates a new LinesMesh * @param name defines the name * @param scene defines the hosting scene * @param parent defines the parent mesh if any * @param source defines the optional source LinesMesh used to clone data from * @param doNotCloneChildren When cloning, skip cloning child meshes of source (default: false). * When false, each child mesh of the source is cloned recursively, i.e. by calling clone() on it with the same flag. * @param useVertexColor defines if this LinesMesh supports vertex color * @param useVertexAlpha defines if this LinesMesh supports vertex alpha * @param material material to use to draw the lines. If not provided, a new one will be created */ constructor(e, t = null, i = null, r = null, s, n, a, l) { super(e, t, i, r, s), this.useVertexColor = n, this.useVertexAlpha = a, this.color = new ze(1, 1, 1), this.alpha = 1, r && (this.color = r.color.clone(), this.alpha = r.alpha, this.useVertexColor = r.useVertexColor, this.useVertexAlpha = r.useVertexAlpha), this.intersectionThreshold = 0.1; const o = [], u = { attributes: [Y.PositionKind], uniforms: ["world", "viewProjection"], needAlphaBlending: !0, defines: o, useClipPlane: null }; a === !1 ? u.needAlphaBlending = !1 : u.defines.push("#define VERTEXALPHA"), n ? (u.defines.push("#define VERTEXCOLOR"), u.attributes.push(Y.ColorKind)) : (u.uniforms.push("color"), this._color4 = new Et()), l ? this.material = l : (this.material = new Lo("colorShader", this.getScene(), "color", u, !1), this.material.doNotSerialize = !0); } isReady() { return this._lineMaterial.isReady(this, !!this._userInstancedBuffersStorage || this.hasThinInstances) ? super.isReady() : !1; } /** * Returns the string "LinesMesh" */ getClassName() { return "LinesMesh"; } /** * @internal */ get material() { return this._lineMaterial; } /** * @internal */ set material(e) { this._lineMaterial = e, this._lineMaterial.fillMode = At.LineListDrawMode; } /** * @internal */ get checkCollisions() { return !1; } set checkCollisions(e) { } /** * @internal */ _bind(e, t) { if (!this._geometry) return this; const i = this.isUnIndexed ? null : this._geometry.getIndexBuffer(); if (!this._userInstancedBuffersStorage || this.hasThinInstances ? this._geometry._bind(t, i) : this._geometry._bind(t, i, this._userInstancedBuffersStorage.vertexBuffers, this._userInstancedBuffersStorage.vertexArrayObjects), !this.useVertexColor && this._isShaderMaterial(this._lineMaterial)) { const { r, g: s, b: n } = this.color; this._color4.set(r, s, n, this.alpha), this._lineMaterial.setColor4("color", this._color4); } return this; } /** * @internal */ _draw(e, t, i) { if (!this._geometry || !this._geometry.getVertexBuffers() || !this._unIndexed && !this._geometry.getIndexBuffer()) return this; const r = this.getScene().getEngine(); return this._unIndexed ?
r.drawArraysType(At.LineListDrawMode, e.verticesStart, e.verticesCount, i) : r.drawElementsType(At.LineListDrawMode, e.indexStart, e.indexCount, i), this; } /** * Disposes of the line mesh * @param doNotRecurse if true, child meshes are not disposed * @param disposeMaterialAndTextures This parameter is not used by the LinesMesh class * @param doNotDisposeMaterial If the material should not be disposed (default: false, meaning the material is disposed) */ // eslint-disable-next-line @typescript-eslint/no-unused-vars dispose(e, t = !1, i) { i || this._lineMaterial.dispose(!1, !1, !0), super.dispose(e); } /** * Returns a new LinesMesh object cloned from the current one. * @param name defines the name of the new mesh * @param newParent defines the parent of the new mesh * @param doNotCloneChildren if set to true, child meshes of the source are not cloned */ clone(e, t = null, i) { return new Ag(e, this.getScene(), t, this, i); } /** * Creates a new InstancedLinesMesh object from the mesh model. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/copies/instances * @param name defines the name of the new instance * @returns a new InstancedLinesMesh */ createInstance(e) { const t = new XK(e, this); if (this.instancedBuffers) { t.instancedBuffers = {}; for (const i in this.instancedBuffers) t.instancedBuffers[i] = this.instancedBuffers[i]; } return t; } /** * Serializes this lines mesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.color = this.color.asArray(), e.alpha = this.alpha; } /** * Parses a serialized lines mesh * @param parsedMesh the serialized mesh * @param scene the scene to create the lines mesh in * @returns the created lines mesh */ static Parse(e, t) { const i = new Ag(e.name, t); return i.color = ze.FromArray(e.color), i.alpha = e.alpha, i; } } class XK extends Cg { constructor(e, t) { super(e, t), this.intersectionThreshold = t.intersectionThreshold; } /** * Returns the string "InstancedLinesMesh".
*/ getClassName() { return "InstancedLinesMesh"; } } function YK(c) { const e = [], t = [], i = c.lines, r = c.colors, s = []; let n = 0; for (let l = 0; l < i.length; l++) { const o = i[l]; for (let u = 0; u < o.length; u++) { const { x: h, y: d, z: f } = o[u]; if (t.push(h, d, f), r) { const p = r[l], { r: m, g: _, b: v, a: C } = p[u]; s.push(m, _, v, C); } u > 0 && (e.push(n - 1), e.push(n)), n++; } } const a = new Ot(); return a.indices = e, a.positions = t, r && (a.colors = s), a; } function QK(c) { const e = c.dashSize || 3, t = c.gapSize || 1, i = c.dashNb || 200, r = c.points, s = [], n = [], a = D.Zero(); let l = 0, o = 0, u = 0, h = 0, d = 0, f = 0, p = 0; for (p = 0; p < r.length - 1; p++) r[p + 1].subtractToRef(r[p], a), l += a.length(); for (u = l / i, h = e * u / (e + t), p = 0; p < r.length - 1; p++) { r[p + 1].subtractToRef(r[p], a), o = Math.floor(a.length() / u), a.normalize(); for (let _ = 0; _ < o; _++) d = u * _, s.push(r[p].x + d * a.x, r[p].y + d * a.y, r[p].z + d * a.z), s.push(r[p].x + (d + h) * a.x, r[p].y + (d + h) * a.y, r[p].z + (d + h) * a.z), n.push(f, f + 1), f += 2; } const m = new Ot(); return m.positions = s, m.indices = n, m; } function tP(c, e, t = null) { const i = e.instance, r = e.lines, s = e.colors; if (i) { const o = i.getVerticesData(Y.PositionKind); let u, h; s && (u = i.getVerticesData(Y.ColorKind)); let d = 0, f = 0; for (let p = 0; p < r.length; p++) { const m = r[p]; for (let _ = 0; _ < m.length; _++) o[d] = m[_].x, o[d + 1] = m[_].y, o[d + 2] = m[_].z, s && u && (h = s[p], u[f] = h[_].r, u[f + 1] = h[_].g, u[f + 2] = h[_].b, u[f + 3] = h[_].a, f += 4), d += 3; } return i.updateVerticesData(Y.PositionKind, o, !1, !1), s && u && i.updateVerticesData(Y.ColorKind, u, !1, !1), i; } const n = !!s, a = new Ag(c, t, null, void 0, void 0, n, e.useVertexAlpha, e.material); return YK(e).applyToMesh(a, e.updatable), a; } function Ba(c, e, t = null) { const i = e.colors ? [e.colors] : null; return tP(c, { lines: [e.points], updatable: e.updatable, instance: e.instance, colors: i, useVertexAlpha: e.useVertexAlpha, material: e.material }, t); } function pU(c, e, t = null) { const i = e.points, r = e.instance, s = e.gapSize || 1, n = e.dashSize || 3; if (r) { const o = (u) => { const h = D.Zero(), d = u.length / 6; let f = 0, p = 0, m = 0, _ = 0, v = 0, C = 0, x = 0, b = 0; for (x = 0; x < i.length - 1; x++) i[x + 1].subtractToRef(i[x], h), f += h.length(); m = f / d; const S = r._creationDataStorage.dashSize, M = r._creationDataStorage.gapSize; for (_ = S * m / (S + M), x = 0; x < i.length - 1; x++) for (i[x + 1].subtractToRef(i[x], h), p = Math.floor(h.length() / m), h.normalize(), b = 0; b < p && C < u.length; ) v = m * b, u[C] = i[x].x + v * h.x, u[C + 1] = i[x].y + v * h.y, u[C + 2] = i[x].z + v * h.z, u[C + 3] = i[x].x + (v + _) * h.x, u[C + 4] = i[x].y + (v + _) * h.y, u[C + 5] = i[x].z + (v + _) * h.z, C += 6, b++; for (; C < u.length; ) u[C] = i[x].x, u[C + 1] = i[x].y, u[C + 2] = i[x].z, C += 3; }; return (e.dashNb || e.dashSize || e.gapSize || e.useVertexAlpha || e.material) && Ce.Warn("You have used an option other than points with the instance option. 
Please be aware that these other options will be ignored."), r.updateMeshPositions(o, !1), r; } const a = new Ag(c, t, null, void 0, void 0, void 0, e.useVertexAlpha, e.material); return QK(e).applyToMesh(a, e.updatable), a._creationDataStorage = new _K(), a._creationDataStorage.dashSize = n, a._creationDataStorage.gapSize = s, a; } const Xde = { CreateDashedLines: pU, CreateLineSystem: tP, CreateLines: Ba }; Ot.CreateLineSystem = YK; Ot.CreateDashedLines = QK; ke.CreateLines = (c, e, t = null, i = !1, r = null) => Ba(c, { points: e, updatable: i, instance: r }, t); ke.CreateDashedLines = (c, e, t, i, r, s = null, n, a) => pU(c, { points: e, dashSize: t, gapSize: i, dashNb: r, updatable: n, instance: a }, s); class Yde extends at { constructor(e, t) { super(e.x, e.y), this.index = t; } } class zk { constructor() { this.elements = []; } add(e) { const t = []; return e.forEach((i) => { const r = new Yde(i, this.elements.length); t.push(r), this.elements.push(r); }), t; } computeBounds() { const e = new at(this.elements[0].x, this.elements[0].y), t = new at(this.elements[0].x, this.elements[0].y); return this.elements.forEach((i) => { i.x < e.x ? e.x = i.x : i.x > t.x && (t.x = i.x), i.y < e.y ? e.y = i.y : i.y > t.y && (t.y = i.y); }), { min: e, max: t, width: t.x - e.x, height: t.y - e.y }; } } class Qde { /** * Creates a rectangle * @param xmin bottom X coord * @param ymin bottom Y coord * @param xmax top X coord * @param ymax top Y coord * @returns points that make the resulting rectangle */ static Rectangle(e, t, i, r) { return [new at(e, t), new at(i, t), new at(i, r), new at(e, r)]; } /** * Creates a circle * @param radius radius of circle * @param cx scale in x * @param cy scale in y * @param numberOfSides number of sides that make up the circle * @returns points that make the resulting circle */ static Circle(e, t = 0, i = 0, r = 32) { const s = []; let n = 0; const a = Math.PI * 2 / r; for (let l = 0; l < r; l++) s.push(new at(t + Math.cos(n) * e, i + Math.sin(n) * e)), n -= a; return s; } /** * Creates a polygon from input string * @param input Input polygon data * @returns the parsed points */ static Parse(e) { const t = e.split(/[^-+eE.\d]+/).map(parseFloat).filter((s) => !isNaN(s)); let i; const r = []; for (i = 0; i < (t.length & 2147483646); i += 2) r.push(new at(t[i], t[i + 1])); return r; } /** * Starts building a polygon from x and y coordinates * @param x x coordinate * @param y y coordinate * @returns the started path2 */ static StartingAt(e, t) { return _w.StartingAt(e, t); } } class xie { _addToepoint(e) { for (const t of e) this._epoints.push(t.x, t.y); } /** * Creates a PolygonMeshBuilder * @param name name of the builder * @param contours Path of the polygon * @param scene scene to add to when creating the mesh * @param earcutInjection can be used to inject your own earcut reference */ constructor(e, t, i, r = earcut) { this._points = new zk(), this._outlinepoints = new zk(), this._holes = new Array(), this._epoints = new Array(), this._eholes = new Array(), this.bjsEarcut = r, this._name = e, this._scene = i || gi.LastCreatedScene; let s; t instanceof _w ? 
s = t.getPoints() : s = t, this._addToepoint(s), this._points.add(s), this._outlinepoints.add(s), typeof this.bjsEarcut > "u" && Ce.Warn("Earcut was not found, the polygon will not be built."); } /** * Adds a hole within the polygon * @param hole Array of points defining the hole * @returns this */ addHole(e) { this._points.add(e); const t = new zk(); return t.add(e), this._holes.push(t), this._eholes.push(this._epoints.length / 2), this._addToepoint(e), this; } /** * Creates the polygon * @param updatable If the mesh should be updatable * @param depth The depth of the mesh created * @param smoothingThreshold Dot product threshold for smoothed normals * @returns the created mesh */ build(e = !1, t = 0, i = 2) { const r = new ke(this._name, this._scene), s = this.buildVertexData(t, i); return r.setVerticesData(Y.PositionKind, s.positions, e), r.setVerticesData(Y.NormalKind, s.normals, e), r.setVerticesData(Y.UVKind, s.uvs, e), r.setIndices(s.indices), r; } /** * Creates the polygon * @param depth The depth of the mesh created * @param smoothingThreshold Dot product threshold for smoothed normals * @returns the created VertexData */ buildVertexData(e = 0, t = 2) { const i = new Ot(), r = [], s = [], n = [], a = this._points.computeBounds(); this._points.elements.forEach((u) => { r.push(0, 1, 0), s.push(u.x, 0, u.y), n.push((u.x - a.min.x) / a.width, (u.y - a.min.y) / a.height); }); const l = [], o = this.bjsEarcut(this._epoints, this._eholes, 2); for (let u = 0; u < o.length; u++) l.push(o[u]); if (e > 0) { const u = s.length / 3; this._points.elements.forEach((d) => { r.push(0, -1, 0), s.push(d.x, -e, d.y), n.push(1 - (d.x - a.min.x) / a.width, 1 - (d.y - a.min.y) / a.height); }); const h = l.length; for (let d = 0; d < h; d += 3) { const f = l[d + 0], p = l[d + 1], m = l[d + 2]; l.push(m + u), l.push(p + u), l.push(f + u); } this._addSide(s, r, n, l, a, this._outlinepoints, e, !1, t), this._holes.forEach((d) => { this._addSide(s, r, n, l, a, d, e, !0, t); }); } return i.indices = l, i.positions = s, i.normals = r, i.uvs = n, i; } /** * Adds a side to the polygon * @param positions points that make the polygon * @param normals normals of the polygon * @param uvs uvs of the polygon * @param indices indices of the polygon * @param bounds bounds of the polygon * @param points points of the polygon * @param depth depth of the polygon * @param flip flip of the polygon * @param smoothingThreshold */ _addSide(e, t, i, r, s, n, a, l, o) { let u = e.length / 3, h = 0; for (let d = 0; d < n.elements.length; d++) { const f = n.elements[d], p = n.elements[(d + 1) % n.elements.length]; e.push(f.x, 0, f.y), e.push(f.x, -a, f.y), e.push(p.x, 0, p.y), e.push(p.x, -a, p.y); const m = n.elements[(d + n.elements.length - 1) % n.elements.length], _ = n.elements[(d + 2) % n.elements.length]; let v = new D(-(p.y - f.y), 0, p.x - f.x), C = new D(-(f.y - m.y), 0, f.x - m.x), x = new D(-(_.y - p.y), 0, _.x - p.x); l || (v = v.scale(-1), C = C.scale(-1), x = x.scale(-1)); const b = v.normalizeToNew(); let S = C.normalizeToNew(), M = x.normalizeToNew(); const R = D.Dot(S, b); R > o ? R < Sr - 1 ? S = new D(f.x, 0, f.y).subtract(new D(p.x, 0, p.y)).normalize() : S = C.add(v).normalize() : S = b; const w = D.Dot(x, v); w > o ? w < Sr - 1 ? 
M = new D(p.x, 0, p.y).subtract(new D(f.x, 0, f.y)).normalize() : M = x.add(v).normalize() : M = b, i.push(h / s.width, 0), i.push(h / s.width, 1), h += v.length(), i.push(h / s.width, 0), i.push(h / s.width, 1), t.push(S.x, S.y, S.z), t.push(S.x, S.y, S.z), t.push(M.x, M.y, M.z), t.push(M.x, M.y, M.z), l ? (r.push(u), r.push(u + 2), r.push(u + 1), r.push(u + 1), r.push(u + 2), r.push(u + 3)) : (r.push(u), r.push(u + 1), r.push(u + 2), r.push(u + 1), r.push(u + 3), r.push(u + 2)), u += 4; } } } function $K(c, e, t, i, r, s, n) { const a = t || new Array(3), l = i, o = [], u = n || !1; for (let V = 0; V < 3; V++) a[V] === void 0 && (a[V] = new Di(0, 0, 1, 1)), l && l[V] === void 0 && (l[V] = new Et(1, 1, 1, 1)); const h = c.getVerticesData(Y.PositionKind), d = c.getVerticesData(Y.NormalKind), f = c.getVerticesData(Y.UVKind), p = c.getIndices(), m = h.length / 9; let _ = 0, v = 0, C = 0, x = 0, b = 0; const S = [0]; if (u) for (let V = m; V < h.length / 3; V += 4) v = h[3 * (V + 2)] - h[3 * V], C = h[3 * (V + 2) + 2] - h[3 * V + 2], x = Math.sqrt(v * v + C * C), b += x, S.push(b); let M = 0, R = 0; for (let V = 0; V < d.length; V += 3) Math.abs(d[V + 1]) < 1e-3 && (R = 1), Math.abs(d[V + 1] - 1) < 1e-3 && (R = 0), Math.abs(d[V + 1] + 1) < 1e-3 && (R = 2), M = V / 3, R === 1 ? (_ = M - m, _ % 4 < 1.5 ? u ? f[2 * M] = a[R].x + (a[R].z - a[R].x) * S[Math.floor(_ / 4)] / b : f[2 * M] = a[R].x : u ? f[2 * M] = a[R].x + (a[R].z - a[R].x) * S[Math.floor(_ / 4) + 1] / b : f[2 * M] = a[R].z, _ % 2 === 0 ? f[2 * M + 1] = hn.UseOpenGLOrientationForUV ? 1 - a[R].w : a[R].w : f[2 * M + 1] = hn.UseOpenGLOrientationForUV ? 1 - a[R].y : a[R].y) : (f[2 * M] = (1 - f[2 * M]) * a[R].x + f[2 * M] * a[R].z, f[2 * M + 1] = (1 - f[2 * M + 1]) * a[R].y + f[2 * M + 1] * a[R].w, hn.UseOpenGLOrientationForUV && (f[2 * M + 1] = 1 - f[2 * M + 1])), l && o.push(l[R].r, l[R].g, l[R].b, l[R].a); Ot._ComputeSides(e, h, p, d, f, r, s); const w = new Ot(); if (w.indices = p, w.positions = h, w.normals = d, w.uvs = f, l) { const V = e === Ot.DOUBLESIDE ? o.concat(o) : o; w.colors = V; } return w; } function nN(c, e, t = null, i = earcut) { e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation); const r = e.shape, s = e.holes || [], n = e.depth || 0, a = e.smoothingThreshold || 2, l = []; let o = []; for (let p = 0; p < r.length; p++) l[p] = new at(r[p].x, r[p].z); const u = 1e-8; l[0].equalsWithEpsilon(l[l.length - 1], u) && l.pop(); const h = new xie(c, l, t || gi.LastCreatedScene, i); for (let p = 0; p < s.length; p++) { o = []; for (let m = 0; m < s[p].length; m++) o.push(new at(s[p][m].x, s[p][m].z)); h.addHole(o); } const d = h.build(!1, n, a); return d._originalBuilderSideOrientation = e.sideOrientation, $K(d, e.sideOrientation, e.faceUV, e.faceColors, e.frontUVs, e.backUVs, e.wrap).applyToMesh(d, e.updatable), d; } function aN(c, e, t = null, i = earcut) { return nN(c, e, t, i); } const $de = { ExtrudePolygon: aN, CreatePolygon: nN }; Ot.CreatePolygon = $K; ke.CreatePolygon = (c, e, t, i, r, s, n = earcut) => nN(c, { shape: e, holes: i, updatable: r, sideOrientation: s }, t, n); ke.ExtrudePolygon = (c, e, t, i, r, s, n, a = earcut) => aN(c, { shape: e, holes: r, depth: t, updatable: s, sideOrientation: n }, i, a); function oN(c, e, t = null) { const i = e.path, r = e.shape, s = e.scale || 1, n = e.rotation || 0, a = e.cap === 0 ? 
0 : e.cap || ke.NO_CAP, l = e.updatable, o = ke._GetDefaultSideOrientation(e.sideOrientation), u = e.instance || null, h = e.invertUV || !1, d = e.closeShape || !1, f = e.closePath || !1; return bie(c, r, i, s, n, null, null, f, d, a, !1, t, !!l, o, u, h, e.frontUVs || null, e.backUVs || null, e.firstNormal || null, !!e.adjustFrame); } function lN(c, e, t = null) { const i = e.path, r = e.shape, s = e.scaleFunction || (() => 1), n = e.rotationFunction || (() => 0), a = e.closePath || e.ribbonCloseArray || !1, l = e.closeShape || e.ribbonClosePath || !1, o = e.cap === 0 ? 0 : e.cap || ke.NO_CAP, u = e.updatable, h = e.firstNormal || null, d = e.adjustFrame || !1, f = ke._GetDefaultSideOrientation(e.sideOrientation), p = e.instance, m = e.invertUV || !1; return bie(c, r, i, null, null, s, n, a, l, o, !0, t, !!u, f, p || null, m, e.frontUVs || null, e.backUVs || null, h, d); } function bie(c, e, t, i, r, s, n, a, l, o, u, h, d, f, p, m, _, v, C, x) { const b = (V, k, L, B, U, K, ee, Z, q, le, ie) => { const $ = L.getTangents(), j = L.getNormals(), J = L.getBinormals(), ne = L.getDistances(); if (ie) { for (let be = 0; be < $.length; be++) if ($[be].x == 0 && $[be].y == 0 && $[be].z == 0 && $[be].copyFrom($[be - 1]), j[be].x == 0 && j[be].y == 0 && j[be].z == 0 && j[be].copyFrom(j[be - 1]), J[be].x == 0 && J[be].y == 0 && J[be].z == 0 && J[be].copyFrom(J[be - 1]), be > 0) { let Ue = $[be - 1]; D.Dot(Ue, $[be]) < 0 && $[be].scaleInPlace(-1), Ue = j[be - 1], D.Dot(Ue, j[be]) < 0 && j[be].scaleInPlace(-1), Ue = J[be - 1], D.Dot(Ue, J[be]) < 0 && J[be].scaleInPlace(-1); } } let pe = 0; const ge = () => U !== null ? U : 1, ye = le && Z ? Z : () => K !== null ? K : 0, Se = le && ee ? ee : ge; let re = q === ke.NO_CAP || q === ke.CAP_END ? 0 : 2; const te = de.Matrix[0]; for (let be = 0; be < k.length; be++) { const Ue = [], Ee = ye(be, ne[be]), He = Se(be, ne[be]); Ae.RotationAxisToRef($[be], pe, te); for (let Xe = 0; Xe < V.length; Xe++) { const rt = $[be].scale(V[Xe].z).add(j[be].scale(V[Xe].x)).add(J[be].scale(V[Xe].y)), dt = D.Zero(); D.TransformCoordinatesToRef(rt, te, dt), dt.scaleInPlace(He).addInPlace(k[be]), Ue[Xe] = dt; } B[re] = Ue, pe += Ee, re++; } const he = (be) => { const Ue = Array(), Ee = D.Zero(); let He; for (He = 0; He < be.length; He++) Ee.addInPlace(be[He]); for (Ee.scaleInPlace(1 / be.length), He = 0; He < be.length; He++) Ue.push(Ee); return Ue; }; switch (q) { case ke.NO_CAP: break; case ke.CAP_START: B[0] = he(B[2]), B[1] = B[2]; break; case ke.CAP_END: B[re] = B[re - 1], B[re + 1] = he(B[re - 1]); break; case ke.CAP_ALL: B[0] = he(B[2]), B[1] = B[2], B[re] = B[re - 1], B[re + 1] = he(B[re - 1]); break; } return B; }; let S, M; if (p) { const V = p._creationDataStorage; return S = C ? V.path3D.update(t, C) : V.path3D.update(t), M = b(e, t, V.path3D, V.pathArray, i, r, s, n, V.cap, u, x), p = nx("", { pathArray: M, closeArray: !1, closePath: !1, offset: 0, updatable: !1, sideOrientation: 0, instance: p }, h || void 0), p; } S = C ? new fP(t, C) : new fP(t); const R = new Array(); o = o < 0 || o > 3 ? 
0 : o, M = b(e, t, S, R, i, r, s, n, o, u, x); const w = nx(c, { pathArray: M, closeArray: a, closePath: l, updatable: d, sideOrientation: f, invertUV: m, frontUVs: _ || void 0, backUVs: v || void 0 }, h); return w._creationDataStorage.pathArray = M, w._creationDataStorage.path3D = S, w._creationDataStorage.cap = o, w; } const Zde = { // eslint-disable-next-line @typescript-eslint/naming-convention ExtrudeShape: oN, // eslint-disable-next-line @typescript-eslint/naming-convention ExtrudeShapeCustom: lN }; ke.ExtrudeShape = (c, e, t, i, r, s, n = null, a, l, o) => { const u = { shape: e, path: t, scale: i, rotation: r, cap: s === 0 ? 0 : s || ke.NO_CAP, sideOrientation: l, instance: o, updatable: a }; return oN(c, u, n); }; ke.ExtrudeShapeCustom = (c, e, t, i, r, s, n, a, l, o, u, h) => { const d = { shape: e, path: t, scaleFunction: i, rotationFunction: r, ribbonCloseArray: s, ribbonClosePath: n, cap: a === 0 ? 0 : a || ke.NO_CAP, sideOrientation: u, instance: h, updatable: o }; return lN(c, d, l); }; function _U(c, e, t = null) { const i = e.arc ? e.arc <= 0 || e.arc > 1 ? 1 : e.arc : 1, r = e.closed === void 0 ? !0 : e.closed, s = e.shape, n = e.radius || 1, a = e.tessellation || 64, l = e.clip || 0, o = e.updatable, u = ke._GetDefaultSideOrientation(e.sideOrientation), h = e.cap || ke.NO_CAP, d = Math.PI * 2, f = [], p = e.invertUV || !1; let m = 0, _ = 0; const v = d / a * i; let C, x; for (m = 0; m <= a - l; m++) { for (x = [], (h == ke.CAP_START || h == ke.CAP_ALL) && (x.push(new D(0, s[0].y, 0)), x.push(new D(Math.cos(m * v) * s[0].x * n, s[0].y, Math.sin(m * v) * s[0].x * n))), _ = 0; _ < s.length; _++) C = new D(Math.cos(m * v) * s[_].x * n, s[_].y, Math.sin(m * v) * s[_].x * n), x.push(C); (h == ke.CAP_END || h == ke.CAP_ALL) && (x.push(new D(Math.cos(m * v) * s[s.length - 1].x * n, s[s.length - 1].y, Math.sin(m * v) * s[s.length - 1].x * n)), x.push(new D(0, s[s.length - 1].y, 0))), f.push(x); } return nx(c, { pathArray: f, closeArray: r, sideOrientation: u, updatable: o, invertUV: p, frontUVs: e.frontUVs, backUVs: e.backUVs }, t); } const qde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateLathe: _U }; ke.CreateLathe = (c, e, t, i, r, s, n) => _U(c, { shape: e, radius: t, tessellation: i, sideOrientation: n, updatable: s }, r); function mU(c, e, t = null) { const i = e.path; let r = e.instance, s = 1; e.radius !== void 0 ? s = e.radius : r && (s = r._creationDataStorage.radius); const n = e.tessellation || 64, a = e.radiusFunction || null; let l = e.cap || ke.NO_CAP; const o = e.invertUV || !1, u = e.updatable, h = ke._GetDefaultSideOrientation(e.sideOrientation); e.arc = e.arc && (e.arc <= 0 || e.arc > 1) ? 1 : e.arc || 1; const d = (v, C, x, b, S, M, R, w) => { const V = C.getTangents(), k = C.getNormals(), L = C.getDistances(), U = Math.PI * 2 / S * w, ee = M || (() => b); let Z, q, le, ie; const $ = de.Matrix[0]; let j = R === ke.NO_CAP || R === ke.CAP_END ? 0 : 2; for (let ne = 0; ne < v.length; ne++) { q = ee(ne, L[ne]), Z = Array(), le = k[ne]; for (let pe = 0; pe < S; pe++) Ae.RotationAxisToRef(V[ne], U * pe, $), ie = Z[pe] ? 
Z[pe] : D.Zero(), D.TransformCoordinatesToRef(le, $, ie), ie.scaleInPlace(q).addInPlace(v[ne]), Z[pe] = ie; x[j] = Z, j++; } const J = (ne, pe) => { const ge = Array(); for (let Ie = 0; Ie < ne; Ie++) ge.push(v[pe]); return ge; }; switch (R) { case ke.NO_CAP: break; case ke.CAP_START: x[0] = J(S, 0), x[1] = x[2].slice(0); break; case ke.CAP_END: x[j] = x[j - 1].slice(0), x[j + 1] = J(S, v.length - 1); break; case ke.CAP_ALL: x[0] = J(S, 0), x[1] = x[2].slice(0), x[j] = x[j - 1].slice(0), x[j + 1] = J(S, v.length - 1); break; } return x; }; let f, p; if (r) { const v = r._creationDataStorage, C = e.arc || v.arc; return f = v.path3D.update(i), p = d(i, f, v.pathArray, s, v.tessellation, a, v.cap, C), r = nx("", { pathArray: p, instance: r }), v.path3D = f, v.pathArray = p, v.arc = C, v.radius = s, r; } f = new fP(i); const m = new Array(); l = l < 0 || l > 3 ? 0 : l, p = d(i, f, m, s, n, a, l, e.arc); const _ = nx(c, { pathArray: p, closePath: !0, closeArray: !1, updatable: u, sideOrientation: h, invertUV: o, frontUVs: e.frontUVs, backUVs: e.backUVs }, t); return _._creationDataStorage.pathArray = p, _._creationDataStorage.path3D = f, _._creationDataStorage.tessellation = n, _._creationDataStorage.cap = l, _._creationDataStorage.arc = e.arc, _._creationDataStorage.radius = s, _; } const Jde = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateTube: mU }; ke.CreateTube = (c, e, t, i, r, s, n, a, l, o) => mU(c, { path: e, radius: t, tessellation: i, radiusFunction: r, arc: 1, cap: s, updatable: a, sideOrientation: l, instance: o }, n); function ZK(c) { const e = []; e[0] = { vertex: [ [0, 0, 1.732051], [1.632993, 0, -0.5773503], [-0.8164966, 1.414214, -0.5773503], [-0.8164966, -1.414214, -0.5773503] ], face: [ [0, 1, 2], [0, 2, 3], [0, 3, 1], [1, 3, 2] ] }, e[1] = { vertex: [ [0, 0, 1.414214], [1.414214, 0, 0], [0, 1.414214, 0], [-1.414214, 0, 0], [0, -1.414214, 0], [0, 0, -1.414214] ], face: [ [0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 1], [1, 4, 5], [1, 5, 2], [2, 5, 3], [3, 5, 4] ] }, e[2] = { vertex: [ [0, 0, 1.070466], [0.7136442, 0, 0.7978784], [-0.3568221, 0.618034, 0.7978784], [-0.3568221, -0.618034, 0.7978784], [0.7978784, 0.618034, 0.3568221], [0.7978784, -0.618034, 0.3568221], [-0.9341724, 0.381966, 0.3568221], [0.1362939, 1, 0.3568221], [0.1362939, -1, 0.3568221], [-0.9341724, -0.381966, 0.3568221], [0.9341724, 0.381966, -0.3568221], [0.9341724, -0.381966, -0.3568221], [-0.7978784, 0.618034, -0.3568221], [-0.1362939, 1, -0.3568221], [-0.1362939, -1, -0.3568221], [-0.7978784, -0.618034, -0.3568221], [0.3568221, 0.618034, -0.7978784], [0.3568221, -0.618034, -0.7978784], [-0.7136442, 0, -0.7978784], [0, 0, -1.070466] ], face: [ [0, 1, 4, 7, 2], [0, 2, 6, 9, 3], [0, 3, 8, 5, 1], [1, 5, 11, 10, 4], [2, 7, 13, 12, 6], [3, 9, 15, 14, 8], [4, 10, 16, 13, 7], [5, 8, 14, 17, 11], [6, 12, 18, 15, 9], [10, 11, 17, 19, 16], [12, 13, 16, 19, 18], [14, 15, 18, 19, 17] ] }, e[3] = { vertex: [ [0, 0, 1.175571], [1.051462, 0, 0.5257311], [0.3249197, 1, 0.5257311], [-0.8506508, 0.618034, 0.5257311], [-0.8506508, -0.618034, 0.5257311], [0.3249197, -1, 0.5257311], [0.8506508, 0.618034, -0.5257311], [0.8506508, -0.618034, -0.5257311], [-0.3249197, 1, -0.5257311], [-1.051462, 0, -0.5257311], [-0.3249197, -1, -0.5257311], [0, 0, -1.175571] ], face: [ [0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 5, 1], [1, 5, 7], [1, 7, 6], [1, 6, 2], [2, 6, 8], [2, 8, 3], [3, 8, 9], [3, 9, 4], [4, 9, 10], [4, 10, 5], [5, 10, 7], [6, 7, 11], [6, 11, 8], [7, 10, 11], [8, 11, 9], [9, 11, 10] ] }, e[4] = 
{ vertex: [ [0, 0, 1.070722], [0.7148135, 0, 0.7971752], [-0.104682, 0.7071068, 0.7971752], [-0.6841528, 0.2071068, 0.7971752], [-0.104682, -0.7071068, 0.7971752], [0.6101315, 0.7071068, 0.5236279], [1.04156, 0.2071068, 0.1367736], [0.6101315, -0.7071068, 0.5236279], [-0.3574067, 1, 0.1367736], [-0.7888348, -0.5, 0.5236279], [-0.9368776, 0.5, 0.1367736], [-0.3574067, -1, 0.1367736], [0.3574067, 1, -0.1367736], [0.9368776, -0.5, -0.1367736], [0.7888348, 0.5, -0.5236279], [0.3574067, -1, -0.1367736], [-0.6101315, 0.7071068, -0.5236279], [-1.04156, -0.2071068, -0.1367736], [-0.6101315, -0.7071068, -0.5236279], [0.104682, 0.7071068, -0.7971752], [0.6841528, -0.2071068, -0.7971752], [0.104682, -0.7071068, -0.7971752], [-0.7148135, 0, -0.7971752], [0, 0, -1.070722] ], face: [ [0, 2, 3], [1, 6, 5], [4, 9, 11], [7, 15, 13], [8, 16, 10], [12, 14, 19], [17, 22, 18], [20, 21, 23], [0, 1, 5, 2], [0, 3, 9, 4], [0, 4, 7, 1], [1, 7, 13, 6], [2, 5, 12, 8], [2, 8, 10, 3], [3, 10, 17, 9], [4, 11, 15, 7], [5, 6, 14, 12], [6, 13, 20, 14], [8, 12, 19, 16], [9, 17, 18, 11], [10, 16, 22, 17], [11, 18, 21, 15], [13, 15, 21, 20], [14, 20, 23, 19], [16, 19, 23, 22], [18, 22, 23, 21] ] }, e[5] = { vertex: [ [0, 0, 1.322876], [1.309307, 0, 0.1889822], [-0.9819805, 0.8660254, 0.1889822], [0.1636634, -1.299038, 0.1889822], [0.3273268, 0.8660254, -0.9449112], [-0.8183171, -0.4330127, -0.9449112] ], face: [ [0, 3, 1], [2, 4, 5], [0, 1, 4, 2], [0, 2, 5, 3], [1, 3, 5, 4] ] }, e[6] = { vertex: [ [0, 0, 1.159953], [1.013464, 0, 0.5642542], [-0.3501431, 0.9510565, 0.5642542], [-0.7715208, -0.6571639, 0.5642542], [0.6633206, 0.9510565, -0.03144481], [0.8682979, -0.6571639, -0.3996071], [-1.121664, 0.2938926, -0.03144481], [-0.2348831, -1.063314, -0.3996071], [0.5181548, 0.2938926, -0.9953061], [-0.5850262, -0.112257, -0.9953061] ], face: [ [0, 1, 4, 2], [0, 2, 6, 3], [1, 5, 8, 4], [3, 6, 9, 7], [5, 7, 9, 8], [0, 3, 7, 5, 1], [2, 4, 8, 9, 6] ] }, e[7] = { vertex: [ [0, 0, 1.118034], [0.8944272, 0, 0.6708204], [-0.2236068, 0.8660254, 0.6708204], [-0.7826238, -0.4330127, 0.6708204], [0.6708204, 0.8660254, 0.2236068], [1.006231, -0.4330127, -0.2236068], [-1.006231, 0.4330127, 0.2236068], [-0.6708204, -0.8660254, -0.2236068], [0.7826238, 0.4330127, -0.6708204], [0.2236068, -0.8660254, -0.6708204], [-0.8944272, 0, -0.6708204], [0, 0, -1.118034] ], face: [ [0, 1, 4, 2], [0, 2, 6, 3], [1, 5, 8, 4], [3, 6, 10, 7], [5, 9, 11, 8], [7, 10, 11, 9], [0, 3, 7, 9, 5, 1], [2, 4, 8, 11, 10, 6] ] }, e[8] = { vertex: [ [-0.729665, 0.670121, 0.319155], [-0.655235, -0.29213, -0.754096], [-0.093922, -0.607123, 0.537818], [0.702196, 0.595691, 0.485187], [0.776626, -0.36656, -0.588064] ], face: [ [1, 4, 2], [0, 1, 2], [3, 0, 2], [4, 3, 2], [4, 1, 0, 3] ] }, e[9] = { vertex: [ [-0.868849, -0.100041, 0.61257], [-0.329458, 0.976099, 0.28078], [-0.26629, -0.013796, -0.477654], [-0.13392, -1.034115, 0.229829], [0.738834, 0.707117, -0.307018], [0.859683, -0.535264, -0.338508] ], face: [ [3, 0, 2], [5, 3, 2], [4, 5, 2], [1, 4, 2], [0, 1, 2], [0, 3, 5, 4, 1] ] }, e[10] = { vertex: [ [-0.610389, 0.243975, 0.531213], [-0.187812, -0.48795, -0.664016], [-0.187812, 0.9759, -0.664016], [0.187812, -0.9759, 0.664016], [0.798201, 0.243975, 0.132803] ], face: [ [1, 3, 0], [3, 4, 0], [3, 1, 4], [0, 2, 1], [0, 4, 2], [2, 4, 1] ] }, e[11] = { vertex: [ [-1.028778, 0.392027, -0.048786], [-0.640503, -0.646161, 0.621837], [-0.125162, -0.395663, -0.540059], [4683e-6, 0.888447, -0.651988], [0.125161, 0.395663, 0.540059], [0.632925, -0.791376, 0.433102], [1.031672, 0.157063, 
-0.354165] ], face: [ [3, 2, 0], [2, 1, 0], [2, 5, 1], [0, 4, 3], [0, 1, 4], [4, 1, 5], [2, 3, 6], [3, 4, 6], [5, 2, 6], [4, 5, 6] ] }, e[12] = { vertex: [ [-0.669867, 0.334933, -0.529576], [-0.669867, 0.334933, 0.529577], [-0.4043, 1.212901, 0], [-0.334933, -0.669867, -0.529576], [-0.334933, -0.669867, 0.529577], [0.334933, 0.669867, -0.529576], [0.334933, 0.669867, 0.529577], [0.4043, -1.212901, 0], [0.669867, -0.334933, -0.529576], [0.669867, -0.334933, 0.529577] ], face: [ [8, 9, 7], [6, 5, 2], [3, 8, 7], [5, 0, 2], [4, 3, 7], [0, 1, 2], [9, 4, 7], [1, 6, 2], [9, 8, 5, 6], [8, 3, 0, 5], [3, 4, 1, 0], [4, 9, 6, 1] ] }, e[13] = { vertex: [ [-0.931836, 0.219976, -0.264632], [-0.636706, 0.318353, 0.692816], [-0.613483, -0.735083, -0.264632], [-0.326545, 0.979634, 0], [-0.318353, -0.636706, 0.692816], [-0.159176, 0.477529, -0.856368], [0.159176, -0.477529, -0.856368], [0.318353, 0.636706, 0.692816], [0.326545, -0.979634, 0], [0.613482, 0.735082, -0.264632], [0.636706, -0.318353, 0.692816], [0.931835, -0.219977, -0.264632] ], face: [ [11, 10, 8], [7, 9, 3], [6, 11, 8], [9, 5, 3], [2, 6, 8], [5, 0, 3], [4, 2, 8], [0, 1, 3], [10, 4, 8], [1, 7, 3], [10, 11, 9, 7], [11, 6, 5, 9], [6, 2, 0, 5], [2, 4, 1, 0], [4, 10, 7, 1] ] }, e[14] = { vertex: [ [-0.93465, 0.300459, -0.271185], [-0.838689, -0.260219, -0.516017], [-0.711319, 0.717591, 0.128359], [-0.710334, -0.156922, 0.080946], [-0.599799, 0.556003, -0.725148], [-0.503838, -4675e-6, -0.969981], [-0.487004, 0.26021, 0.48049], [-0.460089, -0.750282, -0.512622], [-0.376468, 0.973135, -0.325605], [-0.331735, -0.646985, 0.084342], [-0.254001, 0.831847, 0.530001], [-0.125239, -0.494738, -0.966586], [0.029622, 0.027949, 0.730817], [0.056536, -0.982543, -0.262295], [0.08085, 1.087391, 0.076037], [0.125583, -0.532729, 0.485984], [0.262625, 0.599586, 0.780328], [0.391387, -0.726999, -0.716259], [0.513854, -0.868287, 0.139347], [0.597475, 0.85513, 0.326364], [0.641224, 0.109523, 0.783723], [0.737185, -0.451155, 0.538891], [0.848705, -0.612742, -0.314616], [0.976075, 0.365067, 0.32976], [1.072036, -0.19561, 0.084927] ], face: [ [15, 18, 21], [12, 20, 16], [6, 10, 2], [3, 0, 1], [9, 7, 13], [2, 8, 4, 0], [0, 4, 5, 1], [1, 5, 11, 7], [7, 11, 17, 13], [13, 17, 22, 18], [18, 22, 24, 21], [21, 24, 23, 20], [20, 23, 19, 16], [16, 19, 14, 10], [10, 14, 8, 2], [15, 9, 13, 18], [12, 15, 21, 20], [6, 12, 16, 10], [3, 6, 2, 0], [9, 3, 1, 7], [9, 15, 12, 6, 3], [22, 17, 11, 5, 4, 8, 14, 19, 23, 24] ] }; const t = c.type && (c.type < 0 || c.type >= e.length) ? 0 : c.type || 0, i = c.size, r = c.sizeX || i || 1, s = c.sizeY || i || 1, n = c.sizeZ || i || 1, a = c.custom || e[t], l = a.face.length, o = c.faceUV || new Array(l), u = c.faceColors, h = c.flat === void 0 ? !0 : c.flat, d = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, f = [], p = [], m = [], _ = [], v = []; let C = 0, x = 0; const b = []; let S = 0, M = 0, R, w, V, k, L, B; if (h) for (M = 0; M < l; M++) u && u[M] === void 0 && (u[M] = new Et(1, 1, 1, 1)), o && o[M] === void 0 && (o[M] = new Di(0, 0, 1, 1)); if (h) for (M = 0; M < l; M++) { const K = a.face[M].length; for (V = 2 * Math.PI / K, k = 0.5 * Math.tan(V / 2), L = 0.5, S = 0; S < K; S++) f.push(a.vertex[a.face[M][S]][0] * r, a.vertex[a.face[M][S]][1] * s, a.vertex[a.face[M][S]][2] * n), b.push(C), C++, R = o[M].x + (o[M].z - o[M].x) * (0.5 + k), w = o[M].y + (o[M].w - o[M].y) * (L - 0.5), _.push(R, hn.UseOpenGLOrientationForUV ? 
1 - w : w), B = k * Math.cos(V) - L * Math.sin(V), L = k * Math.sin(V) + L * Math.cos(V), k = B, u && v.push(u[M].r, u[M].g, u[M].b, u[M].a); for (S = 0; S < K - 2; S++) p.push(b[0 + x], b[S + 2 + x], b[S + 1 + x]); x += K; } else { for (S = 0; S < a.vertex.length; S++) f.push(a.vertex[S][0] * r, a.vertex[S][1] * s, a.vertex[S][2] * n), _.push(0, hn.UseOpenGLOrientationForUV ? 1 : 0); for (M = 0; M < l; M++) for (S = 0; S < a.face[M].length - 2; S++) p.push(a.face[M][0], a.face[M][S + 2], a.face[M][S + 1]); } Ot.ComputeNormals(f, p, m), Ot._ComputeSides(d, f, p, m, _, c.frontUVs, c.backUVs); const U = new Ot(); return U.positions = f, U.indices = p, U.normals = m, U.uvs = _, u && h && (U.colors = v), U; } function AP(c, e = {}, t = null) { const i = new ke(c, t); return e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), i._originalBuilderSideOrientation = e.sideOrientation, ZK(e).applyToMesh(i, e.updatable), i; } const efe = { // eslint-disable-next-line @typescript-eslint/naming-convention CreatePolyhedron: AP }; Ot.CreatePolyhedron = ZK; ke.CreatePolyhedron = (c, e, t) => AP(c, e, t); const tfe = new D(1, 0, 0), ife = new D(-1, 0, 0), rfe = new D(0, 1, 0), sfe = new D(0, -1, 0), nfe = new D(0, 0, 1), afe = new D(0, 0, -1); class cB { constructor(e = D.Zero(), t = D.Up(), i = at.Zero(), r = 0, s = 0, n = null, a = null, l = null, o = null) { this.position = e, this.normal = t, this.uv = i, this.vertexIdx = r, this.vertexIdxForBones = s, this.localPositionOverride = n, this.localNormalOverride = a, this.matrixIndicesOverride = l, this.matrixWeightsOverride = o; } clone() { var e, t, i, r; return new cB(this.position.clone(), this.normal.clone(), this.uv.clone(), this.vertexIdx, this.vertexIdxForBones, (e = this.localPositionOverride) === null || e === void 0 ? void 0 : e.slice(), (t = this.localNormalOverride) === null || t === void 0 ? void 0 : t.slice(), (i = this.matrixIndicesOverride) === null || i === void 0 ? void 0 : i.slice(), (r = this.matrixWeightsOverride) === null || r === void 0 ? void 0 : r.slice()); } } function gU(c, e, t) { var i, r, s, n; const a = !!e.skeleton, l = t.localMode || a, o = e.overrideMaterialSideOrientation !== null && e.overrideMaterialSideOrientation !== void 0, u = e.getIndices(), h = a ? e.getPositionData(!0, !0) : e.getVerticesData(Y.PositionKind), d = a ? e.getNormalsData(!0, !0) : e.getVerticesData(Y.NormalKind), f = l ? a ? e.getVerticesData(Y.PositionKind) : h : null, p = l ? a ? e.getVerticesData(Y.NormalKind) : d : null, m = e.getVerticesData(Y.UVKind), _ = a ? e.getVerticesData(Y.MatricesIndicesKind) : null, v = a ? e.getVerticesData(Y.MatricesWeightsKind) : null, C = a ? e.getVerticesData(Y.MatricesIndicesExtraKind) : null, x = a ? e.getVerticesData(Y.MatricesWeightsExtraKind) : null, b = t.position || D.Zero(); let S = t.normal || D.Up(); const M = t.size || D.One(), R = t.angle || 0; if (!S) { const j = new D(0, 0, 1), J = e.getScene().activeCamera, ne = D.TransformCoordinates(j, J.getWorldMatrix()); S = J.globalPosition.subtract(ne); } const w = -Math.atan2(S.z, S.x) - Math.PI / 2, V = Math.sqrt(S.x * S.x + S.z * S.z), k = Math.atan2(S.y, V), L = new Ot(); L.indices = [], L.positions = [], L.normals = [], L.uvs = [], L.matricesIndices = a ? [] : null, L.matricesWeights = a ? [] : null, L.matricesIndicesExtra = C ? [] : null, L.matricesWeightsExtra = x ? 
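/* Usage sketch for the polyhedron builder registered just above (exposed publicly as
   BABYLON.MeshBuilder.CreatePolyhedron in the unminified Babylon.js API — the public names are an
   assumption here; this bundle only shows the minified registrations on Mesh/VertexData). Assumes an
   existing `scene`:

     // type indexes the built-in table above (0–14); size / sizeX / sizeY / sizeZ scale the unit model
     const solid = BABYLON.MeshBuilder.CreatePolyhedron("solid", { type: 3, size: 2 }, scene);

     // with flat faces (the default), per-face colors can be supplied; missing entries
     // fall back to white, as handled above
     const tinted = BABYLON.MeshBuilder.CreatePolyhedron("tinted", {
       type: 2,
       size: 1,
       faceColors: [new BABYLON.Color4(1, 0, 0, 1)],
     }, scene);
*/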
[] : null; let B = 0; const U = (j, J) => { const ne = new cB(); if (!u || !h || !d) return ne; const pe = u[j]; if (ne.vertexIdx = pe * 3, ne.vertexIdxForBones = pe * 4, ne.position = new D(h[pe * 3], h[pe * 3 + 1], h[pe * 3 + 2]), D.TransformCoordinatesToRef(ne.position, J, ne.position), ne.normal = new D(d[pe * 3], d[pe * 3 + 1], d[pe * 3 + 2]), D.TransformNormalToRef(ne.normal, J, ne.normal), t.captureUVS && m) { const ge = m[pe * 2 + 1]; ne.uv = new at(m[pe * 2], hn.UseOpenGLOrientationForUV ? 1 - ge : ge); } return ne; }, K = [0, 0, 0, 0], ee = (j, J) => { if (j.length === 0) return j; const ne = 0.5 * Math.abs(D.Dot(M, J)), pe = (ye, Se, re, te) => { for (let he = 0; he < te; ++he) if (ye[re + he] === Se) return re + he; return -1; }, ge = (ye, Se) => { var re, te, he, be, Ue, Ee, He, Xe, rt, dt, bt, Mt, Ct, di, Kt, ei; const bi = D.GetClipFactor(ye.position, Se.position, J, ne); let vr = K, yi = K; if (_ && v) { const Lr = ye.matrixIndicesOverride ? 0 : ye.vertexIdxForBones, Us = (re = ye.matrixIndicesOverride) !== null && re !== void 0 ? re : _, nn = (te = ye.matrixWeightsOverride) !== null && te !== void 0 ? te : v, Li = Se.matrixIndicesOverride ? 0 : Se.vertexIdxForBones, Os = (he = Se.matrixIndicesOverride) !== null && he !== void 0 ? he : _, rn = (be = Se.matrixWeightsOverride) !== null && be !== void 0 ? be : v; vr = [0, 0, 0, 0], yi = [0, 0, 0, 0]; let Ts = 0; for (let as = 0; as < 4; ++as) if (nn[Lr + as] > 0) { const ui = pe(Os, Us[Lr + as], Li, 4); vr[Ts] = Us[Lr + as], yi[Ts] = yt.Lerp(nn[Lr + as], ui >= 0 ? rn[ui] : 0, bi), Ts++; } for (let as = 0; as < 4 && Ts < 4; ++as) { const ui = Os[Li + as]; pe(Us, ui, Lr, 4) === -1 && (vr[Ts] = ui, yi[Ts] = yt.Lerp(0, rn[Li + as], bi), Ts++); } const Zs = yi[0] + yi[1] + yi[2] + yi[3]; yi[0] /= Zs, yi[1] /= Zs, yi[2] /= Zs, yi[3] /= Zs; } const Vr = ye.localPositionOverride ? ye.localPositionOverride[0] : (Ue = f == null ? void 0 : f[ye.vertexIdx]) !== null && Ue !== void 0 ? Ue : 0, Rr = ye.localPositionOverride ? ye.localPositionOverride[1] : (Ee = f == null ? void 0 : f[ye.vertexIdx + 1]) !== null && Ee !== void 0 ? Ee : 0, ks = ye.localPositionOverride ? ye.localPositionOverride[2] : (He = f == null ? void 0 : f[ye.vertexIdx + 2]) !== null && He !== void 0 ? He : 0, Qt = Se.localPositionOverride ? Se.localPositionOverride[0] : (Xe = f == null ? void 0 : f[Se.vertexIdx]) !== null && Xe !== void 0 ? Xe : 0, Ei = Se.localPositionOverride ? Se.localPositionOverride[1] : (rt = f == null ? void 0 : f[Se.vertexIdx + 1]) !== null && rt !== void 0 ? rt : 0, Pi = Se.localPositionOverride ? Se.localPositionOverride[2] : (dt = f == null ? void 0 : f[Se.vertexIdx + 2]) !== null && dt !== void 0 ? dt : 0, rr = ye.localNormalOverride ? ye.localNormalOverride[0] : (bt = p == null ? void 0 : p[ye.vertexIdx]) !== null && bt !== void 0 ? bt : 0, sr = ye.localNormalOverride ? ye.localNormalOverride[1] : (Mt = p == null ? void 0 : p[ye.vertexIdx + 1]) !== null && Mt !== void 0 ? Mt : 0, dr = ye.localNormalOverride ? ye.localNormalOverride[2] : (Ct = p == null ? void 0 : p[ye.vertexIdx + 2]) !== null && Ct !== void 0 ? Ct : 0, nr = Se.localNormalOverride ? Se.localNormalOverride[0] : (di = p == null ? void 0 : p[Se.vertexIdx]) !== null && di !== void 0 ? di : 0, Pr = Se.localNormalOverride ? Se.localNormalOverride[1] : (Kt = p == null ? void 0 : p[Se.vertexIdx + 1]) !== null && Kt !== void 0 ? Kt : 0, ti = Se.localNormalOverride ? Se.localNormalOverride[2] : (ei = p == null ? void 0 : p[Se.vertexIdx + 2]) !== null && ei !== void 0 ? 
ei : 0, Oi = rr + (nr - rr) * bi, ri = sr + (Pr - sr) * bi, ki = dr + (ti - dr) * bi, wr = Math.sqrt(Oi * Oi + ri * ri + ki * ki); return new cB(D.Lerp(ye.position, Se.position, bi), D.Lerp(ye.normal, Se.normal, bi).normalize(), at.Lerp(ye.uv, Se.uv, bi), -1, -1, f ? [ Vr + (Qt - Vr) * bi, Rr + (Ei - Rr) * bi, ks + (Pi - ks) * bi ] : null, p ? [Oi / wr, ri / wr, ki / wr] : null, vr, yi); }; let Ie = null; j.length > 3 && (Ie = []); for (let ye = 0; ye < j.length; ye += 3) { let Se = 0, re = null, te = null, he = null, be = null; const Ue = D.Dot(j[ye].position, J) - ne, Ee = D.Dot(j[ye + 1].position, J) - ne, He = D.Dot(j[ye + 2].position, J) - ne, Xe = Ue > 0, rt = Ee > 0, dt = He > 0; switch (Se = (Xe ? 1 : 0) + (rt ? 1 : 0) + (dt ? 1 : 0), Se) { case 0: j.length > 3 ? (Ie.push(j[ye]), Ie.push(j[ye + 1]), Ie.push(j[ye + 2])) : Ie = j; break; case 1: if (Ie = Ie ?? new Array(), Xe && (re = j[ye + 1], te = j[ye + 2], he = ge(j[ye], re), be = ge(j[ye], te)), rt) { re = j[ye], te = j[ye + 2], he = ge(j[ye + 1], re), be = ge(j[ye + 1], te), Ie.push(he), Ie.push(te.clone()), Ie.push(re.clone()), Ie.push(te.clone()), Ie.push(he.clone()), Ie.push(be); break; } dt && (re = j[ye], te = j[ye + 1], he = ge(j[ye + 2], re), be = ge(j[ye + 2], te)), re && te && he && be && (Ie.push(re.clone()), Ie.push(te.clone()), Ie.push(he), Ie.push(be), Ie.push(he.clone()), Ie.push(te.clone())); break; case 2: Ie = Ie ?? new Array(), Xe || (re = j[ye].clone(), te = ge(re, j[ye + 1]), he = ge(re, j[ye + 2]), Ie.push(re), Ie.push(te), Ie.push(he)), rt || (re = j[ye + 1].clone(), te = ge(re, j[ye + 2]), he = ge(re, j[ye]), Ie.push(re), Ie.push(te), Ie.push(he)), dt || (re = j[ye + 2].clone(), te = ge(re, j[ye]), he = ge(re, j[ye + 1]), Ie.push(re), Ie.push(te), Ie.push(he)); break; } } return Ie; }, Z = e instanceof ke ? e : null, q = Z == null ? void 0 : Z._thinInstanceDataStorage.matrixData, le = (Z == null ? void 0 : Z.thinInstanceCount) || 1, ie = de.Matrix[0]; ie.copyFrom(Ae.IdentityReadOnly); for (let j = 0; j < le; ++j) { if (Z != null && Z.hasThinInstances && q) { const ye = j * 16; ie.setRowFromFloats(0, q[ye + 0], q[ye + 1], q[ye + 2], q[ye + 3]), ie.setRowFromFloats(1, q[ye + 4], q[ye + 5], q[ye + 6], q[ye + 7]), ie.setRowFromFloats(2, q[ye + 8], q[ye + 9], q[ye + 10], q[ye + 11]), ie.setRowFromFloats(3, q[ye + 12], q[ye + 13], q[ye + 14], q[ye + 15]); } const J = Ae.RotationYawPitchRoll(w, k, R).multiply(Ae.Translation(b.x, b.y, b.z)), ne = Ae.Invert(J), pe = e.getWorldMatrix(), ge = ie.multiply(pe).multiply(ne), Ie = new Array(3); for (let ye = 0; ye < u.length; ye += 3) { let Se = Ie; if (Se[0] = U(ye, ge), o && l ? (Se[1] = U(ye + 2, ge), Se[2] = U(ye + 1, ge)) : (Se[1] = U(ye + 1, ge), Se[2] = U(ye + 2, ge)), !(t.cullBackFaces && -Se[0].normal.z <= 0 && -Se[1].normal.z <= 0 && -Se[2].normal.z <= 0) && (Se = ee(Se, tfe), !!Se && (Se = ee(Se, ife), !!Se && (Se = ee(Se, rfe), !!Se && (Se = ee(Se, sfe), !!Se && (Se = ee(Se, nfe), !!Se && (Se = ee(Se, afe), !!Se))))))) for (let re = 0; re < Se.length; re++) { const te = Se[re]; if (L.indices.push(B), l ? (te.localPositionOverride ? (L.positions[B * 3] = te.localPositionOverride[0], L.positions[B * 3 + 1] = te.localPositionOverride[1], L.positions[B * 3 + 2] = te.localPositionOverride[2]) : f && (L.positions[B * 3] = f[te.vertexIdx], L.positions[B * 3 + 1] = f[te.vertexIdx + 1], L.positions[B * 3 + 2] = f[te.vertexIdx + 2]), te.localNormalOverride ? 
(L.normals[B * 3] = te.localNormalOverride[0], L.normals[B * 3 + 1] = te.localNormalOverride[1], L.normals[B * 3 + 2] = te.localNormalOverride[2]) : p && (L.normals[B * 3] = p[te.vertexIdx], L.normals[B * 3 + 1] = p[te.vertexIdx + 1], L.normals[B * 3 + 2] = p[te.vertexIdx + 2])) : (te.position.toArray(L.positions, B * 3), te.normal.toArray(L.normals, B * 3)), L.matricesIndices && L.matricesWeights && (te.matrixIndicesOverride ? (L.matricesIndices[B * 4] = te.matrixIndicesOverride[0], L.matricesIndices[B * 4 + 1] = te.matrixIndicesOverride[1], L.matricesIndices[B * 4 + 2] = te.matrixIndicesOverride[2], L.matricesIndices[B * 4 + 3] = te.matrixIndicesOverride[3]) : (_ && (L.matricesIndices[B * 4] = _[te.vertexIdxForBones], L.matricesIndices[B * 4 + 1] = _[te.vertexIdxForBones + 1], L.matricesIndices[B * 4 + 2] = _[te.vertexIdxForBones + 2], L.matricesIndices[B * 4 + 3] = _[te.vertexIdxForBones + 3]), C && L.matricesIndicesExtra && (L.matricesIndicesExtra[B * 4] = C[te.vertexIdxForBones], L.matricesIndicesExtra[B * 4 + 1] = C[te.vertexIdxForBones + 1], L.matricesIndicesExtra[B * 4 + 2] = C[te.vertexIdxForBones + 2], L.matricesIndicesExtra[B * 4 + 3] = C[te.vertexIdxForBones + 3])), te.matrixWeightsOverride ? (L.matricesWeights[B * 4] = te.matrixWeightsOverride[0], L.matricesWeights[B * 4 + 1] = te.matrixWeightsOverride[1], L.matricesWeights[B * 4 + 2] = te.matrixWeightsOverride[2], L.matricesWeights[B * 4 + 3] = te.matrixWeightsOverride[3]) : (v && (L.matricesWeights[B * 4] = v[te.vertexIdxForBones], L.matricesWeights[B * 4 + 1] = v[te.vertexIdxForBones + 1], L.matricesWeights[B * 4 + 2] = v[te.vertexIdxForBones + 2], L.matricesWeights[B * 4 + 3] = v[te.vertexIdxForBones + 3]), x && L.matricesWeightsExtra && (L.matricesWeightsExtra[B * 4] = x[te.vertexIdxForBones], L.matricesWeightsExtra[B * 4 + 1] = x[te.vertexIdxForBones + 1], L.matricesWeightsExtra[B * 4 + 2] = x[te.vertexIdxForBones + 2], L.matricesWeightsExtra[B * 4 + 3] = x[te.vertexIdxForBones + 3]))), t.captureUVS) te.uv.toArray(L.uvs, B * 2); else { L.uvs.push(0.5 + te.position.x / M.x); const he = 0.5 + te.position.y / M.y; L.uvs.push(hn.UseOpenGLOrientationForUV ? 1 - he : he); } B++; } } } L.indices.length === 0 && (L.indices = null), L.positions.length === 0 && (L.positions = null), L.normals.length === 0 && (L.normals = null), L.uvs.length === 0 && (L.uvs = null), ((i = L.matricesIndices) === null || i === void 0 ? void 0 : i.length) === 0 && (L.matricesIndices = null), ((r = L.matricesWeights) === null || r === void 0 ? void 0 : r.length) === 0 && (L.matricesWeights = null), ((s = L.matricesIndicesExtra) === null || s === void 0 ? void 0 : s.length) === 0 && (L.matricesIndicesExtra = null), ((n = L.matricesWeightsExtra) === null || n === void 0 ? void 0 : n.length) === 0 && (L.matricesWeightsExtra = null); const $ = new ke(c, e.getScene()); return L.applyToMesh($), l ? 
($.skeleton = e.skeleton, $.parent = e) : ($.position = b.clone(), $.rotation = new D(k, w, R)), $.computeWorldMatrix(!0), $.refreshBoundingInfo(!0, !0), $; } const ofe = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateDecal: gU }; ke.CreateDecal = (c, e, t, i, r, s) => gU(c, e, { position: t, normal: i, size: r, angle: s }); class ol { /** * Creates a new isovector from the given x and y coordinates * @param x defines the first coordinate, must be an integer * @param y defines the second coordinate, must be an integer */ constructor(e = 0, t = 0) { this.x = e, this.y = t, e !== Math.floor(e) && Ce.Warn("x is not an integer, floor(x) used"), t !== Math.floor(t) && Ce.Warn("y is not an integer, floor(y) used"); } // Operators /** * Gets a new IsoVector copied from the IsoVector * @returns a new IsoVector */ clone() { return new ol(this.x, this.y); } /** * Rotates one IsoVector 60 degrees counter clockwise about another * Please note that this is an in place operation * @param other an IsoVector a center of rotation * @returns the rotated IsoVector */ rotate60About(e) { const t = this.x; return this.x = e.x + e.y - this.y, this.y = t + this.y - e.x, this; } /** * Rotates one IsoVector 60 degrees clockwise about another * Please note that this is an in place operation * @param other an IsoVector as center of rotation * @returns the rotated IsoVector */ rotateNeg60About(e) { const t = this.x; return this.x = t + this.y - e.y, this.y = e.x + e.y - t, this; } /** * For an equilateral triangle OAB with O at isovector (0, 0) and A at isovector (m, n) * Rotates one IsoVector 120 degrees counter clockwise about the center of the triangle * Please note that this is an in place operation * @param m integer a measure a Primary triangle of order (m, n) m > n * @param n >= 0 integer a measure for a Primary triangle of order (m, n) * @returns the rotated IsoVector */ rotate120(e, t) { e !== Math.floor(e) && Ce.Warn("m not an integer only floor(m) used"), t !== Math.floor(t) && Ce.Warn("n not an integer only floor(n) used"); const i = this.x; return this.x = e - i - this.y, this.y = t + i, this; } /** * For an equilateral triangle OAB with O at isovector (0, 0) and A at isovector (m, n) * Rotates one IsoVector 120 degrees clockwise about the center of the triangle * Please note that this is an in place operation * @param m integer a measure a Primary triangle of order (m, n) m > n * @param n >= 0 integer a measure for a Primary triangle of order (m, n) * @returns the rotated IsoVector */ rotateNeg120(e, t) { e !== Math.floor(e) && Ce.Warn("m is not an integer, floor(m) used"), t !== Math.floor(t) && Ce.Warn("n is not an integer, floor(n) used"); const i = this.x; return this.x = this.y - t, this.y = e + t - i - this.y, this; } /** * Transforms an IsoVector to one in Cartesian 3D space based on an isovector * @param origin an IsoVector * @param isoGridSize * @returns Point as a Vector3 */ toCartesianOrigin(e, t) { const i = D.Zero(); return i.x = e.x + 2 * this.x * t + this.y * t, i.y = e.y + Math.sqrt(3) * this.y * t, i; } // Statics /** * Gets a new IsoVector(0, 0) * @returns a new IsoVector */ static Zero() { return new ol(0, 0); } } class qK { constructor() { this.cartesian = [], this.vertices = [], this.max = [], this.min = [], this.closestTo = [], this.innerFacets = [], this.isoVecsABOB = [], this.isoVecsOBOA = [], this.isoVecsBAOA = [], this.vertexTypes = [], this.IDATA = new uB("icosahedron", "Regular", [ [0, Uc, -1], [-Uc, 1, 0], [-1, 0, -Uc], [1, 0, -Uc], [Uc, 1, 0], [0, 
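/* Usage sketch for the decal builder defined above (registered on Mesh as CreateDecal here; publicly
   BABYLON.MeshBuilder.CreateDecal — an assumed unminified name). A decal is usually driven by a pick so
   that position and normal come from the clicked point; `targetMesh` and `decalMaterial` are
   placeholders for an existing mesh and a material (typically with a zOffset to avoid z-fighting):

     const pick = scene.pick(scene.pointerX, scene.pointerY);
     if (pick.hit && pick.pickedMesh === targetMesh) {
       const decal = BABYLON.MeshBuilder.CreateDecal("splat", targetMesh, {
         position: pick.pickedPoint,
         normal: pick.getNormal(true),       // world-space normal at the picked point
         size: new BABYLON.Vector3(1, 1, 1), // the box the source triangles are clipped against
         angle: 0,                           // rotation around the normal
       });
       decal.material = decalMaterial;
     }
*/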
Uc, 1], [-1, 0, Uc], [-Uc, -1, 0], [0, -Uc, -1], [Uc, -1, 0], [1, 0, Uc], [0, -Uc, 1] ], [ [0, 2, 1], [0, 3, 2], [0, 4, 3], [0, 5, 4], [0, 1, 5], [7, 6, 1], [8, 7, 2], [9, 8, 3], [10, 9, 4], [6, 10, 5], [2, 7, 1], [3, 8, 2], [4, 9, 3], [5, 10, 4], [1, 6, 5], [11, 6, 7], [11, 7, 8], [11, 8, 9], [11, 9, 10], [11, 10, 6] ]); } /** * Creates the PrimaryIsoTriangle Triangle OAB * @param m an integer * @param n an integer */ //operators setIndices() { let e = 12; const t = {}, i = this.m, r = this.n; let s = i, n = 1, a = 0; r !== 0 && (s = yt.HCF(i, r)), n = i / s, a = r / s; let l, o, u, h, d; const f = ol.Zero(), p = new ol(i, r), m = new ol(-r, i + r), _ = ol.Zero(), v = ol.Zero(), C = ol.Zero(); let x = [], b, S, M, R; const w = [], V = this.vertByDist, k = (L, B, U, K) => { b = L + "|" + U, S = B + "|" + K, b in t || S in t ? b in t && !(S in t) ? t[S] = t[b] : S in t && !(b in t) && (t[b] = t[S]) : (t[b] = e, t[S] = e, e++), V[U][0] > 2 ? w[t[b]] = [-V[U][0], V[U][1], t[b]] : w[t[b]] = [x[V[U][0]], V[U][1], t[b]]; }; this.IDATA.edgematch = [ [1, "B"], [2, "B"], [3, "B"], [4, "B"], [0, "B"], [10, "O", 14, "A"], [11, "O", 10, "A"], [12, "O", 11, "A"], [13, "O", 12, "A"], [14, "O", 13, "A"], [0, "O"], [1, "O"], [2, "O"], [3, "O"], [4, "O"], [19, "B", 5, "A"], [15, "B", 6, "A"], [16, "B", 7, "A"], [17, "B", 8, "A"], [18, "B", 9, "A"] ]; for (let L = 0; L < 20; L++) { if (x = this.IDATA.face[L], u = x[2], h = x[1], d = x[0], M = f.x + "|" + f.y, b = L + "|" + M, b in t || (t[b] = u, w[u] = [x[V[M][0]], V[M][1]]), M = p.x + "|" + p.y, b = L + "|" + M, b in t || (t[b] = h, w[h] = [x[V[M][0]], V[M][1]]), M = m.x + "|" + m.y, b = L + "|" + M, b in t || (t[b] = d, w[d] = [x[V[M][0]], V[M][1]]), l = this.IDATA.edgematch[L][0], o = this.IDATA.edgematch[L][1], o === "B") for (let B = 1; B < s; B++) v.x = i - B * (n + a), v.y = r + B * n, C.x = -B * a, C.y = B * (n + a), M = v.x + "|" + v.y, R = C.x + "|" + C.y, k(L, l, M, R); if (o === "O") for (let B = 1; B < s; B++) C.x = -B * a, C.y = B * (n + a), _.x = B * n, _.y = B * a, M = C.x + "|" + C.y, R = _.x + "|" + _.y, k(L, l, M, R); if (l = this.IDATA.edgematch[L][2], o = this.IDATA.edgematch[L][3], o && o === "A") for (let B = 1; B < s; B++) _.x = B * n, _.y = B * a, v.x = i - (s - B) * (n + a), v.y = r + (s - B) * n, M = _.x + "|" + _.y, R = v.x + "|" + v.y, k(L, l, M, R); for (let B = 0; B < this.vertices.length; B++) M = this.vertices[B].x + "|" + this.vertices[B].y, b = L + "|" + M, b in t || (t[b] = e++, V[M][0] > 2 ? 
w[t[b]] = [-V[M][0], V[M][1], t[b]] : w[t[b]] = [x[V[M][0]], V[M][1], t[b]]); } this.closestTo = w, this.vecToidx = t; } calcCoeffs() { const e = this.m, t = this.n, i = Math.sqrt(3) / 3, r = e * e + t * t + e * t; this.coau = (e + t) / r, this.cobu = -t / r, this.coav = -i * (e - t) / r, this.cobv = i * (2 * e + t) / r; } createInnerFacets() { const e = this.m, t = this.n; for (let i = 0; i < t + e + 1; i++) for (let r = this.min[i]; r < this.max[i] + 1; r++) r < this.max[i] && r < this.max[i + 1] + 1 && this.innerFacets.push(["|" + r + "|" + i, "|" + r + "|" + (i + 1), "|" + (r + 1) + "|" + i]), i > 0 && r < this.max[i - 1] && r + 1 < this.max[i] + 1 && this.innerFacets.push(["|" + r + "|" + i, "|" + (r + 1) + "|" + i, "|" + (r + 1) + "|" + (i - 1)]); } edgeVecsABOB() { const e = this.m, t = this.n, i = new ol(-t, e + t); for (let r = 1; r < e + t; r++) { const s = new ol(this.min[r], r), n = new ol(this.min[r - 1], r - 1), a = new ol(this.min[r + 1], r + 1), l = s.clone(), o = n.clone(), u = a.clone(); l.rotate60About(i), o.rotate60About(i), u.rotate60About(i); const h = new ol(this.max[l.y], l.y), d = new ol(this.max[l.y - 1], l.y - 1), f = new ol(this.max[l.y - 1] - 1, l.y - 1); (l.x !== h.x || l.y !== h.y) && (l.x !== d.x ? (this.vertexTypes.push([1, 0, 0]), this.isoVecsABOB.push([s, d, f]), this.vertexTypes.push([1, 0, 0]), this.isoVecsABOB.push([s, f, h])) : l.y === u.y ? (this.vertexTypes.push([1, 1, 0]), this.isoVecsABOB.push([s, n, d]), this.vertexTypes.push([1, 0, 1]), this.isoVecsABOB.push([s, d, a])) : (this.vertexTypes.push([1, 1, 0]), this.isoVecsABOB.push([s, n, d]), this.vertexTypes.push([1, 0, 0]), this.isoVecsABOB.push([s, d, h]))); } } mapABOBtoOBOA() { const e = new ol(0, 0); for (let t = 0; t < this.isoVecsABOB.length; t++) { const i = []; for (let r = 0; r < 3; r++) e.x = this.isoVecsABOB[t][r].x, e.y = this.isoVecsABOB[t][r].y, this.vertexTypes[t][r] === 0 && e.rotateNeg120(this.m, this.n), i.push(e.clone()); this.isoVecsOBOA.push(i); } } mapABOBtoBAOA() { const e = new ol(0, 0); for (let t = 0; t < this.isoVecsABOB.length; t++) { const i = []; for (let r = 0; r < 3; r++) e.x = this.isoVecsABOB[t][r].x, e.y = this.isoVecsABOB[t][r].y, this.vertexTypes[t][r] === 1 && e.rotate120(this.m, this.n), i.push(e.clone()); this.isoVecsBAOA.push(i); } } // eslint-disable-next-line @typescript-eslint/naming-convention MapToFace(e, t) { const i = this.IDATA.face[e], r = i[2], s = i[1], n = i[0], a = D.FromArray(this.IDATA.vertex[r]), l = D.FromArray(this.IDATA.vertex[s]), o = D.FromArray(this.IDATA.vertex[n]), u = l.subtract(a), h = o.subtract(a), d = u.scale(this.coau).add(h.scale(this.cobu)), f = u.scale(this.coav).add(h.scale(this.cobv)); let p, m = de.Vector3[0]; for (let _ = 0; _ < this.cartesian.length; _++) m = d.scale(this.cartesian[_].x).add(f.scale(this.cartesian[_].y)).add(a), m.x, m.y, m.z, p = e + "|" + this.vertices[_].x + "|" + this.vertices[_].y, t.vertex[this.vecToidx[p]] = [m.x, m.y, m.z]; } //statics /**Creates a primary triangle * @internal */ build(e, t) { const i = [], r = ol.Zero(), s = new ol(e, t), n = new ol(-t, e + t); i.push(r, s, n); for (let S = t; S < e + 1; S++) for (let M = 0; M < e + 1 - S; M++) i.push(new ol(M, S)); if (t > 0) { const S = yt.HCF(e, t), M = e / S, R = t / S; for (let V = 1; V < S; V++) i.push(new ol(V * M, V * R)), i.push(new ol(-V * R, V * (M + R))), i.push(new ol(e - V * (M + R), t + V * M)); const w = e / t; for (let V = 1; V < t; V++) for (let k = 0; k < V * w; k++) i.push(new ol(k, V)), i.push(new ol(k, V).rotate120(e, 
t)), i.push(new ol(k, V).rotateNeg120(e, t)); } i.sort((S, M) => S.x - M.x), i.sort((S, M) => S.y - M.y); const a = new Array(e + t + 1), l = new Array(e + t + 1); for (let S = 0; S < a.length; S++) a[S] = 1 / 0, l[S] = -1 / 0; let o = 0, u = 0; const h = i.length; for (let S = 0; S < h; S++) u = i[S].x, o = i[S].y, a[o] = Math.min(u, a[o]), l[o] = Math.max(u, l[o]); const d = (S, M) => { const R = S.clone(); return M === "A" && R.rotateNeg120(e, t), M === "B" && R.rotate120(e, t), R.x < 0 ? R.y : R.x + R.y; }, f = [], p = [], m = [], _ = [], v = {}, C = []; let x = -1, b = -1; for (let S = 0; S < h; S++) f[S] = i[S].toCartesianOrigin(new ol(0, 0), 0.5), p[S] = d(i[S], "O"), m[S] = d(i[S], "A"), _[S] = d(i[S], "B"), p[S] === m[S] && m[S] === _[S] ? (x = 3, b = p[S]) : p[S] === m[S] ? (x = 4, b = p[S]) : m[S] === _[S] ? (x = 5, b = m[S]) : _[S] === p[S] && (x = 6, b = p[S]), p[S] < m[S] && p[S] < _[S] && (x = 2, b = p[S]), m[S] < p[S] && m[S] < _[S] && (x = 1, b = m[S]), _[S] < m[S] && _[S] < p[S] && (x = 0, b = _[S]), C.push([x, b, i[S].x, i[S].y]); C.sort((S, M) => S[2] - M[2]), C.sort((S, M) => S[3] - M[3]), C.sort((S, M) => S[1] - M[1]), C.sort((S, M) => S[0] - M[0]); for (let S = 0; S < C.length; S++) v[C[S][2] + "|" + C[S][3]] = [C[S][0], C[S][1], S]; return this.m = e, this.n = t, this.vertices = i, this.vertByDist = v, this.cartesian = f, this.min = a, this.max = l, this; } } class uB { constructor(e, t, i, r) { this.name = e, this.category = t, this.vertex = i, this.face = r; } } class cN extends uB { /** * @internal */ innerToData(e, t) { for (let i = 0; i < t.innerFacets.length; i++) this.face.push(t.innerFacets[i].map((r) => t.vecToidx[e + r])); } /** * @internal */ mapABOBtoDATA(e, t) { const i = t.IDATA.edgematch[e][0]; for (let r = 0; r < t.isoVecsABOB.length; r++) { const s = []; for (let n = 0; n < 3; n++) t.vertexTypes[r][n] === 0 ? s.push(e + "|" + t.isoVecsABOB[r][n].x + "|" + t.isoVecsABOB[r][n].y) : s.push(i + "|" + t.isoVecsABOB[r][n].x + "|" + t.isoVecsABOB[r][n].y); this.face.push([t.vecToidx[s[0]], t.vecToidx[s[1]], t.vecToidx[s[2]]]); } } /** * @internal */ mapOBOAtoDATA(e, t) { const i = t.IDATA.edgematch[e][0]; for (let r = 0; r < t.isoVecsOBOA.length; r++) { const s = []; for (let n = 0; n < 3; n++) t.vertexTypes[r][n] === 1 ? s.push(e + "|" + t.isoVecsOBOA[r][n].x + "|" + t.isoVecsOBOA[r][n].y) : s.push(i + "|" + t.isoVecsOBOA[r][n].x + "|" + t.isoVecsOBOA[r][n].y); this.face.push([t.vecToidx[s[0]], t.vecToidx[s[1]], t.vecToidx[s[2]]]); } } /** * @internal */ mapBAOAtoDATA(e, t) { const i = t.IDATA.edgematch[e][2]; for (let r = 0; r < t.isoVecsBAOA.length; r++) { const s = []; for (let n = 0; n < 3; n++) t.vertexTypes[r][n] === 1 ? s.push(e + "|" + t.isoVecsBAOA[r][n].x + "|" + t.isoVecsBAOA[r][n].y) : s.push(i + "|" + t.isoVecsBAOA[r][n].x + "|" + t.isoVecsBAOA[r][n].y); this.face.push([t.vecToidx[s[0]], t.vecToidx[s[1]], t.vecToidx[s[2]]]); } } /** * @internal */ orderData(e) { const t = []; for (let n = 0; n < 13; n++) t[n] = []; const i = e.closestTo; for (let n = 0; n < i.length; n++) i[n][0] > -1 ? 
i[n][1] > 0 && t[i[n][0]].push([n, i[n][1]]) : t[12].push([n, i[n][0]]); const r = []; for (let n = 0; n < 12; n++) r[n] = n; let s = 12; for (let n = 0; n < 12; n++) { t[n].sort((a, l) => a[1] - l[1]); for (let a = 0; a < t[n].length; a++) r[t[n][a][0]] = s++; } for (let n = 0; n < t[12].length; n++) r[t[12][n][0]] = s++; for (let n = 0; n < this.vertex.length; n++) this.vertex[n].push(r[n]); this.vertex.sort((n, a) => n[3] - a[3]); for (let n = 0; n < this.vertex.length; n++) this.vertex[n].pop(); for (let n = 0; n < this.face.length; n++) for (let a = 0; a < this.face[n].length; a++) this.face[n][a] = r[this.face[n][a]]; this.sharedNodes = t[12].length, this.poleNodes = this.vertex.length - this.sharedNodes; } /** * @internal */ setOrder(e, t) { const i = [], r = []; let s = t.pop(); r.push(s); let n = this.face[s].indexOf(e); n = (n + 2) % 3; let a = this.face[s][n]; i.push(a); let l = 0; for (; t.length > 0; ) s = t[l], this.face[s].indexOf(a) > -1 ? (n = (this.face[s].indexOf(a) + 1) % 3, a = this.face[s][n], i.push(a), r.push(s), t.splice(l, 1), l = 0) : l++; return this.adjacentFaces.push(i), r; } /** * @internal */ toGoldbergPolyhedronData() { const e = new uB("GeoDual", "Goldberg", [], []); e.name = "GD dual"; const t = this.vertex.length, i = new Array(t); for (let o = 0; o < t; o++) i[o] = []; for (let o = 0; o < this.face.length; o++) for (let u = 0; u < 3; u++) i[this.face[o][u]].push(o); let r = 0, s = 0, n = 0, a = [], l = []; this.adjacentFaces = []; for (let o = 0; o < i.length; o++) e.face[o] = this.setOrder(o, i[o].concat([])), i[o].forEach((u) => { r = 0, s = 0, n = 0, a = this.face[u]; for (let h = 0; h < 3; h++) l = this.vertex[a[h]], r += l[0], s += l[1], n += l[2]; e.vertex[u] = [r / 3, s / 3, n / 3]; }); return e; } //statics /**Builds the data for a Geodesic Polyhedron from a primary triangle * @param primTri the primary triangle * @internal */ static BuildGeodesicData(e) { const t = new cN("Geodesic-m-n", "Geodesic", [ [0, Uc, -1], [-Uc, 1, 0], [-1, 0, -Uc], [1, 0, -Uc], [Uc, 1, 0], [0, Uc, 1], [-1, 0, Uc], [-Uc, -1, 0], [0, -Uc, -1], [Uc, -1, 0], [1, 0, Uc], [0, -Uc, 1] ], []); e.setIndices(), e.calcCoeffs(), e.createInnerFacets(), e.edgeVecsABOB(), e.mapABOBtoOBOA(), e.mapABOBtoBAOA(); for (let r = 0; r < e.IDATA.face.length; r++) e.MapToFace(r, t), t.innerToData(r, e), e.IDATA.edgematch[r][1] === "B" && t.mapABOBtoDATA(r, e), e.IDATA.edgematch[r][1] === "O" && t.mapOBOAtoDATA(r, e), e.IDATA.edgematch[r][3] === "A" && t.mapBAOAtoDATA(r, e); t.orderData(e); const i = 1; return t.vertex = t.vertex.map(function(r) { const s = r[0], n = r[1], a = r[2], l = Math.sqrt(s * s + n * n + a * a); return r[0] *= i / l, r[1] *= i / l, r[2] *= i / l, r; }), t; } } function Eie(c, e, t = null) { let i = e.m || 1; i !== Math.floor(i) && Ce.Warn("m not an integer only floor(m) used"); let r = e.n || 0; if (r !== Math.floor(r) && Ce.Warn("n not an integer only floor(n) used"), r > i) { const o = r; r = i, i = o, Ce.Warn("n > m therefore m and n swapped"); } const s = new qK(); s.build(i, r); const a = { custom: cN.BuildGeodesicData(s), size: e.size, sizeX: e.sizeX, sizeY: e.sizeY, sizeZ: e.sizeZ, faceUV: e.faceUV, faceColors: e.faceColors, flat: e.flat, updatable: e.updatable, sideOrientation: e.sideOrientation, frontUVs: e.frontUVs, backUVs: e.backUVs }; return AP(c, a, t); } ke._GoldbergMeshParser = (c, e) => uN.Parse(c, e); class uN extends ke { constructor() { super(...arguments), this.goldbergData = { faceColors: [], faceCenters: [], faceZaxis: [], faceXaxis: [], 
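/* Usage sketch for the geodesic builder defined just above (publicly BABYLON.MeshBuilder.CreateGeodesic —
   an assumed unminified name). m and n are the integer orders of the primary triangle; as the warnings
   above note, non-integers are floored and m/n are swapped when n > m. Assumes an existing `scene`:

     const geo = BABYLON.MeshBuilder.CreateGeodesic("geo", { m: 4, n: 1, size: 2 }, scene);
*/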
faceYaxis: [], nbSharedFaces: 0, nbUnsharedFaces: 0, nbFaces: 0, nbFacesAtPole: 0, adjacentFaces: [] }; } /** * Gets the related Goldberg face from pole infos * @param poleOrShared Defines the pole index or the shared face index if the fromPole parameter is passed in * @param fromPole Defines an optional pole index to find the related info from * @returns the goldberg face number */ relatedGoldbergFace(e, t) { return t === void 0 ? (e > this.goldbergData.nbUnsharedFaces - 1 && (Ce.Warn("Maximum number of unshared faces used"), e = this.goldbergData.nbUnsharedFaces - 1), this.goldbergData.nbUnsharedFaces + e) : (e > 11 && (Ce.Warn("Last pole used"), e = 11), t > this.goldbergData.nbFacesAtPole - 1 && (Ce.Warn("Maximum number of faces at a pole used"), t = this.goldbergData.nbFacesAtPole - 1), 12 + e * this.goldbergData.nbFacesAtPole + t); } _changeGoldbergFaceColors(e) { for (let i = 0; i < e.length; i++) { const r = e[i][0], s = e[i][1], n = e[i][2]; for (let a = r; a < s + 1; a++) this.goldbergData.faceColors[a] = n; } const t = []; for (let i = 0; i < 12; i++) for (let r = 0; r < 5; r++) t.push(this.goldbergData.faceColors[i].r, this.goldbergData.faceColors[i].g, this.goldbergData.faceColors[i].b, this.goldbergData.faceColors[i].a); for (let i = 12; i < this.goldbergData.faceColors.length; i++) for (let r = 0; r < 6; r++) t.push(this.goldbergData.faceColors[i].r, this.goldbergData.faceColors[i].g, this.goldbergData.faceColors[i].b, this.goldbergData.faceColors[i].a); return t; } /** * Set new goldberg face colors * @param colorRange the new color to apply to the mesh */ setGoldbergFaceColors(e) { const t = this._changeGoldbergFaceColors(e); this.setVerticesData(Y.ColorKind, t); } /** * Updates new goldberg face colors * @param colorRange the new color to apply to the mesh */ updateGoldbergFaceColors(e) { const t = this._changeGoldbergFaceColors(e); this.updateVerticesData(Y.ColorKind, t); } _changeGoldbergFaceUVs(e) { const t = this.getVerticesData(Y.UVKind); for (let i = 0; i < e.length; i++) { const r = e[i][0], s = e[i][1], n = e[i][2], a = e[i][3], l = e[i][4], o = [], u = []; let h, d; for (let f = 0; f < 5; f++) h = n.x + a * Math.cos(l + f * Math.PI / 2.5), d = n.y + a * Math.sin(l + f * Math.PI / 2.5), h < 0 && (h = 0), h > 1 && (h = 1), o.push(h, d); for (let f = 0; f < 6; f++) h = n.x + a * Math.cos(l + f * Math.PI / 3), d = n.y + a * Math.sin(l + f * Math.PI / 3), h < 0 && (h = 0), h > 1 && (h = 1), u.push(h, d); for (let f = r; f < Math.min(12, s + 1); f++) for (let p = 0; p < 5; p++) t[10 * f + 2 * p] = o[2 * p], t[10 * f + 2 * p + 1] = o[2 * p + 1]; for (let f = Math.max(12, r); f < s + 1; f++) for (let p = 0; p < 6; p++) t[12 * f - 24 + 2 * p] = u[2 * p], t[12 * f - 23 + 2 * p] = u[2 * p + 1]; } return t; } /** * set new goldberg face UVs * @param uvRange the new UVs to apply to the mesh */ setGoldbergFaceUVs(e) { const t = this._changeGoldbergFaceUVs(e); this.setVerticesData(Y.UVKind, t); } /** * Updates new goldberg face UVs * @param uvRange the new UVs to apply to the mesh */ updateGoldbergFaceUVs(e) { const t = this._changeGoldbergFaceUVs(e); this.updateVerticesData(Y.UVKind, t); } /** * Places a mesh on a particular face of the goldberg polygon * @param mesh Defines the mesh to position * @param face Defines the face to position onto * @param position Defines the position relative to the face we are positioning the mesh onto */ placeOnGoldbergFaceAt(e, t, i) { const r = D.RotationFromAxis(this.goldbergData.faceXaxis[t], this.goldbergData.faceYaxis[t], 
this.goldbergData.faceZaxis[t]); e.rotation = r, e.position = this.goldbergData.faceCenters[t].add(this.goldbergData.faceXaxis[t].scale(i.x)).add(this.goldbergData.faceYaxis[t].scale(i.y)).add(this.goldbergData.faceZaxis[t].scale(i.z)); } /** * Serialize current mesh * @param serializationObject defines the object which will receive the serialization data */ serialize(e) { super.serialize(e), e.type = "GoldbergMesh"; const t = {}; if (t.adjacentFaces = this.goldbergData.adjacentFaces, t.nbSharedFaces = this.goldbergData.nbSharedFaces, t.nbUnsharedFaces = this.goldbergData.nbUnsharedFaces, t.nbFaces = this.goldbergData.nbFaces, t.nbFacesAtPole = this.goldbergData.nbFacesAtPole, this.goldbergData.faceColors) { t.faceColors = []; for (const i of this.goldbergData.faceColors) t.faceColors.push(i.asArray()); } if (this.goldbergData.faceCenters) { t.faceCenters = []; for (const i of this.goldbergData.faceCenters) t.faceCenters.push(i.asArray()); } if (this.goldbergData.faceZaxis) { t.faceZaxis = []; for (const i of this.goldbergData.faceZaxis) t.faceZaxis.push(i.asArray()); } if (this.goldbergData.faceYaxis) { t.faceYaxis = []; for (const i of this.goldbergData.faceYaxis) t.faceYaxis.push(i.asArray()); } if (this.goldbergData.faceXaxis) { t.faceXaxis = []; for (const i of this.goldbergData.faceXaxis) t.faceXaxis.push(i.asArray()); } e.goldbergData = t; } /** * Parses a serialized goldberg mesh * @param parsedMesh the serialized mesh * @param scene the scene to create the goldberg mesh in * @returns the created goldberg mesh */ static Parse(e, t) { const i = e.goldbergData; i.faceColors = i.faceColors.map((s) => Et.FromArray(s)), i.faceCenters = i.faceCenters.map((s) => D.FromArray(s)), i.faceZaxis = i.faceZaxis.map((s) => D.FromArray(s)), i.faceXaxis = i.faceXaxis.map((s) => D.FromArray(s)), i.faceYaxis = i.faceYaxis.map((s) => D.FromArray(s)); const r = new uN(e.name, t); return r.goldbergData = i, r; } } function Tie(c, e) { const t = c.size, i = c.sizeX || t || 1, r = c.sizeY || t || 1, s = c.sizeZ || t || 1, n = c.sideOrientation === 0 ? 0 : c.sideOrientation || Ot.DEFAULTSIDE, a = [], l = [], o = [], u = []; let h = 1 / 0, d = -1 / 0, f = 1 / 0, p = -1 / 0; for (let v = 0; v < e.vertex.length; v++) h = Math.min(h, e.vertex[v][0] * i), d = Math.max(d, e.vertex[v][0] * i), f = Math.min(f, e.vertex[v][1] * r), p = Math.max(p, e.vertex[v][1] * r); let m = 0; for (let v = 0; v < e.face.length; v++) { const C = e.face[v], x = D.FromArray(e.vertex[C[0]]), b = D.FromArray(e.vertex[C[2]]), S = D.FromArray(e.vertex[C[1]]), M = b.subtract(x), R = S.subtract(x), w = D.Cross(R, M).normalize(); for (let V = 0; V < C.length; V++) { o.push(w.x, w.y, w.z); const k = e.vertex[C[V]]; a.push(k[0] * i, k[1] * r, k[2] * s); const L = (k[1] * r - f) / (p - f); u.push((k[0] * i - h) / (d - h), hn.UseOpenGLOrientationForUV ? 
1 - L : L); } for (let V = 0; V < C.length - 2; V++) l.push(m, m + V + 2, m + V + 1); m += C.length; } Ot._ComputeSides(n, a, l, o, u); const _ = new Ot(); return _.positions = a, _.indices = l, _.normals = o, _.uvs = u, _; } function Sie(c, e, t = null) { const i = e.size, r = e.sizeX || i || 1, s = e.sizeY || i || 1, n = e.sizeZ || i || 1; let a = e.m || 1; a !== Math.floor(a) && Ce.Warn("m not an integer only floor(m) used"); let l = e.n || 0; if (l !== Math.floor(l) && Ce.Warn("n not an integer only floor(n) used"), l > a) { const p = l; l = a, a = p, Ce.Warn("n > m therefore m and n swapped"); } const o = new qK(); o.build(a, l); const u = cN.BuildGeodesicData(o), h = u.toGoldbergPolyhedronData(), d = new uN(c, t); e.sideOrientation = ke._GetDefaultSideOrientation(e.sideOrientation), d._originalBuilderSideOrientation = e.sideOrientation, Tie(e, h).applyToMesh(d, e.updatable), d.goldbergData.nbSharedFaces = u.sharedNodes, d.goldbergData.nbUnsharedFaces = u.poleNodes, d.goldbergData.adjacentFaces = u.adjacentFaces, d.goldbergData.nbFaces = d.goldbergData.nbSharedFaces + d.goldbergData.nbUnsharedFaces, d.goldbergData.nbFacesAtPole = (d.goldbergData.nbUnsharedFaces - 12) / 12; for (let p = 0; p < u.vertex.length; p++) d.goldbergData.faceCenters.push(D.FromArray(u.vertex[p])), d.goldbergData.faceCenters[p].x *= r, d.goldbergData.faceCenters[p].y *= s, d.goldbergData.faceCenters[p].z *= n, d.goldbergData.faceColors.push(new Et(1, 1, 1, 1)); for (let p = 0; p < h.face.length; p++) { const m = h.face[p], _ = D.FromArray(h.vertex[m[0]]), v = D.FromArray(h.vertex[m[2]]), C = D.FromArray(h.vertex[m[1]]), x = v.subtract(_), b = C.subtract(_), S = D.Cross(b, x).normalize(), M = D.Cross(b, S).normalize(); d.goldbergData.faceXaxis.push(b.normalize()), d.goldbergData.faceYaxis.push(S), d.goldbergData.faceZaxis.push(M); } return d; } class lfe { /** Create the ShapePath used to support glyphs */ constructor(e) { this._paths = [], this._tempPaths = [], this._holes = [], this._resolution = e; } /** Move the virtual cursor to a coordinate */ moveTo(e, t) { this._currentPath = new _w(e, t), this._tempPaths.push(this._currentPath); } /** Draw a line from the virtual cursor to a given coordinate */ lineTo(e, t) { this._currentPath.addLineTo(e, t); } /** Create a quadratic curve from the virtual cursor to a given coordinate */ quadraticCurveTo(e, t, i, r) { this._currentPath.addQuadraticCurveTo(e, t, i, r, this._resolution); } /** Create a bezier curve from the virtual cursor to a given coordinate */ bezierCurveTo(e, t, i, r, s, n) { this._currentPath.addBezierCurveTo(e, t, i, r, s, n, this._resolution); } /** Extract holes based on CW / CCW */ extractHoles() { for (const e of this._tempPaths) e.area() > 0 ? 
this._holes.push(e) : this._paths.push(e); if (!this._paths.length && this._holes.length) { const e = this._holes; this._holes = this._paths, this._paths = e; } this._tempPaths.length = 0; } /** Gets the list of paths */ get paths() { return this._paths; } /** Gets the list of holes */ get holes() { return this._holes; } } function cfe(c, e, t, i, r, s) { const n = s.glyphs[c] || s.glyphs["?"]; if (!n) return null; const a = new lfe(r); if (n.o) { const l = n.o.split(" "); for (let o = 0, u = l.length; o < u; ) switch (l[o++]) { case "m": { const d = parseInt(l[o++]) * e + t, f = parseInt(l[o++]) * e + i; a.moveTo(d, f); break; } case "l": { const d = parseInt(l[o++]) * e + t, f = parseInt(l[o++]) * e + i; a.lineTo(d, f); break; } case "q": { const d = parseInt(l[o++]) * e + t, f = parseInt(l[o++]) * e + i, p = parseInt(l[o++]) * e + t, m = parseInt(l[o++]) * e + i; a.quadraticCurveTo(p, m, d, f); break; } case "b": { const d = parseInt(l[o++]) * e + t, f = parseInt(l[o++]) * e + i, p = parseInt(l[o++]) * e + t, m = parseInt(l[o++]) * e + i, _ = parseInt(l[o++]) * e + t, v = parseInt(l[o++]) * e + i; a.bezierCurveTo(p, m, _, v, d, f); break; } } } return a.extractHoles(), { offsetX: n.ha * e, shapePath: a }; } function JK(c, e, t, i) { const r = Array.from(c), s = e / i.resolution, n = (i.boundingBox.yMax - i.boundingBox.yMin + i.underlineThickness) * s, a = []; let l = 0, o = 0; for (let u = 0; u < r.length; u++) { const h = r[u]; if (h === ` `) l = 0, o -= n; else { const d = cfe(h, s, l, o, t, i); d && (l += d.offsetX, a.push(d.shapePath)); } } return a; } function Mie(c, e, t, i = { size: 50, resolution: 8, depth: 1 }, r = null, s = earcut) { var n, a; const l = JK(e, i.size || 50, i.resolution || 8, t), o = []; let u = 0; for (const d of l) { if (!d.paths.length) continue; const f = d.holes.slice(); for (const p of d.paths) { const m = [], _ = [], v = p.getPoints(); for (const b of v) _.push(new D(b.x, 0, b.y)); const C = f.slice(); for (const b of C) { const S = b.getPoints(); let M = !1; for (const w of S) if (p.isPointInside(w)) { M = !0; break; } if (!M) continue; const R = []; for (const w of S) R.push(new D(w.x, 0, w.y)); m.push(R), f.splice(f.indexOf(b), 1); } if (!m.length && f.length) for (const b of f) { const S = b.getPoints(), M = []; for (const R of S) M.push(new D(R.x, 0, R.y)); m.push(M); } const x = aN(c, { shape: _, holes: m.length ? m : void 0, depth: i.depth || 1, faceUV: i.faceUV || ((n = i.perLetterFaceUV) === null || n === void 0 ? void 0 : n.call(i, u)), faceColors: i.faceColors || ((a = i.perLetterFaceColors) === null || a === void 0 ? 
void 0 : a.call(i, u)), sideOrientation: ke._GetDefaultSideOrientation(i.sideOrientation || ke.DOUBLESIDE) }, r, s); o.push(x), u++; } } const h = ke.MergeMeshes(o, !0, !0); if (h) { const d = h.getBoundingInfo().boundingBox; h.position.x += -(d.minimumWorld.x + d.maximumWorld.x) / 2, h.position.y += -(d.minimumWorld.y + d.maximumWorld.y) / 2, h.position.z += -(d.minimumWorld.z + d.maximumWorld.z) / 2 + d.extendSize.z, h.name = c; const f = new xi("pivot", r); f.rotation.x = -Math.PI / 2, h.parent = f, h.bakeCurrentTransformIntoVertices(), h.parent = null, f.dispose(); } return h; } const mo = { CreateBox: B4, CreateTiledBox: WK, CreateSphere: Rd, CreateDisc: Cw, CreateIcoSphere: GL, CreateRibbon: nx, CreateCylinder: Hf, CreateTorus: o6, CreateTorusKnot: fU, CreateLineSystem: tP, CreateLines: Ba, CreateDashedLines: pU, ExtrudeShape: oN, ExtrudeShapeCustom: lN, CreateLathe: _U, CreateTiledPlane: GK, CreatePlane: hx, CreateGround: zI, CreateTiledGround: sU, CreateGroundFromHeightMap: nU, CreatePolygon: nN, ExtrudePolygon: aN, CreateTube: mU, CreatePolyhedron: AP, CreateGeodesic: Eie, CreateGoldberg: Sie, CreateDecal: gU, CreateCapsule: sN, CreateText: Mie }; class ufe { /** * Creates a new PhysicsViewer * @param scene defines the hosting scene */ constructor(e) { if (this._impostors = [], this._meshes = [], this._bodies = [], this._inertiaBodies = [], this._constraints = [], this._bodyMeshes = [], this._inertiaMeshes = [], this._constraintMeshes = [], this._numMeshes = 0, this._numBodies = 0, this._numInertiaBodies = 0, this._numConstraints = 0, this._debugMeshMeshes = new Array(), this._constraintAxesSize = 0.4, this._scene = e || gi.LastCreatedScene, !this._scene) return; const t = this._scene.getPhysicsEngine(); t && (this._physicsEnginePlugin = t.getPhysicsPlugin()), this._utilityLayer = new bn(this._scene, !1), this._utilityLayer.pickUtilitySceneFirst = !1, this._utilityLayer.utilityLayerScene.autoClearDepthAndStencil = !0; } /** * Updates the debug meshes of the physics engine. * * This code is useful for synchronizing the debug meshes of the physics engine with the physics impostor and mesh. * It checks if the impostor is disposed and if the plugin version is 1, then it syncs the mesh with the impostor. * This ensures that the debug meshes are up to date with the physics engine. */ _updateDebugMeshes() { const e = this._physicsEnginePlugin; (e == null ? void 0 : e.getPluginVersion()) === 1 ? this._updateDebugMeshesV1() : this._updateDebugMeshesV2(); } /** * Updates the debug meshes of the physics engine. * * This method is useful for synchronizing the debug meshes with the physics impostors. * It iterates through the impostors and meshes, and if the plugin version is 1, it syncs the mesh with the impostor. * This ensures that the debug meshes accurately reflect the physics impostors, which is important for debugging the physics engine. */ _updateDebugMeshesV1() { const e = this._physicsEnginePlugin; for (let t = 0; t < this._numMeshes; t++) { const i = this._impostors[t]; if (i) if (i.isDisposed) this.hideImpostor(this._impostors[t--]); else { if (i.type === tr.MeshImpostor) continue; const r = this._meshes[t]; r && e && e.syncMeshWithImpostor(r, i); } } } /** * Updates the debug meshes of the physics engine for V2 plugin. * * This method is useful for synchronizing the debug meshes of the physics engine with the current state of the bodies. * It iterates through the bodies array and updates the debug meshes with the current transform of each body. 
* This ensures that the debug meshes accurately reflect the current state of the physics engine. */ _updateDebugMeshesV2() { const e = this._physicsEnginePlugin; for (let t = 0; t < this._numBodies; t++) { const i = this._bodies[t], r = this._bodyMeshes[t]; i && r && e.syncTransform(i, r); } } _updateInertiaMeshes() { for (let e = 0; e < this._numInertiaBodies; e++) { const t = this._inertiaBodies[e], i = this._inertiaMeshes[e]; t && i && this._updateDebugInertia(t, i); } } _updateDebugInertia(e, t) { var i; const r = Ae.Identity(), s = Ae.Identity(), n = Ae.Identity(); if (e._pluginDataInstances.length) { const a = t, l = a._thinInstanceDataStorage.matrixData, o = e.transformNode._thinInstanceDataStorage.matrixData; for (let u = 0; u < e._pluginDataInstances.length; u++) { const h = e.getMassProperties(u); this._getMeshDebugInertiaMatrixToRef(h, r), Ae.FromArrayToRef(o, u * 16, s), r.multiplyToRef(s, n), n.copyToArray(l, u * 16); } a.thinInstanceBufferUpdated("matrix"); } else { const a = e.getMassProperties(); if (this._getMeshDebugInertiaMatrixToRef(a, r), (i = e.transformNode.rotationQuaternion) === null || i === void 0 || i.toRotationMatrix(s), s.setTranslation(e.transformNode.position), e.transformNode.parent) { const l = e.transformNode.parent.computeWorldMatrix(!0); s.multiplyToRef(l, s); } r.multiplyToRef(s, r), r.decomposeToTransformNode(t); } } _updateDebugConstraints() { for (let e = 0; e < this._numConstraints; e++) { const t = this._constraints[e], i = this._constraintMeshes[e]; t && i && this._updateDebugConstraint(t, i); } } /** * Given a scaling vector, make all of its components * 1, preserving the sign * @param scaling */ _makeScalingUnitInPlace(e) { Math.abs(e.x - 1) > Sr && (e.x = 1 * Math.sign(e.x)), Math.abs(e.y - 1) > Sr && (e.y = 1 * Math.sign(e.y)), Math.abs(e.z - 1) > Sr && (e.z = 1 * Math.sign(e.z)); } _updateDebugConstraint(e, t) { if (!e._initOptions) return; const { pivotA: i, pivotB: r, axisA: s, axisB: n, perpAxisA: a, perpAxisB: l } = e._initOptions; !i || !r || !s || !n || !a || !l || t.getDescendants(!0).forEach((o) => { const u = o.getDescendants(!0)[0], h = o.getDescendants(!0)[1], { parentBody: d, parentBodyIndex: f } = u.metadata, { childBody: p, childBodyIndex: m } = h.metadata, _ = this._getTransformFromBodyToRef(d, de.Matrix[0], f), v = this._getTransformFromBodyToRef(p, de.Matrix[1], m); _.decomposeToTransformNode(u), this._makeScalingUnitInPlace(u.scaling), v.decomposeToTransformNode(h), this._makeScalingUnitInPlace(h.scaling); const C = u.getDescendants(!0)[0]; C.position.copyFrom(i); const x = h.getDescendants(!0)[0]; x.position.copyFrom(r), Ze.FromRotationMatrixToRef(Ae.FromXYZAxesToRef(s, a, D.CrossToRef(s, a, de.Vector3[0]), de.Matrix[0]), C.rotationQuaternion), Ze.FromRotationMatrixToRef(Ae.FromXYZAxesToRef(n, l, D.CrossToRef(n, l, de.Vector3[1]), de.Matrix[1]), x.rotationQuaternion); }); } /** * Renders a specified physic impostor * @param impostor defines the impostor to render * @param targetMesh defines the mesh represented by the impostor * @returns the new debug mesh used to render the impostor */ showImpostor(e, t) { if (!this._scene) return null; for (let r = 0; r < this._numMeshes; r++) if (this._impostors[r] == e) return null; const i = this._getDebugMesh(e, t); return i && (this._impostors[this._numMeshes] = e, this._meshes[this._numMeshes] = i, this._numMeshes === 0 && (this._renderFunction = () => this._updateDebugMeshes(), this._scene.registerBeforeRender(this._renderFunction)), this._numMeshes++), i; } /** * Shows a debug 
mesh for a given physics body. * @param body The physics body to show. * @returns The debug mesh, or null if the body is already shown. * * This function is useful for visualizing the physics body in the scene. * It creates a debug mesh for the given body and adds it to the scene. * It also registers a before render function to update the debug mesh position and rotation. */ showBody(e) { if (!this._scene) return null; for (let i = 0; i < this._numBodies; i++) if (this._bodies[i] == e) return null; const t = this._getDebugBodyMesh(e); return t && (this._bodies[this._numBodies] = e, this._bodyMeshes[this._numBodies] = t, this._numBodies === 0 && (this._renderFunction = () => this._updateDebugMeshes(), this._scene.registerBeforeRender(this._renderFunction)), this._numBodies++), t; } /** * Shows a debug box corresponding to the inertia of a given body * @param body */ showInertia(e) { if (!this._scene) return null; for (let i = 0; i < this._numInertiaBodies; i++) if (this._inertiaBodies[i] == e) return null; const t = this._getDebugInertiaMesh(e); return t && (this._inertiaBodies[this._numInertiaBodies] = e, this._inertiaMeshes[this._numInertiaBodies] = t, this._numInertiaBodies === 0 && (this._inertiaRenderFunction = () => this._updateInertiaMeshes(), this._scene.registerBeforeRender(this._inertiaRenderFunction)), this._numInertiaBodies++), t; } /** * Shows a debug mesh for a given physics constraint. * @param constraint the physics constraint to show * @returns the debug mesh, or null if the constraint is already shown */ showConstraint(e) { if (!this._scene) return null; for (let i = 0; i < this._numConstraints; i++) if (this._constraints[i] == e) return null; const t = this._getDebugConstraintMesh(e); return t && (this._constraints[this._numConstraints] = e, this._constraintMeshes[this._numConstraints] = t, this._numConstraints === 0 && (this._constraintRenderFunction = () => this._updateDebugConstraints(), this._scene.registerBeforeRender(this._constraintRenderFunction)), this._numConstraints++), t; } /** * Hides an impostor from the scene. * @param impostor - The impostor to hide. * * This method is useful for hiding an impostor from the scene. It removes the * impostor from the utility layer scene, disposes the mesh, and removes the * impostor from the list of impostors. If the impostor is the last one in the * list, it also unregisters the render function. */ hideImpostor(e) { if (!e || !this._scene || !this._utilityLayer) return; let t = !1; const i = this._utilityLayer.utilityLayerScene; for (let r = 0; r < this._numMeshes; r++) if (this._impostors[r] == e) { const s = this._meshes[r]; if (!s) continue; i.removeMesh(s), s.dispose(); const n = this._debugMeshMeshes.indexOf(s); n > -1 && this._debugMeshMeshes.splice(n, 1), this._numMeshes--, this._numMeshes > 0 ? (this._meshes[r] = this._meshes[this._numMeshes], this._impostors[r] = this._impostors[this._numMeshes], this._meshes[this._numMeshes] = null, this._impostors[this._numMeshes] = null) : (this._meshes[0] = null, this._impostors[0] = null), t = !0; break; } t && this._numMeshes === 0 && this._scene.unregisterBeforeRender(this._renderFunction); } /** * Hides a body from the physics engine. * @param body - The body to hide. * * This function is useful for hiding a body from the physics engine. * It removes the body from the utility layer scene and disposes the mesh associated with it. * It also unregisters the render function if the number of bodies is 0. 
* This is useful for hiding a body from the physics engine without deleting it. */ hideBody(e) { if (!e || !this._scene || !this._utilityLayer) return; let t = !1; const i = this._utilityLayer.utilityLayerScene; for (let r = 0; r < this._numBodies; r++) if (this._bodies[r] === e) { const s = this._bodyMeshes[r]; if (!s) continue; i.removeMesh(s), s.dispose(), this._numBodies--, this._numBodies > 0 ? (this._bodyMeshes[r] = this._bodyMeshes[this._numBodies], this._bodies[r] = this._bodies[this._numBodies], this._bodyMeshes[this._numBodies] = null, this._bodies[this._numBodies] = null) : (this._bodyMeshes[0] = null, this._bodies[0] = null), t = !0; break; } t && this._numBodies === 0 && this._scene.unregisterBeforeRender(this._renderFunction); } hideInertia(e) { if (!e || !this._scene || !this._utilityLayer) return; let t = !1; const i = this._utilityLayer.utilityLayerScene; for (let r = 0; r < this._numInertiaBodies; r++) if (this._inertiaBodies[r] === e) { const s = this._inertiaMeshes[r]; if (!s) continue; i.removeMesh(s), s.dispose(), this._inertiaBodies.splice(r, 1), this._inertiaMeshes.splice(r, 1), this._numInertiaBodies--, t = !0; break; } t && this._numInertiaBodies === 0 && this._scene.unregisterBeforeRender(this._inertiaRenderFunction); } /** * Hide a physics constraint from the viewer utility layer * @param constraint the constraint to hide */ hideConstraint(e) { if (!e || !this._scene || !this._utilityLayer) return; let t = !1; const i = this._utilityLayer.utilityLayerScene; for (let r = 0; r < this._numConstraints; r++) if (this._constraints[r] === e) { const s = this._constraintMeshes[r]; if (!s) continue; i.removeMesh(s), s.dispose(), this._constraints.splice(r, 1), this._constraintMeshes.splice(r, 1), this._numConstraints--, this._numConstraints > 0 ? 
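/* Usage sketch for the physics debug viewer implemented by this class (publicly
   BABYLON.Debug.PhysicsViewer — an assumed unminified name). As the constructor above shows, the scene
   argument is optional and falls back to the last created scene. showBody targets Physics V2 bodies,
   showImpostor targets V1 impostors:

     const viewer = new BABYLON.Debug.PhysicsViewer(scene);
     for (const mesh of scene.meshes) {
       if (mesh.physicsBody) viewer.showBody(mesh.physicsBody);
       else if (mesh.physicsImpostor) viewer.showImpostor(mesh.physicsImpostor, mesh);
     }
     // individual hideBody / hideImpostor calls, or viewer.dispose(), remove the debug meshes again
*/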
(this._constraints[r] = this._constraints[this._numConstraints], this._constraintMeshes[r] = this._constraintMeshes[this._numConstraints], this._constraints[this._numConstraints] = null, this._constraintMeshes[this._numConstraints] = null) : (this._constraints[0] = null, this._constraintMeshes[0] = null), t = !0; break; } t && this._numConstraints === 0 && this._scene.unregisterBeforeRender(this._constraintRenderFunction); } _getDebugMaterial(e) { return this._debugMaterial || (this._debugMaterial = new Dt("", e), this._debugMaterial.wireframe = !0, this._debugMaterial.emissiveColor = ze.White(), this._debugMaterial.disableLighting = !0), this._debugMaterial; } _getDebugInertiaMaterial(e) { return this._debugInertiaMaterial || (this._debugInertiaMaterial = new Dt("", e), this._debugInertiaMaterial.disableLighting = !0, this._debugInertiaMaterial.alpha = 0), this._debugInertiaMaterial; } _getDebugBoxMesh(e) { return this._debugBoxMesh || (this._debugBoxMesh = B4("physicsBodyBoxViewMesh", { size: 1 }, e), this._debugBoxMesh.rotationQuaternion = Ze.Identity(), this._debugBoxMesh.material = this._getDebugMaterial(e), this._debugBoxMesh.setEnabled(!1)), this._debugBoxMesh.createInstance("physicsBodyBoxViewInstance"); } _getDebugSphereMesh(e) { return this._debugSphereMesh || (this._debugSphereMesh = Rd("physicsBodySphereViewMesh", { diameter: 1 }, e), this._debugSphereMesh.rotationQuaternion = Ze.Identity(), this._debugSphereMesh.material = this._getDebugMaterial(e), this._debugSphereMesh.setEnabled(!1)), this._debugSphereMesh.createInstance("physicsBodySphereViewInstance"); } _getDebugCapsuleMesh(e) { return this._debugCapsuleMesh || (this._debugCapsuleMesh = sN("physicsBodyCapsuleViewMesh", { height: 1 }, e), this._debugCapsuleMesh.rotationQuaternion = Ze.Identity(), this._debugCapsuleMesh.material = this._getDebugMaterial(e), this._debugCapsuleMesh.setEnabled(!1)), this._debugCapsuleMesh.createInstance("physicsBodyCapsuleViewInstance"); } _getDebugCylinderMesh(e) { return this._debugCylinderMesh || (this._debugCylinderMesh = Hf("physicsBodyCylinderViewMesh", { diameterTop: 1, diameterBottom: 1, height: 1 }, e), this._debugCylinderMesh.rotationQuaternion = Ze.Identity(), this._debugCylinderMesh.material = this._getDebugMaterial(e), this._debugCylinderMesh.setEnabled(!1)), this._debugCylinderMesh.createInstance("physicsBodyCylinderViewInstance"); } _getDebugMeshMesh(e, t) { const i = new ke(e.name, t, null, e); return i.setParent(e), i.position = D.Zero(), i.material = this._getDebugMaterial(t), this._debugMeshMeshes.push(i), i; } _getDebugMesh(e, t) { if (!this._utilityLayer || t && t.parent && t.parent.physicsImpostor) return null; let i = null; const r = this._utilityLayer.utilityLayerScene; if (!e.physicsBody) return Ce.Warn("Unable to get physicsBody of impostor. 
It might be initialized later by its parent's impostor."), null; switch (e.type) { case tr.BoxImpostor: i = this._getDebugBoxMesh(r), e.getBoxSizeToRef(i.scaling); break; case tr.SphereImpostor: { i = this._getDebugSphereMesh(r); const s = e.getRadius(); i.scaling.x = s * 2, i.scaling.y = s * 2, i.scaling.z = s * 2; break; } case tr.CapsuleImpostor: { i = this._getDebugCapsuleMesh(r); const s = e.object.getBoundingInfo(); i.scaling.x = (s.boundingBox.maximum.x - s.boundingBox.minimum.x) * 2 * e.object.scaling.x, i.scaling.y = (s.boundingBox.maximum.y - s.boundingBox.minimum.y) * e.object.scaling.y, i.scaling.z = (s.boundingBox.maximum.z - s.boundingBox.minimum.z) * 2 * e.object.scaling.z; break; } case tr.MeshImpostor: t && (i = this._getDebugMeshMesh(t, r)); break; case tr.NoImpostor: t ? t.getChildMeshes().filter((n) => n.physicsImpostor ? 1 : 0).forEach((n) => { if (n.physicsImpostor && n.getClassName() === "Mesh") { const a = n.getBoundingInfo(), l = a.boundingBox.minimum, o = a.boundingBox.maximum; switch (n.physicsImpostor.type) { case tr.BoxImpostor: i = this._getDebugBoxMesh(r), i.position.copyFrom(l), i.position.addInPlace(o), i.position.scaleInPlace(0.5); break; case tr.SphereImpostor: i = this._getDebugSphereMesh(r); break; case tr.CylinderImpostor: i = this._getDebugCylinderMesh(r); break; default: i = null; break; } i && (i.scaling.x = o.x - l.x, i.scaling.y = o.y - l.y, i.scaling.z = o.z - l.z, i.parent = n); } }) : Ce.Warn("No target mesh parameter provided for NoImpostor. Skipping."), i = null; break; case tr.CylinderImpostor: { i = this._getDebugCylinderMesh(r); const s = e.object.getBoundingInfo(); i.scaling.x = (s.boundingBox.maximum.x - s.boundingBox.minimum.x) * e.object.scaling.x, i.scaling.y = (s.boundingBox.maximum.y - s.boundingBox.minimum.y) * e.object.scaling.y, i.scaling.z = (s.boundingBox.maximum.z - s.boundingBox.minimum.z) * e.object.scaling.z; break; } } return i; } /** * Creates a debug mesh for a given physics body * @param body The physics body to create the debug mesh for * @returns The created debug mesh or null if the utility layer is not available * * This code is useful for creating a debug mesh for a given physics body. * It creates a Mesh object with a VertexData object containing the positions and indices * of the geometry of the body. The mesh is then assigned a debug material from the utility layer scene. * This allows for visualizing the physics body in the scene. */ _getDebugBodyMesh(e) { if (!this._utilityLayer) return null; const t = this._utilityLayer.utilityLayerScene, i = new ke("custom", t), r = new Ot(), s = e.getGeometry(); if (r.positions = s.positions, r.indices = s.indices, r.applyToMesh(i), e._pluginDataInstances) { const n = new Float32Array(e._pluginDataInstances.length * 16); i.thinInstanceSetBuffer("matrix", n, 16); } return i.material = this._getDebugMaterial(t), i; } _getMeshDebugInertiaMatrixToRef(e, t) { var i, r, s; const n = (i = e.inertiaOrientation) !== null && i !== void 0 ? i : Ze.Identity(), a = (r = e.inertia) !== null && r !== void 0 ? r : D.Zero(), l = (s = e.centerOfMass) !== null && s !== void 0 ? 
s : D.Zero(), o = (a.x - a.y + a.z) * 6, u = Math.sqrt(Math.max(o, 0)), h = a.x * 12 - o, d = Math.sqrt(Math.max(h, 0)), f = a.z * 12 - o, p = Math.sqrt(Math.max(f, 0)), m = de.Vector3[0]; m.set(p, u, d); const _ = Ae.ScalingToRef(m.x, m.y, m.z, de.Matrix[0]), v = n.toRotationMatrix(de.Matrix[1]), C = Ae.TranslationToRef(l.x, l.y, l.z, de.Matrix[2]); return _.multiplyToRef(v, t), t.multiplyToRef(C, t), t; } _getDebugInertiaMesh(e) { if (!this._utilityLayer) return null; const t = this._utilityLayer.utilityLayerScene, i = mo.CreateBox("custom", { size: 1 }, t), r = Ae.Identity(); if (e._pluginDataInstances.length) { const s = new Float32Array(e._pluginDataInstances.length * 16); for (let n = 0; n < e._pluginDataInstances.length; ++n) { const a = e.getMassProperties(n); this._getMeshDebugInertiaMatrixToRef(a, r), r.copyToArray(s, n * 16); } i.thinInstanceSetBuffer("matrix", s, 16); } else { const s = e.getMassProperties(); this._getMeshDebugInertiaMatrixToRef(s, r), r.decomposeToTransformNode(i); } return i.enableEdgesRendering(), i.edgesWidth = 2, i.edgesColor = new Et(1, 0, 1, 1), i.material = this._getDebugInertiaMaterial(t), i; } _getTransformFromBodyToRef(e, t, i) { const r = e.transformNode; return i && i >= 0 ? Ae.FromArrayToRef(r._thinInstanceDataStorage.matrixData, i, t) : t.copyFrom(r.getWorldMatrix()); } _getDebugConstraintMesh(e) { if (!this._utilityLayer) return null; const t = this._utilityLayer.utilityLayerScene; if (!e._initOptions) return null; const { pivotA: i, pivotB: r, axisA: s, axisB: n, perpAxisA: a, perpAxisB: l } = e._initOptions; if (!i || !r || !s || !n || !a || !l) return null; const o = new ke("parentingDebugConstraint", t), u = e.getBodiesUsingConstraint(); for (const h of u) { const d = new xi("parentOfPair", t); d.parent = o; const { parentBody: f, parentBodyIndex: p, childBody: m, childBodyIndex: _ } = h, v = this._getTransformFromBodyToRef(f, de.Matrix[0], p), C = this._getTransformFromBodyToRef(m, de.Matrix[1], _), x = new xi("parentCoordSystem", t); x.parent = d, x.metadata = { parentBody: f, parentBodyIndex: p }, v.decomposeToTransformNode(x); const b = new xi("childCoordSystem", t); b.parent = d, b.metadata = { childBody: m, childBodyIndex: _ }, C.decomposeToTransformNode(b); const S = Ze.FromRotationMatrix(Ae.FromXYZAxesToRef(s, a, s.cross(a), de.Matrix[0])), M = Ze.FromRotationMatrix(Ae.FromXYZAxesToRef(n, l, n.cross(l), de.Matrix[0])), R = i, w = r, V = new xi("constraint_parent", t); V.position.copyFrom(R), V.rotationQuaternion = S, V.parent = x; const k = new xi("constraint_child", t); k.parent = b, k.position.copyFrom(w), k.rotationQuaternion = M; const L = new aT(t, this._constraintAxesSize); L.xAxis.parent = V, L.yAxis.parent = V, L.zAxis.parent = V; const B = new aT(t, this._constraintAxesSize); B.xAxis.parent = k, B.yAxis.parent = k, B.zAxis.parent = k; } return o; } /** * Clean up physics debug display */ dispose() { for (let e = this._numMeshes - 1; e >= 0; e--) this.hideImpostor(this._impostors[0]); for (let e = this._numBodies - 1; e >= 0; e--) this.hideBody(this._bodies[0]); for (let e = this._numInertiaBodies - 1; e >= 0; e--) this.hideInertia(this._inertiaBodies[0]); this._debugBoxMesh && this._debugBoxMesh.dispose(), this._debugSphereMesh && this._debugSphereMesh.dispose(), this._debugCylinderMesh && this._debugCylinderMesh.dispose(), this._debugMaterial && this._debugMaterial.dispose(), this._impostors.length = 0, this._scene = null, this._physicsEnginePlugin = null, this._utilityLayer && (this._utilityLayer.dispose(), 
this._utilityLayer = null); } } class eW { /** * Helper function to create a colored helper in a scene in one line. * @param ray Defines the ray we are currently trying to visualize * @param scene Defines the scene the ray is used in * @param color Defines the color we want to see the ray in * @returns The newly created ray helper. */ static CreateAndShow(e, t, i) { const r = new eW(e); return r.show(t, i), r; } /** * Instantiate a new ray helper. * As raycast might be hard to debug, the RayHelper can help rendering the different rays * in order to better appreciate the issue one might have. * @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/interactions/picking_collisions#debugging * @param ray Defines the ray we are currently trying to visualize */ constructor(e) { this.ray = e; } /** * Shows the ray we are willing to debug. * @param scene Defines the scene the ray needs to be rendered in * @param color Defines the color the ray needs to be rendered in */ show(e, t) { if (!this._renderFunction && this.ray) { const i = this.ray; this._renderFunction = () => this._render(), this._scene = e, this._renderPoints = [i.origin, i.origin.add(i.direction.scale(i.length))], this._renderLine = Ba("ray", { points: this._renderPoints, updatable: !0 }, e), this._renderLine.isPickable = !1, this._renderFunction && this._scene.registerBeforeRender(this._renderFunction); } t && this._renderLine && this._renderLine.color.copyFrom(t); } /** * Hides the ray we are debugging. */ hide() { this._renderFunction && this._scene && (this._scene.unregisterBeforeRender(this._renderFunction), this._scene = null, this._renderFunction = null, this._renderLine && (this._renderLine.dispose(), this._renderLine = null), this._renderPoints = []); } _render() { var e; const t = this.ray; if (!t) return; const i = this._renderPoints[1], r = Math.min(t.length, 1e6); i.copyFrom(t.direction), i.scaleInPlace(r), i.addInPlace(t.origin), this._renderPoints[0].copyFrom(t.origin), Ba("ray", { points: this._renderPoints, updatable: !0, instance: this._renderLine }, this._scene), (e = this._renderLine) === null || e === void 0 || e.refreshBoundingInfo(); } /** * Attach a ray helper to a mesh so that we can easily see its orientation for instance or information like its normals. * @param mesh Defines the mesh we want the helper attached to * @param meshSpaceDirection Defines the direction of the Ray in mesh space (local space of the mesh node) * @param meshSpaceOrigin Defines the origin of the Ray in mesh space (local space of the mesh node) * @param length Defines the length of the ray */ attachToMesh(e, t, i, r) { this._attachedToMesh = e; const s = this.ray; s && (s.direction || (s.direction = D.Zero()), s.origin || (s.origin = D.Zero()), r && (s.length = r), i || (i = D.Zero()), t || (t = new D(0, 0, -1)), this._scene || (this._scene = e.getScene()), this._meshSpaceDirection ? (this._meshSpaceDirection.copyFrom(t), this._meshSpaceOrigin.copyFrom(i)) : (this._meshSpaceDirection = t.clone(), this._meshSpaceOrigin = i.clone()), this._onAfterRenderObserver || (this._onAfterRenderObserver = this._scene.onBeforeRenderObservable.add(() => this._updateToMesh()), this._onAfterStepObserver = this._scene.onAfterStepObservable.add(() => this._updateToMesh())), this._attachedToMesh.computeWorldMatrix(!0), this._updateToMesh()); } /** * Detach the ray helper from the mesh it has previously been attached to. 
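 * @example
 * // Usage sketch, assuming the public Babylon.js API where this class is exposed as RayHelper;
 * // `ray`, `mesh` and `scene` are placeholders for objects created elsewhere.
 * const rayHelper = RayHelper.CreateAndShow(ray, scene, new Color3(1, 1, 0.3));
 * // Or attach the helper to a mesh so the debug ray follows the mesh's local -Z axis:
 * rayHelper.attachToMesh(mesh, new Vector3(0, 0, -1), Vector3.Zero(), 10);
 * // When finished, detach from the mesh and release the helper's resources:
 * rayHelper.detachFromMesh();
 * rayHelper.dispose();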
*/ detachFromMesh() { this._attachedToMesh && this._scene && (this._onAfterRenderObserver && (this._scene.onBeforeRenderObservable.remove(this._onAfterRenderObserver), this._scene.onAfterStepObservable.remove(this._onAfterStepObserver)), this._attachedToMesh = null, this._onAfterRenderObserver = null, this._onAfterStepObserver = null, this._scene = null); } _updateToMesh() { const e = this.ray; if (!(!this._attachedToMesh || !e)) { if (this._attachedToMesh.isDisposed()) { this.detachFromMesh(); return; } this._attachedToMesh.getDirectionToRef(this._meshSpaceDirection, e.direction), D.TransformCoordinatesToRef(this._meshSpaceOrigin, this._attachedToMesh.getWorldMatrix(), e.origin); } } /** * Dispose the helper and release its associated resources. */ dispose() { this.hide(), this.detachFromMesh(), this.ray = null; } } class P1 { /** public static method to create a BoneWeight Shader * @param options The constructor options * @param scene The scene that the shader is scoped to * @returns The created ShaderMaterial * @see http://www.babylonjs-playground.com/#1BZJVJ#395 */ static CreateBoneWeightShader(e, t) { var i, r, s, n, a, l; const o = e.skeleton, u = (i = e.colorBase) !== null && i !== void 0 ? i : ze.Black(), h = (r = e.colorZero) !== null && r !== void 0 ? r : ze.Blue(), d = (s = e.colorQuarter) !== null && s !== void 0 ? s : ze.Green(), f = (n = e.colorHalf) !== null && n !== void 0 ? n : ze.Yellow(), p = (a = e.colorFull) !== null && a !== void 0 ? a : ze.Red(), m = (l = e.targetBoneIndex) !== null && l !== void 0 ? l : 0; Cr.ShadersStore["boneWeights:" + o.name + "VertexShader"] = `precision highp float; attribute vec3 position; attribute vec2 uv; uniform mat4 view; uniform mat4 projection; uniform mat4 worldViewProjection; #include #if NUM_BONE_INFLUENCERS == 0 attribute vec4 matricesIndices; attribute vec4 matricesWeights; #endif #include #include varying vec3 vColor; uniform vec3 colorBase; uniform vec3 colorZero; uniform vec3 colorQuarter; uniform vec3 colorHalf; uniform vec3 colorFull; uniform float targetBoneIndex; void main() { vec3 positionUpdated = position; #include #include #include vec4 worldPos = finalWorld * vec4(positionUpdated, 1.0); vec3 color = colorBase; float totalWeight = 0.; if(matricesIndices[0] == targetBoneIndex && matricesWeights[0] > 0.){ totalWeight += matricesWeights[0]; } if(matricesIndices[1] == targetBoneIndex && matricesWeights[1] > 0.){ totalWeight += matricesWeights[1]; } if(matricesIndices[2] == targetBoneIndex && matricesWeights[2] > 0.){ totalWeight += matricesWeights[2]; } if(matricesIndices[3] == targetBoneIndex && matricesWeights[3] > 0.){ totalWeight += matricesWeights[3]; } color = mix(color, colorZero, smoothstep(0., 0.25, totalWeight)); color = mix(color, colorQuarter, smoothstep(0.25, 0.5, totalWeight)); color = mix(color, colorHalf, smoothstep(0.5, 0.75, totalWeight)); color = mix(color, colorFull, smoothstep(0.75, 1.0, totalWeight)); vColor = color; gl_Position = projection * view * worldPos; }`, Cr.ShadersStore["boneWeights:" + o.name + "FragmentShader"] = ` precision highp float; varying vec3 vPosition; varying vec3 vColor; void main() { vec4 color = vec4(vColor, 1.0); gl_FragColor = color; } `; const _ = new Lo("boneWeight:" + o.name, t, { vertex: "boneWeights:" + o.name, fragment: "boneWeights:" + o.name }, { attributes: ["position", "normal", "matricesIndices", "matricesWeights"], uniforms: [ "world", "worldView", "worldViewProjection", "view", "projection", "viewProjection", "colorBase", "colorZero", "colorQuarter", "colorHalf", 
"colorFull", "targetBoneIndex" ] }); return _.setColor3("colorBase", u), _.setColor3("colorZero", h), _.setColor3("colorQuarter", d), _.setColor3("colorHalf", f), _.setColor3("colorFull", p), _.setFloat("targetBoneIndex", m), _.getClassName = () => "BoneWeightShader", _.transparencyMode = At.MATERIAL_OPAQUE, _; } /** public static method to create a BoneWeight Shader * @param options The constructor options * @param scene The scene that the shader is scoped to * @returns The created ShaderMaterial */ static CreateSkeletonMapShader(e, t) { var i; const r = e.skeleton, s = (i = e.colorMap) !== null && i !== void 0 ? i : [ { color: new ze(1, 0.38, 0.18), location: 0 }, { color: new ze(0.59, 0.18, 1), location: 0.2 }, { color: new ze(0.59, 1, 0.18), location: 0.4 }, { color: new ze(1, 0.87, 0.17), location: 0.6 }, { color: new ze(1, 0.17, 0.42), location: 0.8 }, { color: new ze(0.17, 0.68, 1), location: 1 } ], n = r.bones.length + 1, a = P1._CreateBoneMapColorBuffer(n, s, t), l = new Lo("boneWeights:" + r.name, t, { vertexSource: `precision highp float; attribute vec3 position; attribute vec2 uv; uniform mat4 view; uniform mat4 projection; uniform mat4 worldViewProjection; uniform float colorMap[` + r.bones.length * 4 + `]; #include #if NUM_BONE_INFLUENCERS == 0 attribute vec4 matricesIndices; attribute vec4 matricesWeights; #endif #include #include varying vec3 vColor; void main() { vec3 positionUpdated = position; #include #include #include vec3 color = vec3(0.); bool first = true; for (int i = 0; i < 4; i++) { int boneIdx = int(matricesIndices[i]); float boneWgt = matricesWeights[i]; vec3 c = vec3(colorMap[boneIdx * 4 + 0], colorMap[boneIdx * 4 + 1], colorMap[boneIdx * 4 + 2]); if (boneWgt > 0.) { if (first) { first = false; color = c; } else { color = mix(color, c, boneWgt); } } } vColor = color; vec4 worldPos = finalWorld * vec4(positionUpdated, 1.0); gl_Position = projection * view * worldPos; }`, fragmentSource: ` precision highp float; varying vec3 vColor; void main() { vec4 color = vec4( vColor, 1.0 ); gl_FragColor = color; } ` }, { attributes: ["position", "normal", "matricesIndices", "matricesWeights"], uniforms: ["world", "worldView", "worldViewProjection", "view", "projection", "viewProjection", "colorMap"] }); return l.setFloats("colorMap", a), l.getClassName = () => "SkeletonMapShader", l.transparencyMode = At.MATERIAL_OPAQUE, l; } /** private static method to create a BoneWeight Shader * @param size The size of the buffer to create (usually the bone count) * @param colorMap The gradient data to generate * @param scene The scene that the shader is scoped to * @returns an Array of floats from the color gradient values */ static _CreateBoneMapColorBuffer(e, t, i) { const r = new gg("temp", { width: e, height: 1 }, i, !1), s = r.getContext(), n = s.createLinearGradient(0, 0, e, 0); t.forEach((u) => { n.addColorStop(u.location, u.color.toHexString()); }), s.fillStyle = n, s.fillRect(0, 0, e, 1), r.update(); const a = [], l = s.getImageData(0, 0, e, 1).data, o = 1 / 255; for (let u = 0; u < l.length; u++) a.push(l[u] * o); return r.dispose(), a; } /** Gets the Scene. */ get scene() { return this._scene; } /** Gets the utilityLayer. */ get utilityLayer() { return this._utilityLayer; } /** Checks Ready Status. */ get isReady() { return this._ready; } /** Sets Ready Status. 
*/ set ready(e) { this._ready = e; } /** Gets the debugMesh */ get debugMesh() { return this._debugMesh; } /** Sets the debugMesh */ set debugMesh(e) { this._debugMesh = e; } /** Gets the displayMode */ get displayMode() { return this.options.displayMode || P1.DISPLAY_LINES; } /** Sets the displayMode */ set displayMode(e) { e > P1.DISPLAY_SPHERE_AND_SPURS && (e = P1.DISPLAY_LINES), this.options.displayMode = e; } /** * Creates a new SkeletonViewer * @param skeleton defines the skeleton to render * @param mesh defines the mesh attached to the skeleton * @param scene defines the hosting scene * @param autoUpdateBonesMatrices defines a boolean indicating if bones matrices must be forced to update before rendering (true by default) * @param renderingGroupId defines the rendering group id to use with the viewer * @param options All of the extra constructor options for the SkeletonViewer */ constructor(e, t, i, r = !0, s = 3, n = {}) { var a, l, o, u, h, d, f, p, m, _, v, C, x, b; if (this.skeleton = e, this.mesh = t, this.autoUpdateBonesMatrices = r, this.renderingGroupId = s, this.options = n, this.color = ze.White(), this._debugLines = new Array(), this._localAxes = null, this._isEnabled = !0, this._obs = null, this._scene = i, this._ready = !1, n.pauseAnimations = (a = n.pauseAnimations) !== null && a !== void 0 ? a : !0, n.returnToRest = (l = n.returnToRest) !== null && l !== void 0 ? l : !1, n.displayMode = (o = n.displayMode) !== null && o !== void 0 ? o : P1.DISPLAY_LINES, n.displayOptions = (u = n.displayOptions) !== null && u !== void 0 ? u : {}, n.displayOptions.midStep = (h = n.displayOptions.midStep) !== null && h !== void 0 ? h : 0.235, n.displayOptions.midStepFactor = (d = n.displayOptions.midStepFactor) !== null && d !== void 0 ? d : 0.155, n.displayOptions.sphereBaseSize = (f = n.displayOptions.sphereBaseSize) !== null && f !== void 0 ? f : 0.15, n.displayOptions.sphereScaleUnit = (p = n.displayOptions.sphereScaleUnit) !== null && p !== void 0 ? p : 2, n.displayOptions.sphereFactor = (m = n.displayOptions.sphereFactor) !== null && m !== void 0 ? m : 0.865, n.displayOptions.spurFollowsChild = (_ = n.displayOptions.spurFollowsChild) !== null && _ !== void 0 ? _ : !1, n.displayOptions.showLocalAxes = (v = n.displayOptions.showLocalAxes) !== null && v !== void 0 ? v : !1, n.displayOptions.localAxesSize = (C = n.displayOptions.localAxesSize) !== null && C !== void 0 ? C : 0.075, n.computeBonesUsingShaders = (x = n.computeBonesUsingShaders) !== null && x !== void 0 ? x : !0, n.useAllBones = (b = n.useAllBones) !== null && b !== void 0 ? b : !0, this._boneIndices = /* @__PURE__ */ new Set(), !n.useAllBones) { const M = t == null ? void 0 : t.getVerticesData(Y.MatricesIndicesKind), R = t == null ? void 0 : t.getVerticesData(Y.MatricesWeightsKind); if (M && R) for (let w = 0; w < M.length; ++w) { const V = M[w]; R[w] !== 0 && this._boneIndices.add(V); } } this._utilityLayer = new bn(this._scene, !1), this._utilityLayer.pickUtilitySceneFirst = !1, this._utilityLayer.utilityLayerScene.autoClearDepthAndStencil = !0; let S = this.options.displayMode || 0; S > P1.DISPLAY_SPHERE_AND_SPURS && (S = P1.DISPLAY_LINES), this.displayMode = S, this.update(), this._bindObs(); } /** The Dynamic bindings for the update functions */ _bindObs() { switch (this.displayMode) { case P1.DISPLAY_LINES: { this._obs = this.scene.onBeforeRenderObservable.add(() => { this._displayLinesUpdate(); }); break; } } } /** Update the viewer to sync with current skeleton state, only used to manually update. 
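 * @example
 * // Construction sketch, assuming the public Babylon.js API where this class is exposed as SkeletonViewer;
 * // `skeleton`, `mesh` and `scene` are placeholders.
 * const viewer = new SkeletonViewer(skeleton, mesh, scene, true, 3, {
 *     displayMode: SkeletonViewer.DISPLAY_SPHERE_AND_SPURS,
 *     displayOptions: { sphereBaseSize: 0.15, showLocalAxes: true }
 * });
 * viewer.isEnabled = true;
 * viewer.update(); // line mode refreshes every frame; sphere/spur modes are rebuilt by calling update()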
*/ update() { switch (this.displayMode) { case P1.DISPLAY_LINES: { this._displayLinesUpdate(); break; } case P1.DISPLAY_SPHERES: { this._buildSpheresAndSpurs(!0); break; } case P1.DISPLAY_SPHERE_AND_SPURS: { this._buildSpheresAndSpurs(!1); break; } } this._buildLocalAxes(); } /** Gets or sets a boolean indicating if the viewer is enabled */ set isEnabled(e) { this.isEnabled !== e && (this._isEnabled = e, this.debugMesh && this.debugMesh.setEnabled(e), e && !this._obs ? this._bindObs() : !e && this._obs && (this.scene.onBeforeRenderObservable.remove(this._obs), this._obs = null)); } get isEnabled() { return this._isEnabled; } _getBonePosition(e, t, i, r = 0, s = 0, n = 0) { const a = de.Matrix[0], l = t.getParent(); if (a.copyFrom(t.getLocalMatrix()), r !== 0 || s !== 0 || n !== 0) { const o = de.Matrix[1]; Ae.IdentityToRef(o), o.setTranslationFromFloats(r, s, n), o.multiplyToRef(a, a); } l && a.multiplyToRef(l.getAbsoluteMatrix(), a), a.multiplyToRef(i, a), e.x = a.m[12], e.y = a.m[13], e.z = a.m[14]; } _getLinesForBonesWithLength(e, t) { const i = e.length; let r, s; t ? (r = t.getWorldMatrix(), s = t.position) : (r = new Ae(), s = e[0].position); let n = 0; for (let a = 0; a < i; a++) { const l = e[a]; let o = this._debugLines[n]; l._index === -1 || !this._boneIndices.has(l.getIndex()) && !this.options.useAllBones || (o || (o = [D.Zero(), D.Zero()], this._debugLines[n] = o), this._getBonePosition(o[0], l, r), this._getBonePosition(o[1], l, r, 0, l.length, 0), o[0].subtractInPlace(s), o[1].subtractInPlace(s), n++); } } _getLinesForBonesNoLength(e) { const t = e.length; let i = 0; const r = this.mesh; let s, n; r ? (s = r, n = r.position) : (s = new xi(""), n = e[0].position); for (let a = t - 1; a >= 0; a--) { const l = e[a], o = l.getParent(); if (!o || !this._boneIndices.has(l.getIndex()) && !this.options.useAllBones) continue; let u = this._debugLines[i]; u || (u = [D.Zero(), D.Zero()], this._debugLines[i] = u), l.getAbsolutePositionToRef(s, u[0]), o.getAbsolutePositionToRef(s, u[1]), u[0].subtractInPlace(n), u[1].subtractInPlace(n), i++; } r || s.dispose(); } /** * function to revert the mesh and scene back to the initial state. * @param animationState */ _revert(e) { this.options.pauseAnimations && (this.scene.animationsEnabled = e, this.utilityLayer.utilityLayerScene.animationsEnabled = e); } /** * function to get the absolute bind pose of a bone by accumulating transformations up the bone hierarchy. * @param bone * @param matrix */ _getAbsoluteBindPoseToRef(e, t) { if (e === null || e._index === -1) { t.copyFrom(Ae.Identity()); return; } this._getAbsoluteBindPoseToRef(e.getParent(), t), e.getBindMatrix().multiplyToRef(t, t); } /** * function to build and bind sphere joint points and spur bone representations. * @param spheresOnly */ _buildSpheresAndSpurs(e = !0) { var t, i; this._debugMesh && (this._debugMesh.dispose(), this._debugMesh = null, this.ready = !1), this._ready = !1; const r = (t = this.utilityLayer) === null || t === void 0 ? 
void 0 : t.utilityLayerScene, s = this.skeleton.bones, n = [], a = [], l = this.scene.animationsEnabled; try { this.options.pauseAnimations && (this.scene.animationsEnabled = !1, r.animationsEnabled = !1), this.options.returnToRest && this.skeleton.returnToRest(), this.autoUpdateBonesMatrices && this.skeleton.computeAbsoluteMatrices(); let o = Number.NEGATIVE_INFINITY; const u = this.options.displayOptions || {}; for (let m = 0; m < s.length; m++) { const _ = s[m]; if (_._index === -1 || !this._boneIndices.has(_.getIndex()) && !this.options.useAllBones) continue; const v = new Ae(); this._getAbsoluteBindPoseToRef(_, v); const C = new D(); v.decompose(void 0, void 0, C), _.children.forEach((w) => { const V = new Ae(); w.getLocalMatrix().multiplyToRef(v, V); const k = new D(); V.decompose(void 0, void 0, k); const L = D.Distance(C, k); if (L > o && (o = L), e) return; const B = k.clone().subtract(C.clone()), U = B.length(), K = B.normalize().scale(U), ee = u.midStep || 0.165, Z = u.midStepFactor || 0.215, q = K.scale(ee), le = lN("skeletonViewer", { shape: [new D(1, -1, 0), new D(1, 1, 0), new D(-1, 1, 0), new D(-1, -1, 0), new D(1, -1, 0)], path: [D.Zero(), q, K], scaleFunction: (J) => { switch (J) { case 0: case 2: return 0; case 1: return U * Z; } return 0; }, sideOrientation: ke.DEFAULTSIDE, updatable: !1 }, r), ie = le.getTotalVertices(), $ = [], j = []; for (let J = 0; J < ie; J++) $.push(1, 0, 0, 0), u.spurFollowsChild && J > 9 ? j.push(w.getIndex(), 0, 0, 0) : j.push(_.getIndex(), 0, 0, 0); le.position = C.clone(), le.setVerticesData(Y.MatricesWeightsKind, $, !1), le.setVerticesData(Y.MatricesIndicesKind, j, !1), le.convertToFlatShadedMesh(), a.push(le); }); const x = u.sphereBaseSize || 0.2, b = Rd("skeletonViewer", { segments: 6, diameter: x, updatable: !0 }, r), S = b.getTotalVertices(), M = [], R = []; for (let w = 0; w < S; w++) M.push(1, 0, 0, 0), R.push(_.getIndex(), 0, 0, 0); b.setVerticesData(Y.MatricesWeightsKind, M, !1), b.setVerticesData(Y.MatricesIndicesKind, R, !1), b.position = C.clone(), n.push([b, _]); } const h = u.sphereScaleUnit || 2, d = u.sphereFactor || 0.85, f = []; for (let m = 0; m < n.length; m++) { const [_, v] = n[m], C = 1 / (h / o); let x = 0, b = v; for (; b.getParent() && b.getParent().getIndex() !== -1; ) x++, b = b.getParent(); _.scaling.scaleInPlace(C * Math.pow(d, x)), f.push(_); } this.debugMesh = ke.MergeMeshes(f.concat(a), !0, !0), this.debugMesh && (this.debugMesh.renderingGroupId = this.renderingGroupId, this.debugMesh.skeleton = this.skeleton, this.debugMesh.parent = this.mesh, this.debugMesh.computeBonesUsingShaders = (i = this.options.computeBonesUsingShaders) !== null && i !== void 0 ? 
i : !0, this.debugMesh.alwaysSelectAsActiveMesh = !0); const p = this.utilityLayer._getSharedGizmoLight(); p.intensity = 0.7, this._revert(l), this.ready = !0; } catch (o) { Ce.Error(o), this._revert(l), this.dispose(); } } _buildLocalAxes() { var e; this._localAxes && this._localAxes.dispose(), this._localAxes = null; const t = this.options.displayOptions || {}; if (!t.showLocalAxes) return; const i = this._utilityLayer.utilityLayerScene, r = t.localAxesSize || 0.075, s = [], n = [], a = new Et(1, 0, 0, 1), l = new Et(0, 1, 0, 1), o = new Et(0, 0, 1, 1), u = [], h = [], d = 6; for (const f in this.skeleton.bones) { const p = this.skeleton.bones[f]; if (p._index === -1 || !this._boneIndices.has(p.getIndex()) && !this.options.useAllBones) continue; const m = new Ae(), _ = new D(); this._getAbsoluteBindPoseToRef(p, m), m.decompose(void 0, de.Quaternion[0], _); const v = new Ae(); de.Quaternion[0].toRotationMatrix(v); const C = D.TransformCoordinates(new D(0 + r, 0, 0), v), x = D.TransformCoordinates(new D(0, 0 + r, 0), v), b = D.TransformCoordinates(new D(0, 0, 0 + r), v), S = [_, _.add(C)], M = [_, _.add(x)], R = [_, _.add(b)], w = [S, M, R], V = [ [a, a], [l, l], [o, o] ]; s.push(...w), n.push(...V); for (let k = 0; k < d; k++) u.push(1, 0, 0, 0), h.push(p.getIndex(), 0, 0, 0); } this._localAxes = tP("localAxes", { lines: s, colors: n, updatable: !0 }, i), this._localAxes.setVerticesData(Y.MatricesWeightsKind, u, !1), this._localAxes.setVerticesData(Y.MatricesIndicesKind, h, !1), this._localAxes.skeleton = this.skeleton, this._localAxes.renderingGroupId = this.renderingGroupId + 1, this._localAxes.parent = this.mesh, this._localAxes.computeBonesUsingShaders = (e = this.options.computeBonesUsingShaders) !== null && e !== void 0 ? e : !0; } /** Update the viewer to sync with current skeleton state, only used for the line display. */ _displayLinesUpdate() { if (!this._utilityLayer) return; this.autoUpdateBonesMatrices && this.skeleton.computeAbsoluteMatrices(), this.skeleton.bones[0].length === void 0 ? this._getLinesForBonesNoLength(this.skeleton.bones) : this._getLinesForBonesWithLength(this.skeleton.bones, this.mesh); const e = this._utilityLayer.utilityLayerScene; e && (this._debugMesh ? tP("", { lines: this._debugLines, updatable: !0, instance: this._debugMesh }, e) : (this._debugMesh = tP("", { lines: this._debugLines, updatable: !0, instance: null }, e), this._debugMesh.renderingGroupId = this.renderingGroupId), this.mesh ? this._debugMesh.position.copyFrom(this.mesh.position) : this._debugMesh.position.copyFrom(this.skeleton.bones[0].position), this._debugMesh.color = this.color); } /** Changes the displayMode of the skeleton viewer * @param mode The displayMode numerical value */ changeDisplayMode(e) { const t = !!this.isEnabled; this.displayMode !== e && (this.isEnabled = !1, this._debugMesh && (this._debugMesh.dispose(), this._debugMesh = null, this.ready = !1), this.displayMode = e, this.update(), this._bindObs(), this.isEnabled = t); } /** Sets a display option of the skeleton viewer * * | Option | Type | Default | Description | * | ---------------- | ------- | ------- | ----------- | * | midStep | float | 0.235 | A percentage between a bone and its child that determines the widest part of a spur. Only used when `displayMode` is set to `DISPLAY_SPHERE_AND_SPURS`. | * | midStepFactor | float | 0.15 | Mid step width expressed as a factor of the length. A value of 0.5 makes the spur width half of the spur length. Only used when `displayMode` is set to `DISPLAY_SPHERE_AND_SPURS`. 
| * | sphereBaseSize | float | 2 | Sphere base size. Only used when `displayMode` is set to `DISPLAY_SPHERE_AND_SPURS`. | * | sphereScaleUnit | float | 0.865 | Sphere scale factor used to scale spheres in relation to the longest bone. Only used when `displayMode` is set to `DISPLAY_SPHERE_AND_SPURS`. | * | spurFollowsChild | boolean | false | Whether a spur should attach its far end to the child bone. | * | showLocalAxes | boolean | false | Displays local axes on all bones. | * | localAxesSize | float | 0.075 | Determines the length of each local axis. | * * @param option String of the option name * @param value The numerical option value */ changeDisplayOptions(e, t) { const i = !!this.isEnabled; this.options.displayOptions[e] = t, this.isEnabled = !1, this._debugMesh && (this._debugMesh.dispose(), this._debugMesh = null, this.ready = !1), this.update(), this._bindObs(), this.isEnabled = i; } /** Release associated resources */ dispose() { this.isEnabled = !1, this._debugMesh && (this._debugMesh.dispose(), this._debugMesh = null), this._utilityLayer && (this._utilityLayer.dispose(), this._utilityLayer = null), this.ready = !1; } } P1.DISPLAY_LINES = 0; P1.DISPLAY_SPHERES = 1; P1.DISPLAY_SPHERE_AND_SPURS = 2; class hfe { /** * Gets or sets the transparency of the frustum planes */ get transparency() { return this._transparency; } set transparency(e) { this._transparency = e; for (let t = 6; t < 12; ++t) this._lightHelperFrustumMeshes[t].material.alpha = e; } /** * true to display the edges of the frustum */ get showLines() { return this._showLines; } set showLines(e) { if (this._showLines !== e) { this._showLines = e; for (let t = 0; t < 6; ++t) this._lightHelperFrustumMeshes[t].setEnabled(e); } } /** * true to display the planes of the frustum */ get showPlanes() { return this._showPlanes; } set showPlanes(e) { if (this._showPlanes !== e) { this._showPlanes = e; for (let t = 6; t < 12; ++t) this._lightHelperFrustumMeshes[t].setEnabled(e); } } /** * Creates a new frustum viewer * @param light directional light to display the frustum for * @param camera camera used to retrieve the minZ / maxZ values if the shadowMinZ/shadowMaxZ values of the light are not setup */ constructor(e, t) { this._oldPosition = new D(Number.NaN, Number.NaN, Number.NaN), this._oldDirection = new D(Number.NaN, Number.NaN, Number.NaN), this._transparency = 0.3, this._showLines = !0, this._showPlanes = !0, this._scene = e.getScene(), this._light = e, this._camera = t, this._inverseViewMatrix = Ae.Identity(), this._lightHelperFrustumMeshes = [], this._createGeometry(), this.show(), this.update(); } /** * Shows the frustum */ show() { this._lightHelperFrustumMeshes.forEach((e, t) => { e.setEnabled(t < 6 && this._showLines || t >= 6 && this._showPlanes); }), this._oldPosition.set(Number.NaN, Number.NaN, Number.NaN), this._visible = !0; } /** * Hides the frustum */ hide() { this._lightHelperFrustumMeshes.forEach((e) => { e.setEnabled(!1); }), this._visible = !1; } /** * Updates the frustum. 
* Call this method to update the frustum view if the light has changed position/direction */ update() { var e, t, i, r, s, n; if (!this._visible || this._oldPosition.equals(this._light.position) && this._oldDirection.equals(this._light.direction) && this._oldAutoCalc === this._light.autoCalcShadowZBounds && this._oldMinZ === this._light.shadowMinZ && this._oldMaxZ === this._light.shadowMaxZ) return; this._oldPosition.copyFrom(this._light.position), this._oldDirection.copyFrom(this._light.direction), this._oldAutoCalc = this._light.autoCalcShadowZBounds, this._oldMinZ = this._light.shadowMinZ, this._oldMaxZ = this._light.shadowMaxZ, de.Vector3[0].set(this._light.orthoLeft, this._light.orthoBottom, this._light.shadowMinZ !== void 0 ? this._light.shadowMinZ : this._camera.minZ), de.Vector3[1].set(this._light.orthoRight, this._light.orthoTop, this._light.shadowMaxZ !== void 0 ? this._light.shadowMaxZ : this._camera.maxZ); const a = this._getInvertViewMatrix(); de.Vector3[2].copyFromFloats(de.Vector3[1].x, de.Vector3[1].y, de.Vector3[0].z), de.Vector3[3].copyFromFloats(de.Vector3[1].x, de.Vector3[0].y, de.Vector3[0].z), de.Vector3[4].copyFromFloats(de.Vector3[0].x, de.Vector3[0].y, de.Vector3[0].z), de.Vector3[5].copyFromFloats(de.Vector3[0].x, de.Vector3[1].y, de.Vector3[0].z), D.TransformCoordinatesToRef(de.Vector3[2], a, de.Vector3[2]), D.TransformCoordinatesToRef(de.Vector3[3], a, de.Vector3[3]), D.TransformCoordinatesToRef(de.Vector3[4], a, de.Vector3[4]), D.TransformCoordinatesToRef(de.Vector3[5], a, de.Vector3[5]), de.Vector3[6].copyFromFloats(de.Vector3[1].x, de.Vector3[1].y, de.Vector3[1].z), de.Vector3[7].copyFromFloats(de.Vector3[1].x, de.Vector3[0].y, de.Vector3[1].z), de.Vector3[8].copyFromFloats(de.Vector3[0].x, de.Vector3[0].y, de.Vector3[1].z), de.Vector3[9].copyFromFloats(de.Vector3[0].x, de.Vector3[1].y, de.Vector3[1].z), D.TransformCoordinatesToRef(de.Vector3[6], a, de.Vector3[6]), D.TransformCoordinatesToRef(de.Vector3[7], a, de.Vector3[7]), D.TransformCoordinatesToRef(de.Vector3[8], a, de.Vector3[8]), D.TransformCoordinatesToRef(de.Vector3[9], a, de.Vector3[9]), Ba("nearlines", { updatable: !0, points: this._nearLinesPoints, instance: this._lightHelperFrustumMeshes[0] }, this._scene), Ba("farlines", { updatable: !0, points: this._farLinesPoints, instance: this._lightHelperFrustumMeshes[1] }, this._scene), Ba("trlines", { updatable: !0, points: this._trLinesPoints, instance: this._lightHelperFrustumMeshes[2] }, this._scene), Ba("brlines", { updatable: !0, points: this._brLinesPoints, instance: this._lightHelperFrustumMeshes[3] }, this._scene), Ba("tllines", { updatable: !0, points: this._tlLinesPoints, instance: this._lightHelperFrustumMeshes[4] }, this._scene), Ba("bllines", { updatable: !0, points: this._blLinesPoints, instance: this._lightHelperFrustumMeshes[5] }, this._scene), de.Vector3[2].toArray(this._nearPlaneVertices, 0), de.Vector3[3].toArray(this._nearPlaneVertices, 3), de.Vector3[4].toArray(this._nearPlaneVertices, 6), de.Vector3[5].toArray(this._nearPlaneVertices, 9), (e = this._lightHelperFrustumMeshes[6].geometry) === null || e === void 0 || e.updateVerticesDataDirectly("position", this._nearPlaneVertices, 0), de.Vector3[6].toArray(this._farPlaneVertices, 0), de.Vector3[7].toArray(this._farPlaneVertices, 3), de.Vector3[8].toArray(this._farPlaneVertices, 6), de.Vector3[9].toArray(this._farPlaneVertices, 9), (t = this._lightHelperFrustumMeshes[7].geometry) === null || t === void 0 || t.updateVerticesDataDirectly("position", this._farPlaneVertices, 0), 
de.Vector3[2].toArray(this._rightPlaneVertices, 0), de.Vector3[6].toArray(this._rightPlaneVertices, 3), de.Vector3[7].toArray(this._rightPlaneVertices, 6), de.Vector3[3].toArray(this._rightPlaneVertices, 9), (i = this._lightHelperFrustumMeshes[8].geometry) === null || i === void 0 || i.updateVerticesDataDirectly("position", this._rightPlaneVertices, 0), de.Vector3[5].toArray(this._leftPlaneVertices, 0), de.Vector3[9].toArray(this._leftPlaneVertices, 3), de.Vector3[8].toArray(this._leftPlaneVertices, 6), de.Vector3[4].toArray(this._leftPlaneVertices, 9), (r = this._lightHelperFrustumMeshes[9].geometry) === null || r === void 0 || r.updateVerticesDataDirectly("position", this._leftPlaneVertices, 0), de.Vector3[2].toArray(this._topPlaneVertices, 0), de.Vector3[6].toArray(this._topPlaneVertices, 3), de.Vector3[9].toArray(this._topPlaneVertices, 6), de.Vector3[5].toArray(this._topPlaneVertices, 9), (s = this._lightHelperFrustumMeshes[10].geometry) === null || s === void 0 || s.updateVerticesDataDirectly("position", this._topPlaneVertices, 0), de.Vector3[3].toArray(this._bottomPlaneVertices, 0), de.Vector3[7].toArray(this._bottomPlaneVertices, 3), de.Vector3[8].toArray(this._bottomPlaneVertices, 6), de.Vector3[4].toArray(this._bottomPlaneVertices, 9), (n = this._lightHelperFrustumMeshes[11].geometry) === null || n === void 0 || n.updateVerticesDataDirectly("position", this._bottomPlaneVertices, 0); } /** * Dispose of the class / remove the frustum view */ dispose() { this._lightHelperFrustumMeshes.forEach((e) => { var t; (t = e.material) === null || t === void 0 || t.dispose(), e.dispose(); }), this._rootNode.dispose(); } _createGeometry() { this._rootNode = new xi("directionalLightHelperRoot_" + this._light.name, this._scene), this._rootNode.parent = this._light.parent, this._nearLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly]; const e = Ba("nearlines", { updatable: !0, points: this._nearLinesPoints }, this._scene); e.parent = this._rootNode, e.alwaysSelectAsActiveMesh = !0, this._farLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly, D.ZeroReadOnly]; const t = Ba("farlines", { updatable: !0, points: this._farLinesPoints }, this._scene); t.parent = this._rootNode, t.alwaysSelectAsActiveMesh = !0, this._trLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly]; const i = Ba("trlines", { updatable: !0, points: this._trLinesPoints }, this._scene); i.parent = this._rootNode, i.alwaysSelectAsActiveMesh = !0, this._brLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly]; const r = Ba("brlines", { updatable: !0, points: this._brLinesPoints }, this._scene); r.parent = this._rootNode, r.alwaysSelectAsActiveMesh = !0, this._tlLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly]; const s = Ba("tllines", { updatable: !0, points: this._tlLinesPoints }, this._scene); s.parent = this._rootNode, s.alwaysSelectAsActiveMesh = !0, this._blLinesPoints = [D.ZeroReadOnly, D.ZeroReadOnly]; const n = Ba("bllines", { updatable: !0, points: this._blLinesPoints }, this._scene); n.parent = this._rootNode, n.alwaysSelectAsActiveMesh = !0, this._lightHelperFrustumMeshes.push(e, t, i, r, s, n); const a = (l, o, u) => { const h = new ke(l + "plane", this._scene), d = new Dt(l + "PlaneMat", this._scene); h.material = d, h.parent = this._rootNode, h.alwaysSelectAsActiveMesh = !0, d.emissiveColor = o, d.alpha = this.transparency, d.backFaceCulling = !1, d.disableLighting = !0; const f = [0, 1, 2, 0, 2, 3], p = new Ot(); p.positions = u, p.indices = f, p.applyToMesh(h, !0), 
this._lightHelperFrustumMeshes.push(h); }; this._nearPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], this._farPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], this._rightPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], this._leftPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], this._topPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], this._bottomPlaneVertices = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], a("near", new ze(1, 0, 0), this._nearPlaneVertices), a("far", new ze(0.3, 0, 0), this._farPlaneVertices), a("right", new ze(0, 1, 0), this._rightPlaneVertices), a("left", new ze(0, 0.3, 0), this._leftPlaneVertices), a("top", new ze(0, 0, 1), this._topPlaneVertices), a("bottom", new ze(0, 0, 0.3), this._bottomPlaneVertices), this._nearLinesPoints[0] = de.Vector3[2], this._nearLinesPoints[1] = de.Vector3[3], this._nearLinesPoints[2] = de.Vector3[4], this._nearLinesPoints[3] = de.Vector3[5], this._nearLinesPoints[4] = de.Vector3[2], this._farLinesPoints[0] = de.Vector3[6], this._farLinesPoints[1] = de.Vector3[7], this._farLinesPoints[2] = de.Vector3[8], this._farLinesPoints[3] = de.Vector3[9], this._farLinesPoints[4] = de.Vector3[6], this._trLinesPoints[0] = de.Vector3[2], this._trLinesPoints[1] = de.Vector3[6], this._brLinesPoints[0] = de.Vector3[3], this._brLinesPoints[1] = de.Vector3[7], this._tlLinesPoints[0] = de.Vector3[4], this._tlLinesPoints[1] = de.Vector3[8], this._blLinesPoints[0] = de.Vector3[5], this._blLinesPoints[1] = de.Vector3[9]; } _getInvertViewMatrix() { return Ae.LookAtLHToRef(this._light.position, this._light.position.add(this._light.direction), D.UpReadOnly, this._inverseViewMatrix), this._inverseViewMatrix.invertToRef(this._inverseViewMatrix), this._inverseViewMatrix; } } class et { } et.ALPHA_DISABLE = 0; et.ALPHA_ADD = 1; et.ALPHA_COMBINE = 2; et.ALPHA_SUBTRACT = 3; et.ALPHA_MULTIPLY = 4; et.ALPHA_MAXIMIZED = 5; et.ALPHA_ONEONE = 6; et.ALPHA_PREMULTIPLIED = 7; et.ALPHA_PREMULTIPLIED_PORTERDUFF = 8; et.ALPHA_INTERPOLATE = 9; et.ALPHA_SCREENMODE = 10; et.ALPHA_ONEONE_ONEONE = 11; et.ALPHA_ALPHATOCOLOR = 12; et.ALPHA_REVERSEONEMINUS = 13; et.ALPHA_SRC_DSTONEMINUSSRCALPHA = 14; et.ALPHA_ONEONE_ONEZERO = 15; et.ALPHA_EXCLUSION = 16; et.ALPHA_LAYER_ACCUMULATE = 17; et.ALPHA_EQUATION_ADD = 0; et.ALPHA_EQUATION_SUBSTRACT = 1; et.ALPHA_EQUATION_REVERSE_SUBTRACT = 2; et.ALPHA_EQUATION_MAX = 3; et.ALPHA_EQUATION_MIN = 4; et.ALPHA_EQUATION_DARKEN = 5; et.DELAYLOADSTATE_NONE = 0; et.DELAYLOADSTATE_LOADED = 1; et.DELAYLOADSTATE_LOADING = 2; et.DELAYLOADSTATE_NOTLOADED = 4; et.NEVER = 512; et.ALWAYS = 519; et.LESS = 513; et.EQUAL = 514; et.LEQUAL = 515; et.GREATER = 516; et.GEQUAL = 518; et.NOTEQUAL = 517; et.KEEP = 7680; et.ZERO = 0; et.REPLACE = 7681; et.INCR = 7682; et.DECR = 7683; et.INVERT = 5386; et.INCR_WRAP = 34055; et.DECR_WRAP = 34056; et.TEXTURE_CLAMP_ADDRESSMODE = 0; et.TEXTURE_WRAP_ADDRESSMODE = 1; et.TEXTURE_MIRROR_ADDRESSMODE = 2; et.TEXTURE_CREATIONFLAG_STORAGE = 1; et.TEXTUREFORMAT_ALPHA = 0; et.TEXTUREFORMAT_LUMINANCE = 1; et.TEXTUREFORMAT_LUMINANCE_ALPHA = 2; et.TEXTUREFORMAT_RGB = 4; et.TEXTUREFORMAT_RGBA = 5; et.TEXTUREFORMAT_RED = 6; et.TEXTUREFORMAT_R = 6; et.TEXTUREFORMAT_RG = 7; et.TEXTUREFORMAT_RED_INTEGER = 8; et.TEXTUREFORMAT_R_INTEGER = 8; et.TEXTUREFORMAT_RG_INTEGER = 9; et.TEXTUREFORMAT_RGB_INTEGER = 10; et.TEXTUREFORMAT_RGBA_INTEGER = 11; et.TEXTUREFORMAT_BGRA = 12; et.TEXTUREFORMAT_DEPTH24_STENCIL8 = 13; et.TEXTUREFORMAT_DEPTH32_FLOAT = 14; et.TEXTUREFORMAT_DEPTH16 = 15; et.TEXTUREFORMAT_DEPTH24 = 16; 
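/*
 * Usage sketch for the directional-light frustum viewer defined above, assuming the public
 * Babylon.js API where it is exposed as DirectionalLightFrustumViewer; `directionalLight`,
 * `camera` and `scene` are placeholders for objects created elsewhere.
 *
 *   const frustumViewer = new DirectionalLightFrustumViewer(directionalLight, camera);
 *   frustumViewer.transparency = 0.5;  // alpha of the frustum planes (0.3 by default)
 *   frustumViewer.showPlanes = false;  // keep only the frustum edge lines
 *   scene.onBeforeRenderObservable.add(() => frustumViewer.update()); // update() early-outs when nothing changed
 *   // later: frustumViewer.dispose();
 */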
et.TEXTUREFORMAT_DEPTH24UNORM_STENCIL8 = 17; et.TEXTUREFORMAT_DEPTH32FLOAT_STENCIL8 = 18; et.TEXTUREFORMAT_STENCIL8 = 19; et.TEXTUREFORMAT_UNDEFINED = 4294967295; et.TEXTUREFORMAT_COMPRESSED_RGBA_BPTC_UNORM = 36492; et.TEXTUREFORMAT_COMPRESSED_SRGB_ALPHA_BPTC_UNORM = 36493; et.TEXTUREFORMAT_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT = 36495; et.TEXTUREFORMAT_COMPRESSED_RGB_BPTC_SIGNED_FLOAT = 36494; et.TEXTUREFORMAT_COMPRESSED_RGBA_S3TC_DXT5 = 33779; et.TEXTUREFORMAT_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT = 35919; et.TEXTUREFORMAT_COMPRESSED_RGBA_S3TC_DXT3 = 33778; et.TEXTUREFORMAT_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT = 35918; et.TEXTUREFORMAT_COMPRESSED_RGBA_S3TC_DXT1 = 33777; et.TEXTUREFORMAT_COMPRESSED_RGB_S3TC_DXT1 = 33776; et.TEXTUREFORMAT_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT = 35917; et.TEXTUREFORMAT_COMPRESSED_SRGB_S3TC_DXT1_EXT = 35916; et.TEXTUREFORMAT_COMPRESSED_RGBA_ASTC_4x4 = 37808; et.TEXTUREFORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR = 37840; et.TEXTUREFORMAT_COMPRESSED_RGB_ETC1_WEBGL = 36196; et.TEXTUREFORMAT_COMPRESSED_RGB8_ETC2 = 37492; et.TEXTUREFORMAT_COMPRESSED_SRGB8_ETC2 = 37493; et.TEXTUREFORMAT_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 37494; et.TEXTUREFORMAT_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 37495; et.TEXTUREFORMAT_COMPRESSED_RGBA8_ETC2_EAC = 37496; et.TEXTUREFORMAT_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC = 37497; et.TEXTURETYPE_UNSIGNED_BYTE = 0; et.TEXTURETYPE_UNSIGNED_INT = 0; et.TEXTURETYPE_FLOAT = 1; et.TEXTURETYPE_HALF_FLOAT = 2; et.TEXTURETYPE_BYTE = 3; et.TEXTURETYPE_SHORT = 4; et.TEXTURETYPE_UNSIGNED_SHORT = 5; et.TEXTURETYPE_INT = 6; et.TEXTURETYPE_UNSIGNED_INTEGER = 7; et.TEXTURETYPE_UNSIGNED_SHORT_4_4_4_4 = 8; et.TEXTURETYPE_UNSIGNED_SHORT_5_5_5_1 = 9; et.TEXTURETYPE_UNSIGNED_SHORT_5_6_5 = 10; et.TEXTURETYPE_UNSIGNED_INT_2_10_10_10_REV = 11; et.TEXTURETYPE_UNSIGNED_INT_24_8 = 12; et.TEXTURETYPE_UNSIGNED_INT_10F_11F_11F_REV = 13; et.TEXTURETYPE_UNSIGNED_INT_5_9_9_9_REV = 14; et.TEXTURETYPE_FLOAT_32_UNSIGNED_INT_24_8_REV = 15; et.TEXTURETYPE_UNDEFINED = 16; et.TEXTURE_2D = 3553; et.TEXTURE_2D_ARRAY = 35866; et.TEXTURE_CUBE_MAP = 34067; et.TEXTURE_CUBE_MAP_ARRAY = 3735928559; et.TEXTURE_3D = 32879; et.TEXTURE_NEAREST_SAMPLINGMODE = 1; et.TEXTURE_NEAREST_NEAREST = 1; et.TEXTURE_BILINEAR_SAMPLINGMODE = 2; et.TEXTURE_LINEAR_LINEAR = 2; et.TEXTURE_TRILINEAR_SAMPLINGMODE = 3; et.TEXTURE_LINEAR_LINEAR_MIPLINEAR = 3; et.TEXTURE_NEAREST_NEAREST_MIPNEAREST = 4; et.TEXTURE_NEAREST_LINEAR_MIPNEAREST = 5; et.TEXTURE_NEAREST_LINEAR_MIPLINEAR = 6; et.TEXTURE_NEAREST_LINEAR = 7; et.TEXTURE_NEAREST_NEAREST_MIPLINEAR = 8; et.TEXTURE_LINEAR_NEAREST_MIPNEAREST = 9; et.TEXTURE_LINEAR_NEAREST_MIPLINEAR = 10; et.TEXTURE_LINEAR_LINEAR_MIPNEAREST = 11; et.TEXTURE_LINEAR_NEAREST = 12; et.TEXTURE_EXPLICIT_MODE = 0; et.TEXTURE_SPHERICAL_MODE = 1; et.TEXTURE_PLANAR_MODE = 2; et.TEXTURE_CUBIC_MODE = 3; et.TEXTURE_PROJECTION_MODE = 4; et.TEXTURE_SKYBOX_MODE = 5; et.TEXTURE_INVCUBIC_MODE = 6; et.TEXTURE_EQUIRECTANGULAR_MODE = 7; et.TEXTURE_FIXED_EQUIRECTANGULAR_MODE = 8; et.TEXTURE_FIXED_EQUIRECTANGULAR_MIRRORED_MODE = 9; et.TEXTURE_FILTERING_QUALITY_OFFLINE = 4096; et.TEXTURE_FILTERING_QUALITY_HIGH = 64; et.TEXTURE_FILTERING_QUALITY_MEDIUM = 16; et.TEXTURE_FILTERING_QUALITY_LOW = 8; et.SCALEMODE_FLOOR = 1; et.SCALEMODE_NEAREST = 2; et.SCALEMODE_CEILING = 3; et.MATERIAL_TextureDirtyFlag = 1; et.MATERIAL_LightDirtyFlag = 2; et.MATERIAL_FresnelDirtyFlag = 4; et.MATERIAL_AttributesDirtyFlag = 8; et.MATERIAL_MiscDirtyFlag = 16; et.MATERIAL_PrePassDirtyFlag = 32; et.MATERIAL_AllDirtyFlag = 63; 
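/*
 * Usage sketch for a few of the constants enumerated here, assuming the public Babylon.js API
 * where this minified class is exposed as Constants; `engine`, `scene` and `material` are
 * placeholders for objects created elsewhere.
 *
 *   // Sample a texture with nearest filtering and draw it with additive blending.
 *   const flare = new Texture("flare.png", scene, true, false, Constants.TEXTURE_NEAREST_SAMPLINGMODE);
 *   engine.setAlphaMode(Constants.ALPHA_ADD); // one of the ALPHA_XXX blend modes listed above
 *   material.markAsDirty(Constants.MATERIAL_TextureDirtyFlag); // force shader defines to be re-evaluated
 */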
et.MATERIAL_TriangleFillMode = 0; et.MATERIAL_WireFrameFillMode = 1; et.MATERIAL_PointFillMode = 2; et.MATERIAL_PointListDrawMode = 3; et.MATERIAL_LineListDrawMode = 4; et.MATERIAL_LineLoopDrawMode = 5; et.MATERIAL_LineStripDrawMode = 6; et.MATERIAL_TriangleStripDrawMode = 7; et.MATERIAL_TriangleFanDrawMode = 8; et.MATERIAL_ClockWiseSideOrientation = 0; et.MATERIAL_CounterClockWiseSideOrientation = 1; et.ACTION_NothingTrigger = 0; et.ACTION_OnPickTrigger = 1; et.ACTION_OnLeftPickTrigger = 2; et.ACTION_OnRightPickTrigger = 3; et.ACTION_OnCenterPickTrigger = 4; et.ACTION_OnPickDownTrigger = 5; et.ACTION_OnDoublePickTrigger = 6; et.ACTION_OnPickUpTrigger = 7; et.ACTION_OnPickOutTrigger = 16; et.ACTION_OnLongPressTrigger = 8; et.ACTION_OnPointerOverTrigger = 9; et.ACTION_OnPointerOutTrigger = 10; et.ACTION_OnEveryFrameTrigger = 11; et.ACTION_OnIntersectionEnterTrigger = 12; et.ACTION_OnIntersectionExitTrigger = 13; et.ACTION_OnKeyDownTrigger = 14; et.ACTION_OnKeyUpTrigger = 15; et.PARTICLES_BILLBOARDMODE_Y = 2; et.PARTICLES_BILLBOARDMODE_ALL = 7; et.PARTICLES_BILLBOARDMODE_STRETCHED = 8; et.PARTICLES_BILLBOARDMODE_STRETCHED_LOCAL = 9; et.MESHES_CULLINGSTRATEGY_STANDARD = 0; et.MESHES_CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY = 1; et.MESHES_CULLINGSTRATEGY_OPTIMISTIC_INCLUSION = 2; et.MESHES_CULLINGSTRATEGY_OPTIMISTIC_INCLUSION_THEN_BSPHERE_ONLY = 3; et.SCENELOADER_NO_LOGGING = 0; et.SCENELOADER_MINIMAL_LOGGING = 1; et.SCENELOADER_SUMMARY_LOGGING = 2; et.SCENELOADER_DETAILED_LOGGING = 3; et.PREPASS_IRRADIANCE_TEXTURE_TYPE = 0; et.PREPASS_POSITION_TEXTURE_TYPE = 1; et.PREPASS_VELOCITY_TEXTURE_TYPE = 2; et.PREPASS_REFLECTIVITY_TEXTURE_TYPE = 3; et.PREPASS_COLOR_TEXTURE_TYPE = 4; et.PREPASS_DEPTH_TEXTURE_TYPE = 5; et.PREPASS_NORMAL_TEXTURE_TYPE = 6; et.PREPASS_ALBEDO_SQRT_TEXTURE_TYPE = 7; et.BUFFER_CREATIONFLAG_READ = 1; et.BUFFER_CREATIONFLAG_WRITE = 2; et.BUFFER_CREATIONFLAG_READWRITE = 3; et.BUFFER_CREATIONFLAG_UNIFORM = 4; et.BUFFER_CREATIONFLAG_VERTEX = 8; et.BUFFER_CREATIONFLAG_INDEX = 16; et.BUFFER_CREATIONFLAG_STORAGE = 32; et.RENDERPASS_MAIN = 0; et.INPUT_ALT_KEY = 18; et.INPUT_CTRL_KEY = 17; et.INPUT_META_KEY1 = 91; et.INPUT_META_KEY2 = 92; et.INPUT_META_KEY3 = 93; et.INPUT_SHIFT_KEY = 16; et.SNAPSHOTRENDERING_STANDARD = 0; et.SNAPSHOTRENDERING_FAST = 1; et.PERSPECTIVE_CAMERA = 0; et.ORTHOGRAPHIC_CAMERA = 1; et.FOVMODE_VERTICAL_FIXED = 0; et.FOVMODE_HORIZONTAL_FIXED = 1; et.RIG_MODE_NONE = 0; et.RIG_MODE_STEREOSCOPIC_ANAGLYPH = 10; et.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_PARALLEL = 11; et.RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED = 12; et.RIG_MODE_STEREOSCOPIC_OVERUNDER = 13; et.RIG_MODE_STEREOSCOPIC_INTERLACED = 14; et.RIG_MODE_VR = 20; et.RIG_MODE_CUSTOM = 22; et.MAX_SUPPORTED_UV_SETS = 6; et.GL_ALPHA_EQUATION_ADD = 32774; et.GL_ALPHA_EQUATION_MIN = 32775; et.GL_ALPHA_EQUATION_MAX = 32776; et.GL_ALPHA_EQUATION_SUBTRACT = 32778; et.GL_ALPHA_EQUATION_REVERSE_SUBTRACT = 32779; et.GL_ALPHA_FUNCTION_SRC = 768; et.GL_ALPHA_FUNCTION_ONE_MINUS_SRC_COLOR = 769; et.GL_ALPHA_FUNCTION_SRC_ALPHA = 770; et.GL_ALPHA_FUNCTION_ONE_MINUS_SRC_ALPHA = 771; et.GL_ALPHA_FUNCTION_DST_ALPHA = 772; et.GL_ALPHA_FUNCTION_ONE_MINUS_DST_ALPHA = 773; et.GL_ALPHA_FUNCTION_DST_COLOR = 774; et.GL_ALPHA_FUNCTION_ONE_MINUS_DST_COLOR = 775; et.GL_ALPHA_FUNCTION_SRC_ALPHA_SATURATED = 776; et.GL_ALPHA_FUNCTION_CONSTANT_COLOR = 32769; et.GL_ALPHA_FUNCTION_ONE_MINUS_CONSTANT_COLOR = 32770; et.GL_ALPHA_FUNCTION_CONSTANT_ALPHA = 32771; et.GL_ALPHA_FUNCTION_ONE_MINUS_CONSTANT_ALPHA = 32772; et.SnippetUrl = 
"https://snippet.babylonjs.com"; class Rie { constructor() { this.renderWidth = 512, this.renderHeight = 256, this.textureSize = 512, this.deterministicLockstep = !1, this.lockstepMaxSteps = 4; } } class Pie extends $e { /** * Gets a boolean indicating that the engine is running in deterministic lock step mode * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns true if engine is in deterministic lock step mode */ isDeterministicLockStep() { return this._options.deterministicLockstep; } /** * Gets the max steps when engine is running in deterministic lock step * @see https://doc.babylonjs.com/features/featuresDeepDive/animation/advanced_animations#deterministic-lockstep * @returns the max steps */ getLockstepMaxSteps() { return this._options.lockstepMaxSteps; } /** * Gets the current hardware scaling level. * By default the hardware scaling level is computed from the window device ratio. * if level = 1 then the engine will render at the exact resolution of the canvas. If level = 0.5 then the engine will render at twice the size of the canvas. * @returns a number indicating the current hardware scaling level */ getHardwareScalingLevel() { return 1; } constructor(e = new Rie()) { super(null), $e.Instances.push(this), e.deterministicLockstep === void 0 && (e.deterministicLockstep = !1), e.lockstepMaxSteps === void 0 && (e.lockstepMaxSteps = 4), this._options = e, Uu.SetMatrixPrecision(!!e.useHighPrecisionMatrix), this._caps = { maxTexturesImageUnits: 16, maxVertexTextureImageUnits: 16, maxCombinedTexturesImageUnits: 32, maxTextureSize: 512, maxCubemapTextureSize: 512, maxRenderTextureSize: 512, maxVertexAttribs: 16, maxVaryingVectors: 16, maxFragmentUniformVectors: 16, maxVertexUniformVectors: 16, standardDerivatives: !1, astc: null, pvrtc: null, etc1: null, etc2: null, bptc: null, maxAnisotropy: 0, uintIndices: !1, fragmentDepthSupported: !1, highPrecisionShaderSupported: !0, colorBufferFloat: !1, supportFloatTexturesResolve: !1, textureFloat: !1, textureFloatLinearFiltering: !1, textureFloatRender: !1, textureHalfFloat: !1, textureHalfFloatLinearFiltering: !1, textureHalfFloatRender: !1, textureLOD: !1, texelFetch: !1, drawBuffersExtension: !1, depthTextureExtension: !1, vertexArrayObject: !1, instancedArrays: !1, supportOcclusionQuery: !1, canUseTimestampForTimerQuery: !1, maxMSAASamples: 1, blendMinMax: !1, canUseGLInstanceID: !1, canUseGLVertexID: !1, supportComputeShaders: !1, supportSRGBBuffers: !1, supportTransformFeedbacks: !1, textureMaxLevel: !1, texture2DArrayMaxLayerCount: 128, disableMorphTargetTexture: !1 }, this._features = { forceBitmapOverHTMLImageElement: !1, supportRenderAndCopyToLodForFloatTextures: !1, supportDepthStencilTexture: !1, supportShadowSamplers: !1, uniformBufferHardCheckMatrix: !1, allowTexturePrefiltering: !1, trackUbosInFrame: !1, checkUbosContentBeforeUpload: !1, supportCSM: !1, basisNeedsPOT: !1, support3DTextures: !1, needTypeSuffixInShaderConstants: !1, supportMSAA: !1, supportSSAO2: !1, supportExtendedTextureFormats: !1, supportSwitchCaseInShader: !1, supportSyncTextureRead: !1, needsInvertingBitmap: !1, useUBOBindingCache: !1, needShaderCodeInlining: !1, needToAlwaysBindUniformBuffers: !1, supportRenderPasses: !0, supportSpriteInstancing: !1, forceVertexBufferStrideMultiple4Bytes: !1, _collectUbosUpdatedInFrame: !1 }, Ce.Log(`Babylon.js v${$e.Version} - Null engine`); const t = typeof self < "u" ? self : typeof global < "u" ? 
global : window; typeof URL > "u" && (t.URL = { createObjectURL: function() { }, revokeObjectURL: function() { } }), typeof Blob > "u" && (t.Blob = function() { }); } /** * Creates a vertex buffer * @param vertices the data for the vertex buffer * @returns the new WebGL static buffer */ createVertexBuffer(e) { const t = new JA(); return t.references = 1, t; } /** * Creates a new index buffer * @param indices defines the content of the index buffer * @returns a new webGL buffer */ createIndexBuffer(e) { const t = new JA(); return t.references = 1, t; } /** * Clear the current render buffer or the current render target (if any is set up) * @param color defines the color to use * @param backBuffer defines if the back buffer must be cleared * @param depth defines if the depth buffer must be cleared * @param stencil defines if the stencil buffer must be cleared */ clear(e, t, i, r = !1) { } /** * Gets the current render width * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render width */ getRenderWidth(e = !1) { return !e && this._currentRenderTarget ? this._currentRenderTarget.width : this._options.renderWidth; } /** * Gets the current render height * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render height */ getRenderHeight(e = !1) { return !e && this._currentRenderTarget ? this._currentRenderTarget.height : this._options.renderHeight; } /** * Set the WebGL's viewport * @param viewport defines the viewport element to be used * @param requiredWidth defines the width required for rendering. If not provided the rendering canvas' width is used * @param requiredHeight defines the height required for rendering. If not provided the rendering canvas' height is used */ setViewport(e, t, i) { this._cachedViewport = e; } createShaderProgram(e, t, i, r, s) { return { // eslint-disable-next-line @typescript-eslint/naming-convention __SPECTOR_rebuildProgram: null }; } /** * Gets the list of webGL uniform locations associated with a specific program based on a list of uniform names * @param pipelineContext defines the pipeline context to use * @param uniformsNames defines the list of uniform names * @returns an array of webGL uniform locations */ getUniforms(e, t) { return []; } /** * Gets the lsit of active attributes for a given webGL program * @param pipelineContext defines the pipeline context to use * @param attributesNames defines the list of attribute names to get * @returns an array of indices indicating the offset of each attribute */ getAttributes(e, t) { return []; } /** * Binds an effect to the webGL context * @param effect defines the effect to bind */ bindSamplers(e) { this._currentEffect = null; } /** * Activates an effect, making it the current one (ie. the one used for rendering) * @param effect defines the effect to activate */ enableEffect(e) { e = e !== null && $o.IsWrapper(e) ? 
e.effect : e, this._currentEffect = e, e && (e.onBind && e.onBind(e), e._onBindObservable && e._onBindObservable.notifyObservers(e)); } /** * Set various states to the webGL context * @param culling defines culling state: true to enable culling, false to disable it * @param zOffset defines the value to apply to zOffset (0 by default) * @param force defines if states must be applied even if cache is up to date * @param reverseSide defines if culling must be reversed (CCW if false, CW if true) * @param cullBackFaces true to cull back faces, false to cull front faces (if culling is enabled) * @param stencil stencil states to set * @param zOffsetUnits defines the value to apply to zOffsetUnits (0 by default) */ setState(e, t = 0, i, r = !1, s, n, a = 0) { } /** * Set the value of an uniform to an array of int32 * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if value was set */ setIntArray(e, t) { return !0; } /** * Set the value of an uniform to an array of int32 (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if value was set */ setIntArray2(e, t) { return !0; } /** * Set the value of an uniform to an array of int32 (stored as vec3) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if value was set */ setIntArray3(e, t) { return !0; } /** * Set the value of an uniform to an array of int32 (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of int32 to store * @returns true if value was set */ setIntArray4(e, t) { return !0; } /** * Set the value of an uniform to an array of float32 * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of float32 to store * @returns true if value was set */ setFloatArray(e, t) { return !0; } /** * Set the value of an uniform to an array of float32 (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of float32 to store * @returns true if value was set */ setFloatArray2(e, t) { return !0; } /** * Set the value of an uniform to an array of float32 (stored as vec3) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of float32 to store * @returns true if value was set */ setFloatArray3(e, t) { return !0; } /** * Set the value of an uniform to an array of float32 (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of float32 to store * @returns true if value was set */ setFloatArray4(e, t) { return !0; } /** * Set the value of an uniform to an array of number * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if value was set */ setArray(e, t) { return !0; } /** * Set the value of an uniform to an array of number (stored as vec2) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if value was set */ setArray2(e, t) { return !0; } /** * Set the value of an uniform to an array of number (stored as vec3) * @param uniform defines the webGL uniform 
location where to store the value * @param array defines the array of number to store * @returns true if value was set */ setArray3(e, t) { return !0; } /** * Set the value of an uniform to an array of number (stored as vec4) * @param uniform defines the webGL uniform location where to store the value * @param array defines the array of number to store * @returns true if value was set */ setArray4(e, t) { return !0; } /** * Set the value of an uniform to an array of float32 (stored as matrices) * @param uniform defines the webGL uniform location where to store the value * @param matrices defines the array of float32 to store * @returns true if value was set */ setMatrices(e, t) { return !0; } /** * Set the value of an uniform to a matrix (3x3) * @param uniform defines the webGL uniform location where to store the value * @param matrix defines the Float32Array representing the 3x3 matrix to store * @returns true if value was set */ setMatrix3x3(e, t) { return !0; } /** * Set the value of an uniform to a matrix (2x2) * @param uniform defines the webGL uniform location where to store the value * @param matrix defines the Float32Array representing the 2x2 matrix to store * @returns true if value was set */ setMatrix2x2(e, t) { return !0; } /** * Set the value of an uniform to a number (float) * @param uniform defines the webGL uniform location where to store the value * @param value defines the float number to store * @returns true if value was set */ setFloat(e, t) { return !0; } /** * Set the value of an uniform to a vec2 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @returns true if value was set */ setFloat2(e, t, i) { return !0; } /** * Set the value of an uniform to a vec3 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @returns true if value was set */ setFloat3(e, t, i, r) { return !0; } /** * Set the value of an uniform to a boolean * @param uniform defines the webGL uniform location where to store the value * @param bool defines the boolean to store * @returns true if value was set */ setBool(e, t) { return !0; } /** * Set the value of an uniform to a vec4 * @param uniform defines the webGL uniform location where to store the value * @param x defines the 1st component of the value * @param y defines the 2nd component of the value * @param z defines the 3rd component of the value * @param w defines the 4th component of the value * @returns true if value was set */ setFloat4(e, t, i, r, s) { return !0; } /** * Sets the current alpha mode * @param mode defines the mode to use (one of the Engine.ALPHA_XXX) * @param noDepthWriteChange defines if depth writing state should remains unchanged (false by default) * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/advanced/transparent_rendering */ setAlphaMode(e, t = !1) { this._alphaMode !== e && (this.alphaState.alphaBlend = e !== 0, t || this.setDepthWrite(e === 0), this._alphaMode = e); } /** * Bind webGl buffers directly to the webGL context * @param vertexBuffers defines the vertex buffer to bind * @param indexBuffer defines the index buffer to bind * @param effect defines the effect associated with the vertex buffer */ bindBuffers(e, t, i) { } /** * Force the entire cache to be cleared * You should not have to 
use this function unless your engine needs to share the webGL context with another engine * @param bruteForce defines a boolean to force clearing ALL caches (including stencil, detoh and alpha states) */ wipeCaches(e) { this.preventCacheWipeBetweenFrames || (this.resetTextureCache(), this._currentEffect = null, e && (this._currentProgram = null, this._stencilStateComposer.reset(), this.depthCullingState.reset(), this.alphaState.reset()), this._cachedVertexBuffers = null, this._cachedIndexBuffer = null, this._cachedEffectForVertexBuffers = null); } /** * Send a draw order * @param useTriangles defines if triangles must be used to draw (else wireframe will be used) * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ draw(e, t, i, r) { } /** * Draw a list of indexed primitives * @param fillMode defines the primitive to use * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawElementsType(e, t, i, r) { } /** * Draw a list of unindexed primitives * @param fillMode defines the primitive to use * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawArraysType(e, t, i, r) { } /** @internal */ _createTexture() { return {}; } /** * @internal */ _releaseTexture(e) { } /** * Usually called from Texture.ts. * Passed information to create a WebGLTexture * @param urlArg defines a value which contains one of the following: * * A conventional http URL, e.g. 'http://...' or 'file://...' * * A base64 string of in-line texture data, e.g. 'data:image/jpg;base64,/...' * * An indicator that data being passed using the buffer parameter, e.g. 'data:mytexture.jpg' * @param noMipmap defines a boolean indicating that no mipmaps shall be generated. Ignored for compressed textures. They must be in the file * @param invertY when true, image is flipped when loaded. You probably want true. Certain compressed textures may invert this if their default is inverted (eg. ktx) * @param scene needed for loading to the correct scene * @param samplingMode mode with should be used sample / access the texture (Default: Texture.TRILINEAR_SAMPLINGMODE) * @param onLoad optional callback to be called upon successful completion * @param onError optional callback to be called upon failure * @param buffer a source of a file previously fetched as either a base64 string, an ArrayBuffer (compressed or image format), HTMLImageElement (image format), or a Blob * @param fallback an internal argument in case the function must be called again, due to etc1 not having alpha capabilities * @param format internal format. Default: RGB when extension is '.jpg' else RGBA. 
Ignored for compressed textures * @param forcedExtension defines the extension to use to pick the right loader * @param mimeType defines an optional mime type * @returns a InternalTexture for assignment back into BABYLON.Texture */ createTexture(e, t, i, r, s = 3, n = null, a = null, l = null, o = null, u = null, h = null, d) { const f = new ln(this, ts.Url), p = String(e); return f.url = p, f.generateMipMaps = !t, f.samplingMode = s, f.invertY = i, f.baseWidth = this._options.textureSize, f.baseHeight = this._options.textureSize, f.width = this._options.textureSize, f.height = this._options.textureSize, u && (f.format = u), f.isReady = !0, n && setTimeout(() => { n(f); }), this._internalTexturesCache.push(f), f; } /** * @internal */ _createHardwareRenderTargetWrapper(e, t, i) { const r = new FL(e, t, i, this); return this._renderTargetWrapperCache.push(r), r; } /** * Creates a new render target wrapper * @param size defines the size of the texture * @param options defines the options used to create the texture * @returns a new render target wrapper */ createRenderTargetTexture(e, t) { const i = this._createHardwareRenderTargetWrapper(!1, !1, e), r = {}; t !== void 0 && typeof t == "object" ? (r.generateMipMaps = t.generateMipMaps, r.generateDepthBuffer = t.generateDepthBuffer === void 0 ? !0 : t.generateDepthBuffer, r.generateStencilBuffer = r.generateDepthBuffer && t.generateStencilBuffer, r.type = t.type === void 0 ? 0 : t.type, r.samplingMode = t.samplingMode === void 0 ? 3 : t.samplingMode) : (r.generateMipMaps = t, r.generateDepthBuffer = !0, r.generateStencilBuffer = !1, r.type = 0, r.samplingMode = 3); const s = new ln(this, ts.RenderTarget), n = e.width || e, a = e.height || e; return i._generateDepthBuffer = r.generateDepthBuffer, i._generateStencilBuffer = !!r.generateStencilBuffer, s.baseWidth = n, s.baseHeight = a, s.width = n, s.height = a, s.isReady = !0, s.samples = 1, s.generateMipMaps = !!r.generateMipMaps, s.samplingMode = r.samplingMode, s.type = r.type, this._internalTexturesCache.push(s), i; } /** * Creates a new render target wrapper * @param size defines the size of the texture * @param options defines the options used to create the texture * @returns a new render target wrapper */ createRenderTargetCubeTexture(e, t) { const i = this._createHardwareRenderTargetWrapper(!1, !0, e), r = Object.assign({ generateMipMaps: !0, generateDepthBuffer: !0, generateStencilBuffer: !1, type: 0, samplingMode: 3, format: 5 }, t); r.generateStencilBuffer = r.generateDepthBuffer && r.generateStencilBuffer, (r.type === 1 && !this._caps.textureFloatLinearFiltering || r.type === 2 && !this._caps.textureHalfFloatLinearFiltering) && (r.samplingMode = 1), i._generateDepthBuffer = r.generateDepthBuffer, i._generateStencilBuffer = !!r.generateStencilBuffer; const s = new ln(this, ts.RenderTarget); return s.baseWidth = e, s.baseHeight = e, s.width = e, s.height = e, s.isReady = !0, s.isCube = !0, s.samples = 1, s.generateMipMaps = !!r.generateMipMaps, s.samplingMode = r.samplingMode, s.type = r.type, this._internalTexturesCache.push(s), i; } /** * Update the sampling mode of a given texture * @param samplingMode defines the required sampling mode * @param texture defines the texture to update */ updateTextureSamplingMode(e, t) { t.samplingMode = e; } /** * Creates a raw texture * @param data defines the data to store in the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param format defines the format of the data * @param 
generateMipMaps defines if the engine should generate the mip levels * @param invertY defines if data must be stored with Y axis inverted * @param samplingMode defines the required sampling mode (Texture.NEAREST_SAMPLINGMODE by default) * @param compression defines the compression used (null by default) * @param type defines the type fo the data (Engine.TEXTURETYPE_UNSIGNED_INT by default) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns the raw texture inside an InternalTexture */ createRawTexture(e, t, i, r, s, n, a, l = null, o = 0, u = 0, h = !1) { const d = new ln(this, ts.Raw); return d.baseWidth = t, d.baseHeight = i, d.width = t, d.height = i, d.format = r, d.generateMipMaps = s, d.samplingMode = a, d.invertY = n, d._compression = l, d.type = o, d._useSRGBBuffer = h, this._doNotHandleContextLost || (d._bufferView = e), d; } /** * Update a raw texture * @param texture defines the texture to update * @param data defines the data to store in the texture * @param format defines the format of the data * @param invertY defines if data must be stored with Y axis inverted * @param compression defines the compression used (null by default) * @param type defines the type fo the data (Engine.TEXTURETYPE_UNSIGNED_INT by default) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). */ updateRawTexture(e, t, i, r, s = null, n = 0, a = !1) { e && (e._bufferView = t, e.format = i, e.invertY = r, e._compression = s, e.type = n, e._useSRGBBuffer = a); } /** * Binds the frame buffer to the specified texture. * @param rtWrapper The render target wrapper to render to * @param faceIndex The face of the texture to render to in case of cube texture * @param requiredWidth The width of the target to render to * @param requiredHeight The height of the target to render to * @param forceFullscreenViewport Forces the viewport to be the entire texture/screen if true */ bindFramebuffer(e, t, i, r, s) { this._currentRenderTarget && this.unBindFramebuffer(this._currentRenderTarget), this._currentRenderTarget = e, this._currentFramebuffer = null, this._cachedViewport && !s && this.setViewport(this._cachedViewport, i, r); } /** * Unbind the current render target texture from the webGL context * @param rtWrapper defines the render target wrapper to unbind * @param disableGenerateMipMaps defines a boolean indicating that mipmaps must not be generated * @param onBeforeUnbind defines a function which will be called before the effective unbind */ unBindFramebuffer(e, t = !1, i) { this._currentRenderTarget = null, i && i(), this._currentFramebuffer = null; } /** * Creates a dynamic vertex buffer * @param vertices the data for the dynamic vertex buffer * @returns the new WebGL dynamic buffer */ createDynamicVertexBuffer(e) { const t = new JA(); return t.references = 1, t.capacity = 1, t; } /** * Update the content of a dynamic texture * @param texture defines the texture to update * @param canvas defines the canvas containing the source * @param invertY defines if data must be stored with Y axis inverted * @param premulAlpha defines if alpha is stored as premultiplied * @param format defines the format of the data */ updateDynamicTexture(e, t, i, r = !1, s) { } /** * Gets a boolean indicating if all created effects are ready * @returns true if all effects are ready */ areAllEffectsReady() { return 
!0; } /** * @internal * Get the current error code of the webGL context * @returns the error code * @see https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/getError */ getError() { return 0; } /** @internal */ _getUnpackAlignement() { return 1; } /** * @internal */ _unpackFlipY(e) { } /** * Update a dynamic index buffer * @param indexBuffer defines the target index buffer * @param indices defines the data to update * @param offset defines the offset in the target index buffer where update should start */ updateDynamicIndexBuffer(e, t, i = 0) { } /** * Updates a dynamic vertex buffer. * @param vertexBuffer the vertex buffer to update * @param vertices the data used to update the vertex buffer * @param byteOffset the byte offset of the data (optional) * @param byteLength the byte length of the data (optional) */ updateDynamicVertexBuffer(e, t, i, r) { } /** * @internal */ _bindTextureDirectly(e, t) { return this._boundTexturesCache[this._activeChannel] !== t ? (this._boundTexturesCache[this._activeChannel] = t, !0) : !1; } /** * @internal */ _bindTexture(e, t) { e < 0 || this._bindTextureDirectly(0, t); } _deleteBuffer(e) { } /** * Force the engine to release all cached effects. This means that next effect compilation will have to be done completely even if a similar effect was already compiled */ releaseEffects() { } displayLoadingUI() { } hideLoadingUI() { } set loadingUIText(e) { } /** * @internal */ _uploadCompressedDataToTextureDirectly(e, t, i, r, s, n = 0, a = 0) { } /** * @internal */ _uploadDataToTextureDirectly(e, t, i = 0, r = 0) { } /** * @internal */ _uploadArrayBufferViewToTexture(e, t, i = 0, r = 0) { } /** * @internal */ _uploadImageToTexture(e, t, i = 0, r = 0) { } } mi.prototype._debugPushGroup = function(c, e) { }; mi.prototype._debugPopGroup = function(c) { }; mi.prototype._debugInsertMarker = function(c, e) { }; mi.prototype._debugFlushPendingCommands = function() { }; class Iie { constructor() { this._timeElapsedQueryEnded = !1; } } class Die { constructor() { this.occlusionInternalRetryCounter = 0, this.isOcclusionQueryInProgress = !1, this.isOccluded = !1, this.occlusionRetryCount = -1, this.occlusionType = xr.OCCLUSION_TYPE_NONE, this.occlusionQueryAlgorithmType = xr.OCCLUSION_ALGORITHM_TYPE_CONSERVATIVE, this.forceRenderingWhenOccluded = !1; } } $e.prototype.createQuery = function() { const c = this._gl.createQuery(); if (!c) throw new Error("Unable to create Occlusion Query"); return c; }; $e.prototype.deleteQuery = function(c) { return this._gl.deleteQuery(c), this; }; $e.prototype.isQueryResultAvailable = function(c) { return this._gl.getQueryParameter(c, this._gl.QUERY_RESULT_AVAILABLE); }; $e.prototype.getQueryResult = function(c) { return this._gl.getQueryParameter(c, this._gl.QUERY_RESULT); }; $e.prototype.beginOcclusionQuery = function(c, e) { const t = this._getGlAlgorithmType(c); return this._gl.beginQuery(t, e), !0; }; $e.prototype.endOcclusionQuery = function(c) { const e = this._getGlAlgorithmType(c); return this._gl.endQuery(e), this; }; $e.prototype._createTimeQuery = function() { const c = this.getCaps().timerQuery; return c.createQueryEXT ? c.createQueryEXT() : this.createQuery(); }; $e.prototype._deleteTimeQuery = function(c) { const e = this.getCaps().timerQuery; if (e.deleteQueryEXT) { e.deleteQueryEXT(c); return; } this.deleteQuery(c); }; $e.prototype._getTimeQueryResult = function(c) { const e = this.getCaps().timerQuery; return e.getQueryObjectEXT ? 
e.getQueryObjectEXT(c, e.QUERY_RESULT_EXT) : this.getQueryResult(c); }; $e.prototype._getTimeQueryAvailability = function(c) { const e = this.getCaps().timerQuery; return e.getQueryObjectEXT ? e.getQueryObjectEXT(c, e.QUERY_RESULT_AVAILABLE_EXT) : this.isQueryResultAvailable(c); }; $e.prototype.startTimeQuery = function() { const c = this.getCaps(), e = c.timerQuery; if (!e) return null; const t = new Iie(); if (this._gl.getParameter(e.GPU_DISJOINT_EXT), c.canUseTimestampForTimerQuery) t._startTimeQuery = this._createTimeQuery(), e.queryCounterEXT(t._startTimeQuery, e.TIMESTAMP_EXT); else { if (this._currentNonTimestampToken) return this._currentNonTimestampToken; t._timeElapsedQuery = this._createTimeQuery(), e.beginQueryEXT ? e.beginQueryEXT(e.TIME_ELAPSED_EXT, t._timeElapsedQuery) : this._gl.beginQuery(e.TIME_ELAPSED_EXT, t._timeElapsedQuery), this._currentNonTimestampToken = t; } return t; }; $e.prototype.endTimeQuery = function(c) { const e = this.getCaps(), t = e.timerQuery; if (!t || !c) return -1; if (e.canUseTimestampForTimerQuery) { if (!c._startTimeQuery) return -1; c._endTimeQuery || (c._endTimeQuery = this._createTimeQuery(), t.queryCounterEXT(c._endTimeQuery, t.TIMESTAMP_EXT)); } else if (!c._timeElapsedQueryEnded) { if (!c._timeElapsedQuery) return -1; t.endQueryEXT ? t.endQueryEXT(t.TIME_ELAPSED_EXT) : (this._gl.endQuery(t.TIME_ELAPSED_EXT), this._currentNonTimestampToken = null), c._timeElapsedQueryEnded = !0; } const i = this._gl.getParameter(t.GPU_DISJOINT_EXT); let r = !1; if (c._endTimeQuery ? r = this._getTimeQueryAvailability(c._endTimeQuery) : c._timeElapsedQuery && (r = this._getTimeQueryAvailability(c._timeElapsedQuery)), r && !i) { let s = 0; if (e.canUseTimestampForTimerQuery) { if (!c._startTimeQuery || !c._endTimeQuery) return -1; const n = this._getTimeQueryResult(c._startTimeQuery); s = this._getTimeQueryResult(c._endTimeQuery) - n, this._deleteTimeQuery(c._startTimeQuery), this._deleteTimeQuery(c._endTimeQuery), c._startTimeQuery = null, c._endTimeQuery = null; } else { if (!c._timeElapsedQuery) return -1; s = this._getTimeQueryResult(c._timeElapsedQuery), this._deleteTimeQuery(c._timeElapsedQuery), c._timeElapsedQuery = null, c._timeElapsedQueryEnded = !1; } return s; } return -1; }; $e.prototype._captureGPUFrameTime = !1; $e.prototype._gpuFrameTime = new Vc(); $e.prototype.getGPUFrameTimeCounter = function() { return this._gpuFrameTime; }; $e.prototype.captureGPUFrameTime = function(c) { c !== this._captureGPUFrameTime && (this._captureGPUFrameTime = c, c ? (this._onBeginFrameObserver = this.onBeginFrameObservable.add(() => { this._gpuFrameTimeToken || (this._gpuFrameTimeToken = this.startTimeQuery()); }), this._onEndFrameObserver = this.onEndFrameObservable.add(() => { if (!this._gpuFrameTimeToken) return; const e = this.endTimeQuery(this._gpuFrameTimeToken); e > -1 && (this._gpuFrameTimeToken = null, this._gpuFrameTime.fetchNewFrame(), this._gpuFrameTime.addCount(e, !0)); })) : (this.onBeginFrameObservable.remove(this._onBeginFrameObserver), this._onBeginFrameObserver = null, this.onEndFrameObservable.remove(this._onEndFrameObserver), this._onEndFrameObserver = null)); }; $e.prototype._getGlAlgorithmType = function(c) { return c === xr.OCCLUSION_ALGORITHM_TYPE_CONSERVATIVE ? 
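// ---------------------------------------------------------------------------
// The prototype patches above add GPU timer queries (EXT_disjoint_timer_query)
// to the engine minified here as `$e` (assumed to be the Babylon.js Engine).
// captureGPUFrameTime() wires begin/end-frame observers around startTimeQuery /
// endTimeQuery and accumulates the result in a perf counter. Usage sketch,
// assuming the perf counter exposes the usual PerfCounter fields such as
// `current` (not executed here):
//
//   engine.captureGPUFrameTime(true);
//   scene.onAfterRenderObservable.add(() => {
//     const counter = engine.getGPUFrameTimeCounter();
//     // endTimeQuery() reports nanoseconds, so divide by 1e6 for milliseconds
//     console.log("GPU frame time:", (counter.current / 1e6).toFixed(2), "ms");
//   });
//
// A value of -1 from endTimeQuery means the query result is not available yet
// or the GPU timer was disjoint for that frame.
// ---------------------------------------------------------------------------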
this._gl.ANY_SAMPLES_PASSED_CONSERVATIVE : this._gl.ANY_SAMPLES_PASSED; }; Object.defineProperty(xr.prototype, "isOcclusionQueryInProgress", { get: function() { return this._occlusionDataStorage.isOcclusionQueryInProgress; }, set: function(c) { this._occlusionDataStorage.isOcclusionQueryInProgress = c; }, enumerable: !1, configurable: !0 }); Object.defineProperty(xr.prototype, "_occlusionDataStorage", { get: function() { return this.__occlusionDataStorage || (this.__occlusionDataStorage = new Die()), this.__occlusionDataStorage; }, enumerable: !1, configurable: !0 }); Object.defineProperty(xr.prototype, "isOccluded", { get: function() { return this._occlusionDataStorage.isOccluded; }, set: function(c) { this._occlusionDataStorage.isOccluded = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(xr.prototype, "occlusionQueryAlgorithmType", { get: function() { return this._occlusionDataStorage.occlusionQueryAlgorithmType; }, set: function(c) { this._occlusionDataStorage.occlusionQueryAlgorithmType = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(xr.prototype, "occlusionType", { get: function() { return this._occlusionDataStorage.occlusionType; }, set: function(c) { this._occlusionDataStorage.occlusionType = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(xr.prototype, "occlusionRetryCount", { get: function() { return this._occlusionDataStorage.occlusionRetryCount; }, set: function(c) { this._occlusionDataStorage.occlusionRetryCount = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(xr.prototype, "forceRenderingWhenOccluded", { get: function() { return this._occlusionDataStorage.forceRenderingWhenOccluded; }, set: function(c) { this._occlusionDataStorage.forceRenderingWhenOccluded = c; }, enumerable: !0, configurable: !0 }); xr.prototype._checkOcclusionQuery = function() { const c = this._occlusionDataStorage; if (c.occlusionType === xr.OCCLUSION_TYPE_NONE) return c.isOccluded = !1, !1; const e = this.getEngine(); if (!e.getCaps().supportOcclusionQuery || !e.isQueryResultAvailable) return c.isOccluded = !1, !1; if (this.isOcclusionQueryInProgress && this._occlusionQuery !== null && this._occlusionQuery !== void 0) if (e.isQueryResultAvailable(this._occlusionQuery)) { const r = e.getQueryResult(this._occlusionQuery); c.isOcclusionQueryInProgress = !1, c.occlusionInternalRetryCounter = 0, c.isOccluded = !(r > 0); } else if (c.occlusionInternalRetryCounter++, c.occlusionRetryCount !== -1 && c.occlusionInternalRetryCounter > c.occlusionRetryCount) c.isOcclusionQueryInProgress = !1, c.occlusionInternalRetryCounter = 0, c.isOccluded = c.occlusionType === xr.OCCLUSION_TYPE_OPTIMISTIC ? !1 : c.isOccluded; else return c.occlusionType === xr.OCCLUSION_TYPE_OPTIMISTIC ? 
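// ---------------------------------------------------------------------------
// The accessors above expose per-mesh occlusion-query state, backed by the
// lazily created _occlusionDataStorage (`xr` is assumed to be AbstractMesh).
// _checkOcclusionQuery() renders the bounding box inside an occlusion query
// and flips isOccluded based on the ANY_SAMPLES_PASSED result. Configuration
// sketch using only the properties and constants referenced above (not
// executed here):
//
//   mesh.occlusionType = AbstractMesh.OCCLUSION_TYPE_OPTIMISTIC; // keep rendering while waiting
//   mesh.occlusionQueryAlgorithmType = AbstractMesh.OCCLUSION_ALGORITHM_TYPE_CONSERVATIVE;
//   mesh.occlusionRetryCount = 10;   // -1 (the default) retries indefinitely
//   scene.onAfterRenderObservable.add(() => {
//     if (mesh.isOccluded) { /* skip expensive per-mesh work */ }
//   });
// ---------------------------------------------------------------------------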
!1 : c.isOccluded; const t = this.getScene(); if (t.getBoundingBoxRenderer) { const i = t.getBoundingBoxRenderer(); this._occlusionQuery === null && (this._occlusionQuery = e.createQuery()), e.beginOcclusionQuery(c.occlusionQueryAlgorithmType, this._occlusionQuery) && (i.renderOcclusionBoundingBox(this), e.endOcclusionQuery(c.occlusionQueryAlgorithmType), this._occlusionDataStorage.isOcclusionQueryInProgress = !0); } return c.isOccluded; }; var dfe = !0; $e.prototype.createTransformFeedback = function() { const c = this._gl.createTransformFeedback(); if (!c) throw new Error("Unable to create Transform Feedback"); return c; }; $e.prototype.deleteTransformFeedback = function(c) { this._gl.deleteTransformFeedback(c); }; $e.prototype.bindTransformFeedback = function(c) { this._gl.bindTransformFeedback(this._gl.TRANSFORM_FEEDBACK, c); }; $e.prototype.beginTransformFeedback = function(c = !0) { this._gl.beginTransformFeedback(c ? this._gl.POINTS : this._gl.TRIANGLES); }; $e.prototype.endTransformFeedback = function() { this._gl.endTransformFeedback(); }; $e.prototype.setTranformFeedbackVaryings = function(c, e) { this._gl.transformFeedbackVaryings(c, e, this._gl.INTERLEAVED_ATTRIBS); }; $e.prototype.bindTransformFeedbackBuffer = function(c) { this._gl.bindBufferBase(this._gl.TRANSFORM_FEEDBACK_BUFFER, 0, c ? c.underlyingResource : null); }; mi.prototype.createExternalTexture = function(c) { return null; }; mi.prototype.setExternalTexture = function(c, e) { throw new Error("setExternalTexture: This engine does not support external textures!"); }; mi.prototype.updateVideoTexture = function(c, e, t) { if (!c || c._isDisabled) return; const i = this._getInternalFormat(c.format), r = this._getRGBABufferInternalSizedFormat(0, c.format), s = this._bindTextureDirectly(this._gl.TEXTURE_2D, c, !0); this._unpackFlipY(!t); try { if (this._videoTextureSupported === void 0 && (this._gl.getError(), this._gl.texImage2D(this._gl.TEXTURE_2D, 0, r, i, this._gl.UNSIGNED_BYTE, e), this._gl.getError() !== 0 ? this._videoTextureSupported = !1 : this._videoTextureSupported = !0), this._videoTextureSupported) this._gl.texImage2D(this._gl.TEXTURE_2D, 0, r, i, this._gl.UNSIGNED_BYTE, e); else { if (!c._workingCanvas) { c._workingCanvas = this.createCanvas(c.width, c.height); const n = c._workingCanvas.getContext("2d"); if (!n) throw new Error("Unable to get 2d context"); c._workingContext = n, c._workingCanvas.width = c.width, c._workingCanvas.height = c.height; } c._workingContext.clearRect(0, 0, c.width, c.height), c._workingContext.drawImage(e, 0, 0, e.videoWidth, e.videoHeight, 0, 0, c.width, c.height), this._gl.texImage2D(this._gl.TEXTURE_2D, 0, r, i, this._gl.UNSIGNED_BYTE, c._workingCanvas); } c.generateMipMaps && this._gl.generateMipmap(this._gl.TEXTURE_2D), s || this._bindTextureDirectly(this._gl.TEXTURE_2D, null), c.isReady = !0; } catch { c._isDisabled = !0; } }; mi.prototype.restoreSingleAttachment = function() { const c = this._gl; this.bindAttachments([c.BACK]); }; mi.prototype.restoreSingleAttachmentForRenderTarget = function() { const c = this._gl; this.bindAttachments([c.COLOR_ATTACHMENT0]); }; mi.prototype.buildTextureLayout = function(c) { const e = this._gl, t = []; for (let i = 0; i < c.length; i++) c[i] ? 
t.push(e["COLOR_ATTACHMENT" + i]) : t.push(e.NONE); return t; }; mi.prototype.bindAttachments = function(c) { this._gl.drawBuffers(c); }; mi.prototype.unBindMultiColorAttachmentFramebuffer = function(c, e = !1, t) { this._currentRenderTarget = null; const i = this._gl, r = c._attachments, s = r.length; if (c._MSAAFramebuffer) { i.bindFramebuffer(i.READ_FRAMEBUFFER, c._MSAAFramebuffer), i.bindFramebuffer(i.DRAW_FRAMEBUFFER, c._framebuffer); for (let n = 0; n < s; n++) { const a = c.textures[n]; for (let l = 0; l < s; l++) r[l] = i.NONE; r[n] = i[this.webGLVersion > 1 ? "COLOR_ATTACHMENT" + n : "COLOR_ATTACHMENT" + n + "_WEBGL"], i.readBuffer(r[n]), i.drawBuffers(r), i.blitFramebuffer(0, 0, a.width, a.height, 0, 0, a.width, a.height, i.COLOR_BUFFER_BIT, i.NEAREST); } for (let n = 0; n < s; n++) r[n] = i[this.webGLVersion > 1 ? "COLOR_ATTACHMENT" + n : "COLOR_ATTACHMENT" + n + "_WEBGL"]; i.drawBuffers(r); } for (let n = 0; n < s; n++) { const a = c.textures[n]; a != null && a.generateMipMaps && !e && !a.isCube && (this._bindTextureDirectly(i.TEXTURE_2D, a, !0), i.generateMipmap(i.TEXTURE_2D), this._bindTextureDirectly(i.TEXTURE_2D, null)); } t && (c._MSAAFramebuffer && this._bindUnboundFramebuffer(c._framebuffer), t()), this._bindUnboundFramebuffer(null); }; mi.prototype.createMultipleRenderTarget = function(c, e, t = !0) { var i, r; let s = !1, n = !0, a = !1, l = !1, o = 15, u = 1; const h = 0, d = 3, f = !1, p = 5, m = 3553; let _ = [], v = [], C = [], x = [], b = [], S = [], M = [], R = []; const w = this._createHardwareRenderTargetWrapper(!0, !1, c); e !== void 0 && (s = e.generateMipMaps === void 0 ? !1 : e.generateMipMaps, n = e.generateDepthBuffer === void 0 ? !0 : e.generateDepthBuffer, a = e.generateStencilBuffer === void 0 ? !1 : e.generateStencilBuffer, l = e.generateDepthTexture === void 0 ? !1 : e.generateDepthTexture, u = e.textureCount || 1, e.types && (_ = e.types), e.samplingModes && (v = e.samplingModes), e.useSRGBBuffers && (C = e.useSRGBBuffers), e.formats && (x = e.formats), e.targetTypes && (b = e.targetTypes), e.faceIndex && (S = e.faceIndex), e.layerIndex && (M = e.layerIndex), e.layerCounts && (R = e.layerCounts), this.webGLVersion > 1 && (e.depthTextureFormat === 13 || e.depthTextureFormat === 17 || e.depthTextureFormat === 16 || e.depthTextureFormat === 14 || e.depthTextureFormat === 18) && (o = e.depthTextureFormat)), w.label = (i = e == null ? void 0 : e.label) !== null && i !== void 0 ? i : "MultiRenderTargetWrapper"; const V = this._gl, k = V.createFramebuffer(); this._bindUnboundFramebuffer(k); const L = c.width || c, B = c.height || c, U = [], K = [], ee = this.webGLVersion > 1 && l && (e.depthTextureFormat === 13 || e.depthTextureFormat === 17 || e.depthTextureFormat === 18), Z = this._setupFramebufferDepthAttachments(!ee && a, !l && n, L, B); w._framebuffer = k, w._depthStencilBuffer = Z, w._generateDepthBuffer = !l && n, w._generateStencilBuffer = !ee && a, w._attachments = K; for (let q = 0; q < u; q++) { let le = v[q] || d, ie = _[q] || h, $ = C[q] || f; const j = x[q] || p, J = b[q] || m, ne = (r = R[q]) !== null && r !== void 0 ? r : 1; (ie === 1 && !this._caps.textureFloatLinearFiltering || ie === 2 && !this._caps.textureHalfFloatLinearFiltering) && (le = 1); const pe = this._getSamplingParameters(le, s); ie === 1 && !this._caps.textureFloat && (ie = 0, Ce.Warn("Float textures are not supported. 
Render target forced to TEXTURETYPE_UNSIGNED_BYTE type")), $ = $ && this._caps.supportSRGBBuffers && (this.webGLVersion > 1 || this.isWebGPU); const ge = this.webGLVersion > 1, Ie = V[ge ? "COLOR_ATTACHMENT" + q : "COLOR_ATTACHMENT" + q + "_WEBGL"]; if (K.push(Ie), J === -1) continue; const ye = new ln(this, ts.MultiRenderTarget); U[q] = ye, V.activeTexture(V["TEXTURE" + q]), V.bindTexture(J, ye._hardwareTexture.underlyingResource), V.texParameteri(J, V.TEXTURE_MAG_FILTER, pe.mag), V.texParameteri(J, V.TEXTURE_MIN_FILTER, pe.min), V.texParameteri(J, V.TEXTURE_WRAP_S, V.CLAMP_TO_EDGE), V.texParameteri(J, V.TEXTURE_WRAP_T, V.CLAMP_TO_EDGE); const Se = this._getRGBABufferInternalSizedFormat(ie, j, $), re = this._getInternalFormat(j), te = this._getWebGLTextureType(ie); if (ge && (J === 35866 || J === 32879)) J === 35866 ? ye.is2DArray = !0 : ye.is3D = !0, ye.baseDepth = ye.depth = ne, V.texImage3D(J, 0, Se, L, B, ne, 0, re, te, null); else if (J === 34067) { for (let he = 0; he < 6; he++) V.texImage2D(V.TEXTURE_CUBE_MAP_POSITIVE_X + he, 0, Se, L, B, 0, re, te, null); ye.isCube = !0; } else V.texImage2D(V.TEXTURE_2D, 0, Se, L, B, 0, re, te, null); s && V.generateMipmap(J), this._bindTextureDirectly(J, null), ye.baseWidth = L, ye.baseHeight = B, ye.width = L, ye.height = B, ye.isReady = !0, ye.samples = 1, ye.generateMipMaps = s, ye.samplingMode = le, ye.type = ie, ye._useSRGBBuffer = $, ye.format = j, this._internalTexturesCache.push(ye); } if (l && this._caps.depthTextureExtension) { const q = new ln(this, ts.Depth); let le = 5, ie = V.DEPTH_COMPONENT16, $ = V.DEPTH_COMPONENT, j = V.UNSIGNED_SHORT, J = V.DEPTH_ATTACHMENT; this.webGLVersion < 2 ? ie = V.DEPTH_COMPONENT : o === 14 ? (le = 1, j = V.FLOAT, ie = V.DEPTH_COMPONENT32F) : o === 18 ? (le = 0, j = V.FLOAT_32_UNSIGNED_INT_24_8_REV, ie = V.DEPTH32F_STENCIL8, $ = V.DEPTH_STENCIL, J = V.DEPTH_STENCIL_ATTACHMENT) : o === 16 ? 
(le = 0, j = V.UNSIGNED_INT, ie = V.DEPTH_COMPONENT24, J = V.DEPTH_ATTACHMENT) : (o === 13 || o === 17) && (le = 12, j = V.UNSIGNED_INT_24_8, ie = V.DEPTH24_STENCIL8, $ = V.DEPTH_STENCIL, J = V.DEPTH_STENCIL_ATTACHMENT), V.activeTexture(V.TEXTURE0), V.bindTexture(V.TEXTURE_2D, q._hardwareTexture.underlyingResource), V.texParameteri(V.TEXTURE_2D, V.TEXTURE_MAG_FILTER, V.NEAREST), V.texParameteri(V.TEXTURE_2D, V.TEXTURE_MIN_FILTER, V.NEAREST), V.texParameteri(V.TEXTURE_2D, V.TEXTURE_WRAP_S, V.CLAMP_TO_EDGE), V.texParameteri(V.TEXTURE_2D, V.TEXTURE_WRAP_T, V.CLAMP_TO_EDGE), V.texImage2D(V.TEXTURE_2D, 0, ie, L, B, 0, $, j, null), V.framebufferTexture2D(V.FRAMEBUFFER, J, V.TEXTURE_2D, q._hardwareTexture.underlyingResource, 0), q.baseWidth = L, q.baseHeight = B, q.width = L, q.height = B, q.isReady = !0, q.samples = 1, q.generateMipMaps = s, q.samplingMode = 1, q.format = o, q.type = le, U[u] = q, this._internalTexturesCache.push(q); } return w.setTextures(U), t && V.drawBuffers(K), this._bindUnboundFramebuffer(null), w.setLayerAndFaceIndices(M, S), this.resetTextureCache(), w; }; mi.prototype.updateMultipleRenderTargetTextureSampleCount = function(c, e, t = !0) { if (this.webGLVersion < 2 || !c || !c.texture) return 1; if (c.samples === e) return e; const i = c._attachments.length; if (i === 0) return 1; const r = this._gl; e = Math.min(e, this.getCaps().maxMSAASamples); const s = !!c._depthStencilBuffer; if (s && (r.deleteRenderbuffer(c._depthStencilBuffer), c._depthStencilBuffer = null), c._MSAAFramebuffer && (r.deleteFramebuffer(c._MSAAFramebuffer), c._MSAAFramebuffer = null), e > 1 && typeof r.renderbufferStorageMultisample == "function") { const n = r.createFramebuffer(); if (!n) throw new Error("Unable to create multi sampled framebuffer"); c._MSAAFramebuffer = n, this._bindUnboundFramebuffer(n); const a = []; for (let l = 0; l < i; l++) c.textures[l]._hardwareTexture.releaseMSAARenderBuffers(); for (let l = 0; l < i; l++) { const o = c.textures[l], u = o._hardwareTexture, h = r[this.webGLVersion > 1 ? "COLOR_ATTACHMENT" + l : "COLOR_ATTACHMENT" + l + "_WEBGL"], d = this._createRenderBuffer(o.width, o.height, e, -1, this._getRGBABufferInternalSizedFormat(o.type, o.format, o._useSRGBBuffer), h); if (!d) throw new Error("Unable to create multi sampled framebuffer"); u.addMSAARenderBuffer(d), o.samples = e, a.push(h); } t && r.drawBuffers(a); } else this._bindUnboundFramebuffer(c._framebuffer); return s && (c._depthStencilBuffer = this._setupFramebufferDepthAttachments(c._generateStencilBuffer, c._generateDepthBuffer, c.texture.width, c.texture.height, e)), this._bindUnboundFramebuffer(null), e; }; mi.prototype._createDepthStencilCubeTexture = function(c, e, t) { const i = new ln(this, ts.DepthStencil); if (i.isCube = !0, this.webGLVersion === 1) return Ce.Error("Depth cube texture is not supported by WebGL 1."), i; const r = Object.assign({ bilinearFiltering: !1, comparisonFunction: 0, generateStencil: !1 }, e), s = this._gl; this._bindTextureDirectly(s.TEXTURE_CUBE_MAP, i, !0), this._setupDepthStencilTexture(i, c, r.generateStencil, r.bilinearFiltering, r.comparisonFunction), t._depthStencilTexture = i, t._depthStencilTextureWithStencil = r.generateStencil; for (let n = 0; n < 6; n++) r.generateStencil ? 
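// ---------------------------------------------------------------------------
// createMultipleRenderTarget (above) builds a single framebuffer with N color
// attachments plus an optional depth texture, and
// updateMultipleRenderTargetTextureSampleCount switches it to multisampled
// render buffers when renderbufferStorageMultisample is available. Option
// sketch using the option names read by that code (not executed here):
//
//   const mrt = engine.createMultipleRenderTarget({ width: 1024, height: 1024 }, {
//     textureCount: 3,
//     generateMipMaps: false,
//     generateDepthBuffer: true,
//     generateDepthTexture: true,   // appends an extra depth InternalTexture
//     types: [0, 2, 1],             // per-attachment texture types (0 = unsigned byte,
//                                   // 1 = float, 2 = half float)
//     samplingModes: [3, 3, 1]      // float targets fall back to NEAREST when linear
//                                   // filtering is unsupported
//   });
//   engine.updateMultipleRenderTargetTextureSampleCount(mrt, 4); // clamped to maxMSAASamples
// ---------------------------------------------------------------------------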
s.texImage2D(s.TEXTURE_CUBE_MAP_POSITIVE_X + n, 0, s.DEPTH24_STENCIL8, c, c, 0, s.DEPTH_STENCIL, s.UNSIGNED_INT_24_8, null) : s.texImage2D(s.TEXTURE_CUBE_MAP_POSITIVE_X + n, 0, s.DEPTH_COMPONENT24, c, c, 0, s.DEPTH_COMPONENT, s.UNSIGNED_INT, null); return this._bindTextureDirectly(s.TEXTURE_CUBE_MAP, null), this._internalTexturesCache.push(i), i; }; mi.prototype._partialLoadFile = function(c, e, t, i, r = null) { const s = (a) => { t[e] = a, t._internalCount++, t._internalCount === 6 && i(t); }, n = (a, l) => { r && a && r(a.status + " " + a.statusText, l); }; this._loadFile(c, s, void 0, void 0, !0, n); }; mi.prototype._cascadeLoadFiles = function(c, e, t, i = null) { const r = []; r._internalCount = 0; for (let s = 0; s < 6; s++) this._partialLoadFile(t[s], s, r, e, i); }; mi.prototype._cascadeLoadImgs = function(c, e, t, i, r = null, s) { const n = []; n._internalCount = 0; for (let a = 0; a < 6; a++) this._partialLoadImg(i[a], a, n, c, e, t, r, s); }; mi.prototype._partialLoadImg = function(c, e, t, i, r, s, n = null, a) { const l = G_(); fw(c, (h) => { t[e] = h, t._internalCount++, i && i.removePendingData(l), t._internalCount === 6 && s && s(r, t); }, (h, d) => { i && i.removePendingData(l), n && n(h, d); }, i ? i.offlineProvider : null, a), i && i.addPendingData(l); }; mi.prototype._setCubeMapTextureParams = function(c, e, t) { const i = this._gl; i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_MAG_FILTER, i.LINEAR), i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_MIN_FILTER, e ? i.LINEAR_MIPMAP_LINEAR : i.LINEAR), i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_WRAP_S, i.CLAMP_TO_EDGE), i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_WRAP_T, i.CLAMP_TO_EDGE), c.samplingMode = e ? 3 : 2, e && this.getCaps().textureMaxLevel && t !== void 0 && t > 0 && (i.texParameteri(i.TEXTURE_CUBE_MAP, i.TEXTURE_MAX_LEVEL, t), c._maxLodLevel = t), this._bindTextureDirectly(i.TEXTURE_CUBE_MAP, null); }; mi.prototype.createCubeTextureBase = function(c, e, t, i, r = null, s = null, n, a = null, l = !1, o = 0, u = 0, h = null, d = null, f = null, p = !1) { const m = h || new ln(this, ts.Cube); m.isCube = !0, m.url = c, m.generateMipMaps = !i, m._lodGenerationScale = o, m._lodGenerationOffset = u, m._useSRGBBuffer = !!p && this._caps.supportSRGBBuffers && (this.webGLVersion > 1 || this.isWebGPU || !!i), m !== h && (m.label = c.substring(0, 60)), this._doNotHandleContextLost || (m._extension = a, m._files = t); const _ = c; this._transformTextureUrl && !h && (c = this._transformTextureUrl(c)); const v = c.split("?")[0], C = v.lastIndexOf("."), x = a || (C > -1 ? v.substring(C).toLowerCase() : ""); let b = null; for (const M of mi._TextureLoaders) if (M.canLoad(x)) { b = M; break; } const S = (M, R) => { c === _ ? s && M && s(M.status + " " + M.statusText, R) : (Ce.Warn(`Failed to load ${c}, falling back to the ${_}`), this.createCubeTextureBase(_, e, t, !!i, r, s, n, a, l, o, u, m, d, f, p)); }; if (b) { const M = (R) => { d && d(m, R), b.loadCubeData(R, m, l, r, s); }; t && t.length === 6 ? b.supportCascades ? this._cascadeLoadFiles(e, (R) => M(R.map((w) => new Uint8Array(w))), t, s) : s ? 
s("Textures type does not support cascades.") : Ce.Warn("Texture loader does not support cascades.") : this._loadFile(c, (R) => M(new Uint8Array(R)), void 0, void 0, !0, S); } else { if (!t || t.length === 0) throw new Error("Cannot load cubemap because files were not defined, or the correct loader was not found."); this._cascadeLoadImgs(e, m, (M, R) => { f && f(M, R); }, t, s); } return this._internalTexturesCache.push(m), m; }; mi.prototype.createCubeTexture = function(c, e, t, i, r = null, s = null, n, a = null, l = !1, o = 0, u = 0, h = null, d, f = !1) { const p = this._gl; return this.createCubeTextureBase(c, e, t, !!i, r, s, n, a, l, o, u, h, (m) => this._bindTextureDirectly(p.TEXTURE_CUBE_MAP, m, !0), (m, _) => { const v = this.needPOTTextures ? mi.GetExponentOfTwo(_[0].width, this._caps.maxCubemapTextureSize) : _[0].width, C = v, x = [ p.TEXTURE_CUBE_MAP_POSITIVE_X, p.TEXTURE_CUBE_MAP_POSITIVE_Y, p.TEXTURE_CUBE_MAP_POSITIVE_Z, p.TEXTURE_CUBE_MAP_NEGATIVE_X, p.TEXTURE_CUBE_MAP_NEGATIVE_Y, p.TEXTURE_CUBE_MAP_NEGATIVE_Z ]; this._bindTextureDirectly(p.TEXTURE_CUBE_MAP, m, !0), this._unpackFlipY(!1); const b = n ? this._getInternalFormat(n, m._useSRGBBuffer) : m._useSRGBBuffer ? this._glSRGBExtensionValues.SRGB8_ALPHA8 : p.RGBA; let S = n ? this._getInternalFormat(n) : p.RGBA; m._useSRGBBuffer && this.webGLVersion === 1 && (S = b); for (let M = 0; M < x.length; M++) if (_[M].width !== v || _[M].height !== C) { if (this._prepareWorkingCanvas(), !this._workingCanvas || !this._workingContext) { Ce.Warn("Cannot create canvas to resize texture."); return; } this._workingCanvas.width = v, this._workingCanvas.height = C, this._workingContext.drawImage(_[M], 0, 0, _[M].width, _[M].height, 0, 0, v, C), p.texImage2D(x[M], 0, b, S, p.UNSIGNED_BYTE, this._workingCanvas); } else p.texImage2D(x[M], 0, b, S, p.UNSIGNED_BYTE, _[M]); i || p.generateMipmap(p.TEXTURE_CUBE_MAP), this._setCubeMapTextureParams(m, !i), m.width = v, m.height = C, m.isReady = !0, n && (m.format = n), m.onLoadedObservable.notifyObservers(m), m.onLoadedObservable.clear(), r && r(); }, !!f); }; mi.prototype.setTextureSampler = function(c, e) { throw new Error("setTextureSampler: This engine does not support separate texture sampler objects!"); }; class ffe { } const Oie = new Fe(), wie = new Fe(); Object.defineProperty($e.prototype, "onBeforeViewRenderObservable", { get: function() { return Oie; } }); Object.defineProperty($e.prototype, "onAfterViewRenderObservable", { get: function() { return wie; } }); Object.defineProperty($e.prototype, "inputElement", { get: function() { return this._inputElement; }, set: function(c) { var e; this._inputElement !== c && (this._inputElement = c, (e = this._onEngineViewChanged) === null || e === void 0 || e.call(this)); } }); $e.prototype.getInputElement = function() { return this.inputElement || this.getRenderingCanvas(); }; $e.prototype.registerView = function(c, e, t) { this.views || (this.views = []); for (const s of this.views) if (s.target === c) return s; const i = this.getRenderingCanvas(); i && (c.width = i.width, c.height = i.height); const r = { target: c, camera: e, clearBeforeCopy: t, enabled: !0, id: (Math.random() * 1e5).toFixed() }; return this.views.push(r), e && !Array.isArray(e) && e.onDisposeObservable.add(() => { this.unRegisterView(c); }), r; }; $e.prototype.unRegisterView = function(c) { if (!this.views || this.views.length === 0) return this; for (const e of this.views) if (e.target === c) { const t = this.views.indexOf(e); t !== -1 && this.views.splice(t, 1); break; } 
return this; }; $e.prototype._renderViewStep = function(c) { const e = c.target, t = e.getContext("2d"); if (!t) return !0; const i = this.getRenderingCanvas(); Oie.notifyObservers(c); const r = c.camera; let s = null, n = null, a = null; if (r && (a = Array.isArray(r) ? r[0].getScene() : r.getScene(), s = a.activeCamera, n = a.activeCameras, this.activeView = c, Array.isArray(r) ? a.activeCameras = r : (a.activeCamera = r, a.activeCameras = null)), c.customResize) c.customResize(e); else { const l = Math.floor(e.clientWidth / this._hardwareScalingLevel), o = Math.floor(e.clientHeight / this._hardwareScalingLevel), u = l !== e.width || i.width !== e.width || o !== e.height || i.height !== e.height; e.clientWidth && e.clientHeight && u && (e.width = l, e.height = o, this.setSize(l, o)); } return !i.width || !i.height ? !1 : (this._renderFrame(), this.flushFramebuffer(), c.clearBeforeCopy && t.clearRect(0, 0, i.width, i.height), t.drawImage(i, 0, 0), a && (a.activeCameras = n, a.activeCamera = s), wie.notifyObservers(c), !0); }; $e.prototype._renderViews = function() { if (!this.views || this.views.length === 0 || !this.getRenderingCanvas()) return !1; let e; for (const t of this.views) { if (!t.enabled) continue; if (t.target === this.inputElement) { e = t; continue; } if (!this._renderViewStep(t)) return !1; } return e && !this._renderViewStep(e) ? !1 : (this.activeView = null, !0); }; mi.prototype.createStorageBuffer = function(c, e) { throw new Error("createStorageBuffer: Unsupported method in this engine!"); }; mi.prototype.updateStorageBuffer = function(c, e, t, i) { }; mi.prototype.readFromStorageBuffer = function(c, e, t, i) { throw new Error("readFromStorageBuffer: Unsupported method in this engine!"); }; mi.prototype.setStorageBuffer = function(c, e) { throw new Error("setStorageBuffer: Unsupported method in this engine!"); }; function pfe(c) { const e = (s) => { const n = "\\b" + s + "\\b"; return c && (c === s || c.match(new RegExp(n, "g"))); }; if (this._excludedCompressedTextures && this._excludedCompressedTextures.some(e)) return c; const t = c.lastIndexOf("."), i = c.lastIndexOf("?"), r = i > -1 ? c.substring(i, c.length) : ""; return (t > -1 ? 
c.substring(0, t) : c) + this._textureFormatInUse + r; } Object.defineProperty($e.prototype, "texturesSupported", { get: function() { const c = []; return this._caps.astc && c.push("-astc.ktx"), this._caps.s3tc && c.push("-dxt.ktx"), this._caps.pvrtc && c.push("-pvrtc.ktx"), this._caps.etc2 && c.push("-etc2.ktx"), this._caps.etc1 && c.push("-etc1.ktx"), c; }, enumerable: !0, configurable: !0 }); Object.defineProperty($e.prototype, "textureFormatInUse", { get: function() { return this._textureFormatInUse || null; }, enumerable: !0, configurable: !0 }); $e.prototype.setCompressedTextureExclusions = function(c) { this._excludedCompressedTextures = c; }; $e.prototype.setTextureFormatToUse = function(c) { const e = this.texturesSupported; for (let t = 0, i = e.length; t < i; t++) for (let r = 0, s = c.length; r < s; r++) if (e[t] === c[r].toLowerCase()) return this._transformTextureUrl = pfe.bind(this), this._textureFormatInUse = e[t]; return this._textureFormatInUse = "", this._transformTextureUrl = null, null; }; class _5 { constructor() { const e = new ArrayBuffer(_5.DEFAULT_BUFFER_SIZE); this._uint32s = new Uint32Array(e), this._int32s = new Int32Array(e), this._float32s = new Float32Array(e), this._length = _5.DEFAULT_BUFFER_SIZE / 4, this._position = 0, this._nativeDataStream = new _native.NativeDataStream(() => { this._flush(); }); } writeUint32(e) { this._flushIfNecessary(1), this._uint32s[this._position++] = e; } writeInt32(e) { this._flushIfNecessary(1), this._int32s[this._position++] = e; } writeFloat32(e) { this._flushIfNecessary(1), this._float32s[this._position++] = e; } writeUint32Array(e) { this._flushIfNecessary(1 + e.length), this._uint32s[this._position++] = e.length, this._uint32s.set(e, this._position), this._position += e.length; } writeInt32Array(e) { this._flushIfNecessary(1 + e.length), this._uint32s[this._position++] = e.length, this._int32s.set(e, this._position), this._position += e.length; } writeFloat32Array(e) { this._flushIfNecessary(1 + e.length), this._uint32s[this._position++] = e.length, this._float32s.set(e, this._position), this._position += e.length; } writeNativeData(e) { this._flushIfNecessary(e.length), this._uint32s.set(e, this._position), this._position += e.length; } writeBoolean(e) { this.writeUint32(e ? 
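// ---------------------------------------------------------------------------
// setTextureFormatToUse (above) picks the first GPU-supported compressed
// container from a preference list and installs a URL transform that replaces
// the original extension with the chosen suffix, e.g. "textures/wood.jpg"
// becomes "textures/wood-dxt.ktx" when S3TC is supported. Sketch (the
// suffixes come from the texturesSupported getter above; not executed here):
//
//   engine.setCompressedTextureExclusions(["skybox"]);  // leave matching URLs untouched
//   const chosen = engine.setTextureFormatToUse(["-astc.ktx", "-dxt.ktx", "-etc2.ktx"]);
//   console.log("compressed texture format in use:", chosen || "none");
// ---------------------------------------------------------------------------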
1 : 0); } _flushIfNecessary(e) { this._position + e > this._length && this._flush(); } _flush() { this._nativeDataStream.writeBuffer(this._uint32s.buffer, this._position), this._position = 0; } } _5.DEFAULT_BUFFER_SIZE = 65536; const MC = [ Math.sqrt(1 / (4 * Math.PI)), -Math.sqrt(3 / (4 * Math.PI)), Math.sqrt(3 / (4 * Math.PI)), -Math.sqrt(3 / (4 * Math.PI)), Math.sqrt(15 / (4 * Math.PI)), -Math.sqrt(15 / (4 * Math.PI)), Math.sqrt(5 / (16 * Math.PI)), -Math.sqrt(15 / (4 * Math.PI)), Math.sqrt(15 / (16 * Math.PI)) // l22 ], _fe = [ () => 1, (c) => c.y, (c) => c.z, (c) => c.x, (c) => c.x * c.y, (c) => c.y * c.z, (c) => 3 * c.z * c.z - 1, (c) => c.x * c.z, (c) => c.x * c.x - c.y * c.y // l22 ], zE = (c, e) => MC[c] * _fe[c](e), HE = [Math.PI, 2 * Math.PI / 3, 2 * Math.PI / 3, 2 * Math.PI / 3, Math.PI / 4, Math.PI / 4, Math.PI / 4, Math.PI / 4, Math.PI / 4]; class m5 { constructor() { this.preScaled = !1, this.l00 = D.Zero(), this.l1_1 = D.Zero(), this.l10 = D.Zero(), this.l11 = D.Zero(), this.l2_2 = D.Zero(), this.l2_1 = D.Zero(), this.l20 = D.Zero(), this.l21 = D.Zero(), this.l22 = D.Zero(); } /** * Adds a light to the spherical harmonics * @param direction the direction of the light * @param color the color of the light * @param deltaSolidAngle the delta solid angle of the light */ addLight(e, t, i) { de.Vector3[0].set(t.r, t.g, t.b); const r = de.Vector3[0], s = de.Vector3[1]; r.scaleToRef(i, s), s.scaleToRef(zE(0, e), de.Vector3[2]), this.l00.addInPlace(de.Vector3[2]), s.scaleToRef(zE(1, e), de.Vector3[2]), this.l1_1.addInPlace(de.Vector3[2]), s.scaleToRef(zE(2, e), de.Vector3[2]), this.l10.addInPlace(de.Vector3[2]), s.scaleToRef(zE(3, e), de.Vector3[2]), this.l11.addInPlace(de.Vector3[2]), s.scaleToRef(zE(4, e), de.Vector3[2]), this.l2_2.addInPlace(de.Vector3[2]), s.scaleToRef(zE(5, e), de.Vector3[2]), this.l2_1.addInPlace(de.Vector3[2]), s.scaleToRef(zE(6, e), de.Vector3[2]), this.l20.addInPlace(de.Vector3[2]), s.scaleToRef(zE(7, e), de.Vector3[2]), this.l21.addInPlace(de.Vector3[2]), s.scaleToRef(zE(8, e), de.Vector3[2]), this.l22.addInPlace(de.Vector3[2]); } /** * Scales the spherical harmonics by the given amount * @param scale the amount to scale */ scaleInPlace(e) { this.l00.scaleInPlace(e), this.l1_1.scaleInPlace(e), this.l10.scaleInPlace(e), this.l11.scaleInPlace(e), this.l2_2.scaleInPlace(e), this.l2_1.scaleInPlace(e), this.l20.scaleInPlace(e), this.l21.scaleInPlace(e), this.l22.scaleInPlace(e); } /** * Convert from incident radiance (Li) to irradiance (E) by applying convolution with the cosine-weighted hemisphere. * * ``` * E_lm = A_l * L_lm * ``` * * In spherical harmonics this convolution amounts to scaling factors for each frequency band. * This corresponds to equation 5 in "An Efficient Representation for Irradiance Environment Maps", where * the scaling factors are given in equation 9. */ convertIncidentRadianceToIrradiance() { this.l00.scaleInPlace(HE[0]), this.l1_1.scaleInPlace(HE[1]), this.l10.scaleInPlace(HE[2]), this.l11.scaleInPlace(HE[3]), this.l2_2.scaleInPlace(HE[4]), this.l2_1.scaleInPlace(HE[5]), this.l20.scaleInPlace(HE[6]), this.l21.scaleInPlace(HE[7]), this.l22.scaleInPlace(HE[8]); } /** * Convert from irradiance to outgoing radiance for Lambertian BDRF, suitable for efficient shader evaluation. * * ``` * L = (1/pi) * E * rho * ``` * * This is done by an additional scale by 1/pi, so is a fairly trivial operation but important conceptually. 
*/ convertIrradianceToLambertianRadiance() { this.scaleInPlace(1 / Math.PI); } /** * Integrates the reconstruction coefficients directly in to the SH preventing further * required operations at run time. * * This is simply done by scaling back the SH with Ylm constants parameter. * The trigonometric part being applied by the shader at run time. */ preScaleForRendering() { this.preScaled = !0, this.l00.scaleInPlace(MC[0]), this.l1_1.scaleInPlace(MC[1]), this.l10.scaleInPlace(MC[2]), this.l11.scaleInPlace(MC[3]), this.l2_2.scaleInPlace(MC[4]), this.l2_1.scaleInPlace(MC[5]), this.l20.scaleInPlace(MC[6]), this.l21.scaleInPlace(MC[7]), this.l22.scaleInPlace(MC[8]); } /** * update the spherical harmonics coefficients from the given array * @param data defines the 9x3 coefficients (l00, l1-1, l10, l11, l2-2, l2-1, l20, l21, l22) * @returns the spherical harmonics (this) */ updateFromArray(e) { return D.FromArrayToRef(e[0], 0, this.l00), D.FromArrayToRef(e[1], 0, this.l1_1), D.FromArrayToRef(e[2], 0, this.l10), D.FromArrayToRef(e[3], 0, this.l11), D.FromArrayToRef(e[4], 0, this.l2_2), D.FromArrayToRef(e[5], 0, this.l2_1), D.FromArrayToRef(e[6], 0, this.l20), D.FromArrayToRef(e[7], 0, this.l21), D.FromArrayToRef(e[8], 0, this.l22), this; } /** * update the spherical harmonics coefficients from the given floats array * @param data defines the 9x3 coefficients (l00, l1-1, l10, l11, l2-2, l2-1, l20, l21, l22) * @returns the spherical harmonics (this) */ updateFromFloatsArray(e) { return D.FromFloatsToRef(e[0], e[1], e[2], this.l00), D.FromFloatsToRef(e[3], e[4], e[5], this.l1_1), D.FromFloatsToRef(e[6], e[7], e[8], this.l10), D.FromFloatsToRef(e[9], e[10], e[11], this.l11), D.FromFloatsToRef(e[12], e[13], e[14], this.l2_2), D.FromFloatsToRef(e[15], e[16], e[17], this.l2_1), D.FromFloatsToRef(e[18], e[19], e[20], this.l20), D.FromFloatsToRef(e[21], e[22], e[23], this.l21), D.FromFloatsToRef(e[24], e[25], e[26], this.l22), this; } /** * Constructs a spherical harmonics from an array. * @param data defines the 9x3 coefficients (l00, l1-1, l10, l11, l2-2, l2-1, l20, l21, l22) * @returns the spherical harmonics */ static FromArray(e) { return new m5().updateFromArray(e); } // Keep for references. /** * Gets the spherical harmonics from polynomial * @param polynomial the spherical polynomial * @returns the spherical harmonics */ static FromPolynomial(e) { const t = new m5(); return t.l00 = e.xx.scale(0.376127).add(e.yy.scale(0.376127)).add(e.zz.scale(0.376126)), t.l1_1 = e.y.scale(0.977204), t.l10 = e.z.scale(0.977204), t.l11 = e.x.scale(0.977204), t.l2_2 = e.xy.scale(1.16538), t.l2_1 = e.yz.scale(1.16538), t.l20 = e.zz.scale(1.34567).subtract(e.xx.scale(0.672834)).subtract(e.yy.scale(0.672834)), t.l21 = e.zx.scale(1.16538), t.l22 = e.xx.scale(1.16538).subtract(e.yy.scale(1.16538)), t.l1_1.scaleInPlace(-1), t.l11.scaleInPlace(-1), t.l2_1.scaleInPlace(-1), t.l21.scaleInPlace(-1), t.scaleInPlace(Math.PI), t; } } class ax { constructor() { this.x = D.Zero(), this.y = D.Zero(), this.z = D.Zero(), this.xx = D.Zero(), this.yy = D.Zero(), this.zz = D.Zero(), this.xy = D.Zero(), this.yz = D.Zero(), this.zx = D.Zero(); } /** * The spherical harmonics used to create the polynomials. 
*/ get preScaledHarmonics() { return this._harmonics || (this._harmonics = m5.FromPolynomial(this)), this._harmonics.preScaled || this._harmonics.preScaleForRendering(), this._harmonics; } /** * Adds an ambient color to the spherical polynomial * @param color the color to add */ addAmbient(e) { de.Vector3[0].copyFromFloats(e.r, e.g, e.b); const t = de.Vector3[0]; this.xx.addInPlace(t), this.yy.addInPlace(t), this.zz.addInPlace(t); } /** * Scales the spherical polynomial by the given amount * @param scale the amount to scale */ scaleInPlace(e) { this.x.scaleInPlace(e), this.y.scaleInPlace(e), this.z.scaleInPlace(e), this.xx.scaleInPlace(e), this.yy.scaleInPlace(e), this.zz.scaleInPlace(e), this.yz.scaleInPlace(e), this.zx.scaleInPlace(e), this.xy.scaleInPlace(e); } /** * Updates the spherical polynomial from harmonics * @param harmonics the spherical harmonics * @returns the spherical polynomial */ updateFromHarmonics(e) { return this._harmonics = e, this.x.copyFrom(e.l11), this.x.scaleInPlace(1.02333).scaleInPlace(-1), this.y.copyFrom(e.l1_1), this.y.scaleInPlace(1.02333).scaleInPlace(-1), this.z.copyFrom(e.l10), this.z.scaleInPlace(1.02333), this.xx.copyFrom(e.l00), de.Vector3[0].copyFrom(e.l20).scaleInPlace(0.247708), de.Vector3[1].copyFrom(e.l22).scaleInPlace(0.429043), this.xx.scaleInPlace(0.886277).subtractInPlace(de.Vector3[0]).addInPlace(de.Vector3[1]), this.yy.copyFrom(e.l00), this.yy.scaleInPlace(0.886277).subtractInPlace(de.Vector3[0]).subtractInPlace(de.Vector3[1]), this.zz.copyFrom(e.l00), de.Vector3[0].copyFrom(e.l20).scaleInPlace(0.495417), this.zz.scaleInPlace(0.886277).addInPlace(de.Vector3[0]), this.yz.copyFrom(e.l2_1), this.yz.scaleInPlace(0.858086).scaleInPlace(-1), this.zx.copyFrom(e.l21), this.zx.scaleInPlace(0.858086).scaleInPlace(-1), this.xy.copyFrom(e.l2_2), this.xy.scaleInPlace(0.858086), this.scaleInPlace(1 / Math.PI), this; } /** * Gets the spherical polynomial from harmonics * @param harmonics the spherical harmonics * @returns the spherical polynomial */ static FromHarmonics(e) { return new ax().updateFromHarmonics(e); } /** * Constructs a spherical polynomial from an array. * @param data defines the 9x3 coefficients (x, y, z, xx, yy, zz, yz, zx, xy) * @returns the spherical polynomial */ static FromArray(e) { const t = new ax(); return D.FromArrayToRef(e[0], 0, t.x), D.FromArrayToRef(e[1], 0, t.y), D.FromArrayToRef(e[2], 0, t.z), D.FromArrayToRef(e[3], 0, t.xx), D.FromArrayToRef(e[4], 0, t.yy), D.FromArrayToRef(e[5], 0, t.zz), D.FromArrayToRef(e[6], 0, t.yz), D.FromArrayToRef(e[7], 0, t.zx), D.FromArrayToRef(e[8], 0, t.xy), t; } } const mfe = "rgbdDecodePixelShader", gfe = `varying vec2 vUV;uniform sampler2D textureSampler; #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=vec4(fromRGBD(texture2D(textureSampler,vUV)),1.0);}`; je.ShadersStore[mfe] = gfe; function Lie(c, e, t, i = !0) { const r = c.getScene(), s = r.getEngine(), n = new ra("resized" + c.name, { width: e, height: t }, r, !c.noMipmap, !0, c._texture.type, !1, c.samplingMode, !1); n.wrapU = c.wrapU, n.wrapV = c.wrapV, n.uOffset = c.uOffset, n.vOffset = c.vOffset, n.uScale = c.uScale, n.vScale = c.vScale, n.uAng = c.uAng, n.vAng = c.vAng, n.wAng = c.wAng, n.coordinatesIndex = c.coordinatesIndex, n.level = c.level, n.anisotropicFilteringLevel = c.anisotropicFilteringLevel, n._texture.isReady = !1, c.wrapU = De.CLAMP_ADDRESSMODE, c.wrapV = De.CLAMP_ADDRESSMODE; const a = new h6("pass", 1, null, i ? 
De.BILINEAR_SAMPLINGMODE : De.NEAREST_SAMPLINGMODE, s, !1, 0); return a.externalTextureSamplerBinding = !0, a.getEffect().executeWhenCompiled(() => { a.onApply = function(o) { o.setTexture("textureSampler", c); }; const l = n.renderTarget; l && (r.postProcessManager.directRender([a], l), s.unBindFramebuffer(l), n.disposeFramebufferObjects(), a.dispose(), n.getInternalTexture().isReady = !0); }), n; } function vU(c, e, t, i, r, s, n, a) { const l = e.getEngine(); return e.isReady = !1, r = r ?? e.samplingMode, i = i ?? e.type, s = s ?? e.format, n = n ?? e.width, a = a ?? e.height, i === -1 && (i = 0), new Promise((o) => { const u = new Bi("postprocess", c, null, null, 1, null, r, l, !1, void 0, i, void 0, null, !1, s); u.externalTextureSamplerBinding = !0; const h = l.createRenderTargetTexture({ width: n, height: a }, { generateDepthBuffer: !1, generateMipMaps: !1, generateStencilBuffer: !1, samplingMode: r, type: i, format: s }); u.getEffect().executeWhenCompiled(() => { u.onApply = (d) => { d._bindTexture("textureSampler", e), d.setFloat2("scale", 1, 1); }, t.postProcessManager.directRender([u], h, !0), l.restoreDefaultFramebuffer(), l._releaseTexture(e), u && u.dispose(), h._swapAndDie(e), e.type = i, e.format = 5, e.isReady = !0, o(e); }); }); } let MF, wZ; function GA(c) { MF || (MF = new Float32Array(1), wZ = new Int32Array(MF.buffer)), MF[0] = c; const e = wZ[0]; let t = e >> 16 & 32768, i = e >> 12 & 2047; const r = e >> 23 & 255; return r < 103 ? t : r > 142 ? (t |= 31744, t |= (r == 255 ? 0 : 1) && e & 8388607, t) : r < 113 ? (i |= 2048, t |= (i >> 114 - r) + (i >> 113 - r & 1), t) : (t |= r - 112 << 10 | i >> 1, t += i & 1, t); } function kA(c) { const e = (c & 32768) >> 15, t = (c & 31744) >> 10, i = c & 1023; return t === 0 ? (e ? -1 : 1) * Math.pow(2, -14) * (i / Math.pow(2, 10)) : t == 31 ? i ? NaN : (e ? -1 : 1) * (1 / 0) : (e ? -1 : 1) * Math.pow(2, t - 15) * (1 + i / Math.pow(2, 10)); } const vfe = async (c, e, t, i, r) => { const s = c.getScene(), n = s.getEngine(); let a; if (!c.isCube) a = new Bi("lod", "lod", ["lod", "gamma"], null, 1, null, De.NEAREST_NEAREST_MIPNEAREST, n); else { const u = ["#define POSITIVEX", "#define NEGATIVEX", "#define POSITIVEY", "#define NEGATIVEY", "#define POSITIVEZ", "#define NEGATIVEZ"]; a = new Bi("lodCube", "lodCube", ["lod", "gamma"], null, 1, null, De.NEAREST_NEAREST_MIPNEAREST, n, !1, u[i]); } await new Promise((u) => { a.getEffect().executeWhenCompiled(() => { u(0); }); }); const l = new ra("temp", { width: e, height: t }, s, !1); a.onApply = function(u) { u.setTexture("textureSampler", c), u.setFloat("lod", r), u.setBool("gamma", c.gammaSpace); }; const o = c.getInternalTexture(); try { if (l.renderTarget && o) { const u = o.samplingMode; r !== 0 ? 
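// ---------------------------------------------------------------------------
// GA / kA above convert between 32-bit floats and IEEE-754 binary16 (they are
// exposed further down as ToHalfFloat / FromHalfFloat). binary16 packs a sign
// bit, a 5-bit exponent (bias 15) and a 10-bit mantissa, so for example 1.0
// encodes as 0x3C00 and 0.5 as 0x3800. Worked sketch (not executed here):
//
//   const h = GA(1.0);     // 0x3C00 (sign 0, exponent 15, mantissa 0)
//   const f = kA(0x3C00);  // 1.0
//   // values above ~65504 overflow to +/-Infinity (exponent 31, mantissa 0)
// ---------------------------------------------------------------------------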
c.updateSamplingMode(De.NEAREST_NEAREST_MIPNEAREST) : c.updateSamplingMode(De.NEAREST_NEAREST), s.postProcessManager.directRender([a], l.renderTarget, !0), c.updateSamplingMode(u); const h = await n.readPixels(0, 0, e, t), d = new Uint8Array(h.buffer, 0, h.byteLength); return n.unBindFramebuffer(l.renderTarget), d; } else throw Error("Render to texture failed."); } finally { l.dispose(), a.dispose(); } }; async function Nie(c, e, t, i = 0, r = 0) { return !c.isReady() && c._texture && await new Promise((s, n) => { if (c._texture === null) { n(0); return; } c._texture.onLoadedObservable.addOnce(() => { s(0); }); }), await vfe(c, e, t, i, r); } const Fie = { /** * Uses the GPU to create a copy texture rescaled at a given size * @param texture Texture to copy from * @param width defines the desired width * @param height defines the desired height * @param useBilinearMode defines if bilinear mode has to be used * @returns the generated texture */ CreateResizedCopy: Lie, /** * Apply a post process to a texture * @param postProcessName name of the fragment post process * @param internalTexture the texture to encode * @param scene the scene hosting the texture * @param type type of the output texture. If not provided, use the one from internalTexture * @param samplingMode sampling mode to use to sample the source texture. If not provided, use the one from internalTexture * @param format format of the output texture. If not provided, use the one from internalTexture * @returns a promise with the internalTexture having its texture replaced by the result of the processing */ ApplyPostProcess: vU, /** * Converts a number to half float * @param value number to convert * @returns converted number */ ToHalfFloat: GA, /** * Converts a half float to a number * @param value half float to convert * @returns converted half float */ FromHalfFloat: kA, /** * Gets the data of the specified texture by rendering it to an intermediate RGBA texture and retrieving the bytes from it. * This is convienent to get 8-bit RGBA values for a texture in a GPU compressed format. * @param texture the source texture * @param width the width of the result, which does not have to match the source texture width * @param height the height of the result, which does not have to match the source texture height * @param face if the texture has multiple faces, the face index to use for the source * @param channels a filter for which of the RGBA channels to return in the result * @param lod if the texture has multiple LODs, the lod index to use for the source * @returns the 8-bit texture data */ GetTextureDataAsync: Nie }; class hB { /** * Expand the RGBD Texture from RGBD to Half Float if possible. * @param texture the texture to expand. */ static ExpandRGBDTexture(e) { const t = e._texture; if (!t || !e.isRGBD) return; const i = t.getEngine(), r = i.getCaps(), s = t.isReady; let n = !1; r.textureHalfFloatRender && r.textureHalfFloatLinearFiltering ? 
(n = !0, t.type = 2) : r.textureFloatRender && r.textureFloatLinearFiltering && (n = !0, t.type = 1), n && (t.isReady = !1, t._isRGBD = !1, t.invertY = !1); const a = () => { if (n) { const l = new Bi("rgbdDecode", "rgbdDecode", null, null, 1, null, 3, i, !1, void 0, t.type, void 0, null, !1); l.externalTextureSamplerBinding = !0; const o = i.createRenderTargetTexture(t.width, { generateDepthBuffer: !1, generateMipMaps: !1, generateStencilBuffer: !1, samplingMode: t.samplingMode, type: t.type, format: 5 }); l.getEffect().executeWhenCompiled(() => { l.onApply = (u) => { u._bindTexture("textureSampler", t), u.setFloat2("scale", 1, 1); }, e.getScene().postProcessManager.directRender([l], o, !0), i.restoreDefaultFramebuffer(), i._releaseTexture(t), l && l.dispose(), o._swapAndDie(t), t.isReady = !0; }); } }; s ? a() : e.onLoadObservable.addOnce(a); } /** * Encode the texture to RGBD if possible. * @param internalTexture the texture to encode * @param scene the scene hosting the texture * @param outputTextureType type of the texture in which the encoding is performed * @returns a promise with the internalTexture having its texture replaced by the result of the processing */ static EncodeTextureToRGBD(e, t, i = 0) { return vU("rgbdEncode", e, t, i, 1, 5); } } class HD { constructor(e, t, i, r) { this.name = e, this.worldAxisForNormal = t, this.worldAxisForFileX = i, this.worldAxisForFileY = r; } } class GI { /** * Converts a texture to the according Spherical Polynomial data. * This extracts the first 3 orders only as they are the only one used in the lighting. * * @param texture The texture to extract the information from. * @returns The Spherical Polynomial data. */ static ConvertCubeMapTextureToSphericalPolynomial(e) { var t; if (!e.isCube) return null; (t = e.getScene()) === null || t === void 0 || t.getEngine().flushFramebuffer(); const i = e.getSize().width, r = e.readPixels(0, void 0, void 0, !1), s = e.readPixels(1, void 0, void 0, !1); let n, a; e.isRenderTarget ? (n = e.readPixels(3, void 0, void 0, !1), a = e.readPixels(2, void 0, void 0, !1)) : (n = e.readPixels(2, void 0, void 0, !1), a = e.readPixels(3, void 0, void 0, !1)); const l = e.readPixels(4, void 0, void 0, !1), o = e.readPixels(5, void 0, void 0, !1), u = e.gammaSpace, h = 5; let d = 0; return (e.textureType == 1 || e.textureType == 2) && (d = 1), new Promise((f) => { Promise.all([s, r, n, a, l, o]).then(([p, m, _, v, C, x]) => { const b = { size: i, right: m, left: p, up: _, down: v, front: C, back: x, format: h, type: d, gammaSpace: u }; f(this.ConvertCubeMapToSphericalPolynomial(b)); }); }); } /** * Compute the area on the unit sphere of the rectangle defined by (x,y) and the origin * See https://www.rorydriscoll.com/2012/01/15/cubemap-texel-solid-angle/ * @param x * @param y */ static _AreaElement(e, t) { return Math.atan2(e * t, Math.sqrt(e * e + t * t + 1)); } /** * Converts a cubemap to the according Spherical Polynomial data. * This extracts the first 3 orders only as they are the only one used in the lighting. * * @param cubeInfo The Cube map to extract the information from. * @returns The Spherical Polynomial data. */ static ConvertCubeMapToSphericalPolynomial(e) { const t = new m5(); let i = 0; const r = 2 / e.size, s = r, n = 0.5 * r, a = n - 1; for (let d = 0; d < 6; d++) { const f = this._FileFaces[d], p = e[f.name]; let m = a; const _ = e.format === 5 ? 
4 : 3; for (let v = 0; v < e.size; v++) { let C = a; for (let x = 0; x < e.size; x++) { const b = f.worldAxisForFileX.scale(C).add(f.worldAxisForFileY.scale(m)).add(f.worldAxisForNormal); b.normalize(); const S = this._AreaElement(C - n, m - n) - this._AreaElement(C - n, m + n) - this._AreaElement(C + n, m - n) + this._AreaElement(C + n, m + n); let M = p[v * e.size * _ + x * _ + 0], R = p[v * e.size * _ + x * _ + 1], w = p[v * e.size * _ + x * _ + 2]; isNaN(M) && (M = 0), isNaN(R) && (R = 0), isNaN(w) && (w = 0), e.type === 0 && (M /= 255, R /= 255, w /= 255), e.gammaSpace && (M = Math.pow(yt.Clamp(M), V9), R = Math.pow(yt.Clamp(R), V9), w = Math.pow(yt.Clamp(w), V9)); const V = this.MAX_HDRI_VALUE; if (this.PRESERVE_CLAMPED_COLORS) { const L = Math.max(M, R, w); if (L > V) { const B = V / L; M *= B, R *= B, w *= B; } } else M = yt.Clamp(M, 0, V), R = yt.Clamp(R, 0, V), w = yt.Clamp(w, 0, V); const k = new ze(M, R, w); t.addLight(b, k, S), i += S, C += r; } m += s; } } const h = 4 * Math.PI * 6 / 6 / i; return t.scaleInPlace(h), t.convertIncidentRadianceToIrradiance(), t.convertIrradianceToLambertianRadiance(), ax.FromHarmonics(t); } } GI._FileFaces = [ new HD("right", new D(1, 0, 0), new D(0, 0, -1), new D(0, -1, 0)), new HD("left", new D(-1, 0, 0), new D(0, 0, 1), new D(0, -1, 0)), new HD("up", new D(0, 1, 0), new D(1, 0, 0), new D(0, 0, 1)), new HD("down", new D(0, -1, 0), new D(1, 0, 0), new D(0, 0, -1)), new HD("front", new D(0, 0, 1), new D(1, 0, 0), new D(0, -1, 0)), new HD("back", new D(0, 0, -1), new D(-1, 0, 0), new D(0, -1, 0)) // -Z bottom ]; GI.MAX_HDRI_VALUE = 4096; GI.PRESERVE_CLAMPED_COLORS = !1; dn.prototype.forceSphericalPolynomialsRecompute = function() { this._texture && (this._texture._sphericalPolynomial = null, this._texture._sphericalPolynomialPromise = null, this._texture._sphericalPolynomialComputed = !1); }; Object.defineProperty(dn.prototype, "sphericalPolynomial", { get: function() { if (this._texture) { if (this._texture._sphericalPolynomial || this._texture._sphericalPolynomialComputed) return this._texture._sphericalPolynomial; if (this._texture.isReady) return this._texture._sphericalPolynomialPromise || (this._texture._sphericalPolynomialPromise = GI.ConvertCubeMapTextureToSphericalPolynomial(this), this._texture._sphericalPolynomialPromise === null ? 
this._texture._sphericalPolynomialComputed = !0 : this._texture._sphericalPolynomialPromise.then((c) => { this._texture._sphericalPolynomial = c, this._texture._sphericalPolynomialComputed = !0; })), null; } return null; }, set: function(c) { this._texture && (this._texture._sphericalPolynomial = c); }, enumerable: !0, configurable: !0 }); const Afe = "rgbdEncodePixelShader", yfe = `varying vec2 vUV;uniform sampler2D textureSampler; #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=toRGBD(texture2D(textureSampler,vUV).rgb);}`; je.ShadersStore[Afe] = yfe; const tW = "image/png", CH = 2, W9 = [134, 22, 135, 150, 246, 214, 150, 54]; function AU(c) { const e = new DataView(c.buffer, c.byteOffset, c.byteLength); let t = 0; for (let n = 0; n < W9.length; n++) if (e.getUint8(t++) !== W9[n]) return Ce.Error("Not a babylon environment map"), null; let i = "", r = 0; for (; r = e.getUint8(t++); ) i += String.fromCharCode(r); let s = JSON.parse(i); return s = hN(s), s.specular && (s.specular.specularDataPosition = t, s.specular.lodGenerationScale = s.specular.lodGenerationScale || 0.8), s; } function hN(c) { if (c.version > CH) throw new Error(`Unsupported babylon environment map version "${c.version}". Latest supported version is "${CH}".`); return c.version === 2 || (c = Object.assign(Object.assign({}, c), { version: 2, imageType: tW })), c; } async function Bie(c, e = {}) { var t, i; const r = c.getInternalTexture(); if (!r) return Promise.reject("The cube texture is invalid."); const s = (t = e.imageType) !== null && t !== void 0 ? t : tW, n = r.getEngine(); if (c.textureType !== 2 && c.textureType !== 1 && c.textureType !== 0 && c.textureType !== 0 && c.textureType !== 7 && c.textureType !== -1) return Promise.reject("The cube texture should allow HDR (Full Float or Half Float)."); let a = 1; if (!n.getCaps().textureFloatRender && (a = 2, !n.getCaps().textureHalfFloatRender)) return Promise.reject("Env texture can only be created when the browser supports half float or full float rendering."); c.sphericalPolynomial; const l = (i = c.getInternalTexture()) === null || i === void 0 ? 
void 0 : i._sphericalPolynomialPromise, o = r.width, u = new ii(n), h = {}; n.flushFramebuffer(); const d = yt.ILog2(r.width); for (let R = 0; R <= d; R++) { const w = Math.pow(2, d - R); for (let V = 0; V < 6; V++) { let k = await c.readPixels(V, R, void 0, !1); if (k && k.byteLength === k.length) { const K = new Float32Array(k.byteLength * 4); for (let ee = 0; ee < k.byteLength; ee++) K[ee] = k[ee] / 255, K[ee] = Math.pow(K[ee], 2.2); k = K; } else if (k && c.gammaSpace) { const K = k; for (let ee = 0; ee < K.length; ee++) K[ee] = Math.pow(K[ee], 2.2); } const L = n.createRawTexture(k, w, w, 5, !1, !0, 1, null, a); await hB.EncodeTextureToRGBD(L, u, a); const B = await n._readTexturePixels(L, w, w), U = await qh.DumpDataAsync(w, w, B, s, void 0, !1, !0, e.imageQuality); h[R * 6 + V] = U, L.dispose(); } } u.dispose(), l && await l; const f = { version: CH, width: o, imageType: s, irradiance: Cfe(c), specular: { mipmaps: [], lodGenerationScale: c.lodGenerationScale } }; let p = 0; for (let R = 0; R <= d; R++) for (let w = 0; w < 6; w++) { const V = h[R * 6 + w].byteLength; f.specular.mipmaps.push({ length: V, position: p }), p += V; } const m = JSON.stringify(f), _ = new ArrayBuffer(m.length + 1), v = new Uint8Array(_); for (let R = 0, w = m.length; R < w; R++) v[R] = m.charCodeAt(R); v[m.length] = 0; const C = W9.length + p + _.byteLength, x = new ArrayBuffer(C), b = new Uint8Array(x), S = new DataView(x); let M = 0; for (let R = 0; R < W9.length; R++) S.setUint8(M++, W9[R]); b.set(new Uint8Array(_), M), M += _.byteLength; for (let R = 0; R <= d; R++) for (let w = 0; w < 6; w++) { const V = h[R * 6 + w]; b.set(new Uint8Array(V), M), M += V.byteLength; } return x; } function Cfe(c) { const e = c.sphericalPolynomial; return e == null ? null : { x: [e.x.x, e.x.y, e.x.z], y: [e.y.x, e.y.y, e.y.z], z: [e.z.x, e.z.y, e.z.z], xx: [e.xx.x, e.xx.y, e.xx.z], yy: [e.yy.x, e.yy.y, e.yy.z], zz: [e.zz.x, e.zz.y, e.zz.z], yz: [e.yz.x, e.yz.y, e.yz.z], zx: [e.zx.x, e.zx.y, e.zx.z], xy: [e.xy.x, e.xy.y, e.xy.z] }; } function yU(c, e) { e = hN(e); const t = e.specular; let i = yt.Log2(e.width); if (i = Math.round(i) + 1, t.mipmaps.length !== 6 * i) throw new Error(`Unsupported specular mipmaps number "${t.mipmaps.length}"`); const r = new Array(i); for (let s = 0; s < i; s++) { r[s] = new Array(6); for (let n = 0; n < 6; n++) { const a = t.mipmaps[s * 6 + n]; r[s][n] = new Uint8Array(c.buffer, c.byteOffset + t.specularDataPosition + a.position, a.length); } } return r; } function iW(c, e, t) { t = hN(t); const i = t.specular; if (!i) return Promise.resolve(); c._lodGenerationScale = i.lodGenerationScale; const r = yU(e, t); return sL(c, r, t.imageType); } function LZ(c, e, t, i, r, s, n, a, l, o, u) { return new Promise((h, d) => { if (t) { const f = e.createTexture(null, !0, !0, null, 1, null, (p) => { d(p); }, c); i.getEffect().executeWhenCompiled(() => { i.externalTextureSamplerBinding = !0, i.onApply = (p) => { p._bindTexture("textureSampler", f), p.setFloat2("scale", 1, e._features.needsInvertingBitmap && c instanceof ImageBitmap ? 
-1 : 1); }, e.scenes.length && (e.scenes[0].postProcessManager.directRender([i], o, !0, s, n), e.restoreDefaultFramebuffer(), f.dispose(), URL.revokeObjectURL(r), h()); }); } else { if (e._uploadImageToTexture(u, c, s, n), a) { const f = l[n]; f && e._uploadImageToTexture(f._texture, c, s, 0); } h(); } }); } function sL(c, e, t = tW) { if (!Ve.IsExponentOfTwo(c.width)) throw new Error("Texture size must be a power of two"); const i = yt.ILog2(c.width) + 1, r = c.getEngine(); let s = !1, n = !1, a = null, l = null, o = null; const u = r.getCaps(); if (c.format = 5, c.type = 0, c.generateMipMaps = !0, c._cachedAnisotropicFilteringLevel = null, r.updateTextureSamplingMode(3, c), u.textureLOD ? r._features.supportRenderAndCopyToLodForFloatTextures ? u.textureHalfFloatRender && u.textureHalfFloatLinearFiltering ? (s = !0, c.type = 2) : u.textureFloatRender && u.textureFloatLinearFiltering && (s = !0, c.type = 1) : s = !1 : (s = !1, n = !0, o = {}), s) a = new Bi("rgbdDecode", "rgbdDecode", null, null, 1, null, 3, r, !1, void 0, c.type, void 0, null, !1), c._isRGBD = !1, c.invertY = !1, l = r.createRenderTargetCubeTexture(c.width, { generateDepthBuffer: !1, generateMipMaps: !0, generateStencilBuffer: !1, samplingMode: 3, type: c.type, format: 5 }); else if (c._isRGBD = !0, c.invertY = !0, n) { const f = c._lodGenerationScale, p = c._lodGenerationOffset; for (let m = 0; m < 3; m++) { const v = 1 - m / 2, C = p, x = (i - 1) * f + p, b = C + (x - C) * v, S = Math.round(Math.min(Math.max(b, 0), x)), M = new ln(r, ts.Temp); M.isCube = !0, M.invertY = !0, M.generateMipMaps = !1, r.updateTextureSamplingMode(2, M); const R = new dn(null); switch (R._isCube = !0, R._texture = M, o[S] = R, m) { case 0: c._lodTextureLow = R; break; case 1: c._lodTextureMid = R; break; case 2: c._lodTextureHigh = R; break; } } } const h = []; for (let d = 0; d < e.length; d++) for (let f = 0; f < 6; f++) { const p = e[d][f], m = new Blob([p], { type: t }), _ = URL.createObjectURL(m); let v; if (r._features.forceBitmapOverHTMLImageElement) v = r.createImageBitmap(m, { premultiplyAlpha: "none" }).then((C) => LZ(C, r, s, a, _, f, d, n, o, l, c)); else { const C = new Image(); C.src = _, v = new Promise((x, b) => { C.onload = () => { LZ(C, r, s, a, _, f, d, n, o, l, c).then(() => x()).catch((S) => { b(S); }); }, C.onerror = (S) => { b(S); }; }); } h.push(v); } if (e.length < i) { let d; const f = Math.pow(2, i - 1 - e.length), p = f * f * 4; switch (c.type) { case 0: { d = new Uint8Array(p); break; } case 2: { d = new Uint16Array(p); break; } case 1: { d = new Float32Array(p); break; } } for (let m = e.length; m < i; m++) for (let _ = 0; _ < 6; _++) r._uploadArrayBufferViewToTexture(c, d, _, m); } return Promise.all(h).then(() => { l && (r._releaseTexture(c), l._swapAndDie(c)), a && a.dispose(), n && (c._lodTextureHigh && c._lodTextureHigh._texture && (c._lodTextureHigh._texture.isReady = !0), c._lodTextureMid && c._lodTextureMid._texture && (c._lodTextureMid._texture.isReady = !0), c._lodTextureLow && c._lodTextureLow._texture && (c._lodTextureLow._texture.isReady = !0)); }); } function CU(c, e) { e = hN(e); const t = e.irradiance; if (!t) return; const i = new ax(); D.FromArrayToRef(t.x, 0, i.x), D.FromArrayToRef(t.y, 0, i.y), D.FromArrayToRef(t.z, 0, i.z), D.FromArrayToRef(t.xx, 0, i.xx), D.FromArrayToRef(t.yy, 0, i.yy), D.FromArrayToRef(t.zz, 0, i.zz), D.FromArrayToRef(t.yz, 0, i.yz), D.FromArrayToRef(t.zx, 0, i.zx), D.FromArrayToRef(t.xy, 0, i.xy), c._sphericalPolynomial = i; } function Uie(c, e, t, i, r) { const s = 
c.getEngine().createRawCubeTexture(null, c.width, c.format, c.type, c.generateMipMaps, c.invertY, c.samplingMode, c._compression), n = sL(s, e).then(() => c); return c.onRebuildCallback = (a) => ({ proxy: n, isReady: !0, isAsync: !0 }), c._source = ts.CubeRawRGBD, c._bufferViewArrayArray = e, c._lodGenerationScale = i, c._lodGenerationOffset = r, c._sphericalPolynomial = t, sL(c, e).then(() => (c.isReady = !0, c)); } const xfe = { /** * Gets the environment info from an env file. * @param data The array buffer containing the .env bytes. * @returns the environment file info (the json header) if successfully parsed, normalized to the latest supported version. */ GetEnvInfo: AU, /** * Creates an environment texture from a loaded cube texture. * @param texture defines the cube texture to convert in env file * @param options options for the conversion process * @param options.imageType the mime type for the encoded images, with support for "image/png" (default) and "image/webp" * @param options.imageQuality the image quality of encoded WebP images. * @returns a promise containing the environment data if successful. */ CreateEnvTextureAsync: Bie, /** * Creates the ArrayBufferViews used for initializing environment texture image data. * @param data the image data * @param info parameters that determine what views will be created for accessing the underlying buffer * @returns the views described by info providing access to the underlying buffer */ CreateImageDataArrayBufferViews: yU, /** * Uploads the texture info contained in the env file to the GPU. * @param texture defines the internal texture to upload to * @param data defines the data to load * @param info defines the texture info retrieved through the GetEnvInfo method * @returns a promise */ UploadEnvLevelsAsync: iW, /** * Uploads the levels of image data to the GPU. * @param texture defines the internal texture to upload to * @param imageData defines the array buffer views of image data [mipmap][face] * @param imageType the mime type of the image data * @returns a promise */ UploadLevelsAsync: sL, /** * Uploads spherical polynomials information to the texture. * @param texture defines the texture we are trying to upload the information to * @param info defines the environment texture info retrieved through the GetEnvInfo method */ UploadEnvSpherical: CU }; function RF(c, e, t, i) { let r = i, s = 0, n = ""; for (; r < t.length; ) { const a = t.charAt(r); if (n) a === n ? n === '"' || n === "'" ? t.charAt(r - 1) !== "\\" && (n = "") : n = "" : n === "*/" && a === "*" && r + 1 < t.length && (t.charAt(r + 1) === "/" && (n = ""), n === "" && r++); else switch (a) { case c: s++; break; case e: s--; break; case '"': case "'": case "`": n = a; break; case "/": if (r + 1 < t.length) { const l = t.charAt(r + 1); l === "/" ? n = ` ` : l === "*" && (n = "*/"); } break; } if (r++, s === 0) break; } return s === 0 ? r - 1 : -1; } function NZ(c, e) { for (; e < c.length; ) { const t = c[e]; if (t !== " " && t !== ` ` && t !== "\r" && t !== " " && t !== ` ` && t !== " ") break; e++; } return e; } function Hk(c) { const e = c.charCodeAt(0); return e >= 48 && e <= 57 || // 0-9 e >= 65 && e <= 90 || // A-Z e >= 97 && e <= 122 || // a-z e == 95; } function xH(c) { let e = 0, t = "", i = !1; const r = []; for (; e < c.length; ) { const s = c.charAt(e); if (t) s === t ? t === '"' || t === "'" ? (c.charAt(e - 1) !== "\\" && (t = ""), r.push(s)) : (t = "", i = !1) : t === "*/" && s === "*" && e + 1 < c.length ? 
(c.charAt(e + 1) === "/" && (t = ""), t === "" && (i = !1, e++)) : i || r.push(s); else { switch (s) { case '"': case "'": case "`": t = s; break; case "/": if (e + 1 < c.length) { const n = c.charAt(e + 1); n === "/" ? (t = ` `, i = !0) : n === "*" && (t = "*/", i = !0); } break; } i || r.push(s); } e++; } return r.join(""); } function bfe(c, e, t, i) { for (; e >= 0 && c.charAt(e) !== t && (!i || c.charAt(e) !== i); ) e--; return e; } function Efe(c) { return c.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); } class cT { /** Gets the code after the inlining process */ get code() { return this._sourceCode; } /** * Initializes the inliner * @param sourceCode shader code source to inline * @param numMaxIterations maximum number of iterations (used to detect recursive calls) */ constructor(e, t = 20) { this.debug = !1, this._sourceCode = e, this._numMaxIterations = t, this._functionDescr = [], this.inlineToken = "#define inline"; } /** * Start the processing of the shader code */ processCode() { this.debug && Ce.Log(`Start inlining process (code size=${this._sourceCode.length})...`), this._collectFunctions(), this._processInlining(this._numMaxIterations), this.debug && Ce.Log("End of inlining process."); } _collectFunctions() { let e = 0; for (; e < this._sourceCode.length; ) { const t = this._sourceCode.indexOf(this.inlineToken, e); if (t < 0) break; const i = this._sourceCode.indexOf("(", t + this.inlineToken.length); if (i < 0) { this.debug && Ce.Warn(`Could not find the opening parenthesis after the token. startIndex=${e}`), e = t + this.inlineToken.length; continue; } const r = cT._RegexpFindFunctionNameAndType.exec(this._sourceCode.substring(t + this.inlineToken.length, i)); if (!r) { this.debug && Ce.Warn(`Could not extract the name/type of the function from: ${this._sourceCode.substring(t + this.inlineToken.length, i)}`), e = t + this.inlineToken.length; continue; } const [s, n] = [r[3], r[4]], a = RF("(", ")", this._sourceCode, i); if (a < 0) { this.debug && Ce.Warn(`Could not extract the parameters the function '${n}' (type=${s}). funcParamsStartIndex=${i}`), e = t + this.inlineToken.length; continue; } const l = this._sourceCode.substring(i + 1, a), o = NZ(this._sourceCode, a + 1); if (o === this._sourceCode.length) { this.debug && Ce.Warn(`Could not extract the body of the function '${n}' (type=${s}). funcParamsEndIndex=${a}`), e = t + this.inlineToken.length; continue; } const u = RF("{", "}", this._sourceCode, o); if (u < 0) { this.debug && Ce.Warn(`Could not extract the body of the function '${n}' (type=${s}). funcBodyStartIndex=${o}`), e = t + this.inlineToken.length; continue; } const h = this._sourceCode.substring(o, u + 1), d = xH(l).split(","), f = []; for (let _ = 0; _ < d.length; ++_) { const v = d[_].trim(), C = v.lastIndexOf(" "); C >= 0 && f.push(v.substring(C + 1)); } s !== "void" && f.push("return"), this._functionDescr.push({ name: n, type: s, parameters: f, body: h, callIndex: 0 }), e = u + 1; const p = t > 0 ? this._sourceCode.substring(0, t) : "", m = u + 1 < this._sourceCode.length - 1 ? this._sourceCode.substring(u + 1) : ""; this._sourceCode = p + m, e -= u + 1 - t; } this.debug && Ce.Log(`Collect functions: ${this._functionDescr.length} functions found. 
functionDescr=${this._functionDescr}`); } _processInlining(e = 20) { for (; e-- >= 0 && this._replaceFunctionCallsByCode(); ) ; return this.debug && Ce.Log(`numMaxIterations is ${e} after inlining process`), e >= 0; } _replaceFunctionCallsByCode() { let e = !1; for (const t of this._functionDescr) { const { name: i, type: r, parameters: s, body: n } = t; let a = 0; for (; a < this._sourceCode.length; ) { const l = this._sourceCode.indexOf(i, a); if (l < 0) break; if (l === 0 || Hk(this._sourceCode.charAt(l - 1))) { a = l + i.length; continue; } const o = NZ(this._sourceCode, l + i.length); if (o === this._sourceCode.length || this._sourceCode.charAt(o) !== "(") { a = l + i.length; continue; } const u = RF("(", ")", this._sourceCode, o); if (u < 0) { this.debug && Ce.Warn(`Could not extract the parameters of the function call. Function '${i}' (type=${r}). callParamsStartIndex=${o}`), a = l + i.length; continue; } const h = this._sourceCode.substring(o + 1, u), f = ((x) => { const b = []; let S = 0, M = 0; for (; S < x.length; ) { if (x.charAt(S) === "(") { const R = RF("(", ")", x, S); if (R < 0) return null; S = R; } else x.charAt(S) === "," && (b.push(x.substring(M, S)), M = S + 1); S++; } return M < S && b.push(x.substring(M, S)), b; })(xH(h)); if (f === null) { this.debug && Ce.Warn(`Invalid function call: can't extract the parameters of the function call. Function '${i}' (type=${r}). callParamsStartIndex=${o}, callParams=` + h), a = l + i.length; continue; } const p = []; for (let x = 0; x < f.length; ++x) { const b = f[x].trim(); p.push(b); } const m = r !== "void" ? i + "_" + t.callIndex++ : null; if (m && p.push(m + " ="), p.length !== s.length) { this.debug && Ce.Warn(`Invalid function call: not the same number of parameters for the call than the number expected by the function. Function '${i}' (type=${r}). function parameters=${s}, call parameters=${p}`), a = l + i.length; continue; } a = u + 1; const _ = this._replaceNames(n, s, p); let v = l > 0 ? this._sourceCode.substring(0, l) : ""; const C = u + 1 < this._sourceCode.length - 1 ? this._sourceCode.substring(u + 1) : ""; if (m) { const x = bfe(this._sourceCode, l - 1, ` `, "{"); v = this._sourceCode.substring(0, x + 1); const b = this._sourceCode.substring(x + 1, l); this._sourceCode = v + r + " " + m + `; ` + _ + ` ` + b + m + C, this.debug && Ce.Log(`Replace function call by code. Function '${i}' (type=${r}). injectDeclarationIndex=${x}, call parameters=${p}`); } else this._sourceCode = v + _ + C, a += _.length - (u + 1 - l), this.debug && Ce.Log(`Replace function call by code. Function '${i}' (type=${r}). functionCallIndex=${l}, call parameters=${p}`); e = !0; } } return e; } _replaceNames(e, t, i) { for (let r = 0; r < t.length; ++r) { const s = new RegExp(Efe(t[r]), "g"), n = t[r].length, a = i[r]; e = e.replace(s, (l, ...o) => { const u = o[0]; return Hk(e.charAt(u - 1)) || Hk(e.charAt(u + n)) ? t[r] : a; }); } return e; } } cT._RegexpFindFunctionNameAndType = /((\s+?)(\w+)\s+(\w+)\s*?)$/; class Tfe { get isAsync() { return this.isParallelCompiled; } get isReady() { if (this.compilationError) { const e = this.compilationError.message; throw new Error("SHADER ERROR" + (typeof e == "string" ? ` ` + e : "")); } return this.isCompiled; } _getVertexShaderCode() { return null; } _getFragmentShaderCode() { return null; } // TODO: what should this do? 
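/* Assumption (not confirmed by this bundle): this hook appears to mirror the WebGL pipeline
   context's integration with the SpectorJS capture tool, which rebuilds shader programs while
   recording a frame. The native pipeline context has no equivalent program object to hand back,
   so the method below is left unimplemented and simply throws. */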
_handlesSpectorRebuildCallback(e) { throw new Error("Not implemented"); } constructor(e) { this.isParallelCompiled = !0, this.isCompiled = !1, this._valueCache = {}, this._engine = e; } _fillEffectInformation(e, t, i, r, s, n, a, l) { const o = this._engine; if (o.supportsUniformBuffers) for (const d in t) e.bindUniformBlock(d, t[d]); this._engine.getUniforms(this, i).forEach((d, f) => { r[i[f]] = d; }), this._uniforms = r; let h; for (h = 0; h < s.length; h++) e.getUniform(s[h]) == null && (s.splice(h, 1), h--); s.forEach((d, f) => { n[d] = f; }), l.push(...o.getAttributes(this, a)); } /** * Release all associated resources. **/ dispose() { this._uniforms = {}; } /** * @internal */ _cacheMatrix(e, t) { const i = this._valueCache[e], r = t.updateFlag; return i !== void 0 && i === r ? !1 : (this._valueCache[e] = r, !0); } /** * @internal */ _cacheFloat2(e, t, i) { let r = this._valueCache[e]; if (!r) return r = [t, i], this._valueCache[e] = r, !0; let s = !1; return r[0] !== t && (r[0] = t, s = !0), r[1] !== i && (r[1] = i, s = !0), s; } /** * @internal */ _cacheFloat3(e, t, i, r) { let s = this._valueCache[e]; if (!s) return s = [t, i, r], this._valueCache[e] = s, !0; let n = !1; return s[0] !== t && (s[0] = t, n = !0), s[1] !== i && (s[1] = i, n = !0), s[2] !== r && (s[2] = r, n = !0), n; } /** * @internal */ _cacheFloat4(e, t, i, r, s) { let n = this._valueCache[e]; if (!n) return n = [t, i, r, s], this._valueCache[e] = n, !0; let a = !1; return n[0] !== t && (n[0] = t, a = !0), n[1] !== i && (n[1] = i, a = !0), n[2] !== r && (n[2] = r, a = !0), n[3] !== s && (n[3] = s, a = !0), a; } /** * Sets an integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. */ setInt(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this._engine.setInt(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets a int2 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int2. * @param y Second int in int2. */ setInt2(e, t, i) { this._cacheFloat2(e, t, i) && (this._engine.setInt2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets a int3 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int3. * @param y Second int in int3. * @param z Third int in int3. */ setInt3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this._engine.setInt3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets a int4 on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int4. * @param y Second int in int4. * @param z Third int in int4. * @param w Fourth int in int4. */ setInt4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this._engine.setInt4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets an int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray(e, t) { this._valueCache[e] = null, this._engine.setIntArray(this._uniforms[e], t); } /** * Sets an int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray2(e, t) { this._valueCache[e] = null, this._engine.setIntArray2(this._uniforms[e], t); } /** * Sets an int array 3 on a uniform variable. (Array is specified as single array eg. 
[1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray3(e, t) { this._valueCache[e] = null, this._engine.setIntArray3(this._uniforms[e], t); } /** * Sets an int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray4(e, t) { this._valueCache[e] = null, this._engine.setIntArray4(this._uniforms[e], t); } /** * Sets an unsigned integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. */ setUInt(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this._engine.setUInt(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets a unsigned int2 on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint2. * @param y Second unsigned int in uint2. */ setUInt2(e, t, i) { this._cacheFloat2(e, t, i) && (this._engine.setUInt2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets a unsigned int3 on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint3. * @param y Second unsigned int in uint3. * @param z Third unsigned int in uint3. */ setUInt3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this._engine.setUInt3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets a unsigned int4 on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint4. * @param y Second unsigned int in uint4. * @param z Third unsigned int in uint4. * @param w Fourth unsigned int in uint4. */ setUInt4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this._engine.setUInt4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets an unsigned int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray(e, t) { this._valueCache[e] = null, this._engine.setUIntArray(this._uniforms[e], t); } /** * Sets an unsigned int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray2(e, t) { this._valueCache[e] = null, this._engine.setUIntArray2(this._uniforms[e], t); } /** * Sets an unsigned int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray3(e, t) { this._valueCache[e] = null, this._engine.setUIntArray3(this._uniforms[e], t); } /** * Sets an unsigned int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray4(e, t) { this._valueCache[e] = null, this._engine.setUIntArray4(this._uniforms[e], t); } /** * Sets an float array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setFloatArray(e, t) { this._valueCache[e] = null, this._engine.setFloatArray(this._uniforms[e], t); } /** * Sets an float array 2 on a uniform variable. (Array is specified as single array eg. 
[1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setFloatArray2(e, t) { this._valueCache[e] = null, this._engine.setFloatArray2(this._uniforms[e], t); } /** * Sets an float array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setFloatArray3(e, t) { this._valueCache[e] = null, this._engine.setFloatArray3(this._uniforms[e], t); } /** * Sets an float array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setFloatArray4(e, t) { this._valueCache[e] = null, this._engine.setFloatArray4(this._uniforms[e], t); } /** * Sets an array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setArray(e, t) { this._valueCache[e] = null, this._engine.setArray(this._uniforms[e], t); } /** * Sets an array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray2(e, t) { this._valueCache[e] = null, this._engine.setArray2(this._uniforms[e], t); } /** * Sets an array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray3(e, t) { this._valueCache[e] = null, this._engine.setArray3(this._uniforms[e], t); } /** * Sets an array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray4(e, t) { this._valueCache[e] = null, this._engine.setArray4(this._uniforms[e], t); } /** * Sets matrices on a uniform variable. * @param uniformName Name of the variable. * @param matrices matrices to be set. */ setMatrices(e, t) { t && (this._valueCache[e] = null, this._engine.setMatrices(this._uniforms[e], t)); } /** * Sets matrix on a uniform variable. * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix(e, t) { this._cacheMatrix(e, t) && (this._engine.setMatrices(this._uniforms[e], t.toArray()) || (this._valueCache[e] = null)); } /** * Sets a 3x3 matrix on a uniform variable. (Specified as [1,2,3,4,5,6,7,8,9] will result in [1,2,3][4,5,6][7,8,9] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix3x3(e, t) { this._valueCache[e] = null, this._engine.setMatrix3x3(this._uniforms[e], t); } /** * Sets a 2x2 matrix on a uniform variable. (Specified as [1,2,3,4] will result in [1,2][3,4] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix2x2(e, t) { this._valueCache[e] = null, this._engine.setMatrix2x2(this._uniforms[e], t); } /** * Sets a float on a uniform variable. * @param uniformName Name of the variable. * @param value value to be set. * @returns this effect. */ setFloat(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this._engine.setFloat(this._uniforms[e], t) && (this._valueCache[e] = t); } /** * Sets a boolean on a uniform variable. 
* @param uniformName Name of the variable. * @param bool value to be set. */ setBool(e, t) { const i = this._valueCache[e]; i !== void 0 && i === t || this._engine.setInt(this._uniforms[e], t ? 1 : 0) && (this._valueCache[e] = t ? 1 : 0); } /** * Sets a Vector2 on a uniform variable. * @param uniformName Name of the variable. * @param vector2 vector2 to be set. */ setVector2(e, t) { this._cacheFloat2(e, t.x, t.y) && (this._engine.setFloat2(this._uniforms[e], t.x, t.y) || (this._valueCache[e] = null)); } /** * Sets a float2 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float2. * @param y Second float in float2. */ setFloat2(e, t, i) { this._cacheFloat2(e, t, i) && (this._engine.setFloat2(this._uniforms[e], t, i) || (this._valueCache[e] = null)); } /** * Sets a Vector3 on a uniform variable. * @param uniformName Name of the variable. * @param vector3 Value to be set. */ setVector3(e, t) { this._cacheFloat3(e, t.x, t.y, t.z) && (this._engine.setFloat3(this._uniforms[e], t.x, t.y, t.z) || (this._valueCache[e] = null)); } /** * Sets a float3 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float3. * @param y Second float in float3. * @param z Third float in float3. */ setFloat3(e, t, i, r) { this._cacheFloat3(e, t, i, r) && (this._engine.setFloat3(this._uniforms[e], t, i, r) || (this._valueCache[e] = null)); } /** * Sets a Vector4 on a uniform variable. * @param uniformName Name of the variable. * @param vector4 Value to be set. */ setVector4(e, t) { this._cacheFloat4(e, t.x, t.y, t.z, t.w) && (this._engine.setFloat4(this._uniforms[e], t.x, t.y, t.z, t.w) || (this._valueCache[e] = null)); } /** * Sets a Quaternion on a uniform variable. * @param uniformName Name of the variable. * @param quaternion Value to be set. */ setQuaternion(e, t) { this._cacheFloat4(e, t.x, t.y, t.z, t.w) && (this._engine.setFloat4(this._uniforms[e], t.x, t.y, t.z, t.w) || (this._valueCache[e] = null)); } /** * Sets a float4 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float4. * @param y Second float in float4. * @param z Third float in float4. * @param w Fourth float in float4. * @returns this effect. */ setFloat4(e, t, i, r, s) { this._cacheFloat4(e, t, i, r, s) && (this._engine.setFloat4(this._uniforms[e], t, i, r, s) || (this._valueCache[e] = null)); } /** * Sets a Color3 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. */ setColor3(e, t) { this._cacheFloat3(e, t.r, t.g, t.b) && (this._engine.setFloat3(this._uniforms[e], t.r, t.g, t.b) || (this._valueCache[e] = null)); } /** * Sets a Color4 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. * @param alpha Alpha value to be set. 
*/ setColor4(e, t, i) { this._cacheFloat4(e, t.r, t.g, t.b, i) && (this._engine.setFloat4(this._uniforms[e], t.r, t.g, t.b, i) || (this._valueCache[e] = null)); } /** * Sets a Color4 on a uniform variable * @param uniformName defines the name of the variable * @param color4 defines the value to be set */ setDirectColor4(e, t) { this._cacheFloat4(e, t.r, t.g, t.b, t.a) && (this._engine.setFloat4(this._uniforms[e], t.r, t.g, t.b, t.a) || (this._valueCache[e] = null)); } } class Sfe extends FL { get _framebuffer() { return this.__framebuffer; } set _framebuffer(e) { this.__framebuffer && this._engine._releaseFramebufferObjects(this.__framebuffer), this.__framebuffer = e; } get _framebufferDepthStencil() { return this.__framebufferDepthStencil; } set _framebufferDepthStencil(e) { this.__framebufferDepthStencil && this._engine._releaseFramebufferObjects(this.__framebufferDepthStencil), this.__framebufferDepthStencil = e; } constructor(e, t, i, r) { super(e, t, i, r), this.__framebuffer = null, this.__framebufferDepthStencil = null, this._engine = r; } dispose(e = !1) { this._framebuffer = null, this._framebufferDepthStencil = null, super.dispose(e); } } class FZ { get underlyingResource() { return this._nativeTexture; } constructor(e, t) { this._engine = t, this.set(e); } setUsage() { } set(e) { this._nativeTexture = e; } reset() { this._nativeTexture = null; } release() { this._nativeTexture && this._engine.deleteTexture(this._nativeTexture), this.reset(); } } function Gk(c, e) { switch (c) { case 15: return _native.Engine.TEXTURE_FORMAT_D16; case 16: return _native.Engine.TEXTURE_FORMAT_D24; case 13: return _native.Engine.TEXTURE_FORMAT_D24S8; case 14: return _native.Engine.TEXTURE_FORMAT_D32F; case 36492: return _native.Engine.TEXTURE_FORMAT_BC7; case 36494: return _native.Engine.TEXTURE_FORMAT_BC6H; case 33779: return _native.Engine.TEXTURE_FORMAT_BC3; case 33778: return _native.Engine.TEXTURE_FORMAT_BC2; case 33777: return _native.Engine.TEXTURE_FORMAT_BC1; case 33776: return _native.Engine.TEXTURE_FORMAT_BC1; case 37808: return _native.Engine.TEXTURE_FORMAT_ASTC4x4; case 36196: return _native.Engine.TEXTURE_FORMAT_ETC1; case 37492: return _native.Engine.TEXTURE_FORMAT_ETC2; case 37496: return _native.Engine.TEXTURE_FORMAT_ETC2A; case 4: { switch (e) { case 0: return _native.Engine.TEXTURE_FORMAT_RGB8; case 3: return _native.Engine.TEXTURE_FORMAT_RGB8S; case 6: return _native.Engine.TEXTURE_FORMAT_RGB8I; case 7: return _native.Engine.TEXTURE_FORMAT_RGB8U; } break; } case 5: { switch (e) { case 0: return _native.Engine.TEXTURE_FORMAT_RGBA8; case 1: return _native.Engine.TEXTURE_FORMAT_RGBA32F; case 2: return _native.Engine.TEXTURE_FORMAT_RGBA16F; case 3: return _native.Engine.TEXTURE_FORMAT_RGBA8S; case 4: return _native.Engine.TEXTURE_FORMAT_RGBA16I; case 5: return _native.Engine.TEXTURE_FORMAT_RGBA16U; case 6: return _native.Engine.TEXTURE_FORMAT_RGBA32I; case 7: return _native.Engine.TEXTURE_FORMAT_RGBA32U; } break; } case 6: { switch (e) { case 0: return _native.Engine.TEXTURE_FORMAT_R8; case 1: return _native.Engine.TEXTURE_FORMAT_R32F; case 2: return _native.Engine.TEXTURE_FORMAT_R16F; case 3: return _native.Engine.TEXTURE_FORMAT_R8S; case 4: return _native.Engine.TEXTURE_FORMAT_R16S; case 5: return _native.Engine.TEXTURE_FORMAT_R16U; case 6: return _native.Engine.TEXTURE_FORMAT_R32I; case 7: return _native.Engine.TEXTURE_FORMAT_R32U; } break; } case 7: { switch (e) { case 0: return _native.Engine.TEXTURE_FORMAT_RG8; case 1: return _native.Engine.TEXTURE_FORMAT_RG32F; case 2: return 
_native.Engine.TEXTURE_FORMAT_RG16F; case 3: return _native.Engine.TEXTURE_FORMAT_RG8S; case 4: return _native.Engine.TEXTURE_FORMAT_RG16S; case 5: return _native.Engine.TEXTURE_FORMAT_RG16U; case 6: return _native.Engine.TEXTURE_FORMAT_RG32I; case 7: return _native.Engine.TEXTURE_FORMAT_RG32U; } break; } case 12: { switch (e) { case 0: return _native.Engine.TEXTURE_FORMAT_BGRA8; } break; } } throw new F4(`Unsupported texture format or type: format ${c}, type ${e}.`, $C.UnsupportedTextureError); } function b9(c) { switch (c) { case 1: return _native.Engine.TEXTURE_NEAREST_NEAREST; case 2: return _native.Engine.TEXTURE_LINEAR_LINEAR; case 3: return _native.Engine.TEXTURE_LINEAR_LINEAR_MIPLINEAR; case 4: return _native.Engine.TEXTURE_NEAREST_NEAREST_MIPNEAREST; case 5: return _native.Engine.TEXTURE_NEAREST_LINEAR_MIPNEAREST; case 6: return _native.Engine.TEXTURE_NEAREST_LINEAR_MIPLINEAR; case 7: return _native.Engine.TEXTURE_NEAREST_LINEAR; case 8: return _native.Engine.TEXTURE_NEAREST_NEAREST_MIPLINEAR; case 9: return _native.Engine.TEXTURE_LINEAR_NEAREST_MIPNEAREST; case 10: return _native.Engine.TEXTURE_LINEAR_NEAREST_MIPLINEAR; case 11: return _native.Engine.TEXTURE_LINEAR_LINEAR_MIPNEAREST; case 12: return _native.Engine.TEXTURE_LINEAR_NEAREST; default: throw new Error(`Unsupported sampling mode: ${c}.`); } } function Kk(c) { switch (c) { case 1: return _native.Engine.ADDRESS_MODE_WRAP; case 0: return _native.Engine.ADDRESS_MODE_CLAMP; case 2: return _native.Engine.ADDRESS_MODE_MIRROR; default: throw new Error("Unexpected wrap mode: " + c + "."); } } function Mfe(c) { switch (c) { case 513: return _native.Engine.STENCIL_TEST_LESS; case 515: return _native.Engine.STENCIL_TEST_LEQUAL; case 514: return _native.Engine.STENCIL_TEST_EQUAL; case 518: return _native.Engine.STENCIL_TEST_GEQUAL; case 516: return _native.Engine.STENCIL_TEST_GREATER; case 517: return _native.Engine.STENCIL_TEST_NOTEQUAL; case 512: return _native.Engine.STENCIL_TEST_NEVER; case 519: return _native.Engine.STENCIL_TEST_ALWAYS; default: throw new Error(`Unsupported stencil func mode: ${c}.`); } } function Rfe(c) { switch (c) { case 7680: return _native.Engine.STENCIL_OP_FAIL_S_KEEP; case 0: return _native.Engine.STENCIL_OP_FAIL_S_ZERO; case 7681: return _native.Engine.STENCIL_OP_FAIL_S_REPLACE; case 7682: return _native.Engine.STENCIL_OP_FAIL_S_INCR; case 7683: return _native.Engine.STENCIL_OP_FAIL_S_DECR; case 5386: return _native.Engine.STENCIL_OP_FAIL_S_INVERT; case 34055: return _native.Engine.STENCIL_OP_FAIL_S_INCRSAT; case 34056: return _native.Engine.STENCIL_OP_FAIL_S_DECRSAT; default: throw new Error(`Unsupported stencil OpFail mode: ${c}.`); } } function Pfe(c) { switch (c) { case 7680: return _native.Engine.STENCIL_OP_FAIL_Z_KEEP; case 0: return _native.Engine.STENCIL_OP_FAIL_Z_ZERO; case 7681: return _native.Engine.STENCIL_OP_FAIL_Z_REPLACE; case 7682: return _native.Engine.STENCIL_OP_FAIL_Z_INCR; case 7683: return _native.Engine.STENCIL_OP_FAIL_Z_DECR; case 5386: return _native.Engine.STENCIL_OP_FAIL_Z_INVERT; case 34055: return _native.Engine.STENCIL_OP_FAIL_Z_INCRSAT; case 34056: return _native.Engine.STENCIL_OP_FAIL_Z_DECRSAT; default: throw new Error(`Unsupported stencil depthFail mode: ${c}.`); } } function Ife(c) { switch (c) { case 7680: return _native.Engine.STENCIL_OP_PASS_Z_KEEP; case 0: return _native.Engine.STENCIL_OP_PASS_Z_ZERO; case 7681: return _native.Engine.STENCIL_OP_PASS_Z_REPLACE; case 7682: return _native.Engine.STENCIL_OP_PASS_Z_INCR; case 7683: return 
_native.Engine.STENCIL_OP_PASS_Z_DECR; case 5386: return _native.Engine.STENCIL_OP_PASS_Z_INVERT; case 34055: return _native.Engine.STENCIL_OP_PASS_Z_INCRSAT; case 34056: return _native.Engine.STENCIL_OP_PASS_Z_DECRSAT; default: throw new Error(`Unsupported stencil opPass mode: ${c}.`); } } function Dfe(c) { switch (c) { case 0: return _native.Engine.ALPHA_DISABLE; case 1: return _native.Engine.ALPHA_ADD; case 2: return _native.Engine.ALPHA_COMBINE; case 3: return _native.Engine.ALPHA_SUBTRACT; case 4: return _native.Engine.ALPHA_MULTIPLY; case 5: return _native.Engine.ALPHA_MAXIMIZED; case 6: return _native.Engine.ALPHA_ONEONE; case 7: return _native.Engine.ALPHA_PREMULTIPLIED; case 8: return _native.Engine.ALPHA_PREMULTIPLIED_PORTERDUFF; case 9: return _native.Engine.ALPHA_INTERPOLATE; case 10: return _native.Engine.ALPHA_SCREENMODE; default: throw new Error(`Unsupported alpha mode: ${c}.`); } } function Ofe(c) { switch (c) { case Y.BYTE: return _native.Engine.ATTRIB_TYPE_INT8; case Y.UNSIGNED_BYTE: return _native.Engine.ATTRIB_TYPE_UINT8; case Y.SHORT: return _native.Engine.ATTRIB_TYPE_INT16; case Y.UNSIGNED_SHORT: return _native.Engine.ATTRIB_TYPE_UINT16; case Y.FLOAT: return _native.Engine.ATTRIB_TYPE_FLOAT; default: throw new Error(`Unsupported attribute type: ${c}.`); } } const Vie = new Fe(); if (typeof self < "u" && !Object.prototype.hasOwnProperty.call(self, "_native")) { let c; Object.defineProperty(self, "_native", { get: () => c, set: (e) => { c = e, c && Vie.notifyObservers(c); } }); } function kie() { return new Promise((c) => { typeof _native > "u" ? Vie.addOnce((e) => c(e)) : c(_native); }); } async function zie(c, e) { (await kie())[c] = e; } class BZ extends JA { } class wfe { constructor(e) { this._engine = e, this._pending = new Array(), this._isCommandBufferScopeActive = !1, this._commandStream = yP._createNativeDataStream(), this._engine.setCommandDataStream(this._commandStream); } beginCommandScope() { if (this._isCommandBufferScopeActive) throw new Error("Command scope already active."); this._isCommandBufferScopeActive = !0; } endCommandScope() { if (!this._isCommandBufferScopeActive) throw new Error("Command scope is not active."); this._isCommandBufferScopeActive = !1, this._submit(); } startEncodingCommand(e) { this._commandStream.writeNativeData(e); } encodeCommandArgAsUInt32(e) { this._commandStream.writeUint32(e); } encodeCommandArgAsUInt32s(e) { this._commandStream.writeUint32Array(e); } encodeCommandArgAsInt32(e) { this._commandStream.writeInt32(e); } encodeCommandArgAsInt32s(e) { this._commandStream.writeInt32Array(e); } encodeCommandArgAsFloat32(e) { this._commandStream.writeFloat32(e); } encodeCommandArgAsFloat32s(e) { this._commandStream.writeFloat32Array(e); } encodeCommandArgAsNativeData(e) { this._commandStream.writeNativeData(e), this._pending.push(e); } finishEncodingCommand() { this._isCommandBufferScopeActive || this._submit(); } _submit() { this._engine.submitCommands(), this._pending.length = 0; } } class yP extends $e { setHardwareScalingLevel(e) { super.setHardwareScalingLevel(e), this._engine.setHardwareScalingLevel(e); } constructor(e = {}) { if (super(null, !1, void 0, e.adaptToDeviceRatio), this._engine = new _native.Engine(), this._camera = _native.Camera ? 
new _native.Camera() : null, this._commandBufferEncoder = new wfe(this._engine), this._boundBuffersVertexArray = null, this._currentDepthTest = _native.Engine.DEPTH_TEST_LEQUAL, this._stencilTest = !1, this._stencilMask = 255, this._stencilFunc = 519, this._stencilFuncRef = 0, this._stencilFuncMask = 255, this._stencilOpStencilFail = 7680, this._stencilOpDepthFail = 7680, this._stencilOpStencilDepthPass = 7681, this._zOffset = 0, this._zOffsetUnits = 0, this._depthWrite = !0, _native.Engine.PROTOCOL_VERSION !== yP.PROTOCOL_VERSION) throw new Error(`Protocol version mismatch: ${_native.Engine.PROTOCOL_VERSION} (Native) !== ${yP.PROTOCOL_VERSION} (JS)`); this._webGLVersion = 2, this.disableUniformBuffers = !0, this._shaderPlatformName = "NATIVE", this._caps = { maxTexturesImageUnits: 16, maxVertexTextureImageUnits: 16, maxCombinedTexturesImageUnits: 32, maxTextureSize: _native.Engine.CAPS_LIMITS_MAX_TEXTURE_SIZE, maxCubemapTextureSize: 512, maxRenderTextureSize: 512, maxVertexAttribs: 16, maxVaryingVectors: 16, maxFragmentUniformVectors: 16, maxVertexUniformVectors: 16, standardDerivatives: !0, astc: null, pvrtc: null, etc1: null, etc2: null, bptc: null, maxAnisotropy: 16, uintIndices: !0, fragmentDepthSupported: !1, highPrecisionShaderSupported: !0, colorBufferFloat: !1, supportFloatTexturesResolve: !1, textureFloat: !0, textureFloatLinearFiltering: !1, textureFloatRender: !0, textureHalfFloat: !0, textureHalfFloatLinearFiltering: !1, textureHalfFloatRender: !0, textureLOD: !0, texelFetch: !1, drawBuffersExtension: !1, depthTextureExtension: !1, vertexArrayObject: !0, instancedArrays: !0, supportOcclusionQuery: !1, canUseTimestampForTimerQuery: !1, blendMinMax: !1, maxMSAASamples: 16, canUseGLInstanceID: !0, canUseGLVertexID: !0, supportComputeShaders: !1, supportSRGBBuffers: !0, supportTransformFeedbacks: !1, textureMaxLevel: !1, texture2DArrayMaxLayerCount: _native.Engine.CAPS_LIMITS_MAX_TEXTURE_LAYERS, disableMorphTargetTexture: !1 }, this._features = { forceBitmapOverHTMLImageElement: !0, supportRenderAndCopyToLodForFloatTextures: !1, supportDepthStencilTexture: !1, supportShadowSamplers: !1, uniformBufferHardCheckMatrix: !1, allowTexturePrefiltering: !1, trackUbosInFrame: !1, checkUbosContentBeforeUpload: !1, supportCSM: !1, basisNeedsPOT: !1, support3DTextures: !1, needTypeSuffixInShaderConstants: !1, supportMSAA: !0, supportSSAO2: !1, supportExtendedTextureFormats: !1, supportSwitchCaseInShader: !1, supportSyncTextureRead: !1, needsInvertingBitmap: !0, useUBOBindingCache: !0, needShaderCodeInlining: !0, needToAlwaysBindUniformBuffers: !1, supportRenderPasses: !0, supportSpriteInstancing: !1, forceVertexBufferStrideMultiple4Bytes: !1, _collectUbosUpdatedInFrame: !1 }, Ve.Log("Babylon Native (v" + $e.Version + ") launched"), Ve.LoadScript = function(r, s, n, a) { Ve.LoadFile(r, (l) => { Function(l).apply(null), s && s(); }, void 0, void 0, !1, (l, o) => { n && n("LoadScript Error", o); }); }, typeof URL > "u" && (window.URL = { createObjectURL: function() { }, revokeObjectURL: function() { } }), typeof Blob > "u" && (window.Blob = function(r) { return r; }), Array.prototype.flat || Object.defineProperty(Array.prototype, "flat", { configurable: !0, value: function r() { const s = isNaN(arguments[0]) ? 1 : Number(arguments[0]); return s ? Array.prototype.reduce.call(this, function(n, a) { return Array.isArray(a) ? 
n.push.apply(n, r.call(a, s - 1)) : n.push(a), n; }, []) : Array.prototype.slice.call(this); }, writable: !0 }); const t = window && window.devicePixelRatio || 1; this._hardwareScalingLevel = e.adaptToDeviceRatio ? 1 / t : 1, this._engine.setHardwareScalingLevel(this._hardwareScalingLevel), this._lastDevicePixelRatio = t, this.resize(); const i = this.getDepthFunction(); i && this.setDepthFunction(i), this._shaderProcessor = new aK(), this.onNewSceneAddedObservable.add((r) => { const s = r.render; r.render = (...n) => { this._commandBufferEncoder.beginCommandScope(), s.apply(r, n), this._commandBufferEncoder.endCommandScope(); }; }); } dispose() { super.dispose(), this._boundBuffersVertexArray && this._deleteVertexArray(this._boundBuffersVertexArray), this._engine.dispose(); } /** @internal */ static _createNativeDataStream() { return new _5(); } /** * Can be used to override the current requestAnimationFrame requester. * @internal */ _queueNewFrame(e, t) { return t.requestAnimationFrame && t !== window ? t.requestAnimationFrame(e) : this._engine.requestAnimationFrame(e), 0; } /** * Override default engine behavior. * @param framebuffer */ _bindUnboundFramebuffer(e) { this._currentFramebuffer !== e && (this._currentFramebuffer && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_UNBINDFRAMEBUFFER), this._commandBufferEncoder.encodeCommandArgAsNativeData(this._currentFramebuffer), this._commandBufferEncoder.finishEncodingCommand()), e && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_BINDFRAMEBUFFER), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.finishEncodingCommand()), this._currentFramebuffer = e); } /** * Gets host document * @returns the host document object */ getHostDocument() { return null; } clear(e, t, i, r = !1) { if (this.useReverseDepthBuffer) throw new Error("reverse depth buffer is not currently implemented"); this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_CLEAR), this._commandBufferEncoder.encodeCommandArgAsUInt32(t && e ? 1 : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(e ? e.r : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(e ? e.g : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(e ? e.b : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(e ? e.a : 1), this._commandBufferEncoder.encodeCommandArgAsUInt32(i ? 1 : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(1), this._commandBufferEncoder.encodeCommandArgAsUInt32(r ? 1 : 0), this._commandBufferEncoder.encodeCommandArgAsUInt32(0), this._commandBufferEncoder.finishEncodingCommand(); } createIndexBuffer(e, t, i) { const r = this._normalizeIndexData(e), s = new BZ(); return s.references = 1, s.is32Bits = r.BYTES_PER_ELEMENT === 4, r.byteLength && (s.nativeIndexBuffer = this._engine.createIndexBuffer(r.buffer, r.byteOffset, r.byteLength, s.is32Bits, t ?? !1)), s; } createVertexBuffer(e, t, i) { const r = ArrayBuffer.isView(e) ? e : new Float32Array(e), s = new BZ(); return s.references = 1, r.byteLength && (s.nativeVertexBuffer = this._engine.createVertexBuffer(r.buffer, r.byteOffset, r.byteLength, t ?? 
!1)), s; } _recordVertexArrayObject(e, t, i, r, s) { i && this._engine.recordIndexBuffer(e, i.nativeIndexBuffer); const n = r.getAttributesNames(); for (let a = 0; a < n.length; a++) { const l = r.getAttributeLocation(a); if (l >= 0) { const o = n[a]; let u = null; if (s && (u = s[o]), u || (u = t[o]), u) { const h = u.getBuffer(); h && h.nativeVertexBuffer && this._engine.recordVertexBuffer(e, h.nativeVertexBuffer, l, u.byteOffset, u.byteStride, u.getSize(), Ofe(u.type), u.normalized, u.getInstanceDivisor()); } } } } bindBuffers(e, t, i) { this._boundBuffersVertexArray && this._deleteVertexArray(this._boundBuffersVertexArray), this._boundBuffersVertexArray = this._engine.createVertexArray(), this._recordVertexArrayObject(this._boundBuffersVertexArray, e, t, i), this.bindVertexArrayObject(this._boundBuffersVertexArray); } recordVertexArrayObject(e, t, i, r) { const s = this._engine.createVertexArray(); return this._recordVertexArrayObject(s, e, t, i, r), s; } _deleteVertexArray(e) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DELETEVERTEXARRAY), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.finishEncodingCommand(); } bindVertexArrayObject(e) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_BINDVERTEXARRAY), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.finishEncodingCommand(); } releaseVertexArrayObject(e) { this._deleteVertexArray(e); } getAttributes(e, t) { const i = e; return this._engine.getAttributes(i.nativeProgram, t); } /** * Draw a list of indexed primitives * @param fillMode defines the primitive to use * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawElementsType(e, t, i, r) { this._drawCalls.addCount(1, !1), r && _native.Engine.COMMAND_DRAWINDEXEDINSTANCED ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DRAWINDEXEDINSTANCED), this._commandBufferEncoder.encodeCommandArgAsUInt32(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.encodeCommandArgAsUInt32(r)) : (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DRAWINDEXED), this._commandBufferEncoder.encodeCommandArgAsUInt32(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i)), this._commandBufferEncoder.finishEncodingCommand(); } /** * Draw a list of unindexed primitives * @param fillMode defines the primitive to use * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawArraysType(e, t, i, r) { this._drawCalls.addCount(1, !1), r && _native.Engine.COMMAND_DRAWINSTANCED ? 
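// NOTE (editorial sketch): every state change and draw call in this engine goes through the
// command buffer encoder with the same three-step pattern, e.g. for an indexed draw:
//   encoder.startEncodingCommand(_native.Engine.COMMAND_DRAWINDEXED);
//   encoder.encodeCommandArgAsUInt32(fillMode);
//   encoder.encodeCommandArgAsUInt32(indexStart);
//   encoder.encodeCommandArgAsUInt32(indexCount);
//   encoder.finishEncodingCommand();
// The *INSTANCED variants are only taken when an instance count is given and the native host
// actually exposes the corresponding COMMAND_ constant, so hosts without it fall back to the
// non-instanced command. Explanatory comment only; `encoder` stands for this._commandBufferEncoder.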
(this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DRAWINSTANCED), this._commandBufferEncoder.encodeCommandArgAsUInt32(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.encodeCommandArgAsUInt32(r)) : (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DRAW), this._commandBufferEncoder.encodeCommandArgAsUInt32(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i)), this._commandBufferEncoder.finishEncodingCommand(); } createPipelineContext() { return new Tfe(this); } createMaterialContext() { } createDrawContext() { } _preparePipelineContext(e, t, i, r, s, n, a, l) { const o = e; r ? o.nativeProgram = this.createRawShaderProgram() : o.nativeProgram = this.createShaderProgram(e, t, i, l); } isAsync(e) { return !!(e.isAsync && this._engine.createProgramAsync); } /** * @internal */ _executeWhenRenderingStateIsCompiled(e, t) { const i = e; if (!this.isAsync(e)) { t(); return; } const r = i.onCompiled; r ? i.onCompiled = () => { r(), t(); } : i.onCompiled = t; } createRawShaderProgram() { throw new Error("Not Supported"); } createShaderProgram(e, t, i, r) { const s = e; if (s.nativeProgram) throw new Error("Tried to create a second program in the same NativePipelineContext"); this.onBeforeShaderCompilationObservable.notifyObservers(this); const n = new cT(t); n.processCode(), t = n.code; const a = new cT(i); a.processCode(), i = a.code, t = mi._ConcatenateShader(t, r), i = mi._ConcatenateShader(i, r); const l = () => { var o; s.isCompiled = !0, (o = s.onCompiled) === null || o === void 0 || o.call(s), this.onAfterShaderCompilationObservable.notifyObservers(this); }; if (this.isAsync(e)) return this._engine.createProgramAsync(t, i, l, (o) => { s.compilationError = o; }); try { const o = s.nativeProgram = this._engine.createProgram(t, i); return l(), o; } catch (o) { const u = o == null ? void 0 : o.message; throw new Error("SHADER ERROR" + (typeof u == "string" ? ` ` + u : "")); } } /** * Inline functions in shader code that are marked to be inlined * @param code code to inline * @returns inlined code */ inlineShaderCode(e) { const t = new cT(e); return t.debug = !1, t.processCode(), t.code; } _setProgram(e) { this._currentProgram !== e && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETPROGRAM), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.finishEncodingCommand(), this._currentProgram = e); } _deletePipelineContext(e) { const t = e; t && t.nativeProgram && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DELETEPROGRAM), this._commandBufferEncoder.encodeCommandArgAsNativeData(t.nativeProgram), this._commandBufferEncoder.finishEncodingCommand()); } getUniforms(e, t) { const i = e; return this._engine.getUniforms(i.nativeProgram, t); } bindUniformBlock(e, t, i) { throw new Error("Not Implemented"); } bindSamplers(e) { const t = e.getPipelineContext(); this._setProgram(t.nativeProgram); const i = e.getSamplers(); for (let r = 0; r < i.length; r++) { const s = e.getUniform(i[r]); s && (this._boundUniforms[r] = s); } this._currentEffect = null; } getRenderWidth(e = !1) { return !e && this._currentRenderTarget ? this._currentRenderTarget.width : this._engine.getRenderWidth(); } getRenderHeight(e = !1) { return !e && this._currentRenderTarget ? 
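// NOTE (editorial sketch): `createShaderProgram` above follows this flow (inferred from the
// minified code; the inliner class name is mangled): run the shader-code inliner over both
// stages, prepend the defines string to each, then compile either synchronously with
// `createProgram` or asynchronously with `createProgramAsync` when the native host provides it.
// A pipeline context may only be compiled once -- a second call throws -- so callers are
// expected to guard, e.g. (illustrative only):
//   if (!pipelineContext.nativeProgram) {
//     engine.createShaderProgram(pipelineContext, vertexCode, fragmentCode, defines);
//   }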
this._currentRenderTarget.height : this._engine.getRenderHeight(); } setViewport(e, t, i) { this._cachedViewport = e, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETVIEWPORT), this._commandBufferEncoder.encodeCommandArgAsFloat32(e.x), this._commandBufferEncoder.encodeCommandArgAsFloat32(e.y), this._commandBufferEncoder.encodeCommandArgAsFloat32(e.width), this._commandBufferEncoder.encodeCommandArgAsFloat32(e.height), this._commandBufferEncoder.finishEncodingCommand(); } enableScissor(e, t, i, r) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETSCISSOR), this._commandBufferEncoder.encodeCommandArgAsFloat32(e), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.encodeCommandArgAsFloat32(i), this._commandBufferEncoder.encodeCommandArgAsFloat32(r), this._commandBufferEncoder.finishEncodingCommand(); } disableScissor() { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETSCISSOR), this._commandBufferEncoder.encodeCommandArgAsFloat32(0), this._commandBufferEncoder.encodeCommandArgAsFloat32(0), this._commandBufferEncoder.encodeCommandArgAsFloat32(0), this._commandBufferEncoder.encodeCommandArgAsFloat32(0), this._commandBufferEncoder.finishEncodingCommand(); } setState(e, t = 0, i, r = !1, s, n, a = 0) { var l, o; this._zOffset = t, this._zOffsetUnits = a, this._zOffset !== 0 && Ve.Warn("zOffset is not supported in Native engine."), this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETSTATE), this._commandBufferEncoder.encodeCommandArgAsUInt32(e ? 1 : 0), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.encodeCommandArgAsFloat32(a), this._commandBufferEncoder.encodeCommandArgAsUInt32(!((o = (l = this.cullBackFaces) !== null && l !== void 0 ? l : s) !== null && o !== void 0) || o ? 1 : 0), this._commandBufferEncoder.encodeCommandArgAsUInt32(r ? 1 : 0), this._commandBufferEncoder.finishEncodingCommand(); } /** * Gets the client rect of native canvas. Needed for InputManager. * @returns a client rectangle */ getInputElementClientRect() { return { bottom: this.getRenderHeight(), height: this.getRenderHeight(), left: 0, right: this.getRenderWidth(), top: 0, width: this.getRenderWidth(), x: 0, y: 0, toJSON: () => { } }; } /** * Set the z offset Factor to apply to current rendering * @param value defines the offset to apply */ setZOffset(e) { e !== this._zOffset && (this._zOffset = e, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETZOFFSET), this._commandBufferEncoder.encodeCommandArgAsFloat32(this.useReverseDepthBuffer ? -e : e), this._commandBufferEncoder.finishEncodingCommand()); } /** * Gets the current value of the zOffset Factor * @returns the current zOffset Factor state */ getZOffset() { return this._zOffset; } /** * Set the z offset Units to apply to current rendering * @param value defines the offset to apply */ setZOffsetUnits(e) { e !== this._zOffsetUnits && (this._zOffsetUnits = e, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETZOFFSETUNITS), this._commandBufferEncoder.encodeCommandArgAsFloat32(this.useReverseDepthBuffer ? 
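// NOTE (editorial sketch): the numeric depth functions translated by getDepthFunction /
// setDepthFunction just below are the standard WebGL comparison constants:
//   512 NEVER, 513 LESS, 514 EQUAL, 515 LEQUAL, 516 GREATER, 517 NOTEQUAL, 518 GEQUAL, 519 ALWAYS
// so, illustratively, setDepthFunction(515) selects less-or-equal testing, which matches the
// engine's default of _native.Engine.DEPTH_TEST_LEQUAL set in the constructor.
// Explanatory comment only.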
-e : e), this._commandBufferEncoder.finishEncodingCommand()); } /** * Gets the current value of the zOffset Units * @returns the current zOffset Units state */ getZOffsetUnits() { return this._zOffsetUnits; } /** * Enable or disable depth buffering * @param enable defines the state to set */ setDepthBuffer(e) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETDEPTHTEST), this._commandBufferEncoder.encodeCommandArgAsUInt32(e ? this._currentDepthTest : _native.Engine.DEPTH_TEST_ALWAYS), this._commandBufferEncoder.finishEncodingCommand(); } /** * Gets a boolean indicating if depth writing is enabled * @returns the current depth writing state */ getDepthWrite() { return this._depthWrite; } getDepthFunction() { switch (this._currentDepthTest) { case _native.Engine.DEPTH_TEST_NEVER: return 512; case _native.Engine.DEPTH_TEST_ALWAYS: return 519; case _native.Engine.DEPTH_TEST_GREATER: return 516; case _native.Engine.DEPTH_TEST_GEQUAL: return 518; case _native.Engine.DEPTH_TEST_NOTEQUAL: return 517; case _native.Engine.DEPTH_TEST_EQUAL: return 514; case _native.Engine.DEPTH_TEST_LESS: return 513; case _native.Engine.DEPTH_TEST_LEQUAL: return 515; } return null; } setDepthFunction(e) { let t = 0; switch (e) { case 512: t = _native.Engine.DEPTH_TEST_NEVER; break; case 519: t = _native.Engine.DEPTH_TEST_ALWAYS; break; case 516: t = _native.Engine.DEPTH_TEST_GREATER; break; case 518: t = _native.Engine.DEPTH_TEST_GEQUAL; break; case 517: t = _native.Engine.DEPTH_TEST_NOTEQUAL; break; case 514: t = _native.Engine.DEPTH_TEST_EQUAL; break; case 513: t = _native.Engine.DEPTH_TEST_LESS; break; case 515: t = _native.Engine.DEPTH_TEST_LEQUAL; break; } this._currentDepthTest = t, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETDEPTHTEST), this._commandBufferEncoder.encodeCommandArgAsUInt32(this._currentDepthTest), this._commandBufferEncoder.finishEncodingCommand(); } /** * Enable or disable depth writing * @param enable defines the state to set */ setDepthWrite(e) { this._depthWrite = e, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETDEPTHWRITE), this._commandBufferEncoder.encodeCommandArgAsUInt32(Number(e)), this._commandBufferEncoder.finishEncodingCommand(); } /** * Enable or disable color writing * @param enable defines the state to set */ setColorWrite(e) { this._colorWrite = e, this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETCOLORWRITE), this._commandBufferEncoder.encodeCommandArgAsUInt32(Number(e)), this._commandBufferEncoder.finishEncodingCommand(); } /** * Gets a boolean indicating if color writing is enabled * @returns the current color writing state */ getColorWrite() { return this._colorWrite; } applyStencil() { this._setStencil(this._stencilMask, Rfe(this._stencilOpStencilFail), Pfe(this._stencilOpDepthFail), Ife(this._stencilOpStencilDepthPass), Mfe(this._stencilFunc), this._stencilFuncRef); } _setStencil(e, t, i, r, s, n) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETSTENCIL), this._commandBufferEncoder.encodeCommandArgAsUInt32(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.encodeCommandArgAsUInt32(r), this._commandBufferEncoder.encodeCommandArgAsUInt32(s), this._commandBufferEncoder.encodeCommandArgAsUInt32(n), this._commandBufferEncoder.finishEncodingCommand(); } /** * Enable or disable the stencil buffer * @param enable defines if the stencil buffer must be enabled 
or disabled */ setStencilBuffer(e) { this._stencilTest = e, e ? this.applyStencil() : this._setStencil(255, _native.Engine.STENCIL_OP_FAIL_S_KEEP, _native.Engine.STENCIL_OP_FAIL_Z_KEEP, _native.Engine.STENCIL_OP_PASS_Z_KEEP, _native.Engine.STENCIL_TEST_ALWAYS, 0); } /** * Gets a boolean indicating if stencil buffer is enabled * @returns the current stencil buffer state */ getStencilBuffer() { return this._stencilTest; } /** * Gets the current stencil operation when stencil passes * @returns a number defining stencil operation to use when stencil passes */ getStencilOperationPass() { return this._stencilOpStencilDepthPass; } /** * Sets the stencil operation to use when stencil passes * @param operation defines the stencil operation to use when stencil passes */ setStencilOperationPass(e) { this._stencilOpStencilDepthPass = e, this.applyStencil(); } /** * Sets the current stencil mask * @param mask defines the new stencil mask to use */ setStencilMask(e) { this._stencilMask = e, this.applyStencil(); } /** * Sets the current stencil function * @param stencilFunc defines the new stencil function to use */ setStencilFunction(e) { this._stencilFunc = e, this.applyStencil(); } /** * Sets the current stencil reference * @param reference defines the new stencil reference to use */ setStencilFunctionReference(e) { this._stencilFuncRef = e, this.applyStencil(); } /** * Sets the current stencil mask * @param mask defines the new stencil mask to use */ setStencilFunctionMask(e) { this._stencilFuncMask = e; } /** * Sets the stencil operation to use when stencil fails * @param operation defines the stencil operation to use when stencil fails */ setStencilOperationFail(e) { this._stencilOpStencilFail = e, this.applyStencil(); } /** * Sets the stencil operation to use when depth fails * @param operation defines the stencil operation to use when depth fails */ setStencilOperationDepthFail(e) { this._stencilOpDepthFail = e, this.applyStencil(); } /** * Gets the current stencil mask * @returns a number defining the new stencil mask to use */ getStencilMask() { return this._stencilMask; } /** * Gets the current stencil function * @returns a number defining the stencil function to use */ getStencilFunction() { return this._stencilFunc; } /** * Gets the current stencil reference value * @returns a number defining the stencil reference value to use */ getStencilFunctionReference() { return this._stencilFuncRef; } /** * Gets the current stencil mask * @returns a number defining the stencil mask to use */ getStencilFunctionMask() { return this._stencilFuncMask; } /** * Gets the current stencil operation when stencil fails * @returns a number defining stencil operation to use when stencil fails */ getStencilOperationFail() { return this._stencilOpStencilFail; } /** * Gets the current stencil operation when depth fails * @returns a number defining stencil operation to use when depth fails */ getStencilOperationDepthFail() { return this._stencilOpDepthFail; } /** * Sets alpha constants used by some alpha blending modes * @param r defines the red component * @param g defines the green component * @param b defines the blue component * @param a defines the alpha component */ setAlphaConstants(e, t, i, r) { throw new Error("Setting alpha blend constant color not yet implemented."); } /** * Sets the current alpha mode * @param mode defines the mode to use (one of the BABYLON.undefined) * @param noDepthWriteChange defines if depth writing state should remains unchanged (false by default) * @see 
https://doc.babylonjs.com/features/featuresDeepDive/materials/advanced/transparent_rendering */ setAlphaMode(e, t = !1) { if (this._alphaMode === e) return; const i = Dfe(e); this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETBLENDMODE), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.finishEncodingCommand(), t || this.setDepthWrite(e === 0), this._alphaMode = e; } /** * Gets the current alpha mode * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/advanced/transparent_rendering * @returns the current alpha mode */ getAlphaMode() { return this._alphaMode; } setInt(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETINT), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsInt32(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setIntArray(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETINTARRAY), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsInt32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setIntArray2(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETINTARRAY2), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsInt32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setIntArray3(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETINTARRAY3), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsInt32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setIntArray4(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETINTARRAY4), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsInt32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloatArray(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOATARRAY), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloatArray2(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOATARRAY2), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloatArray3(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOATARRAY3), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloatArray4(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOATARRAY4), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setArray(e, t) { return e ? this.setFloatArray(e, new Float32Array(t)) : !1; } setArray2(e, t) { return e ? this.setFloatArray2(e, new Float32Array(t)) : !1; } setArray3(e, t) { return e ? 
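// NOTE (editorial sketch): the setInt*/setFloat*/setArray*/setMatrix* helpers in this block all
// share one contract: if the uniform handle is null they return false (so effects can cheaply
// skip uniforms that were optimized out), otherwise they encode the matching SET* command and
// return true. The plain-array variants just wrap their input, roughly (illustrative only):
//   setArray3(uniform, values)  ->  setFloatArray3(uniform, new Float32Array(values))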
this.setFloatArray3(e, new Float32Array(t)) : !1; } setArray4(e, t) { return e ? this.setFloatArray4(e, new Float32Array(t)) : !1; } setMatrices(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETMATRICES), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setMatrix3x3(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETMATRIX3X3), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setMatrix2x2(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETMATRIX2X2), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32s(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloat(e, t) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOAT), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloat2(e, t, i) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOAT2), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.encodeCommandArgAsFloat32(i), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloat3(e, t, i, r) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOAT3), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.encodeCommandArgAsFloat32(i), this._commandBufferEncoder.encodeCommandArgAsFloat32(r), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setFloat4(e, t, i, r, s) { return e ? (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETFLOAT4), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsFloat32(t), this._commandBufferEncoder.encodeCommandArgAsFloat32(i), this._commandBufferEncoder.encodeCommandArgAsFloat32(r), this._commandBufferEncoder.encodeCommandArgAsFloat32(s), this._commandBufferEncoder.finishEncodingCommand(), !0) : !1; } setColor3(e, t) { return e ? (this.setFloat3(e, t.r, t.g, t.b), !0) : !1; } setColor4(e, t, i) { return e ? 
(this.setFloat4(e, t.r, t.g, t.b, i), !0) : !1; } wipeCaches(e) { this.preventCacheWipeBetweenFrames || (this.resetTextureCache(), this._currentEffect = null, e && (this._currentProgram = null, this._stencilStateComposer.reset(), this._depthCullingState.reset(), this._alphaState.reset()), this._cachedVertexBuffers = null, this._cachedIndexBuffer = null, this._cachedEffectForVertexBuffers = null); } _createTexture() { return this._engine.createTexture(); } _deleteTexture(e) { e && this._engine.deleteTexture(e); } /** * Update the content of a dynamic texture * @param texture defines the texture to update * @param canvas defines the canvas containing the source * @param invertY defines if data must be stored with Y axis inverted * @param premulAlpha defines if alpha is stored as premultiplied * @param format defines the format of the data */ updateDynamicTexture(e, t, i, r = !1, s) { if (r === void 0 && (r = !1), e && e._hardwareTexture) { const n = t.getCanvasTexture(), a = e._hardwareTexture.underlyingResource; this._engine.copyTexture(a, n), e.isReady = !0; } } createDynamicTexture(e, t, i, r) { return e = Math.max(e, 1), t = Math.max(t, 1), this.createRawTexture(new Uint8Array(e * t * 4), e, t, 5, !1, !1, r); } createVideoElement(e) { return this._camera ? this._camera.createVideo(e) : null; } updateVideoTexture(e, t, i) { if (e && e._hardwareTexture && this._camera) { const r = e._hardwareTexture.underlyingResource; this._camera.updateVideoTexture(r, t, i); } } createRawTexture(e, t, i, r, s, n, a, l = null, o = 0, u = 0, h = !1) { const d = new ln(this, ts.Raw); if (d.format = r, d.generateMipMaps = s, d.samplingMode = a, d.invertY = n, d.baseWidth = t, d.baseHeight = i, d.width = d.baseWidth, d.height = d.baseHeight, d._compression = l, d.type = o, d._useSRGBBuffer = this._getUseSRGBBuffer(h, !s), this.updateRawTexture(d, e, r, n, l, o, d._useSRGBBuffer), d._hardwareTexture) { const f = d._hardwareTexture.underlyingResource, p = b9(a); this._setTextureSampling(f, p); } return this._internalTexturesCache.push(d), d; } createRawTexture2DArray(e, t, i, r, s, n, a, l, o = null, u = 0) { const h = new ln(this, ts.Raw2DArray); if (h.baseWidth = t, h.baseHeight = i, h.baseDepth = r, h.width = t, h.height = i, h.depth = r, h.format = s, h.type = u, h.generateMipMaps = n, h.samplingMode = l, h.is2DArray = !0, h._hardwareTexture) { const d = h._hardwareTexture.underlyingResource; this._engine.loadRawTexture2DArray(d, e, t, i, r, Gk(s, u), n, a); const f = b9(l); this._setTextureSampling(d, f); } return h.isReady = !0, this._internalTexturesCache.push(h), h; } updateRawTexture(e, t, i, r, s = null, n = 0, a = !1) { if (e) { if (t && e._hardwareTexture) { const l = e._hardwareTexture.underlyingResource; this._engine.loadRawTexture(l, t, e.width, e.height, Gk(i, n), e.generateMipMaps, e.invertY); } e.isReady = !0; } } // TODO: Refactor to share more logic with babylon.engine.ts version. /** * Usually called from Texture.ts. * Passed information to create a NativeTexture * @param url defines a value which contains one of the following: * * A conventional http URL, e.g. 'http://...' or 'file://...' * * A base64 string of in-line texture data, e.g. 'data:image/jpg;base64,/...' * * An indicator that data being passed using the buffer parameter, e.g. 'data:mytexture.jpg' * @param noMipmap defines a boolean indicating that no mipmaps shall be generated. Ignored for compressed textures. They must be in the file * @param invertY when true, image is flipped when loaded. You probably want true. 
Certain compressed textures may invert this if their default is inverted (eg. ktx) * @param scene needed for loading to the correct scene * @param samplingMode mode with should be used sample / access the texture (Default: Texture.TRILINEAR_SAMPLINGMODE) * @param onLoad optional callback to be called upon successful completion * @param onError optional callback to be called upon failure * @param buffer a source of a file previously fetched as either a base64 string, an ArrayBuffer (compressed or image format), HTMLImageElement (image format), or a Blob * @param fallback an internal argument in case the function must be called again, due to etc1 not having alpha capabilities * @param format internal format. Default: RGB when extension is '.jpg' else RGBA. Ignored for compressed textures * @param forcedExtension defines the extension to use to pick the right loader * @param mimeType defines an optional mime type * @param loaderOptions options to be passed to the loader * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns a InternalTexture for assignment back into BABYLON.Texture */ createTexture(e, t, i, r, s = 3, n = null, a = null, l = null, o = null, u = null, h = null, d, f, p, m = !1) { e = e || ""; const _ = e.substr(0, 5) === "data:", v = _ && e.indexOf(";base64,") !== -1, C = o || new ln(this, ts.Url), x = e; this._transformTextureUrl && !v && !o && !l && (e = this._transformTextureUrl(e)); const b = e.lastIndexOf("."), S = h || (b > -1 ? e.substring(b).toLowerCase() : ""); let M = null; for (const V of $e._TextureLoaders) if (V.canLoad(S)) { M = V; break; } r && r.addPendingData(C), C.url = e, C.generateMipMaps = !t, C.samplingMode = s, C.invertY = i, C._useSRGBBuffer = this._getUseSRGBBuffer(m, t), this.doNotHandleContextLost || (C._buffer = l); let R = null; n && !o && (R = C.onLoadedObservable.add(n)), o || this._internalTexturesCache.push(C); const w = (V, k) => { r && r.removePendingData(C), e === x ? (R && C.onLoadedObservable.remove(R), gi.UseFallbackTexture && this.createTexture(gi.FallbackTexture, t, C.invertY, r, s, null, a, l, C), a && a((V || "Unknown error") + (gi.UseFallbackTexture ? " - Fallback texture was used" : ""), k)) : (Ce.Warn(`Failed to load ${e}, falling back to ${x}`), this.createTexture(x, t, C.invertY, r, s, n, a, l, C, u, h, d, f)); }; if (M) throw new Error("Loading textures from IInternalTextureLoader not yet implemented."); { const V = (k) => { if (!C._hardwareTexture) { r && r.removePendingData(C); return; } const L = C._hardwareTexture.underlyingResource; this._engine.loadTexture(L, k, !t, i, C._useSRGBBuffer, () => { C.baseWidth = this._engine.getTextureWidth(L), C.baseHeight = this._engine.getTextureHeight(L), C.width = C.baseWidth, C.height = C.baseHeight, C.isReady = !0; const B = b9(s); this._setTextureSampling(L, B), r && r.removePendingData(C), C.onLoadedObservable.notifyObservers(C), C.onLoadedObservable.clear(); }, () => { throw new Error("Could not load a native texture."); }); }; if (_ && l) if (l instanceof ArrayBuffer) V(new Uint8Array(l)); else if (ArrayBuffer.isView(l)) V(l); else if (typeof l == "string") V(new Uint8Array(Ve.DecodeBase64(l))); else throw new Error("Unsupported buffer type"); else v ? 
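// NOTE (editorial sketch): at this point `createTexture` chooses its data source: a caller
// supplied buffer (ArrayBuffer, typed-array view, or base64 string) for "data:" URLs, a base64
// payload decoded straight from the URL, or a file fetched through _loadFile. Hypothetical call
// shapes -- the argument order follows the signature documented above, but the values are
// placeholders:
//   engine.createTexture("textures/albedo.png", false, true, scene);        // fetched via _loadFile
//   engine.createTexture("data:image/png;base64,....", false, true, scene); // decoded inline
// Unsupported buffer types throw, and IInternalTextureLoader-based formats are not implemented
// in the native engine.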
V(new Uint8Array(Ve.DecodeBase64(e))) : this._loadFile(e, (k) => V(new Uint8Array(k)), void 0, void 0, !0, (k, L) => { w("Unable to load " + (k && k.responseURL, L)); }); } return C; } /** * Wraps an external native texture in a Babylon texture. * @param texture defines the external texture * @param hasMipMaps defines whether the external texture has mip maps * @param samplingMode defines the sampling mode for the external texture (default: 3) * @returns the babylon internal texture */ wrapNativeTexture(e, t = !1, i = 3) { const r = new FZ(e, this._engine), s = new ln(this, ts.Unknown, !0); return s._hardwareTexture = r, s.baseWidth = this._engine.getTextureWidth(e), s.baseHeight = this._engine.getTextureHeight(e), s.width = s.baseWidth, s.height = s.baseHeight, s.isReady = !0, s.useMipMaps = t, this.updateTextureSamplingMode(i, s), s; } /** * Wraps an external web gl texture in a Babylon texture. * @returns the babylon internal texture */ wrapWebGLTexture() { throw new Error("wrapWebGLTexture is not supported, use wrapNativeTexture instead."); } _createDepthStencilTexture(e, t, i) { var r, s; const n = t.generateStencil || !1, a = t.samples || 1, l = i, o = new ln(this, ts.DepthStencil), u = (r = e.width) !== null && r !== void 0 ? r : e, h = (s = e.height) !== null && s !== void 0 ? s : e, d = this._engine.createFrameBuffer(o._hardwareTexture.underlyingResource, u, h, n, !0, a); return l._framebufferDepthStencil = d, o; } /** * @internal */ _releaseFramebufferObjects(e) { e && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DELETEFRAMEBUFFER), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.finishEncodingCommand()); } /** * @internal Engine abstraction for loading and creating an image bitmap from a given source string. * @param imageSource source to load the image from. * @param options An object that sets options for the image's extraction. * @returns ImageBitmap */ _createImageBitmapFromSource(e, t) { return new Promise((r, s) => { const n = this.createCanvasImage(); n.onload = () => { try { const a = this._engine.createImageBitmap(n); r(a); } catch (a) { s(`Error loading image ${n.src} with exception: ${a}`); } }, n.onerror = (a) => { s(`Error loading image ${n.src} with exception: ${a}`); }, n.src = e; }); } /** * Engine abstraction for createImageBitmap * @param image source for image * @param options An object that sets options for the image's extraction. 
* @returns ImageBitmap */ createImageBitmap(e, t) { return new Promise((i, r) => { if (Array.isArray(e)) { const s = e; if (s.length) { const n = this._engine.createImageBitmap(s[0]); if (n) { i(n); return; } } } r("Unsupported data for createImageBitmap."); }); } /** * Resize an image and returns the image data as an uint8array * @param image image to resize * @param bufferWidth destination buffer width * @param bufferHeight destination buffer height * @returns an uint8array containing RGBA values of bufferWidth * bufferHeight size */ resizeImageBitmap(e, t, i) { return this._engine.resizeImageBitmap(e, t, i); } /** * Creates a cube texture * @param rootUrl defines the url where the files to load is located * @param scene defines the current scene * @param files defines the list of files to load (1 per face) * @param noMipmap defines a boolean indicating that no mipmaps shall be generated (false by default) * @param onLoad defines an optional callback raised when the texture is loaded * @param onError defines an optional callback raised if there is an issue to load the texture * @param format defines the format of the data * @param forcedExtension defines the extension to use to pick the right loader * @param createPolynomials if a polynomial sphere should be created for the cube texture * @param lodScale defines the scale applied to environment texture. This manages the range of LOD level used for IBL according to the roughness * @param lodOffset defines the offset applied to environment texture. This manages first LOD level used for IBL according to the roughness * @param fallback defines texture to use while falling back when (compressed) texture file not found. * @param loaderOptions options to be passed to the loader * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns the cube texture as an InternalTexture */ createCubeTexture(e, t, i, r, s = null, n = null, a, l = null, o = !1, u = 0, h = 0, d = null, f, p = !1) { const m = d || new ln(this, ts.Cube); m.isCube = !0, m.url = e, m.generateMipMaps = !r, m._lodGenerationScale = u, m._lodGenerationOffset = h, m._useSRGBBuffer = this._getUseSRGBBuffer(p, !!r), this._doNotHandleContextLost || (m._extension = l, m._files = i); const _ = e.lastIndexOf("."); if ((l || (_ > -1 ? 
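// NOTE (editorial sketch): cube texture loading below takes one of two paths. A ".env" file is
// parsed for its prefiltered RGBD mip chain and uploaded with loadCubeTextureWithMips; any other
// extension must supply exactly six face files, which are fetched in parallel and handed to
// loadCubeTexture. Illustrative call shapes only (URLs and identifiers are placeholders):
//   engine.createCubeTexture("env/studio.env", scene, null, false, onLoad, onError);
//   engine.createCubeTexture("", scene, [pxUrl, pyUrl, pzUrl, nxUrl, nyUrl, nzUrl], false, onLoad, onError);
// Passing six files together with a ".env" extension is rejected ("Multi-file loading not
// allowed on env files"), as is any non-env request without exactly six files.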
e.substring(_).toLowerCase() : "")) === ".env") { const C = (x) => { const b = AU(x); m.width = b.width, m.height = b.width, CU(m, b); const S = b.specular; if (!S) throw new Error("Nothing else parsed so far"); m._lodGenerationScale = S.lodGenerationScale; const M = yU(x, b); m.format = 5, m.type = 0, m.generateMipMaps = !0, m.getEngine().updateTextureSamplingMode(De.TRILINEAR_SAMPLINGMODE, m), m._isRGBD = !0, m.invertY = !0, this._engine.loadCubeTextureWithMips(m._hardwareTexture.underlyingResource, M, !1, m._useSRGBBuffer, () => { m.isReady = !0, s && s(); }, () => { throw new Error("Could not load a native cube texture."); }); }; if (i && i.length === 6) throw new Error("Multi-file loading not allowed on env files."); { const x = (b, S) => { n && b && n(b.status + " " + b.statusText, S); }; this._loadFile(e, (b) => { C(new Uint8Array(b, 0, b.byteLength)); }, void 0, void 0, !0, x); } } else { if (!i || i.length !== 6) throw new Error("Cannot load cubemap because 6 files were not defined"); const C = [i[0], i[3], i[1], i[4], i[2], i[5]]; Promise.all(C.map((x) => this._loadFileAsync(x, void 0, !0).then((b) => new Uint8Array(b, 0, b.byteLength)))).then((x) => new Promise((b, S) => { this._engine.loadCubeTexture(m._hardwareTexture.underlyingResource, x, !r, !0, m._useSRGBBuffer, b, S); })).then(() => { m.isReady = !0, s && s(); }, (x) => { n && n(`Failed to load cubemap: ${x.message}`, x); }); } return this._internalTexturesCache.push(m), m; } /** @internal */ _createHardwareTexture() { return new FZ(this._createTexture(), this._engine); } /** @internal */ _createHardwareRenderTargetWrapper(e, t, i) { const r = new Sfe(e, t, i, this); return this._renderTargetWrapperCache.push(r), r; } /** @internal */ _createInternalTexture(e, t, i = !0, r = ts.Unknown) { var s, n, a; let l = !1, o = 0, u = 3, h = 5, d = !1, f = 1, p; t !== void 0 && typeof t == "object" ? (l = !!t.generateMipMaps, o = t.type === void 0 ? 0 : t.type, u = t.samplingMode === void 0 ? 3 : t.samplingMode, h = t.format === void 0 ? 5 : t.format, d = t.useSRGBBuffer === void 0 ? !1 : t.useSRGBBuffer, f = (s = t.samples) !== null && s !== void 0 ? s : 1, p = t.label) : l = !!t, d = this._getUseSRGBBuffer(d, !l), (o === 1 && !this._caps.textureFloatLinearFiltering || o === 2 && !this._caps.textureHalfFloatLinearFiltering) && (u = 1), o === 1 && !this._caps.textureFloat && (o = 0, Ce.Warn("Float textures are not supported. Type forced to TEXTURETYPE_UNSIGNED_BYTE")); const m = new ln(this, r), _ = (n = e.width) !== null && n !== void 0 ? n : e, v = (a = e.height) !== null && a !== void 0 ? a : e, C = e.layers || 0; if (C !== 0) throw new Error("Texture layers are not supported in Babylon Native"); const x = m._hardwareTexture.underlyingResource, b = Gk(h, o); return this._engine.initializeTexture(x, _, v, l, b, !0, d, f), this._setTextureSampling(x, b9(u)), m._useSRGBBuffer = d, m.baseWidth = _, m.baseHeight = v, m.width = _, m.height = v, m.depth = C, m.isReady = !0, m.samples = f, m.generateMipMaps = l, m.samplingMode = u, m.type = o, m.format = h, m.label = p, this._internalTexturesCache.push(m), m; } createRenderTargetTexture(e, t) { var i, r, s, n; const a = this._createHardwareRenderTargetWrapper(!1, !1, e); let l = !0, o = !1, u = !1, h, d = 1; t !== void 0 && typeof t == "object" && (l = (i = t.generateDepthBuffer) !== null && i !== void 0 ? i : !0, o = !!t.generateStencilBuffer, u = !!t.noColorAttachment, h = t.colorAttachment, d = (r = t.samples) !== null && r !== void 0 ? r : 1); const f = h || (u ? 
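// NOTE (editorial sketch): _createInternalTexture and createRenderTargetTexture in this block
// accept either a bare size or an options object; the fields actually read by the code are,
// roughly:
//   internal texture: { generateMipMaps, type, samplingMode, format, useSRGBBuffer, samples, label }
//   render target:    { generateDepthBuffer, generateStencilBuffer, noColorAttachment, colorAttachment, samples }
// Layered (2D-array) targets are rejected -- a non-zero `layers` throws, because texture layers
// are not supported in Babylon Native -- and float / half-float types fall back to nearest
// filtering when linear filtering of those types is unsupported.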
null : this._createInternalTexture(e, t, !0, ts.RenderTarget)), p = (s = e.width) !== null && s !== void 0 ? s : e, m = (n = e.height) !== null && n !== void 0 ? n : e, _ = this._engine.createFrameBuffer(f ? f._hardwareTexture.underlyingResource : null, p, m, o, l, d); return a._framebuffer = _, a._generateDepthBuffer = l, a._generateStencilBuffer = o, a._samples = d, a.setTextures(f), a; } updateRenderTargetTextureSampleCount(e, t) { return Ce.Warn("Updating render target sample count is not currently supported"), e.samples; } updateTextureSamplingMode(e, t) { if (t._hardwareTexture) { const i = b9(e); this._setTextureSampling(t._hardwareTexture.underlyingResource, i); } t.samplingMode = e; } bindFramebuffer(e, t, i, r, s) { const n = e; if (this._currentRenderTarget && this.unBindFramebuffer(this._currentRenderTarget), this._currentRenderTarget = e, t) throw new Error("Cuboid frame buffers are not yet supported in NativeEngine."); if (i || r) throw new Error("Required width/height for frame buffers not yet supported in NativeEngine."); n._framebufferDepthStencil ? this._bindUnboundFramebuffer(n._framebufferDepthStencil) : this._bindUnboundFramebuffer(n._framebuffer); } unBindFramebuffer(e, t = !1, i) { this._currentRenderTarget = null, i && i(), this._bindUnboundFramebuffer(null); } createDynamicVertexBuffer(e) { return this.createVertexBuffer(e, !0); } updateDynamicIndexBuffer(e, t, i = 0) { const r = e, s = this._normalizeIndexData(t); r.is32Bits = s.BYTES_PER_ELEMENT === 4, this._engine.updateDynamicIndexBuffer(r.nativeIndexBuffer, s.buffer, s.byteOffset, s.byteLength, i); } updateDynamicVertexBuffer(e, t, i, r) { const s = e, n = ArrayBuffer.isView(t) ? t : new Float32Array(t); this._engine.updateDynamicVertexBuffer(s.nativeVertexBuffer, n.buffer, n.byteOffset + (i ?? 0), r ?? n.byteLength); } // TODO: Refactor to share more logic with base Engine implementation. _setTexture(e, t, i = !1, r = !1) { const s = this._boundUniforms[e]; if (!s) return !1; if (!t) return this._boundTexturesCache[e] != null && (this._activeChannel = e, this._boundTexturesCache[e] = null), !1; if (t.video) this._activeChannel = e, t.update(); else if (t.delayLoadState === 4) return t.delayLoad(), !1; let n; return r ? n = t.depthStencilTexture : t.isReady() ? n = t.getInternalTexture() : t.isCube ? n = this.emptyCubeTexture : t.is3D ? n = this.emptyTexture3D : t.is2DArray ? n = this.emptyTexture2DArray : n = this.emptyTexture, this._activeChannel = e, !n || !n._hardwareTexture ? !1 : (this._setTextureWrapMode(n._hardwareTexture.underlyingResource, Kk(t.wrapU), Kk(t.wrapV), Kk(t.wrapR)), this._updateAnisotropicLevel(t), this._setTextureCore(s, n._hardwareTexture.underlyingResource), !0); } // filter is a NativeFilter.XXXX value. _setTextureSampling(e, t) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETTEXTURESAMPLING), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.finishEncodingCommand(); } // addressModes are NativeAddressMode.XXXX values. 
_setTextureWrapMode(e, t, i, r) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETTEXTUREWRAPMODE), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsUInt32(t), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.encodeCommandArgAsUInt32(r), this._commandBufferEncoder.finishEncodingCommand(); } _setTextureCore(e, t) { this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETTEXTURE), this._commandBufferEncoder.encodeCommandArgAsNativeData(e), this._commandBufferEncoder.encodeCommandArgAsNativeData(t), this._commandBufferEncoder.finishEncodingCommand(); } // TODO: Share more of this logic with the base implementation. // TODO: Rename to match naming in base implementation once refactoring allows different parameters. _updateAnisotropicLevel(e) { const t = e.getInternalTexture(), i = e.anisotropicFilteringLevel; !t || !t._hardwareTexture || t._cachedAnisotropicFilteringLevel !== i && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_SETTEXTUREANISOTROPICLEVEL), this._commandBufferEncoder.encodeCommandArgAsNativeData(t._hardwareTexture.underlyingResource), this._commandBufferEncoder.encodeCommandArgAsUInt32(i), this._commandBufferEncoder.finishEncodingCommand(), t._cachedAnisotropicFilteringLevel = i); } /** * @internal */ _bindTexture(e, t) { const i = this._boundUniforms[e]; if (i && t && t._hardwareTexture) { const r = t._hardwareTexture.underlyingResource; this._setTextureCore(i, r); } } _deleteBuffer(e) { e.nativeIndexBuffer && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DELETEINDEXBUFFER), this._commandBufferEncoder.encodeCommandArgAsNativeData(e.nativeIndexBuffer), this._commandBufferEncoder.finishEncodingCommand(), delete e.nativeIndexBuffer), e.nativeVertexBuffer && (this._commandBufferEncoder.startEncodingCommand(_native.Engine.COMMAND_DELETEVERTEXBUFFER), this._commandBufferEncoder.encodeCommandArgAsNativeData(e.nativeVertexBuffer), this._commandBufferEncoder.finishEncodingCommand(), delete e.nativeVertexBuffer); } /** * Create a canvas * @param width width * @param height height * @returns ICanvas interface */ createCanvas(e, t) { if (!_native.Canvas) throw new Error("Native Canvas plugin not available."); const i = new _native.Canvas(); return i.width = e, i.height = t, i; } /** * Create an image to use with canvas * @returns IImage interface */ createCanvasImage() { if (!_native.Canvas) throw new Error("Native Canvas plugin not available."); return new _native.Image(); } /** * Update a portion of an internal texture * @param texture defines the texture to update * @param imageData defines the data to store into the texture * @param xOffset defines the x coordinates of the update rectangle * @param yOffset defines the y coordinates of the update rectangle * @param width defines the width of the update rectangle * @param height defines the height of the update rectangle * @param faceIndex defines the face index if texture is a cube (0 by default) * @param lod defines the lod level to update (0 by default) * @param generateMipMaps defines whether to generate mipmaps or not */ updateTextureData(e, t, i, r, s, n, a = 0, l = 0, o = !1) { throw new Error("updateTextureData not implemented."); } /** * @internal */ _uploadCompressedDataToTextureDirectly(e, t, i, r, s, n = 0, a = 0) { throw new Error("_uploadCompressedDataToTextureDirectly not implemented."); } /** * @internal */ _uploadDataToTextureDirectly(e, 
t, i = 0, r = 0) { throw new Error("_uploadDataToTextureDirectly not implemented."); } /** * @internal */ _uploadArrayBufferViewToTexture(e, t, i = 0, r = 0) { throw new Error("_uploadArrayBufferViewToTexture not implemented."); } /** * @internal */ _uploadImageToTexture(e, t, i = 0, r = 0) { throw new Error("_uploadArrayBufferViewToTexture not implemented."); } getFontOffset(e) { return { ascent: 0, height: 0, descent: 0 }; } _readTexturePixels(e, t, i, r, s, n, a, l, o, u) { var h, d, f, p; if (r !== void 0 && r !== -1) throw new Error(`Reading cubemap faces is not supported, but faceIndex is ${r}.`); return this._engine.readTexture((h = e._hardwareTexture) === null || h === void 0 ? void 0 : h.underlyingResource, s ?? 0, o ?? 0, u ?? 0, t, i, (d = n == null ? void 0 : n.buffer) !== null && d !== void 0 ? d : null, (f = n == null ? void 0 : n.byteOffset) !== null && f !== void 0 ? f : 0, (p = n == null ? void 0 : n.byteLength) !== null && p !== void 0 ? p : 0).then((m) => (n || (n = new Uint8Array(m)), n)); } } yP.PROTOCOL_VERSION = 8; yP._createNativeDataStream = function() { return _native.NativeDataStream.VALIDATION_ENABLED ? new Hie() : new _5(); }; class Hie extends _5 { constructor() { super(); } writeUint32(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_UINT_32), super.writeUint32(e); } writeInt32(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_INT_32), super.writeInt32(e); } writeFloat32(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_FLOAT_32), super.writeFloat32(e); } writeUint32Array(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_UINT_32_ARRAY), super.writeUint32Array(e); } writeInt32Array(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_INT_32_ARRAY), super.writeInt32Array(e); } writeFloat32Array(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_FLOAT_32_ARRAY), super.writeFloat32Array(e); } writeNativeData(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_NATIVE_DATA), super.writeNativeData(e); } writeBoolean(e) { super.writeUint32(_native.NativeDataStream.VALIDATION_BOOLEAN), super.writeBoolean(e); } } var bH; (function(c) { c.LowPower = "low-power", c.HighPerformance = "high-performance"; })(bH || (bH = {})); var UC; (function(c) { c.DepthClipControl = "depth-clip-control", c.Depth32FloatStencil8 = "depth32float-stencil8", c.TextureCompressionBC = "texture-compression-bc", c.TextureCompressionETC2 = "texture-compression-etc2", c.TextureCompressionASTC = "texture-compression-astc", c.TimestampQuery = "timestamp-query", c.IndirectFirstInstance = "indirect-first-instance", c.ShaderF16 = "shader-f16", c.RG11B10UFloatRenderable = "rg11b10ufloat-renderable", c.BGRA8UnormStorage = "bgra8unorm-storage", c.Float32Filterable = "float32-filterable"; })(UC || (UC = {})); var EH; (function(c) { c.Unmapped = "unmapped", c.Pending = "pending", c.Mapped = "mapped"; })(EH || (EH = {})); var ya; (function(c) { c[c.MapRead = 1] = "MapRead", c[c.MapWrite = 2] = "MapWrite", c[c.CopySrc = 4] = "CopySrc", c[c.CopyDst = 8] = "CopyDst", c[c.Index = 16] = "Index", c[c.Vertex = 32] = "Vertex", c[c.Uniform = 64] = "Uniform", c[c.Storage = 128] = "Storage", c[c.Indirect = 256] = "Indirect", c[c.QueryResolve = 512] = "QueryResolve"; })(ya || (ya = {})); var c5; (function(c) { c[c.Read = 1] = "Read", c[c.Write = 2] = "Write"; })(c5 || (c5 = {})); var _g; (function(c) { c.E1d = "1d", c.E2d = "2d", c.E3d = "3d"; })(_g || (_g = {})); var fo; (function(c) { c[c.CopySrc = 1] = "CopySrc", c[c.CopyDst = 2] = "CopyDst", c[c.TextureBinding 
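// NOTE (editorial sketch): the enum objects in this block mirror the WebGPU spec. The numeric
// ones (buffer usage, texture usage, shader stage, color-write mask) are bit flags intended to
// be OR-ed together, e.g. a texture that is both sampled and rendered to would be requested,
// in WebGPU terms, with usage TextureBinding | RenderAttachment (4 | 16 = 20). The string enums
// (texture formats, address modes, compare and blend functions, ...) map one-to-one onto the
// corresponding WebGPU IDL strings. Explanatory comment only.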
= 4] = "TextureBinding", c[c.StorageBinding = 8] = "StorageBinding", c[c.RenderAttachment = 16] = "RenderAttachment"; })(fo || (fo = {})); var Ea; (function(c) { c.E1d = "1d", c.E2d = "2d", c.E2dArray = "2d-array", c.Cube = "cube", c.CubeArray = "cube-array", c.E3d = "3d"; })(Ea || (Ea = {})); var jA; (function(c) { c.All = "all", c.StencilOnly = "stencil-only", c.DepthOnly = "depth-only"; })(jA || (jA = {})); var Re; (function(c) { c.R8Unorm = "r8unorm", c.R8Snorm = "r8snorm", c.R8Uint = "r8uint", c.R8Sint = "r8sint", c.R16Uint = "r16uint", c.R16Sint = "r16sint", c.R16Float = "r16float", c.RG8Unorm = "rg8unorm", c.RG8Snorm = "rg8snorm", c.RG8Uint = "rg8uint", c.RG8Sint = "rg8sint", c.R32Uint = "r32uint", c.R32Sint = "r32sint", c.R32Float = "r32float", c.RG16Uint = "rg16uint", c.RG16Sint = "rg16sint", c.RG16Float = "rg16float", c.RGBA8Unorm = "rgba8unorm", c.RGBA8UnormSRGB = "rgba8unorm-srgb", c.RGBA8Snorm = "rgba8snorm", c.RGBA8Uint = "rgba8uint", c.RGBA8Sint = "rgba8sint", c.BGRA8Unorm = "bgra8unorm", c.BGRA8UnormSRGB = "bgra8unorm-srgb", c.RGB9E5UFloat = "rgb9e5ufloat", c.RGB10A2UINT = "rgb10a2uint", c.RGB10A2Unorm = "rgb10a2unorm", c.RG11B10UFloat = "rg11b10ufloat", c.RG32Uint = "rg32uint", c.RG32Sint = "rg32sint", c.RG32Float = "rg32float", c.RGBA16Uint = "rgba16uint", c.RGBA16Sint = "rgba16sint", c.RGBA16Float = "rgba16float", c.RGBA32Uint = "rgba32uint", c.RGBA32Sint = "rgba32sint", c.RGBA32Float = "rgba32float", c.Stencil8 = "stencil8", c.Depth16Unorm = "depth16unorm", c.Depth24Plus = "depth24plus", c.Depth24PlusStencil8 = "depth24plus-stencil8", c.Depth32Float = "depth32float", c.BC1RGBAUnorm = "bc1-rgba-unorm", c.BC1RGBAUnormSRGB = "bc1-rgba-unorm-srgb", c.BC2RGBAUnorm = "bc2-rgba-unorm", c.BC2RGBAUnormSRGB = "bc2-rgba-unorm-srgb", c.BC3RGBAUnorm = "bc3-rgba-unorm", c.BC3RGBAUnormSRGB = "bc3-rgba-unorm-srgb", c.BC4RUnorm = "bc4-r-unorm", c.BC4RSnorm = "bc4-r-snorm", c.BC5RGUnorm = "bc5-rg-unorm", c.BC5RGSnorm = "bc5-rg-snorm", c.BC6HRGBUFloat = "bc6h-rgb-ufloat", c.BC6HRGBFloat = "bc6h-rgb-float", c.BC7RGBAUnorm = "bc7-rgba-unorm", c.BC7RGBAUnormSRGB = "bc7-rgba-unorm-srgb", c.ETC2RGB8Unorm = "etc2-rgb8unorm", c.ETC2RGB8UnormSRGB = "etc2-rgb8unorm-srgb", c.ETC2RGB8A1Unorm = "etc2-rgb8a1unorm", c.ETC2RGB8A1UnormSRGB = "etc2-rgb8a1unorm-srgb", c.ETC2RGBA8Unorm = "etc2-rgba8unorm", c.ETC2RGBA8UnormSRGB = "etc2-rgba8unorm-srgb", c.EACR11Unorm = "eac-r11unorm", c.EACR11Snorm = "eac-r11snorm", c.EACRG11Unorm = "eac-rg11unorm", c.EACRG11Snorm = "eac-rg11snorm", c.ASTC4x4Unorm = "astc-4x4-unorm", c.ASTC4x4UnormSRGB = "astc-4x4-unorm-srgb", c.ASTC5x4Unorm = "astc-5x4-unorm", c.ASTC5x4UnormSRGB = "astc-5x4-unorm-srgb", c.ASTC5x5Unorm = "astc-5x5-unorm", c.ASTC5x5UnormSRGB = "astc-5x5-unorm-srgb", c.ASTC6x5Unorm = "astc-6x5-unorm", c.ASTC6x5UnormSRGB = "astc-6x5-unorm-srgb", c.ASTC6x6Unorm = "astc-6x6-unorm", c.ASTC6x6UnormSRGB = "astc-6x6-unorm-srgb", c.ASTC8x5Unorm = "astc-8x5-unorm", c.ASTC8x5UnormSRGB = "astc-8x5-unorm-srgb", c.ASTC8x6Unorm = "astc-8x6-unorm", c.ASTC8x6UnormSRGB = "astc-8x6-unorm-srgb", c.ASTC8x8Unorm = "astc-8x8-unorm", c.ASTC8x8UnormSRGB = "astc-8x8-unorm-srgb", c.ASTC10x5Unorm = "astc-10x5-unorm", c.ASTC10x5UnormSRGB = "astc-10x5-unorm-srgb", c.ASTC10x6Unorm = "astc-10x6-unorm", c.ASTC10x6UnormSRGB = "astc-10x6-unorm-srgb", c.ASTC10x8Unorm = "astc-10x8-unorm", c.ASTC10x8UnormSRGB = "astc-10x8-unorm-srgb", c.ASTC10x10Unorm = "astc-10x10-unorm", c.ASTC10x10UnormSRGB = "astc-10x10-unorm-srgb", c.ASTC12x10Unorm = "astc-12x10-unorm", c.ASTC12x10UnormSRGB = 
"astc-12x10-unorm-srgb", c.ASTC12x12Unorm = "astc-12x12-unorm", c.ASTC12x12UnormSRGB = "astc-12x12-unorm-srgb", c.Depth32FloatStencil8 = "depth32float-stencil8"; })(Re || (Re = {})); var XR; (function(c) { c.ClampToEdge = "clamp-to-edge", c.Repeat = "repeat", c.MirrorRepeat = "mirror-repeat"; })(XR || (XR = {})); var Xs; (function(c) { c.Nearest = "nearest", c.Linear = "linear"; })(Xs || (Xs = {})); var TH; (function(c) { c.Nearest = "nearest", c.Linear = "linear"; })(TH || (TH = {})); var yh; (function(c) { c.Never = "never", c.Less = "less", c.Equal = "equal", c.LessEqual = "less-equal", c.Greater = "greater", c.NotEqual = "not-equal", c.GreaterEqual = "greater-equal", c.Always = "always"; })(yh || (yh = {})); var $E; (function(c) { c[c.Vertex = 1] = "Vertex", c[c.Fragment = 2] = "Fragment", c[c.Compute = 4] = "Compute"; })($E || ($E = {})); var uT; (function(c) { c.Uniform = "uniform", c.Storage = "storage", c.ReadOnlyStorage = "read-only-storage"; })(uT || (uT = {})); var AT; (function(c) { c.Filtering = "filtering", c.NonFiltering = "non-filtering", c.Comparison = "comparison"; })(AT || (AT = {})); var K_; (function(c) { c.Float = "float", c.UnfilterableFloat = "unfilterable-float", c.Depth = "depth", c.Sint = "sint", c.Uint = "uint"; })(K_ || (K_ = {})); var dB; (function(c) { c.WriteOnly = "write-only"; })(dB || (dB = {})); var SH; (function(c) { c.Error = "error", c.Warning = "warning", c.Info = "info"; })(SH || (SH = {})); var MH; (function(c) { c.Validation = "validation", c.Internal = "internal"; })(MH || (MH = {})); var zO; (function(c) { c.Auto = "auto"; })(zO || (zO = {})); var B_; (function(c) { c.PointList = "point-list", c.LineList = "line-list", c.LineStrip = "line-strip", c.TriangleList = "triangle-list", c.TriangleStrip = "triangle-strip"; })(B_ || (B_ = {})); var nL; (function(c) { c.CCW = "ccw", c.CW = "cw"; })(nL || (nL = {})); var hO; (function(c) { c.None = "none", c.Front = "front", c.Back = "back"; })(hO || (hO = {})); var RH; (function(c) { c[c.Red = 1] = "Red", c[c.Green = 2] = "Green", c[c.Blue = 4] = "Blue", c[c.Alpha = 8] = "Alpha", c[c.All = 15] = "All"; })(RH || (RH = {})); var rf; (function(c) { c.Zero = "zero", c.One = "one", c.Src = "src", c.OneMinusSrc = "one-minus-src", c.SrcAlpha = "src-alpha", c.OneMinusSrcAlpha = "one-minus-src-alpha", c.Dst = "dst", c.OneMinusDst = "one-minus-dst", c.DstAlpha = "dst-alpha", c.OneMinusDstAlpha = "one-minus-dst-alpha", c.SrcAlphaSaturated = "src-alpha-saturated", c.Constant = "constant", c.OneMinusConstant = "one-minus-constant"; })(rf || (rf = {})); var ZE; (function(c) { c.Add = "add", c.Subtract = "subtract", c.ReverseSubtract = "reverse-subtract", c.Min = "min", c.Max = "max"; })(ZE || (ZE = {})); var y4; (function(c) { c.Keep = "keep", c.Zero = "zero", c.Replace = "replace", c.Invert = "invert", c.IncrementClamp = "increment-clamp", c.DecrementClamp = "decrement-clamp", c.IncrementWrap = "increment-wrap", c.DecrementWrap = "decrement-wrap"; })(y4 || (y4 = {})); var yT; (function(c) { c.Uint16 = "uint16", c.Uint32 = "uint32"; })(yT || (yT = {})); var So; (function(c) { c.Uint8x2 = "uint8x2", c.Uint8x4 = "uint8x4", c.Sint8x2 = "sint8x2", c.Sint8x4 = "sint8x4", c.Unorm8x2 = "unorm8x2", c.Unorm8x4 = "unorm8x4", c.Snorm8x2 = "snorm8x2", c.Snorm8x4 = "snorm8x4", c.Uint16x2 = "uint16x2", c.Uint16x4 = "uint16x4", c.Sint16x2 = "sint16x2", c.Sint16x4 = "sint16x4", c.Unorm16x2 = "unorm16x2", c.Unorm16x4 = "unorm16x4", c.Snorm16x2 = "snorm16x2", c.Snorm16x4 = "snorm16x4", c.Float16x2 = "float16x2", c.Float16x4 = 
"float16x4", c.Float32 = "float32", c.Float32x2 = "float32x2", c.Float32x3 = "float32x3", c.Float32x4 = "float32x4", c.Uint32 = "uint32", c.Uint32x2 = "uint32x2", c.Uint32x3 = "uint32x3", c.Uint32x4 = "uint32x4", c.Sint32 = "sint32", c.Sint32x2 = "sint32x2", c.Sint32x3 = "sint32x3", c.Sint32x4 = "sint32x4", c.UNORM10x10x10x2 = "unorm10-10-10-2"; })(So || (So = {})); var aL; (function(c) { c.Vertex = "vertex", c.Instance = "instance"; })(aL || (aL = {})); var PH; (function(c) { c.Beginning = "beginning", c.End = "end"; })(PH || (PH = {})); var IH; (function(c) { c.Beginning = "beginning", c.End = "end"; })(IH || (IH = {})); var au; (function(c) { c.Load = "load", c.Clear = "clear"; })(au || (au = {})); var _m; (function(c) { c.Store = "store", c.Discard = "discard"; })(_m || (_m = {})); var oL; (function(c) { c.Occlusion = "occlusion", c.Timestamp = "timestamp"; })(oL || (oL = {})); var lL; (function(c) { c.Opaque = "opaque", c.Premultiplied = "premultiplied"; })(lL || (lL = {})); var DH; (function(c) { c.Unknown = "unknown", c.Destroyed = "destroyed"; })(DH || (DH = {})); var OH; (function(c) { c.Validation = "validation", c.OutOfMemory = "out-of-memory", c.Internal = "internal"; })(OH || (OH = {})); class Io { constructor() { this.shaderLanguage = Xa.GLSL, this.vertexBufferKindToNumberOfComponents = {}; } _addUniformToLeftOverUBO(e, t, i) { let r = 0; [e, t, r] = this._getArraySize(e, t, i); for (let s = 0; s < this._webgpuProcessingContext.leftOverUniforms.length; s++) if (this._webgpuProcessingContext.leftOverUniforms[s].name === e) return; this._webgpuProcessingContext.leftOverUniforms.push({ name: e, type: t, length: r }); } _buildLeftOverUBO() { if (!this._webgpuProcessingContext.leftOverUniforms.length) return ""; const e = Io.LeftOvertUBOName; let t = this._webgpuProcessingContext.availableBuffers[e]; return t || (t = { binding: this._webgpuProcessingContext.getNextFreeUBOBinding() }, this._webgpuProcessingContext.availableBuffers[e] = t, this._addBufferBindingDescription(e, t, uT.Uniform, !0), this._addBufferBindingDescription(e, t, uT.Uniform, !1)), this._generateLeftOverUBOCode(e, t); } _collectBindingNames() { for (let e = 0; e < this._webgpuProcessingContext.bindGroupLayoutEntries.length; e++) { const t = this._webgpuProcessingContext.bindGroupLayoutEntries[e]; if (t === void 0) { this._webgpuProcessingContext.bindGroupLayoutEntries[e] = []; continue; } for (let i = 0; i < t.length; i++) { const r = this._webgpuProcessingContext.bindGroupLayoutEntries[e][i], s = this._webgpuProcessingContext.bindGroupLayoutEntryInfo[e][r.binding].name, n = this._webgpuProcessingContext.bindGroupLayoutEntryInfo[e][r.binding].nameInArrayOfTexture; r && (r.texture || r.externalTexture || r.storageTexture ? this._webgpuProcessingContext.textureNames.push(n) : r.sampler ? this._webgpuProcessingContext.samplerNames.push(s) : r.buffer && this._webgpuProcessingContext.bufferNames.push(s)); } } } _preCreateBindGroupEntries() { const e = this._webgpuProcessingContext.bindGroupEntries; for (let t = 0; t < this._webgpuProcessingContext.bindGroupLayoutEntries.length; t++) { const i = this._webgpuProcessingContext.bindGroupLayoutEntries[t], r = []; for (let s = 0; s < i.length; s++) { const n = this._webgpuProcessingContext.bindGroupLayoutEntries[t][s]; n.sampler || n.texture || n.storageTexture || n.externalTexture ? 
r.push({ binding: n.binding, resource: void 0 }) : n.buffer && r.push({ binding: n.binding, resource: { buffer: void 0, offset: 0, size: 0 } }); } e[t] = r; } } _addTextureBindingDescription(e, t, i, r, s, n) { let { groupIndex: a, bindingIndex: l } = t.textures[i]; if (this._webgpuProcessingContext.bindGroupLayoutEntries[a] || (this._webgpuProcessingContext.bindGroupLayoutEntries[a] = [], this._webgpuProcessingContext.bindGroupLayoutEntryInfo[a] = []), !this._webgpuProcessingContext.bindGroupLayoutEntryInfo[a][l]) { let o; r === null ? o = this._webgpuProcessingContext.bindGroupLayoutEntries[a].push({ binding: l, visibility: 0, externalTexture: {} }) : s ? o = this._webgpuProcessingContext.bindGroupLayoutEntries[a].push({ binding: l, visibility: 0, storageTexture: { access: dB.WriteOnly, format: s, viewDimension: r } }) : o = this._webgpuProcessingContext.bindGroupLayoutEntries[a].push({ binding: l, visibility: 0, texture: { sampleType: t.sampleType, viewDimension: r, multisampled: !1 } }); const u = t.isTextureArray ? e + i : e; this._webgpuProcessingContext.bindGroupLayoutEntryInfo[a][l] = { name: e, index: o - 1, nameInArrayOfTexture: u }; } l = this._webgpuProcessingContext.bindGroupLayoutEntryInfo[a][l].index, n ? this._webgpuProcessingContext.bindGroupLayoutEntries[a][l].visibility |= $E.Vertex : this._webgpuProcessingContext.bindGroupLayoutEntries[a][l].visibility |= $E.Fragment; } _addSamplerBindingDescription(e, t, i) { let { groupIndex: r, bindingIndex: s } = t.binding; if (this._webgpuProcessingContext.bindGroupLayoutEntries[r] || (this._webgpuProcessingContext.bindGroupLayoutEntries[r] = [], this._webgpuProcessingContext.bindGroupLayoutEntryInfo[r] = []), !this._webgpuProcessingContext.bindGroupLayoutEntryInfo[r][s]) { const n = this._webgpuProcessingContext.bindGroupLayoutEntries[r].push({ binding: s, visibility: 0, sampler: { type: t.type } }); this._webgpuProcessingContext.bindGroupLayoutEntryInfo[r][s] = { name: e, index: n - 1 }; } s = this._webgpuProcessingContext.bindGroupLayoutEntryInfo[r][s].index, i ? this._webgpuProcessingContext.bindGroupLayoutEntries[r][s].visibility |= $E.Vertex : this._webgpuProcessingContext.bindGroupLayoutEntries[r][s].visibility |= $E.Fragment; } _addBufferBindingDescription(e, t, i, r) { let { groupIndex: s, bindingIndex: n } = t.binding; if (this._webgpuProcessingContext.bindGroupLayoutEntries[s] || (this._webgpuProcessingContext.bindGroupLayoutEntries[s] = [], this._webgpuProcessingContext.bindGroupLayoutEntryInfo[s] = []), !this._webgpuProcessingContext.bindGroupLayoutEntryInfo[s][n]) { const a = this._webgpuProcessingContext.bindGroupLayoutEntries[s].push({ binding: n, visibility: 0, buffer: { type: i } }); this._webgpuProcessingContext.bindGroupLayoutEntryInfo[s][n] = { name: e, index: a - 1 }; } n = this._webgpuProcessingContext.bindGroupLayoutEntryInfo[s][n].index, r ? this._webgpuProcessingContext.bindGroupLayoutEntries[s][n].visibility |= $E.Vertex : this._webgpuProcessingContext.bindGroupLayoutEntries[s][n].visibility |= $E.Fragment; } _injectStartingAndEndingCode(e, t, i, r) { let s = e.indexOf(t); if (s < 0) return Ce.Error('No "main" function found in shader code! 
Processing aborted.'), e; if (i) { for (; s++ < e.length && e.charAt(s) != "{"; ) ; if (s < e.length) { const n = e.substring(0, s + 1), a = e.substring(s + 1); e = n + i + a; } } if (r) { const n = e.lastIndexOf("}"); e = e.substring(0, n), e += r + ` }`; } return e; } } Io.AutoSamplerSuffix = "Sampler"; Io.LeftOvertUBOName = "LeftOver"; Io.InternalsUBOName = "Internals"; Io.UniformSizes = { // GLSL types bool: 1, int: 1, float: 1, vec2: 2, ivec2: 2, uvec2: 2, vec3: 3, ivec3: 3, uvec3: 3, vec4: 4, ivec4: 4, uvec4: 4, mat2: 4, mat3: 12, mat4: 16, // WGSL types i32: 1, u32: 1, f32: 1, mat2x2: 4, mat3x3: 12, mat4x4: 16 }; Io._SamplerFunctionByWebGLSamplerType = { sampler2D: "sampler2D", sampler2DArray: "sampler2DArray", sampler2DShadow: "sampler2DShadow", sampler2DArrayShadow: "sampler2DArrayShadow", samplerCube: "samplerCube", sampler3D: "sampler3D" }; Io._TextureTypeByWebGLSamplerType = { sampler2D: "texture2D", sampler2DArray: "texture2DArray", sampler2DShadow: "texture2D", sampler2DArrayShadow: "texture2DArray", samplerCube: "textureCube", samplerCubeArray: "textureCubeArray", sampler3D: "texture3D" }; Io._GpuTextureViewDimensionByWebGPUTextureType = { textureCube: Ea.Cube, textureCubeArray: Ea.CubeArray, texture2D: Ea.E2d, texture2DArray: Ea.E2dArray, texture3D: Ea.E3d }; Io._SamplerTypeByWebGLSamplerType = { sampler2DShadow: "samplerShadow", sampler2DArrayShadow: "samplerShadow" }; Io._IsComparisonSamplerByWebGPUSamplerType = { samplerShadow: !0, samplerArrayShadow: !0, sampler: !1 }; class Lfe { get isAsync() { return !1; } get isReady() { return !!this.stages; } constructor(e, t) { this.bindGroupLayouts = {}, this._name = "unnamed", this.shaderProcessingContext = e, this._leftOverUniformsByName = {}, this.engine = t, this.vertexBufferKindToType = {}; } _handlesSpectorRebuildCallback() { } _fillEffectInformation(e, t, i, r, s, n, a, l) { const o = this.engine; e._fragmentSourceCode = "", e._vertexSourceCode = ""; const u = this.shaderProcessingContext.availableTextures; let h; for (h = 0; h < s.length; h++) { const p = s[h], m = u[s[h]]; m == null || m == null ? (s.splice(h, 1), h--) : n[p] = h; } for (const p of o.getAttributes(this, a)) l.push(p); this.buildUniformLayout(); const d = [], f = []; for (h = 0; h < a.length; h++) { const p = l[h]; p >= 0 && (d.push(a[h]), f.push(p)); } this.shaderProcessingContext.attributeNamesFromEffect = d, this.shaderProcessingContext.attributeLocationsFromEffect = f; } /** @internal */ /** * Build the uniform buffer used in the material. */ buildUniformLayout() { if (this.shaderProcessingContext.leftOverUniforms.length) { this.uniformBuffer = new Vi(this.engine, void 0, void 0, "leftOver-" + this._name); for (const e of this.shaderProcessingContext.leftOverUniforms) { const t = e.type.replace(/^(.*?)(<.*>)?$/, "$1"), i = Io.UniformSizes[t]; this.uniformBuffer.addUniform(e.name, i, e.length), this._leftOverUniformsByName[e.name] = e.type; } this.uniformBuffer.create(); } } /** * Release all associated resources. **/ dispose() { this.uniformBuffer && this.uniformBuffer.dispose(); } /** * Sets an integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. */ setInt(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateInt(e, t); } /** * Sets an int2 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int2. * @param y Second int in int2. 
*/ setInt2(e, t, i) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateInt2(e, t, i); } /** * Sets an int3 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int3. * @param y Second int in int3. * @param z Third int in int3. */ setInt3(e, t, i, r) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateInt3(e, t, i, r); } /** * Sets an int4 value on a uniform variable. * @param uniformName Name of the variable. * @param x First int in int4. * @param y Second int in int4. * @param z Third int in int4. * @param w Fourth int in int4. */ setInt4(e, t, i, r, s) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateInt4(e, t, i, r, s); } /** * Sets an int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateIntArray(e, t); } /** * Sets an int array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray2(e, t) { this.setIntArray(e, t); } /** * Sets an int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray3(e, t) { this.setIntArray(e, t); } /** * Sets an int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setIntArray4(e, t) { this.setIntArray(e, t); } /** * Sets an unsigned integer value on a uniform variable. * @param uniformName Name of the variable. * @param value Value to be set. */ setUInt(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateUInt(e, t); } /** * Sets an unsigned int2 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint2. * @param y Second unsigned int in uint2. */ setUInt2(e, t, i) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateUInt2(e, t, i); } /** * Sets an unsigned int3 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint3. * @param y Second unsigned int in uint3. * @param z Third unsigned int in uint3. */ setUInt3(e, t, i, r) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateUInt3(e, t, i, r); } /** * Sets an unsigned int4 value on a uniform variable. * @param uniformName Name of the variable. * @param x First unsigned int in uint4. * @param y Second unsigned int in uint4. * @param z Third unsigned int in uint4. * @param w Fourth unsigned int in uint4. */ setUInt4(e, t, i, r, s) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateUInt4(e, t, i, r, s); } /** * Sets an unsigned int array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateUIntArray(e, t); } /** * Sets an unsigned int array 2 on a uniform variable. (Array is specified as single array eg. 
[1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray2(e, t) { this.setUIntArray(e, t); } /** * Sets an unsigned int array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray3(e, t) { this.setUIntArray(e, t); } /** * Sets an unsigned int array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setUIntArray4(e, t) { this.setUIntArray(e, t); } /** * Sets an array on a uniform variable. * @param uniformName Name of the variable. * @param array array to be set. */ setArray(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateArray(e, t); } /** * Sets an array 2 on a uniform variable. (Array is specified as single array eg. [1,2,3,4] will result in [[1,2],[3,4]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray2(e, t) { this.setArray(e, t); } /** * Sets an array 3 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6] will result in [[1,2,3],[4,5,6]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. * @returns this effect. */ setArray3(e, t) { this.setArray(e, t); } /** * Sets an array 4 on a uniform variable. (Array is specified as single array eg. [1,2,3,4,5,6,7,8] will result in [[1,2,3,4],[5,6,7,8]] in the shader) * @param uniformName Name of the variable. * @param array array to be set. */ setArray4(e, t) { this.setArray(e, t); } /** * Sets matrices on a uniform variable. * @param uniformName Name of the variable. * @param matrices matrices to be set. */ setMatrices(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateMatrices(e, t); } /** * Sets matrix on a uniform variable. * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateMatrix(e, t); } /** * Sets a 3x3 matrix on a uniform variable. (Specified as [1,2,3,4,5,6,7,8,9] will result in [1,2,3][4,5,6][7,8,9] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix3x3(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateMatrix3x3(e, t); } /** * Sets a 2x2 matrix on a uniform variable. (Specified as [1,2,3,4] will result in [1,2][3,4] matrix) * @param uniformName Name of the variable. * @param matrix matrix to be set. */ setMatrix2x2(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateMatrix2x2(e, t); } /** * Sets a float on a uniform variable. * @param uniformName Name of the variable. * @param value value to be set. * @returns this effect. */ setFloat(e, t) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateFloat(e, t); } /** * Sets a Vector2 on a uniform variable. * @param uniformName Name of the variable. * @param vector2 vector2 to be set. */ setVector2(e, t) { this.setFloat2(e, t.x, t.y); } /** * Sets a float2 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float2. * @param y Second float in float2. 
*/ setFloat2(e, t, i) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateFloat2(e, t, i); } /** * Sets a Vector3 on a uniform variable. * @param uniformName Name of the variable. * @param vector3 Value to be set. */ setVector3(e, t) { this.setFloat3(e, t.x, t.y, t.z); } /** * Sets a float3 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float3. * @param y Second float in float3. * @param z Third float in float3. */ setFloat3(e, t, i, r) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateFloat3(e, t, i, r); } /** * Sets a Vector4 on a uniform variable. * @param uniformName Name of the variable. * @param vector4 Value to be set. */ setVector4(e, t) { this.setFloat4(e, t.x, t.y, t.z, t.w); } /** * Sets a Quaternion on a uniform variable. * @param uniformName Name of the variable. * @param quaternion Value to be set. */ setQuaternion(e, t) { this.setFloat4(e, t.x, t.y, t.z, t.w); } /** * Sets a float4 on a uniform variable. * @param uniformName Name of the variable. * @param x First float in float4. * @param y Second float in float4. * @param z Third float in float4. * @param w Fourth float in float4. * @returns this effect. */ setFloat4(e, t, i, r, s) { !this.uniformBuffer || !this._leftOverUniformsByName[e] || this.uniformBuffer.updateFloat4(e, t, i, r, s); } /** * Sets a Color3 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. */ setColor3(e, t) { this.setFloat3(e, t.r, t.g, t.b); } /** * Sets a Color4 on a uniform variable. * @param uniformName Name of the variable. * @param color3 Value to be set. * @param alpha Alpha value to be set. */ setColor4(e, t, i) { this.setFloat4(e, t.r, t.g, t.b, i); } /** * Sets a Color4 on a uniform variable * @param uniformName defines the name of the variable * @param color4 defines the value to be set */ setDirectColor4(e, t) { this.setFloat4(e, t.r, t.g, t.b, t.a); } _getVertexShaderCode() { var e; return (e = this.sources) === null || e === void 0 ? void 0 : e.vertex; } _getFragmentShaderCode() { var e; return (e = this.sources) === null || e === void 0 ? void 0 : e.fragment; } } const Nfe = 4, Ffe = 65536, UZ = { // GLSL types mat2: 2, mat3: 3, mat4: 4, // WGSL types mat2x2: 2, mat3x3: 3, mat4x4: 4 }; class mg { static get KnownUBOs() { return mg._SimplifiedKnownBindings ? mg._SimplifiedKnownUBOs : mg._KnownUBOs; } constructor(e) { this.shaderLanguage = e, this._attributeNextLocation = 0, this._varyingNextLocation = 0, this.freeGroupIndex = 0, this.freeBindingIndex = 0, this.availableVaryings = {}, this.availableAttributes = {}, this.availableBuffers = {}, this.availableTextures = {}, this.availableSamplers = {}, this.orderedAttributes = [], this.bindGroupLayoutEntries = [], this.bindGroupLayoutEntryInfo = [], this.bindGroupEntries = [], this.bufferNames = [], this.textureNames = [], this.samplerNames = [], this.leftOverUniforms = [], this._findStartingGroupBinding(); } _findStartingGroupBinding() { const e = mg.KnownUBOs, t = []; for (const i in e) { const r = e[i].binding; r.groupIndex !== -1 && (t[r.groupIndex] === void 0 ? t[r.groupIndex] = r.bindingIndex : t[r.groupIndex] = Math.max(t[r.groupIndex], r.bindingIndex)); } this.freeGroupIndex = t.length - 1, this.freeGroupIndex === 0 ? 
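/*
 * With the simplified known-binding table only "Scene" reserves a fixed slot (group 0, binding 0), so the
 * scan above leaves freeGroupIndex at 0 and the branch below bumps dynamic allocation to start at
 * group 1, binding 0. With the full table (groups 0-2 reserved for Scene, lights and material/mesh data),
 * allocation instead resumes right after the last reserved binding of the last reserved group.
 */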
(this.freeGroupIndex++, this.freeBindingIndex = 0) : this.freeBindingIndex = t[t.length - 1] + 1; } getAttributeNextLocation(e, t = 0) { var i; const r = this._attributeNextLocation; return this._attributeNextLocation += ((i = UZ[e]) !== null && i !== void 0 ? i : 1) * (t || 1), r; } getVaryingNextLocation(e, t = 0) { var i; const r = this._varyingNextLocation; return this._varyingNextLocation += ((i = UZ[e]) !== null && i !== void 0 ? i : 1) * (t || 1), r; } getNextFreeUBOBinding() { return this._getNextFreeBinding(1); } _getNextFreeBinding(e) { if (this.freeBindingIndex > Ffe - e && (this.freeGroupIndex++, this.freeBindingIndex = 0), this.freeGroupIndex === Nfe) throw "Too many textures or UBOs have been declared and it is not supported in WebGPU."; const t = { groupIndex: this.freeGroupIndex, bindingIndex: this.freeBindingIndex }; return this.freeBindingIndex += e, t; } } mg._SimplifiedKnownBindings = !0; mg._SimplifiedKnownUBOs = { Scene: { binding: { groupIndex: 0, bindingIndex: 0 } }, Light0: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light1: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light2: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light3: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light4: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light5: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light6: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light7: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light8: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light9: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light10: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light11: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light12: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light13: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light14: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light15: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light16: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light17: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light18: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light19: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light20: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light21: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light22: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light23: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light24: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light25: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light26: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light27: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light28: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light29: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light30: { binding: { groupIndex: -1, bindingIndex: -1 } }, Light31: { binding: { groupIndex: -1, bindingIndex: -1 } }, Material: { binding: { groupIndex: -1, bindingIndex: -1 } }, Mesh: { binding: { groupIndex: -1, bindingIndex: -1 } }, Internals: { binding: { groupIndex: -1, bindingIndex: -1 } } }; mg._KnownUBOs = { Scene: { binding: { groupIndex: 0, bindingIndex: 0 } }, Light0: { binding: { groupIndex: 1, bindingIndex: 0 } }, Light1: { binding: { groupIndex: 1, bindingIndex: 1 } }, Light2: { binding: { groupIndex: 1, bindingIndex: 2 } }, Light3: { binding: { groupIndex: 1, bindingIndex: 3 } }, Light4: { binding: { groupIndex: 1, bindingIndex: 4 } }, Light5: { binding: { groupIndex: 1, bindingIndex: 5 } }, Light6: { binding: { groupIndex: 1, 
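/*
 * Fixed (non-simplified) UBO layout convention encoded by this table: group 0 holds the Scene UBO,
 * group 1 holds one binding per light (Light0..Light31 at bindings 0..31), and group 2 holds the
 * Material, Mesh and Internals UBOs. Entries with groupIndex -1 in the simplified table above are not
 * pinned to a slot; they are allocated on demand through getNextFreeUBOBinding().
 */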
bindingIndex: 6 } }, Light7: { binding: { groupIndex: 1, bindingIndex: 7 } }, Light8: { binding: { groupIndex: 1, bindingIndex: 8 } }, Light9: { binding: { groupIndex: 1, bindingIndex: 9 } }, Light10: { binding: { groupIndex: 1, bindingIndex: 10 } }, Light11: { binding: { groupIndex: 1, bindingIndex: 11 } }, Light12: { binding: { groupIndex: 1, bindingIndex: 12 } }, Light13: { binding: { groupIndex: 1, bindingIndex: 13 } }, Light14: { binding: { groupIndex: 1, bindingIndex: 14 } }, Light15: { binding: { groupIndex: 1, bindingIndex: 15 } }, Light16: { binding: { groupIndex: 1, bindingIndex: 16 } }, Light17: { binding: { groupIndex: 1, bindingIndex: 17 } }, Light18: { binding: { groupIndex: 1, bindingIndex: 18 } }, Light19: { binding: { groupIndex: 1, bindingIndex: 19 } }, Light20: { binding: { groupIndex: 1, bindingIndex: 20 } }, Light21: { binding: { groupIndex: 1, bindingIndex: 21 } }, Light22: { binding: { groupIndex: 1, bindingIndex: 22 } }, Light23: { binding: { groupIndex: 1, bindingIndex: 23 } }, Light24: { binding: { groupIndex: 1, bindingIndex: 24 } }, Light25: { binding: { groupIndex: 1, bindingIndex: 25 } }, Light26: { binding: { groupIndex: 1, bindingIndex: 26 } }, Light27: { binding: { groupIndex: 1, bindingIndex: 27 } }, Light28: { binding: { groupIndex: 1, bindingIndex: 28 } }, Light29: { binding: { groupIndex: 1, bindingIndex: 29 } }, Light30: { binding: { groupIndex: 1, bindingIndex: 30 } }, Light31: { binding: { groupIndex: 1, bindingIndex: 31 } }, Material: { binding: { groupIndex: 2, bindingIndex: 0 } }, Mesh: { binding: { groupIndex: 2, bindingIndex: 1 } }, Internals: { binding: { groupIndex: 2, bindingIndex: 2 } } }; class Bfe extends Io { constructor() { super(...arguments), this._missingVaryings = [], this._textureArrayProcessing = [], this._vertexIsGLES3 = !1, this._fragmentIsGLES3 = !1, this.shaderLanguage = Xa.GLSL, this.parseGLES3 = !0; } _getArraySize(e, t, i) { let r = 0; const s = e.indexOf("["), n = e.indexOf("]"); if (s > 0 && n > 0) { const a = e.substring(s + 1, n); r = +a, isNaN(r) && (r = +i[a.trim()]), e = e.substr(0, s); } return [e, t, r]; } initializeShaders(e) { this._webgpuProcessingContext = e, this._missingVaryings.length = 0, this._textureArrayProcessing.length = 0, this.attributeKeywordName = void 0, this.varyingVertexKeywordName = void 0, this.varyingFragmentKeywordName = void 0; } preProcessShaderCode(e, t) { const i = `// Internals UBO uniform ${Io.InternalsUBOName} { float yFactor_; float textureOutputHeight_; }; `, r = e.indexOf("// Internals UBO") !== -1; return t ? (this._fragmentIsGLES3 = e.indexOf("#version 3") !== -1, this._fragmentIsGLES3 && (this.varyingFragmentKeywordName = "in"), r ? e : i + `##INJECTCODE## ` + e) : (this._vertexIsGLES3 = e.indexOf("#version 3") !== -1, this._vertexIsGLES3 && (this.attributeKeywordName = "in", this.varyingVertexKeywordName = "out"), r ? e : i + e); } varyingCheck(e, t) { const i = /(flat\s)?\s*\bout\b/, r = /(flat\s)?\s*\bin\b/, s = /(flat\s)?\s*\bvarying\b/; return (t && this._fragmentIsGLES3 ? r : !t && this._vertexIsGLES3 ? i : s).test(e); } varyingProcessor(e, t, i) { var r; this._preProcessors = i; const s = /\s*(flat)?\s*out\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s+(\S+)\s*;/gm, n = /\s*(flat)?\s*in\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s+(\S+)\s*;/gm, a = /\s*(flat)?\s*varying\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s+(\S+)\s*;/gm, o = (t && this._fragmentIsGLES3 ? n : !t && this._vertexIsGLES3 ? s : a).exec(e); if (o !== null) { const u = (r = o[1]) !== null && r !== void 0 ? 
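/*
 * Rewrites the matched varying declaration with an explicit location. Illustrative effect, assuming the
 * varying "vUV" of type vec2 is assigned location 3:
 *
 *   vertex stage:    varying vec2 vUV;   becomes   layout(location = 3) out vec2 vUV;
 *   fragment stage:  varying vec2 vUV;   becomes   layout(location = 3) in vec2 vUV;
 *
 * A fragment varying with no matching vertex-shader declaration is dropped and reported through Ce.Warn.
 */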
r : "", h = o[2], d = o[3]; let f; t ? (f = this._webgpuProcessingContext.availableVaryings[d], this._missingVaryings[f] = "", f === void 0 && Ce.Warn(`Invalid fragment shader: The varying named "${d}" is not declared in the vertex shader! This declaration will be ignored.`)) : (f = this._webgpuProcessingContext.getVaryingNextLocation(h, this._getArraySize(d, h, i)[2]), this._webgpuProcessingContext.availableVaryings[d] = f, this._missingVaryings[f] = `layout(location = ${f}) ${u} in ${h} ${d};`), e = e.replace(o[0], f === void 0 ? "" : `layout(location = ${f}) ${u} ${t ? "in" : "out"} ${h} ${d};`); } return e; } attributeProcessor(e, t) { this._preProcessors = t; const i = /\s*in\s+(\S+)\s+(\S+)\s*;/gm, r = /\s*attribute\s+(\S+)\s+(\S+)\s*;/gm, n = (this._vertexIsGLES3 ? i : r).exec(e); if (n !== null) { const a = n[1], l = n[2], o = this._webgpuProcessingContext.getAttributeNextLocation(a, this._getArraySize(l, a, t)[2]); this._webgpuProcessingContext.availableAttributes[l] = o, this._webgpuProcessingContext.orderedAttributes[o] = l; const u = this.vertexBufferKindToNumberOfComponents[l]; if (u !== void 0) { const h = u < 0 ? u === -1 ? "int" : "ivec" + -u : u === 1 ? "uint" : "uvec" + u, d = `_int_${l}_`; e = e.replace(n[0], `layout(location = ${o}) in ${h} ${d}; ${a} ${l} = ${a}(${d});`); } else e = e.replace(n[0], `layout(location = ${o}) in ${a} ${l};`); } return e; } uniformProcessor(e, t, i) { var r; this._preProcessors = i; const n = /\s*uniform\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s+(\S+)\s*;/gm.exec(e); if (n !== null) { let a = n[1], l = n[2]; if (a.indexOf("sampler") === 0 || a.indexOf("sampler") === 1) { let o = 0; [l, a, o] = this._getArraySize(l, a, i); let u = this._webgpuProcessingContext.availableTextures[l]; if (!u) { u = { autoBindSampler: !0, isTextureArray: o > 0, isStorageTexture: !1, textures: [], sampleType: K_.Float }; for (let w = 0; w < (o || 1); ++w) u.textures.push(this._webgpuProcessingContext.getNextFreeUBOBinding()); } const h = (r = Io._SamplerTypeByWebGLSamplerType[a]) !== null && r !== void 0 ? r : "sampler", d = !!Io._IsComparisonSamplerByWebGPUSamplerType[h], f = d ? AT.Comparison : AT.Filtering, p = l + Io.AutoSamplerSuffix; let m = this._webgpuProcessingContext.availableSamplers[p]; m || (m = { binding: this._webgpuProcessingContext.getNextFreeUBOBinding(), type: f }); const _ = a.charAt(0) === "u" ? "u" : a.charAt(0) === "i" ? "i" : ""; _ && (a = a.substr(1)); const v = d ? K_.Depth : _ === "u" ? K_.Uint : _ === "i" ? K_.Sint : K_.Float; u.sampleType = v; const C = o > 0, x = m.binding.groupIndex, b = m.binding.bindingIndex, S = Io._SamplerFunctionByWebGLSamplerType[a], M = Io._TextureTypeByWebGLSamplerType[a], R = Io._GpuTextureViewDimensionByWebGPUTextureType[M]; if (!C) o = 1, e = `layout(set = ${x}, binding = ${b}) uniform ${h} ${p}; layout(set = ${u.textures[0].groupIndex}, binding = ${u.textures[0].bindingIndex}) uniform ${_}${M} ${l}Texture; #define ${l} ${_}${S}(${l}Texture, ${p})`; else { const w = []; w.push(`layout(set = ${x}, binding = ${b}) uniform ${_}${h} ${p};`), e = ` `; for (let V = 0; V < o; ++V) { const k = u.textures[V].groupIndex, L = u.textures[V].bindingIndex; w.push(`layout(set = ${k}, binding = ${L}) uniform ${M} ${l}Texture${V};`), e += `${V > 0 ? 
` ` : ""}#define ${l}${V} ${_}${S}(${l}Texture${V}, ${p})`; } e = w.join(` `) + e, this._textureArrayProcessing.push(l); } this._webgpuProcessingContext.availableTextures[l] = u, this._webgpuProcessingContext.availableSamplers[p] = m, this._addSamplerBindingDescription(p, m, !t); for (let w = 0; w < o; ++w) this._addTextureBindingDescription(l, u, w, R, null, !t); } else this._addUniformToLeftOverUBO(l, a, i), e = ""; } return e; } uniformBufferProcessor(e, t) { const r = /uniform\s+(\w+)/gm.exec(e); if (r !== null) { const s = r[1]; let n = this._webgpuProcessingContext.availableBuffers[s]; if (!n) { const a = mg.KnownUBOs[s]; let l; a && a.binding.groupIndex !== -1 ? l = a.binding : l = this._webgpuProcessingContext.getNextFreeUBOBinding(), n = { binding: l }, this._webgpuProcessingContext.availableBuffers[s] = n; } this._addBufferBindingDescription(s, n, uT.Uniform, !t), e = e.replace("uniform", `layout(set = ${n.binding.groupIndex}, binding = ${n.binding.bindingIndex}) uniform`); } return e; } postProcessor(e, t, i, r, s) { const n = e.search(/#extension.+GL_EXT_draw_buffers.+require/) !== -1, a = /#extension.+(GL_OVR_multiview2|GL_OES_standard_derivatives|GL_EXT_shader_texture_lod|GL_EXT_frag_depth|GL_EXT_draw_buffers).+(enable|require)/g; if (e = e.replace(a, ""), e = e.replace(/texture2D\s*\(/g, "texture("), i) { const l = e.indexOf("gl_FragCoord") >= 0, o = ` glFragCoord_ = gl_FragCoord; if (yFactor_ == 1.) { glFragCoord_.y = textureOutputHeight_ - glFragCoord_.y; } `, u = l ? `vec4 glFragCoord_; ` : "", h = e.search(/layout *\(location *= *0\) *out/g) !== -1; if (e = e.replace(/texture2DLodEXT\s*\(/g, "textureLod("), e = e.replace(/textureCubeLodEXT\s*\(/g, "textureLod("), e = e.replace(/textureCube\s*\(/g, "texture("), e = e.replace(/gl_FragDepthEXT/g, "gl_FragDepth"), e = e.replace(/gl_FragColor/g, "glFragColor"), e = e.replace(/gl_FragData/g, "glFragData"), e = e.replace(/gl_FragCoord/g, "glFragCoord_"), !this._fragmentIsGLES3) e = e.replace(/void\s+?main\s*\(/g, (n || h ? "" : `layout(location = 0) out vec4 glFragColor; `) + "void main("); else { const d = /^\s*out\s+\S+\s+\S+\s*;/gm.exec(e); d !== null && (e = e.substring(0, d.index) + "layout(location = 0) " + e.substring(d.index)); } e = e.replace(/dFdy/g, "(-yFactor_)*dFdy"), e = e.replace("##INJECTCODE##", u), l && (e = this._injectStartingAndEndingCode(e, "void main", o)); } else if (e = e.replace(/gl_InstanceID/g, "gl_InstanceIndex"), e = e.replace(/gl_VertexID/g, "gl_VertexIndex"), t.indexOf("#define MULTIVIEW") !== -1) return `#extension GL_OVR_multiview2 : require layout (num_views = 2) in; ` + e; if (!i) { const l = e.lastIndexOf("}"); e = e.substring(0, l), e += `gl_Position.y *= yFactor_; `, s.isNDCHalfZRange || (e += `gl_Position.z = (gl_Position.z + gl_Position.w) / 2.0; `), e += "}"; } return e; } _applyTextureArrayProcessing(e, t) { const i = new RegExp(t + "\\s*\\[(.+)?\\]", "gm"); let r = i.exec(e); for (; r !== null; ) { const s = r[1]; let n = +s; this._preProcessors && isNaN(n) && (n = +this._preProcessors[s.trim()]), e = e.replace(r[0], t + n), r = i.exec(e); } return e; } _generateLeftOverUBOCode(e, t) { let i = `layout(set = ${t.binding.groupIndex}, binding = ${t.binding.bindingIndex}) uniform ${e} { `; for (const r of this._webgpuProcessingContext.leftOverUniforms) r.length > 0 ? 
i += ` ${r.type} ${r.name}[${r.length}]; ` : i += ` ${r.type} ${r.name}; `; return i += `}; `, i; } finalizeShaders(e, t) { for (let r = 0; r < this._textureArrayProcessing.length; ++r) { const s = this._textureArrayProcessing[r]; e = this._applyTextureArrayProcessing(e, s), t = this._applyTextureArrayProcessing(t, s); } for (let r = 0; r < this._missingVaryings.length; ++r) { const s = this._missingVaryings[r]; s && s.length > 0 && (t = s + ` ` + t); } const i = this._buildLeftOverUBO(); return e = i + e, t = i + t, this._collectBindingNames(), this._preCreateBindGroupEntries(), this._preProcessors = null, this.vertexBufferKindToNumberOfComponents = {}, { vertexCode: e, fragmentCode: t }; } } const Ufe = "bonesDeclaration", Vfe = `#if NUM_BONE_INFLUENCERS>0 attribute matricesIndices : vec4;attribute matricesWeights : vec4; #if NUM_BONE_INFLUENCERS>4 attribute matricesIndicesExtra : vec4;attribute matricesWeightsExtra : vec4; #endif #ifndef BAKED_VERTEX_ANIMATION_TEXTURE #ifdef BONETEXTURE var boneSampler : texture_2d;uniform boneTextureWidth : f32; #else uniform mBones : array; #ifdef BONES_VELOCITY_ENABLED uniform mPreviousBones : array; #endif #endif #ifdef BONETEXTURE fn readMatrixFromRawSampler(smp : texture_2d,index : f32)->mat4x4 {let offset=i32(index) *4; let m0=textureLoad(smp,vec2(offset+0,0),0);let m1=textureLoad(smp,vec2(offset+1,0),0);let m2=textureLoad(smp,vec2(offset+2,0),0);let m3=textureLoad(smp,vec2(offset+3,0),0);return mat4x4(m0,m1,m2,m3);} #endif #endif #endif `; je.IncludesShadersStoreWGSL[Ufe] = Vfe; const kfe = "bonesVertex", zfe = `#ifndef BAKED_VERTEX_ANIMATION_TEXTURE #if NUM_BONE_INFLUENCERS>0 var influence : mat4x4; #ifdef BONETEXTURE influence=readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndices[0])*vertexInputs.matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndices[1])*vertexInputs.matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndices[2])*vertexInputs.matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndices[3])*vertexInputs.matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndicesExtra[0])*vertexInputs.matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndicesExtra[1])*vertexInputs.matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndicesExtra[2])*vertexInputs.matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 influence=influence+readMatrixFromRawSampler(boneSampler,vertexInputs.matricesIndicesExtra[3])*vertexInputs.matricesWeightsExtra[3]; #endif #else influence=uniforms.mBones[int(vertexInputs.matricesIndices[0])]*vertexInputs.matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndices[1])]*vertexInputs.matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndices[2])]*vertexInputs.matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndices[3])]*vertexInputs.matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 
influence=influence+uniforms.mBones[int(vertexInputs.matricesIndicesExtra[0])]*vertexInputs.matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndicesExtra[1])]*vertexInputs.matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndicesExtra[2])]*vertexInputs.matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 influence=influence+uniforms.mBones[int(vertexInputs.matricesIndicesExtra[3])]*vertexInputs.matricesWeightsExtra[3]; #endif #endif finalWorld=finalWorld*influence; #endif #endif `; je.IncludesShadersStoreWGSL[kfe] = zfe; const Hfe = "bakedVertexAnimationDeclaration", Gfe = `#ifdef BAKED_VERTEX_ANIMATION_TEXTURE uniform bakedVertexAnimationTime: f32;uniform bakedVertexAnimationTextureSizeInverted: vec2;uniform bakedVertexAnimationSettings: vec4;var bakedVertexAnimationTexture : texture_2d; #ifdef INSTANCES attribute bakedVertexAnimationSettingsInstanced : vec4; #endif fn readMatrixFromRawSamplerVAT(smp : texture_2d,index : f32,frame : f32)->mat4x4 {let offset=i32(index)*4;let frameUV=i32(frame);let m0=textureLoad(smp,vec2(offset+0,frameUV),0);let m1=textureLoad(smp,vec2(offset+1,frameUV),0);let m2=textureLoad(smp,vec2(offset+2,frameUV),0);let m3=textureLoad(smp,vec2(offset+3,frameUV),0);return mat4x4(m0,m1,m2,m3);} #endif `; je.IncludesShadersStoreWGSL[Hfe] = Gfe; const Kfe = "bakedVertexAnimation", Wfe = `#ifdef BAKED_VERTEX_ANIMATION_TEXTURE { #ifdef INSTANCES let VATStartFrame: f32=vertexInputs.bakedVertexAnimationSettingsInstanced.x;let VATEndFrame: f32=vertexInputs.bakedVertexAnimationSettingsInstanced.y;let VATOffsetFrame: f32=vertexInputs.bakedVertexAnimationSettingsInstanced.z;let VATSpeed: f32=vertexInputs.bakedVertexAnimationSettingsInstanced.w; #else let VATStartFrame: f32=uniforms.bakedVertexAnimationSettings.x;let VATEndFrame: f32=uniforms.bakedVertexAnimationSettings.y;let VATOffsetFrame: f32=uniforms.bakedVertexAnimationSettings.z;let VATSpeed: f32=uniforms.bakedVertexAnimationSettings.w; #endif let totalFrames: f32=VATEndFrame-VATStartFrame+1.0;let time: f32=uniforms.bakedVertexAnimationTime*VATSpeed/totalFrames;let frameCorrection: f32=select(1.0,0.0,time<1.0);let numOfFrames: f32=totalFrames-frameCorrection;var VATFrameNum: f32=fract(time)*numOfFrames;VATFrameNum=(VATFrameNum+VATOffsetFrame) % numOfFrames;VATFrameNum=floor(VATFrameNum);VATFrameNum=VATFrameNum+VATStartFrame+frameCorrection;var VATInfluence : mat4x4;VATInfluence=readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndices[0],VATFrameNum)*vertexInputs.matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndices[1],VATFrameNum)*vertexInputs.matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndices[2],VATFrameNum)*vertexInputs.matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndices[3],VATFrameNum)*vertexInputs.matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndicesExtra[0],VATFrameNum)*vertexInputs.matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 
VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndicesExtra[1],VATFrameNum)*vertexInputs.matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndicesExtra[2],VATFrameNum)*vertexInputs.matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 VATInfluence=VATInfluence+readMatrixFromRawSamplerVAT(bakedVertexAnimationTexture,vertexInputs.matricesIndicesExtra[3],VATFrameNum)*vertexInputs.matricesWeightsExtra[3]; #endif finalWorld=finalWorld*VATInfluence;} #endif `; je.IncludesShadersStoreWGSL[Kfe] = Wfe; const jfe = "clipPlaneFragment", Xfe = `#if defined(CLIPPLANE) || defined(CLIPPLANE2) || defined(CLIPPLANE3) || defined(CLIPPLANE4) || defined(CLIPPLANE5) || defined(CLIPPLANE6) if (false) {} #endif #ifdef CLIPPLANE else if (fragmentInputs.fClipDistance>0.0) {discard;} #endif #ifdef CLIPPLANE2 else if (fragmentInputs.fClipDistance2>0.0) {discard;} #endif #ifdef CLIPPLANE3 else if (fragmentInputs.fClipDistance3>0.0) {discard;} #endif #ifdef CLIPPLANE4 else if (fragmentInputs.fClipDistance4>0.0) {discard;} #endif #ifdef CLIPPLANE5 else if (fragmentInputs.fClipDistance5>0.0) {discard;} #endif #ifdef CLIPPLANE6 else if (fragmentInputs.fClipDistance6>0.0) {discard;} #endif `; je.IncludesShadersStoreWGSL[jfe] = Xfe; const Yfe = "clipPlaneFragmentDeclaration", Qfe = `#ifdef CLIPPLANE varying fClipDistance: f32; #endif #ifdef CLIPPLANE2 varying fClipDistance2: f32; #endif #ifdef CLIPPLANE3 varying fClipDistance3: f32; #endif #ifdef CLIPPLANE4 varying fClipDistance4: f32; #endif #ifdef CLIPPLANE5 varying fClipDistance5: f32; #endif #ifdef CLIPPLANE6 varying fClipDistance6: f32; #endif `; je.IncludesShadersStoreWGSL[Yfe] = Qfe; const $fe = "clipPlaneVertex", Zfe = `#ifdef CLIPPLANE vertexOutputs.fClipDistance=dot(worldPos,uniforms.vClipPlane); #endif #ifdef CLIPPLANE2 vertexOutputs.fClipDistance2=dot(worldPos,uniforms.vClipPlane2); #endif #ifdef CLIPPLANE3 vertexOutputs.fClipDistance3=dot(worldPos,uniforms.vClipPlane3); #endif #ifdef CLIPPLANE4 vertexOutputs.fClipDistance4=dot(worldPos,uniforms.vClipPlane4); #endif #ifdef CLIPPLANE5 vertexOutputs.fClipDistance5=dot(worldPos,uniforms.vClipPlane5); #endif #ifdef CLIPPLANE6 vertexOutputs.fClipDistance6=dot(worldPos,uniforms.vClipPlane6); #endif `; je.IncludesShadersStoreWGSL[$fe] = Zfe; const qfe = "clipPlaneVertexDeclaration", Jfe = `#ifdef CLIPPLANE uniform vClipPlane: vec4;varying fClipDistance: f32; #endif #ifdef CLIPPLANE2 uniform vClipPlane2: vec4;varying fClipDistance2: f32; #endif #ifdef CLIPPLANE3 uniform vClipPlane3: vec4;varying fClipDistance3: f32; #endif #ifdef CLIPPLANE4 uniform vClipPlane4: vec4;varying fClipDistance4: f32; #endif #ifdef CLIPPLANE5 uniform vClipPlane5: vec4;varying fClipDistance5: f32; #endif #ifdef CLIPPLANE6 uniform vClipPlane6: vec4;varying fClipDistance6: f32; #endif `; je.IncludesShadersStoreWGSL[qfe] = Jfe; const epe = "instancesDeclaration", tpe = `#ifdef INSTANCES attribute world0 : vec4;attribute world1 : vec4;attribute world2 : vec4;attribute world3 : vec4; #ifdef INSTANCESCOLOR attribute instanceColor : vec4; #endif #if defined(THIN_INSTANCES) && !defined(WORLD_UBO) uniform world : mat4x4; #endif #if defined(VELOCITY) || defined(PREPASS_VELOCITY) attribute previousWorld0 : vec4;attribute previousWorld1 : vec4;attribute previousWorld2 : vec4;attribute previousWorld3 : vec4; #ifdef THIN_INSTANCES uniform previousWorld : mat4x4; #endif #endif #else #if 
!defined(WORLD_UBO) uniform world : mat4x4; #endif #if defined(VELOCITY) || defined(PREPASS_VELOCITY) uniform previousWorld : mat4x4; #endif #endif `; je.IncludesShadersStoreWGSL[epe] = tpe; const ipe = "instancesVertex", rpe = `#ifdef INSTANCES var finalWorld=mat4x4(vertexInputs.world0,vertexInputs.world1,vertexInputs.world2,vertexInputs.world3); #if defined(PREPASS_VELOCITY) || defined(VELOCITY) var finalPreviousWorld=mat4x4(previousWorld0,previousWorld1,previousWorld2,previousWorld3); #endif #ifdef THIN_INSTANCES #if !defined(WORLD_UBO) finalWorld=uniforms.world*finalWorld; #else finalWorld=mesh.world*finalWorld; #endif #if defined(PREPASS_VELOCITY) || defined(VELOCITY) finalPreviousWorld=previousWorld*finalPreviousWorld; #endif #endif #else #if !defined(WORLD_UBO) var finalWorld=uniforms.world; #else var finalWorld=mesh.world; #endif #if defined(PREPASS_VELOCITY) || defined(VELOCITY) var finalPreviousWorld=previousWorld; #endif #endif `; je.IncludesShadersStoreWGSL[ipe] = rpe; const spe = "meshUboDeclaration", npe = `struct Mesh {world : mat4x4, visibility : f32,};var mesh : Mesh; #define WORLD_UBO `; je.IncludesShadersStoreWGSL[spe] = npe; const ape = "morphTargetsVertex", ope = `#ifdef MORPHTARGETS #ifdef MORPHTARGETS_TEXTURE vertexID=f32(vertexInputs.vertexIndex)*uniforms.morphTargetTextureInfo.x;positionUpdated=positionUpdated+(readVector3FromRawSampler({X},vertexID)-vertexInputs.position)*uniforms.morphTargetInfluences[{X}];vertexID=vertexID+1.0; #ifdef MORPHTARGETS_NORMAL normalUpdated=normalUpdated+(readVector3FromRawSampler({X},vertexID) -vertexInputs.normal)*uniforms.morphTargetInfluences[{X}];vertexID=vertexID+1.0; #endif #ifdef MORPHTARGETS_UV uvUpdated=uvUpdated+(readVector3FromRawSampler({X},vertexID).xy-vertexInputs.uv)*uniforms.morphTargetInfluences[{X}];vertexID=vertexID+1.0; #endif #ifdef MORPHTARGETS_TANGENT tangentUpdated.xyz=tangentUpdated.xyz+(readVector3FromRawSampler({X},vertexID) -vertexInputs.tangent.xyz)*uniforms.morphTargetInfluences[{X}]; #endif #else positionUpdated=positionUpdated+(position{X}-vertexInputs.position)*uniforms.morphTargetInfluences[{X}]; #ifdef MORPHTARGETS_NORMAL normalUpdated+=(normal{X}-vertexInputs.normal)*uniforms.morphTargetInfluences[{X}]; #endif #ifdef MORPHTARGETS_TANGENT tangentUpdated.xyz=tangentUpdated.xyz+(tangent{X}-vertexInputs.tangent.xyz)*uniforms.morphTargetInfluences[{X}]; #endif #ifdef MORPHTARGETS_UV uvUpdated=uvUpdated+(uv_{X}-vertexInputs.uv)*uniforms.morphTargetInfluences[{X}]; #endif #endif #endif `; je.IncludesShadersStoreWGSL[ape] = ope; const lpe = "morphTargetsVertexDeclaration", cpe = `#ifdef MORPHTARGETS #ifndef MORPHTARGETS_TEXTURE attribute position{X} : vec3; #ifdef MORPHTARGETS_NORMAL attribute normal{X} : vec3; #endif #ifdef MORPHTARGETS_TANGENT attribute tangent{X} : vec3; #endif #ifdef MORPHTARGETS_UV attribute uv_{X} : vec2; #endif #endif #endif `; je.IncludesShadersStoreWGSL[lpe] = cpe; const upe = "morphTargetsVertexGlobal", hpe = `#ifdef MORPHTARGETS #ifdef MORPHTARGETS_TEXTURE var vertexID : f32; #endif #endif `; je.IncludesShadersStoreWGSL[upe] = hpe; const dpe = "morphTargetsVertexGlobalDeclaration", fpe = `#ifdef MORPHTARGETS uniform morphTargetInfluences : array; #ifdef MORPHTARGETS_TEXTURE uniform morphTargetTextureIndices : array;uniform morphTargetTextureInfo : vec3;var morphTargets : texture_2d_array;var morphTargetsSampler : sampler;fn readVector3FromRawSampler(targetIndex : i32,vertexIndex : f32)->vec3 { let y=floor(vertexIndex/uniforms.morphTargetTextureInfo.y);let 
x=vertexIndex-y*uniforms.morphTargetTextureInfo.y;let textureUV=vec2((x+0.5)/uniforms.morphTargetTextureInfo.y,(y+0.5)/uniforms.morphTargetTextureInfo.z);return textureSampleLevel(morphTargets,morphTargetsSampler,textureUV,i32(uniforms.morphTargetTextureIndices[targetIndex]),0.0).xyz;} #endif #endif `; je.IncludesShadersStoreWGSL[dpe] = fpe; const ppe = "sceneUboDeclaration", _pe = `struct Scene {viewProjection : mat4x4, #ifdef MULTIVIEW viewProjectionR : mat4x4, #endif view : mat4x4, projection : mat4x4, vEyePosition : vec4,};var scene : Scene; `; je.IncludesShadersStoreWGSL[ppe] = _pe; const VZ = "fragmentOutputs.fragDepth", mpe = "uniforms", gpe = "internals", vpe = { texture_1d: Ea.E1d, texture_2d: Ea.E2d, texture_2d_array: Ea.E2dArray, texture_3d: Ea.E3d, texture_cube: Ea.Cube, texture_cube_array: Ea.CubeArray, texture_multisampled_2d: Ea.E2d, texture_depth_2d: Ea.E2d, texture_depth_2d_array: Ea.E2dArray, texture_depth_cube: Ea.Cube, texture_depth_cube_array: Ea.CubeArray, texture_depth_multisampled_2d: Ea.E2d, texture_storage_1d: Ea.E1d, texture_storage_2d: Ea.E2d, texture_storage_2d_array: Ea.E2dArray, texture_storage_3d: Ea.E3d, texture_external: null }; class Ape extends Io { constructor() { super(...arguments), this.shaderLanguage = Xa.WGSL, this.uniformRegexp = /uniform\s+(\w+)\s*:\s*(.+)\s*;/, this.textureRegexp = /var\s+(\w+)\s*:\s*((array<\s*)?(texture_\w+)\s*(<\s*(.+)\s*>)?\s*(,\s*\w+\s*>\s*)?);/, this.noPrecision = !0; } _getArraySize(e, t, i) { let r = 0; const s = t.lastIndexOf(">"); if (t.indexOf("array") >= 0 && s > 0) { let n = s; for (; n > 0 && t.charAt(n) !== " " && t.charAt(n) !== ","; ) n--; const a = t.substring(n + 1, s); for (r = +a, isNaN(r) && (r = +i[a.trim()]); n > 0 && (t.charAt(n) === " " || t.charAt(n) === ","); ) n--; t = t.substring(t.indexOf("<") + 1, n + 1); } return [e, t, r]; } initializeShaders(e) { this._webgpuProcessingContext = e, this._attributesInputWGSL = [], this._attributesWGSL = [], this._attributesConversionCodeWGSL = [], this._hasNonFloatAttribute = !1, this._varyingsWGSL = [], this._varyingNamesWGSL = [], this._stridedUniformArrays = []; } preProcessShaderCode(e) { return `struct ${Io.InternalsUBOName} { yFactor_: f32, textureOutputHeight_: f32, }; var ${gpe} : ${Io.InternalsUBOName}; ` + xH(e); } varyingProcessor(e, t, i) { const s = /\s*varying\s+(?:(?:highp)?|(?:lowp)?)\s*(\S+)\s*:\s*(.+)\s*;/gm.exec(e); if (s !== null) { const n = s[2], a = s[1]; let l; t ? (l = this._webgpuProcessingContext.availableVaryings[a], l === void 0 && Ce.Warn(`Invalid fragment shader: The varying named "${a}" is not declared in the vertex shader! This declaration will be ignored.`)) : (l = this._webgpuProcessingContext.getVaryingNextLocation(n, this._getArraySize(a, n, i)[2]), this._webgpuProcessingContext.availableVaryings[a] = l, this._varyingsWGSL.push(` @location(${l}) ${a} : ${n},`), this._varyingNamesWGSL.push(a)), e = ""; } return e; } attributeProcessor(e, t) { const r = /\s*attribute\s+(\S+)\s*:\s*(.+)\s*;/gm.exec(e); if (r !== null) { const s = r[2], n = r[1], a = this._webgpuProcessingContext.getAttributeNextLocation(s, this._getArraySize(n, s, t)[2]); this._webgpuProcessingContext.availableAttributes[n] = a, this._webgpuProcessingContext.orderedAttributes[a] = n; const l = this.vertexBufferKindToNumberOfComponents[n]; if (l !== void 0) { const o = l < 0 ? l === -1 ? "i32" : "vec" + -l + "" : l === 1 ? 
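/*
 * Handles an attribute whose vertex buffer holds integer data while the shader declares a float type:
 * the raw integer attribute is added to the input struct under an "_int_<name>_" alias and a conversion
 * to the declared type is emitted. Illustrative generated lines (location, declared type and the integer
 * vector type are assumed, the latter being derived from vertexBufferKindToNumberOfComponents):
 *
 *   @location(5) _int_matricesIndices_ : <integer vector type matching the buffer>,
 *   matricesIndices : vec4<f32>,
 *   vertexInputs.matricesIndices = vec4<f32>(vertexInputs_._int_matricesIndices_);
 */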
"u32" : "vec" + l + "", u = `_int_${n}_`; this._attributesInputWGSL.push(`@location(${a}) ${u} : ${o},`), this._attributesWGSL.push(`${n} : ${s},`), this._attributesConversionCodeWGSL.push(`vertexInputs.${n} = ${s}(vertexInputs_.${u});`), this._hasNonFloatAttribute = !0; } else this._attributesInputWGSL.push(`@location(${a}) ${n} : ${s},`), this._attributesWGSL.push(`${n} : ${s},`), this._attributesConversionCodeWGSL.push(`vertexInputs.${n} = vertexInputs_.${n};`); e = ""; } return e; } uniformProcessor(e, t, i) { const r = this.uniformRegexp.exec(e); if (r !== null) { const s = r[2], n = r[1]; this._addUniformToLeftOverUBO(n, s, i), e = ""; } return e; } textureProcessor(e, t, i) { const r = this.textureRegexp.exec(e); if (r !== null) { const s = r[1], n = r[2], a = !!r[3], l = r[4], o = l.indexOf("storage") > 0, u = r[6], h = o ? u.substring(0, u.indexOf(",")).trim() : null; let d = a ? this._getArraySize(s, n, i)[2] : 0, f = this._webgpuProcessingContext.availableTextures[s]; if (f) d = f.textures.length; else { f = { isTextureArray: d > 0, isStorageTexture: o, textures: [], sampleType: K_.Float }, d = d || 1; for (let v = 0; v < d; ++v) f.textures.push(this._webgpuProcessingContext.getNextFreeUBOBinding()); } this._webgpuProcessingContext.availableTextures[s] = f; const p = l.indexOf("depth") > 0, m = vpe[l], _ = p ? K_.Depth : u === "u32" ? K_.Uint : u === "i32" ? K_.Sint : K_.Float; if (f.sampleType = _, m === void 0) throw `Can't get the texture dimension corresponding to the texture function "${l}"!`; for (let v = 0; v < d; ++v) { const { groupIndex: C, bindingIndex: x } = f.textures[v]; v === 0 && (e = `@group(${C}) @binding(${x}) ${e}`), this._addTextureBindingDescription(s, f, v, m, h, !t); } } return e; } postProcessor(e) { return e; } finalizeShaders(e, t) { const i = t.indexOf("fragmentInputs.position") >= 0 ? ` if (internals.yFactor_ == 1.) { fragmentInputs.position.y = internals.textureOutputHeight_ - fragmentInputs.position.y; } ` : ""; e = this._processSamplers(e, !0), t = this._processSamplers(t, !1), e = this._processCustomBuffers(e, !0), t = this._processCustomBuffers(t, !1); const r = this._buildLeftOverUBO(); e = r + e, t = r + t, e = e.replace(/#define /g, "//#define "), e = this._processStridedUniformArrays(e); let s = `struct VertexInputs { @builtin(vertex_index) vertexIndex : u32, @builtin(instance_index) instanceIndex : u32, `; this._attributesInputWGSL.length > 0 && (s += this._attributesInputWGSL.join(` `)), s += ` }; var vertexInputs` + (this._hasNonFloatAttribute ? "_" : "") + ` : VertexInputs; `, this._hasNonFloatAttribute && (s += `struct VertexInputs_ { vertexIndex : u32, instanceIndex : u32, `, s += this._attributesWGSL.join(` `), s += ` }; var vertexInputs : VertexInputs_; `); let n = `struct FragmentInputs { @builtin(position) position : vec4, `; this._varyingsWGSL.length > 0 && (n += this._varyingsWGSL.join(` `)), n += ` }; var vertexOutputs : FragmentInputs; `, e = s + n + e; let a = ` vertexInputs${this._hasNonFloatAttribute ? 
"_" : ""} = input; `; this._hasNonFloatAttribute && (a += `vertexInputs.vertexIndex = vertexInputs_.vertexIndex; vertexInputs.instanceIndex = vertexInputs_.instanceIndex; `, a += this._attributesConversionCodeWGSL.join(` `), a += ` `); const l = ` vertexOutputs.position.y = vertexOutputs.position.y * internals.yFactor_; return vertexOutputs;`; e = this._injectStartingAndEndingCode(e, "fn main", a, l), t = t.replace(/#define /g, "//#define "), t = this._processStridedUniformArrays(t), t = t.replace(/dpdy/g, "(-internals.yFactor_)*dpdy"); let o = `struct FragmentInputs { @builtin(position) position : vec4, @builtin(front_facing) frontFacing : bool, `; this._varyingsWGSL.length > 0 && (o += this._varyingsWGSL.join(` `)), o += ` }; var fragmentInputs : FragmentInputs; `; let u = `struct FragmentOutputs { @location(0) color : vec4, `, h = !1, d = 0; for (; !h && (d = t.indexOf(VZ, d), !(d < 0)); ) { const m = d; for (h = !0; d > 1 && t.charAt(d) !== ` `; ) { if (t.charAt(d) === "/" && t.charAt(d - 1) === "/") { h = !1; break; } d--; } d = m + VZ.length; } h && (u += ` @builtin(frag_depth) fragDepth: f32, `), u += `}; var fragmentOutputs : FragmentOutputs; `, t = o + u + t; const f = ` fragmentInputs = input; ` + i, p = " return fragmentOutputs;"; return t = this._injectStartingAndEndingCode(t, "fn main", f, p), this._collectBindingNames(), this._preCreateBindGroupEntries(), this.vertexBufferKindToNumberOfComponents = {}, { vertexCode: e, fragmentCode: t }; } _generateLeftOverUBOCode(e, t) { let i = "", r = `struct ${e} { `; for (const s of this._webgpuProcessingContext.leftOverUniforms) { const n = s.type.replace(/^(.*?)(<.*>)?$/, "$1"), a = Io.UniformSizes[n]; if (s.length > 0) if (a <= 2) { const l = `${e}_${this._stridedUniformArrays.length}_strided_arr`; i += `struct ${l} { @size(16) el: ${n}, }`, this._stridedUniformArrays.push(s.name), r += ` @align(16) ${s.name} : array<${l}, ${s.length}>, `; } else r += ` ${s.name} : array<${s.type}, ${s.length}>, `; else r += ` ${s.name} : ${s.type}, `; } return r += `}; `, r = `${i} ${r}`, r += `@group(${t.binding.groupIndex}) @binding(${t.binding.bindingIndex}) var ${mpe} : ${e}; `, r; } _processSamplers(e, t) { const i = /var\s+(\w+Sampler)\s*:\s*(sampler|sampler_comparison)\s*;/gm; for (; ; ) { const r = i.exec(e); if (r === null) break; const s = r[1], n = r[2], a = s.indexOf(Io.AutoSamplerSuffix) === s.length - Io.AutoSamplerSuffix.length ? s.substring(0, s.indexOf(Io.AutoSamplerSuffix)) : null, l = n === "sampler_comparison" ? AT.Comparison : AT.Filtering; if (a) { const f = this._webgpuProcessingContext.availableTextures[a]; f && (f.autoBindSampler = !0); } let o = this._webgpuProcessingContext.availableSamplers[s]; o || (o = { binding: this._webgpuProcessingContext.getNextFreeUBOBinding(), type: l }, this._webgpuProcessingContext.availableSamplers[s] = o), this._addSamplerBindingDescription(s, o, t); const u = e.substring(0, r.index), h = `@group(${o.binding.groupIndex}) @binding(${o.binding.bindingIndex}) `, d = e.substring(r.index); e = u + h + d, i.lastIndex += h.length; } return e; } _processCustomBuffers(e, t) { const i = /var<\s*(uniform|storage)\s*(,\s*(read|read_write)\s*)?>\s+(\S+)\s*:\s*(\S+)\s*;/gm; for (; ; ) { const r = i.exec(e); if (r === null) break; const s = r[1], n = r[3]; let a = r[4]; const l = r[5]; let o = this._webgpuProcessingContext.availableBuffers[a]; if (!o) { const m = s === "uniform" ? mg.KnownUBOs[l] : null; let _; m ? 
(a = l, _ = m.binding, _.groupIndex === -1 && (_ = this._webgpuProcessingContext.getNextFreeUBOBinding())) : _ = this._webgpuProcessingContext.getNextFreeUBOBinding(), o = { binding: _ }, this._webgpuProcessingContext.availableBuffers[a] = o; } this._addBufferBindingDescription(a, this._webgpuProcessingContext.availableBuffers[a], n === "read_write" ? uT.Storage : s === "storage" ? uT.ReadOnlyStorage : uT.Uniform, t); const u = o.binding.groupIndex, h = o.binding.bindingIndex, d = e.substring(0, r.index), f = `@group(${u}) @binding(${h}) `, p = e.substring(r.index); e = d + f + p, i.lastIndex += f.length; } return e; } _processStridedUniformArrays(e) { for (const t of this._stridedUniformArrays) e = e.replace(new RegExp(`${t}\\s*\\[(.*)\\]`, "g"), `${t}[$1].el`); return e; } } class VF { get underlyingResource() { return this._webgpuTexture; } getMSAATexture(e = 0) { var t, i; return (i = (t = this._webgpuMSAATexture) === null || t === void 0 ? void 0 : t[e]) !== null && i !== void 0 ? i : null; } setMSAATexture(e, t = -1) { this._webgpuMSAATexture || (this._webgpuMSAATexture = []), t === -1 && (t = this._webgpuMSAATexture.length), this._webgpuMSAATexture[t] = e; } releaseMSAATexture() { if (this._webgpuMSAATexture) { for (const e of this._webgpuMSAATexture) e == null || e.destroy(); this._webgpuMSAATexture = null; } } constructor(e = null) { this.format = Re.RGBA8Unorm, this.textureUsages = 0, this.textureAdditionalUsages = 0, this._webgpuTexture = e, this._webgpuMSAATexture = null, this.view = null, this.viewForWriting = null; } set(e) { this._webgpuTexture = e; } setUsage(e, t, i, r, s) { this.createView({ format: this.format, dimension: i ? Ea.Cube : Ea.E2d, mipLevelCount: t ? yt.ILog2(Math.max(r, s)) + 1 : 1, baseArrayLayer: 0, baseMipLevel: 0, arrayLayerCount: i ? 
6 : 1, aspect: jA.All }); } createView(e, t = !1) { if (this.view = this._webgpuTexture.createView(e), t && e) { const i = e.mipLevelCount; e.mipLevelCount = 1, this.viewForWriting = this._webgpuTexture.createView(e), e.mipLevelCount = i; } } reset() { this._webgpuTexture = null, this._webgpuMSAATexture = null, this.view = null, this.viewForWriting = null; } release() { var e, t; (e = this._webgpuTexture) === null || e === void 0 || e.destroy(), this.releaseMSAATexture(), (t = this._copyInvertYTempTexture) === null || t === void 0 || t.destroy(), this.reset(); } } const ype = ` const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f)); const vec2 tex[4] = vec2[4](vec2(0.0f, 0.0f), vec2(1.0f, 0.0f), vec2(0.0f, 1.0f), vec2(1.0f, 1.0f)); layout(location = 0) out vec2 vTex; void main() { vTex = tex[gl_VertexIndex]; gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0); } `, Cpe = ` layout(set = 0, binding = 0) uniform sampler imgSampler; layout(set = 0, binding = 1) uniform texture2D img; layout(location = 0) in vec2 vTex; layout(location = 0) out vec4 outColor; void main() { outColor = texture(sampler2D(img, imgSampler), vTex); } `, Gie = ` #extension GL_EXT_samplerless_texture_functions : enable const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f)); const vec2 tex[4] = vec2[4](vec2(0.0f, 0.0f), vec2(1.0f, 0.0f), vec2(0.0f, 1.0f), vec2(1.0f, 1.0f)); layout(set = 0, binding = 0) uniform texture2D img; #ifdef INVERTY layout(location = 0) out flat ivec2 vTextureSize; #endif void main() { #ifdef INVERTY vTextureSize = textureSize(img, 0); #endif gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0); } `, xpe = ` #extension GL_EXT_samplerless_texture_functions : enable layout(set = 0, binding = 0) uniform texture2D img; #ifdef INVERTY layout(location = 0) in flat ivec2 vTextureSize; #endif layout(location = 0) out vec4 outColor; void main() { #ifdef INVERTY vec4 color = texelFetch(img, ivec2(gl_FragCoord.x, vTextureSize.y - gl_FragCoord.y), 0); #else vec4 color = texelFetch(img, ivec2(gl_FragCoord.xy), 0); #endif #ifdef PREMULTIPLYALPHA color.rgb *= color.a; #endif outColor = color; } `, bpe = Gie, Epe = ` #extension GL_EXT_samplerless_texture_functions : enable layout(set = 0, binding = 0) uniform texture2D img; layout(set = 0, binding = 1) uniform Params { float ofstX; float ofstY; float width; float height; }; #ifdef INVERTY layout(location = 0) in flat ivec2 vTextureSize; #endif layout(location = 0) out vec4 outColor; void main() { if (gl_FragCoord.x < ofstX || gl_FragCoord.x >= ofstX + width) { discard; } if (gl_FragCoord.y < ofstY || gl_FragCoord.y >= ofstY + height) { discard; } #ifdef INVERTY vec4 color = texelFetch(img, ivec2(gl_FragCoord.x, ofstY + height - (gl_FragCoord.y - ofstY)), 0); #else vec4 color = texelFetch(img, ivec2(gl_FragCoord.xy), 0); #endif #ifdef PREMULTIPLYALPHA color.rgb *= color.a; #endif outColor = color; } `, Tpe = ` const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f)); void main() { gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0); } `, Spe = ` layout(set = 0, binding = 0) uniform Uniforms { uniform vec4 color; }; layout(location = 0) out vec4 outColor; void main() { outColor = color; } `, Mpe = ` struct VertexOutput { @builtin(position) Position : vec4<f32>, @location(0) fragUV : vec2<f32> } @vertex fn main( @builtin(vertex_index) VertexIndex : u32 ) -> VertexOutput { var pos = array<vec2<f32>, 4>( vec2(-1.0, 1.0), vec2( 1.0, 1.0), vec2(-1.0,
-1.0), vec2( 1.0, -1.0) ); var tex = array<vec2<f32>, 4>( vec2(0.0, 0.0), vec2(1.0, 0.0), vec2(0.0, 1.0), vec2(1.0, 1.0) ); var output: VertexOutput; output.Position = vec4(pos[VertexIndex], 0.0, 1.0); output.fragUV = tex[VertexIndex]; return output; } `, Rpe = ` @group(0) @binding(0) var videoSampler: sampler; @group(0) @binding(1) var videoTexture: texture_external; @fragment fn main( @location(0) fragUV: vec2<f32> ) -> @location(0) vec4<f32> { return textureSampleBaseClampToEdge(videoTexture, videoSampler, fragUV); } `, Ppe = ` @group(0) @binding(0) var videoSampler: sampler; @group(0) @binding(1) var videoTexture: texture_external; @fragment fn main( @location(0) fragUV: vec2<f32> ) -> @location(0) vec4<f32> { return textureSampleBaseClampToEdge(videoTexture, videoSampler, vec2(fragUV.x, 1.0 - fragUV.y)); } `; var g4; (function(c) { c[c.MipMap = 0] = "MipMap", c[c.InvertYPremultiplyAlpha = 1] = "InvertYPremultiplyAlpha", c[c.Clear = 2] = "Clear", c[c.InvertYPremultiplyAlphaWithOfst = 3] = "InvertYPremultiplyAlphaWithOfst"; })(g4 || (g4 = {})); var tO; (function(c) { c[c.DontInvertY = 0] = "DontInvertY", c[c.InvertY = 1] = "InvertY"; })(tO || (tO = {})); const kZ = [ { vertex: ype, fragment: Cpe }, { vertex: Gie, fragment: xpe }, { vertex: Tpe, fragment: Spe }, { vertex: bpe, fragment: Epe } ], kC = { "": 0, r8unorm: 1, r8uint: 2, r8sint: 3, r16uint: 4, r16sint: 5, r16float: 6, rg8unorm: 7, rg8uint: 8, rg8sint: 9, r32uint: 10, r32sint: 11, r32float: 12, rg16uint: 13, rg16sint: 14, rg16float: 15, rgba8unorm: 16, "rgba8unorm-srgb": 17, rgba8uint: 18, rgba8sint: 19, bgra8unorm: 20, "bgra8unorm-srgb": 21, rgb10a2uint: 22, rgb10a2unorm: 23, /* rg11b10ufloat: this entry is dynamically added if the "RG11B10UFloatRenderable" extension is supported */ rg32uint: 24, rg32sint: 25, rg32float: 26, rgba16uint: 27, rgba16sint: 28, rgba16float: 29, rgba32uint: 30, rgba32sint: 31, rgba32float: 32, stencil8: 33, depth16unorm: 34, depth24plus: 35, "depth24plus-stencil8": 36, depth32float: 37, "depth32float-stencil8": 38 }; class zn { static ComputeNumMipmapLevels(e, t) { return yt.ILog2(Math.max(e, t)) + 1; } //------------------------------------------------------------------------------ // Initialization / Helpers //------------------------------------------------------------------------------ constructor(e, t, i, r, s) { if (this._pipelines = {}, this._compiledShaders = [], this._videoPipelines = {}, this._videoCompiledShaders = [], this._deferredReleaseTextures = [], this._device = e, this._glslang = t, this._tintWASM = i, this._bufferManager = r, s.indexOf(UC.RG11B10UFloatRenderable) !== -1) { const n = Object.keys(kC); kC[Re.RG11B10UFloat] = kC[n[n.length - 1]] + 1; } this._mipmapSampler = e.createSampler({ minFilter: Xs.Linear }), this._videoSampler = e.createSampler({ minFilter: Xs.Linear }), this._ubCopyWithOfst = this._bufferManager.createBuffer(4 * 4, ya.Uniform | ya.CopyDst, "UBCopyWithOffset").underlyingResource, this._getPipeline(Re.RGBA8Unorm), this._getVideoPipeline(Re.RGBA8Unorm); } _getPipeline(e, t = g4.MipMap, i) { const r = t === g4.MipMap ? 1 : t === g4.InvertYPremultiplyAlpha ? ((i.invertY ? 1 : 0) << 1) + ((i.premultiplyAlpha ? 1 : 0) << 2) : t === g4.Clear ? 8 : t === g4.InvertYPremultiplyAlphaWithOfst ? ((i.invertY ? 1 : 0) << 4) + ((i.premultiplyAlpha ?
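/* Shader variant key for the copy pipelines: the mode (mipmap generation, invert-Y/premultiply, clear, invert-Y with offset) and the invertY/premultiplyAlpha flags are packed into a small bit mask, and one render pipeline is cached per (format, key) pair. */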
1 : 0) << 5) : 0; this._pipelines[e] || (this._pipelines[e] = []); let s = this._pipelines[e][r]; if (!s) { let n = `#version 450 `; (t === g4.InvertYPremultiplyAlpha || t === g4.InvertYPremultiplyAlphaWithOfst) && (i.invertY && (n += `#define INVERTY `), i.premultiplyAlpha && (n += `#define PREMULTIPLYALPHA `)); let a = this._compiledShaders[r]; if (!a) { let o = this._glslang.compileGLSL(n + kZ[t].vertex, "vertex"), u = this._glslang.compileGLSL(n + kZ[t].fragment, "fragment"); this._tintWASM && (o = this._tintWASM.convertSpirV2WGSL(o), u = this._tintWASM.convertSpirV2WGSL(u)); const h = this._device.createShaderModule({ code: o }), d = this._device.createShaderModule({ code: u }); a = this._compiledShaders[r] = [h, d]; } const l = this._device.createRenderPipeline({ layout: zO.Auto, vertex: { module: a[0], entryPoint: "main" }, fragment: { module: a[1], entryPoint: "main", targets: [ { format: e } ] }, primitive: { topology: B_.TriangleStrip, stripIndexFormat: yT.Uint16 } }); s = this._pipelines[e][r] = [l, l.getBindGroupLayout(0)]; } return s; } _getVideoPipeline(e, t = tO.DontInvertY) { const i = t === tO.InvertY ? 1 : 0; this._videoPipelines[e] || (this._videoPipelines[e] = []); let r = this._videoPipelines[e][i]; if (!r) { let s = this._videoCompiledShaders[i]; if (!s) { const a = this._device.createShaderModule({ code: Mpe }), l = this._device.createShaderModule({ code: i === 0 ? Rpe : Ppe }); s = this._videoCompiledShaders[i] = [a, l]; } const n = this._device.createRenderPipeline({ label: `CopyVideoToTexture_${e}_${i === 0 ? "DontInvertY" : "InvertY"}`, layout: zO.Auto, vertex: { module: s[0], entryPoint: "main" }, fragment: { module: s[1], entryPoint: "main", targets: [ { format: e } ] }, primitive: { topology: B_.TriangleStrip, stripIndexFormat: yT.Uint16 } }); r = this._videoPipelines[e][i] = [n, n.getBindGroupLayout(0)]; } return r; } static GetTextureTypeFromFormat(e) { switch (e) { case Re.R8Unorm: case Re.R8Snorm: case Re.R8Uint: case Re.R8Sint: case Re.RG8Unorm: case Re.RG8Snorm: case Re.RG8Uint: case Re.RG8Sint: case Re.RGBA8Unorm: case Re.RGBA8UnormSRGB: case Re.RGBA8Snorm: case Re.RGBA8Uint: case Re.RGBA8Sint: case Re.BGRA8Unorm: case Re.BGRA8UnormSRGB: case Re.RGB10A2UINT: case Re.RGB10A2Unorm: case Re.RGB9E5UFloat: case Re.RG11B10UFloat: case Re.BC7RGBAUnorm: case Re.BC7RGBAUnormSRGB: case Re.BC6HRGBUFloat: case Re.BC6HRGBFloat: case Re.BC5RGUnorm: case Re.BC5RGSnorm: case Re.BC3RGBAUnorm: case Re.BC3RGBAUnormSRGB: case Re.BC2RGBAUnorm: case Re.BC2RGBAUnormSRGB: case Re.BC4RUnorm: case Re.BC4RSnorm: case Re.BC1RGBAUnorm: case Re.BC1RGBAUnormSRGB: case Re.ETC2RGB8Unorm: case Re.ETC2RGB8UnormSRGB: case Re.ETC2RGB8A1Unorm: case Re.ETC2RGB8A1UnormSRGB: case Re.ETC2RGBA8Unorm: case Re.ETC2RGBA8UnormSRGB: case Re.EACR11Unorm: case Re.EACR11Snorm: case Re.EACRG11Unorm: case Re.EACRG11Snorm: case Re.ASTC4x4Unorm: case Re.ASTC4x4UnormSRGB: case Re.ASTC5x4Unorm: case Re.ASTC5x4UnormSRGB: case Re.ASTC5x5Unorm: case Re.ASTC5x5UnormSRGB: case Re.ASTC6x5Unorm: case Re.ASTC6x5UnormSRGB: case Re.ASTC6x6Unorm: case Re.ASTC6x6UnormSRGB: case Re.ASTC8x5Unorm: case Re.ASTC8x5UnormSRGB: case Re.ASTC8x6Unorm: case Re.ASTC8x6UnormSRGB: case Re.ASTC8x8Unorm: case Re.ASTC8x8UnormSRGB: case Re.ASTC10x5Unorm: case Re.ASTC10x5UnormSRGB: case Re.ASTC10x6Unorm: case Re.ASTC10x6UnormSRGB: case Re.ASTC10x8Unorm: case Re.ASTC10x8UnormSRGB: case Re.ASTC10x10Unorm: case Re.ASTC10x10UnormSRGB: case Re.ASTC12x10Unorm: case Re.ASTC12x10UnormSRGB: case Re.ASTC12x12Unorm: case Re.ASTC12x12UnormSRGB: case 
Re.Stencil8: return 0; case Re.R16Uint: case Re.R16Sint: case Re.RG16Uint: case Re.RG16Sint: case Re.RGBA16Uint: case Re.RGBA16Sint: case Re.Depth16Unorm: return 5; case Re.R16Float: case Re.RG16Float: case Re.RGBA16Float: return 2; case Re.R32Uint: case Re.R32Sint: case Re.RG32Uint: case Re.RG32Sint: case Re.RGBA32Uint: case Re.RGBA32Sint: return 7; case Re.R32Float: case Re.RG32Float: case Re.RGBA32Float: case Re.Depth32Float: case Re.Depth32FloatStencil8: case Re.Depth24Plus: case Re.Depth24PlusStencil8: return 1; } return 0; } static _GetBlockInformationFromFormat(e) { switch (e) { case Re.R8Unorm: case Re.R8Snorm: case Re.R8Uint: case Re.R8Sint: return { width: 1, height: 1, length: 1 }; case Re.R16Uint: case Re.R16Sint: case Re.R16Float: case Re.RG8Unorm: case Re.RG8Snorm: case Re.RG8Uint: case Re.RG8Sint: return { width: 1, height: 1, length: 2 }; case Re.R32Uint: case Re.R32Sint: case Re.R32Float: case Re.RG16Uint: case Re.RG16Sint: case Re.RG16Float: case Re.RGBA8Unorm: case Re.RGBA8UnormSRGB: case Re.RGBA8Snorm: case Re.RGBA8Uint: case Re.RGBA8Sint: case Re.BGRA8Unorm: case Re.BGRA8UnormSRGB: case Re.RGB9E5UFloat: case Re.RGB10A2UINT: case Re.RGB10A2Unorm: case Re.RG11B10UFloat: return { width: 1, height: 1, length: 4 }; case Re.RG32Uint: case Re.RG32Sint: case Re.RG32Float: case Re.RGBA16Uint: case Re.RGBA16Sint: case Re.RGBA16Float: return { width: 1, height: 1, length: 8 }; case Re.RGBA32Uint: case Re.RGBA32Sint: case Re.RGBA32Float: return { width: 1, height: 1, length: 16 }; case Re.Stencil8: throw "No fixed size for Stencil8 format!"; case Re.Depth16Unorm: return { width: 1, height: 1, length: 2 }; case Re.Depth24Plus: throw "No fixed size for Depth24Plus format!"; case Re.Depth24PlusStencil8: throw "No fixed size for Depth24PlusStencil8 format!"; case Re.Depth32Float: return { width: 1, height: 1, length: 4 }; case Re.Depth32FloatStencil8: return { width: 1, height: 1, length: 5 }; case Re.BC7RGBAUnorm: case Re.BC7RGBAUnormSRGB: case Re.BC6HRGBUFloat: case Re.BC6HRGBFloat: case Re.BC5RGUnorm: case Re.BC5RGSnorm: case Re.BC3RGBAUnorm: case Re.BC3RGBAUnormSRGB: case Re.BC2RGBAUnorm: case Re.BC2RGBAUnormSRGB: return { width: 4, height: 4, length: 16 }; case Re.BC4RUnorm: case Re.BC4RSnorm: case Re.BC1RGBAUnorm: case Re.BC1RGBAUnormSRGB: return { width: 4, height: 4, length: 8 }; case Re.ETC2RGB8Unorm: case Re.ETC2RGB8UnormSRGB: case Re.ETC2RGB8A1Unorm: case Re.ETC2RGB8A1UnormSRGB: case Re.EACR11Unorm: case Re.EACR11Snorm: return { width: 4, height: 4, length: 8 }; case Re.ETC2RGBA8Unorm: case Re.ETC2RGBA8UnormSRGB: case Re.EACRG11Unorm: case Re.EACRG11Snorm: return { width: 4, height: 4, length: 16 }; case Re.ASTC4x4Unorm: case Re.ASTC4x4UnormSRGB: return { width: 4, height: 4, length: 16 }; case Re.ASTC5x4Unorm: case Re.ASTC5x4UnormSRGB: return { width: 5, height: 4, length: 16 }; case Re.ASTC5x5Unorm: case Re.ASTC5x5UnormSRGB: return { width: 5, height: 5, length: 16 }; case Re.ASTC6x5Unorm: case Re.ASTC6x5UnormSRGB: return { width: 6, height: 5, length: 16 }; case Re.ASTC6x6Unorm: case Re.ASTC6x6UnormSRGB: return { width: 6, height: 6, length: 16 }; case Re.ASTC8x5Unorm: case Re.ASTC8x5UnormSRGB: return { width: 8, height: 5, length: 16 }; case Re.ASTC8x6Unorm: case Re.ASTC8x6UnormSRGB: return { width: 8, height: 6, length: 16 }; case Re.ASTC8x8Unorm: case Re.ASTC8x8UnormSRGB: return { width: 8, height: 8, length: 16 }; case Re.ASTC10x5Unorm: case Re.ASTC10x5UnormSRGB: return { width: 10, height: 5, length: 16 }; case Re.ASTC10x6Unorm: case Re.ASTC10x6UnormSRGB: return { 
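/* Block information describes the texel block width/height and the byte length of one block; callers use it to compute bytesPerRow and aligned copy sizes for uploads and readbacks. */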
width: 10, height: 6, length: 16 }; case Re.ASTC10x8Unorm: case Re.ASTC10x8UnormSRGB: return { width: 10, height: 8, length: 16 }; case Re.ASTC10x10Unorm: case Re.ASTC10x10UnormSRGB: return { width: 10, height: 10, length: 16 }; case Re.ASTC12x10Unorm: case Re.ASTC12x10UnormSRGB: return { width: 12, height: 10, length: 16 }; case Re.ASTC12x12Unorm: case Re.ASTC12x12UnormSRGB: return { width: 12, height: 12, length: 16 }; } return { width: 1, height: 1, length: 4 }; } static _IsHardwareTexture(e) { return !!e.release; } static _IsInternalTexture(e) { return !!e.dispose; } static IsImageBitmap(e) { return e.close !== void 0; } static IsImageBitmapArray(e) { return Array.isArray(e) && e[0].close !== void 0; } setCommandEncoder(e) { this._commandEncoderForCreation = e; } static IsCompressedFormat(e) { switch (e) { case Re.BC7RGBAUnormSRGB: case Re.BC7RGBAUnorm: case Re.BC6HRGBFloat: case Re.BC6HRGBUFloat: case Re.BC5RGSnorm: case Re.BC5RGUnorm: case Re.BC4RSnorm: case Re.BC4RUnorm: case Re.BC3RGBAUnormSRGB: case Re.BC3RGBAUnorm: case Re.BC2RGBAUnormSRGB: case Re.BC2RGBAUnorm: case Re.BC1RGBAUnormSRGB: case Re.BC1RGBAUnorm: case Re.ETC2RGB8Unorm: case Re.ETC2RGB8UnormSRGB: case Re.ETC2RGB8A1Unorm: case Re.ETC2RGB8A1UnormSRGB: case Re.ETC2RGBA8Unorm: case Re.ETC2RGBA8UnormSRGB: case Re.EACR11Unorm: case Re.EACR11Snorm: case Re.EACRG11Unorm: case Re.EACRG11Snorm: case Re.ASTC4x4Unorm: case Re.ASTC4x4UnormSRGB: case Re.ASTC5x4Unorm: case Re.ASTC5x4UnormSRGB: case Re.ASTC5x5Unorm: case Re.ASTC5x5UnormSRGB: case Re.ASTC6x5Unorm: case Re.ASTC6x5UnormSRGB: case Re.ASTC6x6Unorm: case Re.ASTC6x6UnormSRGB: case Re.ASTC8x5Unorm: case Re.ASTC8x5UnormSRGB: case Re.ASTC8x6Unorm: case Re.ASTC8x6UnormSRGB: case Re.ASTC8x8Unorm: case Re.ASTC8x8UnormSRGB: case Re.ASTC10x5Unorm: case Re.ASTC10x5UnormSRGB: case Re.ASTC10x6Unorm: case Re.ASTC10x6UnormSRGB: case Re.ASTC10x8Unorm: case Re.ASTC10x8UnormSRGB: case Re.ASTC10x10Unorm: case Re.ASTC10x10UnormSRGB: case Re.ASTC12x10Unorm: case Re.ASTC12x10UnormSRGB: case Re.ASTC12x12Unorm: case Re.ASTC12x12UnormSRGB: return !0; } return !1; } static GetWebGPUTextureFormat(e, t, i = !1) { switch (t) { case 15: return Re.Depth16Unorm; case 16: return Re.Depth24Plus; case 13: return Re.Depth24PlusStencil8; case 14: return Re.Depth32Float; case 18: return Re.Depth32FloatStencil8; case 19: return Re.Stencil8; case 36492: return i ? Re.BC7RGBAUnormSRGB : Re.BC7RGBAUnorm; case 36495: return Re.BC6HRGBUFloat; case 36494: return Re.BC6HRGBFloat; case 33779: return i ? Re.BC3RGBAUnormSRGB : Re.BC3RGBAUnorm; case 33778: return i ? Re.BC2RGBAUnormSRGB : Re.BC2RGBAUnorm; case 33777: case 33776: return i ? Re.BC1RGBAUnormSRGB : Re.BC1RGBAUnorm; case 37808: return i ? Re.ASTC4x4UnormSRGB : Re.ASTC4x4Unorm; case 36196: case 37492: return i ? Re.ETC2RGB8UnormSRGB : Re.ETC2RGB8Unorm; case 37496: return i ? Re.ETC2RGBA8UnormSRGB : Re.ETC2RGBA8Unorm; } switch (e) { case 3: switch (t) { case 6: return Re.R8Snorm; case 7: return Re.RG8Snorm; case 4: throw "RGB format not supported in WebGPU"; case 8: return Re.R8Sint; case 9: return Re.RG8Sint; case 10: throw "RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA8Sint; default: return Re.RGBA8Snorm; } case 0: switch (t) { case 6: return Re.R8Unorm; case 7: return Re.RG8Unorm; case 4: throw "TEXTUREFORMAT_RGB format not supported in WebGPU"; case 5: return i ? Re.RGBA8UnormSRGB : Re.RGBA8Unorm; case 12: return i ? 
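/* GetWebGPUTextureFormat maps the engine (type, format) pair to a WebGPU format; the last argument selects the sRGB variant when one exists, and unsupported combinations throw. */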
Re.BGRA8UnormSRGB : Re.BGRA8Unorm; case 8: return Re.R8Uint; case 9: return Re.RG8Uint; case 10: throw "RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA8Uint; case 0: throw "TEXTUREFORMAT_ALPHA format not supported in WebGPU"; case 1: throw "TEXTUREFORMAT_LUMINANCE format not supported in WebGPU"; case 2: throw "TEXTUREFORMAT_LUMINANCE_ALPHA format not supported in WebGPU"; default: return Re.RGBA8Unorm; } case 4: switch (t) { case 8: return Re.R16Sint; case 9: return Re.RG16Sint; case 10: throw "TEXTUREFORMAT_RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA16Sint; default: return Re.RGBA16Sint; } case 5: switch (t) { case 8: return Re.R16Uint; case 9: return Re.RG16Uint; case 10: throw "TEXTUREFORMAT_RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA16Uint; default: return Re.RGBA16Uint; } case 6: switch (t) { case 8: return Re.R32Sint; case 9: return Re.RG32Sint; case 10: throw "TEXTUREFORMAT_RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA32Sint; default: return Re.RGBA32Sint; } case 7: switch (t) { case 8: return Re.R32Uint; case 9: return Re.RG32Uint; case 10: throw "TEXTUREFORMAT_RGB_INTEGER format not supported in WebGPU"; case 11: return Re.RGBA32Uint; default: return Re.RGBA32Uint; } case 1: switch (t) { case 6: return Re.R32Float; case 7: return Re.RG32Float; case 4: throw "TEXTUREFORMAT_RGB format not supported in WebGPU"; case 5: return Re.RGBA32Float; default: return Re.RGBA32Float; } case 2: switch (t) { case 6: return Re.R16Float; case 7: return Re.RG16Float; case 4: throw "TEXTUREFORMAT_RGB format not supported in WebGPU"; case 5: return Re.RGBA16Float; default: return Re.RGBA16Float; } case 10: throw "TEXTURETYPE_UNSIGNED_SHORT_5_6_5 format not supported in WebGPU"; case 13: switch (t) { case 5: return Re.RG11B10UFloat; case 11: throw "TEXTUREFORMAT_RGBA_INTEGER format not supported in WebGPU when type is TEXTURETYPE_UNSIGNED_INT_10F_11F_11F_REV"; default: return Re.RG11B10UFloat; } case 14: switch (t) { case 5: return Re.RGB9E5UFloat; case 11: throw "TEXTUREFORMAT_RGBA_INTEGER format not supported in WebGPU when type is TEXTURETYPE_UNSIGNED_INT_5_9_9_9_REV"; default: return Re.RGB9E5UFloat; } case 8: throw "TEXTURETYPE_UNSIGNED_SHORT_4_4_4_4 format not supported in WebGPU"; case 9: throw "TEXTURETYPE_UNSIGNED_SHORT_5_5_5_1 format not supported in WebGPU"; case 11: switch (t) { case 5: return Re.RGB10A2Unorm; case 11: return Re.RGB10A2UINT; default: return Re.RGB10A2Unorm; } } return i ? 
Re.RGBA8UnormSRGB : Re.RGBA8Unorm; } static GetNumChannelsFromWebGPUTextureFormat(e) { switch (e) { case Re.R8Unorm: case Re.R8Snorm: case Re.R8Uint: case Re.R8Sint: case Re.BC4RUnorm: case Re.BC4RSnorm: case Re.R16Uint: case Re.R16Sint: case Re.Depth16Unorm: case Re.R16Float: case Re.R32Uint: case Re.R32Sint: case Re.R32Float: case Re.Depth32Float: case Re.Stencil8: case Re.Depth24Plus: case Re.EACR11Unorm: case Re.EACR11Snorm: return 1; case Re.RG8Unorm: case Re.RG8Snorm: case Re.RG8Uint: case Re.RG8Sint: case Re.Depth32FloatStencil8: case Re.BC5RGUnorm: case Re.BC5RGSnorm: case Re.RG16Uint: case Re.RG16Sint: case Re.RG16Float: case Re.RG32Uint: case Re.RG32Sint: case Re.RG32Float: case Re.Depth24PlusStencil8: case Re.EACRG11Unorm: case Re.EACRG11Snorm: return 2; case Re.RGB9E5UFloat: case Re.RG11B10UFloat: case Re.BC6HRGBUFloat: case Re.BC6HRGBFloat: case Re.ETC2RGB8Unorm: case Re.ETC2RGB8UnormSRGB: return 3; case Re.RGBA8Unorm: case Re.RGBA8UnormSRGB: case Re.RGBA8Snorm: case Re.RGBA8Uint: case Re.RGBA8Sint: case Re.BGRA8Unorm: case Re.BGRA8UnormSRGB: case Re.RGB10A2UINT: case Re.RGB10A2Unorm: case Re.BC7RGBAUnorm: case Re.BC7RGBAUnormSRGB: case Re.BC3RGBAUnorm: case Re.BC3RGBAUnormSRGB: case Re.BC2RGBAUnorm: case Re.BC2RGBAUnormSRGB: case Re.BC1RGBAUnorm: case Re.BC1RGBAUnormSRGB: case Re.RGBA16Uint: case Re.RGBA16Sint: case Re.RGBA16Float: case Re.RGBA32Uint: case Re.RGBA32Sint: case Re.RGBA32Float: case Re.ETC2RGB8A1Unorm: case Re.ETC2RGB8A1UnormSRGB: case Re.ETC2RGBA8Unorm: case Re.ETC2RGBA8UnormSRGB: case Re.ASTC4x4Unorm: case Re.ASTC4x4UnormSRGB: case Re.ASTC5x4Unorm: case Re.ASTC5x4UnormSRGB: case Re.ASTC5x5Unorm: case Re.ASTC5x5UnormSRGB: case Re.ASTC6x5Unorm: case Re.ASTC6x5UnormSRGB: case Re.ASTC6x6Unorm: case Re.ASTC6x6UnormSRGB: case Re.ASTC8x5Unorm: case Re.ASTC8x5UnormSRGB: case Re.ASTC8x6Unorm: case Re.ASTC8x6UnormSRGB: case Re.ASTC8x8Unorm: case Re.ASTC8x8UnormSRGB: case Re.ASTC10x5Unorm: case Re.ASTC10x5UnormSRGB: case Re.ASTC10x6Unorm: case Re.ASTC10x6UnormSRGB: case Re.ASTC10x8Unorm: case Re.ASTC10x8UnormSRGB: case Re.ASTC10x10Unorm: case Re.ASTC10x10UnormSRGB: case Re.ASTC12x10Unorm: case Re.ASTC12x10UnormSRGB: case Re.ASTC12x12Unorm: case Re.ASTC12x12UnormSRGB: return 4; } throw `Unknown format ${e}!`; } static HasStencilAspect(e) { switch (e) { case Re.Stencil8: case Re.Depth32FloatStencil8: case Re.Depth24PlusStencil8: return !0; } return !1; } static HasDepthAndStencilAspects(e) { switch (e) { case Re.Depth32FloatStencil8: case Re.Depth24PlusStencil8: return !0; } return !1; } static GetDepthFormatOnly(e) { switch (e) { case Re.Depth16Unorm: return Re.Depth16Unorm; case Re.Depth24Plus: return Re.Depth24Plus; case Re.Depth24PlusStencil8: return Re.Depth24Plus; case Re.Depth32Float: return Re.Depth32Float; case Re.Depth32FloatStencil8: return Re.Depth32Float; } return e; } static GetSample(e) { return e > 1 ? 4 : 1; } copyVideoToTexture(e, t, i, r = !1, s) { var n, a, l, o; const u = s === void 0, [h, d] = this._getVideoPipeline(i, r ? 
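/* copyVideoToTexture draws the imported external video texture onto the destination view with a 4-vertex quad; the invertY flag picks the fragment shader that flips the V coordinate, and a local command encoder is created and submitted when the caller does not provide one. */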
tO.InvertY : tO.DontInvertY); u && (s = this._device.createCommandEncoder({})), (a = (n = s).pushDebugGroup) === null || a === void 0 || a.call(n, `copy video to texture - invertY=${r}`); const p = { colorAttachments: [ { view: t._hardwareTexture.underlyingResource.createView({ format: i, dimension: Ea.E2d, mipLevelCount: 1, baseArrayLayer: 0, baseMipLevel: 0, arrayLayerCount: 1, aspect: jA.All }), loadOp: au.Load, storeOp: _m.Store } ] }, m = s.beginRenderPass(p), _ = { layout: d, entries: [ { binding: 0, resource: this._videoSampler }, { binding: 1, resource: this._device.importExternalTexture({ source: e.underlyingResource }) } ] }, v = this._device.createBindGroup(_); m.setPipeline(h), m.setBindGroup(0, v), m.draw(4, 1, 0, 0), m.end(), (o = (l = s).popDebugGroup) === null || o === void 0 || o.call(l), u && (this._device.queue.submit([s.finish()]), s = null); } invertYPreMultiplyAlpha(e, t, i, r, s = !1, n = !1, a = 0, l = 0, o = 1, u = 0, h = 0, d = 0, f = 0, p, m) { var _, v, C, x, b, S; const M = d !== 0, R = p === void 0, [w, V] = this._getPipeline(r, M ? g4.InvertYPremultiplyAlphaWithOfst : g4.InvertYPremultiplyAlpha, { invertY: s, premultiplyAlpha: n }); a = Math.max(a, 0), R && (p = this._device.createCommandEncoder({})), (v = (_ = p).pushDebugGroup) === null || v === void 0 || v.call(_, `internal process texture - invertY=${s} premultiplyAlpha=${n}`); let k; if (zn._IsHardwareTexture(e) ? (k = e.underlyingResource, s && !n && o === 1 && a === 0 || (e = void 0)) : (k = e, e = void 0), !k) return; M && this._bufferManager.setRawData(this._ubCopyWithOfst, 0, new Float32Array([u, h, d, f]), 0, 4 * 4); const L = e, B = (C = L == null ? void 0 : L._copyInvertYTempTexture) !== null && C !== void 0 ? C : this.createTexture({ width: t, height: i, layers: 1 }, !1, !1, !1, !1, !1, r, 1, p, fo.CopySrc | fo.RenderAttachment | fo.TextureBinding, void 0, "TempTextureForCopyWithInvertY"), U = (x = L == null ? void 0 : L._copyInvertYRenderPassDescr) !== null && x !== void 0 ? x : { colorAttachments: [ { view: B.createView({ format: r, dimension: Ea.E2d, baseMipLevel: 0, mipLevelCount: 1, arrayLayerCount: 1, baseArrayLayer: 0 }), loadOp: au.Load, storeOp: _m.Store } ] }, K = p.beginRenderPass(U); let ee = M ? L == null ? void 0 : L._copyInvertYBindGroupWithOfst : L == null ? void 0 : L._copyInvertYBindGroup; if (!ee) { const Z = { layout: V, entries: [ { binding: 0, resource: k.createView({ format: r, dimension: Ea.E2d, baseMipLevel: l, mipLevelCount: 1, arrayLayerCount: o, baseArrayLayer: a }) } ] }; M && Z.entries.push({ binding: 1, resource: { buffer: this._ubCopyWithOfst } }), ee = this._device.createBindGroup(Z); } K.setPipeline(w), K.setBindGroup(0, ee), K.draw(4, 1, 0, 0), K.end(), p.copyTextureToTexture({ texture: B }, { texture: k, mipLevel: l, origin: { x: 0, y: 0, z: a } }, { width: t, height: i, depthOrArrayLayers: 1 }), L ? (L._copyInvertYTempTexture = B, L._copyInvertYRenderPassDescr = U, M ? 
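/* The temporary texture, render-pass descriptor and bind group are cached on the hardware texture so later invert-Y / premultiply passes can reuse them; when no hardware texture wrapper is available the temporary texture is queued for deferred destruction instead. */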
L._copyInvertYBindGroupWithOfst = ee : L._copyInvertYBindGroup = ee) : this._deferredReleaseTextures.push([B, null]), (S = (b = p).popDebugGroup) === null || S === void 0 || S.call(b), R && (this._device.queue.submit([p.finish()]), p = null); } copyWithInvertY(e, t, i, r) { var s, n, a, l; const o = r === void 0, [u, h] = this._getPipeline(t, g4.InvertYPremultiplyAlpha, { invertY: !0, premultiplyAlpha: !1 }); o && (r = this._device.createCommandEncoder({})), (n = (s = r).pushDebugGroup) === null || n === void 0 || n.call(s, "internal copy texture with invertY"); const d = r.beginRenderPass(i), f = this._device.createBindGroup({ layout: h, entries: [ { binding: 0, resource: e } ] }); d.setPipeline(u), d.setBindGroup(0, f), d.draw(4, 1, 0, 0), d.end(), (l = (a = r).popDebugGroup) === null || l === void 0 || l.call(a), o && (this._device.queue.submit([r.finish()]), r = null); } //------------------------------------------------------------------------------ // Creation //------------------------------------------------------------------------------ createTexture(e, t = !1, i = !1, r = !1, s = !1, n = !1, a = Re.RGBA8Unorm, l = 1, o, u = -1, h = 0, d) { l = zn.GetSample(l); const f = e.layers || 1, p = { width: e.width, height: e.height, depthOrArrayLayers: f }, m = kC[a] ? fo.RenderAttachment : 0, _ = zn.IsCompressedFormat(a), v = t ? zn.ComputeNumMipmapLevels(e.width, e.height) : 1, C = u >= 0 ? u : fo.CopySrc | fo.CopyDst | fo.TextureBinding; h |= t && !_ ? fo.CopySrc | m : 0, !_ && !n && (h |= m | fo.CopyDst); const x = this._device.createTexture({ label: `Texture${n ? "3D" : "2D"}_${d ? d + "_" : ""}${p.width}x${p.height}x${p.depthOrArrayLayers}_${t ? "wmips" : "womips"}_${a}_samples${l}`, size: p, dimension: n ? _g.E3d : _g.E2d, format: a, usage: C | h, sampleCount: l, mipLevelCount: v }); return zn.IsImageBitmap(e) && (this.updateTexture(e, x, e.width, e.height, f, a, 0, 0, r, s, 0, 0), t && i && this.generateMipmaps(x, a, v, 0, o)), x; } createCubeTexture(e, t = !1, i = !1, r = !1, s = !1, n = Re.RGBA8Unorm, a = 1, l, o = -1, u = 0, h) { a = zn.GetSample(a); const d = zn.IsImageBitmapArray(e) ? e[0].width : e.width, f = zn.IsImageBitmapArray(e) ? e[0].height : e.height, p = kC[n] ? fo.RenderAttachment : 0, m = zn.IsCompressedFormat(n), _ = t ? zn.ComputeNumMipmapLevels(d, f) : 1, v = o >= 0 ? o : fo.CopySrc | fo.CopyDst | fo.TextureBinding; u |= t && !m ? fo.CopySrc | p : 0, m || (u |= p | fo.CopyDst); const C = this._device.createTexture({ label: `TextureCube_${h ? h + "_" : ""}${d}x${f}x6_${t ? 
"wmips" : "womips"}_${n}_samples${a}`, size: { width: d, height: f, depthOrArrayLayers: 6 }, dimension: _g.E2d, format: n, usage: v | u, sampleCount: a, mipLevelCount: _ }); return zn.IsImageBitmapArray(e) && (this.updateCubeTextures(e, C, d, f, n, r, s, 0, 0), t && i && this.generateCubeMipmaps(C, n, _, l)), C; } generateCubeMipmaps(e, t, i, r) { var s, n, a, l; const o = r === void 0; o && (r = this._device.createCommandEncoder({})), (n = (s = r).pushDebugGroup) === null || n === void 0 || n.call(s, `create cube mipmaps - ${i} levels`); for (let u = 0; u < 6; ++u) this.generateMipmaps(e, t, i, u, r); (l = (a = r).popDebugGroup) === null || l === void 0 || l.call(a), o && (this._device.queue.submit([r.finish()]), r = null); } generateMipmaps(e, t, i, r = 0, s) { var n, a, l, o, u, h, d, f; const p = s === void 0, [m, _] = this._getPipeline(t); r = Math.max(r, 0), p && (s = this._device.createCommandEncoder({})), (a = (n = s).pushDebugGroup) === null || a === void 0 || a.call(n, `create mipmaps for face #${r} - ${i} levels`); let v; if (zn._IsHardwareTexture(e) ? (v = e.underlyingResource, e._mipmapGenRenderPassDescr = e._mipmapGenRenderPassDescr || [], e._mipmapGenBindGroup = e._mipmapGenBindGroup || []) : (v = e, e = void 0), !v) return; const C = e; for (let x = 1; x < i; ++x) { const b = (o = (l = C == null ? void 0 : C._mipmapGenRenderPassDescr[r]) === null || l === void 0 ? void 0 : l[x - 1]) !== null && o !== void 0 ? o : { colorAttachments: [ { view: v.createView({ format: t, dimension: Ea.E2d, baseMipLevel: x, mipLevelCount: 1, arrayLayerCount: 1, baseArrayLayer: r }), loadOp: au.Load, storeOp: _m.Store } ] }; C && (C._mipmapGenRenderPassDescr[r] = C._mipmapGenRenderPassDescr[r] || [], C._mipmapGenRenderPassDescr[r][x - 1] = b); const S = s.beginRenderPass(b), M = (h = (u = C == null ? void 0 : C._mipmapGenBindGroup[r]) === null || u === void 0 ? void 0 : u[x - 1]) !== null && h !== void 0 ? h : this._device.createBindGroup({ layout: _, entries: [ { binding: 0, resource: this._mipmapSampler }, { binding: 1, resource: v.createView({ format: t, dimension: Ea.E2d, baseMipLevel: x - 1, mipLevelCount: 1, arrayLayerCount: 1, baseArrayLayer: r }) } ] }); C && (C._mipmapGenBindGroup[r] = C._mipmapGenBindGroup[r] || [], C._mipmapGenBindGroup[r][x - 1] = M), S.setPipeline(m), S.setBindGroup(0, M), S.draw(4, 1, 0, 0), S.end(); } (f = (d = s).popDebugGroup) === null || f === void 0 || f.call(d), p && (this._device.queue.submit([s.finish()]), s = null); } createGPUTextureForInternalTexture(e, t, i, r, s) { e._hardwareTexture || (e._hardwareTexture = new VF()), t === void 0 && (t = e.width), i === void 0 && (i = e.height), r === void 0 && (r = e.depth); const n = e._hardwareTexture, a = ((s ?? 0) & 1) !== 0; n.format = zn.GetWebGPUTextureFormat(e.type, e.format, e._useSRGBBuffer), n.textureUsages = e._source === ts.RenderTarget || e.source === ts.MultiRenderTarget ? fo.TextureBinding | fo.CopySrc | fo.RenderAttachment : e._source === ts.DepthStencil ? fo.TextureBinding | fo.RenderAttachment : -1, n.textureAdditionalUsages = a ? fo.StorageBinding : 0; const l = e.generateMipMaps, o = r || 1; let u; if (e._maxLodLevel !== null ? u = e._maxLodLevel : u = l ? 
zn.ComputeNumMipmapLevels(t, i) : 1, e.isCube) { const h = this.createCubeTexture({ width: t, height: i }, e.generateMipMaps, e.generateMipMaps, e.invertY, !1, n.format, 1, this._commandEncoderForCreation, n.textureUsages, n.textureAdditionalUsages, e.label); n.set(h), n.createView({ format: zn.GetDepthFormatOnly(n.format), dimension: Ea.Cube, mipLevelCount: u, baseArrayLayer: 0, baseMipLevel: 0, arrayLayerCount: 6, aspect: zn.HasDepthAndStencilAspects(n.format) ? jA.DepthOnly : jA.All }, a); } else { const h = this.createTexture({ width: t, height: i, layers: o }, e.generateMipMaps, e.generateMipMaps, e.invertY, !1, e.is3D, n.format, 1, this._commandEncoderForCreation, n.textureUsages, n.textureAdditionalUsages, e.label); n.set(h), n.createView({ format: zn.GetDepthFormatOnly(n.format), dimension: e.is2DArray ? Ea.E2dArray : e.is3D ? _g.E3d : Ea.E2d, mipLevelCount: u, baseArrayLayer: 0, baseMipLevel: 0, arrayLayerCount: e.is3D ? 1 : o, aspect: zn.HasDepthAndStencilAspects(n.format) ? jA.DepthOnly : jA.All }, a); } return e.width = e.baseWidth = t, e.height = e.baseHeight = i, e.depth = e.baseDepth = r, this.createMSAATexture(e, e.samples), n; } createMSAATexture(e, t, i = !0, r = -1) { const s = e._hardwareTexture; if (i && (s == null || s.releaseMSAATexture()), !s || (t ?? 1) <= 1) return; const n = e.width, a = e.height, l = this.createTexture({ width: n, height: a, layers: 1 }, !1, !1, !1, !1, !1, s.format, t, this._commandEncoderForCreation, fo.RenderAttachment, 0, e.label ? "MSAA" + e.label : void 0); s.setMSAATexture(l, r); } //------------------------------------------------------------------------------ // Update //------------------------------------------------------------------------------ updateCubeTextures(e, t, i, r, s, n = !1, a = !1, l = 0, o = 0) { const u = [0, 3, 1, 4, 2, 5]; for (let h = 0; h < u.length; ++h) { const d = e[u[h]]; this.updateTexture(d, t, i, r, 1, s, h, 0, n, a, l, o); } } // TODO WEBGPU handle data source not being in the same format than the destination texture? updateTexture(e, t, i, r, s, n, a = 0, l = 0, o = !1, u = !1, h = 0, d = 0, f) { const p = zn._IsInternalTexture(t) ? t._hardwareTexture.underlyingResource : t, m = zn._GetBlockInformationFromFormat(n), _ = zn._IsInternalTexture(t) ? t._hardwareTexture : t, v = { texture: p, origin: { x: h, y: d, z: Math.max(a, 0) }, mipLevel: l, premultipliedAlpha: u }, C = { width: Math.ceil(i / m.width) * m.width, height: Math.ceil(r / m.height) * m.height, depthOrArrayLayers: s || 1 }; if (e.byteLength !== void 0) { e = e; const x = Math.ceil(i / m.width) * m.length; if (Math.ceil(x / 256) * 256 === x) { const S = this._device.createCommandEncoder({}), M = this._bufferManager.createRawBuffer(e.byteLength, ya.MapWrite | ya.CopySrc, !0, "TempBufferForUpdateTexture" + (p ? "_" + p.label : "")), R = M.getMappedRange(); new Uint8Array(R).set(e), M.unmap(), S.copyBufferToTexture({ buffer: M, offset: 0, bytesPerRow: x, rowsPerImage: r }, v, C), this._device.queue.submit([S.finish()]), this._bufferManager.releaseBuffer(M); } else this._device.queue.writeTexture(v, e, { offset: 0, bytesPerRow: x, rowsPerImage: r }, C); if (o || u) if (zn._IsInternalTexture(t)) { const S = h === 0 && d === 0 && i === t.width && r === t.height; this.invertYPreMultiplyAlpha(_, t.width, t.height, n, o, u, a, l, s || 1, h, d, S ? 0 : i, S ? 
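/* When the update covers the whole texture, the invert-Y pass runs without an offset rectangle (width/height passed as 0); otherwise only the updated sub-rectangle is processed. */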
0 : r, void 0, f); } else throw "updateTexture: Can't process the texture data because a GPUTexture was provided instead of an InternalTexture!"; } else if (e = e, o) if (v.premultipliedAlpha = !1, zn._IsInternalTexture(t) && h === 0 && d === 0 && i === t.width && r === t.height) this._device.queue.copyExternalImageToTexture({ source: e }, v, C), this.invertYPreMultiplyAlpha(_, i, r, n, o, u, a, l, s || 1, 0, 0, 0, 0, void 0, f); else { const x = this._device.createCommandEncoder({}), b = this.createTexture({ width: i, height: r, layers: 1 }, !1, !1, !1, !1, !1, n, 1, x, fo.CopySrc | fo.TextureBinding, void 0, "TempTextureForUpdateTexture"); this._deferredReleaseTextures.push([b, null]), C.depthOrArrayLayers = 1, this._device.queue.copyExternalImageToTexture({ source: e }, { texture: b }, C), C.depthOrArrayLayers = s || 1, this.invertYPreMultiplyAlpha(b, i, r, n, o, u, a, l, s || 1, 0, 0, 0, 0, x, f), x.copyTextureToTexture({ texture: b }, v, C), this._device.queue.submit([x.finish()]); } else this._device.queue.copyExternalImageToTexture({ source: e }, v, C); } readPixels(e, t, i, r, s, n, a = 0, l = 0, o = null, u = !1) { const h = zn._GetBlockInformationFromFormat(n), d = Math.ceil(r / h.width) * h.length, f = Math.ceil(d / 256) * 256, p = f * s, m = this._bufferManager.createRawBuffer(p, ya.MapRead | ya.CopyDst, void 0, "TempBufferForReadPixels" + (e.label ? "_" + e.label : "")), _ = this._device.createCommandEncoder({}); return _.copyTextureToBuffer({ texture: e, mipLevel: l, origin: { x: t, y: i, z: Math.max(a, 0) } }, { buffer: m, offset: 0, bytesPerRow: f }, { width: r, height: s, depthOrArrayLayers: 1 }), this._device.queue.submit([_.finish()]), this._bufferManager.readDataFromBuffer(m, p, r, s, d, f, zn.GetTextureTypeFromFormat(n), 0, o, !0, u); } //------------------------------------------------------------------------------ // Dispose //------------------------------------------------------------------------------ releaseTexture(e) { if (zn._IsInternalTexture(e)) { const t = e._hardwareTexture, i = e._irradianceTexture; this._deferredReleaseTextures.push([t, i]); } else this._deferredReleaseTextures.push([e, null]); } destroyDeferredTextures() { for (let e = 0; e < this._deferredReleaseTextures.length; ++e) { const [t, i] = this._deferredReleaseTextures[e]; t && (zn._IsHardwareTexture(t) ? t.release() : t.destroy()), i == null || i.dispose(); } this._deferredReleaseTextures.length = 0; } } class Kie extends JA { constructor(e, t = 0) { super(), this.capacity = t, this._buffer = e; } get underlyingResource() { return this._buffer; } } class fB { static _IsGPUBuffer(e) { return e.underlyingResource === void 0; } static _FlagsToString(e, t = "") { let i = t; for (let r = 0; r <= 9; ++r) e & 1 << r && (i && (i += "_"), i += ya[1 << r]); return i; } constructor(e, t) { this._deferredReleaseBuffers = [], this._engine = e, this._device = t; } createRawBuffer(e, t, i = !1, r) { const s = e.byteLength !== void 0 ? e.byteLength + 3 & -4 : e + 3 & -4, n = { label: fB._FlagsToString(t, r ?? "Buffer") + "_size" + s, mappedAtCreation: i, size: s, usage: t }; return this._device.createBuffer(n); } createBuffer(e, t, i) { const r = e.byteLength !== void 0, s = this.createRawBuffer(e, t, void 0, i), n = new Kie(s); return n.references = 1, n.capacity = r ? 
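/* createBuffer accepts either raw data or a byte size; capacity records the requested size and, when data is provided, it is uploaded immediately through setSubData. */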
e.byteLength : e, r && this.setSubData(n, 0, e), n; } setRawData(e, t, i, r, s) { this._device.queue.writeBuffer(e, t, i.buffer, r, s); } setSubData(e, t, i, r = 0, s = 0) { const n = e.underlyingResource; s = s || i.byteLength, s = Math.min(s, e.capacity - t); let a = i.byteOffset + r, l = a + s; const o = s + 3 & -4; if (o !== s) { const d = new Uint8Array(i.buffer.slice(a, l)); i = new Uint8Array(o), i.set(d), r = 0, a = 0, l = o, s = o; } const u = 1024 * 1024 * 15; let h = 0; for (; l - (a + h) > u; ) this._device.queue.writeBuffer(n, t + h, i.buffer, a + h, u), h += u; this._device.queue.writeBuffer(n, t + h, i.buffer, a + h, s - h); } _getHalfFloatAsFloatRGBAArrayBuffer(e, t, i) { i || (i = new Float32Array(e)); const r = new Uint16Array(t); for (; e--; ) i[e] = kA(r[e]); return i; } readDataFromBuffer(e, t, i, r, s, n, a = 0, l = 0, o = null, u = !0, h = !1) { const d = a === 1 ? 2 : a === 2 ? 1 : 0; return new Promise((f, p) => { e.mapAsync(c5.Read, l, t).then(() => { const m = e.getMappedRange(l, t); let _ = o; if (h) _ === null ? _ = nB(a, t, !0, m) : _ = nB(a, _.buffer, void 0, m); else if (_ === null) switch (d) { case 0: _ = new Uint8Array(t), _.set(new Uint8Array(m)); break; case 1: _ = this._getHalfFloatAsFloatRGBAArrayBuffer(t / 2, m); break; case 2: _ = new Float32Array(t / 4), _.set(new Float32Array(m)); break; } else switch (d) { case 0: _ = new Uint8Array(_.buffer), _.set(new Uint8Array(m)); break; case 1: _ = this._getHalfFloatAsFloatRGBAArrayBuffer(t / 2, m, o); break; case 2: _ = new Float32Array(_.buffer), _.set(new Float32Array(m)); break; } if (s !== n) { d === 1 && !h && (s *= 2, n *= 2); const v = new Uint8Array(_.buffer); let C = s, x = 0; for (let b = 1; b < r; ++b) { x = b * n; for (let S = 0; S < s; ++S) v[C++] = v[x++]; } d !== 0 && !h ? _ = new Float32Array(v.buffer, 0, C / 4) : _ = new Uint8Array(v.buffer, 0, C); } e.unmap(), u && this.releaseBuffer(e), f(_); }, (m) => { this._engine.isDisposed ? f(new Uint8Array()) : p(m); }); }); } releaseBuffer(e) { return fB._IsGPUBuffer(e) ? (this._deferredReleaseBuffers.push(e), !0) : (e.references--, e.references === 0 ? (this._deferredReleaseBuffers.push(e.underlyingResource), !0) : !1); } destroyDeferredBuffers() { for (let e = 0; e < this._deferredReleaseBuffers.length; ++e) this._deferredReleaseBuffers[e].destroy(); this._deferredReleaseBuffers.length = 0; } } const Ipe = [ 0, 0, 3, 7, 0, 2, 6, 2, 4, 1, 5, 3, 1 // TEXTURE_LINEAR_NEAREST ], Dpe = [ 0, 64, 32, 96, 16, 80, 48, 112, 8 // ALWAYS ], Ope = [ 0, 128, 128, 0, 0, 0, 0, 128, 0, 0, 0, 0, 128 // TEXTURE_LINEAR_NEAREST ]; class e5 { constructor(e) { this._samplers = {}, this._device = e, this.disabled = !1; } static GetSamplerHashCode(e) { var t, i, r; const s = e._cachedAnisotropicFilteringLevel && e._cachedAnisotropicFilteringLevel > 1 ? 4 : 1; return Ipe[e.samplingMode] + Dpe[(e._comparisonFunction || 514) - 512 + 1] + Ope[e.samplingMode] + // handle the lodMinClamp = lodMaxClamp = 0 case when no filter used for mip mapping (((t = e._cachedWrapU) !== null && t !== void 0 ? t : 1) << 8) + (((i = e._cachedWrapV) !== null && i !== void 0 ? i : 1) << 10) + (((r = e._cachedWrapR) !== null && r !== void 0 ? r : 1) << 12) + ((e.useMipMaps ? 1 : 0) << 14) + // need to factor this in because _getSamplerFilterDescriptor depends on samplingMode AND useMipMaps! 
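/* Anisotropic filtering contributes the highest part of the sampler hash: 4 when the cached anisotropy level is above 1, otherwise 1, shifted left by 15. */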
(s << 15); } static _GetSamplerFilterDescriptor(e, t) { let i, r, s, n, a; const l = e.useMipMaps; switch (e.samplingMode) { case 11: i = Xs.Linear, r = Xs.Linear, s = Xs.Nearest, l || (n = a = 0); break; case 3: case 3: i = Xs.Linear, r = Xs.Linear, l ? s = Xs.Linear : (s = Xs.Nearest, n = a = 0); break; case 8: i = Xs.Nearest, r = Xs.Nearest, l ? s = Xs.Linear : (s = Xs.Nearest, n = a = 0); break; case 4: i = Xs.Nearest, r = Xs.Nearest, s = Xs.Nearest, l || (n = a = 0); break; case 5: i = Xs.Nearest, r = Xs.Linear, s = Xs.Nearest, l || (n = a = 0); break; case 6: i = Xs.Nearest, r = Xs.Linear, l ? s = Xs.Linear : (s = Xs.Nearest, n = a = 0); break; case 7: i = Xs.Nearest, r = Xs.Linear, s = Xs.Nearest, n = a = 0; break; case 1: case 1: i = Xs.Nearest, r = Xs.Nearest, s = Xs.Nearest, n = a = 0; break; case 9: i = Xs.Linear, r = Xs.Nearest, s = Xs.Nearest, l || (n = a = 0); break; case 10: i = Xs.Linear, r = Xs.Nearest, l ? s = Xs.Linear : (s = Xs.Nearest, n = a = 0); break; case 2: case 2: i = Xs.Linear, r = Xs.Linear, s = Xs.Nearest, n = a = 0; break; case 12: i = Xs.Linear, r = Xs.Nearest, s = Xs.Nearest, n = a = 0; break; default: i = Xs.Nearest, r = Xs.Nearest, s = Xs.Nearest, n = a = 0; break; } return t > 1 && (n !== 0 || a !== 0) && s !== Xs.Nearest ? { magFilter: Xs.Linear, minFilter: Xs.Linear, mipmapFilter: Xs.Linear, anisotropyEnabled: !0 } : { magFilter: i, minFilter: r, mipmapFilter: s, lodMinClamp: n, lodMaxClamp: a }; } static _GetWrappingMode(e) { switch (e) { case 1: return XR.Repeat; case 0: return XR.ClampToEdge; case 2: return XR.MirrorRepeat; } return XR.Repeat; } static _GetSamplerWrappingDescriptor(e) { return { addressModeU: this._GetWrappingMode(e._cachedWrapU), addressModeV: this._GetWrappingMode(e._cachedWrapV), addressModeW: this._GetWrappingMode(e._cachedWrapR) }; } static _GetSamplerDescriptor(e, t) { const i = e.useMipMaps && e._cachedAnisotropicFilteringLevel && e._cachedAnisotropicFilteringLevel > 1 ? 4 : 1, r = this._GetSamplerFilterDescriptor(e, i); return Object.assign(Object.assign(Object.assign({ label: t }, r), this._GetSamplerWrappingDescriptor(e)), { compare: e._comparisonFunction ? e5.GetCompareFunction(e._comparisonFunction) : void 0, maxAnisotropy: r.anisotropyEnabled ? i : 1 }); } static GetCompareFunction(e) { switch (e) { case 519: return yh.Always; case 514: return yh.Equal; case 516: return yh.Greater; case 518: return yh.GreaterEqual; case 513: return yh.Less; case 515: return yh.LessEqual; case 512: return yh.Never; case 517: return yh.NotEqual; default: return yh.Less; } } getSampler(e, t = !1, i = 0, r) { if (this.disabled) return this._device.createSampler(e5._GetSamplerDescriptor(e, r)); t ? i = 0 : i === 0 && (i = e5.GetSamplerHashCode(e)); let s = t ? 
void 0 : this._samplers[i]; return s || (s = this._device.createSampler(e5._GetSamplerDescriptor(e, r)), t || (this._samplers[i] = s)), s; } } var mc; (function(c) { c[c.StencilReadMask = 0] = "StencilReadMask", c[c.StencilWriteMask = 1] = "StencilWriteMask", c[c.DepthBias = 2] = "DepthBias", c[c.DepthBiasSlopeScale = 3] = "DepthBiasSlopeScale", c[c.DepthStencilState = 4] = "DepthStencilState", c[c.MRTAttachments1 = 5] = "MRTAttachments1", c[c.MRTAttachments2 = 6] = "MRTAttachments2", c[c.RasterizationState = 7] = "RasterizationState", c[c.ColorStates = 8] = "ColorStates", c[c.ShaderStage = 9] = "ShaderStage", c[c.TextureStage = 10] = "TextureStage", c[c.VertexState = 11] = "VertexState", c[c.NumStates = 12] = "NumStates"; })(mc || (mc = {})); const PF = { 0: 1, 1: 2, 768: 3, 769: 4, 770: 5, 771: 6, 772: 7, 773: 8, 774: 9, 775: 10, 776: 11, 32769: 12, 32770: 13, 32771: 12, 32772: 13 // OneMinusBlendColor (alpha) }, GD = { 0: 0, 7680: 1, 7681: 2, 7682: 3, 7683: 4, 5386: 5, 34055: 6, 34056: 7 // DECR_WRAP }, wpe = { [Y.PositionKind]: !0, [Y.NormalKind]: !0, [Y.TangentKind]: !0, [Y.UVKind]: !0, [Y.UV2Kind]: !0, [Y.UV3Kind]: !0, [Y.UV4Kind]: !0, [Y.UV5Kind]: !0, [Y.UV6Kind]: !0, [Y.ColorKind]: !0, [Y.ColorInstanceKind]: !0, [Y.MatricesIndicesKind]: !0, [Y.MatricesWeightsKind]: !0, [Y.MatricesIndicesExtraKind]: !0, [Y.MatricesWeightsExtraKind]: !0 }; class po { static _IsSignedType(e) { switch (e) { case Y.BYTE: case Y.SHORT: case Y.INT: case Y.FLOAT: return !0; case Y.UNSIGNED_BYTE: case Y.UNSIGNED_SHORT: case Y.UNSIGNED_INT: return !1; default: throw new Error(`Invalid type '${e}'`); } } constructor(e, t) { this.mrtTextureCount = 0, this._device = e, this._useTextureStage = !0, this._states = new Array(30), this._statesLength = 0, this._stateDirtyLowestIndex = 0, this._emptyVertexBuffer = t, this._mrtFormats = [], this._parameter = { token: void 0, pipeline: null }, this.disabled = !1, this.vertexBuffers = [], this._kMaxVertexBufferStride = e.limits.maxVertexBufferArrayStride || 2048, this.reset(); } reset() { this._isDirty = !0, this.vertexBuffers.length = 0, this.setAlphaToCoverage(!1), this.resetDepthCullingState(), this.setClampDepth(!1), this.setDepthBias(0), this._webgpuColorFormat = [Re.BGRA8Unorm], this.setColorFormat(Re.BGRA8Unorm), this.setMRT([]), this.setAlphaBlendEnabled(!1), this.setAlphaBlendFactors([null, null, null, null], [null, null]), this.setWriteMask(15), this.setDepthStencilFormat(Re.Depth24PlusStencil8), this.setStencilEnabled(!1), this.resetStencilState(), this.setBuffers(null, null, null), this._setTextureState(0); } get colorFormats() { return this._mrtAttachments1 > 0 ? 
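/* colorFormats reports the per-attachment formats when MRT attachments are active and falls back to the single cached color format otherwise. */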
this._mrtFormats : this._webgpuColorFormat; } getRenderPipeline(e, t, i, r = 0) { if (i = zn.GetSample(i), this.disabled) { const n = po._GetTopology(e); return this._setVertexState(t), this._setTextureState(r), this._parameter.pipeline = this._createRenderPipeline(t, n, i), po.NumCacheMiss++, po._NumPipelineCreationCurrentFrame++, this._parameter.pipeline; } if (this._setShaderStage(t.uniqueId), this._setRasterizationState(e, i), this._setColorStates(), this._setDepthStencilState(), this._setVertexState(t), this._setTextureState(r), this.lastStateDirtyLowestIndex = this._stateDirtyLowestIndex, !this._isDirty && this._parameter.pipeline) return this._stateDirtyLowestIndex = this._statesLength, po.NumCacheHitWithoutHash++, this._parameter.pipeline; if (this._getRenderPipeline(this._parameter), this._isDirty = !1, this._stateDirtyLowestIndex = this._statesLength, this._parameter.pipeline) return po.NumCacheHitWithHash++, this._parameter.pipeline; const s = po._GetTopology(e); return this._parameter.pipeline = this._createRenderPipeline(t, s, i), this._setRenderPipeline(this._parameter), po.NumCacheMiss++, po._NumPipelineCreationCurrentFrame++, this._parameter.pipeline; } endFrame() { po.NumPipelineCreationLastFrame = po._NumPipelineCreationCurrentFrame, po._NumPipelineCreationCurrentFrame = 0; } setAlphaToCoverage(e) { this._alphaToCoverageEnabled = e; } setFrontFace(e) { this._frontFace = e; } setCullEnabled(e) { this._cullEnabled = e; } setCullFace(e) { this._cullFace = e; } setClampDepth(e) { this._clampDepth = e; } resetDepthCullingState() { this.setDepthCullingState(!1, 2, 1, 0, 0, !0, !0, 519); } setDepthCullingState(e, t, i, r, s, n, a, l) { this._depthWriteEnabled = a, this._depthTestEnabled = n, this._depthCompare = (l ?? 519) - 512, this._cullFace = i, this._cullEnabled = e, this._frontFace = t, this.setDepthBiasSlopeScale(r), this.setDepthBias(s); } setDepthBias(e) { this._depthBias !== e && (this._depthBias = e, this._states[mc.DepthBias] = e, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.DepthBias)); } /*public setDepthBiasClamp(depthBiasClamp: number): void { if (this._depthBiasClamp !== depthBiasClamp) { this._depthBiasClamp = depthBiasClamp; this._states[StatePosition.DepthBiasClamp] = depthBiasClamp.toString(); this._isDirty = true; } }*/ setDepthBiasSlopeScale(e) { this._depthBiasSlopeScale !== e && (this._depthBiasSlopeScale = e, this._states[mc.DepthBiasSlopeScale] = e, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.DepthBiasSlopeScale)); } setColorFormat(e) { this._webgpuColorFormat[0] = e, this._colorFormat = kC[e ?? ""]; } setMRTAttachments(e) { this.mrtAttachments = e; let t = 0; for (let i = 0; i < e.length; ++i) e[i] !== 0 && (t += 1 << i); this._mrtEnabledMask !== t && (this._mrtEnabledMask = t, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.MRTAttachments1)); } setMRT(e, t) { var i, r; if (t = t ?? e.length, t > 10) throw "Can't handle more than 10 attachments for a MRT in cache render pipeline!"; this.mrtTextureArray = e, this.mrtTextureCount = t, this._mrtEnabledMask = 65535; const s = [0, 0]; let n = 0, a = 0, l = 0; for (let o = 0; o < t; ++o) { const u = e[o], h = u == null ? void 0 : u._hardwareTexture; this._mrtFormats[l] = (i = h == null ? void 0 : h.format) !== null && i !== void 0 ? i : this._webgpuColorFormat[0], s[n] += kC[(r = this._mrtFormats[l]) !== null && r !== void 0 ? 
r : ""] << a, a += 6, l++, a >= 32 && (a = 0, n++); } this._mrtFormats.length = l, (this._mrtAttachments1 !== s[0] || this._mrtAttachments2 !== s[1]) && (this._mrtAttachments1 = s[0], this._mrtAttachments2 = s[1], this._states[mc.MRTAttachments1] = s[0], this._states[mc.MRTAttachments2] = s[1], this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.MRTAttachments1)); } setAlphaBlendEnabled(e) { this._alphaBlendEnabled = e; } setAlphaBlendFactors(e, t) { this._alphaBlendFuncParams = e, this._alphaBlendEqParams = t; } setWriteMask(e) { this._writeMask = e; } setDepthStencilFormat(e) { this._webgpuDepthStencilFormat = e, this._depthStencilFormat = e === void 0 ? 0 : kC[e]; } setDepthTestEnabled(e) { this._depthTestEnabled = e; } setDepthWriteEnabled(e) { this._depthWriteEnabled = e; } setDepthCompare(e) { this._depthCompare = (e ?? 519) - 512; } setStencilEnabled(e) { this._stencilEnabled = e; } setStencilCompare(e) { this._stencilFrontCompare = (e ?? 519) - 512; } setStencilDepthFailOp(e) { this._stencilFrontDepthFailOp = e === null ? 1 : GD[e]; } setStencilPassOp(e) { this._stencilFrontPassOp = e === null ? 2 : GD[e]; } setStencilFailOp(e) { this._stencilFrontFailOp = e === null ? 1 : GD[e]; } setStencilReadMask(e) { this._stencilReadMask !== e && (this._stencilReadMask = e, this._states[mc.StencilReadMask] = e, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.StencilReadMask)); } setStencilWriteMask(e) { this._stencilWriteMask !== e && (this._stencilWriteMask = e, this._states[mc.StencilWriteMask] = e, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.StencilWriteMask)); } resetStencilState() { this.setStencilState(!1, 519, 7680, 7681, 7680, 255, 255); } setStencilState(e, t, i, r, s, n, a) { this._stencilEnabled = e, this._stencilFrontCompare = (t ?? 519) - 512, this._stencilFrontDepthFailOp = i === null ? 1 : GD[i], this._stencilFrontPassOp = r === null ? 2 : GD[r], this._stencilFrontFailOp = s === null ? 
1 : GD[s], this.setStencilReadMask(n), this.setStencilWriteMask(a); } setBuffers(e, t, i) { this._vertexBuffers = e, this._overrideVertexBuffers = i, this._indexBuffer = t; } static _GetTopology(e) { switch (e) { case 0: return B_.TriangleList; case 2: return B_.PointList; case 1: return B_.LineList; case 3: return B_.PointList; case 4: return B_.LineList; case 5: throw "LineLoop is an unsupported fillmode in WebGPU"; case 6: return B_.LineStrip; case 7: return B_.TriangleStrip; case 8: throw "TriangleFan is an unsupported fillmode in WebGPU"; default: return B_.TriangleList; } } static _GetAphaBlendOperation(e) { switch (e) { case 32774: return ZE.Add; case 32778: return ZE.Subtract; case 32779: return ZE.ReverseSubtract; case 32775: return ZE.Min; case 32776: return ZE.Max; default: return ZE.Add; } } static _GetAphaBlendFactor(e) { switch (e) { case 0: return rf.Zero; case 1: return rf.One; case 768: return rf.Src; case 769: return rf.OneMinusSrc; case 770: return rf.SrcAlpha; case 771: return rf.OneMinusSrcAlpha; case 772: return rf.DstAlpha; case 773: return rf.OneMinusDstAlpha; case 774: return rf.Dst; case 775: return rf.OneMinusDst; case 776: return rf.SrcAlphaSaturated; case 32769: return rf.Constant; case 32770: return rf.OneMinusConstant; case 32771: return rf.Constant; case 32772: return rf.OneMinusConstant; default: return rf.One; } } static _GetCompareFunction(e) { switch (e) { case 0: return yh.Never; case 1: return yh.Less; case 2: return yh.Equal; case 3: return yh.LessEqual; case 4: return yh.Greater; case 5: return yh.NotEqual; case 6: return yh.GreaterEqual; case 7: return yh.Always; } return yh.Never; } static _GetStencilOpFunction(e) { switch (e) { case 0: return y4.Zero; case 1: return y4.Keep; case 2: return y4.Replace; case 3: return y4.IncrementClamp; case 4: return y4.DecrementClamp; case 5: return y4.Invert; case 6: return y4.IncrementWrap; case 7: return y4.DecrementWrap; } return y4.Keep; } static _GetVertexInputDescriptorFormat(e) { const t = e.type, i = e.normalized, r = e.getSize(); switch (t) { case Y.BYTE: switch (r) { case 1: case 2: return i ? So.Snorm8x2 : So.Sint8x2; case 3: case 4: return i ? So.Snorm8x4 : So.Sint8x4; } break; case Y.UNSIGNED_BYTE: switch (r) { case 1: case 2: return i ? So.Unorm8x2 : So.Uint8x2; case 3: case 4: return i ? So.Unorm8x4 : So.Uint8x4; } break; case Y.SHORT: switch (r) { case 1: case 2: return i ? So.Snorm16x2 : So.Sint16x2; case 3: case 4: return i ? So.Snorm16x4 : So.Sint16x4; } break; case Y.UNSIGNED_SHORT: switch (r) { case 1: case 2: return i ? So.Unorm16x2 : So.Uint16x2; case 3: case 4: return i ? So.Unorm16x4 : So.Uint16x4; } break; case Y.INT: switch (r) { case 1: return So.Sint32; case 2: return So.Sint32x2; case 3: return So.Sint32x3; case 4: return So.Sint32x4; } break; case Y.UNSIGNED_INT: switch (r) { case 1: return So.Uint32; case 2: return So.Uint32x2; case 3: return So.Uint32x3; case 4: return So.Uint32x4; } break; case Y.FLOAT: switch (r) { case 1: return So.Float32; case 2: return So.Float32x2; case 3: return So.Float32x3; case 4: return So.Float32x4; } break; } throw new Error(`Invalid Format '${e.getKind()}' - type=${t}, normalized=${i}, size=${r}`); } _getAphaBlendState() { return this._alphaBlendEnabled ? { srcFactor: po._GetAphaBlendFactor(this._alphaBlendFuncParams[2]), dstFactor: po._GetAphaBlendFactor(this._alphaBlendFuncParams[3]), operation: po._GetAphaBlendOperation(this._alphaBlendEqParams[1]) } : null; } _getColorBlendState() { return this._alphaBlendEnabled ? 
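/* The color blend component mirrors the alpha one above but reads the RGB source/destination factors (indices 0 and 1) and the first blend equation from the cached parameters; both return null while alpha blending is disabled. */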
{ srcFactor: po._GetAphaBlendFactor(this._alphaBlendFuncParams[0]), dstFactor: po._GetAphaBlendFactor(this._alphaBlendFuncParams[1]), operation: po._GetAphaBlendOperation(this._alphaBlendEqParams[0]) } : null; } _setShaderStage(e) { this._shaderId !== e && (this._shaderId = e, this._states[mc.ShaderStage] = e, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.ShaderStage)); } _setRasterizationState(e, t) { const i = this._frontFace, r = this._cullEnabled ? this._cullFace : 0, s = this._clampDepth ? 1 : 0, n = this._alphaToCoverageEnabled ? 1 : 0, a = i - 1 + (r << 1) + (s << 3) + (n << 4) + (e << 5) + (t << 8); this._rasterizationState !== a && (this._rasterizationState = a, this._states[mc.RasterizationState] = this._rasterizationState, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.RasterizationState)); } _setColorStates() { let e = ((this._writeMask ? 1 : 0) << 22) + (this._colorFormat << 23) + ((this._depthWriteEnabled ? 1 : 0) << 29); this._alphaBlendEnabled && (e += ((this._alphaBlendFuncParams[0] === null ? 2 : PF[this._alphaBlendFuncParams[0]]) << 0) + ((this._alphaBlendFuncParams[1] === null ? 2 : PF[this._alphaBlendFuncParams[1]]) << 4) + ((this._alphaBlendFuncParams[2] === null ? 2 : PF[this._alphaBlendFuncParams[2]]) << 8) + ((this._alphaBlendFuncParams[3] === null ? 2 : PF[this._alphaBlendFuncParams[3]]) << 12) + ((this._alphaBlendEqParams[0] === null ? 1 : this._alphaBlendEqParams[0] - 32773) << 16) + ((this._alphaBlendEqParams[1] === null ? 1 : this._alphaBlendEqParams[1] - 32773) << 19)), e !== this._colorStates && (this._colorStates = e, this._states[mc.ColorStates] = this._colorStates, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.ColorStates)); } _setDepthStencilState() { const e = this._stencilEnabled ? this._stencilFrontCompare + (this._stencilFrontDepthFailOp << 3) + (this._stencilFrontPassOp << 6) + (this._stencilFrontFailOp << 9) : 591, t = this._depthStencilFormat + ((this._depthTestEnabled ? this._depthCompare : 7) << 6) + (e << 10); this._depthStencilState !== t && (this._depthStencilState = t, this._states[mc.DepthStencilState] = this._depthStencilState, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.DepthStencilState)); } _setVertexState(e) { var t, i; const r = this._statesLength; let s = mc.VertexState; const n = e._pipelineContext, a = n.shaderProcessingContext.attributeNamesFromEffect, l = n.shaderProcessingContext.attributeLocationsFromEffect; let o, u = 0; for (let h = 0; h < a.length; h++) { const d = l[h]; let f = (t = this._overrideVertexBuffers && this._overrideVertexBuffers[a[h]]) !== null && t !== void 0 ? t : this._vertexBuffers[a[h]]; f || (f = this._emptyVertexBuffer); const p = (i = f.effectiveBuffer) === null || i === void 0 ? void 0 : i.underlyingResource; if (f._validOffsetRange === void 0) { const _ = f.effectiveByteOffset, v = f.getSize(!0), C = f.effectiveByteStride; f._validOffsetRange = _ + v <= this._kMaxVertexBufferStride && C === 0 || C !== 0 && _ + v <= C; } o && o === p && f._validOffsetRange || (this.vertexBuffers[u++] = f, o = f._validOffsetRange ? 
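/* Consecutive attributes backed by the same GPU buffer with a valid offset range share a single vertex-buffer slot; each attribute still contributes hashCode + (location << 7) to the pipeline state key. */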
p : null); const m = f.hashCode + (d << 7); this._isDirty = this._isDirty || this._states[s] !== m, this._states[s++] = m; } this.vertexBuffers.length = u, this._statesLength = s, this._isDirty = this._isDirty || s !== r, this._isDirty && (this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.VertexState)); } _setTextureState(e) { this._textureState !== e && (this._textureState = e, this._states[mc.TextureStage] = this._textureState, this._isDirty = !0, this._stateDirtyLowestIndex = Math.min(this._stateDirtyLowestIndex, mc.TextureStage)); } _createPipelineLayout(e) { if (this._useTextureStage) return this._createPipelineLayoutWithTextureStage(e); const t = [], i = e.shaderProcessingContext.bindGroupLayoutEntries; for (let r = 0; r < i.length; r++) { const s = i[r]; t[r] = this._device.createBindGroupLayout({ entries: s }); } return e.bindGroupLayouts[0] = t, this._device.createPipelineLayout({ bindGroupLayouts: t }); } _createPipelineLayoutWithTextureStage(e) { var t; const i = e.shaderProcessingContext, r = i.bindGroupLayoutEntries; let s = 1; for (let a = 0; a < r.length; a++) { const l = r[a]; for (let o = 0; o < l.length; o++) { const u = r[a][o]; if (u.texture) { const h = i.bindGroupLayoutEntryInfo[a][u.binding].name, d = i.availableTextures[h], f = d.autoBindSampler ? i.availableSamplers[h + Io.AutoSamplerSuffix] : null; let p = d.sampleType, m = (t = f == null ? void 0 : f.type) !== null && t !== void 0 ? t : AT.Filtering; if (this._textureState & s && p !== K_.Depth && (d.autoBindSampler && (m = AT.NonFiltering), p = K_.UnfilterableFloat), u.texture.sampleType = p, f) { const _ = i.bindGroupLayoutEntryInfo[f.binding.groupIndex][f.binding.bindingIndex].index; r[f.binding.groupIndex][_].sampler.type = m; } s = s << 1; } } } const n = []; for (let a = 0; a < r.length; ++a) n[a] = this._device.createBindGroupLayout({ entries: r[a] }); return e.bindGroupLayouts[this._textureState] = n, this._device.createPipelineLayout({ bindGroupLayouts: n }); } _getVertexInputDescriptor(e) { var t, i; const r = [], s = e._pipelineContext, n = s.shaderProcessingContext.attributeNamesFromEffect, a = s.shaderProcessingContext.attributeLocationsFromEffect; let l, o; for (let u = 0; u < n.length; u++) { const h = a[u]; let d = (t = this._overrideVertexBuffers && this._overrideVertexBuffers[n[u]]) !== null && t !== void 0 ? t : this._vertexBuffers[n[u]]; d || (d = this._emptyVertexBuffer); let f = (i = d.effectiveBuffer) === null || i === void 0 ? void 0 : i.underlyingResource, p = d.effectiveByteOffset; const m = !d._validOffsetRange; if (!(l && o && l === f) || m) { const _ = { arrayStride: d.effectiveByteStride, stepMode: d.getIsInstanced() ? aL.Instance : aL.Vertex, attributes: [] }; r.push(_), o = _.attributes, m && (p = 0, f = null); } o.push({ shaderLocation: h, offset: p, format: po._GetVertexInputDescriptorFormat(d) }), l = f; } return r; } _processNonFloatVertexBuffers(e, t) { const i = e.engine._getShaderProcessor(e.shaderProcessingContext.shaderLanguage); let r = !1; for (const s in this._vertexBuffers) { const n = this._vertexBuffers[s]; if (!n || !wpe[s]) continue; const a = n.normalized ? 
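/* Non-normalized integer vertex buffers force the shader to be reprocessed so attribute declarations match the integer type (a negative component count marks a signed type); normalized buffers keep behaving as floats. */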
Y.FLOAT : n.type, l = e.vertexBufferKindToType[s]; (a !== Y.FLOAT && l === void 0 || l !== void 0 && l !== a) && (r = !0, e.vertexBufferKindToType[s] = a, a !== Y.FLOAT && (i.vertexBufferKindToNumberOfComponents[s] = Y.DeduceStride(s), po._IsSignedType(a) && (i.vertexBufferKindToNumberOfComponents[s] *= -1))); } r && t._processShaderCode(i, !0); } _createRenderPipeline(e, t, i) { var r, s, n; const a = e._pipelineContext, l = this._getVertexInputDescriptor(e), o = this._createPipelineLayout(a), u = [], h = this._getAphaBlendState(), d = this._getColorBlendState(); if (this._processNonFloatVertexBuffers(a, e), this._mrtAttachments1 > 0) for (let _ = 0; _ < this._mrtFormats.length; ++_) { const v = this._mrtFormats[_]; if (v) { const C = { format: v, writeMask: this._mrtEnabledMask & 1 << _ ? this._writeMask : 0 }; h && d && (C.blend = { alpha: h, color: d }), u.push(C); } else u.push(null); } else if (this._webgpuColorFormat[0]) { const _ = { format: this._webgpuColorFormat[0], writeMask: this._writeMask }; h && d && (_.blend = { alpha: h, color: d }), u.push(_); } else u.push(null); const f = { compare: po._GetCompareFunction( this._stencilEnabled ? this._stencilFrontCompare : 7 /* ALWAYS */ ), depthFailOp: po._GetStencilOpFunction( this._stencilEnabled ? this._stencilFrontDepthFailOp : 1 /* KEEP */ ), failOp: po._GetStencilOpFunction( this._stencilEnabled ? this._stencilFrontFailOp : 1 /* KEEP */ ), passOp: po._GetStencilOpFunction( this._stencilEnabled ? this._stencilFrontPassOp : 1 /* KEEP */ ) }; let p; (t === B_.LineStrip || t === B_.TriangleStrip) && (p = !this._indexBuffer || this._indexBuffer.is32Bits ? yT.Uint32 : yT.Uint16); const m = this._webgpuDepthStencilFormat ? zn.HasStencilAspect(this._webgpuDepthStencilFormat) : !1; return this._device.createRenderPipeline({ label: `RenderPipeline_${(s = (r = u[0]) === null || r === void 0 ? void 0 : r.format) !== null && s !== void 0 ? s : "nooutput"}_${(n = this._webgpuDepthStencilFormat) !== null && n !== void 0 ? n : "nodepth"}_samples${i}_textureState${this._textureState}`, layout: o, vertex: { module: a.stages.vertexStage.module, entryPoint: a.stages.vertexStage.entryPoint, buffers: l }, primitive: { topology: t, stripIndexFormat: p, frontFace: this._frontFace === 1 ? nL.CCW : nL.CW, cullMode: this._cullEnabled ? this._cullFace === 2 ? hO.Front : hO.Back : hO.None }, fragment: a.stages.fragmentStage ? { module: a.stages.fragmentStage.module, entryPoint: a.stages.fragmentStage.entryPoint, targets: u } : void 0, multisample: { count: i /*mask, alphaToCoverageEnabled,*/ }, depthStencil: this._webgpuDepthStencilFormat === void 0 ? void 0 : { depthWriteEnabled: this._depthWriteEnabled, depthCompare: this._depthTestEnabled ? po._GetCompareFunction(this._depthCompare) : yh.Always, format: this._webgpuDepthStencilFormat, stencilFront: this._stencilEnabled && m ? f : void 0, stencilBack: this._stencilEnabled && m ? f : void 0, stencilReadMask: this._stencilEnabled && m ? this._stencilReadMask : void 0, stencilWriteMask: this._stencilEnabled && m ? this._stencilWriteMask : void 0, depthBias: this._depthBias, depthBiasClamp: this._depthBiasClamp, depthBiasSlopeScale: this._depthBiasSlopeScale } }); } } po.NumCacheHitWithoutHash = 0; po.NumCacheHitWithHash = 0; po.NumCacheMiss = 0; po.NumPipelineCreationLastFrame = 0; po._NumPipelineCreationCurrentFrame = 0; class Wie { constructor() { this.values = {}; } count() { let e = 0, t = this.pipeline ? 
1 : 0; for (const i in this.values) { const r = this.values[i], [s, n] = r.count(); e += s, t += n, e++; } return [e, t]; } } class zC extends po { static GetNodeCounts() { const e = zC._Cache.count(); return { nodeCount: e[0], pipelineCount: e[1] }; } static _GetPipelines(e, t, i, r) { if (e.pipeline) { const s = i.slice(); s.length = r, t.push(s); } for (const s in e.values) { const n = e.values[s]; i[r] = parseInt(s), zC._GetPipelines(n, t, i, r + 1); } } static GetPipelines() { const e = []; return zC._GetPipelines(zC._Cache, e, [], 0), e; } constructor(e, t) { super(e, t), this._nodeStack = [], this._nodeStack[0] = zC._Cache; } _getRenderPipeline(e) { let t = this._nodeStack[this._stateDirtyLowestIndex]; for (let i = this._stateDirtyLowestIndex; i < this._statesLength; ++i) { let r = t.values[this._states[i]]; r || (r = new Wie(), t.values[this._states[i]] = r), t = r, this._nodeStack[i + 1] = t; } e.token = t, e.pipeline = t.pipeline; } _setRenderPipeline(e) { e.token.pipeline = e.pipeline; } } zC._Cache = new Wie(); class Lpe extends oK { constructor(e) { super(!1), this._cache = e, this.reset(); } get func() { return this._func; } set func(e) { this._func !== e && (this._func = e, this._cache.setStencilCompare(e)); } get funcMask() { return this._funcMask; } set funcMask(e) { this._funcMask !== e && (this._funcMask = e, this._cache.setStencilReadMask(e)); } get opStencilFail() { return this._opStencilFail; } set opStencilFail(e) { this._opStencilFail !== e && (this._opStencilFail = e, this._cache.setStencilFailOp(e)); } get opDepthFail() { return this._opDepthFail; } set opDepthFail(e) { this._opDepthFail !== e && (this._opDepthFail = e, this._cache.setStencilDepthFailOp(e)); } get opStencilDepthPass() { return this._opStencilDepthPass; } set opStencilDepthPass(e) { this._opStencilDepthPass !== e && (this._opStencilDepthPass = e, this._cache.setStencilPassOp(e)); } get mask() { return this._mask; } set mask(e) { this._mask !== e && (this._mask = e, this._cache.setStencilWriteMask(e)); } get enabled() { return this._enabled; } set enabled(e) { this._enabled !== e && (this._enabled = e, this._cache.setStencilEnabled(e)); } reset() { super.reset(), this._cache.resetStencilState(); } apply() { var e; const t = (e = this.stencilMaterial) === null || e === void 0 ? void 0 : e.enabled; this.enabled = t ? this.stencilMaterial.enabled : this.stencilGlobal.enabled, this.enabled && (this.func = t ? this.stencilMaterial.func : this.stencilGlobal.func, this.funcRef = t ? this.stencilMaterial.funcRef : this.stencilGlobal.funcRef, this.funcMask = t ? this.stencilMaterial.funcMask : this.stencilGlobal.funcMask, this.opStencilFail = t ? this.stencilMaterial.opStencilFail : this.stencilGlobal.opStencilFail, this.opDepthFail = t ? this.stencilMaterial.opDepthFail : this.stencilGlobal.opDepthFail, this.opStencilDepthPass = t ? this.stencilMaterial.opStencilDepthPass : this.stencilGlobal.opStencilDepthPass, this.mask = t ? this.stencilMaterial.mask : this.stencilGlobal.mask); } } class Npe extends sK { /** * Initializes the state. 
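* Each property setter below forwards its value straight to the bound cache render pipeline (for instance depthFunc calls setDepthCompare and cull calls setCullEnabled), which is why apply() is a no-op for this WebGPU depth/culling state.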
* @param cache */ constructor(e) { super(!1), this._cache = e, this.reset(); } get zOffset() { return this._zOffset; } set zOffset(e) { this._zOffset !== e && (this._zOffset = e, this._isZOffsetDirty = !0, this._cache.setDepthBiasSlopeScale(e)); } get zOffsetUnits() { return this._zOffsetUnits; } set zOffsetUnits(e) { this._zOffsetUnits !== e && (this._zOffsetUnits = e, this._isZOffsetDirty = !0, this._cache.setDepthBias(e)); } get cullFace() { return this._cullFace; } set cullFace(e) { this._cullFace !== e && (this._cullFace = e, this._isCullFaceDirty = !0, this._cache.setCullFace(e ?? 1)); } get cull() { return this._cull; } set cull(e) { this._cull !== e && (this._cull = e, this._isCullDirty = !0, this._cache.setCullEnabled(!!e)); } get depthFunc() { return this._depthFunc; } set depthFunc(e) { this._depthFunc !== e && (this._depthFunc = e, this._isDepthFuncDirty = !0, this._cache.setDepthCompare(e)); } get depthMask() { return this._depthMask; } set depthMask(e) { this._depthMask !== e && (this._depthMask = e, this._isDepthMaskDirty = !0, this._cache.setDepthWriteEnabled(e)); } get depthTest() { return this._depthTest; } set depthTest(e) { this._depthTest !== e && (this._depthTest = e, this._isDepthTestDirty = !0, this._cache.setDepthTestEnabled(e)); } get frontFace() { return this._frontFace; } set frontFace(e) { this._frontFace !== e && (this._frontFace = e, this._isFrontFaceDirty = !0, this._cache.setFrontFace(e ?? 2)); } reset() { super.reset(), this._cache.resetDepthCullingState(); } apply() { } } class rW { /** * Checks if a texture is an external or internal texture * @param texture the external or internal texture * @returns true if the texture is an external texture, else false */ static IsExternalTexture(e) { return e.underlyingResource !== void 0; } /** * Get the class name of the texture. * @returns "ExternalTexture" */ getClassName() { return "ExternalTexture"; } /** * Gets the underlying texture object */ get underlyingResource() { return this._video; } /** * Constructs the texture * @param video The video the texture should be wrapped around */ constructor(e) { this.useMipMaps = !1, this.type = 16, this.format = 4294967295, this._video = e, this.uniqueId = ln._Counter++; } /** * Get if the texture is ready to be used (downloaded, converted, mip mapped...). * @returns true if fully ready */ isReady() { return this._video.readyState >= this._video.HAVE_CURRENT_DATA; } /** * Dispose the texture and release its associated resources. */ dispose() { } } class xU { get forceBindGroupCreation() { return this._numExternalTextures > 0; } get hasFloatOrDepthTextures() { return this._numFloatOrDepthTextures > 0; } constructor() { this.uniqueId = xU._Counter++, this.updateId = 0, this.textureState = 0, this.reset(); } reset() { this.samplers = {}, this.textures = {}, this.isDirty = !0, this._numFloatOrDepthTextures = 0, this._numExternalTextures = 0; } setSampler(e, t) { let i = this.samplers[e], r = -1; i ? r = i.hashCode : this.samplers[e] = i = { sampler: t, hashCode: 0 }, i.sampler = t, i.hashCode = t ? e5.GetSamplerHashCode(t) : 0; const s = r !== i.hashCode; s && this.updateId++, this.isDirty || (this.isDirty = s); } setTexture(e, t) { var i, r, s; let n = this.textures[e], a = -1; n ? a = (r = (i = n.texture) === null || i === void 0 ? void 0 : i.uniqueId) !== null && r !== void 0 ? 
r : -1 : this.textures[e] = n = { texture: t, isFloatOrDepthTexture: !1, isExternalTexture: !1 }, n.isExternalTexture && this._numExternalTextures--, n.isFloatOrDepthTexture && this._numFloatOrDepthTextures--, t ? (n.isFloatOrDepthTexture = t.type === 1 || t.format >= 13 && t.format <= 18, n.isExternalTexture = rW.IsExternalTexture(t), n.isFloatOrDepthTexture && this._numFloatOrDepthTextures++, n.isExternalTexture && this._numExternalTextures++) : (n.isFloatOrDepthTexture = !1, n.isExternalTexture = !1), n.texture = t; const l = a !== ((s = t == null ? void 0 : t.uniqueId) !== null && s !== void 0 ? s : -1); l && this.updateId++, this.isDirty || (this.isDirty = l); } } xU._Counter = 0; class dN { isDirty(e) { return this._isDirty || this._materialContextUpdateId !== e; } resetIsDirty(e) { this._isDirty = !1, this._materialContextUpdateId = e; } get useInstancing() { return this._useInstancing; } set useInstancing(e) { this._useInstancing !== e && (e ? (this.indirectDrawBuffer = this._bufferManager.createRawBuffer(20, ya.CopyDst | ya.Indirect | ya.Storage, void 0, "IndirectDrawBuffer"), this._indirectDrawData = new Uint32Array(5), this._indirectDrawData[3] = 0, this._indirectDrawData[4] = 0) : (this.indirectDrawBuffer && this._bufferManager.releaseBuffer(this.indirectDrawBuffer), this.indirectDrawBuffer = void 0, this._indirectDrawData = void 0), this._useInstancing = e, this._currentInstanceCount = -1); } constructor(e) { this._bufferManager = e, this.uniqueId = dN._Counter++, this._useInstancing = !1, this._currentInstanceCount = 0, this.reset(); } reset() { this.buffers = {}, this._isDirty = !0, this._materialContextUpdateId = 0, this.fastBundle = void 0, this.bindGroups = void 0; } setBuffer(e, t) { var i; this._isDirty || (this._isDirty = (t == null ? void 0 : t.uniqueId) !== ((i = this.buffers[e]) === null || i === void 0 ? void 0 : i.uniqueId)), this.buffers[e] = t; } setIndirectData(e, t, i) { t === this._currentInstanceCount || !this.indirectDrawBuffer || !this._indirectDrawData || (this._currentInstanceCount = t, this._indirectDrawData[0] = e, this._indirectDrawData[1] = t, this._indirectDrawData[2] = i, this._bufferManager.setRawData(this.indirectDrawBuffer, 0, this._indirectDrawData, 0, 20)); } dispose() { this.indirectDrawBuffer && (this._bufferManager.releaseBuffer(this.indirectDrawBuffer), this.indirectDrawBuffer = void 0, this._indirectDrawData = void 0), this.fastBundle = void 0, this.bindGroups = void 0, this.buffers = void 0; } } dN._Counter = 0; class kF { constructor() { this.values = {}; } } class cl { static get Statistics() { return { totalCreated: cl.NumBindGroupsCreatedTotal, lastFrameCreated: cl.NumBindGroupsCreatedLastFrame, lookupLastFrame: cl.NumBindGroupsLookupLastFrame, noLookupLastFrame: cl.NumBindGroupsNoLookupLastFrame }; } constructor(e, t, i) { this.disabled = !1, this._device = e, this._cacheSampler = t, this._engine = i; } endFrame() { cl.NumBindGroupsCreatedLastFrame = cl._NumBindGroupsCreatedCurrentFrame, cl.NumBindGroupsLookupLastFrame = cl._NumBindGroupsLookupCurrentFrame, cl.NumBindGroupsNoLookupLastFrame = cl._NumBindGroupsNoLookupCurrentFrame, cl._NumBindGroupsCreatedCurrentFrame = 0, cl._NumBindGroupsLookupCurrentFrame = 0, cl._NumBindGroupsNoLookupCurrentFrame = 0; } /** * Cache is currently based on the uniform/storage buffers, samplers and textures used by the binding groups. 
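* The cache is a trie rooted at the static _Cache node: a lookup walks one level per entry of shaderProcessingContext.bufferNames, samplerNames and textureNames, keyed by buffer uniqueId, sampler hashCode and texture uniqueId respectively.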
* Note that all uniform buffers have an offset of 0 in Babylon and we don't have a use case where we would have the same buffer used with different capacity values: * that means we don't need to factor in the offset/size of the buffer in the cache, only the id * @param webgpuPipelineContext * @param drawContext * @param materialContext */ getBindGroups(e, t, i) { var r, s, n, a, l, o, u, h, d, f; let p, m = cl._Cache; const _ = this.disabled || i.forceBindGroupCreation; if (!_) { if (!t.isDirty(i.updateId) && !i.isDirty) return cl._NumBindGroupsNoLookupCurrentFrame++, t.bindGroups; for (const C of e.shaderProcessingContext.bufferNames) { const x = (s = (r = t.buffers[C]) === null || r === void 0 ? void 0 : r.uniqueId) !== null && s !== void 0 ? s : 0; let b = m.values[x]; b || (b = new kF(), m.values[x] = b), m = b; } for (const C of e.shaderProcessingContext.samplerNames) { const x = (a = (n = i.samplers[C]) === null || n === void 0 ? void 0 : n.hashCode) !== null && a !== void 0 ? a : 0; let b = m.values[x]; b || (b = new kF(), m.values[x] = b), m = b; } for (const C of e.shaderProcessingContext.textureNames) { const x = (u = (o = (l = i.textures[C]) === null || l === void 0 ? void 0 : l.texture) === null || o === void 0 ? void 0 : o.uniqueId) !== null && u !== void 0 ? u : 0; let b = m.values[x]; b || (b = new kF(), m.values[x] = b), m = b; } p = m.bindGroups; } if (t.resetIsDirty(i.updateId), i.isDirty = !1, p) return t.bindGroups = p, cl._NumBindGroupsLookupCurrentFrame++, p; p = [], t.bindGroups = p, _ || (m.bindGroups = p), cl.NumBindGroupsCreatedTotal++, cl._NumBindGroupsCreatedCurrentFrame++; const v = e.bindGroupLayouts[i.textureState]; for (let C = 0; C < e.shaderProcessingContext.bindGroupLayoutEntries.length; C++) { const x = e.shaderProcessingContext.bindGroupLayoutEntries[C], b = e.shaderProcessingContext.bindGroupEntries[C]; for (let M = 0; M < x.length; M++) { const R = e.shaderProcessingContext.bindGroupLayoutEntries[C][M], w = e.shaderProcessingContext.bindGroupLayoutEntryInfo[C][R.binding], V = (h = w.nameInArrayOfTexture) !== null && h !== void 0 ? h : w.name; if (R.sampler) { const k = i.samplers[V]; if (k) { const L = k.sampler; if (!L) { this._engine.dbgSanityChecks && Ce.Error(`Trying to bind a null sampler! entry=${JSON.stringify(R)}, name=${V}, bindingInfo=${JSON.stringify(k, (B, U) => B === "texture" ? "" : U)}, materialContext.uniqueId=${i.uniqueId}`, 50); continue; } b[M].resource = this._cacheSampler.getSampler(L, !1, k.hashCode, L.label); } else Ce.Error(`Sampler "${V}" could not be bound. entry=${JSON.stringify(R)}, materialContext=${JSON.stringify(i, (L, B) => L === "texture" || L === "sampler" ? "" : B)}`, 50); } else if (R.texture || R.storageTexture) { const k = i.textures[V]; if (k) { if (this._engine.dbgSanityChecks && k.texture === null) { Ce.Error(`Trying to bind a null texture! entry=${JSON.stringify(R)}, bindingInfo=${JSON.stringify(k, (B, U) => B === "texture" ? "" : U)}, materialContext.uniqueId=${i.uniqueId}`, 50); continue; } const L = k.texture._hardwareTexture; if (this._engine.dbgSanityChecks && (!L || R.texture && !L.view || R.storageTexture && !L.viewForWriting)) { Ce.Error(`Trying to bind a null gpu texture or view! entry=${JSON.stringify(R)}, name=${V}, bindingInfo=${JSON.stringify(k, (B, U) => B === "texture" ? "" : U)}, isReady=${(d = k.texture) === null || d === void 0 ? void 0 : d.isReady}, materialContext.uniqueId=${i.uniqueId}`, 50); continue; } b[M].resource = R.storageTexture ? 
L.viewForWriting : L.view; } else Ce.Error(`Texture "${V}" could not be bound. entry=${JSON.stringify(R)}, materialContext=${JSON.stringify(i, (L, B) => L === "texture" || L === "sampler" ? "" : B)}`, 50); } else if (R.externalTexture) { const k = i.textures[V]; if (k) { if (this._engine.dbgSanityChecks && k.texture === null) { Ce.Error(`Trying to bind a null external texture! entry=${JSON.stringify(R)}, name=${V}, bindingInfo=${JSON.stringify(k, (B, U) => B === "texture" ? "" : U)}, materialContext.uniqueId=${i.uniqueId}`, 50); continue; } const L = k.texture.underlyingResource; if (this._engine.dbgSanityChecks && !L) { Ce.Error(`Trying to bind a null gpu external texture! entry=${JSON.stringify(R)}, name=${V}, bindingInfo=${JSON.stringify(k, (B, U) => B === "texture" ? "" : U)}, isReady=${(f = k.texture) === null || f === void 0 ? void 0 : f.isReady}, materialContext.uniqueId=${i.uniqueId}`, 50); continue; } b[M].resource = this._device.importExternalTexture({ source: L }); } else Ce.Error(`Texture "${V}" could not be bound. entry=${JSON.stringify(R)}, materialContext=${JSON.stringify(i, (L, B) => L === "texture" || L === "sampler" ? "" : B)}`, 50); } else if (R.buffer) { const k = t.buffers[V]; if (k) { const L = k.underlyingResource; b[M].resource.buffer = L, b[M].resource.size = k.capacity; } else Ce.Error(`Can't find buffer "${V}". entry=${JSON.stringify(R)}, buffers=${JSON.stringify(t.buffers)}, drawContext.uniqueId=${t.uniqueId}`, 50); } } const S = v[C]; p[C] = this._device.createBindGroup({ layout: S, entries: b }); } return p; } } cl.NumBindGroupsCreatedTotal = 0; cl.NumBindGroupsCreatedLastFrame = 0; cl.NumBindGroupsLookupLastFrame = 0; cl.NumBindGroupsNoLookupLastFrame = 0; cl._Cache = new kF(); cl._NumBindGroupsCreatedCurrentFrame = 0; cl._NumBindGroupsLookupCurrentFrame = 0; cl._NumBindGroupsNoLookupCurrentFrame = 0; const Fpe = "clearQuadVertexShader", Bpe = `uniform float depthValue;const vec2 pos[4]={vec2(-1.0,1.0), vec2(1.0,1.0), vec2(-1.0,-1.0), vec2(1.0,-1.0)}; #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN gl_Position=vec4(pos[gl_VertexID],depthValue,1.0); #define CUSTOM_VERTEX_MAIN_END } `; je.ShadersStore[Fpe] = Bpe; const Upe = "clearQuadPixelShader", Vpe = `uniform vec4 color;void main() {gl_FragColor=color;} `; je.ShadersStore[Upe] = Vpe; class kpe { setDepthStencilFormat(e) { this._depthTextureFormat = e, this._cacheRenderPipeline.setDepthStencilFormat(e); } setColorFormat(e) { this._cacheRenderPipeline.setColorFormat(e); } setMRTAttachments(e, t, i) { this._cacheRenderPipeline.setMRT(t, i), this._cacheRenderPipeline.setMRTAttachments(e); } constructor(e, t, i) { this._bindGroups = {}, this._bundleCache = {}, this._keyTemp = [], this._device = e, this._engine = t, this._cacheRenderPipeline = new zC(this._device, i), this._cacheRenderPipeline.setDepthTestEnabled(!1), this._cacheRenderPipeline.setStencilReadMask(255), this._effect = t.createEffect("clearQuad", [], ["color", "depthValue"]); } clear(e, t, i, r, s = 1) { var n, a; let l, o = null, u; const h = !!this._engine._currentRenderTarget; if (e) l = e; else { let C = 0; this._keyTemp.length = 0; for (let b = 0; b < this._cacheRenderPipeline.colorFormats.length; ++b) this._keyTemp[C++] = kC[(n = this._cacheRenderPipeline.colorFormats[b]) !== null && n !== void 0 ? n : ""]; const x = kC[(a = this._depthTextureFormat) !== null && a !== void 0 ? a : 0]; if (this._keyTemp[C] = (t ? t.r + t.g * 256 + t.b * 256 * 256 + t.a * 256 * 256 * 256 : 0) + (i ? 2 ** 32 : 0) + (r ? 
2 ** 33 : 0) + (this._engine.useReverseDepthBuffer ? 2 ** 34 : 0) + (h ? 2 ** 35 : 0) + (s > 1 ? 2 ** 36 : 0) + x * 2 ** 37, u = this._keyTemp.join("_"), o = this._bundleCache[u], o) return o; l = this._device.createRenderBundleEncoder({ colorFormats: this._cacheRenderPipeline.colorFormats, depthStencilFormat: this._depthTextureFormat, sampleCount: zn.GetSample(s) }); } this._cacheRenderPipeline.setDepthWriteEnabled(!!i), this._cacheRenderPipeline.setStencilEnabled(!!r && !!this._depthTextureFormat && zn.HasStencilAspect(this._depthTextureFormat)), this._cacheRenderPipeline.setStencilWriteMask(r ? 255 : 0), this._cacheRenderPipeline.setStencilCompare(r ? 519 : 512), this._cacheRenderPipeline.setStencilPassOp(r ? 7681 : 7680), this._cacheRenderPipeline.setWriteMask(t ? 15 : 0); const d = this._cacheRenderPipeline.getRenderPipeline(7, this._effect, s), f = this._effect._pipelineContext; t && this._effect.setDirectColor4("color", t), this._effect.setFloat("depthValue", this._engine.useReverseDepthBuffer ? this._engine._clearReverseDepthValue : this._engine._clearDepthValue), f.uniformBuffer.update(); const p = h ? this._engine._ubInvertY : this._engine._ubDontInvertY, m = f.uniformBuffer.getBuffer(), _ = m.uniqueId + "-" + p.uniqueId; let v = this._bindGroups[_]; if (!v) { const C = f.bindGroupLayouts[0]; v = this._bindGroups[_] = [], v.push(this._device.createBindGroup({ layout: C[0], entries: [] })), mg._SimplifiedKnownBindings || v.push(this._device.createBindGroup({ layout: C[1], entries: [] })), v.push(this._device.createBindGroup({ layout: C[mg._SimplifiedKnownBindings ? 1 : 2], entries: [ { binding: 0, resource: { buffer: p.underlyingResource, size: p.capacity } }, { binding: 1, resource: { buffer: m.underlyingResource, size: m.capacity } } ] })); } l.setPipeline(d); for (let C = 0; C < v.length; ++C) l.setBindGroup(C, v[C]); return l.draw(4, 1, 0, 0), e || (o = l.finish(), this._bundleCache[u] = o), o; } } class sW { constructor(e, t, i, r) { this.x = Math.floor(e), this.y = Math.floor(t), this.w = Math.floor(i), this.h = Math.floor(r); } run(e) { e.setViewport(this.x, this.y, this.w, this.h, 0, 1); } clone() { return new sW(this.x, this.y, this.w, this.h); } } class nW { constructor(e, t, i, r) { this.x = e, this.y = t, this.w = i, this.h = r; } run(e) { e.setScissorRect(this.x, this.y, this.w, this.h); } clone() { return new nW(this.x, this.y, this.w, this.h); } } class pB { constructor(e) { this.ref = e; } run(e) { e.setStencilReference(this.ref); } clone() { return new pB(this.ref); } } class aW { constructor(e) { this.color = e; } run(e) { e.setBlendConstant(this.color); } clone() { return new aW(this.color); } } class oW { constructor(e) { this.query = e; } run(e) { e.beginOcclusionQuery(this.query); } clone() { return new oW(this.query); } } class lW { constructor() { } run(e) { e.endOcclusionQuery(); } clone() { return new lW(); } } class cW { constructor() { this.bundles = []; } run(e) { e.executeBundles(this.bundles); } clone() { const e = new cW(); return e.bundles = this.bundles, e; } } class uW { constructor(e) { this.numDrawCalls = 0, this._device = e, this._list = new Array(10), this._listLength = 0; } addBundle(e) { if (!this._currentItemIsBundle) { const t = new cW(); this._list[this._listLength++] = t, this._currentBundleList = t.bundles, this._currentItemIsBundle = !0; } e && this._currentBundleList.push(e); } _finishBundle() { this._currentItemIsBundle && this._bundleEncoder && (this._currentBundleList.push(this._bundleEncoder.finish()), this._bundleEncoder = void 
0, this._currentItemIsBundle = !1); } addItem(e) { this._finishBundle(), this._list[this._listLength++] = e, this._currentItemIsBundle = !1; } getBundleEncoder(e, t, i) { return this._currentItemIsBundle || (this.addBundle(), this._bundleEncoder = this._device.createRenderBundleEncoder({ colorFormats: e, depthStencilFormat: t, sampleCount: zn.GetSample(i) })), this._bundleEncoder; } close() { this._finishBundle(); } run(e) { this.close(); for (let t = 0; t < this._listLength; ++t) this._list[t].run(e); } reset() { this._listLength = 0, this._currentItemIsBundle = !1, this.numDrawCalls = 0; } clone() { this.close(); const e = new uW(this._device); e._list = new Array(this._listLength), e._listLength = this._listLength, e.numDrawCalls = this.numDrawCalls; for (let t = 0; t < this._listLength; ++t) e._list[t] = this._list[t].clone(); return e; } } class jie { get querySet() { return this._querySet; } constructor(e, t, i, r, s, n = !0, a) { this._dstBuffers = [], this._engine = e, this._device = r, this._bufferManager = s, this._count = t, this._canUseMultipleBuffers = n, this._querySet = r.createQuerySet({ label: a, type: i, count: t }), this._queryBuffer = s.createRawBuffer(8 * t, ya.QueryResolve | ya.CopySrc, void 0, "QueryBuffer"), n || this._dstBuffers.push(this._bufferManager.createRawBuffer(8 * this._count, ya.MapRead | ya.CopyDst, void 0, "QueryBufferNoMultipleBuffers")); } _getBuffer(e, t) { if (!this._canUseMultipleBuffers && this._dstBuffers.length === 0) return null; const i = this._device.createCommandEncoder(); let r; return this._dstBuffers.length === 0 ? r = this._bufferManager.createRawBuffer(8 * this._count, ya.MapRead | ya.CopyDst, void 0, "QueryBufferAdditionalBuffer") : (r = this._dstBuffers[this._dstBuffers.length - 1], this._dstBuffers.length--), i.resolveQuerySet(this._querySet, e, t, this._queryBuffer, 0), i.copyBufferToBuffer(this._queryBuffer, 0, r, 0, 8 * t), this._device.queue.submit([i.finish()]), r; } async readValues(e = 0, t = 1) { const i = this._getBuffer(e, t); return i === null ? null : i.mapAsync(c5.Read).then(() => { const r = new BigUint64Array(i.getMappedRange()).slice(); return i.unmap(), this._dstBuffers[this._dstBuffers.length] = i, r; }, (r) => { if (this._engine.isDisposed) return null; throw r; }); } async readValue(e = 0) { const t = this._getBuffer(e, 1); return t === null ? null : t.mapAsync(c5.Read).then(() => { const i = new BigUint64Array(t.getMappedRange()), r = Number(i[0]); return t.unmap(), this._dstBuffers[this._dstBuffers.length] = t, r; }, (i) => { if (this._engine.isDisposed) return 0; throw i; }); } async readTwoValuesAndSubtract(e = 0) { const t = this._getBuffer(e, 2); return t === null ? null : t.mapAsync(c5.Read).then(() => { const i = new BigUint64Array(t.getMappedRange()), r = Number(i[1] - i[0]); return t.unmap(), this._dstBuffers[this._dstBuffers.length] = t, r; }, (i) => { if (this._engine.isDisposed) return 0; throw i; }); } dispose() { this._querySet.destroy(), this._bufferManager.releaseBuffer(this._queryBuffer); for (let e = 0; e < this._dstBuffers.length; ++e) this._bufferManager.releaseBuffer(this._dstBuffers[e]); } } class zpe { get gpuFrameTimeCounter() { return this._gpuFrameTimeCounter; } constructor(e, t, i) { this._enabled = !1, this._gpuFrameTimeCounter = new Vc(), this._measureDurationState = 0, this._engine = e, this._device = t, this._bufferManager = i; } get enable() { return this._enabled; } set enable(e) { this._enabled !== e && (this._enabled = e, this._measureDurationState = 0, e ? 
this._measureDuration = new Hpe(this._engine, this._device, this._bufferManager, 2e3, "QuerySet_TimestampQuery") : this._measureDuration.dispose()); } startFrame(e) { this._enabled && this._measureDurationState === 0 && (this._measureDuration.start(e), this._measureDurationState = 1); } endFrame(e) { this._measureDurationState === 1 && (this._measureDurationState = 2, this._measureDuration.stop(e).then((t) => { t !== null && t >= 0 && (this._gpuFrameTimeCounter.fetchNewFrame(), this._gpuFrameTimeCounter.addCount(t, !0)), this._measureDurationState = 0; })); } startPass(e, t) { this._enabled ? this._measureDuration.startPass(e, t) : e.timestampWrites = void 0; } endPass(e, t) { if (!this._enabled || !t) return; const i = this._engine.frameId; this._measureDuration.stopPass(e).then((r) => { t._addDuration(i, r !== null && r > 0 ? r : 0); }); } dispose() { var e; (e = this._measureDuration) === null || e === void 0 || e.dispose(); } } class Hpe { constructor(e, t, i, r = 2, s) { this._count = r, this._querySet = new jie(e, r, oL.Timestamp, t, i, !0, s); } start(e) { var t; (t = e.writeTimestamp) === null || t === void 0 || t.call(e, this._querySet.querySet, 0); } async stop(e) { var t; return (t = e.writeTimestamp) === null || t === void 0 || t.call(e, this._querySet.querySet, 1), e.writeTimestamp ? this._querySet.readTwoValuesAndSubtract(0) : 0; } startPass(e, t) { if (t + 3 > this._count) throw new Error("WebGPUDurationMeasure: index out of range (" + t + ")"); e.timestampWrites = { querySet: this._querySet.querySet, beginningOfPassWriteIndex: t + 2, endOfPassWriteIndex: t + 3 }; } async stopPass(e) { return this._querySet.readTwoValuesAndSubtract(e + 2); } dispose() { this._querySet.dispose(); } } class Gpe { get querySet() { return this._querySet.querySet; } get hasQueries() { return this._currentTotalIndices !== this._availableIndices.length; } canBeginQuery(e) { if (this._frameQuerySetIsDirty === this._engine.frameId || this._queryFrameId[e] === this._engine.frameId) return !1; const t = this._engine._getCurrentRenderPassWrapper().renderPassDescriptor.occlusionQuerySet !== void 0; return t && (this._queryFrameId[e] = this._engine.frameId), t; } constructor(e, t, i, r = 50, s = 100) { this._availableIndices = [], this._frameQuerySetIsDirty = -1, this._queryFrameId = [], this._engine = e, this._device = t, this._bufferManager = i, this._frameLastBuffer = -1, this._currentTotalIndices = 0, this._countIncrement = s, this._allocateNewIndices(r); } createQuery() { this._availableIndices.length === 0 && this._allocateNewIndices(); const e = this._availableIndices[this._availableIndices.length - 1]; return this._availableIndices.length--, e; } deleteQuery(e) { this._availableIndices[this._availableIndices.length] = e; } isQueryResultAvailable(e) { return this._retrieveQueryBuffer(), !!this._lastBuffer && e < this._lastBuffer.length; } getQueryResult(e) { var t, i; return Number((i = (t = this._lastBuffer) === null || t === void 0 ? void 0 : t[e]) !== null && i !== void 0 ? i : -1); } _retrieveQueryBuffer() { this._lastBuffer && this._frameLastBuffer === this._engine.frameId || this._frameLastBuffer !== this._engine.frameId && (this._frameLastBuffer = this._engine.frameId, this._querySet.readValues(0, this._currentTotalIndices).then((e) => { this._lastBuffer = e; })); } _allocateNewIndices(e) { e = e ?? 
this._countIncrement, this._delayQuerySetDispose(); for (let t = 0; t < e; ++t) this._availableIndices.push(this._currentTotalIndices + t); this._currentTotalIndices += e, this._querySet = new jie(this._engine, this._currentTotalIndices, oL.Occlusion, this._device, this._bufferManager, !1, "QuerySet_OcclusionQuery_count_" + this._currentTotalIndices), this._frameQuerySetIsDirty = this._engine.frameId; } _delayQuerySetDispose() { const e = this._querySet; e && setTimeout(() => e.dispose(), 1e3); } dispose() { var e; (e = this._querySet) === null || e === void 0 || e.dispose(), this._availableIndices.length = 0; } } class U_ { async initTwgsl(e) { if (!U_._twgsl) return e = e || {}, e = Object.assign(Object.assign({}, U_._TWgslDefaultOptions), e), e.twgsl ? (U_._twgsl = e.twgsl, Promise.resolve()) : (e.jsPath && e.wasmPath && await Ve.LoadBabylonScriptAsync(e.jsPath), self.twgsl ? (U_._twgsl = await self.twgsl(Ve.GetBabylonScriptURL(e.wasmPath)), Promise.resolve()) : Promise.reject("twgsl is not available.")); } convertSpirV2WGSL(e, t = !1) { const i = U_._twgsl.convertSpirV2WGSL(e, U_.DisableUniformityAnalysis || t); return U_.ShowWGSLShaderCode && (Ce.Log(i), Ce.Log("***********************************************")), U_.DisableUniformityAnalysis || t ? `diagnostic(off, derivative_uniformity);
` + i : i; } } U_._TWgslDefaultOptions = { jsPath: `${Ve._DefaultCdnUrl}/twgsl/twgsl.js`, wasmPath: `${Ve._DefaultCdnUrl}/twgsl/twgsl.wasm` }; U_.ShowWGSLShaderCode = !1; U_.DisableUniformityAnalysis = !1; U_._twgsl = null; class Kpe { constructor(e, t, i) { this._record = !1, this._play = !1, this._playBundleListIndex = 0, this._allBundleLists = [], this._enabled = !1, this._engine = e, this._mode = t, this._bundleList = i; } get enabled() { return this._enabled; } get play() { return this._play; } get record() { return this._record; } set enabled(e) { this._allBundleLists.length = 0, this._record = this._enabled = e, this._play = !1, e && (this._modeSaved = this._mode, this._mode = 0); } get mode() { return this._mode; } set mode(e) { this._record ? this._modeSaved = e : this._mode = e; } endRenderPass(e) { if (!this._record && !this._play) return !1; let t; if (this._record) t = this._bundleList.clone(), this._allBundleLists.push(t), this._bundleList.reset(); else { if (this._playBundleListIndex >= this._allBundleLists.length) throw new Error(`Invalid playBundleListIndex! Your snapshot is no longer valid for the current frame, you should recreate a new one. 
playBundleListIndex=${this._playBundleListIndex}, allBundleLists.length=${this._allBundleLists.length}}`); t = this._allBundleLists[this._playBundleListIndex++]; } return t.run(e), this._mode === 1 && this._engine._reportDrawCall(t.numDrawCalls), !0; } endFrame() { this._record && (this._record = !1, this._play = !0, this._mode = this._modeSaved), this._playBundleListIndex = 0; } reset() { this.enabled = !1, this.enabled = !0; } } const Wpe = "postprocessVertexShader", jpe = `attribute position: vec2;uniform scale: vec2;varying vUV: vec2;const madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS @vertex fn main(input : VertexInputs)->FragmentInputs { #define CUSTOM_VERTEX_MAIN_BEGIN vertexOutputs.vUV=(vertexInputs.position*madd+madd)*uniforms.scale;vertexOutputs.position=vec4(vertexInputs.position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END } `; je.ShadersStoreWGSL[Wpe] = jpe; const zZ = { label: "TextureView_SwapChain_ResolveTarget", dimension: _g.E2d, format: void 0, mipLevelCount: 1, arrayLayerCount: 1 }, HZ = { label: "TextureView_SwapChain", dimension: _g.E2d, format: void 0, mipLevelCount: 1, arrayLayerCount: 1 }, IF = "/* disable_uniformity_analysis */", Xpe = new Et(); class br extends $e { /** * Gets or sets the snapshot rendering mode */ get snapshotRenderingMode() { return this._snapshotRendering.mode; } set snapshotRenderingMode(e) { this._snapshotRendering.mode = e; } /** * Creates a new snapshot at the next frame using the current snapshotRenderingMode */ snapshotRenderingReset() { this._snapshotRendering.reset(); } /** * Enables or disables the snapshot rendering mode * Note that the WebGL engine does not support snapshot rendering so setting the value won't have any effect for this engine */ get snapshotRendering() { return this._snapshotRendering.enabled; } set snapshotRendering(e) { this._snapshotRendering.enabled = e; } /** * Sets this to true to disable the cache for the samplers. You should do it only for testing purpose! */ get disableCacheSamplers() { return this._cacheSampler ? this._cacheSampler.disabled : !1; } set disableCacheSamplers(e) { this._cacheSampler && (this._cacheSampler.disabled = e); } /** * Sets this to true to disable the cache for the render pipelines. You should do it only for testing purpose! */ get disableCacheRenderPipelines() { return this._cacheRenderPipeline ? this._cacheRenderPipeline.disabled : !1; } set disableCacheRenderPipelines(e) { this._cacheRenderPipeline && (this._cacheRenderPipeline.disabled = e); } /** * Sets this to true to disable the cache for the bind groups. You should do it only for testing purpose! */ get disableCacheBindGroups() { return this._cacheBindGroups ? this._cacheBindGroups.disabled : !1; } set disableCacheBindGroups(e) { this._cacheBindGroups && (this._cacheBindGroups.disabled = e); } /** * Gets a Promise indicating if the engine can be instantiated (ie. if a WebGPU context can be found) */ static get IsSupportedAsync() { return navigator.gpu ? navigator.gpu.requestAdapter().then((e) => !!e, () => !1).catch(() => !1) : Promise.resolve(!1); } /** * Not supported by WebGPU, you should call IsSupportedAsync instead! 
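* @example
* // Minimal sketch, assuming the usual public Babylon.js name WebGPUEngine for this class (minified here as br) and an existing canvas element:
* const supported = await WebGPUEngine.IsSupportedAsync;
* if (supported) {
*     const engine = await WebGPUEngine.CreateAsync(canvas, { antialias: true });
* }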
*/ static get IsSupported() { return Ce.Warn("You must call IsSupportedAsync for WebGPU!"), !1; } /** * Gets a boolean indicating that the engine supports uniform buffers */ get supportsUniformBuffers() { return !0; } /** Gets the supported extensions by the WebGPU adapter */ get supportedExtensions() { return this._adapterSupportedExtensions; } /** Gets the currently enabled extensions on the WebGPU device */ get enabledExtensions() { return this._deviceEnabledExtensions; } /** Gets the supported limits by the WebGPU adapter */ get supportedLimits() { return this._adapterSupportedLimits; } /** Gets the current limits of the WebGPU device */ get currentLimits() { return this._deviceLimits; } /** * Returns a string describing the current engine */ get description() { return this.name + this.version; } /** * Returns the version of the engine */ get version() { return 1; } /** * Gets an object containing information about the current engine context * @returns an object containing the vendor, the renderer and the version of the current engine context */ getInfo() { return { vendor: this._adapterInfo.vendor || "unknown vendor", renderer: this._adapterInfo.architecture || "unknown renderer", version: this._adapterInfo.description || "unknown version" }; } /** * (WebGPU only) True (default) to be in compatibility mode, meaning rendering all existing scenes without artifacts (same rendering than WebGL). * Setting the property to false will improve performances but may not work in some scenes if some precautions are not taken. * See https://doc.babylonjs.com/setup/support/webGPU/webGPUOptimization/webGPUNonCompatibilityMode for more details */ get compatibilityMode() { return this._compatibilityMode; } set compatibilityMode(e) { this._compatibilityMode = e; } /** * Enables or disables GPU timing measurements. * Note that this is only supported if the "timestamp-query" extension is enabled in the options. */ get enableGPUTimingMeasurements() { return this._timestampQuery.enable; } set enableGPUTimingMeasurements(e) { this._timestampQuery.enable !== e && (this.gpuTimeInFrameForMainPass = e ? new VK() : void 0, this._timestampQuery.enable = e); } /** @internal */ get currentSampleCount() { return this._currentRenderTarget ? this._currentRenderTarget.samples : this._mainPassSampleCount; } /** * Create a new instance of the gpu engine asynchronously * @param canvas Defines the canvas to use to display the result * @param options Defines the options passed to the engine to create the GPU context dependencies * @returns a promise that resolves with the created engine */ static CreateAsync(e, t = {}) { const i = new br(e, t); return new Promise((r) => { i.initAsync(t.glslangOptions, t.twgslOptions).then(() => r(i)); }); } /** * Create a new instance of the gpu engine. * @param canvas Defines the canvas to use to display the result * @param options Defines the options passed to the engine to create the GPU context dependencies */ constructor(e, t = {}) { var i, r; if (super(null, (i = t.antialias) !== null && i !== void 0 ? 
i : !0, t), this._uploadEncoderDescriptor = { label: "upload" }, this._renderEncoderDescriptor = { label: "render" }, this._clearDepthValue = 1, this._clearReverseDepthValue = 0, this._clearStencilValue = 0, this._defaultSampleCount = 4, this._glslang = null, this._tintWASM = null, this._adapterInfo = { vendor: "", architecture: "", device: "", description: "" }, this._timestampIndex = 0, this._compiledComputeEffects = {}, this._counters = { numEnableEffects: 0, numEnableDrawWrapper: 0, numBundleCreationNonCompatMode: 0, numBundleReuseNonCompatMode: 0 }, this.countersLastFrame = { numEnableEffects: 0, numEnableDrawWrapper: 0, numBundleCreationNonCompatMode: 0, numBundleReuseNonCompatMode: 0 }, this.numMaxUncapturedErrors = 20, this._commandBuffers = [null, null], this._currentRenderPass = null, this._mainRenderPassWrapper = { renderPassDescriptor: null, colorAttachmentViewDescriptor: null, depthAttachmentViewDescriptor: null, colorAttachmentGPUTextures: [], depthTextureFormat: void 0 }, this._rttRenderPassWrapper = { renderPassDescriptor: null, colorAttachmentViewDescriptor: null, depthAttachmentViewDescriptor: null, colorAttachmentGPUTextures: [], depthTextureFormat: void 0 }, this._pendingDebugCommands = [], this._currentOverrideVertexBuffers = null, this._currentIndexBuffer = null, this._colorWriteLocal = !0, this._forceEnableEffect = !1, this.dbgShowShaderCode = !1, this.dbgSanityChecks = !0, this.dbgVerboseLogsForFirstFrames = !1, this.dbgVerboseLogsNumFrames = 10, this.dbgLogIfNotDrawWrapper = !0, this.dbgShowEmptyEnableEffectCalls = !0, this.isNDCHalfZRange = !0, this.hasOriginBottomLeft = !1, this._viewportsCurrent = { x: 0, y: 0, w: 0, h: 0 }, this._scissorsCurrent = { x: 0, y: 0, w: 0, h: 0 }, this._scissorCached = { x: 0, y: 0, z: 0, w: 0 }, this._stencilRefsCurrent = -1, this._blendColorsCurrent = [null, null, null, null], this._name = "WebGPU", t.deviceDescriptor = t.deviceDescriptor || {}, t.enableGPUDebugMarkers = (r = t.enableGPUDebugMarkers) !== null && r !== void 0 ? r : !1, Ce.Log(`Babylon.js v${$e.Version} - ${this.description} engine`), !navigator.gpu) { Ce.Error("WebGPU is not supported by your browser."); return; } t.swapChainFormat = t.swapChainFormat || navigator.gpu.getPreferredCanvasFormat(), this._isWebGPU = !0, this._shaderPlatformName = "WEBGPU", this._renderingCanvas = e, this._options = t, this._mainPassSampleCount = t.antialias ? this._defaultSampleCount : 1, this._setupMobileChecks(), this._sharedInit(e), this._shaderProcessor = new Bfe(), this._shaderProcessorWGSL = new Ape(); } //------------------------------------------------------------------------------ // Initialization //------------------------------------------------------------------------------ /** * Initializes the WebGPU context and dependencies. * @param glslangOptions Defines the GLSLang compiler options if necessary * @param twgslOptions Defines the Twgsl compiler options if necessary * @returns a promise notifying the readiness of the engine. */ initAsync(e, t) { var i; return this._initGlslang(e ?? ((i = this._options) === null || i === void 0 ? void 0 : i.glslangOptions)).then((r) => { var s; return this._glslang = r, this._tintWASM = br.UseTWGSL ? new U_() : null, this._tintWASM ? this._tintWASM.initTwgsl(t ?? ((s = this._options) === null || s === void 0 ? 
void 0 : s.twgslOptions)).then(() => navigator.gpu.requestAdapter(this._options), (n) => { throw Ce.Error("Can not initialize twgsl!"), Ce.Error(n), Error("WebGPU initializations stopped."); }) : navigator.gpu.requestAdapter(this._options); }, (r) => { throw Ce.Error("Can not initialize glslang!"), Ce.Error(r), Error("WebGPU initializations stopped."); }).then((r) => { var s, n, a; if (r) { this._adapter = r, this._adapterSupportedExtensions = [], (s = this._adapter.features) === null || s === void 0 || s.forEach((u) => this._adapterSupportedExtensions.push(u)), this._adapterSupportedLimits = this._adapter.limits, this._adapter.requestAdapterInfo().then((u) => { this._adapterInfo = u; }); const l = (n = this._options.deviceDescriptor) !== null && n !== void 0 ? n : {}, o = (a = l == null ? void 0 : l.requiredFeatures) !== null && a !== void 0 ? a : this._options.enableAllFeatures ? this._adapterSupportedExtensions : void 0; if (o) { const u = o, h = []; for (const d of u) this._adapterSupportedExtensions.indexOf(d) !== -1 && h.push(d); l.requiredFeatures = h; } if (this._options.setMaximumLimits && !l.requiredLimits) { l.requiredLimits = {}; for (const u in this._adapterSupportedLimits) l.requiredLimits[u] = this._adapterSupportedLimits[u]; } return this._adapter.requestDevice(l); } else throw "Could not retrieve a WebGPU adapter (adapter is null)."; }).then((r) => { var s, n; this._device = r, this._deviceEnabledExtensions = [], (s = this._device.features) === null || s === void 0 || s.forEach((l) => this._deviceEnabledExtensions.push(l)), this._deviceLimits = r.limits; let a = -1; this._device.addEventListener("uncapturederror", (l) => { ++a < this.numMaxUncapturedErrors ? Ce.Warn(`WebGPU uncaptured error (${a + 1}): ${l.error} - ${l.error.message}`) : a++ === this.numMaxUncapturedErrors && Ce.Warn(`WebGPU uncaptured error: too many warnings (${this.numMaxUncapturedErrors}), no more warnings will be reported to the console for this engine.`); }), this._doNotHandleContextLost || (n = this._device.lost) === null || n === void 0 || n.then((l) => { this._isDisposed || (this._contextWasLost = !0, Ce.Warn("WebGPU context lost. " + l), this.onContextLostObservable.notifyObservers(this), this._restoreEngineAfterContextLost(() => this.initAsync())); }); }, (r) => { Ce.Error("Could not retrieve a WebGPU device."), Ce.Error(r); }).then(() => { this._bufferManager = new fB(this, this._device), this._textureHelper = new zn(this._device, this._glslang, this._tintWASM, this._bufferManager, this._deviceEnabledExtensions), this._cacheSampler = new e5(this._device), this._cacheBindGroups = new cl(this._device, this._cacheSampler, this), this._timestampQuery = new zpe(this, this._device, this._bufferManager), this._occlusionQuery = this._device.createQuerySet ? 
new Gpe(this, this._device, this._bufferManager) : void 0, this._bundleList = new uW(this._device), this._snapshotRendering = new Kpe(this, this._snapshotRenderingMode, this._bundleList), this._ubInvertY = this._bufferManager.createBuffer(new Float32Array([-1, 0]), ya.Uniform | ya.CopyDst, "UBInvertY"), this._ubDontInvertY = this._bufferManager.createBuffer(new Float32Array([1, 0]), ya.Uniform | ya.CopyDst, "UBDontInvertY"), this.dbgVerboseLogsForFirstFrames && this._count === void 0 && (this._count = 0, Ce.Log(["%c frame #" + this._count + " - begin", "background: #ffff00"])), this._uploadEncoder = this._device.createCommandEncoder(this._uploadEncoderDescriptor), this._renderEncoder = this._device.createCommandEncoder(this._renderEncoderDescriptor), this._initializeLimits(), this._emptyVertexBuffer = new Y(this, [0], "", !1, !1, 1, !1, 0, 1), this._cacheRenderPipeline = new zC(this._device, this._emptyVertexBuffer), this._depthCullingState = new Npe(this._cacheRenderPipeline), this._stencilStateComposer = new Lpe(this._cacheRenderPipeline), this._stencilStateComposer.stencilGlobal = this._stencilState, this._depthCullingState.depthTest = !0, this._depthCullingState.depthFunc = 515, this._depthCullingState.depthMask = !0, this._textureHelper.setCommandEncoder(this._uploadEncoder), this._clearQuad = new kpe(this._device, this, this._emptyVertexBuffer), this._defaultDrawContext = this.createDrawContext(), this._currentDrawContext = this._defaultDrawContext, this._defaultMaterialContext = this.createMaterialContext(), this._currentMaterialContext = this._defaultMaterialContext, this._initializeContextAndSwapChain(), this._initializeMainAttachments(), this.resize(); }).catch((r) => { var s; Ce.Error("Can not create WebGPU Device and/or context."), Ce.Error(r), (s = console == null ? void 0 : console.trace) === null || s === void 0 || s.call(console); }); } _initGlslang(e) { return e = e || {}, e = Object.assign(Object.assign({}, br._GLSLslangDefaultOptions), e), e.glslang ? Promise.resolve(e.glslang) : self.glslang ? self.glslang(e.wasmPath) : e.jsPath && e.wasmPath ? Ve.LoadBabylonScriptAsync(e.jsPath).then(() => self.glslang(Ve.GetBabylonScriptURL(e.wasmPath))) : Promise.reject("gslang is not available."); } _initializeLimits() { this._caps = { maxTexturesImageUnits: this._deviceLimits.maxSampledTexturesPerShaderStage, maxVertexTextureImageUnits: this._deviceLimits.maxSampledTexturesPerShaderStage, maxCombinedTexturesImageUnits: this._deviceLimits.maxSampledTexturesPerShaderStage * 2, maxTextureSize: this._deviceLimits.maxTextureDimension2D, maxCubemapTextureSize: this._deviceLimits.maxTextureDimension2D, maxRenderTextureSize: this._deviceLimits.maxTextureDimension2D, maxVertexAttribs: this._deviceLimits.maxVertexAttributes, maxVaryingVectors: this._deviceLimits.maxInterStageShaderVariables, maxFragmentUniformVectors: Math.floor(this._deviceLimits.maxUniformBufferBindingSize / 4), maxVertexUniformVectors: Math.floor(this._deviceLimits.maxUniformBufferBindingSize / 4), standardDerivatives: !0, astc: this._deviceEnabledExtensions.indexOf(UC.TextureCompressionASTC) >= 0 ? !0 : void 0, s3tc: this._deviceEnabledExtensions.indexOf(UC.TextureCompressionBC) >= 0 ? !0 : void 0, pvrtc: null, etc1: null, etc2: this._deviceEnabledExtensions.indexOf(UC.TextureCompressionETC2) >= 0 ? !0 : void 0, bptc: this._deviceEnabledExtensions.indexOf(UC.TextureCompressionBC) >= 0 ? 
!0 : void 0, maxAnisotropy: 16, uintIndices: !0, fragmentDepthSupported: !0, highPrecisionShaderSupported: !0, colorBufferFloat: !0, supportFloatTexturesResolve: !1, textureFloat: !0, textureFloatLinearFiltering: this._deviceEnabledExtensions.indexOf(UC.Float32Filterable) >= 0, textureFloatRender: !0, textureHalfFloat: !0, textureHalfFloatLinearFiltering: !0, textureHalfFloatRender: !0, textureLOD: !0, texelFetch: !0, drawBuffersExtension: !0, depthTextureExtension: !0, vertexArrayObject: !1, instancedArrays: !0, timerQuery: typeof BigUint64Array < "u" && this._deviceEnabledExtensions.indexOf(UC.TimestampQuery) !== -1 ? !0 : void 0, supportOcclusionQuery: typeof BigUint64Array < "u", canUseTimestampForTimerQuery: !0, multiview: !1, oculusMultiview: !1, parallelShaderCompile: void 0, blendMinMax: !0, maxMSAASamples: 4, canUseGLInstanceID: !0, canUseGLVertexID: !0, supportComputeShaders: !0, supportSRGBBuffers: !0, supportTransformFeedbacks: !1, textureMaxLevel: !0, texture2DArrayMaxLayerCount: this._deviceLimits.maxTextureArrayLayers, disableMorphTargetTexture: !1 }, this._caps.parallelShaderCompile = null, this._features = { forceBitmapOverHTMLImageElement: !0, supportRenderAndCopyToLodForFloatTextures: !0, supportDepthStencilTexture: !0, supportShadowSamplers: !0, uniformBufferHardCheckMatrix: !1, allowTexturePrefiltering: !0, trackUbosInFrame: !0, checkUbosContentBeforeUpload: !0, supportCSM: !0, basisNeedsPOT: !1, support3DTextures: !0, needTypeSuffixInShaderConstants: !0, supportMSAA: !0, supportSSAO2: !0, supportExtendedTextureFormats: !0, supportSwitchCaseInShader: !0, supportSyncTextureRead: !1, needsInvertingBitmap: !1, useUBOBindingCache: !1, needShaderCodeInlining: !0, needToAlwaysBindUniformBuffers: !0, supportRenderPasses: !0, supportSpriteInstancing: !0, forceVertexBufferStrideMultiple4Bytes: !0, _collectUbosUpdatedInFrame: !1 }; } _initializeContextAndSwapChain() { if (!this._renderingCanvas) throw "The rendering canvas has not been set!"; this._context = this._renderingCanvas.getContext("webgpu"), this._configureContext(), this._colorFormat = this._options.swapChainFormat, this._mainRenderPassWrapper.colorAttachmentGPUTextures = [new VF()], this._mainRenderPassWrapper.colorAttachmentGPUTextures[0].format = this._colorFormat, this._setColorFormat(this._mainRenderPassWrapper); } // Set default values as WebGL with depth and stencil attachment for the broadest Compat. 
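// The attachments created below follow the construction options: options.antialias selects a sampleCount of 4 (this._defaultSampleCount) instead of 1 for the main color/depth textures, and isStencilEnable selects Depth24PlusStencil8 instead of Depth32Float for the main depth texture format.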
_initializeMainAttachments() { if (!this._bufferManager) return; this.flushFramebuffer(), this._mainTextureExtends = { width: this.getRenderWidth(!0), height: this.getRenderHeight(!0), depthOrArrayLayers: 1 }; const e = new Float32Array([this.getRenderHeight(!0)]); this._bufferManager.setSubData(this._ubInvertY, 4, e), this._bufferManager.setSubData(this._ubDontInvertY, 4, e); let t; if (this._options.antialias) { const s = { label: `Texture_MainColor_${this._mainTextureExtends.width}x${this._mainTextureExtends.height}_antialiasing`, size: this._mainTextureExtends, mipLevelCount: 1, sampleCount: this._mainPassSampleCount, dimension: _g.E2d, format: this._options.swapChainFormat, usage: fo.RenderAttachment }; this._mainTexture && this._textureHelper.releaseTexture(this._mainTexture), this._mainTexture = this._device.createTexture(s), t = [ { view: this._mainTexture.createView({ label: "TextureView_MainColor_antialiasing", dimension: _g.E2d, format: this._options.swapChainFormat, mipLevelCount: 1, arrayLayerCount: 1 }), clearValue: new Et(0, 0, 0, 1), loadOp: au.Clear, storeOp: _m.Store // don't use StoreOp.Discard, else using several cameras with different viewports or using scissors will fail because we call beginRenderPass / endPass several times for the same color attachment! } ]; } else t = [ { view: void 0, clearValue: new Et(0, 0, 0, 1), loadOp: au.Clear, storeOp: _m.Store } ]; this._mainRenderPassWrapper.depthTextureFormat = this.isStencilEnable ? Re.Depth24PlusStencil8 : Re.Depth32Float, this._setDepthTextureFormat(this._mainRenderPassWrapper), this._setColorFormat(this._mainRenderPassWrapper); const i = { label: `Texture_MainDepthStencil_${this._mainTextureExtends.width}x${this._mainTextureExtends.height}`, size: this._mainTextureExtends, mipLevelCount: 1, sampleCount: this._mainPassSampleCount, dimension: _g.E2d, format: this._mainRenderPassWrapper.depthTextureFormat, usage: fo.RenderAttachment }; this._depthTexture && this._textureHelper.releaseTexture(this._depthTexture), this._depthTexture = this._device.createTexture(i); const r = { view: this._depthTexture.createView({ label: `TextureView_MainDepthStencil_${this._mainTextureExtends.width}x${this._mainTextureExtends.height}`, dimension: _g.E2d, format: this._depthTexture.format, mipLevelCount: 1, arrayLayerCount: 1 }), depthClearValue: this._clearDepthValue, depthLoadOp: au.Clear, depthStoreOp: _m.Store, stencilClearValue: this._clearStencilValue, stencilLoadOp: this.isStencilEnable ? au.Clear : void 0, stencilStoreOp: this.isStencilEnable ? _m.Store : void 0 }; this._mainRenderPassWrapper.renderPassDescriptor = { label: "MainRenderPass", colorAttachments: t, depthStencilAttachment: r }; } _configureContext() { this._context.configure({ device: this._device, format: this._options.swapChainFormat, usage: fo.RenderAttachment | fo.CopySrc, alphaMode: this.premultipliedAlpha ? lL.Premultiplied : lL.Opaque }); } /** * Force a specific size of the canvas * @param width defines the new canvas' width * @param height defines the new canvas' height * @param forceSetSize true to force setting the sizes of the underlying canvas * @returns true if the size was changed */ setSize(e, t, i = !1) { return super.setSize(e, t, i) ? 
(this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log(["frame #" + this._count + " - setSize -", e, t])), this._initializeMainAttachments(), this.snapshotRendering && this.snapshotRenderingReset(), !0) : !1; } /** * @internal */ _getShaderProcessor(e) { return e === Xa.WGSL ? this._shaderProcessorWGSL : this._shaderProcessor; } /** * @internal */ _getShaderProcessingContext(e) { return new mg(e); } _currentPassIsMainPass() { return this._currentRenderTarget === null; } _getCurrentRenderPass() { return this._currentRenderTarget && !this._currentRenderPass ? this._startRenderTargetRenderPass(this._currentRenderTarget, !1, null, !1, !1) : this._currentRenderPass || this._startMainRenderPass(!1), this._currentRenderPass; } /** @internal */ _getCurrentRenderPassWrapper() { return this._currentRenderTarget ? this._rttRenderPassWrapper : this._mainRenderPassWrapper; } //------------------------------------------------------------------------------ // Static Pipeline WebGPU States //------------------------------------------------------------------------------ /** @internal */ applyStates() { this._stencilStateComposer.apply(), this._cacheRenderPipeline.setAlphaBlendEnabled(this._alphaState.alphaBlend); } /** * Force the entire cache to be cleared * You should not have to use this function unless your engine needs to share the WebGPU context with another engine * @param bruteForce defines a boolean to force clearing ALL caches (including stencil, detoh and alpha states) */ wipeCaches(e) { this.preventCacheWipeBetweenFrames && !e || (this._forceEnableEffect = !0, this._currentIndexBuffer = null, this._currentOverrideVertexBuffers = null, this._cacheRenderPipeline.setBuffers(null, null, null), e && (this._stencilStateComposer.reset(), this._depthCullingState.reset(), this._depthCullingState.depthFunc = 515, this._alphaState.reset(), this._alphaMode = 1, this._alphaEquation = 0, this._cacheRenderPipeline.setAlphaBlendFactors(this._alphaState._blendFunctionParameters, this._alphaState._blendEquationParameters), this._cacheRenderPipeline.setAlphaBlendEnabled(!1), this.setColorWrite(!0)), this._cachedVertexBuffers = null, this._cachedIndexBuffer = null, this._cachedEffectForVertexBuffers = null); } /** * Enable or disable color writing * @param enable defines the state to set */ setColorWrite(e) { this._colorWriteLocal = e, this._cacheRenderPipeline.setWriteMask(e ? 15 : 0); } /** * Gets a boolean indicating if color writing is enabled * @returns the current color writing state */ getColorWrite() { return this._colorWriteLocal; } _mustUpdateViewport() { const e = this._viewportCached.x, t = this._viewportCached.y, i = this._viewportCached.z, r = this._viewportCached.w, s = this._viewportsCurrent.x !== e || this._viewportsCurrent.y !== t || this._viewportsCurrent.w !== i || this._viewportsCurrent.h !== r; return s && (this._viewportsCurrent.x = this._viewportCached.x, this._viewportsCurrent.y = this._viewportCached.y, this._viewportsCurrent.w = this._viewportCached.z, this._viewportsCurrent.h = this._viewportCached.w), s; } _applyViewport(e) { const t = Math.floor(this._viewportCached.x), i = Math.floor(this._viewportCached.z), r = Math.floor(this._viewportCached.w); let s = Math.floor(this._viewportCached.y); this._currentRenderTarget || (s = this.getRenderHeight(!0) - s - r), e ? 
e.addItem(new sW(t, s, i, r)) : this._getCurrentRenderPass().setViewport(t, s, i, r, 0, 1), this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log([ "frame #" + this._count + " - viewport applied - (", this._viewportCached.x, this._viewportCached.y, this._viewportCached.z, this._viewportCached.w, ") current pass is main pass=" + this._currentPassIsMainPass() ])); } /** * @internal */ _viewport(e, t, i, r) { this._viewportCached.x = e, this._viewportCached.y = t, this._viewportCached.z = i, this._viewportCached.w = r; } _mustUpdateScissor() { const e = this._scissorCached.x, t = this._scissorCached.y, i = this._scissorCached.z, r = this._scissorCached.w, s = this._scissorsCurrent.x !== e || this._scissorsCurrent.y !== t || this._scissorsCurrent.w !== i || this._scissorsCurrent.h !== r; return s && (this._scissorsCurrent.x = this._scissorCached.x, this._scissorsCurrent.y = this._scissorCached.y, this._scissorsCurrent.w = this._scissorCached.z, this._scissorsCurrent.h = this._scissorCached.w), s; } _applyScissor(e) { const t = this._currentRenderTarget ? this._scissorCached.y : this.getRenderHeight() - this._scissorCached.w - this._scissorCached.y; e ? e.addItem(new nW(this._scissorCached.x, t, this._scissorCached.z, this._scissorCached.w)) : this._getCurrentRenderPass().setScissorRect(this._scissorCached.x, t, this._scissorCached.z, this._scissorCached.w), this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log([ "frame #" + this._count + " - scissor applied - (", this._scissorCached.x, this._scissorCached.y, this._scissorCached.z, this._scissorCached.w, ") current pass is main pass=" + this._currentPassIsMainPass() ])); } _scissorIsActive() { return this._scissorCached.x !== 0 || this._scissorCached.y !== 0 || this._scissorCached.z !== 0 || this._scissorCached.w !== 0; } enableScissor(e, t, i, r) { this._scissorCached.x = e, this._scissorCached.y = t, this._scissorCached.z = i, this._scissorCached.w = r; } disableScissor() { this._scissorCached.x = this._scissorCached.y = this._scissorCached.z = this._scissorCached.w = 0, this._scissorsCurrent.x = this._scissorsCurrent.y = this._scissorsCurrent.w = this._scissorsCurrent.h = 0; } _mustUpdateStencilRef() { const e = this._stencilStateComposer.funcRef !== this._stencilRefsCurrent; return e && (this._stencilRefsCurrent = this._stencilStateComposer.funcRef), e; } _applyStencilRef(e) { var t, i; e ? e.addItem(new pB((t = this._stencilStateComposer.funcRef) !== null && t !== void 0 ? t : 0)) : this._getCurrentRenderPass().setStencilReference((i = this._stencilStateComposer.funcRef) !== null && i !== void 0 ? i : 0); } _mustUpdateBlendColor() { const e = this._alphaState._blendConstants, t = e[0] !== this._blendColorsCurrent[0] || e[1] !== this._blendColorsCurrent[1] || e[2] !== this._blendColorsCurrent[2] || e[3] !== this._blendColorsCurrent[3]; return t && (this._blendColorsCurrent[0] = e[0], this._blendColorsCurrent[1] = e[1], this._blendColorsCurrent[2] = e[2], this._blendColorsCurrent[3] = e[3]), t; } _applyBlendColor(e) { e ? 
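/*
 * Hedged usage sketch for the scissor helpers above — assumes `engine` is a constructed
 * instance of this WebGPU engine class; the rectangle is illustrative:
 *
 *   engine.enableScissor(0, 0, 256, 256); // cached, applied lazily on the current/next render pass
 *   // ... draw calls clipped to the 256x256 rectangle ...
 *   engine.disableScissor();              // an all-zero rectangle marks the scissor as inactive
 */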
e.addItem(new aW(this._alphaState._blendConstants.slice())) : this._getCurrentRenderPass().setBlendConstant(this._alphaState._blendConstants); } _resetRenderPassStates() { this._viewportsCurrent.x = this._viewportsCurrent.y = this._viewportsCurrent.w = this._viewportsCurrent.h = 0, this._scissorsCurrent.x = this._scissorsCurrent.y = this._scissorsCurrent.w = this._scissorsCurrent.h = 0, this._stencilRefsCurrent = -1, this._blendColorsCurrent[0] = this._blendColorsCurrent[1] = this._blendColorsCurrent[2] = this._blendColorsCurrent[3] = null; } /** * Clear the current render buffer or the current render target (if any is set up) * @param color defines the color to use * @param backBuffer defines if the back buffer must be cleared * @param depth defines if the depth buffer must be cleared * @param stencil defines if the stencil buffer must be cleared */ clear(e, t, i, r = !1) { e && e.a === void 0 && (e.a = 1); const s = this._scissorIsActive(); this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log(["frame #" + this._count + " - clear - backBuffer=", t, " depth=", i, " stencil=", r, " scissor is active=", s])), this._currentRenderTarget ? s ? (this._currentRenderPass || this._startRenderTargetRenderPass(this._currentRenderTarget, !1, t ? e : null, i, r), this._applyScissor(this.compatibilityMode ? null : this._bundleList), this._clearFullQuad(t ? e : null, i, r)) : (this._currentRenderPass && this._endCurrentRenderPass(), this._startRenderTargetRenderPass(this._currentRenderTarget, !0, t ? e : null, i, r)) : ((!this._currentRenderPass || !s) && this._startMainRenderPass(!s, t ? e : null, i, r), s && (this._applyScissor(this.compatibilityMode ? null : this._bundleList), this._clearFullQuad(t ? e : null, i, r))); } _clearFullQuad(e, t, i) { var r, s; const n = this.compatibilityMode ? this._getCurrentRenderPass() : null; this._clearQuad.setColorFormat(this._colorFormat), this._clearQuad.setDepthStencilFormat(this._depthTextureFormat), this._clearQuad.setMRTAttachments((r = this._cacheRenderPipeline.mrtAttachments) !== null && r !== void 0 ? r : [], (s = this._cacheRenderPipeline.mrtTextureArray) !== null && s !== void 0 ? s : [], this._cacheRenderPipeline.mrtTextureCount), this.compatibilityMode ? n.setStencilReference(this._clearStencilValue) : this._bundleList.addItem(new pB(this._clearStencilValue)); const a = this._clearQuad.clear(n, e, t, i, this.currentSampleCount); this.compatibilityMode ? this._applyStencilRef(null) : (this._bundleList.addBundle(a), this._applyStencilRef(this._bundleList), this._reportDrawCall()); } //------------------------------------------------------------------------------ // Vertex/Index/Storage Buffers //------------------------------------------------------------------------------ /** * Creates a vertex buffer * @param data the data for the vertex buffer * @param _updatable whether the buffer should be created as updatable * @param label defines the label of the buffer (for debug purpose) * @returns the new buffer */ createVertexBuffer(e, t, i) { let r; return e instanceof Array ? r = new Float32Array(e) : e instanceof ArrayBuffer ? 
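/*
 * Hedged usage sketch for clear() above — assumes `engine` is a constructed instance of this
 * WebGPU engine class; a Color4-like { r, g, b, a } object is used for illustration (a missing
 * `a` component defaults to 1):
 *
 *   engine.clear({ r: 0.1, g: 0.1, b: 0.15, a: 1 }, true, true, false);
 *   // When a scissor rectangle is active, the clear is performed with a full-screen quad
 *   // (_clearFullQuad) instead of re-starting the render pass with clear load ops.
 */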
r = new Uint8Array(e) : r = e, this._bufferManager.createBuffer(r, ya.Vertex | ya.CopyDst, i); } /** * Creates a dynamic vertex buffer * @param data the data for the dynamic vertex buffer * @param label defines the label of the buffer (for debug purpose) * @returns the new buffer */ createDynamicVertexBuffer(e, t) { return this.createVertexBuffer(e, void 0, t); } /** * Creates a new index buffer * @param indices defines the content of the index buffer * @param _updatable defines if the index buffer must be updatable * @param label defines the label of the buffer (for debug purpose) * @returns a new buffer */ createIndexBuffer(e, t, i) { let r = !0, s; e instanceof Uint32Array || e instanceof Int32Array ? s = e : e instanceof Uint16Array ? (s = e, r = !1) : e.length > 65535 ? s = new Uint32Array(e) : (s = new Uint16Array(e), r = !1); const n = this._bufferManager.createBuffer(s, ya.Index | ya.CopyDst, i); return n.is32Bits = r, n; } /** * @internal */ _createBuffer(e, t, i) { let r; e instanceof Array ? r = new Float32Array(e) : e instanceof ArrayBuffer ? r = new Uint8Array(e) : r = e; let s = 0; return t & 1 && (s |= ya.CopySrc), t & 2 && (s |= ya.CopyDst), t & 4 && (s |= ya.Uniform), t & 8 && (s |= ya.Vertex), t & 16 && (s |= ya.Index), t & 32 && (s |= ya.Storage), this._bufferManager.createBuffer(r, s, i); } /** * @internal */ bindBuffersDirectly() { throw "Not implemented on WebGPU"; } /** * @internal */ updateAndBindInstancesBuffer() { throw "Not implemented on WebGPU"; } /** * Bind a list of vertex buffers with the engine * @param vertexBuffers defines the list of vertex buffers to bind * @param indexBuffer defines the index buffer to bind * @param effect defines the effect associated with the vertex buffers * @param overrideVertexBuffers defines an optional list of vertex buffers that overrides the entries in vertexBuffers */ bindBuffers(e, t, i, r) { this._currentIndexBuffer = t, this._currentOverrideVertexBuffers = r ??
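/*
 * Hedged usage sketch for the buffer creation methods above — assumes `engine` is a constructed
 * instance of this WebGPU engine class; the data and labels are illustrative:
 *
 *   const vb = engine.createVertexBuffer([0, 0, 0, 1, 0, 0, 0, 1, 0], undefined, "positions");
 *   const ib = engine.createIndexBuffer([0, 1, 2], undefined, "indices");
 *   // ib.is32Bits is false here: a plain array with 65535 entries or fewer becomes a Uint16Array,
 *   // while Uint32Array/Int32Array input (or a longer array) produces a 32-bit index buffer.
 */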
null, this._cacheRenderPipeline.setBuffers(e, t, this._currentOverrideVertexBuffers); } /** * @internal */ _releaseBuffer(e) { return this._bufferManager.releaseBuffer(e); } //------------------------------------------------------------------------------ // Effects //------------------------------------------------------------------------------ /** * Create a new effect (used to store vertex/fragment shaders) * @param baseName defines the base name of the effect (The name of file without .fragment.fx or .vertex.fx) * @param attributesNamesOrOptions defines either a list of attribute names or an IEffectCreationOptions object * @param uniformsNamesOrEngine defines either a list of uniform names or the engine to use * @param samplers defines an array of string used to represent textures * @param defines defines the string containing the defines to use to compile the shaders * @param fallbacks defines the list of potential fallbacks to use if shader compilation fails * @param onCompiled defines a function to call when the effect creation is successful * @param onError defines a function to call when the effect creation has failed * @param indexParameters defines an object containing the index values to use to compile shaders (like the maximum number of simultaneous lights) * @param shaderLanguage the language the shader is written in (default: GLSL) * @returns the new Effect */ createEffect(e, t, i, r, s, n, a, l, o, u = Xa.GLSL) { var h; const d = e.vertexElement || e.vertex || e.vertexToken || e.vertexSource || e, f = e.fragmentElement || e.fragment || e.fragmentToken || e.fragmentSource || e, p = this._getGlobalDefines(); let m = (h = s ?? t.defines) !== null && h !== void 0 ? h : ""; p && (m += ` ` + p); const _ = d + "+" + f + "@" + m; if (this._compiledEffects[_]) { const C = this._compiledEffects[_]; return a && C.isReady() && a(C), C; } const v = new Cr(e, t, i, r, this, s, n, a, l, o, _, u); return this._compiledEffects[_] = v, v; } _compileRawShaderToSpirV(e, t) { return this._glslang.compileGLSL(e, t); } _compileShaderToSpirV(e, t, i, r) { return this._compileRawShaderToSpirV(r + (i ? i + ` ` : "") + e, t); } _getWGSLShader(e, t, i) { return i ? i = "//" + i.split(` `).join(` //`) + ` ` : i = "", i + e; } _createPipelineStageDescriptor(e, t, i, r, s) { return this._tintWASM && i === Xa.GLSL && (e = this._tintWASM.convertSpirV2WGSL(e, r), t = this._tintWASM.convertSpirV2WGSL(t, s)), { vertexStage: { module: this._device.createShaderModule({ code: e }), entryPoint: "main" }, fragmentStage: { module: this._device.createShaderModule({ code: t }), entryPoint: "main" } }; } _compileRawPipelineStageDescriptor(e, t, i) { const r = e.indexOf(IF) >= 0, s = t.indexOf(IF) >= 0, n = i === Xa.GLSL ? this._compileRawShaderToSpirV(e, "vertex") : e, a = i === Xa.GLSL ? this._compileRawShaderToSpirV(t, "fragment") : t; return this._createPipelineStageDescriptor(n, a, i, r, s); } _compilePipelineStageDescriptor(e, t, i, r) { this.onBeforeShaderCompilationObservable.notifyObservers(this); const s = e.indexOf(IF) >= 0, n = t.indexOf(IF) >= 0, a = `#version 450 `, l = r === Xa.GLSL ? this._compileShaderToSpirV(e, "vertex", i, a) : this._getWGSLShader(e, "vertex", i), o = r === Xa.GLSL ? 
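/*
 * Hedged sketch of the effect caching implemented by createEffect() above — assumes `engine` is a
 * constructed instance of this WebGPU engine class and that "myShader" vertex/fragment sources are
 * registered in the shader store; names and defines are illustrative:
 *
 *   const fx1 = engine.createEffect("myShader", ["position"], ["worldViewProjection"], [], "#define FOO");
 *   const fx2 = engine.createEffect("myShader", ["position"], ["worldViewProjection"], [], "#define FOO");
 *   // fx2 === fx1: the cache key is "<vertex>+<fragment>@<defines + global defines>", so identical
 *   // sources and defines reuse the already-compiled effect.
 */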
this._compileShaderToSpirV(t, "fragment", i, a) : this._getWGSLShader(t, "fragment", i), u = this._createPipelineStageDescriptor(l, o, r, s, n); return this.onAfterShaderCompilationObservable.notifyObservers(this), u; } /** * @internal */ createRawShaderProgram() { throw "Not available on WebGPU"; } /** * @internal */ createShaderProgram() { throw "Not available on WebGPU"; } /** * Inline functions in shader code that are marked to be inlined * @param code code to inline * @returns inlined code */ inlineShaderCode(e) { const t = new cT(e); return t.debug = !1, t.processCode(), t.code; } /** * Creates a new pipeline context * @param shaderProcessingContext defines the shader processing context used during the processing if available * @returns the new pipeline */ createPipelineContext(e) { return new Lfe(e, this); } /** * Creates a new material context * @returns the new context */ createMaterialContext() { return new xU(); } /** * Creates a new draw context * @returns the new context */ createDrawContext() { return new dN(this._bufferManager); } /** * @internal */ _preparePipelineContext(e, t, i, r, s, n, a, l) { const o = e, u = o.shaderProcessingContext.shaderLanguage; this.dbgShowShaderCode && (Ce.Log(["defines", l]), Ce.Log(t), Ce.Log(i), Ce.Log("***********************************************")), o.sources = { fragment: i, vertex: t, rawVertex: s, rawFragment: n }, r ? o.stages = this._compileRawPipelineStageDescriptor(t, i, u) : o.stages = this._compilePipelineStageDescriptor(t, i, l, u); } /** * Gets the list of active attributes for a given WebGPU program * @param pipelineContext defines the pipeline context to use * @param attributesNames defines the list of attribute names to get * @returns an array of indices indicating the offset of each attribute */ getAttributes(e, t) { const i = new Array(t.length), r = e; for (let s = 0; s < t.length; s++) { const n = t[s], a = r.shaderProcessingContext.availableAttributes[n]; a !== void 0 && (i[s] = a); } return i; } /** * Activates an effect, making it the current one (ie. the one used for rendering) * @param effect defines the effect to activate */ enableEffect(e) { if (!e) return; let t = !0; if (!$o.IsWrapper(e)) t = e !== this._currentEffect, this._currentEffect = e, this._currentMaterialContext = this._defaultMaterialContext, this._currentDrawContext = this._defaultDrawContext, this._counters.numEnableEffects++, this.dbgLogIfNotDrawWrapper && Ce.Warn(`enableEffect has been called with an Effect and not a Wrapper! effect.uniqueId=${e.uniqueId}, effect.name=${e.name}, effect.name.vertex=${e.name.vertex}, effect.name.fragment=${e.name.fragment}`, 10); else if (!e.effect || e.effect === this._currentEffect && e.materialContext === this._currentMaterialContext && e.drawContext === this._currentDrawContext && !this._forceEnableEffect) { if (!e.effect && this.dbgShowEmptyEnableEffectCalls) throw Ce.Log(["drawWrapper=", e]), "Invalid call to enableEffect: the effect property is empty!"; return; } else if (t = e.effect !== this._currentEffect, this._currentEffect = e.effect, this._currentMaterialContext = e.materialContext, this._currentDrawContext = e.drawContext, this._counters.numEnableDrawWrapper++, !this._currentMaterialContext) throw Ce.Log(["drawWrapper=", e]), "Invalid call to enableEffect: the materialContext property is empty!"; this._stencilStateComposer.stencilMaterial = void 0, this._forceEnableEffect = t || this._forceEnableEffect ? 
!1 : this._forceEnableEffect, t && (this._currentEffect.onBind && this._currentEffect.onBind(this._currentEffect), this._currentEffect._onBindObservable && this._currentEffect._onBindObservable.notifyObservers(this._currentEffect)); } /** * @internal */ _releaseEffect(e) { this._compiledEffects[e._key] && (delete this._compiledEffects[e._key], this._deletePipelineContext(e.getPipelineContext())); } /** * Force the engine to release all cached effects. This means that next effect compilation will have to be done completely even if a similar effect was already compiled */ releaseEffects() { for (const e in this._compiledEffects) { const t = this._compiledEffects[e].getPipelineContext(); this._deletePipelineContext(t); } this._compiledEffects = {}; } _deletePipelineContext(e) { e && e.dispose(); } //------------------------------------------------------------------------------ // Textures //------------------------------------------------------------------------------ /** * Gets a boolean indicating that only power of 2 textures are supported * Please note that you can still use non power of 2 textures but in this case the engine will forcefully convert them */ get needPOTTextures() { return !1; } /** @internal */ _createHardwareTexture() { return new VF(); } /** * @internal */ _releaseTexture(e) { const t = this._internalTexturesCache.indexOf(e); t !== -1 && this._internalTexturesCache.splice(t, 1), this._textureHelper.releaseTexture(e); } /** * @internal */ _getRGBABufferInternalSizedFormat() { return 5; } updateTextureComparisonFunction(e, t) { e._comparisonFunction = t; } /** * Creates an internal texture without binding it to a framebuffer * @internal * @param size defines the size of the texture * @param options defines the options used to create the texture * @param delayGPUTextureCreation true to delay the texture creation the first time it is really needed. false to create it right away * @param source source type of the texture * @returns a new internal texture */ _createInternalTexture(e, t, i = !0, r = ts.Unknown) { var s, n, a; const l = {}; t !== void 0 && typeof t == "object" ? (l.generateMipMaps = t.generateMipMaps, l.type = t.type === void 0 ? 0 : t.type, l.samplingMode = t.samplingMode === void 0 ? 3 : t.samplingMode, l.format = t.format === void 0 ? 5 : t.format, l.samples = (s = t.samples) !== null && s !== void 0 ? s : 1, l.creationFlags = (n = t.creationFlags) !== null && n !== void 0 ? n : 0, l.useSRGBBuffer = (a = t.useSRGBBuffer) !== null && a !== void 0 ? a : !1, l.label = t.label) : (l.generateMipMaps = t, l.type = 0, l.samplingMode = 3, l.format = 5, l.samples = 1, l.creationFlags = 0, l.useSRGBBuffer = !1), (l.type === 1 && !this._caps.textureFloatLinearFiltering || l.type === 2 && !this._caps.textureHalfFloatLinearFiltering) && (l.samplingMode = 1), l.type === 1 && !this._caps.textureFloat && (l.type = 0, Ce.Warn("Float textures are not supported. 
Type forced to TEXTURETYPE_UNSIGNED_BYTE")); const o = new ln(this, r), u = e.width || e, h = e.height || e, d = e.layers || 0; return o.baseWidth = u, o.baseHeight = h, o.width = u, o.height = h, o.depth = d, o.isReady = !0, o.samples = l.samples, o.generateMipMaps = !!l.generateMipMaps, o.samplingMode = l.samplingMode, o.type = l.type, o.format = l.format, o.is2DArray = d > 0, o._cachedWrapU = 0, o._cachedWrapV = 0, o._useSRGBBuffer = l.useSRGBBuffer, o.label = l.label, this._internalTexturesCache.push(o), i || this._textureHelper.createGPUTextureForInternalTexture(o, u, h, d || 1, l.creationFlags), o; } /** * Usually called from Texture.ts. * Passed information to create a hardware texture * @param url defines a value which contains one of the following: * * A conventional http URL, e.g. 'http://...' or 'file://...' * * A base64 string of in-line texture data, e.g. 'data:image/jpg;base64,/...' * * An indicator that data is being passed using the buffer parameter, e.g. 'data:mytexture.jpg' * @param noMipmap defines a boolean indicating that no mipmaps shall be generated. Ignored for compressed textures. They must be in the file * @param invertY when true, image is flipped when loaded. You probably want true. Certain compressed textures may invert this if their default is inverted (eg. ktx) * @param scene needed for loading to the correct scene * @param samplingMode mode which should be used to sample / access the texture (Default: Texture.TRILINEAR_SAMPLINGMODE) * @param onLoad optional callback to be called upon successful completion * @param onError optional callback to be called upon failure * @param buffer a source of a file previously fetched as either a base64 string, an ArrayBuffer (compressed or image format), HTMLImageElement (image format), or a Blob * @param fallback an internal argument in case the function must be called again, due to etc1 not having alpha capabilities * @param format internal format. Default: RGB when extension is '.jpg' else RGBA. Ignored for compressed textures * @param forcedExtension defines the extension to use to pick the right loader * @param mimeType defines an optional mime type * @param loaderOptions options to be passed to the loader * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) * @param useSRGBBuffer defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU). * @returns an InternalTexture for assignment back into BABYLON.Texture */ createTexture(e, t, i, r, s = 3, n = null, a = null, l = null, o = null, u = null, h = null, d, f, p, m) { return this._createTextureBase(e, t, i, r, s, n, a, (_, v, C, x, b, S, M, R) => { var w; const V = x; if (_.baseWidth = V.width, _.baseHeight = V.height, _.width = V.width, _.height = V.height, _.format = _.format !== -1 ? _.format : u ?? 5, _.type = _.type !== -1 ?
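/*
 * Hedged usage sketch for createTexture() above — assumes `engine` is a constructed instance of this
 * WebGPU engine class and `scene` an existing scene; the URL is illustrative:
 *
 *   const internalTexture = engine.createTexture(
 *     "textures/albedo.png",  // url: http/file URL, base64 data URL, or a "data:..." buffer indicator
 *     false,                  // noMipmap
 *     true,                   // invertY
 *     scene,
 *     3,                      // samplingMode (trilinear, the default)
 *     () => console.log("texture ready")
 *   );
 */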
_.type : 0, R(_.width, _.height, V, v, _, () => { }), !((w = _._hardwareTexture) === null || w === void 0) && w.underlyingResource) !S && !M && this._generateMipmaps(_, this._uploadEncoder); else { const k = this._textureHelper.createGPUTextureForInternalTexture(_, V.width, V.height, void 0, p); zn.IsImageBitmap(V) && (this._textureHelper.updateTexture(V, _, V.width, V.height, _.depth, k.format, 0, 0, b, !1, 0, 0), !S && !M && this._generateMipmaps(_, this._uploadEncoder)); } C && C.removePendingData(_), _.isReady = !0, _.onLoadedObservable.notifyObservers(_), _.onLoadedObservable.clear(); }, () => !1, l, o, u, h, d, f, m); } /** * Wraps an external web gpu texture in a Babylon texture. * @param texture defines the external texture * @returns the babylon internal texture */ wrapWebGPUTexture(e) { const t = new VF(e), i = new ln(this, ts.Unknown, !0); return i._hardwareTexture = t, i.isReady = !0, i; } /** * Wraps an external web gl texture in a Babylon texture. * @returns the babylon internal texture */ wrapWebGLTexture() { throw new Error("wrapWebGLTexture is not supported, use wrapWebGPUTexture instead."); } generateMipMapsForCubemap(e) { var t; e.generateMipMaps && (!((t = e._hardwareTexture) === null || t === void 0) && t.underlyingResource || this._textureHelper.createGPUTextureForInternalTexture(e), this._generateMipmaps(e)); } /** * Update the sampling mode of a given texture * @param samplingMode defines the required sampling mode * @param texture defines the texture to update * @param generateMipMaps defines whether to generate mipmaps for the texture */ updateTextureSamplingMode(e, t, i = !1) { i && (t.generateMipMaps = !0, this._generateMipmaps(t)), t.samplingMode = e; } /** * Update the sampling mode of a given texture * @param texture defines the texture to update * @param wrapU defines the texture wrap mode of the u coordinates * @param wrapV defines the texture wrap mode of the v coordinates * @param wrapR defines the texture wrap mode of the r coordinates */ updateTextureWrappingMode(e, t, i = null, r = null) { t !== null && (e._cachedWrapU = t), i !== null && (e._cachedWrapV = i), (e.is2DArray || e.is3D) && r !== null && (e._cachedWrapR = r); } /** * Update the dimensions of a texture * @param texture texture to update * @param width new width of the texture * @param height new height of the texture * @param depth new depth of the texture */ updateTextureDimensions(e, t, i, r = 1) { if (!e._hardwareTexture || e.width === t && e.height === i && e.depth === r) return; const s = e._hardwareTexture.textureAdditionalUsages; e._hardwareTexture.release(), this._textureHelper.createGPUTextureForInternalTexture(e, t, i, r, s); } /** * @internal */ _setInternalTexture(e, t, i) { if (i = i ?? e, this._currentEffect) { const s = this._currentEffect._pipelineContext.shaderProcessingContext.availableTextures[i]; if (this._currentMaterialContext.setTexture(e, t), s && s.autoBindSampler) { const n = i + Io.AutoSamplerSuffix; this._currentMaterialContext.setSampler(n, t); } } } /** * Sets a texture to the according uniform. 
* @param channel The texture channel * @param unused unused parameter * @param texture The texture to apply * @param name The name of the uniform in the effect */ setTexture(e, t, i, r) { this._setTexture(e, i, !1, !1, r, r); } /** * Sets an array of texture to the WebGPU context * @param channel defines the channel where the texture array must be set * @param unused unused parameter * @param textures defines the array of textures to bind * @param name name of the channel */ setTextureArray(e, t, i, r) { for (let s = 0; s < i.length; s++) this._setTexture(-1, i[s], !0, !1, r + s.toString(), r); } _setTexture(e, t, i = !1, r = !1, s = "", n) { if (n = n ?? s, this._currentEffect) { if (!t) return this._currentMaterialContext.setTexture(s, null), !1; if (t.video) t.update(); else if (t.delayLoadState === 4) return t.delayLoad(), !1; let a = null; if (r ? a = t.depthStencilTexture : t.isReady() ? a = t.getInternalTexture() : t.isCube ? a = this.emptyCubeTexture : t.is3D ? a = this.emptyTexture3D : t.is2DArray ? a = this.emptyTexture2DArray : a = this.emptyTexture, a && !a.isMultiview) { if (a.isCube && a._cachedCoordinatesMode !== t.coordinatesMode) { a._cachedCoordinatesMode = t.coordinatesMode; const l = t.coordinatesMode !== 3 && t.coordinatesMode !== 5 ? 1 : 0; t.wrapU = l, t.wrapV = l; } a._cachedWrapU = t.wrapU, a._cachedWrapV = t.wrapV, a.is3D && (a._cachedWrapR = t.wrapR), this._setAnisotropicLevel(0, a, t.anisotropicFilteringLevel); } this._setInternalTexture(s, a, n); } else this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log(["frame #" + this._count + " - _setTexture called with a null _currentEffect! texture=", t])); return !0; } /** * @internal */ _setAnisotropicLevel(e, t, i) { t._cachedAnisotropicFilteringLevel !== i && (t._cachedAnisotropicFilteringLevel = Math.min(i, this._caps.maxAnisotropy)); } /** * @internal */ _bindTexture(e, t, i) { e !== void 0 && this._setInternalTexture(i, t); } /** * Generates the mipmaps for a texture * @param texture texture to generate the mipmaps for */ generateMipmaps(e) { this._generateMipmaps(e); } /** * @internal */ _generateMipmaps(e, t) { t = t ?? this._renderEncoder; const i = e._hardwareTexture; if (!i) return; t === this._renderEncoder && this._endCurrentRenderPass(); const r = e._hardwareTexture.format, s = zn.ComputeNumMipmapLevels(e.width, e.height); this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log("frame #" + this._count + " - generate mipmaps - width=" + e.width + ", height=" + e.height + ", isCube=" + e.isCube + ", command encoder=" + (t === this._renderEncoder ? "render" : "copy"))), e.isCube ? 
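/*
 * Hedged usage sketch for the texture update helpers above — assumes `engine` is a constructed
 * instance of this WebGPU engine class and `internalTexture` an existing internal texture;
 * the constants 3 (trilinear sampling) and 1 (wrap address mode) are assumed values:
 *
 *   engine.updateTextureSamplingMode(3, internalTexture);       // trilinear filtering
 *   engine.updateTextureWrappingMode(internalTexture, 1, 1);    // wrapU / wrapV
 *   engine.updateTextureDimensions(internalTexture, 512, 512);  // re-creates the GPU texture if the size changed
 *   engine.generateMipmaps(internalTexture);                    // rebuild the mip chain
 */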
this._textureHelper.generateCubeMipmaps(i, r, s, t) : this._textureHelper.generateMipmaps(i, r, s, 0, t); } /** * Update a portion of an internal texture * @param texture defines the texture to update * @param imageData defines the data to store into the texture * @param xOffset defines the x coordinates of the update rectangle * @param yOffset defines the y coordinates of the update rectangle * @param width defines the width of the update rectangle * @param height defines the height of the update rectangle * @param faceIndex defines the face index if texture is a cube (0 by default) * @param lod defines the lod level to update (0 by default) * @param generateMipMaps defines whether to generate mipmaps or not */ updateTextureData(e, t, i, r, s, n, a = 0, l = 0, o = !1) { var u; let h = e._hardwareTexture; !((u = e._hardwareTexture) === null || u === void 0) && u.underlyingResource || (h = this._textureHelper.createGPUTextureForInternalTexture(e)); const d = new Uint8Array(t.buffer, t.byteOffset, t.byteLength); this._textureHelper.updateTexture(d, e, s, n, e.depth, h.format, a, l, e.invertY, !1, i, r), o && this._generateMipmaps(e); } /** * @internal */ _uploadCompressedDataToTextureDirectly(e, t, i, r, s, n = 0, a = 0) { var l; let o = e._hardwareTexture; !((l = e._hardwareTexture) === null || l === void 0) && l.underlyingResource || (e.format = t, o = this._textureHelper.createGPUTextureForInternalTexture(e, i, r)); const u = new Uint8Array(s.buffer, s.byteOffset, s.byteLength); this._textureHelper.updateTexture(u, e, i, r, e.depth, o.format, n, a, !1, !1, 0, 0); } /** * @internal */ _uploadDataToTextureDirectly(e, t, i = 0, r = 0, s, n = !1) { var a; const l = Math.round(Math.log(e.width) * Math.LOG2E), o = Math.round(Math.log(e.height) * Math.LOG2E), u = n ? e.width : Math.pow(2, Math.max(l - r, 0)), h = n ? e.height : Math.pow(2, Math.max(o - r, 0)); let d = e._hardwareTexture; !((a = e._hardwareTexture) === null || a === void 0) && a.underlyingResource || (d = this._textureHelper.createGPUTextureForInternalTexture(e, u, h)); const f = new Uint8Array(t.buffer, t.byteOffset, t.byteLength); this._textureHelper.updateTexture(f, e, u, h, e.depth, d.format, i, r, e.invertY, !1, 0, 0); } /** * @internal */ _uploadArrayBufferViewToTexture(e, t, i = 0, r = 0) { this._uploadDataToTextureDirectly(e, t, i, r); } /** * @internal */ _uploadImageToTexture(e, t, i = 0, r = 0) { var s; let n = e._hardwareTexture; if (!((s = e._hardwareTexture) === null || s === void 0) && s.underlyingResource || (n = this._textureHelper.createGPUTextureForInternalTexture(e)), t instanceof HTMLImageElement) throw "WebGPU engine: HTMLImageElement not supported in _uploadImageToTexture!"; const a = t, l = Math.ceil(e.width / (1 << r)), o = Math.ceil(e.height / (1 << r)); this._textureHelper.updateTexture(a, e, l, o, e.depth, n.format, i, r, e.invertY, !1, 0, 0); } /** * Reads pixels from the current frame buffer. 
Please note that this function can be slow * @param x defines the x coordinate of the rectangle where pixels must be read * @param y defines the y coordinate of the rectangle where pixels must be read * @param width defines the width of the rectangle where pixels must be read * @param height defines the height of the rectangle where pixels must be read * @param hasAlpha defines whether the output should have alpha or not (defaults to true) * @param flushRenderer true to flush the renderer from the pending commands before reading the pixels * @returns a ArrayBufferView promise (Uint8Array) containing RGBA colors */ // eslint-disable-next-line @typescript-eslint/no-unused-vars readPixels(e, t, i, r, s = !0, n = !0) { const l = this._getCurrentRenderPassWrapper().colorAttachmentGPUTextures[0]; if (!l) return Promise.resolve(new Uint8Array(0)); const o = l.underlyingResource, u = l.format; return o ? (n && this.flushFramebuffer(), this._textureHelper.readPixels(o, e, t, i, r, u)) : Promise.resolve(new Uint8Array(0)); } //------------------------------------------------------------------------------ // Frame management //------------------------------------------------------------------------------ /** * Begin a new frame */ beginFrame() { super.beginFrame(); } /** * End the current frame */ endFrame() { if (this._endCurrentRenderPass(), this._snapshotRendering.endFrame(), this._timestampQuery.endFrame(this._renderEncoder), this._timestampIndex = 0, this.flushFramebuffer(), this._textureHelper.destroyDeferredTextures(), this._bufferManager.destroyDeferredBuffers(), this._features._collectUbosUpdatedInFrame) { if (this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), !this._count || this._count < this.dbgVerboseLogsNumFrames)) { const e = []; for (const t in Vi._UpdatedUbosInFrame) e.push(t + ":" + Vi._UpdatedUbosInFrame[t]); Ce.Log(["frame #" + this._count + " - updated ubos -", e.join(", ")]); } Vi._UpdatedUbosInFrame = {}; } this.countersLastFrame.numEnableEffects = this._counters.numEnableEffects, this.countersLastFrame.numEnableDrawWrapper = this._counters.numEnableDrawWrapper, this.countersLastFrame.numBundleCreationNonCompatMode = this._counters.numBundleCreationNonCompatMode, this.countersLastFrame.numBundleReuseNonCompatMode = this._counters.numBundleReuseNonCompatMode, this._counters.numEnableEffects = 0, this._counters.numEnableDrawWrapper = 0, this._counters.numBundleCreationNonCompatMode = 0, this._counters.numBundleReuseNonCompatMode = 0, this._cacheRenderPipeline.endFrame(), this._cacheBindGroups.endFrame(), this._pendingDebugCommands.length = 0, super.endFrame(), this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), this._count < this.dbgVerboseLogsNumFrames && Ce.Log(["%c frame #" + this._count + " - end", "background: #ffff00"]), this._count < this.dbgVerboseLogsNumFrames && (this._count++, this._count !== this.dbgVerboseLogsNumFrames && Ce.Log(["%c frame #" + this._count + " - begin", "background: #ffff00"]))); } /** * Force a WebGPU flush (ie. 
a flush of all waiting commands) */ flushFramebuffer() { this._endCurrentRenderPass(), this._commandBuffers[0] = this._uploadEncoder.finish(), this._commandBuffers[1] = this._renderEncoder.finish(), this._device.queue.submit(this._commandBuffers), this._uploadEncoder = this._device.createCommandEncoder(this._uploadEncoderDescriptor), this._renderEncoder = this._device.createCommandEncoder(this._renderEncoderDescriptor), this._timestampQuery.startFrame(this._uploadEncoder), this._textureHelper.setCommandEncoder(this._uploadEncoder), this._bundleList.reset(); } /** @internal */ _currentFrameBufferIsDefaultFrameBuffer() { return this._currentPassIsMainPass(); } //------------------------------------------------------------------------------ // Render Pass //------------------------------------------------------------------------------ _startRenderTargetRenderPass(e, t, i, r, s) { var n, a, l, o, u, h, d, f; this._endCurrentRenderPass(); const p = e, m = p._depthStencilTexture, _ = m == null ? void 0 : m._hardwareTexture, v = _ == null ? void 0 : _.underlyingResource, C = _ == null ? void 0 : _.getMSAATexture(), x = v == null ? void 0 : v.createView(this._rttRenderPassWrapper.depthAttachmentViewDescriptor), b = C == null ? void 0 : C.createView(this._rttRenderPassWrapper.depthAttachmentViewDescriptor), S = _ ? zn.HasStencilAspect(_.format) : !1, M = []; this.useReverseDepthBuffer && this.setDepthFunctionToGreaterOrEqual(); const R = Xpe; i && (R.r = i.r * 255, R.g = i.g * 255, R.b = i.b * 255, R.a = i.a * 255); const w = t && i, V = t && r, k = t && s; if (p._attachments && p.isMulti) { (!this._mrtAttachments || this._mrtAttachments.length === 0) && (this._mrtAttachments = p._defaultAttachments); for (let L = 0; L < this._mrtAttachments.length; ++L) { const B = this._mrtAttachments[L], U = p.textures[L], K = U == null ? void 0 : U._hardwareTexture, ee = K == null ? void 0 : K.underlyingResource; if (K && ee) { const Z = K.getMSAATexture(L), q = (a = (n = p.layerIndices) === null || n === void 0 ? void 0 : n[L]) !== null && a !== void 0 ? a : 0, le = (o = (l = p.faceIndices) === null || l === void 0 ? void 0 : l[L]) !== null && o !== void 0 ? o : 0, ie = Object.assign(Object.assign({}, this._rttRenderPassWrapper.colorAttachmentViewDescriptor), { format: K.format, baseArrayLayer: U.isCube ? q * 6 + le : q }), $ = Object.assign(Object.assign({}, this._rttRenderPassWrapper.colorAttachmentViewDescriptor), { format: K.format, baseArrayLayer: 0 }), j = U.type === 7 || U.type === 5, J = ee.createView(ie), ne = Z == null ? void 0 : Z.createView($); M.push({ view: ne || J, resolveTarget: Z ? J : void 0, clearValue: B !== 0 && w ? j ? R : i : void 0, loadOp: B !== 0 && w ? au.Clear : au.Load, storeOp: _m.Store }); } } this._cacheRenderPipeline.setMRT(p.textures, this._mrtAttachments.length), this._cacheRenderPipeline.setMRTAttachments(this._mrtAttachments); } else { const L = p.texture; if (L) { const B = L._hardwareTexture, U = B.underlyingResource, K = B.getMSAATexture(), ee = U.createView(this._rttRenderPassWrapper.colorAttachmentViewDescriptor), Z = K == null ? void 0 : K.createView(this._rttRenderPassWrapper.colorAttachmentViewDescriptor), q = L.type === 7 || L.type === 5; M.push({ view: Z || ee, resolveTarget: K ? ee : void 0, clearValue: w ? q ? R : i : void 0, loadOp: w ? au.Clear : au.Load, storeOp: _m.Store }); } else M.push(null); } if ((u = this._debugPushGroup) === null || u === void 0 || u.call(this, "render target pass" + (e.label ? 
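/*
 * Hedged usage sketch for readPixels()/flushFramebuffer() above — assumes `engine` is a constructed
 * instance of this WebGPU engine class and that the call site is inside an async function:
 *
 *   // flushRenderer defaults to true, so pending command buffers are submitted before the copy:
 *   const rgba = await engine.readPixels(0, 0, engine.getRenderWidth(), engine.getRenderHeight());
 *   // rgba is a Uint8Array of RGBA bytes (empty if no color attachment is available)
 *
 *   engine.flushFramebuffer(); // explicit submit of the upload + render command encoders
 */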
" (" + e.label + ")" : ""), 1), this._rttRenderPassWrapper.renderPassDescriptor = { label: ((h = e.label) !== null && h !== void 0 ? h : "RTT") + "RenderPass", colorAttachments: M, depthStencilAttachment: m && v ? { view: b || x, depthClearValue: V ? this.useReverseDepthBuffer ? this._clearReverseDepthValue : this._clearDepthValue : void 0, depthLoadOp: V ? au.Clear : au.Load, depthStoreOp: _m.Store, stencilClearValue: p._depthStencilTextureWithStencil && k ? this._clearStencilValue : void 0, stencilLoadOp: S ? p._depthStencilTextureWithStencil && k ? au.Clear : au.Load : void 0, stencilStoreOp: S ? _m.Store : void 0 } : void 0, occlusionQuerySet: !((d = this._occlusionQuery) === null || d === void 0) && d.hasQueries ? this._occlusionQuery.querySet : void 0 }, this._timestampQuery.startPass(this._rttRenderPassWrapper.renderPassDescriptor, this._timestampIndex), this._currentRenderPass = this._renderEncoder.beginRenderPass(this._rttRenderPassWrapper.renderPassDescriptor), this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), !this._count || this._count < this.dbgVerboseLogsNumFrames)) { const L = p.texture; Ce.Log([ "frame #" + this._count + " - render target begin pass - rtt name=" + e.label + ", internalTexture.uniqueId=" + L.uniqueId + ", width=" + L.width + ", height=" + L.height + ", setClearStates=" + t, "renderPassDescriptor=", this._rttRenderPassWrapper.renderPassDescriptor ]); } (f = this._debugFlushPendingCommands) === null || f === void 0 || f.call(this), this._resetRenderPassStates(), (!_ || !zn.HasStencilAspect(_.format)) && (this._stencilStateComposer.enabled = !1); } _startMainRenderPass(e, t, i, r) { var s, n, a; this._endCurrentRenderPass(), this.useReverseDepthBuffer && this.setDepthFunctionToGreaterOrEqual(); const l = e && t, o = e && i, u = e && r; this._mainRenderPassWrapper.renderPassDescriptor.colorAttachments[0].clearValue = l ? t : void 0, this._mainRenderPassWrapper.renderPassDescriptor.colorAttachments[0].loadOp = l ? au.Clear : au.Load, this._mainRenderPassWrapper.renderPassDescriptor.depthStencilAttachment.depthClearValue = o ? this.useReverseDepthBuffer ? this._clearReverseDepthValue : this._clearDepthValue : void 0, this._mainRenderPassWrapper.renderPassDescriptor.depthStencilAttachment.depthLoadOp = o ? au.Clear : au.Load, this._mainRenderPassWrapper.renderPassDescriptor.depthStencilAttachment.stencilClearValue = u ? this._clearStencilValue : void 0, this._mainRenderPassWrapper.renderPassDescriptor.depthStencilAttachment.stencilLoadOp = this.isStencilEnable ? u ? au.Clear : au.Load : void 0, this._mainRenderPassWrapper.renderPassDescriptor.occlusionQuerySet = !((s = this._occlusionQuery) === null || s === void 0) && s.hasQueries ? this._occlusionQuery.querySet : void 0; const h = this._context.getCurrentTexture(); this._mainRenderPassWrapper.colorAttachmentGPUTextures[0].set(h), this._options.antialias ? 
(zZ.format = h.format, this._mainRenderPassWrapper.renderPassDescriptor.colorAttachments[0].resolveTarget = h.createView(zZ)) : (HZ.format = h.format, this._mainRenderPassWrapper.renderPassDescriptor.colorAttachments[0].view = h.createView(HZ)), this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log([ "frame #" + this._count + " - main begin pass - texture width=" + this._mainTextureExtends.width, " height=" + this._mainTextureExtends.height + ", setClearStates=" + e, "renderPassDescriptor=", this._mainRenderPassWrapper.renderPassDescriptor ])), (n = this._debugPushGroup) === null || n === void 0 || n.call(this, "main pass", 0), this._timestampQuery.startPass(this._mainRenderPassWrapper.renderPassDescriptor, this._timestampIndex), this._currentRenderPass = this._renderEncoder.beginRenderPass(this._mainRenderPassWrapper.renderPassDescriptor), this._setDepthTextureFormat(this._mainRenderPassWrapper), this._setColorFormat(this._mainRenderPassWrapper), (a = this._debugFlushPendingCommands) === null || a === void 0 || a.call(this), this._resetRenderPassStates(), this._isStencilEnable || (this._stencilStateComposer.enabled = !1); } /** @internal */ _endCurrentRenderPass() { var e, t, i; if (!this._currentRenderPass) return 0; const r = this._currentPassIsMainPass() ? 2 : 1; return !this._snapshotRendering.endRenderPass(this._currentRenderPass) && !this.compatibilityMode && (this._bundleList.run(this._currentRenderPass), this._bundleList.reset()), this._currentRenderPass.end(), this._timestampQuery.endPass(this._timestampIndex, this._currentRenderTarget && this._currentRenderTarget.gpuTimeInFrame ? this._currentRenderTarget.gpuTimeInFrame : this.gpuTimeInFrameForMainPass), this._timestampIndex += 2, this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log("frame #" + this._count + " - " + (r === 2 ? "main" : "render target") + " end pass" + (r === 1 ? " - internalTexture.uniqueId=" + ((t = (e = this._currentRenderTarget) === null || e === void 0 ? void 0 : e.texture) === null || t === void 0 ? void 0 : t.uniqueId) : ""))), (i = this._debugPopGroup) === null || i === void 0 || i.call(this, 0), this._currentRenderPass = null, r; } /** * Binds the frame buffer to the specified texture. * @param texture The render target wrapper to render to * @param faceIndex The face of the texture to render to in case of cube texture * @param requiredWidth The width of the target to render to * @param requiredHeight The height of the target to render to * @param forceFullscreenViewport Forces the viewport to be the entire texture/screen if true * @param lodLevel defines the lod level to bind to the frame buffer * @param layer defines the 2d array index to bind to frame buffer to */ bindFramebuffer(e, t = 0, i, r, s, n = 0, a = 0) { var l, o; const u = (l = e.texture) === null || l === void 0 ? void 0 : l._hardwareTexture; this._currentRenderTarget ? this.unBindFramebuffer(this._currentRenderTarget) : this._endCurrentRenderPass(), this._currentRenderTarget = e, this._rttRenderPassWrapper.colorAttachmentGPUTextures[0] = u, this._rttRenderPassWrapper.depthTextureFormat = this._currentRenderTarget._depthStencilTexture ? 
zn.GetWebGPUTextureFormat(-1, this._currentRenderTarget._depthStencilTexture.format) : void 0, this._setDepthTextureFormat(this._rttRenderPassWrapper), this._setColorFormat(this._rttRenderPassWrapper), this._rttRenderPassWrapper.colorAttachmentViewDescriptor = { format: this._colorFormat, dimension: Ea.E2d, mipLevelCount: 1, baseArrayLayer: e.isCube ? a * 6 + t : a, baseMipLevel: n, arrayLayerCount: 1, aspect: jA.All }, this._rttRenderPassWrapper.depthAttachmentViewDescriptor = { format: this._depthTextureFormat, dimension: Ea.E2d, mipLevelCount: 1, baseArrayLayer: e.isCube ? a * 6 + t : a, baseMipLevel: 0, arrayLayerCount: 1, aspect: jA.All }, this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log([ "frame #" + this._count + " - bindFramebuffer - rtt name=" + e.label + ", internalTexture.uniqueId=" + ((o = e.texture) === null || o === void 0 ? void 0 : o.uniqueId) + ", face=" + t + ", lodLevel=" + n + ", layer=" + a, "colorAttachmentViewDescriptor=", this._rttRenderPassWrapper.colorAttachmentViewDescriptor, "depthAttachmentViewDescriptor=", this._rttRenderPassWrapper.depthAttachmentViewDescriptor ])), this._cachedViewport && !s ? this.setViewport(this._cachedViewport, i, r) : (i || (i = e.width, n && (i = i / Math.pow(2, n))), r || (r = e.height, n && (r = r / Math.pow(2, n))), this._viewport(0, 0, i, r)), this.wipeCaches(); } /** * Unbind the current render target texture from the WebGPU context * @param texture defines the render target wrapper to unbind * @param disableGenerateMipMaps defines a boolean indicating that mipmaps must not be generated * @param onBeforeUnbind defines a function which will be called before the effective unbind */ unBindFramebuffer(e, t = !1, i) { var r, s; const n = this._currentRenderTarget; this._currentRenderTarget = null, i && i(), this._currentRenderTarget = n, this._endCurrentRenderPass(), !((r = e.texture) === null || r === void 0) && r.generateMipMaps && !t && !e.isCube && this._generateMipmaps(e.texture), this._currentRenderTarget = null, this.dbgVerboseLogsForFirstFrames && (this._count === void 0 && (this._count = 0), (!this._count || this._count < this.dbgVerboseLogsNumFrames) && Ce.Log("frame #" + this._count + " - unBindFramebuffer - rtt name=" + e.label + ", internalTexture.uniqueId=", (s = e.texture) === null || s === void 0 ? void 0 : s.uniqueId)), this._mrtAttachments = [], this._cacheRenderPipeline.setMRT([]), this._cacheRenderPipeline.setMRTAttachments(this._mrtAttachments); } /** * Unbind the current render target and bind the default framebuffer */ restoreDefaultFramebuffer() { this._currentRenderTarget ? this.unBindFramebuffer(this._currentRenderTarget) : this._currentRenderPass || this._startMainRenderPass(!1), this._cachedViewport && this.setViewport(this._cachedViewport), this.wipeCaches(); } //------------------------------------------------------------------------------ // Render //------------------------------------------------------------------------------ /** * @internal */ _setColorFormat(e) { var t, i; const r = (i = (t = e.colorAttachmentGPUTextures[0]) === null || t === void 0 ? void 0 : t.format) !== null && i !== void 0 ? 
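/*
 * Hedged usage sketch for render-target binding above — assumes `engine` is a constructed instance
 * of this WebGPU engine class and `rtWrapper` an existing render target wrapper:
 *
 *   engine.bindFramebuffer(rtWrapper);    // ends the current pass and targets the RTT
 *   // ... issue draw calls into the render target ...
 *   engine.unBindFramebuffer(rtWrapper);  // may generate mipmaps for the RTT texture
 *   engine.restoreDefaultFramebuffer();   // back to the swap-chain main pass
 */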
i : null; this._cacheRenderPipeline.setColorFormat(r), this._colorFormat !== r && (this._colorFormat = r); } /** * @internal */ _setDepthTextureFormat(e) { this._cacheRenderPipeline.setDepthStencilFormat(e.depthTextureFormat), this._depthTextureFormat !== e.depthTextureFormat && (this._depthTextureFormat = e.depthTextureFormat); } setDitheringState() { } setRasterizerState() { } /** * Set various states to the webGL context * @param culling defines culling state: true to enable culling, false to disable it * @param zOffset defines the value to apply to zOffset (0 by default) * @param force defines if states must be applied even if cache is up to date * @param reverseSide defines if culling must be reversed (CCW if false, CW if true) * @param cullBackFaces true to cull back faces, false to cull front faces (if culling is enabled) * @param stencil stencil states to set * @param zOffsetUnits defines the value to apply to zOffsetUnits (0 by default) */ setState(e, t = 0, i, r = !1, s, n, a = 0) { var l, o; (this._depthCullingState.cull !== e || i) && (this._depthCullingState.cull = e); const u = !((o = (l = this.cullBackFaces) !== null && l !== void 0 ? l : s) !== null && o !== void 0) || o ? 1 : 2; (this._depthCullingState.cullFace !== u || i) && (this._depthCullingState.cullFace = u), this.setZOffset(t), this.setZOffsetUnits(a); const h = r ? this._currentRenderTarget ? 1 : 2 : this._currentRenderTarget ? 2 : 1; (this._depthCullingState.frontFace !== h || i) && (this._depthCullingState.frontFace = h), this._stencilStateComposer.stencilMaterial = n; } _applyRenderPassChanges(e) { const t = this._stencilStateComposer.enabled ? this._mustUpdateStencilRef() : !1, i = this._alphaState.alphaBlend ? this._mustUpdateBlendColor() : !1; this._mustUpdateViewport() && this._applyViewport(e), this._mustUpdateScissor() && this._applyScissor(e), t && this._applyStencilRef(e), i && this._applyBlendColor(e); } _draw(e, t, i, r, s) { var n; const a = this._getCurrentRenderPass(), l = this._bundleList; this.applyStates(); const o = this._currentEffect._pipelineContext; if (this.bindUniformBufferBase(this._currentRenderTarget ? this._ubInvertY : this._ubDontInvertY, 0, Io.InternalsUBOName), o.uniformBuffer && (o.uniformBuffer.update(), this.bindUniformBufferBase(o.uniformBuffer.getBuffer(), 0, Io.LeftOvertUBOName)), this._snapshotRendering.play) { this._reportDrawCall(); return; } !this.compatibilityMode && (this._currentDrawContext.isDirty(this._currentMaterialContext.updateId) || this._currentMaterialContext.isDirty || this._currentMaterialContext.forceBindGroupCreation) && (this._currentDrawContext.fastBundle = void 0); const u = !this.compatibilityMode && this._currentDrawContext.fastBundle; let h = a; if (u || this._snapshotRendering.record) { if (this._applyRenderPassChanges(l), !this._snapshotRendering.record) { this._counters.numBundleReuseNonCompatMode++, this._currentDrawContext.indirectDrawBuffer && this._currentDrawContext.setIndirectData(r, s || 1, i), l.addBundle(this._currentDrawContext.fastBundle), this._reportDrawCall(); return; } h = l.getBundleEncoder(this._cacheRenderPipeline.colorFormats, this._depthTextureFormat, this.currentSampleCount), l.numDrawCalls++; } let d = 0; if (this._currentMaterialContext.hasFloatOrDepthTextures) { let v = 1; for (let C = 0; C < o.shaderProcessingContext.textureNames.length; ++C) { const x = o.shaderProcessingContext.textureNames[C], b = (n = this._currentMaterialContext.textures[x]) === null || n === void 0 ? 
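/*
 * Hedged usage sketch for setState() above — assumes `engine` is a constructed instance of this
 * WebGPU engine class:
 *
 *   engine.setState(
 *     true,  // culling enabled
 *     0,     // zOffset
 *     false, // force
 *     false, // reverseSide (keep the default winding)
 *     true   // cullBackFaces
 *   );
 */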
void 0 : n.texture, S = b && b.format >= 13 && b.format <= 18; ((b == null ? void 0 : b.type) === 1 && !this._caps.textureFloatLinearFiltering || S) && (d |= v), v = v << 1; } } this._currentMaterialContext.textureState = d; const f = this._cacheRenderPipeline.getRenderPipeline(t, this._currentEffect, this.currentSampleCount, d), p = this._cacheBindGroups.getBindGroups(o, this._currentDrawContext, this._currentMaterialContext); this._snapshotRendering.record || (this._applyRenderPassChanges(this.compatibilityMode ? null : l), this.compatibilityMode || (this._counters.numBundleCreationNonCompatMode++, h = this._device.createRenderBundleEncoder({ colorFormats: this._cacheRenderPipeline.colorFormats, depthStencilFormat: this._depthTextureFormat, sampleCount: zn.GetSample(this.currentSampleCount) }))), h.setPipeline(f), this._currentIndexBuffer && h.setIndexBuffer(this._currentIndexBuffer.underlyingResource, this._currentIndexBuffer.is32Bits ? yT.Uint32 : yT.Uint16, 0); const m = this._cacheRenderPipeline.vertexBuffers; for (let v = 0; v < m.length; v++) { const C = m[v], x = C.effectiveBuffer; x && h.setVertexBuffer(v, x.underlyingResource, C._validOffsetRange ? 0 : C.byteOffset); } for (let v = 0; v < p.length; v++) h.setBindGroup(v, p[v]); const _ = !this.compatibilityMode && !this._snapshotRendering.record; _ && this._currentDrawContext.indirectDrawBuffer ? (this._currentDrawContext.setIndirectData(r, s || 1, i), e === 0 ? h.drawIndexedIndirect(this._currentDrawContext.indirectDrawBuffer, 0) : h.drawIndirect(this._currentDrawContext.indirectDrawBuffer, 0)) : e === 0 ? h.drawIndexed(r, s || 1, i, 0, 0) : h.draw(r, s || 1, i, 0), _ && (this._currentDrawContext.fastBundle = h.finish(), l.addBundle(this._currentDrawContext.fastBundle)), this._reportDrawCall(); } /** * Draw a list of indexed primitives * @param fillMode defines the primitive to use * @param indexStart defines the starting index * @param indexCount defines the number of index to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawElementsType(e, t, i, r = 1) { this._draw(0, e, t, i, r); } /** * Draw a list of unindexed primitives * @param fillMode defines the primitive to use * @param verticesStart defines the index of first vertex to draw * @param verticesCount defines the count of vertices to draw * @param instancesCount defines the number of instances to draw (if instantiation is enabled) */ drawArraysType(e, t, i, r = 1) { this._currentIndexBuffer = null, this._draw(1, e, t, i, r); } //------------------------------------------------------------------------------ // Dispose //------------------------------------------------------------------------------ /** * Dispose and release all associated resources */ dispose() { var e, t; this._isDisposed = !0, this._timestampQuery.dispose(), (e = this._mainTexture) === null || e === void 0 || e.destroy(), (t = this._depthTexture) === null || t === void 0 || t.destroy(), this._textureHelper.destroyDeferredTextures(), this._bufferManager.destroyDeferredBuffers(), this._device.destroy(), super.dispose(); } //------------------------------------------------------------------------------ // Misc //------------------------------------------------------------------------------ /** * Gets the current render width * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render width */ getRenderWidth(e = !1) { var t, i; return !e && this._currentRenderTarget ? 
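/*
 * Hedged usage sketch for the draw entry points above — assumes `engine` is a constructed instance
 * of this WebGPU engine class with buffers and an effect prepared beforehand; `drawWrapper`,
 * `vertexBuffers`, `indexBuffer`, `effect` and `indexCount` are illustrative, and fill mode 0 is
 * assumed to be the triangle fill mode:
 *
 *   engine.enableEffect(drawWrapper); // a DrawWrapper is expected rather than a bare Effect
 *   engine.bindBuffers(vertexBuffers, indexBuffer, effect);
 *   engine.drawElementsType(0, 0, indexCount);   // indexed draw
 *   // engine.drawArraysType(0, 0, vertexCount); // non-indexed variant
 */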
this._currentRenderTarget.width : (i = (t = this._renderingCanvas) === null || t === void 0 ? void 0 : t.width) !== null && i !== void 0 ? i : 0; } /** * Gets the current render height * @param useScreen defines if screen size must be used (or the current render target if any) * @returns a number defining the current render height */ getRenderHeight(e = !1) { var t, i; return !e && this._currentRenderTarget ? this._currentRenderTarget.height : (i = (t = this._renderingCanvas) === null || t === void 0 ? void 0 : t.height) !== null && i !== void 0 ? i : 0; } //------------------------------------------------------------------------------ // Errors //------------------------------------------------------------------------------ /** * Get the current error code of the WebGPU context * @returns the error code */ getError() { return 0; } //------------------------------------------------------------------------------ // Unused WebGPU //------------------------------------------------------------------------------ /** * @internal */ bindSamplers() { } /** * @internal */ _bindTextureDirectly() { return !1; } /** * Gets a boolean indicating if all created effects are ready * @returns always true - No parallel shader compilation */ areAllEffectsReady() { return !0; } /** * @internal */ _executeWhenRenderingStateIsCompiled(e, t) { t(); } /** * @internal */ _isRenderingStateCompiled() { return !0; } /** @internal */ _getUnpackAlignement() { return 1; } /** * @internal */ _unpackFlipY() { } /** * @internal */ _bindUnboundFramebuffer() { throw "_bindUnboundFramebuffer is not implementedin WebGPU! You probably want to use restoreDefaultFramebuffer or unBindFramebuffer instead"; } // TODO WEBGPU. All of the below should go once engine split with baseEngine. /** * @internal */ _getSamplingParameters() { throw "_getSamplingParameters is not available in WebGPU"; } /** * @internal */ getUniforms() { return []; } /** * @internal */ setIntArray() { return !1; } /** * @internal */ setIntArray2() { return !1; } /** * @internal */ setIntArray3() { return !1; } /** * @internal */ setIntArray4() { return !1; } /** * @internal */ setArray() { return !1; } /** * @internal */ setArray2() { return !1; } /** * @internal */ setArray3() { return !1; } /** * @internal */ setArray4() { return !1; } /** * @internal */ setMatrices() { return !1; } /** * @internal */ setMatrix3x3() { return !1; } /** * @internal */ setMatrix2x2() { return !1; } /** * @internal */ setFloat() { return !1; } /** * @internal */ setFloat2() { return !1; } /** * @internal */ setFloat3() { return !1; } /** * @internal */ setFloat4() { return !1; } } br._GLSLslangDefaultOptions = { jsPath: `${Ve._DefaultCdnUrl}/glslang/glslang.js`, wasmPath: `${Ve._DefaultCdnUrl}/glslang/glslang.wasm` }; br.UseTWGSL = !0; br.prototype.setAlphaMode = function(c, e = !1) { if (this._alphaMode === c && (c === 0 && !this._alphaState.alphaBlend || c !== 0 && this._alphaState.alphaBlend)) { if (!e) { const t = c === 0; this.depthCullingState.depthMask !== t && (this.setDepthWrite(t), this._cacheRenderPipeline.setDepthWriteEnabled(t)); } return; } switch (c) { case 0: this._alphaState.alphaBlend = !1; break; case 7: this._alphaState.setAlphaBlendFunctionParameters(1, 771, 1, 1), this._alphaState.alphaBlend = !0; break; case 8: this._alphaState.setAlphaBlendFunctionParameters(1, 771, 1, 771), this._alphaState.alphaBlend = !0; break; case 2: this._alphaState.setAlphaBlendFunctionParameters(770, 771, 1, 1), this._alphaState.alphaBlend = !0; break; case 6: 
this._alphaState.setAlphaBlendFunctionParameters(1, 1, 0, 1), this._alphaState.alphaBlend = !0; break; case 1: this._alphaState.setAlphaBlendFunctionParameters(770, 1, 0, 1), this._alphaState.alphaBlend = !0; break; case 3: this._alphaState.setAlphaBlendFunctionParameters(0, 769, 1, 1), this._alphaState.alphaBlend = !0; break; case 4: this._alphaState.setAlphaBlendFunctionParameters(774, 0, 1, 1), this._alphaState.alphaBlend = !0; break; case 5: this._alphaState.setAlphaBlendFunctionParameters(770, 769, 1, 1), this._alphaState.alphaBlend = !0; break; case 9: this._alphaState.setAlphaBlendFunctionParameters(32769, 32770, 32771, 32772), this._alphaState.alphaBlend = !0; break; case 10: this._alphaState.setAlphaBlendFunctionParameters(1, 769, 1, 771), this._alphaState.alphaBlend = !0; break; case 11: this._alphaState.setAlphaBlendFunctionParameters(1, 1, 1, 1), this._alphaState.alphaBlend = !0; break; case 12: this._alphaState.setAlphaBlendFunctionParameters(772, 1, 0, 0), this._alphaState.alphaBlend = !0; break; case 13: this._alphaState.setAlphaBlendFunctionParameters(775, 769, 773, 771), this._alphaState.alphaBlend = !0; break; case 14: this._alphaState.setAlphaBlendFunctionParameters(1, 771, 1, 771), this._alphaState.alphaBlend = !0; break; case 15: this._alphaState.setAlphaBlendFunctionParameters(1, 1, 1, 0), this._alphaState.alphaBlend = !0; break; case 16: this._alphaState.setAlphaBlendFunctionParameters(775, 769, 0, 1), this._alphaState.alphaBlend = !0; break; case 17: this._alphaState.setAlphaBlendFunctionParameters(770, 771, 1, 771), this._alphaState.alphaBlend = !0; break; } e || (this.setDepthWrite(c === $e.ALPHA_DISABLE), this._cacheRenderPipeline.setDepthWriteEnabled(c === $e.ALPHA_DISABLE)), this._alphaMode = c, this._cacheRenderPipeline.setAlphaBlendEnabled(this._alphaState.alphaBlend), this._cacheRenderPipeline.setAlphaBlendFactors(this._alphaState._blendFunctionParameters, this._alphaState._blendEquationParameters); }; br.prototype.setAlphaEquation = function(c) { $e.prototype.setAlphaEquation.call(this, c), this._cacheRenderPipeline.setAlphaBlendFactors(this._alphaState._blendFunctionParameters, this._alphaState._blendEquationParameters); }; class bU { getBindGroups(e, t, i) { if (!i) throw new Error("WebGPUComputeContext.getBindGroups: bindingsMapping is required until browsers support reflection for wgsl shaders!"); if (this._bindGroups.length === 0) { const r = this._bindGroupEntries.length > 0; for (const s in e) { const n = e[s], a = i[s], l = a.group, o = a.binding, u = n.type, h = n.object; let d = n.indexInGroupEntries, f = this._bindGroupEntries[l]; switch (f || (f = this._bindGroupEntries[l] = []), u) { case ro.Sampler: { const p = h; d !== void 0 && r ? f[d].resource = this._cacheSampler.getSampler(p) : (n.indexInGroupEntries = f.length, f.push({ binding: o, resource: this._cacheSampler.getSampler(p) })); break; } case ro.Texture: case ro.TextureWithoutSampler: { const p = h, m = p._texture._hardwareTexture; d !== void 0 && r ? 
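/*
 * Hedged usage sketch for setAlphaMode() above — assumes `engine` is a constructed instance of this
 * WebGPU engine class; mode 2 is assumed to be ALPHA_COMBINE (SRC_ALPHA / ONE_MINUS_SRC_ALPHA) and
 * mode 0 ALPHA_DISABLE, and the second argument is assumed to be noDepthWriteChange:
 *
 *   engine.setAlphaMode(2);       // enable blending; depth writes are turned off
 *   engine.setAlphaMode(2, true); // same blending, but the current depth-write state is preserved
 *   engine.setAlphaMode(0);       // blending off; depth writes restored
 */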
(u === ro.Texture && (f[d++].resource = this._cacheSampler.getSampler(p._texture)), f[d].resource = m.view) : (n.indexInGroupEntries = f.length, u === ro.Texture && f.push({ binding: o - 1, resource: this._cacheSampler.getSampler(p._texture) }), f.push({ binding: o, resource: m.view })); break; } case ro.StorageTexture: { const p = h, m = p._texture._hardwareTexture; m.textureAdditionalUsages & fo.StorageBinding || Ce.Error(`computeDispatch: The texture (name=${p.name}, uniqueId=${p.uniqueId}) is not a storage texture!`, 50), d !== void 0 && r ? f[d].resource = m.viewForWriting : (n.indexInGroupEntries = f.length, f.push({ binding: o, resource: m.viewForWriting })); break; } case ro.ExternalTexture: { const m = h.underlyingResource; d !== void 0 && r ? f[d].resource = this._device.importExternalTexture({ source: m }) : (n.indexInGroupEntries = f.length, f.push({ binding: o, resource: this._device.importExternalTexture({ source: m }) })); break; } case ro.UniformBuffer: case ro.StorageBuffer: { const m = (u === ro.UniformBuffer, h).getBuffer(), _ = m.underlyingResource; d !== void 0 && r ? (f[d].resource.buffer = _, f[d].resource.size = m.capacity) : (n.indexInGroupEntries = f.length, f.push({ binding: o, resource: { buffer: _, offset: 0, size: m.capacity } })); break; } } } for (let s = 0; s < this._bindGroupEntries.length; ++s) { const n = this._bindGroupEntries[s]; if (!n) { this._bindGroups[s] = void 0; continue; } this._bindGroups[s] = this._device.createBindGroup({ layout: t.getBindGroupLayout(s), entries: n }); } this._bindGroups.length = this._bindGroupEntries.length; } return this._bindGroups; } constructor(e, t) { this._device = e, this._cacheSampler = t, this.uniqueId = bU._Counter++, this._bindGroupEntries = [], this.clear(); } clear() { this._bindGroups = []; } } bU._Counter = 0; class Ype { get isAsync() { return !1; } get isReady() { return !!this.stage; } constructor(e) { this._name = "unnamed", this.engine = e; } _getComputeShaderCode() { var e; return (e = this.sources) === null || e === void 0 ? 
void 0 : e.compute; } dispose() { } } const GZ = {}; br.prototype.createComputeContext = function() { return new bU(this._device, this._cacheSampler); }; br.prototype.createComputeEffect = function(c, e) { const i = (c.computeElement || c.compute || c.computeToken || c.computeSource || c) + "@" + e.defines; if (this._compiledComputeEffects[i]) { const s = this._compiledComputeEffects[i]; return e.onCompiled && s.isReady() && e.onCompiled(s), s; } const r = new mP(c, e, this, i); return this._compiledComputeEffects[i] = r, r; }; br.prototype.createComputePipelineContext = function() { return new Ype(this); }; br.prototype.areAllComputeEffectsReady = function() { for (const c in this._compiledComputeEffects) if (!this._compiledComputeEffects[c].isReady()) return !1; return !0; }; br.prototype.computeDispatch = function(c, e, t, i, r = 1, s = 1, n, a) { this._endCurrentRenderPass(); const l = c._pipelineContext, o = e; l.computePipeline || (l.computePipeline = this._device.createComputePipeline({ layout: zO.Auto, compute: l.stage })), a && this._timestampQuery.startPass(GZ, this._timestampIndex); const u = this._renderEncoder.beginComputePass(GZ); u.setPipeline(l.computePipeline); const h = o.getBindGroups(t, l.computePipeline, n); for (let d = 0; d < h.length; ++d) { const f = h[d]; f && u.setBindGroup(d, f); } i + r + s > 0 && u.dispatchWorkgroups(i, r, s), u.end(), a && (this._timestampQuery.endPass(this._timestampIndex, a), this._timestampIndex += 2); }; br.prototype.releaseComputeEffects = function() { for (const c in this._compiledComputeEffects) { const e = this._compiledComputeEffects[c].getPipelineContext(); this._deleteComputePipelineContext(e); } this._compiledComputeEffects = {}; }; br.prototype._prepareComputePipelineContext = function(c, e, t, i, r) { const s = c; this.dbgShowShaderCode && (Ce.Log(i), Ce.Log(e)), s.sources = { compute: e, rawCompute: t }, s.stage = this._createComputePipelineStageDescriptor(e, i, r); }; br.prototype._releaseComputeEffect = function(c) { this._compiledComputeEffects[c._key] && (delete this._compiledComputeEffects[c._key], this._deleteComputePipelineContext(c.getPipelineContext())); }; br.prototype._rebuildComputeEffects = function() { for (const c in this._compiledComputeEffects) { const e = this._compiledComputeEffects[c]; e._pipelineContext = null, e._wasPreviouslyReady = !1, e._prepareEffect(); } }; br.prototype._deleteComputePipelineContext = function(c) { c && c.dispose(); }; br.prototype._createComputePipelineStageDescriptor = function(c, e, t) { return e ? e = "//" + e.split(` `).join(` //`) + ` ` : e = "", { module: this._device.createShaderModule({ code: e + c }), entryPoint: t }; }; br.prototype._createDepthStencilCubeTexture = function(c, e) { const t = new ln(this, ts.DepthStencil); t.isCube = !0; const i = Object.assign({ bilinearFiltering: !1, comparisonFunction: 0, generateStencil: !1, samples: 1 }, e); return t.format = i.generateStencil ? 13 : 14, this._setupDepthStencilTexture(t, c, i.generateStencil, i.bilinearFiltering, i.comparisonFunction, i.samples), this._textureHelper.createGPUTextureForInternalTexture(t), this._internalTexturesCache.push(t), t; }; br.prototype.createCubeTexture = function(c, e, t, i, r = null, s = null, n, a = null, l = !1, o = 0, u = 0, h = null, d = !1) { return this.createCubeTextureBase(c, e, t, !!i, r, s, n, a, l, o, u, h, null, (f, p) => { const m = p, _ = m[0].width, v = _; this._setCubeMapTextureParams(f, !i), f.format = n ?? 
-1; const C = this._textureHelper.createGPUTextureForInternalTexture(f, _, v); this._textureHelper.updateCubeTextures(m, C.underlyingResource, _, v, C.format, !1, !1, 0, 0), i || this._generateMipmaps(f, this._uploadEncoder), f.isReady = !0, f.onLoadedObservable.notifyObservers(f), f.onLoadedObservable.clear(), r && r(); }, !!d); }; br.prototype._setCubeMapTextureParams = function(c, e, t) { c.samplingMode = e ? 3 : 2, c._cachedWrapU = 0, c._cachedWrapV = 0, t && (c._maxLodLevel = t); }; br.prototype._debugPushGroup = function(c, e) { this._options.enableGPUDebugMarkers && (e === 0 || e === 1 ? this._renderEncoder.pushDebugGroup(c) : this._currentRenderPass ? this._currentRenderPass.pushDebugGroup(c) : this._pendingDebugCommands.push(["push", c])); }; br.prototype._debugPopGroup = function(c) { this._options.enableGPUDebugMarkers && (c === 0 || c === 1 ? this._renderEncoder.popDebugGroup() : this._currentRenderPass ? this._currentRenderPass.popDebugGroup() : this._pendingDebugCommands.push(["pop", null])); }; br.prototype._debugInsertMarker = function(c, e) { this._options.enableGPUDebugMarkers && (e === 0 || e === 1 ? this._renderEncoder.insertDebugMarker(c) : this._currentRenderPass ? this._currentRenderPass.insertDebugMarker(c) : this._pendingDebugCommands.push(["insert", c])); }; br.prototype._debugFlushPendingCommands = function() { for (let c = 0; c < this._pendingDebugCommands.length; ++c) { const [e, t] = this._pendingDebugCommands[c]; switch (e) { case "push": this._debugPushGroup(t); break; case "pop": this._debugPopGroup(); break; case "insert": this._debugInsertMarker(t); break; } } this._pendingDebugCommands.length = 0; }; br.prototype.updateDynamicIndexBuffer = function(c, e, t = 0) { const i = c; let r; c.is32Bits ? r = e instanceof Uint32Array ? e : new Uint32Array(e) : r = e instanceof Uint16Array ? e : new Uint16Array(e), this._bufferManager.setSubData(i, t, r); }; br.prototype.updateDynamicVertexBuffer = function(c, e, t, i) { const r = c; t === void 0 && (t = 0); let s; i === void 0 ? (e instanceof Array ? s = new Float32Array(e) : e instanceof ArrayBuffer ? s = new Uint8Array(e) : s = e, i = s.byteLength) : e instanceof Array ? s = new Float32Array(e) : e instanceof ArrayBuffer ? 
s = new Uint8Array(e) : s = e, this._bufferManager.setSubData(r, t, s, 0, i); }; br.prototype.updateDynamicTexture = function(c, e, t, i = !1, r, s, n) { var a; if (!c) return; const l = e.width, o = e.height; let u = c._hardwareTexture; !((a = c._hardwareTexture) === null || a === void 0) && a.underlyingResource || (u = this._textureHelper.createGPUTextureForInternalTexture(c, l, o)), this._textureHelper.updateTexture(e, c, l, o, c.depth, u.format, 0, 0, t, i, 0, 0, n), c.generateMipMaps && this._generateMipmaps(c), c.isReady = !0; }; class Qpe extends rW { constructor(e) { super(e); } } Cr.prototype.setExternalTexture = function(c, e) { this._engine.setExternalTexture(c, e); }; br.prototype.createExternalTexture = function(c) { return new Qpe(c); }; br.prototype.setExternalTexture = function(c, e) { if (!e) { this._currentMaterialContext.setTexture(c, null); return; } this._setInternalTexture(c, e); }; br.prototype.unBindMultiColorAttachmentFramebuffer = function(c, e = !1, t) { t && t(); const r = c._attachments.length; this._endCurrentRenderPass(); for (let s = 0; s < r; s++) { const n = c.textures[s]; n.generateMipMaps && !e && !n.isCube && this._generateMipmaps(n); } this._currentRenderTarget = null, this._mrtAttachments = [], this._cacheRenderPipeline.setMRT([]), this._cacheRenderPipeline.setMRTAttachments(this._mrtAttachments); }; br.prototype.createMultipleRenderTarget = function(c, e, t) { var i, r, s, n; let a = !1, l = !0, o = !1, u = !1, h = 15, d = 1; const f = 0, p = 3, m = !1, _ = 5, v = 3553; let C = [], x = [], b = [], S = [], M = [], R = [], w = [], V = [], k = []; const L = this._createHardwareRenderTargetWrapper(!0, !1, c); e !== void 0 && (a = e.generateMipMaps === void 0 ? !1 : e.generateMipMaps, l = e.generateDepthBuffer === void 0 ? !0 : e.generateDepthBuffer, o = e.generateStencilBuffer === void 0 ? !1 : e.generateStencilBuffer, u = e.generateDepthTexture === void 0 ? !1 : e.generateDepthTexture, d = e.textureCount || 1, h = (i = e.depthTextureFormat) !== null && i !== void 0 ? i : 15, e.types && (C = e.types), e.samplingModes && (x = e.samplingModes), e.useSRGBBuffers && (b = e.useSRGBBuffers), e.formats && (S = e.formats), e.targetTypes && (M = e.targetTypes), e.faceIndex && (R = e.faceIndex), e.layerIndex && (w = e.layerIndex), e.layerCounts && (V = e.layerCounts), k = (r = e.labels) !== null && r !== void 0 ? r : k), L.label = (s = e == null ? void 0 : e.label) !== null && s !== void 0 ? s : "MultiRenderTargetWrapper"; const B = c.width || c, U = c.height || c; let K = null; (l || o || u) && (u || (l && o ? h = 13 : l ? h = 14 : h = 19), K = L.createDepthStencilTexture(0, !1, o, 1, h, "MultipleRenderTargetDepthStencil")); const ee = [], Z = [], q = []; L._generateDepthBuffer = l, L._generateStencilBuffer = o, L._attachments = Z, L._defaultAttachments = q; for (let le = 0; le < d; le++) { let ie = x[le] || p, $ = C[le] || f; const j = S[le] || _, J = (b[le] || m) && this._caps.supportSRGBBuffers, ne = M[le] || v, pe = (n = V[le]) !== null && n !== void 0 ? n : 1; if (($ === 1 && !this._caps.textureFloatLinearFiltering || $ === 2 && !this._caps.textureHalfFloatLinearFiltering) && (ie = 1), $ === 1 && !this._caps.textureFloat && ($ = 0, Ce.Warn("Float textures are not supported. Render target forced to TEXTURETYPE_UNSIGNED_BYTE type")), Z.push(le + 1), q.push(t ? le + 1 : le === 0 ? 
1 : 0), ne === -1) continue; const ge = new ln(this, ts.MultiRenderTarget); switch (ee[le] = ge, ne) { case 34067: ge.isCube = !0; break; case 32879: ge.is3D = !0, ge.baseDepth = ge.depth = pe; break; case 35866: ge.is2DArray = !0, ge.baseDepth = ge.depth = pe; break; } ge.baseWidth = B, ge.baseHeight = U, ge.width = B, ge.height = U, ge.isReady = !0, ge.samples = 1, ge.generateMipMaps = a, ge.samplingMode = ie, ge.type = $, ge._cachedWrapU = 0, ge._cachedWrapV = 0, ge._useSRGBBuffer = J, ge.format = j, ge.label = k[le], this._internalTexturesCache.push(ge), this._textureHelper.createGPUTextureForInternalTexture(ge); } return K && (K.incrementReferences(), ee[d] = K, this._internalTexturesCache.push(K)), L.setTextures(ee), L.setLayerAndFaceIndices(w, R), L; }; br.prototype.updateMultipleRenderTargetTextureSampleCount = function(c, e) { if (!c || !c.textures || c.textures[0].samples === e) return e; const t = c.textures.length; if (t === 0) return 1; e = Math.min(e, this.getCaps().maxMSAASamples); for (let r = 0; r < t; ++r) { const n = c.textures[r]._hardwareTexture; n == null || n.releaseMSAATexture(); } const i = c._depthStencilTexture === c.textures[t - 1]; for (let r = 0; r < t; ++r) { const s = c.textures[r]; this._textureHelper.createMSAATexture(s, e, !1, r === t - 1 && i ? 0 : r), s.samples = e; } return c._depthStencilTexture && !i && (this._textureHelper.createMSAATexture(c._depthStencilTexture, e), c._depthStencilTexture.samples = e), e; }; br.prototype.bindAttachments = function(c) { c.length === 0 || !this._currentRenderTarget || (this._mrtAttachments = c, this._currentRenderPass && this._cacheRenderPipeline.setMRTAttachments(c)); }; br.prototype.buildTextureLayout = function(c) { const e = []; for (let t = 0; t < c.length; t++) c[t] ? e.push(t + 1) : e.push(0); return e; }; br.prototype.restoreSingleAttachment = function() { }; br.prototype.restoreSingleAttachmentForRenderTarget = function() { }; br.prototype.getGPUFrameTimeCounter = function() { return this._timestampQuery.gpuFrameTimeCounter; }; br.prototype.captureGPUFrameTime = function(c) { this._timestampQuery.enable = c && !!this._caps.timerQuery; }; br.prototype.createQuery = function() { return this._occlusionQuery.createQuery(); }; br.prototype.deleteQuery = function(c) { return this._occlusionQuery.deleteQuery(c), this; }; br.prototype.isQueryResultAvailable = function(c) { return this._occlusionQuery.isQueryResultAvailable(c); }; br.prototype.getQueryResult = function(c) { return this._occlusionQuery.getQueryResult(c); }; br.prototype.beginOcclusionQuery = function(c, e) { var t; if (this.compatibilityMode) { if (this._occlusionQuery.canBeginQuery(e)) return (t = this._currentRenderPass) === null || t === void 0 || t.beginOcclusionQuery(e), !0; } else return this._bundleList.addItem(new oW(e)), !0; return !1; }; br.prototype.endOcclusionQuery = function() { var c; return this.compatibilityMode ? 
(c = this._currentRenderPass) === null || c === void 0 || c.endOcclusionQuery() : this._bundleList.addItem(new lW()), this; }; br.prototype.createRawTexture = function(c, e, t, i, r, s, n, a = null, l = 0, o = 0, u = !1) { const h = new ln(this, ts.Raw); return h.baseWidth = e, h.baseHeight = t, h.width = e, h.height = t, h.format = i, h.generateMipMaps = r, h.samplingMode = n, h.invertY = s, h._compression = a, h.type = l, h._useSRGBBuffer = u, this._doNotHandleContextLost || (h._bufferView = c), this._textureHelper.createGPUTextureForInternalTexture(h, e, t, void 0, o), this.updateRawTexture(h, c, i, s, a, l, u), this._internalTexturesCache.push(h), h; }; br.prototype.updateRawTexture = function(c, e, t, i, r = null, s = 0, n = !1) { if (c) { if (this._doNotHandleContextLost || (c._bufferView = e, c.invertY = i, c._compression = r, c._useSRGBBuffer = n), e) { const a = c._hardwareTexture; t === 4 && (e = fN(e, c.width, c.height, s)); const o = new Uint8Array(e.buffer, e.byteOffset, e.byteLength); this._textureHelper.updateTexture(o, c, c.width, c.height, c.depth, a.format, 0, 0, i, !1, 0, 0), c.generateMipMaps && this._generateMipmaps(c, this._uploadEncoder); } c.isReady = !0; } }; br.prototype.createRawCubeTexture = function(c, e, t, i, r, s, n, a = null) { const l = new ln(this, ts.CubeRaw); return i === 1 && !this._caps.textureFloatLinearFiltering ? (r = !1, n = 1, Ce.Warn("Float texture filtering is not supported. Mipmap generation and sampling mode are forced to false and TEXTURE_NEAREST_SAMPLINGMODE, respectively.")) : i === 2 && !this._caps.textureHalfFloatLinearFiltering ? (r = !1, n = 1, Ce.Warn("Half float texture filtering is not supported. Mipmap generation and sampling mode are forced to false and TEXTURE_NEAREST_SAMPLINGMODE, respectively.")) : i === 1 && !this._caps.textureFloatRender ? (r = !1, Ce.Warn("Render to float textures is not supported. Mipmap generation forced to false.")) : i === 2 && !this._caps.colorBufferFloat && (r = !1, Ce.Warn("Render to half float textures is not supported. Mipmap generation forced to false.")), l.isCube = !0, l.format = t === 4 ? 
5 : t, l.type = i, l.generateMipMaps = r, l.width = e, l.height = e, l.samplingMode = n, this._doNotHandleContextLost || (l._bufferViewArray = c), l.invertY = s, l._compression = a, l._cachedWrapU = 0, l._cachedWrapV = 0, this._textureHelper.createGPUTextureForInternalTexture(l), c && this.updateRawCubeTexture(l, c, t, i, s, a), l.isReady = !0, l; }; br.prototype.updateRawCubeTexture = function(c, e, t, i, r, s = null) { c._bufferViewArray = e, c.invertY = r, c._compression = s; const n = c._hardwareTexture, a = t === 4, l = []; for (let o = 0; o < e.length; ++o) { let u = e[o]; a && (u = fN(e[o], c.width, c.height, i)), l.push(new Uint8Array(u.buffer, u.byteOffset, u.byteLength)); } this._textureHelper.updateCubeTextures(l, n.underlyingResource, c.width, c.height, n.format, r, !1, 0, 0), c.generateMipMaps && this._generateMipmaps(c, this._uploadEncoder), c.isReady = !0; }; br.prototype.createRawCubeTextureFromUrl = function(c, e, t, i, r, s, n, a, l = null, o = null, u = 3, h = !1) { const d = this.createRawCubeTexture(null, t, i, r, !s, h, u, null); e == null || e.addPendingData(d), d.url = c, this._internalTexturesCache.push(d); const f = (m, _) => { e == null || e.removePendingData(d), o && m && o(m.status + " " + m.statusText, _); }, p = (m) => { const _ = d.width, v = n(m); if (!v) return; const C = [0, 2, 4, 1, 3, 5]; if (a) { const x = i === 4, b = a(v), S = d._hardwareTexture, M = [0, 1, 2, 3, 4, 5]; for (let R = 0; R < b.length; R++) { const w = _ >> R, V = []; for (let k = 0; k < 6; k++) { let L = b[R][M[k]]; x && (L = fN(L, w, w, r)), V.push(new Uint8Array(L.buffer, L.byteOffset, L.byteLength)); } this._textureHelper.updateCubeTextures(V, S.underlyingResource, w, w, S.format, h, !1, 0, 0); } } else { const x = []; for (let b = 0; b < 6; b++) x.push(v[C[b]]); this.updateRawCubeTexture(d, x, i, r, h); } d.isReady = !0, e == null || e.removePendingData(d), l && l(); }; return this._loadFile(c, (m) => { p(m); }, void 0, e == null ? 
void 0 : e.offlineProvider, !0, f), d; }; br.prototype.createRawTexture3D = function(c, e, t, i, r, s, n, a, l = null, o = 0, u = 0) { const h = ts.Raw3D, d = new ln(this, h); return d.baseWidth = e, d.baseHeight = t, d.baseDepth = i, d.width = e, d.height = t, d.depth = i, d.format = r, d.type = o, d.generateMipMaps = s, d.samplingMode = a, d.is3D = !0, this._doNotHandleContextLost || (d._bufferView = c), this._textureHelper.createGPUTextureForInternalTexture(d, e, t, void 0, u), this.updateRawTexture3D(d, c, r, n, l, o), this._internalTexturesCache.push(d), d; }; br.prototype.updateRawTexture3D = function(c, e, t, i, r = null, s = 0) { if (this._doNotHandleContextLost || (c._bufferView = e, c.format = t, c.invertY = i, c._compression = r), e) { const n = c._hardwareTexture; t === 4 && (e = fN(e, c.width, c.height, s)); const l = new Uint8Array(e.buffer, e.byteOffset, e.byteLength); this._textureHelper.updateTexture(l, c, c.width, c.height, c.depth, n.format, 0, 0, i, !1, 0, 0), c.generateMipMaps && this._generateMipmaps(c, this._uploadEncoder); } c.isReady = !0; }; br.prototype.createRawTexture2DArray = function(c, e, t, i, r, s, n, a, l = null, o = 0, u = 0) { const h = ts.Raw2DArray, d = new ln(this, h); return d.baseWidth = e, d.baseHeight = t, d.baseDepth = i, d.width = e, d.height = t, d.depth = i, d.format = r, d.type = o, d.generateMipMaps = s, d.samplingMode = a, d.is2DArray = !0, this._doNotHandleContextLost || (d._bufferView = c), this._textureHelper.createGPUTextureForInternalTexture(d, e, t, i, u), this.updateRawTexture2DArray(d, c, r, n, l, o), this._internalTexturesCache.push(d), d; }; br.prototype.updateRawTexture2DArray = function(c, e, t, i, r = null, s = 0) { if (this._doNotHandleContextLost || (c._bufferView = e, c.format = t, c.invertY = i, c._compression = r), e) { const n = c._hardwareTexture; t === 4 && (e = fN(e, c.width, c.height, s)); const l = new Uint8Array(e.buffer, e.byteOffset, e.byteLength); this._textureHelper.updateTexture(l, c, c.width, c.height, c.depth, n.format, 0, 0, i, !1, 0, 0), c.generateMipMaps && this._generateMipmaps(c, this._uploadEncoder); } c.isReady = !0; }; function fN(c, e, t, i) { let r, s = 1; i === 1 ? r = new Float32Array(e * t * 4) : i === 2 ? (r = new Uint16Array(e * t * 4), s = 15360) : i === 7 ? 
r = new Uint32Array(e * t * 4) : r = new Uint8Array(e * t * 4); for (let n = 0; n < e; n++) for (let a = 0; a < t; a++) { const l = (a * e + n) * 3, o = (a * e + n) * 4; r[o + 0] = c[l + 0], r[o + 1] = c[l + 1], r[o + 2] = c[l + 2], r[o + 3] = s; } return r; } br.prototype._readTexturePixels = function(c, e, t, i = -1, r = 0, s = null, n = !0, a = !1, l = 0, o = 0) { const u = c._hardwareTexture; return n && this.flushFramebuffer(), this._textureHelper.readPixels(u.underlyingResource, l, o, e, t, u.format, i, r, s, a); }; br.prototype._readTexturePixelsSync = function() { throw "_readTexturePixelsSync is unsupported in WebGPU!"; }; class Xie extends FL { /** * Initializes the render target wrapper * @param isMulti true if the wrapper is a multi render target * @param isCube true if the wrapper should render to a cube texture * @param size size of the render target (width/height/layers) * @param engine engine used to create the render target * @param label defines the label to use for the wrapper (for debugging purpose only) */ constructor(e, t, i, r, s) { super(e, t, i, r, s), r.enableGPUTimingMeasurements && (this.gpuTimeInFrame = new VK()); } } br.prototype._createHardwareRenderTargetWrapper = function(c, e, t) { const i = new Xie(c, e, t, this); return this._renderTargetWrapperCache.push(i), i; }; br.prototype.createRenderTargetTexture = function(c, e) { var t, i, r; const s = this._createHardwareRenderTargetWrapper(!1, !1, c), n = {}; e !== void 0 && typeof e == "object" ? (n.generateMipMaps = e.generateMipMaps, n.generateDepthBuffer = e.generateDepthBuffer === void 0 ? !0 : e.generateDepthBuffer, n.generateStencilBuffer = n.generateDepthBuffer && e.generateStencilBuffer, n.samplingMode = e.samplingMode === void 0 ? 3 : e.samplingMode, n.creationFlags = (t = e.creationFlags) !== null && t !== void 0 ? t : 0, n.noColorAttachment = !!e.noColorAttachment, n.samples = e.samples, n.label = e.label) : (n.generateMipMaps = e, n.generateDepthBuffer = !0, n.generateStencilBuffer = !1, n.samplingMode = 3, n.creationFlags = 0, n.noColorAttachment = !1); const a = n.noColorAttachment ? null : this._createInternalTexture(c, e, !0, ts.RenderTarget); return s.label = (i = n.label) !== null && i !== void 0 ? i : "RenderTargetWrapper", s._samples = (r = n.samples) !== null && r !== void 0 ? r : 1, s._generateDepthBuffer = n.generateDepthBuffer, s._generateStencilBuffer = !!n.generateStencilBuffer, s.setTextures(a), (s._generateDepthBuffer || s._generateStencilBuffer) && s.createDepthStencilTexture( 0, !1, // force false as filtering is not supported for depth textures s._generateStencilBuffer, s.samples, n.generateStencilBuffer ? 13 : 14, n.label ? n.label + "-DepthStencil" : void 0 ), a && (e !== void 0 && typeof e == "object" && e.createMipMaps && !n.generateMipMaps && (a.generateMipMaps = !0), this._textureHelper.createGPUTextureForInternalTexture(a, void 0, void 0, void 0, n.creationFlags), e !== void 0 && typeof e == "object" && e.createMipMaps && !n.generateMipMaps && (a.generateMipMaps = !1)), s; }; br.prototype._createDepthStencilTexture = function(c, e) { const t = new ln(this, ts.DepthStencil); t.label = e.label; const i = Object.assign({ bilinearFiltering: !1, comparisonFunction: 0, generateStencil: !1, samples: 1, depthTextureFormat: e.generateStencil ? 
13 : 14 }, e); t.format = i.depthTextureFormat, this._setupDepthStencilTexture(t, c, i.generateStencil, i.bilinearFiltering, i.comparisonFunction, i.samples), this._textureHelper.createGPUTextureForInternalTexture(t); const r = t._hardwareTexture; return t.type = zn.GetTextureTypeFromFormat(r.format), this._internalTexturesCache.push(t), t; }; br.prototype._setupDepthStencilTexture = function(c, e, t, i, r, s = 1) { const n = e.width || e, a = e.height || e, l = e.layers || 0; c.baseWidth = n, c.baseHeight = a, c.width = n, c.height = a, c.is2DArray = l > 0, c.depth = l, c.isReady = !0, c.samples = s, c.generateMipMaps = !1, c.samplingMode = i ? 2 : 1, c.type = 1, c._comparisonFunction = r, c._cachedWrapU = 0, c._cachedWrapV = 0; }; br.prototype.updateRenderTargetTextureSampleCount = function(c, e) { return !c || !c.texture || c.samples === e || (e = Math.min(e, this.getCaps().maxMSAASamples), this._textureHelper.createMSAATexture(c.texture, e), c._depthStencilTexture && (this._textureHelper.createMSAATexture(c._depthStencilTexture, e), c._depthStencilTexture.samples = e), c._samples = e, c.texture.samples = e), e; }; br.prototype.createRenderTargetCubeTexture = function(c, e) { var t; const i = this._createHardwareRenderTargetWrapper(!1, !0, c), r = Object.assign({ generateMipMaps: !0, generateDepthBuffer: !0, generateStencilBuffer: !1, type: 0, samplingMode: 3, format: 5, samples: 1 }, e); r.generateStencilBuffer = r.generateDepthBuffer && r.generateStencilBuffer, i.label = (t = r.label) !== null && t !== void 0 ? t : "RenderTargetWrapper", i._generateDepthBuffer = r.generateDepthBuffer, i._generateStencilBuffer = r.generateStencilBuffer; const s = new ln(this, ts.RenderTarget); return s.width = c, s.height = c, s.depth = 0, s.isReady = !0, s.isCube = !0, s.samples = r.samples, s.generateMipMaps = r.generateMipMaps, s.samplingMode = r.samplingMode, s.type = r.type, s.format = r.format, this._internalTexturesCache.push(s), i.setTextures(s), (i._generateDepthBuffer || i._generateStencilBuffer) && i.createDepthStencilTexture(0, r.samplingMode === void 0 || r.samplingMode === 2 || r.samplingMode === 2 || r.samplingMode === 3 || r.samplingMode === 3 || r.samplingMode === 5 || r.samplingMode === 6 || r.samplingMode === 7 || r.samplingMode === 11, i._generateStencilBuffer, i.samples), e && e.createMipMaps && !r.generateMipMaps && (s.generateMipMaps = !0), this._textureHelper.createGPUTextureForInternalTexture(s), e && e.createMipMaps && !r.generateMipMaps && (s.generateMipMaps = !1), i; }; Cr.prototype.setTextureSampler = function(c, e) { this._engine.setTextureSampler(c, e); }; br.prototype.setTextureSampler = function(c, e) { var t; (t = this._currentMaterialContext) === null || t === void 0 || t.setSampler(c, e); }; Cr.prototype.setStorageBuffer = function(c, e) { this._engine.setStorageBuffer(c, e); }; br.prototype.createStorageBuffer = function(c, e, t) { return this._createBuffer(c, e | 32, t); }; br.prototype.updateStorageBuffer = function(c, e, t, i) { const r = c; t === void 0 && (t = 0); let s; i === void 0 ? (e instanceof Array ? s = new Float32Array(e) : e instanceof ArrayBuffer ? s = new Uint8Array(e) : s = e, i = s.byteLength) : e instanceof Array ? s = new Float32Array(e) : e instanceof ArrayBuffer ? 
s = new Uint8Array(e) : s = e, this._bufferManager.setSubData(r, t, s, 0, i); }; br.prototype.readFromStorageBuffer = function(c, e, t, i, r) { t = t || c.capacity; const s = this._bufferManager.createRawBuffer(t, ya.MapRead | ya.CopyDst, void 0, "TempReadFromStorageBuffer"); return this._renderEncoder.copyBufferToBuffer(c.underlyingResource, e ?? 0, s, 0, t), new Promise((n, a) => { const l = () => { s.mapAsync(c5.Read, 0, t).then(() => { const o = s.getMappedRange(0, t); let u = i; if (u === void 0) u = new Uint8Array(t), u.set(new Uint8Array(o)); else { const h = u.constructor; u = new h(u.buffer), u.set(new h(o)); } s.unmap(), this._bufferManager.releaseBuffer(s), n(u); }, (o) => { this.isDisposed ? n(new Uint8Array()) : a(o); }); }; r ? (this.flushFramebuffer(), l()) : this.onEndFrameObservable.addOnce(() => { l(); }); }); }; br.prototype.setStorageBuffer = function(c, e) { var t, i; (t = this._currentDrawContext) === null || t === void 0 || t.setBuffer(c, (i = e == null ? void 0 : e.getBuffer()) !== null && i !== void 0 ? i : null); }; br.prototype.createUniformBuffer = function(c, e) { let t; return c instanceof Array ? t = new Float32Array(c) : t = c, this._bufferManager.createBuffer(t, ya.Uniform | ya.CopyDst, e); }; br.prototype.createDynamicUniformBuffer = function(c, e) { return this.createUniformBuffer(c, e); }; br.prototype.updateUniformBuffer = function(c, e, t, i) { t === void 0 && (t = 0); const r = c; let s; i === void 0 ? (e instanceof Float32Array ? s = e : s = new Float32Array(e), i = s.byteLength) : e instanceof Float32Array ? s = e : s = new Float32Array(e), this._bufferManager.setSubData(r, t, s, 0, i); }; br.prototype.bindUniformBufferBase = function(c, e, t) { this._currentDrawContext.setBuffer(t, c); }; br.prototype.bindUniformBlock = function() { }; function $pe(c) { return !!(c && c.underlyingResource !== void 0); } br.prototype.updateVideoTexture = function(c, e, t) { var i; if (!c || c._isDisabled) return; this._videoTextureSupported === void 0 && (this._videoTextureSupported = !0); let r = c._hardwareTexture; !((i = c._hardwareTexture) === null || i === void 0) && i.underlyingResource || (r = this._textureHelper.createGPUTextureForInternalTexture(c)), $pe(e) ? (this._textureHelper.copyVideoToTexture(e, c, r.format, !t), c.generateMipMaps && this._generateMipmaps(c), c.isReady = !0) : e && this.createImageBitmap(e).then((s) => { this._textureHelper.updateTexture(s, c, c.width, c.height, c.depth, r.format, 0, 0, !t, !1, 0, 0), c.generateMipMaps && this._generateMipmaps(c), c.isReady = !0; }).catch(() => { c.isReady = !0; }); }; class Zpe { /** * Creates an engine based on the capabilities of the underlying hardware * @param canvas Defines the canvas to use to display the result * @param options Defines the options passed to the engine to create the context dependencies * @returns a promise that resolves with the created engine */ static async CreateAsync(e, t) { return await br.IsSupportedAsync ? br.CreateAsync(e, t) : $e.IsSupported ? new $e(e, void 0, t) : new Pie(t); } } class yg { } yg.COPY = 1; yg.CUT = 2; yg.PASTE = 3; class zF { /** *Creates an instance of ClipboardInfo. * @param type Defines the type of event (BABYLON.ClipboardEventTypes) * @param event Defines the related dom event */ constructor(e, t) { this.type = e, this.event = t; } /** * Get the clipboard event's type from the keycode. * @param keyCode Defines the keyCode for the current keyboard event. 
* @returns {number} */ static GetTypeFromCharacter(e) { switch (e) { case 67: return yg.COPY; case 86: return yg.PASTE; case 88: return yg.CUT; default: return -1; } } } class dg extends Do { /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse*/ get hoverMaterial() { return this._hoverMaterial; } /** Material used to render when gizmo is disabled. typically grey.*/ get disableMaterial() { return this._disableMaterial; } /** * Creates an AxisScaleGizmo * @param dragAxis The axis which the gizmo will be able to scale on * @param color The color of the gizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param parent * @param thickness display gizmo axis thickness * @param hoverColor The color of the gizmo when hovering over and dragging * @param disableColor The Color of the gizmo when its disabled */ constructor(e, t = ze.Gray(), i = bn.DefaultUtilityLayer, r = null, s = 1, n = ze.Yellow(), a = ze.Gray()) { var l, o, u, h, d, f, p; super(i), this._pointerObserver = null, this.snapDistance = 0, this.onSnapObservable = new Fe(), this.uniformScaling = !1, this.sensitivity = 1, this.dragScale = 1, this.incrementalSnap = !1, this._isEnabled = !0, this._parent = null, this._dragging = !1, this._tmpVector = new D(0, 0, 0), this._incrementalStartupValue = D.Zero(), this._parent = r, this._coloredMaterial = new Dt("", i.utilityLayerScene), this._coloredMaterial.diffuseColor = t, this._coloredMaterial.specularColor = t.subtract(new ze(0.1, 0.1, 0.1)), this._hoverMaterial = new Dt("", i.utilityLayerScene), this._hoverMaterial.diffuseColor = n, this._disableMaterial = new Dt("", i.utilityLayerScene), this._disableMaterial.diffuseColor = a, this._disableMaterial.alpha = 0.4, this._gizmoMesh = new ke("axis", i.utilityLayerScene); const { arrowMesh: m, arrowTail: _ } = this._createGizmoMesh(this._gizmoMesh, s), v = this._createGizmoMesh(this._gizmoMesh, s + 4, !0); this._gizmoMesh.lookAt(this._rootMesh.position.add(e)), this._rootMesh.addChild(this._gizmoMesh, Do.PreserveScaling), this._gizmoMesh.scaling.scaleInPlace(1 / 3); const C = m.position.clone(), x = _.position.clone(), b = _.scaling.clone(), S = (B) => { const U = B * (3 / this._rootMesh.scaling.length()) * 6; m.position.z += U / 3.5, _.scaling.y += U, this.dragScale = _.scaling.y, _.position.z = m.position.z / 2; }, M = () => { m.position.set(C.x, C.y, C.z), _.position.set(x.x, x.y, x.z), _.scaling.set(b.x, b.y, b.z), this.dragScale = _.scaling.y, this._dragging = !1; }; this.dragBehavior = new Fu({ dragAxis: e }), this.dragBehavior.moveAttached = !1, this.dragBehavior.updateDragPlane = !1, this._rootMesh.addBehavior(this.dragBehavior); let R = 0, w = 0; const V = { snapDistance: 0 }; this.dragBehavior.onDragObservable.add((B) => { if (this.attachedNode) { const U = this.sensitivity * B.dragDistance * (this.scaleRatio * 3 / this._rootMesh.scaling.length()), K = this._tmpVector; let ee = !1, Z = 0; if (this.uniformScaling ? K.setAll(0.57735) : K.copyFrom(e), this.snapDistance == 0) K.scaleToRef(U, K); else { R += U, w += U; const ie = this.incrementalSnap ? w : R; Math.abs(ie) > this.snapDistance ? (Z = Math.floor(Math.abs(ie) / this.snapDistance), ie < 0 && (Z *= -1), R = R % this.snapDistance, K.scaleToRef(this.snapDistance * Z, K), ee = !0) : K.scaleInPlace(0); } K.addInPlaceFromFloats(1, 1, 1), K.x = Math.abs(K.x) < dg.MinimumAbsoluteScale ? dg.MinimumAbsoluteScale * (K.x < 0 ? 
-1 : 1) : K.x, K.y = Math.abs(K.y) < dg.MinimumAbsoluteScale ? dg.MinimumAbsoluteScale * (K.y < 0 ? -1 : 1) : K.y, K.z = Math.abs(K.z) < dg.MinimumAbsoluteScale ? dg.MinimumAbsoluteScale * (K.z < 0 ? -1 : 1) : K.z; const q = this.attachedNode._isMesh ? this.attachedNode : void 0; Math.abs(this.snapDistance) > 0 && this.incrementalSnap ? (this.attachedNode.getWorldMatrix().decompose(void 0, de.Quaternion[0], de.Vector3[2], Do.PreserveScaling ? q : void 0), K.addInPlace(this._incrementalStartupValue), K.addInPlaceFromFloats(-1, -1, -1), K.x = Math.abs(K.x) * (this._incrementalStartupValue.x > 0 ? 1 : -1), K.y = Math.abs(K.y) * (this._incrementalStartupValue.y > 0 ? 1 : -1), K.z = Math.abs(K.z) * (this._incrementalStartupValue.z > 0 ? 1 : -1), Ae.ComposeToRef(K, de.Quaternion[0], de.Vector3[2], de.Matrix[1])) : (Ae.ScalingToRef(K.x, K.y, K.z, de.Matrix[2]), de.Matrix[2].multiplyToRef(this.attachedNode.getWorldMatrix(), de.Matrix[1])), de.Matrix[1].decompose(de.Vector3[1], void 0, void 0, Do.PreserveScaling ? q : void 0); const le = 1e5; Math.abs(de.Vector3[1].x) < le && Math.abs(de.Vector3[1].y) < le && Math.abs(de.Vector3[1].z) < le && this.attachedNode.getWorldMatrix().copyFrom(de.Matrix[1]), ee && (V.snapDistance = this.snapDistance * Z, this.onSnapObservable.notifyObservers(V)), this._matrixChanged(); } }), this.dragBehavior.onDragStartObservable.add(() => { var B; this._dragging = !0; const U = this.attachedNode._isMesh ? this.attachedNode : void 0; (B = this.attachedNode) === null || B === void 0 || B.getWorldMatrix().decompose(this._incrementalStartupValue, void 0, void 0, Do.PreserveScaling ? U : void 0), R = 0, w = 0; }), this.dragBehavior.onDragObservable.add((B) => S(B.dragDistance)), this.dragBehavior.onDragEndObservable.add(M), (u = (o = (l = r == null ? void 0 : r.uniformScaleGizmo) === null || l === void 0 ? void 0 : l.dragBehavior) === null || o === void 0 ? void 0 : o.onDragObservable) === null || u === void 0 || u.add((B) => S(B.delta.y)), (f = (d = (h = r == null ? void 0 : r.uniformScaleGizmo) === null || h === void 0 ? void 0 : h.dragBehavior) === null || d === void 0 ? void 0 : d.onDragEndObservable) === null || f === void 0 || f.add(M); const k = { gizmoMeshes: [m, _], colliderMeshes: [v.arrowMesh, v.arrowTail], material: this._coloredMaterial, hoverMaterial: this._hoverMaterial, disableMaterial: this._disableMaterial, active: !1, dragBehavior: this.dragBehavior }; (p = this._parent) === null || p === void 0 || p.addToAxisCache(this._gizmoMesh, k), this._pointerObserver = i.utilityLayerScene.onPointerObservable.add((B) => { var U; if (!this._customMeshSet && (this._isHovered = k.colliderMeshes.indexOf((U = B == null ? void 0 : B.pickInfo) === null || U === void 0 ? void 0 : U.pickedMesh) != -1, !this._parent)) { const K = this.dragBehavior.enabled ? this._isHovered || this._dragging ? this._hoverMaterial : this._coloredMaterial : this._disableMaterial; this._setGizmoMeshMaterial(k.gizmoMeshes, K); } }), this.dragBehavior.onEnabledObservable.add((B) => { this._setGizmoMeshMaterial(k.gizmoMeshes, B ? 
this._coloredMaterial : this._disableMaterial); }); const L = i._getSharedGizmoLight(); L.includedOnlyMeshes = L.includedOnlyMeshes.concat(this._rootMesh.getChildMeshes()); } /** * Create Geometry for Gizmo * @param parentMesh * @param thickness * @param isCollider */ _createGizmoMesh(e, t, i = !1) { const r = B4("yPosMesh", { size: 0.4 * (1 + (t - 1) / 4) }, this.gizmoLayer.utilityLayerScene), s = Hf("cylinder", { diameterTop: 5e-3 * t, height: 0.275, diameterBottom: 5e-3 * t, tessellation: 96 }, this.gizmoLayer.utilityLayerScene); return r.scaling.scaleInPlace(0.1), r.material = this._coloredMaterial, r.rotation.x = Math.PI / 2, r.position.z += 0.3, s.material = this._coloredMaterial, s.position.z += 0.275 / 2, s.rotation.x = Math.PI / 2, i && (r.visibility = 0, s.visibility = 0), e.addChild(r), e.addChild(s), { arrowMesh: r, arrowTail: s }; } _attachedNodeChanged(e) { this.dragBehavior && (this.dragBehavior.enabled = !!e); } /** * If the gizmo is enabled */ set isEnabled(e) { this._isEnabled = e, e ? this._parent && (this.attachedMesh = this._parent.attachedMesh, this.attachedNode = this._parent.attachedNode) : (this.attachedMesh = null, this.attachedNode = null); } get isEnabled() { return this._isEnabled; } /** * Disposes of the gizmo */ dispose() { this.onSnapObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this.dragBehavior.detach(), this._gizmoMesh && this._gizmoMesh.dispose(), [this._coloredMaterial, this._hoverMaterial, this._disableMaterial].forEach((e) => { e && e.dispose(); }), super.dispose(); } /** * Disposes and replaces the current meshes in the gizmo with the specified mesh * @param mesh The mesh to replace the default mesh of the gizmo * @param useGizmoMaterial If the gizmo's default material should be used (default: false) */ setCustomMesh(e, t = !1) { super.setCustomMesh(e), t && (this._rootMesh.getChildMeshes().forEach((i) => { i.material = this._coloredMaterial, i.color && (i.color = this._coloredMaterial.diffuseColor); }), this._customMeshSet = !1); } } dg.MinimumAbsoluteScale = Sr; class hW extends Do { /** * Sets the axis factor * @param factor the Vector3 value */ set axisFactor(e) { this._axisFactor = e; const t = this._scaleBoxesParent.getChildMeshes(); let i = 0; for (let r = 0; r < 3; r++) for (let s = 0; s < 3; s++) for (let n = 0; n < 3; n++) { const a = (r === 1 ? 1 : 0) + (s === 1 ? 1 : 0) + (n === 1 ? 
1 : 0); if (!(a === 1 || a === 3)) { if (t[i]) { const l = new D(r - 1, s - 1, n - 1); l.multiplyInPlace(this._axisFactor), t[i].setEnabled(l.lengthSquared() > Sr); } i++; } } } /** * Gets the axis factor * @returns the Vector3 factor value */ get axisFactor() { return this._axisFactor; } /** * Sets scale drag speed value * @param value the new speed value */ set scaleDragSpeed(e) { this._scaleDragSpeed = e; } /** * Gets scale drag speed * @returns the scale speed number */ get scaleDragSpeed() { return this._scaleDragSpeed; } /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse*/ get hoverMaterial() { return this._hoverColoredMaterial; } /** * Get the pointerDragBehavior */ get pointerDragBehavior() { return this._pointerDragBehavior; } /** * Sets the color of the bounding box gizmo * @param color the color to set */ setColor(e) { this._coloredMaterial.emissiveColor = e, this._hoverColoredMaterial.emissiveColor = e.clone().add(new ze(0.3, 0.3, 0.3)), this._lineBoundingBox.getChildren().forEach((t) => { t.color && (t.color = e); }); } /** * Creates an BoundingBoxGizmo * @param color The color of the gizmo * @param gizmoLayer The utility layer the gizmo will be added to */ constructor(e = ze.Gray(), t = bn.DefaultKeepDepthUtilityLayer) { super(t), this._boundingDimensions = new D(1, 1, 1), this._renderObserver = null, this._pointerObserver = null, this._scaleDragSpeed = 0.2, this._tmpQuaternion = new Ze(), this._tmpVector = new D(0, 0, 0), this._tmpRotationMatrix = new Ae(), this.ignoreChildren = !1, this.includeChildPredicate = null, this.rotationSphereSize = 0.1, this.scaleBoxSize = 0.1, this.fixedDragMeshScreenSize = !1, this.fixedDragMeshBoundsSize = !1, this.fixedDragMeshScreenSizeDistanceFactor = 10, this.scalingSnapDistance = 0, this.rotationSnapDistance = 0, this.onDragStartObservable = new Fe(), this.onScaleBoxDragObservable = new Fe(), this.onScaleBoxDragEndObservable = new Fe(), this.onRotationSphereDragObservable = new Fe(), this.onRotationSphereDragEndObservable = new Fe(), this.scalePivot = null, this._axisFactor = new D(1, 1, 1), this._existingMeshScale = new D(), this._dragMesh = null, this._pointerDragBehavior = new Fu(), this.updateScale = !1, this._anchorMesh = new xr("anchor", t.utilityLayerScene), this._coloredMaterial = new Dt("", t.utilityLayerScene), this._coloredMaterial.disableLighting = !0, this._hoverColoredMaterial = new Dt("", t.utilityLayerScene), this._hoverColoredMaterial.disableLighting = !0, this._lineBoundingBox = new xr("", t.utilityLayerScene), this._lineBoundingBox.rotationQuaternion = new Ze(); const i = []; i.push(Ba("lines", { points: [new D(0, 0, 0), new D(this._boundingDimensions.x, 0, 0)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, 0, 0), new D(0, this._boundingDimensions.y, 0)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, 0, 0), new D(0, 0, this._boundingDimensions.z)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(this._boundingDimensions.x, 0, 0), new D(this._boundingDimensions.x, this._boundingDimensions.y, 0)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(this._boundingDimensions.x, 0, 0), new D(this._boundingDimensions.x, 0, this._boundingDimensions.z)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, this._boundingDimensions.y, 0), new D(this._boundingDimensions.x, this._boundingDimensions.y, 0)] }, 
t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, this._boundingDimensions.y, 0), new D(0, this._boundingDimensions.y, this._boundingDimensions.z)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, 0, this._boundingDimensions.z), new D(this._boundingDimensions.x, 0, this._boundingDimensions.z)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [new D(0, 0, this._boundingDimensions.z), new D(0, this._boundingDimensions.y, this._boundingDimensions.z)] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [ new D(this._boundingDimensions.x, this._boundingDimensions.y, this._boundingDimensions.z), new D(0, this._boundingDimensions.y, this._boundingDimensions.z) ] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [ new D(this._boundingDimensions.x, this._boundingDimensions.y, this._boundingDimensions.z), new D(this._boundingDimensions.x, 0, this._boundingDimensions.z) ] }, t.utilityLayerScene)), i.push(Ba("lines", { points: [ new D(this._boundingDimensions.x, this._boundingDimensions.y, this._boundingDimensions.z), new D(this._boundingDimensions.x, this._boundingDimensions.y, 0) ] }, t.utilityLayerScene)), i.forEach((s) => { s.color = e, s.position.addInPlace(new D(-this._boundingDimensions.x / 2, -this._boundingDimensions.y / 2, -this._boundingDimensions.z / 2)), s.isPickable = !1, this._lineBoundingBox.addChild(s); }), this._rootMesh.addChild(this._lineBoundingBox), this.setColor(e), this._rotateSpheresParent = new xr("", t.utilityLayerScene), this._rotateSpheresParent.rotationQuaternion = new Ze(); for (let s = 0; s < 12; s++) { const n = Rd("", { diameter: 1 }, t.utilityLayerScene); n.rotationQuaternion = new Ze(), n.material = this._coloredMaterial, n.isNearGrabbable = !0; const a = new Fu({}); a.moveAttached = !1, a.updateDragPlane = !1, n.addBehavior(a); const l = new D(1, 0, 0); let o = 0, u = 0; a.onDragStartObservable.add(() => { l.copyFrom(n.forward), o = 0, u = 0; }), a.onDragObservable.add((h) => { if (this.onRotationSphereDragObservable.notifyObservers({}), this.attachedMesh) { const d = this.attachedMesh.parent; if (d && d.scaling && d.scaling.isNonUniformWithinEpsilon(1e-3)) { Ce.Warn("BoundingBoxGizmo controls are not supported on child meshes with non-uniform parent scaling"); return; } Nn._RemoveAndStorePivotPoint(this.attachedMesh); const f = l, p = h.dragPlaneNormal.scale(D.Dot(h.dragPlaneNormal, f)), m = f.subtract(p).normalizeToNew(); let _ = D.Dot(m, h.delta) < 0 ? Math.abs(h.delta.length()) : -Math.abs(h.delta.length()); if (_ = _ / this._boundingDimensions.length() * this._anchorMesh.scaling.length(), this.attachedMesh.rotationQuaternion || (this.attachedMesh.rotationQuaternion = Ze.RotationYawPitchRoll(this.attachedMesh.rotation.y, this.attachedMesh.rotation.x, this.attachedMesh.rotation.z)), this._anchorMesh.rotationQuaternion || (this._anchorMesh.rotationQuaternion = Ze.RotationYawPitchRoll(this._anchorMesh.rotation.y, this._anchorMesh.rotation.x, this._anchorMesh.rotation.z)), o += _, Math.abs(o) <= 2 * Math.PI) { if (this.rotationSnapDistance > 0) { const v = Math.floor(Math.abs(o) / this.rotationSnapDistance) * (o < 0 ? -1 : 1), C = this.rotationSnapDistance * v; _ = C - u, u = C; } s >= 8 ? Ze.RotationYawPitchRollToRef(0, 0, _, this._tmpQuaternion) : s >= 4 ? 
Ze.RotationYawPitchRollToRef(_, 0, 0, this._tmpQuaternion) : Ze.RotationYawPitchRollToRef(0, _, 0, this._tmpQuaternion), this.attachedMesh.isUsingPivotMatrix() && this._anchorMesh.position.copyFrom(this.attachedMesh.position), this._anchorMesh.addChild(this.attachedMesh), this._anchorMesh.getScene().useRightHandedSystem && this._tmpQuaternion.conjugateInPlace(), this._tmpQuaternion.normalize(), this._anchorMesh.rotationQuaternion.multiplyToRef(this._tmpQuaternion, this._anchorMesh.rotationQuaternion), this._anchorMesh.rotationQuaternion.normalize(), this._anchorMesh.removeChild(this.attachedMesh), this.attachedMesh.setParent(d); } this.updateBoundingBox(), Nn._RestorePivotPoint(this.attachedMesh); } this._updateDummy(); }), a.onDragStartObservable.add(() => { this.onDragStartObservable.notifyObservers({}), this._selectNode(n); }), a.onDragEndObservable.add((h) => { this.onRotationSphereDragEndObservable.notifyObservers({}), this._selectNode(null), this._updateDummy(), this._unhoverMeshOnTouchUp(h.pointerInfo, n); }), this._rotateSpheresParent.addChild(n); } this._rootMesh.addChild(this._rotateSpheresParent), this._scaleBoxesParent = new xr("", t.utilityLayerScene), this._scaleBoxesParent.rotationQuaternion = new Ze(); for (let s = 0; s < 3; s++) for (let n = 0; n < 3; n++) for (let a = 0; a < 3; a++) { const l = (s === 1 ? 1 : 0) + (n === 1 ? 1 : 0) + (a === 1 ? 1 : 0); if (l === 1 || l === 3) continue; const o = B4("", { size: 1 }, t.utilityLayerScene); o.material = this._coloredMaterial, o._internalMetadata = l === 2, o.isNearGrabbable = !0; const u = new D(s - 1, n - 1, a - 1).normalize(), h = new Fu({ dragAxis: u }); h.updateDragPlane = !1, h.moveAttached = !1; let d = 0, f = 0; o.addBehavior(h), h.onDragObservable.add((p) => { if (this.onScaleBoxDragObservable.notifyObservers({}), this.attachedMesh) { const m = this.attachedMesh.parent; if (m && m.scaling && m.scaling.isNonUniformWithinEpsilon(1e-3)) { Ce.Warn("BoundingBoxGizmo controls are not supported on child meshes with non-uniform parent scaling"); return; } Nn._RemoveAndStorePivotPoint(this.attachedMesh); let _ = p.dragDistance / this._boundingDimensions.length() * this._anchorMesh.scaling.length(); if (d += _, this.scalingSnapDistance > 0) { const C = Math.floor(Math.abs(d) / this.scalingSnapDistance) * (d < 0 ? -1 : 1), x = this.scalingSnapDistance * C; _ = x - f, f = x; } const v = new D(_, _, _); l === 2 && (v.x *= Math.abs(u.x), v.y *= Math.abs(u.y), v.z *= Math.abs(u.z)), v.scaleInPlace(this._scaleDragSpeed), v.multiplyInPlace(this._axisFactor), this.updateBoundingBox(), this.scalePivot ? 
(this.attachedMesh.getWorldMatrix().getRotationMatrixToRef(this._tmpRotationMatrix), this._boundingDimensions.scaleToRef(0.5, this._tmpVector), D.TransformCoordinatesToRef(this._tmpVector, this._tmpRotationMatrix, this._tmpVector), this._anchorMesh.position.subtractInPlace(this._tmpVector), this._boundingDimensions.multiplyToRef(this.scalePivot, this._tmpVector), D.TransformCoordinatesToRef(this._tmpVector, this._tmpRotationMatrix, this._tmpVector), this._anchorMesh.position.addInPlace(this._tmpVector)) : (o.absolutePosition.subtractToRef(this._anchorMesh.position, this._tmpVector), this._anchorMesh.position.subtractInPlace(this._tmpVector), this.attachedMesh.isUsingPivotMatrix() && this._anchorMesh.position.subtractInPlace(this.attachedMesh.getPivotPoint())), this._anchorMesh.addChild(this.attachedMesh), this._anchorMesh.scaling.addInPlace(v), (this._anchorMesh.scaling.x < 0 || this._anchorMesh.scaling.y < 0 || this._anchorMesh.scaling.z < 0) && this._anchorMesh.scaling.subtractInPlace(v), this._anchorMesh.removeChild(this.attachedMesh), this.attachedMesh.setParent(m), Nn._RestorePivotPoint(this.attachedMesh); } this._updateDummy(); }), h.onDragStartObservable.add(() => { this.onDragStartObservable.notifyObservers({}), this._selectNode(o), d = 0, f = 0; }), h.onDragEndObservable.add((p) => { this.onScaleBoxDragEndObservable.notifyObservers({}), this._selectNode(null), this._updateDummy(), this._unhoverMeshOnTouchUp(p.pointerInfo, o); }), this._scaleBoxesParent.addChild(o); } this._rootMesh.addChild(this._scaleBoxesParent); const r = []; this._pointerObserver = t.utilityLayerScene.onPointerObservable.add((s) => { r[s.event.pointerId] ? s.pickInfo && s.pickInfo.pickedMesh != r[s.event.pointerId] && (r[s.event.pointerId].material = this._coloredMaterial, delete r[s.event.pointerId], this._isHovered = !1) : this._rotateSpheresParent.getChildMeshes().concat(this._scaleBoxesParent.getChildMeshes()).forEach((n) => { s.pickInfo && s.pickInfo.pickedMesh == n && (r[s.event.pointerId] = n, n.material = this._hoverColoredMaterial, this._isHovered = !0); }); }), this._renderObserver = this.gizmoLayer.originalScene.onBeforeRenderObservable.add(() => { this.attachedMesh && !this._existingMeshScale.equals(this.attachedMesh.scaling) ? this.updateBoundingBox() : (this.fixedDragMeshScreenSize || this.fixedDragMeshBoundsSize) && (this._updateRotationSpheres(), this._updateScaleBoxes()), this._dragMesh && this.attachedMesh && this._pointerDragBehavior.dragging && (this._lineBoundingBox.position.rotateByQuaternionToRef(this._rootMesh.rotationQuaternion, this._tmpVector), this.attachedMesh.setAbsolutePosition(this._dragMesh.position.add(this._tmpVector.scale(-1)))); }), this.updateBoundingBox(); } _attachedNodeChanged(e) { if (e) { this._anchorMesh.scaling.setAll(1), Nn._RemoveAndStorePivotPoint(e); const t = e.parent; this._anchorMesh.addChild(e), this._anchorMesh.removeChild(e), e.setParent(t), Nn._RestorePivotPoint(e), this.updateBoundingBox(), e.getChildMeshes(!1).forEach((i) => { i.markAsDirty("scaling"); }), this.gizmoLayer.utilityLayerScene.onAfterRenderObservable.addOnce(() => { this._updateDummy(); }); } } _selectNode(e) { this._rotateSpheresParent.getChildMeshes().concat(this._scaleBoxesParent.getChildMeshes()).forEach((t) => { t.isVisible = !e || t == e; }); } _unhoverMeshOnTouchUp(e, t) { (e == null ? void 0 : e.event) instanceof PointerEvent && (e == null ? 
void 0 : e.event.pointerType) === "touch" && (t.material = this._coloredMaterial); } /** * returns an array containing all boxes used for scaling (in increasing x, y and z orders) */ getScaleBoxes() { return this._scaleBoxesParent.getChildMeshes(); } /** * Updates the bounding box information for the Gizmo */ updateBoundingBox() { if (this.attachedMesh) { Nn._RemoveAndStorePivotPoint(this.attachedMesh); const e = this.attachedMesh.parent; this.attachedMesh.setParent(null), this._update(), this.attachedMesh.rotationQuaternion || (this.attachedMesh.rotationQuaternion = Ze.RotationYawPitchRoll(this.attachedMesh.rotation.y, this.attachedMesh.rotation.x, this.attachedMesh.rotation.z)), this._anchorMesh.rotationQuaternion || (this._anchorMesh.rotationQuaternion = Ze.RotationYawPitchRoll(this._anchorMesh.rotation.y, this._anchorMesh.rotation.x, this._anchorMesh.rotation.z)), this._anchorMesh.rotationQuaternion.copyFrom(this.attachedMesh.rotationQuaternion), this._tmpQuaternion.copyFrom(this.attachedMesh.rotationQuaternion), this._tmpVector.copyFrom(this.attachedMesh.position), this.attachedMesh.rotationQuaternion.set(0, 0, 0, 1), this.attachedMesh.position.set(0, 0, 0); const t = this.attachedMesh.getHierarchyBoundingVectors(!this.ignoreChildren, this.includeChildPredicate); t.max.subtractToRef(t.min, this._boundingDimensions), this._lineBoundingBox.scaling.copyFrom(this._boundingDimensions), this._lineBoundingBox.position.set((t.max.x + t.min.x) / 2, (t.max.y + t.min.y) / 2, (t.max.z + t.min.z) / 2), this._rotateSpheresParent.position.copyFrom(this._lineBoundingBox.position), this._scaleBoxesParent.position.copyFrom(this._lineBoundingBox.position), this._lineBoundingBox.computeWorldMatrix(), this._anchorMesh.position.copyFrom(this._lineBoundingBox.absolutePosition), this.attachedMesh.rotationQuaternion.copyFrom(this._tmpQuaternion), this.attachedMesh.position.copyFrom(this._tmpVector), this.attachedMesh.setParent(e); } this._updateRotationSpheres(), this._updateScaleBoxes(), this.attachedMesh && (this._existingMeshScale.copyFrom(this.attachedMesh.scaling), Nn._RestorePivotPoint(this.attachedMesh)); } _updateRotationSpheres() { const e = this._rotateSpheresParent.getChildMeshes(); for (let t = 0; t < 3; t++) for (let i = 0; i < 2; i++) for (let r = 0; r < 2; r++) { const s = t * 4 + i * 2 + r; if (t == 0 && (e[s].position.set(this._boundingDimensions.x / 2, this._boundingDimensions.y * i, this._boundingDimensions.z * r), e[s].position.addInPlace(new D(-this._boundingDimensions.x / 2, -this._boundingDimensions.y / 2, -this._boundingDimensions.z / 2)), e[s].lookAt(D.Cross(e[s].position.normalizeToNew(), D.Right()).normalizeToNew().add(e[s].position))), t == 1 && (e[s].position.set(this._boundingDimensions.x * i, this._boundingDimensions.y / 2, this._boundingDimensions.z * r), e[s].position.addInPlace(new D(-this._boundingDimensions.x / 2, -this._boundingDimensions.y / 2, -this._boundingDimensions.z / 2)), e[s].lookAt(D.Cross(e[s].position.normalizeToNew(), D.Up()).normalizeToNew().add(e[s].position))), t == 2 && (e[s].position.set(this._boundingDimensions.x * i, this._boundingDimensions.y * r, this._boundingDimensions.z / 2), e[s].position.addInPlace(new D(-this._boundingDimensions.x / 2, -this._boundingDimensions.y / 2, -this._boundingDimensions.z / 2)), e[s].lookAt(D.Cross(e[s].position.normalizeToNew(), D.Forward()).normalizeToNew().add(e[s].position))), this.fixedDragMeshScreenSize && this.gizmoLayer.utilityLayerScene.activeCamera) { 
e[s].absolutePosition.subtractToRef(this.gizmoLayer.utilityLayerScene.activeCamera.position, this._tmpVector); const n = this.rotationSphereSize * this._tmpVector.length() / this.fixedDragMeshScreenSizeDistanceFactor; e[s].scaling.set(n, n, n); } else this.fixedDragMeshBoundsSize ? e[s].scaling.set(this.rotationSphereSize * this._boundingDimensions.x, this.rotationSphereSize * this._boundingDimensions.y, this.rotationSphereSize * this._boundingDimensions.z) : e[s].scaling.set(this.rotationSphereSize, this.rotationSphereSize, this.rotationSphereSize); } } _updateScaleBoxes() { const e = this._scaleBoxesParent.getChildMeshes(); let t = 0; for (let i = 0; i < 3; i++) for (let r = 0; r < 3; r++) for (let s = 0; s < 3; s++) { const n = (i === 1 ? 1 : 0) + (r === 1 ? 1 : 0) + (s === 1 ? 1 : 0); if (!(n === 1 || n === 3)) { if (e[t]) if (e[t].position.set(this._boundingDimensions.x * (i / 2), this._boundingDimensions.y * (r / 2), this._boundingDimensions.z * (s / 2)), e[t].position.addInPlace(new D(-this._boundingDimensions.x / 2, -this._boundingDimensions.y / 2, -this._boundingDimensions.z / 2)), this.fixedDragMeshScreenSize && this.gizmoLayer.utilityLayerScene.activeCamera) { e[t].absolutePosition.subtractToRef(this.gizmoLayer.utilityLayerScene.activeCamera.position, this._tmpVector); const a = this.scaleBoxSize * this._tmpVector.length() / this.fixedDragMeshScreenSizeDistanceFactor; e[t].scaling.set(a, a, a); } else this.fixedDragMeshBoundsSize ? e[t].scaling.set(this.scaleBoxSize * this._boundingDimensions.x, this.scaleBoxSize * this._boundingDimensions.y, this.scaleBoxSize * this._boundingDimensions.z) : e[t].scaling.set(this.scaleBoxSize, this.scaleBoxSize, this.scaleBoxSize); t++; } } } /** * Enables rotation on the specified axis and disables rotation on the others * @param axis The list of axis that should be enabled (eg. "xy" or "xyz") */ setEnabledRotationAxis(e) { this._rotateSpheresParent.getChildMeshes().forEach((t, i) => { i < 4 ? t.setEnabled(e.indexOf("x") != -1) : i < 8 ? t.setEnabled(e.indexOf("y") != -1) : t.setEnabled(e.indexOf("z") != -1); }); } /** * Enables/disables scaling * @param enable if scaling should be enabled * @param homogeneousScaling defines if scaling should only be homogeneous */ setEnabledScaling(e, t = !1) { this._scaleBoxesParent.getChildMeshes().forEach((i) => { let r = e; t && i._internalMetadata === !0 && (r = !1), i.setEnabled(r); }); } _updateDummy() { this._dragMesh && (this._dragMesh.position.copyFrom(this._lineBoundingBox.getAbsolutePosition()), this._dragMesh.scaling.copyFrom(this._lineBoundingBox.scaling), this._dragMesh.rotationQuaternion.copyFrom(this._rootMesh.rotationQuaternion)); } /** * Enables a pointer drag behavior on the bounding box of the gizmo */ enableDragBehavior() { this._dragMesh = B4("dummy", { size: 1 }, this.gizmoLayer.utilityLayerScene), this._dragMesh.visibility = 0, this._dragMesh.rotationQuaternion = new Ze(), this._pointerDragBehavior.useObjectOrientationForDragging = !1, this._dragMesh.addBehavior(this._pointerDragBehavior); } /** * Disposes of the gizmo */ dispose() { this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this.gizmoLayer.originalScene.onBeforeRenderObservable.remove(this._renderObserver), this._lineBoundingBox.dispose(), this._rotateSpheresParent.dispose(), this._scaleBoxesParent.dispose(), this._dragMesh && this._dragMesh.dispose(), super.dispose(); } /** * Makes a mesh not pickable and wraps the mesh inside of a bounding box mesh that is pickable. 
(This is useful to avoid picking within complex geometry) * @param mesh the mesh to wrap in the bounding box mesh and make not pickable * @returns the bounding box mesh with the passed in mesh as a child */ static MakeNotPickableAndWrapInBoundingBox(e) { const t = (a) => { a.isPickable = !1, a.getChildMeshes().forEach((l) => { t(l); }); }; t(e), e.rotationQuaternion || (e.rotationQuaternion = Ze.RotationYawPitchRoll(e.rotation.y, e.rotation.x, e.rotation.z)); const i = e.position.clone(), r = e.rotationQuaternion.clone(); e.rotationQuaternion.set(0, 0, 0, 1), e.position.set(0, 0, 0); const s = B4("box", { size: 1 }, e.getScene()), n = e.getHierarchyBoundingVectors(); return n.max.subtractToRef(n.min, s.scaling), s.scaling.y === 0 && (s.scaling.y = Sr), s.scaling.x === 0 && (s.scaling.x = Sr), s.scaling.z === 0 && (s.scaling.z = Sr), s.position.set((n.max.x + n.min.x) / 2, (n.max.y + n.min.y) / 2, (n.max.z + n.min.z) / 2), e.addChild(s), e.rotationQuaternion.copyFrom(r), e.position.copyFrom(i), e.removeChild(s), s.addChild(e), s.visibility = 0, s; } /** * CustomMeshes are not supported by this gizmo */ setCustomMesh() { Ce.Error("Custom meshes are not supported on this gizmo"); } } class D4 extends Do { /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse */ get hoverMaterial() { return this._hoverMaterial; } /** Color used to render the drag angle sector when gizmo is rotated with mouse */ set rotationColor(e) { this._rotationShaderMaterial.setColor3("rotationColor", e); } /** Material used to render when gizmo is disabled. typically grey. */ get disableMaterial() { return this._disableMaterial; } /** * Creates a PlaneRotationGizmo * @param planeNormal The normal of the plane which the gizmo will be able to rotate on * @param color The color of the gizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param tessellation Amount of tessellation to be used when creating rotation circles * @param parent * @param useEulerRotation Use and update Euler angle instead of quaternion * @param thickness display gizmo axis thickness * @param hoverColor The color of the gizmo when hovering over and dragging * @param disableColor The Color of the gizmo when its disabled */ constructor(e, t = ze.Gray(), i = bn.DefaultUtilityLayer, r = 32, s = null, n = !1, a = 1, l = ze.Yellow(), o = ze.Gray()) { var u; super(i), this._pointerObserver = null, this.snapDistance = 0, this.onSnapObservable = new Fe(), this.angle = 0, this.sensitivity = 1, this._isEnabled = !0, this._parent = null, this._dragging = !1, this._angles = new D(), this._parent = s, this._coloredMaterial = new Dt("", i.utilityLayerScene), this._coloredMaterial.diffuseColor = t, this._coloredMaterial.specularColor = t.subtract(new ze(0.1, 0.1, 0.1)), this._hoverMaterial = new Dt("", i.utilityLayerScene), this._hoverMaterial.diffuseColor = l, this._hoverMaterial.specularColor = l, this._disableMaterial = new Dt("", i.utilityLayerScene), this._disableMaterial.diffuseColor = o, this._disableMaterial.alpha = 0.4, this._gizmoMesh = new ke("", i.utilityLayerScene); const { rotationMesh: h, collider: d } = this._createGizmoMesh(this._gizmoMesh, a, r); this._rotationDisplayPlane = hx("rotationDisplay", { size: 0.6, updatable: !1 }, this.gizmoLayer.utilityLayerScene), this._rotationDisplayPlane.rotation.z = Math.PI * 0.5, this._rotationDisplayPlane.parent = this._gizmoMesh, 
this._rotationDisplayPlane.setEnabled(!1), Cr.ShadersStore.rotationGizmoVertexShader = D4._RotationGizmoVertexShader, Cr.ShadersStore.rotationGizmoFragmentShader = D4._RotationGizmoFragmentShader, this._rotationShaderMaterial = new Lo("shader", this.gizmoLayer.utilityLayerScene, { vertex: "rotationGizmo", fragment: "rotationGizmo" }, { attributes: ["position", "uv"], uniforms: ["worldViewProjection", "angles", "rotationColor"] }), this._rotationShaderMaterial.backFaceCulling = !1, this.rotationColor = l, this._rotationDisplayPlane.material = this._rotationShaderMaterial, this._rotationDisplayPlane.visibility = 0.999, this._gizmoMesh.lookAt(this._rootMesh.position.add(e)), this._rootMesh.addChild(this._gizmoMesh, Do.PreserveScaling), this._gizmoMesh.scaling.scaleInPlace(1 / 3), this.dragBehavior = new Fu({ dragPlaneNormal: e }), this.dragBehavior.moveAttached = !1, this.dragBehavior.maxDragAngle = D4.MaxDragAngle, this.dragBehavior._useAlternatePickedPointAboveMaxDragAngle = !0, this._rootMesh.addBehavior(this.dragBehavior); const f = new D(), p = new Ae(), m = new D(); let _ = new D(); this.dragBehavior.onDragStartObservable.add((R) => { this.attachedNode && (f.copyFrom(R.dragPlanePoint), this._rotationDisplayPlane.setEnabled(!0), this._rotationDisplayPlane.getWorldMatrix().invertToRef(p), D.TransformCoordinatesToRef(R.dragPlanePoint, p, f), this._angles.x = Math.atan2(f.y, f.x) + Math.PI, this._angles.y = 0, this._angles.z = this.updateGizmoRotationToMatchAttachedMesh ? 1 : 0, this._dragging = !0, f.copyFrom(R.dragPlanePoint), this._rotationShaderMaterial.setVector3("angles", this._angles), this.angle = 0); }), this.dragBehavior.onDragEndObservable.add(() => { this._dragging = !1, this._rotationDisplayPlane.setEnabled(!1); }); const v = { snapDistance: 0 }; let C = 0; const x = new Ae(), b = new Ze(); this.dragBehavior.onDragObservable.add((R) => { if (this.attachedNode) { const w = new D(1, 1, 1), V = new Ze(0, 0, 0, 1), k = new D(0, 0, 0); if (this.attachedNode.getWorldMatrix().decompose(w, V, k), !(Math.abs(Math.abs(w.x) - Math.abs(w.y)) <= Sr && Math.abs(Math.abs(w.x) - Math.abs(w.z)) <= Sr) && this.updateGizmoRotationToMatchAttachedMesh) { Ce.Warn("Unable to use a rotation gizmo matching mesh rotation with non uniform scaling. Use uniform scaling or set updateGizmoRotationToMatchAttachedMesh to false."); return; } V.normalize(); const B = this.updateGizmoPositionToMatchAttachedMesh ? 
k : this._rootMesh.absolutePosition, U = R.dragPlanePoint.subtract(B).normalize(), K = f.subtract(B).normalize(), ee = D.Cross(U, K), Z = D.Dot(U, K); let q = Math.atan2(ee.length(), Z) * this.sensitivity; m.copyFrom(e), _.copyFrom(e), this.updateGizmoRotationToMatchAttachedMesh && (V.toRotationMatrix(p), _ = D.TransformCoordinates(m, p)); let le = !1; if (i.utilityLayerScene.activeCamera) { const J = i.utilityLayerScene.activeCamera.position.subtract(B).normalize(); D.Dot(J, _) > 0 && (m.scaleInPlace(-1), _.scaleInPlace(-1), le = !0); } D.Dot(_, ee) > 0 && (q = -q), de.Vector3[0].set(q, 0, 0), this.dragBehavior.validateDrag(de.Vector3[0]) || (q = 0); let $ = !1; if (this.snapDistance != 0) if (C += q, Math.abs(C) > this.snapDistance) { let J = Math.floor(Math.abs(C) / this.snapDistance); C < 0 && (J *= -1), C = C % this.snapDistance, q = this.snapDistance * J, $ = !0; } else q = 0; const j = Math.sin(q / 2); if (b.set(m.x * j, m.y * j, m.z * j, Math.cos(q / 2)), x.determinant() > 0) { const J = new D(); b.toEulerAnglesToRef(J), Ze.RotationYawPitchRollToRef(J.y, -J.x, -J.z, b); } if (this.updateGizmoRotationToMatchAttachedMesh) V.multiplyToRef(b, V), V.normalize(), Ae.ComposeToRef(w, V, k, this.attachedNode.getWorldMatrix()); else { b.toRotationMatrix(de.Matrix[0]); const J = this.attachedNode.getWorldMatrix().getTranslation(); this.attachedNode.getWorldMatrix().multiplyToRef(de.Matrix[0], this.attachedNode.getWorldMatrix()), this.attachedNode.getWorldMatrix().setTranslation(J); } f.copyFrom(R.dragPlanePoint), $ && (v.snapDistance = q, this.onSnapObservable.notifyObservers(v)), this._angles.y += q, this.angle += le ? -q : q, this._rotationShaderMaterial.setVector3("angles", this._angles), this._matrixChanged(); } }); const S = i._getSharedGizmoLight(); S.includedOnlyMeshes = S.includedOnlyMeshes.concat(this._rootMesh.getChildMeshes(!1)); const M = { colliderMeshes: [d], gizmoMeshes: [h], material: this._coloredMaterial, hoverMaterial: this._hoverMaterial, disableMaterial: this._disableMaterial, active: !1, dragBehavior: this.dragBehavior }; (u = this._parent) === null || u === void 0 || u.addToAxisCache(this._gizmoMesh, M), this._pointerObserver = i.utilityLayerScene.onPointerObservable.add((R) => { var w; if (!this._customMeshSet && (this.dragBehavior.maxDragAngle = D4.MaxDragAngle, this._isHovered = M.colliderMeshes.indexOf((w = R == null ? void 0 : R.pickInfo) === null || w === void 0 ? void 0 : w.pickedMesh) != -1, !this._parent)) { const V = M.dragBehavior.enabled ? this._isHovered || this._dragging ? this._hoverMaterial : this._coloredMaterial : this._disableMaterial; this._setGizmoMeshMaterial(M.gizmoMeshes, V); } }), this.dragBehavior.onEnabledObservable.add((R) => { this._setGizmoMeshMaterial(M.gizmoMeshes, R ? 
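// A hedged usage sketch for this single-ring rotation gizmo (it appears to be
// Babylon.js's PlaneRotationGizmo). Public BABYLON.* names are assumed to map
// onto the minified identifiers; `scene` and `mesh` are placeholders.
//
//   const utility = new BABYLON.UtilityLayerRenderer(scene);
//   const yRing = new BABYLON.PlaneRotationGizmo(
//       new BABYLON.Vector3(0, 1, 0), BABYLON.Color3.Green(), utility);
//   yRing.attachedMesh = mesh;
//   yRing.snapDistance = Math.PI / 12;   // snap in 15 degree steps
//   yRing.onSnapObservable.add((evt) => console.log("snapped by", evt.snapDistance));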
this._coloredMaterial : this._disableMaterial); }); } /** * Create Geometry for Gizmo * @param parentMesh * @param thickness * @param tessellation */ _createGizmoMesh(e, t, i) { const r = o6("ignore", { diameter: 0.6, thickness: 0.03 * t, tessellation: i }, this.gizmoLayer.utilityLayerScene); r.visibility = 0; const s = o6("", { diameter: 0.6, thickness: 5e-3 * t, tessellation: i }, this.gizmoLayer.utilityLayerScene); return s.material = this._coloredMaterial, s.rotation.x = Math.PI / 2, r.rotation.x = Math.PI / 2, e.addChild(s, Do.PreserveScaling), e.addChild(r, Do.PreserveScaling), { rotationMesh: s, collider: r }; } _attachedNodeChanged(e) { this.dragBehavior && (this.dragBehavior.enabled = !!e); } /** * If the gizmo is enabled */ set isEnabled(e) { this._isEnabled = e, e ? this._parent && (this.attachedMesh = this._parent.attachedMesh) : this.attachedMesh = null; } get isEnabled() { return this._isEnabled; } /** * Disposes of the gizmo */ dispose() { this.onSnapObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this.dragBehavior.detach(), this._gizmoMesh && this._gizmoMesh.dispose(), this._rotationDisplayPlane && this._rotationDisplayPlane.dispose(), this._rotationShaderMaterial && this._rotationShaderMaterial.dispose(), [this._coloredMaterial, this._hoverMaterial, this._disableMaterial].forEach((e) => { e && e.dispose(); }), super.dispose(); } } D4.MaxDragAngle = Math.PI * 9 / 20; D4._RotationGizmoVertexShader = ` precision highp float; attribute vec3 position; attribute vec2 uv; uniform mat4 worldViewProjection; varying vec3 vPosition; varying vec2 vUV; void main(void) { gl_Position = worldViewProjection * vec4(position, 1.0); vUV = uv; }`; D4._RotationGizmoFragmentShader = ` precision highp float; varying vec2 vUV; varying vec3 vPosition; uniform vec3 angles; uniform vec3 rotationColor; #define twopi 6.283185307 void main(void) { vec2 uv = vUV - vec2(0.5); float angle = atan(uv.y, uv.x) + 3.141592; float delta = gl_FrontFacing ? angles.y : -angles.y; float begin = angles.x - delta * angles.z; float start = (begin < (begin + delta)) ? begin : (begin + delta); float end = (begin > (begin + delta)) ? begin : (begin + delta); float len = sqrt(dot(uv,uv)); float opacity = 1. - step(0.5, len); float base = abs(floor(start / twopi)) * twopi; start += base; end += base; float intensity = 0.; for (int i = 0; i < 5; i++) { intensity += max(step(start, angle) - step(end, angle), 0.); angle += twopi; } gl_FragColor = vec4(rotationColor, min(intensity * 0.25, 0.8)) * opacity; } `; class dW extends Do { get attachedMesh() { return this._meshAttached; } set attachedMesh(e) { this._meshAttached = e, this._nodeAttached = e, this._checkBillboardTransform(), [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t.isEnabled ? t.attachedMesh = e : t.attachedMesh = null; }); } get attachedNode() { return this._nodeAttached; } set attachedNode(e) { this._meshAttached = null, this._nodeAttached = e, this._checkBillboardTransform(), [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t.isEnabled ? 
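// The surrounding class appears to be Babylon.js's RotationGizmo, which combines
// three of the single-ring gizmos above as xGizmo / yGizmo / zGizmo. A hedged
// sketch with the public names (`mesh` is a placeholder):
//
//   const rotGizmo = new BABYLON.RotationGizmo();
//   rotGizmo.attachedMesh = mesh;
//   rotGizmo.snapDistance = Math.PI / 18;   // 10 degree increments on every ring
//   rotGizmo.sensitivity = 2;               // rotate faster per pointer movement
//   rotGizmo.onDragEndObservable.add(() => console.log(mesh.rotationQuaternion));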
t.attachedNode = e : t.attachedNode = null; }); } _checkBillboardTransform() { this._nodeAttached && this._nodeAttached.billboardMode && Ce.Log("Rotation Gizmo will not work with transforms in billboard mode."); } /** * Sensitivity factor for dragging (Default: 1) */ set sensitivity(e) { this._sensitivity = e, [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t && (t.sensitivity = e); }); } get sensitivity() { return this._sensitivity; } /** * True when the mouse pointer is hovering a gizmo mesh */ get isHovered() { return this.xGizmo.isHovered || this.yGizmo.isHovered || this.zGizmo.isHovered; } /** * Creates a RotationGizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param tessellation Amount of tessellation to be used when creating rotation circles * @param useEulerRotation Use and update Euler angle instead of quaternion * @param thickness display gizmo axis thickness * @param gizmoManager Gizmo manager * @param options More options */ constructor(e = bn.DefaultUtilityLayer, t = 32, i = !1, r = 1, s, n) { super(e), this.onDragStartObservable = new Fe(), this.onDragObservable = new Fe(), this.onDragEndObservable = new Fe(), this._observables = [], this._sensitivity = 1, this._gizmoAxisCache = /* @__PURE__ */ new Map(); const a = n && n.xOptions && n.xOptions.color ? n.xOptions.color : ze.Red().scale(0.5), l = n && n.yOptions && n.yOptions.color ? n.yOptions.color : ze.Green().scale(0.5), o = n && n.zOptions && n.zOptions.color ? n.zOptions.color : ze.Blue().scale(0.5); this.xGizmo = new D4(new D(1, 0, 0), a, e, t, this, i, r), this.yGizmo = new D4(new D(0, 1, 0), l, e, t, this, i, r), this.zGizmo = new D4(new D(0, 0, 1), o, e, t, this, i, r), [this.xGizmo, this.yGizmo, this.zGizmo].forEach((u) => { n && n.updateScale != null && (u.updateScale = n.updateScale), u.dragBehavior.onDragStartObservable.add(() => { this.onDragStartObservable.notifyObservers({}); }), u.dragBehavior.onDragObservable.add(() => { this.onDragObservable.notifyObservers({}); }), u.dragBehavior.onDragEndObservable.add(() => { this.onDragEndObservable.notifyObservers({}); }); }), this.attachedMesh = null, this.attachedNode = null, s ? s.addToAxisCache(this._gizmoAxisCache) : Do.GizmoAxisPointerObserver(e, this._gizmoAxisCache); } /** * If set the gizmo's rotation will be updated to match the attached mesh each frame (Default: true) * NOTE: This is only possible for meshes with uniform scaling, as otherwise it's not possible to decompose the rotation */ set updateGizmoRotationToMatchAttachedMesh(e) { this.xGizmo && (this.xGizmo.updateGizmoRotationToMatchAttachedMesh = e, this.yGizmo.updateGizmoRotationToMatchAttachedMesh = e, this.zGizmo.updateGizmoRotationToMatchAttachedMesh = e); } get updateGizmoRotationToMatchAttachedMesh() { return this.xGizmo.updateGizmoRotationToMatchAttachedMesh; } set updateGizmoPositionToMatchAttachedMesh(e) { this.xGizmo && (this.xGizmo.updateGizmoPositionToMatchAttachedMesh = e, this.yGizmo.updateGizmoPositionToMatchAttachedMesh = e, this.zGizmo.updateGizmoPositionToMatchAttachedMesh = e); } get updateGizmoPositionToMatchAttachedMesh() { return this.xGizmo.updateGizmoPositionToMatchAttachedMesh; } set anchorPoint(e) { this._anchorPoint = e, [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t.anchorPoint = e; }); } get anchorPoint() { return this._anchorPoint; } /** * Set the coordinate system to use. By default it's local. * But it's possible for a user to tweak so its local for translation and world for rotation. 
* In that case, setting the coordinate system will change `updateGizmoRotationToMatchAttachedMesh` and `updateGizmoPositionToMatchAttachedMesh` */ set coordinatesMode(e) { [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t.coordinatesMode = e; }); } set updateScale(e) { this.xGizmo && (this.xGizmo.updateScale = e, this.yGizmo.updateScale = e, this.zGizmo.updateScale = e); } get updateScale() { return this.xGizmo.updateScale; } /** * Drag distance in babylon units that the gizmo will snap to when dragged (Default: 0) */ set snapDistance(e) { this.xGizmo && (this.xGizmo.snapDistance = e, this.yGizmo.snapDistance = e, this.zGizmo.snapDistance = e); } get snapDistance() { return this.xGizmo.snapDistance; } /** * Ratio for the scale of the gizmo (Default: 1) */ set scaleRatio(e) { this.xGizmo && (this.xGizmo.scaleRatio = e, this.yGizmo.scaleRatio = e, this.zGizmo.scaleRatio = e); } get scaleRatio() { return this.xGizmo.scaleRatio; } /** * posture that the gizmo will be display * When set null, default value will be used (Quaternion(0, 0, 0, 1)) */ get customRotationQuaternion() { return this._customRotationQuaternion; } set customRotationQuaternion(e) { this._customRotationQuaternion = e, [this.xGizmo, this.yGizmo, this.zGizmo].forEach((t) => { t && (t.customRotationQuaternion = e); }); } /** * Builds Gizmo Axis Cache to enable features such as hover state preservation and graying out other axis during manipulation * @param mesh Axis gizmo mesh * @param cache Gizmo axis definition used for reactive gizmo UI */ addToAxisCache(e, t) { this._gizmoAxisCache.set(e, t); } /** * Disposes of the gizmo */ dispose() { this.xGizmo.dispose(), this.yGizmo.dispose(), this.zGizmo.dispose(), this.onDragStartObservable.clear(), this.onDragObservable.clear(), this.onDragEndObservable.clear(), this._observables.forEach((e) => { this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(e); }); } /** * CustomMeshes are not supported by this gizmo */ setCustomMesh() { Ce.Error("Custom meshes are not supported on this gizmo, please set the custom meshes on the gizmos contained within this one (gizmo.xGizmo, gizmo.yGizmo, gizmo.zGizmo)"); } } class dO extends Do { /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse*/ get hoverMaterial() { return this._hoverMaterial; } /** Material used to render when gizmo is disabled. 
typically grey.*/ get disableMaterial() { return this._disableMaterial; } /** * @internal */ static _CreatePlane(e, t) { const i = new xi("plane", e), r = hx("dragPlane", { width: 0.1375, height: 0.1375, sideOrientation: 2 }, e); return r.material = t, r.parent = i, i; } /** * Creates a PlaneDragGizmo * @param dragPlaneNormal The axis normal to which the gizmo will be able to drag on * @param color The color of the gizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param parent * @param hoverColor The color of the gizmo when hovering over and dragging * @param disableColor The Color of the gizmo when its disabled */ constructor(e, t = ze.Gray(), i = bn.DefaultUtilityLayer, r = null, s = ze.Yellow(), n = ze.Gray()) { var a; super(i), this._pointerObserver = null, this.snapDistance = 0, this.onSnapObservable = new Fe(), this._isEnabled = !1, this._parent = null, this._dragging = !1, this._parent = r, this._coloredMaterial = new Dt("", i.utilityLayerScene), this._coloredMaterial.diffuseColor = t, this._coloredMaterial.specularColor = t.subtract(new ze(0.1, 0.1, 0.1)), this._hoverMaterial = new Dt("", i.utilityLayerScene), this._hoverMaterial.diffuseColor = s, this._disableMaterial = new Dt("", i.utilityLayerScene), this._disableMaterial.diffuseColor = n, this._disableMaterial.alpha = 0.4, this._gizmoMesh = dO._CreatePlane(i.utilityLayerScene, this._coloredMaterial), this._gizmoMesh.lookAt(this._rootMesh.position.add(e)), this._gizmoMesh.scaling.scaleInPlace(1 / 3), this._gizmoMesh.parent = this._rootMesh; let l = 0; const o = new D(), u = { snapDistance: 0 }; this.dragBehavior = new Fu({ dragPlaneNormal: e }), this.dragBehavior.moveAttached = !1, this._rootMesh.addBehavior(this.dragBehavior), this.dragBehavior.onDragObservable.add((f) => { if (this.attachedNode) { if (this.snapDistance == 0) this.attachedNode.getWorldMatrix().getTranslationToRef(de.Vector3[0]), de.Vector3[0].addToRef(f.delta, de.Vector3[0]), this.dragBehavior.validateDrag(de.Vector3[0]) && this.attachedNode.getWorldMatrix().addTranslationFromFloats(f.delta.x, f.delta.y, f.delta.z); else if (l += f.dragDistance, Math.abs(l) > this.snapDistance) { const p = Math.floor(Math.abs(l) / this.snapDistance); l = l % this.snapDistance, f.delta.normalizeToRef(o), o.scaleInPlace(this.snapDistance * p), this.attachedNode.getWorldMatrix().getTranslationToRef(de.Vector3[0]), de.Vector3[0].addToRef(o, de.Vector3[0]), this.dragBehavior.validateDrag(de.Vector3[0]) && (this.attachedNode.getWorldMatrix().addTranslationFromFloats(o.x, o.y, o.z), u.snapDistance = this.snapDistance * p, this.onSnapObservable.notifyObservers(u)); } this._matrixChanged(); } }), this.dragBehavior.onDragStartObservable.add(() => { this._dragging = !0; }), this.dragBehavior.onDragEndObservable.add(() => { this._dragging = !1; }); const h = i._getSharedGizmoLight(); h.includedOnlyMeshes = h.includedOnlyMeshes.concat(this._rootMesh.getChildMeshes(!1)); const d = { gizmoMeshes: this._gizmoMesh.getChildMeshes(), colliderMeshes: this._gizmoMesh.getChildMeshes(), material: this._coloredMaterial, hoverMaterial: this._hoverMaterial, disableMaterial: this._disableMaterial, active: !1, dragBehavior: this.dragBehavior }; (a = this._parent) === null || a === void 0 || a.addToAxisCache(this._gizmoMesh, d), this._pointerObserver = i.utilityLayerScene.onPointerObservable.add((f) => { var p; if (!this._customMeshSet && (this._isHovered = d.colliderMeshes.indexOf((p = f == null ? void 0 : f.pickInfo) === null || p === void 0 ? 
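// The surrounding class appears to be Babylon.js's PlaneDragGizmo: a flat handle
// that constrains dragging to a single plane. It is usually created indirectly by
// PositionGizmo, but can also be used on its own. A hedged sketch with the public
// names (`mesh` is a placeholder):
//
//   const xyHandle = new BABYLON.PlaneDragGizmo(
//       new BABYLON.Vector3(0, 0, 1),        // plane normal, so drags stay in XY
//       BABYLON.Color3.Blue());
//   xyHandle.attachedMesh = mesh;
//   xyHandle.snapDistance = 0.25;            // snap translation to 0.25 units
//   xyHandle.onSnapObservable.add((e) => console.log("moved by", e.snapDistance));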
void 0 : p.pickedMesh) != -1, !this._parent)) { const m = d.dragBehavior.enabled ? this._isHovered || this._dragging ? this._hoverMaterial : this._coloredMaterial : this._disableMaterial; this._setGizmoMeshMaterial(d.gizmoMeshes, m); } }), this.dragBehavior.onEnabledObservable.add((f) => { this._setGizmoMeshMaterial(d.gizmoMeshes, f ? this._coloredMaterial : this._disableMaterial); }); } _attachedNodeChanged(e) { this.dragBehavior && (this.dragBehavior.enabled = !!e); } /** * If the gizmo is enabled */ set isEnabled(e) { this._isEnabled = e, e ? this._parent && (this.attachedNode = this._parent.attachedNode) : this.attachedNode = null; } get isEnabled() { return this._isEnabled; } /** * Disposes of the gizmo */ dispose() { this.onSnapObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this.dragBehavior.detach(), super.dispose(), this._gizmoMesh && this._gizmoMesh.dispose(), [this._coloredMaterial, this._hoverMaterial, this._disableMaterial].forEach((e) => { e && e.dispose(); }); } } class EU extends Do { get attachedMesh() { return this._meshAttached; } set attachedMesh(e) { this._meshAttached = e, this._nodeAttached = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t.isEnabled ? t.attachedMesh = e : t.attachedMesh = null; }); } get attachedNode() { return this._nodeAttached; } set attachedNode(e) { this._meshAttached = null, this._nodeAttached = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t.isEnabled ? t.attachedNode = e : t.attachedNode = null; }); } /** * True when the mouse pointer is hovering a gizmo mesh */ get isHovered() { return this.xGizmo.isHovered || this.yGizmo.isHovered || this.zGizmo.isHovered || this.xPlaneGizmo.isHovered || this.yPlaneGizmo.isHovered || this.zPlaneGizmo.isHovered; } /** * Creates a PositionGizmo * @param gizmoLayer The utility layer the gizmo will be added to @param thickness display gizmo axis thickness * @param gizmoManager */ constructor(e = bn.DefaultUtilityLayer, t = 1, i) { super(e), this._meshAttached = null, this._nodeAttached = null, this._observables = [], this._gizmoAxisCache = /* @__PURE__ */ new Map(), this.onDragStartObservable = new Fe(), this.onDragObservable = new Fe(), this.onDragEndObservable = new Fe(), this._planarGizmoEnabled = !1, this.xGizmo = new hg(new D(1, 0, 0), ze.Red().scale(0.5), e, this, t), this.yGizmo = new hg(new D(0, 1, 0), ze.Green().scale(0.5), e, this, t), this.zGizmo = new hg(new D(0, 0, 1), ze.Blue().scale(0.5), e, this, t), this.xPlaneGizmo = new dO(new D(1, 0, 0), ze.Red().scale(0.5), this.gizmoLayer, this), this.yPlaneGizmo = new dO(new D(0, 1, 0), ze.Green().scale(0.5), this.gizmoLayer, this), this.zPlaneGizmo = new dO(new D(0, 0, 1), ze.Blue().scale(0.5), this.gizmoLayer, this), [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((r) => { r.dragBehavior.onDragStartObservable.add(() => { this.onDragStartObservable.notifyObservers({}); }), r.dragBehavior.onDragObservable.add(() => { this.onDragObservable.notifyObservers({}); }), r.dragBehavior.onDragEndObservable.add(() => { this.onDragEndObservable.notifyObservers({}); }); }), this.attachedMesh = null, i ? 
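// The surrounding class appears to be Babylon.js's PositionGizmo: three axis
// arrows plus three optional planar drag handles. A hedged sketch with the
// public names (`mesh` is a placeholder):
//
//   const posGizmo = new BABYLON.PositionGizmo();
//   posGizmo.attachedMesh = mesh;
//   posGizmo.planarGizmoEnabled = true;   // also show the XY / XZ / YZ plane handles
//   posGizmo.snapDistance = 0.5;          // translate in 0.5 unit steps
//   posGizmo.updateGizmoRotationToMatchAttachedMesh = false;   // keep the axes world-aligned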
i.addToAxisCache(this._gizmoAxisCache) : Do.GizmoAxisPointerObserver(e, this._gizmoAxisCache); } /** * If the planar drag gizmo is enabled * setting this will enable/disable XY, XZ and YZ planes regardless of individual gizmo settings. */ set planarGizmoEnabled(e) { this._planarGizmoEnabled = e, [this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.isEnabled = e, e && (t.attachedMesh ? t.attachedMesh = this.attachedMesh : t.attachedNode = this.attachedNode)); }, this); } get planarGizmoEnabled() { return this._planarGizmoEnabled; } /** * posture that the gizmo will be display * When set null, default value will be used (Quaternion(0, 0, 0, 1)) */ get customRotationQuaternion() { return this._customRotationQuaternion; } set customRotationQuaternion(e) { this._customRotationQuaternion = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.customRotationQuaternion = e); }); } /** * If set the gizmo's rotation will be updated to match the attached mesh each frame (Default: true) * NOTE: This is only possible for meshes with uniform scaling, as otherwise it's not possible to decompose the rotation */ set updateGizmoRotationToMatchAttachedMesh(e) { this._updateGizmoRotationToMatchAttachedMesh = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.updateGizmoRotationToMatchAttachedMesh = e); }); } get updateGizmoRotationToMatchAttachedMesh() { return this._updateGizmoRotationToMatchAttachedMesh; } set updateGizmoPositionToMatchAttachedMesh(e) { this._updateGizmoPositionToMatchAttachedMesh = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.updateGizmoPositionToMatchAttachedMesh = e); }); } get updateGizmoPositionToMatchAttachedMesh() { return this._updateGizmoPositionToMatchAttachedMesh; } set anchorPoint(e) { this._anchorPoint = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t.anchorPoint = e; }); } get anchorPoint() { return this._anchorPoint; } /** * Set the coordinate system to use. By default it's local. * But it's possible for a user to tweak so its local for translation and world for rotation. 
* In that case, setting the coordinate system will change `updateGizmoRotationToMatchAttachedMesh` and `updateGizmoPositionToMatchAttachedMesh` */ set coordinatesMode(e) { [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t.coordinatesMode = e; }); } set updateScale(e) { this.xGizmo && (this.xGizmo.updateScale = e, this.yGizmo.updateScale = e, this.zGizmo.updateScale = e); } get updateScale() { return this.xGizmo.updateScale; } /** * Drag distance in babylon units that the gizmo will snap to when dragged (Default: 0) */ set snapDistance(e) { this._snapDistance = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.snapDistance = e); }); } get snapDistance() { return this._snapDistance; } /** * Ratio for the scale of the gizmo (Default: 1) */ set scaleRatio(e) { this._scaleRatio = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((t) => { t && (t.scaleRatio = e); }); } get scaleRatio() { return this._scaleRatio; } /** * Builds Gizmo Axis Cache to enable features such as hover state preservation and graying out other axis during manipulation * @param mesh Axis gizmo mesh * @param cache Gizmo axis definition used for reactive gizmo UI */ addToAxisCache(e, t) { this._gizmoAxisCache.set(e, t); } /** * Disposes of the gizmo */ dispose() { [this.xGizmo, this.yGizmo, this.zGizmo, this.xPlaneGizmo, this.yPlaneGizmo, this.zPlaneGizmo].forEach((e) => { e && e.dispose(); }), this._observables.forEach((e) => { this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(e); }), this.onDragStartObservable.clear(), this.onDragObservable.clear(), this.onDragEndObservable.clear(); } /** * CustomMeshes are not supported by this gizmo */ setCustomMesh() { Ce.Error("Custom meshes are not supported on this gizmo, please set the custom meshes on the gizmos contained within this one (gizmo.xGizmo, gizmo.yGizmo, gizmo.zGizmo,gizmo.xPlaneGizmo, gizmo.yPlaneGizmo, gizmo.zPlaneGizmo)"); } } class fW extends Do { /** Default material used to render when gizmo is not disabled or hovered */ get coloredMaterial() { return this._coloredMaterial; } /** Material used to render when gizmo is hovered with mouse*/ get hoverMaterial() { return this._hoverMaterial; } /** Material used to render when gizmo is disabled. typically grey.*/ get disableMaterial() { return this._disableMaterial; } get attachedMesh() { return this._meshAttached; } set attachedMesh(e) { this._meshAttached = e, this._nodeAttached = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t.isEnabled ? t.attachedMesh = e : t.attachedMesh = null; }); } get attachedNode() { return this._nodeAttached; } set attachedNode(e) { this._meshAttached = null, this._nodeAttached = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t.isEnabled ? 
t.attachedNode = e : t.attachedNode = null; }); } set updateScale(e) { this.xGizmo && (this.xGizmo.updateScale = e, this.yGizmo.updateScale = e, this.zGizmo.updateScale = e); } get updateScale() { return this.xGizmo.updateScale; } /** * True when the mouse pointer is hovering a gizmo mesh */ get isHovered() { return this.xGizmo.isHovered || this.yGizmo.isHovered || this.zGizmo.isHovered; } /** * Creates a ScaleGizmo * @param gizmoLayer The utility layer the gizmo will be added to * @param thickness display gizmo axis thickness * @param gizmoManager */ constructor(e = bn.DefaultUtilityLayer, t = 1, i) { super(e), this._meshAttached = null, this._nodeAttached = null, this._incrementalSnap = !1, this._sensitivity = 1, this._observables = [], this._gizmoAxisCache = /* @__PURE__ */ new Map(), this.onDragStartObservable = new Fe(), this.onDragObservable = new Fe(), this.onDragEndObservable = new Fe(), this.uniformScaleGizmo = this._createUniformScaleMesh(), this.xGizmo = new dg(new D(1, 0, 0), ze.Red().scale(0.5), e, this, t), this.yGizmo = new dg(new D(0, 1, 0), ze.Green().scale(0.5), e, this, t), this.zGizmo = new dg(new D(0, 0, 1), ze.Blue().scale(0.5), e, this, t), [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((r) => { r.dragBehavior.onDragStartObservable.add(() => { this.onDragStartObservable.notifyObservers({}); }), r.dragBehavior.onDragObservable.add(() => { this.onDragObservable.notifyObservers({}); }), r.dragBehavior.onDragEndObservable.add(() => { this.onDragEndObservable.notifyObservers({}); }); }), this.attachedMesh = null, this.attachedNode = null, i ? i.addToAxisCache(this._gizmoAxisCache) : Do.GizmoAxisPointerObserver(e, this._gizmoAxisCache); } /** Create Geometry for Gizmo */ _createUniformScaleMesh() { this._coloredMaterial = new Dt("", this.gizmoLayer.utilityLayerScene), this._coloredMaterial.diffuseColor = ze.Gray(), this._hoverMaterial = new Dt("", this.gizmoLayer.utilityLayerScene), this._hoverMaterial.diffuseColor = ze.Yellow(), this._disableMaterial = new Dt("", this.gizmoLayer.utilityLayerScene), this._disableMaterial.diffuseColor = ze.Gray(), this._disableMaterial.alpha = 0.4; const e = new dg(new D(0, 1, 0), ze.Gray().scale(0.5), this.gizmoLayer, this); e.updateGizmoRotationToMatchAttachedMesh = !1, e.uniformScaling = !0, this._uniformScalingMesh = AP("uniform", { type: 1 }, e.gizmoLayer.utilityLayerScene), this._uniformScalingMesh.scaling.scaleInPlace(0.01), this._uniformScalingMesh.visibility = 0, this._octahedron = AP("", { type: 1 }, e.gizmoLayer.utilityLayerScene), this._octahedron.scaling.scaleInPlace(7e-3), this._uniformScalingMesh.addChild(this._octahedron), e.setCustomMesh(this._uniformScalingMesh, !0); const t = this.gizmoLayer._getSharedGizmoLight(); t.includedOnlyMeshes = t.includedOnlyMeshes.concat(this._octahedron); const i = { gizmoMeshes: [this._octahedron, this._uniformScalingMesh], colliderMeshes: [this._uniformScalingMesh], material: this._coloredMaterial, hoverMaterial: this._hoverMaterial, disableMaterial: this._disableMaterial, active: !1, dragBehavior: e.dragBehavior }; return this.addToAxisCache(e._rootMesh, i), e; } set updateGizmoRotationToMatchAttachedMesh(e) { e ? 
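// The surrounding class appears to be Babylon.js's ScaleGizmo: three axis
// handles plus a central octahedron for uniform scaling. A hedged sketch with
// the public names (`mesh` is a placeholder):
//
//   const scaleGizmo = new BABYLON.ScaleGizmo();
//   scaleGizmo.attachedMesh = mesh;
//   scaleGizmo.sensitivity = 3;          // larger values scale faster per drag
//   scaleGizmo.snapDistance = 0.1;
//   scaleGizmo.incrementalSnap = true;   // steps of 1.1, 1.2, 1.3 instead of multiplicative steps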
(this._updateGizmoRotationToMatchAttachedMesh = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.updateGizmoRotationToMatchAttachedMesh = e); })) : Ce.Warn("Setting updateGizmoRotationToMatchAttachedMesh = false on scaling gizmo is not supported."); } get updateGizmoRotationToMatchAttachedMesh() { return this._updateGizmoRotationToMatchAttachedMesh; } set anchorPoint(e) { this._anchorPoint = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.anchorPoint = e); }); } get anchorPoint() { return this._anchorPoint; } /** * posture that the gizmo will be display * When set null, default value will be used (Quaternion(0, 0, 0, 1)) */ get customRotationQuaternion() { return this._customRotationQuaternion; } set customRotationQuaternion(e) { this._customRotationQuaternion = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.customRotationQuaternion = e); }); } /** * Set the coordinate system to use. By default it's local. * But it's possible for a user to tweak so its local for translation and world for rotation. * In that case, setting the coordinate system will change `updateGizmoRotationToMatchAttachedMesh` and `updateGizmoPositionToMatchAttachedMesh` */ set coordinatesMode(e) { e == p5.World && Ce.Warn("Setting coordinates Mode to world on scaling gizmo is not supported."), [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t.coordinatesMode = p5.Local; }); } /** * Drag distance in babylon units that the gizmo will snap to when dragged (Default: 0) */ set snapDistance(e) { this._snapDistance = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.snapDistance = e); }); } get snapDistance() { return this._snapDistance; } /** * Incremental snap scaling (default is false). When true, with a snapDistance of 0.1, scaling will be 1.1,1.2,1.3 instead of, when false: 1.1,1.21,1.33,... 
*/ set incrementalSnap(e) { this._incrementalSnap = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.incrementalSnap = e); }); } get incrementalSnap() { return this._incrementalSnap; } /** * Ratio for the scale of the gizmo (Default: 1) */ set scaleRatio(e) { this._scaleRatio = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.scaleRatio = e); }); } get scaleRatio() { return this._scaleRatio; } /** * Sensitivity factor for dragging (Default: 1) */ set sensitivity(e) { this._sensitivity = e, [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((t) => { t && (t.sensitivity = e); }); } get sensitivity() { return this._sensitivity; } /** * Builds Gizmo Axis Cache to enable features such as hover state preservation and graying out other axis during manipulation * @param mesh Axis gizmo mesh * @param cache Gizmo axis definition used for reactive gizmo UI */ addToAxisCache(e, t) { this._gizmoAxisCache.set(e, t); } /** * Disposes of the gizmo */ dispose() { [this.xGizmo, this.yGizmo, this.zGizmo, this.uniformScaleGizmo].forEach((e) => { e && e.dispose(); }), this._observables.forEach((e) => { this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(e); }), this.onDragStartObservable.clear(), this.onDragObservable.clear(), this.onDragEndObservable.clear(), [this._uniformScalingMesh, this._octahedron].forEach((e) => { e && e.dispose(); }), [this._coloredMaterial, this._hoverMaterial, this._disableMaterial].forEach((e) => { e && e.dispose(); }); } } class qpe { /** * Utility layer that the bounding box gizmo belongs to */ get keepDepthUtilityLayer() { return this._defaultKeepDepthUtilityLayer; } /** * Utility layer that all gizmos besides bounding box belong to */ get utilityLayer() { return this._defaultUtilityLayer; } /** * True when the mouse pointer is hovering a gizmo mesh */ get isHovered() { let e = !1; for (const t in this.gizmos) { const i = this.gizmos[t]; if (i && i.isHovered) { e = !0; break; } } return e; } /** * Ratio for the scale of the gizmo (Default: 1) */ set scaleRatio(e) { this._scaleRatio = e, [this.gizmos.positionGizmo, this.gizmos.rotationGizmo, this.gizmos.scaleGizmo].forEach((t) => { t && (t.scaleRatio = e); }); } get scaleRatio() { return this._scaleRatio; } /** * Set the coordinate system to use. By default it's local. * But it's possible for a user to tweak so its local for translation and world for rotation. 
* In that case, setting the coordinate system will change `updateGizmoRotationToMatchAttachedMesh` and `updateGizmoPositionToMatchAttachedMesh` */ set coordinatesMode(e) { this._coordinatesMode = e, [this.gizmos.positionGizmo, this.gizmos.rotationGizmo, this.gizmos.scaleGizmo].forEach((t) => { t && (t.coordinatesMode = e); }); } get coordinatesMode() { return this._coordinatesMode; } /** * Instantiates a gizmo manager * @param _scene the scene to overlay the gizmos on top of * @param thickness display gizmo axis thickness * @param utilityLayer the layer where gizmos are rendered * @param keepDepthUtilityLayer the layer where occluded gizmos are rendered */ constructor(e, t = 1, i = bn.DefaultUtilityLayer, r = bn.DefaultKeepDepthUtilityLayer) { this._scene = e, this.clearGizmoOnEmptyPointerEvent = !1, this.enableAutoPicking = !0, this.onAttachedToMeshObservable = new Fe(), this.onAttachedToNodeObservable = new Fe(), this._gizmosEnabled = { positionGizmo: !1, rotationGizmo: !1, scaleGizmo: !1, boundingBoxGizmo: !1 }, this._pointerObservers = [], this._attachedMesh = null, this._attachedNode = null, this._boundingBoxColor = ze.FromHexString("#0984e3"), this._thickness = 1, this._scaleRatio = 1, this._coordinatesMode = p5.Local, this._gizmoAxisCache = /* @__PURE__ */ new Map(), this.boundingBoxDragBehavior = new kte(), this.attachableMeshes = null, this.attachableNodes = null, this.usePointerToAttachGizmos = !0, this._defaultUtilityLayer = i, this._defaultKeepDepthUtilityLayer = r, this._defaultKeepDepthUtilityLayer.utilityLayerScene.autoClearDepthAndStencil = !1, this._thickness = t, this.gizmos = { positionGizmo: null, rotationGizmo: null, scaleGizmo: null, boundingBoxGizmo: null }; const s = this._attachToMeshPointerObserver(e), n = Do.GizmoAxisPointerObserver(this._defaultUtilityLayer, this._gizmoAxisCache); this._pointerObservers = [s, n]; } /** * Subscribes to pointer down events, for attaching and detaching mesh * @param scene The scene layer the observer will be added to */ _attachToMeshPointerObserver(e) { return e.onPointerObservable.add((i) => { if (this.usePointerToAttachGizmos && i.type == si.POINTERDOWN) if (i.pickInfo && i.pickInfo.pickedMesh) { if (this.enableAutoPicking) { let r = i.pickInfo.pickedMesh; if (this.attachableMeshes == null) for (; r && r.parent != null; ) r = r.parent; else { let s = !1; this.attachableMeshes.forEach((n) => { r && (r == n || r.isDescendantOf(n)) && (r = n, s = !0); }), s || (r = null); } r instanceof xr ? 
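// The surrounding class appears to be Babylon.js's GizmoManager, which creates
// the position / rotation / scale / bounding-box gizmos on demand and attaches
// them to whatever the user clicks. A hedged sketch with the public names
// (`scene`, `meshA`, `meshB` are placeholders):
//
//   const manager = new BABYLON.GizmoManager(scene);
//   manager.positionGizmoEnabled = true;
//   manager.rotationGizmoEnabled = true;
//   manager.usePointerToAttachGizmos = true;    // click a mesh to attach (default behaviour)
//   manager.attachableMeshes = [meshA, meshB];  // restrict which meshes can be picked
//   manager.attachToMesh(meshA);                // or attach explicitly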
this._attachedMesh != r && this.attachToMesh(r) : this.clearGizmoOnEmptyPointerEvent && this.attachToMesh(null); } } else this.clearGizmoOnEmptyPointerEvent && this.attachToMesh(null); }); } /** * Attaches a set of gizmos to the specified mesh * @param mesh The mesh the gizmo's should be attached to */ attachToMesh(e) { this._attachedMesh && this._attachedMesh.removeBehavior(this.boundingBoxDragBehavior), this._attachedNode && this._attachedNode.removeBehavior(this.boundingBoxDragBehavior), this._attachedMesh = e, this._attachedNode = null; for (const t in this.gizmos) { const i = this.gizmos[t]; i && this._gizmosEnabled[t] && (i.attachedMesh = e); } this.boundingBoxGizmoEnabled && this._attachedMesh && this._attachedMesh.addBehavior(this.boundingBoxDragBehavior), this.onAttachedToMeshObservable.notifyObservers(e); } /** * Attaches a set of gizmos to the specified node * @param node The node the gizmo's should be attached to */ attachToNode(e) { this._attachedMesh && this._attachedMesh.removeBehavior(this.boundingBoxDragBehavior), this._attachedNode && this._attachedNode.removeBehavior(this.boundingBoxDragBehavior), this._attachedMesh = null, this._attachedNode = e; for (const t in this.gizmos) { const i = this.gizmos[t]; i && this._gizmosEnabled[t] && (i.attachedNode = e); } this.boundingBoxGizmoEnabled && this._attachedNode && this._attachedNode.addBehavior(this.boundingBoxDragBehavior), this.onAttachedToNodeObservable.notifyObservers(e); } /** * If the position gizmo is enabled */ set positionGizmoEnabled(e) { e ? (this.gizmos.positionGizmo || (this.gizmos.positionGizmo = new EU(this._defaultUtilityLayer, this._thickness, this)), this._attachedNode ? this.gizmos.positionGizmo.attachedNode = this._attachedNode : this.gizmos.positionGizmo.attachedMesh = this._attachedMesh) : this.gizmos.positionGizmo && (this.gizmos.positionGizmo.attachedNode = null), this._gizmosEnabled.positionGizmo = e; } get positionGizmoEnabled() { return this._gizmosEnabled.positionGizmo; } /** * If the rotation gizmo is enabled */ set rotationGizmoEnabled(e) { e ? (this.gizmos.rotationGizmo || (this.gizmos.rotationGizmo = new dW(this._defaultUtilityLayer, 32, !1, this._thickness, this)), this._attachedNode ? this.gizmos.rotationGizmo.attachedNode = this._attachedNode : this.gizmos.rotationGizmo.attachedMesh = this._attachedMesh) : this.gizmos.rotationGizmo && (this.gizmos.rotationGizmo.attachedNode = null), this._gizmosEnabled.rotationGizmo = e; } get rotationGizmoEnabled() { return this._gizmosEnabled.rotationGizmo; } /** * If the scale gizmo is enabled */ set scaleGizmoEnabled(e) { e ? (this.gizmos.scaleGizmo = this.gizmos.scaleGizmo || new fW(this._defaultUtilityLayer, this._thickness, this), this._attachedNode ? this.gizmos.scaleGizmo.attachedNode = this._attachedNode : this.gizmos.scaleGizmo.attachedMesh = this._attachedMesh) : this.gizmos.scaleGizmo && (this.gizmos.scaleGizmo.attachedNode = null), this._gizmosEnabled.scaleGizmo = e; } get scaleGizmoEnabled() { return this._gizmosEnabled.scaleGizmo; } /** * If the boundingBox gizmo is enabled */ set boundingBoxGizmoEnabled(e) { e ? (this.gizmos.boundingBoxGizmo = this.gizmos.boundingBoxGizmo || new hW(this._boundingBoxColor, this._defaultKeepDepthUtilityLayer), this._attachedMesh ? this.gizmos.boundingBoxGizmo.attachedMesh = this._attachedMesh : this.gizmos.boundingBoxGizmo.attachedNode = this._attachedNode, this._attachedMesh ? 
(this._attachedMesh.removeBehavior(this.boundingBoxDragBehavior), this._attachedMesh.addBehavior(this.boundingBoxDragBehavior)) : this._attachedNode && (this._attachedNode.removeBehavior(this.boundingBoxDragBehavior), this._attachedNode.addBehavior(this.boundingBoxDragBehavior))) : this.gizmos.boundingBoxGizmo && (this._attachedMesh ? this._attachedMesh.removeBehavior(this.boundingBoxDragBehavior) : this._attachedNode && this._attachedNode.removeBehavior(this.boundingBoxDragBehavior), this.gizmos.boundingBoxGizmo.attachedNode = null), this._gizmosEnabled.boundingBoxGizmo = e; } get boundingBoxGizmoEnabled() { return this._gizmosEnabled.boundingBoxGizmo; } /** * Builds Gizmo Axis Cache to enable features such as hover state preservation and graying out other axis during manipulation * @param gizmoAxisCache Gizmo axis definition used for reactive gizmo UI */ addToAxisCache(e) { e.size > 0 && e.forEach((t, i) => { this._gizmoAxisCache.set(i, t); }); } /** * Disposes of the gizmo manager */ dispose() { var e, t; this._pointerObservers.forEach((i) => { this._scene.onPointerObservable.remove(i); }); for (const i in this.gizmos) { const r = this.gizmos[i]; r && r.dispose(); } this._defaultKeepDepthUtilityLayer !== bn._DefaultKeepDepthUtilityLayer && ((e = this._defaultKeepDepthUtilityLayer) === null || e === void 0 || e.dispose()), this._defaultUtilityLayer !== bn._DefaultUtilityLayer && ((t = this._defaultUtilityLayer) === null || t === void 0 || t.dispose()), this.boundingBoxDragBehavior.detach(), this.onAttachedToMeshObservable.clear(); } } class b5 extends hs { constructor() { super(...arguments), this._needProjectionMatrixCompute = !0, this._viewMatrix = Ae.Identity(), this._projectionMatrix = Ae.Identity(); } _setPosition(e) { this._position = e; } /** * Sets the position the shadow will be casted from. Also use as the light position for both * point and spot lights. */ get position() { return this._position; } /** * Sets the position the shadow will be casted from. Also use as the light position for both * point and spot lights. */ set position(e) { this._setPosition(e); } _setDirection(e) { this._direction = e; } /** * In 2d mode (needCube being false), gets the direction used to cast the shadow. * Also use as the light direction on spot and directional lights. */ get direction() { return this._direction; } /** * In 2d mode (needCube being false), sets the direction used to cast the shadow. * Also use as the light direction on spot and directional lights. */ set direction(e) { this._setDirection(e); } /** * Gets the shadow projection clipping minimum z value. */ get shadowMinZ() { return this._shadowMinZ; } /** * Sets the shadow projection clipping minimum z value. */ set shadowMinZ(e) { this._shadowMinZ = e, this.forceProjectionMatrixCompute(); } /** * Sets the shadow projection clipping maximum z value. */ get shadowMaxZ() { return this._shadowMaxZ; } /** * Gets the shadow projection clipping maximum z value. */ set shadowMaxZ(e) { this._shadowMaxZ = e, this.forceProjectionMatrixCompute(); } /** * Computes the transformed information (transformedPosition and transformedDirection in World space) of the current light * @returns true if the information has been computed, false if it does not need to (no parenting) */ computeTransformedInformation() { return this.parent && this.parent.getWorldMatrix ? 
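// The surrounding class appears to be Babylon.js's ShadowLight base class,
// shared by directional and spot lights. A hedged sketch of the members it
// documents, using the public names (`scene` is a placeholder):
//
//   const light = new BABYLON.DirectionalLight("sun", new BABYLON.Vector3(-1, -2, -1), scene);
//   light.position = new BABYLON.Vector3(20, 40, 20);
//   light.setDirectionToTarget(BABYLON.Vector3.Zero());   // aim at the origin
//   light.shadowMinZ = 1;     // tighten the depth range used by the shadow map
//   light.shadowMaxZ = 100;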
(this.transformedPosition || (this.transformedPosition = D.Zero()), D.TransformCoordinatesToRef(this.position, this.parent.getWorldMatrix(), this.transformedPosition), this.direction && (this.transformedDirection || (this.transformedDirection = D.Zero()), D.TransformNormalToRef(this.direction, this.parent.getWorldMatrix(), this.transformedDirection)), !0) : !1; } /** * Return the depth scale used for the shadow map. * @returns the depth scale. */ getDepthScale() { return 50; } /** * Get the direction to use to render the shadow map. In case of cube texture, the face index can be passed. * @param faceIndex The index of the face we are computed the direction to generate shadow * @returns The set direction in 2d mode otherwise the direction to the cubemap face if needCube() is true */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getShadowDirection(e) { return this.transformedDirection ? this.transformedDirection : this.direction; } /** * Returns the ShadowLight absolute position in the World. * @returns the position vector in world space */ getAbsolutePosition() { return this.transformedPosition ? this.transformedPosition : this.position; } /** * Sets the ShadowLight direction toward the passed target. * @param target The point to target in local space * @returns the updated ShadowLight direction */ setDirectionToTarget(e) { return this.direction = D.Normalize(e.subtract(this.position)), this.direction; } /** * Returns the light rotation in euler definition. * @returns the x y z rotation in local space. */ getRotation() { this.direction.normalize(); const e = D.Cross(this.direction, bl.Y), t = D.Cross(e, this.direction); return D.RotationFromAxis(e, t, this.direction); } /** * Returns whether or not the shadow generation require a cube texture or a 2d texture. * @returns true if a cube texture needs to be use */ needCube() { return !1; } /** * Detects if the projection matrix requires to be recomputed this frame. * @returns true if it requires to be recomputed otherwise, false. */ needProjectionMatrixCompute() { return this._needProjectionMatrixCompute; } /** * Forces the shadow generator to recompute the projection matrix even if position and direction did not changed. */ forceProjectionMatrixCompute() { this._needProjectionMatrixCompute = !0; } /** @internal */ _initCache() { super._initCache(), this._cache.position = D.Zero(); } /** @internal */ _isSynchronized() { return !!this._cache.position.equals(this.position); } /** * Computes the world matrix of the node * @param force defines if the cache version should be invalidated forcing the world matrix to be created from scratch * @returns the world matrix */ computeWorldMatrix(e) { return !e && this.isSynchronized() ? (this._currentRenderId = this.getScene().getRenderId(), this._worldMatrix) : (this._updateCache(), this._cache.position.copyFrom(this.position), this._worldMatrix || (this._worldMatrix = Ae.Identity()), Ae.TranslationToRef(this.position.x, this.position.y, this.position.z, this._worldMatrix), this.parent && this.parent.getWorldMatrix && (this._worldMatrix.multiplyToRef(this.parent.getWorldMatrix(), this._worldMatrix), this._markSyncedWithParent()), this._worldMatrixDeterminantIsDirty = !0, this._worldMatrix); } /** * Gets the minZ used for shadow according to both the scene and the light. * @param activeCamera The camera we are returning the min for * @returns the depth min z */ getDepthMinZ(e) { return this.shadowMinZ !== void 0 ? 
this.shadowMinZ : e.minZ; } /** * Gets the maxZ used for shadow according to both the scene and the light. * @param activeCamera The camera we are returning the max for * @returns the depth max z */ getDepthMaxZ(e) { return this.shadowMaxZ !== void 0 ? this.shadowMaxZ : e.maxZ; } /** * Sets the shadow projection matrix in parameter to the generated projection matrix. * @param matrix The matrix to updated with the projection information * @param viewMatrix The transform matrix of the light * @param renderList The list of mesh to render in the map * @returns The current light */ setShadowProjectionMatrix(e, t, i) { return this.customProjectionMatrixBuilder ? this.customProjectionMatrixBuilder(t, i, e) : this._setDefaultShadowProjectionMatrix(e, t, i), this; } /** @internal */ _syncParentEnabledState() { super._syncParentEnabledState(), (!this.parent || !this.parent.getWorldMatrix) && (this.transformedPosition = null, this.transformedDirection = null); } /** * Returns the view matrix. * @param faceIndex The index of the face for which we want to extract the view matrix. Only used for point light types. * @returns The view matrix. Can be null, if a view matrix cannot be defined for the type of light considered (as for a hemispherical light, for example). */ getViewMatrix(e) { const t = de.Vector3[0]; let i = this.position; this.computeTransformedInformation() && (i = this.transformedPosition), D.NormalizeToRef(this.getShadowDirection(e), t), Math.abs(D.Dot(t, D.Up())) === 1 && (t.z = 1e-13); const r = de.Vector3[1]; return i.addToRef(t, r), Ae.LookAtLHToRef(i, r, D.Up(), this._viewMatrix), this._viewMatrix; } /** * Returns the projection matrix. * Note that viewMatrix and renderList are optional and are only used by lights that calculate the projection matrix from a list of meshes (e.g. directional lights with automatic extents calculation). * @param viewMatrix The view transform matrix of the light (optional). * @param renderList The list of meshes to take into account when calculating the projection matrix (optional). * @returns The projection matrix. Can be null, if a projection matrix cannot be defined for the type of light considered (as for a hemispherical light, for example). */ getProjectionMatrix(e, t) { return this.setShadowProjectionMatrix(this._projectionMatrix, e ?? this._viewMatrix, t ?? []), this._projectionMatrix; } } F([ oo() ], b5.prototype, "position", null); F([ oo() ], b5.prototype, "direction", null); F([ W() ], b5.prototype, "shadowMinZ", null); F([ W() ], b5.prototype, "shadowMaxZ", null); In.AddNodeConstructor("Light_Type_1", (c, e) => () => new Pd(c, D.Zero(), e)); class Pd extends b5 { /** * Fix frustum size for the shadow generation. This is disabled if the value is 0. */ get shadowFrustumSize() { return this._shadowFrustumSize; } /** * Specifies a fix frustum size for the shadow generation. */ set shadowFrustumSize(e) { this._shadowFrustumSize = e, this.forceProjectionMatrixCompute(); } /** * Gets the shadow projection scale against the optimal computed one. * 0.1 by default which means that the projection window is increase by 10% from the optimal size. * This does not impact in fixed frustum size (shadowFrustumSize being set) */ get shadowOrthoScale() { return this._shadowOrthoScale; } /** * Sets the shadow projection scale against the optimal computed one. * 0.1 by default which means that the projection window is increase by 10% from the optimal size. 
* This does not impact in fixed frustum size (shadowFrustumSize being set) */ set shadowOrthoScale(e) { this._shadowOrthoScale = e, this.forceProjectionMatrixCompute(); } /** * Gets or sets the orthoLeft property used to build the light frustum */ get orthoLeft() { return this._orthoLeft; } set orthoLeft(e) { this._orthoLeft = e; } /** * Gets or sets the orthoRight property used to build the light frustum */ get orthoRight() { return this._orthoRight; } set orthoRight(e) { this._orthoRight = e; } /** * Gets or sets the orthoTop property used to build the light frustum */ get orthoTop() { return this._orthoTop; } set orthoTop(e) { this._orthoTop = e; } /** * Gets or sets the orthoBottom property used to build the light frustum */ get orthoBottom() { return this._orthoBottom; } set orthoBottom(e) { this._orthoBottom = e; } /** * Creates a DirectionalLight object in the scene, oriented towards the passed direction (Vector3). * The directional light is emitted from everywhere in the given direction. * It can cast shadows. * Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/lights_introduction * @param name The friendly name of the light * @param direction The direction of the light * @param scene The scene the light belongs to */ constructor(e, t, i) { super(e, i), this._shadowFrustumSize = 0, this._shadowOrthoScale = 0.1, this.autoUpdateExtends = !0, this.autoCalcShadowZBounds = !1, this._orthoLeft = Number.MAX_VALUE, this._orthoRight = Number.MIN_VALUE, this._orthoTop = Number.MIN_VALUE, this._orthoBottom = Number.MAX_VALUE, this.position = t.scale(-1), this.direction = t; } /** * Returns the string "DirectionalLight". * @returns The class name */ getClassName() { return "DirectionalLight"; } /** * Returns the integer 1. * @returns The light Type id as a constant defines in Light.LIGHTTYPEID_x */ getTypeID() { return hs.LIGHTTYPEID_DIRECTIONALLIGHT; } /** * Sets the passed matrix "matrix" as projection matrix for the shadows cast by the light according to the passed view matrix. * Returns the DirectionalLight Shadow projection matrix. * @param matrix * @param viewMatrix * @param renderList */ _setDefaultShadowProjectionMatrix(e, t, i) { this.shadowFrustumSize > 0 ? this._setDefaultFixedFrustumShadowProjectionMatrix(e) : this._setDefaultAutoExtendShadowProjectionMatrix(e, t, i); } /** * Sets the passed matrix "matrix" as fixed frustum projection matrix for the shadows cast by the light according to the passed view matrix. * Returns the DirectionalLight Shadow projection matrix. * @param matrix */ _setDefaultFixedFrustumShadowProjectionMatrix(e) { const t = this.getScene().activeCamera; t && Ae.OrthoLHToRef(this.shadowFrustumSize, this.shadowFrustumSize, this.shadowMinZ !== void 0 ? this.shadowMinZ : t.minZ, this.shadowMaxZ !== void 0 ? this.shadowMaxZ : t.maxZ, e, this.getScene().getEngine().isNDCHalfZRange); } /** * Sets the passed matrix "matrix" as auto extend projection matrix for the shadows cast by the light according to the passed view matrix. * Returns the DirectionalLight Shadow projection matrix. 
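* A hedged usage sketch of the auto-extend path described above (it runs when
* shadowFrustumSize is 0): the ortho frustum is refitted to the render list while
* autoUpdateExtends is true. Public Babylon.js names are assumed to correspond to
* the minified identifiers in this bundle; `scene`, `mesh` and `ground` are placeholders.
*   const sun = new BABYLON.DirectionalLight("sun", new BABYLON.Vector3(-1, -2, -1), scene);
*   sun.autoUpdateExtends = true;       // refit the ortho bounds to the shadow casters
*   sun.autoCalcShadowZBounds = true;   // also derive shadowMinZ / shadowMaxZ from them
*   sun.shadowOrthoScale = 0.1;         // pad the fitted frustum by 10%
*   const generator = new BABYLON.ShadowGenerator(1024, sun);
*   generator.addShadowCaster(mesh);
*   ground.receiveShadows = true;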
* @param matrix * @param viewMatrix * @param renderList */ _setDefaultAutoExtendShadowProjectionMatrix(e, t, i) { const r = this.getScene().activeCamera; if (!r) return; if (this.autoUpdateExtends || this._orthoLeft === Number.MAX_VALUE) { const u = D.Zero(); this._orthoLeft = Number.MAX_VALUE, this._orthoRight = -Number.MAX_VALUE, this._orthoTop = -Number.MAX_VALUE, this._orthoBottom = Number.MAX_VALUE; let h = Number.MAX_VALUE, d = -Number.MAX_VALUE; for (let f = 0; f < i.length; f++) { const p = i[f]; if (!p) continue; const _ = p.getBoundingInfo().boundingBox; for (let v = 0; v < _.vectorsWorld.length; v++) D.TransformCoordinatesToRef(_.vectorsWorld[v], t, u), u.x < this._orthoLeft && (this._orthoLeft = u.x), u.y < this._orthoBottom && (this._orthoBottom = u.y), u.x > this._orthoRight && (this._orthoRight = u.x), u.y > this._orthoTop && (this._orthoTop = u.y), this.autoCalcShadowZBounds && (u.z < h && (h = u.z), u.z > d && (d = u.z)); } this.autoCalcShadowZBounds && (this._shadowMinZ = h, this._shadowMaxZ = d); } const s = this._orthoRight - this._orthoLeft, n = this._orthoTop - this._orthoBottom, a = this.shadowMinZ !== void 0 ? this.shadowMinZ : r.minZ, l = this.shadowMaxZ !== void 0 ? this.shadowMaxZ : r.maxZ, o = this.getScene().getEngine().useReverseDepthBuffer; Ae.OrthoOffCenterLHToRef(this._orthoLeft - s * this.shadowOrthoScale, this._orthoRight + s * this.shadowOrthoScale, this._orthoBottom - n * this.shadowOrthoScale, this._orthoTop + n * this.shadowOrthoScale, o ? l : a, o ? a : l, e, this.getScene().getEngine().isNDCHalfZRange); } _buildUniformLayout() { this._uniformBuffer.addUniform("vLightData", 4), this._uniformBuffer.addUniform("vLightDiffuse", 4), this._uniformBuffer.addUniform("vLightSpecular", 4), this._uniformBuffer.addUniform("shadowsInfo", 3), this._uniformBuffer.addUniform("depthValues", 2), this._uniformBuffer.create(); } /** * Sets the passed Effect object with the DirectionalLight transformed position (or position if not parented) and the passed name. * @param effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The directional light */ transferToEffect(e, t) { return this.computeTransformedInformation() ? (this._uniformBuffer.updateFloat4("vLightData", this.transformedDirection.x, this.transformedDirection.y, this.transformedDirection.z, 1, t), this) : (this._uniformBuffer.updateFloat4("vLightData", this.direction.x, this.direction.y, this.direction.z, 1, t), this); } transferToNodeMaterialEffect(e, t) { return this.computeTransformedInformation() ? (e.setFloat3(t, this.transformedDirection.x, this.transformedDirection.y, this.transformedDirection.z), this) : (e.setFloat3(t, this.direction.x, this.direction.y, this.direction.z), this); } /** * Gets the minZ used for shadow according to both the scene and the light. * * Values are fixed on directional lights as it relies on an ortho projection hence the need to convert being * -1 and 1 to 0 and 1 doing (depth + min) / (min + max) -> (depth + 1) / (1 + 1) -> (depth * 0.5) + 0.5. * (when not using reverse depth buffer / NDC half Z range) * @param activeCamera The camera we are returning the min for * @returns the depth min z */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getDepthMinZ(e) { const t = this._scene.getEngine(); return !t.useReverseDepthBuffer && t.isNDCHalfZRange ? 0 : 1; } /** * Gets the maxZ used for shadow according to both the scene and the light. 
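* @example
* // Hedged sketch (assumes `sun` and a `mesh` exist; BABYLON.ShadowGenerator is the public API
* // that consumes these depth values and is not defined in this excerpt).
* sun.shadowMinZ = 1;
* sun.shadowMaxZ = 60; // used instead of the active camera's minZ/maxZ when building the ortho projection
* const shadows = new BABYLON.ShadowGenerator(1024, sun);
* shadows.addShadowCaster(mesh);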
* * Values are fixed on directional lights as it relies on an ortho projection hence the need to convert being * -1 and 1 to 0 and 1 doing (depth + min) / (min + max) -> (depth + 1) / (1 + 1) -> (depth * 0.5) + 0.5. * (when not using reverse depth buffer / NDC half Z range) * @param activeCamera The camera we are returning the max for * @returns the depth max z */ // eslint-disable-next-line @typescript-eslint/no-unused-vars getDepthMaxZ(e) { const t = this._scene.getEngine(); return t.useReverseDepthBuffer && t.isNDCHalfZRange ? 0 : 1; } /** * Prepares the list of defines specific to the light type. * @param defines the list of defines * @param lightIndex defines the index of the light for the effect */ prepareLightSpecificDefines(e, t) { e["DIRLIGHT" + t] = !0; } } F([ W() ], Pd.prototype, "shadowFrustumSize", null); F([ W() ], Pd.prototype, "shadowOrthoScale", null); F([ W() ], Pd.prototype, "autoUpdateExtends", void 0); F([ W() ], Pd.prototype, "autoCalcShadowZBounds", void 0); F([ W("orthoLeft") ], Pd.prototype, "_orthoLeft", void 0); F([ W("orthoRight") ], Pd.prototype, "_orthoRight", void 0); F([ W("orthoTop") ], Pd.prototype, "_orthoTop", void 0); F([ W("orthoBottom") ], Pd.prototype, "_orthoBottom", void 0); function cL(c, e = {}, t) { e.diameter || (e.diameter = 1), e.segments || (e.segments = 16); const i = Rd("", { slice: 0.5, diameter: e.diameter, segments: e.segments }, t), r = Cw("", { radius: e.diameter / 2, tessellation: e.segments * 3 + (4 - e.segments) }, t); r.rotation.x = -Math.PI / 2, r.parent = i; const s = ke.MergeMeshes([r, i], !0); return s.name = c, s; } const Jpe = { // eslint-disable-next-line @typescript-eslint/naming-convention CreateHemisphere: cL }; ke.CreateHemisphere = (c, e, t, i) => cL(c, { segments: e, diameter: t }, i); In.AddNodeConstructor("Light_Type_2", (c, e) => () => new td(c, D.Zero(), D.Zero(), 0, 0, e)); class td extends b5 { /** * Gets the cone angle of the spot light in Radians. */ get angle() { return this._angle; } /** * Sets the cone angle of the spot light in Radians. */ set angle(e) { this._angle = e, this._cosHalfAngle = Math.cos(e * 0.5), this._projectionTextureProjectionLightDirty = !0, this.forceProjectionMatrixCompute(), this._computeAngleValues(); } /** * Only used in gltf falloff mode, this defines the angle where * the directional falloff will start before cutting at angle which could be seen * as outer angle. */ get innerAngle() { return this._innerAngle; } /** * Only used in gltf falloff mode, this defines the angle where * the directional falloff will start before cutting at angle which could be seen * as outer angle. */ set innerAngle(e) { this._innerAngle = e, this._computeAngleValues(); } /** * Allows scaling the angle of the light for shadow generation only. */ get shadowAngleScale() { return this._shadowAngleScale; } /** * Allows scaling the angle of the light for shadow generation only. */ set shadowAngleScale(e) { this._shadowAngleScale = e, this.forceProjectionMatrixCompute(); } /** * Allows reading the projection texture */ get projectionTextureMatrix() { return this._projectionTextureMatrix; } /** * Gets the near clip of the Spotlight for texture projection. */ get projectionTextureLightNear() { return this._projectionTextureLightNear; } /** * Sets the near clip of the Spotlight for texture projection. */ set projectionTextureLightNear(e) { this._projectionTextureLightNear = e, this._projectionTextureProjectionLightDirty = !0; } /** * Gets the far clip of the Spotlight for texture projection. 
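* @example
* // Hedged sketch (assumes `scene` and a BABYLON.SpotLight `spot`; "flashlight.png" is a placeholder URL).
* spot.projectionTexture = new BABYLON.Texture("flashlight.png", scene);
* spot.projectionTextureLightNear = 0.1;
* spot.projectionTextureLightFar = 50; // the texture is projected between these two clip distances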
*/ get projectionTextureLightFar() { return this._projectionTextureLightFar; } /** * Sets the far clip of the Spotlight for texture projection. */ set projectionTextureLightFar(e) { this._projectionTextureLightFar = e, this._projectionTextureProjectionLightDirty = !0; } /** * Gets the Up vector of the Spotlight for texture projection. */ get projectionTextureUpDirection() { return this._projectionTextureUpDirection; } /** * Sets the Up vector of the Spotlight for texture projection. */ set projectionTextureUpDirection(e) { this._projectionTextureUpDirection = e, this._projectionTextureProjectionLightDirty = !0; } /** * Gets the projection texture of the light. */ get projectionTexture() { return this._projectionTexture; } /** * Sets the projection texture of the light. */ set projectionTexture(e) { this._projectionTexture !== e && (this._projectionTexture = e, this._projectionTextureDirty = !0, this._projectionTexture && !this._projectionTexture.isReady() && (td._IsProceduralTexture(this._projectionTexture) ? this._projectionTexture.getEffect().executeWhenCompiled(() => { this._markMeshesAsLightDirty(); }) : td._IsTexture(this._projectionTexture) && this._projectionTexture.onLoadObservable.addOnce(() => { this._markMeshesAsLightDirty(); }))); } static _IsProceduralTexture(e) { return e.onGeneratedObservable !== void 0; } static _IsTexture(e) { return e.onLoadObservable !== void 0; } /** * Gets or sets the light projection matrix as used by the projection texture */ get projectionTextureProjectionLightMatrix() { return this._projectionTextureProjectionLightMatrix; } set projectionTextureProjectionLightMatrix(e) { this._projectionTextureProjectionLightMatrix = e, this._projectionTextureProjectionLightDirty = !1, this._projectionTextureDirty = !0; } /** * Creates a SpotLight object in the scene. A spot light is a simply light oriented cone. * It can cast shadows. * Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/lights_introduction * @param name The light friendly name * @param position The position of the spot light in the scene * @param direction The direction of the light in the scene * @param angle The cone angle of the light in Radians * @param exponent The light decay speed with the distance from the emission spot * @param scene The scene the lights belongs to */ constructor(e, t, i, r, s, n) { super(e, n), this._innerAngle = 0, this._projectionTextureMatrix = Ae.Zero(), this._projectionTextureLightNear = 1e-6, this._projectionTextureLightFar = 1e3, this._projectionTextureUpDirection = D.Up(), this._projectionTextureViewLightDirty = !0, this._projectionTextureProjectionLightDirty = !0, this._projectionTextureDirty = !0, this._projectionTextureViewTargetVector = D.Zero(), this._projectionTextureViewLightMatrix = Ae.Zero(), this._projectionTextureProjectionLightMatrix = Ae.Zero(), this._projectionTextureScalingMatrix = Ae.FromValues(0.5, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0.5, 0, 0.5, 0.5, 0.5, 1), this.position = t, this.direction = i, this.angle = r, this.exponent = s; } /** * Returns the string "SpotLight". * @returns the class name */ getClassName() { return "SpotLight"; } /** * Returns the integer 2. * @returns The light Type id as a constant defines in Light.LIGHTTYPEID_x */ getTypeID() { return hs.LIGHTTYPEID_SPOTLIGHT; } /** * Overrides the direction setter to recompute the projection texture view light Matrix. 
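* @example
* // Hedged sketch of the constructor documented above (assumes an existing `scene`).
* const spot = new BABYLON.SpotLight("spot", new BABYLON.Vector3(0, 4, 0),
*     new BABYLON.Vector3(0, -1, 0), Math.PI / 3, 2, scene);
* spot.direction = new BABYLON.Vector3(0.3, -1, 0); // reassigning direction (or position) also marks
*                                                    // the projection texture view matrix dirty, per the overrides below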
* @param value */ _setDirection(e) { super._setDirection(e), this._projectionTextureViewLightDirty = !0; } /** * Overrides the position setter to recompute the projection texture view light Matrix. * @param value */ _setPosition(e) { super._setPosition(e), this._projectionTextureViewLightDirty = !0; } /** * Sets the passed matrix "matrix" as perspective projection matrix for the shadows and the passed view matrix with the fov equal to the SpotLight angle and and aspect ratio of 1.0. * Returns the SpotLight. * @param matrix * @param viewMatrix * @param renderList */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _setDefaultShadowProjectionMatrix(e, t, i) { const r = this.getScene().activeCamera; if (!r) return; this._shadowAngleScale = this._shadowAngleScale || 1; const s = this._shadowAngleScale * this._angle, n = this.shadowMinZ !== void 0 ? this.shadowMinZ : r.minZ, a = this.shadowMaxZ !== void 0 ? this.shadowMaxZ : r.maxZ, l = this.getScene().getEngine().useReverseDepthBuffer; Ae.PerspectiveFovLHToRef(s, 1, l ? a : n, l ? n : a, e, !0, this._scene.getEngine().isNDCHalfZRange, void 0, l); } _computeProjectionTextureViewLightMatrix() { this._projectionTextureViewLightDirty = !1, this._projectionTextureDirty = !0, this.getAbsolutePosition().addToRef(this.direction, this._projectionTextureViewTargetVector), Ae.LookAtLHToRef(this.getAbsolutePosition(), this._projectionTextureViewTargetVector, this._projectionTextureUpDirection, this._projectionTextureViewLightMatrix); } _computeProjectionTextureProjectionLightMatrix() { this._projectionTextureProjectionLightDirty = !1, this._projectionTextureDirty = !0; const e = this.projectionTextureLightFar, t = this.projectionTextureLightNear, i = e / (e - t), r = -i * t, s = 1 / Math.tan(this._angle / 2), n = 1; Ae.FromValuesToRef(s / n, 0, 0, 0, 0, s, 0, 0, 0, 0, i, 1, 0, 0, r, 0, this._projectionTextureProjectionLightMatrix); } /** * Main function for light texture projection matrix computing. */ _computeProjectionTextureMatrix() { if (this._projectionTextureDirty = !1, this._projectionTextureViewLightMatrix.multiplyToRef(this._projectionTextureProjectionLightMatrix, this._projectionTextureMatrix), this._projectionTexture instanceof De) { const e = this._projectionTexture.uScale / 2, t = this._projectionTexture.vScale / 2; Ae.FromValuesToRef(e, 0, 0, 0, 0, t, 0, 0, 0, 0, 0.5, 0, 0.5, 0.5, 0.5, 1, this._projectionTextureScalingMatrix); } this._projectionTextureMatrix.multiplyToRef(this._projectionTextureScalingMatrix, this._projectionTextureMatrix); } _buildUniformLayout() { this._uniformBuffer.addUniform("vLightData", 4), this._uniformBuffer.addUniform("vLightDiffuse", 4), this._uniformBuffer.addUniform("vLightSpecular", 4), this._uniformBuffer.addUniform("vLightDirection", 3), this._uniformBuffer.addUniform("vLightFalloff", 4), this._uniformBuffer.addUniform("shadowsInfo", 3), this._uniformBuffer.addUniform("depthValues", 2), this._uniformBuffer.create(); } _computeAngleValues() { this._lightAngleScale = 1 / Math.max(1e-3, Math.cos(this._innerAngle * 0.5) - this._cosHalfAngle), this._lightAngleOffset = -this._cosHalfAngle * this._lightAngleScale; } /** * Sets the passed Effect "effect" with the Light textures. 
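* @example
* // Hedged sketch of the cone/falloff values bound by the surrounding methods (assumes `spot` exists;
* // `falloffType` and Light.FALLOFF_GLTF are public Light API names, not shown in this excerpt).
* spot.angle = Math.PI / 3;      // outer cone, in radians
* spot.innerAngle = Math.PI / 6; // where the directional falloff starts (gltf falloff mode only)
* spot.falloffType = BABYLON.Light.FALLOFF_GLTF;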
* @param effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The light */ transferTexturesToEffect(e, t) { return this.projectionTexture && this.projectionTexture.isReady() && (this._projectionTextureViewLightDirty && this._computeProjectionTextureViewLightMatrix(), this._projectionTextureProjectionLightDirty && this._computeProjectionTextureProjectionLightMatrix(), this._projectionTextureDirty && this._computeProjectionTextureMatrix(), e.setMatrix("textureProjectionMatrix" + t, this._projectionTextureMatrix), e.setTexture("projectionLightSampler" + t, this.projectionTexture)), this; } /** * Sets the passed Effect object with the SpotLight transformed position (or position if not parented) and normalized direction. * @param effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The spot light */ transferToEffect(e, t) { let i; return this.computeTransformedInformation() ? (this._uniformBuffer.updateFloat4("vLightData", this.transformedPosition.x, this.transformedPosition.y, this.transformedPosition.z, this.exponent, t), i = D.Normalize(this.transformedDirection)) : (this._uniformBuffer.updateFloat4("vLightData", this.position.x, this.position.y, this.position.z, this.exponent, t), i = D.Normalize(this.direction)), this._uniformBuffer.updateFloat4("vLightDirection", i.x, i.y, i.z, this._cosHalfAngle, t), this._uniformBuffer.updateFloat4("vLightFalloff", this.range, this._inverseSquaredRange, this._lightAngleScale, this._lightAngleOffset, t), this; } transferToNodeMaterialEffect(e, t) { let i; return this.computeTransformedInformation() ? i = D.Normalize(this.transformedDirection) : i = D.Normalize(this.direction), this.getScene().useRightHandedSystem ? e.setFloat3(t, -i.x, -i.y, -i.z) : e.setFloat3(t, i.x, i.y, i.z), this; } /** * Disposes the light and the associated resources. */ dispose() { super.dispose(), this._projectionTexture && this._projectionTexture.dispose(); } /** * Gets the minZ used for shadow according to both the scene and the light. * @param activeCamera The camera we are returning the min for * @returns the depth min z */ getDepthMinZ(e) { const t = this._scene.getEngine(), i = this.shadowMinZ !== void 0 ? this.shadowMinZ : e.minZ; return t.useReverseDepthBuffer && t.isNDCHalfZRange ? i : this._scene.getEngine().isNDCHalfZRange ? 0 : i; } /** * Gets the maxZ used for shadow according to both the scene and the light. * @param activeCamera The camera we are returning the max for * @returns the depth max z */ getDepthMaxZ(e) { const t = this._scene.getEngine(), i = this.shadowMaxZ !== void 0 ? this.shadowMaxZ : e.maxZ; return t.useReverseDepthBuffer && t.isNDCHalfZRange ? 0 : i; } /** * Prepares the list of defines specific to the light type. 
* @param defines the list of defines * @param lightIndex defines the index of the light for the effect */ prepareLightSpecificDefines(e, t) { e["SPOTLIGHT" + t] = !0, e["PROJECTEDLIGHTTEXTURE" + t] = !!(this.projectionTexture && this.projectionTexture.isReady()); } } F([ W() ], td.prototype, "angle", null); F([ W() ], td.prototype, "innerAngle", null); F([ W() ], td.prototype, "shadowAngleScale", null); F([ W() ], td.prototype, "exponent", void 0); F([ W() ], td.prototype, "projectionTextureLightNear", null); F([ W() ], td.prototype, "projectionTextureLightFar", null); F([ W() ], td.prototype, "projectionTextureUpDirection", null); F([ er("projectedLightTexture") ], td.prototype, "_projectionTexture", void 0); class E4 extends Do { /** * Creates a LightGizmo * @param gizmoLayer The utility layer the gizmo will be added to */ constructor(e = bn.DefaultUtilityLayer) { super(e), this._cachedPosition = new D(), this._cachedForward = new D(0, 0, 1), this._pointerObserver = null, this.onClickedObservable = new Fe(), this._light = null, this.attachedMesh = new xr("", this.gizmoLayer.utilityLayerScene), this._attachedMeshParent = new xi("parent", this.gizmoLayer.utilityLayerScene), this.attachedMesh.parent = this._attachedMeshParent, this._material = new Dt("light", this.gizmoLayer.utilityLayerScene), this._material.diffuseColor = new ze(0.5, 0.5, 0.5), this._material.specularColor = new ze(0.1, 0.1, 0.1), this._pointerObserver = e.utilityLayerScene.onPointerObservable.add((t) => { this._light && (this._isHovered = !!(t.pickInfo && this._rootMesh.getChildMeshes().indexOf(t.pickInfo.pickedMesh) != -1), this._isHovered && t.event.button === 0 && this.onClickedObservable.notifyObservers(this._light)); }, si.POINTERDOWN); } /** * Override attachedNode because lightgizmo only support attached mesh * It will return the attached mesh (if any) and setting an attached node will log * a warning */ get attachedNode() { return this.attachedMesh; } set attachedNode(e) { Ce.Warn("Nodes cannot be attached to LightGizmo. Attach to a mesh instead."); } /** * The light that the gizmo is attached to */ set light(e) { if (this._light = e, e) { this._lightMesh && this._lightMesh.dispose(), e instanceof vg ? this._lightMesh = E4._CreateHemisphericLightMesh(this.gizmoLayer.utilityLayerScene) : e instanceof Pd ? this._lightMesh = E4._CreateDirectionalLightMesh(this.gizmoLayer.utilityLayerScene) : e instanceof td ? 
this._lightMesh = E4._CreateSpotLightMesh(this.gizmoLayer.utilityLayerScene) : this._lightMesh = E4._CreatePointLightMesh(this.gizmoLayer.utilityLayerScene), this._lightMesh.getChildMeshes(!1).forEach((i) => { i.material = this._material; }), this._lightMesh.parent = this._rootMesh; const t = this.gizmoLayer._getSharedGizmoLight(); if (t.includedOnlyMeshes = t.includedOnlyMeshes.concat(this._lightMesh.getChildMeshes(!1)), this._lightMesh.rotationQuaternion = new Ze(), this.attachedMesh.reservedDataStore || (this.attachedMesh.reservedDataStore = {}), this.attachedMesh.reservedDataStore.lightGizmo = this, e.parent && this._attachedMeshParent.freezeWorldMatrix(e.parent.getWorldMatrix()), e.position && (this.attachedMesh.position.copyFrom(e.position), this.attachedMesh.computeWorldMatrix(!0), this._cachedPosition.copyFrom(this.attachedMesh.position)), e.direction) { this.attachedMesh.setDirection(e.direction), this.attachedMesh.computeWorldMatrix(!0); const i = this._getMeshForward(); this._cachedForward.copyFrom(i); } this._update(); } } get light() { return this._light; } /** * Gets the material used to render the light gizmo */ get material() { return this._material; } /** * @internal * returns mesh forward */ _getMeshForward() { let e = this.attachedMesh.forward; return this.attachedMesh.getScene().useRightHandedSystem && (e.negateToRef(de.Vector3[0]), e = de.Vector3[0]), e; } /** * @internal * Updates the gizmo to match the attached mesh's position/rotation */ _update() { if (super._update(), !!this._light) { if (this._light.parent && this._attachedMeshParent.freezeWorldMatrix(this._light.parent.getWorldMatrix()), this._light.position) if (this.attachedMesh.position.equals(this._cachedPosition)) this.attachedMesh.position.copyFrom(this._light.position), this.attachedMesh.computeWorldMatrix(!0), this._cachedPosition.copyFrom(this.attachedMesh.position); else { const e = this.attachedMesh.position; this._light.position = new D(e.x, e.y, e.z), this._cachedPosition.copyFrom(this.attachedMesh.position); } if (this._light.direction) { const e = this._getMeshForward(); if (D.DistanceSquared(e, this._cachedForward) > 1e-4) { const t = e; this._light.direction = new D(t.x, t.y, t.z), this._cachedForward.copyFrom(e); } else D.DistanceSquared(e, this._light.direction) > 1e-4 && (this.attachedMesh.setDirection(this._light.direction), this.attachedMesh.computeWorldMatrix(!0), this._cachedForward.copyFrom(e)); } } } /** * Disposes of the light gizmo */ dispose() { this.onClickedObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this._material.dispose(), super.dispose(), this._attachedMeshParent.dispose(); } static _CreateHemisphericLightMesh(e) { const t = new ke("hemisphereLight", e), i = cL(t.name, { segments: 10, diameter: 1 }, e); i.position.z = -0.15, i.rotation.x = Math.PI / 2, i.parent = t; const r = this._CreateLightLines(3, e); return r.parent = t, t.scaling.scaleInPlace(E4._Scale), t.rotation.x = Math.PI / 2, t; } static _CreatePointLightMesh(e) { const t = new ke("pointLight", e), i = Rd(t.name, { segments: 10, diameter: 1 }, e); i.rotation.x = Math.PI / 2, i.parent = t; const r = this._CreateLightLines(5, e); return r.parent = t, t.scaling.scaleInPlace(E4._Scale), t.rotation.x = Math.PI / 2, t; } static _CreateSpotLightMesh(e) { const t = new ke("spotLight", e), i = Rd(t.name, { segments: 10, diameter: 1 }, e); i.parent = t; const r = cL(t.name, { segments: 10, diameter: 2 }, e); r.parent = t, r.rotation.x = -Math.PI / 2; const s = 
this._CreateLightLines(2, e); return s.parent = t, t.scaling.scaleInPlace(E4._Scale), t.rotation.x = Math.PI / 2, t; } static _CreateDirectionalLightMesh(e) { const t = new ke("directionalLight", e), i = new ke(t.name, e); i.parent = t; const r = Rd(t.name, { diameter: 1.2, segments: 10 }, e); r.parent = i; const s = Hf(t.name, { updatable: !1, height: 6, diameterTop: 0.3, diameterBottom: 0.3, tessellation: 6, subdivisions: 1 }, e); s.parent = i; let n = s.clone(t.name); n.scaling.y = 0.5, n.position.x += 1.25; let a = s.clone(t.name); a.scaling.y = 0.5, a.position.x += -1.25; const l = Hf(t.name, { updatable: !1, height: 1, diameterTop: 0, diameterBottom: 0.6, tessellation: 6, subdivisions: 1 }, e); return l.position.y += 3, l.parent = i, n = l.clone(t.name), n.position.y = 1.5, n.position.x += 1.25, a = l.clone(t.name), a.position.y = 1.5, a.position.x += -1.25, i.scaling.scaleInPlace(E4._Scale), i.rotation.z = Math.PI / 2, i.rotation.y = Math.PI / 2, t; } } E4._Scale = 7e-3; E4._CreateLightLines = (c, e) => { const i = new ke("root", e); i.rotation.x = Math.PI / 2; const r = new ke("linePivot", e); r.parent = i; const s = Hf("line", { updatable: !1, height: 2, diameterTop: 0.2, diameterBottom: 0.3, tessellation: 6, subdivisions: 1 }, e); if (s.position.y = s.scaling.y / 2 + 1.2, s.parent = r, c < 2) return r; for (let a = 0; a < 4; a++) { const l = r.clone("lineParentClone"); l.rotation.z = Math.PI / 4, l.rotation.y = Math.PI / 2 + Math.PI / 2 * a, l.getChildMeshes()[0].scaling.y = 0.5, l.getChildMeshes()[0].scaling.x = l.getChildMeshes()[0].scaling.z = 0.8, l.getChildMeshes()[0].position.y = l.getChildMeshes()[0].scaling.y / 2 + 1.2; } if (c < 3) return i; for (let a = 0; a < 4; a++) { const l = r.clone("linePivotClone"); l.rotation.z = Math.PI / 2, l.rotation.y = Math.PI / 2 * a; } if (c < 4) return i; for (let a = 0; a < 4; a++) { const l = r.clone("linePivotClone"); l.rotation.z = Math.PI + Math.PI / 4, l.rotation.y = Math.PI / 2 + Math.PI / 2 * a, l.getChildMeshes()[0].scaling.y = 0.5, l.getChildMeshes()[0].scaling.x = l.getChildMeshes()[0].scaling.z = 0.8, l.getChildMeshes()[0].position.y = l.getChildMeshes()[0].scaling.y / 2 + 1.2; } if (c < 5) return i; const n = r.clone("linePivotClone"); return n.rotation.z = Math.PI, i; }; class fO extends Do { /** * Creates a CameraGizmo * @param gizmoLayer The utility layer the gizmo will be added to */ constructor(e = bn.DefaultUtilityLayer, t) { super(e), this._pointerObserver = null, this.onClickedObservable = new Fe(), this._camera = null, this._invProjection = new Ae(), this._material = new Dt("cameraGizmoMaterial", this.gizmoLayer.utilityLayerScene), this._gizmoColor = t, this._material.diffuseColor = t ?? 
new ze(0.5, 0.5, 0.5), this._material.specularColor = new ze(0.1, 0.1, 0.1), this._pointerObserver = e.utilityLayerScene.onPointerObservable.add((i) => { this._camera && (this._isHovered = !!(i.pickInfo && this._rootMesh.getChildMeshes().indexOf(i.pickInfo.pickedMesh) != -1), this._isHovered && i.event.button === 0 && this.onClickedObservable.notifyObservers(this._camera)); }, si.POINTERDOWN); } /** Gets or sets a boolean indicating if frustum lines must be rendered (true by default)) */ get displayFrustum() { return this._cameraLinesMesh.isEnabled(); } set displayFrustum(e) { this._cameraLinesMesh.setEnabled(e); } /** * The camera that the gizmo is attached to */ set camera(e) { var t, i; if (this._camera = e, this.attachedNode = e, e) { this._cameraMesh && this._cameraMesh.dispose(), this._cameraLinesMesh && this._cameraLinesMesh.dispose(), this._cameraMesh = fO._CreateCameraMesh(this.gizmoLayer.utilityLayerScene); const r = (i = (t = this._gizmoColor) === null || t === void 0 ? void 0 : t.toColor4(1)) !== null && i !== void 0 ? i : new Et(1, 1, 1, 1); this._cameraLinesMesh = fO._CreateCameraFrustum(this.gizmoLayer.utilityLayerScene, r), this._cameraMesh.getChildMeshes(!1).forEach((n) => { n.material = this._material; }), this._cameraMesh.parent = this._rootMesh, this._cameraLinesMesh.parent = this._rootMesh, this.gizmoLayer.utilityLayerScene.activeCamera && this.gizmoLayer.utilityLayerScene.activeCamera.maxZ < e.maxZ * 1.5 && (this.gizmoLayer.utilityLayerScene.activeCamera.maxZ = e.maxZ * 1.5), this.attachedNode.reservedDataStore || (this.attachedNode.reservedDataStore = {}), this.attachedNode.reservedDataStore.cameraGizmo = this; const s = this.gizmoLayer._getSharedGizmoLight(); s.includedOnlyMeshes = s.includedOnlyMeshes.concat(this._cameraMesh.getChildMeshes(!1)), this._update(); } } get camera() { return this._camera; } /** * Gets the material used to render the camera gizmo */ get material() { return this._material; } /** * @internal * Updates the gizmo to match the attached mesh's position/rotation */ _update() { super._update(), this._camera && (this._camera.getProjectionMatrix().invertToRef(this._invProjection), this._cameraLinesMesh.setPivotMatrix(this._invProjection, !1), this._cameraLinesMesh.scaling.x = 1 / this._rootMesh.scaling.x, this._cameraLinesMesh.scaling.y = 1 / this._rootMesh.scaling.y, this._cameraLinesMesh.scaling.z = 1 / this._rootMesh.scaling.z, this._cameraMesh.parent = null, this._cameraMesh.rotation.y = Math.PI * 0.5 * (this._camera.getScene().useRightHandedSystem ? 
1 : -1), this._cameraMesh.parent = this._rootMesh); } /** * Disposes of the camera gizmo */ dispose() { this.onClickedObservable.clear(), this.gizmoLayer.utilityLayerScene.onPointerObservable.remove(this._pointerObserver), this._cameraMesh && this._cameraMesh.dispose(), this._cameraLinesMesh && this._cameraLinesMesh.dispose(), this._material.dispose(), super.dispose(); } static _CreateCameraMesh(e) { const t = new ke("rootCameraGizmo", e), i = new ke(t.name, e); i.parent = t; const r = B4(t.name, { width: 1, height: 0.8, depth: 0.5 }, e); r.parent = i; const s = Hf(t.name, { height: 0.5, diameterTop: 0.8, diameterBottom: 0.8 }, e); s.parent = i, s.position.y = 0.3, s.position.x = -0.6, s.rotation.x = Math.PI * 0.5; const n = Hf(t.name, { height: 0.5, diameterTop: 0.6, diameterBottom: 0.6 }, e); n.parent = i, n.position.y = 0.5, n.position.x = 0.4, n.rotation.x = Math.PI * 0.5; const a = Hf(t.name, { height: 0.5, diameterTop: 0.5, diameterBottom: 0.5 }, e); return a.parent = i, a.position.y = 0, a.position.x = 0.6, a.rotation.z = Math.PI * 0.5, t.scaling.scaleInPlace(fO._Scale), i.position.x = -0.9, t; } static _CreateCameraFrustum(e, t) { const i = new ke("rootCameraGizmo", e), r = new ke(i.name, e); r.parent = i; for (let s = 0; s < 4; s += 2) for (let n = 0; n < 4; n += 2) { let a = Ba("lines", { points: [new D(-1 + n, -1 + s, -1), new D(-1 + n, -1 + s, 1)], colors: [t, t] }, e); a.parent = r, a.alwaysSelectAsActiveMesh = !0, a.isPickable = !1, a = Ba("lines", { points: [new D(-1, -1 + n, -1 + s), new D(1, -1 + n, -1 + s)], colors: [t, t] }, e), a.parent = r, a.alwaysSelectAsActiveMesh = !0, a.isPickable = !1, a = Ba("lines", { points: [new D(-1 + n, -1, -1 + s), new D(-1 + n, 1, -1 + s)], colors: [t, t] }, e), a.parent = r, a.alwaysSelectAsActiveMesh = !0, a.isPickable = !1; } return i; } } fO._Scale = 0.05; const e1e = "kernelBlurVaryingDeclaration", t1e = "varying vec2 sampleCoord{X};"; je.IncludesShadersStore[e1e] = t1e; const i1e = "packingFunctions", r1e = `vec4 pack(float depth) {const vec4 bit_shift=vec4(255.0*255.0*255.0,255.0*255.0,255.0,1.0);const vec4 bit_mask=vec4(0.0,1.0/255.0,1.0/255.0,1.0/255.0);vec4 res=fract(depth*bit_shift);res-=res.xxyz*bit_mask;return res;} float unpack(vec4 color) {const vec4 bit_shift=vec4(1.0/(255.0*255.0*255.0),1.0/(255.0*255.0),1.0/255.0,1.0);return dot(color,bit_shift);}`; je.IncludesShadersStore[i1e] = r1e; const s1e = "kernelBlurFragment", n1e = `#ifdef DOF factor=sampleCoC(sampleCoord{X}); computedWeight=KERNEL_WEIGHT{X}*factor;sumOfWeights+=computedWeight; #else computedWeight=KERNEL_WEIGHT{X}; #endif #ifdef PACKEDFLOAT blend+=unpack(texture2D(textureSampler,sampleCoord{X}))*computedWeight; #else blend+=texture2D(textureSampler,sampleCoord{X})*computedWeight; #endif `; je.IncludesShadersStore[s1e] = n1e; const a1e = "kernelBlurFragment2", o1e = `#ifdef DOF factor=sampleCoC(sampleCenter+delta*KERNEL_DEP_OFFSET{X});computedWeight=KERNEL_DEP_WEIGHT{X}*factor;sumOfWeights+=computedWeight; #else computedWeight=KERNEL_DEP_WEIGHT{X}; #endif #ifdef PACKEDFLOAT blend+=unpack(texture2D(textureSampler,sampleCenter+delta*KERNEL_DEP_OFFSET{X}))*computedWeight; #else blend+=texture2D(textureSampler,sampleCenter+delta*KERNEL_DEP_OFFSET{X})*computedWeight; #endif `; je.IncludesShadersStore[a1e] = o1e; const l1e = "kernelBlurPixelShader", c1e = `uniform sampler2D textureSampler;uniform vec2 delta;varying vec2 sampleCenter; #ifdef DOF uniform sampler2D circleOfConfusionSampler;float sampleCoC(in vec2 offset) {float 
coc=texture2D(circleOfConfusionSampler,offset).r;return coc; } #endif #include[0..varyingCount] #ifdef PACKEDFLOAT #include #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {float computedWeight=0.0; #ifdef PACKEDFLOAT float blend=0.; #else vec4 blend=vec4(0.); #endif #ifdef DOF float sumOfWeights=CENTER_WEIGHT; float factor=0.0; #ifdef PACKEDFLOAT blend+=unpack(texture2D(textureSampler,sampleCenter))*CENTER_WEIGHT; #else blend+=texture2D(textureSampler,sampleCenter)*CENTER_WEIGHT; #endif #endif #include[0..varyingCount] #include[0..depCount] #ifdef PACKEDFLOAT gl_FragColor=pack(blend); #else gl_FragColor=blend; #endif #ifdef DOF gl_FragColor/=sumOfWeights; #endif }`; je.ShadersStore[l1e] = c1e; const u1e = "kernelBlurVertex", h1e = "sampleCoord{X}=sampleCenter+delta*KERNEL_OFFSET{X};"; je.IncludesShadersStore[u1e] = h1e; const d1e = "kernelBlurVertexShader", f1e = `attribute vec2 position;uniform vec2 delta;varying vec2 sampleCenter; #include[0..varyingCount] const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN sampleCenter=(position*madd+madd); #include[0..varyingCount] gl_Position=vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[d1e] = f1e; class fu extends Bi { /** * Sets the length in pixels of the blur sample region */ set kernel(e) { this._idealKernel !== e && (e = Math.max(e, 1), this._idealKernel = e, this._kernel = this._nearestBestKernel(e), this._blockCompilation || this._updateParameters()); } /** * Gets the length in pixels of the blur sample region */ get kernel() { return this._idealKernel; } /** * Sets whether or not the blur needs to unpack/repack floats */ set packedFloat(e) { this._packedFloat !== e && (this._packedFloat = e, this._blockCompilation || this._updateParameters()); } /** * Gets whether or not the blur is unpacking/repacking floats */ get packedFloat() { return this._packedFloat; } /** * Gets a string identifying the name of the class * @returns "BlurPostProcess" string */ getClassName() { return "BlurPostProcess"; } /** * Creates a new instance BlurPostProcess * @param name The name of the effect. * @param direction The direction in which to blur the image. * @param kernel The size of the kernel to be used when computing the blur. eg. Size of 3 will blur the center pixel by 2 pixels surrounding it. * @param options The required width/height ratio to downsize to before computing the render pass. (Use 1.0 for full size) * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param defines * @param _blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) * @param textureFormat Format of textures used when performing the post process. 
(default: TEXTUREFORMAT_RGBA) */ constructor(e, t, i, r, s, n = De.BILINEAR_SAMPLINGMODE, a, l, o = 0, u = "", h = !1, d = 5) { super(e, "kernelBlur", ["delta", "direction"], ["circleOfConfusionSampler"], r, s, n, a, l, null, o, "kernelBlur", { varyingCount: 0, depCount: 0 }, !0, d), this._blockCompilation = h, this._packedFloat = !1, this._staticDefines = "", this._staticDefines = u, this.direction = t, this.onApplyObservable.add((f) => { this._outputTexture ? f.setFloat2("delta", 1 / this._outputTexture.width * this.direction.x, 1 / this._outputTexture.height * this.direction.y) : f.setFloat2("delta", 1 / this.width * this.direction.x, 1 / this.height * this.direction.y); }), this.kernel = i; } /** * Updates the effect with the current post process compile time values and recompiles the shader. * @param defines Define statements that should be added at the beginning of the shader. (default: null) * @param uniforms Set of uniform variables that will be passed to the shader. (default: null) * @param samplers Set of Texture2D variables that will be passed to the shader. (default: null) * @param indexParameters The index parameters to be used for babylons include syntax "#include[0..varyingCount]". (default: undefined) See usage in babylon.blurPostProcess.ts and kernelBlur.vertex.fx * @param onCompiled Called when the shader has been compiled. * @param onError Called if there is an error when compiling a shader. */ updateEffect(e = null, t = null, i = null, r, s, n) { this._updateParameters(s, n); } _updateParameters(e, t) { const i = this._kernel, r = (i - 1) / 2; let s = [], n = [], a = 0; for (let _ = 0; _ < i; _++) { const v = _ / (i - 1), C = this._gaussianWeight(v * 2 - 1); s[_] = _ - r, n[_] = C, a += C; } for (let _ = 0; _ < n.length; _++) n[_] /= a; const l = [], o = [], u = []; for (let _ = 0; _ <= r; _ += 2) { const v = Math.min(_ + 1, Math.floor(r)); if (_ === v) u.push({ o: s[_], w: n[_] }); else { const x = v === r, b = n[_] + n[v] * (x ? 0.5 : 1), S = s[_] + 1 / (1 + n[_] / n[v]); S === 0 ? (u.push({ o: s[_], w: n[_] }), u.push({ o: s[_ + 1], w: n[_ + 1] })) : (u.push({ o: S, w: b }), u.push({ o: -S, w: b })); } } for (let _ = 0; _ < u.length; _++) o[_] = u[_].o, l[_] = u[_].w; s = o, n = l; const h = this.getEngine().getCaps().maxVaryingVectors, d = Math.max(h, 0) - 1; let f = Math.min(s.length, d), p = ""; p += this._staticDefines, this._staticDefines.indexOf("DOF") != -1 && (p += `#define CENTER_WEIGHT ${this._glslFloat(n[f - 1])} `, f--); for (let _ = 0; _ < f; _++) p += `#define KERNEL_OFFSET${_} ${this._glslFloat(s[_])} `, p += `#define KERNEL_WEIGHT${_} ${this._glslFloat(n[_])} `; let m = 0; for (let _ = d; _ < s.length; _++) p += `#define KERNEL_DEP_OFFSET${m} ${this._glslFloat(s[_])} `, p += `#define KERNEL_DEP_WEIGHT${m} ${this._glslFloat(n[_])} `, m++; this.packedFloat && (p += "#define PACKEDFLOAT 1"), this._blockCompilation = !1, super.updateEffect(p, null, null, { varyingCount: f, depCount: m }, e, t); } /** * Best kernels are odd numbers that when divided by 2, their integer part is even, so 5, 9 or 13. * Other odd kernels optimize correctly but require proportionally more samples, even kernels are * possible but will produce minor visual artifacts. Since each new kernel requires a new shader we * want to minimize kernel changes, having gaps between physical kernels is helpful in that regard. * The gaps between physical kernels are compensated for in the weighting of the samples * @param idealKernel Ideal blur kernel. * @returns Nearest best kernel. 
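* @example
* // Hedged sketch (assumes a `camera` exists). The kernel setter stores the ideal value but
* // compiles the nearest "best" kernel internally, so asking for 32 builds a 33-tap blur.
* const hBlur = new BABYLON.BlurPostProcess("hBlur", new BABYLON.Vector2(1, 0), 32, 1.0, camera);
* console.log(hBlur.kernel); // 32, the ideal kernel that was requested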
*/ _nearestBestKernel(e) { const t = Math.round(e); for (const i of [t, t - 1, t + 1, t - 2, t + 2]) if (i % 2 !== 0 && Math.floor(i / 2) % 2 === 0 && i > 0) return Math.max(i, 3); return Math.max(t, 3); } /** * Calculates the value of a Gaussian distribution with sigma 3 at a given point. * @param x The point on the Gaussian distribution to sample. * @returns the value of the Gaussian function at x. */ _gaussianWeight(e) { const t = 0.3333333333333333, i = Math.sqrt(2 * Math.PI) * t, r = -(e * e / (2 * t * t)); return 1 / i * Math.exp(r); } /** * Generates a string that can be used as a floating point number in GLSL. * @param x Value to print. * @param decimalFigures Number of decimal places to print the number to (excluding trailing 0s). * @returns GLSL float string. */ _glslFloat(e, t = 8) { return e.toFixed(t).replace(/0+$/, ""); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new fu(e.name, e.direction, e.kernel, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable, e.textureType, void 0, !1), e, i, r); } } F([ W("kernel") ], fu.prototype, "_kernel", void 0); F([ W("packedFloat") ], fu.prototype, "_packedFloat", void 0); F([ PL() ], fu.prototype, "direction", void 0); Be("BABYLON.BlurPostProcess", fu); class u5 extends ra { /** * Define the blur ratio used to blur the reflection if needed. */ set blurRatio(e) { this._blurRatio !== e && (this._blurRatio = e, this._preparePostProcesses()); } get blurRatio() { return this._blurRatio; } /** * Define the adaptive blur kernel used to blur the reflection if needed. * This will autocompute the closest best match for the `blurKernel` */ set adaptiveBlurKernel(e) { this._adaptiveBlurKernel = e, this._autoComputeBlurKernel(); } /** * Define the blur kernel used to blur the reflection if needed. * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you. */ set blurKernel(e) { this.blurKernelX = e, this.blurKernelY = e; } /** * Define the blur kernel on the X Axis used to blur the reflection if needed. * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you. */ set blurKernelX(e) { this._blurKernelX !== e && (this._blurKernelX = e, this._preparePostProcesses()); } get blurKernelX() { return this._blurKernelX; } /** * Define the blur kernel on the Y Axis used to blur the reflection if needed. * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you. */ set blurKernelY(e) { this._blurKernelY !== e && (this._blurKernelY = e, this._preparePostProcesses()); } get blurKernelY() { return this._blurKernelY; } _autoComputeBlurKernel() { const e = this.getScene().getEngine(), t = this.getRenderWidth() / e.getRenderWidth(), i = this.getRenderHeight() / e.getRenderHeight(); this.blurKernelX = this._adaptiveBlurKernel * t, this.blurKernelY = this._adaptiveBlurKernel * i; } _onRatioRescale() { this._sizeRatio && (this.resize(this._initialSizeParameter), this._adaptiveBlurKernel || this._preparePostProcesses()), this._adaptiveBlurKernel && this._autoComputeBlurKernel(); } _updateGammaSpace() { const e = this.getScene(); e && (this.gammaSpace = !e.imageProcessingConfiguration.isEnabled || !e.imageProcessingConfiguration.applyByPostProcess); } /** * Instantiates a Mirror Texture. * Mirror texture can be used to simulate the view from a mirror in a scene. * It will dynamically be rendered every frame to adapt to the camera point of view. * You can then easily use it as a reflectionTexture on a flat surface. 
* In case the surface is not a plane, please consider relying on reflection probes. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#mirrors * @param name * @param size * @param scene * @param generateMipMaps * @param type * @param samplingMode * @param generateDepthBuffer */ constructor(e, t, i, r, s = 0, n = De.BILINEAR_SAMPLINGMODE, a = !0) { if (super(e, t, i, r, !0, s, !1, n, a), this.mirrorPlane = new Sd(0, 1, 0, 1), this._transformMatrix = Ae.Zero(), this._mirrorMatrix = Ae.Zero(), this._adaptiveBlurKernel = 0, this._blurKernelX = 0, this._blurKernelY = 0, this._blurRatio = 1, i = this.getScene(), !i) return this; this.ignoreCameraViewport = !0, this._updateGammaSpace(), this._imageProcessingConfigChangeObserver = i.imageProcessingConfiguration.onUpdateParameters.add(() => { this._updateGammaSpace(); }); const l = i.getEngine(); l.supportsUniformBuffers && (this._sceneUBO = i.createSceneUniformBuffer(`Scene for Mirror Texture (name "${e}")`)), this.onBeforeBindObservable.add(() => { var u; (u = l._debugPushGroup) === null || u === void 0 || u.call(l, `mirror generation for ${e}`, 1); }), this.onAfterUnbindObservable.add(() => { var u; (u = l._debugPopGroup) === null || u === void 0 || u.call(l, 1); }); let o; this.onBeforeRenderObservable.add(() => { this._sceneUBO && (this._currentSceneUBO = i.getSceneUniformBuffer(), i.setSceneUniformBuffer(this._sceneUBO), i.getSceneUniformBuffer().unbindEffect()), Ae.ReflectionToRef(this.mirrorPlane, this._mirrorMatrix), this._mirrorMatrix.multiplyToRef(i.getViewMatrix(), this._transformMatrix), i.setTransformMatrix(this._transformMatrix, i.getProjectionMatrix()), o = i.clipPlane, i.clipPlane = this.mirrorPlane, i._mirroredCameraPosition = D.TransformCoordinates(i.activeCamera.globalPosition, this._mirrorMatrix); }), this.onAfterRenderObservable.add(() => { this._sceneUBO && i.setSceneUniformBuffer(this._currentSceneUBO), i.updateTransformMatrix(), i._mirroredCameraPosition = null, i.clipPlane = o; }); } _preparePostProcesses() { if (this.clearPostProcesses(!0), this._blurKernelX && this._blurKernelY) { const e = this.getScene().getEngine(), t = e.getCaps().textureFloatRender && e.getCaps().textureFloatLinearFiltering ? 1 : 2; this._blurX = new fu("horizontal blur", new at(1, 0), this._blurKernelX, this._blurRatio, null, De.BILINEAR_SAMPLINGMODE, e, !1, t), this._blurX.autoClear = !1, this._blurRatio === 1 && this.samples < 2 && this._texture ? this._blurX.inputTexture = this._renderTarget : this._blurX.alwaysForcePOT = !0, this._blurY = new fu("vertical blur", new at(0, 1), this._blurKernelY, this._blurRatio, null, De.BILINEAR_SAMPLINGMODE, e, !1, t), this._blurY.autoClear = !1, this._blurY.alwaysForcePOT = this._blurRatio !== 1, this.addPostProcess(this._blurX), this.addPostProcess(this._blurY); } else this._blurY && (this.removePostProcess(this._blurY), this._blurY.dispose(), this._blurY = null), this._blurX && (this.removePostProcess(this._blurX), this._blurX.dispose(), this._blurX = null); } /** * Clone the mirror texture. 
* @returns the cloned texture */ clone() { const e = this.getScene(); if (!e) return this; const t = this.getSize(), i = new u5(this.name, t.width, e, this._renderTargetOptions.generateMipMaps, this._renderTargetOptions.type, this._renderTargetOptions.samplingMode, this._renderTargetOptions.generateDepthBuffer); return i.hasAlpha = this.hasAlpha, i.level = this.level, i.mirrorPlane = this.mirrorPlane.clone(), this.renderList && (i.renderList = this.renderList.slice(0)), i; } /** * Serialize the texture to a JSON representation you could use in Parse later on * @returns the serialized JSON representation */ serialize() { if (!this.name) return null; const e = super.serialize(); return e.mirrorPlane = this.mirrorPlane.asArray(), e; } /** * Dispose the texture and release its associated resources. */ dispose() { var e; super.dispose(); const t = this.getScene(); t && t.imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingConfigChangeObserver), (e = this._sceneUBO) === null || e === void 0 || e.dispose(); } } De._CreateMirror = (c, e, t, i) => new u5(c, e, t, i); class ul extends dn { /** * Gets or sets the size of the bounding box associated with the cube texture * When defined, the cubemap will switch to local mode * @see https://community.arm.com/graphics/b/blog/posts/reflections-based-on-local-cubemaps-in-unity * @example https://www.babylonjs-playground.com/#RNASML */ set boundingBoxSize(e) { if (this._boundingBoxSize && this._boundingBoxSize.equals(e)) return; this._boundingBoxSize = e; const t = this.getScene(); t && t.markAllMaterialsAsDirty(1); } /** * Returns the bounding box size * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#using-local-cubemap-mode */ get boundingBoxSize() { return this._boundingBoxSize; } /** * Sets texture matrix rotation angle around Y axis in radians. */ set rotationY(e) { this._rotationY = e, this.setReflectionTextureMatrix(Ae.RotationY(this._rotationY)); } /** * Gets texture matrix rotation angle around Y axis radians. */ get rotationY() { return this._rotationY; } /** * Are mip maps generated for this texture or not. */ get noMipmap() { return this._noMipmap; } /** * Gets the forced extension (if any) */ get forcedExtension() { return this._forcedExtension; } /** * Creates a cube texture from an array of image urls * @param files defines an array of image urls * @param scene defines the hosting scene * @param noMipmap specifies if mip maps are not used * @returns a cube texture */ static CreateFromImages(e, t, i) { let r = ""; return e.forEach((s) => r += s), new ul(r, t, null, i, e); } /** * Creates and return a texture created from prefilterd data by tools like IBL Baker or Lys. * @param url defines the url of the prefiltered texture * @param scene defines the scene the texture is attached to * @param forcedExtension defines the extension of the file if different from the url * @param createPolynomials defines whether or not to create polynomial harmonics from the texture data if necessary * @returns the prefiltered texture */ static CreateFromPrefilteredData(e, t, i = null, r = !0) { const s = t.useDelayedTextureLoading; t.useDelayedTextureLoading = !1; const n = new ul(e, t, null, !1, null, null, null, void 0, !0, i, r); return t.useDelayedTextureLoading = s, n; } /** * Creates a cube texture to use with reflection for instance. It can be based upon dds or six images as well * as prefiltered data. 
* @param rootUrl defines the url of the texture or the root name of the six images * @param sceneOrEngine defines the scene or engine the texture is attached to * @param extensions defines the suffixes add to the picture name in case six images are in use like _px.jpg... * @param noMipmap defines if mipmaps should be created or not * @param files defines the six files to load for the different faces in that order: px, py, pz, nx, ny, nz * @param onLoad defines a callback triggered at the end of the file load if no errors occurred * @param onError defines a callback triggered in case of error during load * @param format defines the internal format to use for the texture once loaded * @param prefiltered defines whether or not the texture is created from prefiltered data * @param forcedExtension defines the extensions to use (force a special type of file to load) in case it is different from the file name * @param createPolynomials defines whether or not to create polynomial harmonics from the texture data if necessary * @param lodScale defines the scale applied to environment texture. This manages the range of LOD level used for IBL according to the roughness * @param lodOffset defines the offset applied to environment texture. This manages first LOD level used for IBL according to the roughness * @param loaderOptions options to be passed to the loader * @param useSRGBBuffer Defines if the texture must be loaded in a sRGB GPU buffer (if supported by the GPU) (default: false) * @returns the cube texture */ constructor(e, t, i = null, r = !1, s = null, n = null, a = null, l = 5, o = !1, u = null, h = !1, d = 0.8, f = 0, p, m) { var _; super(t), this._lodScale = 0.8, this._lodOffset = 0, this.onLoadObservable = new Fe(), this.boundingBoxPosition = D.Zero(), this._rotationY = 0, this._files = null, this._forcedExtension = null, this._extensions = null, this._textureMatrixRefraction = new Ae(), this.name = e, this.url = e, this._noMipmap = r, this.hasAlpha = !1, this._format = l, this.isCube = !0, this._textureMatrix = Ae.Identity(), this._createPolynomials = h, this.coordinatesMode = De.CUBIC_MODE, this._extensions = i, this._files = s, this._forcedExtension = u, this._loaderOptions = p, this._useSRGBBuffer = m, this._lodScale = d, this._lodOffset = f, !(!e && !s) && this.updateURL(e, u, n, o, a, i, (_ = this.getScene()) === null || _ === void 0 ? void 0 : _.useDelayedTextureLoading, s); } /** * Get the current class name of the texture useful for serialization or dynamic coding. * @returns "CubeTexture" */ getClassName() { return "CubeTexture"; } /** * Update the url (and optional buffer) of this texture if url was null during construction. * @param url the url of the texture * @param forcedExtension defines the extension to use * @param onLoad callback called when the texture is loaded (defaults to null) * @param prefiltered Defines whether the updated texture is prefiltered or not * @param onError callback called if there was an error during the loading process (defaults to null) * @param extensions defines the suffixes add to the picture name in case six images are in use like _px.jpg... 
* @param delayLoad defines if the texture should be loaded now (false by default) * @param files defines the six files to load for the different faces in that order: px, py, pz, nx, ny, nz */ updateURL(e, t, i = null, r = !1, s = null, n = null, a = !1, l = null) { (!this.name || this.name.startsWith("data:")) && (this.name = e), this.url = e, t && (this._forcedExtension = t); const o = e.lastIndexOf("."), u = t || (o > -1 ? e.substring(o).toLowerCase() : ""), h = u.indexOf(".dds") === 0, d = u.indexOf(".env") === 0, f = u.indexOf(".basis") === 0; if (d ? (this.gammaSpace = !1, this._prefiltered = !1, this.anisotropicFilteringLevel = 1) : (this._prefiltered = r, r && (this.gammaSpace = !1, this.anisotropicFilteringLevel = 1)), l) this._files = l; else if (!f && !d && !h && !n && (n = ["_px.jpg", "_py.jpg", "_pz.jpg", "_nx.jpg", "_ny.jpg", "_nz.jpg"]), this._files = this._files || [], this._files.length = 0, n) { for (let p = 0; p < n.length; p++) this._files.push(e + n[p]); this._extensions = n; } a ? (this.delayLoadState = 4, this._delayedOnLoad = i, this._delayedOnError = s) : this._loadTexture(i, s); } /** * Delays loading of the cube texture * @param forcedExtension defines the extension to use */ delayLoad(e) { this.delayLoadState === 4 && (e && (this._forcedExtension = e), this.delayLoadState = 1, this._loadTexture(this._delayedOnLoad, this._delayedOnError)); } /** * Returns the reflection texture matrix * @returns the reflection texture matrix */ getReflectionTextureMatrix() { return this._textureMatrix; } /** * Sets the reflection texture matrix * @param value Reflection texture matrix */ setReflectionTextureMatrix(e) { var t, i; if (e.updateFlag === this._textureMatrix.updateFlag || (e.isIdentity() !== this._textureMatrix.isIdentity() && ((t = this.getScene()) === null || t === void 0 || t.markAllMaterialsAsDirty(1, (a) => a.getActiveTextures().indexOf(this) !== -1)), this._textureMatrix = e, !(!((i = this.getScene()) === null || i === void 0) && i.useRightHandedSystem))) return; const r = de.Vector3[0], s = de.Quaternion[0], n = de.Vector3[1]; this._textureMatrix.decompose(r, s, n), s.z *= -1, s.w *= -1, Ae.ComposeToRef(r, s, n, this._textureMatrixRefraction); } /** * Gets a suitable rotate/transform matrix when the texture is used for refraction. * There's a separate function from getReflectionTextureMatrix because refraction requires a special configuration of the matrix in right-handed mode. * @returns The refraction matrix */ getRefractionTextureMatrix() { var e; return !((e = this.getScene()) === null || e === void 0) && e.useRightHandedSystem ? this._textureMatrixRefraction : this._textureMatrix; } _loadTexture(e = null, t = null) { var i; const r = this.getScene(), s = this._texture; this._texture = this._getFromCache(this.url, this._noMipmap, void 0, void 0, this._useSRGBBuffer, this.isCube); const n = () => { var l; this.onLoadObservable.notifyObservers(this), s && (s.dispose(), (l = this.getScene()) === null || l === void 0 || l.markAllMaterialsAsDirty(1)), e && e(); }, a = (l, o) => { this._loadingError = !0, this._errorObject = { message: l, exception: o }, t && t(l, o), De.OnTextureLoadErrorObservable.notifyObservers(this); }; this._texture ? this._texture.isReady ? Ve.SetImmediate(() => n()) : this._texture.onLoadedObservable.add(() => n()) : (this._prefiltered ? 
this._texture = this._getEngine().createPrefilteredCubeTexture(this.url, r, this._lodScale, this._lodOffset, e, a, this._format, this._forcedExtension, this._createPolynomials) : this._texture = this._getEngine().createCubeTexture(this.url, r, this._files, this._noMipmap, e, a, this._format, this._forcedExtension, !1, this._lodScale, this._lodOffset, null, this._loaderOptions, !!this._useSRGBBuffer), (i = this._texture) === null || i === void 0 || i.onLoadedObservable.add(() => this.onLoadObservable.notifyObservers(this))); } /** * Parses text to create a cube texture * @param parsedTexture define the serialized text to read from * @param scene defines the hosting scene * @param rootUrl defines the root url of the cube texture * @returns a cube texture */ static Parse(e, t, i) { const r = St.Parse(() => { var s; let n = !1; return e.prefiltered && (n = e.prefiltered), new ul(i + ((s = e.url) !== null && s !== void 0 ? s : e.name), t, e.extensions, !1, e.files || null, null, null, void 0, n, e.forcedExtension); }, e, t); if (e.boundingBoxPosition && (r.boundingBoxPosition = D.FromArray(e.boundingBoxPosition)), e.boundingBoxSize && (r.boundingBoxSize = D.FromArray(e.boundingBoxSize)), e.animations) for (let s = 0; s < e.animations.length; s++) { const n = e.animations[s], a = Qo("BABYLON.Animation"); a && r.animations.push(a.Parse(n)); } return r; } /** * Makes a clone, or deep copy, of the cube texture * @returns a new cube texture */ clone() { let e = 0; const t = St.Clone(() => { const i = new ul(this.url, this.getScene() || this._getEngine(), this._extensions, this._noMipmap, this._files); return e = i.uniqueId, i; }, this); return t.uniqueId = e, t; } } F([ W() ], ul.prototype, "url", void 0); F([ oo() ], ul.prototype, "boundingBoxPosition", void 0); F([ oo() ], ul.prototype, "boundingBoxSize", null); F([ W("rotationY") ], ul.prototype, "rotationY", null); F([ W("files") ], ul.prototype, "_files", void 0); F([ W("forcedExtension") ], ul.prototype, "_forcedExtension", void 0); F([ W("extensions") ], ul.prototype, "_extensions", void 0); F([ VB("textureMatrix") ], ul.prototype, "_textureMatrix", void 0); F([ VB("textureMatrixRefraction") ], ul.prototype, "_textureMatrixRefraction", void 0); De._CubeTextureParser = ul.Parse; Be("BABYLON.CubeTexture", ul); const p1e = "backgroundFragmentDeclaration", _1e = `uniform vec4 vEyePosition;uniform vec4 vPrimaryColor; #ifdef USEHIGHLIGHTANDSHADOWCOLORS uniform vec4 vPrimaryColorShadow; #endif uniform float shadowLevel;uniform float alpha; #ifdef DIFFUSE uniform vec2 vDiffuseInfos; #endif #ifdef REFLECTION uniform vec2 vReflectionInfos;uniform mat4 reflectionMatrix;uniform vec3 vReflectionMicrosurfaceInfos; #endif #if defined(REFLECTIONFRESNEL) || defined(OPACITYFRESNEL) uniform vec3 vBackgroundCenter; #endif #ifdef REFLECTIONFRESNEL uniform vec4 vReflectionControl; #endif #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(REFRACTION) uniform mat4 view; #endif #ifdef PROJECTED_GROUND uniform vec2 projectedGroundInfos; #endif `; je.IncludesShadersStore[p1e] = _1e; const m1e = "backgroundUboDeclaration", g1e = `layout(std140,column_major) uniform;uniform Material {uniform vec4 vPrimaryColor;uniform vec4 vPrimaryColorShadow;uniform vec2 vDiffuseInfos;uniform vec2 vReflectionInfos;uniform mat4 diffuseMatrix;uniform mat4 reflectionMatrix;uniform vec3 vReflectionMicrosurfaceInfos;uniform float fFovMultiplier;uniform float pointSize;uniform float shadowLevel;uniform float alpha;uniform vec3 vBackgroundCenter;uniform vec4 
vReflectionControl;uniform vec2 projectedGroundInfos;}; #include `; je.IncludesShadersStore[m1e] = g1e; const v1e = "backgroundPixelShader", A1e = `#ifdef TEXTURELODSUPPORT #extension GL_EXT_shader_texture_lod : enable #endif precision highp float; #include<__decl__backgroundFragment> #include varying vec3 vPositionW; #ifdef MAINUV1 varying vec2 vMainUV1; #endif #ifdef MAINUV2 varying vec2 vMainUV2; #endif #ifdef NORMAL varying vec3 vNormalW; #endif #ifdef DIFFUSE #if DIFFUSEDIRECTUV==1 #define vDiffuseUV vMainUV1 #elif DIFFUSEDIRECTUV==2 #define vDiffuseUV vMainUV2 #else varying vec2 vDiffuseUV; #endif uniform sampler2D diffuseSampler; #endif #ifdef REFLECTION #ifdef REFLECTIONMAP_3D #define sampleReflection(s,c) textureCube(s,c) uniform samplerCube reflectionSampler; #ifdef TEXTURELODSUPPORT #define sampleReflectionLod(s,c,l) textureCubeLodEXT(s,c,l) #else uniform samplerCube reflectionSamplerLow;uniform samplerCube reflectionSamplerHigh; #endif #else #define sampleReflection(s,c) texture2D(s,c) uniform sampler2D reflectionSampler; #ifdef TEXTURELODSUPPORT #define sampleReflectionLod(s,c,l) texture2DLodEXT(s,c,l) #else uniform samplerCube reflectionSamplerLow;uniform samplerCube reflectionSamplerHigh; #endif #endif #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #else #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #endif #include #endif #ifndef FROMLINEARSPACE #define FROMLINEARSPACE; #endif #ifndef SHADOWONLY #define SHADOWONLY; #endif #include #include<__decl__lightFragment>[0..maxSimultaneousLights] #include #include #include #ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif #include #include #include #ifdef REFLECTIONFRESNEL #define FRESNEL_MAXIMUM_ON_ROUGH 0.25 vec3 fresnelSchlickEnvironmentGGX(float VdotN,vec3 reflectance0,vec3 reflectance90,float smoothness) {float weight=mix(FRESNEL_MAXIMUM_ON_ROUGH,1.0,smoothness);return reflectance0+weight*(reflectance90-reflectance0)*pow5(saturate(1.0-VdotN));} #endif #ifdef PROJECTED_GROUND float diskIntersectWithBackFaceCulling(vec3 ro,vec3 rd,vec3 c,float r) {float d=rd.y;if(d>0.0) { return 1e6; } vec3 o=ro-c;float t=-o.y/d;vec3 q=o+rd*t;return (dot(q,q) vec3 viewDirectionW=normalize(vEyePosition.xyz-vPositionW); #ifdef NORMAL vec3 normalW=normalize(vNormalW); #else vec3 normalW=vec3(0.0,1.0,0.0); #endif float shadow=1.;float globalShadow=0.;float shadowLightCount=0.;float aggShadow=0.;float numLights=0.; #include[0..maxSimultaneousLights] #ifdef SHADOWINUSE globalShadow/=shadowLightCount; #else globalShadow=1.0; #endif #ifndef BACKMAT_SHADOWONLY vec4 reflectionColor=vec4(1.,1.,1.,1.); #ifdef REFLECTION #ifdef PROJECTED_GROUND vec3 reflectionVector=project(viewDirectionW,vEyePosition.xyz);reflectionVector=vec3(reflectionMatrix*vec4(reflectionVector,1.)); #else vec3 reflectionVector=computeReflectionCoords(vec4(vPositionW,1.0),normalW); #endif #ifdef REFLECTIONMAP_OPPOSITEZ reflectionVector.z*=-1.0; #endif #ifdef REFLECTIONMAP_3D vec3 reflectionCoords=reflectionVector; #else vec2 reflectionCoords=reflectionVector.xy; #ifdef REFLECTIONMAP_PROJECTION reflectionCoords/=reflectionVector.z; #endif reflectionCoords.y=1.0-reflectionCoords.y; #endif #ifdef REFLECTIONBLUR float reflectionLOD=vReflectionInfos.y; #ifdef TEXTURELODSUPPORT 
reflectionLOD=reflectionLOD*log2(vReflectionMicrosurfaceInfos.x)*vReflectionMicrosurfaceInfos.y+vReflectionMicrosurfaceInfos.z;reflectionColor=sampleReflectionLod(reflectionSampler,reflectionCoords,reflectionLOD); #else float lodReflectionNormalized=saturate(reflectionLOD);float lodReflectionNormalizedDoubled=lodReflectionNormalized*2.0;vec4 reflectionSpecularMid=sampleReflection(reflectionSampler,reflectionCoords);if(lodReflectionNormalizedDoubled<1.0){reflectionColor=mix( sampleReflection(reflectionSamplerHigh,reflectionCoords), reflectionSpecularMid, lodReflectionNormalizedDoubled );} else {reflectionColor=mix( reflectionSpecularMid, sampleReflection(reflectionSamplerLow,reflectionCoords), lodReflectionNormalizedDoubled-1.0 );} #endif #else vec4 reflectionSample=sampleReflection(reflectionSampler,reflectionCoords);reflectionColor=reflectionSample; #endif #ifdef RGBDREFLECTION reflectionColor.rgb=fromRGBD(reflectionColor); #endif #ifdef GAMMAREFLECTION reflectionColor.rgb=toLinearSpace(reflectionColor.rgb); #endif #ifdef REFLECTIONBGR reflectionColor.rgb=reflectionColor.bgr; #endif reflectionColor.rgb*=vReflectionInfos.x; #endif vec3 diffuseColor=vec3(1.,1.,1.);float finalAlpha=alpha; #ifdef DIFFUSE vec4 diffuseMap=texture2D(diffuseSampler,vDiffuseUV); #ifdef GAMMADIFFUSE diffuseMap.rgb=toLinearSpace(diffuseMap.rgb); #endif diffuseMap.rgb*=vDiffuseInfos.y; #ifdef DIFFUSEHASALPHA finalAlpha*=diffuseMap.a; #endif diffuseColor=diffuseMap.rgb; #endif #ifdef REFLECTIONFRESNEL vec3 colorBase=diffuseColor; #else vec3 colorBase=reflectionColor.rgb*diffuseColor; #endif colorBase=max(colorBase,0.0); #ifdef USERGBCOLOR vec3 finalColor=colorBase; #else #ifdef USEHIGHLIGHTANDSHADOWCOLORS vec3 mainColor=mix(vPrimaryColorShadow.rgb,vPrimaryColor.rgb,colorBase); #else vec3 mainColor=vPrimaryColor.rgb; #endif vec3 finalColor=colorBase*mainColor; #endif #ifdef REFLECTIONFRESNEL vec3 reflectionAmount=vReflectionControl.xxx;vec3 reflectionReflectance0=vReflectionControl.yyy;vec3 reflectionReflectance90=vReflectionControl.zzz;float VdotN=dot(normalize(vEyePosition.xyz),normalW);vec3 planarReflectionFresnel=fresnelSchlickEnvironmentGGX(saturate(VdotN),reflectionReflectance0,reflectionReflectance90,1.0);reflectionAmount*=planarReflectionFresnel; #ifdef REFLECTIONFALLOFF float reflectionDistanceFalloff=1.0-saturate(length(vPositionW.xyz-vBackgroundCenter)*vReflectionControl.w);reflectionDistanceFalloff*=reflectionDistanceFalloff;reflectionAmount*=reflectionDistanceFalloff; #endif finalColor=mix(finalColor,reflectionColor.rgb,saturate(reflectionAmount)); #endif #ifdef OPACITYFRESNEL float viewAngleToFloor=dot(normalW,normalize(vEyePosition.xyz-vBackgroundCenter));const float startAngle=0.1;float fadeFactor=saturate(viewAngleToFloor/startAngle);finalAlpha*=fadeFactor*fadeFactor; #endif #ifdef SHADOWINUSE finalColor=mix(finalColor*shadowLevel,finalColor,globalShadow); #endif vec4 color=vec4(finalColor,finalAlpha); #else vec4 color=vec4(vPrimaryColor.rgb,(1.0-clamp(globalShadow,0.,1.))*alpha); #endif #include #include #ifdef IMAGEPROCESSINGPOSTPROCESS #if !defined(SKIPFINALCOLORCLAMP) color.rgb=clamp(color.rgb,0.,30.0); #endif #else color=applyImageProcessing(color); #endif #ifdef PREMULTIPLYALPHA color.rgb*=color.a; #endif #ifdef NOISE color.rgb+=dither(vPositionW.xy,0.5);color=max(color,0.0); #endif gl_FragColor=color; #define CUSTOM_FRAGMENT_MAIN_END } `; je.ShadersStore[v1e] = A1e; const y1e = "backgroundVertexDeclaration", C1e = `uniform mat4 view;uniform mat4 viewProjection;uniform float shadowLevel; #ifdef 
DIFFUSE uniform mat4 diffuseMatrix;uniform vec2 vDiffuseInfos; #endif #ifdef REFLECTION uniform vec2 vReflectionInfos;uniform mat4 reflectionMatrix;uniform vec3 vReflectionMicrosurfaceInfos;uniform float fFovMultiplier; #endif #ifdef POINTSIZE uniform float pointSize; #endif `; je.IncludesShadersStore[y1e] = C1e; const x1e = "backgroundVertexShader", b1e = `precision highp float; #include<__decl__backgroundVertex> #include attribute vec3 position; #ifdef NORMAL attribute vec3 normal; #endif #include #include #include varying vec3 vPositionW; #ifdef NORMAL varying vec3 vNormalW; #endif #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #ifdef MAINUV1 varying vec2 vMainUV1; #endif #ifdef MAINUV2 varying vec2 vMainUV2; #endif #if defined(DIFFUSE) && DIFFUSEDIRECTUV==0 varying vec2 vDiffuseUV; #endif #include #include #include<__decl__lightVxFragment>[0..maxSimultaneousLights] #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #endif #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN #ifdef REFLECTIONMAP_SKYBOX vPositionUVW=position; #endif #include #include #include #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {gl_Position=viewProjection*finalWorld*vec4(position,1.0);} else {gl_Position=viewProjectionR*finalWorld*vec4(position,1.0);} #else gl_Position=viewProjection*finalWorld*vec4(position,1.0); #endif vec4 worldPos=finalWorld*vec4(position,1.0);vPositionW=vec3(worldPos); #ifdef NORMAL mat3 normalWorld=mat3(finalWorld); #ifdef NONUNIFORMSCALING normalWorld=transposeMat3(inverseMat3(normalWorld)); #endif vNormalW=normalize(normalWorld*normal); #endif #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) vDirectionW=normalize(vec3(finalWorld*vec4(position,0.0))); #ifdef EQUIRECTANGULAR_RELFECTION_FOV mat3 screenToWorld=inverseMat3(mat3(finalWorld*viewProjection));vec3 segment=mix(vDirectionW,screenToWorld*vec3(0.0,0.0,1.0),abs(fFovMultiplier-1.0));if (fFovMultiplier<=1.0) {vDirectionW=normalize(segment);} else {vDirectionW=normalize(vDirectionW+(vDirectionW-segment));} #endif #endif #ifndef UV1 vec2 uv=vec2(0.,0.); #endif #ifndef UV2 vec2 uv2=vec2(0.,0.); #endif #ifdef MAINUV1 vMainUV1=uv; #endif #ifdef MAINUV2 vMainUV2=uv2; #endif #if defined(DIFFUSE) && DIFFUSEDIRECTUV==0 if (vDiffuseInfos.x==0.) {vDiffuseUV=vec2(diffuseMatrix*vec4(uv,1.0,0.0));} else {vDiffuseUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0));} #endif #include #include #include[0..maxSimultaneousLights] #ifdef VERTEXCOLOR vColor=color; #endif #if defined(POINTSIZE) && !defined(WEBGPU) gl_PointSize=pointSize; #endif #include #define CUSTOM_VERTEX_MAIN_END } `; je.ShadersStore[x1e] = b1e; class E1e extends sa { /** * Constructor of the defines. 
*/ constructor() { super(), this.DIFFUSE = !1, this.DIFFUSEDIRECTUV = 0, this.GAMMADIFFUSE = !1, this.DIFFUSEHASALPHA = !1, this.OPACITYFRESNEL = !1, this.REFLECTIONBLUR = !1, this.REFLECTIONFRESNEL = !1, this.REFLECTIONFALLOFF = !1, this.TEXTURELODSUPPORT = !1, this.PREMULTIPLYALPHA = !1, this.USERGBCOLOR = !1, this.USEHIGHLIGHTANDSHADOWCOLORS = !1, this.BACKMAT_SHADOWONLY = !1, this.NOISE = !1, this.REFLECTIONBGR = !1, this.PROJECTED_GROUND = !1, this.IMAGEPROCESSING = !1, this.VIGNETTE = !1, this.VIGNETTEBLENDMODEMULTIPLY = !1, this.VIGNETTEBLENDMODEOPAQUE = !1, this.TONEMAPPING = !1, this.TONEMAPPING_ACES = !1, this.CONTRAST = !1, this.COLORCURVES = !1, this.COLORGRADING = !1, this.COLORGRADING3D = !1, this.SAMPLER3DGREENDEPTH = !1, this.SAMPLER3DBGRMAP = !1, this.DITHER = !1, this.IMAGEPROCESSINGPOSTPROCESS = !1, this.SKIPFINALCOLORCLAMP = !1, this.EXPOSURE = !1, this.MULTIVIEW = !1, this.REFLECTION = !1, this.REFLECTIONMAP_3D = !1, this.REFLECTIONMAP_SPHERICAL = !1, this.REFLECTIONMAP_PLANAR = !1, this.REFLECTIONMAP_CUBIC = !1, this.REFLECTIONMAP_PROJECTION = !1, this.REFLECTIONMAP_SKYBOX = !1, this.REFLECTIONMAP_EXPLICIT = !1, this.REFLECTIONMAP_EQUIRECTANGULAR = !1, this.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, this.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, this.INVERTCUBICMAP = !1, this.REFLECTIONMAP_OPPOSITEZ = !1, this.LODINREFLECTIONALPHA = !1, this.GAMMAREFLECTION = !1, this.RGBDREFLECTION = !1, this.EQUIRECTANGULAR_RELFECTION_FOV = !1, this.MAINUV1 = !1, this.MAINUV2 = !1, this.UV1 = !1, this.UV2 = !1, this.CLIPPLANE = !1, this.CLIPPLANE2 = !1, this.CLIPPLANE3 = !1, this.CLIPPLANE4 = !1, this.CLIPPLANE5 = !1, this.CLIPPLANE6 = !1, this.POINTSIZE = !1, this.FOG = !1, this.NORMAL = !1, this.NUM_BONE_INFLUENCERS = 0, this.BonesPerMesh = 0, this.INSTANCES = !1, this.SHADOWFLOAT = !1, this.LOGARITHMICDEPTH = !1, this.NONUNIFORMSCALING = !1, this.ALPHATEST = !1, this.rebuild(); } } class Ls extends fl { /** * Experimental Internal Use Only. * * Key light Color in "perceptual value" meaning the color you would like to see on screen. * This acts as a helper to set the primary color to a more "human friendly" value. * Conversion to linear space as well as exposure and tone mapping correction will be applied to keep the * output color as close as possible from the chosen value. * (This does not account for contrast color grading and color curves as they are considered post effect and not directly * part of lighting setup.) */ get _perceptualColor() { return this.__perceptualColor; } set _perceptualColor(e) { this.__perceptualColor = e, this._computePrimaryColorFromPerceptualColor(), this._markAllSubMeshesAsLightsDirty(); } /** * Defines the level of the shadows (dark area of the reflection map) in order to help scaling the colors. * The color opposite to the primary color is used at the level chosen to define what the black area would look. */ get primaryColorShadowLevel() { return this._primaryColorShadowLevel; } set primaryColorShadowLevel(e) { this._primaryColorShadowLevel = e, this._computePrimaryColors(), this._markAllSubMeshesAsLightsDirty(); } /** * Defines the level of the highlights (highlight area of the reflection map) in order to help scaling the colors. * The primary color is used at the level chosen to define what the white area would look. 
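* (A clarifying note on the math in _computePrimaryColors below: the shadow color resolves to
* primaryColor * (1 - primaryColorShadowLevel), and the highlight color to primaryColor blended
* toward white by primaryColorHighlightLevel.)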
*/ get primaryColorHighlightLevel() { return this._primaryColorHighlightLevel; } set primaryColorHighlightLevel(e) { this._primaryColorHighlightLevel = e, this._computePrimaryColors(), this._markAllSubMeshesAsLightsDirty(); } /** * Sets the reflection reflectance fresnel values according to the default standard * empirically know to work well :-) */ set reflectionStandardFresnelWeight(e) { let t = e; t < 0.5 ? (t = t * 2, this.reflectionReflectance0 = Ls.StandardReflectance0 * t, this.reflectionReflectance90 = Ls.StandardReflectance90 * t) : (t = t * 2 - 1, this.reflectionReflectance0 = Ls.StandardReflectance0 + (1 - Ls.StandardReflectance0) * t, this.reflectionReflectance90 = Ls.StandardReflectance90 + (1 - Ls.StandardReflectance90) * t); } /** * The current fov(field of view) multiplier, 0.0 - 2.0. Defaults to 1.0. Lower values "zoom in" and higher values "zoom out". * Best used when trying to implement visual zoom effects like fish-eye or binoculars while not adjusting camera fov. * Recommended to be keep at 1.0 except for special cases. */ get fovMultiplier() { return this._fovMultiplier; } set fovMultiplier(e) { isNaN(e) && (e = 1), this._fovMultiplier = Math.max(0, Math.min(2, e)); } /** * Attaches a new image processing configuration to the PBR Material. * @param configuration (if null the scene configuration will be use) */ _attachImageProcessingConfiguration(e) { e !== this._imageProcessingConfiguration && (this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), e ? this._imageProcessingConfiguration = e : this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration, this._imageProcessingConfiguration && (this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => { this._computePrimaryColorFromPerceptualColor(), this._markAllSubMeshesAsImageProcessingDirty(); }))); } /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. * * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { this._attachImageProcessingConfiguration(e), this._markAllSubMeshesAsTexturesDirty(); } /** * Gets whether the color curves effect is enabled. */ get cameraColorCurvesEnabled() { return this.imageProcessingConfiguration.colorCurvesEnabled; } /** * Sets whether the color curves effect is enabled. */ set cameraColorCurvesEnabled(e) { this.imageProcessingConfiguration.colorCurvesEnabled = e; } /** * Gets whether the color grading effect is enabled. */ get cameraColorGradingEnabled() { return this.imageProcessingConfiguration.colorGradingEnabled; } /** * Gets whether the color grading effect is enabled. */ set cameraColorGradingEnabled(e) { this.imageProcessingConfiguration.colorGradingEnabled = e; } /** * Gets whether tonemapping is enabled or not. */ get cameraToneMappingEnabled() { return this._imageProcessingConfiguration.toneMappingEnabled; } /** * Sets whether tonemapping is enabled or not */ set cameraToneMappingEnabled(e) { this._imageProcessingConfiguration.toneMappingEnabled = e; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. 
*/ get cameraExposure() { return this._imageProcessingConfiguration.exposure; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. */ set cameraExposure(e) { this._imageProcessingConfiguration.exposure = e; } /** * Gets The camera contrast used on this material. */ get cameraContrast() { return this._imageProcessingConfiguration.contrast; } /** * Sets The camera contrast used on this material. */ set cameraContrast(e) { this._imageProcessingConfiguration.contrast = e; } /** * Gets the Color Grading 2D Lookup Texture. */ get cameraColorGradingTexture() { return this._imageProcessingConfiguration.colorGradingTexture; } /** * Sets the Color Grading 2D Lookup Texture. */ set cameraColorGradingTexture(e) { this.imageProcessingConfiguration.colorGradingTexture = e; } /** * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. */ get cameraColorCurves() { return this.imageProcessingConfiguration.colorCurves; } /** * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. 
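* A brief usage sketch (assuming the standard BABYLON.ColorCurves class; `material` stands for an instance of this material):
* material.cameraColorCurvesEnabled = true;
* material.cameraColorCurves = new BABYLON.ColorCurves();
* material.cameraColorCurves.globalSaturation = 30; // boost overall saturation
* material.cameraColorCurves.shadowsExposure = -10; // darken the low-luminance range slightly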
*/ set cameraColorCurves(e) { this.imageProcessingConfiguration.colorCurves = e; } /** * Instantiates a Background Material in the given scene * @param name The friendly name of the material * @param scene The scene to add the material to */ constructor(e, t) { super(e, t), this.primaryColor = ze.White(), this._primaryColorShadowLevel = 0, this._primaryColorHighlightLevel = 0, this.reflectionTexture = null, this.reflectionBlur = 0, this.diffuseTexture = null, this._shadowLights = null, this.shadowLights = null, this.shadowLevel = 0, this.sceneCenter = D.Zero(), this.opacityFresnel = !0, this.reflectionFresnel = !1, this.reflectionFalloffDistance = 0, this.reflectionAmount = 1, this.reflectionReflectance0 = 0.05, this.reflectionReflectance90 = 0.5, this.useRGBColor = !0, this.enableNoise = !1, this._fovMultiplier = 1, this.useEquirectangularFOV = !1, this._maxSimultaneousLights = 4, this.maxSimultaneousLights = 4, this._shadowOnly = !1, this.shadowOnly = !1, this._imageProcessingObserver = null, this.switchToBGR = !1, this._enableGroundProjection = !1, this.enableGroundProjection = !1, this.projectedGroundRadius = 1e3, this.projectedGroundHeight = 10, this._renderTargets = new xc(16), this._reflectionControls = Di.Zero(), this._white = ze.White(), this._primaryShadowColor = ze.Black(), this._primaryHighlightColor = ze.Black(), this._attachImageProcessingConfiguration(null), this.getRenderTargetTextures = () => (this._renderTargets.reset(), this._diffuseTexture && this._diffuseTexture.isRenderTarget && this._renderTargets.push(this._diffuseTexture), this._reflectionTexture && this._reflectionTexture.isRenderTarget && this._renderTargets.push(this._reflectionTexture), this._renderTargets); } /** * Gets a boolean indicating that current material needs to register RTT */ get hasRenderTargetTextures() { return !!(this._diffuseTexture && this._diffuseTexture.isRenderTarget || this._reflectionTexture && this._reflectionTexture.isRenderTarget); } /** * The entire material has been created in order to prevent overdraw. * @returns false */ needAlphaTesting() { return !0; } /** * The entire material has been created in order to prevent overdraw. * @returns true if blending is enable */ needAlphaBlending() { return this.alpha < 1 || this._diffuseTexture != null && this._diffuseTexture.hasAlpha || this._shadowOnly; } /** * Checks whether the material is ready to be rendered for a given mesh. * @param mesh The mesh to render * @param subMesh The submesh to check against * @param useInstances Specify wether or not the material is used with instances * @returns true if all the dependencies are ready (Textures, Effects...) 
*/ isReadyForSubMesh(e, t, i = !1) { if (t.effect && this.isFrozen && t.effect._wasPreviouslyReady && t.effect._wasPreviouslyUsingInstances === i) return !0; t.materialDefines || (t.materialDefines = new E1e()); const r = this.getScene(), s = t.materialDefines; if (this._isReadyForSubMesh(t)) return !0; const n = r.getEngine(); if (Ke.PrepareDefinesForLights(r, e, s, !1, this._maxSimultaneousLights), s._needNormals = !0, Ke.PrepareDefinesForMultiview(r, s), s._areTexturesDirty) { if (s._needUVs = !1, r.texturesEnabled) { if (r.getEngine().getCaps().textureLOD && (s.TEXTURELODSUPPORT = !0), this._diffuseTexture && Tt.DiffuseTextureEnabled) { if (!this._diffuseTexture.isReadyOrNotBlocking()) return !1; Ke.PrepareDefinesForMergedUV(this._diffuseTexture, s, "DIFFUSE"), s.DIFFUSEHASALPHA = this._diffuseTexture.hasAlpha, s.GAMMADIFFUSE = this._diffuseTexture.gammaSpace, s.OPACITYFRESNEL = this._opacityFresnel; } else s.DIFFUSE = !1, s.DIFFUSEDIRECTUV = 0, s.DIFFUSEHASALPHA = !1, s.GAMMADIFFUSE = !1, s.OPACITYFRESNEL = !1; const a = this._reflectionTexture; if (a && Tt.ReflectionTextureEnabled) { if (!a.isReadyOrNotBlocking()) return !1; switch (s.REFLECTION = !0, s.GAMMAREFLECTION = a.gammaSpace, s.RGBDREFLECTION = a.isRGBD, s.REFLECTIONBLUR = this._reflectionBlur > 0, s.LODINREFLECTIONALPHA = a.lodLevelInAlpha, s.EQUIRECTANGULAR_RELFECTION_FOV = this.useEquirectangularFOV, s.REFLECTIONBGR = this.switchToBGR, a.coordinatesMode === De.INVCUBIC_MODE && (s.INVERTCUBICMAP = !0), s.REFLECTIONMAP_3D = a.isCube, s.REFLECTIONMAP_OPPOSITEZ = s.REFLECTIONMAP_3D && this.getScene().useRightHandedSystem ? !a.invertZ : a.invertZ, a.coordinatesMode) { case De.EXPLICIT_MODE: s.REFLECTIONMAP_EXPLICIT = !0; break; case De.PLANAR_MODE: s.REFLECTIONMAP_PLANAR = !0; break; case De.PROJECTION_MODE: s.REFLECTIONMAP_PROJECTION = !0; break; case De.SKYBOX_MODE: s.REFLECTIONMAP_SKYBOX = !0; break; case De.SPHERICAL_MODE: s.REFLECTIONMAP_SPHERICAL = !0; break; case De.EQUIRECTANGULAR_MODE: s.REFLECTIONMAP_EQUIRECTANGULAR = !0; break; case De.FIXED_EQUIRECTANGULAR_MODE: s.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !0; break; case De.FIXED_EQUIRECTANGULAR_MIRRORED_MODE: s.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !0; break; case De.CUBIC_MODE: case De.INVCUBIC_MODE: default: s.REFLECTIONMAP_CUBIC = !0; break; } this.reflectionFresnel ? 
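/* Descriptive note: when reflectionFresnel is enabled, the branch below packs the fresnel controls
   into _reflectionControls (a Vector4 fed to the "vReflectionControl" uniform):
   x = reflectionAmount, y = reflectionReflectance0, z = reflectionReflectance90,
   w = 1 / reflectionFalloffDistance (consumed by the REFLECTIONFALLOFF shader path). */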
(s.REFLECTIONFRESNEL = !0, s.REFLECTIONFALLOFF = this.reflectionFalloffDistance > 0, this._reflectionControls.x = this.reflectionAmount, this._reflectionControls.y = this.reflectionReflectance0, this._reflectionControls.z = this.reflectionReflectance90, this._reflectionControls.w = 1 / this.reflectionFalloffDistance) : (s.REFLECTIONFRESNEL = !1, s.REFLECTIONFALLOFF = !1); } else s.REFLECTION = !1, s.REFLECTIONFRESNEL = !1, s.REFLECTIONFALLOFF = !1, s.REFLECTIONBLUR = !1, s.REFLECTIONMAP_3D = !1, s.REFLECTIONMAP_SPHERICAL = !1, s.REFLECTIONMAP_PLANAR = !1, s.REFLECTIONMAP_CUBIC = !1, s.REFLECTIONMAP_PROJECTION = !1, s.REFLECTIONMAP_SKYBOX = !1, s.REFLECTIONMAP_EXPLICIT = !1, s.REFLECTIONMAP_EQUIRECTANGULAR = !1, s.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, s.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, s.INVERTCUBICMAP = !1, s.REFLECTIONMAP_OPPOSITEZ = !1, s.LODINREFLECTIONALPHA = !1, s.GAMMAREFLECTION = !1, s.RGBDREFLECTION = !1; } s.PREMULTIPLYALPHA = this.alphaMode === 7 || this.alphaMode === 8, s.USERGBCOLOR = this._useRGBColor, s.NOISE = this._enableNoise; } if (s._areLightsDirty && (s.USEHIGHLIGHTANDSHADOWCOLORS = !this._useRGBColor && (this._primaryColorShadowLevel !== 0 || this._primaryColorHighlightLevel !== 0), s.BACKMAT_SHADOWONLY = this._shadowOnly), s._areImageProcessingDirty && this._imageProcessingConfiguration) { if (!this._imageProcessingConfiguration.isReady()) return !1; this._imageProcessingConfiguration.prepareDefines(s); } if (s._areMiscDirty && (s.REFLECTIONMAP_3D && this._enableGroundProjection ? (s.PROJECTED_GROUND = !0, s.REFLECTIONMAP_SKYBOX = !0) : s.PROJECTED_GROUND = !1), Ke.PrepareDefinesForMisc(e, r, this._useLogarithmicDepth, this.pointsCloud, this.fogEnabled, this._shouldTurnAlphaTestOn(e), s), Ke.PrepareDefinesForFrameBoundValues(r, n, this, s, i, null, t.getRenderingMesh().hasThinInstances), Ke.PrepareDefinesForAttributes(e, s, !1, !0, !1) && e && !r.getEngine().getCaps().standardDerivatives && !e.isVerticesDataPresent(Y.NormalKind) && (e.createNormals(!0), Ce.Warn("BackgroundMaterial: Normals have been created for the mesh: " + e.name)), s.isDirty) { s.markAsProcessed(), r.resetCachedMaterial(); const a = new pl(); s.FOG && a.addFallback(0, "FOG"), s.POINTSIZE && a.addFallback(1, "POINTSIZE"), s.MULTIVIEW && a.addFallback(0, "MULTIVIEW"), Ke.HandleFallbacksForShadows(s, a, this._maxSimultaneousLights); const l = [Y.PositionKind]; s.NORMAL && l.push(Y.NormalKind), s.UV1 && l.push(Y.UVKind), s.UV2 && l.push(Y.UV2Kind), Ke.PrepareAttributesForBones(l, e, s, a), Ke.PrepareAttributesForInstances(l, s); const o = [ "world", "view", "viewProjection", "vEyePosition", "vLightsType", "vFogInfos", "vFogColor", "pointSize", "mBones", "vPrimaryColor", "vPrimaryColorShadow", "vReflectionInfos", "reflectionMatrix", "vReflectionMicrosurfaceInfos", "fFovMultiplier", "shadowLevel", "alpha", "vBackgroundCenter", "vReflectionControl", "vDiffuseInfos", "diffuseMatrix", "projectedGroundInfos", "logarithmicDepthConstant" ]; Gc(o); const u = ["diffuseSampler", "reflectionSampler", "reflectionSamplerLow", "reflectionSamplerHigh"], h = ["Material", "Scene"]; Ds && (Ds.PrepareUniforms(o, s), Ds.PrepareSamplers(u, s)), Ke.PrepareUniformsAndSamplersList({ uniformsNames: o, uniformBuffersNames: h, samplers: u, defines: s, maxSimultaneousLights: this._maxSimultaneousLights }); const d = s.toString(), f = r.getEngine().createEffect("background", { attributes: l, uniformsNames: o, uniformBuffersNames: h, samplers: u, defines: d, fallbacks: a, onCompiled: this.onCompiled, onError: 
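/* Descriptive note: at this point the "background" vertex/pixel shaders registered above are
   compiled with the defines string built from the material state; the fallbacks added earlier
   (FOG, POINTSIZE, MULTIVIEW and the shadow fallbacks) let the engine retry compilation with
   those features stripped if the full variant fails on the current device. */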
this.onError, indexParameters: { maxSimultaneousLights: this._maxSimultaneousLights } }, n); t.setEffect(f, s, this._materialContext), this.buildUniformLayout(); } return !t.effect || !t.effect.isReady() ? !1 : (s._renderId = r.getRenderId(), t.effect._wasPreviouslyReady = !0, t.effect._wasPreviouslyUsingInstances = i, this._checkScenePerformancePriority(), !0); } /** * Compute the primary color according to the chosen perceptual color. */ _computePrimaryColorFromPerceptualColor() { this.__perceptualColor && (this._primaryColor.copyFrom(this.__perceptualColor), this._primaryColor.toLinearSpaceToRef(this._primaryColor, this.getScene().getEngine().useExactSrgbConversions), this._imageProcessingConfiguration && this._primaryColor.scaleToRef(1 / this._imageProcessingConfiguration.exposure, this._primaryColor), this._computePrimaryColors()); } /** * Compute the highlights and shadow colors according to their chosen levels. */ _computePrimaryColors() { this._primaryColorShadowLevel === 0 && this._primaryColorHighlightLevel === 0 || (this._primaryColor.scaleToRef(this._primaryColorShadowLevel, this._primaryShadowColor), this._primaryColor.subtractToRef(this._primaryShadowColor, this._primaryShadowColor), this._white.subtractToRef(this._primaryColor, this._primaryHighlightColor), this._primaryHighlightColor.scaleToRef(this._primaryColorHighlightLevel, this._primaryHighlightColor), this._primaryColor.addToRef(this._primaryHighlightColor, this._primaryHighlightColor)); } /** * Build the uniform buffer used in the material. */ buildUniformLayout() { this._uniformBuffer.addUniform("vPrimaryColor", 4), this._uniformBuffer.addUniform("vPrimaryColorShadow", 4), this._uniformBuffer.addUniform("vDiffuseInfos", 2), this._uniformBuffer.addUniform("vReflectionInfos", 2), this._uniformBuffer.addUniform("diffuseMatrix", 16), this._uniformBuffer.addUniform("reflectionMatrix", 16), this._uniformBuffer.addUniform("vReflectionMicrosurfaceInfos", 3), this._uniformBuffer.addUniform("fFovMultiplier", 1), this._uniformBuffer.addUniform("pointSize", 1), this._uniformBuffer.addUniform("shadowLevel", 1), this._uniformBuffer.addUniform("alpha", 1), this._uniformBuffer.addUniform("vBackgroundCenter", 3), this._uniformBuffer.addUniform("vReflectionControl", 4), this._uniformBuffer.addUniform("projectedGroundInfos", 2), this._uniformBuffer.create(); } /** * Unbind the material. */ unbind() { this._diffuseTexture && this._diffuseTexture.isRenderTarget && this._uniformBuffer.setTexture("diffuseSampler", null), this._reflectionTexture && this._reflectionTexture.isRenderTarget && this._uniformBuffer.setTexture("reflectionSampler", null), super.unbind(); } /** * Bind only the world matrix to the material. * @param world The world matrix to bind. */ bindOnlyWorldMatrix(e) { this._activeEffect.setMatrix("world", e); } /** * Bind the material for a dedicated submeh (every used meshes will be considered opaque). * @param world The world matrix to bind. * @param mesh * @param subMesh The submesh to bind for. 
*/ bindForSubMesh(e, t, i) { const r = this.getScene(), s = i.materialDefines; if (!s) return; const n = i.effect; if (!n) return; this._activeEffect = n, this.bindOnlyWorldMatrix(e), Ke.BindBonesParameters(t, this._activeEffect); const a = this._mustRebind(r, n, t.visibility); if (a) { this._uniformBuffer.bindToEffect(n, "Material"), this.bindViewProjection(n); const l = this._reflectionTexture; (!this._uniformBuffer.useUbo || !this.isFrozen || !this._uniformBuffer.isSync) && (r.texturesEnabled && (this._diffuseTexture && Tt.DiffuseTextureEnabled && (this._uniformBuffer.updateFloat2("vDiffuseInfos", this._diffuseTexture.coordinatesIndex, this._diffuseTexture.level), Ke.BindTextureMatrix(this._diffuseTexture, this._uniformBuffer, "diffuse")), l && Tt.ReflectionTextureEnabled && (this._uniformBuffer.updateMatrix("reflectionMatrix", l.getReflectionTextureMatrix()), this._uniformBuffer.updateFloat2("vReflectionInfos", l.level, this._reflectionBlur), this._uniformBuffer.updateFloat3("vReflectionMicrosurfaceInfos", l.getSize().width, l.lodGenerationScale, l.lodGenerationOffset))), this.shadowLevel > 0 && this._uniformBuffer.updateFloat("shadowLevel", this.shadowLevel), this._uniformBuffer.updateFloat("alpha", this.alpha), this.pointsCloud && this._uniformBuffer.updateFloat("pointSize", this.pointSize), s.USEHIGHLIGHTANDSHADOWCOLORS ? (this._uniformBuffer.updateColor4("vPrimaryColor", this._primaryHighlightColor, 1), this._uniformBuffer.updateColor4("vPrimaryColorShadow", this._primaryShadowColor, 1)) : this._uniformBuffer.updateColor4("vPrimaryColor", this._primaryColor, 1)), this._uniformBuffer.updateFloat("fFovMultiplier", this._fovMultiplier), r.texturesEnabled && (this._diffuseTexture && Tt.DiffuseTextureEnabled && this._uniformBuffer.setTexture("diffuseSampler", this._diffuseTexture), l && Tt.ReflectionTextureEnabled && (s.REFLECTIONBLUR && s.TEXTURELODSUPPORT ? this._uniformBuffer.setTexture("reflectionSampler", l) : s.REFLECTIONBLUR ? (this._uniformBuffer.setTexture("reflectionSampler", l._lodTextureMid || l), this._uniformBuffer.setTexture("reflectionSamplerLow", l._lodTextureLow || l), this._uniformBuffer.setTexture("reflectionSamplerHigh", l._lodTextureHigh || l)) : this._uniformBuffer.setTexture("reflectionSampler", l), s.REFLECTIONFRESNEL && (this._uniformBuffer.updateFloat3("vBackgroundCenter", this.sceneCenter.x, this.sceneCenter.y, this.sceneCenter.z), this._uniformBuffer.updateFloat4("vReflectionControl", this._reflectionControls.x, this._reflectionControls.y, this._reflectionControls.z, this._reflectionControls.w))), s.PROJECTED_GROUND && this._uniformBuffer.updateFloat2("projectedGroundInfos", this.projectedGroundRadius, this.projectedGroundHeight)), Ec(this._activeEffect, this, r), r.bindEyePosition(n); } else r.getEngine()._features.needToAlwaysBindUniformBuffers && (this._uniformBuffer.bindToEffect(n, "Material"), this._needToBindSceneUbo = !0); (a || !this.isFrozen) && (r.lightsEnabled && Ke.BindLights(r, t, this._activeEffect, s, this._maxSimultaneousLights), this.bindView(n), Ke.BindFogParameters(r, t, this._activeEffect, !0), this._useLogarithmicDepth && Ke.BindLogDepth(s, n, r), this._imageProcessingConfiguration && this._imageProcessingConfiguration.bind(this._activeEffect)), this._afterBind(t, this._activeEffect), this._uniformBuffer.update(); } /** * Checks to see if a texture is used in the material. * @param texture - Base texture to use. * @returns - Boolean specifying if a texture is used in the material. 
*/ hasTexture(e) { return !!(super.hasTexture(e) || this._reflectionTexture === e || this._diffuseTexture === e); } /** * Dispose the material. * @param forceDisposeEffect Force disposal of the associated effect. * @param forceDisposeTextures Force disposal of the associated textures. */ dispose(e = !1, t = !1) { t && (this.diffuseTexture && this.diffuseTexture.dispose(), this.reflectionTexture && this.reflectionTexture.dispose()), this._renderTargets.dispose(), this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), super.dispose(e); } /** * Clones the material. * @param name The cloned name. * @returns The cloned material. */ clone(e) { return St.Clone(() => new Ls(e, this.getScene()), this); } /** * Serializes the current material to its JSON representation. * @returns The JSON representation. */ serialize() { const e = super.serialize(); return e.customType = "BABYLON.BackgroundMaterial", e; } /** * Gets the class name of the material * @returns "BackgroundMaterial" */ getClassName() { return "BackgroundMaterial"; } /** * Parse a JSON input to create back a background material. * @param source The JSON data to parse * @param scene The scene to create the parsed material in * @param rootUrl The root url of the assets the material depends upon * @returns the instantiated BackgroundMaterial. */ static Parse(e, t, i) { return St.Parse(() => new Ls(e.name, t), e, t, i); } } Ls.StandardReflectance0 = 0.05; Ls.StandardReflectance90 = 0.5; F([ Fs() ], Ls.prototype, "_primaryColor", void 0); F([ ct("_markAllSubMeshesAsLightsDirty") ], Ls.prototype, "primaryColor", void 0); F([ Fs() ], Ls.prototype, "__perceptualColor", void 0); F([ W() ], Ls.prototype, "_primaryColorShadowLevel", void 0); F([ W() ], Ls.prototype, "_primaryColorHighlightLevel", void 0); F([ ct("_markAllSubMeshesAsLightsDirty") ], Ls.prototype, "primaryColorHighlightLevel", null); F([ er() ], Ls.prototype, "_reflectionTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionTexture", void 0); F([ W() ], Ls.prototype, "_reflectionBlur", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionBlur", void 0); F([ er() ], Ls.prototype, "_diffuseTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "diffuseTexture", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "shadowLights", void 0); F([ W() ], Ls.prototype, "_shadowLevel", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "shadowLevel", void 0); F([ oo() ], Ls.prototype, "_sceneCenter", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "sceneCenter", void 0); F([ W() ], Ls.prototype, "_opacityFresnel", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "opacityFresnel", void 0); F([ W() ], Ls.prototype, "_reflectionFresnel", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionFresnel", void 0); F([ W() ], Ls.prototype, "_reflectionFalloffDistance", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionFalloffDistance", void 0); F([ W() ], Ls.prototype, "_reflectionAmount", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionAmount", void 0); F([ W() ], Ls.prototype, "_reflectionReflectance0", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionReflectance0", void 0); F([ W() ], Ls.prototype, 
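/* Usage sketch for the material class above (registered just below as "BABYLON.BackgroundMaterial").
   Assumptions: a Babylon.js `scene` and a `skyboxMesh` exist; the texture URL is a placeholder.
   The settings mirror what the environment helper further down applies to its skybox:

     const backgroundMat = new BABYLON.BackgroundMaterial("backgroundSkybox", scene);
     backgroundMat.useRGBColor = false;                              // tint through primaryColor
     backgroundMat.primaryColor = new BABYLON.Color3(0.2, 0.2, 0.3);
     backgroundMat.enableNoise = true;                               // dithering to reduce banding
     backgroundMat.reflectionTexture = new BABYLON.CubeTexture("env/skybox", scene);
     backgroundMat.reflectionTexture.coordinatesMode = BABYLON.Texture.SKYBOX_MODE;
     skyboxMesh.material = backgroundMat;
*/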
"_reflectionReflectance90", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "reflectionReflectance90", void 0); F([ W() ], Ls.prototype, "_useRGBColor", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "useRGBColor", void 0); F([ W() ], Ls.prototype, "_enableNoise", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "enableNoise", void 0); F([ W() ], Ls.prototype, "_maxSimultaneousLights", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ls.prototype, "maxSimultaneousLights", void 0); F([ W() ], Ls.prototype, "_shadowOnly", void 0); F([ ct("_markAllSubMeshesAsLightsDirty") ], Ls.prototype, "shadowOnly", void 0); F([ $G() ], Ls.prototype, "_imageProcessingConfiguration", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], Ls.prototype, "enableGroundProjection", void 0); F([ W() ], Ls.prototype, "projectedGroundRadius", void 0); F([ W() ], Ls.prototype, "projectedGroundHeight", void 0); Be("BABYLON.BackgroundMaterial", Ls); class g5 { /** * Creates the default options for the helper. * @param scene The scene the environment helper belongs to. */ static _GetDefaultOptions(e) { return { createGround: !0, groundSize: 15, groundTexture: this._GroundTextureCDNUrl, groundColor: new ze(0.2, 0.2, 0.3).toLinearSpace(e.getEngine().useExactSrgbConversions).scale(3), groundOpacity: 0.9, enableGroundShadow: !0, groundShadowLevel: 0.5, enableGroundMirror: !1, groundMirrorSizeRatio: 0.3, groundMirrorBlurKernel: 64, groundMirrorAmount: 1, groundMirrorFresnelWeight: 1, groundMirrorFallOffDistance: 0, groundMirrorTextureType: 0, groundYBias: 1e-5, createSkybox: !0, skyboxSize: 20, skyboxTexture: this._SkyboxTextureCDNUrl, skyboxColor: new ze(0.2, 0.2, 0.3).toLinearSpace(e.getEngine().useExactSrgbConversions).scale(3), backgroundYRotation: 0, sizeAuto: !0, rootPosition: D.Zero(), setupImageProcessing: !0, environmentTexture: this._EnvironmentTextureCDNUrl, cameraExposure: 0.8, cameraContrast: 1.2, toneMappingEnabled: !0 }; } /** * Gets the root mesh created by the helper. */ get rootMesh() { return this._rootMesh; } /** * Gets the skybox created by the helper. */ get skybox() { return this._skybox; } /** * Gets the skybox texture created by the helper. */ get skyboxTexture() { return this._skyboxTexture; } /** * Gets the skybox material created by the helper. */ get skyboxMaterial() { return this._skyboxMaterial; } /** * Gets the ground mesh created by the helper. */ get ground() { return this._ground; } /** * Gets the ground texture created by the helper. */ get groundTexture() { return this._groundTexture; } /** * Gets the ground mirror created by the helper. */ get groundMirror() { return this._groundMirror; } /** * Gets the ground mirror render list to helps pushing the meshes * you wish in the ground reflection. */ get groundMirrorRenderList() { return this._groundMirror ? this._groundMirror.renderList : null; } /** * Gets the ground material created by the helper. 
*/ get groundMaterial() { return this._groundMaterial; } /** * constructor * @param options Defines the options we want to customize the helper * @param scene The scene to add the material to */ constructor(e, t) { this._errorHandler = (i, r) => { this.onErrorObservable.notifyObservers({ message: i, exception: r }); }, this._options = Object.assign(Object.assign({}, g5._GetDefaultOptions(t)), e), this._scene = t, this.onErrorObservable = new Fe(), this._setupBackground(), this._setupImageProcessing(); } /** * Updates the background according to the new options * @param options */ updateOptions(e) { const t = Object.assign(Object.assign({}, this._options), e); this._ground && !t.createGround && (this._ground.dispose(), this._ground = null), this._groundMaterial && !t.createGround && (this._groundMaterial.dispose(), this._groundMaterial = null), this._groundTexture && this._options.groundTexture != t.groundTexture && (this._groundTexture.dispose(), this._groundTexture = null), this._skybox && !t.createSkybox && (this._skybox.dispose(), this._skybox = null), this._skyboxMaterial && !t.createSkybox && (this._skyboxMaterial.dispose(), this._skyboxMaterial = null), this._skyboxTexture && this._options.skyboxTexture != t.skyboxTexture && (this._skyboxTexture.dispose(), this._skyboxTexture = null), this._groundMirror && !t.enableGroundMirror && (this._groundMirror.dispose(), this._groundMirror = null), this._scene.environmentTexture && this._options.environmentTexture != t.environmentTexture && this._scene.environmentTexture.dispose(), this._options = t, this._setupBackground(), this._setupImageProcessing(); } /** * Sets the primary color of all the available elements. * @param color the main color to affect to the ground and the background */ setMainColor(e) { this.groundMaterial && (this.groundMaterial.primaryColor = e), this.skyboxMaterial && (this.skyboxMaterial.primaryColor = e), this.groundMirror && (this.groundMirror.clearColor = new Et(e.r, e.g, e.b, 1)); } /** * Setup the image processing according to the specified options. */ _setupImageProcessing() { this._options.setupImageProcessing && (this._scene.imageProcessingConfiguration.contrast = this._options.cameraContrast, this._scene.imageProcessingConfiguration.exposure = this._options.cameraExposure, this._scene.imageProcessingConfiguration.toneMappingEnabled = this._options.toneMappingEnabled, this._setupEnvironmentTexture()); } /** * Setup the environment texture according to the specified options. */ _setupEnvironmentTexture() { if (this._scene.environmentTexture) return; if (this._options.environmentTexture instanceof dn) { this._scene.environmentTexture = this._options.environmentTexture; return; } const e = ul.CreateFromPrefilteredData(this._options.environmentTexture, this._scene); this._scene.environmentTexture = e; } /** * Setup the background according to the specified options. 
*/ _setupBackground() { this._rootMesh || (this._rootMesh = new ke("BackgroundHelper", this._scene)), this._rootMesh.rotation.y = this._options.backgroundYRotation; const e = this._getSceneSize(); this._options.createGround && (this._setupGround(e), this._setupGroundMaterial(), this._setupGroundDiffuseTexture(), this._options.enableGroundMirror && this._setupGroundMirrorTexture(e), this._setupMirrorInGroundMaterial()), this._options.createSkybox && (this._setupSkybox(e), this._setupSkyboxMaterial(), this._setupSkyboxReflectionTexture()), this._rootMesh.position.x = e.rootPosition.x, this._rootMesh.position.z = e.rootPosition.z, this._rootMesh.position.y = e.rootPosition.y; } /** * Get the scene sizes according to the setup. */ _getSceneSize() { let e = this._options.groundSize, t = this._options.skyboxSize, i = this._options.rootPosition; if (!this._scene.meshes || this._scene.meshes.length === 1) return { groundSize: e, skyboxSize: t, rootPosition: i }; const r = this._scene.getWorldExtends((n) => n !== this._ground && n !== this._rootMesh && n !== this._skybox), s = r.max.subtract(r.min); if (this._options.sizeAuto) { this._scene.activeCamera instanceof Pn && this._scene.activeCamera.upperRadiusLimit && (e = this._scene.activeCamera.upperRadiusLimit * 2, t = e); const n = s.length(); n > e && (e = n * 2, t = e), e *= 1.1, t *= 1.5, i = r.min.add(s.scale(0.5)), i.y = r.min.y - this._options.groundYBias; } return { groundSize: e, skyboxSize: t, rootPosition: i }; } /** * Setup the ground according to the specified options. * @param sceneSize */ _setupGround(e) { (!this._ground || this._ground.isDisposed()) && (this._ground = hx("BackgroundPlane", { size: e.groundSize }, this._scene), this._ground.rotation.x = Math.PI / 2, this._ground.parent = this._rootMesh, this._ground.onDisposeObservable.add(() => { this._ground = null; })), this._ground.receiveShadows = this._options.enableGroundShadow; } /** * Setup the ground material according to the specified options. */ _setupGroundMaterial() { this._groundMaterial || (this._groundMaterial = new Ls("BackgroundPlaneMaterial", this._scene)), this._groundMaterial.alpha = this._options.groundOpacity, this._groundMaterial.alphaMode = 8, this._groundMaterial.shadowLevel = this._options.groundShadowLevel, this._groundMaterial.primaryColor = this._options.groundColor, this._groundMaterial.useRGBColor = !1, this._groundMaterial.enableNoise = !0, this._ground && (this._ground.material = this._groundMaterial); } /** * Setup the ground diffuse texture according to the specified options. */ _setupGroundDiffuseTexture() { if (this._groundMaterial && !this._groundTexture) { if (this._options.groundTexture instanceof dn) { this._groundMaterial.diffuseTexture = this._options.groundTexture; return; } this._groundTexture = new De(this._options.groundTexture, this._scene, void 0, void 0, void 0, void 0, this._errorHandler), this._groundTexture.gammaSpace = !1, this._groundTexture.hasAlpha = !0, this._groundMaterial.diffuseTexture = this._groundTexture; } } /** * Setup the ground mirror texture according to the specified options. 
* @param sceneSize */ _setupGroundMirrorTexture(e) { const t = De.CLAMP_ADDRESSMODE; if (!this._groundMirror && (this._groundMirror = new u5("BackgroundPlaneMirrorTexture", { ratio: this._options.groundMirrorSizeRatio }, this._scene, !1, this._options.groundMirrorTextureType, De.BILINEAR_SAMPLINGMODE, !0), this._groundMirror.mirrorPlane = new Sd(0, -1, 0, e.rootPosition.y), this._groundMirror.anisotropicFilteringLevel = 1, this._groundMirror.wrapU = t, this._groundMirror.wrapV = t, this._groundMirror.renderList)) for (let r = 0; r < this._scene.meshes.length; r++) { const s = this._scene.meshes[r]; s !== this._ground && s !== this._skybox && s !== this._rootMesh && this._groundMirror.renderList.push(s); } const i = this._options.groundColor.toGammaSpace(this._scene.getEngine().useExactSrgbConversions); this._groundMirror.clearColor = new Et(i.r, i.g, i.b, 1), this._groundMirror.adaptiveBlurKernel = this._options.groundMirrorBlurKernel; } /** * Setup the ground to receive the mirror texture. */ _setupMirrorInGroundMaterial() { this._groundMaterial && (this._groundMaterial.reflectionTexture = this._groundMirror, this._groundMaterial.reflectionFresnel = !0, this._groundMaterial.reflectionAmount = this._options.groundMirrorAmount, this._groundMaterial.reflectionStandardFresnelWeight = this._options.groundMirrorFresnelWeight, this._groundMaterial.reflectionFalloffDistance = this._options.groundMirrorFallOffDistance); } /** * Setup the skybox according to the specified options. * @param sceneSize */ _setupSkybox(e) { (!this._skybox || this._skybox.isDisposed()) && (this._skybox = B4("BackgroundSkybox", { size: e.skyboxSize, sideOrientation: ke.BACKSIDE }, this._scene), this._skybox.onDisposeObservable.add(() => { this._skybox = null; })), this._skybox.parent = this._rootMesh; } /** * Setup the skybox material according to the specified options. */ _setupSkyboxMaterial() { this._skybox && (this._skyboxMaterial || (this._skyboxMaterial = new Ls("BackgroundSkyboxMaterial", this._scene)), this._skyboxMaterial.useRGBColor = !1, this._skyboxMaterial.primaryColor = this._options.skyboxColor, this._skyboxMaterial.enableNoise = !0, this._skybox.material = this._skyboxMaterial); } /** * Setup the skybox reflection texture according to the specified options. */ _setupSkyboxReflectionTexture() { if (this._skyboxMaterial && !this._skyboxTexture) { if (this._options.skyboxTexture instanceof dn) { this._skyboxMaterial.reflectionTexture = this._options.skyboxTexture; return; } this._skyboxTexture = new ul(this._options.skyboxTexture, this._scene, void 0, void 0, void 0, void 0, this._errorHandler), this._skyboxTexture.coordinatesMode = De.SKYBOX_MODE, this._skyboxTexture.gammaSpace = !1, this._skyboxMaterial.reflectionTexture = this._skyboxTexture; } } /** * Dispose all the elements created by the Helper. */ dispose() { this._groundMaterial && this._groundMaterial.dispose(!0, !0), this._skyboxMaterial && this._skyboxMaterial.dispose(!0, !0), this._rootMesh.dispose(!1); } } g5._GroundTextureCDNUrl = "https://assets.babylonjs.com/environments/backgroundGround.png"; g5._SkyboxTextureCDNUrl = "https://assets.babylonjs.com/environments/backgroundSkybox.dds"; g5._EnvironmentTextureCDNUrl = "https://assets.babylonjs.com/environments/environmentSpecular.env"; class w1 extends xi { /** * Gets the texture being displayed on the sphere */ get texture() { return this._texture; } /** * Sets the texture being displayed on the sphere */ set texture(e) { this._texture !== e && (this._texture = e, this._useDirectMapping ? 
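/* Usage sketch for the environment helper class defined above (minified as g5). Assumption: this is
   Babylon.js's EnvironmentHelper, typically obtained through scene.createDefaultEnvironment() (that
   entry point is not part of this excerpt). Option names match _GetDefaultOptions above:

     const helper = scene.createDefaultEnvironment({
       createSkybox: true,
       skyboxSize: 150,
       createGround: true,
       groundSize: 100,
       enableGroundMirror: true,
       groundMirrorSizeRatio: 0.3
     });
     helper.setMainColor(new BABYLON.Color3(0.1, 0.3, 0.6)); // tints ground, skybox and mirror clear color
     helper.updateOptions({ groundOpacity: 0.7 });           // re-applies the background with merged options
*/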
(this._texture.wrapU = De.CLAMP_ADDRESSMODE, this._texture.wrapV = De.CLAMP_ADDRESSMODE, this._material.diffuseTexture = this._texture) : (this._texture.coordinatesMode = De.FIXED_EQUIRECTANGULAR_MIRRORED_MODE, this._texture.wrapV = De.CLAMP_ADDRESSMODE, this._material.reflectionTexture = this._texture), this._changeTextureMode(this._textureMode)); } /** * Gets the mesh used for the dome. */ get mesh() { return this._mesh; } /** * The current fov(field of view) multiplier, 0.0 - 2.0. Defaults to 1.0. Lower values "zoom in" and higher values "zoom out". * Also see the options.resolution property. */ get fovMultiplier() { return this._material.fovMultiplier; } set fovMultiplier(e) { this._material.fovMultiplier = e; } /** * Gets or set the current texture mode for the texture. It can be: * * TextureDome.MODE_MONOSCOPIC : Define the texture source as a Monoscopic panoramic 360. * * TextureDome.MODE_TOPBOTTOM : Define the texture source as a Stereoscopic TopBottom/OverUnder panoramic 360. * * TextureDome.MODE_SIDEBYSIDE : Define the texture source as a Stereoscopic Side by Side panoramic 360. */ get textureMode() { return this._textureMode; } /** * Sets the current texture mode for the texture. It can be: * * TextureDome.MODE_MONOSCOPIC : Define the texture source as a Monoscopic panoramic 360. * * TextureDome.MODE_TOPBOTTOM : Define the texture source as a Stereoscopic TopBottom/OverUnder panoramic 360. * * TextureDome.MODE_SIDEBYSIDE : Define the texture source as a Stereoscopic Side by Side panoramic 360. */ set textureMode(e) { this._textureMode !== e && this._changeTextureMode(e); } /** * Is it a 180 degrees dome (half dome) or 360 texture (full dome) */ get halfDome() { return this._halfDome; } /** * Set the halfDome mode. If set, only the front (180 degrees) will be displayed and the back will be blacked out. */ set halfDome(e) { this._halfDome = e, this._halfDomeMask.setEnabled(e), this._changeTextureMode(this._textureMode); } /** * Set the cross-eye mode. If set, images that can be seen when crossing eyes will render correctly */ set crossEye(e) { this._crossEye = e, this._changeTextureMode(this._textureMode); } /** * Is it a cross-eye texture? */ get crossEye() { return this._crossEye; } /** * The background material of this dome. */ get material() { return this._material; } /** * Create an instance of this class and pass through the parameters to the relevant classes- Texture, StandardMaterial, and Mesh. * @param name Element's name, child elements will append suffixes for their own names. * @param textureUrlOrElement defines the url(s) or the (video) HTML element to use * @param options An object containing optional or exposed sub element properties * @param options.resolution * @param options.clickToPlay * @param options.autoPlay * @param options.loop * @param options.size * @param options.poster * @param options.faceForward * @param options.useDirectMapping * @param options.halfDomeMode * @param options.crossEyeMode * @param options.generateMipMaps * @param options.mesh * @param scene * @param onError */ constructor(e, t, i, r, s = null) { super(e, r), this.onError = s, this._halfDome = !1, this._crossEye = !1, this._useDirectMapping = !1, this._textureMode = w1.MODE_MONOSCOPIC, this._onBeforeCameraRenderObserver = null, this.onLoadErrorObservable = new Fe(), this.onLoadObservable = new Fe(), r = this.getScene(), e = e || "textureDome", i.resolution = Math.abs(i.resolution) | 0 || 32, i.clickToPlay = !!i.clickToPlay, i.autoPlay = i.autoPlay === void 0 ? 
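/* Descriptive note: defaults applied in this constructor — resolution 32, clickToPlay false,
   autoPlay and loop true, size 48% of the active camera's maxZ (1000 without a camera),
   useDirectMapping true, faceForward true. Hedged usage sketch through the PhotoDome subclass
   defined further below (assuming it is exported as BABYLON.PhotoDome; the image URL is a placeholder):

     const dome = new BABYLON.PhotoDome("panorama", "textures/360photo.jpg",
       { resolution: 64, size: 1000, useDirectMapping: false }, scene);
     dome.imageMode = BABYLON.PhotoDome.MODE_TOPBOTTOM; // stereoscopic top/bottom source
*/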
!0 : !!i.autoPlay, i.loop = i.loop === void 0 ? !0 : !!i.loop, i.size = Math.abs(i.size) || (r.activeCamera ? r.activeCamera.maxZ * 0.48 : 1e3), i.useDirectMapping === void 0 ? this._useDirectMapping = !0 : this._useDirectMapping = i.useDirectMapping, i.faceForward === void 0 && (i.faceForward = !0), this._setReady(!1), i.mesh ? this._mesh = i.mesh : this._mesh = Rd(e + "_mesh", { segments: i.resolution, diameter: i.size, updatable: !1, sideOrientation: ke.BACKSIDE }, r); const n = this._material = new Ls(e + "_material", r); n.useEquirectangularFOV = !0, n.fovMultiplier = 1, n.opacityFresnel = !1; const a = this._initTexture(t, r, i); if (this.texture = a, this._mesh.material = n, this._mesh.parent = this, this._halfDomeMask = Rd("", { slice: 0.5, diameter: i.size * 0.98, segments: i.resolution * 2, sideOrientation: ke.BACKSIDE }, r), this._halfDomeMask.rotate(bl.X, -Math.PI / 2), this._halfDomeMask.parent = this._mesh, this._halfDome = !!i.halfDomeMode, this._halfDomeMask.setEnabled(this._halfDome), this._crossEye = !!i.crossEyeMode, this._texture.anisotropicFilteringLevel = 1, this._texture.onLoadObservable.addOnce(() => { this._setReady(!0); }), i.faceForward && r.activeCamera) { const l = r.activeCamera, o = D.Forward(), u = D.TransformNormal(o, l.getViewMatrix()); u.normalize(), this.rotation.y = Math.acos(D.Dot(o, u)); } this._changeTextureMode(this._textureMode); } _changeTextureMode(e) { switch (this._scene.onBeforeCameraRenderObservable.remove(this._onBeforeCameraRenderObserver), this._textureMode = e, this._texture.uScale = 1, this._texture.vScale = 1, this._texture.uOffset = 0, this._texture.vOffset = 0, this._texture.vAng = 0, e) { case w1.MODE_MONOSCOPIC: this._halfDome && (this._texture.uScale = 2, this._texture.uOffset = -1); break; case w1.MODE_SIDEBYSIDE: { this._texture.uScale = this._halfDome ? 0.99999 : 0.5; const t = this._halfDome ? 0 : 0.5, i = this._halfDome ? -0.5 : 0; this._onBeforeCameraRenderObserver = this._scene.onBeforeCameraRenderObservable.add((r) => { let s = r.isRightCamera; this._crossEye && (s = !s), s ? this._texture.uOffset = t : this._texture.uOffset = i; }); break; } case w1.MODE_TOPBOTTOM: this._texture.vScale = this._halfDome ? 0.99999 : 0.5, this._onBeforeCameraRenderObserver = this._scene.onBeforeCameraRenderObservable.add((t) => { let i = t.isRightCamera; this._crossEye && (i = !i), this._texture.vOffset = i ? 0.5 : 0; }); break; } } /** * Releases resources associated with this node. * @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { this._texture.dispose(), this._mesh.dispose(), this._material.dispose(), this._scene.onBeforeCameraRenderObservable.remove(this._onBeforeCameraRenderObserver), this.onLoadErrorObservable.clear(), this.onLoadObservable.clear(), super.dispose(e, t); } } w1.MODE_MONOSCOPIC = 0; w1.MODE_TOPBOTTOM = 1; w1.MODE_SIDEBYSIDE = 2; class HO extends w1 { /** * Gets or sets the texture being displayed on the sphere */ get photoTexture() { return this.texture; } /** * sets the texture being displayed on the sphere */ set photoTexture(e) { this.texture = e; } /** * Gets the current video mode for the video. It can be: * * TextureDome.MODE_MONOSCOPIC : Define the texture source as a Monoscopic panoramic 360. * * TextureDome.MODE_TOPBOTTOM : Define the texture source as a Stereoscopic TopBottom/OverUnder panoramic 360. 
* * TextureDome.MODE_SIDEBYSIDE : Define the texture source as a Stereoscopic Side by Side panoramic 360. */ get imageMode() { return this.textureMode; } /** * Sets the current video mode for the video. It can be: * * TextureDome.MODE_MONOSCOPIC : Define the texture source as a Monoscopic panoramic 360. * * TextureDome.MODE_TOPBOTTOM : Define the texture source as a Stereoscopic TopBottom/OverUnder panoramic 360. * * TextureDome.MODE_SIDEBYSIDE : Define the texture source as a Stereoscopic Side by Side panoramic 360. */ set imageMode(e) { this.textureMode = e; } _initTexture(e, t, i) { return new De(e, t, !i.generateMipMaps, !this._useDirectMapping, void 0, () => { this.onLoadObservable.notifyObservers(); }, (r, s) => { this.onLoadErrorObservable.notifyObservers(r || "Unknown error occured"), this.onError && this.onError(r, s); }); } } HO.MODE_MONOSCOPIC = w1.MODE_MONOSCOPIC; HO.MODE_TOPBOTTOM = w1.MODE_TOPBOTTOM; HO.MODE_SIDEBYSIDE = w1.MODE_SIDEBYSIDE; const T1e = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAgAElEQVR42u29yY5tWXIlZnbuiSaTbZFUkZRKrCKhElASQA0EoQABgn6hJvoXzfUP+gP9hWb6Bg00IgRoQJaKqUxmZmTEe8/v0uB2u7Fm2T7HIyIrnz88uPvt3f2a2WrMbOvf/u3PvvzP/sUf/N6//i8vf/lv/3v5H//d//Sb//Uq/5u8yf8hV/m/5Cp/L1f5hVzlG7nKJ7mKyJuIXN/hPwqXI/g++zq6rPI5u8z+WqfLre+zy7PrVv9L8brsMiGvk8XLmM/sdfHXal4e3ad6GXPdyu2ij8u/+uv/5cuf/OSLfdtEfvUr+dnf/d0X//t3H/7bf/hP//N/928h/0Yg/4VA/kogfyGQP5Wr/IFAvhbIlwK5CGQTPP+9z5uPeePJSW+yo2+s/GtN30Rnv1E+f5zxof9R/lSXv/nr//mrr3+i+5dfyX7ZZQP07Tffys//8R/l/9TtX7790T/7r/8G8pdy+/8XAvnnAvkzgfwzgfyxQP5AIL8vkJ8K5KsmMVzu1U7p5PA5AXxOAJ8TwPf7sX/51ZeXfcemqnp9w/W77/S7X/6T/vzf/7383RWCX3/z05/9i3/13/0PX//eX/2FyP8tIv+PiPy9iPy/IvIzEfm5iPxCRH4lIt/c/393//9BRD6KyKf7f488fP74/PH544dJAF9cLl98IZfLBZtuqterXr/7Dt9982v95S9+Lv+gF/3i7Spv/8lf/vnf/vGf/dF/JfKnIvLnIvLvReQ/NEngn0TklyLy6/v/34jIt00iGJOBlxAsdvv54/PH5493SQCXy9t2ueh2ueimKorrFbjq9eNH+fDtb+TXv/ol/vHyhX4Fxfbx7euPf/Lnf/PfiPyeiPyhiPxxkwB+fk8AvxzQgJcIrGTwFsiAEXH4/PH54/PHUgLY7whgu2C7bLqpQgHB2xvePn6SDx8+6G9+84384vKF/IPu8iVU9Y/+7C/+jWxffiHytYj8VER+X0T+oEEBvxqQwCMJeIngo5EI3goIwVMIPn98/vj8ESaAbbtu2ybbvl8u2ybbdtluSECA65u8ffqIDx8+6G++/VZ/efkV/sO261dQXP7wT/7kX8vl8qXIFyLylbySwe/dE0CLAr65B/9vGn0gQwRMMqgmhM/J4fPH548eAezbZd/lsm3YtssNAYiqiogAAkCvb5/k46cP8u2HD/rrb7+R/2/b9Wu9yJe//8d/9Ney6S5yEZFdRL68/38khG/uKOCnAwoYkcCoEXwkEgGDDq7CeQfyOTl8/vhd1QCum26ybZtu2yabbrKpQvXue1yvuF6v+vbpTT5+/CDffviAX1++1V9sO77WXb/66R/+4V/dgkbllQi+aBLBV/dE8LWRALwkYCWCNyMZXElkwLTMeMkga/P4/PH547ccAVwuctkvdxSw6bbdtYDbTfSZBN7e8PHTR/3u4wf55vKd/nL7DX6mu3791U9//5+/gkNFZGuSgZUQvnKowKgLWLTAQgRtEniTuEfwaELw0MJvf3LQzynud+53uG+X6y3gN9kul+2y6XVT1U27JCDAFVc8ksAn/e7jR/nN5YP+avtWfq6Xy9f7Vz/9w1dgRYngiyYhfNkkgzYBWHTg44AEMmqQUYQKOmDaiCIa8TmsfmzB+DnZDQjgcpGLbti2y3bZHjRAdRMVvb/dcYU8kcDbPQlsH/CrbddfbF98+RPZfvLFnAQeieCRDC5DMvju/vmD4JkEvjRQgKULeGggowdHkAHTYxihg89vu88I5UeGAPSOAFTlrgPopiqbKPSmCKreUoAAkCcSePukHz590m8vH+WbD9/JP335k6/+tA86KxFchv8jMvhiogE4JQm8XhfKqOAqx5qRPyeGzx8/cgSwbXcUoLJtim27C4Oi93+4v6VxQwKAvl2v+Hj9pB8+fZJvt4/yzfbF9lPdv/wJnsE2BogmyeCRED40tGFvksIXiSbgiYSRRpDNDZ6BDI6ghM+J4fPHeyKAO+zX7cb9t4tedMMNAQju5V+f1uAtBSiu1zsduMrHy5t8ePsk3376KN98sX/xE5FPAnm7/782o0DiUINXMkCXCB7/P94/e87AWUmARQWVvgMuKej9t1RLBp+Tw+ePgwngsutFFdu26WXbbl+rSvdfbnqAiuA23QcBgCugV1zl7e1NPm5v+LC96XfbJ/1W9y++fgXjA3bDYXV+MuhRwSPwL3JLMFYC+HS/LU8HYrGwIhwyNOF12SvgM4SgztdifP85MXz+KGsA2C6X7aJ6bXSAOwrY5OYIqGy3d5uq4P5GhABXuV6veLvRAf10fZMPb2/y3b7vX7+g+9v98/WOBq7GG7RNAlYy+Dgkhhb+Xxp0sE8IAC4SGAP/TbgVJK/PoJPBnAiwPKxsXfbbnRg+i3s/JAK4Q/4b9NfLtomBAqCickMBjy7BuywAUVyv8na94tMjCVzf9KNcLl/0SeA6oAEYb1i9g+FtSALb/bKL8/+t+wxXFMyswqiHoK4ToIgKqslgpg1qUC0QoYbvJZg
/B/q5v4szHmPX7YEAsD0CX25OwEUVm9xag1+agKg+nxQArnKjAtDr9U0+Xd/k4/UqH7bL5YsewrcBBiMJZPRAp6TwQgWfjM9vgRbgUYGL8AvLWH2gqhesCokeUmCSwPsnhs8fP2YNYMO2XeSmAWxy2VQaXeDmDIhApf33rD4PTUCuV+DtCn27XuXT5ir8VmCJ2G5BpBM8/r/dEcJb8/0lEQMtJHA5TAlqNuLRhJChhEpSqFabH3di+G1AGj+W1/dyAR4IYJNNnuLf6+tWC9CHHiAtFhAIFLjK2/Uqn65X+SS67aK+3QeTDoy/IG2ogQ7fb/dAtz5vBgrYGqrwNtCHsVfgIvwK07OTQBURVNCBFpKCOjqCHn5L/67TgTN+fpySAC56nwSUi256kXsSuFGAVyLoUIDo8/Pz7fdoErr/v17lk162HbgHvFpIYDfoAJJfW4sGPjkU4VNAF8ZEcLmLhdc7kljdY1y1Dq9yLiI4IiRqcLujb138KIPn80ejATwRwIbtBvn1cqv+2J78/5EI5N4cJA8qIPcmwRsKAHDF9WYP6mV7VmrgLuTpxYTcMEW0LAmoQxFsuvAI8tv/a/C5fV2ZMMiKg++FCM7RDPRu8ebWY7VG6VJi+Bzk35MI2LsAckMAgwvQ0gC5DQjd3ABg2HQLAPpEAlZ1Bu7VV7MGHDFRAbo3VKsTbAY9sPWC/uvx86gBbDK3D1eEQS8pbAeSgSwmhepnJb6uBv/o/PzHLzxWA/X7TH77De5j6AGQi6o0CUGfCOD2X7cXAlCFQABtEsGLDtxuOyQB2UTQBKZe5GUPXgkUYCUAbZJRhBDeuq8xBf+bgwbehDm+BFQi2IJksOocvA8ysIMfxluVcRsY/eB3JzH8GFDAXQO48X/dcIf9jyDHptIigDsFkEe066tBSETQUYF7ElDdYEBytN4+rk9UcBPfrKaZqFHWcw3i4J8/X4ev2//bSXqAhwTay6OEIPLD2Ipt8OtAGzxkwLw9WVFRjTc/qC6H3+YK/b1oAA0KuOizHfieCLaHHiAb5NYTIC9EMEbZrVEQt1xwhVy1UfBh8PUOquMizwaap3tQXfY5B//tea/NZdfhsvbz+PURQTDSGWB87VX/7WSd4KxjUqrIgE0IUkoKGnhIvwvawpGf6eECXJ7tv4qbA7DJgwpsKthEmmYgfaAAffYF3HLxo0vwNjJ0SwRWMG4db4eh1gPNm18vQ+us/0eGmxDemu/fnM/X4evq/8342ksGHgLY5LyT/zg0wM8lcMjgGFXwqIOVFJBQw99eCvF9oZL9Mfl3QwAvIXDsBRC9R+fz8x0FPBLB0xJEpwUobrfAkARgIAF41h3wQgP6QAmX5E/7eI43IxGwwf/moIkRyWRJQIPgt9CA9b39nzt4bYUWjAlCjWDPgv8IEjgLJfzuaAsrv9VdVG4OwOXW/fdoA35qAdL0BDwvf6AAUVHd8LIEu94A3K+Q+2YxaB84MOH62P//qoo38fCRDERE2zf0JfmDa+MieElAjcDPKz+mRKCOtdgGtXaBjgNJ4H2owSpNeAW/rRH4CaHSpMwnBYYycjgSJwfie9CR6mPu20Uv8kABF206AvXlBMiIBPSlB9wjBW1fwEuSb94296VCqgMaGCt/G1BbExi3IG+r3a3J6P48Gv/J0YmEYoiGY7V/SxwFCwGoE/xa0AJ0CEiV9QPCJb1OJ5F1VTjEY2/MO9AEJvj1BJTQpqLfTlGwjABuzT962e4IoKnyrdh3+/6mzDVJ4PHOxj0JqGKoy20+wBMN6D1gLWi9NQHfVP5MEEPzjGYy8BMAOnTAJgEr8HUIejRo5xrA5xkR5AngmiSHs+zDDAmMgWzTg55GSJEmHE8IvWPAoYTfhWak/Wn/bQ0CGLSAjv83SUEfKp5q24LXuQICpzrjrgWoza8xVE00CQCORdhMJuTUT/rjuls0gO4Iby8BIEgK6gS7BsGuTtDrScH/fR68biUHNVGBnxjeNyHEvQe/ve3LZQqgG3rof6cEclsNflG9J4KtaQ8WHcVBHS1BtHE4QP9OBMS98mpbKTeDW7dJwRsnHpMBTFJpV4I+b0kY/NqInVFSyBLANbnMSgBM8F+Fqfxq/h657/Up+GaBnwV9hRqc9bZ/vA6vu+T9E8KPJWns94UfTeCj2QXwCHS9dNL8Xf3Ho/rfewSeFODGDV69AU0y6NFAE1DP3qK++rdB7/1HRxf86gT376zOr99T/h/ioBiXWQkgQgVeIrCC/WomhDmQK+hASI2ARQZKooHMLdCJwGEBBXC3+uERwg+VOHZ9ioAt9H80AI06wGgJ3nQA3BoCut6AhxYwgcPOFnxuFnrphk+NIKIGrWPQtgz3b0i7Y6D5rs1GKqTop0nQX52vmQC4BkjA+r4a7Kx9WLENGeegkhSETBCrNXIMdi/444Rw1n6E96ry7OPuj8UfLxtQ78NA2iSBbg7gIiIbdDLsb5agPhLC3RkYKv8NDbS2YGsatNRAG2oQwf9ZIOydgy1MAzBkAw8UwEEIDzSAqdPQ6za0PkeJAMH3Z0wXniUSZoHvBXU2mcjQgv56TedIKglCpIoQfgwCIjOytd8WgN0bfxoR8Fn9Gx0Aj5Zgq0lIZbsH/ibSJoFnS+C98g9ooHEELI3gliy25yONIiE6pb0NfBlyNEYyENoodkKwgl6I6s8kARgJ4ZoEfuYWHLEJa0LhSBXm7kImGeSfVdoJ1DO2G7WXsehAptupSOoyrCSF904k+6vt98X/ZcM98Hsd4JYIXhQAIg3/f9AAUYhsLQKAtkHVBnzjCKhOoYl2ym+iBtvzDzQ2DLXJ4PUmbJHAVnBQX4jkxfvHhNDqAdHXGQJgv0aSDGItgOseHIU+K9hXnIJzkoGlEKzNHagTdJ6VWEUH4iCKH4fd2AwDPaYBm4Wgng4gQ9V/CoGiuNmD04AQtNGMGzSAAQ2I2pzfogY9LRh7BrbOh4+D30sAencljFu2CUFrwY8UAWRfWwGvVOVfbx2uIILM0pwDv082dUTw8hYs8L+uIWiHGpWgClnAa1lMPJogovvvbePPs/q3Xr++kgCsfgB5oQF9WYKPJqEn6G+OE3i5AqouF59FQOmahQC8rlPLj38kg1c2f30vw+XaoIX24/pMGIgSBoZqoH3wo0sIIGlA9PWcCPrAtpPB8eBf6x1o6cHra+2+tpIFP4PgBfxZtZUJfo4qxELT948D9ucK8Mt9+ccjIQw6QJcEbrD/1g340ATuDgDkFfx6twSf1f9xvuBECYxq/7ythQQGm+5JDx6Brw4CkMGT3wgscCUoQ4sU2t6DR2ciBjTgtcpenQoZVX9NuL4Owc+dVaDursYVkVALX+shjSBKBuvCYDUZjE5BdNkxdHAUBexyHwB6NP7Iyw7sxUDViwge1t+mz8B/LAvVx/c3PeBBCToB8IUGOgqA3iV4yUg6UAOxaUFHDx6CYS8SorMOue0CCJGAf5YfRhoAI+A1CvwxqNkAY5yAIx2EQmkFfeWOXi+nEdSQQA0ZHMEItiagJArQxDXIrj8nCfQi4HZPAttrIahso9oPQ/
2/JwV5JQU8zw+7I4D7/sBn4EO6rjw0FR+i3Z9fHtahzsFvJgM0X+tmVH5vaYiNDGAigewAz+gyNLThnjCURQFR1b9d3lZvnVqmj9mEPDKIUIC4KCCjBXywS4N+otp/Hk3QVthOkwEKlV9PQwXjT7s/zwF4Qf9toAAzFdjuaEB6S7D1//U5FIQu2MevO0rQQH8ZmoXE6B/IkgE60XCjVoq8gt2iCG0S8L5GdxkM1cGsfsCMArSCAnrr7dzAZxCEEpepvB8tqHJ/q+bmJGGts/AcAXFOMMeTwC7Pw0B6CtCtA2vWgonqBQJFSwH0JQK29OB2kvgj2HHXAoyeAIsCQO0kMNECAhFMqCBf8mElAkyBbX1tJQP2RJ/ha0gpAfS9l+/5n00CkrQpq0MZbOdAuxmMvHswog62jZj7BnYQe19b14kxNq2D/ehX/p68HEcF+x3yP7z/V/A/q/5DA3i5A/dzA5pdgbKp3v3/wQF4Bb70WkCTHGRAA6+KL0bFl6FJaFw0ImZwm6igSwbbwPn9RMBWf3sN2JgA/BVh/Rg0kQBgePf6HglAHLFQwqQQOwDjbdVxNZjR4iM6Qa3WxwvNxh0JFb3g/WzFQQS8b/ttKcDWoABtUMAd8j9hf0MB2uDXhzX4CHj03L9DBU3Qjz0C0l4mLSLQPicOOwZoVCB6P6dA7nDbGkVuxcNr8PU2JQO4wX5trEqmccZaHU4q8oCDFOpzAnOwqyMIMktNNNAHouDGxO37DgArQZzlmp/14W1QlqHTMaIIx7SCx0+5yza7AKJ3IXBrNAHVDcMZAU/BT/vgv/ULPOA+XiLggAREDF2g0ci6xNDRglegd7P7TWWH5oJfayliEg7bScQRBVgI4Ookg/F6rvpLWP29swREqA3CaG8/FpKqS8DTAV4TiBqIqtxfzaQRLys5I0XEFIFrPbZRQb+16Fgi2LvJv8EFUPW1gGfQv1T/F/d/HBnccP7rAwnIIyHI4ArgWeGbU4eHy6Tx/EeTZIb5bo/BsMBjmjBE08f/RB0PHYBd9eVRAGY7cHRwiBf8WeCPHY1bgBTa9xKTELzEkQX9CPtl0gJiqsAmCT7I8xbjivh3JGFI+D2nBcSJQJ8agDX+O9iBL7UfG4bzAkcaICrbtYHz1ycSmGmAjJfL3CMgT3tQpmrfB7gxSzC1DnvdhQMieG47u75+kTouKNkM8c/+vq/Q7ZYjO/hhVvRq8F/9gGfhP8aqE9EIdR6LTwJ1h0BItyDqB8iFwuNqASscRnYioxOg9ApvnYA35f8e9Ohbfe8J4rknoFkO0lmA2gmAG0YK0DkB4ieEjiLoMD8wBzom27ANZkzIoU8EMHk/uo1mzeVoEoRWKn8L/62EYAX/lsB7D/LXg74uAMr9oGivJ0CNJCGD6i9DhZdQF+gtOp4S+NODRzsDVbhdgv4BqTMNyIL9SCKwL9/FGPp5oQKxIf8A/UX6r231H7YIqLML0Ae2GtrADOvRQH5b/MPE9dt9BGLNG8jVTAQvIaK5TtvvvWQgDvyXIClUA78S9Nfg7VtIBlO7cbsEYkQDMot+ygQ7QwmOawTHnAM2XUSnJvPIYRYMmYPS+sv3J+cfP3d04JYIXsF/EwMbBKB9Q9AY+BiSwFj9mzrSXmcJhFPVHySTbgHJCPvRQ/z7G/SVUETsg0ZF+i3CRoCjhf7y1A9mOiDD7TwdwEoEXjLwAv+avLE2B7Jnb+OqDpBoAchoQJskxKnss0vu7Q2YhcDv4ySeLOg9GsCKiUIihP7yfW7zbTsBh0TQfN0iAWn9f72Z56/Ax9P7j5OAH/Qvv3/QxKfk0DgDuP+R3USg3bzBC7bO/QT9Eeh9QvDPG7glBQzJwK740lAFFgFk8P88CqDGAa223YckWYhr+c0BPdwetl2ocnsfzePAWcVnnAIp6gDVhDLyfV4nqFEDPxHsbWD3k4BDkN+pARqKMLYBPzYEvxp9xmCHQQdgWH/9EtH2TIFpu3AH/cdGydv1j0TQbRrq+D/mLcX3ZACZ15bF378CG0My6Kq/zoGOQwhASDFwFbxyNGBuSxbCEhQ/uEPe/6gAERWQObCVVfjPpQX+rexxYhYFxIkgpgX7Y/vPs+Pvxf9vwt8kAs7i32t3QCP+3SPaTwIytQXP38u0PESm+YER+o9B3vr8mETAUfDrEkPI80ck0FZ0dXh9U+HRbhey0cAc2H7A4y4egoD6y8JfkBiigLdFP8v2W00E8deT2IeAKujZ/QAVKpAtKI20gLWksHedfgPcb+0+NEHefd9vB9rayi8h7J91gBbaw20MsnWAF5xHkyDUCOoXp+yrOwwxcKj0aL6fFppaaKDv6OpHR5sgx5BAlK/+fYhuP1D196o8e7lFBaKqv5YIMnFQpd0FGVR35RJCnCDaABaXBtgbiSwtICMtalKC+1JQ6bx/PLcDPQL91QFodQNKpwOgF/9eqcBxBBqRcKAAVk+ArQOMx1RYGgB6naDhlK+uQQwJYx4meQbxtNnYQwMjt/d4f3M9ZE4UOld1LAh99fbfzOxiEkKFCkTJIUIMUeVnJ/9sDt8/e1NEJOi9oVHDGYhgnSLss9DX2IAqw1zALUncKcDr0FB5NP+0cBQNrEezDiyiADPkt9qGpwoPdL0AGPx/NOKeyf3b9WJNdfcFv6bKd2cLMJVfJ6Y3B6wB9WFUfWWEwKMfGiQL+3bz9XGQz2EHKhF41GCtZyDi/gUCsNhYoAr3UNJ58YidHKqnMb/6AB5J4N73/4L+t7mAkeeP3P+1LNSB/l0SkMEd8DcEuUlguEw6t2AU/PCE/q++Akw6QFf1u6SBrj1ZnnhG50AfkoGIdf7gJv1KcSfgzWWkQ9U33Z3tHXYASKJ9e/YhU90rvD+q9Ej69/wxYJVs506Eg/r3DkMDzEdDBRGgcZay49XihLA30P+l8N+hf1f57/0AoxbQbwYaan/rBMirE9Dk+sBzTkC8JNDEUlv5McB8PP19Y01Gayep+hC/2zvQ/2HGLAurowsNGlA1cnqGGzeH5weiYLZm7h3QQC4O2tXdhvMMk1ZS5ebpgI8eMrPvPGkwaxayk8Yc6PMOBPEdC1XZ+2UfbfOPtxLMQQAG9BcZFoF0gp/RKjxe7+oAw9T7ZPWhgedodgz0gf5KBtrtIZhQAZpAV1Bi36w6t98qVfH7hqGI318lLCjLCUFlxRHwqYEH9a2qb4XjWvDT7kBwfbZA5P0+PNuRuW1yf4yNQH3zzwv6b70QOJ0G9OT/dhoYRUGT15uQH/71MjQLtQlxfDuiCXrtM+SkA+icQdH6sU/xz7Ze7FlubV4TpoTQ2osdpaEjtqADmEU7OkBEFoLeC3IWFFeswJXKXzkboNL+wzcFHU8hTGKIboO7CLi1/P+5F+gydQhuvRbwEgxvtACmANikhLTbj0gCYk8KdlYgmj+4Ymaod7TwahwadICuX0Cm2fE5iNHPK0x/CDV66Kyg1MnqjNFBnhBoLQCgUULfaVe5nq/6EQWY67bXCszUb+7232fVPz51iGB12owK9peyP1T4raMFF/OEYJP792mgXYfZ04GHMAhBk
CSmSj+dKqRPgVFGHbpLEGMiGFeQWfSgrY52VxaeDUPSNJI0P7NoisG729HHl78z6hxfs9rV3m4JjgM/lsui2qmThjCfDFSb+I9vwUqG5wwL55U7C+6ot8B+7N2o6r3q37T9trfpjgmTvv7PSQATLLeRAOZhIJHBQfDQQJPBdUwEbVW3+L08EcEE/9G4ANrCeWcnPKRHDupbNynMx5AA9IRYLmrc/YLSiD5EaEBS/s/TgnU9ILcH19n+CpHwegLejx7Mn/d25fdN+e9U/1vgb7bqf08MOtf8EXxaoh+GY8L6gDfhvs4i6HQ7seYI2sv1GchdMsBIG3xlvxcCRzdgCPTn+6q/TW00VE8Q9FaFv+R2VlOM1vm/hhjhDCdgNflVKME5B47I9xT8z0YgPAJ8myb/LqHy36j/Mwqw9AALxuO1JVjiuQAYLcFzIhiEPe05fk8tRjGw7yWQbsfuLAT2VqOId1osnr0F49VM8INACPHDoBz4B5mqqSnUgyh3ArjXxfQH5BbgUS8gP7aU+w0zHD9GGD0CGHf+P1p/DeivlhU4BbxR9a2kYFR58YaDZCUR2P0DMmgED2eg77puegy6PgDphEB0CwlG/i9d+/Hs34pBEQrBn0W51mqGnJAk3ACCHeiqkQ1XFQA5AlKH7Lk8yJKWY3/nym14h2C3JvxeMwD9ZVMz0BPMi1n1RbKl1cYhIVblF3G0ATsRiCMUvoK9//OgcwYMoe+ZKOLlC6/Xk50br9NFz9fanqA8UIYSpCwlBO4kHc4WLLBfBHVaKwKgLQjmP4Un61Vq+3s7Bsyi0WztmLjJwJwFeE0I2vD/1Q6MVwefxfUf32skCPbCnxQqf+QMPEUDHZ7vGeyj020JgkPXXwsldA7SYR1RE3h94NvNtugswcgxXEkIcBPCGZ1rmrgDC0A4K88nm2fn/eTnpQtWyZfybRoK8Dro4zYDIMGsf7saTBzvX0SMbkAD6o9CYbsfMK38cJKD9l2FJt9/VGs0h5Gib33pxMKWNsigFUh3G2un+/N1WUglI/EEx8fq27vUNnwsiOoKecL7kQS8VnWAGCFUgn6dBtQhv40CmIYggwK0uwDHRGAuBXVdfwzHUjZzATLMAoyJ4FmBhzaWBlrHld9CCWpPHRqofBqMReMGTJ78q9rDes1Tv7/0m0v0AFHXNR6P6g30SHivin7V1BOhh3iWPwvps/yE836L2XiwnUT8x2iHgfqhnwn667QHEE8oLQjEvtEW7GYBZDrDVkwNIO4G5GiBDf9fGoFM6n+vbEtzXwP6u9AduaWnGYSLAlVdl/AU+ikrSeEIKgwdaZ4AACAASURBVKj4/wtgHcHtdO2nWKcBkPfxcvnNQvsj2Me9f02r76T8q0IBn9OLKfz1HX8yVXQYGoAB/2UeBQ5/5kCL6+H/OGGoRnLSwdd3oH8r7KkGTbgIxEwVWvnF8KOpHnyzfF9Jod5Px+IF1h8owyitDw/XEgRb5bPqbt1uvn7qBIQ16vtS/u+DP3cR7CH0WWJgd5mTJKYgNzoGjQrfvu99NDBC+bnyW1x/qhTatv2OaMKgJWPvv5kwnMgxHYGFRtJW8VMl3uP+MgoqSZyWFKr7+KIDw1d6+IiOgZI4+d5iYL3imzbgyO+tph9t2oSBxOM3ugHtPoFZ1LM0hF4kXNEBssvVgPdjdXZWK7uKvyS3q1Xb1WQwtVDqSUggq+Vw3t56JA2cz7PXOwGNW1ecwxPhfe3QEUsDsFaAz8jg0nf+iZMAHNg/XSazDuC18Iq1HBRrOsAQ8NLB+16g614jmuSgs3bROxE55D+WDDQNA4ivdMJ9M1b309UqknaDU8ObV9/PwmMPATvTMAxpABLBzugUtV9bLdhNDQA+7B9tQJ06/7QNDHGSwtgZOCIA47InIoDdROQGtt0U1HI3GaoUnCnC/rzBMQJteN17+VaAzYNA7e+PFqHQUyXPUYB7iQYa5ZFjq1Zqpx8Uqu/XT7+6BWC1Xaj0GlBIwMoHu7UzcI/6/Acb8KIq+hzmGWmAYnADrIpvKP7TZeLaf0LAeQkGgebbq9FToI44p654F47tekKkI0L5PQNZPsDwPBpy/ni+wKMN76Vav4+2cFZFf8+JwAraMt0DFB7beA/u4Zz/a+RXx0M/ct4/jwaNAS8G17eSwmta0Fhx0VRxJkHMivso+onMXr+YwdWKbgioy1jp4x4AzIKg5lEA7wvHEYCRmdx11TAuT6lDLVl4KvXkAET9P4RT8H2u+lg9EPQIpw+/NpJ7RwE8HaDv/Mu4f3OdNkq/EfAiEiOANjEALvcWL9gfFV4NZbgbQc6qPky4Pm35QZxtH1f4j+P/jXuaYPcWwIEH/fmEPBoAO4m4LGxV3txOQqDU+dXgey+UwSzuqP++uImO/u/6ogCb7wTc1n61sL+vZi87rxnrNas+giTg6QLzaUCjIp6JfhwtGI7AjBBB9JjDY4ePYVR6ZPgN4owVv6Q2N5hhVHwNeYrM+w6dN6K1sMHZm/Ce7bHe3dzKr1xw1w4JrSQMZtgnoQHlr18fzunAszD4qurNUg/TDqzx/lfCaO6t4tACMUQ6P6htWjDPC1hCoZ8kpODzJ70MUR9AODcgwyqyPhmE+wfHYB/hvSqt6qeXUShhXH+d9SR8DzrDaZZdpSp/HxqLMQuATgDU/qDPRgOIeT8cvz/h/XC6BtE7ACLOWPE0KIS4UUjmZaJ2grBphiWgT41BUVWZfP3AnEIT6OrfoF122l2rMycBoU5i/OXoUZ4/aglsXwLzHNU++FVF3qikOj5HXm2PBitT1WuvJRAB+6O//W0/PY8vQH5IrAsMs/WuVmAdHBrQgrbOxJShXwRSsu08h8JMBpo0+aDTALwV4tbswgzHrftG/dJKIAQb5h9KCssWIMeto+GYqG12/HWGjx8kzqNJaa0noMWOr2KwW01AMwJoNvhMQda2/RKQP/3ecABM3g9uD6BY68Ntz9+nDOMb5iV+hIE+dP/Zs/wwJhJ9mgBnohBuStABUXjugF3hkXF9ZZJAjefKdHZCc389LoStKvIl7QIEb1d9RyciQgFDI9Cjyccc/23Aam7/PZJBhgDgin5CtQvbCzX8ip9YgIFtOAt+w0owp/hOiCWgEGbVHuYjRigPGR/YOnEoqPDoV5z5YqB3mRq2ox5ICmSSgAP1Ne+XV2NE+/vuFbCTRADxtS70VRBCjgBk2OyDUQiUgfl77b7DwaHm2rAZ7osRSOOUoHgKfNBSLI767+oDYrfwZvqChSpGfj3pFwZFsCJg2jeIQQBUiyI4WgD68ww4qO8khuWkkIuDrxWv2nv+UTBpJYiPd0KemTA8qqFiuUF1jWS3BoG6pADJq751JqBI0wvAVPyMQvjcX1zbELltKK+zBiXRFiRxG+b7q3M9xuLdzR8g0gCGNzSM5gNYfqGO9CBT8OHct6oB3KsSDBisUnwsFuISQaRHxDSv0vptt2oeLHMERfRn/FG/Cx01EpgIQG8LP+/i37PKw53xn6sYCM4/JwSRrCnIeB1ZkLsawDhaPKv/njU3wnZ/dBdGE8+YTHSG8+of
GgIjsC19YnwdM/KAnTSsqj6ig7uGgIPw3nYFzhhIIvriAxFP9CQd4HSlnzgxONIdrE7A8ZDPx9fjib8ifgegNIliRgdx95+E1T7+3nQVNNhEzDgGA3T2rEDLduwtPpuuouPcs8swwXFjdTaMKt+jA5gUAQPcf95KJQxYU0cYxEDvsBSmYuukp7AwnqniC9Afa5z8vboI68ImT0t26CvwBzSggkj447r9IojvCn7U92J/Hw0QSdwZKNNjxPCfSxRqnATkdwpOwh88oc4J8KTSm/wdbZjrc+4iFP8YO0/5JJDCfaijK5xVXevqfg6zGRrQf83chvX4aRfAE//6vv5+6490U4ADdO7QgM/5bcHP/n4OtCQhBEFeDWSvos8DPq8/IwzLzjpa8/U6MMSkBklDm8e0mn3QIY7XG1Om8wzN48y7HwhOK3P0/ZwUQHHv4psbdoVeb9VlAjChBCdtDDpOKTh9ZfcagOYq31RFjN4/gwBYzp8lAwYNwBELhZoxECeZxMlAzWGdCRV0fQWGHo8+8Kx+AAxnCIzowAxy9KvNepWfsfp4RR9kUrD88CPVTuXRybhqqTHcnxEGndsgub1Gdug8yz9fHt3Hpl57x/mfCOC29FOSQ7/noAZR5W3Ob24UMpuPYAYiQrQgk1gnFoUIKr4vKFpV15pHUJO3Y5rfH3UFHU4bGkU+NKJ9f2hJyOMxDBDpjAgwiYqvk5TqNl9EH2Arb6fA3yaA4cBtPWewhkEcIQJBlGzYp6zRmr1v+e3Fv27xpzvyI44NGDkCIi7CGNV9Dw0M8NtHC2vUwHINumCGNG8erxOwtQINsW88Tlwdoc+F85nI559ngEDpt2F/Uu3hiXYrkN/pBFS26hYDAkFgErMK67y9mGBA3L5ore5izf8b3n805MOq/t7XU4WHv1DUF/5gugCSOAIW/59uMwl6CHWAib8bvfxWl9/rBGEMTTwDfG+ezEYG4yk6FvRPuPwE+wvc39IRjENWM+/cm5b0W4Pf4WuKUnw/vD6eDbB1ETs5vl77Dhnm/51g6wPWwQAqxnivgQaeS3gy/u/1H4hpTPrIgHAN0mSgXUX13YP5PMIuQAfBr/f70cdeE+QoCX3i8nFMLcAjInBoAIYqt1LhC1WdtvmSab28AYffaeivCB+ohdYQgfUa/WS4ToMsNLHLc9nnvPZLwn1/EefPVf+U/xvnCVSEQEkEQEnEQJO7S7RvYDxNeNYKrG7DKMhtsQ8cMmhgPKKKj+F7CiHYFR5KIIPxOmg5IVAtu3ACQSPh7CzUQOgAej5CWEkIe3vgxz0ROGO//qYfz/dnLT+ZxDr4QW0eNCJBorCFOVC312Ec2TiY5Bk0cAaQmiA1VH1MOwDHQ0kHdEDDf+2UTWhS4Z8diQMicLx8MLBfverLcP/jQzF0P8EJj5+NGK9RCz755S6F/f1+X/gxeP+Wsedv+vF8/54aSPJYFjIQd624MDz/UDLQnr8HU3ztKHRf8Qeno1vyAQJBaLcMtTV3cvgP56COCqd/QP9xLgBkH4BxO13n4hNUDtACC6G1S3zqooZ6Ba4lp/zcAFb7iERKQwQcF39IFJjdXECGADw0IE4gg674pYAnk4HoHPx54tD5daO5vxrugSkMjgiiqc7TVKAT6AT8R4ckbHEQCYR/IZBxJgA+XZjsR7vaoRpIxWqeqfXuGC2CxwudicwePEB1kNkaZCuwyF0DuKv/4sz9mzP/Qxdg3BDkBTMC8Q+loD6UGBzx0Kz6eAX/KArOQTlPHFoI4vVtf4rNuLrca9edRn4xBP7k8w+9AgZCgBfEUZWfEs8iFNZ3UO7TqmkjCO/rWdgco/yIqHcQWaC2EGTzgz5y/iXQAvyx3riyxxV/JeBriaGB9OrTA5g9/eokM+37GszqfA/UZk9iW5UnCtBqBl3XoNN6Ag/+zy6A5evPAp+TIFDn15gQw9rjrOzFX0s2JBVAxa/nP1a6AsNWYGjPNGPLTQgBsNUFvOA3Ht9o/rGDN0tWOCcxJGp+f7++kkP7PxcGv1+GjkaLt/fawpwwerQxBJNW4b+PJsYEgiAYYdEAGIlDNaAbRkIgK3ut0jKByp+8yz23X6GttmBmjwDvChgiYLP5V/zhH6/110sGcKo5CkggCngxnIPoPja0j2B+1BRkiYJiviaLJqghDI63G2nAgAxMCuDdnoD0wIQm+urMB3VuAwbBrFGgGgnhAFqg9+ujKsLxB3qGCQNEEtPinIQlAj4WgIw7/iXc9V/x/yUWFs2KH504bAh4aYWf4TrTLGTy9YbftyLeVOWNfYNyt/ji29mQnqMAltU3ioTtbX343yv/1u0YPUBz6zB702tQucnX0gWaFh6DgPdmhXaapGotw0SFz1qDiTMdd8h45HfcqCPRUhA3+NmKz1l9teCPaMd4urGaewRitNBDdahR5c3AfQmDCFT9vmtQEwqAYXX4XI2n23Z9B/Yb1FL+LWox6wHGbZSo6FR1LzyG+3hriSZvWT6jfXhl2cmQZJDrAbuYAqAHo1GA/EOgD8eGcU7A8eDvH4fQBuAhBL/Zp/vamPTrRENDGLTV/7E1WEPLDlP/PwzU4YhusIMUgfIPAr6Dhv5R4y2r8ldFwiFoYHnmr8TAHbhRQSZOctH598ZYhqt6wP7q/ouqe77RJxvzFYaji/z4vna4v5cUMDXqDAJ5ytktqtBDckyjvJg04hl16LB0xFfyMfD77PZjErGQRRjYIfSvoAXntks0ok8MsUC4KARWnYPlJBeIgLeFrUgDOHYCag0/XNAbWgRwQuLAsaQwIhC1g7+jCNKuT38JfnYSyTi+QQEwwHeT4/dWHYxJPxfOj5oAnRQqgU3YgGZSOaDyK3n/qkDYBKptzR3oD6B4fyRKjp2AzSl80YR/3P+/1vBjX18Jbu+YsrMRgbqPP8zrDLTAaupphfeZtyPs9BPztpLSBZjowF3woYRwBwOWaqbev15b7X4RWsiqYiY6ZkFEIoUwUA2OrkeEQE8HYNyD/rl3m88jCGgO/nPW3xy8x4Q/HBcM1dYg5q8N+B/SBSYhtD0EY1PRGLDoKIBHF3yLz4H/gSYQJRETgqeB2d4vC8L2NVnQn4PoVJJAcP0inahAfdXVI8CFszjRagCTtRdV7Sr895NBpRKXIT64RMFw/iw5eChhEvmmyUIH+k+Qu3cLzOAN6ILlFvgWnx3YWFDz0f38ze9GlfP6UQ3ojEY0gtqRIEbA5/WgQFhsEuIeL75uTzvqHktAWfj/OD6sQXssROcGiRgFn0QVkld7OznMDT7CJKzhMIqxW9B+LCOQdH4uyxIcE49VTSeLj0wKjzcp2oDXQA8YoDEGBLMW0BJw+eAxXejPV/IXd59/tp5rVyYXDw5BlRetSpQAcvgfOwVM8ObzBq/AQ2wX4lwkQV3vNhYFfn2LFgaoDU1ogqsfqGkJYmrj9Tr22KQwBLzbLuzDeA9yzyJjVRfwegWq0H+FThDPA6ZhZwX2M2Kh4waovCzAWJTzD/qY00c+6PM8coz08VNqglzx54LfHuTJK7z2rwX35ABLg1DzsZ7Qv7l/f2y
XDlbf4C/irg0MJ0aCuD0wP74MrxfdFlX7tq+vtRdCpvt599EG9Yz3V+P+Oj/n4zLruZHcJ7oMt/MNp9eD6HEeFb6/TMfbWo85Pb79HJo8t3371/PuIAZqMvjPC34nVV6ZB4hEuA7AzA5cfU0y2n6ux89D/35/n2/vWY5Bf0qwf3tPLISO1Tap9qzFB6eap/beqI94NCCbGwgqOItY3CGl446CaQ8i2Q9g0AvmgJOnBoAA0gu17tsKtKS7D4udgCYERy2QIceCX/P7mBW+g/7D9S6Mn50CS0eAoQPDcBjopIA5+EcxEjLweRjXq0UbLIjcBxsGx2IZvlf0ATjz/6qypAmY7bhrk4ahsIis6ccXKHdueAfUgk+RWPCLh42c6zEeKyJpRTdRAOqBbl/Wq/uT+q+Fx3FoTIuCzc6+hN8j4veGjuAnhSE5gKnco3A3XwYlq2sq+lmP4yEOpqEoG0M+mGDYuYT0pKCFHgLHKt3T7T9p8GcWH+n1UwGa8X6kQt2x4CeqPexegT6o/Z4Cr313PHdgrsS2ZReLfpKIf+IMFnmVmwxQ9AhithYT73+p2s+JIVfrjwiHnpAZrSsr9CMstQXP1+1+510N/q8E/YoekMN9OMFvi5LvkRDsy9rgFCOoPdpgaQIWBZjf5KCSQszZJ1ivTvLokpen6tsJAVND0NFqb6GUGg2Im4Dyx9Pn7/0dm4pADAslJzTv+dKNrAPQ0wyySm7bj1RQgbAXsRa4R+mBJzpaQmHLmy0BLoL+Nh2ZRca8uUc6P37k97n451fvTieAE8BdZ2ItqFEK6oOJIYPsiU4woo140Oh+H/UC++gatHYcOFT+2y3AYvD1rM/fpxdUcsAi70c0OxAEP45X/hymE9XeoC0zfYhbcqfbhs09HpwnKMDR6g0mmYyKth/UcLl9ITGQ8N1S6s+gA1HvQCc2pluPvN2Br8SyZyfyxPP/VhCi1L1HWX2CQCuAE8TIq/sBYdANZmTIwqq0sb0HIzhhugBeUpBZLFyA8y+EErsBUYDZHYN9QAAooQwOws+uQlhdESSSqk5Qsh8LSYI6LDS1AbmOvLlRBqQIeITvM36+TP63VfE5hFClCTr9zEyVFwS3STQBy66DMHB+PJWIrfgGnYBx2dTboPa2X49GaBVlePA7CFx4iaGi4ns0aLVjMGvtPTDtmO4XEE8E5Kb/8qYai+NHl60LgAICcUCoJPVeiYG6Pxw/X9VFNVbFn9FNPzXoIRDTyzcpREYB5Fm1EQQn3KRi9wKApR8Tz48SwxnV3qM0q7ZhpdKvr0zfY+gO4oQf+EGPFYW/Xf5hwWsUgxiBbShGoGIx+D2eH1h2EeR3UQMH4zMaUKr4033nzkSkfQADelFbLOQCalxdxvN8mInhPas9bxtGJw29Fx3Y8429MAS0fL33Oeo7qFZeiToCC3B/VSNYuU0fgDnkhxGgMFdxiYEY7MYel+OHPH30IMeVFK1C79l+QdXVpFqHlMAXEf3EYDyfkkGdNvJ8f3RAXU0jpgM7jMNA5yCrtfzOicKG/M9bgEkEjqqPPDEcDfqVwGZv6zcO9avDfOhf4OmLFd9OLBHHdxp51HvOBlnAoQksYjASA1xnIhPsapTCPjbsGB2YevpPpgM73EYeSYIftgPgte6CWesVBB9QEgfnWYMgoeC8ql69bWoRIqYHvSIv/u26bj/jdqZ9KSGk74JRo6QS9PuTiSHm6Z62kLUGH0UO4rwWrhtRETkR4iKRdI8giJ2D2nUCMjsA0TXiVDb98NAf/rCMlajA9wesWHZrAe1dlwRyVI2jx4KkyUHSx7YDe6YD4tOC6XW01puEdAJwaEJzf1uATHi6ZlSCpBQscsh6C1xRcWEG4bCFeKcAVhVlDu54JQIkTT21hptIT/Afk0kMcS9BKfjBJozcDXCrtgbWXxbMAw3INQIxtQJPAGwXmYaBbYh4SCsuKwLOAQ5awKskCMmRg8P3xwlBfbosQaDqyZqBkyQe1CLQACoTgN4qbyHsPwkTiF2pYaj6MAXBmUosQHnUEYCsBL3MW39SNKMJ5PfoBsT33DVJCEbFnBCMOkHfvj6Xq8uw+dgRIhGgAiUqf5QgKDFyhe8nnYrlqn9sG1GoAfirubygX4H+8IM1CmQrMFAJ5ExzKIp54nPoVU2Auh6eBShDlTV4u5c4HE/fVvjFrsII0Ik6QX+Iq68jB19ziLoKC27FYe0gC+j1RSS+BgB7AvAM3m8HLdy5fV60C8RMVuhD1ieQB32MCCq0QPJuvuw5IHF/geMKwOPdpmsxBwVEfGEOgeincJqNmuSFIPhPq/xM81CWIIi+gCFBqDX3QPYd2OcCRo6GZBoA3AM+00aesAOQ7/2Pe/vBCXoguD4OBD1WfPwClzcui12AuH+gC0gEwW72KfjBCQRBr05D0IQc7N8PzOCMehPWK384MPVDJQim7yDdoiRTItzzFV/ZOX9sYFetP0fsQzb6O7wOoFjxk89YoQXv+BmSN+yYHYO+BsDRAXHhuJXsEFbdIEGZQWUkNVNzGA9NZUVBIQL7jASR0AclE4Pb7JN3BO72mG92+o8UG3nybj+mASh0FsLKn9GPxDrEcS2Au35BzHO1BksriIJdpqWjKR1wlpR4fN977rZqI+XbYjYDgVDpcYQalOYKMiuQbB3G6Pu/HlMbi9a0EMkksXtjvvXTfgMKAEZRN/i/O7yD8Da2S2Bdh3ICWfp8yuMkYl5a4df4vVWt4UF0yyqEnaT6swYyWB8/j111Y1ERS9oB0SLMtBGDEBD1PEHwtdjUEAHnqmoHU4wCDAoAS+lHwtu9eQLUAgmxVvAuMB9cELMV3m8EUtcBYYI9nkNIEEJYrQeUHfnzzRyC39j8CgSkir/E0P2odnAmAqDnDIhqrtV9BDNS2POjv/0pwKr6z1h/PMz3uf9ykFYq9TtoAXSwpz0HljdvBCVAPY6t7osv6gFhMpkX13rcfXQMIpuTsfTibkfOPRAC2meLRipI4mDPwMD5x+v3+Ey+qEfACwoUEkKQSMZxYJDz9R68PyP43yvo2aYf881rNQbZgRU/jp80QnW/hdXqJxMvCFxXQSNHpE8QiF4XI+wFfQcw7VL2Md7RRajsKgh2D+6SLAKPF356+/7yXYBTUgFy/38StUjFHweD+iiHh8/LV/i/TSvGk4L5x7F6AsIKbgb4C0YjgdGRIToGUx7cgS3JKP8pRcgak95BJGQbjaJdBYQ1qHYnYHL8F45QgHx2gLMQ2cDxBD/4SeR0LSDi5XzPQNjM4ySE/HGG6g+ugltLNSARn281BPtNO72eJLjdX4ITSEgpQvJYFEUg24f1qAYQNQdxx6Q/RcB85j9f+03zf2QV33IDPHegNgPABTfqFR8cZK9TA7/ll0EQbUUHW8Gr1d+MSadia+LRHwhunv87yWoJ3h/pRDwJAbDNQQFd2P2mH4kP/wDT/ZeN3CK3+ZjvgVpw4r20AMafb58j4N1UMknuj6iCx883PU9g2VHVH5JX2eEcPghSgRBCKPzK0Q3fknwPN0Hk0CyC0zBkz//7duEetgFjVtypAS
DI4CsknYJgYDhqsBxxy29+eyxrAZX75EEf8f+CkOcijMDDHx4ASYGGu8WHgPwpHJc0qOG8FgFTuVk0cRZVePFwHEIUEu8xSHoL5qWg4I7/HgOKXe2dcnu2SSdCGIDTA+AcxY1zYL6Q6AAFu+/1GvjKPSeEoJV3NiM4Dz9C6oWkEav+NWjPWXNOIkKgNTi2I8LeBgaZHJxqrC4oNXoB9pzzMws/OW3ghSyQJgjbygOVEDhoj4nHLld8HPD6UUMFVLIgKrTL7cFoBRLQgEdXIseZ2/HhFPKbk4d5tYWwwR0nIFQSD2P5gQhs6meVfB+Bkyz2fOIvX/zxqsSODuAGIOLtPNnmIPCrv6Kqvgz3q4tCwNl9lWYfnsdHj2HTgQw5IBHwULmfSu1jEV3gDFSxTBmqSEVqiYK2IkWcRiAkwV/cyW9YhqHXDw9dkNQAcO6HFNJT7oChfrPUYc3KY17zAd+evAwF2w5SCKLV4EuCEKsKfjBVWHu9Q9Arh4CoBqEMWYBsNX7YgKP/69uC3M7/mOOz232QT+ox4iCyJGEFP4oBHd+GVvXBwX35nqp7qeIbV6L6tdZub3ueJ+gBIKgC6S5gOQFxDoGr+Bv2nzqbknd7ph/EmXzO0o+kZdc/wqvQkAOUffVMzKtYgx5Vob1/+HAfCdzHSiXHenX35/2JTr3KZ9Ruj2lYiMhLIFoNyMq9hFroeYMTE0bSLbhb4l3YlFPa6hMd2jk8dmrDgdQCnC4/+ANFlYTB6ATlx2GDGXP1rvL+SnWHw+cJes5/rRWt4H2pw9GklD4uSMpwasIQiaYR92gIyFX5S8dtRZt/nCAH48VXW3hRE/HKOsGquj8EM85Q9cfeAV4XwNGAlmIFIwPYrfLKuxV476RRetzcdeAsRSZhiHizCKEIOHn3EMOWy5X4uIJnXX6sFiBFLaBm/THOQAkVJK9j6TKwiSDTBWpwHkSPQJX7U959uAkoaTUuug6oQCBz1Zlxm0OJSIoIw04M+7zCGuYiznCfHww9AN6Ir+HXA7lfn2oBSJ2FOOh8SzINfmcAyITq8JX/sOMPx6A9LeYtVfwgCBZhdu25OB9/XmWWNPUEPD5dUuJ68wd1AqD2+w1PI9KxE9BW5t3z/igdYGWiL7L+wPv9jgVY8f0ZcbCKCuLAHN+c5wa69Zpr0J9t2KnpAGzyiAIPiFalJ8/xXrrA6Y+/8NoDnWCPNwFJzf5DpVkHte8hx76P+HU1+HEytEeSEIzAsu5r6wPJGu6oLz8VrKofXLce+ywIHhNa/Dmw8LrptWXZ4NKZm4pr/QQ7Qk8ehMrPtAF7PQCD309QgRgRZMKgAbFREAfBBXNalbHA9cEHMo4IgIUuPjjBWEUFEQpYTkhVO43eRiynJw9Jjj8TOUIlJExK+0wA4gWgQvcFBHAc7P4/u78/Ff4CC5ATB3P3oUwFClYgcALcxzp/B9Ez4DUV8RjBbsCBrMH4dLNwIDaCGhA6o3pXksdBvYBsktrXDgNJKAFy1Z+ZGIy5NXgXoBT8a3ZgVSPIUAMV6DjLxhsV8wX4n4ibbONObHNyCr8Z4FinNFjg8ziiF5zSV8A99u7Zdf5OisvVaAAAG3VJREFU/kIPAJLWX3hUIFD6o7MD4WkHIMXBk4IftSrPNBJVk0OoC7ice8HGS8XBKDoz/YFBLaQi392lGpCMJfhD9xVkx5Xbj73P9V4m1j0v73x9FjDDPlYvATkgFAVWcdNvJBamliOjAwRV0EpeRymAe717kMYRyy/j5FwFBX0fP7Dyx8gq8wn2ZXi8GfGYR+lFcGJSxa3Y84WgzBHetlU4cvKY44Ps4iP9fsgsPGEhQTAcHqwwGCj61SoPexKwasXFqtxq8qhD9SixoBBYcJEDNzmIoi3J7QkoJActVHocTVpPBCDhElAvMDK1PT/Sq3DwB/ygmyB9GNhYDH4so4Foy48kkPtZfZEv1PQTxYpyX0EI3Bu+/5krcN8fgwVdwWu2JNVNWAk+PcOOPMNdGFyAZ5Aj6gicgzNfwuHZg0HrLxBWfjSRl88fVCo/apX/IBrIvf65ZxtEoK9Bec4KZIPLe76osQns46NwW0pUPCPAyMc4A/KXOwZzFLGbAqD5xhhbgBcWfoJBAlarcCSQgdQJ+Movnih4gjZQTw51rz588y/ZgxVUEAQ8soCfX8OR26JwujCLGFAMsOjnwGrlPuQw9D/PPv8BYVR7pG/eeFtQpsLzR2KFI8SwKj9KlX++HeLOPuSBKrKeHBi7L4b+Kx184+ptAp4Trcscv69oARVYzWgaK01H1X0K3zNSmARKtxXYHvwJuT+8gLGGWgpHcWOmBeljFB2Ckg6wiAYOqfxEK3GMCAj6kIiTWdCBCXhkjUKMgJcLk271N9uLSbtvvK0S69OXAvoA5z94VsFubbmZvx4QAnXgBnJxENyQjy38wef81uPhxMpPJIQzr5ckuUTKe0wZyN57iFTWga8GvCwlh5UqvYgmaNV9XSxEVWs40kkosFwA70RgNOu8mLZfR6wDiwRa35y7j08NksqPQhcfkRBK/J8R75Iz+9C8gJpqzwiIeZII3QnYOkJWbVEI5jNuA+o2BwK82ifwnpSgHwaC+GNAdmW2VXfC+vPu6wR6lBj84C9WfvivZyUhZMJlJhjSukDlFJ3g4AvGJfC1iEpQJ/CaEd7G9wds7p71+odruKrHip/C7RdsxeVjzIxhoNkFGOW/+sk/YVAGtltfzZAIfzix8gcHhZCXpcGN2u69qWqD9OlRFAy7x2fQBhHUiETB+DocqvArYt98f+AEAXApsEmEcNLC0t2uPHCqPQIXwHYDfI4/9+8LMpchqr5HK39MJSrBXwnutNqjovjHFdq+fcHLp7YLR4mGgduW5hFpAXUoL4cTTuW5HJSkB5PC0S7A+8c+837DyoM1J9iv/po/o3BunlDqPjOSO/YbLFd+FGy9sxKFeT8b+nLNPrkAyD53FtT27yUS32yqUaEGTMBiASGcZ0FmK8nWxbvjC1q6WQC4VdWdAcBY8eFoAzIrC0b7Wt8wlPcIdE1FhUWeKU1Igv8Q/0dl4k/NnYSxdlDon8diUDeuQB4c8XVzcahRgyyZmNC+LAgeCfSVALde8/t1DCYawNoePGT83wlOpFUdOZKwxn89OsMEf0X8CxJCBN/dwKbFwkSMgx0ACJJDJD4iC1JEYh6XcEqVHpx4+J4I4UiAl26r5x64sttvSlAn3LBuQCz6edU8C+J5epBrC4YP52EFDgHrCw1B0eU9bOaTgh3wmYvQV3Oqqcf53XnVNXUBELX1xtSgFrirlII5d3HFulxBCNEfZx0h7K2f34XwdHpuYQcguN189Ow/nPXclaUcqMH5leCXjKOjbv3F0a7i2ZaRHmBe5zwnhA9S736ZC8AH8LHkg/T5znYgmES1dtuzGo92qwHIquiWX+4KgVLd8utv9Ml1BQNhEJW/FOgweiTguCUoQHkEwYhjfQIgm8eAzPKzHqAG5xGiiPyxeGRRaYetUpDVpHVC1T9bHGyaknb/TQTnuG7rDYwYCUT7/cMjtILzA+Go/FPw5
81F/mWeTkDuBsBCAK8ki+A29nMzPn4Rzjv6QV7xWW4fzQFUxb9jQQ1qc28kMi4mDl1NBr4usIsz5ltZqNm7AeJXfuTHd7nioLEyPBISU+8/tP1AC4Il/n+YGmjg2NiBRdl6yCw//zG5ph7bqaBuz8B4VMU/TqSsNPbwCeZA1cdxyG9SgKzRZPL+GXFOiH1/SFZ9wX8M3zUgvH8a4rMBjZj/h1W9MrwTiN6MlsCKiI4gycBzgV/xUaQGjGDHwHiYi0VIzeEAasCpNuL76AC7BIEl7i4AIxnAfoMxk35eJbZ68wWEUChs8IPz/EEE9BkUoNA4RCWSLJkY1h0Y/dG9bVCtUVPe7QRhtStXG4nOECDfUxc4Uw/Ik8JkA9o9+a83IrfHH11EdFUWc4phNgVFWkPsIHBnCvCCYBSgqEN9qtoXuwHhByYoJJA7BxIkkRwpDGgAHo+vQ3ZGOwCFJCJKUAx4MBpFZWvReeLgtBBkDDQu2OJxXa7SE/P4ZiUPHABjY1DsFIhPAaygWewiXK72hHjow/k8gCL6gKES8qcDZ7A+EhYlWCPGCX1wXIwzkQEKt8cP6iqkC0FEhFj/ZYtvXCtwuBLcDT5wXN+9H6ZEIkTwV/x/s78fXFX3siWHEKrC3tw7EFZ31Ll7ttknQyEMGgAqCaVe1bGk8r8nFWCQQR0h7CY0dsU/mIeIuA1AGCo02Q0YVXxub36sG1Qgfo0CBBUXxap+ECFEycQVyViBEBFPt14TK9rZHB9EwMG7DPXOv0OVHkdtx7OSCXfb3av4CFZGTwQBwT7/hKPHE4PzpJ4L4+FM9r1n8B+B+9R9I4Fu9brYUZgCunZWNxdQgIs8mASBQ4F8hJpEiaf4GPihk8FdAxin/kybjZjTj+mAQy6ihZ9whDvHAWB6BKrBXQr+5SBfqPaINwiz12UIwoTmbPACZY/fshBBBKNlW8ZCHwH/cVKSOZMm4Mxk4OwE9JeB+EFkn1IzcPQoiSB4vGgNeJSoik1A7m0TCmE/HrggB+/1M12C1Z18ACGoIeH1pH2IhAqFWgBq+kDFEWAvA3X8tpW0cnSD5WAOriOHhnYraF1eLTkS8P/QsHUBdtMPnOrMaANJE9AZiaKWII5Ue/8PTHn/UcCSTgIF2xN4zdmAQYIAKeBFl6FiO0aKfq5jcImHfPwTxcEdRmD3LcFoAva1Hdjm9UgGggI9YOoPkOBYLsT8HlG3nucMDGkOOJ8CkNOELdSO7D5qqAeJYBb2GpABgRi2gxLITgrOQ9C937HgB+0i7MeRx3gfPWCXLtgbLJAu/gCFBPzRX8eADJqCvA3FViC/BlOQC4LZyrBq8BdQAOUKoKjqR7v7EFfVFMojPgEoSlJesNIePyLHwW9NRgq7E6HvUN8A0yj0wyWDHRZ3J2A1jHdMyu3hCGwSDwdRir7h9VP7AKLgPoMCgKziOFLtrUm8aIFHlgxYfz8WBYUU55iAXauo+evJaIK/NTgRJM9sUcZRzcCnMdNKMJc7usnAyrpxHYkTRHK+n1HxS01LheAHqRWwKIDqLvQC0+PupHZgBawfVGsiniTVHwZHRqbUI/D4Cd+ftgyLAR1ehkIiqaKFw7MJEwUIuK5zsu4svoFYCFKgBJZACBuppOId2RDkPZas8H9kULcA9a0KTCQDGtpnzT+RMJiOGseHl4BQ1C29AWUXIIf/OIwwqoNEK3SCuA7FRiBrE9B4/PcrGJ1OQNj83F4Xbol/TgVHfMiIZLAdcaVkgh8sLrd+liNQH/FqsNTfj15m1J0X+ffZuq/gTY7QnvIfJz6UzBJLs83ItQpt3RfZz5iuGfNPajpngUm0R8DoA5jDlzsOTAwZjzsC3Jjxg7H914PjlcskGdghgx9HG4OOQH34uwQyzz61/0qiYNQjXxECuWYbGM/DrjtPH/Mw/K+gBLLSA+cEfPr4MroArzcDuybbr8Zc72i2UnzeHnTgzD4Ug78SzIvCoARVOQxaFFR3TzWnkkHUVFShEuqKxZnKz4p4YYcf8ZhYhuu8wFgSHcuuwCJagI4bgchJQK/qe9c/RT6nGcg6KGREJpb+MI0EY/b0jcsni3AJBeCQNsBOFVYoApcM2Aom4VFgIRdHpeIG8D3YaxBD+qCiQ+rBOSVnci8hzkAG1t/pgHA4uwDzmu8xFKkkkIqCfkIRs204r/hiDgutoAAcowBMZ9+KS0CcXVBOHCvJw2jMQSJyeoeExF2DuTuRcuWAo9sefyUQ6/oBaIjPtiRH1KvQKvygAHb171d+vc4GRMDPoxN/kL5pwlVh1mBQ1quQJAJ5j0TgOAis+h8d3mnC8xTKE34+8sDNjyVXE6nFMN+H39TQDmocHScENvN74LoGScGU4f7g6IG3n3C3qnG6JBS+Z5tHOOzRYQx+u7MZmAl0OSsRLAS/VIKfRAWU92+12aaVPksGDBWQuCMvgNy2M2Mt8EwqbjosZAec5xLEAmXmcFTHiOWARWglpNpjdEtBQRxJJU5VL5/7F1X86XntXgUK4q+KggsUoIIK8oA+kgy4+zLaACqQGTVOX6MBWdehL6BxHn+tlyBMDGAqufd7WOX5WTJwKYDfXJJP2GXDPk7Tj5Ed7BOG7DMFaBRAJgI/+H2Ngeb2SKb0zkoGlQBHkefDr7xMA5HZeJPtKIzyApI9gmnPgf1c3mulfhe0gFekDCdNFnrOwi4Gs6eTACNjB+Uegcgojog4V25P8bctRYY6RL8AJklE9ACFAGZdBEahd4d4CmghFhbzcwaXYH5qTlS6DY+KfNH5Avzjo2JJ0poDkSCMxLn73H/eB+ifvgvyIFCWAji7BWC8hd0qj0FziMdrS70BlVbgamIgcmotGZDNPwm0L9l5iHv7WRoAFx57ScFS2r2iwot8oKu8l+TOCOg2mZ2nFdjTgOFQENzKkJ8OjEnsE8f6AzyXwT6MNF3RDRnuj0Lwo6wTlBMDIyqaz6G+RiLJMg/KUrQV/rh9uH0tWduwoxmky0kSMQ+rnXxZsGadgnxfgk1pCnsIsGYltvfdzTOBIclIsN8MLAGcz5gBwj94AE8DuC9Molip/JGwB57nRyJiyD3pyk6q5ij+3TzRLohcqyqCEQBTepF15+WVmW8SEr5jMUUkx3oMIsrH3ndwAQganKzyMpOJNxMQooGBYwcByw7axIhgPRGEr6GSGJhkAELoQ1YRg+dPeD5IIRDIqq5PA2Jh0Rq0YcS8XBi0ghGRFpCtWTdum5+yLOsQf2EuYY8AfnbQZDgCjHxBSKwTGpt8QCIDVH3/4H5OwEvldhliINwAFLsEyyIfGKV+vm3eEehVqKTdNxtDiPoLHCRiuwTJxCECxMDqDjTvZ63KaPKvRgV2i/F3ohm88V8LN8hgJcXD5pVGIPPNn9EBqSQC0I4AMxBUcQNCkarkFgSn/oCs9GCVep4eUG5BRAOcQOCWlGSc3If0IFqRfURQGRrKewPKEJ9sLnIowKCcw+f48N6UHjqYtgInaCCkBbPSj8VEkCr2g8U43wY1xX/BNkwreQrzg+oaJghOCGTU8RBxuIp6VFOGoEXgEsBLIgV6
gBgxoLSI5CgiYNT+GBHsU01GthrceiMUtv9KgAYktgVNeGrBbtiOQVi9x8WjiAW7UNUnm4Vet7WtsFgDCDYEwQ/EVL1PnQf/xCDLTowTh4c4HPRDoQaiwhKIAae4B7xgCBydI/CDPOrevK0FR4p6w3VfoXgQiB3T1N8Y1PCD0X19JqcHGfzB5WkQE4p/kdeXBcEVUXEIFqSij82lMyrWq/7c+LFHA7z5/dwOHHg8s/Y8C2CmhbmALtare+4UWLfb25BmXABKABTniC8gRAP2yvDAiUAsElnrxFzITQa/sAFecAOY7zPV/8jMQHSbWAiUPGkQNABhw85xrSCv+mMSzFR8+7mjw01A8f4F8S/td4jnDHYxpT8/OEyV3gz2+GTfdAeAszswfJNGlQhEIjB0Bls0BKn4Iw7WKu9f1gmSagmvqleEwJwnZwjO7npz1HdCJ1hS/mlBcRXyF3i/M7NxqJFoeH27z7nnJaBmpUZKHsTbGUc1ALEoIGsGYl9ixS50gjAT/VhB8IzvGTrBVfWEz1MzAkRFTtecW731VdjNQPukVdhdn0Y8d/a7WYH6i/TBPBzUFwAlHwtGHOQISrgb1AMUgDETTA3+THAdeRJhg59V/Ektofa9I8wxVICkC7QQSAd2O3cftzPzdMK6aA4iZI4ILfYRbb9RgqICt2AxVnYZ4kkBvHOBxT/zN9ybHx/f5Ql2fkGCX6ANm6F8WCfqAS+Eq5AGcHJd2IFHagTMHAAj+mWBnDXuc81CjhsAi5dL2K8QCYI1aJ/PJtSSxEFXASv7C2I3ZB9/a0j/7nDn/j1pHsz9Jr8fNpxPBUAUUYD4wz5GBlmyAiORjtAIGDFwzSUwqiNZ1d1tPiB7/Q9VeI9KeJU16/knkEeQJEALjY4rkp74fCZiMDSA/PgvT/aT2gYgp5E/P29AKBQAo6TRth5T4VesQFb0i4K7RA2MZpgyFXCEQHCOixuYMPgy2L7+45ezSSKt2oUkURlpXkEMOLSiXPuDQZjk63N5bmzOSxQdLHX7AhwUEA0BAeQPJIQzkAuFlOK/GtyLdiGDKEBdllQ7YouxV2Xdwza9So4Kp5Z0yAgUhTlJgFzSFrznIHYIwKcCu2/L3LsCg6UI1b1/CA+ApIV5/32HqOIjdQusE4azip5Wc1b0q/QGIAlaWEJbXP3r/L+AEipw/+BtkQVY9fIM2i/ZhgVEgJO6DZ1ksVtlYdoQAPhVO0oKmYBmnAYco4DRCRB3TwCziptaE0auER9/VzRqKNOEYINOQg2m1l9GpGNQAhh1v6UmxNQh2M4+LmlUzll0OTjYQOaGlZAEMCrdhmBphaMBwBADrSQQc3//He8KgFETT7p6BHnjj2X9EXsDjrgBS6ihoAmcSQVYmE4JgYWFpp1waAQRoqDzxDhU+HxSnZHz/9JEY6Y5MJA+cwoWrt99+U3Mc/9g/NQTFaigAEtwB1yBzwzucZSX7RZEILhR1d5GDCsBLVUdIQvsldZfEJt5i/MHx2hGJZFkVVyK242iFeh58oBUFqIQbkfp2DV2X0CkAYgv1sU+P+I/HmBu8nErugdRnUWhfp+A/ddlbEH3uQlBsNobUEMHasK1HOYn8BEEvCUaiuigXRIKj+sGOPA4KAWz9/s7WxcgB4+a6/fI2osEwv4yOENAiPf+wQhbc/5f0gGisWuQaRFmGoIqguARWsBQgTTocDLMT5OJUQnhqdCEig+/EShKSEgTVV0MBMnz04BcshPnLk/+OaV0/dwKzB4QUt1NB6uTDfGOP+cNm9mEsBAFiM7AQh9AKVEU75vy68jeOxrUC4mDEuYO0oLqoSdHaEF2eXYYSm0V+oEOwpLmYFOF3Z4CmAeBTIGueiIw2xoKPzDBJVBXQ5g5O8/twwA+QguIjJt3+g0NQEcDfUXgO5gsqlTBLkQLdl86K3CWneitQ8sg/5oWAUJP2C3V3RoEyji5n4b9lB4t9pz2CA+cAFn1Z9I/uzYsU/ELtEBOCHYQQqGcFejV+yeuRJX31zsKV5IGjway9z6PLDxKwNEPsBuOEiqw57jGgOtZ1Y++T50AuMFl7hPIbhskiOwsATtRoc7rS7dXrpcgrMCGJca6ELJo+Y0be0BW5ZKGcFz4y8W9BduwcDnK9iO5fagsKpp9ANnvDPxeP8THNyIVFo1AMas8Qk5v2Ytm0LCCYAXqn+wQsPTBh/5Bcnne14Os3uCQt28vsK1WUESJFviBgAW//3u9PLxusXchcCR2WsNzv/ImvgZzzkUByDUAIrjTvmSHAowpJBQE4SUlxMxnARlQbIqkArVAJ6pBBvELCCKlkyCDAP45BYfEPfcUpfMch3Vn4bheYK4E66BxAxHSVd5INgEPgU/NBCDfNQ8Ho1CoINAPQAW/QT8OCIZlNFCB84XhoDChFByHGjx35v9BLgyhmojqHYb5QYXnuAecvua0hZe6BV9f7v4ibvgvamrmAc1TmaEir0LQ9h97eYAYVoM/nWA60i8Q3Ifezha9BqaaL3zvqd6IAuwwLSCCuCLuJWch4h30giPtyiAphKEBcCu9BV5wwzkMxID8rhMwdwMhcSFgrBT3RUTQboAUg3+p+Qe1IGarOioVnazmefV3lHpwA0AcLWCahUiXwePHWJsP+GH1gnp/we5KfOhJAbsj0H/BIEb04TbrTPsAyb2LLu93KwfCvn5PLAwrOXAa72eEQRo1CNdw5IprsAZ3hApy9zlcITG2vpCihsRSYxNS+J4vdBZ6B52eqRcQ/QXmSjAWSfa/5GA5qEg4iJFtm624AqXLrSA2gx8p1Mdqcghv41S0lSp/xAYs9gakQc4Ie2RTUYwYgt748mV+FU1Xgp14eW3XYZ6cdqGTNHwHICTwEeTPl0jEZwIgP9gDEaogeg5IHWCF+1eoAhvEKPB/EAeTRsM/pSAP5wjWEUMM1/NJRhwJbpJSgK7S7zF3EOsI5jBQBK9DV80Z8Y0COzvmWzJXgDl40KEC6cqvqgi4OB5cpgLFYK/1CvDiItXqC6/S87wfAUfPtxqfGNzlYaOjlf1IsHPPvffHgDAoEeEST4ZLZUd/RSo91/BjXY5ggWgQ4In3fyj4mUqPrInHOCLKO3wUwRsfyXpt1nEIRLrqcWeTuk7bigsbid1zD4iDRQtnIdQsyIXnFCn1I9D7ADgxEhOvR5AJosoUbu1FkJyYCi9OhQERoIx+4AX/YqUXQhtYEwKN4Cy1HntLMmtaAQpqfrT/UCoLSxeswjA5UWPPi0mjajUWxMTdVusNvt/ChMdmILK5IRMFu90BMEzFYHdg2GAgeYVHMMJIBTA7EFTx/5fpgTFXz9w/en0ZjD8kCDoKPNGwlB01BmoWQbh+AxR689mBponGJOr9OwmMu3dtJ/ylW1Tik4ElUPmR9RqII+pVhD9ychABMQ51gOIZg+/G+5mGIzLB1JJC5WhzYjhJ7IWmLDpA8jzsAafUPkB2WnFBF4iSxkq1ty7f25rv/+EQLOxs2oUdTSA9HIR9swdBlCcFe9owPC3XWDDC0ISVzsEVbSCF/sWdA5Fu4HJqankp2SeQCYYrImN
alfmhpVxYrGkUS4LeSUjg8dD7+D7w/ybIfy7vlB9/HJ978zr7/45Qgajzj+4EjIK/ULHPRAOlKr/aG0AFcqCyu0GcW45Igh6JMJmhA49/U+cEssHNJhtXDC1MOya3j/sAiAGcrEtqtgjBD6wEzSDc7D8o6C8rIqAZyPk+NQoNLAZ1hR64Yl1FBY648smUYKnSg1Xwk/0DyRyArByMUobyByhCcPnOaPyoegREFS4jNfYAw+IHCjdC1J2WDZBke/OyN85J24WiXwDYPoJyYuCD238ulvuzwt6KgHf0shWKsqCFFGjB/w8HU8eeTED9wAAAAABJRU5ErkJggg=="; let S1e = 0; const pN = (c) => { if (!c.environmentBRDFTexture) { const e = c.useDelayedTextureLoading; c.useDelayedTextureLoading = !1; const t = c._blockEntityCollection; c._blockEntityCollection = !1; const i = De.CreateFromBase64String(T1e, "EnvironmentBRDFTexture" + S1e++, c, !0, !1, De.BILINEAR_SAMPLINGMODE); c._blockEntityCollection = t; const r = c.getEngine().getLoadedTexturesCache(), s = r.indexOf(i.getInternalTexture()); s !== -1 && r.splice(s, 1), i.isRGBD = !0, i.wrapU = De.CLAMP_ADDRESSMODE, i.wrapV = De.CLAMP_ADDRESSMODE, c.environmentBRDFTexture = i, c.useDelayedTextureLoading = e, hB.ExpandRGBDTexture(i); const n = c.getEngine().onContextRestoredObservable.add(() => { i.isRGBD = !0; const a = () => { i.isReady() ? hB.ExpandRGBDTexture(i) : Ve.SetImmediate(a); }; a(); }); c.onDisposeObservable.add(() => { c.getEngine().onContextRestoredObservable.remove(n); }); } return c.environmentBRDFTexture; }, M1e = { /** * Gets a default environment BRDF for MS-BRDF Height Correlated BRDF * @param scene defines the hosting scene * @returns the environment BRDF texture */ // eslint-disable-next-line @typescript-eslint/naming-convention GetEnvironmentBRDFTexture: pN }; class R1e extends sa { constructor() { super(...arguments), this.BRDF_V_HEIGHT_CORRELATED = !1, this.MS_BRDF_ENERGY_CONSERVATION = !1, this.SPHERICAL_HARMONICS = !1, this.SPECULAR_GLOSSINESS_ENERGY_CONSERVATION = !1; } } class nf extends Q_ { /** @internal */ _markAllSubMeshesAsMiscDirty() { this._internalMarkAllSubMeshesAsMiscDirty(); } constructor(e, t = !0) { super(e, "PBRBRDF", 90, new R1e(), t), this._useEnergyConservation = nf.DEFAULT_USE_ENERGY_CONSERVATION, this.useEnergyConservation = nf.DEFAULT_USE_ENERGY_CONSERVATION, this._useSmithVisibilityHeightCorrelated = nf.DEFAULT_USE_SMITH_VISIBILITY_HEIGHT_CORRELATED, this.useSmithVisibilityHeightCorrelated = nf.DEFAULT_USE_SMITH_VISIBILITY_HEIGHT_CORRELATED, this._useSphericalHarmonics = nf.DEFAULT_USE_SPHERICAL_HARMONICS, this.useSphericalHarmonics = nf.DEFAULT_USE_SPHERICAL_HARMONICS, this._useSpecularGlossinessInputEnergyConservation = nf.DEFAULT_USE_SPECULAR_GLOSSINESS_INPUT_ENERGY_CONSERVATION, this.useSpecularGlossinessInputEnergyConservation = nf.DEFAULT_USE_SPECULAR_GLOSSINESS_INPUT_ENERGY_CONSERVATION, this._internalMarkAllSubMeshesAsMiscDirty = e._dirtyCallbacks[16], this._enable(!0); } prepareDefines(e) { e.BRDF_V_HEIGHT_CORRELATED = this._useSmithVisibilityHeightCorrelated, e.MS_BRDF_ENERGY_CONSERVATION = this._useEnergyConservation && this._useSmithVisibilityHeightCorrelated, e.SPHERICAL_HARMONICS = this._useSphericalHarmonics, e.SPECULAR_GLOSSINESS_ENERGY_CONSERVATION = this._useSpecularGlossinessInputEnergyConservation; } getClassName() { return "PBRBRDFConfiguration"; } } nf.DEFAULT_USE_ENERGY_CONSERVATION = !0; nf.DEFAULT_USE_SMITH_VISIBILITY_HEIGHT_CORRELATED = !0; nf.DEFAULT_USE_SPHERICAL_HARMONICS = !0; nf.DEFAULT_USE_SPECULAR_GLOSSINESS_INPUT_ENERGY_CONSERVATION = !0; F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], nf.prototype, "useEnergyConservation", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], nf.prototype, "useSmithVisibilityHeightCorrelated", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], 
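// Descriptive comment on the minified decorator calls around this point: each
// F([W(), ct("_markAllSubMeshesAsMiscDirty")], nf.prototype, "flagName", void 0) call appears to
// register the BRDF configuration flag as a serialized property and wires a setter callback that
// invokes _markAllSubMeshesAsMiscDirty (defined above), so changing the flag forces the material
// defines to be rebuilt.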
nf.prototype, "useSphericalHarmonics", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], nf.prototype, "useSpecularGlossinessInputEnergyConservation", void 0); const P1e = "pbrFragmentDeclaration", I1e = `uniform vec4 vEyePosition;uniform vec3 vReflectionColor;uniform vec4 vAlbedoColor;uniform vec4 vLightingIntensity;uniform vec4 vReflectivityColor;uniform vec4 vMetallicReflectanceFactors;uniform vec3 vEmissiveColor;uniform float visibility;uniform vec3 vAmbientColor; #ifdef ALBEDO uniform vec2 vAlbedoInfos; #endif #ifdef AMBIENT uniform vec4 vAmbientInfos; #endif #ifdef BUMP uniform vec3 vBumpInfos;uniform vec2 vTangentSpaceParams; #endif #ifdef OPACITY uniform vec2 vOpacityInfos; #endif #ifdef EMISSIVE uniform vec2 vEmissiveInfos; #endif #ifdef LIGHTMAP uniform vec2 vLightmapInfos; #endif #ifdef REFLECTIVITY uniform vec3 vReflectivityInfos; #endif #ifdef MICROSURFACEMAP uniform vec2 vMicroSurfaceSamplerInfos; #endif #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(SS_REFRACTION) || defined(PREPASS) uniform mat4 view; #endif #ifdef REFLECTION uniform vec2 vReflectionInfos; #ifdef REALTIME_FILTERING uniform vec2 vReflectionFilteringInfo; #endif uniform mat4 reflectionMatrix;uniform vec3 vReflectionMicrosurfaceInfos; #if defined(USE_LOCAL_REFLECTIONMAP_CUBIC) && defined(REFLECTIONMAP_CUBIC) uniform vec3 vReflectionPosition;uniform vec3 vReflectionSize; #endif #endif #if defined(SS_REFRACTION) && defined(SS_USE_LOCAL_REFRACTIONMAP_CUBIC) uniform vec3 vRefractionPosition;uniform vec3 vRefractionSize; #endif #ifdef CLEARCOAT uniform vec2 vClearCoatParams;uniform vec4 vClearCoatRefractionParams; #if defined(CLEARCOAT_TEXTURE) || defined(CLEARCOAT_TEXTURE_ROUGHNESS) uniform vec4 vClearCoatInfos; #endif #ifdef CLEARCOAT_TEXTURE uniform mat4 clearCoatMatrix; #endif #ifdef CLEARCOAT_TEXTURE_ROUGHNESS uniform mat4 clearCoatRoughnessMatrix; #endif #ifdef CLEARCOAT_BUMP uniform vec2 vClearCoatBumpInfos;uniform vec2 vClearCoatTangentSpaceParams;uniform mat4 clearCoatBumpMatrix; #endif #ifdef CLEARCOAT_TINT uniform vec4 vClearCoatTintParams;uniform float clearCoatColorAtDistance; #ifdef CLEARCOAT_TINT_TEXTURE uniform vec2 vClearCoatTintInfos;uniform mat4 clearCoatTintMatrix; #endif #endif #endif #ifdef IRIDESCENCE uniform vec4 vIridescenceParams; #if defined(IRIDESCENCE_TEXTURE) || defined(IRIDESCENCE_THICKNESS_TEXTURE) uniform vec4 vIridescenceInfos; #endif #ifdef IRIDESCENCE_TEXTURE uniform mat4 iridescenceMatrix; #endif #ifdef IRIDESCENCE_THICKNESS_TEXTURE uniform mat4 iridescenceThicknessMatrix; #endif #endif #ifdef ANISOTROPIC uniform vec3 vAnisotropy; #ifdef ANISOTROPIC_TEXTURE uniform vec2 vAnisotropyInfos;uniform mat4 anisotropyMatrix; #endif #endif #ifdef SHEEN uniform vec4 vSheenColor; #ifdef SHEEN_ROUGHNESS uniform float vSheenRoughness; #endif #if defined(SHEEN_TEXTURE) || defined(SHEEN_TEXTURE_ROUGHNESS) uniform vec4 vSheenInfos; #endif #ifdef SHEEN_TEXTURE uniform mat4 sheenMatrix; #endif #ifdef SHEEN_TEXTURE_ROUGHNESS uniform mat4 sheenRoughnessMatrix; #endif #endif #ifdef SUBSURFACE #ifdef SS_REFRACTION uniform vec4 vRefractionMicrosurfaceInfos;uniform vec4 vRefractionInfos;uniform mat4 refractionMatrix; #ifdef REALTIME_FILTERING uniform vec2 vRefractionFilteringInfo; #endif #ifdef SS_DISPERSION uniform float dispersion; #endif #endif #ifdef SS_THICKNESSANDMASK_TEXTURE uniform vec2 vThicknessInfos;uniform mat4 thicknessMatrix; #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE uniform vec2 vRefractionIntensityInfos;uniform mat4 
refractionIntensityMatrix; #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE uniform vec2 vTranslucencyIntensityInfos;uniform mat4 translucencyIntensityMatrix; #endif uniform vec2 vThicknessParam;uniform vec3 vDiffusionDistance;uniform vec4 vTintColor;uniform vec3 vSubSurfaceIntensity; #endif #ifdef PREPASS #ifdef SS_SCATTERING uniform float scatteringDiffusionProfile; #endif #endif #if DEBUGMODE>0 uniform vec2 vDebugMode; #endif #ifdef DETAIL uniform vec4 vDetailInfos; #endif #include #ifdef USESPHERICALFROMREFLECTIONMAP #ifdef SPHERICAL_HARMONICS uniform vec3 vSphericalL00;uniform vec3 vSphericalL1_1;uniform vec3 vSphericalL10;uniform vec3 vSphericalL11;uniform vec3 vSphericalL2_2;uniform vec3 vSphericalL2_1;uniform vec3 vSphericalL20;uniform vec3 vSphericalL21;uniform vec3 vSphericalL22; #else uniform vec3 vSphericalX;uniform vec3 vSphericalY;uniform vec3 vSphericalZ;uniform vec3 vSphericalXX_ZZ;uniform vec3 vSphericalYY_ZZ;uniform vec3 vSphericalZZ;uniform vec3 vSphericalXY;uniform vec3 vSphericalYZ;uniform vec3 vSphericalZX; #endif #endif #define ADDITIONAL_FRAGMENT_DECLARATION `; je.IncludesShadersStore[P1e] = I1e; const D1e = "pbrUboDeclaration", O1e = `layout(std140,column_major) uniform;uniform Material {vec2 vAlbedoInfos;vec4 vAmbientInfos;vec2 vOpacityInfos;vec2 vEmissiveInfos;vec2 vLightmapInfos;vec3 vReflectivityInfos;vec2 vMicroSurfaceSamplerInfos;vec2 vReflectionInfos;vec2 vReflectionFilteringInfo;vec3 vReflectionPosition;vec3 vReflectionSize;vec3 vBumpInfos;mat4 albedoMatrix;mat4 ambientMatrix;mat4 opacityMatrix;mat4 emissiveMatrix;mat4 lightmapMatrix;mat4 reflectivityMatrix;mat4 microSurfaceSamplerMatrix;mat4 bumpMatrix;vec2 vTangentSpaceParams;mat4 reflectionMatrix;vec3 vReflectionColor;vec4 vAlbedoColor;vec4 vLightingIntensity;vec3 vReflectionMicrosurfaceInfos;float pointSize;vec4 vReflectivityColor;vec3 vEmissiveColor;vec3 vAmbientColor;vec2 vDebugMode;vec4 vMetallicReflectanceFactors;vec2 vMetallicReflectanceInfos;mat4 metallicReflectanceMatrix;vec2 vReflectanceInfos;mat4 reflectanceMatrix;vec3 vSphericalL00;vec3 vSphericalL1_1;vec3 vSphericalL10;vec3 vSphericalL11;vec3 vSphericalL2_2;vec3 vSphericalL2_1;vec3 vSphericalL20;vec3 vSphericalL21;vec3 vSphericalL22;vec3 vSphericalX;vec3 vSphericalY;vec3 vSphericalZ;vec3 vSphericalXX_ZZ;vec3 vSphericalYY_ZZ;vec3 vSphericalZZ;vec3 vSphericalXY;vec3 vSphericalYZ;vec3 vSphericalZX; #define ADDITIONAL_UBO_DECLARATION }; #include #include `; je.IncludesShadersStore[D1e] = O1e; const w1e = "pbrFragmentExtraDeclaration", L1e = `varying vec3 vPositionW; #if DEBUGMODE>0 varying vec4 vClipSpacePosition; #endif #include[1..7] #ifdef NORMAL varying vec3 vNormalW; #if defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX) varying vec3 vEnvironmentIrradiance; #endif #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) varying vec4 vColor; #endif `; je.IncludesShadersStore[w1e] = L1e; const N1e = "samplerFragmentAlternateDeclaration", F1e = `#ifdef _DEFINENAME_ #if _DEFINENAME_DIRECTUV==1 #define v_VARYINGNAME_UV vMainUV1 #elif _DEFINENAME_DIRECTUV==2 #define v_VARYINGNAME_UV vMainUV2 #elif _DEFINENAME_DIRECTUV==3 #define v_VARYINGNAME_UV vMainUV3 #elif _DEFINENAME_DIRECTUV==4 #define v_VARYINGNAME_UV vMainUV4 #elif _DEFINENAME_DIRECTUV==5 #define v_VARYINGNAME_UV vMainUV5 #elif _DEFINENAME_DIRECTUV==6 #define v_VARYINGNAME_UV vMainUV6 #else varying vec2 v_VARYINGNAME_UV; #endif #endif `; je.IncludesShadersStore[N1e] = F1e; const B1e = "pbrFragmentSamplersDeclaration", U1e = 
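// The samplers-declaration chunk assigned below is built almost entirely from parameterized
// #include directives: each #include(_DEFINENAME_,X,_VARYINGNAME_,Y,_SAMPLERNAME_,z) line is
// expanded by the engine's shader include processor against stored templates (such as the
// samplerFragmentAlternateDeclaration registered just above), substituting the
// _DEFINENAME_/_VARYINGNAME_/_SAMPLERNAME_ placeholders to emit the per-texture sampler and UV
// declarations.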
`#include(_DEFINENAME_,ALBEDO,_VARYINGNAME_,Albedo,_SAMPLERNAME_,albedo) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient,_SAMPLERNAME_,ambient) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity,_SAMPLERNAME_,opacity) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive,_SAMPLERNAME_,emissive) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap,_SAMPLERNAME_,lightmap) #include(_DEFINENAME_,REFLECTIVITY,_VARYINGNAME_,Reflectivity,_SAMPLERNAME_,reflectivity) #include(_DEFINENAME_,MICROSURFACEMAP,_VARYINGNAME_,MicroSurfaceSampler,_SAMPLERNAME_,microSurface) #include(_DEFINENAME_,METALLIC_REFLECTANCE,_VARYINGNAME_,MetallicReflectance,_SAMPLERNAME_,metallicReflectance) #include(_DEFINENAME_,REFLECTANCE,_VARYINGNAME_,Reflectance,_SAMPLERNAME_,reflectance) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal,_SAMPLERNAME_,decal) #ifdef CLEARCOAT #include(_DEFINENAME_,CLEARCOAT_TEXTURE,_VARYINGNAME_,ClearCoat,_SAMPLERNAME_,clearCoat) #include(_DEFINENAME_,CLEARCOAT_TEXTURE_ROUGHNESS,_VARYINGNAME_,ClearCoatRoughness) #if defined(CLEARCOAT_TEXTURE_ROUGHNESS) && !defined(CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL) uniform sampler2D clearCoatRoughnessSampler; #endif #include(_DEFINENAME_,CLEARCOAT_BUMP,_VARYINGNAME_,ClearCoatBump,_SAMPLERNAME_,clearCoatBump) #include(_DEFINENAME_,CLEARCOAT_TINT_TEXTURE,_VARYINGNAME_,ClearCoatTint,_SAMPLERNAME_,clearCoatTint) #endif #ifdef IRIDESCENCE #include(_DEFINENAME_,IRIDESCENCE_TEXTURE,_VARYINGNAME_,Iridescence,_SAMPLERNAME_,iridescence) #include(_DEFINENAME_,IRIDESCENCE_THICKNESS_TEXTURE,_VARYINGNAME_,IridescenceThickness,_SAMPLERNAME_,iridescenceThickness) #endif #ifdef SHEEN #include(_DEFINENAME_,SHEEN_TEXTURE,_VARYINGNAME_,Sheen,_SAMPLERNAME_,sheen) #include(_DEFINENAME_,SHEEN_TEXTURE_ROUGHNESS,_VARYINGNAME_,SheenRoughness) #if defined(SHEEN_ROUGHNESS) && defined(SHEEN_TEXTURE_ROUGHNESS) && !defined(SHEEN_TEXTURE_ROUGHNESS_IDENTICAL) uniform sampler2D sheenRoughnessSampler; #endif #endif #ifdef ANISOTROPIC #include(_DEFINENAME_,ANISOTROPIC_TEXTURE,_VARYINGNAME_,Anisotropy,_SAMPLERNAME_,anisotropy) #endif #ifdef REFLECTION #ifdef REFLECTIONMAP_3D #define sampleReflection(s,c) textureCube(s,c) uniform samplerCube reflectionSampler; #ifdef LODBASEDMICROSFURACE #define sampleReflectionLod(s,c,l) textureCubeLodEXT(s,c,l) #else uniform samplerCube reflectionSamplerLow;uniform samplerCube reflectionSamplerHigh; #endif #ifdef USEIRRADIANCEMAP uniform samplerCube irradianceSampler; #endif #else #define sampleReflection(s,c) texture2D(s,c) uniform sampler2D reflectionSampler; #ifdef LODBASEDMICROSFURACE #define sampleReflectionLod(s,c,l) texture2DLodEXT(s,c,l) #else uniform sampler2D reflectionSamplerLow;uniform sampler2D reflectionSamplerHigh; #endif #ifdef USEIRRADIANCEMAP uniform sampler2D irradianceSampler; #endif #endif #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #else #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #endif #endif #ifdef ENVIRONMENTBRDF uniform sampler2D environmentBrdfSampler; #endif #ifdef SUBSURFACE #ifdef SS_REFRACTION #ifdef SS_REFRACTIONMAP_3D #define sampleRefraction(s,c) textureCube(s,c) uniform samplerCube refractionSampler; #ifdef LODBASEDMICROSFURACE #define sampleRefractionLod(s,c,l) textureCubeLodEXT(s,c,l) #else uniform samplerCube refractionSamplerLow;uniform samplerCube refractionSamplerHigh; #endif #else #define sampleRefraction(s,c) texture2D(s,c) uniform sampler2D refractionSampler; #ifdef LODBASEDMICROSFURACE #define 
sampleRefractionLod(s,c,l) texture2DLodEXT(s,c,l) #else uniform sampler2D refractionSamplerLow;uniform sampler2D refractionSamplerHigh; #endif #endif #endif #include(_DEFINENAME_,SS_THICKNESSANDMASK_TEXTURE,_VARYINGNAME_,Thickness,_SAMPLERNAME_,thickness) #include(_DEFINENAME_,SS_REFRACTIONINTENSITY_TEXTURE,_VARYINGNAME_,RefractionIntensity,_SAMPLERNAME_,refractionIntensity) #include(_DEFINENAME_,SS_TRANSLUCENCYINTENSITY_TEXTURE,_VARYINGNAME_,TranslucencyIntensity,_SAMPLERNAME_,translucencyIntensity) #endif `; je.IncludesShadersStore[B1e] = U1e; const V1e = "subSurfaceScatteringFunctions", k1e = `bool testLightingForSSS(float diffusionProfile) {return diffusionProfile<1.;}`; je.IncludesShadersStore[V1e] = k1e; const z1e = "importanceSampling", H1e = `vec3 hemisphereCosSample(vec2 u) {float phi=2.*PI*u.x;float cosTheta2=1.-u.y;float cosTheta=sqrt(cosTheta2);float sinTheta=sqrt(1.-cosTheta2);return vec3(sinTheta*cos(phi),sinTheta*sin(phi),cosTheta);} vec3 hemisphereImportanceSampleDggx(vec2 u,float a) {float phi=2.*PI*u.x;float cosTheta2=(1.-u.y)/(1.+(a+1.)*((a-1.)*u.y));float cosTheta=sqrt(cosTheta2);float sinTheta=sqrt(1.-cosTheta2);return vec3(sinTheta*cos(phi),sinTheta*sin(phi),cosTheta);} vec3 hemisphereImportanceSampleDCharlie(vec2 u,float a) { float phi=2.*PI*u.x;float sinTheta=pow(u.y,a/(2.*a+1.));float cosTheta=sqrt(1.-sinTheta*sinTheta);return vec3(sinTheta*cos(phi),sinTheta*sin(phi),cosTheta);}`; je.IncludesShadersStore[z1e] = H1e; const G1e = "pbrHelperFunctions", K1e = `#define MINIMUMVARIANCE 0.0005 float convertRoughnessToAverageSlope(float roughness) {return square(roughness)+MINIMUMVARIANCE;} float fresnelGrazingReflectance(float reflectance0) {float reflectance90=saturate(reflectance0*25.0);return reflectance90;} vec2 getAARoughnessFactors(vec3 normalVector) { #ifdef SPECULARAA vec3 nDfdx=dFdx(normalVector.xyz);vec3 nDfdy=dFdy(normalVector.xyz);float slopeSquare=max(dot(nDfdx,nDfdx),dot(nDfdy,nDfdy));float geometricRoughnessFactor=pow(saturate(slopeSquare),0.333);float geometricAlphaGFactor=sqrt(slopeSquare);geometricAlphaGFactor*=0.75;return vec2(geometricRoughnessFactor,geometricAlphaGFactor); #else return vec2(0.); #endif } #ifdef ANISOTROPIC #ifdef ANISOTROPIC_LEGACY vec2 getAnisotropicRoughness(float alphaG,float anisotropy) {float alphaT=max(alphaG*(1.0+anisotropy),MINIMUMVARIANCE);float alphaB=max(alphaG*(1.0-anisotropy),MINIMUMVARIANCE);return vec2(alphaT,alphaB);} vec3 getAnisotropicBentNormals(const vec3 T,const vec3 B,const vec3 N,const vec3 V,float anisotropy,float roughness) {vec3 anisotropicFrameDirection=anisotropy>=0.0 ? 
B : T;vec3 anisotropicFrameTangent=cross(normalize(anisotropicFrameDirection),V);vec3 anisotropicFrameNormal=cross(anisotropicFrameTangent,anisotropicFrameDirection);vec3 anisotropicNormal=normalize(mix(N,anisotropicFrameNormal,abs(anisotropy)));return anisotropicNormal;} #else vec2 getAnisotropicRoughness(float alphaG,float anisotropy) {float alphaT=max(mix(alphaG,1.0,anisotropy*anisotropy),MINIMUMVARIANCE);float alphaB=max(alphaG,MINIMUMVARIANCE);return vec2(alphaT,alphaB);} vec3 getAnisotropicBentNormals(const vec3 T,const vec3 B,const vec3 N,const vec3 V,float anisotropy,float roughness) {vec3 bentNormal=cross(B,V);bentNormal=normalize(cross(bentNormal,B));float a=square(square(1.0-anisotropy*(1.0-roughness)));bentNormal=normalize(mix(bentNormal,N,a));return bentNormal;} #endif #endif #if defined(CLEARCOAT) || defined(SS_REFRACTION) vec3 cocaLambert(vec3 alpha,float distance) {return exp(-alpha*distance);} vec3 cocaLambert(float NdotVRefract,float NdotLRefract,vec3 alpha,float thickness) {return cocaLambert(alpha,(thickness*((NdotLRefract+NdotVRefract)/(NdotLRefract*NdotVRefract))));} vec3 computeColorAtDistanceInMedia(vec3 color,float distance) {return -log(color)/distance;} vec3 computeClearCoatAbsorption(float NdotVRefract,float NdotLRefract,vec3 clearCoatColor,float clearCoatThickness,float clearCoatIntensity) {vec3 clearCoatAbsorption=mix(vec3(1.0), cocaLambert(NdotVRefract,NdotLRefract,clearCoatColor,clearCoatThickness), clearCoatIntensity);return clearCoatAbsorption;} #endif #ifdef MICROSURFACEAUTOMATIC float computeDefaultMicroSurface(float microSurface,vec3 reflectivityColor) {const float kReflectivityNoAlphaWorkflow_SmoothnessMax=0.95;float reflectivityLuminance=getLuminance(reflectivityColor);float reflectivityLuma=sqrt(reflectivityLuminance);microSurface=reflectivityLuma*kReflectivityNoAlphaWorkflow_SmoothnessMax;return microSurface;} #endif `; je.IncludesShadersStore[G1e] = K1e; const W1e = "harmonicsFunctions", j1e = `#ifdef USESPHERICALFROMREFLECTIONMAP #ifdef SPHERICAL_HARMONICS vec3 computeEnvironmentIrradiance(vec3 normal) {return vSphericalL00 + vSphericalL1_1*(normal.y) + vSphericalL10*(normal.z) + vSphericalL11*(normal.x) + vSphericalL2_2*(normal.y*normal.x) + vSphericalL2_1*(normal.y*normal.z) + vSphericalL20*((3.0*normal.z*normal.z)-1.0) + vSphericalL21*(normal.z*normal.x) + vSphericalL22*(normal.x*normal.x-(normal.y*normal.y));} #else vec3 computeEnvironmentIrradiance(vec3 normal) {float Nx=normal.x;float Ny=normal.y;float Nz=normal.z;vec3 C1=vSphericalZZ.rgb;vec3 Cx=vSphericalX.rgb;vec3 Cy=vSphericalY.rgb;vec3 Cz=vSphericalZ.rgb;vec3 Cxx_zz=vSphericalXX_ZZ.rgb;vec3 Cyy_zz=vSphericalYY_ZZ.rgb;vec3 Cxy=vSphericalXY.rgb;vec3 Cyz=vSphericalYZ.rgb;vec3 Czx=vSphericalZX.rgb;vec3 a1=Cyy_zz*Ny+Cy;vec3 a2=Cyz*Nz+a1;vec3 b1=Czx*Nz+Cx;vec3 b2=Cxy*Ny+b1;vec3 b3=Cxx_zz*Nx+b2;vec3 t1=Cz *Nz+C1;vec3 t2=a2 *Ny+t1;vec3 t3=b3 *Nx+t2;return t3;} #endif #endif `; je.IncludesShadersStore[W1e] = j1e; const X1e = "pbrDirectLightingSetupFunctions", Y1e = `struct preLightingInfo {vec3 lightOffset;float lightDistanceSquared;float lightDistance;float attenuation;vec3 L;vec3 H;float NdotV;float NdotLUnclamped;float NdotL;float VdotH;float roughness; #ifdef IRIDESCENCE float iridescenceIntensity; #endif };preLightingInfo computePointAndSpotPreLightingInfo(vec4 lightData,vec3 V,vec3 N) {preLightingInfo 
result;result.lightOffset=lightData.xyz-vPositionW;result.lightDistanceSquared=dot(result.lightOffset,result.lightOffset);result.lightDistance=sqrt(result.lightDistanceSquared);result.L=normalize(result.lightOffset);result.H=normalize(V+result.L);result.VdotH=saturate(dot(V,result.H));result.NdotLUnclamped=dot(N,result.L);result.NdotL=saturateEps(result.NdotLUnclamped);return result;} preLightingInfo computeDirectionalPreLightingInfo(vec4 lightData,vec3 V,vec3 N) {preLightingInfo result;result.lightDistance=length(-lightData.xyz);result.L=normalize(-lightData.xyz);result.H=normalize(V+result.L);result.VdotH=saturate(dot(V,result.H));result.NdotLUnclamped=dot(N,result.L);result.NdotL=saturateEps(result.NdotLUnclamped);return result;} preLightingInfo computeHemisphericPreLightingInfo(vec4 lightData,vec3 V,vec3 N) {preLightingInfo result;result.NdotL=dot(N,lightData.xyz)*0.5+0.5;result.NdotL=saturateEps(result.NdotL);result.NdotLUnclamped=result.NdotL; #ifdef SPECULARTERM result.L=normalize(lightData.xyz);result.H=normalize(V+result.L);result.VdotH=saturate(dot(V,result.H)); #endif return result;}`; je.IncludesShadersStore[X1e] = Y1e; const Q1e = "pbrDirectLightingFalloffFunctions", $1e = `float computeDistanceLightFalloff_Standard(vec3 lightOffset,float range) {return max(0.,1.0-length(lightOffset)/range);} float computeDistanceLightFalloff_Physical(float lightDistanceSquared) {return 1.0/maxEps(lightDistanceSquared);} float computeDistanceLightFalloff_GLTF(float lightDistanceSquared,float inverseSquaredRange) {float lightDistanceFalloff=1.0/maxEps(lightDistanceSquared);float factor=lightDistanceSquared*inverseSquaredRange;float attenuation=saturate(1.0-factor*factor);attenuation*=attenuation;lightDistanceFalloff*=attenuation;return lightDistanceFalloff;} float computeDistanceLightFalloff(vec3 lightOffset,float lightDistanceSquared,float range,float inverseSquaredRange) { #ifdef USEPHYSICALLIGHTFALLOFF return computeDistanceLightFalloff_Physical(lightDistanceSquared); #elif defined(USEGLTFLIGHTFALLOFF) return computeDistanceLightFalloff_GLTF(lightDistanceSquared,inverseSquaredRange); #else return computeDistanceLightFalloff_Standard(lightOffset,range); #endif } float computeDirectionalLightFalloff_Standard(vec3 lightDirection,vec3 directionToLightCenterW,float cosHalfAngle,float exponent) {float falloff=0.0;float cosAngle=maxEps(dot(-lightDirection,directionToLightCenterW));if (cosAngle>=cosHalfAngle) {falloff=max(0.,pow(cosAngle,exponent));} return falloff;} float computeDirectionalLightFalloff_Physical(vec3 lightDirection,vec3 directionToLightCenterW,float cosHalfAngle) {const float kMinusLog2ConeAngleIntensityRatio=6.64385618977; float concentrationKappa=kMinusLog2ConeAngleIntensityRatio/(1.0-cosHalfAngle);vec4 lightDirectionSpreadSG=vec4(-lightDirection*concentrationKappa,-concentrationKappa);float falloff=exp2(dot(vec4(directionToLightCenterW,1.0),lightDirectionSpreadSG));return falloff;} float computeDirectionalLightFalloff_GLTF(vec3 lightDirection,vec3 directionToLightCenterW,float lightAngleScale,float lightAngleOffset) {float cd=dot(-lightDirection,directionToLightCenterW);float falloff=saturate(cd*lightAngleScale+lightAngleOffset);falloff*=falloff;return falloff;} float computeDirectionalLightFalloff(vec3 lightDirection,vec3 directionToLightCenterW,float cosHalfAngle,float exponent,float lightAngleScale,float lightAngleOffset) { #ifdef USEPHYSICALLIGHTFALLOFF return computeDirectionalLightFalloff_Physical(lightDirection,directionToLightCenterW,cosHalfAngle); #elif 
defined(USEGLTFLIGHTFALLOFF) return computeDirectionalLightFalloff_GLTF(lightDirection,directionToLightCenterW,lightAngleScale,lightAngleOffset); #else return computeDirectionalLightFalloff_Standard(lightDirection,directionToLightCenterW,cosHalfAngle,exponent); #endif }`; je.IncludesShadersStore[Q1e] = $1e; const Z1e = "pbrBRDFFunctions", q1e = `#define FRESNEL_MAXIMUM_ON_ROUGH 0.25 #ifdef MS_BRDF_ENERGY_CONSERVATION vec3 getEnergyConservationFactor(const vec3 specularEnvironmentR0,const vec3 environmentBrdf) {return 1.0+specularEnvironmentR0*(1.0/environmentBrdf.y-1.0);} #endif #ifdef ENVIRONMENTBRDF vec3 getBRDFLookup(float NdotV,float perceptualRoughness) {vec2 UV=vec2(NdotV,perceptualRoughness);vec4 brdfLookup=texture2D(environmentBrdfSampler,UV); #ifdef ENVIRONMENTBRDF_RGBD brdfLookup.rgb=fromRGBD(brdfLookup.rgba); #endif return brdfLookup.rgb;} vec3 getReflectanceFromBRDFLookup(const vec3 specularEnvironmentR0,const vec3 specularEnvironmentR90,const vec3 environmentBrdf) { #ifdef BRDF_V_HEIGHT_CORRELATED vec3 reflectance=(specularEnvironmentR90-specularEnvironmentR0)*environmentBrdf.x+specularEnvironmentR0*environmentBrdf.y; #else vec3 reflectance=specularEnvironmentR0*environmentBrdf.x+specularEnvironmentR90*environmentBrdf.y; #endif return reflectance;} vec3 getReflectanceFromBRDFLookup(const vec3 specularEnvironmentR0,const vec3 environmentBrdf) { #ifdef BRDF_V_HEIGHT_CORRELATED vec3 reflectance=mix(environmentBrdf.xxx,environmentBrdf.yyy,specularEnvironmentR0); #else vec3 reflectance=specularEnvironmentR0*environmentBrdf.x+environmentBrdf.y; #endif return reflectance;} #endif /* NOT USED #if defined(SHEEN) && defined(SHEEN_SOFTER) float getBRDFLookupCharlieSheen(float NdotV,float perceptualRoughness) {float c=1.0-NdotV;float c3=c*c*c;return 0.65584461*c3+1.0/(4.16526551+exp(-7.97291361*perceptualRoughness+6.33516894));} #endif */ #if !defined(ENVIRONMENTBRDF) || defined(REFLECTIONMAP_SKYBOX) || defined(ALPHAFRESNEL) vec3 getReflectanceFromAnalyticalBRDFLookup_Jones(float VdotN,vec3 reflectance0,vec3 reflectance90,float smoothness) {float weight=mix(FRESNEL_MAXIMUM_ON_ROUGH,1.0,smoothness);return reflectance0+weight*(reflectance90-reflectance0)*pow5(saturate(1.0-VdotN));} #endif #if defined(SHEEN) && defined(ENVIRONMENTBRDF) /** * The sheen BRDF not containing F can be easily stored in the blue channel of the BRDF texture. 
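 * (Because F is factored out of the lookup, getSheenReflectanceFromBRDFLookup below only needs to scale reflectance0 by environmentBrdf.b.)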
* The blue channel contains DCharlie*VAshikhmin*NdotL as a lokkup table */ vec3 getSheenReflectanceFromBRDFLookup(const vec3 reflectance0,const vec3 environmentBrdf) {vec3 sheenEnvironmentReflectance=reflectance0*environmentBrdf.b;return sheenEnvironmentReflectance;} #endif vec3 fresnelSchlickGGX(float VdotH,vec3 reflectance0,vec3 reflectance90) {return reflectance0+(reflectance90-reflectance0)*pow5(1.0-VdotH);} float fresnelSchlickGGX(float VdotH,float reflectance0,float reflectance90) {return reflectance0+(reflectance90-reflectance0)*pow5(1.0-VdotH);} #ifdef CLEARCOAT vec3 getR0RemappedForClearCoat(vec3 f0) { #ifdef CLEARCOAT_DEFAULTIOR #ifdef MOBILE return saturate(f0*(f0*0.526868+0.529324)-0.0482256); #else return saturate(f0*(f0*(0.941892-0.263008*f0)+0.346479)-0.0285998); #endif #else vec3 s=sqrt(f0);vec3 t=(vClearCoatRefractionParams.z+vClearCoatRefractionParams.w*s)/(vClearCoatRefractionParams.w+vClearCoatRefractionParams.z*s);return square(t); #endif } #endif #ifdef IRIDESCENCE const mat3 XYZ_TO_REC709=mat3( 3.2404542,-0.9692660, 0.0556434, -1.5371385, 1.8760108,-0.2040259, -0.4985314, 0.0415560, 1.0572252 );vec3 getIORTfromAirToSurfaceR0(vec3 f0) {vec3 sqrtF0=sqrt(f0);return (1.+sqrtF0)/(1.-sqrtF0);} vec3 getR0fromIORs(vec3 iorT,float iorI) {return square((iorT-vec3(iorI))/(iorT+vec3(iorI)));} float getR0fromIORs(float iorT,float iorI) {return square((iorT-iorI)/(iorT+iorI));} vec3 evalSensitivity(float opd,vec3 shift) {float phase=2.0*PI*opd*1.0e-9;const vec3 val=vec3(5.4856e-13,4.4201e-13,5.2481e-13);const vec3 pos=vec3(1.6810e+06,1.7953e+06,2.2084e+06);const vec3 var=vec3(4.3278e+09,9.3046e+09,6.6121e+09);vec3 xyz=val*sqrt(2.0*PI*var)*cos(pos*phase+shift)*exp(-square(phase)*var);xyz.x+=9.7470e-14*sqrt(2.0*PI*4.5282e+09)*cos(2.2399e+06*phase+shift[0])*exp(-4.5282e+09*square(phase));xyz/=1.0685e-7;vec3 srgb=XYZ_TO_REC709*xyz;return srgb;} vec3 evalIridescence(float outsideIOR,float eta2,float cosTheta1,float thinFilmThickness,vec3 baseF0) {vec3 I=vec3(1.0);float iridescenceIOR=mix(outsideIOR,eta2,smoothstep(0.0,0.03,thinFilmThickness));float sinTheta2Sq=square(outsideIOR/iridescenceIOR)*(1.0-square(cosTheta1));float cosTheta2Sq=1.0-sinTheta2Sq;if (cosTheta2Sq<0.0) {return I;} float cosTheta2=sqrt(cosTheta2Sq);float R0=getR0fromIORs(iridescenceIOR,outsideIOR);float R12=fresnelSchlickGGX(cosTheta1,R0,1.);float R21=R12;float T121=1.0-R12;float phi12=0.0;if (iridescenceIOR0 #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) float radicalInverse_VdC(uint bits) {bits=(bits<<16u) | (bits>>16u);bits=((bits & 0x55555555u)<<1u) | ((bits & 0xAAAAAAAAu)>>1u);bits=((bits & 0x33333333u)<<2u) | ((bits & 0xCCCCCCCCu)>>2u);bits=((bits & 0x0F0F0F0Fu)<<4u) | ((bits & 0xF0F0F0F0u)>>4u);bits=((bits & 0x00FF00FFu)<<8u) | ((bits & 0xFF00FF00u)>>8u);return float(bits)*2.3283064365386963e-10; } vec2 hammersley(uint i,uint N) {return vec2(float(i)/float(N),radicalInverse_VdC(i));} #else float vanDerCorpus(int n,int base) {float invBase=1.0/float(base);float denom =1.0;float result =0.0;for(int i=0; i<32; ++i) {if(n>0) {denom =mod(float(n),2.0);result+=denom*invBase;invBase=invBase/2.0;n =int(float(n)/2.0);}} return result;} vec2 hammersley(int i,int N) {return vec2(float(i)/float(N),vanDerCorpus(i,2));} #endif float log4(float x) {return log2(x)/2.;} const float NUM_SAMPLES_FLOAT=float(NUM_SAMPLES);const float NUM_SAMPLES_FLOAT_INVERSED=1./NUM_SAMPLES_FLOAT;const float K=4.; #define inline vec3 irradiance(samplerCube inputTexture,vec3 inputN,vec2 filteringInfo) {vec3 n=normalize(inputN);vec3 
result=vec3(0.0);vec3 tangent=abs(n.z)<0.999 ? vec3(0.,0.,1.) : vec3(1.,0.,0.);tangent=normalize(cross(tangent,n));vec3 bitangent=cross(n,tangent);mat3 tbn=mat3(tangent,bitangent,n);float maxLevel=filteringInfo.y;float dim0=filteringInfo.x;float omegaP=(4.*PI)/(6.*dim0*dim0); #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) for(uint i=0u; i0.) {float pdf_inversed=PI/NoL;float omegaS=NUM_SAMPLES_FLOAT_INVERSED*pdf_inversed;float l=log4(omegaS)-log4(omegaP)+log4(K);float mipLevel=clamp(l,0.0,maxLevel);vec3 c=textureCubeLodEXT(inputTexture,tbn*Ls,mipLevel).rgb; #ifdef GAMMA_INPUT c=toLinearSpace(c); #endif result+=c;}} result=result*NUM_SAMPLES_FLOAT_INVERSED;return result;} #define inline vec3 radiance(float alphaG,samplerCube inputTexture,vec3 inputN,vec2 filteringInfo) {vec3 n=normalize(inputN);vec3 c=textureCube(inputTexture,n).rgb; if (alphaG==0.) { #ifdef GAMMA_INPUT c=toLinearSpace(c); #endif return c;} else {vec3 result=vec3(0.);vec3 tangent=abs(n.z)<0.999 ? vec3(0.,0.,1.) : vec3(1.,0.,0.);tangent=normalize(cross(tangent,n));vec3 bitangent=cross(n,tangent);mat3 tbn=mat3(tangent,bitangent,n);float maxLevel=filteringInfo.y;float dim0=filteringInfo.x;float omegaP=(4.*PI)/(6.*dim0*dim0);float weight=0.; #if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) for(uint i=0u; i0.) {float pdf_inversed=4./normalDistributionFunction_TrowbridgeReitzGGX(NoH,alphaG);float omegaS=NUM_SAMPLES_FLOAT_INVERSED*pdf_inversed;float l=log4(omegaS)-log4(omegaP)+log4(K);float mipLevel=clamp(float(l),0.0,maxLevel);weight+=NoL;vec3 c=textureCubeLodEXT(inputTexture,tbn*L,mipLevel).rgb; #ifdef GAMMA_INPUT c=toLinearSpace(c); #endif result+=c*NoL;}} result=result/weight;return result;}} #endif #endif `; je.IncludesShadersStore[J1e] = e_e; const t_e = "pbrDirectLightingFunctions", i_e = `#define CLEARCOATREFLECTANCE90 1.0 struct lightingInfo {vec3 diffuse; #ifdef SPECULARTERM vec3 specular; #endif #ifdef CLEARCOAT vec4 clearCoat; #endif #ifdef SHEEN vec3 sheen; #endif };float adjustRoughnessFromLightProperties(float roughness,float lightRadius,float lightDistance) { #if defined(USEPHYSICALLIGHTFALLOFF) || defined(USEGLTFLIGHTFALLOFF) float lightRoughness=lightRadius/lightDistance;float totalRoughness=saturate(lightRoughness+roughness);return totalRoughness; #else return roughness; #endif } vec3 computeHemisphericDiffuseLighting(preLightingInfo info,vec3 lightColor,vec3 groundColor) {return mix(groundColor,lightColor,info.NdotL);} vec3 computeDiffuseLighting(preLightingInfo info,vec3 lightColor) {float diffuseTerm=diffuseBRDF_Burley(info.NdotL,info.NdotV,info.VdotH,info.roughness);return diffuseTerm*info.attenuation*info.NdotL*lightColor;} #define inline vec3 computeProjectionTextureDiffuseLighting(sampler2D projectionLightSampler,mat4 textureProjectionMatrix){vec4 strq=textureProjectionMatrix*vec4(vPositionW,1.0);strq/=strq.w;vec3 textureColor=texture2D(projectionLightSampler,strq.xy).rgb;return toLinearSpace(textureColor);} #ifdef SS_TRANSLUCENCY vec3 computeDiffuseAndTransmittedLighting(preLightingInfo info,vec3 lightColor,vec3 transmittance) {float NdotL=absEps(info.NdotLUnclamped);float wrapNdotL=computeWrappedDiffuseNdotL(NdotL,0.02);float trAdapt=step(0.,info.NdotLUnclamped);vec3 transmittanceNdotL=mix(transmittance*wrapNdotL,vec3(wrapNdotL),trAdapt);float diffuseTerm=diffuseBRDF_Burley(NdotL,info.NdotV,info.VdotH,info.roughness);return diffuseTerm*transmittanceNdotL*info.attenuation*lightColor;} #endif #ifdef SPECULARTERM vec3 computeSpecularLighting(preLightingInfo info,vec3 N,vec3 
reflectance0,vec3 reflectance90,float geometricRoughnessFactor,vec3 lightColor) {float NdotH=saturateEps(dot(N,info.H));float roughness=max(info.roughness,geometricRoughnessFactor);float alphaG=convertRoughnessToAverageSlope(roughness);vec3 fresnel=fresnelSchlickGGX(info.VdotH,reflectance0,reflectance90); #ifdef IRIDESCENCE fresnel=mix(fresnel,reflectance0,info.iridescenceIntensity); #endif float distribution=normalDistributionFunction_TrowbridgeReitzGGX(NdotH,alphaG); #ifdef BRDF_V_HEIGHT_CORRELATED float smithVisibility=smithVisibility_GGXCorrelated(info.NdotL,info.NdotV,alphaG); #else float smithVisibility=smithVisibility_TrowbridgeReitzGGXFast(info.NdotL,info.NdotV,alphaG); #endif vec3 specTerm=fresnel*distribution*smithVisibility;return specTerm*info.attenuation*info.NdotL*lightColor;} #endif #ifdef ANISOTROPIC vec3 computeAnisotropicSpecularLighting(preLightingInfo info,vec3 V,vec3 N,vec3 T,vec3 B,float anisotropy,vec3 reflectance0,vec3 reflectance90,float geometricRoughnessFactor,vec3 lightColor) {float NdotH=saturateEps(dot(N,info.H));float TdotH=dot(T,info.H);float BdotH=dot(B,info.H);float TdotV=dot(T,V);float BdotV=dot(B,V);float TdotL=dot(T,info.L);float BdotL=dot(B,info.L);float alphaG=convertRoughnessToAverageSlope(info.roughness);vec2 alphaTB=getAnisotropicRoughness(alphaG,anisotropy);alphaTB=max(alphaTB,square(geometricRoughnessFactor));vec3 fresnel=fresnelSchlickGGX(info.VdotH,reflectance0,reflectance90); #ifdef IRIDESCENCE fresnel=mix(fresnel,reflectance0,info.iridescenceIntensity); #endif float distribution=normalDistributionFunction_BurleyGGX_Anisotropic(NdotH,TdotH,BdotH,alphaTB);float smithVisibility=smithVisibility_GGXCorrelated_Anisotropic(info.NdotL,info.NdotV,TdotV,BdotV,TdotL,BdotL,alphaTB);vec3 specTerm=fresnel*distribution*smithVisibility;return specTerm*info.attenuation*info.NdotL*lightColor;} #endif #ifdef CLEARCOAT vec4 computeClearCoatLighting(preLightingInfo info,vec3 Ncc,float geometricRoughnessFactor,float clearCoatIntensity,vec3 lightColor) {float NccdotL=saturateEps(dot(Ncc,info.L));float NccdotH=saturateEps(dot(Ncc,info.H));float clearCoatRoughness=max(info.roughness,geometricRoughnessFactor);float alphaG=convertRoughnessToAverageSlope(clearCoatRoughness);float fresnel=fresnelSchlickGGX(info.VdotH,vClearCoatRefractionParams.x,CLEARCOATREFLECTANCE90);fresnel*=clearCoatIntensity;float distribution=normalDistributionFunction_TrowbridgeReitzGGX(NccdotH,alphaG);float kelemenVisibility=visibility_Kelemen(info.VdotH);float clearCoatTerm=fresnel*distribution*kelemenVisibility;return vec4( clearCoatTerm*info.attenuation*NccdotL*lightColor, 1.0-fresnel );} vec3 computeClearCoatLightingAbsorption(float NdotVRefract,vec3 L,vec3 Ncc,vec3 clearCoatColor,float clearCoatThickness,float clearCoatIntensity) {vec3 LRefract=-refract(L,Ncc,vClearCoatRefractionParams.y);float NdotLRefract=saturateEps(dot(Ncc,LRefract));vec3 absorption=computeClearCoatAbsorption(NdotVRefract,NdotLRefract,clearCoatColor,clearCoatThickness,clearCoatIntensity);return absorption;} #endif #ifdef SHEEN vec3 computeSheenLighting(preLightingInfo info,vec3 N,vec3 reflectance0,vec3 reflectance90,float geometricRoughnessFactor,vec3 lightColor) {float NdotH=saturateEps(dot(N,info.H));float roughness=max(info.roughness,geometricRoughnessFactor);float alphaG=convertRoughnessToAverageSlope(roughness);float fresnel=1.;float distribution=normalDistributionFunction_CharlieSheen(NdotH,alphaG);/*#ifdef SHEEN_SOFTER float visibility=visibility_CharlieSheen(info.NdotL,info.NdotV,alphaG); #else */ float 
visibility=visibility_Ashikhmin(info.NdotL,info.NdotV);/* #endif */ float sheenTerm=fresnel*distribution*visibility;return sheenTerm*info.attenuation*info.NdotL*lightColor;} #endif `; je.IncludesShadersStore[t_e] = i_e; const r_e = "pbrIBLFunctions", s_e = `#if defined(REFLECTION) || defined(SS_REFRACTION) float getLodFromAlphaG(float cubeMapDimensionPixels,float microsurfaceAverageSlope) {float microsurfaceAverageSlopeTexels=cubeMapDimensionPixels*microsurfaceAverageSlope;float lod=log2(microsurfaceAverageSlopeTexels);return lod;} float getLinearLodFromRoughness(float cubeMapDimensionPixels,float roughness) {float lod=log2(cubeMapDimensionPixels)*roughness;return lod;} #endif #if defined(ENVIRONMENTBRDF) && defined(RADIANCEOCCLUSION) float environmentRadianceOcclusion(float ambientOcclusion,float NdotVUnclamped) {float temp=NdotVUnclamped+ambientOcclusion;return saturate(square(temp)-1.0+ambientOcclusion);} #endif #if defined(ENVIRONMENTBRDF) && defined(HORIZONOCCLUSION) float environmentHorizonOcclusion(vec3 view,vec3 normal,vec3 geometricNormal) {vec3 reflection=reflect(view,normal);float temp=saturate(1.0+1.1*dot(reflection,geometricNormal));return square(temp);} #endif #if defined(LODINREFLECTIONALPHA) || defined(SS_LODINREFRACTIONALPHA) #define UNPACK_LOD(x) (1.0-x)*255.0 float getLodFromAlphaG(float cubeMapDimensionPixels,float alphaG,float NdotV) {float microsurfaceAverageSlope=alphaG;microsurfaceAverageSlope*=sqrt(abs(NdotV));return getLodFromAlphaG(cubeMapDimensionPixels,microsurfaceAverageSlope);} #endif `; je.IncludesShadersStore[r_e] = s_e; const n_e = "pbrBlockAlbedoOpacity", a_e = `struct albedoOpacityOutParams {vec3 surfaceAlbedo;float alpha;}; #define pbr_inline void albedoOpacityBlock( in vec4 vAlbedoColor, #ifdef ALBEDO in vec4 albedoTexture, in vec2 albedoInfos, #endif #ifdef OPACITY in vec4 opacityMap, in vec2 vOpacityInfos, #endif #ifdef DETAIL in vec4 detailColor, in vec4 vDetailInfos, #endif #ifdef DECAL in vec4 decalColor, in vec4 vDecalInfos, #endif out albedoOpacityOutParams outParams ) {vec3 surfaceAlbedo=vAlbedoColor.rgb;float alpha=vAlbedoColor.a; #ifdef ALBEDO #if defined(ALPHAFROMALBEDO) || defined(ALPHATEST) alpha*=albedoTexture.a; #endif #ifdef GAMMAALBEDO surfaceAlbedo*=toLinearSpace(albedoTexture.rgb); #else surfaceAlbedo*=albedoTexture.rgb; #endif surfaceAlbedo*=albedoInfos.y; #endif #ifndef DECAL_AFTER_DETAIL #include #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) surfaceAlbedo*=vColor.rgb; #endif #ifdef DETAIL float detailAlbedo=2.0*mix(0.5,detailColor.r,vDetailInfos.y);surfaceAlbedo.rgb=surfaceAlbedo.rgb*detailAlbedo*detailAlbedo; #endif #ifdef DECAL_AFTER_DETAIL #include #endif #define CUSTOM_FRAGMENT_UPDATE_ALBEDO #ifdef OPACITY #ifdef OPACITYRGB alpha=getLuminance(opacityMap.rgb); #else alpha*=opacityMap.a; #endif alpha*=vOpacityInfos.y; #endif #if defined(VERTEXALPHA) || defined(INSTANCESCOLOR) && defined(INSTANCES) alpha*=vColor.a; #endif #if !defined(SS_LINKREFRACTIONTOTRANSPARENCY) && !defined(ALPHAFRESNEL) #ifdef ALPHATEST #if DEBUGMODE != 88 if (alpha0 #ifdef METALLICWORKFLOW vec2 metallicRoughness; #ifdef REFLECTIVITY vec4 surfaceMetallicColorMap; #endif #ifndef FROSTBITE_REFLECTANCE vec3 metallicF0; #endif #else #ifdef REFLECTIVITY vec4 surfaceReflectivityColorMap; #endif #endif #endif }; #define pbr_inline void reflectivityBlock( in vec4 vReflectivityColor, #ifdef METALLICWORKFLOW in vec3 surfaceAlbedo, in vec4 metallicReflectanceFactors, #endif #ifdef REFLECTIVITY in vec3 reflectivityInfos, in vec4 
surfaceMetallicOrReflectivityColorMap, #endif #if defined(METALLICWORKFLOW) && defined(REFLECTIVITY) && defined(AOSTOREINMETALMAPRED) in vec3 ambientOcclusionColorIn, #endif #ifdef MICROSURFACEMAP in vec4 microSurfaceTexel, #endif #ifdef DETAIL in vec4 detailColor, in vec4 vDetailInfos, #endif out reflectivityOutParams outParams ) {float microSurface=vReflectivityColor.a;vec3 surfaceReflectivityColor=vReflectivityColor.rgb; #ifdef METALLICWORKFLOW vec2 metallicRoughness=surfaceReflectivityColor.rg; #ifdef REFLECTIVITY #if DEBUGMODE>0 outParams.surfaceMetallicColorMap=surfaceMetallicOrReflectivityColorMap; #endif #ifdef AOSTOREINMETALMAPRED vec3 aoStoreInMetalMap=vec3(surfaceMetallicOrReflectivityColorMap.r,surfaceMetallicOrReflectivityColorMap.r,surfaceMetallicOrReflectivityColorMap.r);outParams.ambientOcclusionColor=mix(ambientOcclusionColorIn,aoStoreInMetalMap,reflectivityInfos.z); #endif #ifdef METALLNESSSTOREINMETALMAPBLUE metallicRoughness.r*=surfaceMetallicOrReflectivityColorMap.b; #else metallicRoughness.r*=surfaceMetallicOrReflectivityColorMap.r; #endif #ifdef ROUGHNESSSTOREINMETALMAPALPHA metallicRoughness.g*=surfaceMetallicOrReflectivityColorMap.a; #else #ifdef ROUGHNESSSTOREINMETALMAPGREEN metallicRoughness.g*=surfaceMetallicOrReflectivityColorMap.g; #endif #endif #endif #ifdef DETAIL float detailRoughness=mix(0.5,detailColor.b,vDetailInfos.w);float loLerp=mix(0.,metallicRoughness.g,detailRoughness*2.);float hiLerp=mix(metallicRoughness.g,1.,(detailRoughness-0.5)*2.);metallicRoughness.g=mix(loLerp,hiLerp,step(detailRoughness,0.5)); #endif #ifdef MICROSURFACEMAP metallicRoughness.g*=microSurfaceTexel.r; #endif #if DEBUGMODE>0 outParams.metallicRoughness=metallicRoughness; #endif #define CUSTOM_FRAGMENT_UPDATE_METALLICROUGHNESS microSurface=1.0-metallicRoughness.g;vec3 baseColor=surfaceAlbedo; #ifdef FROSTBITE_REFLECTANCE outParams.surfaceAlbedo=baseColor.rgb*(1.0-metallicRoughness.r);surfaceReflectivityColor=mix(0.16*reflectance*reflectance,baseColor,metallicRoughness.r); #else vec3 metallicF0=metallicReflectanceFactors.rgb; #if DEBUGMODE>0 outParams.metallicF0=metallicF0; #endif outParams.surfaceAlbedo=mix(baseColor.rgb*(1.0-metallicF0),vec3(0.,0.,0.),metallicRoughness.r);surfaceReflectivityColor=mix(metallicF0,baseColor,metallicRoughness.r); #endif #else #ifdef REFLECTIVITY surfaceReflectivityColor*=surfaceMetallicOrReflectivityColorMap.rgb; #if DEBUGMODE>0 outParams.surfaceReflectivityColorMap=surfaceMetallicOrReflectivityColorMap; #endif #ifdef MICROSURFACEFROMREFLECTIVITYMAP microSurface*=surfaceMetallicOrReflectivityColorMap.a;microSurface*=reflectivityInfos.z; #else #ifdef MICROSURFACEAUTOMATIC microSurface*=computeDefaultMicroSurface(microSurface,surfaceReflectivityColor); #endif #ifdef MICROSURFACEMAP microSurface*=microSurfaceTexel.r; #endif #define CUSTOM_FRAGMENT_UPDATE_MICROSURFACE #endif #endif #endif microSurface=saturate(microSurface);float roughness=1.-microSurface;outParams.microSurface=microSurface;outParams.roughness=roughness;outParams.surfaceReflectivityColor=surfaceReflectivityColor;} `; je.IncludesShadersStore[o_e] = l_e; const c_e = "pbrBlockAmbientOcclusion", u_e = `struct ambientOcclusionOutParams {vec3 ambientOcclusionColor; #if DEBUGMODE>0 && defined(AMBIENT) vec3 ambientOcclusionColorMap; #endif }; #define pbr_inline void ambientOcclusionBlock( #ifdef AMBIENT in vec3 ambientOcclusionColorMap_, in vec4 vAmbientInfos, #endif out ambientOcclusionOutParams outParams ) {vec3 ambientOcclusionColor=vec3(1.,1.,1.); #ifdef AMBIENT vec3 
ambientOcclusionColorMap=ambientOcclusionColorMap_*vAmbientInfos.y; #ifdef AMBIENTINGRAYSCALE ambientOcclusionColorMap=vec3(ambientOcclusionColorMap.r,ambientOcclusionColorMap.r,ambientOcclusionColorMap.r); #endif ambientOcclusionColor=mix(ambientOcclusionColor,ambientOcclusionColorMap,vAmbientInfos.z); #if DEBUGMODE>0 outParams.ambientOcclusionColorMap=ambientOcclusionColorMap; #endif #endif outParams.ambientOcclusionColor=ambientOcclusionColor;} `; je.IncludesShadersStore[c_e] = u_e; const h_e = "pbrBlockAlphaFresnel", d_e = `#ifdef ALPHAFRESNEL #if defined(ALPHATEST) || defined(ALPHABLEND) struct alphaFresnelOutParams {float alpha;}; #define pbr_inline void alphaFresnelBlock( in vec3 normalW, in vec3 viewDirectionW, in float alpha, in float microSurface, out alphaFresnelOutParams outParams ) {float opacityPerceptual=alpha; #ifdef LINEARALPHAFRESNEL float opacity0=opacityPerceptual; #else float opacity0=opacityPerceptual*opacityPerceptual; #endif float opacity90=fresnelGrazingReflectance(opacity0);vec3 normalForward=faceforward(normalW,-viewDirectionW,normalW);outParams.alpha=getReflectanceFromAnalyticalBRDFLookup_Jones(saturate(dot(viewDirectionW,normalForward)),vec3(opacity0),vec3(opacity90),sqrt(microSurface)).x; #ifdef ALPHATEST if (outParams.alpha0 && defined(ANISOTROPIC_TEXTURE) vec3 anisotropyMapData; #endif }; #define pbr_inline void anisotropicBlock( in vec3 vAnisotropy, in float roughness, #ifdef ANISOTROPIC_TEXTURE in vec3 anisotropyMapData, #endif in mat3 TBN, in vec3 normalW, in vec3 viewDirectionW, out anisotropicOutParams outParams ) {float anisotropy=vAnisotropy.b;vec3 anisotropyDirection=vec3(vAnisotropy.xy,0.); #ifdef ANISOTROPIC_TEXTURE anisotropy*=anisotropyMapData.b; #if DEBUGMODE>0 outParams.anisotropyMapData=anisotropyMapData; #endif anisotropyMapData.rg=anisotropyMapData.rg*2.0-1.0; #ifdef ANISOTROPIC_LEGACY anisotropyDirection.rg*=anisotropyMapData.rg; #else anisotropyDirection.xy=mat2(anisotropyDirection.x,anisotropyDirection.y,-anisotropyDirection.y,anisotropyDirection.x)*normalize(anisotropyMapData.rg); #endif #endif mat3 anisoTBN=mat3(normalize(TBN[0]),normalize(TBN[1]),normalize(TBN[2]));vec3 anisotropicTangent=normalize(anisoTBN*anisotropyDirection);vec3 anisotropicBitangent=normalize(cross(anisoTBN[2],anisotropicTangent));outParams.anisotropy=anisotropy;outParams.anisotropicTangent=anisotropicTangent;outParams.anisotropicBitangent=anisotropicBitangent;outParams.anisotropicNormal=getAnisotropicBentNormals(anisotropicTangent,anisotropicBitangent,normalW,viewDirectionW,anisotropy,roughness);} #endif `; je.IncludesShadersStore[f_e] = p_e; const __e = "pbrBlockReflection", m_e = `#ifdef REFLECTION struct reflectionOutParams {vec4 environmentRadiance;vec3 environmentIrradiance; #ifdef REFLECTIONMAP_3D vec3 reflectionCoords; #else vec2 reflectionCoords; #endif #ifdef SS_TRANSLUCENCY #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) vec3 irradianceVector; #endif #endif #endif }; #define pbr_inline void createReflectionCoords( in vec3 vPositionW, in vec3 normalW, #ifdef ANISOTROPIC in anisotropicOutParams anisotropicOut, #endif #ifdef REFLECTIONMAP_3D out vec3 reflectionCoords #else out vec2 reflectionCoords #endif ) { #ifdef ANISOTROPIC vec3 reflectionVector=computeReflectionCoords(vec4(vPositionW,1.0),anisotropicOut.anisotropicNormal); #else vec3 reflectionVector=computeReflectionCoords(vec4(vPositionW,1.0),normalW); #endif #ifdef REFLECTIONMAP_OPPOSITEZ reflectionVector.z*=-1.0; #endif #ifdef REFLECTIONMAP_3D 
reflectionCoords=reflectionVector; #else reflectionCoords=reflectionVector.xy; #ifdef REFLECTIONMAP_PROJECTION reflectionCoords/=reflectionVector.z; #endif reflectionCoords.y=1.0-reflectionCoords.y; #endif } #define pbr_inline #define inline void sampleReflectionTexture( in float alphaG, in vec3 vReflectionMicrosurfaceInfos, in vec2 vReflectionInfos, in vec3 vReflectionColor, #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) in float NdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION in float roughness, #endif #ifdef REFLECTIONMAP_3D in samplerCube reflectionSampler, const vec3 reflectionCoords, #else in sampler2D reflectionSampler, const vec2 reflectionCoords, #endif #ifndef LODBASEDMICROSFURACE #ifdef REFLECTIONMAP_3D in samplerCube reflectionSamplerLow, in samplerCube reflectionSamplerHigh, #else in sampler2D reflectionSamplerLow, in sampler2D reflectionSamplerHigh, #endif #endif #ifdef REALTIME_FILTERING in vec2 vReflectionFilteringInfo, #endif out vec4 environmentRadiance ) { #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) float reflectionLOD=getLodFromAlphaG(vReflectionMicrosurfaceInfos.x,alphaG,NdotVUnclamped); #elif defined(LINEARSPECULARREFLECTION) float reflectionLOD=getLinearLodFromRoughness(vReflectionMicrosurfaceInfos.x,roughness); #else float reflectionLOD=getLodFromAlphaG(vReflectionMicrosurfaceInfos.x,alphaG); #endif #ifdef LODBASEDMICROSFURACE reflectionLOD=reflectionLOD*vReflectionMicrosurfaceInfos.y+vReflectionMicrosurfaceInfos.z; #ifdef LODINREFLECTIONALPHA float automaticReflectionLOD=UNPACK_LOD(sampleReflection(reflectionSampler,reflectionCoords).a);float requestedReflectionLOD=max(automaticReflectionLOD,reflectionLOD); #else float requestedReflectionLOD=reflectionLOD; #endif #ifdef REALTIME_FILTERING environmentRadiance=vec4(radiance(alphaG,reflectionSampler,reflectionCoords,vReflectionFilteringInfo),1.0); #else environmentRadiance=sampleReflectionLod(reflectionSampler,reflectionCoords,reflectionLOD); #endif #else float lodReflectionNormalized=saturate(reflectionLOD/log2(vReflectionMicrosurfaceInfos.x));float lodReflectionNormalizedDoubled=lodReflectionNormalized*2.0;vec4 environmentMid=sampleReflection(reflectionSampler,reflectionCoords);if (lodReflectionNormalizedDoubled<1.0){environmentRadiance=mix( sampleReflection(reflectionSamplerHigh,reflectionCoords), environmentMid, lodReflectionNormalizedDoubled );} else {environmentRadiance=mix( environmentMid, sampleReflection(reflectionSamplerLow,reflectionCoords), lodReflectionNormalizedDoubled-1.0 );} #endif #ifdef RGBDREFLECTION environmentRadiance.rgb=fromRGBD(environmentRadiance); #endif #ifdef GAMMAREFLECTION environmentRadiance.rgb=toLinearSpace(environmentRadiance.rgb); #endif environmentRadiance.rgb*=vReflectionInfos.x;environmentRadiance.rgb*=vReflectionColor.rgb;} #define pbr_inline #define inline void reflectionBlock( in vec3 vPositionW, in vec3 normalW, in float alphaG, in vec3 vReflectionMicrosurfaceInfos, in vec2 vReflectionInfos, in vec3 vReflectionColor, #ifdef ANISOTROPIC in anisotropicOutParams anisotropicOut, #endif #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) in float NdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION in float roughness, #endif #ifdef REFLECTIONMAP_3D in samplerCube reflectionSampler, #else in sampler2D reflectionSampler, #endif #if defined(NORMAL) && defined(USESPHERICALINVERTEX) in vec3 vEnvironmentIrradiance, #endif #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) in 
mat4 reflectionMatrix, #endif #endif #ifdef USEIRRADIANCEMAP #ifdef REFLECTIONMAP_3D in samplerCube irradianceSampler, #else in sampler2D irradianceSampler, #endif #endif #ifndef LODBASEDMICROSFURACE #ifdef REFLECTIONMAP_3D in samplerCube reflectionSamplerLow, in samplerCube reflectionSamplerHigh, #else in sampler2D reflectionSamplerLow, in sampler2D reflectionSamplerHigh, #endif #endif #ifdef REALTIME_FILTERING in vec2 vReflectionFilteringInfo, #endif out reflectionOutParams outParams ) {vec4 environmentRadiance=vec4(0.,0.,0.,0.); #ifdef REFLECTIONMAP_3D vec3 reflectionCoords=vec3(0.); #else vec2 reflectionCoords=vec2(0.); #endif createReflectionCoords( vPositionW, normalW, #ifdef ANISOTROPIC anisotropicOut, #endif reflectionCoords );sampleReflectionTexture( alphaG, vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) NdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION roughness, #endif #ifdef REFLECTIONMAP_3D reflectionSampler, reflectionCoords, #else reflectionSampler, reflectionCoords, #endif #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif environmentRadiance );vec3 environmentIrradiance=vec3(0.,0.,0.); #ifdef USESPHERICALFROMREFLECTIONMAP #if defined(NORMAL) && defined(USESPHERICALINVERTEX) environmentIrradiance=vEnvironmentIrradiance; #else #ifdef ANISOTROPIC vec3 irradianceVector=vec3(reflectionMatrix*vec4(anisotropicOut.anisotropicNormal,0)).xyz; #else vec3 irradianceVector=vec3(reflectionMatrix*vec4(normalW,0)).xyz; #endif #ifdef REFLECTIONMAP_OPPOSITEZ irradianceVector.z*=-1.0; #endif #ifdef INVERTCUBICMAP irradianceVector.y*=-1.0; #endif #if defined(REALTIME_FILTERING) environmentIrradiance=irradiance(reflectionSampler,irradianceVector,vReflectionFilteringInfo); #else environmentIrradiance=computeEnvironmentIrradiance(irradianceVector); #endif #ifdef SS_TRANSLUCENCY outParams.irradianceVector=irradianceVector; #endif #endif #elif defined(USEIRRADIANCEMAP) vec4 environmentIrradiance4=sampleReflection(irradianceSampler,reflectionCoords);environmentIrradiance=environmentIrradiance4.rgb; #ifdef RGBDREFLECTION environmentIrradiance.rgb=fromRGBD(environmentIrradiance4); #endif #ifdef GAMMAREFLECTION environmentIrradiance.rgb=toLinearSpace(environmentIrradiance.rgb); #endif #endif environmentIrradiance*=vReflectionColor.rgb;outParams.environmentRadiance=environmentRadiance;outParams.environmentIrradiance=environmentIrradiance;outParams.reflectionCoords=reflectionCoords;} #endif `; je.IncludesShadersStore[__e] = m_e; const g_e = "pbrBlockSheen", v_e = `#ifdef SHEEN struct sheenOutParams {float sheenIntensity;vec3 sheenColor;float sheenRoughness; #ifdef SHEEN_LINKWITHALBEDO vec3 surfaceAlbedo; #endif #if defined(ENVIRONMENTBRDF) && defined(SHEEN_ALBEDOSCALING) float sheenAlbedoScaling; #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) vec3 finalSheenRadianceScaled; #endif #if DEBUGMODE>0 #ifdef SHEEN_TEXTURE vec4 sheenMapData; #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) vec3 sheenEnvironmentReflectance; #endif #endif }; #define pbr_inline #define inline void sheenBlock( in vec4 vSheenColor, #ifdef SHEEN_ROUGHNESS in float vSheenRoughness, #if defined(SHEEN_TEXTURE_ROUGHNESS) && !defined(SHEEN_TEXTURE_ROUGHNESS_IDENTICAL) && !defined(SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE) in vec4 sheenMapRoughnessData, #endif #endif in float roughness, #ifdef SHEEN_TEXTURE in vec4 sheenMapData, in float 
sheenMapLevel, #endif in float reflectance, #ifdef SHEEN_LINKWITHALBEDO in vec3 baseColor, in vec3 surfaceAlbedo, #endif #ifdef ENVIRONMENTBRDF in float NdotV, in vec3 environmentBrdf, #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) in vec2 AARoughnessFactors, in vec3 vReflectionMicrosurfaceInfos, in vec2 vReflectionInfos, in vec3 vReflectionColor, in vec4 vLightingIntensity, #ifdef REFLECTIONMAP_3D in samplerCube reflectionSampler, in vec3 reflectionCoords, #else in sampler2D reflectionSampler, in vec2 reflectionCoords, #endif in float NdotVUnclamped, #ifndef LODBASEDMICROSFURACE #ifdef REFLECTIONMAP_3D in samplerCube reflectionSamplerLow, in samplerCube reflectionSamplerHigh, #else in sampler2D reflectionSamplerLow, in sampler2D reflectionSamplerHigh, #endif #endif #ifdef REALTIME_FILTERING in vec2 vReflectionFilteringInfo, #endif #if !defined(REFLECTIONMAP_SKYBOX) && defined(RADIANCEOCCLUSION) in float seo, #endif #if !defined(REFLECTIONMAP_SKYBOX) && defined(HORIZONOCCLUSION) && defined(BUMP) && defined(REFLECTIONMAP_3D) in float eho, #endif #endif out sheenOutParams outParams ) {float sheenIntensity=vSheenColor.a; #ifdef SHEEN_TEXTURE #if DEBUGMODE>0 outParams.sheenMapData=sheenMapData; #endif #endif #ifdef SHEEN_LINKWITHALBEDO float sheenFactor=pow5(1.0-sheenIntensity);vec3 sheenColor=baseColor.rgb*(1.0-sheenFactor);float sheenRoughness=sheenIntensity;outParams.surfaceAlbedo=surfaceAlbedo*sheenFactor; #ifdef SHEEN_TEXTURE sheenIntensity*=sheenMapData.a; #endif #else vec3 sheenColor=vSheenColor.rgb; #ifdef SHEEN_TEXTURE #ifdef SHEEN_GAMMATEXTURE sheenColor.rgb*=toLinearSpace(sheenMapData.rgb); #else sheenColor.rgb*=sheenMapData.rgb; #endif sheenColor.rgb*=sheenMapLevel; #endif #ifdef SHEEN_ROUGHNESS float sheenRoughness=vSheenRoughness; #ifdef SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE #if defined(SHEEN_TEXTURE) sheenRoughness*=sheenMapData.a; #endif #elif defined(SHEEN_TEXTURE_ROUGHNESS) #ifdef SHEEN_TEXTURE_ROUGHNESS_IDENTICAL sheenRoughness*=sheenMapData.a; #else sheenRoughness*=sheenMapRoughnessData.a; #endif #endif #else float sheenRoughness=roughness; #ifdef SHEEN_TEXTURE sheenIntensity*=sheenMapData.a; #endif #endif #if !defined(SHEEN_ALBEDOSCALING) sheenIntensity*=(1.-reflectance); #endif sheenColor*=sheenIntensity; #endif #ifdef ENVIRONMENTBRDF /*#ifdef SHEEN_SOFTER vec3 environmentSheenBrdf=vec3(0.,0.,getBRDFLookupCharlieSheen(NdotV,sheenRoughness)); #else*/ #ifdef SHEEN_ROUGHNESS vec3 environmentSheenBrdf=getBRDFLookup(NdotV,sheenRoughness); #else vec3 environmentSheenBrdf=environmentBrdf; #endif /*#endif*/ #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) float sheenAlphaG=convertRoughnessToAverageSlope(sheenRoughness); #ifdef SPECULARAA sheenAlphaG+=AARoughnessFactors.y; #endif vec4 environmentSheenRadiance=vec4(0.,0.,0.,0.);sampleReflectionTexture( sheenAlphaG, vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) NdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION sheenRoughness, #endif reflectionSampler, reflectionCoords, #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif environmentSheenRadiance );vec3 sheenEnvironmentReflectance=getSheenReflectanceFromBRDFLookup(sheenColor,environmentSheenBrdf); #if !defined(REFLECTIONMAP_SKYBOX) && defined(RADIANCEOCCLUSION) sheenEnvironmentReflectance*=seo; #endif #if !defined(REFLECTIONMAP_SKYBOX) && defined(HORIZONOCCLUSION) && defined(BUMP) && 
defined(REFLECTIONMAP_3D) sheenEnvironmentReflectance*=eho; #endif #if DEBUGMODE>0 outParams.sheenEnvironmentReflectance=sheenEnvironmentReflectance; #endif outParams.finalSheenRadianceScaled= environmentSheenRadiance.rgb * sheenEnvironmentReflectance * vLightingIntensity.z; #endif #if defined(ENVIRONMENTBRDF) && defined(SHEEN_ALBEDOSCALING) outParams.sheenAlbedoScaling=1.0-sheenIntensity*max(max(sheenColor.r,sheenColor.g),sheenColor.b)*environmentSheenBrdf.b; #endif outParams.sheenIntensity=sheenIntensity;outParams.sheenColor=sheenColor;outParams.sheenRoughness=sheenRoughness;} #endif `; je.IncludesShadersStore[g_e] = v_e; const A_e = "pbrBlockClearcoat", y_e = `struct clearcoatOutParams {vec3 specularEnvironmentR0;float conservationFactor;vec3 clearCoatNormalW;vec2 clearCoatAARoughnessFactors;float clearCoatIntensity;float clearCoatRoughness; #ifdef REFLECTION vec3 finalClearCoatRadianceScaled; #endif #ifdef CLEARCOAT_TINT vec3 absorption;float clearCoatNdotVRefract;vec3 clearCoatColor;float clearCoatThickness; #endif #if defined(ENVIRONMENTBRDF) && defined(MS_BRDF_ENERGY_CONSERVATION) vec3 energyConservationFactorClearCoat; #endif #if DEBUGMODE>0 #ifdef CLEARCOAT_BUMP mat3 TBNClearCoat; #endif #ifdef CLEARCOAT_TEXTURE vec2 clearCoatMapData; #endif #if defined(CLEARCOAT_TINT) && defined(CLEARCOAT_TINT_TEXTURE) vec4 clearCoatTintMapData; #endif #ifdef REFLECTION vec4 environmentClearCoatRadiance;vec3 clearCoatEnvironmentReflectance; #endif float clearCoatNdotV; #endif }; #ifdef CLEARCOAT #define pbr_inline #define inline void clearcoatBlock( in vec3 vPositionW, in vec3 geometricNormalW, in vec3 viewDirectionW, in vec2 vClearCoatParams, #if defined(CLEARCOAT_TEXTURE_ROUGHNESS) && !defined(CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL) && !defined(CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE) in vec4 clearCoatMapRoughnessData, #endif in vec3 specularEnvironmentR0, #ifdef CLEARCOAT_TEXTURE in vec2 clearCoatMapData, #endif #ifdef CLEARCOAT_TINT in vec4 vClearCoatTintParams, in float clearCoatColorAtDistance, in vec4 vClearCoatRefractionParams, #ifdef CLEARCOAT_TINT_TEXTURE in vec4 clearCoatTintMapData, #endif #endif #ifdef CLEARCOAT_BUMP in vec2 vClearCoatBumpInfos, in vec4 clearCoatBumpMapData, in vec2 vClearCoatBumpUV, #if defined(TANGENT) && defined(NORMAL) in mat3 vTBN, #else in vec2 vClearCoatTangentSpaceParams, #endif #ifdef OBJECTSPACE_NORMALMAP in mat4 normalMatrix, #endif #endif #if defined(FORCENORMALFORWARD) && defined(NORMAL) in vec3 faceNormal, #endif #ifdef REFLECTION in vec3 vReflectionMicrosurfaceInfos, in vec2 vReflectionInfos, in vec3 vReflectionColor, in vec4 vLightingIntensity, #ifdef REFLECTIONMAP_3D in samplerCube reflectionSampler, #else in sampler2D reflectionSampler, #endif #ifndef LODBASEDMICROSFURACE #ifdef REFLECTIONMAP_3D in samplerCube reflectionSamplerLow, in samplerCube reflectionSamplerHigh, #else in sampler2D reflectionSamplerLow, in sampler2D reflectionSamplerHigh, #endif #endif #ifdef REALTIME_FILTERING in vec2 vReflectionFilteringInfo, #endif #endif #if defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) #ifdef RADIANCEOCCLUSION in float ambientMonochrome, #endif #endif #if defined(CLEARCOAT_BUMP) || defined(TWOSIDEDLIGHTING) in float frontFacingMultiplier, #endif out clearcoatOutParams outParams ) {float clearCoatIntensity=vClearCoatParams.x;float clearCoatRoughness=vClearCoatParams.y; #ifdef CLEARCOAT_TEXTURE clearCoatIntensity*=clearCoatMapData.x; #ifdef CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE clearCoatRoughness*=clearCoatMapData.y; #endif #if DEBUGMODE>0 
outParams.clearCoatMapData=clearCoatMapData; #endif #endif #if defined(CLEARCOAT_TEXTURE_ROUGHNESS) && !defined(CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE) #ifdef CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL clearCoatRoughness*=clearCoatMapData.y; #else clearCoatRoughness*=clearCoatMapRoughnessData.y; #endif #endif outParams.clearCoatIntensity=clearCoatIntensity;outParams.clearCoatRoughness=clearCoatRoughness; #ifdef CLEARCOAT_TINT vec3 clearCoatColor=vClearCoatTintParams.rgb;float clearCoatThickness=vClearCoatTintParams.a; #ifdef CLEARCOAT_TINT_TEXTURE #ifdef CLEARCOAT_TINT_GAMMATEXTURE clearCoatColor*=toLinearSpace(clearCoatTintMapData.rgb); #else clearCoatColor*=clearCoatTintMapData.rgb; #endif clearCoatThickness*=clearCoatTintMapData.a; #if DEBUGMODE>0 outParams.clearCoatTintMapData=clearCoatTintMapData; #endif #endif outParams.clearCoatColor=computeColorAtDistanceInMedia(clearCoatColor,clearCoatColorAtDistance);outParams.clearCoatThickness=clearCoatThickness; #endif #ifdef CLEARCOAT_REMAP_F0 vec3 specularEnvironmentR0Updated=getR0RemappedForClearCoat(specularEnvironmentR0); #else vec3 specularEnvironmentR0Updated=specularEnvironmentR0; #endif outParams.specularEnvironmentR0=mix(specularEnvironmentR0,specularEnvironmentR0Updated,clearCoatIntensity);vec3 clearCoatNormalW=geometricNormalW; #ifdef CLEARCOAT_BUMP #ifdef NORMALXYSCALE float clearCoatNormalScale=1.0; #else float clearCoatNormalScale=vClearCoatBumpInfos.y; #endif #if defined(TANGENT) && defined(NORMAL) mat3 TBNClearCoat=vTBN; #else vec2 TBNClearCoatUV=vClearCoatBumpUV*frontFacingMultiplier;mat3 TBNClearCoat=cotangent_frame(clearCoatNormalW*clearCoatNormalScale,vPositionW,TBNClearCoatUV,vClearCoatTangentSpaceParams); #endif #if DEBUGMODE>0 outParams.TBNClearCoat=TBNClearCoat; #endif #ifdef OBJECTSPACE_NORMALMAP clearCoatNormalW=normalize(clearCoatBumpMapData.xyz *2.0-1.0);clearCoatNormalW=normalize(mat3(normalMatrix)*clearCoatNormalW); #else clearCoatNormalW=perturbNormal(TBNClearCoat,clearCoatBumpMapData.xyz,vClearCoatBumpInfos.y); #endif #endif #if defined(FORCENORMALFORWARD) && defined(NORMAL) clearCoatNormalW*=sign(dot(clearCoatNormalW,faceNormal)); #endif #if defined(TWOSIDEDLIGHTING) && defined(NORMAL) clearCoatNormalW=clearCoatNormalW*frontFacingMultiplier; #endif outParams.clearCoatNormalW=clearCoatNormalW;outParams.clearCoatAARoughnessFactors=getAARoughnessFactors(clearCoatNormalW.xyz);float clearCoatNdotVUnclamped=dot(clearCoatNormalW,viewDirectionW);float clearCoatNdotV=absEps(clearCoatNdotVUnclamped); #if DEBUGMODE>0 outParams.clearCoatNdotV=clearCoatNdotV; #endif #ifdef CLEARCOAT_TINT vec3 clearCoatVRefract=refract(-viewDirectionW,clearCoatNormalW,vClearCoatRefractionParams.y);outParams.clearCoatNdotVRefract=absEps(dot(clearCoatNormalW,clearCoatVRefract)); #endif #if defined(ENVIRONMENTBRDF) && (!defined(REFLECTIONMAP_SKYBOX) || defined(MS_BRDF_ENERGY_CONSERVATION)) vec3 environmentClearCoatBrdf=getBRDFLookup(clearCoatNdotV,clearCoatRoughness); #endif #if defined(REFLECTION) float clearCoatAlphaG=convertRoughnessToAverageSlope(clearCoatRoughness); #ifdef SPECULARAA clearCoatAlphaG+=outParams.clearCoatAARoughnessFactors.y; #endif vec4 environmentClearCoatRadiance=vec4(0.,0.,0.,0.);vec3 clearCoatReflectionVector=computeReflectionCoords(vec4(vPositionW,1.0),clearCoatNormalW); #ifdef REFLECTIONMAP_OPPOSITEZ clearCoatReflectionVector.z*=-1.0; #endif #ifdef REFLECTIONMAP_3D vec3 clearCoatReflectionCoords=clearCoatReflectionVector; #else vec2 clearCoatReflectionCoords=clearCoatReflectionVector.xy; #ifdef REFLECTIONMAP_PROJECTION 
clearCoatReflectionCoords/=clearCoatReflectionVector.z; #endif clearCoatReflectionCoords.y=1.0-clearCoatReflectionCoords.y; #endif sampleReflectionTexture( clearCoatAlphaG, vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) clearCoatNdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION clearCoatRoughness, #endif reflectionSampler, clearCoatReflectionCoords, #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif environmentClearCoatRadiance ); #if DEBUGMODE>0 outParams.environmentClearCoatRadiance=environmentClearCoatRadiance; #endif #if defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) vec3 clearCoatEnvironmentReflectance=getReflectanceFromBRDFLookup(vec3(vClearCoatRefractionParams.x),environmentClearCoatBrdf); #ifdef HORIZONOCCLUSION #ifdef BUMP #ifdef REFLECTIONMAP_3D float clearCoatEho=environmentHorizonOcclusion(-viewDirectionW,clearCoatNormalW,geometricNormalW);clearCoatEnvironmentReflectance*=clearCoatEho; #endif #endif #endif #else vec3 clearCoatEnvironmentReflectance=getReflectanceFromAnalyticalBRDFLookup_Jones(clearCoatNdotV,vec3(1.),vec3(1.),sqrt(1.-clearCoatRoughness)); #endif clearCoatEnvironmentReflectance*=clearCoatIntensity; #if DEBUGMODE>0 outParams.clearCoatEnvironmentReflectance=clearCoatEnvironmentReflectance; #endif outParams.finalClearCoatRadianceScaled= environmentClearCoatRadiance.rgb * clearCoatEnvironmentReflectance * vLightingIntensity.z; #endif #if defined(CLEARCOAT_TINT) outParams.absorption=computeClearCoatAbsorption(outParams.clearCoatNdotVRefract,outParams.clearCoatNdotVRefract,outParams.clearCoatColor,clearCoatThickness,clearCoatIntensity); #endif float fresnelIBLClearCoat=fresnelSchlickGGX(clearCoatNdotV,vClearCoatRefractionParams.x,CLEARCOATREFLECTANCE90);fresnelIBLClearCoat*=clearCoatIntensity;outParams.conservationFactor=(1.-fresnelIBLClearCoat); #if defined(ENVIRONMENTBRDF) && defined(MS_BRDF_ENERGY_CONSERVATION) outParams.energyConservationFactorClearCoat=getEnergyConservationFactor(outParams.specularEnvironmentR0,environmentClearCoatBrdf); #endif } #endif `; je.IncludesShadersStore[A_e] = y_e; const C_e = "pbrBlockIridescence", x_e = `struct iridescenceOutParams {float iridescenceIntensity;float iridescenceIOR;float iridescenceThickness;vec3 specularEnvironmentR0;}; #ifdef IRIDESCENCE #define pbr_inline #define inline void iridescenceBlock( in vec4 vIridescenceParams, in float viewAngle, in vec3 specularEnvironmentR0, #ifdef IRIDESCENCE_TEXTURE in vec2 iridescenceMapData, #endif #ifdef IRIDESCENCE_THICKNESS_TEXTURE in vec2 iridescenceThicknessMapData, #endif #ifdef CLEARCOAT in float NdotVUnclamped, #ifdef CLEARCOAT_TEXTURE in vec2 clearCoatMapData, #endif #endif out iridescenceOutParams outParams ) {float iridescenceIntensity=vIridescenceParams.x;float iridescenceIOR=vIridescenceParams.y;float iridescenceThicknessMin=vIridescenceParams.z;float iridescenceThicknessMax=vIridescenceParams.w;float iridescenceThicknessWeight=1.; #ifdef IRIDESCENCE_TEXTURE iridescenceIntensity*=iridescenceMapData.x; #ifdef IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE iridescenceThicknessWeight=iridescenceMapData.g; #endif #endif #if defined(IRIDESCENCE_THICKNESS_TEXTURE) iridescenceThicknessWeight=iridescenceThicknessMapData.g; #endif float iridescenceThickness=mix(iridescenceThicknessMin,iridescenceThicknessMax,iridescenceThicknessWeight);float topIor=1.; #ifdef CLEARCOAT float 
clearCoatIntensity=vClearCoatParams.x; #ifdef CLEARCOAT_TEXTURE clearCoatIntensity*=clearCoatMapData.x; #endif topIor=mix(1.0,vClearCoatRefractionParams.w-1.,clearCoatIntensity);viewAngle=sqrt(1.0+square(1.0/topIor)*(square(NdotVUnclamped)-1.0)); #endif vec3 iridescenceFresnel=evalIridescence(topIor,iridescenceIOR,viewAngle,iridescenceThickness,specularEnvironmentR0);outParams.specularEnvironmentR0=mix(specularEnvironmentR0,iridescenceFresnel,iridescenceIntensity);outParams.iridescenceIntensity=iridescenceIntensity;outParams.iridescenceThickness=iridescenceThickness;outParams.iridescenceIOR=iridescenceIOR;} #endif `; je.IncludesShadersStore[C_e] = x_e; const b_e = "pbrBlockSubSurface", E_e = `struct subSurfaceOutParams {vec3 specularEnvironmentReflectance; #ifdef SS_REFRACTION vec3 finalRefraction;vec3 surfaceAlbedo; #ifdef SS_LINKREFRACTIONTOTRANSPARENCY float alpha; #endif #ifdef REFLECTION float refractionFactorForIrradiance; #endif #endif #ifdef SS_TRANSLUCENCY vec3 transmittance;float translucencyIntensity; #ifdef REFLECTION vec3 refractionIrradiance; #endif #endif #if DEBUGMODE>0 #ifdef SS_THICKNESSANDMASK_TEXTURE vec4 thicknessMap; #endif #ifdef SS_REFRACTION vec4 environmentRefraction;vec3 refractionTransmittance; #endif #endif }; #ifdef SUBSURFACE #ifdef SS_REFRACTION #define pbr_inline #define inline vec4 sampleEnvironmentRefraction( in float ior ,in float thickness ,in float refractionLOD ,in vec3 normalW ,in vec3 vPositionW ,in vec3 viewDirectionW ,in mat4 view ,in vec4 vRefractionInfos ,in mat4 refractionMatrix ,in vec4 vRefractionMicrosurfaceInfos ,in float alphaG #ifdef SS_REFRACTIONMAP_3D ,in samplerCube refractionSampler #ifndef LODBASEDMICROSFURACE ,in samplerCube refractionSamplerLow ,in samplerCube refractionSamplerHigh #endif #else ,in sampler2D refractionSampler #ifndef LODBASEDMICROSFURACE ,in sampler2D refractionSamplerLow ,in sampler2D refractionSamplerHigh #endif #endif #ifdef ANISOTROPIC ,in anisotropicOutParams anisotropicOut #endif #ifdef REALTIME_FILTERING ,in vec2 vRefractionFilteringInfo #endif #ifdef SS_USE_LOCAL_REFRACTIONMAP_CUBIC ,in vec3 refractionPosition ,in vec3 refractionSize #endif ) {vec4 environmentRefraction=vec4(0.,0.,0.,0.); #ifdef ANISOTROPIC vec3 refractionVector=refract(-viewDirectionW,anisotropicOut.anisotropicNormal,ior); #else vec3 refractionVector=refract(-viewDirectionW,normalW,ior); #endif #ifdef SS_REFRACTIONMAP_OPPOSITEZ refractionVector.z*=-1.0; #endif #ifdef SS_REFRACTIONMAP_3D #ifdef SS_USE_LOCAL_REFRACTIONMAP_CUBIC refractionVector=parallaxCorrectNormal(vPositionW,refractionVector,refractionSize,refractionPosition); #endif refractionVector.y=refractionVector.y*vRefractionInfos.w;vec3 refractionCoords=refractionVector;refractionCoords=vec3(refractionMatrix*vec4(refractionCoords,0)); #else #ifdef SS_USE_THICKNESS_AS_DEPTH vec3 vRefractionUVW=vec3(refractionMatrix*(view*vec4(vPositionW+refractionVector*thickness,1.0))); #else vec3 vRefractionUVW=vec3(refractionMatrix*(view*vec4(vPositionW+refractionVector*vRefractionInfos.z,1.0))); #endif vec2 refractionCoords=vRefractionUVW.xy/vRefractionUVW.z;refractionCoords.y=1.0-refractionCoords.y; #endif #ifdef LODBASEDMICROSFURACE refractionLOD=refractionLOD*vRefractionMicrosurfaceInfos.y+vRefractionMicrosurfaceInfos.z; #ifdef SS_LODINREFRACTIONALPHA float automaticRefractionLOD=UNPACK_LOD(sampleRefraction(refractionSampler,refractionCoords).a);float requestedRefractionLOD=max(automaticRefractionLOD,refractionLOD); #else float requestedRefractionLOD=refractionLOD; #endif #if 
defined(REALTIME_FILTERING) && defined(SS_REFRACTIONMAP_3D) environmentRefraction=vec4(radiance(alphaG,refractionSampler,refractionCoords,vRefractionFilteringInfo),1.0); #else environmentRefraction=sampleRefractionLod(refractionSampler,refractionCoords,requestedRefractionLOD); #endif #else float lodRefractionNormalized=saturate(refractionLOD/log2(vRefractionMicrosurfaceInfos.x));float lodRefractionNormalizedDoubled=lodRefractionNormalized*2.0;vec4 environmentRefractionMid=sampleRefraction(refractionSampler,refractionCoords);if (lodRefractionNormalizedDoubled<1.0){environmentRefraction=mix( sampleRefraction(refractionSamplerHigh,refractionCoords), environmentRefractionMid, lodRefractionNormalizedDoubled );} else {environmentRefraction=mix( environmentRefractionMid, sampleRefraction(refractionSamplerLow,refractionCoords), lodRefractionNormalizedDoubled-1.0 );} #endif #ifdef SS_RGBDREFRACTION environmentRefraction.rgb=fromRGBD(environmentRefraction); #endif #ifdef SS_GAMMAREFRACTION environmentRefraction.rgb=toLinearSpace(environmentRefraction.rgb); #endif return environmentRefraction;} #endif #define pbr_inline #define inline void subSurfaceBlock( in vec3 vSubSurfaceIntensity, in vec2 vThicknessParam, in vec4 vTintColor, in vec3 normalW, in vec3 specularEnvironmentReflectance, #ifdef SS_THICKNESSANDMASK_TEXTURE in vec4 thicknessMap, #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE in vec4 refractionIntensityMap, #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE in vec4 translucencyIntensityMap, #endif #ifdef REFLECTION #ifdef SS_TRANSLUCENCY in mat4 reflectionMatrix, #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) in vec3 irradianceVector_, #endif #if defined(REALTIME_FILTERING) in samplerCube reflectionSampler, in vec2 vReflectionFilteringInfo, #endif #endif #ifdef USEIRRADIANCEMAP #ifdef REFLECTIONMAP_3D in samplerCube irradianceSampler, #else in sampler2D irradianceSampler, #endif #endif #endif #endif #if defined(SS_REFRACTION) || defined(SS_TRANSLUCENCY) in vec3 surfaceAlbedo, #endif #ifdef SS_REFRACTION in vec3 vPositionW, in vec3 viewDirectionW, in mat4 view, in vec4 vRefractionInfos, in mat4 refractionMatrix, in vec4 vRefractionMicrosurfaceInfos, in vec4 vLightingIntensity, #ifdef SS_LINKREFRACTIONTOTRANSPARENCY in float alpha, #endif #ifdef SS_LODINREFRACTIONALPHA in float NdotVUnclamped, #endif #ifdef SS_LINEARSPECULARREFRACTION in float roughness, #endif in float alphaG, #ifdef SS_REFRACTIONMAP_3D in samplerCube refractionSampler, #ifndef LODBASEDMICROSFURACE in samplerCube refractionSamplerLow, in samplerCube refractionSamplerHigh, #endif #else in sampler2D refractionSampler, #ifndef LODBASEDMICROSFURACE in sampler2D refractionSamplerLow, in sampler2D refractionSamplerHigh, #endif #endif #ifdef ANISOTROPIC in anisotropicOutParams anisotropicOut, #endif #ifdef REALTIME_FILTERING in vec2 vRefractionFilteringInfo, #endif #ifdef SS_USE_LOCAL_REFRACTIONMAP_CUBIC in vec3 refractionPosition, in vec3 refractionSize, #endif #ifdef SS_DISPERSION in float dispersion, #endif #endif #ifdef SS_TRANSLUCENCY in vec3 vDiffusionDistance, #endif out subSurfaceOutParams outParams ) {outParams.specularEnvironmentReflectance=specularEnvironmentReflectance; #ifdef SS_REFRACTION float refractionIntensity=vSubSurfaceIntensity.x; #ifdef SS_LINKREFRACTIONTOTRANSPARENCY refractionIntensity*=(1.0-alpha);outParams.alpha=1.0; #endif #endif #ifdef SS_TRANSLUCENCY float translucencyIntensity=vSubSurfaceIntensity.y; #endif #ifdef SS_THICKNESSANDMASK_TEXTURE #if 
defined(SS_USE_GLTF_TEXTURES) float thickness=thicknessMap.g*vThicknessParam.y+vThicknessParam.x; #else float thickness=thicknessMap.r*vThicknessParam.y+vThicknessParam.x; #endif #if DEBUGMODE>0 outParams.thicknessMap=thicknessMap; #endif #ifdef SS_MASK_FROM_THICKNESS_TEXTURE #if defined(SS_REFRACTION) && defined(SS_REFRACTION_USE_INTENSITY_FROM_TEXTURE) #if defined(SS_USE_GLTF_TEXTURES) refractionIntensity*=thicknessMap.r; #else refractionIntensity*=thicknessMap.g; #endif #endif #if defined(SS_TRANSLUCENCY) && defined(SS_TRANSLUCENCY_USE_INTENSITY_FROM_TEXTURE) translucencyIntensity*=thicknessMap.b; #endif #endif #else float thickness=vThicknessParam.y; #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE #ifdef SS_USE_GLTF_TEXTURES refractionIntensity*=refractionIntensityMap.r; #else refractionIntensity*=refractionIntensityMap.g; #endif #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE translucencyIntensity*=translucencyIntensityMap.b; #endif #ifdef SS_TRANSLUCENCY thickness=maxEps(thickness);vec3 transmittance=transmittanceBRDF_Burley(vTintColor.rgb,vDiffusionDistance,thickness);transmittance*=translucencyIntensity;outParams.transmittance=transmittance;outParams.translucencyIntensity=translucencyIntensity; #endif #ifdef SS_REFRACTION vec4 environmentRefraction=vec4(0.,0.,0.,0.); #ifdef SS_HAS_THICKNESS float ior=vRefractionInfos.y; #else float ior=vRefractionMicrosurfaceInfos.w; #endif #ifdef SS_LODINREFRACTIONALPHA float refractionAlphaG=alphaG;refractionAlphaG=mix(alphaG,0.0,clamp(ior*3.0-2.0,0.0,1.0));float refractionLOD=getLodFromAlphaG(vRefractionMicrosurfaceInfos.x,refractionAlphaG,NdotVUnclamped); #elif defined(SS_LINEARSPECULARREFRACTION) float refractionRoughness=alphaG;refractionRoughness=mix(alphaG,0.0,clamp(ior*3.0-2.0,0.0,1.0));float refractionLOD=getLinearLodFromRoughness(vRefractionMicrosurfaceInfos.x,refractionRoughness); #else float refractionAlphaG=alphaG;refractionAlphaG=mix(alphaG,0.0,clamp(ior*3.0-2.0,0.0,1.0));float refractionLOD=getLodFromAlphaG(vRefractionMicrosurfaceInfos.x,refractionAlphaG); #endif float refraction_ior=vRefractionInfos.y; #ifdef SS_DISPERSION float realIOR=1.0/refraction_ior;float iorDispersionSpread=0.04*dispersion*(realIOR-1.0);vec3 iors=vec3(1.0/(realIOR-iorDispersionSpread),refraction_ior,1.0/(realIOR+iorDispersionSpread));for (int i=0; i<3; i++) {refraction_ior=iors[i]; #endif vec4 envSample=sampleEnvironmentRefraction(refraction_ior,thickness,refractionLOD,normalW,vPositionW,viewDirectionW,view,vRefractionInfos,refractionMatrix,vRefractionMicrosurfaceInfos,alphaG #ifdef SS_REFRACTIONMAP_3D ,refractionSampler #ifndef LODBASEDMICROSFURACE ,refractionSamplerLow ,refractionSamplerHigh #endif #else ,refractionSampler #ifndef LODBASEDMICROSFURACE ,refractionSamplerLow ,refractionSamplerHigh #endif #endif #ifdef ANISOTROPIC ,anisotropicOut #endif #ifdef REALTIME_FILTERING ,vRefractionFilteringInfo #endif #ifdef SS_USE_LOCAL_REFRACTIONMAP_CUBIC ,refractionPosition ,refractionSize #endif ); #ifdef SS_DISPERSION environmentRefraction[i]=envSample[i];} #else environmentRefraction=envSample; #endif environmentRefraction.rgb*=vRefractionInfos.x; #endif #ifdef SS_REFRACTION vec3 refractionTransmittance=vec3(refractionIntensity); #ifdef SS_THICKNESSANDMASK_TEXTURE vec3 volumeAlbedo=computeColorAtDistanceInMedia(vTintColor.rgb,vTintColor.w);refractionTransmittance*=cocaLambert(volumeAlbedo,thickness); #elif defined(SS_LINKREFRACTIONTOTRANSPARENCY) float maxChannel=max(max(surfaceAlbedo.r,surfaceAlbedo.g),surfaceAlbedo.b);vec3 
volumeAlbedo=saturate(maxChannel*surfaceAlbedo);environmentRefraction.rgb*=volumeAlbedo; #else vec3 volumeAlbedo=computeColorAtDistanceInMedia(vTintColor.rgb,vTintColor.w);refractionTransmittance*=cocaLambert(volumeAlbedo,vThicknessParam.y); #endif #ifdef SS_ALBEDOFORREFRACTIONTINT environmentRefraction.rgb*=surfaceAlbedo.rgb; #endif outParams.surfaceAlbedo=surfaceAlbedo*(1.-refractionIntensity); #ifdef REFLECTION outParams.refractionFactorForIrradiance=(1.-refractionIntensity); #endif #ifdef UNUSED_MULTIPLEBOUNCES vec3 bounceSpecularEnvironmentReflectance=(2.0*specularEnvironmentReflectance)/(1.0+specularEnvironmentReflectance);outParams.specularEnvironmentReflectance=mix(bounceSpecularEnvironmentReflectance,specularEnvironmentReflectance,refractionIntensity); #endif refractionTransmittance*=1.0-outParams.specularEnvironmentReflectance; #if DEBUGMODE>0 outParams.refractionTransmittance=refractionTransmittance; #endif outParams.finalRefraction=environmentRefraction.rgb*refractionTransmittance*vLightingIntensity.z; #if DEBUGMODE>0 outParams.environmentRefraction=environmentRefraction; #endif #endif #if defined(REFLECTION) && defined(SS_TRANSLUCENCY) #if defined(NORMAL) && defined(USESPHERICALINVERTEX) || !defined(USESPHERICALFROMREFLECTIONMAP) vec3 irradianceVector=vec3(reflectionMatrix*vec4(normalW,0)).xyz; #ifdef REFLECTIONMAP_OPPOSITEZ irradianceVector.z*=-1.0; #endif #ifdef INVERTCUBICMAP irradianceVector.y*=-1.0; #endif #else vec3 irradianceVector=irradianceVector_; #endif #if defined(USESPHERICALFROMREFLECTIONMAP) #if defined(REALTIME_FILTERING) vec3 refractionIrradiance=irradiance(reflectionSampler,-irradianceVector,vReflectionFilteringInfo); #else vec3 refractionIrradiance=computeEnvironmentIrradiance(-irradianceVector); #endif #elif defined(USEIRRADIANCEMAP) #ifdef REFLECTIONMAP_3D vec3 irradianceCoords=irradianceVector; #else vec2 irradianceCoords=irradianceVector.xy; #ifdef REFLECTIONMAP_PROJECTION irradianceCoords/=irradianceVector.z; #endif irradianceCoords.y=1.0-irradianceCoords.y; #endif vec4 refractionIrradiance=sampleReflection(irradianceSampler,-irradianceCoords); #ifdef RGBDREFLECTION refractionIrradiance.rgb=fromRGBD(refractionIrradiance); #endif #ifdef GAMMAREFLECTION refractionIrradiance.rgb=toLinearSpace(refractionIrradiance.rgb); #endif #else vec4 refractionIrradiance=vec4(0.); #endif refractionIrradiance.rgb*=transmittance; #ifdef SS_ALBEDOFORTRANSLUCENCYTINT refractionIrradiance.rgb*=surfaceAlbedo.rgb; #endif outParams.refractionIrradiance=refractionIrradiance.rgb; #endif } #endif `; je.IncludesShadersStore[b_e] = E_e; const T_e = "pbrBlockNormalGeometric", S_e = `vec3 viewDirectionW=normalize(vEyePosition.xyz-vPositionW); #ifdef NORMAL vec3 normalW=normalize(vNormalW); #else vec3 normalW=normalize(cross(dFdx(vPositionW),dFdy(vPositionW)))*vEyePosition.w; #endif vec3 geometricNormalW=normalW; #if defined(TWOSIDEDLIGHTING) && defined(NORMAL) geometricNormalW=gl_FrontFacing ? geometricNormalW : -geometricNormalW; #endif `; je.IncludesShadersStore[T_e] = S_e; const M_e = "pbrBlockNormalFinal", R_e = `#if defined(FORCENORMALFORWARD) && defined(NORMAL) vec3 faceNormal=normalize(cross(dFdx(vPositionW),dFdy(vPositionW)))*vEyePosition.w; #if defined(TWOSIDEDLIGHTING) faceNormal=gl_FrontFacing ? faceNormal : -faceNormal; #endif normalW*=sign(dot(normalW,faceNormal)); #endif #if defined(TWOSIDEDLIGHTING) && defined(NORMAL) normalW=gl_FrontFacing ? 
normalW : -normalW; #endif `; je.IncludesShadersStore[M_e] = R_e; const P_e = "pbrBlockLightmapInit", I_e = `#ifdef LIGHTMAP vec4 lightmapColor=texture2D(lightmapSampler,vLightmapUV+uvOffset); #ifdef RGBDLIGHTMAP lightmapColor.rgb=fromRGBD(lightmapColor); #endif #ifdef GAMMALIGHTMAP lightmapColor.rgb=toLinearSpace(lightmapColor.rgb); #endif lightmapColor.rgb*=vLightmapInfos.y; #endif `; je.IncludesShadersStore[P_e] = I_e; const D_e = "pbrBlockGeometryInfo", O_e = `float NdotVUnclamped=dot(normalW,viewDirectionW);float NdotV=absEps(NdotVUnclamped);float alphaG=convertRoughnessToAverageSlope(roughness);vec2 AARoughnessFactors=getAARoughnessFactors(normalW.xyz); #ifdef SPECULARAA alphaG+=AARoughnessFactors.y; #endif #if defined(ENVIRONMENTBRDF) vec3 environmentBrdf=getBRDFLookup(NdotV,roughness); #endif #if defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) #ifdef RADIANCEOCCLUSION #ifdef AMBIENTINGRAYSCALE float ambientMonochrome=aoOut.ambientOcclusionColor.r; #else float ambientMonochrome=getLuminance(aoOut.ambientOcclusionColor); #endif float seo=environmentRadianceOcclusion(ambientMonochrome,NdotVUnclamped); #endif #ifdef HORIZONOCCLUSION #ifdef BUMP #ifdef REFLECTIONMAP_3D float eho=environmentHorizonOcclusion(-viewDirectionW,normalW,geometricNormalW); #endif #endif #endif #endif `; je.IncludesShadersStore[D_e] = O_e; const w_e = "pbrBlockReflectance0", L_e = `float reflectance=max(max(reflectivityOut.surfaceReflectivityColor.r,reflectivityOut.surfaceReflectivityColor.g),reflectivityOut.surfaceReflectivityColor.b);vec3 specularEnvironmentR0=reflectivityOut.surfaceReflectivityColor.rgb; #ifdef METALLICWORKFLOW vec3 specularEnvironmentR90=vec3(metallicReflectanceFactors.a); #else vec3 specularEnvironmentR90=vec3(1.0,1.0,1.0); #endif #ifdef ALPHAFRESNEL float reflectance90=fresnelGrazingReflectance(reflectance);specularEnvironmentR90=specularEnvironmentR90*reflectance90; #endif `; je.IncludesShadersStore[w_e] = L_e; const N_e = "pbrBlockReflectance", F_e = `#if defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) vec3 specularEnvironmentReflectance=getReflectanceFromBRDFLookup(clearcoatOut.specularEnvironmentR0,specularEnvironmentR90,environmentBrdf); #ifdef RADIANCEOCCLUSION specularEnvironmentReflectance*=seo; #endif #ifdef HORIZONOCCLUSION #ifdef BUMP #ifdef REFLECTIONMAP_3D specularEnvironmentReflectance*=eho; #endif #endif #endif #else vec3 specularEnvironmentReflectance=getReflectanceFromAnalyticalBRDFLookup_Jones(NdotV,clearcoatOut.specularEnvironmentR0,specularEnvironmentR90,sqrt(microSurface)); #endif #ifdef CLEARCOAT specularEnvironmentReflectance*=clearcoatOut.conservationFactor; #if defined(CLEARCOAT_TINT) specularEnvironmentReflectance*=clearcoatOut.absorption; #endif #endif `; je.IncludesShadersStore[N_e] = F_e; const B_e = "pbrBlockDirectLighting", U_e = `vec3 diffuseBase=vec3(0.,0.,0.); #ifdef SPECULARTERM vec3 specularBase=vec3(0.,0.,0.); #endif #ifdef CLEARCOAT vec3 clearCoatBase=vec3(0.,0.,0.); #endif #ifdef SHEEN vec3 sheenBase=vec3(0.,0.,0.); #endif preLightingInfo preInfo;lightingInfo info;float shadow=1.; float aggShadow=0.;float numLights=0.; #if defined(CLEARCOAT) && defined(CLEARCOAT_TINT) vec3 absorption=vec3(0.); #endif `; je.IncludesShadersStore[B_e] = U_e; const V_e = "pbrBlockFinalLitComponents", k_e = `aggShadow=aggShadow/numLights; #if defined(ENVIRONMENTBRDF) #ifdef MS_BRDF_ENERGY_CONSERVATION vec3 energyConservationFactor=getEnergyConservationFactor(clearcoatOut.specularEnvironmentR0,environmentBrdf); #endif #endif #ifndef METALLICWORKFLOW 
#ifdef SPECULAR_GLOSSINESS_ENERGY_CONSERVATION surfaceAlbedo.rgb=(1.-reflectance)*surfaceAlbedo.rgb; #endif #endif #if defined(SHEEN) && defined(SHEEN_ALBEDOSCALING) && defined(ENVIRONMENTBRDF) surfaceAlbedo.rgb=sheenOut.sheenAlbedoScaling*surfaceAlbedo.rgb; #endif #ifdef REFLECTION vec3 finalIrradiance=reflectionOut.environmentIrradiance; #if defined(CLEARCOAT) finalIrradiance*=clearcoatOut.conservationFactor; #if defined(CLEARCOAT_TINT) finalIrradiance*=clearcoatOut.absorption; #endif #endif #if defined(SS_REFRACTION) finalIrradiance*=subSurfaceOut.refractionFactorForIrradiance; #endif #if defined(SS_TRANSLUCENCY) finalIrradiance*=(1.0-subSurfaceOut.translucencyIntensity);finalIrradiance+=subSurfaceOut.refractionIrradiance; #endif finalIrradiance*=surfaceAlbedo.rgb;finalIrradiance*=vLightingIntensity.z;finalIrradiance*=aoOut.ambientOcclusionColor; #endif #ifdef SPECULARTERM vec3 finalSpecular=specularBase;finalSpecular=max(finalSpecular,0.0);vec3 finalSpecularScaled=finalSpecular*vLightingIntensity.x*vLightingIntensity.w; #if defined(ENVIRONMENTBRDF) && defined(MS_BRDF_ENERGY_CONSERVATION) finalSpecularScaled*=energyConservationFactor; #endif #if defined(SHEEN) && defined(ENVIRONMENTBRDF) && defined(SHEEN_ALBEDOSCALING) finalSpecularScaled*=sheenOut.sheenAlbedoScaling; #endif #endif #ifdef REFLECTION vec3 finalRadiance=reflectionOut.environmentRadiance.rgb;finalRadiance*=subSurfaceOut.specularEnvironmentReflectance;vec3 finalRadianceScaled=finalRadiance*vLightingIntensity.z; #if defined(ENVIRONMENTBRDF) && defined(MS_BRDF_ENERGY_CONSERVATION) finalRadianceScaled*=energyConservationFactor; #endif #if defined(SHEEN) && defined(ENVIRONMENTBRDF) && defined(SHEEN_ALBEDOSCALING) finalRadianceScaled*=sheenOut.sheenAlbedoScaling; #endif #endif #ifdef SHEEN vec3 finalSheen=sheenBase*sheenOut.sheenColor;finalSheen=max(finalSheen,0.0);vec3 finalSheenScaled=finalSheen*vLightingIntensity.x*vLightingIntensity.w; #if defined(CLEARCOAT) && defined(REFLECTION) && defined(ENVIRONMENTBRDF) sheenOut.finalSheenRadianceScaled*=clearcoatOut.conservationFactor; #if defined(CLEARCOAT_TINT) sheenOut.finalSheenRadianceScaled*=clearcoatOut.absorption; #endif #endif #endif #ifdef CLEARCOAT vec3 finalClearCoat=clearCoatBase;finalClearCoat=max(finalClearCoat,0.0);vec3 finalClearCoatScaled=finalClearCoat*vLightingIntensity.x*vLightingIntensity.w; #if defined(ENVIRONMENTBRDF) && defined(MS_BRDF_ENERGY_CONSERVATION) finalClearCoatScaled*=clearcoatOut.energyConservationFactorClearCoat; #endif #ifdef SS_REFRACTION subSurfaceOut.finalRefraction*=clearcoatOut.conservationFactor; #ifdef CLEARCOAT_TINT subSurfaceOut.finalRefraction*=clearcoatOut.absorption; #endif #endif #endif #ifdef ALPHABLEND float luminanceOverAlpha=0.0; #if defined(REFLECTION) && defined(RADIANCEOVERALPHA) luminanceOverAlpha+=getLuminance(finalRadianceScaled); #if defined(CLEARCOAT) luminanceOverAlpha+=getLuminance(clearcoatOut.finalClearCoatRadianceScaled); #endif #endif #if defined(SPECULARTERM) && defined(SPECULAROVERALPHA) luminanceOverAlpha+=getLuminance(finalSpecularScaled); #endif #if defined(CLEARCOAT) && defined(CLEARCOATOVERALPHA) luminanceOverAlpha+=getLuminance(finalClearCoatScaled); #endif #if defined(RADIANCEOVERALPHA) || defined(SPECULAROVERALPHA) || defined(CLEARCOATOVERALPHA) alpha=saturate(alpha+luminanceOverAlpha*luminanceOverAlpha); #endif #endif `; je.IncludesShadersStore[V_e] = k_e; const z_e = "pbrBlockFinalUnlitComponents", H_e = `vec3 
finalDiffuse=diffuseBase;finalDiffuse*=surfaceAlbedo.rgb;finalDiffuse=max(finalDiffuse,0.0);finalDiffuse*=vLightingIntensity.x;vec3 finalAmbient=vAmbientColor;finalAmbient*=surfaceAlbedo.rgb;vec3 finalEmissive=vEmissiveColor; #ifdef EMISSIVE vec3 emissiveColorTex=texture2D(emissiveSampler,vEmissiveUV+uvOffset).rgb; #ifdef GAMMAEMISSIVE finalEmissive*=toLinearSpace(emissiveColorTex.rgb); #else finalEmissive*=emissiveColorTex.rgb; #endif finalEmissive*= vEmissiveInfos.y; #endif finalEmissive*=vLightingIntensity.y; #ifdef AMBIENT vec3 ambientOcclusionForDirectDiffuse=mix(vec3(1.),aoOut.ambientOcclusionColor,vAmbientInfos.w); #else vec3 ambientOcclusionForDirectDiffuse=aoOut.ambientOcclusionColor; #endif finalAmbient*=aoOut.ambientOcclusionColor;finalDiffuse*=ambientOcclusionForDirectDiffuse; `; je.IncludesShadersStore[z_e] = H_e; const G_e = "pbrBlockFinalColorComposition", K_e = `vec4 finalColor=vec4( #ifndef UNLIT #ifdef REFLECTION finalIrradiance + #endif #ifdef SPECULARTERM finalSpecularScaled + #endif #ifdef SHEEN finalSheenScaled + #endif #ifdef CLEARCOAT finalClearCoatScaled + #endif #ifdef REFLECTION finalRadianceScaled + #if defined(SHEEN) && defined(ENVIRONMENTBRDF) sheenOut.finalSheenRadianceScaled + #endif #ifdef CLEARCOAT clearcoatOut.finalClearCoatRadianceScaled + #endif #endif #ifdef SS_REFRACTION subSurfaceOut.finalRefraction + #endif #endif finalAmbient + finalDiffuse, alpha); #ifdef LIGHTMAP #ifndef LIGHTMAPEXCLUDED #ifdef USELIGHTMAPASSHADOWMAP finalColor.rgb*=lightmapColor.rgb; #else finalColor.rgb+=lightmapColor.rgb; #endif #endif #endif finalColor.rgb+=finalEmissive; #define CUSTOM_FRAGMENT_BEFORE_FOG finalColor=max(finalColor,0.0); `; je.IncludesShadersStore[G_e] = K_e; const W_e = "pbrBlockImageProcessing", j_e = `#if defined(IMAGEPROCESSINGPOSTPROCESS) || defined(SS_SCATTERING) #if !defined(SKIPFINALCOLORCLAMP) finalColor.rgb=clamp(finalColor.rgb,0.,30.0); #endif #else finalColor=applyImageProcessing(finalColor); #endif finalColor.a*=visibility; #ifdef PREMULTIPLYALPHA finalColor.rgb*=finalColor.a; #endif `; je.IncludesShadersStore[W_e] = j_e; const X_e = "pbrDebug", Y_e = `#if DEBUGMODE>0 if (vClipSpacePosition.x/vClipSpacePosition.w>=vDebugMode.x) { #if DEBUGMODE==1 gl_FragColor.rgb=vPositionW.rgb; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==2 && defined(NORMAL) gl_FragColor.rgb=vNormalW.rgb; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==3 && defined(BUMP) || DEBUGMODE==3 && defined(PARALLAX) || DEBUGMODE==3 && defined(ANISOTROPIC) gl_FragColor.rgb=TBN[0]; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==4 && defined(BUMP) || DEBUGMODE==4 && defined(PARALLAX) || DEBUGMODE==4 && defined(ANISOTROPIC) gl_FragColor.rgb=TBN[1]; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==5 gl_FragColor.rgb=normalW; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==6 && defined(MAINUV1) gl_FragColor.rgb=vec3(vMainUV1,0.0); #elif DEBUGMODE==7 && defined(MAINUV2) gl_FragColor.rgb=vec3(vMainUV2,0.0); #elif DEBUGMODE==8 && defined(CLEARCOAT) && defined(CLEARCOAT_BUMP) gl_FragColor.rgb=clearcoatOut.TBNClearCoat[0]; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==9 && defined(CLEARCOAT) && defined(CLEARCOAT_BUMP) gl_FragColor.rgb=clearcoatOut.TBNClearCoat[1]; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==10 && defined(CLEARCOAT) gl_FragColor.rgb=clearcoatOut.clearCoatNormalW; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==11 && defined(ANISOTROPIC) gl_FragColor.rgb=anisotropicOut.anisotropicNormal; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==12 && defined(ANISOTROPIC) 
gl_FragColor.rgb=anisotropicOut.anisotropicTangent; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==13 && defined(ANISOTROPIC) gl_FragColor.rgb=anisotropicOut.anisotropicBitangent; #define DEBUGMODE_NORMALIZE #elif DEBUGMODE==20 && defined(ALBEDO) gl_FragColor.rgb=albedoTexture.rgb; #ifndef GAMMAALBEDO #define DEBUGMODE_GAMMA #endif #elif DEBUGMODE==21 && defined(AMBIENT) gl_FragColor.rgb=aoOut.ambientOcclusionColorMap.rgb; #elif DEBUGMODE==22 && defined(OPACITY) gl_FragColor.rgb=opacityMap.rgb; #elif DEBUGMODE==23 && defined(EMISSIVE) gl_FragColor.rgb=emissiveColorTex.rgb; #ifndef GAMMAEMISSIVE #define DEBUGMODE_GAMMA #endif #elif DEBUGMODE==24 && defined(LIGHTMAP) gl_FragColor.rgb=lightmapColor.rgb; #ifndef GAMMALIGHTMAP #define DEBUGMODE_GAMMA #endif #elif DEBUGMODE==25 && defined(REFLECTIVITY) && defined(METALLICWORKFLOW) gl_FragColor.rgb=reflectivityOut.surfaceMetallicColorMap.rgb; #elif DEBUGMODE==26 && defined(REFLECTIVITY) && !defined(METALLICWORKFLOW) gl_FragColor.rgb=reflectivityOut.surfaceReflectivityColorMap.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==27 && defined(CLEARCOAT) && defined(CLEARCOAT_TEXTURE) gl_FragColor.rgb=vec3(clearcoatOut.clearCoatMapData.rg,0.0); #elif DEBUGMODE==28 && defined(CLEARCOAT) && defined(CLEARCOAT_TINT) && defined(CLEARCOAT_TINT_TEXTURE) gl_FragColor.rgb=clearcoatOut.clearCoatTintMapData.rgb; #elif DEBUGMODE==29 && defined(SHEEN) && defined(SHEEN_TEXTURE) gl_FragColor.rgb=sheenOut.sheenMapData.rgb; #elif DEBUGMODE==30 && defined(ANISOTROPIC) && defined(ANISOTROPIC_TEXTURE) gl_FragColor.rgb=anisotropicOut.anisotropyMapData.rgb; #elif DEBUGMODE==31 && defined(SUBSURFACE) && defined(SS_THICKNESSANDMASK_TEXTURE) gl_FragColor.rgb=subSurfaceOut.thicknessMap.rgb; #elif DEBUGMODE==32 && defined(BUMP) gl_FragColor.rgb=texture2D(bumpSampler,vBumpUV).rgb; #elif DEBUGMODE==40 && defined(SS_REFRACTION) gl_FragColor.rgb=subSurfaceOut.environmentRefraction.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==41 && defined(REFLECTION) gl_FragColor.rgb=reflectionOut.environmentRadiance.rgb; #ifndef GAMMAREFLECTION #define DEBUGMODE_GAMMA #endif #elif DEBUGMODE==42 && defined(CLEARCOAT) && defined(REFLECTION) gl_FragColor.rgb=clearcoatOut.environmentClearCoatRadiance.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==50 gl_FragColor.rgb=diffuseBase.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==51 && defined(SPECULARTERM) gl_FragColor.rgb=specularBase.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==52 && defined(CLEARCOAT) gl_FragColor.rgb=clearCoatBase.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==53 && defined(SHEEN) gl_FragColor.rgb=sheenBase.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==54 && defined(REFLECTION) gl_FragColor.rgb=reflectionOut.environmentIrradiance.rgb; #ifndef GAMMAREFLECTION #define DEBUGMODE_GAMMA #endif #elif DEBUGMODE==60 gl_FragColor.rgb=surfaceAlbedo.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==61 gl_FragColor.rgb=clearcoatOut.specularEnvironmentR0; #define DEBUGMODE_GAMMA #elif DEBUGMODE==62 && defined(METALLICWORKFLOW) gl_FragColor.rgb=vec3(reflectivityOut.metallicRoughness.r); #elif DEBUGMODE==71 && defined(METALLICWORKFLOW) gl_FragColor.rgb=reflectivityOut.metallicF0; #elif DEBUGMODE==63 gl_FragColor.rgb=vec3(roughness); #elif DEBUGMODE==64 gl_FragColor.rgb=vec3(alphaG); #elif DEBUGMODE==65 gl_FragColor.rgb=vec3(NdotV); #elif DEBUGMODE==66 && defined(CLEARCOAT) && defined(CLEARCOAT_TINT) gl_FragColor.rgb=clearcoatOut.clearCoatColor.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==67 && defined(CLEARCOAT) 
gl_FragColor.rgb=vec3(clearcoatOut.clearCoatRoughness); #elif DEBUGMODE==68 && defined(CLEARCOAT) gl_FragColor.rgb=vec3(clearcoatOut.clearCoatNdotV); #elif DEBUGMODE==69 && defined(SUBSURFACE) && defined(SS_TRANSLUCENCY) gl_FragColor.rgb=subSurfaceOut.transmittance; #elif DEBUGMODE==70 && defined(SUBSURFACE) && defined(SS_REFRACTION) gl_FragColor.rgb=subSurfaceOut.refractionTransmittance; #elif DEBUGMODE==72 gl_FragColor.rgb=vec3(microSurface); #elif DEBUGMODE==73 gl_FragColor.rgb=vAlbedoColor.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==74 && !defined(METALLICWORKFLOW) gl_FragColor.rgb=vReflectivityColor.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==75 gl_FragColor.rgb=vEmissiveColor.rgb; #define DEBUGMODE_GAMMA #elif DEBUGMODE==80 && defined(RADIANCEOCCLUSION) gl_FragColor.rgb=vec3(seo); #elif DEBUGMODE==81 && defined(HORIZONOCCLUSION) gl_FragColor.rgb=vec3(eho); #elif DEBUGMODE==82 && defined(MS_BRDF_ENERGY_CONSERVATION) gl_FragColor.rgb=vec3(energyConservationFactor); #elif DEBUGMODE==83 && defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) gl_FragColor.rgb=specularEnvironmentReflectance; #define DEBUGMODE_GAMMA #elif DEBUGMODE==84 && defined(CLEARCOAT) && defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) gl_FragColor.rgb=clearcoatOut.clearCoatEnvironmentReflectance; #define DEBUGMODE_GAMMA #elif DEBUGMODE==85 && defined(SHEEN) && defined(REFLECTION) gl_FragColor.rgb=sheenOut.sheenEnvironmentReflectance; #define DEBUGMODE_GAMMA #elif DEBUGMODE==86 && defined(ALPHABLEND) gl_FragColor.rgb=vec3(luminanceOverAlpha); #elif DEBUGMODE==87 gl_FragColor.rgb=vec3(alpha); #elif DEBUGMODE==88 && defined(ALBEDO) gl_FragColor.rgb=vec3(albedoTexture.a); #else float stripeWidth=30.;float stripePos=floor((gl_FragCoord.x+gl_FragCoord.y)/stripeWidth);float whichColor=mod(stripePos,2.);vec3 color1=vec3(.6,.2,.2);vec3 color2=vec3(.3,.1,.1);gl_FragColor.rgb=mix(color1,color2,whichColor); #endif gl_FragColor.rgb*=vDebugMode.y; #ifdef DEBUGMODE_NORMALIZE gl_FragColor.rgb=normalize(gl_FragColor.rgb)*0.5+0.5; #endif #ifdef DEBUGMODE_GAMMA gl_FragColor.rgb=toGammaSpace(gl_FragColor.rgb); #endif gl_FragColor.a=1.0; #ifdef PREPASS gl_FragData[0]=toLinearSpace(gl_FragColor); gl_FragData[1]=vec4(0.,0.,0.,0.); #endif #ifdef DEBUGMODE_FORCERETURN return; #endif } #endif `; je.IncludesShadersStore[X_e] = Y_e; const Q_e = "pbrPixelShader", $_e = `#if defined(BUMP) || !defined(NORMAL) || defined(FORCENORMALFORWARD) || defined(SPECULARAA) || defined(CLEARCOAT_BUMP) || defined(ANISOTROPIC) #extension GL_OES_standard_derivatives : enable #endif #ifdef LODBASEDMICROSFURACE #extension GL_EXT_shader_texture_lod : enable #endif #define CUSTOM_FRAGMENT_BEGIN #ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif #include[SCENE_MRT_COUNT] precision highp float; #include #ifndef FROMLINEARSPACE #define FROMLINEARSPACE #endif #include<__decl__pbrFragment> #include #include<__decl__lightFragment>[0..maxSimultaneousLights] #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef REFLECTION #include #endif #define CUSTOM_FRAGMENT_DEFINITIONS #include #include #include #include #include #include #include #include #include #include void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include #include #include #include albedoOpacityOutParams albedoOpacityOut; #ifdef ALBEDO vec4 albedoTexture=texture2D(albedoSampler,vAlbedoUV+uvOffset); #endif #ifdef OPACITY vec4 
opacityMap=texture2D(opacitySampler,vOpacityUV+uvOffset); #endif #ifdef DECAL vec4 decalColor=texture2D(decalSampler,vDecalUV+uvOffset); #endif albedoOpacityBlock( vAlbedoColor, #ifdef ALBEDO albedoTexture, vAlbedoInfos, #endif #ifdef OPACITY opacityMap, vOpacityInfos, #endif #ifdef DETAIL detailColor, vDetailInfos, #endif #ifdef DECAL decalColor, vDecalInfos, #endif albedoOpacityOut );vec3 surfaceAlbedo=albedoOpacityOut.surfaceAlbedo;float alpha=albedoOpacityOut.alpha; #define CUSTOM_FRAGMENT_UPDATE_ALPHA #include #define CUSTOM_FRAGMENT_BEFORE_LIGHTS ambientOcclusionOutParams aoOut; #ifdef AMBIENT vec3 ambientOcclusionColorMap=texture2D(ambientSampler,vAmbientUV+uvOffset).rgb; #endif ambientOcclusionBlock( #ifdef AMBIENT ambientOcclusionColorMap, vAmbientInfos, #endif aoOut ); #include #ifdef UNLIT vec3 diffuseBase=vec3(1.,1.,1.); #else vec3 baseColor=surfaceAlbedo;reflectivityOutParams reflectivityOut; #if defined(REFLECTIVITY) vec4 surfaceMetallicOrReflectivityColorMap=texture2D(reflectivitySampler,vReflectivityUV+uvOffset);vec4 baseReflectivity=surfaceMetallicOrReflectivityColorMap; #ifndef METALLICWORKFLOW #ifdef REFLECTIVITY_GAMMA surfaceMetallicOrReflectivityColorMap=toLinearSpace(surfaceMetallicOrReflectivityColorMap); #endif surfaceMetallicOrReflectivityColorMap.rgb*=vReflectivityInfos.y; #endif #endif #if defined(MICROSURFACEMAP) vec4 microSurfaceTexel=texture2D(microSurfaceSampler,vMicroSurfaceSamplerUV+uvOffset)*vMicroSurfaceSamplerInfos.y; #endif #ifdef METALLICWORKFLOW vec4 metallicReflectanceFactors=vMetallicReflectanceFactors; #ifdef REFLECTANCE vec4 reflectanceFactorsMap=texture2D(reflectanceSampler,vReflectanceUV+uvOffset); #ifdef REFLECTANCE_GAMMA reflectanceFactorsMap=toLinearSpace(reflectanceFactorsMap); #endif metallicReflectanceFactors.rgb*=reflectanceFactorsMap.rgb; #endif #ifdef METALLIC_REFLECTANCE vec4 metallicReflectanceFactorsMap=texture2D(metallicReflectanceSampler,vMetallicReflectanceUV+uvOffset); #ifdef METALLIC_REFLECTANCE_GAMMA metallicReflectanceFactorsMap=toLinearSpace(metallicReflectanceFactorsMap); #endif #ifndef METALLIC_REFLECTANCE_USE_ALPHA_ONLY metallicReflectanceFactors.rgb*=metallicReflectanceFactorsMap.rgb; #endif metallicReflectanceFactors*=metallicReflectanceFactorsMap.a; #endif #endif reflectivityBlock( vReflectivityColor, #ifdef METALLICWORKFLOW surfaceAlbedo, metallicReflectanceFactors, #endif #ifdef REFLECTIVITY vReflectivityInfos, surfaceMetallicOrReflectivityColorMap, #endif #if defined(METALLICWORKFLOW) && defined(REFLECTIVITY) && defined(AOSTOREINMETALMAPRED) aoOut.ambientOcclusionColor, #endif #ifdef MICROSURFACEMAP microSurfaceTexel, #endif #ifdef DETAIL detailColor, vDetailInfos, #endif reflectivityOut );float microSurface=reflectivityOut.microSurface;float roughness=reflectivityOut.roughness; #ifdef METALLICWORKFLOW surfaceAlbedo=reflectivityOut.surfaceAlbedo; #endif #if defined(METALLICWORKFLOW) && defined(REFLECTIVITY) && defined(AOSTOREINMETALMAPRED) aoOut.ambientOcclusionColor=reflectivityOut.ambientOcclusionColor; #endif #ifdef ALPHAFRESNEL #if defined(ALPHATEST) || defined(ALPHABLEND) alphaFresnelOutParams alphaFresnelOut;alphaFresnelBlock( normalW, viewDirectionW, alpha, microSurface, alphaFresnelOut );alpha=alphaFresnelOut.alpha; #endif #endif #include #ifdef ANISOTROPIC anisotropicOutParams anisotropicOut; #ifdef ANISOTROPIC_TEXTURE vec3 anisotropyMapData=texture2D(anisotropySampler,vAnisotropyUV+uvOffset).rgb*vAnisotropyInfos.y; #endif anisotropicBlock( vAnisotropy, roughness, #ifdef ANISOTROPIC_TEXTURE 
anisotropyMapData, #endif TBN, normalW, viewDirectionW, anisotropicOut ); #endif #ifdef REFLECTION reflectionOutParams reflectionOut; #ifndef USE_CUSTOM_REFLECTION reflectionBlock( vPositionW, normalW, alphaG, vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, #ifdef ANISOTROPIC anisotropicOut, #endif #if defined(LODINREFLECTIONALPHA) && !defined(REFLECTIONMAP_SKYBOX) NdotVUnclamped, #endif #ifdef LINEARSPECULARREFLECTION roughness, #endif reflectionSampler, #if defined(NORMAL) && defined(USESPHERICALINVERTEX) vEnvironmentIrradiance, #endif #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) reflectionMatrix, #endif #endif #ifdef USEIRRADIANCEMAP irradianceSampler, #endif #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif reflectionOut ); #else #define CUSTOM_REFLECTION #endif #endif #include #ifdef SHEEN sheenOutParams sheenOut; #ifdef SHEEN_TEXTURE vec4 sheenMapData=texture2D(sheenSampler,vSheenUV+uvOffset); #endif #if defined(SHEEN_ROUGHNESS) && defined(SHEEN_TEXTURE_ROUGHNESS) && !defined(SHEEN_TEXTURE_ROUGHNESS_IDENTICAL) && !defined(SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE) vec4 sheenMapRoughnessData=texture2D(sheenRoughnessSampler,vSheenRoughnessUV+uvOffset)*vSheenInfos.w; #endif sheenBlock( vSheenColor, #ifdef SHEEN_ROUGHNESS vSheenRoughness, #if defined(SHEEN_TEXTURE_ROUGHNESS) && !defined(SHEEN_TEXTURE_ROUGHNESS_IDENTICAL) && !defined(SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE) sheenMapRoughnessData, #endif #endif roughness, #ifdef SHEEN_TEXTURE sheenMapData, vSheenInfos.y, #endif reflectance, #ifdef SHEEN_LINKWITHALBEDO baseColor, surfaceAlbedo, #endif #ifdef ENVIRONMENTBRDF NdotV, environmentBrdf, #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) AARoughnessFactors, vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, vLightingIntensity, reflectionSampler, reflectionOut.reflectionCoords, NdotVUnclamped, #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif #if !defined(REFLECTIONMAP_SKYBOX) && defined(RADIANCEOCCLUSION) seo, #endif #if !defined(REFLECTIONMAP_SKYBOX) && defined(HORIZONOCCLUSION) && defined(BUMP) && defined(REFLECTIONMAP_3D) eho, #endif #endif sheenOut ); #ifdef SHEEN_LINKWITHALBEDO surfaceAlbedo=sheenOut.surfaceAlbedo; #endif #endif #ifdef CLEARCOAT #ifdef CLEARCOAT_TEXTURE vec2 clearCoatMapData=texture2D(clearCoatSampler,vClearCoatUV+uvOffset).rg*vClearCoatInfos.y; #endif #endif #ifdef IRIDESCENCE iridescenceOutParams iridescenceOut; #ifdef IRIDESCENCE_TEXTURE vec2 iridescenceMapData=texture2D(iridescenceSampler,vIridescenceUV+uvOffset).rg*vIridescenceInfos.y; #endif #ifdef IRIDESCENCE_THICKNESS_TEXTURE vec2 iridescenceThicknessMapData=texture2D(iridescenceThicknessSampler,vIridescenceThicknessUV+uvOffset).rg*vIridescenceInfos.w; #endif iridescenceBlock( vIridescenceParams, NdotV, specularEnvironmentR0, #ifdef IRIDESCENCE_TEXTURE iridescenceMapData, #endif #ifdef IRIDESCENCE_THICKNESS_TEXTURE iridescenceThicknessMapData, #endif #ifdef CLEARCOAT NdotVUnclamped, #ifdef CLEARCOAT_TEXTURE clearCoatMapData, #endif #endif iridescenceOut );float iridescenceIntensity=iridescenceOut.iridescenceIntensity;specularEnvironmentR0=iridescenceOut.specularEnvironmentR0; #endif clearcoatOutParams clearcoatOut; #ifdef CLEARCOAT #if defined(CLEARCOAT_TEXTURE_ROUGHNESS) && !defined(CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL) && 
!defined(CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE) vec4 clearCoatMapRoughnessData=texture2D(clearCoatRoughnessSampler,vClearCoatRoughnessUV+uvOffset)*vClearCoatInfos.w; #endif #if defined(CLEARCOAT_TINT) && defined(CLEARCOAT_TINT_TEXTURE) vec4 clearCoatTintMapData=texture2D(clearCoatTintSampler,vClearCoatTintUV+uvOffset); #endif #ifdef CLEARCOAT_BUMP vec4 clearCoatBumpMapData=texture2D(clearCoatBumpSampler,vClearCoatBumpUV+uvOffset); #endif clearcoatBlock( vPositionW, geometricNormalW, viewDirectionW, vClearCoatParams, #if defined(CLEARCOAT_TEXTURE_ROUGHNESS) && !defined(CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL) && !defined(CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE) clearCoatMapRoughnessData, #endif specularEnvironmentR0, #ifdef CLEARCOAT_TEXTURE clearCoatMapData, #endif #ifdef CLEARCOAT_TINT vClearCoatTintParams, clearCoatColorAtDistance, vClearCoatRefractionParams, #ifdef CLEARCOAT_TINT_TEXTURE clearCoatTintMapData, #endif #endif #ifdef CLEARCOAT_BUMP vClearCoatBumpInfos, clearCoatBumpMapData, vClearCoatBumpUV, #if defined(TANGENT) && defined(NORMAL) vTBN, #else vClearCoatTangentSpaceParams, #endif #ifdef OBJECTSPACE_NORMALMAP normalMatrix, #endif #endif #if defined(FORCENORMALFORWARD) && defined(NORMAL) faceNormal, #endif #ifdef REFLECTION vReflectionMicrosurfaceInfos, vReflectionInfos, vReflectionColor, vLightingIntensity, reflectionSampler, #ifndef LODBASEDMICROSFURACE reflectionSamplerLow, reflectionSamplerHigh, #endif #ifdef REALTIME_FILTERING vReflectionFilteringInfo, #endif #endif #if defined(ENVIRONMENTBRDF) && !defined(REFLECTIONMAP_SKYBOX) #ifdef RADIANCEOCCLUSION ambientMonochrome, #endif #endif #if defined(CLEARCOAT_BUMP) || defined(TWOSIDEDLIGHTING) (gl_FrontFacing ? 1. : -1.), #endif clearcoatOut ); #else clearcoatOut.specularEnvironmentR0=specularEnvironmentR0; #endif #include subSurfaceOutParams subSurfaceOut; #ifdef SUBSURFACE #ifdef SS_THICKNESSANDMASK_TEXTURE vec4 thicknessMap=texture2D(thicknessSampler,vThicknessUV+uvOffset); #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE vec4 refractionIntensityMap=texture2D(refractionIntensitySampler,vRefractionIntensityUV+uvOffset); #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE vec4 translucencyIntensityMap=texture2D(translucencyIntensitySampler,vTranslucencyIntensityUV+uvOffset); #endif subSurfaceBlock( vSubSurfaceIntensity, vThicknessParam, vTintColor, normalW, specularEnvironmentReflectance, #ifdef SS_THICKNESSANDMASK_TEXTURE thicknessMap, #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE refractionIntensityMap, #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE translucencyIntensityMap, #endif #ifdef REFLECTION #ifdef SS_TRANSLUCENCY reflectionMatrix, #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) reflectionOut.irradianceVector, #endif #if defined(REALTIME_FILTERING) reflectionSampler, vReflectionFilteringInfo, #endif #endif #ifdef USEIRRADIANCEMAP irradianceSampler, #endif #endif #endif #if defined(SS_REFRACTION) || defined(SS_TRANSLUCENCY) surfaceAlbedo, #endif #ifdef SS_REFRACTION vPositionW, viewDirectionW, view, vRefractionInfos, refractionMatrix, vRefractionMicrosurfaceInfos, vLightingIntensity, #ifdef SS_LINKREFRACTIONTOTRANSPARENCY alpha, #endif #ifdef SS_LODINREFRACTIONALPHA NdotVUnclamped, #endif #ifdef SS_LINEARSPECULARREFRACTION roughness, #endif alphaG, refractionSampler, #ifndef LODBASEDMICROSFURACE refractionSamplerLow, refractionSamplerHigh, #endif #ifdef ANISOTROPIC anisotropicOut, #endif #ifdef REALTIME_FILTERING vRefractionFilteringInfo, #endif #ifdef 
SS_USE_LOCAL_REFRACTIONMAP_CUBIC vRefractionPosition, vRefractionSize, #endif #ifdef SS_DISPERSION dispersion, #endif #endif #ifdef SS_TRANSLUCENCY vDiffusionDistance, #endif subSurfaceOut ); #ifdef SS_REFRACTION surfaceAlbedo=subSurfaceOut.surfaceAlbedo; #ifdef SS_LINKREFRACTIONTOTRANSPARENCY alpha=subSurfaceOut.alpha; #endif #endif #else subSurfaceOut.specularEnvironmentReflectance=specularEnvironmentReflectance; #endif #include #include[0..maxSimultaneousLights] #include #endif #include #define CUSTOM_FRAGMENT_BEFORE_FINALCOLORCOMPOSITION #include #include #include(color,finalColor) #include #define CUSTOM_FRAGMENT_BEFORE_FRAGCOLOR #ifdef PREPASS float writeGeometryInfo=finalColor.a>0.4 ? 1.0 : 0.0; #ifdef PREPASS_POSITION gl_FragData[PREPASS_POSITION_INDEX]=vec4(vPositionW,writeGeometryInfo); #endif #ifdef PREPASS_VELOCITY vec2 a=(vCurrentPosition.xy/vCurrentPosition.w)*0.5+0.5;vec2 b=(vPreviousPosition.xy/vPreviousPosition.w)*0.5+0.5;vec2 velocity=abs(a-b);velocity=vec2(pow(velocity.x,1.0/3.0),pow(velocity.y,1.0/3.0))*sign(a-b)*0.5+0.5;gl_FragData[PREPASS_VELOCITY_INDEX]=vec4(velocity,0.0,writeGeometryInfo); #endif #ifdef PREPASS_ALBEDO_SQRT vec3 sqAlbedo=sqrt(surfaceAlbedo); #endif #ifdef PREPASS_IRRADIANCE vec3 irradiance=finalDiffuse; #ifndef UNLIT #ifdef REFLECTION irradiance+=finalIrradiance; #endif #endif #ifdef SS_SCATTERING gl_FragData[0]=vec4(finalColor.rgb-irradiance,finalColor.a); irradiance/=sqAlbedo; #else gl_FragData[0]=finalColor; float scatteringDiffusionProfile=255.; #endif gl_FragData[PREPASS_IRRADIANCE_INDEX]=vec4(clamp(irradiance,vec3(0.),vec3(1.)),writeGeometryInfo*scatteringDiffusionProfile/255.); #else gl_FragData[0]=vec4(finalColor.rgb,finalColor.a); #endif #ifdef PREPASS_DEPTH gl_FragData[PREPASS_DEPTH_INDEX]=vec4(vViewPos.z,0.0,0.0,writeGeometryInfo); #endif #ifdef PREPASS_NORMAL #ifdef PREPASS_NORMAL_WORLDSPACE gl_FragData[PREPASS_NORMAL_INDEX]=vec4(normalW,writeGeometryInfo); #else gl_FragData[PREPASS_NORMAL_INDEX]=vec4(normalize((view*vec4(normalW,0.0)).rgb),writeGeometryInfo); #endif #endif #ifdef PREPASS_ALBEDO_SQRT gl_FragData[PREPASS_ALBEDO_SQRT_INDEX]=vec4(sqAlbedo,writeGeometryInfo); #endif #ifdef PREPASS_REFLECTIVITY #ifndef UNLIT gl_FragData[PREPASS_REFLECTIVITY_INDEX]=vec4(specularEnvironmentR0,microSurface)*writeGeometryInfo; #else gl_FragData[PREPASS_REFLECTIVITY_INDEX]=vec4( 0.0,0.0,0.0,1.0 )*writeGeometryInfo; #endif #endif #endif #if !defined(PREPASS) || defined(WEBGL2) gl_FragColor=finalColor; #endif #include #if ORDER_INDEPENDENT_TRANSPARENCY if (fragDepth==nearestDepth) {frontColor.rgb+=finalColor.rgb*finalColor.a*alphaMultiplier;frontColor.a=1.0-alphaMultiplier*(1.0-finalColor.a);} else {backColor+=finalColor;} #endif #include #define CUSTOM_FRAGMENT_MAIN_END } `; je.ShadersStore[Q_e] = $_e; const Z_e = "pbrVertexDeclaration", q_e = `uniform mat4 view;uniform mat4 viewProjection; #ifdef ALBEDO uniform mat4 albedoMatrix;uniform vec2 vAlbedoInfos; #endif #ifdef AMBIENT uniform mat4 ambientMatrix;uniform vec4 vAmbientInfos; #endif #ifdef OPACITY uniform mat4 opacityMatrix;uniform vec2 vOpacityInfos; #endif #ifdef EMISSIVE uniform vec2 vEmissiveInfos;uniform mat4 emissiveMatrix; #endif #ifdef LIGHTMAP uniform vec2 vLightmapInfos;uniform mat4 lightmapMatrix; #endif #ifdef REFLECTIVITY uniform vec3 vReflectivityInfos;uniform mat4 reflectivityMatrix; #endif #ifdef METALLIC_REFLECTANCE uniform vec2 vMetallicReflectanceInfos;uniform mat4 metallicReflectanceMatrix; #endif #ifdef REFLECTANCE uniform vec2 vReflectanceInfos;uniform mat4 
reflectanceMatrix; #endif #ifdef MICROSURFACEMAP uniform vec2 vMicroSurfaceSamplerInfos;uniform mat4 microSurfaceSamplerMatrix; #endif #ifdef BUMP uniform vec3 vBumpInfos;uniform mat4 bumpMatrix; #endif #ifdef POINTSIZE uniform float pointSize; #endif #ifdef REFLECTION uniform vec2 vReflectionInfos;uniform mat4 reflectionMatrix; #endif #ifdef CLEARCOAT #if defined(CLEARCOAT_TEXTURE) || defined(CLEARCOAT_TEXTURE_ROUGHNESS) uniform vec4 vClearCoatInfos; #endif #ifdef CLEARCOAT_TEXTURE uniform mat4 clearCoatMatrix; #endif #ifdef CLEARCOAT_TEXTURE_ROUGHNESS uniform mat4 clearCoatRoughnessMatrix; #endif #ifdef CLEARCOAT_BUMP uniform vec2 vClearCoatBumpInfos;uniform mat4 clearCoatBumpMatrix; #endif #ifdef CLEARCOAT_TINT_TEXTURE uniform vec2 vClearCoatTintInfos;uniform mat4 clearCoatTintMatrix; #endif #endif #ifdef IRIDESCENCE #if defined(IRIDESCENCE_TEXTURE) || defined(IRIDESCENCE_THICKNESS_TEXTURE) uniform vec4 vIridescenceInfos; #endif #ifdef IRIDESCENCE_TEXTURE uniform mat4 iridescenceMatrix; #endif #ifdef IRIDESCENCE_THICKNESS_TEXTURE uniform mat4 iridescenceThicknessMatrix; #endif #endif #ifdef ANISOTROPIC #ifdef ANISOTROPIC_TEXTURE uniform vec2 vAnisotropyInfos;uniform mat4 anisotropyMatrix; #endif #endif #ifdef SHEEN #if defined(SHEEN_TEXTURE) || defined(SHEEN_TEXTURE_ROUGHNESS) uniform vec4 vSheenInfos; #endif #ifdef SHEEN_TEXTURE uniform mat4 sheenMatrix; #endif #ifdef SHEEN_TEXTURE_ROUGHNESS uniform mat4 sheenRoughnessMatrix; #endif #endif #ifdef SUBSURFACE #ifdef SS_REFRACTION uniform vec4 vRefractionInfos;uniform mat4 refractionMatrix; #endif #ifdef SS_THICKNESSANDMASK_TEXTURE uniform vec2 vThicknessInfos;uniform mat4 thicknessMatrix; #endif #ifdef SS_REFRACTIONINTENSITY_TEXTURE uniform vec2 vRefractionIntensityInfos;uniform mat4 refractionIntensityMatrix; #endif #ifdef SS_TRANSLUCENCYINTENSITY_TEXTURE uniform vec2 vTranslucencyIntensityInfos;uniform mat4 translucencyIntensityMatrix; #endif #endif #ifdef NORMAL #if defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX) #ifdef USESPHERICALFROMREFLECTIONMAP #ifdef SPHERICAL_HARMONICS uniform vec3 vSphericalL00;uniform vec3 vSphericalL1_1;uniform vec3 vSphericalL10;uniform vec3 vSphericalL11;uniform vec3 vSphericalL2_2;uniform vec3 vSphericalL2_1;uniform vec3 vSphericalL20;uniform vec3 vSphericalL21;uniform vec3 vSphericalL22; #else uniform vec3 vSphericalX;uniform vec3 vSphericalY;uniform vec3 vSphericalZ;uniform vec3 vSphericalXX_ZZ;uniform vec3 vSphericalYY_ZZ;uniform vec3 vSphericalZZ;uniform vec3 vSphericalXY;uniform vec3 vSphericalYZ;uniform vec3 vSphericalZX; #endif #endif #endif #endif #ifdef DETAIL uniform vec4 vDetailInfos;uniform mat4 detailMatrix; #endif #include #define ADDITIONAL_VERTEX_DECLARATION `; je.IncludesShadersStore[Z_e] = q_e; const J_e = "pbrVertexShader", e2e = `precision highp float; #include<__decl__pbrVertex> #define CUSTOM_VERTEX_BEGIN attribute vec3 position; #ifdef NORMAL attribute vec3 normal; #endif #ifdef TANGENT attribute vec4 tangent; #endif #ifdef UV1 attribute vec2 uv; #endif #include[2..7] #include[1..7] #ifdef VERTEXCOLOR attribute vec4 color; #endif #include #include #include #include #include #include(_DEFINENAME_,ALBEDO,_VARYINGNAME_,Albedo) #include(_DEFINENAME_,DETAIL,_VARYINGNAME_,Detail) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap) #include(_DEFINENAME_,REFLECTIVITY,_VARYINGNAME_,Reflectivity) 
#include(_DEFINENAME_,MICROSURFACEMAP,_VARYINGNAME_,MicroSurfaceSampler) #include(_DEFINENAME_,METALLIC_REFLECTANCE,_VARYINGNAME_,MetallicReflectance) #include(_DEFINENAME_,REFLECTANCE,_VARYINGNAME_,Reflectance) #include(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal) #ifdef CLEARCOAT #include(_DEFINENAME_,CLEARCOAT_TEXTURE,_VARYINGNAME_,ClearCoat) #include(_DEFINENAME_,CLEARCOAT_TEXTURE_ROUGHNESS,_VARYINGNAME_,ClearCoatRoughness) #include(_DEFINENAME_,CLEARCOAT_BUMP,_VARYINGNAME_,ClearCoatBump) #include(_DEFINENAME_,CLEARCOAT_TINT_TEXTURE,_VARYINGNAME_,ClearCoatTint) #endif #ifdef IRIDESCENCE #include(_DEFINENAME_,IRIDESCENCE_TEXTURE,_VARYINGNAME_,Iridescence) #include(_DEFINENAME_,IRIDESCENCE_THICKNESS_TEXTURE,_VARYINGNAME_,IridescenceThickness) #endif #ifdef SHEEN #include(_DEFINENAME_,SHEEN_TEXTURE,_VARYINGNAME_,Sheen) #include(_DEFINENAME_,SHEEN_TEXTURE_ROUGHNESS,_VARYINGNAME_,SheenRoughness) #endif #ifdef ANISOTROPIC #include(_DEFINENAME_,ANISOTROPIC_TEXTURE,_VARYINGNAME_,Anisotropy) #endif #ifdef SUBSURFACE #include(_DEFINENAME_,SS_THICKNESSANDMASK_TEXTURE,_VARYINGNAME_,Thickness) #include(_DEFINENAME_,SS_REFRACTIONINTENSITY_TEXTURE,_VARYINGNAME_,RefractionIntensity) #include(_DEFINENAME_,SS_TRANSLUCENCYINTENSITY_TEXTURE,_VARYINGNAME_,TranslucencyIntensity) #endif varying vec3 vPositionW; #if DEBUGMODE>0 varying vec4 vClipSpacePosition; #endif #ifdef NORMAL varying vec3 vNormalW; #if defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX) varying vec3 vEnvironmentIrradiance; #include #endif #endif #if defined(VERTEXCOLOR) || defined(INSTANCESCOLOR) && defined(INSTANCES) varying vec4 vColor; #endif #include #include #include #include<__decl__lightVxFragment>[0..maxSimultaneousLights] #include #include[0..maxSimultaneousMorphTargets] #ifdef REFLECTIONMAP_SKYBOX varying vec3 vPositionUVW; #endif #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) varying vec3 vDirectionW; #endif #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec3 positionUpdated=position; #ifdef NORMAL vec3 normalUpdated=normal; #endif #ifdef TANGENT vec4 tangentUpdated=tangent; #endif #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #ifdef REFLECTIONMAP_SKYBOX vPositionUVW=positionUpdated; #endif #define CUSTOM_VERTEX_UPDATE_POSITION #define CUSTOM_VERTEX_UPDATE_NORMAL #include #if defined(PREPASS) && defined(PREPASS_VELOCITY) && !defined(BONES_VELOCITY_ENABLED) vCurrentPosition=viewProjection*finalWorld*vec4(positionUpdated,1.0);vPreviousPosition=previousViewProjection*finalPreviousWorld*vec4(positionUpdated,1.0); #endif #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0);vPositionW=vec3(worldPos); #include #ifdef NORMAL mat3 normalWorld=mat3(finalWorld); #if defined(INSTANCES) && defined(THIN_INSTANCES) vNormalW=normalUpdated/vec3(dot(normalWorld[0],normalWorld[0]),dot(normalWorld[1],normalWorld[1]),dot(normalWorld[2],normalWorld[2]));vNormalW=normalize(normalWorld*vNormalW); #else #ifdef NONUNIFORMSCALING normalWorld=transposeMat3(inverseMat3(normalWorld)); #endif vNormalW=normalize(normalWorld*normalUpdated); #endif #if defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX) vec3 reflectionVector=vec3(reflectionMatrix*vec4(vNormalW,0)).xyz; #ifdef REFLECTIONMAP_OPPOSITEZ reflectionVector.z*=-1.0; #endif vEnvironmentIrradiance=computeEnvironmentIrradiance(reflectionVector); #endif #endif 
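// Note (descriptive comment, not part of the original source): when USESPHERICALFROMREFLECTIONMAP
// and USESPHERICALINVERTEX are both defined, the block above rotates the world-space normal by
// reflectionMatrix (flipping z for REFLECTIONMAP_OPPOSITEZ) and precomputes vEnvironmentIrradiance
// per vertex, so the fragment shader can reuse the interpolated value. The code below then projects
// the world position to clip space, once per eye when MULTIVIEW is enabled.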
#define CUSTOM_VERTEX_UPDATE_WORLDPOS #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {gl_Position=viewProjection*worldPos;} else {gl_Position=viewProjectionR*worldPos;} #else gl_Position=viewProjection*worldPos; #endif #if DEBUGMODE>0 vClipSpacePosition=gl_Position; #endif #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED) vDirectionW=normalize(vec3(finalWorld*vec4(positionUpdated,0.0))); #endif #ifndef UV1 vec2 uvUpdated=vec2(0.,0.); #endif #ifdef MAINUV1 vMainUV1=uvUpdated; #endif #include[2..7] #include(_DEFINENAME_,ALBEDO,_VARYINGNAME_,Albedo,_MATRIXNAME_,albedo,_INFONAME_,AlbedoInfos.x) #include(_DEFINENAME_,DETAIL,_VARYINGNAME_,Detail,_MATRIXNAME_,detail,_INFONAME_,DetailInfos.x) #include(_DEFINENAME_,AMBIENT,_VARYINGNAME_,Ambient,_MATRIXNAME_,ambient,_INFONAME_,AmbientInfos.x) #include(_DEFINENAME_,OPACITY,_VARYINGNAME_,Opacity,_MATRIXNAME_,opacity,_INFONAME_,OpacityInfos.x) #include(_DEFINENAME_,EMISSIVE,_VARYINGNAME_,Emissive,_MATRIXNAME_,emissive,_INFONAME_,EmissiveInfos.x) #include(_DEFINENAME_,LIGHTMAP,_VARYINGNAME_,Lightmap,_MATRIXNAME_,lightmap,_INFONAME_,LightmapInfos.x) #include(_DEFINENAME_,REFLECTIVITY,_VARYINGNAME_,Reflectivity,_MATRIXNAME_,reflectivity,_INFONAME_,ReflectivityInfos.x) #include(_DEFINENAME_,MICROSURFACEMAP,_VARYINGNAME_,MicroSurfaceSampler,_MATRIXNAME_,microSurfaceSampler,_INFONAME_,MicroSurfaceSamplerInfos.x) #include(_DEFINENAME_,METALLIC_REFLECTANCE,_VARYINGNAME_,MetallicReflectance,_MATRIXNAME_,metallicReflectance,_INFONAME_,MetallicReflectanceInfos.x) #include(_DEFINENAME_,REFLECTANCE,_VARYINGNAME_,Reflectance,_MATRIXNAME_,reflectance,_INFONAME_,ReflectanceInfos.x) #include(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump,_MATRIXNAME_,bump,_INFONAME_,BumpInfos.x) #include(_DEFINENAME_,DECAL,_VARYINGNAME_,Decal,_MATRIXNAME_,decal,_INFONAME_,DecalInfos.x) #ifdef CLEARCOAT #include(_DEFINENAME_,CLEARCOAT_TEXTURE,_VARYINGNAME_,ClearCoat,_MATRIXNAME_,clearCoat,_INFONAME_,ClearCoatInfos.x) #include(_DEFINENAME_,CLEARCOAT_TEXTURE_ROUGHNESS,_VARYINGNAME_,ClearCoatRoughness,_MATRIXNAME_,clearCoatRoughness,_INFONAME_,ClearCoatInfos.z) #include(_DEFINENAME_,CLEARCOAT_BUMP,_VARYINGNAME_,ClearCoatBump,_MATRIXNAME_,clearCoatBump,_INFONAME_,ClearCoatBumpInfos.x) #include(_DEFINENAME_,CLEARCOAT_TINT_TEXTURE,_VARYINGNAME_,ClearCoatTint,_MATRIXNAME_,clearCoatTint,_INFONAME_,ClearCoatTintInfos.x) #endif #ifdef IRIDESCENCE #include(_DEFINENAME_,IRIDESCENCE_TEXTURE,_VARYINGNAME_,Iridescence,_MATRIXNAME_,iridescence,_INFONAME_,IridescenceInfos.x) #include(_DEFINENAME_,IRIDESCENCE_THICKNESS_TEXTURE,_VARYINGNAME_,IridescenceThickness,_MATRIXNAME_,iridescenceThickness,_INFONAME_,IridescenceInfos.z) #endif #ifdef SHEEN #include(_DEFINENAME_,SHEEN_TEXTURE,_VARYINGNAME_,Sheen,_MATRIXNAME_,sheen,_INFONAME_,SheenInfos.x) #include(_DEFINENAME_,SHEEN_TEXTURE_ROUGHNESS,_VARYINGNAME_,SheenRoughness,_MATRIXNAME_,sheen,_INFONAME_,SheenInfos.z) #endif #ifdef ANISOTROPIC #include(_DEFINENAME_,ANISOTROPIC_TEXTURE,_VARYINGNAME_,Anisotropy,_MATRIXNAME_,anisotropy,_INFONAME_,AnisotropyInfos.x) #endif #ifdef SUBSURFACE #include(_DEFINENAME_,SS_THICKNESSANDMASK_TEXTURE,_VARYINGNAME_,Thickness,_MATRIXNAME_,thickness,_INFONAME_,ThicknessInfos.x) #include(_DEFINENAME_,SS_REFRACTIONINTENSITY_TEXTURE,_VARYINGNAME_,RefractionIntensity,_MATRIXNAME_,refractionIntensity,_INFONAME_,RefractionIntensityInfos.x) 
#include(_DEFINENAME_,SS_TRANSLUCENCYINTENSITY_TEXTURE,_VARYINGNAME_,TranslucencyIntensity,_MATRIXNAME_,translucencyIntensity,_INFONAME_,TranslucencyIntensityInfos.x) #endif #include #include #include #include[0..maxSimultaneousLights] #include #if defined(POINTSIZE) && !defined(WEBGPU) gl_PointSize=pointSize; #endif #include #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[J_e] = e2e; class Yie extends sa { constructor() { super(...arguments), this.CLEARCOAT = !1, this.CLEARCOAT_DEFAULTIOR = !1, this.CLEARCOAT_TEXTURE = !1, this.CLEARCOAT_TEXTURE_ROUGHNESS = !1, this.CLEARCOAT_TEXTUREDIRECTUV = 0, this.CLEARCOAT_TEXTURE_ROUGHNESSDIRECTUV = 0, this.CLEARCOAT_BUMP = !1, this.CLEARCOAT_BUMPDIRECTUV = 0, this.CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE = !1, this.CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL = !1, this.CLEARCOAT_REMAP_F0 = !1, this.CLEARCOAT_TINT = !1, this.CLEARCOAT_TINT_TEXTURE = !1, this.CLEARCOAT_TINT_TEXTUREDIRECTUV = 0, this.CLEARCOAT_TINT_GAMMATEXTURE = !1; } } class _u extends Q_ { /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } constructor(e, t = !0) { super(e, "PBRClearCoat", 100, new Yie(), t), this._isEnabled = !1, this.isEnabled = !1, this.intensity = 1, this.roughness = 0, this._indexOfRefraction = _u._DefaultIndexOfRefraction, this.indexOfRefraction = _u._DefaultIndexOfRefraction, this._texture = null, this.texture = null, this._useRoughnessFromMainTexture = !0, this.useRoughnessFromMainTexture = !0, this._textureRoughness = null, this.textureRoughness = null, this._remapF0OnInterfaceChange = !0, this.remapF0OnInterfaceChange = !0, this._bumpTexture = null, this.bumpTexture = null, this._isTintEnabled = !1, this.isTintEnabled = !1, this.tintColor = ze.White(), this.tintColorAtDistance = 1, this.tintThickness = 1, this._tintTexture = null, this.tintTexture = null, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1]; } isReadyForSubMesh(e, t, i) { if (!this._isEnabled) return !0; const r = this._material._disableBumpMap; return !(e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.ClearCoatTextureEnabled && !this._texture.isReadyOrNotBlocking() || this._textureRoughness && Tt.ClearCoatTextureEnabled && !this._textureRoughness.isReadyOrNotBlocking() || i.getCaps().standardDerivatives && this._bumpTexture && Tt.ClearCoatBumpTextureEnabled && !r && !this._bumpTexture.isReady() || this._isTintEnabled && this._tintTexture && Tt.ClearCoatTintTextureEnabled && !this._tintTexture.isReadyOrNotBlocking())); } prepareDefinesBeforeAttributes(e, t) { var i; this._isEnabled ? (e.CLEARCOAT = !0, e.CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE = this._useRoughnessFromMainTexture, e.CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL = this._texture !== null && this._texture._texture === ((i = this._textureRoughness) === null || i === void 0 ? void 0 : i._texture) && this._texture.checkTransformsAreIdentical(this._textureRoughness), e.CLEARCOAT_REMAP_F0 = this._remapF0OnInterfaceChange, e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.ClearCoatTextureEnabled ? Ke.PrepareDefinesForMergedUV(this._texture, e, "CLEARCOAT_TEXTURE") : e.CLEARCOAT_TEXTURE = !1, this._textureRoughness && Tt.ClearCoatTextureEnabled ? Ke.PrepareDefinesForMergedUV(this._textureRoughness, e, "CLEARCOAT_TEXTURE_ROUGHNESS") : e.CLEARCOAT_TEXTURE_ROUGHNESS = !1, this._bumpTexture && Tt.ClearCoatBumpTextureEnabled ? 
Ke.PrepareDefinesForMergedUV(this._bumpTexture, e, "CLEARCOAT_BUMP") : e.CLEARCOAT_BUMP = !1, e.CLEARCOAT_DEFAULTIOR = this._indexOfRefraction === _u._DefaultIndexOfRefraction, this._isTintEnabled ? (e.CLEARCOAT_TINT = !0, this._tintTexture && Tt.ClearCoatTintTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._tintTexture, e, "CLEARCOAT_TINT_TEXTURE"), e.CLEARCOAT_TINT_GAMMATEXTURE = this._tintTexture.gammaSpace) : e.CLEARCOAT_TINT_TEXTURE = !1) : (e.CLEARCOAT_TINT = !1, e.CLEARCOAT_TINT_TEXTURE = !1))) : (e.CLEARCOAT = !1, e.CLEARCOAT_TEXTURE = !1, e.CLEARCOAT_TEXTURE_ROUGHNESS = !1, e.CLEARCOAT_BUMP = !1, e.CLEARCOAT_TINT = !1, e.CLEARCOAT_TINT_TEXTURE = !1, e.CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE = !1, e.CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL = !1, e.CLEARCOAT_DEFAULTIOR = !1, e.CLEARCOAT_TEXTUREDIRECTUV = 0, e.CLEARCOAT_TEXTURE_ROUGHNESSDIRECTUV = 0, e.CLEARCOAT_BUMPDIRECTUV = 0, e.CLEARCOAT_REMAP_F0 = !1, e.CLEARCOAT_TINT_TEXTUREDIRECTUV = 0, e.CLEARCOAT_TINT_GAMMATEXTURE = !1); } bindForSubMesh(e, t, i, r) { var s, n, a, l, o, u, h, d; if (!this._isEnabled) return; const f = r.materialDefines, p = this._material.isFrozen, m = this._material._disableBumpMap, _ = this._material._invertNormalMapX, v = this._material._invertNormalMapY, C = f.CLEARCOAT_TEXTURE_ROUGHNESS_IDENTICAL; if (!e.useUbo || !p || !e.isSync) { C && Tt.ClearCoatTextureEnabled ? (e.updateFloat4("vClearCoatInfos", this._texture.coordinatesIndex, this._texture.level, -1, -1), Ke.BindTextureMatrix(this._texture, e, "clearCoat")) : (this._texture || this._textureRoughness) && Tt.ClearCoatTextureEnabled && (e.updateFloat4("vClearCoatInfos", (n = (s = this._texture) === null || s === void 0 ? void 0 : s.coordinatesIndex) !== null && n !== void 0 ? n : 0, (l = (a = this._texture) === null || a === void 0 ? void 0 : a.level) !== null && l !== void 0 ? l : 0, (u = (o = this._textureRoughness) === null || o === void 0 ? void 0 : o.coordinatesIndex) !== null && u !== void 0 ? u : 0, (d = (h = this._textureRoughness) === null || h === void 0 ? void 0 : h.level) !== null && d !== void 0 ? d : 0), this._texture && Ke.BindTextureMatrix(this._texture, e, "clearCoat"), this._textureRoughness && !C && !f.CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE && Ke.BindTextureMatrix(this._textureRoughness, e, "clearCoatRoughness")), this._bumpTexture && i.getCaps().standardDerivatives && Tt.ClearCoatTextureEnabled && !m && (e.updateFloat2("vClearCoatBumpInfos", this._bumpTexture.coordinatesIndex, this._bumpTexture.level), Ke.BindTextureMatrix(this._bumpTexture, e, "clearCoatBump"), t._mirroredCameraPosition ? e.updateFloat2("vClearCoatTangentSpaceParams", _ ? 1 : -1, v ? 1 : -1) : e.updateFloat2("vClearCoatTangentSpaceParams", _ ? -1 : 1, v ? 
-1 : 1)), this._tintTexture && Tt.ClearCoatTintTextureEnabled && (e.updateFloat2("vClearCoatTintInfos", this._tintTexture.coordinatesIndex, this._tintTexture.level), Ke.BindTextureMatrix(this._tintTexture, e, "clearCoatTint")), e.updateFloat2("vClearCoatParams", this.intensity, this.roughness); const x = 1 - this._indexOfRefraction, b = 1 + this._indexOfRefraction, S = Math.pow(-x / b, 2), M = 1 / this._indexOfRefraction; e.updateFloat4("vClearCoatRefractionParams", S, M, x, b), this._isTintEnabled && (e.updateFloat4("vClearCoatTintParams", this.tintColor.r, this.tintColor.g, this.tintColor.b, Math.max(1e-5, this.tintThickness)), e.updateFloat("clearCoatColorAtDistance", Math.max(1e-5, this.tintColorAtDistance))); } t.texturesEnabled && (this._texture && Tt.ClearCoatTextureEnabled && e.setTexture("clearCoatSampler", this._texture), this._textureRoughness && !C && !f.CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE && Tt.ClearCoatTextureEnabled && e.setTexture("clearCoatRoughnessSampler", this._textureRoughness), this._bumpTexture && i.getCaps().standardDerivatives && Tt.ClearCoatBumpTextureEnabled && !m && e.setTexture("clearCoatBumpSampler", this._bumpTexture), this._isTintEnabled && this._tintTexture && Tt.ClearCoatTintTextureEnabled && e.setTexture("clearCoatTintSampler", this._tintTexture)); } hasTexture(e) { return this._texture === e || this._textureRoughness === e || this._bumpTexture === e || this._tintTexture === e; } getActiveTextures(e) { this._texture && e.push(this._texture), this._textureRoughness && e.push(this._textureRoughness), this._bumpTexture && e.push(this._bumpTexture), this._tintTexture && e.push(this._tintTexture); } getAnimatables(e) { this._texture && this._texture.animations && this._texture.animations.length > 0 && e.push(this._texture), this._textureRoughness && this._textureRoughness.animations && this._textureRoughness.animations.length > 0 && e.push(this._textureRoughness), this._bumpTexture && this._bumpTexture.animations && this._bumpTexture.animations.length > 0 && e.push(this._bumpTexture), this._tintTexture && this._tintTexture.animations && this._tintTexture.animations.length > 0 && e.push(this._tintTexture); } dispose(e) { var t, i, r, s; e && ((t = this._texture) === null || t === void 0 || t.dispose(), (i = this._textureRoughness) === null || i === void 0 || i.dispose(), (r = this._bumpTexture) === null || r === void 0 || r.dispose(), (s = this._tintTexture) === null || s === void 0 || s.dispose()); } getClassName() { return "PBRClearCoatConfiguration"; } addFallbacks(e, t, i) { return e.CLEARCOAT_BUMP && t.addFallback(i++, "CLEARCOAT_BUMP"), e.CLEARCOAT_TINT && t.addFallback(i++, "CLEARCOAT_TINT"), e.CLEARCOAT && t.addFallback(i++, "CLEARCOAT"), i; } getSamplers(e) { e.push("clearCoatSampler", "clearCoatRoughnessSampler", "clearCoatBumpSampler", "clearCoatTintSampler"); } getUniforms() { return { ubo: [ { name: "vClearCoatParams", size: 2, type: "vec2" }, { name: "vClearCoatRefractionParams", size: 4, type: "vec4" }, { name: "vClearCoatInfos", size: 4, type: "vec4" }, { name: "clearCoatMatrix", size: 16, type: "mat4" }, { name: "clearCoatRoughnessMatrix", size: 16, type: "mat4" }, { name: "vClearCoatBumpInfos", size: 2, type: "vec2" }, { name: "vClearCoatTangentSpaceParams", size: 2, type: "vec2" }, { name: "clearCoatBumpMatrix", size: 16, type: "mat4" }, { name: "vClearCoatTintParams", size: 4, type: "vec4" }, { name: "clearCoatColorAtDistance", size: 1, type: "float" }, { name: "vClearCoatTintInfos", size: 2, type: "vec2" }, { name: 
"clearCoatTintMatrix", size: 16, type: "mat4" } ] }; } } _u._DefaultIndexOfRefraction = 1.5; F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "isEnabled", void 0); F([ W() ], _u.prototype, "intensity", void 0); F([ W() ], _u.prototype, "roughness", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "indexOfRefraction", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "texture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "useRoughnessFromMainTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "textureRoughness", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "remapF0OnInterfaceChange", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "bumpTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "isTintEnabled", void 0); F([ Fs() ], _u.prototype, "tintColor", void 0); F([ W() ], _u.prototype, "tintColorAtDistance", void 0); F([ W() ], _u.prototype, "tintThickness", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], _u.prototype, "tintTexture", void 0); class Qie extends sa { constructor() { super(...arguments), this.IRIDESCENCE = !1, this.IRIDESCENCE_TEXTURE = !1, this.IRIDESCENCE_TEXTUREDIRECTUV = 0, this.IRIDESCENCE_THICKNESS_TEXTURE = !1, this.IRIDESCENCE_THICKNESS_TEXTUREDIRECTUV = 0, this.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE = !1; } } class lf extends Q_ { /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } constructor(e, t = !0) { super(e, "PBRIridescence", 110, new Qie(), t), this._isEnabled = !1, this.isEnabled = !1, this.intensity = 1, this.minimumThickness = lf._DefaultMinimumThickness, this.maximumThickness = lf._DefaultMaximumThickness, this.indexOfRefraction = lf._DefaultIndexOfRefraction, this._texture = null, this.texture = null, this._thicknessTexture = null, this.thicknessTexture = null, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1]; } isReadyForSubMesh(e, t) { return this._isEnabled ? !(e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.IridescenceTextureEnabled && !this._texture.isReadyOrNotBlocking() || this._thicknessTexture && Tt.IridescenceTextureEnabled && !this._thicknessTexture.isReadyOrNotBlocking())) : !0; } prepareDefinesBeforeAttributes(e, t) { var i; this._isEnabled ? (e.IRIDESCENCE = !0, e.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE = this._texture !== null && this._texture._texture === ((i = this._thicknessTexture) === null || i === void 0 ? void 0 : i._texture) && this._texture.checkTransformsAreIdentical(this._thicknessTexture), e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.IridescenceTextureEnabled ? Ke.PrepareDefinesForMergedUV(this._texture, e, "IRIDESCENCE_TEXTURE") : e.IRIDESCENCE_TEXTURE = !1, !e.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE && this._thicknessTexture && Tt.IridescenceTextureEnabled ? 
Ke.PrepareDefinesForMergedUV(this._thicknessTexture, e, "IRIDESCENCE_THICKNESS_TEXTURE") : e.IRIDESCENCE_THICKNESS_TEXTURE = !1)) : (e.IRIDESCENCE = !1, e.IRIDESCENCE_TEXTURE = !1, e.IRIDESCENCE_THICKNESS_TEXTURE = !1, e.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE = !1, e.IRIDESCENCE_TEXTUREDIRECTUV = 0, e.IRIDESCENCE_THICKNESS_TEXTUREDIRECTUV = 0); } bindForSubMesh(e, t, i, r) { var s, n, a, l, o, u, h, d; if (!this._isEnabled) return; const f = r.materialDefines, p = this._material.isFrozen, m = f.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE; (!e.useUbo || !p || !e.isSync) && (m && Tt.IridescenceTextureEnabled ? (e.updateFloat4("vIridescenceInfos", this._texture.coordinatesIndex, this._texture.level, -1, -1), Ke.BindTextureMatrix(this._texture, e, "iridescence")) : (this._texture || this._thicknessTexture) && Tt.IridescenceTextureEnabled && (e.updateFloat4("vIridescenceInfos", (n = (s = this._texture) === null || s === void 0 ? void 0 : s.coordinatesIndex) !== null && n !== void 0 ? n : 0, (l = (a = this._texture) === null || a === void 0 ? void 0 : a.level) !== null && l !== void 0 ? l : 0, (u = (o = this._thicknessTexture) === null || o === void 0 ? void 0 : o.coordinatesIndex) !== null && u !== void 0 ? u : 0, (d = (h = this._thicknessTexture) === null || h === void 0 ? void 0 : h.level) !== null && d !== void 0 ? d : 0), this._texture && Ke.BindTextureMatrix(this._texture, e, "iridescence"), this._thicknessTexture && !m && !f.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE && Ke.BindTextureMatrix(this._thicknessTexture, e, "iridescenceThickness")), e.updateFloat4("vIridescenceParams", this.intensity, this.indexOfRefraction, this.minimumThickness, this.maximumThickness)), t.texturesEnabled && (this._texture && Tt.IridescenceTextureEnabled && e.setTexture("iridescenceSampler", this._texture), this._thicknessTexture && !m && !f.IRIDESCENCE_USE_THICKNESS_FROM_MAINTEXTURE && Tt.IridescenceTextureEnabled && e.setTexture("iridescenceThicknessSampler", this._thicknessTexture)); } hasTexture(e) { return this._texture === e || this._thicknessTexture === e; } getActiveTextures(e) { this._texture && e.push(this._texture), this._thicknessTexture && e.push(this._thicknessTexture); } getAnimatables(e) { this._texture && this._texture.animations && this._texture.animations.length > 0 && e.push(this._texture), this._thicknessTexture && this._thicknessTexture.animations && this._thicknessTexture.animations.length > 0 && e.push(this._thicknessTexture); } dispose(e) { var t, i; e && ((t = this._texture) === null || t === void 0 || t.dispose(), (i = this._thicknessTexture) === null || i === void 0 || i.dispose()); } getClassName() { return "PBRIridescenceConfiguration"; } addFallbacks(e, t, i) { return e.IRIDESCENCE && t.addFallback(i++, "IRIDESCENCE"), i; } getSamplers(e) { e.push("iridescenceSampler", "iridescenceThicknessSampler"); } getUniforms() { return { ubo: [ { name: "vIridescenceParams", size: 4, type: "vec4" }, { name: "vIridescenceInfos", size: 4, type: "vec4" }, { name: "iridescenceMatrix", size: 16, type: "mat4" }, { name: "iridescenceThicknessMatrix", size: 16, type: "mat4" } ] }; } } lf._DefaultMinimumThickness = 100; lf._DefaultMaximumThickness = 400; lf._DefaultIndexOfRefraction = 1.3; F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], lf.prototype, "isEnabled", void 0); F([ W() ], lf.prototype, "intensity", void 0); F([ W() ], lf.prototype, "minimumThickness", void 0); F([ W() ], lf.prototype, "maximumThickness", void 0); F([ W() ], lf.prototype, "indexOfRefraction", void 0); F([ er(), 
ct("_markAllSubMeshesAsTexturesDirty") ], lf.prototype, "texture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], lf.prototype, "thicknessTexture", void 0); class $ie extends sa { constructor() { super(...arguments), this.ANISOTROPIC = !1, this.ANISOTROPIC_TEXTURE = !1, this.ANISOTROPIC_TEXTUREDIRECTUV = 0, this.ANISOTROPIC_LEGACY = !1, this.MAINUV1 = !1; } } class E5 extends Q_ { /** * Sets the anisotropy direction as an angle. */ set angle(e) { this.direction.x = Math.cos(e), this.direction.y = Math.sin(e); } /** * Gets the anisotropy angle value in radians. * @returns the anisotropy angle value in radians. */ get angle() { return Math.atan2(this.direction.y, this.direction.x); } /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } /** @internal */ _markAllSubMeshesAsMiscDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsMiscDirty(); } constructor(e, t = !0) { super(e, "PBRAnisotropic", 110, new $ie(), t), this._isEnabled = !1, this.isEnabled = !1, this.intensity = 1, this.direction = new at(1, 0), this._texture = null, this.texture = null, this._legacy = !1, this.legacy = !1, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1], this._internalMarkAllSubMeshesAsMiscDirty = e._dirtyCallbacks[16]; } isReadyForSubMesh(e, t) { return this._isEnabled ? !(e._areTexturesDirty && t.texturesEnabled && this._texture && Tt.AnisotropicTextureEnabled && !this._texture.isReadyOrNotBlocking()) : !0; } prepareDefinesBeforeAttributes(e, t, i) { this._isEnabled ? (e.ANISOTROPIC = this._isEnabled, this._isEnabled && !i.isVerticesDataPresent(Y.TangentKind) && (e._needUVs = !0, e.MAINUV1 = !0), e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.AnisotropicTextureEnabled ? Ke.PrepareDefinesForMergedUV(this._texture, e, "ANISOTROPIC_TEXTURE") : e.ANISOTROPIC_TEXTURE = !1), e._areMiscDirty && (e.ANISOTROPIC_LEGACY = this._legacy)) : (e.ANISOTROPIC = !1, e.ANISOTROPIC_TEXTURE = !1, e.ANISOTROPIC_TEXTUREDIRECTUV = 0, e.ANISOTROPIC_LEGACY = !1); } bindForSubMesh(e, t) { if (!this._isEnabled) return; const i = this._material.isFrozen; (!e.useUbo || !i || !e.isSync) && (this._texture && Tt.AnisotropicTextureEnabled && (e.updateFloat2("vAnisotropyInfos", this._texture.coordinatesIndex, this._texture.level), Ke.BindTextureMatrix(this._texture, e, "anisotropy")), e.updateFloat3("vAnisotropy", this.direction.x, this.direction.y, this.intensity)), t.texturesEnabled && this._texture && Tt.AnisotropicTextureEnabled && e.setTexture("anisotropySampler", this._texture); } hasTexture(e) { return this._texture === e; } getActiveTextures(e) { this._texture && e.push(this._texture); } getAnimatables(e) { this._texture && this._texture.animations && this._texture.animations.length > 0 && e.push(this._texture); } dispose(e) { e && this._texture && this._texture.dispose(); } getClassName() { return "PBRAnisotropicConfiguration"; } addFallbacks(e, t, i) { return e.ANISOTROPIC && t.addFallback(i++, "ANISOTROPIC"), i; } getSamplers(e) { e.push("anisotropySampler"); } getUniforms() { return { ubo: [ { name: "vAnisotropy", size: 3, type: "vec3" }, { name: "vAnisotropyInfos", size: 2, type: "vec2" }, { name: "anisotropyMatrix", size: 16, type: "mat4" } ] }; } /** * Parses a anisotropy Configuration from a serialized object. * @param source - Serialized object. 
* @param scene Defines the scene we are parsing for * @param rootUrl Defines the rootUrl to load from */ parse(e, t, i) { super.parse(e, t, i), e.legacy === void 0 && (this.legacy = !0); } } F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], E5.prototype, "isEnabled", void 0); F([ W() ], E5.prototype, "intensity", void 0); F([ PL() ], E5.prototype, "direction", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], E5.prototype, "texture", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], E5.prototype, "legacy", void 0); class Zie extends sa { constructor() { super(...arguments), this.SHEEN = !1, this.SHEEN_TEXTURE = !1, this.SHEEN_GAMMATEXTURE = !1, this.SHEEN_TEXTURE_ROUGHNESS = !1, this.SHEEN_TEXTUREDIRECTUV = 0, this.SHEEN_TEXTURE_ROUGHNESSDIRECTUV = 0, this.SHEEN_LINKWITHALBEDO = !1, this.SHEEN_ROUGHNESS = !1, this.SHEEN_ALBEDOSCALING = !1, this.SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE = !1, this.SHEEN_TEXTURE_ROUGHNESS_IDENTICAL = !1; } } class K4 extends Q_ { /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } constructor(e, t = !0) { super(e, "Sheen", 120, new Zie(), t), this._isEnabled = !1, this.isEnabled = !1, this._linkSheenWithAlbedo = !1, this.linkSheenWithAlbedo = !1, this.intensity = 1, this.color = ze.White(), this._texture = null, this.texture = null, this._useRoughnessFromMainTexture = !0, this.useRoughnessFromMainTexture = !0, this._roughness = null, this.roughness = null, this._textureRoughness = null, this.textureRoughness = null, this._albedoScaling = !1, this.albedoScaling = !1, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1]; } isReadyForSubMesh(e, t) { return this._isEnabled ? !(e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.SheenTextureEnabled && !this._texture.isReadyOrNotBlocking() || this._textureRoughness && Tt.SheenTextureEnabled && !this._textureRoughness.isReadyOrNotBlocking())) : !0; } prepareDefinesBeforeAttributes(e, t) { var i; this._isEnabled ? (e.SHEEN = !0, e.SHEEN_LINKWITHALBEDO = this._linkSheenWithAlbedo, e.SHEEN_ROUGHNESS = this._roughness !== null, e.SHEEN_ALBEDOSCALING = this._albedoScaling, e.SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE = this._useRoughnessFromMainTexture, e.SHEEN_TEXTURE_ROUGHNESS_IDENTICAL = this._texture !== null && this._texture._texture === ((i = this._textureRoughness) === null || i === void 0 ? void 0 : i._texture) && this._texture.checkTransformsAreIdentical(this._textureRoughness), e._areTexturesDirty && t.texturesEnabled && (this._texture && Tt.SheenTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._texture, e, "SHEEN_TEXTURE"), e.SHEEN_GAMMATEXTURE = this._texture.gammaSpace) : e.SHEEN_TEXTURE = !1, this._textureRoughness && Tt.SheenTextureEnabled ? Ke.PrepareDefinesForMergedUV(this._textureRoughness, e, "SHEEN_TEXTURE_ROUGHNESS") : e.SHEEN_TEXTURE_ROUGHNESS = !1)) : (e.SHEEN = !1, e.SHEEN_TEXTURE = !1, e.SHEEN_TEXTURE_ROUGHNESS = !1, e.SHEEN_LINKWITHALBEDO = !1, e.SHEEN_ROUGHNESS = !1, e.SHEEN_ALBEDOSCALING = !1, e.SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE = !1, e.SHEEN_TEXTURE_ROUGHNESS_IDENTICAL = !1, e.SHEEN_GAMMATEXTURE = !1, e.SHEEN_TEXTUREDIRECTUV = 0, e.SHEEN_TEXTURE_ROUGHNESSDIRECTUV = 0); } bindForSubMesh(e, t, i, r) { var s, n, a, l, o, u, h, d; if (!this._isEnabled) return; const f = r.materialDefines, p = this._material.isFrozen, m = f.SHEEN_TEXTURE_ROUGHNESS_IDENTICAL; (!e.useUbo || !p || !e.isSync) && (m && Tt.SheenTextureEnabled ? 
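/* The PBRAnisotropicConfiguration block (class E5 and its serialized properties isEnabled,
   intensity, direction, texture, legacy) ends just above; its angle accessor converts between an
   angle in radians and the direction vector via cos/sin and atan2. A hedged sketch, assuming the
   public Babylon.js API names, which this bundle does not expose directly:
     const mat = new BABYLON.PBRMaterial("brushedMetal", scene);
     mat.anisotropy.isEnabled = true;
     mat.anisotropy.intensity = 0.7;
     mat.anisotropy.angle = Math.PI / 4;   // equivalent to direction = (cos(PI/4), sin(PI/4))
*/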
(e.updateFloat4("vSheenInfos", this._texture.coordinatesIndex, this._texture.level, -1, -1), Ke.BindTextureMatrix(this._texture, e, "sheen")) : (this._texture || this._textureRoughness) && Tt.SheenTextureEnabled && (e.updateFloat4("vSheenInfos", (n = (s = this._texture) === null || s === void 0 ? void 0 : s.coordinatesIndex) !== null && n !== void 0 ? n : 0, (l = (a = this._texture) === null || a === void 0 ? void 0 : a.level) !== null && l !== void 0 ? l : 0, (u = (o = this._textureRoughness) === null || o === void 0 ? void 0 : o.coordinatesIndex) !== null && u !== void 0 ? u : 0, (d = (h = this._textureRoughness) === null || h === void 0 ? void 0 : h.level) !== null && d !== void 0 ? d : 0), this._texture && Ke.BindTextureMatrix(this._texture, e, "sheen"), this._textureRoughness && !m && !f.SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE && Ke.BindTextureMatrix(this._textureRoughness, e, "sheenRoughness")), e.updateFloat4("vSheenColor", this.color.r, this.color.g, this.color.b, this.intensity), this._roughness !== null && e.updateFloat("vSheenRoughness", this._roughness)), t.texturesEnabled && (this._texture && Tt.SheenTextureEnabled && e.setTexture("sheenSampler", this._texture), this._textureRoughness && !m && !f.SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE && Tt.SheenTextureEnabled && e.setTexture("sheenRoughnessSampler", this._textureRoughness)); } hasTexture(e) { return this._texture === e || this._textureRoughness === e; } getActiveTextures(e) { this._texture && e.push(this._texture), this._textureRoughness && e.push(this._textureRoughness); } getAnimatables(e) { this._texture && this._texture.animations && this._texture.animations.length > 0 && e.push(this._texture), this._textureRoughness && this._textureRoughness.animations && this._textureRoughness.animations.length > 0 && e.push(this._textureRoughness); } dispose(e) { var t, i; e && ((t = this._texture) === null || t === void 0 || t.dispose(), (i = this._textureRoughness) === null || i === void 0 || i.dispose()); } getClassName() { return "PBRSheenConfiguration"; } addFallbacks(e, t, i) { return e.SHEEN && t.addFallback(i++, "SHEEN"), i; } getSamplers(e) { e.push("sheenSampler", "sheenRoughnessSampler"); } getUniforms() { return { ubo: [ { name: "vSheenColor", size: 4, type: "vec4" }, { name: "vSheenRoughness", size: 1, type: "float" }, { name: "vSheenInfos", size: 4, type: "vec4" }, { name: "sheenMatrix", size: 16, type: "mat4" }, { name: "sheenRoughnessMatrix", size: 16, type: "mat4" } ] }; } } F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "isEnabled", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "linkSheenWithAlbedo", void 0); F([ W() ], K4.prototype, "intensity", void 0); F([ Fs() ], K4.prototype, "color", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "texture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "useRoughnessFromMainTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "roughness", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "textureRoughness", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], K4.prototype, "albedoScaling", void 0); class qie extends sa { constructor() { super(...arguments), this.SUBSURFACE = !1, this.SS_REFRACTION = !1, this.SS_REFRACTION_USE_INTENSITY_FROM_TEXTURE = !1, this.SS_TRANSLUCENCY = !1, this.SS_TRANSLUCENCY_USE_INTENSITY_FROM_TEXTURE = !1, this.SS_SCATTERING = !1, this.SS_DISPERSION = !1, this.SS_THICKNESSANDMASK_TEXTURE = !1, 
this.SS_THICKNESSANDMASK_TEXTUREDIRECTUV = 0, this.SS_HAS_THICKNESS = !1, this.SS_REFRACTIONINTENSITY_TEXTURE = !1, this.SS_REFRACTIONINTENSITY_TEXTUREDIRECTUV = 0, this.SS_TRANSLUCENCYINTENSITY_TEXTURE = !1, this.SS_TRANSLUCENCYINTENSITY_TEXTUREDIRECTUV = 0, this.SS_REFRACTIONMAP_3D = !1, this.SS_REFRACTIONMAP_OPPOSITEZ = !1, this.SS_LODINREFRACTIONALPHA = !1, this.SS_GAMMAREFRACTION = !1, this.SS_RGBDREFRACTION = !1, this.SS_LINEARSPECULARREFRACTION = !1, this.SS_LINKREFRACTIONTOTRANSPARENCY = !1, this.SS_ALBEDOFORREFRACTIONTINT = !1, this.SS_ALBEDOFORTRANSLUCENCYTINT = !1, this.SS_USE_LOCAL_REFRACTIONMAP_CUBIC = !1, this.SS_USE_THICKNESS_AS_DEPTH = !1, this.SS_MASK_FROM_THICKNESS_TEXTURE = !1, this.SS_USE_GLTF_TEXTURES = !1; } } class Zo extends Q_ { /** * Diffusion profile for subsurface scattering. * Useful for better scattering in the skins or foliages. */ get scatteringDiffusionProfile() { return this._scene.subSurfaceConfiguration ? this._scene.subSurfaceConfiguration.ssDiffusionProfileColors[this._scatteringDiffusionProfileIndex] : null; } set scatteringDiffusionProfile(e) { this._scene.enableSubSurfaceForPrePass() && e && (this._scatteringDiffusionProfileIndex = this._scene.subSurfaceConfiguration.addDiffusionProfile(e)); } /** * Index of refraction of the material's volume. * https://en.wikipedia.org/wiki/List_of_refractive_indices * * This ONLY impacts refraction. If not provided or given a non-valid value, * the volume will use the same IOR as the surface. */ get volumeIndexOfRefraction() { return this._volumeIndexOfRefraction >= 1 ? this._volumeIndexOfRefraction : this._indexOfRefraction; } set volumeIndexOfRefraction(e) { e >= 1 ? this._volumeIndexOfRefraction = e : this._volumeIndexOfRefraction = -1; } /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isRefractionEnabled || this._isTranslucencyEnabled || this._isScatteringEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } /** @internal */ _markScenePrePassDirty() { this._internalMarkAllSubMeshesAsTexturesDirty(), this._internalMarkScenePrePassDirty(); } constructor(e, t = !0) { super(e, "PBRSubSurface", 130, new qie(), t), this._isRefractionEnabled = !1, this.isRefractionEnabled = !1, this._isTranslucencyEnabled = !1, this.isTranslucencyEnabled = !1, this._isDispersionEnabled = !1, this.isDispersionEnabled = !1, this._isScatteringEnabled = !1, this.isScatteringEnabled = !1, this._scatteringDiffusionProfileIndex = 0, this.refractionIntensity = 1, this.translucencyIntensity = 1, this.useAlbedoToTintRefraction = !1, this.useAlbedoToTintTranslucency = !1, this._thicknessTexture = null, this.thicknessTexture = null, this._refractionTexture = null, this.refractionTexture = null, this._indexOfRefraction = 1.5, this.indexOfRefraction = 1.5, this._volumeIndexOfRefraction = -1, this._invertRefractionY = !1, this.invertRefractionY = !1, this._linkRefractionWithTransparency = !1, this.linkRefractionWithTransparency = !1, this.minimumThickness = 0, this.maximumThickness = 1, this.useThicknessAsDepth = !1, this.tintColor = ze.White(), this.tintColorAtDistance = 1, this.dispersion = 0, this.diffusionDistance = ze.White(), this._useMaskFromThicknessTexture = !1, this.useMaskFromThicknessTexture = !1, this._refractionIntensityTexture = null, this.refractionIntensityTexture = null, this._translucencyIntensityTexture = null, this.translucencyIntensityTexture = null, this._useGltfStyleTextures = !1, this.useGltfStyleTextures = !1, this._scene = e.getScene(), this.registerForExtraEvents = !0, 
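/* The class being constructed here is the minified PBRSubSurfaceConfiguration (see getClassName()
   further down); it drives refraction, translucency, dispersion and scattering through the SS_*
   defines declared above. A hedged usage sketch, assuming the public Babylon.js API names, which
   are not defined by this bundle:
     const mat = new BABYLON.PBRMaterial("glass", scene);
     mat.subSurface.isRefractionEnabled = true;
     mat.subSurface.indexOfRefraction = 1.5;                          // the constructor default above
     mat.subSurface.tintColor = new BABYLON.Color3(0.9, 1.0, 0.95);
     mat.subSurface.isTranslucencyEnabled = true;
     mat.subSurface.translucencyIntensity = 0.8;
     mat.subSurface.maximumThickness = 2;   // world-space units, rescaled by hardBindForSubMesh below
*/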
this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1], this._internalMarkScenePrePassDirty = e._dirtyCallbacks[32]; } isReadyForSubMesh(e, t) { if (!this._isRefractionEnabled && !this._isTranslucencyEnabled && !this._isScatteringEnabled) return !0; if (e._areTexturesDirty && t.texturesEnabled) { if (this._thicknessTexture && Tt.ThicknessTextureEnabled && !this._thicknessTexture.isReadyOrNotBlocking()) return !1; const i = this._getRefractionTexture(t); if (i && Tt.RefractionTextureEnabled && !i.isReadyOrNotBlocking()) return !1; } return !0; } prepareDefinesBeforeAttributes(e, t) { if (!this._isRefractionEnabled && !this._isTranslucencyEnabled && !this._isScatteringEnabled) { e.SUBSURFACE = !1, e.SS_DISPERSION = !1, e.SS_TRANSLUCENCY = !1, e.SS_SCATTERING = !1, e.SS_REFRACTION = !1, e.SS_REFRACTION_USE_INTENSITY_FROM_TEXTURE = !1, e.SS_TRANSLUCENCY_USE_INTENSITY_FROM_TEXTURE = !1, e.SS_THICKNESSANDMASK_TEXTURE = !1, e.SS_THICKNESSANDMASK_TEXTUREDIRECTUV = 0, e.SS_HAS_THICKNESS = !1, e.SS_REFRACTIONINTENSITY_TEXTURE = !1, e.SS_REFRACTIONINTENSITY_TEXTUREDIRECTUV = 0, e.SS_TRANSLUCENCYINTENSITY_TEXTURE = !1, e.SS_TRANSLUCENCYINTENSITY_TEXTUREDIRECTUV = 0, e.SS_REFRACTIONMAP_3D = !1, e.SS_REFRACTIONMAP_OPPOSITEZ = !1, e.SS_LODINREFRACTIONALPHA = !1, e.SS_GAMMAREFRACTION = !1, e.SS_RGBDREFRACTION = !1, e.SS_LINEARSPECULARREFRACTION = !1, e.SS_LINKREFRACTIONTOTRANSPARENCY = !1, e.SS_ALBEDOFORREFRACTIONTINT = !1, e.SS_ALBEDOFORTRANSLUCENCYTINT = !1, e.SS_USE_LOCAL_REFRACTIONMAP_CUBIC = !1, e.SS_USE_THICKNESS_AS_DEPTH = !1, e.SS_MASK_FROM_THICKNESS_TEXTURE = !1, e.SS_USE_GLTF_TEXTURES = !1; return; } if (e._areTexturesDirty) { e.SUBSURFACE = !0, e.SS_DISPERSION = this._isDispersionEnabled, e.SS_TRANSLUCENCY = this._isTranslucencyEnabled, e.SS_TRANSLUCENCY_USE_INTENSITY_FROM_TEXTURE = !1, e.SS_SCATTERING = this._isScatteringEnabled, e.SS_THICKNESSANDMASK_TEXTURE = !1, e.SS_REFRACTIONINTENSITY_TEXTURE = !1, e.SS_TRANSLUCENCYINTENSITY_TEXTURE = !1, e.SS_HAS_THICKNESS = !1, e.SS_MASK_FROM_THICKNESS_TEXTURE = !1, e.SS_USE_GLTF_TEXTURES = !1, e.SS_REFRACTION = !1, e.SS_REFRACTION_USE_INTENSITY_FROM_TEXTURE = !1, e.SS_REFRACTIONMAP_3D = !1, e.SS_GAMMAREFRACTION = !1, e.SS_RGBDREFRACTION = !1, e.SS_LINEARSPECULARREFRACTION = !1, e.SS_REFRACTIONMAP_OPPOSITEZ = !1, e.SS_LODINREFRACTIONALPHA = !1, e.SS_LINKREFRACTIONTOTRANSPARENCY = !1, e.SS_ALBEDOFORREFRACTIONTINT = !1, e.SS_ALBEDOFORTRANSLUCENCYTINT = !1, e.SS_USE_LOCAL_REFRACTIONMAP_CUBIC = !1, e.SS_USE_THICKNESS_AS_DEPTH = !1; const i = !!this._thicknessTexture && !!this._refractionIntensityTexture && this._refractionIntensityTexture.checkTransformsAreIdentical(this._thicknessTexture) && this._refractionIntensityTexture._texture === this._thicknessTexture._texture, r = !!this._thicknessTexture && !!this._translucencyIntensityTexture && this._translucencyIntensityTexture.checkTransformsAreIdentical(this._thicknessTexture) && this._translucencyIntensityTexture._texture === this._thicknessTexture._texture, s = (i || !this._refractionIntensityTexture) && (r || !this._translucencyIntensityTexture); if (e._areTexturesDirty && t.texturesEnabled && (this._thicknessTexture && Tt.ThicknessTextureEnabled && Ke.PrepareDefinesForMergedUV(this._thicknessTexture, e, "SS_THICKNESSANDMASK_TEXTURE"), this._refractionIntensityTexture && Tt.RefractionIntensityTextureEnabled && !s && Ke.PrepareDefinesForMergedUV(this._refractionIntensityTexture, e, "SS_REFRACTIONINTENSITY_TEXTURE"), this._translucencyIntensityTexture && Tt.TranslucencyIntensityTextureEnabled 
&& !s && Ke.PrepareDefinesForMergedUV(this._translucencyIntensityTexture, e, "SS_TRANSLUCENCYINTENSITY_TEXTURE")), e.SS_HAS_THICKNESS = this.maximumThickness - this.minimumThickness !== 0, e.SS_MASK_FROM_THICKNESS_TEXTURE = (this._useMaskFromThicknessTexture || !!this._refractionIntensityTexture || !!this._translucencyIntensityTexture) && s, e.SS_USE_GLTF_TEXTURES = this._useGltfStyleTextures, e.SS_REFRACTION_USE_INTENSITY_FROM_TEXTURE = (this._useMaskFromThicknessTexture || !!this._refractionIntensityTexture) && s, e.SS_TRANSLUCENCY_USE_INTENSITY_FROM_TEXTURE = (this._useMaskFromThicknessTexture || !!this._translucencyIntensityTexture) && s, this._isRefractionEnabled && t.texturesEnabled) { const n = this._getRefractionTexture(t); n && Tt.RefractionTextureEnabled && (e.SS_REFRACTION = !0, e.SS_REFRACTIONMAP_3D = n.isCube, e.SS_GAMMAREFRACTION = n.gammaSpace, e.SS_RGBDREFRACTION = n.isRGBD, e.SS_LINEARSPECULARREFRACTION = n.linearSpecularLOD, e.SS_REFRACTIONMAP_OPPOSITEZ = this._scene.useRightHandedSystem && n.isCube ? !n.invertZ : n.invertZ, e.SS_LODINREFRACTIONALPHA = n.lodLevelInAlpha, e.SS_LINKREFRACTIONTOTRANSPARENCY = this._linkRefractionWithTransparency, e.SS_ALBEDOFORREFRACTIONTINT = this.useAlbedoToTintRefraction, e.SS_USE_LOCAL_REFRACTIONMAP_CUBIC = n.isCube && n.boundingBoxSize, e.SS_USE_THICKNESS_AS_DEPTH = this.useThicknessAsDepth); } this._isTranslucencyEnabled && (e.SS_ALBEDOFORTRANSLUCENCYTINT = this.useAlbedoToTintTranslucency); } } /** * Binds the material data (this function is called even if mustRebind() returns false) * @param uniformBuffer defines the Uniform buffer to fill in. * @param scene defines the scene the material belongs to. * @param engine defines the engine the material belongs to. * @param subMesh the submesh to bind data for */ hardBindForSubMesh(e, t, i, r) { if (!this._isRefractionEnabled && !this._isTranslucencyEnabled && !this._isScatteringEnabled) return; r.getRenderingMesh().getWorldMatrix().decompose(de.Vector3[0]); const s = Math.max(Math.abs(de.Vector3[0].x), Math.abs(de.Vector3[0].y), Math.abs(de.Vector3[0].z)); e.updateFloat2("vThicknessParam", this.minimumThickness * s, (this.maximumThickness - this.minimumThickness) * s); } bindForSubMesh(e, t, i, r) { if (!this._isRefractionEnabled && !this._isTranslucencyEnabled && !this._isScatteringEnabled) return; const s = r.materialDefines, n = this._material.isFrozen, a = this._material.realTimeFiltering, l = s.LODBASEDMICROSFURACE, o = this._getRefractionTexture(t); if (!e.useUbo || !n || !e.isSync) { if (this._thicknessTexture && Tt.ThicknessTextureEnabled && (e.updateFloat2("vThicknessInfos", this._thicknessTexture.coordinatesIndex, this._thicknessTexture.level), Ke.BindTextureMatrix(this._thicknessTexture, e, "thickness")), this._refractionIntensityTexture && Tt.RefractionIntensityTextureEnabled && s.SS_REFRACTIONINTENSITY_TEXTURE && (e.updateFloat2("vRefractionIntensityInfos", this._refractionIntensityTexture.coordinatesIndex, this._refractionIntensityTexture.level), Ke.BindTextureMatrix(this._refractionIntensityTexture, e, "refractionIntensity")), this._translucencyIntensityTexture && Tt.TranslucencyIntensityTextureEnabled && s.SS_TRANSLUCENCYINTENSITY_TEXTURE && (e.updateFloat2("vTranslucencyIntensityInfos", this._translucencyIntensityTexture.coordinatesIndex, this._translucencyIntensityTexture.level), Ke.BindTextureMatrix(this._translucencyIntensityTexture, e, "translucencyIntensity")), o && Tt.RefractionTextureEnabled) { e.updateMatrix("refractionMatrix", o.getRefractionTextureMatrix()); let 
u = 1; o.isCube || o.depth && (u = o.depth); const h = o.getSize().width, d = this.volumeIndexOfRefraction; if (e.updateFloat4("vRefractionInfos", o.level, 1 / d, u, this._invertRefractionY ? -1 : 1), e.updateFloat4("vRefractionMicrosurfaceInfos", h, o.lodGenerationScale, o.lodGenerationOffset, 1 / this.indexOfRefraction), a && e.updateFloat2("vRefractionFilteringInfo", h, yt.Log2(h)), o.boundingBoxSize) { const f = o; e.updateVector3("vRefractionPosition", f.boundingBoxPosition), e.updateVector3("vRefractionSize", f.boundingBoxSize); } } this._isScatteringEnabled && e.updateFloat("scatteringDiffusionProfile", this._scatteringDiffusionProfileIndex), e.updateColor3("vDiffusionDistance", this.diffusionDistance), e.updateFloat4("vTintColor", this.tintColor.r, this.tintColor.g, this.tintColor.b, Math.max(1e-5, this.tintColorAtDistance)), e.updateFloat3("vSubSurfaceIntensity", this.refractionIntensity, this.translucencyIntensity, 0), e.updateFloat("dispersion", this.dispersion); } t.texturesEnabled && (this._thicknessTexture && Tt.ThicknessTextureEnabled && e.setTexture("thicknessSampler", this._thicknessTexture), this._refractionIntensityTexture && Tt.RefractionIntensityTextureEnabled && s.SS_REFRACTIONINTENSITY_TEXTURE && e.setTexture("refractionIntensitySampler", this._refractionIntensityTexture), this._translucencyIntensityTexture && Tt.TranslucencyIntensityTextureEnabled && s.SS_TRANSLUCENCYINTENSITY_TEXTURE && e.setTexture("translucencyIntensitySampler", this._translucencyIntensityTexture), o && Tt.RefractionTextureEnabled && (l ? e.setTexture("refractionSampler", o) : (e.setTexture("refractionSampler", o._lodTextureMid || o), e.setTexture("refractionSamplerLow", o._lodTextureLow || o), e.setTexture("refractionSamplerHigh", o._lodTextureHigh || o)))); } /** * Returns the texture used for refraction or null if none is used. * @param scene defines the scene the material belongs to. * @returns - Refraction texture if present. If no refraction texture and refraction * is linked with transparency, returns environment texture. Otherwise, returns null. */ _getRefractionTexture(e) { return this._refractionTexture ? this._refractionTexture : this._isRefractionEnabled ? e.environmentTexture : null; } /** * Returns true if alpha blending should be disabled. */ get disableAlphaBlending() { return this._isRefractionEnabled && this._linkRefractionWithTransparency; } /** * Fills the list of render target textures. 
* @param renderTargets the list of render targets to update */ fillRenderTargetTextures(e) { Tt.RefractionTextureEnabled && this._refractionTexture && this._refractionTexture.isRenderTarget && e.push(this._refractionTexture); } hasTexture(e) { return this._thicknessTexture === e || this._refractionTexture === e; } hasRenderTargetTextures() { return !!(Tt.RefractionTextureEnabled && this._refractionTexture && this._refractionTexture.isRenderTarget); } getActiveTextures(e) { this._thicknessTexture && e.push(this._thicknessTexture), this._refractionTexture && e.push(this._refractionTexture); } getAnimatables(e) { this._thicknessTexture && this._thicknessTexture.animations && this._thicknessTexture.animations.length > 0 && e.push(this._thicknessTexture), this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0 && e.push(this._refractionTexture); } dispose(e) { e && (this._thicknessTexture && this._thicknessTexture.dispose(), this._refractionTexture && this._refractionTexture.dispose()); } getClassName() { return "PBRSubSurfaceConfiguration"; } addFallbacks(e, t, i) { return e.SS_SCATTERING && t.addFallback(i++, "SS_SCATTERING"), e.SS_TRANSLUCENCY && t.addFallback(i++, "SS_TRANSLUCENCY"), i; } getSamplers(e) { e.push("thicknessSampler", "refractionIntensitySampler", "translucencyIntensitySampler", "refractionSampler", "refractionSamplerLow", "refractionSamplerHigh"); } getUniforms() { return { ubo: [ { name: "vRefractionMicrosurfaceInfos", size: 4, type: "vec4" }, { name: "vRefractionFilteringInfo", size: 2, type: "vec2" }, { name: "vTranslucencyIntensityInfos", size: 2, type: "vec2" }, { name: "vRefractionInfos", size: 4, type: "vec4" }, { name: "refractionMatrix", size: 16, type: "mat4" }, { name: "vThicknessInfos", size: 2, type: "vec2" }, { name: "vRefractionIntensityInfos", size: 2, type: "vec2" }, { name: "thicknessMatrix", size: 16, type: "mat4" }, { name: "refractionIntensityMatrix", size: 16, type: "mat4" }, { name: "translucencyIntensityMatrix", size: 16, type: "mat4" }, { name: "vThicknessParam", size: 2, type: "vec2" }, { name: "vDiffusionDistance", size: 3, type: "vec3" }, { name: "vTintColor", size: 4, type: "vec4" }, { name: "vSubSurfaceIntensity", size: 3, type: "vec3" }, { name: "vRefractionPosition", size: 3, type: "vec3" }, { name: "vRefractionSize", size: 3, type: "vec3" }, { name: "scatteringDiffusionProfile", size: 1, type: "float" }, { name: "dispersion", size: 1, type: "float" } ] }; } } F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "isRefractionEnabled", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "isTranslucencyEnabled", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "isDispersionEnabled", void 0); F([ W(), ct("_markScenePrePassDirty") ], Zo.prototype, "isScatteringEnabled", void 0); F([ W() ], Zo.prototype, "_scatteringDiffusionProfileIndex", void 0); F([ W() ], Zo.prototype, "refractionIntensity", void 0); F([ W() ], Zo.prototype, "translucencyIntensity", void 0); F([ W() ], Zo.prototype, "useAlbedoToTintRefraction", void 0); F([ W() ], Zo.prototype, "useAlbedoToTintTranslucency", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "thicknessTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "refractionTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "indexOfRefraction", void 0); F([ W() ], Zo.prototype, "_volumeIndexOfRefraction", void 0); F([ 
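/* Scattering sketch (hedged, public API names assumed): the scatteringDiffusionProfile setter
   defined earlier only registers a profile once scene.enableSubSurfaceForPrePass() succeeds, so a
   typical setup is:
     mat.subSurface.isScatteringEnabled = true;
     mat.subSurface.scatteringDiffusionProfile = new BABYLON.Color3(0.75, 0.25, 0.2);
*/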
ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "volumeIndexOfRefraction", null); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "invertRefractionY", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "linkRefractionWithTransparency", void 0); F([ W() ], Zo.prototype, "minimumThickness", void 0); F([ W() ], Zo.prototype, "maximumThickness", void 0); F([ W() ], Zo.prototype, "useThicknessAsDepth", void 0); F([ Fs() ], Zo.prototype, "tintColor", void 0); F([ W() ], Zo.prototype, "tintColorAtDistance", void 0); F([ W() ], Zo.prototype, "dispersion", void 0); F([ Fs() ], Zo.prototype, "diffusionDistance", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "useMaskFromThicknessTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "refractionIntensityTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "translucencyIntensityTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Zo.prototype, "useGltfStyleTextures", void 0); const KD = { effect: null, subMesh: null }; class wH extends sa { /** * Initializes the PBR Material defines. * @param externalProperties The external properties */ constructor(e) { super(e), this.PBR = !0, this.NUM_SAMPLES = "0", this.REALTIME_FILTERING = !1, this.MAINUV1 = !1, this.MAINUV2 = !1, this.MAINUV3 = !1, this.MAINUV4 = !1, this.MAINUV5 = !1, this.MAINUV6 = !1, this.UV1 = !1, this.UV2 = !1, this.UV3 = !1, this.UV4 = !1, this.UV5 = !1, this.UV6 = !1, this.ALBEDO = !1, this.GAMMAALBEDO = !1, this.ALBEDODIRECTUV = 0, this.VERTEXCOLOR = !1, this.BAKED_VERTEX_ANIMATION_TEXTURE = !1, this.AMBIENT = !1, this.AMBIENTDIRECTUV = 0, this.AMBIENTINGRAYSCALE = !1, this.OPACITY = !1, this.VERTEXALPHA = !1, this.OPACITYDIRECTUV = 0, this.OPACITYRGB = !1, this.ALPHATEST = !1, this.DEPTHPREPASS = !1, this.ALPHABLEND = !1, this.ALPHAFROMALBEDO = !1, this.ALPHATESTVALUE = "0.5", this.SPECULAROVERALPHA = !1, this.RADIANCEOVERALPHA = !1, this.ALPHAFRESNEL = !1, this.LINEARALPHAFRESNEL = !1, this.PREMULTIPLYALPHA = !1, this.EMISSIVE = !1, this.EMISSIVEDIRECTUV = 0, this.GAMMAEMISSIVE = !1, this.REFLECTIVITY = !1, this.REFLECTIVITY_GAMMA = !1, this.REFLECTIVITYDIRECTUV = 0, this.SPECULARTERM = !1, this.MICROSURFACEFROMREFLECTIVITYMAP = !1, this.MICROSURFACEAUTOMATIC = !1, this.LODBASEDMICROSFURACE = !1, this.MICROSURFACEMAP = !1, this.MICROSURFACEMAPDIRECTUV = 0, this.METALLICWORKFLOW = !1, this.ROUGHNESSSTOREINMETALMAPALPHA = !1, this.ROUGHNESSSTOREINMETALMAPGREEN = !1, this.METALLNESSSTOREINMETALMAPBLUE = !1, this.AOSTOREINMETALMAPRED = !1, this.METALLIC_REFLECTANCE = !1, this.METALLIC_REFLECTANCE_GAMMA = !1, this.METALLIC_REFLECTANCEDIRECTUV = 0, this.METALLIC_REFLECTANCE_USE_ALPHA_ONLY = !1, this.REFLECTANCE = !1, this.REFLECTANCE_GAMMA = !1, this.REFLECTANCEDIRECTUV = 0, this.ENVIRONMENTBRDF = !1, this.ENVIRONMENTBRDF_RGBD = !1, this.NORMAL = !1, this.TANGENT = !1, this.BUMP = !1, this.BUMPDIRECTUV = 0, this.OBJECTSPACE_NORMALMAP = !1, this.PARALLAX = !1, this.PARALLAX_RHS = !1, this.PARALLAXOCCLUSION = !1, this.NORMALXYSCALE = !0, this.LIGHTMAP = !1, this.LIGHTMAPDIRECTUV = 0, this.USELIGHTMAPASSHADOWMAP = !1, this.GAMMALIGHTMAP = !1, this.RGBDLIGHTMAP = !1, this.REFLECTION = !1, this.REFLECTIONMAP_3D = !1, this.REFLECTIONMAP_SPHERICAL = !1, this.REFLECTIONMAP_PLANAR = !1, this.REFLECTIONMAP_CUBIC = !1, this.USE_LOCAL_REFLECTIONMAP_CUBIC = !1, this.REFLECTIONMAP_PROJECTION = !1, this.REFLECTIONMAP_SKYBOX = !1, 
this.REFLECTIONMAP_EXPLICIT = !1, this.REFLECTIONMAP_EQUIRECTANGULAR = !1, this.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, this.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, this.INVERTCUBICMAP = !1, this.USESPHERICALFROMREFLECTIONMAP = !1, this.USEIRRADIANCEMAP = !1, this.USESPHERICALINVERTEX = !1, this.REFLECTIONMAP_OPPOSITEZ = !1, this.LODINREFLECTIONALPHA = !1, this.GAMMAREFLECTION = !1, this.RGBDREFLECTION = !1, this.LINEARSPECULARREFLECTION = !1, this.RADIANCEOCCLUSION = !1, this.HORIZONOCCLUSION = !1, this.INSTANCES = !1, this.THIN_INSTANCES = !1, this.INSTANCESCOLOR = !1, this.PREPASS = !1, this.PREPASS_IRRADIANCE = !1, this.PREPASS_IRRADIANCE_INDEX = -1, this.PREPASS_ALBEDO_SQRT = !1, this.PREPASS_ALBEDO_SQRT_INDEX = -1, this.PREPASS_DEPTH = !1, this.PREPASS_DEPTH_INDEX = -1, this.PREPASS_NORMAL = !1, this.PREPASS_NORMAL_INDEX = -1, this.PREPASS_NORMAL_WORLDSPACE = !1, this.PREPASS_POSITION = !1, this.PREPASS_POSITION_INDEX = -1, this.PREPASS_VELOCITY = !1, this.PREPASS_VELOCITY_INDEX = -1, this.PREPASS_REFLECTIVITY = !1, this.PREPASS_REFLECTIVITY_INDEX = -1, this.SCENE_MRT_COUNT = 0, this.NUM_BONE_INFLUENCERS = 0, this.BonesPerMesh = 0, this.BONETEXTURE = !1, this.BONES_VELOCITY_ENABLED = !1, this.NONUNIFORMSCALING = !1, this.MORPHTARGETS = !1, this.MORPHTARGETS_NORMAL = !1, this.MORPHTARGETS_TANGENT = !1, this.MORPHTARGETS_UV = !1, this.NUM_MORPH_INFLUENCERS = 0, this.MORPHTARGETS_TEXTURE = !1, this.IMAGEPROCESSING = !1, this.VIGNETTE = !1, this.VIGNETTEBLENDMODEMULTIPLY = !1, this.VIGNETTEBLENDMODEOPAQUE = !1, this.TONEMAPPING = !1, this.TONEMAPPING_ACES = !1, this.CONTRAST = !1, this.COLORCURVES = !1, this.COLORGRADING = !1, this.COLORGRADING3D = !1, this.SAMPLER3DGREENDEPTH = !1, this.SAMPLER3DBGRMAP = !1, this.DITHER = !1, this.IMAGEPROCESSINGPOSTPROCESS = !1, this.SKIPFINALCOLORCLAMP = !1, this.EXPOSURE = !1, this.MULTIVIEW = !1, this.ORDER_INDEPENDENT_TRANSPARENCY = !1, this.ORDER_INDEPENDENT_TRANSPARENCY_16BITS = !1, this.USEPHYSICALLIGHTFALLOFF = !1, this.USEGLTFLIGHTFALLOFF = !1, this.TWOSIDEDLIGHTING = !1, this.SHADOWFLOAT = !1, this.CLIPPLANE = !1, this.CLIPPLANE2 = !1, this.CLIPPLANE3 = !1, this.CLIPPLANE4 = !1, this.CLIPPLANE5 = !1, this.CLIPPLANE6 = !1, this.POINTSIZE = !1, this.FOG = !1, this.LOGARITHMICDEPTH = !1, this.CAMERA_ORTHOGRAPHIC = !1, this.CAMERA_PERSPECTIVE = !1, this.FORCENORMALFORWARD = !1, this.SPECULARAA = !1, this.UNLIT = !1, this.DECAL_AFTER_DETAIL = !1, this.DEBUGMODE = 0, this.rebuild(); } /** * Resets the PBR Material defines. */ reset() { super.reset(), this.ALPHATESTVALUE = "0.5", this.PBR = !0, this.NORMALXYSCALE = !0; } } class on extends fl { /** * Enables realtime filtering on the texture. */ get realTimeFiltering() { return this._realTimeFiltering; } set realTimeFiltering(e) { this._realTimeFiltering = e, this.markAsDirty(1); } /** * Quality switch for realtime filtering */ get realTimeFilteringQuality() { return this._realTimeFilteringQuality; } set realTimeFilteringQuality(e) { this._realTimeFilteringQuality = e, this.markAsDirty(1); } /** * Can this material render to several textures at once */ get canRenderToMRT() { return !0; } /** * Attaches a new image processing configuration to the PBR Material. * @param configuration */ _attachImageProcessingConfiguration(e) { e !== this._imageProcessingConfiguration && (this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), e ? 
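/* The realTimeFiltering and realTimeFilteringQuality accessors defined just above toggle on-the-fly
   filtering of the reflection texture; the quality value is a sample count (the constructor below
   defaults it to 8) and feeds the NUM_SAMPLES / REALTIME_FILTERING defines during define
   preparation. A hedged sketch, assuming the public Babylon.js PBRMaterial API:
     mat.realTimeFiltering = true;
     mat.realTimeFilteringQuality = 64;   // more samples give cleaner filtered reflections at higher GPU cost
*/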
this._imageProcessingConfiguration = e : this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration, this._imageProcessingConfiguration && (this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => { this._markAllSubMeshesAsImageProcessingDirty(); }))); } /** * Instantiates a new PBRMaterial instance. * * @param name The material name * @param scene The scene the material will be use in. */ constructor(e, t) { super(e, t), this._directIntensity = 1, this._emissiveIntensity = 1, this._environmentIntensity = 1, this._specularIntensity = 1, this._lightingInfos = new Di(this._directIntensity, this._emissiveIntensity, this._environmentIntensity, this._specularIntensity), this._disableBumpMap = !1, this._albedoTexture = null, this._ambientTexture = null, this._ambientTextureStrength = 1, this._ambientTextureImpactOnAnalyticalLights = on.DEFAULT_AO_ON_ANALYTICAL_LIGHTS, this._opacityTexture = null, this._reflectionTexture = null, this._emissiveTexture = null, this._reflectivityTexture = null, this._metallicTexture = null, this._metallic = null, this._roughness = null, this._metallicF0Factor = 1, this._metallicReflectanceColor = ze.White(), this._useOnlyMetallicFromMetallicReflectanceTexture = !1, this._metallicReflectanceTexture = null, this._reflectanceTexture = null, this._microSurfaceTexture = null, this._bumpTexture = null, this._lightmapTexture = null, this._ambientColor = new ze(0, 0, 0), this._albedoColor = new ze(1, 1, 1), this._reflectivityColor = new ze(1, 1, 1), this._reflectionColor = new ze(1, 1, 1), this._emissiveColor = new ze(0, 0, 0), this._microSurface = 0.9, this._useLightmapAsShadowmap = !1, this._useHorizonOcclusion = !0, this._useRadianceOcclusion = !0, this._useAlphaFromAlbedoTexture = !1, this._useSpecularOverAlpha = !0, this._useMicroSurfaceFromReflectivityMapAlpha = !1, this._useRoughnessFromMetallicTextureAlpha = !0, this._useRoughnessFromMetallicTextureGreen = !1, this._useMetallnessFromMetallicTextureBlue = !1, this._useAmbientOcclusionFromMetallicTextureRed = !1, this._useAmbientInGrayScale = !1, this._useAutoMicroSurfaceFromReflectivityMap = !1, this._lightFalloff = on.LIGHTFALLOFF_PHYSICAL, this._useRadianceOverAlpha = !0, this._useObjectSpaceNormalMap = !1, this._useParallax = !1, this._useParallaxOcclusion = !1, this._parallaxScaleBias = 0.05, this._disableLighting = !1, this._maxSimultaneousLights = 4, this._invertNormalMapX = !1, this._invertNormalMapY = !1, this._twoSidedLighting = !1, this._alphaCutOff = 0.4, this._forceAlphaTest = !1, this._useAlphaFresnel = !1, this._useLinearAlphaFresnel = !1, this._environmentBRDFTexture = null, this._forceIrradianceInFragment = !1, this._realTimeFiltering = !1, this._realTimeFilteringQuality = 8, this._forceNormalForward = !1, this._enableSpecularAntiAliasing = !1, this._imageProcessingObserver = null, this._renderTargets = new xc(16), this._globalAmbientColor = new ze(0, 0, 0), this._unlit = !1, this._applyDecalMapAfterDetailMap = !1, this._debugMode = 0, this.debugMode = 0, this.debugLimit = -1, this.debugFactor = 1, this._cacheHasRenderTargetTextures = !1, this.brdf = new nf(this), this.clearCoat = new _u(this), this.iridescence = new lf(this), this.anisotropy = new E5(this), this.sheen = new K4(this), this.subSurface = new Zo(this), this.detailMap = new dx(this), this._attachImageProcessingConfiguration(null), this.getRenderTargetTextures = () => (this._renderTargets.reset(), Tt.ReflectionTextureEnabled && this._reflectionTexture && 
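/* The constructor above assembles the full PBR stack: intensity defaults of 1 (direct, emissive,
   environment, specular), a metallic workflow left unset (_metallic and _roughness start as null),
   microSurface 0.9, physical light falloff, and one instance of each sub-plugin (brdf, clearCoat,
   iridescence, anisotropy, sheen, subSurface, detailMap). A minimal sketch, assuming the public
   PBRMaterial subclass and its accessor names, which are not visible in this bundle:
     const mat = new BABYLON.PBRMaterial("metal", scene);
     mat.metallic = 1.0;             // setting either value makes isMetallicWorkflow() return true
     mat.roughness = 0.3;
     mat.environmentIntensity = 0.8; // backed by _environmentIntensity initialised above
*/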
this._reflectionTexture.isRenderTarget && this._renderTargets.push(this._reflectionTexture), this._eventInfo.renderTargets = this._renderTargets, this._callbackPluginEventFillRenderTargetTextures(this._eventInfo), this._renderTargets), this._environmentBRDFTexture = pN(this.getScene()), this.prePassConfiguration = new lB(); } /** * Gets a boolean indicating that current material needs to register RTT */ get hasRenderTargetTextures() { return Tt.ReflectionTextureEnabled && this._reflectionTexture && this._reflectionTexture.isRenderTarget ? !0 : this._cacheHasRenderTargetTextures; } /** * Can this material render to prepass */ get isPrePassCapable() { return !this.disableDepthWrite; } /** * Gets the name of the material class. */ getClassName() { return "PBRBaseMaterial"; } /** * Returns true if alpha blending should be disabled. */ get _disableAlphaBlending() { var e; return this._transparencyMode === on.PBRMATERIAL_OPAQUE || this._transparencyMode === on.PBRMATERIAL_ALPHATEST || ((e = this.subSurface) === null || e === void 0 ? void 0 : e.disableAlphaBlending); } /** * Specifies whether or not this material should be rendered in alpha blend mode. */ needAlphaBlending() { return this._disableAlphaBlending ? !1 : this.alpha < 1 || this._opacityTexture != null || this._shouldUseAlphaFromAlbedoTexture(); } /** * Specifies whether or not this material should be rendered in alpha test mode. */ needAlphaTesting() { var e; return this._forceAlphaTest ? !0 : !((e = this.subSurface) === null || e === void 0) && e.disableAlphaBlending ? !1 : this._hasAlphaChannel() && (this._transparencyMode == null || this._transparencyMode === on.PBRMATERIAL_ALPHATEST); } /** * Specifies whether or not the alpha value of the albedo texture should be used for alpha blending. */ _shouldUseAlphaFromAlbedoTexture() { return this._albedoTexture != null && this._albedoTexture.hasAlpha && this._useAlphaFromAlbedoTexture && this._transparencyMode !== on.PBRMATERIAL_OPAQUE; } /** * Specifies whether or not there is a usable alpha channel for transparency. */ _hasAlphaChannel() { return this._albedoTexture != null && this._albedoTexture.hasAlpha || this._opacityTexture != null; } /** * Gets the texture used for the alpha test. */ getAlphaTestTexture() { return this._albedoTexture; } /** * Specifies that the submesh is ready to be used. * @param mesh - BJS mesh. * @param subMesh - A submesh of the BJS mesh. Used to check if it is ready. * @param useInstances - Specifies that instances should be used. * @returns - boolean indicating that the submesh is ready or not. 
*/ isReadyForSubMesh(e, t, i) { var r; if (this._uniformBufferLayoutBuilt || this.buildUniformLayout(), t.effect && this.isFrozen && t.effect._wasPreviouslyReady && t.effect._wasPreviouslyUsingInstances === i) return !0; t.materialDefines || (this._callbackPluginEventGeneric(xh.GetDefineNames, this._eventInfo), t.materialDefines = new wH(this._eventInfo.defineNames)); const s = t.materialDefines; if (this._isReadyForSubMesh(t)) return !0; const n = this.getScene(), a = n.getEngine(); if (s._areTexturesDirty && (this._eventInfo.hasRenderTargetTextures = !1, this._callbackPluginEventHasRenderTargetTextures(this._eventInfo), this._cacheHasRenderTargetTextures = this._eventInfo.hasRenderTargetTextures, n.texturesEnabled)) { if (this._albedoTexture && Tt.DiffuseTextureEnabled && !this._albedoTexture.isReadyOrNotBlocking() || this._ambientTexture && Tt.AmbientTextureEnabled && !this._ambientTexture.isReadyOrNotBlocking() || this._opacityTexture && Tt.OpacityTextureEnabled && !this._opacityTexture.isReadyOrNotBlocking()) return !1; const d = this._getReflectionTexture(); if (d && Tt.ReflectionTextureEnabled) { if (!d.isReadyOrNotBlocking()) return !1; if (d.irradianceTexture) { if (!d.irradianceTexture.isReadyOrNotBlocking()) return !1; } else if (!d.sphericalPolynomial && (!((r = d.getInternalTexture()) === null || r === void 0) && r._sphericalPolynomialPromise)) return !1; } if (this._lightmapTexture && Tt.LightmapTextureEnabled && !this._lightmapTexture.isReadyOrNotBlocking() || this._emissiveTexture && Tt.EmissiveTextureEnabled && !this._emissiveTexture.isReadyOrNotBlocking()) return !1; if (Tt.SpecularTextureEnabled) { if (this._metallicTexture) { if (!this._metallicTexture.isReadyOrNotBlocking()) return !1; } else if (this._reflectivityTexture && !this._reflectivityTexture.isReadyOrNotBlocking()) return !1; if (this._metallicReflectanceTexture && !this._metallicReflectanceTexture.isReadyOrNotBlocking() || this._reflectanceTexture && !this._reflectanceTexture.isReadyOrNotBlocking() || this._microSurfaceTexture && !this._microSurfaceTexture.isReadyOrNotBlocking()) return !1; } if (a.getCaps().standardDerivatives && this._bumpTexture && Tt.BumpTextureEnabled && !this._disableBumpMap && !this._bumpTexture.isReady() || this._environmentBRDFTexture && Tt.ReflectionTextureEnabled && !this._environmentBRDFTexture.isReady()) return !1; } if (this._eventInfo.isReadyForSubMesh = !0, this._eventInfo.defines = s, this._eventInfo.subMesh = t, this._callbackPluginEventIsReadyForSubMesh(this._eventInfo), !this._eventInfo.isReadyForSubMesh || s._areImageProcessingDirty && this._imageProcessingConfiguration && !this._imageProcessingConfiguration.isReady()) return !1; !a.getCaps().standardDerivatives && !e.isVerticesDataPresent(Y.NormalKind) && (e.createNormals(!0), Ce.Warn("PBRMaterial: Normals have been created for the mesh: " + e.name)); const l = t.effect, o = s._areLightsDisposed; let u = this._prepareEffect(e, s, this.onCompiled, this.onError, i, null, t.getRenderingMesh().hasThinInstances), h = !1; if (u) if (this._onEffectCreatedObservable && (KD.effect = u, KD.subMesh = t, this._onEffectCreatedObservable.notifyObservers(KD)), this.allowShaderHotSwapping && l && !u.isReady()) { if (u = l, s.markAsUnprocessed(), h = this.isFrozen, o) return s._areLightsDisposed = !0, !1; } else n.resetCachedMaterial(), t.setEffect(u, s, this._materialContext); return !t.effect || !t.effect.isReady() ? 
!1 : (s._renderId = n.getRenderId(), t.effect._wasPreviouslyReady = !h, t.effect._wasPreviouslyUsingInstances = !!i, this._checkScenePerformancePriority(), !0); } /** * Specifies if the material uses metallic roughness workflow. * @returns boolean specifying if the material uses metallic roughness workflow. */ isMetallicWorkflow() { return !!(this._metallic != null || this._roughness != null || this._metallicTexture); } _prepareEffect(e, t, i = null, r = null, s = null, n = null, a) { if (this._prepareDefines(e, t, s, n, a), !t.isDirty) return null; t.markAsProcessed(); const o = this.getScene().getEngine(), u = new pl(); let h = 0; t.USESPHERICALINVERTEX && u.addFallback(h++, "USESPHERICALINVERTEX"), t.FOG && u.addFallback(h, "FOG"), t.SPECULARAA && u.addFallback(h, "SPECULARAA"), t.POINTSIZE && u.addFallback(h, "POINTSIZE"), t.LOGARITHMICDEPTH && u.addFallback(h, "LOGARITHMICDEPTH"), t.PARALLAX && u.addFallback(h, "PARALLAX"), t.PARALLAX_RHS && u.addFallback(h, "PARALLAX_RHS"), t.PARALLAXOCCLUSION && u.addFallback(h++, "PARALLAXOCCLUSION"), t.ENVIRONMENTBRDF && u.addFallback(h++, "ENVIRONMENTBRDF"), t.TANGENT && u.addFallback(h++, "TANGENT"), t.BUMP && u.addFallback(h++, "BUMP"), h = Ke.HandleFallbacksForShadows(t, u, this._maxSimultaneousLights, h++), t.SPECULARTERM && u.addFallback(h++, "SPECULARTERM"), t.USESPHERICALFROMREFLECTIONMAP && u.addFallback(h++, "USESPHERICALFROMREFLECTIONMAP"), t.USEIRRADIANCEMAP && u.addFallback(h++, "USEIRRADIANCEMAP"), t.LIGHTMAP && u.addFallback(h++, "LIGHTMAP"), t.NORMAL && u.addFallback(h++, "NORMAL"), t.AMBIENT && u.addFallback(h++, "AMBIENT"), t.EMISSIVE && u.addFallback(h++, "EMISSIVE"), t.VERTEXCOLOR && u.addFallback(h++, "VERTEXCOLOR"), t.MORPHTARGETS && u.addFallback(h++, "MORPHTARGETS"), t.MULTIVIEW && u.addFallback(0, "MULTIVIEW"); const d = [Y.PositionKind]; t.NORMAL && d.push(Y.NormalKind), t.TANGENT && d.push(Y.TangentKind); for (let S = 1; S <= 6; ++S) t["UV" + S] && d.push(`uv${S === 1 ? 
"" : S}`); t.VERTEXCOLOR && d.push(Y.ColorKind), Ke.PrepareAttributesForBones(d, e, t, u), Ke.PrepareAttributesForInstances(d, t), Ke.PrepareAttributesForMorphTargets(d, e, t), Ke.PrepareAttributesForBakedVertexAnimation(d, e, t); let f = "pbr"; const p = [ "world", "view", "viewProjection", "vEyePosition", "vLightsType", "vAmbientColor", "vAlbedoColor", "vReflectivityColor", "vMetallicReflectanceFactors", "vEmissiveColor", "visibility", "vReflectionColor", "vFogInfos", "vFogColor", "pointSize", "vAlbedoInfos", "vAmbientInfos", "vOpacityInfos", "vReflectionInfos", "vReflectionPosition", "vReflectionSize", "vEmissiveInfos", "vReflectivityInfos", "vReflectionFilteringInfo", "vMetallicReflectanceInfos", "vReflectanceInfos", "vMicroSurfaceSamplerInfos", "vBumpInfos", "vLightmapInfos", "mBones", "albedoMatrix", "ambientMatrix", "opacityMatrix", "reflectionMatrix", "emissiveMatrix", "reflectivityMatrix", "normalMatrix", "microSurfaceSamplerMatrix", "bumpMatrix", "lightmapMatrix", "metallicReflectanceMatrix", "reflectanceMatrix", "vLightingIntensity", "logarithmicDepthConstant", "vSphericalX", "vSphericalY", "vSphericalZ", "vSphericalXX_ZZ", "vSphericalYY_ZZ", "vSphericalZZ", "vSphericalXY", "vSphericalYZ", "vSphericalZX", "vSphericalL00", "vSphericalL1_1", "vSphericalL10", "vSphericalL11", "vSphericalL2_2", "vSphericalL2_1", "vSphericalL20", "vSphericalL21", "vSphericalL22", "vReflectionMicrosurfaceInfos", "vTangentSpaceParams", "boneTextureWidth", "vDebugMode", "morphTargetTextureInfo", "morphTargetTextureIndices" ], m = [ "albedoSampler", "reflectivitySampler", "ambientSampler", "emissiveSampler", "bumpSampler", "lightmapSampler", "opacitySampler", "reflectionSampler", "reflectionSamplerLow", "reflectionSamplerHigh", "irradianceSampler", "microSurfaceSampler", "environmentBrdfSampler", "boneSampler", "metallicReflectanceSampler", "reflectanceSampler", "morphTargets", "oitDepthSampler", "oitFrontColorSampler" ], _ = ["Material", "Scene", "Mesh"], v = { maxSimultaneousLights: this._maxSimultaneousLights, maxSimultaneousMorphTargets: t.NUM_MORPH_INFLUENCERS }; this._eventInfo.fallbacks = u, this._eventInfo.fallbackRank = h, this._eventInfo.defines = t, this._eventInfo.uniforms = p, this._eventInfo.attributes = d, this._eventInfo.samplers = m, this._eventInfo.uniformBuffersNames = _, this._eventInfo.customCode = void 0, this._eventInfo.mesh = e, this._eventInfo.indexParameters = v, this._callbackPluginEventGeneric(xh.PrepareEffect, this._eventInfo), lB.AddUniforms(p), Gc(p), Ds && (Ds.PrepareUniforms(p, t), Ds.PrepareSamplers(m, t)), Ke.PrepareUniformsAndSamplersList({ uniformsNames: p, uniformBuffersNames: _, samplers: m, defines: t, maxSimultaneousLights: this._maxSimultaneousLights }); const C = {}; this.customShaderNameResolve && (f = this.customShaderNameResolve(f, p, _, m, t, d, C)); const x = t.toString(), b = o.createEffect(f, { attributes: d, uniformsNames: p, uniformBuffersNames: _, samplers: m, defines: x, fallbacks: u, onCompiled: i, onError: r, indexParameters: v, processFinalCode: C.processFinalCode, processCodeAfterIncludes: this._eventInfo.customCode, multiTarget: t.PREPASS }, o); return this._eventInfo.customCode = void 0, b; } _prepareDefines(e, t, i = null, r = null, s = !1) { var n; const a = this.getScene(), l = a.getEngine(); Ke.PrepareDefinesForLights(a, e, t, !0, this._maxSimultaneousLights, this._disableLighting), t._needNormals = !0, Ke.PrepareDefinesForMultiview(a, t); const o = this.needAlphaBlendingForMesh(e) && this.getScene().useOrderIndependentTransparency; if 
(Ke.PrepareDefinesForPrePass(a, t, this.canRenderToMRT && !o), Ke.PrepareDefinesForOIT(a, t, o), t.METALLICWORKFLOW = this.isMetallicWorkflow(), t._areTexturesDirty) { t._needUVs = !1; for (let u = 1; u <= 6; ++u) t["MAINUV" + u] = !1; if (a.texturesEnabled) { t.ALBEDODIRECTUV = 0, t.AMBIENTDIRECTUV = 0, t.OPACITYDIRECTUV = 0, t.EMISSIVEDIRECTUV = 0, t.REFLECTIVITYDIRECTUV = 0, t.MICROSURFACEMAPDIRECTUV = 0, t.METALLIC_REFLECTANCEDIRECTUV = 0, t.REFLECTANCEDIRECTUV = 0, t.BUMPDIRECTUV = 0, t.LIGHTMAPDIRECTUV = 0, l.getCaps().textureLOD && (t.LODBASEDMICROSFURACE = !0), this._albedoTexture && Tt.DiffuseTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._albedoTexture, t, "ALBEDO"), t.GAMMAALBEDO = this._albedoTexture.gammaSpace) : t.ALBEDO = !1, this._ambientTexture && Tt.AmbientTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._ambientTexture, t, "AMBIENT"), t.AMBIENTINGRAYSCALE = this._useAmbientInGrayScale) : t.AMBIENT = !1, this._opacityTexture && Tt.OpacityTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._opacityTexture, t, "OPACITY"), t.OPACITYRGB = this._opacityTexture.getAlphaFromRGB) : t.OPACITY = !1; const u = this._getReflectionTexture(); if (u && Tt.ReflectionTextureEnabled) { switch (t.REFLECTION = !0, t.GAMMAREFLECTION = u.gammaSpace, t.RGBDREFLECTION = u.isRGBD, t.LODINREFLECTIONALPHA = u.lodLevelInAlpha, t.LINEARSPECULARREFLECTION = u.linearSpecularLOD, this.realTimeFiltering && this.realTimeFilteringQuality > 0 ? (t.NUM_SAMPLES = "" + this.realTimeFilteringQuality, l._features.needTypeSuffixInShaderConstants && (t.NUM_SAMPLES = t.NUM_SAMPLES + "u"), t.REALTIME_FILTERING = !0) : t.REALTIME_FILTERING = !1, t.INVERTCUBICMAP = u.coordinatesMode === De.INVCUBIC_MODE, t.REFLECTIONMAP_3D = u.isCube, t.REFLECTIONMAP_OPPOSITEZ = t.REFLECTIONMAP_3D && this.getScene().useRightHandedSystem ? !u.invertZ : u.invertZ, t.REFLECTIONMAP_CUBIC = !1, t.REFLECTIONMAP_EXPLICIT = !1, t.REFLECTIONMAP_PLANAR = !1, t.REFLECTIONMAP_PROJECTION = !1, t.REFLECTIONMAP_SKYBOX = !1, t.REFLECTIONMAP_SPHERICAL = !1, t.REFLECTIONMAP_EQUIRECTANGULAR = !1, t.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, t.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, u.coordinatesMode) { case De.EXPLICIT_MODE: t.REFLECTIONMAP_EXPLICIT = !0; break; case De.PLANAR_MODE: t.REFLECTIONMAP_PLANAR = !0; break; case De.PROJECTION_MODE: t.REFLECTIONMAP_PROJECTION = !0; break; case De.SKYBOX_MODE: t.REFLECTIONMAP_SKYBOX = !0; break; case De.SPHERICAL_MODE: t.REFLECTIONMAP_SPHERICAL = !0; break; case De.EQUIRECTANGULAR_MODE: t.REFLECTIONMAP_EQUIRECTANGULAR = !0; break; case De.FIXED_EQUIRECTANGULAR_MODE: t.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !0; break; case De.FIXED_EQUIRECTANGULAR_MIRRORED_MODE: t.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !0; break; case De.CUBIC_MODE: case De.INVCUBIC_MODE: default: t.REFLECTIONMAP_CUBIC = !0, t.USE_LOCAL_REFLECTIONMAP_CUBIC = !!u.boundingBoxSize; break; } u.coordinatesMode !== De.SKYBOX_MODE && (u.irradianceTexture ? (t.USEIRRADIANCEMAP = !0, t.USESPHERICALFROMREFLECTIONMAP = !1) : u.isCube && (t.USESPHERICALFROMREFLECTIONMAP = !0, t.USEIRRADIANCEMAP = !1, this._forceIrradianceInFragment || this.realTimeFiltering || this._twoSidedLighting || l.getCaps().maxVaryingVectors <= 8 ? 
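/* The switch above maps the reflection texture's coordinatesMode onto exactly one REFLECTIONMAP_*
   define (EXPLICIT, PLANAR, PROJECTION, SKYBOX, SPHERICAL, the equirectangular variants, or CUBIC
   as the default, with USE_LOCAL_REFLECTIONMAP_CUBIC when a boundingBoxSize is set). The ternary
   evaluated here then decides where spherical-harmonics irradiance is computed: it stays in the
   fragment shader (USESPHERICALINVERTEX = false) when irradiance is forced into the fragment,
   realtime filtering is on, two-sided lighting is used, or the device has 8 or fewer varying
   vectors; otherwise it moves to the vertex shader. A hedged one-liner, assuming the public
   property backing _forceIrradianceInFragment:
     mat.forceIrradianceInFragment = true;
*/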
t.USESPHERICALINVERTEX = !1 : t.USESPHERICALINVERTEX = !0)); } else t.REFLECTION = !1, t.REFLECTIONMAP_3D = !1, t.REFLECTIONMAP_SPHERICAL = !1, t.REFLECTIONMAP_PLANAR = !1, t.REFLECTIONMAP_CUBIC = !1, t.USE_LOCAL_REFLECTIONMAP_CUBIC = !1, t.REFLECTIONMAP_PROJECTION = !1, t.REFLECTIONMAP_SKYBOX = !1, t.REFLECTIONMAP_EXPLICIT = !1, t.REFLECTIONMAP_EQUIRECTANGULAR = !1, t.REFLECTIONMAP_EQUIRECTANGULAR_FIXED = !1, t.REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = !1, t.INVERTCUBICMAP = !1, t.USESPHERICALFROMREFLECTIONMAP = !1, t.USEIRRADIANCEMAP = !1, t.USESPHERICALINVERTEX = !1, t.REFLECTIONMAP_OPPOSITEZ = !1, t.LODINREFLECTIONALPHA = !1, t.GAMMAREFLECTION = !1, t.RGBDREFLECTION = !1, t.LINEARSPECULARREFLECTION = !1; if (this._lightmapTexture && Tt.LightmapTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._lightmapTexture, t, "LIGHTMAP"), t.USELIGHTMAPASSHADOWMAP = this._useLightmapAsShadowmap, t.GAMMALIGHTMAP = this._lightmapTexture.gammaSpace, t.RGBDLIGHTMAP = this._lightmapTexture.isRGBD) : t.LIGHTMAP = !1, this._emissiveTexture && Tt.EmissiveTextureEnabled ? (Ke.PrepareDefinesForMergedUV(this._emissiveTexture, t, "EMISSIVE"), t.GAMMAEMISSIVE = this._emissiveTexture.gammaSpace) : t.EMISSIVE = !1, Tt.SpecularTextureEnabled) { if (this._metallicTexture ? (Ke.PrepareDefinesForMergedUV(this._metallicTexture, t, "REFLECTIVITY"), t.ROUGHNESSSTOREINMETALMAPALPHA = this._useRoughnessFromMetallicTextureAlpha, t.ROUGHNESSSTOREINMETALMAPGREEN = !this._useRoughnessFromMetallicTextureAlpha && this._useRoughnessFromMetallicTextureGreen, t.METALLNESSSTOREINMETALMAPBLUE = this._useMetallnessFromMetallicTextureBlue, t.AOSTOREINMETALMAPRED = this._useAmbientOcclusionFromMetallicTextureRed, t.REFLECTIVITY_GAMMA = !1) : this._reflectivityTexture ? (Ke.PrepareDefinesForMergedUV(this._reflectivityTexture, t, "REFLECTIVITY"), t.MICROSURFACEFROMREFLECTIVITYMAP = this._useMicroSurfaceFromReflectivityMapAlpha, t.MICROSURFACEAUTOMATIC = this._useAutoMicroSurfaceFromReflectivityMap, t.REFLECTIVITY_GAMMA = this._reflectivityTexture.gammaSpace) : t.REFLECTIVITY = !1, this._metallicReflectanceTexture || this._reflectanceTexture) { const h = this._metallicReflectanceTexture !== null && this._metallicReflectanceTexture._texture === ((n = this._reflectanceTexture) === null || n === void 0 ? void 0 : n._texture) && this._metallicReflectanceTexture.checkTransformsAreIdentical(this._reflectanceTexture); t.METALLIC_REFLECTANCE_USE_ALPHA_ONLY = this._useOnlyMetallicFromMetallicReflectanceTexture && !h, this._metallicReflectanceTexture ? (Ke.PrepareDefinesForMergedUV(this._metallicReflectanceTexture, t, "METALLIC_REFLECTANCE"), t.METALLIC_REFLECTANCE_GAMMA = this._metallicReflectanceTexture.gammaSpace) : t.METALLIC_REFLECTANCE = !1, this._reflectanceTexture && !h && (!this._metallicReflectanceTexture || this._metallicReflectanceTexture && this._useOnlyMetallicFromMetallicReflectanceTexture) ? (Ke.PrepareDefinesForMergedUV(this._reflectanceTexture, t, "REFLECTANCE"), t.REFLECTANCE_GAMMA = this._reflectanceTexture.gammaSpace) : t.REFLECTANCE = !1; } else t.METALLIC_REFLECTANCE = !1, t.REFLECTANCE = !1; this._microSurfaceTexture ? Ke.PrepareDefinesForMergedUV(this._microSurfaceTexture, t, "MICROSURFACEMAP") : t.MICROSURFACEMAP = !1; } else t.REFLECTIVITY = !1, t.MICROSURFACEMAP = !1; l.getCaps().standardDerivatives && this._bumpTexture && Tt.BumpTextureEnabled && !this._disableBumpMap ? (Ke.PrepareDefinesForMergedUV(this._bumpTexture, t, "BUMP"), this._useParallax && this._albedoTexture && Tt.DiffuseTextureEnabled ? 
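/* Hedged note on the metallic-texture channel flags handled above: with a glTF-style ORM texture
   (occlusion in R, roughness in G, metalness in B), the matching configuration, assuming the
   public property names behind the _use* fields initialised in the constructor, would be:
     mat.useAmbientOcclusionFromMetallicTextureRed = true;
     mat.useRoughnessFromMetallicTextureAlpha = false;   // the constructor default is true (alpha channel)
     mat.useRoughnessFromMetallicTextureGreen = true;
     mat.useMetallnessFromMetallicTextureBlue = true;
*/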
(t.PARALLAX = !0, t.PARALLAX_RHS = a.useRightHandedSystem, t.PARALLAXOCCLUSION = !!this._useParallaxOcclusion) : t.PARALLAX = !1, t.OBJECTSPACE_NORMALMAP = this._useObjectSpaceNormalMap) : (t.BUMP = !1, t.PARALLAX = !1, t.PARALLAX_RHS = !1, t.PARALLAXOCCLUSION = !1, t.OBJECTSPACE_NORMALMAP = !1), this._environmentBRDFTexture && Tt.ReflectionTextureEnabled ? (t.ENVIRONMENTBRDF = !0, t.ENVIRONMENTBRDF_RGBD = this._environmentBRDFTexture.isRGBD) : (t.ENVIRONMENTBRDF = !1, t.ENVIRONMENTBRDF_RGBD = !1), this._shouldUseAlphaFromAlbedoTexture() ? t.ALPHAFROMALBEDO = !0 : t.ALPHAFROMALBEDO = !1; } t.SPECULAROVERALPHA = this._useSpecularOverAlpha, this._lightFalloff === on.LIGHTFALLOFF_STANDARD ? (t.USEPHYSICALLIGHTFALLOFF = !1, t.USEGLTFLIGHTFALLOFF = !1) : this._lightFalloff === on.LIGHTFALLOFF_GLTF ? (t.USEPHYSICALLIGHTFALLOFF = !1, t.USEGLTFLIGHTFALLOFF = !0) : (t.USEPHYSICALLIGHTFALLOFF = !0, t.USEGLTFLIGHTFALLOFF = !1), t.RADIANCEOVERALPHA = this._useRadianceOverAlpha, !this.backFaceCulling && this._twoSidedLighting ? t.TWOSIDEDLIGHTING = !0 : t.TWOSIDEDLIGHTING = !1, t.SPECULARAA = l.getCaps().standardDerivatives && this._enableSpecularAntiAliasing; } (t._areTexturesDirty || t._areMiscDirty) && (t.ALPHATESTVALUE = `${this._alphaCutOff}${this._alphaCutOff % 1 === 0 ? "." : ""}`, t.PREMULTIPLYALPHA = this.alphaMode === 7 || this.alphaMode === 8, t.ALPHABLEND = this.needAlphaBlendingForMesh(e), t.ALPHAFRESNEL = this._useAlphaFresnel || this._useLinearAlphaFresnel, t.LINEARALPHAFRESNEL = this._useLinearAlphaFresnel), t._areImageProcessingDirty && this._imageProcessingConfiguration && this._imageProcessingConfiguration.prepareDefines(t), t.FORCENORMALFORWARD = this._forceNormalForward, t.RADIANCEOCCLUSION = this._useRadianceOcclusion, t.HORIZONOCCLUSION = this._useHorizonOcclusion, t._areMiscDirty && (Ke.PrepareDefinesForMisc(e, a, this._useLogarithmicDepth, this.pointsCloud, this.fogEnabled, this._shouldTurnAlphaTestOn(e) || this._forceAlphaTest, t, this._applyDecalMapAfterDetailMap), t.UNLIT = this._unlit || (this.pointsCloud || this.wireframe) && !e.isVerticesDataPresent(Y.NormalKind), t.DEBUGMODE = this._debugMode), Ke.PrepareDefinesForFrameBoundValues(a, l, this, t, !!i, r, s), this._eventInfo.defines = t, this._eventInfo.mesh = e, this._callbackPluginEventPrepareDefinesBeforeAttributes(this._eventInfo), Ke.PrepareDefinesForAttributes(e, t, !0, !0, !0, this._transparencyMode !== on.PBRMATERIAL_OPAQUE), this._callbackPluginEventPrepareDefines(this._eventInfo); } /** * Force shader compilation * @param mesh * @param onCompiled * @param options */ forceCompilation(e, t, i) { const r = Object.assign({ clipPlane: !1, useInstances: !1 }, i); this._uniformBufferLayoutBuilt || this.buildUniformLayout(), this._callbackPluginEventGeneric(xh.GetDefineNames, this._eventInfo); const s = new wH(this._eventInfo.defineNames), n = this._prepareEffect(e, s, void 0, void 0, r.useInstances, r.clipPlane, e.hasThinInstances); this._onEffectCreatedObservable && (KD.effect = n, KD.subMesh = null, this._onEffectCreatedObservable.notifyObservers(KD)), n.isReady() ? t && t(this) : n.onCompileObservable.add(() => { t && t(this); }); } /** * Initializes the uniform buffer layout for the shader. 
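* A hedged usage sketch for the forceCompilation helper defined just above (public Babylon.js
* names assumed; the mesh and BABYLON namespace are not part of this bundle). The options bag
* accepts the clipPlane and useInstances flags merged by Object.assign above, and the callback
* receives the material itself:
*   mat.forceCompilation(mesh, (material) => console.log("PBR effect ready for", material.name), { useInstances: true });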
*/ buildUniformLayout() { const e = this._uniformBuffer; e.addUniform("vAlbedoInfos", 2), e.addUniform("vAmbientInfos", 4), e.addUniform("vOpacityInfos", 2), e.addUniform("vEmissiveInfos", 2), e.addUniform("vLightmapInfos", 2), e.addUniform("vReflectivityInfos", 3), e.addUniform("vMicroSurfaceSamplerInfos", 2), e.addUniform("vReflectionInfos", 2), e.addUniform("vReflectionFilteringInfo", 2), e.addUniform("vReflectionPosition", 3), e.addUniform("vReflectionSize", 3), e.addUniform("vBumpInfos", 3), e.addUniform("albedoMatrix", 16), e.addUniform("ambientMatrix", 16), e.addUniform("opacityMatrix", 16), e.addUniform("emissiveMatrix", 16), e.addUniform("lightmapMatrix", 16), e.addUniform("reflectivityMatrix", 16), e.addUniform("microSurfaceSamplerMatrix", 16), e.addUniform("bumpMatrix", 16), e.addUniform("vTangentSpaceParams", 2), e.addUniform("reflectionMatrix", 16), e.addUniform("vReflectionColor", 3), e.addUniform("vAlbedoColor", 4), e.addUniform("vLightingIntensity", 4), e.addUniform("vReflectionMicrosurfaceInfos", 3), e.addUniform("pointSize", 1), e.addUniform("vReflectivityColor", 4), e.addUniform("vEmissiveColor", 3), e.addUniform("vAmbientColor", 3), e.addUniform("vDebugMode", 2), e.addUniform("vMetallicReflectanceFactors", 4), e.addUniform("vMetallicReflectanceInfos", 2), e.addUniform("metallicReflectanceMatrix", 16), e.addUniform("vReflectanceInfos", 2), e.addUniform("reflectanceMatrix", 16), e.addUniform("vSphericalL00", 3), e.addUniform("vSphericalL1_1", 3), e.addUniform("vSphericalL10", 3), e.addUniform("vSphericalL11", 3), e.addUniform("vSphericalL2_2", 3), e.addUniform("vSphericalL2_1", 3), e.addUniform("vSphericalL20", 3), e.addUniform("vSphericalL21", 3), e.addUniform("vSphericalL22", 3), e.addUniform("vSphericalX", 3), e.addUniform("vSphericalY", 3), e.addUniform("vSphericalZ", 3), e.addUniform("vSphericalXX_ZZ", 3), e.addUniform("vSphericalYY_ZZ", 3), e.addUniform("vSphericalZZ", 3), e.addUniform("vSphericalXY", 3), e.addUniform("vSphericalYZ", 3), e.addUniform("vSphericalZX", 3), super.buildUniformLayout(); } /** * Binds the submesh data. * @param world - The world matrix. * @param mesh - The BJS mesh. * @param subMesh - A submesh of the BJS mesh. 
*/ bindForSubMesh(e, t, i) { var r, s, n, a; const l = this.getScene(), o = i.materialDefines; if (!o) return; const u = i.effect; if (!u) return; this._activeEffect = u, t.getMeshUniformBuffer().bindToEffect(u, "Mesh"), t.transferToEffect(e); const h = l.getEngine(); this._uniformBuffer.bindToEffect(u, "Material"), this.prePassConfiguration.bindForSubMesh(this._activeEffect, l, t, e, this.isFrozen), this._eventInfo.subMesh = i, this._callbackPluginEventHardBindForSubMesh(this._eventInfo), o.OBJECTSPACE_NORMALMAP && (e.toNormalMatrix(this._normalMatrix), this.bindOnlyNormalMatrix(this._normalMatrix)); const d = u._forceRebindOnNextCall || this._mustRebind(l, u, t.visibility); Ke.BindBonesParameters(t, this._activeEffect, this.prePassConfiguration); let f = null; const p = this._uniformBuffer; if (d) { if (this.bindViewProjection(u), f = this._getReflectionTexture(), !p.useUbo || !this.isFrozen || !p.isSync || u._forceRebindOnNextCall) { if (l.texturesEnabled) { if (this._albedoTexture && Tt.DiffuseTextureEnabled && (p.updateFloat2("vAlbedoInfos", this._albedoTexture.coordinatesIndex, this._albedoTexture.level), Ke.BindTextureMatrix(this._albedoTexture, p, "albedo")), this._ambientTexture && Tt.AmbientTextureEnabled && (p.updateFloat4("vAmbientInfos", this._ambientTexture.coordinatesIndex, this._ambientTexture.level, this._ambientTextureStrength, this._ambientTextureImpactOnAnalyticalLights), Ke.BindTextureMatrix(this._ambientTexture, p, "ambient")), this._opacityTexture && Tt.OpacityTextureEnabled && (p.updateFloat2("vOpacityInfos", this._opacityTexture.coordinatesIndex, this._opacityTexture.level), Ke.BindTextureMatrix(this._opacityTexture, p, "opacity")), f && Tt.ReflectionTextureEnabled) { if (p.updateMatrix("reflectionMatrix", f.getReflectionTextureMatrix()), p.updateFloat2("vReflectionInfos", f.level, 0), f.boundingBoxSize) { const m = f; p.updateVector3("vReflectionPosition", m.boundingBoxPosition), p.updateVector3("vReflectionSize", m.boundingBoxSize); } if (this.realTimeFiltering) { const m = f.getSize().width; p.updateFloat2("vReflectionFilteringInfo", m, yt.Log2(m)); } if (!o.USEIRRADIANCEMAP) { const m = f.sphericalPolynomial; if (o.USESPHERICALFROMREFLECTIONMAP && m) if (o.SPHERICAL_HARMONICS) { const _ = m.preScaledHarmonics; p.updateVector3("vSphericalL00", _.l00), p.updateVector3("vSphericalL1_1", _.l1_1), p.updateVector3("vSphericalL10", _.l10), p.updateVector3("vSphericalL11", _.l11), p.updateVector3("vSphericalL2_2", _.l2_2), p.updateVector3("vSphericalL2_1", _.l2_1), p.updateVector3("vSphericalL20", _.l20), p.updateVector3("vSphericalL21", _.l21), p.updateVector3("vSphericalL22", _.l22); } else p.updateFloat3("vSphericalX", m.x.x, m.x.y, m.x.z), p.updateFloat3("vSphericalY", m.y.x, m.y.y, m.y.z), p.updateFloat3("vSphericalZ", m.z.x, m.z.y, m.z.z), p.updateFloat3("vSphericalXX_ZZ", m.xx.x - m.zz.x, m.xx.y - m.zz.y, m.xx.z - m.zz.z), p.updateFloat3("vSphericalYY_ZZ", m.yy.x - m.zz.x, m.yy.y - m.zz.y, m.yy.z - m.zz.z), p.updateFloat3("vSphericalZZ", m.zz.x, m.zz.y, m.zz.z), p.updateFloat3("vSphericalXY", m.xy.x, m.xy.y, m.xy.z), p.updateFloat3("vSphericalYZ", m.yz.x, m.yz.y, m.yz.z), p.updateFloat3("vSphericalZX", m.zx.x, m.zx.y, m.zx.z); } p.updateFloat3("vReflectionMicrosurfaceInfos", f.getSize().width, f.lodGenerationScale, f.lodGenerationOffset); } this._emissiveTexture && Tt.EmissiveTextureEnabled && (p.updateFloat2("vEmissiveInfos", this._emissiveTexture.coordinatesIndex, this._emissiveTexture.level), Ke.BindTextureMatrix(this._emissiveTexture, p, "emissive")), 
this._lightmapTexture && Tt.LightmapTextureEnabled && (p.updateFloat2("vLightmapInfos", this._lightmapTexture.coordinatesIndex, this._lightmapTexture.level), Ke.BindTextureMatrix(this._lightmapTexture, p, "lightmap")), Tt.SpecularTextureEnabled && (this._metallicTexture ? (p.updateFloat3("vReflectivityInfos", this._metallicTexture.coordinatesIndex, this._metallicTexture.level, this._ambientTextureStrength), Ke.BindTextureMatrix(this._metallicTexture, p, "reflectivity")) : this._reflectivityTexture && (p.updateFloat3("vReflectivityInfos", this._reflectivityTexture.coordinatesIndex, this._reflectivityTexture.level, 1), Ke.BindTextureMatrix(this._reflectivityTexture, p, "reflectivity")), this._metallicReflectanceTexture && (p.updateFloat2("vMetallicReflectanceInfos", this._metallicReflectanceTexture.coordinatesIndex, this._metallicReflectanceTexture.level), Ke.BindTextureMatrix(this._metallicReflectanceTexture, p, "metallicReflectance")), this._reflectanceTexture && o.REFLECTANCE && (p.updateFloat2("vReflectanceInfos", this._reflectanceTexture.coordinatesIndex, this._reflectanceTexture.level), Ke.BindTextureMatrix(this._reflectanceTexture, p, "reflectance")), this._microSurfaceTexture && (p.updateFloat2("vMicroSurfaceSamplerInfos", this._microSurfaceTexture.coordinatesIndex, this._microSurfaceTexture.level), Ke.BindTextureMatrix(this._microSurfaceTexture, p, "microSurfaceSampler"))), this._bumpTexture && h.getCaps().standardDerivatives && Tt.BumpTextureEnabled && !this._disableBumpMap && (p.updateFloat3("vBumpInfos", this._bumpTexture.coordinatesIndex, this._bumpTexture.level, this._parallaxScaleBias), Ke.BindTextureMatrix(this._bumpTexture, p, "bump"), l._mirroredCameraPosition ? p.updateFloat2("vTangentSpaceParams", this._invertNormalMapX ? 1 : -1, this._invertNormalMapY ? 1 : -1) : p.updateFloat2("vTangentSpaceParams", this._invertNormalMapX ? -1 : 1, this._invertNormalMapY ? -1 : 1)); } if (this.pointsCloud && p.updateFloat("pointSize", this.pointSize), o.METALLICWORKFLOW) { mn.Color3[0].r = this._metallic === void 0 || this._metallic === null ? 1 : this._metallic, mn.Color3[0].g = this._roughness === void 0 || this._roughness === null ? 1 : this._roughness, p.updateColor4("vReflectivityColor", mn.Color3[0], 1); const m = (s = (r = this.subSurface) === null || r === void 0 ? void 0 : r._indexOfRefraction) !== null && s !== void 0 ? s : 1.5, _ = 1, v = Math.pow((m - _) / (m + _), 2); this._metallicReflectanceColor.scaleToRef(v * this._metallicF0Factor, mn.Color3[0]); const C = this._metallicF0Factor; p.updateColor4("vMetallicReflectanceFactors", mn.Color3[0], C); } else p.updateColor4("vReflectivityColor", this._reflectivityColor, this._microSurface); p.updateColor3("vEmissiveColor", Tt.EmissiveTextureEnabled ? this._emissiveColor : ze.BlackReadOnly), p.updateColor3("vReflectionColor", this._reflectionColor), !o.SS_REFRACTION && (!((n = this.subSurface) === null || n === void 0) && n._linkRefractionWithTransparency) ? 
p.updateColor4("vAlbedoColor", this._albedoColor, 1) : p.updateColor4("vAlbedoColor", this._albedoColor, this.alpha), this._lightingInfos.x = this._directIntensity, this._lightingInfos.y = this._emissiveIntensity, this._lightingInfos.z = this._environmentIntensity * l.environmentIntensity, this._lightingInfos.w = this._specularIntensity, p.updateVector4("vLightingIntensity", this._lightingInfos), l.ambientColor.multiplyToRef(this._ambientColor, this._globalAmbientColor), p.updateColor3("vAmbientColor", this._globalAmbientColor), p.updateFloat2("vDebugMode", this.debugLimit, this.debugFactor); } l.texturesEnabled && (this._albedoTexture && Tt.DiffuseTextureEnabled && p.setTexture("albedoSampler", this._albedoTexture), this._ambientTexture && Tt.AmbientTextureEnabled && p.setTexture("ambientSampler", this._ambientTexture), this._opacityTexture && Tt.OpacityTextureEnabled && p.setTexture("opacitySampler", this._opacityTexture), f && Tt.ReflectionTextureEnabled && (o.LODBASEDMICROSFURACE ? p.setTexture("reflectionSampler", f) : (p.setTexture("reflectionSampler", f._lodTextureMid || f), p.setTexture("reflectionSamplerLow", f._lodTextureLow || f), p.setTexture("reflectionSamplerHigh", f._lodTextureHigh || f)), o.USEIRRADIANCEMAP && p.setTexture("irradianceSampler", f.irradianceTexture)), o.ENVIRONMENTBRDF && p.setTexture("environmentBrdfSampler", this._environmentBRDFTexture), this._emissiveTexture && Tt.EmissiveTextureEnabled && p.setTexture("emissiveSampler", this._emissiveTexture), this._lightmapTexture && Tt.LightmapTextureEnabled && p.setTexture("lightmapSampler", this._lightmapTexture), Tt.SpecularTextureEnabled && (this._metallicTexture ? p.setTexture("reflectivitySampler", this._metallicTexture) : this._reflectivityTexture && p.setTexture("reflectivitySampler", this._reflectivityTexture), this._metallicReflectanceTexture && p.setTexture("metallicReflectanceSampler", this._metallicReflectanceTexture), this._reflectanceTexture && o.REFLECTANCE && p.setTexture("reflectanceSampler", this._reflectanceTexture), this._microSurfaceTexture && p.setTexture("microSurfaceSampler", this._microSurfaceTexture)), this._bumpTexture && h.getCaps().standardDerivatives && Tt.BumpTextureEnabled && !this._disableBumpMap && p.setTexture("bumpSampler", this._bumpTexture)), this.getScene().useOrderIndependentTransparency && this.needAlphaBlendingForMesh(t) && this.getScene().depthPeelingRenderer.bind(u), this._eventInfo.subMesh = i, this._callbackPluginEventBindForSubMesh(this._eventInfo), Ec(this._activeEffect, this, l), this.bindEyePosition(u); } else l.getEngine()._features.needToAlwaysBindUniformBuffers && (this._needToBindSceneUbo = !0); (d || !this.isFrozen) && (l.lightsEnabled && !this._disableLighting && Ke.BindLights(l, t, this._activeEffect, o, this._maxSimultaneousLights), (l.fogEnabled && t.applyFog && l.fogMode !== ii.FOGMODE_NONE || f || this.subSurface.refractionTexture || t.receiveShadows || o.PREPASS) && this.bindView(u), Ke.BindFogParameters(l, t, this._activeEffect, !0), o.NUM_MORPH_INFLUENCERS && Ke.BindMorphTargetParameters(t, this._activeEffect), o.BAKED_VERTEX_ANIMATION_TEXTURE && ((a = t.bakedVertexAnimationManager) === null || a === void 0 || a.bind(u, o.INSTANCES)), this._imageProcessingConfiguration.bind(this._activeEffect), Ke.BindLogDepth(o, this._activeEffect, l)), this._afterBind(t, this._activeEffect), p.update(); } /** * Returns the animatable textures. * If material have animatable metallic texture, then reflectivity texture will not be returned, even if it has animations. 
* @returns - Array of animatable textures. */ getAnimatables() { const e = super.getAnimatables(); return this._albedoTexture && this._albedoTexture.animations && this._albedoTexture.animations.length > 0 && e.push(this._albedoTexture), this._ambientTexture && this._ambientTexture.animations && this._ambientTexture.animations.length > 0 && e.push(this._ambientTexture), this._opacityTexture && this._opacityTexture.animations && this._opacityTexture.animations.length > 0 && e.push(this._opacityTexture), this._reflectionTexture && this._reflectionTexture.animations && this._reflectionTexture.animations.length > 0 && e.push(this._reflectionTexture), this._emissiveTexture && this._emissiveTexture.animations && this._emissiveTexture.animations.length > 0 && e.push(this._emissiveTexture), this._metallicTexture && this._metallicTexture.animations && this._metallicTexture.animations.length > 0 ? e.push(this._metallicTexture) : this._reflectivityTexture && this._reflectivityTexture.animations && this._reflectivityTexture.animations.length > 0 && e.push(this._reflectivityTexture), this._bumpTexture && this._bumpTexture.animations && this._bumpTexture.animations.length > 0 && e.push(this._bumpTexture), this._lightmapTexture && this._lightmapTexture.animations && this._lightmapTexture.animations.length > 0 && e.push(this._lightmapTexture), this._metallicReflectanceTexture && this._metallicReflectanceTexture.animations && this._metallicReflectanceTexture.animations.length > 0 && e.push(this._metallicReflectanceTexture), this._reflectanceTexture && this._reflectanceTexture.animations && this._reflectanceTexture.animations.length > 0 && e.push(this._reflectanceTexture), this._microSurfaceTexture && this._microSurfaceTexture.animations && this._microSurfaceTexture.animations.length > 0 && e.push(this._microSurfaceTexture), e; } /** * Returns the texture used for reflections. * @returns - Reflection texture if present. Otherwise, returns the environment texture. */ _getReflectionTexture() { return this._reflectionTexture ? this._reflectionTexture : this.getScene().environmentTexture; } /** * Returns an array of the actively used textures. * @returns - Array of BaseTextures */ getActiveTextures() { const e = super.getActiveTextures(); return this._albedoTexture && e.push(this._albedoTexture), this._ambientTexture && e.push(this._ambientTexture), this._opacityTexture && e.push(this._opacityTexture), this._reflectionTexture && e.push(this._reflectionTexture), this._emissiveTexture && e.push(this._emissiveTexture), this._reflectivityTexture && e.push(this._reflectivityTexture), this._metallicTexture && e.push(this._metallicTexture), this._metallicReflectanceTexture && e.push(this._metallicReflectanceTexture), this._reflectanceTexture && e.push(this._reflectanceTexture), this._microSurfaceTexture && e.push(this._microSurfaceTexture), this._bumpTexture && e.push(this._bumpTexture), this._lightmapTexture && e.push(this._lightmapTexture), e; } /** * Checks to see if a texture is used in the material. * @param texture - Base texture to use. * @returns - Boolean specifying if a texture is used in the material. 
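* A hedged usage sketch (names are illustrative):
* @example
* // Only dispose a texture once this material no longer references it
* if (!material.hasTexture(oldTexture)) {
*     oldTexture.dispose();
* }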
*/ hasTexture(e) { return !!(super.hasTexture(e) || this._albedoTexture === e || this._ambientTexture === e || this._opacityTexture === e || this._reflectionTexture === e || this._emissiveTexture === e || this._reflectivityTexture === e || this._metallicTexture === e || this._metallicReflectanceTexture === e || this._reflectanceTexture === e || this._microSurfaceTexture === e || this._bumpTexture === e || this._lightmapTexture === e); } /** * Sets the required values to the prepass renderer. * It can't be sets when subsurface scattering of this material is disabled. * When scene have ability to enable subsurface prepass effect, it will enable. */ setPrePassRenderer() { var e; if (!(!((e = this.subSurface) === null || e === void 0) && e.isScatteringEnabled)) return !1; const t = this.getScene().enableSubSurfaceForPrePass(); return t && (t.enabled = !0), !0; } /** * Disposes the resources of the material. * @param forceDisposeEffect - Forces the disposal of effects. * @param forceDisposeTextures - Forces the disposal of all textures. */ dispose(e, t) { var i, r, s, n, a, l, o, u, h, d, f, p; t && (this._environmentBRDFTexture && this.getScene().environmentBRDFTexture !== this._environmentBRDFTexture && this._environmentBRDFTexture.dispose(), (i = this._albedoTexture) === null || i === void 0 || i.dispose(), (r = this._ambientTexture) === null || r === void 0 || r.dispose(), (s = this._opacityTexture) === null || s === void 0 || s.dispose(), (n = this._reflectionTexture) === null || n === void 0 || n.dispose(), (a = this._emissiveTexture) === null || a === void 0 || a.dispose(), (l = this._metallicTexture) === null || l === void 0 || l.dispose(), (o = this._reflectivityTexture) === null || o === void 0 || o.dispose(), (u = this._bumpTexture) === null || u === void 0 || u.dispose(), (h = this._lightmapTexture) === null || h === void 0 || h.dispose(), (d = this._metallicReflectanceTexture) === null || d === void 0 || d.dispose(), (f = this._reflectanceTexture) === null || f === void 0 || f.dispose(), (p = this._microSurfaceTexture) === null || p === void 0 || p.dispose()), this._renderTargets.dispose(), this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), super.dispose(e, t); } } on.PBRMATERIAL_OPAQUE = At.MATERIAL_OPAQUE; on.PBRMATERIAL_ALPHATEST = At.MATERIAL_ALPHATEST; on.PBRMATERIAL_ALPHABLEND = At.MATERIAL_ALPHABLEND; on.PBRMATERIAL_ALPHATESTANDBLEND = At.MATERIAL_ALPHATESTANDBLEND; on.DEFAULT_AO_ON_ANALYTICAL_LIGHTS = 0; on.LIGHTFALLOFF_PHYSICAL = 0; on.LIGHTFALLOFF_GLTF = 1; on.LIGHTFALLOFF_STANDARD = 2; F([ $G() ], on.prototype, "_imageProcessingConfiguration", void 0); F([ ct("_markAllSubMeshesAsMiscDirty") ], on.prototype, "debugMode", void 0); class Ri extends on { /** * Stores the refracted light information in a texture. */ get refractionTexture() { return this.subSurface.refractionTexture; } set refractionTexture(e) { this.subSurface.refractionTexture = e, e ? this.subSurface.isRefractionEnabled = !0 : this.subSurface.linkRefractionWithTransparency || (this.subSurface.isRefractionEnabled = !1); } /** * Index of refraction of the material base layer. * https://en.wikipedia.org/wiki/List_of_refractive_indices * * This does not only impact refraction but also the Base F0 of Dielectric Materials. 
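* For instance, with the default indexOfRefraction of 1.5 against air (IOR 1), the rule below gives
* F0 = ((1.5 - 1) / (1.5 + 1))^2 = 0.04, the base reflectance commonly assumed for dielectrics.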
* * From dielectric fresnel rules: F0 = square((iorT - iorI) / (iorT + iorI)) */ get indexOfRefraction() { return this.subSurface.indexOfRefraction; } set indexOfRefraction(e) { this.subSurface.indexOfRefraction = e; } /** * Controls if refraction needs to be inverted on Y. This could be useful for procedural texture. */ get invertRefractionY() { return this.subSurface.invertRefractionY; } set invertRefractionY(e) { this.subSurface.invertRefractionY = e; } /** * This parameters will make the material used its opacity to control how much it is refracting against not. * Materials half opaque for instance using refraction could benefit from this control. */ get linkRefractionWithTransparency() { return this.subSurface.linkRefractionWithTransparency; } set linkRefractionWithTransparency(e) { this.subSurface.linkRefractionWithTransparency = e, e && (this.subSurface.isRefractionEnabled = !0); } /** * BJS is using an hardcoded light falloff based on a manually sets up range. * In PBR, one way to represents the falloff is to use the inverse squared root algorithm. * This parameter can help you switch back to the BJS mode in order to create scenes using both materials. */ get usePhysicalLightFalloff() { return this._lightFalloff === on.LIGHTFALLOFF_PHYSICAL; } /** * BJS is using an hardcoded light falloff based on a manually sets up range. * In PBR, one way to represents the falloff is to use the inverse squared root algorithm. * This parameter can help you switch back to the BJS mode in order to create scenes using both materials. */ set usePhysicalLightFalloff(e) { e !== this.usePhysicalLightFalloff && (this._markAllSubMeshesAsTexturesDirty(), e ? this._lightFalloff = on.LIGHTFALLOFF_PHYSICAL : this._lightFalloff = on.LIGHTFALLOFF_STANDARD); } /** * In order to support the falloff compatibility with gltf, a special mode has been added * to reproduce the gltf light falloff. */ get useGLTFLightFalloff() { return this._lightFalloff === on.LIGHTFALLOFF_GLTF; } /** * In order to support the falloff compatibility with gltf, a special mode has been added * to reproduce the gltf light falloff. */ set useGLTFLightFalloff(e) { e !== this.useGLTFLightFalloff && (this._markAllSubMeshesAsTexturesDirty(), e ? this._lightFalloff = on.LIGHTFALLOFF_GLTF : this._lightFalloff = on.LIGHTFALLOFF_STANDARD); } /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. * * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { this._attachImageProcessingConfiguration(e), this._markAllSubMeshesAsTexturesDirty(); } /** * Gets whether the color curves effect is enabled. */ get cameraColorCurvesEnabled() { return this.imageProcessingConfiguration.colorCurvesEnabled; } /** * Sets whether the color curves effect is enabled. */ set cameraColorCurvesEnabled(e) { this.imageProcessingConfiguration.colorCurvesEnabled = e; } /** * Gets whether the color grading effect is enabled. */ get cameraColorGradingEnabled() { return this.imageProcessingConfiguration.colorGradingEnabled; } /** * Gets whether the color grading effect is enabled. */ set cameraColorGradingEnabled(e) { this.imageProcessingConfiguration.colorGradingEnabled = e; } /** * Gets whether tonemapping is enabled or not. 
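* A hedged sketch of the camera image-processing controls exposed on the material (assumes "material" is a PBRMaterial):
* @example
* material.cameraToneMappingEnabled = true; // tone map this material's output
* material.cameraExposure = 1.2;            // photographic exposure, see cameraExposure below
* material.cameraContrast = 1.1;            // see cameraContrast below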
*/ get cameraToneMappingEnabled() { return this._imageProcessingConfiguration.toneMappingEnabled; } /** * Sets whether tonemapping is enabled or not */ set cameraToneMappingEnabled(e) { this._imageProcessingConfiguration.toneMappingEnabled = e; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. */ get cameraExposure() { return this._imageProcessingConfiguration.exposure; } /** * The camera exposure used on this material. * This property is here and not in the camera to allow controlling exposure without full screen post process. * This corresponds to a photographic exposure. */ set cameraExposure(e) { this._imageProcessingConfiguration.exposure = e; } /** * Gets The camera contrast used on this material. */ get cameraContrast() { return this._imageProcessingConfiguration.contrast; } /** * Sets The camera contrast used on this material. */ set cameraContrast(e) { this._imageProcessingConfiguration.contrast = e; } /** * Gets the Color Grading 2D Lookup Texture. */ get cameraColorGradingTexture() { return this._imageProcessingConfiguration.colorGradingTexture; } /** * Sets the Color Grading 2D Lookup Texture. */ set cameraColorGradingTexture(e) { this._imageProcessingConfiguration.colorGradingTexture = e; } /** * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. */ get cameraColorCurves() { return this._imageProcessingConfiguration.colorCurves; } /** * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects. * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; * corresponding to low luminance, medium luminance, and high luminance areas respectively. */ set cameraColorCurves(e) { this._imageProcessingConfiguration.colorCurves = e; } /** * Instantiates a new PBRMaterial instance. * * @param name The material name * @param scene The scene the material will be use in. 
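* @example
* // A hedged usage sketch (assumes an existing scene and mesh; values are illustrative):
* const pbr = new BABYLON.PBRMaterial("pbr", scene);
* pbr.albedoColor = new BABYLON.Color3(1, 0.77, 0.33);
* pbr.metallic = 1;     // metallic workflow
* pbr.roughness = 0.25;
* mesh.material = pbr;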
*/ constructor(e, t) { super(e, t), this.directIntensity = 1, this.emissiveIntensity = 1, this.environmentIntensity = 1, this.specularIntensity = 1, this.disableBumpMap = !1, this.ambientTextureStrength = 1, this.ambientTextureImpactOnAnalyticalLights = Ri.DEFAULT_AO_ON_ANALYTICAL_LIGHTS, this.metallicF0Factor = 1, this.metallicReflectanceColor = ze.White(), this.useOnlyMetallicFromMetallicReflectanceTexture = !1, this.ambientColor = new ze(0, 0, 0), this.albedoColor = new ze(1, 1, 1), this.reflectivityColor = new ze(1, 1, 1), this.reflectionColor = new ze(1, 1, 1), this.emissiveColor = new ze(0, 0, 0), this.microSurface = 1, this.useLightmapAsShadowmap = !1, this.useAlphaFromAlbedoTexture = !1, this.forceAlphaTest = !1, this.alphaCutOff = 0.4, this.useSpecularOverAlpha = !0, this.useMicroSurfaceFromReflectivityMapAlpha = !1, this.useRoughnessFromMetallicTextureAlpha = !0, this.useRoughnessFromMetallicTextureGreen = !1, this.useMetallnessFromMetallicTextureBlue = !1, this.useAmbientOcclusionFromMetallicTextureRed = !1, this.useAmbientInGrayScale = !1, this.useAutoMicroSurfaceFromReflectivityMap = !1, this.useRadianceOverAlpha = !0, this.useObjectSpaceNormalMap = !1, this.useParallax = !1, this.useParallaxOcclusion = !1, this.parallaxScaleBias = 0.05, this.disableLighting = !1, this.forceIrradianceInFragment = !1, this.maxSimultaneousLights = 4, this.invertNormalMapX = !1, this.invertNormalMapY = !1, this.twoSidedLighting = !1, this.useAlphaFresnel = !1, this.useLinearAlphaFresnel = !1, this.environmentBRDFTexture = null, this.forceNormalForward = !1, this.enableSpecularAntiAliasing = !1, this.useHorizonOcclusion = !0, this.useRadianceOcclusion = !0, this.unlit = !1, this.applyDecalMapAfterDetailMap = !1, this._environmentBRDFTexture = pN(this.getScene()); } /** * Returns the name of this material class. */ getClassName() { return "PBRMaterial"; } /** * Makes a duplicate of the current material. * @param name - name to use for the new material. * @param cloneTexturesOnlyOnce - if a texture is used in more than one channel (e.g diffuse and opacity), only clone it once and reuse it on the other channels. Default false. * @param rootUrl defines the root URL to use to load textures */ clone(e, t = !0, i = "") { const r = St.Clone(() => new Ri(e, this.getScene()), this, { cloneTexturesOnlyOnce: t }); return r.id = e, r.name = e, this.stencil.copyTo(r.stencil), this._clonePlugins(r, i), r; } /** * Serializes this PBR Material. * @returns - An object with the serialized material. */ serialize() { const e = super.serialize(); return e.customType = "BABYLON.PBRMaterial", e; } // Statics /** * Parses a PBR Material from a serialized object. * @param source - Serialized object. * @param scene - BJS scene instance. 
* @param rootUrl - url for the scene object * @returns - PBRMaterial */ static Parse(e, t, i) { const r = St.Parse(() => new Ri(e.name, t), e, t, i); return e.stencil && r.stencil.parse(e.stencil, t, i), At._parsePlugins(e, r, t, i), e.clearCoat && r.clearCoat.parse(e.clearCoat, t, i), e.anisotropy && r.anisotropy.parse(e.anisotropy, t, i), e.brdf && r.brdf.parse(e.brdf, t, i), e.sheen && r.sheen.parse(e.sheen, t, i), e.subSurface && r.subSurface.parse(e.subSurface, t, i), e.iridescence && r.iridescence.parse(e.iridescence, t, i), r; } } Ri.PBRMATERIAL_OPAQUE = on.PBRMATERIAL_OPAQUE; Ri.PBRMATERIAL_ALPHATEST = on.PBRMATERIAL_ALPHATEST; Ri.PBRMATERIAL_ALPHABLEND = on.PBRMATERIAL_ALPHABLEND; Ri.PBRMATERIAL_ALPHATESTANDBLEND = on.PBRMATERIAL_ALPHATESTANDBLEND; Ri.DEFAULT_AO_ON_ANALYTICAL_LIGHTS = on.DEFAULT_AO_ON_ANALYTICAL_LIGHTS; F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "directIntensity", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "emissiveIntensity", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "environmentIntensity", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "specularIntensity", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "disableBumpMap", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "albedoTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "ambientTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "ambientTextureStrength", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "ambientTextureImpactOnAnalyticalLights", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Ri.prototype, "opacityTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "reflectionTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "emissiveTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "reflectivityTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "metallicTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "metallic", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "roughness", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "metallicF0Factor", void 0); F([ Fs(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "metallicReflectanceColor", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useOnlyMetallicFromMetallicReflectanceTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "metallicReflectanceTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "reflectanceTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "microSurfaceTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "bumpTexture", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", null) ], Ri.prototype, "lightmapTexture", void 0); F([ Fs("ambient"), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "ambientColor", void 0); F([ Fs("albedo"), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "albedoColor", void 0); F([ Fs("reflectivity"), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "reflectivityColor", void 0); F([ Fs("reflection"), ct("_markAllSubMeshesAsTexturesDirty") ], 
Ri.prototype, "reflectionColor", void 0); F([ Fs("emissive"), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "emissiveColor", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "microSurface", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useLightmapAsShadowmap", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Ri.prototype, "useAlphaFromAlbedoTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Ri.prototype, "forceAlphaTest", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesAndMiscDirty") ], Ri.prototype, "alphaCutOff", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useSpecularOverAlpha", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useMicroSurfaceFromReflectivityMapAlpha", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useRoughnessFromMetallicTextureAlpha", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useRoughnessFromMetallicTextureGreen", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useMetallnessFromMetallicTextureBlue", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useAmbientOcclusionFromMetallicTextureRed", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useAmbientInGrayScale", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useAutoMicroSurfaceFromReflectivityMap", void 0); F([ W() ], Ri.prototype, "usePhysicalLightFalloff", null); F([ W() ], Ri.prototype, "useGLTFLightFalloff", null); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useRadianceOverAlpha", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useObjectSpaceNormalMap", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useParallax", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useParallaxOcclusion", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "parallaxScaleBias", void 0); F([ W(), ct("_markAllSubMeshesAsLightsDirty") ], Ri.prototype, "disableLighting", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "forceIrradianceInFragment", void 0); F([ W(), ct("_markAllSubMeshesAsLightsDirty") ], Ri.prototype, "maxSimultaneousLights", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "invertNormalMapX", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "invertNormalMapY", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "twoSidedLighting", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useAlphaFresnel", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useLinearAlphaFresnel", void 0); F([ ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "environmentBRDFTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "forceNormalForward", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "enableSpecularAntiAliasing", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useHorizonOcclusion", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ri.prototype, "useRadianceOcclusion", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], Ri.prototype, "unlit", void 0); F([ W(), ct("_markAllSubMeshesAsMiscDirty") ], Ri.prototype, "applyDecalMapAfterDetailMap", 
void 0); Be("BABYLON.PBRMaterial", Ri); const t2e = 542327876, KZ = 131072, WZ = 512, jZ = 4, XZ = 64, YZ = 131072; function TU(c) { return c.charCodeAt(0) + (c.charCodeAt(1) << 8) + (c.charCodeAt(2) << 16) + (c.charCodeAt(3) << 24); } function i2e(c) { return String.fromCharCode(c & 255, c >> 8 & 255, c >> 16 & 255, c >> 24 & 255); } const QZ = TU("DXT1"), $Z = TU("DXT3"), ZZ = TU("DXT5"), Wk = TU("DX10"), qZ = 113, JZ = 116, eq = 2, tq = 10, r2e = 88, jk = 31, s2e = 0, n2e = 1, iq = 2, rq = 3, Xk = 4, sq = 7, Yk = 20, nq = 21, a2e = 22, o2e = 23, l2e = 24, c2e = 25, u2e = 26, h2e = 28, d2e = 32; class ua { /** * Gets DDS information from an array buffer * @param data defines the array buffer view to read data from * @returns the DDS information */ static GetDDSInfo(e) { const t = new Int32Array(e.buffer, e.byteOffset, jk), i = new Int32Array(e.buffer, e.byteOffset, jk + 4); let r = 1; t[iq] & KZ && (r = Math.max(1, t[sq])); const s = t[nq], n = s === Wk ? i[d2e] : 0; let a = 0; switch (s) { case qZ: a = 2; break; case JZ: a = 1; break; case Wk: if (n === tq) { a = 2; break; } if (n === eq) { a = 1; break; } } return { width: t[Xk], height: t[rq], mipmapCount: r, isFourCC: (t[Yk] & jZ) === jZ, isRGB: (t[Yk] & XZ) === XZ, isLuminance: (t[Yk] & YZ) === YZ, isCube: (t[h2e] & WZ) === WZ, isCompressed: s === QZ || s === $Z || s === ZZ, dxgiFormat: n, textureType: a }; } static _GetHalfFloatAsFloatRGBAArrayBuffer(e, t, i, r, s, n) { const a = new Float32Array(r), l = new Uint16Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) { const d = (h + u * e) * 4; a[o] = kA(l[d]), a[o + 1] = kA(l[d + 1]), a[o + 2] = kA(l[d + 2]), ua.StoreLODInAlphaChannel ? a[o + 3] = n : a[o + 3] = kA(l[d + 3]), o += 4; } return a; } static _GetHalfFloatRGBAArrayBuffer(e, t, i, r, s, n) { if (ua.StoreLODInAlphaChannel) { const a = new Uint16Array(r), l = new Uint16Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) { const d = (h + u * e) * 4; a[o] = l[d], a[o + 1] = l[d + 1], a[o + 2] = l[d + 2], a[o + 3] = GA(n), o += 4; } return a; } return new Uint16Array(s, i, r); } static _GetFloatRGBAArrayBuffer(e, t, i, r, s, n) { if (ua.StoreLODInAlphaChannel) { const a = new Float32Array(r), l = new Float32Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) { const d = (h + u * e) * 4; a[o] = l[d], a[o + 1] = l[d + 1], a[o + 2] = l[d + 2], a[o + 3] = n, o += 4; } return a; } return new Float32Array(s, i, r); } static _GetFloatAsHalfFloatRGBAArrayBuffer(e, t, i, r, s, n) { const a = new Uint16Array(r), l = new Float32Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) a[o] = GA(l[o]), a[o + 1] = GA(l[o + 1]), a[o + 2] = GA(l[o + 2]), ua.StoreLODInAlphaChannel ? a[o + 3] = GA(n) : a[o + 3] = GA(l[o + 3]), o += 4; return a; } static _GetFloatAsUIntRGBAArrayBuffer(e, t, i, r, s, n) { const a = new Uint8Array(r), l = new Float32Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) { const d = (h + u * e) * 4; a[o] = yt.Clamp(l[d]) * 255, a[o + 1] = yt.Clamp(l[d + 1]) * 255, a[o + 2] = yt.Clamp(l[d + 2]) * 255, ua.StoreLODInAlphaChannel ? 
a[o + 3] = n : a[o + 3] = yt.Clamp(l[d + 3]) * 255, o += 4; } return a; } static _GetHalfFloatAsUIntRGBAArrayBuffer(e, t, i, r, s, n) { const a = new Uint8Array(r), l = new Uint16Array(s, i); let o = 0; for (let u = 0; u < t; u++) for (let h = 0; h < e; h++) { const d = (h + u * e) * 4; a[o] = yt.Clamp(kA(l[d])) * 255, a[o + 1] = yt.Clamp(kA(l[d + 1])) * 255, a[o + 2] = yt.Clamp(kA(l[d + 2])) * 255, ua.StoreLODInAlphaChannel ? a[o + 3] = n : a[o + 3] = yt.Clamp(kA(l[d + 3])) * 255, o += 4; } return a; } static _GetRGBAArrayBuffer(e, t, i, r, s, n, a, l, o) { const u = new Uint8Array(r), h = new Uint8Array(s, i); let d = 0; for (let f = 0; f < t; f++) for (let p = 0; p < e; p++) { const m = (p + f * e) * 4; u[d] = h[m + n], u[d + 1] = h[m + a], u[d + 2] = h[m + l], u[d + 3] = h[m + o], d += 4; } return u; } static _ExtractLongWordOrder(e) { return e === 0 || e === 255 || e === -16777216 ? 0 : 1 + ua._ExtractLongWordOrder(e >> 8); } static _GetRGBArrayBuffer(e, t, i, r, s, n, a, l) { const o = new Uint8Array(r), u = new Uint8Array(s, i); let h = 0; for (let d = 0; d < t; d++) for (let f = 0; f < e; f++) { const p = (f + d * e) * 3; o[h] = u[p + n], o[h + 1] = u[p + a], o[h + 2] = u[p + l], h += 3; } return o; } static _GetLuminanceArrayBuffer(e, t, i, r, s) { const n = new Uint8Array(r), a = new Uint8Array(s, i); let l = 0; for (let o = 0; o < t; o++) for (let u = 0; u < e; u++) { const h = u + o * e; n[l] = a[h], l++; } return n; } /** * Uploads DDS Levels to a Babylon Texture * @internal */ static UploadDDSLevels(e, t, i, r, s, n, a = -1, l, o = !0) { let u = null; r.sphericalPolynomial && (u = []); const h = !!e.getCaps().s3tc; t.generateMipMaps = s; const d = new Int32Array(i.buffer, i.byteOffset, jk); let f, p, m, _ = 0, v, C, x, b, S = 0, M = 1; if (d[s2e] !== t2e) { Ce.Error("Invalid magic number in DDS header"); return; } if (!r.isFourCC && !r.isRGB && !r.isLuminance) { Ce.Error("Unsupported format, must contain a FourCC, RGB or LUMINANCE code"); return; } if (r.isCompressed && !h) { Ce.Error("Compressed textures are not supported on this platform."); return; } let R = d[a2e]; v = d[n2e] + 4; let w = !1; if (r.isFourCC) switch (f = d[nq], f) { case QZ: M = 8, S = 33777; break; case $Z: M = 16, S = 33778; break; case ZZ: M = 16, S = 33779; break; case qZ: w = !0, R = 64; break; case JZ: w = !0, R = 128; break; case Wk: { v += 5 * 4; let ee = !1; switch (r.dxgiFormat) { case tq: w = !0, R = 64, ee = !0; break; case eq: w = !0, R = 128, ee = !0; break; case r2e: r.isRGB = !0, r.isFourCC = !1, R = 32, ee = !0; break; } if (ee) break; } default: Ce.Error(["Unsupported FourCC code:", i2e(f)]); return; } const V = ua._ExtractLongWordOrder(d[o2e]), k = ua._ExtractLongWordOrder(d[l2e]), L = ua._ExtractLongWordOrder(d[c2e]), B = ua._ExtractLongWordOrder(d[u2e]); w && (S = e._getRGBABufferInternalSizedFormat(r.textureType)), x = 1, d[iq] & KZ && s !== !1 && (x = Math.max(1, d[sq])); const U = l || 0, K = e.getCaps(); for (let ee = U; ee < n; ee++) { for (p = d[Xk], m = d[rq], b = 0; b < x; ++b) { if (a === -1 || a === b) { const Z = a === -1 ? b : 0; if (!r.isCompressed && r.isFourCC) { t.format = 5, _ = p * m * 4; let q = null; if (e._badOS || e._badDesktopOS || !K.textureHalfFloat && !K.textureFloat) R === 128 ? 
(q = ua._GetFloatAsUIntRGBAArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, Z), u && Z == 0 && u.push(ua._GetFloatRGBAArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, Z))) : R === 64 && (q = ua._GetHalfFloatAsUIntRGBAArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, Z), u && Z == 0 && u.push(ua._GetHalfFloatAsFloatRGBAArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, Z))), t.type = 0; else { const le = K.textureFloat && (o && K.textureFloatLinearFiltering || !o), ie = K.textureHalfFloat && (o && K.textureHalfFloatLinearFiltering || !o), $ = (R === 128 || R === 64 && !ie) && le ? 1 : (R === 64 || R === 128 && !le) && ie ? 2 : 0; let j, J = null; switch (R) { case 128: { switch ($) { case 1: j = ua._GetFloatRGBAArrayBuffer, J = null; break; case 2: j = ua._GetFloatAsHalfFloatRGBAArrayBuffer, J = ua._GetFloatRGBAArrayBuffer; break; case 0: j = ua._GetFloatAsUIntRGBAArrayBuffer, J = ua._GetFloatRGBAArrayBuffer; break; } break; } default: { switch ($) { case 1: j = ua._GetHalfFloatAsFloatRGBAArrayBuffer, J = null; break; case 2: j = ua._GetHalfFloatRGBAArrayBuffer, J = ua._GetHalfFloatAsFloatRGBAArrayBuffer; break; case 0: j = ua._GetHalfFloatAsUIntRGBAArrayBuffer, J = ua._GetHalfFloatAsFloatRGBAArrayBuffer; break; } break; } } t.type = $, q = j(p, m, i.byteOffset + v, _, i.buffer, Z), u && Z == 0 && u.push(J ? J(p, m, i.byteOffset + v, _, i.buffer, Z) : q); } q && e._uploadDataToTextureDirectly(t, q, ee, Z); } else if (r.isRGB) t.type = 0, R === 24 ? (t.format = 4, _ = p * m * 3, C = ua._GetRGBArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, V, k, L), e._uploadDataToTextureDirectly(t, C, ee, Z)) : (t.format = 5, _ = p * m * 4, C = ua._GetRGBAArrayBuffer(p, m, i.byteOffset + v, _, i.buffer, V, k, L, B), e._uploadDataToTextureDirectly(t, C, ee, Z)); else if (r.isLuminance) { const q = e._getUnpackAlignement(), le = p; _ = Math.floor((p + q - 1) / q) * q * (m - 1) + le, C = ua._GetLuminanceArrayBuffer(p, m, i.byteOffset + v, _, i.buffer), t.format = 1, t.type = 0, e._uploadDataToTextureDirectly(t, C, ee, Z); } else _ = Math.max(4, p) / 4 * Math.max(4, m) / 4 * M, C = new Uint8Array(i.buffer, i.byteOffset + v, _), t.type = 0, e._uploadCompressedDataToTextureDirectly(t, S, p, m, C, ee, Z); } v += R ? p * m * (R / 8) : _, p *= 0.5, m *= 0.5, p = Math.max(1, p), m = Math.max(1, m); } if (l !== void 0) break; } u && u.length > 0 ? r.sphericalPolynomial = GI.ConvertCubeMapToSphericalPolynomial({ size: d[Xk], right: u[0], left: u[1], up: u[2], down: u[3], front: u[4], back: u[5], format: 5, type: 1, gammaSpace: !1 }) : r.sphericalPolynomial = void 0; } } ua.StoreLODInAlphaChannel = !1; mi.prototype.createPrefilteredCubeTexture = function(c, e, t, i, r = null, s = null, n, a = null, l = !0) { const o = (u) => { if (!u) { r && r(null); return; } const h = u.texture; if (l ? 
u.info.sphericalPolynomial && (h._sphericalPolynomial = u.info.sphericalPolynomial) : h._sphericalPolynomial = new ax(), h._source = ts.CubePrefiltered, this.getCaps().textureLOD) { r && r(h); return; } const d = 3, f = this._gl, p = u.width; if (!p) return; const m = []; for (let _ = 0; _ < d; _++) { const C = 1 - _ / (d - 1), x = i, b = yt.Log2(p) * t + i, S = x + (b - x) * C, M = Math.round(Math.min(Math.max(S, 0), b)), R = new ln(this, ts.Temp); if (R.type = h.type, R.format = h.format, R.width = Math.pow(2, Math.max(yt.Log2(p) - M, 0)), R.height = R.width, R.isCube = !0, R._cachedWrapU = 0, R._cachedWrapV = 0, this._bindTextureDirectly(f.TEXTURE_CUBE_MAP, R, !0), R.samplingMode = 2, f.texParameteri(f.TEXTURE_CUBE_MAP, f.TEXTURE_MAG_FILTER, f.LINEAR), f.texParameteri(f.TEXTURE_CUBE_MAP, f.TEXTURE_MIN_FILTER, f.LINEAR), f.texParameteri(f.TEXTURE_CUBE_MAP, f.TEXTURE_WRAP_S, f.CLAMP_TO_EDGE), f.texParameteri(f.TEXTURE_CUBE_MAP, f.TEXTURE_WRAP_T, f.CLAMP_TO_EDGE), u.isDDS) { const V = u.info, k = u.data; this._unpackFlipY(V.isCompressed), ua.UploadDDSLevels(this, R, k, V, !0, 6, M); } else Ce.Warn("DDS is the only prefiltered cube map supported so far."); this._bindTextureDirectly(f.TEXTURE_CUBE_MAP, null); const w = new dn(e); w._isCube = !0, w._texture = R, R.isReady = !0, m.push(w); } h._lodTextureHigh = m[2], h._lodTextureMid = m[1], h._lodTextureLow = m[0], r && r(h); }; return this.createCubeTexture(c, e, null, !1, o, s, n, a, l, t, i); }; class Jie { constructor() { this.supportCascades = !0; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e) { return e.endsWith(".dds"); } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. * @param imgs contains the cube maps * @param texture defines the BabylonJS internal texture * @param createPolynomials will be true if polynomials have been requested * @param onLoad defines the callback to trigger once the texture is ready */ loadCubeData(e, t, i, r) { const s = t.getEngine(); let n, a = !1, l = 1e3; if (Array.isArray(e)) for (let o = 0; o < e.length; o++) { const u = e[o]; n = ua.GetDDSInfo(u), t.width = n.width, t.height = n.height, a = (n.isRGB || n.isLuminance || n.mipmapCount > 1) && t.generateMipMaps, s._unpackFlipY(n.isCompressed), ua.UploadDDSLevels(s, t, u, n, a, 6, -1, o), !n.isFourCC && n.mipmapCount === 1 ? s.generateMipMapsForCubemap(t) : l = n.mipmapCount - 1; } else { const o = e; n = ua.GetDDSInfo(o), t.width = n.width, t.height = n.height, i && (n.sphericalPolynomial = new ax()), a = (n.isRGB || n.isLuminance || n.mipmapCount > 1) && t.generateMipMaps, s._unpackFlipY(n.isCompressed), ua.UploadDDSLevels(s, t, o, n, a, 6), !n.isFourCC && n.mipmapCount === 1 ? s.generateMipMapsForCubemap(t, !1) : l = n.mipmapCount - 1; } s._setCubeMapTextureParams(t, a, l), t.isReady = !0, t.onLoadedObservable.notifyObservers(t), t.onLoadedObservable.clear(), r && r({ isDDS: !0, width: t.width, info: n, data: e, texture: t }); } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. 
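* This loader is registered in the engine's _TextureLoaders list and is picked automatically from the
* ".dds" file extension, so user code normally goes through the texture classes instead. A hedged sketch
* (assumes an existing scene; the URL is illustrative):
* @example
* const specularTex = new BABYLON.Texture("textures/specular.dds", scene);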
* @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param callback defines the method to call once ready to upload */ loadData(e, t, i) { const r = ua.GetDDSInfo(e), s = (r.isRGB || r.isLuminance || r.mipmapCount > 1) && t.generateMipMaps && r.width >> r.mipmapCount - 1 === 1; i(r.width, r.height, s, r.isFourCC, () => { ua.UploadDDSLevels(t.getEngine(), t, e, r, s, 1); }); } } $e._TextureLoaders.push(new Jie()); class ere { constructor() { this.supportCascades = !1; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e) { return e.endsWith(".env"); } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. * @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param createPolynomials will be true if polynomials have been requested * @param onLoad defines the callback to trigger once the texture is ready * @param onError defines the callback to trigger in case of error */ loadCubeData(e, t, i, r, s) { if (Array.isArray(e)) return; const n = AU(e); if (n) { t.width = n.width, t.height = n.width; try { CU(t, n), iW(t, e, n).then(() => { t.isReady = !0, t.onLoadedObservable.notifyObservers(t), t.onLoadedObservable.clear(), r && r(); }, (a) => { s == null || s("Can not upload environment levels", a); }); } catch (a) { s == null || s("Can not upload environment file", a); } } else s && s("Can not parse the environment file", null); } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. */ loadData() { throw ".env not supported in 2d."; } } $e._TextureLoaders.push(new ere()); class Lp { /** * Creates a new KhronosTextureContainer * @param data contents of the KTX container file * @param facesExpected should be either 1 or 6, based whether a cube texture or or */ constructor(e, t) { if (this.data = e, this.isInvalid = !1, !Lp.IsValid(e)) { this.isInvalid = !0, Ce.Error("texture missing KTX identifier"); return; } const i = Uint32Array.BYTES_PER_ELEMENT, r = new DataView(this.data.buffer, this.data.byteOffset + 12, 13 * i), n = r.getUint32(0, !0) === 67305985; if (this.glType = r.getUint32(1 * i, n), this.glTypeSize = r.getUint32(2 * i, n), this.glFormat = r.getUint32(3 * i, n), this.glInternalFormat = r.getUint32(4 * i, n), this.glBaseInternalFormat = r.getUint32(5 * i, n), this.pixelWidth = r.getUint32(6 * i, n), this.pixelHeight = r.getUint32(7 * i, n), this.pixelDepth = r.getUint32(8 * i, n), this.numberOfArrayElements = r.getUint32(9 * i, n), this.numberOfFaces = r.getUint32(10 * i, n), this.numberOfMipmapLevels = r.getUint32(11 * i, n), this.bytesOfKeyValueData = r.getUint32(12 * i, n), this.glType !== 0) { Ce.Error("only compressed formats currently supported"), this.isInvalid = !0; return; } else this.numberOfMipmapLevels = Math.max(1, this.numberOfMipmapLevels); if (this.pixelHeight === 0 || this.pixelDepth !== 0) { Ce.Error("only 2D textures currently supported"), this.isInvalid = !0; return; } if (this.numberOfArrayElements !== 0) { Ce.Error("texture arrays not currently supported"), this.isInvalid = !0; return; } if (this.numberOfFaces !== t) { Ce.Error("number of faces expected" + t + ", but found " + this.numberOfFaces), this.isInvalid = !0; return; } this.loadType = Lp.COMPRESSED_2D; } /** * Uploads KTX content to a Babylon Texture. 
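* For compressed 2D content, each mip level is laid out as a 4-byte imageSize followed by the face
* payloads, with the read cursor padded to a 4-byte boundary after every face; width and height are then
* halved (never below 1) for the next level (see _upload2DCompressedLevels).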
* It is assumed that the texture has already been created & is currently bound * @internal */ uploadLevels(e, t) { switch (this.loadType) { case Lp.COMPRESSED_2D: this._upload2DCompressedLevels(e, t); break; case Lp.TEX_2D: case Lp.COMPRESSED_3D: case Lp.TEX_3D: } } _upload2DCompressedLevels(e, t) { let i = Lp.HEADER_LEN + this.bytesOfKeyValueData, r = this.pixelWidth, s = this.pixelHeight; const n = t ? this.numberOfMipmapLevels : 1; for (let a = 0; a < n; a++) { const l = new Int32Array(this.data.buffer, this.data.byteOffset + i, 1)[0]; i += 4; for (let o = 0; o < this.numberOfFaces; o++) { const u = new Uint8Array(this.data.buffer, this.data.byteOffset + i, l); e.getEngine()._uploadCompressedDataToTextureDirectly(e, e.format, r, s, u, o, a), i += l, i += 3 - (l + 3) % 4; } r = Math.max(1, r * 0.5), s = Math.max(1, s * 0.5); } } /** * Checks if the given data starts with a KTX file identifier. * @param data the data to check * @returns true if the data is a KTX file or false otherwise */ static IsValid(e) { if (e.byteLength >= 12) { const t = new Uint8Array(e.buffer, e.byteOffset, 12); if (t[0] === 171 && t[1] === 75 && t[2] === 84 && t[3] === 88 && t[4] === 32 && t[5] === 49 && t[6] === 49 && t[7] === 187 && t[8] === 13 && t[9] === 10 && t[10] === 26 && t[11] === 10) return !0; } return !1; } } Lp.HEADER_LEN = 12 + 13 * 4; Lp.COMPRESSED_2D = 0; Lp.COMPRESSED_3D = 1; Lp.TEX_2D = 2; Lp.TEX_3D = 3; class tre { /** * Constructor * @param workers Array of workers to use for actions */ constructor(e) { this._pendingActions = new Array(), this._workerInfos = e.map((t) => ({ workerPromise: Promise.resolve(t), idle: !0 })); } /** * Terminates all workers and clears any pending actions. */ dispose() { for (const e of this._workerInfos) e.workerPromise.then((t) => { t.terminate(); }); this._workerInfos.length = 0, this._pendingActions.length = 0; } /** * Pushes an action to the worker pool. If all the workers are active, the action will be * pended until a worker has completed its action. * @param action The action to perform. Call onComplete when the action is complete. */ push(e) { this._executeOnIdleWorker(e) || this._pendingActions.push(e); } _executeOnIdleWorker(e) { for (const t of this._workerInfos) if (t.idle) return this._execute(t, e), !0; return !1; } _execute(e, t) { e.idle = !1, e.workerPromise.then((i) => { t(i, () => { const r = this._pendingActions.shift(); r ? 
this._execute(e, r) : e.idle = !0; }); }); } } class xw extends tre { constructor(e, t, i = xw.DefaultOptions) { super([]), this._maxWorkers = e, this._createWorkerAsync = t, this._options = i; } push(e) { if (!this._executeOnIdleWorker(e)) if (this._workerInfos.length < this._maxWorkers) { const t = { workerPromise: this._createWorkerAsync(), idle: !1 }; this._workerInfos.push(t), this._execute(t, e); } else this._pendingActions.push(e); } _execute(e, t) { e.timeoutId && (clearTimeout(e.timeoutId), delete e.timeoutId), super._execute(e, (i, r) => { t(i, () => { r(), e.idle && (e.timeoutId = setTimeout(() => { e.workerPromise.then((n) => { n.terminate(); }); const s = this._workerInfos.indexOf(e); s !== -1 && this._workerInfos.splice(s, 1); }, this._options.idleTimeElapsedBeforeRelease)); }); }); } } xw.DefaultOptions = { idleTimeElapsedBeforeRelease: 1e3 }; var LH; (function(c) { c[c.ETC1S = 0] = "ETC1S", c[c.UASTC4x4 = 1] = "UASTC4x4"; })(LH || (LH = {})); var pO; (function(c) { c[c.ASTC_4X4_RGBA = 0] = "ASTC_4X4_RGBA", c[c.BC7_RGBA = 1] = "BC7_RGBA", c[c.BC3_RGBA = 2] = "BC3_RGBA", c[c.BC1_RGB = 3] = "BC1_RGB", c[c.PVRTC1_4_RGBA = 4] = "PVRTC1_4_RGBA", c[c.PVRTC1_4_RGB = 5] = "PVRTC1_4_RGB", c[c.ETC2_RGBA = 6] = "ETC2_RGBA", c[c.ETC1_RGB = 7] = "ETC1_RGB", c[c.RGBA32 = 8] = "RGBA32", c[c.R8 = 9] = "R8", c[c.RG8 = 10] = "RG8"; })(pO || (pO = {})); var _B; (function(c) { c[c.COMPRESSED_RGBA_BPTC_UNORM_EXT = 36492] = "COMPRESSED_RGBA_BPTC_UNORM_EXT", c[c.COMPRESSED_RGBA_ASTC_4X4_KHR = 37808] = "COMPRESSED_RGBA_ASTC_4X4_KHR", c[c.COMPRESSED_RGB_S3TC_DXT1_EXT = 33776] = "COMPRESSED_RGB_S3TC_DXT1_EXT", c[c.COMPRESSED_RGBA_S3TC_DXT5_EXT = 33779] = "COMPRESSED_RGBA_S3TC_DXT5_EXT", c[c.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 35842] = "COMPRESSED_RGBA_PVRTC_4BPPV1_IMG", c[c.COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 35840] = "COMPRESSED_RGB_PVRTC_4BPPV1_IMG", c[c.COMPRESSED_RGBA8_ETC2_EAC = 37496] = "COMPRESSED_RGBA8_ETC2_EAC", c[c.COMPRESSED_RGB8_ETC2 = 37492] = "COMPRESSED_RGB8_ETC2", c[c.COMPRESSED_RGB_ETC1_WEBGL = 36196] = "COMPRESSED_RGB_ETC1_WEBGL", c[c.RGBA8Format = 32856] = "RGBA8Format", c[c.R8Format = 33321] = "R8Format", c[c.RG8Format = 33323] = "RG8Format"; })(_B || (_B = {})); function NH(c) { c.wasmUASTCToASTC && (KTX2DECODER.LiteTranscoder_UASTC_ASTC.WasmModuleURL = c.wasmUASTCToASTC), c.wasmUASTCToBC7 && (KTX2DECODER.LiteTranscoder_UASTC_BC7.WasmModuleURL = c.wasmUASTCToBC7), c.wasmUASTCToRGBA_UNORM && (KTX2DECODER.LiteTranscoder_UASTC_RGBA_UNORM.WasmModuleURL = c.wasmUASTCToRGBA_UNORM), c.wasmUASTCToRGBA_SRGB && (KTX2DECODER.LiteTranscoder_UASTC_RGBA_SRGB.WasmModuleURL = c.wasmUASTCToRGBA_SRGB), c.wasmUASTCToR8_UNORM && (KTX2DECODER.LiteTranscoder_UASTC_R8_UNORM.WasmModuleURL = c.wasmUASTCToR8_UNORM), c.wasmUASTCToRG8_UNORM && (KTX2DECODER.LiteTranscoder_UASTC_RG8_UNORM.WasmModuleURL = c.wasmUASTCToRG8_UNORM), c.jsMSCTranscoder && (KTX2DECODER.MSCTranscoder.JSModuleURL = c.jsMSCTranscoder), c.wasmMSCTranscoder && (KTX2DECODER.MSCTranscoder.WasmModuleURL = c.wasmMSCTranscoder), c.wasmZSTDDecoder && (KTX2DECODER.ZSTDDecoder.WasmModuleURL = c.wasmZSTDDecoder); } class ire { constructor() { this._isDirty = !0, this._useRGBAIfOnlyBC1BC3AvailableWhenUASTC = !0, this._ktx2DecoderOptions = {}; } /** * Gets the dirty flag */ get isDirty() { return this._isDirty; } /** * force a (uncompressed) RGBA transcoded format if transcoding a UASTC source format and ASTC + BC7 are not available as a compressed transcoded format */ get useRGBAIfASTCBC7NotAvailableWhenUASTC() { return 
this._useRGBAIfASTCBC7NotAvailableWhenUASTC; } set useRGBAIfASTCBC7NotAvailableWhenUASTC(e) { this._useRGBAIfASTCBC7NotAvailableWhenUASTC !== e && (this._useRGBAIfASTCBC7NotAvailableWhenUASTC = e, this._isDirty = !0); } /** * force a (uncompressed) RGBA transcoded format if transcoding a UASTC source format and only BC1 or BC3 are available as a compressed transcoded format. * This property is true by default to favor speed over memory, because currently transcoding from UASTC to BC1/3 is slow because the transcoder transcodes * to uncompressed and then recompresses the texture */ get useRGBAIfOnlyBC1BC3AvailableWhenUASTC() { return this._useRGBAIfOnlyBC1BC3AvailableWhenUASTC; } set useRGBAIfOnlyBC1BC3AvailableWhenUASTC(e) { this._useRGBAIfOnlyBC1BC3AvailableWhenUASTC !== e && (this._useRGBAIfOnlyBC1BC3AvailableWhenUASTC = e, this._isDirty = !0); } /** * force to always use (uncompressed) RGBA for transcoded format */ get forceRGBA() { return this._forceRGBA; } set forceRGBA(e) { this._forceRGBA !== e && (this._forceRGBA = e, this._isDirty = !0); } /** * force to always use (uncompressed) R8 for transcoded format */ get forceR8() { return this._forceR8; } set forceR8(e) { this._forceR8 !== e && (this._forceR8 = e, this._isDirty = !0); } /** * force to always use (uncompressed) RG8 for transcoded format */ get forceRG8() { return this._forceRG8; } set forceRG8(e) { this._forceRG8 !== e && (this._forceRG8 = e, this._isDirty = !0); } /** * list of transcoders to bypass when looking for a suitable transcoder. The available transcoders are: * UniversalTranscoder_UASTC_ASTC * UniversalTranscoder_UASTC_BC7 * UniversalTranscoder_UASTC_RGBA_UNORM * UniversalTranscoder_UASTC_RGBA_SRGB * UniversalTranscoder_UASTC_R8_UNORM * UniversalTranscoder_UASTC_RG8_UNORM * MSCTranscoder */ get bypassTranscoders() { return this._bypassTranscoders; } set bypassTranscoders(e) { this._bypassTranscoders !== e && (this._bypassTranscoders = e, this._isDirty = !0); } /** @internal */ _getKTX2DecoderOptions() { if (!this._isDirty) return this._ktx2DecoderOptions; this._isDirty = !1; const e = { useRGBAIfASTCBC7NotAvailableWhenUASTC: this._useRGBAIfASTCBC7NotAvailableWhenUASTC, forceRGBA: this._forceRGBA, forceR8: this._forceR8, forceRG8: this._forceRG8, bypassTranscoders: this._bypassTranscoders }; return this.useRGBAIfOnlyBC1BC3AvailableWhenUASTC && (e.transcodeFormatDecisionTree = { UASTC: { transcodeFormat: [pO.BC1_RGB, pO.BC3_RGBA], yes: { transcodeFormat: pO.RGBA32, engineFormat: _B.RGBA8Format, roundToMultiple4: !1 } } }), this._ktx2DecoderOptions = e, e; } } class mh { static GetDefaultNumWorkers() { return typeof navigator != "object" || !navigator.hardwareConcurrency ? 
1 : Math.min(Math.floor(navigator.hardwareConcurrency * 0.5), 4); } static _Initialize(e) { if (mh._WorkerPoolPromise || mh._DecoderModulePromise) return; const t = { jsDecoderModule: Ve.GetBabylonScriptURL(this.URLConfig.jsDecoderModule, !0), wasmUASTCToASTC: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToASTC, !0), wasmUASTCToBC7: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToBC7, !0), wasmUASTCToRGBA_UNORM: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToRGBA_UNORM, !0), wasmUASTCToRGBA_SRGB: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToRGBA_SRGB, !0), wasmUASTCToR8_UNORM: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToR8_UNORM, !0), wasmUASTCToRG8_UNORM: Ve.GetBabylonScriptURL(this.URLConfig.wasmUASTCToRG8_UNORM, !0), jsMSCTranscoder: Ve.GetBabylonScriptURL(this.URLConfig.jsMSCTranscoder, !0), wasmMSCTranscoder: Ve.GetBabylonScriptURL(this.URLConfig.wasmMSCTranscoder, !0), wasmZSTDDecoder: Ve.GetBabylonScriptURL(this.URLConfig.wasmZSTDDecoder, !0) }; e && typeof Worker == "function" && typeof URL < "u" ? mh._WorkerPoolPromise = new Promise((i) => { const r = `${NH}(${f2e})()`, s = URL.createObjectURL(new Blob([r], { type: "application/javascript" })); i(new xw(e, () => new Promise((n, a) => { const l = new Worker(s), o = (h) => { l.removeEventListener("error", o), l.removeEventListener("message", u), a(h); }, u = (h) => { h.data.action === "init" && (l.removeEventListener("error", o), l.removeEventListener("message", u), n(l)); }; l.addEventListener("error", o), l.addEventListener("message", u), l.postMessage({ action: "init", urls: t }); }))); }) : typeof KTX2DECODER > "u" ? mh._DecoderModulePromise = Ve.LoadBabylonScriptAsync(t.jsDecoderModule).then(() => (KTX2DECODER.MSCTranscoder.UseFromWorkerThread = !1, KTX2DECODER.WASMMemoryManager.LoadBinariesFromCurrentThread = !0, NH(t), new KTX2DECODER.KTX2Decoder())) : (KTX2DECODER.MSCTranscoder.UseFromWorkerThread = !1, KTX2DECODER.WASMMemoryManager.LoadBinariesFromCurrentThread = !0, mh._DecoderModulePromise = Promise.resolve(new KTX2DECODER.KTX2Decoder())); } /** * Constructor * @param engine The engine to use * @param numWorkers The number of workers for async operations. Specify `0` to disable web workers and run synchronously in the current context. 
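*
* A minimal construction sketch (assumption: this class is the one published as
* BABYLON.KhronosTextureContainer2; `engine` stands for an existing Engine instance):
*
*   const ktx2 = new KhronosTextureContainer2(engine);        // default worker pool (DefaultNumWorkers)
*   const ktx2Sync = new KhronosTextureContainer2(engine, 0); // 0 = decode synchronously, no web workers
*
* DefaultNumWorkers is derived from navigator.hardwareConcurrency above (half of it, capped at 4).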
*/ constructor(e, t = mh.DefaultNumWorkers) { this._engine = e, mh._Initialize(t); } /** * @internal */ uploadAsync(e, t, i) { const r = this._engine.getCaps(), s = { astc: !!r.astc, bptc: !!r.bptc, s3tc: !!r.s3tc, pvrtc: !!r.pvrtc, etc2: !!r.etc2, etc1: !!r.etc1 }; if (mh._WorkerPoolPromise) return mh._WorkerPoolPromise.then((n) => new Promise((a, l) => { n.push((o, u) => { const h = (p) => { o.removeEventListener("error", h), o.removeEventListener("message", d), l(p), u(); }, d = (p) => { if (p.data.action === "decoded") { if (o.removeEventListener("error", h), o.removeEventListener("message", d), !p.data.success) l({ message: p.data.msg }); else try { this._createTexture(p.data.decodedData, t, i), a(); } catch (m) { l({ message: m }); } u(); } }; o.addEventListener("error", h), o.addEventListener("message", d), o.postMessage({ action: "setDefaultDecoderOptions", options: mh.DefaultDecoderOptions._getKTX2DecoderOptions() }); const f = new Uint8Array(e.byteLength); f.set(new Uint8Array(e.buffer, e.byteOffset, e.byteLength)), o.postMessage({ action: "decode", data: f, caps: s, options: i }, [f.buffer]); }); })); if (mh._DecoderModulePromise) return mh._DecoderModulePromise.then((n) => (mh.DefaultDecoderOptions.isDirty && (KTX2DECODER.KTX2Decoder.DefaultDecoderOptions = mh.DefaultDecoderOptions._getKTX2DecoderOptions()), new Promise((a, l) => { n.decode(e, r).then((o) => { this._createTexture(o, t), a(); }).catch((o) => { l({ message: o }); }); }))); throw new Error("KTX2 decoder module is not available"); } _createTexture(e, t, i) { this._engine._bindTextureDirectly(3553, t), i && (i.transcodedFormat = e.transcodedFormat, i.isInGammaSpace = e.isInGammaSpace, i.hasAlpha = e.hasAlpha, i.transcoderName = e.transcoderName); let s = !0; switch (e.transcodedFormat) { case 32856: t.type = 0, t.format = 5; break; case 33321: t.type = 0, t.format = 6; break; case 33323: t.type = 0, t.format = 7; break; default: t.format = e.transcodedFormat, s = !1; break; } if (t._gammaSpace = e.isInGammaSpace, t.generateMipMaps = e.mipmaps.length > 1, e.errors) throw new Error("KTX2 container - could not transcode the data. " + e.errors); for (let n = 0; n < e.mipmaps.length; ++n) { const a = e.mipmaps[n]; if (!a || !a.data) throw new Error("KTX2 container - could not transcode one of the image"); s ? (t.width = a.width, t.height = a.height, this._engine._uploadDataToTextureDirectly(t, a.data, 0, n, void 0, !0)) : this._engine._uploadCompressedDataToTextureDirectly(t, e.transcodedFormat, a.width, a.height, a.data, 0, n); } t._extension = ".ktx2", t.width = e.mipmaps[0].width, t.height = e.mipmaps[0].height, t.isReady = !0, this._engine._bindTextureDirectly(3553, null); } /** * Checks if the given data starts with a KTX2 file identifier. 
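* The identifier checked below is the standard 12-byte KTX2 magic
* (0xAB 0x4B 0x54 0x58 0x20 0x32 0x30 0xBB 0x0D 0x0A 0x1A 0x0A, i.e. «KTX 20»\r\n\x1A\n).
* A usage sketch, assuming the class is exposed publicly as KhronosTextureContainer2 and
* `arrayBufferView` is a Uint8Array holding the file contents:
*
*   if (KhronosTextureContainer2.IsValid(arrayBufferView)) {
*     // safe to hand the buffer to the KTX2 upload path
*   }
*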
* @param data the data to check * @returns true if the data is a KTX2 file or false otherwise */ static IsValid(e) { if (e.byteLength >= 12) { const t = new Uint8Array(e.buffer, e.byteOffset, 12); if (t[0] === 171 && t[1] === 75 && t[2] === 84 && t[3] === 88 && t[4] === 32 && t[5] === 50 && t[6] === 48 && t[7] === 187 && t[8] === 13 && t[9] === 10 && t[10] === 26 && t[11] === 10) return !0; } return !1; } } mh.URLConfig = { jsDecoderModule: "https://cdn.babylonjs.com/babylon.ktx2Decoder.js", wasmUASTCToASTC: null, wasmUASTCToBC7: null, wasmUASTCToRGBA_UNORM: null, wasmUASTCToRGBA_SRGB: null, wasmUASTCToR8_UNORM: null, wasmUASTCToRG8_UNORM: null, jsMSCTranscoder: null, wasmMSCTranscoder: null, wasmZSTDDecoder: null }; mh.DefaultNumWorkers = mh.GetDefaultNumWorkers(); mh.DefaultDecoderOptions = new ire(); function f2e() { let c; onmessage = (e) => { if (e.data) switch (e.data.action) { case "init": { const t = e.data.urls; importScripts(t.jsDecoderModule), NH(t), c = new KTX2DECODER.KTX2Decoder(), postMessage({ action: "init" }); break; } case "setDefaultDecoderOptions": { KTX2DECODER.KTX2Decoder.DefaultDecoderOptions = e.data.options; break; } case "decode": c.decode(e.data.data, e.data.caps, e.data.options).then((t) => { const i = []; for (let r = 0; r < t.mipmaps.length; ++r) { const s = t.mipmaps[r]; s && s.data && i.push(s.data.buffer); } postMessage({ action: "decoded", success: !0, decodedData: t }, i); }).catch((t) => { postMessage({ action: "decoded", success: !1, msg: t }); }); break; } }; } function p2e(c) { switch (c) { case 35916: return 33776; case 35918: return 33778; case 35919: return 33779; case 37493: return 37492; case 37497: return 37496; case 37495: return 37494; case 37840: return 37808; case 36493: return 36492; } return null; } class rre { constructor() { this.supportCascades = !1; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @param mimeType defines the optional mime type of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e, t) { return e.endsWith(".ktx") || e.endsWith(".ktx2") || t === "image/ktx" || t === "image/ktx2"; } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. * @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param createPolynomials will be true if polynomials have been requested * @param onLoad defines the callback to trigger once the texture is ready */ loadCubeData(e, t, i, r) { if (Array.isArray(e)) return; t._invertVScale = !t.invertY; const s = t.getEngine(), n = new Lp(e, 6), a = n.numberOfMipmapLevels > 1 && t.generateMipMaps; s._unpackFlipY(!0), n.uploadLevels(t, t.generateMipMaps), t.width = n.pixelWidth, t.height = n.pixelHeight, s._setCubeMapTextureParams(t, a, n.numberOfMipmapLevels - 1), t.isReady = !0, t.onLoadedObservable.notifyObservers(t), t.onLoadedObservable.clear(), r && r(); } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. * @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param callback defines the method to call once ready to upload * @param options */ loadData(e, t, i, r) { if (Lp.IsValid(e)) { t._invertVScale = !t.invertY; const s = new Lp(e, 1), n = p2e(s.glInternalFormat); n ? 
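/*
 * Sketch: self-hosting the KTX2 decoder scripts instead of pulling them from the Babylon CDN.
 * The property names mirror the URLConfig object defined above; the URLs shown are placeholders
 * (assumptions), not real endpoints, and the public class name KhronosTextureContainer2 is presumed.
 *
 *   KhronosTextureContainer2.URLConfig.jsDecoderModule = "/libs/babylon.ktx2Decoder.js";
 *   KhronosTextureContainer2.URLConfig.wasmMSCTranscoder = "/libs/msc_transcoder.wasm";
 *   KhronosTextureContainer2.URLConfig.wasmZSTDDecoder = "/libs/zstddec.wasm";
 *
 * Entries left null are presumably resolved by the decoder module itself.
 */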
(t.format = n, t._useSRGBBuffer = t.getEngine()._getUseSRGBBuffer(!0, t.generateMipMaps), t._gammaSpace = !0) : t.format = s.glInternalFormat, i(s.pixelWidth, s.pixelHeight, t.generateMipMaps, !0, () => { s.uploadLevels(t, t.generateMipMaps); }, s.isInvalid); } else mh.IsValid(e) ? new mh(t.getEngine()).uploadAsync(e, t, r).then(() => { i(t.width, t.height, t.generateMipMaps, !0, () => { }, !1); }, (n) => { Ce.Warn(`Failed to load KTX2 texture data: ${n.message}`), i(0, 0, !1, !1, () => { }, !0); }) : (Ce.Error("texture missing KTX identifier"), i(0, 0, !1, !1, () => { }, !0)); } } $e._TextureLoaders.unshift(new rre()); class GO extends du { /** * Creates a new webXRCamera, this should only be set at the camera after it has been updated by the xrSessionManager * @param name the name of the camera * @param scene the scene to add the camera to * @param _xrSessionManager a constructed xr session manager */ constructor(e, t, i) { super(e, D.Zero(), t), this._xrSessionManager = i, this._firstFrame = !1, this._referenceQuaternion = Ze.Identity(), this._referencedPosition = new D(), this._trackingState = jR.NOT_TRACKING, this.onXRCameraInitializedObservable = new Fe(), this.onBeforeCameraTeleport = new Fe(), this.onAfterCameraTeleport = new Fe(), this.onTrackingStateChanged = new Fe(), this.compensateOnFirstFrame = !0, this._rotate180 = new Ze(0, 1, 0, 0), this.minZ = 0.1, this.rotationQuaternion = new Ze(), this.cameraRigMode = Ai.RIG_MODE_CUSTOM, this.updateUpVectorFromRotation = !0, this._updateNumberOfRigCameras(1), this.freezeProjectionMatrix(), this._deferOnly = !0, this._xrSessionManager.onXRSessionInit.add(() => { this._referencedPosition.copyFromFloats(0, 0, 0), this._referenceQuaternion.copyFromFloats(0, 0, 0, 1), this._firstFrame = this.compensateOnFirstFrame; }), this._xrSessionManager.onXRFrameObservable.add(() => { this._firstFrame && this._updateFromXRSession(), this.onXRCameraInitializedObservable.hasObservers() && (this.onXRCameraInitializedObservable.notifyObservers(this), this.onXRCameraInitializedObservable.clear()), this._deferredUpdated && (this.position.copyFrom(this._deferredPositionUpdate), this.rotationQuaternion.copyFrom(this._deferredRotationQuaternionUpdate)), this._updateReferenceSpace(), this._updateFromXRSession(); }, void 0, !0); } /** * Get the current XR tracking state of the camera */ get trackingState() { return this._trackingState; } _setTrackingState(e) { this._trackingState !== e && (this._trackingState = e, this.onTrackingStateChanged.notifyObservers(e)); } /** * Return the user's height, unrelated to the current ground. * This will be the y position of this camera, when ground level is 0. */ get realWorldHeight() { const e = this._xrSessionManager.currentFrame && this._xrSessionManager.currentFrame.getViewerPose(this._xrSessionManager.baseReferenceSpace); return e && e.transform ? 
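/*
 * Observer sketch for the WebXRCamera defined here (public name presumed to be BABYLON.WebXRCamera).
 * `xrCamera` is an assumed variable holding an instance; the observables used
 * (onTrackingStateChanged, onXRCameraInitializedObservable) are the ones created in the constructor above.
 *
 *   xrCamera.onTrackingStateChanged.add((state) => {
 *     console.log("XR tracking state changed:", state); // NOT_TRACKING / TRACKING_LOST / TRACKING
 *   });
 *   xrCamera.onXRCameraInitializedObservable.addOnce(() => {
 *     console.log("first XR pose applied, user height:", xrCamera.realWorldHeight);
 *   });
 */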
e.transform.position.y : 0; } /** @internal */ _updateForDualEyeDebugging() { this._updateNumberOfRigCameras(2), this.rigCameras[0].viewport = new Md(0, 0, 0.5, 1), this.rigCameras[0].outputRenderTarget = null, this.rigCameras[1].viewport = new Md(0.5, 0, 0.5, 1), this.rigCameras[1].outputRenderTarget = null; } /** * Sets this camera's transformation based on a non-vr camera * @param otherCamera the non-vr camera to copy the transformation from * @param resetToBaseReferenceSpace should XR reset to the base reference space */ setTransformationFromNonVRCamera(e = this.getScene().activeCamera, t = !0) { if (!e || e === this) return; e.computeWorldMatrix().decompose(void 0, this.rotationQuaternion, this.position), this.position.y = 0, Ze.FromEulerAnglesToRef(0, this.rotationQuaternion.toEulerAngles().y, 0, this.rotationQuaternion), this._firstFrame = !0, t && this._xrSessionManager.resetReferenceSpace(); } /** * Gets the current instance class name ("WebXRCamera"). * @returns the class name */ getClassName() { return "WebXRCamera"; } /** * Set the target for the camera to look at. * Note that this only rotates around the Y axis, as opposed to the default behavior of other cameras * @param target the target to set the camera to look at */ setTarget(e) { const t = de.Vector3[1]; e.subtractToRef(this.position, t), t.y = 0, t.normalize(); const i = Math.atan2(t.x, t.z); this.rotationQuaternion.toEulerAnglesToRef(t), Ze.FromEulerAnglesToRef(t.x, i, t.z, this.rotationQuaternion); } dispose() { super.dispose(), this._lastXRViewerPose = void 0; } _updateFromXRSession() { const e = this._xrSessionManager.currentFrame && this._xrSessionManager.currentFrame.getViewerPose(this._xrSessionManager.referenceSpace); if (this._lastXRViewerPose = e || void 0, !e) { this._setTrackingState(jR.NOT_TRACKING); return; } const t = e.emulatedPosition ? jR.TRACKING_LOST : jR.TRACKING; if (this._setTrackingState(t), this.minZ !== this._cache.minZ || this.maxZ !== this._cache.maxZ) { const i = { // if maxZ is 0 it should be "Infinity", but it doesn't work with the WebXR API. Setting to a large number. depthFar: this.maxZ || 1e4, depthNear: this.minZ }; this._xrSessionManager.updateRenderState(i), this._cache.minZ = this.minZ, this._cache.maxZ = this.maxZ; } if (e.transform) { const i = e.transform.orientation; if (e.transform.orientation.x === void 0) return; const r = e.transform.position; this._referencedPosition.set(r.x, r.y, r.z), this._referenceQuaternion.set(i.x, i.y, i.z, i.w), this._scene.useRightHandedSystem || (this._referencedPosition.z *= -1, this._referenceQuaternion.z *= -1, this._referenceQuaternion.w *= -1), this._firstFrame ? (this._firstFrame = !1, this.position.y += this._referencedPosition.y, this._referenceQuaternion.copyFromFloats(0, 0, 0, 1)) : (this.rotationQuaternion.copyFrom(this._referenceQuaternion), this.position.copyFrom(this._referencedPosition)); } this.rigCameras.length !== e.views.length && this._updateNumberOfRigCameras(e.views.length), e.views.forEach((i, r) => { var s; const n = this.rigCameras[r]; !n.isLeftCamera && !n.isRightCamera && (i.eye === "right" ? n._isRightCamera = !0 : i.eye === "left" && (n._isLeftCamera = !0)); const a = i.transform.position, l = i.transform.orientation; n.parent = this.parent, n.position.set(a.x, a.y, a.z), n.rotationQuaternion.set(l.x, l.y, l.z, l.w), this._scene.useRightHandedSystem ? 
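/*
 * Handedness note (sketch): WebXR poses are expressed in a right-handed space, while a default
 * Babylon scene is left-handed. The conversion used throughout this class is to negate the z
 * component of the position and the z/w components of the orientation quaternion, e.g.
 * (helper name and plain-object inputs are illustrative only):
 *
 *   function xrPoseToLeftHanded(position, orientation) {
 *     return {
 *       position: { x: position.x, y: position.y, z: -position.z },
 *       orientation: { x: orientation.x, y: orientation.y, z: -orientation.z, w: -orientation.w },
 *     };
 *   }
 *
 * When scene.useRightHandedSystem is true no flip is needed; the rig cameras are instead rotated
 * by the 180-degree quaternion (0, 1, 0, 0) stored in _rotate180.
 */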
n.rotationQuaternion.multiplyInPlace(this._rotate180) : (n.position.z *= -1, n.rotationQuaternion.z *= -1, n.rotationQuaternion.w *= -1), Ae.FromFloat32ArrayToRefScaled(i.projectionMatrix, 0, 1, n._projectionMatrix), this._scene.useRightHandedSystem || n._projectionMatrix.toggleProjectionMatrixHandInPlace(), r === 0 && this._projectionMatrix.copyFrom(n._projectionMatrix); const o = this._xrSessionManager.getRenderTargetTextureForView(i); this._renderingMultiview = ((s = o == null ? void 0 : o._texture) === null || s === void 0 ? void 0 : s.isMultiview) || !1, this._renderingMultiview ? r == 0 && (this._xrSessionManager.trySetViewportForView(this.viewport, i), this.outputRenderTarget = o) : (this._xrSessionManager.trySetViewportForView(n.viewport, i), n.outputRenderTarget = o || this._xrSessionManager.getRenderTargetTextureForView(i)), n.layerMask = this.layerMask; }); } _updateNumberOfRigCameras(e = 1) { for (; this.rigCameras.length < e; ) { const t = new Cl("XR-RigCamera: " + this.rigCameras.length, D.Zero(), this.getScene()); t.minZ = 0.1, t.rotationQuaternion = new Ze(), t.updateUpVectorFromRotation = !0, t.isRigCamera = !0, t.rigParent = this, t.freezeProjectionMatrix(), this.rigCameras.push(t); } for (; this.rigCameras.length > e; ) { const t = this.rigCameras.pop(); t && t.dispose(); } } _updateReferenceSpace() { if (!this.position.equals(this._referencedPosition) || !this.rotationQuaternion.equals(this._referenceQuaternion)) { const e = de.Matrix[0], t = de.Matrix[1], i = de.Matrix[2]; Ae.ComposeToRef(GO._ScaleReadOnly, this._referenceQuaternion, this._referencedPosition, e), Ae.ComposeToRef(GO._ScaleReadOnly, this.rotationQuaternion, this.position, t), e.invert().multiplyToRef(t, i), i.invert(), this._scene.useRightHandedSystem || i.toggleModelMatrixHandInPlace(), i.decompose(void 0, this._referenceQuaternion, this._referencedPosition); const r = new XRRigidTransform({ x: this._referencedPosition.x, y: this._referencedPosition.y, z: this._referencedPosition.z }, { x: this._referenceQuaternion.x, y: this._referenceQuaternion.y, z: this._referenceQuaternion.z, w: this._referenceQuaternion.w }); this._xrSessionManager.referenceSpace = this._xrSessionManager.referenceSpace.getOffsetReferenceSpace(r); } } } GO._ScaleReadOnly = D.One(); class SU { /** * Creates a WebXRExperienceHelper * @param _scene The scene the helper should be created in */ constructor(e) { this._scene = e, this._nonVRCamera = null, this._attachedToElement = !1, this._spectatorCamera = null, this._originalSceneAutoClear = !0, this._supported = !1, this._spectatorMode = !1, this._lastTimestamp = 0, this.onInitialXRPoseSetObservable = new Fe(), this.onStateChangedObservable = new Fe(), this.state = lu.NOT_IN_XR, this.sessionManager = new iN(e), this.camera = new GO("webxr", e, this.sessionManager), this.featuresManager = new Oo(this.sessionManager), e.onDisposeObservable.addOnce(() => { this.dispose(); }); } /** * Creates the experience helper * @param scene the scene to attach the experience helper to * @returns a promise for the experience helper */ static CreateAsync(e) { const t = new SU(e); return t.sessionManager.initializeAsync().then(() => (t._supported = !0, t)).catch((i) => { throw t._setState(lu.NOT_IN_XR), t.dispose(), i; }); } /** * Disposes of the experience helper */ dispose() { var e; this.exitXRAsync(), this.camera.dispose(), this.onStateChangedObservable.clear(), this.onInitialXRPoseSetObservable.clear(), this.sessionManager.dispose(), (e = this._spectatorCamera) === null || e === void 0 || 
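/*
 * End-to-end usage sketch for the experience helper declared above (public name presumed to be
 * BABYLON.WebXRExperienceHelper). `scene` and `xrButton` are assumed to exist; the session mode and
 * reference space strings are standard WebXR values.
 *
 *   const xrHelper = await WebXRExperienceHelper.CreateAsync(scene);
 *   xrButton.onclick = async () => {
 *     // must run inside a user gesture in most browsers
 *     await xrHelper.enterXRAsync("immersive-vr", "local-floor");
 *   };
 *   // later, to leave XR:
 *   await xrHelper.exitXRAsync();
 *
 * enterXRAsync also accepts an explicit render target and an XRSessionInit object as the third and
 * fourth arguments, as described in the JSDoc that follows.
 */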
e.dispose(), this._nonVRCamera && (this._scene.activeCamera = this._nonVRCamera); } /** * Enters XR mode (This must be done within a user interaction in most browsers eg. button click) * @param sessionMode options for the XR session * @param referenceSpaceType frame of reference of the XR session * @param renderTarget the output canvas that will be used to enter XR mode * @param sessionCreationOptions optional XRSessionInit object to init the session with * @returns promise that resolves after xr mode has entered */ async enterXRAsync(e, t, i = this.sessionManager.getWebXRRenderTarget(), r = {}) { var s, n, a; if (!this._supported) throw "WebXR not supported in this browser or environment"; this._setState(lu.ENTERING_XR), t !== "viewer" && t !== "local" && (r.optionalFeatures = r.optionalFeatures || [], r.optionalFeatures.push(t)), r = await this.featuresManager._extendXRSessionInitObject(r), e === "immersive-ar" && t !== "unbounded" && Ce.Warn("We recommend using 'unbounded' reference space type when using 'immersive-ar' session mode"); try { await this.sessionManager.initializeSessionAsync(e, r), await this.sessionManager.setReferenceSpaceTypeAsync(t); const l = await i.initializeXRLayerAsync(this.sessionManager.session), o = { // if maxZ is 0 it should be "Infinity", but it doesn't work with the WebXR API. Setting to a large number. depthFar: this.camera.maxZ || 1e4, depthNear: this.camera.minZ }; return this.featuresManager.getEnabledFeature(Qs.LAYERS) || (o.baseLayer = l), this.sessionManager.updateRenderState(o), this.sessionManager.runXRRenderLoop(), this._originalSceneAutoClear = this._scene.autoClear, this._nonVRCamera = this._scene.activeCamera, this._attachedToElement = !!(!((n = (s = this._nonVRCamera) === null || s === void 0 ? void 0 : s.inputs) === null || n === void 0) && n.attachedToElement), (a = this._nonVRCamera) === null || a === void 0 || a.detachControl(), this._scene.activeCamera = this.camera, e !== "immersive-ar" ? this._nonXRToXRCamera() : (this._scene.autoClear = !1, this.camera.compensateOnFirstFrame = !1, this.camera.position.set(0, 0, 0), this.camera.rotationQuaternion.set(0, 0, 0, 1), this.onInitialXRPoseSetObservable.notifyObservers(this.camera)), this.sessionManager.onXRSessionEnded.addOnce(() => { this.state !== lu.EXITING_XR && this._setState(lu.EXITING_XR), this.camera.rigCameras.forEach((u) => { u.outputRenderTarget = null; }), this._scene.autoClear = this._originalSceneAutoClear, this._scene.activeCamera = this._nonVRCamera, this._attachedToElement && this._nonVRCamera && this._nonVRCamera.attachControl(!!this._nonVRCamera.inputs.noPreventDefault), e !== "immersive-ar" && this.camera.compensateOnFirstFrame && (this._nonVRCamera.setPosition ? this._nonVRCamera.setPosition(this.camera.position) : this._nonVRCamera.position.copyFrom(this.camera.position)), this._setState(lu.NOT_IN_XR); }), this.sessionManager.onXRFrameObservable.addOnce(() => { this._setState(lu.IN_XR); }), this.sessionManager; } catch (l) { throw Ce.Log(l), Ce.Log(l.message), this._setState(lu.NOT_IN_XR), l; } } /** * Exits XR mode and returns the scene to its original state * @returns promise that resolves after xr mode has exited */ exitXRAsync() { return this.state !== lu.IN_XR ? Promise.resolve() : (this._setState(lu.EXITING_XR), this.sessionManager.exitXRAsync()); } /** * Enable spectator mode for desktop VR experiences. * When spectator mode is enabled a camera will be attached to the desktop canvas and will * display the first rig camera's view on the desktop canvas. 
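* A call sketch (option names taken from the implementation below; `xrHelper` is an assumed instance):
*
*   xrHelper.enableSpectatorMode({ fps: 30, preferredCameraIndex: 0 });
*
* Omitting `fps` leaves the spectator copy at its default of 1000 updates per second (effectively
* every frame), and `preferredCameraIndex` defaults to the first rig camera.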
* Please note that this will degrade performance, as it requires another camera render. * It is also not recommended to enable this in devices like the quest, as it brings no benefit there. * @param options giving WebXRSpectatorModeOption for specutator camera to setup when the spectator mode is enabled. */ enableSpectatorMode(e) { this._spectatorMode || (this._spectatorMode = !0, this._switchSpectatorMode(e)); } /** * Disable spectator mode for desktop VR experiences. */ disableSpecatatorMode() { this._spectatorMode && (this._spectatorMode = !1, this._switchSpectatorMode()); } _switchSpectatorMode(e) { const i = 1 / (e != null && e.fps ? e.fps : 1e3) * 1e3, r = e != null && e.preferredCameraIndex ? e == null ? void 0 : e.preferredCameraIndex : 0, s = () => { this._spectatorCamera && this.sessionManager.currentTimestamp - this._lastTimestamp >= i && (this._lastTimestamp = this.sessionManager.currentTimestamp, this._spectatorCamera.position.copyFrom(this.camera.rigCameras[r].globalPosition), this._spectatorCamera.rotationQuaternion.copyFrom(this.camera.rigCameras[r].absoluteRotation)); }; if (this._spectatorMode) { if (r >= this.camera.rigCameras.length) throw new Error("the preferred camera index is beyond the length of rig camera array."); const n = () => { this.state === lu.IN_XR ? (this._spectatorCamera = new x5("webxr-spectator", D.Zero(), this._scene), this._spectatorCamera.rotationQuaternion = new Ze(), this._scene.activeCameras = [this.camera, this._spectatorCamera], this.sessionManager.onXRFrameObservable.add(s), this._scene.onAfterRenderCameraObservable.add((a) => { a === this.camera && (this._scene.getEngine().framebufferDimensionsObject = null); })) : this.state === lu.EXITING_XR && (this.sessionManager.onXRFrameObservable.removeCallback(s), this._scene.activeCameras = null); }; this.onStateChangedObservable.add(n), n(); } else this.sessionManager.onXRFrameObservable.removeCallback(s), this._scene.activeCameras = [this.camera]; } _nonXRToXRCamera() { this.camera.setTransformationFromNonVRCamera(this._nonVRCamera), this.onInitialXRPoseSetObservable.notifyObservers(this.camera); } _setState(e) { this.state !== e && (this.state = e, this.onStateChangedObservable.notifyObservers(this.state)); } } class j_ { /** * Creates a new component for a motion controller. * It is created by the motion controller itself * * @param id the id of this component * @param type the type of the component * @param _buttonIndex index in the buttons array of the gamepad * @param _axesIndices indices of the values in the axes array of the gamepad */ constructor(e, t, i = -1, r = []) { this.id = e, this.type = t, this._buttonIndex = i, this._axesIndices = r, this._axes = { x: 0, y: 0 }, this._changes = {}, this._currentValue = 0, this._hasChanges = !1, this._pressed = !1, this._touched = !1, this.onAxisValueChangedObservable = new Fe(), this.onButtonStateChangedObservable = new Fe(); } /** * The current axes data. If this component has no axes it will still return an object { x: 0, y: 0 } */ get axes() { return this._axes; } /** * Get the changes. 
Elements will be populated only if they changed with their previous and current value */ get changes() { return this._changes; } /** * Return whether or not the component changed the last frame */ get hasChanges() { return this._hasChanges; } /** * is the button currently pressed */ get pressed() { return this._pressed; } /** * is the button currently touched */ get touched() { return this._touched; } /** * Get the current value of this component */ get value() { return this._currentValue; } /** * Dispose this component */ dispose() { this.onAxisValueChangedObservable.clear(), this.onButtonStateChangedObservable.clear(); } /** * Are there axes correlating to this component * @returns true is axes data is available */ isAxes() { return this._axesIndices.length !== 0; } /** * Is this component a button (hence - pressable) * @returns true if can be pressed */ isButton() { return this._buttonIndex !== -1; } /** * update this component using the gamepad object it is in. Called on every frame * @param nativeController the native gamepad controller object */ update(e) { let t = !1, i = !1; if (this._hasChanges = !1, this._changes = {}, this.isButton()) { const r = e.buttons[this._buttonIndex]; if (!r) return; this._currentValue !== r.value && (this.changes.value = { current: r.value, previous: this._currentValue }, t = !0, this._currentValue = r.value), this._touched !== r.touched && (this.changes.touched = { current: r.touched, previous: this._touched }, t = !0, this._touched = r.touched), this._pressed !== r.pressed && (this.changes.pressed = { current: r.pressed, previous: this._pressed }, t = !0, this._pressed = r.pressed); } this.isAxes() && (this._axes.x !== e.axes[this._axesIndices[0]] && (this.changes.axes = { current: { x: e.axes[this._axesIndices[0]], y: this._axes.y }, previous: { x: this._axes.x, y: this._axes.y } }, this._axes.x = e.axes[this._axesIndices[0]], i = !0), this._axes.y !== e.axes[this._axesIndices[1]] && (this.changes.axes ? 
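/*
 * Listener sketch for the motion-controller component above (public name presumed to be
 * WebXRControllerComponent). `component` is an assumed reference obtained from a motion controller;
 * the observables and the shape of `changes` match the code in this class.
 *
 *   component.onButtonStateChangedObservable.add((c) => {
 *     if (c.changes.pressed) {
 *       console.log("pressed went from", c.changes.pressed.previous, "to", c.changes.pressed.current);
 *     }
 *   });
 *   component.onAxisValueChangedObservable.add((axes) => {
 *     console.log("thumbstick/touchpad at", axes.x, axes.y);
 *   });
 */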
this.changes.axes.current.y = e.axes[this._axesIndices[1]] : this.changes.axes = { current: { x: this._axes.x, y: e.axes[this._axesIndices[1]] }, previous: { x: this._axes.x, y: this._axes.y } }, this._axes.y = e.axes[this._axesIndices[1]], i = !0)), t && (this._hasChanges = !0, this.onButtonStateChangedObservable.notifyObservers(this)), i && (this._hasChanges = !0, this.onAxisValueChangedObservable.notifyObservers(this._axes)); } } j_.BUTTON_TYPE = "button"; j_.SQUEEZE_TYPE = "squeeze"; j_.THUMBSTICK_TYPE = "thumbstick"; j_.TOUCHPAD_TYPE = "touchpad"; j_.TRIGGER_TYPE = "trigger"; class KI { /** * constructs a new abstract motion controller * @param scene the scene to which the model of the controller will be added * @param layout The profile layout to load * @param gamepadObject The gamepad object correlating to this controller * @param handedness handedness (left/right/none) of this controller * @param _doNotLoadControllerMesh set this flag to ignore the mesh loading * @param _controllerCache a cache holding controller models already loaded in this session */ constructor(e, t, i, r, s = !1, n) { this.scene = e, this.layout = t, this.gamepadObject = i, this.handedness = r, this._doNotLoadControllerMesh = s, this._controllerCache = n, this._initComponent = (a) => { if (!a) return; const l = this.layout.components[a], o = l.type, u = l.gamepadIndices.button, h = []; l.gamepadIndices.xAxis !== void 0 && l.gamepadIndices.yAxis !== void 0 && h.push(l.gamepadIndices.xAxis, l.gamepadIndices.yAxis), this.components[a] = new j_(a, o, u, h); }, this._modelReady = !1, this.components = {}, this.disableAnimation = !1, this.onModelLoadedObservable = new Fe(), t.components && Object.keys(t.components).forEach(this._initComponent); } /** * Dispose this controller, the model mesh and all its components */ dispose() { this.getComponentIds().forEach((e) => this.getComponent(e).dispose()), this.rootMesh && (this.rootMesh.getChildren(void 0, !0).forEach((e) => { e.setEnabled(!1); }), this.rootMesh.dispose(!!this._controllerCache, !this._controllerCache)); } /** * Returns all components of specific type * @param type the type to search for * @returns an array of components with this type */ getAllComponentsOfType(e) { return this.getComponentIds().map((t) => this.components[t]).filter((t) => t.type === e); } /** * get a component based an its component id as defined in layout.components * @param id the id of the component * @returns the component correlates to the id or undefined if not found */ getComponent(e) { return this.components[e]; } /** * Get the list of components available in this motion controller * @returns an array of strings correlating to available components */ getComponentIds() { return Object.keys(this.components); } /** * Get the first component of specific type * @param type type of component to find * @returns a controller component or null if not found */ getComponentOfType(e) { return this.getAllComponentsOfType(e)[0] || null; } /** * Get the main (Select) component of this controller as defined in the layout * @returns the main component of this controller */ getMainComponent() { return this.getComponent(this.layout.selectComponentId); } /** * Loads the model correlating to this controller * When the mesh is loaded, the onModelLoadedObservable will be triggered * @returns A promise fulfilled with the result of the model loading */ async loadModel() { const e = !this._getModelLoadingConstraints(); let t = this._getGenericFilenameAndPath(); return e ? 
Ce.Warn("Falling back to generic models") : t = this._getFilenameAndPath(), new Promise((i, r) => { const s = (n) => { e ? this._getGenericParentMesh(n) : this._setRootMesh(n), this._processLoadedModel(n), this._modelReady = !0, this.onModelLoadedObservable.notifyObservers(this), i(!0); }; if (this._controllerCache) { const n = this._controllerCache.filter((a) => a.filename === t.filename && a.path === t.path); if (n[0]) { n[0].meshes.forEach((a) => a.setEnabled(!0)), s(n[0].meshes); return; } } fr.ImportMesh("", t.path, t.filename, this.scene, (n) => { this._controllerCache && this._controllerCache.push(Object.assign(Object.assign({}, t), { meshes: n })), s(n); }, null, (n, a) => { Ce.Log(a), Ce.Warn(`Failed to retrieve controller model of type ${this.profileId} from the remote server: ${t.path}${t.filename}`), r(a); }); }); } /** * Update this model using the current XRFrame * @param xrFrame the current xr frame to use and update the model */ updateFromXRFrame(e) { this.getComponentIds().forEach((t) => this.getComponent(t).update(this.gamepadObject)), this.updateModel(e); } /** * Backwards compatibility due to a deeply-integrated typo */ get handness() { return this.handedness; } /** * Pulse (vibrate) this controller * If the controller does not support pulses, this function will fail silently and return Promise directly after called * Consecutive calls to this function will cancel the last pulse call * * @param value the strength of the pulse in 0.0...1.0 range * @param duration Duration of the pulse in milliseconds * @param hapticActuatorIndex optional index of actuator (will usually be 0) * @returns a promise that will send true when the pulse has ended and false if the device doesn't support pulse or an error accrued */ pulse(e, t, i = 0) { return this.gamepadObject.hapticActuators && this.gamepadObject.hapticActuators[i] ? this.gamepadObject.hapticActuators[i].pulse(e, t) : Promise.resolve(!1); } // Look through all children recursively. This will return null if no mesh exists with the given name. _getChildByName(e, t) { return e.getChildren((i) => i.name === t, !1)[0]; } // Look through only immediate children. This will return null if no mesh exists with the given name. _getImmediateChildByName(e, t) { return e.getChildren((i) => i.name == t, !0)[0]; } /** * Moves the axis on the controller mesh based on its current state * @param axisMap * @param axisValue the value of the axis which determines the meshes new position * @internal */ _lerpTransform(e, t, i) { if (!e.minMesh || !e.maxMesh || !e.valueMesh || !e.minMesh.rotationQuaternion || !e.maxMesh.rotationQuaternion || !e.valueMesh.rotationQuaternion) return; const r = i ? 
t * 0.5 + 0.5 : t; Ze.SlerpToRef(e.minMesh.rotationQuaternion, e.maxMesh.rotationQuaternion, r, e.valueMesh.rotationQuaternion), D.LerpToRef(e.minMesh.position, e.maxMesh.position, r, e.valueMesh.position); } /** * Update the model itself with the current frame data * @param xrFrame the frame to use for updating the model mesh */ // eslint-disable-next-line @typescript-eslint/naming-convention updateModel(e) { this._modelReady && this._updateModel(e); } _getGenericFilenameAndPath() { return { filename: "generic.babylon", path: "https://controllers.babylonjs.com/generic/" }; } _getGenericParentMesh(e) { this.rootMesh = new ke(this.profileId + " " + this.handedness, this.scene), e.forEach((t) => { t.parent || (t.isPickable = !1, t.setParent(this.rootMesh)); }), this.rootMesh.rotationQuaternion = Ze.FromEulerAngles(0, Math.PI, 0); } } class KO extends KI { constructor(e, t, i) { super(e, _2e[i], t, i), this.profileId = KO.ProfileId; } _getFilenameAndPath() { return { filename: "generic.babylon", path: "https://controllers.babylonjs.com/generic/" }; } _getModelLoadingConstraints() { return !0; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _processLoadedModel(e) { } _setRootMesh(e) { this.rootMesh = new ke(this.profileId + " " + this.handedness, this.scene), e.forEach((t) => { t.isPickable = !1, t.parent || t.setParent(this.rootMesh); }), this.rootMesh.rotationQuaternion = Ze.FromEulerAngles(0, Math.PI, 0); } _updateModel() { } } KO.ProfileId = "generic-trigger"; const _2e = { left: { selectComponentId: "xr-standard-trigger", components: { // eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-trigger-left", assetPath: "left.glb" }, right: { selectComponentId: "xr-standard-trigger", components: { // eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-trigger-right", assetPath: "right.glb" }, none: { selectComponentId: "xr-standard-trigger", components: { // eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-trigger-none", assetPath: "none.glb" } }; class sre extends KI { constructor(e, t, i, r, s) { super(e, i.layouts[t.handedness || "none"], t.gamepad, t.handedness, void 0, s), this._repositoryUrl = r, this.controllerCache = s, this._buttonMeshMapping = {}, this._touchDots = {}, this.profileId = i.profileId; } dispose() { super.dispose(), this.controllerCache || Object.keys(this._touchDots).forEach((e) => { this._touchDots[e].dispose(); }); } _getFilenameAndPath() { return { filename: this.layout.assetPath, path: `${this._repositoryUrl}/profiles/${this.profileId}/` }; } _getModelLoadingConstraints() { const e = fr.IsPluginForExtensionAvailable(".glb"); return e || Ce.Warn("glTF / glb loader was not registered, using generic controller instead"), e; } _processLoadedModel(e) { this.getComponentIds().forEach((t) => { const i = this.layout.components[t]; this._buttonMeshMapping[t] = { mainMesh: this._getChildByName(this.rootMesh, i.rootNodeName), states: {} }, 
Object.keys(i.visualResponses).forEach((r) => { const s = i.visualResponses[r]; if (s.valueNodeProperty === "transform") this._buttonMeshMapping[t].states[r] = { valueMesh: this._getChildByName(this.rootMesh, s.valueNodeName), minMesh: this._getChildByName(this.rootMesh, s.minNodeName), maxMesh: this._getChildByName(this.rootMesh, s.maxNodeName) }; else { const n = i.type === j_.TOUCHPAD_TYPE && i.touchPointNodeName ? i.touchPointNodeName : s.valueNodeName; if (this._buttonMeshMapping[t].states[r] = { valueMesh: this._getChildByName(this.rootMesh, n) }, i.type === j_.TOUCHPAD_TYPE && !this._touchDots[r]) { const a = Rd(r + "dot", { diameter: 15e-4, segments: 8 }, this.scene); a.material = new Dt(r + "mat", this.scene), a.material.diffuseColor = ze.Red(), a.parent = this._buttonMeshMapping[t].states[r].valueMesh || null, a.isVisible = !1, this._touchDots[r] = a; } } }); }); } _setRootMesh(e) { this.rootMesh = new ke(this.profileId + "-" + this.handedness, this.scene), this.rootMesh.isPickable = !1; let t; for (let i = 0; i < e.length; i++) { const r = e[i]; r.isPickable = !1, r.parent || (t = r); } t && t.setParent(this.rootMesh), this.scene.useRightHandedSystem || this.rootMesh.rotate(bl.Y, Math.PI, qr.WORLD); } _updateModel(e) { this.disableAnimation || this.getComponentIds().forEach((t) => { const i = this.getComponent(t); if (!i.hasChanges) return; const r = this._buttonMeshMapping[t], s = this.layout.components[t]; Object.keys(s.visualResponses).forEach((n) => { const a = s.visualResponses[n]; let l = i.value; if (a.componentProperty === "xAxis" ? l = i.axes.x : a.componentProperty === "yAxis" && (l = i.axes.y), a.valueNodeProperty === "transform") this._lerpTransform(r.states[n], l, a.componentProperty !== "button"); else { const o = r.states[n].valueMesh; o && (o.isVisible = i.touched || i.pressed), this._touchDots[n] && (this._touchDots[n].isVisible = i.touched || i.pressed); } }); }); } } const Qk = []; class Vu { /** * Clear the cache used for profile loading and reload when requested again */ static ClearProfilesCache() { this._ProfilesList = null, this._ProfileLoadingPromises = {}; } /** * Register the default fallbacks. * This function is called automatically when this file is imported. 
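* A registration sketch for adding fallbacks of your own (the profile ids are illustrative; the call
* is the same RegisterFallbacksForProfileId used by this method, and WebXRMotionControllerManager is
* presumed to be the public name of this minified class):
*
*   WebXRMotionControllerManager.RegisterFallbacksForProfileId("my-custom-profile", ["generic-trigger"]);
*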
*/ static DefaultFallbacks() { this.RegisterFallbacksForProfileId("google-daydream", ["generic-touchpad"]), this.RegisterFallbacksForProfileId("htc-vive-focus", ["generic-trigger-touchpad"]), this.RegisterFallbacksForProfileId("htc-vive", ["generic-trigger-squeeze-touchpad"]), this.RegisterFallbacksForProfileId("magicleap-one", ["generic-trigger-squeeze-touchpad"]), this.RegisterFallbacksForProfileId("windows-mixed-reality", ["generic-trigger-squeeze-touchpad-thumbstick"]), this.RegisterFallbacksForProfileId("microsoft-mixed-reality", ["windows-mixed-reality", "generic-trigger-squeeze-touchpad-thumbstick"]), this.RegisterFallbacksForProfileId("oculus-go", ["generic-trigger-touchpad"]), this.RegisterFallbacksForProfileId("oculus-touch-v2", ["oculus-touch", "generic-trigger-squeeze-thumbstick"]), this.RegisterFallbacksForProfileId("oculus-touch", ["generic-trigger-squeeze-thumbstick"]), this.RegisterFallbacksForProfileId("samsung-gearvr", ["windows-mixed-reality", "generic-trigger-squeeze-touchpad-thumbstick"]), this.RegisterFallbacksForProfileId("samsung-odyssey", ["generic-touchpad"]), this.RegisterFallbacksForProfileId("valve-index", ["generic-trigger-squeeze-touchpad-thumbstick"]), this.RegisterFallbacksForProfileId("generic-hand-select", ["generic-trigger"]); } /** * Find a fallback profile if the profile was not found. There are a few predefined generic profiles. * @param profileId the profile to which a fallback needs to be found * @returns an array with corresponding fallback profiles */ static FindFallbackWithProfileId(e) { const t = this._Fallbacks[e] || []; return t.unshift(e), t; } /** * When acquiring a new xrInput object (usually by the WebXRInput class), match it with the correct profile. * The order of search: * * 1) Iterate the profiles array of the xr input and try finding a corresponding motion controller * 2) (If not found) search in the gamepad id and try using it (legacy versions only) * 3) search for registered fallbacks (should be redundant, nonetheless it makes sense to check) * 4) return the generic trigger controller if none were found * * @param xrInput the xrInput to which a new controller is initialized * @param scene the scene to which the model will be added * @param forceProfile force a certain profile for this controller * @returns A promise that fulfils with the motion controller class for this profile id or the generic standard class if none was found */ static GetMotionControllerWithXRInput(e, t, i) { const r = []; if (i && r.push(i), r.push(...e.profiles || []), r.length && !r[0] && r.pop(), e.gamepad && e.gamepad.id) switch (e.gamepad.id) { case (e.gamepad.id.match(/oculus touch/gi) ? e.gamepad.id : void 0): r.push("oculus-touch-v2"); break; } const s = r.indexOf("windows-mixed-reality"); if (s !== -1 && r.splice(s, 0, "microsoft-mixed-reality"), r.length || r.push("generic-trigger"), this.UseOnlineRepository) { const n = this.PrioritizeOnlineRepository ? this._LoadProfileFromRepository : this._LoadProfilesFromAvailableControllers, a = this.PrioritizeOnlineRepository ? this._LoadProfilesFromAvailableControllers : this._LoadProfileFromRepository; return n.call(this, r, e, t).catch(() => a.call(this, r, e, t)); } else return this._LoadProfilesFromAvailableControllers(r, e, t); } /** * Register a new controller based on its profile. This function will be called by the controller classes themselves. * * If you are missing a profile, make sure it is imported in your source, otherwise it will not register. 
* * @param type the profile type to register * @param constructFunction the function to be called when loading this profile */ static RegisterController(e, t) { this._AvailableControllers[e] = t; } /** * Register a fallback to a specific profile. * @param profileId the profileId that will receive the fallbacks * @param fallbacks A list of fallback profiles */ static RegisterFallbacksForProfileId(e, t) { this._Fallbacks[e] ? this._Fallbacks[e].push(...t) : this._Fallbacks[e] = t; } /** * Will update the list of profiles available in the repository * @returns a promise that resolves to a map of profiles available online */ static UpdateProfilesList() { return this._ProfilesList = Ve.LoadFileAsync(this.BaseRepositoryUrl + "/profiles/profilesList.json", !1).then((e) => JSON.parse(e)), this._ProfilesList; } /** * Clear the controller's cache (usually happens at the end of a session) */ static ClearControllerCache() { Qk.forEach((e) => { e.meshes.forEach((t) => { t.dispose(!1, !0); }); }), Qk.length = 0; } static _LoadProfileFromRepository(e, t, i) { return Promise.resolve().then(() => this._ProfilesList ? this._ProfilesList : this.UpdateProfilesList()).then((r) => { for (let s = 0; s < e.length; ++s) if (e[s] && r[e[s]]) return e[s]; throw new Error(`neither controller ${e[0]} nor all fallbacks were found in the repository,`); }).then((r) => (this._ProfileLoadingPromises[r] || (this._ProfileLoadingPromises[r] = Ve.LoadFileAsync(`${this.BaseRepositoryUrl}/profiles/${r}/profile.json`, !1).then((s) => JSON.parse(s))), this._ProfileLoadingPromises[r])).then((r) => new sre(i, t, r, this.BaseRepositoryUrl, this.DisableControllerCache ? void 0 : Qk)); } static _LoadProfilesFromAvailableControllers(e, t, i) { for (let r = 0; r < e.length; ++r) { if (!e[r]) continue; const s = this.FindFallbackWithProfileId(e[r]); for (let n = 0; n < s.length; ++n) { const a = this._AvailableControllers[s[n]]; if (a) return Promise.resolve(a(t, i)); } } throw new Error("no controller requested was found in the available controllers list"); } } Vu._AvailableControllers = {}; Vu._Fallbacks = {}; Vu._ProfileLoadingPromises = {}; Vu.BaseRepositoryUrl = "https://immersive-web.github.io/webxr-input-profiles/packages/viewer/dist"; Vu.PrioritizeOnlineRepository = !0; Vu.UseOnlineRepository = !0; Vu.DisableControllerCache = !0; Vu.RegisterController(KO.ProfileId, (c, e) => new KO(e, c.gamepad, c.handedness)); Vu.DefaultFallbacks(); let m2e = 0; class nre { /** * Creates the input source object * @see https://doc.babylonjs.com/features/featuresDeepDive/webXR/webXRInputControllerSupport * @param _scene the scene which the controller should be associated to * @param inputSource the underlying input source for the controller * @param _options options for this controller creation */ constructor(e, t, i = {}) { this._scene = e, this.inputSource = t, this._options = i, this._tmpVector = new D(), this._disposed = !1, this.onDisposeObservable = new Fe(), this.onMeshLoadedObservable = new Fe(), this.onMotionControllerInitObservable = new Fe(), this._uniqueId = `controller-${m2e++}-${t.targetRayMode}-${t.handedness}`, this.pointer = new xr(`${this._uniqueId}-pointer`, e), this.pointer.rotationQuaternion = new Ze(), this.inputSource.gripSpace && (this.grip = new xr(`${this._uniqueId}-grip`, this._scene), this.grip.rotationQuaternion = new Ze()), this._tmpVector.set(0, 0, this._scene.useRightHandedSystem ? 
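/*
 * Configuration sketch for the repository statics assigned just above (public class name presumed
 * to be WebXRMotionControllerManager). Useful for fully offline builds; the local URL is a
 * placeholder assumption.
 *
 *   WebXRMotionControllerManager.BaseRepositoryUrl = "/webxr-input-profiles"; // self-hosted copy
 *   WebXRMotionControllerManager.UseOnlineRepository = false;                 // skip remote profile lookups
 *   WebXRMotionControllerManager.PrioritizeOnlineRepository = false;          // prefer locally registered controllers
 *   WebXRMotionControllerManager.DisableControllerCache = false;              // reuse loaded controller meshes
 */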
-1 : 1), this.inputSource.gamepad && this.inputSource.targetRayMode === "tracked-pointer" && Vu.GetMotionControllerWithXRInput(t, e, this._options.forceControllerProfile).then((r) => { this.motionController = r, this.onMotionControllerInitObservable.notifyObservers(r), !this._options.doNotLoadControllerMesh && !this.motionController._doNotLoadControllerMesh && this.motionController.loadModel().then((s) => { var n; s && this.motionController && this.motionController.rootMesh && (this._options.renderingGroupId && (this.motionController.rootMesh.renderingGroupId = this._options.renderingGroupId, this.motionController.rootMesh.getChildMeshes(!1).forEach((a) => a.renderingGroupId = this._options.renderingGroupId)), this.onMeshLoadedObservable.notifyObservers(this.motionController.rootMesh), this.motionController.rootMesh.parent = this.grip || this.pointer, this.motionController.disableAnimation = !!this._options.disableMotionControllerAnimation), this._disposed && ((n = this.motionController) === null || n === void 0 || n.dispose()); }); }, () => { Ve.Warn("Could not find a matching motion controller for the registered input source"); }); } /** * Get this controllers unique id */ get uniqueId() { return this._uniqueId; } /** * Disposes of the object */ dispose() { this.grip && this.grip.dispose(!0), this.motionController && this.motionController.dispose(), this.pointer.dispose(!0), this.onMotionControllerInitObservable.clear(), this.onMeshLoadedObservable.clear(), this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this._disposed = !0; } /** * Gets a world space ray coming from the pointer or grip * @param result the resulting ray * @param gripIfAvailable use the grip mesh instead of the pointer, if available */ getWorldPointerRayToRef(e, t = !1) { const i = t && this.grip ? this.grip : this.pointer; D.TransformNormalToRef(this._tmpVector, i.getWorldMatrix(), e.direction), e.direction.normalize(), e.origin.copyFrom(i.absolutePosition), e.length = 1e3; } /** * Updates the controller pose based on the given XRFrame * @param xrFrame xr frame to update the pose with * @param referenceSpace reference space to use * @param xrCamera the xr camera, used for parenting */ updateFromXRFrame(e, t, i) { const r = e.getPose(this.inputSource.targetRaySpace, t); if (this._lastXRPose = r, r) { const s = r.transform.position; this.pointer.position.set(s.x, s.y, s.z); const n = r.transform.orientation; this.pointer.rotationQuaternion.set(n.x, n.y, n.z, n.w), this._scene.useRightHandedSystem || (this.pointer.position.z *= -1, this.pointer.rotationQuaternion.z *= -1, this.pointer.rotationQuaternion.w *= -1), this.pointer.parent = i.parent; } if (this.inputSource.gripSpace && this.grip) { const s = e.getPose(this.inputSource.gripSpace, t); if (s) { const n = s.transform.position, a = s.transform.orientation; this.grip.position.set(n.x, n.y, n.z), this.grip.rotationQuaternion.set(a.x, a.y, a.z, a.w), this._scene.useRightHandedSystem || (this.grip.position.z *= -1, this.grip.rotationQuaternion.z *= -1, this.grip.rotationQuaternion.w *= -1); } this.grip.parent = i.parent; } this.motionController && this.motionController.updateFromXRFrame(e); } } class are { /** * Initializes the WebXRInput * @param xrSessionManager the xr session manager for this session * @param xrCamera the WebXR camera for this session. 
Mainly used for teleportation * @param _options = initialization options for this xr input */ constructor(e, t, i = {}) { if (this.xrSessionManager = e, this.xrCamera = t, this._options = i, this.controllers = [], this.onControllerAddedObservable = new Fe(), this.onControllerRemovedObservable = new Fe(), this._onInputSourcesChange = (r) => { this._addAndRemoveControllers(r.added, r.removed); }, this._sessionEndedObserver = this.xrSessionManager.onXRSessionEnded.add(() => { this._addAndRemoveControllers([], this.controllers.map((r) => r.inputSource)); }), this._sessionInitObserver = this.xrSessionManager.onXRSessionInit.add((r) => { r.addEventListener("inputsourceschange", this._onInputSourcesChange); }), this._frameObserver = this.xrSessionManager.onXRFrameObservable.add((r) => { this.controllers.forEach((s) => { s.updateFromXRFrame(r, this.xrSessionManager.referenceSpace, this.xrCamera); }); }), this._options.customControllersRepositoryURL && (Vu.BaseRepositoryUrl = this._options.customControllersRepositoryURL), Vu.UseOnlineRepository = !this._options.disableOnlineControllerRepository, Vu.UseOnlineRepository) try { Vu.UpdateProfilesList().catch(() => { Vu.UseOnlineRepository = !1; }); } catch { Vu.UseOnlineRepository = !1; } } _addAndRemoveControllers(e, t) { const i = this.controllers.map((n) => n.inputSource); for (const n of e) if (i.indexOf(n) === -1) { const a = new nre(this.xrSessionManager.scene, n, Object.assign(Object.assign({}, this._options.controllerOptions || {}), { forceControllerProfile: this._options.forceInputProfile, doNotLoadControllerMesh: this._options.doNotLoadControllerMeshes, disableMotionControllerAnimation: this._options.disableControllerAnimation })); this.controllers.push(a), this.onControllerAddedObservable.notifyObservers(a); } const r = [], s = []; this.controllers.forEach((n) => { t.indexOf(n.inputSource) === -1 ? r.push(n) : s.push(n); }), this.controllers = r, s.forEach((n) => { this.onControllerRemovedObservable.notifyObservers(n), n.dispose(); }); } /** * Disposes of the object */ dispose() { this.controllers.forEach((e) => { e.dispose(); }), this.xrSessionManager.onXRFrameObservable.remove(this._frameObserver), this.xrSessionManager.onXRSessionInit.remove(this._sessionInitObserver), this.xrSessionManager.onXRSessionEnded.remove(this._sessionEndedObserver), this.onControllerAddedObservable.clear(), this.onControllerRemovedObservable.clear(), Vu.ClearControllerCache(); } } class i6 extends Ku { /** * constructs a new background remover module * @param _xrSessionManager the session manager for this module * @param _options read-only options to be used in this module */ constructor(e, t) { super(e), this._options = t, this._attachController = (i) => { if (this._controllers[i.uniqueId]) return; const { laserPointer: r, selectionMesh: s } = this._generateNewMeshPair(i.pointer); switch (this._controllers[i.uniqueId] = { xrController: i, laserPointer: r, selectionMesh: s, meshUnderPointer: null, pick: null, tmpRay: new gs(new D(), new D()), disabledByNearInteraction: !1, id: i6._IdCounter++ }, this._attachedController ? 
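/*
 * Lifecycle sketch for the WebXRInput / input-source classes above. `xrHelper` is assumed to be a
 * helper object exposing an `input` of the type defined here (that wiring is an assumption; the
 * observables themselves are the ones created in these classes).
 *
 *   xrHelper.input.onControllerAddedObservable.add((source) => {
 *     source.onMotionControllerInitObservable.add((motionController) => {
 *       console.log("controller ready:", source.uniqueId, motionController.profileId);
 *     });
 *   });
 *   xrHelper.input.onControllerRemovedObservable.add((source) => {
 *     console.log("controller removed:", source.uniqueId);
 *   });
 */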
!this._options.enablePointerSelectionOnAllControllers && this._options.preferredHandedness && i.inputSource.handedness === this._options.preferredHandedness && (this._attachedController = i.uniqueId) : this._options.enablePointerSelectionOnAllControllers || (this._attachedController = i.uniqueId), i.inputSource.targetRayMode) { case "tracked-pointer": return this._attachTrackedPointerRayMode(i); case "gaze": return this._attachGazeMode(i); case "screen": return this._attachScreenRayMode(i); } }, this._controllers = {}, this._tmpVectorForPickCompare = new D(), this.disablePointerLighting = !0, this.disableSelectionMeshLighting = !0, this.displayLaserPointer = !0, this.displaySelectionMesh = !0, this.laserPointerPickedColor = new ze(0.9, 0.9, 0.9), this.laserPointerDefaultColor = new ze(0.7, 0.7, 0.7), this.selectionMeshDefaultColor = new ze(0.8, 0.8, 0.8), this.selectionMeshPickedColor = new ze(0.3, 0.3, 1), this._identityMatrix = Ae.Identity(), this._screenCoordinatesRef = D.Zero(), this._viewportRef = new Md(0, 0, 0, 0), this._scene = this._xrSessionManager.scene; } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { if (!super.attach()) return !1; if (this._options.xrInput.controllers.forEach(this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerAddedObservable, this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerRemovedObservable, (e) => { this._detachController(e.uniqueId); }), this._scene.constantlyUpdateMeshUnderPointer = !0, this._options.gazeCamera) { const e = this._options.gazeCamera, { laserPointer: t, selectionMesh: i } = this._generateNewMeshPair(e); this._controllers.camera = { webXRCamera: e, laserPointer: t, selectionMesh: i, meshUnderPointer: null, pick: null, tmpRay: new gs(new D(), new D()), disabledByNearInteraction: !1, id: i6._IdCounter++ }, this._attachGazeMode(); } return !0; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? (Object.keys(this._controllers).forEach((e) => { this._detachController(e); }), !0) : !1; } /** * Will get the mesh under a specific pointer. * `scene.meshUnderPointer` will only return one mesh - either left or right. * @param controllerId the controllerId to check * @returns The mesh under pointer or null if no mesh is under the pointer */ getMeshUnderPointer(e) { return this._controllers[e] ? 
this._controllers[e].meshUnderPointer : null; } /** * Get the xr controller that correlates to the pointer id in the pointer event * * @param id the pointer id to search for * @returns the controller that correlates to this id or null if not found */ getXRControllerByPointerId(e) { const t = Object.keys(this._controllers); for (let i = 0; i < t.length; ++i) if (this._controllers[t[i]].id === e) return this._controllers[t[i]].xrController || null; return null; } /** * @internal */ _getPointerSelectionDisabledByPointerId(e) { const t = Object.keys(this._controllers); for (let i = 0; i < t.length; ++i) if (this._controllers[t[i]].id === e) return this._controllers[t[i]].disabledByNearInteraction; return !0; } /** * @internal */ _setPointerSelectionDisabledByPointerId(e, t) { const i = Object.keys(this._controllers); for (let r = 0; r < i.length; ++r) if (this._controllers[i[r]].id === e) { this._controllers[i[r]].disabledByNearInteraction = t; return; } } _onXRFrame(e) { Object.keys(this._controllers).forEach((t) => { const i = this._controllers[t]; if (!this._options.enablePointerSelectionOnAllControllers && t !== this._attachedController || i.disabledByNearInteraction) { i.selectionMesh.isVisible = !1, i.laserPointer.isVisible = !1, i.pick = null; return; } i.laserPointer.isVisible = this.displayLaserPointer; let r; if (i.xrController) r = i.xrController.pointer.position, i.xrController.getWorldPointerRayToRef(i.tmpRay); else if (i.webXRCamera) r = i.webXRCamera.position, i.webXRCamera.getForwardRayToRef(i.tmpRay); else return; if (this._options.maxPointerDistance && (i.tmpRay.length = this._options.maxPointerDistance), !this._options.disableScenePointerVectorUpdate && r) { const l = this._xrSessionManager.scene, o = this._options.xrInput.xrCamera; o && (o.viewport.toGlobalToRef(l.getEngine().getRenderWidth(), l.getEngine().getRenderHeight(), this._viewportRef), D.ProjectToRef(r, this._identityMatrix, l.getTransformMatrix(), this._viewportRef, this._screenCoordinatesRef), typeof this._screenCoordinatesRef.x == "number" && typeof this._screenCoordinatesRef.y == "number" && !isNaN(this._screenCoordinatesRef.x) && !isNaN(this._screenCoordinatesRef.y) && (l.pointerX = this._screenCoordinatesRef.x, l.pointerY = this._screenCoordinatesRef.y, i.screenCoordinates = { x: this._screenCoordinatesRef.x, y: this._screenCoordinatesRef.y })); } let s = null; this._utilityLayerScene && (s = this._utilityLayerScene.pickWithRay(i.tmpRay, this._utilityLayerScene.pointerMovePredicate || this.raySelectionPredicate)); const n = this._scene.pickWithRay(i.tmpRay, this._scene.pointerMovePredicate || this.raySelectionPredicate); !s || !s.hit ? i.pick = n : !n || !n.hit || s.distance < n.distance ? 
i.pick = s : i.pick = n, i.pick && i.xrController && (i.pick.aimTransform = i.xrController.pointer, i.pick.gripTransform = i.xrController.grip || null); const a = i.pick; if (a && a.pickedPoint && a.hit) { this._updatePointerDistance(i.laserPointer, a.distance), i.selectionMesh.position.copyFrom(a.pickedPoint), i.selectionMesh.scaling.x = Math.sqrt(a.distance), i.selectionMesh.scaling.y = Math.sqrt(a.distance), i.selectionMesh.scaling.z = Math.sqrt(a.distance); const l = this._convertNormalToDirectionOfRay(a.getNormal(!0), i.tmpRay), o = 1e-3; if (i.selectionMesh.position.copyFrom(a.pickedPoint), l) { const u = D.Cross(bl.Y, l), h = D.Cross(l, u); D.RotationFromAxisToRef(h, l, u, i.selectionMesh.rotation), i.selectionMesh.position.addInPlace(l.scale(o)); } i.selectionMesh.isVisible = this.displaySelectionMesh, i.meshUnderPointer = a.pickedMesh; } else i.selectionMesh.isVisible = !1, this._updatePointerDistance(i.laserPointer, 1), i.meshUnderPointer = null; }); } get _utilityLayerScene() { return this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene; } _attachGazeMode(e) { const t = this._controllers[e && e.uniqueId || "camera"], i = this._options.timeToSelect || 3e3, r = this._options.useUtilityLayer ? this._utilityLayerScene : this._scene; let s = new ku(); const n = o6("selection", { diameter: 35e-4 * 15, thickness: 25e-4 * 6, tessellation: 20 }, r); n.isVisible = !1, n.isPickable = !1, n.parent = t.selectionMesh; let a = 0, l = !1; const o = { pointerId: t.id, pointerType: "xr" }; t.onFrameObserver = this._xrSessionManager.onXRFrameObservable.add(() => { if (t.pick) { if (this._augmentPointerInit(o, t.id, t.screenCoordinates), t.laserPointer.material.alpha = 0, n.isVisible = !1, t.pick.hit) if (this._pickingMoved(s, t.pick)) l && (this._options.disablePointerUpOnTouchOut || this._scene.simulatePointerUp(t.pick, o)), l = !1, a = 0; else if (a > i / 10 && (n.isVisible = !0), a += this._scene.getEngine().getDeltaTime(), a >= i) this._scene.simulatePointerDown(t.pick, o), l = !0, this._options.disablePointerUpOnTouchOut && this._scene.simulatePointerUp(t.pick, o), n.isVisible = !1; else { const u = 1 - a / i; n.scaling.set(u, u, u); } else l = !1, a = 0; this._scene.simulatePointerMove(t.pick, o), s = t.pick; } }), this._options.renderingGroupId !== void 0 && (n.renderingGroupId = this._options.renderingGroupId), e && e.onDisposeObservable.addOnce(() => { t.pick && !this._options.disablePointerUpOnTouchOut && l && (this._scene.simulatePointerUp(t.pick, o), t.finalPointerUpTriggered = !0), n.dispose(); }); } _attachScreenRayMode(e) { const t = this._controllers[e.uniqueId]; let i = !1; const r = { pointerId: t.id, pointerType: "xr" }; t.onFrameObserver = this._xrSessionManager.onXRFrameObservable.add(() => { this._augmentPointerInit(r, t.id, t.screenCoordinates), !(!t.pick || this._options.disablePointerUpOnTouchOut && i) && (i ? 
this._scene.simulatePointerMove(t.pick, r) : (this._scene.simulatePointerDown(t.pick, r), t.pointerDownTriggered = !0, i = !0, this._options.disablePointerUpOnTouchOut && this._scene.simulatePointerUp(t.pick, r))); }), e.onDisposeObservable.addOnce(() => { this._augmentPointerInit(r, t.id, t.screenCoordinates), this._xrSessionManager.runInXRFrame(() => { t.pick && !t.finalPointerUpTriggered && i && !this._options.disablePointerUpOnTouchOut && (this._scene.simulatePointerUp(t.pick, r), t.finalPointerUpTriggered = !0); }); }); } _attachTrackedPointerRayMode(e) { const t = this._controllers[e.uniqueId]; if (this._options.forceGazeMode) return this._attachGazeMode(e); const i = { pointerId: t.id, pointerType: "xr" }; if (t.onFrameObserver = this._xrSessionManager.onXRFrameObservable.add(() => { t.laserPointer.material.disableLighting = this.disablePointerLighting, t.selectionMesh.material.disableLighting = this.disableSelectionMeshLighting, t.pick && (this._augmentPointerInit(i, t.id, t.screenCoordinates), this._scene.simulatePointerMove(t.pick, i)); }), e.inputSource.gamepad) { const r = (s) => { this._options.overrideButtonId && (t.selectionComponent = s.getComponent(this._options.overrideButtonId)), t.selectionComponent || (t.selectionComponent = s.getMainComponent()), t.onButtonChangedObserver = t.selectionComponent.onButtonStateChangedObservable.add((n) => { if (n.changes.pressed) { const a = n.changes.pressed.current; t.pick ? (this._options.enablePointerSelectionOnAllControllers || e.uniqueId === this._attachedController) && (this._augmentPointerInit(i, t.id, t.screenCoordinates), a ? (this._scene.simulatePointerDown(t.pick, i), t.pointerDownTriggered = !0, t.selectionMesh.material.emissiveColor = this.selectionMeshPickedColor, t.laserPointer.material.emissiveColor = this.laserPointerPickedColor) : (this._scene.simulatePointerUp(t.pick, i), t.selectionMesh.material.emissiveColor = this.selectionMeshDefaultColor, t.laserPointer.material.emissiveColor = this.laserPointerDefaultColor)) : a && !this._options.enablePointerSelectionOnAllControllers && !this._options.disableSwitchOnClick && (this._attachedController = e.uniqueId); } }); }; e.motionController ? 
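/*
 * The three attach modes above (tracked-pointer, gaze, screen) are chosen per
 * controller from inputSource.targetRayMode. A hedged sketch of enabling the feature
 * explicitly, inside an async setup function, assuming it is exposed as
 * WebXRFeatureName.POINTER_SELECTION in "@babylonjs/core" and that the
 * default-experience helper defined later in this file is available on the scene:
 *
 *   import { WebXRFeatureName } from "@babylonjs/core";
 *
 *   const xr = await scene.createDefaultXRExperienceAsync({ disablePointerSelection: true });
 *   const pointerSelection = xr.baseExperience.featuresManager.enableFeature(
 *     WebXRFeatureName.POINTER_SELECTION, "latest", {
 *       xrInput: xr.input,                  // required: supplies the controllers attached above
 *       forceGazeMode: false,               // true routes every controller through _attachGazeMode
 *       timeToSelect: 3000,                 // gaze dwell time in ms (the default used above)
 *       disablePointerUpOnTouchOut: false,  // see _attachScreenRayMode / _attachGazeMode
 *       overrideButtonId: "xr-standard-trigger", // optional: select a specific controller component
 *     });
 */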
r(e.motionController) : e.onMotionControllerInitObservable.add(r); } else { const r = (n) => { this._augmentPointerInit(i, t.id, t.screenCoordinates), t.xrController && n.inputSource === t.xrController.inputSource && t.pick && (this._scene.simulatePointerDown(t.pick, i), t.pointerDownTriggered = !0, t.selectionMesh.material.emissiveColor = this.selectionMeshPickedColor, t.laserPointer.material.emissiveColor = this.laserPointerPickedColor); }, s = (n) => { this._augmentPointerInit(i, t.id, t.screenCoordinates), t.xrController && n.inputSource === t.xrController.inputSource && t.pick && (this._scene.simulatePointerUp(t.pick, i), t.selectionMesh.material.emissiveColor = this.selectionMeshDefaultColor, t.laserPointer.material.emissiveColor = this.laserPointerDefaultColor); }; t.eventListeners = { selectend: s, selectstart: r }, this._xrSessionManager.session.addEventListener("selectstart", r), this._xrSessionManager.session.addEventListener("selectend", s); } } _convertNormalToDirectionOfRay(e, t) { return e && Math.acos(D.Dot(e, t.direction)) < Math.PI / 2 && e.scaleInPlace(-1), e; } _detachController(e) { const t = this._controllers[e]; if (t) { if (t.selectionComponent && t.onButtonChangedObserver && t.selectionComponent.onButtonStateChangedObservable.remove(t.onButtonChangedObserver), t.onFrameObserver && this._xrSessionManager.onXRFrameObservable.remove(t.onFrameObserver), t.eventListeners && Object.keys(t.eventListeners).forEach((i) => { const r = t.eventListeners && t.eventListeners[i]; r && this._xrSessionManager.session.removeEventListener(i, r); }), !t.finalPointerUpTriggered && t.pointerDownTriggered) { const i = { pointerId: t.id, pointerType: "xr" }; this._xrSessionManager.runInXRFrame(() => { this._augmentPointerInit(i, t.id, t.screenCoordinates), this._scene.simulatePointerUp(t.pick || new ku(), i), t.finalPointerUpTriggered = !0; }); } this._xrSessionManager.scene.onBeforeRenderObservable.addOnce(() => { try { if (t.selectionMesh.dispose(), t.laserPointer.dispose(), delete this._controllers[e], this._attachedController === e) { const i = Object.keys(this._controllers); i.length ? this._attachedController = i[0] : this._attachedController = ""; } } catch { Ve.Warn("controller already detached."); } }); } } _generateNewMeshPair(e) { const t = this._options.useUtilityLayer ? this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene : this._scene, i = this._options.customLasterPointerMeshGenerator ? this._options.customLasterPointerMeshGenerator() : Hf("laserPointer", { height: 1, diameterTop: 2e-4, diameterBottom: 4e-3, tessellation: 20, subdivisions: 1 }, t); i.parent = e; const r = new Dt("laserPointerMat", t); r.emissiveColor = this.laserPointerDefaultColor, r.alpha = 0.7, i.material = r, i.rotation.x = Math.PI / 2, this._updatePointerDistance(i, 1), i.isPickable = !1, i.isVisible = !1; const s = this._options.customSelectionMeshGenerator ? 
this._options.customSelectionMeshGenerator() : o6("gazeTracker", { diameter: 35e-4 * 3, thickness: 25e-4 * 3, tessellation: 20 }, t); s.bakeCurrentTransformIntoVertices(), s.isPickable = !1, s.isVisible = !1; const n = new Dt("targetMat", t); return n.specularColor = ze.Black(), n.emissiveColor = this.selectionMeshDefaultColor, n.backFaceCulling = !1, s.material = n, this._options.renderingGroupId !== void 0 && (i.renderingGroupId = this._options.renderingGroupId, s.renderingGroupId = this._options.renderingGroupId), { laserPointer: i, selectionMesh: s }; } _pickingMoved(e, t) { var i; if (!e.hit || !t.hit || !e.pickedMesh || !e.pickedPoint || !t.pickedMesh || !t.pickedPoint || e.pickedMesh !== t.pickedMesh) return !0; (i = e.pickedPoint) === null || i === void 0 || i.subtractToRef(t.pickedPoint, this._tmpVectorForPickCompare), this._tmpVectorForPickCompare.set(Math.abs(this._tmpVectorForPickCompare.x), Math.abs(this._tmpVectorForPickCompare.y), Math.abs(this._tmpVectorForPickCompare.z)); const r = (this._options.gazeModePointerMovedFactor || 1) * 0.01 * t.distance; return this._tmpVectorForPickCompare.length() > r; } _updatePointerDistance(e, t = 100) { e.scaling.y = t, this._scene.useRightHandedSystem && (t *= -1), e.position.z = t / 2 + 0.05; } _augmentPointerInit(e, t, i) { e.pointerId = t, e.pointerType = "xr", i && (e.screenX = i.x, e.screenY = i.y); } /** @internal */ get lasterPointerDefaultColor() { return this.laserPointerDefaultColor; } } i6._IdCounter = 200; i6.Name = Qs.POINTER_SELECTION; i6.Version = 1; Oo.AddWebXRFeature(i6.Name, (c, e) => () => new i6(c, e), i6.Version, !0); ed.prototype._projectOnTrianglesToRef = function(c, e, t, i, r, s) { const n = de.Vector3[0], a = de.Vector3[1]; let l = 1 / 0; for (let o = this.indexStart; o < this.indexStart + this.indexCount - (3 - i); o += i) { const u = t[o], h = t[o + 1], d = t[o + 2]; if (r && d === 4294967295) { o += 2; continue; } const f = e[u], p = e[h], m = e[d]; if (!f || !p || !m) continue; const _ = D.ProjectOnTriangleToRef(c, f, p, m, a); _ < l && (n.copyFrom(a), l = _); } return s.copyFrom(n), l; }; ed.prototype._projectOnUnIndexedTrianglesToRef = function(c, e, t, i) { const r = de.Vector3[0], s = de.Vector3[1]; let n = 1 / 0; for (let a = this.verticesStart; a < this.verticesStart + this.verticesCount; a += 3) { const l = e[a], o = e[a + 1], u = e[a + 2], h = D.ProjectOnTriangleToRef(c, l, o, u, s); h < n && (r.copyFrom(s), n = h); } return i.copyFrom(r), n; }; ed.prototype.projectToRef = function(c, e, t, i) { const r = this.getMaterial(); if (!r) return -1; let s = 3, n = !1; switch (r.fillMode) { case 3: case 5: case 6: case 8: return -1; case 7: s = 1, n = !0; break; } return r.fillMode === 4 ? -1 : !t.length && this._mesh._unIndexed ? 
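/*
 * The feature registered just above (Qs.POINTER_SELECTION) exposes public knobs that
 * _onXRFrame and _generateNewMeshPair read every frame. A small runtime-tuning sketch,
 * assuming the public Babylon.js surface matches this bundle (Color3 is the minified ze)
 * and a `pointerSelection` instance from the earlier sketch:
 *
 *   import { Color3 } from "@babylonjs/core";
 *
 *   pointerSelection.displayLaserPointer = true;      // toggles laserPointer.isVisible per frame
 *   pointerSelection.displaySelectionMesh = true;     // toggles the gaze-tracker torus
 *   pointerSelection.laserPointerPickedColor = new Color3(0.9, 0.9, 0.9);
 *   pointerSelection.selectionMeshPickedColor = new Color3(0.3, 0.3, 1);
 *   // Restrict what the rays may hit; this predicate is the fallback passed to pickWithRay above.
 *   pointerSelection.raySelectionPredicate = (mesh) => mesh.isPickable && mesh.isVisible;
 */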
this._projectOnUnIndexedTrianglesToRef(c, e, t, i) : this._projectOnTrianglesToRef(c, e, t, s, n, i); }; var cm; (function(c) { c[c.DEHYDRATED = 0] = "DEHYDRATED", c[c.HOVER = 1] = "HOVER", c[c.TOUCH = 2] = "TOUCH"; })(cm || (cm = {})); var Y8; (function(c) { c[c.DISABLED = 0] = "DISABLED", c[c.CENTERED_ON_CONTROLLER = 1] = "CENTERED_ON_CONTROLLER", c[c.CENTERED_IN_FRONT = 2] = "CENTERED_IN_FRONT"; })(Y8 || (Y8 = {})); class r6 extends Ku { /** * constructs a new background remover module * @param _xrSessionManager the session manager for this module * @param _options read-only options to be used in this module */ constructor(e, t) { super(e), this._options = t, this._tmpRay = new gs(new D(), new D()), this._attachController = (i) => { if (this._controllers[i.uniqueId]) return; const { touchCollisionMesh: r, touchCollisionMeshFunction: s, hydrateCollisionMeshFunction: n } = this._generateNewTouchPointMesh(), a = this._generateVisualCue(); switch (this._controllers[i.uniqueId] = { xrController: i, meshUnderPointer: null, nearInteractionTargetMesh: null, pick: null, stalePick: null, touchCollisionMesh: r, touchCollisionMeshFunction: s, hydrateCollisionMeshFunction: n, currentAnimationState: cm.DEHYDRATED, grabRay: new gs(new D(), new D()), hoverInteraction: !1, nearInteraction: !1, grabInteraction: !1, id: r6._IdCounter++, pickedPointVisualCue: a }, this._attachedController ? !this._options.enableNearInteractionOnAllControllers && this._options.preferredHandedness && i.inputSource.handedness === this._options.preferredHandedness && (this._attachedController = i.uniqueId) : this._options.enableNearInteractionOnAllControllers || (this._attachedController = i.uniqueId), i.inputSource.targetRayMode) { case "tracked-pointer": return this._attachNearInteractionMode(i); case "gaze": return null; case "screen": return null; } }, this._controllers = {}, this._farInteractionFeature = null, this.selectionMeshDefaultColor = new ze(0.8, 0.8, 0.8), this.selectionMeshPickedColor = new ze(0.3, 0.3, 1), this._hoverRadius = 0.1, this._pickRadius = 0.02, this._controllerPickRadius = 0.03, this._nearGrabLengthScale = 5, this._scene = this._xrSessionManager.scene, this._options.nearInteractionControllerMode === void 0 && (this._options.nearInteractionControllerMode = Y8.CENTERED_IN_FRONT), this._options.farInteractionFeature && (this._farInteractionFeature = this._options.farInteractionFeature); } /** * Attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { return super.attach() ? (this._options.xrInput.controllers.forEach(this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerAddedObservable, this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerRemovedObservable, (e) => { this._detachController(e.uniqueId); }), this._scene.constantlyUpdateMeshUnderPointer = !0, !0) : !1; } /** * Detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? (Object.keys(this._controllers).forEach((e) => { this._detachController(e); }), !0) : !1; } /** * Will get the mesh under a specific pointer. * `scene.meshUnderPointer` will only return one mesh - either left or right. * @param controllerId the controllerId to check * @returns The mesh under pointer or null if no mesh is under the pointer */ getMeshUnderPointer(e) { return this._controllers[e] ? 
this._controllers[e].meshUnderPointer : null; } /** * Get the xr controller that correlates to the pointer id in the pointer event * * @param id the pointer id to search for * @returns the controller that correlates to this id or null if not found */ getXRControllerByPointerId(e) { const t = Object.keys(this._controllers); for (let i = 0; i < t.length; ++i) if (this._controllers[t[i]].id === e) return this._controllers[t[i]].xrController || null; return null; } /** * This function sets webXRControllerPointerSelection feature that will be disabled when * the hover range is reached for a mesh and will be reattached when not in hover range. * This is used to remove the selection rays when moving. * @param farInteractionFeature the feature to disable when finger is in hover range for a mesh */ setFarInteractionFeature(e) { this._farInteractionFeature = e; } /** * Filter used for near interaction pick and hover * @param mesh */ _nearPickPredicate(e) { return e.isEnabled() && e.isVisible && e.isPickable && e.isNearPickable; } /** * Filter used for near interaction grab * @param mesh */ _nearGrabPredicate(e) { return e.isEnabled() && e.isVisible && e.isPickable && e.isNearGrabbable; } /** * Filter used for any near interaction * @param mesh */ _nearInteractionPredicate(e) { return e.isEnabled() && e.isVisible && e.isPickable && (e.isNearPickable || e.isNearGrabbable); } _controllerAvailablePredicate(e, t) { let i = e; for (; i; ) { if (i.reservedDataStore && i.reservedDataStore.nearInteraction && i.reservedDataStore.nearInteraction.excludedControllerId === t) return !1; i = i.parent; } return !0; } _handleTransitionAnimation(e, t) { var i; if (!(e.currentAnimationState === t || this._options.nearInteractionControllerMode !== Y8.CENTERED_IN_FRONT || !((i = e.xrController) === null || i === void 0) && i.inputSource.hand)) { if (t > e.currentAnimationState) switch (e.currentAnimationState) { case cm.DEHYDRATED: if (e.hydrateCollisionMeshFunction(!0), t === cm.HOVER) break; case cm.HOVER: if (e.touchCollisionMeshFunction(!0), t === cm.TOUCH) break; } else switch (e.currentAnimationState) { case cm.TOUCH: if (e.touchCollisionMeshFunction(!1), t === cm.HOVER) break; case cm.HOVER: if (e.hydrateCollisionMeshFunction(!1), t === cm.DEHYDRATED) break; } e.currentAnimationState = t; } } _processTouchPoint(e, t, i) { var r; const s = this._controllers[e]; s.grabRay.origin.copyFrom(t), i.toEulerAnglesToRef(de.Vector3[0]), s.grabRay.direction.copyFrom(de.Vector3[0]), this._options.nearInteractionControllerMode === Y8.CENTERED_IN_FRONT && !(!((r = s.xrController) === null || r === void 0) && r.inputSource.hand) && (s.xrController.getWorldPointerRayToRef(this._tmpRay), s.grabRay.origin.addInPlace(this._tmpRay.direction.scale(0.05))), s.grabRay.length = this._nearGrabLengthScale * this._hoverRadius, s.touchCollisionMesh.position.copyFrom(s.grabRay.origin); } _onXRFrame(e) { Object.keys(this._controllers).forEach((t) => { var i; const r = this._controllers[t], s = (i = r.xrController) === null || i === void 0 ? 
void 0 : i.inputSource.hand; if (!this._options.enableNearInteractionOnAllControllers && t !== this._attachedController || !r.xrController || !s && (!this._options.nearInteractionControllerMode || !r.xrController.inputSource.gamepad)) { r.pick = null; return; } if (r.hoverInteraction = !1, r.nearInteraction = !1, r.xrController) { if (s) { const o = s.get("index-finger-tip"); if (o) { const u = e.getJointPose(o, this._xrSessionManager.referenceSpace); if (u && u.transform) { const h = this._scene.useRightHandedSystem ? 1 : -1; de.Vector3[0].set(u.transform.position.x, u.transform.position.y, u.transform.position.z * h), de.Quaternion[0].set(u.transform.orientation.x, u.transform.orientation.y, u.transform.orientation.z * h, u.transform.orientation.w * h), this._processTouchPoint(t, de.Vector3[0], de.Quaternion[0]); } } } else if (r.xrController.inputSource.gamepad && this._options.nearInteractionControllerMode !== Y8.DISABLED) { let o = r.xrController.pointer; r.xrController.grip && this._options.nearInteractionControllerMode === Y8.CENTERED_ON_CONTROLLER && (o = r.xrController.grip), this._processTouchPoint(t, o.position, o.rotationQuaternion); } } else return; const n = (o, u) => { let h = null; return !u || !u.hit ? h = o : !o || !o.hit || u.distance < o.distance ? h = u : h = o, h; }, a = (o) => { let u = new ku(), h = !1; const d = o && o.pickedPoint && o.hit; return o != null && o.pickedPoint && (h = o.pickedPoint.x === 0 && o.pickedPoint.y === 0 && o.pickedPoint.z === 0), d && !h && (u = o), u; }; if (!r.grabInteraction) { let o = null, u = null; this._options.useUtilityLayer && this._utilityLayerScene && (u = this._pickWithSphere(r, this._hoverRadius, this._utilityLayerScene, (f) => this._nearInteractionPredicate(f))); const h = this._pickWithSphere(r, this._hoverRadius, this._scene, (f) => this._nearInteractionPredicate(f)), d = n(h, u); if (d && d.hit && (o = a(d), o.hit && (r.hoverInteraction = !0)), r.hoverInteraction) { let f = null; const p = s ? this._pickRadius : this._controllerPickRadius; this._options.useUtilityLayer && this._utilityLayerScene && (f = this._pickWithSphere(r, p, this._utilityLayerScene, (C) => this._nearPickPredicate(C))); const m = this._pickWithSphere(r, p, this._scene, (C) => this._nearPickPredicate(C)), _ = n(m, f), v = a(_); v.hit && (o = v, r.nearInteraction = !0); } r.stalePick = r.pick, r.pick = o, r.pick && r.pick.pickedPoint && r.pick.hit ? (r.meshUnderPointer = r.pick.pickedMesh, r.pickedPointVisualCue.position.copyFrom(r.pick.pickedPoint), r.pickedPointVisualCue.isVisible = !0, this._farInteractionFeature && this._farInteractionFeature.attached && this._farInteractionFeature._setPointerSelectionDisabledByPointerId(r.id, !0)) : (r.meshUnderPointer = null, r.pickedPointVisualCue.isVisible = !1, this._farInteractionFeature && this._farInteractionFeature.attached && this._farInteractionFeature._setPointerSelectionDisabledByPointerId(r.id, !1)); } let l = cm.DEHYDRATED; r.grabInteraction || r.nearInteraction ? l = cm.TOUCH : r.hoverInteraction && (l = cm.HOVER), this._handleTransitionAnimation(r, l); }); } get _utilityLayerScene() { return this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene; } _generateVisualCue() { const e = this._options.useUtilityLayer ? 
this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene : this._scene, t = Rd("nearInteraction", { diameter: 35e-4 * 3 }, e); t.bakeCurrentTransformIntoVertices(), t.isPickable = !1, t.isVisible = !1, t.rotationQuaternion = Ze.Identity(); const i = new Dt("targetMat", e); return i.specularColor = ze.Black(), i.emissiveColor = this.selectionMeshDefaultColor, i.backFaceCulling = !1, t.material = i, t; } _isControllerReadyForNearInteraction(e) { return this._farInteractionFeature ? this._farInteractionFeature._getPointerSelectionDisabledByPointerId(e) : !0; } _attachNearInteractionMode(e) { const t = this._controllers[e.uniqueId], i = { pointerId: t.id, pointerType: "xr-near" }; t.onFrameObserver = this._xrSessionManager.onXRFrameObservable.add(() => { !this._options.enableNearInteractionOnAllControllers && e.uniqueId !== this._attachedController || !t.xrController || !t.xrController.inputSource.hand && (!this._options.nearInteractionControllerMode || !t.xrController.inputSource.gamepad) || (t.pick && (t.pick.ray = t.grabRay), t.pick && this._isControllerReadyForNearInteraction(t.id) && this._scene.simulatePointerMove(t.pick, i), t.nearInteraction && t.pick && t.pick.hit ? t.nearInteractionTargetMesh || (this._scene.simulatePointerDown(t.pick, i), t.nearInteractionTargetMesh = t.meshUnderPointer) : t.nearInteractionTargetMesh && t.stalePick && (this._scene.simulatePointerUp(t.stalePick, i), t.nearInteractionTargetMesh = null)); }); const r = (s) => { this._options.enableNearInteractionOnAllControllers || e.uniqueId === this._attachedController && this._isControllerReadyForNearInteraction(t.id) ? (t.pick && (t.pick.ray = t.grabRay), s && t.pick && t.meshUnderPointer && this._nearGrabPredicate(t.meshUnderPointer) ? (t.grabInteraction = !0, t.pickedPointVisualCue.isVisible = !1, this._scene.simulatePointerDown(t.pick, i)) : !s && t.pick && t.grabInteraction && (this._scene.simulatePointerUp(t.pick, i), t.grabInteraction = !1, t.pickedPointVisualCue.isVisible = !0)) : s && !this._options.enableNearInteractionOnAllControllers && !this._options.disableSwitchOnClick && (this._attachedController = e.uniqueId); }; if (e.inputSource.gamepad) { const s = (n) => { t.squeezeComponent = n.getComponent("grasp"), t.squeezeComponent ? t.onSqueezeButtonChangedObserver = t.squeezeComponent.onButtonStateChangedObservable.add((a) => { if (a.changes.pressed) { const l = a.changes.pressed.current; r(l); } }) : (t.selectionComponent = n.getMainComponent(), t.onButtonChangedObserver = t.selectionComponent.onButtonStateChangedObservable.add((a) => { if (a.changes.pressed) { const l = a.changes.pressed.current; r(l); } })); }; e.motionController ? 
s(e.motionController) : e.onMotionControllerInitObservable.add(s); } else { const s = (a) => { t.xrController && a.inputSource === t.xrController.inputSource && t.pick && this._isControllerReadyForNearInteraction(t.id) && t.meshUnderPointer && this._nearGrabPredicate(t.meshUnderPointer) && (t.grabInteraction = !0, t.pickedPointVisualCue.isVisible = !1, this._scene.simulatePointerDown(t.pick, i)); }, n = (a) => { t.xrController && a.inputSource === t.xrController.inputSource && t.pick && this._isControllerReadyForNearInteraction(t.id) && (this._scene.simulatePointerUp(t.pick, i), t.grabInteraction = !1, t.pickedPointVisualCue.isVisible = !0); }; t.eventListeners = { selectend: n, selectstart: s }, this._xrSessionManager.session.addEventListener("selectstart", s), this._xrSessionManager.session.addEventListener("selectend", n); } } _detachController(e) { const t = this._controllers[e]; if (t && (t.squeezeComponent && t.onSqueezeButtonChangedObserver && t.squeezeComponent.onButtonStateChangedObservable.remove(t.onSqueezeButtonChangedObserver), t.selectionComponent && t.onButtonChangedObserver && t.selectionComponent.onButtonStateChangedObservable.remove(t.onButtonChangedObserver), t.onFrameObserver && this._xrSessionManager.onXRFrameObservable.remove(t.onFrameObserver), t.eventListeners && Object.keys(t.eventListeners).forEach((i) => { const r = t.eventListeners && t.eventListeners[i]; r && this._xrSessionManager.session.removeEventListener(i, r); }), t.touchCollisionMesh.dispose(), t.pickedPointVisualCue.dispose(), this._xrSessionManager.runInXRFrame(() => { const i = { pointerId: t.id, pointerType: "xr-near" }; this._scene.simulatePointerUp(new ku(), i); }), delete this._controllers[e], this._attachedController === e)) { const i = Object.keys(this._controllers); i.length ? this._attachedController = i[0] : this._attachedController = ""; } } _generateNewTouchPointMesh() { const e = this._options.useUtilityLayer ? this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene : this._scene, t = Rd("PickSphere", { diameter: 1 }, e); t.isVisible = !1, this._options.motionControllerOrbMaterial ? 
t.material = this._options.motionControllerOrbMaterial : Ta.ParseFromSnippetAsync("8RUNKL#3", e).then((R) => { t.material = R; }); const i = new dK(); i.setEasingMode(hl.EASINGMODE_EASEINOUT); const r = new D(this._controllerPickRadius, this._controllerPickRadius, this._controllerPickRadius), s = this._controllerPickRadius * (4 / 3), n = new D(s, s, s), a = this._controllerPickRadius * (7 / 6), l = new D(a, a, a), o = this._controllerPickRadius * (4 / 5), u = new D(o, o, o), h = this._controllerPickRadius * (3 / 2), d = new D(h, h, h), f = [ { frame: 0, value: r }, { frame: 10, value: d }, { frame: 18, value: n } ], p = [ { frame: 0, value: n }, { frame: 10, value: u }, { frame: 18, value: r } ], m = [ { frame: 0, value: D.ZeroReadOnly }, { frame: 12, value: l }, { frame: 15, value: r } ], _ = [ { frame: 0, value: r }, { frame: 10, value: D.ZeroReadOnly }, { frame: 15, value: D.ZeroReadOnly } ], v = new nt("touch", "scaling", 60, nt.ANIMATIONTYPE_VECTOR3, nt.ANIMATIONLOOPMODE_CONSTANT), C = new nt("release", "scaling", 60, nt.ANIMATIONTYPE_VECTOR3, nt.ANIMATIONLOOPMODE_CONSTANT), x = new nt("hydrate", "scaling", 60, nt.ANIMATIONTYPE_VECTOR3, nt.ANIMATIONLOOPMODE_CONSTANT), b = new nt("dehydrate", "scaling", 60, nt.ANIMATIONTYPE_VECTOR3, nt.ANIMATIONLOOPMODE_CONSTANT); return v.setEasingFunction(i), C.setEasingFunction(i), x.setEasingFunction(i), b.setEasingFunction(i), v.setKeys(f), C.setKeys(p), x.setKeys(m), b.setKeys(_), { touchCollisionMesh: t, touchCollisionMeshFunction: (R) => { const w = R ? v : C; e.beginDirectAnimation(t, [w], 0, 18, !1, 1); }, hydrateCollisionMeshFunction: (R) => { const w = R ? x : b; R && (t.isVisible = !0), e.beginDirectAnimation(t, [w], 0, 15, !1, 1, () => { R || (t.isVisible = !1); }); } }; } _pickWithSphere(e, t, i, r) { const s = new ku(); if (s.distance = 1 / 0, e.touchCollisionMesh && e.xrController) { const n = e.touchCollisionMesh.position, a = e6.CreateFromCenterAndRadius(n, t); for (let l = 0; l < i.meshes.length; l++) { const o = i.meshes[l]; if (!r(o) || !this._controllerAvailablePredicate(o, e.xrController.uniqueId)) continue; const u = r6.PickMeshWithSphere(o, a); u && u.hit && u.distance < s.distance && (s.hit = u.hit, s.pickedMesh = o, s.pickedPoint = u.pickedPoint, s.aimTransform = e.xrController.pointer, s.gripTransform = e.xrController.grip || null, s.originMesh = e.touchCollisionMesh, s.distance = u.distance); } } return s; } /** * Picks a mesh with a sphere * @param mesh the mesh to pick * @param sphere picking sphere in world coordinates * @param skipBoundingInfo a boolean indicating if we should skip the bounding info check * @returns the picking info */ static PickMeshWithSphere(e, t, i = !1) { const r = e.subMeshes, s = new ku(), n = e.getBoundingInfo(); if (!e._generatePointsArray() || !e.subMeshes || !n || !i && !e6.Intersects(n.boundingSphere, t)) return s; const a = de.Vector3[0], l = de.Vector3[1]; let o = 1 / 0, u, h, d; const f = de.Vector3[2], p = de.Matrix[0]; p.copyFrom(e.getWorldMatrix()), p.invert(), D.TransformCoordinatesToRef(t.center, p, f); for (let m = 0; m < r.length; m++) r[m].projectToRef(f, e._positions, e.getIndices(), l), D.TransformCoordinatesToRef(l, e.getWorldMatrix(), l), u = D.Distance(l, t.center), d = D.Distance(l, e.getAbsolutePosition()), h = D.Distance(t.center, e.getAbsolutePosition()), h !== -1 && d !== -1 && d > h && (u = 0, l.copyFrom(t.center)), u !== -1 && u < o && (o = u, a.copyFrom(l)); return o < t.radius && (s.hit = !0, s.distance = o, s.pickedMesh = e, s.pickedPoint = a.clone()), s; } } 
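/*
 * Usage sketch for the near-interaction class above, assuming it corresponds to
 * Babylon.js's WebXRNearInteraction (registered below under Qs.NEAR_INTERACTION) and
 * that the enum minified as Y8 is WebXRNearControllerMode; the public names and the
 * `xr` / `pointerSelection` variables are assumptions carried over from earlier sketches.
 *
 *   import { WebXRFeatureName, WebXRNearControllerMode } from "@babylonjs/core";
 *
 *   const nearInteraction = xr.baseExperience.featuresManager.enableFeature(
 *     WebXRFeatureName.NEAR_INTERACTION, "latest", {
 *       xrInput: xr.input,
 *       farInteractionFeature: pointerSelection,   // far rays are muted while hovering, as _onXRFrame does
 *       enableNearInteractionOnAllControllers: true,
 *       nearInteractionControllerMode: WebXRNearControllerMode.CENTERED_IN_FRONT,
 *     });
 *
 *   // Meshes opt in through the flags checked by _nearPickPredicate / _nearGrabPredicate:
 *   buttonMesh.isNearPickable = true;   // touch / poke interaction
 *   sliderMesh.isNearGrabbable = true;  // grab interaction
 */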
r6._IdCounter = 200; r6.Name = Qs.NEAR_INTERACTION; r6.Version = 1; Oo.AddWebXRFeature(r6.Name, (c, e) => () => new r6(c, e), r6.Version, !0); class ore { /** * Creates a WebXREnterExitUIButton * @param element button element * @param sessionMode XR initialization session mode * @param referenceSpaceType the type of reference space to be used */ constructor(e, t, i) { this.element = e, this.sessionMode = t, this.referenceSpaceType = i; } /** * Extendable function which can be used to update the button's visuals when the state changes * @param activeButton the current active button in the UI */ // eslint-disable-next-line @typescript-eslint/no-unused-vars update(e) { } } class g2e { } class MU { /** * Construct a new EnterExit UI class * * @param _scene babylon scene object to use * @param options (read-only) version of the options passed to this UI */ constructor(e, t) { if (this._scene = e, this.options = t, this._activeButton = null, this._buttons = [], this.activeButtonChangedObservable = new Fe(), this._onSessionGranted = (r) => { this._helper && this._enterXRWithButtonIndex(0); }, this.overlay = document.createElement("div"), this.overlay.classList.add("xr-button-overlay"), !t.ignoreSessionGrantedEvent && navigator.xr && navigator.xr.addEventListener("sessiongranted", this._onSessionGranted), typeof window < "u" && window.location && window.location.protocol === "http:" && window.location.hostname !== "localhost") throw Ve.Warn("WebXR can only be served over HTTPS"), new Error("WebXR can only be served over HTTPS"); if (t.customButtons) this._buttons = t.customButtons; else { this.overlay.style.cssText = "z-index:11;position: absolute; right: 20px;bottom: 50px;"; const r = t.sessionMode || "immersive-vr", s = t.referenceSpaceType || "local-floor"; let a = ".babylonVRicon { color: #868686; border-color: #868686; border-style: solid; margin-left: 10px; height: 50px; width: 80px; background-color: rgba(51,51,51,0.7); background-image: url(" + (typeof SVGSVGElement > "u" ? 
"https://cdn.babylonjs.com/Assets/vrButton.png" : "data:image/svg+xml;charset=UTF-8,%3Csvg%20xmlns%3D%22http%3A//www.w3.org/2000/svg%22%20width%3D%222048%22%20height%3D%221152%22%20viewBox%3D%220%200%202048%201152%22%20version%3D%221.1%22%3E%3Cpath%20transform%3D%22rotate%28180%201024%2C576.0000000000001%29%22%20d%3D%22m1109%2C896q17%2C0%2030%2C-12t13%2C-30t-12.5%2C-30.5t-30.5%2C-12.5l-170%2C0q-18%2C0%20-30.5%2C12.5t-12.5%2C30.5t13%2C30t30%2C12l170%2C0zm-85%2C256q59%2C0%20132.5%2C-1.5t154.5%2C-5.5t164.5%2C-11.5t163%2C-20t150%2C-30t124.5%2C-41.5q23%2C-11%2042%2C-24t38%2C-30q27%2C-25%2041%2C-61.5t14%2C-72.5l0%2C-257q0%2C-123%20-47%2C-232t-128%2C-190t-190%2C-128t-232%2C-47l-81%2C0q-37%2C0%20-68.5%2C14t-60.5%2C34.5t-55.5%2C45t-53%2C45t-53%2C34.5t-55.5%2C14t-55.5%2C-14t-53%2C-34.5t-53%2C-45t-55.5%2C-45t-60.5%2C-34.5t-68.5%2C-14l-81%2C0q-123%2C0%20-232%2C47t-190%2C128t-128%2C190t-47%2C232l0%2C257q0%2C68%2038%2C115t97%2C73q54%2C24%20124.5%2C41.5t150%2C30t163%2C20t164.5%2C11.5t154.5%2C5.5t132.5%2C1.5zm939%2C-298q0%2C39%20-24.5%2C67t-58.5%2C42q-54%2C23%20-122%2C39.5t-143.5%2C28t-155.5%2C19t-157%2C11t-148.5%2C5t-129.5%2C1.5q-59%2C0%20-130%2C-1.5t-148%2C-5t-157%2C-11t-155.5%2C-19t-143.5%2C-28t-122%2C-39.5q-34%2C-14%20-58.5%2C-42t-24.5%2C-67l0%2C-257q0%2C-106%2040.5%2C-199t110%2C-162.5t162.5%2C-109.5t199%2C-40l81%2C0q27%2C0%2052%2C14t50%2C34.5t51%2C44.5t55.5%2C44.5t63.5%2C34.5t74%2C14t74%2C-14t63.5%2C-34.5t55.5%2C-44.5t51%2C-44.5t50%2C-34.5t52%2C-14l14%2C0q37%2C0%2070%2C0.5t64.5%2C4.5t63.5%2C12t68%2C23q71%2C30%20128.5%2C78.5t98.5%2C110t63.5%2C133.5t22.5%2C149l0%2C257z%22%20fill%3D%22white%22%20/%3E%3C/svg%3E%0A") + "); background-size: 80%; background-repeat:no-repeat; background-position: center; border: none; outline: none; transition: transform 0.125s ease-out } .babylonVRicon:hover { transform: scale(1.05) } .babylonVRicon:active {background-color: rgba(51,51,51,1) } .babylonVRicon:focus {background-color: rgba(51,51,51,1) }"; a += '.babylonVRicon.vrdisplaypresenting { background-image: none;} .vrdisplaypresenting::after { content: "EXIT"} .xr-error::after { content: "ERROR"}'; const l = document.createElement("style"); l.appendChild(document.createTextNode(a)), document.getElementsByTagName("head")[0].appendChild(l); const o = document.createElement("button"); o.className = "babylonVRicon", o.title = `${r} - ${s}`, this._buttons.push(new ore(o, r, s)), this._buttons[this._buttons.length - 1].update = function(u) { this.element.style.display = u === null || u === this ? "" : "none", o.className = "babylonVRicon" + (u === this ? " vrdisplaypresenting" : ""); }, this._updateButtons(null); } const i = e.getEngine().getInputElement(); i && i.parentNode && (i.parentNode.appendChild(this.overlay), e.onDisposeObservable.addOnce(() => { this.dispose(); })); } /** * Set the helper to be used with this UI component. * The UI is bound to an experience helper. If not provided the UI can still be used but the events should be registered by the developer. * * @param helper the experience helper to attach * @param renderTarget an optional render target (in case it is created outside of the helper scope) * @returns a promise that resolves when the ui is ready */ async setHelperAsync(e, t) { this._helper = e, this._renderTarget = t; const i = this._buttons.map((s) => e.sessionManager.isSessionSupportedAsync(s.sessionMode)); e.onStateChangedObservable.add((s) => { s == lu.NOT_IN_XR && this._updateButtons(null); }), (await Promise.all(i)).forEach((s, n) => { s ? 
(this.overlay.appendChild(this._buttons[n].element), this._buttons[n].element.onclick = this._enterXRWithButtonIndex.bind(this, n)) : Ve.Warn(`Session mode "${this._buttons[n].sessionMode}" not supported in browser`); }); } /** * Creates UI to allow the user to enter/exit XR mode * @param scene the scene to add the ui to * @param helper the xr experience helper to enter/exit xr with * @param options options to configure the UI * @returns the created ui */ static async CreateAsync(e, t, i) { const r = new MU(e, i); return await r.setHelperAsync(t, i.renderTarget || void 0), r; } async _enterXRWithButtonIndex(e = 0) { if (this._helper.state == lu.IN_XR) await this._helper.exitXRAsync(), this._updateButtons(null); else if (this._helper.state == lu.NOT_IN_XR) try { await this._helper.enterXRAsync(this._buttons[e].sessionMode, this._buttons[e].referenceSpaceType, this._renderTarget, { optionalFeatures: this.options.optionalFeatures, requiredFeatures: this.options.requiredFeatures }), this._updateButtons(this._buttons[e]); } catch (t) { this._updateButtons(null); const i = this._buttons[e].element, r = i.title; i.title = "Error entering XR session : " + r, i.classList.add("xr-error"), this.options.onError && this.options.onError(t); } } /** * Disposes of the XR UI component */ dispose() { const e = this._scene.getEngine().getInputElement(); e && e.parentNode && e.parentNode.contains(this.overlay) && e.parentNode.removeChild(this.overlay), this.activeButtonChangedObservable.clear(), navigator.xr.removeEventListener("sessiongranted", this._onSessionGranted); } _updateButtons(e) { this._activeButton = e, this._buttons.forEach((t) => { t.update(this._activeButton); }), this.activeButtonChangedObservable.notifyObservers(this._activeButton); } } var Q8; (function(c) { c[c.INIT = 0] = "INIT", c[c.STARTED = 1] = "STARTED", c[c.ENDED = 2] = "ENDED"; })(Q8 || (Q8 = {})); function FH(c) { var e; let t = 0; const i = Date.now(); c.observableParameters = (e = c.observableParameters) !== null && e !== void 0 ? e : {}; const r = c.contextObservable.add((s) => { const n = Date.now(); t = n - i; const a = { startTime: i, currentTime: n, deltaTime: t, completeRate: t / c.timeout, payload: s }; c.onTick && c.onTick(a), c.breakCondition && c.breakCondition() && (c.contextObservable.remove(r), c.onAborted && c.onAborted(a)), t >= c.timeout && (c.contextObservable.remove(r), c.onEnded && c.onEnded(a)); }, c.observableParameters.mask, c.observableParameters.insertFirst, c.observableParameters.scope); return r; } class lre { /** * Will construct a new advanced timer based on the options provided. Timer will not start until start() is called. * @param options construction options for this advanced timer */ constructor(e) { var t, i; this.onEachCountObservable = new Fe(), this.onTimerAbortedObservable = new Fe(), this.onTimerEndedObservable = new Fe(), this.onStateChangedObservable = new Fe(), this._observer = null, this._breakOnNextTick = !1, this._tick = (r) => { const s = Date.now(); this._timer = s - this._startTime; const n = { startTime: this._startTime, currentTime: s, deltaTime: this._timer, completeRate: this._timer / this._timeToEnd, payload: r }, a = this._breakOnNextTick || this._breakCondition(n); a || this._timer >= this._timeToEnd ? this._stop(n, a) : this.onEachCountObservable.notifyObservers(n); }, this._setState(Q8.INIT), this._contextObservable = e.contextObservable, this._observableParameters = (t = e.observableParameters) !== null && t !== void 0 ? 
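/*
 * The helper above (minified as FH) matches Babylon.js's setAndStartTimer, and the
 * class that follows matches AdvancedTimer; the teleportation feature further below
 * uses the same mechanism for its hold-to-teleport delay. A hedged sketch, assuming
 * the usual "@babylonjs/core" exports and an existing `scene`:
 *
 *   import { setAndStartTimer, AdvancedTimer } from "@babylonjs/core";
 *
 *   // One-shot: tick against a scene observable and fire after three seconds.
 *   setAndStartTimer({
 *     timeout: 3000,
 *     contextObservable: scene.onBeforeRenderObservable,
 *     onTick: (data) => console.log("progress", data.completeRate),
 *     breakCondition: () => scene.isDisposed,   // aborts early (onAborted) when this returns true
 *     onEnded: () => console.log("three seconds elapsed"),
 *   });
 *
 *   // Reusable: an AdvancedTimer can be started and stopped repeatedly.
 *   const timer = new AdvancedTimer({ timeout: 1000, contextObservable: scene.onBeforeRenderObservable });
 *   timer.onTimerEndedObservable.add(() => console.log("done"));
 *   timer.start();
 */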
t : {}, this._breakCondition = (i = e.breakCondition) !== null && i !== void 0 ? i : () => !1, this._timeToEnd = e.timeout, e.onEnded && this.onTimerEndedObservable.add(e.onEnded), e.onTick && this.onEachCountObservable.add(e.onTick), e.onAborted && this.onTimerAbortedObservable.add(e.onAborted); } /** * set a breaking condition for this timer. Default is to never break during count * @param predicate the new break condition. Returns true to break, false otherwise */ set breakCondition(e) { this._breakCondition = e; } /** * Reset ALL associated observables in this advanced timer */ clearObservables() { this.onEachCountObservable.clear(), this.onTimerAbortedObservable.clear(), this.onTimerEndedObservable.clear(), this.onStateChangedObservable.clear(); } /** * Will start a new iteration of this timer. Only one instance of this timer can run at a time. * * @param timeToEnd how much time to measure until timer ended */ start(e = this._timeToEnd) { if (this._state === Q8.STARTED) throw new Error("Timer already started. Please stop it before starting again"); this._timeToEnd = e, this._startTime = Date.now(), this._timer = 0, this._observer = this._contextObservable.add(this._tick, this._observableParameters.mask, this._observableParameters.insertFirst, this._observableParameters.scope), this._setState(Q8.STARTED); } /** * Will force a stop on the next tick. */ stop() { this._state === Q8.STARTED && (this._breakOnNextTick = !0); } /** * Dispose this timer, clearing all resources */ dispose() { this._observer && this._contextObservable.remove(this._observer), this.clearObservables(); } _setState(e) { this._state = e, this.onStateChangedObservable.notifyObservers(this._state); } _stop(e, t = !1) { this._contextObservable.remove(this._observer), this._setState(Q8.ENDED), t ? this.onTimerAbortedObservable.notifyObservers(e) : this.onTimerEndedObservable.notifyObservers(e); } } class iP extends Ku { /** * Is rotation enabled when moving forward? * Disabling this feature will prevent the user from deciding the direction when teleporting */ get rotationEnabled() { return this._rotationEnabled; } /** * Sets whether rotation is enabled or not * @param enabled is rotation enabled when teleportation is shown */ set rotationEnabled(e) { if (this._rotationEnabled = e, this._options.teleportationTargetMesh) { const t = this._options.teleportationTargetMesh.getChildMeshes(!1, (i) => i.name === "rotationCone"); t[0] && t[0].setEnabled(e); } } /** * Exposes the currently set teleportation target mesh. 
*/ get teleportationTargetMesh() { return this._options.teleportationTargetMesh || null; } /** * constructs a new teleportation system * @param _xrSessionManager an instance of WebXRSessionManager * @param _options configuration object for this feature */ constructor(e, t) { super(e), this._options = t, this._controllers = {}, this._snappedToPoint = !1, this._cachedColor4White = new Et(1, 1, 1, 1), this._tmpRay = new gs(new D(), new D()), this._tmpVector = new D(), this._tmpQuaternion = new Ze(), this.skipNextTeleportation = !1, this.backwardsMovementEnabled = !0, this.backwardsTeleportationDistance = 0.7, this.parabolicCheckRadius = 5, this.parabolicRayEnabled = !0, this.straightRayEnabled = !0, this.rotationAngle = Math.PI / 8, this.onTargetMeshPositionUpdatedObservable = new Fe(), this.teleportationEnabled = !0, this._rotationEnabled = !0, this._attachController = (i) => { if (this._controllers[i.uniqueId] || this._options.forceHandedness && i.inputSource.handedness !== this._options.forceHandedness) return; this._controllers[i.uniqueId] = { xrController: i, teleportationState: { forward: !1, backwards: !1, rotating: !1, currentRotation: 0, baseRotation: 0, blocked: !1 } }; const r = this._controllers[i.uniqueId]; if (r.xrController.inputSource.targetRayMode === "tracked-pointer" && r.xrController.inputSource.gamepad) { const s = () => { if (i.motionController) { const n = i.motionController.getComponentOfType(j_.THUMBSTICK_TYPE) || i.motionController.getComponentOfType(j_.TOUCHPAD_TYPE); if (!n || this._options.useMainComponentOnly) { const a = i.motionController.getMainComponent(); if (!a) return; r.teleportationComponent = a, r.onButtonChangedObserver = a.onButtonStateChangedObservable.add(() => { if (this.teleportationEnabled && a.changes.pressed) if (a.changes.pressed.current) { r.teleportationState.forward = !0, this._currentTeleportationControllerId = r.xrController.uniqueId, r.teleportationState.baseRotation = this._options.xrInput.xrCamera.rotationQuaternion.toEulerAngles().y, r.teleportationState.currentRotation = 0; const l = this._options.timeToTeleport || 3e3; FH({ timeout: l, contextObservable: this._xrSessionManager.onXRFrameObservable, breakCondition: () => !a.pressed, onEnded: () => { this._currentTeleportationControllerId === r.xrController.uniqueId && r.teleportationState.forward && this._teleportForward(i.uniqueId); } }); } else r.teleportationState.forward = !1, this._currentTeleportationControllerId = ""; }); } else r.teleportationComponent = n, r.onAxisChangedObserver = n.onAxisValueChangedObservable.add((a) => { if (a.y <= 0.7 && r.teleportationState.backwards && (r.teleportationState.backwards = !1), a.y > 0.7 && !r.teleportationState.forward && this.backwardsMovementEnabled && !this.snapPointsOnly && !r.teleportationState.backwards) { r.teleportationState.backwards = !0, this._tmpQuaternion.copyFrom(this._options.xrInput.xrCamera.rotationQuaternion), this._tmpQuaternion.toEulerAnglesToRef(this._tmpVector), this._tmpVector.x = 0, this._tmpVector.z = 0, Ze.FromEulerVectorToRef(this._tmpVector, this._tmpQuaternion), this._tmpVector.set(0, 0, this.backwardsTeleportationDistance * (this._xrSessionManager.scene.useRightHandedSystem ? 
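/*
 * A hedged sketch of enabling this teleportation feature directly (the default
 * experience helper further below otherwise enables it), assuming the public name
 * WebXRFeatureName.TELEPORTATION from "@babylonjs/core"; `xr` and `ground` are
 * placeholders for an existing default experience and floor mesh:
 *
 *   import { WebXRFeatureName } from "@babylonjs/core";
 *
 *   const teleportation = xr.baseExperience.featuresManager.enableFeature(
 *     WebXRFeatureName.TELEPORTATION, "stable", {
 *       xrInput: xr.input,            // required: the camera and controllers used above
 *       floorMeshes: [ground],        // meshes the teleport ray is allowed to land on
 *       timeToTeleport: 3000,         // hold duration in ms (the default used above)
 *       useMainComponentOnly: false,  // true forces the main button instead of the thumbstick
 *     });
 *
 *   // Runtime toggles read by the controller handlers above:
 *   teleportation.rotationEnabled = true;          // allow choosing a landing direction
 *   teleportation.backwardsMovementEnabled = true; // thumbstick-back nudges the camera backwards
 */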
1 : -1)), this._tmpVector.rotateByQuaternionToRef(this._tmpQuaternion, this._tmpVector), this._tmpVector.addInPlace(this._options.xrInput.xrCamera.position), this._tmpRay.origin.copyFrom(this._tmpVector), this._tmpRay.length = this._options.xrInput.xrCamera.realWorldHeight + 0.1, this._tmpRay.direction.set(0, -1, 0); const l = this._xrSessionManager.scene.pickWithRay(this._tmpRay, (o) => this._floorMeshes.indexOf(o) !== -1); l && l.pickedPoint && (this._options.xrInput.xrCamera.position.x = l.pickedPoint.x, this._options.xrInput.xrCamera.position.z = l.pickedPoint.z); } if (a.y < -0.7 && !this._currentTeleportationControllerId && !r.teleportationState.rotating && this.teleportationEnabled && (r.teleportationState.forward = !0, this._currentTeleportationControllerId = r.xrController.uniqueId, r.teleportationState.baseRotation = this._options.xrInput.xrCamera.rotationQuaternion.toEulerAngles().y), a.x) { if (r.teleportationState.forward) this._currentTeleportationControllerId === r.xrController.uniqueId && (this.rotationEnabled ? setTimeout(() => { r.teleportationState.currentRotation = Math.atan2(a.x, a.y * (this._xrSessionManager.scene.useRightHandedSystem ? 1 : -1)); }) : r.teleportationState.currentRotation = 0); else if (!r.teleportationState.rotating && Math.abs(a.x) > 0.7) { r.teleportationState.rotating = !0; const l = this.rotationAngle * (a.x > 0 ? 1 : -1) * (this._xrSessionManager.scene.useRightHandedSystem ? -1 : 1); Ze.FromEulerAngles(0, l, 0).multiplyToRef(this._options.xrInput.xrCamera.rotationQuaternion, this._options.xrInput.xrCamera.rotationQuaternion); } } else r.teleportationState.rotating = !1; a.x === 0 && a.y === 0 && (r.teleportationState.blocked && (r.teleportationState.blocked = !1, this._setTargetMeshVisibility(!1)), r.teleportationState.forward && this._teleportForward(i.uniqueId)); }); } }; i.motionController ? 
s() : i.onMotionControllerInitObservable.addOnce(() => { s(); }); } else this._xrSessionManager.scene.onPointerObservable.add((s) => { if (s.type === si.POINTERDOWN) { r.teleportationState.forward = !0, this._currentTeleportationControllerId = r.xrController.uniqueId, r.teleportationState.baseRotation = this._options.xrInput.xrCamera.rotationQuaternion.toEulerAngles().y, r.teleportationState.currentRotation = 0; const n = this._options.timeToTeleport || 3e3; FH({ timeout: n, contextObservable: this._xrSessionManager.onXRFrameObservable, onEnded: () => { this._currentTeleportationControllerId === r.xrController.uniqueId && r.teleportationState.forward && this._teleportForward(i.uniqueId); } }); } else s.type === si.POINTERUP && (r.teleportationState.forward = !1, this._currentTeleportationControllerId = ""); }); }, this._options.teleportationTargetMesh || this._createDefaultTargetMesh(), this._floorMeshes = this._options.floorMeshes || [], this._snapToPositions = this._options.snapPositions || [], this._blockedRayColor = this._options.blockedRayColor || new Et(1, 0, 0, 0.75), this._setTargetMeshVisibility(!1); } /** * Get the snapPointsOnly flag */ get snapPointsOnly() { return !!this._options.snapPointsOnly; } /** * Sets the snapPointsOnly flag * @param snapToPoints should teleportation be exclusively to snap points */ set snapPointsOnly(e) { this._options.snapPointsOnly = e; } /** * Add a new mesh to the floor meshes array * @param mesh the mesh to use as floor mesh */ addFloorMesh(e) { this._floorMeshes.push(e); } /** * Add a mesh to the list of meshes blocking the teleportation ray * @param mesh The mesh to add to the teleportation-blocking meshes */ addBlockerMesh(e) { this._options.pickBlockerMeshes = this._options.pickBlockerMeshes || [], this._options.pickBlockerMeshes.push(e); } /** * Add a new snap-to point to fix teleportation to this position * @param newSnapPoint The new Snap-To point */ addSnapPoint(e) { this._snapToPositions.push(e); } attach() { return super.attach() ? (this._currentTeleportationControllerId = "", this._options.xrInput.controllers.forEach(this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerAddedObservable, this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerRemovedObservable, (e) => { this._detachController(e.uniqueId); }), !0) : !1; } detach() { return super.detach() ? (Object.keys(this._controllers).forEach((e) => { this._detachController(e); }), this._setTargetMeshVisibility(!1), this._currentTeleportationControllerId = "", this._controllers = {}, !0) : !1; } dispose() { super.dispose(), this._options.teleportationTargetMesh && this._options.teleportationTargetMesh.dispose(!1, !0); } /** * Remove a mesh from the floor meshes array * @param mesh the mesh to remove */ removeFloorMesh(e) { const t = this._floorMeshes.indexOf(e); t !== -1 && this._floorMeshes.splice(t, 1); } /** * Remove a mesh from the blocker meshes array * @param mesh the mesh to remove */ removeBlockerMesh(e) { this._options.pickBlockerMeshes = this._options.pickBlockerMeshes || []; const t = this._options.pickBlockerMeshes.indexOf(e); t !== -1 && this._options.pickBlockerMeshes.splice(t, 1); } /** * Remove a mesh from the floor meshes array using its name * @param name the mesh name to remove */ removeFloorMeshByName(e) { const t = this._xrSessionManager.scene.getMeshByName(e); t && this.removeFloorMesh(t); } /** * This function will iterate through the array, searching for this point or equal to it. 
It will then remove it from the snap-to array * @param snapPointToRemove the point (or a clone of it) to be removed from the array * @returns was the point found and removed or not */ removeSnapPoint(e) { let t = this._snapToPositions.indexOf(e); if (t === -1) { for (let i = 0; i < this._snapToPositions.length; ++i) if (this._snapToPositions[i].equals(e)) { t = i; break; } } return t !== -1 ? (this._snapToPositions.splice(t, 1), !0) : !1; } /** * This function sets a selection feature that will be disabled when * the forward ray is shown and will be reattached when hidden. * This is used to remove the selection rays when moving. * @param selectionFeature the feature to disable when forward movement is enabled */ setSelectionFeature(e) { this._selectionFeature = e; } _onXRFrame(e) { const t = this._xrSessionManager.currentFrame, i = this._xrSessionManager.scene; if (!this.attach || !t) return; const r = this._options.teleportationTargetMesh; if (this._currentTeleportationControllerId) { if (!r) return; r.rotationQuaternion = r.rotationQuaternion || new Ze(); const s = this._controllers[this._currentTeleportationControllerId]; if (s && s.teleportationState.forward) { Ze.RotationYawPitchRollToRef(s.teleportationState.currentRotation + s.teleportationState.baseRotation, 0, 0, r.rotationQuaternion); let n = !1; if (s.xrController.getWorldPointerRayToRef(this._tmpRay), this.straightRayEnabled) { const a = i.pickWithRay(this._tmpRay, (l) => { if (this._options.pickBlockerMeshes && this._options.pickBlockerMeshes.indexOf(l) !== -1) return !0; const o = this._floorMeshes.indexOf(l); return o === -1 ? !1 : this._floorMeshes[o].absolutePosition.y < this._options.xrInput.xrCamera.globalPosition.y; }); if (a && a.pickedMesh && this._options.pickBlockerMeshes && this._options.pickBlockerMeshes.indexOf(a.pickedMesh) !== -1) { s.teleportationState.blocked = !0, this._setTargetMeshVisibility(!1), this._showParabolicPath(a); return; } else a && a.pickedPoint && (s.teleportationState.blocked = !1, n = !0, this._setTargetMeshPosition(a), this._setTargetMeshVisibility(!0), this._showParabolicPath(a)); } if (this.parabolicRayEnabled && !n) { const a = s.xrController.pointer.rotationQuaternion.toEulerAngles().x, l = 1 + (Math.PI / 2 - Math.abs(a)), o = this.parabolicCheckRadius * l; this._tmpRay.origin.addToRef(this._tmpRay.direction.scale(o * 2), this._tmpVector), this._tmpVector.y = this._tmpRay.origin.y, this._tmpRay.origin.addInPlace(this._tmpRay.direction.scale(o)), this._tmpVector.subtractToRef(this._tmpRay.origin, this._tmpRay.direction), this._tmpRay.direction.normalize(); const u = i.pickWithRay(this._tmpRay, (h) => this._options.pickBlockerMeshes && this._options.pickBlockerMeshes.indexOf(h) !== -1 ? !0 : this._floorMeshes.indexOf(h) !== -1); if (u && u.pickedMesh && this._options.pickBlockerMeshes && this._options.pickBlockerMeshes.indexOf(u.pickedMesh) !== -1) { s.teleportationState.blocked = !0, this._setTargetMeshVisibility(!1), this._showParabolicPath(u); return; } else u && u.pickedPoint && (s.teleportationState.blocked = !1, n = !0, this._setTargetMeshPosition(u), this._setTargetMeshVisibility(!0), this._showParabolicPath(u)); } this._setTargetMeshVisibility(n); } else this._setTargetMeshVisibility(!1); } else this._disposeBezierCurve(), this._setTargetMeshVisibility(!1); } _createDefaultTargetMesh() { this._options.defaultTargetMeshOptions = this._options.defaultTargetMeshOptions || {}; const e = this._options.useUtilityLayer ? 
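/*
 * Snap points, floor meshes and blocker meshes are managed through the add/remove
 * helpers above and resolved by _findClosestSnapPointWithRadius further below. A small
 * sketch with the assumed public names; `ground` and `wall` are placeholder meshes and
 * `teleportation` is the feature instance from the earlier sketch:
 *
 *   import { Vector3 } from "@babylonjs/core";
 *
 *   teleportation.addFloorMesh(ground);                // extend the floor list after creation
 *   teleportation.addBlockerMesh(wall);                // hits on this mesh block the teleport ray
 *   teleportation.addSnapPoint(new Vector3(0, 0, 2));  // candidate landing positions
 *   teleportation.snapPointsOnly = true;               // only teleport onto snap points
 *
 *   // A snap point is used when the picked point lies within snapToPositionRadius
 *   // of it (an option read below, defaulting to 0.8).
 */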
this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene : this._xrSessionManager.scene, t = zI("teleportationTarget", { width: 2, height: 2, subdivisions: 2 }, e); if (t.isPickable = !1, this._options.defaultTargetMeshOptions.teleportationCircleMaterial) t.material = this._options.defaultTargetMeshOptions.teleportationCircleMaterial; else { const n = new gg("teleportationPlaneDynamicTexture", 512, e, !0); n.hasAlpha = !0; const a = n.getContext(), l = 512 / 2, o = 512 / 2, u = 200; a.beginPath(), a.arc(l, o, u, 0, 2 * Math.PI, !1), a.fillStyle = this._options.defaultTargetMeshOptions.teleportationFillColor || "#444444", a.fill(), a.lineWidth = 10, a.strokeStyle = this._options.defaultTargetMeshOptions.teleportationBorderColor || "#FFFFFF", a.stroke(), a.closePath(), n.update(); const h = new Dt("teleportationPlaneMaterial", e); h.diffuseTexture = n, t.material = h; } const i = o6("torusTeleportation", { diameter: 0.75, thickness: 0.1, tessellation: 20 }, e); if (i.isPickable = !1, i.parent = t, !this._options.defaultTargetMeshOptions.disableAnimation) { const s = new nt("animationInnerCircle", "position.y", 30, nt.ANIMATIONTYPE_FLOAT, nt.ANIMATIONLOOPMODE_CYCLE), n = []; n.push({ frame: 0, value: 0 }), n.push({ frame: 30, value: 0.4 }), n.push({ frame: 60, value: 0 }), s.setKeys(n); const a = new fK(); a.setEasingMode(hl.EASINGMODE_EASEINOUT), s.setEasingFunction(a), i.animations = [], i.animations.push(s), e.beginAnimation(i, 0, 60, !0); } const r = Hf("rotationCone", { diameterTop: 0, tessellation: 4 }, e); if (r.isPickable = !1, r.scaling.set(0.5, 0.12, 0.2), r.rotate(bl.X, Math.PI / 2), r.position.z = 0.6, r.parent = i, this._options.defaultTargetMeshOptions.torusArrowMaterial) i.material = this._options.defaultTargetMeshOptions.torusArrowMaterial, r.material = this._options.defaultTargetMeshOptions.torusArrowMaterial; else { const s = new Dt("torusConsMat", e); s.disableLighting = !!this._options.defaultTargetMeshOptions.disableLighting, s.disableLighting ? s.emissiveColor = new ze(0.3, 0.3, 1) : s.diffuseColor = new ze(0.3, 0.3, 1), s.alpha = 0.9, i.material = s, r.material = s, this._teleportationRingMaterial = s; } this._options.renderingGroupId !== void 0 && (t.renderingGroupId = this._options.renderingGroupId, i.renderingGroupId = this._options.renderingGroupId, r.renderingGroupId = this._options.renderingGroupId), this._options.teleportationTargetMesh = t, this._setTargetMeshVisibility(!1); } _detachController(e) { const t = this._controllers[e]; t && (t.teleportationComponent && (t.onAxisChangedObserver && t.teleportationComponent.onAxisValueChangedObservable.remove(t.onAxisChangedObserver), t.onButtonChangedObserver && t.teleportationComponent.onButtonStateChangedObservable.remove(t.onButtonChangedObserver)), delete this._controllers[e]); } _findClosestSnapPointWithRadius(e, t = this._options.snapToPositionRadius || 0.8) { let i = null, r = Number.MAX_VALUE; if (this._snapToPositions.length) { const s = t * t; this._snapToPositions.forEach((n) => { const a = D.DistanceSquared(n, e); a <= s && a < r && (r = a, i = n); }); } return i; } _setTargetMeshPosition(e) { const t = e.pickedPoint; if (!this._options.teleportationTargetMesh || !t) return; const i = this._findClosestSnapPointWithRadius(t); this._snappedToPoint = !!i, this.snapPointsOnly && !this._snappedToPoint && this._teleportationRingMaterial ? 
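/*
 * The default target mesh built above (disc, torus and direction cone) can be restyled
 * through defaultTargetMeshOptions when the feature is enabled. A hedged sketch; the
 * colours are illustrative and `xr` / `ground` are placeholders:
 *
 *   const teleportation = xr.baseExperience.featuresManager.enableFeature(
 *     WebXRFeatureName.TELEPORTATION, "stable", {
 *       xrInput: xr.input,
 *       floorMeshes: [ground],
 *       defaultTargetMeshOptions: {
 *         teleportationFillColor: "#035bff",   // fill of the dynamic-texture disc drawn above
 *         teleportationBorderColor: "#ffffff", // stroke colour of the same disc
 *         disableAnimation: false,             // keep the bobbing torus animation
 *         disableLighting: false,              // diffuse colour instead of emissive
 *       },
 *     });
 */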
this._teleportationRingMaterial.diffuseColor.set(1, 0.3, 0.3) : this.snapPointsOnly && this._snappedToPoint && this._teleportationRingMaterial && this._teleportationRingMaterial.diffuseColor.set(0.3, 0.3, 1), this._options.teleportationTargetMesh.position.copyFrom(i || t), this._options.teleportationTargetMesh.position.y += 0.01, this.onTargetMeshPositionUpdatedObservable.notifyObservers(e); } _setTargetMeshVisibility(e, t) { this._options.teleportationTargetMesh && (this._options.teleportationTargetMesh.isVisible === e && !t || (this._options.teleportationTargetMesh.isVisible = e, this._options.teleportationTargetMesh.getChildren(void 0, !1).forEach((i) => { i.isVisible = e; }), e ? this._selectionFeature && this._selectionFeature.detach() : (this._quadraticBezierCurve && (this._quadraticBezierCurve.dispose(), this._quadraticBezierCurve = null), this._selectionFeature && this._selectionFeature.attach()))); } _disposeBezierCurve() { this._quadraticBezierCurve && (this._quadraticBezierCurve.dispose(), this._quadraticBezierCurve = null); } _showParabolicPath(e) { if (!e.pickedPoint || !this._currentTeleportationControllerId) return; const t = this._options.useUtilityLayer ? this._options.customUtilityLayerScene || bn.DefaultUtilityLayer.utilityLayerScene : this._xrSessionManager.scene, i = this._controllers[this._currentTeleportationControllerId], r = T4.CreateQuadraticBezier(i.xrController.pointer.absolutePosition, e.ray.origin, e.pickedPoint, 25), s = i.teleportationState.blocked ? this._blockedRayColor : void 0, n = new Array(26).fill(s || this._cachedColor4White); this._options.generateRayPathMesh ? this._quadraticBezierCurve = this._options.generateRayPathMesh(r.getPoints(), e) : this._quadraticBezierCurve = Ba("teleportation path line", { points: r.getPoints(), instance: this._quadraticBezierCurve, updatable: !0, colors: n }, t), this._quadraticBezierCurve.isPickable = !1, this._options.renderingGroupId !== void 0 && (this._quadraticBezierCurve.renderingGroupId = this._options.renderingGroupId); } _teleportForward(e) { const t = this._controllers[e]; if (!(!t || !t.teleportationState.forward || !this.teleportationEnabled) && (t.teleportationState.forward = !1, this._currentTeleportationControllerId = "", !(this.snapPointsOnly && !this._snappedToPoint))) { if (this.skipNextTeleportation) { this.skipNextTeleportation = !1; return; } if (this._options.teleportationTargetMesh && this._options.teleportationTargetMesh.isVisible) { const i = this._options.xrInput.xrCamera.realWorldHeight; this._options.xrInput.xrCamera.onBeforeCameraTeleport.notifyObservers(this._options.xrInput.xrCamera.position), this._options.xrInput.xrCamera.position.copyFrom(this._options.teleportationTargetMesh.position), this._options.xrInput.xrCamera.position.y += i, Ze.FromEulerAngles(0, t.teleportationState.currentRotation - (this._xrSessionManager.scene.useRightHandedSystem ? 
Math.PI : 0), 0).multiplyToRef(this._options.xrInput.xrCamera.rotationQuaternion, this._options.xrInput.xrCamera.rotationQuaternion), this._options.xrInput.xrCamera.onAfterCameraTeleport.notifyObservers(this._options.xrInput.xrCamera.position); } } } } iP.Name = Qs.TELEPORTATION; iP.Version = 1; Oo.AddWebXRFeature(iP.Name, (c, e) => () => new iP(c, e), iP.Version, !0); class v2e { } class RU { constructor() { } /** * Creates the default xr experience * @param scene scene * @param options options for basic configuration * @returns resulting WebXRDefaultExperience */ static CreateAsync(e, t = {}) { const i = new RU(); if (e.onDisposeObservable.addOnce(() => { i.dispose(); }), !t.disableDefaultUI) { const r = Object.assign({ renderTarget: i.renderTarget }, t.uiOptions || {}); t.optionalFeatures && (typeof t.optionalFeatures == "boolean" ? r.optionalFeatures = ["hit-test", "anchors", "plane-detection", "hand-tracking"] : r.optionalFeatures = t.optionalFeatures), i.enterExitUI = new MU(e, r); } return SU.CreateAsync(e).then((r) => { if (i.baseExperience = r, t.ignoreNativeCameraTransformation && (i.baseExperience.camera.compensateOnFirstFrame = !1), i.input = new are(r.sessionManager, r.camera, Object.assign({ controllerOptions: { renderingGroupId: t.renderingGroupId } }, t.inputOptions || {})), !t.disablePointerSelection) { const s = Object.assign(Object.assign({}, t.pointerSelectionOptions), { xrInput: i.input, renderingGroupId: t.renderingGroupId }); i.pointerSelection = i.baseExperience.featuresManager.enableFeature(i6.Name, t.useStablePlugins ? "stable" : "latest", s), t.disableTeleportation || (i.teleportation = i.baseExperience.featuresManager.enableFeature(iP.Name, t.useStablePlugins ? "stable" : "latest", Object.assign({ floorMeshes: t.floorMeshes, xrInput: i.input, renderingGroupId: t.renderingGroupId }, t.teleportationOptions)), i.teleportation.setSelectionFeature(i.pointerSelection)); } if (t.disableNearInteraction || (i.nearInteraction = i.baseExperience.featuresManager.enableFeature(r6.Name, t.useStablePlugins ? 
"stable" : "latest", Object.assign({ xrInput: i.input, farInteractionFeature: i.pointerSelection, renderingGroupId: t.renderingGroupId, useUtilityLayer: !0, enableNearInteractionOnAllControllers: !0 }, t.nearInteractionOptions))), i.renderTarget = i.baseExperience.sessionManager.getWebXRRenderTarget(t.outputCanvasOptions), !t.disableDefaultUI) return i.enterExitUI.setHelperAsync(i.baseExperience, i.renderTarget); }).then(() => i).catch((r) => (Ce.Error("Error initializing XR"), Ce.Error(r), i)); } /** * Disposes of the experience helper */ dispose() { this.baseExperience && this.baseExperience.dispose(), this.input && this.input.dispose(), this.enterExitUI && this.enterExitUI.dispose(), this.renderTarget && this.renderTarget.dispose(); } } var A2e = !0; ii.prototype.createDefaultLight = function(c = !1) { if (c && this.lights) for (let e = 0; e < this.lights.length; e++) this.lights[e].dispose(); this.lights.length === 0 && new vg("default light", D.Up(), this); }; ii.prototype.createDefaultCamera = function(c = !1, e = !1, t = !1) { if (e && this.activeCamera && (this.activeCamera.dispose(), this.activeCamera = null), !this.activeCamera) { const i = this.getWorldExtends((l) => l.isVisible && l.isEnabled()), r = i.max.subtract(i.min), s = i.min.add(r.scale(0.5)); let n, a = r.length() * 1.5; if (isFinite(a) || (a = 1, s.copyFromFloats(0, 0, 0)), c) { const l = new Pn("default camera", -(Math.PI / 2), Math.PI / 2, a, s, this); l.lowerRadiusLimit = a * 0.01, l.wheelPrecision = 100 / a, n = l; } else { const l = new du("default camera", new D(s.x, s.y, -a), this); l.setTarget(s), n = l; } n.minZ = a * 0.01, n.maxZ = a * 1e3, n.speed = a * 0.2, this.activeCamera = n, t && n.attachControl(); } }; ii.prototype.createDefaultCameraOrLight = function(c = !1, e = !1, t = !1) { this.createDefaultLight(e), this.createDefaultCamera(c, e, t); }; ii.prototype.createDefaultSkybox = function(c, e = !1, t = 1e3, i = 0, r = !0) { if (!c) return Ce.Warn("Can not create default skybox without environment texture."), null; r && c && (this.environmentTexture = c); const s = B4("hdrSkyBox", { size: t }, this); if (e) { const n = new Ri("skyBox", this); n.backFaceCulling = !1, n.reflectionTexture = c.clone(), n.reflectionTexture && (n.reflectionTexture.coordinatesMode = De.SKYBOX_MODE), n.microSurface = 1 - i, n.disableLighting = !0, n.twoSidedLighting = !0, s.material = n; } else { const n = new Dt("skyBox", this); n.backFaceCulling = !1, n.reflectionTexture = c.clone(), n.reflectionTexture && (n.reflectionTexture.coordinatesMode = De.SKYBOX_MODE), n.disableLighting = !0, s.material = n; } return s.isPickable = !1, s.infiniteDistance = !0, s.ignoreCameraMaxZ = !0, s; }; ii.prototype.createDefaultEnvironment = function(c) { return g5 ? new g5(c, this) : null; }; ii.prototype.createDefaultVRExperience = function(c = {}) { return new _P(this, c); }; ii.prototype.createDefaultXRExperienceAsync = function(c = {}) { return RU.CreateAsync(this, c).then((e) => e); }; function aq(c) { for (; c.firstChild; ) c.removeChild(c.firstChild); c.srcObject = null, c.src = "", c.removeAttribute("src"); } class Cm extends De { /** * Event triggered when a dom action is required by the user to play the video. * This happens due to recent changes in browser policies preventing video to auto start. */ get onUserActionRequestedObservable() { return this._onUserActionRequestedObservable || (this._onUserActionRequestedObservable = new Fe()), this._onUserActionRequestedObservable; } _processError(e) { this._errorFound = !0, this._onError ? 
this._onError(e == null ? void 0 : e.message) : Ce.Error(e == null ? void 0 : e.message); } _handlePlay() { this._errorFound = !1, this.video.play().catch((e) => { if ((e == null ? void 0 : e.name) === "NotAllowedError") { if (this._onUserActionRequestedObservable && this._onUserActionRequestedObservable.hasObservers()) { this._onUserActionRequestedObservable.notifyObservers(this); return; } else if (!this.video.muted) { Ce.Warn("Unable to autoplay a video with sound. Trying again with muted turned true"), this.video.muted = !0, this._errorFound = !1, this.video.play().catch((t) => { this._processError(t); }); return; } } this._processError(e); }); } /** * Creates a video texture. * If you want to display a video in your scene, this is the special texture for that. * This special texture works similar to other textures, with the exception of a few parameters. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/videoTexture * @param name optional name, will detect from video source, if not defined * @param src can be used to provide an url, array of urls or an already setup HTML video element. * @param scene is obviously the current scene. * @param generateMipMaps can be used to turn on mipmaps (Can be expensive for videoTextures because they are often updated). * @param invertY is false by default but can be used to invert video on Y axis * @param samplingMode controls the sampling method and is set to TRILINEAR_SAMPLINGMODE by default * @param settings allows finer control over video usage * @param onError defines a callback triggered when an error occurred during the loading session * @param format defines the texture format to use (Engine.TEXTUREFORMAT_RGBA by default) */ constructor(e, t, i, r = !1, s = !1, n = De.TRILINEAR_SAMPLINGMODE, a = {}, l, o = 5) { var u; super(null, i, !r, s), this._externalTexture = null, this._onUserActionRequestedObservable = null, this._stillImageCaptured = !1, this._displayingPosterTexture = !1, this._frameId = -1, this._currentSrc = null, this._errorFound = !1, this.isVideo = !0, this._resizeInternalTexture = () => { var d; this._texture != null && this._texture.dispose(), !this._getEngine().needPOTTextures || Ve.IsExponentOfTwo(this.video.videoWidth) && Ve.IsExponentOfTwo(this.video.videoHeight) ? (this.wrapU = De.WRAP_ADDRESSMODE, this.wrapV = De.WRAP_ADDRESSMODE) : (this.wrapU = De.CLAMP_ADDRESSMODE, this.wrapV = De.CLAMP_ADDRESSMODE, this._generateMipMaps = !1), this._texture = this._getEngine().createDynamicTexture(this.video.videoWidth, this.video.videoHeight, this._generateMipMaps, this.samplingMode), this._texture.format = (d = this._format) !== null && d !== void 0 ? 
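/*
 * Illustrative sketch (not part of this bundle): creating a VideoTexture with the constructor documented
 * above and applying it to a material. The url, mesh and material names are placeholders.
 *
 *   const videoTexture = new BABYLON.VideoTexture(
 *     "intro",                    // name (detected from the source when omitted)
 *     "textures/intro.mp4",       // url, array of urls, or an existing HTMLVideoElement
 *     scene,
 *     false,                      // generateMipMaps (costly, since the texture updates every frame)
 *     false,                      // invertY
 *     BABYLON.Texture.TRILINEAR_SAMPLINGMODE,
 *     { autoPlay: true, loop: true, muted: true });
 *   const screenMat = new BABYLON.StandardMaterial("screen", scene);
 *   screenMat.diffuseTexture = videoTexture;
 *   screenMesh.material = screenMat;
 */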
d : 5, this._frameId = -1, this._updateInternalTexture(); }, this._createInternalTexture = () => { if (this._texture != null) if (this._displayingPosterTexture) this._displayingPosterTexture = !1; else return; if (this.video.addEventListener("resize", this._resizeInternalTexture), this._resizeInternalTexture(), !this.video.autoplay && !this._settings.poster && !this._settings.independentVideoSource) { const d = this.video.onplaying, f = this.video.muted; this.video.muted = !0, this.video.onplaying = () => { this.video.muted = f, this.video.onplaying = d, this._updateInternalTexture(), this._errorFound || this.video.pause(), this.onLoadObservable.hasObservers() && this.onLoadObservable.notifyObservers(this); }, this._handlePlay(); } else this._updateInternalTexture(), this.onLoadObservable.hasObservers() && this.onLoadObservable.notifyObservers(this); }, this._reset = () => { this._texture != null && (this._displayingPosterTexture || (this._texture.dispose(), this._texture = null)); }, this._updateInternalTexture = () => { if (this._texture == null || this.video.readyState < this.video.HAVE_CURRENT_DATA || this._displayingPosterTexture) return; const d = this.getScene().getFrameId(); this._frameId !== d && (this._frameId = d, this._getEngine().updateVideoTexture(this._texture, this._externalTexture ? this._externalTexture : this.video, this._invertY)); }, this._settings = Object.assign({ autoPlay: !0, loop: !0, autoUpdateTexture: !0 }, a), this._onError = l, this._generateMipMaps = r, this._initialSamplingMode = n, this.autoUpdateTexture = this._settings.autoUpdateTexture, this._currentSrc = t, this.name = e || this._getName(t), this.video = this._getVideo(t), !((u = this._engine) === null || u === void 0) && u.createExternalTexture && (this._externalTexture = this._engine.createExternalTexture(this.video)), this._settings.independentVideoSource || (this._settings.poster && (this.video.poster = this._settings.poster), this._settings.autoPlay !== void 0 && (this.video.autoplay = this._settings.autoPlay), this._settings.loop !== void 0 && (this.video.loop = this._settings.loop), this._settings.muted !== void 0 && (this.video.muted = this._settings.muted), this.video.setAttribute("playsinline", ""), this.video.addEventListener("paused", this._updateInternalTexture), this.video.addEventListener("seeked", this._updateInternalTexture), this.video.addEventListener("emptied", this._reset), this._settings.autoPlay && this._handlePlay()), this._createInternalTextureOnEvent = this._settings.poster && !this._settings.autoPlay ? "play" : "canplay", this.video.addEventListener(this._createInternalTextureOnEvent, this._createInternalTexture), this._format = o; const h = this.video.readyState >= this.video.HAVE_CURRENT_DATA; this._settings.poster && (!this._settings.autoPlay || !h) ? (this._texture = this._getEngine().createTexture(this._settings.poster, !1, !this.invertY, i), this._displayingPosterTexture = !0) : h && this._createInternalTexture(); } /** * Get the current class name of the video texture useful for serialization or dynamic coding. * @returns "VideoTexture" */ getClassName() { return "VideoTexture"; } _getName(e) { return e instanceof HTMLVideoElement ? e.currentSrc : typeof e == "object" ? e.toString() : e; } _getVideo(e) { if (e.isNative) return e; if (e instanceof HTMLVideoElement) return Ve.SetCorsBehavior(e.currentSrc, e), e; const t = document.createElement("video"); return typeof e == "string" ? 
(Ve.SetCorsBehavior(e, t), t.src = e) : (Ve.SetCorsBehavior(e[0], t), e.forEach((i) => { const r = document.createElement("source"); r.src = i, t.appendChild(r); })), this.onDisposeObservable.addOnce(() => { aq(t); }), t; } /** * @internal Internal method to initiate `update`. */ _rebuild() { this.update(); } /** * Update Texture in the `auto` mode. Does not do anything if `settings.autoUpdateTexture` is false. */ update() { this.autoUpdateTexture && this.updateTexture(!0); } /** * Update Texture in `manual` mode. Does not do anything if not visible or paused. * @param isVisible Visibility state, detected by user using `scene.getActiveMeshes()` or otherwise. */ updateTexture(e) { e && (this.video.paused && this._stillImageCaptured || (this._stillImageCaptured = !0, this._updateInternalTexture())); } /** * Get the underlying external texture (if supported by the current engine, else null) */ get externalTexture() { return this._externalTexture; } /** * Change video content. Changing video instance or setting multiple urls (as in constructor) is not supported. * @param url New url. */ updateURL(e) { this.video.src = e, this._currentSrc = e; } /** * Clones the texture. * @returns the cloned texture */ clone() { return new Cm(this.name, this._currentSrc, this.getScene(), this._generateMipMaps, this.invertY, this.samplingMode, this._settings); } /** * Dispose the texture and release its associated resources. */ dispose() { var e; super.dispose(), this._currentSrc = null, this._onUserActionRequestedObservable && (this._onUserActionRequestedObservable.clear(), this._onUserActionRequestedObservable = null), this.video.removeEventListener(this._createInternalTextureOnEvent, this._createInternalTexture), this._settings.independentVideoSource || (this.video.removeEventListener("paused", this._updateInternalTexture), this.video.removeEventListener("seeked", this._updateInternalTexture), this.video.removeEventListener("emptied", this._reset), this.video.removeEventListener("resize", this._resizeInternalTexture), this.video.pause()), (e = this._externalTexture) === null || e === void 0 || e.dispose(); } /** * Creates a video texture straight from a stream. * @param scene Define the scene the texture should be created in * @param stream Define the stream the texture should be created from * @param constraints video constraints * @param invertY Defines if the video should be stored with invert Y set to true (true by default) * @returns The created video texture as a promise */ static CreateFromStreamAsync(e, t, i, r = !0) { const s = e.getEngine().createVideoElement(i); return e.getEngine()._badOS && (document.body.appendChild(s), s.style.transform = "scale(0.0001, 0.0001)", s.style.opacity = "0", s.style.position = "fixed", s.style.bottom = "0px", s.style.right = "0px"), s.setAttribute("autoplay", ""), s.setAttribute("muted", "true"), s.setAttribute("playsinline", ""), s.muted = !0, s.isNative || (s.mozSrcObject !== void 0 ? s.mozSrcObject = t : typeof s.srcObject == "object" ? s.srcObject = t : s.src = window.URL && window.URL.createObjectURL(t)), new Promise((n) => { const a = () => { const l = new Cm("video", s, e, !0, r, void 0, void 0, void 0, 4); e.getEngine()._badOS && l.onDisposeObservable.addOnce(() => { s.remove(); }), l.onDisposeObservable.addOnce(() => { aq(s); }), n(l), s.removeEventListener("playing", a); }; s.addEventListener("playing", a), s.play(); }); } /** * Creates a video texture straight from your WebCam video feed. 
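* Illustrative example (not from this bundle): assumes an async context, an existing scene and a material to
* project onto, and uses a standard getUserMedia constraint for the video track.
*   const camTexture = await VideoTexture.CreateFromWebCamAsync(scene, { facingMode: "user" }, false, true);
*   screenMaterial.diffuseTexture = camTexture;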
* @param scene Defines the scene the texture should be created in * @param constraints Defines the constraints to use to create the web cam feed from WebRTC * @param audioConstaints Defines the audio constraints to use to create the web cam feed from WebRTC * @param invertY Defines if the video should be stored with invert Y set to true (true by default) * @returns The created video texture as a promise */ static async CreateFromWebCamAsync(e, t, i = !1, r = !0) { if (navigator.mediaDevices) { const s = await navigator.mediaDevices.getUserMedia({ video: t, audio: i }), n = await this.CreateFromStreamAsync(e, s, t, r); return n.onDisposeObservable.addOnce(() => { s.getTracks().forEach((a) => { a.stop(); }); }), n; } return Promise.reject("No support for userMedia on this device"); } /** * Creates a video texture straight from your WebCam video feed. * @param scene Defines the scene the texture should be created in * @param onReady Defines a callback triggered once the texture is ready * @param constraints Defines the constraints to use to create the web cam feed from WebRTC * @param audioConstaints Defines the audio constraints to use to create the web cam feed from WebRTC * @param invertY Defines if the video should be stored with invert Y set to true (true by default) */ static CreateFromWebCam(e, t, i, r = !1, s = !0) { this.CreateFromWebCamAsync(e, i, r, s).then(function(n) { t && t(n); }).catch(function(n) { Ce.Error(n.name); }); } } F([ W("settings") ], Cm.prototype, "_settings", void 0); F([ W("src") ], Cm.prototype, "_currentSrc", void 0); F([ W() ], Cm.prototype, "isVideo", void 0); De._CreateVideoTexture = (c, e, t, i = !1, r = !1, s = De.TRILINEAR_SAMPLINGMODE, n = {}, a, l = 5) => new Cm(c, e, t, i, r, s, n, a, l); Be("BABYLON.VideoTexture", Cm); class PU extends w1 { /** * Get the video texture associated with this video dome */ get videoTexture() { return this._texture; } /** * Get the video mode of this dome */ get videoMode() { return this.textureMode; } /** * Set the video mode of this dome. * @see textureMode */ set videoMode(e) { this.textureMode = e; } _initTexture(e, t, i) { const r = { loop: i.loop, autoPlay: i.autoPlay, autoUpdateTexture: !0, poster: i.poster }, s = new Cm((this.name || "videoDome") + "_texture", e, t, i.generateMipMaps, this._useDirectMapping, De.TRILINEAR_SAMPLINGMODE, r); return i.clickToPlay && (this._pointerObserver = t.onPointerObservable.add((n) => { var a; ((a = n.pickInfo) === null || a === void 0 ? void 0 : a.pickedMesh) === this.mesh && this._texture.video.play(); }, si.POINTERDOWN)), this._textureObserver = s.onLoadObservable.add(() => { this.onLoadObservable.notifyObservers(); }), s; } /** * Releases resources associated with this node. 
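* Illustrative example (not from this bundle): creating a VideoDome from a 360° video and disposing it
* together with its texture. The url and options are placeholders; `VideoDome` is the public name of the
* class minified above.
*   const dome = new VideoDome("dome", "textures/360video.mp4", { clickToPlay: true, autoPlay: false }, scene);
*   dome.videoMode = VideoDome.MODE_MONOSCOPIC;
*   // ... later
*   dome.dispose(false, true); // also disposes the referenced material and video texture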
* @param doNotRecurse Set to true to not recurse into each children (recurse into each children by default) * @param disposeMaterialAndTextures Set to true to also dispose referenced materials and textures (false by default) */ dispose(e, t = !1) { this._texture.onLoadObservable.remove(this._textureObserver), this._scene.onPointerObservable.remove(this._pointerObserver), super.dispose(e, t); } } PU.MODE_MONOSCOPIC = w1.MODE_MONOSCOPIC; PU.MODE_TOPBOTTOM = w1.MODE_TOPBOTTOM; PU.MODE_SIDEBYSIDE = w1.MODE_SIDEBYSIDE; class cre { // Properties /** * Gets the perf counter used for GPU frame time */ get gpuFrameTimeCounter() { return this.engine.getGPUFrameTimeCounter(); } /** * Gets the GPU frame time capture status */ get captureGPUFrameTime() { return this._captureGPUFrameTime; } /** * Enable or disable the GPU frame time capture */ set captureGPUFrameTime(e) { e !== this._captureGPUFrameTime && (this._captureGPUFrameTime = e, this.engine.captureGPUFrameTime(e)); } /** * Gets the perf counter used for shader compilation time */ get shaderCompilationTimeCounter() { return this._shaderCompilationTime; } /** * Gets the shader compilation time capture status */ get captureShaderCompilationTime() { return this._captureShaderCompilationTime; } /** * Enable or disable the shader compilation time capture */ set captureShaderCompilationTime(e) { e !== this._captureShaderCompilationTime && (this._captureShaderCompilationTime = e, e ? (this._onBeforeShaderCompilationObserver = this.engine.onBeforeShaderCompilationObservable.add(() => { this._shaderCompilationTime.fetchNewFrame(), this._shaderCompilationTime.beginMonitoring(); }), this._onAfterShaderCompilationObserver = this.engine.onAfterShaderCompilationObservable.add(() => { this._shaderCompilationTime.endMonitoring(); })) : (this.engine.onBeforeShaderCompilationObservable.remove(this._onBeforeShaderCompilationObserver), this._onBeforeShaderCompilationObserver = null, this.engine.onAfterShaderCompilationObservable.remove(this._onAfterShaderCompilationObserver), this._onAfterShaderCompilationObserver = null)); } /** * Instantiates a new engine instrumentation. * This class can be used to get instrumentation data from a Babylon engine * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#engineinstrumentation * @param engine Defines the engine to instrument */ constructor(e) { this.engine = e, this._captureGPUFrameTime = !1, this._captureShaderCompilationTime = !1, this._shaderCompilationTime = new Vc(), this._onBeginFrameObserver = null, this._onEndFrameObserver = null, this._onBeforeShaderCompilationObserver = null, this._onAfterShaderCompilationObserver = null; } /** * Dispose and release associated resources. 
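* Illustrative example (not from this bundle): `EngineInstrumentation` is the public name of this class, and
* the 1e-6 factor converts the GPU timer's nanoseconds to milliseconds; `engine` and `scene` are assumed to
* already exist.
*   const instrumentation = new EngineInstrumentation(engine);
*   instrumentation.captureGPUFrameTime = true;
*   scene.onAfterRenderObservable.add(() => {
*     console.log("GPU frame time: " + (instrumentation.gpuFrameTimeCounter.lastSecAverage * 0.000001).toFixed(2) + " ms");
*   });
*   // when finished measuring:
*   instrumentation.dispose();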
*/ dispose() { this.engine.onBeginFrameObservable.remove(this._onBeginFrameObserver), this._onBeginFrameObserver = null, this.engine.onEndFrameObservable.remove(this._onEndFrameObserver), this._onEndFrameObserver = null, this.engine.onBeforeShaderCompilationObservable.remove(this._onBeforeShaderCompilationObserver), this._onBeforeShaderCompilationObserver = null, this.engine.onAfterShaderCompilationObservable.remove(this._onAfterShaderCompilationObserver), this._onAfterShaderCompilationObserver = null, this.engine = null; } } class ure { // Properties /** * Gets the perf counter used for active meshes evaluation time */ get activeMeshesEvaluationTimeCounter() { return this._activeMeshesEvaluationTime; } /** * Gets the active meshes evaluation time capture status */ get captureActiveMeshesEvaluationTime() { return this._captureActiveMeshesEvaluationTime; } /** * Enable or disable the active meshes evaluation time capture */ set captureActiveMeshesEvaluationTime(e) { e !== this._captureActiveMeshesEvaluationTime && (this._captureActiveMeshesEvaluationTime = e, e ? (this._onBeforeActiveMeshesEvaluationObserver = this.scene.onBeforeActiveMeshesEvaluationObservable.add(() => { Ve.StartPerformanceCounter("Active meshes evaluation"), this._activeMeshesEvaluationTime.beginMonitoring(); }), this._onAfterActiveMeshesEvaluationObserver = this.scene.onAfterActiveMeshesEvaluationObservable.add(() => { Ve.EndPerformanceCounter("Active meshes evaluation"), this._activeMeshesEvaluationTime.endMonitoring(!1); })) : (this.scene.onBeforeActiveMeshesEvaluationObservable.remove(this._onBeforeActiveMeshesEvaluationObserver), this._onBeforeActiveMeshesEvaluationObserver = null, this.scene.onAfterActiveMeshesEvaluationObservable.remove(this._onAfterActiveMeshesEvaluationObserver), this._onAfterActiveMeshesEvaluationObserver = null)); } /** * Gets the perf counter used for render targets render time */ get renderTargetsRenderTimeCounter() { return this._renderTargetsRenderTime; } /** * Gets the render targets render time capture status */ get captureRenderTargetsRenderTime() { return this._captureRenderTargetsRenderTime; } /** * Enable or disable the render targets render time capture */ set captureRenderTargetsRenderTime(e) { e !== this._captureRenderTargetsRenderTime && (this._captureRenderTargetsRenderTime = e, e ? (this._onBeforeRenderTargetsRenderObserver = this.scene.onBeforeRenderTargetsRenderObservable.add(() => { Ve.StartPerformanceCounter("Render targets rendering"), this._renderTargetsRenderTime.beginMonitoring(); }), this._onAfterRenderTargetsRenderObserver = this.scene.onAfterRenderTargetsRenderObservable.add(() => { Ve.EndPerformanceCounter("Render targets rendering"), this._renderTargetsRenderTime.endMonitoring(!1); })) : (this.scene.onBeforeRenderTargetsRenderObservable.remove(this._onBeforeRenderTargetsRenderObserver), this._onBeforeRenderTargetsRenderObserver = null, this.scene.onAfterRenderTargetsRenderObservable.remove(this._onAfterRenderTargetsRenderObserver), this._onAfterRenderTargetsRenderObserver = null)); } /** * Gets the perf counter used for particles render time */ get particlesRenderTimeCounter() { return this._particlesRenderTime; } /** * Gets the particles render time capture status */ get captureParticlesRenderTime() { return this._captureParticlesRenderTime; } /** * Enable or disable the particles render time capture */ set captureParticlesRenderTime(e) { e !== this._captureParticlesRenderTime && (this._captureParticlesRenderTime = e, e ? 
(this._onBeforeParticlesRenderingObserver = this.scene.onBeforeParticlesRenderingObservable.add(() => { Ve.StartPerformanceCounter("Particles"), this._particlesRenderTime.beginMonitoring(); }), this._onAfterParticlesRenderingObserver = this.scene.onAfterParticlesRenderingObservable.add(() => { Ve.EndPerformanceCounter("Particles"), this._particlesRenderTime.endMonitoring(!1); })) : (this.scene.onBeforeParticlesRenderingObservable.remove(this._onBeforeParticlesRenderingObserver), this._onBeforeParticlesRenderingObserver = null, this.scene.onAfterParticlesRenderingObservable.remove(this._onAfterParticlesRenderingObserver), this._onAfterParticlesRenderingObserver = null)); } /** * Gets the perf counter used for sprites render time */ get spritesRenderTimeCounter() { return this._spritesRenderTime; } /** * Gets the sprites render time capture status */ get captureSpritesRenderTime() { return this._captureSpritesRenderTime; } /** * Enable or disable the sprites render time capture */ set captureSpritesRenderTime(e) { e !== this._captureSpritesRenderTime && (this._captureSpritesRenderTime = e, this.scene.spriteManagers && (e ? (this._onBeforeSpritesRenderingObserver = this.scene.onBeforeSpritesRenderingObservable.add(() => { Ve.StartPerformanceCounter("Sprites"), this._spritesRenderTime.beginMonitoring(); }), this._onAfterSpritesRenderingObserver = this.scene.onAfterSpritesRenderingObservable.add(() => { Ve.EndPerformanceCounter("Sprites"), this._spritesRenderTime.endMonitoring(!1); })) : (this.scene.onBeforeSpritesRenderingObservable.remove(this._onBeforeSpritesRenderingObserver), this._onBeforeSpritesRenderingObserver = null, this.scene.onAfterSpritesRenderingObservable.remove(this._onAfterSpritesRenderingObserver), this._onAfterSpritesRenderingObserver = null))); } /** * Gets the perf counter used for physics time */ get physicsTimeCounter() { return this._physicsTime; } /** * Gets the physics time capture status */ get capturePhysicsTime() { return this._capturePhysicsTime; } /** * Enable or disable the physics time capture */ set capturePhysicsTime(e) { e !== this._capturePhysicsTime && this.scene.onBeforePhysicsObservable && (this._capturePhysicsTime = e, e ? (this._onBeforePhysicsObserver = this.scene.onBeforePhysicsObservable.add(() => { Ve.StartPerformanceCounter("Physics"), this._physicsTime.beginMonitoring(); }), this._onAfterPhysicsObserver = this.scene.onAfterPhysicsObservable.add(() => { Ve.EndPerformanceCounter("Physics"), this._physicsTime.endMonitoring(); })) : (this.scene.onBeforePhysicsObservable.remove(this._onBeforePhysicsObserver), this._onBeforePhysicsObserver = null, this.scene.onAfterPhysicsObservable.remove(this._onAfterPhysicsObserver), this._onAfterPhysicsObserver = null)); } /** * Gets the perf counter used for animations time */ get animationsTimeCounter() { return this._animationsTime; } /** * Gets the animations time capture status */ get captureAnimationsTime() { return this._captureAnimationsTime; } /** * Enable or disable the animations time capture */ set captureAnimationsTime(e) { e !== this._captureAnimationsTime && (this._captureAnimationsTime = e, e ? 
this._onAfterAnimationsObserver = this.scene.onAfterAnimationsObservable.add(() => { this._animationsTime.endMonitoring(); }) : (this.scene.onAfterAnimationsObservable.remove(this._onAfterAnimationsObserver), this._onAfterAnimationsObserver = null)); } /** * Gets the perf counter used for frame time capture */ get frameTimeCounter() { return this._frameTime; } /** * Gets the frame time capture status */ get captureFrameTime() { return this._captureFrameTime; } /** * Enable or disable the frame time capture */ set captureFrameTime(e) { this._captureFrameTime = e; } /** * Gets the perf counter used for inter-frames time capture */ get interFrameTimeCounter() { return this._interFrameTime; } /** * Gets the inter-frames time capture status */ get captureInterFrameTime() { return this._captureInterFrameTime; } /** * Enable or disable the inter-frames time capture */ set captureInterFrameTime(e) { this._captureInterFrameTime = e; } /** * Gets the perf counter used for render time capture */ get renderTimeCounter() { return this._renderTime; } /** * Gets the render time capture status */ get captureRenderTime() { return this._captureRenderTime; } /** * Enable or disable the render time capture */ set captureRenderTime(e) { e !== this._captureRenderTime && (this._captureRenderTime = e, e ? (this._onBeforeDrawPhaseObserver = this.scene.onBeforeDrawPhaseObservable.add(() => { this._renderTime.beginMonitoring(), Ve.StartPerformanceCounter("Main render"); }), this._onAfterDrawPhaseObserver = this.scene.onAfterDrawPhaseObservable.add(() => { this._renderTime.endMonitoring(!1), Ve.EndPerformanceCounter("Main render"); })) : (this.scene.onBeforeDrawPhaseObservable.remove(this._onBeforeDrawPhaseObserver), this._onBeforeDrawPhaseObserver = null, this.scene.onAfterDrawPhaseObservable.remove(this._onAfterDrawPhaseObserver), this._onAfterDrawPhaseObserver = null)); } /** * Gets the perf counter used for camera render time capture */ get cameraRenderTimeCounter() { return this._cameraRenderTime; } /** * Gets the camera render time capture status */ get captureCameraRenderTime() { return this._captureCameraRenderTime; } /** * Enable or disable the camera render time capture */ set captureCameraRenderTime(e) { e !== this._captureCameraRenderTime && (this._captureCameraRenderTime = e, e ? (this._onBeforeCameraRenderObserver = this.scene.onBeforeCameraRenderObservable.add((t) => { this._cameraRenderTime.beginMonitoring(), Ve.StartPerformanceCounter(`Rendering camera ${t.name}`); }), this._onAfterCameraRenderObserver = this.scene.onAfterCameraRenderObservable.add((t) => { this._cameraRenderTime.endMonitoring(!1), Ve.EndPerformanceCounter(`Rendering camera ${t.name}`); })) : (this.scene.onBeforeCameraRenderObservable.remove(this._onBeforeCameraRenderObserver), this._onBeforeCameraRenderObserver = null, this.scene.onAfterCameraRenderObservable.remove(this._onAfterCameraRenderObserver), this._onAfterCameraRenderObserver = null)); } /** * Gets the perf counter used for draw calls */ get drawCallsCounter() { return this.scene.getEngine()._drawCalls; } /** * Instantiates a new scene instrumentation. 
* This class can be used to get instrumentation data from a Babylon engine * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#sceneinstrumentation * @param scene Defines the scene to instrument */ constructor(e) { this.scene = e, this._captureActiveMeshesEvaluationTime = !1, this._activeMeshesEvaluationTime = new Vc(), this._captureRenderTargetsRenderTime = !1, this._renderTargetsRenderTime = new Vc(), this._captureFrameTime = !1, this._frameTime = new Vc(), this._captureRenderTime = !1, this._renderTime = new Vc(), this._captureInterFrameTime = !1, this._interFrameTime = new Vc(), this._captureParticlesRenderTime = !1, this._particlesRenderTime = new Vc(), this._captureSpritesRenderTime = !1, this._spritesRenderTime = new Vc(), this._capturePhysicsTime = !1, this._physicsTime = new Vc(), this._captureAnimationsTime = !1, this._animationsTime = new Vc(), this._captureCameraRenderTime = !1, this._cameraRenderTime = new Vc(), this._onBeforeActiveMeshesEvaluationObserver = null, this._onAfterActiveMeshesEvaluationObserver = null, this._onBeforeRenderTargetsRenderObserver = null, this._onAfterRenderTargetsRenderObserver = null, this._onAfterRenderObserver = null, this._onBeforeDrawPhaseObserver = null, this._onAfterDrawPhaseObserver = null, this._onBeforeAnimationsObserver = null, this._onBeforeParticlesRenderingObserver = null, this._onAfterParticlesRenderingObserver = null, this._onBeforeSpritesRenderingObserver = null, this._onAfterSpritesRenderingObserver = null, this._onBeforePhysicsObserver = null, this._onAfterPhysicsObserver = null, this._onAfterAnimationsObserver = null, this._onBeforeCameraRenderObserver = null, this._onAfterCameraRenderObserver = null, this._onBeforeAnimationsObserver = e.onBeforeAnimationsObservable.add(() => { this._captureActiveMeshesEvaluationTime && this._activeMeshesEvaluationTime.fetchNewFrame(), this._captureRenderTargetsRenderTime && this._renderTargetsRenderTime.fetchNewFrame(), this._captureFrameTime && (Ve.StartPerformanceCounter("Scene rendering"), this._frameTime.beginMonitoring()), this._captureInterFrameTime && this._interFrameTime.endMonitoring(), this._captureParticlesRenderTime && this._particlesRenderTime.fetchNewFrame(), this._captureSpritesRenderTime && this._spritesRenderTime.fetchNewFrame(), this._captureAnimationsTime && this._animationsTime.beginMonitoring(), this._captureRenderTime && this._renderTime.fetchNewFrame(), this._captureCameraRenderTime && this._cameraRenderTime.fetchNewFrame(), this.scene.getEngine()._drawCalls.fetchNewFrame(); }), this._onAfterRenderObserver = e.onAfterRenderObservable.add(() => { this._captureFrameTime && (Ve.EndPerformanceCounter("Scene rendering"), this._frameTime.endMonitoring()), this._captureRenderTime && this._renderTime.endMonitoring(!1), this._captureInterFrameTime && this._interFrameTime.beginMonitoring(), this._captureActiveMeshesEvaluationTime && this._activeMeshesEvaluationTime.endFrame(), this._captureRenderTargetsRenderTime && this._renderTargetsRenderTime.endFrame(), this._captureParticlesRenderTime && this._particlesRenderTime.endFrame(), this._captureSpritesRenderTime && this._spritesRenderTime.endFrame(), this._captureRenderTime && this._renderTime.endFrame(), this._captureCameraRenderTime && this._cameraRenderTime.endFrame(); }); } /** * Dispose and release associated resources. 
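* Illustrative example (not from this bundle): `SceneInstrumentation` is the public name of this class; the
* counters returned by the getters above are Babylon PerfCounter instances.
*   const sceneInstrumentation = new SceneInstrumentation(scene);
*   sceneInstrumentation.captureFrameTime = true;
*   sceneInstrumentation.captureRenderTime = true;
*   scene.onAfterRenderObservable.add(() => {
*     console.log("frame: " + sceneInstrumentation.frameTimeCounter.lastSecAverage.toFixed(2) + " ms," +
*       " render: " + sceneInstrumentation.renderTimeCounter.lastSecAverage.toFixed(2) + " ms," +
*       " draw calls: " + sceneInstrumentation.drawCallsCounter.current);
*   });
*   // when finished measuring:
*   sceneInstrumentation.dispose();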
*/ dispose() { this.scene.onAfterRenderObservable.remove(this._onAfterRenderObserver), this._onAfterRenderObserver = null, this.scene.onBeforeActiveMeshesEvaluationObservable.remove(this._onBeforeActiveMeshesEvaluationObserver), this._onBeforeActiveMeshesEvaluationObserver = null, this.scene.onAfterActiveMeshesEvaluationObservable.remove(this._onAfterActiveMeshesEvaluationObserver), this._onAfterActiveMeshesEvaluationObserver = null, this.scene.onBeforeRenderTargetsRenderObservable.remove(this._onBeforeRenderTargetsRenderObserver), this._onBeforeRenderTargetsRenderObserver = null, this.scene.onAfterRenderTargetsRenderObservable.remove(this._onAfterRenderTargetsRenderObserver), this._onAfterRenderTargetsRenderObserver = null, this.scene.onBeforeAnimationsObservable.remove(this._onBeforeAnimationsObserver), this._onBeforeAnimationsObserver = null, this.scene.onBeforeParticlesRenderingObservable.remove(this._onBeforeParticlesRenderingObserver), this._onBeforeParticlesRenderingObserver = null, this.scene.onAfterParticlesRenderingObservable.remove(this._onAfterParticlesRenderingObserver), this._onAfterParticlesRenderingObserver = null, this._onBeforeSpritesRenderingObserver && (this.scene.onBeforeSpritesRenderingObservable.remove(this._onBeforeSpritesRenderingObserver), this._onBeforeSpritesRenderingObserver = null), this._onAfterSpritesRenderingObserver && (this.scene.onAfterSpritesRenderingObservable.remove(this._onAfterSpritesRenderingObserver), this._onAfterSpritesRenderingObserver = null), this.scene.onBeforeDrawPhaseObservable.remove(this._onBeforeDrawPhaseObserver), this._onBeforeDrawPhaseObserver = null, this.scene.onAfterDrawPhaseObservable.remove(this._onAfterDrawPhaseObserver), this._onAfterDrawPhaseObserver = null, this._onBeforePhysicsObserver && (this.scene.onBeforePhysicsObservable.remove(this._onBeforePhysicsObserver), this._onBeforePhysicsObserver = null), this._onAfterPhysicsObserver && (this.scene.onAfterPhysicsObservable.remove(this._onAfterPhysicsObserver), this._onAfterPhysicsObserver = null), this.scene.onAfterAnimationsObservable.remove(this._onAfterAnimationsObserver), this._onAfterAnimationsObserver = null, this.scene.onBeforeCameraRenderObservable.remove(this._onBeforeCameraRenderObserver), this._onBeforeCameraRenderObserver = null, this.scene.onAfterCameraRenderObservable.remove(this._onAfterCameraRenderObserver), this._onAfterCameraRenderObserver = null, this.scene = null; } } const y2e = "glowMapGenerationPixelShader", C2e = `#if defined(DIFFUSE_ISLINEAR) || defined(EMISSIVE_ISLINEAR) #include #endif #ifdef DIFFUSE varying vec2 vUVDiffuse;uniform sampler2D diffuseSampler; #endif #ifdef OPACITY varying vec2 vUVOpacity;uniform sampler2D opacitySampler;uniform float opacityIntensity; #endif #ifdef EMISSIVE varying vec2 vUVEmissive;uniform sampler2D emissiveSampler; #endif #ifdef VERTEXALPHA varying vec4 vColor; #endif uniform vec4 glowColor;uniform float glowIntensity; #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #include vec4 finalColor=glowColor; #ifdef DIFFUSE vec4 albedoTexture=texture2D(diffuseSampler,vUVDiffuse); #ifdef DIFFUSE_ISLINEAR albedoTexture=toGammaSpace(albedoTexture); #endif #ifdef GLOW finalColor.a*=albedoTexture.a; #endif #ifdef HIGHLIGHT finalColor.a=albedoTexture.a; #endif #endif #ifdef OPACITY vec4 opacityMap=texture2D(opacitySampler,vUVOpacity); #ifdef OPACITYRGB finalColor.a*=getLuminance(opacityMap.rgb); #else finalColor.a*=opacityMap.a; #endif finalColor.a*=opacityIntensity; #endif #ifdef VERTEXALPHA finalColor.a*=vColor.a; 
#endif #ifdef ALPHATEST if (finalColor.a #include #include #include[0..maxSimultaneousMorphTargets] #include #include uniform mat4 viewProjection;varying vec4 vPosition; #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #ifdef DIFFUSE varying vec2 vUVDiffuse;uniform mat4 diffuseMatrix; #endif #ifdef OPACITY varying vec2 vUVOpacity;uniform mat4 opacityMatrix; #endif #ifdef EMISSIVE varying vec2 vUVEmissive;uniform mat4 emissiveMatrix; #endif #ifdef VERTEXALPHA attribute vec4 color;varying vec4 vColor; #endif #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position; #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #include #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0); #ifdef CUBEMAP vPosition=worldPos;gl_Position=viewProjection*finalWorld*vec4(position,1.0); #else vPosition=viewProjection*worldPos;gl_Position=vPosition; #endif #ifdef DIFFUSE #ifdef DIFFUSEUV1 vUVDiffuse=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef DIFFUSEUV2 vUVDiffuse=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #endif #endif #ifdef OPACITY #ifdef OPACITYUV1 vUVOpacity=vec2(opacityMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef OPACITYUV2 vUVOpacity=vec2(opacityMatrix*vec4(uv2,1.0,0.0)); #endif #endif #ifdef EMISSIVE #ifdef EMISSIVEUV1 vUVEmissive=vec2(emissiveMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef EMISSIVEUV2 vUVEmissive=vec2(emissiveMatrix*vec4(uv2,1.0,0.0)); #endif #endif #ifdef VERTEXALPHA vColor=color; #endif #include }`; je.ShadersStore[x2e] = b2e; class Tm { /** * Gets the camera attached to the layer. */ get camera() { return this._effectLayerOptions.camera; } /** * Gets the rendering group id the layer should render in. */ get renderingGroupId() { return this._effectLayerOptions.renderingGroupId; } set renderingGroupId(e) { this._effectLayerOptions.renderingGroupId = e; } /** * Gets the main texture where the effect is rendered */ get mainTexture() { return this._mainTexture; } /** * Sets a specific material to be used to render a mesh/a list of meshes in the layer * @param mesh mesh or array of meshes * @param material material to use by the layer when rendering the mesh(es). If undefined is passed, the specific material created by the layer will be used. */ setMaterialForRendering(e, t) { if (this._mainTexture.setMaterialForRendering(e, t), Array.isArray(e)) for (let i = 0; i < e.length; ++i) { const r = e[i]; t ? this._materialForRendering[r.uniqueId] = [r, t] : delete this._materialForRendering[r.uniqueId]; } else t ? this._materialForRendering[e.uniqueId] = [e, t] : delete this._materialForRendering[e.uniqueId]; } /** * Gets the intensity of the effect for a specific mesh. * @param mesh The mesh to get the effect intensity for * @returns The intensity of the effect for the mesh */ getEffectIntensity(e) { var t; return (t = this._effectIntensity[e.uniqueId]) !== null && t !== void 0 ? t : 1; } /** * Sets the intensity of the effect for a specific mesh. * @param mesh The mesh to set the effect intensity for * @param intensity The intensity of the effect for the mesh */ setEffectIntensity(e, t) { this._effectIntensity[e.uniqueId] = t; } /** * Instantiates a new effect Layer and references it in the scene. 
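* Illustrative example (not from this bundle): EffectLayer itself is abstract, so the sketch uses a GlowLayer,
* one of its concrete subclasses, assumed to be available in the full build; `mesh` and
* `customEmissiveMaterial` are placeholders.
*   const glow = new GlowLayer("glow", scene);
*   glow.setMaterialForRendering(mesh, customEmissiveMaterial); // material used only for the layer's generation pass
*   glow.setEffectIntensity(mesh, 0.5);                         // per-mesh intensity, read back by getEffectIntensity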
* @param name The name of the layer * @param scene The scene to use the layer in */ constructor(e, t) { this._vertexBuffers = {}, this._maxSize = 0, this._mainTextureDesiredSize = { width: 0, height: 0 }, this._shouldRender = !0, this._postProcesses = [], this._textures = [], this._emissiveTextureAndColor = { texture: null, color: new Et() }, this._effectIntensity = {}, this.neutralColor = new Et(), this.isEnabled = !0, this.disableBoundingBoxesFromEffectLayer = !1, this.onDisposeObservable = new Fe(), this.onBeforeRenderMainTextureObservable = new Fe(), this.onBeforeComposeObservable = new Fe(), this.onBeforeRenderMeshToEffect = new Fe(), this.onAfterRenderMeshToEffect = new Fe(), this.onAfterComposeObservable = new Fe(), this.onSizeChangedObservable = new Fe(), this._materialForRendering = {}, this.name = e, this._scene = t || gi.LastCreatedScene, Tm._SceneComponentInitialization(this._scene), this._engine = this._scene.getEngine(), this._maxSize = this._engine.getCaps().maxTextureSize, this._scene.effectLayers.push(this), this._mergeDrawWrapper = [], this._generateIndexBuffer(), this._generateVertexBuffer(); } /** * Number of times _internalRender will be called. Some effect layers need to render the mesh several times, so they should override this method with the number of times the mesh should be rendered * @returns Number of times a mesh must be rendered in the layer */ _numInternalDraws() { return 1; } /** * Initializes the effect layer with the required options. * @param options Sets of none mandatory options to use with the layer (see IEffectLayerOptions for more information) */ _init(e) { this._effectLayerOptions = Object.assign({ mainTextureRatio: 0.5, alphaBlendingMode: 2, camera: null, renderingGroupId: -1, mainTextureType: 0, generateStencilBuffer: !1 }, e), this._setMainTextureSize(), this._createMainTexture(), this._createTextureAndPostProcesses(); } /** * Generates the index buffer of the full screen quad blending to the main canvas. */ _generateIndexBuffer() { const e = []; e.push(0), e.push(1), e.push(2), e.push(0), e.push(2), e.push(3), this._indexBuffer = this._engine.createIndexBuffer(e); } /** * Generates the vertex buffer of the full screen quad blending to the main canvas. */ _generateVertexBuffer() { const e = []; e.push(1, 1), e.push(-1, 1), e.push(-1, -1), e.push(1, -1); const t = new Y(this._engine, e, Y.PositionKind, !1, !1, 2); this._vertexBuffers[Y.PositionKind] = t; } /** * Sets the main texture desired size which is the closest power of two * of the engine canvas size. */ _setMainTextureSize() { this._effectLayerOptions.mainTextureFixedSize ? (this._mainTextureDesiredSize.width = this._effectLayerOptions.mainTextureFixedSize, this._mainTextureDesiredSize.height = this._effectLayerOptions.mainTextureFixedSize) : (this._mainTextureDesiredSize.width = this._engine.getRenderWidth() * this._effectLayerOptions.mainTextureRatio, this._mainTextureDesiredSize.height = this._engine.getRenderHeight() * this._effectLayerOptions.mainTextureRatio, this._mainTextureDesiredSize.width = this._engine.needPOTTextures ? $e.GetExponentOfTwo(this._mainTextureDesiredSize.width, this._maxSize) : this._mainTextureDesiredSize.width, this._mainTextureDesiredSize.height = this._engine.needPOTTextures ? 
$e.GetExponentOfTwo(this._mainTextureDesiredSize.height, this._maxSize) : this._mainTextureDesiredSize.height), this._mainTextureDesiredSize.width = Math.floor(this._mainTextureDesiredSize.width), this._mainTextureDesiredSize.height = Math.floor(this._mainTextureDesiredSize.height); } /** * Creates the main texture for the effect layer. */ _createMainTexture() { this._mainTexture = new ra("EffectLayerMainRTT", { width: this._mainTextureDesiredSize.width, height: this._mainTextureDesiredSize.height }, this._scene, !1, !0, this._effectLayerOptions.mainTextureType, !1, De.TRILINEAR_SAMPLINGMODE, !0, this._effectLayerOptions.generateStencilBuffer), this._mainTexture.activeCamera = this._effectLayerOptions.camera, this._mainTexture.wrapU = De.CLAMP_ADDRESSMODE, this._mainTexture.wrapV = De.CLAMP_ADDRESSMODE, this._mainTexture.anisotropicFilteringLevel = 1, this._mainTexture.updateSamplingMode(De.BILINEAR_SAMPLINGMODE), this._mainTexture.renderParticles = !1, this._mainTexture.renderList = null, this._mainTexture.ignoreCameraViewport = !0; for (const e in this._materialForRendering) { const [t, i] = this._materialForRendering[e]; this._mainTexture.setMaterialForRendering(t, i); } if (this._mainTexture.customIsReadyFunction = (e, t, i) => { if ((i || t === 0) && e.subMeshes) for (let r = 0; r < e.subMeshes.length; ++r) { const s = e.subMeshes[r], n = s.getMaterial(), a = s.getRenderingMesh(); if (!n) continue; const o = a._getInstancesRenderList(s._id, !!s.getReplacementMesh()).hardwareInstancedRendering[s._id] || a.hasThinInstances; if (this._setEmissiveTextureAndColor(a, s, n), !this._isReady(s, o, this._emissiveTextureAndColor.texture)) return !1; } return !0; }, this._mainTexture.customRenderFunction = (e, t, i, r) => { this.onBeforeRenderMainTextureObservable.notifyObservers(this); let s; const n = this._scene.getEngine(); if (r.length) { for (n.setColorWrite(!1), s = 0; s < r.length; s++) this._renderSubMesh(r.data[s]); n.setColorWrite(!0); } for (s = 0; s < e.length; s++) this._renderSubMesh(e.data[s]); for (s = 0; s < t.length; s++) this._renderSubMesh(t.data[s]); const a = n.getAlphaMode(); for (s = 0; s < i.length; s++) this._renderSubMesh(i.data[s], !0); n.setAlphaMode(a); }, this._mainTexture.onClearObservable.add((e) => { e.clear(this.neutralColor, !0, !0, !0); }), this._scene.getBoundingBoxRenderer) { const e = this._scene.getBoundingBoxRenderer().enabled; this._mainTexture.onBeforeBindObservable.add(() => { this._scene.getBoundingBoxRenderer().enabled = !this.disableBoundingBoxesFromEffectLayer && e; }), this._mainTexture.onAfterUnbindObservable.add(() => { this._scene.getBoundingBoxRenderer().enabled = e; }); } } /** * Adds specific effects defines. * @param defines The defines to add specifics to. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _addCustomEffectDefines(e) { } /** * Checks for the readiness of the element composing the layer. * @param subMesh the mesh to check for * @param useInstances specify whether or not to use instances to render the mesh * @param emissiveTexture the associated emissive texture used to generate the glow * @returns true if ready otherwise, false */ _isReady(e, t, i) { var r; const s = this._scene.getEngine(), n = e.getMesh(), a = (r = n._internalAbstractMeshDataInfo._materialForRenderPass) === null || r === void 0 ? 
void 0 : r[s.currentRenderPassId]; if (a) return a.isReadyForSubMesh(n, e, t); const l = e.getMaterial(); if (!l) return !1; if (this._useMeshMaterial(e.getRenderingMesh())) return l.isReadyForSubMesh(e.getMesh(), e, t); const o = [], u = [Y.PositionKind]; let h = !1, d = !1; if (l) { const x = l.needAlphaTesting(), b = l.getAlphaTestTexture(), S = b && b.hasAlpha && (l.useAlphaFromDiffuseTexture || l._useAlphaFromAlbedoTexture); b && (x || S) && (o.push("#define DIFFUSE"), n.isVerticesDataPresent(Y.UV2Kind) && b.coordinatesIndex === 1 ? (o.push("#define DIFFUSEUV2"), d = !0) : n.isVerticesDataPresent(Y.UVKind) && (o.push("#define DIFFUSEUV1"), h = !0), x && (o.push("#define ALPHATEST"), o.push("#define ALPHATESTVALUE 0.4")), b.gammaSpace || o.push("#define DIFFUSE_ISLINEAR")); const M = l.opacityTexture; M && (o.push("#define OPACITY"), n.isVerticesDataPresent(Y.UV2Kind) && M.coordinatesIndex === 1 ? (o.push("#define OPACITYUV2"), d = !0) : n.isVerticesDataPresent(Y.UVKind) && (o.push("#define OPACITYUV1"), h = !0)); } i && (o.push("#define EMISSIVE"), n.isVerticesDataPresent(Y.UV2Kind) && i.coordinatesIndex === 1 ? (o.push("#define EMISSIVEUV2"), d = !0) : n.isVerticesDataPresent(Y.UVKind) && (o.push("#define EMISSIVEUV1"), h = !0), i.gammaSpace || o.push("#define EMISSIVE_ISLINEAR")), n.useVertexColors && n.isVerticesDataPresent(Y.ColorKind) && n.hasVertexAlpha && l.transparencyMode !== At.MATERIAL_OPAQUE && (u.push(Y.ColorKind), o.push("#define VERTEXALPHA")), h && (u.push(Y.UVKind), o.push("#define UV1")), d && (u.push(Y.UV2Kind), o.push("#define UV2")); const f = new pl(); if (n.useBones && n.computeBonesUsingShaders) { u.push(Y.MatricesIndicesKind), u.push(Y.MatricesWeightsKind), n.numBoneInfluencers > 4 && (u.push(Y.MatricesIndicesExtraKind), u.push(Y.MatricesWeightsExtraKind)), o.push("#define NUM_BONE_INFLUENCERS " + n.numBoneInfluencers); const x = n.skeleton; x && x.isUsingTextureForMatrices ? o.push("#define BONETEXTURE") : o.push("#define BonesPerMesh " + (x ? x.bones.length + 1 : 0)), n.numBoneInfluencers > 0 && f.addCPUSkinningFallback(0, n); } else o.push("#define NUM_BONE_INFLUENCERS 0"); const p = n.morphTargetManager; let m = 0; p && p.numInfluencers > 0 && (o.push("#define MORPHTARGETS"), m = p.numInfluencers, o.push("#define NUM_MORPH_INFLUENCERS " + m), p.isUsingTextureForTargets && o.push("#define MORPHTARGETS_TEXTURE"), Ke.PrepareAttributesForMorphTargetsInfluencers(u, n, m)), t && (o.push("#define INSTANCES"), Ke.PushAttributesForInstances(u), e.getRenderingMesh().hasThinInstances && o.push("#define THIN_INSTANCES")), bT(l, this._scene, o), this._addCustomEffectDefines(o); const _ = e._getDrawWrapper(void 0, !0), v = _.defines, C = o.join(` `); if (v !== C) { const x = [ "world", "mBones", "viewProjection", "glowColor", "morphTargetInfluences", "boneTextureWidth", "diffuseMatrix", "emissiveMatrix", "opacityMatrix", "opacityIntensity", "morphTargetTextureInfo", "morphTargetTextureIndices", "glowIntensity" ]; Gc(x), _.setEffect(this._engine.createEffect("glowMapGeneration", u, x, ["diffuseSampler", "emissiveSampler", "opacitySampler", "boneSampler", "morphTargets"], C, f, void 0, void 0, { maxSimultaneousMorphTargets: m }), C); } return _.effect.isReady(); } /** * Renders the glowing part of the scene by blending the blurred glowing meshes on top of the rendered scene. 
*/ render() { for (let n = 0; n < this._postProcesses.length; n++) if (!this._postProcesses[n].isReady()) return; const e = this._scene.getEngine(), t = this._numInternalDraws(); let i = !0; for (let n = 0; n < t; ++n) { let a = this._mergeDrawWrapper[n]; a || (a = this._mergeDrawWrapper[n] = new $o(this._engine), a.setEffect(this._createMergeEffect())), i = i && a.effect.isReady(); } if (!i) return; this.onBeforeComposeObservable.notifyObservers(this); const r = e.getAlphaMode(); for (let n = 0; n < t; ++n) { const a = this._mergeDrawWrapper[n]; e.enableEffect(a), e.setState(!1), e.bindBuffers(this._vertexBuffers, this._indexBuffer, a.effect), e.setAlphaMode(this._effectLayerOptions.alphaBlendingMode), this._internalRender(a.effect, n); } e.setAlphaMode(r), this.onAfterComposeObservable.notifyObservers(this); const s = this._mainTexture.getSize(); this._setMainTextureSize(), (s.width !== this._mainTextureDesiredSize.width || s.height !== this._mainTextureDesiredSize.height) && this._mainTextureDesiredSize.width !== 0 && this._mainTextureDesiredSize.height !== 0 && (this.onSizeChangedObservable.notifyObservers(this), this._disposeTextureAndPostProcesses(), this._createMainTexture(), this._createTextureAndPostProcesses()); } /** * Determine if a given mesh will be used in the current effect. * @param mesh mesh to test * @returns true if the mesh will be used */ hasMesh(e) { return this.renderingGroupId === -1 || e.renderingGroupId === this.renderingGroupId; } /** * Returns true if the layer contains information to display, otherwise false. * @returns true if the glow layer should be rendered */ shouldRender() { return this.isEnabled && this._shouldRender; } /** * Returns true if the mesh should render, otherwise false. * @param mesh The mesh to render * @returns true if it should render otherwise false */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _shouldRenderMesh(e) { return !0; } /** * Returns true if the mesh can be rendered, otherwise false. * @param mesh The mesh to render * @param material The material used on the mesh * @returns true if it can be rendered otherwise false */ _canRenderMesh(e, t) { return !t.needAlphaBlendingForMesh(e); } /** * Returns true if the mesh should render, otherwise false. * @returns true if it should render otherwise false */ _shouldRenderEmissiveTextureForMesh() { return !0; } /** * Renders the submesh passed in parameter to the generation map. * @param subMesh * @param enableAlphaMode */ _renderSubMesh(e, t = !1) { var i, r; if (!this.shouldRender()) return; const s = e.getMaterial(), n = e.getMesh(), a = e.getReplacementMesh(), l = e.getRenderingMesh(), o = e.getEffectiveMesh(), u = this._scene, h = u.getEngine(); if (o._internalAbstractMeshDataInfo._isActiveIntermediate = !1, !s || !this._canRenderMesh(l, s)) return; let d = (i = l.overrideMaterialSideOrientation) !== null && i !== void 0 ? i : s.sideOrientation; o._getWorldMatrixDeterminant() < 0 && (d = d === At.ClockWiseSideOrientation ? 
At.CounterClockWiseSideOrientation : At.ClockWiseSideOrientation); const p = d === At.ClockWiseSideOrientation; h.setState(s.backFaceCulling, s.zOffset, void 0, p, s.cullBackFaces, void 0, s.zOffsetUnits); const m = l._getInstancesRenderList(e._id, !!a); if (m.mustReturn || !this._shouldRenderMesh(l)) return; const _ = m.hardwareInstancedRendering[e._id] || l.hasThinInstances; if (this._setEmissiveTextureAndColor(l, e, s), this.onBeforeRenderMeshToEffect.notifyObservers(n), this._useMeshMaterial(l)) l.render(e, t, a || void 0); else if (this._isReady(e, _, this._emissiveTextureAndColor.texture)) { const v = (r = o._internalAbstractMeshDataInfo._materialForRenderPass) === null || r === void 0 ? void 0 : r[h.currentRenderPassId]; let C = e._getDrawWrapper(); if (!C && v && (C = v._getDrawWrapper()), !C) return; const x = C.effect; if (h.enableEffect(C), _ || l._bind(e, x, s.fillMode), v ? v.bindForSubMesh(o.getWorldMatrix(), o, e) : (x.setMatrix("viewProjection", u.getTransformMatrix()), x.setMatrix("world", o.getWorldMatrix()), x.setFloat4("glowColor", this._emissiveTextureAndColor.color.r, this._emissiveTextureAndColor.color.g, this._emissiveTextureAndColor.color.b, this._emissiveTextureAndColor.color.a)), !v) { const b = s.needAlphaTesting(), S = s.getAlphaTestTexture(), M = S && S.hasAlpha && (s.useAlphaFromDiffuseTexture || s._useAlphaFromAlbedoTexture); if (S && (b || M)) { x.setTexture("diffuseSampler", S); const w = S.getTextureMatrix(); w && x.setMatrix("diffuseMatrix", w); } const R = s.opacityTexture; if (R) { x.setTexture("opacitySampler", R), x.setFloat("opacityIntensity", R.level); const w = R.getTextureMatrix(); w && x.setMatrix("opacityMatrix", w); } if (this._emissiveTextureAndColor.texture && (x.setTexture("emissiveSampler", this._emissiveTextureAndColor.texture), x.setMatrix("emissiveMatrix", this._emissiveTextureAndColor.texture.getTextureMatrix())), l.useBones && l.computeBonesUsingShaders && l.skeleton) { const w = l.skeleton; if (w.isUsingTextureForMatrices) { const V = w.getTransformMatrixTexture(l); if (!V) return; x.setTexture("boneSampler", V), x.setFloat("boneTextureWidth", 4 * (w.bones.length + 1)); } else x.setMatrices("mBones", w.getTransformMatrices(l)); } Ke.BindMorphTargetParameters(l, x), l.morphTargetManager && l.morphTargetManager.isUsingTextureForTargets && l.morphTargetManager._bind(x), t && h.setAlphaMode(s.alphaMode), x.setFloat("glowIntensity", this.getEffectIntensity(l)), Ec(x, s, u); } l._processRendering(o, e, x, s.fillMode, m, _, (b, S) => x.setMatrix("world", S)); } else this._mainTexture.resetRefreshCounter(); this.onAfterRenderMeshToEffect.notifyObservers(n); } /** * Defines whether the current material of the mesh should be use to render the effect. * @param mesh defines the current mesh to render */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _useMeshMaterial(e) { return !1; } /** * Rebuild the required buffers. * @internal Internal use only. */ _rebuild() { const e = this._vertexBuffers[Y.PositionKind]; e && e._rebuild(), this._generateIndexBuffer(); } /** * Dispose only the render target textures and post process. */ _disposeTextureAndPostProcesses() { this._mainTexture.dispose(); for (let e = 0; e < this._postProcesses.length; e++) this._postProcesses[e] && this._postProcesses[e].dispose(); this._postProcesses = []; for (let e = 0; e < this._textures.length; e++) this._textures[e] && this._textures[e].dispose(); this._textures = []; } /** * Dispose the highlight layer and free resources. 
*/ dispose() { const e = this._vertexBuffers[Y.PositionKind]; e && (e.dispose(), this._vertexBuffers[Y.PositionKind] = null), this._indexBuffer && (this._scene.getEngine()._releaseBuffer(this._indexBuffer), this._indexBuffer = null); for (const i of this._mergeDrawWrapper) i.dispose(); this._mergeDrawWrapper = [], this._disposeTextureAndPostProcesses(); const t = this._scene.effectLayers.indexOf(this, 0); t > -1 && this._scene.effectLayers.splice(t, 1), this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.onBeforeRenderMainTextureObservable.clear(), this.onBeforeComposeObservable.clear(), this.onBeforeRenderMeshToEffect.clear(), this.onAfterRenderMeshToEffect.clear(), this.onAfterComposeObservable.clear(), this.onSizeChangedObservable.clear(); } /** * Gets the class name of the effect layer * @returns the string with the class name of the effect layer */ getClassName() { return "EffectLayer"; } /** * Creates an effect layer from parsed effect layer data * @param parsedEffectLayer defines effect layer data * @param scene defines the current scene * @param rootUrl defines the root URL containing the effect layer information * @returns a parsed effect Layer */ static Parse(e, t, i) { return Ve.Instantiate(e.customType).Parse(e, t, i); } } Tm._SceneComponentInitialization = (c) => { throw yr("EffectLayerSceneComponent"); }; F([ W() ], Tm.prototype, "name", void 0); F([ dw() ], Tm.prototype, "neutralColor", void 0); F([ W() ], Tm.prototype, "isEnabled", void 0); F([ ete() ], Tm.prototype, "camera", null); F([ W() ], Tm.prototype, "renderingGroupId", null); F([ W() ], Tm.prototype, "disableBoundingBoxesFromEffectLayer", void 0); Yl.AddParser(Bt.NAME_EFFECTLAYER, (c, e, t, i) => { if (c.effectLayers) { t.effectLayers || (t.effectLayers = []); for (let r = 0; r < c.effectLayers.length; r++) { const s = Tm.Parse(c.effectLayers[r], e, i); t.effectLayers.push(s); } } }); Yl.prototype.removeEffectLayer = function(c) { const e = this.effectLayers.indexOf(c); return e !== -1 && this.effectLayers.splice(e, 1), e; }; Yl.prototype.addEffectLayer = function(c) { this.effectLayers.push(c); }; class hre { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_EFFECTLAYER, this._renderEffects = !1, this._needStencil = !1, this._previousStencilState = !1, this.scene = e || gi.LastCreatedScene, this.scene && (this._engine = this.scene.getEngine(), this.scene.effectLayers = []); } /** * Registers the component in a given scene */ register() { this.scene._isReadyForMeshStage.registerStep(Bt.STEP_ISREADYFORMESH_EFFECTLAYER, this, this._isReadyForMesh), this.scene._cameraDrawRenderTargetStage.registerStep(Bt.STEP_CAMERADRAWRENDERTARGET_EFFECTLAYER, this, this._renderMainTexture), this.scene._beforeCameraDrawStage.registerStep(Bt.STEP_BEFORECAMERADRAW_EFFECTLAYER, this, this._setStencil), this.scene._afterRenderingGroupDrawStage.registerStep(Bt.STEP_AFTERRENDERINGGROUPDRAW_EFFECTLAYER_DRAW, this, this._drawRenderingGroup), this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_EFFECTLAYER, this, this._setStencilBack), this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_EFFECTLAYER_DRAW, this, this._drawCamera); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
*/ rebuild() { const e = this.scene.effectLayers; for (const t of e) t._rebuild(); } /** * Serializes the component data to the specified json object * @param serializationObject The object to serialize to */ serialize(e) { e.effectLayers = []; const t = this.scene.effectLayers; for (const i of t) i.serialize && e.effectLayers.push(i.serialize()); } /** * Adds all the elements from the container to the scene * @param container the container holding the elements */ addFromContainer(e) { e.effectLayers && e.effectLayers.forEach((t) => { this.scene.addEffectLayer(t); }); } /** * Removes all the elements in the container from the scene * @param container contains the elements to remove * @param dispose if the removed element should be disposed (default: false) */ removeFromContainer(e, t) { e.effectLayers && e.effectLayers.forEach((i) => { this.scene.removeEffectLayer(i), t && i.dispose(); }); } /** * Disposes the component and the associated resources. */ dispose() { const e = this.scene.effectLayers; for (; e.length; ) e[0].dispose(); } _isReadyForMesh(e, t) { const i = this._engine.currentRenderPassId, r = this.scene.effectLayers; for (const s of r) { if (!s.hasMesh(e)) continue; const n = s._mainTexture; this._engine.currentRenderPassId = n.renderPassId; for (const a of e.subMeshes) if (!s.isReady(a, t)) return this._engine.currentRenderPassId = i, !1; } return this._engine.currentRenderPassId = i, !0; } _renderMainTexture(e) { this._renderEffects = !1, this._needStencil = !1; let t = !1; const i = this.scene.effectLayers; if (i && i.length > 0) { this._previousStencilState = this._engine.getStencilBuffer(); for (const r of i) if (r.shouldRender() && (!r.camera || r.camera.cameraRigMode === Ai.RIG_MODE_NONE && e === r.camera || r.camera.cameraRigMode !== Ai.RIG_MODE_NONE && r.camera._rigCameras.indexOf(e) > -1)) { this._renderEffects = !0, this._needStencil = this._needStencil || r.needStencil(); const s = r._mainTexture; s._shouldRender() && (this.scene.incrementRenderId(), s.render(!1, !1), t = !0); } this.scene.incrementRenderId(); } return t; } _setStencil() { this._needStencil && this._engine.setStencilBuffer(!0); } _setStencilBack() { this._needStencil && this._engine.setStencilBuffer(this._previousStencilState); } _draw(e) { if (this._renderEffects) { this._engine.setDepthBuffer(!1); const t = this.scene.effectLayers; for (let i = 0; i < t.length; i++) { const r = t[i]; r.renderingGroupId === e && r.shouldRender() && r.render(); } this._engine.setDepthBuffer(!0); } } _drawCamera() { this._renderEffects && this._draw(-1); } _drawRenderingGroup(e) { !this.scene._isInIntermediateRendering() && this._renderEffects && this._draw(e); } } Tm._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_EFFECTLAYER); e || (e = new hre(c), c._addComponent(e)); }; const E2e = "glowMapMergePixelShader", T2e = `varying vec2 vUV;uniform sampler2D textureSampler; #ifdef EMISSIVE uniform sampler2D textureSampler2; #endif uniform float offset; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN vec4 baseColor=texture2D(textureSampler,vUV); #ifdef EMISSIVE baseColor+=texture2D(textureSampler2,vUV);baseColor*=offset; #else baseColor.a=abs(offset-baseColor.a); #ifdef STROKE float alpha=smoothstep(.0,.1,baseColor.a);baseColor.a=alpha;baseColor.rgb=baseColor.rgb*alpha; #endif #endif #if LDR baseColor=clamp(baseColor,0.,1.0); #endif gl_FragColor=baseColor; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[E2e] = T2e; const S2e = 
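/*
 * Scene-level helpers defined just above (addEffectLayer / removeEffectLayer on the minified
 * scene class Yl, plus the .babylon parser that re-instantiates layers from their customType).
 * A small sketch assuming the usual BABYLON globals; the layer registers itself on creation:
 *
 *   const glow = new BABYLON.GlowLayer("glow", scene);
 *   const idx = scene.removeEffectLayer(glow);   // returns the index it occupied, or -1
 *   scene.addEffectLayer(glow);                  // put it back
 *   // serialized layers are restored through EffectLayer.Parse using their customType,
 *   // e.g. "BABYLON.GlowLayer"
 */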
"glowMapMergeVertexShader", M2e = `attribute vec2 position;varying vec2 vUV;const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vUV=position*madd+madd;gl_Position=vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[S2e] = M2e; Yl.prototype.getGlowLayerByName = function(c) { var e; for (let t = 0; t < ((e = this.effectLayers) === null || e === void 0 ? void 0 : e.length); t++) if (this.effectLayers[t].name === c && this.effectLayers[t].getEffectName() === Y_.EffectName) return this.effectLayers[t]; return null; }; class Y_ extends Tm { /** * Sets the kernel size of the blur. */ set blurKernelSize(e) { if (e === this._options.blurKernelSize) return; this._options.blurKernelSize = e; const t = this._getEffectiveBlurKernelSize(); this._horizontalBlurPostprocess1.kernel = t, this._verticalBlurPostprocess1.kernel = t, this._horizontalBlurPostprocess2.kernel = t, this._verticalBlurPostprocess2.kernel = t; } /** * Gets the kernel size of the blur. */ get blurKernelSize() { return this._options.blurKernelSize; } /** * Sets the glow intensity. */ set intensity(e) { this._intensity = e; } /** * Gets the glow intensity. */ get intensity() { return this._intensity; } /** * Instantiates a new glow Layer and references it to the scene. * @param name The name of the layer * @param scene The scene to use the layer in * @param options Sets of none mandatory options to use with the layer (see IGlowLayerOptions for more information) */ constructor(e, t, i) { super(e, t), this._intensity = 1, this._includedOnlyMeshes = [], this._excludedMeshes = [], this._meshesUsingTheirOwnMaterials = [], this.neutralColor = new Et(0, 0, 0, 1), this._options = Object.assign({ mainTextureRatio: Y_.DefaultTextureRatio, blurKernelSize: 32, mainTextureFixedSize: void 0, camera: null, mainTextureSamples: 1, renderingGroupId: -1, ldrMerge: !1, alphaBlendingMode: 1, mainTextureType: 0, generateStencilBuffer: !1 }, i), this._init({ alphaBlendingMode: this._options.alphaBlendingMode, camera: this._options.camera, mainTextureFixedSize: this._options.mainTextureFixedSize, mainTextureRatio: this._options.mainTextureRatio, renderingGroupId: this._options.renderingGroupId, mainTextureType: this._options.mainTextureType, generateStencilBuffer: this._options.generateStencilBuffer }); } /** * Get the effect name of the layer. * @returns The effect name */ getEffectName() { return Y_.EffectName; } /** * Create the merge effect. This is the shader use to blit the information back * to the main canvas at the end of the scene rendering. */ _createMergeEffect() { let e = `#define EMISSIVE `; return this._options.ldrMerge && (e += `#define LDR `), this._engine.createEffect("glowMapMerge", [Y.PositionKind], ["offset"], ["textureSampler", "textureSampler2"], e); } /** * Creates the render target textures and post processes used in the glow layer. */ _createTextureAndPostProcesses() { let e = this._mainTextureDesiredSize.width, t = this._mainTextureDesiredSize.height; e = this._engine.needPOTTextures ? $e.GetExponentOfTwo(e, this._maxSize) : e, t = this._engine.needPOTTextures ? $e.GetExponentOfTwo(t, this._maxSize) : t; let i = 0; this._engine.getCaps().textureHalfFloatRender ? 
i = 2 : i = 0, this._blurTexture1 = new ra("GlowLayerBlurRTT", { width: e, height: t }, this._scene, !1, !0, i), this._blurTexture1.wrapU = De.CLAMP_ADDRESSMODE, this._blurTexture1.wrapV = De.CLAMP_ADDRESSMODE, this._blurTexture1.updateSamplingMode(De.BILINEAR_SAMPLINGMODE), this._blurTexture1.renderParticles = !1, this._blurTexture1.ignoreCameraViewport = !0; const r = Math.floor(e / 2), s = Math.floor(t / 2); this._blurTexture2 = new ra("GlowLayerBlurRTT2", { width: r, height: s }, this._scene, !1, !0, i), this._blurTexture2.wrapU = De.CLAMP_ADDRESSMODE, this._blurTexture2.wrapV = De.CLAMP_ADDRESSMODE, this._blurTexture2.updateSamplingMode(De.BILINEAR_SAMPLINGMODE), this._blurTexture2.renderParticles = !1, this._blurTexture2.ignoreCameraViewport = !0, this._textures = [this._blurTexture1, this._blurTexture2]; const n = this._getEffectiveBlurKernelSize(); this._horizontalBlurPostprocess1 = new fu("GlowLayerHBP1", new at(1, 0), n, { width: e, height: t }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._horizontalBlurPostprocess1.width = e, this._horizontalBlurPostprocess1.height = t, this._horizontalBlurPostprocess1.externalTextureSamplerBinding = !0, this._horizontalBlurPostprocess1.onApplyObservable.add((a) => { a.setTexture("textureSampler", this._mainTexture); }), this._verticalBlurPostprocess1 = new fu("GlowLayerVBP1", new at(0, 1), n, { width: e, height: t }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._horizontalBlurPostprocess2 = new fu("GlowLayerHBP2", new at(1, 0), n, { width: r, height: s }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._horizontalBlurPostprocess2.width = r, this._horizontalBlurPostprocess2.height = s, this._horizontalBlurPostprocess2.externalTextureSamplerBinding = !0, this._horizontalBlurPostprocess2.onApplyObservable.add((a) => { a.setTexture("textureSampler", this._blurTexture1); }), this._verticalBlurPostprocess2 = new fu("GlowLayerVBP2", new at(0, 1), n, { width: r, height: s }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._postProcesses = [this._horizontalBlurPostprocess1, this._verticalBlurPostprocess1, this._horizontalBlurPostprocess2, this._verticalBlurPostprocess2], this._postProcesses1 = [this._horizontalBlurPostprocess1, this._verticalBlurPostprocess1], this._postProcesses2 = [this._horizontalBlurPostprocess2, this._verticalBlurPostprocess2], this._mainTexture.samples = this._options.mainTextureSamples, this._mainTexture.onAfterUnbindObservable.add(() => { const a = this._blurTexture1.renderTarget; if (a) { this._scene.postProcessManager.directRender(this._postProcesses1, a, !0); const l = this._blurTexture2.renderTarget; l && this._scene.postProcessManager.directRender(this._postProcesses2, l, !0), this._engine.unBindFramebuffer(l ?? a, !0); } }), this._postProcesses.map((a) => { a.autoClear = !1; }); } /** * @returns The blur kernel size used by the glow. * Note: The value passed in the options is divided by 2 for back compatibility. */ _getEffectiveBlurKernelSize() { return this._options.blurKernelSize / 2; } /** * Checks for the readiness of the element composing the layer. 
* @param subMesh the mesh to check for * @param useInstances specify whether or not to use instances to render the mesh * @returns true if ready otherwise, false */ isReady(e, t) { const i = e.getMaterial(), r = e.getRenderingMesh(); if (!i || !r) return !1; const s = i.emissiveTexture; return super._isReady(e, t, s); } /** * Returns whether or not the layer needs stencil enabled during the mesh rendering. */ needStencil() { return !1; } /** * Returns true if the mesh can be rendered, otherwise false. * @param mesh The mesh to render * @param material The material used on the mesh * @returns true if it can be rendered otherwise false */ _canRenderMesh(e, t) { return !0; } /** * Implementation specific of rendering the generating effect on the main canvas. * @param effect The effect used to render through */ _internalRender(e) { e.setTexture("textureSampler", this._blurTexture1), e.setTexture("textureSampler2", this._blurTexture2), e.setFloat("offset", this._intensity); const t = this._engine, i = t.getStencilBuffer(); t.setStencilBuffer(!1), t.drawElementsType(At.TriangleFillMode, 0, 6), t.setStencilBuffer(i); } /** * Sets the required values for both the emissive texture and and the main color. * @param mesh * @param subMesh * @param material */ _setEmissiveTextureAndColor(e, t, i) { var r; let s = 1; if (this.customEmissiveTextureSelector ? this._emissiveTextureAndColor.texture = this.customEmissiveTextureSelector(e, t, i) : i ? (this._emissiveTextureAndColor.texture = i.emissiveTexture, this._emissiveTextureAndColor.texture && (s = this._emissiveTextureAndColor.texture.level)) : this._emissiveTextureAndColor.texture = null, this.customEmissiveColorSelector) this.customEmissiveColorSelector(e, t, i, this._emissiveTextureAndColor.color); else if (i.emissiveColor) { const n = (r = i.emissiveIntensity) !== null && r !== void 0 ? r : 1; s *= n, this._emissiveTextureAndColor.color.set(i.emissiveColor.r * s, i.emissiveColor.g * s, i.emissiveColor.b * s, i.alpha); } else this._emissiveTextureAndColor.color.set(this.neutralColor.r, this.neutralColor.g, this.neutralColor.b, this.neutralColor.a); } /** * Returns true if the mesh should render, otherwise false. * @param mesh The mesh to render * @returns true if it should render otherwise false */ _shouldRenderMesh(e) { return this.hasMesh(e); } /** * Adds specific effects defines. * @param defines The defines to add specifics to. */ _addCustomEffectDefines(e) { e.push("#define GLOW"); } /** * Add a mesh in the exclusion list to prevent it to impact or being impacted by the glow layer. * @param mesh The mesh to exclude from the glow layer */ addExcludedMesh(e) { this._excludedMeshes.indexOf(e.uniqueId) === -1 && this._excludedMeshes.push(e.uniqueId); } /** * Remove a mesh from the exclusion list to let it impact or being impacted by the glow layer. * @param mesh The mesh to remove */ removeExcludedMesh(e) { const t = this._excludedMeshes.indexOf(e.uniqueId); t !== -1 && this._excludedMeshes.splice(t, 1); } /** * Add a mesh in the inclusion list to impact or being impacted by the glow layer. * @param mesh The mesh to include in the glow layer */ addIncludedOnlyMesh(e) { this._includedOnlyMeshes.indexOf(e.uniqueId) === -1 && this._includedOnlyMeshes.push(e.uniqueId); } /** * Remove a mesh from the Inclusion list to prevent it to impact or being impacted by the glow layer. 
* @param mesh The mesh to remove */ removeIncludedOnlyMesh(e) { const t = this._includedOnlyMeshes.indexOf(e.uniqueId); t !== -1 && this._includedOnlyMeshes.splice(t, 1); } /** * Determine if a given mesh will be used in the glow layer * @param mesh The mesh to test * @returns true if the mesh will be highlighted by the current glow layer */ hasMesh(e) { return super.hasMesh(e) ? this._includedOnlyMeshes.length ? this._includedOnlyMeshes.indexOf(e.uniqueId) !== -1 : this._excludedMeshes.length ? this._excludedMeshes.indexOf(e.uniqueId) === -1 : !0 : !1; } /** * Defines whether the current material of the mesh should be use to render the effect. * @param mesh defines the current mesh to render */ _useMeshMaterial(e) { return this._meshesUsingTheirOwnMaterials.length == 0 ? !1 : this._meshesUsingTheirOwnMaterials.indexOf(e.uniqueId) > -1; } /** * Add a mesh to be rendered through its own material and not with emissive only. * @param mesh The mesh for which we need to use its material */ referenceMeshToUseItsOwnMaterial(e) { e.resetDrawCache(this._mainTexture.renderPassId), this._meshesUsingTheirOwnMaterials.push(e.uniqueId), e.onDisposeObservable.add(() => { this._disposeMesh(e); }); } /** * Remove a mesh from being rendered through its own material and not with emissive only. * @param mesh The mesh for which we need to not use its material */ unReferenceMeshFromUsingItsOwnMaterial(e) { let t = this._meshesUsingTheirOwnMaterials.indexOf(e.uniqueId); for (; t >= 0; ) this._meshesUsingTheirOwnMaterials.splice(t, 1), t = this._meshesUsingTheirOwnMaterials.indexOf(e.uniqueId); e.resetDrawCache(this._mainTexture.renderPassId); } /** * Free any resources and references associated to a mesh. * Internal use * @param mesh The mesh to free. * @internal */ _disposeMesh(e) { this.removeIncludedOnlyMesh(e), this.removeExcludedMesh(e); } /** * Gets the class name of the effect layer * @returns the string with the class name of the effect layer */ getClassName() { return "GlowLayer"; } /** * Serializes this glow layer * @returns a serialized glow layer object */ serialize() { const e = St.Serialize(this); e.customType = "BABYLON.GlowLayer"; let t; if (e.includedMeshes = [], this._includedOnlyMeshes.length) for (t = 0; t < this._includedOnlyMeshes.length; t++) { const i = this._scene.getMeshByUniqueId(this._includedOnlyMeshes[t]); i && e.includedMeshes.push(i.id); } if (e.excludedMeshes = [], this._excludedMeshes.length) for (t = 0; t < this._excludedMeshes.length; t++) { const i = this._scene.getMeshByUniqueId(this._excludedMeshes[t]); i && e.excludedMeshes.push(i.id); } return e; } /** * Creates a Glow Layer from parsed glow layer data * @param parsedGlowLayer defines glow layer data * @param scene defines the current scene * @param rootUrl defines the root URL containing the glow layer information * @returns a parsed Glow Layer */ static Parse(e, t, i) { const r = St.Parse(() => new Y_(e.name, t, e.options), e, t, i); let s; for (s = 0; s < e.excludedMeshes.length; s++) { const n = t.getMeshById(e.excludedMeshes[s]); n && r.addExcludedMesh(n); } for (s = 0; s < e.includedMeshes.length; s++) { const n = t.getMeshById(e.includedMeshes[s]); n && r.addIncludedOnlyMesh(n); } return r; } } Y_.EffectName = "GlowLayer"; Y_.DefaultBlurKernelSize = 32; Y_.DefaultTextureRatio = 0.5; F([ W() ], Y_.prototype, "blurKernelSize", null); F([ W() ], Y_.prototype, "intensity", null); F([ W("options") ], Y_.prototype, "_options", void 0); Be("BABYLON.GlowLayer", Y_); const R2e = "glowBlurPostProcessPixelShader", P2e = 
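/*
 * Mesh selection sketch for the glow layer (assumes `glow` from the previous note and meshes
 * already in the scene). hasMesh() gives the included-only list priority over the excluded list:
 *
 *   glow.addExcludedMesh(ground);            // ground no longer contributes to the glow
 *   glow.addIncludedOnlyMesh(lamp);          // from now on only explicitly included meshes glow
 *   glow.referenceMeshToUseItsOwnMaterial(lamp);   // render lamp with its own material instead
 *                                                  // of the emissive-only pass
 *   glow.customEmissiveColorSelector = (mesh, subMesh, material, result) => {
 *       result.set(0, 1, 0, 1);              // force a green glow for every glowing mesh
 *   };
 */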
`varying vec2 vUV;uniform sampler2D textureSampler;uniform vec2 screenSize;uniform vec2 direction;uniform float blurWidth;float getLuminance(vec3 color) {return dot(color,vec3(0.2126,0.7152,0.0722));} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {float weights[7];weights[0]=0.05;weights[1]=0.1;weights[2]=0.2;weights[3]=0.3;weights[4]=0.2;weights[5]=0.1;weights[6]=0.05;vec2 texelSize=vec2(1.0/screenSize.x,1.0/screenSize.y);vec2 texelStep=texelSize*direction*blurWidth;vec2 start=vUV-3.0*texelStep;vec4 baseColor=vec4(0.,0.,0.,0.);vec2 texelOffset=vec2(0.,0.);for (int i=0; i<7; i++) {vec4 texel=texture2D(textureSampler,start+texelOffset);baseColor.a+=texel.a*weights[i];float luminance=getLuminance(baseColor.rgb);float luminanceTexel=getLuminance(texel.rgb);float choice=step(luminanceTexel,luminance);baseColor.rgb=choice*baseColor.rgb+(1.0-choice)*texel.rgb;texelOffset+=texelStep;} gl_FragColor=baseColor;}`; je.ShadersStore[R2e] = P2e; Yl.prototype.getHighlightLayerByName = function(c) { var e; for (let t = 0; t < ((e = this.effectLayers) === null || e === void 0 ? void 0 : e.length); t++) if (this.effectLayers[t].name === c && this.effectLayers[t].getEffectName() === of.EffectName) return this.effectLayers[t]; return null; }; class oq extends Bi { constructor(e, t, i, r, s, n = De.BILINEAR_SAMPLINGMODE, a, l) { super(e, "glowBlurPostProcess", ["screenSize", "direction", "blurWidth"], null, r, s, n, a, l), this.direction = t, this.kernel = i, this.onApplyObservable.add((o) => { o.setFloat2("screenSize", this.width, this.height), o.setVector2("direction", this.direction), o.setFloat("blurWidth", this.kernel); }); } } class of extends Tm { /** * Specifies the horizontal size of the blur. */ set blurHorizontalSize(e) { this._horizontalBlurPostprocess.kernel = e, this._options.blurHorizontalSize = e; } /** * Specifies the vertical size of the blur. */ set blurVerticalSize(e) { this._verticalBlurPostprocess.kernel = e, this._options.blurVerticalSize = e; } /** * Gets the horizontal size of the blur. */ get blurHorizontalSize() { return this._horizontalBlurPostprocess.kernel; } /** * Gets the vertical size of the blur. */ get blurVerticalSize() { return this._verticalBlurPostprocess.kernel; } /** * Instantiates a new highlight Layer and references it to the scene.. * @param name The name of the layer * @param scene The scene to use the layer in * @param options Sets of none mandatory options to use with the layer (see IHighlightLayerOptions for more information) */ constructor(e, t, i) { super(e, t), this.name = e, this.innerGlow = !0, this.outerGlow = !0, this.onBeforeBlurObservable = new Fe(), this.onAfterBlurObservable = new Fe(), this._instanceGlowingMeshStencilReference = of.GlowingMeshStencilReference++, this._meshes = {}, this._excludedMeshes = {}, this.neutralColor = of.NeutralColor, this._engine.isStencilEnable || Ce.Warn("Rendering the Highlight Layer requires the stencil to be active on the canvas. 
var engine = new Engine(canvas, antialias, { stencil: true }"), this._options = Object.assign({ mainTextureRatio: 0.5, blurTextureSizeRatio: 0.5, blurHorizontalSize: 1, blurVerticalSize: 1, alphaBlendingMode: 2, camera: null, renderingGroupId: -1, mainTextureType: 0 }, i), this._init({ alphaBlendingMode: this._options.alphaBlendingMode, camera: this._options.camera, mainTextureFixedSize: this._options.mainTextureFixedSize, mainTextureRatio: this._options.mainTextureRatio, renderingGroupId: this._options.renderingGroupId, mainTextureType: this._options.mainTextureType }), this._shouldRender = !1; } /** * Get the effect name of the layer. * @returns The effect name */ getEffectName() { return of.EffectName; } _numInternalDraws() { return 2; } /** * Create the merge effect. This is the shader use to blit the information back * to the main canvas at the end of the scene rendering. */ _createMergeEffect() { return this._engine.createEffect("glowMapMerge", [Y.PositionKind], ["offset"], ["textureSampler"], this._options.isStroke ? `#define STROKE ` : void 0); } /** * Creates the render target textures and post processes used in the highlight layer. */ _createTextureAndPostProcesses() { let e = this._mainTextureDesiredSize.width * this._options.blurTextureSizeRatio, t = this._mainTextureDesiredSize.height * this._options.blurTextureSizeRatio; e = this._engine.needPOTTextures ? $e.GetExponentOfTwo(e, this._maxSize) : e, t = this._engine.needPOTTextures ? $e.GetExponentOfTwo(t, this._maxSize) : t; let i = 0; this._engine.getCaps().textureHalfFloatRender ? i = 2 : i = 0, this._blurTexture = new ra("HighlightLayerBlurRTT", { width: e, height: t }, this._scene, !1, !0, i), this._blurTexture.wrapU = De.CLAMP_ADDRESSMODE, this._blurTexture.wrapV = De.CLAMP_ADDRESSMODE, this._blurTexture.anisotropicFilteringLevel = 16, this._blurTexture.updateSamplingMode(De.TRILINEAR_SAMPLINGMODE), this._blurTexture.renderParticles = !1, this._blurTexture.ignoreCameraViewport = !0, this._textures = [this._blurTexture], this._options.alphaBlendingMode === 2 ? 
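/*
 * Usage sketch for the highlight layer class (minified `of`, registered as
 * "BABYLON.HighlightLayer"). As the warning above states, the engine needs a stencil buffer;
 * option names come from the constructor defaults:
 *
 *   const engine = new BABYLON.Engine(canvas, true, { stencil: true });
 *   const hl = new BABYLON.HighlightLayer("hl", scene, {
 *       blurTextureSizeRatio: 0.5,   // blur RTT size relative to the main texture
 *       blurHorizontalSize: 1,
 *       blurVerticalSize: 1,
 *       isStroke: false              // true switches the merge shader to its STROKE variant
 *   });
 */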
(this._downSamplePostprocess = new h6("HighlightLayerPPP", this._options.blurTextureSizeRatio, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine()), this._downSamplePostprocess.externalTextureSamplerBinding = !0, this._downSamplePostprocess.onApplyObservable.add((r) => { r.setTexture("textureSampler", this._mainTexture); }), this._horizontalBlurPostprocess = new oq("HighlightLayerHBP", new at(1, 0), this._options.blurHorizontalSize, 1, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine()), this._horizontalBlurPostprocess.onApplyObservable.add((r) => { r.setFloat2("screenSize", e, t); }), this._verticalBlurPostprocess = new oq("HighlightLayerVBP", new at(0, 1), this._options.blurVerticalSize, 1, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine()), this._verticalBlurPostprocess.onApplyObservable.add((r) => { r.setFloat2("screenSize", e, t); }), this._postProcesses = [this._downSamplePostprocess, this._horizontalBlurPostprocess, this._verticalBlurPostprocess]) : (this._horizontalBlurPostprocess = new fu("HighlightLayerHBP", new at(1, 0), this._options.blurHorizontalSize / 2, { width: e, height: t }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._horizontalBlurPostprocess.width = e, this._horizontalBlurPostprocess.height = t, this._horizontalBlurPostprocess.externalTextureSamplerBinding = !0, this._horizontalBlurPostprocess.onApplyObservable.add((r) => { r.setTexture("textureSampler", this._mainTexture); }), this._verticalBlurPostprocess = new fu("HighlightLayerVBP", new at(0, 1), this._options.blurVerticalSize / 2, { width: e, height: t }, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i), this._postProcesses = [this._horizontalBlurPostprocess, this._verticalBlurPostprocess]), this._mainTexture.onAfterUnbindObservable.add(() => { this.onBeforeBlurObservable.notifyObservers(this); const r = this._blurTexture.renderTarget; r && (this._scene.postProcessManager.directRender(this._postProcesses, r, !0), this._engine.unBindFramebuffer(r, !0)), this.onAfterBlurObservable.notifyObservers(this); }), this._postProcesses.map((r) => { r.autoClear = !1; }); } /** * Returns whether or not the layer needs stencil enabled during the mesh rendering. */ needStencil() { return !0; } /** * Checks for the readiness of the element composing the layer. * @param subMesh the mesh to check for * @param useInstances specify whether or not to use instances to render the mesh * @returns true if ready otherwise, false */ isReady(e, t) { const i = e.getMaterial(), r = e.getRenderingMesh(); if (!i || !r || !this._meshes) return !1; let s = null; const n = this._meshes[r.uniqueId]; return n && n.glowEmissiveOnly && i && (s = i.emissiveTexture), super._isReady(e, t, s); } /** * Implementation specific of rendering the generating effect on the main canvas. 
* @param effect The effect used to render through * @param renderIndex */ _internalRender(e, t) { e.setTexture("textureSampler", this._blurTexture); const i = this._engine; i.cacheStencilState(), i.setStencilOperationPass(7681), i.setStencilOperationFail(7680), i.setStencilOperationDepthFail(7680), i.setStencilMask(0), i.setStencilBuffer(!0), i.setStencilFunctionReference(this._instanceGlowingMeshStencilReference), this.outerGlow && t === 0 && (e.setFloat("offset", 0), i.setStencilFunction(517), i.drawElementsType(At.TriangleFillMode, 0, 6)), this.innerGlow && t === 1 && (e.setFloat("offset", 1), i.setStencilFunction(514), i.drawElementsType(At.TriangleFillMode, 0, 6)), i.restoreStencilState(); } /** * Returns true if the layer contains information to display, otherwise false. */ shouldRender() { return super.shouldRender() ? !!this._meshes : !1; } /** * Returns true if the mesh should render, otherwise false. * @param mesh The mesh to render * @returns true if it should render otherwise false */ _shouldRenderMesh(e) { return !(this._excludedMeshes && this._excludedMeshes[e.uniqueId] || !super.hasMesh(e)); } /** * Returns true if the mesh can be rendered, otherwise false. * @param mesh The mesh to render * @param material The material used on the mesh * @returns true if it can be rendered otherwise false */ _canRenderMesh(e, t) { return !0; } /** * Adds specific effects defines. * @param defines The defines to add specifics to. */ _addCustomEffectDefines(e) { e.push("#define HIGHLIGHT"); } /** * Sets the required values for both the emissive texture and and the main color. * @param mesh * @param subMesh * @param material */ _setEmissiveTextureAndColor(e, t, i) { const r = this._meshes[e.uniqueId]; r ? this._emissiveTextureAndColor.color.set(r.color.r, r.color.g, r.color.b, 1) : this._emissiveTextureAndColor.color.set(this.neutralColor.r, this.neutralColor.g, this.neutralColor.b, this.neutralColor.a), r && r.glowEmissiveOnly && i ? (this._emissiveTextureAndColor.texture = i.emissiveTexture, this._emissiveTextureAndColor.color.set(1, 1, 1, 1)) : this._emissiveTextureAndColor.texture = null; } /** * Add a mesh in the exclusion list to prevent it to impact or being impacted by the highlight layer. * @param mesh The mesh to exclude from the highlight layer */ addExcludedMesh(e) { if (!this._excludedMeshes) return; if (!this._excludedMeshes[e.uniqueId]) { const i = { mesh: e, beforeBind: null, afterRender: null, stencilState: !1 }; i.beforeBind = e.onBeforeBindObservable.add((r) => { i.stencilState = r.getEngine().getStencilBuffer(), r.getEngine().setStencilBuffer(!1); }), i.afterRender = e.onAfterRenderObservable.add((r) => { r.getEngine().setStencilBuffer(i.stencilState); }), this._excludedMeshes[e.uniqueId] = i; } } /** * Remove a mesh from the exclusion list to let it impact or being impacted by the highlight layer. * @param mesh The mesh to highlight */ removeExcludedMesh(e) { if (!this._excludedMeshes) return; const t = this._excludedMeshes[e.uniqueId]; t && (t.beforeBind && e.onBeforeBindObservable.remove(t.beforeBind), t.afterRender && e.onAfterRenderObservable.remove(t.afterRender)), this._excludedMeshes[e.uniqueId] = null; } /** * Determine if a given mesh will be highlighted by the current HighlightLayer * @param mesh mesh to test * @returns true if the mesh will be highlighted by the current HighlightLayer */ hasMesh(e) { return !this._meshes || !super.hasMesh(e) ? 
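/*
 * Highlighted-mesh management sketch (assumes `hl` from the previous note). addMesh/removeMesh
 * drive the per-mesh stencil reference; innerGlow / outerGlow choose which of the two internal
 * merge draws is kept:
 *
 *   hl.addMesh(sphere, BABYLON.Color3.Green());      // outline the sphere in green
 *   hl.addMesh(torus, BABYLON.Color3.Red(), true);   // glowEmissiveOnly: take the emissive texture
 *   hl.addExcludedMesh(ground);                      // ground is left out of the stencil pass
 *   hl.innerGlow = false;                            // keep only the outer halo
 *   hl.removeMesh(torus);
 *   hl.removeAllMeshes();
 */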
!1 : this._meshes[e.uniqueId] !== void 0 && this._meshes[e.uniqueId] !== null; } /** * Add a mesh in the highlight layer in order to make it glow with the chosen color. * @param mesh The mesh to highlight * @param color The color of the highlight * @param glowEmissiveOnly Extract the glow from the emissive texture */ addMesh(e, t, i = !1) { if (!this._meshes) return; const r = this._meshes[e.uniqueId]; r ? r.color = t : (this._meshes[e.uniqueId] = { mesh: e, color: t, // Lambda required for capture due to Observable this context observerHighlight: e.onBeforeBindObservable.add((s) => { this.isEnabled && (this._excludedMeshes && this._excludedMeshes[s.uniqueId] ? this._defaultStencilReference(s) : s.getScene().getEngine().setStencilFunctionReference(this._instanceGlowingMeshStencilReference)); }), observerDefault: e.onAfterRenderObservable.add((s) => { this.isEnabled && this._defaultStencilReference(s); }), glowEmissiveOnly: i }, e.onDisposeObservable.add(() => { this._disposeMesh(e); })), this._shouldRender = !0; } /** * Remove a mesh from the highlight layer in order to make it stop glowing. * @param mesh The mesh to highlight */ removeMesh(e) { if (!this._meshes) return; const t = this._meshes[e.uniqueId]; t && (t.observerHighlight && e.onBeforeBindObservable.remove(t.observerHighlight), t.observerDefault && e.onAfterRenderObservable.remove(t.observerDefault), delete this._meshes[e.uniqueId]), this._shouldRender = !1; for (const i in this._meshes) if (this._meshes[i]) { this._shouldRender = !0; break; } } /** * Remove all the meshes currently referenced in the highlight layer */ removeAllMeshes() { if (this._meshes) { for (const e in this._meshes) if (Object.prototype.hasOwnProperty.call(this._meshes, e)) { const t = this._meshes[e]; t && this.removeMesh(t.mesh); } } } /** * Force the stencil to the normal expected value for none glowing parts * @param mesh */ _defaultStencilReference(e) { e.getScene().getEngine().setStencilFunctionReference(of.NormalMeshStencilReference); } /** * Free any resources and references associated to a mesh. * Internal use * @param mesh The mesh to free. * @internal */ _disposeMesh(e) { this.removeMesh(e), this.removeExcludedMesh(e); } /** * Dispose the highlight layer and free resources. 
*/ dispose() { if (this._meshes) { for (const e in this._meshes) { const t = this._meshes[e]; t && t.mesh && (t.observerHighlight && t.mesh.onBeforeBindObservable.remove(t.observerHighlight), t.observerDefault && t.mesh.onAfterRenderObservable.remove(t.observerDefault)); } this._meshes = null; } if (this._excludedMeshes) { for (const e in this._excludedMeshes) { const t = this._excludedMeshes[e]; t && (t.beforeBind && t.mesh.onBeforeBindObservable.remove(t.beforeBind), t.afterRender && t.mesh.onAfterRenderObservable.remove(t.afterRender)); } this._excludedMeshes = null; } super.dispose(); } /** * Gets the class name of the effect layer * @returns the string with the class name of the effect layer */ getClassName() { return "HighlightLayer"; } /** * Serializes this Highlight layer * @returns a serialized Highlight layer object */ serialize() { const e = St.Serialize(this); if (e.customType = "BABYLON.HighlightLayer", e.meshes = [], this._meshes) for (const t in this._meshes) { const i = this._meshes[t]; i && e.meshes.push({ glowEmissiveOnly: i.glowEmissiveOnly, color: i.color.asArray(), meshId: i.mesh.id }); } if (e.excludedMeshes = [], this._excludedMeshes) for (const t in this._excludedMeshes) { const i = this._excludedMeshes[t]; i && e.excludedMeshes.push(i.mesh.id); } return e; } /** * Creates a Highlight layer from parsed Highlight layer data * @param parsedHightlightLayer defines the Highlight layer data * @param scene defines the current scene * @param rootUrl defines the root URL containing the Highlight layer information * @returns a parsed Highlight layer */ static Parse(e, t, i) { const r = St.Parse(() => new of(e.name, t, e.options), e, t, i); let s; for (s = 0; s < e.excludedMeshes.length; s++) { const n = t.getMeshById(e.excludedMeshes[s]); n && r.addExcludedMesh(n); } for (s = 0; s < e.meshes.length; s++) { const n = e.meshes[s], a = t.getMeshById(n.meshId); a && r.addMesh(a, ze.FromArray(n.color), n.glowEmissiveOnly); } return r; } } of.EffectName = "HighlightLayer"; of.NeutralColor = new Et(0, 0, 0, 0); of.GlowingMeshStencilReference = 2; of.NormalMeshStencilReference = 1; F([ W() ], of.prototype, "innerGlow", void 0); F([ W() ], of.prototype, "outerGlow", void 0); F([ W() ], of.prototype, "blurHorizontalSize", null); F([ W() ], of.prototype, "blurVerticalSize", null); F([ W("options") ], of.prototype, "_options", void 0); Be("BABYLON.HighlightLayer", of); class dre { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_LAYER, this.scene = e || gi.LastCreatedScene, this.scene && (this._engine = this.scene.getEngine(), this.scene.layers = []); } /** * Registers the component in a given scene */ register() { this.scene._beforeCameraDrawStage.registerStep(Bt.STEP_BEFORECAMERADRAW_LAYER, this, this._drawCameraBackground), this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_LAYER, this, this._drawCameraForegroundWithPostProcessing), this.scene._afterCameraPostProcessStage.registerStep(Bt.STEP_AFTERCAMERAPOSTPROCESS_LAYER, this, this._drawCameraForegroundWithoutPostProcessing), this.scene._beforeRenderTargetDrawStage.registerStep(Bt.STEP_BEFORERENDERTARGETDRAW_LAYER, this, this._drawRenderTargetBackground), this.scene._afterRenderTargetDrawStage.registerStep(Bt.STEP_AFTERRENDERTARGETDRAW_LAYER, this, this._drawRenderTargetForegroundWithPostProcessing), 
this.scene._afterRenderTargetPostProcessStage.registerStep(Bt.STEP_AFTERRENDERTARGETPOSTPROCESS_LAYER, this, this._drawRenderTargetForegroundWithoutPostProcessing); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { const e = this.scene.layers; for (const t of e) t._rebuild(); } /** * Disposes the component and the associated resources. */ dispose() { const e = this.scene.layers; for (; e.length; ) e[0].dispose(); } _draw(e) { const t = this.scene.layers; if (t.length) { this._engine.setDepthBuffer(!1); for (const i of t) e(i) && i.render(); this._engine.setDepthBuffer(!0); } } _drawCameraPredicate(e, t, i, r) { return !e.renderOnlyInRenderTargetTextures && e.isBackground === t && e.applyPostProcess === i && (e.layerMask & r) !== 0; } _drawCameraBackground(e) { this._draw((t) => this._drawCameraPredicate(t, !0, !0, e.layerMask)); } _drawCameraForegroundWithPostProcessing(e) { this._draw((t) => this._drawCameraPredicate(t, !1, !0, e.layerMask)); } _drawCameraForegroundWithoutPostProcessing(e) { this._draw((t) => this._drawCameraPredicate(t, !1, !1, e.layerMask)); } _drawRenderTargetPredicate(e, t, i, r, s) { return e.renderTargetTextures.length > 0 && e.isBackground === t && e.applyPostProcess === i && e.renderTargetTextures.indexOf(s) > -1 && (e.layerMask & r) !== 0; } _drawRenderTargetBackground(e) { this._draw((t) => this._drawRenderTargetPredicate(t, !0, !0, this.scene.activeCamera.layerMask, e)); } _drawRenderTargetForegroundWithPostProcessing(e) { this._draw((t) => this._drawRenderTargetPredicate(t, !1, !0, this.scene.activeCamera.layerMask, e)); } _drawRenderTargetForegroundWithoutPostProcessing(e) { this._draw((t) => this._drawRenderTargetPredicate(t, !1, !1, this.scene.activeCamera.layerMask, e)); } /** * Adds all the elements from the container to the scene * @param container the container holding the elements */ addFromContainer(e) { e.layers && e.layers.forEach((t) => { this.scene.layers.push(t); }); } /** * Removes all the elements in the container from the scene * @param container contains the elements to remove * @param dispose if the removed element should be disposed (default: false) */ removeFromContainer(e, t = !1) { e.layers && e.layers.forEach((i) => { const r = this.scene.layers.indexOf(i); r !== -1 && this.scene.layers.splice(r, 1), t && i.dispose(); }); } } const I2e = "layerPixelShader", D2e = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec4 color; #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN vec4 baseColor=texture2D(textureSampler,vUV); #ifdef LINEAR baseColor.rgb=toGammaSpace(baseColor.rgb); #endif #ifdef ALPHATEST if (baseColor.a<0.4) discard; #endif gl_FragColor=baseColor*color; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[I2e] = D2e; const O2e = "layerVertexShader", w2e = `attribute vec2 position;uniform vec2 scale;uniform vec2 offset;uniform mat4 textureMatrix;varying vec2 vUV;const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec2 shiftedPosition=position*scale+offset;vUV=vec2(textureMatrix*vec4(shiftedPosition*madd+madd,1.0,0.0));gl_Position=vec4(shiftedPosition,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[O2e] = w2e; class fre { /** * Determines if the layer is drawn before (true) or after (false) post-processing. * If the layer is background, it is always before. 
*/ set applyPostProcess(e) { this._applyPostProcess = e; } get applyPostProcess() { return this.isBackground || this._applyPostProcess; } /** * Back compatibility with callback before the onDisposeObservable existed. * The set callback will be triggered when the layer has been disposed. */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * Back compatibility with callback before the onBeforeRenderObservable existed. * The set callback will be triggered just before rendering the layer. */ set onBeforeRender(e) { this._onBeforeRenderObserver && this.onBeforeRenderObservable.remove(this._onBeforeRenderObserver), this._onBeforeRenderObserver = this.onBeforeRenderObservable.add(e); } /** * Back compatibility with callback before the onAfterRenderObservable existed. * The set callback will be triggered just after rendering the layer. */ set onAfterRender(e) { this._onAfterRenderObserver && this.onAfterRenderObservable.remove(this._onAfterRenderObserver), this._onAfterRenderObserver = this.onAfterRenderObservable.add(e); } /** * Instantiates a new layer. * This represents a full screen 2d layer. * This can be useful to display a picture in the background of your scene for instance. * @see https://www.babylonjs-playground.com/#08A2BS#1 * @param name Define the name of the layer in the scene * @param imgUrl Define the url of the texture to display in the layer * @param scene Define the scene the layer belongs to * @param isBackground Defines whether the layer is displayed in front or behind the scene * @param color Defines a color for the layer */ constructor(e, t, i, r, s) { this.name = e, this._applyPostProcess = !0, this.scale = new at(1, 1), this.offset = new at(0, 0), this.alphaBlendingMode = 2, this.layerMask = 268435455, this.renderTargetTextures = [], this.renderOnlyInRenderTargetTextures = !1, this.isEnabled = !0, this._vertexBuffers = {}, this.onDisposeObservable = new Fe(), this.onBeforeRenderObservable = new Fe(), this.onAfterRenderObservable = new Fe(), this.texture = t ? new De(t, i, !0) : null, this.isBackground = r === void 0 ? !0 : r, this.color = s === void 0 ? new Et(1, 1, 1, 1) : s, this._scene = i || gi.LastCreatedScene; let n = this._scene._getComponent(Bt.NAME_LAYER); n || (n = new dre(this._scene), this._scene._addComponent(n)), this._scene.layers.push(this); const a = this._scene.getEngine(); this._drawWrapper = new $o(a); const l = []; l.push(1, 1), l.push(-1, 1), l.push(-1, -1), l.push(1, -1); const o = new Y(a, l, Y.PositionKind, !1, !1, 2); this._vertexBuffers[Y.PositionKind] = o, this._createIndexBuffer(); } _createIndexBuffer() { const e = this._scene.getEngine(), t = []; t.push(0), t.push(1), t.push(2), t.push(0), t.push(2), t.push(3), this._indexBuffer = e.createIndexBuffer(t); } /** @internal */ _rebuild() { const e = this._vertexBuffers[Y.PositionKind]; e && e._rebuild(), this._createIndexBuffer(); } /** * Checks if the layer is ready to be rendered * @returns true if the layer is ready. False otherwise. */ isReady() { var e; const t = this._scene.getEngine(); let i = ""; this.alphaTest && (i = "#define ALPHATEST"), this.texture && !this.texture.gammaSpace && (i += ` #define LINEAR`), this._previousDefines !== i && (this._previousDefines = i, this._drawWrapper.effect = t.createEffect("layer", [Y.PositionKind], ["textureMatrix", "color", "scale", "offset"], ["textureSampler"], i)); const r = this._drawWrapper.effect; return (r == null ? 
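/*
 * Usage sketch for the full screen 2d layer class above (its public name in Babylon.js is Layer;
 * the texture URL and scene are placeholders). A background layer is always drawn before
 * post-processing, as the applyPostProcess getter above shows:
 *
 *   const background = new BABYLON.Layer("bg", "textures/sky.jpg", scene, true);
 *   background.color = new BABYLON.Color4(1, 1, 1, 1);
 *   background.scale.x = 2;               // stretch the quad horizontally
 *   background.onBeforeRender = () => {   // back-compat setter wrapping onBeforeRenderObservable
 *       background.offset.x += 0.001;     // shift the layer a little every frame
 *   };
 */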
void 0 : r.isReady()) && ((e = this.texture) === null || e === void 0 ? void 0 : e.isReady()); } /** * Renders the layer in the scene. */ render() { if (!this.isEnabled) return; const e = this._scene.getEngine(); if (!this.isReady()) return; const t = this._drawWrapper.effect; this.onBeforeRenderObservable.notifyObservers(this), e.enableEffect(this._drawWrapper), e.setState(!1), t.setTexture("textureSampler", this.texture), t.setMatrix("textureMatrix", this.texture.getTextureMatrix()), t.setFloat4("color", this.color.r, this.color.g, this.color.b, this.color.a), t.setVector2("offset", this.offset), t.setVector2("scale", this.scale), e.bindBuffers(this._vertexBuffers, this._indexBuffer, t), this.alphaTest ? e.drawElementsType(At.TriangleFillMode, 0, 6) : (e.setAlphaMode(this.alphaBlendingMode), e.drawElementsType(At.TriangleFillMode, 0, 6), e.setAlphaMode(0)), this.onAfterRenderObservable.notifyObservers(this); } /** * Disposes and releases the associated resources. */ dispose() { const e = this._vertexBuffers[Y.PositionKind]; e && (e.dispose(), this._vertexBuffers[Y.PositionKind] = null), this._indexBuffer && (this._scene.getEngine()._releaseBuffer(this._indexBuffer), this._indexBuffer = null), this.texture && (this.texture.dispose(), this.texture = null), this.renderTargetTextures = []; const t = this._scene.layers.indexOf(this); this._scene.layers.splice(t, 1), this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.onAfterRenderObservable.clear(), this.onBeforeRenderObservable.clear(); } } class IU { /** * Creates a new Lens Flare. * This represents one of the lens effect in a `lensFlareSystem`. * It controls one of the individual texture used in the effect. * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/lenseFlare * @param size Define the size of the lens flare (a floating value between 0 and 1) * @param position Define the position of the lens flare in the system. (a floating value between -1 and 1). A value of 0 is located on the emitter. A value greater than 0 is beyond the emitter and a value lesser than 0 is behind. * @param color Define the lens color * @param imgUrl Define the lens texture url * @param system Define the `lensFlareSystem` this flare is part of * @returns The newly created Lens Flare */ static AddFlare(e, t, i, r, s) { return new IU(e, t, i, r, s); } /** * Instantiates a new Lens Flare. * This represents one of the lens effect in a `lensFlareSystem`. * It controls one of the individual texture used in the effect. * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/lenseFlare * @param size Define the size of the lens flare in the system (a floating value between 0 and 1) * @param position Define the position of the lens flare in the system. (a floating value between -1 and 1). A value of 0 is located on the emitter. A value greater than 0 is beyond the emitter and a value lesser than 0 is behind. * @param color Define the lens color * @param imgUrl Define the lens texture url * @param system Define the `lensFlareSystem` this flare is part of */ constructor(e, t, i, r, s) { this.size = e, this.position = t, this.alphaMode = 6, this.color = i || new ze(1, 1, 1), this.texture = r ? 
new De(r, s.getScene(), !0) : null, this._system = s; const n = s.scene.getEngine(); this._drawWrapper = new $o(n), this._drawWrapper.effect = n.createEffect("lensFlare", [Y.PositionKind], ["color", "viewportMatrix"], ["textureSampler"], ""), s.lensFlares.push(this); } /** * Dispose and release the lens flare with its associated resources. */ dispose() { this.texture && this.texture.dispose(); const e = this._system.lensFlares.indexOf(this); this._system.lensFlares.splice(e, 1); } } const L2e = "lensFlarePixelShader", N2e = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec4 color; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN vec4 baseColor=texture2D(textureSampler,vUV);gl_FragColor=baseColor*color; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[L2e] = N2e; const F2e = "lensFlareVertexShader", B2e = `attribute vec2 position;uniform mat4 viewportMatrix;varying vec2 vUV;const vec2 madd=vec2(0.5,0.5); #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vUV=position*madd+madd;gl_Position=viewportMatrix*vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[F2e] = B2e; class CP { /** Gets the scene */ get scene() { return this._scene; } /** * Instantiates a lens flare system. * This represents a Lens Flare System or the shiny effect created by the light reflection on the camera lenses. * It is usually composed of several `lensFlare`. * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/lenseFlare * @param name Define the name of the lens flare system in the scene * @param emitter Define the source (the emitter) of the lens flares (it can be a camera, a light or a mesh). * @param scene Define the scene the lens flare system belongs to */ constructor(e, t, i) { this.name = e, this.lensFlares = [], this.borderLimit = 300, this.viewportBorder = 0, this.layerMask = 268435455, this._vertexBuffers = {}, this._isEnabled = !0, this._scene = i || gi.LastCreatedScene, CP._SceneComponentInitialization(this._scene), this._emitter = t, this.id = e, i.lensFlareSystems.push(this), this.meshesSelectionPredicate = (n) => i.activeCamera && n.material && n.isVisible && n.isEnabled() && n.isBlocker && (n.layerMask & i.activeCamera.layerMask) != 0; const r = i.getEngine(), s = []; s.push(1, 1), s.push(-1, 1), s.push(-1, -1), s.push(1, -1), this._vertexBuffers[Y.PositionKind] = new Y(r, s, Y.PositionKind, !1, !1, 2), this._createIndexBuffer(); } _createIndexBuffer() { const e = []; e.push(0), e.push(1), e.push(2), e.push(0), e.push(2), e.push(3), this._indexBuffer = this._scene.getEngine().createIndexBuffer(e); } /** * Define if the lens flare system is enabled. */ get isEnabled() { return this._isEnabled; } set isEnabled(e) { this._isEnabled = e; } /** * Get the scene the effects belongs to. * @returns the scene holding the lens flare system */ getScene() { return this._scene; } /** * Get the emitter of the lens flare system. * It defines the source of the lens flares (it can be a camera, a light or a mesh). * @returns the emitter of the lens flare system */ getEmitter() { return this._emitter; } /** * Set the emitter of the lens flare system. * It defines the source of the lens flares (it can be a camera, a light or a mesh). * @param newEmitter Define the new emitter of the system */ setEmitter(e) { this._emitter = e; } /** * Get the lens flare system emitter position. * The emitter defines the source of the lens flares (it can be a camera, a light or a mesh). 
* @returns the position */ getEmitterPosition() { return this._emitter.getAbsolutePosition ? this._emitter.getAbsolutePosition() : this._emitter.position; } /** * @internal */ computeEffectivePosition(e) { let t = this.getEmitterPosition(); t = D.Project(t, Ae.Identity(), this._scene.getTransformMatrix(), e), this._positionX = t.x, this._positionY = t.y, t = D.TransformCoordinates(this.getEmitterPosition(), this._scene.getViewMatrix()), this.viewportBorder > 0 && (e.x -= this.viewportBorder, e.y -= this.viewportBorder, e.width += this.viewportBorder * 2, e.height += this.viewportBorder * 2, t.x += this.viewportBorder, t.y += this.viewportBorder, this._positionX += this.viewportBorder, this._positionY += this.viewportBorder); const i = this._scene.useRightHandedSystem; return t.z > 0 && !i || t.z < 0 && i ? (this._positionX > e.x && this._positionX < e.x + e.width && this._positionY > e.y && this._positionY < e.y + e.height, !0) : !1; } /** @internal */ _isVisible() { if (!this._isEnabled || !this._scene.activeCamera) return !1; const t = this.getEmitterPosition().subtract(this._scene.activeCamera.globalPosition), i = t.length(); t.normalize(); const r = new gs(this._scene.activeCamera.globalPosition, t), s = this._scene.pickWithRay(r, this.meshesSelectionPredicate, !0); return !s || !s.hit || s.distance > i; } /** * @internal */ render() { if (!this._scene.activeCamera) return !1; const e = this._scene.getEngine(), i = this._scene.activeCamera.viewport.toGlobal(e.getRenderWidth(!0), e.getRenderHeight(!0)); if (!this.computeEffectivePosition(i) || !this._isVisible()) return !1; let r, s; this._positionX < this.borderLimit + i.x ? r = this.borderLimit + i.x - this._positionX : this._positionX > i.x + i.width - this.borderLimit ? r = this._positionX - i.x - i.width + this.borderLimit : r = 0, this._positionY < this.borderLimit + i.y ? s = this.borderLimit + i.y - this._positionY : this._positionY > i.y + i.height - this.borderLimit ? s = this._positionY - i.y - i.height + this.borderLimit : s = 0; let n = r > s ? 
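/*
 * Usage sketch for the lens flare system (public names BABYLON.LensFlareSystem and
 * BABYLON.LensFlare are assumed for the minified CP / IU classes; `sun` is a placeholder light
 * used as the emitter):
 *
 *   const system = new BABYLON.LensFlareSystem("flares", sun, scene);
 *   system.borderLimit = 300;    // fade the flares out near the viewport borders
 *   // size is in [0, 1]; position is in [-1, 1], 0 sitting on the emitter (see the doc above)
 *   new BABYLON.LensFlare(0.2, 0.0, new BABYLON.Color3(1, 1, 1), "textures/flare.png", system);
 *   new BABYLON.LensFlare(0.1, 0.6, new BABYLON.Color3(0.6, 0.4, 1), "textures/flare.png", system);
 *   occluder.isBlocker = true;   // only meshes flagged as blockers occlude the flares
 */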
r : s; n -= this.viewportBorder, n > this.borderLimit && (n = this.borderLimit); let a = 1 - yt.Clamp(n / this.borderLimit, 0, 1); if (a < 0) return !1; a > 1 && (a = 1), this.viewportBorder > 0 && (i.x += this.viewportBorder, i.y += this.viewportBorder, i.width -= this.viewportBorder * 2, i.height -= this.viewportBorder * 2, this._positionX -= this.viewportBorder, this._positionY -= this.viewportBorder); const l = i.x + i.width / 2, o = i.y + i.height / 2, u = l - this._positionX, h = o - this._positionY; e.setState(!1), e.setDepthBuffer(!1); for (let d = 0; d < this.lensFlares.length; d++) { const f = this.lensFlares[d]; if (!f._drawWrapper.effect.isReady() || f.texture && !f.texture.isReady()) continue; e.enableEffect(f._drawWrapper), e.bindBuffers(this._vertexBuffers, this._indexBuffer, f._drawWrapper.effect), e.setAlphaMode(f.alphaMode); const p = l - u * f.position, m = o - h * f.position, _ = f.size, v = f.size * e.getAspectRatio(this._scene.activeCamera, !0), C = 2 * (p / (i.width + i.x * 2)) - 1, x = 1 - 2 * (m / (i.height + i.y * 2)), b = Ae.FromValues(_ / 2, 0, 0, 0, 0, v / 2, 0, 0, 0, 0, 1, 0, C, x, 0, 1); f._drawWrapper.effect.setMatrix("viewportMatrix", b), f._drawWrapper.effect.setTexture("textureSampler", f.texture), f._drawWrapper.effect.setFloat4("color", f.color.r * a, f.color.g * a, f.color.b * a, 1), e.drawElementsType(At.TriangleFillMode, 0, 6); } return e.setDepthBuffer(!0), e.setAlphaMode(0), !0; } /** * Rebuilds the lens flare system */ rebuild() { var e; this._createIndexBuffer(); for (const t in this._vertexBuffers) (e = this._vertexBuffers[t]) === null || e === void 0 || e._rebuild(); } /** * Dispose and release the lens flare with its associated resources. */ dispose() { const e = this._vertexBuffers[Y.PositionKind]; for (e && (e.dispose(), this._vertexBuffers[Y.PositionKind] = null), this._indexBuffer && (this._scene.getEngine()._releaseBuffer(this._indexBuffer), this._indexBuffer = null); this.lensFlares.length; ) this.lensFlares[0].dispose(); const t = this._scene.lensFlareSystems.indexOf(this); this._scene.lensFlareSystems.splice(t, 1); } /** * Parse a lens flare system from a JSON representation * @param parsedLensFlareSystem Define the JSON to parse * @param scene Define the scene the parsed system should be instantiated in * @param rootUrl Define the rootUrl of the load sequence to easily find a load relative dependencies such as textures * @returns the parsed system */ static Parse(e, t, i) { const r = t.getLastEntryById(e.emitterId), s = e.name || "lensFlareSystem#" + e.emitterId, n = new CP(s, r, t); n.id = e.id || s, n.borderLimit = e.borderLimit; for (let a = 0; a < e.flares.length; a++) { const l = e.flares[a]; IU.AddFlare(l.size, l.position, ze.FromArray(l.color), l.textureName ? i + l.textureName : "", n); } return n; } /** * Serialize the current Lens Flare System into a JSON representation. * @returns the serialized JSON */ serialize() { const e = {}; e.id = this.id, e.name = this.name, e.emitterId = this.getEmitter().id, e.borderLimit = this.borderLimit, e.flares = []; for (let t = 0; t < this.lensFlares.length; t++) { const i = this.lensFlares[t]; e.flares.push({ size: i.size, position: i.position, color: i.color.asArray(), textureName: Ve.GetFilename(i.texture ? 
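/*
 * Scene-level helpers registered right after this class (addLensFlareSystem,
 * removeLensFlareSystem, getLensFlareSystemByName / ById); `system` is the one from the
 * previous note:
 *
 *   scene.getLensFlareSystemByName("flares");            // returns the system, or null
 *   const index = scene.removeLensFlareSystem(system);   // returns the index it occupied, or -1
 *   scene.addLensFlareSystem(system);                     // register it again
 *   const json = system.serialize();   // { id, name, emitterId, borderLimit, flares: [...] }
 */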
i.texture.name : "") }); } return e; } } CP._SceneComponentInitialization = (c) => { throw yr("LensFlareSystemSceneComponent"); }; Yl.AddParser(Bt.NAME_LENSFLARESYSTEM, (c, e, t, i) => { if (c.lensFlareSystems !== void 0 && c.lensFlareSystems !== null) { t.lensFlareSystems || (t.lensFlareSystems = []); for (let r = 0, s = c.lensFlareSystems.length; r < s; r++) { const n = c.lensFlareSystems[r], a = CP.Parse(n, e, i); t.lensFlareSystems.push(a); } } }); Yl.prototype.getLensFlareSystemByName = function(c) { for (let e = 0; e < this.lensFlareSystems.length; e++) if (this.lensFlareSystems[e].name === c) return this.lensFlareSystems[e]; return null; }; Yl.prototype.getLensFlareSystemById = function(c) { for (let e = 0; e < this.lensFlareSystems.length; e++) if (this.lensFlareSystems[e].id === c) return this.lensFlareSystems[e]; return null; }; Yl.prototype.getLensFlareSystemByID = function(c) { return this.getLensFlareSystemById(c); }; Yl.prototype.removeLensFlareSystem = function(c) { const e = this.lensFlareSystems.indexOf(c); return e !== -1 && this.lensFlareSystems.splice(e, 1), e; }; Yl.prototype.addLensFlareSystem = function(c) { this.lensFlareSystems.push(c); }; class pre { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_LENSFLARESYSTEM, this.scene = e, e.lensFlareSystems = []; } /** * Registers the component in a given scene */ register() { this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_LENSFLARESYSTEM, this, this._draw); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { for (let e = 0; e < this.scene.lensFlareSystems.length; e++) this.scene.lensFlareSystems[e].rebuild(); } /** * Adds all the elements from the container to the scene * @param container the container holding the elements */ addFromContainer(e) { e.lensFlareSystems && e.lensFlareSystems.forEach((t) => { this.scene.addLensFlareSystem(t); }); } /** * Removes all the elements in the container from the scene * @param container contains the elements to remove * @param dispose if the removed element should be disposed (default: false) */ removeFromContainer(e, t) { e.lensFlareSystems && e.lensFlareSystems.forEach((i) => { this.scene.removeLensFlareSystem(i), t && i.dispose(); }); } /** * Serializes the component data to the specified json object * @param serializationObject The object to serialize to */ serialize(e) { e.lensFlareSystems = []; const t = this.scene.lensFlareSystems; for (const i of t) e.lensFlareSystems.push(i.serialize()); } /** * Disposes the component and the associated resources. 
*/ dispose() { const e = this.scene.lensFlareSystems; for (; e.length; ) e[0].dispose(); } _draw(e) { if (this.scene.lensFlaresEnabled) { const t = this.scene.lensFlareSystems; Ve.StartPerformanceCounter("Lens flares", t.length > 0); for (const i of t) e.layerMask & i.layerMask && i.render(); Ve.EndPerformanceCounter("Lens flares", t.length > 0); } } } CP._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_LENSFLARESYSTEM); e || (e = new pre(c), c._addComponent(e)); }; const U2e = "bayerDitherFunctions", V2e = `float bayerDither2(vec2 _P) {return mod(2.0*_P.y+_P.x+1.0,4.0);} float bayerDither4(vec2 _P) {vec2 P1=mod(_P,2.0); vec2 P2=floor(0.5*mod(_P,4.0)); return 4.0*bayerDither2(P1)+bayerDither2(P2);} float bayerDither8(vec2 _P) {vec2 P1=mod(_P,2.0); vec2 P2=floor(0.5 *mod(_P,4.0)); vec2 P4=floor(0.25*mod(_P,8.0)); return 4.0*(4.0*bayerDither2(P1)+bayerDither2(P2))+bayerDither2(P4);} `; je.IncludesShadersStore[U2e] = V2e; const k2e = "shadowMapFragmentExtraDeclaration", z2e = `#if SM_FLOAT==0 #include #endif #if SM_SOFTTRANSPARENTSHADOW==1 #include uniform float softTransparentShadowSM; #endif varying float vDepthMetricSM; #if SM_USEDISTANCE==1 uniform vec3 lightDataSM;varying vec3 vPositionWSM; #endif uniform vec3 biasAndScaleSM;uniform vec2 depthValuesSM; #if defined(SM_DEPTHCLAMP) && SM_DEPTHCLAMP==1 varying float zSM; #endif `; je.IncludesShadersStore[k2e] = z2e; const H2e = "shadowMapFragment", G2e = `float depthSM=vDepthMetricSM; #if defined(SM_DEPTHCLAMP) && SM_DEPTHCLAMP==1 #if SM_USEDISTANCE==1 depthSM=(length(vPositionWSM-lightDataSM)+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #else #ifdef USE_REVERSE_DEPTHBUFFER depthSM=(-zSM+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #else depthSM=(zSM+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #endif #endif #ifdef USE_REVERSE_DEPTHBUFFER gl_FragDepth=clamp(1.0-depthSM,0.0,1.0); #else gl_FragDepth=clamp(depthSM,0.0,1.0); #endif #elif SM_USEDISTANCE==1 depthSM=(length(vPositionWSM-lightDataSM)+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #endif #if SM_ESM==1 depthSM=clamp(exp(-min(87.,biasAndScaleSM.z*depthSM)),0.,1.); #endif #if SM_FLOAT==1 gl_FragColor=vec4(depthSM,1.0,1.0,1.0); #else gl_FragColor=pack(depthSM); #endif return;`; je.IncludesShadersStore[H2e] = G2e; const K2e = "shadowMapPixelShader", W2e = `#include #ifdef ALPHATEXTURE varying vec2 vUV;uniform sampler2D diffuseSampler; #endif #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #include #ifdef ALPHATEXTURE float alphaFromAlphaTexture=texture2D(diffuseSampler,vUV).a; #ifdef ALPHATESTVALUE if (alphaFromAlphaTexture=softTransparentShadowSM*alphaFromAlphaTexture) discard; #else if ((bayerDither8(floor(mod(gl_FragCoord.xy,8.0))))/64.0>=softTransparentShadowSM) discard; #endif #endif #include }`; je.ShadersStore[K2e] = W2e; const j2e = "sceneVertexDeclaration", X2e = `uniform mat4 viewProjection; #ifdef MULTIVIEW uniform mat4 viewProjectionR; #endif uniform mat4 view;uniform mat4 projection;uniform vec4 vEyePosition; `; je.IncludesShadersStore[j2e] = X2e; const Y2e = "meshVertexDeclaration", Q2e = `uniform mat4 world;uniform float visibility; `; je.IncludesShadersStore[Y2e] = Q2e; const $2e = "shadowMapVertexDeclaration", Z2e = `#include #include `; je.IncludesShadersStore[$2e] = Z2e; const q2e = "shadowMapUboDeclaration", J2e = `layout(std140,column_major) uniform; #include #include `; je.IncludesShadersStore[q2e] = J2e; const eme = "shadowMapVertexExtraDeclaration", tme = `#if SM_NORMALBIAS==1 uniform vec3 lightDataSM; 
#endif uniform vec3 biasAndScaleSM;uniform vec2 depthValuesSM;varying float vDepthMetricSM; #if SM_USEDISTANCE==1 varying vec3 vPositionWSM; #endif #if defined(SM_DEPTHCLAMP) && SM_DEPTHCLAMP==1 varying float zSM; #endif `; je.IncludesShadersStore[eme] = tme; const ime = "shadowMapVertexNormalBias", rme = `#if SM_NORMALBIAS==1 #if SM_DIRECTIONINLIGHTDATA==1 vec3 worldLightDirSM=normalize(-lightDataSM.xyz); #else vec3 directionToLightSM=lightDataSM.xyz-worldPos.xyz;vec3 worldLightDirSM=normalize(directionToLightSM); #endif float ndlSM=dot(vNormalW,worldLightDirSM);float sinNLSM=sqrt(1.0-ndlSM*ndlSM);float normalBiasSM=biasAndScaleSM.y*sinNLSM;worldPos.xyz-=vNormalW*normalBiasSM; #endif `; je.IncludesShadersStore[ime] = rme; const sme = "shadowMapVertexMetric", nme = `#if SM_USEDISTANCE==1 vPositionWSM=worldPos.xyz; #endif #if SM_DEPTHTEXTURE==1 #ifdef IS_NDC_HALF_ZRANGE #define BIASFACTOR 0.5 #else #define BIASFACTOR 1.0 #endif #ifdef USE_REVERSE_DEPTHBUFFER gl_Position.z-=biasAndScaleSM.x*gl_Position.w*BIASFACTOR; #else gl_Position.z+=biasAndScaleSM.x*gl_Position.w*BIASFACTOR; #endif #endif #if defined(SM_DEPTHCLAMP) && SM_DEPTHCLAMP==1 zSM=gl_Position.z;gl_Position.z=0.0; #elif SM_USEDISTANCE==0 #ifdef USE_REVERSE_DEPTHBUFFER vDepthMetricSM=(-gl_Position.z+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #else vDepthMetricSM=(gl_Position.z+depthValuesSM.x)/depthValuesSM.y+biasAndScaleSM.x; #endif #endif `; je.IncludesShadersStore[sme] = nme; const ame = "shadowMapVertexShader", ome = `attribute vec3 position; #ifdef NORMAL attribute vec3 normal; #endif #include #include #include #include[0..maxSimultaneousMorphTargets] #ifdef INSTANCES attribute vec4 world0;attribute vec4 world1;attribute vec4 world2;attribute vec4 world3; #endif #include #include<__decl__shadowMapVertex> #ifdef ALPHATEXTURE varying vec2 vUV;uniform mat4 diffuseMatrix; #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #endif #include #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position; #ifdef UV1 vec2 uvUpdated=uv; #endif #ifdef NORMAL vec3 normalUpdated=normal; #endif #include #include[0..maxSimultaneousMorphTargets] #include #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0); #ifdef NORMAL mat3 normWorldSM=mat3(finalWorld); #if defined(INSTANCES) && defined(THIN_INSTANCES) vec3 vNormalW=normalUpdated/vec3(dot(normWorldSM[0],normWorldSM[0]),dot(normWorldSM[1],normWorldSM[1]),dot(normWorldSM[2],normWorldSM[2]));vNormalW=normalize(normWorldSM*vNormalW); #else #ifdef NONUNIFORMSCALING normWorldSM=transposeMat3(inverseMat3(normWorldSM)); #endif vec3 vNormalW=normalize(normWorldSM*normalUpdated); #endif #endif #include gl_Position=viewProjection*worldPos; #include #ifdef ALPHATEXTURE #ifdef UV1 vUV=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef UV2 vUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #endif #endif #include }`; je.ShadersStore[ame] = ome; const lme = "depthBoxBlurPixelShader", cme = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec2 screenSize; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 colorDepth=vec4(0.0);for (int x=-OFFSET; x<=OFFSET; x++) for (int y=-OFFSET; y<=OFFSET; y++) colorDepth+=texture2D(textureSampler,vUV+vec2(x,y)/screenSize);gl_FragColor=(colorDepth/float((OFFSET*2+1)*(OFFSET*2+1)));}`; je.ShadersStore[lme] = cme; const ume = "shadowMapFragmentSoftTransparentShadow", hme = `#if SM_SOFTTRANSPARENTSHADOW==1 if 
((bayerDither8(floor(mod(gl_FragCoord.xy,8.0))))/64.0>=softTransparentShadowSM*alpha) discard; #endif `; je.IncludesShadersStore[ume] = hme; class hr { /** * Gets the bias: offset applied on the depth preventing acnea (in light direction). */ get bias() { return this._bias; } /** * Sets the bias: offset applied on the depth preventing acnea (in light direction). */ set bias(e) { this._bias = e; } /** * Gets the normalBias: offset applied on the depth preventing acnea (along side the normal direction and proportional to the light/normal angle). */ get normalBias() { return this._normalBias; } /** * Sets the normalBias: offset applied on the depth preventing acnea (along side the normal direction and proportional to the light/normal angle). */ set normalBias(e) { this._normalBias = e; } /** * Gets the blur box offset: offset applied during the blur pass. * Only useful if useKernelBlur = false */ get blurBoxOffset() { return this._blurBoxOffset; } /** * Sets the blur box offset: offset applied during the blur pass. * Only useful if useKernelBlur = false */ set blurBoxOffset(e) { this._blurBoxOffset !== e && (this._blurBoxOffset = e, this._disposeBlurPostProcesses()); } /** * Gets the blur scale: scale of the blurred texture compared to the main shadow map. * 2 means half of the size. */ get blurScale() { return this._blurScale; } /** * Sets the blur scale: scale of the blurred texture compared to the main shadow map. * 2 means half of the size. */ set blurScale(e) { this._blurScale !== e && (this._blurScale = e, this._disposeBlurPostProcesses()); } /** * Gets the blur kernel: kernel size of the blur pass. * Only useful if useKernelBlur = true */ get blurKernel() { return this._blurKernel; } /** * Sets the blur kernel: kernel size of the blur pass. * Only useful if useKernelBlur = true */ set blurKernel(e) { this._blurKernel !== e && (this._blurKernel = e, this._disposeBlurPostProcesses()); } /** * Gets whether the blur pass is a kernel blur (if true) or box blur. * Only useful in filtered mode (useBlurExponentialShadowMap...) */ get useKernelBlur() { return this._useKernelBlur; } /** * Sets whether the blur pass is a kernel blur (if true) or box blur. * Only useful in filtered mode (useBlurExponentialShadowMap...) */ set useKernelBlur(e) { this._useKernelBlur !== e && (this._useKernelBlur = e, this._disposeBlurPostProcesses()); } /** * Gets the depth scale used in ESM mode. */ get depthScale() { return this._depthScale !== void 0 ? this._depthScale : this._light.getDepthScale(); } /** * Sets the depth scale used in ESM mode. * This can override the scale stored on the light. */ set depthScale(e) { this._depthScale = e; } _validateFilter(e) { return e; } /** * Gets the current mode of the shadow generator (normal, PCF, ESM...). * The returned value is a number equal to one of the available mode defined in ShadowMap.FILTER_x like _FILTER_NONE */ get filter() { return this._filter; } /** * Sets the current mode of the shadow generator (normal, PCF, ESM...). 
* The returned value is a number equal to one of the available mode defined in ShadowMap.FILTER_x like _FILTER_NONE */ set filter(e) { if (e = this._validateFilter(e), this._light.needCube()) { if (e === hr.FILTER_BLUREXPONENTIALSHADOWMAP) { this.useExponentialShadowMap = !0; return; } else if (e === hr.FILTER_BLURCLOSEEXPONENTIALSHADOWMAP) { this.useCloseExponentialShadowMap = !0; return; } else if (e === hr.FILTER_PCF || e === hr.FILTER_PCSS) { this.usePoissonSampling = !0; return; } } if ((e === hr.FILTER_PCF || e === hr.FILTER_PCSS) && !this._scene.getEngine()._features.supportShadowSamplers) { this.usePoissonSampling = !0; return; } this._filter !== e && (this._filter = e, this._disposeBlurPostProcesses(), this._applyFilterValues(), this._light._markMeshesAsLightDirty()); } /** * Gets if the current filter is set to Poisson Sampling. */ get usePoissonSampling() { return this.filter === hr.FILTER_POISSONSAMPLING; } /** * Sets the current filter to Poisson Sampling. */ set usePoissonSampling(e) { const t = this._validateFilter(hr.FILTER_POISSONSAMPLING); !e && this.filter !== hr.FILTER_POISSONSAMPLING || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets if the current filter is set to ESM. */ get useExponentialShadowMap() { return this.filter === hr.FILTER_EXPONENTIALSHADOWMAP; } /** * Sets the current filter is to ESM. */ set useExponentialShadowMap(e) { const t = this._validateFilter(hr.FILTER_EXPONENTIALSHADOWMAP); !e && this.filter !== hr.FILTER_EXPONENTIALSHADOWMAP || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets if the current filter is set to filtered ESM. */ get useBlurExponentialShadowMap() { return this.filter === hr.FILTER_BLUREXPONENTIALSHADOWMAP; } /** * Gets if the current filter is set to filtered ESM. */ set useBlurExponentialShadowMap(e) { const t = this._validateFilter(hr.FILTER_BLUREXPONENTIALSHADOWMAP); !e && this.filter !== hr.FILTER_BLUREXPONENTIALSHADOWMAP || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets if the current filter is set to "close ESM" (using the inverse of the * exponential to prevent steep falloff artifacts). */ get useCloseExponentialShadowMap() { return this.filter === hr.FILTER_CLOSEEXPONENTIALSHADOWMAP; } /** * Sets the current filter to "close ESM" (using the inverse of the * exponential to prevent steep falloff artifacts). */ set useCloseExponentialShadowMap(e) { const t = this._validateFilter(hr.FILTER_CLOSEEXPONENTIALSHADOWMAP); !e && this.filter !== hr.FILTER_CLOSEEXPONENTIALSHADOWMAP || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets if the current filter is set to filtered "close ESM" (using the inverse of the * exponential to prevent steep falloff artifacts). */ get useBlurCloseExponentialShadowMap() { return this.filter === hr.FILTER_BLURCLOSEEXPONENTIALSHADOWMAP; } /** * Sets the current filter to filtered "close ESM" (using the inverse of the * exponential to prevent steep falloff artifacts). */ set useBlurCloseExponentialShadowMap(e) { const t = this._validateFilter(hr.FILTER_BLURCLOSEEXPONENTIALSHADOWMAP); !e && this.filter !== hr.FILTER_BLURCLOSEEXPONENTIALSHADOWMAP || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets if the current filter is set to "PCF" (percentage closer filtering). */ get usePercentageCloserFiltering() { return this.filter === hr.FILTER_PCF; } /** * Sets the current filter to "PCF" (percentage closer filtering). */ set usePercentageCloserFiltering(e) { const t = this._validateFilter(hr.FILTER_PCF); !e && this.filter !== hr.FILTER_PCF || (this.filter = e ? 
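/* The boolean setters above (usePoissonSampling, useExponentialShadowMap, usePercentageCloserFiltering, ...) are mutually exclusive views over the single `filter` value; note the fallbacks in `set filter`: cube shadow maps (point lights) and engines without shadow-sampler support silently switch PCF/PCSS to Poisson sampling. A sketch using the public ShadowGenerator name (the minified `hr` class, see hr.CLASSNAME below); values are illustrative.

   import { DirectionalLight, ShadowGenerator, Vector3 } from "@babylonjs/core";

   function createPcfShadows(scene) {
     const light = new DirectionalLight("dir", new Vector3(-1, -2, -1), scene);
     const generator = new ShadowGenerator(1024, light);
     generator.usePercentageCloserFiltering = true;              // sets filter to FILTER_PCF
     generator.filteringQuality = ShadowGenerator.QUALITY_MEDIUM; // trades softness for performance
     return generator;
   }
*/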
t : hr.FILTER_NONE); } /** * Gets the PCF or PCSS Quality. * Only valid if usePercentageCloserFiltering or useContactHardeningShadow is true. */ get filteringQuality() { return this._filteringQuality; } /** * Sets the PCF or PCSS Quality. * Only valid if usePercentageCloserFiltering or useContactHardeningShadow is true. */ set filteringQuality(e) { this._filteringQuality !== e && (this._filteringQuality = e, this._disposeBlurPostProcesses(), this._applyFilterValues(), this._light._markMeshesAsLightDirty()); } /** * Gets if the current filter is set to "PCSS" (contact hardening). */ get useContactHardeningShadow() { return this.filter === hr.FILTER_PCSS; } /** * Sets the current filter to "PCSS" (contact hardening). */ set useContactHardeningShadow(e) { const t = this._validateFilter(hr.FILTER_PCSS); !e && this.filter !== hr.FILTER_PCSS || (this.filter = e ? t : hr.FILTER_NONE); } /** * Gets the Light Size (in shadow map UV units) used in PCSS to determine the blocker search area and the penumbra size. * Using a ratio keeps the shape stable independently of the map size. * * It does not account for the light projection, as doing so caused too much * instability during light setup or when the light position changes. * * Only valid if useContactHardeningShadow is true. */ get contactHardeningLightSizeUVRatio() { return this._contactHardeningLightSizeUVRatio; } /** * Sets the Light Size (in shadow map UV units) used in PCSS to determine the blocker search area and the penumbra size. * Using a ratio keeps the shape stable independently of the map size. * * It does not account for the light projection, as doing so caused too much * instability during light setup or when the light position changes. * * Only valid if useContactHardeningShadow is true. */ set contactHardeningLightSizeUVRatio(e) { this._contactHardeningLightSizeUVRatio = e; } /** Gets or sets the actual darkness of a shadow */ get darkness() { return this._darkness; } set darkness(e) { this.setDarkness(e); } /** * Returns the darkness value (float). This can only decrease the actual darkness of a shadow. * 0 means strongest and 1 means no shadow. * @returns the darkness. */ getDarkness() { return this._darkness; } /** * Sets the darkness value (float). This can only decrease the actual darkness of a shadow. * @param darkness The darkness value: 0 means strongest and 1 means no shadow. * @returns the shadow generator allowing fluent coding. */ setDarkness(e) { return e >= 1 ? this._darkness = 1 : e <= 0 ? this._darkness = 0 : this._darkness = e, this; } /** Gets or sets the ability to have transparent shadows */ get transparencyShadow() { return this._transparencyShadow; } set transparencyShadow(e) { this.setTransparencyShadow(e); } /** * Sets the ability to have transparent shadows (boolean). * @param transparent True if transparent, else false * @returns the shadow generator allowing fluent coding */ setTransparencyShadow(e) { return this._transparencyShadow = e, this; } /** * Gets the main RTT containing the shadow map (usually storing depth from the light point of view). * @returns The render target texture if present, otherwise null */ getShadowMap() { return this._shadowMap; } /** * Gets the RTT used during rendering (can be a blurred version of the shadow map or the shadow map itself). * @returns The render target texture if the shadow map is present, otherwise null */ getShadowMapForRendering() { return this._shadowMap2 ?
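/* Contact-hardening (PCSS) shadows use the same generator; contactHardeningLightSizeUVRatio is the assumed light size in shadow-map UV units and governs the blocker search area and penumbra width. Sketch under the same public-name assumption as above; the numeric values are only starting points.

   import { ShadowGenerator } from "@babylonjs/core";

   function makeContactHardening(generator) {
     generator.useContactHardeningShadow = true;         // sets filter to FILTER_PCSS
     generator.contactHardeningLightSizeUVRatio = 0.05;  // light size in shadow-map UV units
     generator.filteringQuality = ShadowGenerator.QUALITY_HIGH;
     generator.setDarkness(0.2);                         // 0 = strongest shadow, 1 = no shadow
     return generator;
   }
*/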
this._shadowMap2 : this._shadowMap; } /** * Gets the class name of that object * @returns "ShadowGenerator" */ getClassName() { return hr.CLASSNAME; } /** * Helper function to add a mesh and its descendants to the list of shadow casters. * @param mesh Mesh to add * @param includeDescendants boolean indicating if the descendants should be added. Default to true * @returns the Shadow Generator itself */ addShadowCaster(e, t = !0) { if (!this._shadowMap) return this; if (this._shadowMap.renderList || (this._shadowMap.renderList = []), this._shadowMap.renderList.indexOf(e) === -1 && this._shadowMap.renderList.push(e), t) for (const i of e.getChildMeshes()) this._shadowMap.renderList.indexOf(i) === -1 && this._shadowMap.renderList.push(i); return this; } /** * Helper function to remove a mesh and its descendants from the list of shadow casters * @param mesh Mesh to remove * @param includeDescendants boolean indicating if the descendants should be removed. Default to true * @returns the Shadow Generator itself */ removeShadowCaster(e, t = !0) { if (!this._shadowMap || !this._shadowMap.renderList) return this; const i = this._shadowMap.renderList.indexOf(e); if (i !== -1 && this._shadowMap.renderList.splice(i, 1), t) for (const r of e.getChildren()) this.removeShadowCaster(r); return this; } /** * Returns the associated light object. * @returns the light generating the shadow */ getLight() { return this._light; } _getCamera() { var e; return (e = this._camera) !== null && e !== void 0 ? e : this._scene.activeCamera; } /** * Gets or sets the size of the texture what stores the shadows */ get mapSize() { return this._mapSize; } set mapSize(e) { this._mapSize = e, this._light._markMeshesAsLightDirty(), this.recreateShadowMap(); } /** * Creates a ShadowGenerator object. * A ShadowGenerator is the required tool to use the shadows. * Each light casting shadows needs to use its own ShadowGenerator. * Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/shadows * @param mapSize The size of the texture what stores the shadows. Example : 1024. * @param light The light object generating the shadows. * @param usefullFloatFirst By default the generator will try to use half float textures but if you need precision (for self shadowing for instance), you can use this option to enforce full float texture. * @param camera Camera associated with this shadow generator (default: null). 
If null, takes the scene active camera at the time we need to access it * @param useRedTextureType Forces the generator to use a Red instead of a RGBA type for the shadow map texture format (default: false) */ constructor(e, t, i, r, s) { this.onBeforeShadowMapRenderObservable = new Fe(), this.onAfterShadowMapRenderObservable = new Fe(), this.onBeforeShadowMapRenderMeshObservable = new Fe(), this.onAfterShadowMapRenderMeshObservable = new Fe(), this._bias = 5e-5, this._normalBias = 0, this._blurBoxOffset = 1, this._blurScale = 2, this._blurKernel = 1, this._useKernelBlur = !1, this._filter = hr.FILTER_NONE, this._filteringQuality = hr.QUALITY_HIGH, this._contactHardeningLightSizeUVRatio = 0.1, this._darkness = 0, this._transparencyShadow = !1, this.enableSoftTransparentShadow = !1, this.useOpacityTextureForTransparentShadow = !1, this.frustumEdgeFalloff = 0, this.forceBackFacesOnly = !1, this._lightDirection = D.Zero(), this._viewMatrix = Ae.Zero(), this._projectionMatrix = Ae.Zero(), this._transformMatrix = Ae.Zero(), this._cachedPosition = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cachedDirection = new D(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._currentFaceIndex = 0, this._currentFaceIndexCache = 0, this._defaultTextureMatrix = Ae.Identity(), this._mapSize = e, this._light = t, this._scene = t.getScene(), this._camera = r ?? null, this._useRedTextureType = !!s; let n = t._shadowGenerators; n || (n = t._shadowGenerators = /* @__PURE__ */ new Map()), n.set(this._camera, this), this.id = t.id, this._useUBO = this._scene.getEngine().supportsUniformBuffers, this._useUBO && (this._sceneUBOs = [], this._sceneUBOs.push(this._scene.createSceneUniformBuffer(`Scene for Shadow Generator (light "${this._light.name}")`))), hr._SceneComponentInitialization(this._scene); const a = this._scene.getEngine().getCaps(); i ? a.textureFloatRender && a.textureFloatLinearFiltering ? this._textureType = 1 : a.textureHalfFloatRender && a.textureHalfFloatLinearFiltering ? this._textureType = 2 : this._textureType = 0 : a.textureHalfFloatRender && a.textureHalfFloatLinearFiltering ? this._textureType = 2 : a.textureFloatRender && a.textureFloatLinearFiltering ? this._textureType = 1 : this._textureType = 0, this._initializeGenerator(), this._applyFilterValues(); } _initializeGenerator() { this._light._markMeshesAsLightDirty(), this._initializeShadowMap(); } _createTargetRenderTexture() { const e = this._scene.getEngine(); e._features.supportDepthStencilTexture ? (this._shadowMap = new ra(this._light.name + "_shadowMap", this._mapSize, this._scene, !1, !0, this._textureType, this._light.needCube(), void 0, !1, !1, void 0, this._useRedTextureType ? 6 : 5), this._shadowMap.createDepthStencilTexture(e.useReverseDepthBuffer ? 
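/* End-to-end sketch of the constructor documented above, using the public names (ShadowGenerator, DirectionalLight, MeshBuilder) from @babylonjs/core; mesh names and bias values are illustrative.

   import { DirectionalLight, MeshBuilder, ShadowGenerator, Vector3 } from "@babylonjs/core";

   function setUpShadows(scene) {
     const light = new DirectionalLight("sun", new Vector3(-1, -2, -1), scene);
     const box = MeshBuilder.CreateBox("box", { size: 2 }, scene);
     box.position.y = 2;
     const ground = MeshBuilder.CreateGround("ground", { width: 20, height: 20 }, scene);

     // mapSize = 1024; passing true as the third argument requests a full-float shadow map
     // when precision matters (see the constructor documentation above)
     const generator = new ShadowGenerator(1024, light);
     generator.addShadowCaster(box, true); // true also adds descendants to the render list
     ground.receiveShadows = true;

     generator.bias = 0.0005;     // depth offset against shadow acne
     generator.normalBias = 0.01; // extra offset along the surface normal
     return generator;
   }
*/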
516 : 513, !0)) : this._shadowMap = new ra(this._light.name + "_shadowMap", this._mapSize, this._scene, !1, !0, this._textureType, this._light.needCube()), this._shadowMap.noPrePassRenderer = !0; } _initializeShadowMap() { if (this._createTargetRenderTexture(), this._shadowMap === null) return; this._shadowMap.wrapU = De.CLAMP_ADDRESSMODE, this._shadowMap.wrapV = De.CLAMP_ADDRESSMODE, this._shadowMap.anisotropicFilteringLevel = 1, this._shadowMap.updateSamplingMode(De.BILINEAR_SAMPLINGMODE), this._shadowMap.renderParticles = !1, this._shadowMap.ignoreCameraViewport = !0, this._storedUniqueId && (this._shadowMap.uniqueId = this._storedUniqueId), this._shadowMap.customRenderFunction = (r, s, n, a) => this._renderForShadowMap(r, s, n, a), this._shadowMap.customIsReadyFunction = () => !0; const e = this._scene.getEngine(); this._shadowMap.onBeforeBindObservable.add(() => { var r; this._currentSceneUBO = this._scene.getSceneUniformBuffer(), (r = e._debugPushGroup) === null || r === void 0 || r.call(e, `shadow map generation for pass id ${e.currentRenderPassId}`, 1); }), this._shadowMap.onBeforeRenderObservable.add((r) => { this._sceneUBOs && this._scene.setSceneUniformBuffer(this._sceneUBOs[0]), this._currentFaceIndex = r, this._filter === hr.FILTER_PCF && e.setColorWrite(!1), this.getTransformMatrix(), this._scene.setTransformMatrix(this._viewMatrix, this._projectionMatrix), this._useUBO && (this._scene.getSceneUniformBuffer().unbindEffect(), this._scene.finalizeSceneUbo()); }), this._shadowMap.onAfterUnbindObservable.add(() => { var r, s; if (this._sceneUBOs && this._scene.setSceneUniformBuffer(this._currentSceneUBO), this._scene.updateTransformMatrix(), this._filter === hr.FILTER_PCF && e.setColorWrite(!0), !this.useBlurExponentialShadowMap && !this.useBlurCloseExponentialShadowMap) { (r = e._debugPopGroup) === null || r === void 0 || r.call(e, 1); return; } const n = this.getShadowMapForRendering(); n && (this._scene.postProcessManager.directRender(this._blurPostProcesses, n.renderTarget, !0), e.unBindFramebuffer(n.renderTarget, !0), (s = e._debugPopGroup) === null || s === void 0 || s.call(e, 1)); }); const t = new Et(0, 0, 0, 0), i = new Et(1, 1, 1, 1); this._shadowMap.onClearObservable.add((r) => { this._filter === hr.FILTER_PCF ? r.clear(i, !1, !0, !1) : this.useExponentialShadowMap || this.useBlurExponentialShadowMap ? r.clear(t, !0, !0, !1) : r.clear(i, !0, !0, !1); }), this._shadowMap.onResizeObservable.add((r) => { this._storedUniqueId = this._shadowMap.uniqueId, this._mapSize = r.getRenderSize(), this._light._markMeshesAsLightDirty(), this.recreateShadowMap(); }); for (let r = Zh.MIN_RENDERINGGROUPS; r < Zh.MAX_RENDERINGGROUPS; r++) this._shadowMap.setRenderingAutoClearDepthStencil(r, !1); } _initializeBlurRTTAndPostProcesses() { const e = this._scene.getEngine(), t = this._mapSize / this.blurScale; (!this.useKernelBlur || this.blurScale !== 1) && (this._shadowMap2 = new ra(this._light.name + "_shadowMap2", t, this._scene, !1, !0, this._textureType, void 0, void 0, !1), this._shadowMap2.wrapU = De.CLAMP_ADDRESSMODE, this._shadowMap2.wrapV = De.CLAMP_ADDRESSMODE, this._shadowMap2.updateSamplingMode(De.BILINEAR_SAMPLINGMODE)), this.useKernelBlur ? 
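/* The blur path initialized above (the _shadowMap2 target plus blur post processes) is driven entirely by public properties. Sketch under the same public-name assumption; the kernel, scale and depth-scale values are illustrative.

   function makeBlurredEsm(generator) {
     generator.useBlurExponentialShadowMap = true; // ESM rendered through the blur post processes
     generator.useKernelBlur = true;               // kernel blur instead of the box blur
     generator.blurKernel = 32;                    // blur strength
     generator.blurScale = 2;                      // blur at half the shadow map resolution
     generator.depthScale = 50;                    // ESM exponent scale, overriding the light's default
     return generator;
   }
*/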
(this._kernelBlurXPostprocess = new fu(this._light.name + "KernelBlurX", new at(1, 0), this.blurKernel, 1, null, De.BILINEAR_SAMPLINGMODE, e, !1, this._textureType), this._kernelBlurXPostprocess.width = t, this._kernelBlurXPostprocess.height = t, this._kernelBlurXPostprocess.externalTextureSamplerBinding = !0, this._kernelBlurXPostprocess.onApplyObservable.add((i) => { i.setTexture("textureSampler", this._shadowMap); }), this._kernelBlurYPostprocess = new fu(this._light.name + "KernelBlurY", new at(0, 1), this.blurKernel, 1, null, De.BILINEAR_SAMPLINGMODE, e, !1, this._textureType), this._kernelBlurXPostprocess.autoClear = !1, this._kernelBlurYPostprocess.autoClear = !1, this._textureType === 0 && (this._kernelBlurXPostprocess.packedFloat = !0, this._kernelBlurYPostprocess.packedFloat = !0), this._blurPostProcesses = [this._kernelBlurXPostprocess, this._kernelBlurYPostprocess]) : (this._boxBlurPostprocess = new Bi(this._light.name + "DepthBoxBlur", "depthBoxBlur", ["screenSize", "boxOffset"], [], 1, null, De.BILINEAR_SAMPLINGMODE, e, !1, "#define OFFSET " + this._blurBoxOffset, this._textureType), this._boxBlurPostprocess.externalTextureSamplerBinding = !0, this._boxBlurPostprocess.onApplyObservable.add((i) => { i.setFloat2("screenSize", t, t), i.setTexture("textureSampler", this._shadowMap); }), this._boxBlurPostprocess.autoClear = !1, this._blurPostProcesses = [this._boxBlurPostprocess]); } _renderForShadowMap(e, t, i, r) { let s; if (r.length) for (s = 0; s < r.length; s++) this._renderSubMeshForShadowMap(r.data[s]); for (s = 0; s < e.length; s++) this._renderSubMeshForShadowMap(e.data[s]); for (s = 0; s < t.length; s++) this._renderSubMeshForShadowMap(t.data[s]); if (this._transparencyShadow) for (s = 0; s < i.length; s++) this._renderSubMeshForShadowMap(i.data[s], !0); else for (s = 0; s < i.length; s++) i.data[s].getEffectiveMesh()._internalAbstractMeshDataInfo._isActiveIntermediate = !1; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _bindCustomEffectForRenderSubMeshForShadowMap(e, t, i) { t.setMatrix("viewProjection", this.getTransformMatrix()); } _renderSubMeshForShadowMap(e, t = !1) { var i, r; const s = e.getRenderingMesh(), n = e.getEffectiveMesh(), a = this._scene, l = a.getEngine(), o = e.getMaterial(); if (n._internalAbstractMeshDataInfo._isActiveIntermediate = !1, !o || e.verticesCount === 0 || e._renderId === a.getRenderId()) return; const u = n._getWorldMatrixDeterminant() < 0; let h = (i = s.overrideMaterialSideOrientation) !== null && i !== void 0 ? i : o.sideOrientation; u && (h = h === 0 ? 1 : 0); const d = h === 0; l.setState(o.backFaceCulling, void 0, void 0, d, o.cullBackFaces); const f = s._getInstancesRenderList(e._id, !!e.getReplacementMesh()); if (f.mustReturn) return; const p = l.getCaps().instancedArrays && (f.visibleInstances[e._id] !== null && f.visibleInstances[e._id] !== void 0 || s.hasThinInstances); if (!(this.customAllowRendering && !this.customAllowRendering(e))) if (this.isReady(e, p, t)) { e._renderId = a.getRenderId(); const m = o.shadowDepthWrapper, _ = (r = m == null ? void 0 : m.getEffect(e, this, l.currentRenderPassId)) !== null && r !== void 0 ? r : e._getDrawWrapper(), v = $o.GetEffect(_); l.enableEffect(_), p || s._bind(e, v, o.fillMode), this.getTransformMatrix(), v.setFloat3("biasAndScaleSM", this.bias, this.normalBias, this.depthScale), this.getLight().getTypeID() === hs.LIGHTTYPEID_DIRECTIONALLIGHT ? 
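/* The transparent branch of the sub-mesh renderer (softTransparentShadowSM = visibility * alpha) only runs when the flags below are enabled on the generator; useOpacityTextureForTransparentShadow additionally samples the material's opacity texture instead of its alpha-test texture (see isReady() below). Sketch, public-name assumption as before; the mesh argument is hypothetical.

   function enableTransparentShadows(generator, semiTransparentMesh) {
     generator.transparencyShadow = true;                    // render transparent meshes into the shadow map
     generator.enableSoftTransparentShadow = true;           // Bayer-dithered discard proportional to alpha
     generator.useOpacityTextureForTransparentShadow = true; // optional
     generator.addShadowCaster(semiTransparentMesh);
     return generator;
   }
*/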
v.setVector3("lightDataSM", this._cachedDirection) : v.setVector3("lightDataSM", this._cachedPosition); const C = this._getCamera(); if (C && v.setFloat2("depthValuesSM", this.getLight().getDepthMinZ(C), this.getLight().getDepthMinZ(C) + this.getLight().getDepthMaxZ(C)), t && this.enableSoftTransparentShadow && v.setFloat("softTransparentShadowSM", n.visibility * o.alpha), m) e._setMainDrawWrapperOverride(_), m.standalone ? m.baseMaterial.bindForSubMesh(n.getWorldMatrix(), s, e) : o.bindForSubMesh(n.getWorldMatrix(), s, e), e._setMainDrawWrapperOverride(null); else { if (this._opacityTexture && (v.setTexture("diffuseSampler", this._opacityTexture), v.setMatrix("diffuseMatrix", this._opacityTexture.getTextureMatrix() || this._defaultTextureMatrix)), s.useBones && s.computeBonesUsingShaders && s.skeleton) { const b = s.skeleton; if (b.isUsingTextureForMatrices) { const S = b.getTransformMatrixTexture(s); if (!S) return; v.setTexture("boneSampler", S), v.setFloat("boneTextureWidth", 4 * (b.bones.length + 1)); } else v.setMatrices("mBones", b.getTransformMatrices(s)); } Ke.BindMorphTargetParameters(s, v), s.morphTargetManager && s.morphTargetManager.isUsingTextureForTargets && s.morphTargetManager._bind(v), Ec(v, o, a); } !this._useUBO && !m && this._bindCustomEffectForRenderSubMeshForShadowMap(e, v, n), Ke.BindSceneUniformBuffer(v, this._scene.getSceneUniformBuffer()), this._scene.getSceneUniformBuffer().bindUniformBuffer(); const x = n.getWorldMatrix(); p && (n.getMeshUniformBuffer().bindToEffect(v, "Mesh"), n.transferToEffect(x)), this.forceBackFacesOnly && l.setState(!0, 0, !1, !0, o.cullBackFaces), this.onBeforeShadowMapRenderMeshObservable.notifyObservers(s), this.onBeforeShadowMapRenderObservable.notifyObservers(v), s._processRendering(n, e, v, o.fillMode, f, p, (b, S) => { n !== s && !b ? (s.getMeshUniformBuffer().bindToEffect(v, "Mesh"), s.transferToEffect(S)) : (n.getMeshUniformBuffer().bindToEffect(v, "Mesh"), n.transferToEffect(b ? S : x)); }), this.forceBackFacesOnly && l.setState(!0, 0, !1, !1, o.cullBackFaces), this.onAfterShadowMapRenderObservable.notifyObservers(v), this.onAfterShadowMapRenderMeshObservable.notifyObservers(s); } else this._shadowMap && this._shadowMap.resetRefreshCounter(); } _applyFilterValues() { this._shadowMap && (this.filter === hr.FILTER_NONE || this.filter === hr.FILTER_PCSS ? this._shadowMap.updateSamplingMode(De.NEAREST_SAMPLINGMODE) : this._shadowMap.updateSamplingMode(De.BILINEAR_SAMPLINGMODE)); } /** * Forces all the attached effect to compile to enable rendering only once ready vs. lazily compiling effects. * @param onCompiled Callback triggered at the and of the effects compilation * @param options Sets of optional options forcing the compilation with different modes */ forceCompilation(e, t) { const i = Object.assign({ useInstances: !1 }, t), r = this.getShadowMap(); if (!r) { e && e(this); return; } const s = r.renderList; if (!s) { e && e(this); return; } const n = []; for (const o of s) n.push(...o.subMeshes); if (n.length === 0) { e && e(this); return; } let a = 0; const l = () => { var o, u; if (!(!this._scene || !this._scene.getEngine())) { for (; this.isReady(n[a], i.useInstances, (u = (o = n[a].getMaterial()) === null || o === void 0 ? void 0 : o.needAlphaBlendingForMesh(n[a].getMesh())) !== null && u !== void 0 ? u : !1); ) if (a++, a >= n.length) { e && e(this); return; } setTimeout(l, 16); } }; l(); } /** * Forces all the attached effect to compile to enable rendering only once ready vs. lazily compiling effects. 
* @param options Sets of optional options forcing the compilation with different modes * @returns A promise that resolves when the compilation completes */ forceCompilationAsync(e) { return new Promise((t) => { this.forceCompilation(() => { t(); }, e); }); } // eslint-disable-next-line @typescript-eslint/no-unused-vars _isReadyCustomDefines(e, t, i) { } _prepareShadowDefines(e, t, i, r) { i.push("#define SM_LIGHTTYPE_" + this._light.getClassName().toUpperCase()), i.push("#define SM_FLOAT " + (this._textureType !== 0 ? "1" : "0")), i.push("#define SM_ESM " + (this.useExponentialShadowMap || this.useBlurExponentialShadowMap ? "1" : "0")), i.push("#define SM_DEPTHTEXTURE " + (this.usePercentageCloserFiltering || this.useContactHardeningShadow ? "1" : "0")); const s = e.getMesh(); return i.push("#define SM_NORMALBIAS " + (this.normalBias && s.isVerticesDataPresent(Y.NormalKind) ? "1" : "0")), i.push("#define SM_DIRECTIONINLIGHTDATA " + (this.getLight().getTypeID() === hs.LIGHTTYPEID_DIRECTIONALLIGHT ? "1" : "0")), i.push("#define SM_USEDISTANCE " + (this._light.needCube() ? "1" : "0")), i.push("#define SM_SOFTTRANSPARENTSHADOW " + (this.enableSoftTransparentShadow && r ? "1" : "0")), this._isReadyCustomDefines(i, e, t), i; } /** * Determine whether the shadow generator is ready or not (mainly all effects and related post processes needs to be ready). * @param subMesh The submesh we want to render in the shadow map * @param useInstances Defines whether will draw in the map using instances * @param isTransparent Indicates that isReady is called for a transparent subMesh * @returns true if ready otherwise, false */ isReady(e, t, i) { var r; const s = e.getMaterial(), n = s == null ? void 0 : s.shadowDepthWrapper; if (this._opacityTexture = null, !s) return !1; const a = []; if (this._prepareShadowDefines(e, t, a, i), n) { if (!n.isReadyForSubMesh(e, a, this, t, this._scene.getEngine().currentRenderPassId)) return !1; } else { const l = e._getDrawWrapper(void 0, !0); let o = l.effect, u = l.defines; const h = [Y.PositionKind], d = e.getMesh(); this.normalBias && d.isVerticesDataPresent(Y.NormalKind) && (h.push(Y.NormalKind), a.push("#define NORMAL"), d.nonUniformScaling && a.push("#define NONUNIFORMSCALING")); const f = s.needAlphaTesting(); if ((f || s.needAlphaBlending()) && (this.useOpacityTextureForTransparentShadow ? this._opacityTexture = s.opacityTexture : this._opacityTexture = s.getAlphaTestTexture(), this._opacityTexture)) { if (!this._opacityTexture.isReady()) return !1; const C = (r = s.alphaCutOff) !== null && r !== void 0 ? r : hr.DEFAULT_ALPHA_CUTOFF; a.push("#define ALPHATEXTURE"), f && a.push(`#define ALPHATESTVALUE ${C}${C % 1 === 0 ? "." : ""}`), d.isVerticesDataPresent(Y.UVKind) && (h.push(Y.UVKind), a.push("#define UV1")), d.isVerticesDataPresent(Y.UV2Kind) && this._opacityTexture.coordinatesIndex === 1 && (h.push(Y.UV2Kind), a.push("#define UV2")); } const p = new pl(); if (d.useBones && d.computeBonesUsingShaders && d.skeleton) { h.push(Y.MatricesIndicesKind), h.push(Y.MatricesWeightsKind), d.numBoneInfluencers > 4 && (h.push(Y.MatricesIndicesExtraKind), h.push(Y.MatricesWeightsExtraKind)); const C = d.skeleton; a.push("#define NUM_BONE_INFLUENCERS " + d.numBoneInfluencers), d.numBoneInfluencers > 0 && p.addCPUSkinningFallback(0, d), C.isUsingTextureForMatrices ? 
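/* forceCompilation / forceCompilationAsync above poll isReady() on every sub mesh of the shadow map's render list until all effects are compiled; warming shadows up front avoids a first-use hitch. Sketch under the same public-name assumption; the useInstances option mirrors the parameter documented above.

   async function warmUpShadows(generator) {
     await generator.forceCompilationAsync({ useInstances: true });
     // shadow effects are now compiled; rendering will not lazily compile them mid-frame
   }
*/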
a.push("#define BONETEXTURE") : a.push("#define BonesPerMesh " + (C.bones.length + 1)); } else a.push("#define NUM_BONE_INFLUENCERS 0"); const m = d.morphTargetManager; let _ = 0; if (m && m.numInfluencers > 0 && (a.push("#define MORPHTARGETS"), _ = m.numInfluencers, a.push("#define NUM_MORPH_INFLUENCERS " + _), m.isUsingTextureForTargets && a.push("#define MORPHTARGETS_TEXTURE"), Ke.PrepareAttributesForMorphTargetsInfluencers(h, d, _)), bT(s, this._scene, a), t && (a.push("#define INSTANCES"), Ke.PushAttributesForInstances(h), e.getRenderingMesh().hasThinInstances && a.push("#define THIN_INSTANCES")), this.customShaderOptions && this.customShaderOptions.defines) for (const C of this.customShaderOptions.defines) a.indexOf(C) === -1 && a.push(C); const v = a.join(` `); if (u !== v) { u = v; let C = "shadowMap"; const x = [ "world", "mBones", "viewProjection", "diffuseMatrix", "lightDataSM", "depthValuesSM", "biasAndScaleSM", "morphTargetInfluences", "boneTextureWidth", "softTransparentShadowSM", "morphTargetTextureInfo", "morphTargetTextureIndices" ], b = ["diffuseSampler", "boneSampler", "morphTargets"], S = ["Scene", "Mesh"]; if (Gc(x), this.customShaderOptions) { if (C = this.customShaderOptions.shaderName, this.customShaderOptions.attributes) for (const R of this.customShaderOptions.attributes) h.indexOf(R) === -1 && h.push(R); if (this.customShaderOptions.uniforms) for (const R of this.customShaderOptions.uniforms) x.indexOf(R) === -1 && x.push(R); if (this.customShaderOptions.samplers) for (const R of this.customShaderOptions.samplers) b.indexOf(R) === -1 && b.push(R); } const M = this._scene.getEngine(); o = M.createEffect(C, { attributes: h, uniformsNames: x, uniformBuffersNames: S, samplers: b, defines: v, fallbacks: p, onCompiled: null, onError: null, indexParameters: { maxSimultaneousMorphTargets: _ } }, M), l.setEffect(o, u); } if (!o.isReady()) return !1; } return (this.useBlurExponentialShadowMap || this.useBlurCloseExponentialShadowMap) && (!this._blurPostProcesses || !this._blurPostProcesses.length) && this._initializeBlurRTTAndPostProcesses(), !(this._kernelBlurXPostprocess && !this._kernelBlurXPostprocess.isReady() || this._kernelBlurYPostprocess && !this._kernelBlurYPostprocess.isReady() || this._boxBlurPostprocess && !this._boxBlurPostprocess.isReady()); } /** * Prepare all the defines in a material relying on a shadow map at the specified light index. * @param defines Defines of the material we want to update * @param lightIndex Index of the light in the enabled light list of the material */ prepareDefines(e, t) { const i = this._scene, r = this._light; !i.shadowsEnabled || !r.shadowEnabled || (e["SHADOW" + t] = !0, this.useContactHardeningShadow ? (e["SHADOWPCSS" + t] = !0, this._filteringQuality === hr.QUALITY_LOW ? e["SHADOWLOWQUALITY" + t] = !0 : this._filteringQuality === hr.QUALITY_MEDIUM && (e["SHADOWMEDIUMQUALITY" + t] = !0)) : this.usePercentageCloserFiltering ? (e["SHADOWPCF" + t] = !0, this._filteringQuality === hr.QUALITY_LOW ? e["SHADOWLOWQUALITY" + t] = !0 : this._filteringQuality === hr.QUALITY_MEDIUM && (e["SHADOWMEDIUMQUALITY" + t] = !0)) : this.usePoissonSampling ? e["SHADOWPOISSON" + t] = !0 : this.useExponentialShadowMap || this.useBlurExponentialShadowMap ? e["SHADOWESM" + t] = !0 : (this.useCloseExponentialShadowMap || this.useBlurCloseExponentialShadowMap) && (e["SHADOWCLOSEESM" + t] = !0), r.needCube() && (e["SHADOWCUBE" + t] = !0)); } /** * Binds the shadow related information inside of an effect (information like near, far, darkness... 
* defined in the generator but impacting the effect). * @param lightIndex Index of the light in the enabled light list of the material owning the effect * @param effect The effect we are binding the information for */ bindShadowLight(e, t) { const i = this._light; if (!this._scene.shadowsEnabled || !i.shadowEnabled) return; const s = this._getCamera(); if (!s) return; const n = this.getShadowMap(); n && (i.needCube() || t.setMatrix("lightMatrix" + e, this.getTransformMatrix()), this._filter === hr.FILTER_PCF ? (t.setDepthStencilTexture("shadowSampler" + e, this.getShadowMapForRendering()), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), n.getSize().width, 1 / n.getSize().width, this.frustumEdgeFalloff, e)) : this._filter === hr.FILTER_PCSS ? (t.setDepthStencilTexture("shadowSampler" + e, this.getShadowMapForRendering()), t.setTexture("depthSampler" + e, this.getShadowMapForRendering()), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), 1 / n.getSize().width, this._contactHardeningLightSizeUVRatio * n.getSize().width, this.frustumEdgeFalloff, e)) : (t.setTexture("shadowSampler" + e, this.getShadowMapForRendering()), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), this.blurScale / n.getSize().width, this.depthScale, this.frustumEdgeFalloff, e)), i._uniformBuffer.updateFloat2("depthValues", this.getLight().getDepthMinZ(s), this.getLight().getDepthMinZ(s) + this.getLight().getDepthMaxZ(s), e)); } /** * Gets the view matrix used to render the shadow map. */ get viewMatrix() { return this._viewMatrix; } /** * Gets the projection matrix used to render the shadow map. */ get projectionMatrix() { return this._projectionMatrix; } /** * Gets the transformation matrix used to project the meshes into the map from the light point of view. * (eq to shadow projection matrix * light transform matrix) * @returns The transform matrix used to create the shadow map */ getTransformMatrix() { const e = this._scene; if (this._currentRenderId === e.getRenderId() && this._currentFaceIndexCache === this._currentFaceIndex) return this._transformMatrix; this._currentRenderId = e.getRenderId(), this._currentFaceIndexCache = this._currentFaceIndex; let t = this._light.position; if (this._light.computeTransformedInformation() && (t = this._light.transformedPosition), D.NormalizeToRef(this._light.getShadowDirection(this._currentFaceIndex), this._lightDirection), Math.abs(D.Dot(this._lightDirection, D.Up())) === 1 && (this._lightDirection.z = 1e-13), this._light.needProjectionMatrixCompute() || !this._cachedPosition || !this._cachedDirection || !t.equals(this._cachedPosition) || !this._lightDirection.equals(this._cachedDirection)) { this._cachedPosition.copyFrom(t), this._cachedDirection.copyFrom(this._lightDirection), Ae.LookAtLHToRef(t, t.add(this._lightDirection), D.Up(), this._viewMatrix); const i = this.getShadowMap(); if (i) { const r = i.renderList; r && this._light.setShadowProjectionMatrix(this._projectionMatrix, this._viewMatrix, r); } this._viewMatrix.multiplyToRef(this._projectionMatrix, this._transformMatrix); } return this._transformMatrix; } /** * Recreates the shadow map dependencies like RTT and post processes. This can be used during the switch between * Cube and 2D textures for instance. 
*/ recreateShadowMap() { const e = this._shadowMap; if (!e) return; const t = e.renderList; if (this._disposeRTTandPostProcesses(), this._initializeGenerator(), this.filter = this._filter, this._applyFilterValues(), t) { this._shadowMap.renderList || (this._shadowMap.renderList = []); for (const i of t) this._shadowMap.renderList.push(i); } else this._shadowMap.renderList = null; } _disposeBlurPostProcesses() { this._shadowMap2 && (this._shadowMap2.dispose(), this._shadowMap2 = null), this._boxBlurPostprocess && (this._boxBlurPostprocess.dispose(), this._boxBlurPostprocess = null), this._kernelBlurXPostprocess && (this._kernelBlurXPostprocess.dispose(), this._kernelBlurXPostprocess = null), this._kernelBlurYPostprocess && (this._kernelBlurYPostprocess.dispose(), this._kernelBlurYPostprocess = null), this._blurPostProcesses = []; } _disposeRTTandPostProcesses() { this._shadowMap && (this._shadowMap.dispose(), this._shadowMap = null), this._disposeBlurPostProcesses(); } _disposeSceneUBOs() { if (this._sceneUBOs) { for (const e of this._sceneUBOs) e.dispose(); this._sceneUBOs = []; } } /** * Disposes the ShadowGenerator. * Returns nothing. */ dispose() { if (this._disposeRTTandPostProcesses(), this._disposeSceneUBOs(), this._light) { if (this._light._shadowGenerators) { const e = this._light._shadowGenerators.entries(); for (let t = e.next(); t.done !== !0; t = e.next()) { const [i, r] = t.value; r === this && this._light._shadowGenerators.delete(i); } this._light._shadowGenerators.size === 0 && (this._light._shadowGenerators = null); } this._light._markMeshesAsLightDirty(); } this.onBeforeShadowMapRenderMeshObservable.clear(), this.onBeforeShadowMapRenderObservable.clear(), this.onAfterShadowMapRenderMeshObservable.clear(), this.onAfterShadowMapRenderObservable.clear(); } /** * Serializes the shadow generator setup to a json object. * @returns The serialized JSON object */ serialize() { var e; const t = {}, i = this.getShadowMap(); if (!i) return t; if (t.className = this.getClassName(), t.lightId = this._light.id, t.cameraId = (e = this._camera) === null || e === void 0 ? void 0 : e.id, t.id = this.id, t.mapSize = i.getRenderSize(), t.forceBackFacesOnly = this.forceBackFacesOnly, t.darkness = this.getDarkness(), t.transparencyShadow = this._transparencyShadow, t.frustumEdgeFalloff = this.frustumEdgeFalloff, t.bias = this.bias, t.normalBias = this.normalBias, t.usePercentageCloserFiltering = this.usePercentageCloserFiltering, t.useContactHardeningShadow = this.useContactHardeningShadow, t.contactHardeningLightSizeUVRatio = this.contactHardeningLightSizeUVRatio, t.filteringQuality = this.filteringQuality, t.useExponentialShadowMap = this.useExponentialShadowMap, t.useBlurExponentialShadowMap = this.useBlurExponentialShadowMap, t.useCloseExponentialShadowMap = this.useBlurExponentialShadowMap, t.useBlurCloseExponentialShadowMap = this.useBlurExponentialShadowMap, t.usePoissonSampling = this.usePoissonSampling, t.depthScale = this.depthScale, t.blurBoxOffset = this.blurBoxOffset, t.blurKernel = this.blurKernel, t.blurScale = this.blurScale, t.useKernelBlur = this.useKernelBlur, t.renderList = [], i.renderList) for (let r = 0; r < i.renderList.length; r++) { const s = i.renderList[r]; t.renderList.push(s.id); } return t; } /** * Parses a serialized ShadowGenerator and returns a new ShadowGenerator. 
* @param parsedShadowGenerator The JSON object to parse * @param scene The scene to create the shadow map for * @param constr A function that builds a shadow generator or undefined to create an instance of the default shadow generator * @returns The parsed shadow generator */ static Parse(e, t, i) { const r = t.getLightById(e.lightId), s = e.cameraId !== void 0 ? t.getCameraById(e.cameraId) : null, n = i ? i(e.mapSize, r, s) : new hr(e.mapSize, r, void 0, s), a = n.getShadowMap(); for (let l = 0; l < e.renderList.length; l++) t.getMeshesById(e.renderList[l]).forEach(function(u) { a && (a.renderList || (a.renderList = []), a.renderList.push(u)); }); return e.id !== void 0 && (n.id = e.id), n.forceBackFacesOnly = !!e.forceBackFacesOnly, e.darkness !== void 0 && n.setDarkness(e.darkness), e.transparencyShadow && n.setTransparencyShadow(!0), e.frustumEdgeFalloff !== void 0 && (n.frustumEdgeFalloff = e.frustumEdgeFalloff), e.bias !== void 0 && (n.bias = e.bias), e.normalBias !== void 0 && (n.normalBias = e.normalBias), e.usePercentageCloserFiltering ? n.usePercentageCloserFiltering = !0 : e.useContactHardeningShadow ? n.useContactHardeningShadow = !0 : e.usePoissonSampling ? n.usePoissonSampling = !0 : e.useExponentialShadowMap ? n.useExponentialShadowMap = !0 : e.useBlurExponentialShadowMap ? n.useBlurExponentialShadowMap = !0 : e.useCloseExponentialShadowMap ? n.useCloseExponentialShadowMap = !0 : e.useBlurCloseExponentialShadowMap ? n.useBlurCloseExponentialShadowMap = !0 : e.useVarianceShadowMap ? n.useExponentialShadowMap = !0 : e.useBlurVarianceShadowMap && (n.useBlurExponentialShadowMap = !0), e.contactHardeningLightSizeUVRatio !== void 0 && (n.contactHardeningLightSizeUVRatio = e.contactHardeningLightSizeUVRatio), e.filteringQuality !== void 0 && (n.filteringQuality = e.filteringQuality), e.depthScale && (n.depthScale = e.depthScale), e.blurScale && (n.blurScale = e.blurScale), e.blurBoxOffset && (n.blurBoxOffset = e.blurBoxOffset), e.useKernelBlur && (n.useKernelBlur = e.useKernelBlur), e.blurKernel && (n.blurKernel = e.blurKernel), n; } } hr.CLASSNAME = "ShadowGenerator"; hr.FILTER_NONE = 0; hr.FILTER_EXPONENTIALSHADOWMAP = 1; hr.FILTER_POISSONSAMPLING = 2; hr.FILTER_BLUREXPONENTIALSHADOWMAP = 3; hr.FILTER_CLOSEEXPONENTIALSHADOWMAP = 4; hr.FILTER_BLURCLOSEEXPONENTIALSHADOWMAP = 5; hr.FILTER_PCF = 6; hr.FILTER_PCSS = 7; hr.QUALITY_HIGH = 0; hr.QUALITY_MEDIUM = 1; hr.QUALITY_LOW = 2; hr.DEFAULT_ALPHA_CUTOFF = 0.5; hr._SceneComponentInitialization = (c) => { throw yr("ShadowGeneratorSceneComponent"); }; const dme = "depthPixelShader", fme = `#ifdef ALPHATEST varying vec2 vUV;uniform sampler2D diffuseSampler; #endif #include varying float vDepthMetric; #ifdef PACKED #include #endif #ifdef STORE_CAMERASPACE_Z varying vec4 vViewPos; #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #include #ifdef ALPHATEST if (texture2D(diffuseSampler,vUV).a<0.4) discard; #endif #ifdef STORE_CAMERASPACE_Z #ifdef PACKED gl_FragColor=pack(vViewPos.z); #else gl_FragColor=vec4(vViewPos.z,0.0,0.0,1.0); #endif #else #ifdef NONLINEARDEPTH #ifdef PACKED gl_FragColor=pack(gl_FragCoord.z); #else gl_FragColor=vec4(gl_FragCoord.z,0.0,0.0,0.0); #endif #else #ifdef PACKED gl_FragColor=pack(vDepthMetric); #else gl_FragColor=vec4(vDepthMetric,0.0,0.0,1.0); #endif #endif #endif }`; je.ShadersStore[dme] = fme; const pme = "pointCloudVertexDeclaration", _me = `#ifdef POINTSIZE uniform float pointSize; #endif `; je.IncludesShadersStore[pme] = _me; const mme = "depthVertexShader", gme = `attribute vec3 position; 
#include #include #include #include[0..maxSimultaneousMorphTargets] #include #include uniform mat4 viewProjection;uniform vec2 depthValues; #if defined(ALPHATEST) || defined(NEED_UV) varying vec2 vUV;uniform mat4 diffuseMatrix; #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #endif #ifdef STORE_CAMERASPACE_Z uniform mat4 view;varying vec4 vViewPos; #endif #include varying float vDepthMetric; #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position; #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #include #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0); #include gl_Position=viewProjection*worldPos; #ifdef STORE_CAMERASPACE_Z vViewPos=view*worldPos; #else #ifdef USE_REVERSE_DEPTHBUFFER vDepthMetric=((-gl_Position.z+depthValues.x)/(depthValues.y)); #else vDepthMetric=((gl_Position.z+depthValues.x)/(depthValues.y)); #endif #endif #if defined(ALPHATEST) || defined(BASIC_RENDER) #ifdef UV1 vUV=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef UV2 vUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #endif #endif #include } `; je.ShadersStore[mme] = gme; class T5 { /** * Sets a specific material to be used to render a mesh/a list of meshes by the depth renderer * @param mesh mesh or array of meshes * @param material material to use by the depth render when rendering the mesh(es). If undefined is passed, the specific material created by the depth renderer will be used. */ setMaterialForRendering(e, t) { this._depthMap.setMaterialForRendering(e, t); } /** * Instantiates a depth renderer * @param scene The scene the renderer belongs to * @param type The texture type of the depth map (default: Engine.TEXTURETYPE_FLOAT) * @param camera The camera to be used to render the depth map (default: scene's active camera) * @param storeNonLinearDepth Defines whether the depth is stored linearly like in Babylon Shadows or directly like glFragCoord.z * @param samplingMode The sampling mode to be used with the render target (Linear, Nearest...) (default: TRILINEAR_SAMPLINGMODE) * @param storeCameraSpaceZ Defines whether the depth stored is the Z coordinate in camera space. If true, storeNonLinearDepth has no effect. (Default: false) * @param name Name of the render target (default: DepthRenderer) */ constructor(e, t = 1, i = null, r = !1, s = De.TRILINEAR_SAMPLINGMODE, n = !1, a) { this.enabled = !0, this.forceDepthWriteTransparentMeshes = !1, this.useOnlyInActiveCamera = !1, this.reverseCulling = !1, this._scene = e, this._storeNonLinearDepth = r, this._storeCameraSpaceZ = n, this.isPacked = t === 0, this.isPacked ? this.clearColor = new Et(1, 1, 1, 1) : this.clearColor = new Et(n ? 1e8 : 1, 0, 0, 1), T5._SceneComponentInitialization(this._scene); const l = e.getEngine(); this._camera = i, s !== De.NEAREST_SAMPLINGMODE && (t === 1 && !l._caps.textureFloatLinearFiltering && (s = De.NEAREST_SAMPLINGMODE), t === 2 && !l._caps.textureHalfFloatLinearFiltering && (s = De.NEAREST_SAMPLINGMODE)); const o = this.isPacked || !l._features.supportExtendedTextureFormats ? 5 : 6; this._depthMap = new ra(a ?? 
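/* The minified `T5` class above appears to be Babylon.js' DepthRenderer (see the "DepthRendererSceneComponent" error string and the default "DepthRenderer" target name below). Applications rarely construct it directly; the scene helper shown here is the usual entry point, assuming the full @babylonjs/core bundle so the scene component is registered. Variable names are illustrative.

   function useSceneDepth(scene, camera) {
     // storeNonLinearDepth = false keeps the linear depth metric described in the vertex shader above
     const depthRenderer = scene.enableDepthRenderer(camera, false);
     const depthTexture = depthRenderer.getDepthMap(); // a RenderTargetTexture, usable as a sampler elsewhere
     return depthTexture;
   }
*/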
"DepthRenderer", { width: l.getRenderWidth(), height: l.getRenderHeight() }, this._scene, !1, !0, t, !1, s, void 0, void 0, void 0, o), this._depthMap.wrapU = De.CLAMP_ADDRESSMODE, this._depthMap.wrapV = De.CLAMP_ADDRESSMODE, this._depthMap.refreshRate = 1, this._depthMap.renderParticles = !1, this._depthMap.renderList = null, this._depthMap.noPrePassRenderer = !0, this._depthMap.activeCamera = this._camera, this._depthMap.ignoreCameraViewport = !0, this._depthMap.useCameraPostProcesses = !1, this._depthMap.onClearObservable.add((h) => { h.clear(this.clearColor, !0, !0, !0); }), this._depthMap.onBeforeBindObservable.add(() => { var h; (h = l._debugPushGroup) === null || h === void 0 || h.call(l, "depth renderer", 1); }), this._depthMap.onAfterUnbindObservable.add(() => { var h; (h = l._debugPopGroup) === null || h === void 0 || h.call(l, 1); }), this._depthMap.customIsReadyFunction = (h, d, f) => { if ((f || d === 0) && h.subMeshes) for (let p = 0; p < h.subMeshes.length; ++p) { const m = h.subMeshes[p], _ = m.getRenderingMesh(), v = _._getInstancesRenderList(m._id, !!m.getReplacementMesh()), C = l.getCaps().instancedArrays && (v.visibleInstances[m._id] !== null && v.visibleInstances[m._id] !== void 0 || _.hasThinInstances); if (!this.isReady(m, C)) return !1; } return !0; }; const u = (h) => { var d, f; const p = h.getRenderingMesh(), m = h.getEffectiveMesh(), _ = this._scene, v = _.getEngine(), C = h.getMaterial(); if (m._internalAbstractMeshDataInfo._isActiveIntermediate = !1, !C || m.infiniteDistance || C.disableDepthWrite || h.verticesCount === 0 || h._renderId === _.getRenderId()) return; const x = m._getWorldMatrixDeterminant() < 0; let b = (d = p.overrideMaterialSideOrientation) !== null && d !== void 0 ? d : C.sideOrientation; x && (b = b === 0 ? 1 : 0); const S = b === 0; v.setState(C.backFaceCulling, 0, !1, S, this.reverseCulling ? !C.cullBackFaces : C.cullBackFaces); const M = p._getInstancesRenderList(h._id, !!h.getReplacementMesh()); if (M.mustReturn) return; const R = v.getCaps().instancedArrays && (M.visibleInstances[h._id] !== null && M.visibleInstances[h._id] !== void 0 || p.hasThinInstances), w = this._camera || _.activeCamera; if (this.isReady(h, R) && w) { h._renderId = _.getRenderId(); const V = (f = m._internalAbstractMeshDataInfo._materialForRenderPass) === null || f === void 0 ? void 0 : f[v.currentRenderPassId]; let k = h._getDrawWrapper(); !k && V && (k = V._getDrawWrapper()); const L = w.mode === Ai.ORTHOGRAPHIC_CAMERA; if (!k) return; const B = k.effect; v.enableEffect(k), R || p._bind(h, B, C.fillMode), V ? V.bindForSubMesh(m.getWorldMatrix(), m, h) : (B.setMatrix("viewProjection", _.getTransformMatrix()), B.setMatrix("world", m.getWorldMatrix()), this._storeCameraSpaceZ && B.setMatrix("view", _.getViewMatrix())); let U, K; if (L ? (U = !v.useReverseDepthBuffer && v.isNDCHalfZRange ? 0 : 1, K = v.useReverseDepthBuffer && v.isNDCHalfZRange ? 0 : 1) : (U = v.useReverseDepthBuffer && v.isNDCHalfZRange ? w.minZ : v.isNDCHalfZRange ? 0 : w.minZ, K = v.useReverseDepthBuffer && v.isNDCHalfZRange ? 
0 : w.maxZ), B.setFloat2("depthValues", U, U + K), !V) { if (C.needAlphaTesting()) { const ee = C.getAlphaTestTexture(); ee && (B.setTexture("diffuseSampler", ee), B.setMatrix("diffuseMatrix", ee.getTextureMatrix())); } if (p.useBones && p.computeBonesUsingShaders && p.skeleton) { const ee = p.skeleton; if (ee.isUsingTextureForMatrices) { const Z = ee.getTransformMatrixTexture(p); if (!Z) return; B.setTexture("boneSampler", Z), B.setFloat("boneTextureWidth", 4 * (ee.bones.length + 1)); } else B.setMatrices("mBones", ee.getTransformMatrices(p)); } Ec(B, C, _), Ke.BindMorphTargetParameters(p, B), p.morphTargetManager && p.morphTargetManager.isUsingTextureForTargets && p.morphTargetManager._bind(B), C.pointsCloud && B.setFloat("pointSize", C.pointSize); } p._processRendering(m, h, B, C.fillMode, M, R, (ee, Z) => B.setMatrix("world", Z)); } }; this._depthMap.customRenderFunction = (h, d, f, p) => { let m; if (p.length) for (m = 0; m < p.length; m++) u(p.data[m]); for (m = 0; m < h.length; m++) u(h.data[m]); for (m = 0; m < d.length; m++) u(d.data[m]); if (this.forceDepthWriteTransparentMeshes) for (m = 0; m < f.length; m++) u(f.data[m]); else for (m = 0; m < f.length; m++) f.data[m].getEffectiveMesh()._internalAbstractMeshDataInfo._isActiveIntermediate = !1; }; } /** * Creates the depth rendering effect and checks if the effect is ready. * @param subMesh The submesh to be used to render the depth map of * @param useInstances If multiple world instances should be used * @returns if the depth renderer is ready to render the depth map */ isReady(e, t) { var i; const r = this._scene.getEngine(), s = e.getMesh(), n = s.getScene(), a = (i = s._internalAbstractMeshDataInfo._materialForRenderPass) === null || i === void 0 ? void 0 : i[r.currentRenderPassId]; if (a) return a.isReadyForSubMesh(s, e, t); const l = e.getMaterial(); if (!l || l.disableDepthWrite) return !1; const o = [], u = [Y.PositionKind]; if (l && l.needAlphaTesting() && l.getAlphaTestTexture() && (o.push("#define ALPHATEST"), s.isVerticesDataPresent(Y.UVKind) && (u.push(Y.UVKind), o.push("#define UV1")), s.isVerticesDataPresent(Y.UV2Kind) && (u.push(Y.UV2Kind), o.push("#define UV2"))), s.useBones && s.computeBonesUsingShaders) { u.push(Y.MatricesIndicesKind), u.push(Y.MatricesWeightsKind), s.numBoneInfluencers > 4 && (u.push(Y.MatricesIndicesExtraKind), u.push(Y.MatricesWeightsExtraKind)), o.push("#define NUM_BONE_INFLUENCERS " + s.numBoneInfluencers), o.push("#define BonesPerMesh " + (s.skeleton ? 
s.skeleton.bones.length + 1 : 0)); const _ = e.getRenderingMesh().skeleton; _ != null && _.isUsingTextureForMatrices && o.push("#define BONETEXTURE"); } else o.push("#define NUM_BONE_INFLUENCERS 0"); const h = s.morphTargetManager; let d = 0; h && h.numInfluencers > 0 && (d = h.numInfluencers, o.push("#define MORPHTARGETS"), o.push("#define NUM_MORPH_INFLUENCERS " + d), h.isUsingTextureForTargets && o.push("#define MORPHTARGETS_TEXTURE"), Ke.PrepareAttributesForMorphTargetsInfluencers(u, s, d)), l.pointsCloud && o.push("#define POINTSIZE"), t && (o.push("#define INSTANCES"), Ke.PushAttributesForInstances(u), e.getRenderingMesh().hasThinInstances && o.push("#define THIN_INSTANCES")), this._storeNonLinearDepth && o.push("#define NONLINEARDEPTH"), this._storeCameraSpaceZ && o.push("#define STORE_CAMERASPACE_Z"), this.isPacked && o.push("#define PACKED"), bT(l, n, o); const f = e._getDrawWrapper(void 0, !0), p = f.defines, m = o.join(` `); if (p !== m) { const _ = [ "world", "mBones", "boneTextureWidth", "pointSize", "viewProjection", "view", "diffuseMatrix", "depthValues", "morphTargetInfluences", "morphTargetTextureInfo", "morphTargetTextureIndices" ]; Gc(_), f.setEffect(r.createEffect("depth", u, _, ["diffuseSampler", "morphTargets", "boneSampler"], m, void 0, void 0, void 0, { maxSimultaneousMorphTargets: d }), m); } return f.effect.isReady(); } /** * Gets the texture which the depth map will be written to. * @returns The depth map texture */ getDepthMap() { return this._depthMap; } /** * Disposes of the depth renderer. */ dispose() { const e = []; for (const t in this._scene._depthRenderer) this._scene._depthRenderer[t] === this && e.push(t); if (e.length > 0) { this._depthMap.dispose(); for (const t of e) delete this._scene._depthRenderer[t]; } } } T5._SceneComponentInitialization = (c) => { throw yr("DepthRendererSceneComponent"); }; const vme = "minmaxReduxPixelShader", Ame = `varying vec2 vUV;uniform sampler2D textureSampler; #if defined(INITIAL) uniform sampler2D sourceTexture;uniform vec2 texSize;void main(void) {ivec2 coord=ivec2(vUV*(texSize-1.0));float f1=texelFetch(sourceTexture,coord,0).r;float f2=texelFetch(sourceTexture,coord+ivec2(1,0),0).r;float f3=texelFetch(sourceTexture,coord+ivec2(1,1),0).r;float f4=texelFetch(sourceTexture,coord+ivec2(0,1),0).r;float minz=min(min(min(f1,f2),f3),f4); #ifdef DEPTH_REDUX float maxz=max(max(max(sign(1.0-f1)*f1,sign(1.0-f2)*f2),sign(1.0-f3)*f3),sign(1.0-f4)*f4); #else float maxz=max(max(max(f1,f2),f3),f4); #endif glFragColor=vec4(minz,maxz,0.,0.);} #elif defined(MAIN) uniform vec2 texSize;void main(void) {ivec2 coord=ivec2(vUV*(texSize-1.0));vec2 f1=texelFetch(textureSampler,coord,0).rg;vec2 f2=texelFetch(textureSampler,coord+ivec2(1,0),0).rg;vec2 f3=texelFetch(textureSampler,coord+ivec2(1,1),0).rg;vec2 f4=texelFetch(textureSampler,coord+ivec2(0,1),0).rg;float minz=min(min(min(f1.x,f2.x),f3.x),f4.x);float maxz=max(max(max(f1.y,f2.y),f3.y),f4.y);glFragColor=vec4(minz,maxz,0.,0.);} #elif defined(ONEBEFORELAST) uniform ivec2 texSize;void main(void) {ivec2 coord=ivec2(vUV*vec2(texSize-1));vec2 f1=texelFetch(textureSampler,coord % texSize,0).rg;vec2 f2=texelFetch(textureSampler,(coord+ivec2(1,0)) % texSize,0).rg;vec2 f3=texelFetch(textureSampler,(coord+ivec2(1,1)) % texSize,0).rg;vec2 f4=texelFetch(textureSampler,(coord+ivec2(0,1)) % texSize,0).rg;float minz=min(f1.x,f2.x);float maxz=max(f1.y,f2.y);glFragColor=vec4(minz,maxz,0.,0.);} #elif defined(LAST) void main(void) {glFragColor=vec4(0.);if (true) { discard;}} #endif `; je.ShadersStore[vme] = 
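// --- Usage sketch (added as a comment; not part of the original bundle) -----------------
// The class that just closed above is Babylon.js's DepthRenderer (minified here as T5).
// A minimal, hedged example of how it is usually obtained and consumed; it assumes the
// scene-side helper enableDepthRenderer() contributed by DepthRendererSceneComponent is
// present in this build, and that `scene` and `camera` exist in the calling code:
//
//   const depthRenderer = scene.enableDepthRenderer(camera);   // creates/reuses a DepthRenderer for this camera
//   const depthMap = depthRenderer.getDepthMap();              // RenderTargetTexture filled by the render code above
//   // bind depthMap wherever a depth texture is needed (a ShaderMaterial, a post process, ...)
//   // and call scene.disableDepthRenderer(camera) when done to release the render target.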
Ame; class _re { /** * Creates a min/max reducer * @param camera The camera to use for the post processes */ constructor(e) { this.onAfterReductionPerformed = new Fe(), this._forceFullscreenViewport = !0, this._activated = !1, this._camera = e, this._postProcessManager = new q9(e.getScene()), this._onContextRestoredObserver = e.getEngine().onContextRestoredObservable.add(() => { this._postProcessManager._rebuild(); }); } /** * Gets the texture used to read the values from. */ get sourceTexture() { return this._sourceTexture; } /** * Sets the source texture to read the values from. * One must indicate if the texture is a depth texture or not through the depthRedux parameter * because in such textures '1' value must not be taken into account to compute the maximum * as this value is used to clear the texture. * Note that the computation is not activated by calling this function, you must call activate() for that! * @param sourceTexture The texture to read the values from. The values should be in the red channel. * @param depthRedux Indicates if the texture is a depth texture or not * @param type The type of the textures created for the reduction (defaults to TEXTURETYPE_HALF_FLOAT) * @param forceFullscreenViewport Forces the post processes used for the reduction to be applied without taking into account viewport (defaults to true) */ setSourceTexture(e, t, i = 2, r = !0) { if (e === this._sourceTexture) return; this.dispose(!1), this._sourceTexture = e, this._reductionSteps = [], this._forceFullscreenViewport = r; const s = this._camera.getScene(), n = new Bi( "Initial reduction phase", "minmaxRedux", // shader ["texSize"], ["sourceTexture"], // textures 1, // options null, // camera 1, // sampling s.getEngine(), // engine !1, // reusable "#define INITIAL" + (t ? ` #define DEPTH_REDUX` : ""), // defines i, void 0, void 0, void 0, 7 ); n.autoClear = !1, n.forceFullscreenViewport = r; let a = this._sourceTexture.getRenderWidth(), l = this._sourceTexture.getRenderHeight(); n.onApply = /* @__PURE__ */ ((u, h) => (d) => { d.setTexture("sourceTexture", this._sourceTexture), d.setFloat2("texSize", u, h); })(a, l), this._reductionSteps.push(n); let o = 1; for (; a > 1 || l > 1; ) { a = Math.max(Math.round(a / 2), 1), l = Math.max(Math.round(l / 2), 1); const u = new Bi( "Reduction phase " + o, "minmaxRedux", // shader ["texSize"], null, { width: a, height: l }, // options null, // camera 1, // sampling s.getEngine(), // engine !1, // reusable "#define " + (a == 1 && l == 1 ? "LAST" : a == 1 || l == 1 ? "ONEBEFORELAST" : "MAIN"), // defines i, void 0, void 0, void 0, 7 ); if (u.autoClear = !1, u.forceFullscreenViewport = r, u.onApply = /* @__PURE__ */ ((h, d) => (f) => { h == 1 || d == 1 ? f.setInt2("texSize", h, d) : f.setFloat2("texSize", h, d); })(a, l), this._reductionSteps.push(u), o++, a == 1 && l == 1) { const h = (d, f, p) => { const m = new Float32Array(4 * d * f), _ = { min: 0, max: 0 }; return () => { s.getEngine()._readTexturePixels(p.inputTexture.texture, d, f, -1, 0, m, !1), _.min = m[0], _.max = m[1], this.onAfterReductionPerformed.notifyObservers(_); }; }; u.onAfterRenderObservable.add(h(a, l, u)); } } } /** * Defines the refresh rate of the computation. * Use 0 to compute just once, 1 to compute on every frame, 2 to compute every two frames and so on... */ get refreshRate() { return this._sourceTexture ? 
this._sourceTexture.refreshRate : -1; } set refreshRate(e) { this._sourceTexture && (this._sourceTexture.refreshRate = e); } /** * Gets the activation status of the reducer */ get activated() { return this._activated; } /** * Activates the reduction computation. * When activated, the observers registered in onAfterReductionPerformed are * called after the computation is performed */ activate() { this._onAfterUnbindObserver || !this._sourceTexture || (this._onAfterUnbindObserver = this._sourceTexture.onAfterUnbindObservable.add(() => { var e, t; const i = this._camera.getScene().getEngine(); (e = i._debugPushGroup) === null || e === void 0 || e.call(i, "min max reduction", 1), this._reductionSteps[0].activate(this._camera), this._postProcessManager.directRender(this._reductionSteps, this._reductionSteps[0].inputTexture, this._forceFullscreenViewport), i.unBindFramebuffer(this._reductionSteps[0].inputTexture, !1), (t = i._debugPopGroup) === null || t === void 0 || t.call(i, 1); }), this._activated = !0); } /** * Deactivates the reduction computation. */ deactivate() { !this._onAfterUnbindObserver || !this._sourceTexture || (this._sourceTexture.onAfterUnbindObservable.remove(this._onAfterUnbindObserver), this._onAfterUnbindObserver = null, this._activated = !1); } /** * Disposes the min/max reducer * @param disposeAll true to dispose all the resources. You should always call this function with true as the parameter (or without any parameter as it is the default one). This flag is meant to be used internally. */ dispose(e = !0) { if (e && (this.onAfterReductionPerformed.clear(), this._onContextRestoredObserver && (this._camera.getEngine().onContextRestoredObservable.remove(this._onContextRestoredObserver), this._onContextRestoredObserver = null)), this.deactivate(), this._reductionSteps) { for (let t = 0; t < this._reductionSteps.length; ++t) this._reductionSteps[t].dispose(); this._reductionSteps = null; } this._postProcessManager && e && this._postProcessManager.dispose(), this._sourceTexture = null; } } class mre extends _re { /** * Gets the depth renderer used for the computation. * Note that the result is null if you provide your own renderer when calling setDepthRenderer. */ get depthRenderer() { return this._depthRenderer; } /** * Creates a depth reducer * @param camera The camera used to render the depth texture */ constructor(e) { super(e); } /** * Sets the depth renderer to use to generate the depth map * @param depthRenderer The depth renderer to use. If not provided, a new one will be created automatically * @param type The texture type of the depth map (default: TEXTURETYPE_HALF_FLOAT) * @param forceFullscreenViewport Forces the post processes used for the reduction to be applied without taking into account viewport (defaults to true) */ setDepthRenderer(e = null, t = 2, i = !0) { const r = this._camera.getScene(); this._depthRenderer && (delete r._depthRenderer[this._depthRendererId], this._depthRenderer.dispose(), this._depthRenderer = null), e === null && (r._depthRenderer || (r._depthRenderer = {}), e = this._depthRenderer = new T5(r, t, this._camera, !1, 1), e.enabled = !1, this._depthRendererId = "minmax" + this._camera.id, r._depthRenderer[this._depthRendererId] = e), super.setSourceTexture(e.getDepthMap(), !0, t, i); } /** * @internal */ setSourceTexture(e, t, i = 2, r = !0) { super.setSourceTexture(e, t, i, r); } /** * Activates the reduction computation. 
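// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// _re above is Babylon.js's MinMaxReducer. Assuming a `camera` and a render target whose
// red channel holds the values to reduce (here a depth map, hence depthRedux = true so the
// 1.0 clear value is ignored for the maximum), it is typically wired like this:
//
//   const reducer = new MinMaxReducer(camera);
//   reducer.setSourceTexture(depthTexture, true);          // depthRedux = true for depth textures
//   reducer.onAfterReductionPerformed.add(({ min, max }) => {
//       console.log("reduced range:", min, max);
//   });
//   reducer.refreshRate = 1;    // 0 = compute once, 1 = every frame, 2 = every other frame, ...
//   reducer.activate();         // hooks the source texture's onAfterUnbind and runs the reduction chain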
* When activated, the observers registered in onAfterReductionPerformed are * called after the computation is performed */ activate() { this._depthRenderer && (this._depthRenderer.enabled = !0), super.activate(); } /** * Deactivates the reduction computation. */ deactivate() { super.deactivate(), this._depthRenderer && (this._depthRenderer.enabled = !1); } /** * Disposes the depth reducer * @param disposeAll true to dispose all the resources. You should always call this function with true as the parameter (or without any parameter as it is the default one). This flag is meant to be used internally. */ dispose(e = !0) { if (super.dispose(e), this._depthRenderer && e) { const t = this._depthRenderer.getDepthMap().getScene(); t && delete t._depthRenderer[this._depthRendererId], this._depthRenderer.dispose(), this._depthRenderer = null; } } } const lq = D.Up(), yme = D.Zero(), wu = new D(), WD = new D(), DF = new Ae(); class vh extends hr { _validateFilter(e) { return e === hr.FILTER_NONE || e === hr.FILTER_PCF || e === hr.FILTER_PCSS ? e : (Ce.Error('Unsupported filter "' + e + '"!'), hr.FILTER_NONE); } /** * Gets or set the number of cascades used by the CSM. */ get numCascades() { return this._numCascades; } set numCascades(e) { e = Math.min(Math.max(e, vh.MIN_CASCADES_COUNT), vh.MAX_CASCADES_COUNT), e !== this._numCascades && (this._numCascades = e, this.recreateShadowMap(), this._recreateSceneUBOs()); } /** * Enables or disables the shadow casters bounding info computation. * If your shadow casters don't move, you can disable this feature. * If it is enabled, the bounding box computation is done every frame. */ get freezeShadowCastersBoundingInfo() { return this._freezeShadowCastersBoundingInfo; } set freezeShadowCastersBoundingInfo(e) { this._freezeShadowCastersBoundingInfoObservable && e && (this._scene.onBeforeRenderObservable.remove(this._freezeShadowCastersBoundingInfoObservable), this._freezeShadowCastersBoundingInfoObservable = null), !this._freezeShadowCastersBoundingInfoObservable && !e && (this._freezeShadowCastersBoundingInfoObservable = this._scene.onBeforeRenderObservable.add(() => this._computeShadowCastersBoundingInfo())), this._freezeShadowCastersBoundingInfo = e, e && this._computeShadowCastersBoundingInfo(); } _computeShadowCastersBoundingInfo() { if (this._scbiMin.copyFromFloats(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._scbiMax.copyFromFloats(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE), this._shadowMap && this._shadowMap.renderList) { const e = this._shadowMap.renderList; for (let i = 0; i < e.length; i++) { const r = e[i]; if (!r) continue; const s = r.getBoundingInfo(), n = s.boundingBox; this._scbiMin.minimizeInPlace(n.minimumWorld), this._scbiMax.maximizeInPlace(n.maximumWorld); } const t = this._scene.meshes; for (let i = 0; i < t.length; i++) { const r = t[i]; if (!r || !r.isVisible || !r.isEnabled || !r.receiveShadows) continue; const s = r.getBoundingInfo(), n = s.boundingBox; this._scbiMin.minimizeInPlace(n.minimumWorld), this._scbiMax.maximizeInPlace(n.maximumWorld); } } this._shadowCastersBoundingInfo.reConstruct(this._scbiMin, this._scbiMax); } /** * Gets or sets the shadow casters bounding info. 
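// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// mre above is Babylon.js's DepthReducer: a MinMaxReducer pre-wired to a depth renderer.
// A hedged example assuming only a `camera`; when setDepthRenderer() is called without an
// argument it creates its own DepthRenderer and registers it on the scene, as shown above:
//
//   const depthReducer = new DepthReducer(camera);
//   depthReducer.setDepthRenderer();             // or pass an existing DepthRenderer to share a depth pass
//   depthReducer.onAfterReductionPerformed.add((range) => {
//       // range.min / range.max: depth bounds of the scene as seen by `camera`
//   });
//   depthReducer.activate();                     // also flips the internal depth renderer to enabled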
* If you provide your own shadow casters bounding info, first enable freezeShadowCastersBoundingInfo * so that the system won't overwrite the bounds you provide */ get shadowCastersBoundingInfo() { return this._shadowCastersBoundingInfo; } set shadowCastersBoundingInfo(e) { this._shadowCastersBoundingInfo = e; } /** * Sets the minimal and maximal distances to use when computing the cascade breaks. * * The values of min / max are typically the depth zmin and zmax values of your scene, for a given frame. * If you don't know these values, simply leave them to their defaults and don't call this function. * @param min minimal distance for the breaks (default to 0.) * @param max maximal distance for the breaks (default to 1.) */ setMinMaxDistance(e, t) { this._minDistance === e && this._maxDistance === t || (e > t && (e = 0, t = 1), e < 0 && (e = 0), t > 1 && (t = 1), this._minDistance = e, this._maxDistance = t, this._breaksAreDirty = !0); } /** Gets the minimal distance used in the cascade break computation */ get minDistance() { return this._minDistance; } /** Gets the maximal distance used in the cascade break computation */ get maxDistance() { return this._maxDistance; } /** * Gets the class name of that object * @returns "CascadedShadowGenerator" */ getClassName() { return vh.CLASSNAME; } /** * Gets a cascade minimum extents * @param cascadeIndex index of the cascade * @returns the minimum cascade extents */ getCascadeMinExtents(e) { return e >= 0 && e < this._numCascades ? this._cascadeMinExtents[e] : null; } /** * Gets a cascade maximum extents * @param cascadeIndex index of the cascade * @returns the maximum cascade extents */ getCascadeMaxExtents(e) { return e >= 0 && e < this._numCascades ? this._cascadeMaxExtents[e] : null; } /** * Gets the shadow max z distance. It's the limit beyond which shadows are not displayed. * It defaults to camera.maxZ */ get shadowMaxZ() { return this._getCamera() ? this._shadowMaxZ : 0; } /** * Sets the shadow max z distance. */ set shadowMaxZ(e) { const t = this._getCamera(); if (!t) { this._shadowMaxZ = e; return; } this._shadowMaxZ === e || e < t.minZ || e > t.maxZ && t.maxZ !== 0 || (this._shadowMaxZ = e, this._light._markMeshesAsLightDirty(), this._breaksAreDirty = !0); } /** * Gets or sets the debug flag. * When enabled, the cascades are materialized by different colors on the screen. */ get debug() { return this._debug; } set debug(e) { this._debug = e, this._light._markMeshesAsLightDirty(); } /** * Gets or sets the depth clamping value. * * When enabled, it improves the shadow quality because the near z plane of the light frustum don't need to be adjusted * to account for the shadow casters far away. * * Note that this property is incompatible with PCSS filtering, so it won't be used in that case. */ get depthClamp() { return this._depthClamp; } set depthClamp(e) { this._depthClamp = e; } /** * Gets or sets the percentage of blending between two cascades (value between 0. and 1.). * It defaults to 0.1 (10% blending). */ get cascadeBlendPercentage() { return this._cascadeBlendPercentage; } set cascadeBlendPercentage(e) { this._cascadeBlendPercentage = e, this._light._markMeshesAsLightDirty(); } /** * Gets or set the lambda parameter. * This parameter is used to split the camera frustum and create the cascades. * It's a value between 0. and 1.: If 0, the split is a uniform split of the frustum, if 1 it is a logarithmic split. * For all values in-between, it's a linear combination of the uniform and logarithm split algorithm. 
*/ get lambda() { return this._lambda; } set lambda(e) { const t = Math.min(Math.max(e, 0), 1); this._lambda != t && (this._lambda = t, this._breaksAreDirty = !0); } /** * Gets the view matrix corresponding to a given cascade * @param cascadeNum cascade to retrieve the view matrix from * @returns the cascade view matrix */ getCascadeViewMatrix(e) { return e >= 0 && e < this._numCascades ? this._viewMatrices[e] : null; } /** * Gets the projection matrix corresponding to a given cascade * @param cascadeNum cascade to retrieve the projection matrix from * @returns the cascade projection matrix */ getCascadeProjectionMatrix(e) { return e >= 0 && e < this._numCascades ? this._projectionMatrices[e] : null; } /** * Gets the transformation matrix corresponding to a given cascade * @param cascadeNum cascade to retrieve the transformation matrix from * @returns the cascade transformation matrix */ getCascadeTransformMatrix(e) { return e >= 0 && e < this._numCascades ? this._transformMatrices[e] : null; } /** * Sets the depth renderer to use when autoCalcDepthBounds is enabled. * * Note that if no depth renderer is set, a new one will be automatically created internally when necessary. * * You should call this function if you already have a depth renderer enabled in your scene, to avoid * doing multiple depth rendering each frame. If you provide your own depth renderer, make sure it stores linear depth! * @param depthRenderer The depth renderer to use when autoCalcDepthBounds is enabled. If you pass null or don't call this function at all, a depth renderer will be automatically created */ setDepthRenderer(e) { this._depthRenderer = e, this._depthReducer && this._depthReducer.setDepthRenderer(this._depthRenderer); } /** * Gets or sets the autoCalcDepthBounds property. * * When enabled, a depth rendering pass is first performed (with an internally created depth renderer or with the one * you provide by calling setDepthRenderer). Then, a min/max reducing is applied on the depth map to compute the * minimal and maximal depth of the map and those values are used as inputs for the setMinMaxDistance() function. * It can greatly enhance the shadow quality, at the expense of more GPU works. * When using this option, you should increase the value of the lambda parameter, and even set it to 1 for best results. */ get autoCalcDepthBounds() { return this._autoCalcDepthBounds; } set autoCalcDepthBounds(e) { const t = this._getCamera(); if (t) { if (this._autoCalcDepthBounds = e, !e) { this._depthReducer && this._depthReducer.deactivate(), this.setMinMaxDistance(0, 1); return; } this._depthReducer || (this._depthReducer = new mre(t), this._depthReducer.onAfterReductionPerformed.add((i) => { let r = i.min, s = i.max; r >= s && (r = 0, s = 1), (r != this._minDistance || s != this._maxDistance) && this.setMinMaxDistance(r, s); }), this._depthReducer.setDepthRenderer(this._depthRenderer)), this._depthReducer.activate(); } } /** * Defines the refresh rate of the min/max computation used when autoCalcDepthBounds is set to true * Use 0 to compute just once, 1 to compute on every frame, 2 to compute every two frames and so on... * Note that if you provided your own depth renderer through a call to setDepthRenderer, you are responsible * for setting the refresh rate on the renderer yourself! */ get autoCalcDepthBoundsRefreshRate() { var e, t, i; return (i = (t = (e = this._depthReducer) === null || e === void 0 ? void 0 : e.depthRenderer) === null || t === void 0 ? 
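// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// Tying together the setDepthRenderer() and autoCalcDepthBounds documentation above, on a
// CascadedShadowGenerator instance `csm` assumed to exist (see the constructor further down):
//
//   csm.setDepthRenderer(existingDepthRenderer);   // optional: reuse a depth renderer you already run
//   csm.autoCalcDepthBounds = true;                // min/max depth reduction now feeds setMinMaxDistance()
//   csm.autoCalcDepthBoundsRefreshRate = 2;        // recompute the bounds every other frame
//   csm.lambda = 1;                                // recommended above when auto depth bounds are enabled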
void 0 : t.getDepthMap().refreshRate) !== null && i !== void 0 ? i : -1; } set autoCalcDepthBoundsRefreshRate(e) { var t; !((t = this._depthReducer) === null || t === void 0) && t.depthRenderer && (this._depthReducer.depthRenderer.getDepthMap().refreshRate = e); } /** * Create the cascade breaks according to the lambda, shadowMaxZ and min/max distance properties, as well as the camera near and far planes. * This function is automatically called when updating lambda, shadowMaxZ and min/max distances, however you should call it yourself if * you change the camera near/far planes! */ splitFrustum() { this._breaksAreDirty = !0; } _splitFrustum() { const e = this._getCamera(); if (!e) return; const t = e.minZ, i = e.maxZ || this._shadowMaxZ, r = i - t, s = this._minDistance, n = this._shadowMaxZ < i && this._shadowMaxZ >= t ? Math.min((this._shadowMaxZ - t) / (i - t), this._maxDistance) : this._maxDistance, a = t + s * r, l = t + n * r, o = l - a, u = l / a; for (let h = 0; h < this._cascades.length; ++h) { const d = (h + 1) / this._numCascades, f = a * u ** d, p = a + o * d, m = this._lambda * (f - p) + p; this._cascades[h].prevBreakDistance = h === 0 ? s : this._cascades[h - 1].breakDistance, this._cascades[h].breakDistance = (m - t) / r, this._viewSpaceFrustumsZ[h] = m, this._frustumLengths[h] = (this._cascades[h].breakDistance - this._cascades[h].prevBreakDistance) * r; } this._breaksAreDirty = !1; } _computeMatrices() { const e = this._scene; if (!this._getCamera()) return; D.NormalizeToRef(this._light.getShadowDirection(0), this._lightDirection), Math.abs(D.Dot(this._lightDirection, D.Up())) === 1 && (this._lightDirection.z = 1e-13), this._cachedDirection.copyFrom(this._lightDirection); const i = e.getEngine().useReverseDepthBuffer; for (let r = 0; r < this._numCascades; ++r) { this._computeFrustumInWorldSpace(r), this._computeCascadeFrustum(r), this._cascadeMaxExtents[r].subtractToRef(this._cascadeMinExtents[r], wu), this._frustumCenter[r].addToRef(this._lightDirection.scale(this._cascadeMinExtents[r].z), this._shadowCameraPos[r]), Ae.LookAtLHToRef(this._shadowCameraPos[r], this._frustumCenter[r], lq, this._viewMatrices[r]); let s = 0, n = wu.z; const a = this._shadowCastersBoundingInfo; a.update(this._viewMatrices[r]), n = Math.min(n, a.boundingBox.maximumWorld.z), !this._depthClamp || this.filter === hr.FILTER_PCSS ? s = Math.min(s, a.boundingBox.minimumWorld.z) : s = Math.max(s, a.boundingBox.minimumWorld.z), Ae.OrthoOffCenterLHToRef(this._cascadeMinExtents[r].x, this._cascadeMaxExtents[r].x, this._cascadeMinExtents[r].y, this._cascadeMaxExtents[r].y, i ? n : s, i ? 
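// Added note on _splitFrustum() above: with d = (i + 1) / numCascades it blends a logarithmic
// split  f = near * (far / near)^d  and a uniform split  p = near + (far - near) * d  as
// break_i = lambda * f + (1 - lambda) * p  (near/far being the min/max-distance-adjusted camera
// planes). So lambda = 0 yields uniform cascades and lambda = 1 logarithmic ones, matching the
// lambda documentation earlier in this class.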
s : n, this._projectionMatrices[r], e.getEngine().isNDCHalfZRange), this._cascadeMinExtents[r].z = s, this._cascadeMaxExtents[r].z = n, this._viewMatrices[r].multiplyToRef(this._projectionMatrices[r], this._transformMatrices[r]), D.TransformCoordinatesToRef(yme, this._transformMatrices[r], wu), wu.scaleInPlace(this._mapSize / 2), WD.copyFromFloats(Math.round(wu.x), Math.round(wu.y), Math.round(wu.z)), WD.subtractInPlace(wu).scaleInPlace(2 / this._mapSize), Ae.TranslationToRef(WD.x, WD.y, 0, DF), this._projectionMatrices[r].multiplyToRef(DF, this._projectionMatrices[r]), this._viewMatrices[r].multiplyToRef(this._projectionMatrices[r], this._transformMatrices[r]), this._transformMatrices[r].copyToArray(this._transformMatricesAsArray, r * 16); } } // Get the 8 points of the view frustum in world space _computeFrustumInWorldSpace(e) { const t = this._getCamera(); if (!t) return; const i = this._cascades[e].prevBreakDistance, r = this._cascades[e].breakDistance, s = this._scene.getEngine().isNDCHalfZRange; t.getViewMatrix(); const n = t.maxZ === 0, a = t.maxZ; n && (t.maxZ = this._shadowMaxZ, t.getProjectionMatrix(!0)); const l = Ae.Invert(t.getTransformationMatrix()); n && (t.maxZ = a, t.getProjectionMatrix(!0)); const o = this._scene.getEngine().useReverseDepthBuffer ? 4 : 0; for (let u = 0; u < vh._FrustumCornersNDCSpace.length; ++u) wu.copyFrom(vh._FrustumCornersNDCSpace[(u + o) % vh._FrustumCornersNDCSpace.length]), s && wu.z === -1 && (wu.z = 0), D.TransformCoordinatesToRef(wu, l, this._frustumCornersWorldSpace[e][u]); for (let u = 0; u < vh._FrustumCornersNDCSpace.length / 2; ++u) wu.copyFrom(this._frustumCornersWorldSpace[e][u + 4]).subtractInPlace(this._frustumCornersWorldSpace[e][u]), WD.copyFrom(wu).scaleInPlace(i), wu.scaleInPlace(r), wu.addInPlace(this._frustumCornersWorldSpace[e][u]), this._frustumCornersWorldSpace[e][u + 4].copyFrom(wu), this._frustumCornersWorldSpace[e][u].addInPlace(WD); } _computeCascadeFrustum(e) { if (this._cascadeMinExtents[e].copyFromFloats(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._cascadeMaxExtents[e].copyFromFloats(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE), this._frustumCenter[e].copyFromFloats(0, 0, 0), !!this._getCamera()) { for (let i = 0; i < this._frustumCornersWorldSpace[e].length; ++i) this._frustumCenter[e].addInPlace(this._frustumCornersWorldSpace[e][i]); if (this._frustumCenter[e].scaleInPlace(1 / this._frustumCornersWorldSpace[e].length), this.stabilizeCascades) { let i = 0; for (let r = 0; r < this._frustumCornersWorldSpace[e].length; ++r) { const s = this._frustumCornersWorldSpace[e][r].subtractToRef(this._frustumCenter[e], wu).length(); i = Math.max(i, s); } i = Math.ceil(i * 16) / 16, this._cascadeMaxExtents[e].copyFromFloats(i, i, i), this._cascadeMinExtents[e].copyFromFloats(-i, -i, -i); } else { const i = this._frustumCenter[e]; this._frustumCenter[e].addToRef(this._lightDirection, wu), Ae.LookAtLHToRef(i, wu, lq, DF); for (let r = 0; r < this._frustumCornersWorldSpace[e].length; ++r) D.TransformCoordinatesToRef(this._frustumCornersWorldSpace[e][r], DF, wu), this._cascadeMinExtents[e].minimizeInPlace(wu), this._cascadeMaxExtents[e].maximizeInPlace(wu); } } } _recreateSceneUBOs() { if (this._disposeSceneUBOs(), this._sceneUBOs) for (let e = 0; e < this._numCascades; ++e) this._sceneUBOs.push(this._scene.createSceneUniformBuffer(`Scene for CSM Shadow Generator (light "${this._light.name}" cascade #${e})`)); } /** * Support test. */ static get IsSupported() { const e = gi.LastCreatedEngine; return e ? 
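// Added note on stabilizeCascades: when it is enabled, _computeCascadeFrustum() above bounds each
// cascade with a sphere whose radius is rounded up to the next 1/16 (Math.ceil(i * 16) / 16), so
// the extents keep a fixed size, and _computeMatrices() then snaps the ortho projection to whole
// shadow-map texels (the Math.round / 2-over-mapSize step). This is the usual cascade
// stabilization trick: shadow edges stop shimmering as the camera moves, at the cost of slightly
// looser cascade bounds.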
e._features.supportCSM : !1; } /** * Creates a Cascaded Shadow Generator object. * A ShadowGenerator is the required tool to use the shadows. * Each directional light casting shadows needs to use its own ShadowGenerator. * Documentation : https://doc.babylonjs.com/babylon101/cascadedShadows * @param mapSize The size of the texture what stores the shadows. Example : 1024. * @param light The directional light object generating the shadows. * @param usefulFloatFirst By default the generator will try to use half float textures but if you need precision (for self shadowing for instance), you can use this option to enforce full float texture. * @param camera Camera associated with this shadow generator (default: null). If null, takes the scene active camera at the time we need to access it * @param useRedTextureType Forces the generator to use a Red instead of a RGBA type for the shadow map texture format (default: true) */ constructor(e, t, i, r, s = !0) { if (!vh.IsSupported) { Ce.Error("CascadedShadowMap is not supported by the current engine."); return; } super(e, t, i, r, s), this.usePercentageCloserFiltering = !0; } _initializeGenerator() { var e, t, i, r, s, n, a, l, o, u, h, d, f, p, m, _, v, C, x, b; this.penumbraDarkness = (e = this.penumbraDarkness) !== null && e !== void 0 ? e : 1, this._numCascades = (t = this._numCascades) !== null && t !== void 0 ? t : vh.DEFAULT_CASCADES_COUNT, this.stabilizeCascades = (i = this.stabilizeCascades) !== null && i !== void 0 ? i : !1, this._freezeShadowCastersBoundingInfoObservable = (r = this._freezeShadowCastersBoundingInfoObservable) !== null && r !== void 0 ? r : null, this.freezeShadowCastersBoundingInfo = (s = this.freezeShadowCastersBoundingInfo) !== null && s !== void 0 ? s : !1, this._scbiMin = (n = this._scbiMin) !== null && n !== void 0 ? n : new D(0, 0, 0), this._scbiMax = (a = this._scbiMax) !== null && a !== void 0 ? a : new D(0, 0, 0), this._shadowCastersBoundingInfo = (l = this._shadowCastersBoundingInfo) !== null && l !== void 0 ? l : new zf(new D(0, 0, 0), new D(0, 0, 0)), this._breaksAreDirty = (o = this._breaksAreDirty) !== null && o !== void 0 ? o : !0, this._minDistance = (u = this._minDistance) !== null && u !== void 0 ? u : 0, this._maxDistance = (h = this._maxDistance) !== null && h !== void 0 ? h : 1, this._currentLayer = (d = this._currentLayer) !== null && d !== void 0 ? d : 0, this._shadowMaxZ = (m = (f = this._shadowMaxZ) !== null && f !== void 0 ? f : (p = this._getCamera()) === null || p === void 0 ? void 0 : p.maxZ) !== null && m !== void 0 ? m : 1e4, this._debug = (_ = this._debug) !== null && _ !== void 0 ? _ : !1, this._depthClamp = (v = this._depthClamp) !== null && v !== void 0 ? v : !0, this._cascadeBlendPercentage = (C = this._cascadeBlendPercentage) !== null && C !== void 0 ? C : 0.1, this._lambda = (x = this._lambda) !== null && x !== void 0 ? x : 0.5, this._autoCalcDepthBounds = (b = this._autoCalcDepthBounds) !== null && b !== void 0 ? b : !1, this._recreateSceneUBOs(), super._initializeGenerator(); } _createTargetRenderTexture() { const e = this._scene.getEngine(), t = { width: this._mapSize, height: this._mapSize, layers: this.numCascades }; this._shadowMap = new ra(this._light.name + "_CSMShadowMap", t, this._scene, !1, !0, this._textureType, !1, void 0, !1, !1, void 0, this._useRedTextureType ? 6 : 5), this._shadowMap.createDepthStencilTexture(e.useReverseDepthBuffer ? 
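// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// vh above is Babylon.js's CascadedShadowGenerator. A minimal, hedged setup assuming a
// DirectionalLight `dirLight`, a caster mesh `box` and a receiver `ground`; addShadowCaster()
// comes from the ShadowGenerator base class and receiveShadows from the mesh API, both of
// which sit outside this excerpt:
//
//   const csm = new CascadedShadowGenerator(2048, dirLight);   // mapSize, light
//   csm.numCascades = 4;            // clamped to [MIN_CASCADES_COUNT, MAX_CASCADES_COUNT]
//   csm.lambda = 0.8;               // bias the cascade splits toward the logarithmic scheme
//   csm.stabilizeCascades = true;   // trade a bit of resolution for stable shadow edges
//   csm.shadowMaxZ = 300;           // distance beyond which shadows are no longer displayed
//   csm.addShadowCaster(box);
//   ground.receiveShadows = true;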
516 : 513, !0), this._shadowMap.noPrePassRenderer = !0; } _initializeShadowMap() { if (super._initializeShadowMap(), this._shadowMap === null) return; this._transformMatricesAsArray = new Float32Array(this._numCascades * 16), this._viewSpaceFrustumsZ = new Array(this._numCascades), this._frustumLengths = new Array(this._numCascades), this._lightSizeUVCorrection = new Array(this._numCascades * 2), this._depthCorrection = new Array(this._numCascades), this._cascades = [], this._viewMatrices = [], this._projectionMatrices = [], this._transformMatrices = [], this._cascadeMinExtents = [], this._cascadeMaxExtents = [], this._frustumCenter = [], this._shadowCameraPos = [], this._frustumCornersWorldSpace = []; for (let t = 0; t < this._numCascades; ++t) { this._cascades[t] = { prevBreakDistance: 0, breakDistance: 0 }, this._viewMatrices[t] = Ae.Zero(), this._projectionMatrices[t] = Ae.Zero(), this._transformMatrices[t] = Ae.Zero(), this._cascadeMinExtents[t] = new D(), this._cascadeMaxExtents[t] = new D(), this._frustumCenter[t] = new D(), this._shadowCameraPos[t] = new D(), this._frustumCornersWorldSpace[t] = new Array(vh._FrustumCornersNDCSpace.length); for (let i = 0; i < vh._FrustumCornersNDCSpace.length; ++i) this._frustumCornersWorldSpace[t][i] = new D(); } const e = this._scene.getEngine(); this._shadowMap.onBeforeBindObservable.clear(), this._shadowMap.onBeforeRenderObservable.clear(), this._shadowMap.onBeforeRenderObservable.add((t) => { this._sceneUBOs && this._scene.setSceneUniformBuffer(this._sceneUBOs[t]), this._currentLayer = t, this._filter === hr.FILTER_PCF && e.setColorWrite(!1), this._scene.setTransformMatrix(this.getCascadeViewMatrix(t), this.getCascadeProjectionMatrix(t)), this._useUBO && (this._scene.getSceneUniformBuffer().unbindEffect(), this._scene.finalizeSceneUbo()); }), this._shadowMap.onBeforeBindObservable.add(() => { var t; this._currentSceneUBO = this._scene.getSceneUniformBuffer(), (t = e._debugPushGroup) === null || t === void 0 || t.call(e, `cascaded shadow map generation for pass id ${e.currentRenderPassId}`, 1), this._breaksAreDirty && this._splitFrustum(), this._computeMatrices(); }), this._splitFrustum(); } _bindCustomEffectForRenderSubMeshForShadowMap(e, t) { t.setMatrix("viewProjection", this.getCascadeTransformMatrix(this._currentLayer)); } _isReadyCustomDefines(e) { e.push("#define SM_DEPTHCLAMP " + (this._depthClamp && this._filter !== hr.FILTER_PCSS ? "1" : "0")); } /** * Prepare all the defines in a material relying on a shadow map at the specified light index. * @param defines Defines of the material we want to update * @param lightIndex Index of the light in the enabled light list of the material */ prepareDefines(e, t) { super.prepareDefines(e, t); const i = this._scene, r = this._light; if (!i.shadowsEnabled || !r.shadowEnabled) return; e["SHADOWCSM" + t] = !0, e["SHADOWCSMDEBUG" + t] = this.debug, e["SHADOWCSMNUM_CASCADES" + t] = this.numCascades, e["SHADOWCSM_RIGHTHANDED" + t] = i.useRightHandedSystem; const s = this._getCamera(); s && this._shadowMaxZ <= (s.maxZ || this._shadowMaxZ) && (e["SHADOWCSMUSESHADOWMAXZ" + t] = !0), this.cascadeBlendPercentage === 0 && (e["SHADOWCSMNOBLEND" + t] = !0); } /** * Binds the shadow related information inside of an effect (information like near, far, darkness... * defined in the generator but impacting the effect). 
* @param lightIndex Index of the light in the enabled light list of the material owning the effect * @param effect The effect we are binfing the information for */ bindShadowLight(e, t) { const i = this._light; if (!this._scene.shadowsEnabled || !i.shadowEnabled) return; const s = this._getCamera(); if (!s) return; const n = this.getShadowMap(); if (!n) return; const a = n.getSize().width; if (t.setMatrices("lightMatrix" + e, this._transformMatricesAsArray), t.setArray("viewFrustumZ" + e, this._viewSpaceFrustumsZ), t.setFloat("cascadeBlendFactor" + e, this.cascadeBlendPercentage === 0 ? 1e4 : 1 / this.cascadeBlendPercentage), t.setArray("frustumLengths" + e, this._frustumLengths), this._filter === hr.FILTER_PCF) t.setDepthStencilTexture("shadowSampler" + e, n), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), a, 1 / a, this.frustumEdgeFalloff, e); else if (this._filter === hr.FILTER_PCSS) { for (let l = 0; l < this._numCascades; ++l) this._lightSizeUVCorrection[l * 2 + 0] = l === 0 ? 1 : (this._cascadeMaxExtents[0].x - this._cascadeMinExtents[0].x) / (this._cascadeMaxExtents[l].x - this._cascadeMinExtents[l].x), this._lightSizeUVCorrection[l * 2 + 1] = l === 0 ? 1 : (this._cascadeMaxExtents[0].y - this._cascadeMinExtents[0].y) / (this._cascadeMaxExtents[l].y - this._cascadeMinExtents[l].y), this._depthCorrection[l] = l === 0 ? 1 : (this._cascadeMaxExtents[l].z - this._cascadeMinExtents[l].z) / (this._cascadeMaxExtents[0].z - this._cascadeMinExtents[0].z); t.setDepthStencilTexture("shadowSampler" + e, n), t.setTexture("depthSampler" + e, n), t.setArray2("lightSizeUVCorrection" + e, this._lightSizeUVCorrection), t.setArray("depthCorrection" + e, this._depthCorrection), t.setFloat("penumbraDarkness" + e, this.penumbraDarkness), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), 1 / a, this._contactHardeningLightSizeUVRatio * a, this.frustumEdgeFalloff, e); } else t.setTexture("shadowSampler" + e, n), i._uniformBuffer.updateFloat4("shadowsInfo", this.getDarkness(), a, 1 / a, this.frustumEdgeFalloff, e); i._uniformBuffer.updateFloat2("depthValues", this.getLight().getDepthMinZ(s), this.getLight().getDepthMinZ(s) + this.getLight().getDepthMaxZ(s), e); } /** * Gets the transformation matrix of the first cascade used to project the meshes into the map from the light point of view. * (eq to view projection * shadow projection matrices) * @returns The transform matrix used to create the shadow map */ getTransformMatrix() { return this.getCascadeTransformMatrix(0); } /** * Disposes the ShadowGenerator. * Returns nothing. */ dispose() { super.dispose(), this._freezeShadowCastersBoundingInfoObservable && (this._scene.onBeforeRenderObservable.remove(this._freezeShadowCastersBoundingInfoObservable), this._freezeShadowCastersBoundingInfoObservable = null), this._depthReducer && (this._depthReducer.dispose(), this._depthReducer = null); } /** * Serializes the shadow generator setup to a json object. 
* @returns The serialized JSON object */ serialize() { const e = super.serialize(), t = this.getShadowMap(); if (!t) return e; if (e.numCascades = this._numCascades, e.debug = this._debug, e.stabilizeCascades = this.stabilizeCascades, e.lambda = this._lambda, e.cascadeBlendPercentage = this.cascadeBlendPercentage, e.depthClamp = this._depthClamp, e.autoCalcDepthBounds = this.autoCalcDepthBounds, e.shadowMaxZ = this._shadowMaxZ, e.penumbraDarkness = this.penumbraDarkness, e.freezeShadowCastersBoundingInfo = this._freezeShadowCastersBoundingInfo, e.minDistance = this.minDistance, e.maxDistance = this.maxDistance, e.renderList = [], t.renderList) for (let i = 0; i < t.renderList.length; i++) { const r = t.renderList[i]; e.renderList.push(r.id); } return e; } /** * Parses a serialized ShadowGenerator and returns a new ShadowGenerator. * @param parsedShadowGenerator The JSON object to parse * @param scene The scene to create the shadow map for * @returns The parsed shadow generator */ static Parse(e, t) { const i = hr.Parse(e, t, (r, s, n) => new vh(r, s, void 0, n)); return e.numCascades !== void 0 && (i.numCascades = e.numCascades), e.debug !== void 0 && (i.debug = e.debug), e.stabilizeCascades !== void 0 && (i.stabilizeCascades = e.stabilizeCascades), e.lambda !== void 0 && (i.lambda = e.lambda), e.cascadeBlendPercentage !== void 0 && (i.cascadeBlendPercentage = e.cascadeBlendPercentage), e.depthClamp !== void 0 && (i.depthClamp = e.depthClamp), e.autoCalcDepthBounds !== void 0 && (i.autoCalcDepthBounds = e.autoCalcDepthBounds), e.shadowMaxZ !== void 0 && (i.shadowMaxZ = e.shadowMaxZ), e.penumbraDarkness !== void 0 && (i.penumbraDarkness = e.penumbraDarkness), e.freezeShadowCastersBoundingInfo !== void 0 && (i.freezeShadowCastersBoundingInfo = e.freezeShadowCastersBoundingInfo), e.minDistance !== void 0 && e.maxDistance !== void 0 && i.setMinMaxDistance(e.minDistance, e.maxDistance), i; } } vh._FrustumCornersNDCSpace = [ new D(-1, 1, -1), new D(1, 1, -1), new D(1, -1, -1), new D(-1, -1, -1), new D(-1, 1, 1), new D(1, 1, 1), new D(1, -1, 1), new D(-1, -1, 1) ]; vh.CLASSNAME = "CascadedShadowGenerator"; vh.DEFAULT_CASCADES_COUNT = 4; vh.MIN_CASCADES_COUNT = 2; vh.MAX_CASCADES_COUNT = 4; vh._SceneComponentInitialization = (c) => { throw yr("ShadowGeneratorSceneComponent"); }; Yl.AddParser(Bt.NAME_SHADOWGENERATOR, (c, e) => { if (c.shadowGenerators !== void 0 && c.shadowGenerators !== null) for (let t = 0, i = c.shadowGenerators.length; t < i; t++) { const r = c.shadowGenerators[t]; r.className === vh.CLASSNAME ? vh.Parse(r, e) : hr.Parse(r, e); } }); class gre { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_SHADOWGENERATOR, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._gatherRenderTargetsStage.registerStep(Bt.STEP_GATHERRENDERTARGETS_SHADOWGENERATOR, this, this._gatherRenderTargets); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
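// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// Round-tripping a generator through the serialize()/Parse() pair documented above; `csm`
// and `scene` are assumed to exist. Scene files loaded from JSON go through the parser
// registered just above, which dispatches on the stored className:
//
//   const json = csm.serialize();                                // numCascades, lambda, renderList ids, ...
//   const restored = CascadedShadowGenerator.Parse(json, scene);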
*/ rebuild() { } /** * Serializes the component data to the specified json object * @param serializationObject The object to serialize to */ serialize(e) { e.shadowGenerators = []; const t = this.scene.lights; for (const i of t) { const r = i.getShadowGenerators(); if (r) { const s = r.values(); for (let n = s.next(); n.done !== !0; n = s.next()) { const a = n.value; e.shadowGenerators.push(a.serialize()); } } } } /** * Adds all the elements from the container to the scene * @param container the container holding the elements */ // eslint-disable-next-line @typescript-eslint/no-unused-vars addFromContainer(e) { } /** * Removes all the elements in the container from the scene * @param container contains the elements to remove * @param dispose if the removed element should be disposed (default: false) */ // eslint-disable-next-line @typescript-eslint/no-unused-vars removeFromContainer(e, t) { } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ dispose() { } _gatherRenderTargets(e) { const t = this.scene; if (this.scene.shadowsEnabled) for (let i = 0; i < t.lights.length; i++) { const r = t.lights[i], s = r.getShadowGenerators(); if (r.isEnabled() && r.shadowEnabled && s) { const n = s.values(); for (let a = n.next(); a.done !== !0; a = n.next()) { const o = a.value.getShadowMap(); t.textures.indexOf(o) !== -1 && e.push(o); } } } } } hr._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_SHADOWGENERATOR); e || (e = new gre(c), c._addComponent(e)); }; In.AddNodeConstructor("Light_Type_0", (c, e) => () => new s6(c, D.Zero(), e)); class s6 extends b5 { /** * Getter: In case of direction provided, the shadow will not use a cube texture but simulate a spot shadow as a fallback * This specifies what angle the shadow will use to be created. * * It default to 90 degrees to work nicely with the cube texture generation for point lights shadow maps. */ get shadowAngle() { return this._shadowAngle; } /** * Setter: In case of direction provided, the shadow will not use a cube texture but simulate a spot shadow as a fallback * This specifies what angle the shadow will use to be created. * * It default to 90 degrees to work nicely with the cube texture generation for point lights shadow maps. */ set shadowAngle(e) { this._shadowAngle = e, this.forceProjectionMatrixCompute(); } /** * Gets the direction if it has been set. * In case of direction provided, the shadow will not use a cube texture but simulate a spot shadow as a fallback */ get direction() { return this._direction; } /** * In case of direction provided, the shadow will not use a cube texture but simulate a spot shadow as a fallback */ set direction(e) { const t = this.needCube(); if (this._direction = e, this.needCube() !== t && this._shadowGenerators) { const i = this._shadowGenerators.values(); for (let r = i.next(); r.done !== !0; r = i.next()) r.value.recreateShadowMap(); } } /** * Creates a PointLight object from the passed name and position (Vector3) and adds it in the scene. * A PointLight emits the light in every direction. * It can cast shadows. 
* If the scene camera is already defined and you want to set your PointLight at the camera position, just set it : * ```javascript * var pointLight = new PointLight("pl", camera.position, scene); * ``` * Documentation : https://doc.babylonjs.com/features/featuresDeepDive/lights/lights_introduction * @param name The light friendly name * @param position The position of the point light in the scene * @param scene The scene the lights belongs to */ constructor(e, t, i) { super(e, i), this._shadowAngle = Math.PI / 2, this.position = t; } /** * Returns the string "PointLight" * @returns the class name */ getClassName() { return "PointLight"; } /** * Returns the integer 0. * @returns The light Type id as a constant defines in Light.LIGHTTYPEID_x */ getTypeID() { return hs.LIGHTTYPEID_POINTLIGHT; } /** * Specifies whether or not the shadowmap should be a cube texture. * @returns true if the shadowmap needs to be a cube texture. */ needCube() { return !this.direction; } /** * Returns a new Vector3 aligned with the PointLight cube system according to the passed cube face index (integer). * @param faceIndex The index of the face we are computed the direction to generate shadow * @returns The set direction in 2d mode otherwise the direction to the cubemap face if needCube() is true */ getShadowDirection(e) { if (this.direction) return super.getShadowDirection(e); switch (e) { case 0: return new D(1, 0, 0); case 1: return new D(-1, 0, 0); case 2: return new D(0, -1, 0); case 3: return new D(0, 1, 0); case 4: return new D(0, 0, 1); case 5: return new D(0, 0, -1); } return D.Zero(); } /** * Sets the passed matrix "matrix" as a left-handed perspective projection matrix with the following settings : * - fov = PI / 2 * - aspect ratio : 1.0 * - z-near and far equal to the active camera minZ and maxZ. * Returns the PointLight. * @param matrix * @param viewMatrix * @param renderList */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _setDefaultShadowProjectionMatrix(e, t, i) { const r = this.getScene().activeCamera; if (!r) return; const s = this.shadowMinZ !== void 0 ? this.shadowMinZ : r.minZ, n = this.shadowMaxZ !== void 0 ? this.shadowMaxZ : r.maxZ, a = this.getScene().getEngine().useReverseDepthBuffer; Ae.PerspectiveFovLHToRef(this.shadowAngle, 1, a ? n : s, a ? s : n, e, !0, this._scene.getEngine().isNDCHalfZRange, void 0, a); } _buildUniformLayout() { this._uniformBuffer.addUniform("vLightData", 4), this._uniformBuffer.addUniform("vLightDiffuse", 4), this._uniformBuffer.addUniform("vLightSpecular", 4), this._uniformBuffer.addUniform("vLightFalloff", 4), this._uniformBuffer.addUniform("shadowsInfo", 3), this._uniformBuffer.addUniform("depthValues", 2), this._uniformBuffer.create(); } /** * Sets the passed Effect "effect" with the PointLight transformed position (or position, if none) and passed name (string). * @param effect The effect to update * @param lightIndex The index of the light in the effect to update * @returns The point light */ transferToEffect(e, t) { return this.computeTransformedInformation() ? this._uniformBuffer.updateFloat4("vLightData", this.transformedPosition.x, this.transformedPosition.y, this.transformedPosition.z, 0, t) : this._uniformBuffer.updateFloat4("vLightData", this.position.x, this.position.y, this.position.z, 0, t), this._uniformBuffer.updateFloat4("vLightFalloff", this.range, this._inverseSquaredRange, 0, 0, t), this; } transferToNodeMaterialEffect(e, t) { return this.computeTransformedInformation() ? 
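// --- Usage sketch (added as a comment; illustrative only) -------------------------------
// Building on the shadowAngle / direction documentation above (s6 is Babylon.js's PointLight);
// `scene` is assumed to exist and the example uses the public, un-minified class names:
//
//   const pl = new PointLight("pl", new Vector3(0, 10, 0), scene);
//   // by default needCube() === true, so a shadow generator renders a 6-face cube map
//   pl.direction = new Vector3(0, -1, 0);   // with a direction set, shadows fall back to a single spot-style map
//   pl.shadowAngle = Math.PI / 3;           // narrow the simulated spot cone (default is PI / 2)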
e.setFloat3(t, this.transformedPosition.x, this.transformedPosition.y, this.transformedPosition.z) : e.setFloat3(t, this.position.x, this.position.y, this.position.z), this; } /** * Prepares the list of defines specific to the light type. * @param defines the list of defines * @param lightIndex defines the index of the light for the effect */ prepareLightSpecificDefines(e, t) { e["POINTLIGHT" + t] = !0; } } F([ W() ], s6.prototype, "shadowAngle", null); class oT { /** * Creates a new default loading screen * @param _renderingCanvas defines the canvas used to render the scene * @param _loadingText defines the default text to display * @param _loadingDivBackgroundColor defines the default background color */ constructor(e, t = "", i = "black") { this._renderingCanvas = e, this._loadingText = t, this._loadingDivBackgroundColor = i, this._resizeLoadingUI = () => { const r = this._renderingCanvas.getBoundingClientRect(), s = window.getComputedStyle(this._renderingCanvas).position; this._loadingDiv && (this._loadingDiv.style.position = s === "fixed" ? "fixed" : "absolute", this._loadingDiv.style.left = r.left + "px", this._loadingDiv.style.top = r.top + "px", this._loadingDiv.style.width = r.width + "px", this._loadingDiv.style.height = r.height + "px"); }; } /** * Function called to display the loading screen */ displayLoadingUI() { if (this._loadingDiv) return; this._loadingDiv = document.createElement("div"), this._loadingDiv.id = "babylonjsLoadingDiv", this._loadingDiv.style.opacity = "0", this._loadingDiv.style.transition = "opacity 1.5s ease", this._loadingDiv.style.pointerEvents = "none", this._loadingDiv.style.display = "grid", this._loadingDiv.style.gridTemplateRows = "100%", this._loadingDiv.style.gridTemplateColumns = "100%", this._loadingDiv.style.justifyItems = "center", this._loadingDiv.style.alignItems = "center", this._loadingTextDiv = document.createElement("div"), this._loadingTextDiv.style.position = "absolute", this._loadingTextDiv.style.left = "0", this._loadingTextDiv.style.top = "50%", this._loadingTextDiv.style.marginTop = "80px", this._loadingTextDiv.style.width = "100%", this._loadingTextDiv.style.height = "20px", this._loadingTextDiv.style.fontFamily = "Arial", this._loadingTextDiv.style.fontSize = "14px", this._loadingTextDiv.style.color = "white", this._loadingTextDiv.style.textAlign = "center", this._loadingTextDiv.style.zIndex = "1", this._loadingTextDiv.innerHTML = "Loading", this._loadingDiv.appendChild(this._loadingTextDiv), this._loadingTextDiv.innerHTML = this._loadingText, this._style = document.createElement("style"), this._style.type = "text/css"; const e = `@-webkit-keyframes spin1 { 0% { -webkit-transform: rotate(0deg);} 100% { -webkit-transform: rotate(360deg);} } @keyframes spin1 { 0% { transform: rotate(0deg);} 100% { transform: rotate(360deg);} }`; this._style.innerHTML = e, document.getElementsByTagName("head")[0].appendChild(this._style); const t = !!window.SVGSVGElement, i = new Image(); oT.DefaultLogoUrl ? i.src = oT.DefaultLogoUrl : i.src = t ? 
"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxODAuMTcgMjA4LjA0Ij48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2ZmZjt9LmNscy0ye2ZpbGw6I2UwNjg0Yjt9LmNscy0ze2ZpbGw6I2JiNDY0Yjt9LmNscy00e2ZpbGw6I2UwZGVkODt9LmNscy01e2ZpbGw6I2Q1ZDJjYTt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPkJhYnlsb25Mb2dvPC90aXRsZT48ZyBpZD0iTGF5ZXJfMiIgZGF0YS1uYW1lPSJMYXllciAyIj48ZyBpZD0iUGFnZV9FbGVtZW50cyIgZGF0YS1uYW1lPSJQYWdlIEVsZW1lbnRzIj48cGF0aCBjbGFzcz0iY2xzLTEiIGQ9Ik05MC4wOSwwLDAsNTJWMTU2bDkwLjA5LDUyLDkwLjA4LTUyVjUyWiIvPjxwb2x5Z29uIGNsYXNzPSJjbHMtMiIgcG9pbnRzPSIxODAuMTcgNTIuMDEgMTUxLjk3IDM1LjczIDEyNC44NSA1MS4zOSAxNTMuMDUgNjcuNjcgMTgwLjE3IDUyLjAxIi8+PHBvbHlnb24gY2xhc3M9ImNscy0yIiBwb2ludHM9IjI3LjEyIDY3LjY3IDExNy4yMSAxNS42NiA5MC4wOCAwIDAgNTIuMDEgMjcuMTIgNjcuNjciLz48cG9seWdvbiBjbGFzcz0iY2xzLTIiIHBvaW50cz0iNjEuODkgMTIwLjMgOTAuMDggMTM2LjU4IDExOC4yOCAxMjAuMyA5MC4wOCAxMDQuMDIgNjEuODkgMTIwLjMiLz48cG9seWdvbiBjbGFzcz0iY2xzLTMiIHBvaW50cz0iMTUzLjA1IDY3LjY3IDE1My4wNSAxNDAuMzcgOTAuMDggMTc2LjcyIDI3LjEyIDE0MC4zNyAyNy4xMiA2Ny42NyAwIDUyLjAxIDAgMTU2LjAzIDkwLjA4IDIwOC4wNCAxODAuMTcgMTU2LjAzIDE4MC4xNyA1Mi4wMSAxNTMuMDUgNjcuNjciLz48cG9seWdvbiBjbGFzcz0iY2xzLTMiIHBvaW50cz0iOTAuMDggNzEuNDYgNjEuODkgODcuNzQgNjEuODkgMTIwLjMgOTAuMDggMTA0LjAyIDExOC4yOCAxMjAuMyAxMTguMjggODcuNzQgOTAuMDggNzEuNDYiLz48cG9seWdvbiBjbGFzcz0iY2xzLTQiIHBvaW50cz0iMTUzLjA1IDY3LjY3IDExOC4yOCA4Ny43NCAxMTguMjggMTIwLjMgOTAuMDggMTM2LjU4IDkwLjA4IDE3Ni43MiAxNTMuMDUgMTQwLjM3IDE1My4wNSA2Ny42NyIvPjxwb2x5Z29uIGNsYXNzPSJjbHMtNSIgcG9pbnRzPSIyNy4xMiA2Ny42NyA2MS44OSA4Ny43NCA2MS44OSAxMjAuMyA5MC4wOCAxMzYuNTggOTAuMDggMTc2LjcyIDI3LjEyIDE0MC4zNyAyNy4xMiA2Ny42NyIvPjwvZz48L2c+PC9zdmc+" : "https://cdn.babylonjs.com/Assets/babylonLogo.png", i.style.width = "150px", i.style.gridColumn = "1", i.style.gridRow = "1", i.style.top = "50%", i.style.left = "50%", i.style.transform = "translate(-50%, -50%)", i.style.position = "absolute"; const r = document.createElement("div"); r.style.width = "300px", r.style.gridColumn = "1", r.style.gridRow = "1", r.style.top = "50%", r.style.left = "50%", r.style.transform = "translate(-50%, -50%)", r.style.position = "absolute"; const s = new Image(); if (oT.DefaultSpinnerUrl ? s.src = oT.DefaultSpinnerUrl : s.src = t ? 
"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAzOTIgMzkyIj48ZGVmcz48c3R5bGU+LmNscy0xe2ZpbGw6I2UwNjg0Yjt9LmNscy0ye2ZpbGw6bm9uZTt9PC9zdHlsZT48L2RlZnM+PHRpdGxlPlNwaW5uZXJJY29uPC90aXRsZT48ZyBpZD0iTGF5ZXJfMiIgZGF0YS1uYW1lPSJMYXllciAyIj48ZyBpZD0iU3Bpbm5lciI+PHBhdGggY2xhc3M9ImNscy0xIiBkPSJNNDAuMjEsMTI2LjQzYzMuNy03LjMxLDcuNjctMTQuNDQsMTItMjEuMzJsMy4zNi01LjEsMy41Mi01YzEuMjMtMS42MywyLjQxLTMuMjksMy42NS00LjkxczIuNTMtMy4yMSwzLjgyLTQuNzlBMTg1LjIsMTg1LjIsMCwwLDEsODMuNCw2Ny40M2EyMDgsMjA4LDAsMCwxLDE5LTE1LjY2YzMuMzUtMi40MSw2Ljc0LTQuNzgsMTAuMjUtN3M3LjExLTQuMjgsMTAuNzUtNi4zMmM3LjI5LTQsMTQuNzMtOCwyMi41My0xMS40OSwzLjktMS43Miw3Ljg4LTMuMywxMi00LjY0YTEwNC4yMiwxMDQuMjIsMCwwLDEsMTIuNDQtMy4yMyw2Mi40NCw2Mi40NCwwLDAsMSwxMi43OC0xLjM5QTI1LjkyLDI1LjkyLDAsMCwxLDE5NiwyMS40NGE2LjU1LDYuNTUsMCwwLDEsMi4wNSw5LDYuNjYsNi42NiwwLDAsMS0xLjY0LDEuNzhsLS40MS4yOWEyMi4wNywyMi4wNywwLDAsMS01Ljc4LDMsMzAuNDIsMzAuNDIsMCwwLDEtNS42NywxLjYyLDM3LjgyLDM3LjgyLDAsMCwxLTUuNjkuNzFjLTEsMC0xLjkuMTgtMi44NS4yNmwtMi44NS4yNHEtNS43Mi41MS0xMS40OCwxLjFjLTMuODQuNC03LjcxLjgyLTExLjU4LDEuNGExMTIuMzQsMTEyLjM0LDAsMCwwLTIyLjk0LDUuNjFjLTMuNzIsMS4zNS03LjM0LDMtMTAuOTQsNC42NHMtNy4xNCwzLjUxLTEwLjYsNS41MUExNTEuNiwxNTEuNiwwLDAsMCw2OC41Niw4N0M2Ny4yMyw4OC40OCw2Niw5MCw2NC42NCw5MS41NnMtMi41MSwzLjE1LTMuNzUsNC43M2wtMy41NCw0LjljLTEuMTMsMS42Ni0yLjIzLDMuMzUtMy4zMyw1YTEyNywxMjcsMCwwLDAtMTAuOTMsMjEuNDksMS41OCwxLjU4LDAsMSwxLTMtMS4xNVM0MC4xOSwxMjYuNDcsNDAuMjEsMTI2LjQzWiIvPjxyZWN0IGNsYXNzPSJjbHMtMiIgd2lkdGg9IjM5MiIgaGVpZ2h0PSIzOTIiLz48L2c+PC9nPjwvc3ZnPg==" : "https://cdn.babylonjs.com/Assets/loadingIcon.png", s.style.animation = "spin1 0.75s infinite linear", s.style.webkitAnimation = "spin1 0.75s infinite linear", s.style.transformOrigin = "50% 50%", s.style.webkitTransformOrigin = "50% 50%", !t) { const n = { w: 16, h: 18.5 }, a = { w: 30, h: 30 }; i.style.width = `${n.w}vh`, i.style.height = `${n.h}vh`, i.style.left = `calc(50% - ${n.w / 2}vh)`, i.style.top = `calc(50% - ${n.h / 2}vh)`, s.style.width = `${a.w}vh`, s.style.height = `${a.h}vh`, s.style.left = `calc(50% - ${a.w / 2}vh)`, s.style.top = `calc(50% - ${a.h / 2}vh)`; } r.appendChild(s), this._loadingDiv.appendChild(i), this._loadingDiv.appendChild(r), this._resizeLoadingUI(), window.addEventListener("resize", this._resizeLoadingUI), this._loadingDiv.style.backgroundColor = this._loadingDivBackgroundColor, document.body.appendChild(this._loadingDiv), this._loadingDiv.style.opacity = "1"; } /** * Function called to hide the loading screen */ hideLoadingUI() { if (!this._loadingDiv) return; const e = () => { this._loadingTextDiv && (this._loadingTextDiv.remove(), this._loadingTextDiv = null), this._loadingDiv && (this._loadingDiv.remove(), this._loadingDiv = null), this._style && (this._style.remove(), this._style = null), window.removeEventListener("resize", this._resizeLoadingUI); }; this._loadingDiv.style.opacity = "0", this._loadingDiv.addEventListener("transitionend", e); } /** * Gets or sets the text to display while loading */ set loadingUIText(e) { this._loadingText = e, this._loadingTextDiv && (this._loadingTextDiv.innerHTML = this._loadingText); } get loadingUIText() { return this._loadingText; } /** * Gets or sets the color to use for the background */ get loadingUIBackgroundColor() { return this._loadingDivBackgroundColor; } set loadingUIBackgroundColor(e) { this._loadingDivBackgroundColor = e, this._loadingDiv && (this._loadingDiv.style.backgroundColor = this._loadingDivBackgroundColor); } } oT.DefaultLogoUrl = ""; oT.DefaultSpinnerUrl = ""; 
$e.DefaultLoadingScreenFactory = (c) => new oT(c); class ST { /** * Converts a panorama stored in RGB right to left up to down format into a cubemap (6 faces). * * @param float32Array The source data. * @param inputWidth The width of the input panorama. * @param inputHeight The height of the input panorama. * @param size The willing size of the generated cubemap (each faces will be size * size pixels) * @returns The cubemap data */ static ConvertPanoramaToCubemap(e, t, i, r, s = !1) { if (!e) throw "ConvertPanoramaToCubemap: input cannot be null"; if (e.length != t * i * 3) throw "ConvertPanoramaToCubemap: input size is wrong"; const n = this.CreateCubemapTexture(r, this.FACE_FRONT, e, t, i, s), a = this.CreateCubemapTexture(r, this.FACE_BACK, e, t, i, s), l = this.CreateCubemapTexture(r, this.FACE_LEFT, e, t, i, s), o = this.CreateCubemapTexture(r, this.FACE_RIGHT, e, t, i, s), u = this.CreateCubemapTexture(r, this.FACE_UP, e, t, i, s), h = this.CreateCubemapTexture(r, this.FACE_DOWN, e, t, i, s); return { front: n, back: a, left: l, right: o, up: u, down: h, size: r, type: 1, format: 4, gammaSpace: !1 }; } static CreateCubemapTexture(e, t, i, r, s, n = !1) { const a = new ArrayBuffer(e * e * 4 * 3), l = new Float32Array(a), o = n ? Math.max(1, Math.round(r / 4 / e)) : 1, u = 1 / o, h = u * u, d = t[1].subtract(t[0]).scale(u / e), f = t[3].subtract(t[2]).scale(u / e), p = 1 / e; let m = 0; for (let _ = 0; _ < e; _++) for (let v = 0; v < o; v++) { let C = t[0], x = t[2]; for (let b = 0; b < e; b++) for (let S = 0; S < o; S++) { const M = x.subtract(C).scale(m).add(C); M.normalize(); const R = this.CalcProjectionSpherical(M, i, r, s); l[_ * e * 3 + b * 3 + 0] += R.r * h, l[_ * e * 3 + b * 3 + 1] += R.g * h, l[_ * e * 3 + b * 3 + 2] += R.b * h, C = C.add(d), x = x.add(f); } m += p * u; } return l; } static CalcProjectionSpherical(e, t, i, r) { let s = Math.atan2(e.z, e.x); const n = Math.acos(e.y); for (; s < -Math.PI; ) s += 2 * Math.PI; for (; s > Math.PI; ) s -= 2 * Math.PI; let a = s / Math.PI; const l = n / Math.PI; a = a * 0.5 + 0.5; let o = Math.round(a * i); o < 0 ? o = 0 : o >= i && (o = i - 1); let u = Math.round(l * r); u < 0 ? u = 0 : u >= r && (u = r - 1); const h = r - u - 1, d = t[h * i * 3 + o * 3 + 0], f = t[h * i * 3 + o * 3 + 1], p = t[h * i * 3 + o * 3 + 2]; return { r: d, g: f, b: p }; } } ST.FACE_LEFT = [new D(-1, -1, -1), new D(1, -1, -1), new D(-1, 1, -1), new D(1, 1, -1)]; ST.FACE_RIGHT = [new D(1, -1, 1), new D(-1, -1, 1), new D(1, 1, 1), new D(-1, 1, 1)]; ST.FACE_FRONT = [new D(1, -1, -1), new D(1, -1, 1), new D(1, 1, -1), new D(1, 1, 1)]; ST.FACE_BACK = [new D(-1, -1, 1), new D(-1, -1, -1), new D(-1, 1, 1), new D(-1, 1, -1)]; ST.FACE_DOWN = [new D(1, 1, -1), new D(1, 1, 1), new D(-1, 1, -1), new D(-1, 1, 1)]; ST.FACE_UP = [new D(-1, -1, -1), new D(-1, -1, 1), new D(1, -1, -1), new D(1, -1, 1)]; class mB { static _Ldexp(e, t) { return t > 1023 ? e * Math.pow(2, 1023) * Math.pow(2, t - 1023) : t < -1074 ? e * Math.pow(2, -1074) * Math.pow(2, t + 1074) : e * Math.pow(2, t); } static _Rgbe2float(e, t, i, r, s, n) { s > 0 ? (s = this._Ldexp(1, s - 136), e[n + 0] = t * s, e[n + 1] = i * s, e[n + 2] = r * s) : (e[n + 0] = 0, e[n + 1] = 0, e[n + 2] = 0); } static _ReadStringLine(e, t) { let i = "", r = ""; for (let s = t; s < e.length - t && (r = String.fromCharCode(e[s]), r != ` `); s++) i += r; return i; } /** * Reads header information from an RGBE texture stored in a native array. 
* More information on this format are available here: * https://en.wikipedia.org/wiki/RGBE_image_format * * @param uint8array The binary file stored in native array. * @returns The header information. */ // eslint-disable-next-line @typescript-eslint/naming-convention static RGBE_ReadHeader(e) { let t = 0, i = 0, r = this._ReadStringLine(e, 0); if (r[0] != "#" || r[1] != "?") throw "Bad HDR Format."; let s = !1, n = !1, a = 0; do a += r.length + 1, r = this._ReadStringLine(e, a), r == "FORMAT=32-bit_rle_rgbe" ? n = !0 : r.length == 0 && (s = !0); while (!s); if (!n) throw "HDR Bad header format, unsupported FORMAT"; a += r.length + 1, r = this._ReadStringLine(e, a); const o = /^-Y (.*) \+X (.*)$/g.exec(r); if (!o || o.length < 3) throw "HDR Bad header format, no size"; if (i = parseInt(o[2]), t = parseInt(o[1]), i < 8 || i > 32767) throw "HDR Bad header format, unsupported size"; return a += r.length + 1, { height: t, width: i, dataPosition: a }; } /** * Returns the cubemap information (each faces texture data) extracted from an RGBE texture. * This RGBE texture needs to store the information as a panorama. * * More information on this format are available here: * https://en.wikipedia.org/wiki/RGBE_image_format * * @param buffer The binary file stored in an array buffer. * @param size The expected size of the extracted cubemap. * @returns The Cube Map information. */ static GetCubeMapTextureData(e, t, i = !1) { const r = new Uint8Array(e), s = this.RGBE_ReadHeader(r), n = this.RGBE_ReadPixels(r, s); return ST.ConvertPanoramaToCubemap(n, s.width, s.height, t, i); } /** * Returns the pixels data extracted from an RGBE texture. * This pixels will be stored left to right up to down in the R G B order in one array. * * More information on this format are available here: * https://en.wikipedia.org/wiki/RGBE_image_format * * @param uint8array The binary file stored in an array buffer. * @param hdrInfo The header information of the file. * @returns The pixels data in RGB right to left up to down order. 
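*
* @example
* // hedged sketch: decode an RGBE panorama and convert it to a cubemap, assuming these minified
* // classes correspond to the public BABYLON.HDRTools and BABYLON.PanoramaToCubeMapTools helpers
* const bytes = new Uint8Array(hdrBuffer);               // raw .hdr file contents (placeholder)
* const info = BABYLON.HDRTools.RGBE_ReadHeader(bytes);
* const rgb = BABYLON.HDRTools.RGBE_ReadPixels(bytes, info);  // Float32Array of RGB triplets
* const cube = BABYLON.PanoramaToCubeMapTools.ConvertPanoramaToCubemap(rgb, info.width, info.height, 256);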
*/ // eslint-disable-next-line @typescript-eslint/naming-convention static RGBE_ReadPixels(e, t) { return this._RGBEReadPixelsRLE(e, t); } static _RGBEReadPixelsRLE(e, t) { let i = t.height; const r = t.width; let s, n, a, l, o, u = t.dataPosition, h = 0, d = 0, f = 0; const p = new ArrayBuffer(r * 4), m = new Uint8Array(p), _ = new ArrayBuffer(t.width * t.height * 4 * 3), v = new Float32Array(_); for (; i > 0; ) { if (s = e[u++], n = e[u++], a = e[u++], l = e[u++], s != 2 || n != 2 || a & 128 || t.width < 8 || t.width > 32767) return this._RGBEReadPixelsNOTRLE(e, t); if ((a << 8 | l) != r) throw "HDR Bad header format, wrong scan line width"; for (h = 0, f = 0; f < 4; f++) for (d = (f + 1) * r; h < d; ) if (s = e[u++], n = e[u++], s > 128) { if (o = s - 128, o == 0 || o > d - h) throw "HDR Bad Format, bad scanline data (run)"; for (; o-- > 0; ) m[h++] = n; } else { if (o = s, o == 0 || o > d - h) throw "HDR Bad Format, bad scanline data (non-run)"; if (m[h++] = n, --o > 0) for (let C = 0; C < o; C++) m[h++] = e[u++]; } for (f = 0; f < r; f++) s = m[f], n = m[f + r], a = m[f + 2 * r], l = m[f + 3 * r], this._Rgbe2float(v, s, n, a, l, (t.height - i) * r * 3 + f * 3); i--; } return v; } static _RGBEReadPixelsNOTRLE(e, t) { let i = t.height; const r = t.width; let s, n, a, l, o, u = t.dataPosition; const h = new ArrayBuffer(t.width * t.height * 4 * 3), d = new Float32Array(h); for (; i > 0; ) { for (o = 0; o < t.width; o++) s = e[u++], n = e[u++], a = e[u++], l = e[u++], this._Rgbe2float(d, s, n, a, l, (t.height - i) * r * 3 + o * 3); i--; } return d; } } const Cme = "hdrFilteringVertexShader", xme = `attribute vec2 position;varying vec3 direction;uniform vec3 up;uniform vec3 right;uniform vec3 front; #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN mat3 view=mat3(up,right,front);direction=view*vec3(position,1.0);gl_Position=vec4(position,0.0,1.0); #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[Cme] = xme; const bme = "hdrFilteringPixelShader", Eme = `#include #include #include #include uniform float alphaG;uniform samplerCube inputTexture;uniform vec2 vFilteringInfo;uniform float hdrScale;varying vec3 direction;void main() {vec3 color=radiance(alphaG,inputTexture,direction,vFilteringInfo);gl_FragColor=vec4(color*hdrScale,1.0);}`; je.ShadersStore[bme] = Eme; class pW { /** * Instantiates HDR filter for reflection maps * * @param engine Thin engine * @param options Options */ constructor(e, t = {}) { this._lodGenerationOffset = 0, this._lodGenerationScale = 0.8, this.quality = 4096, this.hdrScale = 1, this._engine = e, this.hdrScale = t.hdrScale || this.hdrScale, this.quality = t.quality || this.quality; } _createRenderTarget(e) { let t = 0; this._engine.getCaps().textureHalfFloatRender ? 
t = 2 : this._engine.getCaps().textureFloatRender && (t = 1); const i = this._engine.createRenderTargetCubeTexture(e, { format: 5, type: t, createMipMaps: !0, generateMipMaps: !1, generateDepthBuffer: !1, generateStencilBuffer: !1, samplingMode: 1 }); return this._engine.updateTextureWrappingMode(i.texture, 0, 0, 0), this._engine.updateTextureSamplingMode(3, i.texture, !0), i; } _prefilterInternal(e) { const t = e.getSize().width, i = yt.ILog2(t) + 1, r = this._effectWrapper.effect, s = this._createRenderTarget(t); this._effectRenderer.saveStates(), this._effectRenderer.setViewport(); const n = e.getInternalTexture(); n && this._engine.updateTextureSamplingMode(3, n, !0), this._effectRenderer.applyEffectWrapper(this._effectWrapper); const a = [ [new D(0, 0, -1), new D(0, -1, 0), new D(1, 0, 0)], [new D(0, 0, 1), new D(0, -1, 0), new D(-1, 0, 0)], [new D(1, 0, 0), new D(0, 0, 1), new D(0, 1, 0)], [new D(1, 0, 0), new D(0, 0, -1), new D(0, -1, 0)], [new D(1, 0, 0), new D(0, -1, 0), new D(0, 0, 1)], [new D(-1, 0, 0), new D(0, -1, 0), new D(0, 0, -1)] // NegativeZ ]; r.setFloat("hdrScale", this.hdrScale), r.setFloat2("vFilteringInfo", e.getSize().width, i), r.setTexture("inputTexture", e); for (let u = 0; u < 6; u++) { r.setVector3("up", a[u][0]), r.setVector3("right", a[u][1]), r.setVector3("front", a[u][2]); for (let h = 0; h < i; h++) { this._engine.bindFramebuffer(s, u, void 0, void 0, !0, h), this._effectRenderer.applyEffectWrapper(this._effectWrapper); let d = Math.pow(2, (h - this._lodGenerationOffset) / this._lodGenerationScale) / t; h === 0 && (d = 0), r.setFloat("alphaG", d), this._effectRenderer.draw(); } } this._effectRenderer.restoreStates(), this._engine.restoreDefaultFramebuffer(), this._engine._releaseTexture(e._texture); const l = s.texture.type, o = s.texture.format; return s._swapAndDie(e._texture), e._texture.type = l, e._texture.format = o, e.gammaSpace = !1, e.lodGenerationOffset = this._lodGenerationOffset, e.lodGenerationScale = this._lodGenerationScale, e._prefiltered = !0, e; } _createEffect(e, t) { const i = []; return e.gammaSpace && i.push("#define GAMMA_INPUT"), i.push("#define NUM_SAMPLES " + this.quality + "u"), new t6({ engine: this._engine, name: "hdrFiltering", vertexShader: "hdrFiltering", fragmentShader: "hdrFiltering", samplerNames: ["inputTexture"], uniformNames: ["vSampleDirections", "vWeights", "up", "right", "front", "vFilteringInfo", "hdrScale", "alphaG"], useShaderStore: !0, defines: i, onCompiled: t }); } /** * Get a value indicating if the filter is ready to be used * @param texture Texture to filter * @returns true if the filter is ready */ isReady(e) { return e.isReady() && this._effectWrapper.effect.isReady(); } /** * Prefilters a cube texture to have mipmap levels representing roughness values. * Prefiltering will be invoked at the end of next rendering pass. * This has to be done once the map is loaded, and has not been prefiltered by a third party software. * See http://blog.selfshadow.com/publications/s2013-shading-course/karis/s2013_pbs_epic_notes_v2.pdf for more information * @param texture Texture to filter * @param onFinished Callback when filtering is done * @returns Promise called when prefiltering is done */ prefilter(e, t = null) { return this._engine._features.allowTexturePrefiltering ? 
new Promise((i) => { this._effectRenderer = new vw(this._engine), this._effectWrapper = this._createEffect(e), this._effectWrapper.effect.executeWhenCompiled(() => { this._prefilterInternal(e), this._effectRenderer.dispose(), this._effectWrapper.dispose(), i(), t && t(); }); }) : (Ce.Warn("HDR prefiltering is not available in WebGL 1., you can use real time filtering instead."), Promise.reject("HDR prefiltering is not available in WebGL 1., you can use real time filtering instead.")); } } class ZC extends dn { /** * Sets whether or not the texture is blocking during loading. */ set isBlocking(e) { this._isBlocking = e; } /** * Gets whether or not the texture is blocking during loading. */ get isBlocking() { return this._isBlocking; } /** * Sets texture matrix rotation angle around Y axis in radians. */ set rotationY(e) { this._rotationY = e, this.setReflectionTextureMatrix(Ae.RotationY(this._rotationY)); } /** * Gets texture matrix rotation angle around Y axis radians. */ get rotationY() { return this._rotationY; } /** * Gets or sets the size of the bounding box associated with the cube texture * When defined, the cubemap will switch to local mode * @see https://community.arm.com/graphics/b/blog/posts/reflections-based-on-local-cubemaps-in-unity * @example https://www.babylonjs-playground.com/#RNASML */ set boundingBoxSize(e) { if (this._boundingBoxSize && this._boundingBoxSize.equals(e)) return; this._boundingBoxSize = e; const t = this.getScene(); t && t.markAllMaterialsAsDirty(1); } get boundingBoxSize() { return this._boundingBoxSize; } /** * Instantiates an HDRTexture from the following parameters. * * @param url The location of the HDR raw data (Panorama stored in RGBE format) * @param sceneOrEngine The scene or engine the texture will be used in * @param size The cubemap desired size (the more it increases the longer the generation will be) * @param noMipmap Forces to not generate the mipmap if true * @param generateHarmonics Specifies whether you want to extract the polynomial harmonics during the generation process * @param gammaSpace Specifies if the texture will be use in gamma or linear space (the PBR material requires those texture in linear space, but the standard material would require them in Gamma space) * @param prefilterOnLoad Prefilters HDR texture to allow use of this texture as a PBR reflection texture. * @param onLoad * @param onError */ constructor(e, t, i, r = !1, s = !0, n = !1, a = !1, l = null, o = null, u = !1) { var h; super(t), this._generateHarmonics = !0, this._onError = null, this._isBlocking = !0, this._rotationY = 0, this.boundingBoxPosition = D.Zero(), this.onLoadObservable = new Fe(), e && (this._coordinatesMode = De.CUBIC_MODE, this.name = e, this.url = e, this.hasAlpha = !1, this.isCube = !0, this._textureMatrix = Ae.Identity(), this._prefilterOnLoad = a, this._onLoad = () => { this.onLoadObservable.notifyObservers(this), l && l(); }, this._onError = o, this.gammaSpace = n, this._noMipmap = r, this._size = i, this._supersample = u, this._generateHarmonics = s, this._texture = this._getFromCache(e, this._noMipmap, void 0, void 0, void 0, this.isCube), this._texture ? this._texture.isReady ? Ve.SetImmediate(() => this._onLoad()) : this._texture.onLoadedObservable.add(this._onLoad) : !((h = this.getScene()) === null || h === void 0) && h.useDelayedTextureLoading ? this.delayLoadState = 4 : this._loadTexture()); } /** * Get the current class name of the texture useful for serialization or dynamic coding. 
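*
* @example
* // usage sketch: this minified class is registered as "BABYLON.HDRCubeTexture" further below;
* // the URL and `scene` are placeholders
* const hdrTex = new BABYLON.HDRCubeTexture("textures/environment.hdr", scene, 512,
*     false, true, false, true); // size 512, generate harmonics, linear space, prefilter on load
* scene.environmentTexture = hdrTex; // used as the PBR reflection source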
* @returns "HDRCubeTexture" */ getClassName() { return "HDRCubeTexture"; } /** * Occurs when the file is raw .hdr file. */ _loadTexture() { const e = this._getEngine(), t = e.getCaps(); let i = 0; t.textureFloat && t.textureFloatLinearFiltering ? i = 1 : t.textureHalfFloat && t.textureHalfFloatLinearFiltering && (i = 2); const r = (s) => { this.lodGenerationOffset = 0, this.lodGenerationScale = 0.8; const n = mB.GetCubeMapTextureData(s, this._size, this._supersample); if (this._generateHarmonics) { const u = GI.ConvertCubeMapToSphericalPolynomial(n); this.sphericalPolynomial = u; } const a = []; let l = null, o = null; for (let u = 0; u < 6; u++) { i === 2 ? o = new Uint16Array(this._size * this._size * 3) : i === 0 && (l = new Uint8Array(this._size * this._size * 3)); const h = n[ZC._FacesMapping[u]]; if (this.gammaSpace || o || l) { for (let d = 0; d < this._size * this._size; d++) if (this.gammaSpace && (h[d * 3 + 0] = Math.pow(h[d * 3 + 0], nO), h[d * 3 + 1] = Math.pow(h[d * 3 + 1], nO), h[d * 3 + 2] = Math.pow(h[d * 3 + 2], nO)), o && (o[d * 3 + 0] = GA(h[d * 3 + 0]), o[d * 3 + 1] = GA(h[d * 3 + 1]), o[d * 3 + 2] = GA(h[d * 3 + 2])), l) { let f = Math.max(h[d * 3 + 0] * 255, 0), p = Math.max(h[d * 3 + 1] * 255, 0), m = Math.max(h[d * 3 + 2] * 255, 0); const _ = Math.max(Math.max(f, p), m); if (_ > 255) { const v = 255 / _; f *= v, p *= v, m *= v; } l[d * 3 + 0] = f, l[d * 3 + 1] = p, l[d * 3 + 2] = m; } } o ? a.push(o) : l ? a.push(l) : a.push(h); } return a; }; if (e._features.allowTexturePrefiltering && this._prefilterOnLoad) { const s = this._onLoad, n = new pW(e); this._onLoad = () => { n.prefilter(this, s); }; } this._texture = e.createRawCubeTextureFromUrl(this.url, this.getScene(), this._size, 4, i, this._noMipmap, r, null, this._onLoad, this._onError); } clone() { const e = new ZC(this.url, this.getScene() || this._getEngine(), this._size, this._noMipmap, this._generateHarmonics, this.gammaSpace); return e.level = this.level, e.wrapU = this.wrapU, e.wrapV = this.wrapV, e.coordinatesIndex = this.coordinatesIndex, e.coordinatesMode = this.coordinatesMode, e; } // Methods delayLoad() { this.delayLoadState === 4 && (this.delayLoadState = 1, this._texture = this._getFromCache(this.url, this._noMipmap), this._texture || this._loadTexture()); } /** * Get the texture reflection matrix used to rotate/transform the reflection. * @returns the reflection matrix */ getReflectionTextureMatrix() { return this._textureMatrix; } /** * Set the texture reflection matrix used to rotate/transform the reflection. * @param value Define the reflection matrix to set */ setReflectionTextureMatrix(e) { var t; this._textureMatrix = e, e.updateFlag !== this._textureMatrix.updateFlag && e.isIdentity() !== this._textureMatrix.isIdentity() && ((t = this.getScene()) === null || t === void 0 || t.markAllMaterialsAsDirty(1, (i) => i.getActiveTextures().indexOf(this) !== -1)); } /** * Dispose the texture and release its associated resources. 
*/ dispose() { this.onLoadObservable.clear(), super.dispose(); } /** * Parses a JSON representation of an HDR Texture in order to create the texture * @param parsedTexture Define the JSON representation * @param scene Define the scene the texture should be created in * @param rootUrl Define the root url in case we need to load relative dependencies * @returns the newly created texture after parsing */ static Parse(e, t, i) { let r = null; return e.name && !e.isRenderTarget && (r = new ZC(i + e.name, t, e.size, e.noMipmap, e.generateHarmonics, e.useInGammaSpace), r.name = e.name, r.hasAlpha = e.hasAlpha, r.level = e.level, r.coordinatesMode = e.coordinatesMode, r.isBlocking = e.isBlocking), r && (e.boundingBoxPosition && (r.boundingBoxPosition = D.FromArray(e.boundingBoxPosition)), e.boundingBoxSize && (r.boundingBoxSize = D.FromArray(e.boundingBoxSize)), e.rotationY && (r.rotationY = e.rotationY)), r; } serialize() { if (!this.name) return null; const e = {}; return e.name = this.name, e.hasAlpha = this.hasAlpha, e.isCube = !0, e.level = this.level, e.size = this._size, e.coordinatesMode = this.coordinatesMode, e.useInGammaSpace = this.gammaSpace, e.generateHarmonics = this._generateHarmonics, e.customType = "BABYLON.HDRCubeTexture", e.noMipmap = this._noMipmap, e.isBlocking = this._isBlocking, e.rotationY = this._rotationY, e; } } ZC._FacesMapping = ["right", "left", "up", "down", "front", "back"]; Be("BABYLON.HDRCubeTexture", ZC); class h5 { /** * Gets or sets the influence of this target (ie. its weight in the overall morphing) */ get influence() { return this._influence; } set influence(e) { if (this._influence === e) return; const t = this._influence; this._influence = e, this.onInfluenceChanged.hasObservers() && this.onInfluenceChanged.notifyObservers(t === 0 || e === 0); } /** * Gets or sets the animation properties override */ get animationPropertiesOverride() { return !this._animationPropertiesOverride && this._scene ? 
this._scene.animationPropertiesOverride : this._animationPropertiesOverride; } set animationPropertiesOverride(e) { this._animationPropertiesOverride = e; } /** * Creates a new MorphTarget * @param name defines the name of the target * @param influence defines the influence to use * @param scene defines the scene the morphtarget belongs to */ constructor(e, t = 0, i = null) { this.name = e, this.animations = [], this._positions = null, this._normals = null, this._tangents = null, this._uvs = null, this._uniqueId = 0, this.onInfluenceChanged = new Fe(), this._onDataLayoutChanged = new Fe(), this._animationPropertiesOverride = null, this._scene = i || gi.LastCreatedScene, this.influence = t, this._scene && (this._uniqueId = this._scene.getUniqueId()); } /** * Gets the unique ID of this manager */ get uniqueId() { return this._uniqueId; } /** * Gets a boolean defining if the target contains position data */ get hasPositions() { return !!this._positions; } /** * Gets a boolean defining if the target contains normal data */ get hasNormals() { return !!this._normals; } /** * Gets a boolean defining if the target contains tangent data */ get hasTangents() { return !!this._tangents; } /** * Gets a boolean defining if the target contains texture coordinates data */ get hasUVs() { return !!this._uvs; } /** * Affects position data to this target * @param data defines the position data to use */ setPositions(e) { const t = this.hasPositions; this._positions = e, t !== this.hasPositions && this._onDataLayoutChanged.notifyObservers(void 0); } /** * Gets the position data stored in this target * @returns a FloatArray containing the position data (or null if not present) */ getPositions() { return this._positions; } /** * Affects normal data to this target * @param data defines the normal data to use */ setNormals(e) { const t = this.hasNormals; this._normals = e, t !== this.hasNormals && this._onDataLayoutChanged.notifyObservers(void 0); } /** * Gets the normal data stored in this target * @returns a FloatArray containing the normal data (or null if not present) */ getNormals() { return this._normals; } /** * Affects tangent data to this target * @param data defines the tangent data to use */ setTangents(e) { const t = this.hasTangents; this._tangents = e, t !== this.hasTangents && this._onDataLayoutChanged.notifyObservers(void 0); } /** * Gets the tangent data stored in this target * @returns a FloatArray containing the tangent data (or null if not present) */ getTangents() { return this._tangents; } /** * Affects texture coordinates data to this target * @param data defines the texture coordinates data to use */ setUVs(e) { const t = this.hasUVs; this._uvs = e, t !== this.hasUVs && this._onDataLayoutChanged.notifyObservers(void 0); } /** * Gets the texture coordinates data stored in this target * @returns a FloatArray containing the texture coordinates data (or null if not present) */ getUVs() { return this._uvs; } /** * Clone the current target * @returns a new MorphTarget */ clone() { const e = St.Clone(() => new h5(this.name, this.influence, this._scene), this); return e._positions = this._positions, e._normals = this._normals, e._tangents = this._tangents, e._uvs = this._uvs, e; } /** * Serializes the current target into a Serialization object * @returns the serialized object */ serialize() { const e = {}; return e.name = this.name, e.influence = this.influence, e.positions = Array.prototype.slice.call(this.getPositions()), this.id != null && (e.id = this.id), this.hasNormals && (e.normals = 
Array.prototype.slice.call(this.getNormals())), this.hasTangents && (e.tangents = Array.prototype.slice.call(this.getTangents())), this.hasUVs && (e.uvs = Array.prototype.slice.call(this.getUVs())), St.AppendSerializedAnimations(this, e), e; } /** * Returns the string "MorphTarget" * @returns "MorphTarget" */ getClassName() { return "MorphTarget"; } // Statics /** * Creates a new target from serialized data * @param serializationObject defines the serialized data to use * @param scene defines the hosting scene * @returns a new MorphTarget */ static Parse(e, t) { const i = new h5(e.name, e.influence); if (i.setPositions(e.positions), e.id != null && (i.id = e.id), e.normals && i.setNormals(e.normals), e.tangents && i.setTangents(e.tangents), e.uvs && i.setUVs(e.uvs), e.animations) { for (let r = 0; r < e.animations.length; r++) { const s = e.animations[r], n = Qo("BABYLON.Animation"); n && i.animations.push(n.Parse(s)); } e.autoAnimate && t && t.beginAnimation(i, e.autoAnimateFrom, e.autoAnimateTo, e.autoAnimateLoop, e.autoAnimateSpeed || 1); } return i; } /** * Creates a MorphTarget from mesh data * @param mesh defines the source mesh * @param name defines the name to use for the new target * @param influence defines the influence to attach to the target * @returns a new MorphTarget */ static FromMesh(e, t, i) { t || (t = e.name); const r = new h5(t, i, e.getScene()); return r.setPositions(e.getVerticesData(Y.PositionKind)), e.isVerticesDataPresent(Y.NormalKind) && r.setNormals(e.getVerticesData(Y.NormalKind)), e.isVerticesDataPresent(Y.TangentKind) && r.setTangents(e.getVerticesData(Y.TangentKind)), e.isVerticesDataPresent(Y.UVKind) && r.setUVs(e.getVerticesData(Y.UVKind)), r; } } F([ W() ], h5.prototype, "id", void 0); class DU extends De { /** * Gets the number of layers of the texture */ get depth() { return this._depth; } /** * Create a new RawTexture2DArray * @param data defines the data of the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param depth defines the number of layers of the texture * @param format defines the texture format to use * @param scene defines the hosting scene * @param generateMipMaps defines a boolean indicating if mip levels should be generated (true by default) * @param invertY defines if texture must be stored with Y axis inverted * @param samplingMode defines the sampling mode to use (Texture.TRILINEAR_SAMPLINGMODE by default) * @param textureType defines the texture Type (Engine.TEXTURETYPE_UNSIGNED_INT, Engine.TEXTURETYPE_FLOAT...) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) */ constructor(e, t, i, r, s, n, a = !0, l = !1, o = De.TRILINEAR_SAMPLINGMODE, u = 0, h) { super(null, n, !a, l), this.format = s, this._texture = n.getEngine().createRawTexture2DArray(e, t, i, r, s, a, l, o, null, u, h), this._depth = r, this.is2DArray = !0; } /** * Update the texture with new data * @param data defines the data to store in the texture */ update(e) { this._texture && this._getEngine().updateRawTexture2DArray(this._texture, e, this._texture.format, this._texture.invertY, null, this._texture.type); } /** * Creates a RGBA texture from some data. 
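*
* @example
* // hedged sketch, assuming this minified class is the public BABYLON.RawTexture2DArray;
* // builds a small float RGBA 2D-array texture with 4 layers
* const w = 16, h = 16, layers = 4;
* const data = new Float32Array(w * h * layers * 4); // RGBA per texel, one block per layer
* const texArray = BABYLON.RawTexture2DArray.CreateRGBATexture(data, w, h, layers, scene,
*     false, false, BABYLON.Texture.NEAREST_SAMPLINGMODE, BABYLON.Engine.TEXTURETYPE_FLOAT);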
* @param data Define the texture data * @param width Define the width of the texture * @param height Define the height of the texture * @param depth defines the number of layers of the texture * @param scene defines the scene the texture will belong to * @param generateMipMaps Define whether or not to create mip maps for the texture * @param invertY define if the data should be flipped on Y when uploaded to the GPU * @param samplingMode define the texture sampling mode (Texture.xxx_SAMPLINGMODE) * @param type define the format of the data (int, float... Engine.TEXTURETYPE_xxx) * @returns the RGBA texture */ static CreateRGBATexture(e, t, i, r, s, n = !0, a = !1, l = 3, o = 0) { return new DU(e, t, i, r, 5, s, n, a, l, o); } } class O4 { /** * Sets a boolean indicating that adding new target or updating an existing target will not update the underlying data buffers */ set areUpdatesFrozen(e) { e ? this._blockCounter++ : (this._blockCounter--, this._blockCounter <= 0 && (this._blockCounter = 0, this._syncActiveTargets(!0))); } get areUpdatesFrozen() { return this._blockCounter > 0; } /** * Creates a new MorphTargetManager * @param scene defines the current scene */ constructor(e = null) { if (this._targets = new Array(), this._targetInfluenceChangedObservers = new Array(), this._targetDataLayoutChangedObservers = new Array(), this._activeTargets = new xc(16), this._supportsNormals = !1, this._supportsTangents = !1, this._supportsUVs = !1, this._vertexCount = 0, this._textureVertexStride = 0, this._textureWidth = 0, this._textureHeight = 1, this._uniqueId = 0, this._tempInfluences = new Array(), this._canUseTextureForTargets = !1, this._blockCounter = 0, this._parentContainer = null, this.optimizeInfluencers = !0, this.enableNormalMorphing = !0, this.enableTangentMorphing = !0, this.enableUVMorphing = !0, this._useTextureToStoreTargets = !0, e || (e = gi.LastCreatedScene), this._scene = e, this._scene) { this._scene.addMorphTargetManager(this), this._uniqueId = this._scene.getUniqueId(); const t = this._scene.getEngine().getCaps(); this._canUseTextureForTargets = t.canUseGLVertexID && t.textureFloat && t.maxVertexTextureImageUnits > 0 && t.texture2DArrayMaxLayerCount > 1; } } /** * Gets the unique ID of this manager */ get uniqueId() { return this._uniqueId; } /** * Gets the number of vertices handled by this manager */ get vertexCount() { return this._vertexCount; } /** * Gets a boolean indicating if this manager supports morphing of normals */ get supportsNormals() { return this._supportsNormals && this.enableNormalMorphing; } /** * Gets a boolean indicating if this manager supports morphing of tangents */ get supportsTangents() { return this._supportsTangents && this.enableTangentMorphing; } /** * Gets a boolean indicating if this manager supports morphing of texture coordinates */ get supportsUVs() { return this._supportsUVs && this.enableUVMorphing; } /** * Gets the number of targets stored in this manager */ get numTargets() { return this._targets.length; } /** * Gets the number of influencers (ie. the number of targets with influences > 0) */ get numInfluencers() { return this._activeTargets.length; } /** * Gets the list of influences (one per target) */ get influences() { return this._influences; } /** * Gets or sets a boolean indicating that targets should be stored as a texture instead of using vertex attributes (default is true). 
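*
* @example
* // wiring sketch, assuming the public MorphTargetManager / MorphTarget names map to these classes;
* // `baseMesh` and `variantMesh` are placeholder meshes with identical vertex counts
* const manager = new BABYLON.MorphTargetManager(scene);
* const target = BABYLON.MorphTarget.FromMesh(variantMesh, "variant", 0); // copies positions/normals/uvs
* manager.addTarget(target);
* baseMesh.morphTargetManager = manager;
* target.influence = 0.75;                    // blend 75% toward the variant shape
* manager.useTextureToStoreTargets = false;   // optionally fall back to vertex attributes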
* Please note that this option is not available if the hardware does not support it */ get useTextureToStoreTargets() { return this._useTextureToStoreTargets; } set useTextureToStoreTargets(e) { this._useTextureToStoreTargets = e; } /** * Gets a boolean indicating that the targets are stored into a texture (instead of as attributes) */ get isUsingTextureForTargets() { var e; return O4.EnableTextureStorage && this.useTextureToStoreTargets && this._canUseTextureForTargets && !(!((e = this._scene) === null || e === void 0) && e.getEngine().getCaps().disableMorphTargetTexture); } /** * Gets the active target at specified index. An active target is a target with an influence > 0 * @param index defines the index to check * @returns the requested target */ getActiveTarget(e) { return this._activeTargets.data[e]; } /** * Gets the target at specified index * @param index defines the index to check * @returns the requested target */ getTarget(e) { return this._targets[e]; } /** * Add a new target to this manager * @param target defines the target to add */ addTarget(e) { this._targets.push(e), this._targetInfluenceChangedObservers.push(e.onInfluenceChanged.add((t) => { this._syncActiveTargets(t); })), this._targetDataLayoutChangedObservers.push(e._onDataLayoutChanged.add(() => { this._syncActiveTargets(!0); })), this._syncActiveTargets(!0); } /** * Removes a target from the manager * @param target defines the target to remove */ removeTarget(e) { const t = this._targets.indexOf(e); t >= 0 && (this._targets.splice(t, 1), e.onInfluenceChanged.remove(this._targetInfluenceChangedObservers.splice(t, 1)[0]), e._onDataLayoutChanged.remove(this._targetDataLayoutChangedObservers.splice(t, 1)[0]), this._syncActiveTargets(!0)), this._scene && this._scene.stopAnimation(e); } /** * @internal */ _bind(e) { e.setFloat3("morphTargetTextureInfo", this._textureVertexStride, this._textureWidth, this._textureHeight), e.setFloatArray("morphTargetTextureIndices", this._morphTargetTextureIndices), e.setTexture("morphTargets", this._targetStoreTexture); } /** * Clone the current manager * @returns a new MorphTargetManager */ clone() { const e = new O4(this._scene); for (const t of this._targets) e.addTarget(t.clone()); return e.enableNormalMorphing = this.enableNormalMorphing, e.enableTangentMorphing = this.enableTangentMorphing, e.enableUVMorphing = this.enableUVMorphing, e; } /** * Serializes the current manager into a Serialization object * @returns the serialized object */ serialize() { const e = {}; e.id = this.uniqueId, e.targets = []; for (const t of this._targets) e.targets.push(t.serialize()); return e; } _syncActiveTargets(e) { if (this.areUpdatesFrozen) return; let t = 0; this._activeTargets.reset(), this._supportsNormals = !0, this._supportsTangents = !0, this._supportsUVs = !0, this._vertexCount = 0, this._scene && this._targets.length > this._scene.getEngine().getCaps().texture2DArrayMaxLayerCount && (this.useTextureToStoreTargets = !1), (!this._morphTargetTextureIndices || this._morphTargetTextureIndices.length !== this._targets.length) && (this._morphTargetTextureIndices = new Float32Array(this._targets.length)); let i = -1; for (const r of this._targets) { if (i++, r.influence === 0 && this.optimizeInfluencers) continue; if (this._activeTargets.length >= O4.MaxActiveMorphTargetsInVertexAttributeMode && !this.isUsingTextureForTargets) break; this._activeTargets.push(r), this._morphTargetTextureIndices[t] = i, this._tempInfluences[t++] = r.influence, this._supportsNormals = this._supportsNormals && 
r.hasNormals, this._supportsTangents = this._supportsTangents && r.hasTangents, this._supportsUVs = this._supportsUVs && r.hasUVs; const s = r.getPositions(); if (s) { const n = s.length / 3; if (this._vertexCount === 0) this._vertexCount = n; else if (this._vertexCount !== n) { Ce.Error("Incompatible target. Targets must all have the same vertices count."); return; } } } this._morphTargetTextureIndices.length !== t && (this._morphTargetTextureIndices = this._morphTargetTextureIndices.slice(0, t)), (!this._influences || this._influences.length !== t) && (this._influences = new Float32Array(t)); for (let r = 0; r < t; r++) this._influences[r] = this._tempInfluences[r]; e && this.synchronize(); } /** * Synchronize the targets with all the meshes using this morph target manager */ synchronize() { if (!(!this._scene || this.areUpdatesFrozen)) { if (this.isUsingTextureForTargets && this._vertexCount) { this._textureVertexStride = 1, this._supportsNormals && this._textureVertexStride++, this._supportsTangents && this._textureVertexStride++, this._supportsUVs && this._textureVertexStride++, this._textureWidth = this._vertexCount * this._textureVertexStride, this._textureHeight = 1; const e = this._scene.getEngine().getCaps().maxTextureSize; this._textureWidth > e && (this._textureHeight = Math.ceil(this._textureWidth / e), this._textureWidth = e); let t = !0; if (this._targetStoreTexture) { const i = this._targetStoreTexture.getSize(); i.width === this._textureWidth && i.height === this._textureHeight && this._targetStoreTexture.depth === this._targets.length && (t = !1); } if (t) { this._targetStoreTexture && this._targetStoreTexture.dispose(); const i = this._targets.length, r = new Float32Array(i * this._textureWidth * this._textureHeight * 4); let s = 0; for (let n = 0; n < i; n++) { const a = this._targets[n], l = a.getPositions(), o = a.getNormals(), u = a.getUVs(), h = a.getTangents(); if (!l) { n === 0 && Ce.Error("Invalid morph target. 
Target must have positions."); return; } s = n * this._textureWidth * this._textureHeight * 4; for (let d = 0; d < this._vertexCount; d++) r[s] = l[d * 3], r[s + 1] = l[d * 3 + 1], r[s + 2] = l[d * 3 + 2], s += 4, this._supportsNormals && o && (r[s] = o[d * 3], r[s + 1] = o[d * 3 + 1], r[s + 2] = o[d * 3 + 2], s += 4), this._supportsUVs && u && (r[s] = u[d * 2], r[s + 1] = u[d * 2 + 1], s += 4), this._supportsTangents && h && (r[s] = h[d * 3], r[s + 1] = h[d * 3 + 1], r[s + 2] = h[d * 3 + 2], s += 4); } this._targetStoreTexture = DU.CreateRGBATexture(r, this._textureWidth, this._textureHeight, i, this._scene, !1, !1, 1, 1); } } for (const e of this._scene.meshes) e.morphTargetManager === this && e._syncGeometryWithMorphTargetManager(); } } /** * Release all resources */ dispose() { if (this._targetStoreTexture && this._targetStoreTexture.dispose(), this._targetStoreTexture = null, this._scene) { if (this._scene.removeMorphTargetManager(this), this._parentContainer) { const e = this._parentContainer.morphTargetManagers.indexOf(this); e > -1 && this._parentContainer.morphTargetManagers.splice(e, 1), this._parentContainer = null; } for (const e of this._targets) this._scene.stopAnimation(e); } } // Statics /** * Creates a new MorphTargetManager from serialized data * @param serializationObject defines the serialized data * @param scene defines the hosting scene * @returns the new MorphTargetManager */ static Parse(e, t) { const i = new O4(t); i._uniqueId = e.id; for (const r of e.targets) i.addTarget(h5.Parse(r, t)); return i; } } O4.EnableTextureStorage = !0; O4.MaxActiveMorphTargetsInVertexAttributeMode = 8; class _N { constructor() { this._hasHit = !1, this._hitDistance = 0, this._hitNormalWorld = D.Zero(), this._hitPointWorld = D.Zero(), this._rayFromWorld = D.Zero(), this._rayToWorld = D.Zero(), this._triangleIndex = -1; } /** * Gets if there was a hit */ get hasHit() { return this._hasHit; } /** * Gets the distance from the hit */ get hitDistance() { return this._hitDistance; } /** * Gets the hit normal/direction in the world */ get hitNormalWorld() { return this._hitNormalWorld; } /** * Gets the hit point in the world */ get hitPointWorld() { return this._hitPointWorld; } /** * Gets the ray "start point" of the ray in the world */ get rayFromWorld() { return this._rayFromWorld; } /** * Gets the ray "end point" of the ray in the world */ get rayToWorld() { return this._rayToWorld; } /* * The index of the original triangle which was hit. Will be -1 if contact point is not on a mesh shape */ get triangleIndex() { return this._triangleIndex; } /** * Sets the hit data (normal & point in world space) * @param hitNormalWorld defines the normal in world space * @param hitPointWorld defines the point in world space */ setHitData(e, t, i) { this._hasHit = !0, this._hitNormalWorld.set(e.x, e.y, e.z), this._hitPointWorld.set(t.x, t.y, t.z), this._triangleIndex = i ?? 
-1; } /** * Sets the distance from the start point to the hit point * @param distance */ setHitDistance(e) { this._hitDistance = e; } /** * Calculates the distance manually */ calculateHitDistance() { this._hitDistance = D.Distance(this._rayFromWorld, this._hitPointWorld); } /** * Resets all the values to default * @param from The from point on world space * @param to The to point on world space */ reset(e = D.Zero(), t = D.Zero()) { this._rayFromWorld.copyFrom(e), this._rayToWorld.copyFrom(t), this._hasHit = !1, this._hitDistance = 0, this._hitNormalWorld.setAll(0), this._hitPointWorld.setAll(0), this._triangleIndex = -1, this.body = void 0, this.bodyIndex = void 0; } } let _W = class vre { /** * * @returns version */ getPluginVersion() { return this._physicsPlugin.getPluginVersion(); } /** * Factory used to create the default physics plugin. * @returns The default physics plugin */ static DefaultPluginFactory() { throw yr("CannonJSPlugin"); } /** * Creates a new Physics Engine * @param gravity defines the gravity vector used by the simulation * @param _physicsPlugin defines the plugin to use (CannonJS by default) */ constructor(e, t = vre.DefaultPluginFactory()) { if (this._physicsPlugin = t, this._impostors = [], this._joints = [], this._subTimeStep = 0, this._uniqueIdCounter = 0, !this._physicsPlugin.isSupported()) throw new Error("Physics Engine " + this._physicsPlugin.name + " cannot be found. Please make sure it is included."); e = e || new D(0, -9.807, 0), this.setGravity(e), this.setTimeStep(); } /** * Sets the gravity vector used by the simulation * @param gravity defines the gravity vector to use */ setGravity(e) { this.gravity = e, this._physicsPlugin.setGravity(this.gravity); } /** * Set the time step of the physics engine. * Default is 1/60. * To slow it down, enter 1/600 for example. * To speed it up, 1/30 * @param newTimeStep defines the new timestep to apply to this world. */ setTimeStep(e = 1 / 60) { this._physicsPlugin.setTimeStep(e); } /** * Get the time step of the physics engine. * @returns the current time step */ getTimeStep() { return this._physicsPlugin.getTimeStep(); } /** * Set the sub time step of the physics engine. * Default is 0 meaning there is no sub steps * To increase physics resolution precision, set a small value (like 1 ms) * @param subTimeStep defines the new sub timestep used for physics resolution. */ setSubTimeStep(e = 0) { this._subTimeStep = e; } /** * Get the sub time step of the physics engine. * @returns the current sub time step */ getSubTimeStep() { return this._subTimeStep; } /** * Release all resources */ dispose() { this._impostors.forEach(function(e) { e.dispose(); }), this._physicsPlugin.dispose(); } /** * Gets the name of the current physics plugin * @returns the name of the plugin */ getPhysicsPluginName() { return this._physicsPlugin.name; } /** * Adding a new impostor for the impostor tracking. * This will be done by the impostor itself. * @param impostor the impostor to add */ addImpostor(e) { this._impostors.push(e), e.uniqueId = this._uniqueIdCounter++, e.parent || this._physicsPlugin.generatePhysicsBody(e); } /** * Remove an impostor from the engine. * This impostor and its mesh will not longer be updated by the physics engine. 
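*
* @example
* // setup sketch (v1 physics API), assuming the public scene.enablePhysics / PhysicsImpostor
* // wrappers drive this engine; requires cannon.js to be loaded for CannonJSPlugin
* scene.enablePhysics(new BABYLON.Vector3(0, -9.81, 0), new BABYLON.CannonJSPlugin());
* const sphere = BABYLON.MeshBuilder.CreateSphere("s", { diameter: 1 }, scene);
* sphere.physicsImpostor = new BABYLON.PhysicsImpostor(sphere,
*     BABYLON.PhysicsImpostor.SphereImpostor, { mass: 1, restitution: 0.7 }, scene);
* // ...later, stop simulating the mesh (removes the impostor from the engine):
* sphere.physicsImpostor.dispose();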
* @param impostor the impostor to remove */ removeImpostor(e) { const t = this._impostors.indexOf(e); t > -1 && this._impostors.splice(t, 1).length && this.getPhysicsPlugin().removePhysicsBody(e); } /** * Add a joint to the physics engine * @param mainImpostor defines the main impostor to which the joint is added. * @param connectedImpostor defines the impostor that is connected to the main impostor using this joint * @param joint defines the joint that will connect both impostors. */ addJoint(e, t, i) { const r = { mainImpostor: e, connectedImpostor: t, joint: i }; i.physicsPlugin = this._physicsPlugin, this._joints.push(r), this._physicsPlugin.generateJoint(r); } /** * Removes a joint from the simulation * @param mainImpostor defines the impostor used with the joint * @param connectedImpostor defines the other impostor connected to the main one by the joint * @param joint defines the joint to remove */ removeJoint(e, t, i) { const r = this._joints.filter(function(s) { return s.connectedImpostor === t && s.joint === i && s.mainImpostor === e; }); r.length && this._physicsPlugin.removeJoint(r[0]); } /** * Called by the scene. No need to call it. * @param delta defines the timespan between frames */ _step(e) { this._impostors.forEach((t) => { t.isBodyInitRequired() && this._physicsPlugin.generatePhysicsBody(t); }), e > 0.1 ? e = 0.1 : e <= 0 && (e = 1 / 60), this._physicsPlugin.executeStep(e, this._impostors); } /** * Gets the current plugin used to run the simulation * @returns current plugin */ getPhysicsPlugin() { return this._physicsPlugin; } /** * Gets the list of physic impostors * @returns an array of PhysicsImpostor */ getImpostors() { return this._impostors; } /** * Gets the impostor for a physics enabled object * @param object defines the object impersonated by the impostor * @returns the PhysicsImpostor or null if not found */ getImpostorForPhysicsObject(e) { for (let t = 0; t < this._impostors.length; ++t) if (this._impostors[t].object === e) return this._impostors[t]; return null; } /** * Gets the impostor for a physics body object * @param body defines physics body used by the impostor * @returns the PhysicsImpostor or null if not found */ getImpostorWithPhysicsBody(e) { for (let t = 0; t < this._impostors.length; ++t) if (this._impostors[t].physicsBody === e) return this._impostors[t]; return null; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @returns PhysicsRaycastResult */ raycast(e, t) { return this._physicsPlugin.raycast(e, t); } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @param result resulting PhysicsRaycastResult */ raycastToRef(e, t, i) { return this._physicsPlugin.raycastToRef(e, t, i); } }; class gB { constructor(e = !0, t = 10, i = CANNON) { if (this._useDeltaForWorldStep = e, this.name = "CannonJSPlugin", this._physicsMaterials = new Array(), this._fixedTimeStep = 1 / 60, this._physicsBodiesToRemoveAfterStep = new Array(), this._firstFrame = !0, this._tmpQuaternion = new Ze(), this._minus90X = new Ze(-0.7071067811865475, 0, 0, 0.7071067811865475), this._plus90X = new Ze(0.7071067811865475, 0, 0, 0.7071067811865475), this._tmpPosition = D.Zero(), this._tmpDeltaPosition = D.Zero(), this._tmpUnityRotation = new Ze(), this.BJSCANNON = i, !this.isSupported()) { Ce.Error("CannonJS is not available. 
Please make sure you included the js file."); return; } this._extendNamespace(), this.world = new this.BJSCANNON.World(), this.world.broadphase = new this.BJSCANNON.NaiveBroadphase(), this.world.solver.iterations = t, this._cannonRaycastResult = new this.BJSCANNON.RaycastResult(), this._raycastResult = new _N(); } /** * * @returns plugin version */ getPluginVersion() { return 1; } setGravity(e) { const t = e; this.world.gravity.set(t.x, t.y, t.z); } setTimeStep(e) { this._fixedTimeStep = e; } getTimeStep() { return this._fixedTimeStep; } executeStep(e, t) { if (this._firstFrame) { this._firstFrame = !1; for (const i of t) i.type == tr.HeightmapImpostor || i.type === tr.PlaneImpostor || i.beforeStep(); } this.world.step(this._useDeltaForWorldStep ? e : this._fixedTimeStep), this._removeMarkedPhysicsBodiesFromWorld(); } _removeMarkedPhysicsBodiesFromWorld() { this._physicsBodiesToRemoveAfterStep.length > 0 && (this._physicsBodiesToRemoveAfterStep.forEach((e) => { typeof this.world.removeBody == "function" ? this.world.removeBody(e) : this.world.remove(e); }), this._physicsBodiesToRemoveAfterStep.length = 0); } applyImpulse(e, t, i) { const r = new this.BJSCANNON.Vec3(i.x, i.y, i.z), s = new this.BJSCANNON.Vec3(t.x, t.y, t.z); e.physicsBody.applyImpulse(s, r); } applyForce(e, t, i) { const r = new this.BJSCANNON.Vec3(i.x, i.y, i.z), s = new this.BJSCANNON.Vec3(t.x, t.y, t.z); e.physicsBody.applyForce(s, r); } generatePhysicsBody(e) { if (this._removeMarkedPhysicsBodiesFromWorld(), e.parent) { e.physicsBody && (this.removePhysicsBody(e), e.forceUpdate()); return; } if (e.isBodyInitRequired()) { const t = this._createShape(e); if (!t) { Ce.Warn("It was not possible to create a physics body for this object."); return; } const i = e.physicsBody; i && this.removePhysicsBody(e); const r = this._addMaterial("mat-" + e.uniqueId, e.getParam("friction"), e.getParam("restitution")), s = { mass: e.getParam("mass"), material: r }, n = e.getParam("nativeOptions"); for (const a in n) Object.prototype.hasOwnProperty.call(n, a) && (s[a] = n[a]); e.physicsBody = new this.BJSCANNON.Body(s), e.physicsBody.addEventListener("collide", e.onCollide), this.world.addEventListener("preStep", e.beforeStep), this.world.addEventListener("postStep", e.afterStep), e.physicsBody.addShape(t), typeof this.world.addBody == "function" ? this.world.addBody(e.physicsBody) : this.world.add(e.physicsBody), i && ["force", "torque", "velocity", "angularVelocity"].forEach(function(a) { const l = i[a]; e.physicsBody[a].set(l.x, l.y, l.z); }), this._processChildMeshes(e); } this._updatePhysicsBodyTransformation(e); } _processChildMeshes(e) { const t = e.object.getChildMeshes ? e.object.getChildMeshes(!0) : [], i = e.object.rotationQuaternion; if (i ? 
i.conjugateToRef(this._tmpQuaternion) : this._tmpQuaternion.set(0, 0, 0, 1), t.length) { const r = (s) => { if (!s.rotationQuaternion) return; const n = s.getPhysicsImpostor(); if (n && n.parent !== e && s.parent) { const l = s.getAbsolutePosition().subtract(s.parent.getAbsolutePosition()), o = s.rotationQuaternion.multiply(this._tmpQuaternion); n.physicsBody && (this.removePhysicsBody(n), n.physicsBody = null), n.parent = e, n.resetUpdateFlags(), e.physicsBody.addShape(this._createShape(n), new this.BJSCANNON.Vec3(l.x, l.y, l.z), new this.BJSCANNON.Quaternion(o.x, o.y, o.z, o.w)), e.physicsBody.mass += n.getParam("mass"); } s.getChildMeshes(!0).filter((a) => !!a.physicsImpostor).forEach(r); }; t.filter((s) => !!s.physicsImpostor).forEach(r); } } removePhysicsBody(e) { e.physicsBody.removeEventListener("collide", e.onCollide), this.world.removeEventListener("preStep", e.beforeStep), this.world.removeEventListener("postStep", e.afterStep), this._physicsBodiesToRemoveAfterStep.indexOf(e.physicsBody) === -1 && this._physicsBodiesToRemoveAfterStep.push(e.physicsBody); } generateJoint(e) { const t = e.mainImpostor.physicsBody, i = e.connectedImpostor.physicsBody; if (!t || !i) return; let r; const s = e.joint.jointData, n = { pivotA: s.mainPivot ? new this.BJSCANNON.Vec3().set(s.mainPivot.x, s.mainPivot.y, s.mainPivot.z) : null, pivotB: s.connectedPivot ? new this.BJSCANNON.Vec3().set(s.connectedPivot.x, s.connectedPivot.y, s.connectedPivot.z) : null, axisA: s.mainAxis ? new this.BJSCANNON.Vec3().set(s.mainAxis.x, s.mainAxis.y, s.mainAxis.z) : null, axisB: s.connectedAxis ? new this.BJSCANNON.Vec3().set(s.connectedAxis.x, s.connectedAxis.y, s.connectedAxis.z) : null, maxForce: s.nativeParams.maxForce, collideConnected: !!s.collision }; switch (e.joint.type) { case ta.HingeJoint: case ta.Hinge2Joint: r = new this.BJSCANNON.HingeConstraint(t, i, n); break; case ta.DistanceJoint: r = new this.BJSCANNON.DistanceConstraint(t, i, s.maxDistance || 2); break; case ta.SpringJoint: { const a = s; r = new this.BJSCANNON.Spring(t, i, { restLength: a.length, stiffness: a.stiffness, damping: a.damping, localAnchorA: n.pivotA, localAnchorB: n.pivotB }); break; } case ta.LockJoint: r = new this.BJSCANNON.LockConstraint(t, i, n); break; case ta.PointToPointJoint: case ta.BallAndSocketJoint: default: r = new this.BJSCANNON.PointToPointConstraint(t, n.pivotA, i, n.pivotB, n.maxForce); break; } r.collideConnected = !!s.collision, e.joint.physicsJoint = r, e.joint.type !== ta.SpringJoint ? this.world.addConstraint(r) : (e.joint.jointData.forceApplicationCallback = e.joint.jointData.forceApplicationCallback || function() { r.applyForce(); }, e.mainImpostor.registerAfterPhysicsStep(e.joint.jointData.forceApplicationCallback)); } removeJoint(e) { e.joint.type !== ta.SpringJoint ? this.world.removeConstraint(e.joint.physicsJoint) : e.mainImpostor.unregisterAfterPhysicsStep(e.joint.jointData.forceApplicationCallback); } _addMaterial(e, t, i) { let r, s; for (r = 0; r < this._physicsMaterials.length; r++) if (s = this._physicsMaterials[r], s.friction === t && s.restitution === i) return s; const n = new this.BJSCANNON.Material(e); return n.friction = t, n.restitution = i, this._physicsMaterials.push(n), n; } _checkWithEpsilon(e) { return e < Sr ? 
Sr : e; } _createShape(e) { const t = e.object; let i; const r = e.getObjectExtents(); switch (e.type) { case tr.SphereImpostor: { const s = r.x, n = r.y, a = r.z; i = new this.BJSCANNON.Sphere(Math.max(this._checkWithEpsilon(s), this._checkWithEpsilon(n), this._checkWithEpsilon(a)) / 2); break; } case tr.CylinderImpostor: { let s = e.getParam("nativeOptions"); s || (s = {}); const n = s.radiusTop !== void 0 ? s.radiusTop : this._checkWithEpsilon(r.x) / 2, a = s.radiusBottom !== void 0 ? s.radiusBottom : this._checkWithEpsilon(r.x) / 2, l = s.height !== void 0 ? s.height : this._checkWithEpsilon(r.y), o = s.numSegments !== void 0 ? s.numSegments : 16; i = new this.BJSCANNON.Cylinder(n, a, l, o); const u = new this.BJSCANNON.Quaternion(); u.setFromAxisAngle(new this.BJSCANNON.Vec3(1, 0, 0), -Math.PI / 2); const h = new this.BJSCANNON.Vec3(0, 0, 0); i.transformAllPoints(h, u); break; } case tr.BoxImpostor: { const s = r.scale(0.5); i = new this.BJSCANNON.Box(new this.BJSCANNON.Vec3(this._checkWithEpsilon(s.x), this._checkWithEpsilon(s.y), this._checkWithEpsilon(s.z))); break; } case tr.PlaneImpostor: Ce.Warn("Attention, PlaneImposter might not behave as you expect. Consider using BoxImposter instead"), i = new this.BJSCANNON.Plane(); break; case tr.MeshImpostor: { const s = t.getVerticesData ? t.getVerticesData(Y.PositionKind) : [], n = t.getIndices ? t.getIndices() : []; if (!s) { Ce.Warn("Tried to create a MeshImpostor for an object without vertices. This will fail."); return; } const a = t.position.clone(), l = t.rotation && t.rotation.clone(), o = t.rotationQuaternion && t.rotationQuaternion.clone(); t.position.copyFromFloats(0, 0, 0), t.rotation && t.rotation.copyFromFloats(0, 0, 0), t.rotationQuaternion && t.rotationQuaternion.copyFrom(e.getParentsRotation()), t.rotationQuaternion && t.parent && t.rotationQuaternion.conjugateInPlace(); const u = t.computeWorldMatrix(!0), h = []; let d; for (d = 0; d < s.length; d += 3) D.TransformCoordinates(D.FromArray(s, d), u).toArray(h, d); Ce.Warn("MeshImpostor only collides against spheres."), i = new this.BJSCANNON.Trimesh(h, n), t.position.copyFrom(a), l && t.rotation && t.rotation.copyFrom(l), o && t.rotationQuaternion && t.rotationQuaternion.copyFrom(o); break; } case tr.HeightmapImpostor: { const s = t.position.clone(), n = t.rotation && t.rotation.clone(), a = t.rotationQuaternion && t.rotationQuaternion.clone(); t.position.copyFromFloats(0, 0, 0), t.rotation && t.rotation.copyFromFloats(0, 0, 0), t.rotationQuaternion && t.rotationQuaternion.copyFrom(e.getParentsRotation()), t.rotationQuaternion && t.parent && t.rotationQuaternion.conjugateInPlace(), t.rotationQuaternion && t.rotationQuaternion.multiplyInPlace(this._minus90X), i = this._createHeightmap(t), t.position.copyFrom(s), n && t.rotation && t.rotation.copyFrom(n), a && t.rotationQuaternion && t.rotationQuaternion.copyFrom(a), t.computeWorldMatrix(!0); break; } case tr.ParticleImpostor: i = new this.BJSCANNON.Particle(); break; case tr.NoImpostor: i = new this.BJSCANNON.Box(new this.BJSCANNON.Vec3(0, 0, 0)); break; } return i; } _createHeightmap(e, t) { let i = e.getVerticesData(Y.PositionKind); const r = e.computeWorldMatrix(!0), s = []; let n; for (n = 0; n < i.length; n += 3) D.TransformCoordinates(D.FromArray(i, n), r).toArray(s, n); i = s; const a = new Array(), l = t || ~~(Math.sqrt(i.length / 3) - 1), o = e.getBoundingInfo(), u = Math.min(o.boundingBox.extendSizeWorld.x, o.boundingBox.extendSizeWorld.y), h = o.boundingBox.extendSizeWorld.z, d = u * 2 / l; for (let p = 0; p < 
i.length; p = p + 3) { const m = Math.round(i[p + 0] / d + l / 2), _ = Math.round((i[p + 1] / d - l / 2) * -1), v = -i[p + 2] + h; a[m] || (a[m] = []), a[m][_] || (a[m][_] = v), a[m][_] = Math.max(v, a[m][_]); } for (let p = 0; p <= l; ++p) { if (!a[p]) { let m = 1; for (; !a[(p + m) % l]; ) m++; a[p] = a[(p + m) % l].slice(); } for (let m = 0; m <= l; ++m) if (!a[p][m]) { let _ = 1, v; for (; v === void 0; ) v = a[p][(m + _++) % l]; a[p][m] = v; } } const f = new this.BJSCANNON.Heightfield(a, { elementSize: d }); return f.minY = h, f; } _updatePhysicsBodyTransformation(e) { const t = e.object; if (t.computeWorldMatrix && t.computeWorldMatrix(!0), !t.getBoundingInfo()) return; const i = e.getObjectCenter(); this._tmpDeltaPosition.copyFrom(t.getAbsolutePivotPoint().subtract(i)), this._tmpDeltaPosition.divideInPlace(e.object.scaling), this._tmpPosition.copyFrom(i); let r = t.rotationQuaternion; if (r) { if ((e.type === tr.PlaneImpostor || e.type === tr.HeightmapImpostor) && (r = r.multiply(this._minus90X), e.setDeltaRotation(this._plus90X)), e.type === tr.HeightmapImpostor) { const s = t; let n = s.getBoundingInfo(); const a = s.rotationQuaternion; s.rotationQuaternion = this._tmpUnityRotation, s.computeWorldMatrix(!0); const l = i.clone(); let o = s.getPivotMatrix(); o ? o = o.clone() : o = Ae.Identity(); const u = Ae.Translation(n.boundingBox.extendSizeWorld.x, 0, -n.boundingBox.extendSizeWorld.z); s.setPreTransformMatrix(u), s.computeWorldMatrix(!0), n = s.getBoundingInfo(); const h = n.boundingBox.centerWorld.subtract(i).subtract(s.position).negate(); this._tmpPosition.copyFromFloats(h.x, h.y - n.boundingBox.extendSizeWorld.y, h.z), this._tmpDeltaPosition.copyFrom(n.boundingBox.centerWorld.subtract(l)), this._tmpDeltaPosition.y += n.boundingBox.extendSizeWorld.y, s.rotationQuaternion = a, s.setPreTransformMatrix(o), s.computeWorldMatrix(!0); } else e.type === tr.MeshImpostor && this._tmpDeltaPosition.copyFromFloats(0, 0, 0); e.setDeltaPosition(this._tmpDeltaPosition), e.physicsBody.position.set(this._tmpPosition.x, this._tmpPosition.y, this._tmpPosition.z), e.physicsBody.quaternion.set(r.x, r.y, r.z, r.w); } } setTransformationFromPhysicsBody(e) { if (e.object.position.set(e.physicsBody.position.x, e.physicsBody.position.y, e.physicsBody.position.z), e.object.rotationQuaternion) { const t = e.physicsBody.quaternion; e.object.rotationQuaternion.set(t.x, t.y, t.z, t.w); } } setPhysicsBodyTransformation(e, t, i) { e.physicsBody.position.set(t.x, t.y, t.z), e.physicsBody.quaternion.set(i.x, i.y, i.z, i.w); } isSupported() { return this.BJSCANNON !== void 0; } setLinearVelocity(e, t) { e.physicsBody.velocity.set(t.x, t.y, t.z); } setAngularVelocity(e, t) { e.physicsBody.angularVelocity.set(t.x, t.y, t.z); } getLinearVelocity(e) { const t = e.physicsBody.velocity; return t ? new D(t.x, t.y, t.z) : null; } getAngularVelocity(e) { const t = e.physicsBody.angularVelocity; return t ? 
new D(t.x, t.y, t.z) : null; } setBodyMass(e, t) { e.physicsBody.mass = t, e.physicsBody.updateMassProperties(); } getBodyMass(e) { return e.physicsBody.mass; } getBodyFriction(e) { return e.physicsBody.material.friction; } setBodyFriction(e, t) { e.physicsBody.material.friction = t; } getBodyRestitution(e) { return e.physicsBody.material.restitution; } setBodyRestitution(e, t) { e.physicsBody.material.restitution = t; } sleepBody(e) { e.physicsBody.sleep(); } wakeUpBody(e) { e.physicsBody.wakeUp(); } updateDistanceJoint(e, t) { e.physicsJoint.distance = t; } setMotor(e, t, i, r) { r || (e.physicsJoint.enableMotor(), e.physicsJoint.setMotorSpeed(t), i && this.setLimit(e, i)); } setLimit(e, t, i) { e.physicsJoint.motorEquation.maxForce = i, e.physicsJoint.motorEquation.minForce = t === void 0 ? -t : t; } syncMeshWithImpostor(e, t) { const i = t.physicsBody; e.position.x = i.position.x, e.position.y = i.position.y, e.position.z = i.position.z, e.rotationQuaternion && (e.rotationQuaternion.x = i.quaternion.x, e.rotationQuaternion.y = i.quaternion.y, e.rotationQuaternion.z = i.quaternion.z, e.rotationQuaternion.w = i.quaternion.w); } getRadius(e) { return e.physicsBody.shapes[0].boundingSphereRadius; } getBoxSizeToRef(e, t) { const i = e.physicsBody.shapes[0]; t.x = i.halfExtents.x * 2, t.y = i.halfExtents.y * 2, t.z = i.halfExtents.z * 2; } dispose() { } _extendNamespace() { const e = new this.BJSCANNON.Vec3(), t = this.BJSCANNON; this.BJSCANNON.World.prototype.step = function(i, r, s) { if (s = s || 10, r = r || 0, r === 0) this.internalStep(i), this.time += i; else { let n = Math.floor((this.time + r) / i) - Math.floor(this.time / i); n = Math.min(n, s) || 1; const a = performance.now(); for (let d = 0; d !== n && (this.internalStep(i), !(performance.now() - a > i * 1e3)); d++) ; this.time += r; const o = this.time % i / i, u = e, h = this.bodies; for (let d = 0; d !== h.length; d++) { const f = h[d]; f.type !== t.Body.STATIC && f.sleepState !== t.Body.SLEEPING ? (f.position.vsub(f.previousPosition, u), u.scale(o, u), f.position.vadd(u, f.interpolatedPosition)) : (f.interpolatedPosition.set(f.position.x, f.position.y, f.position.z), f.interpolatedQuaternion.set(f.quaternion.x, f.quaternion.y, f.quaternion.z, f.quaternion.w)); } } }; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @returns PhysicsRaycastResult */ raycast(e, t) { return this._raycastResult.reset(e, t), this.raycastToRef(e, t, this._raycastResult), this._raycastResult; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? 
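*
* @example
* // raycast sketch via the engine facade (public names assumed): find what lies below the origin
* const from = new BABYLON.Vector3(0, 10, 0);
* const to = new BABYLON.Vector3(0, -10, 0);
* const hit = scene.getPhysicsEngine().raycast(from, to); // PhysicsRaycastResult
* if (hit.hasHit) { console.log("hit at " + hit.hitPointWorld.toString()); }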
* @param result resulting PhysicsRaycastResult */ raycastToRef(e, t, i) { this._cannonRaycastResult.reset(), this.world.raycastClosest(e, t, {}, this._cannonRaycastResult), i.reset(e, t), this._cannonRaycastResult.hasHit && (i.setHitData({ x: this._cannonRaycastResult.hitNormalWorld.x, y: this._cannonRaycastResult.hitNormalWorld.y, z: this._cannonRaycastResult.hitNormalWorld.z }, { x: this._cannonRaycastResult.hitPointWorld.x, y: this._cannonRaycastResult.hitPointWorld.y, z: this._cannonRaycastResult.hitPointWorld.z }), i.setHitDistance(this._cannonRaycastResult.distance)); } } _W.DefaultPluginFactory = () => new gB(); class BH { constructor(e = !0, t, i = OIMO) { this._useDeltaForWorldStep = e, this.name = "OimoJSPlugin", this._fixedTimeStep = 1 / 60, this._tmpImpostorsArray = [], this._tmpPositionVector = D.Zero(), this.BJSOIMO = i, this.world = new this.BJSOIMO.World({ iterations: t }), this.world.clear(), this._raycastResult = new _N(); } /** * * @returns plugin version */ getPluginVersion() { return 1; } setGravity(e) { this.world.gravity.set(e.x, e.y, e.z); } setTimeStep(e) { this.world.timeStep = e; } getTimeStep() { return this.world.timeStep; } executeStep(e, t) { t.forEach(function(r) { r.beforeStep(); }), this.world.timeStep = this._useDeltaForWorldStep ? e : this._fixedTimeStep, this.world.step(), t.forEach((r) => { r.afterStep(), this._tmpImpostorsArray[r.uniqueId] = r; }); let i = this.world.contacts; for (; i !== null; ) { if (i.touching && !i.body1.sleeping && !i.body2.sleeping) { i = i.next; continue; } const r = this._tmpImpostorsArray[+i.body1.name], s = this._tmpImpostorsArray[+i.body2.name]; if (!r || !s) { i = i.next; continue; } r.onCollide({ body: s.physicsBody, point: null, distance: 0, impulse: 0, normal: null }), s.onCollide({ body: r.physicsBody, point: null, distance: 0, impulse: 0, normal: null }), i = i.next; } } applyImpulse(e, t, i) { const r = e.physicsBody.mass; e.physicsBody.applyImpulse(i.scale(this.world.invScale), t.scale(this.world.invScale * r)); } applyForce(e, t, i) { Ce.Warn("Oimo doesn't support applying force. Using impulse instead."), this.applyImpulse(e, t, i); } generatePhysicsBody(e) { if (e.parent) { e.physicsBody && (this.removePhysicsBody(e), e.forceUpdate()); return; } if (e.isBodyInitRequired()) { const t = { name: e.uniqueId, //Oimo must have mass, also for static objects. 
config: [e.getParam("mass") || 1e-3, e.getParam("friction"), e.getParam("restitution")], size: [], type: [], pos: [], posShape: [], rot: [], rotShape: [], move: e.getParam("mass") !== 0, density: e.getParam("mass"), friction: e.getParam("friction"), restitution: e.getParam("restitution"), //Supporting older versions of Oimo world: this.world }, i = [e]; ((a) => { a.getChildMeshes && a.getChildMeshes().forEach(function(l) { l.physicsImpostor && i.push(l.physicsImpostor); }); })(e.object); const s = (a) => Math.max(a, Sr), n = new Ze(); i.forEach((a) => { if (!a.object.rotationQuaternion) return; const l = a.object.rotationQuaternion; n.copyFrom(l), a.object.rotationQuaternion.set(0, 0, 0, 1), a.object.computeWorldMatrix(!0); const o = n.toEulerAngles(), u = a.getObjectExtents(), h = 57.29577951308232; if (a === e) { const d = e.getObjectCenter(); e.object.getAbsolutePivotPoint().subtractToRef(d, this._tmpPositionVector), this._tmpPositionVector.divideInPlace(e.object.scaling), t.pos.push(d.x), t.pos.push(d.y), t.pos.push(d.z), t.posShape.push(0, 0, 0), t.rotShape.push(0, 0, 0); } else { const d = a.object.position.clone(); t.posShape.push(d.x), t.posShape.push(d.y), t.posShape.push(d.z), t.rotShape.push(o.x * h, o.y * h, o.z * h); } switch (a.object.rotationQuaternion.copyFrom(n), a.type) { case tr.ParticleImpostor: Ce.Warn("No Particle support in OIMO.js. using SphereImpostor instead"); case tr.SphereImpostor: { const d = u.x, f = u.y, p = u.z, m = Math.max(s(d), s(f), s(p)) / 2; t.type.push("sphere"), t.size.push(m), t.size.push(m), t.size.push(m); break; } case tr.CylinderImpostor: { const d = s(u.x) / 2, f = s(u.y); t.type.push("cylinder"), t.size.push(d), t.size.push(f), t.size.push(f); break; } case tr.PlaneImpostor: case tr.BoxImpostor: default: { const d = s(u.x), f = s(u.y), p = s(u.z); t.type.push("box"), t.size.push(d), t.size.push(f), t.size.push(p); break; } } a.object.rotationQuaternion = l; }), e.physicsBody = this.world.add(t), e.physicsBody.resetQuaternion(n), e.physicsBody.updatePosition(0); } else this._tmpPositionVector.copyFromFloats(0, 0, 0); e.setDeltaPosition(this._tmpPositionVector); } removePhysicsBody(e) { this.world.removeRigidBody(e.physicsBody); } generateJoint(e) { const t = e.mainImpostor.physicsBody, i = e.connectedImpostor.physicsBody; if (!t || !i) return; const r = e.joint.jointData, s = r.nativeParams || {}; let n; const a = { body1: t, body2: i, axe1: s.axe1 || (r.mainAxis ? r.mainAxis.asArray() : null), axe2: s.axe2 || (r.connectedAxis ? r.connectedAxis.asArray() : null), pos1: s.pos1 || (r.mainPivot ? r.mainPivot.asArray() : null), pos2: s.pos2 || (r.connectedPivot ? r.connectedPivot.asArray() : null), min: s.min, max: s.max, collision: s.collision || r.collision, spring: s.spring, //supporting older version of Oimo world: this.world }; switch (e.joint.type) { case ta.BallAndSocketJoint: n = "jointBall"; break; case ta.SpringJoint: { Ce.Warn("OIMO.js doesn't support Spring Constraint. 
Simulating using DistanceJoint instead"); const l = r; a.min = l.length || a.min, a.max = Math.max(a.min, a.max); } case ta.DistanceJoint: n = "jointDistance", a.max = r.maxDistance; break; case ta.PrismaticJoint: n = "jointPrisme"; break; case ta.SliderJoint: n = "jointSlide"; break; case ta.WheelJoint: n = "jointWheel"; break; case ta.HingeJoint: default: n = "jointHinge"; break; } a.type = n, e.joint.physicsJoint = this.world.add(a); } removeJoint(e) { try { this.world.removeJoint(e.joint.physicsJoint); } catch (t) { Ce.Warn(t); } } isSupported() { return this.BJSOIMO !== void 0; } setTransformationFromPhysicsBody(e) { if (!e.physicsBody.sleeping) { if (e.physicsBody.shapes.next) { let t = e.physicsBody.shapes; for (; t.next; ) t = t.next; e.object.position.set(t.position.x, t.position.y, t.position.z); } else { const t = e.physicsBody.getPosition(); e.object.position.set(t.x, t.y, t.z); } if (e.object.rotationQuaternion) { const t = e.physicsBody.getQuaternion(); e.object.rotationQuaternion.set(t.x, t.y, t.z, t.w); } } } setPhysicsBodyTransformation(e, t, i) { const r = e.physicsBody; e.physicsBody.shapes.next || (r.position.set(t.x, t.y, t.z), r.orientation.set(i.x, i.y, i.z, i.w), r.syncShapes(), r.awake()); } /*private _getLastShape(body: any): any { var lastShape = body.shapes; while (lastShape.next) { lastShape = lastShape.next; } return lastShape; }*/ setLinearVelocity(e, t) { e.physicsBody.linearVelocity.set(t.x, t.y, t.z); } setAngularVelocity(e, t) { e.physicsBody.angularVelocity.set(t.x, t.y, t.z); } getLinearVelocity(e) { const t = e.physicsBody.linearVelocity; return t ? new D(t.x, t.y, t.z) : null; } getAngularVelocity(e) { const t = e.physicsBody.angularVelocity; return t ? new D(t.x, t.y, t.z) : null; } setBodyMass(e, t) { const i = t === 0; e.physicsBody.shapes.density = i ? 1 : t, e.physicsBody.setupMass(i ? 2 : 1); } getBodyMass(e) { return e.physicsBody.shapes.density; } getBodyFriction(e) { return e.physicsBody.shapes.friction; } setBodyFriction(e, t) { e.physicsBody.shapes.friction = t; } getBodyRestitution(e) { return e.physicsBody.shapes.restitution; } setBodyRestitution(e, t) { e.physicsBody.shapes.restitution = t; } sleepBody(e) { e.physicsBody.sleep(); } wakeUpBody(e) { e.physicsBody.awake(); } updateDistanceJoint(e, t, i) { e.physicsJoint.limitMotor.upperLimit = t, i !== void 0 && (e.physicsJoint.limitMotor.lowerLimit = i); } setMotor(e, t, i, r) { i !== void 0 ? Ce.Warn("OimoJS plugin currently has unexpected behavior when using setMotor with force parameter") : i = 1e6, t *= -1; const s = r ? e.physicsJoint.rotationalLimitMotor2 : e.physicsJoint.rotationalLimitMotor1 || e.physicsJoint.rotationalLimitMotor || e.physicsJoint.limitMotor; s && s.setMotor(t, i); } setLimit(e, t, i, r) { const s = r ? e.physicsJoint.rotationalLimitMotor2 : e.physicsJoint.rotationalLimitMotor1 || e.physicsJoint.rotationalLimitMotor || e.physicsJoint.limitMotor; s && s.setLimit(t, i === void 0 ? 
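/*
 * Illustrative sketch (assumed un-minified BABYLON names): this Oimo plugin is enabled the same
 * way as the Cannon one, and its setMotor/setLimit methods back the motor-enabled joint API.
 * `scene`, `axle` and `wheel` are assumptions for the example.
 *
 *   scene.enablePhysics(new BABYLON.Vector3(0, -9.81, 0), new BABYLON.OimoJSPlugin());
 *   const joint = new BABYLON.HingeJoint({
 *     mainPivot: new BABYLON.Vector3(0, 0, 0),
 *     connectedPivot: new BABYLON.Vector3(0, 0, 0),
 *     mainAxis: new BABYLON.Vector3(0, 0, 1),
 *     connectedAxis: new BABYLON.Vector3(0, 0, 1),
 *   });
 *   axle.physicsImpostor.addJoint(wheel.physicsImpostor, joint);
 *   joint.setMotor(3); // per the warning above, the optional force parameter is unreliable on Oimo
 */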
-t : i); } syncMeshWithImpostor(e, t) { const i = t.physicsBody; e.position.x = i.position.x, e.position.y = i.position.y, e.position.z = i.position.z, e.rotationQuaternion && (e.rotationQuaternion.x = i.orientation.x, e.rotationQuaternion.y = i.orientation.y, e.rotationQuaternion.z = i.orientation.z, e.rotationQuaternion.w = i.orientation.w); } getRadius(e) { return e.physicsBody.shapes.radius; } getBoxSizeToRef(e, t) { const i = e.physicsBody.shapes; t.x = i.halfWidth * 2, t.y = i.halfHeight * 2, t.z = i.halfDepth * 2; } dispose() { this.world.clear(); } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @returns PhysicsRaycastResult */ raycast(e, t) { return Ce.Warn("raycast is not currently supported by the Oimo physics plugin"), this._raycastResult.reset(e, t), this._raycastResult; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @param result resulting PhysicsRaycastResult */ raycastToRef(e, t, i) { Ce.Warn("raycast is not currently supported by the Oimo physics plugin"), i.reset(e, t); } } class ZA { /** * Initializes the ammoJS plugin * @param _useDeltaForWorldStep if the time between frames should be used when calculating physics steps (Default: true) * @param ammoInjection can be used to inject your own ammo reference * @param overlappingPairCache can be used to specify your own overlapping pair cache */ constructor(e = !0, t = Ammo, i = null) { if (this._useDeltaForWorldStep = e, this.bjsAMMO = {}, this.name = "AmmoJSPlugin", this._timeStep = 1 / 60, this._fixedTimeStep = 1 / 60, this._maxSteps = 5, this._tmpQuaternion = new Ze(), this._tmpContactCallbackResult = !1, this._tmpContactPoint = new D(), this._tmpContactNormal = new D(), this._tmpVec3 = new D(), this._tmpMatrix = new Ae(), typeof t == "function") { Ce.Error("AmmoJS is not ready. Please make sure you await Ammo() before using the plugin."); return; } else this.bjsAMMO = t; if (!this.isSupported()) { Ce.Error("AmmoJS is not available. 
Please make sure you included the js file."); return; } this._collisionConfiguration = new this.bjsAMMO.btSoftBodyRigidBodyCollisionConfiguration(), this._dispatcher = new this.bjsAMMO.btCollisionDispatcher(this._collisionConfiguration), this._overlappingPairCache = i || new this.bjsAMMO.btDbvtBroadphase(), this._solver = new this.bjsAMMO.btSequentialImpulseConstraintSolver(), this._softBodySolver = new this.bjsAMMO.btDefaultSoftBodySolver(), this.world = new this.bjsAMMO.btSoftRigidDynamicsWorld(this._dispatcher, this._overlappingPairCache, this._solver, this._collisionConfiguration, this._softBodySolver), this._tmpAmmoConcreteContactResultCallback = new this.bjsAMMO.ConcreteContactResultCallback(), this._tmpAmmoConcreteContactResultCallback.addSingleResult = (r) => { r = this.bjsAMMO.wrapPointer(r, this.bjsAMMO.btManifoldPoint); const s = r.getPositionWorldOnA(), n = r.m_normalWorldOnB; this._tmpContactPoint.x = s.x(), this._tmpContactPoint.y = s.y(), this._tmpContactPoint.z = s.z(), this._tmpContactNormal.x = n.x(), this._tmpContactNormal.y = n.y(), this._tmpContactNormal.z = n.z(), this._tmpContactImpulse = r.getAppliedImpulse(), this._tmpContactDistance = r.getDistance(), this._tmpContactCallbackResult = !0; }, this._raycastResult = new _N(), this._tmpAmmoTransform = new this.bjsAMMO.btTransform(), this._tmpAmmoTransform.setIdentity(), this._tmpAmmoQuaternion = new this.bjsAMMO.btQuaternion(0, 0, 0, 1), this._tmpAmmoVectorA = new this.bjsAMMO.btVector3(0, 0, 0), this._tmpAmmoVectorB = new this.bjsAMMO.btVector3(0, 0, 0), this._tmpAmmoVectorC = new this.bjsAMMO.btVector3(0, 0, 0), this._tmpAmmoVectorD = new this.bjsAMMO.btVector3(0, 0, 0); } /** * * @returns plugin version */ getPluginVersion() { return 1; } /** * Sets the gravity of the physics world (m/(s^2)) * @param gravity Gravity to set */ setGravity(e) { this._tmpAmmoVectorA.setValue(e.x, e.y, e.z), this.world.setGravity(this._tmpAmmoVectorA), this.world.getWorldInfo().set_m_gravity(this._tmpAmmoVectorA); } /** * Amount of time to step forward on each frame (only used if useDeltaForWorldStep is false in the constructor) * @param timeStep timestep to use in seconds */ setTimeStep(e) { this._timeStep = e; } /** * Increment to step forward in the physics engine (If timeStep is set to 1/60 and fixedTimeStep is set to 1/120 the physics engine should run 2 steps per frame) (Default: 1/60) * @param fixedTimeStep fixedTimeStep to use in seconds */ setFixedTimeStep(e) { this._fixedTimeStep = e; } /** * Sets the maximum number of steps by the physics engine per frame (Default: 5) * @param maxSteps the maximum number of steps by the physics engine per frame */ setMaxSteps(e) { this._maxSteps = e; } /** * Gets the current timestep (only used if useDeltaForWorldStep is false in the constructor) * @returns the current timestep in seconds */ getTimeStep() { return this._timeStep; } // Ammo's contactTest and contactPairTest take a callback that runs synchronously, wrap them so that they are easier to consume _isImpostorInContact(e) { return this._tmpContactCallbackResult = !1, this.world.contactTest(e.physicsBody, this._tmpAmmoConcreteContactResultCallback), this._tmpContactCallbackResult; } // Ammo's collision events have some weird quirks // contactPairTest fires too many events as it fires events even when objects are close together but contactTest does not // so only fire event if both contactTest and contactPairTest have a hit _isImpostorPairInContact(e, t) { return this._tmpContactCallbackResult = !1, 
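/*
 * Illustrative sketch: the constructor above expects an already-initialised ammo.js module,
 * hence the "await Ammo()" check. Assuming the un-minified BABYLON names and an ammo.js build
 * that exposes a global `Ammo` factory returning a promise:
 *
 *   const ammo = await Ammo();                                   // resolve the wasm module first
 *   scene.enablePhysics(new BABYLON.Vector3(0, -9.81, 0),
 *                       new BABYLON.AmmoJSPlugin(true, ammo));
 */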
this.world.contactPairTest(e.physicsBody, t.physicsBody, this._tmpAmmoConcreteContactResultCallback), this._tmpContactCallbackResult; } // Ammo's behavior when maxSteps > 0 does not behave as described in docs // @see http://www.bulletphysics.org/mediawiki-1.5.8/index.php/Stepping_The_World // // When maxSteps is 0 do the entire simulation in one step // When maxSteps is > 0, run up to maxStep times, if on the last step the (remaining step - fixedTimeStep) is < fixedTimeStep, the remainder will be used for the step. (eg. if remainder is 1.001 and fixedTimeStep is 1 the last step will be 1.001, if instead it did 2 steps (1, 0.001) issues occuered when having a tiny step in ammo) // Note: To get deterministic physics, timeStep would always need to be divisible by fixedTimeStep _stepSimulation(e = 1 / 60, t = 10, i = 1 / 60) { if (t == 0) this.world.stepSimulation(e, 0); else for (; t > 0 && e > 0; ) e - i < i ? (this.world.stepSimulation(e, 0), e = 0) : (e -= i, this.world.stepSimulation(i, 0)), t--; } /** * Moves the physics simulation forward delta seconds and updates the given physics imposters * Prior to the step the imposters physics location is set to the position of the babylon meshes * After the step the babylon meshes are set to the position of the physics imposters * @param delta amount of time to step forward * @param impostors array of imposters to update before/after the step */ executeStep(e, t) { for (const i of t) i.soft || i.beforeStep(); this._stepSimulation(this._useDeltaForWorldStep ? e : this._timeStep, this._maxSteps, this._fixedTimeStep); for (const i of t) if (i.soft ? this._afterSoftStep(i) : i.afterStep(), i._onPhysicsCollideCallbacks.length > 0 && this._isImpostorInContact(i)) for (const r of i._onPhysicsCollideCallbacks) for (const s of r.otherImpostors) (i.physicsBody.isActive() || s.physicsBody.isActive()) && this._isImpostorPairInContact(i, s) && (i.onCollide({ body: s.physicsBody, point: this._tmpContactPoint, distance: this._tmpContactDistance, impulse: this._tmpContactImpulse, normal: this._tmpContactNormal }), s.onCollide({ body: i.physicsBody, point: this._tmpContactPoint, distance: this._tmpContactDistance, impulse: this._tmpContactImpulse, normal: this._tmpContactNormal })); } /** * Update babylon mesh to match physics world object * @param impostor imposter to match */ _afterSoftStep(e) { e.type === tr.RopeImpostor ? this._ropeStep(e) : this._softbodyOrClothStep(e); } /** * Update babylon mesh vertices vertices to match physics world softbody or cloth * @param impostor imposter to match */ _ropeStep(e) { const t = e.physicsBody.get_m_nodes(), i = t.size(); let r, s, n, a, l; const o = new Array(); for (let d = 0; d < i; d++) r = t.at(d), s = r.get_m_x(), n = s.x(), a = s.y(), l = s.z(), o.push(new D(n, a, l)); const u = e.object, h = e.getParam("shape"); e._isFromLine ? e.object = Ba("lines", { points: o, instance: u }) : e.object = oN("ext", { shape: h, path: o, instance: u }); } /** * Update babylon mesh vertices vertices to match physics world softbody or cloth * @param impostor imposter to match */ _softbodyOrClothStep(e) { const t = e.type === tr.ClothImpostor ? 
1 : -1, i = e.object; let r = i.getVerticesData(Y.PositionKind); r || (r = []); let s = i.getVerticesData(Y.NormalKind); s || (s = []); const n = r.length / 3, a = e.physicsBody.get_m_nodes(); let l, o, u, h, d, f, p, m; for (let v = 0; v < n; v++) { l = a.at(v), o = l.get_m_x(), u = o.x(), h = o.y(), d = o.z() * t; const C = l.get_m_n(); f = C.x(), p = C.y(), m = C.z() * t, r[3 * v] = u, r[3 * v + 1] = h, r[3 * v + 2] = d, s[3 * v] = f, s[3 * v + 1] = p, s[3 * v + 2] = m; } const _ = new Ot(); _.positions = r, _.normals = s, _.uvs = i.getVerticesData(Y.UVKind), _.colors = i.getVerticesData(Y.ColorKind), i && i.getIndices && (_.indices = i.getIndices()), _.applyToMesh(i); } /** * Applies an impulse on the imposter * @param impostor imposter to apply impulse to * @param force amount of force to be applied to the imposter * @param contactPoint the location to apply the impulse on the imposter */ applyImpulse(e, t, i) { if (e.soft) Ce.Warn("Cannot be applied to a soft body"); else { e.physicsBody.activate(); const r = this._tmpAmmoVectorA, s = this._tmpAmmoVectorB; e.object && e.object.getWorldMatrix && i.subtractInPlace(e.object.getWorldMatrix().getTranslation()), r.setValue(i.x, i.y, i.z), s.setValue(t.x, t.y, t.z), e.physicsBody.applyImpulse(s, r); } } /** * Applies a force on the imposter * @param impostor imposter to apply force * @param force amount of force to be applied to the imposter * @param contactPoint the location to apply the force on the imposter */ applyForce(e, t, i) { if (e.soft) Ce.Warn("Cannot be applied to a soft body"); else { e.physicsBody.activate(); const r = this._tmpAmmoVectorA, s = this._tmpAmmoVectorB; if (e.object && e.object.getWorldMatrix) { const n = e.object.getWorldMatrix().getTranslation(); r.setValue(i.x - n.x, i.y - n.y, i.z - n.z); } else r.setValue(i.x, i.y, i.z); s.setValue(t.x, t.y, t.z), e.physicsBody.applyForce(s, r); } } /** * Creates a physics body using the plugin * @param impostor the imposter to create the physics body on */ generatePhysicsBody(e) { if (e._pluginData.toDispose = [], e.parent) { e.physicsBody && (this.removePhysicsBody(e), e.forceUpdate()); return; } if (e.isBodyInitRequired()) { const t = this._createShape(e), i = e.getParam("mass"); if (e._pluginData.mass = i, e.soft) t.get_m_cfg().set_collisions(17), t.get_m_cfg().set_kDP(e.getParam("damping")), this.bjsAMMO.castObject(t, this.bjsAMMO.btCollisionObject).getCollisionShape().setMargin(e.getParam("margin")), t.setActivationState(ZA._DISABLE_DEACTIVATION_FLAG), this.world.addSoftBody(t, 1, -1), e.physicsBody = t, e._pluginData.toDispose.push(t), this.setBodyPressure(e, 0), e.type === tr.SoftbodyImpostor && this.setBodyPressure(e, e.getParam("pressure")), this.setBodyStiffness(e, e.getParam("stiffness")), this.setBodyVelocityIterations(e, e.getParam("velocityIterations")), this.setBodyPositionIterations(e, e.getParam("positionIterations")); else { const r = new this.bjsAMMO.btVector3(0, 0, 0), s = new this.bjsAMMO.btTransform(); e.object.computeWorldMatrix(!0), s.setIdentity(), i !== 0 && t.calculateLocalInertia(i, r), this._tmpAmmoVectorA.setValue(e.object.position.x, e.object.position.y, e.object.position.z), this._tmpAmmoQuaternion.setValue(e.object.rotationQuaternion.x, e.object.rotationQuaternion.y, e.object.rotationQuaternion.z, e.object.rotationQuaternion.w), s.setOrigin(this._tmpAmmoVectorA), s.setRotation(this._tmpAmmoQuaternion); const n = new this.bjsAMMO.btDefaultMotionState(s), a = new this.bjsAMMO.btRigidBodyConstructionInfo(i, n, t, r), l = new 
this.bjsAMMO.btRigidBody(a); if (i === 0 && (l.setCollisionFlags(l.getCollisionFlags() | ZA._KINEMATIC_FLAG), l.setActivationState(ZA._DISABLE_DEACTIVATION_FLAG)), e.type == tr.NoImpostor && !t.getChildShape && l.setCollisionFlags(l.getCollisionFlags() | ZA._DISABLE_COLLISION_FLAG), e.type !== tr.MeshImpostor && e.type !== tr.NoImpostor) { const h = e.object.getBoundingInfo(); this._tmpVec3.copyFrom(e.object.getAbsolutePosition()), this._tmpVec3.subtractInPlace(h.boundingBox.centerWorld), this._tmpVec3.x /= e.object.scaling.x, this._tmpVec3.y /= e.object.scaling.y, this._tmpVec3.z /= e.object.scaling.z, e.setDeltaPosition(this._tmpVec3); } const o = e.getParam("group"), u = e.getParam("mask"); o && u ? this.world.addRigidBody(l, o, u) : this.world.addRigidBody(l), e.physicsBody = l, e._pluginData.toDispose = e._pluginData.toDispose.concat([l, a, n, s, r, t]); } this.setBodyRestitution(e, e.getParam("restitution")), this.setBodyFriction(e, e.getParam("friction")); } } /** * Removes the physics body from the imposter and disposes of the body's memory * @param impostor imposter to remove the physics body from */ removePhysicsBody(e) { this.world && (e.soft ? this.world.removeSoftBody(e.physicsBody) : this.world.removeRigidBody(e.physicsBody), e._pluginData && (e._pluginData.toDispose.forEach((t) => { this.bjsAMMO.destroy(t); }), e._pluginData.toDispose = [])); } /** * Generates a joint * @param impostorJoint the imposter joint to create the joint with */ generateJoint(e) { const t = e.mainImpostor.physicsBody, i = e.connectedImpostor.physicsBody; if (!t || !i) return; const r = e.joint.jointData; r.mainPivot || (r.mainPivot = new D(0, 0, 0)), r.connectedPivot || (r.connectedPivot = new D(0, 0, 0)); let s; switch (e.joint.type) { case ta.DistanceJoint: { const n = r.maxDistance; n && (r.mainPivot = new D(0, -n / 2, 0), r.connectedPivot = new D(0, n / 2, 0)), s = new this.bjsAMMO.btPoint2PointConstraint(t, i, new this.bjsAMMO.btVector3(r.mainPivot.x, r.mainPivot.y, r.mainPivot.z), new this.bjsAMMO.btVector3(r.connectedPivot.x, r.connectedPivot.y, r.connectedPivot.z)); break; } case ta.HingeJoint: { r.mainAxis || (r.mainAxis = new D(0, 0, 0)), r.connectedAxis || (r.connectedAxis = new D(0, 0, 0)); const n = new this.bjsAMMO.btVector3(r.mainAxis.x, r.mainAxis.y, r.mainAxis.z), a = new this.bjsAMMO.btVector3(r.connectedAxis.x, r.connectedAxis.y, r.connectedAxis.z); s = new this.bjsAMMO.btHingeConstraint(t, i, new this.bjsAMMO.btVector3(r.mainPivot.x, r.mainPivot.y, r.mainPivot.z), new this.bjsAMMO.btVector3(r.connectedPivot.x, r.connectedPivot.y, r.connectedPivot.z), n, a); break; } case ta.BallAndSocketJoint: s = new this.bjsAMMO.btPoint2PointConstraint(t, i, new this.bjsAMMO.btVector3(r.mainPivot.x, r.mainPivot.y, r.mainPivot.z), new this.bjsAMMO.btVector3(r.connectedPivot.x, r.connectedPivot.y, r.connectedPivot.z)); break; default: Ce.Warn("JointType not currently supported by the Ammo plugin, falling back to PhysicsJoint.BallAndSocketJoint"), s = new this.bjsAMMO.btPoint2PointConstraint(t, i, new this.bjsAMMO.btVector3(r.mainPivot.x, r.mainPivot.y, r.mainPivot.z), new this.bjsAMMO.btVector3(r.connectedPivot.x, r.connectedPivot.y, r.connectedPivot.z)); break; } this.world.addConstraint(s, !e.joint.jointData.collision), e.joint.physicsJoint = s; } /** * Removes a joint * @param impostorJoint the imposter joint to remove the joint from */ removeJoint(e) { this.world && this.world.removeConstraint(e.joint.physicsJoint); } // adds all verticies (including child verticies) to the triangle mesh 
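/*
 * Illustrative sketch of how generatePhysicsBody above consumes impostors: child meshes that
 * carry their own primitive impostors are folded into a compound shape on the parent, and
 * mass/friction/restitution come from the impostor options (the getParam() lookups above).
 * `scene`, `carBody` and `bumper` are assumptions; child impostors are created before the parent.
 *
 *   bumper.parent = carBody;
 *   bumper.physicsImpostor = new BABYLON.PhysicsImpostor(
 *     bumper, BABYLON.PhysicsImpostor.BoxImpostor, { mass: 0 }, scene);
 *   carBody.physicsImpostor = new BABYLON.PhysicsImpostor(
 *     carBody, BABYLON.PhysicsImpostor.BoxImpostor,
 *     { mass: 2, friction: 0.4, restitution: 0.2 }, scene);     // parent impostor created last
 */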
_addMeshVerts(e, t, i) { let r = 0; if (i && i.getIndices && i.getWorldMatrix && i.getChildMeshes) { let s = i.getIndices(); s || (s = []); let n = i.getVerticesData(Y.PositionKind); n || (n = []); let a; if (t && t !== i) { let o; t.rotationQuaternion ? o = t.rotationQuaternion : t.rotation ? o = Ze.FromEulerAngles(t.rotation.x, t.rotation.y, t.rotation.z) : o = Ze.Identity(), Ae.Compose(D.One(), o, t.position).invertToRef(this._tmpMatrix), a = i.computeWorldMatrix(!1).multiply(this._tmpMatrix); } else Ae.ScalingToRef(i.scaling.x, i.scaling.y, i.scaling.z, this._tmpMatrix), a = this._tmpMatrix; const l = s.length / 3; for (let o = 0; o < l; o++) { const u = []; for (let h = 0; h < 3; h++) { let d = new D(n[s[o * 3 + h] * 3 + 0], n[s[o * 3 + h] * 3 + 1], n[s[o * 3 + h] * 3 + 2]); d = D.TransformCoordinates(d, a); let f; h == 0 ? f = this._tmpAmmoVectorA : h == 1 ? f = this._tmpAmmoVectorB : f = this._tmpAmmoVectorC, f.setValue(d.x, d.y, d.z), u.push(f); } e.addTriangle(u[0], u[1], u[2]), r++; } i.getChildMeshes().forEach((o) => { r += this._addMeshVerts(e, t, o); }); } return r; } /** * Initialise the soft body vertices to match its object's (mesh) vertices * Softbody vertices (nodes) are in world space and to match this * The object's position and rotation is set to zero and so its vertices are also then set in world space * @param impostor to create the softbody for */ _softVertexData(e) { const t = e.object; if (t && t.getIndices && t.getWorldMatrix && t.getChildMeshes) { t.getIndices(); let i = t.getVerticesData(Y.PositionKind); i || (i = []); let r = t.getVerticesData(Y.NormalKind); r || (r = []), t.computeWorldMatrix(!1); const s = [], n = []; for (let l = 0; l < i.length; l += 3) { let o = new D(i[l], i[l + 1], i[l + 2]), u = new D(r[l], r[l + 1], r[l + 2]); o = D.TransformCoordinates(o, t.getWorldMatrix()), u = D.TransformNormal(u, t.getWorldMatrix()), s.push(o.x, o.y, o.z), n.push(u.x, u.y, u.z); } const a = new Ot(); return a.positions = s, a.normals = n, a.uvs = t.getVerticesData(Y.UVKind), a.colors = t.getVerticesData(Y.ColorKind), t && t.getIndices && (a.indices = t.getIndices()), a.applyToMesh(t), t.position = D.Zero(), t.rotationQuaternion = null, t.rotation = D.Zero(), t.computeWorldMatrix(!0), a; } return Ot.ExtractFromMesh(t); } /** * Create an impostor's soft body * @param impostor to create the softbody for */ _createSoftbody(e) { const t = e.object; if (t && t.getIndices) { let i = t.getIndices(); i || (i = []); const r = this._softVertexData(e), s = r.positions, n = r.normals; if (s === null || n === null) return new this.bjsAMMO.btCompoundShape(); { const a = [], l = []; for (let p = 0; p < s.length; p += 3) { const m = new D(s[p], s[p + 1], s[p + 2]), _ = new D(n[p], n[p + 1], n[p + 2]); a.push(m.x, m.y, -m.z), l.push(_.x, _.y, -_.z); } const o = new this.bjsAMMO.btSoftBodyHelpers().CreateFromTriMesh(this.world.getWorldInfo(), a, t.getIndices(), i.length / 3, !0), u = s.length / 3, h = o.get_m_nodes(); let d, f; for (let p = 0; p < u; p++) d = h.at(p), f = d.get_m_n(), f.setX(l[3 * p]), f.setY(l[3 * p + 1]), f.setZ(l[3 * p + 2]); return o; } } } /** * Create cloth for an impostor * @param impostor to create the softbody for */ _createCloth(e) { const t = e.object; if (t && t.getIndices) { t.getIndices(); const i = this._softVertexData(e), r = i.positions, s = i.normals; if (r === null || s === null) return new this.bjsAMMO.btCompoundShape(); { const n = r.length, a = Math.sqrt(n / 3); e.segments = a; const l = a - 1; return this._tmpAmmoVectorA.setValue(r[0], r[1], 
r[2]), this._tmpAmmoVectorB.setValue(r[3 * l], r[3 * l + 1], r[3 * l + 2]), this._tmpAmmoVectorD.setValue(r[n - 3], r[n - 2], r[n - 1]), this._tmpAmmoVectorC.setValue(r[n - 3 - 3 * l], r[n - 2 - 3 * l], r[n - 1 - 3 * l]), new this.bjsAMMO.btSoftBodyHelpers().CreatePatch(this.world.getWorldInfo(), this._tmpAmmoVectorA, this._tmpAmmoVectorB, this._tmpAmmoVectorC, this._tmpAmmoVectorD, a, a, e.getParam("fixedPoints"), !0); } } } /** * Create rope for an impostor * @param impostor to create the softbody for */ _createRope(e) { let t, i; const r = this._softVertexData(e), s = r.positions, n = r.normals; if (s === null || n === null) return new this.bjsAMMO.btCompoundShape(); r.applyToMesh(e.object, !0), e._isFromLine = !0; const a = n.map((d) => d * d), l = (d, f) => d + f; if (a.reduce(l) === 0) t = s.length, i = t / 3 - 1, this._tmpAmmoVectorA.setValue(s[0], s[1], s[2]), this._tmpAmmoVectorB.setValue(s[t - 3], s[t - 2], s[t - 1]); else { e._isFromLine = !1; const d = e.getParam("path"); if (e.getParam("shape") === null) return Ce.Warn("No shape available for extruded mesh"), new this.bjsAMMO.btCompoundShape(); t = d.length, i = t - 1, this._tmpAmmoVectorA.setValue(d[0].x, d[0].y, d[0].z), this._tmpAmmoVectorB.setValue(d[t - 1].x, d[t - 1].y, d[t - 1].z); } e.segments = i; let u = e.getParam("fixedPoints"); u = u > 3 ? 3 : u; const h = new this.bjsAMMO.btSoftBodyHelpers().CreateRope(this.world.getWorldInfo(), this._tmpAmmoVectorA, this._tmpAmmoVectorB, i - 1, u); return h.get_m_cfg().set_collisions(17), h; } /** * Create a custom physics impostor shape using the plugin's onCreateCustomShape handler * @param impostor to create the custom physics shape for */ _createCustom(e) { let t = null; return this.onCreateCustomShape && (t = this.onCreateCustomShape(e)), t == null && (t = new this.bjsAMMO.btCompoundShape()), t; } // adds all verticies (including child verticies) to the convex hull shape _addHullVerts(e, t, i) { let r = 0; if (i && i.getIndices && i.getWorldMatrix && i.getChildMeshes) { let s = i.getIndices(); s || (s = []); let n = i.getVerticesData(Y.PositionKind); n || (n = []), i.computeWorldMatrix(!1); const a = s.length / 3; for (let l = 0; l < a; l++) { const o = []; for (let u = 0; u < 3; u++) { let h = new D(n[s[l * 3 + u] * 3 + 0], n[s[l * 3 + u] * 3 + 1], n[s[l * 3 + u] * 3 + 2]); Ae.ScalingToRef(i.scaling.x, i.scaling.y, i.scaling.z, this._tmpMatrix), h = D.TransformCoordinates(h, this._tmpMatrix); let d; u == 0 ? d = this._tmpAmmoVectorA : u == 1 ? d = this._tmpAmmoVectorB : d = this._tmpAmmoVectorC, d.setValue(h.x, h.y, h.z), o.push(d); } e.addPoint(o[0], !0), e.addPoint(o[1], !0), e.addPoint(o[2], !0), r++; } i.getChildMeshes().forEach((l) => { r += this._addHullVerts(e, t, l); }); } return r; } _createShape(e, t = !1) { const i = e.object; let r; const s = e.getObjectExtents(); if (!t) { const n = e.object.getChildMeshes ? e.object.getChildMeshes(!0) : []; r = new this.bjsAMMO.btCompoundShape(); let a = 0; if (n.forEach((l) => { const o = l.getPhysicsImpostor(); if (o) { if (o.type == tr.MeshImpostor) throw "A child MeshImpostor is not supported. Only primitive impostors are supported as children (eg. 
box or sphere)"; const u = this._createShape(o), h = l.parent.getWorldMatrix().clone(), d = new D(); h.decompose(d), this._tmpAmmoTransform.getOrigin().setValue(l.position.x * d.x, l.position.y * d.y, l.position.z * d.z), this._tmpAmmoQuaternion.setValue(l.rotationQuaternion.x, l.rotationQuaternion.y, l.rotationQuaternion.z, l.rotationQuaternion.w), this._tmpAmmoTransform.setRotation(this._tmpAmmoQuaternion), r.addChildShape(this._tmpAmmoTransform, u), o.dispose(), a++; } }), a > 0) { if (e.type != tr.NoImpostor) { const l = this._createShape(e, !0); l && (this._tmpAmmoTransform.getOrigin().setValue(0, 0, 0), this._tmpAmmoQuaternion.setValue(0, 0, 0, 1), this._tmpAmmoTransform.setRotation(this._tmpAmmoQuaternion), r.addChildShape(this._tmpAmmoTransform, l)); } return r; } else this.bjsAMMO.destroy(r), r = null; } switch (e.type) { case tr.SphereImpostor: if (yt.WithinEpsilon(s.x, s.y, 1e-4) && yt.WithinEpsilon(s.x, s.z, 1e-4)) r = new this.bjsAMMO.btSphereShape(s.x / 2); else { const n = [new this.bjsAMMO.btVector3(0, 0, 0)], a = [1]; r = new this.bjsAMMO.btMultiSphereShape(n, a, 1), r.setLocalScaling(new this.bjsAMMO.btVector3(s.x / 2, s.y / 2, s.z / 2)); } break; case tr.CapsuleImpostor: { const n = s.x / 2; r = new this.bjsAMMO.btCapsuleShape(n, s.y - n * 2); } break; case tr.CylinderImpostor: this._tmpAmmoVectorA.setValue(s.x / 2, s.y / 2, s.z / 2), r = new this.bjsAMMO.btCylinderShape(this._tmpAmmoVectorA); break; case tr.PlaneImpostor: case tr.BoxImpostor: this._tmpAmmoVectorA.setValue(s.x / 2, s.y / 2, s.z / 2), r = new this.bjsAMMO.btBoxShape(this._tmpAmmoVectorA); break; case tr.MeshImpostor: if (e.getParam("mass") == 0) { if (this.onCreateCustomMeshImpostor) r = this.onCreateCustomMeshImpostor(e); else { const n = new this.bjsAMMO.btTriangleMesh(); e._pluginData.toDispose.push(n), this._addMeshVerts(n, i, i) == 0 ? r = new this.bjsAMMO.btCompoundShape() : r = new this.bjsAMMO.btBvhTriangleMeshShape(n); } break; } case tr.ConvexHullImpostor: { if (this.onCreateCustomConvexHullImpostor) r = this.onCreateCustomConvexHullImpostor(e); else { const n = new this.bjsAMMO.btConvexHullShape(); this._addHullVerts(n, i, i) == 0 ? (e._pluginData.toDispose.push(n), r = new this.bjsAMMO.btCompoundShape()) : r = n; } break; } case tr.NoImpostor: r = new this.bjsAMMO.btSphereShape(s.x / 2); break; case tr.CustomImpostor: r = this._createCustom(e); break; case tr.SoftbodyImpostor: r = this._createSoftbody(e); break; case tr.ClothImpostor: r = this._createCloth(e); break; case tr.RopeImpostor: r = this._createRope(e); break; default: Ce.Warn("The impostor type is not currently supported by the ammo plugin."); break; } return r; } /** * Sets the mesh body position/rotation from the babylon impostor * @param impostor imposter containing the physics body and babylon object */ setTransformationFromPhysicsBody(e) { e.physicsBody.getMotionState().getWorldTransform(this._tmpAmmoTransform), e.object.position.set(this._tmpAmmoTransform.getOrigin().x(), this._tmpAmmoTransform.getOrigin().y(), this._tmpAmmoTransform.getOrigin().z()), e.object.rotationQuaternion ? 
e.object.rotationQuaternion.set(this._tmpAmmoTransform.getRotation().x(), this._tmpAmmoTransform.getRotation().y(), this._tmpAmmoTransform.getRotation().z(), this._tmpAmmoTransform.getRotation().w()) : e.object.rotation && (this._tmpQuaternion.set(this._tmpAmmoTransform.getRotation().x(), this._tmpAmmoTransform.getRotation().y(), this._tmpAmmoTransform.getRotation().z(), this._tmpAmmoTransform.getRotation().w()), this._tmpQuaternion.toEulerAnglesToRef(e.object.rotation)); } /** * Sets the babylon object's position/rotation from the physics body's position/rotation * @param impostor imposter containing the physics body and babylon object * @param newPosition new position * @param newRotation new rotation */ setPhysicsBodyTransformation(e, t, i) { const r = e.physicsBody.getWorldTransform(); if (Math.abs(r.getOrigin().x() - t.x) > Sr || Math.abs(r.getOrigin().y() - t.y) > Sr || Math.abs(r.getOrigin().z() - t.z) > Sr || Math.abs(r.getRotation().x() - i.x) > Sr || Math.abs(r.getRotation().y() - i.y) > Sr || Math.abs(r.getRotation().z() - i.z) > Sr || Math.abs(r.getRotation().w() - i.w) > Sr) if (this._tmpAmmoVectorA.setValue(t.x, t.y, t.z), r.setOrigin(this._tmpAmmoVectorA), this._tmpAmmoQuaternion.setValue(i.x, i.y, i.z, i.w), r.setRotation(this._tmpAmmoQuaternion), e.physicsBody.setWorldTransform(r), e.mass == 0) { const s = e.physicsBody.getMotionState(); s && s.setWorldTransform(r); } else e.physicsBody.activate(); } /** * If this plugin is supported * @returns true if its supported */ isSupported() { return this.bjsAMMO !== void 0; } /** * Sets the linear velocity of the physics body * @param impostor imposter to set the velocity on * @param velocity velocity to set */ setLinearVelocity(e, t) { this._tmpAmmoVectorA.setValue(t.x, t.y, t.z), e.soft ? e.physicsBody.linearVelocity(this._tmpAmmoVectorA) : e.physicsBody.setLinearVelocity(this._tmpAmmoVectorA); } /** * Sets the angular velocity of the physics body * @param impostor imposter to set the velocity on * @param velocity velocity to set */ setAngularVelocity(e, t) { this._tmpAmmoVectorA.setValue(t.x, t.y, t.z), e.soft ? e.physicsBody.angularVelocity(this._tmpAmmoVectorA) : e.physicsBody.setAngularVelocity(this._tmpAmmoVectorA); } /** * gets the linear velocity * @param impostor imposter to get linear velocity from * @returns linear velocity */ getLinearVelocity(e) { let t; if (e.soft ? t = e.physicsBody.linearVelocity() : t = e.physicsBody.getLinearVelocity(), !t) return null; const i = new D(t.x(), t.y(), t.z()); return this.bjsAMMO.destroy(t), i; } /** * gets the angular velocity * @param impostor imposter to get angular velocity from * @returns angular velocity */ getAngularVelocity(e) { let t; if (e.soft ? t = e.physicsBody.angularVelocity() : t = e.physicsBody.getAngularVelocity(), !t) return null; const i = new D(t.x(), t.y(), t.z()); return this.bjsAMMO.destroy(t), i; } /** * Sets the mass of physics body * @param impostor imposter to set the mass on * @param mass mass to set */ setBodyMass(e, t) { e.soft ? 
e.physicsBody.setTotalMass(t, !1) : e.physicsBody.setMassProps(t), e._pluginData.mass = t; } /** * Gets the mass of the physics body * @param impostor imposter to get the mass from * @returns mass */ getBodyMass(e) { return e._pluginData.mass || 0; } /** * Gets friction of the impostor * @param impostor impostor to get friction from * @returns friction value */ getBodyFriction(e) { return e._pluginData.friction || 0; } /** * Sets friction of the impostor * @param impostor impostor to set friction on * @param friction friction value */ setBodyFriction(e, t) { e.soft ? e.physicsBody.get_m_cfg().set_kDF(t) : e.physicsBody.setFriction(t), e._pluginData.friction = t; } /** * Gets restitution of the impostor * @param impostor impostor to get restitution from * @returns restitution value */ getBodyRestitution(e) { return e._pluginData.restitution || 0; } /** * Sets restitution of the impostor * @param impostor impostor to set resitution on * @param restitution resitution value */ setBodyRestitution(e, t) { e.physicsBody.setRestitution(t), e._pluginData.restitution = t; } /** * Gets pressure inside the impostor * @param impostor impostor to get pressure from * @returns pressure value */ getBodyPressure(e) { return e.soft ? e._pluginData.pressure || 0 : (Ce.Warn("Pressure is not a property of a rigid body"), 0); } /** * Sets pressure inside a soft body impostor * Cloth and rope must remain 0 pressure * @param impostor impostor to set pressure on * @param pressure pressure value */ setBodyPressure(e, t) { e.soft ? e.type === tr.SoftbodyImpostor ? (e.physicsBody.get_m_cfg().set_kPR(t), e._pluginData.pressure = t) : (e.physicsBody.get_m_cfg().set_kPR(0), e._pluginData.pressure = 0) : Ce.Warn("Pressure can only be applied to a softbody"); } /** * Gets stiffness of the impostor * @param impostor impostor to get stiffness from * @returns pressure value */ getBodyStiffness(e) { return e.soft ? e._pluginData.stiffness || 0 : (Ce.Warn("Stiffness is not a property of a rigid body"), 0); } /** * Sets stiffness of the impostor * @param impostor impostor to set stiffness on * @param stiffness stiffness value from 0 to 1 */ setBodyStiffness(e, t) { e.soft ? (t = t < 0 ? 0 : t, t = t > 1 ? 1 : t, e.physicsBody.get_m_materials().at(0).set_m_kLST(t), e._pluginData.stiffness = t) : Ce.Warn("Stiffness cannot be applied to a rigid body"); } /** * Gets velocityIterations of the impostor * @param impostor impostor to get velocity iterations from * @returns velocityIterations value */ getBodyVelocityIterations(e) { return e.soft ? e._pluginData.velocityIterations || 0 : (Ce.Warn("Velocity iterations is not a property of a rigid body"), 0); } /** * Sets velocityIterations of the impostor * @param impostor impostor to set velocity iterations on * @param velocityIterations velocityIterations value */ setBodyVelocityIterations(e, t) { e.soft ? (t = t < 0 ? 0 : t, e.physicsBody.get_m_cfg().set_viterations(t), e._pluginData.velocityIterations = t) : Ce.Warn("Velocity iterations cannot be applied to a rigid body"); } /** * Gets positionIterations of the impostor * @param impostor impostor to get position iterations from * @returns positionIterations value */ getBodyPositionIterations(e) { return e.soft ? e._pluginData.positionIterations || 0 : (Ce.Warn("Position iterations is not a property of a rigid body"), 0); } /** * Sets positionIterations of the impostor * @param impostor impostor to set position on * @param positionIterations positionIterations value */ setBodyPositionIterations(e, t) { e.soft ? (t = t < 0 ? 
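/*
 * Illustrative sketch of the soft-body parameters consumed by the setters above (pressure,
 * stiffness, velocityIterations, positionIterations); the option names mirror the getParam()
 * lookups in generatePhysicsBody. `scene` and `ball` are assumptions.
 *
 *   ball.physicsImpostor = new BABYLON.PhysicsImpostor(
 *     ball, BABYLON.PhysicsImpostor.SoftbodyImpostor,
 *     { mass: 1, pressure: 250, stiffness: 0.9, velocityIterations: 10, positionIterations: 10 },
 *     scene);
 */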
0 : t, e.physicsBody.get_m_cfg().set_piterations(t), e._pluginData.positionIterations = t) : Ce.Warn("Position iterations cannot be applied to a rigid body"); } /** * Append an anchor to a cloth object * @param impostor is the cloth impostor to add anchor to * @param otherImpostor is the rigid impostor to anchor to * @param width ratio across width from 0 to 1 * @param height ratio up height from 0 to 1 * @param influence the elasticity between cloth impostor and anchor from 0, very stretchy to 1, little stretch * @param noCollisionBetweenLinkedBodies when true collisions between soft impostor and anchor are ignored; default false */ appendAnchor(e, t, i, r, s = 1, n = !1) { const a = e.segments, l = Math.round((a - 1) * i), o = Math.round((a - 1) * r), u = a - 1 - o, h = l + a * u; e.physicsBody.appendAnchor(h, t.physicsBody, n, s); } /** * Append an hook to a rope object * @param impostor is the rope impostor to add hook to * @param otherImpostor is the rigid impostor to hook to * @param length ratio along the rope from 0 to 1 * @param influence the elasticity between soft impostor and anchor from 0, very stretchy to 1, little stretch * @param noCollisionBetweenLinkedBodies when true collisions between soft impostor and anchor are ignored; default false */ appendHook(e, t, i, r = 1, s = !1) { const n = Math.round(e.segments * i); e.physicsBody.appendAnchor(n, t.physicsBody, s, r); } /** * Sleeps the physics body and stops it from being active * @param impostor impostor to sleep */ sleepBody(e) { e.physicsBody.forceActivationState(0); } /** * Activates the physics body * @param impostor impostor to activate */ wakeUpBody(e) { e.physicsBody.activate(); } /** * Updates the distance parameters of the joint */ updateDistanceJoint() { Ce.Warn("updateDistanceJoint is not currently supported by the Ammo physics plugin"); } /** * Sets a motor on the joint * @param joint joint to set motor on * @param speed speed of the motor * @param maxForce maximum force of the motor */ setMotor(e, t, i) { e.physicsJoint.enableAngularMotor(!0, t, i); } /** * Sets the motors limit */ setLimit() { Ce.Warn("setLimit is not currently supported by the Ammo physics plugin"); } /** * Syncs the position and rotation of a mesh with the impostor * @param mesh mesh to sync * @param impostor impostor to update the mesh with */ syncMeshWithImpostor(e, t) { t.physicsBody.getMotionState().getWorldTransform(this._tmpAmmoTransform), e.position.x = this._tmpAmmoTransform.getOrigin().x(), e.position.y = this._tmpAmmoTransform.getOrigin().y(), e.position.z = this._tmpAmmoTransform.getOrigin().z(), e.rotationQuaternion && (e.rotationQuaternion.x = this._tmpAmmoTransform.getRotation().x(), e.rotationQuaternion.y = this._tmpAmmoTransform.getRotation().y(), e.rotationQuaternion.z = this._tmpAmmoTransform.getRotation().z(), e.rotationQuaternion.w = this._tmpAmmoTransform.getRotation().w()); } /** * Gets the radius of the impostor * @param impostor impostor to get radius from * @returns the radius */ getRadius(e) { return e.getObjectExtents().x / 2; } /** * Gets the box size of the impostor * @param impostor impostor to get box size from * @param result the resulting box size */ getBoxSizeToRef(e, t) { const i = e.getObjectExtents(); t.x = i.x, t.y = i.y, t.z = i.z; } /** * Disposes of the impostor */ dispose() { this.bjsAMMO.destroy(this.world), this.bjsAMMO.destroy(this._solver), this.bjsAMMO.destroy(this._overlappingPairCache), this.bjsAMMO.destroy(this._dispatcher), this.bjsAMMO.destroy(this._collisionConfiguration), 
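/*
 * Illustrative sketch for appendAnchor above: the width/height ratios (0..1) select a cloth
 * node, which is then pinned to a rigid impostor. Assuming the un-minified impostor-level
 * addAnchor wrapper that forwards to this plugin method; `scene`, `cloth` and `bar` are
 * assumptions.
 *
 *   cloth.physicsImpostor = new BABYLON.PhysicsImpostor(
 *     cloth, BABYLON.PhysicsImpostor.ClothImpostor, { mass: 1, fixedPoints: 0 }, scene);
 *   cloth.physicsImpostor.addAnchor(bar.physicsImpostor, 0, 1, 0.8, false); // pin one top corner
 *   cloth.physicsImpostor.addAnchor(bar.physicsImpostor, 1, 1, 0.8, false); // pin the other corner
 */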
this.bjsAMMO.destroy(this._tmpAmmoVectorA), this.bjsAMMO.destroy(this._tmpAmmoVectorB), this.bjsAMMO.destroy(this._tmpAmmoVectorC), this.bjsAMMO.destroy(this._tmpAmmoTransform), this.bjsAMMO.destroy(this._tmpAmmoQuaternion), this.bjsAMMO.destroy(this._tmpAmmoConcreteContactResultCallback), this.world = null; } /** * Does a raycast in the physics world * @param from where should the ray start? * @param to where should the ray end? * @returns PhysicsRaycastResult */ raycast(e, t) { return this.raycastToRef(e, t, this._raycastResult), this._raycastResult; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @param result resulting PhysicsRaycastResult */ raycastToRef(e, t, i) { this._tmpAmmoVectorRCA = new this.bjsAMMO.btVector3(e.x, e.y, e.z), this._tmpAmmoVectorRCB = new this.bjsAMMO.btVector3(t.x, t.y, t.z); const r = new this.bjsAMMO.ClosestRayResultCallback(this._tmpAmmoVectorRCA, this._tmpAmmoVectorRCB); this.world.rayTest(this._tmpAmmoVectorRCA, this._tmpAmmoVectorRCB, r), i.reset(e, t), r.hasHit() && (i.setHitData({ x: r.get_m_hitNormalWorld().x(), y: r.get_m_hitNormalWorld().y(), z: r.get_m_hitNormalWorld().z() }, { x: r.get_m_hitPointWorld().x(), y: r.get_m_hitPointWorld().y(), z: r.get_m_hitPointWorld().z() }), i.calculateHitDistance()), this.bjsAMMO.destroy(r), this.bjsAMMO.destroy(this._tmpAmmoVectorRCA), this.bjsAMMO.destroy(this._tmpAmmoVectorRCB); } } ZA._DISABLE_COLLISION_FLAG = 4; ZA._KINEMATIC_FLAG = 2; ZA._DISABLE_DEACTIVATION_FLAG = 4; Yl.prototype.removeReflectionProbe = function(c) { if (!this.reflectionProbes) return -1; const e = this.reflectionProbes.indexOf(c); return e !== -1 && this.reflectionProbes.splice(e, 1), e; }; Yl.prototype.addReflectionProbe = function(c) { this.reflectionProbes || (this.reflectionProbes = []), this.reflectionProbes.push(c); }; class WI { /** * Creates a new reflection probe * @param name defines the name of the probe * @param size defines the texture resolution (for each face) * @param scene defines the hosting scene * @param generateMipMaps defines if mip maps should be generated automatically (true by default) * @param useFloat defines if HDR data (float data) should be used to store colors (false by default) * @param linearSpace defines if the probe should be generated in linear space or not (false by default) */ constructor(e, t, i, r = !0, s = !1, n = !1) { if (this.name = e, this._viewMatrix = Ae.Identity(), this._target = D.Zero(), this._add = D.Zero(), this._invertYAxis = !1, this.position = D.Zero(), this.metadata = null, this._parentContainer = null, this._scene = i, i.getEngine().supportsUniformBuffers) { this._sceneUBOs = []; for (let u = 0; u < 6; ++u) this._sceneUBOs.push(i.createSceneUniformBuffer(`Scene for Reflection Probe (name "${e}") face #${u}`)); } this._scene.reflectionProbes || (this._scene.reflectionProbes = []), this._scene.reflectionProbes.push(this); let a = 0; if (s) { const u = this._scene.getEngine().getCaps(); u.textureHalfFloatRender ? 
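/*
 * Illustrative sketch for the ReflectionProbe class above: render a cube map from the probe's
 * position and feed it to a material as its reflection texture. `scene`, `sphere` and `ground`
 * are assumptions; renderList, cubeTexture and attachToMesh are the members exposed below.
 *
 *   const probe = new BABYLON.ReflectionProbe("probe", 512, scene);
 *   probe.renderList.push(ground);
 *   probe.attachToMesh(sphere);                            // render from the sphere's position
 *   sphere.material.reflectionTexture = probe.cubeTexture; // assumes a material with reflectionTexture
 */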
a = 2 : u.textureFloatRender && (a = 1); } this._renderTargetTexture = new ra(e, t, i, r, !0, a, !0), this._renderTargetTexture.gammaSpace = !n, this._renderTargetTexture.invertZ = i.useRightHandedSystem; const l = i.getEngine().useReverseDepthBuffer; this._renderTargetTexture.onBeforeRenderObservable.add((u) => { switch (this._sceneUBOs && (i.setSceneUniformBuffer(this._sceneUBOs[u]), i.getSceneUniformBuffer().unbindEffect()), u) { case 0: this._add.copyFromFloats(1, 0, 0); break; case 1: this._add.copyFromFloats(-1, 0, 0); break; case 2: this._add.copyFromFloats(0, this._invertYAxis ? 1 : -1, 0); break; case 3: this._add.copyFromFloats(0, this._invertYAxis ? -1 : 1, 0); break; case 4: this._add.copyFromFloats(0, 0, i.useRightHandedSystem ? -1 : 1); break; case 5: this._add.copyFromFloats(0, 0, i.useRightHandedSystem ? 1 : -1); break; } this._attachedMesh && this.position.copyFrom(this._attachedMesh.getAbsolutePosition()), this.position.addToRef(this._add, this._target); const h = i.useRightHandedSystem ? Ae.LookAtRHToRef : Ae.LookAtLHToRef, d = i.useRightHandedSystem ? Ae.PerspectiveFovRH : Ae.PerspectiveFovLH; h(this.position, this._target, D.Up(), this._viewMatrix), i.activeCamera && (this._projectionMatrix = d(Math.PI / 2, 1, l ? i.activeCamera.maxZ : i.activeCamera.minZ, l ? i.activeCamera.minZ : i.activeCamera.maxZ, this._scene.getEngine().isNDCHalfZRange), i.setTransformMatrix(this._viewMatrix, this._projectionMatrix), i.activeCamera.isRigCamera && !this._renderTargetTexture.activeCamera && (this._renderTargetTexture.activeCamera = i.activeCamera.rigParent || null)), i._forcedViewPosition = this.position; }); let o; this._renderTargetTexture.onBeforeBindObservable.add(() => { var u, h; this._currentSceneUBO = i.getSceneUniformBuffer(), (h = (u = i.getEngine())._debugPushGroup) === null || h === void 0 || h.call(u, `reflection probe generation for ${e}`, 1), o = this._scene.imageProcessingConfiguration.applyByPostProcess, n && (i.imageProcessingConfiguration.applyByPostProcess = !0); }), this._renderTargetTexture.onAfterUnbindObservable.add(() => { var u, h; i.imageProcessingConfiguration.applyByPostProcess = o, i._forcedViewPosition = null, this._sceneUBOs && i.setSceneUniformBuffer(this._currentSceneUBO), i.updateTransformMatrix(!0), (h = (u = i.getEngine())._debugPopGroup) === null || h === void 0 || h.call(u, 1); }); } /** Gets or sets the number of samples to use for multi-sampling (0 by default). 
Required WebGL2 */ get samples() { return this._renderTargetTexture.samples; } set samples(e) { this._renderTargetTexture.samples = e; } /** Gets or sets the refresh rate to use (on every frame by default) */ get refreshRate() { return this._renderTargetTexture.refreshRate; } set refreshRate(e) { this._renderTargetTexture.refreshRate = e; } /** * Gets the hosting scene * @returns a Scene */ getScene() { return this._scene; } /** Gets the internal CubeTexture used to render to */ get cubeTexture() { return this._renderTargetTexture; } /** Gets or sets the list of meshes to render */ get renderList() { return this._renderTargetTexture.renderList; } set renderList(e) { this._renderTargetTexture.renderList = e; } /** * Attach the probe to a specific mesh (Rendering will be done from attached mesh's position) * @param mesh defines the mesh to attach to */ attachToMesh(e) { this._attachedMesh = e; } /** * Specifies whether or not the stencil and depth buffer are cleared between two rendering groups * @param renderingGroupId The rendering group id corresponding to its index * @param autoClearDepthStencil Automatically clears depth and stencil between groups if true. */ setRenderingAutoClearDepthStencil(e, t) { this._renderTargetTexture.setRenderingAutoClearDepthStencil(e, t); } /** * Clean all associated resources */ dispose() { const e = this._scene.reflectionProbes.indexOf(this); if (e !== -1 && this._scene.reflectionProbes.splice(e, 1), this._parentContainer) { const t = this._parentContainer.reflectionProbes.indexOf(this); t > -1 && this._parentContainer.reflectionProbes.splice(t, 1), this._parentContainer = null; } if (this._renderTargetTexture && (this._renderTargetTexture.dispose(), this._renderTargetTexture = null), this._sceneUBOs) { for (const t of this._sceneUBOs) t.dispose(); this._sceneUBOs = []; } } /** * Converts the reflection probe information to a readable string for debug purpose. * @param fullDetails Supports for multiple levels of logging within scene loading * @returns the human readable reflection probe info */ toString(e) { let t = "Name: " + this.name; return e && (t += ", position: " + this.position.toString(), this._attachedMesh && (t += ", attached mesh: " + this._attachedMesh.name)), t; } /** * Get the class name of the refection probe. * @returns "ReflectionProbe" */ getClassName() { return "ReflectionProbe"; } /** * Serialize the reflection probe to a JSON representation we can easily use in the respective Parse function. * @returns The JSON representation of the texture */ serialize() { const e = St.Serialize(this, this._renderTargetTexture.serialize()); return e.isReflectionProbe = !0, e.metadata = this.metadata, e; } /** * Parse the JSON representation of a reflection probe in order to recreate the reflection probe in the given scene. 
* @param parsedReflectionProbe Define the JSON representation of the reflection probe * @param scene Define the scene the parsed reflection probe should be instantiated in * @param rootUrl Define the root url of the parsing sequence in the case of relative dependencies * @returns The parsed reflection probe if successful */ static Parse(e, t, i) { let r = null; if (t.reflectionProbes) for (let s = 0; s < t.reflectionProbes.length; s++) { const n = t.reflectionProbes[s]; if (n.name === e.name) { r = n; break; } } return r = St.Parse(() => r || new WI(e.name, e.renderTargetSize, t, e._generateMipMaps), e, t, i), r.cubeTexture._waitingRenderList = e.renderList, e._attachedMesh && r.attachToMesh(t.getMeshById(e._attachedMesh)), e.metadata && (r.metadata = e.metadata), r; } } F([ hw() ], WI.prototype, "_attachedMesh", void 0); F([ oo() ], WI.prototype, "position", void 0); var Tme = !0; class j9 { } j9.LoaderInjectedPhysicsEngine = void 0; let qD = {}, t5 = {}; const cq = (c, e, t, i) => { if (!e.materials) return null; for (let r = 0, s = e.materials.length; r < s; r++) { const n = e.materials[r]; if (c(n)) return { parsedMaterial: n, material: At.Parse(n, t, i) }; } return null; }, Sme = (c, e, t) => { for (const i in e) if (c.name === e[i]) return t.push(c.id), !0; return c.parentId !== void 0 && t.indexOf(c.parentId) !== -1 ? (t.push(c.id), !0) : !1; }, iO = (c, e) => c + " of " + (e ? e.file + " from " + e.name + " version: " + e.version + ", exporter version: " + e.exporter_version : "unknown"), Are = (c, e) => { const t = e; if (e._waitingData.lods) { if (e._waitingData.lods.ids && e._waitingData.lods.ids.length > 0) { const i = e._waitingData.lods.ids, r = t.isEnabled(!1); if (e._waitingData.lods.distances) { const s = e._waitingData.lods.distances; if (s.length >= i.length) { const n = s.length > i.length ? s[s.length - 1] : 0; t.setEnabled(!1); for (let a = 0; a < i.length; a++) { const l = i[a], o = c.getMeshById(l); o != null && t.addLODLevel(s[a], o); } n > 0 && t.addLODLevel(n, null), r === !0 && t.setEnabled(!0); } else Ve.Warn("Invalid level of detail distances for " + e.name); } } e._waitingData.lods = null; } }, OF = (c, e, t) => { if (typeof c != "number") { const r = t.getLastEntryById(c); return r && e !== void 0 && e !== null ? r.instances[parseInt(e)] : r; } const i = qD[c]; return i && e !== void 0 && e !== null ? i.instances[parseInt(e)] : i; }, vB = (c, e) => typeof c != "number" ? e.getLastMaterialById(c, !0) : t5[c], uq = (c, e, t, i, r = !1) => { const s = new NL(c); let n = "importScene has failed JSON parse"; try { var a = JSON.parse(e); n = ""; const l = fr.loggingLevel === fr.DETAILED_LOGGING; let o, u; if (a.environmentTexture !== void 0 && a.environmentTexture !== null) { const d = a.isPBR !== void 0 ? a.isPBR : !0; if (a.environmentTextureType && a.environmentTextureType === "BABYLON.HDRCubeTexture") { const f = a.environmentTextureSize ? a.environmentTextureSize : 128, p = new ZC((a.environmentTexture.match(/https?:\/\//g) ? "" : t) + a.environmentTexture, c, f, !0, !d, void 0, a.environmentTexturePrefilterOnLoad); a.environmentTextureRotationY && (p.rotationY = a.environmentTextureRotationY), c.environmentTexture = p; } else if (typeof a.environmentTexture == "object") { const f = ul.Parse(a.environmentTexture, c, t); c.environmentTexture = f; } else if (a.environmentTexture.endsWith(".env")) { const f = new ul((a.environmentTexture.match(/https?:\/\//g) ? 
"" : t) + a.environmentTexture, c, a.environmentTextureForcedExtension); a.environmentTextureRotationY && (f.rotationY = a.environmentTextureRotationY), c.environmentTexture = f; } else { const f = ul.CreateFromPrefilteredData((a.environmentTexture.match(/https?:\/\//g) ? "" : t) + a.environmentTexture, c, a.environmentTextureForcedExtension); a.environmentTextureRotationY && (f.rotationY = a.environmentTextureRotationY), c.environmentTexture = f; } if (a.createDefaultSkybox === !0) { const f = c.activeCamera !== void 0 && c.activeCamera !== null ? (c.activeCamera.maxZ - c.activeCamera.minZ) / 2 : 1e3, p = a.skyboxBlurLevel || 0; c.createDefaultSkybox(c.environmentTexture, d, f, p); } s.environmentTexture = c.environmentTexture; } if (a.environmentIntensity !== void 0 && a.environmentIntensity !== null && (c.environmentIntensity = a.environmentIntensity), a.lights !== void 0 && a.lights !== null) for (o = 0, u = a.lights.length; o < u; o++) { const d = a.lights[o], f = hs.Parse(d, c); f && (qD[d.uniqueId] = f, s.lights.push(f), f._parentContainer = s, n += o === 0 ? ` Lights:` : "", n += ` ` + f.toString(l)); } if (a.reflectionProbes !== void 0 && a.reflectionProbes !== null) for (o = 0, u = a.reflectionProbes.length; o < u; o++) { const d = a.reflectionProbes[o], f = WI.Parse(d, c, t); f && (s.reflectionProbes.push(f), f._parentContainer = s, n += o === 0 ? ` Reflection Probes:` : "", n += ` ` + f.toString(l)); } if (a.animations !== void 0 && a.animations !== null) for (o = 0, u = a.animations.length; o < u; o++) { const d = a.animations[o], f = Qo("BABYLON.Animation"); if (f) { const p = f.Parse(d); c.animations.push(p), s.animations.push(p), n += o === 0 ? ` Animations:` : "", n += ` ` + p.toString(l); } } if (a.materials !== void 0 && a.materials !== null) for (o = 0, u = a.materials.length; o < u; o++) { const d = a.materials[o], f = At.Parse(d, c, t); f && (t5[d.uniqueId || d.id] = f, s.materials.push(f), f._parentContainer = s, n += o === 0 ? ` Materials:` : "", n += ` ` + f.toString(l), f.getActiveTextures().forEach((m) => { s.textures.indexOf(m) == -1 && (s.textures.push(m), m._parentContainer = s); })); } if (a.multiMaterials !== void 0 && a.multiMaterials !== null) for (o = 0, u = a.multiMaterials.length; o < u; o++) { const d = a.multiMaterials[o], f = xm.ParseMultiMaterial(d, c); t5[d.uniqueId || d.id] = f, s.multiMaterials.push(f), f._parentContainer = s, n += o === 0 ? ` MultiMaterials:` : "", n += ` ` + f.toString(l), f.getActiveTextures().forEach((m) => { s.textures.indexOf(m) == -1 && (s.textures.push(m), m._parentContainer = s); }); } if (a.morphTargetManagers !== void 0 && a.morphTargetManagers !== null) for (const d of a.morphTargetManagers) { const f = O4.Parse(d, c); s.morphTargetManagers.push(f), f._parentContainer = s; } if (a.skeletons !== void 0 && a.skeletons !== null) for (o = 0, u = a.skeletons.length; o < u; o++) { const d = a.skeletons[o], f = sx.Parse(d, c); s.skeletons.push(f), f._parentContainer = s, n += o === 0 ? 
` Skeletons:` : "", n += ` ` + f.toString(l); } const h = a.geometries; if (h != null) { const d = new Array(), f = h.vertexData; if (f != null) for (o = 0, u = f.length; o < u; o++) { const p = f[o]; d.push(yc.Parse(p, c, t)); } d.forEach((p) => { p && (s.geometries.push(p), p._parentContainer = s); }); } if (a.transformNodes !== void 0 && a.transformNodes !== null) for (o = 0, u = a.transformNodes.length; o < u; o++) { const d = a.transformNodes[o], f = xi.Parse(d, c, t); qD[d.uniqueId] = f, s.transformNodes.push(f), f._parentContainer = s; } if (a.meshes !== void 0 && a.meshes !== null) for (o = 0, u = a.meshes.length; o < u; o++) { const d = a.meshes[o], f = ke.Parse(d, c, t); if (qD[d.uniqueId] = f, s.meshes.push(f), f._parentContainer = s, f.hasInstances) for (const p of f.instances) s.meshes.push(p), p._parentContainer = s; n += o === 0 ? ` Meshes:` : "", n += ` ` + f.toString(l); } if (a.cameras !== void 0 && a.cameras !== null) for (o = 0, u = a.cameras.length; o < u; o++) { const d = a.cameras[o], f = Ai.Parse(d, c); qD[d.uniqueId] = f, s.cameras.push(f), f._parentContainer = s, n += o === 0 ? ` Cameras:` : "", n += ` ` + f.toString(l); } if (a.postProcesses !== void 0 && a.postProcesses !== null) for (o = 0, u = a.postProcesses.length; o < u; o++) { const d = a.postProcesses[o], f = Bi.Parse(d, c, t); f && (s.postProcesses.push(f), f._parentContainer = s, n += o === 0 ? ` Postprocesses:` : "", n += ` ` + f.toString()); } if (a.animationGroups !== void 0 && a.animationGroups !== null) for (o = 0, u = a.animationGroups.length; o < u; o++) { const d = a.animationGroups[o], f = S4.Parse(d, c); s.animationGroups.push(f), f._parentContainer = s, n += o === 0 ? ` AnimationGroups:` : "", n += ` ` + f.toString(l); } for (o = 0, u = c.cameras.length; o < u; o++) { const d = c.cameras[o]; d._waitingParentId !== null && (d.parent = OF(d._waitingParentId, d._waitingParentInstanceIndex, c), d._waitingParentId = null, d._waitingParentInstanceIndex = null); } for (o = 0, u = c.lights.length; o < u; o++) { const d = c.lights[o]; d && d._waitingParentId !== null && (d.parent = OF(d._waitingParentId, d._waitingParentInstanceIndex, c), d._waitingParentId = null, d._waitingParentInstanceIndex = null); } for (o = 0, u = c.transformNodes.length; o < u; o++) { const d = c.transformNodes[o]; d._waitingParentId !== null && (d.parent = OF(d._waitingParentId, d._waitingParentInstanceIndex, c), d._waitingParentId = null, d._waitingParentInstanceIndex = null); } for (o = 0, u = c.meshes.length; o < u; o++) { const d = c.meshes[o]; d._waitingParentId !== null && (d.parent = OF(d._waitingParentId, d._waitingParentInstanceIndex, c), d._waitingParentId = null, d._waitingParentInstanceIndex = null), d._waitingData.lods && Are(c, d); } for (c.multiMaterials.forEach((d) => { d._waitingSubMaterialsUniqueIds.forEach((f) => { d.subMaterials.push(vB(f, c)); }), d._waitingSubMaterialsUniqueIds = []; }), c.meshes.forEach((d) => { d._waitingMaterialId !== null && (d.material = vB(d._waitingMaterialId, c), d._waitingMaterialId = null); }), o = 0, u = c.skeletons.length; o < u; o++) { const d = c.skeletons[o]; d._hasWaitingData && (d.bones != null && d.bones.forEach((f) => { if (f._waitingTransformNodeId) { const p = c.getLastEntryById(f._waitingTransformNodeId); p && f.linkTransformNode(p), f._waitingTransformNodeId = null; } }), d._hasWaitingData = null); } for (o = 0, u = c.meshes.length; o < u; o++) { const d = c.meshes[o]; d._waitingData.freezeWorldMatrix ? 
(d.freezeWorldMatrix(), d._waitingData.freezeWorldMatrix = null) : d.computeWorldMatrix(!0); } for (o = 0, u = c.lights.length; o < u; o++) { const d = c.lights[o]; if (d._excludedMeshesIds.length > 0) { for (let f = 0; f < d._excludedMeshesIds.length; f++) { const p = c.getMeshById(d._excludedMeshesIds[f]); p && d.excludedMeshes.push(p); } d._excludedMeshesIds = []; } if (d._includedOnlyMeshesIds.length > 0) { for (let f = 0; f < d._includedOnlyMeshesIds.length; f++) { const p = c.getMeshById(d._includedOnlyMeshesIds[f]); p && d.includedOnlyMeshes.push(p); } d._includedOnlyMeshesIds = []; } } for (c.geometries.forEach((d) => { d._loadedUniqueId = ""; }), Yl.Parse(a, c, s, t), o = 0, u = c.meshes.length; o < u; o++) { const d = c.meshes[o]; d._waitingData.actions && (Ln.Parse(d._waitingData.actions, d, c), d._waitingData.actions = null); } a.actions !== void 0 && a.actions !== null && Ln.Parse(a.actions, null, c); } catch (l) { const o = iO("loadAssets", a ? a.producer : "Unknown") + n; if (i) i(o, l); else throw Ce.Log(o), l; } finally { qD = {}, t5 = {}, r || s.removeAllFromScene(), n !== null && fr.loggingLevel !== fr.NO_LOGGING && Ce.Log(iO("loadAssets", a ? a.producer : "Unknown") + (fr.loggingLevel !== fr.MINIMAL_LOGGING ? n : "")); } return s; }; fr.RegisterPlugin({ name: "babylon.js", extensions: ".babylon", canDirectLoad: (c) => c.indexOf("babylon") !== -1, importMesh: (c, e, t, i, r, s, n, a) => { var l; let o = "importMesh has failed JSON parse"; try { var u = JSON.parse(t); o = ""; const h = fr.loggingLevel === fr.DETAILED_LOGGING; c ? Array.isArray(c) || (c = [c]) : c = null; const d = [], f = /* @__PURE__ */ new Map(), p = []; if (u.transformNodes !== void 0 && u.transformNodes !== null) for (let m = 0, _ = u.transformNodes.length; m < _; m++) { const v = u.transformNodes[m], C = xi.Parse(v, e, i); p.push(C), f.set(C._waitingParsedUniqueId, C), C._waitingParsedUniqueId = null; } if (u.meshes !== void 0 && u.meshes !== null) { const m = [], _ = [], v = [], C = []; for (let b = 0, S = u.meshes.length; b < S; b++) { const M = u.meshes[b]; if (c === null || Sme(M, c, d)) { if (c !== null && delete c[c.indexOf(M.name)], M.geometryId !== void 0 && M.geometryId !== null && u.geometries !== void 0 && u.geometries !== null) { let w = !1; ["boxes", "spheres", "cylinders", "toruses", "grounds", "planes", "torusKnots", "vertexData"].forEach((V) => { w === !0 || !u.geometries[V] || !Array.isArray(u.geometries[V]) || u.geometries[V].forEach((k) => { if (k.id === M.geometryId) { switch (V) { case "vertexData": yc.Parse(k, e, i); break; } w = !0; } }); }), w === !1 && Ce.Warn("Geometry not found for mesh " + M.id); } if (M.materialUniqueId || M.materialId) { const w = M.materialUniqueId ? v : _; let V = w.indexOf(M.materialUniqueId || M.materialId) !== -1; if (V === !1 && u.multiMaterials !== void 0 && u.multiMaterials !== null) { const k = (L, B) => { w.push(L); const U = cq(B, u, e, i); U && U.material && (t5[U.parsedMaterial.uniqueId || U.parsedMaterial.id] = U.material, o += ` Material ` + U.material.toString(h)); }; for (let L = 0, B = u.multiMaterials.length; L < B; L++) { const U = u.multiMaterials[L]; if (M.materialUniqueId && U.uniqueId === M.materialUniqueId || U.id === M.materialId) { U.materialsUniqueIds ? 
U.materialsUniqueIds.forEach((ee) => k(ee, (Z) => Z.uniqueId === ee)) : U.materials.forEach((ee) => k(ee, (Z) => Z.id === ee)), w.push(U.uniqueId || U.id); const K = xm.ParseMultiMaterial(U, e); t5[U.uniqueId || U.id] = K, K && (V = !0, o += ` Multi-Material ` + K.toString(h)); break; } } } if (V === !1) { w.push(M.materialUniqueId || M.materialId); const k = cq((L) => M.materialUniqueId && L.uniqueId === M.materialUniqueId || L.id === M.materialId, u, e, i); !k || !k.material ? Ce.Warn("Material not found for mesh " + M.id) : (t5[k.parsedMaterial.uniqueId || k.parsedMaterial.id] = k.material, o += ` Material ` + k.material.toString(h)); } } if (M.skeletonId !== null && M.skeletonId !== void 0 && u.skeletonId !== -1 && u.skeletons !== void 0 && u.skeletons !== null && !(m.indexOf(M.skeletonId) > -1)) for (let V = 0, k = u.skeletons.length; V < k; V++) { const L = u.skeletons[V]; if (L.id === M.skeletonId) { const B = sx.Parse(L, e); n.push(B), m.push(L.id), o += ` Skeleton ` + B.toString(h); } } if (M.morphTargetManagerId > -1 && u.morphTargetManagers !== void 0 && u.morphTargetManagers !== null && !(C.indexOf(M.morphTargetManagerId) > -1)) for (let V = 0, k = u.morphTargetManagers.length; V < k; V++) { const L = u.morphTargetManagers[V]; if (L.id === M.morphTargetManagerId) { const B = O4.Parse(L, e); C.push(B.uniqueId), o += ` Morph target ` + B.toString(); } } const R = ke.Parse(M, e, i); r.push(R), f.set(R._waitingParsedUniqueId, R), R._waitingParsedUniqueId = null, o += ` Mesh ` + R.toString(h); } } e.multiMaterials.forEach((b) => { b._waitingSubMaterialsUniqueIds.forEach((S) => { b.subMaterials.push(vB(S, e)); }), b._waitingSubMaterialsUniqueIds = []; }), e.meshes.forEach((b) => { b._waitingMaterialId !== null && (b.material = vB(b._waitingMaterialId, e), b._waitingMaterialId = null); }); for (let b = 0, S = e.transformNodes.length; b < S; b++) { const M = e.transformNodes[b]; if (M._waitingParentId !== null) { let R = f.get(parseInt(M._waitingParentId)) || null; R === null && (R = e.getLastEntryById(M._waitingParentId)); let w = R; M._waitingParentInstanceIndex && (w = R.instances[parseInt(M._waitingParentInstanceIndex)], M._waitingParentInstanceIndex = null), M.parent = w, M._waitingParentId = null; } } let x; for (let b = 0, S = e.meshes.length; b < S; b++) { if (x = e.meshes[b], x._waitingParentId) { let M = f.get(parseInt(x._waitingParentId)) || null; M === null && (M = e.getLastEntryById(x._waitingParentId)); let R = M; if (x._waitingParentInstanceIndex && (R = M.instances[parseInt(x._waitingParentInstanceIndex)], x._waitingParentInstanceIndex = null), x.parent = R, ((l = x.parent) === null || l === void 0 ? void 0 : l.getClassName()) === "TransformNode") { const w = p.indexOf(x.parent); w > -1 && p.splice(w, 1); } x._waitingParentId = null; } x._waitingData.lods && Are(e, x); } for (const b of p) b.dispose(); for (let b = 0, S = e.skeletons.length; b < S; b++) { const M = e.skeletons[b]; M._hasWaitingData && (M.bones != null && M.bones.forEach((R) => { if (R._waitingTransformNodeId) { const w = e.getLastEntryById(R._waitingTransformNodeId); w && R.linkTransformNode(w), R._waitingTransformNodeId = null; } }), M._hasWaitingData = null); } for (let b = 0, S = e.meshes.length; b < S; b++) x = e.meshes[b], x._waitingData.freezeWorldMatrix ? 
(x.freezeWorldMatrix(), x._waitingData.freezeWorldMatrix = null) : x.computeWorldMatrix(!0); } if (u.particleSystems !== void 0 && u.particleSystems !== null) { const m = Yl.GetIndividualParser(Bt.NAME_PARTICLESYSTEM); if (m) for (let _ = 0, v = u.particleSystems.length; _ < v; _++) { const C = u.particleSystems[_]; d.indexOf(C.emitterId) !== -1 && s.push(m(C, e, i)); } } return e.geometries.forEach((m) => { m._loadedUniqueId = ""; }), !0; } catch (h) { const d = iO("importMesh", u ? u.producer : "Unknown") + o; if (a) a(d, h); else throw Ce.Log(d), h; } finally { o !== null && fr.loggingLevel !== fr.NO_LOGGING && Ce.Log(iO("importMesh", u ? u.producer : "Unknown") + (fr.loggingLevel !== fr.MINIMAL_LOGGING ? o : "")), t5 = {}; } return !1; }, load: (c, e, t, i) => { let r = "importScene has failed JSON parse"; try { var s = JSON.parse(e); if (r = "", s.useDelayedTextureLoading !== void 0 && s.useDelayedTextureLoading !== null && (c.useDelayedTextureLoading = s.useDelayedTextureLoading && !fr.ForceFullSceneLoadingForIncremental), s.autoClear !== void 0 && s.autoClear !== null && (c.autoClear = s.autoClear), s.clearColor !== void 0 && s.clearColor !== null && (c.clearColor = Et.FromArray(s.clearColor)), s.ambientColor !== void 0 && s.ambientColor !== null && (c.ambientColor = ze.FromArray(s.ambientColor)), s.gravity !== void 0 && s.gravity !== null && (c.gravity = D.FromArray(s.gravity)), s.useRightHandedSystem !== void 0 && (c.useRightHandedSystem = !!s.useRightHandedSystem), s.fogMode && s.fogMode !== 0) switch (c.fogMode = s.fogMode, c.fogColor = ze.FromArray(s.fogColor), c.fogStart = s.fogStart, c.fogEnd = s.fogEnd, c.fogDensity = s.fogDensity, r += " Fog mode for scene: ", c.fogMode) { case 1: r += `exp `; break; case 2: r += `exp2 `; break; case 3: r += `linear `; break; } if (s.physicsEnabled) { let a; s.physicsEngine === "cannon" || s.physicsEngine === gB.name ? a = new gB(void 0, void 0, j9.LoaderInjectedPhysicsEngine) : s.physicsEngine === "oimo" || s.physicsEngine === BH.name ? a = new BH(void 0, j9.LoaderInjectedPhysicsEngine) : (s.physicsEngine === "ammo" || s.physicsEngine === ZA.name) && (a = new ZA(void 0, j9.LoaderInjectedPhysicsEngine, void 0)), r = " Physics engine " + (s.physicsEngine ? s.physicsEngine : "oimo") + ` enabled `; const l = s.physicsGravity ? D.FromArray(s.physicsGravity) : null; c.enablePhysics(l, a); } return s.metadata !== void 0 && s.metadata !== null && (c.metadata = s.metadata), s.collisionsEnabled !== void 0 && s.collisionsEnabled !== null && (c.collisionsEnabled = s.collisionsEnabled), uq(c, e, t, i, !0) ? (s.autoAnimate && c.beginAnimation(c, s.autoAnimateFrom, s.autoAnimateTo, s.autoAnimateLoop, s.autoAnimateSpeed || 1), s.activeCameraID !== void 0 && s.activeCameraID !== null && c.setActiveCameraById(s.activeCameraID), !0) : !1; } catch (n) { const a = iO("importScene", s ? s.producer : "Unknown") + r; if (i) i(a, n); else throw Ce.Log(a), n; } finally { r !== null && fr.loggingLevel !== fr.NO_LOGGING && Ce.Log(iO("importScene", s ? s.producer : "Unknown") + (fr.loggingLevel !== fr.MINIMAL_LOGGING ? r : "")); } return !1; }, loadAssetContainer: (c, e, t, i) => uq(c, e, t, i) }); class uL { /** * Define if the fresnel effect is enable or not. */ get isEnabled() { return this._isEnabled; } set isEnabled(e) { this._isEnabled !== e && (this._isEnabled = e, $e.MarkAllMaterialsAsDirty(20)); } /** * Creates a new FresnelParameters object. 
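* @example
* // Usage sketch (hedged): assumes the public Babylon.js names FresnelParameters,
* // StandardMaterial and Color3 for the minified classes in this bundle, and an
* // existing `scene`.
* const fresnel = new BABYLON.FresnelParameters({
*     bias: 0.1,
*     power: 2,
*     leftColor: BABYLON.Color3.White(),
*     rightColor: BABYLON.Color3.Black()
* });
* const mat = new BABYLON.StandardMaterial("mat", scene);
* mat.emissiveFresnelParameters = fresnel;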
* * @param options provide your own settings to optionally override defaults */ constructor(e = {}) { this._isEnabled = !0, this.bias = e.bias === void 0 ? 0 : e.bias, this.power = e.power === void 0 ? 1 : e.power, this.leftColor = e.leftColor || ze.White(), this.rightColor = e.rightColor || ze.Black(), e.isEnabled === !1 && (this.isEnabled = !1); } /** * Clones the current fresnel and its values * @returns a cloned fresnel configuration */ clone() { const e = new uL(); return id.DeepCopy(this, e), e; } /** * Determines equality between FresnelParameters objects * @param otherFresnelParameters defines the second operand * @returns true if the power, bias, leftColor, rightColor and isEnabled values are equal to the given ones */ equals(e) { return e && this.bias === e.bias && this.power === e.power && this.leftColor.equals(e.leftColor) && this.rightColor.equals(e.rightColor) && this.isEnabled === e.isEnabled; } /** * Serializes the current fresnel parameters to a JSON representation. * @returns the JSON serialization */ serialize() { return { isEnabled: this.isEnabled, leftColor: this.leftColor.asArray(), rightColor: this.rightColor.asArray(), bias: this.bias, power: this.power }; } /** * Parses a JSON object and deserialize it to a new Fresnel parameter object. * @param parsedFresnelParameters Define the JSON representation * @returns the parsed parameters */ static Parse(e) { return new uL({ isEnabled: e.isEnabled, leftColor: ze.FromArray(e.leftColor), rightColor: ze.FromArray(e.rightColor), bias: e.bias, power: e.power || 1 }); } } St._FresnelParametersParser = uL.Parse; class Mme extends Lo { constructor(e, t) { super(e, t, "color", { attributes: ["position"], uniforms: ["world", "viewProjection", "color"] }), this.disableColorWrite = !0, this.forceDepthWrite = !0, this.setColor4("color", new Et(0, 0, 0, 1)); } } class Vp extends on { /** * Gets the current double sided mode. */ get doubleSided() { return this._twoSidedLighting; } /** * If set to true and backfaceCulling is false, normals will be flipped on the backside. */ set doubleSided(e) { this._twoSidedLighting !== e && (this._twoSidedLighting = e, this.backFaceCulling = !e, this._markAllSubMeshesAsTexturesDirty()); } /** * Instantiates a new PBRBaseSimpleMaterial instance. * * @param name The material name * @param scene The scene the material will be used in.
*/ constructor(e, t) { super(e, t), this.maxSimultaneousLights = 4, this.disableLighting = !1, this.invertNormalMapX = !1, this.invertNormalMapY = !1, this.emissiveColor = new ze(0, 0, 0), this.occlusionStrength = 1, this.useLightmapAsShadowmap = !1, this._useAlphaFromAlbedoTexture = !0, this._useAmbientInGrayScale = !0; } getClassName() { return "PBRBaseSimpleMaterial"; } } F([ W(), ct("_markAllSubMeshesAsLightsDirty") ], Vp.prototype, "maxSimultaneousLights", void 0); F([ W(), ct("_markAllSubMeshesAsLightsDirty") ], Vp.prototype, "disableLighting", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_reflectionTexture") ], Vp.prototype, "environmentTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Vp.prototype, "invertNormalMapX", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Vp.prototype, "invertNormalMapY", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_bumpTexture") ], Vp.prototype, "normalTexture", void 0); F([ Fs("emissive"), ct("_markAllSubMeshesAsTexturesDirty") ], Vp.prototype, "emissiveColor", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty") ], Vp.prototype, "emissiveTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty", "_ambientTextureStrength") ], Vp.prototype, "occlusionStrength", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_ambientTexture") ], Vp.prototype, "occlusionTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty", "_alphaCutOff") ], Vp.prototype, "alphaCutOff", void 0); F([ W() ], Vp.prototype, "doubleSided", null); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", null) ], Vp.prototype, "lightmapTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Vp.prototype, "useLightmapAsShadowmap", void 0); class ox extends Vp { /** * Instantiates a new PBRMetalRoughnessMaterial instance. * * @param name The material name * @param scene The scene the material will be use in. */ constructor(e, t) { super(e, t), this._useRoughnessFromMetallicTextureAlpha = !1, this._useRoughnessFromMetallicTextureGreen = !0, this._useMetallnessFromMetallicTextureBlue = !0, this.metallic = 1, this.roughness = 1; } /** * Return the current class name of the material. */ getClassName() { return "PBRMetallicRoughnessMaterial"; } /** * Makes a duplicate of the current material. * @param name - name to use for the new material. */ clone(e) { const t = St.Clone(() => new ox(e, this.getScene()), this); return t.id = e, t.name = e, this.clearCoat.copyTo(t.clearCoat), this.anisotropy.copyTo(t.anisotropy), this.brdf.copyTo(t.brdf), this.sheen.copyTo(t.sheen), this.subSurface.copyTo(t.subSurface), t; } /** * Serialize the material to a parsable JSON object. */ serialize() { const e = St.Serialize(this); return e.customType = "BABYLON.PBRMetallicRoughnessMaterial", e.clearCoat = this.clearCoat.serialize(), e.anisotropy = this.anisotropy.serialize(), e.brdf = this.brdf.serialize(), e.sheen = this.sheen.serialize(), e.subSurface = this.subSurface.serialize(), e.iridescence = this.iridescence.serialize(), e; } /** * Parses a JSON object corresponding to the serialize function. 
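* @example
* // Hedged sketch of a serialize()/Parse() round trip; assumes the public name
* // BABYLON.PBRMetallicRoughnessMaterial for this class plus an existing `scene`
* // and `rootUrl`.
* const source = new BABYLON.PBRMetallicRoughnessMaterial("metalRough", scene);
* source.baseColor = new BABYLON.Color3(1.0, 0.766, 0.336);
* source.metallic = 1.0;
* source.roughness = 0.4;
* const restored = BABYLON.PBRMetallicRoughnessMaterial.Parse(source.serialize(), scene, rootUrl);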
* @param source * @param scene * @param rootUrl */ static Parse(e, t, i) { const r = St.Parse(() => new ox(e.name, t), e, t, i); return e.clearCoat && r.clearCoat.parse(e.clearCoat, t, i), e.anisotropy && r.anisotropy.parse(e.anisotropy, t, i), e.brdf && r.brdf.parse(e.brdf, t, i), e.sheen && r.sheen.parse(e.sheen, t, i), e.subSurface && r.subSurface.parse(e.subSurface, t, i), e.iridescence && r.iridescence.parse(e.iridescence, t, i), r; } } F([ Fs(), ct("_markAllSubMeshesAsTexturesDirty", "_albedoColor") ], ox.prototype, "baseColor", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_albedoTexture") ], ox.prototype, "baseTexture", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], ox.prototype, "metallic", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], ox.prototype, "roughness", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_metallicTexture") ], ox.prototype, "metallicRoughnessTexture", void 0); Be("BABYLON.PBRMetallicRoughnessMaterial", ox); class lx extends Vp { /** * Specifies if the reflectivity texture contains the glossiness information in its alpha channel. */ get useMicroSurfaceFromReflectivityMapAlpha() { return this._useMicroSurfaceFromReflectivityMapAlpha; } /** * Instantiates a new PBRSpecularGlossinessMaterial instance. * * @param name The material name * @param scene The scene the material will be use in. */ constructor(e, t) { super(e, t), this._useMicroSurfaceFromReflectivityMapAlpha = !0; } /** * Return the current class name of the material. */ getClassName() { return "PBRSpecularGlossinessMaterial"; } /** * Makes a duplicate of the current material. * @param name - name to use for the new material. */ clone(e) { const t = St.Clone(() => new lx(e, this.getScene()), this); return t.id = e, t.name = e, this.clearCoat.copyTo(t.clearCoat), this.anisotropy.copyTo(t.anisotropy), this.brdf.copyTo(t.brdf), this.sheen.copyTo(t.sheen), this.subSurface.copyTo(t.subSurface), t; } /** * Serialize the material to a parsable JSON object. */ serialize() { const e = St.Serialize(this); return e.customType = "BABYLON.PBRSpecularGlossinessMaterial", e.clearCoat = this.clearCoat.serialize(), e.anisotropy = this.anisotropy.serialize(), e.brdf = this.brdf.serialize(), e.sheen = this.sheen.serialize(), e.subSurface = this.subSurface.serialize(), e.iridescence = this.iridescence.serialize(), e; } /** * Parses a JSON object corresponding to the serialize function. 
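* @example
* // Hedged sketch, mirroring the metallic-roughness example above for the
* // specular-glossiness workflow; the public name BABYLON.PBRSpecularGlossinessMaterial
* // and the `scene`/`rootUrl` variables are assumptions.
* const source = new BABYLON.PBRSpecularGlossinessMaterial("specGloss", scene);
* source.diffuseColor = new BABYLON.Color3(0.5, 0.5, 0.5);
* source.specularColor = new BABYLON.Color3(0.2, 0.2, 0.2);
* source.glossiness = 0.8;
* const restored = BABYLON.PBRSpecularGlossinessMaterial.Parse(source.serialize(), scene, rootUrl);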
* @param source * @param scene * @param rootUrl */ static Parse(e, t, i) { const r = St.Parse(() => new lx(e.name, t), e, t, i); return e.clearCoat && r.clearCoat.parse(e.clearCoat, t, i), e.anisotropy && r.anisotropy.parse(e.anisotropy, t, i), e.brdf && r.brdf.parse(e.brdf, t, i), e.sheen && r.sheen.parse(e.sheen, t, i), e.subSurface && r.subSurface.parse(e.subSurface, t, i), e.iridescence && r.iridescence.parse(e.iridescence, t, i), r; } } F([ Fs("diffuse"), ct("_markAllSubMeshesAsTexturesDirty", "_albedoColor") ], lx.prototype, "diffuseColor", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_albedoTexture") ], lx.prototype, "diffuseTexture", void 0); F([ Fs("specular"), ct("_markAllSubMeshesAsTexturesDirty", "_reflectivityColor") ], lx.prototype, "specularColor", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty", "_microSurface") ], lx.prototype, "glossiness", void 0); F([ er(), ct("_markAllSubMeshesAsTexturesDirty", "_reflectivityTexture") ], lx.prototype, "specularGlossinessTexture", void 0); Be("BABYLON.PBRSpecularGlossinessMaterial", lx); class d5 extends dn { /** * Instantiates a ColorGradingTexture from the following parameters. * * @param url The location of the color grading data (currently only supporting 3dl) * @param sceneOrEngine The scene or engine the texture will be used in * @param onLoad defines a callback triggered when the texture has been loaded */ constructor(e, t, i = null) { if (super(t), !!e) if (this._textureMatrix = Ae.Identity(), this.name = e, this.url = e, this._onLoad = i, this._texture = this._getFromCache(e, !0), this._texture) this._triggerOnLoad(); else { const r = this.getScene(); r ? r.useDelayedTextureLoading ? this.delayLoadState = 4 : this._loadTexture() : this._loadTexture(); } } /** * Fires the onload event from the constructor if requested. */ _triggerOnLoad() { this._onLoad && this._onLoad(); } /** * Returns the texture matrix used in most of the material. * This is not used in color grading but keep for troubleshooting purpose (easily swap diffuse by colorgrading to look in). */ getTextureMatrix() { return this._textureMatrix; } /** * Occurs when the file being loaded is a .3dl LUT file. */ _load3dlTexture() { const e = this._getEngine(); let t; e._features.support3DTextures ? t = e.createRawTexture3D(null, 1, 1, 1, 5, !1, !1, 2, null, 0) : t = e.createRawTexture(null, 1, 1, 5, !1, !1, 2, null, 0), this._texture = t, this._texture.isReady = !1, this.isCube = !1, this.is3D = e._features.support3DTextures, this.wrapU = 0, this.wrapV = 0, this.wrapR = 0, this.anisotropicFilteringLevel = 1; const i = (s) => { if (typeof s != "string") return; let n = null, a = null, l; const o = s.split(` `); let u = 0, h = 0, d = 0, f = 0, p = 0; for (let m = 0; m < o.length; m++) { if (l = o[m], !d5._NoneEmptyLineRegex.test(l) || l.indexOf("#") === 0) continue; const _ = l.split(" "); if (u === 0) { u = _.length, n = new Uint8Array(u * u * u * 4), a = new Float32Array(u * u * u * 4); continue; } if (u != 0) { const v = Math.max(parseInt(_[0]), 0), C = Math.max(parseInt(_[1]), 0), x = Math.max(parseInt(_[2]), 0); p = Math.max(v, p), p = Math.max(C, p), p = Math.max(x, p); const b = (h + f * u + d * u * u) * 4; a && (a[b + 0] = v, a[b + 1] = C, a[b + 2] = x), d++, d % u == 0 && (f++, d = 0, f % u == 0 && (h++, f = 0)); } } if (a && n) for (let m = 0; m < a.length; m++) if (m > 0 && (m + 1) % 4 === 0) n[m] = 255; else { const _ = a[m]; n[m] = _ / p * 255; } t.is3D ? 
(t.updateSize(u, u, u), e.updateRawTexture3D(t, n, 5, !1)) : (t.updateSize(u * u, u), e.updateRawTexture(t, n, 5, !1)), t.isReady = !0, this._triggerOnLoad(); }, r = this.getScene(); return r ? r._loadFile(this.url, i) : e._loadFile(this.url, i), this._texture; } /** * Starts the loading process of the texture. */ _loadTexture() { this.url && this.url.toLocaleLowerCase().indexOf(".3dl") == this.url.length - 4 && this._load3dlTexture(); } /** * Clones the color grading texture. */ clone() { const e = new d5(this.url, this.getScene() || this._getEngine()); return e.level = this.level, e; } /** * Called during delayed load for textures. */ delayLoad() { this.delayLoadState === 4 && (this.delayLoadState = 1, this._texture = this._getFromCache(this.url, !0), this._texture || this._loadTexture()); } /** * Parses a color grading texture serialized by Babylon. * @param parsedTexture The texture information being parsedTexture * @param scene The scene to load the texture in * @returns A color grading texture */ static Parse(e, t) { let i = null; return e.name && !e.isRenderTarget && (i = new d5(e.name, t), i.name = e.name, i.level = e.level), i; } /** * Serializes the LUT texture to json format. */ serialize() { if (!this.name) return null; const e = {}; return e.name = this.name, e.level = this.level, e.customType = "BABYLON.ColorGradingTexture", e; } } d5._NoneEmptyLineRegex = /\S+/; Be("BABYLON.ColorGradingTexture", d5); class WO extends dn { /** * Instantiates an EquiRectangularCubeTexture from the following parameters. * @param url The location of the image * @param scene The scene the texture will be used in * @param size The cubemap desired size (the more it increases the longer the generation will be) * @param noMipmap Forces to not generate the mipmap if true * @param gammaSpace Specifies if the texture will be used in gamma or linear space * (the PBR material requires those textures in linear space, but the standard material would require them in Gamma space) * @param onLoad — defines a callback called when texture is loaded * @param onError — defines a callback called if there is an error */ constructor(e, t, i, r = !1, s = !0, n = null, a = null, l = !1) { if (super(t), this._onLoad = null, this._onError = null, !e) throw new Error("Image url is not set"); this._coordinatesMode = De.CUBIC_MODE, this.name = e, this.url = e, this._size = i, this._supersample = l, this._noMipmap = r, this.gammaSpace = s, this._onLoad = n, this._onError = a, this.hasAlpha = !1, this.isCube = !0, this._texture = this._getFromCache(e, this._noMipmap, void 0, void 0, void 0, this.isCube), this._texture ? n && (this._texture.isReady ? Ve.SetImmediate(() => n()) : this._texture.onLoadedObservable.add(n)) : t.useDelayedTextureLoading ? this.delayLoadState = 4 : this._loadImage(() => this._loadTexture(), this._onError); } /** * Load the image data, by putting the image on a canvas and extracting its buffer. * @param loadTextureCallback * @param onError */ _loadImage(e, t) { const i = this.getScene(); if (!i) return; const r = i.getEngine().createRawCubeTexture(null, this._size, 4, i.getEngine().getCaps().textureFloat ? 
1 : 7, this._noMipmap, !1, 3); r.generateMipMaps = !this._noMipmap, i.addPendingData(r), r.url = this.url, r.isReady = !1, i.getEngine()._internalTexturesCache.push(r), this._texture = r; const s = document.createElement("canvas"); fw(this.url, (n) => { this._width = n.width, this._height = n.height, s.width = this._width, s.height = this._height; const a = s.getContext("2d"); a.drawImage(n, 0, 0); const l = a.getImageData(0, 0, n.width, n.height); this._buffer = l.data.buffer, s.remove(), e(); }, (n, a) => { i.removePendingData(r), t && t(`${this.getClassName()} could not be loaded`, a); }, i ? i.offlineProvider : null); } /** * Convert the image buffer into a cubemap and create a CubeTexture. */ _loadTexture() { const e = this.getScene(), t = () => { const s = this._getFloat32ArrayFromArrayBuffer(this._buffer), n = ST.ConvertPanoramaToCubemap(s, this._width, this._height, this._size, this._supersample), a = []; for (let l = 0; l < 6; l++) { const o = n[WO._FacesMapping[l]]; a.push(o); } return a; }; if (!e) return; const i = t(), r = this._texture; e.getEngine().updateRawCubeTexture(r, i, r.format, r.type, r.invertY), r.isReady = !0, e.removePendingData(r), r.onLoadedObservable.notifyObservers(r), r.onLoadedObservable.clear(), this._onLoad && this._onLoad(); } /** * Convert the ArrayBuffer into a Float32Array and drop the transparency channel. * @param buffer The ArrayBuffer that should be converted. * @returns The buffer as Float32Array. */ _getFloat32ArrayFromArrayBuffer(e) { const t = new DataView(e), i = new Float32Array(e.byteLength * 3 / 4); let r = 0; for (let s = 0; s < e.byteLength; s++) (s + 1) % 4 !== 0 && (i[r++] = t.getUint8(s) / 255); return i; } /** * Get the current class name of the texture useful for serialization or dynamic coding. * @returns "EquiRectangularCubeTexture" */ getClassName() { return "EquiRectangularCubeTexture"; } /** * Create a clone of the current EquiRectangularCubeTexture and return it. * @returns A clone of the current EquiRectangularCubeTexture. */ clone() { const e = this.getScene(); if (!e) return this; const t = new WO(this.url, e, this._size, this._noMipmap, this.gammaSpace); return t.level = this.level, t.wrapU = this.wrapU, t.wrapV = this.wrapV, t.coordinatesIndex = this.coordinatesIndex, t.coordinatesMode = this.coordinatesMode, t; } } WO._FacesMapping = ["right", "left", "up", "down", "front", "back"]; class OU extends dn { /** * Instantiates a HtmlElementTexture from the following parameters. * * @param name Defines the name of the texture * @param element Defines the video or canvas the texture is filled with * @param options Defines the other none mandatory texture creation options */ constructor(e, t, i) { var r, s; super(i.scene || i.engine), this.onLoadObservable = new Fe(), !(!t || !i.engine && !i.scene) && (i = Object.assign(Object.assign({}, OU._DefaultOptions), i), this._generateMipMaps = i.generateMipMaps, this._samplingMode = i.samplingMode, this._textureMatrix = Ae.Identity(), this._format = i.format, this.name = e, this.element = t, this._isVideo = !!t.getVideoPlaybackQuality, this._externalTexture = this._isVideo && (s = (r = this._engine) === null || r === void 0 ? void 0 : r.createExternalTexture(t)) !== null && s !== void 0 ? s : null, this.anisotropicFilteringLevel = 1, this._createInternalTexture()); } _createInternalTexture() { let e = 0, t = 0; this._isVideo ? 
(e = this.element.videoWidth, t = this.element.videoHeight) : (e = this.element.width, t = this.element.height); const i = this._getEngine(); i && (this._texture = i.createDynamicTexture(e, t, this._generateMipMaps, this._samplingMode), this._texture.format = this._format), this.update(); } /** * Returns the texture matrix used in most of the material. */ getTextureMatrix() { return this._textureMatrix; } /** * Updates the content of the texture. * @param invertY Defines whether the texture should be inverted on Y (false by default on video and true on canvas) */ update(e = null) { const t = this._getEngine(); if (this._texture == null || t == null) return; const i = this.isReady(); if (this._isVideo) { const r = this.element; if (r.readyState < r.HAVE_CURRENT_DATA) return; t.updateVideoTexture(this._texture, this._externalTexture ? this._externalTexture : r, e === null ? !0 : e); } else { const r = this.element; t.updateDynamicTexture(this._texture, r, e === null ? !0 : e, !1, this._format); } !i && this.isReady() && this.onLoadObservable.notifyObservers(this); } /** * Dispose the texture and release its associated resources. */ dispose() { this.onLoadObservable.clear(), super.dispose(); } } OU._DefaultOptions = { generateMipMaps: !1, samplingMode: 2, format: 5, engine: null, scene: null }; const Rme = 1, Pme = 2, Ime = 3, Dme = 9, Ome = 10, wme = 11, Lme = 48, Nme = 4, Fme = 0, Bme = 1, Ume = 2, Vme = 3; function mN(c) { let e = 0; return { id_length: c[e++], colormap_type: c[e++], image_type: c[e++], colormap_index: c[e++] | c[e++] << 8, colormap_length: c[e++] | c[e++] << 8, colormap_size: c[e++], origin: [c[e++] | c[e++] << 8, c[e++] | c[e++] << 8], width: c[e++] | c[e++] << 8, height: c[e++] | c[e++] << 8, pixel_size: c[e++], flags: c[e++] }; } function mW(c, e) { if (e.length < 19) { Ce.Error("Unable to load TGA file - Not enough data to contain header"); return; } let t = 18; const i = mN(e); if (i.id_length + t > e.length) { Ce.Error("Unable to load TGA file - Not enough data"); return; } t += i.id_length; let r = !1, s = !1, n = !1; switch (i.image_type) { case Dme: r = !0; case Rme: s = !0; break; case Ome: r = !0; case Pme: break; case wme: r = !0; case Ime: n = !0; break; } let a; const l = i.pixel_size >> 3, o = i.width * i.height * l; let u; if (s && (u = e.subarray(t, t += i.colormap_length * (i.colormap_size >> 3))), r) { a = new Uint8Array(o); let b, S, M, R = 0; const w = new Uint8Array(l); for (; t < o && R < o; ) if (b = e[t++], S = (b & 127) + 1, b & 128) { for (M = 0; M < l; ++M) w[M] = e[t++]; for (M = 0; M < S; ++M) a.set(w, R + M * l); R += l * S; } else { for (S *= l, M = 0; M < S; ++M) a[R + M] = e[t++]; R += S; } } else a = e.subarray(t, t += s ? i.width * i.height : o); let h, d, f, p, m, _; switch ((i.flags & Lme) >> Nme) { default: case Ume: h = 0, f = 1, _ = i.width, d = 0, p = 1, m = i.height; break; case Fme: h = 0, f = 1, _ = i.width, d = i.height - 1, p = -1, m = -1; break; case Vme: h = i.width - 1, f = -1, _ = -1, d = 0, p = 1, m = i.height; break; case Bme: h = i.width - 1, f = -1, _ = -1, d = i.height - 1, p = -1, m = -1; break; } const v = "_getImageData" + (n ? 
"Grey" : "") + i.pixel_size + "bits", C = yre[v](i, u, a, d, p, m, h, f, _); c.getEngine()._uploadDataToTextureDirectly(c, C); } function kme(c, e, t, i, r, s, n, a, l) { const o = t, u = e, h = c.width, d = c.height; let f, p = 0, m, _; const v = new Uint8Array(h * d * 4); for (_ = i; _ !== s; _ += r) for (m = n; m !== l; m += a, p++) f = o[p], v[(m + h * _) * 4 + 3] = 255, v[(m + h * _) * 4 + 2] = u[f * 3 + 0], v[(m + h * _) * 4 + 1] = u[f * 3 + 1], v[(m + h * _) * 4 + 0] = u[f * 3 + 2]; return v; } function zme(c, e, t, i, r, s, n, a, l) { const o = t, u = c.width, h = c.height; let d, f = 0, p, m; const _ = new Uint8Array(u * h * 4); for (m = i; m !== s; m += r) for (p = n; p !== l; p += a, f += 2) { d = o[f + 0] + (o[f + 1] << 8); const v = ((d & 31744) >> 10) * 255 / 31 | 0, C = ((d & 992) >> 5) * 255 / 31 | 0, x = (d & 31) * 255 / 31 | 0; _[(p + u * m) * 4 + 0] = v, _[(p + u * m) * 4 + 1] = C, _[(p + u * m) * 4 + 2] = x, _[(p + u * m) * 4 + 3] = d & 32768 ? 0 : 255; } return _; } function Hme(c, e, t, i, r, s, n, a, l) { const o = t, u = c.width, h = c.height; let d = 0, f, p; const m = new Uint8Array(u * h * 4); for (p = i; p !== s; p += r) for (f = n; f !== l; f += a, d += 3) m[(f + u * p) * 4 + 3] = 255, m[(f + u * p) * 4 + 2] = o[d + 0], m[(f + u * p) * 4 + 1] = o[d + 1], m[(f + u * p) * 4 + 0] = o[d + 2]; return m; } function Gme(c, e, t, i, r, s, n, a, l) { const o = t, u = c.width, h = c.height; let d = 0, f, p; const m = new Uint8Array(u * h * 4); for (p = i; p !== s; p += r) for (f = n; f !== l; f += a, d += 4) m[(f + u * p) * 4 + 2] = o[d + 0], m[(f + u * p) * 4 + 1] = o[d + 1], m[(f + u * p) * 4 + 0] = o[d + 2], m[(f + u * p) * 4 + 3] = o[d + 3]; return m; } function Kme(c, e, t, i, r, s, n, a, l) { const o = t, u = c.width, h = c.height; let d, f = 0, p, m; const _ = new Uint8Array(u * h * 4); for (m = i; m !== s; m += r) for (p = n; p !== l; p += a, f++) d = o[f], _[(p + u * m) * 4 + 0] = d, _[(p + u * m) * 4 + 1] = d, _[(p + u * m) * 4 + 2] = d, _[(p + u * m) * 4 + 3] = 255; return _; } function Wme(c, e, t, i, r, s, n, a, l) { const o = t, u = c.width, h = c.height; let d = 0, f, p; const m = new Uint8Array(u * h * 4); for (p = i; p !== s; p += r) for (f = n; f !== l; f += a, d += 2) m[(f + u * p) * 4 + 0] = o[d + 0], m[(f + u * p) * 4 + 1] = o[d + 0], m[(f + u * p) * 4 + 2] = o[d + 0], m[(f + u * p) * 4 + 3] = o[d + 1]; return m; } const yre = { /** * Gets the header of a TGA file * @param data defines the TGA data * @returns the header */ GetTGAHeader: mN, /** * Uploads TGA content to a Babylon Texture * @internal */ UploadContent: mW, /** @internal */ _getImageData8bits: kme, /** @internal */ _getImageData16bits: zme, /** @internal */ _getImageData24bits: Hme, /** @internal */ _getImageData32bits: Gme, /** @internal */ _getImageDataGrey8bits: Kme, /** @internal */ _getImageDataGrey16bits: Wme }; class Cre { constructor() { this.supportCascades = !1; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e) { return e.endsWith(".tga"); } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. */ loadCubeData() { throw ".env not supported in Cube."; } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. 
* @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param callback defines the method to call once ready to upload */ loadData(e, t, i) { const r = new Uint8Array(e.buffer, e.byteOffset, e.byteLength), s = mN(r); i(s.width, s.height, t.generateMipMaps, !1, () => { mW(t, r); }); } } $e._TextureLoaders.push(new Cre()); class xre { constructor() { this.supportCascades = !1; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e) { return e.endsWith(".hdr"); } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. */ loadCubeData() { throw ".env not supported in Cube."; } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. * @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param callback defines the method to call once ready to upload */ loadData(e, t, i) { const r = new Uint8Array(e.buffer, e.byteOffset, e.byteLength), s = mB.RGBE_ReadHeader(r), n = mB.RGBE_ReadPixels(r, s), a = s.width * s.height, l = new Float32Array(a * 4); for (let o = 0; o < a; o += 1) l[o * 4] = n[o * 3], l[o * 4 + 1] = n[o * 3 + 1], l[o * 4 + 2] = n[o * 3 + 2], l[o * 4 + 3] = 1; i(s.width, s.height, t.generateMipMaps, !1, () => { const o = t.getEngine(); t.type = 1, t.format = 5, t._gammaSpace = !1, o._uploadDataToTextureDirectly(t, l); }); } } $e._TextureLoaders.push(new xre()); class jme { } var qE; (function(c) { c[c.cTFETC1 = 0] = "cTFETC1", c[c.cTFETC2 = 1] = "cTFETC2", c[c.cTFBC1 = 2] = "cTFBC1", c[c.cTFBC3 = 3] = "cTFBC3", c[c.cTFBC4 = 4] = "cTFBC4", c[c.cTFBC5 = 5] = "cTFBC5", c[c.cTFBC7 = 6] = "cTFBC7", c[c.cTFPVRTC1_4_RGB = 8] = "cTFPVRTC1_4_RGB", c[c.cTFPVRTC1_4_RGBA = 9] = "cTFPVRTC1_4_RGBA", c[c.cTFASTC_4x4 = 10] = "cTFASTC_4x4", c[c.cTFATC_RGB = 11] = "cTFATC_RGB", c[c.cTFATC_RGBA_INTERPOLATED_ALPHA = 12] = "cTFATC_RGBA_INTERPOLATED_ALPHA", c[c.cTFRGBA32 = 13] = "cTFRGBA32", c[c.cTFRGB565 = 14] = "cTFRGB565", c[c.cTFBGR565 = 15] = "cTFBGR565", c[c.cTFRGBA4444 = 16] = "cTFRGBA4444", c[c.cTFFXT1_RGB = 17] = "cTFFXT1_RGB", c[c.cTFPVRTC2_4_RGB = 18] = "cTFPVRTC2_4_RGB", c[c.cTFPVRTC2_4_RGBA = 19] = "cTFPVRTC2_4_RGBA", c[c.cTFETC2_EAC_R11 = 20] = "cTFETC2_EAC_R11", c[c.cTFETC2_EAC_RG11 = 21] = "cTFETC2_EAC_RG11"; })(qE || (qE = {})); const CT = { /** * URL to use when loading the basis transcoder */ JSModuleURL: `${Ve._DefaultCdnUrl}/basisTranscoder/1/basis_transcoder.js`, /** * URL to use when loading the wasm module for the transcoder */ WasmModuleURL: `${Ve._DefaultCdnUrl}/basisTranscoder/1/basis_transcoder.wasm` }, bre = (c, e) => { let t; switch (c) { case qE.cTFETC1: t = 36196; break; case qE.cTFBC1: t = 33776; break; case qE.cTFBC4: t = 33779; break; case qE.cTFASTC_4x4: t = 37808; break; case qE.cTFETC2: t = 37496; break; case qE.cTFBC7: t = 36492; break; } if (t === void 0) throw "The chosen Basis transcoder format is not currently supported"; return t; }; let $k = null, DC = null, Xme = 0; const Yme = !1, Qme = () => ($k || ($k = new Promise((c, e) => { DC ? 
c(DC) : Ve.LoadFileAsync(Ve.GetBabylonScriptURL(CT.WasmModuleURL)).then((t) => { if (typeof URL != "function") return e("Basis transcoder requires an environment with a URL constructor"); const i = URL.createObjectURL(new Blob([`(${$me})()`], { type: "application/javascript" })); DC = new Worker(i); const r = (s) => { s.data.action === "init" ? (DC.removeEventListener("message", r), c(DC)) : s.data.action === "error" && e(s.data.error || "error initializing worker"); }; DC.addEventListener("message", r), DC.postMessage({ action: "init", url: Ve.GetBabylonScriptURL(CT.JSModuleURL), wasmBinary: t }); }).catch(e); })), $k), AB = (c, e) => { const t = c instanceof ArrayBuffer ? new Uint8Array(c) : c; return new Promise((i, r) => { Qme().then(() => { const s = Xme++, n = (l) => { l.data.action === "transcode" && l.data.id === s && (DC.removeEventListener("message", n), l.data.success ? i(l.data) : r("Transcode is not supported on this device")); }; DC.addEventListener("message", n); const a = new Uint8Array(t.byteLength); a.set(new Uint8Array(t.buffer, t.byteOffset, t.byteLength)), DC.postMessage({ action: "transcode", id: s, imageData: a, config: e, ignoreSupportedFormats: Yme }, [ a.buffer ]); }, (s) => { r(s); }); }); }, wF = (c, e) => { var t, i; let r = (t = e._gl) === null || t === void 0 ? void 0 : t.TEXTURE_2D; c.isCube && (r = (i = e._gl) === null || i === void 0 ? void 0 : i.TEXTURE_CUBE_MAP), e._bindTextureDirectly(r, c, !0); }, yB = (c, e) => { const t = c.getEngine(); for (let i = 0; i < e.fileInfo.images.length; i++) { const r = e.fileInfo.images[i].levels[0]; if (c._invertVScale = c.invertY, e.format === -1 || e.format === qE.cTFRGB565) if (c.type = 10, c.format = 4, t._features.basisNeedsPOT && (yt.Log2(r.width) % 1 !== 0 || yt.Log2(r.height) % 1 !== 0)) { const s = new ln(t, ts.Temp); c._invertVScale = c.invertY, s.type = 10, s.format = 4, s.width = r.width + 3 & -4, s.height = r.height + 3 & -4, wF(s, t), t._uploadDataToTextureDirectly(s, new Uint16Array(r.transcodedPixels.buffer), i, 0, 4, !0), t._rescaleTexture(s, c, t.scenes[0], t._getInternalFormat(4), () => { t._releaseTexture(s), wF(c, t); }); } else c._invertVScale = !c.invertY, c.width = r.width + 3 & -4, c.height = r.height + 3 & -4, c.samplingMode = 2, wF(c, t), t._uploadDataToTextureDirectly(c, new Uint16Array(r.transcodedPixels.buffer), i, 0, 4, !0); else { c.width = r.width, c.height = r.height, c.generateMipMaps = e.fileInfo.images[i].levels.length > 1; const s = wU.GetInternalFormatFromBasisFormat(e.format, t); c.format = s, wF(c, t), e.fileInfo.images[i].levels.forEach((n, a) => { t._uploadCompressedDataToTextureDirectly(c, s, n.width, n.height, n.transcodedPixels, i, a); }), t._features.basisNeedsPOT && (yt.Log2(c.width) % 1 !== 0 || yt.Log2(c.height) % 1 !== 0) && (Ve.Warn("Loaded .basis texture width and height are not a power of two. 
Texture wrapping will be set to Texture.CLAMP_ADDRESSMODE as other modes are not supported with non power of two dimensions in webGL 1."), c._cachedWrapU = De.CLAMP_ADDRESSMODE, c._cachedWrapV = De.CLAMP_ADDRESSMODE); } } }, wU = { /** * URL to use when loading the basis transcoder */ JSModuleURL: CT.JSModuleURL, /** * URL to use when loading the wasm module for the transcoder */ WasmModuleURL: CT.WasmModuleURL, /** * Get the internal format to be passed to texImage2D corresponding to the .basis format value * @param basisFormat format chosen from GetSupportedTranscodeFormat * @returns internal format corresponding to the Basis format */ GetInternalFormatFromBasisFormat: bre, /** * Transcodes a loaded image file to compressed pixel data * @param data image data to transcode * @param config configuration options for the transcoding * @returns a promise resulting in the transcoded image */ TranscodeAsync: AB, /** * Loads a texture from the transcode result * @param texture texture load to * @param transcodeResult the result of transcoding the basis file to load from */ LoadTextureFromTranscodeResult: yB }; function $me() { const c = { cTFETC1: 0, cTFETC2: 1, cTFBC1: 2, cTFBC3: 3, cTFBC4: 4, cTFBC5: 5, cTFBC7: 6, cTFPVRTC1_4_RGB: 8, cTFPVRTC1_4_RGBA: 9, cTFASTC_4x4: 10, cTFATC_RGB: 11, cTFATC_RGBA_INTERPOLATED_ALPHA: 12, cTFRGBA32: 13, cTFRGB565: 14, cTFBGR565: 15, cTFRGBA4444: 16, cTFFXT1_RGB: 17, cTFPVRTC2_4_RGB: 18, cTFPVRTC2_4_RGBA: 19, cTFETC2_EAC_R11: 20, cTFETC2_EAC_RG11: 21 }; let e = null; onmessage = (n) => { if (n.data.action === "init") { if (!e) { try { importScripts(n.data.url); } catch (a) { postMessage({ action: "error", error: a }); } e = BASIS({ // Override wasm binary wasmBinary: n.data.wasmBinary }); } e !== null && e.then((a) => { BASIS = a, a.initializeBasis(), postMessage({ action: "init" }); }); } else if (n.data.action === "transcode") { const a = n.data.config, l = n.data.imageData, o = new BASIS.BasisFile(l), u = i(o); let h = n.data.ignoreSupportedFormats ? null : t(n.data.config, u), d = !1; h === null && (d = !0, h = u.hasAlpha ? c.cTFBC3 : c.cTFBC1); let f = !0; o.startTranscoding() || (f = !1); const p = []; for (let m = 0; m < u.images.length && f; m++) { const _ = u.images[m]; if (a.loadSingleImage === void 0 || a.loadSingleImage === m) { let v = _.levels.length; a.loadMipmapLevels === !1 && (v = 1); for (let C = 0; C < v; C++) { const x = _.levels[C], b = r(o, m, C, h, d); if (!b) { f = !1; break; } x.transcodedPixels = b, p.push(x.transcodedPixels.buffer); } } } o.close(), o.delete(), d && (h = -1), f ? postMessage({ action: "transcode", success: f, id: n.data.id, fileInfo: u, format: h }, p) : postMessage({ action: "transcode", success: f, id: n.data.id }); } }; function t(n, a) { let l = null; return n.supportedCompressionFormats && (n.supportedCompressionFormats.astc ? l = c.cTFASTC_4x4 : n.supportedCompressionFormats.bc7 ? l = c.cTFBC7 : n.supportedCompressionFormats.s3tc ? l = a.hasAlpha ? c.cTFBC3 : c.cTFBC1 : n.supportedCompressionFormats.pvrtc ? l = a.hasAlpha ? c.cTFPVRTC1_4_RGBA : c.cTFPVRTC1_4_RGB : n.supportedCompressionFormats.etc2 ? l = c.cTFETC2 : n.supportedCompressionFormats.etc1 ? 
l = c.cTFETC1 : l = c.cTFRGB565), l; } function i(n) { const a = n.getHasAlpha(), l = n.getNumImages(), o = []; for (let h = 0; h < l; h++) { const d = { levels: [] }, f = n.getNumLevels(h); for (let p = 0; p < f; p++) { const m = { width: n.getImageWidth(h, p), height: n.getImageHeight(h, p) }; d.levels.push(m); } o.push(d); } return { hasAlpha: a, images: o }; } function r(n, a, l, o, u) { const h = n.getImageTranscodedSizeInBytes(a, l, o); let d = new Uint8Array(h); if (!n.transcodeImage(d, a, l, o, 1, 0)) return null; if (u) { const f = n.getImageWidth(a, l) + 3 & -4, p = n.getImageHeight(a, l) + 3 & -4; d = s(d, 0, f, p); } return d; } function s(n, a, l, o) { const u = new Uint16Array(4), h = new Uint16Array(l * o), d = l / 4, f = o / 4; for (let p = 0; p < f; p++) for (let m = 0; m < d; m++) { const _ = a + 8 * (p * d + m); u[0] = n[_] | n[_ + 1] << 8, u[1] = n[_ + 2] | n[_ + 3] << 8, u[2] = (2 * (u[0] & 31) + 1 * (u[1] & 31)) / 3 | (2 * (u[0] & 2016) + 1 * (u[1] & 2016)) / 3 & 2016 | (2 * (u[0] & 63488) + 1 * (u[1] & 63488)) / 3 & 63488, u[3] = (2 * (u[1] & 31) + 1 * (u[0] & 31)) / 3 | (2 * (u[1] & 2016) + 1 * (u[0] & 2016)) / 3 & 2016 | (2 * (u[1] & 63488) + 1 * (u[0] & 63488)) / 3 & 63488; for (let v = 0; v < 4; v++) { const C = n[_ + 4 + v]; let x = (p * 4 + v) * l + m * 4; h[x++] = u[C & 3], h[x++] = u[C >> 2 & 3], h[x++] = u[C >> 4 & 3], h[x++] = u[C >> 6 & 3]; } } return h; } } Object.defineProperty(wU, "JSModuleURL", { get: function() { return CT.JSModuleURL; }, set: function(c) { CT.JSModuleURL = c; } }); Object.defineProperty(wU, "WasmModuleURL", { get: function() { return CT.WasmModuleURL; }, set: function(c) { CT.WasmModuleURL = c; } }); class Ere { constructor() { this.supportCascades = !1; } /** * This returns if the loader support the current file information. * @param extension defines the file extension of the file being loaded * @returns true if the loader can load the specified file */ canLoad(e) { return e.endsWith(".basis"); } /** * Uploads the cube texture data to the WebGL texture. It has already been bound. * @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param createPolynomials will be true if polynomials have been requested * @param onLoad defines the callback to trigger once the texture is ready * @param onError defines the callback to trigger in case of error */ loadCubeData(e, t, i, r, s) { if (Array.isArray(e)) return; const n = t.getEngine().getCaps(), a = { supportedCompressionFormats: { etc1: !!n.etc1, s3tc: !!n.s3tc, pvrtc: !!n.pvrtc, etc2: !!n.etc2, astc: !!n.astc, bc7: !!n.bptc } }; AB(e, a).then((l) => { const o = l.fileInfo.images[0].levels.length > 1 && t.generateMipMaps; yB(t, l), t.getEngine()._setCubeMapTextureParams(t, o), t.isReady = !0, t.onLoadedObservable.notifyObservers(t), t.onLoadedObservable.clear(), r && r(); }).catch((l) => { const o = "Failed to transcode Basis file, transcoding may not be supported on this device"; Ve.Warn(o), t.isReady = !0, s && s(l); }); } /** * Uploads the 2D texture data to the WebGL texture. It has already been bound once in the callback. 
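* @example
* // Hedged sketch: .basis files also load through the regular texture path once this
* // loader is registered. The transcoder locations are configurable; the public name
* // BABYLON.BasisToolsOptions and the URLs below are assumptions.
* BABYLON.BasisToolsOptions.JSModuleURL = "https://example.com/basis_transcoder.js";
* BABYLON.BasisToolsOptions.WasmModuleURL = "https://example.com/basis_transcoder.wasm";
* const basisTexture = new BABYLON.Texture("textures/example.basis", scene);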
* @param data contains the texture data * @param texture defines the BabylonJS internal texture * @param callback defines the method to call once ready to upload */ loadData(e, t, i) { const r = t.getEngine().getCaps(), s = { supportedCompressionFormats: { etc1: !!r.etc1, s3tc: !!r.s3tc, pvrtc: !!r.pvrtc, etc2: !!r.etc2, astc: !!r.astc, bc7: !!r.bptc } }; AB(e, s).then((n) => { const a = n.fileInfo.images[0].levels[0], l = n.fileInfo.images[0].levels.length > 1 && t.generateMipMaps; i(a.width, a.height, l, n.format !== -1, () => { yB(t, n); }); }).catch((n) => { Ve.Warn("Failed to transcode Basis file, transcoding may not be supported on this device"), Ve.Warn(`Failed to transcode Basis file: ${n}`), i(0, 0, !1, !1, () => { }, !0); }); } } $e._TextureLoaders.push(new Ere()); class $8 extends ra { /** * Get if draw buffers (render textures) are currently supported by the used hardware and browser. */ get isSupported() { var e, t; return (t = (e = this._engine) === null || e === void 0 ? void 0 : e.getCaps().drawBuffersExtension) !== null && t !== void 0 ? t : !1; } /** * Get the list of textures generated by the multi render target. */ get textures() { return this._textures; } /** * Gets the number of textures in this MRT. This number can be different from `_textures.length` in case a depth texture is generated. */ get count() { return this._count; } /** * Get the depth texture generated by the multi render target if options.generateDepthTexture has been set */ get depthTexture() { return this._textures[this._textures.length - 1]; } /** * Set the wrapping mode on U of all the textures we are rendering to. * Can be any of the Texture. (CLAMP_ADDRESSMODE, MIRROR_ADDRESSMODE or WRAP_ADDRESSMODE) */ set wrapU(e) { if (this._textures) for (let t = 0; t < this._textures.length; t++) this._textures[t].wrapU = e; } /** * Set the wrapping mode on V of all the textures we are rendering to. * Can be any of the Texture. (CLAMP_ADDRESSMODE, MIRROR_ADDRESSMODE or WRAP_ADDRESSMODE) */ set wrapV(e) { if (this._textures) for (let t = 0; t < this._textures.length; t++) this._textures[t].wrapV = e; } /** * Instantiate a new multi render target texture. * A multi render target, like a render target provides the ability to render to a texture. * Unlike the render target, it can render to several draw buffers (render textures) in one draw. * This is specially interesting in deferred rendering or for any effects requiring more than * just one color from a single pass. * @param name Define the name of the texture * @param size Define the size of the buffers to render to * @param count Define the number of target we are rendering into * @param scene Define the scene the texture belongs to * @param options Define the options used to create the multi render target * @param textureNames Define the names to set to the textures (if count \> 0 - optional) */ constructor(e, t, i, r, s, n) { const a = s && s.generateMipMaps ? s.generateMipMaps : !1, l = s && s.generateDepthTexture ? s.generateDepthTexture : !1, o = s && s.depthTextureFormat ? s.depthTextureFormat : 15, u = !s || s.doNotChangeAspectRatio === void 0 ? !0 : s.doNotChangeAspectRatio, h = s && s.drawOnlyOnFirstAttachmentByDefault ? 
s.drawOnlyOnFirstAttachmentByDefault : !1; if (super(e, t, r, a, u, void 0, void 0, void 0, void 0, void 0, void 0, void 0, !0), !this.isSupported) { this.dispose(); return; } this._textureNames = n; const d = [], f = [], p = [], m = [], _ = [], v = [], C = [], x = []; this._initTypes(i, d, f, p, m, _, v, C, x, s); const b = !s || s.generateDepthBuffer === void 0 ? !0 : s.generateDepthBuffer, S = !s || s.generateStencilBuffer === void 0 ? !1 : s.generateStencilBuffer; this._multiRenderTargetOptions = { samplingModes: f, generateMipMaps: a, generateDepthBuffer: b, generateStencilBuffer: S, generateDepthTexture: l, depthTextureFormat: o, types: d, textureCount: i, useSRGBBuffers: p, formats: m, targetTypes: _, faceIndex: v, layerIndex: C, layerCounts: x, labels: n, label: e }, this._count = i, this._drawOnlyOnFirstAttachmentByDefault = h, i > 0 && (this._createInternalTextures(), this._createTextures(n)); } _initTypes(e, t, i, r, s, n, a, l, o, u) { for (let h = 0; h < e; h++) u && u.types && u.types[h] !== void 0 ? t.push(u.types[h]) : t.push(u && u.defaultType ? u.defaultType : 0), u && u.samplingModes && u.samplingModes[h] !== void 0 ? i.push(u.samplingModes[h]) : i.push(De.BILINEAR_SAMPLINGMODE), u && u.useSRGBBuffers && u.useSRGBBuffers[h] !== void 0 ? r.push(u.useSRGBBuffers[h]) : r.push(!1), u && u.formats && u.formats[h] !== void 0 ? s.push(u.formats[h]) : s.push(5), u && u.targetTypes && u.targetTypes[h] !== void 0 ? n.push(u.targetTypes[h]) : n.push(3553), u && u.faceIndex && u.faceIndex[h] !== void 0 ? a.push(u.faceIndex[h]) : a.push(0), u && u.layerIndex && u.layerIndex[h] !== void 0 ? l.push(u.layerIndex[h]) : l.push(0), u && u.layerCounts && u.layerCounts[h] !== void 0 ? o.push(u.layerCounts[h]) : o.push(1); } _createInternaTextureIndexMapping() { const e = {}, t = []; if (!this._renderTarget) return t; const i = this._renderTarget.textures; for (let r = 0; r < i.length; r++) { const s = i[r]; if (!s) continue; const n = e[s.uniqueId]; n !== void 0 ? t[r] = n : e[s.uniqueId] = r; } return t; } /** * @internal */ _rebuild(e = !1, t) { if (this._count < 1) return; const i = this._createInternaTextureIndexMapping(); this.releaseInternalTextures(), this._createInternalTextures(), e && (this._releaseTextures(), this._createTextures(t)); const r = this._renderTarget.textures; for (let s = 0; s < r.length; s++) { const n = this._textures[s]; i[s] !== void 0 && this._renderTarget.setTexture(r[i[s]], s), n._texture = r[s], n._texture && (n._noMipmap = !n._texture.useMipMaps, n._useSRGBBuffer = n._texture._useSRGBBuffer); } this.samples !== 1 && this._renderTarget.setSamples(this.samples, !this._drawOnlyOnFirstAttachmentByDefault, !0); } _createInternalTextures() { this._renderTarget = this._getEngine().createMultipleRenderTarget(this._size, this._multiRenderTargetOptions, !this._drawOnlyOnFirstAttachmentByDefault), this._texture = this._renderTarget.texture; } _releaseTextures() { if (this._textures) for (let e = 0; e < this._textures.length; e++) this._textures[e]._texture = null, this._textures[e].dispose(); } _createTextures(e) { const t = this._renderTarget.textures; this._textures = []; for (let i = 0; i < t.length; i++) { const r = new De(null, this.getScene()); e != null && e[i] && (r.name = e[i]), r._texture = t[i], r._texture && (r._noMipmap = !r._texture.useMipMaps, r._useSRGBBuffer = r._texture._useSRGBBuffer), this._textures.push(r); } } /** * Replaces an internal texture within the MRT. Useful to share textures between MultiRenderTarget. 
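* @example
* // Hedged sketch: reuses the first color attachment of one MRT as attachment 0 of a
* // second MRT so both render into the same internal texture. Assumes the public name
* // BABYLON.MultiRenderTarget for this class and an existing `scene`.
* const mrtA = new BABYLON.MultiRenderTarget("mrtA", 512, 2, scene);
* const mrtB = new BABYLON.MultiRenderTarget("mrtB", 512, 2, scene);
* mrtB.setInternalTexture(mrtA.textures[0].getInternalTexture(), 0, false);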
* @param texture The new texture to set in the MRT * @param index The index of the texture to replace * @param disposePrevious Set to true if the previous internal texture should be disposed */ setInternalTexture(e, t, i = !0) { var r, s; if (this.renderTarget && (t === 0 && (this._texture = e), this.renderTarget.setTexture(e, t, i), this.textures[t] || (this.textures[t] = new De(null, this.getScene()), this.textures[t].name = (s = (r = this._textureNames) === null || r === void 0 ? void 0 : r[t]) !== null && s !== void 0 ? s : this.textures[t].name), this.textures[t]._texture = e, this.textures[t]._noMipmap = !e.useMipMaps, this.textures[t]._useSRGBBuffer = e._useSRGBBuffer, this._count = this.renderTarget.textures ? this.renderTarget.textures.length : 0, this._multiRenderTargetOptions.types && (this._multiRenderTargetOptions.types[t] = e.type), this._multiRenderTargetOptions.samplingModes && (this._multiRenderTargetOptions.samplingModes[t] = e.samplingMode), this._multiRenderTargetOptions.useSRGBBuffers && (this._multiRenderTargetOptions.useSRGBBuffers[t] = e._useSRGBBuffer), this._multiRenderTargetOptions.targetTypes && this._multiRenderTargetOptions.targetTypes[t] !== -1)) { let n = 0; e.is2DArray ? n = 35866 : e.isCube ? n = 34067 : e.is3D ? n = 32879 : n = 3553, this._multiRenderTargetOptions.targetTypes[t] = n; } } /** * Changes an attached texture's face index or layer. * @param index The index of the texture to modify the attachment of * @param layerIndex The layer index of the texture to be attached to the framebuffer * @param faceIndex The face index of the texture to be attached to the framebuffer */ setLayerAndFaceIndex(e, t = -1, i = -1) { !this.textures[e] || !this.renderTarget || (this._multiRenderTargetOptions.layerIndex && (this._multiRenderTargetOptions.layerIndex[e] = t), this._multiRenderTargetOptions.faceIndex && (this._multiRenderTargetOptions.faceIndex[e] = i), this.renderTarget.setLayerAndFaceIndex(e, t, i)); } /** * Changes every attached texture's face index or layer. * @param layerIndices The layer indices of the texture to be attached to the framebuffer * @param faceIndices The face indices of the texture to be attached to the framebuffer */ setLayerAndFaceIndices(e, t) { this.renderTarget && (this._multiRenderTargetOptions.layerIndex = e, this._multiRenderTargetOptions.faceIndex = t, this.renderTarget.setLayerAndFaceIndices(e, t)); } /** * Define the number of samples used if MSAA is enabled. */ get samples() { return this._samples; } set samples(e) { this._renderTarget ? this._samples = this._renderTarget.setSamples(e) : this._samples = e; } /** * Resize all the textures in the multi render target. * Be careful as it will recreate all the data in the new texture. * @param size Define the new size */ resize(e) { this._processSizeParameter(e, !1), this._rebuild(void 0, this._textureNames); } /** * Changes the number of render targets in this MRT * Be careful as it will recreate all the data in the new texture. 
* @param count new texture count * @param options Specifies texture types and sampling modes for new textures * @param textureNames Specifies the names of the textures (optional) */ updateCount(e, t, i) { this._multiRenderTargetOptions.textureCount = e, this._count = e; const r = [], s = [], n = [], a = [], l = [], o = [], u = [], h = []; this._textureNames = i, this._initTypes(e, r, s, n, a, l, o, u, h, t), this._multiRenderTargetOptions.types = r, this._multiRenderTargetOptions.samplingModes = s, this._multiRenderTargetOptions.useSRGBBuffers = n, this._multiRenderTargetOptions.formats = a, this._multiRenderTargetOptions.targetTypes = l, this._multiRenderTargetOptions.faceIndex = o, this._multiRenderTargetOptions.layerIndex = u, this._multiRenderTargetOptions.layerCounts = h, this._multiRenderTargetOptions.labels = i, this._rebuild(!0, i); } _unbindFrameBuffer(e, t) { this._renderTarget && e.unBindMultiColorAttachmentFramebuffer(this._renderTarget, this.isCube, () => { this.onAfterRenderObservable.notifyObservers(t); }); } /** * Dispose the render targets and their associated resources * @param doNotDisposeInternalTextures */ dispose(e = !1) { this._releaseTextures(), e ? this._texture = null : this.releaseInternalTextures(), super.dispose(); } /** * Release all the underlying texture used as draw buffers (render textures). */ releaseInternalTextures() { var e, t; const i = (e = this._renderTarget) === null || e === void 0 ? void 0 : e.textures; if (i) { for (let r = i.length - 1; r >= 0; r--) this._textures[r]._texture = null; (t = this._renderTarget) === null || t === void 0 || t.dispose(), this._renderTarget = null; } } } class UH { /** * Initializes a texture package frame. * @param id The numerical frame identifier * @param scale Scalar Vector2 for UV frame * @param offset Vector2 for the frame position in UV units. * @returns TexturePackerFrame */ constructor(e, t, i) { this.id = e, this.scale = t, this.offset = i; } } class w4 { /** * Initializes a texture package series from an array of meshes or a single mesh. * @param name The name of the package * @param meshes The target meshes to compose the package from * @param options The arguments that texture packer should follow while building. * @param scene The scene which the textures are scoped to. * @returns TexturePacker */ constructor(e, t, i, r) { var s, n, a, l, o, u, h, d, f, p, m, _, v; return this.name = e, this.meshes = t, this.scene = r, this.options = i, this.options.map = (s = this.options.map) !== null && s !== void 0 ? s : [ "ambientTexture", "bumpTexture", "diffuseTexture", "emissiveTexture", "lightmapTexture", "opacityTexture", "reflectionTexture", "refractionTexture", "specularTexture" ], this.options.uvsIn = (n = this.options.uvsIn) !== null && n !== void 0 ? n : Y.UVKind, this.options.uvsOut = (a = this.options.uvsOut) !== null && a !== void 0 ? a : Y.UVKind, this.options.layout = (l = this.options.layout) !== null && l !== void 0 ? l : w4.LAYOUT_STRIP, this.options.layout === w4.LAYOUT_COLNUM && (this.options.colnum = (o = this.options.colnum) !== null && o !== void 0 ? o : 8), this.options.updateInputMeshes = (u = this.options.updateInputMeshes) !== null && u !== void 0 ? u : !0, this.options.disposeSources = (h = this.options.disposeSources) !== null && h !== void 0 ? h : !0, this._expecting = 0, this.options.fillBlanks = (d = this.options.fillBlanks) !== null && d !== void 0 ? d : !0, this.options.fillBlanks === !0 && (this.options.customFillColor = (f = this.options.customFillColor) !== null && f !== void 0 ? 
f : "black"), this.options.frameSize = (p = this.options.frameSize) !== null && p !== void 0 ? p : 256, this.options.paddingRatio = (m = this.options.paddingRatio) !== null && m !== void 0 ? m : 0.0115, this._paddingValue = Math.ceil(this.options.frameSize * this.options.paddingRatio), this._paddingValue % 2 !== 0 && this._paddingValue++, this.options.paddingMode = (_ = this.options.paddingMode) !== null && _ !== void 0 ? _ : w4.SUBUV_WRAP, this.options.paddingMode === w4.SUBUV_COLOR && (this.options.paddingColor = (v = this.options.paddingColor) !== null && v !== void 0 ? v : new Et(0, 0, 0, 1)), this.sets = {}, this.frames = [], this; } /** * Starts the package process * @param resolve The promises resolution function * @returns TexturePacker */ _createFrames(e) { const t = this._calculateSize(), i = new at(1, 1).divide(t); let r = 0; const s = this._expecting, n = this.meshes.length, a = Object.keys(this.sets); for (let d = 0; d < a.length; d++) { const f = a[d], p = new gg( this.name + ".TexturePack." + f + "Set", { width: t.x, height: t.y }, this.scene, !0, //Generate Mips De.TRILINEAR_SAMPLINGMODE, $e.TEXTUREFORMAT_RGBA ), m = p.getContext(); m.fillStyle = "rgba(0,0,0,0)", m.fillRect(0, 0, t.x, t.y), p.update(!1), this.sets[f] = p; } const l = this.options.frameSize || 256, o = this._paddingValue, u = l + 2 * o, h = () => { this._calculateMeshUVFrames(l, o, t, i, this.options.updateInputMeshes || !1); }; for (let d = 0; d < n; d++) { const p = this.meshes[d].material; for (let m = 0; m < a.length; m++) { const _ = new gg("temp", u, this.scene, !0), v = _.getContext(), C = this._getFrameOffset(d), x = () => { r++, _.update(!1); const S = v.getImageData(0, 0, u, u), M = this.sets[b]; if (M.getContext().putImageData(S, t.x * C.x, t.y * C.y), _.dispose(), M.update(!1), r == s) { h(), e(); return; } }, b = a[m] || "_blank"; if (!p || p[b] === null) v.fillStyle = "rgba(0,0,0,0)", this.options.fillBlanks && (v.fillStyle = this.options.customFillColor), v.fillRect(0, 0, u, u), x(); else { const S = p[b], M = new Image(); S instanceof gg ? 
M.src = S.getContext().canvas.toDataURL("image/png") : M.src = S.url, Ve.SetCorsBehavior(M.src, M), M.onload = () => { v.fillStyle = "rgba(0,0,0,0)", v.fillRect(0, 0, u, u), _.update(!1), v.setTransform(1, 0, 0, -1, 0, 0); const R = [0, 0, 1, 0, 1, 1, 0, 1, -1, 1, -1, 0, -2, 0, -1, 1, -1]; switch (this.options.paddingMode) { case 0: for (let w = 0; w < 9; w++) v.drawImage(M, 0, 0, M.width, M.height, o + l * R[w], o + l * R[w + 1] - u, l, l); break; case 1: for (let w = 0; w < o; w++) v.drawImage(M, 0, 0, M.width, M.height, w + l * R[0], o - u, l, l), v.drawImage(M, 0, 0, M.width, M.height, o * 2 - w, o - u, l, l), v.drawImage(M, 0, 0, M.width, M.height, o, w - u, l, l), v.drawImage(M, 0, 0, M.width, M.height, o, o * 2 - w - u, l, l); v.drawImage(M, 0, 0, M.width, M.height, o + l * R[0], o + l * R[1] - u, l, l); break; case 2: v.fillStyle = (this.options.paddingColor || ze.Black()).toHexString(), v.fillRect(0, 0, u, -u), v.clearRect(o, o, l, l), v.drawImage(M, 0, 0, M.width, M.height, o + l * R[0], o + l * R[1] - u, l, l); break; } v.setTransform(1, 0, 0, 1, 0, 0), x(); }; } } } } /** * Calculates the Size of the Channel Sets * @returns Vector2 */ _calculateSize() { const e = this.meshes.length || 0, t = this.options.frameSize || 0, i = this._paddingValue || 0; switch (this.options.layout) { case 0: return new at(t * e + 2 * i * e, t + 2 * i); case 1: { const r = Math.max(2, Math.ceil(Math.sqrt(e))), s = t * r + 2 * i * r; return new at(s, s); } case 2: { const r = this.options.colnum || 1, s = Math.max(1, Math.ceil(e / r)); return new at(t * r + 2 * i * r, t * s + 2 * i * s); } } return at.Zero(); } /** * Calculates the UV data for the frames. * @param baseSize the base frameSize * @param padding the base frame padding * @param dtSize size of the Dynamic Texture for that channel * @param dtUnits is 1/dtSize * @param update flag to update the input meshes */ _calculateMeshUVFrames(e, t, i, r, s) { const n = this.meshes.length; for (let a = 0; a < n; a++) { const l = this.meshes[a], o = new at(e / i.x, e / i.y), u = r.clone().scale(t), d = this._getFrameOffset(a).add(u), f = new UH(a, o, d); this.frames.push(f), s && (this._updateMeshUV(l, a), this._updateTextureReferences(l)); } } /** * Calculates the frames Offset. * @param index of the frame * @returns Vector2 */ _getFrameOffset(e) { const t = this.meshes.length; let i, r, s; switch (this.options.layout) { case 0: return i = 1 / t, new at(e * i, 0); case 1: { const n = Math.max(2, Math.ceil(Math.sqrt(t))); return r = Math.floor(e / n), s = e - r * n, i = 1 / n, new at(s * i, r * i); } case 2: { const n = this.options.colnum || 1, a = Math.max(1, Math.ceil(t / n)); return s = Math.floor(e / a), r = e - s * a, i = new at(1 / n, 1 / a), new at(s * i.x, r * i.y); } } return at.Zero(); } /** * Updates a Mesh to the frame data * @param mesh that is the target * @param frameID or the frame index */ _updateMeshUV(e, t) { const i = this.frames[t], r = e.getVerticesData(this.options.uvsIn || Y.UVKind), s = []; let n = 0; r.length && (n = r.length || 0); for (let a = 0; a < n; a += 2) s.push(r[a] * i.scale.x + i.offset.x, r[a + 1] * i.scale.y + i.offset.y); e.setVerticesData(this.options.uvsOut || Y.UVKind, s); } /** * Updates a Meshes materials to use the texture packer channels * @param m is the mesh to target * @param force all channels on the packer to be set. 
*/ _updateTextureReferences(e, t = !1) { const i = e.material, r = Object.keys(this.sets), s = (n) => { n.dispose && n.dispose(); }; for (let n = 0; n < r.length; n++) { const a = r[n]; if (t) i[a] !== null && s(i[a]), i[a] = this.sets[a]; else { if (!i) return; i[a] !== null && (s(i[a]), i[a] = this.sets[a]); } } } /** * Public method to set a Mesh to a frame * @param m that is the target * @param frameID or the frame index * @param updateMaterial trigger for if the Meshes attached Material be updated? */ setMeshToFrame(e, t, i = !1) { this._updateMeshUV(e, t), i && this._updateTextureReferences(e, !0); } /** * Starts the async promise to compile the texture packer. * @returns Promise */ processAsync() { return new Promise((e, t) => { try { if (this.meshes.length === 0) { e(); return; } let i = 0; const r = (s) => { if (i++, this.options.map) { for (let n = 0; n < this.options.map.length; n++) { const a = this.options.map[n]; s[a] !== null && (this.sets[this.options.map[n]] || (this.sets[this.options.map[n]] = !0), this._expecting++); } i === this.meshes.length && this._createFrames(e); } }; for (let s = 0; s < this.meshes.length; s++) { const n = this.meshes[s], a = n.material; if (!a) { if (i++, i === this.meshes.length) return this._createFrames(e); continue; } a.forceCompilationAsync(n).then(() => { r(a); }); } } catch (i) { return t(i); } }); } /** * Disposes all textures associated with this packer */ dispose() { const e = Object.keys(this.sets); for (let t = 0; t < e.length; t++) { const i = e[t]; this.sets[i].dispose(); } } /** * Starts the download process for all the channels converting them to base64 data and embedding it all in a JSON file. * @param imageType is the image type to use. * @param quality of the image if downloading as jpeg, Ranges from >0 to 1. */ download(e = "png", t = 1) { setTimeout(() => { const i = { name: this.name, sets: {}, options: {}, frames: [] }, r = Object.keys(this.sets), s = Object.keys(this.options); try { for (let l = 0; l < r.length; l++) { const o = r[l], u = this.sets[o]; i.sets[o] = u.getContext().canvas.toDataURL("image/" + e, t); } for (let l = 0; l < s.length; l++) { const o = s[l]; i.options[o] = this.options[o]; } for (let l = 0; l < this.frames.length; l++) { const o = this.frames[l]; i.frames.push(o.scale.x, o.scale.y, o.offset.x, o.offset.y); } } catch (l) { Ce.Warn("Unable to download: " + l); return; } const n = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(i, null, 4)), a = document.createElement("a"); a.setAttribute("href", n), a.setAttribute("download", this.name + "_texurePackage.json"), document.body.appendChild(a), a.click(), a.remove(); }, 0); } /** * Public method to load a texturePacker JSON file. * @param data of the JSON file in string format. */ updateFromJSON(e) { try { const t = JSON.parse(e); this.name = t.name; const i = Object.keys(t.options); for (let s = 0; s < i.length; s++) this.options[i[s]] = t.options[i[s]]; for (let s = 0; s < t.frames.length; s += 4) { const n = new UH(s / 4, new at(t.frames[s], t.frames[s + 1]), new at(t.frames[s + 2], t.frames[s + 3])); this.frames.push(n); } const r = Object.keys(t.sets); for (let s = 0; s < r.length; s++) { const n = new De(t.sets[r[s]], this.scene, !1, !1); this.sets[r[s]] = n; } } catch (t) { Ce.Warn("Unable to update from JSON: " + t); } } } w4.LAYOUT_STRIP = 0; w4.LAYOUT_POWER2 = 1; w4.LAYOUT_COLNUM = 2; w4.SUBUV_WRAP = 0; w4.SUBUV_EXTEND = 1; w4.SUBUV_COLOR = 2; class Tre extends z4 { /** * Instantiates a new Custom Procedural Texture. 
* Procedural texturing is a way to programmatically create a texture. There are 2 types of procedural textures: code-only, and code that references some classic 2D images, sometimes called 'refMaps' or 'sampler' images. * Custom Procedural textures are the easiest way to create your own procedural texture in your application. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/proceduralTextures#creating-custom-procedural-textures * @param name Define the name of the texture * @param texturePath Define the folder path containing all the custom texture related files (config, shaders...) * @param size Define the size of the texture to create * @param scene Define the scene the texture belongs to * @param fallbackTexture Define a fallback texture in case there were issues creating the custom texture * @param generateMipMaps Define if the texture should create mip maps or not * @param skipJson Define a boolean indicating that there is no json config file to load */ constructor(e, t, i, r, s, n, a) { super(e, i, null, r, s, n), this._animate = !0, this._time = 0, this._texturePath = t, a ? this.setFragment(this._texturePath) : this._loadJson(t), this.refreshRate = 1; } _loadJson(e) { const t = () => { try { this.setFragment(this._texturePath); } catch { Ce.Log("No json or ShaderStore or DOM element found for CustomProceduralTexture"); } }, i = e + "/config.json", r = new go(); r.open("GET", i), r.addEventListener("load", () => { if (r.status === 200 || r.responseText && r.responseText.length > 0) try { this._config = JSON.parse(r.response), this.updateShaderUniforms(), this.updateTextures(), this.setFragment(this._texturePath + "/custom"), this._animate = this._config.animate, this.refreshRate = this._config.refreshrate; } catch { t(); } else t(); }, !1), r.addEventListener("error", () => { t(); }, !1); try { r.send(); } catch { Ce.Error("CustomProceduralTexture: Error on XHR send request."); } } /** * Is the texture ready to be used? (rendered at least once) * @returns true if ready, otherwise, false. */ isReady() { if (!super.isReady()) return !1; for (const e in this._textures) if (!this._textures[e].isReady()) return !1; return !0; } /** * Render the texture to its associated render target. * @param useCameraPostProcess Define if camera post process should be applied to the texture */ render(e) { const t = this.getScene(); this._animate && t && (this._time += t.getAnimationRatio() * 0.03, this.updateShaderUniforms()), super.render(e); } /** * Update the list of dependent texture samplers in the shader. */ updateTextures() { for (let e = 0; e < this._config.sampler2Ds.length; e++) this.setTexture(this._config.sampler2Ds[e].sample2Dname, new De(this._texturePath + "/" + this._config.sampler2Ds[e].textureRelativeUrl, this.getScene())); } /** * Update the uniform values of the procedural texture in the shader. */ updateShaderUniforms() { if (this._config) for (let e = 0; e < this._config.uniforms.length; e++) { const t = this._config.uniforms[e]; switch (t.type) { case "float": this.setFloat(t.name, t.value); break; case "color3": this.setColor3(t.name, new ze(t.r, t.g, t.b)); break; case "color4": this.setColor4(t.name, new Et(t.r, t.g, t.b, t.a)); break; case "vector2": this.setVector2(t.name, new at(t.x, t.y)); break; case "vector3": this.setVector3(t.name, new D(t.x, t.y, t.z)); break; } } this.setFloat("time", this._time); } /** * Define if the texture animates or not. 
*/ get animate() { return this._animate; } set animate(e) { this._animate = e; } } const Zme = "noisePixelShader", qme = `uniform float brightness;uniform float persistence;uniform float timeScale;varying vec2 vUV;vec2 hash22(vec2 p) {p=p*mat2(127.1,311.7,269.5,183.3);p=-1.0+2.0*fract(sin(p)*43758.5453123);return sin(p*6.283+timeScale);} float interpolationNoise(vec2 p) {vec2 pi=floor(p);vec2 pf=p-pi;vec2 w=pf*pf*(3.-2.*pf);float f00=dot(hash22(pi+vec2(.0,.0)),pf-vec2(.0,.0));float f01=dot(hash22(pi+vec2(.0,1.)),pf-vec2(.0,1.));float f10=dot(hash22(pi+vec2(1.0,0.)),pf-vec2(1.0,0.));float f11=dot(hash22(pi+vec2(1.0,1.)),pf-vec2(1.0,1.));float xm1=mix(f00,f10,w.x);float xm2=mix(f01,f11,w.x);float ym=mix(xm1,xm2,w.y); return ym;} float perlinNoise2D(float x,float y) {float sum=0.0;float frequency=0.0;float amplitude=0.0;for(int i=0; i { }); } /** * Clones the raw cube texture. * @returns a new cube texture */ clone() { return St.Clone(() => { const e = this.getScene(), t = this._texture, i = new LU(e, t._bufferViewArray, t.width, t.format, t.type, t.generateMipMaps, t.invertY, t.samplingMode, t._compression); return t.source === ts.CubeRawRGBD && i.updateRGBDAsync(t._bufferViewArrayArray, t._sphericalPolynomial, t._lodGenerationScale, t._lodGenerationOffset), i; }, this); } } class Jme extends De { /** * Create a new RawTexture3D * @param data defines the data of the texture * @param width defines the width of the texture * @param height defines the height of the texture * @param depth defines the depth of the texture * @param format defines the texture format to use * @param scene defines the hosting scene * @param generateMipMaps defines a boolean indicating if mip levels should be generated (true by default) * @param invertY defines if texture must be stored with Y axis inverted * @param samplingMode defines the sampling mode to use (Texture.TRILINEAR_SAMPLINGMODE by default) * @param textureType defines the texture Type (Engine.TEXTURETYPE_UNSIGNED_INT, Engine.TEXTURETYPE_FLOAT...) * @param creationFlags specific flags to use when creating the texture (1 for storage textures, for eg) */ constructor(e, t, i, r, s, n, a = !0, l = !1, o = De.TRILINEAR_SAMPLINGMODE, u = 0, h) { super(null, n, !a, l), this.format = s, this._texture = n.getEngine().createRawTexture3D(e, t, i, r, s, a, l, o, null, u, h), this.is3D = !0; } /** * Update the texture with new data * @param data defines the data to store in the texture */ update(e) { this._texture && this._getEngine().updateRawTexture3D(this._texture, e, this._texture.format, this._texture.invertY, null, this._texture.type); } } class gW extends ra { /** * Creates a refraction texture used by refraction channel of the standard material. * It is like a mirror but to see through a material. * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#refraction * @param name Define the texture name * @param size Define the size of the underlying texture * @param scene Define the scene the refraction belongs to * @param generateMipMaps Define if we need to generate mips level for the refraction */ constructor(e, t, i, r) { super(e, t, i, r, !0), this.refractionPlane = new Sd(0, 1, 0, 1), this.depth = 2, this.onBeforeRenderObservable.add(() => { this.getScene().clipPlane = this.refractionPlane; }), this.onAfterRenderObservable.add(() => { this.getScene().clipPlane = null; }); } /** * Clone the refraction texture. 
* @returns the cloned texture */ clone() { const e = this.getScene(); if (!e) return this; const t = this.getSize(), i = new gW(this.name, t.width, e, this._generateMipMaps); return i.hasAlpha = this.hasAlpha, i.level = this.level, i.refractionPlane = this.refractionPlane.clone(), this.renderList && (i.renderList = this.renderList.slice(0)), i.depth = this.depth, i; } /** * Serialize the texture to a JSON representation you could use in Parse later on * @returns the serialized JSON representation */ serialize() { if (!this.name) return null; const e = super.serialize(); return e.mirrorPlane = this.refractionPlane.asArray(), e.depth = this.depth, e; } } class e0e extends rT { /** * Gets the render target wrapper associated with this render target */ get renderTarget() { return this._renderTarget; } /** * Instantiates a new ThinRenderTargetTexture. * Tiny helper class to wrap a RenderTargetWrapper in a texture. * This can be used as an internal texture wrapper in ThinEngine to benefit from the cache and to hold on the associated RTT * @param engine Define the internalTexture to wrap * @param size Define the size of the RTT to create * @param options Define rendertarget options */ constructor(e, t, i) { super(null), this._renderTarget = null, this._engine = e, this._renderTargetOptions = i, this.resize(t); } /** * Resize the texture to a new desired size. * Be careful as it will recreate all the data in the new texture. * @param size Define the new size. It can be: * - a number for squared texture, * - an object containing { width: number, height: number } */ resize(e) { var t; (t = this._renderTarget) === null || t === void 0 || t.dispose(), this._renderTarget = null, this._texture = null, this._size = e, this._engine && (this._renderTarget = this._engine.createRenderTargetTexture(this._size, this._renderTargetOptions)), this._texture = this.renderTarget.texture; } /** * Get the underlying lower level texture from Babylon. * @returns the internal texture */ getInternalTexture() { return this._texture; } /** * Get the class name of the texture. * @returns "ThinRenderTargetTexture" */ getClassName() { return "ThinRenderTargetTexture"; } /** * Dispose the texture and release its associated resources. * @param disposeOnlyFramebuffers */ dispose(e = !1) { var t; (t = this._renderTarget) === null || t === void 0 || t.dispose(!0), this._renderTarget = null, e || super.dispose(); } } class Yo extends pP { /** * Creates a new connection point * @param name defines the connection point name * @param ownerBlock defines the block hosting this connection point * @param direction defines the direction of the connection point * @param _blockType * @param _blockName */ constructor(e, t, i, r, s) { super(e, t, i), this._blockType = r, this._blockName = s, this.needDualDirectionValidation = !0; } /** * Gets a number indicating if the current point can be connected to another point * @param connectionPoint defines the other connection point * @returns a number defining the compatibility state */ checkCompatibilityState(e) { return e instanceof Yo && e._blockName === this._blockName ? fm.Compatible : fm.TypeIncompatible; } /** * Creates a block suitable to be used as an input for this input point. * If null is returned, a block based on the point type will be created. 
* @returns The returned string parameter is the name of the output point of NodeMaterialBlock (first parameter of the returned array) that can be connected to the input */ createCustomInputBlock() { return [new this._blockType(this._blockName), this.name]; } } class Sre extends Wi { /** * Creates a new BonesBlock * @param name defines the block name */ constructor(e) { super(e, Le.Vertex), this.registerInput("matricesIndices", ue.Vector4), this.registerInput("matricesWeights", ue.Vector4), this.registerInput("matricesIndicesExtra", ue.Vector4, !0), this.registerInput("matricesWeightsExtra", ue.Vector4, !0), this.registerInput("world", ue.Matrix), this.registerOutput("output", ue.Matrix); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("boneSampler"), e._excludeVariableName("boneTextureWidth"), e._excludeVariableName("mBones"), e._excludeVariableName("BonesPerMesh"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "BonesBlock"; } /** * Gets the matrix indices input component */ get matricesIndices() { return this._inputs[0]; } /** * Gets the matrix weights input component */ get matricesWeights() { return this._inputs[1]; } /** * Gets the extra matrix indices input component */ get matricesIndicesExtra() { return this._inputs[2]; } /** * Gets the extra matrix weights input component */ get matricesWeightsExtra() { return this._inputs[3]; } /** * Gets the world input component */ get world() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } autoConfigure(e, t = () => !0) { if (!this.matricesIndices.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "matricesIndices" && t(r)); i || (i = new vs("matricesIndices"), i.setAsAttribute("matricesIndices")), i.output.connectTo(this.matricesIndices); } if (!this.matricesWeights.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "matricesWeights" && t(r)); i || (i = new vs("matricesWeights"), i.setAsAttribute("matricesWeights")), i.output.connectTo(this.matricesWeights); } if (!this.world.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.World && t(r)); i || (i = new vs("world"), i.setAsSystemValue(Ms.World)), i.output.connectTo(this.world); } } provideFallbacks(e, t) { e && e.useBones && e.computeBonesUsingShaders && e.skeleton && t.addCPUSkinningFallback(0, e); } bind(e, t, i) { Ke.BindBonesParameters(i, e); } prepareDefines(e, t, i) { i._areAttributesDirty && Ke.PrepareDefinesForBones(e, i); } _buildBlock(e) { super._buildBlock(e), e.sharedData.blocksWithFallbacks.push(this), e.sharedData.forcedBindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this), e.uniforms.push("boneTextureWidth"), e.uniforms.push("mBones"), e.samplers.push("boneSampler"); const t = `//${this.name}`; e._emitFunctionFromInclude("bonesDeclaration", t, { removeAttributes: !0, removeUniforms: !1, removeVaryings: !0, removeIfDef: !1 }); const i = e._getFreeVariableName("influence"); e.compilationString += e._emitCodeFromInclude("bonesVertex", t, { replaceStrings: [ { search: /finalWorld=finalWorld\*influence;/, replace: "" }, { search: /influence/gm, replace: i } ] }); const r = this._outputs[0], s = this.world; return e.compilationString += `#if NUM_BONE_INFLUENCERS>0 `, e.compilationString += this._declareOutput(r, e) + ` = ${s.associatedVariableName} * ${i}; 
`, e.compilationString += `#else `, e.compilationString += this._declareOutput(r, e) + ` = ${s.associatedVariableName}; `, e.compilationString += `#endif `, this; } } Be("BABYLON.BonesBlock", Sre); class Mre extends Wi { /** * Creates a new InstancesBlock * @param name defines the block name */ constructor(e) { super(e, Le.Vertex), this.registerInput("world0", ue.Vector4), this.registerInput("world1", ue.Vector4), this.registerInput("world2", ue.Vector4), this.registerInput("world3", ue.Vector4), this.registerInput("world", ue.Matrix, !0), this.registerOutput("output", ue.Matrix), this.registerOutput("instanceID", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstancesBlock"; } /** * Gets the first world row input component */ get world0() { return this._inputs[0]; } /** * Gets the second world row input component */ get world1() { return this._inputs[1]; } /** * Gets the third world row input component */ get world2() { return this._inputs[2]; } /** * Gets the forth world row input component */ get world3() { return this._inputs[3]; } /** * Gets the world input component */ get world() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the instanceID component */ get instanceID() { return this._outputs[1]; } autoConfigure(e, t = () => !0) { if (!this.world0.connectedPoint) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "world0" && t(r)); i || (i = new vs("world0"), i.setAsAttribute("world0")), i.output.connectTo(this.world0); } if (!this.world1.connectedPoint) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "world1" && t(r)); i || (i = new vs("world1"), i.setAsAttribute("world1")), i.output.connectTo(this.world1); } if (!this.world2.connectedPoint) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "world2" && t(r)); i || (i = new vs("world2"), i.setAsAttribute("world2")), i.output.connectTo(this.world2); } if (!this.world3.connectedPoint) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "world3" && t(r)); i || (i = new vs("world3"), i.setAsAttribute("world3")), i.output.connectTo(this.world3); } if (!this.world.connectedPoint) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "world" && t(r)); i || (i = new vs("world"), i.setAsSystemValue(Ms.World)), i.output.connectTo(this.world); } this.world.define = "!INSTANCES || THIN_INSTANCES"; } prepareDefines(e, t, i, r = !1, s) { let n = !1; i.INSTANCES !== r && (i.setValue("INSTANCES", r), n = !0), s && i.THIN_INSTANCES !== !!(s != null && s.getRenderingMesh().hasThinInstances) && (i.setValue("THIN_INSTANCES", !!(s != null && s.getRenderingMesh().hasThinInstances)), n = !0), n && i.markAsUnprocessed(); } _buildBlock(e) { super._buildBlock(e); const t = e.sharedData.scene.getEngine(); e.sharedData.blocksWithDefines.push(this); const i = this._outputs[0], r = this._outputs[1], s = this.world0, n = this.world1, a = this.world2, l = this.world3; return e.compilationString += `#ifdef INSTANCES `, e.compilationString += this._declareOutput(i, e) + ` = mat4(${s.associatedVariableName}, ${n.associatedVariableName}, ${a.associatedVariableName}, ${l.associatedVariableName}); `, e.compilationString += `#ifdef THIN_INSTANCES `, e.compilationString += `${i.associatedVariableName} = ${this.world.associatedVariableName} * ${i.associatedVariableName}; `, e.compilationString += `#endif `, t._caps.canUseGLInstanceID ? 
e.compilationString += this._declareOutput(r, e) + ` = float(gl_InstanceID); ` : e.compilationString += this._declareOutput(r, e) + ` = 0.0; `, e.compilationString += `#else `, e.compilationString += this._declareOutput(i, e) + ` = ${this.world.associatedVariableName}; `, e.compilationString += this._declareOutput(r, e) + ` = 0.0; `, e.compilationString += `#endif `, this; } } Be("BABYLON.InstancesBlock", Mre); class vW extends Wi { /** * Create a new MorphTargetsBlock * @param name defines the block name */ constructor(e) { super(e, Le.Vertex), this.registerInput("position", ue.Vector3), this.registerInput("normal", ue.Vector3), this.registerInput("tangent", ue.AutoDetect), this.tangent.addExcludedConnectionPointFromAllowedTypes(ue.Color4 | ue.Vector4 | ue.Vector3), this.registerInput("uv", ue.Vector2), this.registerOutput("positionOutput", ue.Vector3), this.registerOutput("normalOutput", ue.Vector3), this.registerOutput("tangentOutput", ue.Vector4), this.registerOutput("uvOutput", ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MorphTargetsBlock"; } /** * Gets the position input component */ get position() { return this._inputs[0]; } /** * Gets the normal input component */ get normal() { return this._inputs[1]; } /** * Gets the tangent input component */ get tangent() { return this._inputs[2]; } /** * Gets the tangent input component */ get uv() { return this._inputs[3]; } /** * Gets the position output component */ get positionOutput() { return this._outputs[0]; } /** * Gets the normal output component */ get normalOutput() { return this._outputs[1]; } /** * Gets the tangent output component */ get tangentOutput() { return this._outputs[2]; } /** * Gets the tangent output component */ get uvOutput() { return this._outputs[3]; } initialize(e) { e._excludeVariableName("morphTargetInfluences"); } autoConfigure(e, t = () => !0) { if (!this.position.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "position" && t(r)); i || (i = new vs("position"), i.setAsAttribute()), i.output.connectTo(this.position); } if (!this.normal.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "normal" && t(r)); i || (i = new vs("normal"), i.setAsAttribute("normal")), i.output.connectTo(this.normal); } if (!this.tangent.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "tangent" && t(r)); i || (i = new vs("tangent"), i.setAsAttribute("tangent")), i.output.connectTo(this.tangent); } if (!this.uv.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "uv" && t(r)); i || (i = new vs("uv"), i.setAsAttribute("uv")), i.output.connectTo(this.uv); } } prepareDefines(e, t, i) { if (e.morphTargetManager) { const r = e.morphTargetManager; r != null && r.isUsingTextureForTargets && r.numInfluencers !== i.NUM_MORPH_INFLUENCERS && i.markAsAttributesDirty(); } i._areAttributesDirty && Ke.PrepareDefinesForMorphTargets(e, i); } bind(e, t, i) { i && i.morphTargetManager && i.morphTargetManager.numInfluencers > 0 && (Ke.BindMorphTargetParameters(i, e), i.morphTargetManager.isUsingTextureForTargets && i.morphTargetManager._bind(e)); } replaceRepeatableContent(e, t, i, r) { const s = this.position, n = this.normal, a = this.tangent, l = this.uv, o = this.positionOutput, u = this.normalOutput, h = this.tangentOutput, d = this.uvOutput, f = e, p = r.NUM_MORPH_INFLUENCERS, m = i.morphTargetManager, _ = m && m.supportsNormals && r.NORMAL, v = m && 
m.supportsTangents && r.TANGENT, C = m && m.supportsUVs && r.UV1; let x = ""; m != null && m.isUsingTextureForTargets && p > 0 && (x += `float vertexID; `); for (let b = 0; b < p; b++) x += `#ifdef MORPHTARGETS `, m != null && m.isUsingTextureForTargets ? (x += `vertexID = float(gl_VertexID) * morphTargetTextureInfo.x; `, x += `${o.associatedVariableName} += (readVector3FromRawSampler(${b}, vertexID) - ${s.associatedVariableName}) * morphTargetInfluences[${b}]; `, x += `vertexID += 1.0; `) : x += `${o.associatedVariableName} += (position${b} - ${s.associatedVariableName}) * morphTargetInfluences[${b}]; `, _ && (x += `#ifdef MORPHTARGETS_NORMAL `, m != null && m.isUsingTextureForTargets ? (x += `${u.associatedVariableName} += (readVector3FromRawSampler(${b}, vertexID) - ${n.associatedVariableName}) * morphTargetInfluences[${b}]; `, x += `vertexID += 1.0; `) : x += `${u.associatedVariableName} += (normal${b} - ${n.associatedVariableName}) * morphTargetInfluences[${b}]; `, x += `#endif `), C && (x += `#ifdef MORPHTARGETS_UV `, m != null && m.isUsingTextureForTargets ? (x += `${d.associatedVariableName} += (readVector3FromRawSampler(${b}, vertexID).xy - ${l.associatedVariableName}) * morphTargetInfluences[${b}]; `, x += `vertexID += 1.0; `) : x += `${d.associatedVariableName}.xy += (uv_${b} - ${l.associatedVariableName}.xy) * morphTargetInfluences[${b}]; `, x += `#endif `), v && (x += `#ifdef MORPHTARGETS_TANGENT `, m != null && m.isUsingTextureForTargets ? x += `${h.associatedVariableName}.xyz += (readVector3FromRawSampler(${b}, vertexID) - ${a.associatedVariableName}.xyz) * morphTargetInfluences[${b}]; ` : x += `${h.associatedVariableName}.xyz += (tangent${b} - ${a.associatedVariableName}.xyz) * morphTargetInfluences[${b}]; `, a.type === ue.Vector4 ? 
x += `${h.associatedVariableName}.w = ${a.associatedVariableName}.w; ` : x += `${h.associatedVariableName}.w = 1.; `, x += `#endif `), x += `#endif `; if (f.compilationString = f.compilationString.replace(this._repeatableContentAnchor, x), p > 0) for (let b = 0; b < p; b++) f.attributes.push(Y.PositionKind + b), _ && f.attributes.push(Y.NormalKind + b), v && f.attributes.push(Y.TangentKind + b), C && f.attributes.push(Y.UVKind + "_" + b); } _buildBlock(e) { super._buildBlock(e), e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this), e.sharedData.repeatableContentBlocks.push(this); const t = this.position, i = this.normal, r = this.tangent, s = this.uv, n = this.positionOutput, a = this.normalOutput, l = this.tangentOutput, o = this.uvOutput, u = `//${this.name}`; return e.uniforms.push("morphTargetInfluences"), e.uniforms.push("morphTargetTextureInfo"), e.uniforms.push("morphTargetTextureIndices"), e.samplers.push("morphTargets"), e._emitFunctionFromInclude("morphTargetsVertexGlobalDeclaration", u), e._emitFunctionFromInclude("morphTargetsVertexDeclaration", u, { repeatKey: "maxSimultaneousMorphTargets" }), e.compilationString += `${this._declareOutput(n, e)} = ${t.associatedVariableName}; `, e.compilationString += `#ifdef NORMAL `, e.compilationString += `${this._declareOutput(a, e)} = ${i.associatedVariableName}; `, e.compilationString += `#else `, e.compilationString += `${this._declareOutput(a, e)} = vec3(0., 0., 0.); `, e.compilationString += `#endif `, e.compilationString += `#ifdef TANGENT `, e.compilationString += `${this._declareOutput(l, e)} = ${r.associatedVariableName}; `, e.compilationString += `#else `, e.compilationString += `${this._declareOutput(l, e)} = vec4(0., 0., 0., 0.); `, e.compilationString += `#endif `, e.compilationString += `#ifdef UV1 `, e.compilationString += `${this._declareOutput(o, e)} = ${s.associatedVariableName}; `, e.compilationString += `#else `, e.compilationString += `${this._declareOutput(o, e)} = vec2(0., 0.); `, e.compilationString += `#endif `, this._repeatableContentAnchor = e._repeatableContentAnchor, e.compilationString += this._repeatableContentAnchor, this; } } Be("BABYLON.MorphTargetsBlock", vW); class Rre extends Wi { /** * Creates a new LightInformationBlock * @param name defines the block name */ constructor(e) { super(e, Le.Vertex), this.registerInput("worldPosition", ue.Vector4, !1, Le.Vertex), this.registerOutput("direction", ue.Vector3), this.registerOutput("color", ue.Color3), this.registerOutput("intensity", ue.Float), this.registerOutput("shadowBias", ue.Float), this.registerOutput("shadowNormalBias", ue.Float), this.registerOutput("shadowDepthScale", ue.Float), this.registerOutput("shadowDepthRange", ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "LightInformationBlock"; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the direction output component */ get direction() { return this._outputs[0]; } /** * Gets the direction output component */ get color() { return this._outputs[1]; } /** * Gets the direction output component */ get intensity() { return this._outputs[2]; } /** * Gets the shadow bias output component */ get shadowBias() { return this._outputs[3]; } /** * Gets the shadow normal bias output component */ get shadowNormalBias() { return this._outputs[4]; } /** * Gets the shadow depth scale component */ get shadowDepthScale() { return this._outputs[5]; } /** * Gets the shadow depth 
range component */ get shadowDepthRange() { return this._outputs[6]; } bind(e, t, i) { if (!i) return; this.light && this.light.isDisposed() && (this.light = null); let r = this.light; const s = t.getScene(); if (!r && s.lights.length && (r = this.light = s.lights[0], this._forcePrepareDefines = !0), !r || !r.isEnabled) { e.setFloat3(this._lightDataUniformName, 0, 0, 0), e.setFloat4(this._lightColorUniformName, 0, 0, 0, 0); return; } r.transferToNodeMaterialEffect(e, this._lightDataUniformName), e.setColor4(this._lightColorUniformName, r.diffuse, r.intensity); const n = r.getShadowGenerator(); if ((this.shadowBias.hasEndpoints || this.shadowNormalBias.hasEndpoints || this.shadowDepthScale.hasEndpoints) && (n ? e.setFloat3(this._lightShadowUniformName, n.bias, n.normalBias, n.depthScale) : e.setFloat3(this._lightShadowUniformName, 0, 0, 0)), this.shadowDepthRange) if (n && s.activeCamera) { const a = r; e.setFloat2(this._lightShadowExtraUniformName, a.getDepthMinZ(s.activeCamera), a.getDepthMinZ(s.activeCamera) + a.getDepthMaxZ(s.activeCamera)); } else e.setFloat2(this._lightShadowExtraUniformName, 0, 0); } prepareDefines(e, t, i) { if (!i._areLightsDirty && !this._forcePrepareDefines) return; this._forcePrepareDefines = !1; const r = this.light; i.setValue(this._lightTypeDefineName, !!(r && r instanceof s6), !0); } _buildBlock(e) { super._buildBlock(e), e.sharedData.bindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this); const t = this.direction, i = this.color, r = this.intensity, s = this.shadowBias, n = this.shadowNormalBias, a = this.shadowDepthScale, l = this.shadowDepthRange; return this._lightDataUniformName = e._getFreeVariableName("lightData"), this._lightColorUniformName = e._getFreeVariableName("lightColor"), this._lightShadowUniformName = e._getFreeVariableName("shadowData"), this._lightShadowExtraUniformName = e._getFreeVariableName("shadowExtraData"), this._lightTypeDefineName = e._getFreeDefineName("LIGHTPOINTTYPE"), e._emitUniformFromString(this._lightDataUniformName, "vec3"), e._emitUniformFromString(this._lightColorUniformName, "vec4"), e.compilationString += `#ifdef ${this._lightTypeDefineName} `, e.compilationString += this._declareOutput(t, e) + ` = normalize(${this.worldPosition.associatedVariableName}.xyz - ${this._lightDataUniformName}); `, e.compilationString += `#else `, e.compilationString += this._declareOutput(t, e) + ` = ${this._lightDataUniformName}; `, e.compilationString += `#endif `, e.compilationString += this._declareOutput(i, e) + ` = ${this._lightColorUniformName}.rgb; `, e.compilationString += this._declareOutput(r, e) + ` = ${this._lightColorUniformName}.a; `, (s.hasEndpoints || n.hasEndpoints || a.hasEndpoints) && (e._emitUniformFromString(this._lightShadowUniformName, "vec3"), s.hasEndpoints && (e.compilationString += this._declareOutput(s, e) + ` = ${this._lightShadowUniformName}.x; `), n.hasEndpoints && (e.compilationString += this._declareOutput(n, e) + ` = ${this._lightShadowUniformName}.y; `), a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = ${this._lightShadowUniformName}.z; `)), l.hasEndpoints && (e._emitUniformFromString(this._lightShadowExtraUniformName, "vec2"), e.compilationString += this._declareOutput(l, e) + ` = ${this._lightShadowExtraUniformName}; `), this; } serialize() { const e = super.serialize(); return this.light && (e.lightId = this.light.id), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), e.lightId && (this.light = t.getLightById(e.lightId)); } } 
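// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the library bundle; never invoked here).
// LightInformationBlock, registered just below as "BABYLON.LightInformationBlock",
// exposes a light's direction, color and intensity to a node material graph.
// This sketch assumes the public, unminified Babylon.js API (BABYLON.NodeMaterial,
// BABYLON.InputBlock, BABYLON.TransformBlock, ...); the short identifiers used in
// this file (Rre, vs, Wi, ...) are internal minified aliases for those classes.
function exampleLightInformationBlockWiring(scene, light) {
    const material = new BABYLON.NodeMaterial("lightInfoSample", scene);

    // World-space vertex position: mesh "position" attribute transformed by the world matrix.
    const position = new BABYLON.InputBlock("position");
    position.setAsAttribute("position");
    const world = new BABYLON.InputBlock("world");
    world.setAsSystemValue(BABYLON.NodeMaterialSystemValues.World);
    const worldPos = new BABYLON.TransformBlock("worldPos");
    position.output.connectTo(worldPos.vector);
    world.output.connectTo(worldPos.transform);

    // Feed the world position into the light information block. If no light is assigned,
    // bind() above falls back to the first light of the scene at render time.
    const lightInfo = new BABYLON.LightInformationBlock("lightInfo");
    lightInfo.light = light;
    worldPos.output.connectTo(lightInfo.worldPosition);

    // lightInfo.direction / lightInfo.color / lightInfo.intensity can now be wired into
    // the rest of the graph (e.g. a lighting model feeding a FragmentOutputBlock)
    // before calling material.build().
    return { material, lightInfo };
}
// ---------------------------------------------------------------------------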
Be("BABYLON.LightInformationBlock", Rre); class AW extends Wi { /** * Create a new ImageProcessingBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.convertInputToLinearSpace = !0, this.registerInput("color", ue.AutoDetect), this.registerOutput("output", ue.Color4), this.registerOutput("rgb", ue.Color3), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Color4 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ImageProcessingBlock"; } /** * Gets the color input component */ get color() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the rgb component */ get rgb() { return this._outputs[1]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("exposureLinear"), e._excludeVariableName("contrast"), e._excludeVariableName("vInverseScreenSize"), e._excludeVariableName("vignetteSettings1"), e._excludeVariableName("vignetteSettings2"), e._excludeVariableName("vCameraColorCurveNegative"), e._excludeVariableName("vCameraColorCurveNeutral"), e._excludeVariableName("vCameraColorCurvePositive"), e._excludeVariableName("txColorTransform"), e._excludeVariableName("colorTransformSettings"), e._excludeVariableName("ditherIntensity"); } isReady(e, t, i) { return !(i._areImageProcessingDirty && t.imageProcessingConfiguration && !t.imageProcessingConfiguration.isReady()); } prepareDefines(e, t, i) { i._areImageProcessingDirty && t.imageProcessingConfiguration && t.imageProcessingConfiguration.prepareDefines(i); } bind(e, t, i) { i && t.imageProcessingConfiguration && t.imageProcessingConfiguration.bind(e); } _buildBlock(e) { var t; super._buildBlock(e), e.sharedData.blocksWithDefines.push(this), e.sharedData.blockingBlocks.push(this), e.sharedData.bindableBlocks.push(this), e.uniforms.push("exposureLinear"), e.uniforms.push("contrast"), e.uniforms.push("vInverseScreenSize"), e.uniforms.push("vignetteSettings1"), e.uniforms.push("vignetteSettings2"), e.uniforms.push("vCameraColorCurveNegative"), e.uniforms.push("vCameraColorCurveNeutral"), e.uniforms.push("vCameraColorCurvePositive"), e.uniforms.push("txColorTransform"), e.uniforms.push("colorTransformSettings"), e.uniforms.push("ditherIntensity"); const i = this.color, r = this._outputs[0], s = `//${this.name}`; return e._emitFunctionFromInclude("helperFunctions", s), e._emitFunctionFromInclude("imageProcessingDeclaration", s), e._emitFunctionFromInclude("imageProcessingFunctions", s), !((t = i.connectedPoint) === null || t === void 0) && t.isConnected && (i.connectedPoint.type === ue.Color4 || i.connectedPoint.type === ue.Vector4 ? 
e.compilationString += `${this._declareOutput(r, e)} = ${i.associatedVariableName}; ` : e.compilationString += `${this._declareOutput(r, e)} = vec4(${i.associatedVariableName}, 1.0); `, e.compilationString += `#ifdef IMAGEPROCESSINGPOSTPROCESS `, this.convertInputToLinearSpace && (e.compilationString += `${r.associatedVariableName}.rgb = toLinearSpace(${i.associatedVariableName}.rgb); `), e.compilationString += `#else `, e.compilationString += `#ifdef IMAGEPROCESSING `, this.convertInputToLinearSpace && (e.compilationString += `${r.associatedVariableName}.rgb = toLinearSpace(${i.associatedVariableName}.rgb); `), e.compilationString += `${r.associatedVariableName} = applyImageProcessing(${r.associatedVariableName}); `, e.compilationString += `#endif `, e.compilationString += `#endif `, this.rgb.hasEndpoints && (e.compilationString += this._declareOutput(this.rgb, e) + ` = ${this.output.associatedVariableName}.xyz; `)), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.convertInputToLinearSpace = ${this.convertInputToLinearSpace}; `, e; } serialize() { const e = super.serialize(); return e.convertInputToLinearSpace = this.convertInputToLinearSpace, e; } _deserialize(e, t, i) { var r; super._deserialize(e, t, i), this.convertInputToLinearSpace = (r = e.convertInputToLinearSpace) !== null && r !== void 0 ? r : !0; } } F([ ir("Convert input to linear space", $i.Boolean, "ADVANCED") ], AW.prototype, "convertInputToLinearSpace", void 0); Be("BABYLON.ImageProcessingBlock", AW); class jI extends Wi { /** * Create a new TBNBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment, !0), this.registerInput("normal", ue.AutoDetect, !1), this.normal.addExcludedConnectionPointFromAllowedTypes(ue.Color4 | ue.Vector4 | ue.Vector3), this.registerInput("tangent", ue.Vector4, !1), this.registerInput("world", ue.Matrix, !1), this.registerOutput("TBN", ue.Object, Le.Fragment, new Yo("TBN", this, no.Output, jI, "TBNBlock")), this.registerOutput("row0", ue.Vector3, Le.Fragment), this.registerOutput("row1", ue.Vector3, Le.Fragment), this.registerOutput("row2", ue.Vector3, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TBNBlock"; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("tbnNormal"), e._excludeVariableName("tbnTangent"), e._excludeVariableName("tbnBitangent"), e._excludeVariableName("TBN"); } /** * Gets the normal input component */ get normal() { return this._inputs[0]; } /** * Gets the tangent input component */ get tangent() { return this._inputs[1]; } /** * Gets the world matrix input component */ get world() { return this._inputs[2]; } /** * Gets the TBN output component */ // eslint-disable-next-line @typescript-eslint/naming-convention get TBN() { return this._outputs[0]; } /** * Gets the row0 of the output matrix */ get row0() { return this._outputs[1]; } /** * Gets the row1 of the output matrix */ get row1() { return this._outputs[2]; } /** * Gets the row2 of the output matrix */ get row2() { return this._outputs[3]; } get target() { return Le.Fragment; } set target(e) { } autoConfigure(e, t = () => !0) { if (!this.world.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isSystemValue && r.systemValue === Ms.World && t(r)); i || (i = new vs("world"), i.setAsSystemValue(Ms.World)), i.output.connectTo(this.world); } if 
(!this.normal.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "normal" && t(r)); i || (i = new vs("normal"), i.setAsAttribute("normal")), i.output.connectTo(this.normal); } if (!this.tangent.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "tangent" && r.type === ue.Vector4 && t(r)); i || (i = new vs("tangent"), i.setAsAttribute("tangent")), i.output.connectTo(this.tangent); } } prepareDefines(e, t, i) { var r, s, n, a; const l = this.normal, o = this.tangent; let u = l.isConnected; !((r = l.connectInputBlock) === null || r === void 0) && r.isAttribute && !e.isVerticesDataPresent((s = l.connectInputBlock) === null || s === void 0 ? void 0 : s.name) && (u = !1); let h = o.isConnected; !((n = o.connectInputBlock) === null || n === void 0) && n.isAttribute && !e.isVerticesDataPresent((a = o.connectInputBlock) === null || a === void 0 ? void 0 : a.name) && (h = !1); const d = u && h; i.setValue("TBNBLOCK", d, !0); } _buildBlock(e) { super._buildBlock(e); const t = this.normal, i = this.tangent, r = this.world, s = this.TBN, n = this.row0, a = this.row1, l = this.row2; return e.target === Le.Fragment && (e.compilationString += ` // ${this.name} vec3 tbnNormal = normalize(${t.associatedVariableName}).xyz; vec3 tbnTangent = normalize(${i.associatedVariableName}.xyz); vec3 tbnBitangent = cross(tbnNormal, tbnTangent) * ${i.associatedVariableName}.w; mat3 ${s.associatedVariableName} = mat3(${r.associatedVariableName}) * mat3(tbnTangent, tbnBitangent, tbnNormal); `, n.hasEndpoints && (e.compilationString += this._declareOutput(n, e) + ` = vec3(${s.associatedVariableName}[0][0], ${s.associatedVariableName}[0][1], ${s.associatedVariableName}[0][2]); `), a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = vec3(${s.associatedVariableName}[1][0], ${s.associatedVariableName}[1][1], ${s.associatedVariableName}[1][2]); `), l.hasEndpoints && (e.compilationString += this._declareOutput(l, e) + ` = vec3(${s.associatedVariableName}[2][0], ${s.associatedVariableName}[2][1], ${s.associatedVariableName}[2][2]); `), e.sharedData.blocksWithDefines.push(this)), this; } } Be("BABYLON.TBNBlock", jI); class bw extends Wi { /** * Create a new PerturbNormalBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._tangentSpaceParameterName = "", this._tangentCorrectionFactorName = "", this._worldMatrixName = "", this.invertX = !1, this.invertY = !1, this.useParallaxOcclusion = !1, this.useObjectSpaceNormalMap = !1, this._isUnique = !0, this.registerInput("worldPosition", ue.Vector4, !1), this.registerInput("worldNormal", ue.Vector4, !1), this.registerInput("worldTangent", ue.Vector4, !0), this.registerInput("uv", ue.Vector2, !1), this.registerInput("normalMapColor", ue.Color3, !1), this.registerInput("strength", ue.Float, !1), this.registerInput("viewDirection", ue.Vector3, !0), this.registerInput("parallaxScale", ue.Float, !0), this.registerInput("parallaxHeight", ue.Float, !0), this.registerInput("TBN", ue.Object, !0, Le.VertexAndFragment, new Yo("TBN", this, no.Input, jI, "TBNBlock")), this.registerInput("world", ue.Matrix, !0), this.registerOutput("output", ue.Vector4), this.registerOutput("uvOffset", ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PerturbNormalBlock"; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the world normal input component */ get worldNormal() { return 
this._inputs[1]; } /** * Gets the world tangent input component */ get worldTangent() { return this._inputs[2]; } /** * Gets the uv input component */ get uv() { return this._inputs[3]; } /** * Gets the normal map color input component */ get normalMapColor() { return this._inputs[4]; } /** * Gets the strength input component */ get strength() { return this._inputs[5]; } /** * Gets the view direction input component */ get viewDirection() { return this._inputs[6]; } /** * Gets the parallax scale input component */ get parallaxScale() { return this._inputs[7]; } /** * Gets the parallax height input component */ get parallaxHeight() { return this._inputs[8]; } /** * Gets the TBN input component */ // eslint-disable-next-line @typescript-eslint/naming-convention get TBN() { return this._inputs[9]; } /** * Gets the World input component */ get world() { return this._inputs[10]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the uv offset output component */ get uvOffset() { return this._outputs[1]; } prepareDefines(e, t, i) { const r = this.normalMapColor.connectedPoint._ownerBlock.samplerName, s = this.viewDirection.isConnected && (this.useParallaxOcclusion && r || !this.useParallaxOcclusion && this.parallaxHeight.isConnected); i.setValue("BUMP", !0), i.setValue("PARALLAX", s, !0), i.setValue("PARALLAX_RHS", t.getScene().useRightHandedSystem, !0), i.setValue("PARALLAXOCCLUSION", this.useParallaxOcclusion, !0), i.setValue("OBJECTSPACE_NORMALMAP", this.useObjectSpaceNormalMap, !0); } bind(e, t, i) { t.getScene()._mirroredCameraPosition ? e.setFloat2(this._tangentSpaceParameterName, this.invertX ? 1 : -1, this.invertY ? 1 : -1) : e.setFloat2(this._tangentSpaceParameterName, this.invertX ? -1 : 1, this.invertY ? -1 : 1), i && (e.setFloat(this._tangentCorrectionFactorName, i.getWorldMatrix().determinant() < 0 ? -1 : 1), this.useObjectSpaceNormalMap && !this.world.isConnected && e.setMatrix(this._worldMatrixName, i.getWorldMatrix())); } autoConfigure(e, t = () => !0) { if (!this.uv.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "uv" && t(r)); i || (i = new vs("uv"), i.setAsAttribute()), i.output.connectTo(this.uv); } if (!this.strength.isConnected) { const i = new vs("strength"); i.value = 1, i.output.connectTo(this.strength); } } _buildBlock(e) { super._buildBlock(e); const t = `//${this.name}`, i = this.uv, r = this.worldPosition, s = this.worldNormal, n = this.worldTangent; e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this), this._tangentSpaceParameterName = e._getFreeDefineName("tangentSpaceParameter"), e._emitUniformFromString(this._tangentSpaceParameterName, "vec2"), this._tangentCorrectionFactorName = e._getFreeDefineName("tangentCorrectionFactor"), e._emitUniformFromString(this._tangentCorrectionFactorName, "float"), this._worldMatrixName = e._getFreeDefineName("perturbNormalWorldMatrix"), e._emitUniformFromString(this._worldMatrixName, "mat4"); let a = null; this.normalMapColor.connectedPoint && (a = this.normalMapColor.connectedPoint._ownerBlock.samplerName); const l = this.viewDirection.isConnected && (this.useParallaxOcclusion && a || !this.useParallaxOcclusion && this.parallaxHeight.isConnected), o = this.parallaxScale.isConnectedToInputBlock ? this.parallaxScale.connectInputBlock.isConstant ? 
e._emitFloat(this.parallaxScale.connectInputBlock.value) : this.parallaxScale.associatedVariableName : "0.05", u = this.strength.isConnectedToInputBlock && this.strength.connectInputBlock.isConstant ? ` #if !defined(NORMALXYSCALE) 1.0/ #endif ${e._emitFloat(this.strength.connectInputBlock.value)}` : ` #if !defined(NORMALXYSCALE) 1.0/ #endif ${this.strength.associatedVariableName}`; e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"); const h = { search: /defined\(TANGENT\)/g, replace: n.isConnected ? "defined(TANGENT)" : "defined(IGNORE)" }, d = { search: /varying mat3 vTBN;/g, replace: "" }, f = { search: /uniform mat4 normalMatrix;/g, replace: "" }, p = this.TBN; p.isConnected ? e.compilationString += ` #ifdef TBNBLOCK mat3 vTBN = ${p.associatedVariableName}; #endif ` : n.isConnected && (e.compilationString += `vec3 tbnNormal = normalize(${s.associatedVariableName}.xyz); `, e.compilationString += `vec3 tbnTangent = normalize(${n.associatedVariableName}.xyz); `, e.compilationString += `vec3 tbnBitangent = cross(tbnNormal, tbnTangent) * ${this._tangentCorrectionFactorName}; `, e.compilationString += `mat3 vTBN = mat3(tbnTangent, tbnBitangent, tbnNormal); `), e._emitFunctionFromInclude("bumpFragmentMainFunctions", t, { replaceStrings: [h, d, f] }), e._emitFunctionFromInclude("bumpFragmentFunctions", t, { replaceStrings: [ { search: /#include\(_DEFINENAME_,BUMP,_VARYINGNAME_,Bump,_SAMPLERNAME_,bump\)/g, replace: "" }, { search: /uniform sampler2D bumpSampler;/g, replace: "" }, { search: /vec2 parallaxOcclusion\(vec3 vViewDirCoT,vec3 vNormalCoT,vec2 texCoord,float parallaxScale\)/g, replace: `#define inline vec2 parallaxOcclusion(vec3 vViewDirCoT, vec3 vNormalCoT, vec2 texCoord, float parallaxScale, sampler2D bumpSampler)` }, { search: /vec2 parallaxOffset\(vec3 viewDir,float heightScale\)/g, replace: "vec2 parallaxOffset(vec3 viewDir, float heightScale, float height_)" }, { search: /texture2D\(bumpSampler,vBumpUV\)\.w/g, replace: "height_" } ] }); const m = !l || !a ? this.normalMapColor.associatedVariableName : `texture2D(${a}, ${i.associatedVariableName} + uvOffset).xyz`; return e.compilationString += this._declareOutput(this.output, e) + ` = vec4(0.); `, e.compilationString += e._emitCodeFromInclude("bumpFragment", t, { replaceStrings: [ { search: /texture2D\(bumpSampler,vBumpUV\)/g, replace: `${m}` }, { search: /#define CUSTOM_FRAGMENT_BUMP_FRAGMENT/g, replace: `mat4 normalMatrix = toNormalMatrix(${this.world.isConnected ? this.world.associatedVariableName : this._worldMatrixName});` }, { search: /perturbNormal\(TBN,texture2D\(bumpSampler,vBumpUV\+uvOffset\).xyz,vBumpInfos.y\)/g, replace: `perturbNormal(TBN, ${m}, vBumpInfos.y)` }, { search: /parallaxOcclusion\(invTBN\*-viewDirectionW,invTBN\*normalW,vBumpUV,vBumpInfos.z\)/g, replace: `parallaxOcclusion((invTBN * -viewDirectionW), (invTBN * normalW), vBumpUV, vBumpInfos.z, ${l && this.useParallaxOcclusion ? a : "bumpSampler"})` }, { search: /parallaxOffset\(invTBN\*viewDirectionW,vBumpInfos\.z\)/g, replace: `parallaxOffset(invTBN * viewDirectionW, vBumpInfos.z, ${l ? 
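/* Parallax wiring sketch (public BABYLON names assumed; viewDir, heightTex and scaleInput are hypothetical
   blocks of the expected types). Per the prepareDefines logic above, parallax is active only when
   viewDirection is connected and either useParallaxOcclusion is true (the sampler feeding normalMapColor is
   reused for the height lookups) or parallaxHeight is connected; parallaxScale falls back to the literal
   0.05 used above when left unconnected:
     perturb.useParallaxOcclusion = true;
     viewDir.output.connectTo(perturb.viewDirection);
     // or, without occlusion mapping:
     //   heightTex.r.connectTo(perturb.parallaxHeight);
     scaleInput.output.connectTo(perturb.parallaxScale);
*/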
this.parallaxHeight.associatedVariableName : "0."})` }, { search: /vTangentSpaceParams/g, replace: this._tangentSpaceParameterName }, { search: /vBumpInfos.y/g, replace: u }, { search: /vBumpInfos.z/g, replace: o }, { search: /vBumpUV/g, replace: i.associatedVariableName }, { search: /vPositionW/g, replace: r.associatedVariableName + ".xyz" }, { search: /normalW=/g, replace: this.output.associatedVariableName + ".xyz = " }, { search: /mat3\(normalMatrix\)\*normalW/g, replace: "mat3(normalMatrix) * " + this.output.associatedVariableName + ".xyz" }, { search: /normalW/g, replace: s.associatedVariableName + ".xyz" }, { search: /viewDirectionW/g, replace: l ? this.viewDirection.associatedVariableName : "vec3(0.)" }, h ] }), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.invertX = ${this.invertX}; `; return e += `${this._codeVariableName}.invertY = ${this.invertY}; `, e += `${this._codeVariableName}.useParallaxOcclusion = ${this.useParallaxOcclusion}; `, e += `${this._codeVariableName}.useObjectSpaceNormalMap = ${this.useObjectSpaceNormalMap}; `, e; } serialize() { const e = super.serialize(); return e.invertX = this.invertX, e.invertY = this.invertY, e.useParallaxOcclusion = this.useParallaxOcclusion, e.useObjectSpaceNormalMap = this.useObjectSpaceNormalMap, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.invertX = e.invertX, this.invertY = e.invertY, this.useParallaxOcclusion = !!e.useParallaxOcclusion, this.useObjectSpaceNormalMap = !!e.useObjectSpaceNormalMap; } } F([ ir("Invert X axis", $i.Boolean, "PROPERTIES", { notifiers: { update: !1 } }) ], bw.prototype, "invertX", void 0); F([ ir("Invert Y axis", $i.Boolean, "PROPERTIES", { notifiers: { update: !1 } }) ], bw.prototype, "invertY", void 0); F([ ir("Use parallax occlusion", $i.Boolean) ], bw.prototype, "useParallaxOcclusion", void 0); F([ ir("Object Space Mode", $i.Boolean, "PROPERTIES", { notifiers: { update: !1 } }) ], bw.prototype, "useObjectSpaceNormalMap", void 0); Be("BABYLON.PerturbNormalBlock", bw); class Pre extends Wi { /** * Create a new DiscardBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment, !0), this.registerInput("value", ue.Float, !0), this.registerInput("cutoff", ue.Float, !0); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DiscardBlock"; } /** * Gets the color input component */ get value() { return this._inputs[0]; } /** * Gets the cutoff input component */ get cutoff() { return this._inputs[1]; } _buildBlock(e) { if (super._buildBlock(e), e.sharedData.hints.needAlphaTesting = !0, !(!this.cutoff.isConnected || !this.value.isConnected)) return e.compilationString += `if (${this.value.associatedVariableName} < ${this.cutoff.associatedVariableName}) discard; `, this; } } Be("BABYLON.DiscardBlock", Pre); class Ire extends Wi { /** * Creates a new FrontFacingBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerOutput("output", ue.Float, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FrontFacingBlock"; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (super._buildBlock(e), e.target === Le.Vertex) throw "FrontFacingBlock must only be used in a fragment shader"; const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = gl_FrontFacing ? 
1.0 : 0.0; `, this; } } Be("BABYLON.FrontFacingBlock", Ire); class Dre extends Wi { /** * Create a new DerivativeBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerInput("input", ue.AutoDetect, !1), this.registerOutput("dx", ue.BasedOnInput), this.registerOutput("dy", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._outputs[1]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "DerivativeBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the derivative output on x */ get dx() { return this._outputs[0]; } /** * Gets the derivative output on y */ get dy() { return this._outputs[1]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this._outputs[1]; return e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"), t.hasEndpoints && (e.compilationString += this._declareOutput(t, e) + ` = dFdx(${this.input.associatedVariableName}); `), i.hasEndpoints && (e.compilationString += this._declareOutput(i, e) + ` = dFdy(${this.input.associatedVariableName}); `), this; } } Be("BABYLON.DerivativeBlock", Dre); class Ore extends Wi { /** * Creates a new FragCoordBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerOutput("xy", ue.Vector2, Le.Fragment), this.registerOutput("xyz", ue.Vector3, Le.Fragment), this.registerOutput("xyzw", ue.Vector4, Le.Fragment), this.registerOutput("x", ue.Float, Le.Fragment), this.registerOutput("y", ue.Float, Le.Fragment), this.registerOutput("z", ue.Float, Le.Fragment), this.registerOutput("w", ue.Float, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FragCoordBlock"; } /** * Gets the xy component */ get xy() { return this._outputs[0]; } /** * Gets the xyz component */ get xyz() { return this._outputs[1]; } /** * Gets the xyzw component */ get xyzw() { return this._outputs[2]; } /** * Gets the x component */ get x() { return this._outputs[3]; } /** * Gets the y component */ get y() { return this._outputs[4]; } /** * Gets the z component */ get z() { return this._outputs[5]; } /** * Gets the w component */ get output() { return this._outputs[6]; } // eslint-disable-next-line @typescript-eslint/naming-convention writeOutputs(e) { let t = ""; for (const i of this._outputs) i.hasEndpoints && (t += `${this._declareOutput(i, e)} = gl_FragCoord.${i.name}; `); return t; } _buildBlock(e) { if (super._buildBlock(e), e.target === Le.Vertex) throw "FragCoordBlock must only be used in a fragment shader"; return e.compilationString += this.writeOutputs(e), this; } } Be("BABYLON.FragCoordBlock", Ore); class wre extends Wi { /** * Creates a new ScreenSizeBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerOutput("xy", ue.Vector2, Le.Fragment), this.registerOutput("x", ue.Float, Le.Fragment), this.registerOutput("y", ue.Float, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ScreenSizeBlock"; } /** * Gets the xy component */ get xy() { return this._outputs[0]; } /** * Gets the x component */ get x() { return this._outputs[1]; } /** * Gets the y component */ get y() { return this._outputs[2]; } bind(e) { const t = this._scene.getEngine(); e.setFloat2(this._varName, t.getRenderWidth(), t.getRenderHeight()); } // eslint-disable-next-line 
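/* ScreenSizeBlock sketch: bind() above uploads a vec2 uniform with the current render width/height, which
   makes it easy to turn gl_FragCoord into a 0..1 screen UV. FragCoordBlock and ScreenSizeBlock are the
   public names registered in this bundle; DivideBlock is assumed from the public BABYLON API:
     const fragCoord = new BABYLON.FragCoordBlock("fragCoord");
     const screenSize = new BABYLON.ScreenSizeBlock("screenSize");
     const screenUV = new BABYLON.DivideBlock("screenUV");
     fragCoord.xy.connectTo(screenUV.left);
     screenSize.xy.connectTo(screenUV.right); // screenUV.output is a 0..1 UV for full-screen effects
*/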
@typescript-eslint/naming-convention writeOutputs(e, t) { let i = ""; for (const r of this._outputs) r.hasEndpoints && (i += `${this._declareOutput(r, e)} = ${t}.${r.name}; `); return i; } _buildBlock(e) { if (super._buildBlock(e), this._scene = e.sharedData.scene, e.target === Le.Vertex) throw "ScreenSizeBlock must only be used in a fragment shader"; return e.sharedData.bindableBlocks.push(this), this._varName = e._getFreeVariableName("screenSize"), e._emitUniformFromString(this._varName, "vec2"), e.compilationString += this.writeOutputs(e, this._varName), this; } } Be("BABYLON.ScreenSizeBlock", wre); class Lre extends Wi { /** * Creates a new ScreenSpaceBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerInput("vector", ue.AutoDetect), this.registerInput("worldViewProjection", ue.Matrix), this.registerOutput("output", ue.Vector2), this.registerOutput("x", ue.Float), this.registerOutput("y", ue.Float), this.inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ScreenSpaceBlock"; } /** * Gets the vector input */ get vector() { return this._inputs[0]; } /** * Gets the worldViewProjection transform input */ get worldViewProjection() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the x output component */ get x() { return this._outputs[1]; } /** * Gets the y output component */ get y() { return this._outputs[2]; } autoConfigure(e, t = () => !0) { if (!this.worldViewProjection.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.WorldViewProjection && t(r)); i || (i = new vs("worldViewProjection"), i.setAsSystemValue(Ms.WorldViewProjection)), i.output.connectTo(this.worldViewProjection); } } _buildBlock(e) { super._buildBlock(e); const t = this.vector, i = this.worldViewProjection; if (!t.connectedPoint) return; const r = i.associatedVariableName, s = e._getFreeVariableName("screenSpaceTemp"); switch (t.connectedPoint.type) { case ue.Vector3: e.compilationString += `vec4 ${s} = ${r} * vec4(${t.associatedVariableName}, 1.0); `; break; case ue.Vector4: e.compilationString += `vec4 ${s} = ${r} * ${t.associatedVariableName}; `; break; } return e.compilationString += `${s}.xy /= ${s}.w;`, e.compilationString += `${s}.xy = ${s}.xy * 0.5 + vec2(0.5, 0.5);`, this.output.hasEndpoints && (e.compilationString += this._declareOutput(this.output, e) + ` = ${s}.xy; `), this.x.hasEndpoints && (e.compilationString += this._declareOutput(this.x, e) + ` = ${s}.x; `), this.y.hasEndpoints && (e.compilationString += this._declareOutput(this.y, e) + ` = ${s}.y; `), this; } } Be("BABYLON.ScreenSpaceBlock", Lre); class Nre extends Wi { /** * Creates a new TwirlBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerInput("input", ue.Vector2), this.registerInput("strength", ue.Float), this.registerInput("center", ue.Vector2), this.registerInput("offset", ue.Vector2), this.registerOutput("output", ue.Vector2), this.registerOutput("x", ue.Float), this.registerOutput("y", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TwirlBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the strength component */ get strength() { return this._inputs[1]; } /** * Gets the center component */ get center() { return this._inputs[2]; } /** 
* Gets the offset component */ get offset() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the x output component */ get x() { return this._outputs[1]; } /** * Gets the y output component */ get y() { return this._outputs[2]; } autoConfigure() { if (!this.center.isConnected) { const e = new vs("center"); e.value = new at(0.5, 0.5), e.output.connectTo(this.center); } if (!this.strength.isConnected) { const e = new vs("strength"); e.value = 1, e.output.connectTo(this.strength); } if (!this.offset.isConnected) { const e = new vs("offset"); e.value = new at(0, 0), e.output.connectTo(this.offset); } } _buildBlock(e) { super._buildBlock(e); const t = e._getFreeVariableName("delta"), i = e._getFreeVariableName("angle"), r = e._getFreeVariableName("x"), s = e._getFreeVariableName("y"), n = e._getFreeVariableName("result"); return e.compilationString += ` vec2 ${t} = ${this.input.associatedVariableName} - ${this.center.associatedVariableName}; float ${i} = ${this.strength.associatedVariableName} * length(${t}); float ${r} = cos(${i}) * ${t}.x - sin(${i}) * ${t}.y; float ${s} = sin(${i}) * ${t}.x + cos(${i}) * ${t}.y; vec2 ${n} = vec2(${r} + ${this.center.associatedVariableName}.x + ${this.offset.associatedVariableName}.x, ${s} + ${this.center.associatedVariableName}.y + ${this.offset.associatedVariableName}.y); `, this.output.hasEndpoints && (e.compilationString += this._declareOutput(this.output, e) + ` = ${n}; `), this.x.hasEndpoints && (e.compilationString += this._declareOutput(this.x, e) + ` = ${n}.x; `), this.y.hasEndpoints && (e.compilationString += this._declareOutput(this.y, e) + ` = ${n}.y; `), this; } } Be("BABYLON.TwirlBlock", Nre); class gN extends Wi { /** * Creates a new HeightToNormalBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.generateInWorldSpace = !1, this.automaticNormalizationNormal = !0, this.automaticNormalizationTangent = !0, this.registerInput("input", ue.Float), this.registerInput("worldPosition", ue.Vector3), this.registerInput("worldNormal", ue.Vector3), this.registerInput("worldTangent", ue.AutoDetect, !0), this.registerOutput("output", ue.Vector4), this.registerOutput("xyz", ue.Vector3), this._inputs[3].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "HeightToNormalBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the position component */ get worldPosition() { return this._inputs[1]; } /** * Gets the normal component */ get worldNormal() { return this._inputs[2]; } /** * Gets the tangent component */ get worldTangent() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the xyz component */ get xyz() { return this._outputs[1]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; !this.generateInWorldSpace && !this.worldTangent.isConnected && Ce.Error(`You must connect the 'worldTangent' input of the ${this.name} block!`); const i = this.generateInWorldSpace ? "" : ` vec3 biTangent = cross(normal, tangent); mat3 TBN = mat3(tangent, biTangent, normal); `, r = this.generateInWorldSpace ? "" : ` result = TBN * result; result = result * vec3(0.5) + vec3(0.5); `, s = ` vec4 heightToNormal(in float height, in vec3 position, in vec3 tangent, in vec3 normal) { ${i} ${this.automaticNormalizationTangent ? 
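/* HeightToNormalBlock sketch: the GLSL assembled around this point rebuilds a normal from screen-space
   derivatives (dFdx/dFdy) of a scalar height. Typical wiring with the public names (heightTex, worldPos,
   worldNormal and worldTangent are hypothetical upstream blocks of the registered input types):
     const h2n = new BABYLON.HeightToNormalBlock("heightToNormal");
     heightTex.r.connectTo(h2n.input);              // float height
     worldPos.xyz.connectTo(h2n.worldPosition);
     worldNormal.xyz.connectTo(h2n.worldNormal);
     worldTangent.xyz.connectTo(h2n.worldTangent);  // required unless generateInWorldSpace = true
     // h2n.output is a vec4 normal; in tangent space it is remapped to [0..1] like a normal-map texel.
*/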
"tangent = normalize(tangent);" : ""} ${this.automaticNormalizationNormal ? "normal = normalize(normal);" : ""} vec3 worlddX = dFdx(position); vec3 worlddY = dFdy(position); vec3 crossX = cross(normal, worlddX); vec3 crossY = cross(normal, worlddY); float d = abs(dot(crossY, worlddX)); vec3 inToNormal = vec3(((((height + dFdx(height)) - height) * crossY) + (((height + dFdy(height)) - height) * crossX)) * sign(d)); inToNormal.y *= -1.0; vec3 result = normalize((d * normal) - inToNormal); ${r} return vec4(result, 0.); }`; return e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"), e._emitFunction("heightToNormal", s, "// heightToNormal"), e.compilationString += this._declareOutput(t, e) + ` = heightToNormal(${this.input.associatedVariableName}, ${this.worldPosition.associatedVariableName}, ${this.worldTangent.isConnected ? this.worldTangent.associatedVariableName : "vec3(0.)"}.xyz, ${this.worldNormal.associatedVariableName}); `, this.xyz.hasEndpoints && (e.compilationString += this._declareOutput(this.xyz, e) + ` = ${this.output.associatedVariableName}.xyz; `), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.generateInWorldSpace = ${this.generateInWorldSpace}; `, e += `${this._codeVariableName}.automaticNormalizationNormal = ${this.automaticNormalizationNormal}; `, e += `${this._codeVariableName}.automaticNormalizationTangent = ${this.automaticNormalizationTangent}; `, e; } serialize() { const e = super.serialize(); return e.generateInWorldSpace = this.generateInWorldSpace, e.automaticNormalizationNormal = this.automaticNormalizationNormal, e.automaticNormalizationTangent = this.automaticNormalizationTangent, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.generateInWorldSpace = e.generateInWorldSpace, this.automaticNormalizationNormal = e.automaticNormalizationNormal, this.automaticNormalizationTangent = e.automaticNormalizationTangent; } } F([ ir("Generate in world space instead of tangent space", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], gN.prototype, "generateInWorldSpace", void 0); F([ ir("Force normalization for the worldNormal input", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], gN.prototype, "automaticNormalizationNormal", void 0); F([ ir("Force normalization for the worldTangent input", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], gN.prototype, "automaticNormalizationTangent", void 0); Be("BABYLON.HeightToNormalBlock", gN); class Fre extends Wi { /** * Create a new FragDepthBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment, !0), this.registerInput("depth", ue.Float, !0), this.registerInput("worldPos", ue.Vector4, !0), this.registerInput("viewProjection", ue.Matrix, !0); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FragDepthBlock"; } /** * Gets the depth input component */ get depth() { return this._inputs[0]; } /** * Gets the worldPos input component */ get worldPos() { return this._inputs[1]; } /** * Gets the viewProjection input component */ get viewProjection() { return this._inputs[2]; } _buildBlock(e) { return super._buildBlock(e), this.depth.isConnected ? e.compilationString += `gl_FragDepth = ${this.depth.associatedVariableName}; ` : this.worldPos.isConnected && this.viewProjection.isConnected ? 
e.compilationString += ` vec4 p = ${this.viewProjection.associatedVariableName} * ${this.worldPos.associatedVariableName}; float v = p.z / p.w; #ifndef IS_NDC_HALF_ZRANGE v = v * 0.5 + 0.5; #endif gl_FragDepth = v; ` : Ce.Warn("FragDepthBlock: either the depth input or both the worldPos and viewProjection inputs must be connected!"), this; } } Be("BABYLON.FragDepthBlock", Fre); class Bre extends Wi { /** * Create a new ShadowMapBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.registerInput("worldPosition", ue.Vector4, !1), this.registerInput("viewProjection", ue.Matrix, !1), this.registerInput("worldNormal", ue.AutoDetect, !0), this.registerOutput("depth", ue.Vector3), this.worldNormal.addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ShadowMapBlock"; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("vPositionWSM"), e._excludeVariableName("lightDataSM"), e._excludeVariableName("biasAndScaleSM"), e._excludeVariableName("depthValuesSM"), e._excludeVariableName("clipPos"), e._excludeVariableName("worldPos"), e._excludeVariableName("zSM"); } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the view x projection input component */ get viewProjection() { return this._inputs[1]; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[2]; } /** * Gets the depth output component */ get depth() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = `//${this.name}`; return e._emitUniformFromString("biasAndScaleSM", "vec3"), e._emitUniformFromString("lightDataSM", "vec3"), e._emitUniformFromString("depthValuesSM", "vec2"), e._emitFunctionFromInclude("packingFunctions", t), e.compilationString += `vec4 worldPos = ${this.worldPosition.associatedVariableName}; `, e.compilationString += `vec3 vPositionWSM; `, e.compilationString += `float vDepthMetricSM = 0.0; `, e.compilationString += `float zSM; `, this.worldNormal.isConnected && (e.compilationString += `vec3 vNormalW = ${this.worldNormal.associatedVariableName}.xyz; `, e.compilationString += e._emitCodeFromInclude("shadowMapVertexNormalBias", t)), e.compilationString += `vec4 clipPos = ${this.viewProjection.associatedVariableName} * worldPos; `, e.compilationString += e._emitCodeFromInclude("shadowMapVertexMetric", t, { replaceStrings: [ { search: /gl_Position/g, replace: "clipPos" } ] }), e.compilationString += e._emitCodeFromInclude("shadowMapFragment", t, { replaceStrings: [ { search: /return;/g, replace: "" } ] }), e.compilationString += ` #if SM_DEPTHTEXTURE == 1 #ifdef IS_NDC_HALF_ZRANGE gl_FragDepth = (clipPos.z / clipPos.w); #else gl_FragDepth = (clipPos.z / clipPos.w) * 0.5 + 0.5; #endif #endif `, e.compilationString += `${this._declareOutput(this.depth, e)} = vec3(depthSM, 1., 1.); `, this; } } Be("BABYLON.ShadowMapBlock", Bre); class Ure extends Wi { /** * Create a new PrePassOutputBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment, !0), this.registerInput("viewDepth", ue.Float, !0), this.registerInput("worldPosition", ue.AutoDetect, !0), this.registerInput("viewNormal", ue.AutoDetect, !0), this.registerInput("reflectivity", ue.AutoDetect, !0), this.inputs[1].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | 
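/* PrePassOutputBlock sketch: the code emitted below writes each connected input to its prepass MRT target
   (PREPASS_DEPTH / PREPASS_POSITION / PREPASS_NORMAL / PREPASS_REFLECTIVITY) and zero-fills the targets whose
   inputs are left unconnected. Public names assumed; depth, positionW and normalV are hypothetical upstream
   blocks, and nodeMaterial is an existing NodeMaterial:
     const prePassOutput = new BABYLON.PrePassOutputBlock("prePassOutput");
     depth.output.connectTo(prePassOutput.viewDepth);
     positionW.output.connectTo(prePassOutput.worldPosition);
     normalV.output.connectTo(prePassOutput.viewNormal);
     nodeMaterial.addOutputNode(prePassOutput); // used alongside the regular fragment output block
*/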
ue.Vector4), this.inputs[2].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4), this.inputs[3].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PrePassOutputBlock"; } /** * Gets the view depth component */ get viewDepth() { return this._inputs[0]; } /** * Gets the world position component */ get worldPosition() { return this._inputs[1]; } /** * Gets the view normal component */ get viewNormal() { return this._inputs[2]; } /** * Gets the reflectivity component */ get reflectivity() { return this._inputs[3]; } _buildBlock(e) { super._buildBlock(e); const t = this.worldPosition, i = this.viewNormal, r = this.viewDepth, s = this.reflectivity; e.sharedData.blocksWithDefines.push(this); const n = `//${this.name}`; return e._emitFunctionFromInclude("helperFunctions", n), e.compilationString += `#if defined(PREPASS)\r `, e.compilationString += `#ifdef PREPASS_DEPTH\r `, r.connectedPoint ? e.compilationString += ` gl_FragData[PREPASS_DEPTH_INDEX] = vec4(${r.associatedVariableName}, 0.0, 0.0, 1.0);\r ` : e.compilationString += ` gl_FragData[PREPASS_DEPTH_INDEX] = vec4(0.0, 0.0, 0.0, 0.0);\r `, e.compilationString += `#endif\r `, e.compilationString += `#ifdef PREPASS_POSITION\r `, t.connectedPoint ? e.compilationString += ` gl_FragData[PREPASS_POSITION_INDEX] = vec4(${t.associatedVariableName}.rgb, ${t.connectedPoint.type === ue.Vector4 ? t.associatedVariableName + ".a" : "1.0"});\r ` : e.compilationString += ` gl_FragData[PREPASS_POSITION_INDEX] = vec4(0.0, 0.0, 0.0, 0.0);\r `, e.compilationString += `#endif\r `, e.compilationString += `#ifdef PREPASS_NORMAL\r `, i.connectedPoint ? e.compilationString += ` gl_FragData[PREPASS_NORMAL_INDEX] = vec4(${i.associatedVariableName}.rgb, ${i.connectedPoint.type === ue.Vector4 ? i.associatedVariableName + ".a" : "1.0"});\r ` : e.compilationString += ` gl_FragData[PREPASS_NORMAL_INDEX] = vec4(0.0, 0.0, 0.0, 0.0);\r `, e.compilationString += `#endif\r `, e.compilationString += `#ifdef PREPASS_REFLECTIVITY\r `, s.connectedPoint ? e.compilationString += ` gl_FragData[PREPASS_REFLECTIVITY_INDEX] = vec4(${s.associatedVariableName}.rgb, ${s.connectedPoint.type === ue.Vector4 ? 
s.associatedVariableName + ".a" : "1.0"});\r ` : e.compilationString += ` gl_FragData[PREPASS_REFLECTIVITY_INDEX] = vec4(0.0, 0.0, 0.0, 1.0);\r `, e.compilationString += `#endif\r `, e.compilationString += `#endif\r `, this; } } Be("BABYLON.PrePassOutputBlock", Ure); class Vre extends Wi { /** * Create a new FogBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment, !1), this.registerInput("worldPosition", ue.Vector4, !1, Le.Vertex), this.registerInput("view", ue.Matrix, !1, Le.Vertex), this.registerInput("input", ue.AutoDetect, !1, Le.Fragment), this.registerInput("fogColor", ue.AutoDetect, !1, Le.Fragment), this.registerOutput("output", ue.Color3, Le.Fragment), this.input.addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Color4), this.fogColor.addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FogBlock"; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the view input component */ get view() { return this._inputs[1]; } /** * Gets the color input component */ get input() { return this._inputs[2]; } /** * Gets the fog color input component */ get fogColor() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } autoConfigure(e, t = () => !0) { if (!this.view.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.View && t(r)); i || (i = new vs("view"), i.setAsSystemValue(Ms.View)), i.output.connectTo(this.view); } if (!this.fogColor.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.FogColor && t(r)); i || (i = new vs("fogColor", void 0, ue.Color3), i.setAsSystemValue(Ms.FogColor)), i.output.connectTo(this.fogColor); } } prepareDefines(e, t, i) { const r = e.getScene(); i.setValue("FOG", t.fogEnabled && Ke.GetFogState(e, r)); } bind(e, t, i) { if (!i) return; const r = i.getScene(); e.setFloat4(this._fogParameters, r.fogMode, r.fogStart, r.fogEnd, r.fogDensity); } _buildBlock(e) { if (super._buildBlock(e), e.target === Le.Fragment) { e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this), e._emitFunctionFromInclude("fogFragmentDeclaration", `//${this.name}`, { removeUniforms: !0, removeVaryings: !0, removeIfDef: !1, replaceStrings: [{ search: /float CalcFogFactor\(\)/, replace: "float CalcFogFactor(vec3 vFogDistance, vec4 vFogInfos)" }] }); const t = e._getFreeVariableName("fog"), i = this.input, r = this.fogColor; this._fogParameters = e._getFreeVariableName("fogParameters"); const s = this._outputs[0]; e._emitUniformFromString(this._fogParameters, "vec4"), e.compilationString += `#ifdef FOG `, e.compilationString += `float ${t} = CalcFogFactor(${this._fogDistanceName}, ${this._fogParameters}); `, e.compilationString += this._declareOutput(s, e) + ` = ${t} * ${i.associatedVariableName}.rgb + (1.0 - ${t}) * ${r.associatedVariableName}.rgb; `, e.compilationString += `#else ${this._declareOutput(s, e)} = ${i.associatedVariableName}.rgb; `, e.compilationString += `#endif `; } else { const t = this.worldPosition, i = this.view; this._fogDistanceName = e._getFreeVariableName("vFogDistance"), e._emitVaryingFromString(this._fogDistanceName, "vec3"), e.compilationString += `${this._fogDistanceName} = (${i.associatedVariableName} * ${t.associatedVariableName}).xyz; `; } return this; } } Be("BABYLON.FogBlock", Vre); class CB extends Wi { 
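/* Usage sketch for the block registered below as "BABYLON.LightBlock". NodeMaterial, FragmentOutputBlock and
   the worldPos/worldNormal blocks are assumptions from the public BABYLON API; input/output names match the
   registerInput/registerOutput calls in the constructor. When the "light" property is left unset, the block
   iterates all scene lights up to maxSimultaneousLights:
     const lights = new BABYLON.LightBlock("lights");
     worldPos.output.connectTo(lights.worldPosition);   // vec4 world position (e.g. a TransformBlock output)
     worldNormal.output.connectTo(lights.worldNormal);  // vec4 world normal
     // cameraPosition auto-configures to the CameraPosition system value if left unconnected
     const fragOut = new BABYLON.FragmentOutputBlock("fragmentOutput");
     lights.diffuseOutput.connectTo(fragOut.rgb);       // combine with specularOutput via an AddBlock if needed
     nodeMaterial.addOutputNode(fragOut);
     nodeMaterial.build();
*/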
static _OnGenerateOnlyFragmentCodeChanged(e, t) { const i = e; return i.worldPosition.isConnected ? (i.generateOnlyFragmentCode = !i.generateOnlyFragmentCode, Ce.Error("The worldPosition input must not be connected to be able to switch!"), !1) : (i._setTarget(), !0); } _setTarget() { this._setInitialTarget(this.generateOnlyFragmentCode ? Le.Fragment : Le.VertexAndFragment), this.getInputByName("worldPosition").target = this.generateOnlyFragmentCode ? Le.Fragment : Le.Vertex; } /** * Create a new LightBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this._lightId = 0, this.generateOnlyFragmentCode = !1, this._isUnique = !0, this.registerInput("worldPosition", ue.Vector4, !1, Le.Vertex), this.registerInput("worldNormal", ue.Vector4, !1, Le.Fragment), this.registerInput("cameraPosition", ue.Vector3, !1, Le.Fragment), this.registerInput("glossiness", ue.Float, !0, Le.Fragment), this.registerInput("glossPower", ue.Float, !0, Le.Fragment), this.registerInput("diffuseColor", ue.Color3, !0, Le.Fragment), this.registerInput("specularColor", ue.Color3, !0, Le.Fragment), this.registerInput("view", ue.Matrix, !0), this.registerOutput("diffuseOutput", ue.Color3, Le.Fragment), this.registerOutput("specularOutput", ue.Color3, Le.Fragment), this.registerOutput("shadow", ue.Float, Le.Fragment); } /** * Gets the current class name * @returns the class name */ getClassName() { return "LightBlock"; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[1]; } /** * Gets the camera (or eye) position component */ get cameraPosition() { return this._inputs[2]; } /** * Gets the glossiness component */ get glossiness() { return this._inputs[3]; } /** * Gets the glossiness power component */ get glossPower() { return this._inputs[4]; } /** * Gets the diffuse color component */ get diffuseColor() { return this._inputs[5]; } /** * Gets the specular color component */ get specularColor() { return this._inputs[6]; } /** * Gets the view matrix component */ get view() { return this._inputs[7]; } /** * Gets the diffuse output component */ get diffuseOutput() { return this._outputs[0]; } /** * Gets the specular output component */ get specularOutput() { return this._outputs[1]; } /** * Gets the shadow output component */ get shadow() { return this._outputs[2]; } autoConfigure(e, t = () => !0) { if (!this.cameraPosition.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.CameraPosition && t(r)); i || (i = new vs("cameraPosition"), i.setAsSystemValue(Ms.CameraPosition)), i.output.connectTo(this.cameraPosition); } } prepareDefines(e, t, i) { if (!i._areLightsDirty) return; const r = e.getScene(); if (!this.light) Ke.PrepareDefinesForLights(r, e, i, !0, t.maxSimultaneousLights); else { const s = { needNormals: !1, needRebuild: !1, lightmapMode: !1, shadowEnabled: !1, specularEnabled: !1 }; Ke.PrepareDefinesForLight(r, e, this.light, this._lightId, i, !0, s), s.needRebuild && i.rebuild(); } } updateUniformsAndSamples(e, t, i, r) { for (let s = 0; s < t.maxSimultaneousLights && i["LIGHT" + s]; s++) { const n = e.uniforms.indexOf("vLightData" + s) >= 0; Ke.PrepareUniformsAndSamplersForLight(s, e.uniforms, e.samplers, i["PROJECTEDLIGHTTEXTURE" + s], r, n); } } bind(e, t, i) { if (!i) return; const r = i.getScene(); this.light ? 
Ke.BindLight(this.light, this._lightId, r, e, !0) : Ke.BindLights(r, i, e, !0, t.maxSimultaneousLights); } _injectVertexCode(e) { const t = this.worldPosition, i = `//${this.name}`; this.light ? (this._lightId = (e.counters.lightCounter !== void 0 ? e.counters.lightCounter : -1) + 1, e.counters.lightCounter = this._lightId, e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightVxUboDeclaration" : "lightVxFragmentDeclaration", i, { replaceStrings: [{ search: /{X}/g, replace: this._lightId.toString() }] }, this._lightId.toString())) : (e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightVxUboDeclaration" : "lightVxFragmentDeclaration", i, { repeatKey: "maxSimultaneousLights" }), this._lightId = 0, e.sharedData.dynamicUniformBlocks.push(this)); const r = "v_" + t.associatedVariableName; e._emitVaryingFromString(r, "vec4") && (e.compilationString += `${r} = ${t.associatedVariableName}; `), this.light ? e.compilationString += e._emitCodeFromInclude("shadowsVertex", i, { replaceStrings: [ { search: /{X}/g, replace: this._lightId.toString() }, { search: /worldPos/g, replace: t.associatedVariableName } ] }) : (e.compilationString += `vec4 worldPos = ${t.associatedVariableName}; `, this.view.isConnected && (e.compilationString += `mat4 view = ${this.view.associatedVariableName}; `), e.compilationString += e._emitCodeFromInclude("shadowsVertex", i, { repeatKey: "maxSimultaneousLights" })); } _buildBlock(e) { if (super._buildBlock(e), e.target !== Le.Fragment) { this._injectVertexCode(e); return; } this.generateOnlyFragmentCode && e.sharedData.dynamicUniformBlocks.push(this), e.sharedData.forcedBindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this); const t = `//${this.name}`, i = this.worldPosition; let r = i.associatedVariableName; this.generateOnlyFragmentCode ? (r = e._getFreeVariableName("globalWorldPos"), e._emitFunction("light_globalworldpos", `vec3 ${r}; `, t), e.compilationString += `${r} = ${i.associatedVariableName}.xyz; `, e.compilationString += e._emitCodeFromInclude("shadowsVertex", t, { repeatKey: "maxSimultaneousLights", substitutionVars: this.generateOnlyFragmentCode ? `worldPos,${i.associatedVariableName}` : void 0 })) : r = "v_" + r + ".xyz", e._emitFunctionFromInclude("helperFunctions", t), e._emitFunctionFromInclude("lightsFragmentFunctions", t, { replaceStrings: [{ search: /vPositionW/g, replace: r }] }), e._emitFunctionFromInclude("shadowsFragmentFunctions", t, { replaceStrings: [{ search: /vPositionW/g, replace: r }] }), this.light ? e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightUboDeclaration" : "lightFragmentDeclaration", t, { replaceStrings: [{ search: /{X}/g, replace: this._lightId.toString() }] }, this._lightId.toString()) : e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightUboDeclaration" : "lightFragmentDeclaration", t, { repeatKey: "maxSimultaneousLights", substitutionVars: this.generateOnlyFragmentCode ? "varying," : void 0 }), this._lightId === 0 && (e._registerTempVariable("viewDirectionW") && (e.compilationString += `vec3 viewDirectionW = normalize(${this.cameraPosition.associatedVariableName} - ${r}); `), e.compilationString += `lightingInfo info; `, e.compilationString += `float shadow = 1.; `, e.compilationString += `float aggShadow = 0.; `, e.compilationString += `float numLights = 0.; `, e.compilationString += `float glossiness = ${this.glossiness.isConnected ? this.glossiness.associatedVariableName : "1.0"} * ${this.glossPower.isConnected ? 
this.glossPower.associatedVariableName : "1024.0"}; `, e.compilationString += `vec3 diffuseBase = vec3(0., 0., 0.); `, e.compilationString += `vec3 specularBase = vec3(0., 0., 0.); `, e.compilationString += `vec3 normalW = ${this.worldNormal.associatedVariableName}.xyz; `), this.light ? e.compilationString += e._emitCodeFromInclude("lightFragment", t, { replaceStrings: [ { search: /{X}/g, replace: this._lightId.toString() }, { search: /vPositionW/g, replace: r + ".xyz" } ] }) : e.compilationString += e._emitCodeFromInclude("lightFragment", t, { repeatKey: "maxSimultaneousLights", substitutionVars: `vPositionW,${r}.xyz` }), this._lightId === 0 && (e.compilationString += `aggShadow = aggShadow / numLights; `); const s = this.diffuseOutput, n = this.specularOutput; return e.compilationString += this._declareOutput(s, e) + ` = diffuseBase${this.diffuseColor.isConnected ? " * " + this.diffuseColor.associatedVariableName : ""}; `, n.hasEndpoints && (e.compilationString += this._declareOutput(n, e) + ` = specularBase${this.specularColor.isConnected ? " * " + this.specularColor.associatedVariableName : ""}; `), this.shadow.hasEndpoints && (e.compilationString += this._declareOutput(this.shadow, e) + ` = aggShadow; `), this; } serialize() { const e = super.serialize(); return e.generateOnlyFragmentCode = this.generateOnlyFragmentCode, this.light && (e.lightId = this.light.id), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), e.lightId && (this.light = t.getLightById(e.lightId)), this.generateOnlyFragmentCode = e.generateOnlyFragmentCode, this._setTarget(); } } F([ ir("Generate only fragment code", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0, update: !0, onValidation: CB._OnGenerateOnlyFragmentCodeChanged } }) ], CB.prototype, "generateOnlyFragmentCode", void 0); Be("BABYLON.LightBlock", CB); class U4 extends Wi { /** * Gets or sets the texture associated with the node */ get texture() { return this._texture; } set texture(e) { var t; if (this._texture === e) return; const i = (t = e == null ? void 0 : e.getScene()) !== null && t !== void 0 ? 
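/* ImageSourceBlock sketch: a single ImageSourceBlock owns the texture/sampler and several TextureBlocks can
   read from it through their "source" input, so the texture is declared and bound only once. Public names
   and the `scene` variable assumed; the connection point names match the registerOutput/registerInput calls
   in this bundle:
     const source = new BABYLON.ImageSourceBlock("albedoSource");
     source.texture = new BABYLON.Texture("albedo.png", scene);
     const readA = new BABYLON.TextureBlock("readA");
     const readB = new BABYLON.TextureBlock("readB");
     source.source.connectTo(readA.source);
     source.source.connectTo(readB.source);
     // readA.uv and readB.uv can be driven by different UV sets or transformed UVs
*/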
t : gi.LastCreatedScene; !e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this._texture)), this._texture = e, e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(e)); } /** * Gets the sampler name associated with this image source */ get samplerName() { return this._samplerName; } /** * Creates a new ImageSourceBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this.registerOutput("source", ue.Object, Le.VertexAndFragment, new Yo("source", this, no.Output, U4, "ImageSourceBlock")); } bind(e) { this.texture && e.setTexture(this._samplerName, this.texture); } isReady() { return !(this.texture && !this.texture.isReadyOrNotBlocking()); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ImageSourceBlock"; } /** * Gets the output component */ get source() { return this._outputs[0]; } _buildBlock(e) { return super._buildBlock(e), e.target === Le.Vertex && (this._samplerName = e._getFreeVariableName(this.name + "Sampler"), e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), e.sharedData.bindableBlocks.push(this)), e._emit2DSampler(this._samplerName), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return this.texture && (e += `${this._codeVariableName}.texture = new BABYLON.Texture("${this.texture.name}", null, ${this.texture.noMipmap}, ${this.texture.invertY}, ${this.texture.samplingMode}); `, e += `${this._codeVariableName}.texture.wrapU = ${this.texture.wrapU}; `, e += `${this._codeVariableName}.texture.wrapV = ${this.texture.wrapV}; `, e += `${this._codeVariableName}.texture.uAng = ${this.texture.uAng}; `, e += `${this._codeVariableName}.texture.vAng = ${this.texture.vAng}; `, e += `${this._codeVariableName}.texture.wAng = ${this.texture.wAng}; `, e += `${this._codeVariableName}.texture.uOffset = ${this.texture.uOffset}; `, e += `${this._codeVariableName}.texture.vOffset = ${this.texture.vOffset}; `, e += `${this._codeVariableName}.texture.uScale = ${this.texture.uScale}; `, e += `${this._codeVariableName}.texture.vScale = ${this.texture.vScale}; `, e += `${this._codeVariableName}.texture.coordinatesMode = ${this.texture.coordinatesMode}; `), e; } serialize() { const e = super.serialize(); return this.texture && !this.texture.isRenderTarget && this.texture.getClassName() !== "VideoTexture" && (e.texture = this.texture.serialize()), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), e.texture && !Ta.IgnoreTexturesAtLoadTime && e.texture.url !== void 0 && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, this.texture = De.Parse(e.texture, t, i)); } } Be("BABYLON.ImageSourceBlock", U4); class dL extends Wi { /** * Gets or sets the texture associated with the node */ get texture() { var e; return this.source.isConnected ? ((e = this.source.connectedPoint) === null || e === void 0 ? void 0 : e.ownerBlock).texture : this._texture; } set texture(e) { var t; if (this._texture === e) return; const i = (t = e == null ? void 0 : e.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; !e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this._texture)), this._texture = e, e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(e)); } static _IsPrePassTextureBlock(e) { return (e == null ? 
void 0 : e.getClassName()) === "PrePassTextureBlock"; } get _isSourcePrePass() { return dL._IsPrePassTextureBlock(this._imageSource); } /** * Gets the sampler name associated with this texture */ get samplerName() { if (this._imageSource) { if (!dL._IsPrePassTextureBlock(this._imageSource)) return this._imageSource.samplerName; if (this.source.connectedPoint) return this._imageSource.getSamplerName(this.source.connectedPoint); } return this._samplerName; } /** * Gets a boolean indicating that this block is linked to an ImageSourceBlock */ get hasImageSource() { return this.source.isConnected; } /** * Gets or sets a boolean indicating if content needs to be converted to gamma space */ set convertToGammaSpace(e) { var t; if (e !== this._convertToGammaSpace && (this._convertToGammaSpace = e, this.texture)) { const i = (t = this.texture.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; i == null || i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this.texture)); } } get convertToGammaSpace() { return this._convertToGammaSpace; } /** * Gets or sets a boolean indicating if content needs to be converted to linear space */ set convertToLinearSpace(e) { var t; if (e !== this._convertToLinearSpace && (this._convertToLinearSpace = e, this.texture)) { const i = (t = this.texture.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; i == null || i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this.texture)); } } get convertToLinearSpace() { return this._convertToLinearSpace; } /** * Create a new TextureBlock * @param name defines the block name * @param fragmentOnly */ constructor(e, t = !1) { super(e, t ? Le.Fragment : Le.VertexAndFragment), this._convertToGammaSpace = !1, this._convertToLinearSpace = !1, this.disableLevelMultiplication = !1, this._fragmentOnly = t, this.registerInput("uv", ue.AutoDetect, !1, Le.VertexAndFragment), this.registerInput("source", ue.Object, !0, Le.VertexAndFragment, new Yo("source", this, no.Input, U4, "ImageSourceBlock")), this.registerInput("layer", ue.Float, !0), this.registerInput("lod", ue.Float, !0), this.registerOutput("rgba", ue.Color4, Le.Neutral), this.registerOutput("rgb", ue.Color3, Le.Neutral), this.registerOutput("r", ue.Float, Le.Neutral), this.registerOutput("g", ue.Float, Le.Neutral), this.registerOutput("b", ue.Float, Le.Neutral), this.registerOutput("a", ue.Float, Le.Neutral), this.registerOutput("level", ue.Float, Le.Neutral), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector2 | ue.Vector3 | ue.Vector4), this._inputs[0]._prioritizeVertex = !t; } /** * Gets the current class name * @returns the class name */ getClassName() { return "TextureBlock"; } /** * Gets the uv input component */ get uv() { return this._inputs[0]; } /** * Gets the source input component */ get source() { return this._inputs[1]; } /** * Gets the layer input component */ get layer() { return this._inputs[2]; } /** * Gets the LOD input component */ get lod() { return this._inputs[3]; } /** * Gets the rgba output component */ get rgba() { return this._outputs[0]; } /** * Gets the rgb output component */ get rgb() { return this._outputs[1]; } /** * Gets the r output component */ get r() { return this._outputs[2]; } /** * Gets the g output component */ get g() { return this._outputs[3]; } /** * Gets the b output component */ get b() { return this._outputs[4]; } /** * Gets the a output component */ get a() { return this._outputs[5]; } /** * Gets the level output component */ get level() { return this._outputs[6]; } get target() { if 
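/* TextureBlock sketch: the block samples a 2D texture (or one layer of a 2D array texture via "layer") and
   exposes rgba/rgb/r/g/b/a/level outputs; convertToGammaSpace / convertToLinearSpace drive the ISGAMMA /
   ISLINEAR conversion code emitted further down. Public names and the uv/fragOut blocks are assumed:
     const tex = new BABYLON.TextureBlock("diffuse");
     tex.texture = new BABYLON.Texture("diffuse.png", scene);
     tex.convertToLinearSpace = true;   // sample converted so lighting runs in linear space
     uv.output.connectTo(tex.uv);       // "uv" mesh attribute InputBlock
     tex.rgb.connectTo(fragOut.rgb);
*/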
(this._fragmentOnly) return Le.Fragment; if (!this.uv.isConnected || this.uv.sourceBlock.isInput) return Le.VertexAndFragment; let e = this.uv.connectedPoint; for (; e; ) { if (e.target === Le.Fragment) return Le.Fragment; if (e.target === Le.Vertex) return Le.VertexAndFragment; if (e.target === Le.Neutral || e.target === Le.VertexAndFragment) { const t = e.ownerBlock; if (t.target === Le.Fragment) return Le.Fragment; e = null; for (const i of t.inputs) if (i.connectedPoint) { e = i.connectedPoint; break; } } } return Le.VertexAndFragment; } set target(e) { } autoConfigure(e, t = () => !0) { if (!this.uv.isConnected) if (e.mode === Ip.PostProcess) { const i = e.getBlockByPredicate((r) => r.name === "uv" && t(r)); i && i.connectTo(this); } else { const i = e.mode === Ip.Particle ? "particle_uv" : "uv"; let r = e.getInputBlockByPredicate((s) => s.isAttribute && s.name === i && t(s)); r || (r = new vs("uv"), r.setAsAttribute(i)), r.output.connectTo(this.uv); } } initializeDefines(e, t, i) { i._areTexturesDirty && this._mainUVDefineName !== void 0 && i.setValue(this._mainUVDefineName, !1, !0); } prepareDefines(e, t, i) { if (!i._areTexturesDirty) return; if (!this.texture || !this.texture.getTextureMatrix) { this._isMixed && (i.setValue(this._defineName, !1, !0), i.setValue(this._mainUVDefineName, !0, !0)); return; } const r = this.convertToGammaSpace && this.texture && !this.texture.gammaSpace, s = this.convertToLinearSpace && this.texture && this.texture.gammaSpace; i.setValue(this._linearDefineName, r, !0), i.setValue(this._gammaDefineName, s, !0), this._isMixed && (this.texture.getTextureMatrix().isIdentityAs3x2() ? (i.setValue(this._defineName, !1, !0), i.setValue(this._mainUVDefineName, !0, !0)) : (i.setValue(this._defineName, !0), i[this._mainUVDefineName] == null && i.setValue(this._mainUVDefineName, !1, !0))); } isReady() { return this._isSourcePrePass ? 
!0 : !(this.texture && !this.texture.isReadyOrNotBlocking()); } bind(e) { this._isSourcePrePass && e.setFloat(this._textureInfoName, 1), this.texture && (this._isMixed && (e.setFloat(this._textureInfoName, this.texture.level), e.setMatrix(this._textureTransformName, this.texture.getTextureMatrix())), this._imageSource || e.setTexture(this._samplerName, this.texture)); } get _isMixed() { return this.target !== Le.Fragment; } _injectVertexCode(e) { const t = this.uv; if (this._defineName = e._getFreeDefineName("UVTRANSFORM"), this._mainUVDefineName = "VMAIN" + t.associatedVariableName.toUpperCase(), this._mainUVName = "vMain" + t.associatedVariableName, this._transformedUVName = e._getFreeVariableName("transformedUV"), this._textureTransformName = e._getFreeVariableName("textureTransform"), this._textureInfoName = e._getFreeVariableName("textureInfoName"), this.level.associatedVariableName = this._textureInfoName, e._emitVaryingFromString(this._transformedUVName, "vec2", this._defineName), e._emitVaryingFromString(this._mainUVName, "vec2", this._mainUVDefineName), e._emitUniformFromString(this._textureTransformName, "mat4", this._defineName), e.compilationString += `#ifdef ${this._defineName} `, e.compilationString += `${this._transformedUVName} = vec2(${this._textureTransformName} * vec4(${t.associatedVariableName}.xy, 1.0, 0.0)); `, e.compilationString += `#elif defined(${this._mainUVDefineName}) `, e.compilationString += `${this._mainUVName} = ${t.associatedVariableName}.xy; `, e.compilationString += `#endif `, !!this._outputs.some((i) => i.isConnectedInVertexShader)) { this._writeTextureRead(e, !0); for (const i of this._outputs) i.hasEndpoints && i.name !== "level" && this._writeOutput(e, i, i.name, !0); } } _getUVW(e) { var t, i, r; let s = e; if ((r = (i = (t = this._texture) === null || t === void 0 ? void 0 : t._texture) === null || i === void 0 ? void 0 : i.is2DArray) !== null && r !== void 0 ? r : !1) { const a = this.layer.isConnected ? this.layer.associatedVariableName : "0"; s = `vec3(${e}, ${a})`; } return s; } get _samplerFunc() { return this.lod.isConnected ? "texture2DLodEXT" : "texture2D"; } get _samplerLodSuffix() { return this.lod.isConnected ? `, ${this.lod.associatedVariableName}` : ""; } _generateTextureLookup(e) { const t = this.samplerName; e.compilationString += `#ifdef ${this._defineName} `, e.compilationString += `vec4 ${this._tempTextureRead} = ${this._samplerFunc}(${t}, ${this._getUVW(this._transformedUVName)}${this._samplerLodSuffix}); `, e.compilationString += `#elif defined(${this._mainUVDefineName}) `, e.compilationString += `vec4 ${this._tempTextureRead} = ${this._samplerFunc}(${t}, ${this._getUVW(this._mainUVName ? 
this._mainUVName : this.uv.associatedVariableName)}${this._samplerLodSuffix}); `, e.compilationString += `#endif `; } _writeTextureRead(e, t = !1) { const i = this.uv; if (t) { if (e.target === Le.Fragment) return; this._generateTextureLookup(e); return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `vec4 ${this._tempTextureRead} = ${this._samplerFunc}(${this.samplerName}, ${this._getUVW(i.associatedVariableName)}${this._samplerLodSuffix}); `; return; } this._generateTextureLookup(e); } _generateConversionCode(e, t, i) { i !== "a" && ((!this.texture || !this.texture.gammaSpace) && (e.compilationString += `#ifdef ${this._linearDefineName} ${t.associatedVariableName} = toGammaSpace(${t.associatedVariableName}); #endif `), e.compilationString += `#ifdef ${this._gammaDefineName} ${t.associatedVariableName} = toLinearSpace(${t.associatedVariableName}); #endif `); } _writeOutput(e, t, i, r = !1) { if (r) { if (e.target === Le.Fragment) return; e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `, this._generateConversionCode(e, t, i); return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `, this._generateConversionCode(e, t, i); return; } let s = ""; this.disableLevelMultiplication || (s = ` * ${this._textureInfoName}`), e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}${s}; `, this._generateConversionCode(e, t, i); } _buildBlock(e) { var t, i, r, s; if (super._buildBlock(e), this.source.isConnected ? this._imageSource = this.source.connectedPoint.ownerBlock : this._imageSource = null, (e.target === Le.Vertex || this._fragmentOnly || e.target === Le.Fragment) && (this._tempTextureRead = e._getFreeVariableName("tempTextureRead"), this._linearDefineName = e._getFreeDefineName("ISLINEAR"), this._gammaDefineName = e._getFreeDefineName("ISGAMMA")), (!this._isMixed && e.target === Le.Fragment || this._isMixed && e.target === Le.Vertex) && (this._imageSource || (this._samplerName = e._getFreeVariableName(this.name + "Sampler"), !((i = (t = this._texture) === null || t === void 0 ? void 0 : t._texture) === null || i === void 0) && i.is2DArray ? e._emit2DArraySampler(this._samplerName) : e._emit2DSampler(this._samplerName)), e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this)), e.target !== Le.Fragment) { this._injectVertexCode(e); return; } if (!this._outputs.some((a) => a.isConnectedInFragmentShader)) return; this._isMixed && !this._imageSource && (!((s = (r = this._texture) === null || r === void 0 ? void 0 : r._texture) === null || s === void 0) && s.is2DArray ? 
e._emit2DArraySampler(this._samplerName) : e._emit2DSampler(this._samplerName)); const n = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", n), this._isMixed && e._emitUniformFromString(this._textureInfoName, "float"), this._writeTextureRead(e); for (const a of this._outputs) a.hasEndpoints && a.name !== "level" && this._writeOutput(e, a, a.name); return this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.convertToGammaSpace = ${this.convertToGammaSpace}; `, e += `${this._codeVariableName}.convertToLinearSpace = ${this.convertToLinearSpace}; `, e += `${this._codeVariableName}.disableLevelMultiplication = ${this.disableLevelMultiplication}; `, this.texture && (e += `${this._codeVariableName}.texture = new BABYLON.Texture("${this.texture.name}", null, ${this.texture.noMipmap}, ${this.texture.invertY}, ${this.texture.samplingMode}); `, e += `${this._codeVariableName}.texture.wrapU = ${this.texture.wrapU}; `, e += `${this._codeVariableName}.texture.wrapV = ${this.texture.wrapV}; `, e += `${this._codeVariableName}.texture.uAng = ${this.texture.uAng}; `, e += `${this._codeVariableName}.texture.vAng = ${this.texture.vAng}; `, e += `${this._codeVariableName}.texture.wAng = ${this.texture.wAng}; `, e += `${this._codeVariableName}.texture.uOffset = ${this.texture.uOffset}; `, e += `${this._codeVariableName}.texture.vOffset = ${this.texture.vOffset}; `, e += `${this._codeVariableName}.texture.uScale = ${this.texture.uScale}; `, e += `${this._codeVariableName}.texture.vScale = ${this.texture.vScale}; `, e += `${this._codeVariableName}.texture.coordinatesMode = ${this.texture.coordinatesMode}; `), e; } serialize() { const e = super.serialize(); return e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, e.fragmentOnly = this._fragmentOnly, e.disableLevelMultiplication = this.disableLevelMultiplication, !this.hasImageSource && this.texture && !this.texture.isRenderTarget && this.texture.getClassName() !== "VideoTexture" && (e.texture = this.texture.serialize()), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.convertToGammaSpace = e.convertToGammaSpace, this.convertToLinearSpace = !!e.convertToLinearSpace, this._fragmentOnly = !!e.fragmentOnly, this.disableLevelMultiplication = !!e.disableLevelMultiplication, e.texture && !Ta.IgnoreTexturesAtLoadTime && e.texture.url !== void 0 && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, this.texture = De.Parse(e.texture, t, i)); } } Be("BABYLON.TextureBlock", dL); class fL extends Wi { /** * Gets or sets the texture associated with the node */ get texture() { return this._texture; } set texture(e) { var t; if (this._texture === e) return; const i = (t = e == null ? void 0 : e.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; !e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this._texture)), this._texture = e, e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(e)); } static _OnGenerateOnlyFragmentCodeChanged(e, t) { return e._onGenerateOnlyFragmentCodeChanged(); } _onGenerateOnlyFragmentCodeChanged() { return this._setTarget(), !0; } _setTarget() { this._setInitialTarget(this.generateOnlyFragmentCode ? 
Le.Fragment : Le.VertexAndFragment); } /** * Create a new ReflectionTextureBaseBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this.generateOnlyFragmentCode = !1; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReflectionTextureBaseBlock"; } _getTexture() { return this.texture; } autoConfigure(e, t = () => !0) { if (!this.position.isConnected) { let i = e.getInputBlockByPredicate((r) => r.isAttribute && r.name === "position" && t(r)); i || (i = new vs("position"), i.setAsAttribute()), i.output.connectTo(this.position); } if (!this.world.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.World && t(r)); i || (i = new vs("world"), i.setAsSystemValue(Ms.World)), i.output.connectTo(this.world); } if (this.view && !this.view.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.View && t(r)); i || (i = new vs("view"), i.setAsSystemValue(Ms.View)), i.output.connectTo(this.view); } } prepareDefines(e, t, i) { if (!i._areTexturesDirty) return; const r = this._getTexture(); !r || !r.getTextureMatrix || (i.setValue(this._define3DName, r.isCube, !0), i.setValue(this._defineLocalCubicName, !!r.boundingBoxSize, !0), i.setValue(this._defineExplicitName, r.coordinatesMode === 0, !0), i.setValue(this._defineSkyboxName, r.coordinatesMode === 5, !0), i.setValue(this._defineCubicName, r.coordinatesMode === 3 || r.coordinatesMode === 6, !0), i.setValue("INVERTCUBICMAP", r.coordinatesMode === 6, !0), i.setValue(this._defineSphericalName, r.coordinatesMode === 1, !0), i.setValue(this._definePlanarName, r.coordinatesMode === 2, !0), i.setValue(this._defineProjectionName, r.coordinatesMode === 4, !0), i.setValue(this._defineEquirectangularName, r.coordinatesMode === 7, !0), i.setValue(this._defineEquirectangularFixedName, r.coordinatesMode === 8, !0), i.setValue(this._defineMirroredEquirectangularFixedName, r.coordinatesMode === 9, !0)); } isReady() { const e = this._getTexture(); return !(e && !e.isReadyOrNotBlocking()); } bind(e, t, i) { const r = this._getTexture(); if (!(!i || !r) && (e.setMatrix(this._reflectionMatrixName, r.getReflectionTextureMatrix()), r.isCube ? 
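/* ReflectionTextureBaseBlock sketch: prepareDefines above maps texture.coordinatesMode to the reflection
   defines (0 explicit, 1 spherical, 2 planar, 3 cubic, 4 projection, 5 skybox, 6 inverted cubic,
   7 equirectangular, 8/9 fixed equirectangular variants) and bind() here picks the cube or 2D sampler. A
   typical setup with the public API (the concrete ReflectionTextureBlock subclass, CubeTexture and the
   Texture mode constants are assumptions, not defined in this excerpt):
     const reflection = new BABYLON.ReflectionTextureBlock("reflection");
     reflection.texture = new BABYLON.CubeTexture("textures/skybox", scene);   // isCube -> REFLECTIONMAP_3D
     // or: reflection.texture = new BABYLON.Texture("env.jpg", scene);
     //     reflection.texture.coordinatesMode = BABYLON.Texture.EQUIRECTANGULAR_MODE; // 7
*/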
e.setTexture(this._cubeSamplerName, r) : e.setTexture(this._2DSamplerName, r), r.boundingBoxSize)) { const s = r; e.setVector3(this._reflectionPositionName, s.boundingBoxPosition), e.setVector3(this._reflectionSizeName, s.boundingBoxSize); } } /** * Gets the code to inject in the vertex shader * @param state current state of the node material building * @returns the shader code */ handleVertexSide(e) { if (this.generateOnlyFragmentCode && e.target === Le.Vertex) return ""; this._define3DName = e._getFreeDefineName("REFLECTIONMAP_3D"), this._defineCubicName = e._getFreeDefineName("REFLECTIONMAP_CUBIC"), this._defineSphericalName = e._getFreeDefineName("REFLECTIONMAP_SPHERICAL"), this._definePlanarName = e._getFreeDefineName("REFLECTIONMAP_PLANAR"), this._defineProjectionName = e._getFreeDefineName("REFLECTIONMAP_PROJECTION"), this._defineExplicitName = e._getFreeDefineName("REFLECTIONMAP_EXPLICIT"), this._defineEquirectangularName = e._getFreeDefineName("REFLECTIONMAP_EQUIRECTANGULAR"), this._defineLocalCubicName = e._getFreeDefineName("USE_LOCAL_REFLECTIONMAP_CUBIC"), this._defineMirroredEquirectangularFixedName = e._getFreeDefineName("REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED"), this._defineEquirectangularFixedName = e._getFreeDefineName("REFLECTIONMAP_EQUIRECTANGULAR_FIXED"), this._defineSkyboxName = e._getFreeDefineName("REFLECTIONMAP_SKYBOX"), this._defineOppositeZ = e._getFreeDefineName("REFLECTIONMAP_OPPOSITEZ"), this._reflectionMatrixName = e._getFreeVariableName("reflectionMatrix"), e._emitUniformFromString(this._reflectionMatrixName, "mat4"); let t = ""; this._worldPositionNameInFragmentOnlyMode = e._getFreeVariableName("worldPosition"); const i = this.generateOnlyFragmentCode ? this._worldPositionNameInFragmentOnlyMode : "v_" + this.worldPosition.associatedVariableName; return (this.generateOnlyFragmentCode || e._emitVaryingFromString(i, "vec4")) && (t += `${this.generateOnlyFragmentCode ? "vec4 " : ""}${i} = ${this.worldPosition.associatedVariableName}; `), this._positionUVWName = e._getFreeVariableName("positionUVW"), this._directionWName = e._getFreeVariableName("directionW"), (this.generateOnlyFragmentCode || e._emitVaryingFromString(this._positionUVWName, "vec3", this._defineSkyboxName)) && (t += `#ifdef ${this._defineSkyboxName} `, t += `${this.generateOnlyFragmentCode ? "vec3 " : ""}${this._positionUVWName} = ${this.position.associatedVariableName}.xyz; `, t += `#endif `), (this.generateOnlyFragmentCode || e._emitVaryingFromString(this._directionWName, "vec3", `defined(${this._defineEquirectangularFixedName}) || defined(${this._defineMirroredEquirectangularFixedName})`)) && (t += `#if defined(${this._defineEquirectangularFixedName}) || defined(${this._defineMirroredEquirectangularFixedName}) `, t += `${this.generateOnlyFragmentCode ? 
"vec3 " : ""}${this._directionWName} = normalize(vec3(${this.world.associatedVariableName} * vec4(${this.position.associatedVariableName}.xyz, 0.0))); `, t += `#endif `), t; } /** * Handles the inits for the fragment code path * @param state node material build state */ handleFragmentSideInits(e) { e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), this._cubeSamplerName = e._getFreeVariableName(this.name + "CubeSampler"), e.samplers.push(this._cubeSamplerName), this._2DSamplerName = e._getFreeVariableName(this.name + "2DSampler"), e.samplers.push(this._2DSamplerName), e._samplerDeclaration += `#ifdef ${this._define3DName} `, e._samplerDeclaration += `uniform samplerCube ${this._cubeSamplerName}; `, e._samplerDeclaration += `#else `, e._samplerDeclaration += `uniform sampler2D ${this._2DSamplerName}; `, e._samplerDeclaration += `#endif `, e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this); const t = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", t), e._emitFunctionFromInclude("reflectionFunction", t, { replaceStrings: [{ search: /vec3 computeReflectionCoords/g, replace: "void DUMMYFUNC" }] }), this._reflectionColorName = e._getFreeVariableName("reflectionColor"), this._reflectionVectorName = e._getFreeVariableName("reflectionUVW"), this._reflectionCoordsName = e._getFreeVariableName("reflectionCoords"), this._reflectionPositionName = e._getFreeVariableName("vReflectionPosition"), e._emitUniformFromString(this._reflectionPositionName, "vec3"), this._reflectionSizeName = e._getFreeVariableName("vReflectionPosition"), e._emitUniformFromString(this._reflectionSizeName, "vec3"); } /** * Generates the reflection coords code for the fragment code path * @param worldNormalVarName name of the world normal variable * @param worldPos name of the world position variable. If not provided, will use the world position connected to this block * @param onlyReflectionVector if true, generates code only for the reflection vector computation, not for the reflection coordinates * @param doNotEmitInvertZ if true, does not emit the invertZ code * @returns the shader code */ handleFragmentSideCodeReflectionCoords(e, t, i = !1, r = !1) { t || (t = this.generateOnlyFragmentCode ? 
this._worldPositionNameInFragmentOnlyMode : `v_${this.worldPosition.associatedVariableName}`); const s = this._reflectionMatrixName, n = `normalize(${this._directionWName})`, a = `${this._positionUVWName}`, l = `${this.cameraPosition.associatedVariableName}`, o = `${this.view.associatedVariableName}`; e += ".xyz"; let u = ` #ifdef ${this._defineMirroredEquirectangularFixedName} vec3 ${this._reflectionVectorName} = computeMirroredFixedEquirectangularCoords(${t}, ${e}, ${n}); #endif #ifdef ${this._defineEquirectangularFixedName} vec3 ${this._reflectionVectorName} = computeFixedEquirectangularCoords(${t}, ${e}, ${n}); #endif #ifdef ${this._defineEquirectangularName} vec3 ${this._reflectionVectorName} = computeEquirectangularCoords(${t}, ${e}, ${l}.xyz, ${s}); #endif #ifdef ${this._defineSphericalName} vec3 ${this._reflectionVectorName} = computeSphericalCoords(${t}, ${e}, ${o}, ${s}); #endif #ifdef ${this._definePlanarName} vec3 ${this._reflectionVectorName} = computePlanarCoords(${t}, ${e}, ${l}.xyz, ${s}); #endif #ifdef ${this._defineCubicName} #ifdef ${this._defineLocalCubicName} vec3 ${this._reflectionVectorName} = computeCubicLocalCoords(${t}, ${e}, ${l}.xyz, ${s}, ${this._reflectionSizeName}, ${this._reflectionPositionName}); #else vec3 ${this._reflectionVectorName} = computeCubicCoords(${t}, ${e}, ${l}.xyz, ${s}); #endif #endif #ifdef ${this._defineProjectionName} vec3 ${this._reflectionVectorName} = computeProjectionCoords(${t}, ${o}, ${s}); #endif #ifdef ${this._defineSkyboxName} vec3 ${this._reflectionVectorName} = computeSkyBoxCoords(${a}, ${s}); #endif #ifdef ${this._defineExplicitName} vec3 ${this._reflectionVectorName} = vec3(0, 0, 0); #endif `; return r || (u += `#ifdef ${this._defineOppositeZ} ${this._reflectionVectorName}.z *= -1.0; #endif `), i || (u += ` #ifdef ${this._define3DName} vec3 ${this._reflectionCoordsName} = ${this._reflectionVectorName}; #else vec2 ${this._reflectionCoordsName} = ${this._reflectionVectorName}.xy; #ifdef ${this._defineProjectionName} ${this._reflectionCoordsName} /= ${this._reflectionVectorName}.z; #endif ${this._reflectionCoordsName}.y = 1.0 - ${this._reflectionCoordsName}.y; #endif `), u; } /** * Generates the reflection color code for the fragment code path * @param lodVarName name of the lod variable * @param swizzleLookupTexture swizzle to use for the final color variable * @returns the shader code */ handleFragmentSideCodeReflectionColor(e, t = ".rgb") { let r = `${"vec" + (t.length === 0 ? "4" : t.length - 1)} ${this._reflectionColorName}; #ifdef ${this._define3DName} `; return e ? r += `${this._reflectionColorName} = textureCubeLodEXT(${this._cubeSamplerName}, ${this._reflectionVectorName}, ${e})${t}; ` : r += `${this._reflectionColorName} = textureCube(${this._cubeSamplerName}, ${this._reflectionVectorName})${t}; `, r += ` #else `, e ? 
r += `${this._reflectionColorName} = texture2DLodEXT(${this._2DSamplerName}, ${this._reflectionCoordsName}, ${e})${t}; ` : r += `${this._reflectionColorName} = texture2D(${this._2DSamplerName}, ${this._reflectionCoordsName})${t}; `, r += `#endif `, r; } /** * Generates the code corresponding to the connected output points * @param state node material build state * @param varName name of the variable to output * @returns the shader code */ writeOutputs(e, t) { let i = ""; if (e.target === Le.Fragment) for (const r of this._outputs) r.hasEndpoints && (i += `${this._declareOutput(r, e)} = ${t}.${r.name}; `); return i; } _buildBlock(e) { return super._buildBlock(e), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); if (!this.texture) return e; if (this.texture.isCube) { const t = this.texture.forcedExtension; e += `${this._codeVariableName}.texture = new BABYLON.CubeTexture("${this.texture.name}", undefined, undefined, ${this.texture.noMipmap}, null, undefined, undefined, undefined, ${this.texture._prefiltered}, ${t ? '"' + t + '"' : "null"}); `; } else e += `${this._codeVariableName}.texture = new BABYLON.Texture("${this.texture.name}", null); `; return e += `${this._codeVariableName}.texture.coordinatesMode = ${this.texture.coordinatesMode}; `, e; } serialize() { const e = super.serialize(); return this.texture && !this.texture.isRenderTarget && (e.texture = this.texture.serialize()), e.generateOnlyFragmentCode = this.generateOnlyFragmentCode, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), e.texture && !Ta.IgnoreTexturesAtLoadTime && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, e.texture.isCube ? this.texture = ul.Parse(e.texture, t, i) : this.texture = De.Parse(e.texture, t, i)), this.generateOnlyFragmentCode = e.generateOnlyFragmentCode, this._setTarget(); } } F([ ir("Generate only fragment code", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0, update: !0, onValidation: fL._OnGenerateOnlyFragmentCodeChanged } }) ], fL.prototype, "generateOnlyFragmentCode", void 0); Be("BABYLON.ReflectionTextureBaseBlock", fL); class kre extends fL { _onGenerateOnlyFragmentCodeChanged() { return this.position.isConnected ? (this.generateOnlyFragmentCode = !this.generateOnlyFragmentCode, Ce.Error("The position input must not be connected to be able to switch!"), !1) : this.worldPosition.isConnected ? (this.generateOnlyFragmentCode = !this.generateOnlyFragmentCode, Ce.Error("The worldPosition input must not be connected to be able to switch!"), !1) : (this._setTarget(), !0); } _setTarget() { super._setTarget(), this.getInputByName("position").target = this.generateOnlyFragmentCode ? Le.Fragment : Le.Vertex, this.getInputByName("worldPosition").target = this.generateOnlyFragmentCode ? 
Le.Fragment : Le.Vertex; } /** * Create a new ReflectionTextureBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("position", ue.AutoDetect, !1, Le.Vertex), this.registerInput("worldPosition", ue.Vector4, !1, Le.Vertex), this.registerInput("worldNormal", ue.Vector4, !1, Le.Fragment), this.registerInput("world", ue.Matrix, !1, Le.Vertex), this.registerInput("cameraPosition", ue.Vector3, !1, Le.Fragment), this.registerInput("view", ue.Matrix, !1, Le.Fragment), this.registerOutput("rgb", ue.Color3, Le.Fragment), this.registerOutput("rgba", ue.Color4, Le.Fragment), this.registerOutput("r", ue.Float, Le.Fragment), this.registerOutput("g", ue.Float, Le.Fragment), this.registerOutput("b", ue.Float, Le.Fragment), this.registerOutput("a", ue.Float, Le.Fragment), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReflectionTextureBlock"; } /** * Gets the position input component */ get position() { return this._inputs[0]; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[1]; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[2]; } /** * Gets the world input component */ get world() { return this._inputs[3]; } /** * Gets the camera (or eye) position component */ get cameraPosition() { return this._inputs[4]; } /** * Gets the view input component */ get view() { return this._inputs[5]; } /** * Gets the rgb output component */ get rgb() { return this._outputs[0]; } /** * Gets the rgba output component */ get rgba() { return this._outputs[1]; } /** * Gets the r output component */ get r() { return this._outputs[2]; } /** * Gets the g output component */ get g() { return this._outputs[3]; } /** * Gets the b output component */ get b() { return this._outputs[4]; } /** * Gets the a output component */ get a() { return this._outputs[5]; } autoConfigure(e, t = () => !0) { if (super.autoConfigure(e), !this.cameraPosition.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.CameraPosition && t(r)); i || (i = new vs("cameraPosition"), i.setAsSystemValue(Ms.CameraPosition)), i.output.connectTo(this.cameraPosition); } } _buildBlock(e) { if (super._buildBlock(e), !this.texture) return e.compilationString += this.writeOutputs(e, "vec4(0.)"), this; if (e.target !== Le.Fragment) return e.compilationString += this.handleVertexSide(e), this; this.generateOnlyFragmentCode && (e.compilationString += this.handleVertexSide(e)), this.handleFragmentSideInits(e); const t = e._getFreeVariableName("normalWUnit"); return e.compilationString += `vec4 ${t} = normalize(${this.worldNormal.associatedVariableName}); `, e.compilationString += this.handleFragmentSideCodeReflectionCoords(t), e.compilationString += this.handleFragmentSideCodeReflectionColor(void 0, ""), e.compilationString += this.writeOutputs(e, this._reflectionColorName), this; } } Be("BABYLON.ReflectionTextureBlock", kre); class vN extends Wi { /** * Create a new SceneDepthBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this.useNonLinearDepth = !1, this.storeCameraSpaceZ = !1, this.force32itsFloat = !1, this._isUnique = !0, this.registerInput("uv", ue.AutoDetect, !1, Le.VertexAndFragment), this.registerOutput("depth", ue.Float, Le.Neutral), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector2 | ue.Vector3 | ue.Vector4),
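/* Usage sketch (editor's addition): the ReflectionTextureBlock registered above exposes the inputs position, worldPosition, worldNormal, world, cameraPosition and view, plus rgb/rgba/r/g/b/a outputs. autoConfigure() fills position (attribute), world, view and cameraPosition when they are left unconnected, so a minimal setup only needs the texture and the world position/normal wiring. BABYLON.CubeTexture and the upstream/downstream blocks below are assumptions; file names are illustrative.
   const refl = new BABYLON.ReflectionTextureBlock("reflection");
   refl.texture = new BABYLON.CubeTexture("environment.env", scene);
   worldPos.output.connectTo(refl.worldPosition);    // vec4 world position from the vertex stage
   worldNormal.output.connectTo(refl.worldNormal);   // vec4 world normal
   refl.rgb.connectTo(fragmentOutput.rgb);           // or blend it with a base color first
*/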
this._inputs[0]._prioritizeVertex = !1; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SceneDepthBlock"; } /** * Gets the uv input component */ get uv() { return this._inputs[0]; } /** * Gets the depth output component */ get depth() { return this._outputs[0]; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("textureSampler"); } get target() { return !this.uv.isConnected || this.uv.sourceBlock.isInput ? Le.VertexAndFragment : Le.Fragment; } _getTexture(e) { return e.enableDepthRenderer(void 0, this.useNonLinearDepth, this.force32itsFloat, void 0, this.storeCameraSpaceZ).getDepthMap(); } bind(e, t) { const i = this._getTexture(t.getScene()); e.setTexture(this._samplerName, i); } _injectVertexCode(e) { const t = this.uv; if (t.connectedPoint.ownerBlock.isInput && (t.connectedPoint.ownerBlock.isAttribute || e._emitUniformFromString(t.associatedVariableName, "vec" + (t.type === ue.Vector3 ? "3" : t.type === ue.Vector4 ? "4" : "2"))), this._mainUVName = "vMain" + t.associatedVariableName, e._emitVaryingFromString(this._mainUVName, "vec2"), e.compilationString += `${this._mainUVName} = ${t.associatedVariableName}.xy; `, !!this._outputs.some((i) => i.isConnectedInVertexShader)) { this._writeTextureRead(e, !0); for (const i of this._outputs) i.hasEndpoints && this._writeOutput(e, i, "r", !0); } } _writeTextureRead(e, t = !1) { const i = this.uv; if (t) { if (e.target === Le.Fragment) return; e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${i.associatedVariableName}.xy); `; return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${i.associatedVariableName}.xy); `; return; } e.compilationString += `vec4 ${this._tempTextureRead} = texture2D(${this._samplerName}, ${this._mainUVName}); `; } _writeOutput(e, t, i, r = !1) { if (r) { if (e.target === Le.Fragment) return; e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `; return; } if (this.uv.ownerBlock.target === Le.Fragment) { e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `; return; } e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}; `; } _buildBlock(e) { if (super._buildBlock(e), this._samplerName = e._getFreeVariableName(this.name + "Sampler"), this._tempTextureRead = e._getFreeVariableName("tempTextureRead"), e.sharedData.bindableBlocks.indexOf(this) < 0 && e.sharedData.bindableBlocks.push(this), e.target !== Le.Fragment) { e._emit2DSampler(this._samplerName), this._injectVertexCode(e); return; } if (this._outputs.some((t) => t.isConnectedInFragmentShader)) { e._emit2DSampler(this._samplerName), this._writeTextureRead(e); for (const t of this._outputs) t.hasEndpoints && this._writeOutput(e, t, "r"); return this; } } serialize() { const e = super.serialize(); return e.useNonLinearDepth = this.useNonLinearDepth, e.storeCameraSpaceZ = this.storeCameraSpaceZ, e.force32itsFloat = this.force32itsFloat, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.useNonLinearDepth = e.useNonLinearDepth, this.storeCameraSpaceZ = !!e.storeCameraSpaceZ, this.force32itsFloat = e.force32itsFloat; } } F([ ir("Use non linear depth", $i.Boolean, "ADVANCED", { notifiers: { activatePreviewCommand: !0, callback: (c, e) => { const t = e; let i = !1; return 
t.useNonLinearDepth && (t.storeCameraSpaceZ = !1, i = !0), c && c.disableDepthRenderer(), i; } } }) ], vN.prototype, "useNonLinearDepth", void 0); F([ ir("Store Camera space Z", $i.Boolean, "ADVANCED", { notifiers: { activatePreviewCommand: !0, callback: (c, e) => { const t = e; let i = !1; return t.storeCameraSpaceZ && (t.useNonLinearDepth = !1, i = !0), c && c.disableDepthRenderer(), i; } } }) ], vN.prototype, "storeCameraSpaceZ", void 0); F([ ir("Force 32 bits float", $i.Boolean, "ADVANCED", { notifiers: { activatePreviewCommand: !0, callback: (c) => c == null ? void 0 : c.disableDepthRenderer() } }) ], vN.prototype, "force32itsFloat", void 0); Be("BABYLON.SceneDepthBlock", vN); class zre extends Wi { /** * Create a new ClipPlanesBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment, !0), this.registerInput("worldPosition", ue.Vector4, !1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ClipPlanesBlock"; } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("vClipPlane"), e._excludeVariableName("fClipDistance"), e._excludeVariableName("vClipPlane2"), e._excludeVariableName("fClipDistance2"), e._excludeVariableName("vClipPlane3"), e._excludeVariableName("fClipDistance3"), e._excludeVariableName("vClipPlane4"), e._excludeVariableName("fClipDistance4"), e._excludeVariableName("vClipPlane5"), e._excludeVariableName("fClipDistance5"), e._excludeVariableName("vClipPlane6"), e._excludeVariableName("fClipDistance6"); } /** * Gets the worldPosition input component */ get worldPosition() { return this._inputs[0]; } get target() { return Le.VertexAndFragment; } set target(e) { } prepareDefines(e, t, i) { var r, s, n, a, l, o; const u = e.getScene(), h = !!((r = t.clipPlane) !== null && r !== void 0 ? r : u.clipPlane), d = !!((s = t.clipPlane2) !== null && s !== void 0 ? s : u.clipPlane2), f = !!((n = t.clipPlane3) !== null && n !== void 0 ? n : u.clipPlane3), p = !!((a = t.clipPlane4) !== null && a !== void 0 ? a : u.clipPlane4), m = !!((l = t.clipPlane5) !== null && l !== void 0 ? l : u.clipPlane5), _ = !!((o = t.clipPlane6) !== null && o !== void 0 ? 
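/* Usage sketch (editor's addition): the SceneDepthBlock registered above samples the scene depth map (it enables the scene's depth renderer in _getTexture/bind). The "uv" input expects screen-space coordinates and "depth" outputs a float; the node editor keeps useNonLinearDepth and storeCameraSpaceZ mutually exclusive through the decorator callbacks above. Block, input and property names mirror this bundle; the screenUV source block is an assumption.
   const depthBlock = new BABYLON.SceneDepthBlock("sceneDepth");
   depthBlock.storeCameraSpaceZ = true;
   screenUV.output.connectTo(depthBlock.uv);
   depthBlock.depth.connectTo(someMathBlock.left);   // e.g. feed a remap or fog computation
*/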
o : u.clipPlane6); i.setValue("CLIPPLANE", h, !0), i.setValue("CLIPPLANE2", d, !0), i.setValue("CLIPPLANE3", f, !0), i.setValue("CLIPPLANE4", p, !0), i.setValue("CLIPPLANE5", m, !0), i.setValue("CLIPPLANE6", _, !0); } bind(e, t, i) { if (!i) return; const r = i.getScene(); Ec(e, t, r); } _buildBlock(e) { super._buildBlock(e); const t = `//${this.name}`; if (e.target !== Le.Fragment) { const i = this.worldPosition; e._emitFunctionFromInclude("clipPlaneVertexDeclaration", t, { replaceStrings: [{ search: /uniform vec4 vClipPlane\d*;/g, replace: "" }] }), e.compilationString += e._emitCodeFromInclude("clipPlaneVertex", t, { replaceStrings: [{ search: /worldPos/g, replace: i.associatedVariableName }] }), e._emitUniformFromString("vClipPlane", "vec4"), e._emitUniformFromString("vClipPlane2", "vec4"), e._emitUniformFromString("vClipPlane3", "vec4"), e._emitUniformFromString("vClipPlane4", "vec4"), e._emitUniformFromString("vClipPlane5", "vec4"), e._emitUniformFromString("vClipPlane6", "vec4"); return; } return e.sharedData.bindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this), e._emitFunctionFromInclude("clipPlaneFragmentDeclaration", t), e.compilationString += e._emitCodeFromInclude("clipPlaneFragment", t), this; } } Be("BABYLON.ClipPlanesBlock", zre); class Hre extends Wi { /** * The texture associated with the node is the prepass texture */ get texture() { return null; } set texture(e) { } /** * Creates a new PrePassTextureBlock * @param name defines the block name * @param target defines the target of that block (VertexAndFragment by default) */ constructor(e, t = Le.VertexAndFragment) { super(e, t, !1), this.registerOutput("position", ue.Object, Le.VertexAndFragment, new Yo("position", this, no.Output, U4, "ImageSourceBlock")), this.registerOutput("depth", ue.Object, Le.VertexAndFragment, new Yo("depth", this, no.Output, U4, "ImageSourceBlock")), this.registerOutput("normal", ue.Object, Le.VertexAndFragment, new Yo("normal", this, no.Output, U4, "ImageSourceBlock")); } /** * Returns the sampler name associated with the node connection point * @param output * @returns */ getSamplerName(e) { return e === this._outputs[0] ? this._positionSamplerName : e === this._outputs[1] ? this._depthSamplerName : e === this._outputs[2] ? 
this._normalSamplerName : ""; } /** * Gets the position texture */ get position() { return this._outputs[0]; } /** * Gets the depth texture */ get depth() { return this._outputs[1]; } /** * Gets the normal texture */ get normal() { return this._outputs[2]; } /** * Gets the sampler name associated with this image source */ get positionSamplerName() { return this._positionSamplerName; } /** * Gets the sampler name associated with this image source */ get normalSamplerName() { return this._normalSamplerName; } /** * Gets the sampler name associated with this image source */ get depthSamplerName() { return this._depthSamplerName; } /** * Gets the current class name * @returns the class name */ getClassName() { return "PrePassTextureBlock"; } _buildBlock(e) { if (super._buildBlock(e), e.target !== Le.Vertex) return this._positionSamplerName = "prepassPositionSampler", this._depthSamplerName = "prepassDepthSampler", this._normalSamplerName = "prepassNormalSampler", e.sharedData.variableNames.prepassPositionSampler = 0, e.sharedData.variableNames.prepassDepthSampler = 0, e.sharedData.variableNames.prepassNormalSampler = 0, e.sharedData.textureBlocks.push(this), e.sharedData.bindableBlocks.push(this), e._emit2DSampler(this._positionSamplerName), e._emit2DSampler(this._depthSamplerName), e._emit2DSampler(this._normalSamplerName), this; } bind(e, t) { const r = t.getScene().enablePrePassRenderer(); if (!r) return; const s = r.defaultRT; s.textures && (this.position.isConnected && e.setTexture(this._positionSamplerName, s.textures[r.getIndex(1)]), this.depth.isConnected && e.setTexture(this._depthSamplerName, s.textures[r.getIndex(5)]), this.normal.isConnected && e.setTexture(this._normalSamplerName, s.textures[r.getIndex(6)])); } } Be("BABYLON.PrePassTextureBlock", Hre); class Gre extends Wi { /** Gets the list of attached endpoints */ get endpoints() { return this._endpoints; } /** * Gets or sets the target of the block */ get target() { const e = this._inputs[0]; if (e.isConnected) { const t = e.connectedPoint.ownerBlock; if (t.target !== Le.VertexAndFragment) return t.target; if (e.connectedPoint.target !== Le.VertexAndFragment) return e.connectedPoint.target; } return this._target; } set target(e) { this._target & e || (this._target = e); } /** * Create a new NodeMaterialTeleportInBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this._endpoints = [], this.registerInput("input", ue.AutoDetect); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NodeMaterialTeleportInBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** Gets a boolean indicating that this connection will be used in the fragment shader */ isConnectedInFragmentShader() { return this.endpoints.some((e) => e.output.isConnectedInFragmentShader); } _dumpCode(e, t) { let i = super._dumpCode(e, t); for (const r of this.endpoints) t.indexOf(r) === -1 && (i += r._dumpCode(e, t)); return i; } /** * Checks if the current block is an ancestor of a given block * @param block defines the potential descendant block to check * @returns true if block is a descendant */ isAnAncestorOf(e) { for (const t of this.endpoints) if (t === e || t.isAnAncestorOf(e)) return !0; return !1; } /** * Add an endpoint to this block * @param endpoint defines the endpoint to attach to */ attachToEndpoint(e) { e.detach(), this._endpoints.push(e), e._entryPoint = this, e._outputs[0]._typeConnectionSource = this._inputs[0], e._tempEntryPointUniqueId = null, e.name =
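/* Editor's note (kept as a comment): the PrePassTextureBlock registered above exposes the prepass position / depth / normal render targets as outputs; bind() fetches them from scene.enablePrePassRenderer(). A sketch, with the consuming block assumed to accept an ImageSource-style connection:
   const prepass = new BABYLON.PrePassTextureBlock("prepass");
   // prepass.position, prepass.depth and prepass.normal map to the samplers
   // prepassPositionSampler / prepassDepthSampler / prepassNormalSampler emitted in _buildBlock above.
*/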
"> " + this.name; } /** * Remove enpoint from this block * @param endpoint define the endpoint to remove */ detachFromEndpoint(e) { const t = this._endpoints.indexOf(e); t !== -1 && (this._endpoints.splice(t, 1), e._outputs[0]._typeConnectionSource = null, e._entryPoint = null); } /** * Release resources */ dispose() { super.dispose(); for (const e of this._endpoints) this.detachFromEndpoint(e); this._endpoints = []; } } Be("BABYLON.NodeMaterialTeleportInBlock", Gre); class Kre extends Wi { /** * Create a new TeleportOutBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this._entryPoint = null, this._tempEntryPointUniqueId = null, this.registerOutput("output", ue.BasedOnInput); } /** * Gets the entry point */ get entryPoint() { return this._entryPoint; } /** * Gets the current class name * @returns the class name */ getClassName() { return "NodeMaterialTeleportOutBlock"; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets or sets the target of the block */ get target() { return this._entryPoint ? this._entryPoint.target : this._target; } set target(e) { this._target & e || (this._target = e); } /** Detach from entry point */ detach() { this._entryPoint && this._entryPoint.detachFromEndpoint(this); } _buildBlock(e) { super._buildBlock(e), this.entryPoint && (e.compilationString += this._declareOutput(this.output, e) + ` = ${this.entryPoint.input.associatedVariableName}; `); } /** * Clone the current block to a new identical block * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a copy of the current block */ clone(e, t = "") { const i = super.clone(e, t); return this.entryPoint && this.entryPoint.attachToEndpoint(i), i; } _customBuildStep(e, t) { this.entryPoint && this.entryPoint.build(e, t); } _dumpCode(e, t) { let i = ""; return this.entryPoint && t.indexOf(this.entryPoint) === -1 && (i += this.entryPoint._dumpCode(e, t)), i + super._dumpCode(e, t); } _dumpCodeForOutputConnections(e) { let t = super._dumpCodeForOutputConnections(e); return this.entryPoint && (t += this.entryPoint._dumpCodeForOutputConnections(e)), t; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return this.entryPoint && (e += `${this.entryPoint._codeVariableName}.attachToEndpoint(${this._codeVariableName}); `), e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { var e, t; const i = super.serialize(); return i.entryPoint = (t = (e = this.entryPoint) === null || e === void 0 ? void 0 : e.uniqueId) !== null && t !== void 0 ? 
t : "", i; } _deserialize(e, t, i) { super._deserialize(e, t, i), this._tempEntryPointUniqueId = e.entryPoint; } } Be("BABYLON.NodeMaterialTeleportOutBlock", Kre); class Wre extends Wi { /** * Creates a new AddBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].acceptedConnectionPointTypes.push(ue.Float), this._inputs[1].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "AddBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${this.left.associatedVariableName} + ${this.right.associatedVariableName}; `, this; } } Be("BABYLON.AddBlock", Wre); class jre extends Wi { /** * Creates a new ScaleBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.AutoDetect), this.registerInput("factor", ue.Float), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ScaleBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the factor input component */ get factor() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${this.input.associatedVariableName} * ${this.factor.associatedVariableName}; `, this; } } Be("BABYLON.ScaleBlock", jre); class NU extends Wi { /** * Creates a new ClampBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.minimum = 0, this.maximum = 1, this.registerInput("value", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ClampBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = clamp(${this.value.associatedVariableName}, ${this._writeFloat(this.minimum)}, ${this._writeFloat(this.maximum)}); `, this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.minimum = ${this.minimum}; `; return e += `${this._codeVariableName}.maximum = ${this.maximum}; `, e; } serialize() { const e = super.serialize(); return e.minimum = this.minimum, e.maximum = this.maximum, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.minimum = e.minimum, this.maximum = e.maximum; } } F([ ir("Minimum", $i.Float) ], NU.prototype, "minimum", void 0); F([ ir("Maximum", $i.Float) ], NU.prototype, "maximum", void 0); Be("BABYLON.ClampBlock", 
NU); class Xre extends Wi { /** * Creates a new CrossBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.Vector3), this._linkConnectionTypes(0, 1), this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[0].excludedConnectionPointTypes.push(ue.Vector2), this._inputs[1].excludedConnectionPointTypes.push(ue.Float), this._inputs[1].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[1].excludedConnectionPointTypes.push(ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CrossBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = cross(${this.left.associatedVariableName}.xyz, ${this.right.associatedVariableName}.xyz); `, this; } } Be("BABYLON.CrossBlock", Xre); class Yre extends Wi { /** * Gets or sets the options for this custom block */ get options() { return this._options; } set options(e) { this._deserializeOptions(e); } /** * Creates a new CustomBlock * @param name defines the block name */ constructor(e) { super(e); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CustomBlock"; } _buildBlock(e) { super._buildBlock(e); let t = this._code, i = this._options.functionName; this._inputs.forEach((s) => { const n = new RegExp("\\{TYPE_" + s.name + "\\}", "gm"), a = e._getGLType(s.type); t = t.replace(n, a), i = i.replace(n, a); }), this._outputs.forEach((s) => { const n = new RegExp("\\{TYPE_" + s.name + "\\}", "gm"), a = e._getGLType(s.type); t = t.replace(n, a), i = i.replace(n, a); }), e._emitFunction(i, t, ""), this._outputs.forEach((s) => { e.compilationString += this._declareOutput(s, e) + `; `; }), e.compilationString += i + "("; let r = !1; return this._inputs.forEach((s, n) => { var a, l, o; n > 0 && (e.compilationString += ", "), this._inputSamplers && this._inputSamplers.indexOf(s.name) !== -1 ? e.compilationString += (o = (l = (a = s.connectedPoint) === null || a === void 0 ? void 0 : a.ownerBlock) === null || l === void 0 ? void 0 : l.samplerName) !== null && o !== void 0 ? o : s.associatedVariableName : e.compilationString += s.associatedVariableName, r = !0; }), this._outputs.forEach((s, n) => { (n > 0 || r) && (e.compilationString += ", "), e.compilationString += s.associatedVariableName; }), e.compilationString += `); `, this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.options = ${JSON.stringify(this._options)}; `, e; } serialize() { const e = super.serialize(); return e.options = this._options, e; } _deserialize(e, t, i) { this._deserializeOptions(e.options), super._deserialize(e, t, i); } _deserializeOptions(e) { var t, i, r; this._options = e, this._code = e.code.join(` `) + ` `, this.name = this.name || e.name, this.target = Le[e.target], (t = e.inParameters) === null || t === void 0 || t.forEach((s, n) => { const a = ue[s.type]; s.type === "sampler2D" || s.type === "samplerCube" ? 
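/* Usage sketch (editor's addition): the arithmetic blocks registered above (AddBlock, ScaleBlock, ClampBlock, CrossBlock, ...) all follow the same pattern: register inputs, emit one line of GLSL in _buildBlock, and type the output from the first input. A small chain, with the upstream baseColor / tint blocks assumed:
   const add = new BABYLON.AddBlock("add");
   const clamp = new BABYLON.ClampBlock("clamp01");
   clamp.minimum = 0; clamp.maximum = 1;
   baseColor.output.connectTo(add.left);
   tint.output.connectTo(add.right);
   add.output.connectTo(clamp.value);
   // clamp.output now carries clamp(left + right, 0.0, 1.0) in the generated shader.
*/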
(this._inputSamplers = this._inputSamplers || [], this._inputSamplers.push(s.name), this.registerInput(s.name, ue.Object, !0, Le.VertexAndFragment, new Yo(s.name, this, no.Input, U4, "ImageSourceBlock"))) : this.registerInput(s.name, a), Object.defineProperty(this, s.name, { get: function() { return this._inputs[n]; }, enumerable: !0, configurable: !0 }); }), (i = e.outParameters) === null || i === void 0 || i.forEach((s, n) => { this.registerOutput(s.name, ue[s.type]), Object.defineProperty(this, s.name, { get: function() { return this._outputs[n]; }, enumerable: !0, configurable: !0 }), s.type === "BasedOnInput" && (this._outputs[n]._typeConnectionSource = this._findInputByName(s.typeFromInput)[0]); }), (r = e.inLinkedConnectionTypes) === null || r === void 0 || r.forEach((s) => { this._linkConnectionTypes(this._findInputByName(s.input1)[1], this._findInputByName(s.input2)[1]); }); } _findInputByName(e) { if (!e) return null; for (let t = 0; t < this._inputs.length; t++) if (this._inputs[t].name === e) return [this._inputs[t], t]; return null; } } Be("BABYLON.CustomBlock", Yre); class Qre extends Wi { /** * Creates a new DotBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.Float), this._linkConnectionTypes(0, 1), this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[1].excludedConnectionPointTypes.push(ue.Float), this._inputs[1].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DotBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = dot(${this.left.associatedVariableName}, ${this.right.associatedVariableName}); `, this; } } Be("BABYLON.DotBlock", Qre); class $re extends Wi { /** * Creates a new NormalizeBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NormalizeBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this._inputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = normalize(${i.associatedVariableName}); `, this; } } Be("BABYLON.NormalizeBlock", $re); class Zre extends Wi { /** * Create a new ColorMergerBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.rSwizzle = "r", this.gSwizzle = "g", this.bSwizzle = "b", this.aSwizzle = "a", this.registerInput("rgb ", ue.Color3, !0), this.registerInput("r", ue.Float, !0), this.registerInput("g", ue.Float, !0), this.registerInput("b", ue.Float, !0), this.registerInput("a", 
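/* Usage sketch (editor's addition): CustomBlock (registered above) builds its inputs and outputs from a JSON descriptor; the field names below are exactly the ones read by _deserializeOptions, while the GLSL body and block name are illustrative. {TYPE_...} placeholders are replaced with the resolved GLSL type of the matching parameter during _buildBlock.
   const custom = new BABYLON.CustomBlock("myRemap");
   custom.options = {
       name: "myRemap",
       target: "Neutral",
       functionName: "myRemap_{TYPE_value}",
       inParameters: [{ name: "value", type: "AutoDetect" }],
       outParameters: [{ name: "result", type: "BasedOnInput", typeFromInput: "value" }],
       inLinkedConnectionTypes: [],
       code: [
           "void myRemap_{TYPE_value}({TYPE_value} value, out {TYPE_result} result)",
           "{ result = value * 0.5 + 0.5; }"
       ]
   };
   // custom.value and custom.result are then available as regular connection points.
*/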
ue.Float, !0), this.registerOutput("rgba", ue.Color4), this.registerOutput("rgb", ue.Color3); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ColorMergerBlock"; } /** * Gets the rgb component (input) */ get rgbIn() { return this._inputs[0]; } /** * Gets the r component (input) */ get r() { return this._inputs[1]; } /** * Gets the g component (input) */ get g() { return this._inputs[2]; } /** * Gets the b component (input) */ get b() { return this._inputs[3]; } /** * Gets the a component (input) */ get a() { return this._inputs[4]; } /** * Gets the rgba component (output) */ get rgba() { return this._outputs[0]; } /** * Gets the rgb component (output) */ get rgbOut() { return this._outputs[1]; } /** * Gets the rgb component (output) * @deprecated Please use rgbOut instead. */ get rgb() { return this.rgbOut; } _inputRename(e) { return e === "rgb " ? "rgbIn" : e; } _buildSwizzle(e) { return "." + (this.rSwizzle + this.gSwizzle + this.bSwizzle + this.aSwizzle).substr(0, e); } _buildBlock(e) { super._buildBlock(e); const t = this.r, i = this.g, r = this.b, s = this.a, n = this.rgbIn, a = this._outputs[0], l = this._outputs[1]; return n.isConnected ? (a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = vec4(${n.associatedVariableName}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(4)}; `), l.hasEndpoints && (e.compilationString += this._declareOutput(l, e) + ` = ${n.associatedVariableName}${this._buildSwizzle(3)}; `)) : (a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = vec4(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"}, ${r.isConnected ? this._writeVariable(r) : "0.0"}, ${s.isConnected ? this._writeVariable(s) : "0.0"})${this._buildSwizzle(4)}; `), l.hasEndpoints && (e.compilationString += this._declareOutput(l, e) + ` = vec3(${t.isConnected ? this._writeVariable(t) : "0.0"}, ${i.isConnected ? this._writeVariable(i) : "0.0"}, ${r.isConnected ? this._writeVariable(r) : "0.0"})${this._buildSwizzle(3)}; `)), this; } serialize() { const e = super.serialize(); return e.rSwizzle = this.rSwizzle, e.gSwizzle = this.gSwizzle, e.bSwizzle = this.bSwizzle, e.aSwizzle = this.aSwizzle, e; } _deserialize(e, t, i) { var r, s, n, a; super._deserialize(e, t, i), this.rSwizzle = (r = e.rSwizzle) !== null && r !== void 0 ? r : "r", this.gSwizzle = (s = e.gSwizzle) !== null && s !== void 0 ? s : "g", this.bSwizzle = (n = e.bSwizzle) !== null && n !== void 0 ? n : "b", this.aSwizzle = (a = e.aSwizzle) !== null && a !== void 0 ? 
a : "a"; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.rSwizzle = "${this.rSwizzle}"; `, e += `${this._codeVariableName}.gSwizzle = "${this.gSwizzle}"; `, e += `${this._codeVariableName}.bSwizzle = "${this.bSwizzle}"; `, e += `${this._codeVariableName}.aSwizzle = "${this.aSwizzle}"; `, e; } } Be("BABYLON.ColorMergerBlock", Zre); class qre extends Wi { /** * Create a new VectorSplitterBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("xyzw", ue.Vector4, !0), this.registerInput("xyz ", ue.Vector3, !0), this.registerInput("xy ", ue.Vector2, !0), this.registerOutput("xyz", ue.Vector3), this.registerOutput("xy", ue.Vector2), this.registerOutput("zw", ue.Vector2), this.registerOutput("x", ue.Float), this.registerOutput("y", ue.Float), this.registerOutput("z", ue.Float), this.registerOutput("w", ue.Float), this.inputsAreExclusive = !0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "VectorSplitterBlock"; } /** * Gets the xyzw component (input) */ get xyzw() { return this._inputs[0]; } /** * Gets the xyz component (input) */ get xyzIn() { return this._inputs[1]; } /** * Gets the xy component (input) */ get xyIn() { return this._inputs[2]; } /** * Gets the xyz component (output) */ get xyzOut() { return this._outputs[0]; } /** * Gets the xy component (output) */ get xyOut() { return this._outputs[1]; } /** * Gets the zw component (output) */ get zw() { return this._outputs[2]; } /** * Gets the x component (output) */ get x() { return this._outputs[3]; } /** * Gets the y component (output) */ get y() { return this._outputs[4]; } /** * Gets the z component (output) */ get z() { return this._outputs[5]; } /** * Gets the w component (output) */ get w() { return this._outputs[6]; } _inputRename(e) { switch (e) { case "xy ": return "xyIn"; case "xyz ": return "xyzIn"; default: return e; } } _outputRename(e) { switch (e) { case "xy": return "xyOut"; case "xyz": return "xyzOut"; default: return e; } } _buildBlock(e) { super._buildBlock(e); const t = this.xyzw.isConnected ? this.xyzw : this.xyzIn.isConnected ? this.xyzIn : this.xyIn, i = this._outputs[0], r = this._outputs[1], s = this._outputs[2], n = this._outputs[3], a = this._outputs[4], l = this._outputs[5], o = this._outputs[6]; return i.hasEndpoints && (t === this.xyIn ? 
e.compilationString += this._declareOutput(i, e) + ` = vec3(${t.associatedVariableName}, 0.0); ` : e.compilationString += this._declareOutput(i, e) + ` = ${t.associatedVariableName}.xyz; `), s.hasEndpoints && this.xyzw.isConnected && (e.compilationString += this._declareOutput(s, e) + ` = ${this.xyzw.associatedVariableName}.zw; `), r.hasEndpoints && (e.compilationString += this._declareOutput(r, e) + ` = ${t.associatedVariableName}.xy; `), n.hasEndpoints && (e.compilationString += this._declareOutput(n, e) + ` = ${t.associatedVariableName}.x; `), a.hasEndpoints && (e.compilationString += this._declareOutput(a, e) + ` = ${t.associatedVariableName}.y; `), l.hasEndpoints && (e.compilationString += this._declareOutput(l, e) + ` = ${t.associatedVariableName}.z; `), o.hasEndpoints && (e.compilationString += this._declareOutput(o, e) + ` = ${t.associatedVariableName}.w; `), this; } } Be("BABYLON.VectorSplitterBlock", qre); class Jre extends Wi { /** * Creates a new LerpBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerInput("gradient", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._linkConnectionTypes(1, 2, !0), this._inputs[2].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "LerpBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the gradient operand input component */ get gradient() { return this._inputs[2]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = mix(${this.left.associatedVariableName} , ${this.right.associatedVariableName}, ${this.gradient.associatedVariableName}); `, this; } } Be("BABYLON.LerpBlock", Jre); class ese extends Wi { /** * Creates a new DivideBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].acceptedConnectionPointTypes.push(ue.Float), this._inputs[1].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DivideBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${this.left.associatedVariableName} / ${this.right.associatedVariableName}; `, this; } } Be("BABYLON.DivideBlock", ese); class tse extends Wi { /** * Creates a new SubtractBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), 
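/* Usage sketch (editor's addition): LerpBlock (registered above) emits mix(left, right, gradient); the gradient accepts either a float or a value matching the operands (see _linkConnectionTypes(1, 2, true) and the Float acceptance on input 2). Upstream colorA / colorB / mask blocks are assumptions.
   const lerp = new BABYLON.LerpBlock("blend");
   colorA.output.connectTo(lerp.left);
   colorB.output.connectTo(lerp.right);
   mask.output.connectTo(lerp.gradient);   // float mask, e.g. a Fresnel term or a texture channel
*/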
this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].acceptedConnectionPointTypes.push(ue.Float), this._inputs[1].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SubtractBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${this.left.associatedVariableName} - ${this.right.associatedVariableName}; `, this; } } Be("BABYLON.SubtractBlock", tse); class ise extends Wi { /** * Creates a new StepBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.Float), this.registerInput("edge", ue.Float), this.registerOutput("output", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "StepBlock"; } /** * Gets the value operand input component */ get value() { return this._inputs[0]; } /** * Gets the edge operand input component */ get edge() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = step(${this.edge.associatedVariableName}, ${this.value.associatedVariableName}); `, this; } } Be("BABYLON.StepBlock", ise); class yW extends Wi { /** * Creates a new OneMinusBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._outputs[0].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "OneMinusBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = 1. 
- ${this.input.associatedVariableName}; `, this; } } Be("BABYLON.OneMinusBlock", yW); Be("BABYLON.OppositeBlock", yW); class CW extends Wi { /** * Creates a new ViewDirectionBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("worldPosition", ue.Vector4), this.registerInput("cameraPosition", ue.Vector3), this.registerOutput("output", ue.Vector3); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ViewDirectionBlock"; } /** * Gets the world position component */ get worldPosition() { return this._inputs[0]; } /** * Gets the camera position component */ get cameraPosition() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } autoConfigure(e, t = () => !0) { if (!this.cameraPosition.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.CameraPosition && t(r)); i || (i = new vs("cameraPosition"), i.setAsSystemValue(Ms.CameraPosition)), i.output.connectTo(this.cameraPosition); } } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = normalize(${this.cameraPosition.associatedVariableName} - ${this.worldPosition.associatedVariableName}.xyz); `, this; } } Be("BABYLON.ViewDirectionBlock", CW); class rse extends Wi { /** * Create a new FresnelBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("worldNormal", ue.Vector4), this.registerInput("viewDirection", ue.Vector3), this.registerInput("bias", ue.Float), this.registerInput("power", ue.Float), this.registerOutput("fresnel", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "FresnelBlock"; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[0]; } /** * Gets the view direction input component */ get viewDirection() { return this._inputs[1]; } /** * Gets the bias input component */ get bias() { return this._inputs[2]; } /** * Gets the power input component */ get power() { return this._inputs[3]; } /** * Gets the fresnel output component */ get fresnel() { return this._outputs[0]; } autoConfigure(e) { if (!this.viewDirection.isConnected) { const t = new CW("View direction"); t.output.connectTo(this.viewDirection), t.autoConfigure(e); } if (!this.bias.isConnected) { const t = new vs("bias"); t.value = 0, t.output.connectTo(this.bias); } if (!this.power.isConnected) { const t = new vs("power"); t.value = 1, t.output.connectTo(this.power); } } _buildBlock(e) { super._buildBlock(e); const t = `//${this.name}`; return e._emitFunctionFromInclude("fresnelFunction", t, { removeIfDef: !0 }), e.compilationString += this._declareOutput(this.fresnel, e) + ` = computeFresnelTerm(${this.viewDirection.associatedVariableName}.xyz, ${this.worldNormal.associatedVariableName}.xyz, ${this.bias.associatedVariableName}, ${this.power.associatedVariableName}); `, this; } } Be("BABYLON.FresnelBlock", rse); class sse extends Wi { /** * Creates a new MaxBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MaxBlock"; } /** * Gets the left operand
input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = max(${this.left.associatedVariableName}, ${this.right.associatedVariableName}); `, this; } } Be("BABYLON.MaxBlock", sse); class nse extends Wi { /** * Creates a new MinBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MinBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = min(${this.left.associatedVariableName}, ${this.right.associatedVariableName}); `, this; } } Be("BABYLON.MinBlock", nse); class ase extends Wi { /** * Creates a new DistanceBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.Float), this._linkConnectionTypes(0, 1), this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[1].excludedConnectionPointTypes.push(ue.Float), this._inputs[1].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DistanceBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = length(${this.left.associatedVariableName} - ${this.right.associatedVariableName}); `, this; } } Be("BABYLON.DistanceBlock", ase); class ose extends Wi { /** * Creates a new LengthBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.AutoDetect), this.registerOutput("output", ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "LengthBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = length(${this.value.associatedVariableName}); `, this; } } Be("BABYLON.LengthBlock", ose); class lse extends Wi { /** * Creates a new NegateBlock * @param name defines the block name */ constructor(e) { super(e, 
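/* Usage sketch (editor's addition): a classic rim/Fresnel setup with the blocks defined above. FresnelBlock auto-creates its ViewDirectionBlock and bias/power inputs when they are left unconnected (see its autoConfigure), so only the world normal needs to be supplied. The upstream worldNormal block and the OneMinusBlock inversion are assumptions for illustration.
   const fresnel = new BABYLON.FresnelBlock("fresnel");
   worldNormal.output.connectTo(fresnel.worldNormal);
   const invert = new BABYLON.OneMinusBlock("oneMinus");
   fresnel.fresnel.connectTo(invert.input);
   // invert.output can then drive LerpBlock.gradient to fade a reflection toward grazing angles.
*/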
Le.Neutral), this.registerInput("value", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "NegateBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = -1.0 * ${this.value.associatedVariableName}; `, this; } } Be("BABYLON.NegateBlock", lse); class cse extends Wi { /** * Creates a new PowBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.AutoDetect), this.registerInput("power", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PowBlock"; } /** * Gets the value operand input component */ get value() { return this._inputs[0]; } /** * Gets the power operand input component */ get power() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = pow(${this.value.associatedVariableName}, ${this.power.associatedVariableName}); `, this; } } Be("BABYLON.PowBlock", cse); class use extends Wi { /** * Creates a new RandomNumberBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("seed", ue.AutoDetect), this.registerOutput("output", ue.Float), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector2 | ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RandomNumberBlock"; } /** * Gets the seed input component */ get seed() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = `//${this.name}`; return e._emitFunctionFromInclude("helperFunctions", i), e.compilationString += this._declareOutput(t, e) + ` = getRand(${this.seed.associatedVariableName}.xy); `, this; } } Be("BABYLON.RandomNumberBlock", use); class hse extends Wi { /** * Creates a new ArcTan2Block * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("x", ue.Float), this.registerInput("y", ue.Float), this.registerOutput("output", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ArcTan2Block"; } /** * Gets the x operand input component */ get x() { return this._inputs[0]; } /** * Gets the y operand input component */ get y() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = atan(${this.x.associatedVariableName}, ${this.y.associatedVariableName}); `, this; } } Be("BABYLON.ArcTan2Block", hse); class dse extends Wi { /** * Creates a new SmoothStepBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.AutoDetect), 
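// SmoothStepBlock output follows GLSL smoothstep(edge0, edge1, value): 0 below edge0, 1 above edge1, Hermite-interpolated in between.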
this.registerInput("edge0", ue.Float), this.registerInput("edge1", ue.Float), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SmoothStepBlock"; } /** * Gets the value operand input component */ get value() { return this._inputs[0]; } /** * Gets the first edge operand input component */ get edge0() { return this._inputs[1]; } /** * Gets the second edge operand input component */ get edge1() { return this._inputs[2]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = smoothstep(${this.edge0.associatedVariableName}, ${this.edge1.associatedVariableName}, ${this.value.associatedVariableName}); `, this; } } Be("BABYLON.SmoothStepBlock", dse); class fse extends Wi { /** * Creates a new ReciprocalBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReciprocalBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return this.input.type === ue.Matrix ? e.compilationString += this._declareOutput(t, e) + ` = inverse(${this.input.associatedVariableName}); ` : e.compilationString += this._declareOutput(t, e) + ` = 1. / ${this.input.associatedVariableName}; `, this; } } Be("BABYLON.ReciprocalBlock", fse); class pse extends Wi { /** * Creates a new ReplaceColorBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.AutoDetect), this.registerInput("reference", ue.AutoDetect), this.registerInput("distance", ue.Float), this.registerInput("replacement", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._linkConnectionTypes(0, 3), this._inputs[0].excludedConnectionPointTypes.push(ue.Float), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[1].excludedConnectionPointTypes.push(ue.Float), this._inputs[1].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[3].excludedConnectionPointTypes.push(ue.Float), this._inputs[3].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReplaceColorBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the reference input component */ get reference() { return this._inputs[1]; } /** * Gets the distance input component */ get distance() { return this._inputs[2]; } /** * Gets the replacement input component */ get replacement() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + `; `, e.compilationString += `if (length(${this.value.associatedVariableName} - ${this.reference.associatedVariableName}) < ${this.distance.associatedVariableName}) { `, 
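// ReplaceColorBlock: the generated shader code declares the output, then swaps in 'replacement' when 'value' lies within 'distance' of 'reference' (Euclidean length), otherwise keeps 'value'.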
e.compilationString += `${t.associatedVariableName} = ${this.replacement.associatedVariableName}; `, e.compilationString += `} else { `, e.compilationString += `${t.associatedVariableName} = ${this.value.associatedVariableName}; `, e.compilationString += `} `, this; } } Be("BABYLON.ReplaceColorBlock", pse); class _se extends Wi { /** * Creates a new PosterizeBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("value", ue.AutoDetect), this.registerInput("steps", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[1].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PosterizeBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the steps input component */ get steps() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = floor(${this.value.associatedVariableName} / (1.0 / ${this.steps.associatedVariableName})) * (1.0 / ${this.steps.associatedVariableName}); `, this; } } Be("BABYLON.PosterizeBlock", _se); var YR; (function(c) { c[c.SawTooth = 0] = "SawTooth", c[c.Square = 1] = "Square", c[c.Triangle = 2] = "Triangle"; })(YR || (YR = {})); class mse extends Wi { /** * Creates a new WaveBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.kind = YR.SawTooth, this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "WaveBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; switch (this.kind) { case YR.SawTooth: { e.compilationString += this._declareOutput(t, e) + ` = ${this.input.associatedVariableName} - floor(0.5 + ${this.input.associatedVariableName}); `; break; } case YR.Square: { e.compilationString += this._declareOutput(t, e) + ` = 1.0 - 2.0 * round(fract(${this.input.associatedVariableName})); `; break; } case YR.Triangle: { e.compilationString += this._declareOutput(t, e) + ` = 2.0 * abs(2.0 * (${this.input.associatedVariableName} - floor(0.5 + ${this.input.associatedVariableName}))) - 1.0; `; break; } } return this; } serialize() { const e = super.serialize(); return e.kind = this.kind, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.kind = e.kind; } } Be("BABYLON.WaveBlock", mse); class HF { /** * Gets value indicating which step this color is associated with (between 0 and 1) */ get step() { return this._step; } /** * Sets a value indicating which step this color is associated with (between 0 and 1) */ set step(e) { this._step = e; } /** * Gets the color associated with this step */ get color() { return this._color; } /** * Sets the color associated with this step */ set color(e) { this._color = e; } /** * Creates a new GradientBlockColorStep * @param step defines a value indicating which 
step this color is associated with (between 0 and 1) * @param color defines the color associated with this step */ constructor(e, t) { this.step = e, this.color = t; } } class gse extends Wi { /** calls observable when the value is changed*/ colorStepsUpdated() { this.onValueChangedObservable.notifyObservers(this); } /** * Creates a new GradientBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.colorSteps = [new HF(0, ze.Black()), new HF(1, ze.White())], this.onValueChangedObservable = new Fe(), this.registerInput("gradient", ue.AutoDetect), this.registerOutput("output", ue.Color3), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Float | ue.Vector2 | ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GradientBlock"; } /** * Gets the gradient input component */ get gradient() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _writeColorConstant(e) { const t = this.colorSteps[e]; return `vec3(${t.color.r}, ${t.color.g}, ${t.color.b})`; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; if (!this.colorSteps.length || !this.gradient.connectedPoint) { e.compilationString += this._declareOutput(t, e) + ` = vec3(0., 0., 0.); `; return; } const i = e._getFreeVariableName("gradientTempColor"), r = e._getFreeVariableName("gradientTempPosition"); e.compilationString += `vec3 ${i} = ${this._writeColorConstant(0)}; `, e.compilationString += `float ${r}; `; let s = this.gradient.associatedVariableName; this.gradient.connectedPoint.type !== ue.Float && (s += ".x"); for (let n = 1; n < this.colorSteps.length; n++) { const a = this.colorSteps[n], l = this.colorSteps[n - 1]; e.compilationString += `${r} = clamp((${s} - ${e._emitFloat(l.step)}) / (${e._emitFloat(a.step)} - ${e._emitFloat(l.step)}), 0.0, 1.0) * step(${e._emitFloat(n)}, ${e._emitFloat(this.colorSteps.length - 1)}); `, e.compilationString += `${i} = mix(${i}, ${this._writeColorConstant(n)}, ${r}); `; } return e.compilationString += this._declareOutput(t, e) + ` = ${i}; `, this; } serialize() { const e = super.serialize(); e.colorSteps = []; for (const t of this.colorSteps) e.colorSteps.push({ step: t.step, color: { r: t.color.r, g: t.color.g, b: t.color.b } }); return e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.colorSteps.length = 0; for (const r of e.colorSteps) this.colorSteps.push(new HF(r.step, new ze(r.color.r, r.color.g, r.color.b))); } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); e += `${this._codeVariableName}.colorSteps = []; `; for (const t of this.colorSteps) e += `${this._codeVariableName}.colorSteps.push(new BABYLON.GradientBlockColorStep(${t.step}, new BABYLON.Color3(${t.color.r}, ${t.color.g}, ${t.color.b}))); `; return e; } } Be("BABYLON.GradientBlock", gse); class vse extends Wi { /** * Creates a new NLerpBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerInput("gradient", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._linkConnectionTypes(1, 2, !0), this._inputs[2].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NLerpBlock"; } /** * Gets the left operand 
input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the gradient operand input component */ get gradient() { return this._inputs[2]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = normalize(mix(${this.left.associatedVariableName} , ${this.right.associatedVariableName}, ${this.gradient.associatedVariableName})); `, this; } } Be("BABYLON.NLerpBlock", vse); class xW extends Wi { /** * Creates a new WorleyNoise3DBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.manhattanDistance = !1, this.registerInput("seed", ue.Vector3), this.registerInput("jitter", ue.Float), this.registerOutput("output", ue.Vector2), this.registerOutput("x", ue.Float), this.registerOutput("y", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "WorleyNoise3DBlock"; } /** * Gets the seed input component */ get seed() { return this._inputs[0]; } /** * Gets the jitter input component */ get jitter() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the x component */ get x() { return this._outputs[1]; } /** * Gets the y component */ get y() { return this._outputs[2]; } _buildBlock(e) { if (super._buildBlock(e), !this.seed.isConnected || !this.output.hasEndpoints && !this.x.hasEndpoints && !this.y.hasEndpoints) return; let t = `vec3 permute(vec3 x){ `; t += ` return mod((34.0 * x + 1.0) * x, 289.0); `, t += `} `, t += `vec3 dist(vec3 x, vec3 y, vec3 z, bool manhattanDistance){ `, t += ` return manhattanDistance ? 
abs(x) + abs(y) + abs(z) : (x * x + y * y + z * z); `, t += `} `, t += `vec2 worley(vec3 P, float jitter, bool manhattanDistance){ `, t += ` float K = 0.142857142857; // 1/7 `, t += ` float Ko = 0.428571428571; // 1/2-K/2 `, t += ` float K2 = 0.020408163265306; // 1/(7*7) `, t += ` float Kz = 0.166666666667; // 1/6 `, t += ` float Kzo = 0.416666666667; // 1/2-1/6*2 `, t += ` `, t += ` vec3 Pi = mod(floor(P), 289.0); `, t += ` vec3 Pf = fract(P) - 0.5; `, t += ` `, t += ` vec3 Pfx = Pf.x + vec3(1.0, 0.0, -1.0); `, t += ` vec3 Pfy = Pf.y + vec3(1.0, 0.0, -1.0); `, t += ` vec3 Pfz = Pf.z + vec3(1.0, 0.0, -1.0); `, t += ` `, t += ` vec3 p = permute(Pi.x + vec3(-1.0, 0.0, 1.0)); `, t += ` vec3 p1 = permute(p + Pi.y - 1.0); `, t += ` vec3 p2 = permute(p + Pi.y); `, t += ` vec3 p3 = permute(p + Pi.y + 1.0); `, t += ` `, t += ` vec3 p11 = permute(p1 + Pi.z - 1.0); `, t += ` vec3 p12 = permute(p1 + Pi.z); `, t += ` vec3 p13 = permute(p1 + Pi.z + 1.0); `, t += ` `, t += ` vec3 p21 = permute(p2 + Pi.z - 1.0); `, t += ` vec3 p22 = permute(p2 + Pi.z); `, t += ` vec3 p23 = permute(p2 + Pi.z + 1.0); `, t += ` `, t += ` vec3 p31 = permute(p3 + Pi.z - 1.0); `, t += ` vec3 p32 = permute(p3 + Pi.z); `, t += ` vec3 p33 = permute(p3 + Pi.z + 1.0); `, t += ` `, t += ` vec3 ox11 = fract(p11*K) - Ko; `, t += ` vec3 oy11 = mod(floor(p11*K), 7.0)*K - Ko; `, t += ` vec3 oz11 = floor(p11*K2)*Kz - Kzo; // p11 < 289 guaranteed `, t += ` `, t += ` vec3 ox12 = fract(p12*K) - Ko; `, t += ` vec3 oy12 = mod(floor(p12*K), 7.0)*K - Ko; `, t += ` vec3 oz12 = floor(p12*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox13 = fract(p13*K) - Ko; `, t += ` vec3 oy13 = mod(floor(p13*K), 7.0)*K - Ko; `, t += ` vec3 oz13 = floor(p13*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox21 = fract(p21*K) - Ko; `, t += ` vec3 oy21 = mod(floor(p21*K), 7.0)*K - Ko; `, t += ` vec3 oz21 = floor(p21*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox22 = fract(p22*K) - Ko; `, t += ` vec3 oy22 = mod(floor(p22*K), 7.0)*K - Ko; `, t += ` vec3 oz22 = floor(p22*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox23 = fract(p23*K) - Ko; `, t += ` vec3 oy23 = mod(floor(p23*K), 7.0)*K - Ko; `, t += ` vec3 oz23 = floor(p23*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox31 = fract(p31*K) - Ko; `, t += ` vec3 oy31 = mod(floor(p31*K), 7.0)*K - Ko; `, t += ` vec3 oz31 = floor(p31*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox32 = fract(p32*K) - Ko; `, t += ` vec3 oy32 = mod(floor(p32*K), 7.0)*K - Ko; `, t += ` vec3 oz32 = floor(p32*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 ox33 = fract(p33*K) - Ko; `, t += ` vec3 oy33 = mod(floor(p33*K), 7.0)*K - Ko; `, t += ` vec3 oz33 = floor(p33*K2)*Kz - Kzo; `, t += ` `, t += ` vec3 dx11 = Pfx + jitter*ox11; `, t += ` vec3 dy11 = Pfy.x + jitter*oy11; `, t += ` vec3 dz11 = Pfz.x + jitter*oz11; `, t += ` `, t += ` vec3 dx12 = Pfx + jitter*ox12; `, t += ` vec3 dy12 = Pfy.x + jitter*oy12; `, t += ` vec3 dz12 = Pfz.y + jitter*oz12; `, t += ` `, t += ` vec3 dx13 = Pfx + jitter*ox13; `, t += ` vec3 dy13 = Pfy.x + jitter*oy13; `, t += ` vec3 dz13 = Pfz.z + jitter*oz13; `, t += ` `, t += ` vec3 dx21 = Pfx + jitter*ox21; `, t += ` vec3 dy21 = Pfy.y + jitter*oy21; `, t += ` vec3 dz21 = Pfz.x + jitter*oz21; `, t += ` `, t += ` vec3 dx22 = Pfx + jitter*ox22; `, t += ` vec3 dy22 = Pfy.y + jitter*oy22; `, t += ` vec3 dz22 = Pfz.y + jitter*oz22; `, t += ` `, t += ` vec3 dx23 = Pfx + jitter*ox23; `, t += ` vec3 dy23 = Pfy.y + jitter*oy23; `, t += ` vec3 dz23 = Pfz.z + jitter*oz23; `, t += ` `, t += ` vec3 dx31 = Pfx + jitter*ox31; `, t += ` vec3 dy31 = Pfy.z + jitter*oy31; `, t += ` vec3 
dz31 = Pfz.x + jitter*oz31; `, t += ` `, t += ` vec3 dx32 = Pfx + jitter*ox32; `, t += ` vec3 dy32 = Pfy.z + jitter*oy32; `, t += ` vec3 dz32 = Pfz.y + jitter*oz32; `, t += ` `, t += ` vec3 dx33 = Pfx + jitter*ox33; `, t += ` vec3 dy33 = Pfy.z + jitter*oy33; `, t += ` vec3 dz33 = Pfz.z + jitter*oz33; `, t += ` `, t += ` vec3 d11 = dist(dx11, dy11, dz11, manhattanDistance); `, t += ` vec3 d12 =dist(dx12, dy12, dz12, manhattanDistance); `, t += ` vec3 d13 = dist(dx13, dy13, dz13, manhattanDistance); `, t += ` vec3 d21 = dist(dx21, dy21, dz21, manhattanDistance); `, t += ` vec3 d22 = dist(dx22, dy22, dz22, manhattanDistance); `, t += ` vec3 d23 = dist(dx23, dy23, dz23, manhattanDistance); `, t += ` vec3 d31 = dist(dx31, dy31, dz31, manhattanDistance); `, t += ` vec3 d32 = dist(dx32, dy32, dz32, manhattanDistance); `, t += ` vec3 d33 = dist(dx33, dy33, dz33, manhattanDistance); `, t += ` `, t += ` vec3 d1a = min(d11, d12); `, t += ` d12 = max(d11, d12); `, t += ` d11 = min(d1a, d13); // Smallest now not in d12 or d13 `, t += ` d13 = max(d1a, d13); `, t += ` d12 = min(d12, d13); // 2nd smallest now not in d13 `, t += ` vec3 d2a = min(d21, d22); `, t += ` d22 = max(d21, d22); `, t += ` d21 = min(d2a, d23); // Smallest now not in d22 or d23 `, t += ` d23 = max(d2a, d23); `, t += ` d22 = min(d22, d23); // 2nd smallest now not in d23 `, t += ` vec3 d3a = min(d31, d32); `, t += ` d32 = max(d31, d32); `, t += ` d31 = min(d3a, d33); // Smallest now not in d32 or d33 `, t += ` d33 = max(d3a, d33); `, t += ` d32 = min(d32, d33); // 2nd smallest now not in d33 `, t += ` vec3 da = min(d11, d21); `, t += ` d21 = max(d11, d21); `, t += ` d11 = min(da, d31); // Smallest now in d11 `, t += ` d31 = max(da, d31); // 2nd smallest now not in d31 `, t += ` d11.xy = (d11.x < d11.y) ? d11.xy : d11.yx; `, t += ` d11.xz = (d11.x < d11.z) ? d11.xz : d11.zx; // d11.x now smallest `, t += ` d12 = min(d12, d21); // 2nd smallest now not in d21 `, t += ` d12 = min(d12, d22); // nor in d22 `, t += ` d12 = min(d12, d31); // nor in d31 `, t += ` d12 = min(d12, d32); // nor in d32 `, t += ` d11.yz = min(d11.yz,d12.xy); // nor in d12.yz `, t += ` d11.y = min(d11.y,d12.z); // Only two more to go `, t += ` d11.y = min(d11.y,d11.z); // Done! (Phew!) `, t += ` return sqrt(d11.xy); // F1, F2 `, t += `} `, e._emitFunction("worley3D", t, "// Worley3D"); const i = e._getFreeVariableName("worleyTemp"); return e.compilationString += `vec2 ${i} = worley(${this.seed.associatedVariableName}, ${this.jitter.associatedVariableName}, ${this.manhattanDistance}); `, this.output.hasEndpoints && (e.compilationString += this._declareOutput(this.output, e) + ` = ${i}; `), this.x.hasEndpoints && (e.compilationString += this._declareOutput(this.x, e) + ` = ${i}.x; `), this.y.hasEndpoints && (e.compilationString += this._declareOutput(this.y, e) + ` = ${i}.y; `), this; } /** * Exposes the properties to the UI? */ _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.manhattanDistance = ${this.manhattanDistance}; `; } /** * Exposes the properties to the Serialize? */ serialize() { const e = super.serialize(); return e.manhattanDistance = this.manhattanDistance, e; } /** * Exposes the properties to the deserialize? 
* @param serializationObject * @param scene * @param rootUrl */ _deserialize(e, t, i) { super._deserialize(e, t, i), this.manhattanDistance = e.manhattanDistance; } } F([ ir("Use Manhattan Distance", $i.Boolean, "PROPERTIES", { notifiers: { update: !1 } }) ], xW.prototype, "manhattanDistance", void 0); Be("BABYLON.WorleyNoise3DBlock", xW); class Ase extends Wi { /** * Creates a new SimplexPerlin3DBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("seed", ue.Vector3), this.registerOutput("output", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SimplexPerlin3DBlock"; } /** * Gets the seed operand input component */ get seed() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (super._buildBlock(e), !this.seed.isConnected || !this._outputs[0].hasEndpoints) return; let t = `const float SKEWFACTOR = 1.0/3.0; `; return t += `const float UNSKEWFACTOR = 1.0/6.0; `, t += `const float SIMPLEX_CORNER_POS = 0.5; `, t += `const float SIMPLEX_TETRAHADRON_HEIGHT = 0.70710678118654752440084436210485; `, t += `float SimplexPerlin3D( vec3 P ){ `, t += ` P.x = P == vec3(0., 0., 0.) ? 0.00001 : P.x; `, t += ` P *= SIMPLEX_TETRAHADRON_HEIGHT; `, t += " vec3 Pi = floor( P + dot( P, vec3( SKEWFACTOR) ) );", t += ` vec3 x0 = P - Pi + dot(Pi, vec3( UNSKEWFACTOR ) ); `, t += ` vec3 g = step(x0.yzx, x0.xyz); `, t += ` vec3 l = 1.0 - g; `, t += ` vec3 Pi_1 = min( g.xyz, l.zxy ); `, t += ` vec3 Pi_2 = max( g.xyz, l.zxy ); `, t += ` vec3 x1 = x0 - Pi_1 + UNSKEWFACTOR; `, t += ` vec3 x2 = x0 - Pi_2 + SKEWFACTOR; `, t += ` vec3 x3 = x0 - SIMPLEX_CORNER_POS; `, t += ` vec4 v1234_x = vec4( x0.x, x1.x, x2.x, x3.x ); `, t += ` vec4 v1234_y = vec4( x0.y, x1.y, x2.y, x3.y ); `, t += ` vec4 v1234_z = vec4( x0.z, x1.z, x2.z, x3.z ); `, t += ` Pi.xyz = Pi.xyz - floor(Pi.xyz * ( 1.0 / 69.0 )) * 69.0; `, t += ` vec3 Pi_inc1 = step( Pi, vec3( 69.0 - 1.5 ) ) * ( Pi + 1.0 ); `, t += ` vec4 Pt = vec4( Pi.xy, Pi_inc1.xy ) + vec2( 50.0, 161.0 ).xyxy; `, t += ` Pt *= Pt; `, t += ` vec4 V1xy_V2xy = mix( Pt.xyxy, Pt.zwzw, vec4( Pi_1.xy, Pi_2.xy ) ); `, t += ` Pt = vec4( Pt.x, V1xy_V2xy.xz, Pt.z ) * vec4( Pt.y, V1xy_V2xy.yw, Pt.w ); `, t += ` const vec3 SOMELARGEFLOATS = vec3( 635.298681, 682.357502, 668.926525 ); `, t += ` const vec3 ZINC = vec3( 48.500388, 65.294118, 63.934599 ); `, t += ` vec3 lowz_mods = vec3( 1.0 / ( SOMELARGEFLOATS.xyz + Pi.zzz * ZINC.xyz ) ); `, t += ` vec3 highz_mods = vec3( 1.0 / ( SOMELARGEFLOATS.xyz + Pi_inc1.zzz * ZINC.xyz ) ); `, t += ` Pi_1 = ( Pi_1.z < 0.5 ) ? lowz_mods : highz_mods; `, t += ` Pi_2 = ( Pi_2.z < 0.5 ) ? 
lowz_mods : highz_mods; `, t += ` vec4 hash_0 = fract( Pt * vec4( lowz_mods.x, Pi_1.x, Pi_2.x, highz_mods.x ) ) - 0.49999; `, t += ` vec4 hash_1 = fract( Pt * vec4( lowz_mods.y, Pi_1.y, Pi_2.y, highz_mods.y ) ) - 0.49999; `, t += ` vec4 hash_2 = fract( Pt * vec4( lowz_mods.z, Pi_1.z, Pi_2.z, highz_mods.z ) ) - 0.49999; `, t += ` vec4 grad_results = inversesqrt( hash_0 * hash_0 + hash_1 * hash_1 + hash_2 * hash_2 ) * ( hash_0 * v1234_x + hash_1 * v1234_y + hash_2 * v1234_z ); `, t += ` const float FINAL_NORMALIZATION = 37.837227241611314102871574478976; `, t += ` vec4 kernel_weights = v1234_x * v1234_x + v1234_y * v1234_y + v1234_z * v1234_z; `, t += ` kernel_weights = max(0.5 - kernel_weights, 0.0); `, t += ` kernel_weights = kernel_weights*kernel_weights*kernel_weights; `, t += ` return dot( kernel_weights, grad_results ) * FINAL_NORMALIZATION; `, t += `} `, e._emitFunction("SimplexPerlin3D", t, "// SimplexPerlin3D"), e.compilationString += this._declareOutput(this._outputs[0], e) + ` = SimplexPerlin3D(${this.seed.associatedVariableName}); `, this; } } Be("BABYLON.SimplexPerlin3DBlock", Ase); class yse extends Wi { /** * Creates a new NormalBlendBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("normalMap0", ue.AutoDetect), this.registerInput("normalMap1", ue.AutoDetect), this.registerOutput("output", ue.Vector3), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Color4 | ue.Vector3 | ue.Vector4), this._inputs[1].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Color4 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NormalBlendBlock"; } /** * Gets the first input component */ get normalMap0() { return this._inputs[0]; } /** * Gets the second input component */ get normalMap1() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this._inputs[0], r = this._inputs[1], s = e._getFreeVariableName("stepR"), n = e._getFreeVariableName("stepG"); return e.compilationString += `float ${s} = step(0.5, ${i.associatedVariableName}.r); `, e.compilationString += `float ${n} = step(0.5, ${i.associatedVariableName}.g); `, e.compilationString += this._declareOutput(t, e) + `; `, e.compilationString += `${t.associatedVariableName}.r = (1.0 - ${s}) * ${i.associatedVariableName}.r * ${r.associatedVariableName}.r * 2.0 + ${s} * (1.0 - (1.0 - ${i.associatedVariableName}.r) * (1.0 - ${r.associatedVariableName}.r) * 2.0); `, e.compilationString += `${t.associatedVariableName}.g = (1.0 - ${n}) * ${i.associatedVariableName}.g * ${r.associatedVariableName}.g * 2.0 + ${n} * (1.0 - (1.0 - ${i.associatedVariableName}.g) * (1.0 - ${r.associatedVariableName}.g) * 2.0); `, e.compilationString += `${t.associatedVariableName}.b = ${i.associatedVariableName}.b * ${r.associatedVariableName}.b; `, this; } } Be("BABYLON.NormalBlendBlock", yse); class Cse extends Wi { /** * Creates a new Rotate2dBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.Vector2), this.registerInput("angle", ue.Float), this.registerOutput("output", ue.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "Rotate2dBlock"; } /** * Gets the input vector */ get input() { return this._inputs[0]; } /** * Gets the input angle */ get angle() { return this._inputs[1]; } /** * Gets 
the output component */ get output() { return this._outputs[0]; } autoConfigure() { if (!this.angle.isConnected) { const e = new vs("angle"); e.value = 0, e.output.connectTo(this.angle); } } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this.angle, r = this.input; return e.compilationString += this._declareOutput(t, e) + ` = vec2(cos(${i.associatedVariableName}) * ${r.associatedVariableName}.x - sin(${i.associatedVariableName}) * ${r.associatedVariableName}.y, sin(${i.associatedVariableName}) * ${r.associatedVariableName}.x + cos(${i.associatedVariableName}) * ${r.associatedVariableName}.y); `, this; } } Be("BABYLON.Rotate2dBlock", Cse); class xse extends Wi { /** * Creates a new ReflectBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("incident", ue.AutoDetect), this.registerInput("normal", ue.AutoDetect), this.registerOutput("output", ue.Vector3), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4), this._inputs[1].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReflectBlock"; } /** * Gets the incident component */ get incident() { return this._inputs[0]; } /** * Gets the normal component */ get normal() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = reflect(${this.incident.associatedVariableName}.xyz, ${this.normal.associatedVariableName}.xyz); `, this; } } Be("BABYLON.ReflectBlock", xse); class bse extends Wi { /** * Creates a new RefractBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("incident", ue.AutoDetect), this.registerInput("normal", ue.AutoDetect), this.registerInput("ior", ue.Float), this.registerOutput("output", ue.Vector3), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4), this._inputs[1].addExcludedConnectionPointFromAllowedTypes(ue.Vector3 | ue.Vector4 | ue.Color3 | ue.Color4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RefractBlock"; } /** * Gets the incident component */ get incident() { return this._inputs[0]; } /** * Gets the normal component */ get normal() { return this._inputs[1]; } /** * Gets the index of refraction component */ get ior() { return this._inputs[2]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = refract(${this.incident.associatedVariableName}.xyz, ${this.normal.associatedVariableName}.xyz, ${this.ior.associatedVariableName}); `, this; } } Be("BABYLON.RefractBlock", bse); class Ese extends Wi { /** * Creates a new DesaturateBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("color", ue.Color3), this.registerInput("level", ue.Float), this.registerOutput("output", ue.Color3); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DesaturateBlock"; } /** * Gets the color operand input component */ get color() { return this._inputs[0]; } /** * Gets the level operand input component */ get level() { 
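// Desaturation amount: 0 leaves the color untouched, 1 fully mixes it toward the midpoint of its min and max channels (see _buildBlock below).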
return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], r = this.color.associatedVariableName, s = e._getFreeVariableName("colorMin"), n = e._getFreeVariableName("colorMax"), a = e._getFreeVariableName("colorMerge"); return e.compilationString += `float ${s} = min(min(${r}.x, ${r}.y), ${r}.z); `, e.compilationString += `float ${n} = max(max(${r}.x, ${r}.y), ${r}.z); `, e.compilationString += `float ${a} = 0.5 * (${s} + ${n}); `, e.compilationString += this._declareOutput(t, e) + ` = mix(${r}, vec3(${a}, ${a}, ${a}), ${this.level.associatedVariableName}); `, this; } } Be("BABYLON.DesaturateBlock", Ese); class XI extends Wi { /** * Create a new SheenBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.albedoScaling = !1, this.linkSheenWithAlbedo = !1, this._isUnique = !0, this.registerInput("intensity", ue.Float, !0, Le.Fragment), this.registerInput("color", ue.Color3, !0, Le.Fragment), this.registerInput("roughness", ue.Float, !0, Le.Fragment), this.registerOutput("sheen", ue.Object, Le.Fragment, new Yo("sheen", this, no.Output, XI, "SheenBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("sheenOut"), e._excludeVariableName("sheenMapData"), e._excludeVariableName("vSheenColor"), e._excludeVariableName("vSheenRoughness"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SheenBlock"; } /** * Gets the intensity input component */ get intensity() { return this._inputs[0]; } /** * Gets the color input component */ get color() { return this._inputs[1]; } /** * Gets the roughness input component */ get roughness() { return this._inputs[2]; } /** * Gets the sheen object output component */ get sheen() { return this._outputs[0]; } prepareDefines(e, t, i) { super.prepareDefines(e, t, i), i.setValue("SHEEN", !0), i.setValue("SHEEN_USE_ROUGHNESS_FROM_MAINTEXTURE", !0, !0), i.setValue("SHEEN_LINKWITHALBEDO", this.linkSheenWithAlbedo, !0), i.setValue("SHEEN_ROUGHNESS", this.roughness.isConnected, !0), i.setValue("SHEEN_ALBEDOSCALING", this.albedoScaling, !0); } /** * Gets the main code of the block (fragment side) * @param reflectionBlock instance of a ReflectionBlock null if the code must be generated without an active reflection module * @returns the shader code */ getCode(e) { let t = ""; const i = this.color.isConnected ? this.color.associatedVariableName : "vec3(1.)", r = this.intensity.isConnected ? this.intensity.associatedVariableName : "1.", s = this.roughness.isConnected ? this.roughness.associatedVariableName : "0."; return t = `#ifdef SHEEN sheenOutParams sheenOut; vec4 vSheenColor = vec4(${i}, ${r}); sheenBlock( vSheenColor, #ifdef SHEEN_ROUGHNESS ${s}, #endif roughness, #ifdef SHEEN_TEXTURE vec4(0.), 1.0, #endif reflectance, #ifdef SHEEN_LINKWITHALBEDO baseColor, surfaceAlbedo, #endif #ifdef ENVIRONMENTBRDF NdotV, environmentBrdf, #endif #if defined(REFLECTION) && defined(ENVIRONMENTBRDF) AARoughnessFactors, ${e == null ? void 0 : e._vReflectionMicrosurfaceInfosName}, ${e == null ? void 0 : e._vReflectionInfosName}, ${e == null ? void 0 : e.reflectionColor}, vLightingIntensity, #ifdef ${e == null ? void 0 : e._define3DName} ${e == null ? void 0 : e._cubeSamplerName}, #else ${e == null ? 
void 0 : e._2DSamplerName}, #endif reflectionOut.reflectionCoords, NdotVUnclamped, #ifndef LODBASEDMICROSFURACE #ifdef ${e == null ? void 0 : e._define3DName} ${e == null ? void 0 : e._cubeSamplerName}, ${e == null ? void 0 : e._cubeSamplerName}, #else ${e == null ? void 0 : e._2DSamplerName}, ${e == null ? void 0 : e._2DSamplerName}, #endif #endif #if !defined(${e == null ? void 0 : e._defineSkyboxName}) && defined(RADIANCEOCCLUSION) seo, #endif #if !defined(${e == null ? void 0 : e._defineSkyboxName}) && defined(HORIZONOCCLUSION) && defined(BUMP) && defined(${e == null ? void 0 : e._define3DName}) eho, #endif #endif sheenOut ); #ifdef SHEEN_LINKWITHALBEDO surfaceAlbedo = sheenOut.surfaceAlbedo; #endif #endif `, t; } _buildBlock(e) { return e.target === Le.Fragment && e.sharedData.blocksWithDefines.push(this), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.albedoScaling = ${this.albedoScaling}; `, e += `${this._codeVariableName}.linkSheenWithAlbedo = ${this.linkSheenWithAlbedo}; `, e; } serialize() { const e = super.serialize(); return e.albedoScaling = this.albedoScaling, e.linkSheenWithAlbedo = this.linkSheenWithAlbedo, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.albedoScaling = e.albedoScaling, this.linkSheenWithAlbedo = e.linkSheenWithAlbedo; } } F([ ir("Albedo scaling", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], XI.prototype, "albedoScaling", void 0); F([ ir("Link sheen with albedo", $i.Boolean, "PROPERTIES", { notifiers: { update: !0 } }) ], XI.prototype, "linkSheenWithAlbedo", void 0); Be("BABYLON.SheenBlock", XI); class AN extends Wi { /** * Create a new AnisotropyBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._tangentCorrectionFactorName = "", this._isUnique = !0, this.registerInput("intensity", ue.Float, !0, Le.Fragment), this.registerInput("direction", ue.Vector2, !0, Le.Fragment), this.registerInput("uv", ue.Vector2, !0), this.registerInput("worldTangent", ue.Vector4, !0), this.registerInput("TBN", ue.Object, !0, Le.VertexAndFragment, new Yo("TBN", this, no.Input, jI, "TBNBlock")), this.registerInput("roughness", ue.Float, !0, Le.Fragment), this.registerOutput("anisotropy", ue.Object, Le.Fragment, new Yo("anisotropy", this, no.Output, AN, "AnisotropyBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("anisotropicOut"), e._excludeVariableName("TBN"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "AnisotropyBlock"; } /** * Gets the intensity input component */ get intensity() { return this._inputs[0]; } /** * Gets the direction input component */ get direction() { return this._inputs[1]; } /** * Gets the uv input component */ get uv() { return this._inputs[2]; } /** * Gets the worldTangent input component */ get worldTangent() { return this._inputs[3]; } /** * Gets the TBN input component */ // eslint-disable-next-line @typescript-eslint/naming-convention get TBN() { return this._inputs[4]; } /** * Gets the roughness input component */ get roughness() { return this._inputs[5]; } /** * Gets the anisotropy object output component */ get anisotropy() { return this._outputs[0]; } _generateTBNSpace(e) { let t = ""; const i = `//${this.name}`, r = this.uv, s = this.worldPositionConnectionPoint, n = this.worldNormalConnectionPoint, a = this.worldTangent; r.isConnected || 
Ce.Error("You must connect the 'uv' input of the Anisotropy block!"), e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"); const l = { search: /defined\(TANGENT\)/g, replace: a.isConnected ? "defined(TANGENT)" : "defined(IGNORE)" }, o = this.TBN; return o.isConnected ? e.compilationString += ` #ifdef TBNBLOCK mat3 vTBN = ${o.associatedVariableName}; #endif ` : a.isConnected && (t += `vec3 tbnNormal = normalize(${n.associatedVariableName}.xyz); `, t += `vec3 tbnTangent = normalize(${a.associatedVariableName}.xyz); `, t += `vec3 tbnBitangent = cross(tbnNormal, tbnTangent) * ${this._tangentCorrectionFactorName}; `, t += `mat3 vTBN = mat3(tbnTangent, tbnBitangent, tbnNormal); `), t += ` #if defined(${a.isConnected ? "TANGENT" : "IGNORE"}) && defined(NORMAL) mat3 TBN = vTBN; #else mat3 TBN = cotangent_frame(${n.associatedVariableName + ".xyz"}, ${"v_" + s.associatedVariableName + ".xyz"}, ${r.isConnected ? r.associatedVariableName : "vec2(0.)"}, vec2(1., 1.)); #endif `, e._emitFunctionFromInclude("bumpFragmentMainFunctions", i, { replaceStrings: [l] }), t; } /** * Gets the main code of the block (fragment side) * @param state current state of the node material building * @param generateTBNSpace if true, the code needed to create the TBN coordinate space is generated * @returns the shader code */ getCode(e, t = !1) { let i = ""; t && (i += this._generateTBNSpace(e)); const r = this.intensity.isConnected ? this.intensity.associatedVariableName : "1.0", s = this.direction.isConnected ? this.direction.associatedVariableName : "vec2(1., 0.)", n = this.roughness.isConnected ? this.roughness.associatedVariableName : "0."; return i += `anisotropicOutParams anisotropicOut; anisotropicBlock( vec3(${s}, ${r}), ${n}, #ifdef ANISOTROPIC_TEXTURE vec3(0.), #endif TBN, normalW, viewDirectionW, anisotropicOut ); `, i; } prepareDefines(e, t, i) { super.prepareDefines(e, t, i), i.setValue("ANISOTROPIC", !0), i.setValue("ANISOTROPIC_TEXTURE", !1, !0), i.setValue("ANISOTROPIC_LEGACY", !this.roughness.isConnected); } bind(e, t, i) { super.bind(e, t, i), i && e.setFloat(this._tangentCorrectionFactorName, i.getWorldMatrix().determinant() < 0 ? -1 : 1); } _buildBlock(e) { return e.target === Le.Fragment && (e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this), this._tangentCorrectionFactorName = e._getFreeDefineName("tangentCorrectionFactor"), e._emitUniformFromString(this._tangentCorrectionFactorName, "float")), this; } } Be("BABYLON.AnisotropyBlock", AN); class YI extends fL { _onGenerateOnlyFragmentCodeChanged() { return this.position.isConnected ? (this.generateOnlyFragmentCode = !this.generateOnlyFragmentCode, Ce.Error("The position input must not be connected to be able to switch!"), !1) : (this._setTarget(), !0); } _setTarget() { super._setTarget(), this.getInputByName("position").target = this.generateOnlyFragmentCode ? 
Le.Fragment : Le.Vertex, this.generateOnlyFragmentCode && (this.forceIrradianceInFragment = !0); } /** * Create a new ReflectionBlock * @param name defines the block name */ constructor(e) { super(e), this.useSphericalHarmonics = !0, this.forceIrradianceInFragment = !1, this._isUnique = !0, this.registerInput("position", ue.AutoDetect, !1, Le.Vertex), this.registerInput("world", ue.Matrix, !1, Le.Vertex), this.registerInput("color", ue.Color3, !0, Le.Fragment), this.registerOutput("reflection", ue.Object, Le.Fragment, new Yo("reflection", this, no.Output, YI, "ReflectionBlock")), this.position.addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ReflectionBlock"; } /** * Gets the position input component */ get position() { return this._inputs[0]; } /** * Gets the world position input component */ get worldPosition() { return this.worldPositionConnectionPoint; } /** * Gets the world normal input component */ get worldNormal() { return this.worldNormalConnectionPoint; } /** * Gets the world input component */ get world() { return this._inputs[1]; } /** * Gets the camera (or eye) position component */ get cameraPosition() { return this.cameraPositionConnectionPoint; } /** * Gets the view input component */ get view() { return this.viewConnectionPoint; } /** * Gets the color input component */ get color() { return this._inputs[2]; } /** * Gets the reflection object output component */ get reflection() { return this._outputs[0]; } /** * Returns true if the block has a texture (either its own texture or the environment texture from the scene, if set) */ get hasTexture() { return !!this._getTexture(); } /** * Gets the reflection color (either the name of the variable if the color input is connected, else a default value) */ get reflectionColor() { return this.color.isConnected ? this.color.associatedVariableName : "vec3(1., 1., 1.)"; } _getTexture() { return this.texture ? this.texture : this._scene.environmentTexture; } prepareDefines(e, t, i) { super.prepareDefines(e, t, i); const r = this._getTexture(), s = r && r.getTextureMatrix; i.setValue("REFLECTION", s, !0), s && (i.setValue(this._defineLODReflectionAlpha, r.lodLevelInAlpha, !0), i.setValue(this._defineLinearSpecularReflection, r.linearSpecularLOD, !0), i.setValue(this._defineOppositeZ, this._scene.useRightHandedSystem ? !r.invertZ : r.invertZ, !0), i.setValue("SPHERICAL_HARMONICS", this.useSphericalHarmonics, !0), i.setValue("GAMMAREFLECTION", r.gammaSpace, !0), i.setValue("RGBDREFLECTION", r.isRGBD, !0), r && r.coordinatesMode !== De.SKYBOX_MODE && r.isCube && (i.setValue("USESPHERICALFROMREFLECTIONMAP", !0), i.setValue("USEIRRADIANCEMAP", !1), this.forceIrradianceInFragment || this._scene.getEngine().getCaps().maxVaryingVectors <= 8 ? i.setValue("USESPHERICALINVERTEX", !1) : i.setValue("USESPHERICALINVERTEX", !0))); } bind(e, t, i, r) { super.bind(e, t, i); const s = this._getTexture(); if (!s || !r) return; s.isCube ? 
e.setTexture(this._cubeSamplerName, s) : e.setTexture(this._2DSamplerName, s); const n = s.getSize().width; e.setFloat3(this._vReflectionMicrosurfaceInfosName, n, s.lodGenerationScale, s.lodGenerationOffset), e.setFloat2(this._vReflectionFilteringInfoName, n, yt.Log2(n)); const a = r.materialDefines, l = s.sphericalPolynomial; if (a.USESPHERICALFROMREFLECTIONMAP && l) if (a.SPHERICAL_HARMONICS) { const o = l.preScaledHarmonics; e.setVector3("vSphericalL00", o.l00), e.setVector3("vSphericalL1_1", o.l1_1), e.setVector3("vSphericalL10", o.l10), e.setVector3("vSphericalL11", o.l11), e.setVector3("vSphericalL2_2", o.l2_2), e.setVector3("vSphericalL2_1", o.l2_1), e.setVector3("vSphericalL20", o.l20), e.setVector3("vSphericalL21", o.l21), e.setVector3("vSphericalL22", o.l22); } else e.setFloat3("vSphericalX", l.x.x, l.x.y, l.x.z), e.setFloat3("vSphericalY", l.y.x, l.y.y, l.y.z), e.setFloat3("vSphericalZ", l.z.x, l.z.y, l.z.z), e.setFloat3("vSphericalXX_ZZ", l.xx.x - l.zz.x, l.xx.y - l.zz.y, l.xx.z - l.zz.z), e.setFloat3("vSphericalYY_ZZ", l.yy.x - l.zz.x, l.yy.y - l.zz.y, l.yy.z - l.zz.z), e.setFloat3("vSphericalZZ", l.zz.x, l.zz.y, l.zz.z), e.setFloat3("vSphericalXY", l.xy.x, l.xy.y, l.xy.z), e.setFloat3("vSphericalYZ", l.yz.x, l.yz.y, l.yz.z), e.setFloat3("vSphericalZX", l.zx.x, l.zx.y, l.zx.z); } /** * Gets the code to inject in the vertex shader * @param state current state of the node material building * @returns the shader code */ handleVertexSide(e) { let t = super.handleVertexSide(e); e._emitFunctionFromInclude("harmonicsFunctions", `//${this.name}`, { replaceStrings: [ { search: /uniform vec3 vSphericalL00;[\s\S]*?uniform vec3 vSphericalL22;/g, replace: "" }, { search: /uniform vec3 vSphericalX;[\s\S]*?uniform vec3 vSphericalZX;/g, replace: "" } ] }); const i = e._getFreeVariableName("reflectionVector"); return this._vEnvironmentIrradianceName = e._getFreeVariableName("vEnvironmentIrradiance"), e._emitVaryingFromString(this._vEnvironmentIrradianceName, "vec3", "defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX)"), e._emitUniformFromString("vSphericalL00", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL1_1", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL10", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL11", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL2_2", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL2_1", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL20", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL21", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalL22", "vec3", "SPHERICAL_HARMONICS"), e._emitUniformFromString("vSphericalX", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalY", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalZ", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalXX_ZZ", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalYY_ZZ", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalZZ", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalXY", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalYZ", "vec3", "SPHERICAL_HARMONICS", !0), e._emitUniformFromString("vSphericalZX", "vec3", "SPHERICAL_HARMONICS", !0), t += `#if defined(USESPHERICALFROMREFLECTIONMAP) && defined(USESPHERICALINVERTEX) vec3 ${i} = 
vec3(${this._reflectionMatrixName} * vec4(normalize(${this.worldNormal.associatedVariableName}).xyz, 0)).xyz; #ifdef ${this._defineOppositeZ} ${i}.z *= -1.0; #endif ${this._vEnvironmentIrradianceName} = computeEnvironmentIrradiance(${i}); #endif `, t; } /** * Gets the main code of the block (fragment side) * @param state current state of the node material building * @param normalVarName name of the existing variable corresponding to the normal * @returns the shader code */ getCode(e, t) { let i = ""; this.handleFragmentSideInits(e), e._emitFunctionFromInclude("harmonicsFunctions", `//${this.name}`, { replaceStrings: [ { search: /uniform vec3 vSphericalL00;[\s\S]*?uniform vec3 vSphericalL22;/g, replace: "" }, { search: /uniform vec3 vSphericalX;[\s\S]*?uniform vec3 vSphericalZX;/g, replace: "" } ] }), e._emitFunction("sampleReflection", ` #ifdef ${this._define3DName} #define sampleReflection(s, c) textureCube(s, c) #else #define sampleReflection(s, c) texture2D(s, c) #endif `, `//${this.name}`), e._emitFunction("sampleReflectionLod", ` #ifdef ${this._define3DName} #define sampleReflectionLod(s, c, l) textureCubeLodEXT(s, c, l) #else #define sampleReflectionLod(s, c, l) texture2DLodEXT(s, c, l) #endif `, `//${this.name}`); const r = ` vec3 computeReflectionCoordsPBR(vec4 worldPos, vec3 worldNormal) { ${this.handleFragmentSideCodeReflectionCoords("worldNormal", "worldPos", !0, !0)} return ${this._reflectionVectorName}; } `; return e._emitFunction("computeReflectionCoordsPBR", r, `//${this.name}`), this._vReflectionMicrosurfaceInfosName = e._getFreeVariableName("vReflectionMicrosurfaceInfos"), e._emitUniformFromString(this._vReflectionMicrosurfaceInfosName, "vec3"), this._vReflectionInfosName = e._getFreeVariableName("vReflectionInfos"), this._vReflectionFilteringInfoName = e._getFreeVariableName("vReflectionFilteringInfo"), e._emitUniformFromString(this._vReflectionFilteringInfoName, "vec2"), i += `#ifdef REFLECTION vec2 ${this._vReflectionInfosName} = vec2(1., 0.); reflectionOutParams reflectionOut; reflectionBlock( ${this.generateOnlyFragmentCode ? 
this._worldPositionNameInFragmentOnlyMode : "v_" + this.worldPosition.associatedVariableName}.xyz, ${t}, alphaG, ${this._vReflectionMicrosurfaceInfosName}, ${this._vReflectionInfosName}, ${this.reflectionColor}, #ifdef ANISOTROPIC anisotropicOut, #endif #if defined(${this._defineLODReflectionAlpha}) && !defined(${this._defineSkyboxName}) NdotVUnclamped, #endif #ifdef ${this._defineLinearSpecularReflection} roughness, #endif #ifdef ${this._define3DName} ${this._cubeSamplerName}, #else ${this._2DSamplerName}, #endif #if defined(NORMAL) && defined(USESPHERICALINVERTEX) ${this._vEnvironmentIrradianceName}, #endif #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) ${this._reflectionMatrixName}, #endif #endif #ifdef USEIRRADIANCEMAP irradianceSampler, // ** not handled ** #endif #ifndef LODBASEDMICROSFURACE #ifdef ${this._define3DName} ${this._cubeSamplerName}, ${this._cubeSamplerName}, #else ${this._2DSamplerName}, ${this._2DSamplerName}, #endif #endif #ifdef REALTIME_FILTERING ${this._vReflectionFilteringInfoName}, #endif reflectionOut ); #endif `, i; } _buildBlock(e) { return this._scene = e.sharedData.scene, e.target !== Le.Fragment && (this._defineLODReflectionAlpha = e._getFreeDefineName("LODINREFLECTIONALPHA"), this._defineLinearSpecularReflection = e._getFreeDefineName("LINEARSPECULARREFLECTION")), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return this.texture && (e += `${this._codeVariableName}.texture.gammaSpace = ${this.texture.gammaSpace}; `), e += `${this._codeVariableName}.useSphericalHarmonics = ${this.useSphericalHarmonics}; `, e += `${this._codeVariableName}.forceIrradianceInFragment = ${this.forceIrradianceInFragment}; `, e; } serialize() { var e, t; const i = super.serialize(); return i.useSphericalHarmonics = this.useSphericalHarmonics, i.forceIrradianceInFragment = this.forceIrradianceInFragment, i.gammaSpace = (t = (e = this.texture) === null || e === void 0 ? void 0 : e.gammaSpace) !== null && t !== void 0 ? 
t : !0, i; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.useSphericalHarmonics = e.useSphericalHarmonics, this.forceIrradianceInFragment = e.forceIrradianceInFragment, this.texture && (this.texture.gammaSpace = e.gammaSpace); } } F([ ir("Spherical Harmonics", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], YI.prototype, "useSphericalHarmonics", void 0); F([ ir("Force irradiance in fragment", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], YI.prototype, "forceIrradianceInFragment", void 0); Be("BABYLON.ReflectionBlock", YI); class xP extends Wi { /** * Create a new ClearCoatBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._tangentCorrectionFactorName = "", this.remapF0OnInterfaceChange = !0, this._isUnique = !0, this.registerInput("intensity", ue.Float, !1, Le.Fragment), this.registerInput("roughness", ue.Float, !0, Le.Fragment), this.registerInput("indexOfRefraction", ue.Float, !0, Le.Fragment), this.registerInput("normalMapColor", ue.Color3, !0, Le.Fragment), this.registerInput("uv", ue.Vector2, !0, Le.Fragment), this.registerInput("tintColor", ue.Color3, !0, Le.Fragment), this.registerInput("tintAtDistance", ue.Float, !0, Le.Fragment), this.registerInput("tintThickness", ue.Float, !0, Le.Fragment), this.registerInput("worldTangent", ue.Vector4, !0), this.registerInput("worldNormal", ue.AutoDetect, !0), this.worldNormal.addExcludedConnectionPointFromAllowedTypes(ue.Color4 | ue.Vector4 | ue.Vector3), this.registerInput("TBN", ue.Object, !0, Le.VertexAndFragment, new Yo("TBN", this, no.Input, jI, "TBNBlock")), this.registerOutput("clearcoat", ue.Object, Le.Fragment, new Yo("clearcoat", this, no.Output, xP, "ClearCoatBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("clearcoatOut"), e._excludeVariableName("vClearCoatParams"), e._excludeVariableName("vClearCoatTintParams"), e._excludeVariableName("vClearCoatRefractionParams"), e._excludeVariableName("vClearCoatTangentSpaceParams"), e._excludeVariableName("vGeometricNormaClearCoatW"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ClearCoatBlock"; } /** * Gets the intensity input component */ get intensity() { return this._inputs[0]; } /** * Gets the roughness input component */ get roughness() { return this._inputs[1]; } /** * Gets the ior input component */ get indexOfRefraction() { return this._inputs[2]; } /** * Gets the bump texture input component */ get normalMapColor() { return this._inputs[3]; } /** * Gets the uv input component */ get uv() { return this._inputs[4]; } /** * Gets the tint color input component */ get tintColor() { return this._inputs[5]; } /** * Gets the tint "at distance" input component */ get tintAtDistance() { return this._inputs[6]; } /** * Gets the tint thickness input component */ get tintThickness() { return this._inputs[7]; } /** * Gets the world tangent input component */ get worldTangent() { return this._inputs[8]; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[9]; } /** * Gets the TBN input component */ // eslint-disable-next-line @typescript-eslint/naming-convention get TBN() { return this._inputs[10]; } /** * Gets the clear coat object output component */ get clearcoat() { return this._outputs[0]; } autoConfigure() { if (!this.intensity.isConnected) { const e = new vs("ClearCoat intensity", Le.Fragment, ue.Float); 
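// No intensity connected: auto-create a float InputBlock set to 1 so the clear coat layer defaults to full strength.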
e.value = 1, e.output.connectTo(this.intensity); } } prepareDefines(e, t, i) { super.prepareDefines(e, t, i), i.setValue("CLEARCOAT", !0), i.setValue("CLEARCOAT_TEXTURE", !1, !0), i.setValue("CLEARCOAT_USE_ROUGHNESS_FROM_MAINTEXTURE", !0, !0), i.setValue("CLEARCOAT_TINT", this.tintColor.isConnected || this.tintThickness.isConnected || this.tintAtDistance.isConnected, !0), i.setValue("CLEARCOAT_BUMP", this.normalMapColor.isConnected, !0), i.setValue("CLEARCOAT_DEFAULTIOR", this.indexOfRefraction.isConnected ? this.indexOfRefraction.connectInputBlock.value === _u._DefaultIndexOfRefraction : !0, !0), i.setValue("CLEARCOAT_REMAP_F0", this.remapF0OnInterfaceChange, !0); } bind(e, t, i) { var r, s; super.bind(e, t, i); const n = (s = (r = this.indexOfRefraction.connectInputBlock) === null || r === void 0 ? void 0 : r.value) !== null && s !== void 0 ? s : _u._DefaultIndexOfRefraction, a = 1 - n, l = 1 + n, o = Math.pow(-a / l, 2), u = 1 / n; e.setFloat4("vClearCoatRefractionParams", o, u, a, l); const h = this.clearcoat.hasEndpoints ? this.clearcoat.endpoints[0].ownerBlock : null, d = h != null && h.perturbedNormal.isConnected ? h.perturbedNormal.connectedPoint.ownerBlock : null; this._scene._mirroredCameraPosition ? e.setFloat2("vClearCoatTangentSpaceParams", d != null && d.invertX ? 1 : -1, d != null && d.invertY ? 1 : -1) : e.setFloat2("vClearCoatTangentSpaceParams", d != null && d.invertX ? -1 : 1, d != null && d.invertY ? -1 : 1), i && e.setFloat(this._tangentCorrectionFactorName, i.getWorldMatrix().determinant() < 0 ? -1 : 1); } _generateTBNSpace(e, t, i) { let r = ""; const s = `//${this.name}`, n = this.worldTangent; e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"); const a = { search: /defined\(TANGENT\)/g, replace: n.isConnected ? "defined(TANGENT)" : "defined(IGNORE)" }, l = this.TBN; return l.isConnected ? e.compilationString += ` #ifdef TBNBLOCK mat3 vTBN = ${l.associatedVariableName}; #endif ` : n.isConnected && (r += `vec3 tbnNormal = normalize(${i}.xyz); `, r += `vec3 tbnTangent = normalize(${n.associatedVariableName}.xyz); `, r += `vec3 tbnBitangent = cross(tbnNormal, tbnTangent) * ${this._tangentCorrectionFactorName}; `, r += `mat3 vTBN = mat3(tbnTangent, tbnBitangent, tbnNormal); `), e._emitFunctionFromInclude("bumpFragmentMainFunctions", s, { replaceStrings: [a] }), r; } /** * Gets the main code of the block (fragment side) * @param state current state of the node material building * @param ccBlock instance of a ClearCoatBlock or null if the code must be generated without an active clear coat module * @param reflectionBlock instance of a ReflectionBlock null if the code must be generated without an active reflection module * @param worldPosVarName name of the variable holding the world position * @param generateTBNSpace if true, the code needed to create the TBN coordinate space is generated * @param vTBNAvailable indicate that the vTBN variable is already existing because it has already been generated by another block (PerturbNormal or Anisotropy) * @param worldNormalVarName name of the variable holding the world normal * @returns the shader code */ static GetCode(e, t, i, r, s, n, a) { let l = ""; const o = t != null && t.intensity.isConnected ? t.intensity.associatedVariableName : "1.", u = t != null && t.roughness.isConnected ? t.roughness.associatedVariableName : "0.", h = t != null && t.normalMapColor.isConnected ? t.normalMapColor.associatedVariableName : "vec3(0.)", d = t != null && t.uv.isConnected ? 
t.uv.associatedVariableName : "vec2(0.)", f = t != null && t.tintColor.isConnected ? t.tintColor.associatedVariableName : "vec3(1.)", p = t != null && t.tintThickness.isConnected ? t.tintThickness.associatedVariableName : "1.", m = t != null && t.tintAtDistance.isConnected ? t.tintAtDistance.associatedVariableName : "1.", _ = "vec4(0.)"; if (t) { e._emitUniformFromString("vClearCoatRefractionParams", "vec4"), e._emitUniformFromString("vClearCoatTangentSpaceParams", "vec2"); const v = t.worldNormal; l += `vec3 vGeometricNormaClearCoatW = ${v.isConnected ? "normalize(" + v.associatedVariableName + ".xyz)" : "geometricNormalW"}; `; } else l += `vec3 vGeometricNormaClearCoatW = geometricNormalW; `; return s && t && (l += t._generateTBNSpace(e, r, a), n = t.worldTangent.isConnected), l += `clearcoatOutParams clearcoatOut; #ifdef CLEARCOAT vec2 vClearCoatParams = vec2(${o}, ${u}); vec4 vClearCoatTintParams = vec4(${f}, ${p}); clearcoatBlock( ${r}.xyz, vGeometricNormaClearCoatW, viewDirectionW, vClearCoatParams, specularEnvironmentR0, #ifdef CLEARCOAT_TEXTURE vec2(0.), #endif #ifdef CLEARCOAT_TINT vClearCoatTintParams, ${m}, vClearCoatRefractionParams, #ifdef CLEARCOAT_TINT_TEXTURE ${_}, #endif #endif #ifdef CLEARCOAT_BUMP vec2(0., 1.), vec4(${h}, 0.), ${d}, #if defined(${n ? "TANGENT" : "IGNORE"}) && defined(NORMAL) vTBN, #else vClearCoatTangentSpaceParams, #endif #ifdef OBJECTSPACE_NORMALMAP normalMatrix, #endif #endif #if defined(FORCENORMALFORWARD) && defined(NORMAL) faceNormal, #endif #ifdef REFLECTION ${i == null ? void 0 : i._vReflectionMicrosurfaceInfosName}, ${i == null ? void 0 : i._vReflectionInfosName}, ${i == null ? void 0 : i.reflectionColor}, vLightingIntensity, #ifdef ${i == null ? void 0 : i._define3DName} ${i == null ? void 0 : i._cubeSamplerName}, #else ${i == null ? void 0 : i._2DSamplerName}, #endif #ifndef LODBASEDMICROSFURACE #ifdef ${i == null ? void 0 : i._define3DName} ${i == null ? void 0 : i._cubeSamplerName}, ${i == null ? void 0 : i._cubeSamplerName}, #else ${i == null ? void 0 : i._2DSamplerName}, ${i == null ? void 0 : i._2DSamplerName}, #endif #endif #endif #if defined(ENVIRONMENTBRDF) && !defined(${i == null ? void 0 : i._defineSkyboxName}) #ifdef RADIANCEOCCLUSION ambientMonochrome, #endif #endif #if defined(CLEARCOAT_BUMP) || defined(TWOSIDEDLIGHTING) (gl_FrontFacing ? 1. : -1.), #endif clearcoatOut ); #else clearcoatOut.specularEnvironmentR0 = specularEnvironmentR0; #endif `, l; } _buildBlock(e) { return this._scene = e.sharedData.scene, e.target === Le.Fragment && (e.sharedData.bindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this), this._tangentCorrectionFactorName = e._getFreeDefineName("tangentCorrectionFactor"), e._emitUniformFromString(this._tangentCorrectionFactorName, "float")), this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.remapF0OnInterfaceChange = ${this.remapF0OnInterfaceChange}; `, e; } serialize() { const e = super.serialize(); return e.remapF0OnInterfaceChange = this.remapF0OnInterfaceChange, e; } _deserialize(e, t, i) { var r; super._deserialize(e, t, i), this.remapF0OnInterfaceChange = (r = e.remapF0OnInterfaceChange) !== null && r !== void 0 ? 
r : !0; } } F([ ir("Remap F0 on interface change", $i.Boolean, "ADVANCED") ], xP.prototype, "remapF0OnInterfaceChange", void 0); Be("BABYLON.ClearCoatBlock", xP); class pL extends Wi { /** * Create a new IridescenceBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._isUnique = !0, this.registerInput("intensity", ue.Float, !0, Le.Fragment), this.registerInput("indexOfRefraction", ue.Float, !0, Le.Fragment), this.registerInput("thickness", ue.Float, !0, Le.Fragment), this.registerOutput("iridescence", ue.Object, Le.Fragment, new Yo("iridescence", this, no.Output, pL, "IridescenceBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("iridescenceOut"), e._excludeVariableName("vIridescenceParams"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "IridescenceBlock"; } /** * Gets the intensity input component */ get intensity() { return this._inputs[0]; } /** * Gets the indexOfRefraction input component */ get indexOfRefraction() { return this._inputs[1]; } /** * Gets the thickness input component */ get thickness() { return this._inputs[2]; } /** * Gets the iridescence object output component */ get iridescence() { return this._outputs[0]; } autoConfigure() { if (!this.intensity.isConnected) { const e = new vs("Iridescence intensity", Le.Fragment, ue.Float); e.value = 1, e.output.connectTo(this.intensity); const t = new vs("Iridescence ior", Le.Fragment, ue.Float); t.value = 1.3, t.output.connectTo(this.indexOfRefraction); const i = new vs("Iridescence thickness", Le.Fragment, ue.Float); i.value = 400, i.output.connectTo(this.thickness); } } prepareDefines(e, t, i) { super.prepareDefines(e, t, i), i.setValue("IRIDESCENCE", !0, !0), i.setValue("IRIDESCENCE_TEXTURE", !1, !0), i.setValue("IRIDESCENCE_THICKNESS_TEXTURE", !1, !0); } /** * Gets the main code of the block (fragment side) * @param iridescenceBlock instance of a IridescenceBlock or null if the code must be generated without an active iridescence module * @returns the shader code */ static GetCode(e) { let t = ""; const i = e != null && e.intensity.isConnected ? e.intensity.associatedVariableName : "1.", r = e != null && e.indexOfRefraction.isConnected ? e.indexOfRefraction.associatedVariableName : lf._DefaultIndexOfRefraction, s = e != null && e.thickness.isConnected ? 
e.thickness.associatedVariableName : lf._DefaultMaximumThickness; return t += `iridescenceOutParams iridescenceOut; #ifdef IRIDESCENCE iridescenceBlock( vec4(${i}, ${r}, 1., ${s}), NdotV, specularEnvironmentR0, #ifdef CLEARCOAT NdotVUnclamped, #endif iridescenceOut ); float iridescenceIntensity = iridescenceOut.iridescenceIntensity; specularEnvironmentR0 = iridescenceOut.specularEnvironmentR0; #endif `, t; } _buildBlock(e) { return e.target === Le.Fragment && (e.sharedData.bindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this)), this; } serialize() { return super.serialize(); } _deserialize(e, t, i) { super._deserialize(e, t, i); } } Be("BABYLON.IridescenceBlock", pL); class S5 extends Wi { /** * Create a new RefractionBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this.linkRefractionWithTransparency = !1, this.invertRefractionY = !1, this.useThicknessAsDepth = !1, this._isUnique = !0, this.registerInput("intensity", ue.Float, !1, Le.Fragment), this.registerInput("tintAtDistance", ue.Float, !0, Le.Fragment), this.registerInput("volumeIndexOfRefraction", ue.Float, !0, Le.Fragment), this.registerOutput("refraction", ue.Object, Le.Fragment, new Yo("refraction", this, no.Output, S5, "RefractionBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("vRefractionPosition"), e._excludeVariableName("vRefractionSize"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RefractionBlock"; } /** * Gets the intensity input component */ get intensity() { return this._inputs[0]; } /** * Gets the tint at distance input component */ get tintAtDistance() { return this._inputs[1]; } /** * Gets the volume index of refraction input component */ get volumeIndexOfRefraction() { return this._inputs[2]; } /** * Gets the view input component */ get view() { return this.viewConnectionPoint; } /** * Gets the refraction object output component */ get refraction() { return this._outputs[0]; } /** * Returns true if the block has a texture */ get hasTexture() { return !!this._getTexture(); } _getTexture() { return this.texture ? this.texture : this._scene.environmentTexture; } autoConfigure(e, t = () => !0) { if (!this.intensity.isConnected) { const i = new vs("Refraction intensity", Le.Fragment, ue.Float); i.value = 1, i.output.connectTo(this.intensity); } if (this.view && !this.view.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.View && t(r)); i || (i = new vs("view"), i.setAsSystemValue(Ms.View)), i.output.connectTo(this.view); } } prepareDefines(e, t, i) { super.prepareDefines(e, t, i); const r = this._getTexture(), s = r && r.getTextureMatrix; i.setValue("SS_REFRACTION", s, !0), s && (i.setValue(this._define3DName, r.isCube, !0), i.setValue(this._defineLODRefractionAlpha, r.lodLevelInAlpha, !0), i.setValue(this._defineLinearSpecularRefraction, r.linearSpecularLOD, !0), i.setValue(this._defineOppositeZ, this._scene.useRightHandedSystem && r.isCube ? 
!r.invertZ : r.invertZ, !0), i.setValue("SS_LINKREFRACTIONTOTRANSPARENCY", this.linkRefractionWithTransparency, !0), i.setValue("SS_GAMMAREFRACTION", r.gammaSpace, !0), i.setValue("SS_RGBDREFRACTION", r.isRGBD, !0), i.setValue("SS_USE_LOCAL_REFRACTIONMAP_CUBIC", !!r.boundingBoxSize, !0), i.setValue("SS_USE_THICKNESS_AS_DEPTH", this.useThicknessAsDepth, !0)); } isReady() { const e = this._getTexture(); return !(e && !e.isReadyOrNotBlocking()); } bind(e, t, i) { var r, s, n, a; super.bind(e, t, i); const l = this._getTexture(); if (!l) return; l.isCube ? e.setTexture(this._cubeSamplerName, l) : e.setTexture(this._2DSamplerName, l), e.setMatrix(this._refractionMatrixName, l.getRefractionTextureMatrix()); let o = 1; l.isCube || l.depth && (o = l.depth); const u = (a = (s = (r = this.volumeIndexOfRefraction.connectInputBlock) === null || r === void 0 ? void 0 : r.value) !== null && s !== void 0 ? s : (n = this.indexOfRefractionConnectionPoint.connectInputBlock) === null || n === void 0 ? void 0 : n.value) !== null && a !== void 0 ? a : 1.5; e.setFloat4(this._vRefractionInfosName, l.level, 1 / u, o, this.invertRefractionY ? -1 : 1), e.setFloat4(this._vRefractionMicrosurfaceInfosName, l.getSize().width, l.lodGenerationScale, l.lodGenerationOffset, 1 / u); const h = l.getSize().width; if (e.setFloat2(this._vRefractionFilteringInfoName, h, yt.Log2(h)), l.boundingBoxSize) { const d = l; e.setVector3("vRefractionPosition", d.boundingBoxPosition), e.setVector3("vRefractionSize", d.boundingBoxSize); } } /** * Gets the main code of the block (fragment side) * @param state current state of the node material building * @returns the shader code */ getCode(e) { const t = ""; return e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), this._cubeSamplerName = e._getFreeVariableName(this.name + "CubeSampler"), e.samplers.push(this._cubeSamplerName), this._2DSamplerName = e._getFreeVariableName(this.name + "2DSampler"), e.samplers.push(this._2DSamplerName), this._define3DName = e._getFreeDefineName("SS_REFRACTIONMAP_3D"), e._samplerDeclaration += `#ifdef ${this._define3DName} `, e._samplerDeclaration += `uniform samplerCube ${this._cubeSamplerName}; `, e._samplerDeclaration += `#else `, e._samplerDeclaration += `uniform sampler2D ${this._2DSamplerName}; `, e._samplerDeclaration += `#endif `, e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this), this._defineLODRefractionAlpha = e._getFreeDefineName("SS_LODINREFRACTIONALPHA"), this._defineLinearSpecularRefraction = e._getFreeDefineName("SS_LINEARSPECULARREFRACTION"), this._defineOppositeZ = e._getFreeDefineName("SS_REFRACTIONMAP_OPPOSITEZ"), this._refractionMatrixName = e._getFreeVariableName("refractionMatrix"), e._emitUniformFromString(this._refractionMatrixName, "mat4"), e._emitFunction("sampleRefraction", ` #ifdef ${this._define3DName} #define sampleRefraction(s, c) textureCube(s, c) #else #define sampleRefraction(s, c) texture2D(s, c) #endif `, `//${this.name}`), e._emitFunction("sampleRefractionLod", ` #ifdef ${this._define3DName} #define sampleRefractionLod(s, c, l) textureCubeLodEXT(s, c, l) #else #define sampleRefractionLod(s, c, l) texture2DLodEXT(s, c, l) #endif `, `//${this.name}`), this._vRefractionMicrosurfaceInfosName = e._getFreeVariableName("vRefractionMicrosurfaceInfos"), e._emitUniformFromString(this._vRefractionMicrosurfaceInfosName, "vec4"), this._vRefractionInfosName = e._getFreeVariableName("vRefractionInfos"), e._emitUniformFromString(this._vRefractionInfosName, "vec4"), 
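/*
 * Descriptive note (added): bind() above fills these refraction uniforms from the refraction
 * texture and the effective IOR (volumeIndexOfRefraction if connected, else the material
 * indexOfRefraction, else 1.5):
 *   vRefractionInfos             = (texture level, 1 / ior, depth (1 for cube maps), invertRefractionY ? -1 : 1)
 *   vRefractionMicrosurfaceInfos = (texture width, lodGenerationScale, lodGenerationOffset, 1 / ior)
 *   vRefractionFilteringInfo     = (texture width, log2(texture width))
 */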
this._vRefractionFilteringInfoName = e._getFreeVariableName("vRefractionFilteringInfo"), e._emitUniformFromString(this._vRefractionFilteringInfoName, "vec2"), e._emitUniformFromString("vRefractionPosition", "vec3"), e._emitUniformFromString("vRefractionSize", "vec3"), t; } _buildBlock(e) { return this._scene = e.sharedData.scene, this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return this.texture && (this.texture.isCube ? e = `${this._codeVariableName}.texture = new BABYLON.CubeTexture("${this.texture.name}"); ` : e = `${this._codeVariableName}.texture = new BABYLON.Texture("${this.texture.name}"); `, e += `${this._codeVariableName}.texture.coordinatesMode = ${this.texture.coordinatesMode}; `), e += `${this._codeVariableName}.linkRefractionWithTransparency = ${this.linkRefractionWithTransparency}; `, e += `${this._codeVariableName}.invertRefractionY = ${this.invertRefractionY}; `, e += `${this._codeVariableName}.useThicknessAsDepth = ${this.useThicknessAsDepth}; `, e; } serialize() { const e = super.serialize(); return this.texture && !this.texture.isRenderTarget && (e.texture = this.texture.serialize()), e.linkRefractionWithTransparency = this.linkRefractionWithTransparency, e.invertRefractionY = this.invertRefractionY, e.useThicknessAsDepth = this.useThicknessAsDepth, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), e.texture && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, e.texture.isCube ? this.texture = ul.Parse(e.texture, t, i) : this.texture = De.Parse(e.texture, t, i)), this.linkRefractionWithTransparency = e.linkRefractionWithTransparency, this.invertRefractionY = e.invertRefractionY, this.useThicknessAsDepth = !!e.useThicknessAsDepth; } } F([ ir("Link refraction to transparency", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], S5.prototype, "linkRefractionWithTransparency", void 0); F([ ir("Invert refraction Y", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], S5.prototype, "invertRefractionY", void 0); F([ ir("Use thickness as depth", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], S5.prototype, "useThicknessAsDepth", void 0); Be("BABYLON.RefractionBlock", S5); class jO extends Wi { /** * Create a new SubSurfaceBlock * @param name defines the block name */ constructor(e) { super(e, Le.Fragment), this._isUnique = !0, this.registerInput("thickness", ue.Float, !1, Le.Fragment), this.registerInput("tintColor", ue.Color3, !0, Le.Fragment), this.registerInput("translucencyIntensity", ue.Float, !0, Le.Fragment), this.registerInput("translucencyDiffusionDist", ue.Color3, !0, Le.Fragment), this.registerInput("refraction", ue.Object, !0, Le.Fragment, new Yo("refraction", this, no.Input, S5, "RefractionBlock")), this.registerInput("dispersion", ue.Float, !0, Le.Fragment), this.registerOutput("subsurface", ue.Object, Le.Fragment, new Yo("subsurface", this, no.Output, jO, "SubSurfaceBlock")); } /** * Initialize the block and prepare the context for build * @param state defines the state that will be used for the build */ initialize(e) { e._excludeVariableName("subSurfaceOut"), e._excludeVariableName("vThicknessParam"), e._excludeVariableName("vTintColor"), e._excludeVariableName("vSubSurfaceIntensity"), e._excludeVariableName("dispersion"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SubSurfaceBlock"; } /** * Gets the thickness component */ get thickness() { return this._inputs[0]; } /** * Gets the tint color input component */ get tintColor() { return this._inputs[1]; } /** * Gets 
the translucency intensity input component */ get translucencyIntensity() { return this._inputs[2]; } /** * Gets the translucency diffusion distance input component */ get translucencyDiffusionDist() { return this._inputs[3]; } /** * Gets the refraction object parameters */ get refraction() { return this._inputs[4]; } /** * Gets the dispersion input component */ get dispersion() { return this._inputs[5]; } /** * Gets the sub surface object output component */ get subsurface() { return this._outputs[0]; } autoConfigure() { if (!this.thickness.isConnected) { const e = new vs("SubSurface thickness", Le.Fragment, ue.Float); e.value = 0, e.output.connectTo(this.thickness); } } prepareDefines(e, t, i) { super.prepareDefines(e, t, i); const r = this.translucencyDiffusionDist.isConnected || this.translucencyIntensity.isConnected; i.setValue("SUBSURFACE", r || this.refraction.isConnected, !0), i.setValue("SS_TRANSLUCENCY", r, !0), i.setValue("SS_THICKNESSANDMASK_TEXTURE", !1, !0), i.setValue("SS_REFRACTIONINTENSITY_TEXTURE", !1, !0), i.setValue("SS_TRANSLUCENCYINTENSITY_TEXTURE", !1, !0), i.setValue("SS_MASK_FROM_THICKNESS_TEXTURE", !1, !0), i.setValue("SS_USE_GLTF_TEXTURES", !1, !0), i.setValue("SS_DISPERSION", this.dispersion.isConnected, !0); } /** * Gets the main code of the block (fragment side) * @param state current state of the node material building * @param ssBlock instance of a SubSurfaceBlock or null if the code must be generated without an active sub surface module * @param reflectionBlock instance of a ReflectionBlock null if the code must be generated without an active reflection module * @param worldPosVarName name of the variable holding the world position * @returns the shader code */ static GetCode(e, t, i, r) { var s, n, a, l, o, u, h, d, f, p, m, _, v, C, x, b; let S = ""; const M = t != null && t.thickness.isConnected ? t.thickness.associatedVariableName : "0.", R = t != null && t.tintColor.isConnected ? t.tintColor.associatedVariableName : "vec3(1.)", w = t != null && t.translucencyIntensity.isConnected ? t == null ? void 0 : t.translucencyIntensity.associatedVariableName : "1.", V = t != null && t.translucencyDiffusionDist.isConnected ? t == null ? void 0 : t.translucencyDiffusionDist.associatedVariableName : "vec3(1.)", k = t != null && t.refraction.isConnected ? (s = t == null ? void 0 : t.refraction.connectedPoint) === null || s === void 0 ? void 0 : s.ownerBlock : null, L = k != null && k.tintAtDistance.isConnected ? k.tintAtDistance.associatedVariableName : "1.", B = k != null && k.intensity.isConnected ? k.intensity.associatedVariableName : "1.", U = k != null && k.view.isConnected ? k.view.associatedVariableName : "", K = t != null && t.dispersion.isConnected ? t == null ? void 0 : t.dispersion.associatedVariableName : "0.0"; return S += (n = k == null ? void 0 : k.getCode(e)) !== null && n !== void 0 ? n : "", S += `subSurfaceOutParams subSurfaceOut; #ifdef SUBSURFACE vec2 vThicknessParam = vec2(0., ${M}); vec4 vTintColor = vec4(${R}, ${L}); vec3 vSubSurfaceIntensity = vec3(${B}, ${w}, 0.); float dispersion = ${K}; subSurfaceBlock( vSubSurfaceIntensity, vThicknessParam, vTintColor, normalW, specularEnvironmentReflectance, #ifdef SS_THICKNESSANDMASK_TEXTURE vec4(0.), #endif #ifdef REFLECTION #ifdef SS_TRANSLUCENCY ${i == null ? void 0 : i._reflectionMatrixName}, #ifdef USESPHERICALFROMREFLECTIONMAP #if !defined(NORMAL) || !defined(USESPHERICALINVERTEX) reflectionOut.irradianceVector, #endif #if defined(REALTIME_FILTERING) ${i == null ? 
void 0 : i._cubeSamplerName}, ${i == null ? void 0 : i._vReflectionFilteringInfoName}, #endif #endif #ifdef USEIRRADIANCEMAP irradianceSampler, #endif #endif #endif #if defined(SS_REFRACTION) || defined(SS_TRANSLUCENCY) surfaceAlbedo, #endif #ifdef SS_REFRACTION ${r}.xyz, viewDirectionW, ${U}, ${(a = k == null ? void 0 : k._vRefractionInfosName) !== null && a !== void 0 ? a : ""}, ${(l = k == null ? void 0 : k._refractionMatrixName) !== null && l !== void 0 ? l : ""}, ${(o = k == null ? void 0 : k._vRefractionMicrosurfaceInfosName) !== null && o !== void 0 ? o : ""}, vLightingIntensity, #ifdef SS_LINKREFRACTIONTOTRANSPARENCY alpha, #endif #ifdef ${(u = k == null ? void 0 : k._defineLODRefractionAlpha) !== null && u !== void 0 ? u : "IGNORE"} NdotVUnclamped, #endif #ifdef ${(h = k == null ? void 0 : k._defineLinearSpecularRefraction) !== null && h !== void 0 ? h : "IGNORE"} roughness, #endif alphaG, #ifdef ${(d = k == null ? void 0 : k._define3DName) !== null && d !== void 0 ? d : "IGNORE"} ${(f = k == null ? void 0 : k._cubeSamplerName) !== null && f !== void 0 ? f : ""}, #else ${(p = k == null ? void 0 : k._2DSamplerName) !== null && p !== void 0 ? p : ""}, #endif #ifndef LODBASEDMICROSFURACE #ifdef ${(m = k == null ? void 0 : k._define3DName) !== null && m !== void 0 ? m : "IGNORE"} ${(_ = k == null ? void 0 : k._cubeSamplerName) !== null && _ !== void 0 ? _ : ""}, ${(v = k == null ? void 0 : k._cubeSamplerName) !== null && v !== void 0 ? v : ""}, #else ${(C = k == null ? void 0 : k._2DSamplerName) !== null && C !== void 0 ? C : ""}, ${(x = k == null ? void 0 : k._2DSamplerName) !== null && x !== void 0 ? x : ""}, #endif #endif #ifdef ANISOTROPIC anisotropicOut, #endif #ifdef REALTIME_FILTERING ${(b = k == null ? void 0 : k._vRefractionFilteringInfoName) !== null && b !== void 0 ? b : ""}, #endif #ifdef SS_USE_LOCAL_REFRACTIONMAP_CUBIC vRefractionPosition, vRefractionSize, #endif #ifdef SS_DISPERSION dispersion, #endif #endif #ifdef SS_TRANSLUCENCY ${V}, #endif subSurfaceOut ); #ifdef SS_REFRACTION surfaceAlbedo = subSurfaceOut.surfaceAlbedo; #ifdef SS_LINKREFRACTIONTOTRANSPARENCY alpha = subSurfaceOut.alpha; #endif #endif #else subSurfaceOut.specularEnvironmentReflectance = specularEnvironmentReflectance; #endif `, S; } _buildBlock(e) { return e.target === Le.Fragment && e.sharedData.blocksWithDefines.push(this), this; } } Be("BABYLON.SubSurfaceBlock", jO); const t0e = { ambientClr: ["finalAmbient", ""], diffuseDir: ["finalDiffuse", ""], specularDir: ["finalSpecularScaled", "!defined(UNLIT) && defined(SPECULARTERM)"], clearcoatDir: ["finalClearCoatScaled", "!defined(UNLIT) && defined(CLEARCOAT)"], sheenDir: ["finalSheenScaled", "!defined(UNLIT) && defined(SHEEN)"], diffuseInd: ["finalIrradiance", "!defined(UNLIT) && defined(REFLECTION)"], specularInd: ["finalRadianceScaled", "!defined(UNLIT) && defined(REFLECTION)"], clearcoatInd: ["clearcoatOut.finalClearCoatRadianceScaled", "!defined(UNLIT) && defined(REFLECTION) && defined(CLEARCOAT)"], sheenInd: ["sheenOut.finalSheenRadianceScaled", "!defined(UNLIT) && defined(REFLECTION) && defined(SHEEN) && defined(ENVIRONMENTBRDF)"], refraction: ["subSurfaceOut.finalRefraction", "!defined(UNLIT) && defined(SS_REFRACTION)"], lighting: ["finalColor.rgb", ""], shadow: ["aggShadow", ""], alpha: ["alpha", ""] }; class gu extends Wi { static _OnGenerateOnlyFragmentCodeChanged(e, t) { const i = e; return i.worldPosition.isConnected ? 
(i.generateOnlyFragmentCode = !i.generateOnlyFragmentCode, Ce.Error("The worldPosition input must not be connected to be able to switch!"), !1) : (i._setTarget(), !0); } _setTarget() { this._setInitialTarget(this.generateOnlyFragmentCode ? Le.Fragment : Le.VertexAndFragment), this.getInputByName("worldPosition").target = this.generateOnlyFragmentCode ? Le.Fragment : Le.Vertex; } /** * Create a new ReflectionBlock * @param name defines the block name */ constructor(e) { super(e, Le.VertexAndFragment), this._environmentBRDFTexture = null, this._metallicReflectanceColor = ze.White(), this._metallicF0Factor = 1, this.directIntensity = 1, this.environmentIntensity = 1, this.specularIntensity = 1, this.lightFalloff = 0, this.useAlphaTest = !1, this.alphaTestCutoff = 0.5, this.useAlphaBlending = !1, this.useRadianceOverAlpha = !0, this.useSpecularOverAlpha = !0, this.enableSpecularAntiAliasing = !1, this.realTimeFiltering = !1, this.realTimeFilteringQuality = 8, this.useEnergyConservation = !0, this.useRadianceOcclusion = !0, this.useHorizonOcclusion = !0, this.unlit = !1, this.forceNormalForward = !1, this.generateOnlyFragmentCode = !1, this.debugMode = 0, this.debugLimit = 0, this.debugFactor = 1, this._isUnique = !0, this.registerInput("worldPosition", ue.Vector4, !1, Le.Vertex), this.registerInput("worldNormal", ue.Vector4, !1, Le.Fragment), this.registerInput("view", ue.Matrix, !1), this.registerInput("cameraPosition", ue.Vector3, !1, Le.Fragment), this.registerInput("perturbedNormal", ue.Vector4, !0, Le.Fragment), this.registerInput("baseColor", ue.Color3, !0, Le.Fragment), this.registerInput("metallic", ue.Float, !1, Le.Fragment), this.registerInput("roughness", ue.Float, !1, Le.Fragment), this.registerInput("ambientOcc", ue.Float, !0, Le.Fragment), this.registerInput("opacity", ue.Float, !0, Le.Fragment), this.registerInput("indexOfRefraction", ue.Float, !0, Le.Fragment), this.registerInput("ambientColor", ue.Color3, !0, Le.Fragment), this.registerInput("reflection", ue.Object, !0, Le.Fragment, new Yo("reflection", this, no.Input, YI, "ReflectionBlock")), this.registerInput("clearcoat", ue.Object, !0, Le.Fragment, new Yo("clearcoat", this, no.Input, xP, "ClearCoatBlock")), this.registerInput("sheen", ue.Object, !0, Le.Fragment, new Yo("sheen", this, no.Input, XI, "SheenBlock")), this.registerInput("subsurface", ue.Object, !0, Le.Fragment, new Yo("subsurface", this, no.Input, jO, "SubSurfaceBlock")), this.registerInput("anisotropy", ue.Object, !0, Le.Fragment, new Yo("anisotropy", this, no.Input, AN, "AnisotropyBlock")), this.registerInput("iridescence", ue.Object, !0, Le.Fragment, new Yo("iridescence", this, no.Input, pL, "IridescenceBlock")), this.registerOutput("ambientClr", ue.Color3, Le.Fragment), this.registerOutput("diffuseDir", ue.Color3, Le.Fragment), this.registerOutput("specularDir", ue.Color3, Le.Fragment), this.registerOutput("clearcoatDir", ue.Color3, Le.Fragment), this.registerOutput("sheenDir", ue.Color3, Le.Fragment), this.registerOutput("diffuseInd", ue.Color3, Le.Fragment), this.registerOutput("specularInd", ue.Color3, Le.Fragment), this.registerOutput("clearcoatInd", ue.Color3, Le.Fragment), this.registerOutput("sheenInd", ue.Color3, Le.Fragment), this.registerOutput("refraction", ue.Color3, Le.Fragment), this.registerOutput("lighting", ue.Color3, Le.Fragment), this.registerOutput("shadow", ue.Float, Le.Fragment), this.registerOutput("alpha", ue.Float, Le.Fragment); } /** * Initialize the block and prepare the context for build * @param state defines the state that will 
be used for the build */ initialize(e) { e._excludeVariableName("vLightingIntensity"), e._excludeVariableName("geometricNormalW"), e._excludeVariableName("normalW"), e._excludeVariableName("faceNormal"), e._excludeVariableName("albedoOpacityOut"), e._excludeVariableName("surfaceAlbedo"), e._excludeVariableName("alpha"), e._excludeVariableName("aoOut"), e._excludeVariableName("baseColor"), e._excludeVariableName("reflectivityOut"), e._excludeVariableName("microSurface"), e._excludeVariableName("roughness"), e._excludeVariableName("NdotVUnclamped"), e._excludeVariableName("NdotV"), e._excludeVariableName("alphaG"), e._excludeVariableName("AARoughnessFactors"), e._excludeVariableName("environmentBrdf"), e._excludeVariableName("ambientMonochrome"), e._excludeVariableName("seo"), e._excludeVariableName("eho"), e._excludeVariableName("environmentRadiance"), e._excludeVariableName("irradianceVector"), e._excludeVariableName("environmentIrradiance"), e._excludeVariableName("diffuseBase"), e._excludeVariableName("specularBase"), e._excludeVariableName("preInfo"), e._excludeVariableName("info"), e._excludeVariableName("shadow"), e._excludeVariableName("finalDiffuse"), e._excludeVariableName("finalAmbient"), e._excludeVariableName("ambientOcclusionForDirectDiffuse"), e._excludeVariableName("finalColor"), e._excludeVariableName("vClipSpacePosition"), e._excludeVariableName("vDebugMode"); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PBRMetallicRoughnessBlock"; } /** * Gets the world position input component */ get worldPosition() { return this._inputs[0]; } /** * Gets the world normal input component */ get worldNormal() { return this._inputs[1]; } /** * Gets the view matrix parameter */ get view() { return this._inputs[2]; } /** * Gets the camera position input component */ get cameraPosition() { return this._inputs[3]; } /** * Gets the perturbed normal input component */ get perturbedNormal() { return this._inputs[4]; } /** * Gets the base color input component */ get baseColor() { return this._inputs[5]; } /** * Gets the metallic input component */ get metallic() { return this._inputs[6]; } /** * Gets the roughness input component */ get roughness() { return this._inputs[7]; } /** * Gets the ambient occlusion input component */ get ambientOcc() { return this._inputs[8]; } /** * Gets the opacity input component */ get opacity() { return this._inputs[9]; } /** * Gets the index of refraction input component */ get indexOfRefraction() { return this._inputs[10]; } /** * Gets the ambient color input component */ get ambientColor() { return this._inputs[11]; } /** * Gets the reflection object parameters */ get reflection() { return this._inputs[12]; } /** * Gets the clear coat object parameters */ get clearcoat() { return this._inputs[13]; } /** * Gets the sheen object parameters */ get sheen() { return this._inputs[14]; } /** * Gets the sub surface object parameters */ get subsurface() { return this._inputs[15]; } /** * Gets the anisotropy object parameters */ get anisotropy() { return this._inputs[16]; } /** * Gets the iridescence object parameters */ get iridescence() { return this._inputs[17]; } /** * Gets the ambient output component */ get ambientClr() { return this._outputs[0]; } /** * Gets the diffuse output component */ get diffuseDir() { return this._outputs[1]; } /** * Gets the specular output component */ get specularDir() { return this._outputs[2]; } /** * Gets the clear coat output component */ get clearcoatDir() { return this._outputs[3]; } /** * 
Gets the sheen output component */ get sheenDir() { return this._outputs[4]; } /** * Gets the indirect diffuse output component */ get diffuseInd() { return this._outputs[5]; } /** * Gets the indirect specular output component */ get specularInd() { return this._outputs[6]; } /** * Gets the indirect clear coat output component */ get clearcoatInd() { return this._outputs[7]; } /** * Gets the indirect sheen output component */ get sheenInd() { return this._outputs[8]; } /** * Gets the refraction output component */ get refraction() { return this._outputs[9]; } /** * Gets the global lighting output component */ get lighting() { return this._outputs[10]; } /** * Gets the shadow output component */ get shadow() { return this._outputs[11]; } /** * Gets the alpha output component */ get alpha() { return this._outputs[12]; } autoConfigure(e, t = () => !0) { if (!this.cameraPosition.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.CameraPosition && t(r)); i || (i = new vs("cameraPosition"), i.setAsSystemValue(Ms.CameraPosition)), i.output.connectTo(this.cameraPosition); } if (!this.view.isConnected) { let i = e.getInputBlockByPredicate((r) => r.systemValue === Ms.View && t(r)); i || (i = new vs("view"), i.setAsSystemValue(Ms.View)), i.output.connectTo(this.view); } } prepareDefines(e, t, i) { i.setValue("PBR", !0), i.setValue("METALLICWORKFLOW", !0), i.setValue("DEBUGMODE", this.debugMode, !0), i.setValue("DEBUGMODE_FORCERETURN", !0), i.setValue("NORMALXYSCALE", !0), i.setValue("BUMP", this.perturbedNormal.isConnected, !0), i.setValue("LODBASEDMICROSFURACE", this._scene.getEngine().getCaps().textureLOD), i.setValue("ALBEDO", !1, !0), i.setValue("OPACITY", this.opacity.isConnected, !0), i.setValue("AMBIENT", !0, !0), i.setValue("AMBIENTINGRAYSCALE", !1, !0), i.setValue("REFLECTIVITY", !1, !0), i.setValue("AOSTOREINMETALMAPRED", !1, !0), i.setValue("METALLNESSSTOREINMETALMAPBLUE", !1, !0), i.setValue("ROUGHNESSSTOREINMETALMAPALPHA", !1, !0), i.setValue("ROUGHNESSSTOREINMETALMAPGREEN", !1, !0), this.lightFalloff === on.LIGHTFALLOFF_STANDARD ? (i.setValue("USEPHYSICALLIGHTFALLOFF", !1), i.setValue("USEGLTFLIGHTFALLOFF", !1)) : this.lightFalloff === on.LIGHTFALLOFF_GLTF ? (i.setValue("USEPHYSICALLIGHTFALLOFF", !1), i.setValue("USEGLTFLIGHTFALLOFF", !0)) : (i.setValue("USEPHYSICALLIGHTFALLOFF", !0), i.setValue("USEGLTFLIGHTFALLOFF", !1)); const r = this.alphaTestCutoff.toString(); i.setValue("ALPHABLEND", this.useAlphaBlending, !0), i.setValue("ALPHAFROMALBEDO", !1, !0), i.setValue("ALPHATEST", this.useAlphaTest, !0), i.setValue("ALPHATESTVALUE", r.indexOf(".") < 0 ? r + "." : r, !0), i.setValue("OPACITYRGB", !1, !0), i.setValue("RADIANCEOVERALPHA", this.useRadianceOverAlpha, !0), i.setValue("SPECULAROVERALPHA", this.useSpecularOverAlpha, !0), i.setValue("SPECULARAA", this._scene.getEngine().getCaps().standardDerivatives && this.enableSpecularAntiAliasing, !0), i.setValue("REALTIME_FILTERING", this.realTimeFiltering, !0); const s = e.getScene(); if (s.getEngine()._features.needTypeSuffixInShaderConstants ? 
i.setValue("NUM_SAMPLES", this.realTimeFilteringQuality + "u", !0) : i.setValue("NUM_SAMPLES", "" + this.realTimeFilteringQuality, !0), i.setValue("BRDF_V_HEIGHT_CORRELATED", !0), i.setValue("MS_BRDF_ENERGY_CONSERVATION", this.useEnergyConservation, !0), i.setValue("RADIANCEOCCLUSION", this.useRadianceOcclusion, !0), i.setValue("HORIZONOCCLUSION", this.useHorizonOcclusion, !0), i.setValue("UNLIT", this.unlit, !0), i.setValue("FORCENORMALFORWARD", this.forceNormalForward, !0), this._environmentBRDFTexture && Tt.ReflectionTextureEnabled ? (i.setValue("ENVIRONMENTBRDF", !0), i.setValue("ENVIRONMENTBRDF_RGBD", this._environmentBRDFTexture.isRGBD, !0)) : (i.setValue("ENVIRONMENTBRDF", !1), i.setValue("ENVIRONMENTBRDF_RGBD", !1)), i._areImageProcessingDirty && t.imageProcessingConfiguration && t.imageProcessingConfiguration.prepareDefines(i), !!i._areLightsDirty) if (!this.light) Ke.PrepareDefinesForLights(s, e, i, !0, t.maxSimultaneousLights), i._needNormals = !0, Ke.PrepareDefinesForMultiview(s, i); else { const a = { needNormals: !1, needRebuild: !1, lightmapMode: !1, shadowEnabled: !1, specularEnabled: !1 }; Ke.PrepareDefinesForLight(s, e, this.light, this._lightId, i, !0, a), a.needRebuild && i.rebuild(); } } updateUniformsAndSamples(e, t, i, r) { for (let s = 0; s < t.maxSimultaneousLights && i["LIGHT" + s]; s++) { const n = e.uniforms.indexOf("vLightData" + s) >= 0; Ke.PrepareUniformsAndSamplersForLight(s, e.uniforms, e.samplers, i["PROJECTEDLIGHTTEXTURE" + s], r, n); } } isReady(e, t, i) { return !(this._environmentBRDFTexture && !this._environmentBRDFTexture.isReady() || i._areImageProcessingDirty && t.imageProcessingConfiguration && !t.imageProcessingConfiguration.isReady()); } bind(e, t, i) { var r, s; if (!i) return; const n = i.getScene(); this.light ? Ke.BindLight(this.light, this._lightId, n, e, !0) : Ke.BindLights(n, i, e, !0, t.maxSimultaneousLights), e.setTexture(this._environmentBrdfSamplerName, this._environmentBRDFTexture), e.setFloat2("vDebugMode", this.debugLimit, this.debugFactor); const a = this._scene.ambientColor; a && e.setColor3("ambientFromScene", a); const l = n.useRightHandedSystem === (n._mirroredCameraPosition != null); e.setFloat(this._invertNormalName, l ? -1 : 1), e.setFloat4("vLightingIntensity", this.directIntensity, 1, this.environmentIntensity * this._scene.environmentIntensity, this.specularIntensity); const o = 1, u = (s = (r = this.indexOfRefraction.connectInputBlock) === null || r === void 0 ? void 0 : r.value) !== null && s !== void 0 ? s : 1.5, h = Math.pow((u - o) / (u + o), 2); this._metallicReflectanceColor.scaleToRef(h * this._metallicF0Factor, mn.Color3[0]); const d = this._metallicF0Factor; e.setColor4(this._vMetallicReflectanceFactorsName, mn.Color3[0], d), t.imageProcessingConfiguration && t.imageProcessingConfiguration.bind(e); } _injectVertexCode(e) { var t, i; const r = this.worldPosition, s = `//${this.name}`; this.light ? (this._lightId = (e.counters.lightCounter !== void 0 ? e.counters.lightCounter : -1) + 1, e.counters.lightCounter = this._lightId, e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightVxUboDeclaration" : "lightVxFragmentDeclaration", s, { replaceStrings: [{ search: /{X}/g, replace: this._lightId.toString() }] }, this._lightId.toString())) : (e._emitFunctionFromInclude(e.supportUniformBuffers ? 
"lightVxUboDeclaration" : "lightVxFragmentDeclaration", s, { repeatKey: "maxSimultaneousLights" }), this._lightId = 0, e.sharedData.dynamicUniformBlocks.push(this)); const n = "v_" + r.associatedVariableName; e._emitVaryingFromString(n, "vec4") && (e.compilationString += `${n} = ${r.associatedVariableName}; `); const a = this.reflection.isConnected ? (t = this.reflection.connectedPoint) === null || t === void 0 ? void 0 : t.ownerBlock : null; a && (a.viewConnectionPoint = this.view), e.compilationString += (i = a == null ? void 0 : a.handleVertexSide(e)) !== null && i !== void 0 ? i : "", e._emitVaryingFromString("vClipSpacePosition", "vec4", "defined(IGNORE) || DEBUGMODE > 0") && (e._injectAtEnd += `#if DEBUGMODE > 0 `, e._injectAtEnd += `vClipSpacePosition = gl_Position; `, e._injectAtEnd += `#endif `), this.light ? e.compilationString += e._emitCodeFromInclude("shadowsVertex", s, { replaceStrings: [ { search: /{X}/g, replace: this._lightId.toString() }, { search: /worldPos/g, replace: r.associatedVariableName } ] }) : (e.compilationString += `vec4 worldPos = ${r.associatedVariableName}; `, this.view.isConnected && (e.compilationString += `mat4 view = ${this.view.associatedVariableName}; `), e.compilationString += e._emitCodeFromInclude("shadowsVertex", s, { repeatKey: "maxSimultaneousLights" })); } _getAlbedoOpacityCode() { let e = `albedoOpacityOutParams albedoOpacityOut; `; const t = this.baseColor.isConnected ? this.baseColor.associatedVariableName : "vec3(1.)", i = this.opacity.isConnected ? this.opacity.associatedVariableName : "1."; return e += `albedoOpacityBlock( vec4(${t}, 1.), #ifdef ALBEDO vec4(1.), vec2(1., 1.), #endif #ifdef OPACITY vec4(${i}), vec2(1., 1.), #endif albedoOpacityOut ); vec3 surfaceAlbedo = albedoOpacityOut.surfaceAlbedo; float alpha = albedoOpacityOut.alpha; `, e; } _getAmbientOcclusionCode() { let e = `ambientOcclusionOutParams aoOut; `; const t = this.ambientOcc.isConnected ? this.ambientOcc.associatedVariableName : "1."; return e += `ambientOcclusionBlock( #ifdef AMBIENT vec3(${t}), vec4(0., 1.0, 1.0, 0.), #endif aoOut ); `, e; } _getReflectivityCode(e) { let t = `reflectivityOutParams reflectivityOut; `; const i = "1."; return this._vMetallicReflectanceFactorsName = e._getFreeVariableName("vMetallicReflectanceFactors"), e._emitUniformFromString(this._vMetallicReflectanceFactorsName, "vec4"), t += `vec3 baseColor = surfaceAlbedo; reflectivityBlock( vec4(${this.metallic.associatedVariableName}, ${this.roughness.associatedVariableName}, 0., 0.), #ifdef METALLICWORKFLOW surfaceAlbedo, ${this._vMetallicReflectanceFactorsName}, #endif #ifdef REFLECTIVITY vec3(0., 0., ${i}), vec4(1.), #endif #if defined(METALLICWORKFLOW) && defined(REFLECTIVITY) && defined(AOSTOREINMETALMAPRED) aoOut.ambientOcclusionColor, #endif #ifdef MICROSURFACEMAP microSurfaceTexel, <== not handled! #endif reflectivityOut ); float microSurface = reflectivityOut.microSurface; float roughness = reflectivityOut.roughness; #ifdef METALLICWORKFLOW surfaceAlbedo = reflectivityOut.surfaceAlbedo; #endif #if defined(METALLICWORKFLOW) && defined(REFLECTIVITY) && defined(AOSTOREINMETALMAPRED) aoOut.ambientOcclusionColor = reflectivityOut.ambientOcclusionColor; #endif `, t; } _buildBlock(e) { var t, i, r, s, n, a, l, o, u, h, d, f, p, m, _, v, C, x, b, S, M, R, w, V, k, L, B, U, K, ee, Z, q, le, ie, $, j, J, ne, pe, ge, Ie; super._buildBlock(e), this._scene = e.sharedData.scene, this._environmentBRDFTexture || (this._environmentBRDFTexture = pN(this._scene)); const ye = this.reflection.isConnected ? 
(t = this.reflection.connectedPoint) === null || t === void 0 ? void 0 : t.ownerBlock : null; if (ye && (ye.worldPositionConnectionPoint = this.worldPosition, ye.cameraPositionConnectionPoint = this.cameraPosition, ye.worldNormalConnectionPoint = this.worldNormal, ye.viewConnectionPoint = this.view), e.target !== Le.Fragment) return this._injectVertexCode(e), this; e.sharedData.forcedBindableBlocks.push(this), e.sharedData.blocksWithDefines.push(this), e.sharedData.blockingBlocks.push(this), this.generateOnlyFragmentCode && e.sharedData.dynamicUniformBlocks.push(this); const Se = `//${this.name}`, re = this.perturbedNormal; let te = this.worldPosition.associatedVariableName; this.generateOnlyFragmentCode ? (te = e._getFreeVariableName("globalWorldPos"), e._emitFunction("pbr_globalworldpos", `vec3 ${te}; `, Se), e.compilationString += `${te} = ${this.worldPosition.associatedVariableName}.xyz; `, e.compilationString += e._emitCodeFromInclude("shadowsVertex", Se, { repeatKey: "maxSimultaneousLights", substitutionVars: this.generateOnlyFragmentCode ? `worldPos,${this.worldPosition.associatedVariableName}` : void 0 }), e.compilationString += `#if DEBUGMODE > 0 `, e.compilationString += `vec4 vClipSpacePosition = vec4((vec2(gl_FragCoord.xy) / vec2(1.0)) * 2.0 - 1.0, 0.0, 1.0); `, e.compilationString += `#endif `) : te = "v_" + te, this._environmentBrdfSamplerName = e._getFreeVariableName("environmentBrdfSampler"), e._emit2DSampler(this._environmentBrdfSamplerName), e.sharedData.hints.needAlphaBlending = e.sharedData.hints.needAlphaBlending || this.useAlphaBlending, e.sharedData.hints.needAlphaTesting = e.sharedData.hints.needAlphaTesting || this.useAlphaTest, e._emitExtension("lod", "#extension GL_EXT_shader_texture_lod : enable", "defined(LODBASEDMICROSFURACE)"), e._emitExtension("derivatives", "#extension GL_OES_standard_derivatives : enable"), e._emitUniformFromString("vDebugMode", "vec2", "defined(IGNORE) || DEBUGMODE > 0"), e._emitUniformFromString("ambientFromScene", "vec3"), e.uniforms.push("exposureLinear"), e.uniforms.push("contrast"), e.uniforms.push("vInverseScreenSize"), e.uniforms.push("vignetteSettings1"), e.uniforms.push("vignetteSettings2"), e.uniforms.push("vCameraColorCurveNegative"), e.uniforms.push("vCameraColorCurveNeutral"), e.uniforms.push("vCameraColorCurvePositive"), e.uniforms.push("txColorTransform"), e.uniforms.push("colorTransformSettings"), e.uniforms.push("ditherIntensity"), this.light ? e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightUboDeclaration" : "lightFragmentDeclaration", Se, { replaceStrings: [{ search: /{X}/g, replace: this._lightId.toString() }] }, this._lightId.toString()) : e._emitFunctionFromInclude(e.supportUniformBuffers ? "lightUboDeclaration" : "lightFragmentDeclaration", Se, { repeatKey: "maxSimultaneousLights", substitutionVars: this.generateOnlyFragmentCode ? 
"varying," : void 0 }), e._emitFunctionFromInclude("helperFunctions", Se), e._emitFunctionFromInclude("importanceSampling", Se), e._emitFunctionFromInclude("pbrHelperFunctions", Se), e._emitFunctionFromInclude("imageProcessingDeclaration", Se), e._emitFunctionFromInclude("imageProcessingFunctions", Se), e._emitFunctionFromInclude("shadowsFragmentFunctions", Se), e._emitFunctionFromInclude("pbrDirectLightingSetupFunctions", Se, { replaceStrings: [{ search: /vPositionW/g, replace: te + ".xyz" }] }), e._emitFunctionFromInclude("pbrDirectLightingFalloffFunctions", Se), e._emitFunctionFromInclude("pbrBRDFFunctions", Se, { replaceStrings: [{ search: /REFLECTIONMAP_SKYBOX/g, replace: (i = ye == null ? void 0 : ye._defineSkyboxName) !== null && i !== void 0 ? i : "REFLECTIONMAP_SKYBOX" }] }), e._emitFunctionFromInclude("hdrFilteringFunctions", Se), e._emitFunctionFromInclude("pbrDirectLightingFunctions", Se, { replaceStrings: [{ search: /vPositionW/g, replace: te + ".xyz" }] }), e._emitFunctionFromInclude("pbrIBLFunctions", Se), e._emitFunctionFromInclude("pbrBlockAlbedoOpacity", Se), e._emitFunctionFromInclude("pbrBlockReflectivity", Se), e._emitFunctionFromInclude("pbrBlockAmbientOcclusion", Se), e._emitFunctionFromInclude("pbrBlockAlphaFresnel", Se), e._emitFunctionFromInclude("pbrBlockAnisotropic", Se), e._emitUniformFromString("vLightingIntensity", "vec4"), ye != null && ye.generateOnlyFragmentCode && (e.compilationString += ye.handleVertexSide(e)), this._vNormalWName = e._getFreeVariableName("vNormalW"), e.compilationString += `vec4 ${this._vNormalWName} = normalize(${this.worldNormal.associatedVariableName}); `, e._registerTempVariable("viewDirectionW") && (e.compilationString += `vec3 viewDirectionW = normalize(${this.cameraPosition.associatedVariableName} - ${te}.xyz); `), e.compilationString += `vec3 geometricNormalW = ${this._vNormalWName}.xyz; `, e.compilationString += `vec3 normalW = ${re.isConnected ? "normalize(" + re.associatedVariableName + ".xyz)" : "geometricNormalW"}; `, this._invertNormalName = e._getFreeVariableName("invertNormal"), e._emitUniformFromString(this._invertNormalName, "float"), e.compilationString += e._emitCodeFromInclude("pbrBlockNormalFinal", Se, { replaceStrings: [ { search: /vPositionW/g, replace: te + ".xyz" }, { search: /vEyePosition.w/g, replace: this._invertNormalName } ] }), e.compilationString += this._getAlbedoOpacityCode(), e.compilationString += e._emitCodeFromInclude("depthPrePass", Se), e.compilationString += this._getAmbientOcclusionCode(), e.compilationString += e._emitCodeFromInclude("pbrBlockLightmapInit", Se), e.compilationString += `#ifdef UNLIT vec3 diffuseBase = vec3(1., 1., 1.); #else `, e.compilationString += this._getReflectivityCode(e), e.compilationString += e._emitCodeFromInclude("pbrBlockGeometryInfo", Se, { replaceStrings: [ { search: /REFLECTIONMAP_SKYBOX/g, replace: (r = ye == null ? void 0 : ye._defineSkyboxName) !== null && r !== void 0 ? r : "REFLECTIONMAP_SKYBOX" }, { search: /REFLECTIONMAP_3D/g, replace: (s = ye == null ? void 0 : ye._define3DName) !== null && s !== void 0 ? s : "REFLECTIONMAP_3D" } ] }); const he = this.anisotropy.isConnected ? (n = this.anisotropy.connectedPoint) === null || n === void 0 ? void 0 : n.ownerBlock : null; he && (he.worldPositionConnectionPoint = this.worldPosition, he.worldNormalConnectionPoint = this.worldNormal, e.compilationString += he.getCode(e, !this.perturbedNormal.isConnected)), ye && ye.hasTexture && (e.compilationString += ye.getCode(e, he ? 
"anisotropicOut.anisotropicNormal" : "normalW")), e._emitFunctionFromInclude("pbrBlockReflection", Se, { replaceStrings: [ { search: /computeReflectionCoords/g, replace: "computeReflectionCoordsPBR" }, { search: /REFLECTIONMAP_3D/g, replace: (a = ye == null ? void 0 : ye._define3DName) !== null && a !== void 0 ? a : "REFLECTIONMAP_3D" }, { search: /REFLECTIONMAP_OPPOSITEZ/g, replace: (l = ye == null ? void 0 : ye._defineOppositeZ) !== null && l !== void 0 ? l : "REFLECTIONMAP_OPPOSITEZ" }, { search: /REFLECTIONMAP_PROJECTION/g, replace: (o = ye == null ? void 0 : ye._defineProjectionName) !== null && o !== void 0 ? o : "REFLECTIONMAP_PROJECTION" }, { search: /REFLECTIONMAP_SKYBOX/g, replace: (u = ye == null ? void 0 : ye._defineSkyboxName) !== null && u !== void 0 ? u : "REFLECTIONMAP_SKYBOX" }, { search: /LODINREFLECTIONALPHA/g, replace: (h = ye == null ? void 0 : ye._defineLODReflectionAlpha) !== null && h !== void 0 ? h : "LODINREFLECTIONALPHA" }, { search: /LINEARSPECULARREFLECTION/g, replace: (d = ye == null ? void 0 : ye._defineLinearSpecularReflection) !== null && d !== void 0 ? d : "LINEARSPECULARREFLECTION" }, { search: /vReflectionFilteringInfo/g, replace: (f = ye == null ? void 0 : ye._vReflectionFilteringInfoName) !== null && f !== void 0 ? f : "vReflectionFilteringInfo" } ] }), e.compilationString += e._emitCodeFromInclude("pbrBlockReflectance0", Se, { replaceStrings: [{ search: /metallicReflectanceFactors/g, replace: this._vMetallicReflectanceFactorsName }] }); const be = this.sheen.isConnected ? (p = this.sheen.connectedPoint) === null || p === void 0 ? void 0 : p.ownerBlock : null; be && (e.compilationString += be.getCode(ye)), e._emitFunctionFromInclude("pbrBlockSheen", Se, { replaceStrings: [ { search: /REFLECTIONMAP_3D/g, replace: (m = ye == null ? void 0 : ye._define3DName) !== null && m !== void 0 ? m : "REFLECTIONMAP_3D" }, { search: /REFLECTIONMAP_SKYBOX/g, replace: (_ = ye == null ? void 0 : ye._defineSkyboxName) !== null && _ !== void 0 ? _ : "REFLECTIONMAP_SKYBOX" }, { search: /LODINREFLECTIONALPHA/g, replace: (v = ye == null ? void 0 : ye._defineLODReflectionAlpha) !== null && v !== void 0 ? v : "LODINREFLECTIONALPHA" }, { search: /LINEARSPECULARREFLECTION/g, replace: (C = ye == null ? void 0 : ye._defineLinearSpecularReflection) !== null && C !== void 0 ? C : "LINEARSPECULARREFLECTION" } ] }); const Ue = this.iridescence.isConnected ? (x = this.iridescence.connectedPoint) === null || x === void 0 ? void 0 : x.ownerBlock : null; e.compilationString += pL.GetCode(Ue), e._emitFunctionFromInclude("pbrBlockIridescence", Se, { replaceStrings: [] }); const Ee = this.clearcoat.isConnected ? (b = this.clearcoat.connectedPoint) === null || b === void 0 ? void 0 : b.ownerBlock : null, He = !this.perturbedNormal.isConnected && !this.anisotropy.isConnected, Xe = this.perturbedNormal.isConnected && ((M = ((S = this.perturbedNormal.connectedPoint) === null || S === void 0 ? void 0 : S.ownerBlock).worldTangent) === null || M === void 0 ? void 0 : M.isConnected), rt = this.anisotropy.isConnected && ((R = this.anisotropy.connectedPoint) === null || R === void 0 ? void 0 : R.ownerBlock).worldTangent.isConnected; let dt = Xe || !this.perturbedNormal.isConnected && rt; e.compilationString += xP.GetCode(e, Ee, ye, te, He, dt, this.worldNormal.associatedVariableName), He && (dt = (w = Ee == null ? void 0 : Ee.worldTangent.isConnected) !== null && w !== void 0 ? 
w : !1), e._emitFunctionFromInclude("pbrBlockClearcoat", Se, { replaceStrings: [ { search: /computeReflectionCoords/g, replace: "computeReflectionCoordsPBR" }, { search: /REFLECTIONMAP_3D/g, replace: (V = ye == null ? void 0 : ye._define3DName) !== null && V !== void 0 ? V : "REFLECTIONMAP_3D" }, { search: /REFLECTIONMAP_OPPOSITEZ/g, replace: (k = ye == null ? void 0 : ye._defineOppositeZ) !== null && k !== void 0 ? k : "REFLECTIONMAP_OPPOSITEZ" }, { search: /REFLECTIONMAP_PROJECTION/g, replace: (L = ye == null ? void 0 : ye._defineProjectionName) !== null && L !== void 0 ? L : "REFLECTIONMAP_PROJECTION" }, { search: /REFLECTIONMAP_SKYBOX/g, replace: (B = ye == null ? void 0 : ye._defineSkyboxName) !== null && B !== void 0 ? B : "REFLECTIONMAP_SKYBOX" }, { search: /LODINREFLECTIONALPHA/g, replace: (U = ye == null ? void 0 : ye._defineLODReflectionAlpha) !== null && U !== void 0 ? U : "LODINREFLECTIONALPHA" }, { search: /LINEARSPECULARREFLECTION/g, replace: (K = ye == null ? void 0 : ye._defineLinearSpecularReflection) !== null && K !== void 0 ? K : "LINEARSPECULARREFLECTION" }, { search: /defined\(TANGENT\)/g, replace: dt ? "defined(TANGENT)" : "defined(IGNORE)" } ] }), e.compilationString += e._emitCodeFromInclude("pbrBlockReflectance", Se, { replaceStrings: [ { search: /REFLECTIONMAP_SKYBOX/g, replace: (ee = ye == null ? void 0 : ye._defineSkyboxName) !== null && ee !== void 0 ? ee : "REFLECTIONMAP_SKYBOX" }, { search: /REFLECTIONMAP_3D/g, replace: (Z = ye == null ? void 0 : ye._define3DName) !== null && Z !== void 0 ? Z : "REFLECTIONMAP_3D" } ] }); const bt = this.subsurface.isConnected ? (q = this.subsurface.connectedPoint) === null || q === void 0 ? void 0 : q.ownerBlock : null, Mt = this.subsurface.isConnected ? (ie = ((le = this.subsurface.connectedPoint) === null || le === void 0 ? void 0 : le.ownerBlock).refraction.connectedPoint) === null || ie === void 0 ? void 0 : ie.ownerBlock : null; Mt && (Mt.viewConnectionPoint = this.view, Mt.indexOfRefractionConnectionPoint = this.indexOfRefraction), e.compilationString += jO.GetCode(e, bt, ye, te), e._emitFunctionFromInclude("pbrBlockSubSurface", Se, { replaceStrings: [ { search: /REFLECTIONMAP_3D/g, replace: ($ = ye == null ? void 0 : ye._define3DName) !== null && $ !== void 0 ? $ : "REFLECTIONMAP_3D" }, { search: /REFLECTIONMAP_OPPOSITEZ/g, replace: (j = ye == null ? void 0 : ye._defineOppositeZ) !== null && j !== void 0 ? j : "REFLECTIONMAP_OPPOSITEZ" }, { search: /REFLECTIONMAP_PROJECTION/g, replace: (J = ye == null ? void 0 : ye._defineProjectionName) !== null && J !== void 0 ? J : "REFLECTIONMAP_PROJECTION" }, { search: /SS_REFRACTIONMAP_3D/g, replace: (ne = Mt == null ? void 0 : Mt._define3DName) !== null && ne !== void 0 ? ne : "SS_REFRACTIONMAP_3D" }, { search: /SS_LODINREFRACTIONALPHA/g, replace: (pe = Mt == null ? void 0 : Mt._defineLODRefractionAlpha) !== null && pe !== void 0 ? pe : "SS_LODINREFRACTIONALPHA" }, { search: /SS_LINEARSPECULARREFRACTION/g, replace: (ge = Mt == null ? void 0 : Mt._defineLinearSpecularRefraction) !== null && ge !== void 0 ? ge : "SS_LINEARSPECULARREFRACTION" }, { search: /SS_REFRACTIONMAP_OPPOSITEZ/g, replace: (Ie = Mt == null ? void 0 : Mt._defineOppositeZ) !== null && Ie !== void 0 ? Ie : "SS_REFRACTIONMAP_OPPOSITEZ" } ] }), e.compilationString += e._emitCodeFromInclude("pbrBlockDirectLighting", Se), this.light ? 
e.compilationString += e._emitCodeFromInclude("lightFragment", Se, { replaceStrings: [ { search: /{X}/g, replace: this._lightId.toString() }, { search: /vPositionW/g, replace: te + ".xyz" } ] }) : e.compilationString += e._emitCodeFromInclude("lightFragment", Se, { repeatKey: "maxSimultaneousLights", substitutionVars: `vPositionW,${te}.xyz` }), e.compilationString += e._emitCodeFromInclude("pbrBlockFinalLitComponents", Se), e.compilationString += `#endif `; const Ct = this.ambientColor.isConnected ? this.ambientColor.associatedVariableName : "vec3(0., 0., 0.)"; let di = on.DEFAULT_AO_ON_ANALYTICAL_LIGHTS.toString(); di.indexOf(".") === -1 && (di += "."), e.compilationString += e._emitCodeFromInclude("pbrBlockFinalUnlitComponents", Se, { replaceStrings: [ { search: /vec3 finalEmissive[\s\S]*?finalEmissive\*=vLightingIntensity\.y;/g, replace: "" }, { search: /vAmbientColor/g, replace: Ct + " * ambientFromScene" }, { search: /vAmbientInfos\.w/g, replace: di } ] }), e.compilationString += e._emitCodeFromInclude("pbrBlockFinalColorComposition", Se, { replaceStrings: [{ search: /finalEmissive/g, replace: "vec3(0.)" }] }), e.compilationString += e._emitCodeFromInclude("pbrBlockImageProcessing", Se, { replaceStrings: [{ search: /visibility/g, replace: "1." }] }), e.compilationString += e._emitCodeFromInclude("pbrDebug", Se, { replaceStrings: [ { search: /vNormalW/g, replace: this._vNormalWName }, { search: /vPositionW/g, replace: te }, { search: /albedoTexture\.rgb;/g, replace: `vec3(1.); gl_FragColor.rgb = toGammaSpace(gl_FragColor.rgb); ` } ] }); for (const Kt of this._outputs) if (Kt.hasEndpoints) { const ei = t0e[Kt.name]; if (ei) { const [bi, vr] = ei; vr && (e.compilationString += `#if ${vr} `), e.compilationString += `${this._declareOutput(Kt, e)} = ${bi}; `, vr && (e.compilationString += `#else `, e.compilationString += `${this._declareOutput(Kt, e)} = vec3(0.); `, e.compilationString += `#endif `); } else Ce.Error(`There's no remapping for the ${Kt.name} end point! 
No code generated`); } return this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.lightFalloff = ${this.lightFalloff}; `, e += `${this._codeVariableName}.useAlphaTest = ${this.useAlphaTest}; `, e += `${this._codeVariableName}.alphaTestCutoff = ${this.alphaTestCutoff}; `, e += `${this._codeVariableName}.useAlphaBlending = ${this.useAlphaBlending}; `, e += `${this._codeVariableName}.useRadianceOverAlpha = ${this.useRadianceOverAlpha}; `, e += `${this._codeVariableName}.useSpecularOverAlpha = ${this.useSpecularOverAlpha}; `, e += `${this._codeVariableName}.enableSpecularAntiAliasing = ${this.enableSpecularAntiAliasing}; `, e += `${this._codeVariableName}.realTimeFiltering = ${this.realTimeFiltering}; `, e += `${this._codeVariableName}.realTimeFilteringQuality = ${this.realTimeFilteringQuality}; `, e += `${this._codeVariableName}.useEnergyConservation = ${this.useEnergyConservation}; `, e += `${this._codeVariableName}.useRadianceOcclusion = ${this.useRadianceOcclusion}; `, e += `${this._codeVariableName}.useHorizonOcclusion = ${this.useHorizonOcclusion}; `, e += `${this._codeVariableName}.unlit = ${this.unlit}; `, e += `${this._codeVariableName}.forceNormalForward = ${this.forceNormalForward}; `, e += `${this._codeVariableName}.debugMode = ${this.debugMode}; `, e += `${this._codeVariableName}.debugLimit = ${this.debugLimit}; `, e += `${this._codeVariableName}.debugFactor = ${this.debugFactor}; `, e; } serialize() { const e = super.serialize(); return this.light && (e.lightId = this.light.id), e.lightFalloff = this.lightFalloff, e.useAlphaTest = this.useAlphaTest, e.alphaTestCutoff = this.alphaTestCutoff, e.useAlphaBlending = this.useAlphaBlending, e.useRadianceOverAlpha = this.useRadianceOverAlpha, e.useSpecularOverAlpha = this.useSpecularOverAlpha, e.enableSpecularAntiAliasing = this.enableSpecularAntiAliasing, e.realTimeFiltering = this.realTimeFiltering, e.realTimeFilteringQuality = this.realTimeFilteringQuality, e.useEnergyConservation = this.useEnergyConservation, e.useRadianceOcclusion = this.useRadianceOcclusion, e.useHorizonOcclusion = this.useHorizonOcclusion, e.unlit = this.unlit, e.forceNormalForward = this.forceNormalForward, e.debugMode = this.debugMode, e.debugLimit = this.debugLimit, e.debugFactor = this.debugFactor, e.generateOnlyFragmentCode = this.generateOnlyFragmentCode, e; } _deserialize(e, t, i) { var r, s; super._deserialize(e, t, i), e.lightId && (this.light = t.getLightById(e.lightId)), this.lightFalloff = (r = e.lightFalloff) !== null && r !== void 0 ? r : 0, this.useAlphaTest = e.useAlphaTest, this.alphaTestCutoff = e.alphaTestCutoff, this.useAlphaBlending = e.useAlphaBlending, this.useRadianceOverAlpha = e.useRadianceOverAlpha, this.useSpecularOverAlpha = e.useSpecularOverAlpha, this.enableSpecularAntiAliasing = e.enableSpecularAntiAliasing, this.realTimeFiltering = !!e.realTimeFiltering, this.realTimeFilteringQuality = (s = e.realTimeFilteringQuality) !== null && s !== void 0 ? 
s : 8, this.useEnergyConservation = e.useEnergyConservation, this.useRadianceOcclusion = e.useRadianceOcclusion, this.useHorizonOcclusion = e.useHorizonOcclusion, this.unlit = e.unlit, this.forceNormalForward = !!e.forceNormalForward, this.debugMode = e.debugMode, this.debugLimit = e.debugLimit, this.debugFactor = e.debugFactor, this.generateOnlyFragmentCode = !!e.generateOnlyFragmentCode, this._setTarget(); } } F([ ir("Direct lights", $i.Float, "INTENSITY", { min: 0, max: 1, notifiers: { update: !0 } }) ], gu.prototype, "directIntensity", void 0); F([ ir("Environment lights", $i.Float, "INTENSITY", { min: 0, max: 1, notifiers: { update: !0 } }) ], gu.prototype, "environmentIntensity", void 0); F([ ir("Specular highlights", $i.Float, "INTENSITY", { min: 0, max: 1, notifiers: { update: !0 } }) ], gu.prototype, "specularIntensity", void 0); F([ ir("Light falloff", $i.List, "LIGHTING & COLORS", { notifiers: { update: !0 }, options: [ { label: "Physical", value: on.LIGHTFALLOFF_PHYSICAL }, { label: "GLTF", value: on.LIGHTFALLOFF_GLTF }, { label: "Standard", value: on.LIGHTFALLOFF_STANDARD } ] }) ], gu.prototype, "lightFalloff", void 0); F([ ir("Alpha Testing", $i.Boolean, "OPACITY") ], gu.prototype, "useAlphaTest", void 0); F([ ir("Alpha CutOff", $i.Float, "OPACITY", { min: 0, max: 1, notifiers: { update: !0 } }) ], gu.prototype, "alphaTestCutoff", void 0); F([ ir("Alpha blending", $i.Boolean, "OPACITY") ], gu.prototype, "useAlphaBlending", void 0); F([ ir("Radiance over alpha", $i.Boolean, "RENDERING", { notifiers: { update: !0 } }) ], gu.prototype, "useRadianceOverAlpha", void 0); F([ ir("Specular over alpha", $i.Boolean, "RENDERING", { notifiers: { update: !0 } }) ], gu.prototype, "useSpecularOverAlpha", void 0); F([ ir("Specular anti-aliasing", $i.Boolean, "RENDERING", { notifiers: { update: !0 } }) ], gu.prototype, "enableSpecularAntiAliasing", void 0); F([ ir("Realtime filtering", $i.Boolean, "RENDERING", { notifiers: { update: !0 } }) ], gu.prototype, "realTimeFiltering", void 0); F([ ir("Realtime filtering quality", $i.List, "RENDERING", { notifiers: { update: !0 }, options: [ { label: "Low", value: 8 }, { label: "Medium", value: 16 }, { label: "High", value: 64 } ] }) ], gu.prototype, "realTimeFilteringQuality", void 0); F([ ir("Energy Conservation", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], gu.prototype, "useEnergyConservation", void 0); F([ ir("Radiance occlusion", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], gu.prototype, "useRadianceOcclusion", void 0); F([ ir("Horizon occlusion", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], gu.prototype, "useHorizonOcclusion", void 0); F([ ir("Unlit", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], gu.prototype, "unlit", void 0); F([ ir("Force normal forward", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], gu.prototype, "forceNormalForward", void 0); F([ ir("Generate only fragment code", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0, update: !0, onValidation: gu._OnGenerateOnlyFragmentCodeChanged } }) ], gu.prototype, "generateOnlyFragmentCode", void 0); F([ ir("Debug mode", $i.List, "DEBUG", { notifiers: { update: !0 }, options: [ { label: "None", value: 0 }, // Geometry { label: "Normalized position", value: 1 }, { label: "Normals", value: 2 }, { label: "Tangents", value: 3 }, { label: "Bitangents", value: 4 }, { label: "Bump Normals", value: 5 }, //{ label: "UV1", value: 6 }, //{ label: "UV2", value: 7 }, { label: "ClearCoat Normals", value: 8 }, { label: "ClearCoat Tangents", 
value: 9 }, { label: "ClearCoat Bitangents", value: 10 }, { label: "Anisotropic Normals", value: 11 }, { label: "Anisotropic Tangents", value: 12 }, { label: "Anisotropic Bitangents", value: 13 }, // Maps //{ label: "Emissive Map", value: 23 }, //{ label: "Light Map", value: 24 }, // Env { label: "Env Refraction", value: 40 }, { label: "Env Reflection", value: 41 }, { label: "Env Clear Coat", value: 42 }, // Lighting { label: "Direct Diffuse", value: 50 }, { label: "Direct Specular", value: 51 }, { label: "Direct Clear Coat", value: 52 }, { label: "Direct Sheen", value: 53 }, { label: "Env Irradiance", value: 54 }, // Lighting Params { label: "Surface Albedo", value: 60 }, { label: "Reflectance 0", value: 61 }, { label: "Metallic", value: 62 }, { label: "Metallic F0", value: 71 }, { label: "Roughness", value: 63 }, { label: "AlphaG", value: 64 }, { label: "NdotV", value: 65 }, { label: "ClearCoat Color", value: 66 }, { label: "ClearCoat Roughness", value: 67 }, { label: "ClearCoat NdotV", value: 68 }, { label: "Transmittance", value: 69 }, { label: "Refraction Transmittance", value: 70 }, // Misc { label: "SEO", value: 80 }, { label: "EHO", value: 81 }, { label: "Energy Factor", value: 82 }, { label: "Specular Reflectance", value: 83 }, { label: "Clear Coat Reflectance", value: 84 }, { label: "Sheen Reflectance", value: 85 }, { label: "Luminance Over Alpha", value: 86 }, { label: "Alpha", value: 87 } ] }) ], gu.prototype, "debugMode", void 0); F([ ir("Split position", $i.Float, "DEBUG", { min: -1, max: 1, notifiers: { update: !0 } }) ], gu.prototype, "debugLimit", void 0); F([ ir("Output factor", $i.Float, "DEBUG", { min: 0, max: 5, notifiers: { update: !0 } }) ], gu.prototype, "debugFactor", void 0); Be("BABYLON.PBRMetallicRoughnessBlock", gu); class Tse extends Wi { /** * Creates a new ModBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("left", ue.AutoDetect), this.registerInput("right", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[1].acceptedConnectionPointTypes.push(ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ModBlock"; } /** * Gets the left operand input component */ get left() { return this._inputs[0]; } /** * Gets the right operand input component */ get right() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = mod(${this.left.associatedVariableName}, ${this.right.associatedVariableName}); `, this; } } Be("BABYLON.ModBlock", Tse); class Sse extends Wi { /** * Creates a new MatrixBuilder * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("row0", ue.Vector4), this.registerInput("row1", ue.Vector4), this.registerInput("row2", ue.Vector4), this.registerInput("row3", ue.Vector4), this.registerOutput("output", ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MatrixBuilder"; } /** * Gets the row0 vector */ get row0() { return this._inputs[0]; } /** * Gets the row1 vector */ get row1() { return this._inputs[1]; } /** * Gets the row2 vector */ get row2() { return this._inputs[2]; } /** * Gets the row3 vector */ get row3() { return this._inputs[3]; } /** * Gets the output component 
*/ get output() { return this._outputs[0]; } autoConfigure() { if (!this.row0.isConnected) { const e = new vs("row0"); e.value = new Di(1, 0, 0, 0), e.output.connectTo(this.row0); } if (!this.row1.isConnected) { const e = new vs("row1"); e.value = new Di(0, 1, 0, 0), e.output.connectTo(this.row1); } if (!this.row2.isConnected) { const e = new vs("row2"); e.value = new Di(0, 0, 1, 0), e.output.connectTo(this.row2); } if (!this.row3.isConnected) { const e = new vs("row3"); e.value = new Di(0, 0, 0, 1), e.output.connectTo(this.row3); } } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this.row0, r = this.row1, s = this.row2, n = this.row3; return e.compilationString += this._declareOutput(t, e) + ` = mat4(${i.associatedVariableName}, ${r.associatedVariableName}, ${s.associatedVariableName}, ${n.associatedVariableName}); `, this; } } Be("BABYLON.MatrixBuilder", Sse); var um; (function(c) { c[c.Equal = 0] = "Equal", c[c.NotEqual = 1] = "NotEqual", c[c.LessThan = 2] = "LessThan", c[c.GreaterThan = 3] = "GreaterThan", c[c.LessOrEqual = 4] = "LessOrEqual", c[c.GreaterOrEqual = 5] = "GreaterOrEqual", c[c.Xor = 6] = "Xor", c[c.Or = 7] = "Or", c[c.And = 8] = "And"; })(um || (um = {})); class Mse extends Wi { /** * Creates a new ConditionalBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.condition = um.LessThan, this.registerInput("a", ue.Float), this.registerInput("b", ue.Float), this.registerInput("true", ue.AutoDetect, !0), this.registerInput("false", ue.AutoDetect, !0), this.registerOutput("output", ue.BasedOnInput), this._linkConnectionTypes(2, 3), this._outputs[0]._typeConnectionSource = this._inputs[2], this._outputs[0]._defaultConnectionPointType = ue.Float; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ConditionalBlock"; } /** * Gets the first operand component */ get a() { return this._inputs[0]; } /** * Gets the second operand component */ get b() { return this._inputs[1]; } /** * Gets the value to return if condition is true */ get true() { return this._inputs[2]; } /** * Gets the value to return if condition is false */ get false() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this.true.isConnected ? this.true.associatedVariableName : "1.0", r = this.false.isConnected ? this.false.associatedVariableName : "0.0"; switch (this.condition) { case um.Equal: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} == ${this.b.associatedVariableName} ? ${i} : ${r}; `; break; } case um.NotEqual: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} != ${this.b.associatedVariableName} ? ${i} : ${r}; `; break; } case um.LessThan: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} < ${this.b.associatedVariableName} ? ${i} : ${r}; `; break; } case um.LessOrEqual: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} <= ${this.b.associatedVariableName} ? ${i} : ${r}; `; break; } case um.GreaterThan: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} > ${this.b.associatedVariableName} ? ${i} : ${r}; `; break; } case um.GreaterOrEqual: { e.compilationString += this._declareOutput(t, e) + ` = ${this.a.associatedVariableName} >= ${this.b.associatedVariableName} ? 
${i} : ${r}; `; break; } case um.Xor: { e.compilationString += this._declareOutput(t, e) + ` = (mod(${this.a.associatedVariableName} + ${this.b.associatedVariableName}, 2.0) > 0.0) ? ${i} : ${r}; `; break; } case um.Or: { e.compilationString += this._declareOutput(t, e) + ` = (min(${this.a.associatedVariableName} + ${this.b.associatedVariableName}, 1.0) > 0.0) ? ${i} : ${r}; `; break; } case um.And: { e.compilationString += this._declareOutput(t, e) + ` = (${this.a.associatedVariableName} * ${this.b.associatedVariableName} > 0.0) ? ${i} : ${r}; `; break; } } return this; } serialize() { const e = super.serialize(); return e.condition = this.condition, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.condition = e.condition; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.condition = BABYLON.ConditionalBlockConditions.${um[this.condition]}; `; } } Be("BABYLON.ConditionalBlock", Mse); class bW extends Wi { /** * Creates a new CloudBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.octaves = 6, this.registerInput("seed", ue.AutoDetect), this.registerInput("chaos", ue.AutoDetect, !0), this.registerInput("offsetX", ue.Float, !0), this.registerInput("offsetY", ue.Float, !0), this.registerInput("offsetZ", ue.Float, !0), this.registerOutput("output", ue.Float), this._inputs[0].acceptedConnectionPointTypes.push(ue.Vector2), this._inputs[0].acceptedConnectionPointTypes.push(ue.Vector3), this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CloudBlock"; } /** * Gets the seed input component */ get seed() { return this._inputs[0]; } /** * Gets the chaos input component */ get chaos() { return this._inputs[1]; } /** * Gets the offset X input component */ get offsetX() { return this._inputs[2]; } /** * Gets the offset Y input component */ get offsetY() { return this._inputs[3]; } /** * Gets the offset Z input component */ get offsetZ() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { var t, i; if (super._buildBlock(e), !this.seed.isConnected || !this._outputs[0].hasEndpoints) return; const r = ` float cloudRandom(in float p) { p = fract(p * 0.011); p *= p + 7.5; p *= p + p; return fract(p); } // Based on Morgan McGuire @morgan3d // https://www.shadertoy.com/view/4dS3Wd float cloudNoise(in vec2 x, in vec2 chaos) { vec2 step = chaos * vec2(75., 120.) + vec2(75., 120.); vec2 i = floor(x); vec2 f = fract(x); float n = dot(i, step); vec2 u = f * f * (3.0 - 2.0 * f); return mix( mix(cloudRandom(n + dot(step, vec2(0, 0))), cloudRandom(n + dot(step, vec2(1, 0))), u.x), mix(cloudRandom(n + dot(step, vec2(0, 1))), cloudRandom(n + dot(step, vec2(1, 1))), u.x), u.y ); } float cloudNoise(in vec3 x, in vec3 chaos) { vec3 step = chaos * vec3(60., 120., 75.) 
+ vec3(60., 120., 75.); vec3 i = floor(x); vec3 f = fract(x); float n = dot(i, step); vec3 u = f * f * (3.0 - 2.0 * f); return mix(mix(mix( cloudRandom(n + dot(step, vec3(0, 0, 0))), cloudRandom(n + dot(step, vec3(1, 0, 0))), u.x), mix( cloudRandom(n + dot(step, vec3(0, 1, 0))), cloudRandom(n + dot(step, vec3(1, 1, 0))), u.x), u.y), mix(mix( cloudRandom(n + dot(step, vec3(0, 0, 1))), cloudRandom(n + dot(step, vec3(1, 0, 1))), u.x), mix( cloudRandom(n + dot(step, vec3(0, 1, 1))), cloudRandom(n + dot(step, vec3(1, 1, 1))), u.x), u.y), u.z); }`, s = ` float fbm(in vec2 st, in vec2 chaos) { // Initial values float value = 0.0; float amplitude = .5; float frequency = 0.; // Loop of octaves for (int i = 0; i < OCTAVES; i++) { value += amplitude * cloudNoise(st, chaos); st *= 2.0; amplitude *= 0.5; } return value; } float fbm(in vec3 x, in vec3 chaos) { // Initial values float value = 0.0; float amplitude = 0.5; for (int i = 0; i < OCTAVES; ++i) { value += amplitude * cloudNoise(x, chaos); x = x * 2.0; amplitude *= 0.5; } return value; }`, n = `fbm${this.octaves}`; e._emitFunction("CloudBlockCode", r, "// CloudBlockCode"), e._emitFunction("CloudBlockCodeFBM" + this.octaves, s.replace(/fbm/gi, n).replace(/OCTAVES/gi, (this.octaves | 0).toString()), "// CloudBlockCode FBM"); const a = e._getFreeVariableName("st"), l = ((t = this.seed.connectedPoint) === null || t === void 0 ? void 0 : t.type) === ue.Vector2 ? "vec2" : "vec3"; e.compilationString += `${l} ${a} = ${this.seed.associatedVariableName}; `, this.offsetX.isConnected && (e.compilationString += `${a}.x += 0.1 * ${this.offsetX.associatedVariableName}; `), this.offsetY.isConnected && (e.compilationString += `${a}.y += 0.1 * ${this.offsetY.associatedVariableName}; `), this.offsetZ.isConnected && l === "vec3" && (e.compilationString += `${a}.z += 0.1 * ${this.offsetZ.associatedVariableName}; `); let o = ""; return this.chaos.isConnected ? o = this.chaos.associatedVariableName : o = ((i = this.seed.connectedPoint) === null || i === void 0 ? void 0 : i.type) === ue.Vector2 ? 
"vec2(0., 0.)" : "vec3(0., 0., 0.)", e.compilationString += this._declareOutput(this._outputs[0], e) + ` = ${n}(${a}, ${o}); `, this; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.octaves = ${this.octaves}; `; } serialize() { const e = super.serialize(); return e.octaves = this.octaves, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.octaves = e.octaves; } } F([ ir("Octaves", $i.Int) ], bW.prototype, "octaves", void 0); Be("BABYLON.CloudBlock", bW); class Rse extends Wi { /** * Creates a new VoronoiNoiseBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("seed", ue.Vector2), this.registerInput("offset", ue.Float), this.registerInput("density", ue.Float), this.registerOutput("output", ue.Float), this.registerOutput("cells", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "VoronoiNoiseBlock"; } /** * Gets the seed input component */ get seed() { return this._inputs[0]; } /** * Gets the offset input component */ get offset() { return this._inputs[1]; } /** * Gets the density input component */ get density() { return this._inputs[2]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets the output component */ get cells() { return this._outputs[1]; } _buildBlock(e) { if (super._buildBlock(e), !this.seed.isConnected) return; let t = `vec2 voronoiRandom(vec2 seed, float offset){ mat2 m = mat2(15.27, 47.63, 99.41, 89.98); vec2 uv = fract(sin(m * seed) * 46839.32); return vec2(sin(uv.y * offset) * 0.5 + 0.5, cos(uv.x * offset) * 0.5 + 0.5); } `; e._emitFunction("voronoiRandom", t, "// Voronoi random generator"), t = `void voronoi(vec2 seed, float offset, float density, out float outValue, out float cells){ vec2 g = floor(seed * density); vec2 f = fract(seed * density); float t = 8.0; vec3 res = vec3(8.0, 0.0, 0.0); for(int y=-1; y<=1; y++) { for(int x=-1; x<=1; x++) { vec2 lattice = vec2(x,y); vec2 randomOffset = voronoiRandom(lattice + g, offset); float d = distance(lattice + randomOffset, f); if(d < res.x) { res = vec3(d, randomOffset.x, randomOffset.y); outValue = res.x; cells = res.y; } } } } `, e._emitFunction("voronoi", t, "// Voronoi"); const i = e._getFreeVariableName("tempOutput"), r = e._getFreeVariableName("tempCells"); return e.compilationString += `float ${i} = 0.0; `, e.compilationString += `float ${r} = 0.0; `, e.compilationString += `voronoi(${this.seed.associatedVariableName}, ${this.offset.associatedVariableName}, ${this.density.associatedVariableName}, ${i}, ${r}); `, this.output.hasEndpoints && (e.compilationString += this._declareOutput(this.output, e) + ` = ${i}; `), this.cells.hasEndpoints && (e.compilationString += this._declareOutput(this.cells, e) + ` = ${r}; `), this; } } Be("BABYLON.VoronoiNoiseBlock", Rse); class Pse extends Wi { /** * Creates a new ElbowBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "ElbowBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** * Gets or sets the target of the block */ get target() { const e = this._inputs[0]; if (e.isConnected) { const t = 
e.connectedPoint.ownerBlock; if (t.target !== Le.VertexAndFragment) return t.target; if (e.connectedPoint.target !== Le.VertexAndFragment) return e.connectedPoint.target; } return this._target; } set target(e) { this._target & e || (this._target = e); } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this._inputs[0]; return e.compilationString += this._declareOutput(t, e) + ` = ${i.associatedVariableName}; `, this; } } Be("BABYLON.ElbowBlock", Pse); class FU extends Wi { /** * Gets or sets the texture associated with the node */ get texture() { var e; return this.source.isConnected ? ((e = this.source.connectedPoint) === null || e === void 0 ? void 0 : e.ownerBlock).texture : this._texture; } set texture(e) { var t; if (this._texture === e) return; const i = (t = e == null ? void 0 : e.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; !e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this._texture)), this._texture = e, e && i && i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(e)); } /** * Gets the textureY associated with the node */ get textureY() { var e; return this.sourceY.isConnected ? ((e = this.sourceY.connectedPoint) === null || e === void 0 ? void 0 : e.ownerBlock).texture : null; } /** * Gets the textureZ associated with the node */ get textureZ() { var e, t; return !((e = this.sourceZ) === null || e === void 0) && e.isConnected ? ((t = this.sourceY.connectedPoint) === null || t === void 0 ? void 0 : t.ownerBlock).texture : null; } _getImageSourceBlock(e) { return e != null && e.isConnected ? e.connectedPoint.ownerBlock : null; } /** * Gets the sampler name associated with this texture */ get samplerName() { const e = this._getImageSourceBlock(this.source); return e ? e.samplerName : this._samplerName; } /** * Gets the samplerY name associated with this texture */ get samplerYName() { var e, t; return (t = (e = this._getImageSourceBlock(this.sourceY)) === null || e === void 0 ? void 0 : e.samplerName) !== null && t !== void 0 ? t : null; } /** * Gets the samplerZ name associated with this texture */ get samplerZName() { var e, t; return (t = (e = this._getImageSourceBlock(this.sourceZ)) === null || e === void 0 ? void 0 : e.samplerName) !== null && t !== void 0 ? t : null; } /** * Gets a boolean indicating that this block is linked to an ImageSourceBlock */ get hasImageSource() { return this.source.isConnected; } /** * Gets or sets a boolean indicating if content needs to be converted to gamma space */ set convertToGammaSpace(e) { var t; if (e !== this._convertToGammaSpace && (this._convertToGammaSpace = e, this.texture)) { const i = (t = this.texture.getScene()) !== null && t !== void 0 ? t : gi.LastCreatedScene; i == null || i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this.texture)); } } get convertToGammaSpace() { return this._convertToGammaSpace; } /** * Gets or sets a boolean indicating if content needs to be converted to linear space */ set convertToLinearSpace(e) { var t; if (e !== this._convertToLinearSpace && (this._convertToLinearSpace = e, this.texture)) { const i = (t = this.texture.getScene()) !== null && t !== void 0 ? 
t : gi.LastCreatedScene; i == null || i.markAllMaterialsAsDirty(1, (r) => r.hasTexture(this.texture)); } } get convertToLinearSpace() { return this._convertToLinearSpace; } /** * Create a new TriPlanarBlock * @param name defines the block name */ constructor(e, t = !1) { super(e, Le.Neutral), this.projectAsCube = !1, this._convertToGammaSpace = !1, this._convertToLinearSpace = !1, this.disableLevelMultiplication = !1, this.registerInput("position", ue.AutoDetect, !1), this.registerInput("normal", ue.AutoDetect, !1), this.registerInput("sharpness", ue.Float, !0), this.registerInput("source", ue.Object, !0, Le.VertexAndFragment, new Yo("source", this, no.Input, U4, "ImageSourceBlock")), this.registerInput("sourceY", ue.Object, !0, Le.VertexAndFragment, new Yo("sourceY", this, no.Input, U4, "ImageSourceBlock")), t || this.registerInput("sourceZ", ue.Object, !0, Le.VertexAndFragment, new Yo("sourceZ", this, no.Input, U4, "ImageSourceBlock")), this.registerOutput("rgba", ue.Color4, Le.Neutral), this.registerOutput("rgb", ue.Color3, Le.Neutral), this.registerOutput("r", ue.Float, Le.Neutral), this.registerOutput("g", ue.Float, Le.Neutral), this.registerOutput("b", ue.Float, Le.Neutral), this.registerOutput("a", ue.Float, Le.Neutral), this.registerOutput("level", ue.Float, Le.Neutral), this._inputs[0].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4), this._inputs[1].addExcludedConnectionPointFromAllowedTypes(ue.Color3 | ue.Vector3 | ue.Vector4); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TriPlanarBlock"; } /** * Gets the position input component */ get position() { return this._inputs[0]; } /** * Gets the normal input component */ get normal() { return this._inputs[1]; } /** * Gets the sharpness input component */ get sharpness() { return this._inputs[2]; } /** * Gets the source input component */ get source() { return this._inputs[3]; } /** * Gets the sourceY input component */ get sourceY() { return this._inputs[4]; } /** * Gets the sourceZ input component */ get sourceZ() { return this._inputs[5]; } /** * Gets the rgba output component */ get rgba() { return this._outputs[0]; } /** * Gets the rgb output component */ get rgb() { return this._outputs[1]; } /** * Gets the r output component */ get r() { return this._outputs[2]; } /** * Gets the g output component */ get g() { return this._outputs[3]; } /** * Gets the b output component */ get b() { return this._outputs[4]; } /** * Gets the a output component */ get a() { return this._outputs[5]; } /** * Gets the level output component */ get level() { return this._outputs[6]; } prepareDefines(e, t, i) { if (!i._areTexturesDirty) return; const r = this.convertToGammaSpace && this.texture && !this.texture.gammaSpace, s = this.convertToLinearSpace && this.texture && this.texture.gammaSpace; i.setValue(this._linearDefineName, r, !0), i.setValue(this._gammaDefineName, s, !0); } isReady() { return !(this.texture && !this.texture.isReadyOrNotBlocking()); } bind(e) { this.texture && (e.setFloat(this._textureInfoName, this.texture.level), this._imageSource || e.setTexture(this._samplerName, this.texture)); } _generateTextureLookup(e) { var t, i; const r = this.samplerName, s = (t = this.samplerYName) !== null && t !== void 0 ? t : r, n = (i = this.samplerZName) !== null && i !== void 0 ? i : r, a = this.sharpness.isConnected ? 
this.sharpness.associatedVariableName : "1.0", l = e._getFreeVariableName("x"), o = e._getFreeVariableName("y"), u = e._getFreeVariableName("z"), h = e._getFreeVariableName("w"), d = e._getFreeVariableName("n"), f = e._getFreeVariableName("uvx"), p = e._getFreeVariableName("uvy"), m = e._getFreeVariableName("uvz"); e.compilationString += ` vec3 ${d} = ${this.normal.associatedVariableName}.xyz; vec2 ${f} = ${this.position.associatedVariableName}.yz; vec2 ${p} = ${this.position.associatedVariableName}.zx; vec2 ${m} = ${this.position.associatedVariableName}.xy; `, this.projectAsCube && (e.compilationString += ` ${f}.xy = ${f}.yx; if (${d}.x >= 0.0) { ${f}.x = -${f}.x; } if (${d}.y < 0.0) { ${p}.y = -${p}.y; } if (${d}.z < 0.0) { ${m}.x = -${m}.x; } `), e.compilationString += ` vec4 ${l} = texture2D(${r}, ${f}); vec4 ${o} = texture2D(${s}, ${p}); vec4 ${u} = texture2D(${n}, ${m}); // blend weights vec3 ${h} = pow(abs(${d}), vec3(${a})); // blend and return vec4 ${this._tempTextureRead} = (${l}*${h}.x + ${o}*${h}.y + ${u}*${h}.z) / (${h}.x + ${h}.y + ${h}.z); `; } _generateConversionCode(e, t, i) { i !== "a" && ((!this.texture || !this.texture.gammaSpace) && (e.compilationString += `#ifdef ${this._linearDefineName} ${t.associatedVariableName} = toGammaSpace(${t.associatedVariableName}); #endif `), e.compilationString += `#ifdef ${this._gammaDefineName} ${t.associatedVariableName} = toLinearSpace(${t.associatedVariableName}); #endif `); } _writeOutput(e, t, i) { let r = ""; this.disableLevelMultiplication || (r = ` * ${this._textureInfoName}`), e.compilationString += `${this._declareOutput(t, e)} = ${this._tempTextureRead}.${i}${r}; `, this._generateConversionCode(e, t, i); } _buildBlock(e) { super._buildBlock(e), this.source.isConnected ? this._imageSource = this.source.connectedPoint.ownerBlock : this._imageSource = null, this._textureInfoName = e._getFreeVariableName("textureInfoName"), this.level.associatedVariableName = this._textureInfoName, this._tempTextureRead = e._getFreeVariableName("tempTextureRead"), this._linearDefineName = e._getFreeDefineName("ISLINEAR"), this._gammaDefineName = e._getFreeDefineName("ISGAMMA"), this._imageSource || (this._samplerName = e._getFreeVariableName(this.name + "Sampler"), e._emit2DSampler(this._samplerName)), e.sharedData.blockingBlocks.push(this), e.sharedData.textureBlocks.push(this), e.sharedData.blocksWithDefines.push(this), e.sharedData.bindableBlocks.push(this); const t = `//${this.name}`; e._emitFunctionFromInclude("helperFunctions", t), e._emitUniformFromString(this._textureInfoName, "float"), this._generateTextureLookup(e); for (const i of this._outputs) i.hasEndpoints && i.name !== "level" && this._writeOutput(e, i, i.name); return this; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.convertToGammaSpace = ${this.convertToGammaSpace}; `, e += `${this._codeVariableName}.convertToLinearSpace = ${this.convertToLinearSpace}; `, e += `${this._codeVariableName}.disableLevelMultiplication = ${this.disableLevelMultiplication}; `, e += `${this._codeVariableName}.projectAsCube = ${this.projectAsCube}; `, this.texture && (e += `${this._codeVariableName}.texture = new BABYLON.Texture("${this.texture.name}", null, ${this.texture.noMipmap}, ${this.texture.invertY}, ${this.texture.samplingMode}); `, e += `${this._codeVariableName}.texture.wrapU = ${this.texture.wrapU}; `, e += `${this._codeVariableName}.texture.wrapV = ${this.texture.wrapV}; `, e += `${this._codeVariableName}.texture.uAng = 
${this.texture.uAng}; `, e += `${this._codeVariableName}.texture.vAng = ${this.texture.vAng}; `, e += `${this._codeVariableName}.texture.wAng = ${this.texture.wAng}; `, e += `${this._codeVariableName}.texture.uOffset = ${this.texture.uOffset}; `, e += `${this._codeVariableName}.texture.vOffset = ${this.texture.vOffset}; `, e += `${this._codeVariableName}.texture.uScale = ${this.texture.uScale}; `, e += `${this._codeVariableName}.texture.vScale = ${this.texture.vScale}; `, e += `${this._codeVariableName}.texture.coordinatesMode = ${this.texture.coordinatesMode}; `), e; } serialize() { const e = super.serialize(); return e.convertToGammaSpace = this.convertToGammaSpace, e.convertToLinearSpace = this.convertToLinearSpace, e.disableLevelMultiplication = this.disableLevelMultiplication, e.projectAsCube = this.projectAsCube, !this.hasImageSource && this.texture && !this.texture.isRenderTarget && this.texture.getClassName() !== "VideoTexture" && (e.texture = this.texture.serialize()), e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.convertToGammaSpace = e.convertToGammaSpace, this.convertToLinearSpace = !!e.convertToLinearSpace, this.disableLevelMultiplication = !!e.disableLevelMultiplication, this.projectAsCube = !!e.projectAsCube, e.texture && !Ta.IgnoreTexturesAtLoadTime && e.texture.url !== void 0 && (i = e.texture.url.indexOf("data:") === 0 ? "" : i, this.texture = De.Parse(e.texture, t, i)); } } F([ ir("Project as cube", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], FU.prototype, "projectAsCube", void 0); Be("BABYLON.TriPlanarBlock", FU); class Ise extends FU { /** * Create a new BiPlanarBlock * @param name defines the block name */ constructor(e) { super(e, !0); } /** * Gets the current class name * @returns the class name */ getClassName() { return "BiPlanarBlock"; } _generateTextureLookup(e) { var t; const i = this.samplerName, r = (t = this.samplerYName) !== null && t !== void 0 ? t : this.samplerName, s = this.sharpness.isConnected ? this.sharpness.associatedVariableName : "1.0", n = e._getFreeVariableName("dpdx"), a = e._getFreeVariableName("dpdy"), l = e._getFreeVariableName("n"), o = e._getFreeVariableName("ma"), u = e._getFreeVariableName("mi"), h = e._getFreeVariableName("me"), d = e._getFreeVariableName("x"), f = e._getFreeVariableName("y"), p = e._getFreeVariableName("y"); e.compilationString += ` // grab coord derivatives for texturing vec3 ${n} = dFdx(${this.position.associatedVariableName}.xyz); vec3 ${a} = dFdy(${this.position.associatedVariableName}.xyz); vec3 ${l} = abs(${this.normal.associatedVariableName}.xyz); // determine major axis (in x; yz are following axis) ivec3 ${o} = (${l}.x>${l}.y && ${l}.x>${l}.z) ? ivec3(0,1,2) : (${l}.y>${l}.z) ? ivec3(1,2,0) : ivec3(2,0,1) ; // determine minor axis (in x; yz are following axis) ivec3 ${u} = (${l}.x<${l}.y && ${l}.x<${l}.z) ? ivec3(0,1,2) : (${l}.y<${l}.z) ? 
ivec3(1,2,0) : ivec3(2,0,1) ; // determine median axis (in x; yz are following axis) ivec3 ${h} = ivec3(3) - ${u} - ${o}; // project+fetch vec4 ${d} = textureGrad( ${i}, vec2( ${this.position.associatedVariableName}[${o}.y], ${this.position.associatedVariableName}[${o}.z]), vec2(${n}[${o}.y],${n}[${o}.z]), vec2(${a}[${o}.y],${a}[${o}.z]) ); vec4 ${f} = textureGrad( ${r}, vec2( ${this.position.associatedVariableName}[${h}.y], ${this.position.associatedVariableName}[${h}.z]), vec2(${n}[${h}.y],${n}[${h}.z]), vec2(${a}[${h}.y],${a}[${h}.z]) ); // blend factors vec2 ${p} = vec2(${l}[${o}.x],${l}[${h}.x]); // make local support ${p} = clamp( (${p}-0.5773)/(1.0-0.5773), 0.0, 1.0 ); // shape transition ${p} = pow( ${p}, vec2(${s}/8.0) ); // blend and return vec4 ${this._tempTextureRead} = (${d}*${p}.x + ${f}*${p}.y) / (${p}.x + ${p}.y); `; } } Be("BABYLON.BiPlanarBlock", Ise); class Dse extends Wi { /** * Creates a new MatrixDeterminantBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.Matrix), this.registerOutput("output", ue.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MatrixDeterminantBlock"; } /** * Gets the input matrix */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this.output, i = this.input; return e.compilationString += this._declareOutput(t, e) + `${t.associatedVariableName} = determinant(${i.associatedVariableName}); `, this; } } Be("BABYLON.MatrixDeterminantBlock", Dse); class Ose extends Wi { /** * Creates a new MatrixTransposeBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.registerInput("input", ue.Matrix), this.registerOutput("output", ue.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MatrixTransposeBlock"; } /** * Gets the input matrix */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this.output, i = this.input; return e.compilationString += this._declareOutput(t, e) + `${t.associatedVariableName} = transpose(${i.associatedVariableName}); `, this; } } Be("BABYLON.MatrixTransposeBlock", Ose); var ba; (function(c) { c[c.None = 0] = "None", c[c.Normal = 1] = "Normal", c[c.Tangent = 2] = "Tangent", c[c.VertexColor = 3] = "VertexColor", c[c.UV1 = 4] = "UV1", c[c.UV2 = 5] = "UV2", c[c.UV3 = 6] = "UV3", c[c.UV4 = 7] = "UV4", c[c.UV5 = 8] = "UV5", c[c.UV6 = 9] = "UV6"; })(ba || (ba = {})); class EW extends Wi { /** * Creates a new MeshAttributeExistsBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.attributeType = ba.None, this.registerInput("input", ue.AutoDetect), this.registerInput("fallback", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1), this._inputs[0].onConnectionObservable.add((t) => { var i; if (this.attributeType) return; const r = t.ownerBlock; if (r instanceof vs && r.isAttribute) switch (r.name) { case "color": this.attributeType = ba.VertexColor; break; case "normal": this.attributeType = ba.Normal; break; case "tangent": this.attributeType = ba.Tangent; break; case "uv": this.attributeType = ba.UV1; break; case "uv2": this.attributeType = ba.UV2; break; case "uv3": this.attributeType 
= ba.UV3; break; case "uv4": this.attributeType = ba.UV4; break; case "uv5": this.attributeType = ba.UV5; break; case "uv6": this.attributeType = ba.UV6; break; } else if (r instanceof vW) switch ((i = this.input.connectedPoint) === null || i === void 0 ? void 0 : i.name) { case "normalOutput": this.attributeType = ba.Normal; break; case "tangentOutput": this.attributeType = ba.Tangent; break; case "uvOutput": this.attributeType = ba.UV1; break; } }); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MeshAttributeExistsBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the fallback component when speciefied attribute doesn't exist */ get fallback() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); let t = null; switch (this.attributeType) { case ba.VertexColor: t = "VERTEXCOLOR_NME"; break; case ba.Normal: t = "NORMAL"; break; case ba.Tangent: t = "TANGENT"; break; case ba.UV1: t = "UV1"; break; case ba.UV2: t = "UV2"; break; case ba.UV3: t = "UV3"; break; case ba.UV4: t = "UV4"; break; case ba.UV5: t = "UV5"; break; case ba.UV6: t = "UV6"; break; } const i = this._declareOutput(this.output, e); return t && (e.compilationString += `#ifdef ${t} `), e.compilationString += `${i} = ${this.input.associatedVariableName}; `, t && (e.compilationString += `#else `, e.compilationString += `${i} = ${this.fallback.associatedVariableName}; `, e.compilationString += `#endif `), this; } serialize() { const e = super.serialize(); return e.attributeType = this.attributeType, e; } _deserialize(e, t, i) { var r; super._deserialize(e, t, i), this.attributeType = (r = e.attributeType) !== null && r !== void 0 ? 
r : ba.None; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return e += `${this._codeVariableName}.attributeType = ${this.attributeType}; `, e; } } F([ ir("Attribute lookup", $i.List, void 0, { notifiers: { update: !0 }, options: [ { label: "(None)", value: ba.None }, { label: "Normal", value: ba.Normal }, { label: "Tangent", value: ba.Tangent }, { label: "Vertex Color", value: ba.VertexColor }, { label: "UV1", value: ba.UV1 }, { label: "UV2", value: ba.UV2 }, { label: "UV3", value: ba.UV3 }, { label: "UV4", value: ba.UV4 }, { label: "UV5", value: ba.UV5 }, { label: "UV6", value: ba.UV6 } ] }) ], EW.prototype, "attributeType", void 0); Be("BABYLON.MeshAttributeExistsBlock", EW); var io; (function(c) { c[c.EaseInSine = 0] = "EaseInSine", c[c.EaseOutSine = 1] = "EaseOutSine", c[c.EaseInOutSine = 2] = "EaseInOutSine", c[c.EaseInQuad = 3] = "EaseInQuad", c[c.EaseOutQuad = 4] = "EaseOutQuad", c[c.EaseInOutQuad = 5] = "EaseInOutQuad", c[c.EaseInCubic = 6] = "EaseInCubic", c[c.EaseOutCubic = 7] = "EaseOutCubic", c[c.EaseInOutCubic = 8] = "EaseInOutCubic", c[c.EaseInQuart = 9] = "EaseInQuart", c[c.EaseOutQuart = 10] = "EaseOutQuart", c[c.EaseInOutQuart = 11] = "EaseInOutQuart", c[c.EaseInQuint = 12] = "EaseInQuint", c[c.EaseOutQuint = 13] = "EaseOutQuint", c[c.EaseInOutQuint = 14] = "EaseInOutQuint", c[c.EaseInExpo = 15] = "EaseInExpo", c[c.EaseOutExpo = 16] = "EaseOutExpo", c[c.EaseInOutExpo = 17] = "EaseInOutExpo", c[c.EaseInCirc = 18] = "EaseInCirc", c[c.EaseOutCirc = 19] = "EaseOutCirc", c[c.EaseInOutCirc = 20] = "EaseInOutCirc", c[c.EaseInBack = 21] = "EaseInBack", c[c.EaseOutBack = 22] = "EaseOutBack", c[c.EaseInOutBack = 23] = "EaseInOutBack", c[c.EaseInElastic = 24] = "EaseInElastic", c[c.EaseOutElastic = 25] = "EaseOutElastic", c[c.EaseInOutElastic = 26] = "EaseInOutElastic"; })(io || (io = {})); class wse extends Wi { /** * Creates a new CurveBlock * @param name defines the block name */ constructor(e) { super(e, Le.Neutral), this.type = io.EaseInOutSine, this.registerInput("input", ue.AutoDetect), this.registerOutput("output", ue.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(ue.Matrix), this._inputs[0].excludedConnectionPointTypes.push(ue.Object), this._inputs[0].excludedConnectionPointTypes.push(ue.Int); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CurveBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _duplicateEntry(e, t) { return `ret.${t} = ${e.replace(/VAL/g, "v." + t)}`; } _duplicateEntryDirect(e) { return `return ${e.replace(/VAL/g, "v")}`; } _duplicateVector(e, t) { if (t === "float") return this._duplicateEntryDirect(e); const i = parseInt(t.replace("vec", "")); let r = ` vec${i} ret = vec${i}(0.0); `; for (let s = 1; s <= i; s++) r += this._duplicateEntry(e, s === 1 ? "x" : s === 2 ? "y" : s === 3 ? 
"z" : "w") + `; `; return r += `return ret; `, r; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0]; let i = "", r = "", s = ""; switch (this.input.type) { case ue.Float: s = "float"; break; case ue.Vector2: s = "vec2"; break; case ue.Vector3: case ue.Color3: s = "vec3"; break; case ue.Vector4: case ue.Color4: s = "vec4"; break; } switch (r = io[this.type] + "_" + s, this.type) { case io.EaseInSine: i = "return 1.0 - cos((v * 3.1415) / 2.0)"; break; case io.EaseOutSine: i = "return sin((v * 3.1415) / 2.0)"; break; case io.EaseInOutSine: i = "return -(cos(v * 3.1415) - 1.0) / 2.0"; break; case io.EaseInQuad: i = "return v * v"; break; case io.EaseOutQuad: i = "return (1.0 - v) * (1.0 - v)"; break; case io.EaseInOutQuad: { const n = "VAL < 0.5 ? 2.0 * VAL * VAL : 1.0 - pow(-2.0 * VAL + 2.0, 2.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInCubic: i = "return v * v * v"; break; case io.EaseOutCubic: { const n = "1.0 - pow(1.0 - VAL, 3.0)"; i = this._duplicateVector(n, s); break; } case io.EaseInOutCubic: { const n = "VAL < 0.5 ? 4.0 * VAL * VAL * VAL : 1.0 - pow(-2.0 * VAL + 2.0, 3.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInQuart: i = "return v * v * v * v"; break; case io.EaseOutQuart: { const n = "1.0 - pow(1.0 - VAL, 4.0)"; i = this._duplicateVector(n, s); break; } case io.EaseInOutQuart: { const n = "VAL < 0.5 ? 8.0 * VAL * VAL * VAL * VAL : 1.0 - pow(-2.0 * VAL + 2.0, 4.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInQuint: i = "return v * v * v * v * v"; break; case io.EaseOutQuint: { const n = "1.0 - pow(1.0 - VAL, 5.0)"; i = this._duplicateVector(n, s); break; } case io.EaseInOutQuint: { const n = "VAL < 0.5 ? 16.0 * VAL * VAL * VAL * VAL * VAL : 1.0 - pow(-2.0 * VAL + 2.0, 5.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInExpo: { const n = "VAL == 0.0 ? 0.0 : pow(2.0, 10.0 * VAL - 10.0)"; i = this._duplicateVector(n, s); break; } case io.EaseOutExpo: { const n = "VAL == 1.0 ? 1.0 : 1.0 - pow(2.0, -10.0 * VAL)"; i = this._duplicateVector(n, s); break; } case io.EaseInOutExpo: { const n = "VAL == 0.0 ? 0.0 : VAL == 1.0 ? 1.0 : VAL < 0.5 ? pow(2.0, 20.0 * VAL - 10.0) / 2.0 : (2.0 - pow(2.0, -20.0 * VAL + 10.0)) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInCirc: { const n = "1.0 - sqrt(1.0 - pow(VAL, 2.0))"; i = this._duplicateVector(n, s); break; } case io.EaseOutCirc: { const n = "sqrt(1.0 - pow(VAL - 1.0, 2.0))"; i = this._duplicateVector(n, s); break; } case io.EaseInOutCirc: { const n = "VAL < 0.5 ? (1.0 - sqrt(1.0 - pow(2.0 * VAL, 2.0))) / 2.0 : (sqrt(1.0 - pow(-2.0 * VAL + 2.0, 2.0)) + 1.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInBack: { i = "return 2.70158 * v * v * v - 1.70158 * v * v"; break; } case io.EaseOutBack: { const n = "2.70158 * pow(VAL - 1.0, 3.0) + 1.70158 * pow(VAL - 1.0, 2.0)"; i = this._duplicateVector(n, s); break; } case io.EaseInOutBack: { const n = "VAL < 0.5 ? (pow(2.0 * VAL, 2.0) * ((3.5949095) * 2.0 * VAL - 2.5949095)) / 2.0 : (pow(2.0 * VAL - 2.0, 2.0) * (3.5949095 * (VAL * 2.0 - 2.0) + 3.5949095) + 2.0) / 2.0"; i = this._duplicateVector(n, s); break; } case io.EaseInElastic: { const n = "VAL == 0.0 ? 0.0 : VAL == 1.0 ? 1.0 : -pow(2.0, 10.0 * VAL - 10.0) * sin((VAL * 10.0 - 10.75) * ((2.0 * 3.1415) / 3.0))"; i = this._duplicateVector(n, s); break; } case io.EaseOutElastic: { const n = "VAL == 0.0 ? 0.0 : VAL == 1.0 ? 
1.0 : pow(2.0, -10.0 * VAL) * sin((VAL * 10.0 - 0.75) * ((2.0 * 3.1415) / 3.0)) + 1.0"; i = this._duplicateVector(n, s); break; } case io.EaseInOutElastic: { const n = "VAL == 0.0 ? 0.0 : VAL == 1.0 ? 1.0 : VAL < 0.5 ? -(pow(2.0, 20.0 * VAL - 10.0) * sin((20.0 * VAL - 11.125) * ((2.0 * 3.1415) / 4.5))) / 2.0 : (pow(2.0, -20.0 * VAL + 10.0) * sin((20.0 * VAL - 11.125) * ((2.0 * 3.1415) / 4.5))) / 2.0 + 1.0"; i = this._duplicateVector(n, s); break; } } return e._emitFunction(r, `${s} ${r}(${s} v) {${i};} `, ""), e.compilationString += this._declareOutput(t, e) + ` = ${r}(${this.input.associatedVariableName}); `, this; } serialize() { const e = super.serialize(); return e.curveType = this.type, e; } _deserialize(e, t, i) { super._deserialize(e, t, i), this.type = e.curveType; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.type = BABYLON.CurveBlockTypes.${io[this.type]}; `; } } Be("BABYLON.CurveBlock", wse); class i0e { /** * Function used to optimize a NodeMaterial graph * @param _vertexOutputNodes defines the list of output nodes for the vertex shader * @param _fragmentOutputNodes defines the list of output nodes for the fragment shader */ optimize(e, t) { } } class r0e { constructor() { this.mm = /* @__PURE__ */ new Map(); } get(e, t) { const i = this.mm.get(e); if (i !== void 0) return i.get(t); } set(e, t, i) { let r = this.mm.get(e); r === void 0 && this.mm.set(e, r = /* @__PURE__ */ new Map()), r.set(t, i); } } class s0e { /** Gets the standalone status of the wrapper */ get standalone() { var e, t; return (t = (e = this._options) === null || e === void 0 ? void 0 : e.standalone) !== null && t !== void 0 ? t : !1; } /** Gets the base material the wrapper is built upon */ get baseMaterial() { return this._baseMaterial; } /** Gets the doNotInjectCode status of the wrapper */ get doNotInjectCode() { var e, t; return (t = (e = this._options) === null || e === void 0 ? void 0 : e.doNotInjectCode) !== null && t !== void 0 ? t : !1; } /** * Instantiate a new shadow depth wrapper. * It works by injecting some specific code in the vertex/fragment shaders of the base material and is used by a shadow generator to * generate the shadow depth map. For more information, please refer to the documentation: * https://doc.babylonjs.com/features/featuresDeepDive/lights/shadows * @param baseMaterial Material to wrap * @param scene Define the scene the material belongs to * @param options Options used to create the wrapper */ constructor(e, t, i) { this._baseMaterial = e, this._scene = t ?? gi.LastCreatedScene, this._options = i, this._subMeshToEffect = /* @__PURE__ */ new Map(), this._subMeshToDepthWrapper = new r0e(), this._meshes = /* @__PURE__ */ new Map(), this._onEffectCreatedObserver = this._baseMaterial.onEffectCreatedObservable.add((r) => { var s, n; const a = (s = r.subMesh) === null || s === void 0 ? void 0 : s.getMesh(); a && !this._meshes.has(a) && this._meshes.set(a, a.onDisposeObservable.add((l) => { const o = this._subMeshToEffect.keys(); for (let u = o.next(); u.done !== !0; u = o.next()) { const h = u.value; (h == null ? void 0 : h.getMesh()) === l && (this._subMeshToEffect.delete(h), this._deleteDepthWrapperEffect(h)); } })), ((n = this._subMeshToEffect.get(r.subMesh)) === null || n === void 0 ? 
void 0 : n[0]) !== r.effect && (this._subMeshToEffect.set(r.subMesh, [r.effect, this._scene.getEngine().currentRenderPassId]), this._deleteDepthWrapperEffect(r.subMesh)); }); } _deleteDepthWrapperEffect(e) { const t = this._subMeshToDepthWrapper.mm.get(e); t && (t.forEach((i) => { var r; (r = i.mainDrawWrapper.effect) === null || r === void 0 || r.dispose(); }), this._subMeshToDepthWrapper.mm.delete(e)); } /** * Gets the effect to use to generate the depth map * @param subMesh subMesh to get the effect for * @param shadowGenerator shadow generator to get the effect for * @param passIdForDrawWrapper Id of the pass for which the effect from the draw wrapper must be retrieved from * @returns the effect to use to generate the depth map for the subMesh + shadow generator specified */ getEffect(e, t, i) { var r; const s = (r = this._subMeshToDepthWrapper.mm.get(e)) === null || r === void 0 ? void 0 : r.get(t); if (!s) return null; let n = s.drawWrapper[i]; return n || (n = s.drawWrapper[i] = new $o(this._scene.getEngine()), n.setEffect(s.mainDrawWrapper.effect, s.mainDrawWrapper.defines)), n; } /** * Specifies that the submesh is ready to be used for depth rendering * @param subMesh submesh to check * @param defines the list of defines to take into account when checking the effect * @param shadowGenerator combined with subMesh, it defines the effect to check * @param useInstances specifies that instances should be used * @param passIdForDrawWrapper Id of the pass for which the draw wrapper should be created * @returns a boolean indicating that the submesh is ready or not */ isReadyForSubMesh(e, t, i, r, s) { var n, a; return this.standalone && !this._baseMaterial.isReadyForSubMesh(e.getMesh(), e, r) ? !1 : (a = (n = this._makeEffect(e, t, i, s)) === null || n === void 0 ? void 0 : n.isReady()) !== null && a !== void 0 ? a : !1; } /** * Disposes the resources */ dispose() { this._baseMaterial.onEffectCreatedObservable.remove(this._onEffectCreatedObserver), this._onEffectCreatedObserver = null; const e = this._meshes.entries(); for (let t = e.next(); t.done !== !0; t = e.next()) { const [i, r] = t.value; i.onDisposeObservable.remove(r); } } _makeEffect(e, t, i, r) { var s, n, a; const l = this._scene.getEngine(), o = this._subMeshToEffect.get(e); if (!o) return null; const [u, h] = o; let d = this._subMeshToDepthWrapper.get(e, i); if (!d) { const v = new $o(l); v.defines = (n = (s = e._getDrawWrapper(h)) === null || s === void 0 ? void 0 : s.defines) !== null && n !== void 0 ? n : null, d = { drawWrapper: [], mainDrawWrapper: v, depthDefines: "", token: G_() }, d.drawWrapper[r] = v, this._subMeshToDepthWrapper.set(e, i, d); } const f = t.join(` `); if (d.mainDrawWrapper.effect && f === d.depthDefines) return d.mainDrawWrapper.effect; d.depthDefines = f; const p = u.getUniformNames().slice(); let m = u.vertexSourceCodeBeforeMigration, _ = u.fragmentSourceCodeBeforeMigration; if (!this.doNotInjectCode) { const v = this._options && this._options.remappedVariables ? `#include(${this._options.remappedVariables.join(",")})` : Cr.IncludesShadersStore.shadowMapVertexNormalBias, C = this._options && this._options.remappedVariables ? `#include(${this._options.remappedVariables.join(",")})` : Cr.IncludesShadersStore.shadowMapVertexMetric, x = this._options && this._options.remappedVariables ? 
`#include(${this._options.remappedVariables.join(",")})` : Cr.IncludesShadersStore.shadowMapFragmentSoftTransparentShadow, b = Cr.IncludesShadersStore.shadowMapFragment; m = m.replace(/void\s+?main/g, Cr.IncludesShadersStore.shadowMapVertexExtraDeclaration + ` void main`), m = m.replace(/#define SHADOWDEPTH_NORMALBIAS|#define CUSTOM_VERTEX_UPDATE_WORLDPOS/g, v), m.indexOf("#define SHADOWDEPTH_METRIC") !== -1 ? m = m.replace(/#define SHADOWDEPTH_METRIC/g, C) : m = m.replace(/}\s*$/g, C + ` }`), m = m.replace(/#define SHADER_NAME.*?\n|out vec4 glFragColor;\n/g, ""); const S = _.indexOf("#define SHADOWDEPTH_SOFTTRANSPARENTSHADOW") >= 0 || _.indexOf("#define CUSTOM_FRAGMENT_BEFORE_FOG") >= 0, M = _.indexOf("#define SHADOWDEPTH_FRAGMENT") !== -1; let R = ""; S ? _ = _.replace(/#define SHADOWDEPTH_SOFTTRANSPARENTSHADOW|#define CUSTOM_FRAGMENT_BEFORE_FOG/g, x) : R = x + ` `, _ = _.replace(/void\s+?main/g, Cr.IncludesShadersStore.shadowMapFragmentExtraDeclaration + ` void main`), M ? _ = _.replace(/#define SHADOWDEPTH_FRAGMENT/g, b) : R += b + ` `, R && (_ = _.replace(/}\s*$/g, R + "}")), p.push("biasAndScaleSM", "depthValuesSM", "lightDataSM", "softTransparentShadowSM"); } d.mainDrawWrapper.effect = l.createEffect({ vertexSource: m, fragmentSource: _, vertexToken: d.token, fragmentToken: d.token }, { attributes: u.getAttributesNames(), uniformsNames: p, uniformBuffersNames: u.getUniformBuffersNames(), samplers: u.getSamplers(), defines: f + ` ` + u.defines.replace("#define SHADOWS", "").replace(/#define SHADOW\d/g, ""), indexParameters: u.getIndexParameters() }, l); for (let v = 0; v < d.drawWrapper.length; ++v) v !== r && ((a = d.drawWrapper[v]) === null || a === void 0 || a.setEffect(d.mainDrawWrapper.effect, d.mainDrawWrapper.defines)); return d.mainDrawWrapper.effect; } } class Lse extends sa { constructor() { super(...arguments), this.DECAL = !1, this.DECALDIRECTUV = 0, this.DECAL_SMOOTHALPHA = !1, this.GAMMADECAL = !1; } } class Ew extends Q_ { /** @internal */ _markAllSubMeshesAsTexturesDirty() { this._enable(this._isEnabled), this._internalMarkAllSubMeshesAsTexturesDirty(); } /** * Creates a new DecalMapConfiguration * @param material The material to attach the decal map plugin to * @param addToPluginList If the plugin should be added to the material plugin list */ constructor(e, t = !0) { super(e, "DecalMap", 150, new Lse(), t), this._isEnabled = !1, this.isEnabled = !1, this._smoothAlpha = !1, this.smoothAlpha = !1, this.registerForExtraEvents = !0, this._internalMarkAllSubMeshesAsTexturesDirty = e._dirtyCallbacks[1]; } isReadyForSubMesh(e, t, i, r) { const s = r.getMesh().decalMap; return !this._isEnabled || !(s != null && s.texture) || !Tt.DecalMapEnabled || !t.texturesEnabled ? !0 : s.isReady(); } prepareDefines(e, t, i) { const r = i.decalMap; !this._isEnabled || !(r != null && r.texture) || !Tt.DecalMapEnabled || !t.texturesEnabled ? (e.DECAL && e.markAsTexturesDirty(), e.DECAL = !1) : ((!e.DECAL || e.GAMMADECAL !== r.texture.gammaSpace) && e.markAsTexturesDirty(), e.DECAL = !0, e.GAMMADECAL = r.texture.gammaSpace, e.DECAL_SMOOTHALPHA = this._smoothAlpha, Ke.PrepareDefinesForMergedUV(r.texture, e, "DECAL")); } /** * Note that we override hardBindForSubMesh and not bindForSubMesh because the material can be shared by multiple meshes, * in which case mustRebind could return false even though the decal map is different for each mesh: that's because the decal map * is not part of the material but hosted by the decalMap of the mesh instead. 
*/ hardBindForSubMesh(e, t, i, r) { const s = r.getMesh().decalMap; if (!this._isEnabled || !(s != null && s.texture) || !Tt.DecalMapEnabled || !t.texturesEnabled) return; const n = this._material.isFrozen, a = s.texture; (!e.useUbo || !n || !e.isSync) && (e.updateFloat4("vDecalInfos", a.coordinatesIndex, 0, 0, 0), Ke.BindTextureMatrix(a, e, "decal")), e.setTexture("decalSampler", a); } getClassName() { return "DecalMapConfiguration"; } getSamplers(e) { e.push("decalSampler"); } getUniforms() { return { ubo: [ { name: "vDecalInfos", size: 4, type: "vec4" }, { name: "decalMatrix", size: 16, type: "mat4" } ] }; } } F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ew.prototype, "isEnabled", void 0); F([ W(), ct("_markAllSubMeshesAsTexturesDirty") ], Ew.prototype, "smoothAlpha", void 0); Be("BABYLON.DecalMapConfiguration", Ew); function n0e(c) { return c instanceof on ? new E5(c) : null; } function a0e(c) { return c instanceof on ? new nf(c) : null; } function o0e(c) { return c instanceof on ? new _u(c) : null; } function l0e(c) { return c instanceof on ? new lf(c) : null; } function c0e(c) { return c instanceof on ? new K4(c) : null; } function u0e(c) { return c instanceof on ? new Zo(c) : null; } function h0e(c) { return c instanceof on || c instanceof Dt ? new dx(c) : null; } var XO; (function(c) { c[c.MATERIAL_TYPE_STANDARD = 0] = "MATERIAL_TYPE_STANDARD", c[c.MATERIAL_TYPE_PBR = 1] = "MATERIAL_TYPE_PBR", c[c.MATERIAL_TYPE_SIMPLE = 2] = "MATERIAL_TYPE_SIMPLE"; })(XO || (XO = {})); var vm; (function(c) { c[c.COLOR_MODE_SET = 0] = "COLOR_MODE_SET", c[c.COLOR_MODE_ADD = 1] = "COLOR_MODE_ADD", c[c.COLOR_MODE_MULTIPLY = 2] = "COLOR_MODE_MULTIPLY"; })(vm || (vm = {})); var jC; (function(c) { c[c.COLOR_DISTRIBUTION_TYPE_SEGMENT = 0] = "COLOR_DISTRIBUTION_TYPE_SEGMENT", c[c.COLOR_DISTRIBUTION_TYPE_LINE = 1] = "COLOR_DISTRIBUTION_TYPE_LINE"; })(jC || (jC = {})); class bh { } bh.DEFAULT_COLOR = ze.White(); bh.DEFAULT_WIDTH_ATTENUATED = 1; bh.DEFAULT_WIDTH = 0.1; class Hn { /** * Converts GreasedLinePoints to number[][] * @param points GreasedLinePoints * @returns number[][] with x, y, z coordinates of the points, like [[x, y, z, x, y, z, ...], [x, y, z, ...]] */ static ConvertPoints(e) { if (e.length && Array.isArray(e) && typeof e[0] == "number") return [e]; if (e.length && Array.isArray(e[0]) && typeof e[0][0] == "number") return e; if (e.length && !Array.isArray(e[0]) && e[0] instanceof D) { const t = []; for (let i = 0; i < e.length; i++) { const r = e[i]; t.push(r.x, r.y, r.z); } return [t]; } else if (e.length > 0 && Array.isArray(e[0]) && e[0].length > 0 && e[0][0] instanceof D) { const t = []; return e.forEach((r) => { t.push(r.flatMap((s) => [s.x, s.y, s.z])); }), t; } else { if (e instanceof Float32Array) return [Array.from(e)]; if (e.length && e[0] instanceof Float32Array) { const t = []; return e.forEach((i) => { t.push(Array.from(i)); }), t; } } return []; } /** * Omit zero length lines predicate for the MeshesToLines function * @param p1 point1 position of the face * @param p2 point2 position of the face * @param p3 point3 position of the face * @returns original points or null if any edge length is zero */ static OmitZeroLengthPredicate(e, t, i) { const r = []; return t.subtract(e).lengthSquared() > 0 && r.push([e, t]), i.subtract(t).lengthSquared() > 0 && r.push([t, i]), e.subtract(i).lengthSquared() > 0 && r.push([i, e]), r.length === 0 ? 
null : r; } /** * Omit duplicate lines predicate for the MeshesToLines function * @param p1 point1 position of the face * @param p2 point2 position of the face * @param p3 point3 position of the face * @returns original points or null if any edge length is zero */ static OmitDuplicatesPredicate(e, t, i, r) { const s = []; return Hn._SearchInPoints(e, t, r) || s.push([e, t]), Hn._SearchInPoints(t, i, r) || s.push([t, i]), Hn._SearchInPoints(i, e, r) || s.push([i, e]), s.length === 0 ? null : s; } static _SearchInPoints(e, t, i) { var r, s, n; for (const a of i) for (let l = 0; l < a.length; l++) if (!((r = a[l]) === null || r === void 0) && r.equals(e) && (!((s = a[l + 1]) === null || s === void 0) && s.equals(t) || !((n = a[l - 1]) === null || n === void 0) && n.equals(t))) return !0; return !1; } /** * Gets mesh triangles as line positions * @param meshes array of meshes * @param predicate predicate function which decides whether to include the mesh triangle/face in the ouput * @returns array of arrays of points */ static MeshesToLines(e, t) { const i = []; return e.forEach((r, s) => { const n = r.getVerticesData(Y.PositionKind), a = r.getIndices(); if (n && a) for (let l = 0, o = 0; l < a.length; l++) { const u = a[o++] * 3, h = a[o++] * 3, d = a[o++] * 3, f = new D(n[u], n[u + 1], n[u + 2]), p = new D(n[h], n[h + 1], n[h + 2]), m = new D(n[d], n[d + 1], n[d + 2]); if (t) { const _ = t(f, p, m, i, l, u, r, s, n, a); if (_) for (const v of _) i.push(v); } else i.push([f, p], [p, m], [m, f]); } }), i; } /** * Converts number coordinates to Vector3s * @param points number array of x, y, z, x, y z, ... coordinates * @returns Vector3 array */ static ToVector3Array(e) { if (Array.isArray(e[0])) { const r = [], s = e; for (const n of s) { const a = []; for (let l = 0; l < n.length; l += 3) a.push(new D(n[l], n[l + 1], n[l + 2])); r.push(a); } return r; } const t = e, i = []; for (let r = 0; r < t.length; r += 3) i.push(new D(t[r], t[r + 1], t[r + 2])); return i; } /** * Gets a number array from a Vector3 array. * You can you for example to convert your Vector3[] offsets to the required number[] for the offsets option. * @param points Vector3 array * @returns an array of x, y, z coordinates as numbers [x, y, z, x, y, z, x, y, z, ....] */ static ToNumberArray(e) { return e.flatMap((t) => [t.x, t.y, t.z]); } /** * Calculates the sum of points of every line and the number of points in each line. * This function is useful when you are drawing multiple lines in one mesh and you want * to know the counts. For example for creating an offsets table. * @param points point array * @returns points count info */ static GetPointsCountInfo(e) { const t = new Array(e.length); let i = 0; for (let r = e.length; r--; ) t[r] = e[r].length / 3, i += t[r]; return { total: i, counts: t }; } /** * Gets the length of the line counting all it's segments length * @param data array of line points * @returns length of the line */ static GetLineLength(e) { if (e.length === 0) return 0; let t; typeof e[0] == "number" ? t = Hn.ToVector3Array(e) : t = e; const i = de.Vector3[0]; let r = 0; for (let s = 0; s < t.length - 1; s++) { const n = t[s], a = t[s + 1]; r += a.subtractToRef(n, i).length(); } return r; } /** * Divides a segment into smaller segments. * A segment is a part of the line between it's two points. 
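* Hedged example (assumes the public name GreasedLineTools for this minified class):
* @example
*   // split the segment (0,0,0)-(4,0,0) into 4 equal parts -> 5 points
*   const pts = GreasedLineTools.SegmentizeSegmentByCount(new Vector3(0, 0, 0), new Vector3(4, 0, 0), 4);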
* @param point1 first point of the line * @param point2 second point of the line * @param segmentCount number of segments we want to have in the divided line * @returns */ static SegmentizeSegmentByCount(e, t, i) { const r = [], s = t.subtract(e), n = de.Vector3[0]; n.setAll(i); const a = de.Vector3[1]; s.divideToRef(n, a); let l = e.clone(); r.push(l); for (let o = 0; o < i; o++) l = l.clone(), r.push(l.addInPlace(a)); return r; } /** * Divides a line into segments. * A segment is a part of the line between it's two points. * @param what line points * @param segmentLength length of each segment of the resulting line (distance between two line points) * @returns line point */ static SegmentizeLineBySegmentLength(e, t) { const i = e[0] instanceof D ? Hn.GetLineSegments(e) : typeof e[0] == "number" ? Hn.GetLineSegments(Hn.ToVector3Array(e)) : e, r = []; return i.forEach((s) => { s.length > t ? Hn.SegmentizeSegmentByCount(s.point1, s.point2, Math.ceil(s.length / t)).forEach((a) => { r.push(a); }) : (r.push(s.point1), r.push(s.point2)); }), r; } /** * Divides a line into segments. * A segment is a part of the line between it's two points. * @param what line points * @param segmentCount number of segments * @returns line point */ static SegmentizeLineBySegmentCount(e, t) { const i = typeof e[0] == "number" ? Hn.ToVector3Array(e) : e, r = Hn.GetLineLength(i) / t; return Hn.SegmentizeLineBySegmentLength(i, r); } /** * Gets line segments. * A segment is a part of the line between it's two points. * @param points line points * @returns segments information of the line segment including starting point, ending point and the distance between them */ static GetLineSegments(e) { const t = []; for (let i = 0; i < e.length - 1; i++) { const r = e[i], s = e[i + 1], n = s.subtract(r).length(); t.push({ point1: r, point2: s, length: n }); } return t; } /** * Gets the minimum and the maximum length of a line segment in the line. * A segment is a part of the line between it's two points. * @param points line points * @returns */ static GetMinMaxSegmentLength(e) { const i = Hn.GetLineSegments(e).sort((r) => r.length); return { min: i[0].length, max: i[i.length - 1].length }; } /** * Finds the last visible position in world space of the line according to the visibility parameter * @param lineSegments segments of the line * @param lineLength total length of the line * @param visbility normalized value of visibility * @returns world space coordinate of the last visible piece of the line */ static GetPositionOnLineByVisibility(e, t, i, r = !1) { const s = t * i; let n = 0, a = 0; const l = e.length; for (let u = 0; u < l; u++) { if (s <= n + e[u].length) { a = u; break; } n += e[u].length; } const o = (s - n) / e[a].length; return e[a].point2.subtractToRef(e[a].point1, de.Vector3[0]), de.Vector3[1] = de.Vector3[0].multiplyByFloats(o, o, o), r || de.Vector3[1].addInPlace(e[a].point1), de.Vector3[1].clone(); } /** * Creates lines in a shape of circle/arc. * A segment is a part of the line between it's two points. * @param radiusX radiusX of the circle * @param segments number of segments in the circle * @param z z coordinate of the points. Defaults to 0. * @param radiusY radiusY of the circle - you can draw an oval if using different values * @param segmentAngle angle offset of the segments. Defaults to Math.PI * 2 / segments. Change this value to draw a part of the circle. 
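* Hedged example (assumes the public helpers GreasedLineTools and CreateGreasedLine behind these minified names):
* @example
*   // full circle, radius 2, 64 segments, drawn as a greased line
*   const circle = GreasedLineTools.GetCircleLinePoints(2, 64);
*   CreateGreasedLine("circle", { points: circle }, { width: 0.1 }, scene);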
* @returns line points */ static GetCircleLinePoints(e, t, i = 0, r = e, s = Math.PI * 2 / t) { const n = []; for (let a = 0; a <= t; a++) n.push(new D(Math.cos(a * s) * e, Math.sin(a * s) * r, i)); return n; } /** * Gets line points in a shape of a bezier curve * @param p0 bezier point0 * @param p1 bezier point1 * @param p2 bezier point2 * @param segments number of segments in the curve * @returns */ static GetBezierLinePoints(e, t, i, r) { return T4.CreateQuadraticBezier(e, t, i, r).getPoints().flatMap((s) => [s.x, s.y, s.z]); } /** * * @param position position of the arrow cap (mainly you want to create a triangle, set widthUp and widthDown to the same value and omit widthStartUp and widthStartDown) * @param direction direction which the arrow points to * @param length length (size) of the arrow cap itself * @param widthUp the arrow width above the line * @param widthDown the arrow width belove the line * @param widthStartUp the arrow width at the start of the arrow above the line. In most scenarios this is 0. * @param widthStartDown the arrow width at the start of the arrow below the line. In most scenarios this is 0. * @returns */ static GetArrowCap(e, t, i, r, s, n = 0, a = 0) { return { points: [e.clone(), e.add(t.multiplyByFloats(i, i, i))], widths: [r, s, n, a] }; } /** * Gets 3D positions of points from a text and font * @param text Text * @param size Size of the font * @param resolution Resolution of the font * @param fontData defines the font data (can be generated with http://gero3.github.io/facetype.js/) * @param z z coordinate * @param includeInner include the inner parts of the font in the result. Default true. If false, only the outlines will be returned. * @returns number[][] of 3D positions */ static GetPointsFromText(e, t, i, r, s = 0, n = !0) { const a = [], l = JK(e, t, i, r); for (const o of l) { for (const u of o.paths) { const h = [], d = u.getPoints(); for (const f of d) h.push(f.x, f.y, s); a.push(h); } if (n) for (const u of o.holes) { const h = [], d = u.getPoints(); for (const f of d) h.push(f.x, f.y, s); a.push(h); } } return a; } /** * Converts an array of Color3 to Uint8Array * @param colors Arrray of Color3 * @returns Uin8Array of colors [r, g, b, a, r, g, b, a, ...] */ static Color3toRGBAUint8(e) { const t = new Uint8Array(e.length * 4); for (let i = 0, r = 0; i < e.length; i++) t[r++] = e[i].r * 255, t[r++] = e[i].g * 255, t[r++] = e[i].b * 255, t[r++] = 255; return t; } /** * Creates a RawTexture from an RGBA color array and sets it on the plugin material instance. * @param name name of the texture * @param colors Uint8Array of colors */ static CreateColorsTexture(e, t, i, r) { const s = Hn.Color3toRGBAUint8(t), n = new Po(s, t.length, 1, $e.TEXTUREFORMAT_RGBA, r, !1, !0, i); return n.name = e, n; } /** * A minimum size texture for the colors sampler2D when there is no colors texture defined yet. * For fast switching using the useColors property without the need to use defines. * @param scene Scene * @returns empty colors texture */ static PrepareEmptyColorsTexture(e) { if (!bh.EmptyColorsTexture) { const t = new Uint8Array(4); bh.EmptyColorsTexture = new Po(t, 1, 1, $e.TEXTUREFORMAT_RGBA, e, !1, !1, Po.NEAREST_NEAREST), bh.EmptyColorsTexture.name = "grlEmptyColorsTexture"; } return bh.EmptyColorsTexture; } /** * Diposes the shared empty colors texture */ static DisposeEmptyColorsTexture() { var e; (e = bh.EmptyColorsTexture) === null || e === void 0 || e.dispose(), bh.EmptyColorsTexture = null; } /** * Converts boolean to number. 
* @param bool * @returns 1 if true, 0 if false. */ static BooleanToNumber(e) { return e ? 1 : 0; } } class Nse extends sa { constructor() { super(...arguments), this.GREASED_LINE_HAS_COLOR = !1, this.GREASED_LINE_SIZE_ATTENUATION = !1, this.GREASED_LINE_COLOR_DISTRIBUTION_TYPE_LINE = !1, this.GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM = !1, this.GREASED_LINE_CAMERA_FACING = !0; } } class cx extends Q_ { /** * Creates a new instance of the GreasedLinePluginMaterial * @param material base material for the plugin * @param scene the scene * @param options plugin options */ constructor(e, t, i) { var r, s, n, a, l, o, u, h, d, f, p, m, _, v, C, x, b; i = i || { color: bh.DEFAULT_COLOR }; const S = new Nse(); S.GREASED_LINE_HAS_COLOR = !!i.color && !i.useColors, S.GREASED_LINE_SIZE_ATTENUATION = (r = i.sizeAttenuation) !== null && r !== void 0 ? r : !1, S.GREASED_LINE_COLOR_DISTRIBUTION_TYPE_LINE = i.colorDistributionType === jC.COLOR_DISTRIBUTION_TYPE_LINE, S.GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM = (t ?? e.getScene()).useRightHandedSystem, S.GREASED_LINE_CAMERA_FACING = (s = i.cameraFacing) !== null && s !== void 0 ? s : !0, super(e, cx.GREASED_LINE_MATERIAL_NAME, 200, S), this.colorsTexture = null, this._scene = t ?? e.getScene(), this._engine = this._scene.getEngine(), this._cameraFacing = (n = i.cameraFacing) !== null && n !== void 0 ? n : !0, this.visibility = (a = i.visibility) !== null && a !== void 0 ? a : 1, this.useDash = (l = i.useDash) !== null && l !== void 0 ? l : !1, this.dashRatio = (o = i.dashRatio) !== null && o !== void 0 ? o : 0.5, this.dashOffset = (u = i.dashOffset) !== null && u !== void 0 ? u : 0, this.width = i.width ? i.width : i.sizeAttenuation ? bh.DEFAULT_WIDTH_ATTENUATED : bh.DEFAULT_WIDTH, this._sizeAttenuation = (h = i.sizeAttenuation) !== null && h !== void 0 ? h : !1, this.colorMode = (d = i.colorMode) !== null && d !== void 0 ? d : vm.COLOR_MODE_SET, this._color = (f = i.color) !== null && f !== void 0 ? f : null, this.useColors = (p = i.useColors) !== null && p !== void 0 ? p : !1, this._colorsDistributionType = (m = i.colorDistributionType) !== null && m !== void 0 ? m : jC.COLOR_DISTRIBUTION_TYPE_SEGMENT, this.colorsSampling = (_ = i.colorsSampling) !== null && _ !== void 0 ? _ : Po.NEAREST_NEAREST, this._colors = (v = i.colors) !== null && v !== void 0 ? v : null, this.dashCount = (C = i.dashCount) !== null && C !== void 0 ? C : 1, this.resolution = (x = i.resolution) !== null && x !== void 0 ? x : new at(this._engine.getRenderWidth(), this._engine.getRenderHeight()), i.colorsTexture ? this.colorsTexture = i.colorsTexture : this._colors ? this.colorsTexture = Hn.CreateColorsTexture(`${e.name}-colors-texture`, this._colors, this.colorsSampling, this._scene) : (this._color = (b = this._color) !== null && b !== void 0 ? b : bh.DEFAULT_COLOR, Hn.PrepareEmptyColorsTexture(this._scene)), this._engine.onDisposeObservable.add(() => { Hn.DisposeEmptyColorsTexture(); }), this._enable(!0); } /** * Get the shader attributes * @param attributes array which will be filled with the attributes */ getAttributes(e) { e.push("grl_offsets"), e.push("grl_widths"), e.push("grl_colorPointers"), e.push("grl_counters"), this._cameraFacing ? 
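/* Hedged usage sketch (not from this bundle): this plugin (`cx`, apparently BABYLON.GreasedLinePluginMaterial)
   is normally created for you by the CreateGreasedLine builder rather than instantiated directly:
     const line = CreateGreasedLine("line",
       { points: [-1, 0, 0, 1, 0, 0] },            // flat x,y,z triplets
       { color: Color3.Red(), width: 0.2 },        // material builder options
       scene);
*/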
(e.push("grl_previousAndSide"), e.push("grl_nextAndCounters")) : e.push("grl_slopes"); } /** * Get the shader samplers * @param samplers */ getSamplers(e) { e.push("grl_colors"); } /** * Get the shader textures * @param activeTextures */ getActiveTextures(e) { this.colorsTexture && e.push(this.colorsTexture); } /** * Get the shader uniforms * @returns uniforms */ getUniforms() { const e = [ { name: "grl_singleColor", size: 3, type: "vec3" }, { name: "grl_dashOptions", size: 4, type: "vec4" }, { name: "grl_colorMode_visibility_colorsWidth_useColors", size: 4, type: "vec4" } ]; return this._cameraFacing && e.push({ name: "grl_projection", size: 16, type: "mat4" }, { name: "grl_aspect_resolution_lineWidth", size: 4, type: "vec4" }), { ubo: e, vertex: this._cameraFacing ? ` uniform vec4 grl_aspect_resolution_lineWidth; uniform mat4 grl_projection; ` : "", fragment: ` uniform vec4 grl_dashOptions; uniform vec4 grl_colorMode_visibility_colorsWidth_useColors; uniform vec3 grl_singleColor; ` }; } // only getter, it doesn't make sense to use this plugin on a mesh other than GreasedLineMesh // and it doesn't make sense to disable it on the mesh get isEnabled() { return !0; } /** * Bind the uniform buffer * @param uniformBuffer */ bindForSubMesh(e) { var t; if (this._cameraFacing) { const s = this._scene.activeCamera; if (s) { const a = s.getProjectionMatrix(); e.updateMatrix("grl_projection", a); } else throw Error("GreasedLinePluginMaterial requires an active camera."); const n = de.Vector4[0]; n.x = this._aspect, n.y = this._resolution.x, n.z = this._resolution.y, n.w = this.width, e.updateVector4("grl_aspect_resolution_lineWidth", n); } const i = de.Vector4[0]; i.x = Hn.BooleanToNumber(this.useDash), i.y = this._dashArray, i.z = this.dashOffset, i.w = this.dashRatio, e.updateVector4("grl_dashOptions", i); const r = de.Vector4[1]; r.x = this.colorMode, r.y = this.visibility, r.z = this.colorsTexture ? this.colorsTexture.getSize().width : 0, r.w = Hn.BooleanToNumber(this.useColors), e.updateVector4("grl_colorMode_visibility_colorsWidth_useColors", r), this._color && e.updateColor3("grl_singleColor", this._color), e.setTexture("grl_colors", (t = this.colorsTexture) !== null && t !== void 0 ? 
t : bh.EmptyColorsTexture); } /** * Prepare the defines * @param defines * @param _scene * @param _mesh */ prepareDefines(e, t, i) { e.GREASED_LINE_HAS_COLOR = !!this.color && !this.useColors, e.GREASED_LINE_SIZE_ATTENUATION = this._sizeAttenuation, e.GREASED_LINE_COLOR_DISTRIBUTION_TYPE_LINE = this._colorsDistributionType === jC.COLOR_DISTRIBUTION_TYPE_LINE, e.GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM = t.useRightHandedSystem, e.GREASED_LINE_CAMERA_FACING = this._cameraFacing; } /** * Get the class name * @returns class name */ getClassName() { return cx.GREASED_LINE_MATERIAL_NAME; } /** * Get shader code * @param shaderType vertex/fragment * @returns shader code */ getCustomCode(e) { if (e === "vertex") { const t = { // eslint-disable-next-line @typescript-eslint/naming-convention CUSTOM_VERTEX_DEFINITIONS: ` attribute float grl_widths; attribute vec3 grl_offsets; attribute float grl_colorPointers; varying float grlCounters; varying float grlColorPointer; #ifdef GREASED_LINE_CAMERA_FACING attribute vec4 grl_previousAndSide; attribute vec4 grl_nextAndCounters; vec2 grlFix( vec4 i, float aspect ) { vec2 res = i.xy / i.w; res.x *= aspect; return res; } #else attribute vec3 grl_slopes; attribute float grl_counters; #endif `, // eslint-disable-next-line @typescript-eslint/naming-convention CUSTOM_VERTEX_UPDATE_POSITION: ` #ifdef GREASED_LINE_CAMERA_FACING vec3 grlPositionOffset = grl_offsets; positionUpdated += grlPositionOffset; #else positionUpdated = (positionUpdated + grl_offsets) + (grl_slopes * grl_widths); #endif `, // eslint-disable-next-line @typescript-eslint/naming-convention CUSTOM_VERTEX_MAIN_END: ` grlColorPointer = grl_colorPointers; #ifdef GREASED_LINE_CAMERA_FACING float grlAspect = grl_aspect_resolution_lineWidth.x; float grlBaseWidth = grl_aspect_resolution_lineWidth.w; vec3 grlPrevious = grl_previousAndSide.xyz; float grlSide = grl_previousAndSide.w; vec3 grlNext = grl_nextAndCounters.xyz; grlCounters = grl_nextAndCounters.w; mat4 grlMatrix = viewProjection * finalWorld; vec4 grlFinalPosition = grlMatrix * vec4( positionUpdated , 1.0 ); vec4 grlPrevPos = grlMatrix * vec4( grlPrevious + grlPositionOffset, 1.0 ); vec4 grlNextPos = grlMatrix * vec4( grlNext + grlPositionOffset, 1.0 ); vec2 grlCurrentP = grlFix( grlFinalPosition, grlAspect ); vec2 grlPrevP = grlFix( grlPrevPos, grlAspect ); vec2 grlNextP = grlFix( grlNextPos, grlAspect ); float grlWidth = grlBaseWidth * grl_widths; vec2 grlDir; if( grlNextP == grlCurrentP ) grlDir = normalize( grlCurrentP - grlPrevP ); else if( grlPrevP == grlCurrentP ) grlDir = normalize( grlNextP - grlCurrentP ); else { vec2 grlDir1 = normalize( grlCurrentP - grlPrevP ); vec2 grlDir2 = normalize( grlNextP - grlCurrentP ); grlDir = normalize( grlDir1 + grlDir2 ); } vec4 grlNormal = vec4( -grlDir.y, grlDir.x, 0., 1. ); #ifdef GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM grlNormal.xy *= -.5 * grlWidth; #else grlNormal.xy *= .5 * grlWidth; #endif grlNormal *= grl_projection; #ifdef GREASED_LINE_SIZE_ATTENUATION grlNormal.xy *= grlFinalPosition.w; grlNormal.xy /= ( vec4( grl_aspect_resolution_lineWidth.yz, 0., 1. ) * grl_projection ).xy; #endif grlFinalPosition.xy += grlNormal.xy * grlSide; gl_Position = grlFinalPosition; vPositionW = vec3(grlFinalPosition); #else grlCounters = grl_counters; #endif ` }; return this._cameraFacing && (t["!gl_Position\\=viewProjection\\*worldPos;"] = "//"), t; } return e === "fragment" ? 
{ // eslint-disable-next-line @typescript-eslint/naming-convention CUSTOM_FRAGMENT_DEFINITIONS: ` varying float grlCounters; varying float grlColorPointer; uniform sampler2D grl_colors; `, // eslint-disable-next-line @typescript-eslint/naming-convention CUSTOM_FRAGMENT_MAIN_END: ` float grlColorMode = grl_colorMode_visibility_colorsWidth_useColors.x; float grlVisibility = grl_colorMode_visibility_colorsWidth_useColors.y; float grlColorsWidth = grl_colorMode_visibility_colorsWidth_useColors.z; float grlUseColors = grl_colorMode_visibility_colorsWidth_useColors.w; float grlUseDash = grl_dashOptions.x; float grlDashArray = grl_dashOptions.y; float grlDashOffset = grl_dashOptions.z; float grlDashRatio = grl_dashOptions.w; gl_FragColor.a *= step(grlCounters, grlVisibility); if( gl_FragColor.a == 0. ) discard; if(grlUseDash == 1.){ gl_FragColor.a *= ceil(mod(grlCounters + grlDashOffset, grlDashArray) - (grlDashArray * grlDashRatio)); if (gl_FragColor.a == 0.) discard; } #ifdef GREASED_LINE_HAS_COLOR if (grlColorMode == ${vm.COLOR_MODE_SET}.) { gl_FragColor.rgb = grl_singleColor; } else if (grlColorMode == ${vm.COLOR_MODE_ADD}.) { gl_FragColor.rgb += grl_singleColor; } else if (grlColorMode == ${vm.COLOR_MODE_MULTIPLY}.) { gl_FragColor.rgb *= grl_singleColor; } #else if (grlUseColors == 1.) { #ifdef GREASED_LINE_COLOR_DISTRIBUTION_TYPE_LINE vec4 grlColor = texture2D(grl_colors, vec2(grlCounters, 0.), 0.); #else vec4 grlColor = texture2D(grl_colors, vec2(grlColorPointer/grlColorsWidth, 0.), 0.); #endif if (grlColorMode == ${vm.COLOR_MODE_SET}.) { gl_FragColor = grlColor; } else if (grlColorMode == ${vm.COLOR_MODE_ADD}.) { gl_FragColor += grlColor; } else if (grlColorMode == ${vm.COLOR_MODE_MULTIPLY}.) { gl_FragColor *= grlColor; } } #endif ` } : null; } /** * Disposes the plugin material. */ dispose() { var e; (e = this.colorsTexture) === null || e === void 0 || e.dispose(), super.dispose(); } /** * Returns the colors used to colorize the line */ get colors() { return this._colors; } /** * Sets the colors used to colorize the line */ set colors(e) { this.setColors(e); } /** * Creates or updates the colors texture * @param colors color table RGBA * @param lazy if lazy, the colors are not updated * @param forceNewTexture force creation of a new texture * @returns */ setColors(e, t = !1, i = !1) { var r, s, n, a; const l = (s = (r = this._colors) === null || r === void 0 ? void 0 : r.length) !== null && s !== void 0 ? s : 0; if (this._colors = e, e === null || e.length === 0) { (n = this.colorsTexture) === null || n === void 0 || n.dispose(); return; } if (!(t && !i)) if (this.colorsTexture && l === e.length && !i) { const o = Hn.Color3toRGBAUint8(e); this.colorsTexture.update(o); } else (a = this.colorsTexture) === null || a === void 0 || a.dispose(), this.colorsTexture = Hn.CreateColorsTexture(`${this._material.name}-colors-texture`, e, this.colorsSampling, this._scene); } /** * Updates the material. Use when material created in lazy mode. */ updateLazy() { this._colors && this.setColors(this._colors, !1, !0); } /** * Gets the number of dashes in the line */ get dashCount() { return this._dashCount; } /** * Sets the number of dashes in the line * @param value dash */ set dashCount(e) { this._dashCount = e, this._dashArray = 1 / e; } /** * If set to true the line will be rendered always with the same width regardless how far it is located from the camera. * Not supported for non camera facing lines. 
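* Hedged example (assumes this property is what the builder option `sizeAttenuation` maps to):
* @example
*   // per the description above: keep the rendered width independent of camera distance
*   CreateGreasedLine("line", { points }, { sizeAttenuation: true, width: 1 }, scene);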
*/ get sizeAttenuation() { return this._sizeAttenuation; } /** * Turn on/off size attenuation of the width option and widths array. * Not supported for non camera facing lines. * @param value If set to true the line will be rendered always with the same width regardless how far it is located from the camera. */ set sizeAttenuation(e) { this._sizeAttenuation = e, this.markAllDefinesAsDirty(); } /** * Gets the color of the line */ get color() { return this._color; } /** * Sets the color of the line * @param value Color3 or null to clear the color. You need to clear the color if you use colors and useColors = true */ set color(e) { this.setColor(e); } /** * Sets the color of the line. If set the whole line will be mixed with this color according to the colorMode option. * @param value color */ setColor(e, t = !1) { this._color === null && e !== null || this._color !== null && e === null ? (this._color = e, !t && this.markAllDefinesAsDirty()) : this._color = e; } /** * Gets the color distributiopn type */ get colorsDistributionType() { return this._colorsDistributionType; } /** * Sets the color distribution type * @see GreasedLineMeshColorDistributionType * @param value color distribution type */ set colorsDistributionType(e) { this._colorsDistributionType = e, this.markAllDefinesAsDirty(); } /** * Gets the resolution */ get resolution() { return this._resolution; } /** * Sets the resolution * @param value resolution of the screen for GreasedLine */ set resolution(e) { this._aspect = e.x / e.y, this._resolution = e; } /** * Serializes this plugin material * @returns serializationObjec */ serialize() { const e = super.serialize(), t = { colorDistributionType: this._colorsDistributionType, colorsSampling: this.colorsSampling, colorMode: this.colorMode, dashCount: this._dashCount, dashOffset: this.dashOffset, dashRatio: this.dashRatio, resolution: this._resolution, sizeAttenuation: this._sizeAttenuation, useColors: this.useColors, useDash: this.useDash, visibility: this.visibility, width: this.width }; return this._colors && (t.colors = this._colors), this._color && (t.color = this._color), e.greasedLineMaterialOptions = t, e; } /** * Parses a serialized objects * @param source serialized object * @param scene scene * @param rootUrl root url for textures */ parse(e, t, i) { var r; super.parse(e, t, i); const s = e.greasedLineMaterialOptions; (r = this.colorsTexture) === null || r === void 0 || r.dispose(), s.color && this.setColor(s.color, !0), s.colorDistributionType && (this.colorsDistributionType = s.colorDistributionType), s.colors && (this.colors = s.colors), s.colorsSampling && (this.colorsSampling = s.colorsSampling), s.colorMode && (this.colorMode = s.colorMode), s.useColors && (this.useColors = s.useColors), s.visibility && (this.visibility = s.visibility), s.useDash && (this.useDash = s.useDash), s.dashCount && (this.dashCount = s.dashCount), s.dashRatio && (this.dashRatio = s.dashRatio), s.dashOffset && (this.dashOffset = s.dashOffset), s.width && (this.width = s.width), s.sizeAttenuation && (this.sizeAttenuation = s.sizeAttenuation), s.resolution && (this.resolution = s.resolution), this.colors ? this.colorsTexture = Hn.CreateColorsTexture(`${this._material.name}-colors-texture`, this.colors, this.colorsSampling, t) : Hn.PrepareEmptyColorsTexture(t), this.markAllDefinesAsDirty(); } /** * Makes a duplicate of the current configuration into another one. 
* @param plugin define the config where to copy the info */ copyTo(e) { var t; const i = e; (t = i.colorsTexture) === null || t === void 0 || t.dispose(), this._colors && (i.colorsTexture = Hn.CreateColorsTexture(`${i._material.name}-colors-texture`, this._colors, i.colorsSampling, this._scene)), i.setColor(this.color, !0), i.colorsDistributionType = this.colorsDistributionType, i.colorsSampling = this.colorsSampling, i.colorMode = this.colorMode, i.useColors = this.useColors, i.visibility = this.visibility, i.useDash = this.useDash, i.dashCount = this.dashCount, i.dashRatio = this.dashRatio, i.dashOffset = this.dashOffset, i.width = this.width, i.sizeAttenuation = this.sizeAttenuation, i.resolution = this.resolution, i.markAllDefinesAsDirty(); } } cx.GREASED_LINE_MATERIAL_NAME = "GreasedLinePluginMaterial"; Be(`BABYLON.${cx.GREASED_LINE_MATERIAL_NAME}`, cx); const d0e = "greasedLinePixelShader", f0e = `precision highp float;uniform sampler2D grlColors;uniform float grlUseColors;uniform float grlUseDash;uniform float grlDashArray;uniform float grlDashOffset;uniform float grlDashRatio;uniform float grlVisibility;uniform float grlColorsWidth;uniform vec2 grl_colorModeAndColorDistributionType;uniform vec3 grlColor;varying float grlCounters;varying float grlColorPointer;void main() {float grlColorMode=grl_colorModeAndColorDistributionType.x;float grlColorDistributionType=grl_colorModeAndColorDistributionType.y;gl_FragColor=vec4(grlColor,1.);gl_FragColor.a=step(grlCounters,grlVisibility);if (gl_FragColor.a==0.) discard;if( grlUseDash==1. ){gl_FragColor.a=ceil(mod(grlCounters+grlDashOffset,grlDashArray)-(grlDashArray*grlDashRatio));if (gl_FragColor.a==0.) discard;} if (grlUseColors==1.) {vec4 textureColor;if (grlColorDistributionType==COLOR_DISTRIBUTION_TYPE_LINE) { textureColor=texture2D(grlColors,vec2(grlCounters,0.),0.);} else {textureColor=texture2D(grlColors,vec2(grlColorPointer/grlColorsWidth,0.),0.);} if (grlColorMode==COLOR_MODE_SET) {gl_FragColor=textureColor;} else if (grlColorMode==COLOR_MODE_ADD) {gl_FragColor+=textureColor;} else if (grlColorMode==COLOR_MODE_MULTIPLY) {gl_FragColor*=textureColor;}}} `; je.ShadersStore[d0e] = f0e; const p0e = "greasedLineVertexShader", _0e = `precision highp float; #include attribute float grl_widths;attribute vec3 grl_offsets;attribute float grl_colorPointers;attribute vec3 position;uniform mat4 viewProjection;uniform mat4 projection;varying float grlCounters;varying float grlColorPointer; #ifdef GREASED_LINE_CAMERA_FACING attribute vec4 grl_nextAndCounters;attribute vec4 grl_previousAndSide;uniform vec2 grlResolution;uniform float grlAspect;uniform float grlWidth;uniform float grlSizeAttenuation;vec2 grlFix( vec4 i,float aspect ) {vec2 res=i.xy/i.w;res.x*=aspect;return res;} #else attribute vec3 grl_slopes;attribute float grl_counters; #endif void main() { #include grlColorPointer=grl_colorPointers; #ifdef GREASED_LINE_CAMERA_FACING float grlBaseWidth=grlWidth;vec3 grlPrevious=grl_previousAndSide.xyz;float grlSide=grl_previousAndSide.w;vec3 grlNext=grl_nextAndCounters.xyz;grlCounters=grl_nextAndCounters.w;mat4 grlMatrix=viewProjection*finalWorld ;vec3 grlPositionOffset=grl_offsets;vec4 grlFinalPosition=grlMatrix*vec4( position+grlPositionOffset ,1.0 );vec4 grlPrevPos=grlMatrix*vec4( grlPrevious+grlPositionOffset,1.0 );vec4 grlNextPos=grlMatrix*vec4( grlNext+grlPositionOffset,1.0 );vec2 grlCurrentP=grlFix( grlFinalPosition,grlAspect );vec2 grlPrevP=grlFix( grlPrevPos,grlAspect );vec2 grlNextP=grlFix( grlNextPos,grlAspect );float 
grlWidth=grlBaseWidth*grl_widths;vec2 grlDir;if( grlNextP==grlCurrentP ) grlDir=normalize( grlCurrentP-grlPrevP );else if( grlPrevP==grlCurrentP ) grlDir=normalize( grlNextP-grlCurrentP );else {vec2 grlDir1=normalize( grlCurrentP-grlPrevP );vec2 grlDir2=normalize( grlNextP-grlCurrentP );grlDir=normalize( grlDir1+grlDir2 );} vec4 grlNormal=vec4( -grlDir.y,grlDir.x,0.,1. ); #ifdef GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM grlNormal.xy*=-.5*grlWidth; #else grlNormal.xy*=.5*grlWidth; #endif grlNormal*=projection;if (grlSizeAttenuation==1.) {grlNormal.xy*=grlFinalPosition.w;grlNormal.xy/=( vec4( grlResolution,0.,1. )*projection ).xy;} grlFinalPosition.xy+=grlNormal.xy*grlSide;gl_Position=grlFinalPosition; #else grlCounters=grl_counters;vec4 grlFinalPosition=worldViewProjection*vec4( (position+grl_offsets)+grl_slopes*grl_widths ,1.0 ) ;gl_Position=grlFinalPosition; #endif } `; je.ShadersStore[p0e] = _0e; class TW extends Lo { /** * GreasedLineSimple material constructor * @param name material name * @param scene the scene * @param options material options */ constructor(e, t, i) { var r, s, n, a, l, o, u, h, d, f, p, m, _, v; const C = [ `COLOR_DISTRIBUTION_TYPE_LINE ${jC.COLOR_DISTRIBUTION_TYPE_LINE}.`, `COLOR_DISTRIBUTION_TYPE_SEGMENT ${jC.COLOR_DISTRIBUTION_TYPE_SEGMENT}.`, `COLOR_MODE_SET ${vm.COLOR_MODE_SET}.`, `COLOR_MODE_ADD ${vm.COLOR_MODE_ADD}.`, `COLOR_MODE_MULTIPLY ${vm.COLOR_MODE_MULTIPLY}.` ], x = ["position", "grl_widths", "grl_offsets", "grl_colorPointers"]; t.useRightHandedSystem && C.push("GREASED_LINE_RIGHT_HANDED_COORDINATE_SYSTEM"), i.cameraFacing ? (C.push("GREASED_LINE_CAMERA_FACING"), x.push("grl_previousAndSide", "grl_nextAndCounters")) : (x.push("grl_slopes"), x.push("grl_counters")), super(e, t, { vertex: "greasedLine", fragment: "greasedLine" }, { attributes: x, uniforms: [ "world", "viewProjection", "view", "projection", "grlColorsWidth", "grlUseColors", "grlWidth", "grlColor", "grl_colorModeAndColorDistributionType", "grlResolution", "grlAspect", "grlAizeAttenuation", "grlDashArray", "grlDashOffset", "grlDashRatio", "grlUseDash", "grlVisibility" ], samplers: ["grlColors"], defines: C }), this._color = ze.White(), this._colorsDistributionType = jC.COLOR_DISTRIBUTION_TYPE_SEGMENT, this._colorsTexture = null, i = i || { color: bh.DEFAULT_COLOR }; const b = t.getEngine(); this.visibility = (r = i.visibility) !== null && r !== void 0 ? r : 1, this.useDash = (s = i.useDash) !== null && s !== void 0 ? s : !1, this.dashRatio = (n = i.dashRatio) !== null && n !== void 0 ? n : 0.5, this.dashOffset = (a = i.dashOffset) !== null && a !== void 0 ? a : 0, this.dashCount = (l = i.dashCount) !== null && l !== void 0 ? l : 1, this.width = i.width ? i.width : i.sizeAttenuation && i.cameraFacing ? bh.DEFAULT_WIDTH_ATTENUATED : bh.DEFAULT_WIDTH, this.sizeAttenuation = (o = i.sizeAttenuation) !== null && o !== void 0 ? o : !1, this.color = (u = i.color) !== null && u !== void 0 ? u : ze.White(), this.useColors = (h = i.useColors) !== null && h !== void 0 ? h : !1, this.colorsDistributionType = (d = i.colorDistributionType) !== null && d !== void 0 ? d : jC.COLOR_DISTRIBUTION_TYPE_SEGMENT, this.colorsSampling = (f = i.colorsSampling) !== null && f !== void 0 ? f : Po.NEAREST_NEAREST, this.colorMode = (p = i.colorMode) !== null && p !== void 0 ? p : vm.COLOR_MODE_SET, this._colors = (m = i.colors) !== null && m !== void 0 ? m : null, this._cameraFacing = (_ = i.cameraFacing) !== null && _ !== void 0 ? _ : !0, this.resolution = (v = i.resolution) !== null && v !== void 0 ? 
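/* Hedged usage sketch (not from this bundle): `TW` appears to be BABYLON.GreasedLineSimpleMaterial.
   Assuming the public builder and enum names, it is selected through the materialType option:
     const line = CreateGreasedLine("line",
       { points: [0, 0, 0, 1, 1, 0] },
       { materialType: GreasedLineMeshMaterialType.MATERIAL_TYPE_SIMPLE, width: 0.2 },
       scene);
     line.material.useDash = true;    // dash settings exposed by this material
     line.material.dashCount = 10;
     line.material.dashRatio = 0.5;
*/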
v : new at(b.getRenderWidth(), b.getRenderHeight()), i.colorsTexture ? this.colorsTexture = i.colorsTexture : this.colorsTexture = Hn.PrepareEmptyColorsTexture(t), this._colors && this.setColors(this._colors), b.onDisposeObservable.add(() => { Hn.DisposeEmptyColorsTexture(); }); } /** * Disposes the plugin material. */ dispose() { var e; (e = this._colorsTexture) === null || e === void 0 || e.dispose(), super.dispose(); } _setColorModeAndColorDistributionType() { this.setVector2("grl_colorModeAndColorDistributionType", new at(this._colorMode, this._colorsDistributionType)); } /** * Updates the material. Use when material created in lazy mode. */ updateLazy() { this._colors && this.setColors(this._colors, !1, !0); } /** * Returns the colors used to colorize the line */ get colors() { return this._colors; } /** * Sets the colors used to colorize the line */ set colors(e) { this.setColors(e); } /** * Creates or updates the colors texture * @param colors color table RGBA * @param lazy if lazy, the colors are not updated * @param forceNewTexture force creation of a new texture * @returns */ setColors(e, t = !1, i = !1) { var r, s, n, a; const l = (s = (r = this._colors) === null || r === void 0 ? void 0 : r.length) !== null && s !== void 0 ? s : 0; if (this._colors = e, e === null || e.length === 0) { (n = this._colorsTexture) === null || n === void 0 || n.dispose(); return; } if (!(t && !i)) if (this._colorsTexture && l === e.length && !i) { const o = Hn.Color3toRGBAUint8(e); this._colorsTexture.update(o); } else (a = this._colorsTexture) === null || a === void 0 || a.dispose(), this.colorsTexture = Hn.CreateColorsTexture(`${this.name}-colors-texture`, e, this.colorsSampling, this.getScene()); } /** * Gets the colors texture */ get colorsTexture() { var e; return (e = this._colorsTexture) !== null && e !== void 0 ? e : null; } /** * Sets the colorsTexture */ set colorsTexture(e) { this._colorsTexture = e, this.setFloat("grlColorsWidth", this._colorsTexture.getSize().width), this.setTexture("grlColors", this._colorsTexture); } /** * Line base width. At each point the line width is calculated by widths[pointIndex] * width */ get width() { return this._width; } /** * Line base width. At each point the line width is calculated by widths[pointIndex] * width */ set width(e) { this._width = e, this.setFloat("grlWidth", e); } /** * Whether to use the colors option to colorize the line */ get useColors() { return this._useColors; } set useColors(e) { this._useColors = e, this.setFloat("grlUseColors", Hn.BooleanToNumber(e)); } /** * The type of sampling of the colors texture. The values are the same when using with textures. */ get colorsSampling() { return this._colorsSampling; } /** * The type of sampling of the colors texture. The values are the same when using with textures. */ set colorsSampling(e) { this._colorsSampling = e; } /** * Normalized value of how much of the line will be visible * 0 - 0% of the line will be visible * 1 - 100% of the line will be visible */ get visibility() { return this._visibility; } set visibility(e) { this._visibility = e, this.setFloat("grlVisibility", e); } /** * Turns on/off dash mode */ get useDash() { return this._useDash; } /** * Turns on/off dash mode */ set useDash(e) { this._useDash = e, this.setFloat("grlUseDash", Hn.BooleanToNumber(e)); } /** * Gets the dash offset */ get dashOffset() { return this._dashOffset; } /** * Sets the dash offset */ set dashOffset(e) { this._dashOffset = e, this.setFloat("grlDashOffset", e); } /** * Length of the dash. 0 to 1. 
0.5 means half empty, half drawn. */ get dashRatio() { return this._dashRatio; } /** * Length of the dash. 0 to 1. 0.5 means half empty, half drawn. */ set dashRatio(e) { this._dashRatio = e, this.setFloat("grlDashRatio", e); } /** * Gets the number of dashes in the line */ get dashCount() { return this._dashCount; } /** * Sets the number of dashes in the line * @param value dash */ set dashCount(e) { this._dashCount = e, this._dashArray = 1 / e, this.setFloat("grlDashArray", this._dashArray); } /** * False means 1 unit in width = 1 unit on scene, true means 1 unit in width is reduced on the screen to make better looking lines */ get sizeAttenuation() { return this._sizeAttenuation; } /** * Turn on/off attenuation of the width option and widths array. * @param value false means 1 unit in width = 1 unit on scene, true means 1 unit in width is reduced on the screen to make better looking lines */ set sizeAttenuation(e) { this._sizeAttenuation = e, this.setFloat("grlSizeAttenuation", Hn.BooleanToNumber(e)); } /** * Gets the color of the line */ get color() { return this.color; } /** * Sets the color of the line * @param value Color3 */ set color(e) { this.setColor(e); } /** * Sets the color of the line. If set the whole line will be mixed with this color according to the colorMode option. * The simple material always needs a color to be set. If you set it to null it will set the color to the default color (GreasedLineSimpleMaterial.DEFAULT_COLOR). * @param value color */ setColor(e) { e = e ?? bh.DEFAULT_COLOR, this._color = e, this.setColor3("grlColor", e); } /** * Gets the color distributiopn type */ get colorsDistributionType() { return this._colorsDistributionType; } /** * Sets the color distribution type * @see GreasedLineMeshColorDistributionType * @param value color distribution type */ set colorsDistributionType(e) { this._colorsDistributionType = e, this._setColorModeAndColorDistributionType(); } /** * Gets the mixing mode of the color and colors paramaters. Default value is GreasedLineMeshColorMode.SET. * MATERIAL_TYPE_SIMPLE mixes the color and colors of the greased line material. * @see GreasedLineMeshColorMode */ get colorMode() { return this._colorMode; } /** * Sets the mixing mode of the color and colors paramaters. Default value is GreasedLineMeshColorMode.SET. * MATERIAL_TYPE_SIMPLE mixes the color and colors of the greased line material. 
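* Hedged example (assumes the public enum name GreasedLineMeshColorMode for the minified `vm`):
* @example
*   material.useColors = true;
*   material.setColors([Color3.Red(), Color3.Green(), Color3.Blue()]);
*   material.colorMode = GreasedLineMeshColorMode.COLOR_MODE_MULTIPLY;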
* @see GreasedLineMeshColorMode */ set colorMode(e) { this._colorMode = e, this._setColorModeAndColorDistributionType(); } /** * Gets the resolution */ get resolution() { return this._resolution; } /** * Sets the resolution * @param value resolution of the screen for GreasedLine */ set resolution(e) { this._resolution = e, this.setVector2("grlResolution", e), this.setFloat("grlAspect", e.x / e.y); } /** * Serializes this plugin material * @returns serializationObjec */ serialize() { const e = super.serialize(), t = { colorDistributionType: this._colorsDistributionType, colorsSampling: this._colorsSampling, colorMode: this._colorMode, color: this._color, dashCount: this._dashCount, dashOffset: this._dashOffset, dashRatio: this._dashRatio, resolution: this._resolution, sizeAttenuation: this._sizeAttenuation, useColors: this._useColors, useDash: this._useDash, visibility: this._visibility, width: this._width, cameraFacing: this._cameraFacing }; return this._colors && (t.colors = this._colors), e.greasedLineMaterialOptions = t, e; } /** * Parses a serialized objects * @param source serialized object * @param scene scene * @param _rootUrl root url for textures */ parse(e, t, i) { var r, s; const n = e.greasedLineMaterialOptions; (r = this._colorsTexture) === null || r === void 0 || r.dispose(), n.color && (this.color = n.color), n.colorDistributionType && (this.colorsDistributionType = n.colorDistributionType), n.colorsSampling && (this.colorsSampling = n.colorsSampling), n.colorMode && (this.colorMode = n.colorMode), n.useColors && (this.useColors = n.useColors), n.visibility && (this.visibility = n.visibility), n.useDash && (this.useDash = n.useDash), n.dashCount && (this.dashCount = n.dashCount), n.dashRatio && (this.dashRatio = n.dashRatio), n.dashOffset && (this.dashOffset = n.dashOffset), n.width && (this.width = n.width), n.sizeAttenuation && (this.sizeAttenuation = n.sizeAttenuation), n.resolution && (this.resolution = n.resolution), n.colors ? this.colorsTexture = Hn.CreateColorsTexture(`${this.name}-colors-texture`, n.colors, this.colorsSampling, this.getScene()) : this.colorsTexture = Hn.PrepareEmptyColorsTexture(t), this._cameraFacing = (s = n.cameraFacing) !== null && s !== void 0 ? 
s : !0, this.setDefine("GREASED_LINE_CAMERA_FACING", this._cameraFacing); } } const m0e = `#if defined(DBG_ENABLED) attribute float dbg_initialPass; varying vec3 dbg_vBarycentric; flat varying vec3 dbg_vVertexWorldPos; flat varying float dbg_vPass; #endif`, g0e = `#if defined(DBG_ENABLED) float dbg_vertexIndex = mod(float(gl_VertexID), 3.); if (dbg_vertexIndex == 0.0) { dbg_vBarycentric = vec3(1.,0.,0.); } else if (dbg_vertexIndex == 1.0) { dbg_vBarycentric = vec3(0.,1.,0.); } else { dbg_vBarycentric = vec3(0.,0.,1.); } dbg_vVertexWorldPos = vPositionW; dbg_vPass = dbg_initialPass; #endif`, v0e = `#if defined(DBG_ENABLED) uniform vec3 dbg_shadedDiffuseColor; uniform vec4 dbg_shadedSpecularColorPower; uniform vec3 dbg_thicknessRadiusScale; #if DBG_MODE == 2 || DBG_MODE == 3 uniform vec3 dbg_vertexColor; #endif #if DBG_MODE == 1 uniform vec3 dbg_wireframeTrianglesColor; #elif DBG_MODE == 3 uniform vec3 dbg_wireframeVerticesColor; #elif DBG_MODE == 4 || DBG_MODE == 5 uniform vec3 dbg_uvPrimaryColor; uniform vec3 dbg_uvSecondaryColor; #elif DBG_MODE == 7 uniform vec3 dbg_materialColor; #endif #endif`, A0e = `#if defined(DBG_ENABLED) varying vec3 dbg_vBarycentric; flat varying vec3 dbg_vVertexWorldPos; flat varying float dbg_vPass; #if !defined(DBG_MULTIPLY) vec3 dbg_applyShading(vec3 color) { vec3 N = vNormalW.xyz; vec3 L = normalize(vEyePosition.xyz - vPositionW.xyz); vec3 H = normalize(L + L); float LdotN = clamp(dot(L,N), 0., 1.); float HdotN = clamp(dot(H,N), 0., 1.); float specTerm = pow(HdotN, dbg_shadedSpecularColorPower.w); color *= (LdotN / PI); color += dbg_shadedSpecularColorPower.rgb * (specTerm / PI); return color; } #endif #if DBG_MODE == 1 || DBG_MODE == 3 float dbg_edgeFactor() { vec3 d = fwidth(dbg_vBarycentric); vec3 a3 = smoothstep(vec3(0.), d * dbg_thicknessRadiusScale.x, dbg_vBarycentric); return min(min(a3.x, a3.y), a3.z); } #endif #if DBG_MODE == 2 || DBG_MODE == 3 float dbg_cornerFactor() { vec3 worldPos = vPositionW; float dist = length(worldPos - dbg_vVertexWorldPos); float camDist = length(worldPos - vEyePosition.xyz); float d = sqrt(camDist) * .001; return smoothstep((dbg_thicknessRadiusScale.y * d), ((dbg_thicknessRadiusScale.y * 1.01) * d), dist); } #endif #if (DBG_MODE == 4 && defined(UV1)) || (DBG_MODE == 5 && defined(UV2)) float dbg_checkerboardFactor(vec2 uv) { vec2 f = fract(uv * dbg_thicknessRadiusScale.z); f -= .5; return (f.x * f.y) > 0. ? 1. : 0.; } #endif #endif`, y0e = `#if defined(DBG_ENABLED) vec3 dbg_color = vec3(1.); #if DBG_MODE == 1 dbg_color = mix(dbg_wireframeTrianglesColor, vec3(1.), dbg_edgeFactor()); #elif DBG_MODE == 2 || DBG_MODE == 3 float dbg_cornerFactor = dbg_cornerFactor(); if (dbg_vPass == 0. && dbg_cornerFactor == 1.) 
discard; dbg_color = mix(dbg_vertexColor, vec3(1.), dbg_cornerFactor); #if DBG_MODE == 3 dbg_color *= mix(dbg_wireframeVerticesColor, vec3(1.), dbg_edgeFactor()); #endif #elif DBG_MODE == 4 && defined(UV1) dbg_color = mix(dbg_uvPrimaryColor, dbg_uvSecondaryColor, dbg_checkerboardFactor(vMainUV1)); #elif DBG_MODE == 5 && defined(UV2) dbg_color = mix(dbg_uvPrimaryColor, dbg_uvSecondaryColor, dbg_checkerboardFactor(vMainUV2)); #elif DBG_MODE == 6 && defined(VERTEXCOLOR) dbg_color = vColor.rgb; #elif DBG_MODE == 7 dbg_color = dbg_materialColor; #endif #if defined(DBG_MULTIPLY) gl_FragColor *= vec4(dbg_color, 1.); #else #if DBG_MODE != 6 gl_FragColor = vec4(dbg_applyShading(dbg_shadedDiffuseColor) * dbg_color, 1.); #else gl_FragColor = vec4(dbg_color, 1.); #endif #endif #endif`, Fse = [ new ze(0.98, 0.26, 0.38), new ze(0.47, 0.75, 0.3), new ze(0, 0.26, 0.77), new ze(0.97, 0.6, 0.76), new ze(0.19, 0.63, 0.78), new ze(0.98, 0.8, 0.6), new ze(0.65, 0.43, 0.15), new ze(0.15, 0.47, 0.22), new ze(0.67, 0.71, 0.86), new ze(0.09, 0.46, 0.56), new ze(0.8, 0.98, 0.02), new ze(0.39, 0.29, 0.13), new ze(0.53, 0.63, 0.06), new ze(0.95, 0.96, 0.41), new ze(1, 0.72, 0.94), new ze(0.63, 0.08, 0.31), new ze(0.66, 0.96, 0.95), new ze(0.22, 0.14, 0.19), new ze(0.14, 0.65, 0.59), new ze(0.93, 1, 0.68), new ze(0.93, 0.14, 0.44), new ze(0.47, 0.86, 0.67), new ze(0.85, 0.07, 0.78), new ze(0.53, 0.64, 0.98), new ze(0.43, 0.37, 0.56), new ze(0.71, 0.65, 0.25), new ze(0.66, 0.19, 0.01), new ze(0.94, 0.53, 0.12), new ze(0.41, 0.44, 0.44), new ze(0.24, 0.71, 0.96), new ze(0.57, 0.28, 0.56), new ze(0.44, 0.98, 0.42) ]; var rP; (function(c) { c[c.NONE = 0] = "NONE", c[c.TRIANGLES = 1] = "TRIANGLES", c[c.VERTICES = 2] = "VERTICES", c[c.TRIANGLES_VERTICES = 3] = "TRIANGLES_VERTICES", c[c.UV0 = 4] = "UV0", c[c.UV1 = 5] = "UV1", c[c.VERTEXCOLORS = 6] = "VERTEXCOLORS", c[c.MATERIALIDS = 7] = "MATERIALIDS"; })(rP || (rP = {})); class C0e extends sa { constructor() { super(...arguments), this.DBG_MODE = rP.NONE, this.DBG_MULTIPLY = !0, this.DBG_ENABLED = !0; } } class Hu extends Q_ { /** @internal */ _markAllDefinesAsDirty() { this._enable(this._isEnabled), this.markAllDefinesAsDirty(); } /** * Creates a new MeshDebugPluginMaterial * @param material Material to attach the mesh debug plugin to * @param options Options for the mesh debug plugin */ constructor(e, t = {}) { var i, r, s, n, a, l, o, u, h, d, f, p, m; const _ = new C0e(); _.DBG_MODE = (i = t.mode) !== null && i !== void 0 ? i : _.DBG_MODE, _.DBG_MULTIPLY = (r = t.multiply) !== null && r !== void 0 ? r : _.DBG_MULTIPLY, super(e, "MeshDebug", 200, _, !0, !0), this._mode = _.DBG_MODE, this._multiply = _.DBG_MULTIPLY, this.shadedDiffuseColor = (s = t.shadedDiffuseColor) !== null && s !== void 0 ? s : new ze(1, 1, 1), this.shadedSpecularColor = (n = t.shadedSpecularColor) !== null && n !== void 0 ? n : new ze(0.8, 0.8, 0.8), this.shadedSpecularPower = (a = t.shadedSpecularPower) !== null && a !== void 0 ? a : 10, this.wireframeThickness = (l = t.wireframeThickness) !== null && l !== void 0 ? l : 0.7, this.wireframeTrianglesColor = (o = t.wireframeTrianglesColor) !== null && o !== void 0 ? o : new ze(0, 0, 0), this.wireframeVerticesColor = (u = t.wireframeVerticesColor) !== null && u !== void 0 ? u : new ze(0.8, 0.8, 0.8), this.vertexColor = (h = t.vertexColor) !== null && h !== void 0 ? h : new ze(0, 0, 0), this.vertexRadius = (d = t.vertexRadius) !== null && d !== void 0 ? d : 1.2, this.uvScale = (f = t.uvScale) !== null && f !== void 0 ? 
f : 20, this.uvPrimaryColor = (p = t.uvPrimaryColor) !== null && p !== void 0 ? p : new ze(1, 1, 1), this.uvSecondaryColor = (m = t.uvSecondaryColor) !== null && m !== void 0 ? m : new ze(0.5, 0.5, 0.5), this._materialColor = Hu.MaterialColors[Hu._PluginCount++ % Hu.MaterialColors.length], this.isEnabled = !0; } /** * Get the class name * @returns Class name */ getClassName() { return "MeshDebugPluginMaterial"; } /** * Gets whether the mesh debug plugin is enabled in the material. */ get isEnabled() { return this._isEnabled; } /** * Sets whether the mesh debug plugin is enabled in the material. * @param value enabled */ set isEnabled(e) { if (this._isEnabled !== e) { if (!this._material.getScene().getEngine().isWebGPU && this._material.getScene().getEngine().webGLVersion == 1) { Ce.Error("MeshDebugPluginMaterial is not supported on WebGL 1.0."), this._isEnabled = !1; return; } this._isEnabled = e, this._markAllDefinesAsDirty(); } } /** * Prepare the defines * @param defines Mesh debug defines * @param scene Scene * @param mesh Mesh associated with material */ prepareDefines(e, t, i) { (this._mode == rP.VERTICES || this._mode == rP.TRIANGLES || this._mode == rP.TRIANGLES_VERTICES) && !i.isVerticesDataPresent("dbg_initialPass") && Ce.Warn("For best results with TRIANGLES, TRIANGLES_VERTICES, or VERTICES modes, please use MeshDebugPluginMaterial.PrepareMeshForTrianglesAndVerticesMode() on mesh.", 1), e.DBG_MODE = this._mode, e.DBG_MULTIPLY = this._multiply, e.DBG_ENABLED = this._isEnabled; } /** * Get the shader attributes * @param attributes Array of attributes */ getAttributes(e) { e.push("dbg_initialPass"); } /** * Get the shader uniforms * @returns Uniforms */ getUniforms() { return { ubo: [ { name: "dbg_shadedDiffuseColor", size: 3, type: "vec3" }, { name: "dbg_shadedSpecularColorPower", size: 4, type: "vec4" }, { name: "dbg_thicknessRadiusScale", size: 3, type: "vec3" }, { name: "dbg_wireframeTrianglesColor", size: 3, type: "vec3" }, { name: "dbg_wireframeVerticesColor", size: 3, type: "vec3" }, { name: "dbg_vertexColor", size: 3, type: "vec3" }, { name: "dbg_uvPrimaryColor", size: 3, type: "vec3" }, { name: "dbg_uvSecondaryColor", size: 3, type: "vec3" }, { name: "dbg_materialColor", size: 3, type: "vec3" } ], fragment: v0e }; } /** * Bind the uniform buffer * @param uniformBuffer Uniform buffer */ bindForSubMesh(e) { this._isEnabled && (e.updateFloat3("dbg_shadedDiffuseColor", this.shadedDiffuseColor.r, this.shadedDiffuseColor.g, this.shadedDiffuseColor.b), e.updateFloat4("dbg_shadedSpecularColorPower", this.shadedSpecularColor.r, this.shadedSpecularColor.g, this.shadedSpecularColor.b, this.shadedSpecularPower), e.updateFloat3("dbg_thicknessRadiusScale", this.wireframeThickness, this.vertexRadius, this.uvScale), e.updateColor3("dbg_wireframeTrianglesColor", this.wireframeTrianglesColor), e.updateColor3("dbg_wireframeVerticesColor", this.wireframeVerticesColor), e.updateColor3("dbg_vertexColor", this.vertexColor), e.updateColor3("dbg_uvPrimaryColor", this.uvPrimaryColor), e.updateColor3("dbg_uvSecondaryColor", this.uvSecondaryColor), e.updateColor3("dbg_materialColor", this._materialColor)); } /** * Get shader code * @param shaderType "vertex" or "fragment" * @returns Shader code */ getCustomCode(e) { return e === "vertex" ? 
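/* Hedged usage sketch (not from this bundle): `Hu` appears to be BABYLON.MeshDebugPluginMaterial.
   Assuming the public names MeshDebugMode and PrepareMeshForTrianglesAndVerticesMode:
     MeshDebugPluginMaterial.PrepareMeshForTrianglesAndVerticesMode(mesh); // required for TRIANGLES and VERTICES modes
     new MeshDebugPluginMaterial(mesh.material, {
       mode: MeshDebugMode.TRIANGLES_VERTICES,
       wireframeThickness: 0.8,
       multiply: true
     });
*/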
{ CUSTOM_VERTEX_DEFINITIONS: m0e, CUSTOM_VERTEX_MAIN_END: g0e } : { CUSTOM_FRAGMENT_DEFINITIONS: A0e, CUSTOM_FRAGMENT_MAIN_END: y0e }; } /** * Resets static variables of the plugin to their original state */ static Reset() { this._PluginCount = 0, this.MaterialColors = Fse; } /** * Renders triangles in a mesh 3 times by tripling the indices in the index buffer. * Used to prepare a mesh to be rendered in `TRIANGLES`, `VERTICES`, or `TRIANGLES_VERTICES` modes. * NOTE: This is a destructive operation. The mesh's index buffer and vertex buffers are modified, and a new vertex buffer is allocated. * If you'd like the ability to revert these changes, toggle the optional `returnRollback` flag. * @param mesh the mesh to target * @param returnRollback whether or not to return a function that reverts mesh to its initial state. Default: false. * @returns a rollback function if `returnRollback` is true, otherwise an empty function. */ static PrepareMeshForTrianglesAndVerticesMode(e, t = !1) { let i = () => { }; if (e.getTotalIndices() == 0) return i; if (t) { const u = e.getVerticesDataKinds(), h = e.getIndices(), d = {}; for (const f of u) d[f] = e.getVerticesData(f); i = function() { e.setIndices(h); for (const f of u) { const p = e.getVertexBuffer(f).getStrideSize(); e.setVerticesData(f, d[f], void 0, p); } e.removeVerticesData("dbg_initialPass"); }; } let r = Array.from(e.getIndices()); const s = []; for (let u = 0; u < r.length; u += 3) s.push(r[u + 1], r[u + 2], r[u + 0]); e.setIndices(r.concat(s)), e.convertToUnIndexedMesh(), e.isUnIndexed = !1, r = Array.from(e.getIndices()); const n = []; for (let u = r.length / 2; u < r.length; u += 3) n.push(r[u + 1], r[u + 2], r[u + 0]); e.setIndices(r.concat(n)); const a = e.getTotalVertices(), l = a / 2, o = new Array(a).fill(1, 0, l).fill(0, l, a); return e.setVerticesData("dbg_initialPass", o, !1, 1), i; } } Hu._PluginCount = 0; Hu.MaterialColors = Fse; F([ Fs() ], Hu.prototype, "_materialColor", void 0); F([ W() ], Hu.prototype, "_isEnabled", void 0); F([ W(), ct("_markAllDefinesAsDirty") ], Hu.prototype, "mode", void 0); F([ W(), ct("_markAllDefinesAsDirty") ], Hu.prototype, "multiply", void 0); F([ Fs() ], Hu.prototype, "shadedDiffuseColor", void 0); F([ Fs() ], Hu.prototype, "shadedSpecularColor", void 0); F([ W() ], Hu.prototype, "shadedSpecularPower", void 0); F([ W() ], Hu.prototype, "wireframeThickness", void 0); F([ Fs() ], Hu.prototype, "wireframeTrianglesColor", void 0); F([ Fs() ], Hu.prototype, "wireframeVerticesColor", void 0); F([ Fs() ], Hu.prototype, "vertexColor", void 0); F([ W() ], Hu.prototype, "vertexRadius", void 0); F([ W() ], Hu.prototype, "uvScale", void 0); F([ Fs() ], Hu.prototype, "uvPrimaryColor", void 0); F([ Fs() ], Hu.prototype, "uvSecondaryColor", void 0); Be("BABYLON.MeshDebugPluginMaterial", Hu); Object.defineProperty(Dt.prototype, "decalMap", { get: function() { if (!this._decalMap) { if (this._uniformBufferLayoutBuilt) return null; this._decalMap = new Ew(this); } return this._decalMap; }, enumerable: !0, configurable: !0 }); Object.defineProperty(on.prototype, "decalMap", { get: function() { if (!this._decalMap) { if (this._uniformBufferLayoutBuilt) return null; this._decalMap = new Ew(this); } return this._decalMap; }, enumerable: !0, configurable: !0 }); Object.defineProperty(xr.prototype, "decalMap", { get: function() { return this._decalMap; }, set: function(c) { this._decalMap = c; }, enumerable: !0, configurable: !0 }); class v4 { /** * Creates a new Polar object * @param radius the radius of the vector * 
@param theta the angle of the vector */ constructor(e, t) { this.radius = e, this.theta = t; } /** * Gets the class name * @returns the string "Polar" */ getClassName() { return "Polar"; } /** * Converts the current polar to a string * @returns the current polar as a string */ toString() { return JSON.stringify(this); } /** * Converts the current polar to an array * @reutrns the current polar as an array */ asArray() { return [this.radius, this.theta]; } /** * Adds the current Polar and the given Polar and stores the result * @param polar the polar to add * @param ref the polar to store the result in * @returns the updated ref */ addToRef(e, t) { return t.radius = this.radius + e.radius, t.theta = this.theta + e.theta, t; } /** * Adds the current Polar and the given Polar * @param polar the polar to add * @returns the sum polar */ add(e) { const t = new v4(0, 0); return this.addToRef(e, t), t; } /** * Adds the given polar to the current polar * @param polar the polar to add * @returns the current polar */ addInPlace(e) { return this.addToRef(e, this), this; } /** * Adds the provided values to the current polar * @param radius the amount to add to the radius * @param theta the amount to add to the theta * @returns the current polar */ addInPlaceFromFloats(e, t) { return this.radius += e, this.theta += t, this; } /** * Subtracts the given Polar from the current Polar and stores the result * @param polar the polar to subtract * @param ref the polar to store the result in * @returns the updated ref */ subtractToRef(e, t) { return t.radius = this.radius - e.radius, t.theta = this.theta - e.theta, t; } /** * Subtracts the given Polar from the current Polar * @param polar the polar to subtract * @returns the difference polar */ subtract(e) { const t = new v4(0, 0); return this.subtractToRef(e, t), t; } /** * Subtracts the given Polar from the current Polar * @param polar the polar to subtract * @returns the current polar */ subtractInPlace(e) { return this.subtractToRef(e, this), this; } /** * Subtracts the given floats from the current polar * @param radius the amount to subtract from the radius * @param theta the amount to subtract from the theta * @param ref the polar to store the result in * @returns the updated ref */ subtractFromFloatsToRef(e, t, i) { return i.radius = this.radius - e, i.theta = this.theta - t, i; } /** * Subtracts the given floats from the current polar * @param radius the amount to subtract from the radius * @param theta the amount to subtract from the theta * @returns the difference polar */ subtractFromFloats(e, t) { const i = new v4(0, 0); return this.subtractFromFloatsToRef(e, t, i), i; } /** * Multiplies the given Polar with the current Polar and stores the result * @param polar the polar to multiply * @param ref the polar to store the result in * @returns the updated ref */ multiplyToRef(e, t) { return t.radius = this.radius * e.radius, t.theta = this.theta * e.theta, t; } /** * Multiplies the given Polar with the current Polar * @param polar the polar to multiply * @returns the product polar */ multiply(e) { const t = new v4(0, 0); return this.multiplyToRef(e, t), t; } /** * Multiplies the given Polar with the current Polar * @param polar the polar to multiply * @returns the current polar */ multiplyInPlace(e) { return this.multiplyToRef(e, this), this; } /** * Divides the current Polar by the given Polar and stores the result * @param polar the polar to divide * @param ref the polar to store the result in * @returns the updated ref */ divideToRef(e, t) { return 
t.radius = this.radius / e.radius, t.theta = this.theta / e.theta, t; } /** * Divides the current Polar by the given Polar * @param polar the polar to divide * @returns the quotient polar */ divide(e) { const t = new v4(0, 0); return this.divideToRef(e, t), t; } /** * Divides the current Polar by the given Polar * @param polar the polar to divide * @returns the current polar */ divideInPlace(e) { return this.divideToRef(e, this), this; } /** * Clones the current polar * @returns a clone of the current polar */ clone() { return new v4(this.radius, this.theta); } /** * Copies the source polar into the current polar * @param source the polar to copy from * @returns the current polar */ copyFrom(e) { return this.radius = e.radius, this.theta = e.theta, this; } /** * Copies the given values into the current polar * @param radius the radius to use * @param theta the theta to use * @returns the current polar */ copyFromFloats(e, t) { return this.radius = e, this.theta = t, this; } /** * Scales the current polar and stores the result * @param scale defines the multiplication factor * @param ref where to store the result * @returns the updated ref */ scaleToRef(e, t) { return t.radius = this.radius * e, t.theta = this.theta * e, t; } /** * Scales the current polar and returns a new polar with the scaled coordinates * @param scale defines the multiplication factor * @returns the scaled polar */ scale(e) { const t = new v4(0, 0); return this.scaleToRef(e, t), t; } /** * Scales the current polar * @param scale defines the multiplication factor * @returns the current polar */ scaleInPlace(e) { return this.scaleToRef(e, this), this; } /** * Sets the values of the current polar * @param radius the new radius * @param theta the new theta * @returns the current polar */ set(e, t) { return this.radius = e, this.theta = t, this; } /** * Sets the values of the current polar * @param value the new values * @returns the current polar */ setAll(e) { return this.set(e, e), this; } /** * Gets the rectangular coordinates of the current Polar * @param ref the reference to assign the result * @returns the updated reference */ toVector2ToRef(e) { const t = this.radius * Math.cos(this.theta), i = this.radius * Math.sin(this.theta); return e.set(t, i), e; } /** * Gets the rectangular coordinates of the current Polar * @returns the rectangular coordinates */ toVector2() { const e = new at(0, 0); return this.toVector2ToRef(e); } /** * Converts a given Vector2 to its polar coordinates * @param v the Vector2 to convert * @param ref the reference to assign the result * @returns the updated reference */ static FromVector2ToRef(e, t) { const i = Math.sign(e.y) * Math.acos(e.x / e.length()); return t.radius = e.length(), t.theta = i, t; } /** * Converts a given Vector2 to its polar coordinates * @param v the Vector2 to convert * @returns a Polar */ static FromVector2(e) { const t = new v4(0, 0); return v4.FromVector2ToRef(e, t), t; } /** * Converts an array of floats to a polar * @param array the array to convert * @returns the converted polar */ static FromArray(e) { return new v4(e[0], e[1]); } } class A4 { /** * @param radius spherical radius * @param theta angle from positive y axis to radial line from 0 to PI (vertical) * @param phi angle from positive x axis measured anticlockwise from -PI to PI (horizontal) */ constructor(e, t, i) { this.radius = e, this.theta = t, this.phi = i; } /** * Gets the class name * @returns the string "Spherical" */ getClassName() { return "Spherical"; } /** * Converts the current spherical to 
a string * @returns the current spherical as a string */ toString() { return JSON.stringify(this); } /** * Converts the current spherical to an array * @reutrns the current spherical as an array */ asArray() { return [this.radius, this.theta, this.phi]; } /** * Adds the current Spherical and the given Spherical and stores the result * @param spherical the spherical to add * @param ref the spherical to store the result in * @returns the updated ref */ addToRef(e, t) { return t.radius = this.radius + e.radius, t.theta = this.theta + e.theta, t.phi = this.phi + e.phi, t; } /** * Adds the current Spherical and the given Spherical * @param spherical the spherical to add * @returns the sum spherical */ add(e) { const t = new A4(0, 0, 0); return this.addToRef(e, t), t; } /** * Adds the given spherical to the current spherical * @param spherical the spherical to add * @returns the current spherical */ addInPlace(e) { return this.addToRef(e, this), this; } /** * Adds the provided values to the current spherical * @param radius the amount to add to the radius * @param theta the amount to add to the theta * @param phi the amount to add to the phi * @returns the current spherical */ addInPlaceFromFloats(e, t, i) { return this.radius += e, this.theta += t, this.phi += i, this; } /** * Subtracts the given Spherical from the current Spherical and stores the result * @param spherical the spherical to subtract * @param ref the spherical to store the result in * @returns the updated ref */ subtractToRef(e, t) { return t.radius = this.radius - e.radius, t.theta = this.theta - e.theta, t.phi = this.phi - e.phi, t; } /** * Subtracts the given Spherical from the current Spherical * @param spherical the spherical to subtract * @returns the difference spherical */ subtract(e) { const t = new A4(0, 0, 0); return this.subtractToRef(e, t), t; } /** * Subtracts the given Spherical from the current Spherical * @param spherical the spherical to subtract * @returns the current spherical */ subtractInPlace(e) { return this.subtractToRef(e, this), this; } /** * Subtracts the given floats from the current spherical * @param radius the amount to subtract from the radius * @param theta the amount to subtract from the theta * @param phi the amount to subtract from the phi * @param ref the spherical to store the result in * @returns the updated ref */ subtractFromFloatsToRef(e, t, i, r) { return r.radius = this.radius - e, r.theta = this.theta - t, r.phi = this.phi - i, r; } /** * Subtracts the given floats from the current spherical * @param radius the amount to subtract from the radius * @param theta the amount to subtract from the theta * @param phi the amount to subtract from the phi * @returns the difference spherical */ subtractFromFloats(e, t, i) { const r = new A4(0, 0, 0); return this.subtractFromFloatsToRef(e, t, i, r), r; } /** * Multiplies the given Spherical with the current Spherical and stores the result * @param spherical the spherical to multiply * @param ref the spherical to store the result in * @returns the updated ref */ multiplyToRef(e, t) { return t.radius = this.radius * e.radius, t.theta = this.theta * e.theta, t.phi = this.phi * e.phi, t; } /** * Multiplies the given Spherical with the current Spherical * @param spherical the spherical to multiply * @returns the product spherical */ multiply(e) { const t = new A4(0, 0, 0); return this.multiplyToRef(e, t), t; } /** * Multiplies the given Spherical with the current Spherical * @param spherical the spherical to multiply * @returns the current spherical 
*/ multiplyInPlace(e) { return this.multiplyToRef(e, this), this; } /** * Divides the current Spherical by the given Spherical and stores the result * @param spherical the spherical to divide * @param ref the spherical to store the result in * @returns the updated ref */ divideToRef(e, t) { return t.radius = this.radius / e.radius, t.theta = this.theta / e.theta, t.phi = this.phi / e.phi, t; } /** * Divides the current Spherical by the given Spherical * @param spherical the spherical to divide * @returns the quotient spherical */ divide(e) { const t = new A4(0, 0, 0); return this.divideToRef(e, t), t; } /** * Divides the current Spherical by the given Spherical * @param spherical the spherical to divide * @returns the current spherical */ divideInPlace(e) { return this.divideToRef(e, this), this; } /** * Clones the current spherical * @returns a clone of the current spherical */ clone() { return new A4(this.radius, this.theta, this.phi); } /** * Copies the source spherical into the current spherical * @param source the spherical to copy from * @returns the current spherical */ copyFrom(e) { return this.radius = e.radius, this.theta = e.theta, this.phi = e.phi, this; } /** * Copies the given values into the current spherical * @param radius the radius to use * @param theta the theta to use * @param phi the phi to use * @returns the current spherical */ copyFromFloats(e, t, i) { return this.radius = e, this.theta = t, this.phi = i, this; } /** * Scales the current spherical and stores the result * @param scale defines the multiplication factor * @param ref where to store the result * @returns the updated ref */ scaleToRef(e, t) { return t.radius = this.radius * e, t.theta = this.theta * e, t.phi = this.phi * e, t; } /** * Scales the current spherical and returns a new spherical with the scaled coordinates * @param scale defines the multiplication factor * @returns the scaled spherical */ scale(e) { const t = new A4(0, 0, 0); return this.scaleToRef(e, t), t; } /** * Scales the current spherical * @param scale defines the multiplication factor * @returns the current spherical */ scaleInPlace(e) { return this.scaleToRef(e, this), this; } /** * Sets the values of the current spherical * @param radius the new radius * @param theta the new theta * @param phi the new phi * @returns the current spherical */ set(e, t, i) { return this.radius = e, this.theta = t, this.phi = i, this; } /** * Sets the values of the current spherical * @param value the new values * @returns the current spherical */ setAll(e) { return this.set(e, e, e), this; } /** * Assigns the rectangular coordinates of the current Spherical to a Vector3 * @param ref the Vector3 to update * @returns the updated Vector3 */ toVector3ToRef(e) { const t = this.radius * Math.sin(this.theta) * Math.cos(this.phi), i = this.radius * Math.cos(this.theta), r = this.radius * Math.sin(this.theta) * Math.sin(this.phi); return e.set(t, i, r), e; } /** * Gets a Vector3 from the current spherical coordinates * @returns the (x, y,z) form of the current Spherical */ toVector3() { const e = new D(0, 0, 0); return this.toVector3ToRef(e); } /** * Assigns the spherical coordinates from a Vector3 * @param vector the vector to convert * @param ref the Spherical to update * @returns the updated ref */ static FromVector3ToRef(e, t) { return t.radius = e.length(), t.theta = Math.acos(e.y / t.radius), t.phi = Math.atan2(e.z, e.x), t; } /** * Gets a Spherical from a Vector3 * @param vector defines the vector in (x, y, z) coordinate space * @returns a new Spherical 
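* A minimal round-trip sketch (hedged: `Spherical` and `Vector3` are the un-minified Babylon.js names;
* in this bundle the class is exposed under a minified alias):
*
*   const spherical = Spherical.FromVector3(new Vector3(0, 1, 0)); // radius 1, theta 0 (angle from +y), phi 0
*   const roundTrip = spherical.toVector3();                       // back to (0, 1, 0)
*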
*/ static FromVector3(e) { const t = new A4(0, 0, 0); return A4.FromVector3ToRef(e, t), t; } /** * Converts an array of floats to a spherical * @param array the array to convert * @returns the converted spherical */ static FromArray(e) { return new A4(e[0], e[1], e[2]); } } function x0e(c) { return new Promise((e) => { DracoDecoderModule({ wasmBinary: c }).then((t) => { e({ module: t }); }); }); } function VH(c, e, t, i, r) { let s = null, n = null, a = null; try { s = new c.Decoder(), n = new c.DecoderBuffer(), n.Init(e, e.byteLength); let l; const o = s.GetEncodedGeometryType(n); switch (o) { case c.TRIANGULAR_MESH: { const d = new c.Mesh(); if (l = s.DecodeBufferToMesh(n, d), !l.ok() || d.ptr === 0) throw new Error(l.error_msg()); const p = d.num_faces() * 3, m = p * 4, _ = c._malloc(m); try { s.GetTrianglesUInt32Array(d, m, _); const v = new Uint32Array(p); v.set(new Uint32Array(c.HEAPF32.buffer, _, p)), i(v); } finally { c._free(_); } a = d; break; } case c.POINT_CLOUD: { const d = new c.PointCloud(); if (l = s.DecodeBufferToPointCloud(n, d), !l.ok() || !d.ptr) throw new Error(l.error_msg()); a = d; break; } default: throw new Error(`Invalid geometry type ${o}`); } const u = a.num_points(), h = (d, f, p, m) => { const _ = m.data_type(), v = m.num_components(), C = m.normalized(), x = m.byte_stride(), b = m.byte_offset(), M = { [c.DT_FLOAT32]: { typedArrayConstructor: Float32Array, heap: c.HEAPF32 }, [c.DT_INT8]: { typedArrayConstructor: Int8Array, heap: c.HEAP8 }, [c.DT_INT16]: { typedArrayConstructor: Int16Array, heap: c.HEAP16 }, [c.DT_INT32]: { typedArrayConstructor: Int32Array, heap: c.HEAP32 }, [c.DT_UINT8]: { typedArrayConstructor: Uint8Array, heap: c.HEAPU8 }, [c.DT_UINT16]: { typedArrayConstructor: Uint16Array, heap: c.HEAPU16 }, [c.DT_UINT32]: { typedArrayConstructor: Uint32Array, heap: c.HEAPU32 } }[_]; if (!M) throw new Error(`Invalid data type ${_}`); const R = u * v, w = R * M.typedArrayConstructor.BYTES_PER_ELEMENT, V = c._malloc(w); try { d.GetAttributeDataArrayForAllPoints(f, m, _, w, V); const k = new M.typedArrayConstructor(M.heap.buffer, V, R); r(p, k.slice(), v, b, x, C); } finally { c._free(V); } }; if (t) for (const d in t) { const f = t[d], p = s.GetAttributeByUniqueId(a, f); h(s, a, d, p); } else { const d = { position: c.POSITION, normal: c.NORMAL, color: c.COLOR, uv: c.TEX_COORD }; for (const f in d) { const p = s.GetAttributeId(a, d[f]); if (p !== -1) { const m = s.GetAttribute(a, p); h(s, a, f, m); } } } return u; } finally { a && c.destroy(a), n && c.destroy(n), s && c.destroy(s); } } function b0e() { let c; onmessage = (e) => { const t = e.data; switch (t.id) { case "init": { const i = t.decoder; i.url && (importScripts(i.url), c = DracoDecoderModule({ wasmBinary: i.wasmBinary })), postMessage({ id: "initDone" }); break; } case "decodeMesh": { if (!c) throw new Error("Draco decoder module is not available"); c.then((i) => { const r = VH(i, t.dataView, t.attributes, (s) => { postMessage({ id: "indices", data: s }, [s.buffer]); }, (s, n, a, l, o, u) => { postMessage({ id: "attribute", kind: s, data: n, size: a, byteOffset: l, byteStride: o, normalized: u }, [n.buffer]); }); postMessage({ id: "decodeMeshDone", totalVertices: r }); }); break; } } }; } class k_ { /** * Returns true if the decoder configuration is available. 
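* Sketch of guarding on decoder availability before decoding (hedged: `DracoCompression` is the
* un-minified Babylon.js name of this class, and `scene`, `dracoData` and `mesh` are placeholders):
*
*   if (DracoCompression.DecoderAvailable) {
*     const geometry = await DracoCompression.Default.decodeMeshToGeometryAsync("dracoGeom", scene, dracoData);
*     geometry.applyToMesh(mesh);
*   }
*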
*/ static get DecoderAvailable() { const e = k_.Configuration.decoder; return !!(e.wasmUrl && e.wasmBinaryUrl && typeof WebAssembly == "object" || e.fallbackUrl); } static GetDefaultNumWorkers() { return typeof navigator != "object" || !navigator.hardwareConcurrency ? 1 : Math.min(Math.floor(navigator.hardwareConcurrency * 0.5), 4); } /** * Default instance for the draco compression object. */ static get Default() { return k_._Default || (k_._Default = new k_()), k_._Default; } /** * Constructor * @param numWorkers The number of workers for async operations. Specify `0` to disable web workers and run synchronously in the current context. */ constructor(e = k_.DefaultNumWorkers) { const t = k_.Configuration.decoder, i = t.wasmUrl && t.wasmBinaryUrl && typeof WebAssembly == "object" ? { url: Ve.GetBabylonScriptURL(t.wasmUrl, !0), wasmBinaryPromise: Ve.LoadFileAsync(Ve.GetBabylonScriptURL(t.wasmBinaryUrl, !0)) } : { url: Ve.GetBabylonScriptURL(t.fallbackUrl), wasmBinaryPromise: Promise.resolve(void 0) }; e && typeof Worker == "function" && typeof URL == "function" ? this._workerPoolPromise = i.wasmBinaryPromise.then((r) => { const s = `${VH}(${b0e})()`, n = URL.createObjectURL(new Blob([s], { type: "application/javascript" })); return new xw(e, () => new Promise((a, l) => { const o = new Worker(n), u = (d) => { o.removeEventListener("error", u), o.removeEventListener("message", h), l(d); }, h = (d) => { d.data.id === "initDone" && (o.removeEventListener("error", u), o.removeEventListener("message", h), a(o)); }; o.addEventListener("error", u), o.addEventListener("message", h), o.postMessage({ id: "init", decoder: { url: i.url, wasmBinary: r } }); })); }) : this._decoderModulePromise = i.wasmBinaryPromise.then((r) => { if (!i.url) throw new Error("Draco decoder module is not available"); return Ve.LoadBabylonScriptAsync(i.url).then(() => x0e(r)); }); } /** * Stop all async operations and release resources. */ dispose() { this._workerPoolPromise && this._workerPoolPromise.then((e) => { e.dispose(); }), delete this._workerPoolPromise, delete this._decoderModulePromise; } /** * Returns a promise that resolves when ready. Call this manually to ensure draco compression is ready before use. * @returns a promise that resolves when ready */ whenReadyAsync() { return this._workerPoolPromise ? this._workerPoolPromise.then(() => { }) : this._decoderModulePromise ? this._decoderModulePromise.then(() => { }) : Promise.resolve(); } _decodeMeshAsync(e, t, i) { const r = e instanceof ArrayBuffer ? new Int8Array(e) : new Int8Array(e.buffer, e.byteOffset, e.byteLength), s = (n, a) => i && i[n] !== void 0 ? (a !== i[n] && Ce.Warn(`Normalized flag from Draco data (${a}) does not match normalized flag from glTF accessor (${i[n]}). 
Using flag from glTF accessor.`), i[n]) : a; if (this._workerPoolPromise) return this._workerPoolPromise.then((n) => new Promise((a, l) => { n.push((o, u) => { let h = null; const d = [], f = (_) => { o.removeEventListener("error", f), o.removeEventListener("message", p), l(_), u(); }, p = (_) => { const v = _.data; switch (v.id) { case "decodeMeshDone": { o.removeEventListener("error", f), o.removeEventListener("message", p), a({ indices: h, attributes: d, totalVertices: v.totalVertices }), u(); break; } case "indices": { h = v.data; break; } case "attribute": { d.push({ kind: v.kind, data: v.data, size: v.size, byteOffset: v.byteOffset, byteStride: v.byteStride, normalized: s(v.kind, v.normalized) }); break; } } }; o.addEventListener("error", f), o.addEventListener("message", p); const m = r.slice(); o.postMessage({ id: "decodeMesh", dataView: m, attributes: t }, [m.buffer]); }); })); if (this._decoderModulePromise) return this._decoderModulePromise.then((n) => { let a = null; const l = [], o = VH(n.module, r, t, (u) => { a = u; }, (u, h, d, f, p, m) => { l.push({ kind: u, data: h, size: d, byteOffset: f, byteStride: p, normalized: m }); }); return { indices: a, attributes: l, totalVertices: o }; }); throw new Error("Draco decoder module is not available"); } /** * Decode Draco compressed mesh data to Babylon geometry. * @param name The name to use when creating the geometry * @param scene The scene to use when creating the geometry * @param data The ArrayBuffer or ArrayBufferView for the Draco compression data * @param attributes A map of attributes from vertex buffer kinds to Draco unique ids * @returns A promise that resolves with the decoded geometry */ decodeMeshToGeometryAsync(e, t, i, r) { return this._decodeMeshAsync(i, r).then((s) => { const n = new yc(e, t); s.indices && n.setIndices(s.indices); for (const a of s.attributes) n.setVerticesBuffer(new Y(t.getEngine(), a.data, a.kind, !1, void 0, a.byteStride, void 0, a.byteOffset, a.size, void 0, a.normalized, !0), s.totalVertices); return n; }); } /** @internal */ _decodeMeshToGeometryForGltfAsync(e, t, i, r, s) { return this._decodeMeshAsync(i, r, s).then((n) => { const a = new yc(e, t); n.indices && a.setIndices(n.indices); for (const l of n.attributes) a.setVerticesBuffer(new Y(t.getEngine(), l.data, l.kind, !1, void 0, l.byteStride, void 0, l.byteOffset, l.size, void 0, l.normalized, !0), n.totalVertices); return a; }); } /** * Decode Draco compressed mesh data to Babylon vertex data. * @param data The ArrayBuffer or ArrayBufferView for the Draco compression data * @param attributes A map of attributes from vertex buffer kinds to Draco unique ids * @returns A promise that resolves with the decoded vertex data * @deprecated Use {@link decodeMeshToGeometryAsync} for better performance in some cases */ decodeMeshAsync(e, t) { return this._decodeMeshAsync(e, t).then((i) => { const r = new Ot(); i.indices && (r.indices = i.indices); for (const s of i.attributes) { const n = Y.GetFloatData(s.data, s.size, Y.GetDataType(s.data), s.byteOffset, s.byteStride, s.normalized, i.totalVertices); r.set(n, s.kind); } return r; }); } } k_.Configuration = { decoder: { wasmUrl: `${Ve._DefaultCdnUrl}/draco_wasm_wrapper_gltf.js`, wasmBinaryUrl: `${Ve._DefaultCdnUrl}/draco_decoder_gltf.wasm`, fallbackUrl: `${Ve._DefaultCdnUrl}/draco_decoder_gltf.js` } }; k_.DefaultNumWorkers = k_.GetDefaultNumWorkers(); k_._Default = null; class HC { /** * Default instance for the meshoptimizer object. 
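* Usage sketch for the default instance (hedged: `MeshoptCompression` is the un-minified Babylon.js
* name; `compressedBytes`, `vertexCount` and `byteStride` are placeholders, and the mode/filter
* strings follow EXT_meshopt_compression):
*
*   const decoded = await MeshoptCompression.Default.decodeGltfBufferAsync(
*     compressedBytes, vertexCount, byteStride, "ATTRIBUTES", "NONE");
*   // `decoded` is a Uint8Array of vertexCount * byteStride bytes
*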
*/ static get Default() { return HC._Default || (HC._Default = new HC()), HC._Default; } /** * Constructor */ constructor() { const e = HC.Configuration.decoder; this._decoderModulePromise = Ve.LoadBabylonScriptAsync(e.url).then(() => MeshoptDecoder.ready); } /** * Stop all async operations and release resources. */ dispose() { delete this._decoderModulePromise; } /** * Decode meshopt data. * @see https://github.com/zeux/meshoptimizer/tree/master/js#decoder * @param source The input data. * @param count The number of elements. * @param stride The stride in bytes. * @param mode The compression mode. * @param filter The compression filter. * @returns a Promise that resolves to the decoded data */ decodeGltfBufferAsync(e, t, i, r, s) { return this._decoderModulePromise.then(() => { const n = new Uint8Array(t * i); return MeshoptDecoder.decodeGltfBuffer(n, t, i, e, r, s), n; }); } } HC.Configuration = { decoder: { url: `${Ve._DefaultCdnUrl}/meshopt_decoder.js` } }; HC._Default = null; let LF = 0; class _L { /** * Initializes the vertex * @param pos The position of the vertex * @param normal The normal of the vertex * @param uv The texture coordinate of the vertex * @param vertColor The RGBA color of the vertex */ constructor(e, t, i, r) { this.pos = e, this.normal = t, this.uv = i, this.vertColor = r; } /** * Make a clone, or deep copy, of the vertex * @returns A new Vertex */ clone() { var e, t; return new _L(this.pos.clone(), this.normal.clone(), (e = this.uv) === null || e === void 0 ? void 0 : e.clone(), (t = this.vertColor) === null || t === void 0 ? void 0 : t.clone()); } /** * Invert all orientation-specific data (e.g. vertex normal). Called when the * orientation of a polygon is flipped. */ flip() { this.normal = this.normal.scale(-1); } /** * Create a new vertex between this vertex and `other` by linearly * interpolating all properties using a parameter of `t`. Subclasses should * override this to interpolate additional properties. * @param other the vertex to interpolate against * @param t The factor used to linearly interpolate between the vertices */ interpolate(e, t) { return new _L(D.Lerp(this.pos, e.pos, t), D.Lerp(this.normal, e.normal, t), this.uv && e.uv ? at.Lerp(this.uv, e.uv, t) : void 0, this.vertColor && e.vertColor ? Et.Lerp(this.vertColor, e.vertColor, t) : void 0); } } class QR { /** * Initializes the plane * @param normal The normal for the plane * @param w */ constructor(e, t) { this.normal = e, this.w = t; } /** * Construct a plane from three points * @param a Point a * @param b Point b * @param c Point c */ static FromPoints(e, t, i) { const r = i.subtract(e), s = t.subtract(e); if (r.lengthSquared() === 0 || s.lengthSquared() === 0) return null; const n = D.Normalize(D.Cross(r, s)); return new QR(n, D.Dot(n, e)); } /** * Clone, or make a deep copy of the plane * @returns a new Plane */ clone() { return new QR(this.normal.clone(), this.w); } /** * Flip the face of the plane */ flip() { this.normal.scaleInPlace(-1), this.w = -this.w; } /** * Split `polygon` by this plane if needed, then put the polygon or polygon * fragments in the appropriate lists. Coplanar polygons go into either `* coplanarFront` or `coplanarBack` depending on their orientation with * respect to this plane. 
Polygons in front or in back of this plane go into * either `front` or `back` * @param polygon The polygon to be split * @param coplanarFront Will contain polygons coplanar with the plane that are oriented to the front of the plane * @param coplanarBack Will contain polygons coplanar with the plane that are oriented to the back of the plane * @param front Will contain the polygons in front of the plane * @param back Will contain the polygons begind the plane */ splitPolygon(e, t, i, r, s) { let u = 0; const h = []; let d, f; for (d = 0; d < e.vertices.length; d++) { f = D.Dot(this.normal, e.vertices[d].pos) - this.w; const p = f < -QR.EPSILON ? 2 : f > QR.EPSILON ? 1 : 0; u |= p, h.push(p); } switch (u) { case 0: (D.Dot(this.normal, e.plane.normal) > 0 ? t : i).push(e); break; case 1: r.push(e); break; case 2: s.push(e); break; case 3: { const p = [], m = []; for (d = 0; d < e.vertices.length; d++) { const v = (d + 1) % e.vertices.length, C = h[d], x = h[v], b = e.vertices[d], S = e.vertices[v]; if (C !== 2 && p.push(b), C !== 1 && m.push(C !== 2 ? b.clone() : b), (C | x) === 3) { f = (this.w - D.Dot(this.normal, b.pos)) / D.Dot(this.normal, S.pos.subtract(b.pos)); const M = b.interpolate(S, f); p.push(M), m.push(M.clone()); } } let _; p.length >= 3 && (_ = new YO(p, e.shared), _.plane && r.push(_)), m.length >= 3 && (_ = new YO(m, e.shared), _.plane && s.push(_)); break; } } } } QR.EPSILON = 1e-5; class YO { /** * Initializes the polygon * @param vertices The vertices of the polygon * @param shared The properties shared across all polygons */ constructor(e, t) { this.vertices = e, this.shared = t, this.plane = QR.FromPoints(e[0].pos, e[1].pos, e[2].pos); } /** * Clones, or makes a deep copy, or the polygon */ clone() { const e = this.vertices.map((t) => t.clone()); return new YO(e, this.shared); } /** * Flips the faces of the polygon */ flip() { this.vertices.reverse().map((e) => { e.flip(); }), this.plane.flip(); } } let _4 = class GF { /** * Initializes the node * @param polygons A collection of polygons held in the node */ constructor(e) { this._plane = null, this._front = null, this._back = null, this._polygons = new Array(), e && this.build(e); } /** * Clones, or makes a deep copy, of the node * @returns The cloned node */ clone() { const e = new GF(); return e._plane = this._plane && this._plane.clone(), e._front = this._front && this._front.clone(), e._back = this._back && this._back.clone(), e._polygons = this._polygons.map((t) => t.clone()), e; } /** * Convert solid space to empty space and empty space to solid space */ invert() { for (let t = 0; t < this._polygons.length; t++) this._polygons[t].flip(); this._plane && this._plane.flip(), this._front && this._front.invert(), this._back && this._back.invert(); const e = this._front; this._front = this._back, this._back = e; } /** * Recursively remove all polygons in `polygons` that are inside this BSP * tree. * @param polygons Polygons to remove from the BSP * @returns Polygons clipped from the BSP */ clipPolygons(e) { if (!this._plane) return e.slice(); let t = [], i = []; for (let r = 0; r < e.length; r++) this._plane.splitPolygon(e[r], t, i, t, i); return this._front && (t = this._front.clipPolygons(t)), this._back ? i = this._back.clipPolygons(i) : i = [], t.concat(i); } /** * Remove all polygons in this BSP tree that are inside the other BSP tree * `bsp`. 
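* For reference, the boolean operations below chain this call; a sketch mirroring the `union`
* implementation in this file (hedged: `Node` stands for the minified BSP class, `csgA`/`csgB` are placeholder CSGs):
*
*   const a = new Node(csgA.clone()._polygons), b = new Node(csgB.clone()._polygons);
*   a.clipTo(b); b.clipTo(a);
*   b.invert(); b.clipTo(a); b.invert(); // drop polygons of B inside A, handling coplanar faces
*   a.build(b.allPolygons());            // `a` now holds the union of both solids
*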
* @param bsp BSP containing polygons to remove from this BSP */ clipTo(e) { this._polygons = e.clipPolygons(this._polygons), this._front && this._front.clipTo(e), this._back && this._back.clipTo(e); } /** * Return a list of all polygons in this BSP tree * @returns List of all polygons in this BSP tree */ allPolygons() { let e = this._polygons.slice(); return this._front && (e = e.concat(this._front.allPolygons())), this._back && (e = e.concat(this._back.allPolygons())), e; } /** * Build a BSP tree out of `polygons`. When called on an existing tree, the * new polygons are filtered down to the bottom of the tree and become new * nodes there. Each set of polygons is partitioned using the first polygon * (no heuristic is used to pick a good split) * @param polygons Polygons used to construct the BSP tree */ build(e) { if (!e.length) return; this._plane || (this._plane = e[0].plane.clone()); const t = [], i = []; for (let r = 0; r < e.length; r++) this._plane.splitPolygon(e[r], this._polygons, this._polygons, t, i); t.length && (this._front || (this._front = new GF()), this._front.build(t)), i.length && (this._back || (this._back = new GF()), this._back.build(i)); } }; class zA { constructor() { this._polygons = new Array(); } /** * Convert a VertexData to CSG * @param mesh defines the VertexData to convert to CSG * @returns the new CSG */ static FromVertexData(e) { let t, i, r; const s = [], n = e.indices, a = e.positions, l = e.normals, o = e.uvs, u = e.colors; if (!n || !a) throw "BABYLON.CSG: VertexData must at least contain positions and indices"; for (let d = 0; d < n.length; d += 3) { r = []; for (let f = 0; f < 3; f++) { const p = d + f, m = n[p], _ = l ? D.FromArray(l, m * 3) : D.Zero(), v = o ? at.FromArray(o, m * 2) : void 0, C = u ? Et.FromArray(u, m * 4) : void 0, x = D.FromArray(a, m * 3); t = new _L(x, _, v, C), r.push(t); } i = new YO(r, { subMeshId: 0, meshId: LF, materialIndex: 0 }), i.plane && s.push(i); } const h = zA._FromPolygons(s); return h.matrix = Ae.Identity(), h.position = D.Zero(), h.rotation = D.Zero(), h.scaling = D.One(), h.rotationQuaternion = Ze.Identity(), LF++, h; } /** * Convert the Mesh to CSG * @param mesh The Mesh to convert to CSG * @param absolute If true, the final (local) matrix transformation is set to the identity and not to that of `mesh`. It can help when dealing with right-handed meshes (default: false) * @returns A new CSG from the Mesh */ static FromMesh(e, t = !1) { let i, r, s, n, a, l, o; const u = []; let h, d, f, p = null, m, _ = !1; if (e instanceof ke) e.computeWorldMatrix(!0), h = e.getWorldMatrix(), d = e.position.clone(), f = e.rotation.clone(), e.rotationQuaternion && (p = e.rotationQuaternion.clone()), m = e.scaling.clone(), e.material && t && (_ = e.material.sideOrientation === 0); else throw "BABYLON.CSG: Wrong Mesh type, must be BABYLON.Mesh"; const v = e.getIndices(), C = e.getVerticesData(Y.PositionKind), x = e.getVerticesData(Y.NormalKind), b = e.getVerticesData(Y.UVKind), S = e.getVerticesData(Y.ColorKind), M = e.subMeshes; for (let w = 0, V = M.length; w < V; w++) for (let k = M[w].indexStart, L = M[w].indexCount + M[w].indexStart; k < L; k += 3) { o = []; for (let B = 0; B < 3; B++) { const U = B === 0 ? k + B : _ ? 
k + 3 - B : k + B, K = new D(x[v[U] * 3], x[v[U] * 3 + 1], x[v[U] * 3 + 2]); b && (s = new at(b[v[U] * 2], b[v[U] * 2 + 1])), S && (a = new Et(S[v[U] * 4], S[v[U] * 4 + 1], S[v[U] * 4 + 2], S[v[U] * 4 + 3])); const ee = new D(C[v[U] * 3], C[v[U] * 3 + 1], C[v[U] * 3 + 2]); n = D.TransformCoordinates(ee, h), r = D.TransformNormal(K, h), i = new _L(n, r, s, a), o.push(i); } l = new YO(o, { subMeshId: w, meshId: LF, materialIndex: M[w].materialIndex }), l.plane && u.push(l); } const R = zA._FromPolygons(u); return R.matrix = t ? Ae.Identity() : h, R.position = t ? D.Zero() : d, R.rotation = t ? D.Zero() : f, R.scaling = t ? D.One() : m, R.rotationQuaternion = t && p ? Ze.Identity() : p, LF++, R; } /** * Construct a CSG solid from a list of `CSG.Polygon` instances. * @param polygons Polygons used to construct a CSG solid */ static _FromPolygons(e) { const t = new zA(); return t._polygons = e, t; } /** * Clones, or makes a deep copy, of the CSG * @returns A new CSG */ clone() { const e = new zA(); return e._polygons = this._polygons.map((t) => t.clone()), e.copyTransformAttributes(this), e; } /** * Unions this CSG with another CSG * @param csg The CSG to union against this CSG * @returns The unioned CSG */ union(e) { const t = new _4(this.clone()._polygons), i = new _4(e.clone()._polygons); return t.clipTo(i), i.clipTo(t), i.invert(), i.clipTo(t), i.invert(), t.build(i.allPolygons()), zA._FromPolygons(t.allPolygons()).copyTransformAttributes(this); } /** * Unions this CSG with another CSG in place * @param csg The CSG to union against this CSG */ unionInPlace(e) { const t = new _4(this._polygons), i = new _4(e._polygons); t.clipTo(i), i.clipTo(t), i.invert(), i.clipTo(t), i.invert(), t.build(i.allPolygons()), this._polygons = t.allPolygons(); } /** * Subtracts this CSG with another CSG * @param csg The CSG to subtract against this CSG * @returns A new CSG */ subtract(e) { const t = new _4(this.clone()._polygons), i = new _4(e.clone()._polygons); return t.invert(), t.clipTo(i), i.clipTo(t), i.invert(), i.clipTo(t), i.invert(), t.build(i.allPolygons()), t.invert(), zA._FromPolygons(t.allPolygons()).copyTransformAttributes(this); } /** * Subtracts this CSG with another CSG in place * @param csg The CSG to subtract against this CSG */ subtractInPlace(e) { const t = new _4(this._polygons), i = new _4(e._polygons); t.invert(), t.clipTo(i), i.clipTo(t), i.invert(), i.clipTo(t), i.invert(), t.build(i.allPolygons()), t.invert(), this._polygons = t.allPolygons(); } /** * Intersect this CSG with another CSG * @param csg The CSG to intersect against this CSG * @returns A new CSG */ intersect(e) { const t = new _4(this.clone()._polygons), i = new _4(e.clone()._polygons); return t.invert(), i.clipTo(t), i.invert(), t.clipTo(i), i.clipTo(t), t.build(i.allPolygons()), t.invert(), zA._FromPolygons(t.allPolygons()).copyTransformAttributes(this); } /** * Intersects this CSG with another CSG in place * @param csg The CSG to intersect against this CSG */ intersectInPlace(e) { const t = new _4(this._polygons), i = new _4(e._polygons); t.invert(), i.clipTo(t), i.invert(), t.clipTo(i), i.clipTo(t), t.build(i.allPolygons()), t.invert(), this._polygons = t.allPolygons(); } /** * Return a new CSG solid with solid and empty space switched. This solid is * not modified. 
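* For context, a typical boolean workflow with this CSG class (hedged sketch: `CSG` is the
* un-minified Babylon.js name; `box`, `sphere` and `scene` are placeholder meshes and scene):
*
*   const cut = CSG.FromMesh(box).subtract(CSG.FromMesh(sphere)); // also union(), intersect(), inverse()
*   const result = cut.toMesh("boxMinusSphere", null, scene, true);
*   box.dispose(); sphere.dispose();
*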
* @returns A new CSG solid with solid and empty space switched */ inverse() { const e = this.clone(); return e.inverseInPlace(), e; } /** * Inverses the CSG in place */ inverseInPlace() { this._polygons.map((e) => { e.flip(); }); } /** * This is used to keep meshes transformations so they can be restored * when we build back a Babylon Mesh * NB : All CSG operations are performed in world coordinates * @param csg The CSG to copy the transform attributes from * @returns This CSG */ copyTransformAttributes(e) { return this.matrix = e.matrix, this.position = e.position, this.rotation = e.rotation, this.scaling = e.scaling, this.rotationQuaternion = e.rotationQuaternion, this; } /** * Build vertex data from CSG * Coordinates here are in world space * @returns the final vertex data */ toVertexData(e = null, t = null) { const i = this.matrix.clone(); i.invert(); const r = this._polygons, s = [], n = [], a = []; let l = null, o = null; const u = D.Zero(), h = D.Zero(), d = at.Zero(), f = new Et(0, 0, 0, 0), p = [0, 0, 0], m = {}; let _; for (let C = 0, x = r.length; C < x; C++) { const b = r[C]; e && e(b); for (let S = 2, M = b.vertices.length; S < M; S++) { p[0] = 0, p[1] = S - 1, p[2] = S; for (let R = 0; R < 3; R++) { u.copyFrom(b.vertices[p[R]].pos), h.copyFrom(b.vertices[p[R]].normal), b.vertices[p[R]].uv && (l || (l = []), d.copyFrom(b.vertices[p[R]].uv)), b.vertices[p[R]].vertColor && (o || (o = []), f.copyFrom(b.vertices[p[R]].vertColor)); const w = D.TransformCoordinates(u, i), V = D.TransformNormal(h, i); _ = m[w.x + "," + w.y + "," + w.z]; let k = !1; l && !(l[_ * 2] === d.x || l[_ * 2 + 1] === d.y) && (k = !0); let L = !1; o && !(o[_ * 4] === f.r || o[_ * 4 + 1] === f.g || o[_ * 4 + 2] === f.b || o[_ * 4 + 3] === f.a) && (L = !0), (!(typeof _ < "u" && a[_ * 3] === V.x && a[_ * 3 + 1] === V.y && a[_ * 3 + 2] === V.z) || k || L) && (s.push(w.x, w.y, w.z), l && l.push(d.x, d.y), a.push(h.x, h.y, h.z), o && o.push(f.r, f.g, f.b, f.a), _ = m[w.x + "," + w.y + "," + w.z] = s.length / 3 - 1), n.push(_), t && t(); } } } const v = new Ot(); return v.positions = s, v.normals = a, l && (v.uvs = l), o && (v.colors = o), v.indices = n, v; } /** * Build Raw mesh from CSG * Coordinates here are in world space * @param name The name of the mesh geometry * @param scene The Scene * @param keepSubMeshes Specifies if the submeshes should be kept * @returns A new Mesh */ buildMeshGeometry(e, t, i) { const r = new ke(e, t), s = this._polygons; let n = 0; const a = {}; let l; if (i && s.sort((u, h) => u.shared.meshId === h.shared.meshId ? 
u.shared.subMeshId - h.shared.subMeshId : u.shared.meshId - h.shared.meshId), this.toVertexData((u) => { a[u.shared.meshId] || (a[u.shared.meshId] = {}), a[u.shared.meshId][u.shared.subMeshId] || (a[u.shared.meshId][u.shared.subMeshId] = { indexStart: 1 / 0, indexEnd: -1 / 0, materialIndex: u.shared.materialIndex }), l = a[u.shared.meshId][u.shared.subMeshId]; }, () => { l.indexStart = Math.min(n, l.indexStart), l.indexEnd = Math.max(n, l.indexEnd), n++; }).applyToMesh(r), i) { let u = 0, h; r.subMeshes = []; for (const d in a) { h = -1; for (const f in a[d]) l = a[d][f], ed.CreateFromIndices(l.materialIndex + u, l.indexStart, l.indexEnd - l.indexStart + 1, r), h = Math.max(l.materialIndex, h); u += ++h; } } return r; } /** * Build Mesh from CSG taking material and transforms into account * @param name The name of the Mesh * @param material The material of the Mesh * @param scene The Scene * @param keepSubMeshes Specifies if submeshes should be kept * @returns The new Mesh */ toMesh(e, t = null, i, r) { const s = this.buildMeshGeometry(e, i, r); return s.material = t, s.position.copyFrom(this.position), s.rotation.copyFrom(this.rotation), this.rotationQuaternion && (s.rotationQuaternion = this.rotationQuaternion.clone()), s.scaling.copyFrom(this.scaling), s.computeWorldMatrix(!0), s; } } const E0e = "meshUVSpaceRendererVertexShader", T0e = `precision highp float;attribute vec3 position;attribute vec3 normal;attribute vec2 uv;uniform mat4 projMatrix;varying vec2 vDecalTC; #include #include #include #include[0..maxSimultaneousMorphTargets] #include void main(void) {vec3 positionUpdated=position;vec3 normalUpdated=normal; #include #include[0..maxSimultaneousMorphTargets] #include #include #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0);mat3 normWorldSM=mat3(finalWorld);vec3 vNormalW; #if defined(INSTANCES) && defined(THIN_INSTANCES) vNormalW=normalUpdated/vec3(dot(normWorldSM[0],normWorldSM[0]),dot(normWorldSM[1],normWorldSM[1]),dot(normWorldSM[2],normWorldSM[2]));vNormalW=normalize(normWorldSM*vNormalW); #else #ifdef NONUNIFORMSCALING normWorldSM=transposeMat3(inverseMat3(normWorldSM)); #endif vNormalW=normalize(normWorldSM*normalUpdated); #endif vec3 normalView=normalize((projMatrix*vec4(vNormalW,0.0)).xyz);vec3 decalTC=(projMatrix*worldPos).xyz;vDecalTC=decalTC.xy;gl_Position=vec4(uv*2.0-1.0,normalView.z>0.0 ? 2. : decalTC.z,1.0);}`; je.ShadersStore[E0e] = T0e; const S0e = "meshUVSpaceRendererPixelShader", M0e = `precision highp float;varying vec2 vDecalTC;uniform sampler2D textureSampler;void main(void) {if (vDecalTC.x<0. || vDecalTC.x>1. || vDecalTC.y<0. || vDecalTC.y>1.) 
{discard;} gl_FragColor=texture2D(textureSampler,vDecalTC);} `; je.ShadersStore[S0e] = M0e; const R0e = "meshUVSpaceRendererMaskerVertexShader", P0e = "attribute vec2 uv;varying vec2 vUV;void main(void) {gl_Position=vec4(vec2(uv.x,uv.y)*2.0-1.0,0.,1.0);vUV=uv;}"; je.ShadersStore[R0e] = P0e; const I0e = "meshUVSpaceRendererMaskerPixelShader", D0e = `varying vec2 vUV;void main(void) {gl_FragColor=vec4(1.0,1.0,1.0,1.0);} `; je.ShadersStore[I0e] = D0e; const O0e = "meshUVSpaceRendererFinaliserPixelShader", w0e = `precision highp float;varying vec2 vUV;uniform sampler2D textureSampler;uniform sampler2D maskTextureSampler;uniform vec2 textureSize;void main() {vec4 mask=texture2D(maskTextureSampler,vUV).rgba;if (mask.r>0.5) {gl_FragColor=texture2D(textureSampler,vUV);} else {vec2 texelSize=4.0/textureSize;vec2 uv_p01=vUV+vec2(-1.0,0.0)*texelSize;vec2 uv_p21=vUV+vec2(1.0,0.0)*texelSize;vec2 uv_p10=vUV+vec2(0.0,-1.0)*texelSize;vec2 uv_p12=vUV+vec2(0.0,1.0)*texelSize;float mask_p01=texture2D(maskTextureSampler,uv_p01).r;float mask_p21=texture2D(maskTextureSampler,uv_p21).r;float mask_p10=texture2D(maskTextureSampler,uv_p10).r;float mask_p12=texture2D(maskTextureSampler,uv_p12).r;vec4 col=vec4(0.0,0.0,0.0,0.0);float total_weight=0.0;if (mask_p01>0.5) {col+=texture2D(textureSampler,uv_p01);total_weight+=1.0;} if (mask_p21>0.5) {col+=texture2D(textureSampler,uv_p21);total_weight+=1.0;} if (mask_p10>0.5) {col+=texture2D(textureSampler,uv_p10);total_weight+=1.0;} if (mask_p12>0.5) {col+=texture2D(textureSampler,uv_p12);total_weight+=1.0;} if (total_weight>0.0) {gl_FragColor=col/total_weight;} else {gl_FragColor=col;}}} `; je.ShadersStore[O0e] = w0e; const L0e = "meshUVSpaceRendererFinaliserVertexShader", N0e = `precision highp float;attribute vec3 position;attribute vec2 uv;uniform mat4 worldViewProjection;varying vec2 vUV;void main() {gl_Position=worldViewProjection*vec4(position,1.0);vUV=uv;} `; je.ShadersStore[L0e] = N0e; class RC { static _GetShader(e) { if (!e._meshUVSpaceRendererShader) { const t = new Lo("meshUVSpaceRendererShader", e, { vertex: "meshUVSpaceRenderer", fragment: "meshUVSpaceRenderer" }, { attributes: ["position", "normal", "uv"], uniforms: ["world", "projMatrix"], samplers: ["textureSampler"], needAlphaBlending: !0 }); t.backFaceCulling = !1, t.alphaMode = 2, e.onDisposeObservable.add(() => { var i; (i = e._meshUVSpaceRendererShader) === null || i === void 0 || i.dispose(), e._meshUVSpaceRendererShader = null; }), e._meshUVSpaceRendererShader = t; } return e._meshUVSpaceRendererShader; } static _GetMaskShader(e) { if (!e._meshUVSpaceRendererMaskShader) { const t = new Lo("meshUVSpaceRendererMaskShader", e, { vertex: "meshUVSpaceRendererMasker", fragment: "meshUVSpaceRendererMasker" }, { attributes: ["position", "uv"], uniforms: ["worldViewProjection"] }); t.backFaceCulling = !1, t.alphaMode = 2, e.onDisposeObservable.add(() => { var i; (i = e._meshUVSpaceRendererMaskShader) === null || i === void 0 || i.dispose(), e._meshUVSpaceRendererMaskShader = null; }), e._meshUVSpaceRendererMaskShader = t; } return e._meshUVSpaceRendererMaskShader; } static _IsRenderTargetTexture(e) { return e.renderList !== void 0; } /** * Creates a new MeshUVSpaceRenderer * @param mesh The mesh used for the source UV space * @param scene The scene the mesh belongs to * @param options The options to use when creating the texture */ constructor(e, t, i) { this._textureCreatedInternally = !1, this._configureUserCreatedTexture = !0, this._maskTexture = null, this._finalPostProcess = null, this.clearColor = new 
Et(0, 0, 0, 0), this._mesh = e, this._scene = t, this._options = Object.assign({ width: 1024, height: 1024, textureType: 0, generateMipMaps: !0, optimizeUVAllocation: !0, uvEdgeBlending: !1 }, i); } /** * Checks if the texture is ready to be used * @returns true if the texture is ready to be used */ isReady() { var e, t, i, r; this.texture || this._createDiffuseRTT(); const s = RC._IsRenderTargetTexture(this.texture) ? this.texture.isReadyForRendering() : this.texture.isReady(), n = (t = (e = this._maskTexture) === null || e === void 0 ? void 0 : e.isReadyForRendering()) !== null && t !== void 0 ? t : !0, a = (r = (i = this._finalPostProcess) === null || i === void 0 ? void 0 : i.isReady()) !== null && r !== void 0 ? r : !0; return s && n && a; } /** * Projects and renders a texture in the mesh UV space * @param texture The texture * @param position The position of the center of projection (world space coordinates) * @param normal The direction of the projection (world space coordinates) * @param size The size of the projection * @param angle The rotation angle around the direction of the projection */ renderTexture(e, t, i, r, s = 0) { if (this.texture ? this._configureUserCreatedTexture && this._configureUserCreatedRTT() : this._createDiffuseRTT(), RC._IsRenderTargetTexture(this.texture)) { const n = this._createProjectionMatrix(t, i, r, s), a = RC._GetShader(this._scene); a.setTexture("textureSampler", e), a.setMatrix("projMatrix", n), this.texture.render(); } } /** * Clears the texture map */ clear() { var e, t, i; if (RC._IsRenderTargetTexture(this.texture) && this.texture.renderTarget) { const r = this._scene.getEngine(); r.bindFramebuffer(this.texture.renderTarget), r.clear(this.clearColor, !0, !0, !0), r.unBindFramebuffer(this.texture.renderTarget); } if (!((e = this._finalPostProcess) === null || e === void 0) && e.inputTexture) { const r = this._scene.getEngine(); r.bindFramebuffer((t = this._finalPostProcess) === null || t === void 0 ? void 0 : t.inputTexture), r.clear(this.clearColor, !0, !0, !0), r.unBindFramebuffer((i = this._finalPostProcess) === null || i === void 0 ? 
void 0 : i.inputTexture); } } /** * Disposes of the resources */ dispose() { var e, t; this._textureCreatedInternally && (this.texture.dispose(), this._textureCreatedInternally = !1), this._configureUserCreatedTexture = !0, (e = this._maskTexture) === null || e === void 0 || e.dispose(), this._maskTexture = null, (t = this._finalPostProcess) === null || t === void 0 || t.dispose(), this._finalPostProcess = null; } _configureUserCreatedRTT() { this._configureUserCreatedTexture = !1, RC._IsRenderTargetTexture(this.texture) && (this.texture.setMaterialForRendering(this._mesh, RC._GetShader(this._scene)), this.texture.onClearObservable.add(() => { }), this.texture.renderList = [this._mesh], this._options.uvEdgeBlending && (this._createMaskTexture(), this._createPostProcess(), this.texture.addPostProcess(this._finalPostProcess))); } _createDiffuseRTT() { this._textureCreatedInternally = !0; const e = this._createRenderTargetTexture(this._options.width, this._options.height); e.setMaterialForRendering(this._mesh, RC._GetShader(this._scene)), this.texture = e, this._configureUserCreatedTexture = !1, this._options.uvEdgeBlending && (this._createMaskTexture(), this._createPostProcess(), e.addPostProcess(this._finalPostProcess)); } _createMaskTexture() { this._maskTexture || (this._maskTexture = new ra( this._mesh.name + "_maskTexture", { width: this._options.width, height: this._options.height }, this._scene, !1, // No mipmaps for the mask texture !0, 0, !1, 2, void 0, void 0, void 0, 6 ), this._maskTexture.clearColor = new Et(0, 0, 0, 0), this._maskTexture.renderList.push(this._mesh), this._maskTexture.setMaterialForRendering(this._mesh, RC._GetMaskShader(this._scene)), this._maskTexture.refreshRate = ra.REFRESHRATE_RENDER_ONCE, this._scene.customRenderTargets.push(this._maskTexture)); } _createPostProcess() { this._finalPostProcess || (this._finalPostProcess = new Bi(this._mesh.name + "_fixSeamsPostProcess", "meshUVSpaceRendererFinaliser", ["textureSize"], ["textureSampler", "maskTextureSampler"], 1, null, 1, this._scene.getEngine(), !1, null, this._options.textureType), this._finalPostProcess.onApplyObservable.add((e) => { e.setTexture("maskTextureSampler", this._maskTexture), e.setFloat2("textureSize", this._options.width, this._options.height); })); } _createRenderTargetTexture(e, t) { const i = new ra(this._mesh.name + "_uvspaceTexture", { width: e, height: t }, this._scene, this._options.generateMipMaps, !0, this._options.textureType, !1, this._options.generateMipMaps ? 3 : 2, !1, !1, !1, 5); return i.renderParticles = !1, i.optimizeUVAllocation = !!this._options.optimizeUVAllocation, i.onClearObservable.addOnce(() => { this._scene.getEngine().clear(this.clearColor, !0, !0, !0), i.onClearObservable.add(() => { }); }), i.renderList = [this._mesh], i; } _createProjectionMatrix(e, t, i, r = 0) { const s = -Math.atan2(t.z, t.x) - Math.PI / 2, n = Math.sqrt(t.x * t.x + t.z * t.z), a = Math.atan2(t.y, n), l = e.add(t.scale(i.z * 0.5)), o = Ae.RotationYawPitchRoll(s, a, r).multiply(Ae.Translation(l.x, l.y, l.z)), u = Ae.Invert(o), h = Ae.FromArray([2 / i.x, 0, 0, 0, 0, 2 / i.y, 0, 0, 0, 0, 1 / i.z, 0, 0, 0, 0, 1]), d = Ae.FromArray([0.5, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 1, 0, 0.5, 0.5, 0, 1]); return u.multiply(h).multiply(d); } } ke._TrailMeshParser = (c, e) => mL.Parse(c, e); class mL extends ke { /** * Creates a new TrailMesh. * @param name The value used by scene.getMeshByName() to do a lookup. * @param generator The mesh or transform node to generate a trail. 
* @param scene The scene to add this mesh to. * @param diameter Diameter of trailing mesh. Default is 1. * @param length Length of trailing mesh. Default is 60. * @param autoStart Automatically start trailing mesh. Default true. */ constructor(e, t, i, r = 1, s = 60, n = !0) { super(e, i), this._sectionPolygonPointsCount = 4, this._running = !1, this._autoStart = n, this._generator = t, this.diameter = r, this._length = s, this._sectionVectors = [], this._sectionNormalVectors = []; for (let a = 0; a < this._sectionPolygonPointsCount; a++) this._sectionVectors[a] = D.Zero(), this._sectionNormalVectors[a] = D.Zero(); this._createMesh(); } /** * "TrailMesh" * @returns "TrailMesh" */ getClassName() { return "TrailMesh"; } _createMesh() { const e = new Ot(), t = [], i = [], r = []; let s = D.Zero(); this._generator instanceof xr && this._generator.hasBoundingInfo ? s = this._generator.getBoundingInfo().boundingBox.centerWorld : s = this._generator.position; const n = 2 * Math.PI / this._sectionPolygonPointsCount; for (let a = 0; a < this._sectionPolygonPointsCount; a++) t.push(s.x + Math.cos(a * n) * this.diameter, s.y + Math.sin(a * n) * this.diameter, s.z); for (let a = 1; a <= this._length; a++) { for (let o = 0; o < this._sectionPolygonPointsCount; o++) t.push(s.x + Math.cos(o * n) * this.diameter, s.y + Math.sin(o * n) * this.diameter, s.z); const l = t.length / 3 - 2 * this._sectionPolygonPointsCount; for (let o = 0; o < this._sectionPolygonPointsCount - 1; o++) r.push(l + o, l + o + this._sectionPolygonPointsCount, l + o + this._sectionPolygonPointsCount + 1), r.push(l + o, l + o + this._sectionPolygonPointsCount + 1, l + o + 1); r.push(l + this._sectionPolygonPointsCount - 1, l + this._sectionPolygonPointsCount - 1 + this._sectionPolygonPointsCount, l + this._sectionPolygonPointsCount), r.push(l + this._sectionPolygonPointsCount - 1, l + this._sectionPolygonPointsCount, l); } Ot.ComputeNormals(t, r, i), e.positions = t, e.normals = i, e.indices = r, e.applyToMesh(this, !0), this._autoStart && this.start(); } /** * Start trailing mesh. */ start() { this._running || (this._running = !0, this._beforeRenderObserver = this.getScene().onBeforeRenderObservable.add(() => { this.update(); })); } /** * Stop trailing mesh. */ stop() { this._beforeRenderObserver && this._running && (this._running = !1, this.getScene().onBeforeRenderObservable.remove(this._beforeRenderObserver)); } /** * Update trailing mesh geometry. 
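* Construction sketch; once started, the mesh refreshes itself through this per-frame update
* (hedged: `TrailMesh` is the un-minified name; `emitterMesh` and `scene` are placeholders):
*
*   const trail = new TrailMesh("trail", emitterMesh, scene, 0.5, 60, true); // diameter 0.5, 60 sections, auto-start
*   // trail.stop() and trail.start() pause and resume the onBeforeRender-driven update() below
*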
*/ update() { const e = this.getVerticesData(Y.PositionKind), t = this.getVerticesData(Y.NormalKind), i = this._generator.getWorldMatrix(); if (e && t) { for (let n = 3 * this._sectionPolygonPointsCount; n < e.length; n++) e[n - 3 * this._sectionPolygonPointsCount] = e[n] - t[n] / this._length * this.diameter; for (let n = 3 * this._sectionPolygonPointsCount; n < t.length; n++) t[n - 3 * this._sectionPolygonPointsCount] = t[n]; const r = e.length - 3 * this._sectionPolygonPointsCount, s = 2 * Math.PI / this._sectionPolygonPointsCount; for (let n = 0; n < this._sectionPolygonPointsCount; n++) this._sectionVectors[n].copyFromFloats(Math.cos(n * s) * this.diameter, Math.sin(n * s) * this.diameter, 0), this._sectionNormalVectors[n].copyFromFloats(Math.cos(n * s), Math.sin(n * s), 0), D.TransformCoordinatesToRef(this._sectionVectors[n], i, this._sectionVectors[n]), D.TransformNormalToRef(this._sectionNormalVectors[n], i, this._sectionNormalVectors[n]); for (let n = 0; n < this._sectionPolygonPointsCount; n++) e[r + 3 * n] = this._sectionVectors[n].x, e[r + 3 * n + 1] = this._sectionVectors[n].y, e[r + 3 * n + 2] = this._sectionVectors[n].z, t[r + 3 * n] = this._sectionNormalVectors[n].x, t[r + 3 * n + 1] = this._sectionNormalVectors[n].y, t[r + 3 * n + 2] = this._sectionNormalVectors[n].z; this.updateVerticesData(Y.PositionKind, e, !0, !1), this.updateVerticesData(Y.NormalKind, t, !0, !1); } } /** * Returns a new TrailMesh object. * @param name is a string, the name given to the new mesh * @param newGenerator use new generator object for cloned trail mesh * @returns a new mesh */ clone(e = "", t) { return new mL(e, t === void 0 ? this._generator : t, this.getScene(), this.diameter, this._length, this._autoStart); } /** * Serializes this trail mesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.generatorId = this._generator.id; } /** * Parses a serialized trail mesh * @param parsedMesh the serialized mesh * @param scene the scene to create the trail mesh in * @returns the created trail mesh */ static Parse(e, t) { var i, r; const s = (i = t.getLastMeshById(e.generatorId)) !== null && i !== void 0 ? i : t.getLastTransformNodeById(e.generatorId); if (!s) throw new Error("TrailMesh: generator not found with ID " + e.generatorId); return new mL(e.name, s, t, (r = e.diameter) !== null && r !== void 0 ? r : e._diameter, e._length, e._autoStart); } } class F0e { /** * Creates a SimplificationSettings * @param quality expected quality * @param distance distance when this optimized version should be used * @param optimizeMesh already optimized mesh */ constructor(e, t, i) { this.quality = e, this.distance = t, this.optimizeMesh = i; } } class Bse { /** * Creates a new queue */ constructor() { this.running = !1, this._simplificationArray = []; } /** * Adds a new simplification task * @param task defines a task to add */ addTask(e) { this._simplificationArray.push(e); } /** * Execute next task */ executeNext() { const e = this._simplificationArray.pop(); e ? 
(this.running = !0, this.runSimplification(e)) : this.running = !1; } /** * Execute a simplification task * @param task defines the task to run */ runSimplification(e) { if (e.parallelProcessing) e.settings.forEach((t) => { this._getSimplifier(e).simplify(t, (r) => { t.distance !== void 0 && e.mesh.addLODLevel(t.distance, r), r.isVisible = !0, t.quality === e.settings[e.settings.length - 1].quality && e.successCallback && e.successCallback(), this.executeNext(); }); }); else { const t = this._getSimplifier(e), i = (r, s) => { t.simplify(r, (n) => { r.distance !== void 0 && e.mesh.addLODLevel(r.distance, n), n.isVisible = !0, s(); }); }; ug.Run(e.settings.length, (r) => { i(e.settings[r.index], () => { r.executeNext(); }); }, () => { e.successCallback && e.successCallback(), this.executeNext(); }); } } _getSimplifier(e) { switch (e.simplificationType) { case gL.QUADRATIC: default: return new Use(e.mesh); } } } var gL; (function(c) { c[c.QUADRATIC = 0] = "QUADRATIC"; })(gL || (gL = {})); class B0e { constructor(e) { this._vertices = e, this.error = new Array(4), this.deleted = !1, this.isDirty = !1, this.deletePending = !1, this.borderFactor = 0; } } class U0e { constructor(e, t) { this.position = e, this.id = t, this.isBorder = !0, this.q = new _O(), this.triangleCount = 0, this.triangleStart = 0, this.originalOffsets = []; } updatePosition(e) { this.position.copyFrom(e); } } class _O { constructor(e) { this.data = new Array(10); for (let t = 0; t < 10; ++t) e && e[t] ? this.data[t] = e[t] : this.data[t] = 0; } det(e, t, i, r, s, n, a, l, o) { return this.data[e] * this.data[s] * this.data[o] + this.data[i] * this.data[r] * this.data[l] + this.data[t] * this.data[n] * this.data[a] - this.data[i] * this.data[s] * this.data[a] - this.data[e] * this.data[n] * this.data[l] - this.data[t] * this.data[r] * this.data[o]; } addInPlace(e) { for (let t = 0; t < 10; ++t) this.data[t] += e.data[t]; } addArrayInPlace(e) { for (let t = 0; t < 10; ++t) this.data[t] += e[t]; } add(e) { const t = new _O(); for (let i = 0; i < 10; ++i) t.data[i] = this.data[i] + e.data[i]; return t; } static FromData(e, t, i, r) { return new _O(_O.DataFromNumbers(e, t, i, r)); } //returning an array to avoid garbage collection static DataFromNumbers(e, t, i, r) { return [e * e, e * t, e * i, e * r, t * t, t * i, t * r, i * i, i * r, r * r]; } } class V0e { constructor(e, t) { this.vertexId = e, this.triangleId = t; } } class Use { /** * Creates a new QuadraticErrorSimplification * @param _mesh defines the target mesh */ constructor(e) { this._mesh = e, this.syncIterations = 5e3, this.aggressiveness = 7, this.decimationIterations = 100, this.boundingBoxEpsilon = Sr; } /** * Simplification of a given mesh according to the given settings. * Since this requires computation, it is assumed that the function runs async. * @param settings The settings of the simplification, including quality and distance * @param successCallback A callback that will be called after the mesh was simplified. 
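* Sketch of driving the decimator directly (hedged: `QuadraticErrorSimplification` and
* `SimplificationSettings` are the un-minified names of the classes defined in this file; `mesh`
* is a placeholder):
*
*   new QuadraticErrorSimplification(mesh).simplify(new SimplificationSettings(0.5, 0, true), (simplified) => {
*     simplified.isVisible = true; // the decimated copy is created hidden; show it or add it as an LOD level
*   });
*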
*/ simplify(e, t) { this._initDecimatedMesh(), ug.Run(this._mesh.subMeshes.length, (i) => { this._initWithMesh(i.index, () => { this._runDecimation(e, i.index, () => { i.executeNext(); }); }, e.optimizeMesh); }, () => { setTimeout(() => { t(this._reconstructedMesh); }, 0); }); } _runDecimation(e, t, i) { const r = ~~(this._triangles.length * e.quality); let s = 0; const n = this._triangles.length, a = (l, o) => { setTimeout(() => { l % 5 === 0 && this._updateMesh(l === 0); for (let d = 0; d < this._triangles.length; ++d) this._triangles[d].isDirty = !1; const u = 1e-9 * Math.pow(l + 3, this.aggressiveness), h = (d) => { const f = ~~((this._triangles.length / 2 + d) % this._triangles.length), p = this._triangles[f]; if (p && !(p.error[3] > u || p.deleted || p.isDirty)) { for (let m = 0; m < 3; ++m) if (p.error[m] < u) { const _ = [], v = [], C = p._vertices[m], x = p._vertices[(m + 1) % 3]; if (C.isBorder || x.isBorder) continue; const b = D.Zero(); this._calculateError(C, x, b); const S = []; if (this._isFlipped(C, x, b, _, S) || this._isFlipped(x, C, b, v, S) || _.indexOf(!0) < 0 || v.indexOf(!0) < 0) continue; const M = []; if (S.forEach((V) => { M.indexOf(V) === -1 && (V.deletePending = !0, M.push(V)); }), M.length % 2 !== 0) continue; C.q = x.q.add(C.q), C.updatePosition(b); const R = this._references.length; s = this._updateTriangles(C, C, _, s), s = this._updateTriangles(C, x, v, s); const w = this._references.length - R; if (w <= C.triangleCount) { if (w) for (let V = 0; V < w; V++) this._references[C.triangleStart + V] = this._references[R + V]; } else C.triangleStart = R; C.triangleCount = w; break; } } }; ug.SyncAsyncForLoop(this._triangles.length, this.syncIterations, h, o, () => n - s <= r); }, 0); }; ug.Run(this.decimationIterations, (l) => { n - s <= r ? 
l.breakLoop() : a(l.index, () => { l.executeNext(); }); }, () => { setTimeout(() => { this._reconstructMesh(t), i(); }, 0); }); } _initWithMesh(e, t, i) { this._vertices = [], this._triangles = []; const r = this._mesh.getVerticesData(Y.PositionKind), s = this._mesh.getIndices(), n = this._mesh.subMeshes[e], a = (h) => { if (i) { for (let d = 0; d < this._vertices.length; ++d) if (this._vertices[d].position.equalsWithEpsilon(h, 1e-4)) return this._vertices[d]; } return null; }, l = [], o = (h) => { if (!r) return; const d = h + n.verticesStart, f = D.FromArray(r, d * 3), p = a(f) || new U0e(f, this._vertices.length); p.originalOffsets.push(d), p.id === this._vertices.length && this._vertices.push(p), l.push(p.id); }, u = n.verticesCount; ug.SyncAsyncForLoop(u, this.syncIterations / 4 >> 0, o, () => { const h = (d) => { if (!s) return; const p = (n.indexStart / 3 + d) * 3, m = s[p + 0], _ = s[p + 1], v = s[p + 2], C = this._vertices[l[m - n.verticesStart]], x = this._vertices[l[_ - n.verticesStart]], b = this._vertices[l[v - n.verticesStart]], S = new B0e([C, x, b]); S.originalOffset = p, this._triangles.push(S); }; ug.SyncAsyncForLoop(n.indexCount / 3, this.syncIterations, h, () => { this._init(t); }); }); } _init(e) { const t = (i) => { const r = this._triangles[i]; r.normal = D.Cross(r._vertices[1].position.subtract(r._vertices[0].position), r._vertices[2].position.subtract(r._vertices[0].position)).normalize(); for (let s = 0; s < 3; s++) r._vertices[s].q.addArrayInPlace(_O.DataFromNumbers(r.normal.x, r.normal.y, r.normal.z, -D.Dot(r.normal, r._vertices[0].position))); }; ug.SyncAsyncForLoop(this._triangles.length, this.syncIterations, t, () => { const i = (r) => { const s = this._triangles[r]; for (let n = 0; n < 3; ++n) s.error[n] = this._calculateError(s._vertices[n], s._vertices[(n + 1) % 3]); s.error[3] = Math.min(s.error[0], s.error[1], s.error[2]); }; ug.SyncAsyncForLoop(this._triangles.length, this.syncIterations, i, () => { e(); }); }); } _reconstructMesh(e) { const t = []; let i; for (i = 0; i < this._vertices.length; ++i) this._vertices[i].triangleCount = 0; let r, s; for (i = 0; i < this._triangles.length; ++i) if (!this._triangles[i].deleted) { for (r = this._triangles[i], s = 0; s < 3; ++s) r._vertices[s].triangleCount = 1; t.push(r); } const n = this._reconstructedMesh.getVerticesData(Y.PositionKind) || [], a = this._reconstructedMesh.getVerticesData(Y.NormalKind) || [], l = this._reconstructedMesh.getVerticesData(Y.UVKind) || [], o = this._reconstructedMesh.getVerticesData(Y.ColorKind) || [], u = this._mesh.getVerticesData(Y.NormalKind), h = this._mesh.getVerticesData(Y.UVKind), d = this._mesh.getVerticesData(Y.ColorKind); let f = 0; for (i = 0; i < this._vertices.length; ++i) { const b = this._vertices[i]; b.id = f, b.triangleCount && b.originalOffsets.forEach((S) => { n.push(b.position.x), n.push(b.position.y), n.push(b.position.z), u && u.length && (a.push(u[S * 3]), a.push(u[S * 3 + 1]), a.push(u[S * 3 + 2])), h && h.length && (l.push(h[S * 2]), l.push(h[S * 2 + 1])), d && d.length && (o.push(d[S * 4]), o.push(d[S * 4 + 1]), o.push(d[S * 4 + 2]), o.push(d[S * 4 + 3])), ++f; }); } const p = this._reconstructedMesh.getTotalIndices(), m = this._reconstructedMesh.getTotalVertices(), _ = this._reconstructedMesh.subMeshes; this._reconstructedMesh.subMeshes = []; const v = this._reconstructedMesh.getIndices(), C = this._mesh.getIndices(); for (i = 0; i < t.length; ++i) r = t[i], [0, 1, 2].forEach((b) => { const S = C[r.originalOffset + b]; let M = 
r._vertices[b].originalOffsets.indexOf(S); M < 0 && (M = 0), v.push(r._vertices[b].id + M + m); }); this._reconstructedMesh.setIndices(v), this._reconstructedMesh.setVerticesData(Y.PositionKind, n), a.length > 0 && this._reconstructedMesh.setVerticesData(Y.NormalKind, a), l.length > 0 && this._reconstructedMesh.setVerticesData(Y.UVKind, l), o.length > 0 && this._reconstructedMesh.setVerticesData(Y.ColorKind, o); const x = this._mesh.subMeshes[e]; e > 0 && (this._reconstructedMesh.subMeshes = [], _.forEach((b) => { ed.AddToMesh( b.materialIndex, b.verticesStart, b.verticesCount, /* 0, newPositionData.length/3, */ b.indexStart, b.indexCount, b.getMesh() ); }), ed.AddToMesh( x.materialIndex, m, f, /* 0, newPositionData.length / 3, */ p, t.length * 3, this._reconstructedMesh )); } _initDecimatedMesh() { this._reconstructedMesh = new ke(this._mesh.name + "Decimated", this._mesh.getScene()), this._reconstructedMesh.material = this._mesh.material, this._reconstructedMesh.parent = this._mesh.parent, this._reconstructedMesh.isVisible = !1, this._reconstructedMesh.renderingGroupId = this._mesh.renderingGroupId; } _isFlipped(e, t, i, r, s) { for (let n = 0; n < e.triangleCount; ++n) { const a = this._triangles[this._references[e.triangleStart + n].triangleId]; if (a.deleted) continue; const l = this._references[e.triangleStart + n].vertexId, o = a._vertices[(l + 1) % 3], u = a._vertices[(l + 2) % 3]; if (o === t || u === t) { r[n] = !0, s.push(a); continue; } let h = o.position.subtract(i); h = h.normalize(); let d = u.position.subtract(i); if (d = d.normalize(), Math.abs(D.Dot(h, d)) > 0.999) return !0; const f = D.Cross(h, d).normalize(); if (r[n] = !1, D.Dot(f, a.normal) < 0.2) return !0; } return !1; } _updateTriangles(e, t, i, r) { let s = r; for (let n = 0; n < t.triangleCount; ++n) { const a = this._references[t.triangleStart + n], l = this._triangles[a.triangleId]; if (!l.deleted) { if (i[n] && l.deletePending) { l.deleted = !0, s++; continue; } l._vertices[a.vertexId] = e, l.isDirty = !0, l.error[0] = this._calculateError(l._vertices[0], l._vertices[1]) + l.borderFactor / 2, l.error[1] = this._calculateError(l._vertices[1], l._vertices[2]) + l.borderFactor / 2, l.error[2] = this._calculateError(l._vertices[2], l._vertices[0]) + l.borderFactor / 2, l.error[3] = Math.min(l.error[0], l.error[1], l.error[2]), this._references.push(a); } } return s; } _identifyBorder() { for (let e = 0; e < this._vertices.length; ++e) { const t = [], i = [], r = this._vertices[e]; let s; for (s = 0; s < r.triangleCount; ++s) { const n = this._triangles[this._references[r.triangleStart + s].triangleId]; for (let a = 0; a < 3; a++) { let l = 0; const o = n._vertices[a]; for (; l < t.length && i[l] !== o.id; ) ++l; l === t.length ? (t.push(1), i.push(o.id)) : t[l]++; } } for (s = 0; s < t.length; ++s) t[s] === 1 ? 
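// Background note on the decimation math: this is the classic Garland-Heckbert quadric error metric.
// Each vertex accumulates a symmetric 4x4 quadric Q = sum(p * p^T) over the planes p = (a, b, c, d)
// of its incident triangles (ax + by + cz + d = 0); the 10-element data array stores Q's upper
// triangle. _vertexError evaluates v^T * Q * v for v = (x, y, z, 1), and _calculateError tries to
// invert the upper-left 3x3 minor (via det()) to place the collapsed vertex at the error minimum,
// falling back to the edge endpoints or midpoint when the quadric is singular or the edge is a border.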
this._vertices[i[s]].isBorder = !0 : this._vertices[i[s]].isBorder = !1; } } _updateMesh(e = !1) { let t; if (!e) { const l = []; for (t = 0; t < this._triangles.length; ++t) this._triangles[t].deleted || l.push(this._triangles[t]); this._triangles = l; } for (t = 0; t < this._vertices.length; ++t) this._vertices[t].triangleCount = 0, this._vertices[t].triangleStart = 0; let i, r, s; for (t = 0; t < this._triangles.length; ++t) for (i = this._triangles[t], r = 0; r < 3; ++r) s = i._vertices[r], s.triangleCount++; let n = 0; for (t = 0; t < this._vertices.length; ++t) this._vertices[t].triangleStart = n, n += this._vertices[t].triangleCount, this._vertices[t].triangleCount = 0; const a = new Array(this._triangles.length * 3); for (t = 0; t < this._triangles.length; ++t) for (i = this._triangles[t], r = 0; r < 3; ++r) s = i._vertices[r], a[s.triangleStart + s.triangleCount] = new V0e(r, t), s.triangleCount++; this._references = a, e && this._identifyBorder(); } _vertexError(e, t) { const i = t.x, r = t.y, s = t.z; return e.data[0] * i * i + 2 * e.data[1] * i * r + 2 * e.data[2] * i * s + 2 * e.data[3] * i + e.data[4] * r * r + 2 * e.data[5] * r * s + 2 * e.data[6] * r + e.data[7] * s * s + 2 * e.data[8] * s + e.data[9]; } _calculateError(e, t, i) { const r = e.q.add(t.q), s = e.isBorder && t.isBorder; let n = 0; const a = r.det(0, 1, 2, 1, 4, 5, 2, 5, 7); if (a !== 0 && !s) i || (i = D.Zero()), i.x = -1 / a * r.det(1, 2, 3, 4, 5, 6, 5, 7, 8), i.y = 1 / a * r.det(0, 2, 3, 1, 5, 6, 2, 7, 8), i.z = -1 / a * r.det(0, 1, 3, 1, 4, 6, 2, 5, 8), n = this._vertexError(r, i); else { const l = e.position.add(t.position).divide(new D(2, 2, 2)), o = this._vertexError(r, e.position), u = this._vertexError(r, t.position), h = this._vertexError(r, l); n = Math.min(o, u, h), n === o ? i && i.copyFrom(e.position) : n === u ? i && i.copyFrom(t.position) : i && i.copyFrom(l); } return n; } } Object.defineProperty(ii.prototype, "simplificationQueue", { get: function() { if (!this._simplificationQueue) { this._simplificationQueue = new Bse(); let c = this._getComponent(Bt.NAME_SIMPLIFICATIONQUEUE); c || (c = new Vse(this), this._addComponent(c)); } return this._simplificationQueue; }, set: function(c) { this._simplificationQueue = c; }, enumerable: !0, configurable: !0 }); ke.prototype.simplify = function(c, e = !0, t = gL.QUADRATIC, i) { return this.getScene().simplificationQueue.addTask({ settings: c, parallelProcessing: e, mesh: this, simplificationType: t, successCallback: i }), this; }; class Vse { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_SIMPLIFICATIONQUEUE, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._beforeCameraUpdateStage.registerStep(Bt.STEP_BEFORECAMERAUPDATE_SIMPLIFICATIONQUEUE, this, this._beforeCameraUpdate); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
*/ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { } _beforeCameraUpdate() { this.scene._simplificationQueue && !this.scene._simplificationQueue.running && this.scene._simplificationQueue.executeNext(); } } var M4; (function(c) { c[c.POINTS_MODE_POINTS = 0] = "POINTS_MODE_POINTS", c[c.POINTS_MODE_PATHS = 1] = "POINTS_MODE_PATHS"; })(M4 || (M4 = {})); var QO; (function(c) { c[c.FACES_MODE_SINGLE_SIDED = 0] = "FACES_MODE_SINGLE_SIDED", c[c.FACES_MODE_SINGLE_SIDED_NO_BACKFACE_CULLING = 1] = "FACES_MODE_SINGLE_SIDED_NO_BACKFACE_CULLING", c[c.FACES_MODE_DOUBLE_SIDED = 2] = "FACES_MODE_DOUBLE_SIDED"; })(QO || (QO = {})); var f5; (function(c) { c[c.AUTO_DIRECTIONS_FROM_FIRST_SEGMENT = 0] = "AUTO_DIRECTIONS_FROM_FIRST_SEGMENT", c[c.AUTO_DIRECTIONS_FROM_ALL_SEGMENTS = 1] = "AUTO_DIRECTIONS_FROM_ALL_SEGMENTS", c[c.AUTO_DIRECTIONS_ENHANCED = 2] = "AUTO_DIRECTIONS_ENHANCED", c[c.AUTO_DIRECTIONS_NONE = 99] = "AUTO_DIRECTIONS_NONE"; })(f5 || (f5 = {})); class SW extends ke { constructor(e, t, i) { var r, s, n, a; super(e, t, null, null, !1, !1), this.name = e, this._options = i, this._lazy = !1, this._updatable = !1, this._engine = t.getEngine(), this._lazy = (r = i.lazy) !== null && r !== void 0 ? r : !1, this._updatable = (s = i.updatable) !== null && s !== void 0 ? s : !1, this._vertexPositions = [], this._indices = [], this._uvs = [], this._points = [], this._colorPointers = (n = i.colorPointers) !== null && n !== void 0 ? n : [], this._widths = (a = i.widths) !== null && a !== void 0 ? a : new Array(i.points.length).fill(1); } /** * "GreasedLineMesh" * @returns "GreasedLineMesh" */ getClassName() { return "GreasedLineMesh"; } _updateWidthsWithValue(e) { let t = 0; for (const r of this._points) t += r.length; const i = t / 3 * 2 - this._widths.length; for (let r = 0; r < i; r++) this._widths.push(e); } /** * Updated a lazy line. Rerenders the line and updates boundinfo as well. */ updateLazy() { var e, t; this._setPoints(this._points), this._options.colorPointers || this._updateColorPointers(), this._createVertexBuffers((e = this._options.ribbonOptions) === null || e === void 0 ? void 0 : e.smoothShading), this.refreshBoundingInfo(), (t = this.greasedLineMaterial) === null || t === void 0 || t.updateLazy(); } /** * Adds new points to the line. It doesn't rerenders the line if in lazy mode. * @param points points table */ addPoints(e, t) { for (const i of e) this._points.push(i); this._lazy || this.setPoints(this._points, t); } /** * Dispose the line and it's resources */ dispose() { super.dispose(); } /** * * @returns true if the mesh was created in lazy mode */ isLazy() { return this._lazy; } /** * Return the points offsets */ get offsets() { return this._offsets; } /** * Sets point offests * @param offsets offset table [x,y,z, x,y,z, ....] */ set offsets(e) { this._offsets = e, this._offsetsBuffer ? this._offsetsBuffer.update(e) : this._createOffsetsBuffer(e); } /** * Gets widths at each line point like [widthLower, widthUpper, widthLower, widthUpper, ...] */ get widths() { return this._widths; } /** * Sets widths at each line point * @param widths width table [widthLower, widthUpper, widthLower, widthUpper ...] */ set widths(e) { this._widths = e, this._lazy || this._widthsBuffer && this._widthsBuffer.update(e); } /** * Gets the color pointer. Each vertex need a color pointer. 
These color pointers points to the colors in the color table @see colors */ get colorPointers() { return this._colorPointers; } /** * Sets the color pointer * @param colorPointers array of color pointer in the colors array. One pointer for every vertex is needed. */ set colorPointers(e) { this._colorPointers = e, this._lazy || this._colorPointersBuffer && this._colorPointersBuffer.update(e); } /** * Gets the pluginMaterial associated with line */ get greasedLineMaterial() { var e, t; if (this.material && this.material instanceof TW) return this.material; const i = (t = (e = this.material) === null || e === void 0 ? void 0 : e.pluginManager) === null || t === void 0 ? void 0 : t.getPlugin(cx.GREASED_LINE_MATERIAL_NAME); if (i) return i; } /** * Return copy the points. */ get points() { const e = []; return id.DeepCopy(this._points, e), e; } /** * Sets line points and rerenders the line. * @param points points table */ setPoints(e, t) { this._points = e, this._updateWidths(), t != null && t.colorPointers || this._updateColorPointers(), this._setPoints(e, t); } _initGreasedLine() { this._vertexPositions = [], this._indices = [], this._uvs = []; } _createLineOptions() { return { points: this._points, colorPointers: this._colorPointers, lazy: this._lazy, updatable: this._updatable, uvs: this._uvs, widths: this._widths, ribbonOptions: this._options.ribbonOptions }; } /** * Serializes this GreasedLineMesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.type = this.getClassName(), e.lineOptions = this._createLineOptions(); } _createVertexBuffers(e = !1) { const t = new Ot(); return t.positions = this._vertexPositions, t.indices = this._indices, t.uvs = this._uvs, e && (t.normals = [], Ot.ComputeNormals(this._vertexPositions, this._indices, t.normals)), t.applyToMesh(this, this._options.updatable), t; } _createOffsetsBuffer(e) { const t = this._scene.getEngine(), i = new hu(t, e, this._updatable, 3); this.setVerticesBuffer(i.createVertexBuffer("grl_offsets", 0, 3)), this._offsetsBuffer = i; } } ke._GreasedLineMeshParser = (c, e) => Al.Parse(c, e); class Al extends SW { /** * GreasedLineMesh * @param name name of the mesh * @param scene the scene * @param _options mesh options */ constructor(e, t, i) { super(e, t, i), this.name = e, this.intersectionThreshold = 0.1, this._previousAndSide = [], this._nextAndCounters = [], i.points && this.addPoints(Hn.ConvertPoints(i.points)); } /** * "GreasedLineMesh" * @returns "GreasedLineMesh" */ getClassName() { return "GreasedLineMesh"; } _updateColorPointers() { if (this._options.colorPointers) return; let e = 0; this._colorPointers = [], this._points.forEach((t) => { for (let i = 0; i < t.length; i += 3) this._colorPointers.push(e), this._colorPointers.push(e++); }); } _updateWidths() { super._updateWidthsWithValue(0); } _setPoints(e) { this._points = e, this._options.points = e, this._initGreasedLine(); let t = 0; e.forEach((i) => { var r; const s = [], n = [], a = [], l = Hn.GetLineLength(i); for (let f = 0, p = 0; p < i.length; f++, p += 3) { const m = i.slice(0, p + 3), v = Hn.GetLineLength(m) / l; if (n.push(i[p], i[p + 1], i[p + 2]), n.push(i[p], i[p + 1], i[p + 2]), s.push(v), s.push(v), p < i.length - 3) { const C = f * 2 + t; a.push(C, C + 1, C + 2), a.push(C + 2, C + 1, C + 3); } } t += i.length / 3 * 2; const o = [], u = [], h = []; let d = []; this._preprocess(n, o, u, h, d); for (const f of n) this._vertexPositions.push(f); for (const f of a) this._indices.push(f); for (let f = 0; f < 
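// Usage sketch (hedged): lazy GreasedLine updates. In lazy mode addPoints()/setPoints() only buffer
// the data; nothing is rebuilt until updateLazy() is called, which re-creates the vertex buffers and
// refreshes the bounding info (see updateLazy above). Names below are illustrative and assume an
// existing scene.
//
//   const line = BABYLON.CreateGreasedLine("lazyLine",
//     { points: [[0, 0, 0, 1, 0, 0]], lazy: true }, { color: BABYLON.Color3.Red() }, scene);
//   line.addPoints([[0, 1, 0, 1, 1, 0]]);   // buffered only, not rendered yet
//   line.updateLazy();                       // rebuild buffers and bounding info in one go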
h.length; f++) this._previousAndSide.push(o[f * 3], o[f * 3 + 1], o[f * 3 + 2], h[f]), this._nextAndCounters.push(u[f * 3], u[f * 3 + 1], u[f * 3 + 2], s[f]); d = (r = this._options.uvs) !== null && r !== void 0 ? r : d; for (const f of d) this._uvs.push(f); }), this._lazy || (this._options.colorPointers || this._updateColorPointers(), this._createVertexBuffers(), this.refreshBoundingInfo()); } /** * Clones the GreasedLineMesh. * @param name new line name * @param newParent new parent node * @returns cloned line */ clone(e = `${this.name}-cloned`, t) { const i = this._createLineOptions(), r = {}; id.DeepCopy(i, r, ["instance"], void 0, !0); const s = new Al(e, this._scene, r); return t && (s.parent = t), s.material = this.material, s; } /** * Serializes this GreasedLineMesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.type = this.getClassName(), e.lineOptions = this._createLineOptions(); } /** * Parses a serialized GreasedLineMesh * @param parsedMesh the serialized GreasedLineMesh * @param scene the scene to create the GreasedLineMesh in * @returns the created GreasedLineMesh */ static Parse(e, t) { const i = e.lineOptions, r = e.name; return new Al(r, t, i); } _initGreasedLine() { super._initGreasedLine(), this._previousAndSide = [], this._nextAndCounters = []; } /** * Checks whether a ray is intersecting this GreasedLineMesh * @param ray ray to check the intersection of this mesh with * @param fastCheck not supported * @param trianglePredicate not supported * @param onlyBoundingInfo defines a boolean indicating if picking should only happen using bounding info (false by default) * @param worldToUse not supported * @param skipBoundingInfo a boolean indicating if we should skip the bounding info check * @returns the picking info */ intersects(e, t, i, r = !1, s, n = !1) { const a = new ku(), l = this.findAllIntersections(e, t, i, r, s, n, !0); if ((l == null ? void 0 : l.length) === 1) { const o = l[0]; a.hit = !0, a.distance = o.distance, a.ray = e, a.pickedMesh = this, a.pickedPoint = o.point; } return a; } /** * Gets all intersections of a ray and the line * @param ray Ray to check the intersection of this mesh with * @param _fastCheck not supported * @param _trianglePredicate not supported * @param onlyBoundingInfo defines a boolean indicating if picking should only happen using bounding info (false by default) * @param _worldToUse not supported * @param skipBoundingInfo a boolean indicating if we should skip the bounding info check * @param firstOnly If true, the first and only intersection is immediatelly returned if found * @returns intersection(s) */ findAllIntersections(e, t, i, r = !1, s, n = !1, a = !1) { var l, o; if (r && !n && e.intersectsSphere(this._boundingSphere, this.intersectionThreshold) === !1) return; const u = this.getIndices(), h = this.getVerticesData(Y.PositionKind), d = this._widths, f = (o = (l = this.greasedLineMaterial) === null || l === void 0 ? void 0 : l.width) !== null && o !== void 0 ? o : 1, p = []; if (u && h && d) { let m = 0, _ = 0; for (m = 0, _ = u.length - 1; m < _; m += 3) { const v = u[m], C = u[m + 1]; Al._V_START.fromArray(h, v * 3), Al._V_END.fromArray(h, C * 3), this._offsets && (Al._V_OFFSET_START.fromArray(this._offsets, v * 3), Al._V_OFFSET_END.fromArray(this._offsets, C * 3), Al._V_START.addInPlace(Al._V_OFFSET_START), Al._V_END.addInPlace(Al._V_OFFSET_END)); const x = Math.floor(m / 3), b = d[x] !== void 0 ? 
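// Usage sketch (hedged): ray picking against a GreasedLineMesh. A segment is considered hit when the
// ray passes within intersectionThreshold * (lineWidth * pointWidth) / 2 of it (see
// intersectionSegment below). `line`, `scene` and `camera` are assumed to exist; names are illustrative.
//
//   line.intersectionThreshold = 0.5;   // widen the pickable area around the thin line
//   const ray = scene.createPickingRay(scene.pointerX, scene.pointerY, BABYLON.Matrix.Identity(), camera);
//   const pick = scene.pickWithRay(ray);
//   if (pick?.hit && pick.pickedMesh === line) { console.log("line picked at", pick.pickedPoint); }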
d[x] : 1, S = this.intersectionThreshold * (f * b) / 2, M = e.intersectionSegment(Al._V_START, Al._V_END, S); if (M !== -1 && (p.push({ distance: M, point: e.direction.normalize().multiplyByFloats(M, M, M).add(e.origin) }), a)) return p; } m = _; } return p; } get _boundingSphere() { return this.getBoundingInfo().boundingSphere; } static _CompareV3(e, t, i) { const r = e * 6, s = t * 6; return i[r] === i[s] && i[r + 1] === i[s + 1] && i[r + 2] === i[s + 2]; } static _CopyV3(e, t) { const i = e * 6; return [t[i], t[i + 1], t[i + 2]]; } _preprocess(e, t, i, r, s) { const n = e.length / 6; let a = []; Al._CompareV3(0, n - 1, e) ? a = Al._CopyV3(n - 2, e) : a = Al._CopyV3(0, e), t.push(a[0], a[1], a[2]), t.push(a[0], a[1], a[2]); for (let l = 0; l < n; l++) r.push(1), r.push(-1), this._options.uvs || (s.push(l / (n - 1), 0), s.push(l / (n - 1), 1)), l < n - 1 && (a = Al._CopyV3(l, e), t.push(a[0], a[1], a[2]), t.push(a[0], a[1], a[2])), l > 0 && (a = Al._CopyV3(l, e), i.push(a[0], a[1], a[2]), i.push(a[0], a[1], a[2])); return Al._CompareV3(n - 1, 0, e) ? a = Al._CopyV3(1, e) : a = Al._CopyV3(n - 1, e), i.push(a[0], a[1], a[2]), i.push(a[0], a[1], a[2]), { previous: t, next: i, uvs: s, side: r }; } _createVertexBuffers() { const e = super._createVertexBuffers(), t = this._scene.getEngine(), i = new hu(t, this._previousAndSide, !1, 4); this.setVerticesBuffer(i.createVertexBuffer("grl_previousAndSide", 0, 4)); const r = new hu(t, this._nextAndCounters, !1, 4); this.setVerticesBuffer(r.createVertexBuffer("grl_nextAndCounters", 0, 4)); const s = new hu(t, this._widths, this._updatable, 1); this.setVerticesBuffer(s.createVertexBuffer("grl_widths", 0, 1)), this._widthsBuffer = s; const n = new hu(t, this._colorPointers, this._updatable, 1); return this.setVerticesBuffer(n.createVertexBuffer("grl_colorPointers", 0, 1)), this._colorPointersBuffer = n, e; } } Al._V_START = new D(); Al._V_END = new D(); Al._V_OFFSET_START = new D(); Al._V_OFFSET_END = new D(); ke._GreasedLineRibbonMeshParser = (c, e) => jl.Parse(c, e); class jl extends SW { /** * GreasedLineRibbonMesh * @param name name of the mesh * @param scene the scene * @param _options mesh options * @param _pathOptions used internaly when parsing a serialized GreasedLineRibbonMesh */ constructor(e, t, i, r) { var s; if (super(e, t, i), this.name = e, !i.ribbonOptions) throw "'GreasedLineMeshOptions.ribbonOptions' is not set."; this._paths = [], this._counters = [], this._slopes = [], this._widths = (s = i.widths) !== null && s !== void 0 ? s : [], this._ribbonWidths = [], this._pathsOptions = r ?? [], i.points && this.addPoints(Hn.ConvertPoints(i.points), i, !!r); } /** * Adds new points to the line. It doesn't rerenders the line if in lazy mode. * @param points points table */ addPoints(e, t, i = !1) { if (!t.ribbonOptions) throw "addPoints() on GreasedLineRibbonMesh instance requires 'GreasedLineMeshOptions.ribbonOptions'."; i || this._pathsOptions.push({ options: t, pathCount: e.length }), super.addPoints(e, t); } /** * "GreasedLineRibbonMesh" * @returns "GreasedLineRibbonMesh" */ getClassName() { return "GreasedLineRibbonMesh"; } /** * Return true if the line was created from two edge paths or one points path. * In this case the line is always flat. 
*/ get isFlatLine() { return this._paths.length < 3; } /** * Returns the slopes of the line at each point relative to the center of the line */ get slopes() { return this._slopes; } /** * Set the slopes of the line at each point relative to the center of the line */ set slopes(e) { this._slopes = e; } _updateColorPointers() { if (this._options.colorPointers) return; let e = 0; this._colorPointers = []; for (let t = 0; t < this._pathsOptions.length; t++) { const { options: i, pathCount: r } = this._pathsOptions[t], s = this._points[t]; if (i.ribbonOptions.pointsMode === M4.POINTS_MODE_POINTS) for (let n = 0; n < r; n++) for (let a = 0; a < s.length; a += 3) this._colorPointers.push(e), this._colorPointers.push(e++); else for (let n = 0; n < s.length; n += 3) { for (let a = 0; a < r; a++) this._colorPointers.push(e); e++; } } } _updateWidths() { super._updateWidthsWithValue(1); } _setPoints(e, t) { var i, r; if (!this._options.ribbonOptions) throw "No 'GreasedLineMeshOptions.ribbonOptions' provided."; this._points = e, this._options.points = e, this._initGreasedLine(); let s = 0, n; for (let a = 0, l = 0; a < this._pathsOptions.length; a++) { const { options: o, pathCount: u } = this._pathsOptions[a], h = e.slice(l, l + u); if (l += u, ((i = o.ribbonOptions) === null || i === void 0 ? void 0 : i.pointsMode) === M4.POINTS_MODE_PATHS) s = this._preprocess(Hn.ToVector3Array(h), s, o); else { if (((r = o.ribbonOptions) === null || r === void 0 ? void 0 : r.directionsAutoMode) === f5.AUTO_DIRECTIONS_NONE) { if (!o.ribbonOptions.directions) throw "In GreasedLineRibbonAutoDirectionMode.AUTO_DIRECTIONS_NONE 'GreasedLineMeshOptions.ribbonOptions.directions' must be defined."; n = jl._GetDirectionPlanesFromDirectionsOption(h.length, o.ribbonOptions.directions); } h.forEach((d, f) => { const p = jl._ConvertToRibbonPath(d, o.ribbonOptions, this._scene.useRightHandedSystem, n && n[f]); s = this._preprocess(p, s, o); }); } } this._lazy || (this._createVertexBuffers(), this.refreshBoundingInfo()); } static _GetDirectionPlanesFromDirectionsOption(e, t) { return Array.isArray(t) ? t : new Array(e).fill(t); } static _CreateRibbonVertexData(e, t) { var i, r, s; const n = e.length; if (n < 2) throw "Minimum of two paths are required to create a GreasedLineRibbonMesh."; const a = [], l = [], o = e[0]; for (let f = 0; f < o.length; f++) for (let p = 0; p < e.length; p++) { const m = e[p][f]; a.push(m.x, m.y, m.z); } const u = [1, 0, n], h = (r = ((i = t.ribbonOptions) === null || i === void 0 ? void 0 : i.facesMode) === QO.FACES_MODE_DOUBLE_SIDED) !== null && r !== void 0 ? r : !1, d = ((s = t.ribbonOptions) === null || s === void 0 ? void 0 : s.pointsMode) === M4.POINTS_MODE_PATHS && t.ribbonOptions.closePath; if (n > 2) for (let f = 0; f < o.length - 1; f++) { u[0] = 1 + n * f, u[1] = n * f, u[2] = (f + 1) * n; for (let p = 0; p < (n - 1) * 2; p++) p % 2 !== 0 && (u[2] += 1), p % 2 === 0 && p > 0 && (u[0] += 1, u[1] += 1), l.push(u[1] + (p % 2 !== 0 ? n : 0), u[0], u[2]), h && l.push(u[0], u[1] + (p % 2 !== 0 ? 
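// Usage sketch (hedged): a flat ribbon line built from two edge paths. With POINTS_MODE_PATHS each
// entry in `points` is one edge path of the ribbon; with POINTS_MODE_POINTS (the builder default) a
// single path is extruded to ribbonOptions.width. Names below are illustrative and assume an
// existing scene.
//
//   const ribbon = BABYLON.CreateGreasedLine("ribbon", {
//     points: [
//       [new BABYLON.Vector3(0, 0, 0), new BABYLON.Vector3(5, 0, 0)],   // lower edge path
//       [new BABYLON.Vector3(0, 1, 0), new BABYLON.Vector3(5, 1, 0)],   // upper edge path
//     ],
//     ribbonOptions: { pointsMode: BABYLON.GreasedLineRibbonPointsMode.POINTS_MODE_PATHS },
//   }, { color: BABYLON.Color3.Teal() }, scene);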
n : 0), u[2]); } else for (let f = 0; f < a.length / 3 - 3; f += 2) l.push(f, f + 1, f + 2), l.push(f + 2, f + 1, f + 3), h && (l.push(f + 1, f, f + 2), l.push(f + 1, f + 2, f + 3)); if (d) { let f = n * (o.length - 1); for (let p = 0; p < n - 1; p++) l.push(f, p + 1, p), l.push(f + 1, p + 1, f), h && (l.push(p, p + 1, f), l.push(f, p + 1, f + 1)), f++; } return { positions: a, indices: l }; } _preprocess(e, t, i) { var r, s, n, a; this._paths = e; const l = jl._CreateRibbonVertexData(e, i), o = l.positions; if (!this._options.widths) throw "No 'GreasedLineMeshOptions.widths' table is specified."; for (const p of o) this._vertexPositions.push(p); let u = e; if (((r = i.ribbonOptions) === null || r === void 0 ? void 0 : r.pointsMode) === M4.POINTS_MODE_PATHS && i.ribbonOptions.closePath) { u = []; for (let p = 0; p < e.length; p++) { const m = e[p].slice(); m.push(e[p][0].clone()), u.push(m); } } this._calculateSegmentLengths(u); const h = u.length, d = new Array(h).fill(0); for (let p = 0; p < u[0].length; p++) { let m = 0; for (let _ = 0; _ < h; _++) { const v = d[_] + this._vSegmentLengths[_][p] / this._vTotalLengths[_]; this._counters.push(v), this._uvs.push(v, m), d[_] = v, m += this._uSegmentLengths[p][_] / this._uTotalLengths[p]; } } for (let p = 0, m = 0; p < u[0].length; p++) { const _ = this._uSegmentLengths[p][0] / 2, v = this._uSegmentLengths[p][h - 1] / 2; this._ribbonWidths.push((((s = this._widths[m++]) !== null && s !== void 0 ? s : 1) - 1) * _); for (let C = 0; C < h - 2; C++) this._ribbonWidths.push(0); this._ribbonWidths.push((((n = this._widths[m++]) !== null && n !== void 0 ? n : 1) - 1) * v); } const f = ((a = i.ribbonOptions) === null || a === void 0 ? void 0 : a.pointsMode) === M4.POINTS_MODE_PATHS ? new Array(u[0].length * u.length * 6).fill(0) : jl._CalculateSlopes(u); for (const p of f) this._slopes.push(p); if (l.indices) for (let p = 0; p < l.indices.length; p++) this._indices.push(l.indices[p] + t); return t += o.length / 3, t; } static _ConvertToRibbonPath(e, t, i, r) { if (t.pointsMode === M4.POINTS_MODE_POINTS && !t.width) throw "'GreasedLineMeshOptions.ribbonOptiosn.width' must be specified in GreasedLineRibbonPointsMode.POINTS_MODE_POINTS."; const s = [], n = []; if (t.pointsMode === M4.POINTS_MODE_POINTS) { const a = t.width / 2, l = Hn.ToVector3Array(e); let o = null, u = null; t.directionsAutoMode === f5.AUTO_DIRECTIONS_FROM_FIRST_SEGMENT && (r = jl._GetDirectionFromPoints(l[0], l[1], null)); for (let h = 0; h < l.length - (r ? 0 : 1); h++) { const d = l[h], f = l[h + 1]; if (r) o = r; else if (t.directionsAutoMode === f5.AUTO_DIRECTIONS_FROM_ALL_SEGMENTS) o = jl._GetDirectionFromPoints(d, f, o); else { const p = f.subtract(d); p.applyRotationQuaternionInPlace(p.x > p.y && p.x > p.z ? i ? jl._RightHandedForwardReadOnlyQuaternion : jl._LeftHandedForwardReadOnlyQuaternion : jl._LeftReadOnlyQuaternion), o = p.normalize(); } u = o.multiplyByFloats(a, a, a), s.push(d.add(u)), n.push(d.subtract(u)); } r || (s.push(l[l.length - 1].add(u)), n.push(l[l.length - 1].subtract(u))); } return [s, n]; } static _GetDirectionFromPoints(e, t, i) { return e.x === t.x && (!i || (i == null ? void 0 : i.x) === 1) ? jl.DIRECTION_YZ : e.y === t.y ? jl.DIRECTION_XZ : e.z === t.z ? jl.DIRECTION_XY : jl.DIRECTION_XZ; } /** * Clones the GreasedLineRibbonMesh. 
* @param name new line name * @param newParent new parent node * @returns cloned line */ clone(e = `${this.name}-cloned`, t) { const i = this._createLineOptions(), r = {}, s = []; id.DeepCopy(this._pathsOptions, s, void 0, void 0, !0), id.DeepCopy(i, r, ["instance"], void 0, !0); const n = new jl(e, this._scene, r, s); return t && (n.parent = t), n.material = this.material, n; } /** * Serializes this GreasedLineRibbonMesh * @param serializationObject object to write serialization to */ serialize(e) { super.serialize(e), e.type = this.getClassName(), e.lineOptions = this._createLineOptions(), e.pathsOptions = this._pathsOptions; } /** * Parses a serialized GreasedLineRibbonMesh * @param parsedMesh the serialized GreasedLineRibbonMesh * @param scene the scene to create the GreasedLineRibbonMesh in * @returns the created GreasedLineRibbonMesh */ static Parse(e, t) { const i = e.lineOptions, r = e.name, s = e.pathOptions; return new jl(r, t, i, s); } _initGreasedLine() { super._initGreasedLine(), this._paths = [], this._counters = [], this._slopes = [], this._ribbonWidths = []; } _calculateSegmentLengths(e) { const t = e.length; this._vSegmentLengths = new Array(t), this._vTotalLengths = new Array(t); let i = 0; for (let n = 0; n < t; n++) { const a = e[n]; this._vSegmentLengths[n] = [0], i = 0; for (let l = 0; l < a.length - 1; l++) { const o = Math.abs(a[l].subtract(a[l + 1]).lengthSquared()); i += o, this._vSegmentLengths[n].push(o); } this._vTotalLengths[n] = i; } const r = e[0].length; this._uSegmentLengths = new Array(r).fill([]), this._uTotalLengths = new Array(r).fill([]); const s = new D(); for (let n = 0; n < r; n++) { i = 0; for (let a = 1; a < t; a++) { e[a][n].subtractToRef(e[a - 1][n], s); const l = s.length(); i += l, this._uSegmentLengths[n].push(l); } this._uTotalLengths[n] = i; } } static _CalculateSlopes(e) { const t = e[0], i = e.length === 2 ? e[1] : e[e.length - 1], r = [], s = new D(); for (let n = 0; n < t.length; n++) for (let a = 0; a < e.length; a++) a === 0 || a === e.length - 1 ? (t[n].subtract(i[n]).normalizeToRef(s), r.push(s.x, s.y, s.z), r.push(-s.x, -s.y, -s.z)) : r.push(0, 0, 0, 0, 0, 0); return r; } _createVertexBuffers() { var e, t; this._uvs = (e = this._options.uvs) !== null && e !== void 0 ? e : this._uvs; const i = super._createVertexBuffers((t = this._options.ribbonOptions) === null || t === void 0 ? 
void 0 : t.smoothShading), r = new hu(this._engine, this._counters, this._updatable, 1); this.setVerticesBuffer(r.createVertexBuffer("grl_counters", 0, 1)); const s = new hu(this._engine, this._colorPointers, this._updatable, 1); this.setVerticesBuffer(s.createVertexBuffer("grl_colorPointers", 0, 1)); const n = new hu(this._engine, this._slopes, this._updatable, 3); this.setVerticesBuffer(n.createVertexBuffer("grl_slopes", 0, 3)); const a = new hu(this._engine, this._ribbonWidths, this._updatable, 1); return this.setVerticesBuffer(a.createVertexBuffer("grl_widths", 0, 1)), this._widthsBuffer = a, i; } } jl.DEFAULT_WIDTH = 0.1; jl._RightHandedForwardReadOnlyQuaternion = Ze.RotationAxis(D.RightHandedForwardReadOnly, Math.PI / 2); jl._LeftHandedForwardReadOnlyQuaternion = Ze.RotationAxis(D.LeftHandedForwardReadOnly, Math.PI / 2); jl._LeftReadOnlyQuaternion = Ze.RotationAxis(D.LeftReadOnly, Math.PI / 2); jl.DIRECTION_XY = D.LeftHandedForwardReadOnly; jl.DIRECTION_XZ = D.UpReadOnly; jl.DIRECTION_YZ = D.LeftReadOnly; var VC; (function(c) { c[c.COLOR_DISTRIBUTION_NONE = 0] = "COLOR_DISTRIBUTION_NONE", c[c.COLOR_DISTRIBUTION_REPEAT = 1] = "COLOR_DISTRIBUTION_REPEAT", c[c.COLOR_DISTRIBUTION_EVEN = 2] = "COLOR_DISTRIBUTION_EVEN", c[c.COLOR_DISTRIBUTION_START = 3] = "COLOR_DISTRIBUTION_START", c[c.COLOR_DISTRIBUTION_END = 4] = "COLOR_DISTRIBUTION_END", c[c.COLOR_DISTRIBUTION_START_END = 5] = "COLOR_DISTRIBUTION_START_END"; })(VC || (VC = {})); var iT; (function(c) { c[c.WIDTH_DISTRIBUTION_NONE = 0] = "WIDTH_DISTRIBUTION_NONE", c[c.WIDTH_DISTRIBUTION_REPEAT = 1] = "WIDTH_DISTRIBUTION_REPEAT", c[c.WIDTH_DISTRIBUTION_EVEN = 2] = "WIDTH_DISTRIBUTION_EVEN", c[c.WIDTH_DISTRIBUTION_START = 3] = "WIDTH_DISTRIBUTION_START", c[c.WIDTH_DISTRIBUTION_END = 4] = "WIDTH_DISTRIBUTION_END", c[c.WIDTH_DISTRIBUTION_START_END = 5] = "WIDTH_DISTRIBUTION_START_END"; })(iT || (iT = {})); function kse(c, e, t) { t = t ?? gi.LastCreatedScene; let i; switch (e.materialType) { case XO.MATERIAL_TYPE_PBR: i = new Ri(c, t), new cx(i, t, e); break; case XO.MATERIAL_TYPE_SIMPLE: i = new TW(c, t, e); break; default: i = new Dt(c, t), new cx(i, t, e); break; } return i; } function k0e(c, e, t, i) { var r, s, n, a, l, o, u, h, d, f, p, m; i = i ?? gi.LastCreatedScene; let _; const v = Hn.ConvertPoints(e.points); e.widthDistribution = (r = e.widthDistribution) !== null && r !== void 0 ? r : iT.WIDTH_DISTRIBUTION_START, e.ribbonOptions && (e.ribbonOptions.facesMode = (s = e.ribbonOptions.facesMode) !== null && s !== void 0 ? s : QO.FACES_MODE_SINGLE_SIDED_NO_BACKFACE_CULLING, e.ribbonOptions.pointsMode = (n = e.ribbonOptions.pointsMode) !== null && n !== void 0 ? n : M4.POINTS_MODE_POINTS, e.ribbonOptions.directionsAutoMode = (a = e.ribbonOptions.directionsAutoMode) !== null && a !== void 0 ? a : e.ribbonOptions.directions ? f5.AUTO_DIRECTIONS_NONE : f5.AUTO_DIRECTIONS_FROM_FIRST_SEGMENT), t = t ?? { color: bh.DEFAULT_COLOR }, t.createAndAssignMaterial = (l = t.createAndAssignMaterial) !== null && l !== void 0 ? l : !0, t.colorDistribution = (o = t == null ? void 0 : t.colorDistribution) !== null && o !== void 0 ? o : VC.COLOR_DISTRIBUTION_START, t.materialType = (u = t.materialType) !== null && u !== void 0 ? u : XO.MATERIAL_TYPE_STANDARD; let C = 0; Array.isArray(v[0]) && v.forEach((M) => { C += M.length / 3; }); const x = zse(C, (h = e.widths) !== null && h !== void 0 ? h : [], e.widthDistribution), b = t != null && t.colors ? Hse(C, t.colors, t.colorDistribution, (d = t.color) !== null && d !== void 0 ? 
d : bh.DEFAULT_COLOR) : void 0, S = { points: v, updatable: e.updatable, widths: x, lazy: e.lazy, ribbonOptions: e.ribbonOptions, uvs: e.uvs, colorPointers: e.colorPointers }; if (S.ribbonOptions && S.ribbonOptions.pointsMode === M4.POINTS_MODE_POINTS && (S.ribbonOptions.width = (p = (f = t.width) !== null && f !== void 0 ? f : S.ribbonOptions.width) !== null && p !== void 0 ? p : bh.DEFAULT_WIDTH), e.instance) if (_ = e.instance, _ instanceof jl) _.addPoints(v, S); else { const M = _.widths; if (M) { const R = M.slice(); for (const w of x) R.push(w); _.widths = R; } else _.widths = x; _.addPoints(v); } else if (_ = S.ribbonOptions ? new jl(c, i, S) : new Al(c, i, S), t) { const M = { materialType: t.materialType, dashCount: t.dashCount, dashOffset: t.dashOffset, dashRatio: t.dashRatio, resolution: t.resolution, sizeAttenuation: t.sizeAttenuation, useColors: t.useColors, useDash: t.useDash, visibility: t.visibility, width: t.width, color: t.color, colorMode: t.colorMode, colorsSampling: t.colorsSampling, colorDistributionType: t.colorDistributionType, colors: b, cameraFacing: !e.ribbonOptions, colorsTexture: t.colorsTexture }; if (t.createAndAssignMaterial) { const R = kse(c, M, i); _.material = R, ((m = e.ribbonOptions) === null || m === void 0 ? void 0 : m.facesMode) === QO.FACES_MODE_SINGLE_SIDED_NO_BACKFACE_CULLING && (R.backFaceCulling = !1); } } if (b && e.instance && e.instance.greasedLineMaterial) { const M = e.instance.greasedLineMaterial.colors; if (M) { const R = M.concat(b); e.instance.greasedLineMaterial.setColors(R, _.isLazy()); } } return _; } function zse(c, e, t, i = 1, r = 1) { const s = c - e.length / 2, n = []; if (s < 0) return e.slice(0, c * 2); if (s > 0) { if (e.length % 2 != 0 && e.push(i), t === iT.WIDTH_DISTRIBUTION_START_END) { const a = Math.floor(e.length / 2); for (let u = 0, h = 0; u < a - 1; u++) n.push(e[h++]), n.push(e[h++]); const l = e[a / 2], o = e[a / 2 + 1]; for (let u = 0; u < s; u++) n.push(o), n.push(l); for (let u = a; u < e.length; u += 2) n.push(e[u]), n.push(e[u + 1]); } else if (t === iT.WIDTH_DISTRIBUTION_START) { for (let a = 0; a < e.length; a += 2) n.push(e[a]), n.push(e[a + 1]); for (let a = 0; a < s; a++) n.push(i), n.push(r); } else if (t === iT.WIDTH_DISTRIBUTION_END) { for (let a = 0; a < s; a++) n.push(i), n.push(r); for (let a = 0; a < e.length; a += 2) n.push(e[a]), n.push(e[a + 1]); } else if (t === iT.WIDTH_DISTRIBUTION_REPEAT) { let a = 0; for (let l = 0; l < c; l++) n.push(e[a++]), n.push(e[a++]), a === e.length && (a = 0); } else if (t === iT.WIDTH_DISTRIBUTION_EVEN) { let a = 0; const l = e.length / ((c - 1) * 2); for (let o = 0; o < c; o++) { const u = Math.floor(a); n.push(e[u]), n.push(e[u + 1]), a += l; } } } else for (let a = 0; a < e.length; a++) n.push(e[a]); return n; } function Hse(c, e, t, i) { c = Math.max(e.length, c); const r = c - e.length; if (r < 0) return e.slice(0, c); const s = []; if (r > 0) { if (t === VC.COLOR_DISTRIBUTION_START_END) { const n = Math.floor(e.length / 2); for (let a = 0; a < n; a++) s.push(e[a]); for (let a = 0; a < r - 1; a++) s.push(i); for (let a = n; a < e.length; a++) s.push(e[a]); } else if (t === VC.COLOR_DISTRIBUTION_START) { for (let n = 0; n < e.length; n++) s.push(e[n]); for (let n = 0; n < r; n++) s.push(i); } else if (t === VC.COLOR_DISTRIBUTION_END) { for (let n = 0; n < r - 1; n++) s.push(i); for (let n = 0; n < e.length; n++) s.push(e[n]); } else if (t === VC.COLOR_DISTRIBUTION_REPEAT) { let n = 0; for (let a = 0; a < c; a++) s.push(e[n]), n++, n === e.length && (n = 0); 
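// Usage sketch (hedged): the two helpers here pad user-supplied width and color tables to the full
// point count according to the chosen *_DISTRIBUTION_* mode (NONE, REPEAT, EVEN, START, END,
// START_END). Option and enum names below follow the public builder API; `scene` is assumed to exist.
//
//   const line = BABYLON.CreateGreasedLine("distributed", {
//     points: [0, 0, 0, 1, 0, 0, 2, 0, 0, 3, 0, 0],
//     widths: [2, 2],   // widthLower/widthUpper for the first point; the rest are padded
//     widthDistribution: BABYLON.GreasedLineMeshWidthDistribution.WIDTH_DISTRIBUTION_START,
//   }, {
//     useColors: true,
//     colors: [BABYLON.Color3.Red(), BABYLON.Color3.Blue()],
//     colorDistribution: BABYLON.GreasedLineMeshColorDistribution.COLOR_DISTRIBUTION_EVEN,
//   }, scene);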
} else if (t === VC.COLOR_DISTRIBUTION_EVEN) { let n = 0; const a = e.length / (c - 1); for (let l = 0; l < c - 1; l++) { const o = Math.floor(n); s.push(e[o]), n += a; } } else if (t === VC.COLOR_DISTRIBUTION_NONE) for (let n = 0; n < e.length; n++) s.push(e[n]); } else for (let n = 0; n < c; n++) s.push(e[n]); return s; } ke.prototype.thinInstanceAdd = function(c, e = !0) { if (!this.getScene().getEngine().getCaps().instancedArrays) return Ce.Error("Thin Instances are not supported on this device as Instanced Array extension not supported"), -1; this._thinInstanceUpdateBufferSize("matrix", Array.isArray(c) ? c.length : 1); const t = this._thinInstanceDataStorage.instancesCount; if (Array.isArray(c)) for (let i = 0; i < c.length; ++i) this.thinInstanceSetMatrixAt(this._thinInstanceDataStorage.instancesCount++, c[i], i === c.length - 1 && e); else this.thinInstanceSetMatrixAt(this._thinInstanceDataStorage.instancesCount++, c, e); return t; }; ke.prototype.thinInstanceAddSelf = function(c = !0) { return this.thinInstanceAdd(Ae.IdentityReadOnly, c); }; ke.prototype.thinInstanceRegisterAttribute = function(c, e) { c === Y.ColorKind && (c = Y.ColorInstanceKind), this.removeVerticesData(c), this._thinInstanceInitializeUserStorage(), this._userThinInstanceBuffersStorage.strides[c] = e, this._userThinInstanceBuffersStorage.sizes[c] = e * Math.max(32, this._thinInstanceDataStorage.instancesCount), this._userThinInstanceBuffersStorage.data[c] = new Float32Array(this._userThinInstanceBuffersStorage.sizes[c]), this._userThinInstanceBuffersStorage.vertexBuffers[c] = new Y(this.getEngine(), this._userThinInstanceBuffersStorage.data[c], c, !0, !1, e, !0), this.setVerticesBuffer(this._userThinInstanceBuffersStorage.vertexBuffers[c]); }; ke.prototype.thinInstanceSetMatrixAt = function(c, e, t = !0) { if (!this._thinInstanceDataStorage.matrixData || c >= this._thinInstanceDataStorage.instancesCount) return !1; const i = this._thinInstanceDataStorage.matrixData; return e.copyToArray(i, c * 16), this._thinInstanceDataStorage.worldMatrices && (this._thinInstanceDataStorage.worldMatrices[c] = e), t && (this.thinInstanceBufferUpdated("matrix"), this.doNotSyncBoundingInfo || this.thinInstanceRefreshBoundingInfo(!1)), !0; }; ke.prototype.thinInstanceSetAttributeAt = function(c, e, t, i = !0) { return c === Y.ColorKind && (c = Y.ColorInstanceKind), !this._userThinInstanceBuffersStorage || !this._userThinInstanceBuffersStorage.data[c] || e >= this._thinInstanceDataStorage.instancesCount ? !1 : (this._thinInstanceUpdateBufferSize(c, 0), this._userThinInstanceBuffersStorage.data[c].set(t, e * this._userThinInstanceBuffersStorage.strides[c]), i && this.thinInstanceBufferUpdated(c), !0); }; Object.defineProperty(ke.prototype, "thinInstanceCount", { get: function() { return this._thinInstanceDataStorage.instancesCount; }, set: function(c) { var e, t; const i = (e = this._thinInstanceDataStorage.matrixData) !== null && e !== void 0 ? e : (t = this.source) === null || t === void 0 ? void 0 : t._thinInstanceDataStorage.matrixData, r = i ? 
i.length / 16 : 0; c <= r && (this._thinInstanceDataStorage.instancesCount = c); }, enumerable: !0, configurable: !0 }); ke.prototype._thinInstanceCreateMatrixBuffer = function(c, e, t = !1) { c === Y.ColorKind && (c = Y.ColorInstanceKind); const i = new hu(this.getEngine(), e, !t, 16, !1, !0); for (let r = 0; r < 4; r++) this.setVerticesBuffer(i.createVertexBuffer(c + r, r * 4, 4)); return i; }; ke.prototype.thinInstanceSetBuffer = function(c, e, t = 0, i = !1) { var r, s, n; t = t || 16, c === "matrix" ? ((r = this._thinInstanceDataStorage.matrixBuffer) === null || r === void 0 || r.dispose(), this._thinInstanceDataStorage.matrixBuffer = null, this._thinInstanceDataStorage.matrixBufferSize = e ? e.length : 32 * t, this._thinInstanceDataStorage.matrixData = e, this._thinInstanceDataStorage.worldMatrices = null, e !== null ? (this._thinInstanceDataStorage.instancesCount = e.length / t, this._thinInstanceDataStorage.matrixBuffer = this._thinInstanceCreateMatrixBuffer("world", e, i), this.doNotSyncBoundingInfo || this.thinInstanceRefreshBoundingInfo(!1)) : (this._thinInstanceDataStorage.instancesCount = 0, this.doNotSyncBoundingInfo || this.refreshBoundingInfo())) : c === "previousMatrix" ? ((s = this._thinInstanceDataStorage.previousMatrixBuffer) === null || s === void 0 || s.dispose(), this._thinInstanceDataStorage.previousMatrixBuffer = null, this._thinInstanceDataStorage.previousMatrixData = e, e !== null && (this._thinInstanceDataStorage.previousMatrixBuffer = this._thinInstanceCreateMatrixBuffer("previousWorld", e, i))) : (c === Y.ColorKind && (c = Y.ColorInstanceKind), e === null ? !((n = this._userThinInstanceBuffersStorage) === null || n === void 0) && n.data[c] && (this.removeVerticesData(c), delete this._userThinInstanceBuffersStorage.data[c], delete this._userThinInstanceBuffersStorage.strides[c], delete this._userThinInstanceBuffersStorage.sizes[c], delete this._userThinInstanceBuffersStorage.vertexBuffers[c]) : (this._thinInstanceInitializeUserStorage(), this._userThinInstanceBuffersStorage.data[c] = e, this._userThinInstanceBuffersStorage.strides[c] = t, this._userThinInstanceBuffersStorage.sizes[c] = e.length, this._userThinInstanceBuffersStorage.vertexBuffers[c] = new Y(this.getEngine(), e, c, !i, !1, t, !0), this.setVerticesBuffer(this._userThinInstanceBuffersStorage.vertexBuffers[c]))); }; ke.prototype.thinInstanceBufferUpdated = function(c) { var e, t, i; c === "matrix" ? (e = this._thinInstanceDataStorage.matrixBuffer) === null || e === void 0 || e.updateDirectly(this._thinInstanceDataStorage.matrixData, 0, this._thinInstanceDataStorage.instancesCount) : c === "previousMatrix" ? (t = this._thinInstanceDataStorage.previousMatrixBuffer) === null || t === void 0 || t.updateDirectly(this._thinInstanceDataStorage.previousMatrixData, 0, this._thinInstanceDataStorage.instancesCount) : (c === Y.ColorKind && (c = Y.ColorInstanceKind), !((i = this._userThinInstanceBuffersStorage) === null || i === void 0) && i.vertexBuffers[c] && this._userThinInstanceBuffersStorage.vertexBuffers[c].updateDirectly(this._userThinInstanceBuffersStorage.data[c], 0)); }; ke.prototype.thinInstancePartialBufferUpdate = function(c, e, t) { var i; c === "matrix" ? 
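// Usage sketch (hedged): thin instances render many copies of one mesh with a single draw call.
// Assumes an existing BABYLON.Mesh `box`; names are illustrative.
//
//   const idx0 = box.thinInstanceAdd(BABYLON.Matrix.Translation(-2, 0, 0), false);
//   const idx1 = box.thinInstanceAdd(BABYLON.Matrix.Translation(2, 0, 0));   // last add refreshes the buffer
//   box.thinInstanceRegisterAttribute("color", 4);                           // per-instance RGBA
//   box.thinInstanceSetAttributeAt("color", idx0, [1, 0, 0, 1]);
//   box.thinInstanceSetAttributeAt("color", idx1, [0, 0, 1, 1]);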
this._thinInstanceDataStorage.matrixBuffer && this._thinInstanceDataStorage.matrixBuffer.updateDirectly(e, t) : (c === Y.ColorKind && (c = Y.ColorInstanceKind), !((i = this._userThinInstanceBuffersStorage) === null || i === void 0) && i.vertexBuffers[c] && this._userThinInstanceBuffersStorage.vertexBuffers[c].updateDirectly(e, t)); }; ke.prototype.thinInstanceGetWorldMatrices = function() { if (!this._thinInstanceDataStorage.matrixData || !this._thinInstanceDataStorage.matrixBuffer) return []; const c = this._thinInstanceDataStorage.matrixData; if (!this._thinInstanceDataStorage.worldMatrices) { this._thinInstanceDataStorage.worldMatrices = []; for (let e = 0; e < this._thinInstanceDataStorage.instancesCount; ++e) this._thinInstanceDataStorage.worldMatrices[e] = Ae.FromArray(c, e * 16); } return this._thinInstanceDataStorage.worldMatrices; }; ke.prototype.thinInstanceRefreshBoundingInfo = function(c = !1, e = !1, t = !1) { if (!this._thinInstanceDataStorage.matrixData || !this._thinInstanceDataStorage.matrixBuffer) return; const i = this._thinInstanceDataStorage.boundingVectors; if (c || !this.rawBoundingInfo) { i.length = 0, this.refreshBoundingInfo(e, t); const n = this.getBoundingInfo(); this.rawBoundingInfo = new zf(n.minimum, n.maximum); } const r = this.getBoundingInfo(), s = this._thinInstanceDataStorage.matrixData; if (i.length === 0) for (let n = 0; n < r.boundingBox.vectors.length; ++n) i.push(r.boundingBox.vectors[n].clone()); de.Vector3[0].setAll(Number.POSITIVE_INFINITY), de.Vector3[1].setAll(Number.NEGATIVE_INFINITY); for (let n = 0; n < this._thinInstanceDataStorage.instancesCount; ++n) { Ae.FromArrayToRef(s, n * 16, de.Matrix[0]); for (let a = 0; a < i.length; ++a) D.TransformCoordinatesToRef(i[a], de.Matrix[0], de.Vector3[2]), de.Vector3[0].minimizeInPlace(de.Vector3[2]), de.Vector3[1].maximizeInPlace(de.Vector3[2]); } r.reConstruct(de.Vector3[0], de.Vector3[1]), this._updateBoundingInfo(); }; ke.prototype._thinInstanceUpdateBufferSize = function(c, e = 1) { var t, i, r; c === Y.ColorKind && (c = Y.ColorInstanceKind); const s = c === "matrix"; if (!s && (!this._userThinInstanceBuffersStorage || !this._userThinInstanceBuffersStorage.strides[c])) return; const n = s ? 16 : this._userThinInstanceBuffersStorage.strides[c], a = s ? this._thinInstanceDataStorage.matrixBufferSize : this._userThinInstanceBuffersStorage.sizes[c]; let l = s ? this._thinInstanceDataStorage.matrixData : this._userThinInstanceBuffersStorage.data[c]; const o = (this._thinInstanceDataStorage.instancesCount + e) * n; let u = a; for (; u < o; ) u *= 2; if (!l || a != u) { if (!l) l = new Float32Array(u); else { const h = new Float32Array(u); h.set(l, 0), l = h; } s ? 
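// Usage sketch (hedged): for very large instance counts, hand the whole matrix buffer over at once
// with thinInstanceSetBuffer instead of calling thinInstanceAdd per instance. Passing true for the
// last argument marks the buffer static. `mesh` is assumed to exist; names are illustrative.
//
//   const count = 10000;
//   const matrices = new Float32Array(count * 16);
//   for (let i = 0; i < count; i++) {
//     BABYLON.Matrix.Translation(i % 100, 0, Math.floor(i / 100)).copyToArray(matrices, i * 16);
//   }
//   mesh.thinInstanceSetBuffer("matrix", matrices, 16, true);   // kind, data, stride, staticBuffer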
((t = this._thinInstanceDataStorage.matrixBuffer) === null || t === void 0 || t.dispose(), this._thinInstanceDataStorage.matrixBuffer = this._thinInstanceCreateMatrixBuffer("world", l, !1), this._thinInstanceDataStorage.matrixData = l, this._thinInstanceDataStorage.matrixBufferSize = u, this._scene.needsPreviousWorldMatrices && !this._thinInstanceDataStorage.previousMatrixData && ((i = this._thinInstanceDataStorage.previousMatrixBuffer) === null || i === void 0 || i.dispose(), this._thinInstanceDataStorage.previousMatrixBuffer = this._thinInstanceCreateMatrixBuffer("previousWorld", l, !1))) : ((r = this._userThinInstanceBuffersStorage.vertexBuffers[c]) === null || r === void 0 || r.dispose(), this._userThinInstanceBuffersStorage.data[c] = l, this._userThinInstanceBuffersStorage.sizes[c] = u, this._userThinInstanceBuffersStorage.vertexBuffers[c] = new Y(this.getEngine(), l, c, !0, !1, n, !0), this.setVerticesBuffer(this._userThinInstanceBuffersStorage.vertexBuffers[c])); } }; ke.prototype._thinInstanceInitializeUserStorage = function() { this._userThinInstanceBuffersStorage || (this._userThinInstanceBuffersStorage = { data: {}, sizes: {}, vertexBuffers: {}, strides: {} }); }; ke.prototype._disposeThinInstanceSpecificData = function() { var c; !((c = this._thinInstanceDataStorage) === null || c === void 0) && c.matrixBuffer && (this._thinInstanceDataStorage.matrixBuffer.dispose(), this._thinInstanceDataStorage.matrixBuffer = null); }; var We; (function(c) { c[c.Int = 1] = "Int", c[c.Float = 2] = "Float", c[c.Vector2 = 4] = "Vector2", c[c.Vector3 = 8] = "Vector3", c[c.Vector4 = 16] = "Vector4", c[c.Matrix = 32] = "Matrix", c[c.Geometry = 64] = "Geometry", c[c.Texture = 128] = "Texture", c[c.AutoDetect = 1024] = "AutoDetect", c[c.BasedOnInput = 2048] = "BasedOnInput", c[c.Undefined = 4096] = "Undefined", c[c.All = 4095] = "All"; })(We || (We = {})); var JE; (function(c) { c[c.Compatible = 0] = "Compatible", c[c.TypeIncompatible = 1] = "TypeIncompatible", c[c.HierarchyIssue = 2] = "HierarchyIssue"; })(JE || (JE = {})); var $O; (function(c) { c[c.Input = 0] = "Input", c[c.Output = 1] = "Output"; })($O || ($O = {})); class kH { /** Gets the direction of the point */ get direction() { return this._direction; } /** * Gets or sets the connection point type (default is float) */ get type() { if (this._type === We.AutoDetect) { if (this._ownerBlock.isInput) return this._ownerBlock.type; if (this._connectedPoint) return this._connectedPoint.type; if (this._linkedConnectionSource && this._linkedConnectionSource.isConnected) return this._linkedConnectionSource.type; } if (this._type === We.BasedOnInput) { if (this._typeConnectionSource) return !this._typeConnectionSource.isConnected && this._defaultConnectionPointType ? this._defaultConnectionPointType : this._typeConnectionSource.type; if (this._defaultConnectionPointType) return this._defaultConnectionPointType; } return this._type; } set type(e) { this._type = e; } /** * Gets a boolean indicating that the current point is connected to another NodeMaterialBlock */ get isConnected() { return this.connectedPoint !== null || this.hasEndpoints; } /** Get the other side of the connection (if any) */ get connectedPoint() { return this._connectedPoint; } /** Get the block that owns this connection point */ get ownerBlock() { return this._ownerBlock; } /** Get the block connected on the other side of this connection (if any) */ get sourceBlock() { return this._connectedPoint ? 
this._connectedPoint.ownerBlock : null; } /** Get the block connected on the endpoints of this connection (if any) */ get connectedBlocks() { return this._endpoints.length === 0 ? [] : this._endpoints.map((e) => e.ownerBlock); } /** Gets the list of connected endpoints */ get endpoints() { return this._endpoints; } /** Gets a boolean indicating if that output point is connected to at least one input */ get hasEndpoints() { return this._endpoints && this._endpoints.length > 0; } /** Get the inner type (ie AutoDetect for instance instead of the inferred one) */ get innerType() { return this._linkedConnectionSource && this._linkedConnectionSource.isConnected ? this.type : this._type; } /** @internal */ _resetCounters() { this._callCount = 0, this._executionCount = 0; } /** * Gets the number of times this point was called */ get callCount() { return this._callCount; } /** * Gets the number of times this point was executed */ get executionCount() { return this._executionCount; } /** * Gets the value represented by this connection point * @param state current evaluation state * @returns the connected value or the value if nothing is connected */ getConnectedValue(e) { var t; return this.isConnected ? !((t = this._connectedPoint) === null || t === void 0) && t._storedFunction ? (this._connectedPoint._callCount++, this._connectedPoint._executionCount++, this._connectedPoint._storedFunction(e)) : (this._connectedPoint._callCount++, this._connectedPoint._executionCount = 1, this._connectedPoint._storedValue) : (this._callCount++, this._executionCount = 1, this.value); } /** * Creates a new connection point * @param name defines the connection point name * @param ownerBlock defines the block hosting this connection point * @param direction defines the direction of the connection point */ constructor(e, t, i) { this._connectedPoint = null, this._storedValue = null, this._storedFunction = null, this._acceptedConnectionPointType = null, this._endpoints = new Array(), this._type = We.Geometry, this._linkedConnectionSource = null, this._typeConnectionSource = null, this._defaultConnectionPointType = null, this.acceptedConnectionPointTypes = [], this.excludedConnectionPointTypes = [], this.onConnectionObservable = new Fe(), this.isExposedOnFrame = !1, this.exposedPortPosition = -1, this.defaultValue = null, this.value = null, this.valueMin = null, this.valueMax = null, this._callCount = 0, this._executionCount = 0, this._ownerBlock = t, this.name = e, this._direction = i; } /** * Gets the current class name e.g. "NodeMaterialConnectionPoint" * @returns the class name */ getClassName() { return "NodeGeometryConnectionPoint"; } /** * Gets a boolean indicating if the current point can be connected to another point * @param connectionPoint defines the other connection point * @returns a boolean */ canConnectTo(e) { return this.checkCompatibilityState(e) === JE.Compatible; } /** * Gets a number indicating if the current point can be connected to another point * @param connectionPoint defines the other connection point * @returns a number defining the compatibility state */ checkCompatibilityState(e) { const t = this._ownerBlock, i = e.ownerBlock; if (this.type !== e.type && e.innerType !== We.AutoDetect) return e.acceptedConnectionPointTypes && e.acceptedConnectionPointTypes.indexOf(this.type) !== -1 ? 
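// Usage sketch (hedged): these connection points are what NodeGeometry blocks expose as inputs and
// outputs; connectTo() wires up the graph (type compatibility is checked via canConnectTo) and
// build() then evaluates it into vertex data. Block and property names below follow the public
// NodeGeometry API and are illustrative; `scene` is assumed to exist.
//
//   const nodeGeo = new BABYLON.NodeGeometry("ng");
//   const box = new BABYLON.BoxBlock("Box");
//   const output = new BABYLON.GeometryOutputBlock("Output");
//   box.geometry.connectTo(output.geometry);   // output point -> input point
//   nodeGeo.outputBlock = output;
//   nodeGeo.build();
//   const mesh = nodeGeo.createMesh("boxFromGraph", scene);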
JE.Compatible : JE.TypeIncompatible; if (e.excludedConnectionPointTypes && e.excludedConnectionPointTypes.indexOf(this.type) !== -1) return JE.TypeIncompatible; let r = i, s = t; return this.direction === $O.Input && (r = t, s = i), r.isAnAncestorOf(s) ? JE.HierarchyIssue : JE.Compatible; } /** * Connect this point to another connection point * @param connectionPoint defines the other connection point * @param ignoreConstraints defines if the system will ignore connection type constraints (default is false) * @returns the current connection point */ connectTo(e, t = !1) { if (!t && !this.canConnectTo(e)) throw "Cannot connect these two connectors."; return this._endpoints.push(e), e._connectedPoint = this, this.onConnectionObservable.notifyObservers(e), e.onConnectionObservable.notifyObservers(this), this; } /** * Disconnect this point from one of his endpoint * @param endpoint defines the other connection point * @returns the current connection point */ disconnectFrom(e) { const t = this._endpoints.indexOf(e); return t === -1 ? this : (this._endpoints.splice(t, 1), e._connectedPoint = null, this); } /** * Fill the list of excluded connection point types with all types other than those passed in the parameter * @param mask Types (ORed values of NodeMaterialBlockConnectionPointTypes) that are allowed, and thus will not be pushed to the excluded list */ addExcludedConnectionPointFromAllowedTypes(e) { let t = 1; for (; t < We.All; ) e & t || this.excludedConnectionPointTypes.push(t), t = t << 1; } /** * Serializes this point in a JSON representation * @param isInput defines if the connection point is an input (default is true) * @returns the serialized point object */ serialize(e = !0) { const t = {}; return t.name = this.name, t.displayName = this.displayName, this.value !== void 0 && this.value !== null && (this.value.asArray ? (t.valueType = "BABYLON." + this.value.getClassName(), t.value = this.value.asArray()) : (t.valueType = "number", t.value = this.value)), e && this.connectedPoint && (t.inputName = this.name, t.targetBlockId = this.connectedPoint.ownerBlock.uniqueId, t.targetConnectionName = this.connectedPoint.name), t; } /** * Release resources */ dispose() { this.onConnectionObservable.clear(); } } class Bs { /** * Gets the time spent to build this block (in ms) */ get buildExecutionTime() { return this._buildExecutionTime; } /** * Gets the list of input points */ get inputs() { return this._inputs; } /** Gets the list of output points */ get outputs() { return this._outputs; } /** * Gets or set the name of the block */ get name() { return this._name; } set name(e) { this._name = e; } /** * Gets a boolean indicating if this block is an input */ get isInput() { return this._isInput; } /** * Gets a boolean indicating if this block is a teleport out */ get isTeleportOut() { return this._isTeleportOut; } /** * Gets a boolean indicating if this block is a teleport in */ get isTeleportIn() { return this._isTeleportIn; } /** * Gets a boolean indicating if this block is a debug block */ get isDebug() { return this._isDebug; } /** * Gets a boolean indicating that this block can only be used once per NodeGeometry */ get isUnique() { return this._isUnique; } /** * Gets the current class name e.g. 
"NodeGeometryBlock" * @returns the class name */ getClassName() { return "NodeGeometryBlock"; } _inputRename(e) { return e; } _outputRename(e) { return e; } /** * Checks if the current block is an ancestor of a given block * @param block defines the potential descendant block to check * @returns true if block is a descendant */ isAnAncestorOf(e) { for (const t of this._outputs) if (t.hasEndpoints) { for (const i of t.endpoints) if (i.ownerBlock === e || i.ownerBlock.isAnAncestorOf(e)) return !0; } return !1; } /** * Checks if the current block is an ancestor of a given type * @param type defines the potential type to check * @returns true if block is a descendant */ isAnAncestorOfType(e) { if (this.getClassName() === e) return !0; for (const t of this._outputs) if (t.hasEndpoints) { for (const i of t.endpoints) if (i.ownerBlock.isAnAncestorOfType(e)) return !0; } return !1; } /** * Get the first descendant using a predicate * @param predicate defines the predicate to check * @returns descendant or null if none found */ getDescendantOfPredicate(e) { if (e(this)) return this; for (const t of this._outputs) if (t.hasEndpoints) for (const i of t.endpoints) { const r = i.ownerBlock.getDescendantOfPredicate(e); if (r) return r; } return null; } /** * Creates a new NodeGeometryBlock * @param name defines the block name */ constructor(e) { this._name = "", this._isInput = !1, this._isTeleportOut = !1, this._isTeleportIn = !1, this._isDebug = !1, this._isUnique = !1, this._buildExecutionTime = 0, this.onBuildObservable = new Fe(), this._inputs = new Array(), this._outputs = new Array(), this._codeVariableName = "", this.visibleOnFrame = !1, this._name = e, this.uniqueId = LL.UniqueId; } /** * Register a new input. Must be called inside a block constructor * @param name defines the connection point name * @param type defines the connection point type * @param isOptional defines a boolean indicating that this input can be omitted * @param value value to return if there is no connection * @param valueMin min value accepted for value * @param valueMax max value accepted for value * @returns the current block */ registerInput(e, t, i = !1, r, s, n) { const a = new kH(e, this, $O.Input); return a.type = t, a.isOptional = i, a.defaultValue = r, a.value = r, a.valueMin = s, a.valueMax = n, this._inputs.push(a), this; } /** * Register a new output. Must be called inside a block constructor * @param name defines the connection point name * @param type defines the connection point type * @param point an already created connection point. If not provided, create a new one * @returns the current block */ registerOutput(e, t, i) { return i = i ?? 
new kH(e, this, $O.Output), i.type = t, this._outputs.push(i), this; } // eslint-disable-next-line @typescript-eslint/no-unused-vars _buildBlock(e) { } // eslint-disable-next-line @typescript-eslint/no-unused-vars _customBuildStep(e) { } /** * Build the current node and generate the vertex data * @param state defines the current generation state * @returns true if already built */ build(e) { if (this._buildId === e.buildId) return !0; if (this._outputs.length > 0) { if (!this._outputs.some((i) => i.hasEndpoints) && !this.isDebug) return !1; this.outputs.forEach((i) => i._resetCounters()); } this._buildId = e.buildId; for (const i of this._inputs) { if (!i.connectedPoint) { i.isOptional || e.notConnectedNonOptionalInputs.push(i); continue; } const r = i.connectedPoint.ownerBlock; r && r !== this && r.build(e); } this._customBuildStep(e), e.verbose && Ce.Log(`Building ${this.name} [${this.getClassName()}]`); const t = Gs.Now; this._buildBlock(e), this._buildExecutionTime = Gs.Now - t; for (const i of this._outputs) for (const r of i.endpoints) { const s = r.ownerBlock; s && s.build(e); } return this.onBuildObservable.notifyObservers(this), !1; } _linkConnectionTypes(e, t, i = !1) { i ? this._inputs[t]._acceptedConnectionPointType = this._inputs[e] : this._inputs[e]._linkedConnectionSource = this._inputs[t], this._inputs[t]._linkedConnectionSource = this._inputs[e]; } /** * Initialize the block and prepare the context for build */ initialize() { } /** * Lets the block try to connect some inputs automatically */ autoConfigure() { } /** * Find an input by its name * @param name defines the name of the input to look for * @returns the input or null if not found */ getInputByName(e) { const t = this._inputs.filter((i) => i.name === e); return t.length ? t[0] : null; } /** * Find an output by its name * @param name defines the name of the output to look for * @returns the output or null if not found */ getOutputByName(e) { const t = this._outputs.filter((i) => i.name === e); return t.length ? t[0] : null; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = {}; e.customType = "BABYLON." 
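/*
 * Usage sketch (not part of the library): a minimal custom block following the
 * registerInput / registerOutput / _buildBlock pattern documented above. It assumes the
 * public bundle exposes the base class and the type enum as BABYLON.NodeGeometryBlock and
 * BABYLON.NodeGeometryBlockConnectionPointTypes; the block name "DoubleBlock" and its logic
 * are hypothetical.
 *
 *   class DoubleBlock extends BABYLON.NodeGeometryBlock {
 *       constructor(name) {
 *           super(name);
 *           // registerInput(name, type, isOptional, defaultValue) must be called in the constructor
 *           this.registerInput("value", BABYLON.NodeGeometryBlockConnectionPointTypes.Float, true, 1);
 *           this.registerOutput("output", BABYLON.NodeGeometryBlockConnectionPointTypes.Float);
 *       }
 *       getClassName() { return "DoubleBlock"; }
 *       get value() { return this._inputs[0]; }
 *       get output() { return this._outputs[0]; }
 *       _buildBlock(state) {
 *           super._buildBlock(state);
 *           // storing a function keeps the output lazy: it is pulled through getConnectedValue(state)
 *           this.output._storedFunction = (s) => 2 * this.value.getConnectedValue(s);
 *       }
 *   }
 */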
+ this.getClassName(), e.id = this.uniqueId, e.name = this.name, e.inputs = [], e.outputs = []; for (const t of this.inputs) e.inputs.push(t.serialize()); for (const t of this.outputs) e.outputs.push(t.serialize(!1)); return e; } /** * @internal */ _deserialize(e) { this._name = e.name, this.comments = e.comments, this.visibleOnFrame = !!e.visibleOnFrame, this._deserializePortDisplayNamesAndExposedOnFrame(e); } _deserializePortDisplayNamesAndExposedOnFrame(e) { const t = e.inputs, i = e.outputs; t && t.forEach((r) => { const s = this.inputs.find((n) => n.name === r.name); if (s && (r.displayName && (s.displayName = r.displayName), r.isExposedOnFrame && (s.isExposedOnFrame = r.isExposedOnFrame, s.exposedPortPosition = r.exposedPortPosition), r.value !== void 0 && r.value !== null)) if (r.valueType === "number") s.value = r.value; else { const n = Qo(r.valueType); n && (s.value = n.FromArray(r.value)); } }), i && i.forEach((r, s) => { r.displayName && (this.outputs[s].displayName = r.displayName), r.isExposedOnFrame && (this.outputs[s].isExposedOnFrame = r.isExposedOnFrame, this.outputs[s].exposedPortPosition = r.exposedPortPosition); }); } _dumpPropertiesCode() { return `${this._codeVariableName}.visibleOnFrame = ${this.visibleOnFrame}; `; } /** * @internal */ _dumpCodeForOutputConnections(e) { let t = ""; if (e.indexOf(this) !== -1) return t; e.push(this); for (const i of this.inputs) { if (!i.isConnected) continue; const r = i.connectedPoint, s = r.ownerBlock; t += s._dumpCodeForOutputConnections(e), t += `${s._codeVariableName}.${s._outputRename(r.name)}.connectTo(${this._codeVariableName}.${this._inputRename(i.name)}); `; } return t; } /** * @internal */ _dumpCode(e, t) { t.push(this); const i = this.name.replace(/[^A-Za-z_]+/g, ""); if (this._codeVariableName = i || `${this.getClassName()}_${this.uniqueId}`, e.indexOf(this._codeVariableName) !== -1) { let n = 0; do n++, this._codeVariableName = i + n; while (e.indexOf(this._codeVariableName) !== -1); } e.push(this._codeVariableName); let r = ` // ${this.getClassName()} `; this.comments && (r += `// ${this.comments} `); const s = this.getClassName(); if (s === "GeometryInputBlock") { const a = this.type; r += `var ${this._codeVariableName} = new BABYLON.GeometryInputBlock("${this.name}", ${a}); `; } else r += `var ${this._codeVariableName} = new BABYLON.${s}("${this.name}"); `; r += this._dumpPropertiesCode(); for (const n of this.inputs) { if (!n.isConnected) continue; const l = n.connectedPoint.ownerBlock; t.indexOf(l) === -1 && (r += l._dumpCode(e, t)); } for (const n of this.outputs) if (n.hasEndpoints) for (const a of n.endpoints) { const l = a.ownerBlock; l && t.indexOf(l) === -1 && (r += l._dumpCode(e, t)); } return r; } /** * Clone the current block to a new identical block * @returns a copy of the current block */ clone() { const e = this.serialize(), t = Qo(e.customType); if (t) { const i = new t(); return i._deserialize(e), i; } return null; } /** * Release resources */ dispose() { for (const e of this.inputs) e.dispose(); for (const e of this.outputs) e.dispose(); this.onBuildObservable.clear(); } } F([ W("comment") ], Bs.prototype, "comments", void 0); class MW extends Bs { /** * Gets the current vertex data if the graph was successfully built */ get currentVertexData() { return this._vertexData; } /** * Create a new GeometryOutputBlock * @param name defines the block name */ constructor(e) { super(e), this._vertexData = null, this._isUnique = !0, this.registerInput("geometry", We.Geometry); } /** * Gets the current class 
name * @returns the class name */ getClassName() { return "GeometryOutputBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } _buildBlock(e) { e.vertexData = this.geometry.getConnectedValue(e), this._vertexData = e.vertexData; } } Be("BABYLON.GeometryOutputBlock", MW); var Rn; (function(c) { c[c.None = 0] = "None", c[c.Positions = 1] = "Positions", c[c.Normals = 2] = "Normals", c[c.Tangents = 3] = "Tangents", c[c.UV = 4] = "UV", c[c.UV2 = 5] = "UV2", c[c.UV3 = 6] = "UV3", c[c.UV4 = 7] = "UV4", c[c.UV5 = 8] = "UV5", c[c.UV6 = 9] = "UV6", c[c.Colors = 10] = "Colors", c[c.VertexID = 11] = "VertexID", c[c.FaceID = 12] = "FaceID", c[c.GeometryID = 13] = "GeometryID", c[c.CollectionID = 14] = "CollectionID", c[c.LoopID = 15] = "LoopID", c[c.InstanceID = 16] = "InstanceID"; })(Rn || (Rn = {})); class Gse { constructor() { this._rotationMatrix = new Ae(), this._scalingMatrix = new Ae(), this._positionMatrix = new Ae(), this._scalingRotationMatrix = new Ae(), this._transformMatrix = new Ae(), this._tempVector3 = new D(), this.notConnectedNonOptionalInputs = [], this.noContextualData = [], this.vertexData = null, this._geometryContext = null, this._executionContext = null, this._instancingContext = null, this._geometryContextStack = [], this._executionContextStack = [], this._instancingContextStack = []; } /** Gets or sets the geometry context */ get geometryContext() { return this._geometryContext; } /** Gets or sets the execution context */ get executionContext() { return this._executionContext; } /** Gets or sets the instancing context */ get instancingContext() { return this._instancingContext; } /** * Push the new active geometry context * @param geometryContext defines the geometry context */ pushGeometryContext(e) { this._geometryContext = e, this._geometryContextStack.push(this._geometryContext); } /** * Push the new active execution context * @param executionContext defines the execution context */ pushExecutionContext(e) { this._executionContext = e, this._executionContextStack.push(this._executionContext); } /** * Push the new active instancing context * @param instancingContext defines the instancing context */ pushInstancingContext(e) { this._instancingContext = e, this._instancingContextStack.push(this._instancingContext); } /** * Remove current geometry context and restore the previous one */ restoreGeometryContext() { this._geometryContextStack.pop(), this._geometryContext = this._geometryContextStack.length > 0 ? this._geometryContextStack[this._geometryContextStack.length - 1] : null; } /** * Remove current execution context and restore the previous one */ restoreExecutionContext() { this._executionContextStack.pop(), this._executionContext = this._executionContextStack.length > 0 ? this._executionContextStack[this._executionContextStack.length - 1] : null; } /** * Remove current isntancing context and restore the previous one */ restoreInstancingContext() { this._instancingContextStack.pop(), this._instancingContext = this._instancingContextStack.length > 0 ? 
this._instancingContextStack[this._instancingContextStack.length - 1] : null; } /** * Gets the value associated with a contextual source * @param source Source of the contextual value * @param skipWarning Do not store the warning for reporting if true * @returns the value associated with the source */ getContextualValue(e, t = !1) { if (!this.executionContext) return t || this.noContextualData.push(e), null; const i = this.executionContext.getExecutionIndex(); switch (e) { case Rn.Positions: return this.executionContext.getOverridePositionsContextualValue ? this.executionContext.getOverridePositionsContextualValue() : !this.geometryContext || !this.geometryContext.positions ? D.Zero() : D.FromArray(this.geometryContext.positions, i * 3); case Rn.Normals: return this.executionContext.getOverrideNormalsContextualValue ? this.executionContext.getOverrideNormalsContextualValue() : !this.geometryContext || !this.geometryContext.normals ? D.Zero() : D.FromArray(this.geometryContext.normals, i * 3); case Rn.Colors: return !this.geometryContext || !this.geometryContext.colors ? Di.Zero() : Di.FromArray(this.geometryContext.colors, i * 4); case Rn.Tangents: return !this.geometryContext || !this.geometryContext.tangents ? Di.Zero() : Di.FromArray(this.geometryContext.tangents, i * 4); case Rn.UV: return this.executionContext.getOverrideUVs1ContextualValue ? this.executionContext.getOverrideUVs1ContextualValue() : !this.geometryContext || !this.geometryContext.uvs ? at.Zero() : at.FromArray(this.geometryContext.uvs, i * 2); case Rn.UV2: return !this.geometryContext || !this.geometryContext.uvs2 ? at.Zero() : at.FromArray(this.geometryContext.uvs2, i * 2); case Rn.UV3: return !this.geometryContext || !this.geometryContext.uvs3 ? at.Zero() : at.FromArray(this.geometryContext.uvs3, i * 2); case Rn.UV4: return !this.geometryContext || !this.geometryContext.uvs4 ? at.Zero() : at.FromArray(this.geometryContext.uvs4, i * 2); case Rn.UV5: return !this.geometryContext || !this.geometryContext.uvs5 ? at.Zero() : at.FromArray(this.geometryContext.uvs5, i * 2); case Rn.UV6: return !this.geometryContext || !this.geometryContext.uvs6 ? at.Zero() : at.FromArray(this.geometryContext.uvs6, i * 2); case Rn.VertexID: return i; case Rn.FaceID: return this.executionContext.getExecutionFaceIndex(); case Rn.LoopID: return this.executionContext.getExecutionLoopIndex(); case Rn.InstanceID: return this.instancingContext ? this.instancingContext.getInstanceIndex() : 0; case Rn.GeometryID: return this.geometryContext ? this.geometryContext.uniqueId : 0; case Rn.CollectionID: return !this.geometryContext || !this.geometryContext.metadata ? 0 : this.geometryContext.metadata.collectionId || 0; } return null; } /** * Adapt a value to a target type * @param source defines the value to adapt * @param targetType defines the target type * @returns the adapted value */ adapt(e, t) { const i = e.getConnectedValue(this) || 0; if (e.type === t) return i; switch (t) { case We.Vector2: return new at(i, i); case We.Vector3: return new D(i, i, i); case We.Vector4: return new Di(i, i, i, i); } return null; } /** * Adapt an input value to a target type * @param source defines the value to adapt * @param targetType defines the target type * @param defaultValue defines the default value to use if not connected * @returns the adapted value */ adaptInput(e, t, i) { var r; if (!e.isConnected) return e.value || i; const s = e.getConnectedValue(this); if (((r = e._connectedPoint) === null || r === void 0 ? 
void 0 : r.type) === t) return s; switch (t) { case We.Vector2: return new at(s, s); case We.Vector3: return new D(s, s, s); case We.Vector4: return new Di(s, s, s, s); } return null; } /** * Emits console errors and exceptions if there is a failing check */ emitErrors() { let e = ""; for (const t of this.notConnectedNonOptionalInputs) e += `input ${t.name} from block ${t.ownerBlock.name}[${t.ownerBlock.getClassName()}] is not connected and is not optional. `; for (const t of this.noContextualData) e += `Contextual input ${Rn[t]} has no context to pull data from (must be connected to a setXXX block or a instantiateXXX block). `; if (e) throw `Build of NodeGeometry failed: ` + e; } /** @internal */ _instantiate(e, t, i, r, s) { Ae.ScalingToRef(r.x, r.y, r.z, this._scalingMatrix), Ae.RotationYawPitchRollToRef(i.y, i.x, i.z, this._rotationMatrix), Ae.TranslationToRef(t.x, t.y, t.z, this._positionMatrix), this._scalingMatrix.multiplyToRef(this._rotationMatrix, this._scalingRotationMatrix), this._scalingRotationMatrix.multiplyToRef(this._positionMatrix, this._transformMatrix); for (let n = 0; n < e.positions.length; n += 3) this._tempVector3.fromArray(e.positions, n), D.TransformCoordinatesToRef(this._tempVector3, this._transformMatrix, this._tempVector3), this._tempVector3.toArray(e.positions, n), e.normals && (this._tempVector3.fromArray(e.normals, n), D.TransformNormalToRef(this._tempVector3, this._scalingRotationMatrix, this._tempVector3), this._tempVector3.toArray(e.normals, n)); s.push(e); } /** @internal */ _instantiateWithMatrix(e, t, i) { for (let r = 0; r < e.positions.length; r += 3) this._tempVector3.fromArray(e.positions, r), D.TransformCoordinatesToRef(this._tempVector3, t, this._tempVector3), this._tempVector3.toArray(e.positions, r), e.normals && (this._tempVector3.fromArray(e.normals, r), D.TransformNormalToRef(this._tempVector3, t, this._tempVector3), this._tempVector3.toArray(e.normals, r)); i.push(e); } /** @internal */ _instantiateWithPositionAndMatrix(e, t, i, r) { Ae.TranslationToRef(t.x, t.y, t.z, this._positionMatrix), i.multiplyToRef(this._positionMatrix, this._transformMatrix); for (let s = 0; s < e.positions.length; s += 3) this._tempVector3.fromArray(e.positions, s), D.TransformCoordinatesToRef(this._tempVector3, this._transformMatrix, this._tempVector3), this._tempVector3.toArray(e.positions, s), e.normals && (this._tempVector3.fromArray(e.normals, s), D.TransformNormalToRef(this._tempVector3, this._transformMatrix, this._tempVector3), this._tempVector3.toArray(e.normals, s)); r.push(e); } } class bc extends Bs { /** * Gets or sets the connection point type (default is float) */ get type() { if (this._type === We.AutoDetect && this.value != null) { if (!isNaN(this.value)) return this._type = We.Float, this._type; switch (this.value.getClassName()) { case "Vector2": return this._type = We.Vector2, this._type; case "Vector3": return this._type = We.Vector3, this._type; case "Vector4": return this._type = We.Vector4, this._type; case "Matrix": return this._type = We.Matrix, this._type; } } return this._type; } /** * Gets a boolean indicating that the current connection point is a contextual value */ get isContextual() { return this._contextualSource !== Rn.None; } /** * Gets or sets the current contextual value */ get contextualValue() { return this._contextualSource; } set contextualValue(e) { switch (this._contextualSource = e, e) { case Rn.Positions: case Rn.Normals: this._type = We.Vector3; break; case Rn.Colors: case Rn.Tangents: this._type = We.Vector4; break; 
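/*
 * Usage sketch: wiring a contextual value into a graph. A GeometryInputBlock whose
 * contextualValue is set to Positions pulls the current vertex position from the execution
 * context each time it is read, here once per vertex of the SetPositionsBlock loop. On its
 * own this writes each position back unchanged; it only illustrates the wiring. Variable
 * names are illustrative; the class names are the ones registered in this module.
 *
 *   const box = new BABYLON.BoxBlock("Box");
 *   const positionsInput = new BABYLON.GeometryInputBlock("Positions");
 *   positionsInput.contextualValue = BABYLON.NodeGeometryContextualSources.Positions;
 *
 *   const setPositions = new BABYLON.SetPositionsBlock("Set positions");
 *   box.geometry.connectTo(setPositions.geometry);
 *   positionsInput.output.connectTo(setPositions.positions);
 *   // setPositions.output is a Geometry point and can then feed a GeometryOutputBlock
 */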
case Rn.UV: case Rn.UV2: case Rn.UV3: case Rn.UV4: case Rn.UV5: case Rn.UV6: this._type = We.Vector2; break; case Rn.VertexID: case Rn.GeometryID: case Rn.CollectionID: case Rn.FaceID: case Rn.LoopID: case Rn.InstanceID: this._type = We.Int; break; } this.output && (this.output.type = this._type); } /** * Creates a new InputBlock * @param name defines the block name * @param type defines the type of the input (can be set to NodeGeometryBlockConnectionPointTypes.AutoDetect) */ constructor(e, t = We.AutoDetect) { super(e), this._type = We.Undefined, this._contextualSource = Rn.None, this.min = 0, this.max = 0, this.groupInInspector = "", this.onValueChangedObservable = new Fe(), this._type = t, this._isInput = !0, this.setDefaultValue(), this.registerOutput("output", t); } /** * Gets or sets the value of that point. * Please note that this value will be ignored if valueCallback is defined */ get value() { return this._storedValue; } set value(e) { this.type === We.Float && this.min !== this.max && (e = Math.max(this.min, e), e = Math.min(this.max, e)), this._storedValue = e, this.onValueChangedObservable.notifyObservers(this); } /** * Gets or sets a callback used to get the value of that point. * Please note that setting this value will force the connection point to ignore the value property */ get valueCallback() { return this._valueCallback; } set valueCallback(e) { this._valueCallback = e; } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryInputBlock"; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } /** * Set the input block to its default value (based on its type) */ setDefaultValue() { switch (this.contextualValue = Rn.None, this.type) { case We.Int: case We.Float: this.value = 0; break; case We.Vector2: this.value = at.Zero(); break; case We.Vector3: this.value = D.Zero(); break; case We.Vector4: this.value = Di.Zero(); break; case We.Matrix: this.value = Ae.Identity(); break; } } _buildBlock(e) { super._buildBlock(e), this.isContextual ? (this.output._storedValue = null, this.output._storedFunction = (t) => t.getContextualValue(this._contextualSource)) : (this.output._storedFunction = null, this.output._storedValue = this.value); } dispose() { this.onValueChangedObservable.clear(), super.dispose(); } _dumpPropertiesCode() { const e = this._codeVariableName; if (this.isContextual) return super._dumpPropertiesCode() + `${e}.contextualValue = BABYLON.NodeGeometryContextualSources.${Rn[this._contextualSource]}; `; const t = []; let i = ""; switch (this.type) { case We.Float: case We.Int: i = `${this.value}`; break; case We.Vector2: i = `new BABYLON.Vector2(${this.value.x}, ${this.value.y})`; break; case We.Vector3: i = `new BABYLON.Vector3(${this.value.x}, ${this.value.y}, ${this.value.z})`; break; case We.Vector4: i = `new BABYLON.Vector4(${this.value.x}, ${this.value.y}, ${this.value.z}, ${this.value.w})`; break; } return t.push(`${e}.value = ${i}`), (this.type === We.Float || this.type === We.Int) && t.push(`${e}.min = ${this.min}`, `${e}.max = ${this.max}`), t.push(""), super._dumpPropertiesCode() + t.join(`; `); } serialize() { const e = super.serialize(); return e.type = this.type, e.contextualValue = this.contextualValue, e.min = this.min, e.max = this.max, e.groupInInspector = this.groupInInspector, this._storedValue !== null && !this.isContextual && (this._storedValue.asArray ? (e.valueType = "BABYLON." 
+ this._storedValue.getClassName(), e.value = this._storedValue.asArray()) : (e.valueType = "number", e.value = this._storedValue)), e; } _deserialize(e) { if (super._deserialize(e), this._type = e.type, this.contextualValue = e.contextualValue, this.min = e.min || 0, this.max = e.max || 0, this.groupInInspector = e.groupInInspector || "", !!e.valueType) if (e.valueType === "number") this._storedValue = e.value; else { const t = Qo(e.valueType); t && (this._storedValue = t.FromArray(e.value)); } } } Be("BABYLON.GeometryInputBlock", bc); class BU extends Bs { /** * Create a new BoxBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("size", We.Float, !0, 1), this.registerInput("width", We.Float, !0, 0), this.registerInput("height", We.Float, !0, 0), this.registerInput("depth", We.Float, !0, 0), this.registerInput("subdivisions", We.Int, !0, 1), this.registerInput("subdivisionsX", We.Int, !0, 0), this.registerInput("subdivisionsY", We.Int, !0, 0), this.registerInput("subdivisionsZ", We.Int, !0, 0), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "BoxBlock"; } /** * Gets the size input component */ get size() { return this._inputs[0]; } /** * Gets the width input component */ get width() { return this._inputs[1]; } /** * Gets the height input component */ get height() { return this._inputs[2]; } /** * Gets the depth input component */ get depth() { return this._inputs[3]; } /** * Gets the subdivisions input component */ get subdivisions() { return this._inputs[4]; } /** * Gets the subdivisionsX input component */ get subdivisionsX() { return this._inputs[5]; } /** * Gets the subdivisionsY input component */ get subdivisionsY() { return this._inputs[6]; } /** * Gets the subdivisionsZ input component */ get subdivisionsZ() { return this._inputs[7]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.size.isConnected) { if (!this.width.isConnected && !this.height.isConnected && !this.depth.isConnected) { const e = new bc("Size"); e.value = 1, e.output.connectTo(this.size); return; } if (!this.width.isConnected) { const e = new bc("Width"); e.value = 1, e.output.connectTo(this.width); } if (!this.height.isConnected) { const e = new bc("Height"); e.value = 1, e.output.connectTo(this.height); } if (!this.depth.isConnected) { const e = new bc("Depth"); e.value = 1, e.output.connectTo(this.depth); } } } _buildBlock(e) { const t = {}, i = (r) => { t.size = this.size.getConnectedValue(r), t.width = this.width.getConnectedValue(r), t.height = this.height.getConnectedValue(r), t.depth = this.depth.getConnectedValue(r); const s = this.subdivisions.getConnectedValue(r), n = this.subdivisionsX.getConnectedValue(r), a = this.subdivisionsY.getConnectedValue(r), l = this.subdivisionsZ.getConnectedValue(r); return s && (t.segments = s), n && (t.widthSegments = n), a && (t.heightSegments = a), l && (t.depthSegments = l), Cie(t); }; if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], BU.prototype, "evaluateContext", void 0); Be("BABYLON.BoxBlock", BU); class V_ { /** Get the inspector from bundle or global */ _getGlobalNodeGeometryEditor() { if (typeof NODEGEOMETRYEDITOR < "u") return NODEGEOMETRYEDITOR; if (typeof BABYLON < "u" && typeof BABYLON.NodeGeometryEditor < "u") return BABYLON; } /** * Gets the time spent to build this block (in ms) */ get buildExecutionTime() { return this._buildExecutionTime; } /** * Creates a new geometry * @param name defines the name of the geometry */ constructor(e) { this._buildId = V_._BuildIdGenerator++, this._buildWasSuccessful = !1, this._vertexData = null, this._buildExecutionTime = 0, this.BJSNODEGEOMETRYEDITOR = this._getGlobalNodeGeometryEditor(), this.editorData = null, this.attachedBlocks = [], this.onBuildObservable = new Fe(), this.outputBlock = null, this.name = e; } /** * Gets the current class name of the geometry e.g. "NodeGeometry" * @returns the class name */ getClassName() { return "NodeGeometry"; } /** * Get a block by its name * @param name defines the name of the block to retrieve * @returns the required block or null if not found */ getBlockByName(e) { let t = null; for (const i of this.attachedBlocks) if (i.name === e) if (!t) t = i; else return Ve.Warn("More than one block was found with the name `" + e + "`"), t; return t; } /** * Get a block using a predicate * @param predicate defines the predicate used to find the good candidate * @returns the required block or null if not found */ getBlockByPredicate(e) { for (const t of this.attachedBlocks) if (e(t)) return t; return null; } /** * Gets the list of input blocks attached to this material * @returns an array of InputBlocks */ getInputBlocks() { const e = []; for (const t of this.attachedBlocks) t.isInput && e.push(t); return e; } /** * Launch the node geometry editor * @param config Define the configuration of the editor * @returns a promise fulfilled when the node editor is visible */ edit(e) { return new Promise((t) => { if (this.BJSNODEGEOMETRYEDITOR = this.BJSNODEGEOMETRYEDITOR || this._getGlobalNodeGeometryEditor(), typeof this.BJSNODEGEOMETRYEDITOR > "u") { const i = e && e.editorURL ? e.editorURL : V_.EditorURL; Ve.LoadBabylonScript(i, () => { this.BJSNODEGEOMETRYEDITOR = this.BJSNODEGEOMETRYEDITOR || this._getGlobalNodeGeometryEditor(), this._createNodeEditor(e == null ? void 0 : e.nodeGeometryEditorConfig), t(); }); } else this._createNodeEditor(e == null ? void 0 : e.nodeGeometryEditorConfig), t(); }); } /** Creates the node editor window. 
*/ _createNodeEditor(e) { const t = Object.assign({ nodeGeometry: this }, e); this.BJSNODEGEOMETRYEDITOR.NodeGeometryEditor.Show(t); } /** * Build the final geometry * @param verbose defines if the build should log activity * @param updateBuildId defines if the internal build Id should be updated (default is true) * @param autoConfigure defines if the autoConfigure method should be called when initializing blocks (default is false) */ build(e = !1, t = !0, i = !1) { if (this._buildWasSuccessful = !1, !this.outputBlock) throw "You must define the outputBlock property before building the geometry"; const r = Gs.Now; this._initializeBlock(this.outputBlock, i); const s = new Gse(); s.buildId = this._buildId, s.verbose = e, this.outputBlock.build(s), t && (this._buildId = V_._BuildIdGenerator++), this._buildExecutionTime = Gs.Now - r, s.emitErrors(), this._buildWasSuccessful = !0, this._vertexData = s.vertexData, this.onBuildObservable.notifyObservers(this); } /** * Creates a mesh from the geometry blocks * @param name defines the name of the mesh * @param scene The scene the mesh is scoped to * @returns The new mesh */ createMesh(e, t = null) { if (this._buildWasSuccessful || this.build(), !this._vertexData) return null; const i = new ke(e, t); return this._vertexData.applyToMesh(i), i._internalMetadata = i._internalMetadata || {}, i._internalMetadata.nodeGeometry = this, i; } /** * Creates a mesh from the geometry blocks * @param mesh the mesh to update * @returns True if successfully updated */ updateMesh(e) { return this._buildWasSuccessful || this.build(), this._vertexData ? (this._vertexData.applyToMesh(e), e._internalMetadata = e._internalMetadata || {}, e._internalMetadata.nodeGeometry = this, e) : !1; } _initializeBlock(e, t = !0) { e.initialize(), t && e.autoConfigure(), e._preparationId = this._buildId, this.attachedBlocks.indexOf(e) === -1 && this.attachedBlocks.push(e); for (const i of e.inputs) { const r = i.connectedPoint; if (r) { const s = r.ownerBlock; s !== e && this._initializeBlock(s, t); } } } /** * Clear the current geometry */ clear() { this.outputBlock = null, this.attachedBlocks.length = 0; } /** * Remove a block from the current geometry * @param block defines the block to remove */ removeBlock(e) { const t = this.attachedBlocks.indexOf(e); t > -1 && this.attachedBlocks.splice(t, 1), e === this.outputBlock && (this.outputBlock = null); } /** * Clear the current graph and load a new one from a serialization object * @param source defines the JSON representation of the geometry * @param merge defines whether or not the source must be merged or replace the current content */ parseSerializedObject(e, t = !1) { t || this.clear(); const i = {}; for (const r of e.blocks) { const s = Qo(r.customType); if (s) { const n = new s(); n._deserialize(r), i[r.id] = n, this.attachedBlocks.push(n); } } for (const r of this.attachedBlocks) if (r.isTeleportOut) { const s = r, n = s._tempEntryPointUniqueId; if (n) { const a = i[n]; a && a.attachToEndpoint(s); } } for (let r = 0; r < e.blocks.length; r++) { const s = e.blocks[r], n = i[s.id]; n && (n.inputs.length && s.inputs.some((a) => a.targetConnectionName) && !t || this._restoreConnections(n, e, i)); } if (e.outputNodeId && (this.outputBlock = i[e.outputNodeId]), e.locations || e.editorData && e.editorData.locations) { const r = e.locations || e.editorData.locations; for (const n of r) i[n.blockId] && (n.blockId = i[n.blockId].uniqueId); t && this.editorData && this.editorData.locations && r.concat(this.editorData.locations), 
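/*
 * Usage sketch (assumes the bundle is exposed as the global BABYLON namespace and that
 * `scene` is an existing Scene instance): build a small graph and turn it into a mesh, as
 * build() and createMesh() above describe.
 *
 *   const nodeGeometry = new BABYLON.NodeGeometry("node geometry");
 *
 *   const box = new BABYLON.BoxBlock("Box");
 *   const output = new BABYLON.GeometryOutputBlock("Geometry Output");
 *   box.geometry.connectTo(output.geometry);
 *
 *   nodeGeometry.outputBlock = output;
 *   nodeGeometry.build();
 *
 *   const mesh = nodeGeometry.createMesh("nodeGeometryMesh", scene);
 */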
e.locations ? this.editorData = { locations: r } : (this.editorData = e.editorData, this.editorData.locations = r); const s = []; for (const n in i) s[n] = i[n].uniqueId; this.editorData.map = s; } this.comment = e.comment; } _restoreConnections(e, t, i) { for (const r of e.outputs) for (const s of t.blocks) { const n = i[s.id]; if (n) { for (const a of s.inputs) if (i[a.targetBlockId] === e && a.targetConnectionName === r.name) { const l = n.getInputByName(a.inputName); if (!l || l.isConnected) continue; r.connectTo(l, !0), this._restoreConnections(n, t, i); continue; } } } } /** * Generate a string containing the code declaration required to create an equivalent of this geometry * @returns a string */ generateCode() { let e = []; const t = [], i = ["const", "var", "let"]; this.outputBlock && this._gatherBlocks(this.outputBlock, t); let r = `let nodeGeometry = new BABYLON.NodeGeometry("${this.name || "node geometry"}"); `; for (const s of t) s.isInput && e.indexOf(s) === -1 && (r += s._dumpCode(i, e)); return this.outputBlock && (e = [], r += `// Connections `, r += this.outputBlock._dumpCodeForOutputConnections(e), r += `// Output nodes `, r += `nodeGeometry.outputBlock = ${this.outputBlock._codeVariableName}; `, r += `nodeGeometry.build(); `), r; } _gatherBlocks(e, t) { if (t.indexOf(e) === -1) { t.push(e); for (const i of e.inputs) { const r = i.connectedPoint; if (r) { const s = r.ownerBlock; s !== e && this._gatherBlocks(s, t); } } if (e.isTeleportOut) { const i = e; i.entryPoint && this._gatherBlocks(i.entryPoint, t); } } } /** * Clear the current geometry and set it to a default state */ setToDefault() { this.clear(), this.editorData = null; const e = new BU("Box"); e.autoConfigure(); const t = new MW("Geometry Output"); e.geometry.connectTo(t.geometry), this.outputBlock = t; } /** * Makes a duplicate of the current geometry. * @param name defines the name to use for the new geometry */ clone(e) { const t = this.serialize(), i = St.Clone(() => new V_(e), this); return i.name = e, i.parseSerializedObject(t), i._buildId = this._buildId, i.build(!1), i; } /** * Serializes this geometry in a JSON representation * @param selectedBlocks defines the list of blocks to save (if null the whole geometry will be saved) * @returns the serialized geometry object */ serialize(e) { const t = e ? {} : St.Serialize(this); t.editorData = JSON.parse(JSON.stringify(this.editorData)); let i = []; e ? 
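/*
 * Usage sketch: persisting and duplicating a graph with the serialization helpers above.
 * `nodeGeometry` is assumed to be an existing, already wired NodeGeometry instance.
 *
 *   const serialized = nodeGeometry.serialize();
 *   const restored = new BABYLON.NodeGeometry("restored");
 *   restored.parseSerializedObject(serialized);
 *   restored.build();
 *
 *   const copy = nodeGeometry.clone("copy");   // serialize + parse + build internally
 *   const code = nodeGeometry.generateCode();  // string of equivalent construction code
 */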
i = e : (t.customType = "BABYLON.NodeGeometry", this.outputBlock && (t.outputNodeId = this.outputBlock.uniqueId)), t.blocks = []; for (const r of i) t.blocks.push(r.serialize()); if (!e) for (const r of this.attachedBlocks) i.indexOf(r) === -1 && t.blocks.push(r.serialize()); return t; } /** * Disposes the ressources */ dispose() { for (const e of this.attachedBlocks) e.dispose(); this.attachedBlocks.length = 0, this.onBuildObservable.clear(); } /** * Creates a new node geometry set to default basic configuration * @param name defines the name of the geometry * @returns a new NodeGeometry */ static CreateDefault(e) { const t = new V_(e); return t.setToDefault(), t.build(), t; } /** * Creates a node geometry from parsed geometry data * @param source defines the JSON representation of the geometry * @returns a new node geometry */ static Parse(e) { const t = St.Parse(() => new V_(e.name), e, null); return t.parseSerializedObject(e), t.build(), t; } /** * Creates a node geometry from a snippet saved by the node geometry editor * @param snippetId defines the snippet to load * @param nodeGeometry defines a node geometry to update (instead of creating a new one) * @param skipBuild defines whether to build the node geometry * @returns a promise that will resolve to the new node geometry */ static ParseFromSnippetAsync(e, t, i = !1) { return e === "_BLANK" ? Promise.resolve(V_.CreateDefault("blank")) : new Promise((r, s) => { const n = new go(); n.addEventListener("readystatechange", () => { if (n.readyState == 4) if (n.status == 200) { const a = JSON.parse(JSON.parse(n.responseText).jsonPayload), l = JSON.parse(a.nodeGeometry); t || (t = St.Parse(() => new V_(e), l, null)), t.parseSerializedObject(l), t.snippetId = e; try { i || t.build(), r(t); } catch (o) { s(o); } } else s("Unable to load the snippet " + e); }), n.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), n.send(); }); } } V_._BuildIdGenerator = 0; V_.EditorURL = `${Ve._DefaultCdnUrl}/v${$e.Version}/nodeGeometryEditor/babylon.nodeGeometryEditor.js`; V_.SnippetUrl = "https://snippet.babylonjs.com"; F([ W() ], V_.prototype, "name", void 0); F([ W("comment") ], V_.prototype, "comment", void 0); class UU extends Bs { /** * Creates a new GeometryOptimizeBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.epsilon = Sr, this.registerInput("geometry", We.Geometry), this.registerOutput("output", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryOptimizeBlock"; } /** * Gets the geometry component */ get geometry() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (!this.geometry.isConnected) return null; const r = this.geometry.getConnectedValue(i), s = [], n = {}; for (let l = 0; l < r.positions.length; l += 3) { const o = r.positions[l], u = r.positions[l + 1], h = r.positions[l + 2]; let d = !1; for (let f = 0; f < s.length; f += 3) if (yt.WithinEpsilon(o, s[f], this.epsilon) && yt.WithinEpsilon(u, s[f + 1], this.epsilon) && yt.WithinEpsilon(h, s[f + 2], this.epsilon)) { n[l / 3] = f / 3, d = !0; continue; } d || (n[l / 3] = s.length / 3, s.push(o, u, h)); } const a = new Ot(); return a.positions = s, a.indices = r.indices.map((l) => n[l]), a; }; this.evaluateContext ? 
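/*
 * Usage sketch: the static helpers above. CreateDefault returns an already built default
 * graph (a Box wired to an output block); ParseFromSnippetAsync loads a graph saved with the
 * node geometry editor. The snippet id and `scene` below are placeholders.
 *
 *   const defaultGeometry = BABYLON.NodeGeometry.CreateDefault("default");
 *
 *   BABYLON.NodeGeometry.ParseFromSnippetAsync("YOUR_SNIPPET_ID").then((loaded) => {
 *       loaded.createMesh("snippetMesh", scene);
 *   });
 */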
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? "true" : "false"}; `; return e += `${this._codeVariableName}.epsilon = ${this.epsilon}; `, e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e.epsilon = this.epsilon, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext, this.epsilon = e.epsilon; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], UU.prototype, "evaluateContext", void 0); F([ ir("Epsilon", $i.Float, "ADVANCED", { notifiers: { rebuild: !0 } }) ], UU.prototype, "epsilon", void 0); Be("BABYLON.GeometryOptimizeBlock", UU); class RW extends Bs { /** * Create a new PlaneBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("size", We.Float, !0, 1), this.registerInput("width", We.Float, !0, 0), this.registerInput("height", We.Float, !0, 0), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "PlaneBlock"; } /** * Gets the size input component */ get size() { return this._inputs[0]; } /** * Gets the width input component */ get width() { return this._inputs[1]; } /** * Gets the height input component */ get height() { return this._inputs[2]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.size.isConnected) { if (!this.width.isConnected && !this.height.isConnected) { const e = new bc("Size"); e.value = 1, e.output.connectTo(this.size); return; } if (!this.width.isConnected) { const e = new bc("Width"); e.value = 1, e.output.connectTo(this.width); } if (!this.height.isConnected) { const e = new bc("Height"); e.value = 1, e.output.connectTo(this.height); } } } _buildBlock(e) { const t = {}, i = (r) => (t.size = this.size.getConnectedValue(r), t.width = this.width.getConnectedValue(r), t.height = this.height.getConnectedValue(r), QB(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], RW.prototype, "evaluateContext", void 0); Be("BABYLON.PlaneBlock", RW); class PW extends Bs { /** * Gets or sets the mesh to use to get vertex data */ get mesh() { return this._mesh; } set mesh(e) { this._mesh = e; } /** * Create a new MeshBlock * @param name defines the block name */ constructor(e) { super(e), this._cachedVertexData = null, this.reverseWindingOrder = !1, this.serializedCachedData = !1, this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MeshBlock"; } /** * Gets a boolean indicating if the block is using cached data */ get isUsingCachedData() { return !this.mesh && !!this._cachedVertexData; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } /** * Remove stored data */ cleanData() { this._mesh = null, this._cachedVertexData = null; } _buildBlock() { if (!this._mesh) { this._cachedVertexData ? this.geometry._storedValue = this._cachedVertexData.clone() : this.geometry._storedValue = null; return; } const e = Ot.ExtractFromMesh(this._mesh, !1, !0); if (this._cachedVertexData = null, this.reverseWindingOrder && e.indices) for (let t = 0; t < e.indices.length; t += 3) { const i = e.indices[t]; e.indices[t] = e.indices[t + 2], e.indices[t + 2] = i; } this.geometry._storedFunction = () => e.clone(); } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.serializedCachedData = this.serializedCachedData, this.serializedCachedData && (this._mesh ? 
e.cachedVertexData = Ot.ExtractFromMesh(this._mesh, !1, !0).serialize() : this._cachedVertexData && (e.cachedVertexData = this._cachedVertexData.serialize())), e.reverseWindingOrder = this.reverseWindingOrder, e; } _deserialize(e) { super._deserialize(e), e.cachedVertexData && (this._cachedVertexData = Ot.Parse(e.cachedVertexData)), this.serializedCachedData = !!e.serializedCachedData, this.reverseWindingOrder = e.reverseWindingOrder; } } F([ ir("Serialize cached data", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], PW.prototype, "serializedCachedData", void 0); Be("BABYLON.MeshBlock", PW); class IW extends Bs { /** * Create a new IcoSphereBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("radius", We.Float, !0, 1), this.registerInput("radiusX", We.Float, !0, 0), this.registerInput("radiusY", We.Float, !0, 0), this.registerInput("radiusZ", We.Float, !0, 0), this.registerInput("subdivisions", We.Int, !0, 4), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "IcoSphereBlock"; } /** * Gets the radius input component */ get radius() { return this._inputs[0]; } /** * Gets the radiusX input component */ get radiusX() { return this._inputs[1]; } /** * Gets the radiusY input component */ get radiusY() { return this._inputs[2]; } /** * Gets the radiusZ input component */ get radiusZ() { return this._inputs[3]; } /** * Gets the subdivisions input component */ get subdivisions() { return this._inputs[4]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.radius.isConnected) { const e = new bc("Radius"); e.value = 0.2, e.output.connectTo(this.radius); } } _buildBlock(e) { const t = {}, i = (r) => (t.radius = this.radius.getConnectedValue(r), t.subdivisions = this.subdivisions.getConnectedValue(r), t.radiusX = this.radiusX.getConnectedValue(r), t.radiusY = this.radiusY.getConnectedValue(r), t.radiusZ = this.radiusZ.getConnectedValue(r), ZB(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], IW.prototype, "evaluateContext", void 0); Be("BABYLON.IcoSphereBlock", IW); class DW extends Bs { /** * Create a new SphereBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("segments", We.Int, !0, 32), this.registerInput("diameter", We.Float, !0, 1), this.registerInput("diameterX", We.Float, !0, 0), this.registerInput("diameterY", We.Float, !0, 0), this.registerInput("diameterZ", We.Float, !0, 0), this.registerInput("arc", We.Float, !0, 1), this.registerInput("slice", We.Float, !0, 1), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SphereBlock"; } /** * Gets the segments input component */ get segments() { return this._inputs[0]; } /** * Gets the diameter input component */ get diameter() { return this._inputs[1]; } /** * Gets the diameterX input component */ get diameterX() { return this._inputs[2]; } /** * Gets the diameterY input component */ get diameterY() { return this._inputs[3]; } /** * Gets the diameterZ input component */ get diameterZ() { return this._inputs[4]; } /** * Gets the arc input component */ get arc() { return this._inputs[5]; } /** * Gets the slice input component */ get slice() { return this._inputs[6]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.diameter.isConnected) { const e = new bc("Diameter"); e.value = 1, e.output.connectTo(this.diameter); } } _buildBlock(e) { const t = {}, i = (r) => (t.segments = this.segments.getConnectedValue(r), t.diameter = this.diameter.getConnectedValue(r), t.diameterX = this.diameterX.getConnectedValue(r), t.diameterY = this.diameterY.getConnectedValue(r), t.diameterZ = this.diameterZ.getConnectedValue(r), t.arc = this.arc.getConnectedValue(r), t.slice = this.slice.getConnectedValue(r), uU(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], DW.prototype, "evaluateContext", void 0); Be("BABYLON.SphereBlock", DW); class OW extends Bs { /** * Create a new GridBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("width", We.Float, !0, 1), this.registerInput("height", We.Float, !0, 1), this.registerInput("subdivisions", We.Int, !0, 1), this.registerInput("subdivisionsX", We.Int, !0, 0), this.registerInput("subdivisionsY", We.Int, !0, 0), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GridBlock"; } /** * Gets the width input component */ get width() { return this._inputs[0]; } /** * Gets the height input component */ get height() { return this._inputs[1]; } /** * Gets the subdivisions input component */ get subdivisions() { return this._inputs[2]; } /** * Gets the subdivisionsX input component */ get subdivisionsX() { return this._inputs[3]; } /** * Gets the subdivisionsY input component */ get subdivisionsY() { return this._inputs[4]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.width.isConnected) { const e = new bc("Width"); e.value = 1, e.output.connectTo(this.width); } if (!this.height.isConnected) { const e = new bc("Height"); e.value = 1, e.output.connectTo(this.height); } } _buildBlock(e) { const t = {}, i = (r) => (t.width = this.width.getConnectedValue(r), t.height = this.height.getConnectedValue(r), t.subdivisions = this.subdivisions.getConnectedValue(r), t.subdivisionsX = this.subdivisionsX.getConnectedValue(r), t.subdivisionsY = this.subdivisionsY.getConnectedValue(r), BC(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], OW.prototype, "evaluateContext", void 0); Be("BABYLON.GridBlock", OW); class wW extends Bs { /** * Create a new TorusBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("diameter", We.Float, !0, 1), this.registerInput("thickness", We.Float, !0, 0.5), this.registerInput("tessellation", We.Int, !0, 16), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TorusBlock"; } /** * Gets the diameter input component */ get diameter() { return this._inputs[0]; } /** * Gets the thickness input component */ get thickness() { return this._inputs[1]; } /** * Gets the tessellation input component */ get tessellation() { return this._inputs[2]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.diameter.isConnected) { const e = new bc("Diameter"); e.value = 1, e.output.connectTo(this.diameter); } } _buildBlock(e) { const t = {}, i = (r) => (t.thickness = this.thickness.getConnectedValue(r), t.diameter = this.diameter.getConnectedValue(r), t.tessellation = this.tessellation.getConnectedValue(r), aU(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], wW.prototype, "evaluateContext", void 0); Be("BABYLON.TorusBlock", wW); class LW extends Bs { /** * Create a new SphereBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("height", We.Float, !0, 25), this.registerInput("diameter", We.Float, !0, 1), this.registerInput("diameterTop", We.Float, !0, -1), this.registerInput("diameterBottom", We.Float, !0, -1), this.registerInput("subdivisions", We.Int, !0, 1), this.registerInput("tessellation", We.Int, !0, 24), this.registerInput("arc", We.Float, !0, 1), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CylinderBlock"; } /** * Gets the height input component */ get height() { return this._inputs[0]; } /** * Gets the diameter input component */ get diameter() { return this._inputs[1]; } /** * Gets the diameterTop input component */ get diameterTop() { return this._inputs[2]; } /** * Gets the diameterBottom input component */ get diameterBottom() { return this._inputs[3]; } /** * Gets the subdivisions input component */ get subdivisions() { return this._inputs[4]; } /** * Gets the tessellation input component */ get tessellation() { return this._inputs[5]; } /** * Gets the arc input component */ get arc() { return this._inputs[6]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.diameter.isConnected) { const e = new bc("Diameter"); e.value = 1, e.output.connectTo(this.diameter); } if (!this.height.isConnected) { const e = new bc("Height"); e.value = 1, e.output.connectTo(this.height); } } _buildBlock(e) { const t = {}, i = (r) => (t.height = this.height.getConnectedValue(r), t.diameter = this.diameter.getConnectedValue(r), t.diameterTop = this.diameterTop.getConnectedValue(r), t.diameterBottom = this.diameterBottom.getConnectedValue(r), t.diameterTop === -1 && (t.diameterTop = t.diameter), t.diameterBottom === -1 && (t.diameterBottom = t.diameter), t.tessellation = this.tessellation.getConnectedValue(r), t.subdivisions = this.subdivisions.getConnectedValue(r), t.arc = this.arc.getConnectedValue(r), lU(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], LW.prototype, "evaluateContext", void 0); Be("BABYLON.CylinderBlock", LW); class NW extends Bs { /** * Create a new CapsuleBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("height", We.Float, !0, 1), this.registerInput("radius", We.Float, !0, 0.25), this.registerInput("tessellation", We.Int, !0, 16), this.registerInput("subdivisions", We.Int, !0, 2), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "CapsuleBlock"; } /** * Gets the height input component */ get height() { return this._inputs[0]; } /** * Gets the radius input component */ get radius() { return this._inputs[1]; } /** * Gets the tessellation input component */ get tessellation() { return this._inputs[2]; } /** * Gets the subdivisions input component */ get subdivisions() { return this._inputs[3]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.height.isConnected) { const e = new bc("Height"); e.value = 1, e.output.connectTo(this.height); } if (!this.radius.isConnected) { const e = new bc("Radius"); e.value = 0.2, e.output.connectTo(this.radius); } } _buildBlock(e) { const t = {}, i = (r) => (t.height = this.height.getConnectedValue(r), t.radius = this.radius.getConnectedValue(r), t.tessellation = this.tessellation.getConnectedValue(r), t.subdivisions = this.subdivisions.getConnectedValue(r), hU(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], NW.prototype, "evaluateContext", void 0); Be("BABYLON.CapsuleBlock", NW); class FW extends Bs { /** * Create a new DiscBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("radius", We.Float, !0, 0.5), this.registerInput("tessellation", We.Int, !0, 64), this.registerInput("arc", We.Float, !0, 1), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "DiscBlock"; } /** * Gets the radius input component */ get radius() { return this._inputs[0]; } /** * Gets the tessellation input component */ get tessellation() { return this._inputs[1]; } /** * Gets the arc input component */ get arc() { return this._inputs[2]; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } autoConfigure() { if (!this.radius.isConnected) { const e = new bc("Radius"); e.value = 0.2, e.output.connectTo(this.radius); } } _buildBlock(e) { const t = {}, i = (r) => (t.radius = this.radius.getConnectedValue(r), t.tessellation = this.tessellation.getConnectedValue(r), t.arc = this.arc.getConnectedValue(r), dU(t)); if (this.evaluateContext) this.geometry._storedFunction = i; else { const r = i(e); this.geometry._storedFunction = () => (this.geometry._executionCount = 1, r.clone()); } } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], FW.prototype, "evaluateContext", void 0); Be("BABYLON.DiscBlock", FW); class Kse extends Bs { /** * Create a new NullBlock * @param name defines the block name */ constructor(e) { super(e), this.registerOutput("geometry", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NullBlock"; } /** * Gets the geometry output component */ get geometry() { return this._outputs[0]; } _buildBlock() { this.geometry._storedValue = null; } } Be("BABYLON.NullBlock", Kse); class BW extends Bs { /** * Create a new SetPositionsBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("positions", We.Vector3), this.registerOutput("output", We.Geometry); } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetPositionsBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the positions input component */ get positions() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), this._vertexData = this.geometry.getConnectedValue(i), this._vertexData && (this._vertexData = this._vertexData.clone()), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions || !this.positions.isConnected) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = null; return; } const r = this._vertexData.positions.length / 3; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const s = this.positions.getConnectedValue(i); s && s.toArray(this._vertexData.positions, this._currentIndex * 3); } return i.restoreGeometryContext(), i.restoreExecutionContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], BW.prototype, "evaluateContext", void 0); Be("BABYLON.SetPositionsBlock", BW); class UW extends Bs { /** * Create a new SetNormalsBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("normals", We.Vector3), this.registerOutput("output", We.Geometry); } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetNormalsBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the normals input component */ get normals() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), this._vertexData = this.geometry.getConnectedValue(i), this._vertexData && (this._vertexData = this._vertexData.clone()), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = null; return; } if (!this.normals.isConnected) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = this._vertexData; return; } this._vertexData.normals || (this._vertexData.normals = []); const r = this._vertexData.positions.length / 3; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const s = this.normals.getConnectedValue(i); s && s.toArray(this._vertexData.normals, this._currentIndex * 3); } return i.restoreGeometryContext(), i.restoreExecutionContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], UW.prototype, "evaluateContext", void 0); Be("BABYLON.SetNormalsBlock", UW); class VU extends Bs { /** * Create a new SetUVsBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.textureCoordinateIndex = 0, this.registerInput("geometry", We.Geometry), this.registerInput("uvs", We.Vector2), this.registerOutput("output", We.Geometry); } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetUVsBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the uvs input component */ get uvs() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), this._vertexData = this.geometry.getConnectedValue(i), this._vertexData && (this._vertexData = this._vertexData.clone()), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = null; return; } if (!this.uvs.isConnected) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = this._vertexData; return; } const r = [], s = this._vertexData.positions.length / 3; for (this._currentIndex = 0; this._currentIndex < s; this._currentIndex++) { const n = this.uvs.getConnectedValue(i); n && n.toArray(r, this._currentIndex * 2); } switch (this.textureCoordinateIndex) { case 0: this._vertexData.uvs = r; break; case 1: this._vertexData.uvs2 = r; break; case 2: this._vertexData.uvs3 = r; break; case 3: this._vertexData.uvs4 = r; break; case 4: this._vertexData.uvs5 = r; break; case 5: this._vertexData.uvs6 = r; break; } return i.restoreGeometryContext(), i.restoreExecutionContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.textureCoordinateIndex}; `; return e += `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `, e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e.textureCoordinateIndex = this.textureCoordinateIndex, e; } _deserialize(e) { super._deserialize(e), this.textureCoordinateIndex = e.textureCoordinateIndex, e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], VU.prototype, "evaluateContext", void 0); F([ ir("Texture coordinates index", $i.List, "ADVANCED", { notifiers: { update: !0 }, options: [ { label: "UV1", value: 0 }, { label: "UV2", value: 1 }, { label: "UV3", value: 2 }, { label: "UV4", value: 3 }, { label: "UV5", value: 4 }, { label: "UV6", value: 5 } ] }) ], VU.prototype, "textureCoordinateIndex", void 0); Be("BABYLON.SetUVsBlock", VU); class VW extends Bs { /** * Create a new SetColorsBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("colors", We.Vector4), this.registerOutput("output", We.Geometry); } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetColorsBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the colors input component */ get colors() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), this._vertexData = this.geometry.getConnectedValue(i), this._vertexData && (this._vertexData = this._vertexData.clone()), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = null; return; } if (!this.colors.isConnected) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = this._vertexData; return; } this._vertexData.colors || (this._vertexData.colors = []); const r = this._vertexData.positions.length / 3; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const s = this.colors.getConnectedValue(i); s && s.toArray(this._vertexData.colors, this._currentIndex * 4); } return i.restoreGeometryContext(), i.restoreExecutionContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], VW.prototype, "evaluateContext", void 0); Be("BABYLON.SetColorsBlock", VW); class kW extends Bs { /** * Create a new SetTangentsBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("tangents", We.Vector4), this.registerOutput("output", We.Geometry); } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetTangentsBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the tangents input component */ get tangents() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), this._vertexData = this.geometry.getConnectedValue(i), this._vertexData && (this._vertexData = this._vertexData.clone()), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = null; return; } if (!this.tangents.isConnected) { i.restoreGeometryContext(), i.restoreExecutionContext(), this.output._storedValue = this._vertexData; return; } this._vertexData.tangents || (this._vertexData.tangents = []); const r = this._vertexData.positions.length / 3; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const s = this.tangents.getConnectedValue(i); s && s.toArray(this._vertexData.tangents, this._currentIndex * 4); } return i.restoreGeometryContext(), i.restoreExecutionContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], kW.prototype, "evaluateContext", void 0); Be("BABYLON.SetTangentsBlock", kW); var wp; (function(c) { c[c.Add = 0] = "Add", c[c.Subtract = 1] = "Subtract", c[c.Multiply = 2] = "Multiply", c[c.Divide = 3] = "Divide", c[c.Max = 4] = "Max", c[c.Min = 5] = "Min"; })(wp || (wp = {})); class zW extends Bs { /** * Create a new MathBlock * @param name defines the block name */ constructor(e) { super(e), this.operation = wp.Add, this.registerInput("left", We.AutoDetect), this.registerInput("right", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture), this._inputs[1].excludedConnectionPointTypes.push(We.Matrix), this._inputs[1].excludedConnectionPointTypes.push(We.Geometry), this._inputs[1].excludedConnectionPointTypes.push(We.Texture), this._inputs[1].acceptedConnectionPointTypes.push(We.Float), this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MathBlock"; } /** * Gets the left input component */ get left() { return this._inputs[0]; } /** * Gets the right input component */ get right() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock() { let e; const t = this.left, i = this.right; if (!t.isConnected || !i.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } const r = t.type === We.Float || t.type === We.Int; switch (this.operation) { case wp.Add: { r ? e = (s) => t.getConnectedValue(s) + i.getConnectedValue(s) : e = (s) => t.getConnectedValue(s).add(s.adapt(i, t.type)); break; } case wp.Subtract: { r ? e = (s) => t.getConnectedValue(s) - i.getConnectedValue(s) : e = (s) => t.getConnectedValue(s).subtract(s.adapt(i, t.type)); break; } case wp.Multiply: { r ? e = (s) => t.getConnectedValue(s) * i.getConnectedValue(s) : e = (s) => t.getConnectedValue(s).multiply(s.adapt(i, t.type)); break; } case wp.Divide: { r ? 
e = (s) => t.getConnectedValue(s) / i.getConnectedValue(s) : e = (s) => t.getConnectedValue(s).divide(s.adapt(i, t.type)); break; } case wp.Min: { if (r) e = (s) => Math.min(t.getConnectedValue(s), i.getConnectedValue(s)); else switch (t.type) { case We.Vector2: { e = (s) => at.Minimize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } case We.Vector3: { e = (s) => D.Minimize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } case We.Vector4: { e = (s) => Di.Minimize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } } break; } case wp.Max: if (r) e = (s) => Math.max(t.getConnectedValue(s), i.getConnectedValue(s)); else { switch (t.type) { case We.Vector2: { e = (s) => at.Maximize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } case We.Vector3: { e = (s) => D.Maximize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } case We.Vector4: { e = (s) => Di.Maximize(t.getConnectedValue(s), s.adapt(i, t.type)); break; } } break; } } this.output._storedFunction = (s) => t.type === We.Int ? e(s) | 0 : e(s); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.operation = BABYLON.MathBlockOperations.${wp[this.operation]}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.operation = this.operation, e; } _deserialize(e) { super._deserialize(e), this.operation = e.operation; } } F([ ir("Operation", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "Add", value: wp.Add }, { label: "Subtract", value: wp.Subtract }, { label: "Multiply", value: wp.Multiply }, { label: "Divide", value: wp.Divide }, { label: "Max", value: wp.Max }, { label: "Min", value: wp.Min } ] }) ], zW.prototype, "operation", void 0); Be("BABYLON.MathBlock", zW); class Wse extends Bs { /** * Create a new MapRangeBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("value", We.AutoDetect), this.registerInput("fromMin", We.Float, !0, 0), this.registerInput("fromMax", We.Float, !0, 1), this.registerInput("toMin", We.Float, !0, 0), this.registerInput("toMax", We.Float, !0, 1), this.registerOutput("output", We.BasedOnInput), this._inputs[0].excludedConnectionPointTypes.push(We.Vector2), this._inputs[0].excludedConnectionPointTypes.push(We.Vector3), this._inputs[0].excludedConnectionPointTypes.push(We.Vector4), this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "MapRangeBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the fromMin input component */ get fromMin() { return this._inputs[1]; } /** * Gets the fromMax input component */ get fromMax() { return this._inputs[2]; } /** * Gets the toMin input component */ get toMin() { return this._inputs[3]; } /** * Gets the toMax input component */ get toMax() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock() { if (!this.value.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } this.output._storedFunction = (e) => { const t = this.value.getConnectedValue(e), i = this.fromMin.getConnectedValue(e), r = this.fromMax.getConnectedValue(e), s = 
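/*
 * Illustrative note (not part of the library): MapRangeBlock remaps `value` linearly from
 * [fromMin, fromMax] to [toMin, toMax]:
 *
 *   output = (value - fromMin) / (fromMax - fromMin) * (toMax - toMin) + toMin
 *
 * For example value = 0.25 with fromMin = 0, fromMax = 1, toMin = 10, toMax = 20 gives 12.5; when
 * the output type is Int the result is floored. The inputs are not clamped, so values outside
 * [fromMin, fromMax] extrapolate past the target range.
 */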
this.toMin.getConnectedValue(e), n = this.toMax.getConnectedValue(e), a = (t - i) / (r - i) * (n - s) + s; return this.output.type === We.Int ? Math.floor(a) : a; }; } } Be("BABYLON.MapRangeBlock", Wse); var Bu; (function(c) { c[c.Equal = 0] = "Equal", c[c.NotEqual = 1] = "NotEqual", c[c.LessThan = 2] = "LessThan", c[c.GreaterThan = 3] = "GreaterThan", c[c.LessOrEqual = 4] = "LessOrEqual", c[c.GreaterOrEqual = 5] = "GreaterOrEqual", c[c.Xor = 6] = "Xor", c[c.Or = 7] = "Or", c[c.And = 8] = "And"; })(Bu || (Bu = {})); class HW extends Bs { /** * Create a new ConditionBlock * @param name defines the block name */ constructor(e) { super(e), this.test = Bu.Equal, this.registerInput("left", We.Float), this.registerInput("right", We.Float, !0, 0), this.registerInput("ifTrue", We.AutoDetect, !0, 1), this.registerInput("ifFalse", We.AutoDetect, !0, 0), this.registerOutput("output", We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[2], this._outputs[0]._defaultConnectionPointType = We.Float, this._inputs[0].acceptedConnectionPointTypes.push(We.Int), this._inputs[1].acceptedConnectionPointTypes.push(We.Int), this._linkConnectionTypes(2, 3); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ConditionBlock"; } /** * Gets the left input component */ get left() { return this._inputs[0]; } /** * Gets the right input component */ get right() { return this._inputs[1]; } /** * Gets the ifTrue input component */ get ifTrue() { return this._inputs[2]; } /** * Gets the ifFalse input component */ get ifFalse() { return this._inputs[3]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock() { if (!this.left.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } const e = (t) => { const i = this.left.getConnectedValue(t), r = this.right.getConnectedValue(t); let s = !1; switch (this.test) { case Bu.Equal: s = yt.WithinEpsilon(i, r, Sr); break; case Bu.NotEqual: s = i !== r; break; case Bu.LessThan: s = i < r; break; case Bu.GreaterThan: s = i > r; break; case Bu.LessOrEqual: s = i <= r; break; case Bu.GreaterOrEqual: s = i >= r; break; case Bu.Xor: s = !!i && !r || !i && !!r; break; case Bu.Or: s = !!i || !!r; break; case Bu.And: s = !!i && !!r; break; } return s; }; this.output._storedFunction = (t) => e(t) ? 
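/*
 * Illustrative note (not part of the library): ConditionBlock runs the comparison selected by
 * `test` on the left/right floats (Equal uses an epsilon comparison through WithinEpsilon) and the
 * stored function below returns the connected ifTrue or ifFalse value accordingly; the output type
 * follows ifTrue (see _typeConnectionSource in the constructor). A minimal sketch, assuming the
 * names registered via Be(...):
 *
 *   const pick = new HW("pick");   // HW is registered below as BABYLON.ConditionBlock
 *   pick.test = Bu.LessThan;       // Bu is the tests enum declared just above
 *   // left/right receive floats; ifTrue/ifFalse carry the values to choose between.
 */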
this.ifTrue.getConnectedValue(t) : this.ifFalse.getConnectedValue(t); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.test = BABYLON.ConditionBlockTests.${Bu[this.test]}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.test = this.test, e; } _deserialize(e) { super._deserialize(e), this.test = e.test; } } F([ ir("Test", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "Equal", value: Bu.Equal }, { label: "NotEqual", value: Bu.NotEqual }, { label: "LessThan", value: Bu.LessThan }, { label: "GreaterThan", value: Bu.GreaterThan }, { label: "LessOrEqual", value: Bu.LessOrEqual }, { label: "GreaterOrEqual", value: Bu.GreaterOrEqual }, { label: "Xor", value: Bu.Xor }, { label: "Or", value: Bu.Or }, { label: "And", value: Bu.And } ] }) ], HW.prototype, "test", void 0); Be("BABYLON.ConditionBlock", HW); var R4; (function(c) { c[c.None = 0] = "None", c[c.LoopID = 1] = "LoopID", c[c.InstanceID = 2] = "InstanceID"; })(R4 || (R4 = {})); class GW extends Bs { /** * Create a new RandomBlock * @param name defines the block name */ constructor(e) { super(e), this._currentLockId = -1, this.lockMode = R4.None, this.registerInput("min", We.AutoDetect), this.registerInput("max", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture), this._inputs[1].excludedConnectionPointTypes.push(We.Matrix), this._inputs[1].excludedConnectionPointTypes.push(We.Geometry), this._inputs[1].excludedConnectionPointTypes.push(We.Texture), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RandomBlock"; } /** * Gets the min input component */ get min() { return this._inputs[0]; } /** * Gets the max input component */ get max() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } autoConfigure() { if (!this.min.isConnected) { const e = new bc("Min"); e.value = 0, e.output.connectTo(this.min); } if (!this.max.isConnected) { const e = new bc("Max"); e.value = 1, e.output.connectTo(this.max); } } _buildBlock() { let e = null; switch (this._currentLockId = -1, this.min.type) { case We.Int: case We.Float: { e = (t) => { const i = this.min.getConnectedValue(t) || 0, r = this.max.getConnectedValue(t) || 0; return i + Math.random() * (r - i); }; break; } case We.Vector2: { e = (t) => { const i = this.min.getConnectedValue(t) || at.Zero(), r = this.max.getConnectedValue(t) || at.Zero(); return new at(i.x + Math.random() * (r.x - i.x), i.y + Math.random() * (r.y - i.y)); }; break; } case We.Vector3: { e = (t) => { const i = this.min.getConnectedValue(t) || D.Zero(), r = this.max.getConnectedValue(t) || D.Zero(); return new D(i.x + Math.random() * (r.x - i.x), i.y + Math.random() * (r.y - i.y), i.z + Math.random() * (r.z - i.z)); }; break; } case We.Vector4: { e = (t) => { const i = this.min.getConnectedValue(t) || Di.Zero(), r = this.max.getConnectedValue(t) || Di.Zero(); return new Di(i.x + Math.random() * (r.x - i.x), i.y + Math.random() * (r.y - i.y), i.z + Math.random() * (r.z - i.z), i.w + Math.random() * (r.w - i.w)); }; break; } } this.lockMode === R4.None || !e ? 
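/*
 * Illustrative note (not part of the library): when `lockMode` is LoopID or InstanceID, the wrapper
 * below caches the generated value and only draws a new random number when the contextual loop or
 * instance id changes, so every vertex of a given instance (or iteration of a given loop) sees the
 * same value. With lockMode None the closure chosen above runs on every evaluation. A minimal
 * sketch, assuming the names registered via Be(...):
 *
 *   const randomScale = new GW("random scale");   // GW is registered below as BABYLON.RandomBlock
 *   randomScale.lockMode = R4.InstanceID;         // R4 is the lock enum declared just above
 *   // min/max default to 0 and 1 through autoConfigure when left unconnected.
 */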
this.output._storedFunction = e : this.output._storedFunction = (t) => { let i = 0; switch (this.lockMode) { case R4.InstanceID: i = t.getContextualValue(Rn.InstanceID, !0) || 0; break; case R4.LoopID: i = t.getContextualValue(Rn.LoopID, !0) || 0; break; } return (this._currentLockId !== i || this.lockMode === R4.None) && (this._currentLockId = i, this.output._storedValue = e(t)), this.output._storedValue; }; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.lockMode = BABYLON.RandomBlockLocks.${R4[this.lockMode]}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.lockMode = this.lockMode, e; } _deserialize(e) { super._deserialize(e), this.lockMode = e.lockMode; } } F([ ir("LockMode", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "None", value: R4.None }, { label: "LoopID", value: R4.LoopID }, { label: "InstanceID", value: R4.InstanceID } ] }) ], GW.prototype, "lockMode", void 0); Be("BABYLON.RandomBlock", GW); class jse extends Bs { /** * Create a new NoiseBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("offset", We.Vector3, !0, D.Zero()), this.registerInput("scale", We.Float, !0, 1), this.registerInput("octaves", We.Float, !0, 2, 0, 16), this.registerInput("roughness", We.Float, !0, 0.5, 0, 1), this.registerOutput("output", We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "NoiseBlock"; } /** * Gets the offset input component */ get offset() { return this._inputs[0]; } /** * Gets the scale input component */ get scale() { return this._inputs[1]; } /** * Gets the octaves input component */ get octaves() { return this._inputs[2]; } /** * Gets the roughtness input component */ get roughness() { return this._inputs[3]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _negateIf(e, t) { return t !== 0 ? -e : e; } _noiseGrad(e, t, i, r) { const s = e & 15, n = s < 8 ? t : i, a = s === 12 || s == 14 ? t : r, l = s < 4 ? i : a; return this._negateIf(n, s & n) + this._negateIf(l, s & 2); } _fade(e) { return e * e * e * (e * (e * 6 - 15) + 10); } _hashBitRotate(e, t) { return e << t | e >> 32 - t; } _hash(e, t, i) { let r, s, n; return r = s = n = 3735928584, n += i, s += t, r += e, n ^= s, n -= this._hashBitRotate(s, 14), r ^= n, r -= this._hashBitRotate(n, 11), s ^= r, s -= this._hashBitRotate(r, 25), n ^= s, n -= this._hashBitRotate(s, 16), r ^= n, r -= this._hashBitRotate(n, 4), s ^= r, s -= this._hashBitRotate(r, 14), n ^= s, n -= this._hashBitRotate(s, 24), n; } _mix(e, t, i, r, s, n, a, l, o, u, h) { const d = 1 - o, f = 1 - u; return (1 - h) * (f * (e * d + t * o) + u * (i * d + r * o)) + h * (f * (s * d + n * o) + u * (a * d + l * o)); } _perlinNoise(e) { const t = (e.x | 0) - (e.x < 0 ? 1 : 0), i = (e.y | 0) - (e.y < 0 ? 1 : 0), r = (e.z | 0) - (e.z < 0 ? 
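/*
 * Illustrative note (not part of the library): this is a standard 3D Perlin noise evaluation
 * (ported from Blender, see the link in the noise() doc below). The point is split into an integer
 * lattice cell (the floor computed here) and a fractional part, the fractional part is eased with
 * the quintic fade curve t*t*t*(t*(t*6-15)+10), gradients are hashed at the eight cell corners, and
 * the eight contributions are trilinearly mixed. noise() then sums `octaves` copies of this signal,
 * doubling the frequency and scaling the amplitude by the clamped `roughness` at each octave, and
 * blends in the last fractional octave so the octave count does not have to be an integer.
 */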
1 : 0), s = e.x - t, n = e.y - i, a = e.z - r, l = this._fade(s), o = this._fade(n), u = this._fade(a); return this._mix(this._noiseGrad(this._hash(t, i, r), s, n, a), this._noiseGrad(this._hash(t + 1, i, r), s - 1, n, a), this._noiseGrad(this._hash(t, i + 1, r), s, n - 1, a), this._noiseGrad(this._hash(t + 1, i + 1, r), s - 1, n - 1, a), this._noiseGrad(this._hash(t, i, r + 1), s, n, a - 1), this._noiseGrad(this._hash(t + 1, i, r + 1), s - 1, n, a - 1), this._noiseGrad(this._hash(t, i + 1, r + 1), s, n - 1, a - 1), this._noiseGrad(this._hash(t + 1, i + 1, r + 1), s - 1, n - 1, a - 1), l, o, u); } _perlinSigned(e) { return this._perlinNoise(e) * 0.982; } _perlin(e) { return this._perlinSigned(e) / 2 + 0.5; } /** * Gets a perlin noise value * @param octaves * @param roughness * @param position * @returns a value between 0 and 1 * @see Based on https://github.com/blender/blender/blob/main/source/blender/blenlib/intern/noise.cc#L533 */ noise(e, t, i, r, s) { const n = new D(i.x * s + r.x, i.y * s + r.y, i.z * s + r.z); let a = 1, l = 1, o = 0, u = 0; e = yt.Clamp(e, 0, 15); const h = e | 0; for (let m = 0; m <= h; m++) { const _ = this._perlin(n.scale(a)); u += _ * l, o += l, l *= yt.Clamp(t, 0, 1), a *= 2; } const d = e - Math.floor(e); if (d == 0) return u / o; const f = this._perlin(n.scale(a)); let p = u + f * l; return u /= o, p /= o + l, (1 - d) * u + d * p; } _buildBlock() { this.output._storedFunction = (e) => { const t = e.getContextualValue(Rn.Positions), i = this.octaves.getConnectedValue(e), r = this.roughness.getConnectedValue(e), s = this.offset.getConnectedValue(e), n = this.scale.getConnectedValue(e); return this.noise(i, r, t, s, n); }; } } Be("BABYLON.NoiseBlock", jse); class KW extends Bs { /** * Create a new MergeGeometryBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.registerInput("geometry0", We.Geometry), this.registerInput("geometry1", We.Geometry, !0), this.registerInput("geometry2", We.Geometry, !0), this.registerInput("geometry3", We.Geometry, !0), this.registerInput("geometry4", We.Geometry, !0), this.registerOutput("output", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MergeGeometryBlock"; } /** * Gets the geometry0 input component */ get geometry0() { return this._inputs[0]; } /** * Gets the geometry1 input component */ get geometry1() { return this._inputs[1]; } /** * Gets the geometry2 input component */ get geometry2() { return this._inputs[2]; } /** * Gets the geometry3 input component */ get geometry3() { return this._inputs[3]; } /** * Gets the geometry4 input component */ get geometry4() { return this._inputs[4]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { let r = this.geometry0.getConnectedValue(i); const s = []; if (r) r = r.clone(); else return null; if (this.geometry1.isConnected) { const n = this.geometry1.getConnectedValue(i); n && s.push(n); } if (this.geometry2.isConnected) { const n = this.geometry2.getConnectedValue(i); n && s.push(n); } if (this.geometry3.isConnected) { const n = this.geometry3.getConnectedValue(i); n && s.push(n); } if (this.geometry4.isConnected) { const n = this.geometry4.getConnectedValue(i); n && s.push(n); } return s.length && r && (r = r.merge(s, !0, !1, !0, !0)), r; }; this.evaluateContext ? 
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? "true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], KW.prototype, "evaluateContext", void 0); Be("BABYLON.MergeGeometryBlock", KW); class WW extends Bs { /** * Create a new GeometryCollectionBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry0", We.Geometry, !0), this.registerInput("geometry1", We.Geometry, !0), this.registerInput("geometry2", We.Geometry, !0), this.registerInput("geometry3", We.Geometry, !0), this.registerInput("geometry4", We.Geometry, !0), this.registerInput("geometry5", We.Geometry, !0), this.registerInput("geometry6", We.Geometry, !0), this.registerInput("geometry7", We.Geometry, !0), this.registerInput("geometry8", We.Geometry, !0), this.registerInput("geometry9", We.Geometry, !0), this.registerOutput("output", We.Geometry), this._outputs[0]._typeConnectionSource = this._inputs[0], this._linkConnectionTypes(0, 1); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryCollectionBlock"; } /** * Gets the geometry0 input component */ get geometry0() { return this._inputs[0]; } /** * Gets the geometry1 input component */ get geometry1() { return this._inputs[1]; } /** * Gets the geometry2 input component */ get geometry2() { return this._inputs[2]; } /** * Gets the geometry3 input component */ get geometry3() { return this._inputs[3]; } /** * Gets the geometry4 input component */ get geometry4() { return this._inputs[4]; } /** * Gets the geometry5 input component */ get geometry5() { return this._inputs[5]; } /** * Gets the geometry6 input component */ get geometry6() { return this._inputs[6]; } /** * Gets the geometry7 input component */ get geometry7() { return this._inputs[7]; } /** * Gets the geometry8 input component */ get geometry8() { return this._inputs[8]; } /** * Gets the geometry9 input component */ get geometry9() { return this._inputs[9]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _storeGeometry(e, t, i, r) { if (e.isConnected) { const s = e.getConnectedValue(t); if (!s) return; s.metadata = s.metadata || {}, s.metadata.collectionId = i, r.push(s); } } _buildBlock(e) { const t = (i) => { const r = []; return this._storeGeometry(this.geometry0, i, 0, r), this._storeGeometry(this.geometry1, i, 1, r), this._storeGeometry(this.geometry2, i, 2, r), this._storeGeometry(this.geometry3, i, 3, r), this._storeGeometry(this.geometry4, i, 4, r), this._storeGeometry(this.geometry5, i, 5, r), this._storeGeometry(this.geometry6, i, 6, r), this._storeGeometry(this.geometry7, i, 7, r), this._storeGeometry(this.geometry8, i, 8, r), this._storeGeometry(this.geometry9, i, 9, r), r.length ? r[Math.round(Math.random() * (r.length - 1))] : null; }; this.evaluateContext ? 
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? "true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext; } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], WW.prototype, "evaluateContext", void 0); Be("BABYLON.GeometryCollectionBlock", WW); class Xse extends Bs { /** * Creates a new GeometryElbowBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("input", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the time spent to build this block (in ms) */ get buildExecutionTime() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryElbowBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); const t = this._outputs[0], i = this._inputs[0]; t._storedFunction = (r) => i.getConnectedValue(r); } } Be("BABYLON.GeometryElbowBlock", Xse); class Yse extends Bs { /** * Creates a new ComputeNormalsBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("geometry", We.Geometry), this.registerOutput("output", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ComputeNormalsBlock"; } /** * Gets the geometry component */ get geometry() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock() { this.output._storedFunction = (e) => { if (!this.geometry.isConnected) return null; const t = this.geometry.getConnectedValue(e); return t.normals || (t.normals = []), Ot.ComputeNormals(t.positions, t.indices, t.normals), t; }; } } Be("BABYLON.ComputeNormalsBlock", Yse); class Qse extends Bs { /** * Create a new VectorConverterBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("xyzw ", We.Vector4, !0), this.registerInput("xyz ", We.Vector3, !0), this.registerInput("xy ", We.Vector2, !0), this.registerInput("zw ", We.Vector2, !0), this.registerInput("x ", We.Float, !0), this.registerInput("y ", We.Float, !0), this.registerInput("z ", We.Float, !0), this.registerInput("w ", We.Float, !0), this.registerOutput("xyzw", We.Vector4), this.registerOutput("xyz", We.Vector3), this.registerOutput("xy", We.Vector2), this.registerOutput("zw", We.Vector2), this.registerOutput("x", We.Float), this.registerOutput("y", We.Float), this.registerOutput("z", We.Float), this.registerOutput("w", We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "VectorConverterBlock"; } /** * Gets the xyzw component (input) */ get xyzwIn() { return this._inputs[0]; } /** * Gets the xyz component (input) */ get xyzIn() { return this._inputs[1]; } /** * Gets the xy component (input) */ get xyIn() { return this._inputs[2]; } /** * Gets the zw component (input) */ get zwIn() { return this._inputs[3]; } /** * Gets the x component (input) */ get 
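/*
 * Illustrative note (not part of the library): VectorConverterBlock assembles a single Vector4 from
 * whichever inputs are connected (a connected xyzw input wins outright; otherwise the individual
 * x/y/z/w floats are read first and then xy, zw and xyz overwrite the components they carry, in
 * that order) and exposes the result through swizzled outputs: xyzwOut, xyzOut, xyOut, zwOut and
 * the individual floats. The trailing space in the registered input names ("xyzw ", "x ", ...) is
 * what keeps them distinct from the identically named outputs; _inputRename and _outputRename map
 * both back to the getter names used here.
 */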
xIn() { return this._inputs[4]; } /** * Gets the y component (input) */ get yIn() { return this._inputs[5]; } /** * Gets the z component (input) */ get zIn() { return this._inputs[6]; } /** * Gets the w component (input) */ get wIn() { return this._inputs[7]; } /** * Gets the xyzw component (output) */ get xyzwOut() { return this._outputs[0]; } /** * Gets the xyz component (output) */ get xyzOut() { return this._outputs[1]; } /** * Gets the xy component (output) */ get xyOut() { return this._outputs[2]; } /** * Gets the zw component (output) */ get zwOut() { return this._outputs[3]; } /** * Gets the x component (output) */ get xOut() { return this._outputs[4]; } /** * Gets the y component (output) */ get yOut() { return this._outputs[5]; } /** * Gets the z component (output) */ get zOut() { return this._outputs[6]; } /** * Gets the w component (output) */ get wOut() { return this._outputs[7]; } _inputRename(e) { return e === "xyzw " ? "xyzwIn" : e === "xyz " ? "xyzIn" : e === "xy " ? "xyIn" : e === "zw " ? "zwIn" : e === "x " ? "xIn" : e === "y " ? "yIn" : e === "z " ? "zIn" : e === "w " ? "wIn" : e; } _outputRename(e) { switch (e) { case "x": return "xOut"; case "y": return "yOut"; case "z": return "zOut"; case "w": return "wOut"; case "xy": return "xyOut"; case "zw": return "zwOut"; case "xyz": return "xyzOut"; case "xyzw": return "xyzwOut"; default: return e; } } _buildBlock(e) { super._buildBlock(e); const t = this.xIn, i = this.yIn, r = this.zIn, s = this.wIn, n = this.xyIn, a = this.zwIn, l = this.xyzIn, o = this.xyzwIn, u = this.xyzwOut, h = this.xyzOut, d = this.xyOut, f = this.zwOut, p = this.xOut, m = this.yOut, _ = this.zOut, v = this.wOut, C = (x) => { if (o.isConnected) return o.getConnectedValue(x); let b = 0, S = 0, M = 0, R = 0; if (t.isConnected && (b = t.getConnectedValue(x)), i.isConnected && (S = i.getConnectedValue(x)), r.isConnected && (M = r.getConnectedValue(x)), s.isConnected && (R = s.getConnectedValue(x)), n.isConnected) { const w = n.getConnectedValue(x); w && (b = w.x, S = w.y); } if (a.isConnected) { const w = a.getConnectedValue(x); w && (M = w.x, R = w.y); } if (l.isConnected) { const w = l.getConnectedValue(x); w && (b = w.x, S = w.y, M = w.z); } return new Di(b, S, M, R); }; u._storedFunction = (x) => C(x), h._storedFunction = (x) => { const b = C(x); return new D(b.x, b.y, b.z); }, d._storedFunction = (x) => { const b = C(x); return new at(b.x, b.y); }, f._storedFunction = (x) => { const b = C(x); return new at(b.z, b.w); }, p._storedFunction = (x) => C(x).x, m._storedFunction = (x) => C(x).y, _._storedFunction = (x) => C(x).z, v._storedFunction = (x) => C(x).w; } } Be("BABYLON.VectorConverterBlock", Qse); class $se extends Bs { /** * Creates a new NormalizeVectorBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("input", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._inputs[0].excludedConnectionPointTypes.push(We.Float), this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture), this._outputs[0]._typeConnectionSource = this._inputs[0]; } /** * Gets the current class name * @returns the class name */ getClassName() { return "NormalizeVectorBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (super._buildBlock(e), 
this.output._storedFunction = null, !this.input.isConnected) { this.output._storedValue = null; return; } this.output._storedFunction = (t) => this.input.getConnectedValue(t).normalize(); } } Be("BABYLON.NormalizeVectorBlock", $se); class jW extends Bs { /** * Create a new SetMaterialIDBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("id", We.Int, !0, 0), this.registerOutput("output", We.Geometry), this.id.acceptedConnectionPointTypes.push(We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "SetMaterialIDBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the id input component */ get id() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (!this.geometry.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } const t = (i) => { const r = this.geometry.getConnectedValue(i); if (!r || !r.indices || !r.positions) return r; const s = new H9(); return s.materialIndex = this.id.getConnectedValue(i) | 0, s.indexStart = 0, s.indexCount = r.indices.length, s.verticesStart = 0, s.verticesCount = r.positions.length / 3, r.materialInfos = [s], r; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? "true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], jW.prototype, "evaluateContext", void 0); Be("BABYLON.SetMaterialIDBlock", jW); var Mn; (function(c) { c[c.Cos = 0] = "Cos", c[c.Sin = 1] = "Sin", c[c.Abs = 2] = "Abs", c[c.Exp = 3] = "Exp", c[c.Round = 4] = "Round", c[c.Floor = 5] = "Floor", c[c.Ceiling = 6] = "Ceiling", c[c.Sqrt = 7] = "Sqrt", c[c.Log = 8] = "Log", c[c.Tan = 9] = "Tan", c[c.ArcTan = 10] = "ArcTan", c[c.ArcCos = 11] = "ArcCos", c[c.ArcSin = 12] = "ArcSin", c[c.Sign = 13] = "Sign", c[c.Negate = 14] = "Negate", c[c.OneMinus = 15] = "OneMinus", c[c.Reciprocal = 16] = "Reciprocal", c[c.ToDegrees = 17] = "ToDegrees", c[c.ToRadians = 18] = "ToRadians"; })(Mn || (Mn = {})); class XW extends Bs { /** * Creates a new GeometryTrigonometryBlock * @param name defines the block name */ constructor(e) { super(e), this.operation = Mn.Cos, this.registerInput("input", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryTrigonometryBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e); let t = null; switch 
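/*
 * Illustrative note (not part of the library): GeometryTrigonometryBlock picks a scalar function
 * from the operations enum (Cos, Sin, Abs, ... ToRadians) in the switch below and, depending on the
 * input type, applies it either directly to the float/int or component-wise to a Vector2/3/4, e.g.
 * a Vector3 input produces a new Vector3 built from f(x), f(y), f(z).
 */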
(this.operation) { case Mn.Cos: { t = (i) => Math.cos(i); break; } case Mn.Sin: { t = (i) => Math.sin(i); break; } case Mn.Abs: { t = (i) => Math.abs(i); break; } case Mn.Exp: { t = (i) => Math.exp(i); break; } case Mn.Round: { t = (i) => Math.round(i); break; } case Mn.Floor: { t = (i) => Math.floor(i); break; } case Mn.Ceiling: { t = (i) => Math.ceil(i); break; } case Mn.Sqrt: { t = (i) => Math.sqrt(i); break; } case Mn.Log: { t = (i) => Math.log(i); break; } case Mn.Tan: { t = (i) => Math.tan(i); break; } case Mn.ArcTan: { t = (i) => Math.atan(i); break; } case Mn.ArcCos: { t = (i) => Math.acos(i); break; } case Mn.ArcSin: { t = (i) => Math.asin(i); break; } case Mn.Sign: { t = (i) => Math.sign(i); break; } case Mn.Negate: { t = (i) => -i; break; } case Mn.OneMinus: { t = (i) => 1 - i; break; } case Mn.Reciprocal: { t = (i) => 1 / i; break; } case Mn.ToRadians: { t = (i) => i * Math.PI / 180; break; } case Mn.ToDegrees: { t = (i) => i * 180 / Math.PI; break; } } if (!t) { this.input._storedFunction = null, this.input._storedValue = null; return; } switch (this.input.type) { case We.Int: case We.Float: { this.output._storedFunction = (i) => { const r = this.input.getConnectedValue(i); return t(r); }; break; } case We.Vector2: { this.output._storedFunction = (i) => { const r = this.input.getConnectedValue(i); return new at(t(r.x), t(r.y)); }; break; } case We.Vector3: { this.output._storedFunction = (i) => { const r = this.input.getConnectedValue(i); return new D(t(r.x), t(r.y), t(r.z)); }; break; } case We.Vector4: { this.output._storedFunction = (i) => { const r = this.input.getConnectedValue(i); return new Di(t(r.x), t(r.y), t(r.z), t(r.w)); }; break; } } return this; } serialize() { const e = super.serialize(); return e.operation = this.operation, e; } _deserialize(e) { super._deserialize(e), this.operation = e.operation; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.operation = BABYLON.GeometryTrigonometryBlockOperations.${Mn[this.operation]}; `; } } F([ ir("Operation", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "Cos", value: Mn.Cos }, { label: "Sin", value: Mn.Sin }, { label: "Abs", value: Mn.Abs }, { label: "Exp", value: Mn.Exp }, { label: "Round", value: Mn.Round }, { label: "Floor", value: Mn.Floor }, { label: "Ceiling", value: Mn.Ceiling }, { label: "Sqrt", value: Mn.Sqrt }, { label: "Log", value: Mn.Log }, { label: "Tan", value: Mn.Tan }, { label: "ArcTan", value: Mn.ArcTan }, { label: "ArcCos", value: Mn.ArcCos }, { label: "ArcSin", value: Mn.ArcSin }, { label: "Sign", value: Mn.Sign }, { label: "Negate", value: Mn.Negate }, { label: "OneMinus", value: Mn.OneMinus }, { label: "Reciprocal", value: Mn.Reciprocal }, { label: "ToDegrees", value: Mn.ToDegrees }, { label: "ToRadians", value: Mn.ToRadians } ] }) ], XW.prototype, "operation", void 0); Be("BABYLON.GeometryTrigonometryBlock", XW); class YW extends Bs { /** * Create a new GeometryTransformBlock * @param name defines the block name */ constructor(e) { super(e), this._rotationMatrix = new Ae(), this._scalingMatrix = new Ae(), this._translationMatrix = new Ae(), this._scalingRotationMatrix = new Ae(), this._transformMatrix = new Ae(), this.evaluateContext = !0, this.registerInput("value", We.AutoDetect), this.registerInput("matrix", We.Matrix, !0), this.registerInput("translation", We.Vector3, !0, D.Zero()), this.registerInput("rotation", We.Vector3, !0, D.Zero()), this.registerInput("scaling", We.Vector3, !0, D.One()), this.registerOutput("output", 
We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(We.Float), this._inputs[0].excludedConnectionPointTypes.push(We.Matrix), this._inputs[0].excludedConnectionPointTypes.push(We.Texture); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryTransformBlock"; } /** * Gets the value input component */ get value() { return this._inputs[0]; } /** * Gets the matrix input component */ get matrix() { return this._inputs[1]; } /** * Gets the translation input component */ get translation() { return this._inputs[2]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[3]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[4]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (!this.value.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } const t = (i) => { const r = this.value.getConnectedValue(i); if (!r) return null; let s; if (this.matrix.isConnected) s = this.matrix.getConnectedValue(i); else { const n = this.scaling.getConnectedValue(i), a = this.rotation.getConnectedValue(i), l = this.translation.getConnectedValue(i); Ae.ScalingToRef(n.x, n.y, n.z, this._scalingMatrix), Ae.RotationYawPitchRollToRef(a.y, a.x, a.z, this._rotationMatrix), Ae.TranslationToRef(l.x, l.y, l.z, this._translationMatrix), this._scalingMatrix.multiplyToRef(this._rotationMatrix, this._scalingRotationMatrix), this._scalingRotationMatrix.multiplyToRef(this._translationMatrix, this._transformMatrix), s = this._transformMatrix; } switch (this.value.type) { case We.Geometry: { const n = r.clone(); return n.transform(s), n; } case We.Vector2: return at.Transform(r, s); case We.Vector3: return D.TransformCoordinates(r, s); case We.Vector4: return Di.TransformCoordinates(r, s); } return null; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
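/*
 * Illustrative note (not part of the library): when no matrix is wired in, GeometryTransformBlock
 * composes its own TRS matrix in _buildBlock above as scaling, then rotation (built from the Euler
 * input through RotationYawPitchRoll, i.e. y, x, z order), then translation, and applies it to the
 * connected value: geometry is cloned and transformed in place, Vector2 goes through Transform,
 * Vector3 through TransformCoordinates and Vector4 through its TransformCoordinates equivalent.
 * Connecting the matrix input bypasses the translation/rotation/scaling inputs entirely.
 */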
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], YW.prototype, "evaluateContext", void 0); Be("BABYLON.GeometryTransformBlock", YW); class Zse extends Bs { /** * Create a new RotationXBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("angle", We.Float, !1, 0), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RotationXBlock"; } /** * Gets the angle input component */ get angle() { return this._inputs[0]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } autoConfigure() { if (!this.angle.isConnected) { const e = new bc("Angle"); e.value = 0, e.output.connectTo(this.angle); } } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => Ae.RotationX(this.angle.getConnectedValue(t)); } } Be("BABYLON.RotationXBlock", Zse); class qse extends Bs { /** * Create a new RotationYBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("angle", We.Float, !1, 0), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RotationYBlock"; } /** * Gets the angle input component */ get angle() { return this._inputs[0]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } autoConfigure() { if (!this.angle.isConnected) { const e = new bc("Angle"); e.value = 0, e.output.connectTo(this.angle); } } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => Ae.RotationY(this.angle.getConnectedValue(t)); } } Be("BABYLON.RotationYBlock", qse); class Jse extends Bs { /** * Create a new RotationZBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("angle", We.Float, !1, 0), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "RotationZBlock"; } /** * Gets the angle input component */ get angle() { return this._inputs[0]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } autoConfigure() { if (!this.angle.isConnected) { const e = new bc("Angle"); e.value = 0, e.output.connectTo(this.angle); } } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => Ae.RotationZ(this.angle.getConnectedValue(t)); } } Be("BABYLON.RotationZBlock", Jse); class ene extends Bs { /** * Create a new ScalingBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("scale", We.Vector3, !1, D.One()), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "ScalingBlock"; } /** * Gets the scale input component */ get scale() { return this._inputs[0]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } autoConfigure() { if (!this.scale.isConnected) { const e = new bc("Scale"); e.value = new D(1, 1, 1), e.output.connectTo(this.scale); } } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => { const i = 
this.scale.getConnectedValue(t); return Ae.Scaling(i.x, i.y, i.z); }; } } Be("BABYLON.ScalingBlock", ene); class tne extends Bs { /** * Create a new AlignBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("source", We.Vector3, !0, D.Up()), this.registerInput("target", We.Vector3, !0, D.Left()), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "AlignBlock"; } /** * Gets the source input component */ get source() { return this._inputs[0]; } /** * Gets the target input component */ get target() { return this._inputs[1]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => { const i = this.source.getConnectedValue(t).clone(), r = this.target.getConnectedValue(t).clone(), s = new Ae(); return i.normalize(), r.normalize(), Ae.RotationAlignToRef(i, r, s, !0), s; }; } } Be("BABYLON.AlignBlock", tne); class ine extends Bs { /** * Create a new TranslationBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("translation", We.Vector3, !1, D.Zero()), this.registerOutput("matrix", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TranslationBlock"; } /** * Gets the translation input component */ get translation() { return this._inputs[0]; } /** * Gets the matrix output component */ get matrix() { return this._outputs[0]; } autoConfigure() { if (!this.translation.isConnected) { const e = new bc("Translation"); e.value = new D(0, 0, 0), e.output.connectTo(this.translation); } } _buildBlock(e) { super._buildBlock(e), this.matrix._storedFunction = (t) => { const i = this.translation.getConnectedValue(t); return Ae.Translation(i.x, i.y, i.z); }; } } Be("BABYLON.TranslationBlock", ine); class kU extends Bs { /** * Create a new InstantiateOnVerticesBlock * @param name defines the block name */ constructor(e) { super(e), this._indexTranslation = null, this.evaluateContext = !0, this.removeDuplicatedPositions = !0, this.registerInput("geometry", We.Geometry), this.registerInput("instance", We.Geometry, !0), this.registerInput("density", We.Float, !0, 1, 0, 1), this.registerInput("matrix", We.Matrix, !0), this.registerInput("rotation", We.Vector3, !0, D.Zero()), this.registerInput("scaling", We.Vector3, !0, D.One()), this.scaling.acceptedConnectionPointTypes.push(We.Float), this.registerOutput("output", We.Geometry); } /** * Gets the current instance index in the current flow * @returns the current index */ getInstanceIndex() { return this._currentLoopIndex; } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._indexTranslation ? 
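/*
 * Illustrative note (not part of the library): _indexTranslation is built when
 * removeDuplicatedPositions is on; vertices whose position lies within epsilon of an already kept
 * one are skipped, and the map translates the compacted loop index back to the original vertex
 * index returned here. In the main loop each kept vertex clones the connected instance geometry,
 * optionally drops it when a density below 1 fails the random test, places it either with the
 * connected matrix or with the rotation/scaling inputs, and all clones are finally merged into a
 * single vertex data.
 */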
this._indexTranslation[this._currentIndex] : this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentLoopIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateOnVerticesBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the instance input component */ get instance() { return this._inputs[1]; } /** * Gets the density input component */ get density() { return this._inputs[2]; } /** * Gets the matrix input component */ get matrix() { return this._inputs[3]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[4]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[5]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), i.pushInstancingContext(this), this._vertexData = this.geometry.getConnectedValue(i), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions || !this.instance.isConnected) { i.restoreExecutionContext(), i.restoreInstancingContext(), i.restoreGeometryContext(), this.output._storedValue = null; return; } let r = this._vertexData.positions.length / 3; const s = [], n = new D(), a = []; let l = this._vertexData.positions; if (this._currentLoopIndex = 0, this.removeDuplicatedPositions) { for (this._indexTranslation = {}, this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const o = l[this._currentIndex * 3], u = l[this._currentIndex * 3 + 1], h = l[this._currentIndex * 3 + 2]; let d = !1; for (let f = 0; f < a.length; f += 3) if (Math.abs(a[f] - o) < Sr && Math.abs(a[f + 1] - u) < Sr && Math.abs(a[f + 2] - h) < Sr) { d = !0; break; } d || (this._indexTranslation[a.length / 3] = this._currentIndex, a.push(o, u, h)); } l = a, r = l.length / 3; } else this._indexTranslation = null; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const o = this.instance.getConnectedValue(i); if (!o || !o.positions || o.positions.length === 0) continue; const u = this.density.getConnectedValue(i); if (u < 1 && Math.random() > u) continue; n.fromArray(l, this._currentIndex * 3); const h = o.clone(); if (this.matrix.isConnected) { const d = this.matrix.getConnectedValue(i); i._instantiateWithPositionAndMatrix(h, n, d, s); } else { const d = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly), f = this.rotation.getConnectedValue(i) || D.ZeroReadOnly; i._instantiate(h, n, f, d, s); } this._currentLoopIndex++; } if (i.restoreGeometryContext(), i.restoreExecutionContext(), i.restoreInstancingContext(), s.length) if (s.length === 1) this._vertexData = s[0]; else { const o = s.splice(0, 1)[0]; this._vertexData = o.merge(s, !0, !1, !0, !0); } else return null; return this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.removeDuplicatedPositions = ${this.removeDuplicatedPositions ? "true" : "false"}; `; return e += `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
"true" : "false"}; `, e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.removeDuplicatedPositions = this.removeDuplicatedPositions, e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), this.removeDuplicatedPositions = e.removeDuplicatedPositions, e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], kU.prototype, "evaluateContext", void 0); F([ ir("Remove duplicated positions", $i.Boolean, "ADVANCED", { notifiers: { update: !0 } }) ], kU.prototype, "removeDuplicatedPositions", void 0); Be("BABYLON.InstantiateOnVerticesBlock", kU); class QW extends Bs { /** * Create a new InstantiateOnFacesBlock * @param name defines the block name */ constructor(e) { super(e), this._currentPosition = new D(), this._currentUV = new at(), this._vertex0 = new D(), this._vertex1 = new D(), this._vertex2 = new D(), this._tempVector0 = new D(), this._tempVector1 = new D(), this._uv0 = new at(), this._uv1 = new at(), this._uv2 = new at(), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("instance", We.Geometry, !0), this.registerInput("count", We.Int, !0, 256), this.registerInput("matrix", We.Matrix, !0), this.registerInput("rotation", We.Vector3, !0, D.Zero()), this.registerInput("scaling", We.Vector3, !0, D.One()), this.scaling.acceptedConnectionPointTypes.push(We.Float), this.registerOutput("output", We.Geometry); } /** * Gets the current instance index in the current flow * @returns the current index */ getInstanceIndex() { return this._currentLoopIndex; } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return 0; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return this._currentFaceIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentLoopIndex; } /** * Gets the value associated with a contextual positions * @returns the value associated with the source */ getOverridePositionsContextualValue() { return this._currentPosition; } /** * Gets the value associated with a contextual normals * @returns the value associated with the source */ getOverrideNormalsContextualValue() { return this._vertex1.subtractToRef(this._vertex0, this._tempVector0), this._vertex2.subtractToRef(this._vertex1, this._tempVector1), this._tempVector0.normalize(), this._tempVector1.normalize(), D.Cross(this._tempVector1, this._tempVector0); } /** * Gets the value associated with a contextual UV1 se * @returns the value associated with the source */ getOverrideUVs1ContextualValue() { return this._currentUV; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateOnFacesBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the instance input component */ get instance() { return this._inputs[1]; } /** * Gets the count input component */ get count() { return this._inputs[2]; } /** * Gets the matrix input component */ get matrix() { return this._inputs[3]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[4]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[5]; } /** * Gets the geometry 
output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), i.pushInstancingContext(this), this._vertexData = this.geometry.getConnectedValue(i), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions || !this._vertexData.indices || !this.instance.isConnected) { i.restoreExecutionContext(), i.restoreInstancingContext(), i.restoreGeometryContext(), this.output._storedValue = null; return; } let r = null; const s = this.count.getConnectedValue(i), n = this._vertexData.indices.length / 3, a = s / n; let l = 0; const o = []; let u = 0; for (this._currentLoopIndex = 0, this._currentFaceIndex = 0; this._currentFaceIndex < n; this._currentFaceIndex++) { l += a; const h = (l | 0) - u; if (h < 1) continue; const d = this._vertexData.indices[this._currentFaceIndex * 3], f = this._vertexData.indices[this._currentFaceIndex * 3 + 1], p = this._vertexData.indices[this._currentFaceIndex * 3 + 2]; this._vertex0.fromArray(this._vertexData.positions, d * 3), this._vertex1.fromArray(this._vertexData.positions, f * 3), this._vertex2.fromArray(this._vertexData.positions, p * 3), this._vertexData.uvs && (this._uv0.fromArray(this._vertexData.uvs, d * 2), this._uv1.fromArray(this._vertexData.uvs, f * 2), this._uv2.fromArray(this._vertexData.uvs, p * 2)); for (let m = 0; m < h && !(u >= s); m++) { let _ = Math.random(), v = Math.random(); if (_ > v) { const M = _; _ = v, v = M; } const C = _, x = v - _, b = 1 - C - x; if (this._currentPosition.set(C * this._vertex0.x + x * this._vertex1.x + b * this._vertex2.x, C * this._vertex0.y + x * this._vertex1.y + b * this._vertex2.y, C * this._vertex0.z + x * this._vertex1.z + b * this._vertex2.z), this._vertexData.uvs && this._currentUV.set(C * this._uv0.x + x * this._uv1.x + b * this._uv2.x, C * this._uv0.y + x * this._uv1.y + b * this._uv2.y), r = this.instance.getConnectedValue(i), !r || !r.positions || r.positions.length === 0) { l -= a; continue; } const S = r.clone(); if (this.matrix.isConnected) { const M = this.matrix.getConnectedValue(i); i._instantiateWithPositionAndMatrix(S, this._currentPosition, M, o); } else { const M = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly), R = this.rotation.getConnectedValue(i) || D.ZeroReadOnly; i._instantiate(S, this._currentPosition, R, M, o); } u++, this._currentLoopIndex++; } } if (o.length) if (o.length === 1) this._vertexData = o[0]; else { const h = o.splice(0, 1)[0]; this._vertexData = h.merge(o, !0, !1, !0, !0); } return i.restoreExecutionContext(), i.restoreInstancingContext(), i.restoreGeometryContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
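/* Editor's note (illustrative sketch, not part of the original bundle): InstantiateOnVerticesBlock clones
   the `instance` geometry once per vertex of `geometry`, optionally de-duplicating coincident positions
   and randomly skipping vertices when `density` < 1. The primitive block names and their `geometry`
   output port names below are assumptions:

   const host = new BABYLON.SphereBlock("host");              // geometry whose vertices receive instances
   const leaf = new BABYLON.BoxBlock("leaf");                 // geometry to instance
   const scatter = new BABYLON.InstantiateOnVerticesBlock("scatter");
   host.geometry.connectTo(scatter.geometry);
   leaf.geometry.connectTo(scatter.instance);
   const out = new BABYLON.GeometryOutputBlock("out");
   scatter.output.connectTo(out.geometry);                    // assumed input name on the output block
*/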
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], QW.prototype, "evaluateContext", void 0); Be("BABYLON.InstantiateOnFacesBlock", QW); class $W extends Bs { /** * Create a new InstantiateOnVolumeBlock * @param name defines the block name */ constructor(e) { super(e), this._currentPosition = new D(), this._vertex0 = new D(), this._vertex1 = new D(), this._vertex2 = new D(), this.evaluateContext = !0, this.registerInput("geometry", We.Geometry), this.registerInput("instance", We.Geometry, !0), this.registerInput("count", We.Int, !0, 256), this.registerInput("matrix", We.Matrix, !0), this.registerInput("rotation", We.Vector3, !0, D.Zero()), this.registerInput("scaling", We.Vector3, !0, D.One()), this.scaling.acceptedConnectionPointTypes.push(We.Float), this.registerOutput("output", We.Geometry); } /** * Gets the current instance index in the current flow * @returns the current index */ getInstanceIndex() { return this._currentLoopIndex; } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return 0; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentLoopIndex; } /** * Gets the value associated with a contextual positions * @returns the value associated with the source */ getOverridePositionsContextualValue() { return this._currentPosition; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateOnVolumeBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the instance input component */ get instance() { return this._inputs[1]; } /** * Gets the count input component */ get count() { return this._inputs[2]; } /** * Gets the matrix input component */ get matrix() { return this._inputs[3]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[4]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[5]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { if (i.pushExecutionContext(this), i.pushInstancingContext(this), this._vertexData = this.geometry.getConnectedValue(i), i.pushGeometryContext(this._vertexData), !this._vertexData || !this._vertexData.positions || !this._vertexData.indices || !this.instance.isConnected) { i.restoreExecutionContext(), i.restoreInstancingContext(), i.restoreGeometryContext(), this.output._storedValue = null; return; } let r = null; const s = this.count.getConnectedValue(i), n = [], a = kO(this._vertexData.positions, 0, this._vertexData.positions.length / 3), l = a.minimum, o = a.maximum, u = new D(1, 0, 0), h = this._vertexData.indices.length / 3; this._currentLoopIndex = 0; for (let d = 0; d < s; d++) { this._currentPosition.set(Math.random() * (o.x - l.x) + l.x, Math.random() * (o.y - l.y) + l.y, Math.random() * (o.z - l.z) + l.z); const f = new gs(this._currentPosition, u); let p = 0; for (let _ = 0; _ < h; _++) { 
this._vertex0.fromArray(this._vertexData.positions, this._vertexData.indices[_ * 3] * 3), this._vertex1.fromArray(this._vertexData.positions, this._vertexData.indices[_ * 3 + 1] * 3), this._vertex2.fromArray(this._vertexData.positions, this._vertexData.indices[_ * 3 + 2] * 3); const v = f.intersectsTriangle(this._vertex0, this._vertex1, this._vertex2); v && v.distance > 0 && p++; } if (p % 2 === 0) { d--; continue; } if (r = this.instance.getConnectedValue(i), !r || !r.positions || r.positions.length === 0) continue; const m = r.clone(); if (this.matrix.isConnected) { const _ = this.matrix.getConnectedValue(i); i._instantiateWithPositionAndMatrix(m, this._currentPosition, _, n); } else { const _ = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly), v = this.rotation.getConnectedValue(i) || D.ZeroReadOnly; i._instantiate(m, this._currentPosition, v, _, n); } this._currentLoopIndex++; } if (n.length) if (n.length === 1) this._vertexData = n[0]; else { const d = n.splice(0, 1)[0]; this._vertexData = d.merge(n, !0, !1, !0, !0); } return i.restoreGeometryContext(), i.restoreExecutionContext(), i.restoreInstancingContext(), this._vertexData; }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? "true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], $W.prototype, "evaluateContext", void 0); Be("BABYLON.InstantiateOnVolumeBlock", $W); class zU extends Bs { /** * Create a new InstantiateBaseBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !0, this.registerInput("instance", We.Geometry, !0), this.registerInput("count", We.Int, !0, 1), this.registerOutput("output", We.Geometry); } /** * Gets the current instance index in the current flow * @returns the current index */ getInstanceIndex() { return this._currentIndex; } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateBaseBlock"; } /** * Gets the instance input component */ get instance() { return this._inputs[0]; } /** * Gets the count input component */ get count() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
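/* Editor's note (illustrative, not part of the original bundle): InstantiateOnFacesBlock spreads `count`
   instances over the triangles of the input geometry, picking a random barycentric point (and interpolated
   UV) per instance; InstantiateOnVolumeBlock rejection-samples points in the bounding box and keeps only
   those whose +X ray crosses the mesh an odd number of times, i.e. points inside a closed volume.

   const onFaces = new BABYLON.InstantiateOnFacesBlock("onFaces");     // `count` defaults to 256
   const inVolume = new BABYLON.InstantiateOnVolumeBlock("inVolume");  // input geometry must be closed
*/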
"true" : "false"}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e; } _deserialize(e) { super._deserialize(e), e.evaluateContext !== void 0 && (this.evaluateContext = e.evaluateContext); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], zU.prototype, "evaluateContext", void 0); class rne extends zU { /** * Create a new InstantiateBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("matrix", We.Matrix, !0), this.registerInput("position", We.Vector3, !0, D.Zero()), this.registerInput("rotation", We.Vector3, !0, D.Zero()), this.registerInput("scaling", We.Vector3, !0, D.One()), this.scaling.acceptedConnectionPointTypes.push(We.Float); } /** * Gets the current instance index in the current flow * @returns the current index */ getInstanceIndex() { return this._currentIndex; } /** * Gets the current index in the current flow * @returns the current index */ getExecutionIndex() { return this._currentIndex; } /** * Gets the current loop index in the current flow * @returns the current loop index */ getExecutionLoopIndex() { return this._currentIndex; } /** * Gets the current face index in the current flow * @returns the current face index */ getExecutionFaceIndex() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateBlock"; } /** * Gets the matrix input component */ get matrix() { return this._inputs[2]; } /** * Gets the position input component */ get position() { return this._inputs[3]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[4]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[5]; } _buildBlock(e) { const t = (i) => { i.pushExecutionContext(this), i.pushInstancingContext(this); const r = this.count.getConnectedValue(i), s = []; for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const n = this.instance.getConnectedValue(i); if (!n || !n.positions || n.positions.length === 0) continue; const a = n.clone(); if (this.matrix.isConnected) { const l = this.matrix.getConnectedValue(i); i._instantiateWithMatrix(a, l, s); } else { const l = this.position.getConnectedValue(i) || D.ZeroReadOnly, o = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly), u = this.rotation.getConnectedValue(i) || D.ZeroReadOnly; i._instantiate(a, l, u, o, s); } } if (s.length) if (s.length === 1) this._vertexData = s[0]; else { const n = s.splice(0, 1)[0]; this._vertexData = n.merge(s, !0, !1, !0, !0); } return i.restoreExecutionContext(), i.restoreInstancingContext(), this._vertexData; }; this.evaluateContext ? 
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } } Be("BABYLON.InstantiateBlock", rne); class sne extends zU { /** * Create a new Instantiate Linear Block * @param name defines the block name */ constructor(e) { super(e), this.registerInput("direction", We.Vector3, !0, new D(1, 0, 0)), this.registerInput("rotation", We.Vector3, !0, new D(0, 0, 0)), this.registerInput("scaling", We.Vector3, !0, new D(0, 0, 0)), this.scaling.acceptedConnectionPointTypes.push(We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateLinearBlock"; } /** * Gets the direction input component */ get direction() { return this._inputs[2]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[3]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[4]; } _buildBlock(e) { const t = (i) => { i.pushExecutionContext(this), i.pushInstancingContext(this); const r = this.count.getConnectedValue(i), s = [], n = Ae.Identity(), a = D.Zero(), l = D.Zero(), o = D.Zero(); for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const u = this.instance.getConnectedValue(i); if (!u || !u.positions || u.positions.length === 0) continue; const h = u.clone(), d = this.direction.getConnectedValue(i), f = this.rotation.getConnectedValue(i), p = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly); a.copyFrom(d.clone().scale(this._currentIndex)), l.copyFrom(f.clone().scale(this._currentIndex)), o.copyFrom(p.clone().scale(this._currentIndex)), o.addInPlaceFromFloats(1, 1, 1), Ae.ComposeToRef(o, Ze.FromEulerAngles(l.x, l.y, l.z), a, n), i._instantiateWithMatrix(h, n, s); } if (s.length) if (s.length === 1) this._vertexData = s[0]; else { const u = s.splice(0, 1)[0]; this._vertexData = u.merge(s, !0, !1, !0, !0); } return i.restoreExecutionContext(), i.restoreInstancingContext(), this._vertexData; }; this.evaluateContext ? 
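/* Editor's note (illustrative, not part of the original bundle): InstantiateBlock (built on
   InstantiateBaseBlock) clones `instance` `count` times; each clone is placed either by the `matrix`
   input or by position/rotation/scaling. With evaluateContext = true the inputs are re-read per clone,
   so a contextual or random input yields a different transform for every copy. The input block's public
   name is an assumption:

   const inst = new BABYLON.InstantiateBlock("inst");
   const count = new BABYLON.GeometryInputBlock("Count");
   count.value = 25;
   count.output.connectTo(inst.count);
*/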
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } } Be("BABYLON.InstantiateLinearBlock", sne); class nne extends zU { /** * Create a new InstantiateRadialBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("radius", We.Int, !0, 0, 0), this.registerInput("angleStart", We.Float, !0, 0), this.registerInput("angleEnd", We.Float, !0, Math.PI * 2), this.registerInput("transform", We.Vector3, !0, new D(0, 0, 0)), this.registerInput("rotation", We.Vector3, !0, new D(0, 0, 0)), this.registerInput("scaling", We.Vector3, !0, new D(0, 0, 0)), this.scaling.acceptedConnectionPointTypes.push(We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "InstantiateRadialBlock"; } /** * Gets the direction input component */ get radius() { return this._inputs[2]; } /** * Gets the direction input component */ get angleStart() { return this._inputs[3]; } /** * Gets the direction input component */ get angleEnd() { return this._inputs[4]; } /** * Gets the transform input component */ get transform() { return this._inputs[5]; } /** * Gets the rotation input component */ get rotation() { return this._inputs[6]; } /** * Gets the scaling input component */ get scaling() { return this._inputs[7]; } _buildBlock(e) { const t = (i) => { i.pushExecutionContext(this), i.pushInstancingContext(this); const r = this.count.getConnectedValue(i), s = [], n = Ae.Identity(), a = Ae.Identity(), l = Ae.Identity(), o = D.Zero(), u = D.Zero(), h = D.Zero(); for (this._currentIndex = 0; this._currentIndex < r; this._currentIndex++) { const d = this.instance.getConnectedValue(i); if (!d || !d.positions || d.positions.length === 0) continue; const f = d.clone(), p = this.radius.getConnectedValue(i), m = this.angleStart.getConnectedValue(i), _ = this.angleEnd.getConnectedValue(i), v = this.transform.getConnectedValue(i), C = this.rotation.getConnectedValue(i), x = i.adaptInput(this.scaling, We.Vector3, D.OneReadOnly), S = (_ - m) / r, M = m + S * this._currentIndex, R = Ze.FromEulerAngles(0, M, 0); o.copyFrom(v.clone().scale(this._currentIndex)), u.copyFrom(C.clone().scale(this._currentIndex)), h.copyFrom(x.clone().scale(this._currentIndex)), h.addInPlaceFromFloats(1, 1, 1), Ae.RotationYawPitchRollToRef(u.y, u.x, u.z, n), a.setTranslationFromFloats(0, 0, p), Ae.ComposeToRef(h, R, o, l), n.multiplyToRef(a, a), a.multiplyToRef(l, l), i._instantiateWithMatrix(f, l, s); } if (s.length) if (s.length === 1) this._vertexData = s[0]; else { const d = s.splice(0, 1)[0]; this._vertexData = d.merge(s, !0, !1, !0, !0); } return i.restoreExecutionContext(), i.restoreInstancingContext(), this._vertexData; }; this.evaluateContext ? 
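/* Editor's note (illustrative, not part of the original bundle): InstantiateLinearBlock offsets clone i
   by direction * i, and accumulates rotation * i and scaling * i + 1, i.e. an arithmetic progression
   along a line; InstantiateRadialBlock distributes the clones over the [angleStart, angleEnd] arc at the
   given radius, combining the per-index yaw with the same transform/rotation/scaling accumulation.

   const row = new BABYLON.InstantiateLinearBlock("row");     // direction defaults to (1, 0, 0)
   const ring = new BABYLON.InstantiateRadialBlock("ring");   // angleEnd defaults to 2 * Math.PI
*/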
this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } } Be("BABYLON.InstantiateRadialBlock", nne); class ane extends Bs { /** * Create a new IntFloatConverterBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("float ", We.Float, !0), this.registerInput("int ", We.Int, !0), this.registerOutput("float", We.Float), this.registerOutput("int", We.Int); } /** * Gets the current class name * @returns the class name */ getClassName() { return "IntFloatConverterBlock"; } /** * Gets the float input component */ get floatIn() { return this._inputs[0]; } /** * Gets the int input component */ get intIn() { return this._inputs[1]; } /** * Gets the float output component */ get floatOut() { return this._outputs[0]; } /** * Gets the int output component */ get intOut() { return this._outputs[1]; } _inputRename(e) { return e === "float " ? "floatIn" : e === "int " ? "intIn" : e; } _buildBlock() { this.floatOut._storedFunction = (e) => this.floatIn.isConnected ? this.floatIn.getConnectedValue(e) : this.intIn.isConnected ? this.intIn.getConnectedValue(e) : 0, this.intOut._storedFunction = (e) => this.floatIn.isConnected ? Math.floor(this.floatIn.getConnectedValue(e)) : this.intIn.isConnected ? Math.floor(this.intIn.getConnectedValue(e)) : 0; } } Be("BABYLON.IntFloatConverterBlock", ane); class one extends Bs { /** * Create a new DebugBlock * @param name defines the block name */ constructor(e) { super(e), this.log = [], this._isDebug = !0, this.registerInput("input", We.AutoDetect), this.registerOutput("output", We.BasedOnInput), this._outputs[0]._typeConnectionSource = this._inputs[0], this._inputs[0].excludedConnectionPointTypes.push(We.Geometry), this._inputs[0].excludedConnectionPointTypes.push(We.Texture); } /** * Gets the time spent to build this block (in ms) */ get buildExecutionTime() { return 0; } /** * Gets the current class name * @returns the class name */ getClassName() { return "DebugBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock(e) { if (!this.input.isConnected) { this.output._storedFunction = null, this.output._storedValue = null; return; } this.log = []; const t = (i) => { const r = this.input.getConnectedValue(i); return r == null ? (this.log.push("null"), r) : (this.log.push(r.toString()), r); }; this.output.isConnected ? 
this.output._storedFunction = t : this.output._storedValue = t(e); } } Be("BABYLON.DebugBlock", one); class lne extends Bs { /** * Create a new GeometryInfoBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("geometry", We.Geometry), this.registerOutput("output", We.Geometry), this.registerOutput("id", We.Int), this.registerOutput("collectionId", We.Int), this.registerOutput("verticesCount", We.Int), this.registerOutput("facesCount", We.Int); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryInfoBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } /** * Gets the id output component */ get id() { return this._outputs[1]; } /** * Gets the collectionId output component */ get collectionId() { return this._outputs[2]; } /** * Gets the verticesCount output component */ get verticesCount() { return this._outputs[3]; } /** * Gets the facesCount output component */ get facesCount() { return this._outputs[4]; } _buildBlock() { if (!this.geometry.isConnected) { this.id._storedValue = 0, this.collectionId._storedValue = 0, this.verticesCount._storedValue = 0, this.facesCount._storedValue = 0, this.output._storedValue = 0, this.id._storedFunction = null, this.collectionId._storedFunction = null, this.verticesCount._storedFunction = null, this.facesCount._storedFunction = null, this.output._storedFunction = null; return; } this.output._storedFunction = (e) => (this._currentVertexData = this.geometry.getConnectedValue(e), this._currentVertexData), this.id._storedFunction = (e) => (this._currentVertexData = this._currentVertexData || this.geometry.getConnectedValue(e), this._currentVertexData.uniqueId), this.collectionId._storedFunction = (e) => (this._currentVertexData = this._currentVertexData || this.geometry.getConnectedValue(e), this._currentVertexData.metadata ? this._currentVertexData.metadata.collectionId : 0), this.verticesCount._storedFunction = (e) => (this._currentVertexData = this._currentVertexData || this.geometry.getConnectedValue(e), this._currentVertexData.positions ? this._currentVertexData.positions.length / 3 : 0), this.facesCount._storedFunction = (e) => (this._currentVertexData = this._currentVertexData || this.geometry.getConnectedValue(e), this._currentVertexData.indices ? 
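/* Editor's note (illustrative, not part of the original bundle): IntFloatConverterBlock bridges Float and
   Int sockets (flooring on the int side), DebugBlock is a pass-through that appends every value crossing
   it to its `log` array, and GeometryInfoBlock exposes id / collectionId / verticesCount / facesCount of
   the connected geometry, caching the vertex data once per evaluation.

   const info = new BABYLON.GeometryInfoBlock("info");
   // e.g. info.verticesCount could feed an Int input such as an instantiate block's `count`
*/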
this._currentVertexData.indices.length / 3 : 0); } } Be("BABYLON.GeometryInfoBlock", lne); var XA; (function(c) { c[c.Spherical = 0] = "Spherical", c[c.Cylindrical = 1] = "Cylindrical", c[c.Cubic = 2] = "Cubic"; })(XA || (XA = {})); class ZW extends Bs { /** * Create a new MappingBlock * @param name defines the block name */ constructor(e) { super(e), this.mapping = XA.Spherical, this.registerInput("position", We.Vector3), this.registerInput("normal", We.Vector3), this.registerInput("center", We.Vector3, !0, D.Zero()), this.registerOutput("uv", We.Vector2); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MappingBlock"; } /** * Gets the position input component */ get position() { return this._inputs[0]; } /** * Gets the normal input component */ get normal() { return this._inputs[1]; } /** * Gets the center input component */ get center() { return this._inputs[2]; } /** * Gets the output component */ get uv() { return this._outputs[0]; } _buildBlock() { if (!this.position.isConnected) { this.uv._storedFunction = null, this.uv._storedValue = null; return; } const e = D.Zero(), t = (i) => { const r = this.position.getConnectedValue(i) || D.Zero(), s = this.normal.getConnectedValue(i) || D.Zero(), n = this.center.getConnectedValue(i), a = at.Zero(); switch (this.mapping) { case XA.Spherical: { r.subtractToRef(n, e); const l = e.length(); l > 0 && (a.x = Math.acos(e.y / l) / Math.PI, (e.x !== 0 || e.z !== 0) && (a.y = Math.atan2(e.x, e.z) / (Math.PI * 2))); break; } case XA.Cylindrical: { r.subtractToRef(n, e); const l = e.length(); l > 0 && (a.x = Math.atan2(e.x / l, e.z / l) / (Math.PI * 2), a.y = (e.y + 1) / 2); break; } case XA.Cubic: { const l = Math.abs(s.x), o = Math.abs(s.y), u = Math.abs(s.z), h = Math.max(Math.abs(r.x), Math.abs(r.y), Math.abs(r.z)); let d = 0, f = 0; l >= o && l >= u ? (d = r.y / h - n.y, f = r.z / h - n.z) : o >= l && o >= u ? 
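/* Editor's note (illustrative, not part of the original bundle): MappingBlock derives a UV from
   position (and normal, for cubic mapping) relative to `center`, with d = position - center:
     Spherical:   u = acos(d.y / |d|) / PI,                   v = atan2(d.x, d.z) / (2 * PI)
     Cylindrical: u = atan2(d.x / |d|, d.z / |d|) / (2 * PI), v = (d.y + 1) / 2
     Cubic:       pick the dominant normal axis, then remap the two remaining coordinates to [0, 1]

   const uv = new BABYLON.MappingBlock("uv");
   uv.mapping = BABYLON.MappingTypes.Spherical;   // enum name as emitted by _dumpPropertiesCode below
*/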
(d = r.x / h - n.x, f = r.z / h - n.z) : (d = r.x / h - n.x, f = r.y / h - n.y), a.x = (d + 1) / 2, a.y = (f + 1) / 2; } } return a; }; this.uv._storedFunction = (i) => t(i); } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.mapping = BABYLON.MappingTypes.${XA[this.mapping]}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.mapping = this.mapping, e; } _deserialize(e) { super._deserialize(e), this.mapping = e.mapping; } } F([ ir("Mapping", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "Spherical", value: XA.Spherical }, { label: "Cylindrical", value: XA.Cylindrical }, { label: "Cubic", value: XA.Cubic } ] }) ], ZW.prototype, "mapping", void 0); Be("BABYLON.MappingBlock", ZW); class cne extends Bs { /** * Create a new MatrixComposeBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("matrix0", We.Matrix), this.registerInput("matrix1", We.Matrix), this.registerOutput("output", We.Matrix); } /** * Gets the current class name * @returns the class name */ getClassName() { return "MatrixComposeBlock"; } /** * Gets the matrix0 input component */ get matrix0() { return this._inputs[0]; } /** * Gets the matrix1 input component */ get matrix1() { return this._inputs[1]; } /** * Gets the output component */ get output() { return this._outputs[0]; } _buildBlock() { this.output._storedFunction = (e) => { if (!this.matrix0.isConnected || !this.matrix1.isConnected) return null; const t = this.matrix0.getConnectedValue(e), i = this.matrix1.getConnectedValue(e); return !t || !i ? null : t.multiply(i); }; } } Be("BABYLON.MatrixComposeBlock", cne); class une extends Bs { /** Gets the list of attached endpoints */ get endpoints() { return this._endpoints; } /** * Create a new TeleportInBlock * @param name defines the block name */ constructor(e) { super(e), this._endpoints = [], this._isTeleportIn = !0, this.registerInput("input", We.AutoDetect); } /** * Gets the current class name * @returns the class name */ getClassName() { return "TeleportInBlock"; } /** * Gets the input component */ get input() { return this._inputs[0]; } _dumpCode(e, t) { let i = super._dumpCode(e, t); for (const r of this.endpoints) t.indexOf(r) === -1 && (i += r._dumpCode(e, t)); return i; } /** * Checks if the current block is an ancestor of a given type * @param type defines the potential type to check * @returns true if block is a descendant */ isAnAncestorOfType(e) { if (this.getClassName() === e) return !0; for (const t of this.endpoints) if (t.isAnAncestorOfType(e)) return !0; return !1; } /** * Checks if the current block is an ancestor of a given block * @param block defines the potential descendant block to check * @returns true if block is a descendant */ isAnAncestorOf(e) { for (const t of this.endpoints) if (t === e || t.isAnAncestorOf(e)) return !0; return !1; } /** * Get the first descendant using a predicate * @param predicate defines the predicate to check * @returns descendant or null if none found */ getDescendantOfPredicate(e) { if (e(this)) return this; for (const t of this.endpoints) { const i = t.getDescendantOfPredicate(e); if (i) return i; } return null; } /** * Add an enpoint to this block * @param endpoint define the endpoint to attach to */ attachToEndpoint(e) { e.detach(), this._endpoints.push(e), e._entryPoint = this, e._outputs[0]._typeConnectionSource = this._inputs[0], e._tempEntryPointUniqueId = 
null, e.name = "> " + this.name; } /** * Remove enpoint from this block * @param endpoint define the endpoint to remove */ detachFromEndpoint(e) { const t = this._endpoints.indexOf(e); t !== -1 && (this._endpoints.splice(t, 1), e._outputs[0]._typeConnectionSource = null, e._entryPoint = null); } _buildBlock() { for (const e of this._endpoints) e.output._storedFunction = (t) => this.input.getConnectedValue(t); } } Be("BABYLON.TeleportInBlock", une); class hne extends Bs { /** * Create a new TeleportOutBlock * @param name defines the block name */ constructor(e) { super(e), this._entryPoint = null, this._tempEntryPointUniqueId = null, this._isTeleportOut = !0, this.registerOutput("output", We.BasedOnInput); } /** * Gets the entry point */ get entryPoint() { return this._entryPoint; } /** * Gets the current class name * @returns the class name */ getClassName() { return "TeleportOutBlock"; } /** * Gets the output component */ get output() { return this._outputs[0]; } /** Detach from entry point */ detach() { this._entryPoint && this._entryPoint.detachFromEndpoint(this); } _buildBlock() { } _customBuildStep(e) { this.entryPoint && this.entryPoint.build(e); } _dumpCode(e, t) { let i = ""; return this.entryPoint && t.indexOf(this.entryPoint) === -1 && (i += this.entryPoint._dumpCode(e, t)), i + super._dumpCode(e, t); } _dumpCodeForOutputConnections(e) { let t = super._dumpCodeForOutputConnections(e); return this.entryPoint && (t += this.entryPoint._dumpCodeForOutputConnections(e)), t; } /** * Clone the current block to a new identical block * @returns a copy of the current block */ clone() { const e = super.clone(); return this.entryPoint && this.entryPoint.attachToEndpoint(e), e; } _dumpPropertiesCode() { let e = super._dumpPropertiesCode(); return this.entryPoint && (e += `${this.entryPoint._codeVariableName}.attachToEndpoint(${this._codeVariableName}); `), e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { var e, t; const i = super.serialize(); return i.entryPoint = (t = (e = this.entryPoint) === null || e === void 0 ? void 0 : e.uniqueId) !== null && t !== void 0 ? 
t : "", i; } _deserialize(e) { super._deserialize(e), this._tempEntryPointUniqueId = e.entryPoint; } } Be("BABYLON.TeleportOutBlock", hne); class qW extends Bs { /** * Gets the texture data */ get textureData() { return this._data; } /** * Gets the texture width */ get textureWidth() { return this._width; } /** * Gets the texture height */ get textureHeight() { return this._height; } /** * Creates a new GeometryTextureBlock * @param name defines the block name */ constructor(e) { super(e), this._data = null, this.serializedCachedData = !1, this.registerOutput("texture", We.Texture); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryTextureBlock"; } /** * Gets the texture component */ get texture() { return this._outputs[0]; } _prepareImgToLoadAsync(e) { return new Promise((t, i) => { const r = new Image(), s = document.createElement("canvas"), n = s.getContext("2d"); r.onload = () => { s.width = r.width, s.height = r.height, n.drawImage(r, 0, 0); const l = n.getImageData(0, 0, r.width, r.height).data, o = new Float32Array(l.length); for (let u = 0; u < l.length; u++) o[u] = l[u] / 255; this._data = o, this._width = r.width, this._height = r.height, t(); }, r.onerror = () => { this._data = null, i(); }, r.src = e; }); } /** * Remove stored data */ cleanData() { this._data = null; } /** * Load the texture data * @param imageFile defines the file to load data from * @returns a promise fulfilled when image data is loaded */ loadTextureFromFileAsync(e) { return this._prepareImgToLoadAsync(URL.createObjectURL(e)); } /** * Load the texture data * @param url defines the url to load data from * @returns a promise fulfilled when image data is loaded */ loadTextureFromUrlAsync(e) { return this._prepareImgToLoadAsync(e); } /** * Load the texture data * @param url defines the url to load data from * @returns a promise fulfilled when image data is loaded */ extractFromTextureAsync(e) { return new Promise((t, i) => { if (!e.isReady()) { e.onLoadObservable.addOnce(() => this.extractFromTextureAsync(e).then(t).catch(i)); return; } const r = e.getSize(); Fie.GetTextureDataAsync(e, r.width, r.height).then(async (s) => { const n = new Float32Array(s.length); for (let a = 0; a < s.length; a++) n[a] = s[a] / 255; this._data = n, this._width = r.width, this._height = r.height, t(); }).catch(i); }); } _buildBlock() { if (!this._data) { this.texture._storedValue = null; return; } const e = { data: this._data, width: this._width, height: this._height }; this.texture._storedValue = e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.width = this._width, e.height = this._height, e.serializedCachedData = this.serializedCachedData, this._data && this.serializedCachedData && (e.data = Array.from(this._data)), e; } _deserialize(e) { super._deserialize(e), this._width = e.width, this._height = e.height, e.data ? 
(this._data = new Float32Array(e.data), this.serializedCachedData = !0) : this.serializedCachedData = !!e.serializedCachedData; } } F([ ir("Serialize cached data", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], qW.prototype, "serializedCachedData", void 0); Be("BABYLON.GeometryTextureBlock", qW); class JW extends Bs { /** * Creates a new GeometryTextureFetchBlock * @param name defines the block name */ constructor(e) { super(e), this.clampCoordinates = !0, this.registerInput("texture", We.Texture), this.registerInput("coordinates", We.Vector2), this.registerOutput("rgba", We.Vector4), this.registerOutput("rgb", We.Vector3), this.registerOutput("r", We.Float), this.registerOutput("g", We.Float), this.registerOutput("b", We.Float), this.registerOutput("a", We.Float); } /** * Gets the current class name * @returns the class name */ getClassName() { return "GeometryTextureFetchBlock"; } /** * Gets the texture component */ get texture() { return this.inputs[0]; } /** * Gets the coordinates component */ get coordinates() { return this.inputs[1]; } /** * Gets the rgba component */ get rgba() { return this._outputs[0]; } /** * Gets the rgb component */ get rgb() { return this._outputs[1]; } /** * Gets the r component */ get r() { return this._outputs[2]; } /** * Gets the g component */ get g() { return this._outputs[3]; } /** * Gets the b component */ get b() { return this._outputs[4]; } /** * Gets the a component */ get a() { return this._outputs[5]; } _repeatClamp(e) { return e >= 0 ? e % 1 : 1 - Math.abs(e) % 1; } _buildBlock() { const e = (t) => { const i = this.texture.getConnectedValue(t); if (!i || !i.data) return null; const r = this.coordinates.getConnectedValue(t); if (!r) return null; const s = this.clampCoordinates ? Math.max(0, Math.min(r.x, 1)) : this._repeatClamp(r.x), n = this.clampCoordinates ? Math.max(0, Math.min(r.y, 1)) : this._repeatClamp(r.y), a = Math.floor(s * (i.width - 1)), l = Math.floor(n * (i.height - 1)), o = a + i.width * l; return Di.FromArray(i.data, o * 4); }; this.rgba._storedFunction = (t) => e(t), this.rgb._storedFunction = (t) => { const i = e(t); return i ? i.toVector3() : null; }, this.r._storedFunction = (t) => { const i = e(t); return i ? i.x : null; }, this.g._storedFunction = (t) => { const i = e(t); return i ? i.y : null; }, this.b._storedFunction = (t) => { const i = e(t); return i ? i.z : null; }, this.a._storedFunction = (t) => { const i = e(t); return i ? 
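/* Editor's note (illustrative sketch, not part of the original bundle): GeometryTextureBlock loads image
   pixels into a Float32Array of RGBA values in [0..1]; GeometryTextureFetchBlock samples that data with
   Vector2 coordinates (clamped or repeated) and exposes rgba / rgb / r / g / b / a outputs. The file name
   and the height-map use are assumptions:

   const tex = new BABYLON.GeometryTextureBlock("tex");
   await tex.loadTextureFromUrlAsync("heightmap.png");
   const fetch = new BABYLON.GeometryTextureFetchBlock("fetch");
   tex.texture.connectTo(fetch.texture);
   // fetch.r could then drive, for example, a per-vertex displacement amount
*/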
i.w : null; }; } _dumpPropertiesCode() { return super._dumpPropertiesCode() + `${this._codeVariableName}.clampCoordinates = ${this.clampCoordinates}; `; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.clampCoordinates = this.clampCoordinates, e; } _deserialize(e) { super._deserialize(e), this.clampCoordinates = e.clampCoordinates; } } F([ ir("Clamp Coordinates", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], JW.prototype, "clampCoordinates", void 0); Be("BABYLON.GeometryTextureFetchBlock", JW); class dne extends Bs { /** * Create a new BoundingBlock * @param name defines the block name */ constructor(e) { super(e), this.registerInput("geometry", We.Geometry), this.registerOutput("min", We.Vector3), this.registerOutput("max", We.Vector3); } /** * Gets the current class name * @returns the class name */ getClassName() { return "BoundingBlock"; } /** * Gets the geometry input component */ get geometry() { return this._inputs[0]; } /** * Gets the min output component */ get min() { return this._outputs[0]; } /** * Gets the max output component */ get max() { return this._outputs[1]; } _buildBlock() { this.min._storedFunction = (e) => { const t = this.geometry.getConnectedValue(e); return t ? kO(t.positions, 0, t.positions.length / 3).minimum : null; }, this.max._storedFunction = (e) => { const t = this.geometry.getConnectedValue(e); return t ? kO(t.positions, 0, t.positions.length / 3).maximum : null; }; } } Be("BABYLON.BoundingBlock", dne); var YA; (function(c) { c[c.Intersect = 0] = "Intersect", c[c.Subtract = 1] = "Subtract", c[c.Union = 2] = "Union"; })(YA || (YA = {})); class HU extends Bs { /** * Create a new BooleanGeometryBlock * @param name defines the block name */ constructor(e) { super(e), this.evaluateContext = !1, this.operation = YA.Intersect, this.registerInput("geometry0", We.Geometry), this.registerInput("geometry1", We.Geometry), this.registerOutput("output", We.Geometry); } /** * Gets the current class name * @returns the class name */ getClassName() { return "BooleanGeometryBlock"; } /** * Gets the geometry0 input component */ get geometry0() { return this._inputs[0]; } /** * Gets the geometry1 input component */ get geometry1() { return this._inputs[1]; } /** * Gets the geometry output component */ get output() { return this._outputs[0]; } _buildBlock(e) { const t = (i) => { const r = this.geometry0.getConnectedValue(i), s = this.geometry1.getConnectedValue(i); if (!r || !s) return null; const n = r.positions.length / 3; !r.normals && s.normals && (r.normals = new Array(r.positions.length)), !s.normals && r.normals && (s.normals = new Array(s.positions.length)), !r.uvs && s.uvs && (r.uvs = new Array(n * 2)), !s.uvs && r.uvs && (s.uvs = new Array(n * 2)), !r.colors && s.colors && (r.colors = new Array(n * 4)), !s.colors && r.colors && (s.colors = new Array(n * 4)); const a = zA.FromVertexData(r), l = zA.FromVertexData(s); let o; switch (this.operation) { case YA.Intersect: o = a.intersect(l); break; case YA.Subtract: o = a.subtract(l); break; case YA.Union: o = a.union(l); break; } return o.toVertexData(); }; this.evaluateContext ? this.output._storedFunction = t : (this.output._storedFunction = null, this.output._storedValue = t(e)); } _dumpPropertiesCode() { let e = super._dumpPropertiesCode() + `${this._codeVariableName}.evaluateContext = ${this.evaluateContext ? 
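/* Editor's note (illustrative, not part of the original bundle): BooleanGeometryBlock runs a CSG
   Intersect / Subtract / Union between geometry0 and geometry1, first padding missing normals / uvs /
   colors so both operands share the same vertex layout, then converting to CSG and back to vertex data.

   const csg = new BABYLON.BooleanGeometryBlock("csg");
   csg.operation = BABYLON.BooleanGeometryOperations.Subtract;  // enum name as emitted by _dumpPropertiesCode
*/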
"true" : "false"}; `; return e += `${this._codeVariableName}.operation = BABYLON.BooleanGeometryOperations.${YA[this.operation]}; `, e; } /** * Serializes this block in a JSON representation * @returns the serialized block object */ serialize() { const e = super.serialize(); return e.evaluateContext = this.evaluateContext, e.operation = this.operation, e; } _deserialize(e) { super._deserialize(e), this.evaluateContext = e.evaluateContext, e.operation && (this.operation = e.operation); } } F([ ir("Evaluate context", $i.Boolean, "ADVANCED", { notifiers: { rebuild: !0 } }) ], HU.prototype, "evaluateContext", void 0); F([ ir("Operation", $i.List, "ADVANCED", { notifiers: { rebuild: !0 }, options: [ { label: "Intersect", value: YA.Intersect }, { label: "Subtract", value: YA.Subtract }, { label: "Union", value: YA.Union } ] }) ], HU.prototype, "operation", void 0); Be("BABYLON.BooleanGeometryBlock", HU); class z0e { /** * Initializes the recastJS plugin * @param recastInjection can be used to inject your own recast reference */ constructor(e = Recast) { if (this.bjsRECAST = {}, this.name = "RecastJSPlugin", this._maximumSubStepCount = 10, this._timeStep = 1 / 60, this._timeFactor = 1, this._worker = null, typeof e == "function" ? Ce.Error("RecastJS is not ready. Please make sure you await Recast() before using the plugin.") : this.bjsRECAST = e, !this.isSupported()) { Ce.Error("RecastJS is not available. Please make sure you included the js file."); return; } this.setTimeStep(), this._tempVec1 = new this.bjsRECAST.Vec3(), this._tempVec2 = new this.bjsRECAST.Vec3(); } /** * Set worker URL to be used when generating a new navmesh * @param workerURL url string * @returns boolean indicating if worker is created */ setWorkerURL(e) { return window && window.Worker ? (this._worker = new Worker(e), !0) : !1; } /** * Set the time step of the navigation tick update. * Default is 1/60. * A value of 0 will disable fixed time update * @param newTimeStep the new timestep to apply to this world. */ setTimeStep(e = 1 / 60) { this._timeStep = e; } /** * Get the time step of the navigation tick update. * @returns the current time step */ getTimeStep() { return this._timeStep; } /** * If delta time in navigation tick update is greater than the time step * a number of sub iterations are done. If more iterations are need to reach deltatime * they will be discarded. * A value of 0 will set to no maximum and update will use as many substeps as needed * @param newStepCount the maximum number of iterations */ setMaximumSubStepCount(e = 10) { this._maximumSubStepCount = e; } /** * Get the maximum number of iterations per navigation tick update * @returns the maximum number of iterations */ getMaximumSubStepCount() { return this._maximumSubStepCount; } /** * Time factor applied when updating crowd agents (default 1). A value of 0 will pause crowd updates. * @param value the time factor applied at update */ set timeFactor(e) { this._timeFactor = Math.max(e, 0); } /** * Get the time factor used for crowd agent update * @returns the time factor */ get timeFactor() { return this._timeFactor; } /** * Creates a navigation mesh * @param meshes array of all the geometry used to compute the navigation mesh * @param parameters bunch of parameters used to filter geometry * @param completion callback when data is available from the worker. Not used without a worker */ createNavMesh(e, t, i) { this._worker && !i ? Ce.Warn("A worker is avaible but no completion callback. 
Defaulting to blocking navmesh creation") : !this._worker && i && Ce.Warn("A completion callback is avaible but no worker. Defaulting to blocking navmesh creation"), this.navMesh = new this.bjsRECAST.NavMesh(); let r, s, n; const a = [], l = []; let o = 0; for (r = 0; r < e.length; r++) if (e[r]) { const u = e[r], h = u.getIndices(); if (!h) continue; const d = u.getVerticesData(Y.PositionKind, !1, !1); if (!d) continue; const f = [], p = u.computeWorldMatrix(!0); if (u.hasThinInstances) { const m = u.thinInstanceGetWorldMatrices(); for (let _ = 0; _ < m.length; _++) { const v = new Ae(); m[_].multiplyToRef(p, v), f.push(v); } } else f.push(p); for (let m = 0; m < f.length; m++) { const _ = f[m]; for (s = 0; s < h.length; s++) a.push(h[s] + o); const v = D.Zero(), C = D.Zero(); for (n = 0; n < d.length; n += 3) D.FromArrayToRef(d, n, C), D.TransformCoordinatesToRef(C, _, v), l.push(v.x, v.y, v.z); o += d.length / 3; } } if (this._worker && i) this._worker.postMessage([l, o, a, a.length, t]), this._worker.onmessage = function(u) { i(u.data); }; else { const u = new this.bjsRECAST.rcConfig(); u.cs = t.cs, u.ch = t.ch, u.borderSize = t.borderSize ? t.borderSize : 0, u.tileSize = t.tileSize ? t.tileSize : 0, u.walkableSlopeAngle = t.walkableSlopeAngle, u.walkableHeight = t.walkableHeight, u.walkableClimb = t.walkableClimb, u.walkableRadius = t.walkableRadius, u.maxEdgeLen = t.maxEdgeLen, u.maxSimplificationError = t.maxSimplificationError, u.minRegionArea = t.minRegionArea, u.mergeRegionArea = t.mergeRegionArea, u.maxVertsPerPoly = t.maxVertsPerPoly, u.detailSampleDist = t.detailSampleDist, u.detailSampleMaxError = t.detailSampleMaxError, this.navMesh.build(l, o, a, a.length, u); } } /** * Create a navigation mesh debug mesh * @param scene is where the mesh will be added * @returns debug display mesh */ createDebugNavMesh(e) { let t, i; const r = this.navMesh.getDebugNavMesh(), s = r.getTriangleCount(), n = [], a = []; for (t = 0; t < s * 3; t++) n.push(t); for (t = 0; t < s; t++) for (i = 0; i < 3; i++) { const u = r.getTriangle(t).getPoint(i); a.push(u.x, u.y, u.z); } const l = new ke("NavMeshDebug", e), o = new Ot(); return o.indices = n, o.positions = a, o.applyToMesh(l, !1), l; } /** * Get a navigation mesh constrained position, closest to the parameter position * @param position world position * @returns the closest point to position constrained by the navigation mesh */ getClosestPoint(e) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z; const t = this.navMesh.getClosestPoint(this._tempVec1); return new D(t.x, t.y, t.z); } /** * Get a navigation mesh constrained position, closest to the parameter position * @param position world position * @param result output the closest point to position constrained by the navigation mesh */ getClosestPointToRef(e, t) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z; const i = this.navMesh.getClosestPoint(this._tempVec1); t.set(i.x, i.y, i.z); } /** * Get a navigation mesh constrained position, within a particular radius * @param position world position * @param maxRadius the maximum distance to the constrained world position * @returns the closest point to position constrained by the navigation mesh */ getRandomPointAround(e, t) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z; const i = this.navMesh.getRandomPointAround(this._tempVec1, t); return new D(i.x, i.y, i.z); } /** * Get a navigation mesh constrained position, within a particular radius * @param position world position 
* @param maxRadius the maximum distance to the constrained world position * @param result output the closest point to position constrained by the navigation mesh */ getRandomPointAroundToRef(e, t, i) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z; const r = this.navMesh.getRandomPointAround(this._tempVec1, t); i.set(r.x, r.y, r.z); } /** * Compute the final position from a segment made of destination-position * @param position world position * @param destination world position * @returns the resulting point along the navmesh */ moveAlong(e, t) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this._tempVec2.x = t.x, this._tempVec2.y = t.y, this._tempVec2.z = t.z; const i = this.navMesh.moveAlong(this._tempVec1, this._tempVec2); return new D(i.x, i.y, i.z); } /** * Compute the final position from a segment made of destination-position * @param position world position * @param destination world position * @param result output the resulting point along the navmesh */ moveAlongToRef(e, t, i) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this._tempVec2.x = t.x, this._tempVec2.y = t.y, this._tempVec2.z = t.z; const r = this.navMesh.moveAlong(this._tempVec1, this._tempVec2); i.set(r.x, r.y, r.z); } /** * Compute a navigation path from start to end. Returns an empty array if no path can be computed * @param start world position * @param end world position * @returns array containing world position composing the path */ computePath(e, t) { let i; this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this._tempVec2.x = t.x, this._tempVec2.y = t.y, this._tempVec2.z = t.z; const r = this.navMesh.computePath(this._tempVec1, this._tempVec2), s = r.getPointCount(), n = []; for (i = 0; i < s; i++) { const a = r.getPoint(i); n.push(new D(a.x, a.y, a.z)); } return n; } /** * Create a new Crowd so you can add agents * @param maxAgents the maximum agent count in the crowd * @param maxAgentRadius the maximum radius an agent can have * @param scene to attach the crowd to * @returns the crowd you can add agents to */ createCrowd(e, t, i) { return new fne(this, e, t, i); } /** * Set the Bounding box extent for doing spatial queries (getClosestPoint, getRandomPointAround, ...) * The queries will try to find a solution within those bounds * default is (1,1,1) * @param extent x,y,z value that define the extent around the queries point of reference */ setDefaultQueryExtent(e) { this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this.navMesh.setDefaultQueryExtent(this._tempVec1); } /** * Get the Bounding box extent specified by setDefaultQueryExtent * @returns the box extent values */ getDefaultQueryExtent() { const e = this.navMesh.getDefaultQueryExtent(); return new D(e.x, e.y, e.z); } /** * build the navmesh from a previously saved state using getNavmeshData * @param data the Uint8Array returned by getNavmeshData */ buildFromNavmeshData(e) { const t = e.length * e.BYTES_PER_ELEMENT, i = this.bjsRECAST._malloc(t), r = new Uint8Array(this.bjsRECAST.HEAPU8.buffer, i, t); r.set(e); const s = new this.bjsRECAST.NavmeshData(); s.dataPointer = r.byteOffset, s.size = e.length, this.navMesh = new this.bjsRECAST.NavMesh(), this.navMesh.buildFromNavmeshData(s), this.bjsRECAST._free(r.byteOffset); } /** * returns the navmesh data that can be used later. 
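* Editor's note: illustrative plugin setup, not part of the original source; ground, start, end and scene
* are placeholders and the navmesh parameter values are assumptions:
*   await Recast();                                  // the plugin logs an error if Recast() was not awaited
*   const navPlugin = new BABYLON.RecastJSPlugin();
*   navPlugin.createNavMesh([ground], { cs: 0.2, ch: 0.2, walkableSlopeAngle: 35, walkableHeight: 1,
*     walkableClimb: 1, walkableRadius: 1, maxEdgeLen: 12, maxSimplificationError: 1.3, minRegionArea: 8,
*     mergeRegionArea: 20, maxVertsPerPoly: 6, detailSampleDist: 6, detailSampleMaxError: 1 });
*   const debugMesh = navPlugin.createDebugNavMesh(scene);
*   const path = navPlugin.computePath(navPlugin.getClosestPoint(start), navPlugin.getClosestPoint(end));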
The navmesh must be built before retrieving the data * @returns data the Uint8Array that can be saved and reused */ getNavmeshData() { const e = this.navMesh.getNavmeshData(), t = new Uint8Array(this.bjsRECAST.HEAPU8.buffer, e.dataPointer, e.size), i = new Uint8Array(e.size); return i.set(t), this.navMesh.freeNavmeshData(e), i; } /** * Get the Bounding box extent result specified by setDefaultQueryExtent * @param result output the box extent values */ getDefaultQueryExtentToRef(e) { const t = this.navMesh.getDefaultQueryExtent(); e.set(t.x, t.y, t.z); } /** * Disposes */ dispose() { } /** * Creates a cylinder obstacle and add it to the navigation * @param position world position * @param radius cylinder radius * @param height cylinder height * @returns the obstacle freshly created */ addCylinderObstacle(e, t, i) { return this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this.navMesh.addCylinderObstacle(this._tempVec1, t, i); } /** * Creates an oriented box obstacle and add it to the navigation * @param position world position * @param extent box size * @param angle angle in radians of the box orientation on Y axis * @returns the obstacle freshly created */ addBoxObstacle(e, t, i) { return this._tempVec1.x = e.x, this._tempVec1.y = e.y, this._tempVec1.z = e.z, this._tempVec2.x = t.x, this._tempVec2.y = t.y, this._tempVec2.z = t.z, this.navMesh.addBoxObstacle(this._tempVec1, this._tempVec2, i); } /** * Removes an obstacle created by addCylinderObstacle or addBoxObstacle * @param obstacle obstacle to remove from the navigation */ removeObstacle(e) { this.navMesh.removeObstacle(e); } /** * If this plugin is supported * @returns true if plugin is supported */ isSupported() { return this.bjsRECAST !== void 0; } } class fne { /** * Constructor * @param plugin recastJS plugin * @param maxAgents the maximum agent count in the crowd * @param maxAgentRadius the maximum radius an agent can have * @param scene to attach the crowd to * @returns the crowd you can add agents to */ constructor(e, t, i, r) { this.recastCrowd = {}, this.transforms = new Array(), this.agents = new Array(), this.reachRadii = new Array(), this._agentDestinationArmed = new Array(), this._agentDestination = new Array(), this._onBeforeAnimationsObserver = null, this.onReachTargetObservable = new Fe(), this.bjsRECASTPlugin = e, this.recastCrowd = new this.bjsRECASTPlugin.bjsRECAST.Crowd(t, i, this.bjsRECASTPlugin.navMesh.getNavMesh()), this._scene = r, this._onBeforeAnimationsObserver = r.onBeforeAnimationsObservable.add(() => { this.update(r.getEngine().getDeltaTime() * 1e-3 * e.timeFactor); }); } /** * Add a new agent to the crowd with the specified parameter a corresponding transformNode. * You can attach anything to that node. The node position is updated in the scene update tick. 
* @param pos world position that will be constrained by the navigation mesh * @param parameters agent parameters * @param transform hooked to the agent that will be update by the scene * @returns agent index */ addAgent(e, t, i) { const r = new this.bjsRECASTPlugin.bjsRECAST.dtCrowdAgentParams(); r.radius = t.radius, r.height = t.height, r.maxAcceleration = t.maxAcceleration, r.maxSpeed = t.maxSpeed, r.collisionQueryRange = t.collisionQueryRange, r.pathOptimizationRange = t.pathOptimizationRange, r.separationWeight = t.separationWeight, r.updateFlags = 7, r.obstacleAvoidanceType = 0, r.queryFilterType = 0, r.userData = 0; const s = this.recastCrowd.addAgent(new this.bjsRECASTPlugin.bjsRECAST.Vec3(e.x, e.y, e.z), r); return this.transforms.push(i), this.agents.push(s), this.reachRadii.push(t.reachRadius ? t.reachRadius : t.radius), this._agentDestinationArmed.push(!1), this._agentDestination.push(new D(0, 0, 0)), s; } /** * Returns the agent position in world space * @param index agent index returned by addAgent * @returns world space position */ getAgentPosition(e) { const t = this.recastCrowd.getAgentPosition(e); return new D(t.x, t.y, t.z); } /** * Returns the agent position result in world space * @param index agent index returned by addAgent * @param result output world space position */ getAgentPositionToRef(e, t) { const i = this.recastCrowd.getAgentPosition(e); t.set(i.x, i.y, i.z); } /** * Returns the agent velocity in world space * @param index agent index returned by addAgent * @returns world space velocity */ getAgentVelocity(e) { const t = this.recastCrowd.getAgentVelocity(e); return new D(t.x, t.y, t.z); } /** * Returns the agent velocity result in world space * @param index agent index returned by addAgent * @param result output world space velocity */ getAgentVelocityToRef(e, t) { const i = this.recastCrowd.getAgentVelocity(e); t.set(i.x, i.y, i.z); } /** * Returns the agent next target point on the path * @param index agent index returned by addAgent * @returns world space position */ getAgentNextTargetPath(e) { const t = this.recastCrowd.getAgentNextTargetPath(e); return new D(t.x, t.y, t.z); } /** * Returns the agent next target point on the path * @param index agent index returned by addAgent * @param result output world space position */ getAgentNextTargetPathToRef(e, t) { const i = this.recastCrowd.getAgentNextTargetPath(e); t.set(i.x, i.y, i.z); } /** * Gets the agent state * @param index agent index returned by addAgent * @returns agent state */ getAgentState(e) { return this.recastCrowd.getAgentState(e); } /** * returns true if the agent in over an off mesh link connection * @param index agent index returned by addAgent * @returns true if over an off mesh link connection */ overOffmeshConnection(e) { return this.recastCrowd.overOffmeshConnection(e); } /** * Asks a particular agent to go to a destination. 
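* A minimal sketch, assuming the public (un-minified) API names; the target is usually snapped to the
* navmesh first with getClosestPoint:
*   const target = navigationPlugin.getClosestPoint(new BABYLON.Vector3(2, 0, 2));
*   crowd.agentGoto(agentIndex, target);
*   crowd.onReachTargetObservable.add((ev) => console.log("agent", ev.agentIndex, "reached its target"));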
That destination is constrained by the navigation mesh * @param index agent index returned by addAgent * @param destination targeted world position */ agentGoto(e, t) { this.recastCrowd.agentGoto(e, new this.bjsRECASTPlugin.bjsRECAST.Vec3(t.x, t.y, t.z)); const i = this.agents.indexOf(e); i > -1 && (this._agentDestinationArmed[i] = !0, this._agentDestination[i].set(t.x, t.y, t.z)); } /** * Teleport the agent to a new position * @param index agent index returned by addAgent * @param destination targeted world position */ agentTeleport(e, t) { this.recastCrowd.agentTeleport(e, new this.bjsRECASTPlugin.bjsRECAST.Vec3(t.x, t.y, t.z)); } /** * Update agent parameters * @param index agent index returned by addAgent * @param parameters agent parameters */ updateAgentParameters(e, t) { const i = this.recastCrowd.getAgentParameters(e); t.radius !== void 0 && (i.radius = t.radius), t.height !== void 0 && (i.height = t.height), t.maxAcceleration !== void 0 && (i.maxAcceleration = t.maxAcceleration), t.maxSpeed !== void 0 && (i.maxSpeed = t.maxSpeed), t.collisionQueryRange !== void 0 && (i.collisionQueryRange = t.collisionQueryRange), t.pathOptimizationRange !== void 0 && (i.pathOptimizationRange = t.pathOptimizationRange), t.separationWeight !== void 0 && (i.separationWeight = t.separationWeight), this.recastCrowd.setAgentParameters(e, i); } /** * remove a particular agent previously created * @param index agent index returned by addAgent */ removeAgent(e) { this.recastCrowd.removeAgent(e); const t = this.agents.indexOf(e); t > -1 && (this.agents.splice(t, 1), this.transforms.splice(t, 1), this.reachRadii.splice(t, 1), this._agentDestinationArmed.splice(t, 1), this._agentDestination.splice(t, 1)); } /** * get the list of all agents attached to this crowd * @returns list of agent indices */ getAgents() { return this.agents; } /** * Tick update done by the Scene. Agent position/velocity/acceleration is updated by this function * @param deltaTime in seconds */ update(e) { if (this.bjsRECASTPlugin.navMesh.update(), e <= Sr) return; const t = this.bjsRECASTPlugin.getTimeStep(), i = this.bjsRECASTPlugin.getMaximumSubStepCount(); if (t <= Sr) this.recastCrowd.update(e); else { let r = Math.floor(e / t); i && r > i && (r = i), r < 1 && (r = 1); const s = e / r; for (let n = 0; n < r; n++) this.recastCrowd.update(s); } for (let r = 0; r < this.agents.length; r++) { const s = this.agents[r], n = this.getAgentPosition(s); if (this.transforms[r].position = n, this._agentDestinationArmed[r]) { const a = n.x - this._agentDestination[r].x, l = n.z - this._agentDestination[r].z, o = this.reachRadii[r], u = this._agentDestination[r].y - this.reachRadii[r], h = this._agentDestination[r].y + this.reachRadii[r], d = a * a + l * l; n.y > u && n.y < h && d < o * o && (this._agentDestinationArmed[r] = !1, this.onReachTargetObservable.notifyObservers({ agentIndex: s, destination: this._agentDestination[r] })); } } } /** * Set the Bounding box extent for doing spatial queries (getClosestPoint, getRandomPointAround, ...) 
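* A minimal sketch with an illustrative extent value, assuming the public (un-minified) crowd API:
*   crowd.setDefaultQueryExtent(new BABYLON.Vector3(0.5, 0.5, 0.5));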
* The queries will try to find a solution within those bounds * default is (1,1,1) * @param extent x,y,z value that define the extent around the queries point of reference */ setDefaultQueryExtent(e) { const t = new this.bjsRECASTPlugin.bjsRECAST.Vec3(e.x, e.y, e.z); this.recastCrowd.setDefaultQueryExtent(t); } /** * Get the Bounding box extent specified by setDefaultQueryExtent * @returns the box extent values */ getDefaultQueryExtent() { const e = this.recastCrowd.getDefaultQueryExtent(); return new D(e.x, e.y, e.z); } /** * Get the Bounding box extent result specified by setDefaultQueryExtent * @param result output the box extent values */ getDefaultQueryExtentToRef(e) { const t = this.recastCrowd.getDefaultQueryExtent(); e.set(t.x, t.y, t.z); } /** * Get the next corner points composing the path (max 4 points) * @param index agent index returned by addAgent * @returns array containing world position composing the path */ getCorners(e) { let t; const i = this.recastCrowd.getCorners(e), r = i.getPointCount(), s = []; for (t = 0; t < r; t++) { const n = i.getPoint(t); s.push(new D(n.x, n.y, n.z)); } return s; } /** * Release all resources */ dispose() { this.recastCrowd.destroy(), this._scene.onBeforeAnimationsObservable.remove(this._onBeforeAnimationsObserver), this._onBeforeAnimationsObserver = null, this.onReachTargetObservable.clear(); } } $e.OfflineProviderFactory = (c, e, t = !1) => new Dp(c, e, t); class Dp { /** * Gets a boolean indicating if scene must be saved in the database */ get enableSceneOffline() { return this._enableSceneOffline; } /** * Gets a boolean indicating if textures must be saved in the database */ get enableTexturesOffline() { return this._enableTexturesOffline; } /** * Creates a new Database * @param urlToScene defines the url to load the scene * @param callbackManifestChecked defines the callback to use when manifest is checked * @param disableManifestCheck defines a boolean indicating that we want to skip the manifest validation (it will be considered validated and up to date) */ constructor(e, t, i = !1) { this._idbFactory = typeof indexedDB < "u" ? indexedDB : void 0, this._currentSceneUrl = Dp._ReturnFullUrlLocation(e), this._db = null, this._enableSceneOffline = !1, this._enableTexturesOffline = !1, this._manifestVersionFound = 0, this._mustUpdateRessources = !1, this._hasReachedQuota = !1, Dp.IDBStorageEnabled ? i ? (this._enableSceneOffline = !0, this._enableTexturesOffline = !0, this._manifestVersionFound = 1, Ve.SetImmediate(() => { t(!0); })) : this._checkManifestFile(t) : t(!0); } _checkManifestFile(e) { const t = () => { this._enableSceneOffline = !1, this._enableTexturesOffline = !1, e(!1); }, i = () => { try { if (typeof URL == "function" && this._currentSceneUrl.indexOf("http") === 0) { const a = new URL(this._currentSceneUrl); return a.pathname += ".manifest", a.toString(); } } catch { } return `${this._currentSceneUrl}.manifest`; }; let r = !1, s = i(); const n = new go(); navigator.onLine && (r = !0, s = s + (s.match(/\?/) == null ? "?" 
: "&") + Date.now()), n.open("GET", s), n.addEventListener("load", () => { if (n.status === 200 || Dp._ValidateXHRData(n, 1)) try { const a = JSON.parse(n.response); this._enableSceneOffline = a.enableSceneOffline, this._enableTexturesOffline = a.enableTexturesOffline && Dp._IsUASupportingBlobStorage, a.version && !isNaN(parseInt(a.version)) && (this._manifestVersionFound = a.version), e(!0); } catch { t(); } else t(); }, !1), n.addEventListener("error", () => { if (r) { r = !1; const a = i(); n.open("GET", a), n.send(); } else t(); }, !1); try { n.send(); } catch { Ce.Error("Error on XHR send request."), e(!1); } } /** * Open the database and make it available * @param successCallback defines the callback to call on success * @param errorCallback defines the callback to call on error */ open(e, t) { const i = () => { this._isSupported = !1, t && t(); }; if (!this._idbFactory || !(this._enableSceneOffline || this._enableTexturesOffline)) this._isSupported = !1, t && t(); else if (this._db) e && e(); else { this._hasReachedQuota = !1, this._isSupported = !0; const r = this._idbFactory.open("babylonjs", 1); r.onerror = () => { i(); }, r.onblocked = () => { Ce.Error("IDB request blocked. Please reload the page."), i(); }, r.onsuccess = () => { this._db = r.result, e(); }, r.onupgradeneeded = (s) => { if (this._db = s.target.result, this._db) try { this._db.createObjectStore("scenes", { keyPath: "sceneUrl" }), this._db.createObjectStore("versions", { keyPath: "sceneUrl" }), this._db.createObjectStore("textures", { keyPath: "textureUrl" }); } catch (n) { Ce.Error("Error while creating object stores. Exception: " + n.message), i(); } }; } } /** * Loads an image from the database * @param url defines the url to load from * @param image defines the target DOM image */ loadImage(e, t) { const i = Dp._ReturnFullUrlLocation(e), r = () => { !this._hasReachedQuota && this._db !== null ? this._saveImageIntoDBAsync(i, t) : t.src = e; }; this._mustUpdateRessources ? r() : this._loadImageFromDBAsync(i, t, r); } _loadImageFromDBAsync(e, t, i) { if (this._isSupported && this._db !== null) { let r; const s = this._db.transaction(["textures"]); s.onabort = () => { t.src = e; }, s.oncomplete = () => { let a; r && typeof URL == "function" ? 
(a = URL.createObjectURL(r.data), t.onerror = () => { Ce.Error("Error loading image from blob URL: " + a + " switching back to web url: " + e), t.src = e; }, t.src = a) : i(); }; const n = s.objectStore("textures").get(e); n.onsuccess = (a) => { r = a.target.result; }, n.onerror = () => { Ce.Error("Error loading texture " + e + " from DB."), t.src = e; }; } else Ce.Error("Error: IndexedDB not supported by your browser or BabylonJS Database is not open."), t.src = e; } _saveImageIntoDBAsync(e, t) { let i; if (this._isSupported) { const r = () => { let s; if (i && typeof URL == "function") try { s = URL.createObjectURL(i); } catch { s = URL.createObjectURL(i); } s && (t.src = s); }; if (Dp._IsUASupportingBlobStorage) { const s = new go(); s.open("GET", e), s.responseType = "blob", s.addEventListener("load", () => { if (s.status === 200 && this._db) { i = s.response; const n = this._db.transaction(["textures"], "readwrite"); n.onabort = (l) => { try { const u = l.target.error; u && u.name === "QuotaExceededError" && (this._hasReachedQuota = !0); } catch { } r(); }, n.oncomplete = () => { r(); }; const a = { textureUrl: e, data: i }; try { const l = n.objectStore("textures").put(a); l.onsuccess = () => { }, l.onerror = () => { r(); }; } catch (l) { l.code === 25 && (Dp._IsUASupportingBlobStorage = !1, this._enableTexturesOffline = !1), t.src = e; } } else t.src = e; }, !1), s.addEventListener("error", () => { Ce.Error("Error in XHR request in BABYLON.Database."), t.src = e; }, !1), s.send(); } else t.src = e; } else Ce.Error("Error: IndexedDB not supported by your browser or Babylon.js database is not open."), t.src = e; } _checkVersionFromDB(e, t) { const i = () => { this._saveVersionIntoDBAsync(e, t); }; this._loadVersionFromDBAsync(e, t, i); } _loadVersionFromDBAsync(e, t, i) { if (this._isSupported && this._db) { let r; try { const s = this._db.transaction(["versions"]); s.oncomplete = () => { r ? this._manifestVersionFound !== r.data ? (this._mustUpdateRessources = !0, i()) : t(r.data) : (this._mustUpdateRessources = !0, i()); }, s.onabort = () => { t(-1); }; const n = s.objectStore("versions").get(e); n.onsuccess = (a) => { r = a.target.result; }, n.onerror = () => { Ce.Error("Error loading version for scene " + e + " from DB."), t(-1); }; } catch (s) { Ce.Error("Error while accessing 'versions' object store (READ OP). Exception: " + s.message), t(-1); } } else Ce.Error("Error: IndexedDB not supported by your browser or Babylon.js database is not open."), t(-1); } _saveVersionIntoDBAsync(e, t) { if (this._isSupported && !this._hasReachedQuota && this._db) try { const i = this._db.transaction(["versions"], "readwrite"); i.onabort = (n) => { try { const a = n.target.error; a && a.name === "QuotaExceededError" && (this._hasReachedQuota = !0); } catch { } t(-1); }, i.oncomplete = () => { t(this._manifestVersionFound); }; const r = { sceneUrl: e, data: this._manifestVersionFound }, s = i.objectStore("versions").put(r); s.onsuccess = () => { }, s.onerror = () => { Ce.Error("Error in DB add version request in BABYLON.Database."); }; } catch (i) { Ce.Error("Error while accessing 'versions' object store (WRITE OP). 
Exception: " + i.message), t(-1); } else t(-1); } /** * Loads a file from database * @param url defines the URL to load from * @param sceneLoaded defines a callback to call on success * @param progressCallBack defines a callback to call when progress changed * @param errorCallback defines a callback to call on error * @param useArrayBuffer defines a boolean to use array buffer instead of text string */ loadFile(e, t, i, r, s) { const n = Dp._ReturnFullUrlLocation(e), a = () => { this._saveFileAsync(n, t, i, s, r); }; this._checkVersionFromDB(n, (l) => { l !== -1 ? this._mustUpdateRessources ? this._saveFileAsync(n, t, i, s, r) : this._loadFileAsync(n, t, a) : r && r(); }); } _loadFileAsync(e, t, i) { if (this._isSupported && this._db) { let r; e.indexOf(".babylon") !== -1 ? r = "scenes" : r = "textures"; let s; const n = this._db.transaction([r]); n.oncomplete = () => { s ? t(s.data) : i(); }, n.onabort = () => { i(); }; const a = n.objectStore(r).get(e); a.onsuccess = (l) => { s = l.target.result; }, a.onerror = () => { Ce.Error("Error loading file " + e + " from DB."), i(); }; } else Ce.Error("Error: IndexedDB not supported by your browser or BabylonJS Database is not open."), t(); } _saveFileAsync(e, t, i, r, s) { if (this._isSupported) { let n; e.indexOf(".babylon") !== -1 ? n = "scenes" : n = "textures"; const a = new go(); let l; a.open("GET", e + (e.match(/\?/) == null ? "?" : "&") + Date.now()), r && (a.responseType = "arraybuffer"), i && (a.onprogress = i), a.addEventListener("load", () => { if (a.status === 200 || a.status < 400 && Dp._ValidateXHRData(a, r ? 6 : 1)) if (l = r ? a.response : a.responseText, !this._hasReachedQuota && this._db) { const o = this._db.transaction([n], "readwrite"); o.onabort = (h) => { try { const d = h.target.error; d && d.name === "QuotaExceededError" && (this._hasReachedQuota = !0); } catch { } t(l); }, o.oncomplete = () => { t(l); }; let u; n === "scenes" ? u = { sceneUrl: e, data: l, version: this._manifestVersionFound } : u = { textureUrl: e, data: l }; try { const h = o.objectStore(n).put(u); h.onsuccess = () => { }, h.onerror = () => { Ce.Error("Error in DB add file request in BABYLON.Database."); }; } catch { t(l); } } else t(l); else a.status >= 400 && s ? s(a) : t(); }, !1), a.addEventListener("error", () => { Ce.Error("error on XHR request."), s && s(); }, !1), a.send(); } else Ce.Error("Error: IndexedDB not supported by your browser or Babylon.js database is not open."), s && s(); } /** * Validates if xhr data is correct * @param xhr defines the request to validate * @param dataType defines the expected data type * @returns true if data is correct */ static _ValidateXHRData(e, t = 7) { try { if (t & 1) { if (e.responseText && e.responseText.length > 0) return !0; if (t === 1) return !1; } if (t & 2) { const i = mN(e.response); if (i.width && i.height && i.width > 0 && i.height > 0) return !0; if (t === 2) return !1; } if (t & 4) { const i = new Uint8Array(e.response, 0, 3); return i[0] === 68 && i[1] === 68 && i[2] === 83; } } catch { } return !1; } } Dp._IsUASupportingBlobStorage = !0; Dp.IDBStorageEnabled = !1; Dp._ParseURL = (c) => { const e = document.createElement("a"); e.href = c; const t = c.substring(0, c.lastIndexOf("#")), i = c.substring(t.lastIndexOf("/") + 1, c.length); return c.substring(0, c.indexOf(i, 0)); }; Dp._ReturnFullUrlLocation = (c) => c.indexOf("http:/") === -1 && c.indexOf("https:/") === -1 && typeof window < "u" ? 
Dp._ParseURL(window.location.href) + c : c; class pne { _isUbo(e) { return e.addUniform !== void 0; } constructor(e) { this._isUbo(e) ? (this.setMatrix3x3 = e.updateMatrix3x3.bind(e), this.setMatrix2x2 = e.updateMatrix2x2.bind(e), this.setFloat = e.updateFloat.bind(e), this.setFloat2 = e.updateFloat2.bind(e), this.setFloat3 = e.updateFloat3.bind(e), this.setFloat4 = e.updateFloat4.bind(e), this.setFloatArray = e.updateFloatArray.bind(e), this.setArray = e.updateArray.bind(e), this.setIntArray = e.updateIntArray.bind(e), this.setMatrix = e.updateMatrix.bind(e), this.setMatrices = e.updateMatrices.bind(e), this.setVector3 = e.updateVector3.bind(e), this.setVector4 = e.updateVector4.bind(e), this.setColor3 = e.updateColor3.bind(e), this.setColor4 = e.updateColor4.bind(e), this.setDirectColor4 = e.updateDirectColor4.bind(e), this.setInt = e.updateInt.bind(e), this.setInt2 = e.updateInt2.bind(e), this.setInt3 = e.updateInt3.bind(e), this.setInt4 = e.updateInt4.bind(e)) : (this.setMatrix3x3 = e.setMatrix3x3.bind(e), this.setMatrix2x2 = e.setMatrix2x2.bind(e), this.setFloat = e.setFloat.bind(e), this.setFloat2 = e.setFloat2.bind(e), this.setFloat3 = e.setFloat3.bind(e), this.setFloat4 = e.setFloat4.bind(e), this.setFloatArray = e.setFloatArray.bind(e), this.setArray = e.setArray.bind(e), this.setIntArray = e.setIntArray.bind(e), this.setMatrix = e.setMatrix.bind(e), this.setMatrices = e.setMatrices.bind(e), this.setVector3 = e.setVector3.bind(e), this.setVector4 = e.setVector4.bind(e), this.setColor3 = e.setColor3.bind(e), this.setColor4 = e.setColor4.bind(e), this.setDirectColor4 = e.setDirectColor4.bind(e), this.setInt = e.setInt.bind(e), this.setInt2 = e.setInt2.bind(e), this.setInt3 = e.setInt3.bind(e), this.setInt4 = e.setInt4.bind(e)); } } const H0e = "gpuUpdateParticlesPixelShader", G0e = `#version 300 es void main() {discard;} `; je.ShadersStore[H0e] = G0e; const K0e = "gpuUpdateParticlesVertexShader", W0e = `#version 300 es #define PI 3.14159 uniform float currentCount;uniform float timeDelta;uniform float stopFactor; #ifndef LOCAL uniform mat4 emitterWM; #endif uniform vec2 lifeTime;uniform vec2 emitPower;uniform vec2 sizeRange;uniform vec4 scaleRange; #ifndef COLORGRADIENTS uniform vec4 color1;uniform vec4 color2; #endif uniform vec3 gravity;uniform sampler2D randomSampler;uniform sampler2D randomSampler2;uniform vec4 angleRange; #ifdef BOXEMITTER uniform vec3 direction1;uniform vec3 direction2;uniform vec3 minEmitBox;uniform vec3 maxEmitBox; #endif #ifdef POINTEMITTER uniform vec3 direction1;uniform vec3 direction2; #endif #ifdef HEMISPHERICEMITTER uniform float radius;uniform float radiusRange;uniform float directionRandomizer; #endif #ifdef SPHEREEMITTER uniform float radius;uniform float radiusRange; #ifdef DIRECTEDSPHEREEMITTER uniform vec3 direction1;uniform vec3 direction2; #else uniform float directionRandomizer; #endif #endif #ifdef CYLINDEREMITTER uniform float radius;uniform float height;uniform float radiusRange; #ifdef DIRECTEDCYLINDEREMITTER uniform vec3 direction1;uniform vec3 direction2; #else uniform float directionRandomizer; #endif #endif #ifdef CONEEMITTER uniform vec2 radius;uniform float coneAngle;uniform vec2 height;uniform float directionRandomizer; #endif in vec3 position; #ifdef CUSTOMEMITTER in vec3 initialPosition; #endif in float age;in float life;in vec4 seed;in vec3 size; #ifndef COLORGRADIENTS in vec4 color; #endif in vec3 direction; #ifndef BILLBOARD in vec3 initialDirection; #endif #ifdef ANGULARSPEEDGRADIENTS in float angle; #else in vec2 angle; 
#endif #ifdef ANIMATESHEET in float cellIndex; #ifdef ANIMATESHEETRANDOMSTART in float cellStartOffset; #endif #endif #ifdef NOISE in vec3 noiseCoordinates1;in vec3 noiseCoordinates2; #endif out vec3 outPosition; #ifdef CUSTOMEMITTER out vec3 outInitialPosition; #endif out float outAge;out float outLife;out vec4 outSeed;out vec3 outSize; #ifndef COLORGRADIENTS out vec4 outColor; #endif out vec3 outDirection; #ifndef BILLBOARD out vec3 outInitialDirection; #endif #ifdef ANGULARSPEEDGRADIENTS out float outAngle; #else out vec2 outAngle; #endif #ifdef ANIMATESHEET out float outCellIndex; #ifdef ANIMATESHEETRANDOMSTART out float outCellStartOffset; #endif #endif #ifdef NOISE out vec3 outNoiseCoordinates1;out vec3 outNoiseCoordinates2; #endif #ifdef SIZEGRADIENTS uniform sampler2D sizeGradientSampler; #endif #ifdef ANGULARSPEEDGRADIENTS uniform sampler2D angularSpeedGradientSampler; #endif #ifdef VELOCITYGRADIENTS uniform sampler2D velocityGradientSampler; #endif #ifdef LIMITVELOCITYGRADIENTS uniform sampler2D limitVelocityGradientSampler;uniform float limitVelocityDamping; #endif #ifdef DRAGGRADIENTS uniform sampler2D dragGradientSampler; #endif #ifdef NOISE uniform vec3 noiseStrength;uniform sampler2D noiseSampler; #endif #ifdef ANIMATESHEET uniform vec4 cellInfos; #endif vec3 getRandomVec3(float offset) {return texture(randomSampler2,vec2(float(gl_VertexID)*offset/currentCount,0)).rgb;} vec4 getRandomVec4(float offset) {return texture(randomSampler,vec2(float(gl_VertexID)*offset/currentCount,0));} void main() {float newAge=age+timeDelta; if (newAge>=life && stopFactor != 0.) {vec3 newPosition;vec3 newDirection;vec4 randoms=getRandomVec4(seed.x);outLife=lifeTime.x+(lifeTime.y-lifeTime.x)*randoms.r;outAge=newAge-life;outSeed=seed; #ifdef SIZEGRADIENTS outSize.x=texture(sizeGradientSampler,vec2(0,0)).r; #else outSize.x=sizeRange.x+(sizeRange.y-sizeRange.x)*randoms.g; #endif outSize.y=scaleRange.x+(scaleRange.y-scaleRange.x)*randoms.b;outSize.z=scaleRange.z+(scaleRange.w-scaleRange.z)*randoms.a; #ifndef COLORGRADIENTS outColor=color1+(color2-color1)*randoms.b; #endif #ifndef ANGULARSPEEDGRADIENTS outAngle.y=angleRange.x+(angleRange.y-angleRange.x)*randoms.a;outAngle.x=angleRange.z+(angleRange.w-angleRange.z)*randoms.r; #else outAngle=angleRange.z+(angleRange.w-angleRange.z)*randoms.r; #endif #ifdef POINTEMITTER vec3 randoms2=getRandomVec3(seed.y);vec3 randoms3=getRandomVec3(seed.z);newPosition=vec3(0,0,0);newDirection=direction1+(direction2-direction1)*randoms3; #elif defined(BOXEMITTER) vec3 randoms2=getRandomVec3(seed.y);vec3 randoms3=getRandomVec3(seed.z);newPosition=minEmitBox+(maxEmitBox-minEmitBox)*randoms2;newDirection=direction1+(direction2-direction1)*randoms3; #elif defined(HEMISPHERICEMITTER) vec3 randoms2=getRandomVec3(seed.y);vec3 randoms3=getRandomVec3(seed.z);float phi=2.0*PI*randoms2.x;float theta=acos(2.0*randoms2.y-1.0);float randX=cos(phi)*sin(theta);float randY=cos(theta);float randZ=sin(phi)*sin(theta);newPosition=(radius-(radius*radiusRange*randoms2.z))*vec3(randX,abs(randY),randZ);newDirection=newPosition+directionRandomizer*randoms3; #elif defined(SPHEREEMITTER) vec3 randoms2=getRandomVec3(seed.y);vec3 randoms3=getRandomVec3(seed.z);float phi=2.0*PI*randoms2.x;float theta=acos(2.0*randoms2.y-1.0);float randX=cos(phi)*sin(theta);float randY=cos(theta);float randZ=sin(phi)*sin(theta);newPosition=(radius-(radius*radiusRange*randoms2.z))*vec3(randX,randY,randZ); #ifdef DIRECTEDSPHEREEMITTER newDirection=normalize(direction1+(direction2-direction1)*randoms3); #else 
newDirection=normalize(newPosition+directionRandomizer*randoms3); #endif #elif defined(CYLINDEREMITTER) vec3 randoms2=getRandomVec3(seed.y);vec3 randoms3=getRandomVec3(seed.z);float yPos=(randoms2.x-0.5)*height;float angle=randoms2.y*PI*2.;float inverseRadiusRangeSquared=((1.-radiusRange)*(1.-radiusRange));float positionRadius=radius*sqrt(inverseRadiusRangeSquared+(randoms2.z*(1.-inverseRadiusRangeSquared)));float xPos=positionRadius*cos(angle);float zPos=positionRadius*sin(angle);newPosition=vec3(xPos,yPos,zPos); #ifdef DIRECTEDCYLINDEREMITTER newDirection=direction1+(direction2-direction1)*randoms3; #else angle=angle+((randoms3.x-0.5)*PI)*directionRandomizer;newDirection=vec3(cos(angle),(randoms3.y-0.5)*directionRandomizer,sin(angle));newDirection=normalize(newDirection); #endif #elif defined(CONEEMITTER) vec3 randoms2=getRandomVec3(seed.y);float s=2.0*PI*randoms2.x; #ifdef CONEEMITTERSPAWNPOINT float h=0.0001; #else float h=randoms2.y*height.y;h=1.-h*h; #endif float lRadius=radius.x-radius.x*randoms2.z*radius.y;lRadius=lRadius*h;float randX=lRadius*sin(s);float randZ=lRadius*cos(s);float randY=h *height.x;newPosition=vec3(randX,randY,randZ); if (abs(cos(coneAngle))==1.0) {newDirection=vec3(0.,1.0,0.);} else {vec3 randoms3=getRandomVec3(seed.z);newDirection=normalize(newPosition+directionRandomizer*randoms3); } #elif defined(CUSTOMEMITTER) newPosition=initialPosition;outInitialPosition=initialPosition; #else newPosition=vec3(0.,0.,0.);newDirection=2.0*(getRandomVec3(seed.w)-vec3(0.5,0.5,0.5)); #endif float power=emitPower.x+(emitPower.y-emitPower.x)*randoms.a; #ifdef LOCAL outPosition=newPosition; #else outPosition=(emitterWM*vec4(newPosition,1.)).xyz; #endif #ifdef CUSTOMEMITTER outDirection=direction; #ifndef BILLBOARD outInitialDirection=direction; #endif #else #ifdef LOCAL vec3 initial=newDirection; #else vec3 initial=(emitterWM*vec4(newDirection,0.)).xyz; #endif outDirection=initial*power; #ifndef BILLBOARD outInitialDirection=initial; #endif #endif #ifdef ANIMATESHEET outCellIndex=cellInfos.x; #ifdef ANIMATESHEETRANDOMSTART outCellStartOffset=randoms.a*outLife; #endif #endif #ifdef NOISE outNoiseCoordinates1=noiseCoordinates1;outNoiseCoordinates2=noiseCoordinates2; #endif } else {float directionScale=timeDelta;outAge=newAge;float ageGradient=newAge/life; #ifdef VELOCITYGRADIENTS directionScale*=texture(velocityGradientSampler,vec2(ageGradient,0)).r; #endif #ifdef DRAGGRADIENTS directionScale*=1.0-texture(dragGradientSampler,vec2(ageGradient,0)).r; #endif #if defined(CUSTOMEMITTER) outPosition=position+(direction-position)*ageGradient; outInitialPosition=initialPosition; #else outPosition=position+direction*directionScale; #endif outLife=life;outSeed=seed; #ifndef COLORGRADIENTS outColor=color; #endif #ifdef SIZEGRADIENTS outSize.x=texture(sizeGradientSampler,vec2(ageGradient,0)).r;outSize.yz=size.yz; #else outSize=size; #endif #ifndef BILLBOARD outInitialDirection=initialDirection; #endif #ifdef CUSTOMEMITTER outDirection=direction; #else vec3 updatedDirection=direction+gravity*timeDelta; #ifdef LIMITVELOCITYGRADIENTS float limitVelocity=texture(limitVelocityGradientSampler,vec2(ageGradient,0)).r;float currentVelocity=length(updatedDirection);if (currentVelocity>limitVelocity) {updatedDirection=updatedDirection*limitVelocityDamping;} #endif outDirection=updatedDirection; #ifdef NOISE float fetchedR=texture(noiseSampler,vec2(noiseCoordinates1.x,noiseCoordinates1.y)*vec2(0.5)+vec2(0.5)).r;float 
fetchedG=texture(noiseSampler,vec2(noiseCoordinates1.z,noiseCoordinates2.x)*vec2(0.5)+vec2(0.5)).r;float fetchedB=texture(noiseSampler,vec2(noiseCoordinates2.y,noiseCoordinates2.z)*vec2(0.5)+vec2(0.5)).r;vec3 force=vec3(2.*fetchedR-1.,2.*fetchedG-1.,2.*fetchedB-1.)*noiseStrength;outDirection=outDirection+force*timeDelta;outNoiseCoordinates1=noiseCoordinates1;outNoiseCoordinates2=noiseCoordinates2; #endif #endif #ifdef ANGULARSPEEDGRADIENTS float angularSpeed=texture(angularSpeedGradientSampler,vec2(ageGradient,0)).r;outAngle=angle+angularSpeed*timeDelta; #else outAngle=vec2(angle.x+angle.y*timeDelta,angle.y); #endif #ifdef ANIMATESHEET float offsetAge=outAge;float dist=cellInfos.y-cellInfos.x; #ifdef ANIMATESHEETRANDOMSTART outCellStartOffset=cellStartOffset;offsetAge+=cellStartOffset; #else float cellStartOffset=0.; #endif float ratio=0.;if (cellInfos.w==1.0) {ratio=clamp(mod(cellStartOffset+cellInfos.z*offsetAge,life)/life,0.,1.0);} else {ratio=clamp(cellStartOffset+cellInfos.z*offsetAge/life,0.,1.0);} outCellIndex=float(int(cellInfos.x+ratio*dist)); #endif }}`; je.ShadersStore[K0e] = W0e; class _ne { constructor(e, t) { this._renderVAO = [], this._updateVAO = [], this.alignDataInBuffer = !1, this._parent = e, this._engine = t, this._updateEffectOptions = { attributes: [ "position", "initialPosition", "age", "life", "seed", "size", "color", "direction", "initialDirection", "angle", "cellIndex", "cellStartOffset", "noiseCoordinates1", "noiseCoordinates2" ], uniformsNames: [ "currentCount", "timeDelta", "emitterWM", "lifeTime", "color1", "color2", "sizeRange", "scaleRange", "gravity", "emitPower", "direction1", "direction2", "minEmitBox", "maxEmitBox", "radius", "directionRandomizer", "height", "coneAngle", "stopFactor", "angleRange", "radiusRange", "cellInfos", "noiseStrength", "limitVelocityDamping" ], uniformBuffersNames: [], samplers: [ "randomSampler", "randomSampler2", "sizeGradientSampler", "angularSpeedGradientSampler", "velocityGradientSampler", "limitVelocityGradientSampler", "noiseSampler", "dragGradientSampler" ], defines: "", fallbacks: null, onCompiled: null, onError: null, indexParameters: null, maxSimultaneousLights: 0, transformFeedbackVaryings: [] }; } contextLost() { this._updateEffect = void 0, this._renderVAO.length = 0, this._updateVAO.length = 0; } isUpdateBufferCreated() { return !!this._updateEffect; } isUpdateBufferReady() { var e, t; return (t = (e = this._updateEffect) === null || e === void 0 ? void 0 : e.isReady()) !== null && t !== void 0 ? 
t : !1; } createUpdateBuffer(e) { return this._updateEffectOptions.transformFeedbackVaryings = ["outPosition"], this._updateEffectOptions.transformFeedbackVaryings.push("outAge"), this._updateEffectOptions.transformFeedbackVaryings.push("outSize"), this._updateEffectOptions.transformFeedbackVaryings.push("outLife"), this._updateEffectOptions.transformFeedbackVaryings.push("outSeed"), this._updateEffectOptions.transformFeedbackVaryings.push("outDirection"), this._parent.particleEmitterType instanceof l5 && this._updateEffectOptions.transformFeedbackVaryings.push("outInitialPosition"), this._parent._colorGradientsTexture || this._updateEffectOptions.transformFeedbackVaryings.push("outColor"), this._parent._isBillboardBased || this._updateEffectOptions.transformFeedbackVaryings.push("outInitialDirection"), this._parent.noiseTexture && (this._updateEffectOptions.transformFeedbackVaryings.push("outNoiseCoordinates1"), this._updateEffectOptions.transformFeedbackVaryings.push("outNoiseCoordinates2")), this._updateEffectOptions.transformFeedbackVaryings.push("outAngle"), this._parent.isAnimationSheetEnabled && (this._updateEffectOptions.transformFeedbackVaryings.push("outCellIndex"), this._parent.spriteRandomStartCell && this._updateEffectOptions.transformFeedbackVaryings.push("outCellStartOffset")), this._updateEffectOptions.defines = e, this._updateEffect = new Cr("gpuUpdateParticles", this._updateEffectOptions, this._engine), new pne(this._updateEffect); } createVertexBuffers(e, t) { this._updateVAO.push(this._createUpdateVAO(e)), this._renderVAO.push(this._engine.recordVertexArrayObject(t, null, this._parent._getWrapper(this._parent.blendMode).effect)), this._engine.bindArrayBuffer(null), this._renderVertexBuffers = t; } createParticleBuffer(e) { return e; } bindDrawBuffers(e, t, i) { i ? this._engine.bindBuffers(this._renderVertexBuffers, i, t) : this._engine.bindVertexArrayObject(this._renderVAO[e], null); } preUpdateParticleBuffer() { const e = this._engine; if (this._engine.enableEffect(this._updateEffect), !e.setState) throw new Error("GPU particles cannot work without a full Engine. 
ThinEngine is not supported"); } updateParticleBuffer(e, t, i) { this._updateEffect.setTexture("randomSampler", this._parent._randomTexture), this._updateEffect.setTexture("randomSampler2", this._parent._randomTexture2), this._parent._sizeGradientsTexture && this._updateEffect.setTexture("sizeGradientSampler", this._parent._sizeGradientsTexture), this._parent._angularSpeedGradientsTexture && this._updateEffect.setTexture("angularSpeedGradientSampler", this._parent._angularSpeedGradientsTexture), this._parent._velocityGradientsTexture && this._updateEffect.setTexture("velocityGradientSampler", this._parent._velocityGradientsTexture), this._parent._limitVelocityGradientsTexture && this._updateEffect.setTexture("limitVelocityGradientSampler", this._parent._limitVelocityGradientsTexture), this._parent._dragGradientsTexture && this._updateEffect.setTexture("dragGradientSampler", this._parent._dragGradientsTexture), this._parent.noiseTexture && this._updateEffect.setTexture("noiseSampler", this._parent.noiseTexture), this._engine.bindVertexArrayObject(this._updateVAO[e], null); const r = this._engine; r.bindTransformFeedbackBuffer(t.getBuffer()), r.setRasterizerState(!1), r.beginTransformFeedback(!0), r.drawArraysType(3, 0, i), r.endTransformFeedback(), r.setRasterizerState(!0), r.bindTransformFeedbackBuffer(null); } releaseBuffers() { } releaseVertexBuffers() { for (let e = 0; e < this._updateVAO.length; e++) this._engine.releaseVertexArrayObject(this._updateVAO[e]); this._updateVAO.length = 0; for (let e = 0; e < this._renderVAO.length; e++) this._engine.releaseVertexArrayObject(this._renderVAO[e]); this._renderVAO.length = 0; } _createUpdateVAO(e) { const t = {}; t.position = e.createVertexBuffer("position", 0, 3); let i = 3; t.age = e.createVertexBuffer("age", i, 1), i += 1, t.size = e.createVertexBuffer("size", i, 3), i += 3, t.life = e.createVertexBuffer("life", i, 1), i += 1, t.seed = e.createVertexBuffer("seed", i, 4), i += 4, t.direction = e.createVertexBuffer("direction", i, 3), i += 3, this._parent.particleEmitterType instanceof l5 && (t.initialPosition = e.createVertexBuffer("initialPosition", i, 3), i += 3), this._parent._colorGradientsTexture || (t.color = e.createVertexBuffer("color", i, 4), i += 4), this._parent._isBillboardBased || (t.initialDirection = e.createVertexBuffer("initialDirection", i, 3), i += 3), this._parent.noiseTexture && (t.noiseCoordinates1 = e.createVertexBuffer("noiseCoordinates1", i, 3), i += 3, t.noiseCoordinates2 = e.createVertexBuffer("noiseCoordinates2", i, 3), i += 3), this._parent._angularSpeedGradientsTexture ? 
(t.angle = e.createVertexBuffer("angle", i, 1), i += 1) : (t.angle = e.createVertexBuffer("angle", i, 2), i += 2), this._parent._isAnimationSheetEnabled && (t.cellIndex = e.createVertexBuffer("cellIndex", i, 1), i += 1, this._parent.spriteRandomStartCell && (t.cellStartOffset = e.createVertexBuffer("cellStartOffset", i, 1), i += 1)); const r = this._engine.recordVertexArrayObject(t, null, this._updateEffect); return this._engine.bindArrayBuffer(null), r; } } Be("BABYLON.WebGL2ParticleSystem", _ne); const j0e = "gpuUpdateParticlesComputeShader", X0e = `struct Particle {position : vec3, age : f32, size : vec3, life : f32, seed : vec4, direction : vec3, dummy0: f32, #ifdef CUSTOMEMITTER initialPosition : vec3, dummy1: f32, #endif #ifndef COLORGRADIENTS color : vec4, #endif #ifndef BILLBOARD initialDirection : vec3, dummy2: f32, #endif #ifdef NOISE noiseCoordinates1 : vec3, dummy3: f32, noiseCoordinates2 : vec3, dummy4: f32, #endif #ifdef ANGULARSPEEDGRADIENTS angle : f32, #else angle : vec2, #endif #ifdef ANIMATESHEET cellIndex : f32, #ifdef ANIMATESHEETRANDOMSTART cellStartOffset : f32, #endif #endif };struct Particles {particles : array,};struct SimParams {currentCount : f32, timeDelta : f32, stopFactor : f32, randomTextureSize: i32, lifeTime : vec2, emitPower : vec2, #ifndef COLORGRADIENTS color1 : vec4, color2 : vec4, #endif sizeRange : vec2, scaleRange : vec4, angleRange : vec4, gravity : vec3, #ifdef LIMITVELOCITYGRADIENTS limitVelocityDamping : f32, #endif #ifdef ANIMATESHEET cellInfos : vec4, #endif #ifdef NOISE noiseStrength : vec3, #endif #ifndef LOCAL emitterWM : mat4x4, #endif #ifdef BOXEMITTER direction1 : vec3, direction2 : vec3, minEmitBox : vec3, maxEmitBox : vec3, #endif #ifdef CONEEMITTER radius : vec2, coneAngle : f32, height : vec2, directionRandomizer : f32, #endif #ifdef CYLINDEREMITTER radius : f32, height : f32, radiusRange : f32, #ifdef DIRECTEDCYLINDEREMITTER direction1 : vec3, direction2 : vec3, #else directionRandomizer : f32, #endif #endif #ifdef HEMISPHERICEMITTER radius : f32, radiusRange : f32, directionRandomizer : f32, #endif #ifdef POINTEMITTER direction1 : vec3, direction2 : vec3, #endif #ifdef SPHEREEMITTER radius : f32, radiusRange : f32, #ifdef DIRECTEDSPHEREEMITTER direction1 : vec3, direction2 : vec3, #else directionRandomizer : f32, #endif #endif };@binding(0) @group(0) var params : SimParams;@binding(1) @group(0) var particlesIn : Particles;@binding(2) @group(0) var particlesOut : Particles;@binding(3) @group(0) var randomTexture : texture_2d;@binding(4) @group(0) var randomTexture2 : texture_2d; #ifdef SIZEGRADIENTS @binding(0) @group(1) var sizeGradientSampler : sampler;@binding(1) @group(1) var sizeGradientTexture : texture_2d; #endif #ifdef ANGULARSPEEDGRADIENTS @binding(2) @group(1) var angularSpeedGradientSampler : sampler;@binding(3) @group(1) var angularSpeedGradientTexture : texture_2d; #endif #ifdef VELOCITYGRADIENTS @binding(4) @group(1) var velocityGradientSampler : sampler;@binding(5) @group(1) var velocityGradientTexture : texture_2d; #endif #ifdef LIMITVELOCITYGRADIENTS @binding(6) @group(1) var limitVelocityGradientSampler : sampler;@binding(7) @group(1) var limitVelocityGradientTexture : texture_2d; #endif #ifdef DRAGGRADIENTS @binding(8) @group(1) var dragGradientSampler : sampler;@binding(9) @group(1) var dragGradientTexture : texture_2d; #endif #ifdef NOISE @binding(10) @group(1) var noiseSampler : sampler;@binding(11) @group(1) var noiseTexture : texture_2d; #endif fn getRandomVec3(offset : f32,vertexID : f32)->vec3 {return 
textureLoad(randomTexture2,vec2(i32(vertexID*offset/params.currentCount*f32(params.randomTextureSize)) % params.randomTextureSize,0),0).rgb;} fn getRandomVec4(offset : f32,vertexID : f32)->vec4 {return textureLoad(randomTexture,vec2(i32(vertexID*offset/params.currentCount*f32(params.randomTextureSize)) % params.randomTextureSize,0),0);} @compute @workgroup_size(64) fn main(@builtin(global_invocation_id) GlobalInvocationID : vec3) {let index : u32=GlobalInvocationID.x;let vertexID : f32=f32(index);if (index>=u32(params.currentCount)) {return;} let PI : f32=3.14159;let timeDelta : f32=params.timeDelta;let newAge : f32=particlesIn.particles[index].age+timeDelta;let life : f32=particlesIn.particles[index].life;let seed : vec4=particlesIn.particles[index].seed;let direction : vec3=particlesIn.particles[index].direction;if (newAge>=life && params.stopFactor != 0.) {var newPosition : vec3;var newDirection : vec3;let randoms : vec4=getRandomVec4(seed.x,vertexID);let outLife : f32=params.lifeTime.x+(params.lifeTime.y-params.lifeTime.x)*randoms.r;particlesOut.particles[index].life=outLife;particlesOut.particles[index].age=newAge-life;particlesOut.particles[index].seed=seed;var sizex : f32; #ifdef SIZEGRADIENTS sizex=textureSampleLevel(sizeGradientTexture,sizeGradientSampler,vec2(0.,0.),0.).r; #else sizex=params.sizeRange.x+(params.sizeRange.y-params.sizeRange.x)*randoms.g; #endif particlesOut.particles[index].size=vec3( sizex, params.scaleRange.x+(params.scaleRange.y-params.scaleRange.x)*randoms.b, params.scaleRange.z+(params.scaleRange.w-params.scaleRange.z)*randoms.a); #ifndef COLORGRADIENTS particlesOut.particles[index].color=params.color1+(params.color2-params.color1)*randoms.b; #endif #ifndef ANGULARSPEEDGRADIENTS particlesOut.particles[index].angle=vec2( params.angleRange.z+(params.angleRange.w-params.angleRange.z)*randoms.r, params.angleRange.x+(params.angleRange.y-params.angleRange.x)*randoms.a); #else particlesOut.particles[index].angle=params.angleRange.z+(params.angleRange.w-params.angleRange.z)*randoms.r; #endif #if defined(POINTEMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let randoms3 : vec3=getRandomVec3(seed.z,vertexID);newPosition=vec3(0.,0.,0.);newDirection=params.direction1+(params.direction2-params.direction1)*randoms3; #elif defined(BOXEMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let randoms3 : vec3=getRandomVec3(seed.z,vertexID);newPosition=params.minEmitBox+(params.maxEmitBox-params.minEmitBox)*randoms2;newDirection=params.direction1+(params.direction2-params.direction1)*randoms3; #elif defined(HEMISPHERICEMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let randoms3 : vec3=getRandomVec3(seed.z,vertexID);let phi : f32=2.0*PI*randoms2.x;let theta : f32=acos(-1.0+2.0*randoms2.y);let randX : f32=cos(phi)*sin(theta);let randY : f32=cos(theta);let randZ : f32=sin(phi)*sin(theta);newPosition=(params.radius-(params.radius*params.radiusRange*randoms2.z))*vec3(randX,abs(randY),randZ);newDirection=normalize(newPosition+params.directionRandomizer*randoms3); #elif defined(SPHEREEMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let randoms3 : vec3=getRandomVec3(seed.z,vertexID);let phi : f32=2.0*PI*randoms2.x;let theta : f32=acos(-1.0+2.0*randoms2.y);let randX : f32=cos(phi)*sin(theta);let randY : f32=cos(theta);let randZ : f32=sin(phi)*sin(theta);newPosition=(params.radius-(params.radius*params.radiusRange*randoms2.z))*vec3(randX,randY,randZ); #ifdef DIRECTEDSPHEREEMITTER 
newDirection=normalize(params.direction1+(params.direction2-params.direction1)*randoms3); #else newDirection=normalize(newPosition+params.directionRandomizer*randoms3); #endif #elif defined(CYLINDEREMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let randoms3 : vec3=getRandomVec3(seed.z,vertexID);let yPos : f32=(-0.5+randoms2.x)*params.height;var angle : f32=randoms2.y*PI*2.;let inverseRadiusRangeSquared : f32=(1.-params.radiusRange)*(1.-params.radiusRange);let positionRadius : f32=params.radius*sqrt(inverseRadiusRangeSquared+randoms2.z*(1.-inverseRadiusRangeSquared));let xPos : f32=positionRadius*cos(angle);let zPos : f32=positionRadius*sin(angle);newPosition=vec3(xPos,yPos,zPos); #ifdef DIRECTEDCYLINDEREMITTER newDirection=params.direction1+(params.direction2-params.direction1)*randoms3; #else angle=angle+(-0.5+randoms3.x)*PI*params.directionRandomizer;newDirection=vec3(cos(angle),(-0.5+randoms3.y)*params.directionRandomizer,sin(angle));newDirection=normalize(newDirection); #endif #elif defined(CONEEMITTER) let randoms2 : vec3=getRandomVec3(seed.y,vertexID);let s : f32=2.0*PI*randoms2.x; #ifdef CONEEMITTERSPAWNPOINT let h : f32=0.0001; #else var h : f32=randoms2.y*params.height.y;h=1.-h*h; #endif var lRadius : f32=params.radius.x-params.radius.x*randoms2.z*params.radius.y;lRadius=lRadius*h;let randX : f32=lRadius*sin(s);let randZ : f32=lRadius*cos(s);let randY : f32=h *params.height.x;newPosition=vec3(randX,randY,randZ); if (abs(cos(params.coneAngle))==1.0) {newDirection=vec3(0.,1.0,0.);} else {let randoms3 : vec3=getRandomVec3(seed.z,vertexID);newDirection=normalize(newPosition+params.directionRandomizer*randoms3); } #elif defined(CUSTOMEMITTER) newPosition=particlesIn.particles[index].initialPosition;particlesOut.particles[index].initialPosition=newPosition; #else newPosition=vec3(0.,0.,0.);newDirection=2.0*(getRandomVec3(seed.w,vertexID)-vec3(0.5,0.5,0.5)); #endif let power : f32=params.emitPower.x+(params.emitPower.y-params.emitPower.x)*randoms.a; #ifdef LOCAL particlesOut.particles[index].position=newPosition; #else particlesOut.particles[index].position=(params.emitterWM*vec4(newPosition,1.)).xyz; #endif #ifdef CUSTOMEMITTER particlesOut.particles[index].direction=direction; #ifndef BILLBOARD particlesOut.particles[index].initialDirection=direction; #endif #else #ifdef LOCAL let initial : vec3=newDirection; #else let initial : vec3=(params.emitterWM*vec4(newDirection,0.)).xyz; #endif particlesOut.particles[index].direction=initial*power; #ifndef BILLBOARD particlesOut.particles[index].initialDirection=initial; #endif #endif #ifdef ANIMATESHEET particlesOut.particles[index].cellIndex=params.cellInfos.x; #ifdef ANIMATESHEETRANDOMSTART particlesOut.particles[index].cellStartOffset=randoms.a*outLife; #endif #endif #ifdef NOISE particlesOut.particles[index].noiseCoordinates1=particlesIn.particles[index].noiseCoordinates1;particlesOut.particles[index].noiseCoordinates2=particlesIn.particles[index].noiseCoordinates2; #endif } else {var directionScale : f32=timeDelta;particlesOut.particles[index].age=newAge;let ageGradient : f32=newAge/life; #ifdef VELOCITYGRADIENTS directionScale=directionScale*textureSampleLevel(velocityGradientTexture,velocityGradientSampler,vec2(ageGradient,0.),0.).r; #endif #ifdef DRAGGRADIENTS directionScale=directionScale*(1.0-textureSampleLevel(dragGradientTexture,dragGradientSampler,vec2(ageGradient,0.),0.).r); #endif let position : vec3=particlesIn.particles[index].position; #if defined(CUSTOMEMITTER) 
particlesOut.particles[index].position=position+(direction-position)*ageGradient; particlesOut.particles[index].initialPosition=particlesIn.particles[index].initialPosition; #else particlesOut.particles[index].position=position+direction*directionScale; #endif particlesOut.particles[index].life=life;particlesOut.particles[index].seed=seed; #ifndef COLORGRADIENTS particlesOut.particles[index].color=particlesIn.particles[index].color; #endif #ifdef SIZEGRADIENTS particlesOut.particles[index].size=vec3( textureSampleLevel(sizeGradientTexture,sizeGradientSampler,vec2(ageGradient,0.),0.).r, particlesIn.particles[index].size.yz); #else particlesOut.particles[index].size=particlesIn.particles[index].size; #endif #ifndef BILLBOARD particlesOut.particles[index].initialDirection=particlesIn.particles[index].initialDirection; #endif #ifdef CUSTOMEMITTER particlesOut.particles[index].direction=direction; #else var updatedDirection : vec3=direction+params.gravity*timeDelta; #ifdef LIMITVELOCITYGRADIENTS let limitVelocity : f32=textureSampleLevel(limitVelocityGradientTexture,limitVelocityGradientSampler,vec2(ageGradient,0.),0.).r;let currentVelocity : f32=length(updatedDirection);if (currentVelocity>limitVelocity) {updatedDirection=updatedDirection*params.limitVelocityDamping;} #endif particlesOut.particles[index].direction=updatedDirection; #ifdef NOISE let noiseCoordinates1 : vec3=particlesIn.particles[index].noiseCoordinates1;let noiseCoordinates2 : vec3=particlesIn.particles[index].noiseCoordinates2;let fetchedR : f32=textureSampleLevel(noiseTexture,noiseSampler,vec2(noiseCoordinates1.x,noiseCoordinates1.y)*vec2(0.5,0.5)+vec2(0.5,0.5),0.).r;let fetchedG : f32=textureSampleLevel(noiseTexture,noiseSampler,vec2(noiseCoordinates1.z,noiseCoordinates2.x)*vec2(0.5,0.5)+vec2(0.5,0.5),0.).r;let fetchedB : f32=textureSampleLevel(noiseTexture,noiseSampler,vec2(noiseCoordinates2.y,noiseCoordinates2.z)*vec2(0.5,0.5)+vec2(0.5,0.5),0.).r;let force : vec3=vec3(-1.+2.*fetchedR,-1.+2.*fetchedG,-1.+2.*fetchedB)*params.noiseStrength;particlesOut.particles[index].direction=particlesOut.particles[index].direction+force*timeDelta;particlesOut.particles[index].noiseCoordinates1=noiseCoordinates1;particlesOut.particles[index].noiseCoordinates2=noiseCoordinates2; #endif #endif #ifdef ANGULARSPEEDGRADIENTS let angularSpeed : f32=textureSampleLevel(angularSpeedGradientTexture,angularSpeedGradientSampler,vec2(ageGradient,0.),0.).r;particlesOut.particles[index].angle=particlesIn.particles[index].angle+angularSpeed*timeDelta; #else let angle : vec2=particlesIn.particles[index].angle;particlesOut.particles[index].angle=vec2(angle.x+angle.y*timeDelta,angle.y); #endif #ifdef ANIMATESHEET var offsetAge : f32=particlesOut.particles[index].age;let dist : f32=params.cellInfos.y-params.cellInfos.x; #ifdef ANIMATESHEETRANDOMSTART let cellStartOffset : f32=particlesIn.particles[index].cellStartOffset;particlesOut.particles[index].cellStartOffset=cellStartOffset;offsetAge=offsetAge+cellStartOffset; #else let cellStartOffset : f32=0.; #endif var ratio : f32;if (params.cellInfos.w==1.0) {ratio=clamp(((cellStartOffset+params.cellInfos.z*offsetAge) % life)/life,0.,1.0);} else {ratio=clamp((cellStartOffset+params.cellInfos.z*offsetAge)/life,0.,1.0);} particlesOut.particles[index].cellIndex=f32(i32(params.cellInfos.x+ratio*dist)); #endif }} `; je.ShadersStoreWGSL[j0e] = X0e; class mne { constructor(e, t) { this._bufferComputeShader = [], this._renderVertexBuffers = [], this.alignDataInBuffer = !0, this._parent = e, this._engine = t; } 
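/*
 * This compute-shader particle backend is registered below as "BABYLON.ComputeShaderParticleSystem"
 * and is presumably selected by GPUParticleSystem when the engine reports compute-shader support
 * (e.g. WebGPU); the transform-feedback backend registered above as "BABYLON.WebGL2ParticleSystem"
 * is the WebGL2 path. A usage sketch with the public (un-minified) API names, which are assumptions
 * here since this bundle obscures them:
 *   const ps = new BABYLON.GPUParticleSystem("particles", { capacity: 10000 }, scene);
 *   ps.particleTexture = new BABYLON.Texture("textures/flare.png", scene);
 *   ps.emitter = BABYLON.Vector3.Zero();
 *   ps.start();
 */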
contextLost() { this._updateComputeShader = void 0, this._bufferComputeShader.length = 0, this._renderVertexBuffers.length = 0; } isUpdateBufferCreated() { return !!this._updateComputeShader; } isUpdateBufferReady() { var e, t; return (t = (e = this._updateComputeShader) === null || e === void 0 ? void 0 : e.isReady()) !== null && t !== void 0 ? t : !1; } createUpdateBuffer(e) { var t; const i = { params: { group: 0, binding: 0 }, particlesIn: { group: 0, binding: 1 }, particlesOut: { group: 0, binding: 2 }, randomTexture: { group: 0, binding: 3 }, randomTexture2: { group: 0, binding: 4 } }; return this._parent._sizeGradientsTexture && (i.sizeGradientTexture = { group: 1, binding: 1 }), this._parent._angularSpeedGradientsTexture && (i.angularSpeedGradientTexture = { group: 1, binding: 3 }), this._parent._velocityGradientsTexture && (i.velocityGradientTexture = { group: 1, binding: 5 }), this._parent._limitVelocityGradientsTexture && (i.limitVelocityGradientTexture = { group: 1, binding: 7 }), this._parent._dragGradientsTexture && (i.dragGradientTexture = { group: 1, binding: 9 }), this._parent.noiseTexture && (i.noiseTexture = { group: 1, binding: 11 }), this._updateComputeShader = new HI("updateParticles", this._engine, "gpuUpdateParticles", { bindingsMapping: i, defines: e.split(` `) }), (t = this._simParamsComputeShader) === null || t === void 0 || t.dispose(), this._simParamsComputeShader = new Vi(this._engine), this._simParamsComputeShader.addUniform("currentCount", 1), this._simParamsComputeShader.addUniform("timeDelta", 1), this._simParamsComputeShader.addUniform("stopFactor", 1), this._simParamsComputeShader.addUniform("randomTextureSize", 1), this._simParamsComputeShader.addUniform("lifeTime", 2), this._simParamsComputeShader.addUniform("emitPower", 2), this._parent._colorGradientsTexture || (this._simParamsComputeShader.addUniform("color1", 4), this._simParamsComputeShader.addUniform("color2", 4)), this._simParamsComputeShader.addUniform("sizeRange", 2), this._simParamsComputeShader.addUniform("scaleRange", 4), this._simParamsComputeShader.addUniform("angleRange", 4), this._simParamsComputeShader.addUniform("gravity", 3), this._parent._limitVelocityGradientsTexture && this._simParamsComputeShader.addUniform("limitVelocityDamping", 1), this._parent.isAnimationSheetEnabled && this._simParamsComputeShader.addUniform("cellInfos", 4), this._parent.noiseTexture && this._simParamsComputeShader.addUniform("noiseStrength", 3), this._parent.isLocal || this._simParamsComputeShader.addUniform("emitterWM", 16), this._parent.particleEmitterType && this._parent.particleEmitterType.buildUniformLayout(this._simParamsComputeShader), this._updateComputeShader.setUniformBuffer("params", this._simParamsComputeShader), new pne(this._simParamsComputeShader); } createVertexBuffers(e, t) { this._renderVertexBuffers.push(t); } createParticleBuffer(e) { const t = new Wte(this._engine, e.length * 4, 11); return t.update(e), this._bufferComputeShader.push(t), t.getBuffer(); } bindDrawBuffers(e, t, i) { this._engine.bindBuffers(this._renderVertexBuffers[e], i, t); } preUpdateParticleBuffer() { } updateParticleBuffer(e, t, i) { this._simParamsComputeShader.update(), this._updateComputeShader.setTexture("randomTexture", this._parent._randomTexture, !1), this._updateComputeShader.setTexture("randomTexture2", this._parent._randomTexture2, !1), this._parent._sizeGradientsTexture && this._updateComputeShader.setTexture("sizeGradientTexture", this._parent._sizeGradientsTexture), 
this._parent._angularSpeedGradientsTexture && this._updateComputeShader.setTexture("angularSpeedGradientTexture", this._parent._angularSpeedGradientsTexture), this._parent._velocityGradientsTexture && this._updateComputeShader.setTexture("velocityGradientTexture", this._parent._velocityGradientsTexture), this._parent._limitVelocityGradientsTexture && this._updateComputeShader.setTexture("limitVelocityGradientTexture", this._parent._limitVelocityGradientsTexture), this._parent._dragGradientsTexture && this._updateComputeShader.setTexture("dragGradientTexture", this._parent._dragGradientsTexture), this._parent.noiseTexture && this._updateComputeShader.setTexture("noiseTexture", this._parent.noiseTexture), this._updateComputeShader.setStorageBuffer("particlesIn", this._bufferComputeShader[e]), this._updateComputeShader.setStorageBuffer("particlesOut", this._bufferComputeShader[e ^ 1]), this._updateComputeShader.dispatch(Math.ceil(i / 64)); } releaseBuffers() { var e; for (let t = 0; t < this._bufferComputeShader.length; ++t) this._bufferComputeShader[t].dispose(); this._bufferComputeShader.length = 0, (e = this._simParamsComputeShader) === null || e === void 0 || e.dispose(), this._simParamsComputeShader = null, this._updateComputeShader = null; } releaseVertexBuffers() { this._renderVertexBuffers.length = 0; } } Be("BABYLON.ComputeShaderParticleSystem", mne); class ej { /** * Creates a new color4 gradient * @param gradient gets or sets the gradient value (between 0 and 1) * @param color1 gets or sets first associated color * @param color2 gets or sets first second color */ constructor(e, t, i) { this.gradient = e, this.color1 = t, this.color2 = i; } /** * Will get a color picked randomly between color1 and color2. * If color2 is undefined then color1 will be used * @param result defines the target Color4 to store the result in */ getColorToRef(e) { if (!this.color2) { e.copyFrom(this.color1); return; } Et.LerpToRef(this.color1, this.color2, Math.random(), e); } } class gne { /** * Creates a new color3 gradient * @param gradient gets or sets the gradient value (between 0 and 1) * @param color gets or sets associated color */ constructor(e, t) { this.gradient = e, this.color = t; } } class tj { /** * Creates a new factor gradient * @param gradient gets or sets the gradient value (between 0 and 1) * @param factor1 gets or sets first associated factor * @param factor2 gets or sets second associated factor */ constructor(e, t, i) { this.gradient = e, this.factor1 = t, this.factor2 = i; } /** * Will get a number picked randomly between factor1 and factor2. * If factor2 is undefined then factor1 will be used * @returns the picked number */ getFactor() { return this.factor2 === void 0 || this.factor2 === this.factor1 ? 
this.factor1 : this.factor1 + (this.factor2 - this.factor1) * Math.random(); } } class N_ { /** * Gets the current gradient from an array of IValueGradient * @param ratio defines the current ratio to get * @param gradients defines the array of IValueGradient * @param updateFunc defines the callback function used to get the final value from the selected gradients */ static GetCurrentGradient(e, t, i) { if (t[0].gradient > e) { i(t[0], t[0], 1); return; } for (let s = 0; s < t.length - 1; s++) { const n = t[s], a = t[s + 1]; if (e >= n.gradient && e <= a.gradient) { const l = (e - n.gradient) / (a.gradient - n.gradient); i(n, a, l); return; } } const r = t.length - 1; i(t[r], t[r], 1); } } class ZO { /** * Creates a new instance Particle * @param particleSystem the particle system the particle belongs to */ constructor(e) { this.particleSystem = e, this.position = D.Zero(), this.direction = D.Zero(), this.color = new Et(0, 0, 0, 0), this.colorStep = new Et(0, 0, 0, 0), this.lifeTime = 1, this.age = 0, this.size = 0, this.scale = new at(1, 1), this.angle = 0, this.angularSpeed = 0, this.cellIndex = 0, this._attachedSubEmitters = null, this._currentColor1 = new Et(0, 0, 0, 0), this._currentColor2 = new Et(0, 0, 0, 0), this._currentSize1 = 0, this._currentSize2 = 0, this._currentAngularSpeed1 = 0, this._currentAngularSpeed2 = 0, this._currentVelocity1 = 0, this._currentVelocity2 = 0, this._currentLimitVelocity1 = 0, this._currentLimitVelocity2 = 0, this._currentDrag1 = 0, this._currentDrag2 = 0, this.id = ZO._Count++, this.particleSystem.isAnimationSheetEnabled && this._updateCellInfoFromSystem(); } _updateCellInfoFromSystem() { this.cellIndex = this.particleSystem.startSpriteCellID; } /** * Defines how the sprite cell index is updated for the particle */ updateCellIndex() { let e = this.age, t = this.particleSystem.spriteCellChangeSpeed; this.particleSystem.spriteRandomStartCell && (this._randomCellOffset === void 0 && (this._randomCellOffset = Math.random() * this.lifeTime), t === 0 ? (t = 1, e = this._randomCellOffset) : e += this._randomCellOffset); const i = this._initialEndSpriteCellID - this._initialStartSpriteCellID; let r; this._initialSpriteCellLoop ? r = yt.Clamp(e * t % this.lifeTime / this.lifeTime) : r = yt.Clamp(e * t / this.lifeTime), this.cellIndex = this._initialStartSpriteCellID + r * i | 0; } /** * @internal */ _inheritParticleInfoToSubEmitter(e) { if (e.particleSystem.emitter.position) { const t = e.particleSystem.emitter; if (t.position.copyFrom(this.position), e.inheritDirection) { const i = de.Vector3[0]; this.direction.normalizeToRef(i), t.setDirection(i, 0, Math.PI / 2); } } else e.particleSystem.emitter.copyFrom(this.position); this.direction.scaleToRef(e.inheritedVelocityAmount / 2, de.Vector3[0]), e.particleSystem._inheritedVelocityOffset.copyFrom(de.Vector3[0]); } /** @internal */ _inheritParticleInfoToSubEmitters() { this._attachedSubEmitters && this._attachedSubEmitters.length > 0 && this._attachedSubEmitters.forEach((e) => { this._inheritParticleInfoToSubEmitter(e); }); } /** @internal */ _reset() { this.age = 0, this.id = ZO._Count++, this._currentColorGradient = null, this._currentSizeGradient = null, this._currentAngularSpeedGradient = null, this._currentVelocityGradient = null, this._currentLimitVelocityGradient = null, this._currentDragGradient = null, this.cellIndex = this.particleSystem.startSpriteCellID, this._randomCellOffset = void 0; } /** * Copy the properties of particle to another one. * @param other the particle to copy the information to. 
*/ copyTo(e) { e.position.copyFrom(this.position), this._initialDirection ? e._initialDirection ? e._initialDirection.copyFrom(this._initialDirection) : e._initialDirection = this._initialDirection.clone() : e._initialDirection = null, e.direction.copyFrom(this.direction), this._localPosition && (e._localPosition ? e._localPosition.copyFrom(this._localPosition) : e._localPosition = this._localPosition.clone()), e.color.copyFrom(this.color), e.colorStep.copyFrom(this.colorStep), e.lifeTime = this.lifeTime, e.age = this.age, e._randomCellOffset = this._randomCellOffset, e.size = this.size, e.scale.copyFrom(this.scale), e.angle = this.angle, e.angularSpeed = this.angularSpeed, e.particleSystem = this.particleSystem, e.cellIndex = this.cellIndex, e.id = this.id, e._attachedSubEmitters = this._attachedSubEmitters, this._currentColorGradient && (e._currentColorGradient = this._currentColorGradient, e._currentColor1.copyFrom(this._currentColor1), e._currentColor2.copyFrom(this._currentColor2)), this._currentSizeGradient && (e._currentSizeGradient = this._currentSizeGradient, e._currentSize1 = this._currentSize1, e._currentSize2 = this._currentSize2), this._currentAngularSpeedGradient && (e._currentAngularSpeedGradient = this._currentAngularSpeedGradient, e._currentAngularSpeed1 = this._currentAngularSpeed1, e._currentAngularSpeed2 = this._currentAngularSpeed2), this._currentVelocityGradient && (e._currentVelocityGradient = this._currentVelocityGradient, e._currentVelocity1 = this._currentVelocity1, e._currentVelocity2 = this._currentVelocity2), this._currentLimitVelocityGradient && (e._currentLimitVelocityGradient = this._currentLimitVelocityGradient, e._currentLimitVelocity1 = this._currentLimitVelocity1, e._currentLimitVelocity2 = this._currentLimitVelocity2), this._currentDragGradient && (e._currentDragGradient = this._currentDragGradient, e._currentDrag1 = this._currentDrag1, e._currentDrag2 = this._currentDrag2), this.particleSystem.isAnimationSheetEnabled && (e._initialStartSpriteCellID = this._initialStartSpriteCellID, e._initialEndSpriteCellID = this._initialEndSpriteCellID, e._initialSpriteCellLoop = this._initialSpriteCellLoop), this.particleSystem.useRampGradients && (e.remapData && this.remapData ? e.remapData.copyFrom(this.remapData) : e.remapData = new Di(0, 0, 0, 0)), this._randomNoiseCoordinates1 && (e._randomNoiseCoordinates1 ? 
(e._randomNoiseCoordinates1.copyFrom(this._randomNoiseCoordinates1), e._randomNoiseCoordinates2.copyFrom(this._randomNoiseCoordinates2)) : (e._randomNoiseCoordinates1 = this._randomNoiseCoordinates1.clone(), e._randomNoiseCoordinates2 = this._randomNoiseCoordinates2.clone())); } } ZO._Count = 0; var qO; (function(c) { c[c.ATTACHED = 0] = "ATTACHED", c[c.END = 1] = "END"; })(qO || (qO = {})); class hT { /** * Creates a sub emitter * @param particleSystem the particle system to be used by the sub emitter */ constructor(e) { if (this.particleSystem = e, this.type = qO.END, this.inheritDirection = !1, this.inheritedVelocityAmount = 0, !e.emitter || !e.emitter.dispose) { const t = Qo("BABYLON.AbstractMesh"); e.emitter = new t("SubemitterSystemEmitter", e.getScene()), e._disposeEmitterOnDispose = !0; } } /** * Clones the sub emitter * @returns the cloned sub emitter */ clone() { let e = this.particleSystem.emitter; if (!e) e = new D(); else if (e instanceof D) e = e.clone(); else if (e.getClassName().indexOf("Mesh") !== -1) { const i = Qo("BABYLON.Mesh"); e = new i("", e.getScene()), e.isVisible = !1; } const t = new hT(this.particleSystem.clone(this.particleSystem.name, e)); return t.particleSystem.name += "Clone", t.type = this.type, t.inheritDirection = this.inheritDirection, t.inheritedVelocityAmount = this.inheritedVelocityAmount, t.particleSystem._disposeEmitterOnDispose = !0, t.particleSystem.disposeOnStop = !0, t; } /** * Serialize current object to a JSON object * @param serializeTexture defines if the texture must be serialized as well * @returns the serialized object */ serialize(e = !1) { const t = {}; return t.type = this.type, t.inheritDirection = this.inheritDirection, t.inheritedVelocityAmount = this.inheritedVelocityAmount, t.particleSystem = this.particleSystem.serialize(e), t; } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars static _ParseParticleSystem(e, t, i, r = !1) { throw yr("ParseParticle"); } /** * Creates a new SubEmitter from a serialized JSON version * @param serializationObject defines the JSON object to read from * @param sceneOrEngine defines the hosting scene or the hosting engine * @param rootUrl defines the rootUrl for data loading * @returns a new SubEmitter */ static Parse(e, t, i) { const r = e.particleSystem, s = new hT(hT._ParseParticleSystem(r, t, i, !0)); return s.type = e.type, s.inheritDirection = e.inheritDirection, s.inheritedVelocityAmount = e.inheritedVelocityAmount, s.particleSystem._isSubEmitter = !0, s; } /** Release associated resources */ dispose() { this.particleSystem.dispose(); } } const Y0e = "particlesPixelShader", Q0e = `#ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif varying vec2 vUV;varying vec4 vColor;uniform vec4 textureMask;uniform sampler2D diffuseSampler; #include #include #include #include #include #ifdef RAMPGRADIENT varying vec4 remapRanges;uniform sampler2D rampSampler; #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include vec4 textureColor=texture2D(diffuseSampler,vUV);vec4 baseColor=(textureColor*textureMask+(vec4(1.,1.,1.,1.)-textureMask))*vColor; #ifdef RAMPGRADIENT float alpha=baseColor.a;float remappedColorIndex=clamp((alpha-remapRanges.x)/remapRanges.y,0.0,1.0);vec4 rampColor=texture2D(rampSampler,vec2(1.0-remappedColorIndex,0.));baseColor.rgb*=rampColor.rgb;float finalAlpha=baseColor.a;baseColor.a=clamp((alpha*rampColor.a-remapRanges.z)/remapRanges.w,0.0,1.0); #endif #ifdef BLENDMULTIPLYMODE float 
sourceAlpha=vColor.a*textureColor.a;baseColor.rgb=baseColor.rgb*sourceAlpha+vec3(1.0)*(1.0-sourceAlpha); #endif #include #ifdef IMAGEPROCESSINGPOSTPROCESS baseColor.rgb=toLinearSpace(baseColor.rgb); #else #ifdef IMAGEPROCESSING baseColor.rgb=toLinearSpace(baseColor.rgb);baseColor=applyImageProcessing(baseColor); #endif #endif gl_FragColor=baseColor; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[Y0e] = Q0e; const $0e = "particlesVertexShader", Z0e = `attribute vec3 position;attribute vec4 color;attribute float angle;attribute vec2 size; #ifdef ANIMATESHEET attribute float cellIndex; #endif #ifndef BILLBOARD attribute vec3 direction; #endif #ifdef BILLBOARDSTRETCHED attribute vec3 direction; #endif #ifdef RAMPGRADIENT attribute vec4 remapData; #endif attribute vec2 offset;uniform mat4 view;uniform mat4 projection;uniform vec2 translationPivot; #ifdef ANIMATESHEET uniform vec3 particlesInfos; #endif varying vec2 vUV;varying vec4 vColor;varying vec3 vPositionW; #ifdef RAMPGRADIENT varying vec4 remapRanges; #endif #if defined(BILLBOARD) && !defined(BILLBOARDY) && !defined(BILLBOARDSTRETCHED) uniform mat4 invView; #endif #include #include #ifdef BILLBOARD uniform vec3 eyePosition; #endif vec3 rotate(vec3 yaxis,vec3 rotatedCorner) {vec3 xaxis=normalize(cross(vec3(0.,1.0,0.),yaxis));vec3 zaxis=normalize(cross(yaxis,xaxis));vec3 row0=vec3(xaxis.x,xaxis.y,xaxis.z);vec3 row1=vec3(yaxis.x,yaxis.y,yaxis.z);vec3 row2=vec3(zaxis.x,zaxis.y,zaxis.z);mat3 rotMatrix= mat3(row0,row1,row2);vec3 alignedCorner=rotMatrix*rotatedCorner;return position+alignedCorner;} #ifdef BILLBOARDSTRETCHED vec3 rotateAlign(vec3 toCamera,vec3 rotatedCorner) {vec3 normalizedToCamera=normalize(toCamera);vec3 normalizedCrossDirToCamera=normalize(cross(normalize(direction),normalizedToCamera));vec3 row0=vec3(normalizedCrossDirToCamera.x,normalizedCrossDirToCamera.y,normalizedCrossDirToCamera.z);vec3 row2=vec3(normalizedToCamera.x,normalizedToCamera.y,normalizedToCamera.z); #ifdef BILLBOARDSTRETCHED_LOCAL vec3 row1=direction; #else vec3 crossProduct=normalize(cross(normalizedToCamera,normalizedCrossDirToCamera));vec3 row1=vec3(crossProduct.x,crossProduct.y,crossProduct.z); #endif mat3 rotMatrix= mat3(row0,row1,row2);vec3 alignedCorner=rotMatrix*rotatedCorner;return position+alignedCorner;} #endif #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec2 cornerPos;cornerPos=(vec2(offset.x-0.5,offset.y -0.5)-translationPivot)*size; #ifdef BILLBOARD vec3 rotatedCorner; #ifdef BILLBOARDY rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.z=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.y=0.;rotatedCorner.xz+=translationPivot;vec3 yaxis=position-eyePosition;yaxis.y=0.;vPositionW=rotate(normalize(yaxis),rotatedCorner);vec3 viewPos=(view*vec4(vPositionW,1.0)).xyz; #elif defined(BILLBOARDSTRETCHED) rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.z=0.;rotatedCorner.xy+=translationPivot;vec3 toCamera=position-eyePosition;vPositionW=rotateAlign(toCamera,rotatedCorner);vec3 viewPos=(view*vec4(vPositionW,1.0)).xyz; #else rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.z=0.;rotatedCorner.xy+=translationPivot;vec3 viewPos=(view*vec4(position,1.0)).xyz+rotatedCorner;vPositionW=(invView*vec4(viewPos,1)).xyz; #endif #ifdef RAMPGRADIENT remapRanges=remapData; #endif 
gl_Position=projection*vec4(viewPos,1.0); #else vec3 rotatedCorner;rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.z=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.y=0.;rotatedCorner.xz+=translationPivot;vec3 yaxis=normalize(direction);vPositionW=rotate(yaxis,rotatedCorner);gl_Position=projection*view*vec4(vPositionW,1.0); #endif vColor=color; #ifdef ANIMATESHEET float rowOffset=floor(cellIndex*particlesInfos.z);float columnOffset=cellIndex-rowOffset/particlesInfos.z;vec2 uvScale=particlesInfos.xy;vec2 uvOffset=vec2(offset.x ,1.0-offset.y);vUV=(uvOffset+vec2(columnOffset,rowOffset))*uvScale; #else vUV=offset; #endif #if defined(CLIPPLANE) || defined(CLIPPLANE2) || defined(CLIPPLANE3) || defined(CLIPPLANE4) || defined(CLIPPLANE5) || defined(CLIPPLANE6) vec4 worldPos=vec4(vPositionW,1.0); #endif #include #include #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[$0e] = Z0e; class ns extends V4 { /** * Sets a callback that will be triggered when the system is disposed */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** Gets or sets a boolean indicating that ramp gradients must be used * @see https://doc.babylonjs.com/features/featuresDeepDive/particles/particle_system/particle_system_intro#ramp-gradients */ get useRampGradients() { return this._useRampGradients; } set useRampGradients(e) { this._useRampGradients !== e && (this._useRampGradients = e, this._resetEffect()); } //end of Sub-emitter /** * Gets the current list of active particles */ get particles() { return this._particles; } /** * Gets the number of particles active at the same time. * @returns The number of active particles. */ getActiveCount() { return this._particles.length; } /** * Returns the string "ParticleSystem" * @returns a string containing the class name */ getClassName() { return "ParticleSystem"; } /** * Gets a boolean indicating that the system is stopping * @returns true if the system is currently stopping */ isStopping() { return this._stopped && this.isAlive(); } /** * Gets the custom effect used to render the particles * @param blendMode Blend mode for which the effect should be retrieved * @returns The effect */ getCustomEffect(e = 0) { var t, i; return (i = (t = this._customWrappers[e]) === null || t === void 0 ? void 0 : t.effect) !== null && i !== void 0 ? i : this._customWrappers[0].effect; } _getCustomDrawWrapper(e = 0) { var t; return (t = this._customWrappers[e]) !== null && t !== void 0 ? 
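// Hedged usage sketch (not part of the bundle): the useRampGradients accessor above enables
// ramp-gradient color remapping. Assuming `ps` is a BABYLON.ParticleSystem, the public API is:
//   ps.useRampGradients = true;
//   ps.addRampGradient(0, new BABYLON.Color3(1, 1, 1));
//   ps.addRampGradient(1, new BABYLON.Color3(0.7, 0.3, 0.1));
//   ps.addColorRemapGradient(0, 0.2, 0.8); // optionally narrow the range used to sample the ramp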
t : this._customWrappers[0]; } /** * Sets the custom effect used to render the particles * @param effect The effect to set * @param blendMode Blend mode for which the effect should be set */ setCustomEffect(e, t = 0) { this._customWrappers[t] = new $o(this._engine), this._customWrappers[t].effect = e, this._customWrappers[t].drawContext && (this._customWrappers[t].drawContext.useInstancing = this._useInstancing); } /** * Observable that will be called just before the particles are drawn */ get onBeforeDrawParticlesObservable() { return this._onBeforeDrawParticlesObservable || (this._onBeforeDrawParticlesObservable = new Fe()), this._onBeforeDrawParticlesObservable; } /** * Gets the name of the particle vertex shader */ get vertexShaderName() { return "particles"; } /** * Gets the vertex buffers used by the particle system */ get vertexBuffers() { return this._vertexBuffers; } /** * Gets the index buffer used by the particle system (or null if no index buffer is used (if _useInstancing=true)) */ get indexBuffer() { return this._indexBuffer; } /** * Instantiates a particle system. * Particles are often small sprites used to simulate hard-to-reproduce phenomena like fire, smoke, water, or abstract visual effects like magic glitter and faery dust. * @param name The name of the particle system * @param capacity The max number of particles alive at the same time * @param sceneOrEngine The scene the particle system belongs to or the engine to use if no scene * @param customEffect a custom effect used to change the way particles are rendered by default * @param isAnimationSheetEnabled Must be true if using a spritesheet to animate the particles texture * @param epsilon Offset used to render the particles */ constructor(e, t, i, r = null, s = !1, n = 0.01) { super(e), this._emitterInverseWorldMatrix = Ae.Identity(), this._inheritedVelocityOffset = new D(), this.onDisposeObservable = new Fe(), this.onStoppedObservable = new Fe(), this._particles = new Array(), this._stockParticles = new Array(), this._newPartsExcess = 0, this._vertexBuffers = {}, this._scaledColorStep = new Et(0, 0, 0, 0), this._colorDiff = new Et(0, 0, 0, 0), this._scaledDirection = D.Zero(), this._scaledGravity = D.Zero(), this._currentRenderId = -1, this._useInstancing = !1, this._started = !1, this._stopped = !1, this._actualFrame = 0, this._currentEmitRate1 = 0, this._currentEmitRate2 = 0, this._currentStartSize1 = 0, this._currentStartSize2 = 0, this.updateInAnimate = !0, this._rawTextureWidth = 256, this._useRampGradients = !1, this._disposeEmitterOnDispose = !1, this.isLocal = !1, this.isGPU = !1, this._onBeforeDrawParticlesObservable = null, this.recycleParticle = (l) => { const o = this._particles.pop(); o !== l && o.copyTo(l), this._stockParticles.push(o); }, this._createParticle = () => { let l; if (this._stockParticles.length !== 0 ? 
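// Minimal instantiation sketch matching the constructor documented above (hedged; `scene` and
// the texture URL are assumed, values are illustrative):
//   const ps = new BABYLON.ParticleSystem("smoke", 2000, scene); // capacity = max live particles
//   ps.particleTexture = new BABYLON.Texture("textures/flare.png", scene);
//   ps.emitter = new BABYLON.Vector3(0, 0, 0); // a Vector3 or any AbstractMesh
//   ps.emitRate = 100;
//   ps.start();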
(l = this._stockParticles.pop(), l._reset()) : l = new ZO(this), this._subEmitters && this._subEmitters.length > 0) { const o = this._subEmitters[Math.floor(Math.random() * this._subEmitters.length)]; l._attachedSubEmitters = [], o.forEach((u) => { if (u.type === qO.ATTACHED) { const h = u.clone(); l._attachedSubEmitters.push(h), h.particleSystem.start(); } }); } return l; }, this._emitFromParticle = (l) => { if (!this._subEmitters || this._subEmitters.length === 0) return; const o = Math.floor(Math.random() * this._subEmitters.length); this._subEmitters[o].forEach((u) => { if (u.type === qO.END) { const h = u.clone(); l._inheritParticleInfoToSubEmitter(h), h.particleSystem._rootParticleSystem = this, this.activeSubSystems.push(h.particleSystem), h.particleSystem.start(); } }); }, this._capacity = t, this._epsilon = n, this._isAnimationSheetEnabled = s, !i || i.getClassName() === "Scene" ? (this._scene = i || gi.LastCreatedScene, this._engine = this._scene.getEngine(), this.uniqueId = this._scene.getUniqueId(), this._scene.particleSystems.push(this)) : (this._engine = i, this.defaultProjectionMatrix = Ae.PerspectiveFovLH(0.8, 1, 0.1, 100, this._engine.isNDCHalfZRange)), this._engine.getCaps().vertexArrayObject && (this._vertexArrayObject = null), this._attachImageProcessingConfiguration(null), this._customWrappers = { 0: new $o(this._engine) }, this._customWrappers[0].effect = r, this._drawWrappers = [], this._useInstancing = this._engine.getCaps().instancedArrays, this._createIndexBuffer(), this._createVertexBuffers(), this.particleEmitterType = new o5(); let a = null; this.updateFunction = (l) => { var o; let u = null; this.noiseTexture && (u = this.noiseTexture.getSize(), (o = this.noiseTexture.getContent()) === null || o === void 0 || o.then((d) => { a = d; })); const h = l === this._particles; for (let d = 0; d < l.length; d++) { const f = l[d]; let p = this._scaledUpdateSpeed; const m = f.age; if (f.age += p, f.age > f.lifeTime) { const C = f.age - m; p = (f.lifeTime - m) * p / C, f.age = f.lifeTime; } const _ = f.age / f.lifeTime; this._colorGradients && this._colorGradients.length > 0 ? 
N_.GetCurrentGradient(_, this._colorGradients, (C, x, b) => { C !== f._currentColorGradient && (f._currentColor1.copyFrom(f._currentColor2), x.getColorToRef(f._currentColor2), f._currentColorGradient = C), Et.LerpToRef(f._currentColor1, f._currentColor2, b, f.color); }) : (f.colorStep.scaleToRef(p, this._scaledColorStep), f.color.addInPlace(this._scaledColorStep), f.color.a < 0 && (f.color.a = 0)), this._angularSpeedGradients && this._angularSpeedGradients.length > 0 && N_.GetCurrentGradient(_, this._angularSpeedGradients, (C, x, b) => { C !== f._currentAngularSpeedGradient && (f._currentAngularSpeed1 = f._currentAngularSpeed2, f._currentAngularSpeed2 = x.getFactor(), f._currentAngularSpeedGradient = C), f.angularSpeed = yt.Lerp(f._currentAngularSpeed1, f._currentAngularSpeed2, b); }), f.angle += f.angularSpeed * p; let v = p; if (this._velocityGradients && this._velocityGradients.length > 0 && N_.GetCurrentGradient(_, this._velocityGradients, (C, x, b) => { C !== f._currentVelocityGradient && (f._currentVelocity1 = f._currentVelocity2, f._currentVelocity2 = x.getFactor(), f._currentVelocityGradient = C), v *= yt.Lerp(f._currentVelocity1, f._currentVelocity2, b); }), f.direction.scaleToRef(v, this._scaledDirection), this._limitVelocityGradients && this._limitVelocityGradients.length > 0 && N_.GetCurrentGradient(_, this._limitVelocityGradients, (C, x, b) => { C !== f._currentLimitVelocityGradient && (f._currentLimitVelocity1 = f._currentLimitVelocity2, f._currentLimitVelocity2 = x.getFactor(), f._currentLimitVelocityGradient = C); const S = yt.Lerp(f._currentLimitVelocity1, f._currentLimitVelocity2, b); f.direction.length() > S && f.direction.scaleInPlace(this.limitVelocityDamping); }), this._dragGradients && this._dragGradients.length > 0 && N_.GetCurrentGradient(_, this._dragGradients, (C, x, b) => { C !== f._currentDragGradient && (f._currentDrag1 = f._currentDrag2, f._currentDrag2 = x.getFactor(), f._currentDragGradient = C); const S = yt.Lerp(f._currentDrag1, f._currentDrag2, b); this._scaledDirection.scaleInPlace(1 - S); }), this.isLocal && f._localPosition ? 
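// The branches above apply velocity, limit-velocity and drag gradients to each particle per frame.
// Hedged public-API sketch (assumes `ps` is a BABYLON.ParticleSystem; numbers are illustrative):
//   ps.addVelocityGradient(0, 1, 2);   // scale particle speed over lifetime
//   ps.addLimitVelocityGradient(0, 5); // cap the speed...
//   ps.limitVelocityDamping = 0.9;     // ...and damp particles that exceed the cap
//   ps.addDragGradient(0, 0.1);
//   ps.addDragGradient(1, 0.8);        // stronger drag near the end of life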
(f._localPosition.addInPlace(this._scaledDirection), D.TransformCoordinatesToRef(f._localPosition, this._emitterWorldMatrix, f.position)) : f.position.addInPlace(this._scaledDirection), a && u && f._randomNoiseCoordinates1) { const C = this._fetchR(f._randomNoiseCoordinates1.x, f._randomNoiseCoordinates1.y, u.width, u.height, a), x = this._fetchR(f._randomNoiseCoordinates1.z, f._randomNoiseCoordinates2.x, u.width, u.height, a), b = this._fetchR(f._randomNoiseCoordinates2.y, f._randomNoiseCoordinates2.z, u.width, u.height, a), S = de.Vector3[0], M = de.Vector3[1]; S.copyFromFloats((2 * C - 1) * this.noiseStrength.x, (2 * x - 1) * this.noiseStrength.y, (2 * b - 1) * this.noiseStrength.z), S.scaleToRef(p, M), f.direction.addInPlace(M); } if (this.gravity.scaleToRef(p, this._scaledGravity), f.direction.addInPlace(this._scaledGravity), this._sizeGradients && this._sizeGradients.length > 0 && N_.GetCurrentGradient(_, this._sizeGradients, (C, x, b) => { C !== f._currentSizeGradient && (f._currentSize1 = f._currentSize2, f._currentSize2 = x.getFactor(), f._currentSizeGradient = C), f.size = yt.Lerp(f._currentSize1, f._currentSize2, b); }), this._useRampGradients && (this._colorRemapGradients && this._colorRemapGradients.length > 0 && N_.GetCurrentGradient(_, this._colorRemapGradients, (C, x, b) => { const S = yt.Lerp(C.factor1, x.factor1, b), M = yt.Lerp(C.factor2, x.factor2, b); f.remapData.x = S, f.remapData.y = M - S; }), this._alphaRemapGradients && this._alphaRemapGradients.length > 0 && N_.GetCurrentGradient(_, this._alphaRemapGradients, (C, x, b) => { const S = yt.Lerp(C.factor1, x.factor1, b), M = yt.Lerp(C.factor2, x.factor2, b); f.remapData.z = S, f.remapData.w = M - S; })), this._isAnimationSheetEnabled && f.updateCellIndex(), f._inheritParticleInfoToSubEmitters(), f.age >= f.lifeTime) { this._emitFromParticle(f), f._attachedSubEmitters && (f._attachedSubEmitters.forEach((C) => { C.particleSystem.disposeOnStop = !0, C.particleSystem.stop(); }), f._attachedSubEmitters = null), this.recycleParticle(f), h && d--; continue; } } }; } _addFactorGradient(e, t, i, r) { const s = new tj(t, i, r); e.push(s), e.sort((n, a) => n.gradient < a.gradient ? -1 : n.gradient > a.gradient ? 
1 : 0); } _removeFactorGradient(e, t) { if (!e) return; let i = 0; for (const r of e) { if (r.gradient === t) { e.splice(i, 1); break; } i++; } } /** * Adds a new life time gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the life time factor to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addLifeTimeGradient(e, t, i) { return this._lifeTimeGradients || (this._lifeTimeGradients = []), this._addFactorGradient(this._lifeTimeGradients, e, t, i), this; } /** * Remove a specific life time gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeLifeTimeGradient(e) { return this._removeFactorGradient(this._lifeTimeGradients, e), this; } /** * Adds a new size gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the size factor to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addSizeGradient(e, t, i) { return this._sizeGradients || (this._sizeGradients = []), this._addFactorGradient(this._sizeGradients, e, t, i), this; } /** * Remove a specific size gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeSizeGradient(e) { return this._removeFactorGradient(this._sizeGradients, e), this; } /** * Adds a new color remap gradient * @param gradient defines the gradient to use (between 0 and 1) * @param min defines the color remap minimal range * @param max defines the color remap maximal range * @returns the current particle system */ addColorRemapGradient(e, t, i) { return this._colorRemapGradients || (this._colorRemapGradients = []), this._addFactorGradient(this._colorRemapGradients, e, t, i), this; } /** * Remove a specific color remap gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeColorRemapGradient(e) { return this._removeFactorGradient(this._colorRemapGradients, e), this; } /** * Adds a new alpha remap gradient * @param gradient defines the gradient to use (between 0 and 1) * @param min defines the alpha remap minimal range * @param max defines the alpha remap maximal range * @returns the current particle system */ addAlphaRemapGradient(e, t, i) { return this._alphaRemapGradients || (this._alphaRemapGradients = []), this._addFactorGradient(this._alphaRemapGradients, e, t, i), this; } /** * Remove a specific alpha remap gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeAlphaRemapGradient(e) { return this._removeFactorGradient(this._alphaRemapGradients, e), this; } /** * Adds a new angular speed gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the angular speed to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addAngularSpeedGradient(e, t, i) { return this._angularSpeedGradients || (this._angularSpeedGradients = []), this._addFactorGradient(this._angularSpeedGradients, e, t, i), this; } /** * Remove a specific angular speed gradient * @param gradient defines the gradient 
to remove * @returns the current particle system */ removeAngularSpeedGradient(e) { return this._removeFactorGradient(this._angularSpeedGradients, e), this; } /** * Adds a new velocity gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the velocity to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addVelocityGradient(e, t, i) { return this._velocityGradients || (this._velocityGradients = []), this._addFactorGradient(this._velocityGradients, e, t, i), this; } /** * Remove a specific velocity gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeVelocityGradient(e) { return this._removeFactorGradient(this._velocityGradients, e), this; } /** * Adds a new limit velocity gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the limit velocity value to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addLimitVelocityGradient(e, t, i) { return this._limitVelocityGradients || (this._limitVelocityGradients = []), this._addFactorGradient(this._limitVelocityGradients, e, t, i), this; } /** * Remove a specific limit velocity gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeLimitVelocityGradient(e) { return this._removeFactorGradient(this._limitVelocityGradients, e), this; } /** * Adds a new drag gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the drag value to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addDragGradient(e, t, i) { return this._dragGradients || (this._dragGradients = []), this._addFactorGradient(this._dragGradients, e, t, i), this; } /** * Remove a specific drag gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeDragGradient(e) { return this._removeFactorGradient(this._dragGradients, e), this; } /** * Adds a new emit rate gradient (please note that this will only work if you set the targetStopDuration property) * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the emit rate value to affect to the specified gradient * @param factor2 defines an additional factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addEmitRateGradient(e, t, i) { return this._emitRateGradients || (this._emitRateGradients = []), this._addFactorGradient(this._emitRateGradients, e, t, i), this; } /** * Remove a specific emit rate gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeEmitRateGradient(e) { return this._removeFactorGradient(this._emitRateGradients, e), this; } /** * Adds a new start size gradient (please note that this will only work if you set the targetStopDuration property) * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the start size value to affect to the specified gradient * @param factor2 defines an additional 
factor used to define a range ([factor, factor2]) with main value to pick the final value from * @returns the current particle system */ addStartSizeGradient(e, t, i) { return this._startSizeGradients || (this._startSizeGradients = []), this._addFactorGradient(this._startSizeGradients, e, t, i), this; } /** * Remove a specific start size gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeStartSizeGradient(e) { return this._removeFactorGradient(this._startSizeGradients, e), this; } _createRampGradientTexture() { if (!this._rampGradients || !this._rampGradients.length || this._rampGradientsTexture || !this._scene) return; const e = new Uint8Array(this._rawTextureWidth * 4), t = mn.Color3[0]; for (let i = 0; i < this._rawTextureWidth; i++) { const r = i / this._rawTextureWidth; N_.GetCurrentGradient(r, this._rampGradients, (s, n, a) => { ze.LerpToRef(s.color, n.color, a, t), e[i * 4] = t.r * 255, e[i * 4 + 1] = t.g * 255, e[i * 4 + 2] = t.b * 255, e[i * 4 + 3] = 255; }); } this._rampGradientsTexture = Po.CreateRGBATexture(e, this._rawTextureWidth, 1, this._scene, !1, !1, 1); } /** * Gets the current list of ramp gradients. * You must use addRampGradient and removeRampGradient to update this list * @returns the list of ramp gradients */ getRampGradients() { return this._rampGradients; } /** Force the system to rebuild all gradients that need to be resync */ forceRefreshGradients() { this._syncRampGradientTexture(); } _syncRampGradientTexture() { this._rampGradients && (this._rampGradients.sort((e, t) => e.gradient < t.gradient ? -1 : e.gradient > t.gradient ? 1 : 0), this._rampGradientsTexture && (this._rampGradientsTexture.dispose(), this._rampGradientsTexture = null), this._createRampGradientTexture()); } /** * Adds a new ramp gradient used to remap particle colors * @param gradient defines the gradient to use (between 0 and 1) * @param color defines the color to affect to the specified gradient * @returns the current particle system */ addRampGradient(e, t) { this._rampGradients || (this._rampGradients = []); const i = new gne(e, t); return this._rampGradients.push(i), this._syncRampGradientTexture(), this; } /** * Remove a specific ramp gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeRampGradient(e) { return this._removeGradientAndTexture(e, this._rampGradients, this._rampGradientsTexture), this._rampGradientsTexture = null, this._rampGradients && this._rampGradients.length > 0 && this._createRampGradientTexture(), this; } /** * Adds a new color gradient * @param gradient defines the gradient to use (between 0 and 1) * @param color1 defines the color to affect to the specified gradient * @param color2 defines an additional color used to define a range ([color, color2]) with main color to pick the final color from * @returns this particle system */ addColorGradient(e, t, i) { this._colorGradients || (this._colorGradients = []); const r = new ej(e, t, i); return this._colorGradients.push(r), this._colorGradients.sort((s, n) => s.gradient < n.gradient ? -1 : s.gradient > n.gradient ? 
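// addColorGradient above keeps the gradient list sorted by gradient value. Hedged usage sketch
// (assumes `ps` is a BABYLON.ParticleSystem):
//   ps.addColorGradient(0, new BABYLON.Color4(1, 1, 1, 0));   // spawn: transparent white
//   ps.addColorGradient(0.5, new BABYLON.Color4(1, 0.5, 0, 1), new BABYLON.Color4(1, 0, 0, 1)); // random pick between the two
//   ps.addColorGradient(1, new BABYLON.Color4(0, 0, 0, 0));   // fade out at end of life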
1 : 0), this; } /** * Remove a specific color gradient * @param gradient defines the gradient to remove * @returns this particle system */ removeColorGradient(e) { if (!this._colorGradients) return this; let t = 0; for (const i of this._colorGradients) { if (i.gradient === e) { this._colorGradients.splice(t, 1); break; } t++; } return this; } /** * Resets the draw wrappers cache */ resetDrawCache() { for (const e of this._drawWrappers) if (e) for (const t of e) t == null || t.dispose(); this._drawWrappers = []; } _fetchR(e, t, i, r, s) { e = Math.abs(e) * 0.5 + 0.5, t = Math.abs(t) * 0.5 + 0.5; const n = e * i % i | 0, a = t * r % r | 0, l = (n + a * i) * 4; return s[l] / 255; } _reset() { this._resetEffect(); } _resetEffect() { this._vertexBuffer && (this._vertexBuffer.dispose(), this._vertexBuffer = null), this._spriteBuffer && (this._spriteBuffer.dispose(), this._spriteBuffer = null), this._vertexArrayObject && (this._engine.releaseVertexArrayObject(this._vertexArrayObject), this._vertexArrayObject = null), this._createVertexBuffers(); } _createVertexBuffers() { this._vertexBufferSize = this._useInstancing ? 10 : 12, this._isAnimationSheetEnabled && (this._vertexBufferSize += 1), (!this._isBillboardBased || this.billboardMode === ns.BILLBOARDMODE_STRETCHED || this.billboardMode === ns.BILLBOARDMODE_STRETCHED_LOCAL) && (this._vertexBufferSize += 3), this._useRampGradients && (this._vertexBufferSize += 4); const e = this._engine, t = this._vertexBufferSize * (this._useInstancing ? 1 : 4); this._vertexData = new Float32Array(this._capacity * t), this._vertexBuffer = new hu(e, this._vertexData, !0, t); let i = 0; const r = this._vertexBuffer.createVertexBuffer(Y.PositionKind, i, 3, this._vertexBufferSize, this._useInstancing); this._vertexBuffers[Y.PositionKind] = r, i += 3; const s = this._vertexBuffer.createVertexBuffer(Y.ColorKind, i, 4, this._vertexBufferSize, this._useInstancing); this._vertexBuffers[Y.ColorKind] = s, i += 4; const n = this._vertexBuffer.createVertexBuffer("angle", i, 1, this._vertexBufferSize, this._useInstancing); this._vertexBuffers.angle = n, i += 1; const a = this._vertexBuffer.createVertexBuffer("size", i, 2, this._vertexBufferSize, this._useInstancing); if (this._vertexBuffers.size = a, i += 2, this._isAnimationSheetEnabled) { const o = this._vertexBuffer.createVertexBuffer("cellIndex", i, 1, this._vertexBufferSize, this._useInstancing); this._vertexBuffers.cellIndex = o, i += 1; } if (!this._isBillboardBased || this.billboardMode === ns.BILLBOARDMODE_STRETCHED || this.billboardMode === ns.BILLBOARDMODE_STRETCHED_LOCAL) { const o = this._vertexBuffer.createVertexBuffer("direction", i, 3, this._vertexBufferSize, this._useInstancing); this._vertexBuffers.direction = o, i += 3; } if (this._useRampGradients) { const o = this._vertexBuffer.createVertexBuffer("remapData", i, 4, this._vertexBufferSize, this._useInstancing); this._vertexBuffers.remapData = o, i += 4; } let l; if (this._useInstancing) { const o = new Float32Array([0, 0, 1, 0, 0, 1, 1, 1]); this._spriteBuffer = new hu(e, o, !1, 2), l = this._spriteBuffer.createVertexBuffer("offset", 0, 2); } else l = this._vertexBuffer.createVertexBuffer("offset", i, 2, this._vertexBufferSize, this._useInstancing), i += 2; this._vertexBuffers.offset = l, this.resetDrawCache(); } _createIndexBuffer() { if (this._useInstancing) { this._linesIndexBufferUseInstancing = this._engine.createIndexBuffer(new Uint32Array([0, 1, 1, 3, 3, 2, 2, 0, 0, 3])); return; } const e = [], t = []; let i = 0; for (let r = 0; r < this._capacity; 
r++) e.push(i), e.push(i + 1), e.push(i + 2), e.push(i), e.push(i + 2), e.push(i + 3), t.push(i, i + 1, i + 1, i + 2, i + 2, i + 3, i + 3, i, i, i + 3), i += 4; this._indexBuffer = this._engine.createIndexBuffer(e), this._linesIndexBuffer = this._engine.createIndexBuffer(t); } /** * Gets the maximum number of particles active at the same time. * @returns The max number of active particles. */ getCapacity() { return this._capacity; } /** * Gets whether there are still active particles in the system. * @returns True if it is alive, otherwise false. */ isAlive() { return this._alive; } /** * Gets if the system has been started. (Note: this will still be true after stop is called) * @returns True if it has been started, otherwise false. */ isStarted() { return this._started; } _prepareSubEmitterInternalArray() { this._subEmitters = new Array(), this.subEmitters && this.subEmitters.forEach((e) => { e instanceof ns ? this._subEmitters.push([new hT(e)]) : e instanceof hT ? this._subEmitters.push([e]) : e instanceof Array && this._subEmitters.push(e); }); } /** * Starts the particle system and begins to emit * @param delay defines the delay in milliseconds before starting the system (this.startDelay by default) */ start(e = this.startDelay) { var t; if (!this.targetStopDuration && this._hasTargetStopDurationDependantGradient()) throw "Particle system started with a targetStopDuration dependant gradient (eg. startSizeGradients) but no targetStopDuration set"; if (e) { setTimeout(() => { this.start(0); }, e); return; } if (this._prepareSubEmitterInternalArray(), this._started = !0, this._stopped = !1, this._actualFrame = 0, this._subEmitters && this._subEmitters.length != 0 && (this.activeSubSystems = []), this._emitRateGradients && (this._emitRateGradients.length > 0 && (this._currentEmitRateGradient = this._emitRateGradients[0], this._currentEmitRate1 = this._currentEmitRateGradient.getFactor(), this._currentEmitRate2 = this._currentEmitRate1), this._emitRateGradients.length > 1 && (this._currentEmitRate2 = this._emitRateGradients[1].getFactor())), this._startSizeGradients && (this._startSizeGradients.length > 0 && (this._currentStartSizeGradient = this._startSizeGradients[0], this._currentStartSize1 = this._currentStartSizeGradient.getFactor(), this._currentStartSize2 = this._currentStartSize1), this._startSizeGradients.length > 1 && (this._currentStartSize2 = this._startSizeGradients[1].getFactor())), this.preWarmCycles) { ((t = this.emitter) === null || t === void 0 ? void 0 : t.getClassName().indexOf("Mesh")) !== -1 && this.emitter.computeWorldMatrix(!0); const i = this.noiseTexture; if (i && i.onGeneratedObservable) i.onGeneratedObservable.addOnce(() => { setTimeout(() => { for (let r = 0; r < this.preWarmCycles; r++) this.animate(!0), i.render(); }); }); else for (let r = 0; r < this.preWarmCycles; r++) this.animate(!0); } this.beginAnimationOnStart && this.animations && this.animations.length > 0 && this._scene && this._scene.beginAnimation(this, this.beginAnimationFrom, this.beginAnimationTo, this.beginAnimationLoop); } /** * Stops the particle system. * @param stopSubEmitters if true it will stop the current system and all created sub-Systems if false it will stop the current root system only, this param is used by the root particle system only. the default value is true. 
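 * @example
 * // Hedged illustration (assumes `ps` is a started ParticleSystem that uses sub-emitters):
 * ps.stop();      // stops the root system and all active sub-systems
 * ps.stop(false); // stops only the root system; already spawned sub-systems keep running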
*/ stop(e = !0) { this._stopped || (this.onStoppedObservable.notifyObservers(this), this._stopped = !0, e && this._stopSubEmitters()); } // animation sheet /** * Remove all active particles */ reset() { this._stockParticles.length = 0, this._particles.length = 0; } /** * @internal (for internal use only) */ _appendParticleVertex(e, t, i, r) { let s = e * this._vertexBufferSize; if (this._vertexData[s++] = t.position.x + this.worldOffset.x, this._vertexData[s++] = t.position.y + this.worldOffset.y, this._vertexData[s++] = t.position.z + this.worldOffset.z, this._vertexData[s++] = t.color.r, this._vertexData[s++] = t.color.g, this._vertexData[s++] = t.color.b, this._vertexData[s++] = t.color.a, this._vertexData[s++] = t.angle, this._vertexData[s++] = t.scale.x * t.size, this._vertexData[s++] = t.scale.y * t.size, this._isAnimationSheetEnabled && (this._vertexData[s++] = t.cellIndex), this._isBillboardBased) (this.billboardMode === ns.BILLBOARDMODE_STRETCHED || this.billboardMode === ns.BILLBOARDMODE_STRETCHED_LOCAL) && (this._vertexData[s++] = t.direction.x, this._vertexData[s++] = t.direction.y, this._vertexData[s++] = t.direction.z); else if (t._initialDirection) { let n = t._initialDirection; this.isLocal && (D.TransformNormalToRef(n, this._emitterWorldMatrix, de.Vector3[0]), n = de.Vector3[0]), n.x === 0 && n.z === 0 && (n.x = 1e-3), this._vertexData[s++] = n.x, this._vertexData[s++] = n.y, this._vertexData[s++] = n.z; } else { let n = t.direction; this.isLocal && (D.TransformNormalToRef(n, this._emitterWorldMatrix, de.Vector3[0]), n = de.Vector3[0]), n.x === 0 && n.z === 0 && (n.x = 1e-3), this._vertexData[s++] = n.x, this._vertexData[s++] = n.y, this._vertexData[s++] = n.z; } this._useRampGradients && t.remapData && (this._vertexData[s++] = t.remapData.x, this._vertexData[s++] = t.remapData.y, this._vertexData[s++] = t.remapData.z, this._vertexData[s++] = t.remapData.w), this._useInstancing || (this._isAnimationSheetEnabled && (i === 0 ? i = this._epsilon : i === 1 && (i = 1 - this._epsilon), r === 0 ? 
r = this._epsilon : r === 1 && (r = 1 - this._epsilon)), this._vertexData[s++] = i, this._vertexData[s++] = r); } _stopSubEmitters() { this.activeSubSystems && (this.activeSubSystems.forEach((e) => { e.stop(!0); }), this.activeSubSystems = []); } _removeFromRoot() { if (!this._rootParticleSystem) return; const e = this._rootParticleSystem.activeSubSystems.indexOf(this); e !== -1 && this._rootParticleSystem.activeSubSystems.splice(e, 1), this._rootParticleSystem = null; } // End of sub system methods _update(e) { if (this._alive = this._particles.length > 0, this.emitter.position) { const i = this.emitter; this._emitterWorldMatrix = i.getWorldMatrix(); } else { const i = this.emitter; this._emitterWorldMatrix = Ae.Translation(i.x, i.y, i.z); } this._emitterWorldMatrix.invertToRef(this._emitterInverseWorldMatrix), this.updateFunction(this._particles); let t; for (let i = 0; i < e && this._particles.length !== this._capacity; i++) { if (t = this._createParticle(), this._particles.push(t), this.targetStopDuration && this._lifeTimeGradients && this._lifeTimeGradients.length > 0) { const s = yt.Clamp(this._actualFrame / this.targetStopDuration); N_.GetCurrentGradient(s, this._lifeTimeGradients, (n, a) => { const l = n, o = a, u = l.getFactor(), h = o.getFactor(), d = (s - l.gradient) / (o.gradient - l.gradient); t.lifeTime = yt.Lerp(u, h, d); }); } else t.lifeTime = yt.RandomRange(this.minLifeTime, this.maxLifeTime); const r = yt.RandomRange(this.minEmitPower, this.maxEmitPower); if (this.startPositionFunction ? this.startPositionFunction(this._emitterWorldMatrix, t.position, t, this.isLocal) : this.particleEmitterType.startPositionFunction(this._emitterWorldMatrix, t.position, t, this.isLocal), this.isLocal && (t._localPosition ? t._localPosition.copyFrom(t.position) : t._localPosition = t.position.clone(), D.TransformCoordinatesToRef(t._localPosition, this._emitterWorldMatrix, t.position)), this.startDirectionFunction ? this.startDirectionFunction(this._emitterWorldMatrix, t.direction, t, this.isLocal) : this.particleEmitterType.startDirectionFunction(this._emitterWorldMatrix, t.direction, t, this.isLocal, this._emitterInverseWorldMatrix), r === 0 ? t._initialDirection ? t._initialDirection.copyFrom(t.direction) : t._initialDirection = t.direction.clone() : t._initialDirection = null, t.direction.scaleInPlace(r), !this._sizeGradients || this._sizeGradients.length === 0 ? t.size = yt.RandomRange(this.minSize, this.maxSize) : (t._currentSizeGradient = this._sizeGradients[0], t._currentSize1 = t._currentSizeGradient.getFactor(), t.size = t._currentSize1, this._sizeGradients.length > 1 ? t._currentSize2 = this._sizeGradients[1].getFactor() : t._currentSize2 = t._currentSize1), t.scale.copyFromFloats(yt.RandomRange(this.minScaleX, this.maxScaleX), yt.RandomRange(this.minScaleY, this.maxScaleY)), this._startSizeGradients && this._startSizeGradients[0] && this.targetStopDuration) { const s = this._actualFrame / this.targetStopDuration; N_.GetCurrentGradient(s, this._startSizeGradients, (n, a, l) => { n !== this._currentStartSizeGradient && (this._currentStartSize1 = this._currentStartSize2, this._currentStartSize2 = a.getFactor(), this._currentStartSizeGradient = n); const o = yt.Lerp(this._currentStartSize1, this._currentStartSize2, l); t.scale.scaleInPlace(o); }); } if (!this._angularSpeedGradients || this._angularSpeedGradients.length === 0 ? 
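// _update above defers to startPositionFunction / startDirectionFunction when they are set.
// Hedged sketch of overriding them through the public API (assumes `ps` exists; the emission
// logic is illustrative and mirrors the pattern used by the built-in emitter types):
//   ps.startPositionFunction = (worldMatrix, positionToUpdate, particle, isLocal) => {
//     const x = Math.random() - 0.5, y = 0, z = Math.random() - 0.5;
//     if (isLocal) { positionToUpdate.copyFromFloats(x, y, z); return; }
//     BABYLON.Vector3.TransformCoordinatesFromFloatsToRef(x, y, z, worldMatrix, positionToUpdate);
//   };
//   ps.startDirectionFunction = (worldMatrix, directionToUpdate, particle, isLocal) => {
//     directionToUpdate.copyFromFloats(0, 1, 0); // always emit straight up
//   };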
t.angularSpeed = yt.RandomRange(this.minAngularSpeed, this.maxAngularSpeed) : (t._currentAngularSpeedGradient = this._angularSpeedGradients[0], t.angularSpeed = t._currentAngularSpeedGradient.getFactor(), t._currentAngularSpeed1 = t.angularSpeed, this._angularSpeedGradients.length > 1 ? t._currentAngularSpeed2 = this._angularSpeedGradients[1].getFactor() : t._currentAngularSpeed2 = t._currentAngularSpeed1), t.angle = yt.RandomRange(this.minInitialRotation, this.maxInitialRotation), this._velocityGradients && this._velocityGradients.length > 0 && (t._currentVelocityGradient = this._velocityGradients[0], t._currentVelocity1 = t._currentVelocityGradient.getFactor(), this._velocityGradients.length > 1 ? t._currentVelocity2 = this._velocityGradients[1].getFactor() : t._currentVelocity2 = t._currentVelocity1), this._limitVelocityGradients && this._limitVelocityGradients.length > 0 && (t._currentLimitVelocityGradient = this._limitVelocityGradients[0], t._currentLimitVelocity1 = t._currentLimitVelocityGradient.getFactor(), this._limitVelocityGradients.length > 1 ? t._currentLimitVelocity2 = this._limitVelocityGradients[1].getFactor() : t._currentLimitVelocity2 = t._currentLimitVelocity1), this._dragGradients && this._dragGradients.length > 0 && (t._currentDragGradient = this._dragGradients[0], t._currentDrag1 = t._currentDragGradient.getFactor(), this._dragGradients.length > 1 ? t._currentDrag2 = this._dragGradients[1].getFactor() : t._currentDrag2 = t._currentDrag1), !this._colorGradients || this._colorGradients.length === 0) { const s = yt.RandomRange(0, 1); Et.LerpToRef(this.color1, this.color2, s, t.color), this.colorDead.subtractToRef(t.color, this._colorDiff), this._colorDiff.scaleToRef(1 / t.lifeTime, t.colorStep); } else t._currentColorGradient = this._colorGradients[0], t._currentColorGradient.getColorToRef(t.color), t._currentColor1.copyFrom(t.color), this._colorGradients.length > 1 ? this._colorGradients[1].getColorToRef(t._currentColor2) : t._currentColor2.copyFrom(t.color); this._isAnimationSheetEnabled && (t._initialStartSpriteCellID = this.startSpriteCellID, t._initialEndSpriteCellID = this.endSpriteCellID, t._initialSpriteCellLoop = this.spriteCellLoop), t.direction.addInPlace(this._inheritedVelocityOffset), this._useRampGradients && (t.remapData = new Di(0, 1, 0, 1)), this.noiseTexture && (t._randomNoiseCoordinates1 ? 
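// The noise branch above samples a noise texture to perturb particle direction over time.
// Hedged public-API sketch (assumes `ps` and `scene` exist; parameter values are illustrative):
//   const noise = new BABYLON.NoiseProceduralTexture("perlin", 256, scene);
//   noise.animationSpeedFactor = 5;
//   noise.brightness = 0.5;
//   ps.noiseTexture = noise;
//   ps.noiseStrength = new BABYLON.Vector3(10, 10, 10);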
(t._randomNoiseCoordinates1.copyFromFloats(Math.random(), Math.random(), Math.random()), t._randomNoiseCoordinates2.copyFromFloats(Math.random(), Math.random(), Math.random())) : (t._randomNoiseCoordinates1 = new D(Math.random(), Math.random(), Math.random()), t._randomNoiseCoordinates2 = new D(Math.random(), Math.random(), Math.random()))), t._inheritParticleInfoToSubEmitters(); } } /** * @internal */ static _GetAttributeNamesOrOptions(e = !1, t = !1, i = !1) { const r = [Y.PositionKind, Y.ColorKind, "angle", "offset", "size"]; return e && r.push("cellIndex"), t || r.push("direction"), i && r.push("remapData"), r; } /** * @internal */ static _GetEffectCreationOptions(e = !1, t = !1) { const i = ["invView", "view", "projection", "textureMask", "translationPivot", "eyePosition"]; return Gc(i), e && i.push("particlesInfos"), t && i.push("logarithmicDepthConstant"), i; } /** * Fill the defines array according to the current settings of the particle system * @param defines Array to be updated * @param blendMode blend mode to take into account when updating the array */ fillDefines(e, t) { if (this._scene && bT(this, this._scene, e), this._isAnimationSheetEnabled && e.push("#define ANIMATESHEET"), this.useLogarithmicDepth && e.push("#define LOGARITHMICDEPTH"), t === ns.BLENDMODE_MULTIPLY && e.push("#define BLENDMULTIPLYMODE"), this._useRampGradients && e.push("#define RAMPGRADIENT"), this._isBillboardBased) switch (e.push("#define BILLBOARD"), this.billboardMode) { case ns.BILLBOARDMODE_Y: e.push("#define BILLBOARDY"); break; case ns.BILLBOARDMODE_STRETCHED: case ns.BILLBOARDMODE_STRETCHED_LOCAL: e.push("#define BILLBOARDSTRETCHED"), this.billboardMode === ns.BILLBOARDMODE_STRETCHED_LOCAL && e.push("#define BILLBOARDSTRETCHED_LOCAL"); break; case ns.BILLBOARDMODE_ALL: e.push("#define BILLBOARDMODE_ALL"); break; } this._imageProcessingConfiguration && (this._imageProcessingConfiguration.prepareDefines(this._imageProcessingConfigurationDefines), e.push(this._imageProcessingConfigurationDefines.toString())); } /** * Fill the uniforms, attributes and samplers arrays according to the current settings of the particle system * @param uniforms Uniforms array to fill * @param attributes Attributes array to fill * @param samplers Samplers array to fill */ fillUniformsAttributesAndSamplerNames(e, t, i) { t.push(...ns._GetAttributeNamesOrOptions(this._isAnimationSheetEnabled, this._isBillboardBased && this.billboardMode !== ns.BILLBOARDMODE_STRETCHED && this.billboardMode !== ns.BILLBOARDMODE_STRETCHED_LOCAL, this._useRampGradients)), e.push(...ns._GetEffectCreationOptions(this._isAnimationSheetEnabled, this.useLogarithmicDepth)), i.push("diffuseSampler", "rampSampler"), this._imageProcessingConfiguration && (Ds.PrepareUniforms(e, this._imageProcessingConfigurationDefines), Ds.PrepareSamplers(i, this._imageProcessingConfigurationDefines)); } /** * @internal */ _getWrapper(e) { const t = this._getCustomDrawWrapper(e); if (t != null && t.effect) return t; const i = []; this.fillDefines(i, e); const r = this._engine._features.supportRenderPasses ? 
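// fillDefines above selects shader defines from blendMode and billboardMode. Hedged sketch of the
// corresponding public settings (assumes `ps` is a BABYLON.ParticleSystem):
//   ps.blendMode = BABYLON.ParticleSystem.BLENDMODE_ONEONE;            // additive blending
//   ps.billboardMode = BABYLON.ParticleSystem.BILLBOARDMODE_STRETCHED; // stretch along particle direction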
this._engine.currentRenderPassId : 0; let s = this._drawWrappers[r]; s || (s = this._drawWrappers[r] = []); let n = s[e]; n || (n = new $o(this._engine), n.drawContext && (n.drawContext.useInstancing = this._useInstancing), s[e] = n); const a = i.join(` `); if (n.defines !== a) { const l = [], o = [], u = []; this.fillUniformsAttributesAndSamplerNames(o, l, u), n.setEffect(this._engine.createEffect("particles", l, o, u, a), a); } return n; } /** * Animates the particle system for the current frame by emitting new particles and or animating the living ones. * @param preWarmOnly will prevent the system from updating the vertex buffer (default is false) */ animate(e = !1) { var t; if (!this._started) return; if (!e && this._scene) { if (!this.isReady() || this._currentRenderId === this._scene.getFrameId()) return; this._currentRenderId = this._scene.getFrameId(); } this._scaledUpdateSpeed = this.updateSpeed * (e ? this.preWarmStepOffset : ((t = this._scene) === null || t === void 0 ? void 0 : t.getAnimationRatio()) || 1); let i; if (this.manualEmitCount > -1) i = this.manualEmitCount, this._newPartsExcess = 0, this.manualEmitCount = 0; else { let r = this.emitRate; if (this._emitRateGradients && this._emitRateGradients.length > 0 && this.targetStopDuration) { const s = this._actualFrame / this.targetStopDuration; N_.GetCurrentGradient(s, this._emitRateGradients, (n, a, l) => { n !== this._currentEmitRateGradient && (this._currentEmitRate1 = this._currentEmitRate2, this._currentEmitRate2 = a.getFactor(), this._currentEmitRateGradient = n), r = yt.Lerp(this._currentEmitRate1, this._currentEmitRate2, l); }); } i = r * this._scaledUpdateSpeed >> 0, this._newPartsExcess += r * this._scaledUpdateSpeed - i; } if (this._newPartsExcess > 1 && (i += this._newPartsExcess >> 0, this._newPartsExcess -= this._newPartsExcess >> 0), this._alive = !1, this._stopped ? i = 0 : (this._actualFrame += this._scaledUpdateSpeed, this.targetStopDuration && this._actualFrame >= this.targetStopDuration && this.stop()), this._update(i), this._stopped && (this._alive || (this._started = !1, this.onAnimationEnd && this.onAnimationEnd(), this.disposeOnStop && this._scene && this._scene._toBeDisposed.push(this))), !e) { let r = 0; for (let s = 0; s < this._particles.length; s++) { const n = this._particles[s]; this._appendParticleVertices(r, n), r += this._useInstancing ? 1 : 4; } this._vertexBuffer && this._vertexBuffer.updateDirectly(this._vertexData, 0, this._particles.length); } this.manualEmitCount === 0 && this.disposeOnStop && this.stop(); } _appendParticleVertices(e, t) { this._appendParticleVertex(e++, t, 0, 0), this._useInstancing || (this._appendParticleVertex(e++, t, 1, 0), this._appendParticleVertex(e++, t, 1, 1), this._appendParticleVertex(e++, t, 0, 1)); } /** * Rebuilds the particle system. 
*/ rebuild() { var e, t; this._engine.getCaps().vertexArrayObject && (this._vertexArrayObject = null), this._createIndexBuffer(), (e = this._spriteBuffer) === null || e === void 0 || e._rebuild(), (t = this._vertexBuffer) === null || t === void 0 || t._rebuild(); for (const i in this._vertexBuffers) this._vertexBuffers[i]._rebuild(); this.resetDrawCache(); } /** * Is this system ready to be used/rendered * @returns true if the system is ready */ isReady() { if (!this.emitter || this._imageProcessingConfiguration && !this._imageProcessingConfiguration.isReady() || !this.particleTexture || !this.particleTexture.isReady()) return !1; if (this.blendMode !== ns.BLENDMODE_MULTIPLYADD) { if (!this._getWrapper(this.blendMode).effect.isReady()) return !1; } else if (!this._getWrapper(ns.BLENDMODE_MULTIPLY).effect.isReady() || !this._getWrapper(ns.BLENDMODE_ADD).effect.isReady()) return !1; return !0; } _render(e) { var t, i, r, s, n, a, l, o; const u = this._getWrapper(e), h = u.effect, d = this._engine; d.enableEffect(u); const f = (t = this.defaultViewMatrix) !== null && t !== void 0 ? t : this._scene.getViewMatrix(); if (h.setTexture("diffuseSampler", this.particleTexture), h.setMatrix("view", f), h.setMatrix("projection", (i = this.defaultProjectionMatrix) !== null && i !== void 0 ? i : this._scene.getProjectionMatrix()), this._isAnimationSheetEnabled && this.particleTexture) { const m = this.particleTexture.getBaseSize(); h.setFloat3("particlesInfos", this.spriteCellWidth / m.width, this.spriteCellHeight / m.height, this.spriteCellWidth / m.width); } if (h.setVector2("translationPivot", this.translationPivot), h.setFloat4("textureMask", this.textureMask.r, this.textureMask.g, this.textureMask.b, this.textureMask.a), this._isBillboardBased && this._scene) { const m = this._scene.activeCamera; h.setVector3("eyePosition", m.globalPosition); } this._rampGradientsTexture && ((!this._rampGradients || !this._rampGradients.length) && (this._rampGradientsTexture.dispose(), this._rampGradientsTexture = null), h.setTexture("rampSampler", this._rampGradientsTexture)); const p = h.defines; switch (this._scene && Ec(h, this, this._scene), p.indexOf("#define BILLBOARDMODE_ALL") >= 0 && (f.invertToRef(de.Matrix[0]), h.setMatrix("invView", de.Matrix[0])), this._vertexArrayObject !== void 0 ? !((r = this._scene) === null || r === void 0) && r.forceWireframe ? d.bindBuffers(this._vertexBuffers, this._linesIndexBufferUseInstancing, h) : (this._vertexArrayObject || (this._vertexArrayObject = this._engine.recordVertexArrayObject(this._vertexBuffers, null, h)), this._engine.bindVertexArrayObject(this._vertexArrayObject, !((s = this._scene) === null || s === void 0) && s.forceWireframe ? this._linesIndexBufferUseInstancing : this._indexBuffer)) : this._indexBuffer ? d.bindBuffers(this._vertexBuffers, !((a = this._scene) === null || a === void 0) && a.forceWireframe ? this._linesIndexBuffer : this._indexBuffer, h) : d.bindBuffers(this._vertexBuffers, !((n = this._scene) === null || n === void 0) && n.forceWireframe ? 
this._linesIndexBufferUseInstancing : null, h), this.useLogarithmicDepth && this._scene && Ke.BindLogDepth(p, h, this._scene), this._imageProcessingConfiguration && !this._imageProcessingConfiguration.applyByPostProcess && this._imageProcessingConfiguration.bind(h), e) { case ns.BLENDMODE_ADD: d.setAlphaMode(1); break; case ns.BLENDMODE_ONEONE: d.setAlphaMode(6); break; case ns.BLENDMODE_STANDARD: d.setAlphaMode(2); break; case ns.BLENDMODE_MULTIPLY: d.setAlphaMode(4); break; } return this._onBeforeDrawParticlesObservable && this._onBeforeDrawParticlesObservable.notifyObservers(h), this._useInstancing ? !((l = this._scene) === null || l === void 0) && l.forceWireframe ? d.drawElementsType(6, 0, 10, this._particles.length) : d.drawArraysType(7, 0, 4, this._particles.length) : !((o = this._scene) === null || o === void 0) && o.forceWireframe ? d.drawElementsType(1, 0, this._particles.length * 10) : d.drawElementsType(0, 0, this._particles.length * 6), this._particles.length; } /** * Renders the particle system in its current state. * @returns the current number of particles */ render() { if (!this.isReady() || !this._particles.length) return 0; const e = this._engine; e.setState && (e.setState(!1), this.forceDepthWrite && e.setDepthWrite(!0)); let t = 0; return this.blendMode === ns.BLENDMODE_MULTIPLYADD ? t = this._render(ns.BLENDMODE_MULTIPLY) + this._render(ns.BLENDMODE_ADD) : t = this._render(this.blendMode), this._engine.unbindInstanceAttributes(), this._engine.setAlphaMode(0), t; } /** * Disposes the particle system and free the associated resources * @param disposeTexture defines if the particle texture must be disposed as well (true by default) */ dispose(e = !0) { if (this.resetDrawCache(), this._vertexBuffer && (this._vertexBuffer.dispose(), this._vertexBuffer = null), this._spriteBuffer && (this._spriteBuffer.dispose(), this._spriteBuffer = null), this._indexBuffer && (this._engine._releaseBuffer(this._indexBuffer), this._indexBuffer = null), this._linesIndexBuffer && (this._engine._releaseBuffer(this._linesIndexBuffer), this._linesIndexBuffer = null), this._linesIndexBufferUseInstancing && (this._engine._releaseBuffer(this._linesIndexBufferUseInstancing), this._linesIndexBufferUseInstancing = null), this._vertexArrayObject && (this._engine.releaseVertexArrayObject(this._vertexArrayObject), this._vertexArrayObject = null), e && this.particleTexture && (this.particleTexture.dispose(), this.particleTexture = null), e && this.noiseTexture && (this.noiseTexture.dispose(), this.noiseTexture = null), this._rampGradientsTexture && (this._rampGradientsTexture.dispose(), this._rampGradientsTexture = null), this._removeFromRoot(), this.subEmitters && !this._subEmitters && this._prepareSubEmitterInternalArray(), this._subEmitters && this._subEmitters.length) { for (let t = 0; t < this._subEmitters.length; t++) for (const i of this._subEmitters[t]) i.dispose(); this._subEmitters = [], this.subEmitters = []; } if (this._disposeEmitterOnDispose && this.emitter && this.emitter.dispose && this.emitter.dispose(!0), this._onBeforeDrawParticlesObservable && this._onBeforeDrawParticlesObservable.clear(), this._scene) { const t = this._scene.particleSystems.indexOf(this); t > -1 && this._scene.particleSystems.splice(t, 1), this._scene._activeParticleSystems.dispose(); } this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.onStoppedObservable.clear(), this.reset(); } // Clone /** * Clones the particle system. 
* @param name The name of the cloned object * @param newEmitter The new emitter to use * @param cloneTexture Also clone the textures if true * @returns the cloned particle system */ clone(e, t, i = !1) { const r = Object.assign({}, this._customWrappers); let s = null; const n = this._engine; if (n.createEffectForParticles && this.customShader != null) { s = this.customShader; const o = s.shaderOptions.defines.length > 0 ? s.shaderOptions.defines.join(` `) : "", u = n.createEffectForParticles(s.shaderPath.fragmentElement, s.shaderOptions.uniforms, s.shaderOptions.samplers, o); r[0] ? r[0].effect = u : this.setCustomEffect(u, 0); } const a = this.serialize(i), l = ns.Parse(a, this._scene || this._engine, this._rootUrl); return l.name = e, l.customShader = s, l._customWrappers = r, t === void 0 && (t = this.emitter), this.noiseTexture && (l.noiseTexture = this.noiseTexture.clone()), l.emitter = t, this.preventAutoStart || l.start(), l; } /** * Serializes the particle system to a JSON object * @param serializeTexture defines if the texture must be serialized as well * @returns the JSON object */ serialize(e = !1) { const t = {}; if (ns._Serialize(t, this, e), t.textureMask = this.textureMask.asArray(), t.customShader = this.customShader, t.preventAutoStart = this.preventAutoStart, this.subEmitters) { t.subEmitters = [], this._subEmitters || this._prepareSubEmitterInternalArray(); for (const i of this._subEmitters) { const r = []; for (const s of i) r.push(s.serialize(e)); t.subEmitters.push(r); } } return t; } /** * @internal */ static _Serialize(e, t, i) { if (e.name = t.name, e.id = t.id, e.capacity = t.getCapacity(), e.disposeOnStop = t.disposeOnStop, e.manualEmitCount = t.manualEmitCount, t.emitter.position) { const _ = t.emitter; e.emitterId = _.id; } else { const _ = t.emitter; e.emitter = _.asArray(); } t.particleEmitterType && (e.particleEmitterType = t.particleEmitterType.serialize()), t.particleTexture && (i ? 
e.texture = t.particleTexture.serialize() : (e.textureName = t.particleTexture.name, e.invertY = !!t.particleTexture._invertY)), e.isLocal = t.isLocal, St.AppendSerializedAnimations(t, e), e.beginAnimationOnStart = t.beginAnimationOnStart, e.beginAnimationFrom = t.beginAnimationFrom, e.beginAnimationTo = t.beginAnimationTo, e.beginAnimationLoop = t.beginAnimationLoop, e.startDelay = t.startDelay, e.renderingGroupId = t.renderingGroupId, e.isBillboardBased = t.isBillboardBased, e.billboardMode = t.billboardMode, e.minAngularSpeed = t.minAngularSpeed, e.maxAngularSpeed = t.maxAngularSpeed, e.minSize = t.minSize, e.maxSize = t.maxSize, e.minScaleX = t.minScaleX, e.maxScaleX = t.maxScaleX, e.minScaleY = t.minScaleY, e.maxScaleY = t.maxScaleY, e.minEmitPower = t.minEmitPower, e.maxEmitPower = t.maxEmitPower, e.minLifeTime = t.minLifeTime, e.maxLifeTime = t.maxLifeTime, e.emitRate = t.emitRate, e.gravity = t.gravity.asArray(), e.noiseStrength = t.noiseStrength.asArray(), e.color1 = t.color1.asArray(), e.color2 = t.color2.asArray(), e.colorDead = t.colorDead.asArray(), e.updateSpeed = t.updateSpeed, e.targetStopDuration = t.targetStopDuration, e.blendMode = t.blendMode, e.preWarmCycles = t.preWarmCycles, e.preWarmStepOffset = t.preWarmStepOffset, e.minInitialRotation = t.minInitialRotation, e.maxInitialRotation = t.maxInitialRotation, e.startSpriteCellID = t.startSpriteCellID, e.spriteCellLoop = t.spriteCellLoop, e.endSpriteCellID = t.endSpriteCellID, e.spriteCellChangeSpeed = t.spriteCellChangeSpeed, e.spriteCellWidth = t.spriteCellWidth, e.spriteCellHeight = t.spriteCellHeight, e.spriteRandomStartCell = t.spriteRandomStartCell, e.isAnimationSheetEnabled = t.isAnimationSheetEnabled, e.useLogarithmicDepth = t.useLogarithmicDepth; const r = t.getColorGradients(); if (r) { e.colorGradients = []; for (const _ of r) { const v = { gradient: _.gradient, color1: _.color1.asArray() }; _.color2 ? v.color2 = _.color2.asArray() : v.color2 = _.color1.asArray(), e.colorGradients.push(v); } } const s = t.getRampGradients(); if (s) { e.rampGradients = []; for (const _ of s) { const v = { gradient: _.gradient, color: _.color.asArray() }; e.rampGradients.push(v); } e.useRampGradients = t.useRampGradients; } const n = t.getColorRemapGradients(); if (n) { e.colorRemapGradients = []; for (const _ of n) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.colorRemapGradients.push(v); } } const a = t.getAlphaRemapGradients(); if (a) { e.alphaRemapGradients = []; for (const _ of a) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.alphaRemapGradients.push(v); } } const l = t.getSizeGradients(); if (l) { e.sizeGradients = []; for (const _ of l) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.sizeGradients.push(v); } } const o = t.getAngularSpeedGradients(); if (o) { e.angularSpeedGradients = []; for (const _ of o) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.angularSpeedGradients.push(v); } } const u = t.getVelocityGradients(); if (u) { e.velocityGradients = []; for (const _ of u) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? 
v.factor2 = _.factor2 : v.factor2 = _.factor1, e.velocityGradients.push(v); } } const h = t.getDragGradients(); if (h) { e.dragGradients = []; for (const _ of h) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.dragGradients.push(v); } } const d = t.getEmitRateGradients(); if (d) { e.emitRateGradients = []; for (const _ of d) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.emitRateGradients.push(v); } } const f = t.getStartSizeGradients(); if (f) { e.startSizeGradients = []; for (const _ of f) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.startSizeGradients.push(v); } } const p = t.getLifeTimeGradients(); if (p) { e.lifeTimeGradients = []; for (const _ of p) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.lifeTimeGradients.push(v); } } const m = t.getLimitVelocityGradients(); if (m) { e.limitVelocityGradients = []; for (const _ of m) { const v = { gradient: _.gradient, factor1: _.factor1 }; _.factor2 !== void 0 ? v.factor2 = _.factor2 : v.factor2 = _.factor1, e.limitVelocityGradients.push(v); } e.limitVelocityDamping = t.limitVelocityDamping; } t.noiseTexture && (e.noiseTexture = t.noiseTexture.serialize()); } /** * @internal */ static _Parse(e, t, i, r) { var s, n, a; let l; i instanceof mi ? l = null : l = i; const o = Qo("BABYLON.Texture"); if (o && l && (e.texture ? t.particleTexture = o.Parse(e.texture, l, r) : e.textureName && (t.particleTexture = new o(r + e.textureName, l, !1, e.invertY !== void 0 ? e.invertY : !0), t.particleTexture.name = e.textureName)), !e.emitterId && e.emitterId !== 0 && e.emitter === void 0 ? t.emitter = D.Zero() : e.emitterId && l ? 
t.emitter = l.getLastMeshById(e.emitterId) : t.emitter = D.FromArray(e.emitter), t.isLocal = !!e.isLocal, e.renderingGroupId !== void 0 && (t.renderingGroupId = e.renderingGroupId), e.isBillboardBased !== void 0 && (t.isBillboardBased = e.isBillboardBased), e.billboardMode !== void 0 && (t.billboardMode = e.billboardMode), e.useLogarithmicDepth !== void 0 && (t.useLogarithmicDepth = e.useLogarithmicDepth), e.animations) { for (let h = 0; h < e.animations.length; h++) { const d = e.animations[h], f = Qo("BABYLON.Animation"); f && t.animations.push(f.Parse(d)); } t.beginAnimationOnStart = e.beginAnimationOnStart, t.beginAnimationFrom = e.beginAnimationFrom, t.beginAnimationTo = e.beginAnimationTo, t.beginAnimationLoop = e.beginAnimationLoop; } if (e.autoAnimate && l && l.beginAnimation(t, e.autoAnimateFrom, e.autoAnimateTo, e.autoAnimateLoop, e.autoAnimateSpeed || 1), t.startDelay = e.startDelay | 0, t.minAngularSpeed = e.minAngularSpeed, t.maxAngularSpeed = e.maxAngularSpeed, t.minSize = e.minSize, t.maxSize = e.maxSize, e.minScaleX && (t.minScaleX = e.minScaleX, t.maxScaleX = e.maxScaleX, t.minScaleY = e.minScaleY, t.maxScaleY = e.maxScaleY), e.preWarmCycles !== void 0 && (t.preWarmCycles = e.preWarmCycles, t.preWarmStepOffset = e.preWarmStepOffset), e.minInitialRotation !== void 0 && (t.minInitialRotation = e.minInitialRotation, t.maxInitialRotation = e.maxInitialRotation), t.minLifeTime = e.minLifeTime, t.maxLifeTime = e.maxLifeTime, t.minEmitPower = e.minEmitPower, t.maxEmitPower = e.maxEmitPower, t.emitRate = e.emitRate, t.gravity = D.FromArray(e.gravity), e.noiseStrength && (t.noiseStrength = D.FromArray(e.noiseStrength)), t.color1 = Et.FromArray(e.color1), t.color2 = Et.FromArray(e.color2), t.colorDead = Et.FromArray(e.colorDead), t.updateSpeed = e.updateSpeed, t.targetStopDuration = e.targetStopDuration, t.blendMode = e.blendMode, e.colorGradients) for (const h of e.colorGradients) t.addColorGradient(h.gradient, Et.FromArray(h.color1), h.color2 ? Et.FromArray(h.color2) : void 0); if (e.rampGradients) { for (const h of e.rampGradients) t.addRampGradient(h.gradient, ze.FromArray(h.color)); t.useRampGradients = e.useRampGradients; } if (e.colorRemapGradients) for (const h of e.colorRemapGradients) t.addColorRemapGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.alphaRemapGradients) for (const h of e.alphaRemapGradients) t.addAlphaRemapGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.sizeGradients) for (const h of e.sizeGradients) t.addSizeGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.angularSpeedGradients) for (const h of e.angularSpeedGradients) t.addAngularSpeedGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.velocityGradients) for (const h of e.velocityGradients) t.addVelocityGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.dragGradients) for (const h of e.dragGradients) t.addDragGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.emitRateGradients) for (const h of e.emitRateGradients) t.addEmitRateGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.startSizeGradients) for (const h of e.startSizeGradients) t.addStartSizeGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); if (e.lifeTimeGradients) for (const h of e.lifeTimeGradients) t.addLifeTimeGradient(h.gradient, h.factor1 !== void 0 ? 
h.factor1 : h.factor, h.factor2); if (e.limitVelocityGradients) { for (const h of e.limitVelocityGradients) t.addLimitVelocityGradient(h.gradient, h.factor1 !== void 0 ? h.factor1 : h.factor, h.factor2); t.limitVelocityDamping = e.limitVelocityDamping; } if (e.noiseTexture && l) { const h = Qo("BABYLON.ProceduralTexture"); t.noiseTexture = h.Parse(e.noiseTexture, l, r); } let u; if (e.particleEmitterType) { switch (e.particleEmitterType.type) { case "SphereParticleEmitter": u = new gw(); break; case "SphereDirectedParticleEmitter": u = new HL(); break; case "ConeEmitter": case "ConeParticleEmitter": u = new UL(); break; case "CylinderParticleEmitter": u = new mw(); break; case "CylinderDirectedParticleEmitter": u = new VL(); break; case "HemisphericParticleEmitter": u = new kL(); break; case "PointParticleEmitter": u = new zL(); break; case "MeshParticleEmitter": u = new $B(); break; case "BoxEmitter": case "BoxParticleEmitter": default: u = new o5(); break; } u.parse(e.particleEmitterType, l); } else u = new o5(), u.parse(e, l); t.particleEmitterType = u, t.startSpriteCellID = e.startSpriteCellID, t.endSpriteCellID = e.endSpriteCellID, t.spriteCellLoop = (s = e.spriteCellLoop) !== null && s !== void 0 ? s : !0, t.spriteCellWidth = e.spriteCellWidth, t.spriteCellHeight = e.spriteCellHeight, t.spriteCellChangeSpeed = e.spriteCellChangeSpeed, t.spriteRandomStartCell = e.spriteRandomStartCell, t.disposeOnStop = (n = e.disposeOnStop) !== null && n !== void 0 ? n : !1, t.manualEmitCount = (a = e.manualEmitCount) !== null && a !== void 0 ? a : -1; } /** * Parses a JSON object to create a particle system. * @param parsedParticleSystem The JSON object to parse * @param sceneOrEngine The scene or the engine to create the particle system in * @param rootUrl The root url to use to load external dependencies like texture * @param doNotStart If true, ignore the preventAutoStart attribute and do not start the system * @param capacity defines the system capacity (if null or undefined the stored capacity will be used) * @returns the parsed particle system */ static Parse(e, t, i, r = !1, s) { const n = e.name; let a = null, l = null, o, u; if (t instanceof mi ? o = t : (u = t, o = u.getEngine()), e.customShader && o.createEffectForParticles) { l = e.customShader; const d = l.shaderOptions.defines.length > 0 ?
l.shaderOptions.defines.join(` `) : ""; a = o.createEffectForParticles(l.shaderPath.fragmentElement, l.shaderOptions.uniforms, l.shaderOptions.samplers, d); } const h = new ns(n, s || e.capacity, t, a, e.isAnimationSheetEnabled); if (h.customShader = l, h._rootUrl = i, e.id && (h.id = e.id), e.subEmitters) { h.subEmitters = []; for (const d of e.subEmitters) { const f = []; for (const p of d) f.push(hT.Parse(p, t, i)); h.subEmitters.push(f); } } return ns._Parse(e, h, t, i), e.textureMask && (h.textureMask = Et.FromArray(e.textureMask)), e.preventAutoStart && (h.preventAutoStart = e.preventAutoStart), !r && !h.preventAutoStart && h.start(), h; } } ns.BILLBOARDMODE_Y = 2; ns.BILLBOARDMODE_ALL = 7; ns.BILLBOARDMODE_STRETCHED = 8; ns.BILLBOARDMODE_STRETCHED_LOCAL = 9; hT._ParseParticleSystem = ns.Parse; const q0e = "clipPlaneFragmentDeclaration2", J0e = `#ifdef CLIPPLANE in float fClipDistance; #endif #ifdef CLIPPLANE2 in float fClipDistance2; #endif #ifdef CLIPPLANE3 in float fClipDistance3; #endif #ifdef CLIPPLANE4 in float fClipDistance4; #endif #ifdef CLIPPLANE5 in float fClipDistance5; #endif #ifdef CLIPPLANE6 in float fClipDistance6; #endif `; je.IncludesShadersStore[q0e] = J0e; const ege = "gpuRenderParticlesPixelShader", tge = `precision highp float; #ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif uniform sampler2D diffuseSampler;varying vec2 vUV;varying vec4 vColor; #include #include #include #include #include void main() { #include vec4 textureColor=texture2D(diffuseSampler,vUV);gl_FragColor=textureColor*vColor; #ifdef BLENDMULTIPLYMODE float alpha=vColor.a*textureColor.a;gl_FragColor.rgb=gl_FragColor.rgb*alpha+vec3(1.0)*(1.0-alpha); #endif #include #ifdef IMAGEPROCESSINGPOSTPROCESS gl_FragColor.rgb=toLinearSpace(gl_FragColor.rgb); #else #ifdef IMAGEPROCESSING gl_FragColor.rgb=toLinearSpace(gl_FragColor.rgb);gl_FragColor=applyImageProcessing(gl_FragColor); #endif #endif } `; je.ShadersStore[ege] = tge; const ige = "clipPlaneVertexDeclaration2", rge = `#ifdef CLIPPLANE uniform vec4 vClipPlane;out float fClipDistance; #endif #ifdef CLIPPLANE2 uniform vec4 vClipPlane2;out float fClipDistance2; #endif #ifdef CLIPPLANE3 uniform vec4 vClipPlane3;out float fClipDistance3; #endif #ifdef CLIPPLANE4 uniform vec4 vClipPlane4;out float fClipDistance4; #endif #ifdef CLIPPLANE5 uniform vec4 vClipPlane5;out float fClipDistance5; #endif #ifdef CLIPPLANE6 uniform vec4 vClipPlane6;out float fClipDistance6; #endif `; je.IncludesShadersStore[ige] = rge; const sge = "gpuRenderParticlesVertexShader", nge = `precision highp float;uniform mat4 view;uniform mat4 projection;uniform vec2 translationPivot;uniform vec3 worldOffset; #ifdef LOCAL uniform mat4 emitterWM; #endif attribute vec3 position;attribute float age;attribute float life;attribute vec3 size; #ifndef BILLBOARD attribute vec3 initialDirection; #endif #ifdef BILLBOARDSTRETCHED attribute vec3 direction; #endif attribute float angle; #ifdef ANIMATESHEET attribute float cellIndex; #endif attribute vec2 offset;attribute vec2 uv;varying vec2 vUV;varying vec4 vColor;varying vec3 vPositionW; #if defined(BILLBOARD) && !defined(BILLBOARDY) && !defined(BILLBOARDSTRETCHED) uniform mat4 invView; #endif #include #include #ifdef COLORGRADIENTS uniform sampler2D colorGradientSampler; #else uniform vec4 colorDead;attribute vec4 color; #endif #ifdef ANIMATESHEET uniform vec3 sheetInfos; #endif #ifdef BILLBOARD uniform vec3 eyePosition; #endif vec3 rotate(vec3 yaxis,vec3 rotatedCorner) {vec3 xaxis=normalize(cross(vec3(0.,1.0,0.),yaxis));vec3 
zaxis=normalize(cross(yaxis,xaxis));vec3 row0=vec3(xaxis.x,xaxis.y,xaxis.z);vec3 row1=vec3(yaxis.x,yaxis.y,yaxis.z);vec3 row2=vec3(zaxis.x,zaxis.y,zaxis.z);mat3 rotMatrix= mat3(row0,row1,row2);vec3 alignedCorner=rotMatrix*rotatedCorner; #ifdef LOCAL return ((emitterWM*vec4(position,1.0)).xyz+worldOffset)+alignedCorner; #else return (position+worldOffset)+alignedCorner; #endif } #ifdef BILLBOARDSTRETCHED vec3 rotateAlign(vec3 toCamera,vec3 rotatedCorner) {vec3 normalizedToCamera=normalize(toCamera);vec3 normalizedCrossDirToCamera=normalize(cross(normalize(direction),normalizedToCamera));vec3 crossProduct=normalize(cross(normalizedToCamera,normalizedCrossDirToCamera));vec3 row0=vec3(normalizedCrossDirToCamera.x,normalizedCrossDirToCamera.y,normalizedCrossDirToCamera.z);vec3 row1=vec3(crossProduct.x,crossProduct.y,crossProduct.z);vec3 row2=vec3(normalizedToCamera.x,normalizedToCamera.y,normalizedToCamera.z);mat3 rotMatrix= mat3(row0,row1,row2);vec3 alignedCorner=rotMatrix*rotatedCorner; #ifdef LOCAL return ((emitterWM*vec4(position,1.0)).xyz+worldOffset)+alignedCorner; #else return (position+worldOffset)+alignedCorner; #endif } #endif void main() { #ifdef ANIMATESHEET float rowOffset=floor(cellIndex/sheetInfos.z);float columnOffset=cellIndex-rowOffset*sheetInfos.z;vec2 uvScale=sheetInfos.xy;vec2 uvOffset=vec2(uv.x ,1.0-uv.y);vUV=(uvOffset+vec2(columnOffset,rowOffset))*uvScale; #else vUV=uv; #endif float ratio=age/life; #ifdef COLORGRADIENTS vColor=texture2D(colorGradientSampler,vec2(ratio,0)); #else vColor=color*vec4(1.0-ratio)+colorDead*vec4(ratio); #endif vec2 cornerPos=(offset-translationPivot)*size.yz*size.x; #ifdef BILLBOARD vec4 rotatedCorner;rotatedCorner.w=0.; #ifdef BILLBOARDY rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.z=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.y=0.;rotatedCorner.xz+=translationPivot;vec3 yaxis=(position+worldOffset)-eyePosition;yaxis.y=0.;vPositionW=rotate(normalize(yaxis),rotatedCorner.xyz);vec4 viewPosition=(view*vec4(vPositionW,1.0)); #elif defined(BILLBOARDSTRETCHED) rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.z=0.;rotatedCorner.xy+=translationPivot;vec3 toCamera=(position+worldOffset)-eyePosition;vPositionW=rotateAlign(toCamera,rotatedCorner.xyz);vec4 viewPosition=(view*vec4(vPositionW,1.0)); #else rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.z=0.;rotatedCorner.xy+=translationPivot; #ifdef LOCAL vec4 viewPosition=view*vec4(((emitterWM*vec4(position,1.0)).xyz+worldOffset),1.0)+rotatedCorner; #else vec4 viewPosition=view*vec4((position+worldOffset),1.0)+rotatedCorner; #endif vPositionW=(invView*viewPosition).xyz; #endif #else vec3 rotatedCorner;rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=0.;rotatedCorner.z=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.xz+=translationPivot;vec3 yaxis=normalize(initialDirection);vPositionW=rotate(yaxis,rotatedCorner);vec4 viewPosition=view*vec4(vPositionW,1.0); #endif gl_Position=projection*viewPosition; #if defined(CLIPPLANE) || defined(CLIPPLANE2) || defined(CLIPPLANE3) || defined(CLIPPLANE4) || defined(CLIPPLANE5) || defined(CLIPPLANE6) vec4 worldPos=vec4(vPositionW,1.0); #endif #include #include }`; je.ShadersStore[sge] = nge; class L4 extends V4 { /** * Gets a boolean indicating if the GPU particles can be rendered on current browser */ static 
get IsSupported() { if (!gi.LastCreatedEngine) return !1; const e = gi.LastCreatedEngine.getCaps(); return e.supportTransformFeedbacks || e.supportComputeShaders; } _createIndexBuffer() { this._linesIndexBufferUseInstancing = this._engine.createIndexBuffer(new Uint32Array([0, 1, 1, 3, 3, 2, 2, 0, 0, 3])); } /** * Gets the maximum number of particles active at the same time. * @returns The max number of active particles. */ getCapacity() { return this._capacity; } /** * Gets or set the number of active particles * The value cannot be greater than "capacity" (if it is, it will be limited to "capacity"). */ get maxActiveParticleCount() { return this._maxActiveParticleCount; } set maxActiveParticleCount(e) { this._maxActiveParticleCount = Math.min(e, this._capacity); } /** * Gets or set the number of active particles * @deprecated Please use maxActiveParticleCount instead. */ get activeParticleCount() { return this.maxActiveParticleCount; } set activeParticleCount(e) { this.maxActiveParticleCount = e; } /** * Is this system ready to be used/rendered * @returns true if the system is ready */ isReady() { if (!this.emitter || this._imageProcessingConfiguration && !this._imageProcessingConfiguration.isReady() || !this.particleTexture || !this.particleTexture.isReady()) return !1; if (this.blendMode !== ns.BLENDMODE_MULTIPLYADD) { if (!this._getWrapper(this.blendMode).effect.isReady()) return !1; } else if (!this._getWrapper(ns.BLENDMODE_MULTIPLY).effect.isReady() || !this._getWrapper(ns.BLENDMODE_ADD).effect.isReady()) return !1; return this._platform.isUpdateBufferCreated() ? this._platform.isUpdateBufferReady() : (this._recreateUpdateEffect(), !1); } /** * Gets if the system has been started. (Note: this will still be true after stop is called) * @returns True if it has been started, otherwise false. */ isStarted() { return this._started; } /** * Gets if the system has been stopped. (Note: rendering is still happening but the system is frozen) * @returns True if it has been stopped, otherwise false. */ isStopped() { return this._stopped; } /** * Gets a boolean indicating that the system is stopping * @returns true if the system is currently stopping */ isStopping() { return !1; } /** * Gets the number of particles active at the same time. * @returns The number of active particles. */ getActiveCount() { return this._currentActiveCount; } /** * Starts the particle system and begins to emit * @param delay defines the delay in milliseconds before starting the system (this.startDelay by default) */ start(e = this.startDelay) { if (!this.targetStopDuration && this._hasTargetStopDurationDependantGradient()) throw "Particle system started with a targetStopDuration dependant gradient (eg. startSizeGradients) but no targetStopDuration set"; if (e) { setTimeout(() => { this.start(0); }, e); return; } this._started = !0, this._stopped = !1, this._preWarmDone = !1, this.beginAnimationOnStart && this.animations && this.animations.length > 0 && this._scene && this._scene.beginAnimation(this, this.beginAnimationFrom, this.beginAnimationTo, this.beginAnimationLoop); } /** * Stops the particle system. 
*/ stop() { this._stopped || (this._stopped = !0); } /** * Remove all active particles */ reset() { this._releaseBuffers(), this._platform.releaseVertexBuffers(), this._currentActiveCount = 0, this._targetIndex = 0; } /** * Returns the string "GPUParticleSystem" * @returns a string containing the class name */ getClassName() { return "GPUParticleSystem"; } /** * Gets the custom effect used to render the particles * @param blendMode Blend mode for which the effect should be retrieved * @returns The effect */ getCustomEffect(e = 0) { var t, i; return (i = (t = this._customWrappers[e]) === null || t === void 0 ? void 0 : t.effect) !== null && i !== void 0 ? i : this._customWrappers[0].effect; } _getCustomDrawWrapper(e = 0) { var t; return (t = this._customWrappers[e]) !== null && t !== void 0 ? t : this._customWrappers[0]; } /** * Sets the custom effect used to render the particles * @param effect The effect to set * @param blendMode Blend mode for which the effect should be set */ setCustomEffect(e, t = 0) { this._customWrappers[t] = new $o(this._engine), this._customWrappers[t].effect = e; } /** * Observable that will be called just before the particles are drawn */ get onBeforeDrawParticlesObservable() { return this._onBeforeDrawParticlesObservable || (this._onBeforeDrawParticlesObservable = new Fe()), this._onBeforeDrawParticlesObservable; } /** * Gets the name of the particle vertex shader */ get vertexShaderName() { return "gpuRenderParticles"; } /** * Gets the vertex buffers used by the particle system * Should be called after render() has been called for the current frame so that the buffers returned are the ones that have been updated * in the current frame (there's a ping-pong between two sets of buffers - for a given frame, one set is used as the source and the other as the destination) */ get vertexBuffers() { return this._renderVertexBuffers[this._targetIndex ^ 1]; } /** * Gets the index buffer used by the particle system (null for GPU particle systems) */ get indexBuffer() { return null; } _removeGradientAndTexture(e, t, i) { return super._removeGradientAndTexture(e, t, i), this._releaseBuffers(), this; } /** * Adds a new color gradient * @param gradient defines the gradient to use (between 0 and 1) * @param color1 defines the color to affect to the specified gradient * @returns the current particle system */ addColorGradient(e, t) { this._colorGradients || (this._colorGradients = []); const i = new ej(e, t); return this._colorGradients.push(i), this._refreshColorGradient(!0), this._releaseBuffers(), this; } _refreshColorGradient(e = !1) { this._colorGradients && (e && this._colorGradients.sort((t, i) => t.gradient < i.gradient ? -1 : t.gradient > i.gradient ? 
1 : 0), this._colorGradientsTexture && (this._colorGradientsTexture.dispose(), this._colorGradientsTexture = null)); } /** Force the system to rebuild all gradients that need to be resync */ forceRefreshGradients() { this._refreshColorGradient(), this._refreshFactorGradient(this._sizeGradients, "_sizeGradientsTexture"), this._refreshFactorGradient(this._angularSpeedGradients, "_angularSpeedGradientsTexture"), this._refreshFactorGradient(this._velocityGradients, "_velocityGradientsTexture"), this._refreshFactorGradient(this._limitVelocityGradients, "_limitVelocityGradientsTexture"), this._refreshFactorGradient(this._dragGradients, "_dragGradientsTexture"), this.reset(); } /** * Remove a specific color gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeColorGradient(e) { return this._removeGradientAndTexture(e, this._colorGradients, this._colorGradientsTexture), this._colorGradientsTexture = null, this; } /** * Resets the draw wrappers cache */ resetDrawCache() { var e; for (const t in this._drawWrappers) (e = this._drawWrappers[t].drawContext) === null || e === void 0 || e.reset(); } _addFactorGradient(e, t, i) { const r = new tj(t, i); e.push(r), this._releaseBuffers(); } /** * Adds a new size gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the size factor to affect to the specified gradient * @returns the current particle system */ addSizeGradient(e, t) { return this._sizeGradients || (this._sizeGradients = []), this._addFactorGradient(this._sizeGradients, e, t), this._refreshFactorGradient(this._sizeGradients, "_sizeGradientsTexture", !0), this._releaseBuffers(), this; } /** * Remove a specific size gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeSizeGradient(e) { return this._removeGradientAndTexture(e, this._sizeGradients, this._sizeGradientsTexture), this._sizeGradientsTexture = null, this; } _refreshFactorGradient(e, t, i = !1) { if (!e) return; i && e.sort((s, n) => s.gradient < n.gradient ? -1 : s.gradient > n.gradient ? 
1 : 0); const r = this; r[t] && (r[t].dispose(), r[t] = null); } /** * Adds a new angular speed gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the angular speed to affect to the specified gradient * @returns the current particle system */ addAngularSpeedGradient(e, t) { return this._angularSpeedGradients || (this._angularSpeedGradients = []), this._addFactorGradient(this._angularSpeedGradients, e, t), this._refreshFactorGradient(this._angularSpeedGradients, "_angularSpeedGradientsTexture", !0), this._releaseBuffers(), this; } /** * Remove a specific angular speed gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeAngularSpeedGradient(e) { return this._removeGradientAndTexture(e, this._angularSpeedGradients, this._angularSpeedGradientsTexture), this._angularSpeedGradientsTexture = null, this; } /** * Adds a new velocity gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the velocity to affect to the specified gradient * @returns the current particle system */ addVelocityGradient(e, t) { return this._velocityGradients || (this._velocityGradients = []), this._addFactorGradient(this._velocityGradients, e, t), this._refreshFactorGradient(this._velocityGradients, "_velocityGradientsTexture", !0), this._releaseBuffers(), this; } /** * Remove a specific velocity gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeVelocityGradient(e) { return this._removeGradientAndTexture(e, this._velocityGradients, this._velocityGradientsTexture), this._velocityGradientsTexture = null, this; } /** * Adds a new limit velocity gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the limit velocity value to affect to the specified gradient * @returns the current particle system */ addLimitVelocityGradient(e, t) { return this._limitVelocityGradients || (this._limitVelocityGradients = []), this._addFactorGradient(this._limitVelocityGradients, e, t), this._refreshFactorGradient(this._limitVelocityGradients, "_limitVelocityGradientsTexture", !0), this._releaseBuffers(), this; } /** * Remove a specific limit velocity gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeLimitVelocityGradient(e) { return this._removeGradientAndTexture(e, this._limitVelocityGradients, this._limitVelocityGradientsTexture), this._limitVelocityGradientsTexture = null, this; } /** * Adds a new drag gradient * @param gradient defines the gradient to use (between 0 and 1) * @param factor defines the drag value to affect to the specified gradient * @returns the current particle system */ addDragGradient(e, t) { return this._dragGradients || (this._dragGradients = []), this._addFactorGradient(this._dragGradients, e, t), this._refreshFactorGradient(this._dragGradients, "_dragGradientsTexture", !0), this._releaseBuffers(), this; } /** * Remove a specific drag gradient * @param gradient defines the gradient to remove * @returns the current particle system */ removeDragGradient(e) { return this._removeGradientAndTexture(e, this._dragGradients, this._dragGradientsTexture), this._dragGradientsTexture = null, this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ addEmitRateGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeEmitRateGradient() { return this; } 
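/*
 * Usage sketch (illustrative): the gradient methods above take a gradient position in [0, 1]
 * and return the system for chaining. The sketch assumes the public Babylon.js names
 * (BABYLON.GPUParticleSystem, Texture, Vector3, Color4) that the minified classes in this
 * bundle correspond to, plus a `scene` variable and texture URL supplied by the caller:
 *
 *   const system = new BABYLON.GPUParticleSystem("flames", { capacity: 10000 }, scene);
 *   system.particleTexture = new BABYLON.Texture("textures/flare.png", scene);
 *   system.emitter = new BABYLON.Vector3(0, 0, 0);
 *   // Color gradients are baked into a 1D lookup texture (see _createColorGradientTexture).
 *   system.addColorGradient(0, new BABYLON.Color4(1, 0.8, 0.2, 1));
 *   system.addColorGradient(1, new BABYLON.Color4(1, 0.1, 0.1, 0));
 *   // Factor gradients (size, angular speed, velocity, limit velocity, drag) follow the same pattern.
 *   system.addSizeGradient(0, 0.5);
 *   system.addSizeGradient(1, 3);
 *   system.start();
 *
 * Ramp, remap, emit-rate, start-size and lifetime gradients are CPU-only; the GPU
 * implementation stubs below simply return the system unchanged.
 */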
/** * Not supported by GPUParticleSystem * @returns the current particle system */ addStartSizeGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeStartSizeGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ addColorRemapGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeColorRemapGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ addAlphaRemapGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeAlphaRemapGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ addRampGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeRampGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the list of ramp gradients */ getRampGradients() { return null; } /** * Not supported by GPUParticleSystem * Gets or sets a boolean indicating that ramp gradients must be used * @see https://doc.babylonjs.com/features/featuresDeepDive/particles/particle_system/particle_system_intro#ramp-gradients */ get useRampGradients() { return !1; } set useRampGradients(e) { } /** * Not supported by GPUParticleSystem * @returns the current particle system */ addLifeTimeGradient() { return this; } /** * Not supported by GPUParticleSystem * @returns the current particle system */ removeLifeTimeGradient() { return this; } /** * Instantiates a GPU particle system. * Particles are often small sprites used to simulate hard-to-reproduce phenomena like fire, smoke, water, or abstract visual effects like magic glitter and faery dust. * @param name The name of the particle system * @param options The options used to create the system * @param sceneOrEngine The scene the particle system belongs to or the engine to use if no scene * @param customEffect a custom effect used to change the way particles are rendered by default * @param isAnimationSheetEnabled Must be true if using a spritesheet to animate the particles texture */ constructor(e, t, i, r = null, s = !1) { if (super(e), this.layerMask = 268435455, this._accumulatedCount = 0, this._renderVertexBuffers = [], this._targetIndex = 0, this._currentRenderId = -1, this._currentRenderingCameraUniqueId = -1, this._started = !1, this._stopped = !1, this._timeDelta = 0, this.updateInAnimate = !1, this._actualFrame = 0, this._rawTextureWidth = 256, this.onDisposeObservable = new Fe(), this.onStoppedObservable = new Fe(), this.forceDepthWrite = !1, this._preWarmDone = !1, this.isLocal = !1, this.isGPU = !0, this._onBeforeDrawParticlesObservable = null, !i || i.getClassName() === "Scene" ? (this._scene = i || gi.LastCreatedScene, this._engine = this._scene.getEngine(), this.uniqueId = this._scene.getUniqueId(), this._scene.particleSystems.push(this)) : (this._engine = i, this.defaultProjectionMatrix = Ae.PerspectiveFovLH(0.8, 1, 0.1, 100, this._engine.isNDCHalfZRange)), this._engine.getCaps().supportComputeShaders) { if (!Qo("BABYLON.ComputeShaderParticleSystem")) throw new Error("The ComputeShaderParticleSystem class is not available! 
Make sure you have imported it."); this._platform = new (Qo("BABYLON.ComputeShaderParticleSystem"))(this, this._engine); } else { if (!Qo("BABYLON.WebGL2ParticleSystem")) throw new Error("The WebGL2ParticleSystem class is not available! Make sure you have imported it."); this._platform = new (Qo("BABYLON.WebGL2ParticleSystem"))(this, this._engine); } this._customWrappers = { 0: new $o(this._engine) }, this._customWrappers[0].effect = r, this._drawWrappers = { 0: new $o(this._engine) }, this._drawWrappers[0].drawContext && (this._drawWrappers[0].drawContext.useInstancing = !0), this._createIndexBuffer(), this._attachImageProcessingConfiguration(null), t = t ?? {}, t.randomTextureSize || delete t.randomTextureSize; const n = Object.assign({ capacity: 5e4, randomTextureSize: this._engine.getCaps().maxTextureSize }, t), a = t; isFinite(a) && (n.capacity = a), this._capacity = n.capacity, this._maxActiveParticleCount = n.capacity, this._currentActiveCount = 0, this._isAnimationSheetEnabled = s, this.particleEmitterType = new o5(); const l = Math.min(this._engine.getCaps().maxTextureSize, n.randomTextureSize); let o = []; for (let u = 0; u < l; ++u) o.push(Math.random()), o.push(Math.random()), o.push(Math.random()), o.push(Math.random()); this._randomTexture = new Po(new Float32Array(o), l, 1, 5, i, !1, !1, 1, 1), this._randomTexture.name = "GPUParticleSystem_random1", this._randomTexture.wrapU = 1, this._randomTexture.wrapV = 1, o = []; for (let u = 0; u < l; ++u) o.push(Math.random()), o.push(Math.random()), o.push(Math.random()), o.push(Math.random()); this._randomTexture2 = new Po(new Float32Array(o), l, 1, 5, i, !1, !1, 1, 1), this._randomTexture2.name = "GPUParticleSystem_random2", this._randomTexture2.wrapU = 1, this._randomTexture2.wrapV = 1, this._randomTextureSize = l; } _reset() { this._releaseBuffers(); } _createVertexBuffers(e, t, i) { const r = {}; r.position = t.createVertexBuffer("position", 0, 3, this._attributesStrideSize, !0); let s = 3; r.age = t.createVertexBuffer("age", s, 1, this._attributesStrideSize, !0), s += 1, r.size = t.createVertexBuffer("size", s, 3, this._attributesStrideSize, !0), s += 3, r.life = t.createVertexBuffer("life", s, 1, this._attributesStrideSize, !0), s += 1, s += 4, this.billboardMode === ns.BILLBOARDMODE_STRETCHED && (r.direction = t.createVertexBuffer("direction", s, 3, this._attributesStrideSize, !0)), s += 3, this._platform.alignDataInBuffer && (s += 1), this.particleEmitterType instanceof l5 && (s += 3, this._platform.alignDataInBuffer && (s += 1)), this._colorGradientsTexture || (r.color = t.createVertexBuffer("color", s, 4, this._attributesStrideSize, !0), s += 4), this._isBillboardBased || (r.initialDirection = t.createVertexBuffer("initialDirection", s, 3, this._attributesStrideSize, !0), s += 3, this._platform.alignDataInBuffer && (s += 1)), this.noiseTexture && (r.noiseCoordinates1 = t.createVertexBuffer("noiseCoordinates1", s, 3, this._attributesStrideSize, !0), s += 3, this._platform.alignDataInBuffer && (s += 1), r.noiseCoordinates2 = t.createVertexBuffer("noiseCoordinates2", s, 3, this._attributesStrideSize, !0), s += 3, this._platform.alignDataInBuffer && (s += 1)), r.angle = t.createVertexBuffer("angle", s, 1, this._attributesStrideSize, !0), this._angularSpeedGradientsTexture ? 
s++ : s += 2, this._isAnimationSheetEnabled && (r.cellIndex = t.createVertexBuffer("cellIndex", s, 1, this._attributesStrideSize, !0), s += 1, this.spriteRandomStartCell && (r.cellStartOffset = t.createVertexBuffer("cellStartOffset", s, 1, this._attributesStrideSize, !0), s += 1)), r.offset = i.createVertexBuffer("offset", 0, 2), r.uv = i.createVertexBuffer("uv", 2, 2), this._renderVertexBuffers.push(r), this._platform.createVertexBuffers(e, r), this.resetDrawCache(); } _initialize(e = !1) { if (this._buffer0 && !e) return; const t = this._engine, i = []; this._attributesStrideSize = 21, this._targetIndex = 0, this._platform.alignDataInBuffer && (this._attributesStrideSize += 1), this.particleEmitterType instanceof l5 && (this._attributesStrideSize += 3, this._platform.alignDataInBuffer && (this._attributesStrideSize += 1)), this.isBillboardBased || (this._attributesStrideSize += 3, this._platform.alignDataInBuffer && (this._attributesStrideSize += 1)), this._colorGradientsTexture && (this._attributesStrideSize -= 4), this._angularSpeedGradientsTexture && (this._attributesStrideSize -= 1), this._isAnimationSheetEnabled && (this._attributesStrideSize += 1, this.spriteRandomStartCell && (this._attributesStrideSize += 1)), this.noiseTexture && (this._attributesStrideSize += 6, this._platform.alignDataInBuffer && (this._attributesStrideSize += 2)), this._platform.alignDataInBuffer && (this._attributesStrideSize += 3 - (this._attributesStrideSize + 3 & 3)); const r = this.particleEmitterType instanceof l5, s = de.Vector3[0]; let n = 0; for (let u = 0; u < this._capacity; u++) if (i.push(0), i.push(0), i.push(0), i.push(0), i.push(0), i.push(0), i.push(0), i.push(0), i.push(Math.random()), i.push(Math.random()), i.push(Math.random()), i.push(Math.random()), r ? 
(this.particleEmitterType.particleDestinationGenerator(u, null, s), i.push(s.x), i.push(s.y), i.push(s.z)) : (i.push(0), i.push(0), i.push(0)), this._platform.alignDataInBuffer && i.push(0), n += 16, r && (this.particleEmitterType.particlePositionGenerator(u, null, s), i.push(s.x), i.push(s.y), i.push(s.z), this._platform.alignDataInBuffer && i.push(0), n += 4), this._colorGradientsTexture || (i.push(0), i.push(0), i.push(0), i.push(0), n += 4), this.isBillboardBased || (i.push(0), i.push(0), i.push(0), this._platform.alignDataInBuffer && i.push(0), n += 4), this.noiseTexture && (i.push(Math.random()), i.push(Math.random()), i.push(Math.random()), this._platform.alignDataInBuffer && i.push(0), i.push(Math.random()), i.push(Math.random()), i.push(Math.random()), this._platform.alignDataInBuffer && i.push(0), n += 8), i.push(0), n += 1, this._angularSpeedGradientsTexture || (i.push(0), n += 1), this._isAnimationSheetEnabled && (i.push(0), n += 1, this.spriteRandomStartCell && (i.push(0), n += 1)), this._platform.alignDataInBuffer) { let h = 3 - (n + 3 & 3); for (n += h; h-- > 0; ) i.push(0); } const a = new Float32Array([0.5, 0.5, 1, 1, -0.5, 0.5, 0, 1, 0.5, -0.5, 1, 0, -0.5, -0.5, 0, 0]), l = this._platform.createParticleBuffer(i), o = this._platform.createParticleBuffer(i); this._buffer0 = new hu(t, l, !1, this._attributesStrideSize), this._buffer1 = new hu(t, o, !1, this._attributesStrideSize), this._spriteBuffer = new hu(t, a, !1, 4), this._renderVertexBuffers = [], this._createVertexBuffers(this._buffer0, this._buffer1, this._spriteBuffer), this._createVertexBuffers(this._buffer1, this._buffer0, this._spriteBuffer), this._sourceBuffer = this._buffer0, this._targetBuffer = this._buffer1; } /** @internal */ _recreateUpdateEffect() { this._createColorGradientTexture(), this._createSizeGradientTexture(), this._createAngularSpeedGradientTexture(), this._createVelocityGradientTexture(), this._createLimitVelocityGradientTexture(), this._createDragGradientTexture(); let e = this.particleEmitterType ? this.particleEmitterType.getEffectDefines() : ""; return this._isBillboardBased && (e += ` #define BILLBOARD`), this._colorGradientsTexture && (e += ` #define COLORGRADIENTS`), this._sizeGradientsTexture && (e += ` #define SIZEGRADIENTS`), this._angularSpeedGradientsTexture && (e += ` #define ANGULARSPEEDGRADIENTS`), this._velocityGradientsTexture && (e += ` #define VELOCITYGRADIENTS`), this._limitVelocityGradientsTexture && (e += ` #define LIMITVELOCITYGRADIENTS`), this._dragGradientsTexture && (e += ` #define DRAGGRADIENTS`), this.isAnimationSheetEnabled && (e += ` #define ANIMATESHEET`, this.spriteRandomStartCell && (e += ` #define ANIMATESHEETRANDOMSTART`)), this.noiseTexture && (e += ` #define NOISE`), this.isLocal && (e += ` #define LOCAL`), this._platform.isUpdateBufferCreated() && this._cachedUpdateDefines === e ? 
!0 : (this._cachedUpdateDefines = e, this._updateBuffer = this._platform.createUpdateBuffer(e), this._platform.isUpdateBufferReady()); } /** * @internal */ _getWrapper(e) { const t = this._getCustomDrawWrapper(e); if (t != null && t.effect) return t; const i = []; this.fillDefines(i, e); let r = this._drawWrappers[e]; r || (r = new $o(this._engine), r.drawContext && (r.drawContext.useInstancing = !0), this._drawWrappers[e] = r); const s = i.join(` `); if (r.defines !== s) { const n = [], a = [], l = []; this.fillUniformsAttributesAndSamplerNames(a, n, l), r.setEffect(this._engine.createEffect("gpuRenderParticles", n, a, l, s), s); } return r; } /** * @internal */ static _GetAttributeNamesOrOptions(e = !1, t = !1, i = !1, r = !1) { const s = [Y.PositionKind, "age", "life", "size", "angle"]; return e || s.push(Y.ColorKind), t && s.push("cellIndex"), i || s.push("initialDirection"), r && s.push("direction"), s.push("offset", Y.UVKind), s; } /** * @internal */ static _GetEffectCreationOptions(e = !1, t = !1) { const i = ["emitterWM", "worldOffset", "view", "projection", "colorDead", "invView", "translationPivot", "eyePosition"]; return Gc(i), e && i.push("sheetInfos"), t && i.push("logarithmicDepthConstant"), i; } /** * Fill the defines array according to the current settings of the particle system * @param defines Array to be updated * @param blendMode blend mode to take into account when updating the array */ fillDefines(e, t = 0) { if (this._scene && bT(this, this._scene, e), t === ns.BLENDMODE_MULTIPLY && e.push("#define BLENDMULTIPLYMODE"), this.isLocal && e.push("#define LOCAL"), this.useLogarithmicDepth && e.push("#define LOGARITHMICDEPTH"), this._isBillboardBased) switch (e.push("#define BILLBOARD"), this.billboardMode) { case ns.BILLBOARDMODE_Y: e.push("#define BILLBOARDY"); break; case ns.BILLBOARDMODE_STRETCHED: e.push("#define BILLBOARDSTRETCHED"); break; case ns.BILLBOARDMODE_ALL: e.push("#define BILLBOARDMODE_ALL"); break; } this._colorGradientsTexture && e.push("#define COLORGRADIENTS"), this.isAnimationSheetEnabled && e.push("#define ANIMATESHEET"), this._imageProcessingConfiguration && (this._imageProcessingConfiguration.prepareDefines(this._imageProcessingConfigurationDefines), e.push("" + this._imageProcessingConfigurationDefines.toString())); } /** * Fill the uniforms, attributes and samplers arrays according to the current settings of the particle system * @param uniforms Uniforms array to fill * @param attributes Attributes array to fill * @param samplers Samplers array to fill */ fillUniformsAttributesAndSamplerNames(e, t, i) { t.push(...L4._GetAttributeNamesOrOptions(!!this._colorGradientsTexture, this._isAnimationSheetEnabled, this._isBillboardBased, this._isBillboardBased && this.billboardMode === ns.BILLBOARDMODE_STRETCHED)), e.push(...L4._GetEffectCreationOptions(this._isAnimationSheetEnabled, this.useLogarithmicDepth)), i.push("diffuseSampler", "colorGradientSampler"), this._imageProcessingConfiguration && (Ds.PrepareUniforms(e, this._imageProcessingConfigurationDefines), Ds.PrepareSamplers(i, this._imageProcessingConfigurationDefines)); } /** * Animates the particle system for the current frame by emitting new particles and/or animating the living ones. * @param preWarm defines if we are in the pre-warming phase */ animate(e = !1) { var t; this._timeDelta = this.updateSpeed * (e ? this.preWarmStepOffset : ((t = this._scene) === null || t === void 0 ?
void 0 : t.getAnimationRatio()) || 1), this._actualFrame += this._timeDelta, this._stopped || this.targetStopDuration && this._actualFrame >= this.targetStopDuration && this.stop(), this.updateInAnimate && this._update(); } _createFactorGradientTexture(e, t) { const i = this[t]; if (!e || !e.length || i) return; const r = new Float32Array(this._rawTextureWidth); for (let s = 0; s < this._rawTextureWidth; s++) { const n = s / this._rawTextureWidth; N_.GetCurrentGradient(n, e, (a, l, o) => { r[s] = yt.Lerp(a.factor1, l.factor1, o); }); } this[t] = Po.CreateRTexture(r, this._rawTextureWidth, 1, this._scene || this._engine, !1, !1, 1), this[t].name = t.substring(1); } _createSizeGradientTexture() { this._createFactorGradientTexture(this._sizeGradients, "_sizeGradientsTexture"); } _createAngularSpeedGradientTexture() { this._createFactorGradientTexture(this._angularSpeedGradients, "_angularSpeedGradientsTexture"); } _createVelocityGradientTexture() { this._createFactorGradientTexture(this._velocityGradients, "_velocityGradientsTexture"); } _createLimitVelocityGradientTexture() { this._createFactorGradientTexture(this._limitVelocityGradients, "_limitVelocityGradientsTexture"); } _createDragGradientTexture() { this._createFactorGradientTexture(this._dragGradients, "_dragGradientsTexture"); } _createColorGradientTexture() { if (!this._colorGradients || !this._colorGradients.length || this._colorGradientsTexture) return; const e = new Uint8Array(this._rawTextureWidth * 4), t = mn.Color4[0]; for (let i = 0; i < this._rawTextureWidth; i++) { const r = i / this._rawTextureWidth; N_.GetCurrentGradient(r, this._colorGradients, (s, n, a) => { Et.LerpToRef(s.color1, n.color1, a, t), e[i * 4] = t.r * 255, e[i * 4 + 1] = t.g * 255, e[i * 4 + 2] = t.b * 255, e[i * 4 + 3] = t.a * 255; }); } this._colorGradientsTexture = Po.CreateRGBATexture(e, this._rawTextureWidth, 1, this._scene, !1, !1, 1), this._colorGradientsTexture.name = "colorGradients"; } _render(e, t) { var i, r, s, n, a; const l = this._getWrapper(e), o = l.effect; this._engine.enableEffect(l); const u = ((i = this._scene) === null || i === void 0 ? void 0 : i.getViewMatrix()) || Ae.IdentityReadOnly; if (o.setMatrix("view", u), o.setMatrix("projection", (r = this.defaultProjectionMatrix) !== null && r !== void 0 ? r : this._scene.getProjectionMatrix()), o.setTexture("diffuseSampler", this.particleTexture), o.setVector2("translationPivot", this.translationPivot), o.setVector3("worldOffset", this.worldOffset), this.isLocal && o.setMatrix("emitterWM", t), this._colorGradientsTexture ? 
o.setTexture("colorGradientSampler", this._colorGradientsTexture) : o.setDirectColor4("colorDead", this.colorDead), this._isAnimationSheetEnabled && this.particleTexture) { const d = this.particleTexture.getBaseSize(); o.setFloat3("sheetInfos", this.spriteCellWidth / d.width, this.spriteCellHeight / d.height, d.width / this.spriteCellWidth); } if (this._isBillboardBased && this._scene) { const d = this._scene.activeCamera; o.setVector3("eyePosition", d.globalPosition); } const h = o.defines; if (this._scene && Ec(o, this, this._scene), h.indexOf("#define BILLBOARDMODE_ALL") >= 0) { const d = u.clone(); d.invert(), o.setMatrix("invView", d); } switch (this.useLogarithmicDepth && this._scene && Ke.BindLogDepth(h, o, this._scene), this._imageProcessingConfiguration && !this._imageProcessingConfiguration.applyByPostProcess && this._imageProcessingConfiguration.bind(o), e) { case ns.BLENDMODE_ADD: this._engine.setAlphaMode(1); break; case ns.BLENDMODE_ONEONE: this._engine.setAlphaMode(6); break; case ns.BLENDMODE_STANDARD: this._engine.setAlphaMode(2); break; case ns.BLENDMODE_MULTIPLY: this._engine.setAlphaMode(4); break; } return this._platform.bindDrawBuffers(this._targetIndex, o, !((s = this._scene) === null || s === void 0) && s.forceWireframe ? this._linesIndexBufferUseInstancing : null), this._onBeforeDrawParticlesObservable && this._onBeforeDrawParticlesObservable.notifyObservers(o), !((n = this._scene) === null || n === void 0) && n.forceWireframe ? this._engine.drawElementsType(6, 0, 10, this._currentActiveCount) : this._engine.drawArraysType(7, 0, 4, this._currentActiveCount), this._engine.setAlphaMode(0), !((a = this._scene) === null || a === void 0) && a.forceWireframe && this._engine.unbindInstanceAttributes(), this._currentActiveCount; } /** @internal */ _update(e) { if (!this.emitter || !this._targetBuffer || !this._recreateUpdateEffect()) return; if (!e) if (this.emitter.position) e = this.emitter.getWorldMatrix(); else { const i = this.emitter; e = de.Matrix[0], Ae.TranslationToRef(i.x, i.y, i.z, e); } this._platform.preUpdateParticleBuffer(), this._updateBuffer.setFloat("currentCount", this._currentActiveCount), this._updateBuffer.setFloat("timeDelta", this._timeDelta), this._updateBuffer.setFloat("stopFactor", this._stopped ? 0 : 1), this._updateBuffer.setInt("randomTextureSize", this._randomTextureSize), this._updateBuffer.setFloat2("lifeTime", this.minLifeTime, this.maxLifeTime), this._updateBuffer.setFloat2("emitPower", this.minEmitPower, this.maxEmitPower), this._colorGradientsTexture || (this._updateBuffer.setDirectColor4("color1", this.color1), this._updateBuffer.setDirectColor4("color2", this.color2)), this._updateBuffer.setFloat2("sizeRange", this.minSize, this.maxSize), this._updateBuffer.setFloat4("scaleRange", this.minScaleX, this.maxScaleX, this.minScaleY, this.maxScaleY), this._updateBuffer.setFloat4("angleRange", this.minAngularSpeed, this.maxAngularSpeed, this.minInitialRotation, this.maxInitialRotation), this._updateBuffer.setVector3("gravity", this.gravity), this._limitVelocityGradientsTexture && this._updateBuffer.setFloat("limitVelocityDamping", this.limitVelocityDamping), this.particleEmitterType && this.particleEmitterType.applyToShader(this._updateBuffer), this._isAnimationSheetEnabled && this._updateBuffer.setFloat4("cellInfos", this.startSpriteCellID, this.endSpriteCellID, this.spriteCellChangeSpeed, this.spriteCellLoop ? 
1 : 0), this.noiseTexture && this._updateBuffer.setVector3("noiseStrength", this.noiseStrength), this.isLocal || this._updateBuffer.setMatrix("emitterWM", e), this._platform.updateParticleBuffer(this._targetIndex, this._targetBuffer, this._currentActiveCount), this._targetIndex++, this._targetIndex === 2 && (this._targetIndex = 0); const t = this._sourceBuffer; this._sourceBuffer = this._targetBuffer, this._targetBuffer = t; } /** * Renders the particle system in its current state * @param preWarm defines if the system should only update the particles but not render them * @param forceUpdateOnly if true, force to only update the particles and never display them (meaning, even if preWarm=false, when forceUpdateOnly=true the particles won't be displayed) * @returns the current number of particles */ render(e = !1, t = !1) { if (!this._started || !this.isReady()) return 0; if (!e && this._scene) { if (!this._preWarmDone && this.preWarmCycles) { for (let n = 0; n < this.preWarmCycles; n++) this.animate(!0), this.render(!0, !0); this._preWarmDone = !0; } if (this._currentRenderId === this._scene.getRenderId() && (!this._scene.activeCamera || this._scene.activeCamera && this._currentRenderingCameraUniqueId === this._scene.activeCamera.uniqueId)) return 0; this._currentRenderId = this._scene.getRenderId(), this._scene.activeCamera && (this._currentRenderingCameraUniqueId = this._scene.activeCamera.uniqueId); } if (this._initialize(), this._accumulatedCount += this.emitRate * this._timeDelta, this._accumulatedCount > 1) { const n = this._accumulatedCount | 0; this._accumulatedCount -= n, this._currentActiveCount += n; } if (this._currentActiveCount = Math.min(this._maxActiveParticleCount, this._currentActiveCount), !this._currentActiveCount) return 0; let i; if (this.emitter.position) i = this.emitter.getWorldMatrix(); else { const n = this.emitter; i = de.Matrix[0], Ae.TranslationToRef(n.x, n.y, n.z, i); } const r = this._engine; this.updateInAnimate || this._update(i); let s = 0; return !e && !t && (r.setState(!1), this.forceDepthWrite && r.setDepthWrite(!0), this.blendMode === ns.BLENDMODE_MULTIPLYADD ? s = this._render(ns.BLENDMODE_MULTIPLY, i) + this._render(ns.BLENDMODE_ADD, i) : s = this._render(this.blendMode, i), this._engine.setAlphaMode(0)), s; } /** * Rebuilds the particle system */ rebuild() { const e = () => { !this._recreateUpdateEffect() || !this._platform.isUpdateBufferReady() ? 
setTimeout(e, 10) : this._initialize(!0); }; this._createIndexBuffer(), this._cachedUpdateDefines = "", this._platform.contextLost(), e(); } _releaseBuffers() { this._buffer0 && (this._buffer0.dispose(), this._buffer0 = null), this._buffer1 && (this._buffer1.dispose(), this._buffer1 = null), this._spriteBuffer && (this._spriteBuffer.dispose(), this._spriteBuffer = null), this._platform.releaseBuffers(); } /** * Disposes the particle system and frees the associated resources * @param disposeTexture defines if the particle texture must be disposed as well (true by default) */ dispose(e = !0) { for (const t in this._drawWrappers) this._drawWrappers[t].dispose(); if (this._drawWrappers = {}, this._scene) { const t = this._scene.particleSystems.indexOf(this); t > -1 && this._scene.particleSystems.splice(t, 1); } this._releaseBuffers(), this._platform.releaseVertexBuffers(); for (let t = 0; t < this._renderVertexBuffers.length; ++t) { const i = this._renderVertexBuffers[t]; for (const r in i) i[r].dispose(); } this._renderVertexBuffers = [], this._colorGradientsTexture && (this._colorGradientsTexture.dispose(), this._colorGradientsTexture = null), this._sizeGradientsTexture && (this._sizeGradientsTexture.dispose(), this._sizeGradientsTexture = null), this._angularSpeedGradientsTexture && (this._angularSpeedGradientsTexture.dispose(), this._angularSpeedGradientsTexture = null), this._velocityGradientsTexture && (this._velocityGradientsTexture.dispose(), this._velocityGradientsTexture = null), this._limitVelocityGradientsTexture && (this._limitVelocityGradientsTexture.dispose(), this._limitVelocityGradientsTexture = null), this._dragGradientsTexture && (this._dragGradientsTexture.dispose(), this._dragGradientsTexture = null), this._randomTexture && (this._randomTexture.dispose(), this._randomTexture = null), this._randomTexture2 && (this._randomTexture2.dispose(), this._randomTexture2 = null), e && this.particleTexture && (this.particleTexture.dispose(), this.particleTexture = null), e && this.noiseTexture && (this.noiseTexture.dispose(), this.noiseTexture = null), this.onStoppedObservable.clear(), this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(); } /** * Clones the particle system. * @param name The name of the cloned object * @param newEmitter The new emitter to use * @param cloneTexture Also clone the textures if true * @returns the cloned particle system */ clone(e, t, i = !1) { const r = Object.assign({}, this._customWrappers); let s = null; const n = this._engine; if (n.createEffectForParticles && this.customShader != null) { s = this.customShader; const o = s.shaderOptions.defines.length > 0 ?
s.shaderOptions.defines.join(` `) : ""; r[0] = n.createEffectForParticles(s.shaderPath.fragmentElement, s.shaderOptions.uniforms, s.shaderOptions.samplers, o, void 0, void 0, void 0, this); } const a = this.serialize(i), l = L4.Parse(a, this._scene || this._engine, this._rootUrl); return l.name = e, l.customShader = s, l._customWrappers = r, t === void 0 && (t = this.emitter), this.noiseTexture && (l.noiseTexture = this.noiseTexture.clone()), l.emitter = t, l; } /** * Serializes the particle system to a JSON object * @param serializeTexture defines if the texture must be serialized as well * @returns the JSON object */ serialize(e = !1) { const t = {}; return ns._Serialize(t, this, e), t.activeParticleCount = this.activeParticleCount, t.randomTextureSize = this._randomTextureSize, t.customShader = this.customShader, t; } /** * Parses a JSON object to create a GPU particle system. * @param parsedParticleSystem The JSON object to parse * @param sceneOrEngine The scene or the engine to create the particle system in * @param rootUrl The root url to use to load external dependencies like texture * @param doNotStart Ignore the preventAutoStart attribute and does not start * @param capacity defines the system capacity (if null or undefined the sotred capacity will be used) * @returns the parsed GPU particle system */ static Parse(e, t, i, r = !1, s) { const n = e.name; let a, l; t instanceof mi ? a = t : (l = t, a = l.getEngine()); const o = new L4(n, { capacity: s || e.capacity, randomTextureSize: e.randomTextureSize }, t, null, e.isAnimationSheetEnabled); if (o._rootUrl = i, e.customShader && a.createEffectForParticles) { const u = e.customShader, h = u.shaderOptions.defines.length > 0 ? u.shaderOptions.defines.join(` `) : "", d = a.createEffectForParticles(u.shaderPath.fragmentElement, u.shaderOptions.uniforms, u.shaderOptions.samplers, h, void 0, void 0, void 0, o); o.setCustomEffect(d, 0), o.customShader = u; } return e.id && (o.id = e.id), e.activeParticleCount && (o.activeParticleCount = e.activeParticleCount), ns._Parse(e, o, t, i), e.preventAutoStart && (o.preventAutoStart = e.preventAutoStart), !r && !o.preventAutoStart && o.start(), o; } } class qC { constructor() { this._emitterNodeIsOwned = !0, this.systems = []; } /** * Gets or sets the emitter node used with this set */ get emitterNode() { return this._emitterNode; } set emitterNode(e) { this._emitterNodeIsOwned && this._emitterNode && (this._emitterNode.dispose && this._emitterNode.dispose(), this._emitterNodeIsOwned = !1); for (const t of this.systems) t.emitter = e; this._emitterNode = e; } /** * Creates a new emitter mesh as a sphere * @param options defines the options used to create the sphere * @param options.diameter * @param options.segments * @param options.color * @param renderingGroupId defines the renderingGroupId to use for the sphere * @param scene defines the hosting scene */ setEmitterAsSphere(e, t, i) { this._emitterNodeIsOwned && this._emitterNode && this._emitterNode.dispose && this._emitterNode.dispose(), this._emitterNodeIsOwned = !0, this._emitterCreationOptions = { kind: "Sphere", options: e, renderingGroupId: t }; const r = Rd("emitterSphere", { diameter: e.diameter, segments: e.segments }, i); r.renderingGroupId = t; const s = new Dt("emitterSphereMaterial", i); s.emissiveColor = e.color, r.material = s; for (const n of this.systems) n.emitter = r; this._emitterNode = r; } /** * Starts all particle systems of the set * @param emitter defines an optional mesh to use as emitter for the particle systems */ 
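// (Hedged sketch of the ParticleSystemSet members documented above, using the public Babylon.js
// name for the class; `fireSystem`, `smokeSystem` and `scene` are hypothetical.)
// const set = new BABYLON.ParticleSystemSet();
// set.systems.push(fireSystem, smokeSystem);
// set.setEmitterAsSphere({ diameter: 0.6, segments: 16, color: new BABYLON.Color3(1, 0.4, 0) }, 0, scene);
// set.start(); // or set.start(someOtherEmitterMesh)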
start(e) { for (const t of this.systems) e && (t.emitter = e), t.start(); } /** * Release all associated resources */ dispose() { for (const e of this.systems) e.dispose(); this.systems.length = 0, this._emitterNode && (this._emitterNode.dispose && this._emitterNode.dispose(), this._emitterNode = null); } /** * Serialize the set into a JSON compatible object * @param serializeTexture defines if the texture must be serialized as well * @returns a JSON compatible representation of the set */ serialize(e = !1) { const t = {}; t.systems = []; for (const i of this.systems) t.systems.push(i.serialize(e)); return this._emitterNode && (t.emitter = this._emitterCreationOptions), t; } /** * Parse a new ParticleSystemSet from a serialized source * @param data defines a JSON compatible representation of the set * @param scene defines the hosting scene * @param gpu defines if we want GPU particles or CPU particles * @param capacity defines the system capacity (if null or undefined the sotred capacity will be used) * @returns a new ParticleSystemSet */ static Parse(e, t, i = !1, r) { const s = new qC(), n = this.BaseAssetsUrl + "/textures/"; t = t || gi.LastCreatedScene; for (const a of e.systems) s.systems.push(i ? L4.Parse(a, t, n, !0, r) : ns.Parse(a, t, n, !0, r)); if (e.emitter) { const a = e.emitter.options; switch (e.emitter.kind) { case "Sphere": s.setEmitterAsSphere({ diameter: a.diameter, segments: a.segments, color: ze.FromArray(a.color) }, e.emitter.renderingGroupId, t); break; } } return s; } } qC.BaseAssetsUrl = "https://assets.babylonjs.com/particles"; class bP { /** * Create a default particle system that you can tweak * @param emitter defines the emitter to use * @param capacity defines the system capacity (default is 500 particles) * @param scene defines the hosting scene * @param useGPU defines if a GPUParticleSystem must be created (default is false) * @returns the new Particle system */ static CreateDefault(e, t = 500, i, r = !1) { let s; return r ? s = new L4("default system", { capacity: t }, i) : s = new ns("default system", t, i), s.emitter = e, s.particleTexture = new De("https://assets.babylonjs.com/textures/flare.png", s.getScene()), s.createConeEmitter(0.1, Math.PI / 4), s.color1 = new Et(1, 1, 1, 1), s.color2 = new Et(1, 1, 1, 1), s.colorDead = new Et(1, 1, 1, 0), s.minSize = 0.1, s.maxSize = 0.1, s.minEmitPower = 2, s.maxEmitPower = 2, s.updateSpeed = 1 / 60, s.emitRate = 30, s; } /** * This is the main static method (one-liner) of this helper to create different particle systems * @param type This string represents the type to the particle system to create * @param scene The scene where the particle system should live * @param gpu If the system will use gpu * @param capacity defines the system capacity (if null or undefined the sotred capacity will be used) * @returns the ParticleSystemSet created */ static CreateAsync(e, t, i = !1, r) { t || (t = gi.LastCreatedScene); const s = {}; return t.addPendingData(s), new Promise((n, a) => { if (i && !L4.IsSupported) return t.removePendingData(s), a("Particle system with GPU is not supported."); Ve.LoadFile(`${bP.BaseAssetsUrl}/systems/${e}.json`, (l) => { t.removePendingData(s); const o = JSON.parse(l.toString()); return n(qC.Parse(o, t, i, r)); }, void 0, void 0, void 0, () => (t.removePendingData(s), a(`An error occurred with the creation of your particle system. Check if your type '${e}' exists.`))); }); } /** * Static function used to export a particle system to a ParticleSystemSet variable. 
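 * For instance (illustrative sketch, assuming the public helper name ParticleHelper and a hypothetical `scene`):
 * const set = BABYLON.ParticleHelper.ExportSet(scene.particleSystems);
 * const json = set.serialize(true); // true also serializes the textures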
* Please note that the emitter shape is not exported * @param systems defines the particle systems to export * @returns the created particle system set */ static ExportSet(e) { const t = new qC(); for (const i of e) t.systems.push(i); return t; } /** * Creates a particle system from a snippet saved in a remote file * @param name defines the name of the particle system to create (can be null or empty to use the one from the json data) * @param url defines the url to load from * @param scene defines the hosting scene * @param gpu If the system will use gpu * @param rootUrl defines the root URL to use to load textures and relative dependencies * @param capacity defines the system capacity (if null or undefined the sotred capacity will be used) * @returns a promise that will resolve to the new particle system */ static ParseFromFileAsync(e, t, i, r = !1, s = "", n) { return new Promise((a, l) => { const o = new go(); o.addEventListener("readystatechange", () => { if (o.readyState == 4) if (o.status == 200) { const u = JSON.parse(o.responseText); let h; r ? h = L4.Parse(u, i, s, !1, n) : h = ns.Parse(u, i, s, !1, n), e && (h.name = e), a(h); } else l("Unable to load the particle system"); }), o.open("GET", t), o.send(); }); } /** * Creates a particle system from a snippet saved by the particle system editor * @param snippetId defines the snippet to load (can be set to _BLANK to create a default one) * @param scene defines the hosting scene * @param gpu If the system will use gpu * @param rootUrl defines the root URL to use to load textures and relative dependencies * @param capacity defines the system capacity (if null or undefined the sotred capacity will be used) * @returns a promise that will resolve to the new particle system */ static ParseFromSnippetAsync(e, t, i = !1, r = "", s) { if (e === "_BLANK") { const n = this.CreateDefault(null); return n.start(), Promise.resolve(n); } return new Promise((n, a) => { const l = new go(); l.addEventListener("readystatechange", () => { if (l.readyState == 4) if (l.status == 200) { const o = JSON.parse(JSON.parse(l.responseText).jsonPayload), u = JSON.parse(o.particleSystem); let h; i ? h = L4.Parse(u, t, r, !1, s) : h = ns.Parse(u, t, r, !1, s), h.snippetId = e, n(h); } else a("Unable to load the snippet " + e); }), l.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), l.send(); }); } } bP.BaseAssetsUrl = qC.BaseAssetsUrl; bP.SnippetUrl = "https://snippet.babylonjs.com"; bP.CreateFromSnippetAsync = bP.ParseFromSnippetAsync; Yl.AddParser(Bt.NAME_PARTICLESYSTEM, (c, e, t, i) => { const r = Yl.GetIndividualParser(Bt.NAME_PARTICLESYSTEM); if (r && c.particleSystems !== void 0 && c.particleSystems !== null) for (let s = 0, n = c.particleSystems.length; s < n; s++) { const a = c.particleSystems[s]; t.particleSystems.push(r(a, e, i)); } }); Yl.AddIndividualParser(Bt.NAME_PARTICLESYSTEM, (c, e, t) => c.activeParticleCount ? L4.Parse(c, e, t) : ns.Parse(c, e, t)); $e.prototype.createEffectForParticles = function(c, e = [], t = [], i = "", r, s, n, a) { var l; let o = [], u = []; const h = []; return a ? a.fillUniformsAttributesAndSamplerNames(u, o, h) : (o = ns._GetAttributeNamesOrOptions(), u = ns._GetEffectCreationOptions()), i.indexOf(" BILLBOARD") === -1 && (i += ` #define BILLBOARD `), a != null && a.isAnimationSheetEnabled && i.indexOf(" ANIMATESHEET") === -1 && (i += ` #define ANIMATESHEET `), t.indexOf("diffuseSampler") === -1 && t.push("diffuseSampler"), this.createEffect({ vertex: (l = a == null ? 
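// (Hedged sketch of the helper entry points documented above - CreateAsync() and
// ParseFromSnippetAsync(); public names are assumed, "rain" is a hypothetical system type and
// "_BLANK" is the documented placeholder snippet id.)
// BABYLON.ParticleHelper.CreateAsync("rain", scene, BABYLON.GPUParticleSystem.IsSupported)
//     .then((set) => set.start());
// BABYLON.ParticleHelper.ParseFromSnippetAsync("_BLANK", scene).then((ps) => ps.start());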
void 0 : a.vertexShaderName) !== null && l !== void 0 ? l : "particles", fragmentElement: c }, o, u.concat(e), h.concat(t), i, r, s, n); }; ke.prototype.getEmittedParticleSystems = function() { const c = []; for (let e = 0; e < this.getScene().particleSystems.length; e++) { const t = this.getScene().particleSystems[e]; t.emitter === this && c.push(t); } return c; }; ke.prototype.getHierarchyEmittedParticleSystems = function() { const c = [], e = this.getDescendants(); e.push(this); for (let t = 0; t < this.getScene().particleSystems.length; t++) { const i = this.getScene().particleSystems[t], r = i.emitter; r.position && e.indexOf(r) !== -1 && c.push(i); } return c; }; class zH { /** * Particle BoundingInfo object * @returns a BoundingInfo */ getBoundingInfo() { return this._boundingInfo; } /** * Returns true if there is already a bounding info */ get hasBoundingInfo() { return this._boundingInfo !== null; } /** * Creates a Solid Particle object. * Don't create particles manually, use instead the Solid Particle System internal tools like _addParticle() * @param particleIndex (integer) is the particle index in the Solid Particle System pool. * @param particleId (integer) is the particle identifier. Unless some particles are removed from the SPS, it's the same value than the particle idx. * @param positionIndex (integer) is the starting index of the particle vertices in the SPS "positions" array. * @param indiceIndex (integer) is the starting index of the particle indices in the SPS "indices" array. * @param model (ModelShape) is a reference to the model shape on what the particle is designed. * @param shapeId (integer) is the model shape identifier in the SPS. * @param idxInShape (integer) is the index of the particle in the current model (ex: the 10th box of addShape(box, 30)) * @param sps defines the sps it is associated to * @param modelBoundingInfo is the reference to the model BoundingInfo used for intersection computations. * @param materialIndex is the particle material identifier (integer) when the MultiMaterials are enabled in the SPS. */ constructor(e, t, i, r, s, n, a, l, o = null, u = null) { this.idx = 0, this.id = 0, this.color = new Et(1, 1, 1, 1), this.position = D.Zero(), this.rotation = D.Zero(), this.scaling = D.One(), this.uvs = new Di(0, 0, 1, 1), this.velocity = D.Zero(), this.pivot = D.Zero(), this.translateFromPivot = !1, this.alive = !0, this.isVisible = !0, this._pos = 0, this._ind = 0, this.shapeId = 0, this.idxInShape = 0, this._stillInvisible = !1, this._rotationMatrix = [1, 0, 0, 0, 1, 0, 0, 0, 1], this.parentId = null, this.materialIndex = null, this.props = null, this.cullingStrategy = xr.CULLINGSTRATEGY_BOUNDINGSPHERE_ONLY, this._globalPosition = D.Zero(), this.idx = e, this.id = t, this._pos = i, this._ind = r, this._model = s, this.shapeId = n, this.idxInShape = a, this._sps = l, o && (this._modelBoundingInfo = o, this._boundingInfo = new zf(o.minimum, o.maximum)), u !== null && (this.materialIndex = u); } /** * Copies the particle property values into the existing target : position, rotation, scaling, uvs, colors, pivot, parent, visibility, alive * @param target the particle target * @returns the current particle */ copyToRef(e) { return e.position.copyFrom(this.position), e.rotation.copyFrom(this.rotation), this.rotationQuaternion && (e.rotationQuaternion ? e.rotationQuaternion.copyFrom(this.rotationQuaternion) : e.rotationQuaternion = this.rotationQuaternion.clone()), e.scaling.copyFrom(this.scaling), this.color && (e.color ? 
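// (Hedged sketch of the two Mesh helpers defined just above; `emitterMesh` is hypothetical.)
// const direct = emitterMesh.getEmittedParticleSystems();        // systems emitted by this mesh only
// const all = emitterMesh.getHierarchyEmittedParticleSystems();  // ...including its descendants
// all.forEach((ps) => ps.stop());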
e.color.copyFrom(this.color) : e.color = this.color.clone()), e.uvs.copyFrom(this.uvs), e.velocity.copyFrom(this.velocity), e.pivot.copyFrom(this.pivot), e.translateFromPivot = this.translateFromPivot, e.alive = this.alive, e.isVisible = this.isVisible, e.parentId = this.parentId, e.cullingStrategy = this.cullingStrategy, this.materialIndex !== null && (e.materialIndex = this.materialIndex), this; } /** * Legacy support, changed scale to scaling */ get scale() { return this.scaling; } /** * Legacy support, changed scale to scaling */ set scale(e) { this.scaling = e; } /** * Legacy support, changed quaternion to rotationQuaternion */ get quaternion() { return this.rotationQuaternion; } /** * Legacy support, changed quaternion to rotationQuaternion */ set quaternion(e) { this.rotationQuaternion = e; } /** * Returns a boolean. True if the particle intersects another particle or another mesh, else false. * The intersection is computed on the particle bounding sphere and Axis Aligned Bounding Box (AABB) * @param target is the object (solid particle or mesh) what the intersection is computed against. * @returns true if it intersects */ intersectsMesh(e) { return !this._boundingInfo || !e.hasBoundingInfo ? !1 : this._sps._bSphereOnly ? e6.Intersects(this._boundingInfo.boundingSphere, e.getBoundingInfo().boundingSphere) : this._boundingInfo.intersects(e.getBoundingInfo(), !1); } /** * Returns `true` if the solid particle is within the frustum defined by the passed array of planes. * A particle is in the frustum if its bounding box intersects the frustum * @param frustumPlanes defines the frustum to test * @returns true if the particle is in the frustum planes */ isInFrustum(e) { return this._boundingInfo !== null && this._boundingInfo.isInFrustum(e, this.cullingStrategy); } /** * get the rotation matrix of the particle * @internal */ getRotationMatrix(e) { let t; if (this.rotationQuaternion) t = this.rotationQuaternion; else { t = de.Quaternion[0]; const i = this.rotation; Ze.RotationYawPitchRollToRef(i.y, i.x, i.z, t); } t.toRotationMatrix(e); } } class HH { /** * Get or set the shapeId * @deprecated Please use shapeId instead */ get shapeID() { return this.shapeId; } set shapeID(e) { this.shapeId = e; } /** * Creates a ModelShape object. This is an internal simplified reference to a mesh used as for a model to replicate particles from by the SPS. * SPS internal tool, don't use it manually. 
* @internal */ constructor(e, t, i, r, s, n, a, l, o) { this._indicesLength = 0, this.shapeId = e, this._shape = t, this._indices = i, this._indicesLength = i.length, this._shapeUV = n, this._shapeColors = s, this._normals = r, this._positionFunction = a, this._vertexFunction = l, this._material = o; } } class vne { /** * Creates a new sorted particle * @param idx * @param ind * @param indLength * @param materialIndex */ constructor(e, t, i, r) { this.idx = 0, this.ind = 0, this.indicesLength = 0, this.sqDistance = 0, this.materialIndex = 0, this.idx = e, this.ind = t, this.indicesLength = i, this.materialIndex = r; } } class Ane { /** * Creates a new solid particle vertex */ constructor() { this.position = D.Zero(), this.color = new Et(1, 1, 1, 1), this.uv = at.Zero(); } // Getters and Setters for back-compatibility /** Vertex x coordinate */ get x() { return this.position.x; } set x(e) { this.position.x = e; } /** Vertex y coordinate */ get y() { return this.position.y; } set y(e) { this.position.y = e; } /** Vertex z coordinate */ get z() { return this.position.z; } set z(e) { this.position.z = e; } } class age { /** * Creates a SPS (Solid Particle System) object. * @param name (String) is the SPS name, this will be the underlying mesh name. * @param scene (Scene) is the scene in which the SPS is added. * @param options defines the options of the sps e.g. * * updatable (optional boolean, default true) : if the SPS must be updatable or immutable. * * isPickable (optional boolean, default false) : if the solid particles must be pickable. * * enableDepthSort (optional boolean, default false) : if the solid particles must be sorted in the geometry according to their distance to the camera. * * useModelMaterial (optional boolean, default false) : if the model materials must be used to create the SPS multimaterial. This enables the multimaterial supports of the SPS. * * enableMultiMaterial (optional boolean, default false) : if the solid particles can be given different materials. * * expandable (optional boolean, default false) : if particles can still be added after the initial SPS mesh creation. * * particleIntersection (optional boolean, default false) : if the solid particle intersections must be computed. * * boundingSphereOnly (optional boolean, default false) : if the particle intersection must be computed only with the bounding sphere (no bounding box computation, so faster). * * bSphereRadiusFactor (optional float, default 1.0) : a number to multiply the bounding sphere radius by in order to reduce it for instance. * * computeBoundingBox (optional boolean, default false): if the bounding box of the entire SPS will be computed (for occlusion detection, for example). If it is false, the bounding box will be the bounding box of the first particle. * * autoFixFaceOrientation (optional boolean, default false): if the particle face orientations will be flipped for transformations that change orientation (scale (-1, 1, 1), for example) * @param options.updatable * @param options.isPickable * @param options.enableDepthSort * @param options.particleIntersection * @param options.boundingSphereOnly * @param options.bSphereRadiusFactor * @param options.expandable * @param options.useModelMaterial * @param options.enableMultiMaterial * @param options.computeBoundingBox * @param options.autoFixFaceOrientation * @example bSphereRadiusFactor = 1.0 / Math.sqrt(3.0) => the bounding sphere exactly matches a spherical mesh. 
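 * A minimal construction sketch (assuming the public name SolidParticleSystem; option values are illustrative):
 * const sps = new BABYLON.SolidParticleSystem("sps", scene, { expandable: true, enableDepthSort: true });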
*/ constructor(e, t, i) { this.particles = new Array(), this.nbParticles = 0, this.billboard = !1, this.recomputeNormals = !1, this.counter = 0, this.vars = {}, this._bSphereOnly = !1, this._bSphereRadiusFactor = 1, this._positions = new Array(), this._indices = new Array(), this._normals = new Array(), this._colors = new Array(), this._uvs = new Array(), this._index = 0, this._updatable = !0, this._pickable = !1, this._isVisibilityBoxLocked = !1, this._alwaysVisible = !1, this._depthSort = !1, this._expandable = !1, this._shapeCounter = 0, this._copy = new zH(0, 0, 0, 0, null, 0, 0, this), this._color = new Et(0, 0, 0, 0), this._computeParticleColor = !0, this._computeParticleTexture = !0, this._computeParticleRotation = !0, this._computeParticleVertex = !1, this._computeBoundingBox = !1, this._autoFixFaceOrientation = !1, this._depthSortParticles = !0, this._mustUnrotateFixedNormals = !1, this._particlesIntersect = !1, this._needs32Bits = !1, this._isNotBuilt = !0, this._lastParticleId = 0, this._idxOfId = [], this._multimaterialEnabled = !1, this._useModelMaterial = !1, this._depthSortFunction = (r, s) => s.sqDistance - r.sqDistance, this._materialSortFunction = (r, s) => r.materialIndex - s.materialIndex, this._autoUpdateSubMeshes = !1, this._recomputeInvisibles = !1, this.name = e, this._scene = t || gi.LastCreatedScene, this._camera = t.activeCamera, this._pickable = i ? i.isPickable : !1, this._depthSort = i ? i.enableDepthSort : !1, this._multimaterialEnabled = i ? i.enableMultiMaterial : !1, this._useModelMaterial = i ? i.useModelMaterial : !1, this._multimaterialEnabled = this._useModelMaterial ? !0 : this._multimaterialEnabled, this._expandable = i ? i.expandable : !1, this._particlesIntersect = i ? i.particleIntersection : !1, this._bSphereOnly = i ? i.boundingSphereOnly : !1, this._bSphereRadiusFactor = i && i.bSphereRadiusFactor ? i.bSphereRadiusFactor : 1, this._computeBoundingBox = i != null && i.computeBoundingBox ? i.computeBoundingBox : !1, this._autoFixFaceOrientation = i != null && i.autoFixFaceOrientation ? i.autoFixFaceOrientation : !1, i && i.updatable !== void 0 ? this._updatable = i.updatable : this._updatable = !0, this._pickable && (this.pickedBySubMesh = [[]], this.pickedParticles = this.pickedBySubMesh[0]), (this._depthSort || this._multimaterialEnabled) && (this.depthSortedParticles = []), this._multimaterialEnabled && (this._multimaterial = new xm(this.name + "MultiMaterial", this._scene), this._materials = [], this._materialIndexesById = {}), this._tmpVertex = new Ane(); } /** * Builds the SPS underlying mesh. Returns a standard Mesh. * If no model shape was added to the SPS, the returned mesh is just a single triangular plane. * @returns the created mesh */ buildMesh() { if (!this._isNotBuilt && this.mesh) return this.mesh; if (this.nbParticles === 0 && !this.mesh) { const t = Cw("", { radius: 1, tessellation: 3 }, this._scene); this.addShape(t, 1), t.dispose(); } if (this._indices32 = this._needs32Bits ? 
new Uint32Array(this._indices) : new Uint16Array(this._indices), this._positions32 = new Float32Array(this._positions), this._uvs32 = new Float32Array(this._uvs), this._colors32 = new Float32Array(this._colors), !this.mesh) { const t = new ke(this.name, this._scene); this.mesh = t; } !this._updatable && this._multimaterialEnabled && this._sortParticlesByMaterial(), this.recomputeNormals && Ot.ComputeNormals(this._positions32, this._indices32, this._normals), this._normals32 = new Float32Array(this._normals), this._fixedNormal32 = new Float32Array(this._normals), this._mustUnrotateFixedNormals && this._unrotateFixedNormals(); const e = new Ot(); if (e.indices = this._depthSort ? this._indices : this._indices32, e.set(this._positions32, Y.PositionKind), e.set(this._normals32, Y.NormalKind), this._uvs32.length > 0 && e.set(this._uvs32, Y.UVKind), this._colors32.length > 0 && e.set(this._colors32, Y.ColorKind), e.applyToMesh(this.mesh, this._updatable), this.mesh.isPickable = this._pickable, this._pickable) { let t = 0; for (let i = 0; i < this.nbParticles; i++) { const r = this.particles[i], s = r._model._indicesLength; for (let n = 0; n < s; n++) if (n % 3 == 0) { const l = { idx: r.idx, faceId: t }; this.pickedParticles[t] = l, t++; } } } return this._multimaterialEnabled && this.setMultiMaterial(this._materials), this._expandable || (!this._depthSort && !this._multimaterialEnabled && !this._autoFixFaceOrientation && (this._indices = null), this._positions = null, this._normals = null, this._uvs = null, this._colors = null, this._updatable || (this.particles.length = 0)), this._isNotBuilt = !1, this.recomputeNormals = !1, this._recomputeInvisibles = !0, this.mesh; } _getUVKind(e, t) { var i, r; return t === -1 && (!((i = e.material) === null || i === void 0) && i.diffuseTexture ? t = e.material.diffuseTexture.coordinatesIndex : !((r = e.material) === null || r === void 0) && r.albedoTexture && (t = e.material.albedoTexture.coordinatesIndex)), "uv" + (t ? t + 1 : ""); } /** * Digests the mesh and generates as many solid particles in the system as wanted. Returns the SPS. * These particles will have the same geometry than the mesh parts and will be positioned at the same localisation than the mesh original places. * Thus the particles generated from `digest()` have their property `position` set yet. * @param mesh ( Mesh ) is the mesh to be digested * @param options {facetNb} (optional integer, default 1) is the number of mesh facets per particle, this parameter is overridden by the parameter `number` if any * {delta} (optional integer, default 0) is the random extra number of facets per particle , each particle will have between `facetNb` and `facetNb + delta` facets * {number} (optional positive integer) is the wanted number of particles : each particle is built with `mesh_total_facets / number` facets * {storage} (optional existing array) is an array where the particles will be stored for a further use instead of being inserted in the SPS. * {uvKind} (optional positive integer, default 0) is the kind of UV to read from. Use -1 to deduce it from the diffuse/albedo texture (if any) of the mesh material * @param options.facetNb * @param options.number * @param options.delta * @param options.storage * @param options.uvKind * @returns the current SPS */ digest(e, t) { var i; let r = t && t.facetNb || 1, s = t && t.number || 0, n = t && t.delta || 0; const a = e.getVerticesData(Y.PositionKind), l = e.getIndices(), o = e.getVerticesData(this._getUVKind(e, (i = t == null ? 
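// (Hedged sketch of digest(), documented above: the source mesh is chopped into solid particles,
// four facets per particle here; the public MeshBuilder/SolidParticleSystem names and `sps` are assumptions.)
// const knot = BABYLON.MeshBuilder.CreateTorusKnot("knot", {}, scene);
// sps.digest(knot, { facetNb: 4 });
// knot.dispose();       // the source mesh is no longer needed once digested
// sps.buildMesh();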
void 0 : t.uvKind) !== null && i !== void 0 ? i : 0)), u = e.getVerticesData(Y.ColorKind), h = e.getVerticesData(Y.NormalKind), d = t && t.storage ? t.storage : null; let f = 0; const p = l.length / 3; s ? (s = s > p ? p : s, r = Math.round(p / s), n = 0) : r = r > p ? p : r; const m = [], _ = [], v = [], C = [], x = [], b = D.Zero(), S = r; for (; f < p; ) { r = S + Math.floor((1 + n) * Math.random()), f > p - r && (r = p - f), m.length = 0, _.length = 0, v.length = 0, C.length = 0, x.length = 0; let M = 0; for (let j = f * 3; j < (f + r) * 3; j++) { v.push(M); const J = l[j], ne = J * 3; if (m.push(a[ne], a[ne + 1], a[ne + 2]), _.push(h[ne], h[ne + 1], h[ne + 2]), o) { const pe = J * 2; C.push(o[pe], o[pe + 1]); } if (u) { const pe = J * 4; x.push(u[pe], u[pe + 1], u[pe + 2], u[pe + 3]); } M++; } let R = this.nbParticles; const w = this._posToShape(m), V = this._uvsToShapeUV(C), k = v.slice(), L = x.slice(), B = _.slice(); b.copyFromFloats(0, 0, 0); let U; for (U = 0; U < w.length; U++) b.addInPlace(w[U]); b.scaleInPlace(1 / w.length); const K = new D(1 / 0, 1 / 0, 1 / 0), ee = new D(-1 / 0, -1 / 0, -1 / 0); for (U = 0; U < w.length; U++) w[U].subtractInPlace(b), K.minimizeInPlaceFromFloats(w[U].x, w[U].y, w[U].z), ee.maximizeInPlaceFromFloats(w[U].x, w[U].y, w[U].z); let Z; this._particlesIntersect && (Z = new zf(K, ee)); let q = null; this._useModelMaterial && (q = e.material ? e.material : this._setDefaultMaterial()); const le = new HH(this._shapeCounter, w, k, B, L, V, null, null, q), ie = this._positions.length, $ = this._indices.length; this._meshBuilder(this._index, $, w, this._positions, k, this._indices, C, this._uvs, L, this._colors, B, this._normals, R, 0, null, le), this._addParticle(R, this._lastParticleId, ie, $, le, this._shapeCounter, 0, Z, d), this.particles[this.nbParticles].position.addInPlace(b), d || (this._index += w.length, R++, this.nbParticles++, this._lastParticleId++), this._shapeCounter++, f += r; } return this._isNotBuilt = !0, this; } /** * Unrotate the fixed normals in case the mesh was built with pre-rotated particles, ex : use of positionFunction in addShape() * @internal */ _unrotateFixedNormals() { let e = 0, t = 0; const i = de.Vector3[0], r = de.Quaternion[0], s = de.Matrix[0]; for (let n = 0; n < this.particles.length; n++) { const a = this.particles[n], l = a._model._shape; if (a.rotationQuaternion) a.rotationQuaternion.conjugateToRef(r); else { const o = a.rotation; Ze.RotationYawPitchRollToRef(o.y, o.x, o.z, r), r.conjugateInPlace(); } r.toRotationMatrix(s); for (let o = 0; o < l.length; o++) t = e + o * 3, D.TransformNormalFromFloatsToRef(this._normals32[t], this._normals32[t + 1], this._normals32[t + 2], s, i), i.toArray(this._fixedNormal32, t); e = t + 3; } } /** * Resets the temporary working copy particle * @internal */ _resetCopy() { const e = this._copy; e.position.setAll(0), e.rotation.setAll(0), e.rotationQuaternion = null, e.scaling.setAll(1), e.uvs.copyFromFloats(0, 0, 1, 1), e.color = null, e.translateFromPivot = !1, e.shapeId = 0, e.materialIndex = null; } /** * Inserts the shape model geometry in the global SPS mesh by updating the positions, indices, normals, colors, uvs arrays * @param p the current index in the positions array to be updated * @param ind the current index in the indices array * @param shape a Vector3 array, the shape geometry * @param positions the positions array to be updated * @param meshInd the shape indices array * @param indices the indices array to be updated * @param meshUV the shape uv array * @param uvs 
the uv array to be updated * @param meshCol the shape color array * @param colors the color array to be updated * @param meshNor the shape normals array * @param normals the normals array to be updated * @param idx the particle index * @param idxInShape the particle index in its shape * @param options the addShape() method passed options * @param model * @model the particle model * @internal */ _meshBuilder(e, t, i, r, s, n, a, l, o, u, h, d, f, p, m, _) { let v, C = 0, x = 0, b = 0; this._resetCopy(); const S = this._copy, M = !!(m && m.storage); if (S.idx = f, S.idxInShape = p, S.shapeId = _.shapeId, this._useModelMaterial) { const Z = _._material.uniqueId, q = this._materialIndexesById; Object.prototype.hasOwnProperty.call(q, Z) || (q[Z] = this._materials.length, this._materials.push(_._material)); const le = q[Z]; S.materialIndex = le; } if (m && m.positionFunction && (m.positionFunction(S, f, p), this._mustUnrotateFixedNormals = !0), M) return S; const R = de.Matrix[0], w = this._tmpVertex, V = w.position, k = w.color, L = w.uv, B = de.Vector3[1], U = de.Vector3[2], K = de.Vector3[3]; Ae.IdentityToRef(R), S.getRotationMatrix(R), S.pivot.multiplyToRef(S.scaling, K), S.translateFromPivot ? U.setAll(0) : U.copyFrom(K); const ee = m && m.vertexFunction; for (v = 0; v < i.length; v++) { if (V.copyFrom(i[v]), S.color && k.copyFrom(S.color), a && L.copyFromFloats(a[C], a[C + 1]), ee && m.vertexFunction(S, w, v), V.multiplyInPlace(S.scaling).subtractInPlace(K), D.TransformCoordinatesToRef(V, R, B), B.addInPlace(U).addInPlace(S.position), r.push(B.x, B.y, B.z), a) { const Z = S.uvs; l.push((Z.z - Z.x) * L.x + Z.x, (Z.w - Z.y) * L.y + Z.y), C += 2; } if (S.color) this._color.copyFrom(k); else { const Z = this._color; o && o[x] !== void 0 ? (Z.r = o[x], Z.g = o[x + 1], Z.b = o[x + 2], Z.a = o[x + 3]) : (Z.r = 1, Z.g = 1, Z.b = 1, Z.a = 1); } u.push(this._color.r, this._color.g, this._color.b, this._color.a), x += 4, !this.recomputeNormals && h && (D.TransformNormalFromFloatsToRef(h[b], h[b + 1], h[b + 2], R, V), d.push(V.x, V.y, V.z), b += 3); } for (v = 0; v < s.length; v++) { const Z = e + s[v]; n.push(Z), Z > 65535 && (this._needs32Bits = !0); } if (this._depthSort || this._multimaterialEnabled) { const Z = S.materialIndex !== null ? 
S.materialIndex : 0; this.depthSortedParticles.push(new vne(f, t, s.length, Z)); } return S; } /** * Returns a shape Vector3 array from positions float array * @param positions float array * @returns a vector3 array * @internal */ _posToShape(e) { const t = []; for (let i = 0; i < e.length; i += 3) t.push(D.FromArray(e, i)); return t; } /** * Returns a shapeUV array from a float uvs (array deep copy) * @param uvs as a float array * @returns a shapeUV array * @internal */ _uvsToShapeUV(e) { const t = []; if (e) for (let i = 0; i < e.length; i++) t.push(e[i]); return t; } /** * Adds a new particle object in the particles array * @param idx particle index in particles array * @param id particle id * @param idxpos positionIndex : the starting index of the particle vertices in the SPS "positions" array * @param idxind indiceIndex : he starting index of the particle indices in the SPS "indices" array * @param model particle ModelShape object * @param shapeId model shape identifier * @param idxInShape index of the particle in the current model * @param bInfo model bounding info object * @param storage target storage array, if any * @internal */ _addParticle(e, t, i, r, s, n, a, l = null, o = null) { const u = new zH(e, t, i, r, s, n, a, this, l); return (o || this.particles).push(u), u; } /** * Adds some particles to the SPS from the model shape. Returns the shape id. * Please read the doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/immutable_sps * @param mesh is any Mesh object that will be used as a model for the solid particles. If the mesh does not have vertex normals, it will turn on the recomputeNormals attribute. * @param nb (positive integer) the number of particles to be created from this model * @param options {positionFunction} is an optional javascript function to called for each particle on SPS creation. * {vertexFunction} is an optional javascript function to called for each vertex of each particle on SPS creation * {storage} (optional existing array) is an array where the particles will be stored for a further use instead of being inserted in the SPS. * @param options.positionFunction * @param options.vertexFunction * @param options.storage * @returns the number of shapes in the system */ addShape(e, t, i) { const r = e.getVerticesData(Y.PositionKind), s = e.getIndices(), n = e.getVerticesData(Y.UVKind), a = e.getVerticesData(Y.ColorKind), l = e.getVerticesData(Y.NormalKind); this.recomputeNormals = !l; const o = Array.from(s), u = l ? Array.from(l) : [], h = a ? Array.from(a) : [], d = i && i.storage ? i.storage : null; let f = null; this._particlesIntersect && (f = e.getBoundingInfo()); const p = this._posToShape(r), m = this._uvsToShapeUV(n), _ = i ? i.positionFunction : null, v = i ? i.vertexFunction : null; let C = null; this._useModelMaterial && (C = e.material ? 
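// (Hedged sketch of addShape(), documented above; the optional positionFunction scatters the
// 200 boxes once, at SPS build time. The public MeshBuilder name and `sps`/`scene` are assumptions.)
// const box = BABYLON.MeshBuilder.CreateBox("b", { size: 0.2 }, scene);
// sps.addShape(box, 200, {
//     positionFunction: (particle) => { particle.position.x = (Math.random() - 0.5) * 10; }
// });
// box.dispose();                 // the model mesh can be discarded once the shape is registered
// const mesh = sps.buildMesh();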
e.material : this._setDefaultMaterial()); const x = new HH(this._shapeCounter, p, o, u, h, m, _, v, C); for (let b = 0; b < t; b++) this._insertNewParticle(this.nbParticles, b, x, p, s, n, a, l, f, d, i); return this._shapeCounter++, this._isNotBuilt = !0, this._shapeCounter - 1; } /** * Rebuilds a particle back to its just built status : if needed, recomputes the custom positions and vertices * @internal */ _rebuildParticle(e, t = !1) { this._resetCopy(); const i = this._copy; e._model._positionFunction && e._model._positionFunction(i, e.idx, e.idxInShape); const r = de.Matrix[0], s = de.Vector3[0], n = de.Vector3[1], a = de.Vector3[2], l = de.Vector3[3]; i.getRotationMatrix(r), e.pivot.multiplyToRef(e.scaling, l), i.translateFromPivot ? a.copyFromFloats(0, 0, 0) : a.copyFrom(l); const o = e._model._shape; for (let u = 0; u < o.length; u++) s.copyFrom(o[u]), e._model._vertexFunction && e._model._vertexFunction(i, s, u), s.multiplyInPlace(i.scaling).subtractInPlace(l), D.TransformCoordinatesToRef(s, r, n), n.addInPlace(a).addInPlace(i.position).toArray(this._positions32, e._pos + u * 3); t && (e.position.setAll(0), e.rotation.setAll(0), e.rotationQuaternion = null, e.scaling.setAll(1), e.uvs.setAll(0), e.pivot.setAll(0), e.translateFromPivot = !1, e.parentId = null); } /** * Rebuilds the whole mesh and updates the VBO : custom positions and vertices are recomputed if needed. * @param reset boolean, default false : if the particles must be reset at position and rotation zero, scaling 1, color white, initial UVs and not parented. * @returns the SPS. */ rebuildMesh(e = !1) { for (let t = 0; t < this.particles.length; t++) this._rebuildParticle(this.particles[t], e); return this.mesh.updateVerticesData(Y.PositionKind, this._positions32, !1, !1), this; } /** Removes the particles from the start-th to the end-th included from an expandable SPS (required). * Returns an array with the removed particles. * If the number of particles to remove is lower than zero or greater than the global remaining particle number, then an empty array is returned. * The SPS can't be empty so at least one particle needs to remain in place. * Under the hood, the VertexData array, so the VBO buffer, is recreated each call. 
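 * For instance (illustrative, assuming an SPS created with { expandable: true } and a hypothetical `sps`):
 * const removed = sps.removeParticles(10, 19); // pulls particles 10..19 out of the system
 * sps.buildMesh();                             // rebuild the underlying mesh afterwards
 * The removed array can later be handed back to insertParticlesFromArray().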
* @param start index of the first particle to remove * @param end index of the last particle to remove (included) * @returns an array populated with the removed particles */ removeParticles(e, t) { const i = t - e + 1; if (!this._expandable || i <= 0 || i >= this.nbParticles || !this._updatable) return []; const r = this.particles, s = this.nbParticles; if (t < s - 1) { const o = t + 1, u = r[o]._pos - r[e]._pos, h = r[o]._ind - r[e]._ind; for (let d = o; d < s; d++) { const f = r[d]; f._pos -= u, f._ind -= h; } } const n = r.splice(e, i); this._positions.length = 0, this._indices.length = 0, this._colors.length = 0, this._uvs.length = 0, this._normals.length = 0, this._index = 0, this._idxOfId.length = 0, (this._depthSort || this._multimaterialEnabled) && (this.depthSortedParticles = []); let a = 0; const l = r.length; for (let o = 0; o < l; o++) { const u = r[o], h = u._model, d = h._shape, f = h._indices, p = h._normals, m = h._shapeColors, _ = h._shapeUV; u.idx = o, this._idxOfId[u.id] = o, this._meshBuilder(this._index, a, d, this._positions, f, this._indices, _, this._uvs, m, this._colors, p, this._normals, u.idx, u.idxInShape, null, h), this._index += d.length, a += f.length; } return this.nbParticles -= i, this._isNotBuilt = !0, n; } /** * Inserts some pre-created particles in the solid particle system so that they can be managed by setParticles(). * @param solidParticleArray an array populated with Solid Particles objects * @returns the SPS */ insertParticlesFromArray(e) { if (!this._expandable) return this; let t = 0, i = e[0].shapeId; const r = e.length; for (let s = 0; s < r; s++) { const n = e[s], a = n._model, l = a._shape, o = a._indices, u = a._shapeUV, h = a._shapeColors, d = a._normals, f = !d; this.recomputeNormals = f || this.recomputeNormals; const p = n.getBoundingInfo(), m = this._insertNewParticle(this.nbParticles, t, a, l, o, u, h, d, p, null, null); n.copyToRef(m), t++, i != n.shapeId && (i = n.shapeId, t = 0); } return this._isNotBuilt = !0, this; } /** * Creates a new particle and modifies the SPS mesh geometry : * - calls _meshBuilder() to increase the SPS mesh geometry step by step * - calls _addParticle() to populate the particle array * factorized code from addShape() and insertParticlesFromArray() * @param idx particle index in the particles array * @param i particle index in its shape * @param modelShape particle ModelShape object * @param shape shape vertex array * @param meshInd shape indices array * @param meshUV shape uv array * @param meshCol shape color array * @param meshNor shape normals array * @param bbInfo shape bounding info * @param storage target particle storage * @param options * @options addShape() passed options * @internal */ _insertNewParticle(e, t, i, r, s, n, a, l, o, u, h) { const d = this._positions.length, f = this._indices.length, p = this._meshBuilder(this._index, f, r, this._positions, s, this._indices, n, this._uvs, a, this._colors, l, this._normals, e, t, h, i); let m = null; return this._updatable && (m = this._addParticle(this.nbParticles, this._lastParticleId, d, f, i, this._shapeCounter, t, o, u), m.position.copyFrom(p.position), m.rotation.copyFrom(p.rotation), p.rotationQuaternion && (m.rotationQuaternion ? m.rotationQuaternion.copyFrom(p.rotationQuaternion) : m.rotationQuaternion = p.rotationQuaternion.clone()), p.color && (m.color ? 
m.color.copyFrom(p.color) : m.color = p.color.clone()), m.scaling.copyFrom(p.scaling), m.uvs.copyFrom(p.uvs), p.materialIndex !== null && (m.materialIndex = p.materialIndex), this.expandable && (this._idxOfId[m.id] = m.idx)), u || (this._index += r.length, this.nbParticles++, this._lastParticleId++), m; } /** * Sets all the particles : this method actually really updates the mesh according to the particle positions, rotations, colors, textures, etc. * This method calls `updateParticle()` for each particle of the SPS. * For an animated SPS, it is usually called within the render loop. * This methods does nothing if called on a non updatable or not yet built SPS. Example : buildMesh() not called after having added or removed particles from an expandable SPS. * @param start The particle index in the particle array where to start to compute the particle property values _(default 0)_ * @param end The particle index in the particle array where to stop to compute the particle property values _(default nbParticle - 1)_ * @param update If the mesh must be finally updated on this call after all the particle computations _(default true)_ * @returns the SPS. */ setParticles(e = 0, t = this.nbParticles - 1, i = !0) { if (!this._updatable || this._isNotBuilt) return this; this.beforeUpdateParticles(e, t, i); const r = de.Matrix[0], s = de.Matrix[1], n = this.mesh, a = this._colors32, l = this._positions32, o = this._normals32, u = this._uvs32, h = this._indices32, d = this._indices, f = this._fixedNormal32, p = this._depthSort && this._depthSortParticles, m = de.Vector3, _ = m[5].copyFromFloats(1, 0, 0), v = m[6].copyFromFloats(0, 1, 0), C = m[7].copyFromFloats(0, 0, 1), x = m[8].setAll(Number.MAX_VALUE), b = m[9].setAll(-Number.MAX_VALUE), S = m[10].setAll(0), M = this._tmpVertex, R = M.position, w = M.color, V = M.uv; if ((this.billboard || this._depthSort) && (this.mesh.computeWorldMatrix(!0), this.mesh._worldMatrix.invertToRef(s)), this.billboard) { const le = m[0]; this._camera.getDirectionToRef(bl.Z, le), D.TransformNormalToRef(le, s, C), C.normalize(); const ie = this._camera.getViewMatrix(!0); D.TransformNormalFromFloatsToRef(ie.m[1], ie.m[5], ie.m[9], s, v), D.CrossToRef(v, C, _), v.normalize(), _.normalize(); } this._depthSort && D.TransformCoordinatesToRef(this._camera.globalPosition, s, S), Ae.IdentityToRef(r); let k = 0, L = 0, B = 0, U = 0, K = 0, ee = 0, Z = 0; if (this.mesh.isFacetDataEnabled && (this._computeBoundingBox = !0), t = t >= this.nbParticles ? 
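// (Hedged sketch of the usual update pattern for setParticles(), documented above; updateParticle()
// is the user override and the render loop drives the refresh. `sps` and `scene` are hypothetical.)
// sps.updateParticle = (p) => { p.position.addInPlace(p.velocity); return p; };
// sps.initParticles();
// sps.setParticles();   // first full update
// scene.onBeforeRenderObservable.add(() => sps.setParticles());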
this.nbParticles - 1 : t, this._computeBoundingBox && (e != 0 || t != this.nbParticles - 1)) { const le = this.mesh.getBoundingInfo(); le && (x.copyFrom(le.minimum), b.copyFrom(le.maximum)); } L = this.particles[e]._pos; const q = L / 3 | 0; U = q * 4, ee = q * 2; for (let le = e; le <= t; le++) { const ie = this.particles[le]; this.updateParticle(ie); const $ = ie._model._shape, j = ie._model._shapeUV, J = ie._rotationMatrix, ne = ie.position, pe = ie.rotation, ge = ie.scaling, Ie = ie._globalPosition; if (p) { const ye = this.depthSortedParticles[le]; ye.idx = ie.idx, ye.ind = ie._ind, ye.indicesLength = ie._model._indicesLength, ye.sqDistance = D.DistanceSquared(ie.position, S); } if (!ie.alive || ie._stillInvisible && !ie.isVisible && !this._recomputeInvisibles) { Z = $.length, L += Z * 3, U += Z * 4, ee += Z * 2; continue; } if (ie.isVisible) { ie._stillInvisible = !1; const ye = m[12]; if (ie.pivot.multiplyToRef(ge, ye), this.billboard && (pe.x = 0, pe.y = 0), (this._computeParticleRotation || this.billboard) && ie.getRotationMatrix(r), ie.parentId !== null) { const te = this.getParticleById(ie.parentId); if (te) { const he = te._rotationMatrix, be = te._globalPosition, Ue = ne.x * he[1] + ne.y * he[4] + ne.z * he[7], Ee = ne.x * he[0] + ne.y * he[3] + ne.z * he[6], He = ne.x * he[2] + ne.y * he[5] + ne.z * he[8]; if (Ie.x = be.x + Ee, Ie.y = be.y + Ue, Ie.z = be.z + He, this._computeParticleRotation || this.billboard) { const Xe = r.m; J[0] = Xe[0] * he[0] + Xe[1] * he[3] + Xe[2] * he[6], J[1] = Xe[0] * he[1] + Xe[1] * he[4] + Xe[2] * he[7], J[2] = Xe[0] * he[2] + Xe[1] * he[5] + Xe[2] * he[8], J[3] = Xe[4] * he[0] + Xe[5] * he[3] + Xe[6] * he[6], J[4] = Xe[4] * he[1] + Xe[5] * he[4] + Xe[6] * he[7], J[5] = Xe[4] * he[2] + Xe[5] * he[5] + Xe[6] * he[8], J[6] = Xe[8] * he[0] + Xe[9] * he[3] + Xe[10] * he[6], J[7] = Xe[8] * he[1] + Xe[9] * he[4] + Xe[10] * he[7], J[8] = Xe[8] * he[2] + Xe[9] * he[5] + Xe[10] * he[8]; } } else ie.parentId = null; } else if (Ie.x = ne.x, Ie.y = ne.y, Ie.z = ne.z, this._computeParticleRotation || this.billboard) { const te = r.m; J[0] = te[0], J[1] = te[1], J[2] = te[2], J[3] = te[4], J[4] = te[5], J[5] = te[6], J[6] = te[8], J[7] = te[9], J[8] = te[10]; } const re = m[11]; for (ie.translateFromPivot ? 
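// Pivot handling (descriptive note): the scaled pivot offset is subtracted before rotation and,
// unless translateFromPivot is set, added back afterwards so the particle rotates around its pivot in place.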
re.setAll(0) : re.copyFrom(ye), Z = 0; Z < $.length; Z++) { k = L + Z * 3, B = U + Z * 4, K = ee + Z * 2; const te = 2 * Z, he = te + 1; R.copyFrom($[Z]), this._computeParticleColor && ie.color && w.copyFrom(ie.color), this._computeParticleTexture && V.copyFromFloats(j[te], j[he]), this._computeParticleVertex && this.updateParticleVertex(ie, M, Z); const be = R.x * ge.x - ye.x, Ue = R.y * ge.y - ye.y, Ee = R.z * ge.z - ye.z; let He = be * J[0] + Ue * J[3] + Ee * J[6], Xe = be * J[1] + Ue * J[4] + Ee * J[7], rt = be * J[2] + Ue * J[5] + Ee * J[8]; He += re.x, Xe += re.y, rt += re.z; const dt = l[k] = Ie.x + _.x * He + v.x * Xe + C.x * rt, bt = l[k + 1] = Ie.y + _.y * He + v.y * Xe + C.y * rt, Mt = l[k + 2] = Ie.z + _.z * He + v.z * Xe + C.z * rt; if (this._computeBoundingBox && (x.minimizeInPlaceFromFloats(dt, bt, Mt), b.maximizeInPlaceFromFloats(dt, bt, Mt)), !this._computeParticleVertex) { const Ct = f[k], di = f[k + 1], Kt = f[k + 2], ei = Ct * J[0] + di * J[3] + Kt * J[6], bi = Ct * J[1] + di * J[4] + Kt * J[7], vr = Ct * J[2] + di * J[5] + Kt * J[8]; o[k] = _.x * ei + v.x * bi + C.x * vr, o[k + 1] = _.y * ei + v.y * bi + C.y * vr, o[k + 2] = _.z * ei + v.z * bi + C.z * vr; } if (this._computeParticleColor && ie.color) { const Ct = this._colors32; Ct[B] = w.r, Ct[B + 1] = w.g, Ct[B + 2] = w.b, Ct[B + 3] = w.a; } if (this._computeParticleTexture) { const Ct = ie.uvs; u[K] = V.x * (Ct.z - Ct.x) + Ct.x, u[K + 1] = V.y * (Ct.w - Ct.y) + Ct.y; } } } else for (ie._stillInvisible = !0, Z = 0; Z < $.length; Z++) { if (k = L + Z * 3, B = U + Z * 4, K = ee + Z * 2, l[k] = l[k + 1] = l[k + 2] = 0, o[k] = o[k + 1] = o[k + 2] = 0, this._computeParticleColor && ie.color) { const ye = ie.color; a[B] = ye.r, a[B + 1] = ye.g, a[B + 2] = ye.b, a[B + 3] = ye.a; } if (this._computeParticleTexture) { const ye = ie.uvs; u[K] = j[Z * 2] * (ye.z - ye.x) + ye.x, u[K + 1] = j[Z * 2 + 1] * (ye.w - ye.y) + ye.y; } } if (this._particlesIntersect) { const ye = ie.getBoundingInfo(), Se = ye.boundingBox, re = ye.boundingSphere, te = ie._modelBoundingInfo; if (!this._bSphereOnly) { const rt = te.boundingBox.vectors, dt = m[1], bt = m[2]; dt.setAll(Number.MAX_VALUE), bt.setAll(-Number.MAX_VALUE); for (let Mt = 0; Mt < 8; Mt++) { const Ct = rt[Mt].x * ge.x, di = rt[Mt].y * ge.y, Kt = rt[Mt].z * ge.z, ei = Ct * J[0] + di * J[3] + Kt * J[6], bi = Ct * J[1] + di * J[4] + Kt * J[7], vr = Ct * J[2] + di * J[5] + Kt * J[8], yi = ne.x + _.x * ei + v.x * bi + C.x * vr, Vr = ne.y + _.y * ei + v.y * bi + C.y * vr, Rr = ne.z + _.z * ei + v.z * bi + C.z * vr; dt.minimizeInPlaceFromFloats(yi, Vr, Rr), bt.maximizeInPlaceFromFloats(yi, Vr, Rr); } Se.reConstruct(dt, bt, n._worldMatrix); } const he = te.minimum.multiplyToRef(ge, m[1]), be = te.maximum.multiplyToRef(ge, m[2]), Ue = be.addToRef(he, m[3]).scaleInPlace(0.5).addInPlace(Ie), Ee = be.subtractToRef(he, m[4]).scaleInPlace(0.5 * this._bSphereRadiusFactor), He = Ue.subtractToRef(Ee, m[1]), Xe = Ue.addToRef(Ee, m[2]); re.reConstruct(He, Xe, n._worldMatrix); } L = k + 3, U = B + 4, ee = K + 2; } if (i) { if (this._computeParticleColor) { const ie = n.getVertexBuffer(Y.ColorKind); ie && !n.isPickable ? ie.updateDirectly(a, 0) : n.updateVerticesData(Y.ColorKind, a, !1, !1); } if (this._computeParticleTexture) { const ie = n.getVertexBuffer(Y.UVKind); ie && !n.isPickable ? ie.updateDirectly(u, 0) : n.updateVerticesData(Y.UVKind, u, !1, !1); } const le = n.getVertexBuffer(Y.PositionKind); if (le && !n.isPickable ? 
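// Fast path (descriptive note): when the mesh is not pickable the typed arrays are pushed straight
// to the GPU buffer with updateDirectly(); otherwise updateVerticesData() keeps the CPU-side copy in sync.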
le.updateDirectly(l, 0) : n.updateVerticesData(Y.PositionKind, l, !1, !1), !n.areNormalsFrozen || n.isFacetDataEnabled) { if (this._computeParticleVertex || n.isFacetDataEnabled) { const ie = n.isFacetDataEnabled ? n.getFacetDataParameters() : null; Ot.ComputeNormals(l, h, o, ie); for (let $ = 0; $ < o.length; $++) f[$] = o[$]; } if (!n.areNormalsFrozen) { const ie = n.getVertexBuffer(Y.NormalKind); ie && !n.isPickable ? ie.updateDirectly(o, 0) : n.updateVerticesData(Y.NormalKind, o, !1, !1); } } if (p) { const ie = this.depthSortedParticles; ie.sort(this._depthSortFunction); const $ = ie.length; let j = 0, J = 0; for (let ne = 0; ne < $; ne++) { const pe = ie[ne], ge = pe.indicesLength, Ie = pe.ind; for (let ye = 0; ye < ge; ye++) if (h[j] = d[Ie + ye], j++, this._pickable && ye % 3 == 0) { const re = this.pickedParticles[J]; re.idx = pe.idx, re.faceId = J, J++; } } } if (this._autoFixFaceOrientation) { let ie = 0; for (let $ = 0; $ < this.particles.length; $++) { const j = p ? this.particles[this.depthSortedParticles[$].idx] : this.particles[$]; if (j.scale.x * j.scale.y * j.scale.z < 0) for (let ne = 0; ne < j._model._indicesLength; ne += 3) { const pe = d[j._ind + ne]; h[ie + ne] = d[j._ind + ne + 1], h[ie + ne + 1] = pe; } ie += j._model._indicesLength; } } (p || this._autoFixFaceOrientation) && n.updateIndices(h); } return this._computeBoundingBox && (n.hasBoundingInfo ? n.getBoundingInfo().reConstruct(x, b, n._worldMatrix) : n.buildBoundingInfo(x, b, n._worldMatrix)), this._autoUpdateSubMeshes && this.computeSubMeshes(), this._recomputeInvisibles = !1, this.afterUpdateParticles(e, t, i), this; } /** * Disposes the SPS. */ dispose() { this.mesh.dispose(), this.vars = null, this._positions = null, this._indices = null, this._normals = null, this._uvs = null, this._colors = null, this._indices32 = null, this._positions32 = null, this._normals32 = null, this._fixedNormal32 = null, this._uvs32 = null, this._colors32 = null, this.pickedParticles = null, this.pickedBySubMesh = null, this._materials = null, this._materialIndexes = null, this._indicesByMaterial = null, this._idxOfId = null; } /** Returns an object {idx: number faceId: number} for the picked particle from the passed pickingInfo object. * idx is the particle index in the SPS * faceId is the picked face index counted within this particle. * Returns null if the pickInfo can't identify a picked particle. * @param pickingInfo (PickingInfo object) * @returns {idx: number, faceId: number} or null */ pickedParticle(e) { if (e.hit) { const t = e.subMeshId, i = e.faceId - this.mesh.subMeshes[t].indexStart / 3, r = this.pickedBySubMesh; if (r[t] && r[t][i]) return r[t][i]; } return null; } /** * Returns a SolidParticle object from its identifier : particle.id * @param id (integer) the particle Id * @returns the searched particle or null if not found in the SPS. */ getParticleById(e) { const t = this.particles[e]; if (t && t.id == e) return t; const i = this.particles, r = this._idxOfId[e]; if (r !== void 0) return i[r]; let s = 0; const n = this.nbParticles; for (; s < n; ) { const a = i[s]; if (a.id == e) return a; s++; } return null; } /** * Returns a new array populated with the particles having the passed shapeId. * @param shapeId (integer) the shape identifier * @returns a new solid particle array */ getParticlesByShapeId(e) { const t = []; return this.getParticlesByShapeIdToRef(e, t), t; } /** * Populates the passed array "ref" with the particles having the passed shapeId. 
* @param shapeId the shape identifier * @returns the SPS * @param ref */ getParticlesByShapeIdToRef(e, t) { t.length = 0; for (let i = 0; i < this.nbParticles; i++) { const r = this.particles[i]; r.shapeId == e && t.push(r); } return this; } /** * Computes the required SubMeshes according the materials assigned to the particles. * @returns the solid particle system. * Does nothing if called before the SPS mesh is built. */ computeSubMeshes() { if (!this.mesh || !this._multimaterialEnabled) return this; const e = this.depthSortedParticles; if (this.particles.length > 0) for (let n = 0; n < this.particles.length; n++) { const a = this.particles[n]; a.materialIndex || (a.materialIndex = 0); const l = e[n]; l.materialIndex = a.materialIndex, l.ind = a._ind, l.indicesLength = a._model._indicesLength, l.idx = a.idx; } this._sortParticlesByMaterial(); const t = this._indicesByMaterial, i = this._materialIndexes, r = this.mesh; r.subMeshes = []; const s = r.getTotalVertices(); for (let n = 0; n < i.length; n++) { const a = t[n], l = t[n + 1] - a, o = i[n]; new ed(o, 0, s, a, l, r); } return this; } /** * Sorts the solid particles by material when MultiMaterial is enabled. * Updates the indices32 array. * Updates the indicesByMaterial array. * Updates the mesh indices array. * @returns the SPS * @internal */ _sortParticlesByMaterial() { const e = [0]; this._indicesByMaterial = e; const t = []; this._materialIndexes = t; const i = this.depthSortedParticles; i.sort(this._materialSortFunction); const r = i.length, s = this._indices32, n = this._indices; let a = 0, l = 0, o = 0, u = i[0].materialIndex; t.push(u), this._pickable && (this.pickedBySubMesh = [[]], this.pickedParticles = this.pickedBySubMesh[0]); for (let h = 0; h < r; h++) { const d = i[h], f = d.indicesLength, p = d.ind; d.materialIndex !== u && (u = d.materialIndex, e.push(o), t.push(u), this._pickable && (a++, this.pickedBySubMesh[a] = [], l = 0)); let m = 0; for (let _ = 0; _ < f; _++) { if (s[o] = n[p + _], this._pickable && _ % 3 == 0) { const C = this.pickedBySubMesh[a][l]; C ? (C.idx = d.idx, C.faceId = m) : this.pickedBySubMesh[a][l] = { idx: d.idx, faceId: m }, l++, m++; } o++; } } return e.push(s.length), this._updatable && this.mesh.updateIndices(s), this; } /** * Sets the material indexes by id materialIndexesById[id] = materialIndex * @internal */ _setMaterialIndexesById() { this._materialIndexesById = {}; for (let e = 0; e < this._materials.length; e++) { const t = this._materials[e].uniqueId; this._materialIndexesById[t] = e; } } /** * Returns an array with unique values of Materials from the passed array * @param array the material array to be checked and filtered * @internal */ _filterUniqueMaterialId(e) { return e.filter(function(i, r, s) { return s.indexOf(i) === r; }); } /** * Sets a new Standard Material as _defaultMaterial if not already set. * @internal */ _setDefaultMaterial() { return this._defaultMaterial || (this._defaultMaterial = new Dt(this.name + "DefaultMaterial", this._scene)), this._defaultMaterial; } /** * Visibility helper : Recomputes the visible size according to the mesh bounding box * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility * @returns the SPS. */ refreshVisibleSize() { return this._isVisibilityBoxLocked || this.mesh.refreshBoundingInfo(), this; } /** * Visibility helper : Sets the size of a visibility box, this sets the underlying mesh bounding box. 
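 * For instance (illustrative, `sps` hypothetical):
 * sps.setVisibilityBox(100);        // 100-unit wide box centered on the SPS mesh origin
 * sps.isVisibilityBoxLocked = true; // freeze further bounding box updates
 * Or set sps.isAlwaysVisible = true so the mesh stays active regardless of the camera frustum.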
* @param size the size (float) of the visibility box * note : this doesn't lock the SPS mesh bounding box. * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility */ setVisibilityBox(e) { const t = e / 2; this.mesh.buildBoundingInfo(new D(-t, -t, -t), new D(t, t, t)); } /** * Gets whether the SPS as always visible or not * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility */ get isAlwaysVisible() { return this._alwaysVisible; } /** * Sets the SPS as always visible or not * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility */ set isAlwaysVisible(e) { this._alwaysVisible = e, this.mesh.alwaysSelectAsActiveMesh = e; } /** * Sets the SPS visibility box as locked or not. This enables/disables the underlying mesh bounding box updates. * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility */ set isVisibilityBoxLocked(e) { this._isVisibilityBoxLocked = e; const t = this.mesh.getBoundingInfo(); t.isLocked = e; } /** * Gets if the SPS visibility box as locked or not. This enables/disables the underlying mesh bounding box updates. * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_visibility */ get isVisibilityBoxLocked() { return this._isVisibilityBoxLocked; } /** * Tells to `setParticles()` to compute the particle rotations or not. * Default value : true. The SPS is faster when it's set to false. * Note : the particle rotations aren't stored values, so setting `computeParticleRotation` to false will prevents the particle to rotate. */ set computeParticleRotation(e) { this._computeParticleRotation = e; } /** * Tells to `setParticles()` to compute the particle colors or not. * Default value : true. The SPS is faster when it's set to false. * Note : the particle colors are stored values, so setting `computeParticleColor` to false will keep yet the last colors set. */ set computeParticleColor(e) { this._computeParticleColor = e; } set computeParticleTexture(e) { this._computeParticleTexture = e; } /** * Tells to `setParticles()` to call the vertex function for each vertex of each particle, or not. * Default value : false. The SPS is faster when it's set to false. * Note : the particle custom vertex positions aren't stored values. */ set computeParticleVertex(e) { this._computeParticleVertex = e; } /** * Tells to `setParticles()` to compute or not the mesh bounding box when computing the particle positions. */ set computeBoundingBox(e) { this._computeBoundingBox = e; } /** * Tells to `setParticles()` to sort or not the distance between each particle and the camera. * Skipped when `enableDepthSort` is set to `false` (default) at construction time. * Default : `true` */ set depthSortParticles(e) { this._depthSortParticles = e; } /** * Gets if `setParticles()` computes the particle rotations or not. * Default value : true. The SPS is faster when it's set to false. * Note : the particle rotations aren't stored values, so setting `computeParticleRotation` to false will prevents the particle to rotate. */ get computeParticleRotation() { return this._computeParticleRotation; } /** * Gets if `setParticles()` computes the particle colors or not. * Default value : true. The SPS is faster when it's set to false. * Note : the particle colors are stored values, so setting `computeParticleColor` to false will keep yet the last colors set. 
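 * @example (illustrative performance tuning, assuming colors/uvs/rotations that never change; `sps` hypothetical)
 * sps.computeParticleColor = false;
 * sps.computeParticleTexture = false;
 * sps.computeParticleRotation = false;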
*/ get computeParticleColor() { return this._computeParticleColor; } /** * Gets if `setParticles()` computes the particle textures or not. * Default value : true. The SPS is faster when it's set to false. * Note : the particle textures are stored values, so setting `computeParticleTexture` to false will keep yet the last colors set. */ get computeParticleTexture() { return this._computeParticleTexture; } /** * Gets if `setParticles()` calls the vertex function for each vertex of each particle, or not. * Default value : false. The SPS is faster when it's set to false. * Note : the particle custom vertex positions aren't stored values. */ get computeParticleVertex() { return this._computeParticleVertex; } /** * Gets if `setParticles()` computes or not the mesh bounding box when computing the particle positions. */ get computeBoundingBox() { return this._computeBoundingBox; } /** * Gets if `setParticles()` sorts or not the distance between each particle and the camera. * Skipped when `enableDepthSort` is set to `false` (default) at construction time. * Default : `true` */ get depthSortParticles() { return this._depthSortParticles; } /** * Gets if the SPS is created as expandable at construction time. * Default : `false` */ get expandable() { return this._expandable; } /** * Gets if the SPS supports the Multi Materials */ get multimaterialEnabled() { return this._multimaterialEnabled; } /** * Gets if the SPS uses the model materials for its own multimaterial. */ get useModelMaterial() { return this._useModelMaterial; } /** * The SPS used material array. */ get materials() { return this._materials; } /** * Sets the SPS MultiMaterial from the passed materials. * Note : the passed array is internally copied and not used then by reference. * @param materials an array of material objects. This array indexes are the materialIndex values of the particles. */ setMultiMaterial(e) { this._materials = this._filterUniqueMaterialId(e), this._setMaterialIndexesById(), this._multimaterial && this._multimaterial.dispose(), this._multimaterial = new xm(this.name + "MultiMaterial", this._scene); for (let t = 0; t < this._materials.length; t++) this._multimaterial.subMaterials.push(this._materials[t]); this.computeSubMeshes(), this.mesh.material = this._multimaterial; } /** * The SPS computed multimaterial object */ get multimaterial() { return this._multimaterial; } set multimaterial(e) { this._multimaterial = e; } /** * If the subMeshes must be updated on the next call to setParticles() */ get autoUpdateSubMeshes() { return this._autoUpdateSubMeshes; } set autoUpdateSubMeshes(e) { this._autoUpdateSubMeshes = e; } // ======================================================================= // Particle behavior logic // these following methods may be overwritten by the user to fit his needs /** * This function does nothing. It may be overwritten to set all the particle first values. * The SPS doesn't call this function, you may have to call it by your own. * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/manage_sps_particles */ initParticles() { } /** * This function does nothing. It may be overwritten to recycle a particle. * The SPS doesn't call this function, you may have to call it by your own. 
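* A sketch of the usual override pattern (public SolidParticleSystem names assumed):
* updateParticle() is called by setParticles(), recycleParticle() only when you call it.
* @example
* sps.updateParticle = (p) => {
*   p.position.addInPlace(p.velocity);
*   if (p.position.y < 0) {
*     sps.recycleParticle(p); // apply your own recycle logic
*   }
*   return p;
* };
* sps.recycleParticle = (p) => { p.position.setAll(0); return p; };
* scene.onBeforeRenderObservable.add(() => sps.setParticles());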
* doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/manage_sps_particles * @param particle The particle to recycle * @returns the recycled particle */ recycleParticle(e) { return e; } /** * Updates a particle : this function should be overwritten by the user. * It is called on each particle by `setParticles()`. This is the place to code each particle behavior. * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/manage_sps_particles * @example : just set a particle position or velocity and recycle conditions * @param particle The particle to update * @returns the updated particle */ updateParticle(e) { return e; } /** * Updates a vertex of a particle : it can be overwritten by the user. * This will be called on each vertex particle by `setParticles()` if `computeParticleVertex` is set to true only. * @param particle the current particle * @param vertex the current vertex of the current particle : a SolidParticleVertex object * @param pt the index of the current vertex in the particle shape * doc : https://doc.babylonjs.com/features/featuresDeepDive/particles/solid_particle_system/sps_vertices * @example : just set a vertex particle position or color * @returns the sps */ // eslint-disable-next-line @typescript-eslint/no-unused-vars updateParticleVertex(e, t, i) { return this; } /** * This will be called before any other treatment by `setParticles()` and will be passed three parameters. * This does nothing and may be overwritten by the user. * @param start the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param stop the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param update the boolean update value actually passed to setParticles() */ // eslint-disable-next-line @typescript-eslint/no-unused-vars beforeUpdateParticles(e, t, i) { } /** * This will be called by `setParticles()` after all the other treatments and just before the actual mesh update. * This will be passed three parameters. * This does nothing and may be overwritten by the user. * @param start the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param stop the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param update the boolean update value actually passed to setParticles() */ // eslint-disable-next-line @typescript-eslint/no-unused-vars afterUpdateParticles(e, t, i) { } } class yne { /** * Creates a Point Cloud object. * Don't create particles manually, use instead the PCS internal tools like _addParticle() * @param particleIndex (integer) is the particle index in the PCS pool. It's also the particle identifier. * @param group (PointsGroup) is the group the particle belongs to * @param groupId (integer) is the group identifier in the PCS. 
* @param idxInGroup (integer) is the index of the particle in the current point group (ex: the 10th point of addPoints(30)) * @param pcs defines the PCS it is associated to */ constructor(e, t, i, r, s) { this.idx = 0, this.color = new Et(1, 1, 1, 1), this.position = D.Zero(), this.rotation = D.Zero(), this.uv = new at(0, 0), this.velocity = D.Zero(), this.pivot = D.Zero(), this.translateFromPivot = !1, this._pos = 0, this._ind = 0, this.groupId = 0, this.idxInGroup = 0, this._stillInvisible = !1, this._rotationMatrix = [1, 0, 0, 0, 1, 0, 0, 0, 1], this.parentId = null, this._globalPosition = D.Zero(), this.idx = e, this._group = t, this.groupId = i, this.idxInGroup = r, this._pcs = s; } /** * get point size */ get size() { return this.size; } /** * Set point size */ set size(e) { this.size = e; } /** * Legacy support, changed quaternion to rotationQuaternion */ get quaternion() { return this.rotationQuaternion; } /** * Legacy support, changed quaternion to rotationQuaternion */ set quaternion(e) { this.rotationQuaternion = e; } /** * Returns a boolean. True if the particle intersects a mesh, else false * The intersection is computed on the particle position and Axis Aligned Bounding Box (AABB) or Sphere * @param target is the object (point or mesh) what the intersection is computed against * @param isSphere is boolean flag when false (default) bounding box of mesh is used, when true the bounding sphere is used * @returns true if it intersects */ intersectsMesh(e, t) { if (!e.hasBoundingInfo) return !1; if (!this._pcs.mesh) throw new Error("Point Cloud System doesnt contain the Mesh"); if (t) return e.getBoundingInfo().boundingSphere.intersectsPoint(this.position.add(this._pcs.mesh.position)); const i = e.getBoundingInfo().boundingBox, r = i.maximumWorld.x, s = i.minimumWorld.x, n = i.maximumWorld.y, a = i.minimumWorld.y, l = i.maximumWorld.z, o = i.minimumWorld.z, u = this.position.x + this._pcs.mesh.position.x, h = this.position.y + this._pcs.mesh.position.y, d = this.position.z + this._pcs.mesh.position.z; return s <= u && u <= r && a <= h && h <= n && o <= d && d <= l; } /** * get the rotation matrix of the particle * @internal */ getRotationMatrix(e) { let t; if (this.rotationQuaternion) t = this.rotationQuaternion; else { t = de.Quaternion[0]; const i = this.rotation; Ze.RotationYawPitchRollToRef(i.y, i.x, i.z, t); } t.toRotationMatrix(e); } } class KF { /** * Get or set the groupId * @deprecated Please use groupId instead */ get groupID() { return this.groupId; } set groupID(e) { this.groupId = e; } /** * Creates a points group object. This is an internal reference to produce particles for the PCS. * PCS internal tool, don't use it manually. * @internal */ constructor(e, t) { this.groupId = e, this._positionFunction = t; } } var Rp; (function(c) { c[c.Color = 2] = "Color", c[c.UV = 1] = "UV", c[c.Random = 0] = "Random", c[c.Stated = 3] = "Stated"; })(Rp || (Rp = {})); class Cne { /** * Gets the particle positions computed by the Point Cloud System */ get positions() { return this._positions32; } /** * Gets the particle colors computed by the Point Cloud System */ get colors() { return this._colors32; } /** * Gets the particle uvs computed by the Point Cloud System */ get uvs() { return this._uvs32; } /** * Creates a PCS (Points Cloud System) object * @param name (String) is the PCS name, this will be the underlying mesh name * @param pointSize (number) is the size for each point. Has no effect on a WebGPU engine. 
* @param scene (Scene) is the scene in which the PCS is added * @param options defines the options of the PCS e.g. * * updatable (optional boolean, default true) : if the PCS must be updatable or immutable * @param options.updatable */ constructor(e, t, i, r) { this.particles = new Array(), this.nbParticles = 0, this.counter = 0, this.vars = {}, this._promises = [], this._positions = new Array(), this._indices = new Array(), this._normals = new Array(), this._colors = new Array(), this._uvs = new Array(), this._updatable = !0, this._isVisibilityBoxLocked = !1, this._alwaysVisible = !1, this._groups = new Array(), this._groupCounter = 0, this._computeParticleColor = !0, this._computeParticleTexture = !0, this._computeParticleRotation = !0, this._computeBoundingBox = !1, this._isReady = !1, this.name = e, this._size = t, this._scene = i || gi.LastCreatedScene, r && r.updatable !== void 0 ? this._updatable = r.updatable : this._updatable = !0; } /** * Builds the PCS underlying mesh. Returns a standard Mesh. * If no points were added to the PCS, the returned mesh is just a single point. * @param material The material to use to render the mesh. If not provided, will create a default one * @returns a promise for the created mesh */ buildMeshAsync(e) { return Promise.all(this._promises).then(() => (this._isReady = !0, this._buildMesh(e))); } /** * @internal */ _buildMesh(e) { this.nbParticles === 0 && this.addPoints(1), this._positions32 = new Float32Array(this._positions), this._uvs32 = new Float32Array(this._uvs), this._colors32 = new Float32Array(this._colors); const t = new Ot(); t.set(this._positions32, Y.PositionKind), this._uvs32.length > 0 && t.set(this._uvs32, Y.UVKind); let i = 0; this._colors32.length > 0 && (i = 1, t.set(this._colors32, Y.ColorKind)); const r = new ke(this.name, this._scene); t.applyToMesh(r, this._updatable), this.mesh = r, this._positions = null, this._uvs = null, this._colors = null, this._updatable || (this.particles.length = 0); let s = e; return s || (s = new Dt("point cloud material", this._scene), s.emissiveColor = new ze(i, i, i), s.disableLighting = !0, s.pointsCloud = !0, s.pointSize = this._size), r.material = s, new Promise((n) => n(r)); } // adds a new particle object in the particles array _addParticle(e, t, i, r) { const s = new yne(e, t, i, r, this); return this.particles.push(s), s; } _randomUnitVector(e) { e.position = new D(Math.random(), Math.random(), Math.random()), e.color = new Et(1, 1, 1, 1); } _getColorIndicesForCoord(e, t, i, r) { const s = e._groupImageData, n = i * (r * 4) + t * 4, a = [n, n + 1, n + 2, n + 3], l = a[0], o = a[1], u = a[2], h = a[3], d = s[l], f = s[o], p = s[u], m = s[h]; return new Et(d / 255, f / 255, p / 255, m); } _setPointsColorOrUV(e, t, i, r, s, n, a, l) { l = l ?? 0, i && e.updateFacetData(); const u = 2 * e.getBoundingInfo().boundingSphere.radius; let h = e.getVerticesData(Y.PositionKind); const d = e.getIndices(), f = e.getVerticesData(Y.UVKind + (l ? 
l + 1 : "")), p = e.getVerticesData(Y.ColorKind), m = D.Zero(); e.computeWorldMatrix(); const _ = e.getWorldMatrix(); if (!_.isIdentity()) { h = h.slice(0); for (let ki = 0; ki < h.length / 3; ki++) D.TransformCoordinatesFromFloatsToRef(h[3 * ki], h[3 * ki + 1], h[3 * ki + 2], _, m), h[3 * ki] = m.x, h[3 * ki + 1] = m.y, h[3 * ki + 2] = m.z; } let v = 0, C = 0, x = 0, b = 0, S = 0, M = 0, R = 0, w = 0, V = 0, k = 0, L = 0, B = 0, U = 0; const K = D.Zero(), ee = D.Zero(), Z = D.Zero(), q = D.Zero(), le = D.Zero(); let ie = 0, $ = 0, j = 0, J = 0, ne = 0, pe = 0; const ge = at.Zero(), Ie = at.Zero(), ye = at.Zero(), Se = at.Zero(), re = at.Zero(); let te = 0, he = 0, be = 0, Ue = 0, Ee = 0, He = 0, Xe = 0, rt = 0, dt = 0, bt = 0, Mt = 0, Ct = 0; const di = Di.Zero(), Kt = Di.Zero(), ei = Di.Zero(), bi = Di.Zero(), vr = Di.Zero(); let yi = 0, Vr = 0; a = a || 0; let Rr, ks, Qt = new Di(0, 0, 0, 0), Ei = D.Zero(), Pi = D.Zero(), rr = D.Zero(), sr = 0, dr = D.Zero(), nr = 0, Pr = 0; const ti = new gs(D.Zero(), new D(1, 0, 0)); let Oi, ri = D.Zero(); for (let ki = 0; ki < d.length / 3; ki++) { C = d[3 * ki], x = d[3 * ki + 1], b = d[3 * ki + 2], S = h[3 * C], M = h[3 * C + 1], R = h[3 * C + 2], w = h[3 * x], V = h[3 * x + 1], k = h[3 * x + 2], L = h[3 * b], B = h[3 * b + 1], U = h[3 * b + 2], K.set(S, M, R), ee.set(w, V, k), Z.set(L, B, U), ee.subtractToRef(K, q), Z.subtractToRef(ee, le), f && (ie = f[2 * C], $ = f[2 * C + 1], j = f[2 * x], J = f[2 * x + 1], ne = f[2 * b], pe = f[2 * b + 1], ge.set(ie, $), Ie.set(j, J), ye.set(ne, pe), Ie.subtractToRef(ge, Se), ye.subtractToRef(Ie, re)), p && r && (te = p[4 * C], he = p[4 * C + 1], be = p[4 * C + 2], Ue = p[4 * C + 3], Ee = p[4 * x], He = p[4 * x + 1], Xe = p[4 * x + 2], rt = p[4 * x + 3], dt = p[4 * b], bt = p[4 * b + 1], Mt = p[4 * b + 2], Ct = p[4 * b + 3], di.set(te, he, be, Ue), Kt.set(Ee, He, Xe, rt), ei.set(dt, bt, Mt, Ct), Kt.subtractToRef(di, bi), ei.subtractToRef(Kt, vr)); let wr, Lr, Us, nn, Li, Os, rn, Ts; const Zs = new ze(0, 0, 0), as = new ze(0, 0, 0); let ui, zi; for (let _i = 0; _i < t._groupDensity[ki]; _i++) v = this.particles.length, this._addParticle(v, t, this._groupCounter, ki + _i), zi = this.particles[v], yi = yt.RandomRange(0, 1), Vr = yt.RandomRange(0, 1), Rr = K.add(q.scale(yi)).add(le.scale(yi * Vr)), i && (Ei = e.getFacetNormal(ki).normalize().scale(-1), Pi = q.clone().normalize(), rr = D.Cross(Ei, Pi), sr = yt.RandomRange(0, 2 * Math.PI), dr = Pi.scale(Math.cos(sr)).add(rr.scale(Math.sin(sr))), sr = yt.RandomRange(0.1, Math.PI / 2), ri = dr.scale(Math.cos(sr)).add(Ei.scale(Math.sin(sr))), ti.origin = Rr.add(ri.scale(1e-5)), ti.direction = ri, ti.length = u, Oi = ti.intersectsMesh(e), Oi.hit && (Pr = Oi.pickedPoint.subtract(Rr).length(), nr = yt.RandomRange(0, 1) * Pr, Rr.addInPlace(ri.scale(nr)))), zi.position = Rr.clone(), this._positions.push(zi.position.x, zi.position.y, zi.position.z), r !== void 0 ? f && (ks = ge.add(Se.scale(yi)).add(re.scale(yi * Vr)), r ? s && t._groupImageData !== null ? (wr = t._groupImgWidth, Lr = t._groupImgHeight, ui = this._getColorIndicesForCoord(t, Math.round(ks.x * wr), Math.round(ks.y * Lr), wr), zi.color = ui, this._colors.push(ui.r, ui.g, ui.b, ui.a)) : p ? 
(Qt = di.add(bi.scale(yi)).add(vr.scale(yi * Vr)), zi.color = new Et(Qt.x, Qt.y, Qt.z, Qt.w), this._colors.push(Qt.x, Qt.y, Qt.z, Qt.w)) : (Qt = di.set(Math.random(), Math.random(), Math.random(), 1), zi.color = new Et(Qt.x, Qt.y, Qt.z, Qt.w), this._colors.push(Qt.x, Qt.y, Qt.z, Qt.w)) : (zi.uv = ks.clone(), this._uvs.push(zi.uv.x, zi.uv.y))) : (n ? (Zs.set(n.r, n.g, n.b), Us = yt.RandomRange(-a, a), nn = yt.RandomRange(-a, a), Ts = Zs.toHSV(), Li = Ts.r, Os = Ts.g + Us, rn = Ts.b + nn, Os < 0 && (Os = 0), Os > 1 && (Os = 1), rn < 0 && (rn = 0), rn > 1 && (rn = 1), ze.HSVtoRGBToRef(Li, Os, rn, as), Qt.set(as.r, as.g, as.b, 1)) : Qt = di.set(Math.random(), Math.random(), Math.random(), 1), zi.color = new Et(Qt.x, Qt.y, Qt.z, Qt.w), this._colors.push(Qt.x, Qt.y, Qt.z, Qt.w)); } } // stores mesh texture in dynamic texture for color pixel retrieval // when pointColor type is color for surface points _colorFromTexture(e, t, i) { if (e.material === null) { Ce.Warn(e.name + "has no material."), t._groupImageData = null, this._setPointsColorOrUV(e, t, i, !0, !1); return; } const s = e.material.getActiveTextures(); if (s.length === 0) { Ce.Warn(e.name + "has no usable texture."), t._groupImageData = null, this._setPointsColorOrUV(e, t, i, !0, !1); return; } const n = e.clone(); n.setEnabled(!1), this._promises.push(new Promise((a) => { dn.WhenAllReady(s, () => { let l = t._textureNb; l < 0 && (l = 0), l > s.length - 1 && (l = s.length - 1); const o = () => { t._groupImgWidth = s[l].getSize().width, t._groupImgHeight = s[l].getSize().height, this._setPointsColorOrUV(n, t, i, !0, !0, void 0, void 0, s[l].coordinatesIndex), n.dispose(), a(); }; t._groupImageData = null; const u = s[l].readPixels(); u ? u.then((h) => { t._groupImageData = h, o(); }) : o(); }); })); } // calculates the point density per facet of a mesh for surface points _calculateDensity(e, t, i) { let r = new Array(), s, n, a, l, o, u, h, d, f, p, m, _; const v = D.Zero(), C = D.Zero(), x = D.Zero(), b = D.Zero(), S = D.Zero(), M = D.Zero(); let R, w, V, k, L; const B = new Array(); let U = 0; const K = i.length / 3; for (let ie = 0; ie < K; ie++) s = i[3 * ie], n = i[3 * ie + 1], a = i[3 * ie + 2], l = t[3 * s], o = t[3 * s + 1], u = t[3 * s + 2], h = t[3 * n], d = t[3 * n + 1], f = t[3 * n + 2], p = t[3 * a], m = t[3 * a + 1], _ = t[3 * a + 2], v.set(l, o, u), C.set(h, d, f), x.set(p, m, _), C.subtractToRef(v, b), x.subtractToRef(C, S), x.subtractToRef(v, M), R = b.length(), w = S.length(), V = M.length(), k = (R + w + V) / 2, L = Math.sqrt(k * (k - R) * (k - w) * (k - V)), U += L, B[ie] = L; let ee = 0; for (let ie = 0; ie < K; ie++) r[ie] = Math.floor(e * B[ie] / U), ee += r[ie]; const Z = e - ee, q = Math.floor(Z / K), le = Z % K; q > 0 && (r = r.map((ie) => ie + q)); for (let ie = 0; ie < le; ie++) r[ie] += 1; return r; } /** * Adds points to the PCS in random positions within a unit sphere * @param nb (positive integer) the number of particles to be created from this model * @param pointFunction is an optional javascript function to be called for each particle on PCS creation * @returns the number of groups in the system */ addPoints(e, t = this._randomUnitVector) { const i = new KF(this._groupCounter, t); let r, s = this.nbParticles; for (let n = 0; n < e; n++) r = this._addParticle(s, i, this._groupCounter, n), i && i._positionFunction && i._positionFunction(r, s, n), this._positions.push(r.position.x, r.position.y, r.position.z), r.color && this._colors.push(r.color.r, r.color.g, r.color.b, r.color.a), r.uv && 
this._uvs.push(r.uv.x, r.uv.y), s++; return this.nbParticles += e, this._groupCounter++, this._groupCounter; } /** * Adds points to the PCS from the surface of the model shape * @param mesh is any Mesh object that will be used as a surface model for the points * @param nb (positive integer) the number of particles to be created from this model * @param colorWith determines whether a point is colored using color (default), uv, random, stated or none (invisible) * @param color (color4) to be used when colorWith is stated or color (number) when used to specify texture position * @param range (number from 0 to 1) to determine the variation in shape and tone for a stated color * @returns the number of groups in the system */ addSurfacePoints(e, t, i, r, s) { let n = i || Rp.Random; (isNaN(n) || n < 0 || n > 3) && (n = Rp.Random); const a = e.getVerticesData(Y.PositionKind), l = e.getIndices(); this._groups.push(this._groupCounter); const o = new KF(this._groupCounter, null); switch (o._groupDensity = this._calculateDensity(t, a, l), n === Rp.Color ? o._textureNb = r || 0 : r = r || new Et(1, 1, 1, 1), n) { case Rp.Color: this._colorFromTexture(e, o, !1); break; case Rp.UV: this._setPointsColorOrUV(e, o, !1, !1, !1); break; case Rp.Random: this._setPointsColorOrUV(e, o, !1); break; case Rp.Stated: this._setPointsColorOrUV(e, o, !1, void 0, void 0, r, s); break; } return this.nbParticles += t, this._groupCounter++, this._groupCounter - 1; } /** * Adds points to the PCS inside the model shape * @param mesh is any Mesh object that will be used as a surface model for the points * @param nb (positive integer) the number of particles to be created from this model * @param colorWith determines whether a point is colored using color (default), uv, random, stated or none (invisible) * @param color (color4) to be used when colorWith is stated or color (number) when used to specify texture position * @param range (number from 0 to 1) to determine the variation in shape and tone for a stated color * @returns the number of groups in the system */ addVolumePoints(e, t, i, r, s) { let n = i || Rp.Random; (isNaN(n) || n < 0 || n > 3) && (n = Rp.Random); const a = e.getVerticesData(Y.PositionKind), l = e.getIndices(); this._groups.push(this._groupCounter); const o = new KF(this._groupCounter, null); switch (o._groupDensity = this._calculateDensity(t, a, l), n === Rp.Color ? o._textureNb = r || 0 : r = r || new Et(1, 1, 1, 1), n) { case Rp.Color: this._colorFromTexture(e, o, !0); break; case Rp.UV: this._setPointsColorOrUV(e, o, !0, !1, !1); break; case Rp.Random: this._setPointsColorOrUV(e, o, !0); break; case Rp.Stated: this._setPointsColorOrUV(e, o, !0, void 0, void 0, r, s); break; } return this.nbParticles += t, this._groupCounter++, this._groupCounter - 1; } /** * Sets all the particles : this method actually really updates the mesh according to the particle positions, rotations, colors, textures, etc. * This method calls `updateParticle()` for each particle of the SPS. * For an animated SPS, it is usually called within the render loop. * @param start The particle index in the particle array where to start to compute the particle property values _(default 0)_ * @param end The particle index in the particle array where to stop to compute the particle property values _(default nbParticle - 1)_ * @param update If the mesh must be finally updated on this call after all the particle computations _(default true)_ * @returns the PCS. 
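* A minimal end-to-end sketch, assuming this minified class is the public PointsCloudSystem
* API of @babylonjs/core (PointsCloudSystem, PointColor, sphereMesh, scene stand for the
* un-minified names and your own objects):
* @example
* const pcs = new PointsCloudSystem("pcs", 2, scene);
* pcs.addPoints(1000); // 1000 points at random positions (default position function)
* pcs.addSurfacePoints(sphereMesh, 5000, PointColor.Color); // points colored from the mesh texture
* pcs.updateParticle = (p) => { p.position.addInPlace(p.velocity); return p; };
* pcs.buildMeshAsync().then(() => {
*   scene.onBeforeRenderObservable.add(() => pcs.setParticles());
* });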
*/ setParticles(e = 0, t = this.nbParticles - 1, i = !0) { var r, s; if (!this._updatable || !this._isReady) return this; this.beforeUpdateParticles(e, t, i); const n = de.Matrix[0], a = this.mesh, l = this._colors32, o = this._positions32, u = this._uvs32, h = de.Vector3, d = h[5].copyFromFloats(1, 0, 0), f = h[6].copyFromFloats(0, 1, 0), p = h[7].copyFromFloats(0, 0, 1), m = h[8].setAll(Number.MAX_VALUE), _ = h[9].setAll(-Number.MAX_VALUE); Ae.IdentityToRef(n); let v = 0; if (!((r = this.mesh) === null || r === void 0) && r.isFacetDataEnabled && (this._computeBoundingBox = !0), t = t >= this.nbParticles ? this.nbParticles - 1 : t, this._computeBoundingBox && (e != 0 || t != this.nbParticles - 1)) { const S = (s = this.mesh) === null || s === void 0 ? void 0 : s.getBoundingInfo(); S && (m.copyFrom(S.minimum), _.copyFrom(S.maximum)); } v = 0; let C = 0, x = 0, b = 0; for (let S = e; S <= t; S++) { const M = this.particles[S]; v = M.idx, C = 3 * v, x = 4 * v, b = 2 * v, this.updateParticle(M); const R = M._rotationMatrix, w = M.position, V = M._globalPosition; if (this._computeParticleRotation && M.getRotationMatrix(n), M.parentId !== null) { const J = this.particles[M.parentId], ne = J._rotationMatrix, pe = J._globalPosition, ge = w.x * ne[1] + w.y * ne[4] + w.z * ne[7], Ie = w.x * ne[0] + w.y * ne[3] + w.z * ne[6], ye = w.x * ne[2] + w.y * ne[5] + w.z * ne[8]; if (V.x = pe.x + Ie, V.y = pe.y + ge, V.z = pe.z + ye, this._computeParticleRotation) { const Se = n.m; R[0] = Se[0] * ne[0] + Se[1] * ne[3] + Se[2] * ne[6], R[1] = Se[0] * ne[1] + Se[1] * ne[4] + Se[2] * ne[7], R[2] = Se[0] * ne[2] + Se[1] * ne[5] + Se[2] * ne[8], R[3] = Se[4] * ne[0] + Se[5] * ne[3] + Se[6] * ne[6], R[4] = Se[4] * ne[1] + Se[5] * ne[4] + Se[6] * ne[7], R[5] = Se[4] * ne[2] + Se[5] * ne[5] + Se[6] * ne[8], R[6] = Se[8] * ne[0] + Se[9] * ne[3] + Se[10] * ne[6], R[7] = Se[8] * ne[1] + Se[9] * ne[4] + Se[10] * ne[7], R[8] = Se[8] * ne[2] + Se[9] * ne[5] + Se[10] * ne[8]; } } else if (V.x = 0, V.y = 0, V.z = 0, this._computeParticleRotation) { const J = n.m; R[0] = J[0], R[1] = J[1], R[2] = J[2], R[3] = J[4], R[4] = J[5], R[5] = J[6], R[6] = J[8], R[7] = J[9], R[8] = J[10]; } const L = h[11]; M.translateFromPivot ? L.setAll(0) : L.copyFrom(M.pivot); const B = h[0]; B.copyFrom(M.position); const U = B.x - M.pivot.x, K = B.y - M.pivot.y, ee = B.z - M.pivot.z; let Z = U * R[0] + K * R[3] + ee * R[6], q = U * R[1] + K * R[4] + ee * R[7], le = U * R[2] + K * R[5] + ee * R[8]; Z += L.x, q += L.y, le += L.z; const ie = o[C] = V.x + d.x * Z + f.x * q + p.x * le, $ = o[C + 1] = V.y + d.y * Z + f.y * q + p.y * le, j = o[C + 2] = V.z + d.z * Z + f.z * q + p.z * le; if (this._computeBoundingBox && (m.minimizeInPlaceFromFloats(ie, $, j), _.maximizeInPlaceFromFloats(ie, $, j)), this._computeParticleColor && M.color) { const J = M.color, ne = this._colors32; ne[x] = J.r, ne[x + 1] = J.g, ne[x + 2] = J.b, ne[x + 3] = J.a; } if (this._computeParticleTexture && M.uv) { const J = M.uv, ne = this._uvs32; ne[b] = J.x, ne[b + 1] = J.y; } } return a && (i && (this._computeParticleColor && a.updateVerticesData(Y.ColorKind, l, !1, !1), this._computeParticleTexture && a.updateVerticesData(Y.UVKind, u, !1, !1), a.updateVerticesData(Y.PositionKind, o, !1, !1)), this._computeBoundingBox && (a.hasBoundingInfo ? a.getBoundingInfo().reConstruct(m, _, a._worldMatrix) : a.buildBoundingInfo(m, _, a._worldMatrix))), this.afterUpdateParticles(e, t, i), this; } /** * Disposes the PCS. 
*/ dispose() { var e; (e = this.mesh) === null || e === void 0 || e.dispose(), this.vars = null, this._positions = null, this._indices = null, this._normals = null, this._uvs = null, this._colors = null, this._indices32 = null, this._positions32 = null, this._uvs32 = null, this._colors32 = null; } /** * Visibility helper : Recomputes the visible size according to the mesh bounding box * doc : * @returns the PCS. */ refreshVisibleSize() { var e; return this._isVisibilityBoxLocked || (e = this.mesh) === null || e === void 0 || e.refreshBoundingInfo(), this; } /** * Visibility helper : Sets the size of a visibility box, this sets the underlying mesh bounding box. * @param size the size (float) of the visibility box * note : this doesn't lock the PCS mesh bounding box. * doc : */ setVisibilityBox(e) { if (!this.mesh) return; const t = e / 2; this.mesh.buildBoundingInfo(new D(-t, -t, -t), new D(t, t, t)); } /** * Gets whether the PCS is always visible or not * doc : */ get isAlwaysVisible() { return this._alwaysVisible; } /** * Sets the PCS as always visible or not * doc : */ set isAlwaysVisible(e) { this.mesh && (this._alwaysVisible = e, this.mesh.alwaysSelectAsActiveMesh = e); } /** * Tells to `setParticles()` to compute the particle rotations or not * Default value : false. The PCS is faster when it's set to false * Note : particle rotations are only applied to parent particles * Note : the particle rotations aren't stored values, so setting `computeParticleRotation` to false will prevents the particle to rotate */ set computeParticleRotation(e) { this._computeParticleRotation = e; } /** * Tells to `setParticles()` to compute the particle colors or not. * Default value : true. The PCS is faster when it's set to false. * Note : the particle colors are stored values, so setting `computeParticleColor` to false will keep yet the last colors set. */ set computeParticleColor(e) { this._computeParticleColor = e; } set computeParticleTexture(e) { this._computeParticleTexture = e; } /** * Gets if `setParticles()` computes the particle colors or not. * Default value : false. The PCS is faster when it's set to false. * Note : the particle colors are stored values, so setting `computeParticleColor` to false will keep yet the last colors set. */ get computeParticleColor() { return this._computeParticleColor; } /** * Gets if `setParticles()` computes the particle textures or not. * Default value : false. The PCS is faster when it's set to false. * Note : the particle textures are stored values, so setting `computeParticleTexture` to false will keep yet the last colors set. */ get computeParticleTexture() { return this._computeParticleTexture; } /** * Tells to `setParticles()` to compute or not the mesh bounding box when computing the particle positions. */ set computeBoundingBox(e) { this._computeBoundingBox = e; } /** * Gets if `setParticles()` computes or not the mesh bounding box when computing the particle positions. */ get computeBoundingBox() { return this._computeBoundingBox; } // ======================================================================= // Particle behavior logic // these following methods may be overwritten by users to fit their needs /** * This function does nothing. It may be overwritten to set all the particle first values. * The PCS doesn't call this function, you may have to call it by your own. * doc : */ initParticles() { } /** * This function does nothing. 
It may be overwritten to recycle a particle * The PCS doesn't call this function, you can to call it * doc : * @param particle The particle to recycle * @returns the recycled particle */ recycleParticle(e) { return e; } /** * Updates a particle : this function should be overwritten by the user. * It is called on each particle by `setParticles()`. This is the place to code each particle behavior. * doc : * @example : just set a particle position or velocity and recycle conditions * @param particle The particle to update * @returns the updated particle */ updateParticle(e) { return e; } /** * This will be called before any other treatment by `setParticles()` and will be passed three parameters. * This does nothing and may be overwritten by the user. * @param start the particle index in the particle array where to start to iterate, same than the value passed to setParticle() * @param stop the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param update the boolean update value actually passed to setParticles() */ // eslint-disable-next-line @typescript-eslint/no-unused-vars beforeUpdateParticles(e, t, i) { } /** * This will be called by `setParticles()` after all the other treatments and just before the actual mesh update. * This will be passed three parameters. * This does nothing and may be overwritten by the user. * @param start the particle index in the particle array where to start to iterate, same than the value passed to setParticle() * @param stop the particle index in the particle array where to stop to iterate, same than the value passed to setParticle() * @param update the boolean update value actually passed to setParticles() */ // eslint-disable-next-line @typescript-eslint/no-unused-vars afterUpdateParticles(e, t, i) { } } Object.defineProperty(xr.prototype, "physicsImpostor", { get: function() { return this._physicsImpostor; }, set: function(c) { this._physicsImpostor !== c && (this._disposePhysicsObserver && this.onDisposeObservable.remove(this._disposePhysicsObserver), this._physicsImpostor = c, c && (this._disposePhysicsObserver = this.onDisposeObservable.add(() => { this.physicsImpostor && (this.physicsImpostor.dispose( /*!doNotRecurse*/ ), this.physicsImpostor = null); }))); }, enumerable: !0, configurable: !0 }); xr.prototype.getPhysicsImpostor = function() { return this.physicsImpostor; }; xr.prototype.applyImpulse = function(c, e) { return this.physicsImpostor ? (this.physicsImpostor.applyImpulse(c, e), this) : this; }; xr.prototype.setPhysicsLinkWith = function(c, e, t, i) { return !this.physicsImpostor || !c.physicsImpostor ? this : (this.physicsImpostor.createJoint(c.physicsImpostor, ta.HingeJoint, { mainPivot: e, connectedPivot: t, nativeParams: i }), this); }; class GU { /** * * @returns physics plugin version */ getPluginVersion() { return this._physicsPlugin.getPluginVersion(); } /** * Factory used to create the default physics plugin. 
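* A setup sketch: this is the V2 physics engine, normally created through scene.enablePhysics()
* with a version-2 plugin. The HavokPhysics / HavokPlugin names below come from
* @babylonjs/havok and @babylonjs/core and are assumptions about the surrounding build:
* @example
* // (inside an async function)
* const havok = await HavokPhysics(); // load the Havok WASM module
* scene.enablePhysics(new Vector3(0, -9.81, 0), new HavokPlugin(true, havok));
* const engine = scene.getPhysicsEngine();
* engine.setTimeStep(1 / 120); // finer fixed step
* engine.setSubTimeStep(1);    // 1 ms sub-steps for higher resolution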
* @returns The default physics plugin */ static DefaultPluginFactory() { throw yr(""); } /** * Creates a new Physics Engine * @param gravity defines the gravity vector used by the simulation * @param _physicsPlugin defines the plugin to use (CannonJS by default) */ constructor(e, t = GU.DefaultPluginFactory()) { this._physicsPlugin = t, this._physicsBodies = [], this._subTimeStep = 0, e = e || new D(0, -9.807, 0), this.setGravity(e), this.setTimeStep(); } /** * Sets the gravity vector used by the simulation * @param gravity defines the gravity vector to use */ setGravity(e) { this.gravity = e, this._physicsPlugin.setGravity(this.gravity); } /** * Set the time step of the physics engine. * Default is 1/60. * To slow it down, enter 1/600 for example. * To speed it up, 1/30 * @param newTimeStep defines the new timestep to apply to this world. */ setTimeStep(e = 1 / 60) { this._physicsPlugin.setTimeStep(e); } /** * Get the time step of the physics engine. * @returns the current time step */ getTimeStep() { return this._physicsPlugin.getTimeStep(); } /** * Set the sub time step of the physics engine. * Default is 0 meaning there is no sub steps * To increase physics resolution precision, set a small value (like 1 ms) * @param subTimeStep defines the new sub timestep used for physics resolution. */ setSubTimeStep(e = 0) { this._subTimeStep = e; } /** * Get the sub time step of the physics engine. * @returns the current sub time step */ getSubTimeStep() { return this._subTimeStep; } /** * Release all resources */ dispose() { this._physicsPlugin.dispose(); } /** * Gets the name of the current physics plugin * @returns the name of the plugin */ getPhysicsPluginName() { return this._physicsPlugin.name; } /** * Adding a new impostor for the impostor tracking. * This will be done by the impostor itself. * @param impostor the impostor to add */ /** * Called by the scene. No need to call it. * @param delta defines the timespan between frames */ _step(e) { e > 0.1 ? e = 0.1 : e <= 0 && (e = 1 / 60), this._physicsPlugin.executeStep(e, this._physicsBodies); } /** * Add a body as an active component of this engine * @param body */ addBody(e) { this._physicsBodies.push(e); } /** * Removes a particular body from this engine */ removeBody(e) { const t = this._physicsBodies.indexOf(e); t > -1 && this._physicsBodies.splice(t, 1); } /** * Returns an array of bodies added to this engine */ getBodies() { return this._physicsBodies; } /** * Gets the current plugin used to run the simulation * @returns current plugin */ getPhysicsPlugin() { return this._physicsPlugin; } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @param result resulting PhysicsRaycastResult */ raycastToRef(e, t, i, r) { this._physicsPlugin.raycast(e, t, i, r); } /** * Does a raycast in the physics world * @param from when should the ray start? * @param to when should the ray end? * @returns PhysicsRaycastResult */ raycast(e, t, i) { const r = new _N(); return this._physicsPlugin.raycast(e, t, r, i), r; } } class KU { /** * Constructs a new physics body for the given node. * @param transformNode - The Transform Node to construct the physics body for. For better performance, it is advised that this node does not have a parent. * @param motionType - The motion type of the physics body. The options are: * - PhysicsMotionType.STATIC - Static bodies are not moving and unaffected by forces or collisions. They are good for level boundaries or terrain. 
* - PhysicsMotionType.DYNAMIC - Dynamic bodies are fully simulated. They can move and collide with other objects. * - PhysicsMotionType.ANIMATED - They behave like dynamic bodies, but they won't be affected by other bodies, but still push other bodies out of the way. * @param startsAsleep - Whether the physics body should start in a sleeping state (not a guarantee). Defaults to false. * @param scene - The scene containing the physics engine. * * This code is useful for creating a physics body for a given Transform Node in a scene. * It checks the version of the physics engine and the physics plugin, and initializes the body accordingly. * It also sets the node's rotation quaternion if it is not already set. Finally, it adds the body to the physics engine. */ constructor(e, t, i, r) { if (this._pluginData = void 0, this._pluginDataInstances = [], this._collisionCBEnabled = !1, this._collisionEndedCBEnabled = !1, this.disablePreStep = !0, this._isDisposed = !1, !r) return; const s = r.getPhysicsEngine(); if (!s) throw new Error("No Physics Engine available."); if (this._physicsEngine = s, s.getPluginVersion() != 2) throw new Error("Plugin version is incorrect. Expected version 2."); const n = s.getPhysicsPlugin(); if (!n) throw new Error("No Physics Plugin available."); this._physicsPlugin = n, e.rotationQuaternion || (e.rotationQuaternion = Ze.FromEulerAngles(e.rotation.x, e.rotation.y, e.rotation.z)), this.startAsleep = i; const a = e; a.hasThinInstances ? this._physicsPlugin.initBodyInstances(this, t, a) : (e.parent && e.computeWorldMatrix(!0), this._physicsPlugin.initBody(this, t, e.absolutePosition, e.absoluteRotationQuaternion)), this.transformNode = e, e.physicsBody = this, s.addBody(this), this._nodeDisposeObserver = e.onDisposeObservable.add(() => { this.dispose(); }); } /** * Returns the string "PhysicsBody". * @returns "PhysicsBody" */ getClassName() { return "PhysicsBody"; } /** * Clone the PhysicsBody to a new body and assign it to the transformNode parameter * @param transformNode transformNode that will be used for the cloned PhysicsBody * @returns the newly cloned PhysicsBody */ clone(e) { const t = new KU(e, this.getMotionType(), this.startAsleep, this.transformNode.getScene()); return t.shape = this.shape, t.setMassProperties(this.getMassProperties()), t.setLinearDamping(this.getLinearDamping()), t.setAngularDamping(this.getAngularDamping()), t; } /** * If a physics body is connected to an instanced node, update the number physic instances to match the number of node instances. */ updateBodyInstances() { const e = this.transformNode; e.hasThinInstances && this._physicsPlugin.updateBodyInstances(this, e); } /** * This returns the number of internal instances of the physics body */ get numInstances() { return this._pluginDataInstances.length; } /** * Sets the shape of the physics body. * @param shape - The shape of the physics body. * * This method is useful for setting the shape of the physics body, which is necessary for the physics engine to accurately simulate the body's behavior. * The shape is used to calculate the body's mass, inertia, and other properties. */ set shape(e) { this._physicsPlugin.setShape(this, e); } /** * Retrieves the physics shape associated with this object. * * @returns The physics shape associated with this object, or `undefined` if no * shape is associated. * * This method is useful for retrieving the physics shape associated with this object, * which can be used to apply physical forces to the object or to detect collisions. 
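* A creation sketch, assuming the public PhysicsBody / PhysicsShapeSphere / PhysicsMotionType
* names of @babylonjs/core for these minified classes (sphereMesh and scene are your own):
* @example
* const body = new PhysicsBody(sphereMesh, PhysicsMotionType.DYNAMIC, false, scene);
* body.shape = new PhysicsShapeSphere(Vector3.Zero(), 1, scene);
* body.setMassProperties({ mass: 2 });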
*/ get shape() { return this._physicsPlugin.getShape(this); } /** * Sets the event mask for the physics engine. * * @param eventMask - A bitmask that determines which events will be sent to the physics engine. * * This method is useful for setting the event mask for the physics engine, which determines which events * will be sent to the physics engine. This allows the user to control which events the physics engine will respond to. */ setEventMask(e, t) { this._physicsPlugin.setEventMask(this, e, t); } /** * Gets the event mask of the physics engine. * * @returns The event mask of the physics engine. * * This method is useful for getting the event mask of the physics engine, * which is used to determine which events the engine will respond to. * This is important for ensuring that the engine is responding to the correct events and not * wasting resources on unnecessary events. */ getEventMask(e) { return this._physicsPlugin.getEventMask(this, e); } /** * Sets the motion type of the physics body. Can be STATIC, DYNAMIC, or ANIMATED. */ setMotionType(e, t) { this._physicsPlugin.setMotionType(this, e, t); } /** * Gets the motion type of the physics body. Can be STATIC, DYNAMIC, or ANIMATED. */ getMotionType(e) { return this._physicsPlugin.getMotionType(this, e); } /** * Computes the mass properties of the physics object, based on the set of physics shapes this body uses. * This method is useful for computing the initial mass properties of a physics object, such as its mass, * inertia, and center of mass; these values are important for accurately simulating the physics of the * object in the physics engine, and computing values based on the shape will provide you with reasonable * intial values, which you can then customize. */ computeMassProperties(e) { return this._physicsPlugin.computeMassProperties(this, e); } /** * Sets the mass properties of the physics object. * * @param massProps - The mass properties to set. * @param instanceIndex - The index of the instance to set the mass properties for. If not defined, the mass properties will be set for all instances. * * This method is useful for setting the mass properties of a physics object, such as its mass, * inertia, and center of mass. This is important for accurately simulating the physics of the object in the physics engine. */ setMassProperties(e, t) { this._physicsPlugin.setMassProperties(this, e, t); } /** * Retrieves the mass properties of the object. * * @returns The mass properties of the object. * * This method is useful for physics simulations, as it allows the user to * retrieve the mass properties of the object, such as its mass, center of mass, * and moment of inertia. This information is necessary for accurate physics * simulations. */ getMassProperties(e) { return this._physicsPlugin.getMassProperties(this, e); } /** * Sets the linear damping of the physics body. * * @param damping - The linear damping value. * * This method is useful for controlling the linear damping of the physics body, * which is the rate at which the body's velocity decreases over time. This is useful for simulating * the effects of air resistance or other forms of friction. */ setLinearDamping(e, t) { this._physicsPlugin.setLinearDamping(this, e, t); } /** * Gets the linear damping of the physics body. * @returns The linear damping of the physics body. * * This method is useful for retrieving the linear damping of the physics body, which is the amount of * resistance the body has to linear motion. 
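* A short damping sketch (public PhysicsBody names assumed):
* @example
* body.setLinearDamping(0.2);  // linear velocity decays over time, like drag
* body.setAngularDamping(0.8); // spin decays faster
* const currentDamping = body.getLinearDamping();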
This is useful for simulating realistic physics behavior * in a game. */ getLinearDamping(e) { return this._physicsPlugin.getLinearDamping(this, e); } /** * Sets the angular damping of the physics body. * @param damping The angular damping of the body. * * This method is useful for controlling the angular velocity of a physics body. * By setting the damping, the body's angular velocity will be reduced over time, simulating the effect of friction. * This can be used to create realistic physical behavior in a physics engine. */ setAngularDamping(e, t) { this._physicsPlugin.setAngularDamping(this, e, t); } /** * Gets the angular damping of the physics body. * * @returns The angular damping of the physics body. * * This method is useful for getting the angular damping of the physics body, * which is the rate of reduction of the angular velocity over time. * This is important for simulating realistic physics behavior in a game. */ getAngularDamping(e) { return this._physicsPlugin.getAngularDamping(this, e); } /** * Sets the linear velocity of the physics object. * @param linVel - The linear velocity to set. * * This method is useful for setting the linear velocity of a physics object, * which is necessary for simulating realistic physics in a game engine. * By setting the linear velocity, the physics object will move in the direction and speed specified by the vector. * This allows for realistic physics simulations, such as simulating the motion of a ball rolling down a hill. */ setLinearVelocity(e, t) { this._physicsPlugin.setLinearVelocity(this, e, t); } /** * Gets the linear velocity of the physics body and stores it in the given vector3. * @param linVel - The vector3 to store the linear velocity in. * * This method is useful for getting the linear velocity of a physics body in a physics engine. * This can be used to determine the speed and direction of the body, which can be used to calculate the motion of the body. */ getLinearVelocityToRef(e, t) { return this._physicsPlugin.getLinearVelocityToRef(this, e, t); } /** * Gets the linear velocity of the physics body as a new vector3. * @returns The linear velocity of the physics body. * * This method is useful for getting the linear velocity of a physics body in a physics engine. * This can be used to determine the speed and direction of the body, which can be used to calculate the motion of the body. */ getLinearVelocity(e) { const t = new D(); return this.getLinearVelocityToRef(t, e), t; } /** * Sets the angular velocity of the physics object. * @param angVel - The angular velocity to set. * * This method is useful for setting the angular velocity of a physics object, which is necessary for * simulating realistic physics behavior. The angular velocity is used to determine the rate of rotation of the object, * which is important for simulating realistic motion. */ setAngularVelocity(e, t) { this._physicsPlugin.setAngularVelocity(this, e, t); } /** * Gets the angular velocity of the physics body and stores it in the given vector3. * @param angVel - The vector3 to store the angular velocity in. * * This method is useful for getting the angular velocity of a physics body, which can be used to determine the body's * rotational speed. This information can be used to create realistic physics simulations. */ getAngularVelocityToRef(e, t) { return this._physicsPlugin.getAngularVelocityToRef(this, e, t); } /** * Gets the angular velocity of the physics body as a new vector3. * @returns The angular velocity of the physics body. 
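* A short velocity / impulse sketch (public PhysicsBody names assumed):
* @example
* body.setLinearVelocity(new Vector3(0, 5, 0)); // launch the body upwards
* body.applyImpulse(new Vector3(10, 0, 0), body.getObjectCenterWorld());
* const spin = body.getAngularVelocity();       // current angular velocity as a new Vector3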
* * This method is useful for getting the angular velocity of a physics body, which can be used to determine the body's * rotational speed. This information can be used to create realistic physics simulations. */ getAngularVelocity(e) { const t = new D(); return this.getAngularVelocityToRef(t, e), t; } /** * Applies an impulse to the physics object. * * @param impulse The impulse vector. * @param location The location of the impulse. * @param instanceIndex For a instanced body, the instance to where the impulse should be applied. If not specified, the impulse is applied to all instances. * * This method is useful for applying an impulse to a physics object, which can be used to simulate physical forces such as gravity, * collisions, and explosions. This can be used to create realistic physics simulations in a game or other application. */ applyImpulse(e, t, i) { this._physicsPlugin.applyImpulse(this, e, t, i); } /** * Applies a force to the physics object. * * @param force The force vector. * @param location The location of the force. * @param instanceIndex For a instanced body, the instance to where the force should be applied. If not specified, the force is applied to all instances. * * This method is useful for applying a force to a physics object, which can be used to simulate physical forces such as gravity, * collisions, and explosions. This can be used to create realistic physics simulations in a game or other application. */ applyForce(e, t, i) { this._physicsPlugin.applyForce(this, e, t, i); } /** * Retrieves the geometry of the body from the physics plugin. * * @returns The geometry of the body. * * This method is useful for retrieving the geometry of the body from the physics plugin, which can be used for various physics calculations. */ getGeometry() { return this._physicsPlugin.getBodyGeometry(this); } /** * Returns an observable that will be notified for when a collision starts or continues for this PhysicsBody * @returns Observable */ getCollisionObservable() { return this._physicsPlugin.getCollisionObservable(this); } /** * Returns an observable that will be notified when the body has finished colliding with another body * @returns */ getCollisionEndedObservable() { return this._physicsPlugin.getCollisionEndedObservable(this); } /** * Enable or disable collision callback for this PhysicsBody. * @param enabled true if PhysicsBody's collision will rise a collision event and notifies the observable */ setCollisionCallbackEnabled(e) { this._collisionCBEnabled = e, this._physicsPlugin.setCollisionCallbackEnabled(this, e); } setCollisionEndedCallbackEnabled(e) { this._collisionEndedCBEnabled = e, this._physicsPlugin.setCollisionEndedCallbackEnabled(this, e); } /* * Get the center of the object in world space. * @param instanceIndex - If this body is instanced, the index of the instance to get the center for. * @returns geometric center of the associated mesh */ getObjectCenterWorld(e) { const t = new D(); return this.getObjectCenterWorldToRef(t, e); } /* * Get the center of the object in world space. * @param ref - The vector3 to store the result in. * @param instanceIndex - If this body is instanced, the index of the instance to get the center for. * @returns geometric center of the associated mesh */ getObjectCenterWorldToRef(e, t) { var i; if (((i = this._pluginDataInstances) === null || i === void 0 ? 
void 0 : i.length) > 0) { const r = t || 0, s = this.transformNode._thinInstanceDataStorage.matrixData; s && e.set(s[r * 16 + 12], s[r * 16 + 13], s[r * 16 + 14]); } else e.copyFrom(this.transformNode.position); return e; } /** * Adds a constraint to the physics engine. * * @param childBody - The body to which the constraint will be applied. * @param constraint - The constraint to be applied. * @param instanceIndex - If this body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. * @param childInstanceIndex - If the child body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. * */ addConstraint(e, t, i, r) { this._physicsPlugin.addConstraint(this, e, t, i, r); } /** * Sync with a bone * @param bone The bone that the impostor will be synced to. * @param boneMesh The mesh that the bone is influencing. * @param jointPivot The pivot of the joint / bone in local space. * @param distToJoint Optional distance from the impostor to the joint. * @param adjustRotation Optional quaternion for adjusting the local rotation of the bone. * @param boneAxis Optional vector3 axis the bone is aligned with */ syncWithBone(e, t, i, r, s, n) { const a = this.transformNode; if (a.rotationQuaternion) if (s) { const u = de.Quaternion[0]; e.getRotationQuaternionToRef(qr.WORLD, t, u), u.multiplyToRef(s, a.rotationQuaternion); } else e.getRotationQuaternionToRef(qr.WORLD, t, a.rotationQuaternion); const l = de.Vector3[0], o = de.Vector3[1]; n || (n = de.Vector3[2], n.x = 0, n.y = 1, n.z = 0), e.getDirectionToRef(n, t, o), e.getAbsolutePositionToRef(t, l), r == null && i && (r = i.length()), r != null && (l.x += o.x * r, l.y += o.y * r, l.z += o.z * r), a.setAbsolutePosition(l); } /** * Executes a callback on the body or all of the instances of a body * @param callback the callback to execute */ iterateOverAllInstances(e) { var t; if (((t = this._pluginDataInstances) === null || t === void 0 ? void 0 : t.length) > 0) for (let i = 0; i < this._pluginDataInstances.length; i++) e(this, i); else e(this, void 0); } /** * Sets the gravity factor of the physics body * @param factor the gravity factor to set * @param instanceIndex the instance of the body to set, if undefined all instances will be set */ setGravityFactor(e, t) { this._physicsPlugin.setGravityFactor(this, e, t); } /** * Gets the gravity factor of the physics body * @param instanceIndex the instance of the body to get, if undefined the value of first instance will be returned * @returns the gravity factor */ getGravityFactor(e) { return this._physicsPlugin.getGravityFactor(this, e); } /** * Set the target transformation (position and rotation) of the body, such that the body will set its velocity to reach that target * @param position The target position * @param rotation The target rotation * @param instanceIndex The index of the instance in an instanced body */ setTargetTransform(e, t, i) { this._physicsPlugin.setTargetTransform(this, e, t, i); } /** * Returns if the body has been disposed. * @returns true if disposed, false otherwise. */ get isDisposed() { return this._isDisposed; } /** * Disposes the body from the physics engine. * * This method is useful for cleaning up the physics engine when a body is no longer needed. Disposing the body will free up resources and prevent memory leaks. 
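* A lifecycle sketch (public PhysicsBody names assumed):
* @example
* body.setGravityFactor(0.5); // this body feels half of the world gravity
* body.setCollisionCallbackEnabled(true);
* const observer = body.getCollisionObservable().add((ev) => console.log(ev.type));
* // ...later, when the body is no longer needed:
* body.getCollisionObservable().remove(observer);
* body.dispose();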
*/ dispose() { this._isDisposed || (this._collisionCBEnabled && this.setCollisionCallbackEnabled(!1), this._collisionEndedCBEnabled && this.setCollisionEndedCallbackEnabled(!1), this._nodeDisposeObserver && (this.transformNode.onDisposeObservable.remove(this._nodeDisposeObserver), this._nodeDisposeObserver = null), this._physicsEngine.removeBody(this), this._physicsPlugin.removeBody(this), this._physicsPlugin.disposeBody(this), this.transformNode.physicsBody = null, this._pluginData = null, this._pluginDataInstances.length = 0, this._isDisposed = !0); } } var OC; (function(c) { c[c.FREE = 0] = "FREE", c[c.LIMITED = 1] = "LIMITED", c[c.LOCKED = 2] = "LOCKED"; })(OC || (OC = {})); var HA; (function(c) { c[c.LINEAR_X = 0] = "LINEAR_X", c[c.LINEAR_Y = 1] = "LINEAR_Y", c[c.LINEAR_Z = 2] = "LINEAR_Z", c[c.ANGULAR_X = 3] = "ANGULAR_X", c[c.ANGULAR_Y = 4] = "ANGULAR_Y", c[c.ANGULAR_Z = 5] = "ANGULAR_Z", c[c.LINEAR_DISTANCE = 6] = "LINEAR_DISTANCE"; })(HA || (HA = {})); var af; (function(c) { c[c.BALL_AND_SOCKET = 1] = "BALL_AND_SOCKET", c[c.DISTANCE = 2] = "DISTANCE", c[c.HINGE = 3] = "HINGE", c[c.SLIDER = 4] = "SLIDER", c[c.LOCK = 5] = "LOCK", c[c.PRISMATIC = 6] = "PRISMATIC", c[c.SIX_DOF = 7] = "SIX_DOF"; })(af || (af = {})); var Cc; (function(c) { c[c.SPHERE = 0] = "SPHERE", c[c.CAPSULE = 1] = "CAPSULE", c[c.CYLINDER = 2] = "CYLINDER", c[c.BOX = 3] = "BOX", c[c.CONVEX_HULL = 4] = "CONVEX_HULL", c[c.CONTAINER = 5] = "CONTAINER", c[c.MESH = 6] = "MESH", c[c.HEIGHTFIELD = 7] = "HEIGHTFIELD"; })(Cc || (Cc = {})); var Z8; (function(c) { c[c.NONE = 0] = "NONE", c[c.VELOCITY = 1] = "VELOCITY", c[c.POSITION = 2] = "POSITION"; })(Z8 || (Z8 = {})); var C4; (function(c) { c.COLLISION_STARTED = "COLLISION_STARTED", c.COLLISION_CONTINUED = "COLLISION_CONTINUED", c.COLLISION_FINISHED = "COLLISION_FINISHED", c.TRIGGER_ENTERED = "TRIGGER_ENTERED", c.TRIGGER_EXITED = "TRIGGER_EXITED"; })(C4 || (C4 = {})); var P4; (function(c) { c[c.STATIC = 0] = "STATIC", c[c.ANIMATED = 1] = "ANIMATED", c[c.DYNAMIC = 2] = "DYNAMIC"; })(P4 || (P4 = {})); class fx { /** * Constructs a new physics shape. * @param options The options for the physics shape. These are: * * type: The type of the shape. This can be one of the following: SPHERE, BOX, CAPSULE, CYLINDER, CONVEX_HULL, MESH, HEIGHTFIELD, CONTAINER * * parameters: The parameters of the shape. * * pluginData: The plugin data of the shape. This is used if you already have a reference to the object on the plugin side. * You need to specify either type or pluginData. * @param scene The scene the shape belongs to. * * This code is useful for creating a new physics shape with the given type, options, and scene. * It also checks that the physics engine and plugin version are correct. * If not, it throws an error. This ensures that the shape is created with the correct parameters and is compatible with the physics engine. */ constructor(e, t) { var i; if (this._pluginData = void 0, this._isTrigger = !1, this._isDisposed = !1, !t) return; const r = t.getPhysicsEngine(); if (!r) throw new Error("No Physics Engine available."); if (r.getPluginVersion() != 2) throw new Error("Plugin version is incorrect. 
Expected version 2."); const s = r.getPhysicsPlugin(); if (!s) throw new Error("No Physics Plugin available."); if (this._physicsPlugin = s, e.pluginData !== void 0 && e.pluginData !== null) this._pluginData = e.pluginData, this._type = this._physicsPlugin.getShapeType(this); else if (e.type !== void 0 && e.type !== null) { this._type = e.type; const n = (i = e.parameters) !== null && i !== void 0 ? i : {}; this._physicsPlugin.initShape(this, e.type, n); } } /** * Returns the string "PhysicsShape". * @returns "PhysicsShape" */ getClassName() { return "PhysicsShape"; } /** * Returns the type of the physics shape. * @returns The type of the physics shape. */ get type() { return this._type; } /** * Set the membership mask of a shape. This is a bitfield of arbitrary * "categories" to which the shape is a member. This is used in combination * with the collide mask to determine if this shape should collide with * another. * * @param membershipMask Bitfield of categories of this shape. */ set filterMembershipMask(e) { this._physicsPlugin.setShapeFilterMembershipMask(this, e); } /** * Get the membership mask of a shape. * @returns Bitmask of categories which this shape is a member of. */ get filterMembershipMask() { return this._physicsPlugin.getShapeFilterMembershipMask(this); } /** * Sets the collide mask of a shape. This is a bitfield of arbitrary * "categories" to which this shape collides with. Given two shapes, * the engine will check if the collide mask and membership overlap: * shapeA.filterMembershipMask & shapeB.filterCollideMask * * If this value is zero (i.e. shapeB only collides with categories * which shapeA is _not_ a member of) then the shapes will not collide. * * Note, the engine will also perform the same test with shapeA and * shapeB swapped; the shapes will not collide if either shape has * a collideMask which prevents collision with the other shape. * * @param collideMask Bitmask of categories this shape should collide with */ set filterCollideMask(e) { this._physicsPlugin.setShapeFilterCollideMask(this, e); } /** * * @returns Bitmask of categories that this shape should collide with */ get filterCollideMask() { return this._physicsPlugin.getShapeFilterCollideMask(this); } /** * * @param material */ set material(e) { this._physicsPlugin.setMaterial(this, e), this._material = e; } /** * Returns the material of the physics shape. * @returns The material of the physics shape. */ get material() { return this._material; } /** * Sets the density of the physics shape. * @param density The density of the physics shape. */ set density(e) { this._physicsPlugin.setDensity(this, e); } /** * Returns the density of the physics shape. * @returns The density of the physics shape. */ get density() { return this._physicsPlugin.getDensity(this); } /** * Utility to add a child shape to this container, * automatically computing the relative transform between * the container shape and the child instance. 
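* A hedged sketch of the intended pattern (assumption: the public names PhysicsShapeContainer, PhysicsShapeBox, Vector3 and Quaternion match the mangled classes in this bundle):
* @example
* // build a compound shape and let the relative child transform be derived from the two nodes
* const container = new PhysicsShapeContainer(scene);
* const childShape = new PhysicsShapeBox(Vector3.Zero(), Quaternion.Identity(), new Vector3(1, 1, 1), scene);
* container.addChildFromParent(parentMesh, childShape, childMesh);
* body.shape = container;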
* * @param parentTransform The transform node associated with this shape * @param newChild The new PhysicsShape to add * @param childTransform The transform node associated with the child shape */ addChildFromParent(e, t, i) { const r = i.computeWorldMatrix(!0), s = e.computeWorldMatrix(!0), n = de.Matrix[0]; r.multiplyToRef(Ae.Invert(s), n); const a = de.Vector3[0], l = de.Quaternion[0], o = de.Vector3[1]; n.decompose(o, l, a), this._physicsPlugin.addChild(this, t, a, l, o); } /** * Adds a child shape to a container with an optional transform * @param newChild The new PhysicsShape to add * @param translation Optional position of the child shape relative to this shape * @param rotation Optional rotation of the child shape relative to this shape * @param scale Optional scale of the child shape relative to this shape */ addChild(e, t, i, r) { this._physicsPlugin.addChild(this, e, t, i, r); } /** * Removes a child shape from this shape. * @param childIndex The index of the child shape to remove */ removeChild(e) { this._physicsPlugin.removeChild(this, e); } /** * Returns the number of children of a physics shape. * @returns The number of children of a physics shape. */ getNumChildren() { return this._physicsPlugin.getNumChildren(this); } /** * Returns the bounding box of the physics shape. * @returns The bounding box of the physics shape. */ getBoundingBox() { return this._physicsPlugin.getBoundingBox(this); } set isTrigger(e) { this._isTrigger !== e && (this._isTrigger = e, this._physicsPlugin.setTrigger(this, e)); } get isTrigger() { return this._isTrigger; } /** * Dispose the shape and release its associated resources. */ dispose() { this._isDisposed || (this._physicsPlugin.disposeShape(this), this._isDisposed = !0); } } class ij extends fx { /** * Constructor for the Sphere Shape * @param center local center of the sphere * @param radius radius * @param scene scene to attach to */ constructor(e, t, i) { super({ type: Cc.SPHERE, parameters: { center: e, radius: t } }, i); } /** * Derive an approximate sphere from the mesh. * @param mesh node from which to derive the sphere shape * @returns PhysicsShapeSphere */ static FromMesh(e) { const t = e.getBoundingInfo(), i = t.boundingSphere.center, r = t.boundingBox.extendSize, s = Math.max(r.x, r.y, r.z); return new ij(i, s, e.getScene()); } } class rj extends fx { /** * * @param pointA Starting point that defines the capsule segment * @param pointB ending point of that same segment * @param radius radius * @param scene scene to attach to */ constructor(e, t, i, r) { super({ type: Cc.CAPSULE, parameters: { pointA: e, pointB: t, radius: i } }, r); } /** * Derive an approximate capsule from the mesh. Note, this is * not the optimal bounding capsule. * @param mesh Node from which to derive a cylinder shape */ static FromMesh(e) { const t = e.getBoundingInfo(), i = t.boundingBox.extendSize.x, r = new D(0, t.boundingBox.extendSize.y - i, 0), s = t.boundingBox.center.add(r), n = t.boundingBox.center.subtract(r); return new rj(s, n, i, e.getScene()); } } class sj extends fx { /** * * @param pointA Starting point that defines the cylinder segment * @param pointB ending point of that same segment * @param radius radius * @param scene scene to attach to */ constructor(e, t, i, r) { super({ type: Cc.CYLINDER, parameters: { pointA: e, pointB: t, radius: i } }, r); } /** * Derive an approximate cylinder from the mesh. Note, this is * not the optimal bounding cylinder. 
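* A hedged sketch (assumption: this class is the public PhysicsShapeCylinder and body is an existing PhysicsBody):
* @example
* // derive a quick cylinder collider from a mesh's bounding box
* const cylinderShape = PhysicsShapeCylinder.FromMesh(barrelMesh);
* body.shape = cylinderShape;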
* @param mesh Node from which to derive a cylinder shape */ static FromMesh(e) { const t = e.getBoundingInfo(), i = t.boundingBox.extendSize.x, r = new D(0, t.boundingBox.extendSize.y, 0), s = t.boundingBox.center.add(r), n = t.boundingBox.center.subtract(r); return new sj(s, n, i, e.getScene()); } } class nj extends fx { /** * * @param center local center of the box * @param rotation local orientation * @param extents size of the box in each direction * @param scene scene to attach to */ constructor(e, t, i, r) { super({ type: Cc.BOX, parameters: { center: e, rotation: t, extents: i } }, r); } /** * * @param mesh * @returns PhysicsShapeBox */ static FromMesh(e) { const t = e.getBoundingInfo(), i = t.boundingBox.center, r = t.boundingBox.extendSize.scale(2); return new nj(i, Ze.Identity(), r, e.getScene()); } } class oge extends fx { /** * * @param mesh the mesh to be used as topology infos for the convex hull * @param scene scene to attach to */ constructor(e, t) { super({ type: Cc.CONVEX_HULL, parameters: { mesh: e } }, t); } } class lge extends fx { /** * * @param mesh the mesh topology that will be used to create the shape * @param scene scene to attach to */ constructor(e, t) { super({ type: Cc.MESH, parameters: { mesh: e } }, t); } } class cge extends fx { /** * Constructor of the Shape container * @param scene scene to attach to */ constructor(e) { super({ type: Cc.CONTAINER, parameters: {} }, e); } } class M5 { /** * Constructs a new constraint for the physics constraint. * @param type The type of constraint to create. * @param options The options for the constraint. * @param scene The scene the constraint belongs to. * * This code is useful for creating a new constraint for the physics engine. It checks if the scene has a physics engine, and if the plugin version is correct. * If all checks pass, it initializes the constraint with the given type and options. */ constructor(e, t, i) { if (this._pluginData = void 0, !i) throw new Error("Missing scene parameter for constraint constructor."); const r = i.getPhysicsEngine(); if (!r) throw new Error("No Physics Engine available."); if (r.getPluginVersion() != 2) throw new Error("Plugin version is incorrect. Expected version 2."); const s = r.getPhysicsPlugin(); if (!s) throw new Error("No Physics Plugin available."); this._physicsPlugin = s, this._options = t, this._type = e; } /** * Gets the type of the constraint. * * @returns The type of the constraint. * */ get type() { return this._type; } /** * Retrieves the options of the physics constraint. * * @returns The physics constraint parameters. * */ get options() { return this._options; } /** * Enable/disable the constraint * @param isEnabled value for the constraint */ set isEnabled(e) { this._physicsPlugin.setEnabled(this, e); } /** * * @returns true if constraint is enabled */ get isEnabled() { return this._physicsPlugin.getEnabled(this); } /** * Enables or disables collisions for the physics engine. * * @param isEnabled - A boolean value indicating whether collisions should be enabled or disabled. * */ set isCollisionsEnabled(e) { this._physicsPlugin.setCollisionsEnabled(this, e); } /** * Gets whether collisions are enabled for this physics object. * * @returns `true` if collisions are enabled, `false` otherwise. 
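* A hedged sketch (assumption: the public names DistanceConstraint and PhysicsBody.addConstraint correspond to the mangled ones here):
* @example
* // keep two bodies within 2 units of each other and let them overlap freely
* const constraint = new DistanceConstraint(2, scene);
* constraint.isCollisionsEnabled = false;
* bodyA.addConstraint(bodyB, constraint);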
* */ get isCollisionsEnabled() { return this._physicsPlugin.getCollisionsEnabled(this); } /** * Gets all bodies that are using this constraint * @returns */ getBodiesUsingConstraint() { return this._physicsPlugin.getBodiesUsingConstraint(this); } /** * Disposes the constraint from the physics engine. * * This method is useful for cleaning up the physics engine when a body is no longer needed. Disposing the body will free up resources and prevent memory leaks. */ dispose() { this._physicsPlugin.disposeConstraint(this); } } class uge { } class xne extends M5 { constructor(e, t, i) { super(af.SIX_DOF, e, i), this.limits = t; } /** * Sets the friction of the given axis of the physics engine. * @param axis - The axis of the physics engine to set the friction for. * @param friction - The friction to set for the given axis. * */ setAxisFriction(e, t) { this._physicsPlugin.setAxisFriction(this, e, t); } /** * Gets the friction of the given axis of the physics engine. * @param axis - The axis of the physics engine. * @returns The friction of the given axis, or null if the constraint hasn't been initialized yet. * */ getAxisFriction(e) { return this._physicsPlugin.getAxisFriction(this, e); } /** * Sets the limit mode for the given axis of the constraint. * @param axis The axis to set the limit mode for. * @param limitMode The limit mode to set. * * This method is useful for setting the limit mode for a given axis of the constraint. This is important for * controlling the behavior of the physics engine when the constraint is reached. By setting the limit mode, * the engine can be configured to either stop the motion of the objects, or to allow them to continue * moving beyond the constraint. */ setAxisMode(e, t) { this._physicsPlugin.setAxisMode(this, e, t); } /** * Gets the limit mode of the given axis of the constraint. * * @param axis - The axis of the constraint. * @returns The limit mode of the given axis, or null if the constraint hasn't been initialized yet. * */ getAxisMode(e) { return this._physicsPlugin.getAxisMode(this, e); } /** * Sets the minimum limit of a given axis of a constraint. * @param axis - The axis of the constraint. * @param minLimit - The minimum limit of the axis. * */ setAxisMinLimit(e, t) { this._physicsPlugin.setAxisMinLimit(this, e, t); } /** * Gets the minimum limit of the given axis of the physics engine. * @param axis - The axis of the physics engine. * @returns The minimum limit of the given axis, or null if the constraint hasn't been initialized yet. * */ getAxisMinLimit(e) { return this._physicsPlugin.getAxisMinLimit(this, e); } /** * Sets the maximum limit of the given axis for the physics engine. * @param axis - The axis to set the limit for. * @param limit - The maximum limit of the axis. * * This method is useful for setting the maximum limit of the given axis for the physics engine, * which can be used to control the movement of the physics object. This helps to ensure that the * physics object does not move beyond the given limit. */ setAxisMaxLimit(e, t) { this._physicsPlugin.setAxisMaxLimit(this, e, t); } /** * Gets the maximum limit of the given axis of the physics engine. * @param axis - The axis of the physics engine. * @returns The maximum limit of the given axis, or null if the constraint hasn't been initialized yet. * */ getAxisMaxLimit(e) { return this._physicsPlugin.getAxisMaxLimit(this, e); } /** * Sets the motor type of the given axis of the constraint. * @param axis - The axis of the constraint. * @param motorType - The type of motor to use. 
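* A hedged sketch (assumption: the public enums PhysicsConstraintAxis and PhysicsConstraintMotorType match the mangled enums in this bundle):
* @example
* // drive the angular Z axis like a motorized hinge
* constraint.setAxisMotorType(PhysicsConstraintAxis.ANGULAR_Z, PhysicsConstraintMotorType.VELOCITY);
* constraint.setAxisMotorTarget(PhysicsConstraintAxis.ANGULAR_Z, Math.PI); // target angular velocity, rad/s
* constraint.setAxisMotorMaxForce(PhysicsConstraintAxis.ANGULAR_Z, 100);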
* @returns void * */ setAxisMotorType(e, t) { this._physicsPlugin.setAxisMotorType(this, e, t); } /** * Gets the motor type of the specified axis of the constraint. * * @param axis - The axis of the constraint. * @returns The motor type of the specified axis, or null if the constraint hasn't been initialized yet. * */ getAxisMotorType(e) { return this._physicsPlugin.getAxisMotorType(this, e); } /** * Sets the target velocity of the motor associated with the given axis of the constraint. * @param axis - The axis of the constraint. * @param target - The target velocity of the motor. * * This method is useful for setting the target velocity of the motor associated with the given axis of the constraint. */ setAxisMotorTarget(e, t) { this._physicsPlugin.setAxisMotorTarget(this, e, t); } /** * Gets the target velocity of the motor associated to the given constraint axis. * @param axis - The constraint axis associated to the motor. * @returns The target velocity of the motor, or null if the constraint hasn't been initialized yet. * */ getAxisMotorTarget(e) { return this._physicsPlugin.getAxisMotorTarget(this, e); } /** * Sets the maximum force of the motor of the given axis of the constraint. * @param axis - The axis of the constraint. * @param maxForce - The maximum force of the motor. * */ setAxisMotorMaxForce(e, t) { this._physicsPlugin.setAxisMotorMaxForce(this, e, t); } /** * Gets the maximum force of the motor of the given axis of the constraint. * @param axis - The axis of the constraint. * @returns The maximum force of the motor, or null if the constraint hasn't been initialized yet. * */ getAxisMotorMaxForce(e) { return this._physicsPlugin.getAxisMotorMaxForce(this, e); } } class bne extends M5 { constructor(e, t, i, r, s) { super(af.BALL_AND_SOCKET, { pivotA: e, pivotB: t, axisA: i, axisB: r }, s); } } class hge extends M5 { constructor(e, t) { super(af.DISTANCE, { maxDistance: e }, t); } } class dge extends M5 { constructor(e, t, i, r, s) { super(af.HINGE, { pivotA: e, pivotB: t, axisA: i, axisB: r }, s); } } class fge extends M5 { constructor(e, t, i, r, s) { super(af.SLIDER, { pivotA: e, pivotB: t, axisA: i, axisB: r }, s); } } class pge extends M5 { constructor(e, t, i, r, s) { super(af.LOCK, { pivotA: e, pivotB: t, axisA: i, axisB: r }, s); } } class _ge extends M5 { constructor(e, t, i, r, s) { super(af.PRISMATIC, { pivotA: e, pivotB: t, axisA: i, axisB: r }, s); } } class mge extends xne { constructor(e, t, i, r, s, n, a, l, o) { super({ pivotA: e, pivotB: t, axisA: i, axisB: r }, [{ axis: HA.LINEAR_DISTANCE, minLimit: s, maxLimit: n, stiffness: a, damping: l }], o); } } var wC; (function(c) { c[c.GEOMETRIC_MEAN = 0] = "GEOMETRIC_MEAN", c[c.MINIMUM = 1] = "MINIMUM", c[c.MAXIMUM = 2] = "MAXIMUM", c[c.ARITHMETIC_MEAN = 3] = "ARITHMETIC_MEAN", c[c.MULTIPLY = 4] = "MULTIPLY"; })(wC || (wC = {})); class Ene { constructor(e, t, i = { mass: 0 }, r) { var s; if (this.transformNode = e, this.type = t, this._options = i, this._scene = r, this._disposeShapeWhenDisposed = !0, !this.transformNode) { Ce.Error("No object was provided. A physics object is obligatory"); return; } const n = e; if (this.transformNode.parent && this._options.mass !== 0 && n.hasThinInstances && Ce.Warn("A physics body has been created for an object which has a parent and thin instances. Babylon physics currently works in local space so unexpected issues may occur."), !this._scene && e.getScene && (this._scene = e.getScene()), !this._scene) return; this._options.mass = i.mass === void 0 ? 
0 : i.mass, this._options.friction = i.friction === void 0 ? 0.2 : i.friction, this._options.restitution = i.restitution === void 0 ? 0.2 : i.restitution; const a = this._options.mass === 0 ? P4.STATIC : P4.DYNAMIC, l = (s = this._options.startAsleep) !== null && s !== void 0 ? s : !1; this.body = new KU(e, a, l, this._scene), this._addSizeOptions(), t.getClassName && t.getClassName() === "PhysicsShape" ? (this.shape = t, this._disposeShapeWhenDisposed = !1) : this.shape = new fx({ type: t, parameters: this._options }, this._scene), this._options.isTriggerShape && (this.shape.isTrigger = !0), this.material = { friction: this._options.friction, restitution: this._options.restitution }, this.body.shape = this.shape, this.shape.material = this.material, this.body.setMassProperties({ mass: this._options.mass }), this._nodeDisposeObserver = this.transformNode.onDisposeObservable.add(() => { this.dispose(); }); } _getObjectBoundingBox() { return this.transformNode.getRawBoundingInfo ? this.transformNode.getRawBoundingInfo().boundingBox : new fg(new D(-0.5, -0.5, -0.5), new D(0.5, 0.5, 0.5)); } _hasVertices(e) { return (e == null ? void 0 : e.getTotalVertices()) > 0; } _addSizeOptions() { var e, t, i, r, s, n, a, l; this.transformNode.computeWorldMatrix(!0); const o = this._getObjectBoundingBox(), u = de.Vector3[0]; u.copyFrom(o.extendSize), u.scaleInPlace(2), u.multiplyInPlace(this.transformNode.scaling), u.x = Math.abs(u.x), u.y = Math.abs(u.y), u.z = Math.abs(u.z); const h = de.Vector3[1]; if (h.copyFrom(o.minimum), h.multiplyInPlace(this.transformNode.scaling), !this._options.center) { const d = new D(); d.copyFrom(o.center), d.multiplyInPlace(this.transformNode.scaling), this._options.center = d; } switch (this.type) { case Cc.SPHERE: !this._options.radius && yt.WithinEpsilon(u.x, u.y, 1e-4) && yt.WithinEpsilon(u.x, u.z, 1e-4) ? this._options.radius = u.x / 2 : this._options.radius || (Ce.Warn("Non uniform scaling is unsupported for sphere shapes. Setting the radius to the biggest bounding box extent."), this._options.radius = Math.max(u.x, u.y, u.z) / 2); break; case Cc.CAPSULE: { const d = u.x / 2; this._options.radius = (e = this._options.radius) !== null && e !== void 0 ? e : d, this._options.pointA = (t = this._options.pointA) !== null && t !== void 0 ? t : new D(0, h.y + d, 0), this._options.pointB = (i = this._options.pointB) !== null && i !== void 0 ? i : new D(0, h.y + u.y - d, 0); } break; case Cc.CYLINDER: { const d = u.x / 2; this._options.radius = (r = this._options.radius) !== null && r !== void 0 ? r : d, this._options.pointA = (s = this._options.pointA) !== null && s !== void 0 ? s : new D(0, h.y, 0), this._options.pointB = (n = this._options.pointB) !== null && n !== void 0 ? n : new D(0, h.y + u.y, 0); } break; case Cc.MESH: case Cc.CONVEX_HULL: if (!this._options.mesh && this._hasVertices(this.transformNode)) this._options.mesh = this.transformNode; else if (!this._options.mesh || !this._hasVertices(this._options.mesh)) throw new Error("No valid mesh was provided for mesh or convex hull shape parameter. Please provide a mesh with valid geometry (number of vertices greater than 0)."); break; case Cc.BOX: this._options.extents = (a = this._options.extents) !== null && a !== void 0 ? a : new D(u.x, u.y, u.z), this._options.rotation = (l = this._options.rotation) !== null && l !== void 0 ? 
l : Ze.Identity(); break; } } /** * Releases the body, shape and material */ dispose() { this._nodeDisposeObserver && (this.body.transformNode.onDisposeObservable.remove(this._nodeDisposeObserver), this._nodeDisposeObserver = null), this.body.dispose(), this._disposeShapeWhenDisposed && this.shape.dispose(); } } class gge { } class vge { /** * Construct a new Ragdoll object. Once ready, it can be made dynamic by calling `Ragdoll` method * @param skeleton The skeleton containing bones to be physicalized * @param mesh The mesh used by the skeleton * @param config an array of `RagdollBoneProperties` corresponding to bones and their properties used to instanciate physics bodies */ constructor(e, t, i) { this._boxConfigs = new Array(), this._bones = new Array(), this._initialRotation = new Array(), this._boneNames = [], this._transforms = new Array(), this._aggregates = new Array(), this._ragdollMode = !1, this._rootBoneName = "", this._rootBoneIndex = -1, this._mass = 10, this._restitution = 0, this.pauseSync = !1, this._defaultJoint = af.HINGE, this._defaultJointMin = -90, this._defaultJointMax = 90, this._skeleton = e, this._scene = e.getScene(), this._mesh = t, this._config = i, this._boxConfigs = [], this._putBoxesInBoneCenter = !1, this._defaultJoint = af.HINGE, this._boneOffsetAxis = bl.Y; } _createColliders() { var e, t, i; this._mesh.computeWorldMatrix(); const r = this._config; for (let s = 0; s < r.length; s++) { const n = r[s].bone !== void 0 ? [r[s].bone] : r[s].bones; for (let a = 0; a < n.length; a++) { const l = this._skeleton.bones[this._skeleton.getBoneIndexByName(n[a])]; if (l == null) return; const o = { width: this._config[s].width, depth: this._config[s].depth, height: this._config[s].height, size: this._config[s].size }; o.width = (e = o.width) !== null && e !== void 0 ? e : o.size, o.depth = (t = o.depth) !== null && t !== void 0 ? t : o.size, o.height = (i = o.height) !== null && i !== void 0 ? i : o.size; const u = new xi(n[a] + "_transform", this._scene); o.joint = r[s].joint !== void 0 ? r[s].joint : this._defaultJoint, o.rotationAxis = r[s].rotationAxis !== void 0 ? r[s].rotationAxis : bl.X, o.min = r[s].min !== void 0 ? r[s].min : this._defaultJointMin, o.max = r[s].max !== void 0 ? r[s].max : this._defaultJointMax; let h = 0; r[s].putBoxInBoneCenter !== void 0 && r[s].putBoxInBoneCenter || this._putBoxesInBoneCenter ? (l.length === void 0 && Ce.Log("The length property is not defined for bone " + l.name), h = l.length / 2) : r[s].boxOffset !== void 0 && (h = r[s].boxOffset), o.boxOffset = h; const d = r[s].boneOffsetAxis !== void 0 ? r[s].boneOffsetAxis : this._boneOffsetAxis, f = l.getDirection(d, this._mesh); o.boneOffsetAxis = d, u.position = l.getAbsolutePosition(this._mesh).add(f.scale(h)); const p = r[s].mass !== void 0 ? r[s].mass : this._mass, m = r[s].restitution !== void 0 ? 
r[s].restitution : this._restitution, _ = new Ene(u, Cc.BOX, { mass: p, restitution: m, friction: 0.6, extents: new D(o.width, o.height, o.depth) }, this._scene); _.body.setCollisionCallbackEnabled(!0), _.body.disablePreStep = !1, this._aggregates.push(_), this._bones.push(l), this._boneNames.push(l.name), this._transforms.push(u), this._boxConfigs.push(o), this._initialRotation.push(l.getRotationQuaternion(qr.WORLD)); } } } _initJoints() { this._mesh.computeWorldMatrix(); for (let e = 0; e < this._bones.length; e++) { if (e == this._rootBoneIndex) continue; const t = this._findNearestParent(e); if (t == null) { Ce.Warn("Couldn't find a nearest parent bone in the configs for bone called " + this._boneNames[e]); return; } const i = this._boneNames.indexOf(t.name); let r = this._bones[e].getAbsolutePosition(this._mesh).subtract(this._transforms[i].position); const s = this._transforms[i].computeWorldMatrix(), n = Ae.Invert(s); r = D.TransformCoordinates(this._bones[e].getAbsolutePosition(this._mesh), n); const a = this._bones[e].getAbsolutePosition(this._mesh), l = this._transforms[e].position.clone(), o = a.subtract(l), u = new bne(r, o, this._boxConfigs[e].rotationAxis, this._boxConfigs[e].rotationAxis, this._scene); this._aggregates[i].body.addConstraint(this._aggregates[e].body, u); } } _syncBonesAndBoxes() { if (!this.pauseSync && this._ragdollMode) { this._bones[this._rootBoneIndex].getDirectionToRef(this._boxConfigs[this._rootBoneIndex].boneOffsetAxis, this._mesh, de.Vector3[0]), de.Vector3[0].scaleInPlace(this._boxConfigs[this._rootBoneIndex].boxOffset), this._bones[this._rootBoneIndex].getAbsolutePositionToRef(this._mesh, de.Vector3[1]), de.Vector3[1].addInPlace(de.Vector3[0]), this._bones[this._rootBoneIndex].setAbsolutePosition(this._transforms[this._rootBoneIndex].position, this._mesh), this._addImpostorRotationToBone(this._rootBoneIndex); const e = this._aggregates[this._rootBoneIndex].body.transformNode.position; de.Vector3[1].subtractToRef(e, de.Vector3[0]), this._mesh.position.subtractToRef(de.Vector3[0], this._mesh.position); for (let t = 0; t < this._bones.length; t++) t != this._rootBoneIndex && this._addImpostorRotationToBone(t); } } _addImpostorRotationToBone(e) { var t, i, r; const s = (t = this._mesh.rotationQuaternion) !== null && t !== void 0 ? t : Ze.FromEulerAngles(this._mesh.rotation.x, this._mesh.rotation.y, this._mesh.rotation.z), n = this._initialRotation[e], a = (r = (i = this._aggregates[e].body) === null || i === void 0 ? void 0 : i.transformNode) === null || r === void 0 ? void 0 : r.rotationQuaternion; s.multiplyToRef(n, de.Quaternion[1]), a.multiplyToRef(de.Quaternion[1], de.Quaternion[0]), this._bones[e].setRotationQuaternion(de.Quaternion[0], qr.WORLD, this._mesh); } // Return true if root bone is valid/exists in this.bonesNames. false otherwise. _defineRootBone() { const e = this._skeleton.getChildren(); return e.length != 1 ? (Ce.Log("Ragdoll creation failed: there can only be one root in the skeleton."), !1) : (this._rootBoneName = e[0].name, this._rootBoneIndex = this._boneNames.indexOf(this._rootBoneName), this._rootBoneIndex == -1 ? (Ce.Log("Ragdoll creation failed: the array boneNames doesn't have the root bone. The root bone is " + this._skeleton.getChildren()), !1) : !0); } _findNearestParent(e) { let t = this._bones[e].getParent(); do { if (t != null && this._boneNames.includes(t.name)) break; t = t == null ? 
void 0 : t.getParent(); } while (t != null); return t; } _init() { this._createColliders(), this._defineRootBone() && (this._initJoints(), this._scene.registerBeforeRender(() => { this._syncBonesAndBoxes(); })); } /** * Enable ragdoll mode. Create physics objects and make them dynamic. */ ragdoll() { this._ragdollMode || (this._ragdollMode = !0, this._init()); } /** * Dispose resources and remove physics objects */ dispose() { this._aggregates.forEach((e) => { e.dispose(); }); } } class Age { /** * Constructor of the mesh accumulator * @param mesh - The mesh used to compute the world matrix. * @param collectIndices - use mesh indices * @param scene - The scene used to determine the right handed system. * * Merge mesh and its children so whole hierarchy can be used as a mesh shape or convex hull */ constructor(e, t, i) { this._vertices = [], this._indices = [], this._isRightHanded = i.useRightHandedSystem, this._collectIndices = t; } /** * Adds a mesh to the physics engine. * @param mesh The mesh to add. * @param includeChildren Whether to include the children of the mesh. * * This method adds a mesh to the physics engine by computing the world matrix, * multiplying it with the body from world matrix, and then transforming the * coordinates of the mesh's vertices. It also adds the indices of the mesh * to the physics engine. If includeChildren is true, it will also add the * children of the mesh to the physics engine, ignoring any children which * have a physics impostor. This is useful for creating a physics engine * that accurately reflects the mesh and its children. */ addNodeMeshes(e, t) { e.computeWorldMatrix(!0); const i = de.Matrix[0]; if (Ae.ScalingToRef(e.absoluteScaling.x, e.absoluteScaling.y, e.absoluteScaling.z, i), e instanceof ke ? this._addMesh(e, i) : e instanceof Cg && this._addMesh(e.sourceMesh, i), t) { const r = de.Matrix[1]; e.computeWorldMatrix().invertToRef(r); const s = de.Matrix[2]; r.multiplyToRef(i, s), e.getChildMeshes(!1).filter((a) => !a.physicsBody).forEach((a) => { const l = a.computeWorldMatrix(), o = de.Matrix[3]; l.multiplyToRef(s, o), a instanceof ke ? this._addMesh(a, o) : a instanceof Cg && this._addMesh(a.sourceMesh, o); }); } } _addMesh(e, t) { const i = e.getVerticesData(Y.PositionKind) || [], r = i.length / 3, s = this._vertices.length; for (let n = 0; n < r; n++) { const a = new D(i[n * 3 + 0], i[n * 3 + 1], i[n * 3 + 2]); this._vertices.push(D.TransformCoordinates(a, t)); } if (this._collectIndices) { const n = e.getIndices(); if (n) for (let a = 0; a < n.length; a += 3) this._isRightHanded ? (this._indices.push(n[a + 0] + s), this._indices.push(n[a + 1] + s), this._indices.push(n[a + 2] + s)) : (this._indices.push(n[a + 2] + s), this._indices.push(n[a + 1] + s), this._indices.push(n[a + 0] + s)); } } /** * Allocate and populate the vertex positions inside the physics plugin. * * @returns An array of floats, whose backing memory is inside the plugin. The array contains the * positions of the mesh vertices, where a position is defined by three floats. You must call * freeBuffer() on the returned array once you have finished with it, in order to free the * memory inside the plugin.. 
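* Internal usage sketch only (mirrors how initShape in the Havok plugin below consumes this helper; havokInstance stands in for the WASM module handle and is an assumption):
* @example
* const verts = accumulator.getVertices(havokInstance);
* const numVertices = verts.numObjects / 3;   // three floats per vertex position
* // ...pass verts.offset and numVertices to the native shape-creation call...
* accumulator.freeBuffer(havokInstance, verts); // always release the WASM heap allocation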
*/ getVertices(e) { const t = this._vertices.length * 3, r = t * 4, s = e._malloc(r), n = new Float32Array(e.HEAPU8.buffer, s, t); for (let a = 0; a < this._vertices.length; a++) n[a * 3 + 0] = this._vertices[a].x, n[a * 3 + 1] = this._vertices[a].y, n[a * 3 + 2] = this._vertices[a].z; return { offset: s, numObjects: t }; } freeBuffer(e, t) { e._free(t.offset); } /** * Allocate and populate the triangle indices inside the physics plugin * * @returns A new Int32Array, whose backing memory is inside the plugin. The array contains the indices * of the triangle positions, where a single triangle is defined by three indices. You must call * freeBuffer() on this array once you have finished with it, to free the memory inside the plugin.. */ getTriangles(e) { const i = this._indices.length * 4, r = e._malloc(i), s = new Int32Array(e.HEAPU8.buffer, r, this._indices.length); for (let n = 0; n < this._indices.length; n++) s[n] = this._indices[n]; return { offset: r, numObjects: this._indices.length }; } } class hq { constructor(e) { this.hpBodyId = e, this.userMassProps = { centerOfMass: void 0, mass: void 0, inertia: void 0, inertiaOrientation: void 0 }; } } class dq { constructor() { this.bodyId = BigInt(0), this.position = new D(), this.normal = new D(); } } class fq { constructor() { this.contactOnA = new dq(), this.contactOnB = new dq(), this.impulseApplied = 0, this.type = 0; } static readToRef(e, t, i) { const r = new Int32Array(e, t), s = new Float32Array(e, t), n = 2; i.contactOnA.bodyId = BigInt(r[n]), i.contactOnA.position.set(s[n + 8], s[n + 9], s[n + 10]), i.contactOnA.normal.set(s[n + 11], s[n + 12], s[n + 13]); const a = 18; i.contactOnB.bodyId = BigInt(r[a]), i.contactOnB.position.set(s[a + 8], s[a + 9], s[a + 10]), i.contactOnB.normal.set(s[a + 11], s[a + 12], s[a + 13]), i.impulseApplied = s[a + 13 + 3], i.type = r[0]; } } class pq { constructor() { this.bodyIdA = BigInt(0), this.bodyIdB = BigInt(0), this.type = 0; } static readToRef(e, t, i) { const r = new Int32Array(e, t); i.type = r[0], i.bodyIdA = BigInt(r[2]), i.bodyIdB = BigInt(r[6]); } } class yge { constructor(e = !0, t = HK) { if (this._useDeltaForWorldStep = e, this._hknp = {}, this.name = "HavokPlugin", this._fixedTimeStep = 1 / 60, this._timeStep = 1 / 60, this._tmpVec3 = kc.BuildArray(3, D.Zero), this._bodies = /* @__PURE__ */ new Map(), this._bodyCollisionObservable = /* @__PURE__ */ new Map(), this._constraintToBodyIdPair = /* @__PURE__ */ new Map(), this._bodyCollisionEndedObservable = /* @__PURE__ */ new Map(), this.onCollisionObservable = new Fe(), this.onCollisionEndedObservable = new Fe(), this.onTriggerCollisionObservable = new Fe(), typeof t == "function") { Ce.Error("Havok is not ready. Please make sure you await HK() before using the plugin."); return; } else this._hknp = t; if (!this.isSupported()) { Ce.Error("Havok is not available. Please make sure you included the js file."); return; } this.world = this._hknp.HP_World_Create()[1], this._queryCollector = this._hknp.HP_QueryCollector_Create(1)[1]; } /** * If this plugin is supported * @returns true if its supported */ isSupported() { return this._hknp !== void 0; } /** * Sets the gravity of the physics world. * * @param gravity - The gravity vector to set. * */ setGravity(e) { this._hknp.HP_World_SetGravity(this.world, this._bVecToV3(e)); } /** * Sets the fixed time step for the physics engine. * * @param timeStep - The fixed time step to use for the physics engine. 
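* A hedged setup sketch (assumption: HavokPhysics is the loader exported by the separate @babylonjs/havok package, and HavokPlugin/Vector3 are the public names of the mangled classes here):
* @example
* const havok = await HavokPhysics();            // resolve the WASM module first
* const plugin = new HavokPlugin(true, havok);   // true: step the world with the render delta
* scene.enablePhysics(new Vector3(0, -9.81, 0), plugin);
* plugin.setTimeStep(1 / 120);                   // smaller fixed step for stiffer simulations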
* */ setTimeStep(e) { this._fixedTimeStep = e; } /** * Gets the fixed time step used by the physics engine. * * @returns The fixed time step used by the physics engine. * */ getTimeStep() { return this._fixedTimeStep; } /** * Executes a single step of the physics engine. * * @param delta The time delta in seconds since the last step. * @param physicsBodies An array of physics bodies to be simulated. * @returns void * * This method is useful for simulating the physics engine. It sets the physics body transformation, * steps the world, syncs the physics body, and notifies collisions. This allows for the physics engine * to accurately simulate the physics bodies in the world. */ executeStep(e, t) { for (const i of t) i.disablePreStep || this.setPhysicsBodyTransformation(i, i.transformNode); this._hknp.HP_World_Step(this.world, this._useDeltaForWorldStep ? e : this._timeStep), this._bodyBuffer = this._hknp.HP_World_GetBodyBuffer(this.world)[1]; for (const i of t) this.sync(i); this._notifyCollisions(), this._notifyTriggers(); } /** * Returns the version of the physics engine plugin. * * @returns The version of the physics engine plugin. * * This method is useful for determining the version of the physics engine plugin that is currently running. */ getPluginVersion() { return 2; } /** * Initializes a physics body with the given position and orientation. * * @param body - The physics body to initialize. * @param motionType - The motion type of the body. * @param position - The position of the body. * @param orientation - The orientation of the body. * This code is useful for initializing a physics body with the given position and orientation. * It creates a plugin data for the body and adds it to the world. It then converts the position * and orientation to a transform and sets the body's transform to the given values. */ initBody(e, t, i, r) { e._pluginData = new hq(this._hknp.HP_Body_Create()[1]), this._internalSetMotionType(e._pluginData, t); const s = [this._bVecToV3(i), this._bQuatToV4(r)]; this._hknp.HP_Body_SetQTransform(e._pluginData.hpBodyId, s), this._hknp.HP_World_AddBody(this.world, e._pluginData.hpBodyId, e.startAsleep), this._bodies.set(e._pluginData.hpBodyId[0], { body: e, index: 0 }); } /** * Removes a body from the world. To dispose of a body, it is necessary to remove it from the world first. * * @param body - The body to remove. */ removeBody(e) { if (e._pluginDataInstances && e._pluginDataInstances.length > 0) for (const t of e._pluginDataInstances) this._bodyCollisionObservable.delete(t.hpBodyId[0]), this._hknp.HP_World_RemoveBody(this.world, t.hpBodyId), this._bodies.delete(t.hpBodyId[0]); e._pluginData && (this._bodyCollisionObservable.delete(e._pluginData.hpBodyId[0]), this._hknp.HP_World_RemoveBody(this.world, e._pluginData.hpBodyId), this._bodies.delete(e._pluginData.hpBodyId[0])); } /** * Initializes the body instances for a given physics body and mesh. * * @param body - The physics body to initialize. * @param motionType - How the body will be handled by the engine * @param mesh - The mesh to initialize. * * This code is useful for creating a physics body from a mesh. It creates a * body instance for each instance of the mesh and adds it to the world. It also * sets the position of the body instance to the position of the mesh instance. * This allows for the physics engine to accurately simulate the mesh in the * world. */ initBodyInstances(e, t, i) { var r, s; const n = (s = (r = i._thinInstanceDataStorage) === null || r === void 0 ? 
void 0 : r.instancesCount) !== null && s !== void 0 ? s : 0, a = i._thinInstanceDataStorage.matrixData; a && (this._createOrUpdateBodyInstances(e, t, a, 0, n, !1), e._pluginDataInstances.forEach((l, o) => { this._bodies.set(l.hpBodyId[0], { body: e, index: o }); })); } _createOrUpdateBodyInstances(e, t, i, r, s, n) { const a = de.Quaternion[0], l = Ae.Identity(); for (let o = r; o < s; o++) { const u = [i[o * 16 + 12], i[o * 16 + 13], i[o * 16 + 14]]; let h; n ? h = e._pluginDataInstances[o].hpBodyId : h = this._hknp.HP_Body_Create()[1], l.setRowFromFloats(0, i[o * 16 + 0], i[o * 16 + 1], i[o * 16 + 2], 0), l.setRowFromFloats(1, i[o * 16 + 4], i[o * 16 + 5], i[o * 16 + 6], 0), l.setRowFromFloats(2, i[o * 16 + 8], i[o * 16 + 9], i[o * 16 + 10], 0), Ze.FromRotationMatrixToRef(l, a); const d = [u, [a.x, a.y, a.z, a.w]]; if (this._hknp.HP_Body_SetQTransform(h, d), !n) { const f = new hq(h); e._pluginDataInstances.length && (f.userMassProps = e._pluginDataInstances[0].userMassProps), this._internalSetMotionType(f, t), this._internalUpdateMassProperties(f), e._pluginDataInstances.push(f), this._hknp.HP_World_AddBody(this.world, h, e.startAsleep), f.worldTransformOffset = this._hknp.HP_Body_GetWorldTransformOffset(h)[1]; } } } /** * Update the internal body instances for a given physics body to match the instances in a mesh. * @param body the body that will be updated * @param mesh the mesh with reference instances */ updateBodyInstances(e, t) { var i, r; const s = (r = (i = t._thinInstanceDataStorage) === null || i === void 0 ? void 0 : i.instancesCount) !== null && r !== void 0 ? r : 0, n = t._thinInstanceDataStorage.matrixData; if (!n) return; const a = e._pluginDataInstances.length, l = this.getMotionType(e); if (s > a) { this._createOrUpdateBodyInstances(e, l, n, a, s, !1); const o = this._hknp.HP_Body_GetShape(e._pluginDataInstances[0].hpBodyId)[1]; for (let u = a; u < s; u++) this._hknp.HP_Body_SetShape(e._pluginDataInstances[u].hpBodyId, o), this._internalUpdateMassProperties(e._pluginDataInstances[u]), this._bodies.set(e._pluginDataInstances[u].hpBodyId[0], { body: e, index: u }); } else if (s < a) { const o = a - s; for (let u = 0; u < o; u++) { const h = e._pluginDataInstances.pop(); this._bodies.delete(h.hpBodyId[0]), this._hknp.HP_World_RemoveBody(this.world, h.hpBodyId), this._hknp.HP_Body_Release(h.hpBodyId); } this._createOrUpdateBodyInstances(e, l, n, 0, s, !0); } } /** * Synchronizes the transform of a physics body with its transform node. * @param body - The physics body to synchronize. * * This function is useful for keeping the physics body's transform in sync with its transform node. * This is important for ensuring that the physics body is accurately represented in the physics engine. */ sync(e) { this.syncTransform(e, e.transformNode); } /** * Synchronizes the transform of a physics body with the transform of its * corresponding transform node. * * @param body - The physics body to synchronize. * @param transformNode - The destination Transform Node. * * This code is useful for synchronizing the position and orientation of a * physics body with the position and orientation of its corresponding * transform node. This is important for ensuring that the physics body and * the transform node are in the same position and orientation in the scene. * This is necessary for the physics engine to accurately simulate the * physical behavior of the body. 
*/ syncTransform(e, t) { var i; if (e._pluginDataInstances.length) { const r = t, s = r._thinInstanceDataStorage.matrixData; if (!s) return; const n = e._pluginDataInstances.length; for (let a = 0; a < n; a++) { const l = e._pluginDataInstances[a].worldTransformOffset, o = new Float32Array(this._hknp.HEAPU8.buffer, this._bodyBuffer + l, 16), u = a * 16; for (let h = 0; h < 15; h++) (h & 3) != 3 && (s[u + h] = o[h]); s[u + 15] = 1; } r.thinInstanceBufferUpdated("matrix"); } else try { const r = this._hknp.HP_Body_GetQTransform(e._pluginData.hpBodyId)[1], s = r[0], n = r[1], a = de.Quaternion[0]; a.set(n[0], n[1], n[2], n[3]); const l = t.parent; if (l && !l.getWorldMatrix().isIdentity()) { l.computeWorldMatrix(!0), a.normalize(); const o = de.Matrix[0], u = de.Vector3[0]; u.copyFromFloats(s[0], s[1], s[2]), Ae.ComposeToRef(t.absoluteScaling, a, u, o); const h = de.Matrix[1]; l.getWorldMatrix().invertToRef(h); const d = de.Matrix[2]; o.multiplyToRef(h, d), d.decomposeToTransformNode(t), (i = t.rotationQuaternion) === null || i === void 0 || i.normalize(); } else t.position.set(s[0], s[1], s[2]), t.rotationQuaternion ? t.rotationQuaternion.copyFrom(a) : a.toEulerAnglesToRef(t.rotation); } catch (r) { Ce.Error(`Syncing transform failed for node ${t.name}: ${r.message}...`); } } /** * Sets the shape of a physics body. * @param body - The physics body to set the shape for. * @param shape - The physics shape to set. * * This function is used to set the shape of a physics body. It is useful for * creating a physics body with a specific shape, such as a box or a sphere, * which can then be used to simulate physical interactions in a physics engine. * This function is especially useful for meshes with multiple instances, as it * will set the shape for each instance of the mesh. */ setShape(e, t) { var i, r, s; const n = t && t._pluginData ? t._pluginData : BigInt(0); if (!(e.transformNode instanceof ke) || !(!((i = e.transformNode._thinInstanceDataStorage) === null || i === void 0) && i.matrixData)) { this._hknp.HP_Body_SetShape(e._pluginData.hpBodyId, n), this._internalUpdateMassProperties(e._pluginData); return; } const l = (s = (r = e.transformNode._thinInstanceDataStorage) === null || r === void 0 ? void 0 : r.instancesCount) !== null && s !== void 0 ? s : 0; for (let o = 0; o < l; o++) this._hknp.HP_Body_SetShape(e._pluginDataInstances[o].hpBodyId, n), this._internalUpdateMassProperties(e._pluginDataInstances[o]); } /** * Returns a reference to the first instance of the plugin data for a physics body. * @param body * @param instanceIndex * @returns a reference to the first instance */ _getPluginReference(e, t) { var i; return !((i = e._pluginDataInstances) === null || i === void 0) && i.length ? e._pluginDataInstances[t ?? 0] : e._pluginData; } /** * Gets the shape of a physics body. This will create a new shape object * * @param body - The physics body. * @returns The shape of the physics body. * */ getShape(e) { const t = this._getPluginReference(e), i = this._hknp.HP_Body_GetShape(t.hpBodyId)[1]; if (i != 0) { const r = e.transformNode.getScene(); return new fx({ pluginData: i }, r); } return null; } /** * Gets the type of a physics shape. * @param shape - The physics shape to get the type for. * @returns The type of the physics shape. * */ getShapeType(e) { return e.type ? e.type : this._hknp.HP_Shape_GetType(e._pluginData); } /** * Sets the event mask of a physics body. * @param body - The physics body to set the event mask for. * @param eventMask - The event mask to set. 
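* In application code the mask is normally driven indirectly through the body-level collision helpers; a hedged sketch (assumption: setCollisionCallbackEnabled and getCollisionObservable are the public PhysicsBody entry points that ultimately adjust this mask):
* @example
* body.setCollisionCallbackEnabled(true);
* body.getCollisionObservable().add((ev) => {
*     console.log("collided with", ev.collidedAgainst.transformNode.name);
* });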
* * This function is useful for setting the event mask of a physics body, which is used to determine which events the body will respond to. This is important for ensuring that the physics engine is able to accurately simulate the behavior of the body in the game world. */ setEventMask(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetEventMask(r.hpBodyId, t); }, i); } /** * Retrieves the event mask of a physics body. * * @param body - The physics body to retrieve the event mask from. * @returns The event mask of the physics body. * */ getEventMask(e, t) { const i = this._getPluginReference(e, t); return this._hknp.HP_Body_GetEventMask(i.hpBodyId)[1]; } _fromMassPropertiesTuple(e) { return { centerOfMass: D.FromArray(e[0]), mass: e[1], inertia: D.FromArray(e[2]), inertiaOrientation: Ze.FromArray(e[3]) }; } _internalUpdateMassProperties(e) { const t = this._internalComputeMassProperties(e), i = e.userMassProps; i.centerOfMass && (t[0] = i.centerOfMass.asArray()), i.mass != null && (t[1] = i.mass), i.inertia && (t[2] = i.inertia.asArray()), i.inertiaOrientation && (t[3] = i.inertiaOrientation.asArray()), this._hknp.HP_Body_SetMassProperties(e.hpBodyId, t); } _internalSetMotionType(e, t) { switch (t) { case P4.STATIC: this._hknp.HP_Body_SetMotionType(e.hpBodyId, this._hknp.MotionType.STATIC); break; case P4.ANIMATED: this._hknp.HP_Body_SetMotionType(e.hpBodyId, this._hknp.MotionType.KINEMATIC); break; case P4.DYNAMIC: this._hknp.HP_Body_SetMotionType(e.hpBodyId, this._hknp.MotionType.DYNAMIC); break; } } setMotionType(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._internalSetMotionType(r, t); }, i); } getMotionType(e, t) { const i = this._getPluginReference(e, t), r = this._hknp.HP_Body_GetMotionType(i.hpBodyId)[1]; switch (r) { case this._hknp.MotionType.STATIC: return P4.STATIC; case this._hknp.MotionType.KINEMATIC: return P4.ANIMATED; case this._hknp.MotionType.DYNAMIC: return P4.DYNAMIC; } throw new Error("Unknown motion type: " + r); } _internalComputeMassProperties(e) { const t = this._hknp.HP_Body_GetShape(e.hpBodyId); if (t[0] == this._hknp.Result.RESULT_OK) { const i = this._hknp.HP_Shape_BuildMassProperties(t[1]); if (i[0] == this._hknp.Result.RESULT_OK) return i[1]; } return [[0, 0, 0], 1, [1, 1, 1], [0, 0, 0, 1]]; } /** * Computes the mass properties of a physics body, from it's shape * * @param body - The physics body to copmute the mass properties of */ computeMassProperties(e, t) { const i = this._getPluginReference(e, t), r = this._internalComputeMassProperties(i); return this._fromMassPropertiesTuple(r); } /** * Sets the mass properties of a physics body. * * @param body - The physics body to set the mass properties of. * @param massProps - The mass properties to set. * @param instanceIndex - The index of the instance to set the mass properties of. If undefined, the mass properties of all the bodies will be set. * This function is useful for setting the mass properties of a physics body, * such as its mass, inertia, and center of mass. This is important for * accurately simulating the physics of the body in the physics engine. * */ setMassProperties(e, t, i) { this._applyToBodyOrInstances(e, (r) => { r.userMassProps = t, this._internalUpdateMassProperties(r); }, i); } /** * */ getMassProperties(e, t) { const i = this._getPluginReference(e, t), r = this._hknp.HP_Body_GetMassProperties(i.hpBodyId)[1]; return this._fromMassPropertiesTuple(r); } /** * Sets the linear damping of the given body. 
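* A hedged sketch (assumption: callers normally go through the PhysicsBody wrapper of the same name, which forwards to this plugin method):
* @example
* body.setLinearDamping(0.1);  // mild drag on translation
* body.setAngularDamping(0.5); // stronger drag on spin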
* @param body - The body to set the linear damping for. * @param damping - The linear damping to set. * * This method is useful for controlling the linear damping of a body in a physics engine. * Linear damping is a force that opposes the motion of the body, and is proportional to the velocity of the body. * This method allows the user to set the linear damping of a body, which can be used to control the motion of the body. */ setLinearDamping(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetLinearDamping(r.hpBodyId, t); }, i); } /** * Gets the linear damping of the given body. * @param body - The body to get the linear damping from. * @returns The linear damping of the given body. * * This method is useful for getting the linear damping of a body in a physics engine. * Linear damping is a force that opposes the motion of the body and is proportional to the velocity of the body. * It is used to simulate the effects of air resistance and other forms of friction. */ getLinearDamping(e, t) { const i = this._getPluginReference(e, t); return this._hknp.HP_Body_GetLinearDamping(i.hpBodyId)[1]; } /** * Sets the angular damping of a physics body. * @param body - The physics body to set the angular damping for. * @param damping - The angular damping value to set. * * This function is useful for controlling the angular velocity of a physics body. * By setting the angular damping, the body's angular velocity will be reduced over time, allowing for more realistic physics simulations. */ setAngularDamping(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetAngularDamping(r.hpBodyId, t); }, i); } /** * Gets the angular damping of a physics body. * @param body - The physics body to get the angular damping from. * @returns The angular damping of the body. * * This function is useful for retrieving the angular damping of a physics body, * which is used to control the rotational motion of the body. The angular damping is a value between 0 and 1, where 0 is no damping and 1 is full damping. */ getAngularDamping(e, t) { const i = this._getPluginReference(e, t); return this._hknp.HP_Body_GetAngularDamping(i.hpBodyId)[1]; } /** * Sets the linear velocity of a physics body. * @param body - The physics body to set the linear velocity of. * @param linVel - The linear velocity to set. * * This function is useful for setting the linear velocity of a physics body, which is necessary for simulating * motion in a physics engine. The linear velocity is the speed and direction of the body's movement. */ setLinearVelocity(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetLinearVelocity(r.hpBodyId, this._bVecToV3(t)); }, i); } /** * Gets the linear velocity of a physics body and stores it in a given vector. * @param body - The physics body to get the linear velocity from. * @param linVel - The vector to store the linear velocity in. * * This function is useful for retrieving the linear velocity of a physics body, * which can be used to determine the speed and direction of the body. This * information can be used to simulate realistic physics behavior in a game. */ getLinearVelocityToRef(e, t, i) { const r = this._getPluginReference(e, i), s = this._hknp.HP_Body_GetLinearVelocity(r.hpBodyId)[1]; this._v3ToBvecRef(s, t); } /* * Apply an operation either to all instances of a body, if instanceIndex is not specified, or to a specific instance. */ _applyToBodyOrInstances(e, t, i) { var r; if (((r = e._pluginDataInstances) === null || r === void 0 ? 
void 0 : r.length) > 0 && i === void 0) for (let s = 0; s < e._pluginDataInstances.length; s++) t(e._pluginDataInstances[s]); else t(this._getPluginReference(e, i)); } /** * Applies an impulse to a physics body at a given location. * @param body - The physics body to apply the impulse to. * @param impulse - The impulse vector to apply. * @param location - The location in world space to apply the impulse. * @param instanceIndex - The index of the instance to apply the impulse to. If not specified, the impulse will be applied to all instances. * * This method is useful for applying an impulse to a physics body at a given location. * This can be used to simulate physical forces such as explosions, collisions, and gravity. */ applyImpulse(e, t, i, r) { this._applyToBodyOrInstances(e, (s) => { this._hknp.HP_Body_ApplyImpulse(s.hpBodyId, this._bVecToV3(i), this._bVecToV3(t)); }, r); } /** * Applies a force to a physics body at a given location. * @param body - The physics body to apply the impulse to. * @param force - The force vector to apply. * @param location - The location in world space to apply the impulse. * @param instanceIndex - The index of the instance to apply the force to. If not specified, the force will be applied to all instances. * * This method is useful for applying a force to a physics body at a given location. * This can be used to simulate physical forces such as explosions, collisions, and gravity. */ applyForce(e, t, i, r) { t.scaleToRef(this.getTimeStep(), this._tmpVec3[0]), this.applyImpulse(e, this._tmpVec3[0], i, r); } /** * Sets the angular velocity of a physics body. * * @param body - The physics body to set the angular velocity of. * @param angVel - The angular velocity to set. * * This function is useful for setting the angular velocity of a physics body in a physics engine. * This allows for more realistic simulations of physical objects, as they can be given a rotational velocity. */ setAngularVelocity(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetAngularVelocity(r.hpBodyId, this._bVecToV3(t)); }, i); } /** * Gets the angular velocity of a body. * @param body - The body to get the angular velocity from. * @param angVel - The vector3 to store the angular velocity. * * This method is useful for getting the angular velocity of a body in a physics engine. It * takes the body and a vector3 as parameters and stores the angular velocity of the body * in the vector3. This is useful for getting the angular velocity of a body in order to * calculate the motion of the body in the physics engine. */ getAngularVelocityToRef(e, t, i) { const r = this._getPluginReference(e, i), s = this._hknp.HP_Body_GetAngularVelocity(r.hpBodyId)[1]; this._v3ToBvecRef(s, t); } /** * Sets the transformation of the given physics body to the given transform node. * @param body The physics body to set the transformation for. * @param node The transform node to set the transformation from. * Sets the transformation of the given physics body to the given transform node. * * This function is useful for setting the transformation of a physics body to a * transform node, which is necessary for the physics engine to accurately simulate * the motion of the body. It also takes into account instances of the transform * node, which is necessary for accurate simulation of multiple bodies with the * same transformation. 
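* A hedged sketch of when this runs (assumption: disablePreStep is the public PhysicsBody flag checked in executeStep above; it defaults to true for performance):
* @example
* body.disablePreStep = false; // opt in to pre-step sync
* mesh.position.y += 2;        // the change is pushed to the native body on the next physics step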
*/ setPhysicsBodyTransformation(e, t) { const i = e.transformNode; if (e.numInstances > 0) { const s = i._thinInstanceDataStorage.matrixData; if (!s) return; const n = e.numInstances; this._createOrUpdateBodyInstances(e, e.getMotionType(), s, 0, n, !0); } else this._hknp.HP_Body_SetQTransform(e._pluginData.hpBodyId, this._getTransformInfos(t)); } /** * Set the target transformation (position and rotation) of the body, such that the body will set its velocity to reach that target * @param body The physics body to set the target transformation for. * @param position The target position * @param rotation The target rotation * @param instanceIndex The index of the instance in an instanced body */ setTargetTransform(e, t, i, r) { this._applyToBodyOrInstances(e, (s) => { this._hknp.HP_Body_SetTargetQTransform(s.hpBodyId, [this._bVecToV3(t), this._bQuatToV4(i)]); }, r); } /** * Sets the gravity factor of a body * @param body the physics body to set the gravity factor for * @param factor the gravity factor * @param instanceIndex the index of the instance in an instanced body */ setGravityFactor(e, t, i) { this._applyToBodyOrInstances(e, (r) => { this._hknp.HP_Body_SetGravityFactor(r.hpBodyId, t); }, i); } /** * Get the gravity factor of a body * @param body the physics body to get the gravity factor from * @param instanceIndex the index of the instance in an instanced body. If not specified, the gravity factor of the first instance will be returned. * @returns the gravity factor */ getGravityFactor(e, t) { const i = this._getPluginReference(e, t); return this._hknp.HP_Body_GetGravityFactor(i.hpBodyId)[1]; } /** * Disposes a physics body. * * @param body - The physics body to dispose. * * This method is useful for releasing the resources associated with a physics body when it is no longer needed. * This is important for avoiding memory leaks in the physics engine. */ disposeBody(e) { if (e._pluginDataInstances && e._pluginDataInstances.length > 0) for (const t of e._pluginDataInstances) this._hknp.HP_Body_Release(t.hpBodyId), t.hpBodyId = void 0; e._pluginData && (this._hknp.HP_Body_Release(e._pluginData.hpBodyId), e._pluginData.hpBodyId = void 0); } /** * Initializes a physics shape with the given type and parameters. * @param shape - The physics shape to initialize. * @param type - The type of shape to initialize. * @param options - The parameters for the shape. * * This code is useful for initializing a physics shape with the given type and parameters. * It allows for the creation of a sphere, box, capsule, container, cylinder, mesh, and heightfield. * Depending on the type of shape, different parameters are required. * For example, a sphere requires a radius, while a box requires extents and a rotation. */ initShape(e, t, i) { switch (t) { case Cc.SPHERE: { const r = i.radius || 1, s = i.center ? this._bVecToV3(i.center) : [0, 0, 0]; e._pluginData = this._hknp.HP_Shape_CreateSphere(s, r)[1]; } break; case Cc.BOX: { const r = i.rotation ? this._bQuatToV4(i.rotation) : [0, 0, 0, 1], s = i.extents ? this._bVecToV3(i.extents) : [1, 1, 1], n = i.center ? this._bVecToV3(i.center) : [0, 0, 0]; e._pluginData = this._hknp.HP_Shape_CreateBox(n, r, s)[1]; } break; case Cc.CAPSULE: { const r = i.pointA ? this._bVecToV3(i.pointA) : [0, 0, 0], s = i.pointB ? this._bVecToV3(i.pointB) : [0, 1, 0], n = i.radius || 0; e._pluginData = this._hknp.HP_Shape_CreateCapsule(r, s, n)[1]; } break; case Cc.CONTAINER: e._pluginData = this._hknp.HP_Shape_CreateContainer()[1]; break; case Cc.CYLINDER: { const r = i.pointA ? 
this._bVecToV3(i.pointA) : [0, 0, 0], s = i.pointB ? this._bVecToV3(i.pointB) : [0, 1, 0], n = i.radius || 0; e._pluginData = this._hknp.HP_Shape_CreateCylinder(r, s, n)[1]; } break; case Cc.CONVEX_HULL: case Cc.MESH: { const r = i.mesh; if (r) { const s = !!i.includeChildMeshes, n = t != Cc.CONVEX_HULL, a = new Age(r, n, r == null ? void 0 : r.getScene()); a.addNodeMeshes(r, s); const l = a.getVertices(this._hknp), o = l.numObjects / 3; if (t == Cc.CONVEX_HULL) e._pluginData = this._hknp.HP_Shape_CreateConvexHull(l.offset, o)[1]; else { const u = a.getTriangles(this._hknp), h = u.numObjects / 3; e._pluginData = this._hknp.HP_Shape_CreateMesh(l.offset, o, u.offset, h)[1], a.freeBuffer(this._hknp, u); } a.freeBuffer(this._hknp, l); } else throw new Error("No mesh provided to create physics shape."); } break; default: throw new Error("Unsupported Shape Type."); } } setShapeFilterMembershipMask(e, t) { const i = this._hknp.HP_Shape_GetFilterInfo(e._pluginData)[1][1]; this._hknp.HP_Shape_SetFilterInfo(e._pluginData, [t, i]); } getShapeFilterMembershipMask(e) { return this._hknp.HP_Shape_GetFilterInfo(e._pluginData)[1][0]; } setShapeFilterCollideMask(e, t) { const i = this._hknp.HP_Shape_GetFilterInfo(e._pluginData)[1][0]; this._hknp.HP_Shape_SetFilterInfo(e._pluginData, [i, t]); } getShapeFilterCollideMask(e) { return this._hknp.HP_Shape_GetFilterInfo(e._pluginData)[1][1]; } /** * Sets the material of a physics shape. * @param shape - The physics shape to set the material of. * @param material - The material to set. * */ setMaterial(e, t) { var i, r, s, n, a; const l = (i = t.friction) !== null && i !== void 0 ? i : 0.5, o = (r = t.staticFriction) !== null && r !== void 0 ? r : l, u = (s = t.restitution) !== null && s !== void 0 ? s : 0, h = (n = t.frictionCombine) !== null && n !== void 0 ? n : wC.MINIMUM, d = (a = t.restitutionCombine) !== null && a !== void 0 ? a : wC.MAXIMUM, f = [o, l, u, this._materialCombineToNative(h), this._materialCombineToNative(d)]; this._hknp.HP_Shape_SetMaterial(e._pluginData, f); } /** * Sets the density of a physics shape. * @param shape - The physics shape to set the density of. * @param density - The density to set. * */ setDensity(e, t) { this._hknp.HP_Shape_SetDensity(e._pluginData, t); } /** * Calculates the density of a given physics shape. * * @param shape - The physics shape to calculate the density of. * @returns The density of the given physics shape. * */ getDensity(e) { return this._hknp.HP_Shape_GetDensity(e._pluginData)[1]; } /** * Gets the transform infos of a given transform node. * @param node - The transform node. * @returns An array containing the position and orientation of the node. * This code is useful for getting the position and orientation of a given transform node. * It first checks if the node has a rotation quaternion, and if not, it creates one from the node's rotation. * It then creates an array containing the position and orientation of the node and returns it. */ _getTransformInfos(e) { if (e.parent) return e.computeWorldMatrix(!0), [this._bVecToV3(e.absolutePosition), this._bQuatToV4(e.absoluteRotationQuaternion)]; let t = de.Quaternion[0]; if (e.rotationQuaternion) t = e.rotationQuaternion; else { const r = e.rotation; Ze.FromEulerAnglesToRef(r.x, r.y, r.z, t); } return [this._bVecToV3(e.position), this._bQuatToV4(t)]; } /** * Adds a child shape to the given shape. * @param shape - The parent shape. * @param newChild - The child shape to add. 
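* @example
* // Sketch only: assumes the public PhysicsShapeContainer wrapper that delegates to this method.
* // const container = new BABYLON.PhysicsShapeContainer(scene);
* // const child = new BABYLON.PhysicsShapeSphere(BABYLON.Vector3.Zero(), 0.5, scene);
* // container.addChild(child, new BABYLON.Vector3(0, 1, 0)); // offset the child one unit up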
* @param translation - The relative translation of the child from the parent shape * @param rotation - The relative rotation of the child from the parent shape * @param scale - The relative scale of the child from the parent shape * */ addChild(e, t, i, r, s) { const n = [ i ? this._bVecToV3(i) : [0, 0, 0], r ? this._bQuatToV4(r) : [0, 0, 0, 1], s ? this._bVecToV3(s) : [1, 1, 1] ]; this._hknp.HP_Shape_AddChild(e._pluginData, t._pluginData, n); } /** * Removes a child shape from a parent shape. * @param shape - The parent shape. * @param childIndex - The index of the child shape to remove. * */ removeChild(e, t) { this._hknp.HP_Shape_RemoveChild(e._pluginData, t); } /** * Returns the number of children of the given shape. * * @param shape - The shape to get the number of children from. * @returns The number of children of the given shape. * */ getNumChildren(e) { return this._hknp.HP_Shape_GetNumChildren(e._pluginData)[1]; } /** * Marks the shape as a trigger * @param shape the shape to mark as a trigger * @param isTrigger if the shape is a trigger */ setTrigger(e, t) { this._hknp.HP_Shape_SetTrigger(e._pluginData, t); } /** * Calculates the bounding box of a given physics shape. * * @param _shape - The physics shape to calculate the bounding box for. * @returns The calculated bounding box. * * This method is useful for physics engines as it allows to calculate the * boundaries of a given shape. Knowing the boundaries of a shape is important * for collision detection and other physics calculations. */ getBoundingBox(e) { return {}; } /** * Gets the geometry of a physics body. * * @param body - The physics body. * @returns An object containing the positions and indices of the body's geometry. * */ getBodyGeometry(e) { var t; const i = ((t = e._pluginDataInstances) === null || t === void 0 ? void 0 : t.length) > 0 ? e._pluginDataInstances[0] : e._pluginData, r = this._hknp.HP_Body_GetShape(i.hpBodyId)[1], s = this._hknp.HP_Shape_CreateDebugDisplayGeometry(r); if (s[0] != this._hknp.Result.RESULT_OK) return { positions: [], indices: [] }; const n = this._hknp.HP_DebugGeometry_GetInfo(s[1])[1], a = new Float32Array(this._hknp.HEAPU8.buffer, n[0], n[1] * 3), l = new Uint32Array(this._hknp.HEAPU8.buffer, n[2], n[3] * 3), o = a.slice(0), u = l.slice(0); return this._hknp.HP_DebugGeometry_Release(s[1]), { positions: o, indices: u }; } /** * Releases a physics shape from the physics engine. * * @param shape - The physics shape to be released. * @returns void * * This method is useful for releasing a physics shape from the physics engine, freeing up resources and preventing memory leaks. */ disposeShape(e) { this._hknp.HP_Shape_Release(e._pluginData), e._pluginData = void 0; } // constraint /** * Initializes a physics constraint with the given parameters. * * @param constraint - The physics constraint to be initialized. * @param body - The main body * @param childBody - The child body. * @param instanceIndex - If this body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. * @param childInstanceIndex - If the child body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. * * This function is useful for setting up a physics constraint in a physics engine. */ initConstraint(e, t, i, r, s) { var n, a, l, o, u; const h = e.type, d = e.options; if (!h || !d) { Ce.Warn("No constraint type or options. 
Constraint is invalid."); return; } if (t._pluginDataInstances.length > 0 && r === void 0 || i._pluginDataInstances.length > 0 && s === void 0) { Ce.Warn("Body is instanced but no instance index was specified. Constraint will not be applied."); return; } e._pluginData = (n = e._pluginData) !== null && n !== void 0 ? n : []; const f = this._hknp.HP_Constraint_Create()[1]; e._pluginData.push(f); const p = this._getPluginReference(t, r).hpBodyId, m = this._getPluginReference(i, s).hpBodyId; this._hknp.HP_Constraint_SetParentBody(f, p), this._hknp.HP_Constraint_SetChildBody(f, m), this._constraintToBodyIdPair.set(f[0], [p[0], m[0]]); const _ = d.pivotA ? this._bVecToV3(d.pivotA) : this._bVecToV3(D.Zero()), v = (a = d.axisA) !== null && a !== void 0 ? a : new D(1, 0, 0), C = this._tmpVec3[0]; d.perpAxisA ? C.copyFrom(d.perpAxisA) : v.getNormalToRef(C), this._hknp.HP_Constraint_SetAnchorInParent(f, _, this._bVecToV3(v), this._bVecToV3(C)); const x = d.pivotB ? this._bVecToV3(d.pivotB) : this._bVecToV3(D.Zero()), b = (l = d.axisB) !== null && l !== void 0 ? l : new D(1, 0, 0), S = this._tmpVec3[0]; if (d.perpAxisB ? S.copyFrom(d.perpAxisB) : b.getNormalToRef(S), this._hknp.HP_Constraint_SetAnchorInChild(f, x, this._bVecToV3(b), this._bVecToV3(S)), e._initOptions || (e._initOptions = { axisA: v.clone(), axisB: b.clone(), perpAxisA: C.clone(), perpAxisB: S.clone(), pivotA: new D(_[0], _[1], _[2]), pivotB: new D(x[0], x[1], x[2]) }), h == af.LOCK) this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_X, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_X, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED); else if (h == af.DISTANCE) { const R = d.maxDistance || 0, w = this._hknp.ConstraintAxis.LINEAR_DISTANCE; this._hknp.HP_Constraint_SetAxisMode(f, w, this._hknp.ConstraintAxisLimitMode.LIMITED), this._hknp.HP_Constraint_SetAxisMinLimit(f, w, R), this._hknp.HP_Constraint_SetAxisMaxLimit(f, w, R); } else if (h == af.HINGE) this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_X, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED); else if (h == af.PRISMATIC) this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_X, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, 
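/*
 * Example (sketch): the constraint types handled by initConstraint are usually built through
 * the public constraint classes and attached with PhysicsBody.addConstraint. Assuming the
 * standard HingeConstraint wrapper:
 *
 *   const hinge = new BABYLON.HingeConstraint(
 *     new BABYLON.Vector3(0, 1, 0),  // pivotA, in the parent body's local space
 *     new BABYLON.Vector3(0, -1, 0), // pivotB, in the child body's local space
 *     new BABYLON.Vector3(1, 0, 0),  // axisA
 *     new BABYLON.Vector3(1, 0, 0),  // axisB
 *     scene
 *   );
 *   parentBody.addConstraint(childBody, hinge);
 */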
this._hknp.ConstraintAxis.ANGULAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED); else if (h == af.SLIDER) this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.ANGULAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED); else if (h == af.BALL_AND_SOCKET) this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_X, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Y, this._hknp.ConstraintAxisLimitMode.LOCKED), this._hknp.HP_Constraint_SetAxisMode(f, this._hknp.ConstraintAxis.LINEAR_Z, this._hknp.ConstraintAxisLimitMode.LOCKED); else if (h == af.SIX_DOF) { const R = e; for (const w of R.limits) { const V = this._constraintAxisToNative(w.axis); ((o = w.minLimit) !== null && o !== void 0 ? o : -1) == 0 && ((u = w.maxLimit) !== null && u !== void 0 ? u : -1) == 0 ? this._hknp.HP_Constraint_SetAxisMode(f, V, this._hknp.ConstraintAxisLimitMode.LOCKED) : (w.minLimit != null && (this._hknp.HP_Constraint_SetAxisMode(f, V, this._hknp.ConstraintAxisLimitMode.LIMITED), this._hknp.HP_Constraint_SetAxisMinLimit(f, V, w.minLimit)), w.maxLimit != null && (this._hknp.HP_Constraint_SetAxisMode(f, V, this._hknp.ConstraintAxisLimitMode.LIMITED), this._hknp.HP_Constraint_SetAxisMaxLimit(f, V, w.maxLimit))), w.stiffness && this._hknp.HP_Constraint_SetAxisStiffness(f, V, w.stiffness), w.damping && this._hknp.HP_Constraint_SetAxisDamping(f, V, w.damping); } } else throw new Error("Unsupported Constraint Type."); const M = !!d.collision; this._hknp.HP_Constraint_SetCollisionsEnabled(f, M), this._hknp.HP_Constraint_SetEnabled(f, !0); } /** * Get a list of all the pairs of bodies that are connected by this constraint. * @param constraint the constraint to search from * @returns a list of parent, child pairs */ getBodiesUsingConstraint(e) { const t = []; for (const i of e._pluginData) { const r = this._constraintToBodyIdPair.get(i[0]); if (r) { const s = this._bodies.get(r[0]), n = this._bodies.get(r[1]); s && n && t.push({ parentBody: s.body, parentBodyIndex: s.index, childBody: n.body, childBodyIndex: n.index }); } } return t; } /** * Adds a constraint to the physics engine. * * @param body - The main body to which the constraint is applied. * @param childBody - The body to which the constraint is applied. * @param constraint - The constraint to be applied. * @param instanceIndex - If this body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. * @param childInstanceIndex - If the child body is instanced, the index of the instance to which the constraint will be applied. If not specified, no constraint will be applied. */ addConstraint(e, t, i, r, s) { this.initConstraint(i, e, t, r, s); } /** * Enables or disables a constraint in the physics engine. * @param constraint - The constraint to enable or disable. * @param isEnabled - Whether the constraint should be enabled or disabled. 
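* @example
* // Sketch only: toggling a joint through this plugin method ("hkPlugin" and "constraint" are placeholders).
* // hkPlugin.setEnabled(constraint, false); // temporarily deactivate the joint
* // hkPlugin.setEnabled(constraint, true);  // re-activate it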
* */ setEnabled(e, t) { for (const i of e._pluginData) this._hknp.HP_Constraint_SetEnabled(i, t); } /** * Gets the enabled state of the given constraint. * @param constraint - The constraint to get the enabled state from. * @returns The enabled state of the given constraint. * */ getEnabled(e) { const t = e._pluginData && e._pluginData[0]; return t ? this._hknp.HP_Constraint_GetEnabled(t)[1] : !1; } /** * Enables or disables collisions for the given constraint. * @param constraint - The constraint to enable or disable collisions for. * @param isEnabled - Whether collisions should be enabled or disabled. * */ setCollisionsEnabled(e, t) { for (const i of e._pluginData) this._hknp.HP_Constraint_SetCollisionsEnabled(i, t); } /** * Gets whether collisions are enabled for the given constraint. * @param constraint - The constraint to get collisions enabled for. * @returns Whether collisions are enabled for the given constraint. * */ getCollisionsEnabled(e) { const t = e._pluginData && e._pluginData[0]; return t ? this._hknp.HP_Constraint_GetCollisionsEnabled(t)[1] : !1; } /** * Sets the friction of the given axis of the given constraint. * * @param constraint - The constraint to set the friction of. * @param axis - The axis of the constraint to set the friction of. * @param friction - The friction to set. * @returns void * */ setAxisFriction(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisFriction(r, this._constraintAxisToNative(t), i); } /** * Gets the friction value of the specified axis of the given constraint. * * @param constraint - The constraint to get the axis friction from. * @param axis - The axis to get the friction from. * @returns The friction value of the specified axis. * */ getAxisFriction(e, t) { const i = e._pluginData && e._pluginData[0]; return i ? this._hknp.HP_Constraint_GetAxisFriction(i, this._constraintAxisToNative(t))[1] : null; } /** * Sets the limit mode of the specified axis of the given constraint. * @param constraint - The constraint to set the axis mode of. * @param axis - The axis to set the limit mode of. * @param limitMode - The limit mode to set. */ setAxisMode(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMode(r, this._constraintAxisToNative(t), this._limitModeToNative(i)); } /** * Gets the axis limit mode of the given constraint. * * @param constraint - The constraint to get the axis limit mode from. * @param axis - The axis to get the limit mode from. * @returns The axis limit mode of the given constraint. * */ getAxisMode(e, t) { const i = e._pluginData && e._pluginData[0]; if (i) { const r = this._hknp.HP_Constraint_GetAxisMode(i, this._constraintAxisToNative(t))[1]; return this._nativeToLimitMode(r); } return null; } /** * Sets the minimum limit of the given axis of the given constraint. * @param constraint - The constraint to set the minimum limit of. * @param axis - The axis to set the minimum limit of. * @param limit - The minimum limit to set. * */ setAxisMinLimit(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMinLimit(r, this._constraintAxisToNative(t), i); } /** * Gets the minimum limit of the specified axis of the given constraint. * @param constraint - The constraint to get the minimum limit from. * @param axis - The axis to get the minimum limit from. * @returns The minimum limit of the specified axis of the given constraint. * */ getAxisMinLimit(e, t) { const i = e._pluginData && e._pluginData[0]; return i ? 
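/*
 * Example (sketch): limiting a single constraint axis with the setters above. Assumes the
 * public PhysicsConstraintAxis / PhysicsConstraintAxisLimitMode enum names; "hkPlugin" and
 * "constraint" are placeholders.
 *
 *   hkPlugin.setAxisMode(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_Z, BABYLON.PhysicsConstraintAxisLimitMode.LIMITED);
 *   hkPlugin.setAxisMinLimit(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_Z, -Math.PI / 4);
 *   hkPlugin.setAxisMaxLimit(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_Z, Math.PI / 4);
 */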
this._hknp.HP_Constraint_GetAxisMinLimit(i, this._constraintAxisToNative(t))[1] : null; } /** * Sets the maximum limit of the given axis of the given constraint. * @param constraint - The constraint to set the maximum limit of the given axis. * @param axis - The axis to set the maximum limit of. * @param limit - The maximum limit to set. * */ setAxisMaxLimit(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMaxLimit(r, this._constraintAxisToNative(t), i); } /** * Gets the maximum limit of the given axis of the given constraint. * * @param constraint - The constraint to get the maximum limit from. * @param axis - The axis to get the maximum limit from. * @returns The maximum limit of the given axis of the given constraint. * */ getAxisMaxLimit(e, t) { const i = e._pluginData && e._pluginData[0]; return i ? this._hknp.HP_Constraint_GetAxisMaxLimit(i, this._constraintAxisToNative(t))[1] : null; } /** * Sets the motor type of the given axis of the given constraint. * @param constraint - The constraint to set the motor type of. * @param axis - The axis of the constraint to set the motor type of. * @param motorType - The motor type to set. * @returns void * */ setAxisMotorType(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMotorType(r, this._constraintAxisToNative(t), this._constraintMotorTypeToNative(i)); } /** * Gets the motor type of the specified axis of the given constraint. * @param constraint - The constraint to get the motor type from. * @param axis - The axis of the constraint to get the motor type from. * @returns The motor type of the specified axis of the given constraint. * */ getAxisMotorType(e, t) { const i = e._pluginData && e._pluginData[0]; return i ? this._nativeToMotorType(this._hknp.HP_Constraint_GetAxisMotorType(i, this._constraintAxisToNative(t))[1]) : null; } /** * Sets the target of an axis motor of a constraint. * * @param constraint - The constraint to set the axis motor target of. * @param axis - The axis of the constraint to set the motor target of. * @param target - The target of the axis motor. * */ setAxisMotorTarget(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMotorTarget(r, this._constraintAxisToNative(t), i); } /** * Gets the target of the motor of the given axis of the given constraint. * * @param constraint - The constraint to get the motor target from. * @param axis - The axis of the constraint to get the motor target from. * @returns The target of the motor of the given axis of the given constraint. * */ getAxisMotorTarget(e, t) { return e._pluginData && e._pluginData[0] ? this._hknp.HP_Constraint_GetAxisMotorTarget(e._pluginData, this._constraintAxisToNative(t))[1] : null; } /** * Sets the maximum force that can be applied by the motor of the given constraint axis. * @param constraint - The constraint to set the motor max force for. * @param axis - The axis of the constraint to set the motor max force for. * @param maxForce - The maximum force that can be applied by the motor. * */ setAxisMotorMaxForce(e, t, i) { for (const r of e._pluginData) this._hknp.HP_Constraint_SetAxisMotorMaxForce(r, this._constraintAxisToNative(t), i); } /** * Gets the maximum force of the motor of the given constraint axis. * * @param constraint - The constraint to get the motor maximum force from. * @param axis - The axis of the constraint to get the motor maximum force from. * @returns The maximum force of the motor of the given constraint axis. 
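* @example
* // Sketch only: driving one axis with a velocity motor via the methods above.
* // Assumes the public PhysicsConstraintAxis / PhysicsConstraintMotorType enum names.
* // hkPlugin.setAxisMotorType(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_X, BABYLON.PhysicsConstraintMotorType.VELOCITY);
* // hkPlugin.setAxisMotorTarget(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_X, Math.PI); // target angular velocity (rad/s)
* // hkPlugin.setAxisMotorMaxForce(constraint, BABYLON.PhysicsConstraintAxis.ANGULAR_X, 100);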
* */ getAxisMotorMaxForce(e, t) { const i = e._pluginData && e._pluginData[0]; return i ? this._hknp.HP_Constraint_GetAxisMotorMaxForce(i, this._constraintAxisToNative(t))[1] : null; } /** * Disposes a physics constraint. * * @param constraint - The physics constraint to dispose. * * This method is useful for releasing the resources associated with a physics constraint, such as * the Havok constraint, when it is no longer needed. This is important for avoiding memory leaks. */ disposeConstraint(e) { for (const t of e._pluginData) this._hknp.HP_Constraint_SetEnabled(t, !1), this._hknp.HP_Constraint_Release(t); e._pluginData.length = 0; } /** * Performs a raycast from a given start point to a given end point and stores the result in a given PhysicsRaycastResult object. * * @param from - The start point of the raycast. * @param to - The end point of the raycast. * @param result - The PhysicsRaycastResult object to store the result of the raycast. * @param query - The raycast query options. See [[IRaycastQuery]] for more information. * * Performs a raycast. It takes in two points, from and to, and a PhysicsRaycastResult object to store the result of the raycast. * It then performs the raycast and stores the hit data in the PhysicsRaycastResult object. */ raycast(e, t, i, r) { var s, n; const a = (s = r == null ? void 0 : r.membership) !== null && s !== void 0 ? s : -1, l = (n = r == null ? void 0 : r.collideWith) !== null && n !== void 0 ? n : -1; i.reset(e, t); const o = !1, u = [BigInt(0)], h = [this._bVecToV3(e), this._bVecToV3(t), [a, l], o, u]; if (this._hknp.HP_World_CastRayWithCollector(this.world, this._queryCollector, h), this._hknp.HP_QueryCollector_GetNumHits(this._queryCollector)[1] > 0) { const d = this._hknp.HP_QueryCollector_GetCastRayResult(this._queryCollector, 0)[1], f = d[1][3], p = d[1][4], m = d[1][5]; i.setHitData({ x: p[0], y: p[1], z: p[2] }, { x: f[0], y: f[1], z: f[2] }, m), i.calculateHitDistance(); const _ = this._bodies.get(d[1][0][0]); i.body = _ == null ? void 0 : _.body, i.bodyIndex = _ == null ? void 0 : _.index; } } /** * Return the collision observable for a particular physics body. * @param body the physics body */ getCollisionObservable(e) { const t = e._pluginData.hpBodyId[0]; let i = this._bodyCollisionObservable.get(t); return i || (i = new Fe(), this._bodyCollisionObservable.set(t, i)), i; } /** * Return the collision ended observable for a particular physics body. * @param body the physics body * @returns */ getCollisionEndedObservable(e) { const t = e._pluginData.hpBodyId[0]; let i = this._bodyCollisionEndedObservable.get(t); return i || (i = new Fe(), this._bodyCollisionEndedObservable.set(t, i)), i; } /** * Enable collision to be reported for a body when a callback is setup on the world * @param body the physics body * @param enabled */ setCollisionCallbackEnabled(e, t) { const i = this._hknp.EventType.COLLISION_STARTED.value | this._hknp.EventType.COLLISION_CONTINUED.value | this._hknp.EventType.COLLISION_FINISHED.value; e._pluginDataInstances && e._pluginDataInstances.length ? e._pluginDataInstances.forEach((r) => { this._hknp.HP_Body_SetEventMask(r.hpBodyId, t ? i : 0); }) : e._pluginData && this._hknp.HP_Body_SetEventMask(e._pluginData.hpBodyId, t ? i : 0); } /** * Enable collision ended to be reported for a body when a callback is setup on the world * @param body * @param enabled */ setCollisionEndedCallbackEnabled(e, t) { const i = this._getPluginReference(e); let r = this._hknp.HP_Body_GetEventMask(i.hpBodyId)[1]; r = t ? 
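/*
 * Example (sketch): the raycast method above writes into a reusable result object. Assuming
 * the public PhysicsRaycastResult class and its hasHit / hitPointWorld fields:
 *
 *   const result = new BABYLON.PhysicsRaycastResult();
 *   hkPlugin.raycast(new BABYLON.Vector3(0, 10, 0), new BABYLON.Vector3(0, -10, 0), result);
 *   if (result.hasHit) {
 *     console.log("hit", result.body, "at", result.hitPointWorld);
 *   }
 */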
r | this._hknp.EventType.COLLISION_FINISHED.value : r & ~this._hknp.EventType.COLLISION_FINISHED.value, e._pluginDataInstances && e._pluginDataInstances.length ? e._pluginDataInstances.forEach((s) => { this._hknp.HP_Body_SetEventMask(s.hpBodyId, r); }) : e._pluginData && this._hknp.HP_Body_SetEventMask(e._pluginData.hpBodyId, r); } _notifyTriggers() { let e = this._hknp.HP_World_GetTriggerEvents(this.world)[1]; const t = new pq(); for (; e; ) { pq.readToRef(this._hknp.HEAPU8.buffer, e, t); const i = this._bodies.get(t.bodyIdA), r = this._bodies.get(t.bodyIdB); if (i && r) { const s = { collider: i.body, colliderIndex: i.index, collidedAgainst: r.body, collidedAgainstIndex: r.index, type: this._nativeTriggerCollisionValueToCollisionType(t.type) }; this.onTriggerCollisionObservable.notifyObservers(s); } e = this._hknp.HP_World_GetNextTriggerEvent(this.world, e); } } /** * Runs thru all detected collisions and filter by body */ _notifyCollisions() { let e = this._hknp.HP_World_GetCollisionEvents(this.world)[1]; const t = new fq(), i = Number(this.world); for (; e; ) { fq.readToRef(this._hknp.HEAPU8.buffer, e, t); const r = this._bodies.get(t.contactOnA.bodyId), s = this._bodies.get(t.contactOnB.bodyId); if (r && s) { const n = { collider: r.body, colliderIndex: r.index, collidedAgainst: s.body, collidedAgainstIndex: s.index, type: this._nativeCollisionValueToCollisionType(t.type) }; if (n.type === C4.COLLISION_FINISHED) this.onCollisionEndedObservable.notifyObservers(n); else { t.contactOnB.position.subtractToRef(t.contactOnA.position, this._tmpVec3[0]); const a = D.Dot(this._tmpVec3[0], t.contactOnA.normal); n.point = t.contactOnA.position, n.distance = a, n.impulse = t.impulseApplied, n.normal = t.contactOnA.normal, this.onCollisionObservable.notifyObservers(n); } if (this._bodyCollisionObservable.size && n.type !== C4.COLLISION_FINISHED) { const a = this._bodyCollisionObservable.get(t.contactOnA.bodyId), l = this._bodyCollisionObservable.get(t.contactOnB.bodyId); a ? a.notifyObservers(n) : l && (n.collider = s.body, n.colliderIndex = s.index, n.collidedAgainst = r.body, n.collidedAgainstIndex = r.index, n.normal = t.contactOnB.normal, l.notifyObservers(n)); } else if (this._bodyCollisionEndedObservable.size) { const a = this._bodyCollisionEndedObservable.get(t.contactOnA.bodyId), l = this._bodyCollisionEndedObservable.get(t.contactOnB.bodyId); a ? 
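/*
 * Example (sketch): the per-body collision events dispatched below are typically consumed
 * through the public PhysicsBody helpers (assumed here to forward to the plugin methods above):
 *
 *   body.setCollisionCallbackEnabled(true);
 *   body.getCollisionObservable().add((ev) => {
 *     console.log("collided with", ev.collidedAgainst, "impulse", ev.impulse);
 *   });
 */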
a.notifyObservers(n) : l && (n.collider = s.body, n.colliderIndex = s.index, n.collidedAgainst = r.body, n.collidedAgainstIndex = r.index, n.normal = t.contactOnB.normal, l.notifyObservers(n)); } } e = this._hknp.HP_World_GetNextCollisionEvent(i, e); } } /** * Gets the number of bodies in the world */ get numBodies() { return this._hknp.HP_World_GetNumBodies(this.world)[1]; } /** * Dispose the world and free resources */ dispose() { this._hknp.HP_QueryCollector_Release(this._queryCollector), this._queryCollector = BigInt(0), this._hknp.HP_World_Release(this.world), this.world = void 0; } _v3ToBvecRef(e, t) { t.set(e[0], e[1], e[2]); } _bVecToV3(e) { return [e._x, e._y, e._z]; } _bQuatToV4(e) { return [e._x, e._y, e._z, e._w]; } _constraintMotorTypeToNative(e) { switch (e) { case Z8.POSITION: return this._hknp.ConstraintMotorType.POSITION; case Z8.VELOCITY: return this._hknp.ConstraintMotorType.VELOCITY; } return this._hknp.ConstraintMotorType.NONE; } _nativeToMotorType(e) { switch (e) { case this._hknp.ConstraintMotorType.POSITION: return Z8.POSITION; case this._hknp.ConstraintMotorType.VELOCITY: return Z8.VELOCITY; } return Z8.NONE; } _materialCombineToNative(e) { switch (e) { case wC.GEOMETRIC_MEAN: return this._hknp.MaterialCombine.GEOMETRIC_MEAN; case wC.MINIMUM: return this._hknp.MaterialCombine.MINIMUM; case wC.MAXIMUM: return this._hknp.MaterialCombine.MAXIMUM; case wC.ARITHMETIC_MEAN: return this._hknp.MaterialCombine.ARITHMETIC_MEAN; case wC.MULTIPLY: return this._hknp.MaterialCombine.MULTIPLY; } } _constraintAxisToNative(e) { switch (e) { case HA.LINEAR_X: return this._hknp.ConstraintAxis.LINEAR_X; case HA.LINEAR_Y: return this._hknp.ConstraintAxis.LINEAR_Y; case HA.LINEAR_Z: return this._hknp.ConstraintAxis.LINEAR_Z; case HA.ANGULAR_X: return this._hknp.ConstraintAxis.ANGULAR_X; case HA.ANGULAR_Y: return this._hknp.ConstraintAxis.ANGULAR_Y; case HA.ANGULAR_Z: return this._hknp.ConstraintAxis.ANGULAR_Z; case HA.LINEAR_DISTANCE: return this._hknp.ConstraintAxis.LINEAR_DISTANCE; } } _nativeToLimitMode(e) { switch (e) { case this._hknp.ConstraintAxisLimitMode.FREE: return OC.FREE; case this._hknp.ConstraintAxisLimitMode.LIMITED: return OC.LIMITED; case this._hknp.ConstraintAxisLimitMode.LOCKED: return OC.LOCKED; } return OC.FREE; } _limitModeToNative(e) { switch (e) { case OC.FREE: return this._hknp.ConstraintAxisLimitMode.FREE; case OC.LIMITED: return this._hknp.ConstraintAxisLimitMode.LIMITED; case OC.LOCKED: return this._hknp.ConstraintAxisLimitMode.LOCKED; } } _nativeCollisionValueToCollisionType(e) { switch (e) { case this._hknp.EventType.COLLISION_STARTED.value: return C4.COLLISION_STARTED; case this._hknp.EventType.COLLISION_FINISHED.value: return C4.COLLISION_FINISHED; case this._hknp.EventType.COLLISION_CONTINUED.value: return C4.COLLISION_CONTINUED; } return C4.COLLISION_STARTED; } _nativeTriggerCollisionValueToCollisionType(e) { switch (e) { case 8: return C4.TRIGGER_ENTERED; case 16: return C4.TRIGGER_EXITED; } return C4.TRIGGER_ENTERED; } } ii.prototype.getPhysicsEngine = function() { return this._physicsEngine; }; ii.prototype.enablePhysics = function(c = null, e) { if (this._physicsEngine) return !0; let t = this._getComponent(Bt.NAME_PHYSICSENGINE); t || (t = new Cge(this), this._addComponent(t)); try { if (!e || (e == null ? void 0 : e.getPluginVersion()) === 1) this._physicsEngine = new _W(c, e); else if ((e == null ? 
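/*
 * Example (sketch): enabling the V2 (Havok) engine on a scene. Assumes the "@babylonjs/havok"
 * WASM loader and the public HavokPlugin wrapper:
 *
 *   const havok = await HavokPhysics(); // from "@babylonjs/havok"
 *   const hk = new BABYLON.HavokPlugin(true, havok);
 *   scene.enablePhysics(new BABYLON.Vector3(0, -9.81, 0), hk);
 */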
void 0 : e.getPluginVersion()) === 2) this._physicsEngine = new GU(c, e); else throw new Error("Unsupported Physics plugin version."); return this._physicsTimeAccumulator = 0, !0; } catch (i) { return Ce.Error(i.message), !1; } }; ii.prototype.disablePhysicsEngine = function() { this._physicsEngine && (this._physicsEngine.dispose(), this._physicsEngine = null); }; ii.prototype.isPhysicsEnabled = function() { return this._physicsEngine !== void 0; }; ii.prototype.deleteCompoundImpostor = function(c) { const e = c.parts[0].mesh; e.physicsImpostor && (e.physicsImpostor.dispose( /*true*/ ), e.physicsImpostor = null); }; ii.prototype._advancePhysicsEngineStep = function(c) { if (this._physicsEngine) { const e = this._physicsEngine.getSubTimeStep(); if (e > 0) for (this._physicsTimeAccumulator += c; this._physicsTimeAccumulator > e; ) this.onBeforePhysicsObservable.notifyObservers(this), this._physicsEngine._step(e / 1e3), this.onAfterPhysicsObservable.notifyObservers(this), this._physicsTimeAccumulator -= e; else this.onBeforePhysicsObservable.notifyObservers(this), this._physicsEngine._step(c / 1e3), this.onAfterPhysicsObservable.notifyObservers(this); } }; class Cge { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_PHYSICSENGINE, this.scene = e, this.scene.onBeforePhysicsObservable = new Fe(), this.scene.onAfterPhysicsObservable = new Fe(), this.scene.getDeterministicFrameTime = () => this.scene._physicsEngine ? this.scene._physicsEngine.getTimeStep() * 1e3 : 1e3 / 60; } /** * Registers the component in a given scene */ register() { } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { this.scene.onBeforePhysicsObservable.clear(), this.scene.onAfterPhysicsObservable.clear(), this.scene._physicsEngine && this.scene.disablePhysicsEngine(); } } Object.defineProperty(xi.prototype, "physicsBody", { get: function() { return this._physicsBody; }, set: function(c) { this._physicsBody !== c && (this._disposePhysicsObserver && this.onDisposeObservable.remove(this._disposePhysicsObserver), this._physicsBody = c, c && (this._disposePhysicsObserver = this.onDisposeObservable.add(() => { this.physicsBody && (this.physicsBody.dispose( /*!doNotRecurse*/ ), this.physicsBody = null); }))); }, enumerable: !0, configurable: !0 }); xi.prototype.getPhysicsBody = function() { return this.physicsBody; }; xi.prototype.applyImpulse = function(c, e) { if (!this.physicsBody) throw new Error("No Physics Body for TransformNode"); return this.physicsBody.applyImpulse(c, e), this; }; class sP { /* * Gets the hit contact point between a mesh and a ray. The method varies between * the different plugin versions; V1 uses a mesh intersection, V2 uses the physics body instance/object center (to avoid a raycast and improve perf). */ static GetContactPointToRef(e, t, i, r, s) { const n = e.getScene().getPhysicsEngine(), a = n == null ? 
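/*
 * Example (sketch): the TransformNode.applyImpulse helper patched in above forwards to the
 * node's physicsBody. Assuming a mesh that already has a V2 physics body attached:
 *
 *   mesh.applyImpulse(
 *     new BABYLON.Vector3(0, 5, 0), // world-space impulse
 *     mesh.getAbsolutePosition()    // world-space application point
 *   );
 */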
void 0 : n.getPluginVersion(); if (a === 1) { const o = new gs(t, i).intersectsMesh(e); if (o.hit && o.pickedPoint) return r.copyFrom(o.pickedPoint), !0; } else if (a === 2) return e.physicsBody.getObjectCenterWorldToRef(r, s), !0; return !1; } /** * Checks if a body will be affected by forces * @param body the body to check * @param instanceIndex for instanced bodies, the index of the instance to check * @returns */ static HasAppliedForces(e, t) { var i, r, s; return e.getMotionType(t) === P4.STATIC || ((r = (i = e.getMassProperties(t)) === null || i === void 0 ? void 0 : i.mass) !== null && r !== void 0 ? r : 0) === 0 || ((s = e.transformNode) === null || s === void 0 ? void 0 : s.getTotalVertices()) === 0; } /** * Checks if a point is inside a cylinder * @param point point to check * @param origin cylinder origin on the bottom * @param radius cylinder radius * @param height cylinder height * @returns */ static IsInsideCylinder(e, t, i, r) { const s = de.Vector3[0]; return e.subtractToRef(t, s), Math.abs(s.x) <= i && Math.abs(s.z) <= i && s.y >= 0 && s.y <= r; } } class xge { /** * Initializes the Physics helper * @param scene Babylon.js scene */ constructor(e) { if (this._hitData = { force: new D(), contactPoint: new D(), distanceFromOrigin: 0 }, this._scene = e, this._physicsEngine = this._scene.getPhysicsEngine(), !this._physicsEngine) { Ce.Warn("Physics engine not enabled. Please enable the physics before you can use the methods."); return; } } /** * Applies a radial explosion impulse * @param origin the origin of the explosion * @param radiusOrEventOptions the radius or the options of radial explosion * @param strength the explosion strength * @param falloff possible options: Constant & Linear. Defaults to Constant * @returns A physics radial explosion event, or null */ applyRadialExplosionImpulse(e, t, i, r) { if (!this._physicsEngine) return Ce.Warn("Physics engine not enabled. Please enable the physics before you call this method."), null; if (this._physicsEngine.getPluginVersion() === 1 && this._physicsEngine.getImpostors().length === 0 || this._physicsEngine.getPluginVersion() === 2 && this._physicsEngine.getBodies().length === 0) return null; let s = !1; if (typeof t == "number") { const l = t; t = new mO(), t.radius = l, t.strength = i ?? t.strength, t.falloff = r ?? t.falloff; } else s = !!(t.affectedImpostorsCallback || t.affectedBodiesCallback); const n = new _q(this._scene, t), a = this._hitData; if (this._physicsEngine.getPluginVersion() === 1) { const l = Array(); this._physicsEngine.getImpostors().forEach((u) => { n.getImpostorHitData(u, e, a) && (u.applyImpulse(a.force, a.contactPoint), s && l.push({ impostor: u, hitData: this._copyPhysicsHitData(a) })); }), n.triggerAffectedImpostorsCallback(l); } else this._applicationForBodies(n, e, a, s, (l, o) => { l.applyImpulse(o.force, o.contactPoint, o.instanceIndex); }); return n.dispose(!1), n; } /** * Applies a radial explosion force * @param origin the origin of the explosion * @param radiusOrEventOptions the radius or the options of radial explosion * @param strength the explosion strength * @param falloff possible options: Constant & Linear. Defaults to Constant * @returns A physics radial explosion event, or null */ applyRadialExplosionForce(e, t, i, r) { if (!this._physicsEngine) return Ce.Warn("Physics engine not enabled. 
Please enable the physics before you call the PhysicsHelper."), null; if (this._physicsEngine.getPluginVersion() === 1 && this._physicsEngine.getImpostors().length === 0 || this._physicsEngine.getPluginVersion() === 2 && this._physicsEngine.getBodies().length === 0) return null; let s = !1; if (typeof t == "number") { const l = t; t = new mO(), t.radius = l, t.strength = i ?? t.strength, t.falloff = r ?? t.falloff; } else s = !!(t.affectedImpostorsCallback || t.affectedBodiesCallback); const n = new _q(this._scene, t), a = this._hitData; if (this._physicsEngine.getPluginVersion() === 1) { const l = Array(); this._physicsEngine.getImpostors().forEach((u) => { n.getImpostorHitData(u, e, a) && (u.applyForce(a.force, a.contactPoint), s && l.push({ impostor: u, hitData: this._copyPhysicsHitData(a) })); }), n.triggerAffectedImpostorsCallback(l); } else this._applicationForBodies(n, e, a, s, (l, o) => { l.applyForce(o.force, o.contactPoint, o.instanceIndex); }); return n.dispose(!1), n; } _applicationForBodies(e, t, i, r, s) { const n = Array(), a = this._physicsEngine.getBodies(); for (const l of a) l.iterateOverAllInstances((o, u) => { e.getBodyHitData(o, t, i, u) && (s(o, i), r && n.push({ body: o, hitData: this._copyPhysicsHitData(i) })); }); e.triggerAffectedBodiesCallback(n); } /** * Creates a gravitational field * @param origin the origin of the gravitational field * @param radiusOrEventOptions the radius or the options of radial gravitational field * @param strength the gravitational field strength * @param falloff possible options: Constant & Linear. Defaults to Constant * @returns A physics gravitational field event, or null */ gravitationalField(e, t, i, r) { if (!this._physicsEngine) return Ce.Warn("Physics engine not enabled. Please enable the physics before you call the PhysicsHelper."), null; if (this._physicsEngine.getPluginVersion() === 1 && this._physicsEngine.getImpostors().length === 0 || this._physicsEngine.getPluginVersion() === 2 && this._physicsEngine.getBodies().length === 0) return null; if (typeof t == "number") { const n = t; t = new mO(), t.radius = n, t.strength = i ?? t.strength, t.falloff = r ?? t.falloff; } const s = new bge(this, this._scene, e, t); return s.dispose(!1), s; } /** * Creates a physics updraft event * @param origin the origin of the updraft * @param radiusOrEventOptions the radius or the options of the updraft * @param strength the strength of the updraft * @param height the height of the updraft * @param updraftMode possible options: Center & Perpendicular. Defaults to Center * @returns A physics updraft event, or null */ updraft(e, t, i, r, s) { if (!this._physicsEngine) return Ce.Warn("Physics engine not enabled. Please enable the physics before you call the PhysicsHelper."), null; if (this._physicsEngine.getPluginVersion() === 1 && this._physicsEngine.getImpostors().length === 0 || this._physicsEngine.getPluginVersion() === 2 && this._physicsEngine.getBodies().length === 0) return null; if (typeof t == "number") { const a = t; t = new aj(), t.radius = a, t.strength = i ?? t.strength, t.height = r ?? t.height, t.updraftMode = s ?? 
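/*
 * Example (sketch): the PhysicsHelper events are created from an origin plus either a radius
 * or an options object. Assuming the public PhysicsHelper wrapper and the
 * PhysicsRadialImpulseFalloff enum:
 *
 *   const helper = new BABYLON.PhysicsHelper(scene);
 *   helper.applyRadialExplosionImpulse(
 *     new BABYLON.Vector3(0, 1, 0), // origin
 *     8,                            // radius
 *     30,                           // strength
 *     BABYLON.PhysicsRadialImpulseFalloff.Linear
 *   );
 */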
t.updraftMode; } const n = new WU(this._scene, e, t); return n.dispose(!1), n; } /** * Creates a physics vortex event * @param origin the origin of the vortex * @param radiusOrEventOptions the radius or the options of the vortex * @param strength the strength of the vortex * @param height the height of the vortex * @returns A physics vortex event, or null */ vortex(e, t, i, r) { if (!this._physicsEngine) return Ce.Warn("Physics engine not enabled. Please enable the physics before you call the PhysicsHelper."), null; if (this._physicsEngine.getPluginVersion() === 1 && this._physicsEngine.getImpostors().length === 0 || this._physicsEngine.getPluginVersion() === 2 && this._physicsEngine.getBodies().length === 0) return null; if (typeof t == "number") { const n = t; t = new oj(), t.radius = n, t.strength = i ?? t.strength, t.height = r ?? t.height; } const s = new JO(this._scene, e, t); return s.dispose(!1), s; } _copyPhysicsHitData(e) { return { force: e.force.clone(), contactPoint: e.contactPoint.clone(), distanceFromOrigin: e.distanceFromOrigin, instanceIndex: e.instanceIndex }; } } class _q { /** * Initializes a radial explosion event * @param _scene BabylonJS scene * @param _options The options for the radial explosion event */ constructor(e, t) { this._scene = e, this._options = t, this._dataFetched = !1, this._options = Object.assign(Object.assign({}, new mO()), this._options); } /** * Returns the data related to the radial explosion event (sphere). * @returns The radial explosion event data */ getData() { return this._dataFetched = !0, { sphere: this._sphere }; } _getHitData(e, t, i, r) { const s = de.Vector3[0]; s.copyFrom(t).subtractInPlace(i); const n = de.Vector3[1]; if (!sP.GetContactPointToRef(e, i, s, n, r.instanceIndex)) return !1; const l = D.Distance(i, n); if (l > this._options.radius) return !1; const o = this._options.falloff === vL.Constant ? this._options.strength : this._options.strength * (1 - l / this._options.radius); return s.scaleInPlace(o), r.force.copyFrom(s), r.contactPoint.copyFrom(n), r.distanceFromOrigin = l, !0; } /** * Returns the force and contact point of the body or false, if the body is not affected by the force/impulse. * @param body A physics body where the transform node is an AbstractMesh * @param origin the origin of the explosion * @param data the data of the hit * @param instanceIndex the instance index of the body * @returns if there was a hit */ getBodyHitData(e, t, i, r) { if (sP.HasAppliedForces(e, r)) return !1; const s = e.transformNode, n = e.getObjectCenterWorld(r); return i.instanceIndex = r, this._getHitData(s, n, t, i); } /** * Returns the force and contact point of the impostor or false, if the impostor is not affected by the force/impulse. 
* @param impostor A physics imposter * @param origin the origin of the explosion * @returns A physics force and contact point, or null */ getImpostorHitData(e, t, i) { if (e.mass === 0 || e.object.getClassName() !== "Mesh" && e.object.getClassName() !== "InstancedMesh") return !1; const r = e.object; if (!this._intersectsWithSphere(r, t, this._options.radius)) return !1; const s = e.getObjectCenter(); return this._getHitData(r, s, t, i), !0; } /** * Triggers affected impostors callbacks * @param affectedImpostorsWithData defines the list of affected impostors (including associated data) */ triggerAffectedImpostorsCallback(e) { this._options.affectedImpostorsCallback && this._options.affectedImpostorsCallback(e); } /** * Triggers affected bodies callbacks * @param affectedBodiesWithData defines the list of affected bodies (including associated data) */ triggerAffectedBodiesCallback(e) { this._options.affectedBodiesCallback && this._options.affectedBodiesCallback(e); } /** * Disposes the sphere. * @param force Specifies if the sphere should be disposed by force */ dispose(e = !0) { this._sphere && (e ? this._sphere.dispose() : setTimeout(() => { this._dataFetched || this._sphere.dispose(); }, 0)); } /*** Helpers ***/ _prepareSphere() { this._sphere || (this._sphere = Rd("radialExplosionEventSphere", this._options.sphere, this._scene), this._sphere.isVisible = !1); } _intersectsWithSphere(e, t, i) { return this._prepareSphere(), this._sphere.position = t, this._sphere.scaling.setAll(i * 2), this._sphere._updateBoundingInfo(), this._sphere.computeWorldMatrix(!0), this._sphere.intersectsMesh(e, !0); } } class bge { /** * Initializes the physics gravitational field event * @param _physicsHelper A physics helper * @param _scene BabylonJS scene * @param _origin The origin position of the gravitational field event * @param _options The options for the vortex event */ constructor(e, t, i, r) { this._physicsHelper = e, this._scene = t, this._origin = i, this._options = r, this._dataFetched = !1, this._options = Object.assign(Object.assign({}, new mO()), this._options), this._tickCallback = () => this._tick(), this._options.strength = this._options.strength * -1; } /** * Returns the data related to the gravitational field event (sphere). * @returns A gravitational field event */ getData() { return this._dataFetched = !0, { sphere: this._sphere }; } /** * Enables the gravitational field. */ enable() { this._tickCallback.call(this), this._scene.registerBeforeRender(this._tickCallback); } /** * Disables the gravitational field. */ disable() { this._scene.unregisterBeforeRender(this._tickCallback); } /** * Disposes the sphere. * @param force The force to dispose from the gravitational field event */ dispose(e = !0) { this._sphere && (e ? this._sphere.dispose() : setTimeout(() => { this._dataFetched || this._sphere.dispose(); }, 0)); } _tick() { var e; if (this._sphere) this._physicsHelper.applyRadialExplosionForce(this._origin, this._options); else { const t = this._physicsHelper.applyRadialExplosionForce(this._origin, this._options); t && (this._sphere = (e = t.getData().sphere) === null || e === void 0 ? 
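/*
 * Example (sketch): a gravitational field event must be enabled explicitly; once enabled it
 * applies a negative-strength radial force every frame until disabled (assuming the public
 * PhysicsHelper wrapper; "helper" is a placeholder):
 *
 *   const field = helper.gravitationalField(new BABYLON.Vector3(0, 2, 0), 12, 20);
 *   if (field) {
 *     field.enable();
 *     // ...later...
 *     field.disable();
 *     field.dispose();
 *   }
 */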
void 0 : e.clone("radialExplosionEventSphereClone")); } } } class WU { /** * Initializes the physics updraft event * @param _scene BabylonJS scene * @param _origin The origin position of the updraft * @param _options The options for the updraft event */ constructor(e, t, i) { this._scene = e, this._origin = t, this._options = i, this._originTop = D.Zero(), this._originDirection = D.Zero(), this._cylinderPosition = D.Zero(), this._dataFetched = !1, this._physicsEngine = this._scene.getPhysicsEngine(), this._options = Object.assign(Object.assign({}, new aj()), this._options), this._origin.addToRef(new D(0, this._options.height / 2, 0), this._cylinderPosition), this._origin.addToRef(new D(0, this._options.height, 0), this._originTop), this._options.updraftMode === ew.Perpendicular && (this._originDirection = this._origin.subtract(this._originTop).normalize()), this._tickCallback = () => this._tick(), this._physicsEngine.getPluginVersion() === 1 && this._prepareCylinder(); } /** * Returns the data related to the updraft event (cylinder). * @returns A physics updraft event */ getData() { return this._dataFetched = !0, { cylinder: this._cylinder }; } /** * Enables the updraft. */ enable() { this._tickCallback.call(this), this._scene.registerBeforeRender(this._tickCallback); } /** * Disables the updraft. */ disable() { this._scene.unregisterBeforeRender(this._tickCallback); } /** * Disposes the cylinder. * @param force Specifies if the updraft should be disposed by force */ dispose(e = !0) { this._cylinder && (e ? (this._cylinder.dispose(), this._cylinder = void 0) : setTimeout(() => { !this._dataFetched && this._cylinder && (this._cylinder.dispose(), this._cylinder = void 0); }, 0)); } _getHitData(e, t) { let i; this._options.updraftMode === ew.Perpendicular ? i = this._originDirection : i = e.subtract(this._originTop); const r = D.Distance(this._origin, e), s = this._options.strength * -1, n = i.multiplyByFloats(s, s, s); t.force.copyFrom(n), t.contactPoint.copyFrom(e), t.distanceFromOrigin = r; } _getBodyHitData(e, t, i) { if (sP.HasAppliedForces(e)) return !1; const r = e.getObjectCenterWorld(i); return sP.IsInsideCylinder(r, this._origin, this._options.radius, this._options.height) ? (t.instanceIndex = i, this._getHitData(r, t), !0) : !1; } _getImpostorHitData(e, t) { if (e.mass === 0) return !1; const i = e.object; if (!this._intersectsWithCylinder(i)) return !1; const r = e.getObjectCenter(); return this._getHitData(r, t), !0; } _tick() { const e = WU._HitData; this._physicsEngine.getPluginVersion() === 1 ? this._physicsEngine.getImpostors().forEach((t) => { this._getImpostorHitData(t, e) && t.applyForce(e.force, e.contactPoint); }) : this._physicsEngine.getBodies().forEach((t) => { t.iterateOverAllInstances((i, r) => { this._getBodyHitData(i, e, r) && i.applyForce(e.force, e.contactPoint, e.instanceIndex); }); }); } /*** Helpers ***/ _prepareCylinder() { this._cylinder || (this._cylinder = Hf("updraftEventCylinder", { height: this._options.height, diameter: this._options.radius * 2 }, this._scene), this._cylinder.isVisible = !1); } _intersectsWithCylinder(e) { return this._cylinder ? 
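/*
 * Example (sketch): an updraft event pushes bodies inside a cylinder upward (or along the
 * axis-to-body direction in Perpendicular mode). Assumes the public PhysicsUpdraftMode enum;
 * "helper" is a placeholder PhysicsHelper instance:
 *
 *   const updraft = helper.updraft(
 *     new BABYLON.Vector3(0, 0, 0), // origin (bottom of the cylinder)
 *     { radius: 3, strength: 10, height: 6, updraftMode: BABYLON.PhysicsUpdraftMode.Center }
 *   );
 *   if (updraft) {
 *     updraft.enable();
 *   }
 */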
(this._cylinder.position = this._cylinderPosition, this._cylinder.intersectsMesh(e, !0)) : !1; } } WU._HitData = { force: new D(), contactPoint: new D(), distanceFromOrigin: 0 }; class JO { /** * Initializes the physics vortex event * @param _scene The BabylonJS scene * @param _origin The origin position of the vortex * @param _options The options for the vortex event */ constructor(e, t, i) { this._scene = e, this._origin = t, this._options = i, this._originTop = D.Zero(), this._cylinderPosition = D.Zero(), this._dataFetched = !1, this._physicsEngine = this._scene.getPhysicsEngine(), this._options = Object.assign(Object.assign({}, new oj()), this._options), this._origin.addToRef(new D(0, this._options.height / 2, 0), this._cylinderPosition), this._origin.addToRef(new D(0, this._options.height, 0), this._originTop), this._tickCallback = () => this._tick(), this._physicsEngine.getPluginVersion() === 1 && this._prepareCylinder(); } /** * Returns the data related to the vortex event (cylinder). * @returns The physics vortex event data */ getData() { return this._dataFetched = !0, { cylinder: this._cylinder }; } /** * Enables the vortex. */ enable() { this._tickCallback.call(this), this._scene.registerBeforeRender(this._tickCallback); } /** * Disables the vortex. */ disable() { this._scene.unregisterBeforeRender(this._tickCallback); } /** * Disposes the cylinder. * @param force Specifies if the cylinder should be disposed by force */ dispose(e = !0) { this._cylinder && (e ? this._cylinder.dispose() : setTimeout(() => { this._dataFetched || this._cylinder.dispose(); }, 0)); } _getHitData(e, t, i) { const r = JO.originOnPlane; r.set(this._origin.x, t.y, this._origin.z); const s = de.Vector3[0]; t.subtractToRef(r, s); const n = de.Vector3[1]; if (!sP.GetContactPointToRef(e, r, s, n, i.instanceIndex)) return !1; const o = D.Distance(n, r) / this._options.radius, u = de.Vector3[2]; n.normalizeToRef(u), o > this._options.centripetalForceThreshold && u.negateInPlace(); let h, d, f; if (o > this._options.centripetalForceThreshold) h = u.x * this._options.centripetalForceMultiplier, d = u.y * this._options.updraftForceMultiplier, f = u.z * this._options.centripetalForceMultiplier; else { const m = D.Cross(r, t).normalize(); h = (m.x + u.x) * this._options.centrifugalForceMultiplier, d = this._originTop.y * this._options.updraftForceMultiplier, f = (m.z + u.z) * this._options.centrifugalForceMultiplier; } const p = de.Vector3[3]; return p.set(h, d, f), p.scaleInPlace(this._options.strength), i.force.copyFrom(p), i.contactPoint.copyFrom(t), i.distanceFromOrigin = o, !0; } _getBodyHitData(e, t, i) { if (sP.HasAppliedForces(e, i)) return !1; const r = e.transformNode, s = e.getObjectCenterWorld(i); return sP.IsInsideCylinder(s, this._origin, this._options.radius, this._options.height) ? (t.instanceIndex = i, this._getHitData(r, s, t)) : !1; } _getImpostorHitData(e, t) { if (e.mass === 0 || e.object.getClassName() !== "Mesh" && e.object.getClassName() !== "InstancedMesh") return !1; const i = e.object; if (!this._intersectsWithCylinder(i)) return !1; const r = e.getObjectCenter(); return this._getHitData(i, r, t), !0; } _tick() { const e = JO.hitData; this._physicsEngine.getPluginVersion() === 1 ? 
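/*
 * Example (sketch): a vortex event combines centripetal, centrifugal and updraft forces inside
 * a cylinder ("helper" is a placeholder PhysicsHelper instance):
 *
 *   const vortex = helper.vortex(new BABYLON.Vector3(0, 0, 0), 5, 12, 8);
 *   if (vortex) {
 *     vortex.enable();
 *     // ...later...
 *     vortex.disable();
 *   }
 */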
this._physicsEngine.getImpostors().forEach((t) => { this._getImpostorHitData(t, e) && t.applyForce(e.force, e.contactPoint); }) : this._physicsEngine.getBodies().forEach((t) => { t.iterateOverAllInstances((i, r) => { this._getBodyHitData(i, e, r) && i.applyForce(e.force, e.contactPoint, e.instanceIndex); }); }); } /*** Helpers ***/ _prepareCylinder() { this._cylinder || (this._cylinder = Hf("vortexEventCylinder", { height: this._options.height, diameter: this._options.radius * 2 }, this._scene), this._cylinder.isVisible = !1); } _intersectsWithCylinder(e) { return this._cylinder.position = this._cylinderPosition, this._cylinder.intersectsMesh(e, !0); } } JO.originOnPlane = D.Zero(); JO.hitData = { force: new D(), contactPoint: new D(), distanceFromOrigin: 0 }; class mO { constructor() { this.radius = 5, this.strength = 10, this.falloff = vL.Constant, this.sphere = { segments: 32, diameter: 1 }; } } class aj { constructor() { this.radius = 5, this.strength = 10, this.height = 10, this.updraftMode = ew.Center; } } class oj { constructor() { this.radius = 5, this.strength = 10, this.height = 10, this.centripetalForceThreshold = 0.7, this.centripetalForceMultiplier = 5, this.centrifugalForceMultiplier = 0.5, this.updraftForceMultiplier = 0.02; } } var vL; (function(c) { c[c.Constant = 0] = "Constant", c[c.Linear = 1] = "Linear"; })(vL || (vL = {})); var ew; (function(c) { c[c.Center = 0] = "Center", c[c.Perpendicular = 1] = "Perpendicular"; })(ew || (ew = {})); const Ege = "blackAndWhitePixelShader", Tge = `varying vec2 vUV;uniform sampler2D textureSampler;uniform float degree; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec3 color=texture2D(textureSampler,vUV).rgb;float luminance=dot(color,vec3(0.3,0.59,0.11)); vec3 blackAndWhite=vec3(luminance,luminance,luminance);gl_FragColor=vec4(color-((color-blackAndWhite)*degree),1.0);}`; je.ShadersStore[Ege] = Tge; class yN extends Bi { /** * Gets a string identifying the name of the class * @returns "BlackAndWhitePostProcess" string */ getClassName() { return "BlackAndWhitePostProcess"; } /** * Creates a black and white post process * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#black-and-white * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) */ constructor(e, t, i, r, s, n) { super(e, "blackAndWhite", ["degree"], null, t, i, r, s, n), this.degree = 1, this.onApplyObservable.add((a) => { a.setFloat("degree", this.degree); }); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new yN(e.name, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } F([ W() ], yN.prototype, "degree", void 0); Be("BABYLON.BlackAndWhitePostProcess", yN); class gn { /** * Instantiates a post process render effect. * A post process can be used to apply a shader to a texture after it is rendered. * @param engine The engine the effect is tied to * @param name The name of the effect * @param getPostProcesses A function that returns a set of post processes which the effect will run in order to be run. 
* @param singleInstance False if this post process can be run on multiple cameras. (default: true) */ constructor(e, t, i, r) { this._name = t, this._singleInstance = r || !0, this._getPostProcesses = i, this._cameras = {}, this._indicesForCamera = {}, this._postProcesses = {}; } /** * Checks if all the post processes in the effect are supported. */ get isSupported() { for (const e in this._postProcesses) if (Object.prototype.hasOwnProperty.call(this._postProcesses, e)) { const t = this._postProcesses[e]; for (let i = 0; i < t.length; i++) if (!t[i].isSupported) return !1; } return !0; } /** * Updates the current state of the effect * @internal */ _update() { } /** * Attaches the effect on cameras * @param cameras The camera to attach to. * @internal */ _attachCameras(e) { let t; const i = Ve.MakeArray(e || this._cameras); if (i) for (let r = 0; r < i.length; r++) { const s = i[r]; if (!s) continue; const n = s.name; if (this._singleInstance ? t = 0 : t = n, !this._postProcesses[t]) { const a = this._getPostProcesses(); a && (this._postProcesses[t] = Array.isArray(a) ? a : [a]); } this._indicesForCamera[n] || (this._indicesForCamera[n] = []), this._postProcesses[t].forEach((a) => { const l = s.attachPostProcess(a); this._indicesForCamera[n].push(l); }), this._cameras[n] || (this._cameras[n] = s); } } /** * Detaches the effect on cameras * @param cameras The camera to detach from. * @internal */ _detachCameras(e) { const t = Ve.MakeArray(e || this._cameras); if (t) for (let i = 0; i < t.length; i++) { const r = t[i], s = r.name, n = this._postProcesses[this._singleInstance ? 0 : s]; n && n.forEach((a) => { r.detachPostProcess(a); }), this._cameras[s] && (this._cameras[s] = null), delete this._indicesForCamera[s]; } } /** * Enables the effect on given cameras * @param cameras The camera to enable. * @internal */ _enable(e) { const t = Ve.MakeArray(e || this._cameras); if (t) for (let i = 0; i < t.length; i++) { const r = t[i], s = r.name, n = this._singleInstance ? 0 : s; for (let a = 0; a < this._indicesForCamera[s].length; a++) { const l = this._indicesForCamera[s][a], o = r._postProcesses[l]; o == null && t[i].attachPostProcess(this._postProcesses[n][a], l); } } } /** * Disables the effect on the given cameras * @param cameras The camera to disable. * @internal */ _disable(e) { const t = Ve.MakeArray(e || this._cameras); if (t) for (let i = 0; i < t.length; i++) { const r = t[i], s = r.name; this._postProcesses[this._singleInstance ? 0 : s].forEach((n) => { r.detachPostProcess(n); }); } } /** * Gets a list of the post processes contained in the effect. * @param camera The camera to get the post processes on. * @returns The list of the post processes in the effect. */ getPostProcesses(e) { return this._singleInstance ? this._postProcesses[0] : e ? 
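/*
 * Example (sketch): post processes such as BlackAndWhitePostProcess attach directly to a
 * camera; PostProcessRenderEffect groups several of them for a pipeline. A minimal use of the
 * black-and-white pass (assuming an existing scene camera):
 *
 *   const bw = new BABYLON.BlackAndWhitePostProcess("bw", 1.0, camera);
 *   bw.degree = 0.5; // 0 = original colors, 1 = fully desaturated
 */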
this._postProcesses[e.name] : null; } } const Sge = "extractHighlightsPixelShader", Mge = `#include varying vec2 vUV;uniform sampler2D textureSampler;uniform float threshold;uniform float exposure; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(textureSampler,vUV);float luma=dot(LuminanceEncodeApprox,gl_FragColor.rgb*exposure);gl_FragColor.rgb=step(threshold,luma)*gl_FragColor.rgb;}`; je.ShadersStore[Sge] = Mge; class jU extends Bi { /** * Gets a string identifying the name of the class * @returns "ExtractHighlightsPostProcess" string */ getClassName() { return "ExtractHighlightsPostProcess"; } constructor(e, t, i, r, s, n, a = 0, l = !1) { super(e, "extractHighlights", ["threshold", "exposure"], null, t, i, r, s, n, null, a, void 0, null, l), this.threshold = 0.9, this._exposure = 1, this._inputPostProcess = null, this.onApplyObservable.add((o) => { this.externalTextureSamplerBinding = !!this._inputPostProcess, this._inputPostProcess && o.setTextureFromPostProcess("textureSampler", this._inputPostProcess), o.setFloat("threshold", Math.pow(this.threshold, nO)), o.setFloat("exposure", this._exposure); }); } } F([ W() ], jU.prototype, "threshold", void 0); Be("BABYLON.ExtractHighlightsPostProcess", jU); const Rge = "bloomMergePixelShader", Pge = `uniform sampler2D textureSampler;uniform sampler2D bloomBlur;varying vec2 vUV;uniform float bloomWeight; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(textureSampler,vUV);vec3 blurred=texture2D(bloomBlur,vUV).rgb;gl_FragColor.rgb=gl_FragColor.rgb+(blurred.rgb*bloomWeight); } `; je.ShadersStore[Rge] = Pge; class XU extends Bi { /** * Gets a string identifying the name of the class * @returns "BloomMergePostProcess" string */ getClassName() { return "BloomMergePostProcess"; } /** * Creates a new instance of @see BloomMergePostProcess * @param name The name of the effect. * @param originalFromInput Post process which's input will be used for the merge. * @param blurred Blurred highlights post process which's output will be used. * @param weight Weight of the bloom to be added to the original input. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i, r, s, n, a, l, o, u = 0, h = !1) { super(e, "bloomMerge", ["bloomWeight"], ["bloomBlur"], s, n, a, l, o, null, u, void 0, null, !0), this.weight = 1, this.weight = r, this.externalTextureSamplerBinding = !0, this.onApplyObservable.add((d) => { d.setTextureFromPostProcess("textureSampler", t), d.setTextureFromPostProcessOutput("bloomBlur", i), d.setFloat("bloomWeight", this.weight); }), h || this.updateEffect(); } } F([ W() ], XU.prototype, "weight", void 0); Be("BABYLON.BloomMergePostProcess", XU); class GH extends gn { /** * The luminance threshold to find bright areas of the image to bloom. 
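*
* Illustrative sketch (assumption, not library documentation): `GH` is the minified name of the bloom effect built from the highlight-extraction, directional blur and merge post processes defined above. Assuming an existing Babylon.js `scene`, a typical configuration could look like:
*
*   const bloom = new GH(scene, 0.5, 0.75, 64); // half-resolution blur, weight 0.75, 64px kernel
*   bloom.threshold = 0.9; // only fragments brighter than the threshold contribute to the bloom
*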
*/ get threshold() { return this._downscale.threshold; } set threshold(e) { this._downscale.threshold = e; } /** * The strength of the bloom. */ get weight() { return this._merge.weight; } set weight(e) { this._merge.weight = e; } /** * Specifies the size of the bloom blur kernel, relative to the final output size */ get kernel() { return this._blurX.kernel / this._bloomScale; } set kernel(e) { this._blurX.kernel = e * this._bloomScale, this._blurY.kernel = e * this._bloomScale; } /** * Creates a new instance of @see BloomEffect * @param scene The scene the effect belongs to. * @param _bloomScale The ratio of the blur texture to the input texture that should be used to compute the bloom. * @param bloomWeight The strength of bloom. * @param bloomKernel The size of the kernel to be used when applying the blur. * @param pipelineTextureType The type of texture to be used when performing the post processing. * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i, r, s = 0, n = !1) { super(e.getEngine(), "bloom", () => this._effects, !0), this._bloomScale = t, this._effects = [], this._downscale = new jU("highlights", 1, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, s, n), this._blurX = new fu("horizontal blur", new at(1, 0), 10, t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, s, void 0, n), this._blurX.alwaysForcePOT = !0, this._blurX.autoClear = !1, this._blurY = new fu("vertical blur", new at(0, 1), 10, t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, s, void 0, n), this._blurY.alwaysForcePOT = !0, this._blurY.autoClear = !1, this.kernel = r, this._effects = [this._downscale, this._blurX, this._blurY], this._merge = new XU("bloomMerge", this._downscale, this._blurY, i, t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, s, n), this._merge.autoClear = !1, this._effects.push(this._merge); } /** * Disposes each of the internal effects for a given camera. * @param camera The camera to dispose the effect on. */ disposeEffects(e) { for (let t = 0; t < this._effects.length; t++) this._effects[t].dispose(e); } /** * @internal Internal */ _updateEffects() { for (let e = 0; e < this._effects.length; e++) this._effects[e].updateEffect(); } /** * Internal * @returns if all the contained post processes are ready. * @internal */ _isReady() { for (let e = 0; e < this._effects.length; e++) if (!this._effects[e].isReady()) return !1; return !0; } } const Ige = "chromaticAberrationPixelShader", Dge = `uniform sampler2D textureSampler; uniform float chromatic_aberration;uniform float radialIntensity;uniform vec2 direction;uniform vec2 centerPosition;uniform float screen_width;uniform float screen_height;varying vec2 vUV; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec2 centered_screen_pos=vec2(vUV.x-centerPosition.x,vUV.y-centerPosition.y);vec2 directionOfEffect=direction;if(directionOfEffect.x==0. 
&& directionOfEffect.y==0.){directionOfEffect=normalize(centered_screen_pos);} float radius2=centered_screen_pos.x*centered_screen_pos.x + centered_screen_pos.y*centered_screen_pos.y;float radius=sqrt(radius2);vec4 original=texture2D(textureSampler,vUV);vec3 ref_indices=vec3(-0.3,0.0,0.3);float ref_shiftX=chromatic_aberration*pow(radius,radialIntensity)*directionOfEffect.x/screen_width;float ref_shiftY=chromatic_aberration*pow(radius,radialIntensity)*directionOfEffect.y/screen_height;vec2 ref_coords_r=vec2(vUV.x+ref_indices.r*ref_shiftX,vUV.y+ref_indices.r*ref_shiftY*0.5);vec2 ref_coords_g=vec2(vUV.x+ref_indices.g*ref_shiftX,vUV.y+ref_indices.g*ref_shiftY*0.5);vec2 ref_coords_b=vec2(vUV.x+ref_indices.b*ref_shiftX,vUV.y+ref_indices.b*ref_shiftY*0.5);original.r=texture2D(textureSampler,ref_coords_r).r;original.g=texture2D(textureSampler,ref_coords_g).g;original.b=texture2D(textureSampler,ref_coords_b).b;original.a=clamp(texture2D(textureSampler,ref_coords_r).a+texture2D(textureSampler,ref_coords_g).a+texture2D(textureSampler,ref_coords_b).a,0.,1.);gl_FragColor=original;}`; je.ShadersStore[Ige] = Dge; class d6 extends Bi { /** * Gets a string identifying the name of the class * @returns "ChromaticAberrationPostProcess" string */ getClassName() { return "ChromaticAberrationPostProcess"; } /** * Creates a new instance ChromaticAberrationPostProcess * @param name The name of the effect. * @param screenWidth The width of the screen to apply the effect on. * @param screenHeight The height of the screen to apply the effect on. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. 
(default: false) */ constructor(e, t, i, r, s, n, a, l, o = 0, u = !1) { super(e, "chromaticAberration", ["chromatic_aberration", "screen_width", "screen_height", "direction", "radialIntensity", "centerPosition"], [], r, s, n, a, l, null, o, void 0, null, u), this.aberrationAmount = 30, this.radialIntensity = 0, this.direction = new at(0.707, 0.707), this.centerPosition = new at(0.5, 0.5), this.screenWidth = t, this.screenHeight = i, this.onApplyObservable.add((h) => { h.setFloat("chromatic_aberration", this.aberrationAmount), h.setFloat("screen_width", t), h.setFloat("screen_height", i), h.setFloat("radialIntensity", this.radialIntensity), h.setFloat2("direction", this.direction.x, this.direction.y), h.setFloat2("centerPosition", this.centerPosition.x, this.centerPosition.y); }); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new d6(e.name, e.screenWidth, e.screenHeight, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable, e.textureType, !1), e, i, r); } } F([ W() ], d6.prototype, "aberrationAmount", void 0); F([ W() ], d6.prototype, "radialIntensity", void 0); F([ W() ], d6.prototype, "direction", void 0); F([ W() ], d6.prototype, "centerPosition", void 0); F([ W() ], d6.prototype, "screenWidth", void 0); F([ W() ], d6.prototype, "screenHeight", void 0); Be("BABYLON.ChromaticAberrationPostProcess", d6); const Oge = "circleOfConfusionPixelShader", wge = `uniform sampler2D depthSampler;varying vec2 vUV;uniform vec2 cameraMinMaxZ;uniform float focusDistance;uniform float cocPrecalculation; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {float depth=texture2D(depthSampler,vUV).r; #define CUSTOM_COC_DEPTH float pixelDistance=(cameraMinMaxZ.x+cameraMinMaxZ.y*depth)*1000.0; #define CUSTOM_COC_PIXELDISTANCE float coc=abs(cocPrecalculation*((focusDistance-pixelDistance)/pixelDistance));coc=clamp(coc,0.0,1.0);gl_FragColor=vec4(coc,coc,coc,1.0);} `; je.ShadersStore[Oge] = wge; class QI extends Bi { /** * Gets a string identifying the name of the class * @returns "CircleOfConfusionPostProcess" string */ getClassName() { return "CircleOfConfusionPostProcess"; } /** * Creates a new instance CircleOfConfusionPostProcess * @param name The name of the effect. * @param depthTexture The depth texture of the scene to compute the circle of confusion. This must be set in order for this to function but may be set after initialization if needed. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. 
(default: false) */ constructor(e, t, i, r, s, n, a, l = 0, o = !1) { super(e, "circleOfConfusion", ["cameraMinMaxZ", "focusDistance", "cocPrecalculation"], ["depthSampler"], i, r, s, n, a, null, l, void 0, null, o), this.lensSize = 50, this.fStop = 1.4, this.focusDistance = 2e3, this.focalLength = 50, this._depthTexture = null, this._depthTexture = t, this.onApplyObservable.add((u) => { if (!this._depthTexture) { Ce.Warn("No depth texture set on CircleOfConfusionPostProcess"); return; } u.setTexture("depthSampler", this._depthTexture); const d = this.lensSize / this.fStop * this.focalLength / (this.focusDistance - this.focalLength); u.setFloat("focusDistance", this.focusDistance), u.setFloat("cocPrecalculation", d); const f = this._depthTexture.activeCamera; u.setFloat2("cameraMinMaxZ", f.minZ, f.maxZ - f.minZ); }); } /** * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function. */ set depthTexture(e) { this._depthTexture = e; } } F([ W() ], QI.prototype, "lensSize", void 0); F([ W() ], QI.prototype, "fStop", void 0); F([ W() ], QI.prototype, "focusDistance", void 0); F([ W() ], QI.prototype, "focalLength", void 0); Be("BABYLON.CircleOfConfusionPostProcess", QI); const Lge = "colorCorrectionPixelShader", Nge = `uniform sampler2D textureSampler; uniform sampler2D colorTable; varying vec2 vUV;const float SLICE_COUNT=16.0; vec4 sampleAs3DTexture(sampler2D textureSampler,vec3 uv,float width) {float sliceSize=1.0/width; float slicePixelSize=sliceSize/width; float sliceInnerSize=slicePixelSize*(width-1.0); float zSlice0=min(floor(uv.z*width),width-1.0);float zSlice1=min(zSlice0+1.0,width-1.0);float xOffset=slicePixelSize*0.5+uv.x*sliceInnerSize;float s0=xOffset+(zSlice0*sliceSize);float s1=xOffset+(zSlice1*sliceSize);vec4 slice0Color=texture2D(textureSampler,vec2(s0,uv.y));vec4 slice1Color=texture2D(textureSampler,vec2(s1,uv.y));float zOffset=mod(uv.z*width,1.0);vec4 result=mix(slice0Color,slice1Color,zOffset);return result;} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 screen_color=texture2D(textureSampler,vUV);gl_FragColor=sampleAs3DTexture(colorTable,screen_color.rgb,SLICE_COUNT);}`; je.ShadersStore[Lge] = Nge; class CN extends Bi { /** * Gets a string identifying the name of the class * @returns "ColorCorrectionPostProcess" string */ getClassName() { return "ColorCorrectionPostProcess"; } constructor(e, t, i, r, s, n, a) { super(e, "colorCorrection", null, ["colorTable"], i, r, s, n, a); const l = (r == null ? 
void 0 : r.getScene()) || null; this._colorTableTexture = new De(t, l, !0, !1, De.TRILINEAR_SAMPLINGMODE), this._colorTableTexture.anisotropicFilteringLevel = 1, this._colorTableTexture.wrapU = De.CLAMP_ADDRESSMODE, this._colorTableTexture.wrapV = De.CLAMP_ADDRESSMODE, this.colorTableUrl = t, this.onApply = (o) => { o.setTexture("colorTable", this._colorTableTexture); }; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new CN(e.name, e.colorTableUrl, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } F([ W() ], CN.prototype, "colorTableUrl", void 0); Be("BABYLON.ColorCorrectionPostProcess", CN); const Fge = "convolutionPixelShader", Bge = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec2 screenSize;uniform float kernel[9]; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec2 onePixel=vec2(1.0,1.0)/screenSize;vec4 colorSum = texture2D(textureSampler,vUV+onePixel*vec2(-1,-1))*kernel[0] + texture2D(textureSampler,vUV+onePixel*vec2(0,-1))*kernel[1] + texture2D(textureSampler,vUV+onePixel*vec2(1,-1))*kernel[2] + texture2D(textureSampler,vUV+onePixel*vec2(-1,0))*kernel[3] + texture2D(textureSampler,vUV+onePixel*vec2(0,0))*kernel[4] + texture2D(textureSampler,vUV+onePixel*vec2(1,0))*kernel[5] + texture2D(textureSampler,vUV+onePixel*vec2(-1,1))*kernel[6] + texture2D(textureSampler,vUV+onePixel*vec2(0,1))*kernel[7] + texture2D(textureSampler,vUV+onePixel*vec2(1,1))*kernel[8];float kernelWeight = kernel[0] + kernel[1] + kernel[2] + kernel[3] + kernel[4] + kernel[5] + kernel[6] + kernel[7] + kernel[8];if (kernelWeight<=0.0) {kernelWeight=1.0;} gl_FragColor=vec4((colorSum/kernelWeight).rgb,1);}`; je.ShadersStore[Fge] = Bge; class f6 extends Bi { /** * Gets a string identifying the name of the class * @returns "ConvolutionPostProcess" string */ getClassName() { return "ConvolutionPostProcess"; } /** * Creates a new instance ConvolutionPostProcess * @param name The name of the effect. * @param kernel Array of 9 values corresponding to the 3x3 kernel to be applied * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. 
(default: 0) */ constructor(e, t, i, r, s, n, a, l = 0) { super(e, "convolution", ["kernel", "screenSize"], null, i, r, s, n, a, null, l), this.kernel = t, this.onApply = (o) => { o.setFloat2("screenSize", this.width, this.height), o.setArray("kernel", this.kernel); }; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new f6(e.name, e.kernel, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable, e.textureType), e, i, r); } } f6.EdgeDetect0Kernel = [1, 0, -1, 0, 0, 0, -1, 0, 1]; f6.EdgeDetect1Kernel = [0, 1, 0, 1, -4, 1, 0, 1, 0]; f6.EdgeDetect2Kernel = [-1, -1, -1, -1, 8, -1, -1, -1, -1]; f6.SharpenKernel = [0, -1, 0, -1, 5, -1, 0, -1, 0]; f6.EmbossKernel = [-2, -1, 0, -1, 1, 1, 0, 1, 2]; f6.GaussianKernel = [0, 1, 0, 1, 1, 1, 0, 1, 0]; F([ W() ], f6.prototype, "kernel", void 0); Be("BABYLON.ConvolutionPostProcess", f6); class AL extends fu { /** * Gets a string identifying the name of the class * @returns "DepthOfFieldBlurPostProcess" string */ getClassName() { return "DepthOfFieldBlurPostProcess"; } /** * Creates a new instance DepthOfFieldBlurPostProcess * @param name The name of the effect. * @param scene The scene the effect belongs to. * @param direction The direction the blur should be applied. * @param kernel The size of the kernel used to blur. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param circleOfConfusion The circle of confusion + depth map to be used to avoid blurring across edges * @param imageToBlur The image to apply the blur to (default: Current rendered frame) * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) * @param textureFormat Format of textures used when performing the post process. 
(default: TEXTUREFORMAT_RGBA) */ constructor(e, t, i, r, s, n, a, l = null, o = De.BILINEAR_SAMPLINGMODE, u, h, d = 0, f = !1, p = 5) { super( e, i, r, s, n, // eslint-disable-next-line @typescript-eslint/no-unused-vars o = 2, u, h, d, `#define DOF 1 `, f, p ), this.direction = i, this.externalTextureSamplerBinding = !!l, this.onApplyObservable.add((m) => { l != null && m.setTextureFromPostProcess("textureSampler", l), m.setTextureFromPostProcessOutput("circleOfConfusionSampler", a); }); } } F([ W() ], AL.prototype, "direction", void 0); Be("BABYLON.DepthOfFieldBlurPostProcess", AL); const Uge = "depthOfFieldMergePixelShader", Vge = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,lod) texture2DLodEXT(s,c,lod) #else #define TEXTUREFUNC(s,c,bias) texture2D(s,c,bias) #endif uniform sampler2D textureSampler;varying vec2 vUV;uniform sampler2D circleOfConfusionSampler;uniform sampler2D blurStep0; #if BLUR_LEVEL>0 uniform sampler2D blurStep1; #endif #if BLUR_LEVEL>1 uniform sampler2D blurStep2; #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {float coc=TEXTUREFUNC(circleOfConfusionSampler,vUV,0.0).r; #if BLUR_LEVEL==0 vec4 original=TEXTUREFUNC(textureSampler,vUV,0.0);vec4 blurred0=TEXTUREFUNC(blurStep0,vUV,0.0);gl_FragColor=mix(original,blurred0,coc); #endif #if BLUR_LEVEL==1 if(coc<0.5){vec4 original=TEXTUREFUNC(textureSampler,vUV,0.0);vec4 blurred1=TEXTUREFUNC(blurStep1,vUV,0.0);gl_FragColor=mix(original,blurred1,coc/0.5);}else{vec4 blurred0=TEXTUREFUNC(blurStep0,vUV,0.0);vec4 blurred1=TEXTUREFUNC(blurStep1,vUV,0.0);gl_FragColor=mix(blurred1,blurred0,(coc-0.5)/0.5);} #endif #if BLUR_LEVEL==2 if(coc<0.33){vec4 original=TEXTUREFUNC(textureSampler,vUV,0.0);vec4 blurred2=TEXTUREFUNC(blurStep2,vUV,0.0);gl_FragColor=mix(original,blurred2,coc/0.33);}else if(coc<0.66){vec4 blurred1=TEXTUREFUNC(blurStep1,vUV,0.0);vec4 blurred2=TEXTUREFUNC(blurStep2,vUV,0.0);gl_FragColor=mix(blurred2,blurred1,(coc-0.33)/0.33);}else{vec4 blurred0=TEXTUREFUNC(blurStep0,vUV,0.0);vec4 blurred1=TEXTUREFUNC(blurStep1,vUV,0.0);gl_FragColor=mix(blurred1,blurred0,(coc-0.66)/0.34);} #endif } `; je.ShadersStore[Uge] = Vge; class Tne extends Bi { /** * Gets a string identifying the name of the class * @returns "DepthOfFieldMergePostProcess" string */ getClassName() { return "DepthOfFieldMergePostProcess"; } /** * Creates a new instance of DepthOfFieldMergePostProcess * @param name The name of the effect. * @param originalFromInput Post process which's input will be used for the merge. * @param circleOfConfusion Circle of confusion post process which's output will be used to blur each pixel. * @param _blurSteps Blur post processes from low to high which will be mixed with the original image. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. 
(default: false) */ constructor(e, t, i, r, s, n, a, l, o, u = 0, h = !1) { super(e, "depthOfFieldMerge", [], ["circleOfConfusionSampler", "blurStep0", "blurStep1", "blurStep2"], s, n, a, l, o, null, u, void 0, null, !0), this._blurSteps = r, this.externalTextureSamplerBinding = !0, this.onApplyObservable.add((d) => { d.setTextureFromPostProcess("textureSampler", t), d.setTextureFromPostProcessOutput("circleOfConfusionSampler", i), r.forEach((f, p) => { d.setTextureFromPostProcessOutput("blurStep" + (r.length - p - 1), f); }); }), h || this.updateEffect(); } /** * Updates the effect with the current post process compile time values and recompiles the shader. * @param defines Define statements that should be added at the beginning of the shader. (default: null) * @param uniforms Set of uniform variables that will be passed to the shader. (default: null) * @param samplers Set of Texture2D variables that will be passed to the shader. (default: null) * @param indexParameters The index parameters to be used for babylons include syntax "#include[0..varyingCount]". (default: undefined) See usage in babylon.blurPostProcess.ts and kernelBlur.vertex.fx * @param onCompiled Called when the shader has been compiled. * @param onError Called if there is an error when compiling a shader. */ updateEffect(e = null, t = null, i = null, r, s, n) { e || (e = "", e += "#define BLUR_LEVEL " + (this._blurSteps.length - 1) + ` `), super.updateEffect(e, t, i, r, s, n); } } var nP; (function(c) { c[c.Low = 0] = "Low", c[c.Medium = 1] = "Medium", c[c.High = 2] = "High"; })(nP || (nP = {})); class KH extends gn { /** * The focal the length of the camera used in the effect in scene units/1000 (eg. millimeter) */ set focalLength(e) { this._circleOfConfusion.focalLength = e; } get focalLength() { return this._circleOfConfusion.focalLength; } /** * F-Stop of the effect's camera. The diameter of the resulting aperture can be computed by lensSize/fStop. (default: 1.4) */ set fStop(e) { this._circleOfConfusion.fStop = e; } get fStop() { return this._circleOfConfusion.fStop; } /** * Distance away from the camera to focus on in scene units/1000 (eg. millimeter). (default: 2000) */ set focusDistance(e) { this._circleOfConfusion.focusDistance = e; } get focusDistance() { return this._circleOfConfusion.focusDistance; } /** * Max lens size in scene units/1000 (eg. millimeter). Standard cameras are 50mm. (default: 50) The diameter of the resulting aperture can be computed by lensSize/fStop. */ set lensSize(e) { this._circleOfConfusion.lensSize = e; } get lensSize() { return this._circleOfConfusion.lensSize; } /** * Creates a new instance DepthOfFieldEffect * @param scene The scene the effect belongs to. * @param depthTexture The depth texture of the scene to compute the circle of confusion.This must be set in order for this to function but may be set after initialization if needed. * @param blurLevel * @param pipelineTextureType The type of texture to be used when performing the post processing. * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i = nP.Low, r = 0, s = !1) { super(e.getEngine(), "depth of field", () => this._effects, !0), this._effects = []; const n = e.getEngine(), a = n.isWebGPU || n.webGLVersion > 1 ? 
6 : 5; this._circleOfConfusion = new QI("circleOfConfusion", t, 1, null, De.BILINEAR_SAMPLINGMODE, n, !1, r, s), this._depthOfFieldBlurY = [], this._depthOfFieldBlurX = []; let l = 1, o = 15; switch (i) { case nP.High: { l = 3, o = 51; break; } case nP.Medium: { l = 2, o = 31; break; } default: { o = 15, l = 1; break; } } const u = o / Math.pow(2, l - 1); let h = 1; for (let d = 0; d < l; d++) { const f = new AL("vertical blur", e, new at(0, 1), u, h, null, this._circleOfConfusion, d == 0 ? this._circleOfConfusion : null, De.BILINEAR_SAMPLINGMODE, n, !1, r, s, d == 0 ? a : 5); f.autoClear = !1, h = 0.75 / Math.pow(2, d); const p = new AL("horizontal blur", e, new at(1, 0), u, h, null, this._circleOfConfusion, null, De.BILINEAR_SAMPLINGMODE, n, !1, r, s); p.autoClear = !1, this._depthOfFieldBlurY.push(f), this._depthOfFieldBlurX.push(p); } this._effects = [this._circleOfConfusion]; for (let d = 0; d < this._depthOfFieldBlurX.length; d++) this._effects.push(this._depthOfFieldBlurY[d]), this._effects.push(this._depthOfFieldBlurX[d]); this._dofMerge = new Tne("dofMerge", this._circleOfConfusion, this._circleOfConfusion, this._depthOfFieldBlurX, h, null, De.BILINEAR_SAMPLINGMODE, n, !1, r, s), this._dofMerge.autoClear = !1, this._effects.push(this._dofMerge); } /** * Get the current class name of the current effect * @returns "DepthOfFieldEffect" */ getClassName() { return "DepthOfFieldEffect"; } /** * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function. */ set depthTexture(e) { this._circleOfConfusion.depthTexture = e; } /** * Disposes each of the internal effects for a given camera. * @param camera The camera to dispose the effect on. */ disposeEffects(e) { for (let t = 0; t < this._effects.length; t++) this._effects[t].dispose(e); } /** * @internal Internal */ _updateEffects() { for (let e = 0; e < this._effects.length; e++) this._effects[e].updateEffect(); } /** * Internal * @returns if all the contained post processes are ready. * @internal */ _isReady() { for (let e = 0; e < this._effects.length; e++) if (!this._effects[e].isReady()) return !1; return !0; } } const kge = "displayPassPixelShader", zge = `varying vec2 vUV;uniform sampler2D textureSampler;uniform sampler2D passSampler; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(passSampler,vUV);}`; je.ShadersStore[kge] = zge; class YU extends Bi { /** * Gets a string identifying the name of the class * @returns "DisplayPassPostProcess" string */ getClassName() { return "DisplayPassPostProcess"; } /** * Creates the DisplayPassPostProcess * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. 
(default: false) */ constructor(e, t, i, r, s, n) { super(e, "displayPass", ["passSampler"], ["passSampler"], t, i, r, s, n); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new YU(e.name, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } Be("BABYLON.DisplayPassPostProcess", YU); const Hge = "filterPixelShader", Gge = `varying vec2 vUV;uniform sampler2D textureSampler;uniform mat4 kernelMatrix; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec3 baseColor=texture2D(textureSampler,vUV).rgb;vec3 updatedColor=(kernelMatrix*vec4(baseColor,1.0)).rgb;gl_FragColor=vec4(updatedColor,1.0);}`; je.ShadersStore[Hge] = Gge; class xN extends Bi { /** * Gets a string identifying the name of the class * @returns "FilterPostProcess" string */ getClassName() { return "FilterPostProcess"; } /** * * @param name The name of the effect. * @param kernelMatrix The matrix to be applied to the image * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) */ constructor(e, t, i, r, s, n, a) { super(e, "filter", ["kernelMatrix"], null, i, r, s, n, a), this.kernelMatrix = t, this.onApply = (l) => { l.setMatrix("kernelMatrix", this.kernelMatrix); }; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new xN(e.name, e.kernelMatrix, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } F([ VB() ], xN.prototype, "kernelMatrix", void 0); Be("BABYLON.FilterPostProcess", xN); const Kge = "fxaaPixelShader", Wge = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,l) texture2DLodEXT(s,c,l) #else #define TEXTUREFUNC(s,c,b) texture2D(s,c,b) #endif uniform sampler2D textureSampler;uniform vec2 texelSize;varying vec2 vUV;varying vec2 sampleCoordS;varying vec2 sampleCoordE;varying vec2 sampleCoordN;varying vec2 sampleCoordW;varying vec2 sampleCoordNW;varying vec2 sampleCoordSE;varying vec2 sampleCoordNE;varying vec2 sampleCoordSW;const float fxaaQualitySubpix=1.0;const float fxaaQualityEdgeThreshold=0.166;const float fxaaQualityEdgeThresholdMin=0.0833;const vec3 kLumaCoefficients=vec3(0.2126,0.7152,0.0722); #define FxaaLuma(rgba) dot(rgba.rgb,kLumaCoefficients) void main(){vec2 posM;posM.x=vUV.x;posM.y=vUV.y;vec4 rgbyM=TEXTUREFUNC(textureSampler,vUV,0.0);float lumaM=FxaaLuma(rgbyM);float lumaS=FxaaLuma(TEXTUREFUNC(textureSampler,sampleCoordS,0.0));float lumaE=FxaaLuma(TEXTUREFUNC(textureSampler,sampleCoordE,0.0));float lumaN=FxaaLuma(TEXTUREFUNC(textureSampler,sampleCoordN,0.0));float lumaW=FxaaLuma(TEXTUREFUNC(textureSampler,sampleCoordW,0.0));float maxSM=max(lumaS,lumaM);float minSM=min(lumaS,lumaM);float maxESM=max(lumaE,maxSM);float minESM=min(lumaE,minSM);float maxWN=max(lumaN,lumaW);float minWN=min(lumaN,lumaW);float rangeMax=max(maxWN,maxESM);float rangeMin=min(minWN,minESM);float rangeMaxScaled=rangeMax*fxaaQualityEdgeThreshold;float range=rangeMax-rangeMin;float rangeMaxClamped=max(fxaaQualityEdgeThresholdMin,rangeMaxScaled); #ifndef MALI if(range=edgeVert;float subpixA=subpixNSWE*2.0+subpixNWSWNESE;if (!horzSpan) {lumaN=lumaW;} if (!horzSpan) {lumaS=lumaE;} if (horzSpan) {lengthSign=texelSize.y;} float 
subpixB=(subpixA*(1.0/12.0))-lumaM;float gradientN=lumaN-lumaM;float gradientS=lumaS-lumaM;float lumaNN=lumaN+lumaM;float lumaSS=lumaS+lumaM;bool pairN=abs(gradientN)>=abs(gradientS);float gradient=max(abs(gradientN),abs(gradientS));if (pairN) {lengthSign=-lengthSign;} float subpixC=clamp(abs(subpixB)*subpixRcpRange,0.0,1.0);vec2 posB;posB.x=posM.x;posB.y=posM.y;vec2 offNP;offNP.x=(!horzSpan) ? 0.0 : texelSize.x;offNP.y=(horzSpan) ? 0.0 : texelSize.y;if (!horzSpan) {posB.x+=lengthSign*0.5;} if (horzSpan) {posB.y+=lengthSign*0.5;} vec2 posN;posN.x=posB.x-offNP.x*1.5;posN.y=posB.y-offNP.y*1.5;vec2 posP;posP.x=posB.x+offNP.x*1.5;posP.y=posB.y+offNP.y*1.5;float subpixD=((-2.0)*subpixC)+3.0;float lumaEndN=FxaaLuma(TEXTUREFUNC(textureSampler,posN,0.0));float subpixE=subpixC*subpixC;float lumaEndP=FxaaLuma(TEXTUREFUNC(textureSampler,posP,0.0));if (!pairN) {lumaNN=lumaSS;} float gradientScaled=gradient*1.0/4.0;float lumaMM=lumaM-lumaNN*0.5;float subpixF=subpixD*subpixE;bool lumaMLTZero=lumaMM<0.0;lumaEndN-=lumaNN*0.5;lumaEndP-=lumaNN*0.5;bool doneN=abs(lumaEndN)>=gradientScaled;bool doneP=abs(lumaEndP)>=gradientScaled;if (!doneN) {posN.x-=offNP.x*3.0;} if (!doneN) {posN.y-=offNP.y*3.0;} bool doneNP=(!doneN) || (!doneP);if (!doneP) {posP.x+=offNP.x*3.0;} if (!doneP) {posP.y+=offNP.y*3.0;} if (doneNP) {if (!doneN) lumaEndN=FxaaLuma(TEXTUREFUNC(textureSampler,posN.xy,0.0));if (!doneP) lumaEndP=FxaaLuma(TEXTUREFUNC(textureSampler,posP.xy,0.0));if (!doneN) lumaEndN=lumaEndN-lumaNN*0.5;if (!doneP) lumaEndP=lumaEndP-lumaNN*0.5;doneN=abs(lumaEndN)>=gradientScaled;doneP=abs(lumaEndP)>=gradientScaled;if (!doneN) posN.x-=offNP.x*12.0;if (!doneN) posN.y-=offNP.y*12.0;doneNP=(!doneN) || (!doneP);if (!doneP) posP.x+=offNP.x*12.0;if (!doneP) posP.y+=offNP.y*12.0;} float dstN=posM.x-posN.x;float dstP=posP.x-posM.x;if (!horzSpan) {dstN=posM.y-posN.y;} if (!horzSpan) {dstP=posP.y-posM.y;} bool goodSpanN=(lumaEndN<0.0) != lumaMLTZero;float spanLength=(dstP+dstN);bool goodSpanP=(lumaEndP<0.0) != lumaMLTZero;float spanLengthRcp=1.0/spanLength;bool directionN=dstN { const u = this.texelSize; o.setFloat2("texelSize", u.x, u.y); }); } _getDefines() { const e = this.getEngine(); if (!e) return null; const t = e.getGlInfo(); return t && t.renderer && t.renderer.toLowerCase().indexOf("mali") > -1 ? `#define MALI 1 ` : null; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new $I(e.name, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } Be("BABYLON.FxaaPostProcess", $I); const Yge = "grainPixelShader", Qge = `#include uniform sampler2D textureSampler; uniform float intensity;uniform float animatedSeed;varying vec2 vUV; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {gl_FragColor=texture2D(textureSampler,vUV);vec2 seed=vUV*(animatedSeed);float grain=dither(seed,intensity);float lum=getLuminance(gl_FragColor.rgb);float grainAmount=(cos(-PI+(lum*PI*2.))+1.)/2.;gl_FragColor.rgb+=grain*grainAmount;gl_FragColor.rgb=max(gl_FragColor.rgb,0.0);}`; je.ShadersStore[Yge] = Qge; class ZI extends Bi { /** * Gets a string identifying the name of the class * @returns "GrainPostProcess" string */ getClassName() { return "GrainPostProcess"; } /** * Creates a new instance of @see GrainPostProcess * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. 
(default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i, r, s, n, a = 0, l = !1) { super(e, "grain", ["intensity", "animatedSeed"], [], t, i, r, s, n, null, a, void 0, null, l), this.intensity = 30, this.animated = !1, this.onApplyObservable.add((o) => { o.setFloat("intensity", this.intensity), o.setFloat("animatedSeed", this.animated ? Math.random() + 1 : 1); }); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new ZI(e.name, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } F([ W() ], ZI.prototype, "intensity", void 0); F([ W() ], ZI.prototype, "animated", void 0); Be("BABYLON.GrainPostProcess", ZI); const $ge = "highlightsPixelShader", Zge = `varying vec2 vUV;uniform sampler2D textureSampler;const vec3 RGBLuminanceCoefficients=vec3(0.2126,0.7152,0.0722); #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 tex=texture2D(textureSampler,vUV);vec3 c=tex.rgb;float luma=dot(c.rgb,RGBLuminanceCoefficients);gl_FragColor=vec4(pow(c,vec3(25.0-luma*15.0)),tex.a); }`; je.ShadersStore[$ge] = Zge; class qge extends Bi { /** * Gets a string identifying the name of the class * @returns "HighlightsPostProcess" string */ getClassName() { return "HighlightsPostProcess"; } /** * Extracts highlights from the image * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of texture for the post process (default: Engine.TEXTURETYPE_UNSIGNED_INT) */ constructor(e, t, i, r, s, n, a = 0) { super(e, "highlights", null, null, t, i, r, s, n, null, a); } } const Jge = "imageProcessingPixelShader", e3e = `varying vec2 vUV;uniform sampler2D textureSampler; #include #include #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 result=texture2D(textureSampler,vUV); #ifdef IMAGEPROCESSING #ifndef FROMLINEARSPACE result.rgb=toLinearSpace(result.rgb); #endif result=applyImageProcessing(result); #else #ifdef FROMLINEARSPACE result=applyImageProcessing(result); #endif #endif gl_FragColor=result;}`; je.ShadersStore[Jge] = e3e; class QU extends Bi { /** * Gets the image processing configuration used either in this material. */ get imageProcessingConfiguration() { return this._imageProcessingConfiguration; } /** * Sets the Default image processing configuration used either in the this material. * * If sets to null, the scene one is in use. */ set imageProcessingConfiguration(e) { e.applyByPostProcess = !0, this._attachImageProcessingConfiguration(e); } /** * Attaches a new image processing configuration to the PBR Material. 
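*
* Illustrative sketch (assumption, not library documentation): `QU` is the minified image processing post process and `Ds` the image processing configuration class used as the fallback in the method below. Assuming an existing `camera`, a dedicated configuration could be attached through the public setter:
*
*   const imageProcessing = new QU("imageProcessing", 1.0, camera);
*   const config = new Ds();                                // standalone configuration
*   imageProcessing.imageProcessingConfiguration = config;  // sets applyByPostProcess = true internally
*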
* @param configuration * @param doNotBuild */ _attachImageProcessingConfiguration(e, t = !1) { if (e !== this._imageProcessingConfiguration) { if (this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), e) this._imageProcessingConfiguration = e; else { let i = null; const r = this.getEngine(), s = this.getCamera(); if (s) i = s.getScene(); else if (r && r.scenes) { const n = r.scenes; i = n[n.length - 1]; } else i = gi.LastCreatedScene; i ? this._imageProcessingConfiguration = i.imageProcessingConfiguration : this._imageProcessingConfiguration = new Ds(); } this._imageProcessingConfiguration && (this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => { this._updateParameters(); })), t || this._updateParameters(); } } /** * If the post process is supported. */ get isSupported() { const e = this.getEffect(); return !e || e.isSupported; } /** * Gets Color curves setup used in the effect if colorCurvesEnabled is set to true. */ get colorCurves() { return this.imageProcessingConfiguration.colorCurves; } /** * Sets Color curves setup used in the effect if colorCurvesEnabled is set to true. */ set colorCurves(e) { this.imageProcessingConfiguration.colorCurves = e; } /** * Gets whether the color curves effect is enabled. */ get colorCurvesEnabled() { return this.imageProcessingConfiguration.colorCurvesEnabled; } /** * Sets whether the color curves effect is enabled. */ set colorCurvesEnabled(e) { this.imageProcessingConfiguration.colorCurvesEnabled = e; } /** * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true. */ get colorGradingTexture() { return this.imageProcessingConfiguration.colorGradingTexture; } /** * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true. */ set colorGradingTexture(e) { this.imageProcessingConfiguration.colorGradingTexture = e; } /** * Gets whether the color grading effect is enabled. */ get colorGradingEnabled() { return this.imageProcessingConfiguration.colorGradingEnabled; } /** * Sets whether the color grading effect is enabled. */ set colorGradingEnabled(e) { this.imageProcessingConfiguration.colorGradingEnabled = e; } /** * Gets exposure used in the effect. */ get exposure() { return this.imageProcessingConfiguration.exposure; } /** * Sets exposure used in the effect. */ set exposure(e) { this.imageProcessingConfiguration.exposure = e; } /** * Gets whether tonemapping is enabled or not. */ get toneMappingEnabled() { return this._imageProcessingConfiguration.toneMappingEnabled; } /** * Sets whether tonemapping is enabled or not. */ set toneMappingEnabled(e) { this._imageProcessingConfiguration.toneMappingEnabled = e; } /** * Gets the type of tone mapping effect. */ get toneMappingType() { return this._imageProcessingConfiguration.toneMappingType; } /** * Sets the type of tone mapping effect. */ set toneMappingType(e) { this._imageProcessingConfiguration.toneMappingType = e; } /** * Gets contrast used in the effect. */ get contrast() { return this.imageProcessingConfiguration.contrast; } /** * Sets contrast used in the effect. */ set contrast(e) { this.imageProcessingConfiguration.contrast = e; } /** * Gets Vignette stretch size. */ get vignetteStretch() { return this.imageProcessingConfiguration.vignetteStretch; } /** * Sets Vignette stretch size.
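*
* Illustrative sketch (assumption, not library documentation): the vignette-related properties below all forward to the underlying image processing configuration, so shaping a vignette on an existing instance `imageProcessing` of this post process could look like:
*
*   imageProcessing.vignetteEnabled = true;
*   imageProcessing.vignetteWeight = 2.5;   // intensity of the darkening
*   imageProcessing.vignetteStretch = 0.5;  // stretches the vignette shape
*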
*/ set vignetteStretch(e) { this.imageProcessingConfiguration.vignetteStretch = e; } /** * Gets Vignette center X Offset. * @deprecated use vignetteCenterX instead */ get vignetteCentreX() { return this.imageProcessingConfiguration.vignetteCenterX; } /** * Sets Vignette center X Offset. * @deprecated use vignetteCenterX instead */ set vignetteCentreX(e) { this.imageProcessingConfiguration.vignetteCenterX = e; } /** * Gets Vignette center Y Offset. * @deprecated use vignetteCenterY instead */ get vignetteCentreY() { return this.imageProcessingConfiguration.vignetteCenterY; } /** * Sets Vignette center Y Offset. * @deprecated use vignetteCenterY instead */ set vignetteCentreY(e) { this.imageProcessingConfiguration.vignetteCenterY = e; } /** * Vignette center Y Offset. */ get vignetteCenterY() { return this.imageProcessingConfiguration.vignetteCenterY; } set vignetteCenterY(e) { this.imageProcessingConfiguration.vignetteCenterY = e; } /** * Vignette center X Offset. */ get vignetteCenterX() { return this.imageProcessingConfiguration.vignetteCenterX; } set vignetteCenterX(e) { this.imageProcessingConfiguration.vignetteCenterX = e; } /** * Gets Vignette weight or intensity of the vignette effect. */ get vignetteWeight() { return this.imageProcessingConfiguration.vignetteWeight; } /** * Sets Vignette weight or intensity of the vignette effect. */ set vignetteWeight(e) { this.imageProcessingConfiguration.vignetteWeight = e; } /** * Gets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode) * if vignetteEnabled is set to true. */ get vignetteColor() { return this.imageProcessingConfiguration.vignetteColor; } /** * Sets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode) * if vignetteEnabled is set to true. */ set vignetteColor(e) { this.imageProcessingConfiguration.vignetteColor = e; } /** * Gets Camera field of view used by the Vignette effect. */ get vignetteCameraFov() { return this.imageProcessingConfiguration.vignetteCameraFov; } /** * Sets Camera field of view used by the Vignette effect. */ set vignetteCameraFov(e) { this.imageProcessingConfiguration.vignetteCameraFov = e; } /** * Gets the vignette blend mode allowing different kind of effect. */ get vignetteBlendMode() { return this.imageProcessingConfiguration.vignetteBlendMode; } /** * Sets the vignette blend mode allowing different kind of effect. */ set vignetteBlendMode(e) { this.imageProcessingConfiguration.vignetteBlendMode = e; } /** * Gets whether the vignette effect is enabled. */ get vignetteEnabled() { return this.imageProcessingConfiguration.vignetteEnabled; } /** * Sets whether the vignette effect is enabled. */ set vignetteEnabled(e) { this.imageProcessingConfiguration.vignetteEnabled = e; } /** * Gets intensity of the dithering effect. */ get ditheringIntensity() { return this.imageProcessingConfiguration.ditheringIntensity; } /** * Sets intensity of the dithering effect. */ set ditheringIntensity(e) { this.imageProcessingConfiguration.ditheringIntensity = e; } /** * Gets whether the dithering effect is enabled. */ get ditheringEnabled() { return this.imageProcessingConfiguration.ditheringEnabled; } /** * Sets whether the dithering effect is enabled. */ set ditheringEnabled(e) { this.imageProcessingConfiguration.ditheringEnabled = e; } /** * Gets whether the input of the processing is in Gamma or Linear Space. */ get fromLinearSpace() { return this._fromLinearSpace; } /** * Sets whether the input of the processing is in Gamma or Linear Space. 
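*
* Illustrative sketch (assumption, not library documentation): changing this flag rebuilds the shader defines via _updateParameters, so it can be flipped at runtime. For an existing instance `imageProcessing` of this post process, enabling tone mapping on gamma-space input could look like:
*
*   imageProcessing.fromLinearSpace = false;   // input is already in gamma space
*   imageProcessing.toneMappingEnabled = true;
*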
*/ set fromLinearSpace(e) { this._fromLinearSpace !== e && (this._fromLinearSpace = e, this._updateParameters()); } constructor(e, t, i = null, r, s, n, a = 0, l) { super(e, "imageProcessing", [], [], t, i, r, s, n, null, a, "postprocess", null, !0), this._fromLinearSpace = !0, this._defines = { IMAGEPROCESSING: !1, VIGNETTE: !1, VIGNETTEBLENDMODEMULTIPLY: !1, VIGNETTEBLENDMODEOPAQUE: !1, TONEMAPPING: !1, TONEMAPPING_ACES: !1, CONTRAST: !1, COLORCURVES: !1, COLORGRADING: !1, COLORGRADING3D: !1, FROMLINEARSPACE: !1, SAMPLER3DGREENDEPTH: !1, SAMPLER3DBGRMAP: !1, DITHER: !1, IMAGEPROCESSINGPOSTPROCESS: !1, EXPOSURE: !1, SKIPFINALCOLORCLAMP: !1 }, l ? (l.applyByPostProcess = !0, this._attachImageProcessingConfiguration(l, !0), this._updateParameters()) : (this._attachImageProcessingConfiguration(null, !0), this.imageProcessingConfiguration.applyByPostProcess = !0), this.onApply = (o) => { this.imageProcessingConfiguration.bind(o, this.aspectRatio); }; } /** * "ImageProcessingPostProcess" * @returns "ImageProcessingPostProcess" */ getClassName() { return "ImageProcessingPostProcess"; } /** * @internal */ _updateParameters() { this._defines.FROMLINEARSPACE = this._fromLinearSpace, this.imageProcessingConfiguration.prepareDefines(this._defines, !0); let e = ""; for (const r in this._defines) this._defines[r] && (e += `#define ${r}; `); const t = ["textureSampler"], i = ["scale"]; Ds && (Ds.PrepareSamplers(t, this._defines), Ds.PrepareUniforms(i, this._defines)), this.updateEffect(e, i, t); } dispose(e) { super.dispose(e), this._imageProcessingConfiguration && this._imageProcessingObserver && this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver), this._imageProcessingConfiguration && (this.imageProcessingConfiguration.applyByPostProcess = !1); } } F([ W() ], QU.prototype, "_fromLinearSpace", void 0); const t3e = "mrtFragmentDeclaration", i3e = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) layout(location=0) out vec4 glFragData[{X}]; #endif `; je.IncludesShadersStore[t3e] = i3e; const r3e = "geometryPixelShader", s3e = `#extension GL_EXT_draw_buffers : require #if defined(BUMP) || !defined(NORMAL) #extension GL_OES_standard_derivatives : enable #endif precision highp float; #ifdef BUMP varying mat4 vWorldView;varying vec3 vNormalW; #else varying vec3 vNormalV; #endif varying vec4 vViewPos; #if defined(POSITION) || defined(BUMP) varying vec3 vPositionW; #endif #ifdef VELOCITY varying vec4 vCurrentPosition;varying vec4 vPreviousPosition; #endif #ifdef NEED_UV varying vec2 vUV; #endif #ifdef BUMP uniform vec3 vBumpInfos;uniform vec2 vTangentSpaceParams; #endif #if defined(REFLECTIVITY) #if defined(ORMTEXTURE) || defined(SPECULARGLOSSINESSTEXTURE) || defined(REFLECTIVITYTEXTURE) uniform sampler2D reflectivitySampler;varying vec2 vReflectivityUV; #endif #ifdef ALBEDOTEXTURE varying vec2 vAlbedoUV;uniform sampler2D albedoSampler; #endif #ifdef REFLECTIVITYCOLOR uniform vec3 reflectivityColor; #endif #ifdef ALBEDOCOLOR uniform vec3 albedoColor; #endif #ifdef METALLIC uniform float metallic; #endif #if defined(ROUGHNESS) || defined(GLOSSINESS) uniform float glossiness; #endif #endif #if defined(ALPHATEST) && defined(NEED_UV) uniform sampler2D diffuseSampler; #endif #include #include[RENDER_TARGET_COUNT] #include #include #include void main() { #include #ifdef ALPHATEST if (texture2D(diffuseSampler,vUV).a<0.4) discard; #endif vec3 normalOutput; #ifdef BUMP vec3 normalW=normalize(vNormalW); #include #ifdef NORMAL_WORLDSPACE normalOutput=normalW; #else 
normalOutput=normalize(vec3(vWorldView*vec4(normalW,0.0))); #endif #else normalOutput=normalize(vNormalV); #endif #ifdef PREPASS #ifdef PREPASS_DEPTH gl_FragData[DEPTH_INDEX]=vec4(vViewPos.z/vViewPos.w,0.0,0.0,1.0); #endif #ifdef PREPASS_NORMAL gl_FragData[NORMAL_INDEX]=vec4(normalOutput,1.0); #endif #else gl_FragData[0]=vec4(vViewPos.z/vViewPos.w,0.0,0.0,1.0);gl_FragData[1]=vec4(normalOutput,1.0); #endif #ifdef POSITION gl_FragData[POSITION_INDEX]=vec4(vPositionW,1.0); #endif #ifdef VELOCITY vec2 a=(vCurrentPosition.xy/vCurrentPosition.w)*0.5+0.5;vec2 b=(vPreviousPosition.xy/vPreviousPosition.w)*0.5+0.5;vec2 velocity=abs(a-b);velocity=vec2(pow(velocity.x,1.0/3.0),pow(velocity.y,1.0/3.0))*sign(a-b)*0.5+0.5;gl_FragData[VELOCITY_INDEX]=vec4(velocity,0.0,1.0); #endif #ifdef REFLECTIVITY vec4 reflectivity=vec4(0.0,0.0,0.0,1.0); #ifdef METALLICWORKFLOW float metal=1.0;float roughness=1.0; #ifdef ORMTEXTURE metal*=texture2D(reflectivitySampler,vReflectivityUV).b;roughness*=texture2D(reflectivitySampler,vReflectivityUV).g; #endif #ifdef METALLIC metal*=metallic; #endif #ifdef ROUGHNESS roughness*=(1.0-glossiness); #endif reflectivity.a-=roughness;vec3 color=vec3(1.0); #ifdef ALBEDOTEXTURE color=texture2D(albedoSampler,vAlbedoUV).rgb; #ifdef GAMMAALBEDO color=toLinearSpace(color); #endif #endif #ifdef ALBEDOCOLOR color*=albedoColor.xyz; #endif reflectivity.rgb=mix(vec3(0.04),color,metal); #else #if defined(SPECULARGLOSSINESSTEXTURE) || defined(REFLECTIVITYTEXTURE) reflectivity=texture2D(reflectivitySampler,vReflectivityUV); #ifdef GAMMAREFLECTIVITYTEXTURE reflectivity.rgb=toLinearSpace(reflectivity.rgb); #endif #else #ifdef REFLECTIVITYCOLOR reflectivity.rgb=toLinearSpace(reflectivityColor.xyz);reflectivity.a=1.0; #endif #endif #ifdef GLOSSINESSS reflectivity.a*=glossiness; #endif #endif gl_FragData[REFLECTIVITY_INDEX]=reflectivity; #endif } `; je.ShadersStore[r3e] = s3e; const n3e = "geometryVertexDeclaration", a3e = "uniform mat4 viewProjection;uniform mat4 view;"; je.IncludesShadersStore[n3e] = a3e; const o3e = "geometryUboDeclaration", l3e = `#include `; je.IncludesShadersStore[o3e] = l3e; const c3e = "geometryVertexShader", u3e = `precision highp float; #include #include #include #include[0..maxSimultaneousMorphTargets] #include #include<__decl__geometryVertex> #include attribute vec3 position;attribute vec3 normal; #ifdef NEED_UV varying vec2 vUV; #ifdef ALPHATEST uniform mat4 diffuseMatrix; #endif #ifdef BUMP uniform mat4 bumpMatrix;varying vec2 vBumpUV; #endif #ifdef REFLECTIVITY uniform mat4 reflectivityMatrix;uniform mat4 albedoMatrix;varying vec2 vReflectivityUV;varying vec2 vAlbedoUV; #endif #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #endif #ifdef BUMP varying mat4 vWorldView; #endif #ifdef BUMP varying vec3 vNormalW; #else varying vec3 vNormalV; #endif varying vec4 vViewPos; #if defined(POSITION) || defined(BUMP) varying vec3 vPositionW; #endif #ifdef VELOCITY uniform mat4 previousViewProjection;varying vec4 vCurrentPosition;varying vec4 vPreviousPosition; #endif #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position;vec3 normalUpdated=normal; #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #include #if defined(VELOCITY) && !defined(BONES_VELOCITY_ENABLED) vCurrentPosition=viewProjection*finalWorld*vec4(positionUpdated,1.0);vPreviousPosition=previousViewProjection*finalPreviousWorld*vec4(positionUpdated,1.0); #endif #include #include vec4 
worldPos=vec4(finalWorld*vec4(positionUpdated,1.0)); #ifdef BUMP vWorldView=view*finalWorld;vNormalW=normalUpdated; #else #ifdef NORMAL_WORLDSPACE vNormalV=normalize(vec3(finalWorld*vec4(normalUpdated,0.0))); #else vNormalV=normalize(vec3((view*finalWorld)*vec4(normalUpdated,0.0))); #endif #endif vViewPos=view*worldPos; #if defined(VELOCITY) && defined(BONES_VELOCITY_ENABLED) vCurrentPosition=viewProjection*finalWorld*vec4(positionUpdated,1.0); #if NUM_BONE_INFLUENCERS>0 mat4 previousInfluence;previousInfluence=mPreviousBones[int(matricesIndices[0])]*matricesWeights[0]; #if NUM_BONE_INFLUENCERS>1 previousInfluence+=mPreviousBones[int(matricesIndices[1])]*matricesWeights[1]; #endif #if NUM_BONE_INFLUENCERS>2 previousInfluence+=mPreviousBones[int(matricesIndices[2])]*matricesWeights[2]; #endif #if NUM_BONE_INFLUENCERS>3 previousInfluence+=mPreviousBones[int(matricesIndices[3])]*matricesWeights[3]; #endif #if NUM_BONE_INFLUENCERS>4 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[0])]*matricesWeightsExtra[0]; #endif #if NUM_BONE_INFLUENCERS>5 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[1])]*matricesWeightsExtra[1]; #endif #if NUM_BONE_INFLUENCERS>6 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[2])]*matricesWeightsExtra[2]; #endif #if NUM_BONE_INFLUENCERS>7 previousInfluence+=mPreviousBones[int(matricesIndicesExtra[3])]*matricesWeightsExtra[3]; #endif vPreviousPosition=previousViewProjection*finalPreviousWorld*previousInfluence*vec4(positionUpdated,1.0); #else vPreviousPosition=previousViewProjection*finalPreviousWorld*vec4(positionUpdated,1.0); #endif #endif #if defined(POSITION) || defined(BUMP) vPositionW=worldPos.xyz/worldPos.w; #endif gl_Position=viewProjection*finalWorld*vec4(positionUpdated,1.0); #include #ifdef NEED_UV #ifdef UV1 #if defined(ALPHATEST) && defined(ALPHATEST_UV1) vUV=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #else vUV=uv; #endif #ifdef BUMP_UV1 vBumpUV=vec2(bumpMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef REFLECTIVITY_UV1 vReflectivityUV=vec2(reflectivityMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef ALBEDO_UV1 vAlbedoUV=vec2(albedoMatrix*vec4(uvUpdated,1.0,0.0)); #endif #endif #ifdef UV2 #if defined(ALPHATEST) && defined(ALPHATEST_UV2) vUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #else vUV=uv2; #endif #ifdef BUMP_UV2 vBumpUV=vec2(bumpMatrix*vec4(uv2,1.0,0.0)); #endif #ifdef REFLECTIVITY_UV2 vReflectivityUV=vec2(reflectivityMatrix*vec4(uv2,1.0,0.0)); #endif #ifdef ALBEDO_UV2 vAlbedoUV=vec2(albedoMatrix*vec4(uv2,1.0,0.0)); #endif #endif #endif #include } `; je.ShadersStore[c3e] = u3e; const Sne = [ "world", "mBones", "viewProjection", "diffuseMatrix", "view", "previousWorld", "previousViewProjection", "mPreviousBones", "bumpMatrix", "reflectivityMatrix", "albedoMatrix", "reflectivityColor", "albedoColor", "metallic", "glossiness", "vTangentSpaceParams", "vBumpInfos", "morphTargetInfluences", "morphTargetTextureInfo", "morphTargetTextureIndices", "boneTextureWidth" ]; Gc(Sne); class _o { /** * @internal * Sets up internal structures to share outputs with PrePassRenderer * This method should only be called by the PrePassRenderer itself */ _linkPrePassRenderer(e) { this._linkedWithPrePass = !0, this._prePassRenderer = e, this._multiRenderTarget && (this._multiRenderTarget.onClearObservable.clear(), this._multiRenderTarget.onClearObservable.add(() => { })); } /** * @internal * Separates internal structures from PrePassRenderer so the geometry buffer can now operate by itself. 
* This method should only be called by the PrePassRenderer itself */ _unlinkPrePassRenderer() { this._linkedWithPrePass = !1, this._createRenderTargets(); } /** * @internal * Resets the geometry buffer layout */ _resetLayout() { this._enablePosition = !1, this._enableReflectivity = !1, this._enableVelocity = !1, this._attachmentsFromPrePass = []; } /** * @internal * Replaces a texture in the geometry buffer renderer * Useful when linking textures of the prepass renderer */ _forceTextureType(e, t) { e === _o.POSITION_TEXTURE_TYPE ? (this._positionIndex = t, this._enablePosition = !0) : e === _o.VELOCITY_TEXTURE_TYPE ? (this._velocityIndex = t, this._enableVelocity = !0) : e === _o.REFLECTIVITY_TEXTURE_TYPE ? (this._reflectivityIndex = t, this._enableReflectivity = !0) : e === _o.DEPTH_TEXTURE_TYPE ? this._depthIndex = t : e === _o.NORMAL_TEXTURE_TYPE && (this._normalIndex = t); } /** * @internal * Sets texture attachments * Useful when linking textures of the prepass renderer */ _setAttachments(e) { this._attachmentsFromPrePass = e; } /** * @internal * Replaces the first texture which is hard coded as a depth texture in the geometry buffer * Useful when linking textures of the prepass renderer */ _linkInternalTexture(e) { this._multiRenderTarget.setInternalTexture(e, 0, !1); } /** * Gets the render list (meshes to be rendered) used in the G buffer. */ get renderList() { return this._multiRenderTarget.renderList; } /** * Set the render list (meshes to be rendered) used in the G buffer. */ set renderList(e) { this._multiRenderTarget.renderList = e; } /** * Gets whether or not G buffer are supported by the running hardware. * This requires draw buffer supports */ get isSupported() { return this._multiRenderTarget.isSupported; } /** * Returns the index of the given texture type in the G-Buffer textures array * @param textureType The texture type constant. For example GeometryBufferRenderer.POSITION_TEXTURE_INDEX * @returns the index of the given texture type in the G-Buffer textures array */ getTextureIndex(e) { switch (e) { case _o.POSITION_TEXTURE_TYPE: return this._positionIndex; case _o.VELOCITY_TEXTURE_TYPE: return this._velocityIndex; case _o.REFLECTIVITY_TEXTURE_TYPE: return this._reflectivityIndex; case _o.DEPTH_TEXTURE_TYPE: return this._linkedWithPrePass ? this._depthIndex : 0; case _o.NORMAL_TEXTURE_TYPE: return this._linkedWithPrePass ? this._normalIndex : 1; default: return -1; } } /** * Gets a boolean indicating if objects positions are enabled for the G buffer. */ get enablePosition() { return this._enablePosition; } /** * Sets whether or not objects positions are enabled for the G buffer. */ set enablePosition(e) { this._enablePosition = e, this._linkedWithPrePass || (this.dispose(), this._createRenderTargets()); } /** * Gets a boolean indicating if objects velocities are enabled for the G buffer. */ get enableVelocity() { return this._enableVelocity; } /** * Sets whether or not objects velocities are enabled for the G buffer. */ set enableVelocity(e) { this._enableVelocity = e, e || (this._previousTransformationMatrices = {}), this._linkedWithPrePass || (this.dispose(), this._createRenderTargets()), this._scene.needsPreviousWorldMatrices = e; } /** * Gets a boolean indicating if objects reflectivity are enabled in the G buffer. */ get enableReflectivity() { return this._enableReflectivity; } /** * Sets whether or not objects reflectivity are enabled for the G buffer. 
* For Metallic-Roughness workflow with ORM texture, we assume that ORM texture is defined according to the default layout: * pbr.useRoughnessFromMetallicTextureAlpha = false; * pbr.useRoughnessFromMetallicTextureGreen = true; * pbr.useMetallnessFromMetallicTextureBlue = true; */ set enableReflectivity(e) { this._enableReflectivity = e, this._linkedWithPrePass || (this.dispose(), this._createRenderTargets()); } /** * Gets the scene associated with the buffer. */ get scene() { return this._scene; } /** * Gets the ratio used by the buffer during its creation. * How big is the buffer related to the main canvas. */ get ratio() { return typeof this._ratioOrDimensions == "object" ? 1 : this._ratioOrDimensions; } /** * Creates a new G Buffer for the scene * @param scene The scene the buffer belongs to * @param ratioOrDimensions How big is the buffer related to the main canvas (default: 1). You can also directly pass a width and height for the generated textures @since * @param depthFormat Format of the depth texture (default: 15) */ constructor(e, t = 1, i = 15) { this._previousTransformationMatrices = {}, this._previousBonesTransformationMatrices = {}, this.excludedSkinnedMeshesFromVelocity = [], this.renderTransparentMeshes = !0, this.generateNormalsInWorldSpace = !1, this._resizeObserver = null, this._enablePosition = !1, this._enableVelocity = !1, this._enableReflectivity = !1, this._clearColor = new Et(0, 0, 0, 0), this._clearDepthColor = new Et(1e8, 0, 0, 1), this._positionIndex = -1, this._velocityIndex = -1, this._reflectivityIndex = -1, this._depthIndex = -1, this._normalIndex = -1, this._linkedWithPrePass = !1, this.useSpecificClearForDepthTexture = !1, this._scene = e, this._ratioOrDimensions = t, this._useUbo = e.getEngine().supportsUniformBuffers, this._depthFormat = i, _o._SceneComponentInitialization(this._scene), this._createRenderTargets(); } /** * Checks whether everything is ready to render a submesh to the G buffer. * @param subMesh the submesh to check readiness for * @param useInstances is the mesh drawn using instance or not * @returns true if ready otherwise false */ isReady(e, t) { const i = e.getMaterial(); if (i && i.disableDepthWrite) return !1; const r = [], s = [Y.PositionKind, Y.NormalKind], n = e.getMesh(); if (i) { let f = !1; if (i.needAlphaTesting() && i.getAlphaTestTexture() && (r.push("#define ALPHATEST"), r.push(`#define ALPHATEST_UV${i.getAlphaTestTexture().coordinatesIndex + 1}`), f = !0), i.bumpTexture && Tt.BumpTextureEnabled && (r.push("#define BUMP"), r.push(`#define BUMP_UV${i.bumpTexture.coordinatesIndex + 1}`), f = !0), this._enableReflectivity) { let p = !1; i.getClassName() === "PBRMetallicRoughnessMaterial" ? (i.metallicRoughnessTexture !== null && (r.push("#define ORMTEXTURE"), r.push(`#define REFLECTIVITY_UV${i.metallicRoughnessTexture.coordinatesIndex + 1}`), r.push("#define METALLICWORKFLOW"), f = !0, p = !0), i.metallic !== null && (r.push("#define METALLIC"), r.push("#define METALLICWORKFLOW"), p = !0), i.roughness !== null && (r.push("#define ROUGHNESS"), r.push("#define METALLICWORKFLOW"), p = !0), p && (i.baseTexture !== null && (r.push("#define ALBEDOTEXTURE"), r.push(`#define ALBEDO_UV${i.baseTexture.coordinatesIndex + 1}`), i.baseTexture.gammaSpace && r.push("#define GAMMAALBEDO"), f = !0), i.baseColor !== null && r.push("#define ALBEDOCOLOR"))) : i.getClassName() === "PBRSpecularGlossinessMaterial" ? (i.specularGlossinessTexture !== null ? 
(r.push("#define SPECULARGLOSSINESSTEXTURE"), r.push(`#define REFLECTIVITY_UV${i.specularGlossinessTexture.coordinatesIndex + 1}`), f = !0, i.specularGlossinessTexture.gammaSpace && r.push("#define GAMMAREFLECTIVITYTEXTURE")) : i.specularColor !== null && r.push("#define REFLECTIVITYCOLOR"), i.glossiness !== null && r.push("#define GLOSSINESS")) : i.getClassName() === "PBRMaterial" ? (i.metallicTexture !== null && (r.push("#define ORMTEXTURE"), r.push(`#define REFLECTIVITY_UV${i.metallicTexture.coordinatesIndex + 1}`), r.push("#define METALLICWORKFLOW"), f = !0, p = !0), i.metallic !== null && (r.push("#define METALLIC"), r.push("#define METALLICWORKFLOW"), p = !0), i.roughness !== null && (r.push("#define ROUGHNESS"), r.push("#define METALLICWORKFLOW"), p = !0), p ? (i.albedoTexture !== null && (r.push("#define ALBEDOTEXTURE"), r.push(`#define ALBEDO_UV${i.albedoTexture.coordinatesIndex + 1}`), i.albedoTexture.gammaSpace && r.push("#define GAMMAALBEDO"), f = !0), i.albedoColor !== null && r.push("#define ALBEDOCOLOR")) : (i.reflectivityTexture !== null ? (r.push("#define SPECULARGLOSSINESSTEXTURE"), r.push(`#define REFLECTIVITY_UV${i.reflectivityTexture.coordinatesIndex + 1}`), i.reflectivityTexture.gammaSpace && r.push("#define GAMMAREFLECTIVITYTEXTURE"), f = !0) : i.reflectivityColor !== null && r.push("#define REFLECTIVITYCOLOR"), i.microSurface !== null && r.push("#define GLOSSINESS"))) : i.getClassName() === "StandardMaterial" && (i.specularTexture !== null && (r.push("#define REFLECTIVITYTEXTURE"), r.push(`#define REFLECTIVITY_UV${i.specularTexture.coordinatesIndex + 1}`), i.specularTexture.gammaSpace && r.push("#define GAMMAREFLECTIVITYTEXTURE"), f = !0), i.specularColor !== null && r.push("#define REFLECTIVITYCOLOR")); } f && (r.push("#define NEED_UV"), n.isVerticesDataPresent(Y.UVKind) && (s.push(Y.UVKind), r.push("#define UV1")), n.isVerticesDataPresent(Y.UV2Kind) && (s.push(Y.UV2Kind), r.push("#define UV2"))); } this._linkedWithPrePass && (r.push("#define PREPASS"), this._depthIndex !== -1 && (r.push("#define DEPTH_INDEX " + this._depthIndex), r.push("#define PREPASS_DEPTH")), this._normalIndex !== -1 && (r.push("#define NORMAL_INDEX " + this._normalIndex), r.push("#define PREPASS_NORMAL"))), this._enablePosition && (r.push("#define POSITION"), r.push("#define POSITION_INDEX " + this._positionIndex)), this._enableVelocity && (r.push("#define VELOCITY"), r.push("#define VELOCITY_INDEX " + this._velocityIndex), this.excludedSkinnedMeshesFromVelocity.indexOf(n) === -1 && r.push("#define BONES_VELOCITY_ENABLED")), this._enableReflectivity && (r.push("#define REFLECTIVITY"), r.push("#define REFLECTIVITY_INDEX " + this._reflectivityIndex)), this.generateNormalsInWorldSpace && r.push("#define NORMAL_WORLDSPACE"), n.useBones && n.computeBonesUsingShaders && n.skeleton ? 
(s.push(Y.MatricesIndicesKind), s.push(Y.MatricesWeightsKind), n.numBoneInfluencers > 4 && (s.push(Y.MatricesIndicesExtraKind), s.push(Y.MatricesWeightsExtraKind)), r.push("#define NUM_BONE_INFLUENCERS " + n.numBoneInfluencers), r.push("#define BONETEXTURE " + n.skeleton.isUsingTextureForMatrices), r.push("#define BonesPerMesh " + (n.skeleton.bones.length + 1))) : (r.push("#define NUM_BONE_INFLUENCERS 0"), r.push("#define BONETEXTURE false"), r.push("#define BonesPerMesh 0")); const a = n.morphTargetManager; let l = 0; a && a.numInfluencers > 0 && (l = a.numInfluencers, r.push("#define MORPHTARGETS"), r.push("#define NUM_MORPH_INFLUENCERS " + l), a.isUsingTextureForTargets && r.push("#define MORPHTARGETS_TEXTURE"), Ke.PrepareAttributesForMorphTargetsInfluencers(s, n, l)), t && (r.push("#define INSTANCES"), Ke.PushAttributesForInstances(s, this._enableVelocity), e.getRenderingMesh().hasThinInstances && r.push("#define THIN_INSTANCES")), this._linkedWithPrePass ? r.push("#define RENDER_TARGET_COUNT " + this._attachmentsFromPrePass.length) : r.push("#define RENDER_TARGET_COUNT " + this._multiRenderTarget.textures.length), bT(i, this._scene, r); const o = this._scene.getEngine(), u = e._getDrawWrapper(void 0, !0), h = u.defines, d = r.join(` `); return h !== d && u.setEffect(o.createEffect("geometry", { attributes: s, uniformsNames: Sne, samplers: ["diffuseSampler", "bumpSampler", "reflectivitySampler", "albedoSampler", "morphTargets", "boneSampler"], defines: d, onCompiled: null, fallbacks: null, onError: null, uniformBuffersNames: ["Scene"], indexParameters: { buffersCount: this._multiRenderTarget.textures.length - 1, maxSimultaneousMorphTargets: l } }, o), d), u.effect.isReady(); } /** * Gets the current underlying G Buffer. * @returns the buffer */ getGBuffer() { return this._multiRenderTarget; } /** * Gets the number of samples used to render the buffer (anti aliasing). */ get samples() { return this._multiRenderTarget.samples; } /** * Sets the number of samples used to render the buffer (anti aliasing). */ set samples(e) { this._multiRenderTarget.samples = e; } /** * Disposes the renderer and frees up associated resources. */ dispose() { this._resizeObserver && (this._scene.getEngine().onResizeObservable.remove(this._resizeObserver), this._resizeObserver = null), this.getGBuffer().dispose(); } _assignRenderTargetIndices() { const e = []; let t = 2; return e.push("gBuffer_Depth", "gBuffer_Normal"), this._enablePosition && (this._positionIndex = t, t++, e.push("gBuffer_Position")), this._enableVelocity && (this._velocityIndex = t, t++, e.push("gBuffer_Velocity")), this._enableReflectivity && (this._reflectivityIndex = t, t++, e.push("gBuffer_Reflectivity")), [t, e]; } _createRenderTargets() { const e = this._scene.getEngine(), [t, i] = this._assignRenderTargetIndices(); let r = 0; e._caps.textureFloat && e._caps.textureFloatLinearFiltering ? r = 1 : e._caps.textureHalfFloat && e._caps.textureHalfFloatLinearFiltering && (r = 2); const s = this._ratioOrDimensions.width !== void 0 ? 
this._ratioOrDimensions : { width: e.getRenderWidth() * this._ratioOrDimensions, height: e.getRenderHeight() * this._ratioOrDimensions }; if (this._multiRenderTarget = new $8("gBuffer", s, t, this._scene, { generateMipMaps: !1, generateDepthTexture: !0, defaultType: r, depthTextureFormat: this._depthFormat }, i.concat("gBuffer_DepthBuffer")), !this.isSupported) return; this._multiRenderTarget.wrapU = De.CLAMP_ADDRESSMODE, this._multiRenderTarget.wrapV = De.CLAMP_ADDRESSMODE, this._multiRenderTarget.refreshRate = 1, this._multiRenderTarget.renderParticles = !1, this._multiRenderTarget.renderList = null; const n = [!0], a = [!1], l = [!0]; for (let f = 1; f < t; ++f) n.push(!0), l.push(!1), a.push(!0); const o = e.buildTextureLayout(n), u = e.buildTextureLayout(a), h = e.buildTextureLayout(l); this._multiRenderTarget.onClearObservable.add((f) => { f.bindAttachments(this.useSpecificClearForDepthTexture ? u : o), f.clear(this._clearColor, !0, !0, !0), this.useSpecificClearForDepthTexture && (f.bindAttachments(h), f.clear(this._clearDepthColor, !0, !0, !0)), f.bindAttachments(o); }), this._resizeObserver = e.onResizeObservable.add(() => { if (this._multiRenderTarget) { const f = this._ratioOrDimensions.width !== void 0 ? this._ratioOrDimensions : { width: e.getRenderWidth() * this._ratioOrDimensions, height: e.getRenderHeight() * this._ratioOrDimensions }; this._multiRenderTarget.resize(f); } }); const d = (f) => { const p = f.getRenderingMesh(), m = f.getEffectiveMesh(), _ = this._scene, v = _.getEngine(), C = f.getMaterial(); if (!C) return; if (m._internalAbstractMeshDataInfo._isActiveIntermediate = !1, this._enableVelocity && !this._previousTransformationMatrices[m.uniqueId] && (this._previousTransformationMatrices[m.uniqueId] = { world: Ae.Identity(), viewProjection: _.getTransformMatrix() }, p.skeleton)) { const M = p.skeleton.getTransformMatrices(p); this._previousBonesTransformationMatrices[p.uniqueId] = this._copyBonesTransformationMatrices(M, new Float32Array(M.length)); } const x = p._getInstancesRenderList(f._id, !!f.getReplacementMesh()); if (x.mustReturn) return; const b = v.getCaps().instancedArrays && (x.visibleInstances[f._id] !== null || p.hasThinInstances), S = m.getWorldMatrix(); if (this.isReady(f, b)) { const M = f._getDrawWrapper(); if (!M) return; const R = M.effect; v.enableEffect(M), b || p._bind(f, R, C.fillMode), this._useUbo ? (Ke.BindSceneUniformBuffer(R, this._scene.getSceneUniformBuffer()), this._scene.finalizeSceneUbo()) : (R.setMatrix("viewProjection", _.getTransformMatrix()), R.setMatrix("view", _.getViewMatrix())); let w; const V = p._instanceDataStorage; if (!V.isFrozen && (C.backFaceCulling || p.overrideMaterialSideOrientation !== null)) { const k = m._getWorldMatrixDeterminant(); w = p.overrideMaterialSideOrientation, w === null && (w = C.sideOrientation), k < 0 && (w = w === At.ClockWiseSideOrientation ? At.CounterClockWiseSideOrientation : At.ClockWiseSideOrientation); } else w = V.sideOrientation; if (C._preBind(M, w), C.needAlphaTesting()) { const k = C.getAlphaTestTexture(); k && (R.setTexture("diffuseSampler", k), R.setMatrix("diffuseMatrix", k.getTextureMatrix())); } if (C.bumpTexture && _.getEngine().getCaps().standardDerivatives && Tt.BumpTextureEnabled && (R.setFloat3("vBumpInfos", C.bumpTexture.coordinatesIndex, 1 / C.bumpTexture.level, C.parallaxScaleBias), R.setMatrix("bumpMatrix", C.bumpTexture.getTextureMatrix()), R.setTexture("bumpSampler", C.bumpTexture), R.setFloat2("vTangentSpaceParams", C.invertNormalMapX ? -1 : 1, C.invertNormalMapY ? 
-1 : 1)), this._enableReflectivity && (C.getClassName() === "PBRMetallicRoughnessMaterial" ? (C.metallicRoughnessTexture !== null && (R.setTexture("reflectivitySampler", C.metallicRoughnessTexture), R.setMatrix("reflectivityMatrix", C.metallicRoughnessTexture.getTextureMatrix())), C.metallic !== null && R.setFloat("metallic", C.metallic), C.roughness !== null && R.setFloat("glossiness", 1 - C.roughness), C.baseTexture !== null && (R.setTexture("albedoSampler", C.baseTexture), R.setMatrix("albedoMatrix", C.baseTexture.getTextureMatrix())), C.baseColor !== null && R.setColor3("albedoColor", C.baseColor)) : C.getClassName() === "PBRSpecularGlossinessMaterial" ? (C.specularGlossinessTexture !== null ? (R.setTexture("reflectivitySampler", C.specularGlossinessTexture), R.setMatrix("reflectivityMatrix", C.specularGlossinessTexture.getTextureMatrix())) : C.specularColor !== null && R.setColor3("reflectivityColor", C.specularColor), C.glossiness !== null && R.setFloat("glossiness", C.glossiness)) : C.getClassName() === "PBRMaterial" ? (C.metallicTexture !== null && (R.setTexture("reflectivitySampler", C.metallicTexture), R.setMatrix("reflectivityMatrix", C.metallicTexture.getTextureMatrix())), C.metallic !== null && R.setFloat("metallic", C.metallic), C.roughness !== null && R.setFloat("glossiness", 1 - C.roughness), C.roughness !== null || C.metallic !== null || C.metallicTexture !== null ? (C.albedoTexture !== null && (R.setTexture("albedoSampler", C.albedoTexture), R.setMatrix("albedoMatrix", C.albedoTexture.getTextureMatrix())), C.albedoColor !== null && R.setColor3("albedoColor", C.albedoColor)) : (C.reflectivityTexture !== null ? (R.setTexture("reflectivitySampler", C.reflectivityTexture), R.setMatrix("reflectivityMatrix", C.reflectivityTexture.getTextureMatrix())) : C.reflectivityColor !== null && R.setColor3("reflectivityColor", C.reflectivityColor), C.microSurface !== null && R.setFloat("glossiness", C.microSurface))) : C.getClassName() === "StandardMaterial" && (C.specularTexture !== null && (R.setTexture("reflectivitySampler", C.specularTexture), R.setMatrix("reflectivityMatrix", C.specularTexture.getTextureMatrix())), C.specularColor !== null && R.setColor3("reflectivityColor", C.specularColor))), Ec(R, C, this._scene), p.useBones && p.computeBonesUsingShaders && p.skeleton) { const k = p.skeleton; if (k.isUsingTextureForMatrices && R.getUniformIndex("boneTextureWidth") > -1) { const L = k.getTransformMatrixTexture(p); R.setTexture("boneSampler", L), R.setFloat("boneTextureWidth", 4 * (k.bones.length + 1)); } else R.setMatrices("mBones", p.skeleton.getTransformMatrices(p)); this._enableVelocity && R.setMatrices("mPreviousBones", this._previousBonesTransformationMatrices[p.uniqueId]); } Ke.BindMorphTargetParameters(p, R), p.morphTargetManager && p.morphTargetManager.isUsingTextureForTargets && p.morphTargetManager._bind(R), this._enableVelocity && (R.setMatrix("previousWorld", this._previousTransformationMatrices[m.uniqueId].world), R.setMatrix("previousViewProjection", this._previousTransformationMatrices[m.uniqueId].viewProjection)), b && p.hasThinInstances && R.setMatrix("world", S), p._processRendering(m, f, R, C.fillMode, x, b, (k, L) => { k || R.setMatrix("world", L); }); } this._enableVelocity && (this._previousTransformationMatrices[m.uniqueId].world = S.clone(), this._previousTransformationMatrices[m.uniqueId].viewProjection = this._scene.getTransformMatrix().clone(), p.skeleton && this._copyBonesTransformationMatrices(p.skeleton.getTransformMatrices(p), 
this._previousBonesTransformationMatrices[m.uniqueId])); }; this._multiRenderTarget.customIsReadyFunction = (f, p, m) => { if ((m || p === 0) && f.subMeshes) for (let _ = 0; _ < f.subMeshes.length; ++_) { const v = f.subMeshes[_], C = v.getMaterial(), x = v.getRenderingMesh(); if (!C) continue; const b = x._getInstancesRenderList(v._id, !!v.getReplacementMesh()), S = e.getCaps().instancedArrays && (b.visibleInstances[v._id] !== null || x.hasThinInstances); if (!this.isReady(v, S)) return !1; } return !0; }, this._multiRenderTarget.customRenderFunction = (f, p, m, _) => { let v; if (this._linkedWithPrePass) { if (!this._prePassRenderer.enabled) return; this._scene.getEngine().bindAttachments(this._attachmentsFromPrePass); } if (_.length) { for (e.setColorWrite(!1), v = 0; v < _.length; v++) d(_.data[v]); e.setColorWrite(!0); } for (v = 0; v < f.length; v++) d(f.data[v]); for (e.setDepthWrite(!1), v = 0; v < p.length; v++) d(p.data[v]); if (this.renderTransparentMeshes) for (v = 0; v < m.length; v++) d(m.data[v]); e.setDepthWrite(!0); }; } // Copies the bones transformation matrices into the target array and returns the target's reference _copyBonesTransformationMatrices(e, t) { for (let i = 0; i < e.length; i++) t[i] = e[i]; return t; } } _o.DEPTH_TEXTURE_TYPE = 0; _o.NORMAL_TEXTURE_TYPE = 1; _o.POSITION_TEXTURE_TYPE = 2; _o.VELOCITY_TEXTURE_TYPE = 3; _o.REFLECTIVITY_TEXTURE_TYPE = 4; _o._SceneComponentInitialization = (c) => { throw yr("GeometryBufferRendererSceneComponent"); }; class h3e { constructor() { this.enabled = !1, this.name = "motionBlur", this.texturesRequired = [2]; } } Object.defineProperty(ii.prototype, "geometryBufferRenderer", { get: function() { return this._geometryBufferRenderer; }, set: function(c) { c && c.isSupported && (this._geometryBufferRenderer = c); }, enumerable: !0, configurable: !0 }); ii.prototype.enableGeometryBufferRenderer = function(c = 1, e = 15) { return this._geometryBufferRenderer ? this._geometryBufferRenderer : (this._geometryBufferRenderer = new _o(this, c, e), this._geometryBufferRenderer.isSupported || (this._geometryBufferRenderer = null), this._geometryBufferRenderer); }; ii.prototype.disableGeometryBufferRenderer = function() { this._geometryBufferRenderer && (this._geometryBufferRenderer.dispose(), this._geometryBufferRenderer = null); }; class Mne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_GEOMETRYBUFFERRENDERER, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._gatherRenderTargetsStage.registerStep(Bt.STEP_GATHERRENDERTARGETS_GEOMETRYBUFFERRENDERER, this, this._gatherRenderTargets); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
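 * @example
 * // Minimal usage sketch (assumptions: the bundle is exposed through the usual BABYLON global
 * // and `scene` is an existing BABYLON.Scene). scene.enableGeometryBufferRenderer registers this
 * // component on first use; the returned renderer exposes the G-buffer textures.
 * const gbr = scene.enableGeometryBufferRenderer(); // ratio 1, default depth format
 * if (gbr) {
 *   gbr.enablePosition = true; // also store world positions in the G buffer
 *   const gBuffer = gbr.getGBuffer(); // multi render target holding depth, normal, ... textures
 *   console.log(gBuffer.textures.length);
 * }
 * scene.disableGeometryBufferRenderer(); // dispose it when no longer needed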
*/ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { } _gatherRenderTargets(e) { this.scene._geometryBufferRenderer && e.push(this.scene._geometryBufferRenderer.getGBuffer()); } } _o._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_GEOMETRYBUFFERRENDERER); e || (e = new Mne(c), c._addComponent(e)); }; const d3e = "motionBlurPixelShader", f3e = `varying vec2 vUV;uniform sampler2D textureSampler;uniform float motionStrength;uniform float motionScale;uniform vec2 screenSize; #ifdef OBJECT_BASED uniform sampler2D velocitySampler; #else uniform sampler2D depthSampler;uniform mat4 inverseViewProjection;uniform mat4 prevViewProjection;uniform mat4 projection; #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #ifdef GEOMETRY_SUPPORTED #ifdef OBJECT_BASED vec2 texelSize=1.0/screenSize;vec4 velocityColor=texture2D(velocitySampler,vUV);velocityColor.rg=velocityColor.rg*2.0-vec2(1.0);vec2 velocity=vec2(pow(velocityColor.r,3.0),pow(velocityColor.g,3.0))*velocityColor.a;velocity*=motionScale*motionStrength;float speed=length(velocity/texelSize);int samplesCount=int(clamp(speed,1.0,SAMPLES));velocity=normalize(velocity)*texelSize;float hlim=float(-samplesCount)*0.5+0.5;vec4 result=texture2D(textureSampler,vUV);for (int i=1; i<int(SAMPLES); ++i) {if (i>=samplesCount) break;vec2 offset=vUV+velocity*(hlim+float(i)); #if defined(WEBGPU) result+=texture2DLodEXT(textureSampler,offset,0.0); #else result+=texture2D(textureSampler,offset); #endif } gl_FragColor=result/float(samplesCount);gl_FragColor.a=1.0; #else vec2 texelSize=1.0/screenSize;float depth=texture2D(depthSampler,vUV).r;depth=projection[2].z+projection[3].z/depth; vec4 cpos=vec4(vUV*2.0-1.0,depth,1.0);cpos=inverseViewProjection*cpos;cpos/=cpos.w;vec4 ppos=prevViewProjection*cpos;ppos/=ppos.w;ppos.xy=ppos.xy*0.5+0.5;vec2 velocity=(ppos.xy-vUV)*motionScale*motionStrength;float speed=length(velocity/texelSize);int nSamples=int(clamp(speed,1.0,SAMPLES));vec4 result=texture2D(textureSampler,vUV);for (int i=1; i<int(SAMPLES); ++i) {if (i>=nSamples) break;vec2 offset1=vUV+velocity*(float(i)/float(nSamples-1)-0.5); #if defined(WEBGPU) result+=texture2DLodEXT(textureSampler,offset1,0.0); #else result+=texture2D(textureSampler,offset1); #endif } gl_FragColor=result/float(nSamples); #endif #else gl_FragColor=texture2D(textureSampler,vUV); #endif } `; je.ShadersStore[d3e] = f3e; class R5 extends Bi { /** * Gets the number of iterations used for motion blur quality. Default value is equal to 32 */ get motionBlurSamples() { return this._motionBlurSamples; } /** * Sets the number of iterations to be used for motion blur quality */ set motionBlurSamples(e) { this._motionBlurSamples = e, this._updateEffect(); } /** * Gets whether or not the motion blur post-process is in object based mode. */ get isObjectBased() { return this._isObjectBased; } /** * Sets whether or not the motion blur post-process is in object based mode. */ set isObjectBased(e) { this._isObjectBased !== e && (this._isObjectBased = e, this._applyMode()); } get _geometryBufferRenderer() { return this._forceGeometryBuffer ? this._scene.geometryBufferRenderer : null; } get _prePassRenderer() { return this._forceGeometryBuffer ? null : this._scene.prePassRenderer; } /** * Gets a string identifying the name of the class * @returns "MotionBlurPostProcess" string */ getClassName() { return "MotionBlurPostProcess"; } /** * Creates a new instance MotionBlurPostProcess * @param name The name of the effect.
* @param scene The scene containing the objects to blur according to their velocity. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: true) * @param forceGeometryBuffer If this post process should use geometry buffer instead of prepass (default: false) */ constructor(e, t, i, r, s, n, a, l = 0, o = !1, u = !1) { super(e, "motionBlur", ["motionStrength", "motionScale", "screenSize", "inverseViewProjection", "prevViewProjection", "projection"], ["velocitySampler", "depthSampler"], i, r, s, n, a, `#define GEOMETRY_SUPPORTED #define SAMPLES 64.0 #define OBJECT_BASED`, l, void 0, null, o), this.motionStrength = 1, this._motionBlurSamples = 32, this._isObjectBased = !0, this._forceGeometryBuffer = !1, this._invViewProjection = null, this._previousViewProjection = null, this._forceGeometryBuffer = u, this._forceGeometryBuffer ? (t.enableGeometryBufferRenderer(), this._geometryBufferRenderer && (this._geometryBufferRenderer.enableVelocity = this._isObjectBased)) : (t.enablePrePassRenderer(), this._prePassRenderer && (this._prePassRenderer.markAsDirty(), this._prePassEffectConfiguration = new h3e())), this._applyMode(); } /** * Excludes the given skinned mesh from computing bones velocities. * Computing bones velocities can have a cost and that cost. The cost can be saved by calling this function and by passing the skinned mesh reference to ignore. * @param skinnedMesh The mesh containing the skeleton to ignore when computing the velocity map. */ excludeSkinnedMesh(e) { if (e.skeleton) { let t; if (this._geometryBufferRenderer) t = this._geometryBufferRenderer.excludedSkinnedMeshesFromVelocity; else if (this._prePassRenderer) t = this._prePassRenderer.excludedSkinnedMesh; else return; t.push(e); } } /** * Removes the given skinned mesh from the excluded meshes to integrate bones velocities while rendering the velocity map. * @param skinnedMesh The mesh containing the skeleton that has been ignored previously. * @see excludeSkinnedMesh to exclude a skinned mesh from bones velocity computation. */ removeExcludedSkinnedMesh(e) { if (e.skeleton) { let t; if (this._geometryBufferRenderer) t = this._geometryBufferRenderer.excludedSkinnedMeshesFromVelocity; else if (this._prePassRenderer) t = this._prePassRenderer.excludedSkinnedMesh; else return; const i = t.indexOf(e); i !== -1 && t.splice(i, 1); } } /** * Disposes the post process. * @param camera The camera to dispose the post process on. */ dispose(e) { this._geometryBufferRenderer && (this._geometryBufferRenderer._previousTransformationMatrices = {}, this._geometryBufferRenderer._previousBonesTransformationMatrices = {}, this._geometryBufferRenderer.excludedSkinnedMeshesFromVelocity = []), super.dispose(e); } /** * Called on the mode changed (object based or screen based). 
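 * @example
 * // Minimal usage sketch (assumptions: BABYLON global, an existing `scene` and `camera`).
 * // The post process defaults to object-based blur; switching isObjectBased re-applies the mode.
 * const mb = new BABYLON.MotionBlurPostProcess("mb", scene, 1.0, camera);
 * mb.motionStrength = 1.5;   // amount of blur
 * mb.motionBlurSamples = 32; // quality of the blur
 * mb.isObjectBased = false;  // fall back to screen-based (camera) blur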
*/ _applyMode() { if (!this._geometryBufferRenderer && !this._prePassRenderer) return Ce.Warn("Multiple Render Target support needed to compute object based motion blur"), this.updateEffect(); this._geometryBufferRenderer && (this._geometryBufferRenderer.enableVelocity = this._isObjectBased), this._updateEffect(), this._invViewProjection = null, this._previousViewProjection = null, this.isObjectBased ? (this._prePassRenderer && this._prePassEffectConfiguration && (this._prePassEffectConfiguration.texturesRequired[0] = 2), this.onApply = (e) => this._onApplyObjectBased(e)) : (this._invViewProjection = Ae.Identity(), this._previousViewProjection = this._scene.getTransformMatrix().clone(), this._prePassRenderer && this._prePassEffectConfiguration && (this._prePassEffectConfiguration.texturesRequired[0] = 5), this.onApply = (e) => this._onApplyScreenBased(e)); } /** * Called on the effect is applied when the motion blur post-process is in object based mode. * @param effect */ _onApplyObjectBased(e) { if (e.setVector2("screenSize", new at(this.width, this.height)), e.setFloat("motionScale", this._scene.getAnimationRatio()), e.setFloat("motionStrength", this.motionStrength), this._geometryBufferRenderer) { const t = this._geometryBufferRenderer.getTextureIndex(_o.VELOCITY_TEXTURE_TYPE); e.setTexture("velocitySampler", this._geometryBufferRenderer.getGBuffer().textures[t]); } else if (this._prePassRenderer) { const t = this._prePassRenderer.getIndex(2); e.setTexture("velocitySampler", this._prePassRenderer.getRenderTarget().textures[t]); } } /** * Called on the effect is applied when the motion blur post-process is in screen based mode. * @param effect */ _onApplyScreenBased(e) { const t = de.Matrix[0]; if (t.copyFrom(this._scene.getTransformMatrix()), t.invertToRef(this._invViewProjection), e.setMatrix("inverseViewProjection", this._invViewProjection), e.setMatrix("prevViewProjection", this._previousViewProjection), this._previousViewProjection.copyFrom(t), e.setMatrix("projection", this._scene.getProjectionMatrix()), e.setVector2("screenSize", new at(this.width, this.height)), e.setFloat("motionScale", this._scene.getAnimationRatio()), e.setFloat("motionStrength", this.motionStrength), this._geometryBufferRenderer) { const i = this._geometryBufferRenderer.getTextureIndex(_o.DEPTH_TEXTURE_TYPE); e.setTexture("depthSampler", this._geometryBufferRenderer.getGBuffer().textures[i]); } else if (this._prePassRenderer) { const i = this._prePassRenderer.getIndex(5); e.setTexture("depthSampler", this._prePassRenderer.getRenderTarget().textures[i]); } } /** * Called on the effect must be updated (changed mode, samples count, etc.). */ _updateEffect() { if (this._geometryBufferRenderer || this._prePassRenderer) { const e = [ "#define GEOMETRY_SUPPORTED", "#define SAMPLES " + this._motionBlurSamples.toFixed(1), this._isObjectBased ? 
"#define OBJECT_BASED" : "#define SCREEN_BASED" ]; this.updateEffect(e.join(` `)); } } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new R5(e.name, i, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable, e.textureType, !1), e, i, r); } } F([ W() ], R5.prototype, "motionStrength", void 0); F([ W() ], R5.prototype, "motionBlurSamples", null); F([ W() ], R5.prototype, "isObjectBased", null); Be("BABYLON.MotionBlurPostProcess", R5); const p3e = "refractionPixelShader", _3e = "varying vec2 vUV;uniform sampler2D textureSampler;uniform sampler2D refractionSampler;uniform vec3 baseColor;uniform float depth;uniform float colorLevel;void main() {float ref=1.0-texture2D(refractionSampler,vUV).r;vec2 uv=vUV-vec2(0.5);vec2 offset=uv*depth*ref;vec3 sourceColor=texture2D(textureSampler,vUV-offset).rgb;gl_FragColor=vec4(sourceColor+sourceColor*ref*colorLevel,1.0);}"; je.ShadersStore[p3e] = _3e; class P5 extends Bi { /** * Gets or sets the refraction texture * Please note that you are responsible for disposing the texture if you set it manually */ get refractionTexture() { return this._refTexture; } set refractionTexture(e) { this._refTexture && this._ownRefractionTexture && this._refTexture.dispose(), this._refTexture = e, this._ownRefractionTexture = !1; } /** * Gets a string identifying the name of the class * @returns "RefractionPostProcess" string */ getClassName() { return "RefractionPostProcess"; } /** * Initializes the RefractionPostProcess * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#refraction * @param name The name of the effect. * @param refractionTextureUrl Url of the refraction texture to use * @param color the base color of the refraction (used to taint the rendering) * @param depth simulated refraction depth * @param colorLevel the coefficient of the base color (0 to remove base color tainting) * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. 
(default: false) */ constructor(e, t, i, r, s, n, a, l, o, u) { super(e, "refraction", ["baseColor", "depth", "colorLevel"], ["refractionSampler"], n, a, l, o, u), this._ownRefractionTexture = !0, this.color = i, this.depth = r, this.colorLevel = s, this.refractionTextureUrl = t, this.onActivateObservable.add((h) => { this._refTexture = this._refTexture || new De(t, h.getScene()); }), this.onApplyObservable.add((h) => { h.setColor3("baseColor", this.color), h.setFloat("depth", this.depth), h.setFloat("colorLevel", this.colorLevel), h.setTexture("refractionSampler", this._refTexture); }); } // Methods /** * Disposes of the post process * @param camera Camera to dispose post process on */ dispose(e) { this._refTexture && this._ownRefractionTexture && (this._refTexture.dispose(), this._refTexture = null), super.dispose(e); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new P5(e.name, e.refractionTextureUrl, e.color, e.depth, e.colorLevel, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.reusable), e, i, r); } } F([ W() ], P5.prototype, "color", void 0); F([ W() ], P5.prototype, "depth", void 0); F([ W() ], P5.prototype, "colorLevel", void 0); F([ W() ], P5.prototype, "refractionTextureUrl", void 0); Be("BABYLON.RefractionPostProcess", P5); const m3e = "sharpenPixelShader", g3e = `varying vec2 vUV;uniform sampler2D textureSampler;uniform vec2 screenSize;uniform vec2 sharpnessAmounts; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec2 onePixel=vec2(1.0,1.0)/screenSize;vec4 color=texture2D(textureSampler,vUV);vec4 edgeDetection=texture2D(textureSampler,vUV+onePixel*vec2(0,-1)) + texture2D(textureSampler,vUV+onePixel*vec2(-1,0)) + texture2D(textureSampler,vUV+onePixel*vec2(1,0)) + texture2D(textureSampler,vUV+onePixel*vec2(0,1)) - color*4.0;gl_FragColor=max(vec4(color.rgb*sharpnessAmounts.y,color.a)-(sharpnessAmounts.x*vec4(edgeDetection.rgb,0)),0.);}`; je.ShadersStore[m3e] = g3e; class qI extends Bi { /** * Gets a string identifying the name of the class * @returns "SharpenPostProcess" string */ getClassName() { return "SharpenPostProcess"; } /** * Creates a new instance ConvolutionPostProcess * @param name The name of the effect. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. (default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. 
(default: false) */ constructor(e, t, i, r, s, n, a = 0, l = !1) { super(e, "sharpen", ["sharpnessAmounts", "screenSize"], null, t, i, r, s, n, null, a, void 0, null, l), this.colorAmount = 1, this.edgeAmount = 0.3, this.onApply = (o) => { o.setFloat2("screenSize", this.width, this.height), o.setFloat2("sharpnessAmounts", this.edgeAmount, this.colorAmount); }; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new qI(e.name, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.textureType, e.reusable), e, i, r); } } F([ W() ], qI.prototype, "colorAmount", void 0); F([ W() ], qI.prototype, "edgeAmount", void 0); Be("BABYLON.SharpenPostProcess", qI); class I5 { /** * Gets pipeline name */ get name() { return this._name; } /** Gets the list of attached cameras */ get cameras() { return this._cameras; } /** * Initializes a PostProcessRenderPipeline * @param _engine engine to add the pipeline to * @param name name of the pipeline */ constructor(e, t) { this._engine = e, this._name = t, this._renderEffects = {}, this._renderEffectsForIsolatedPass = new Array(), this._cameras = []; } /** * Gets the class name * @returns "PostProcessRenderPipeline" */ getClassName() { return "PostProcessRenderPipeline"; } /** * If all the render effects in the pipeline are supported */ get isSupported() { for (const e in this._renderEffects) if (Object.prototype.hasOwnProperty.call(this._renderEffects, e) && !this._renderEffects[e].isSupported) return !1; return !0; } /** * Adds an effect to the pipeline * @param renderEffect the effect to add */ addEffect(e) { this._renderEffects[e._name] = e; } // private /** @internal */ _rebuild() { } /** * @internal */ _enableEffect(e, t) { const i = this._renderEffects[e]; i && i._enable(Ve.MakeArray(t || this._cameras)); } /** * @internal */ _disableEffect(e, t) { const i = this._renderEffects[e]; i && i._disable(Ve.MakeArray(t || this._cameras)); } /** * @internal */ _attachCameras(e, t) { const i = Ve.MakeArray(e || this._cameras); if (!i) return; const r = []; let s; for (s = 0; s < i.length; s++) { const n = i[s]; n && (this._cameras.indexOf(n) === -1 ? 
this._cameras.push(n) : t && r.push(s)); } for (s = 0; s < r.length; s++) i.splice(r[s], 1); for (const n in this._renderEffects) Object.prototype.hasOwnProperty.call(this._renderEffects, n) && this._renderEffects[n]._attachCameras(i); } /** * @internal */ _detachCameras(e) { const t = Ve.MakeArray(e || this._cameras); if (t) { for (const i in this._renderEffects) Object.prototype.hasOwnProperty.call(this._renderEffects, i) && this._renderEffects[i]._detachCameras(t); for (let i = 0; i < t.length; i++) this._cameras.splice(this._cameras.indexOf(t[i]), 1); } } /** @internal */ _update() { for (const e in this._renderEffects) Object.prototype.hasOwnProperty.call(this._renderEffects, e) && this._renderEffects[e]._update(); for (let e = 0; e < this._cameras.length; e++) { if (!this._cameras[e]) continue; const t = this._cameras[e].name; this._renderEffectsForIsolatedPass[t] && this._renderEffectsForIsolatedPass[t]._update(); } } /** @internal */ _reset() { this._renderEffects = {}, this._renderEffectsForIsolatedPass = new Array(); } _enableMSAAOnFirstPostProcess(e) { if (!this._engine._features.supportMSAA) return !1; const t = Object.keys(this._renderEffects); if (t.length > 0) { const i = this._renderEffects[t[0]].getPostProcesses(); i && (i[0].samples = e); } return !0; } /** * Ensures that all post processes in the pipeline are the correct size according to the * the viewport's required size */ _adaptPostProcessesToViewPort() { const e = Object.keys(this._renderEffects); for (const t of e) { const i = this._renderEffects[t].getPostProcesses(); if (i) for (const r of i) r.adaptScaleToCurrentViewport = !0; } } /** * Sets the required values to the prepass renderer. * @param prePassRenderer defines the prepass renderer to setup. * @returns true if the pre pass is needed. 
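 * @example
 * // Minimal usage sketch (assumptions: BABYLON global, an existing `scene` and `camera`, and a
 * // PostProcess instance named `myPostProcess`). A pipeline groups render effects and is driven
 * // by the scene's postProcessRenderPipelineManager.
 * const pipeline = new BABYLON.PostProcessRenderPipeline(scene.getEngine(), "myPipeline");
 * const effect = new BABYLON.PostProcessRenderEffect(scene.getEngine(), "myEffect", () => myPostProcess, true);
 * pipeline.addEffect(effect);
 * scene.postProcessRenderPipelineManager.addPipeline(pipeline);
 * scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline("myPipeline", camera);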
*/ // eslint-disable-next-line @typescript-eslint/no-unused-vars setPrePassRenderer(e) { return !1; } /** * Disposes of the pipeline */ dispose() { } } F([ W() ], I5.prototype, "_name", void 0); class Rne { /** * Initializes a PostProcessRenderPipelineManager * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/postProcessRenderPipeline */ constructor() { this._renderPipelines = {}; } /** * Gets the list of supported render pipelines */ get supportedPipelines() { const e = []; for (const t in this._renderPipelines) if (Object.prototype.hasOwnProperty.call(this._renderPipelines, t)) { const i = this._renderPipelines[t]; i.isSupported && e.push(i); } return e; } /** * Adds a pipeline to the manager * @param renderPipeline The pipeline to add */ addPipeline(e) { this._renderPipelines[e._name] = e; } /** * Remove the pipeline from the manager * @param renderPipelineName the name of the pipeline to remove */ removePipeline(e) { delete this._renderPipelines[e]; } /** * Attaches a camera to the pipeline * @param renderPipelineName The name of the pipeline to attach to * @param cameras the camera to attach * @param unique if the camera can be attached multiple times to the pipeline */ attachCamerasToRenderPipeline(e, t, i = !1) { const r = this._renderPipelines[e]; r && r._attachCameras(t, i); } /** * Detaches a camera from the pipeline * @param renderPipelineName The name of the pipeline to detach from * @param cameras the camera to detach */ detachCamerasFromRenderPipeline(e, t) { const i = this._renderPipelines[e]; i && i._detachCameras(t); } /** * Enables an effect by name on a pipeline * @param renderPipelineName the name of the pipeline to enable the effect in * @param renderEffectName the name of the effect to enable * @param cameras the cameras that the effect should be enabled on */ enableEffectInPipeline(e, t, i) { const r = this._renderPipelines[e]; r && r._enableEffect(t, i); } /** * Disables an effect by name on a pipeline * @param renderPipelineName the name of the pipeline to disable the effect in * @param renderEffectName the name of the effect to disable * @param cameras the cameras that the effect should be disabled on */ disableEffectInPipeline(e, t, i) { const r = this._renderPipelines[e]; r && r._disableEffect(t, i); } /** * Updates the state of all contained render pipelines and disposes of any non supported pipelines */ update() { for (const e in this._renderPipelines) if (Object.prototype.hasOwnProperty.call(this._renderPipelines, e)) { const t = this._renderPipelines[e]; t.isSupported ? 
t._update() : (t.dispose(), delete this._renderPipelines[e]); } } /** @internal */ _rebuild() { for (const e in this._renderPipelines) Object.prototype.hasOwnProperty.call(this._renderPipelines, e) && this._renderPipelines[e]._rebuild(); } /** * Disposes of the manager and pipelines */ dispose() { for (const e in this._renderPipelines) Object.prototype.hasOwnProperty.call(this._renderPipelines, e) && this._renderPipelines[e].dispose(); } } Object.defineProperty(ii.prototype, "postProcessRenderPipelineManager", { get: function() { if (!this._postProcessRenderPipelineManager) { let c = this._getComponent(Bt.NAME_POSTPROCESSRENDERPIPELINEMANAGER); c || (c = new Pne(this), this._addComponent(c)), this._postProcessRenderPipelineManager = new Rne(); } return this._postProcessRenderPipelineManager; }, enumerable: !0, configurable: !0 }); class Pne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_POSTPROCESSRENDERPIPELINEMANAGER, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._gatherRenderTargetsStage.registerStep(Bt.STEP_GATHERRENDERTARGETS_POSTPROCESSRENDERPIPELINEMANAGER, this, this._gatherRenderTargets); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { this.scene._postProcessRenderPipelineManager && this.scene._postProcessRenderPipelineManager._rebuild(); } /** * Disposes the component and the associated resources */ dispose() { this.scene._postProcessRenderPipelineManager && this.scene._postProcessRenderPipelineManager.dispose(); } _gatherRenderTargets() { this.scene._postProcessRenderPipelineManager && this.scene._postProcessRenderPipelineManager.update(); } } class Wu extends I5 { /** * Enable or disable automatic building of the pipeline when effects are enabled and disabled. * If false, you will have to manually call prepare() to update the pipeline. */ get automaticBuild() { return this._buildAllowed; } set automaticBuild(e) { this._buildAllowed = e; } /** * Gets active scene */ get scene() { return this._scene; } /** * Enable or disable the sharpen process from the pipeline */ set sharpenEnabled(e) { this._sharpenEnabled !== e && (this._sharpenEnabled = e, this._buildPipeline()); } get sharpenEnabled() { return this._sharpenEnabled; } /** * Specifies the size of the bloom blur kernel, relative to the final output size */ get bloomKernel() { return this._bloomKernel; } set bloomKernel(e) { this._bloomKernel = e, this.bloom.kernel = e / this._hardwareScaleLevel; } /** * The strength of the bloom. */ set bloomWeight(e) { this._bloomWeight !== e && (this.bloom.weight = e, this._bloomWeight = e); } get bloomWeight() { return this._bloomWeight; } /** * The luminance threshold to find bright areas of the image to bloom. */ set bloomThreshold(e) { this._bloomThreshold !== e && (this.bloom.threshold = e, this._bloomThreshold = e); } get bloomThreshold() { return this._bloomThreshold; } /** * The scale of the bloom, lower value will provide better performance. 
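 * @example
 * // Minimal tuning sketch (assumptions: BABYLON global and an existing DefaultRenderingPipeline
 * // instance named `pipeline`). Lower bloomScale trades quality for performance.
 * pipeline.bloomEnabled = true;
 * pipeline.bloomThreshold = 0.8; // only bloom the brightest areas
 * pipeline.bloomWeight = 0.3;    // strength of the effect
 * pipeline.bloomKernel = 64;     // blur kernel, relative to the final output size
 * pipeline.bloomScale = 0.5;     // render the bloom at half resolution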
*/ set bloomScale(e) { this._bloomScale !== e && (this._bloomScale = e, this._rebuildBloom(), this._buildPipeline()); } get bloomScale() { return this._bloomScale; } /** * Enable or disable the bloom from the pipeline */ set bloomEnabled(e) { this._bloomEnabled !== e && (this._bloomEnabled = e, this._buildPipeline()); } get bloomEnabled() { return this._bloomEnabled; } _rebuildBloom() { const e = this.bloom; this.bloom = new GH(this._scene, this.bloomScale, this._bloomWeight, this.bloomKernel / this._hardwareScaleLevel, this._defaultPipelineTextureType, !1), this.bloom.threshold = e.threshold; for (let t = 0; t < this._cameras.length; t++) e.disposeEffects(this._cameras[t]); } /** * If the depth of field is enabled. */ get depthOfFieldEnabled() { return this._depthOfFieldEnabled; } set depthOfFieldEnabled(e) { this._depthOfFieldEnabled !== e && (this._depthOfFieldEnabled = e, this._buildPipeline()); } /** * Blur level of the depth of field effect. (Higher blur will effect performance) */ get depthOfFieldBlurLevel() { return this._depthOfFieldBlurLevel; } set depthOfFieldBlurLevel(e) { if (this._depthOfFieldBlurLevel === e) return; this._depthOfFieldBlurLevel = e; const t = this.depthOfField; this.depthOfField = new KH(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, !1), this.depthOfField.focalLength = t.focalLength, this.depthOfField.focusDistance = t.focusDistance, this.depthOfField.fStop = t.fStop, this.depthOfField.lensSize = t.lensSize; for (let i = 0; i < this._cameras.length; i++) t.disposeEffects(this._cameras[i]); this._buildPipeline(); } /** * If the anti aliasing is enabled. */ set fxaaEnabled(e) { this._fxaaEnabled !== e && (this._fxaaEnabled = e, this._buildPipeline()); } get fxaaEnabled() { return this._fxaaEnabled; } /** * MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1) */ set samples(e) { this._samples !== e && (this._samples = e, this._buildPipeline()); } get samples() { return this._samples; } /** * If image processing is enabled. */ set imageProcessingEnabled(e) { this._imageProcessingEnabled !== e && (this._scene.imageProcessingConfiguration.isEnabled = e); } get imageProcessingEnabled() { return this._imageProcessingEnabled; } /** * If glow layer is enabled. (Adds a glow effect to emmissive materials) */ set glowLayerEnabled(e) { e && !this._glowLayer ? this._glowLayer = new Y_("", this._scene) : !e && this._glowLayer && (this._glowLayer.dispose(), this._glowLayer = null); } get glowLayerEnabled() { return this._glowLayer != null; } /** * Gets the glow layer (or null if not defined) */ get glowLayer() { return this._glowLayer; } /** * Enable or disable the chromaticAberration process from the pipeline */ set chromaticAberrationEnabled(e) { this._chromaticAberrationEnabled !== e && (this._chromaticAberrationEnabled = e, this._buildPipeline()); } get chromaticAberrationEnabled() { return this._chromaticAberrationEnabled; } /** * Enable or disable the grain process from the pipeline */ set grainEnabled(e) { this._grainEnabled !== e && (this._grainEnabled = e, this._buildPipeline()); } get grainEnabled() { return this._grainEnabled; } /** * Instantiates a DefaultRenderingPipeline. 
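 * @example
 * // Minimal construction sketch (assumptions: BABYLON global, an existing `scene` and `camera`).
 * const pipeline = new BABYLON.DefaultRenderingPipeline("default", true, scene, [camera]);
 * pipeline.fxaaEnabled = true;    // post-process anti aliasing
 * pipeline.samples = 4;           // MSAA on the first post process (WebGL2 and up)
 * pipeline.sharpenEnabled = true;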
* @param name The rendering pipeline name (default: "") * @param hdr If high dynamic range textures should be used (default: true) * @param scene The scene linked to this pipeline (default: the last created scene) * @param cameras The array of cameras that the rendering pipeline will be attached to (default: scene.cameras) * @param automaticBuild If false, you will have to manually call prepare() to update the pipeline (default: true) */ constructor(e = "", t = !0, i = gi.LastCreatedScene, r, s = !0) { super(i.getEngine(), e), this._camerasToBeAttached = [], this.SharpenPostProcessId = "SharpenPostProcessEffect", this.ImageProcessingPostProcessId = "ImageProcessingPostProcessEffect", this.FxaaPostProcessId = "FxaaPostProcessEffect", this.ChromaticAberrationPostProcessId = "ChromaticAberrationPostProcessEffect", this.GrainPostProcessId = "GrainPostProcessEffect", this._glowLayer = null, this.animations = [], this._imageProcessingConfigurationObserver = null, this._sharpenEnabled = !1, this._bloomEnabled = !1, this._depthOfFieldEnabled = !1, this._depthOfFieldBlurLevel = nP.Low, this._fxaaEnabled = !1, this._imageProcessingEnabled = !0, this._bloomScale = 0.5, this._chromaticAberrationEnabled = !1, this._grainEnabled = !1, this._buildAllowed = !0, this.onBuildObservable = new Fe(), this._resizeObserver = null, this._hardwareScaleLevel = 1, this._bloomKernel = 64, this._bloomWeight = 0.15, this._bloomThreshold = 0.9, this._samples = 1, this._hasCleared = !1, this._prevPostProcess = null, this._prevPrevPostProcess = null, this._depthOfFieldSceneObserver = null, this._activeCameraChangedObserver = null, this._activeCamerasChangedObserver = null, this._cameras = r || i.cameras, this._cameras = this._cameras.slice(), this._camerasToBeAttached = this._cameras.slice(), this._buildAllowed = s, this._scene = i; const n = this._scene.getEngine().getCaps(); this._hdr = t && (n.textureHalfFloatRender || n.textureFloatRender), this._hdr ? n.textureHalfFloatRender ? 
this._defaultPipelineTextureType = 2 : n.textureFloatRender && (this._defaultPipelineTextureType = 1) : this._defaultPipelineTextureType = 0, i.postProcessRenderPipelineManager.addPipeline(this); const a = this._scene.getEngine(); this.sharpen = new qI("sharpen", 1, null, De.BILINEAR_SAMPLINGMODE, a, !1, this._defaultPipelineTextureType, !0), this._sharpenEffect = new gn(a, this.SharpenPostProcessId, () => this.sharpen, !0), this.depthOfField = new KH(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, !0), this._hardwareScaleLevel = a.getHardwareScalingLevel(), this._resizeObserver = a.onResizeObservable.add(() => { this._hardwareScaleLevel = a.getHardwareScalingLevel(), this.bloomKernel = this._bloomKernel; }), this.bloom = new GH(this._scene, this._bloomScale, this._bloomWeight, this.bloomKernel / this._hardwareScaleLevel, this._defaultPipelineTextureType, !0), this.chromaticAberration = new d6("ChromaticAberration", a.getRenderWidth(), a.getRenderHeight(), 1, null, De.BILINEAR_SAMPLINGMODE, a, !1, this._defaultPipelineTextureType, !0), this._chromaticAberrationEffect = new gn(a, this.ChromaticAberrationPostProcessId, () => this.chromaticAberration, !0), this.grain = new ZI("Grain", 1, null, De.BILINEAR_SAMPLINGMODE, a, !1, this._defaultPipelineTextureType, !0), this._grainEffect = new gn(a, this.GrainPostProcessId, () => this.grain, !0); let l = !0; this._imageProcessingConfigurationObserver = this._scene.imageProcessingConfiguration.onUpdateParameters.add(() => { this.bloom._downscale._exposure = this._scene.imageProcessingConfiguration.exposure, this.imageProcessingEnabled !== this._scene.imageProcessingConfiguration.isEnabled && (this._imageProcessingEnabled = this._scene.imageProcessingConfiguration.isEnabled, l ? Ve.SetImmediate(() => { this._buildPipeline(); }) : this._buildPipeline()); }), this._buildPipeline(), l = !1; } /** * Get the class name * @returns "DefaultRenderingPipeline" */ getClassName() { return "DefaultRenderingPipeline"; } /** * Force the compilation of the entire pipeline. */ prepare() { const e = this._buildAllowed; this._buildAllowed = !0, this._buildPipeline(), this._buildAllowed = e; } _setAutoClearAndTextureSharing(e, t = !1) { this._hasCleared ? e.autoClear = !1 : (e.autoClear = !0, this._scene.autoClear = !1, this._hasCleared = !0), t || (this._prevPrevPostProcess ? 
e.shareOutputWith(this._prevPrevPostProcess) : e.useOwnOutput(), this._prevPostProcess && (this._prevPrevPostProcess = this._prevPostProcess), this._prevPostProcess = e); } _buildPipeline() { if (!this._buildAllowed) return; this._scene.autoClear = !0; const e = this._scene.getEngine(); if (this._disposePostProcesses(), this._cameras !== null && (this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), this._cameras = this._camerasToBeAttached.slice()), this._reset(), this._prevPostProcess = null, this._prevPrevPostProcess = null, this._hasCleared = !1, this.depthOfFieldEnabled) { if (this._cameras.length > 1) { for (const t of this._cameras) { const i = this._scene.enableDepthRenderer(t); i.useOnlyInActiveCamera = !0; } this._depthOfFieldSceneObserver = this._scene.onAfterRenderTargetsRenderObservable.add((t) => { this._cameras.indexOf(t.activeCamera) > -1 && (this.depthOfField.depthTexture = t.enableDepthRenderer(t.activeCamera).getDepthMap()); }); } else { this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver); const t = this._scene.enableDepthRenderer(this._cameras[0]); this.depthOfField.depthTexture = t.getDepthMap(); } this.depthOfField._isReady() || this.depthOfField._updateEffects(), this.addEffect(this.depthOfField), this._setAutoClearAndTextureSharing(this.depthOfField._effects[0], !0); } else this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver); this.bloomEnabled && (this.bloom._isReady() || this.bloom._updateEffects(), this.addEffect(this.bloom), this._setAutoClearAndTextureSharing(this.bloom._effects[0], !0)), this._imageProcessingEnabled && (this.imageProcessing = new QU("imageProcessing", 1, null, De.BILINEAR_SAMPLINGMODE, e, !1, this._defaultPipelineTextureType, this.scene.imageProcessingConfiguration), this._hdr ? 
(this.addEffect(new gn(e, this.ImageProcessingPostProcessId, () => this.imageProcessing, !0)), this._setAutoClearAndTextureSharing(this.imageProcessing)) : this._scene.imageProcessingConfiguration.applyByPostProcess = !1, (!this._cameras || this._cameras.length === 0) && (this._scene.imageProcessingConfiguration.applyByPostProcess = !1), this.imageProcessing.getEffect() || this.imageProcessing._updateParameters()), this.sharpenEnabled && (this.sharpen.isReady() || this.sharpen.updateEffect(), this.addEffect(this._sharpenEffect), this._setAutoClearAndTextureSharing(this.sharpen)), this.grainEnabled && (this.grain.isReady() || this.grain.updateEffect(), this.addEffect(this._grainEffect), this._setAutoClearAndTextureSharing(this.grain)), this.chromaticAberrationEnabled && (this.chromaticAberration.isReady() || this.chromaticAberration.updateEffect(), this.addEffect(this._chromaticAberrationEffect), this._setAutoClearAndTextureSharing(this.chromaticAberration)), this.fxaaEnabled && (this.fxaa = new $I("fxaa", 1, null, De.BILINEAR_SAMPLINGMODE, e, !1, this._defaultPipelineTextureType), this.addEffect(new gn(e, this.FxaaPostProcessId, () => this.fxaa, !0)), this._setAutoClearAndTextureSharing(this.fxaa, !0)), this._cameras !== null && this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras), (this._scene.activeCameras && this._scene.activeCameras.length > 1 || this._scene.activeCamera && this._cameras.indexOf(this._scene.activeCamera) === -1) && (this._scene.autoClear = !0), this._activeCameraChangedObserver || (this._activeCameraChangedObserver = this._scene.onActiveCameraChanged.add(() => { this._scene.activeCamera && this._cameras.indexOf(this._scene.activeCamera) === -1 && (this._scene.autoClear = !0); })), this._activeCamerasChangedObserver || (this._activeCamerasChangedObserver = this._scene.onActiveCamerasChanged.add(() => { this._scene.activeCameras && this._scene.activeCameras.length > 1 && (this._scene.autoClear = !0); })), this._adaptPostProcessesToViewPort(), !this._enableMSAAOnFirstPostProcess(this.samples) && this.samples > 1 && Ce.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0"), this.onBuildObservable.notifyObservers(this); } _disposePostProcesses(e = !1) { for (let t = 0; t < this._cameras.length; t++) { const i = this._cameras[t]; this.imageProcessing && this.imageProcessing.dispose(i), this.fxaa && this.fxaa.dispose(i), e && (this.sharpen && this.sharpen.dispose(i), this.depthOfField && (this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver), this.depthOfField.disposeEffects(i)), this.bloom && this.bloom.disposeEffects(i), this.chromaticAberration && this.chromaticAberration.dispose(i), this.grain && this.grain.dispose(i), this._glowLayer && this._glowLayer.dispose()); } this.imageProcessing = null, this.fxaa = null, e && (this.sharpen = null, this._sharpenEffect = null, this.depthOfField = null, this.bloom = null, this.chromaticAberration = null, this._chromaticAberrationEffect = null, this.grain = null, this._grainEffect = null, this._glowLayer = null); } /** * Adds a camera to the pipeline * @param camera the camera to be added */ addCamera(e) { this._camerasToBeAttached.push(e), this._buildPipeline(); } /** * Removes a camera from the pipeline * @param camera the camera to remove */ removeCamera(e) { const t = this._camerasToBeAttached.indexOf(e); this._camerasToBeAttached.splice(t, 1), this._buildPipeline(); } /** * Dispose of the pipeline and stop all 
post processes */ dispose() { this._buildAllowed = !1, this.onBuildObservable.clear(), this._disposePostProcesses(!0), this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), this._scene._postProcessRenderPipelineManager.removePipeline(this.name), this._scene.autoClear = !0, this._resizeObserver && (this._scene.getEngine().onResizeObservable.remove(this._resizeObserver), this._resizeObserver = null), this._scene.onActiveCameraChanged.remove(this._activeCameraChangedObserver), this._scene.onActiveCamerasChanged.remove(this._activeCamerasChangedObserver), this._scene.imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingConfigurationObserver), super.dispose(); } /** * Serialize the rendering pipeline (Used when exporting) * @returns the serialized object */ serialize() { const e = St.Serialize(this); return e.customType = "DefaultRenderingPipeline", e; } /** * Parse the serialized pipeline * @param source Source pipeline. * @param scene The scene to load the pipeline to. * @param rootUrl The URL of the serialized pipeline. * @returns An instantiated pipeline from the serialized object. */ static Parse(e, t, i) { return St.Parse(() => new Wu(e._name, e._name._hdr, t), e, t, i); } } F([ W() ], Wu.prototype, "sharpenEnabled", null); F([ W() ], Wu.prototype, "bloomKernel", null); F([ W() ], Wu.prototype, "_bloomWeight", void 0); F([ W() ], Wu.prototype, "_bloomThreshold", void 0); F([ W() ], Wu.prototype, "_hdr", void 0); F([ W() ], Wu.prototype, "bloomWeight", null); F([ W() ], Wu.prototype, "bloomThreshold", null); F([ W() ], Wu.prototype, "bloomScale", null); F([ W() ], Wu.prototype, "bloomEnabled", null); F([ W() ], Wu.prototype, "depthOfFieldEnabled", null); F([ W() ], Wu.prototype, "depthOfFieldBlurLevel", null); F([ W() ], Wu.prototype, "fxaaEnabled", null); F([ W() ], Wu.prototype, "samples", null); F([ W() ], Wu.prototype, "imageProcessingEnabled", null); F([ W() ], Wu.prototype, "glowLayerEnabled", null); F([ W() ], Wu.prototype, "chromaticAberrationEnabled", null); F([ W() ], Wu.prototype, "grainEnabled", null); Be("BABYLON.DefaultRenderingPipeline", Wu); const v3e = "lensHighlightsPixelShader", A3e = `uniform sampler2D textureSampler; uniform float gain;uniform float threshold;uniform float screen_width;uniform float screen_height;varying vec2 vUV;vec4 highlightColor(vec4 color) {vec4 highlight=color;float luminance=dot(highlight.rgb,vec3(0.2125,0.7154,0.0721));float lum_threshold;if (threshold>1.0) { lum_threshold=0.94+0.01*threshold; } else { lum_threshold=0.5+0.44*threshold; } luminance=clamp((luminance-lum_threshold)*(1.0/(1.0-lum_threshold)),0.0,1.0);highlight*=luminance*gain;highlight.a=1.0;return highlight;} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 original=texture2D(textureSampler,vUV);if (gain==-1.0) {gl_FragColor=vec4(0.0,0.0,0.0,1.0);return;} float w=2.0/screen_width;float h=2.0/screen_height;float weight=1.0;vec4 blurred=vec4(0.0,0.0,0.0,0.0); #ifdef PENTAGON 
blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.84*w,0.43*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.48*w,-1.29*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.61*w,1.51*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.55*w,-0.74*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.71*w,-0.52*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.94*w,1.59*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.40*w,-1.87*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.62*w,1.16*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.09*w,0.25*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.46*w,-1.71*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.08*w,2.42*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.85*w,-1.89*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.89*w,0.16*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.29*w,1.88*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.40*w,-2.81*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.54*w,2.26*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.60*w,-0.61*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.31*w,-1.30*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.83*w,2.53*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.12*w,-2.48*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.60*w,1.11*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.82*w,0.99*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.50*w,-2.81*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.85*w,3.33*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.94*w,-1.92*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(3.27*w,-0.53*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.95*w,2.48*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.23*w,-3.04*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.17*w,2.05*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.97*w,-0.04*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.25*w,-2.00*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.31*w,3.08*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.94*w,-2.59*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(3.37*w,0.64*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-3.13*w,1.93*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.03*w,-3.65*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.60*w,3.17*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-3.14*w,-1.19*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(3.00*w,-1.19*h))); #else 
blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.85*w,0.36*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.52*w,-1.14*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.46*w,1.42*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.46*w,-0.83*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.79*w,-0.42*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.11*w,1.62*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.29*w,-2.07*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.69*w,1.39*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.28*w,0.12*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.65*w,-1.69*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.08*w,2.44*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.63*w,-1.90*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.55*w,0.31*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.13*w,1.52*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.56*w,-2.61*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.38*w,2.34*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.64*w,-0.81*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.53*w,-1.21*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.06*w,2.63*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.00*w,-2.69*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.59*w,1.32*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.82*w,0.78*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.57*w,-2.50*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(0.54*w,2.93*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.39*w,-1.81*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(3.01*w,-0.28*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.04*w,2.25*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.02*w,-3.05*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.09*w,2.25*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-3.07*w,-0.25*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.44*w,-1.90*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.52*w,3.05*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-1.68*w,-2.61*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(3.01*w,0.79*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.76*w,1.46*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.05*w,-2.94*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(1.21*w,2.88*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-2.84*w,-1.30*h)));blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(2.98*w,-0.96*h))); #endif blurred/=39.0;gl_FragColor=blurred;}`; je.ShadersStore[v3e] = A3e; const y3e = "depthOfFieldPixelShader", C3e = `uniform sampler2D textureSampler;uniform sampler2D highlightsSampler;uniform sampler2D depthSampler;uniform sampler2D grainSampler;uniform float grain_amount;uniform bool blur_noise;uniform float screen_width;uniform float screen_height;uniform float distortion;uniform bool dof_enabled;uniform float screen_distance; uniform float aperture;uniform float darken;uniform float edge_blur;uniform bool highlights;uniform float near;uniform float far;varying vec2 vUV; 
#define PI 3.14159265 #define TWOPI 6.28318530 #define inverse_focal_length 0.1 vec2 centered_screen_pos;vec2 distorted_coords;float radius2;float radius;vec2 rand(vec2 co) {float noise1=(fract(sin(dot(co,vec2(12.9898,78.233)))*43758.5453));float noise2=(fract(sin(dot(co,vec2(12.9898,78.233)*2.0))*43758.5453));return clamp(vec2(noise1,noise2),0.0,1.0);} vec2 getDistortedCoords(vec2 coords) {if (distortion==0.0) { return coords; } vec2 direction=1.0*normalize(centered_screen_pos);vec2 dist_coords=vec2(0.5,0.5);dist_coords.x=0.5+direction.x*radius2*1.0;dist_coords.y=0.5+direction.y*radius2*1.0;float dist_amount=clamp(distortion*0.23,0.0,1.0);dist_coords=mix(coords,dist_coords,dist_amount);return dist_coords;} float sampleScreen(inout vec4 color,in vec2 offset,in float weight) {vec2 coords=distorted_coords;float angle=rand(coords*100.0).x*TWOPI;coords+=vec2(offset.x*cos(angle)-offset.y*sin(angle),offset.x*sin(angle)+offset.y*cos(angle));color+=texture2D(textureSampler,coords)*weight;return weight;} float getBlurLevel(float size) {return min(3.0,ceil(size/1.0));} vec4 getBlurColor(float size) {vec4 col=texture2D(textureSampler,distorted_coords);float blur_level=getBlurLevel(size);float w=(size/screen_width);float h=(size/screen_height);float total_weight=1.0;vec2 sample_coords;total_weight+=sampleScreen(col,vec2(-0.50*w,0.24*h),0.93);total_weight+=sampleScreen(col,vec2(0.30*w,-0.75*h),0.90);total_weight+=sampleScreen(col,vec2(0.36*w,0.96*h),0.87);total_weight+=sampleScreen(col,vec2(-1.08*w,-0.55*h),0.85);total_weight+=sampleScreen(col,vec2(1.33*w,-0.37*h),0.83);total_weight+=sampleScreen(col,vec2(-0.82*w,1.31*h),0.80);total_weight+=sampleScreen(col,vec2(-0.31*w,-1.67*h),0.78);total_weight+=sampleScreen(col,vec2(1.47*w,1.11*h),0.76);total_weight+=sampleScreen(col,vec2(-1.97*w,0.19*h),0.74);total_weight+=sampleScreen(col,vec2(1.42*w,-1.57*h),0.72);if (blur_level>1.0) {total_weight+=sampleScreen(col,vec2(0.01*w,2.25*h),0.70);total_weight+=sampleScreen(col,vec2(-1.62*w,-1.74*h),0.67);total_weight+=sampleScreen(col,vec2(2.49*w,0.20*h),0.65);total_weight+=sampleScreen(col,vec2(-2.07*w,1.61*h),0.63);total_weight+=sampleScreen(col,vec2(0.46*w,-2.70*h),0.61);total_weight+=sampleScreen(col,vec2(1.55*w,2.40*h),0.59);total_weight+=sampleScreen(col,vec2(-2.88*w,-0.75*h),0.56);total_weight+=sampleScreen(col,vec2(2.73*w,-1.44*h),0.54);total_weight+=sampleScreen(col,vec2(-1.08*w,3.02*h),0.52);total_weight+=sampleScreen(col,vec2(-1.28*w,-3.05*h),0.49);} if (blur_level>2.0) {total_weight+=sampleScreen(col,vec2(3.11*w,1.43*h),0.46);total_weight+=sampleScreen(col,vec2(-3.36*w,1.08*h),0.44);total_weight+=sampleScreen(col,vec2(1.80*w,-3.16*h),0.41);total_weight+=sampleScreen(col,vec2(0.83*w,3.65*h),0.38);total_weight+=sampleScreen(col,vec2(-3.16*w,-2.19*h),0.34);total_weight+=sampleScreen(col,vec2(3.92*w,-0.53*h),0.31);total_weight+=sampleScreen(col,vec2(-2.59*w,3.12*h),0.26);total_weight+=sampleScreen(col,vec2(-0.20*w,-4.15*h),0.22);total_weight+=sampleScreen(col,vec2(3.02*w,3.00*h),0.15);} col/=total_weight; if (darken>0.0) {col.rgb*=clamp(0.3,1.0,1.05-size*0.5*darken);} return col;} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {centered_screen_pos=vec2(vUV.x-0.5,vUV.y-0.5);radius2=centered_screen_pos.x*centered_screen_pos.x+centered_screen_pos.y*centered_screen_pos.y;radius=sqrt(radius2);distorted_coords=getDistortedCoords(vUV); vec2 texels_coords=vec2(vUV.x*screen_width,vUV.y*screen_height); float depth=texture2D(depthSampler,distorted_coords).r; float distance=near+(far-near)*depth; vec4 
color=texture2D(textureSampler,vUV); float coc=abs(aperture*(screen_distance*(inverse_focal_length-1.0/distance)-1.0));if (dof_enabled==false || coc<0.07) { coc=0.0; } float edge_blur_amount=0.0;if (edge_blur>0.0) {edge_blur_amount=clamp((radius*2.0-1.0+0.15*edge_blur)*1.5,0.0,1.0)*1.3;} float blur_amount=max(edge_blur_amount,coc);if (blur_amount==0.0) {gl_FragColor=texture2D(textureSampler,distorted_coords);} else {gl_FragColor=getBlurColor(blur_amount*1.7);if (highlights) {gl_FragColor.rgb+=clamp(coc,0.0,1.0)*texture2D(highlightsSampler,distorted_coords).rgb;} if (blur_noise) {vec2 noise=rand(distorted_coords)*0.01*blur_amount;vec2 blurred_coord=vec2(distorted_coords.x+noise.x,distorted_coords.y+noise.y);gl_FragColor=0.04*texture2D(textureSampler,blurred_coord)+0.96*gl_FragColor;}} if (grain_amount>0.0) {vec4 grain_color=texture2D(grainSampler,texels_coords*0.003);gl_FragColor.rgb+=(-0.5+grain_color.rgb)*0.30*grain_amount;}} `; je.ShadersStore[y3e] = C3e; class x3e extends I5 { /** * @constructor * * Effect parameters are as follow: * { * chromatic_aberration: number; // from 0 to x (1 for realism) * edge_blur: number; // from 0 to x (1 for realism) * distortion: number; // from 0 to x (1 for realism), note that this will effect the pointer position precision * grain_amount: number; // from 0 to 1 * grain_texture: BABYLON.Texture; // texture to use for grain effect; if unset, use random B&W noise * dof_focus_distance: number; // depth-of-field: focus distance; unset to disable (disabled by default) * dof_aperture: number; // depth-of-field: focus blur bias (default: 1) * dof_darken: number; // depth-of-field: darken that which is out of focus (from 0 to 1, disabled by default) * dof_pentagon: boolean; // depth-of-field: makes a pentagon-like "bokeh" effect * dof_gain: number; // depth-of-field: highlights gain; unset to disable (disabled by default) * dof_threshold: number; // depth-of-field: highlights threshold (default: 1) * blur_noise: boolean; // add a little bit of noise to the blur (default: true) * } * Note: if an effect parameter is unset, effect is disabled * * @param name The rendering pipeline name * @param parameters - An object containing all parameters (see above) * @param scene The scene linked to this pipeline * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5) * @param cameras The array of cameras that the rendering pipeline will be attached to */ constructor(e, t, i, r = 1, s) { super(i.getEngine(), e), this.LensChromaticAberrationEffect = "LensChromaticAberrationEffect", this.HighlightsEnhancingEffect = "HighlightsEnhancingEffect", this.LensDepthOfFieldEffect = "LensDepthOfFieldEffect", this._pentagonBokehIsEnabled = !1, this._scene = i, this._depthTexture = i.enableDepthRenderer().getDepthMap(), t.grain_texture ? this._grainTexture = t.grain_texture : this._createGrainTexture(), this._edgeBlur = t.edge_blur ? t.edge_blur : 0, this._grainAmount = t.grain_amount ? t.grain_amount : 0, this._chromaticAberration = t.chromatic_aberration ? t.chromatic_aberration : 0, this._distortion = t.distortion ? t.distortion : 0, this._highlightsGain = t.dof_gain !== void 0 ? t.dof_gain : -1, this._highlightsThreshold = t.dof_threshold ? t.dof_threshold : 1, this._dofDistance = t.dof_focus_distance !== void 0 ? t.dof_focus_distance : -1, this._dofAperture = t.dof_aperture ? t.dof_aperture : 1, this._dofDarken = t.dof_darken ? 
t.dof_darken : 0, this._dofPentagon = t.dof_pentagon !== void 0 ? t.dof_pentagon : !0, this._blurNoise = t.blur_noise !== void 0 ? t.blur_noise : !0, this._createChromaticAberrationPostProcess(r), this._createHighlightsPostProcess(r), this._createDepthOfFieldPostProcess(r / 4), this.addEffect(new gn(i.getEngine(), this.LensChromaticAberrationEffect, () => this._chromaticAberrationPostProcess, !0)), this.addEffect(new gn(i.getEngine(), this.HighlightsEnhancingEffect, () => this._highlightsPostProcess, !0)), this.addEffect(new gn(i.getEngine(), this.LensDepthOfFieldEffect, () => this._depthOfFieldPostProcess, !0)), this._highlightsGain === -1 && this._disableEffect(this.HighlightsEnhancingEffect, null), i.postProcessRenderPipelineManager.addPipeline(this), s && i.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(e, s); } /** * Get the class name * @returns "LensRenderingPipeline" */ getClassName() { return "LensRenderingPipeline"; } // Properties /** * Gets associated scene */ get scene() { return this._scene; } /** * Gets or sets the edge blur */ get edgeBlur() { return this._edgeBlur; } set edgeBlur(e) { this.setEdgeBlur(e); } /** * Gets or sets the grain amount */ get grainAmount() { return this._grainAmount; } set grainAmount(e) { this.setGrainAmount(e); } /** * Gets or sets the chromatic aberration amount */ get chromaticAberration() { return this._chromaticAberration; } set chromaticAberration(e) { this.setChromaticAberration(e); } /** * Gets or sets the depth of field aperture */ get dofAperture() { return this._dofAperture; } set dofAperture(e) { this.setAperture(e); } /** * Gets or sets the edge distortion */ get edgeDistortion() { return this._distortion; } set edgeDistortion(e) { this.setEdgeDistortion(e); } /** * Gets or sets the depth of field distortion */ get dofDistortion() { return this._dofDistance; } set dofDistortion(e) { this.setFocusDistance(e); } /** * Gets or sets the darken out of focus amount */ get darkenOutOfFocus() { return this._dofDarken; } set darkenOutOfFocus(e) { this.setDarkenOutOfFocus(e); } /** * Gets or sets a boolean indicating if blur noise is enabled */ get blurNoise() { return this._blurNoise; } set blurNoise(e) { this._blurNoise = e; } /** * Gets or sets a boolean indicating if pentagon bokeh is enabled */ get pentagonBokeh() { return this._pentagonBokehIsEnabled; } set pentagonBokeh(e) { e ? 
this.enablePentagonBokeh() : this.disablePentagonBokeh(); } /** * Gets or sets the highlight grain amount */ get highlightsGain() { return this._highlightsGain; } set highlightsGain(e) { this.setHighlightsGain(e); } /** * Gets or sets the highlight threshold */ get highlightsThreshold() { return this._highlightsThreshold; } set highlightsThreshold(e) { this.setHighlightsThreshold(e); } // public methods (self explanatory) /** * Sets the amount of blur at the edges * @param amount blur amount */ setEdgeBlur(e) { this._edgeBlur = e; } /** * Sets edge blur to 0 */ disableEdgeBlur() { this._edgeBlur = 0; } /** * Sets the amount of grain * @param amount Amount of grain */ setGrainAmount(e) { this._grainAmount = e; } /** * Set grain amount to 0 */ disableGrain() { this._grainAmount = 0; } /** * Sets the chromatic aberration amount * @param amount amount of chromatic aberration */ setChromaticAberration(e) { this._chromaticAberration = e; } /** * Sets chromatic aberration amount to 0 */ disableChromaticAberration() { this._chromaticAberration = 0; } /** * Sets the EdgeDistortion amount * @param amount amount of EdgeDistortion */ setEdgeDistortion(e) { this._distortion = e; } /** * Sets edge distortion to 0 */ disableEdgeDistortion() { this._distortion = 0; } /** * Sets the FocusDistance amount * @param amount amount of FocusDistance */ setFocusDistance(e) { this._dofDistance = e; } /** * Disables depth of field */ disableDepthOfField() { this._dofDistance = -1; } /** * Sets the Aperture amount * @param amount amount of Aperture */ setAperture(e) { this._dofAperture = e; } /** * Sets the DarkenOutOfFocus amount * @param amount amount of DarkenOutOfFocus */ setDarkenOutOfFocus(e) { this._dofDarken = e; } /** * Creates a pentagon bokeh effect */ enablePentagonBokeh() { this._highlightsPostProcess.updateEffect(`#define PENTAGON `), this._pentagonBokehIsEnabled = !0; } /** * Disables the pentagon bokeh effect */ disablePentagonBokeh() { this._pentagonBokehIsEnabled = !1, this._highlightsPostProcess.updateEffect(); } /** * Enables noise blur */ enableNoiseBlur() { this._blurNoise = !0; } /** * Disables noise blur */ disableNoiseBlur() { this._blurNoise = !1; } /** * Sets the HighlightsGain amount * @param amount amount of HighlightsGain */ setHighlightsGain(e) { this._highlightsGain = e; } /** * Sets the HighlightsThreshold amount * @param amount amount of HighlightsThreshold */ setHighlightsThreshold(e) { this._highlightsGain === -1 && (this._highlightsGain = 1), this._highlightsThreshold = e; } /** * Disables highlights */ disableHighlights() { this._highlightsGain = -1; } /** * Removes the internal pipeline assets and detaches the pipeline from the scene cameras * @param disableDepthRender If the scene's depth rendering should be disabled (default: false) */ dispose(e = !1) { this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._scene.cameras), this._chromaticAberrationPostProcess = null, this._highlightsPostProcess = null, this._depthOfFieldPostProcess = null, this._grainTexture.dispose(), e && this._scene.disableDepthRenderer(); } // colors shifting and distortion _createChromaticAberrationPostProcess(e) { this._chromaticAberrationPostProcess = new Bi( "LensChromaticAberration", "chromaticAberration", ["chromatic_aberration", "screen_width", "screen_height", "direction", "radialIntensity", "centerPosition"], // uniforms [], // samplers e, null, De.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1 ), this._chromaticAberrationPostProcess.onApply = (t) => { 
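/* Illustrative usage sketch (an assumption, not part of this bundle): the pipeline above is
   normally constructed through the public BABYLON.LensRenderingPipeline API with the option
   names read in the constructor, e.g.
     new BABYLON.LensRenderingPipeline("lens", {
       edge_blur: 1.0, chromatic_aberration: 1.0, distortion: 1.0,
       grain_amount: 1.0, dof_focus_distance: 50, dof_aperture: 6.0
     }, scene, 1.0, [scene.activeCamera]);
   The onApply callback below binds the per-frame uniforms consumed by the chromaticAberration
   shader (aberration amount, screen size, radial intensity, direction and center position). */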
t.setFloat("chromatic_aberration", this._chromaticAberration), t.setFloat("screen_width", this._scene.getEngine().getRenderWidth()), t.setFloat("screen_height", this._scene.getEngine().getRenderHeight()), t.setFloat("radialIntensity", 1), t.setFloat2("direction", 17, 17), t.setFloat2("centerPosition", 0.5, 0.5); }; } // highlights enhancing _createHighlightsPostProcess(e) { this._highlightsPostProcess = new Bi( "LensHighlights", "lensHighlights", ["gain", "threshold", "screen_width", "screen_height"], // uniforms [], // samplers e, null, De.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, this._dofPentagon ? `#define PENTAGON ` : "" ), this._highlightsPostProcess.externalTextureSamplerBinding = !0, this._highlightsPostProcess.onApply = (t) => { t.setFloat("gain", this._highlightsGain), t.setFloat("threshold", this._highlightsThreshold), t.setTextureFromPostProcess("textureSampler", this._chromaticAberrationPostProcess), t.setFloat("screen_width", this._scene.getEngine().getRenderWidth()), t.setFloat("screen_height", this._scene.getEngine().getRenderHeight()); }; } // colors shifting and distortion _createDepthOfFieldPostProcess(e) { this._depthOfFieldPostProcess = new Bi("LensDepthOfField", "depthOfField", [ "grain_amount", "blur_noise", "screen_width", "screen_height", "distortion", "dof_enabled", "screen_distance", "aperture", "darken", "edge_blur", "highlights", "near", "far" ], ["depthSampler", "grainSampler", "highlightsSampler"], e, null, De.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1), this._depthOfFieldPostProcess.externalTextureSamplerBinding = !0, this._depthOfFieldPostProcess.onApply = (t) => { t.setTexture("depthSampler", this._depthTexture), t.setTexture("grainSampler", this._grainTexture), t.setTextureFromPostProcess("textureSampler", this._highlightsPostProcess), t.setTextureFromPostProcess("highlightsSampler", this._depthOfFieldPostProcess), t.setFloat("grain_amount", this._grainAmount), t.setBool("blur_noise", this._blurNoise), t.setFloat("screen_width", this._scene.getEngine().getRenderWidth()), t.setFloat("screen_height", this._scene.getEngine().getRenderHeight()), t.setFloat("distortion", this._distortion), t.setBool("dof_enabled", this._dofDistance !== -1), t.setFloat("screen_distance", 1 / (0.1 - 1 / this._dofDistance)), t.setFloat("aperture", this._dofAperture), t.setFloat("darken", this._dofDarken), t.setFloat("edge_blur", this._edgeBlur), t.setBool("highlights", this._highlightsGain !== -1), this._scene.activeCamera && (t.setFloat("near", this._scene.activeCamera.minZ), t.setFloat("far", this._scene.activeCamera.maxZ)); }; } // creates a black and white random noise texture, 512x512 _createGrainTexture() { const t = new Uint8Array(1048576); for (let r = 0; r < t.length; ) { const s = Math.floor(yt.RandomRange(0.42, 0.58) * 255); t[r++] = s, t[r++] = s, t[r++] = s, t[r++] = 255; } const i = Po.CreateRGBATexture(t, 512, 512, this._scene, !1, !1, 2); i.name = "LensNoiseTexture", i.wrapU = De.WRAP_ADDRESSMODE, i.wrapV = De.WRAP_ADDRESSMODE, this._grainTexture = i; } } class b3e { constructor() { this.enabled = !1, this.name = "ssao2", this.texturesRequired = [6, 5]; } } const E3e = "ssao2PixelShader", T3e = `precision highp float;uniform sampler2D textureSampler;varying vec2 vUV; #ifdef SSAO float scales[16]=float[16]( 0.1, 0.11406250000000001, 0.131640625, 0.15625, 0.187890625, 0.2265625, 0.272265625, 0.325, 0.384765625, 0.4515625, 0.525390625, 0.60625, 0.694140625, 0.7890625, 0.891015625, 1.0 );uniform float near;uniform float radius;uniform sampler2D 
depthSampler;uniform sampler2D randomSampler;uniform sampler2D normalSampler;uniform float randTextureTiles;uniform float samplesFactor;uniform vec3 sampleSphere[SAMPLES];uniform float totalStrength;uniform float base;uniform float xViewport;uniform float yViewport;uniform mat3 depthProjection;uniform float maxZ;uniform float minZAspect;uniform vec2 texelSize;uniform mat4 projection;void main() {vec3 random=textureLod(randomSampler,vUV*randTextureTiles,0.0).rgb;float depth=textureLod(depthSampler,vUV,0.0).r;float depthSign=depth/abs(depth);depth=depth*depthSign;vec3 normal=textureLod(normalSampler,vUV,0.0).rgb;float occlusion=0.0;float correctedRadius=min(radius,minZAspect*depth/near);vec3 vViewRay=vec3((vUV.x*2.0-1.0)*xViewport,(vUV.y*2.0-1.0)*yViewport,depthSign);vec3 vDepthFactor=depthProjection*vec3(1.0,1.0,depth);vec3 origin=vViewRay*vDepthFactor;vec3 rvec=random*2.0-1.0;rvec.z=0.0;float dotProduct=dot(rvec,normal);rvec=1.0-abs(dotProduct)>1e-2 ? rvec : vec3(-rvec.y,0.0,rvec.x);vec3 tangent=normalize(rvec-normal*dot(rvec,normal));vec3 bitangent=cross(normal,tangent);mat3 tbn=mat3(tangent,bitangent,normal);float difference;for (int i=0; i1.0 || offset.y>1.0) {continue;} float sampleDepth=abs(textureLod(depthSampler,offset.xy,0.0).r);difference=depthSign*samplePosition.z-sampleDepth;float rangeCheck=1.0-smoothstep(correctedRadius*0.5,correctedRadius,difference);occlusion+=step(EPSILON,difference)*rangeCheck;} occlusion=occlusion*(1.0-smoothstep(maxZ*0.75,maxZ,depth));float ao=1.0-totalStrength*occlusion*samplesFactor;float result=clamp(ao+base,0.0,1.0);gl_FragColor=vec4(vec3(result),1.0);} #endif #ifdef BLUR uniform float outSize;uniform float soften;uniform float tolerance;uniform int samples; #ifndef BLUR_BYPASS uniform sampler2D depthSampler; #ifdef BLUR_LEGACY #define inline float blur13Bilateral(sampler2D image,vec2 uv,vec2 step) {float result=0.0;vec2 off1=vec2(1.411764705882353)*step;vec2 off2=vec2(3.2941176470588234)*step;vec2 off3=vec2(5.176470588235294)*step;float compareDepth=abs(textureLod(depthSampler,uv,0.0).r);float sampleDepth;float weight;float weightSum=30.0;result+=textureLod(image,uv,0.0).r*30.0;sampleDepth=abs(textureLod(depthSampler,uv+off1,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+= weight;result+=textureLod(image,uv+off1,0.0).r*weight;sampleDepth=abs(textureLod(depthSampler,uv-off1,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+= weight;result+=textureLod(image,uv-off1,0.0).r*weight;sampleDepth=abs(textureLod(depthSampler,uv+off2,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+=weight;result+=textureLod(image,uv+off2,0.0).r*weight;sampleDepth=abs(textureLod(depthSampler,uv-off2,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+=weight;result+=textureLod(image,uv-off2,0.0).r*weight;sampleDepth=abs(textureLod(depthSampler,uv+off3,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+=weight;result+=textureLod(image,uv+off3,0.0).r*weight;sampleDepth=abs(textureLod(depthSampler,uv-off3,0.0).r);weight=clamp(1.0/( 0.003+abs(compareDepth-sampleDepth)),0.0,30.0);weightSum+=weight;result+=textureLod(image,uv-off3,0.0).r*weight;return result/weightSum;} #endif #endif void main() {float result=0.0; #ifdef BLUR_BYPASS result=textureLod(textureSampler,vUV,0.0).r; #else #ifdef BLUR_H vec2 step=vec2(1.0/outSize,0.0); #else vec2 step=vec2(0.0,1.0/outSize); #endif #ifdef BLUR_LEGACY 
result=blur13Bilateral(textureSampler,vUV,step); #else float compareDepth=abs(textureLod(depthSampler,vUV,0.0).r);float weightSum=0.0;for (int i=-samples; i this._originalColorPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAORenderEffect, () => this._ssaoPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOBlurHRenderEffect, () => this._blurHPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOBlurVRenderEffect, () => this._blurVPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOCombineRenderEffect, () => this._ssaoCombinePostProcess, !0)), t.postProcessRenderPipelineManager.addPipeline(this), r && t.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(e, r); } // Public Methods /** * Get the class name * @returns "SSAO2RenderingPipeline" */ getClassName() { return "SSAO2RenderingPipeline"; } /** * Removes the internal pipeline assets and detaches the pipeline from the scene cameras * @param disableGeometryBufferRenderer */ dispose(e = !1) { for (let t = 0; t < this._scene.cameras.length; t++) { const i = this._scene.cameras[t]; this._originalColorPostProcess.dispose(i), this._ssaoPostProcess.dispose(i), this._blurHPostProcess.dispose(i), this._blurVPostProcess.dispose(i), this._ssaoCombinePostProcess.dispose(i); } this._randomTexture.dispose(), e && this._scene.disableGeometryBufferRenderer(), this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._scene.cameras), super.dispose(); } // Private Methods /** @internal */ _rebuild() { super._rebuild(); } _getSamplersForBlur(e) { return e ? ["textureSampler"] : ["textureSampler", "depthSampler"]; } _getDefinesForBlur(e, t) { let i = `#define BLUR `; return t && (i += `#define BLUR_BYPASS `), e || (i += `#define BLUR_LEGACY `), { h: i + `#define BLUR_H `, v: i }; } _createBlurPostProcess(e, t, i) { const r = this._getDefinesForBlur(this.expensiveBlur, this.bypassBlur), s = this._getSamplersForBlur(this.bypassBlur); this._blurHPostProcess = this._createBlurFilter("BlurH", s, e, r.h, i, !0), this._blurVPostProcess = this._createBlurFilter("BlurV", s, t, r.v, i, !1); } _createBlurFilter(e, t, i, r, s, n) { const a = new Bi(e, "ssao2", ["outSize", "samples", "soften", "tolerance"], t, i, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, r, s); return a.onApply = (l) => { if (!this._scene.activeCamera) return; const o = n ? this._ssaoCombinePostProcess.width : this._ssaoCombinePostProcess.height, u = n ? this._originalColorPostProcess.width : this._originalColorPostProcess.height; l.setFloat("outSize", o > 0 ? o : u), l.setInt("samples", this.bilateralSamples), l.setFloat("soften", this.bilateralSoften), l.setFloat("tolerance", this.bilateralTolerance), this._geometryBufferRenderer ? 
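/* Descriptive note: the bilateral blur filters read scene depth from the geometry buffer
   (G-buffer texture index 0) when it is available, and otherwise from the pre-pass render
   target at the depth texture index (getIndex(5)), as selected by the branch that follows.
   Illustrative usage sketch (an assumption, not part of this bundle): the pipeline itself is
   usually created as
     new BABYLON.SSAO2RenderingPipeline("ssao2", scene, 0.75, [scene.activeCamera]);
   matching the (name, scene, ratio, cameras) parameters used by the serializer's Parse call. */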
l.setTexture("depthSampler", this._geometryBufferRenderer.getGBuffer().textures[0]) : this._prePassRenderer && l.setTexture("depthSampler", this._prePassRenderer.getRenderTarget().textures[this._prePassRenderer.getIndex(5)]); }, a.samples = this.textureSamples, a; } //Van der Corput radical inverse _radicalInverse_VdC(e) { return this._bits[0] = e, this._bits[0] = (this._bits[0] << 16 | this._bits[0] >> 16) >>> 0, this._bits[0] = (this._bits[0] & 1431655765) << 1 | (this._bits[0] & 2863311530) >>> 1 >>> 0, this._bits[0] = (this._bits[0] & 858993459) << 2 | (this._bits[0] & 3435973836) >>> 2 >>> 0, this._bits[0] = (this._bits[0] & 252645135) << 4 | (this._bits[0] & 4042322160) >>> 4 >>> 0, this._bits[0] = (this._bits[0] & 16711935) << 8 | (this._bits[0] & 4278255360) >>> 8 >>> 0, this._bits[0] * 23283064365386963e-26; } _hammersley(e, t) { return [e / t, this._radicalInverse_VdC(e)]; } _hemisphereSample_uniform(e, t) { const i = t * 2 * Math.PI, r = 1 - e * 0.85, s = Math.sqrt(1 - r * r); return new D(Math.cos(i) * s, Math.sin(i) * s, r); } _generateHemisphere() { const e = this.samples, t = []; let i, r = 0; for (; r < e; ) { if (e < 16) i = this._hemisphereSample_uniform(Math.random(), Math.random()); else { const s = this._hammersley(r, e); i = this._hemisphereSample_uniform(s[0], s[1]); } t.push(i.x, i.y, i.z), r++; } return t; } _getDefinesForSSAO() { return `#define SSAO #define SAMPLES ${this.samples} #define EPSILON ${this.epsilon.toFixed(4)}`; } _createSSAOPostProcess(e, t) { this._sampleSphere = this._generateHemisphere(); const i = this._getDefinesForSSAO(), r = ["randomSampler", "depthSampler", "normalSampler"]; this._ssaoPostProcess = new Bi("ssao2", "ssao2", [ "sampleSphere", "samplesFactor", "randTextureTiles", "totalStrength", "radius", "base", "range", "projection", "near", "texelSize", "xViewport", "yViewport", "maxZ", "minZAspect", "depthProjection" ], r, e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, i, t), this._ssaoPostProcess.onApply = (s) => { var n, a, l, o; if (this._scene.activeCamera) { if (s.setArray3("sampleSphere", this._sampleSphere), s.setFloat("randTextureTiles", 32), s.setFloat("samplesFactor", 1 / this.samples), s.setFloat("totalStrength", this.totalStrength), s.setFloat2("texelSize", 1 / this._ssaoPostProcess.width, 1 / this._ssaoPostProcess.height), s.setFloat("radius", this.radius), s.setFloat("maxZ", this.maxZ), s.setFloat("minZAspect", this.minZAspect), s.setFloat("base", this.base), s.setFloat("near", this._scene.activeCamera.minZ), this._scene.activeCamera.mode === Ai.PERSPECTIVE_CAMERA) s.setMatrix3x3("depthProjection", mu.PERSPECTIVE_DEPTH_PROJECTION), s.setFloat("xViewport", Math.tan(this._scene.activeCamera.fov / 2) * this._scene.getEngine().getAspectRatio(this._scene.activeCamera, !0)), s.setFloat("yViewport", Math.tan(this._scene.activeCamera.fov / 2)); else { const u = this._scene.getEngine().getRenderWidth() / 2, h = this._scene.getEngine().getRenderHeight() / 2, d = (n = this._scene.activeCamera.orthoLeft) !== null && n !== void 0 ? n : -u, f = (a = this._scene.activeCamera.orthoRight) !== null && a !== void 0 ? a : u, p = (l = this._scene.activeCamera.orthoBottom) !== null && l !== void 0 ? l : -h, m = (o = this._scene.activeCamera.orthoTop) !== null && o !== void 0 ? o : h; s.setMatrix3x3("depthProjection", mu.ORTHO_DEPTH_PROJECTION), s.setFloat("xViewport", (f - d) * 0.5), s.setFloat("yViewport", (m - p) * 0.5); } s.setMatrix("projection", this._scene.getProjectionMatrix()), this._geometryBufferRenderer ? 
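/* Descriptive note: the SSAO pass binds normals from G-buffer texture index 1 (or the
   pre-pass normal texture at getIndex(6)), depth from the same source as the blur passes,
   and the tiled random-rotation texture generated by _createRandomTexture as randomSampler. */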
(s.setTexture("depthSampler", this._geometryBufferRenderer.getGBuffer().textures[0]), s.setTexture("normalSampler", this._geometryBufferRenderer.getGBuffer().textures[1])) : this._prePassRenderer && (s.setTexture("depthSampler", this._prePassRenderer.getRenderTarget().textures[this._prePassRenderer.getIndex(5)]), s.setTexture("normalSampler", this._prePassRenderer.getRenderTarget().textures[this._prePassRenderer.getIndex(6)])), s.setTexture("randomSampler", this._randomTexture); } }, this._ssaoPostProcess.samples = this.textureSamples, this._forceGeometryBuffer || (this._ssaoPostProcess._prePassEffectConfiguration = new b3e()); } _createSSAOCombinePostProcess(e, t) { this._ssaoCombinePostProcess = new Bi("ssaoCombine", "ssaoCombine", [], ["originalColor", "viewport"], e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, void 0, t), this._ssaoCombinePostProcess.onApply = (i) => { const r = this._scene.activeCamera.viewport; i.setVector4("viewport", de.Vector4[0].copyFromFloats(r.x, r.y, r.width, r.height)), i.setTextureFromPostProcessOutput("originalColor", this._originalColorPostProcess); }, this._ssaoCombinePostProcess.samples = this.textureSamples; } _createRandomTexture() { const t = new Uint8Array(65536), i = at.Zero(); for (let s = 0; s < t.length; ) i.set(yt.RandomRange(0, 1), yt.RandomRange(0, 1)).normalize().scaleInPlace(255), t[s++] = Math.floor(i.x), t[s++] = Math.floor(i.y), t[s++] = 0, t[s++] = 255; const r = Po.CreateRGBATexture(t, 128, 128, this._scene, !1, !1, 2); r.name = "SSAORandomTexture", r.wrapU = De.WRAP_ADDRESSMODE, r.wrapV = De.WRAP_ADDRESSMODE, this._randomTexture = r; } /** * Serialize the rendering pipeline (Used when exporting) * @returns the serialized object */ serialize() { const e = St.Serialize(this); return e.customType = "SSAO2RenderingPipeline", e; } /** * Parse the serialized pipeline * @param source Source pipeline. * @param scene The scene to load the pipeline to. * @param rootUrl The URL of the serialized pipeline. * @returns An instantiated pipeline from the serialized object. 
*/ static Parse(e, t, i) { return St.Parse(() => new mu(e._name, t, e._ratio, void 0, e._forceGeometryBuffer, e._textureType), e, t, i); } } mu.ORTHO_DEPTH_PROJECTION = [1, 0, 0, 0, 1, 0, 0, 0, 1]; mu.PERSPECTIVE_DEPTH_PROJECTION = [0, 0, 0, 0, 0, 0, 1, 1, 1]; F([ W() ], mu.prototype, "totalStrength", void 0); F([ W() ], mu.prototype, "maxZ", void 0); F([ W() ], mu.prototype, "minZAspect", void 0); F([ W("epsilon") ], mu.prototype, "_epsilon", void 0); F([ W("samples") ], mu.prototype, "_samples", void 0); F([ W("textureSamples") ], mu.prototype, "_textureSamples", void 0); F([ W() ], mu.prototype, "_forceGeometryBuffer", void 0); F([ W() ], mu.prototype, "_ratio", void 0); F([ W() ], mu.prototype, "_textureType", void 0); F([ W() ], mu.prototype, "radius", void 0); F([ W() ], mu.prototype, "base", void 0); F([ W("bypassBlur") ], mu.prototype, "_bypassBlur", void 0); F([ W("expensiveBlur") ], mu.prototype, "_expensiveBlur", void 0); F([ W() ], mu.prototype, "bilateralSamples", void 0); F([ W() ], mu.prototype, "bilateralSoften", void 0); F([ W() ], mu.prototype, "bilateralTolerance", void 0); Be("BABYLON.SSAO2RenderingPipeline", mu); const R3e = "ssaoPixelShader", P3e = `uniform sampler2D textureSampler;varying vec2 vUV; #ifdef SSAO uniform sampler2D randomSampler;uniform float randTextureTiles;uniform float samplesFactor;uniform vec3 sampleSphere[SAMPLES];uniform float totalStrength;uniform float radius;uniform float area;uniform float fallOff;uniform float base;vec3 normalFromDepth(float depth,vec2 coords) {vec2 offset1=vec2(0.0,radius);vec2 offset2=vec2(radius,0.0);float depth1=texture2D(textureSampler,coords+offset1).r;float depth2=texture2D(textureSampler,coords+offset2).r;vec3 p1=vec3(offset1,depth1-depth);vec3 p2=vec3(offset2,depth2-depth);vec3 normal=cross(p1,p2);normal.z=-normal.z;return normalize(normal);} void main() {vec3 random=normalize(texture2D(randomSampler,vUV*randTextureTiles).rgb);float depth=texture2D(textureSampler,vUV).r;vec3 position=vec3(vUV,depth);vec3 normal=normalFromDepth(depth,vUV);float radiusDepth=radius/depth;float occlusion=0.0;vec3 ray;vec3 hemiRay;float occlusionDepth;float difference;for (int i=0; i this._originalColorPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAORenderEffect, () => this._ssaoPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOBlurHRenderEffect, () => this._blurHPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOBlurVRenderEffect, () => this._blurVPostProcess, !0)), this.addEffect(new gn(t.getEngine(), this.SSAOCombineRenderEffect, () => this._ssaoCombinePostProcess, !0)), t.postProcessRenderPipelineManager.addPipeline(this), r && t.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(e, r); } /** * @internal */ _attachCameras(e, t) { super._attachCameras(e, t); for (const i of this._cameras) this._scene.enableDepthRenderer(i).getDepthMap(); } // Public Methods /** * Get the class name * @returns "SSAORenderingPipeline" */ getClassName() { return "SSAORenderingPipeline"; } /** * Removes the internal pipeline assets and detaches the pipeline from the scene cameras * @param disableDepthRender */ dispose(e = !1) { for (let t = 0; t < this._scene.cameras.length; t++) { const i = this._scene.cameras[t]; this._originalColorPostProcess.dispose(i), this._ssaoPostProcess.dispose(i), this._blurHPostProcess.dispose(i), this._blurVPostProcess.dispose(i), this._ssaoCombinePostProcess.dispose(i); } this._randomTexture.dispose(), e && this._scene.disableDepthRenderer(), 
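/* Descriptive note: dispose() releases the per-camera post-processes and the random noise
   texture, optionally turns the scene's depth renderer off, and then detaches the cameras
   from this render pipeline before calling the base class dispose. */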
this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._scene.cameras), super.dispose(); } // Private Methods _createBlurPostProcess(e) { this._blurHPostProcess = new fu("BlurH", new at(1, 0), 16, e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, 0), this._blurVPostProcess = new fu("BlurV", new at(0, 1), 16, e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, 0), this._blurHPostProcess.onActivateObservable.add(() => { const i = this._blurHPostProcess.width / this._scene.getEngine().getRenderWidth(); this._blurHPostProcess.kernel = 16 * i; }), this._blurVPostProcess.onActivateObservable.add(() => { const i = this._blurVPostProcess.height / this._scene.getEngine().getRenderHeight(); this._blurVPostProcess.kernel = 16 * i; }); } /** @internal */ _rebuild() { this._firstUpdate = !0, super._rebuild(); } _createSSAOPostProcess(e) { const i = [ 0.5381, 0.1856, -0.4319, 0.1379, 0.2486, 0.443, 0.3371, 0.5679, -57e-4, -0.6999, -0.0451, -19e-4, 0.0689, -0.1598, -0.8547, 0.056, 69e-4, -0.1843, -0.0146, 0.1402, 0.0762, 0.01, -0.1924, -0.0344, -0.3577, -0.5301, -0.4358, -0.3169, 0.1063, 0.0158, 0.0103, -0.5869, 46e-4, -0.0897, -0.494, 0.3287, 0.7119, -0.0154, -0.0918, -0.0533, 0.0596, -0.5411, 0.0352, -0.0631, 0.546, -0.4776, 0.2847, -0.0271 ], r = 1 / 16; this._ssaoPostProcess = new Bi("ssao", "ssao", ["sampleSphere", "samplesFactor", "randTextureTiles", "totalStrength", "radius", "area", "fallOff", "base", "range", "viewport"], ["randomSampler"], e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1, `#define SAMPLES 16 #define SSAO`), this._ssaoPostProcess.externalTextureSamplerBinding = !0, this._ssaoPostProcess.onApply = (s) => { this._firstUpdate && (s.setArray3("sampleSphere", i), s.setFloat("samplesFactor", r), s.setFloat("randTextureTiles", 4)), s.setFloat("totalStrength", this.totalStrength), s.setFloat("radius", this.radius), s.setFloat("area", this.area), s.setFloat("fallOff", this.fallOff), s.setFloat("base", this.base), s.setTexture("textureSampler", this._scene.enableDepthRenderer(this._scene.activeCamera).getDepthMap()), s.setTexture("randomSampler", this._randomTexture); }; } _createSSAOCombinePostProcess(e) { this._ssaoCombinePostProcess = new Bi("ssaoCombine", "ssaoCombine", [], ["originalColor", "viewport"], e, null, De.BILINEAR_SAMPLINGMODE, this._scene.getEngine(), !1), this._ssaoCombinePostProcess.onApply = (t) => { t.setVector4("viewport", de.Vector4[0].copyFromFloats(0, 0, 1, 1)), t.setTextureFromPostProcess("originalColor", this._originalColorPostProcess); }; } _createRandomTexture() { const t = new Uint8Array(1048576); for (let r = 0; r < t.length; ) t[r++] = Math.floor(Math.max(0, yt.RandomRange(-1, 1)) * 255), t[r++] = Math.floor(Math.max(0, yt.RandomRange(-1, 1)) * 255), t[r++] = Math.floor(Math.max(0, yt.RandomRange(-1, 1)) * 255), t[r++] = 255; const i = Po.CreateRGBATexture(t, 512, 512, this._scene, !1, !1, 2); i.name = "SSAORandomTexture", i.wrapU = De.WRAP_ADDRESSMODE, i.wrapV = De.WRAP_ADDRESSMODE, this._randomTexture = i; } } F([ W() ], Tw.prototype, "totalStrength", void 0); F([ W() ], Tw.prototype, "radius", void 0); F([ W() ], Tw.prototype, "area", void 0); F([ W() ], Tw.prototype, "fallOff", void 0); F([ W() ], Tw.prototype, "base", void 0); class I3e { constructor() { this.enabled = !1, this.name = "screenSpaceReflections", this.texturesRequired = [6, 3, 1]; } } const D3e = "screenSpaceReflectionPixelShader", O3e = `uniform sampler2D textureSampler; #ifdef SSR_SUPPORTED uniform sampler2D 
reflectivitySampler;uniform sampler2D normalSampler;uniform sampler2D positionSampler; #endif uniform mat4 view;uniform mat4 projection;uniform float stepSize;uniform float strength;uniform float threshold;uniform float roughnessFactor;uniform float reflectionSpecularFalloffExponent;varying vec2 vUV; #ifdef SSR_SUPPORTED struct ReflectionInfo {vec3 color;vec4 coords;};/** * According to specular,see https: */ vec3 fresnelSchlick(float cosTheta,vec3 F0) {return F0+(1.0-F0)*pow(1.0-cosTheta,5.0);} /** * Once the pixel's coordinates has been found,let's adjust (smooth) a little bit * by sampling multiple reflection pixels. */ ReflectionInfo smoothReflectionInfo(vec3 dir,vec3 hitCoord) {ReflectionInfo info;info.color=vec3(0.0);vec4 projectedCoord;float sampledDepth;for(int i=0; i0.0) hitCoord-=dir;else hitCoord+=dir;info.color+=texture2D(textureSampler,projectedCoord.xy).rgb;} projectedCoord=projection*vec4(hitCoord,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=0.5*projectedCoord.xy+vec2(0.5);info.coords=vec4(projectedCoord.xy,sampledDepth,1.0);info.color+=texture2D(textureSampler,projectedCoord.xy).rgb;info.color/=float(SMOOTH_STEPS+1);return info;} /** * Tests the given world position (hitCoord) according to the given reflection vector (dir) * until it finds a collision (means that depth is enough close to say "it's the pixel to sample!"). */ ReflectionInfo getReflectionInfo(vec3 dir,vec3 hitCoord) {ReflectionInfo info;vec4 projectedCoord;float sampledDepth;dir*=stepSize;for(int i=0; i { const d = this._geometryBufferRenderer, f = this._prePassRenderer; if (!f && !d) return; if (d) { const v = d.getTextureIndex(_o.POSITION_TEXTURE_TYPE), C = d.getTextureIndex(_o.REFLECTIVITY_TEXTURE_TYPE); h.setTexture("normalSampler", d.getGBuffer().textures[1]), h.setTexture("positionSampler", d.getGBuffer().textures[v]), h.setTexture("reflectivitySampler", d.getGBuffer().textures[C]); } else if (f) { const v = f.getIndex(1), C = f.getIndex(3), x = f.getIndex(6); h.setTexture("normalSampler", f.getRenderTarget().textures[x]), h.setTexture("positionSampler", f.getRenderTarget().textures[v]), h.setTexture("reflectivitySampler", f.getRenderTarget().textures[C]); } const p = t.activeCamera; if (!p) return; const m = p.getViewMatrix(!0), _ = p.getProjectionMatrix(!0); h.setMatrix("projection", _), h.setMatrix("view", m), h.setFloat("threshold", this.threshold), h.setFloat("reflectionSpecularFalloffExponent", this.reflectionSpecularFalloffExponent), h.setFloat("strength", this.strength), h.setFloat("stepSize", this.step), h.setFloat("roughnessFactor", this.roughnessFactor); }, this._isSceneRightHanded = t.useRightHandedSystem; } /** * Gets whether or not smoothing reflections is enabled. * Enabling smoothing will require more GPU power and can generate a drop in FPS. */ get enableSmoothReflections() { return this._enableSmoothReflections; } /** * Sets whether or not smoothing reflections is enabled. * Enabling smoothing will require more GPU power and can generate a drop in FPS. */ set enableSmoothReflections(e) { e !== this._enableSmoothReflections && (this._enableSmoothReflections = e, this._updateEffectDefines()); } /** * Gets the number of samples taken while computing reflections. More samples count is high, * more the post-process wil require GPU power and can generate a drop in FPS. Basically in interval [25, 100]. */ get reflectionSamples() { return this._reflectionSamples; } /** * Sets the number of samples taken while computing reflections. 
More samples count is high, * more the post-process wil require GPU power and can generate a drop in FPS. Basically in interval [25, 100]. */ set reflectionSamples(e) { e !== this._reflectionSamples && (this._reflectionSamples = e, this._updateEffectDefines()); } /** * Gets the number of samples taken while smoothing reflections. More samples count is high, * more the post-process will require GPU power and can generate a drop in FPS. * Default value (5.0) work pretty well in all cases but can be adjusted. */ get smoothSteps() { return this._smoothSteps; } /* * Sets the number of samples taken while smoothing reflections. More samples count is high, * more the post-process will require GPU power and can generate a drop in FPS. * Default value (5.0) work pretty well in all cases but can be adjusted. */ set smoothSteps(e) { e !== this._smoothSteps && (this._smoothSteps = e, this._updateEffectDefines()); } _updateEffectDefines() { const e = []; (this._geometryBufferRenderer || this._prePassRenderer) && e.push("#define SSR_SUPPORTED"), this._enableSmoothReflections && e.push("#define ENABLE_SMOOTH_REFLECTIONS"), this._isSceneRightHanded && e.push("#define RIGHT_HANDED_SCENE"), e.push("#define REFLECTION_SAMPLES " + (this._reflectionSamples >> 0)), e.push("#define SMOOTH_STEPS " + (this._smoothSteps >> 0)), this.updateEffect(e.join(` `)); } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new Eg(e.name, i, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.textureType, e.reusable), e, i, r); } } F([ W() ], Eg.prototype, "threshold", void 0); F([ W() ], Eg.prototype, "strength", void 0); F([ W() ], Eg.prototype, "reflectionSpecularFalloffExponent", void 0); F([ W() ], Eg.prototype, "step", void 0); F([ W() ], Eg.prototype, "roughnessFactor", void 0); F([ W() ], Eg.prototype, "enableSmoothReflections", null); F([ W() ], Eg.prototype, "reflectionSamples", null); F([ W() ], Eg.prototype, "smoothSteps", null); Be("BABYLON.ScreenSpaceReflectionPostProcess", Eg); const w3e = "standardPixelShader", L3e = `uniform sampler2D textureSampler;varying vec2 vUV; #define CUSTOM_FRAGMENT_DEFINITIONS #if defined(PASS_POST_PROCESS) void main(void) {vec4 color=texture2D(textureSampler,vUV);gl_FragColor=color;} #endif #if defined(DOWN_SAMPLE_X4) uniform vec2 dsOffsets[16];void main(void) {vec4 average=vec4(0.0,0.0,0.0,0.0);average=texture2D(textureSampler,vUV+dsOffsets[0]);average+=texture2D(textureSampler,vUV+dsOffsets[1]);average+=texture2D(textureSampler,vUV+dsOffsets[2]);average+=texture2D(textureSampler,vUV+dsOffsets[3]);average+=texture2D(textureSampler,vUV+dsOffsets[4]);average+=texture2D(textureSampler,vUV+dsOffsets[5]);average+=texture2D(textureSampler,vUV+dsOffsets[6]);average+=texture2D(textureSampler,vUV+dsOffsets[7]);average+=texture2D(textureSampler,vUV+dsOffsets[8]);average+=texture2D(textureSampler,vUV+dsOffsets[9]);average+=texture2D(textureSampler,vUV+dsOffsets[10]);average+=texture2D(textureSampler,vUV+dsOffsets[11]);average+=texture2D(textureSampler,vUV+dsOffsets[12]);average+=texture2D(textureSampler,vUV+dsOffsets[13]);average+=texture2D(textureSampler,vUV+dsOffsets[14]);average+=texture2D(textureSampler,vUV+dsOffsets[15]);average/=16.0;gl_FragColor=average;} #endif #if defined(BRIGHT_PASS) uniform vec2 dsOffsets[4];uniform float brightThreshold;void main(void) {vec4 
average=vec4(0.0,0.0,0.0,0.0);average=texture2D(textureSampler,vUV+vec2(dsOffsets[0].x,dsOffsets[0].y));average+=texture2D(textureSampler,vUV+vec2(dsOffsets[1].x,dsOffsets[1].y));average+=texture2D(textureSampler,vUV+vec2(dsOffsets[2].x,dsOffsets[2].y));average+=texture2D(textureSampler,vUV+vec2(dsOffsets[3].x,dsOffsets[3].y));average*=0.25;float luminance=length(average.rgb);if (luminanceshadowPixelDepth) accumFog+=sunColor*computeScattering(dot(rayDirection,sunDirection));currentPosition+=stepL;} accumFog/=NB_STEPS;vec3 color=accumFog*scatteringPower;gl_FragColor=vec4(color*exp(color) ,1.0);} #endif #if defined(VLSMERGE) uniform sampler2D originalSampler;void main(void) {gl_FragColor=texture2D(originalSampler,vUV)+texture2D(textureSampler,vUV);} #endif #if defined(LUMINANCE) uniform vec2 lumOffsets[4];void main() {float average=0.0;vec4 color=vec4(0.0);float maximum=-1e20;vec3 weight=vec3(0.299,0.587,0.114);for (int i=0; i<4; i++) {color=texture2D(textureSampler,vUV+ lumOffsets[i]);float GreyValue=dot(color.rgb,vec3(0.33,0.33,0.33)); #ifdef WEIGHTED_AVERAGE float GreyValue=dot(color.rgb,weight); #endif #ifdef BRIGHTNESS float GreyValue=max(color.r,max(color.g,color.b)); #endif #ifdef HSL_COMPONENT float GreyValue=0.5*(max(color.r,max(color.g,color.b))+min(color.r,min(color.g,color.b))); #endif #ifdef MAGNITUDE float GreyValue=length(color.rgb); #endif maximum=max(maximum,GreyValue);average+=(0.25*log(1e-5+GreyValue));} average=exp(average);gl_FragColor=vec4(average,maximum,0.0,1.0);} #endif #if defined(LUMINANCE_DOWN_SAMPLE) uniform vec2 dsOffsets[9];uniform float halfDestPixelSize; #ifdef FINAL_DOWN_SAMPLER #include #endif void main() {vec4 color=vec4(0.0);float average=0.0;for (int i=0; i<9; i++) {color=texture2D(textureSampler,vUV+vec2(halfDestPixelSize,halfDestPixelSize)+dsOffsets[i]);average+=color.r;} average/=9.0; #ifdef FINAL_DOWN_SAMPLER gl_FragColor=pack(average); #else gl_FragColor=vec4(average,average,0.0,1.0); #endif } #endif #if defined(HDR) uniform sampler2D textureAdderSampler;uniform float averageLuminance;void main() {vec4 color=texture2D(textureAdderSampler,vUV); #ifndef AUTO_EXPOSURE vec4 adjustedColor=color/averageLuminance;color=adjustedColor;color.a=1.0; #endif gl_FragColor=color;} #endif #if defined(LENS_FLARE) #define GHOSTS 3 uniform sampler2D lensColorSampler;uniform float strength;uniform float ghostDispersal;uniform float haloWidth;uniform vec2 resolution;uniform float distortionStrength;float hash(vec2 p) {float h=dot(p,vec2(127.1,311.7));return -1.0+2.0*fract(sin(h)*43758.5453123);} float noise(in vec2 p) {vec2 i=floor(p);vec2 f=fract(p);vec2 u=f*f*(3.0-2.0*f);return mix(mix(hash(i+vec2(0.0,0.0)), hash(i+vec2(1.0,0.0)),u.x), mix(hash(i+vec2(0.0,1.0)), hash(i+vec2(1.0,1.0)),u.x),u.y);} float fbm(vec2 p) {float f=0.0;f+=0.5000*noise(p); p*=2.02;f+=0.2500*noise(p); p*=2.03;f+=0.1250*noise(p); p*=2.01;f+=0.0625*noise(p); p*=2.04;f/=0.9375;return f;} vec3 pattern(vec2 uv) {vec2 p=-1.0+2.0*uv;float p2=dot(p,p);float f=fbm(vec2(15.0*p2))/2.0;float r=0.2+0.6*sin(12.5*length(uv-vec2(0.5)));float g=0.2+0.6*sin(20.5*length(uv-vec2(0.5)));float b=0.2+0.6*sin(17.2*length(uv-vec2(0.5)));return (1.0-f)*vec3(r,g,b);} float luminance(vec3 color) {return dot(color.rgb,vec3(0.2126,0.7152,0.0722));} vec4 textureDistorted(sampler2D tex,vec2 texcoord,vec2 direction,vec3 distortion) {return vec4( texture2D(tex,texcoord+direction*distortion.r).r, texture2D(tex,texcoord+direction*distortion.g).g, texture2D(tex,texcoord+direction*distortion.b).b, 1.0 );} void main(void) {vec2 
uv=-vUV+vec2(1.0);vec2 ghostDir=(vec2(0.5)-uv)*ghostDispersal;vec2 texelSize=1.0/resolution;vec3 distortion=vec3(-texelSize.x*distortionStrength,0.0,texelSize.x*distortionStrength);vec4 result=vec4(0.0);float ghostIndice=1.0;for (int i=0; i=nSamples) break;vec2 offset1=vUV+velocity*(float(i)/float(nSamples-1)-0.5);result+=texture2D(textureSampler,offset1);} gl_FragColor=result/float(nSamples);} #endif `; je.ShadersStore[w3e] = L3e; class Dn extends I5 { /** * Gets the overall exposure used by the pipeline */ get exposure() { return this._fixedExposure; } /** * Sets the overall exposure used by the pipeline */ set exposure(e) { this._fixedExposure = e, this._currentExposure = e; } /** * Gets whether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process */ get hdrAutoExposure() { return this._hdrAutoExposure; } /** * Sets whether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process */ set hdrAutoExposure(e) { if (this._hdrAutoExposure = e, this.hdrPostProcess) { const t = ["#define HDR"]; e && t.push("#define AUTO_EXPOSURE"), this.hdrPostProcess.updateEffect(t.join(` `)); } } /** * Gets how much the image is blurred by the movement while using the motion blur post-process */ get motionStrength() { return this._motionStrength; } /** * Sets how much the image is blurred by the movement while using the motion blur post-process */ set motionStrength(e) { this._motionStrength = e, this._isObjectBasedMotionBlur && this.motionBlurPostProcess && (this.motionBlurPostProcess.motionStrength = e); } /** * Gets whether or not the motion blur post-process is object based or screen based. */ get objectBasedMotionBlur() { return this._isObjectBasedMotionBlur; } /** * Sets whether or not the motion blur post-process should be object based or screen based */ set objectBasedMotionBlur(e) { const t = this._isObjectBasedMotionBlur !== e; this._isObjectBasedMotionBlur = e, t && this._buildPipeline(); } /** * @ignore * Specifies if the bloom pipeline is enabled */ get BloomEnabled() { return this._bloomEnabled; } set BloomEnabled(e) { this._bloomEnabled !== e && (this._bloomEnabled = e, this._buildPipeline()); } /** * @ignore * Specifies if the depth of field pipeline is enabled */ get DepthOfFieldEnabled() { return this._depthOfFieldEnabled; } set DepthOfFieldEnabled(e) { this._depthOfFieldEnabled !== e && (this._depthOfFieldEnabled = e, this._buildPipeline()); } /** * @ignore * Specifies if the lens flare pipeline is enabled */ get LensFlareEnabled() { return this._lensFlareEnabled; } set LensFlareEnabled(e) { this._lensFlareEnabled !== e && (this._lensFlareEnabled = e, this._buildPipeline()); } /** * @ignore * Specifies if the HDR pipeline is enabled */ get HDREnabled() { return this._hdrEnabled; } set HDREnabled(e) { this._hdrEnabled !== e && (this._hdrEnabled = e, this._buildPipeline()); } /** * @ignore * Specifies if the volumetric lights scattering effect is enabled */ get VLSEnabled() { return this._vlsEnabled; } // eslint-disable-next-line @typescript-eslint/naming-convention set VLSEnabled(e) { if (this._vlsEnabled !== e) { if (e && !this._scene.enableGeometryBufferRenderer()) { Ce.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline"); return; } this._vlsEnabled = e, this._buildPipeline(); } } /** * @ignore * Specifies if the motion blur effect is enabled */ get MotionBlurEnabled() { return this._motionBlurEnabled; } set MotionBlurEnabled(e) { 
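/* Descriptive note: as with the other StandardRenderingPipeline toggles above, changing this
   flag triggers _buildPipeline() so the whole post-process chain is torn down and recreated
   with (or without) the motion blur pass. */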
this._motionBlurEnabled !== e && (this._motionBlurEnabled = e, this._buildPipeline()); } /** * Specifies if anti-aliasing is enabled */ get fxaaEnabled() { return this._fxaaEnabled; } set fxaaEnabled(e) { this._fxaaEnabled !== e && (this._fxaaEnabled = e, this._buildPipeline()); } /** * Specifies if screen space reflections are enabled. */ get screenSpaceReflectionsEnabled() { return this._screenSpaceReflectionsEnabled; } set screenSpaceReflectionsEnabled(e) { this._screenSpaceReflectionsEnabled !== e && (this._screenSpaceReflectionsEnabled = e, this._buildPipeline()); } /** * Specifies the number of steps used to calculate the volumetric lights * Typically in interval [50, 200] */ get volumetricLightStepsCount() { return this._volumetricLightStepsCount; } set volumetricLightStepsCount(e) { this.volumetricLightPostProcess && this.volumetricLightPostProcess.updateEffect(`#define VLS #define NB_STEPS ` + e.toFixed(1)), this._volumetricLightStepsCount = e; } /** * Specifies the number of samples used for the motion blur effect * Typically in interval [16, 64] */ get motionBlurSamples() { return this._motionBlurSamples; } set motionBlurSamples(e) { this.motionBlurPostProcess && (this._isObjectBasedMotionBlur ? this.motionBlurPostProcess.motionBlurSamples = e : this.motionBlurPostProcess.updateEffect(`#define MOTION_BLUR #define MAX_MOTION_SAMPLES ` + e.toFixed(1))), this._motionBlurSamples = e; } /** * Specifies MSAA sample count, setting this to 4 will provide 4x anti-aliasing. (default: 1) */ get samples() { return this._samples; } set samples(e) { this._samples !== e && (this._samples = e, this._buildPipeline()); } /** * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility. * @constructor * @param name The rendering pipeline name * @param scene The scene linked to this pipeline * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width * 0.5 and a height = canvas.height * 0.5) * @param originalPostProcess the custom original color post-process. Must be "reusable". Can be null. 
* @param cameras The array of cameras that the rendering pipeline will be attached to */ constructor(e, t, i, r = null, s) { super(t.getEngine(), e), this.downSampleX4PostProcess = null, this.brightPassPostProcess = null, this.blurHPostProcesses = [], this.blurVPostProcesses = [], this.textureAdderPostProcess = null, this.volumetricLightPostProcess = null, this.volumetricLightSmoothXPostProcess = null, this.volumetricLightSmoothYPostProcess = null, this.volumetricLightMergePostProces = null, this.volumetricLightFinalPostProcess = null, this.luminancePostProcess = null, this.luminanceDownSamplePostProcesses = [], this.hdrPostProcess = null, this.textureAdderFinalPostProcess = null, this.lensFlareFinalPostProcess = null, this.hdrFinalPostProcess = null, this.lensFlarePostProcess = null, this.lensFlareComposePostProcess = null, this.motionBlurPostProcess = null, this.depthOfFieldPostProcess = null, this.fxaaPostProcess = null, this.screenSpaceReflectionPostProcess = null, this.brightThreshold = 1, this.blurWidth = 512, this.horizontalBlur = !1, this.lensTexture = null, this.volumetricLightCoefficient = 0.2, this.volumetricLightPower = 4, this.volumetricLightBlurScale = 64, this.sourceLight = null, this.hdrMinimumLuminance = 1, this.hdrDecreaseRate = 0.5, this.hdrIncreaseRate = 0.5, this.lensColorTexture = null, this.lensFlareStrength = 20, this.lensFlareGhostDispersal = 1.4, this.lensFlareHaloWidth = 0.7, this.lensFlareDistortionStrength = 16, this.lensFlareBlurWidth = 512, this.lensStarTexture = null, this.lensFlareDirtTexture = null, this.depthOfFieldDistance = 10, this.depthOfFieldBlurWidth = 64, this.animations = [], this._currentDepthOfFieldSource = null, this._fixedExposure = 1, this._currentExposure = 1, this._hdrAutoExposure = !1, this._hdrCurrentLuminance = 1, this._motionStrength = 1, this._isObjectBasedMotionBlur = !1, this._camerasToBeAttached = [], this._bloomEnabled = !1, this._depthOfFieldEnabled = !1, this._vlsEnabled = !1, this._lensFlareEnabled = !1, this._hdrEnabled = !1, this._motionBlurEnabled = !1, this._fxaaEnabled = !1, this._screenSpaceReflectionsEnabled = !1, this._motionBlurSamples = 64, this._volumetricLightStepsCount = 50, this._samples = 1, this._cameras = s || t.cameras, this._cameras = this._cameras.slice(), this._camerasToBeAttached = this._cameras.slice(), this._scene = t, this._basePostProcess = r, this._ratio = i, this._floatTextureType = t.getEngine().getCaps().textureFloatRender ? 1 : 2, t.postProcessRenderPipelineManager.addPipeline(this), this._buildPipeline(); } _buildPipeline() { const e = this._ratio, t = this._scene; this._disposePostProcesses(), this._cameras !== null && (this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), this._cameras = this._camerasToBeAttached.slice()), this._reset(), this._screenSpaceReflectionsEnabled && (this.screenSpaceReflectionPostProcess = new Eg("HDRPass", t, e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, this._floatTextureType), this.screenSpaceReflectionPostProcess.onApplyObservable.add(() => { this._currentDepthOfFieldSource = this.screenSpaceReflectionPostProcess; }), this.addEffect(new gn(t.getEngine(), "HDRScreenSpaceReflections", () => this.screenSpaceReflectionPostProcess, !0))), this._basePostProcess ? 
this.originalPostProcess = this._basePostProcess : this.originalPostProcess = new Bi("HDRPass", "standard", [], [], e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, "#define PASS_POST_PROCESS", this._floatTextureType), this.originalPostProcess.autoClear = !this.screenSpaceReflectionPostProcess, this.originalPostProcess.onApplyObservable.add(() => { this._currentDepthOfFieldSource = this.originalPostProcess; }), this.addEffect(new gn(t.getEngine(), "HDRPassPostProcess", () => this.originalPostProcess, !0)), this._bloomEnabled && (this._createDownSampleX4PostProcess(t, e / 4), this._createBrightPassPostProcess(t, e / 4), this._createBlurPostProcesses(t, e / 4, 1), this._createTextureAdderPostProcess(t, e), this.textureAdderFinalPostProcess = new Bi("HDRDepthOfFieldSource", "standard", [], [], e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, "#define PASS_POST_PROCESS", 0), this.addEffect(new gn(t.getEngine(), "HDRBaseDepthOfFieldSource", () => this.textureAdderFinalPostProcess, !0))), this._vlsEnabled && (this._createVolumetricLightPostProcess(t, e), this.volumetricLightFinalPostProcess = new Bi("HDRVLSFinal", "standard", [], [], e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, "#define PASS_POST_PROCESS", 0), this.addEffect(new gn(t.getEngine(), "HDRVLSFinal", () => this.volumetricLightFinalPostProcess, !0))), this._lensFlareEnabled && (this._createLensFlarePostProcess(t, e), this.lensFlareFinalPostProcess = new Bi("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, "#define PASS_POST_PROCESS", 0), this.addEffect(new gn(t.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => this.lensFlareFinalPostProcess, !0))), this._hdrEnabled && (this._createLuminancePostProcesses(t, this._floatTextureType), this._createHdrPostProcess(t, e), this.hdrFinalPostProcess = new Bi("HDRPostHDReDepthOfFieldSource", "standard", [], [], e, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, "#define PASS_POST_PROCESS", 0), this.addEffect(new gn(t.getEngine(), "HDRPostHDReDepthOfFieldSource", () => this.hdrFinalPostProcess, !0))), this._depthOfFieldEnabled && (this._createBlurPostProcesses(t, e / 2, 3, "depthOfFieldBlurWidth"), this._createDepthOfFieldPostProcess(t, e)), this._motionBlurEnabled && this._createMotionBlurPostProcess(t, e), this._fxaaEnabled && (this.fxaaPostProcess = new $I("fxaa", 1, null, De.BILINEAR_SAMPLINGMODE, t.getEngine(), !1, 0), this.addEffect(new gn(t.getEngine(), "HDRFxaa", () => this.fxaaPostProcess, !0))), this._cameras !== null && this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras), !this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1 && Ce.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0"); } // Down Sample X4 Post-Process _createDownSampleX4PostProcess(e, t) { const i = new Array(32); this.downSampleX4PostProcess = new Bi("HDRDownSampleX4", "standard", ["dsOffsets"], [], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define DOWN_SAMPLE_X4", this._floatTextureType), this.downSampleX4PostProcess.onApply = (r) => { let s = 0; const n = this.downSampleX4PostProcess.width, a = this.downSampleX4PostProcess.height; for (let l = -2; l < 2; l++) for (let o = -2; o < 2; o++) i[s] = (l + 0.5) * (1 / n), i[s + 1] = (o + 0.5) * (1 / a), s += 2; r.setArray2("dsOffsets", i); }, this.addEffect(new gn(e.getEngine(), "HDRDownSampleX4", () => this.downSampleX4PostProcess, !0)); } // Brightpass Post-Process 
_createBrightPassPostProcess(e, t) { const i = new Array(8); this.brightPassPostProcess = new Bi("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define BRIGHT_PASS", this._floatTextureType), this.brightPassPostProcess.onApply = (r) => { const s = 1 / this.brightPassPostProcess.width, n = 1 / this.brightPassPostProcess.height; i[0] = -0.5 * s, i[1] = 0.5 * n, i[2] = 0.5 * s, i[3] = 0.5 * n, i[4] = -0.5 * s, i[5] = -0.5 * n, i[6] = 0.5 * s, i[7] = -0.5 * n, r.setArray2("dsOffsets", i), r.setFloat("brightThreshold", this.brightThreshold); }, this.addEffect(new gn(e.getEngine(), "HDRBrightPass", () => this.brightPassPostProcess, !0)); } // Create blur H&V post-processes _createBlurPostProcesses(e, t, i, r = "blurWidth") { const s = e.getEngine(), n = new fu("HDRBlurH_" + i, new at(1, 0), this[r], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, this._floatTextureType), a = new fu("HDRBlurV_" + i, new at(0, 1), this[r], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, this._floatTextureType); n.onActivateObservable.add(() => { const l = n.width / s.getRenderWidth(); n.kernel = this[r] * l; }), a.onActivateObservable.add(() => { const l = a.height / s.getRenderHeight(); a.kernel = this.horizontalBlur ? 64 * l : this[r] * l; }), this.addEffect(new gn(e.getEngine(), "HDRBlurH" + i, () => n, !0)), this.addEffect(new gn(e.getEngine(), "HDRBlurV" + i, () => a, !0)), this.blurHPostProcesses.push(n), this.blurVPostProcesses.push(a); } // Create texture adder post-process _createTextureAdderPostProcess(e, t) { this.textureAdderPostProcess = new Bi("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define TEXTURE_ADDER", this._floatTextureType), this.textureAdderPostProcess.onApply = (i) => { i.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? 
this._currentDepthOfFieldSource : this.originalPostProcess), i.setTexture("lensSampler", this.lensTexture), i.setFloat("exposure", this._currentExposure), this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess; }, this.addEffect(new gn(e.getEngine(), "HDRTextureAdder", () => this.textureAdderPostProcess, !0)); } _createVolumetricLightPostProcess(e, t) { const i = e.enableGeometryBufferRenderer(); i.enablePosition = !0; const r = i.getGBuffer(); this.volumetricLightPostProcess = new Bi("HDRVLS", "standard", ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"], ["shadowMapSampler", "positionSampler"], t / 8, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, `#define VLS #define NB_STEPS ` + this._volumetricLightStepsCount.toFixed(1)); const s = at.Zero(); this.volumetricLightPostProcess.onApply = (n) => { if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) { const a = this.sourceLight.getShadowGenerator(); n.setTexture("shadowMapSampler", a.getShadowMap()), n.setTexture("positionSampler", r.textures[2]), n.setColor3("sunColor", this.sourceLight.diffuse), n.setVector3("sunDirection", this.sourceLight.getShadowDirection()), n.setVector3("cameraPosition", this._scene.activeCamera.globalPosition), n.setMatrix("shadowViewProjection", a.getTransformMatrix()), n.setFloat("scatteringCoefficient", this.volumetricLightCoefficient), n.setFloat("scatteringPower", this.volumetricLightPower), s.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera), s.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera), n.setVector2("depthValues", s); } }, this.addEffect(new gn(e.getEngine(), "HDRVLS", () => this.volumetricLightPostProcess, !0)), this._createBlurPostProcesses(e, t / 4, 0, "volumetricLightBlurScale"), this.volumetricLightMergePostProces = new Bi("HDRVLSMerge", "standard", [], ["originalSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define VLSMERGE"), this.volumetricLightMergePostProces.onApply = (n) => { n.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? 
this.textureAdderFinalPostProcess : this.originalPostProcess), this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess; }, this.addEffect(new gn(e.getEngine(), "HDRVLSMerge", () => this.volumetricLightMergePostProces, !0)); } // Create luminance _createLuminancePostProcesses(e, t) { let i = Math.pow(3, Dn.LuminanceSteps); this.luminancePostProcess = new Bi("HDRLuminance", "standard", ["lumOffsets"], [], { width: i, height: i }, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define LUMINANCE", t); const r = []; this.luminancePostProcess.onApply = (n) => { const a = 1 / this.luminancePostProcess.width, l = 1 / this.luminancePostProcess.height; r[0] = -0.5 * a, r[1] = 0.5 * l, r[2] = 0.5 * a, r[3] = 0.5 * l, r[4] = -0.5 * a, r[5] = -0.5 * l, r[6] = 0.5 * a, r[7] = -0.5 * l, n.setArray2("lumOffsets", r); }, this.addEffect(new gn(e.getEngine(), "HDRLuminance", () => this.luminancePostProcess, !0)); for (let n = Dn.LuminanceSteps - 1; n >= 0; n--) { i = Math.pow(3, n); let a = `#define LUMINANCE_DOWN_SAMPLE `; n === 0 && (a += "#define FINAL_DOWN_SAMPLER"); const l = new Bi("HDRLuminanceDownSample" + n, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: i, height: i }, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, a, t); this.luminanceDownSamplePostProcesses.push(l); } let s = this.luminancePostProcess; this.luminanceDownSamplePostProcesses.forEach((n, a) => { const l = new Array(18); n.onApply = (o) => { if (!s) return; let u = 0; for (let h = -1; h < 2; h++) for (let d = -1; d < 2; d++) l[u] = h / s.width, l[u + 1] = d / s.height, u += 2; o.setArray2("dsOffsets", l), o.setFloat("halfDestPixelSize", 0.5 / s.width), a === this.luminanceDownSamplePostProcesses.length - 1 ? s = this.luminancePostProcess : s = n; }, a === this.luminanceDownSamplePostProcesses.length - 1 && (n.onAfterRender = () => { const o = e.getEngine().readPixels(0, 0, 1, 1), u = new Di(1 / (255 * 255 * 255), 1 / (255 * 255), 1 / 255, 1); o.then((h) => { const d = new Uint8Array(h.buffer); this._hdrCurrentLuminance = (d[0] * u.x + d[1] * u.y + d[2] * u.z + d[3] * u.w) / 100; }); }), this.addEffect(new gn(e.getEngine(), "HDRLuminanceDownSample" + a, () => n, !0)); }); } // Create HDR post-process _createHdrPostProcess(e, t) { const i = ["#define HDR"]; this._hdrAutoExposure && i.push("#define AUTO_EXPOSURE"), this.hdrPostProcess = new Bi("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, i.join(` `), 0); let r = 1, s = 0, n = 0; this.hdrPostProcess.onApply = (a) => { if (a.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource), s += e.getEngine().getDeltaTime(), r < 0) r = this._hdrCurrentLuminance; else { const l = (n - s) / 1e3; this._hdrCurrentLuminance < r + this.hdrDecreaseRate * l ? r += this.hdrDecreaseRate * l : this._hdrCurrentLuminance > r - this.hdrIncreaseRate * l ? r -= this.hdrIncreaseRate * l : r = this._hdrCurrentLuminance; } this.hdrAutoExposure ? 
this._currentExposure = this._fixedExposure / r : (r = yt.Clamp(r, this.hdrMinimumLuminance, 1e20), a.setFloat("averageLuminance", r)), n = s, this._currentDepthOfFieldSource = this.hdrFinalPostProcess; }, this.addEffect(new gn(e.getEngine(), "HDR", () => this.hdrPostProcess, !0)); } // Create lens flare post-process _createLensFlarePostProcess(e, t) { this.lensFlarePostProcess = new Bi("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], t / 2, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define LENS_FLARE", 0), this.addEffect(new gn(e.getEngine(), "HDRLensFlare", () => this.lensFlarePostProcess, !0)), this._createBlurPostProcesses(e, t / 4, 2, "lensFlareBlurWidth"), this.lensFlareComposePostProcess = new Bi("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define LENS_FLARE_COMPOSE", 0), this.addEffect(new gn(e.getEngine(), "HDRLensFlareCompose", () => this.lensFlareComposePostProcess, !0)); const i = new at(0, 0); this.lensFlarePostProcess.externalTextureSamplerBinding = !0, this.lensFlarePostProcess.onApply = (n) => { n.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess), n.setTexture("lensColorSampler", this.lensColorTexture), n.setFloat("strength", this.lensFlareStrength), n.setFloat("ghostDispersal", this.lensFlareGhostDispersal), n.setFloat("haloWidth", this.lensFlareHaloWidth), i.x = this.lensFlarePostProcess.width, i.y = this.lensFlarePostProcess.height, n.setVector2("resolution", i), n.setFloat("distortionStrength", this.lensFlareDistortionStrength); }; const r = Ae.FromValues(2, 0, -1, 0, 0, 2, -1, 0, 0, 0, 1, 0, 0, 0, 0, 1), s = Ae.FromValues(0.5, 0, 0.5, 0, 0, 0.5, 0.5, 0, 0, 0, 1, 0, 0, 0, 0, 1); this.lensFlareComposePostProcess.onApply = (n) => { if (!this._scene.activeCamera) return; n.setTextureFromPostProcess("otherSampler", this.lensFlarePostProcess), n.setTexture("lensDirtSampler", this.lensFlareDirtTexture), n.setTexture("lensStarSampler", this.lensStarTexture); const a = this._scene.activeCamera.getViewMatrix().getRow(0), l = this._scene.activeCamera.getViewMatrix().getRow(2); let o = D.Dot(a.toVector3(), new D(1, 0, 0)) + D.Dot(l.toVector3(), new D(0, 0, 1)); o *= 4; const u = Ae.FromValues(Math.cos(o) * 0.5, -Math.sin(o), 0, 0, Math.sin(o), Math.cos(o) * 0.5, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1), h = s.multiply(u).multiply(r); n.setMatrix("lensStarMatrix", h), this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess; }; } // Create depth-of-field post-process _createDepthOfFieldPostProcess(e, t) { this.depthOfFieldPostProcess = new Bi("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, "#define DEPTH_OF_FIELD", 0), this.depthOfFieldPostProcess.onApply = (i) => { i.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource), i.setTexture("depthSampler", this._getDepthTexture()), i.setFloat("distance", this.depthOfFieldDistance); }, this.addEffect(new gn(e.getEngine(), "HDRDepthOfField", () => this.depthOfFieldPostProcess, !0)); } // Create motion blur post-process _createMotionBlurPostProcess(e, t) { if (this._isObjectBasedMotionBlur) { const i = new R5("HDRMotionBlur", e, t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, 0); i.motionStrength = this.motionStrength, i.motionBlurSamples = 
this.motionBlurSamples, this.motionBlurPostProcess = i; } else { this.motionBlurPostProcess = new Bi("HDRMotionBlur", "standard", ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"], ["depthSampler"], t, null, De.BILINEAR_SAMPLINGMODE, e.getEngine(), !1, `#define MOTION_BLUR #define MAX_MOTION_SAMPLES ` + this.motionBlurSamples.toFixed(1), 0); let i = 0, r = Ae.Identity(); const s = Ae.Identity(); let n = Ae.Identity(); const a = at.Zero(); this.motionBlurPostProcess.onApply = (l) => { n = e.getProjectionMatrix().multiply(e.getViewMatrix()), n.invertToRef(s), l.setMatrix("inverseViewProjection", s), l.setMatrix("prevViewProjection", r), r = n, a.x = this.motionBlurPostProcess.width, a.y = this.motionBlurPostProcess.height, l.setVector2("screenSize", a), i = e.getEngine().getFps() / 60, l.setFloat("motionScale", i), l.setFloat("motionStrength", this.motionStrength), l.setTexture("depthSampler", this._getDepthTexture()); }; } this.addEffect(new gn(e.getEngine(), "HDRMotionBlur", () => this.motionBlurPostProcess, !0)); } _getDepthTexture() { return this._scene.getEngine().getCaps().drawBuffersExtension ? this._scene.enableGeometryBufferRenderer().getGBuffer().textures[0] : this._scene.enableDepthRenderer().getDepthMap(); } _disposePostProcesses() { for (let e = 0; e < this._cameras.length; e++) { const t = this._cameras[e]; this.originalPostProcess && this.originalPostProcess.dispose(t), this.screenSpaceReflectionPostProcess && this.screenSpaceReflectionPostProcess.dispose(t), this.downSampleX4PostProcess && this.downSampleX4PostProcess.dispose(t), this.brightPassPostProcess && this.brightPassPostProcess.dispose(t), this.textureAdderPostProcess && this.textureAdderPostProcess.dispose(t), this.volumetricLightPostProcess && this.volumetricLightPostProcess.dispose(t), this.volumetricLightSmoothXPostProcess && this.volumetricLightSmoothXPostProcess.dispose(t), this.volumetricLightSmoothYPostProcess && this.volumetricLightSmoothYPostProcess.dispose(t), this.volumetricLightMergePostProces && this.volumetricLightMergePostProces.dispose(t), this.volumetricLightFinalPostProcess && this.volumetricLightFinalPostProcess.dispose(t), this.lensFlarePostProcess && this.lensFlarePostProcess.dispose(t), this.lensFlareComposePostProcess && this.lensFlareComposePostProcess.dispose(t); for (let i = 0; i < this.luminanceDownSamplePostProcesses.length; i++) this.luminanceDownSamplePostProcesses[i].dispose(t); this.luminancePostProcess && this.luminancePostProcess.dispose(t), this.hdrPostProcess && this.hdrPostProcess.dispose(t), this.hdrFinalPostProcess && this.hdrFinalPostProcess.dispose(t), this.depthOfFieldPostProcess && this.depthOfFieldPostProcess.dispose(t), this.motionBlurPostProcess && this.motionBlurPostProcess.dispose(t), this.fxaaPostProcess && this.fxaaPostProcess.dispose(t); for (let i = 0; i < this.blurHPostProcesses.length; i++) this.blurHPostProcesses[i].dispose(t); for (let i = 0; i < this.blurVPostProcesses.length; i++) this.blurVPostProcesses[i].dispose(t); } this.originalPostProcess = null, this.downSampleX4PostProcess = null, this.brightPassPostProcess = null, this.textureAdderPostProcess = null, this.textureAdderFinalPostProcess = null, this.volumetricLightPostProcess = null, this.volumetricLightSmoothXPostProcess = null, this.volumetricLightSmoothYPostProcess = null, this.volumetricLightMergePostProces = null, this.volumetricLightFinalPostProcess = null, this.lensFlarePostProcess = null, this.lensFlareComposePostProcess = null, 
this.luminancePostProcess = null, this.hdrPostProcess = null, this.hdrFinalPostProcess = null, this.depthOfFieldPostProcess = null, this.motionBlurPostProcess = null, this.fxaaPostProcess = null, this.screenSpaceReflectionPostProcess = null, this.luminanceDownSamplePostProcesses.length = 0, this.blurHPostProcesses.length = 0, this.blurVPostProcesses.length = 0; } /** * Dispose of the pipeline and stop all post processes */ dispose() { this._disposePostProcesses(), this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), super.dispose(); } /** * Serialize the rendering pipeline (Used when exporting) * @returns the serialized object */ serialize() { const e = St.Serialize(this); return this.sourceLight && (e.sourceLightId = this.sourceLight.id), this.screenSpaceReflectionPostProcess && (e.screenSpaceReflectionPostProcess = St.Serialize(this.screenSpaceReflectionPostProcess)), e.customType = "StandardRenderingPipeline", e; } /** * Parse the serialized pipeline * @param source Source pipeline. * @param scene The scene to load the pipeline to. * @param rootUrl The URL of the serialized pipeline. * @returns An instantiated pipeline from the serialized object. */ static Parse(e, t, i) { const r = St.Parse(() => new Dn(e._name, t, e._ratio), e, t, i); return e.sourceLightId && (r.sourceLight = t.getLightById(e.sourceLightId)), e.screenSpaceReflectionPostProcess && St.Parse(() => r.screenSpaceReflectionPostProcess, e.screenSpaceReflectionPostProcess, t, i), r; } } Dn.LuminanceSteps = 6; F([ W() ], Dn.prototype, "brightThreshold", void 0); F([ W() ], Dn.prototype, "blurWidth", void 0); F([ W() ], Dn.prototype, "horizontalBlur", void 0); F([ W() ], Dn.prototype, "exposure", null); F([ er("lensTexture") ], Dn.prototype, "lensTexture", void 0); F([ W() ], Dn.prototype, "volumetricLightCoefficient", void 0); F([ W() ], Dn.prototype, "volumetricLightPower", void 0); F([ W() ], Dn.prototype, "volumetricLightBlurScale", void 0); F([ W() ], Dn.prototype, "hdrMinimumLuminance", void 0); F([ W() ], Dn.prototype, "hdrDecreaseRate", void 0); F([ W() ], Dn.prototype, "hdrIncreaseRate", void 0); F([ W() ], Dn.prototype, "hdrAutoExposure", null); F([ er("lensColorTexture") ], Dn.prototype, "lensColorTexture", void 0); F([ W() ], Dn.prototype, "lensFlareStrength", void 0); F([ W() ], Dn.prototype, "lensFlareGhostDispersal", void 0); F([ W() ], Dn.prototype, "lensFlareHaloWidth", void 0); F([ W() ], Dn.prototype, "lensFlareDistortionStrength", void 0); F([ W() ], Dn.prototype, "lensFlareBlurWidth", void 0); F([ er("lensStarTexture") ], Dn.prototype, "lensStarTexture", void 0); F([ er("lensFlareDirtTexture") ], Dn.prototype, "lensFlareDirtTexture", void 0); F([ W() ], Dn.prototype, "depthOfFieldDistance", void 0); F([ W() ], Dn.prototype, "depthOfFieldBlurWidth", void 0); F([ W() ], Dn.prototype, "motionStrength", null); F([ W() ], Dn.prototype, "objectBasedMotionBlur", null); F([ W() ], Dn.prototype, "_ratio", void 0); F([ W() ], Dn.prototype, "BloomEnabled", null); F([ W() ], Dn.prototype, "DepthOfFieldEnabled", null); F([ W() ], Dn.prototype, "LensFlareEnabled", null); F([ W() ], Dn.prototype, "HDREnabled", null); F([ W() // eslint-disable-next-line @typescript-eslint/naming-convention ], Dn.prototype, "VLSEnabled", null); F([ W() ], Dn.prototype, "MotionBlurEnabled", null); F([ W() ], Dn.prototype, "fxaaEnabled", null); F([ W() ], Dn.prototype, "screenSpaceReflectionsEnabled", null); F([ W() ], Dn.prototype, "volumetricLightStepsCount", null); F([ W() ], 
Dn.prototype, "motionBlurSamples", null); F([ W() ], Dn.prototype, "samples", null); Be("BABYLON.StandardRenderingPipeline", Dn); class N3e { constructor() { this.enabled = !1, this.name = "screenSpaceReflections2", this.texturesRequired = [6, 3, 5]; } } const F3e = "screenSpaceRayTrace", B3e = `float distanceSquared(vec2 a,vec2 b) { a-=b; return dot(a,a); } /** param csOrigin Camera-space ray origin,which must be within the view volume and must have z>0.01 and project within the valid screen rectangle param csDirection Unit length camera-space ray direction param projectToPixelMatrix A projection matrix that maps to **pixel** coordinates (**not** [-1,+1] normalized device coordinates). param csZBuffer The camera-space Z buffer param csZBufferSize Dimensions of csZBuffer param csZThickness Camera space csZThickness to ascribe to each pixel in the depth buffer param nearPlaneZ Positive number. Doesn't have to be THE actual near plane,just a reasonable value for clipping rays headed towards the camera param stride Step in horizontal or vertical pixels between samples. This is a float because integer math is slow on GPUs,but should be set to an integer>=1 param jitterFraction Number between 0 and 1 for how far to bump the ray in stride units to conceal banding artifacts,plus the stride ray offset. param maxSteps Maximum number of iterations. Higher gives better images but may be slow param maxRayTraceDistance Maximum camera-space distance to trace before returning a miss param selfCollisionNumSkip Number of steps to skip at start when raytracing to avoid self collisions. 1 is a reasonable value,depending on the scene you may need to set this value to 2 param hitPixel Pixel coordinates of the first intersection with the scene param numIterations number of iterations performed param csHitPoint Camera space location of the ray hit */ #define inline bool traceScreenSpaceRay1( vec3 csOrigin, vec3 csDirection, mat4 projectToPixelMatrix, sampler2D csZBuffer, vec2 csZBufferSize, #ifdef SSRAYTRACE_USE_BACK_DEPTHBUFFER sampler2D csZBackBuffer, float csZBackSizeFactor, #endif float csZThickness, float nearPlaneZ, float stride, float jitterFraction, float maxSteps, float maxRayTraceDistance, float selfCollisionNumSkip, out vec2 startPixel, out vec2 hitPixel, out vec3 csHitPoint, out float numIterations #ifdef SSRAYTRACE_DEBUG ,out vec3 debugColor #endif ) { #ifdef SSRAYTRACE_RIGHT_HANDED_SCENE float rayLength=(csOrigin.z+csDirection.z*maxRayTraceDistance)>-nearPlaneZ ? (-nearPlaneZ-csOrigin.z)/csDirection.z : maxRayTraceDistance; #else float rayLength=(csOrigin.z+csDirection.z*maxRayTraceDistance)yMax) || (P1.yyMax) ? yMax : yMin))/(P1.y-P0.y);} if ((P1.x>xMax) || (P1.xxMax) ? xMax : xMin))/(P1.x-P0.x));} P1=mix(P1,P0,alpha); k1=mix(k1,k0,alpha); Q1=mix(Q1,Q0,alpha); #endif P1+=vec2((distanceSquared(P0,P1)<0.0001) ? 
0.01 : 0.0);vec2 delta=P1-P0;bool permute=false;if (abs(delta.x)rayZMax) { float t=rayZMin; rayZMin=rayZMax; rayZMax=t;} sceneZMax=texelFetch(csZBuffer,ivec2(hitPixel),0).r; #ifdef SSRAYTRACE_RIGHT_HANDED_SCENE #ifdef SSRAYTRACE_USE_BACK_DEPTHBUFFER float sceneBackZ=texelFetch(csZBackBuffer,ivec2(hitPixel/csZBackSizeFactor),0).r;hit=(rayZMax>=sceneBackZ-csZThickness) && (rayZMin<=sceneZMax); #else hit=(rayZMax>=sceneZMax-csZThickness) && (rayZMin<=sceneZMax); #endif #else #ifdef SSRAYTRACE_USE_BACK_DEPTHBUFFER float sceneBackZ=texelFetch(csZBackBuffer,ivec2(hitPixel/csZBackSizeFactor),0).r;hit=(rayZMin<=sceneBackZ+csZThickness) && (rayZMax>=sceneZMax) && (sceneZMax != 0.0); #else hit=(rayZMin<=sceneZMax+csZThickness) && (rayZMax>=sceneZMax); #endif #endif } pqk-=dPQK;stepCount-=1.0;if (((pqk.x+dPQK.x)*stepDirection)>end || (stepCount+1.0)>=maxSteps || sceneZMax==0.0) {hit=false;} #ifdef SSRAYTRACE_ENABLE_REFINEMENT if (stride>1.0 && hit) {pqk-=dPQK;stepCount-=1.0;float invStride=1.0/stride;dPQK*=invStride;float refinementStepCount=0.0;prevZMaxEstimate=pqk.z/pqk.w;rayZMax=prevZMaxEstimate;sceneZMax=rayZMax+1e7;for (;refinementStepCount<=1.0 || (refinementStepCount<=stride*1.4) && (rayZMaxend) {debugColor=vec3(0,0,1);} else if ((stepCount+1.0)>=maxSteps) {debugColor=vec3(1,0,0);} else if (sceneZMax==0.0) {debugColor=vec3(1,1,0);} else {debugColor=vec3(0,stepCount/maxSteps,0);} #endif return hit;} /** texCoord: in the [0,1] range depth: depth in view space (range [znear,zfar]]) */ vec3 computeViewPosFromUVDepth(vec2 texCoord,float depth,mat4 projection,mat4 invProjectionMatrix) {vec4 ndc;ndc.xy=texCoord*2.0-1.0; #ifdef SSRAYTRACE_RIGHT_HANDED_SCENE ndc.z=-projection[2].z-projection[3].z/depth; #else ndc.z=projection[2].z+projection[3].z/depth; #endif ndc.w=1.0;vec4 eyePos=invProjectionMatrix*ndc;eyePos.xyz/=eyePos.w;return eyePos.xyz;} `; je.IncludesShadersStore[F3e] = B3e; const U3e = "screenSpaceReflection2PixelShader", V3e = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,lod) texture2DLodEXT(s,c,lod) #define TEXTURECUBEFUNC(s,c,lod) textureLod(s,c,lod) #else #define TEXTUREFUNC(s,c,bias) texture2D(s,c,bias) #define TEXTURECUBEFUNC(s,c,bias) textureCube(s,c,bias) #endif uniform sampler2D textureSampler;varying vec2 vUV; #ifdef SSR_SUPPORTED uniform sampler2D reflectivitySampler;uniform sampler2D normalSampler;uniform sampler2D depthSampler; #ifdef SSRAYTRACE_USE_BACK_DEPTHBUFFER uniform sampler2D backDepthSampler;uniform float backSizeFactor; #endif #ifdef SSR_USE_ENVIRONMENT_CUBE uniform samplerCube envCubeSampler; #ifdef SSR_USE_LOCAL_REFLECTIONMAP_CUBIC uniform vec3 vReflectionPosition;uniform vec3 vReflectionSize; #endif #endif uniform mat4 view;uniform mat4 invView;uniform mat4 projection;uniform mat4 invProjectionMatrix;uniform mat4 projectionPixel;uniform float nearPlaneZ;uniform float stepSize;uniform float maxSteps;uniform float strength;uniform float thickness;uniform float roughnessFactor;uniform float reflectionSpecularFalloffExponent;uniform float maxDistance;uniform float selfCollisionNumSkip;uniform float reflectivityThreshold; #include #include #include vec3 hash(vec3 a) {a=fract(a*0.8);a+=dot(a,a.yxz+19.19);return fract((a.xxy+a.yxx)*a.zyx);} float computeAttenuationForIntersection(ivec2 hitPixel,vec2 hitUV,vec3 vsRayOrigin,vec3 vsHitPoint,vec3 reflectionVector,float maxRayDistance,float numIterations) {float attenuation=1.0; #ifdef SSR_ATTENUATE_SCREEN_BORDERS vec2 
dCoords=smoothstep(0.2,0.6,abs(vec2(0.5,0.5)-hitUV.xy));attenuation*=clamp(1.0-(dCoords.x+dCoords.y),0.0,1.0); #endif #ifdef SSR_ATTENUATE_INTERSECTION_DISTANCE attenuation*=1.0-clamp(distance(vsRayOrigin,vsHitPoint)/maxRayDistance,0.0,1.0); #endif #ifdef SSR_ATTENUATE_INTERSECTION_NUMITERATIONS attenuation*=1.0-(numIterations/maxSteps); #endif #ifdef SSR_ATTENUATE_BACKFACE_REFLECTION vec3 reflectionNormal=texelFetch(normalSampler,hitPixel,0).xyz;float directionBasedAttenuation=smoothstep(-0.17,0.0,dot(reflectionNormal,-reflectionVector));attenuation*=directionBasedAttenuation; #endif return attenuation;} #endif void main() { #ifdef SSR_SUPPORTED vec4 colorFull=TEXTUREFUNC(textureSampler,vUV,0.0);vec3 color=colorFull.rgb;vec4 reflectivity=TEXTUREFUNC(reflectivitySampler,vUV,0.0); #ifndef SSR_DISABLE_REFLECTIVITY_TEST if (max(reflectivity.r,max(reflectivity.g,reflectivity.b))<=reflectivityThreshold) { #ifdef SSR_USE_BLUR gl_FragColor=vec4(0.); #else gl_FragColor=colorFull; #endif return;} #endif #ifdef SSR_INPUT_IS_GAMMA_SPACE color=toLinearSpace(color); #endif vec2 texSize=vec2(textureSize(depthSampler,0));vec3 csNormal=texelFetch(normalSampler,ivec2(vUV*texSize),0).xyz; float depth=texelFetch(depthSampler,ivec2(vUV*texSize),0).r;vec3 csPosition=computeViewPosFromUVDepth(vUV,depth,projection,invProjectionMatrix);vec3 csViewDirection=normalize(csPosition);vec3 csReflectedVector=reflect(csViewDirection,csNormal); #ifdef SSR_USE_ENVIRONMENT_CUBE vec3 wReflectedVector=vec3(invView*vec4(csReflectedVector,0.0)); #ifdef SSR_USE_LOCAL_REFLECTIONMAP_CUBIC vec4 worldPos=invView*vec4(csPosition,1.0);wReflectedVector=parallaxCorrectNormal(worldPos.xyz,normalize(wReflectedVector),vReflectionSize,vReflectionPosition); #endif #ifdef SSR_INVERTCUBICMAP wReflectedVector.y*=-1.0; #endif #ifdef SSRAYTRACE_RIGHT_HANDED_SCENE wReflectedVector.z*=-1.0; #endif vec3 envColor=TEXTURECUBEFUNC(envCubeSampler,wReflectedVector,0.0).xyz; #ifdef SSR_ENVIRONMENT_CUBE_IS_GAMMASPACE envColor=toLinearSpace(envColor); #endif #else vec3 envColor=color; #endif float reflectionAttenuation=1.0;bool rayHasHit=false;vec2 startPixel;vec2 hitPixel;vec3 hitPoint;float numIterations; #ifdef SSRAYTRACE_DEBUG vec3 debugColor; #endif #ifdef SSR_ATTENUATE_FACING_CAMERA reflectionAttenuation*=1.0-smoothstep(0.25,0.5,dot(-csViewDirection,csReflectedVector)); #endif if (reflectionAttenuation>0.0) { #ifdef SSR_USE_BLUR vec3 jitt=vec3(0.); #else float roughness=1.0-reflectivity.a;vec3 jitt=mix(vec3(0.0),hash(csPosition)-vec3(0.5),roughness)*roughnessFactor; #endif vec2 uv2=vUV*texSize;float c=(uv2.x+uv2.y)*0.25;float jitter=mod(c,1.0); rayHasHit=traceScreenSpaceRay1( csPosition, normalize(csReflectedVector+jitt), projectionPixel, depthSampler, texSize, #ifdef SSRAYTRACE_USE_BACK_DEPTHBUFFER backDepthSampler, backSizeFactor, #endif thickness, nearPlaneZ, stepSize, jitter, maxSteps, maxDistance, selfCollisionNumSkip, startPixel, hitPixel, hitPoint, numIterations #ifdef SSRAYTRACE_DEBUG ,debugColor #endif );} #ifdef SSRAYTRACE_DEBUG gl_FragColor=vec4(debugColor,1.);return; #endif vec3 F0=reflectivity.rgb;vec3 fresnel=fresnelSchlickGGX(max(dot(csNormal,-csViewDirection),0.0),F0,vec3(1.));vec3 SSR=envColor;if (rayHasHit) {vec3 reflectedColor=texelFetch(textureSampler,ivec2(hitPixel),0).rgb; #ifdef SSR_INPUT_IS_GAMMA_SPACE reflectedColor=toLinearSpace(reflectedColor); #endif 
reflectionAttenuation*=computeAttenuationForIntersection(ivec2(hitPixel),hitPixel/texSize,csPosition,hitPoint,csReflectedVector,maxDistance,numIterations);SSR=reflectedColor*reflectionAttenuation+(1.0-reflectionAttenuation)*envColor;} #ifndef SSR_BLEND_WITH_FRESNEL SSR*=fresnel; #endif #ifdef SSR_USE_BLUR float blur_radius=0.0;float roughness=1.0-reflectivity.a*(1.0-roughnessFactor);if (roughness>0.001) {float cone_angle=min(roughness,0.999)*3.14159265*0.5;float cone_len=distance(startPixel,hitPixel);float op_len=2.0*tan(cone_angle)*cone_len; float a=op_len;float h=cone_len;float a2=a*a;float fh2=4.0f*h*h;blur_radius=(a*(sqrt(a2+fh2)-a))/(4.0f*h);} gl_FragColor=vec4(SSR,blur_radius/255.0); #else #ifdef SSR_BLEND_WITH_FRESNEL vec3 reflectionMultiplier=clamp(pow(fresnel*strength,vec3(reflectionSpecularFalloffExponent)),0.0,1.0); #else vec3 reflectionMultiplier=clamp(pow(reflectivity.rgb*strength,vec3(reflectionSpecularFalloffExponent)),0.0,1.0); #endif vec3 colorMultiplier=1.0-reflectionMultiplier;vec3 finalColor=(color*colorMultiplier)+(SSR*reflectionMultiplier); #ifdef SSR_OUTPUT_IS_GAMMA_SPACE finalColor=toGammaSpace(finalColor); #endif gl_FragColor=vec4(finalColor,colorFull.a); #endif #else gl_FragColor=TEXTUREFUNC(textureSampler,vUV,0.0); #endif } `; je.ShadersStore[U3e] = V3e; const k3e = "screenSpaceReflection2BlurPixelShader", z3e = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,lod) texture2DLodEXT(s,c,lod) #else #define TEXTUREFUNC(s,c,bias) texture2D(s,c,bias) #endif uniform sampler2D textureSampler;varying vec2 vUV;uniform vec2 texelOffsetScale;const float weights[8]=float[8] (0.071303,0.131514,0.189879,0.321392,0.452906, 0.584419,0.715932,0.847445);void processSample(vec2 uv,float i,vec2 stepSize,inout vec4 accumulator,inout float denominator) {vec2 offsetUV=stepSize*i+uv;float coefficient=weights[int(2.0-abs(i))];accumulator+=TEXTUREFUNC(textureSampler,offsetUV,0.0)*coefficient;denominator+=coefficient;} void main() {vec4 colorFull=TEXTUREFUNC(textureSampler,vUV,0.0);if (dot(colorFull,vec4(1.0))==0.0) {gl_FragColor=colorFull;return;} float blurRadius=colorFull.a*255.0; vec2 stepSize=texelOffsetScale.xy*blurRadius;vec4 accumulator=TEXTUREFUNC(textureSampler,vUV,0.0)*0.214607;float denominator=0.214607;processSample(vUV,1.0,stepSize,accumulator,denominator);processSample(vUV,1.0*0.2,stepSize,accumulator,denominator);processSample(vUV,1.0*0.4,stepSize,accumulator,denominator);processSample(vUV,1.0*0.6,stepSize,accumulator,denominator);processSample(vUV,1.0*0.8,stepSize,accumulator,denominator);processSample(vUV,1.0*1.2,stepSize,accumulator,denominator);processSample(vUV,1.0*1.4,stepSize,accumulator,denominator);processSample(vUV,1.0*1.6,stepSize,accumulator,denominator);processSample(vUV,1.0*1.8,stepSize,accumulator,denominator);processSample(vUV,1.0*2.0,stepSize,accumulator,denominator);processSample(vUV,-1.0,stepSize,accumulator,denominator);processSample(vUV,-1.0*0.2,stepSize,accumulator,denominator);processSample(vUV,-1.0*0.4,stepSize,accumulator,denominator);processSample(vUV,-1.0*0.6,stepSize,accumulator,denominator);processSample(vUV,-1.0*0.8,stepSize,accumulator,denominator);processSample(vUV,-1.0*1.2,stepSize,accumulator,denominator);processSample(vUV,-1.0*1.4,stepSize,accumulator,denominator);processSample(vUV,-1.0*1.6,stepSize,accumulator,denominator);processSample(vUV,-1.0*1.8,stepSize,accumulator,denominator);processSample(vUV,-1.0*2.0,stepSize,accumulator,denominator);gl_FragColor=vec4(accumulator.rgb/denominator,colorFull.a);} `; 
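/*
 * Usage sketch (illustrative comment only; nothing here is executed by this bundle).
 * The `Ma` class defined below reports its class name as "SSRRenderingPipeline" and is
 * presumably exposed under the BABYLON namespace in the same way `Dn` is registered as
 * "BABYLON.StandardRenderingPipeline" above. Assuming an existing Babylon.js `scene` and
 * `camera`, a minimal setup based on the constructor and properties documented below
 * might look like this (property values are illustrative, not recommendations):
 *
 *   // name, scene, cameras (defaults to scene.cameras), forceGeometryBuffer, textureType
 *   const ssr = new BABYLON.SSRRenderingPipeline("ssr", scene, [camera], false, 0);
 *   ssr.thickness = 0.5;               // camera-space thickness ascribed to each depth sample
 *   ssr.blurDispersionStrength = 0.03; // 0 disables the blur/combiner passes entirely
 *   ssr.environmentTexture = cubeTex;  // hypothetical fallback cube texture for rays that miss
 *   ssr.debug = false;                 // true tints pixels by hit/miss reason (see `debug` doc)
 *
 *   // Detach from the cameras and release the post-processes when done:
 *   ssr.dispose();
 */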
je.ShadersStore[k3e] = z3e; const H3e = "screenSpaceReflection2BlurCombinerPixelShader", G3e = `uniform sampler2D textureSampler; uniform sampler2D mainSampler;uniform sampler2D reflectivitySampler;uniform float strength;uniform float reflectionSpecularFalloffExponent;uniform float reflectivityThreshold;varying vec2 vUV; #include #ifdef SSR_BLEND_WITH_FRESNEL #include #include uniform mat4 projection;uniform mat4 invProjectionMatrix;uniform sampler2D normalSampler;uniform sampler2D depthSampler; #endif void main() { #ifdef SSRAYTRACE_DEBUG gl_FragColor=texture2D(textureSampler,vUV); #else vec3 SSR=texture2D(textureSampler,vUV).rgb;vec4 color=texture2D(mainSampler,vUV);vec4 reflectivity=texture2D(reflectivitySampler,vUV); #ifndef SSR_DISABLE_REFLECTIVITY_TEST if (max(reflectivity.r,max(reflectivity.g,reflectivity.b))<=reflectivityThreshold) {gl_FragColor=color;return;} #endif #ifdef SSR_INPUT_IS_GAMMA_SPACE color=toLinearSpace(color); #endif #ifdef SSR_BLEND_WITH_FRESNEL vec2 texSize=vec2(textureSize(depthSampler,0));vec3 csNormal=texelFetch(normalSampler,ivec2(vUV*texSize),0).xyz;float depth=texelFetch(depthSampler,ivec2(vUV*texSize),0).r;vec3 csPosition=computeViewPosFromUVDepth(vUV,depth,projection,invProjectionMatrix);vec3 csViewDirection=normalize(csPosition);vec3 F0=reflectivity.rgb;vec3 fresnel=fresnelSchlickGGX(max(dot(csNormal,-csViewDirection),0.0),F0,vec3(1.));vec3 reflectionMultiplier=clamp(pow(fresnel*strength,vec3(reflectionSpecularFalloffExponent)),0.0,1.0); #else vec3 reflectionMultiplier=clamp(pow(reflectivity.rgb*strength,vec3(reflectionSpecularFalloffExponent)),0.0,1.0); #endif vec3 colorMultiplier=1.0-reflectionMultiplier;vec3 finalColor=(color.rgb*colorMultiplier)+(SSR*reflectionMultiplier); #ifdef SSR_OUTPUT_IS_GAMMA_SPACE finalColor=toGammaSpace(finalColor); #endif gl_FragColor=vec4(finalColor,color.a); #endif } `; je.ShadersStore[H3e] = G3e; const K3e = Ae.Compose(new D(0.5, 0.5, 0.5), Ze.Identity(), new D(0.5, 0.5, 0.5)), W3e = Ae.Compose(new D(0.5, 0.5, 1), Ze.Identity(), new D(0.5, 0.5, 0)); class Ma extends I5 { /** * MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1) */ set samples(e) { this._samples !== e && (this._samples = e, this._buildPipeline()); } get samples() { return this._samples; } /** * Gets or sets the minimum value for one of the reflectivity component of the material to consider it for SSR (default: 0.04). * If all r/g/b components of the reflectivity is below or equal this value, the pixel will not be considered reflective and SSR won't be applied. */ get reflectivityThreshold() { return this._reflectivityThreshold; } set reflectivityThreshold(e) { e !== this._reflectivityThreshold && (e === 0 && this._reflectivityThreshold !== 0 || e !== 0 && this._reflectivityThreshold === 0 ? (this._reflectivityThreshold = e, this._buildPipeline()) : this._reflectivityThreshold = e); } /** * Gets or sets the downsample factor used to reduce the size of the texture used to compute the SSR contribution (default: 0). * Use 0 to render the SSR contribution at full resolution, 1 to render at half resolution, 2 to render at 1/3 resolution, etc. * Note that it is used only when blurring is enabled (blurDispersionStrength \> 0), because in that mode the SSR contribution is generated in a separate texture. */ get ssrDownsample() { return this._ssrDownsample; } set ssrDownsample(e) { e !== this._ssrDownsample && (this._ssrDownsample = e, this._buildPipeline()); } /** * Gets or sets the blur dispersion strength. 
Set this value to 0 to disable blurring (default: 0.05) * The reflections are blurred based on the roughness of the surface and the distance between the pixel shaded and the reflected pixel: the higher the distance the more blurry the reflection is. * blurDispersionStrength allows to increase or decrease this effect. */ get blurDispersionStrength() { return this._blurDispersionStrength; } set blurDispersionStrength(e) { if (e === this._blurDispersionStrength) return; const t = e === 0 && this._blurDispersionStrength !== 0 || e !== 0 && this._blurDispersionStrength === 0; this._blurDispersionStrength = e, t && this._buildPipeline(); } _useBlur() { return this._blurDispersionStrength > 0; } /** * Gets or sets the downsample factor used to reduce the size of the textures used to blur the reflection effect (default: 0). * Use 0 to blur at full resolution, 1 to render at half resolution, 2 to render at 1/3 resolution, etc. */ get blurDownsample() { return this._blurDownsample; } set blurDownsample(e) { e !== this._blurDownsample && (this._blurDownsample = e, this._buildPipeline()); } /** * Gets or sets whether or not smoothing reflections is enabled (default: false) * Enabling smoothing will require more GPU power. * Note that this setting has no effect if step = 1: it's only used if step \> 1. */ get enableSmoothReflections() { return this._enableSmoothReflections; } set enableSmoothReflections(e) { e !== this._enableSmoothReflections && (this._enableSmoothReflections = e, this._updateEffectDefines()); } /** * Gets or sets the environment cube texture used to define the reflection when the reflected rays of SSR leave the view space or when the maxDistance/maxSteps is reached. */ get environmentTexture() { return this._environmentTexture; } set environmentTexture(e) { this._environmentTexture = e, this._updateEffectDefines(); } /** * Gets or sets the boolean defining if the environment texture is a standard cubemap (false) or a probe (true). Default value is false. * Note: a probe cube texture is treated differently than an ordinary cube texture because the Y axis is reversed. */ get environmentTextureIsProbe() { return this._environmentTextureIsProbe; } set environmentTextureIsProbe(e) { this._environmentTextureIsProbe = e, this._updateEffectDefines(); } /** * Gets or sets a boolean indicating if the reflections should be attenuated at the screen borders (default: true). */ get attenuateScreenBorders() { return this._attenuateScreenBorders; } set attenuateScreenBorders(e) { this._attenuateScreenBorders !== e && (this._attenuateScreenBorders = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating if the reflections should be attenuated according to the distance of the intersection (default: true). */ get attenuateIntersectionDistance() { return this._attenuateIntersectionDistance; } set attenuateIntersectionDistance(e) { this._attenuateIntersectionDistance !== e && (this._attenuateIntersectionDistance = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating if the reflections should be attenuated according to the number of iterations performed to find the intersection (default: true). 
*/ get attenuateIntersectionIterations() { return this._attenuateIntersectionIterations; } set attenuateIntersectionIterations(e) { this._attenuateIntersectionIterations !== e && (this._attenuateIntersectionIterations = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating if the reflections should be attenuated when the reflection ray is facing the camera (the view direction) (default: false). */ get attenuateFacingCamera() { return this._attenuateFacingCamera; } set attenuateFacingCamera(e) { this._attenuateFacingCamera !== e && (this._attenuateFacingCamera = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating if the backface reflections should be attenuated (default: false). */ get attenuateBackfaceReflection() { return this._attenuateBackfaceReflection; } set attenuateBackfaceReflection(e) { this._attenuateBackfaceReflection !== e && (this._attenuateBackfaceReflection = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating if the ray should be clipped to the frustum (default: true). * You can try to set this parameter to false to save some performances: it may produce some artefacts in some cases, but generally they won't really be visible */ get clipToFrustum() { return this._clipToFrustum; } set clipToFrustum(e) { this._clipToFrustum !== e && (this._clipToFrustum = e, this._updateEffectDefines()); } /** * Gets or sets a boolean indicating whether the blending between the current color pixel and the reflection color should be done with a Fresnel coefficient (default: false). * It is more physically accurate to use the Fresnel coefficient (otherwise it uses the reflectivity of the material for blending), but it is also more expensive when you use blur (when blurDispersionStrength \> 0). */ get useFresnel() { return this._useFresnel; } set useFresnel(e) { this._useFresnel !== e && (this._useFresnel = e, this._buildPipeline()); } /** * Gets or sets a boolean defining if geometry thickness should be computed automatically (default: false). * When enabled, a depth renderer is created which will render the back faces of the scene to a depth texture (meaning additional work for the GPU). * In that mode, the "thickness" property is still used as an offset to compute the ray intersection, but you can typically use a much lower * value than when enableAutomaticThicknessComputation is false (it's even possible to use a value of 0 when using low values for "step") * Note that for performance reasons, this option will only apply to the first camera to which the rendering pipeline is attached! */ get enableAutomaticThicknessComputation() { return this._enableAutomaticThicknessComputation; } set enableAutomaticThicknessComputation(e) { this._enableAutomaticThicknessComputation !== e && (this._enableAutomaticThicknessComputation = e, this._buildPipeline()); } /** * Gets the depth renderer used to render the back faces of the scene to a depth texture. */ get backfaceDepthRenderer() { return this._depthRenderer; } /** * Gets or sets the downsample factor (default: 0) used to create the backface depth texture - used only if enableAutomaticThicknessComputation = true. * Use 0 to render the depth at full resolution, 1 to render at half resolution, 2 to render at 1/4 resolution, etc. * Note that you will get rendering artefacts when using a value different from 0: it's a tradeoff between image quality and performances. 
*/ get backfaceDepthTextureDownsample() { return this._backfaceDepthTextureDownsample; } set backfaceDepthTextureDownsample(e) { this._backfaceDepthTextureDownsample !== e && (this._backfaceDepthTextureDownsample = e, this._resizeDepthRenderer()); } /** * Gets or sets a boolean (default: true) indicating if the depth of transparent meshes should be written to the backface depth texture (when automatic thickness computation is enabled). */ get backfaceForceDepthWriteTransparentMeshes() { return this._backfaceForceDepthWriteTransparentMeshes; } set backfaceForceDepthWriteTransparentMeshes(e) { this._backfaceForceDepthWriteTransparentMeshes !== e && (this._backfaceForceDepthWriteTransparentMeshes = e, this._depthRenderer && (this._depthRenderer.forceDepthWriteTransparentMeshes = e)); } /** * Gets or sets a boolean indicating if the effect is enabled (default: true). */ get isEnabled() { return this._isEnabled; } set isEnabled(e) { this._isEnabled !== e && (this._isEnabled = e, e ? e && (this._isDirty ? this._buildPipeline() : this._cameras !== null && this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras)) : this._cameras !== null && (this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), this._cameras = this._camerasToBeAttached.slice())); } /** * Gets or sets a boolean defining if the input color texture is in gamma space (default: true) * The SSR effect works in linear space, so if the input texture is in gamma space, we must convert the texture to linear space before applying the effect */ get inputTextureColorIsInGammaSpace() { return this._inputTextureColorIsInGammaSpace; } set inputTextureColorIsInGammaSpace(e) { this._inputTextureColorIsInGammaSpace !== e && (this._inputTextureColorIsInGammaSpace = e, this._buildPipeline()); } /** * Gets or sets a boolean defining if the output color texture generated by the SSR pipeline should be in gamma space (default: true) * If you have a post-process that comes after the SSR and that post-process needs the input to be in a linear space, you must disable generateOutputInGammaSpace */ get generateOutputInGammaSpace() { return this._generateOutputInGammaSpace; } set generateOutputInGammaSpace(e) { this._generateOutputInGammaSpace !== e && (this._generateOutputInGammaSpace = e, this._buildPipeline()); } /** * Gets or sets a boolean indicating if the effect should be rendered in debug mode (default: false). * In this mode, colors have this meaning: * - blue: the ray hit the max distance (we reached maxDistance) * - red: the ray ran out of steps (we reached maxSteps) * - yellow: the ray went off screen * - green: the ray hit a surface. The brightness of the green color is proportional to the distance between the ray origin and the intersection point: A brighter green means more computation than a darker green. * In the first 3 cases, the final color is calculated by mixing the skybox color with the pixel color (if environmentTexture is defined), otherwise the pixel color is not modified * You should try to get as few blue/red/yellow pixels as possible, as this means that the ray has gone further than if it had hit a surface. */ get debug() { return this._debug; } set debug(e) { this._debug !== e && (this._debug = e, this._buildPipeline()); } /** * Gets the scene the effect belongs to. * @returns the scene the effect belongs to. */ getScene() { return this._scene; } get _geometryBufferRenderer() { return this._forceGeometryBuffer ? 
this._scene.geometryBufferRenderer : null; } get _prePassRenderer() { return this._forceGeometryBuffer ? null : this._scene.prePassRenderer; } /** * Gets active scene */ get scene() { return this._scene; } /** * Returns true if SSR is supported by the running hardware */ get isSupported() { const e = this._scene.getEngine().getCaps(); return e.drawBuffersExtension && e.texelFetch; } /** * Constructor of the SSR rendering pipeline * @param name The rendering pipeline name * @param scene The scene linked to this pipeline * @param cameras The array of cameras that the rendering pipeline will be attached to (default: scene.cameras) * @param forceGeometryBuffer Set to true if you want to use the legacy geometry buffer renderer (default: false) * @param textureType The texture type used by the different post processes created by SSR (default: 0) */ constructor(e, t, i, r = !1, s = 0) { if (super(t.getEngine(), e), this.SSRRenderEffect = "SSRRenderEffect", this.SSRBlurRenderEffect = "SSRBlurRenderEffect", this.SSRCombineRenderEffect = "SSRCombineRenderEffect", this._samples = 1, this.maxDistance = 1e3, this.step = 1, this.thickness = 0.5, this.strength = 1, this.reflectionSpecularFalloffExponent = 1, this.maxSteps = 1e3, this.roughnessFactor = 0.2, this.selfCollisionNumSkip = 1, this._reflectivityThreshold = 0.04, this._ssrDownsample = 0, this._blurDispersionStrength = 0.03, this._blurDownsample = 0, this._enableSmoothReflections = !1, this._environmentTextureIsProbe = !1, this._attenuateScreenBorders = !0, this._attenuateIntersectionDistance = !0, this._attenuateIntersectionIterations = !0, this._attenuateFacingCamera = !1, this._attenuateBackfaceReflection = !1, this._clipToFrustum = !0, this._useFresnel = !1, this._enableAutomaticThicknessComputation = !1, this._backfaceDepthTextureDownsample = 0, this._backfaceForceDepthWriteTransparentMeshes = !0, this._isEnabled = !0, this._inputTextureColorIsInGammaSpace = !0, this._generateOutputInGammaSpace = !0, this._debug = !1, this._forceGeometryBuffer = !1, this._isDirty = !1, this._camerasToBeAttached = [], this._cameras = i || t.cameras, this._cameras = this._cameras.slice(), this._camerasToBeAttached = this._cameras.slice(), this._scene = t, this._textureType = s, this._forceGeometryBuffer = r, this.isSupported) { if (t.postProcessRenderPipelineManager.addPipeline(this), this._forceGeometryBuffer) { const n = t.enableGeometryBufferRenderer(); n && (n.enableReflectivity = !0, n.useSpecificClearForDepthTexture = !0, n.generateNormalsInWorldSpace && Ce.Error("SSRRenderingPipeline does not support generateNormalsInWorldSpace=true for the geometry buffer renderer!")); } else { const n = t.enablePrePassRenderer(); n && (n.useSpecificClearForDepthTexture = !0, n.markAsDirty(), n.generateNormalsInWorldSpace && Ce.Error("SSRRenderingPipeline does not support generateNormalsInWorldSpace=true for the prepass renderer!")); } this._buildPipeline(); } } /** * Get the class name * @returns "SSRRenderingPipeline" */ getClassName() { return "SSRRenderingPipeline"; } /** * Adds a camera to the pipeline * @param camera the camera to be added */ addCamera(e) { this._camerasToBeAttached.push(e), this._buildPipeline(); } /** * Removes a camera from the pipeline * @param camera the camera to remove */ removeCamera(e) { const t = this._camerasToBeAttached.indexOf(e); this._camerasToBeAttached.splice(t, 1), this._buildPipeline(); } /** * Removes the internal pipeline assets and detaches the pipeline from the scene cameras * @param disableGeometryBufferRenderer */ 
dispose(e = !1) { this._disposeDepthRenderer(), this._disposePostProcesses(), e && this._scene.disableGeometryBufferRenderer(), this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), super.dispose(); } _getTextureSize() { var e, t; const i = this._scene.getEngine(), r = this._prePassRenderer; let s = { width: i.getRenderWidth(), height: i.getRenderHeight() }; if (r && ((e = this._scene.activeCamera) === null || e === void 0 ? void 0 : e._getFirstPostProcess()) === this._ssrPostProcess) { const n = r.getRenderTarget(); n && n.textures && (s = n.textures[r.getIndex(4)].getSize()); } else !((t = this._ssrPostProcess) === null || t === void 0) && t.inputTexture && (s.width = this._ssrPostProcess.inputTexture.width, s.height = this._ssrPostProcess.inputTexture.height); return s; } _updateEffectDefines() { var e; const t = []; (this._geometryBufferRenderer || this._prePassRenderer) && t.push("#define SSR_SUPPORTED"), this._enableSmoothReflections && t.push("#define SSRAYTRACE_ENABLE_REFINEMENT"), this._scene.useRightHandedSystem && t.push("#define SSRAYTRACE_RIGHT_HANDED_SCENE"), this._environmentTexture && (t.push("#define SSR_USE_ENVIRONMENT_CUBE"), this._environmentTexture.boundingBoxSize && t.push("#define SSR_USE_LOCAL_REFLECTIONMAP_CUBIC"), this._environmentTexture.gammaSpace && t.push("#define SSR_ENVIRONMENT_CUBE_IS_GAMMASPACE")), this._environmentTextureIsProbe && t.push("#define SSR_INVERTCUBICMAP"), this._enableAutomaticThicknessComputation && t.push("#define SSRAYTRACE_USE_BACK_DEPTHBUFFER"), this._attenuateScreenBorders && t.push("#define SSR_ATTENUATE_SCREEN_BORDERS"), this._attenuateIntersectionDistance && t.push("#define SSR_ATTENUATE_INTERSECTION_DISTANCE"), this._attenuateIntersectionIterations && t.push("#define SSR_ATTENUATE_INTERSECTION_NUMITERATIONS"), this._attenuateFacingCamera && t.push("#define SSR_ATTENUATE_FACING_CAMERA"), this._attenuateBackfaceReflection && t.push("#define SSR_ATTENUATE_BACKFACE_REFLECTION"), this._clipToFrustum && t.push("#define SSRAYTRACE_CLIP_TO_FRUSTUM"), this._useBlur() && t.push("#define SSR_USE_BLUR"), this._debug && t.push("#define SSRAYTRACE_DEBUG"), this._inputTextureColorIsInGammaSpace && t.push("#define SSR_INPUT_IS_GAMMA_SPACE"), this._generateOutputInGammaSpace && t.push("#define SSR_OUTPUT_IS_GAMMA_SPACE"), this._useFresnel && t.push("#define SSR_BLEND_WITH_FRESNEL"), this._reflectivityThreshold === 0 && t.push("#define SSR_DISABLE_REFLECTIVITY_TEST"), (e = this._ssrPostProcess) === null || e === void 0 || e.updateEffect(t.join(` `)); } _buildPipeline() { var e; if (!this.isSupported) return; if (!this._isEnabled) { this._isDirty = !0; return; } this._isDirty = !1; const t = this._scene.getEngine(); if (this._disposeDepthRenderer(), this._disposePostProcesses(), this._cameras !== null && (this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras), this._cameras = this._camerasToBeAttached.slice()), this._reset(), this._enableAutomaticThicknessComputation) { const i = (e = this._cameras) === null || e === void 0 ? 
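/*
 * Illustrative usage sketch (comment only, not executed by this bundle): creating the SSR pipeline
 * with the constructor documented above. `scene` and `scene.activeCamera` are assumed to exist;
 * the tuning properties shown are the public fields initialized in the constructor, and the
 * automatic-thickness properties correspond to the serialized fields of this class.
 *
 *   const ssr = new BABYLON.SSRRenderingPipeline(
 *     "ssr",                 // name
 *     scene,                 // scene the pipeline belongs to
 *     [scene.activeCamera],  // cameras to attach to (defaults to scene.cameras)
 *     false,                 // forceGeometryBuffer: keep the prepass renderer path
 *     0                      // textureType
 *   );
 *   ssr.thickness = 0.5;
 *   ssr.strength = 1;
 *   ssr.maxSteps = 500;
 *   // Optionally derive thickness from a backface depth pass instead of a constant:
 *   ssr.enableAutomaticThicknessComputation = true;
 *   ssr.backfaceDepthTextureDownsample = 1;
 */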
void 0 : e[0]; i && (this._depthRendererCamera = i, this._depthRenderer = new T5(this._scene, void 0, void 0, void 0, 1, !0, "SSRBackDepth"), this._depthRenderer.clearColor.r = 1e8, this._depthRenderer.reverseCulling = !0, this._depthRenderer.forceDepthWriteTransparentMeshes = this._backfaceForceDepthWriteTransparentMeshes, this._resizeDepthRenderer(), i.customRenderTargets.push(this._depthRenderer.getDepthMap())); } this._createSSRPostProcess(), this.addEffect(new gn(t, this.SSRRenderEffect, () => this._ssrPostProcess, !0)), this._useBlur() && (this._createBlurAndCombinerPostProcesses(), this.addEffect(new gn(t, this.SSRBlurRenderEffect, () => [this._blurPostProcessX, this._blurPostProcessY], !0)), this.addEffect(new gn(t, this.SSRCombineRenderEffect, () => this._blurCombinerPostProcess, !0))), this._cameras !== null && this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras); } _resizeDepthRenderer() { if (!this._depthRenderer) return; const e = this._getTextureSize(), t = this._depthRenderer.getDepthMap().getSize(), i = Math.floor(e.width / (this._backfaceDepthTextureDownsample + 1)), r = Math.floor(e.height / (this._backfaceDepthTextureDownsample + 1)); (t.width !== i || t.height !== r) && this._depthRenderer.getDepthMap().resize({ width: i, height: r }); } _disposeDepthRenderer() { var e; if (this._depthRenderer) { if (this._depthRendererCamera) { const t = (e = this._depthRendererCamera.customRenderTargets.indexOf(this._depthRenderer.getDepthMap())) !== null && e !== void 0 ? e : -1; t !== -1 && this._depthRendererCamera.customRenderTargets.splice(t, 1); } this._depthRendererCamera = null, this._depthRenderer.getDepthMap().dispose(); } this._depthRenderer = null; } _disposePostProcesses() { var e, t, i, r; for (let s = 0; s < this._cameras.length; s++) { const n = this._cameras[s]; (e = this._ssrPostProcess) === null || e === void 0 || e.dispose(n), (t = this._blurPostProcessX) === null || t === void 0 || t.dispose(n), (i = this._blurPostProcessY) === null || i === void 0 || i.dispose(n), (r = this._blurCombinerPostProcess) === null || r === void 0 || r.dispose(n); } this._ssrPostProcess = null, this._blurPostProcessX = null, this._blurPostProcessY = null, this._blurCombinerPostProcess = null; } _createSSRPostProcess() { this._ssrPostProcess = new Bi("ssr", "screenSpaceReflection2", [ "projection", "invProjectionMatrix", "view", "invView", "thickness", "reflectionSpecularFalloffExponent", "strength", "stepSize", "maxSteps", "roughnessFactor", "projectionPixel", "nearPlaneZ", "maxDistance", "selfCollisionNumSkip", "vReflectionPosition", "vReflectionSize", "backSizeFactor", "reflectivityThreshold" ], ["textureSampler", "normalSampler", "reflectivitySampler", "depthSampler", "envCubeSampler", "backDepthSampler"], 1, null, this._textureType, this._scene.getEngine(), !1, "", this._textureType), this._updateEffectDefines(), this._ssrPostProcess.onApply = (e) => { this._resizeDepthRenderer(); const t = this._geometryBufferRenderer, i = this._prePassRenderer; if (!i && !t) return; if (t) { const l = t.getTextureIndex(_o.REFLECTIVITY_TEXTURE_TYPE); e.setTexture("normalSampler", t.getGBuffer().textures[1]), e.setTexture("reflectivitySampler", t.getGBuffer().textures[l]), e.setTexture("depthSampler", t.getGBuffer().textures[0]); } else if (i) { const l = i.getIndex(5), o = i.getIndex(3), u = i.getIndex(6); e.setTexture("normalSampler", i.getRenderTarget().textures[u]), e.setTexture("depthSampler", i.getRenderTarget().textures[l]), 
e.setTexture("reflectivitySampler", i.getRenderTarget().textures[o]); } this._enableAutomaticThicknessComputation && this._depthRenderer && (e.setTexture("backDepthSampler", this._depthRenderer.getDepthMap()), e.setFloat("backSizeFactor", this._backfaceDepthTextureDownsample + 1)); const r = this._scene.activeCamera; if (!r) return; const s = r.getViewMatrix(), n = r.getProjectionMatrix(); n.invertToRef(de.Matrix[0]), s.invertToRef(de.Matrix[1]), e.setMatrix("projection", n), e.setMatrix("view", s), e.setMatrix("invView", de.Matrix[1]), e.setMatrix("invProjectionMatrix", de.Matrix[0]), e.setFloat("thickness", this.thickness), e.setFloat("reflectionSpecularFalloffExponent", this.reflectionSpecularFalloffExponent), e.setFloat("strength", this.strength), e.setFloat("stepSize", this.step), e.setFloat("maxSteps", this.maxSteps), e.setFloat("roughnessFactor", this.roughnessFactor), e.setFloat("nearPlaneZ", r.minZ), e.setFloat("maxDistance", this.maxDistance), e.setFloat("selfCollisionNumSkip", this.selfCollisionNumSkip), e.setFloat("reflectivityThreshold", this._reflectivityThreshold); const a = this._getTextureSize(); Ae.ScalingToRef(a.width, a.height, 1, de.Matrix[2]), n.multiplyToRef(this._scene.getEngine().isWebGPU ? W3e : K3e, de.Matrix[3]), de.Matrix[3].multiplyToRef(de.Matrix[2], de.Matrix[4]), e.setMatrix("projectionPixel", de.Matrix[4]), this._environmentTexture && (e.setTexture("envCubeSampler", this._environmentTexture), this._environmentTexture.boundingBoxSize && (e.setVector3("vReflectionPosition", this._environmentTexture.boundingBoxPosition), e.setVector3("vReflectionSize", this._environmentTexture.boundingBoxSize))); }, this._ssrPostProcess.samples = this.samples, this._forceGeometryBuffer || (this._ssrPostProcess._prePassEffectConfiguration = new N3e()); } _createBlurAndCombinerPostProcesses() { const e = this._scene.getEngine(); this._blurPostProcessX = new Bi("SSRblurX", "screenSpaceReflection2Blur", ["texelOffsetScale"], ["textureSampler"], this._useBlur() ? 1 / (this._ssrDownsample + 1) : 1, null, 2, e, !1, "", this._textureType), this._blurPostProcessX.autoClear = !1, this._blurPostProcessX.onApplyObservable.add((s) => { var n, a; const l = (a = (n = this._blurPostProcessX) === null || n === void 0 ? void 0 : n.inputTexture.width) !== null && a !== void 0 ? a : this._scene.getEngine().getRenderWidth(); s.setFloat2("texelOffsetScale", this._blurDispersionStrength / l, 0); }), this._blurPostProcessY = new Bi("SSRblurY", "screenSpaceReflection2Blur", ["texelOffsetScale"], ["textureSampler"], this._useBlur() ? 1 / (this._blurDownsample + 1) : 1, null, 2, e, !1, "", this._textureType), this._blurPostProcessY.autoClear = !1, this._blurPostProcessY.onApplyObservable.add((s) => { var n, a; const l = (a = (n = this._blurPostProcessY) === null || n === void 0 ? void 0 : n.inputTexture.height) !== null && a !== void 0 ? 
a : this._scene.getEngine().getRenderHeight(); s.setFloat2("texelOffsetScale", 0, this._blurDispersionStrength / l); }); const t = ["strength", "reflectionSpecularFalloffExponent", "reflectivityThreshold"], i = ["textureSampler", "mainSampler", "reflectivitySampler"]; let r = ""; this._debug && (r += `#define SSRAYTRACE_DEBUG `), this._inputTextureColorIsInGammaSpace && (r += `#define SSR_INPUT_IS_GAMMA_SPACE `), this._generateOutputInGammaSpace && (r += `#define SSR_OUTPUT_IS_GAMMA_SPACE `), this.useFresnel && (r += `#define SSR_BLEND_WITH_FRESNEL `, t.push("projection", "invProjectionMatrix"), i.push("depthSampler", "normalSampler")), this._reflectivityThreshold === 0 && (r += "#define SSR_DISABLE_REFLECTIVITY_TEST"), this._blurCombinerPostProcess = new Bi("SSRblurCombiner", "screenSpaceReflection2BlurCombiner", t, i, this._useBlur() ? 1 / (this._blurDownsample + 1) : 1, null, 1, e, !1, r, this._textureType), this._blurCombinerPostProcess.autoClear = !1, this._blurCombinerPostProcess.onApplyObservable.add((s) => { var n; const a = this._geometryBufferRenderer, l = this._prePassRenderer; if (!(!l && !a)) { if (l && ((n = this._scene.activeCamera) === null || n === void 0 ? void 0 : n._getFirstPostProcess()) === this._ssrPostProcess) { const o = l.getRenderTarget(); o && o.textures && s.setTexture("mainSampler", o.textures[l.getIndex(4)]); } else s.setTextureFromPostProcess("mainSampler", this._ssrPostProcess); if (a) { const o = a.getTextureIndex(_o.REFLECTIVITY_TEXTURE_TYPE); s.setTexture("reflectivitySampler", a.getGBuffer().textures[o]), this.useFresnel && (s.setTexture("normalSampler", a.getGBuffer().textures[1]), s.setTexture("depthSampler", a.getGBuffer().textures[0])); } else if (l) { const o = l.getIndex(3); if (s.setTexture("reflectivitySampler", l.getRenderTarget().textures[o]), this.useFresnel) { const u = l.getIndex(5), h = l.getIndex(6); s.setTexture("normalSampler", l.getRenderTarget().textures[h]), s.setTexture("depthSampler", l.getRenderTarget().textures[u]); } } if (s.setFloat("strength", this.strength), s.setFloat("reflectionSpecularFalloffExponent", this.reflectionSpecularFalloffExponent), s.setFloat("reflectivityThreshold", this._reflectivityThreshold), this.useFresnel) { const o = this._scene.activeCamera; if (o) { const u = o.getProjectionMatrix(); u.invertToRef(de.Matrix[0]), s.setMatrix("projection", u), s.setMatrix("invProjectionMatrix", de.Matrix[0]); } } } }); } /** * Serializes the rendering pipeline (Used when exporting) * @returns the serialized object */ serialize() { const e = St.Serialize(this); return e.customType = "SSRRenderingPipeline", e; } /** * Parse the serialized pipeline * @param source Source pipeline. * @param scene The scene to load the pipeline to. * @param rootUrl The URL of the serialized pipeline. * @returns An instantiated pipeline from the serialized object. 
*/ static Parse(e, t, i) { return St.Parse(() => new Ma(e._name, t, e._ratio), e, t, i); } } F([ W() ], Ma.prototype, "samples", null); F([ W() ], Ma.prototype, "maxDistance", void 0); F([ W() ], Ma.prototype, "step", void 0); F([ W() ], Ma.prototype, "thickness", void 0); F([ W() ], Ma.prototype, "strength", void 0); F([ W() ], Ma.prototype, "reflectionSpecularFalloffExponent", void 0); F([ W() ], Ma.prototype, "maxSteps", void 0); F([ W() ], Ma.prototype, "roughnessFactor", void 0); F([ W() ], Ma.prototype, "selfCollisionNumSkip", void 0); F([ W() ], Ma.prototype, "_reflectivityThreshold", void 0); F([ W("_ssrDownsample") ], Ma.prototype, "_ssrDownsample", void 0); F([ W() ], Ma.prototype, "ssrDownsample", null); F([ W("blurDispersionStrength") ], Ma.prototype, "_blurDispersionStrength", void 0); F([ W("blurDownsample") ], Ma.prototype, "_blurDownsample", void 0); F([ W("enableSmoothReflections") ], Ma.prototype, "_enableSmoothReflections", void 0); F([ W("environmentTexture") ], Ma.prototype, "_environmentTexture", void 0); F([ W("environmentTextureIsProbe") ], Ma.prototype, "_environmentTextureIsProbe", void 0); F([ W("attenuateScreenBorders") ], Ma.prototype, "_attenuateScreenBorders", void 0); F([ W("attenuateIntersectionDistance") ], Ma.prototype, "_attenuateIntersectionDistance", void 0); F([ W("attenuateIntersectionIterations") ], Ma.prototype, "_attenuateIntersectionIterations", void 0); F([ W("attenuateFacingCamera") ], Ma.prototype, "_attenuateFacingCamera", void 0); F([ W("attenuateBackfaceReflection") ], Ma.prototype, "_attenuateBackfaceReflection", void 0); F([ W("clipToFrustum") ], Ma.prototype, "_clipToFrustum", void 0); F([ W("useFresnel") ], Ma.prototype, "_useFresnel", void 0); F([ W("enableAutomaticThicknessComputation") ], Ma.prototype, "_enableAutomaticThicknessComputation", void 0); F([ W("backfaceDepthTextureDownsample") ], Ma.prototype, "_backfaceDepthTextureDownsample", void 0); F([ W("backfaceForceDepthWriteTransparentMeshes") ], Ma.prototype, "_backfaceForceDepthWriteTransparentMeshes", void 0); F([ W("isEnabled") ], Ma.prototype, "_isEnabled", void 0); F([ W("inputTextureColorIsInGammaSpace") ], Ma.prototype, "_inputTextureColorIsInGammaSpace", void 0); F([ W("generateOutputInGammaSpace") ], Ma.prototype, "_generateOutputInGammaSpace", void 0); F([ W("debug") ], Ma.prototype, "_debug", void 0); Be("BABYLON.SSRRenderingPipeline", Ma); const j3e = "tonemapPixelShader", X3e = `varying vec2 vUV;uniform sampler2D textureSampler;uniform float _ExposureAdjustment; #if defined(HABLE_TONEMAPPING) const float A=0.15;const float B=0.50;const float C=0.10;const float D=0.20;const float E=0.02;const float F=0.30;const float W=11.2; #endif float Luminance(vec3 c) {return dot(c,vec3(0.22,0.707,0.071));} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec3 colour=texture2D(textureSampler,vUV).rgb; #if defined(REINHARD_TONEMAPPING) float lum=Luminance(colour.rgb); float lumTm=lum*_ExposureAdjustment;float scale=lumTm/(1.0+lumTm); colour*=scale/lum; #elif defined(HABLE_TONEMAPPING) colour*=_ExposureAdjustment;const float ExposureBias=2.0;vec3 x=ExposureBias*colour;vec3 curr=((x*(A*x+C*B)+D*E)/(x*(A*x+B)+D*F))-E/F;x=vec3(W,W,W);vec3 whiteScale=1.0/(((x*(A*x+C*B)+D*E)/(x*(A*x+B)+D*F))-E/F);colour=curr*whiteScale; #elif defined(OPTIMIZED_HEJIDAWSON_TONEMAPPING) colour*=_ExposureAdjustment;vec3 X=max(vec3(0.0,0.0,0.0),colour-0.004);vec3 retColor=(X*(6.2*X+0.5))/(X*(6.2*X+1.7)+0.06);colour=retColor*retColor; #elif defined(PHOTOGRAPHIC_TONEMAPPING) colour= 
vec3(1.0,1.0,1.0)-exp2(-_ExposureAdjustment*colour); #endif gl_FragColor=vec4(colour.rgb,1.0);}`; je.ShadersStore[j3e] = X3e; var $R; (function(c) { c[c.Hable = 0] = "Hable", c[c.Reinhard = 1] = "Reinhard", c[c.HejiDawson = 2] = "HejiDawson", c[c.Photographic = 3] = "Photographic"; })($R || ($R = {})); class Y3e extends Bi { /** * Gets a string identifying the name of the class * @returns "TonemapPostProcess" string */ getClassName() { return "TonemapPostProcess"; } /** * Creates a new TonemapPostProcess * @param name defines the name of the postprocess * @param _operator defines the operator to use * @param exposureAdjustment defines the required exposure adjustment * @param camera defines the camera to use (can be null) * @param samplingMode defines the required sampling mode (BABYLON.Texture.BILINEAR_SAMPLINGMODE by default) * @param engine defines the hosting engine (can be ignore if camera is set) * @param textureFormat defines the texture format to use (BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT by default) * @param reusable If the post process can be reused on the same frame. (default: false) */ constructor(e, t, i, r, s = 2, n, a = 0, l) { super(e, "tonemap", ["_ExposureAdjustment"], null, 1, r, s, n, l, null, a), this._operator = t, this.exposureAdjustment = i; let o = "#define "; this._operator === $R.Hable ? o += "HABLE_TONEMAPPING" : this._operator === $R.Reinhard ? o += "REINHARD_TONEMAPPING" : this._operator === $R.HejiDawson ? o += "OPTIMIZED_HEJIDAWSON_TONEMAPPING" : this._operator === $R.Photographic && (o += "PHOTOGRAPHIC_TONEMAPPING"), this.updateEffect(o), this.onApply = (u) => { u.setFloat("_ExposureAdjustment", this.exposureAdjustment); }; } } const Q3e = "volumetricLightScatteringPixelShader", $3e = `uniform sampler2D textureSampler;uniform sampler2D lightScatteringSampler;uniform float decay;uniform float exposure;uniform float weight;uniform float density;uniform vec2 meshPositionOnScreen;varying vec2 vUV; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN vec2 tc=vUV;vec2 deltaTexCoord=(tc-meshPositionOnScreen.xy);deltaTexCoord*=1.0/float(NUM_SAMPLES)*density;float illuminationDecay=1.0;vec4 color=texture2D(lightScatteringSampler,tc)*0.4;for(int i=0; i #include #include #include[0..maxSimultaneousMorphTargets] #include uniform mat4 viewProjection;uniform vec2 depthValues; #if defined(ALPHATEST) || defined(NEED_UV) varying vec2 vUV;uniform mat4 diffuseMatrix; #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #endif #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position; #if (defined(ALPHATEST) || defined(NEED_UV)) && defined(UV1) vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] #include #include #include gl_Position=viewProjection*finalWorld*vec4(positionUpdated,1.0); #if defined(ALPHATEST) || defined(BASIC_RENDER) #ifdef UV1 vUV=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef UV2 vUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #endif #endif } `; je.ShadersStore[Z3e] = q3e; const J3e = "volumetricLightScatteringPassPixelShader", e4e = `#if defined(ALPHATEST) || defined(NEED_UV) varying vec2 vUV; #endif #if defined(ALPHATEST) uniform sampler2D diffuseSampler; #endif #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #if defined(ALPHATEST) vec4 diffuseColor=texture2D(diffuseSampler,vUV);if (diffuseColor.a<0.4) discard; #endif gl_FragColor=vec4(0.0,0.0,0.0,1.0);} `; je.ShadersStore[J3e] = e4e; class Sm extends Bi { /** * @internal * 
VolumetricLightScatteringPostProcess.useDiffuseColor is no longer used, use the mesh material directly instead */ get useDiffuseColor() { return Ce.Warn("VolumetricLightScatteringPostProcess.useDiffuseColor is no longer used, use the mesh material directly instead"), !1; } set useDiffuseColor(e) { Ce.Warn("VolumetricLightScatteringPostProcess.useDiffuseColor is no longer used, use the mesh material directly instead"); } /** * @constructor * @param name The post-process name * @param ratio The size of the post-process and/or internal pass (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5) * @param camera The camera that the post-process will be attached to * @param mesh The mesh used to create the light scattering * @param samples The post-process quality, default 100 * @param samplingMode The post-process filtering mode * @param engine The babylon engine * @param reusable If the post-process is reusable * @param scene The constructor needs a scene reference to initialize internal components. If "camera" is null a "scene" must be provided */ constructor(e, t, i, r, s = 100, n = De.BILINEAR_SAMPLINGMODE, a, l, o) { var u, h; super(e, "volumetricLightScattering", ["decay", "exposure", "weight", "meshPositionOnScreen", "density"], ["lightScatteringSampler"], t.postProcessRatio || t, i, n, a, l, "#define NUM_SAMPLES " + s), this._screenCoordinates = at.Zero(), this.customMeshPosition = D.Zero(), this.useCustomMeshPosition = !1, this.invert = !0, this.excludedMeshes = [], this.includedMeshes = [], this.exposure = 0.3, this.decay = 0.96815, this.weight = 0.58767, this.density = 0.926, o = (h = (u = i == null ? void 0 : i.getScene()) !== null && u !== void 0 ? u : o) !== null && h !== void 0 ? h : this._scene, a = o.getEngine(), this._viewPort = new Md(0, 0, 1, 1).toGlobal(a.getRenderWidth(), a.getRenderHeight()), this.mesh = r ?? Sm.CreateDefaultMesh("VolumetricLightScatteringMesh", o), this._createPass(o, t.passRatio || t), this.onActivate = (d) => { this.isSupported || this.dispose(d), this.onActivate = null; }, this.onApplyObservable.add((d) => { this._updateMeshScreenCoordinates(o), d.setTexture("lightScatteringSampler", this._volumetricLightScatteringRTT), d.setFloat("exposure", this.exposure), d.setFloat("decay", this.decay), d.setFloat("weight", this.weight), d.setFloat("density", this.density), d.setVector2("meshPositionOnScreen", this._screenCoordinates); }); } /** * Returns the string "VolumetricLightScatteringPostProcess" * @returns "VolumetricLightScatteringPostProcess" */ getClassName() { return "VolumetricLightScatteringPostProcess"; } _isReady(e, t) { var i; const r = e.getMesh(); if (r === this.mesh && r.material) return r.material.isReady(r); const s = (i = r._internalAbstractMeshDataInfo._materialForRenderPass) === null || i === void 0 ? void 0 : i[this._scene.getEngine().currentRenderPassId]; if (s) return s.isReadyForSubMesh(r, e, t); const n = [], a = [Y.PositionKind], l = e.getMaterial(); l && (l.needAlphaTesting() && n.push("#define ALPHATEST"), r.isVerticesDataPresent(Y.UVKind) && (a.push(Y.UVKind), n.push("#define UV1")), r.isVerticesDataPresent(Y.UV2Kind) && (a.push(Y.UV2Kind), n.push("#define UV2"))), r.useBones && r.computeBonesUsingShaders ? (a.push(Y.MatricesIndicesKind), a.push(Y.MatricesWeightsKind), n.push("#define NUM_BONE_INFLUENCERS " + r.numBoneInfluencers), n.push("#define BonesPerMesh " + (r.skeleton ? 
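/*
 * Illustrative usage sketch (comment only, not executed by this bundle): creating the light
 * scattering ("god rays") post-process with the constructor documented above. `scene`, `camera`
 * and `someOccluderMesh` are assumed to exist; passing null for the mesh lets the constructor
 * build the default billboard plane via CreateDefaultMesh.
 *
 *   const godRays = new BABYLON.VolumetricLightScatteringPostProcess(
 *     "godRays",  // name
 *     1.0,        // ratio of the post-process and of the internal pass
 *     camera,     // camera the post-process is attached to
 *     null,       // mesh used as the light source (null => default mesh)
 *     75,         // samples (quality of the scattering)
 *     BABYLON.Texture.BILINEAR_SAMPLINGMODE
 *   );
 *   godRays.exposure = 0.3;
 *   godRays.decay = 0.96815;
 *   // Keep specific meshes out of the scattering pass:
 *   godRays.excludedMeshes.push(someOccluderMesh);
 */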
r.skeleton.bones.length + 1 : 0))) : n.push("#define NUM_BONE_INFLUENCERS 0"), t && (n.push("#define INSTANCES"), Ke.PushAttributesForInstances(a), e.getRenderingMesh().hasThinInstances && n.push("#define THIN_INSTANCES")); const o = e._getDrawWrapper(void 0, !0), u = o.defines, h = n.join(` `); return u !== h && o.setEffect(r.getScene().getEngine().createEffect("volumetricLightScatteringPass", a, ["world", "mBones", "viewProjection", "diffuseMatrix"], ["diffuseSampler"], h, void 0, void 0, void 0, { maxSimultaneousMorphTargets: r.numBoneInfluencers }), h), o.effect.isReady(); } /** * Sets the new light position for light scattering effect * @param position The new custom light position */ setCustomMeshPosition(e) { this.customMeshPosition = e; } /** * Returns the light position for light scattering effect * @returns Vector3 The custom light position */ getCustomMeshPosition() { return this.customMeshPosition; } /** * Disposes the internal assets and detaches the post-process from the camera * @param camera */ dispose(e) { const t = e.getScene().customRenderTargets.indexOf(this._volumetricLightScatteringRTT); t !== -1 && e.getScene().customRenderTargets.splice(t, 1), this._volumetricLightScatteringRTT.dispose(), super.dispose(e); } /** * Returns the render target texture used by the post-process * @returns the render target texture used by the post-process */ getPass() { return this._volumetricLightScatteringRTT; } // Private methods _meshExcluded(e) { return this.includedMeshes.length > 0 && this.includedMeshes.indexOf(e) === -1 || this.excludedMeshes.length > 0 && this.excludedMeshes.indexOf(e) !== -1; } _createPass(e, t) { const i = e.getEngine(); this._volumetricLightScatteringRTT = new ra("volumetricLightScatteringMap", { width: i.getRenderWidth() * t, height: i.getRenderHeight() * t }, e, !1, !0, 0), this._volumetricLightScatteringRTT.wrapU = De.CLAMP_ADDRESSMODE, this._volumetricLightScatteringRTT.wrapV = De.CLAMP_ADDRESSMODE, this._volumetricLightScatteringRTT.renderList = null, this._volumetricLightScatteringRTT.renderParticles = !1, this._volumetricLightScatteringRTT.ignoreCameraViewport = !0; const r = this.getCamera(); r ? r.customRenderTargets.push(this._volumetricLightScatteringRTT) : e.customRenderTargets.push(this._volumetricLightScatteringRTT); const s = (l) => { var o; const u = l.getRenderingMesh(), h = l.getEffectiveMesh(); if (this._meshExcluded(u)) return; h._internalAbstractMeshDataInfo._isActiveIntermediate = !1; const d = l.getMaterial(); if (!d) return; const f = u.getScene(), p = f.getEngine(); p.setState(d.backFaceCulling, void 0, void 0, void 0, d.cullBackFaces); const m = u._getInstancesRenderList(l._id, !!l.getReplacementMesh()); if (m.mustReturn) return; const _ = p.getCaps().instancedArrays && (m.visibleInstances[l._id] !== null || u.hasThinInstances); if (this._isReady(l, _)) { const v = (o = h._internalAbstractMeshDataInfo._materialForRenderPass) === null || o === void 0 ? 
void 0 : o[p.currentRenderPassId]; let C = l._getDrawWrapper(); if (u === this.mesh && !C && (C = d._getDrawWrapper()), !C) return; const x = C.effect; if (p.enableEffect(C), _ || u._bind(l, x, d.fillMode), u === this.mesh) d.bind(h.getWorldMatrix(), u); else if (v) v.bindForSubMesh(h.getWorldMatrix(), h, l); else { if (x.setMatrix("viewProjection", f.getTransformMatrix()), d && d.needAlphaTesting()) { const b = d.getAlphaTestTexture(); x.setTexture("diffuseSampler", b), b && x.setMatrix("diffuseMatrix", b.getTextureMatrix()); } u.useBones && u.computeBonesUsingShaders && u.skeleton && x.setMatrices("mBones", u.skeleton.getTransformMatrices(u)); } _ && u.hasThinInstances && x.setMatrix("world", h.getWorldMatrix()), u._processRendering(h, l, x, At.TriangleFillMode, m, _, (b, S) => { b || x.setMatrix("world", S); }); } }; let n; const a = new Et(0, 0, 0, 1); this._volumetricLightScatteringRTT.onBeforeRenderObservable.add(() => { n = e.clearColor, e.clearColor = a; }), this._volumetricLightScatteringRTT.onAfterRenderObservable.add(() => { e.clearColor = n; }), this._volumetricLightScatteringRTT.customIsReadyFunction = (l, o, u) => { if ((u || o === 0) && l.subMeshes) for (let h = 0; h < l.subMeshes.length; ++h) { const d = l.subMeshes[h], f = d.getMaterial(), p = d.getRenderingMesh(); if (!f) continue; const m = p._getInstancesRenderList(d._id, !!d.getReplacementMesh()), _ = i.getCaps().instancedArrays && (m.visibleInstances[d._id] !== null || p.hasThinInstances); if (!this._isReady(d, _)) return !1; } return !0; }, this._volumetricLightScatteringRTT.customRenderFunction = (l, o, u, h) => { const d = e.getEngine(); let f; if (h.length) { for (d.setColorWrite(!1), f = 0; f < h.length; f++) s(h.data[f]); d.setColorWrite(!0); } for (f = 0; f < l.length; f++) s(l.data[f]); for (f = 0; f < o.length; f++) s(o.data[f]); if (u.length) { for (f = 0; f < u.length; f++) { const m = u.data[f], _ = m.getBoundingInfo(); _ && e.activeCamera && (m._alphaIndex = m.getMesh().alphaIndex, m._distanceToCamera = _.boundingSphere.centerWorld.subtract(e.activeCamera.position).length()); } const p = u.data.slice(0, u.length); for (p.sort((m, _) => m._alphaIndex > _._alphaIndex ? 1 : m._alphaIndex < _._alphaIndex ? -1 : m._distanceToCamera < _._distanceToCamera ? 1 : m._distanceToCamera > _._distanceToCamera ? -1 : 0), d.setAlphaMode(2), f = 0; f < p.length; f++) s(p[f]); d.setAlphaMode(0); } }; } _updateMeshScreenCoordinates(e) { const t = e.getTransformMatrix(); let i; this.useCustomMeshPosition ? i = this.customMeshPosition : this.attachedNode ? i = this.attachedNode.position : i = this.mesh.parent ? 
this.mesh.getAbsolutePosition() : this.mesh.position; const r = D.Project(i, Ae.Identity(), t, this._viewPort); this._screenCoordinates.x = r.x / this._viewPort.width, this._screenCoordinates.y = r.y / this._viewPort.height, this.invert && (this._screenCoordinates.y = 1 - this._screenCoordinates.y); } // Static methods /** * Creates a default mesh for the Volumeric Light Scattering post-process * @param name The mesh name * @param scene The scene where to create the mesh * @returns the default mesh */ static CreateDefaultMesh(e, t) { const i = hx(e, { size: 1 }, t); i.billboardMode = xr.BILLBOARDMODE_ALL; const r = new Dt(e + "Material", t); return r.emissiveColor = new ze(1, 1, 1), i.material = r, i; } } F([ oo() ], Sm.prototype, "customMeshPosition", void 0); F([ W() ], Sm.prototype, "useCustomMeshPosition", void 0); F([ W() ], Sm.prototype, "invert", void 0); F([ hw() ], Sm.prototype, "mesh", void 0); F([ W() ], Sm.prototype, "excludedMeshes", void 0); F([ W() ], Sm.prototype, "includedMeshes", void 0); F([ W() ], Sm.prototype, "exposure", void 0); F([ W() ], Sm.prototype, "decay", void 0); F([ W() ], Sm.prototype, "weight", void 0); F([ W() ], Sm.prototype, "density", void 0); Be("BABYLON.VolumetricLightScatteringPostProcess", Sm); const t4e = "screenSpaceCurvaturePixelShader", i4e = `precision highp float;varying vec2 vUV;uniform sampler2D textureSampler;uniform sampler2D normalSampler;uniform float curvature_ridge;uniform float curvature_valley; #ifndef CURVATURE_OFFSET #define CURVATURE_OFFSET 1 #endif float curvature_soft_clamp(float curvature,float control) {if (curvature<0.5/control) return curvature*(1.0-curvature*control);return 0.25/control;} float calculate_curvature(ivec2 texel,float ridge,float valley) {vec2 normal_up =texelFetch(normalSampler,texel+ivec2(0, CURVATURE_OFFSET),0).rb;vec2 normal_down =texelFetch(normalSampler,texel+ivec2(0,-CURVATURE_OFFSET),0).rb;vec2 normal_left =texelFetch(normalSampler,texel+ivec2(-CURVATURE_OFFSET,0),0).rb;vec2 normal_right=texelFetch(normalSampler,texel+ivec2( CURVATURE_OFFSET,0),0).rb;float normal_diff=((normal_up.g-normal_down.g)+(normal_right.r-normal_left.r));if (normal_diff<0.0) return -2.0*curvature_soft_clamp(-normal_diff,valley);return 2.0*curvature_soft_clamp(normal_diff,ridge);} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {ivec2 texel=ivec2(gl_FragCoord.xy);vec4 baseColor=texture2D(textureSampler,vUV);float curvature=calculate_curvature(texel,curvature_ridge,curvature_valley);baseColor.rgb*=curvature+1.0;gl_FragColor=baseColor;}`; je.ShadersStore[t4e] = i4e; class Sw extends Bi { /** * Gets a string identifying the name of the class * @returns "ScreenSpaceCurvaturePostProcess" string */ getClassName() { return "ScreenSpaceCurvaturePostProcess"; } /** * Creates a new instance ScreenSpaceCurvaturePostProcess * @param name The name of the effect. * @param scene The scene containing the objects to blur according to their velocity. * @param options The required width/height ratio to downsize to before computing the render pass. * @param camera The camera to apply the render pass to. * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) * @param engine The engine which the post process will be applied. (default: current engine) * @param reusable If the post process can be reused on the same frame. (default: false) * @param textureType Type of textures used when performing the post process. 
(default: 0) * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) */ constructor(e, t, i, r, s, n, a, l = 0, o = !1) { super(e, "screenSpaceCurvature", ["curvature_ridge", "curvature_valley"], ["textureSampler", "normalSampler"], i, r, s, n, a, void 0, l, void 0, null, o), this.ridge = 1, this.valley = 1, this._geometryBufferRenderer = t.enableGeometryBufferRenderer(), this._geometryBufferRenderer ? (this._geometryBufferRenderer.generateNormalsInWorldSpace && Ce.Error("ScreenSpaceCurvaturePostProcess does not support generateNormalsInWorldSpace=true for the geometry buffer renderer!"), this.onApply = (u) => { u.setFloat("curvature_ridge", 0.5 / Math.max(this.ridge * this.ridge, 1e-4)), u.setFloat("curvature_valley", 0.7 / Math.max(this.valley * this.valley, 1e-4)); const h = this._geometryBufferRenderer.getGBuffer().textures[1]; u.setTexture("normalSampler", h); }) : Ce.Error("Multiple Render Target support needed for screen space curvature post process. Please use IsSupported test first."); } /** * Support test. */ static get IsSupported() { const e = gi.LastCreatedEngine; return e ? e.getCaps().drawBuffersExtension : !1; } /** * @internal */ static _Parse(e, t, i, r) { return St.Parse(() => new Sw(e.name, i, e.options, t, e.renderTargetSamplingMode, i.getEngine(), e.textureType, e.reusable), e, i, r); } } F([ W() ], Sw.prototype, "ridge", void 0); F([ W() ], Sw.prototype, "valley", void 0); Be("BABYLON.ScreenSpaceCurvaturePostProcess", Sw); const r4e = "boundingBoxRendererFragmentDeclaration", s4e = `uniform vec4 color; `; je.IncludesShadersStore[r4e] = s4e; const n4e = "boundingBoxRendererUboDeclaration", a4e = `#ifdef WEBGL2 uniform vec4 color;uniform mat4 world;uniform mat4 viewProjection; #ifdef MULTIVIEW uniform mat4 viewProjectionR; #endif #else layout(std140,column_major) uniform;uniform BoundingBoxRenderer {vec4 color;mat4 world;mat4 viewProjection;mat4 viewProjectionR;}; #endif `; je.IncludesShadersStore[n4e] = a4e; const o4e = "boundingBoxRendererPixelShader", l4e = `#include<__decl__boundingBoxRendererFragment> #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN gl_FragColor=color; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[o4e] = l4e; const c4e = "boundingBoxRendererVertexDeclaration", u4e = `uniform mat4 world;uniform mat4 viewProjection; #ifdef MULTIVIEW uniform mat4 viewProjectionR; #endif `; je.IncludesShadersStore[c4e] = u4e; const h4e = "boundingBoxRendererVertexShader", d4e = `attribute vec3 position; #include<__decl__boundingBoxRendererVertex> #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec4 worldPos=world*vec4(position,1.0); #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {gl_Position=viewProjection*worldPos;} else {gl_Position=viewProjectionR*worldPos;} #else gl_Position=viewProjection*worldPos; #endif #define CUSTOM_VERTEX_MAIN_END } `; je.ShadersStore[h4e] = d4e; Object.defineProperty(ii.prototype, "forceShowBoundingBoxes", { get: function() { return this._forceShowBoundingBoxes || !1; }, set: function(c) { this._forceShowBoundingBoxes = c, c && this.getBoundingBoxRenderer(); }, enumerable: !0, configurable: !0 }); ii.prototype.getBoundingBoxRenderer = function() { return this._boundingBoxRenderer || (this._boundingBoxRenderer = new Ine(this)), this._boundingBoxRenderer; }; Object.defineProperty(xr.prototype, "showBoundingBox", { get: function() { return 
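/*
 * Illustrative usage sketch (comment only, not executed by this bundle): the screen space
 * curvature post-process documented above. `scene` and `camera` are assumed to exist; IsSupported
 * should be checked first, since the effect needs multiple render target support.
 *
 *   if (BABYLON.ScreenSpaceCurvaturePostProcess.IsSupported) {
 *     const curvature = new BABYLON.ScreenSpaceCurvaturePostProcess(
 *       "curvature", scene, 1.0, camera
 *     );
 *     curvature.ridge = 1;   // ridge intensity factor (default: 1)
 *     curvature.valley = 1;  // valley intensity factor (default: 1)
 *   }
 */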
this._showBoundingBox || !1; }, set: function(c) { this._showBoundingBox = c, c && this.getScene().getBoundingBoxRenderer(); }, enumerable: !0, configurable: !0 }); class Ine { /** * Instantiates a new bounding box renderer in a scene. * @param scene the scene the renderer renders in */ constructor(e) { this.name = Bt.NAME_BOUNDINGBOXRENDERER, this.frontColor = new ze(1, 1, 1), this.backColor = new ze(0.1, 0.1, 0.1), this.showBackLines = !0, this.onBeforeBoxRenderingObservable = new Fe(), this.onAfterBoxRenderingObservable = new Fe(), this.onResourcesReadyObservable = new Fe(), this.enabled = !0, this.renderList = new xc(32), this._vertexBuffers = {}, this._fillIndexBuffer = null, this._fillIndexData = null, this.scene = e, e._addComponent(this), this._uniformBufferFront = new Vi(this.scene.getEngine(), void 0, void 0, "BoundingBoxRendererFront", !this.scene.getEngine().isWebGPU), this._buildUniformLayout(this._uniformBufferFront), this._uniformBufferBack = new Vi(this.scene.getEngine(), void 0, void 0, "BoundingBoxRendererBack", !this.scene.getEngine().isWebGPU), this._buildUniformLayout(this._uniformBufferBack); } _buildUniformLayout(e) { e.addUniform("color", 4), e.addUniform("world", 16), e.addUniform("viewProjection", 16), e.addUniform("viewProjectionR", 16), e.create(); } /** * Registers the component in a given scene */ register() { this.scene._beforeEvaluateActiveMeshStage.registerStep(Bt.STEP_BEFOREEVALUATEACTIVEMESH_BOUNDINGBOXRENDERER, this, this.reset), this.scene._preActiveMeshStage.registerStep(Bt.STEP_PREACTIVEMESH_BOUNDINGBOXRENDERER, this, this._preActiveMesh), this.scene._evaluateSubMeshStage.registerStep(Bt.STEP_EVALUATESUBMESH_BOUNDINGBOXRENDERER, this, this._evaluateSubMesh), this.scene._afterRenderingGroupDrawStage.registerStep(Bt.STEP_AFTERRENDERINGGROUPDRAW_BOUNDINGBOXRENDERER, this, this.render); } _evaluateSubMesh(e, t) { if (e.showSubMeshesBoundingBox) { const i = t.getBoundingInfo(); i != null && (i.boundingBox._tag = e.renderingGroupId, this.renderList.push(i.boundingBox)); } } _preActiveMesh(e) { if (e.showBoundingBox || this.scene.forceShowBoundingBoxes) { const t = e.getBoundingInfo(); t.boundingBox._tag = e.renderingGroupId, this.renderList.push(t.boundingBox); } } _prepareResources() { if (this._colorShader) return; this._colorShader = new Lo("colorShader", this.scene, "boundingBoxRenderer", { attributes: [Y.PositionKind], uniforms: ["world", "viewProjection", "viewProjectionR", "color"], uniformBuffers: ["BoundingBoxRenderer"] }, !1), this._colorShader.doNotSerialize = !0, this._colorShader.reservedDataStore = { hidden: !0 }, this._colorShaderForOcclusionQuery = new Lo("colorShaderOccQuery", this.scene, "boundingBoxRenderer", { attributes: [Y.PositionKind], uniforms: ["world", "viewProjection", "viewProjectionR", "color"], uniformBuffers: ["BoundingBoxRenderer"] }, !0), this._colorShaderForOcclusionQuery.doNotSerialize = !0, this._colorShaderForOcclusionQuery.reservedDataStore = { hidden: !0 }; const e = this.scene.getEngine(), t = cU({ size: 1 }); this._vertexBuffers[Y.PositionKind] = new Y(e, t.positions, Y.PositionKind, !1), this._createIndexBuffer(), this._fillIndexData = t.indices, this.onResourcesReadyObservable.notifyObservers(this); } _createIndexBuffer() { const e = this.scene.getEngine(); this._indexBuffer = e.createIndexBuffer([0, 1, 1, 2, 2, 3, 3, 0, 4, 5, 5, 6, 6, 7, 7, 4, 0, 7, 1, 6, 2, 5, 3, 4]); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
*/ rebuild() { const e = this._vertexBuffers[Y.PositionKind]; e && e._rebuild(), this._createIndexBuffer(); } /** * @internal */ reset() { this.renderList.reset(); } /** * Render the bounding boxes of a specific rendering group * @param renderingGroupId defines the rendering group to render */ render(e) { var t, i; if (this.renderList.length === 0 || !this.enabled || (this._prepareResources(), !this._colorShader.isReady())) return; const r = this.scene.getEngine(); r.setDepthWrite(!1); const s = this.scene.getTransformMatrix(); for (let n = 0; n < this.renderList.length; n++) { const a = this.renderList.data[n]; if (a._tag !== e) continue; this._createWrappersForBoundingBox(a), this.onBeforeBoxRenderingObservable.notifyObservers(a); const l = a.minimum, u = a.maximum.subtract(l), h = l.add(u.scale(0.5)), d = Ae.Scaling(u.x, u.y, u.z).multiply(Ae.Translation(h.x, h.y, h.z)).multiply(a.getWorldMatrix()), f = r.useReverseDepthBuffer; if (this.showBackLines) { const m = (t = a._drawWrapperBack) !== null && t !== void 0 ? t : this._colorShader._getDrawWrapper(); this._colorShader._preBind(m), r.bindBuffers(this._vertexBuffers, this._indexBuffer, this._colorShader.getEffect()), f ? r.setDepthFunctionToLessOrEqual() : r.setDepthFunctionToGreaterOrEqual(), this._uniformBufferBack.bindToEffect(m.effect, "BoundingBoxRenderer"), this._uniformBufferBack.updateColor4("color", this.backColor, 1), this._uniformBufferBack.updateMatrix("world", d), this._uniformBufferBack.updateMatrix("viewProjection", s), this._uniformBufferBack.update(), r.drawElementsType(At.LineListDrawMode, 0, 24); } const p = (i = a._drawWrapperFront) !== null && i !== void 0 ? i : this._colorShader._getDrawWrapper(); this._colorShader._preBind(p), r.bindBuffers(this._vertexBuffers, this._indexBuffer, this._colorShader.getEffect()), f ? 
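/*
 * Illustrative usage sketch (comment only, not executed by this bundle): driving the bounding box
 * renderer defined above from application code. `scene` and `mesh` are assumed to exist.
 *
 *   mesh.showBoundingBox = true;          // per-mesh toggle (see the property defined above)
 *   scene.forceShowBoundingBoxes = true;  // or: draw boxes for every mesh in the scene
 *   const bbRenderer = scene.getBoundingBoxRenderer();
 *   bbRenderer.frontColor = new BABYLON.Color3(1, 0, 0);   // edges in front of geometry
 *   bbRenderer.backColor = new BABYLON.Color3(0.2, 0, 0);  // edges behind geometry
 *   bbRenderer.showBackLines = true;
 */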
r.setDepthFunctionToGreater() : r.setDepthFunctionToLess(), this._uniformBufferFront.bindToEffect(p.effect, "BoundingBoxRenderer"), this._uniformBufferFront.updateColor4("color", this.frontColor, 1), this._uniformBufferFront.updateMatrix("world", d), this._uniformBufferFront.updateMatrix("viewProjection", s), this._uniformBufferFront.update(), r.drawElementsType(At.LineListDrawMode, 0, 24), this.onAfterBoxRenderingObservable.notifyObservers(a); } this._colorShader.unbind(), r.setDepthFunctionToLessOrEqual(), r.setDepthWrite(!0); } _createWrappersForBoundingBox(e) { if (!e._drawWrapperFront) { const t = this.scene.getEngine(); e._drawWrapperFront = new $o(t), e._drawWrapperBack = new $o(t), e._drawWrapperFront.setEffect(this._colorShader.getEffect()), e._drawWrapperBack.setEffect(this._colorShader.getEffect()); } } /** * In case of occlusion queries, we can render the occlusion bounding box through this method * @param mesh Define the mesh to render the occlusion bounding box for */ renderOcclusionBoundingBox(e) { const t = this.scene.getEngine(); this._renderPassIdForOcclusionQuery === void 0 && (this._renderPassIdForOcclusionQuery = t.createRenderPassId("Render pass for occlusion query")); const i = t.currentRenderPassId; t.currentRenderPassId = this._renderPassIdForOcclusionQuery, this._prepareResources(); const r = e.subMeshes[0]; if (!this._colorShaderForOcclusionQuery.isReady(e, void 0, r) || !e.hasBoundingInfo) { t.currentRenderPassId = i; return; } this._fillIndexBuffer || (this._fillIndexBuffer = t.createIndexBuffer(this._fillIndexData)); const s = t.useReverseDepthBuffer; t.setDepthWrite(!1), t.setColorWrite(!1); const n = e.getBoundingInfo().boundingBox, a = n.minimum, o = n.maximum.subtract(a), u = a.add(o.scale(0.5)), h = Ae.Scaling(o.x, o.y, o.z).multiply(Ae.Translation(u.x, u.y, u.z)).multiply(n.getWorldMatrix()), d = r._drawWrapper; this._colorShaderForOcclusionQuery._preBind(d), t.bindBuffers(this._vertexBuffers, this._fillIndexBuffer, d.effect), s ? t.setDepthFunctionToGreater() : t.setDepthFunctionToLess(), this.scene.resetCachedMaterial(), this._uniformBufferFront.bindToEffect(d.effect, "BoundingBoxRenderer"), this._uniformBufferFront.updateMatrix("world", h), this._uniformBufferFront.updateMatrix("viewProjection", this.scene.getTransformMatrix()), this._uniformBufferFront.update(), t.drawElementsType(At.TriangleFillMode, 0, 36), this._colorShaderForOcclusionQuery.unbind(), t.setDepthFunctionToLessOrEqual(), t.setDepthWrite(!0), t.setColorWrite(!0), t.currentRenderPassId = i; } /** * Dispose and release the resources attached to this renderer. 
*/ dispose() { if (this._renderPassIdForOcclusionQuery !== void 0 && (this.scene.getEngine().releaseRenderPassId(this._renderPassIdForOcclusionQuery), this._renderPassIdForOcclusionQuery = void 0), !this._colorShader) return; this.onBeforeBoxRenderingObservable.clear(), this.onAfterBoxRenderingObservable.clear(), this.onResourcesReadyObservable.clear(), this.renderList.dispose(), this._colorShader.dispose(), this._colorShaderForOcclusionQuery.dispose(), this._uniformBufferFront.dispose(), this._uniformBufferBack.dispose(); const e = this._vertexBuffers[Y.PositionKind]; e && (e.dispose(), this._vertexBuffers[Y.PositionKind] = null), this.scene.getEngine()._releaseBuffer(this._indexBuffer), this._fillIndexBuffer && (this.scene.getEngine()._releaseBuffer(this._fillIndexBuffer), this._fillIndexBuffer = null); } } ii.prototype.enableDepthRenderer = function(c, e = !1, t = !1, i = 3, r = !1) { if (c = c || this.activeCamera, !c) throw "No camera available to enable depth renderer"; if (this._depthRenderer || (this._depthRenderer = {}), !this._depthRenderer[c.id]) { const s = !!this.getEngine().getCaps().textureFloatRender; let n = 0; this.getEngine().getCaps().textureHalfFloatRender && (!t || !s) ? n = 2 : s ? n = 1 : n = 0, this._depthRenderer[c.id] = new T5(this, n, c, e, i, r); } return this._depthRenderer[c.id]; }; ii.prototype.disableDepthRenderer = function(c) { c = c || this.activeCamera, !(!c || !this._depthRenderer || !this._depthRenderer[c.id]) && this._depthRenderer[c.id].dispose(); }; class Dne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_DEPTHRENDERER, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._gatherRenderTargetsStage.registerStep(Bt.STEP_GATHERRENDERTARGETS_DEPTHRENDERER, this, this._gatherRenderTargets), this.scene._gatherActiveCameraRenderTargetsStage.registerStep(Bt.STEP_GATHERACTIVECAMERARENDERTARGETS_DEPTHRENDERER, this, this._gatherActiveCameraRenderTargets); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
*/ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { for (const e in this.scene._depthRenderer) this.scene._depthRenderer[e].dispose(); } _gatherRenderTargets(e) { if (this.scene._depthRenderer) for (const t in this.scene._depthRenderer) { const i = this.scene._depthRenderer[t]; i.enabled && !i.useOnlyInActiveCamera && e.push(i.getDepthMap()); } } _gatherActiveCameraRenderTargets(e) { if (this.scene._depthRenderer) for (const t in this.scene._depthRenderer) { const i = this.scene._depthRenderer[t]; i.enabled && i.useOnlyInActiveCamera && this.scene.activeCamera.id === t && e.push(i.getDepthMap()); } } } T5._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_DEPTHRENDERER); e || (e = new Dne(c), c._addComponent(e)); }; const f4e = "oitFinalPixelShader", p4e = `precision highp float;uniform sampler2D uFrontColor;uniform sampler2D uBackColor;void main() {ivec2 fragCoord=ivec2(gl_FragCoord.xy);vec4 frontColor=texelFetch(uFrontColor,fragCoord,0);vec4 backColor=texelFetch(uBackColor,fragCoord,0);float alphaMultiplier=1.0-frontColor.a;glFragColor=vec4( frontColor.rgb+alphaMultiplier*backColor.rgb, frontColor.a+backColor.a );}`; je.ShadersStore[f4e] = p4e; const _4e = "oitBackBlendPixelShader", m4e = `precision highp float;uniform sampler2D uBackColor;void main() {glFragColor=texelFetch(uBackColor,ivec2(gl_FragCoord.xy),0);if (glFragColor.a==0.0) { discard;}}`; je.ShadersStore[_4e] = m4e; class g4e { constructor() { this.enabled = !0, this.name = "depthPeeling", this.texturesRequired = [4]; } } class XC { /** * Number of depth peeling passes. As we are using dual depth peeling, each pass two levels of transparency are processed. */ get passCount() { return this._passCount; } set passCount(e) { this._passCount !== e && (this._passCount = e, this._createRenderPassIds()); } /** * Instructs the renderer to use render passes. It is an optimization that makes the rendering faster for some engines (like WebGPU) but that consumes more memory, so it is disabled by default. 
*/ get useRenderPasses() { return this._useRenderPasses; } set useRenderPasses(e) { this._useRenderPasses !== e && (this._useRenderPasses = e, this._createRenderPassIds()); } /** * Add a mesh in the exclusion list to prevent it to be handled by the depth peeling renderer * @param mesh The mesh to exclude from the depth peeling renderer */ addExcludedMesh(e) { this._excludedMeshes.indexOf(e.uniqueId) === -1 && this._excludedMeshes.push(e.uniqueId); } /** * Remove a mesh from the exclusion list of the depth peeling renderer * @param mesh The mesh to remove */ removeExcludedMesh(e) { const t = this._excludedMeshes.indexOf(e.uniqueId); t !== -1 && this._excludedMeshes.splice(t, 1); } /** * Instanciates the depth peeling renderer * @param scene Scene to attach to * @param passCount Number of depth layers to peel * @returns The depth peeling renderer */ constructor(e, t = 5) { if (this._thinTextures = [], this._currentPingPongState = 0, this._layoutCacheFormat = [[!0], [!0, !0], [!0, !0, !0]], this._layoutCache = [], this._candidateSubMeshes = new xc(10), this._excludedSubMeshes = new xc(10), this._excludedMeshes = [], this._colorCache = [ new Et(XC._DEPTH_CLEAR_VALUE, XC._DEPTH_CLEAR_VALUE, 0, 0), new Et(-XC._MIN_DEPTH, XC._MAX_DEPTH, 0, 0), new Et(0, 0, 0, 0) ], this._scene = e, this._engine = e.getEngine(), this._passCount = t, !e.enablePrePassRenderer()) { Ce.Warn("Depth peeling for order independant transparency could not enable PrePass, aborting."); return; } for (let i = 0; i < this._layoutCacheFormat.length; ++i) this._layoutCache[i] = this._engine.buildTextureLayout(this._layoutCacheFormat[i]); this._renderPassIds = [], this.useRenderPasses = !1, this._prePassEffectConfiguration = new g4e(), this._createTextures(), this._createEffects(); } _createRenderPassIds() { if (this._releaseRenderPassIds(), this._useRenderPasses) for (let e = 0; e < this._passCount + 1; ++e) this._renderPassIds[e] || (this._renderPassIds[e] = this._engine.createRenderPassId(`DepthPeelingRenderer - pass #${e}`)); } _releaseRenderPassIds() { for (let e = 0; e < this._renderPassIds.length; ++e) this._engine.releaseRenderPassId(this._renderPassIds[e]); this._renderPassIds = []; } _createTextures() { const e = { width: this._engine.getRenderWidth(), height: this._engine.getRenderHeight() }; this._depthMrts = [ new $8("depthPeelingDepth0MRT", e, 3, this._scene, void 0, [ "depthPeelingDepth0MRT_depth", "depthPeelingDepth0MRT_frontColor", "depthPeelingDepth0MRT_backColor" ]), new $8("depthPeelingDepth1MRT", e, 3, this._scene, void 0, [ "depthPeelingDepth1MRT_depth", "depthPeelingDepth1MRT_frontColor", "depthPeelingDepth1MRT_backColor" ]) ], this._colorMrts = [ new $8("depthPeelingColor0MRT", e, 2, this._scene, { generateDepthBuffer: !1 }, [ "depthPeelingColor0MRT_frontColor", "depthPeelingColor0MRT_backColor" ]), new $8("depthPeelingColor1MRT", e, 2, this._scene, { generateDepthBuffer: !1 }, [ "depthPeelingColor1MRT_frontColor", "depthPeelingColor1MRT_backColor" ]) ], this._blendBackMrt = new $8("depthPeelingBackMRT", e, 1, this._scene, { generateDepthBuffer: !1 }, ["depthPeelingBackMRT_blendBack"]), this._outputRT = new ra("depthPeelingOutputRTT", e, this._scene, !1); const t = [ { format: 7, samplingMode: 1, type: this._engine.getCaps().textureFloatLinearFiltering ? 
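/*
 * Illustrative usage sketch (comment only, not executed by this bundle): the scene-level depth
 * renderer installed by the component a few lines above. `scene` is assumed to exist; only the
 * camera argument is shown, the remaining optional parameters keep their defaults.
 *
 *   const depthRenderer = scene.enableDepthRenderer(scene.activeCamera);
 *   const depthTexture = depthRenderer.getDepthMap(); // render target usable by other effects
 *   // ...
 *   scene.disableDepthRenderer(scene.activeCamera);   // dispose it when no longer needed
 */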
1 : 2, label: "DepthPeelingRenderer-DepthTexture" }, { format: 5, samplingMode: 1, type: 2, label: "DepthPeelingRenderer-ColorTexture" } ]; for (let i = 0; i < 2; i++) { const r = this._engine._createInternalTexture(e, t[0], !1), s = this._engine._createInternalTexture(e, t[1], !1), n = this._engine._createInternalTexture(e, t[1], !1); this._depthMrts[i].setInternalTexture(r, 0), this._depthMrts[i].setInternalTexture(s, 1), this._depthMrts[i].setInternalTexture(n, 2), this._colorMrts[i].setInternalTexture(s, 0), this._colorMrts[i].setInternalTexture(n, 1), this._thinTextures.push(new rT(r), new rT(s), new rT(n)); } } // TODO : explore again MSAA with depth peeling when // we are able to fetch individual samples in a multisampled renderbuffer // public set samples(value: number) { // for (let i = 0; i < 2; i++) { // this._depthMrts[i].samples = value; // this._colorMrts[i].samples = value; // } // this._scene.prePassRenderer!.samples = value; // } _disposeTextures() { for (let e = 0; e < this._thinTextures.length; e++) e !== 6 && this._thinTextures[e].dispose(); for (let e = 0; e < 2; e++) this._depthMrts[e].dispose(!0), this._colorMrts[e].dispose(!0), this._blendBackMrt.dispose(!0); this._outputRT.dispose(), this._thinTextures = [], this._colorMrts = [], this._depthMrts = []; } _updateTextures() { return (this._depthMrts[0].getSize().width !== this._engine.getRenderWidth() || this._depthMrts[0].getSize().height !== this._engine.getRenderHeight()) && (this._disposeTextures(), this._createTextures()), this._updateTextureReferences(); } _updateTextureReferences() { var e; const t = this._scene.prePassRenderer; if (!t) return !1; const i = t.getIndex(4), r = !((e = t.defaultRT.textures) === null || e === void 0) && e.length ? t.defaultRT.textures[i].getInternalTexture() : null; return r ? 
(this._blendBackTexture !== r && (this._blendBackTexture = r, this._blendBackMrt.setInternalTexture(this._blendBackTexture, 0), this._thinTextures[6] && this._thinTextures[6].dispose(), this._thinTextures[6] = new rT(this._blendBackTexture), t.defaultRT.renderTarget._shareDepth(this._depthMrts[0].renderTarget)), !0) : !1; } _createEffects() { this._blendBackEffectWrapper = new t6({ fragmentShader: "oitBackBlend", useShaderStore: !0, engine: this._engine, samplerNames: ["uBackColor"], uniformNames: [] }), this._blendBackEffectWrapperPingPong = new t6({ fragmentShader: "oitBackBlend", useShaderStore: !0, engine: this._engine, samplerNames: ["uBackColor"], uniformNames: [] }), this._finalEffectWrapper = new t6({ fragmentShader: "oitFinal", useShaderStore: !0, engine: this._engine, samplerNames: ["uFrontColor", "uBackColor"], uniformNames: [] }), this._effectRenderer = new vw(this._engine); } /** * Links to the prepass renderer * @param prePassRenderer The scene PrePassRenderer */ setPrePassRenderer(e) { e.addEffectConfiguration(this._prePassEffectConfiguration); } /** * Binds depth peeling textures on an effect * @param effect The effect to bind textures on */ bind(e) { e.setTexture("oitDepthSampler", this._thinTextures[this._currentPingPongState * 3]), e.setTexture("oitFrontColorSampler", this._thinTextures[this._currentPingPongState * 3 + 1]); } _renderSubMeshes(e) { let t; this._useRenderPasses && (t = {}); for (let i = 0; i < e.length; i++) { const r = e.data[i].getMaterial(); let s = !0, n = !1; const a = e.data[i]; let l, o = !1; if (this._useRenderPasses && (l = a._getDrawWrapper(), o = !l), r && (s = r.allowShaderHotSwapping, n = r.backFaceCulling, r.allowShaderHotSwapping = !1, r.backFaceCulling = !1), a.render(!1), o && (l = a._getDrawWrapper(), l.materialContext)) { let u = t[l.materialContext.uniqueId]; u || (u = t[l.materialContext.uniqueId] = this._engine.createMaterialContext()), a._getDrawWrapper().materialContext = u; } r && (r.allowShaderHotSwapping = s, r.backFaceCulling = n); } } _finalCompose(e) { var t; ((t = this._scene.prePassRenderer) === null || t === void 0 ? void 0 : t.setCustomOutput(this._outputRT)) ? 
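/*
 * Illustrative usage sketch (comment only, not executed by this bundle): enabling dual depth
 * peeling for order-independent transparency via the scene properties defined further below.
 * `scene` and `glassMesh` are assumed to exist.
 *
 *   scene.useOrderIndependentTransparency = true;  // routes transparent submeshes through this renderer
 *   scene.depthPeelingRenderer.passCount = 4;      // each pass peels two transparency layers
 *   // Leave a specific mesh to the regular blended pass:
 *   scene.depthPeelingRenderer.addExcludedMesh(glassMesh);
 */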
this._engine.bindFramebuffer(this._outputRT.renderTarget) : this._engine.restoreDefaultFramebuffer(), this._engine.setAlphaMode(0), this._engine.applyStates(), this._engine.enableEffect(this._finalEffectWrapper._drawWrapper), this._finalEffectWrapper.effect.setTexture("uFrontColor", this._thinTextures[e * 3 + 1]), this._finalEffectWrapper.effect.setTexture("uBackColor", this._thinTextures[6]), this._effectRenderer.render(this._finalEffectWrapper); } /** * Checks if the depth peeling renderer is ready to render transparent meshes * @returns true if the depth peeling renderer is ready to render the transparent meshes */ isReady() { return this._blendBackEffectWrapper.effect.isReady() && this._blendBackEffectWrapperPingPong.effect.isReady() && this._finalEffectWrapper.effect.isReady() && this._updateTextures(); } /** * Renders transparent submeshes with depth peeling * @param transparentSubMeshes List of transparent meshes to render * @returns The array of submeshes that could not be handled by this renderer */ render(e) { if (this._candidateSubMeshes.length = 0, this._excludedSubMeshes.length = 0, !this.isReady()) return this._excludedSubMeshes; this._scene.activeCamera && this._engine.setViewport(this._scene.activeCamera.viewport); for (let s = 0; s < e.length; s++) { const n = e.data[s], a = n.getMaterial(), l = a && n.getRenderingMesh()._getRenderingFillMode(a.fillMode); a && (l === At.TriangleFanDrawMode || l === At.TriangleFillMode || l === At.TriangleStripDrawMode) && this._excludedMeshes.indexOf(n.getMesh().uniqueId) === -1 ? this._candidateSubMeshes.push(n) : this._excludedSubMeshes.push(n); } if (!this._candidateSubMeshes.length) return this._engine.bindFramebuffer(this._colorMrts[1].renderTarget), this._engine.bindAttachments(this._layoutCache[1]), this._engine.clear(this._colorCache[2], !0, !1, !1), this._engine.unBindFramebuffer(this._colorMrts[1].renderTarget), this._finalCompose(1), this._excludedSubMeshes; const t = this._engine.currentRenderPassId; this._scene.prePassRenderer._enabled = !1, this._useRenderPasses && (this._engine.currentRenderPassId = this._renderPassIds[0]), this._engine.bindFramebuffer(this._depthMrts[0].renderTarget), this._engine.bindAttachments(this._layoutCache[0]), this._engine.clear(this._colorCache[0], !0, !1, !1), this._engine.unBindFramebuffer(this._depthMrts[0].renderTarget), this._engine.bindFramebuffer(this._depthMrts[1].renderTarget), this._engine.bindAttachments(this._layoutCache[0]), this._engine.clear(this._colorCache[1], !0, !1, !1), this._engine.unBindFramebuffer(this._depthMrts[1].renderTarget), this._engine.bindFramebuffer(this._colorMrts[0].renderTarget), this._engine.bindAttachments(this._layoutCache[1]), this._engine.clear(this._colorCache[2], !0, !1, !1), this._engine.unBindFramebuffer(this._colorMrts[0].renderTarget), this._engine.bindFramebuffer(this._colorMrts[1].renderTarget), this._engine.bindAttachments(this._layoutCache[1]), this._engine.clear(this._colorCache[2], !0, !1, !1), this._engine.unBindFramebuffer(this._colorMrts[1].renderTarget), this._engine.bindFramebuffer(this._depthMrts[0].renderTarget), this._engine.bindAttachments(this._layoutCache[0]), this._engine.setAlphaMode(11), this._engine.setAlphaEquation(3), this._engine.depthCullingState.depthMask = !1, this._engine.depthCullingState.depthTest = !0, this._engine.applyStates(), this._currentPingPongState = 1, this._renderSubMeshes(this._candidateSubMeshes), this._engine.unBindFramebuffer(this._depthMrts[0].renderTarget), this._scene.resetCachedMaterial(); let i = 0, r 
= 0; for (let s = 0; s < this._passCount; s++) { i = s % 2, r = 1 - i, this._currentPingPongState = i, this._useRenderPasses && (this._engine.currentRenderPassId = this._renderPassIds[s + 1]), this._scene.activeCamera && this._engine.setViewport(this._scene.activeCamera.viewport), this._engine.bindFramebuffer(this._depthMrts[r].renderTarget), this._engine.bindAttachments(this._layoutCache[0]), this._engine.clear(this._colorCache[0], !0, !1, !1), this._engine.unBindFramebuffer(this._depthMrts[r].renderTarget), this._engine.bindFramebuffer(this._colorMrts[r].renderTarget), this._engine.bindAttachments(this._layoutCache[1]), this._engine.clear(this._colorCache[2], !0, !1, !1), this._engine.unBindFramebuffer(this._colorMrts[r].renderTarget), this._engine.bindFramebuffer(this._depthMrts[r].renderTarget), this._engine.bindAttachments(this._layoutCache[2]), this._engine.setAlphaMode(11), this._engine.setAlphaEquation(3), this._engine.depthCullingState.depthTest = !1, this._engine.applyStates(), this._renderSubMeshes(this._candidateSubMeshes), this._engine.unBindFramebuffer(this._depthMrts[r].renderTarget), this._scene.resetCachedMaterial(), this._engine.bindFramebuffer(this._blendBackMrt.renderTarget), this._engine.bindAttachments(this._layoutCache[0]), this._engine.setAlphaEquation(0), this._engine.setAlphaMode(17), this._engine.applyStates(); const n = r === 0 || !this._useRenderPasses ? this._blendBackEffectWrapper : this._blendBackEffectWrapperPingPong; this._engine.enableEffect(n._drawWrapper), n.effect.setTexture("uBackColor", this._thinTextures[r * 3 + 2]), this._effectRenderer.render(n), this._engine.unBindFramebuffer(this._blendBackMrt.renderTarget); } return this._engine.currentRenderPassId = t, this._finalCompose(r), this._scene.prePassRenderer._enabled = !0, this._engine.depthCullingState.depthMask = !0, this._engine.depthCullingState.depthTest = !0, this._excludedSubMeshes; } /** * Disposes the depth peeling renderer and associated ressources */ dispose() { this._disposeTextures(), this._blendBackEffectWrapper.dispose(), this._finalEffectWrapper.dispose(), this._effectRenderer.dispose(), this._releaseRenderPassIds(); } } XC._DEPTH_CLEAR_VALUE = -99999; XC._MIN_DEPTH = 0; XC._MAX_DEPTH = 1; Object.defineProperty(ii.prototype, "depthPeelingRenderer", { get: function() { if (!this._depthPeelingRenderer) { let c = this._getComponent(Bt.NAME_DEPTHPEELINGRENDERER); c || (c = new One(this), this._addComponent(c)); } return this._depthPeelingRenderer; }, set: function(c) { this._depthPeelingRenderer = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(ii.prototype, "useOrderIndependentTransparency", { get: function() { return this._useOrderIndependentTransparency; }, set: function(c) { var e; this._useOrderIndependentTransparency !== c && (this._useOrderIndependentTransparency = c, this.markAllMaterialsAsDirty(63), (e = this.prePassRenderer) === null || e === void 0 || e.markAsDirty()); }, enumerable: !0, configurable: !0 }); class One { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_DEPTHPEELINGRENDERER, this.scene = e, e.depthPeelingRenderer = new XC(e); } /** * Registers the component in a given scene */ register() { } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources. 
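* Note (added, hedged): the depth peeling renderer is normally driven through the scene flag rather than by
* using this component directly; a minimal sketch, assuming an existing `scene`:
* scene.useOrderIndependentTransparency = true;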
*/ dispose() { var e; (e = this.scene.depthPeelingRenderer) === null || e === void 0 || e.dispose(), this.scene.depthPeelingRenderer = null; } } const v4e = "linePixelShader", A4e = `#include uniform vec4 color; #ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include #include gl_FragColor=color; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[v4e] = A4e; const y4e = "lineVertexShader", C4e = `#include #include attribute vec3 position;attribute vec4 normal;uniform mat4 viewProjection;uniform float width;uniform float aspectRatio; #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN #include mat4 worldViewProjection=viewProjection*finalWorld;vec4 viewPosition=worldViewProjection*vec4(position,1.0);vec4 viewPositionNext=worldViewProjection*vec4(normal.xyz,1.0);vec2 currentScreen=viewPosition.xy/viewPosition.w;vec2 nextScreen=viewPositionNext.xy/viewPositionNext.w;currentScreen.x*=aspectRatio;nextScreen.x*=aspectRatio;vec2 dir=normalize(nextScreen-currentScreen);vec2 normalDir=vec2(-dir.y,dir.x);normalDir*=width/2.0;normalDir.x/=aspectRatio;vec4 offset=vec4(normalDir*normal.w,0.0,0.0);gl_Position=viewPosition+offset; #if defined(CLIPPLANE) || defined(CLIPPLANE2) || defined(CLIPPLANE3) || defined(CLIPPLANE4) || defined(CLIPPLANE5) || defined(CLIPPLANE6) vec4 worldPos=finalWorld*vec4(position,1.0); #include #endif #include #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[y4e] = C4e; xr.prototype.disableEdgesRendering = function() { return this._edgesRenderer && (this._edgesRenderer.dispose(), this._edgesRenderer = null), this; }; xr.prototype.enableEdgesRendering = function(c = 0.95, e = !1, t) { return this.disableEdgesRendering(), this._edgesRenderer = new bN(this, c, e, !0, t), this; }; Object.defineProperty(xr.prototype, "edgesRenderer", { get: function() { return this._edgesRenderer; }, enumerable: !0, configurable: !0 }); Ag.prototype.enableEdgesRendering = function(c = 0.95, e = !1) { return this.disableEdgesRendering(), this._edgesRenderer = new wne(this, c, e), this; }; XK.prototype.enableEdgesRendering = function(c = 0.95, e = !1) { return Ag.prototype.enableEdgesRendering.apply(this, arguments), this; }; class x4e { constructor() { this.edges = [], this.edgesConnectedCount = 0; } } class bN { /** Gets the vertices generated by the edge renderer */ get linesPositions() { return this._linesPositions; } /** Gets the normals generated by the edge renderer */ get linesNormals() { return this._linesNormals; } /** Gets the indices generated by the edge renderer */ get linesIndices() { return this._linesIndices; } /** * Gets or sets the shader used to draw the lines */ get lineShader() { return this._lineShader; } set lineShader(e) { this._lineShader = e; } static _GetShader(e) { if (!e._edgeRenderLineShader) { const t = new Lo("lineShader", e, "line", { attributes: ["position", "normal"], uniforms: ["world", "viewProjection", "color", "width", "aspectRatio"] }, !1); t.disableDepthWrite = !0, t.backFaceCulling = !1, t.checkReadyOnEveryCall = e.getEngine().isWebGPU, e._edgeRenderLineShader = t; } return e._edgeRenderLineShader; } /** * Creates an instance of the EdgesRenderer. It is primarily use to display edges of a mesh. 
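* A hedged usage sketch (not part of the original doc; assumes an existing `mesh` and the BABYLON namespace):
* mesh.enableEdgesRendering(0.95);
* mesh.edgesWidth = 4;
* mesh.edgesColor = new BABYLON.Color4(0, 0, 1, 1);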
* Beware when you use this class with complex objects as the adjacencies computation can be really long * @param source Mesh used to create edges * @param epsilon sum of angles in adjacency to check for edge * @param checkVerticesInsteadOfIndices bases the edges detection on vertices vs indices. Note that this parameter is not used if options.useAlternateEdgeFinder = true * @param generateEdgesLines - should generate Lines or only prepare resources. * @param options The options to apply when generating the edges */ constructor(e, t = 0.95, i = !1, r = !0, s) { var n; this.edgesWidthScalerForOrthographic = 1e3, this.edgesWidthScalerForPerspective = 50, this._linesPositions = new Array(), this._linesNormals = new Array(), this._linesIndices = new Array(), this._buffers = {}, this._buffersForInstances = {}, this._checkVerticesInsteadOfIndices = !1, this.isEnabled = !0, this.customInstances = new xc(32), this._source = e, this._checkVerticesInsteadOfIndices = i, this._options = s ?? null, this._epsilon = t, this._source.getScene().getEngine().isWebGPU && (this._drawWrapper = new $o(e.getEngine())), this._prepareRessources(), r && (!((n = s == null ? void 0 : s.useAlternateEdgeFinder) !== null && n !== void 0) || n ? this._generateEdgesLinesAlternate() : this._generateEdgesLines()), this._meshRebuildObserver = this._source.onRebuildObservable.add(() => { this._rebuild(); }), this._meshDisposeObserver = this._source.onDisposeObservable.add(() => { this.dispose(); }); } _prepareRessources() { this._lineShader || (this._lineShader = bN._GetShader(this._source.getScene())); } /** @internal */ _rebuild() { let e = this._buffers[Y.PositionKind]; e && e._rebuild(), e = this._buffers[Y.NormalKind], e && e._rebuild(); const i = this._source.getScene().getEngine(); this._ib = i.createIndexBuffer(this._linesIndices); } /** * Releases the required resources for the edges renderer */ dispose() { var e; this._source.onRebuildObservable.remove(this._meshRebuildObserver), this._source.onDisposeObservable.remove(this._meshDisposeObserver); let t = this._buffers[Y.PositionKind]; t && (t.dispose(), this._buffers[Y.PositionKind] = null), t = this._buffers[Y.NormalKind], t && (t.dispose(), this._buffers[Y.NormalKind] = null), this._ib && this._source.getScene().getEngine()._releaseBuffer(this._ib), this._lineShader.dispose(), (e = this._drawWrapper) === null || e === void 0 || e.dispose(); } _processEdgeForAdjacencies(e, t, i, r, s) { return e === i && t === r || e === r && t === i ? 0 : e === r && t === s || e === s && t === r ? 1 : e === s && t === i || e === i && t === s ? 2 : -1; } _processEdgeForAdjacenciesWithVertices(e, t, i, r, s) { return e.equalsWithEpsilon(i, 1e-10) && t.equalsWithEpsilon(r, 1e-10) || e.equalsWithEpsilon(r, 1e-10) && t.equalsWithEpsilon(i, 1e-10) ? 0 : e.equalsWithEpsilon(r, 1e-10) && t.equalsWithEpsilon(s, 1e-10) || e.equalsWithEpsilon(s, 1e-10) && t.equalsWithEpsilon(r, 1e-10) ? 1 : e.equalsWithEpsilon(s, 1e-10) && t.equalsWithEpsilon(i, 1e-10) || e.equalsWithEpsilon(i, 1e-10) && t.equalsWithEpsilon(s, 1e-10) ? 2 : -1; } /** * Checks if the pair of p0 and p1 is en edge * @param faceIndex * @param edge * @param faceNormals * @param p0 * @param p1 * @private */ _checkEdge(e, t, i, r, s) { let n; t === void 0 ? 
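/* If there is no adjacent face (t === undefined) the edge is always kept; otherwise it is kept only when the
   dot product of the two face normals is below the epsilon threshold, i.e. the dihedral angle is sharp enough. */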
n = !0 : n = D.Dot(i[e], i[t]) < this._epsilon, n && this.createLine(r, s, this._linesPositions.length / 3); } /** * push line into the position, normal and index buffer * @param p0 * @param p1 * @param offset * @protected */ // eslint-disable-next-line @typescript-eslint/naming-convention createLine(e, t, i) { this._linesPositions.push(e.x, e.y, e.z, e.x, e.y, e.z, t.x, t.y, t.z, t.x, t.y, t.z), this._linesNormals.push(t.x, t.y, t.z, -1, t.x, t.y, t.z, 1, e.x, e.y, e.z, -1, e.x, e.y, e.z, 1), this._linesIndices.push(i, i + 1, i + 2, i, i + 2, i + 3); } /** * See https://playground.babylonjs.com/#R3JR6V#1 for a visual display of the algorithm * @param edgePoints * @param indexTriangle * @param indices * @param remapVertexIndices */ _tessellateTriangle(e, t, i, r) { const s = (w, V, k) => { k >= 0 && V.push(k); for (let L = 0; L < w.length; ++L) V.push(w[L][0]); }; let n = 0; e[1].length >= e[0].length && e[1].length >= e[2].length ? n = 1 : e[2].length >= e[0].length && e[2].length >= e[1].length && (n = 2); for (let w = 0; w < 3; ++w) w === n ? e[w].sort((V, k) => V[1] < k[1] ? -1 : V[1] > k[1] ? 1 : 0) : e[w].sort((V, k) => V[1] > k[1] ? -1 : V[1] < k[1] ? 1 : 0); const a = [], l = []; s(e[n], a, -1); const o = a.length; for (let w = n + 2; w >= n + 1; --w) s(e[w % 3], l, w !== n + 2 ? r[i[t + (w + 1) % 3]] : -1); const u = l.length, h = 0, d = 0; i.push(r[i[t + n]], a[0], l[0]), i.push(r[i[t + (n + 1) % 3]], l[u - 1], a[o - 1]); const f = o <= u, p = f ? o : u, m = f ? u : o, _ = f ? o - 1 : u - 1, v = f ? 0 : 1; let C = o + u - 2, x = f ? h : d, b = f ? d : h; const S = f ? a : l, M = f ? l : a; let R = 0; for (; C-- > 0; ) { v ? i.push(S[x], M[b]) : i.push(M[b], S[x]), R += p; let w; R >= m && x < _ ? (w = S[++x], R -= m) : w = M[++b], i.push(w); } i[t + 0] = i[i.length - 3], i[t + 1] = i[i.length - 2], i[t + 2] = i[i.length - 1], i.length = i.length - 3; } _generateEdgesLinesAlternate() { var e, t, i, r, s, n, a, l, o, u; const h = this._source.getVerticesData(Y.PositionKind); let d = this._source.getIndices(); if (!d || !h) return; Array.isArray(d) || (d = Array.from(d)); const f = (t = (e = this._options) === null || e === void 0 ? void 0 : e.useFastVertexMerger) !== null && t !== void 0 ? t : !0, p = f ? Math.round(-Math.log((r = (i = this._options) === null || i === void 0 ? void 0 : i.epsilonVertexMerge) !== null && r !== void 0 ? r : 1e-6) / Math.log(10)) : (n = (s = this._options) === null || s === void 0 ? void 0 : s.epsilonVertexMerge) !== null && n !== void 0 ? n : 1e-6, m = [], _ = []; if (f) { const x = {}; for (let b = 0; b < h.length; b += 3) { const S = h[b + 0], M = h[b + 1], R = h[b + 2], w = S.toFixed(p) + "|" + M.toFixed(p) + "|" + R.toFixed(p); if (x[w] !== void 0) m.push(x[w]); else { const V = b / 3; x[w] = V, m.push(V), _.push(V); } } } else for (let x = 0; x < h.length; x += 3) { const b = h[x + 0], S = h[x + 1], M = h[x + 2]; let R = !1; for (let w = 0; w < x && !R; w += 3) { const V = h[w + 0], k = h[w + 1], L = h[w + 2]; if (Math.abs(b - V) < p && Math.abs(S - k) < p && Math.abs(M - L) < p) { m.push(w / 3), R = !0; break; } } R || (m.push(x / 3), _.push(x / 3)); } if (!((a = this._options) === null || a === void 0) && a.applyTessellation) { const x = (o = (l = this._options) === null || l === void 0 ? void 0 : l.epsilonVertexAligned) !== null && o !== void 0 ? 
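/* applyTessellation path (descriptive note): vertices lying on a triangle edge (within epsilonVertexAligned,
   default 1e-6 below) are collected per edge so that T-junction triangles can be retessellated before the
   shared-edge detection that follows. */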
o : 1e-6, b = []; for (let S = 0; S < d.length; S += 3) { let M; for (let R = 0; R < 3; ++R) { const w = m[d[S + R]], V = m[d[S + (R + 1) % 3]], k = m[d[S + (R + 2) % 3]]; if (w === V) continue; const L = h[w * 3 + 0], B = h[w * 3 + 1], U = h[w * 3 + 2], K = h[V * 3 + 0], ee = h[V * 3 + 1], Z = h[V * 3 + 2], q = Math.sqrt((K - L) * (K - L) + (ee - B) * (ee - B) + (Z - U) * (Z - U)); for (let le = 0; le < _.length - 1; le++) { const ie = _[le]; if (ie === w || ie === V || ie === k) continue; const $ = h[ie * 3 + 0], j = h[ie * 3 + 1], J = h[ie * 3 + 2], ne = Math.sqrt(($ - L) * ($ - L) + (j - B) * (j - B) + (J - U) * (J - U)), pe = Math.sqrt(($ - K) * ($ - K) + (j - ee) * (j - ee) + (J - Z) * (J - Z)); Math.abs(ne + pe - q) < x && (M || (M = { index: S, edgesPoints: [[], [], []] }, b.push(M)), M.edgesPoints[R].push([ie, ne])); } } } for (let S = 0; S < b.length; ++S) { const M = b[S]; this._tessellateTriangle(M.edgesPoints, M.index, d, m); } b.length = 0; } const v = {}; for (let x = 0; x < d.length; x += 3) { let b; for (let S = 0; S < 3; ++S) { let M = m[d[x + S]], R = m[d[x + (S + 1) % 3]]; const w = m[d[x + (S + 2) % 3]]; if (M === R || (M === w || R === w) && (!((u = this._options) === null || u === void 0) && u.removeDegeneratedTriangles)) continue; if (de.Vector3[0].copyFromFloats(h[M * 3 + 0], h[M * 3 + 1], h[M * 3 + 2]), de.Vector3[1].copyFromFloats(h[R * 3 + 0], h[R * 3 + 1], h[R * 3 + 2]), de.Vector3[2].copyFromFloats(h[w * 3 + 0], h[w * 3 + 1], h[w * 3 + 2]), b || (de.Vector3[1].subtractToRef(de.Vector3[0], de.Vector3[3]), de.Vector3[2].subtractToRef(de.Vector3[1], de.Vector3[4]), b = D.Cross(de.Vector3[3], de.Vector3[4]), b.normalize()), M > R) { const L = M; M = R, R = L; } const V = M + "_" + R, k = v[V]; k ? k.done || (D.Dot(b, k.normal) < this._epsilon && this.createLine(de.Vector3[0], de.Vector3[1], this._linesPositions.length / 3), k.done = !0) : v[V] = { normal: b, done: !1, index: x, i: S }; } } for (const x in v) { const b = v[x]; if (!b.done) { const S = m[d[b.index + b.i]], M = m[d[b.index + (b.i + 1) % 3]]; de.Vector3[0].copyFromFloats(h[S * 3 + 0], h[S * 3 + 1], h[S * 3 + 2]), de.Vector3[1].copyFromFloats(h[M * 3 + 0], h[M * 3 + 1], h[M * 3 + 2]), this.createLine(de.Vector3[0], de.Vector3[1], this._linesPositions.length / 3); } } const C = this._source.getScene().getEngine(); this._buffers[Y.PositionKind] = new Y(C, this._linesPositions, Y.PositionKind, !1), this._buffers[Y.NormalKind] = new Y(C, this._linesNormals, Y.NormalKind, !1, !1, 4), this._buffersForInstances[Y.PositionKind] = this._buffers[Y.PositionKind], this._buffersForInstances[Y.NormalKind] = this._buffers[Y.NormalKind], this._ib = C.createIndexBuffer(this._linesIndices), this._indicesCount = this._linesIndices.length; } /** * Generates lines edges from adjacencjes * @private */ _generateEdgesLines() { const e = this._source.getVerticesData(Y.PositionKind), t = this._source.getIndices(); if (!t || !e) return; const i = [], r = []; let s, n; for (s = 0; s < t.length; s += 3) { n = new x4e(); const l = t[s], o = t[s + 1], u = t[s + 2]; n.p0 = new D(e[l * 3], e[l * 3 + 1], e[l * 3 + 2]), n.p1 = new D(e[o * 3], e[o * 3 + 1], e[o * 3 + 2]), n.p2 = new D(e[u * 3], e[u * 3 + 1], e[u * 3 + 2]); const h = D.Cross(n.p1.subtract(n.p0), n.p2.subtract(n.p1)); h.normalize(), r.push(h), i.push(n); } for (s = 0; s < i.length; s++) { n = i[s]; for (let l = s + 1; l < i.length; l++) { const o = i[l]; if (n.edgesConnectedCount === 3) break; if (o.edgesConnectedCount === 3) continue; const u = t[l * 3], h = t[l * 3 + 1], 
d = t[l * 3 + 2]; for (let f = 0; f < 3; f++) { let p = 0; if (n.edges[f] === void 0) { switch (f) { case 0: this._checkVerticesInsteadOfIndices ? p = this._processEdgeForAdjacenciesWithVertices(n.p0, n.p1, o.p0, o.p1, o.p2) : p = this._processEdgeForAdjacencies(t[s * 3], t[s * 3 + 1], u, h, d); break; case 1: this._checkVerticesInsteadOfIndices ? p = this._processEdgeForAdjacenciesWithVertices(n.p1, n.p2, o.p0, o.p1, o.p2) : p = this._processEdgeForAdjacencies(t[s * 3 + 1], t[s * 3 + 2], u, h, d); break; case 2: this._checkVerticesInsteadOfIndices ? p = this._processEdgeForAdjacenciesWithVertices(n.p2, n.p0, o.p0, o.p1, o.p2) : p = this._processEdgeForAdjacencies(t[s * 3 + 2], t[s * 3], u, h, d); break; } if (p !== -1 && (n.edges[f] = l, o.edges[p] = s, n.edgesConnectedCount++, o.edgesConnectedCount++, n.edgesConnectedCount === 3)) break; } } } } for (s = 0; s < i.length; s++) { const l = i[s]; this._checkEdge(s, l.edges[0], r, l.p0, l.p1), this._checkEdge(s, l.edges[1], r, l.p1, l.p2), this._checkEdge(s, l.edges[2], r, l.p2, l.p0); } const a = this._source.getScene().getEngine(); this._buffers[Y.PositionKind] = new Y(a, this._linesPositions, Y.PositionKind, !1), this._buffers[Y.NormalKind] = new Y(a, this._linesNormals, Y.NormalKind, !1, !1, 4), this._buffersForInstances[Y.PositionKind] = this._buffers[Y.PositionKind], this._buffersForInstances[Y.NormalKind] = this._buffers[Y.NormalKind], this._ib = a.createIndexBuffer(this._linesIndices), this._indicesCount = this._linesIndices.length; } /** * Checks whether or not the edges renderer is ready to render. * @returns true if ready, otherwise false. */ isReady() { return this._lineShader.isReady(this._source, this._source.hasInstances && this.customInstances.length > 0 || this._source.hasThinInstances); } /** * Renders the edges of the attached mesh, */ render() { const e = this._source.getScene(), t = this._lineShader._getDrawWrapper(); if (this._drawWrapper && this._lineShader._setDrawWrapper(this._drawWrapper), !this.isReady() || !e.activeCamera) { this._lineShader._setDrawWrapper(t); return; } const i = this._source.hasInstances && this.customInstances.length > 0, r = i || this._source.hasThinInstances; let s = 0; if (r) if (this._buffersForInstances.world0 = this._source.getVertexBuffer("world0"), this._buffersForInstances.world1 = this._source.getVertexBuffer("world1"), this._buffersForInstances.world2 = this._source.getVertexBuffer("world2"), this._buffersForInstances.world3 = this._source.getVertexBuffer("world3"), i) { const a = this._source._instanceDataStorage; if (s = this.customInstances.length, !a.instancesData) { this._source.getScene()._activeMeshesFrozen || this.customInstances.reset(); return; } if (!a.isFrozen) { let l = 0; for (let o = 0; o < s; ++o) this.customInstances.data[o].copyToArray(a.instancesData, l), l += 16; a.instancesBuffer.updateDirectly(a.instancesData, 0, s); } } else s = this._source.thinInstanceCount; const n = e.getEngine(); this._lineShader._preBind(), this._source.edgesColor.a !== 1 ? n.setAlphaMode(2) : n.setAlphaMode(0), n.bindBuffers(r ? this._buffersForInstances : this._buffers, this._ib, this._lineShader.getEffect()), e.resetCachedMaterial(), this._lineShader.setColor4("color", this._source.edgesColor), e.activeCamera.mode === Ai.ORTHOGRAPHIC_CAMERA ? 
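/* Orthographic cameras use a larger width divisor (edgesWidthScalerForOrthographic, default 1000) than
   perspective ones (edgesWidthScalerForPerspective, default 50), since line width is not attenuated by
   perspective in that mode. */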
this._lineShader.setFloat("width", this._source.edgesWidth / this.edgesWidthScalerForOrthographic) : this._lineShader.setFloat("width", this._source.edgesWidth / this.edgesWidthScalerForPerspective), this._lineShader.setFloat("aspectRatio", n.getAspectRatio(e.activeCamera)), this._lineShader.bind(this._source.getWorldMatrix()), n.drawElementsType(At.TriangleFillMode, 0, this._indicesCount, s), this._lineShader.unbind(), r && n.unbindInstanceAttributes(), this._source.getScene()._activeMeshesFrozen || this.customInstances.reset(), this._lineShader._setDrawWrapper(t); } } class wne extends bN { /** * This constructor turns off auto generating edges line in Edges Renderer to make it here. * @param source LineMesh used to generate edges * @param epsilon not important (specified angle for edge detection) * @param checkVerticesInsteadOfIndices not important for LineMesh */ constructor(e, t = 0.95, i = !1) { super(e, t, i, !1), this._generateEdgesLines(); } /** * Generate edges for each line in LinesMesh. Every Line should be rendered as edge. */ _generateEdgesLines() { const e = this._source.getVerticesData(Y.PositionKind), t = this._source.getIndices(); if (!t || !e) return; const i = de.Vector3[0], r = de.Vector3[1], s = t.length - 1; for (let a = 0, l = 0; a < s; a += 2, l += 4) D.FromArrayToRef(e, 3 * t[a], i), D.FromArrayToRef(e, 3 * t[a + 1], r), this.createLine(i, r, l); const n = this._source.getScene().getEngine(); this._buffers[Y.PositionKind] = new Y(n, this._linesPositions, Y.PositionKind, !1), this._buffers[Y.NormalKind] = new Y(n, this._linesNormals, Y.NormalKind, !1, !1, 4), this._ib = n.createIndexBuffer(this._linesIndices), this._indicesCount = this._linesIndices.length; } } class b4e extends $8 { constructor(e, t, i, r, s, n) { super(e, i, r, s, n), this._beforeCompositionPostProcesses = [], this._internalTextureDirty = !1, this.enabled = !1, this.renderTargetTexture = null, this.renderTargetTexture = t; } /** * Creates a composition effect for this RT * @internal */ _createCompositionEffect() { this.imageProcessingPostProcess = new QU("prePassComposition", 1, null, void 0, this._engine), this.imageProcessingPostProcess._updateParameters(); } /** * Checks that the size of this RT is still adapted to the desired render size. * @internal */ _checkSize() { const e = this._engine.getRenderWidth(!0), t = this._engine.getRenderHeight(!0), i = this.getRenderWidth(), r = this.getRenderHeight(); (i !== e || r !== t) && (this.resize({ width: e, height: t }), this._internalTextureDirty = !0); } /** * Changes the number of render targets in this MRT * Be careful as it will recreate all the data in the new texture. * @param count new texture count * @param options Specifies texture types and sampling modes for new textures * @param textureNames Specifies the names of the textures (optional) */ updateCount(e, t, i) { super.updateCount(e, t, i), this._internalTextureDirty = !0; } /** * Resets the post processes chains applied to this RT. 
* @internal */ _resetPostProcessChain() { this._beforeCompositionPostProcesses.length = 0; } /** * Diposes this render target */ dispose() { const e = this._scene; if (super.dispose(), e && e.prePassRenderer) { const t = e.prePassRenderer.renderTargets.indexOf(this); t !== -1 && e.prePassRenderer.renderTargets.splice(t, 1); } this.imageProcessingPostProcess && this.imageProcessingPostProcess.dispose(), this.renderTargetTexture && (this.renderTargetTexture._prePassRenderTarget = null), this._outputPostProcess && (this._outputPostProcess.autoClear = !0, this._outputPostProcess.restoreDefaultInputTexture()); } } class Td { /** * Indicates if the prepass renderer is generating normals in world space or camera space (default: camera space) */ get generateNormalsInWorldSpace() { return this._generateNormalsInWorldSpace; } set generateNormalsInWorldSpace(e) { this._generateNormalsInWorldSpace !== e && (this._generateNormalsInWorldSpace = e, this._markAllMaterialsAsPrePassDirty()); } /** * Returns the index of a texture in the multi render target texture array. * @param type Texture type * @returns The index */ getIndex(e) { return this._textureIndices[e]; } /** * How many samples are used for MSAA of the scene render target */ get samples() { return this.defaultRT.samples; } set samples(e) { this.defaultRT.samples = e; } /** * If set to true (default: false), the depth texture will be cleared with the depth value corresponding to the far plane (1 in normal mode, 0 in reverse depth buffer mode) * If set to false, the depth texture is always cleared with 0. */ get useSpecificClearForDepthTexture() { return this._useSpecificClearForDepthTexture; } set useSpecificClearForDepthTexture(e) { this._useSpecificClearForDepthTexture !== e && (this._useSpecificClearForDepthTexture = e, this._isDirty = !0); } /** * @returns the prepass render target for the rendering pass. * If we are currently rendering a render target, it returns the PrePassRenderTarget * associated with that render target. Otherwise, it returns the scene default PrePassRenderTarget */ getRenderTarget() { return this._currentTarget; } /** * @internal * Managed by the scene component * @param prePassRenderTarget */ _setRenderTarget(e) { var t, i; e ? this._currentTarget = e : (this._currentTarget = this.defaultRT, this._engine.currentRenderPassId = (i = (t = this._scene.activeCamera) === null || t === void 0 ? void 0 : t.renderPassId) !== null && i !== void 0 ? i : this._currentTarget.renderPassId); } /** * Returns true if the currently rendered prePassRenderTarget is the one * associated with the scene. 
*/ get currentRTisSceneRT() { return this._currentTarget === this.defaultRT; } _refreshGeometryBufferRendererLink() { if (this.doNotUseGeometryRendererFallback) this._geometryBuffer && this._geometryBuffer._unlinkPrePassRenderer(), this._geometryBuffer = null, this._scene.disableGeometryBufferRenderer(); else { if (this._geometryBuffer = this._scene.enableGeometryBufferRenderer(), !this._geometryBuffer) { this.doNotUseGeometryRendererFallback = !0; return; } this._geometryBuffer._linkPrePassRenderer(this); } } /** * Indicates if the prepass is enabled */ get enabled() { return this._enabled; } /** * Instantiates a prepass renderer * @param scene The scene */ constructor(e) { this.excludedSkinnedMesh = [], this.excludedMaterials = [], this.mrtCount = 0, this._mrtTypes = [], this._mrtFormats = [], this._mrtLayout = [], this._mrtNames = [], this._textureIndices = [], this._generateNormalsInWorldSpace = !1, this._useSpecificClearForDepthTexture = !1, this._isDirty = !0, this._effectConfigurations = [], this.doNotUseGeometryRendererFallback = !0, this.renderTargets = [], this._clearColor = new Et(0, 0, 0, 0), this._clearDepthColor = new Et(1e8, 0, 0, 1), this._enabled = !1, this._needsCompositionForThisPass = !1, this.disableGammaTransform = !1, this._scene = e, this._engine = e.getEngine(); let t = 0; this._engine._caps.textureFloat && this._engine._caps.textureFloatLinearFiltering ? t = 1 : this._engine._caps.textureHalfFloat && this._engine._caps.textureHalfFloatLinearFiltering && (t = 2); for (let i = 0; i < Td.TextureFormats.length; ++i) { const r = Td.TextureFormats[i].format; Td.TextureFormats[i].type === 1 && (Td.TextureFormats[5].type = t, (r === 6 || r === 7 || r === 5) && !this._engine._caps.supportFloatTexturesResolve && (Td.TextureFormats[5].type = 2)); } Td._SceneComponentInitialization(this._scene), this.defaultRT = this._createRenderTarget("sceneprePassRT", null), this._currentTarget = this.defaultRT; } /** * Creates a new PrePassRenderTarget * This should be the only way to instantiate a `PrePassRenderTarget` * @param name Name of the `PrePassRenderTarget` * @param renderTargetTexture RenderTarget the `PrePassRenderTarget` will be attached to. * Can be `null` if the created `PrePassRenderTarget` is attached to the scene (default framebuffer). * @internal */ _createRenderTarget(e, t) { const i = new b4e(e, t, { width: this._engine.getRenderWidth(), height: this._engine.getRenderHeight() }, 0, this._scene, { generateMipMaps: !1, generateStencilBuffer: this._engine.isStencilEnable, defaultType: 0, types: [], drawOnlyOnFirstAttachmentByDefault: !0 }); return this.renderTargets.push(i), this._enabled && this._update(), i; } /** * Indicates if rendering a prepass is supported */ get isSupported() { return this._scene.getEngine().getCaps().drawBuffersExtension; } /** * Sets the proper output textures to draw in the engine. * @param effect The effect that is drawn. It can be or not be compatible with drawing to several output textures. * @param subMesh Submesh on which the effect is applied */ bindAttachmentsForEffect(e, t) { const i = t.getMaterial(), r = i && i.isPrePassCapable, s = i && this.excludedMaterials.indexOf(i) !== -1; this.enabled && this._currentTarget.enabled && (e._multiTarget && r && !s ? this._engine.bindAttachments(this._multiRenderAttachments) : (this._engine._currentRenderTarget ? 
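/* Fallback when the effect cannot write to the multi render target (material not prePass capable or explicitly
   excluded): bind the default single attachment layout and, when rendering the scene render target, let the
   geometry buffer renderer produce the missing data for this mesh. */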
this._engine.bindAttachments(this._defaultAttachments) : this._engine.restoreSingleAttachment(), this._geometryBuffer && this.currentRTisSceneRT && !s && this._geometryBuffer.renderList.push(t.getRenderingMesh()))); } _reinitializeAttachments() { const e = [], t = [!1], i = [!1], r = [!0]; for (let s = 0; s < this.mrtCount; s++) e.push(!0), s > 0 && (this._useSpecificClearForDepthTexture && this._mrtLayout[s] === 5 ? (t.push(!1), i.push(!0)) : (t.push(!0), i.push(!1)), r.push(!1)); this._multiRenderAttachments = this._engine.buildTextureLayout(e), this._clearAttachments = this._engine.buildTextureLayout(t), this._clearDepthAttachments = this._engine.buildTextureLayout(i), this._defaultAttachments = this._engine.buildTextureLayout(r); } _resetLayout() { for (let e = 0; e < Td.TextureFormats.length; e++) this._textureIndices[Td.TextureFormats[e].purpose] = -1; this._textureIndices[4] = 0, this._mrtLayout = [4], this._mrtTypes = [Td.TextureFormats[4].type], this._mrtFormats = [Td.TextureFormats[4].format], this._mrtNames = [Td.TextureFormats[4].name], this.mrtCount = 1; } _updateGeometryBufferLayout() { if (this._refreshGeometryBufferRendererLink(), this._geometryBuffer) { this._geometryBuffer._resetLayout(); const e = []; for (let i = 0; i < this._mrtLayout.length; i++) e.push(!1); this._geometryBuffer._linkInternalTexture(this.defaultRT.getInternalTexture()); const t = [ { prePassConstant: 5, geometryBufferConstant: _o.DEPTH_TEXTURE_TYPE }, { prePassConstant: 6, geometryBufferConstant: _o.NORMAL_TEXTURE_TYPE }, { prePassConstant: 1, geometryBufferConstant: _o.POSITION_TEXTURE_TYPE }, { prePassConstant: 3, geometryBufferConstant: _o.REFLECTIVITY_TEXTURE_TYPE }, { prePassConstant: 2, geometryBufferConstant: _o.VELOCITY_TEXTURE_TYPE } ]; for (let i = 0; i < t.length; i++) { const r = this._mrtLayout.indexOf(t[i].prePassConstant); r !== -1 && (this._geometryBuffer._forceTextureType(t[i].geometryBufferConstant, r), e[r] = !0); } this._geometryBuffer._setAttachments(this._engine.buildTextureLayout(e)); } } /** * Restores attachments for single texture draw. */ restoreAttachments() { this.enabled && this._currentTarget.enabled && this._defaultAttachments && (this._engine._currentRenderTarget ? this._engine.bindAttachments(this._defaultAttachments) : this._engine.restoreSingleAttachment()); } /** * @internal */ // eslint-disable-next-line @typescript-eslint/no-unused-vars _beforeDraw(e, t, i) { this._isDirty && this._update(), !(!this._enabled || !this._currentTarget.enabled) && (this._geometryBuffer && (this._geometryBuffer.renderList = []), this._setupOutputForThisPass(this._currentTarget, e)); } _prepareFrame(e, t, i) { e.renderTargetTexture ? e.renderTargetTexture._prepareFrame(this._scene, t, i, e.renderTargetTexture.useCameraPostProcesses) : this._postProcessesSourceForThisPass.length ? this._scene.postProcessManager._prepareFrame() : this._engine.restoreDefaultFramebuffer(); } /** * Sets an intermediary texture between prepass and postprocesses. This texture * will be used as input for post processes * @param rt * @returns true if there are postprocesses that will use this texture, * false if there is no postprocesses - and the function has no effect */ setCustomOutput(e) { const t = this._postProcessesSourceForThisPass[0]; return t ? (t.inputTexture = e.renderTarget, !0) : !1; } _renderPostProcesses(e, t) { var i; const r = this._postProcessesSourceForThisPass[0], s = r ? r.inputTexture : e.renderTargetTexture ? 
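/* Destination of the post process chain: the input texture of the first camera post process when one exists,
   otherwise the render target of this prepass target's texture, otherwise null (default framebuffer). */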
e.renderTargetTexture.renderTarget : null; let n = this._currentTarget._beforeCompositionPostProcesses; this._needsCompositionForThisPass && (n = n.concat([this._currentTarget.imageProcessingPostProcess])), n.length && (this._scene.postProcessManager._prepareFrame((i = this._currentTarget.renderTarget) === null || i === void 0 ? void 0 : i.texture, n), this._scene.postProcessManager.directRender(n, s, !1, t)); } /** * @internal */ _afterDraw(e, t) { this._enabled && this._currentTarget.enabled && (this._prepareFrame(this._currentTarget, e, t), this._renderPostProcesses(this._currentTarget, e)); } /** * Clears the current prepass render target (in the sense of settings pixels to the scene clear color value) * @internal */ _clear() { this._enabled && this._currentTarget.enabled && (this._bindFrameBuffer(), this._engine.bindAttachments(this._clearAttachments), this._engine.clear(this._clearColor, !0, !1, !1), this._useSpecificClearForDepthTexture && (this._engine.bindAttachments(this._clearDepthAttachments), this._engine.clear(this._clearDepthColor, !0, !1, !1)), this._engine.bindAttachments(this._defaultAttachments)); } // eslint-disable-next-line @typescript-eslint/no-unused-vars _bindFrameBuffer() { if (this._enabled && this._currentTarget.enabled) { this._currentTarget._checkSize(); const e = this._currentTarget.renderTarget; e && this._engine.bindFramebuffer(e); } } _setEnabled(e) { this._enabled = e; } _setRenderTargetEnabled(e, t) { e.enabled = t, t || this._unlinkInternalTexture(e); } /** * Adds an effect configuration to the prepass render target. * If an effect has already been added, it won't add it twice and will return the configuration * already present. * @param cfg the effect configuration * @returns the effect configuration now used by the prepass */ addEffectConfiguration(e) { for (let t = 0; t < this._effectConfigurations.length; t++) if (this._effectConfigurations[t].name === e.name) return this._effectConfigurations[t]; return this._effectConfigurations.push(e), e; } /** * Retrieves an effect configuration by name * @param name * @returns the effect configuration, or null if not present */ getEffectConfiguration(e) { for (let t = 0; t < this._effectConfigurations.length; t++) if (this._effectConfigurations[t].name === e) return this._effectConfigurations[t]; return null; } _enable() { const e = this.mrtCount; for (let t = 0; t < this._effectConfigurations.length; t++) this._effectConfigurations[t].enabled && this._enableTextures(this._effectConfigurations[t].texturesRequired); for (let t = 0; t < this.renderTargets.length; t++) { (this.mrtCount !== e || this.renderTargets[t].count !== this.mrtCount) && this.renderTargets[t].updateCount(this.mrtCount, { types: this._mrtTypes, formats: this._mrtFormats }, this._mrtNames.concat("prePass_DepthBuffer")), this.renderTargets[t]._resetPostProcessChain(); for (let i = 0; i < this._effectConfigurations.length; i++) this._effectConfigurations[i].enabled && (!this._effectConfigurations[i].postProcess && this._effectConfigurations[i].createPostProcess && this._effectConfigurations[i].createPostProcess(), this._effectConfigurations[i].postProcess && this.renderTargets[t]._beforeCompositionPostProcesses.push(this._effectConfigurations[i].postProcess)); } this._reinitializeAttachments(), this._setEnabled(!0), this._updateGeometryBufferLayout(); } _disable() { this._setEnabled(!1); for (let e = 0; e < this.renderTargets.length; e++) this._setRenderTargetEnabled(this.renderTargets[e], !1); this._resetLayout(); for (let e = 0; e < 
this._effectConfigurations.length; e++) this._effectConfigurations[e].enabled = !1; } _getPostProcessesSource(e, t) { if (t) return t._postProcesses; if (e.renderTargetTexture) if (e.renderTargetTexture.useCameraPostProcesses) { const i = e.renderTargetTexture.activeCamera ? e.renderTargetTexture.activeCamera : this._scene.activeCamera; return i ? i._postProcesses : []; } else return e.renderTargetTexture.postProcesses ? e.renderTargetTexture.postProcesses : []; else return this._scene.activeCamera ? this._scene.activeCamera._postProcesses : []; } _setupOutputForThisPass(e, t) { const i = t && this._scene.activeCameras && !!this._scene.activeCameras.length && this._scene.activeCameras.indexOf(t) !== 0; this._postProcessesSourceForThisPass = this._getPostProcessesSource(e, t), this._postProcessesSourceForThisPass = this._postProcessesSourceForThisPass.filter((l) => l != null), this._scene.autoClear = !0; const r = this._hasImageProcessing(this._postProcessesSourceForThisPass); this._needsCompositionForThisPass = !r && !this.disableGammaTransform && this._needsImageProcessing() && !i; const s = this._getFirstPostProcess(this._postProcessesSourceForThisPass), n = e._beforeCompositionPostProcesses && e._beforeCompositionPostProcesses[0]; let a = null; this._scene.imageProcessingConfiguration.applyByPostProcess = this._needsCompositionForThisPass || r, this._needsCompositionForThisPass && !e.imageProcessingPostProcess && e._createCompositionEffect(), n ? a = n : this._needsCompositionForThisPass ? a = e.imageProcessingPostProcess : s && (a = s), this._bindFrameBuffer(), this._linkInternalTexture(e, a); } _linkInternalTexture(e, t) { t && (t.autoClear = !1, t.inputTexture = e.renderTarget), e._outputPostProcess !== t && (e._outputPostProcess && this._unlinkInternalTexture(e), e._outputPostProcess = t), e._internalTextureDirty && (this._updateGeometryBufferLayout(), e._internalTextureDirty = !1); } /** * @internal */ _unlinkInternalTexture(e) { e._outputPostProcess && (e._outputPostProcess.autoClear = !0, e._outputPostProcess.restoreDefaultInputTexture(), e._outputPostProcess = null); } _needsImageProcessing() { for (let e = 0; e < this._effectConfigurations.length; e++) if (this._effectConfigurations[e].enabled && this._effectConfigurations[e].needsImageProcessing) return !0; return !1; } _hasImageProcessing(e) { var t; let i = !1; if (e) { for (let r = 0; r < e.length; r++) if (((t = e[r]) === null || t === void 0 ? void 0 : t.getClassName()) === "ImageProcessingPostProcess") { i = !0; break; } } return i; } /** * Internal, gets the first post proces. * @param postProcesses * @returns the first post process to be run on this camera. */ _getFirstPostProcess(e) { for (let t = 0; t < e.length; t++) if (e[t] !== null) return e[t]; return null; } /** * Marks the prepass renderer as dirty, triggering a check if the prepass is necessary for the next rendering. 
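* The actual rebuild is deferred: update() and the internal before-draw hook check the dirty flag and call _update() on the next frame.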
*/ markAsDirty() { this._isDirty = !0; } /** * Enables a texture on the MultiRenderTarget for prepass * @param types */ _enableTextures(e) { this._scene.needsPreviousWorldMatrices = !1; for (let t = 0; t < e.length; t++) { const i = e[t]; this._textureIndices[i] === -1 && (this._textureIndices[i] = this._mrtLayout.length, this._mrtLayout.push(i), this._mrtTypes.push(Td.TextureFormats[i].type), this._mrtFormats.push(Td.TextureFormats[i].format), this._mrtNames.push(Td.TextureFormats[i].name), this.mrtCount++), i === 2 && (this._scene.needsPreviousWorldMatrices = !0); } } /** * Makes sure that the prepass renderer is up to date if it has been dirtified. */ update() { this._isDirty && this._update(); } _update() { this._disable(); let e = !1; this._scene.imageProcessingConfiguration.applyByPostProcess = !1, this._scene._depthPeelingRenderer && this._scene.useOrderIndependentTransparency && (this._scene._depthPeelingRenderer.setPrePassRenderer(this), e = !0); for (let i = 0; i < this._scene.materials.length; i++) this._scene.materials[i].setPrePassRenderer(this) && (e = !0); e && this._setRenderTargetEnabled(this.defaultRT, !0); let t; for (let i = 0; i < this.renderTargets.length; i++) { if (this.renderTargets[i].renderTargetTexture) t = this._getPostProcessesSource(this.renderTargets[i]); else { const r = this._scene.activeCamera; if (!r) continue; t = r._postProcesses; } if (t && (t = t.filter((r) => r != null), t)) { for (let r = 0; r < t.length; r++) t[r].setPrePassRenderer(this) && (this._setRenderTargetEnabled(this.renderTargets[i], !0), e = !0); this._hasImageProcessing(t) && (this._scene.imageProcessingConfiguration.applyByPostProcess = !0); } } this._markAllMaterialsAsPrePassDirty(), this._isDirty = !1, e && this._enable(); } _markAllMaterialsAsPrePassDirty() { const e = this._scene.materials; for (let t = 0; t < e.length; t++) e[t].markAsDirty(At.PrePassDirtyFlag); } /** * Disposes the prepass renderer. */ dispose() { for (let e = this.renderTargets.length - 1; e >= 0; e--) this.renderTargets[e].dispose(); for (let e = 0; e < this._effectConfigurations.length; e++) this._effectConfigurations[e].dispose && this._effectConfigurations[e].dispose(); } } Td._SceneComponentInitialization = (c) => { throw yr("PrePassRendererSceneComponent"); }; Td.TextureFormats = [ { purpose: 0, type: 2, format: 5, name: "prePass_Irradiance" }, { purpose: 1, type: 2, format: 5, name: "prePass_Position" }, { purpose: 2, type: 0, format: 5, name: "prePass_Velocity" }, { purpose: 3, type: 0, format: 5, name: "prePass_Reflectivity" }, { purpose: 4, type: 2, format: 5, name: "prePass_Color" }, { purpose: 5, type: 1, format: 6, name: "prePass_Depth" }, { purpose: 6, type: 2, format: 5, name: "prePass_Normal" }, { purpose: 7, type: 0, format: 5, name: "prePass_Albedo" } ]; Object.defineProperty(ii.prototype, "prePassRenderer", { get: function() { return this._prePassRenderer; }, set: function(c) { c && c.isSupported && (this._prePassRenderer = c); }, enumerable: !0, configurable: !0 }); ii.prototype.enablePrePassRenderer = function() { return this._prePassRenderer ? this._prePassRenderer : (this._prePassRenderer = new Td(this), this._prePassRenderer.isSupported || (this._prePassRenderer = null, Ce.Error(`PrePassRenderer needs WebGL 2 support. 
Maybe you tried to use the following features that need the PrePassRenderer : + Subsurface Scattering`)), this._prePassRenderer); }; ii.prototype.disablePrePassRenderer = function() { this._prePassRenderer && (this._prePassRenderer.dispose(), this._prePassRenderer = null); }; class Lne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_PREPASSRENDERER, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._beforeCameraDrawStage.registerStep(Bt.STEP_BEFORECAMERADRAW_PREPASS, this, this._beforeCameraDraw), this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_PREPASS, this, this._afterCameraDraw), this.scene._beforeRenderTargetDrawStage.registerStep(Bt.STEP_BEFORERENDERTARGETDRAW_PREPASS, this, this._beforeRenderTargetDraw), this.scene._afterRenderTargetDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_PREPASS, this, this._afterRenderTargetDraw), this.scene._beforeClearStage.registerStep(Bt.STEP_BEFORECLEAR_PREPASS, this, this._beforeClearStage), this.scene._beforeRenderTargetClearStage.registerStep(Bt.STEP_BEFORERENDERTARGETCLEAR_PREPASS, this, this._beforeRenderTargetClearStage), this.scene._beforeRenderingMeshStage.registerStep(Bt.STEP_BEFORERENDERINGMESH_PREPASS, this, this._beforeRenderingMeshStage), this.scene._afterRenderingMeshStage.registerStep(Bt.STEP_AFTERRENDERINGMESH_PREPASS, this, this._afterRenderingMeshStage); } _beforeRenderTargetDraw(e, t, i) { this.scene.prePassRenderer && !e.noPrePassRenderer && (this.scene.prePassRenderer._setRenderTarget(e._prePassRenderTarget), this.scene.prePassRenderer._beforeDraw(void 0, t, i)); } _afterRenderTargetDraw(e, t, i) { this.scene.prePassRenderer && !e.noPrePassRenderer && this.scene.prePassRenderer._afterDraw(t, i); } _beforeRenderTargetClearStage(e) { this.scene.prePassRenderer && !e.noPrePassRenderer && (e._prePassRenderTarget || (e._prePassRenderTarget = this.scene.prePassRenderer._createRenderTarget(e.name + "_prePassRTT", e)), this.scene.prePassRenderer._setRenderTarget(e._prePassRenderTarget), this.scene.prePassRenderer._clear()); } _beforeCameraDraw(e) { this.scene.prePassRenderer && (this.scene.prePassRenderer._setRenderTarget(null), this.scene.prePassRenderer._beforeDraw(e)); } _afterCameraDraw() { this.scene.prePassRenderer && this.scene.prePassRenderer._afterDraw(); } _beforeClearStage() { this.scene.prePassRenderer && (this.scene.prePassRenderer._setRenderTarget(null), this.scene.prePassRenderer._clear()); } _beforeRenderingMeshStage(e, t, i, r) { if (!r) return; const s = e.getScene(); s.prePassRenderer && s.prePassRenderer.bindAttachmentsForEffect(r, t); } _afterRenderingMeshStage(e) { const t = e.getScene(); t.prePassRenderer && t.prePassRenderer.restoreAttachments(); } /** * Rebuilds the elements related to this component in case of * context lost for instance. 
*/ rebuild() { this.scene.disablePrePassRenderer(), this.scene.enablePrePassRenderer(); } /** * Disposes the component and the associated resources */ dispose() { this.scene.disablePrePassRenderer(); } } Td._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_PREPASSRENDERER); e || (e = new Lne(c), c._addComponent(e)); }; const E4e = "fibonacci", T4e = `#define rcp(x) 1./x #define GOLDEN_RATIO 1.618033988749895 #define TWO_PI 6.2831855 vec2 Golden2dSeq(int i,float n) {return vec2(float(i)/n+(0.5/n),fract(float(i)*rcp(GOLDEN_RATIO)));} vec2 SampleDiskGolden(int i,int sampleCount) {vec2 f=Golden2dSeq(i,float(sampleCount));return vec2(sqrt(f.x),TWO_PI*f.y);}`; je.IncludesShadersStore[E4e] = T4e; const S4e = "diffusionProfile", M4e = "uniform vec3 diffusionS[5];uniform float diffusionD[5];uniform float filterRadii[5];"; je.IncludesShadersStore[S4e] = M4e; const R4e = "subSurfaceScatteringPixelShader", P4e = `#include #include #include #include varying vec2 vUV;uniform vec2 texelSize;uniform sampler2D textureSampler;uniform sampler2D irradianceSampler;uniform sampler2D depthSampler;uniform sampler2D albedoSampler;uniform vec2 viewportSize;uniform float metersPerUnit;const float LOG2_E=1.4426950408889634;const float SSS_PIXELS_PER_SAMPLE=4.;const int _SssSampleBudget=40; #define rcp(x) 1./x #define Sq(x) x*x #define SSS_BILATERAL_FILTER true vec3 EvalBurleyDiffusionProfile(float r,vec3 S) {vec3 exp_13=exp2(((LOG2_E*(-1.0/3.0))*r)*S); vec3 expSum=exp_13*(1.+exp_13*exp_13); return (S*rcp(8.*PI))*expSum; } vec2 SampleBurleyDiffusionProfile(float u,float rcpS) {u=1.-u; float g=1.+(4.*u)*(2.*u+sqrt(1.+(4.*u)*u));float n=exp2(log2(g)*(-1.0/3.0)); float p=(g*n)*n; float c=1.+p+n; float d=(3./LOG2_E*2.)+(3./LOG2_E)*log2(u); float x=(3./LOG2_E)*log2(c)-d; float rcpExp=((c*c)*c)*rcp((4.*u)*((c*c)+(4.*u)*(4.*u)));float r=x*rcpS;float rcpPdf=(8.*PI*rcpS)*rcpExp; return vec2(r,rcpPdf);} vec3 ComputeBilateralWeight(float xy2,float z,float mmPerUnit,vec3 S,float rcpPdf) { #ifndef SSS_BILATERAL_FILTER z=0.; #endif float r=sqrt(xy2+(z*mmPerUnit)*(z*mmPerUnit));float area=rcpPdf; #if SSS_CLAMP_ARTIFACT return clamp(EvalBurleyDiffusionProfile(r,S)*area,0.0,1.0); #else return EvalBurleyDiffusionProfile(r,S)*area; #endif } void EvaluateSample(int i,int n,vec3 S,float d,vec3 centerPosVS,float mmPerUnit,float pixelsPerMm, float phase,inout vec3 totalIrradiance,inout vec3 totalWeight) {float scale =rcp(float(n));float offset=rcp(float(n))*0.5;float sinPhase,cosPhase;sinPhase=sin(phase);cosPhase=cos(phase);vec2 bdp=SampleBurleyDiffusionProfile(float(i)*scale+offset,d);float r=bdp.x;float rcpPdf=bdp.y;float phi=SampleDiskGolden(i,n).y;float sinPhi,cosPhi;sinPhi=sin(phi);cosPhi=cos(phi);float sinPsi=cosPhase*sinPhi+sinPhase*cosPhi; float cosPsi=cosPhase*cosPhi-sinPhase*sinPhi; vec2 vec=r*vec2(cosPsi,sinPsi);vec2 position; float xy2;position=vUV+round((pixelsPerMm*r)*vec2(cosPsi,sinPsi))*texelSize;xy2 =r*r;vec4 textureSample=texture2D(irradianceSampler,position);float viewZ=texture2D(depthSampler,position).r;vec3 irradiance =textureSample.rgb;if (testLightingForSSS(textureSample.a)) {float relZ=viewZ-centerPosVS.z;vec3 weight=ComputeBilateralWeight(xy2,relZ,mmPerUnit,S,rcpPdf);totalIrradiance+=weight*irradiance;totalWeight +=weight;} else {}} #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) {vec4 irradianceAndDiffusionProfile =texture2D(irradianceSampler,vUV);vec3 centerIrradiance=irradianceAndDiffusionProfile.rgb;int diffusionProfileIndex=int(round(irradianceAndDiffusionProfile.a*255.));float 
centerDepth =0.;vec4 inputColor=texture2D(textureSampler,vUV);bool passedStencilTest=testLightingForSSS(irradianceAndDiffusionProfile.a);if (passedStencilTest) {centerDepth=texture2D(depthSampler,vUV).r;} if (!passedStencilTest) { gl_FragColor=inputColor;return;} float distScale =1.;vec3 S =diffusionS[diffusionProfileIndex];float d =diffusionD[diffusionProfileIndex];float filterRadius=filterRadii[diffusionProfileIndex];vec2 centerPosNDC=vUV;vec2 cornerPosNDC=vUV+0.5*texelSize;vec3 centerPosVS =vec3(centerPosNDC*viewportSize,1.0)*centerDepth; vec3 cornerPosVS =vec3(cornerPosNDC*viewportSize,1.0)*centerDepth; float mmPerUnit =1000.*(metersPerUnit*rcp(distScale));float unitsPerMm=rcp(mmPerUnit);float unitsPerPixel=2.*abs(cornerPosVS.x-centerPosVS.x);float pixelsPerMm =rcp(unitsPerPixel)*unitsPerMm;float filterArea =PI*Sq(filterRadius*pixelsPerMm);int sampleCount =int(filterArea*rcp(SSS_PIXELS_PER_SAMPLE));int sampleBudget=_SssSampleBudget;int texturingMode=0;vec3 albedo =texture2D(albedoSampler,vUV).rgb;if (distScale==0. || sampleCount<1) { #ifdef DEBUG_SSS_SAMPLES vec3 green=vec3(0.,1.,0.);gl_FragColor=vec4(green,1.0);return; #endif gl_FragColor=vec4(inputColor.rgb+albedo*centerIrradiance,1.0);return;} #ifdef DEBUG_SSS_SAMPLES vec3 red =vec3(1.,0.,0.);vec3 blue=vec3(0.,0.,1.);gl_FragColor=vec4(mix(blue,red,clamp(float(sampleCount)/float(sampleBudget),0.0,1.0)),1.0);return; #endif float phase=0.;int n=min(sampleCount,sampleBudget);vec3 centerWeight =vec3(0.); vec3 totalIrradiance=vec3(0.);vec3 totalWeight =vec3(0.);for (int i=0; i { if (!t.prePassRenderer || !t.subSurfaceConfiguration) { Ce.Error("PrePass and subsurface configuration needs to be enabled for subsurface scattering."); return; } const u = this.texelSize; o.setFloat("metersPerUnit", t.subSurfaceConfiguration.metersPerUnit), o.setFloat2("texelSize", u.x, u.y), o.setTexture("irradianceSampler", t.prePassRenderer.getRenderTarget().textures[t.prePassRenderer.getIndex(0)]), o.setTexture("depthSampler", t.prePassRenderer.getRenderTarget().textures[t.prePassRenderer.getIndex(5)]), o.setTexture("albedoSampler", t.prePassRenderer.getRenderTarget().textures[t.prePassRenderer.getIndex(7)]), o.setFloat2("viewportSize", Math.tan(t.activeCamera.fov / 2) * t.getEngine().getAspectRatio(t.activeCamera, !0), Math.tan(t.activeCamera.fov / 2)), o.setArray3("diffusionS", t.subSurfaceConfiguration.ssDiffusionS), o.setArray("diffusionD", t.subSurfaceConfiguration.ssDiffusionD), o.setArray("filterRadii", t.subSurfaceConfiguration.ssFilterRadii); }); } } class EN { /** * Diffusion profile color for subsurface scattering */ get ssDiffusionS() { return this._ssDiffusionS; } /** * Diffusion profile max color channel value for subsurface scattering */ get ssDiffusionD() { return this._ssDiffusionD; } /** * Diffusion profile filter radius for subsurface scattering */ get ssFilterRadii() { return this._ssFilterRadii; } /** * Builds a subsurface configuration object * @param scene The scene */ constructor(e) { this._ssDiffusionS = [], this._ssFilterRadii = [], this._ssDiffusionD = [], this.enabled = !1, this.needsImageProcessing = !0, this.name = Bt.NAME_SUBSURFACE, this.ssDiffusionProfileColors = [], this.metersPerUnit = 1, this.texturesRequired = [ 5, 7, 4, 0 ], this.addDiffusionProfile(new ze(1, 1, 1)), this._scene = e, EN._SceneComponentInitialization(this._scene); } /** * Adds a new diffusion profile. * Useful for more realistic subsurface scattering on diverse materials. * @param color The color of the diffusion profile. 
Should be the average color of the material. * @returns The index of the diffusion profile for the material subsurface configuration */ addDiffusionProfile(e) { if (this.ssDiffusionD.length >= 5) return Ce.Error("You already reached the maximum number of diffusion profiles."), 0; for (let t = 0; t < this._ssDiffusionS.length / 3; t++) if (this._ssDiffusionS[t * 3] === e.r && this._ssDiffusionS[t * 3 + 1] === e.g && this._ssDiffusionS[t * 3 + 2] === e.b) return t; return this._ssDiffusionS.push(e.r, e.b, e.g), this._ssDiffusionD.push(Math.max(Math.max(e.r, e.b), e.g)), this._ssFilterRadii.push(this.getDiffusionProfileParameters(e)), this.ssDiffusionProfileColors.push(e), this._ssDiffusionD.length - 1; } /** * Creates the sss post process * @returns The created post process */ createPostProcess() { return this.postProcess = new I4e("subSurfaceScattering", this._scene, 1, null, void 0, this._scene.getEngine()), this.postProcess.autoClear = !1, this.postProcess; } /** * Deletes all diffusion profiles. * Note that in order to render subsurface scattering, you should have at least 1 diffusion profile. */ clearAllDiffusionProfiles() { this._ssDiffusionD = [], this._ssDiffusionS = [], this._ssFilterRadii = [], this.ssDiffusionProfileColors = []; } /** * Disposes this object */ dispose() { this.clearAllDiffusionProfiles(), this.postProcess && this.postProcess.dispose(); } /** * @internal * https://zero-radiance.github.io/post/sampling-diffusion/ * * Importance sample the normalized diffuse reflectance profile for the computed value of 's'. * ------------------------------------------------------------------------------------ * R[r, phi, s] = s * (Exp[-r * s] + Exp[-r * s / 3]) / (8 * Pi * r) * PDF[r, phi, s] = r * R[r, phi, s] * CDF[r, s] = 1 - 1/4 * Exp[-r * s] - 3/4 * Exp[-r * s / 3] * ------------------------------------------------------------------------------------ * We importance sample the color channel with the widest scattering distance. */ getDiffusionProfileParameters(e) { const i = Math.max(e.r, e.g, e.b); return this._sampleBurleyDiffusionProfile(0.997, i); } /** * Performs sampling of a Normalized Burley diffusion profile in polar coordinates. * 'u' is the random number (the value of the CDF): [0, 1). * rcp(s) = 1 / ShapeParam = ScatteringDistance. * Returns the sampled radial distance, s.t. (u = 0 -> r = 0) and (u = 1 -> r = Inf). * @param u * @param rcpS */ _sampleBurleyDiffusionProfile(e, t) { e = 1 - e; const i = 1 + 4 * e * (2 * e + Math.sqrt(1 + 4 * e * e)), r = Math.pow(i, -1 / 3), n = 1 + i * r * r + r; return 3 * Math.log(n / (4 * e)) * t; } } EN._SceneComponentInitialization = (c) => { throw yr("SubSurfaceSceneComponent"); }; Yl.AddParser(Bt.NAME_SUBSURFACE, (c, e) => { if (c.ssDiffusionProfileColors !== void 0 && c.ssDiffusionProfileColors !== null && (e.enableSubSurfaceForPrePass(), e.subSurfaceConfiguration)) for (let t = 0, i = c.ssDiffusionProfileColors.length; t < i; t++) { const r = c.ssDiffusionProfileColors[t]; e.subSurfaceConfiguration.addDiffusionProfile(new ze(r.r, r.g, r.b)); } }); Object.defineProperty(ii.prototype, "subSurfaceConfiguration", { get: function() { return this._subSurfaceConfiguration; }, set: function(c) { c && this.enablePrePassRenderer() && (this._subSurfaceConfiguration = c); }, enumerable: !0, configurable: !0 }); ii.prototype.enableSubSurfaceForPrePass = function() { if (this._subSurfaceConfiguration) return this._subSurfaceConfiguration; const c = this.enablePrePassRenderer(); return c ? 
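/* A hedged usage sketch (not from the original source; assumes an existing `scene` and the BABYLON namespace):
   scene.enableSubSurfaceForPrePass();
   scene.subSurfaceConfiguration.addDiffusionProfile(new BABYLON.Color3(1, 0.5, 0.5)); */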
(this._subSurfaceConfiguration = new EN(this), c.addEffectConfiguration(this._subSurfaceConfiguration), this._subSurfaceConfiguration) : null; }; ii.prototype.disableSubSurfaceForPrePass = function() { this._subSurfaceConfiguration && (this._subSurfaceConfiguration.dispose(), this._subSurfaceConfiguration = null); }; class Nne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_PREPASSRENDERER, this.scene = e; } /** * Registers the component in a given scene */ register() { } /** * Serializes the component data to the specified json object * @param serializationObject The object to serialize to */ serialize(e) { if (!this.scene.subSurfaceConfiguration) return; const t = this.scene.subSurfaceConfiguration.ssDiffusionProfileColors; e.ssDiffusionProfileColors = []; for (let i = 0; i < t.length; i++) e.ssDiffusionProfileColors.push({ r: t[i].r, g: t[i].g, b: t[i].b }); } /** * Adds all the elements from the container to the scene */ addFromContainer() { } /** * Removes all the elements in the container from the scene */ removeFromContainer() { this.scene.prePassRenderer && this.scene.subSurfaceConfiguration && this.scene.subSurfaceConfiguration.clearAllDiffusionProfiles(); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources */ dispose() { } } EN._SceneComponentInitialization = (c) => { let e = c._getComponent(Bt.NAME_SUBSURFACE); e || (e = new Nne(c), c._addComponent(e)); }; const D4e = "outlinePixelShader", O4e = `#ifdef LOGARITHMICDEPTH #extension GL_EXT_frag_depth : enable #endif uniform vec4 color; #ifdef ALPHATEST varying vec2 vUV;uniform sampler2D diffuseSampler; #endif #include #include #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #include #ifdef ALPHATEST if (texture2D(diffuseSampler,vUV).a<0.4) discard; #endif #include gl_FragColor=color; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[D4e] = O4e; const w4e = "outlineVertexShader", L4e = `attribute vec3 position;attribute vec3 normal; #include #include #include #include[0..maxSimultaneousMorphTargets] #include uniform float offset; #include uniform mat4 viewProjection; #ifdef ALPHATEST varying vec2 vUV;uniform mat4 diffuseMatrix; #ifdef UV1 attribute vec2 uv; #endif #ifdef UV2 attribute vec2 uv2; #endif #endif #include #define CUSTOM_VERTEX_DEFINITIONS void main(void) {vec3 positionUpdated=position;vec3 normalUpdated=normal; #ifdef UV1 vec2 uvUpdated=uv; #endif #include #include[0..maxSimultaneousMorphTargets] vec3 offsetPosition=positionUpdated+(normalUpdated*offset); #include #include #include vec4 worldPos=finalWorld*vec4(offsetPosition,1.0);gl_Position=viewProjection*worldPos; #ifdef ALPHATEST #ifdef UV1 vUV=vec2(diffuseMatrix*vec4(uvUpdated,1.0,0.0)); #endif #ifdef UV2 vUV=vec2(diffuseMatrix*vec4(uv2,1.0,0.0)); #endif #endif #include #include } `; je.ShadersStore[w4e] = L4e; ii.prototype.getOutlineRenderer = function() { return this._outlineRenderer || (this._outlineRenderer = new tw(this)), this._outlineRenderer; }; Object.defineProperty(ke.prototype, "renderOutline", { get: function() { return this._renderOutline; }, set: function(c) { c && this.getScene().getOutlineRenderer(), this._renderOutline = c; }, enumerable: !0, configurable: !0 }); Object.defineProperty(ke.prototype, "renderOverlay", { get: function() { return this._renderOverlay; }, 
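/* A hedged usage sketch (assumes an existing `mesh` and the BABYLON namespace):
   mesh.renderOutline = true; mesh.outlineWidth = 0.1; mesh.outlineColor = new BABYLON.Color3(0, 0, 1);
   mesh.renderOverlay = true; mesh.overlayColor = new BABYLON.Color3(1, 0, 0); */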
set: function(c) { c && this.getScene().getOutlineRenderer(), this._renderOverlay = c; }, enumerable: !0, configurable: !0 }); class tw { /** * Instantiates a new outline renderer. (There could be only one per scene). * @param scene Defines the scene it belongs to */ constructor(e) { this.name = Bt.NAME_OUTLINERENDERER, this.zOffset = 1, this.zOffsetUnits = 4, this.scene = e, this._engine = e.getEngine(), this.scene._addComponent(this), this._passIdForDrawWrapper = []; for (let t = 0; t < 4; ++t) this._passIdForDrawWrapper[t] = this._engine.createRenderPassId(`Outline Renderer (${t})`); } /** * Register the component to one instance of a scene. */ register() { this.scene._beforeRenderingMeshStage.registerStep(Bt.STEP_BEFORERENDERINGMESH_OUTLINE, this, this._beforeRenderingMesh), this.scene._afterRenderingMeshStage.registerStep(Bt.STEP_AFTERRENDERINGMESH_OUTLINE, this, this._afterRenderingMesh); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources. */ dispose() { for (let e = 0; e < this._passIdForDrawWrapper.length; ++e) this._engine.releaseRenderPassId(this._passIdForDrawWrapper[e]); } /** * Renders the outline in the canvas. * @param subMesh Defines the sumesh to render * @param batch Defines the batch of meshes in case of instances * @param useOverlay Defines if the rendering is for the overlay or the outline * @param renderPassId Render pass id to use to render the mesh */ render(e, t, i = !1, r) { r = r ?? this._passIdForDrawWrapper[0]; const s = this.scene, n = s.getEngine(), a = n.getCaps().instancedArrays && (t.visibleInstances[e._id] !== null && t.visibleInstances[e._id] !== void 0 || e.getRenderingMesh().hasThinInstances); if (!this.isReady(e, a, r)) return; const l = e.getMesh(), o = l._internalAbstractMeshDataInfo._actAsRegularMesh ? l : null, u = e.getRenderingMesh(), h = o || u, d = e.getMaterial(); if (!d || !s.activeCamera) return; const f = e._getDrawWrapper(r), p = $o.GetEffect(f); if (n.enableEffect(f), d.useLogarithmicDepth && p.setFloat("logarithmicDepthConstant", 2 / (Math.log(s.activeCamera.maxZ + 1) / Math.LN2)), p.setFloat("offset", i ? 0 : u.outlineWidth), p.setColor4("color", i ? u.overlayColor : u.outlineColor, i ? u.overlayAlpha : d.alpha), p.setMatrix("viewProjection", s.getTransformMatrix()), p.setMatrix("world", h.getWorldMatrix()), u.useBones && u.computeBonesUsingShaders && u.skeleton && p.setMatrices("mBones", u.skeleton.getTransformMatrices(u)), u.morphTargetManager && u.morphTargetManager.isUsingTextureForTargets && u.morphTargetManager._bind(p), Ke.BindMorphTargetParameters(u, p), a || u._bind(e, p, d.fillMode), d && d.needAlphaTesting()) { const m = d.getAlphaTestTexture(); m && (p.setTexture("diffuseSampler", m), p.setMatrix("diffuseMatrix", m.getTextureMatrix())); } Ec(p, d, s), n.setZOffset(-this.zOffset), n.setZOffsetUnits(-this.zOffsetUnits), u._processRendering(h, e, p, d.fillMode, t, a, (m, _) => { p.setMatrix("world", _); }), n.setZOffset(0), n.setZOffsetUnits(0); } /** * Returns whether or not the outline renderer is ready for a given submesh. * All the dependencies e.g. submeshes, texture, effect... mus be ready * @param subMesh Defines the submesh to check readiness for * @param useInstances Defines whether wee are trying to render instances or not * @param renderPassId Render pass id to use to render the mesh * @returns true if ready otherwise false */ isReady(e, t, i) { i = i ?? 
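/*
 * Illustrative usage sketch (added comment, not part of the library): the outline and overlay passes
 * are driven from the mesh side. Assumes `mesh` is an instance of the mesh class whose prototype is
 * extended above (minified as `ke`) and that `ze` is the public Color3 class.
 *
 *   mesh.renderOutline = true;            // lazily creates the scene's outline renderer (getOutlineRenderer)
 *   mesh.outlineWidth = 0.05;             // offset pushed along the normal by the outline vertex shader
 *   mesh.outlineColor = new ze(1, 0, 0);
 *   mesh.renderOverlay = true;            // overlay pass reuses the same renderer with a zero offset
 *   mesh.overlayColor = new ze(0, 0, 1);
 *   mesh.overlayAlpha = 0.3;
 */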
this._passIdForDrawWrapper[0]; const r = [], s = [Y.PositionKind, Y.NormalKind], n = e.getMesh(), a = e.getMaterial(); if (!a) return !1; const l = n.getScene(); a.needAlphaTesting() && (r.push("#define ALPHATEST"), n.isVerticesDataPresent(Y.UVKind) && (s.push(Y.UVKind), r.push("#define UV1")), n.isVerticesDataPresent(Y.UV2Kind) && (s.push(Y.UV2Kind), r.push("#define UV2"))), a.useLogarithmicDepth && r.push("#define LOGARITHMICDEPTH"), bT(a, l, r), n.useBones && n.computeBonesUsingShaders ? (s.push(Y.MatricesIndicesKind), s.push(Y.MatricesWeightsKind), n.numBoneInfluencers > 4 && (s.push(Y.MatricesIndicesExtraKind), s.push(Y.MatricesWeightsExtraKind)), r.push("#define NUM_BONE_INFLUENCERS " + n.numBoneInfluencers), r.push("#define BonesPerMesh " + (n.skeleton ? n.skeleton.bones.length + 1 : 0))) : r.push("#define NUM_BONE_INFLUENCERS 0"); const o = n.morphTargetManager; let u = 0; o && o.numInfluencers > 0 && (u = o.numInfluencers, r.push("#define MORPHTARGETS"), r.push("#define NUM_MORPH_INFLUENCERS " + u), o.isUsingTextureForTargets && r.push("#define MORPHTARGETS_TEXTURE"), Ke.PrepareAttributesForMorphTargetsInfluencers(s, n, u)), t && (r.push("#define INSTANCES"), Ke.PushAttributesForInstances(s), e.getRenderingMesh().hasThinInstances && r.push("#define THIN_INSTANCES")); const h = e._getDrawWrapper(i, !0), d = h.defines, f = r.join(` `); if (d !== f) { const p = [ "world", "mBones", "viewProjection", "diffuseMatrix", "offset", "color", "logarithmicDepthConstant", "morphTargetInfluences", "morphTargetTextureInfo", "morphTargetTextureIndices" ]; Gc(p), h.setEffect(this.scene.getEngine().createEffect("outline", s, p, ["diffuseSampler", "morphTargets"], f, void 0, void 0, void 0, { maxSimultaneousMorphTargets: u }), f); } return h.effect.isReady(); } _beforeRenderingMesh(e, t, i) { if (this._savedDepthWrite = this._engine.getDepthWrite(), e.renderOutline) { const r = t.getMaterial(); r && r.needAlphaBlendingForMesh(e) && (this._engine.cacheStencilState(), this._engine.setDepthWrite(!1), this._engine.setColorWrite(!1), this._engine.setStencilBuffer(!0), this._engine.setStencilOperationPass(7681), this._engine.setStencilFunction(519), this._engine.setStencilMask(tw._StencilReference), this._engine.setStencilFunctionReference(tw._StencilReference), this._engine.stencilStateComposer.useStencilGlobalOnly = !0, this.render( t, i, /* This sets offset to 0 */ !0, this._passIdForDrawWrapper[1] ), this._engine.setColorWrite(!0), this._engine.setStencilFunction(517)), this._engine.setDepthWrite(!1), this.render(t, i, !1, this._passIdForDrawWrapper[0]), this._engine.setDepthWrite(this._savedDepthWrite), r && r.needAlphaBlendingForMesh(e) && (this._engine.stencilStateComposer.useStencilGlobalOnly = !1, this._engine.restoreStencilState()); } } _afterRenderingMesh(e, t, i) { if (e.renderOverlay) { const r = this._engine.getAlphaMode(), s = this._engine.alphaState.alphaBlend; this._engine.setAlphaMode(2), this.render(t, i, !0, this._passIdForDrawWrapper[3]), this._engine.setAlphaMode(r), this._engine.setDepthWrite(this._savedDepthWrite), this._engine.alphaState.alphaBlend = s; } e.renderOutline && this._savedDepthWrite && (this._engine.setDepthWrite(!0), this._engine.setColorWrite(!1), this.render(t, i, !1, this._passIdForDrawWrapper[2]), this._engine.setColorWrite(!0)); } } tw._StencilReference = 4; class lj { /** Gets or sets the size of the particle */ get particleSize() { return this._particleSize; } set particleSize(e) { e !== this._particleSize && (this._particleSize = e, 
this.onParticleSizeChanged.notifyObservers(this)); } /** Indicates if the object uses instancing or not */ get useInstancing() { return !this.indexBuffer; } /** Indicates if velocity of particles should be used when rendering the object. The vertex buffer set must contain a "velocity" buffer for this to work! */ get useVelocity() { return this._useVelocity; } set useVelocity(e) { this._useVelocity === e || !this._hasVelocity() || (this._useVelocity = e, this._effectsAreDirty = !0); } _hasVelocity() { var e; return !!(!((e = this.vertexBuffers) === null || e === void 0) && e.velocity); } /** * Gets the index buffer (or null if the object is using instancing) */ get indexBuffer() { return null; } /** * Gets the name of the class */ getClassName() { return "FluidRenderingObject"; } /** * Instantiates a fluid rendering object * @param scene The scene the object is part of */ constructor(e) { this.priority = 0, this._particleSize = 0.1, this.onParticleSizeChanged = new Fe(), this.particleThicknessAlpha = 0.05, this._useVelocity = !1, this._scene = e, this._engine = e.getEngine(), this._effectsAreDirty = !0, this._depthEffectWrapper = null, this._thicknessEffectWrapper = null; } _createEffects() { const e = ["view", "projection", "particleRadius", "size"], t = ["position", "offset"], i = []; this._effectsAreDirty = !1, this.useVelocity && (t.push("velocity"), i.push("#define FLUIDRENDERING_VELOCITY")), this._scene.useRightHandedSystem && i.push("#define FLUIDRENDERING_RHS"), this._depthEffectWrapper = new t6({ engine: this._engine, useShaderStore: !0, vertexShader: "fluidRenderingParticleDepth", fragmentShader: "fluidRenderingParticleDepth", attributeNames: t, uniformNames: e, samplerNames: [], defines: i }), e.push("particleAlpha"), this._thicknessEffectWrapper = new t6({ engine: this._engine, useShaderStore: !0, vertexShader: "fluidRenderingParticleThickness", fragmentShader: "fluidRenderingParticleThickness", attributeNames: ["position", "offset"], uniformNames: e, samplerNames: [] }); } /** * Indicates if the object is ready to be rendered * @returns True if everything is ready for the object to be rendered, otherwise false */ isReady() { if (this._effectsAreDirty && this._createEffects(), !this._depthEffectWrapper || !this._thicknessEffectWrapper) return !1; const e = this._depthEffectWrapper._drawWrapper.effect, t = this._thicknessEffectWrapper._drawWrapper.effect; return e.isReady() && t.isReady(); } /** * Render the depth texture for this object */ renderDepthTexture() { const e = this.numParticles; if (!this._depthEffectWrapper || e === 0) return; const t = this._depthEffectWrapper._drawWrapper, i = t.effect; this._engine.enableEffect(t), this._engine.bindBuffers(this.vertexBuffers, this.indexBuffer, i), i.setMatrix("view", this._scene.getViewMatrix()), i.setMatrix("projection", this._scene.getProjectionMatrix()), i.setFloat2("size", this._particleSize, this._particleSize), i.setFloat("particleRadius", this._particleSize / 2), this.useInstancing ? 
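/*
 * Note (added comment, not part of the library): a hedged sketch of the two user-facing knobs of this
 * base class, assuming `fluidObject` is one of the FluidRenderingObject subclasses defined below.
 * useVelocity is silently ignored unless the vertex buffer set actually contains a "velocity" buffer.
 *
 *   fluidObject.particleSize = 0.2;   // notifies onParticleSizeChanged observers
 *   fluidObject.useVelocity = true;   // only takes effect when vertexBuffers.velocity exists,
 *                                     // and marks the depth/thickness effects as dirty
 */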
this._engine.drawArraysType(7, 0, 4, e) : this._engine.drawElementsType(0, 0, e); } /** * Render the thickness texture for this object */ renderThicknessTexture() { const e = this.numParticles; if (!this._thicknessEffectWrapper || e === 0) return; const t = this._thicknessEffectWrapper._drawWrapper, i = t.effect; this._engine.setAlphaMode(6), this._engine.setDepthWrite(!1), this._engine.enableEffect(t), this._engine.bindBuffers(this.vertexBuffers, this.indexBuffer, i), i.setMatrix("view", this._scene.getViewMatrix()), i.setMatrix("projection", this._scene.getProjectionMatrix()), i.setFloat("particleAlpha", this.particleThicknessAlpha), i.setFloat2("size", this._particleSize, this._particleSize), this.useInstancing ? this._engine.drawArraysType(7, 0, 4, e) : this._engine.drawElementsType(0, 0, e), this._engine.setDepthWrite(!0), this._engine.setAlphaMode(0); } /** * Render the diffuse texture for this object */ renderDiffuseTexture() { } /** * Releases the ressources used by the class */ dispose() { var e, t; (e = this._depthEffectWrapper) === null || e === void 0 || e.dispose(), (t = this._thicknessEffectWrapper) === null || t === void 0 || t.dispose(); } } class Fne extends lj { /** Gets the particle system */ get particleSystem() { return this._particleSystem; } /** * Gets the name of the class */ getClassName() { return "FluidRenderingObjectParticleSystem"; } /** * Gets or sets a boolean indicating that the diffuse texture should be generated based on the regular rendering of the particle system (default: true). * Sometimes, generating the diffuse texture this way may be sub-optimal. In that case, you can disable this property, in which case the particle system will be * rendered using a ALPHA_COMBINE mode instead of the one used by the particle system. */ get useTrueRenderingForDiffuseTexture() { return this._useTrueRenderingForDiffuseTexture; } set useTrueRenderingForDiffuseTexture(e) { this._useTrueRenderingForDiffuseTexture !== e && (this._useTrueRenderingForDiffuseTexture = e, e ? 
(this._particleSystem.blendMode = this._blendMode, this._particleSystem.onBeforeDrawParticlesObservable.remove(this._onBeforeDrawParticleObserver), this._onBeforeDrawParticleObserver = null) : (this._particleSystem.blendMode = -1, this._onBeforeDrawParticleObserver = this._particleSystem.onBeforeDrawParticlesObservable.add(() => { this._engine.setAlphaMode(2); }))); } /** * Gets the vertex buffers */ get vertexBuffers() { return this._particleSystem.vertexBuffers; } /** * Gets the index buffer (or null if the object is using instancing) */ get indexBuffer() { return this._particleSystem.indexBuffer; } /** * Creates a new instance of the class * @param scene The scene the particle system is part of * @param ps The particle system */ constructor(e, t) { super(e), this._useTrueRenderingForDiffuseTexture = !0, this._particleSystem = t, this._originalRender = t.render.bind(t), this._blendMode = t.blendMode, this._onBeforeDrawParticleObserver = null, this._updateInAnimate = this._particleSystem.updateInAnimate, this._particleSystem.updateInAnimate = !0, this._particleSystem.render = () => 0, this.particleSize = (t.minSize + t.maxSize) / 2, this.useTrueRenderingForDiffuseTexture = !1; } /** * Indicates if the object is ready to be rendered * @returns True if everything is ready for the object to be rendered, otherwise false */ isReady() { return super.isReady() && this._particleSystem.isReady(); } /** * Gets the number of particles in this particle system * @returns The number of particles */ get numParticles() { return this._particleSystem.getActiveCount(); } /** * Render the diffuse texture for this object */ renderDiffuseTexture() { this._originalRender(); } /** * Releases the ressources used by the class */ dispose() { super.dispose(), this._particleSystem.onBeforeDrawParticlesObservable.remove(this._onBeforeDrawParticleObserver), this._onBeforeDrawParticleObserver = null, this._particleSystem.render = this._originalRender, this._particleSystem.blendMode = this._blendMode, this._particleSystem.updateInAnimate = this._updateInAnimate; } } class Zk { get blurNumIterations() { return this._blurNumIterations; } set blurNumIterations(e) { if (this._blurNumIterations !== e && (this._blurNumIterations = e, this._blurPostProcesses !== null)) { const t = this._blurPostProcesses[0], i = this._blurPostProcesses[1]; this._blurPostProcesses = []; for (let r = 0; r < this._blurNumIterations * 2; ++r) this._blurPostProcesses[r] = r & 1 ? 
i : t; } } get renderTarget() { return this._rt; } get renderTargetBlur() { return this._rtBlur; } get texture() { return this._texture; } get textureBlur() { return this._textureBlurred; } constructor(e, t, i, r, s, n, a = 1, l = 6, o = 1, u = 6, h = !1, d = null, f = !0, p = 1) { this.enableBlur = !0, this.blurSizeDivisor = 1, this.blurFilterSize = 7, this._blurNumIterations = 3, this.blurMaxFilterSize = 100, this.blurDepthScale = 10, this.particleSize = 0.02, this.onDisposeObservable = new Fe(), this._name = e, this._scene = t, this._camera = d, this._engine = t.getEngine(), this._width = i, this._height = r, this._blurTextureSizeX = s, this._blurTextureSizeY = n, this._textureType = a, this._textureFormat = l, this._blurTextureType = o, this._blurTextureFormat = u, this._useStandardBlur = h, this._generateDepthBuffer = f, this._samples = p, this._postProcessRunningIndex = 0, this.enableBlur = s !== 0 && n !== 0, this._rt = null, this._texture = null, this._rtBlur = null, this._textureBlurred = null, this._blurPostProcesses = null; } initialize() { if (this.dispose(), this._createRenderTarget(), this.enableBlur && this._texture) { const [e, t, i] = this._createBlurPostProcesses(this._texture, this._blurTextureType, this._blurTextureFormat, this.blurSizeDivisor, this._name, this._useStandardBlur); this._rtBlur = e, this._textureBlurred = t, this._blurPostProcesses = i; } } applyBlurPostProcesses() { this.enableBlur && this._blurPostProcesses && (this._postProcessRunningIndex = 0, this._scene.postProcessManager.directRender(this._blurPostProcesses, this._rtBlur, !0), this._engine.unBindFramebuffer(this._rtBlur)); } _createRenderTarget() { this._rt = this._engine.createRenderTargetTexture({ width: this._width, height: this._height }, { generateMipMaps: !1, type: this._textureType, format: this._textureFormat, samplingMode: 1, generateDepthBuffer: this._generateDepthBuffer, generateStencilBuffer: !1, samples: this._samples, label: `FluidRenderingRTT-${this._name}` }); const e = this._rt.texture; e.incrementReferences(), this._texture = new De(null, this._scene), this._texture.name = "rtt" + this._name, this._texture._texture = e, this._texture.wrapU = De.CLAMP_ADDRESSMODE, this._texture.wrapV = De.CLAMP_ADDRESSMODE, this._texture.anisotropicFilteringLevel = 1; } _createBlurPostProcesses(e, t, i, r, s, n = !1) { const a = this._scene.getEngine(), l = new at(Math.floor(this._blurTextureSizeX / r), Math.floor(this._blurTextureSizeY / r)), o = t === 1 && a.getCaps().textureFloatLinearFiltering || t === 2 && a.getCaps().textureHalfFloatLinearFiltering, u = this._engine.createRenderTargetTexture({ width: l.x, height: l.y }, { generateMipMaps: !1, type: t, format: i, samplingMode: o ? 2 : 1, generateDepthBuffer: !1, generateStencilBuffer: !1, samples: this._samples, label: `FluidRenderingRTTBlur-${s}` }), h = u.texture; h.incrementReferences(); const d = new De(null, this._scene); if (d.name = "rttBlurred" + s, d._texture = h, d.wrapU = De.CLAMP_ADDRESSMODE, d.wrapV = De.CLAMP_ADDRESSMODE, d.anisotropicFilteringLevel = 1, n) { const f = new Bi("BilateralBlurX", "fluidRenderingStandardBlur", ["filterSize", "blurDir"], null, 1, null, 1, a, !0, null, t, void 0, void 0, void 0, i); f.samples = this._samples, f.externalTextureSamplerBinding = !0, f.onApplyObservable.add((_) => { this._postProcessRunningIndex === 0 ? 
_.setTexture("textureSampler", e) : _._bindTexture("textureSampler", f.inputTexture.texture), _.setInt("filterSize", this.blurFilterSize), _.setFloat2("blurDir", 1 / this._blurTextureSizeX, 0), this._postProcessRunningIndex++; }), f.onSizeChangedObservable.add(() => { f._textures.forEach((_) => { _.texture.wrapU = De.CLAMP_ADDRESSMODE, _.texture.wrapV = De.CLAMP_ADDRESSMODE; }); }), this._fixReusablePostProcess(f); const p = new Bi("BilateralBlurY", "fluidRenderingStandardBlur", ["filterSize", "blurDir"], null, 1, null, 1, a, !0, null, t, void 0, void 0, void 0, i); p.samples = this._samples, p.onApplyObservable.add((_) => { _.setInt("filterSize", this.blurFilterSize), _.setFloat2("blurDir", 0, 1 / this._blurTextureSizeY), this._postProcessRunningIndex++; }), p.onSizeChangedObservable.add(() => { p._textures.forEach((_) => { _.texture.wrapU = De.CLAMP_ADDRESSMODE, _.texture.wrapV = De.CLAMP_ADDRESSMODE; }); }), this._fixReusablePostProcess(p), f.autoClear = !1, p.autoClear = !1; const m = []; for (let _ = 0; _ < this._blurNumIterations * 2; ++_) m[_] = _ & 1 ? p : f; return [u, d, m]; } else { const f = ["maxFilterSize", "blurDir", "projectedParticleConstant", "depthThreshold"], p = new Bi("BilateralBlurX", "fluidRenderingBilateralBlur", f, null, 1, null, 1, a, !0, null, t, void 0, void 0, void 0, i); p.samples = this._samples, p.externalTextureSamplerBinding = !0, p.onApplyObservable.add((v) => { this._postProcessRunningIndex === 0 ? v.setTexture("textureSampler", e) : v._bindTexture("textureSampler", p.inputTexture.texture), v.setInt("maxFilterSize", this.blurMaxFilterSize), v.setFloat2("blurDir", 1 / this._blurTextureSizeX, 0), v.setFloat("projectedParticleConstant", this._getProjectedParticleConstant()), v.setFloat("depthThreshold", this._getDepthThreshold()), this._postProcessRunningIndex++; }), p.onSizeChangedObservable.add(() => { p._textures.forEach((v) => { v.texture.wrapU = De.CLAMP_ADDRESSMODE, v.texture.wrapV = De.CLAMP_ADDRESSMODE; }); }), this._fixReusablePostProcess(p); const m = new Bi("BilateralBlurY", "fluidRenderingBilateralBlur", f, null, 1, null, 1, a, !0, null, t, void 0, void 0, void 0, i); m.samples = this._samples, m.onApplyObservable.add((v) => { v.setInt("maxFilterSize", this.blurMaxFilterSize), v.setFloat2("blurDir", 0, 1 / this._blurTextureSizeY), v.setFloat("projectedParticleConstant", this._getProjectedParticleConstant()), v.setFloat("depthThreshold", this._getDepthThreshold()), this._postProcessRunningIndex++; }), m.onSizeChangedObservable.add(() => { m._textures.forEach((v) => { v.texture.wrapU = De.CLAMP_ADDRESSMODE, v.texture.wrapV = De.CLAMP_ADDRESSMODE; }); }), this._fixReusablePostProcess(m), p.autoClear = !1, m.autoClear = !1; const _ = []; for (let v = 0; v < this._blurNumIterations * 2; ++v) _[v] = v & 1 ? m : p; return [u, d, _]; } } _fixReusablePostProcess(e) { e.isReusable() && (e.onActivateObservable.add(() => { e._currentRenderTextureInd = (e._currentRenderTextureInd + 1) % 2; }), e.onApplyObservable.add(() => { e._currentRenderTextureInd = (e._currentRenderTextureInd + 1) % 2; })); } _getProjectedParticleConstant() { var e, t; return this.blurFilterSize * this.particleSize * 0.05 * (this._height / 2) / Math.tan(((t = (e = this._camera) === null || e === void 0 ? void 0 : e.fov) !== null && t !== void 0 ? 
t : 45 * Math.PI / 180) / 2); } _getDepthThreshold() { return this.particleSize / 2 * this.blurDepthScale; } dispose() { var e, t, i, r; this.onDisposeObservable.hasObservers() && this.onDisposeObservable.notifyObservers(this), (e = this._rt) === null || e === void 0 || e.dispose(), this._rt = null, (t = this._texture) === null || t === void 0 || t.dispose(), this._texture = null, (i = this._rtBlur) === null || i === void 0 || i.dispose(), this._rtBlur = null, (r = this._textureBlurred) === null || r === void 0 || r.dispose(), this._textureBlurred = null, this._blurPostProcesses && (this._blurPostProcesses[0].dispose(), this._blurPostProcesses[1].dispose()), this._blurPostProcesses = null; } } var hm; (function(c) { c[c.DepthTexture = 0] = "DepthTexture", c[c.DepthBlurredTexture = 1] = "DepthBlurredTexture", c[c.ThicknessTexture = 2] = "ThicknessTexture", c[c.ThicknessBlurredTexture = 3] = "ThicknessBlurredTexture", c[c.DiffuseTexture = 4] = "DiffuseTexture", c[c.Normals = 5] = "Normals", c[c.DiffuseRendering = 6] = "DiffuseRendering"; })(hm || (hm = {})); class WH { /** * Returns true if the class needs to be reinitialized (because of changes in parameterization) */ get needInitialization() { return this._needInitialization; } /** * Gets or sets a boolean indicating that the diffuse texture should be generated and used for the rendering */ get generateDiffuseTexture() { return this._generateDiffuseTexture; } set generateDiffuseTexture(e) { this._generateDiffuseTexture !== e && (this._generateDiffuseTexture = e, this._needInitialization = !0); } /** * Gets or sets the feature (texture) to be debugged. Not used if debug is false */ get debugFeature() { return this._debugFeature; } set debugFeature(e) { this._debugFeature !== e && (this._needInitialization = !0, this._debugFeature = e); } /** * Gets or sets a boolean indicating if we should display a specific texture (given by debugFeature) for debugging purpose */ get debug() { return this._debug; } set debug(e) { this._debug !== e && (this._debug = e, this._needInitialization = !0); } /** * Gets or sets the environment map used for the reflection part of the shading * If null, no map will be used. If undefined, the scene.environmentMap will be used (if defined) */ get environmentMap() { return this._environmentMap; } set environmentMap(e) { this._environmentMap !== e && (this._needInitialization = !0, this._environmentMap = e); } /** * Gets or sets a boolean indicating that the depth texture should be blurred */ get enableBlurDepth() { return this._enableBlurDepth; } set enableBlurDepth(e) { this._enableBlurDepth !== e && (this._enableBlurDepth = e, this._needInitialization = !0); } /** * Gets or sets the depth size divisor (positive number, generally between 1 and 4), which is used as a divisor when creating the texture used for blurring the depth * For eg. 
if blurDepthSizeDivisor=2, the texture used to blur the depth will be half the size of the depth texture */ get blurDepthSizeDivisor() { return this._blurDepthSizeDivisor; } set blurDepthSizeDivisor(e) { this._blurDepthSizeDivisor !== e && (this._blurDepthSizeDivisor = e, this._needInitialization = !0); } /** * Size of the kernel used to filter the depth blur texture (positive number, generally between 1 and 20 - higher values will require more processing power from the GPU) */ get blurDepthFilterSize() { return this._blurDepthFilterSize; } set blurDepthFilterSize(e) { this._blurDepthFilterSize !== e && (this._blurDepthFilterSize = e, this._setBlurParameters()); } /** * Number of blurring iterations used to generate the depth blur texture (positive number, generally between 1 and 10 - higher values will require more processing power from the GPU) */ get blurDepthNumIterations() { return this._blurDepthNumIterations; } set blurDepthNumIterations(e) { this._blurDepthNumIterations !== e && (this._blurDepthNumIterations = e, this._setBlurParameters()); } /** * Maximum size of the kernel used to blur the depth texture (positive number, generally between 1 and 200 - higher values will require more processing power from the GPU when the particles are larger on screen) */ get blurDepthMaxFilterSize() { return this._blurDepthMaxFilterSize; } set blurDepthMaxFilterSize(e) { this._blurDepthMaxFilterSize !== e && (this._blurDepthMaxFilterSize = e, this._setBlurParameters()); } /** * Depth weight in the calculation when applying the bilateral blur to generate the depth blur texture (positive number, generally between 0 and 100) */ get blurDepthDepthScale() { return this._blurDepthDepthScale; } set blurDepthDepthScale(e) { this._blurDepthDepthScale !== e && (this._blurDepthDepthScale = e, this._setBlurParameters()); } /** * Gets or sets a boolean indicating that the thickness texture should be blurred */ get enableBlurThickness() { return this._enableBlurThickness; } set enableBlurThickness(e) { this._enableBlurThickness !== e && (this._enableBlurThickness = e, this._needInitialization = !0); } /** * Gets or sets the thickness size divisor (positive number, generally between 1 and 4), which is used as a divisor when creating the texture used for blurring the thickness * For eg. 
if blurThicknessSizeDivisor=2, the texture used to blur the thickness will be half the size of the thickness texture */ get blurThicknessSizeDivisor() { return this._blurThicknessSizeDivisor; } set blurThicknessSizeDivisor(e) { this._blurThicknessSizeDivisor !== e && (this._blurThicknessSizeDivisor = e, this._needInitialization = !0); } /** * Size of the kernel used to filter the thickness blur texture (positive number, generally between 1 and 20 - higher values will require more processing power from the GPU) */ get blurThicknessFilterSize() { return this._blurThicknessFilterSize; } set blurThicknessFilterSize(e) { this._blurThicknessFilterSize !== e && (this._blurThicknessFilterSize = e, this._setBlurParameters()); } /** * Number of blurring iterations used to generate the thickness blur texture (positive number, generally between 1 and 10 - higher values will require more processing power from the GPU) */ get blurThicknessNumIterations() { return this._blurThicknessNumIterations; } set blurThicknessNumIterations(e) { this._blurThicknessNumIterations !== e && (this._blurThicknessNumIterations = e, this._setBlurParameters()); } /** * Gets or sets a boolean indicating that a fixed thickness should be used instead of generating a thickness texture */ get useFixedThickness() { return this._useFixedThickness; } set useFixedThickness(e) { this._useFixedThickness !== e && (this._useFixedThickness = e, this._needInitialization = !0); } /** * Gets or sets a boolean indicating that the velocity should be used when rendering the particles as a fluid. * Note: the vertex buffers must contain a "velocity" buffer for this to work! */ get useVelocity() { return this._useVelocity; } set useVelocity(e) { this._useVelocity !== e && (this._useVelocity = e, this._needInitialization = !0, this._onUseVelocityChanged.notifyObservers(this)); } /** * Defines the size of the depth texture. * If null, the texture will have the size of the screen */ get depthMapSize() { return this._depthMapSize; } set depthMapSize(e) { this._depthMapSize !== e && (this._depthMapSize = e, this._needInitialization = !0); } /** * Defines the size of the thickness texture. * If null, the texture will have the size of the screen */ get thicknessMapSize() { return this._thicknessMapSize; } set thicknessMapSize(e) { this._thicknessMapSize !== e && (this._thicknessMapSize = e, this._needInitialization = !0); } /** * Defines the size of the diffuse texture. * If null, the texture will have the size of the screen */ get diffuseMapSize() { return this._diffuseMapSize; } set diffuseMapSize(e) { this._diffuseMapSize !== e && (this._diffuseMapSize = e, this._needInitialization = !0); } /** * Gets or sets the number of samples used by MSAA * Note: changing this value in WebGL does not work because depth/stencil textures can't be created with MSAA (see https://github.com/BabylonJS/Babylon.js/issues/12444) */ get samples() { return this._samples; } set samples(e) { this._samples !== e && (this._samples = e, this._needInitialization = !0); } /** * Gets the camera used for the rendering */ get camera() { return this._camera; } /** * Creates an instance of the class * @param scene Scene used to render the fluid object into * @param camera Camera used to render the fluid object. 
If not provided, use the active camera of the scene instead */ constructor(e, t) { this._generateDiffuseTexture = !1, this.fluidColor = new ze(0.085, 0.6375, 0.765), this.density = 2, this.refractionStrength = 0.1, this.fresnelClamp = 1, this.specularPower = 250, this.minimumThickness = 0, this.dirLight = new D(-2, -1, 1).normalize(), this._debugFeature = hm.DepthBlurredTexture, this._debug = !1, this._enableBlurDepth = !0, this._blurDepthSizeDivisor = 1, this._blurDepthFilterSize = 7, this._blurDepthNumIterations = 3, this._blurDepthMaxFilterSize = 100, this._blurDepthDepthScale = 10, this._enableBlurThickness = !0, this._blurThicknessSizeDivisor = 1, this._blurThicknessFilterSize = 5, this._blurThicknessNumIterations = 1, this._useFixedThickness = !1, this._onUseVelocityChanged = new Fe(), this._useVelocity = !1, this._depthMapSize = null, this._thicknessMapSize = null, this._diffuseMapSize = null, this._samples = 1, this._scene = e, this._engine = e.getEngine(), this._camera = t ?? e.activeCamera, this._needInitialization = !0, this._bgDepthTexture = null, this._invProjectionMatrix = new Ae(), this._depthClearColor = new Et(1e6, 1e6, 1e6, 1), this._thicknessClearColor = new Et(0, 0, 0, 1), this._depthRenderTarget = null, this._diffuseRenderTarget = null, this._thicknessRenderTarget = null, this._renderPostProcess = null; } /** @internal */ _initialize() { var e, t, i; this.dispose(), this._needInitialization = !1; const r = (e = this._depthMapSize) !== null && e !== void 0 ? e : this._engine.getRenderWidth(), s = this._depthMapSize !== null ? Math.round(this._depthMapSize * this._engine.getRenderHeight() / this._engine.getRenderWidth()) : this._engine.getRenderHeight(); if (this._depthRenderTarget = new Zk("Depth", this._scene, r, s, r, s, 1, 7, 1, 7, !1, this._camera, !0, this._samples), this._initializeRenderTarget(this._depthRenderTarget), this.generateDiffuseTexture) { const l = (t = this._diffuseMapSize) !== null && t !== void 0 ? t : this._engine.getRenderWidth(), o = this._diffuseMapSize !== null ? Math.round(this._diffuseMapSize * this._engine.getRenderHeight() / this._engine.getRenderWidth()) : this._engine.getRenderHeight(); this._diffuseRenderTarget = new Zk("Diffuse", this._scene, l, o, 0, 0, 0, 5, 0, 5, !0, this._camera, !0, this._samples), this._initializeRenderTarget(this._diffuseRenderTarget); } const n = (i = this._thicknessMapSize) !== null && i !== void 0 ? i : this._engine.getRenderWidth(), a = this._thicknessMapSize !== null ? 
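/*
 * Illustrative usage sketch (added comment, not part of the library): a target renderer (this class) is
 * normally obtained from the fluid renderer defined further below (addParticleSystem / addCustomParticles
 * return { object, targetRenderer }). Most setters either flag the renderer for reinitialization or
 * forward blur parameters to the render targets:
 *
 *   renderObject.targetRenderer.fluidColor = new ze(0.1, 0.5, 0.8); // ze assumed to be the public Color3
 *   renderObject.targetRenderer.density = 2.5;
 *   renderObject.targetRenderer.refractionStrength = 0.05;
 *   renderObject.targetRenderer.blurDepthFilterSize = 10;           // forwarded to the depth blur target
 *   renderObject.targetRenderer.debug = true;
 *   renderObject.targetRenderer.debugFeature = hm.ThicknessBlurredTexture; // hm is the debug-feature enum defined above
 */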
Math.round(this._thicknessMapSize * this._engine.getRenderHeight() / this._engine.getRenderWidth()) : this._engine.getRenderHeight(); this._useFixedThickness || (this._thicknessRenderTarget = new Zk("Thickness", this._scene, n, a, n, a, 2, 6, 2, 6, !0, this._camera, !1, this._samples), this._initializeRenderTarget(this._thicknessRenderTarget)), this._createLiquidRenderingPostProcess(); } _setBlurParameters(e = null) { (e === null || e === this._depthRenderTarget) && this._setBlurDepthParameters(), (e === null || e === this._thicknessRenderTarget) && this._setBlurThicknessParameters(); } _setBlurDepthParameters() { this._depthRenderTarget && (this._depthRenderTarget.blurFilterSize = this.blurDepthFilterSize, this._depthRenderTarget.blurMaxFilterSize = this.blurDepthMaxFilterSize, this._depthRenderTarget.blurNumIterations = this.blurDepthNumIterations, this._depthRenderTarget.blurDepthScale = this.blurDepthDepthScale); } _setBlurThicknessParameters() { this._thicknessRenderTarget && (this._thicknessRenderTarget.blurFilterSize = this.blurThicknessFilterSize, this._thicknessRenderTarget.blurNumIterations = this.blurThicknessNumIterations); } _initializeRenderTarget(e) { e !== this._diffuseRenderTarget && (e.enableBlur = e === this._depthRenderTarget ? this.enableBlurDepth : this.enableBlurThickness, e.blurSizeDivisor = e === this._depthRenderTarget ? this.blurDepthSizeDivisor : this.blurThicknessSizeDivisor), this._setBlurParameters(e), e.initialize(); } _createLiquidRenderingPostProcess() { var e; const t = this._scene.getEngine(), i = [ "viewMatrix", "projectionMatrix", "invProjectionMatrix", "texelSize", "dirLight", "cameraFar", "density", "refractionStrength", "fresnelClamp", "specularPower" ], r = ["depthSampler"], s = []; if (this.dispose(!0), !this._camera) return; const n = this._depthRenderTarget.enableBlur ? this._depthRenderTarget.textureBlur : this._depthRenderTarget.texture, a = new at(1 / n.getSize().width, 1 / n.getSize().height); this._scene.useRightHandedSystem && s.push("#define FLUIDRENDERING_RHS"), this._environmentMap !== null && ((e = this._environmentMap) !== null && e !== void 0 ? e : this._scene.environmentTexture) && (r.push("reflectionSampler"), s.push("#define FLUIDRENDERING_ENVIRONMENT")), this._diffuseRenderTarget ? (r.push("diffuseSampler"), s.push("#define FLUIDRENDERING_DIFFUSETEXTURE")) : i.push("diffuseColor"), this._useVelocity && (r.push("velocitySampler"), s.push("#define FLUIDRENDERING_VELOCITY")), this._useFixedThickness ? (i.push("thickness"), r.push("bgDepthSampler"), s.push("#define FLUIDRENDERING_FIXED_THICKNESS")) : (i.push("minimumThickness"), r.push("thicknessSampler")), this._debug && (s.push("#define FLUIDRENDERING_DEBUG"), this._debugFeature === hm.Normals ? s.push("#define FLUIDRENDERING_DEBUG_SHOWNORMAL") : this._debugFeature === hm.DiffuseRendering ? 
s.push("#define FLUIDRENDERING_DEBUG_DIFFUSERENDERING") : (s.push("#define FLUIDRENDERING_DEBUG_TEXTURE"), r.push("debugSampler"), (this._debugFeature === hm.DepthTexture || this._debugFeature === hm.DepthBlurredTexture) && s.push("#define FLUIDRENDERING_DEBUG_DEPTH"))), this._renderPostProcess = new Bi("FluidRendering", "fluidRenderingRender", i, r, 1, null, 2, t, !1, null, 0, void 0, void 0, !0, void 0), this._renderPostProcess.updateEffect(s.join(` `)), this._renderPostProcess.samples = this._samples, this._renderPostProcess.onApplyObservable.add((l) => { var o, u, h, d, f, p, m, _, v, C, x, b, S, M, R, w, V, k, L, B, U, K, ee; if (this._invProjectionMatrix.copyFrom(this._scene.getProjectionMatrix()), this._invProjectionMatrix.invert(), t.isWebGPU && l.setTextureSampler("textureSamplerSampler", this._renderPostProcess.inputTexture.texture), this._depthRenderTarget.enableBlur ? (l.setTexture("depthSampler", this._depthRenderTarget.textureBlur), t.isWebGPU && l.setTextureSampler("depthSamplerSampler", (d = (h = this._depthRenderTarget.textureBlur) === null || h === void 0 ? void 0 : h.getInternalTexture()) !== null && d !== void 0 ? d : null)) : (l.setTexture("depthSampler", this._depthRenderTarget.texture), t.isWebGPU && l.setTextureSampler("depthSamplerSampler", (u = (o = this._depthRenderTarget.texture) === null || o === void 0 ? void 0 : o.getInternalTexture()) !== null && u !== void 0 ? u : null)), this._diffuseRenderTarget ? this._diffuseRenderTarget.enableBlur ? (l.setTexture("diffuseSampler", this._diffuseRenderTarget.textureBlur), t.isWebGPU && l.setTextureSampler("diffuseSamplerSampler", (_ = (m = this._diffuseRenderTarget.textureBlur) === null || m === void 0 ? void 0 : m.getInternalTexture()) !== null && _ !== void 0 ? _ : null)) : (l.setTexture("diffuseSampler", this._diffuseRenderTarget.texture), t.isWebGPU && l.setTextureSampler("diffuseSamplerSampler", (p = (f = this._diffuseRenderTarget.texture) === null || f === void 0 ? void 0 : f.getInternalTexture()) !== null && p !== void 0 ? p : null)) : l.setColor3("diffuseColor", this.fluidColor), this._useFixedThickness ? (l.setFloat("thickness", this.minimumThickness), l._bindTexture("bgDepthSampler", this._bgDepthTexture), t.isWebGPU && l.setTextureSampler("bgDepthSamplerSampler", (v = this._bgDepthTexture) !== null && v !== void 0 ? v : null)) : (this._thicknessRenderTarget.enableBlur ? (l.setTexture("thicknessSampler", this._thicknessRenderTarget.textureBlur), t.isWebGPU && l.setTextureSampler("thicknessSamplerSampler", (S = (b = this._thicknessRenderTarget.textureBlur) === null || b === void 0 ? void 0 : b.getInternalTexture()) !== null && S !== void 0 ? S : null)) : (l.setTexture("thicknessSampler", this._thicknessRenderTarget.texture), t.isWebGPU && l.setTextureSampler("thicknessSamplerSampler", (x = (C = this._thicknessRenderTarget.texture) === null || C === void 0 ? void 0 : C.getInternalTexture()) !== null && x !== void 0 ? x : null)), l.setFloat("minimumThickness", this.minimumThickness)), this._environmentMap !== null) { const Z = (M = this._environmentMap) !== null && M !== void 0 ? M : this._scene.environmentTexture; Z && (l.setTexture("reflectionSampler", Z), t.isWebGPU && l.setTextureSampler("reflectionSamplerSampler", (R = Z == null ? void 0 : Z.getInternalTexture()) !== null && R !== void 0 ? 
R : null)); } if (l.setMatrix("viewMatrix", this._scene.getViewMatrix()), l.setMatrix("invProjectionMatrix", this._invProjectionMatrix), l.setMatrix("projectionMatrix", this._scene.getProjectionMatrix()), l.setVector2("texelSize", a), l.setFloat("density", this.density), l.setFloat("refractionStrength", this.refractionStrength), l.setFloat("fresnelClamp", this.fresnelClamp), l.setFloat("specularPower", this.specularPower), l.setVector3("dirLight", this.dirLight), l.setFloat("cameraFar", this._camera.maxZ), this._debug) { let Z = null; switch (this._debugFeature) { case hm.DepthTexture: Z = this._depthRenderTarget.texture; break; case hm.DepthBlurredTexture: Z = this._depthRenderTarget.enableBlur ? this._depthRenderTarget.textureBlur : this._depthRenderTarget.texture; break; case hm.ThicknessTexture: Z = (V = (w = this._thicknessRenderTarget) === null || w === void 0 ? void 0 : w.texture) !== null && V !== void 0 ? V : null; break; case hm.ThicknessBlurredTexture: Z = !((k = this._thicknessRenderTarget) === null || k === void 0) && k.enableBlur ? (B = (L = this._thicknessRenderTarget) === null || L === void 0 ? void 0 : L.textureBlur) !== null && B !== void 0 ? B : null : (K = (U = this._thicknessRenderTarget) === null || U === void 0 ? void 0 : U.texture) !== null && K !== void 0 ? K : null; break; case hm.DiffuseTexture: this._diffuseRenderTarget && (Z = this._diffuseRenderTarget.texture); break; } this._debugFeature !== hm.Normals && (l.setTexture("debugSampler", Z), t.isWebGPU && l.setTextureSampler("debugSamplerSampler", (ee = Z == null ? void 0 : Z.getInternalTexture()) !== null && ee !== void 0 ? ee : null)); } }); } /** @internal */ _clearTargets() { var e, t, i; !((e = this._depthRenderTarget) === null || e === void 0) && e.renderTarget && (this._engine.bindFramebuffer(this._depthRenderTarget.renderTarget), this._engine.clear(this._depthClearColor, !0, !0, !1), this._engine.unBindFramebuffer(this._depthRenderTarget.renderTarget)), !((t = this._diffuseRenderTarget) === null || t === void 0) && t.renderTarget && (this._engine.bindFramebuffer(this._diffuseRenderTarget.renderTarget), this._engine.clear(this._thicknessClearColor, !0, !0, !1), this._engine.unBindFramebuffer(this._diffuseRenderTarget.renderTarget)), !((i = this._thicknessRenderTarget) === null || i === void 0) && i.renderTarget && (this._engine.bindFramebuffer(this._thicknessRenderTarget.renderTarget), this._engine.clear(this._thicknessClearColor, !0, !1, !1), this._engine.unBindFramebuffer(this._thicknessRenderTarget.renderTarget)); } /** @internal */ _render(e) { var t, i, r, s, n, a; if (this._needInitialization || !e.isReady()) return; const l = this._engine._currentRenderTarget; this._engine.setState(!1, void 0, void 0, void 0, !0), this._engine.setDepthBuffer(!0), this._engine.setDepthWrite(!0), this._engine.setAlphaMode(0), !((t = this._depthRenderTarget) === null || t === void 0) && t.renderTarget && (this._engine.bindFramebuffer(this._depthRenderTarget.renderTarget), e.renderDepthTexture(), this._engine.unbindInstanceAttributes(), this._engine.unBindFramebuffer(this._depthRenderTarget.renderTarget)), !((i = this._diffuseRenderTarget) === null || i === void 0) && i.renderTarget && (this._engine.bindFramebuffer(this._diffuseRenderTarget.renderTarget), e.renderDiffuseTexture(), this._engine.unbindInstanceAttributes(), this._engine.unBindFramebuffer(this._diffuseRenderTarget.renderTarget)), !((r = this._thicknessRenderTarget) === null || r === void 0) && r.renderTarget && 
(this._engine.bindFramebuffer(this._thicknessRenderTarget.renderTarget), e.renderThicknessTexture(), this._engine.unbindInstanceAttributes(), this._engine.unBindFramebuffer(this._thicknessRenderTarget.renderTarget)), (s = this._depthRenderTarget) === null || s === void 0 || s.applyBlurPostProcesses(), (n = this._diffuseRenderTarget) === null || n === void 0 || n.applyBlurPostProcesses(), (a = this._thicknessRenderTarget) === null || a === void 0 || a.applyBlurPostProcesses(), l && this._engine.bindFramebuffer(l); } /** * Releases all the ressources used by the class * @param onlyPostProcesses If true, releases only the ressources used by the render post processes */ dispose(e = !1) { var t, i, r, s; e || ((t = this._depthRenderTarget) === null || t === void 0 || t.dispose(), this._depthRenderTarget = null, (i = this._diffuseRenderTarget) === null || i === void 0 || i.dispose(), this._diffuseRenderTarget = null, (r = this._thicknessRenderTarget) === null || r === void 0 || r.dispose(), this._thicknessRenderTarget = null), this._renderPostProcess && this._camera && this._camera.detachPostProcess(this._renderPostProcess), (s = this._renderPostProcess) === null || s === void 0 || s.dispose(), this._renderPostProcess = null, this._needInitialization = !1; } } class Bne extends lj { /** * Gets the name of the class */ getClassName() { return "FluidRenderingObjectCustomParticles"; } /** * Gets the vertex buffers */ get vertexBuffers() { return this._vertexBuffers; } /** * Creates a new instance of the class * @param scene The scene the particles should be rendered into * @param buffers The list of buffers (must contain at least one "position" buffer!). Note that you don't have to pass all (or any!) buffers at once in the constructor, you can use the addBuffers method to add more later. * @param numParticles Number of vertices to take into account from the buffers */ constructor(e, t, i) { super(e), this._numParticles = i, this._diffuseEffectWrapper = null, this._vertexBuffers = {}, this.addBuffers(t); } /** * Add some new buffers * @param buffers List of buffers */ addBuffers(e) { for (const t in e) { let i, r = !0; switch (t) { case "velocity": i = 3; break; case "offset": r = !1; break; } this._vertexBuffers[t] = new Y(this._engine, e[t], t, !0, !1, i, r); } } _createEffects() { super._createEffects(); const e = ["view", "projection", "size"], t = ["position", "offset", "color"]; this._diffuseEffectWrapper = new t6({ engine: this._engine, useShaderStore: !0, vertexShader: "fluidRenderingParticleDiffuse", fragmentShader: "fluidRenderingParticleDiffuse", attributeNames: t, uniformNames: e, samplerNames: [] }); } /** * Indicates if the object is ready to be rendered * @returns True if everything is ready for the object to be rendered, otherwise false */ isReady() { var e, t; return this._vertexBuffers.offset || (this._vertexBuffers.offset = new Y(this._engine, [0, 0, 1, 0, 0, 1, 1, 1], "offset", !1, !1, 2)), super.isReady() && ((t = (e = this._diffuseEffectWrapper) === null || e === void 0 ? void 0 : e.effect.isReady()) !== null && t !== void 0 ? 
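/*
 * Illustrative usage sketch (added comment, not part of the library): custom particle sets are described
 * by raw vertex buffers. A "position" buffer is required; a "color" buffer is needed when a diffuse
 * texture should be generated, and a "velocity" buffer (3 floats per particle) enables the velocity-based
 * shading path. Assumes `fluidRenderer` comes from scene.enableFluidRenderer() and that the buffers are
 * filled by the application; addCustomParticles is defined on the fluid renderer further below.
 *
 *   const count = 1000;
 *   const positions = new Float32Array(count * 3);        // x, y, z per particle
 *   const renderObject = fluidRenderer.addCustomParticles({ position: positions }, count);
 *   renderObject.object.setNumParticles(500);             // render only the first 500 entries
 */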
t : !1); } /** * Gets the number of particles in this object * @returns The number of particles */ get numParticles() { return this._numParticles; } /** * Sets the number of particles in this object * @param num The number of particles to take into account */ setNumParticles(e) { this._numParticles = e; } /** * Render the diffuse texture for this object */ renderDiffuseTexture() { const e = this.numParticles; if (!this._diffuseEffectWrapper || e === 0) return; const t = this._diffuseEffectWrapper._drawWrapper, i = t.effect; this._engine.enableEffect(t), this._engine.bindBuffers(this.vertexBuffers, this.indexBuffer, i), i.setMatrix("view", this._scene.getViewMatrix()), i.setMatrix("projection", this._scene.getProjectionMatrix()), this._particleSize !== null && i.setFloat2("size", this._particleSize, this._particleSize), this.useInstancing ? this._engine.drawArraysType(7, 0, 4, e) : this._engine.drawElementsType(0, 0, e); } /** * Releases the ressources used by the class */ dispose() { var e; super.dispose(), (e = this._diffuseEffectWrapper) === null || e === void 0 || e.dispose(); for (const t in this._vertexBuffers) this._vertexBuffers[t].dispose(); this._vertexBuffers = {}; } } const N4e = "copyTextureToTexturePixelShader", F4e = `uniform float conversion;uniform sampler2D textureSampler;varying vec2 vUV; #include void main(void) {vec4 color=texture2D(textureSampler,vUV); #ifdef DEPTH_TEXTURE gl_FragDepth=color.r; #else if (conversion==1.) {color=toLinearSpace(color);} else if (conversion==2.) {color=toGammaSpace(color);} gl_FragColor=color; #endif } `; je.ShadersStore[N4e] = F4e; var xB; (function(c) { c[c.None = 0] = "None", c[c.ToLinearSpace = 1] = "ToLinearSpace", c[c.ToGammaSpace = 2] = "ToGammaSpace"; })(xB || (xB = {})); class Une { _textureIsInternal(e) { return e.getInternalTexture === void 0; } /** * Constructs a new instance of the class * @param engine The engine to use for the copy * @param isDepthTexture True means that we should write (using gl_FragDepth) into the depth texture attached to the destination (default: false) */ constructor(e, t = !1) { this._engine = e, this._isDepthTexture = t, this._renderer = new vw(e), this._effectWrapper = new t6({ engine: e, name: "CopyTextureToTexture", fragmentShader: "copyTextureToTexture", useShaderStore: !0, uniformNames: ["conversion"], samplerNames: ["textureSampler"], defines: t ? ["#define DEPTH_TEXTURE"] : [] }), this._effectWrapper.onApplyObservable.add(() => { t && (e.setState(!1), e.setDepthBuffer(!0), e.depthCullingState.depthMask = !0, e.depthCullingState.depthFunc = 519), this._textureIsInternal(this._source) ? 
this._effectWrapper.effect._bindTexture("textureSampler", this._source) : this._effectWrapper.effect.setTexture("textureSampler", this._source), this._effectWrapper.effect.setFloat("conversion", this._conversion); }); } /** * Indicates if the effect is ready to be used for the copy * @returns true if "copy" can be called without delay, else false */ isReady() { return this._effectWrapper.effect.isReady(); } /** * Copy one texture into another * @param source The source texture * @param destination The destination texture * @param conversion The conversion mode that should be applied when copying * @returns */ copy(e, t, i = xB.None) { if (!this.isReady()) return !1; this._source = e, this._conversion = i; const r = this._engine.depthCullingState.depthFunc; return this._renderer.render(this._effectWrapper, t), this._isDepthTexture && r && (this._engine.depthCullingState.depthFunc = r), !0; } /** * Releases all the resources used by the class */ dispose() { this._effectWrapper.dispose(), this._renderer.dispose(); } } class B4e { get depthRTWrapper() { return this._depthRTWrapper; } constructor(e, t, i, r = 1) { this._engine = e, this._copyTextureToTexture = new Une(e, !0), this._depthRTWrapper = this._engine.createRenderTargetTexture({ width: t, height: i }, { generateMipMaps: !1, type: 0, format: 6, samplingMode: 1, generateDepthBuffer: !0, generateStencilBuffer: !1, samples: r, noColorAttachment: !0, label: "FluidRenderingDepthTextureCopyRTT" }); const s = this._depthRTWrapper.createDepthStencilTexture(0, !1, !1, 1, void 0, "FluidRenderingDepthTextureCopyRTTDepthStencil"); s.label = `FluidDepthTextureCopy${t}x${i}x${r}`; } copy(e) { return this._copyTextureToTexture.copy(e, this._depthRTWrapper); } dispose() { this._depthRTWrapper.dispose(), this._copyTextureToTexture.dispose(); } } const U4e = "fluidRenderingParticleDepthVertexShader", V4e = `attribute vec3 position;attribute vec2 offset;uniform mat4 view;uniform mat4 projection;uniform vec2 size;varying vec2 uv;varying vec3 viewPos;varying float sphereRadius; #ifdef FLUIDRENDERING_VELOCITY attribute vec3 velocity;varying float velocityNorm; #endif void main(void) {vec3 cornerPos;cornerPos.xy=vec2(offset.x-0.5,offset.y-0.5)*size;cornerPos.z=0.0;viewPos=(view*vec4(position,1.0)).xyz;gl_Position=projection*vec4(viewPos+cornerPos,1.0);uv=offset;sphereRadius=size.x/2.0; #ifdef FLUIDRENDERING_VELOCITY velocityNorm=length(velocity); #endif } `; je.ShadersStore[U4e] = V4e; const k4e = "fluidRenderingParticleDepthPixelShader", z4e = `uniform mat4 projection;varying vec2 uv;varying vec3 viewPos;varying float sphereRadius; #ifdef FLUIDRENDERING_VELOCITY varying float velocityNorm; #endif void main(void) {vec3 normal;normal.xy=uv*2.0-1.0;float r2=dot(normal.xy,normal.xy);if (r2>1.0) discard;normal.z=sqrt(1.0-r2); #ifndef FLUIDRENDERING_RHS normal.z=-normal.z; #endif vec4 realViewPos=vec4(viewPos+normal*sphereRadius,1.0);vec4 clipSpacePos=projection*realViewPos; #ifdef WEBGPU gl_FragDepth=clipSpacePos.z/clipSpacePos.w; #else gl_FragDepth=(clipSpacePos.z/clipSpacePos.w)*0.5+0.5; #endif #ifdef FLUIDRENDERING_RHS realViewPos.z=-realViewPos.z; #endif #ifdef FLUIDRENDERING_VELOCITY glFragColor=vec4(realViewPos.z,velocityNorm,0.,1.); #else glFragColor=vec4(realViewPos.z,0.,0.,1.); #endif } `; je.ShadersStore[k4e] = z4e; const H4e = "fluidRenderingParticleThicknessVertexShader", G4e = `attribute vec3 position;attribute vec2 offset;uniform mat4 view;uniform mat4 projection;uniform vec2 size;varying vec2 uv;void main(void) {vec3 
cornerPos;cornerPos.xy=vec2(offset.x-0.5,offset.y-0.5)*size;cornerPos.z=0.0;vec3 viewPos=(view*vec4(position,1.0)).xyz+cornerPos;gl_Position=projection*vec4(viewPos,1.0);uv=offset;} `; je.ShadersStore[H4e] = G4e; const K4e = "fluidRenderingParticleThicknessPixelShader", W4e = `uniform float particleAlpha;varying vec2 uv;void main(void) {vec3 normal;normal.xy=uv*2.0-1.0;float r2=dot(normal.xy,normal.xy);if (r2>1.0) discard;float thickness=sqrt(1.0-r2);glFragColor=vec4(vec3(particleAlpha*thickness),1.0);} `; je.ShadersStore[K4e] = W4e; const j4e = "fluidRenderingParticleDiffuseVertexShader", X4e = `attribute vec3 position;attribute vec2 offset;attribute vec4 color;uniform mat4 view;uniform mat4 projection;uniform vec2 size;varying vec2 uv;varying vec3 diffuseColor;void main(void) {vec3 cornerPos;cornerPos.xy=vec2(offset.x-0.5,offset.y-0.5)*size;cornerPos.z=0.0;vec3 viewPos=(view*vec4(position,1.0)).xyz+cornerPos;gl_Position=projection*vec4(viewPos,1.0);uv=offset;diffuseColor=color.rgb;} `; je.ShadersStore[j4e] = X4e; const Y4e = "fluidRenderingParticleDiffusePixelShader", Q4e = `uniform float particleAlpha;varying vec2 uv;varying vec3 diffuseColor;void main(void) {vec3 normal;normal.xy=uv*2.0-1.0;float r2=dot(normal.xy,normal.xy);if (r2>1.0) discard;glFragColor=vec4(diffuseColor,1.0);} `; je.ShadersStore[Y4e] = Q4e; const $4e = "fluidRenderingBilateralBlurPixelShader", Z4e = `uniform sampler2D textureSampler;uniform int maxFilterSize;uniform vec2 blurDir;uniform float projectedParticleConstant;uniform float depthThreshold;varying vec2 vUV;void main(void) {float depth=textureLod(textureSampler,vUV,0.).x;if (depth>=1e6 || depth<=0.) {glFragColor=vec4(vec3(depth),1.);return;} int filterSize=min(maxFilterSize,int(ceil(projectedParticleConstant/depth)));float sigma=float(filterSize)/3.0;float two_sigma2=2.0*sigma*sigma;float sigmaDepth=depthThreshold/3.0;float two_sigmaDepth2=2.0*sigmaDepth*sigmaDepth;float sum=0.;float wsum=0.;float sumVel=0.;for (int x=-filterSize; x<=filterSize; ++x) {vec2 coords=vec2(x);vec2 sampleDepthVel=textureLod(textureSampler,vUV+coords*blurDir,0.).rg;float r=dot(coords,coords);float w=exp(-r/two_sigma2);float rDepth=sampleDepthVel.r-depth;float wd=exp(-rDepth*rDepth/two_sigmaDepth2);sum+=sampleDepthVel.r*w*wd;sumVel+=sampleDepthVel.g*w*wd;wsum+=w*wd;} glFragColor=vec4(sum/wsum,sumVel/wsum,0.,1.);} `; je.ShadersStore[$4e] = Z4e; const q4e = "fluidRenderingStandardBlurPixelShader", J4e = `uniform sampler2D textureSampler;uniform int filterSize;uniform vec2 blurDir;varying vec2 vUV;void main(void) {vec4 s=textureLod(textureSampler,vUV,0.);if (s.r==0.) 
{glFragColor=vec4(0.,0.,0.,1.);return;} float sigma=float(filterSize)/3.0;float twoSigma2=2.0*sigma*sigma;vec4 sum=vec4(0.);float wsum=0.;for (int x=-filterSize; x<=filterSize; ++x) {vec2 coords=vec2(x);vec4 sampl=textureLod(textureSampler,vUV+coords*blurDir,0.);float w=exp(-coords.x*coords.x/twoSigma2);sum+=sampl*w;wsum+=w;} sum/=wsum;glFragColor=vec4(sum.rgb,1.);} `; je.ShadersStore[q4e] = J4e; const eve = "fluidRenderingRenderPixelShader", tve = `/* disable_uniformity_analysis */ #define IOR 1.333 #define ETA 1.0/IOR #define F0 0.02 uniform sampler2D textureSampler;uniform sampler2D depthSampler; #ifdef FLUIDRENDERING_DIFFUSETEXTURE uniform sampler2D diffuseSampler; #else uniform vec3 diffuseColor; #endif #ifdef FLUIDRENDERING_FIXED_THICKNESS uniform float thickness;uniform sampler2D bgDepthSampler; #else uniform float minimumThickness;uniform sampler2D thicknessSampler; #endif #ifdef FLUIDRENDERING_ENVIRONMENT uniform samplerCube reflectionSampler; #endif #if defined(FLUIDRENDERING_DEBUG) && defined(FLUIDRENDERING_DEBUG_TEXTURE) uniform sampler2D debugSampler; #endif uniform mat4 viewMatrix;uniform mat4 projectionMatrix;uniform mat4 invProjectionMatrix;uniform vec2 texelSize;uniform vec3 dirLight;uniform float cameraFar;uniform float density;uniform float refractionStrength;uniform float fresnelClamp;uniform float specularPower;varying vec2 vUV;vec3 computeViewPosFromUVDepth(vec2 texCoord,float depth) {vec4 ndc;ndc.xy=texCoord*2.0-1.0; #ifdef FLUIDRENDERING_RHS ndc.z=-projectionMatrix[2].z+projectionMatrix[3].z/depth; #else ndc.z=projectionMatrix[2].z+projectionMatrix[3].z/depth; #endif ndc.w=1.0;vec4 eyePos=invProjectionMatrix*ndc;eyePos.xyz/=eyePos.w;return eyePos.xyz;} vec3 getViewPosFromTexCoord(vec2 texCoord) {float depth=textureLod(depthSampler,texCoord,0.).x;return computeViewPosFromUVDepth(texCoord,depth);} void main(void) {vec2 texCoord=vUV; #if defined(FLUIDRENDERING_DEBUG) && defined(FLUIDRENDERING_DEBUG_TEXTURE) vec4 color=texture2D(debugSampler,texCoord); #ifdef FLUIDRENDERING_DEBUG_DEPTH glFragColor=vec4(color.rgb/vec3(2.0),1.);if (color.r>0.999 && color.g>0.999) {glFragColor=texture2D(textureSampler,texCoord);} #else glFragColor=vec4(color.rgb,1.);if (color.r<0.001 && color.g<0.001 && color.b<0.001) {glFragColor=texture2D(textureSampler,texCoord);} #endif return; #endif vec2 depthVel=textureLod(depthSampler,texCoord,0.).rg;float depth=depthVel.r; #ifndef FLUIDRENDERING_FIXED_THICKNESS float thickness=texture2D(thicknessSampler,texCoord).x; #else float bgDepth=texture2D(bgDepthSampler,texCoord).x;float depthNonLinear=projectionMatrix[2].z+projectionMatrix[3].z/depth;depthNonLinear=depthNonLinear*0.5+0.5; #endif vec4 backColor=texture2D(textureSampler,texCoord); #ifndef FLUIDRENDERING_FIXED_THICKNESS if (depth>=cameraFar || depth<=0. || thickness<=minimumThickness) { #else if (depth>=cameraFar || depth<=0. 
|| bgDepth<=depthNonLinear) { #endif glFragColor=backColor;return;} vec3 viewPos=computeViewPosFromUVDepth(texCoord,depth);vec3 ddx=getViewPosFromTexCoord(texCoord+vec2(texelSize.x,0.))-viewPos;vec3 ddy=getViewPosFromTexCoord(texCoord+vec2(0.,texelSize.y))-viewPos;vec3 ddx2=viewPos-getViewPosFromTexCoord(texCoord+vec2(-texelSize.x,0.));if (abs(ddx.z)>abs(ddx2.z)) {ddx=ddx2;} vec3 ddy2=viewPos-getViewPosFromTexCoord(texCoord+vec2(0.,-texelSize.y));if (abs(ddy.z)>abs(ddy2.z)) {ddy=ddy2;} vec3 normal=normalize(cross(ddy,ddx)); #ifdef FLUIDRENDERING_RHS normal=-normal; #endif #ifndef WEBGPU if(isnan(normal.x) || isnan(normal.y) || isnan(normal.z) || isinf(normal.x) || isinf(normal.y) || isinf(normal.z)) {normal=vec3(0.,0.,-1.);} #endif #if defined(FLUIDRENDERING_DEBUG) && defined(FLUIDRENDERING_DEBUG_SHOWNORMAL) glFragColor=vec4(normal*0.5+0.5,1.0);return; #endif vec3 rayDir=normalize(viewPos); #ifdef FLUIDRENDERING_DIFFUSETEXTURE vec3 diffuseColor=textureLod(diffuseSampler,texCoord,0.0).rgb; #endif vec3 lightDir=normalize(vec3(viewMatrix*vec4(-dirLight,0.)));vec3 H =normalize(lightDir-rayDir);float specular=pow(max(0.0,dot(H,normal)),specularPower); #ifdef FLUIDRENDERING_DEBUG_DIFFUSERENDERING float diffuse =max(0.0,dot(lightDir,normal))*1.0;glFragColor=vec4(vec3(0.1) /*ambient*/+vec3(0.42,0.50,1.00)*diffuse+vec3(0,0,0.2)+specular,1.);return; #endif vec3 refractionDir=refract(rayDir,normal,ETA);vec4 transmitted=textureLod(textureSampler,vec2(texCoord+refractionDir.xy*thickness*refractionStrength),0.0);vec3 transmittance=exp(-density*thickness*(1.0-diffuseColor)); vec3 refractionColor=transmitted.rgb*transmittance; #ifdef FLUIDRENDERING_ENVIRONMENT vec3 reflectionDir=reflect(rayDir,normal);vec3 reflectionColor=(textureCube(reflectionSampler,reflectionDir).rgb);float fresnel=clamp(F0+(1.0-F0)*pow(1.0-dot(normal,-rayDir),5.0),0.,fresnelClamp);vec3 finalColor=mix(refractionColor,reflectionColor,fresnel)+specular; #else vec3 finalColor=refractionColor+specular; #endif #ifdef FLUIDRENDERING_VELOCITY float velocity=depthVel.g;finalColor=mix(finalColor,vec3(1.0),smoothstep(0.3,1.0,velocity/6.0)); #endif glFragColor=vec4(finalColor,transmitted.a);} `; je.ShadersStore[eve] = tve; Object.defineProperty(ii.prototype, "fluidRenderer", { get: function() { return this._fluidRenderer; }, set: function(c) { this._fluidRenderer = c; }, enumerable: !0, configurable: !0 }); ii.prototype.enableFluidRenderer = function() { return this._fluidRenderer ? 
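/*
 * Illustrative usage sketch (added comment, not part of the library): enabling and tearing down the
 * fluid renderer on a scene (`scene` assumed to be an instance of the scene class minified as `ii`).
 *
 *   const fluidRenderer = scene.enableFluidRenderer();   // creates the renderer on first call, then reuses it
 *   // ... add particle systems or custom particle buffers to it ...
 *   scene.disableFluidRenderer();                        // disposes the renderer and clears scene.fluidRenderer
 */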
this._fluidRenderer : (this._fluidRenderer = new $U(this), this._fluidRenderer); }; ii.prototype.disableFluidRenderer = function() { var c; (c = this._fluidRenderer) === null || c === void 0 || c.dispose(), this._fluidRenderer = null; }; function ive(c) { return !!c.particleSystem; } class Vne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_FLUIDRENDERER, this.scene = e; } /** * Registers the component in a given scene */ register() { this.scene._gatherActiveCameraRenderTargetsStage.registerStep(Bt.STEP_GATHERACTIVECAMERARENDERTARGETS_FLUIDRENDERER, this, this._gatherActiveCameraRenderTargets), this.scene._afterCameraDrawStage.registerStep(Bt.STEP_AFTERCAMERADRAW_FLUIDRENDERER, this, this._afterCameraDraw); } _gatherActiveCameraRenderTargets(e) { var t; (t = this.scene.fluidRenderer) === null || t === void 0 || t._prepareRendering(); } _afterCameraDraw(e) { var t; (t = this.scene.fluidRenderer) === null || t === void 0 || t._render(e); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { this.scene._fluidRenderer && (this.scene.disableFluidRenderer(), this.scene.enableFluidRenderer()); } /** * Disposes the component and the associated resources */ dispose() { this.scene.disableFluidRenderer(); } } class $U { /** @internal */ static _SceneComponentInitialization(e) { let t = e._getComponent(Bt.NAME_FLUIDRENDERER); t || (t = new Vne(e), e._addComponent(t)); } /** * Initializes the class * @param scene Scene in which the objects are part of */ constructor(e) { this._scene = e, this._engine = e.getEngine(), this._onEngineResizeObserver = null, this.renderObjects = [], this.targetRenderers = [], this._cameras = /* @__PURE__ */ new Map(), $U._SceneComponentInitialization(this._scene), this._onEngineResizeObserver = this._engine.onResizeObservable.add(() => { this._initialize(); }); } /** * Reinitializes the class * Can be used if you change the object priority (FluidRenderingObject.priority), to make sure the objects are rendered in the right order */ recreate() { this._sortRenderingObjects(), this._initialize(); } /** * Gets the render object corresponding to a particle system (null if the particle system is not rendered as a fluid) * @param ps The particle system * @returns the render object corresponding to this particle system if any, otherwise null */ getRenderObjectFromParticleSystem(e) { const t = this._getParticleSystemIndex(e); return t !== -1 ? this.renderObjects[t] : null; } /** * Adds a particle system to the fluid renderer. * @param ps particle system * @param generateDiffuseTexture True if you want to generate a diffuse texture from the particle system and use it as part of the fluid rendering (default: false) * @param targetRenderer The target renderer used to display the particle system as a fluid. 
If not provided, the method will create a new one * @param camera The camera used by the target renderer (if the target renderer is created by the method) * @returns the render object corresponding to the particle system */ addParticleSystem(e, t, i, r) { const s = new Fne(this._scene, e); s.onParticleSizeChanged.add(() => this._setParticleSizeForRenderTargets()), i || (i = new WH(this._scene, r), this.targetRenderers.push(i)), i._onUseVelocityChanged.hasObservers() || i._onUseVelocityChanged.add(() => this._setUseVelocityForRenderObject()), t !== void 0 && (i.generateDiffuseTexture = t); const n = { object: s, targetRenderer: i }; return this.renderObjects.push(n), this._sortRenderingObjects(), this._setParticleSizeForRenderTargets(), n; } /** * Adds a custom particle set to the fluid renderer. * @param buffers The list of buffers (should contain at least a "position" buffer!) * @param numParticles Number of particles in each buffer * @param generateDiffuseTexture True if you want to generate a diffuse texture from buffers and use it as part of the fluid rendering (default: false). For the texture to be generated correctly, you need a "color" buffer in the set! * @param targetRenderer The target renderer used to display the particle system as a fluid. If not provided, the method will create a new one * @param camera The camera used by the target renderer (if the target renderer is created by the method) * @returns the render object corresponding to the custom particle set */ addCustomParticles(e, t, i, r, s) { const n = new Bne(this._scene, e, t); n.onParticleSizeChanged.add(() => this._setParticleSizeForRenderTargets()), r || (r = new WH(this._scene, s), this.targetRenderers.push(r)), r._onUseVelocityChanged.hasObservers() || r._onUseVelocityChanged.add(() => this._setUseVelocityForRenderObject()), i !== void 0 && (r.generateDiffuseTexture = i); const a = { object: n, targetRenderer: r }; return this.renderObjects.push(a), this._sortRenderingObjects(), this._setParticleSizeForRenderTargets(), a; } /** * Removes a render object from the fluid renderer * @param renderObject the render object to remove * @param removeUnusedTargetRenderer True to remove/dispose of the target renderer if it's not used anymore (default: true) * @returns True if the render object has been found and released, else false */ removeRenderObject(e, t = !0) { const i = this.renderObjects.indexOf(e); return i === -1 ? !1 : (e.object.dispose(), this.renderObjects.splice(i, 1), t && this._removeUnusedTargetRenderers() ? this._initialize() : this._setParticleSizeForRenderTargets(), !0); } _sortRenderingObjects() { this.renderObjects.sort((e, t) => e.object.priority < t.object.priority ? -1 : e.object.priority > t.object.priority ? 1 : 0); } _removeUnusedTargetRenderers() { const e = {}; for (let r = 0; r < this.renderObjects.length; ++r) { const s = this.renderObjects[r].targetRenderer; e[this.targetRenderers.indexOf(s)] = !0; } let t = !1; const i = []; for (let r = 0; r < this.targetRenderers.length; ++r) e[r] ? 
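/* Illustrative sketch (assumption: `scene` and `particleSystem` already exist): registering and removing fluid render
   objects with the methods defined just above. The second argument of addParticleSystem enables diffuse-texture
   generation; the optional third and fourth arguments are an existing target renderer and a camera.
   const fluidRenderer = scene.enableFluidRenderer();
   const entry = fluidRenderer.addParticleSystem(particleSystem, true);
   // entry.object is the fluid rendering object, entry.targetRenderer is the renderer that displays it
   fluidRenderer.removeRenderObject(entry); // later: stop rendering it as a fluid (unused target renderers are disposed by default)
*/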
i.push(this.targetRenderers[r]) : (this.targetRenderers[r].dispose(), t = !0); return t && (this.targetRenderers.length = 0, this.targetRenderers.push(...i)), t; } _getParticleSystemIndex(e) { for (let t = 0; t < this.renderObjects.length; ++t) { const i = this.renderObjects[t].object; if (ive(i) && i.particleSystem === e) return t; } return -1; } _initialize() { for (let i = 0; i < this.targetRenderers.length; ++i) this.targetRenderers[i].dispose(); const e = /* @__PURE__ */ new Map(); for (let i = 0; i < this.targetRenderers.length; ++i) { const r = this.targetRenderers[i]; if (r._initialize(), r.camera && r._renderPostProcess) { let s = e.get(r.camera); s || (s = [[], {}], e.set(r.camera, s)), s[0].push(r), r.camera.attachPostProcess(r._renderPostProcess, i); } } let t = e.keys(); for (let i = t.next(); i.done !== !0; i = t.next()) { const r = i.value, s = e.get(r), n = r._getFirstPostProcess(); if (!n) continue; const [a, l] = s; n.onSizeChangedObservable.add(() => { var o; n.inputTexture.depthStencilTexture || n.inputTexture.createDepthStencilTexture(0, !0, this._engine.isStencilEnable, a[0].samples, this._engine.isStencilEnable ? 13 : 14, `PostProcessRTTDepthStencil-${n.name}`); for (const u of a) { const h = (o = u._thicknessRenderTarget) === null || o === void 0 ? void 0 : o.renderTarget, d = h == null ? void 0 : h.texture; if (h && d) { const f = d.width + "_" + d.height; let p = l[f]; p || (p = l[f] = new B4e(this._engine, d.width, d.height)), p.depthRTWrapper._shareDepth(h); } } }); } t = this._cameras.keys(); for (let i = t.next(); i.done !== !0; i = t.next()) { const r = i.value, n = this._cameras.get(r)[1], a = e.get(r); if (a) for (const l in n) a[1][l] || n[l].dispose(); else for (const l in n) n[l].dispose(); } this._cameras.clear(), this._cameras = e, this._setParticleSizeForRenderTargets(); } _setParticleSizeForRenderTargets() { const e = /* @__PURE__ */ new Map(); for (let t = 0; t < this.renderObjects.length; ++t) { const i = this.renderObjects[t]; let r = e.get(i.targetRenderer); r === void 0 && (r = 0), e.set(i.targetRenderer, Math.max(r, i.object.particleSize)); } e.forEach((t, i) => { i._depthRenderTarget && (i._depthRenderTarget.particleSize = t); }); } _setUseVelocityForRenderObject() { for (const e of this.renderObjects) e.object.useVelocity = e.targetRenderer.useVelocity; } /** @internal */ _prepareRendering() { for (const e of this.targetRenderers) if (e.needInitialization) { this._initialize(); return; } } /** @internal */ _render(e) { var t; for (let r = 0; r < this.targetRenderers.length; ++r) (!e || this.targetRenderers[r].camera === e) && this.targetRenderers[r]._clearTargets(); const i = this._cameras.keys(); for (let r = i.next(); r.done !== !0; r = i.next()) { const s = r.value, n = this._cameras.get(s); if (e && s !== e) continue; const a = s._getFirstPostProcess(); if (!a) continue; const l = (t = a.inputTexture) === null || t === void 0 ? 
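/* Sketch (hypothetical values): render objects are drawn in the order produced by _sortRenderingObjects, which sorts by
   object.priority. After changing a priority, call recreate() so the ordering and the internal targets are rebuilt.
   entry.object.priority = 10;   // entry is the object returned by addParticleSystem/addCustomParticles
   fluidRenderer.recreate();
*/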
void 0 : t.depthStencilTexture; if (l) { const [o, u] = n; for (const h of o) h._bgDepthTexture = l; for (const h in u) u[h].copy(l); } } for (let r = 0; r < this.renderObjects.length; ++r) { const s = this.renderObjects[r]; (!e || s.targetRenderer.camera === e) && s.targetRenderer._render(s.object); } } /** * Disposes of all the ressources used by the class */ dispose() { this._engine.onResizeObservable.remove(this._onEngineResizeObserver), this._onEngineResizeObserver = null; for (let e = 0; e < this.renderObjects.length; ++e) this.renderObjects[e].object.dispose(); for (let e = 0; e < this.targetRenderers.length; ++e) this.targetRenderers[e].dispose(); this._cameras.forEach((e) => { const t = e[1]; for (const i in t) t[i].dispose(); }), this.renderObjects = [], this.targetRenderers = [], this._cameras.clear(); } } class JC { /** * Return the number of splattings used */ get vertexCount() { return this._vertexCount; } /** * Shader material with alpha blending * @param scene parent scene */ _createMaterial(e) { Cr.ShadersStore.gaussianSplattingVertexShader = JC._VertexShaderSource, Cr.ShadersStore.gaussianSplattingFragmentShader = JC._FragmentShaderSource; const t = new Lo("GaussianSplattingShader", e, { vertex: "gaussianSplatting", fragment: "gaussianSplatting" }, { attributes: ["position"], uniforms: ["projection", "modelView", "viewport"] }); t.backFaceCulling = !1, t.alpha = 0.9999, this._material = t; } /** * * @param scene parent scene * @returns A simple 2 triangles quad */ _getMesh(e) { const t = new ke(this.name, e), i = new Ot(); i.positions = [-2, -2, 0, 2, -2, 0, 2, 2, 0, -2, 2, 0], i.indices = [0, 1, 2, 0, 2, 3], i.applyToMesh(t); const r = t.getBoundingInfo(); return r.reConstruct(this._minimum, this._maximum), r.isLocked = !0, t.doNotSyncBoundingInfo = !0, t.material = this._material, t; } _setData(e) { this._vertexCount = e.length / 32; const i = this._vertexCount; this._positions = new Float32Array(3 * i), this._covA = new Float32Array(3 * i), this._covB = new Float32Array(3 * i); const r = new Float32Array(e.buffer); this._uBuffer = new Uint8Array(e.buffer); const s = Ae.Zero(), n = Ae.Zero(), a = Ze.Identity(); this._minimum.set(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE), this._maximum.set(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE); for (let l = 0; l < i; l++) { const o = r[8 * l + 0], u = -r[8 * l + 1], h = r[8 * l + 2]; this._positions[3 * l + 0] = o, this._positions[3 * l + 1] = u, this._positions[3 * l + 2] = h, this._minimum.minimizeInPlaceFromFloats(o, u, h), this._maximum.maximizeInPlaceFromFloats(o, u, h), a.set((this._uBuffer[32 * l + 28 + 1] - 128) / 128, (this._uBuffer[32 * l + 28 + 2] - 128) / 128, (this._uBuffer[32 * l + 28 + 3] - 128) / 128, -(this._uBuffer[32 * l + 28 + 0] - 128) / 128), a.toRotationMatrix(s), Ae.ScalingToRef(r[8 * l + 3 + 0] * 2, r[8 * l + 3 + 1] * 2, r[8 * l + 3 + 2] * 2, n); const d = s.multiplyToRef(n, de.Matrix[0]).m; this._covA[l * 3 + 0] = d[0] * d[0] + d[1] * d[1] + d[2] * d[2], this._covA[l * 3 + 1] = d[0] * d[4] + d[1] * d[5] + d[2] * d[6], this._covA[l * 3 + 2] = d[0] * d[8] + d[1] * d[9] + d[2] * d[10], this._covB[l * 3 + 0] = d[4] * d[4] + d[5] * d[5] + d[6] * d[6], this._covB[l * 3 + 1] = d[4] * d[8] + d[5] * d[9] + d[6] * d[10], this._covB[l * 3 + 2] = d[8] * d[8] + d[9] * d[9] + d[10] * d[10]; } } /** * Construct a Gaussian Splatting proxy object * @param name name of the mesh used for rendering * @param scene scene it belongs to */ constructor(e, t) { var i; this._vertexCount = 0, 
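/* Usage sketch (assumption: this minified JC class is the GaussianSplatting helper, as its default name suggests;
   `scene` and the .splat URL are placeholders). loadFileAsync is defined further down in this class.
   const gs = new GaussianSplatting("mySplat", scene);
   gs.loadFileAsync("scenes/example.splat").then(() => {
     // gs.mesh now holds the thin-instance quad mesh used to render the splats
   });
*/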
this._modelViewMatrix = Ae.Identity(), this._minimum = new D(), this._maximum = new D(), this.name = "GaussianSplatting", this._worker = null, this.scene = t, this.name = e, this._createMaterial(t), (i = this._worker) === null || i === void 0 || i.terminate(), this._worker = null; } _loadData(e) { this.mesh && this.dispose(), this._setData(new Uint8Array(e)); const t = new Float32Array(this.vertexCount * 16), i = (s) => { var n; for (let a = 0; a < this.vertexCount; a++) { const l = s[2 * a], o = a * 16; t[o + 0] = this._positions[l * 3 + 0], t[o + 1] = this._positions[l * 3 + 1], t[o + 2] = this._positions[l * 3 + 2], t[o + 4] = this._uBuffer[32 * l + 24 + 0] / 255, t[o + 5] = this._uBuffer[32 * l + 24 + 1] / 255, t[o + 6] = this._uBuffer[32 * l + 24 + 2] / 255, t[o + 7] = this._uBuffer[32 * l + 24 + 3] / 255, t[o + 8] = this._covA[l * 3 + 0], t[o + 9] = this._covA[l * 3 + 1], t[o + 10] = this._covA[l * 3 + 2], t[o + 12] = this._covB[l * 3 + 0], t[o + 13] = this._covB[l * 3 + 1], t[o + 14] = this._covB[l * 3 + 2]; } (n = this.mesh) === null || n === void 0 || n.thinInstanceBufferUpdated("matrix"); }; this.mesh = this._getMesh(this.scene), this.mesh.thinInstanceSetBuffer("matrix", t, 16, !1), this._worker = new Worker(URL.createObjectURL(new Blob(["(", JC._CreateWorker.toString(), ")(self)"], { type: "application/javascript" }))), this._worker.onmessage = (s) => { const n = new Uint32Array(s.data.depthMix.buffer); i(n); }; const r = new at(); this._sceneBeforeRenderObserver = this.scene.onBeforeRenderObservable.add(() => { var s; const n = this.scene.getEngine(); r.set(n.getRenderWidth(), n.getRenderHeight()), this._material.setVector2("viewport", r); const a = this.mesh.getWorldMatrix(); a.multiplyToRef(this.scene.activeCamera.getViewMatrix(), this._modelViewMatrix); const l = this.mesh.getBoundingInfo(); l.reConstruct(this._minimum, this._maximum, a), l.isLocked = !0, this._material.setMatrix("modelView", this._modelViewMatrix), (s = this._worker) === null || s === void 0 || s.postMessage({ view: this._modelViewMatrix.m, positions: this._positions }); }), this._sceneDisposeObserver = this.scene.onDisposeObservable.add(() => { this.dispose(); }); } /** * Loads a .splat Gaussian Splatting array buffer asynchronously * @param data arraybuffer containing splat file * @returns a promise that resolves when the operation is complete */ loadDataAsync(e) { return Promise.resolve(this._loadData(e)); } /** * Loads a .splat Gaussian Splatting file asynchronously * @param url path to the splat file to load * @returns a promise that resolves when the operation is complete */ loadFileAsync(e) { return Ve.LoadFileAsync(e, !0).then((t) => { this._loadData(t); }); } /** * Clear datas used for Gaussian Splatting and associated resources */ dispose() { var e, t; this.scene.onDisposeObservable.remove(this._sceneDisposeObserver), this.scene.onBeforeRenderObservable.remove(this._sceneBeforeRenderObserver), (e = this._worker) === null || e === void 0 || e.terminate(), this._worker = null, (t = this.mesh) === null || t === void 0 || t.dispose(), this.mesh = null; } } JC._VertexShaderSource = ` precision mediump float; attribute vec2 position; attribute vec4 world0; attribute vec4 world1; attribute vec4 world2; attribute vec4 world3; uniform mat4 projection, modelView; uniform vec2 viewport; varying vec4 vColor; varying vec2 vPosition; void main () { vec3 center = world0.xyz; vec4 color = world1; vec3 covA = world2.xyz; vec3 covB = world3.xyz; vec4 camspace = modelView * vec4(center, 1); vec4 pos2d = projection * 
camspace; float bounds = 1.2 * pos2d.w; if (pos2d.z < -pos2d.w || pos2d.x < -bounds || pos2d.x > bounds || pos2d.y < -bounds || pos2d.y > bounds) { gl_Position = vec4(0.0, 0.0, 2.0, 1.0); return; } mat3 Vrk = mat3( covA.x, covA.y, covA.z, covA.y, covB.x, covB.y, covA.z, covB.y, covB.z ); vec2 focal = vec2(1132., 1132.); mat3 J = mat3( focal.x / camspace.z, 0., -(focal.x * camspace.x) / (camspace.z * camspace.z), 0., focal.y / camspace.z, -(focal.y * camspace.y) / (camspace.z * camspace.z), 0., 0., 0. ); mat3 invy = mat3(1,0,0, 0,-1,0,0,0,1); mat3 T = invy * transpose(mat3(modelView)) * J; mat3 cov2d = transpose(T) * Vrk * T; float mid = (cov2d[0][0] + cov2d[1][1]) / 2.0; float radius = length(vec2((cov2d[0][0] - cov2d[1][1]) / 2.0, cov2d[0][1])); float lambda1 = mid + radius, lambda2 = mid - radius; if(lambda2 < 0.0) return; vec2 diagonalVector = normalize(vec2(cov2d[0][1], lambda1 - cov2d[0][0])); vec2 majorAxis = min(sqrt(2.0 * lambda1), 1024.0) * diagonalVector; vec2 minorAxis = min(sqrt(2.0 * lambda2), 1024.0) * vec2(diagonalVector.y, -diagonalVector.x); vColor = color; vPosition = position; vec2 vCenter = vec2(pos2d); gl_Position = vec4( vCenter + (position.x * majorAxis * 1. / viewport + position.y * minorAxis * 1. / viewport) * pos2d.w, pos2d.zw); }`; JC._FragmentShaderSource = ` precision highp float; varying vec4 vColor; varying vec2 vPosition; void main () { float A = -dot(vPosition, vPosition); if (A < -4.0) discard; float B = exp(A) * vColor.a; gl_FragColor = vec4(vColor.rgb, B); }`; JC._CreateWorker = function(c) { let e, t = [], i = 0, r; const s = (l) => { i = r.length; const o = new BigInt64Array(i), u = new Uint32Array(o.buffer); for (let d = 0; d < i; d++) u[2 * d] = d; const h = new Float32Array(o.buffer); for (let d = 0; d < i; d++) h[2 * d + 1] = 1e4 - (l[2] * r[3 * d + 0] + l[6] * r[3 * d + 1] + l[10] * r[3 * d + 2]); t = l, o.sort(), c.postMessage({ depthMix: o }, [o.buffer]); }; let n = !1; const a = () => { if (!n) { n = !0; const l = e; s(l), setTimeout(() => { n = !1, l !== e && a(); }, 0); } }; c.onmessage = (l) => { e = l.data.view; const o = t[2] * e[2] + t[6] * e[6] + t[10] * e[10]; Math.abs(o - 1) < 0.01 || (r = l.data.positions, a()); }; }; class rve { /** * Returns a boolean indicating if the animation is started */ get animationStarted() { return this._animationStarted; } /** Gets the initial key for the animation (setting it will restart the animation) */ get fromIndex() { return this._fromIndex; } /** Gets or sets the end key for the animation (setting it will restart the animation) */ get toIndex() { return this._toIndex; } /** Gets or sets a boolean indicating if the animation is looping (setting it will restart the animation) */ get loopAnimation() { return this._loopAnimation; } /** Gets or sets the delay between cell changes (setting it will restart the animation) */ get delay() { return Math.max(this._delay, 1); } /** * Creates a new Thin Sprite */ constructor() { this.width = 1, this.height = 1, this.angle = 0, this.invertU = !1, this.invertV = !1, this.isVisible = !0, this._animationStarted = !1, this._loopAnimation = !1, this._fromIndex = 0, this._toIndex = 0, this._delay = 0, this._direction = 1, this._time = 0, this._onBaseAnimationEnd = null, this.position = { x: 1, y: 1, z: 1 }, this.color = { r: 1, g: 1, b: 1, a: 1 }; } /** * Starts an animation * @param from defines the initial key * @param to defines the end key * @param loop defines if the animation must loop * @param delay defines the start delay (in ms) * @param onAnimationEnd defines 
a callback for when the animation ends */ playAnimation(e, t, i, r, s) { this._fromIndex = e, this._toIndex = t, this._loopAnimation = i, this._delay = r || 1, this._animationStarted = !0, this._onBaseAnimationEnd = s, e < t ? this._direction = 1 : (this._direction = -1, this._toIndex = e, this._fromIndex = t), this.cellIndex = e, this._time = 0; } /** Stops current animation (if any) */ stopAnimation() { this._animationStarted = !1; } /** * @internal */ _animate(e) { this._animationStarted && (this._time += e, this._time > this._delay && (this._time = this._time % this._delay, this.cellIndex += this._direction, (this._direction > 0 && this.cellIndex > this._toIndex || this._direction < 0 && this.cellIndex < this._fromIndex) && (this._loopAnimation ? this.cellIndex = this._direction > 0 ? this._fromIndex : this._toIndex : (this.cellIndex = this._toIndex, this._animationStarted = !1, this._onBaseAnimationEnd && this._onBaseAnimationEnd())))); } } class ZU extends rve { /** * Gets or sets the sprite size */ get size() { return this.width; } set size(e) { this.width = e, this.height = e; } /** * Gets the manager of this sprite */ get manager() { return this._manager; } /** * Creates a new Sprite * @param name defines the name * @param manager defines the manager */ constructor(e, t) { super(), this.name = e, this.animations = new Array(), this.isPickable = !1, this.useAlphaForPicking = !1, this.onDisposeObservable = new Fe(), this._onAnimationEnd = null, this._endAnimation = () => { this._onAnimationEnd && this._onAnimationEnd(), this.disposeWhenFinishedAnimating && this.dispose(); }, this.color = new Et(1, 1, 1, 1), this.position = D.Zero(), this._manager = t, this._manager.sprites.push(this), this.uniqueId = this._manager.scene.getUniqueId(); } /** * Returns the string "Sprite" * @returns "Sprite" */ getClassName() { return "Sprite"; } /** Gets or sets the initial key for the animation (setting it will restart the animation) */ get fromIndex() { return this._fromIndex; } set fromIndex(e) { this.playAnimation(e, this._toIndex, this._loopAnimation, this._delay, this._onAnimationEnd); } /** Gets or sets the end key for the animation (setting it will restart the animation) */ get toIndex() { return this._toIndex; } set toIndex(e) { this.playAnimation(this._fromIndex, e, this._loopAnimation, this._delay, this._onAnimationEnd); } /** Gets or sets a boolean indicating if the animation is looping (setting it will restart the animation) */ get loopAnimation() { return this._loopAnimation; } set loopAnimation(e) { this.playAnimation(this._fromIndex, this._toIndex, e, this._delay, this._onAnimationEnd); } /** Gets or sets the delay between cell changes (setting it will restart the animation) */ get delay() { return Math.max(this._delay, 1); } set delay(e) { this.playAnimation(this._fromIndex, this._toIndex, this._loopAnimation, e, this._onAnimationEnd); } /** * Starts an animation * @param from defines the initial key * @param to defines the end key * @param loop defines if the animation must loop * @param delay defines the start delay (in ms) * @param onAnimationEnd defines a callback to call when animation ends */ playAnimation(e, t, i, r, s = null) { this._onAnimationEnd = s, super.playAnimation(e, t, i, r, this._endAnimation); } /** Release associated resources */ dispose() { for (let e = 0; e < this._manager.sprites.length; e++) this._manager.sprites[e] == this && this._manager.sprites.splice(e, 1); this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(); } /** * 
Serializes the sprite to a JSON object * @returns the JSON object */ serialize() { const e = {}; return e.name = this.name, e.position = this.position.asArray(), e.color = this.color.asArray(), e.width = this.width, e.height = this.height, e.angle = this.angle, e.cellIndex = this.cellIndex, e.cellRef = this.cellRef, e.invertU = this.invertU, e.invertV = this.invertV, e.disposeWhenFinishedAnimating = this.disposeWhenFinishedAnimating, e.isPickable = this.isPickable, e.isVisible = this.isVisible, e.useAlphaForPicking = this.useAlphaForPicking, e.animationStarted = this.animationStarted, e.fromIndex = this.fromIndex, e.toIndex = this.toIndex, e.loopAnimation = this.loopAnimation, e.delay = this.delay, e; } /** * Parses a JSON object to create a new sprite * @param parsedSprite The JSON object to parse * @param manager defines the hosting manager * @returns the new sprite */ static Parse(e, t) { const i = new ZU(e.name, t); return i.position = D.FromArray(e.position), i.color = Et.FromArray(e.color), i.width = e.width, i.height = e.height, i.angle = e.angle, i.cellIndex = e.cellIndex, i.cellRef = e.cellRef, i.invertU = e.invertU, i.invertV = e.invertV, i.disposeWhenFinishedAnimating = e.disposeWhenFinishedAnimating, i.isPickable = e.isPickable, i.isVisible = e.isVisible, i.useAlphaForPicking = e.useAlphaForPicking, i._fromIndex = e.fromIndex, i._toIndex = e.toIndex, i._loopAnimation = e.loopAnimation, i._delay = e.delay, e.animationStarted && i.playAnimation(i.fromIndex, i.toIndex, i.loopAnimation, i.delay), i; } } ii.prototype._internalPickSprites = function(c, e, t, i) { if (!ku) return null; let r = null; if (!i) { if (!this.activeCamera) return null; i = this.activeCamera; } if (this.spriteManagers && this.spriteManagers.length > 0) for (let s = 0; s < this.spriteManagers.length; s++) { const n = this.spriteManagers[s]; if (!n.isPickable) continue; const a = n.intersects(c, i, e, t); if (!(!a || !a.hit) && !(!t && r != null && a.distance >= r.distance) && (r = a, t)) break; } return r || new ku(); }; ii.prototype._internalMultiPickSprites = function(c, e, t) { if (!ku) return null; let i = []; if (!t) { if (!this.activeCamera) return null; t = this.activeCamera; } if (this.spriteManagers && this.spriteManagers.length > 0) for (let r = 0; r < this.spriteManagers.length; r++) { const s = this.spriteManagers[r]; if (!s.isPickable) continue; const n = s.multiIntersects(c, t, e); n !== null && (i = i.concat(n)); } return i; }; ii.prototype.pickSprite = function(c, e, t, i, r) { if (!this._tempSpritePickingRay) return null; this.createPickingRayInCameraSpaceToRef(c, e, this._tempSpritePickingRay, r); const s = this._internalPickSprites(this._tempSpritePickingRay, t, i, r); return s && (s.ray = this.createPickingRayInCameraSpace(c, e, r)), s; }; ii.prototype.pickSpriteWithRay = function(c, e, t, i) { if (!this._tempSpritePickingRay) return null; if (!i) { if (!this.activeCamera) return null; i = this.activeCamera; } gs.TransformToRef(c, i.getViewMatrix(), this._tempSpritePickingRay); const r = this._internalPickSprites(this._tempSpritePickingRay, e, t, i); return r && (r.ray = c), r; }; ii.prototype.multiPickSprite = function(c, e, t, i) { return this.createPickingRayInCameraSpaceToRef(c, e, this._tempSpritePickingRay, i), this._internalMultiPickSprites(this._tempSpritePickingRay, t, i); }; ii.prototype.multiPickSpriteWithRay = function(c, e, t) { if (!this._tempSpritePickingRay) return null; if (!t) { if (!this.activeCamera) return null; t = this.activeCamera; } return gs.TransformToRef(c, 
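/* Illustrative sketch (assumptions: `scene` has at least one sprite manager with isPickable set to true, and
   scene.pointerX / scene.pointerY are the usual scene pointer coordinates, not shown in this excerpt):
   using the sprite picking helpers added to the scene prototype here.
   const pick = scene.pickSprite(scene.pointerX, scene.pointerY);
   if (pick && pick.hit) { console.log(pick.pickedSprite.name, pick.distance); }
   const picks = scene.multiPickSprite(scene.pointerX, scene.pointerY); // every pickable sprite under the pointer
*/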
t.getViewMatrix(), this._tempSpritePickingRay), this._internalMultiPickSprites(this._tempSpritePickingRay, e, t); }; ii.prototype.setPointerOverSprite = function(c) { this._pointerOverSprite !== c && (this._pointerOverSprite && this._pointerOverSprite.actionManager && this._pointerOverSprite.actionManager.processTrigger(10, Ro.CreateNewFromSprite(this._pointerOverSprite, this)), this._pointerOverSprite = c, this._pointerOverSprite && this._pointerOverSprite.actionManager && this._pointerOverSprite.actionManager.processTrigger(9, Ro.CreateNewFromSprite(this._pointerOverSprite, this))); }; ii.prototype.getPointerOverSprite = function() { return this._pointerOverSprite; }; class kne { /** * Creates a new instance of the component for the given scene * @param scene Defines the scene to register the component in */ constructor(e) { this.name = Bt.NAME_SPRITE, this.scene = e, this.scene.spriteManagers = [], this.scene._tempSpritePickingRay = gs ? gs.Zero() : null, this.scene.onBeforeSpritesRenderingObservable = new Fe(), this.scene.onAfterSpritesRenderingObservable = new Fe(), this._spritePredicate = (t) => t.actionManager ? t.isPickable && t.actionManager.hasPointerTriggers : !1; } /** * Registers the component in a given scene */ register() { this.scene._pointerMoveStage.registerStep(Bt.STEP_POINTERMOVE_SPRITE, this, this._pointerMove), this.scene._pointerDownStage.registerStep(Bt.STEP_POINTERDOWN_SPRITE, this, this._pointerDown), this.scene._pointerUpStage.registerStep(Bt.STEP_POINTERUP_SPRITE, this, this._pointerUp); } /** * Rebuilds the elements related to this component in case of * context lost for instance. */ rebuild() { } /** * Disposes the component and the associated resources. */ dispose() { this.scene.onBeforeSpritesRenderingObservable.clear(), this.scene.onAfterSpritesRenderingObservable.clear(); const e = this.scene.spriteManagers; if (e) for (; e.length; ) e[0].dispose(); } _pickSpriteButKeepRay(e, t, i, r, s) { const n = this.scene.pickSprite(t, i, this._spritePredicate, r, s); return n && (n.ray = e ? e.ray : null), n; } _pointerMove(e, t, i, r, s) { const n = this.scene; return r ? n.setPointerOverSprite(null) : (i = this._pickSpriteButKeepRay(i, e, t, !1, n.cameraToUseForPointers || void 0), i && i.hit && i.pickedSprite ? (n.setPointerOverSprite(i.pickedSprite), !n.doNotHandleCursors && s && (n._pointerOverSprite && n._pointerOverSprite.actionManager && n._pointerOverSprite.actionManager.hoverCursor ? 
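/* Note + sketch (assumption: `manager` is a sprite manager and `sprite` one of its sprites): the pointer stages
   registered by this component only consider sprites whose manager is pickable and whose own isPickable flag is set,
   and whose action manager declares pointer triggers (see _spritePredicate above).
   manager.isPickable = true;
   sprite.isPickable = true;
   // sprite.actionManager must carry at least one pointer trigger for the pointer stages to pick it up
*/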
s.style.cursor = n._pointerOverSprite.actionManager.hoverCursor : s.style.cursor = n.hoverCursor)) : n.setPointerOverSprite(null)), i; } _pointerDown(e, t, i, r) { const s = this.scene; if (s._pickedDownSprite = null, s.spriteManagers && s.spriteManagers.length > 0 && (i = s.pickSprite(e, t, this._spritePredicate, !1, s.cameraToUseForPointers || void 0), i && i.hit && i.pickedSprite && i.pickedSprite.actionManager)) { switch (s._pickedDownSprite = i.pickedSprite, r.button) { case 0: i.pickedSprite.actionManager.processTrigger(2, Ro.CreateNewFromSprite(i.pickedSprite, s, r)); break; case 1: i.pickedSprite.actionManager.processTrigger(4, Ro.CreateNewFromSprite(i.pickedSprite, s, r)); break; case 2: i.pickedSprite.actionManager.processTrigger(3, Ro.CreateNewFromSprite(i.pickedSprite, s, r)); break; } i.pickedSprite.actionManager && i.pickedSprite.actionManager.processTrigger(5, Ro.CreateNewFromSprite(i.pickedSprite, s, r)); } return i; } _pointerUp(e, t, i, r, s) { const n = this.scene; if (n.spriteManagers && n.spriteManagers.length > 0) { const a = n.pickSprite(e, t, this._spritePredicate, !1, n.cameraToUseForPointers || void 0); a && (a.hit && a.pickedSprite && a.pickedSprite.actionManager && (a.pickedSprite.actionManager.processTrigger(7, Ro.CreateNewFromSprite(a.pickedSprite, n, r)), a.pickedSprite.actionManager && (this.scene._inputManager._isPointerSwiping() || a.pickedSprite.actionManager.processTrigger(1, Ro.CreateNewFromSprite(a.pickedSprite, n, r)), s && a.pickedSprite.actionManager.processTrigger(6, Ro.CreateNewFromSprite(a.pickedSprite, n, r)))), n._pickedDownSprite && n._pickedDownSprite.actionManager && n._pickedDownSprite !== a.pickedSprite && n._pickedDownSprite.actionManager.processTrigger(16, Ro.CreateNewFromSprite(n._pickedDownSprite, n, r))); } return i; } } const sve = "imageProcessingCompatibility", nve = `#ifdef IMAGEPROCESSINGPOSTPROCESS gl_FragColor.rgb=pow(gl_FragColor.rgb,vec3(2.2)); #endif `; je.IncludesShadersStore[sve] = nve; const ave = "spritesPixelShader", ove = `uniform bool alphaTest;varying vec4 vColor;varying vec2 vUV;uniform sampler2D diffuseSampler; #include<fogFragmentDeclaration> #define CUSTOM_FRAGMENT_DEFINITIONS #ifdef PIXEL_PERFECT vec2 uvPixelPerfect(vec2 uv) {vec2 res=vec2(textureSize(diffuseSampler,0));uv=uv*res;vec2 seam=floor(uv+0.5);uv=seam+clamp((uv-seam)/fwidth(uv),-0.5,0.5);return uv/res;} #endif void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN #ifdef PIXEL_PERFECT vec2 uv=uvPixelPerfect(vUV); #else vec2 uv=vUV; #endif vec4 color=texture2D(diffuseSampler,uv);float fAlphaTest=float(alphaTest);if (fAlphaTest != 0.)
{if (color.a<0.95) discard;} color*=vColor; #include<fogFragment> gl_FragColor=color; #include<imageProcessingCompatibility> #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[ave] = ove; const lve = "spritesVertexShader", cve = `attribute vec4 position;attribute vec2 options;attribute vec2 offsets;attribute vec2 inverts;attribute vec4 cellInfo;attribute vec4 color;uniform mat4 view;uniform mat4 projection;varying vec2 vUV;varying vec4 vColor; #include<fogVertexDeclaration> #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec3 viewPos=(view*vec4(position.xyz,1.0)).xyz; vec2 cornerPos;float angle=position.w;vec2 size=vec2(options.x,options.y);vec2 offset=offsets.xy;cornerPos=vec2(offset.x-0.5,offset.y -0.5)*size;vec3 rotatedCorner;rotatedCorner.x=cornerPos.x*cos(angle)-cornerPos.y*sin(angle);rotatedCorner.y=cornerPos.x*sin(angle)+cornerPos.y*cos(angle);rotatedCorner.z=0.;viewPos+=rotatedCorner;gl_Position=projection*vec4(viewPos,1.0); vColor=color;vec2 uvOffset=vec2(abs(offset.x-inverts.x),abs(1.0-offset.y-inverts.y));vec2 uvPlace=cellInfo.xy;vec2 uvSize=cellInfo.zw;vUV.x=uvPlace.x+uvSize.x*uvOffset.x;vUV.y=uvPlace.y+uvSize.y*uvOffset.y; #ifdef FOG vFogDistance=viewPos; #endif #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[lve] = cve; class uve { /** * Gets the capacity of the manager */ get capacity() { return this._capacity; } /** * Gets or sets a boolean indicating if the renderer must render sprites with pixel perfect rendering * Note that pixel perfect mode is not supported in WebGL 1 */ get pixelPerfect() { return this._pixelPerfect; } set pixelPerfect(e) { this._pixelPerfect !== e && (this._pixelPerfect = e, this._createEffects()); } /** * Creates a new sprite Renderer * @param engine defines the engine the renderer works with * @param capacity defines the maximum allowed number of sprites * @param epsilon defines the epsilon value to align texture (0.01 by default) * @param scene defines the hosting scene */ constructor(e, t, i = 0.01, r = null) { this.blendMode = 2, this.autoResetAlpha = !0, this.disableDepthWrite = !1, this.fogEnabled = !0, this._pixelPerfect = !1, this._useVAO = !1, this._useInstancing = !1, this._vertexBuffers = {}, this._capacity = t, this._epsilon = i, this._engine = e, this._useInstancing = e.getCaps().instancedArrays && e._features.supportSpriteInstancing, this._useVAO = e.getCaps().vertexArrayObject && !e.disableVertexArrayObjects, this._scene = r, this._useInstancing || this._buildIndexBuffer(), this._vertexBufferSize = this._useInstancing ? 16 : 18, this._vertexData = new Float32Array(t * this._vertexBufferSize * (this._useInstancing ?
1 : 4)), this._buffer = new hu(e, this._vertexData, !0, this._vertexBufferSize); const s = this._buffer.createVertexBuffer(Y.PositionKind, 0, 4, this._vertexBufferSize, this._useInstancing), n = this._buffer.createVertexBuffer("options", 4, 2, this._vertexBufferSize, this._useInstancing); let a = 6, l; if (this._useInstancing) { const d = new Float32Array([0, 0, 1, 0, 0, 1, 1, 1]); this._spriteBuffer = new hu(e, d, !1, 2), l = this._spriteBuffer.createVertexBuffer("offsets", 0, 2); } else l = this._buffer.createVertexBuffer("offsets", a, 2, this._vertexBufferSize, this._useInstancing), a += 2; const o = this._buffer.createVertexBuffer("inverts", a, 2, this._vertexBufferSize, this._useInstancing), u = this._buffer.createVertexBuffer("cellInfo", a + 2, 4, this._vertexBufferSize, this._useInstancing), h = this._buffer.createVertexBuffer(Y.ColorKind, a + 6, 4, this._vertexBufferSize, this._useInstancing); this._vertexBuffers[Y.PositionKind] = s, this._vertexBuffers.options = n, this._vertexBuffers.offsets = l, this._vertexBuffers.inverts = o, this._vertexBuffers.cellInfo = u, this._vertexBuffers[Y.ColorKind] = h, this._createEffects(); } _createEffects() { var e, t, i, r; (e = this._drawWrapperBase) === null || e === void 0 || e.dispose(), (t = this._drawWrapperFog) === null || t === void 0 || t.dispose(), (i = this._drawWrapperDepth) === null || i === void 0 || i.dispose(), (r = this._drawWrapperFogDepth) === null || r === void 0 || r.dispose(), this._drawWrapperBase = new $o(this._engine), this._drawWrapperFog = new $o(this._engine), this._drawWrapperDepth = new $o(this._engine, !1), this._drawWrapperFogDepth = new $o(this._engine, !1), this._drawWrapperBase.drawContext && (this._drawWrapperBase.drawContext.useInstancing = this._useInstancing), this._drawWrapperFog.drawContext && (this._drawWrapperFog.drawContext.useInstancing = this._useInstancing), this._drawWrapperDepth.drawContext && (this._drawWrapperDepth.drawContext.useInstancing = this._useInstancing), this._drawWrapperFogDepth.drawContext && (this._drawWrapperFogDepth.drawContext.useInstancing = this._useInstancing); const s = this._pixelPerfect ? 
`#define PIXEL_PERFECT ` : ""; this._drawWrapperBase.effect = this._engine.createEffect("sprites", [Y.PositionKind, "options", "offsets", "inverts", "cellInfo", Y.ColorKind], ["view", "projection", "textureInfos", "alphaTest"], ["diffuseSampler"], s), this._drawWrapperDepth.effect = this._drawWrapperBase.effect, this._drawWrapperDepth.materialContext = this._drawWrapperBase.materialContext, this._scene && (this._drawWrapperFog.effect = this._scene.getEngine().createEffect("sprites", [Y.PositionKind, "options", "offsets", "inverts", "cellInfo", Y.ColorKind], ["view", "projection", "textureInfos", "alphaTest", "vFogInfos", "vFogColor"], ["diffuseSampler"], s + "#define FOG"), this._drawWrapperFogDepth.effect = this._drawWrapperFog.effect, this._drawWrapperFogDepth.materialContext = this._drawWrapperFog.materialContext); } /** * Render all child sprites * @param sprites defines the list of sprites to render * @param deltaTime defines the time since last frame * @param viewMatrix defines the viewMatrix to use to render the sprites * @param projectionMatrix defines the projectionMatrix to use to render the sprites * @param customSpriteUpdate defines a custom function to update the sprites data before they render */ render(e, t, i, r, s = null) { if (!this.texture || !this.texture.isReady() || !e.length) return; let n = this._drawWrapperBase, a = this._drawWrapperDepth, l = !1; this.fogEnabled && this._scene && this._scene.fogEnabled && this._scene.fogMode !== 0 && (n = this._drawWrapperFog, a = this._drawWrapperFogDepth, l = !0); const o = n.effect; if (!o.isReady()) return; const u = this._engine, h = !!(this._scene && this._scene.useRightHandedSystem), d = this.texture.getBaseSize(), f = Math.min(this._capacity, e.length); let p = 0, m = !0; for (let x = 0; x < f; x++) { const b = e[x]; !b || !b.isVisible || (m = !1, b._animate(t), this._appendSpriteVertex(p++, b, 0, 0, d, h, s), this._useInstancing || (this._appendSpriteVertex(p++, b, 1, 0, d, h, s), this._appendSpriteVertex(p++, b, 1, 1, d, h, s), this._appendSpriteVertex(p++, b, 0, 1, d, h, s))); } if (m) return; this._buffer.update(this._vertexData); const _ = !!u.depthCullingState.cull, v = u.depthCullingState.zOffset, C = u.depthCullingState.zOffsetUnits; if (u.setState(_, v, !1, !1, void 0, void 0, C), u.enableEffect(n), o.setTexture("diffuseSampler", this.texture), o.setMatrix("view", i), o.setMatrix("projection", r), l) { const x = this._scene; o.setFloat4("vFogInfos", x.fogMode, x.fogStart, x.fogEnd, x.fogDensity), o.setColor3("vFogColor", x.fogColor); } this._useVAO ? (this._vertexArrayObject || (this._vertexArrayObject = u.recordVertexArrayObject(this._vertexBuffers, this._indexBuffer, o)), u.bindVertexArrayObject(this._vertexArrayObject, this._indexBuffer)) : u.bindBuffers(this._vertexBuffers, this._indexBuffer, o), u.depthCullingState.depthFunc = u.useReverseDepthBuffer ? 518 : 515, this.disableDepthWrite || (o.setBool("alphaTest", !0), u.setColorWrite(!1), u.enableEffect(a), this._useInstancing ? u.drawArraysType(7, 0, 4, p) : u.drawElementsType(0, 0, p / 4 * 6), u.enableEffect(n), u.setColorWrite(!0), o.setBool("alphaTest", !1)), u.setAlphaMode(this.blendMode), this._useInstancing ? u.drawArraysType(7, 0, 4, p) : u.drawElementsType(0, 0, p / 4 * 6), this.autoResetAlpha && u.setAlphaMode(0), h && this._scene.getEngine().setState(_, v, !1, !0, void 0, void 0, C), u.unbindInstanceAttributes(); } _appendSpriteVertex(e, t, i, r, s, n, a) { let l = e * this._vertexBufferSize; if (i === 0 ? 
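/* Layout note for _appendSpriteVertex below, derived from the vertex buffers created in the constructor: each sprite
   occupies _vertexBufferSize floats per vertex (18) or per instance (16):
   [0..2] position.xyz, [3] angle, [4..5] width/height ("options"), [6..7] quad corner offsets (non-instanced only;
   in instanced mode the offsets come from a small static buffer and the remaining fields shift down by two),
   [8..9] invertU/invertV, [10..11] cell offset, [12..13] cell size ("cellInfo"), [14..17] RGBA color.
*/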
i = this._epsilon : i === 1 && (i = 1 - this._epsilon), r === 0 ? r = this._epsilon : r === 1 && (r = 1 - this._epsilon), a) a(t, s); else { t.cellIndex || (t.cellIndex = 0); const o = s.width / this.cellWidth, u = t.cellIndex / o >> 0; t._xOffset = (t.cellIndex - u * o) * this.cellWidth / s.width, t._yOffset = u * this.cellHeight / s.height, t._xSize = this.cellWidth, t._ySize = this.cellHeight; } this._vertexData[l] = t.position.x, this._vertexData[l + 1] = t.position.y, this._vertexData[l + 2] = t.position.z, this._vertexData[l + 3] = t.angle, this._vertexData[l + 4] = t.width, this._vertexData[l + 5] = t.height, this._useInstancing ? l -= 2 : (this._vertexData[l + 6] = i, this._vertexData[l + 7] = r), n ? this._vertexData[l + 8] = t.invertU ? 0 : 1 : this._vertexData[l + 8] = t.invertU ? 1 : 0, this._vertexData[l + 9] = t.invertV ? 1 : 0, this._vertexData[l + 10] = t._xOffset, this._vertexData[l + 11] = t._yOffset, this._vertexData[l + 12] = t._xSize / s.width, this._vertexData[l + 13] = t._ySize / s.height, this._vertexData[l + 14] = t.color.r, this._vertexData[l + 15] = t.color.g, this._vertexData[l + 16] = t.color.b, this._vertexData[l + 17] = t.color.a; } _buildIndexBuffer() { const e = []; let t = 0; for (let i = 0; i < this._capacity; i++) e.push(t), e.push(t + 1), e.push(t + 2), e.push(t), e.push(t + 2), e.push(t + 3), t += 4; this._indexBuffer = this._engine.createIndexBuffer(e); } /** * Rebuilds the renderer (after a context lost, for eg) */ rebuild() { var e; this._indexBuffer && this._buildIndexBuffer(), this._useVAO && (this._vertexArrayObject = void 0), this._buffer._rebuild(); for (const t in this._vertexBuffers) this._vertexBuffers[t]._rebuild(); (e = this._spriteBuffer) === null || e === void 0 || e._rebuild(); } /** * Release associated resources */ dispose() { this._buffer && (this._buffer.dispose(), this._buffer = null), this._spriteBuffer && (this._spriteBuffer.dispose(), this._spriteBuffer = null), this._indexBuffer && (this._engine._releaseBuffer(this._indexBuffer), this._indexBuffer = null), this._vertexArrayObject && (this._engine.releaseVertexArrayObject(this._vertexArrayObject), this._vertexArrayObject = null), this.texture && (this.texture.dispose(), this.texture = null), this._drawWrapperBase.dispose(), this._drawWrapperFog.dispose(), this._drawWrapperDepth.dispose(), this._drawWrapperFogDepth.dispose(); } } class YC { /** * Callback called when the manager is disposed */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * Gets the array of sprites */ get children() { return this.sprites; } /** * Gets the hosting scene */ get scene() { return this._scene; } /** * Gets the capacity of the manager */ get capacity() { return this._spriteRenderer.capacity; } /** * Gets or sets the spritesheet texture */ get texture() { return this._spriteRenderer.texture; } set texture(e) { e.wrapU = De.CLAMP_ADDRESSMODE, e.wrapV = De.CLAMP_ADDRESSMODE, this._spriteRenderer.texture = e, this._textureContent = null; } /** Defines the default width of a cell in the spritesheet */ get cellWidth() { return this._spriteRenderer.cellWidth; } set cellWidth(e) { this._spriteRenderer.cellWidth = e; } /** Defines the default height of a cell in the spritesheet */ get cellHeight() { return this._spriteRenderer.cellHeight; } set cellHeight(e) { this._spriteRenderer.cellHeight = e; } /** Gets or sets a boolean indicating if the manager must consider scene fog when rendering 
*/ get fogEnabled() { return this._spriteRenderer.fogEnabled; } set fogEnabled(e) { this._spriteRenderer.fogEnabled = e; } /** * Blend mode use to render the particle, it can be any of * the static undefined properties provided in this class. * Default value is 2 */ get blendMode() { return this._spriteRenderer.blendMode; } set blendMode(e) { this._spriteRenderer.blendMode = e; } /** Disables writing to the depth buffer when rendering the sprites. * It can be handy to disable depth writing when using textures without alpha channel * and setting some specific blend modes. */ get disableDepthWrite() { return this._disableDepthWrite; } set disableDepthWrite(e) { this._disableDepthWrite = e, this._spriteRenderer.disableDepthWrite = e; } /** * Gets or sets a boolean indicating if the renderer must render sprites with pixel perfect rendering * In this mode, sprites are rendered as "pixel art", which means that they appear as pixelated but remain stable when moving or when rotated or scaled. * Note that for this mode to work as expected, the sprite texture must use the BILINEAR sampling mode, not NEAREST! */ get pixelPerfect() { return this._spriteRenderer.pixelPerfect; } set pixelPerfect(e) { this._spriteRenderer.pixelPerfect = e, e && this.texture.samplingMode !== 3 && this.texture.updateSamplingMode(3); } /** * Creates a new sprite manager * @param name defines the manager's name * @param imgUrl defines the sprite sheet url * @param capacity defines the maximum allowed number of sprites * @param cellSize defines the size of a sprite cell * @param scene defines the hosting scene * @param epsilon defines the epsilon value to align texture (0.01 by default) * @param samplingMode defines the sampling mode to use with spritesheet * @param fromPacked set to false; do not alter * @param spriteJSON null otherwise a JSON object defining sprite sheet data; do not alter */ constructor(e, t, i, r, s, n = 0.01, a = De.TRILINEAR_SAMPLINGMODE, l = !1, o = null) { this.name = e, this.sprites = [], this.renderingGroupId = 0, this.layerMask = 268435455, this.isPickable = !1, this.metadata = null, this._wasDispatched = !1, this.onDisposeObservable = new Fe(), this._disableDepthWrite = !1, this._packedAndReady = !1, this._customUpdate = (h, d) => { h.cellRef || (h.cellIndex = 0); const f = h.cellIndex; typeof f == "number" && isFinite(f) && Math.floor(f) === f && (h.cellRef = this._spriteMap[h.cellIndex]), h._xOffset = this._cellData[h.cellRef].frame.x / d.width, h._yOffset = this._cellData[h.cellRef].frame.y / d.height, h._xSize = this._cellData[h.cellRef].frame.w, h._ySize = this._cellData[h.cellRef].frame.h; }, s || (s = gi.LastCreatedScene), s._getComponent(Bt.NAME_SPRITE) || s._addComponent(new kne(s)), this._fromPacked = l, this._scene = s; const u = this._scene.getEngine(); if (this._spriteRenderer = new uve(u, i, n, s), r.width && r.height) this.cellWidth = r.width, this.cellHeight = r.height; else if (r !== void 0) this.cellWidth = r, this.cellHeight = r; else { this._spriteRenderer = null; return; } this._scene.spriteManagers && this._scene.spriteManagers.push(this), this.uniqueId = this.scene.getUniqueId(), t && (this.texture = new De(t, s, !0, !1, a)), this._fromPacked && this._makePacked(t, o); } /** * Returns the string "SpriteManager" * @returns "SpriteManager" */ getClassName() { return "SpriteManager"; } _makePacked(e, t) { if (t !== null) try { let i; if (typeof t == "string" ? 
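/* Usage sketch (assumption: the minified YC/ZU classes are SpriteManager/Sprite, as their getClassName implementations
   indicate; `scene` and the texture URL are placeholders):
   const manager = new SpriteManager("trees", "textures/palm.png", 2000, { width: 512, height: 1024 }, scene);
   const sprite = new Sprite("tree", manager);
   sprite.position.x = 3;
   sprite.playAnimation(0, 43, true, 100); // from, to, loop, delay between cells in ms
*/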
i = JSON.parse(t) : i = t, i.frames.length) { const s = {}; for (let n = 0; n < i.frames.length; n++) { const a = i.frames[n]; if (typeof Object.keys(a)[0] != "string") throw new Error("Invalid JSON Format. Check the frame values and make sure the name is the first parameter."); const l = a[Object.keys(a)[0]]; s[l] = a; } i.frames = s; } const r = Reflect.ownKeys(i.frames); this._spriteMap = r, this._packedAndReady = !0, this._cellData = i.frames; } catch { throw this._fromPacked = !1, this._packedAndReady = !1, new Error("Invalid JSON from string. Spritesheet managed with constant cell size."); } else { const i = /\./g; let r; do r = i.lastIndex, i.test(e); while (i.lastIndex > 0); const s = e.substring(0, r - 1) + ".json", n = () => { Ce.Error("JSON ERROR: Unable to load JSON file."), this._fromPacked = !1, this._packedAndReady = !1; }, a = (l) => { try { const o = JSON.parse(l), u = Reflect.ownKeys(o.frames); this._spriteMap = u, this._packedAndReady = !0, this._cellData = o.frames; } catch { throw this._fromPacked = !1, this._packedAndReady = !1, new Error("Invalid JSON format. Please check documentation for format specifications."); } }; Ve.LoadFile(s, a, void 0, void 0, !1, n); } } _checkTextureAlpha(e, t, i, r, s) { if (!e.useAlphaForPicking || !this.texture) return !0; const n = this.texture.getSize(); this._textureContent || (this._textureContent = new Uint8Array(n.width * n.height * 4), this.texture.readPixels(0, 0, this._textureContent)); const a = de.Vector3[0]; a.copyFrom(t.direction), a.normalize(), a.scaleInPlace(i), a.addInPlace(t.origin); const l = (a.x - r.x) / (s.x - r.x), o = 1 - (a.y - r.y) / (s.y - r.y), u = e._xOffset * n.width + l * e._xSize | 0, h = e._yOffset * n.height + o * e._ySize | 0; return this._textureContent[(u + h * n.width) * 4 + 3] > 0.5; } /** * Intersects the sprites with a ray * @param ray defines the ray to intersect with * @param camera defines the current active camera * @param predicate defines a predicate used to select candidate sprites * @param fastCheck defines if a fast check only must be done (the first potential sprite is will be used and not the closer) * @returns null if no hit or a PickingInfo */ intersects(e, t, i, r) { const s = Math.min(this.capacity, this.sprites.length), n = D.Zero(), a = D.Zero(); let l = Number.MAX_VALUE, o = null; const u = de.Vector3[0], h = de.Vector3[1], d = t.getViewMatrix(); let f = e, p = e; for (let m = 0; m < s; m++) { const _ = this.sprites[m]; if (_) { if (i) { if (!i(_)) continue; } else if (!_.isPickable) continue; if (D.TransformCoordinatesToRef(_.position, d, h), _.angle ? 
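/* Note on alpha-based picking (see _checkTextureAlpha above): when sprite.useAlphaForPicking is true and the manager has
   a texture, the spritesheet pixels are read once via texture.readPixels and cached; a ray intersection then only counts
   where the sampled texel has a non-zero alpha byte, so fully transparent regions of a sprite are not pickable.
*/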
(Ae.TranslationToRef(-h.x, -h.y, 0, de.Matrix[1]), Ae.TranslationToRef(h.x, h.y, 0, de.Matrix[2]), Ae.RotationZToRef(-_.angle, de.Matrix[3]), de.Matrix[1].multiplyToRef(de.Matrix[3], de.Matrix[4]), de.Matrix[4].multiplyToRef(de.Matrix[2], de.Matrix[0]), f = e.clone(), D.TransformCoordinatesToRef(e.origin, de.Matrix[0], f.origin), D.TransformNormalToRef(e.direction, de.Matrix[0], f.direction)) : f = e, n.copyFromFloats(h.x - _.width / 2, h.y - _.height / 2, h.z), a.copyFromFloats(h.x + _.width / 2, h.y + _.height / 2, h.z), f.intersectsBoxMinMax(n, a)) { const v = D.Distance(h, f.origin); if (l > v) { if (!this._checkTextureAlpha(_, f, v, n, a)) continue; if (p = f, l = v, o = _, r) break; } } } } if (o) { const m = new ku(); d.invertToRef(de.Matrix[0]), m.hit = !0, m.pickedSprite = o, m.distance = l; const _ = de.Vector3[2]; return _.copyFrom(p.direction), _.normalize(), _.scaleInPlace(l), p.origin.addToRef(_, u), m.pickedPoint = D.TransformCoordinates(u, de.Matrix[0]), m; } return null; } /** * Intersects the sprites with a ray * @param ray defines the ray to intersect with * @param camera defines the current active camera * @param predicate defines a predicate used to select candidate sprites * @returns null if no hit or a PickingInfo array */ multiIntersects(e, t, i) { const r = Math.min(this.capacity, this.sprites.length), s = D.Zero(), n = D.Zero(); let a; const l = [], o = de.Vector3[0].copyFromFloats(0, 0, 0), u = de.Vector3[1].copyFromFloats(0, 0, 0), h = t.getViewMatrix(); for (let d = 0; d < r; d++) { const f = this.sprites[d]; if (f) { if (i) { if (!i(f)) continue; } else if (!f.isPickable) continue; if (D.TransformCoordinatesToRef(f.position, h, u), s.copyFromFloats(u.x - f.width / 2, u.y - f.height / 2, u.z), n.copyFromFloats(u.x + f.width / 2, u.y + f.height / 2, u.z), e.intersectsBoxMinMax(s, n)) { if (a = D.Distance(u, e.origin), !this._checkTextureAlpha(f, e, a, s, n)) continue; const p = new ku(); l.push(p), h.invertToRef(de.Matrix[0]), p.hit = !0, p.pickedSprite = f, p.distance = a; const m = de.Vector3[2]; m.copyFrom(e.direction), m.normalize(), m.scaleInPlace(a), e.origin.addToRef(m, o), p.pickedPoint = D.TransformCoordinates(o, de.Matrix[0]); } } } return l; } /** * Render all child sprites */ render() { if (this._fromPacked && (!this._packedAndReady || !this._spriteMap || !this._cellData)) return; const t = this._scene.getEngine().getDeltaTime(); this._packedAndReady ? 
this._spriteRenderer.render(this.sprites, t, this._scene.getViewMatrix(), this._scene.getProjectionMatrix(), this._customUpdate) : this._spriteRenderer.render(this.sprites, t, this._scene.getViewMatrix(), this._scene.getProjectionMatrix()); } /** * Rebuilds the manager (after a context lost, for eg) */ rebuild() { var e; (e = this._spriteRenderer) === null || e === void 0 || e.rebuild(); } /** * Release associated resources */ dispose() { if (this._spriteRenderer && (this._spriteRenderer.dispose(), this._spriteRenderer = null), this._textureContent = null, this._scene.spriteManagers) { const e = this._scene.spriteManagers.indexOf(this); this._scene.spriteManagers.splice(e, 1); } this.onDisposeObservable.notifyObservers(this), this.onDisposeObservable.clear(), this.metadata = null; } /** * Serializes the sprite manager to a JSON object * @param serializeTexture defines if the texture must be serialized as well * @returns the JSON object */ serialize(e = !1) { const t = {}; t.name = this.name, t.capacity = this.capacity, t.cellWidth = this.cellWidth, t.cellHeight = this.cellHeight, t.fogEnabled = this.fogEnabled, t.blendMode = this.blendMode, t.disableDepthWrite = this.disableDepthWrite, t.pixelPerfect = this.pixelPerfect, this.texture && (e ? t.texture = this.texture.serialize() : (t.textureUrl = this.texture.name, t.invertY = this.texture._invertY)), t.sprites = []; for (const i of this.sprites) t.sprites.push(i.serialize()); return t.metadata = this.metadata, t; } /** * Parses a JSON object to create a new sprite manager. * @param parsedManager The JSON object to parse * @param scene The scene to create the sprite manager * @param rootUrl The root url to use to load external dependencies like texture * @returns the new sprite manager */ static Parse(e, t, i) { const r = new YC(e.name, "", e.capacity, { width: e.cellWidth, height: e.cellHeight }, t); e.fogEnabled !== void 0 && (r.fogEnabled = e.fogEnabled), e.blendMode !== void 0 && (r.blendMode = e.blendMode), e.disableDepthWrite !== void 0 && (r.disableDepthWrite = e.disableDepthWrite), e.pixelPerfect !== void 0 && (r.pixelPerfect = e.pixelPerfect), e.metadata !== void 0 && (r.metadata = e.metadata), e.texture ? r.texture = De.Parse(e.texture, t, i) : e.textureName && (r.texture = new De(i + e.textureUrl, t, !1, e.invertY !== void 0 ? 
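/* Sketch (placeholders: `manager`, `scene`, `rootUrl`): the serialize/Parse pair defined around here can round-trip a
   sprite manager, and ParseFromSnippetAsync further down loads one from the Babylon snippet server.
   const json = manager.serialize(true);               // true = embed the texture in the JSON
   const restored = SpriteManager.Parse(json, scene, rootUrl);
*/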
e.invertY : !0)); for (const s of e.sprites) ZU.Parse(s, r); return r; } /** * Creates a sprite manager from a snippet saved in a remote file * @param name defines the name of the sprite manager to create (can be null or empty to use the one from the json data) * @param url defines the url to load from * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a promise that will resolve to the new sprite manager */ static ParseFromFileAsync(e, t, i, r = "") { return new Promise((s, n) => { const a = new go(); a.addEventListener("readystatechange", () => { if (a.readyState == 4) if (a.status == 200) { const l = JSON.parse(a.responseText), o = YC.Parse(l, i || gi.LastCreatedScene, r); e && (o.name = e), s(o); } else n("Unable to load the sprite manager"); }), a.open("GET", t), a.send(); }); } /** * Creates a sprite manager from a snippet saved by the sprite editor * @param snippetId defines the snippet to load (can be set to _BLANK to create a default one) * @param scene defines the hosting scene * @param rootUrl defines the root URL to use to load textures and relative dependencies * @returns a promise that will resolve to the new sprite manager */ static ParseFromSnippetAsync(e, t, i = "") { return e === "_BLANK" ? Promise.resolve(new YC("Default sprite manager", "//playground.babylonjs.com/textures/player.png", 500, 64, t)) : new Promise((r, s) => { const n = new go(); n.addEventListener("readystatechange", () => { if (n.readyState == 4) if (n.status == 200) { const a = JSON.parse(JSON.parse(n.responseText).jsonPayload), l = JSON.parse(a.spriteManager), o = YC.Parse(l, t || gi.LastCreatedScene, i); o.snippetId = e, r(o); } else s("Unable to load the snippet " + e); }), n.open("GET", this.SnippetUrl + "/" + e.replace(/#/g, "/")), n.send(); }); } } YC.SnippetUrl = "https://snippet.babylonjs.com"; YC.CreateFromSnippetAsync = YC.ParseFromSnippetAsync; const hve = "spriteMapPixelShader", dve = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE) #define TEXTUREFUNC(s,c,l) texture2DLodEXT(s,c,l) #else #define TEXTUREFUNC(s,c,b) texture2D(s,c,b) #endif precision highp float;varying vec3 vPosition;varying vec2 vUV;varying vec2 tUV;uniform float time;uniform float spriteCount;uniform sampler2D spriteSheet;uniform vec2 spriteMapSize;uniform vec2 outputSize;uniform vec2 stageSize;uniform sampler2D frameMap;uniform sampler2D tileMaps[LAYERS];uniform sampler2D animationMap;uniform vec3 colorMul;float mt;const float fdStep=1./4.;const float aFrameSteps=1./MAX_ANIMATION_FRAMES;mat4 getFrameData(float frameID){float fX=frameID/spriteCount;return mat4( texture2D(frameMap,vec2(fX,0.),0.), texture2D(frameMap,vec2(fX,fdStep*1.),0.), texture2D(frameMap,vec2(fX,fdStep*2.),0.), vec4(0.) );} void main(){vec4 color=vec4(0.);vec2 tileUV=fract(tUV); #ifdef FLIPU tileUV.y=1.0-tileUV.y; #endif vec2 tileID=floor(tUV);vec2 sheetUnits=1./spriteMapSize;float spriteUnits=1./spriteCount;vec2 stageUnits=1./stageSize;for(int i=0; i<LAYERS; i++) {float frameID; #define LAYER_ID_SWITCH vec4 animationData=TEXTUREFUNC(animationMap,vec2((frameID+0.5)/spriteCount,0.),0.);if (animationData.y>0.) 
{mt=mod(time*animationData.z,1.0);for(float f=0.; f<MAX_ANIMATION_FRAMES; f++){if (animationData.y>mt){frameID=animationData.x;break;} animationData=TEXTUREFUNC(animationMap,vec2((frameID+0.5)/spriteCount,aFrameSteps*f),0.);}} mat4 frameData=getFrameData(frameID+0.5);vec2 frameSize=(frameData[0].zw)/spriteMapSize;vec2 offset=frameData[0].xy*sheetUnits;vec2 ratio=frameData[2].xy/frameData[0].zw;if (frameData[2].z==1.){tileUV.xy=tileUV.yx;} vec4 nc=texture2D(spriteSheet,tileUV*frameSize+offset);if (i==0){color=nc;} else {float alpha=min(color.a+nc.a,1.0);vec3 mixed=mix(color.xyz,nc.xyz,nc.a);color=vec4(mixed,alpha);}} color.xyz*=colorMul;gl_FragColor=color;}`; je.ShadersStore[hve] = dve; const fve = "spriteMapVertexShader", pve = `precision highp float;attribute vec3 position;attribute vec3 normal;attribute vec2 uv;varying vec3 vPosition;varying vec2 vUV;varying vec2 tUV;varying vec2 stageUnits;varying vec2 levelUnits;varying vec2 tileID;uniform float time;uniform mat4 worldViewProjection;uniform vec2 outputSize;uniform vec2 stageSize;uniform vec2 spriteMapSize;uniform float stageScale;void main() {vec4 p=vec4( position,1. );vPosition=p.xyz;vUV=uv;tUV=uv*stageSize; gl_Position=worldViewProjection*p;}`; je.ShadersStore[fve] = pve; class _ve { /** Returns the Number of Sprites in the System */ get spriteCount() { return this.sprites.length; } /** Returns the Position of Output Plane*/ get position() { return this._output.position; } /** Returns the Position of Output Plane*/ set position(e) { this._output.position = e; } /** Returns the Rotation of Output Plane*/ get rotation() { return this._output.rotation; } /** Returns the Rotation of Output Plane*/ set rotation(e) { this._output.rotation = e; } /** Sets the AnimationMap*/ get animationMap() { return this._animationMap; } /** Sets the AnimationMap*/ set animationMap(e) { const t = e._texture._bufferView, i = this._createTileAnimationBuffer(t); this._animationMap.dispose(), this._animationMap = i, this._material.setTexture("animationMap", this._animationMap); } /** * Creates a new SpriteMap * @param name defines the SpriteMaps Name * @param atlasJSON is the JSON file that controls the Sprites Frames and Meta * @param spriteSheet is the Texture that the Sprites are on. 
* @param options a basic deployment configuration * @param scene The Scene that the map is deployed on */ constructor(e, t, i, r, s) { this.name = e, this.sprites = [], this.atlasJSON = t, this.sprites = this.atlasJSON.frames, this.spriteSheet = i, this.options = r, r.stageSize = r.stageSize || new at(1, 1), r.outputSize = r.outputSize || r.stageSize, r.outputPosition = r.outputPosition || D.Zero(), r.outputRotation = r.outputRotation || D.Zero(), r.layerCount = r.layerCount || 1, r.maxAnimationFrames = r.maxAnimationFrames || 0, r.baseTile = r.baseTile || 0, r.flipU = r.flipU || !1, r.colorMultiply = r.colorMultiply || new D(1, 1, 1), this._scene = s, this._frameMap = this._createFrameBuffer(), this._tileMaps = new Array(); for (let d = 0; d < r.layerCount; d++) this._tileMaps.push(this._createTileBuffer(null, d)); this._animationMap = this._createTileAnimationBuffer(null); const n = []; n.push("#define LAYERS " + r.layerCount), r.flipU && n.push("#define FLIPU"), n.push(`#define MAX_ANIMATION_FRAMES ${r.maxAnimationFrames}.0`); const a = Cr.ShadersStore.spriteMapPixelShader; let l; if (s.getEngine()._features.supportSwitchCaseInShader) { l = "switch(i) {"; for (let d = 0; d < r.layerCount; d++) l += "case " + d + " : frameID = texture(tileMaps[" + d + "], (tileID + 0.5) / stageSize, 0.).x;", l += "break;"; l += "}"; } else { l = ""; for (let d = 0; d < r.layerCount; d++) l += `if (${d} == i) { frameID = texture2D(tileMaps[${d}], (tileID + 0.5) / stageSize, 0.).x; }`; } Cr.ShadersStore["spriteMap" + this.name + "PixelShader"] = a.replace("#define LAYER_ID_SWITCH", l), this._material = new Lo("spriteMap:" + this.name, this._scene, { vertex: "spriteMap", fragment: "spriteMap" + this.name }, { defines: n, attributes: ["position", "normal", "uv"], uniforms: ["worldViewProjection", "time", "stageSize", "outputSize", "spriteMapSize", "spriteCount", "time", "colorMul", "mousePosition", "curTile", "flipU"], samplers: ["spriteSheet", "frameMap", "tileMaps", "animationMap"], needAlphaBlending: !0 }), this._time = 0, this._material.setFloat("spriteCount", this.spriteCount), this._material.setVector2("stageSize", r.stageSize), this._material.setVector2("outputSize", r.outputSize), this._material.setTexture("spriteSheet", this.spriteSheet), this._material.setVector2("spriteMapSize", new at(1, 1)), this._material.setVector3("colorMul", r.colorMultiply); let o = 0; const u = () => { if (this.spriteSheet && this.spriteSheet.isReady() && this.spriteSheet._texture) { this._material.setVector2("spriteMapSize", new at(this.spriteSheet._texture.baseWidth || 1, this.spriteSheet._texture.baseHeight || 1)); return; } o < 100 && setTimeout(() => { o++, u(); }, 100); }; u(), this._material.setVector3("colorMul", r.colorMultiply), this._material.setTexture("frameMap", this._frameMap), this._material.setTextureArray("tileMaps", this._tileMaps), this._material.setTexture("animationMap", this._animationMap), this._material.setFloat("time", this._time), this._output = hx(e + ":output", { size: 1, updatable: !0 }, s), this._output.scaling.x = r.outputSize.x, this._output.scaling.y = r.outputSize.y, this.position = r.outputPosition, this.rotation = r.outputRotation; const h = () => { this._time += this._scene.getEngine().getDeltaTime(), this._material.setFloat("time", this._time); }; this._scene.onBeforeRenderObservable.add(h), this._output.material = this._material; } /** * Returns tileID location * @returns Vector2 the cell position ID */ getTileID() { const e = this.getMousePosition(); return 
e.multiplyInPlace(this.options.stageSize || at.Zero()), e.x = Math.floor(e.x), e.y = Math.floor(e.y), e; } /** * Gets the UV location of the mouse over the SpriteMap. * @returns Vector2 the UV position of the mouse interaction */ getMousePosition() { const e = this._output, t = this._scene.pick(this._scene.pointerX, this._scene.pointerY, (r) => r === e); if (!t || !t.hit || !t.getTextureCoordinates) return new at(-1, -1); const i = t.getTextureCoordinates(); return i || new at(-1, -1); } /** * Creates the "frame" texture Buffer * ------------------------------------- * Structure of frames * "filename": "Falling-Water-2.png", * "frame": {"x":69,"y":103,"w":24,"h":32}, * "rotated": true, * "trimmed": true, * "spriteSourceSize": {"x":4,"y":0,"w":24,"h":32}, * "sourceSize": {"w":32,"h":32} * @returns RawTexture of the frameMap */ _createFrameBuffer() { const e = []; for (let r = 0; r < this.spriteCount; r++) e.push(0, 0, 0, 0), e.push(0, 0, 0, 0), e.push(0, 0, 0, 0), e.push(0, 0, 0, 0); for (let r = 0; r < this.spriteCount; r++) { const s = this.sprites[r].frame, n = this.sprites[r].spriteSourceSize, a = this.sprites[r].sourceSize, l = this.sprites[r].rotated ? 1 : 0, o = this.sprites[r].trimmed ? 1 : 0; e[r * 4] = s.x, e[r * 4 + 1] = s.y, e[r * 4 + 2] = s.w, e[r * 4 + 3] = s.h, e[r * 4 + this.spriteCount * 4] = n.x, e[r * 4 + 1 + this.spriteCount * 4] = n.y, e[r * 4 + 3 + this.spriteCount * 4] = n.h, e[r * 4 + this.spriteCount * 8] = a.w, e[r * 4 + 1 + this.spriteCount * 8] = a.h, e[r * 4 + 2 + this.spriteCount * 8] = l, e[r * 4 + 3 + this.spriteCount * 8] = o; } const t = new Float32Array(e); return Po.CreateRGBATexture(t, this.spriteCount, 4, this._scene, !1, !1, De.NEAREST_NEAREST, $e.TEXTURETYPE_FLOAT); } /** * Creates the tileMap texture Buffer * @param buffer normally and array of numbers, or a false to generate from scratch * @param _layer indicates what layer for a logic trigger dealing with the baseTile. The system uses this * @returns RawTexture of the tileMap */ _createTileBuffer(e, t = 0) { let i = []; const r = this.options.stageSize.y || 0, s = this.options.stageSize.x || 0; if (e) i = e; else { let l = this.options.baseTile; t != 0 && (l = 0); for (let o = 0; o < r; o++) for (let u = 0; u < s * 4; u += 4) i.push(l, 0, 0, 0); } const n = new Float32Array(i); return Po.CreateRGBATexture(n, s, r, this._scene, !1, !1, De.NEAREST_NEAREST, $e.TEXTURETYPE_FLOAT); } /** * Modifies the data of the tileMaps * @param _layer is the ID of the layer you want to edit on the SpriteMap * @param pos is the iVector2 Coordinates of the Tile * @param tile The SpriteIndex of the new Tile */ changeTiles(e = 0, t, i = 0) { const r = this._tileMaps[e]._texture._bufferView; if (r === null) return; let s = []; t instanceof at ? 
s.push(t) : s = t; const n = this.options.stageSize.x || 0; for (let l = 0; l < s.length; l++) { const o = s[l]; o.x = Math.floor(o.x), o.y = Math.floor(o.y); const u = o.x * 4 + o.y * (n * 4); r[u] = i; } const a = this._createTileBuffer(r); this._tileMaps[e].dispose(), this._tileMaps[e] = a, this._material.setTextureArray("tileMap", this._tileMaps); } /** * Creates the animationMap texture Buffer * @param buffer normally and array of numbers, or a false to generate from scratch * @returns RawTexture of the animationMap */ _createTileAnimationBuffer(e) { const t = []; let i; if (e) i = e; else { for (let s = 0; s < this.spriteCount; s++) { t.push(0, 0, 0, 0); let n = 1; for (; n < (this.options.maxAnimationFrames || 4); ) t.push(0, 0, 0, 0), n++; } i = new Float32Array(t); } return Po.CreateRGBATexture(i, this.spriteCount, this.options.maxAnimationFrames || 4, this._scene, !1, !1, De.NEAREST_NEAREST, $e.TEXTURETYPE_FLOAT); } /** * Modifies the data of the animationMap * @param cellID is the Index of the Sprite * @param _frame is the target Animation frame * @param toCell is the Target Index of the next frame of the animation * @param time is a value between 0-1 that is the trigger for when the frame should change tiles * @param speed is a global scalar of the time variable on the map. */ addAnimationToTile(e = 0, t = 0, i = 0, r = 0, s = 1) { const n = this._animationMap._texture._bufferView, a = e * 4 + this.spriteCount * 4 * t; if (!n) return; n[a] = i, n[a + 1] = r, n[a + 2] = s; const l = this._createTileAnimationBuffer(n); this._animationMap.dispose(), this._animationMap = l, this._material.setTexture("animationMap", this._animationMap); } /** * Exports the .tilemaps file */ saveTileMaps() { let e = ""; for (let i = 0; i < this._tileMaps.length; i++) i > 0 && (e += ` \r`), e += this._tileMaps[i]._texture._bufferView.toString(); const t = document.createElement("a"); t.href = "data:octet/stream;charset=utf-8," + encodeURI(e), t.target = "_blank", t.download = this.name + ".tilemaps", t.click(), t.remove(); } /** * Imports the .tilemaps file * @param url of the .tilemaps file */ loadTileMaps(e) { const t = new XMLHttpRequest(); t.open("GET", e); const i = this.options.layerCount || 0; t.onload = () => { const r = t.response.split(` \r`); for (let s = 0; s < i; s++) { const n = r[s].split(",").map(Number), a = this._createTileBuffer(n); this._tileMaps[s].dispose(), this._tileMaps[s] = a; } this._material.setTextureArray("tileMap", this._tileMaps); }, t.send(); } /** * Release associated resources */ dispose() { this._output.dispose(), this._material.dispose(), this._animationMap.dispose(), this._tileMaps.forEach((e) => { e.dispose(); }), this._frameMap.dispose(); } } class mve extends YC { /** * Creates a new sprite manager from a packed sprite sheet * @param name defines the manager's name * @param imgUrl defines the sprite sheet url * @param capacity defines the maximum allowed number of sprites * @param scene defines the hosting scene * @param spriteJSON null otherwise a JSON object defining sprite sheet data * @param epsilon defines the epsilon value to align texture (0.01 by default) * @param samplingMode defines the sampling mode to use with spritesheet * @param fromPacked set to true; do not alter */ constructor(e, t, i, r, s = null, n = 0.01, a = De.TRILINEAR_SAMPLINGMODE) { super(e, t, i, 64, r, n, a, !0, s), this.name = e; } } var GC; (function(c) { c[c.INIT = 0] = "INIT", c[c.RUNNING = 1] = "RUNNING", c[c.DONE = 2] = "DONE", c[c.ERROR = 3] = "ERROR"; })(GC || (GC = {})); 
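/*
 * Usage sketch for the SpriteMap class above (minified here as _ve; Texture is De, Vector2 is at).
 * It is written against the public Babylon.js names with illustrative asset paths, so treat it as an
 * assumption for documentation purposes rather than code taken from this bundle:
 *
 *   const atlasJSON = await (await fetch("./textures/terrain.json")).json();   // TexturePacker-style atlas
 *   const spriteSheet = new BABYLON.Texture("./textures/terrain.png", scene);
 *   const map = new BABYLON.SpriteMap("level", atlasJSON, spriteSheet, {
 *       stageSize: new BABYLON.Vector2(32, 32),   // tiles on the output plane
 *       layerCount: 2,
 *       maxAnimationFrames: 8
 *   }, scene);
 *   map.changeTiles(0, new BABYLON.Vector2(4, 7), 12);   // place sprite index 12 at cell (4, 7) on layer 0
 *   map.addAnimationToTile(12, 0, 13, 0.25, 1);          // frame 0 of sprite 12 advances to sprite 13 at t = 0.25
 */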
class p6 { /** * Creates a new asset task * @param name defines the name of the task */ constructor(e) { this.name = e, this._isCompleted = !1, this._taskState = GC.INIT; } /** * Gets whether the task is completed */ get isCompleted() { return this._isCompleted; } /** * Gets the current state of the task */ get taskState() { return this._taskState; } /** * Gets the current error object (if task is in error) */ get errorObject() { return this._errorObject; } /** * Internal only * @internal */ _setErrorObject(e, t) { this._errorObject || (this._errorObject = { message: e, exception: t }); } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ run(e, t, i) { this._taskState = GC.RUNNING, this.runTask(e, () => { this._onDoneCallback(t, i); }, (r, s) => { this._onErrorCallback(i, r, s); }); } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ // eslint-disable-next-line @typescript-eslint/no-unused-vars runTask(e, t, i) { throw new Error("runTask is not implemented"); } /** * Reset will set the task state back to INIT, so the next load call of the assets manager will execute this task again. * This can be used with failed tasks that have the reason for failure fixed. */ reset() { this._taskState = GC.INIT; } _onErrorCallback(e, t, i) { this._taskState = GC.ERROR, this._errorObject = { message: t, exception: i }, this.onError && this.onError(this, t, i), e(); } _onDoneCallback(e, t) { try { this._taskState = GC.DONE, this._isCompleted = !0, this.onSuccess && this.onSuccess(this), e(); } catch (i) { this._onErrorCallback(t, "Task is done, error executing success callback(s)", i); } } } class zne { /** * Creates an AssetsProgressEvent * @param remainingCount defines the number of remaining tasks to process * @param totalCount defines the total number of tasks * @param task defines the task that was just processed */ constructor(e, t, i) { this.remainingCount = e, this.totalCount = t, this.task = i; } } class Hne extends p6 { /** * Creates a new ContainerAssetTask * @param name defines the name of the task * @param meshesNames defines the list of mesh names you want to load * @param rootUrl defines the root url to use as a base to load your meshes and associated resources * @param sceneFilename defines the filename or File of the scene to load from */ constructor(e, t, i, r, s) { super(e), this.name = e, this.meshesNames = t, this.rootUrl = i, this.sceneFilename = r, this.extension = s; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { fr.LoadAssetContainer(this.rootUrl, this.sceneFilename, e, (r) => { this.loadedContainer = r, this.loadedMeshes = r.meshes, this.loadedTransformNodes = r.transformNodes, this.loadedParticleSystems = r.particleSystems, this.loadedSkeletons = r.skeletons, this.loadedAnimationGroups = r.animationGroups, t(); }, null, (r, s, n) => { i(s, n); }, this.extension); } } class Gne extends p6 { /** * Creates a new MeshAssetTask * @param name defines the name of the task * @param meshesNames
defines the list of mesh's names you want to load * @param rootUrl defines the root url to use as a base to load your meshes and associated resources * @param sceneFilename defines the filename or File of the scene to load from */ constructor(e, t, i, r, s) { super(e), this.name = e, this.meshesNames = t, this.rootUrl = i, this.sceneFilename = r, this.extension = s; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { fr.ImportMesh(this.meshesNames, this.rootUrl, this.sceneFilename, e, (r, s, n, a, l) => { this.loadedMeshes = r, this.loadedTransformNodes = l, this.loadedParticleSystems = s, this.loadedSkeletons = n, this.loadedAnimationGroups = a, t(); }, null, (r, s, n) => { i(s, n); }, this.extension); } } class gve extends p6 { /** * Creates a new AnimationAssetTask * @param name defines the name of the task * @param rootUrl defines the root url to use as a base to load your meshes and associated resources * @param filename defines the filename or File of the scene to load from * @param targetConverter defines a function used to convert animation targets from loaded scene to current scene (default: search node by name) */ constructor(e, t, i, r, s) { super(e), this.name = e, this.rootUrl = t, this.filename = i, this.targetConverter = r, this.extension = s; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = e.animatables.length, s = e.animationGroups.length; this.loadedAnimatables = [], this.loadedAnimationGroups = [], fr.ImportAnimations(this.rootUrl, this.filename, e, !1, FC.NoSync, this.targetConverter, () => { this.loadedAnimatables = e.animatables.slice(r), this.loadedAnimationGroups = e.animationGroups.slice(s), t(); }, null, (n, a, l) => { i(a, l); }, this.extension); } } class Kne extends p6 { /** * Creates a new TextFileAssetTask object * @param name defines the name of the task * @param url defines the location of the file to load */ constructor(e, t) { super(e), this.name = e, this.url = t; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { e._loadFile(this.url, (r) => { this.text = r, t(); }, void 0, !1, !1, (r, s) => { r && i(r.status + " " + r.statusText, s); }); } } class Wne extends p6 { /** * Creates a new BinaryFileAssetTask object * @param name defines the name of the new task * @param url defines the location of the file to load */ constructor(e, t) { super(e), this.name = e, this.url = t; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { e._loadFile(this.url, (r) => { this.data = r, t(); }, void 0, !0, !0, (r, s) => { r && i(r.status + " " + r.statusText, s); }); } } class jne extends p6 { /** * Creates a new ImageAssetTask * @param name defines the name of the task * @param url defines the location of the image to load */ 
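/*
 * Usage sketch for the asset task classes in this section (assumption: written against the public
 * Babylon.js names — AssetsManager is minified below as vve, MeshAssetTask as Gne, TextureAssetTask
 * as Xne; file paths are illustrative). Tasks are normally created through the manager rather than
 * instantiated directly:
 *
 *   const assetsManager = new BABYLON.AssetsManager(scene);
 *   const meshTask = assetsManager.addMeshTask("player", "", "./models/", "player.glb");
 *   meshTask.onSuccess = (task) => { task.loadedMeshes[0].position.y = 1; };
 *   assetsManager.addTextureTask("grass", "./textures/grass.png");
 *   assetsManager.onFinish = () => engine.runRenderLoop(() => scene.render());
 *   assetsManager.load();
 */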
constructor(e, t) { super(e), this.name = e, this.url = t; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = new Image(); Ve.SetCorsBehavior(this.url, r), r.onload = () => { this.image = r, t(); }, r.onerror = (s) => { i("Error loading image", s); }, r.src = this.url; } } class Xne extends p6 { /** * Creates a new TextureAssetTask object * @param name defines the name of the task * @param url defines the location of the file to load * @param noMipmap defines if mipmap should not be generated (default is false) * @param invertY defines if texture must be inverted on Y axis (default is true) * @param samplingMode defines the sampling mode to use (default is Texture.TRILINEAR_SAMPLINGMODE) */ constructor(e, t, i, r = !0, s = De.TRILINEAR_SAMPLINGMODE) { super(e), this.name = e, this.url = t, this.noMipmap = i, this.invertY = r, this.samplingMode = s; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = () => { t(); }, s = (n, a) => { i(n, a); }; this.texture = new De(this.url, e, this.noMipmap, this.invertY, this.samplingMode, r, s); } } class Yne extends p6 { /** * Creates a new CubeTextureAssetTask * @param name defines the name of the task * @param url defines the location of the files to load (You have to specify the folder where the files are + filename with no extension) * @param extensions defines the extensions to use to load files (["_px", "_py", "_pz", "_nx", "_ny", "_nz"] by default) * @param noMipmap defines if mipmaps should not be generated (default is false) * @param files defines the explicit list of files (undefined by default) * @param prefiltered */ constructor(e, t, i, r, s, n) { super(e), this.name = e, this.url = t, this.extensions = i, this.noMipmap = r, this.files = s, this.prefiltered = n; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = () => { t(); }, s = (n, a) => { i(n, a); }; this.texture = new ul(this.url, e, this.extensions, this.noMipmap, this.files, r, s, void 0, this.prefiltered); } } class Qne extends p6 { /** * Creates a new HDRCubeTextureAssetTask object * @param name defines the name of the task * @param url defines the location of the file to load * @param size defines the desired size (the more it increases the longer the generation will be) If the size is omitted this implies you are using a preprocessed cubemap. 
* @param noMipmap defines if mipmaps should not be generated (default is false) * @param generateHarmonics specifies whether you want to extract the polynomial harmonics during the generation process (default is true) * @param gammaSpace specifies if the texture will be use in gamma or linear space (the PBR material requires those texture in linear space, but the standard material would require them in Gamma space) (default is false) * @param reserved Internal use only */ constructor(e, t, i, r = !1, s = !0, n = !1, a = !1) { super(e), this.name = e, this.url = t, this.size = i, this.noMipmap = r, this.generateHarmonics = s, this.gammaSpace = n, this.reserved = a; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = () => { t(); }, s = (n, a) => { i(n, a); }; this.texture = new ZC(this.url, e, this.size, this.noMipmap, this.generateHarmonics, this.gammaSpace, this.reserved, r, s); } } class $ne extends p6 { /** * Creates a new EquiRectangularCubeTextureAssetTask object * @param name defines the name of the task * @param url defines the location of the file to load * @param size defines the desired size (the more it increases the longer the generation will be) * If the size is omitted this implies you are using a preprocessed cubemap. * @param noMipmap defines if mipmaps should not be generated (default is false) * @param gammaSpace specifies if the texture will be used in gamma or linear space * (the PBR material requires those texture in linear space, but the standard material would require them in Gamma space) * (default is true) */ constructor(e, t, i, r = !1, s = !0) { super(e), this.name = e, this.url = t, this.size = i, this.noMipmap = r, this.gammaSpace = s; } /** * Execute the current task * @param scene defines the scene where you want your assets to be loaded * @param onSuccess is a callback called when the task is successfully executed * @param onError is a callback called if an error occurs */ runTask(e, t, i) { const r = () => { t(); }, s = (n, a) => { i(n, a); }; this.texture = new WO(this.url, e, this.size, this.noMipmap, this.gammaSpace, r, s); } } class vve { /** * Creates a new AssetsManager * @param scene defines the scene to work on */ constructor(e) { this._isLoading = !1, this._tasks = new Array(), this._waitingTasksCount = 0, this._totalTasksCount = 0, this.onTaskSuccessObservable = new Fe(), this.onTaskErrorObservable = new Fe(), this.onTasksDoneObservable = new Fe(), this.onProgressObservable = new Fe(), this.useDefaultLoadingScreen = !0, this.autoHideLoadingUI = !0, this._scene = e || gi.LastCreatedScene; } /** * Add a ContainerAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param meshesNames defines the name of meshes to load * @param rootUrl defines the root url to use to locate files * @param sceneFilename defines the filename of the scene file or the File itself * @param extension defines the extension to use to load the file * @returns a new ContainerAssetTask object */ addContainerTask(e, t, i, r, s) { const n = new Hne(e, t, i, r, s); return this._tasks.push(n), n; } /** * Add a MeshAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param meshesNames defines the name of meshes to load * @param rootUrl defines the root url to use to locate files * @param 
sceneFilename defines the filename of the scene file or the File itself * @param extension defines the extension to use to load the file * @returns a new MeshAssetTask object */ addMeshTask(e, t, i, r, s) { const n = new Gne(e, t, i, r, s); return this._tasks.push(n), n; } /** * Add a TextFileAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @returns a new TextFileAssetTask object */ addTextFileTask(e, t) { const i = new Kne(e, t); return this._tasks.push(i), i; } /** * Add a BinaryFileAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @returns a new BinaryFileAssetTask object */ addBinaryFileTask(e, t) { const i = new Wne(e, t); return this._tasks.push(i), i; } /** * Add a ImageAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @returns a new ImageAssetTask object */ addImageTask(e, t) { const i = new jne(e, t); return this._tasks.push(i), i; } /** * Add a TextureAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @param noMipmap defines if the texture must not receive mipmaps (false by default) * @param invertY defines if you want to invert Y axis of the loaded texture (true by default) * @param samplingMode defines the sampling mode to use (Texture.TRILINEAR_SAMPLINGMODE by default) * @returns a new TextureAssetTask object */ addTextureTask(e, t, i, r, s = De.TRILINEAR_SAMPLINGMODE) { const n = new Xne(e, t, i, r, s); return this._tasks.push(n), n; } /** * Add a CubeTextureAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @param extensions defines the extension to use to load the cube map (can be null) * @param noMipmap defines if the texture must not receive mipmaps (false by default) * @param files defines the list of files to load (can be null) * @param prefiltered defines the prefiltered texture option (default is false) * @returns a new CubeTextureAssetTask object */ addCubeTextureTask(e, t, i, r, s, n) { const a = new Yne(e, t, i, r, s, n); return this._tasks.push(a), a; } /** * * Add a HDRCubeTextureAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @param size defines the size you want for the cubemap (can be null) * @param noMipmap defines if the texture must not receive mipmaps (false by default) * @param generateHarmonics defines if you want to automatically generate (true by default) * @param gammaSpace specifies if the texture will be use in gamma or linear space (the PBR material requires those texture in linear space, but the standard material would require them in Gamma space) (default is false) * @param reserved Internal use only * @returns a new HDRCubeTextureAssetTask object */ addHDRCubeTextureTask(e, t, i, r = !1, s = !0, n = !1, a = !1) { const l = new Qne(e, t, i, r, s, n, a); return this._tasks.push(l), l; } /** * * Add a EquiRectangularCubeTextureAssetTask to the list of active tasks * @param taskName defines the name of the new task * @param url defines the url of the file to load * @param size defines the size you want for the cubemap (can be null) * @param noMipmap defines if the texture must not receive mipmaps (false by default) * 
@param gammaSpace Specifies if the texture will be used in gamma or linear space * (the PBR material requires those textures in linear space, but the standard material would require them in Gamma space) * @returns a new EquiRectangularCubeTextureAssetTask object */ addEquiRectangularCubeTextureAssetTask(e, t, i, r = !1, s = !0) { const n = new $ne(e, t, i, r, s); return this._tasks.push(n), n; } /** * Remove a task from the assets manager. * @param task the task to remove */ removeTask(e) { const t = this._tasks.indexOf(e); t > -1 && this._tasks.splice(t, 1); } _decreaseWaitingTasksCount(e) { this._waitingTasksCount--; try { this.onProgress && this.onProgress(this._waitingTasksCount, this._totalTasksCount, e), this.onProgressObservable.notifyObservers(new zne(this._waitingTasksCount, this._totalTasksCount, e)); } catch (t) { Ce.Error("Error running progress callbacks."), Ce.Log(t); } if (this._waitingTasksCount === 0) { try { const t = this._tasks.slice(); this.onFinish && this.onFinish(t); for (const i of t) if (i.taskState === GC.DONE) { const r = this._tasks.indexOf(i); r > -1 && this._tasks.splice(r, 1); } this.onTasksDoneObservable.notifyObservers(this._tasks); } catch (t) { Ce.Error("Error running tasks-done callbacks."), Ce.Log(t); } this._isLoading = !1, this.autoHideLoadingUI && this._scene.getEngine().hideLoadingUI(); } } _runTask(e) { const t = () => { try { this.onTaskSuccess && this.onTaskSuccess(e), this.onTaskSuccessObservable.notifyObservers(e), this._decreaseWaitingTasksCount(e); } catch (r) { i("Error executing task success callbacks", r); } }, i = (r, s) => { e._setErrorObject(r, s), this.onTaskError ? this.onTaskError(e) : e.onError || Ce.Error(this._formatTaskErrorMessage(e)), this.onTaskErrorObservable.notifyObservers(e), this._decreaseWaitingTasksCount(e); }; e.run(this._scene, t, i); } _formatTaskErrorMessage(e) { let t = "Unable to complete task " + e.name; return e.errorObject.message && (t += `: ${e.errorObject.message}`), e.errorObject.exception && (t += `: ${e.errorObject.exception}`), t; } /** * Reset the AssetsManager and remove all tasks * @returns the current instance of the AssetsManager */ reset() { return this._isLoading = !1, this._tasks = new Array(), this; } /** * Start the loading process * @returns the current instance of the AssetsManager */ load() { if (this._isLoading) return this; if (this._isLoading = !0, this._waitingTasksCount = this._tasks.length, this._totalTasksCount = this._tasks.length, this._waitingTasksCount === 0) return this._isLoading = !1, this.onFinish && this.onFinish(this._tasks), this.onTasksDoneObservable.notifyObservers(this._tasks), this; this.useDefaultLoadingScreen && this._scene.getEngine().displayLoadingUI(); for (let e = 0; e < this._tasks.length; e++) { const t = this._tasks[e]; t.taskState === GC.INIT && this._runTask(t); } return this; } /** * Start the loading process as an async operation * @returns a promise returning the list of failed tasks */ loadAsync() { return new Promise((e, t) => { if (this._isLoading) { e(); return; } this.onTasksDoneObservable.addOnce((i) => { i && i.length ? t(i) : e(); }), this.load(); }); } } class rO { /** * The resolve method of the promise associated with this deferred object. */ get resolve() { return this._resolve; } /** * The reject method of the promise associated with this deferred object. */ get reject() { return this._reject; } /** * Constructor for this deferred object. 
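* @example
* // Hedged sketch: "Deferred" is assumed to be the public name of this class (minified here as rO).
* const deferred = new Deferred();
* deferred.promise.then((value) => console.log("resolved with", value));
* setTimeout(() => deferred.resolve(42), 1000);   // or deferred.reject(new Error("timed out"))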
*/ constructor() { this.promise = new Promise((e, t) => { this._resolve = e, this._reject = t; }); } } class Ave { /** * Explodes meshes from a center mesh. * @param meshes The meshes to explode. * @param centerMesh The mesh to be center of explosion. */ constructor(e, t) { this._meshesOrigins = [], this._toCenterVectors = [], this._scaledDirection = new D(1, 1, 1), this._newPosition = D.Zero(), this._centerPosition = D.Zero(), this._meshes = e.slice(), t ? this._centerMesh = t : this._setCenterMesh(), this._centerMesh.computeWorldMatrix(!0); const i = this._meshes.indexOf(this._centerMesh); i >= 0 && this._meshes.splice(i, 1), this._centerPosition = this._centerMesh.getAbsolutePosition().clone(); for (let r = 0; r < this._meshes.length; r++) if (this._meshes[r]) { const s = this._meshes[r]; this._meshesOrigins[r] = s.getAbsolutePosition().clone(), this._toCenterVectors[r] = D.Zero(), s.hasBoundingInfo && this._centerMesh.hasBoundingInfo && (s.computeWorldMatrix(!0), s.getBoundingInfo().boundingBox.centerWorld.subtractToRef(this._centerMesh.getBoundingInfo().boundingBox.centerWorld, this._toCenterVectors[r])); } } _setCenterMesh() { let e = D.Zero(); const t = D.Zero(); let i = Number.MAX_VALUE; for (let r = 0; r < this._meshes.length; r++) if (this._meshes[r]) { const n = this._meshes[r].getBoundingInfo(); n && t.addInPlace(n.boundingBox.centerWorld); } e = t.scale(1 / this._meshes.length); for (let r = 0; r < this._meshes.length; r++) if (this._meshes[r]) { const s = this._meshes[r], n = s.getBoundingInfo(); if (n) { const a = n.boundingBox.centerWorld.subtract(e).lengthSquared(); a < i && (this._centerMesh = s, i = a); } } } /** * Get class name * @returns "MeshExploder" */ getClassName() { return "MeshExploder"; } /** * "Exploded meshes" * @returns Array of meshes with the centerMesh at index 0. */ getMeshes() { const e = this._meshes.slice(); return e.unshift(this._centerMesh), e; } /** * Explodes meshes giving a specific direction * @param direction Number to multiply distance of each mesh's origin from center. Use a negative number to implode, or zero to reset. 
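* @example
* // Hedged sketch: "MeshExploder" is assumed to be the public name of this class (minified here as Ave).
* const exploder = new MeshExploder(scene.meshes.slice());
* exploder.explode(2);   // push every mesh away from the computed center
* exploder.explode(0);   // move everything back to the original positions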
*/ explode(e = 1) { for (let t = 0; t < this._meshes.length; t++) this._meshes[t] && this._meshesOrigins[t] && this._toCenterVectors[t] && (this._toCenterVectors[t].scaleToRef(e, this._scaledDirection), this._meshesOrigins[t].addToRef(this._scaledDirection, this._newPosition), this._meshes[t].setAbsolutePosition(this._newPosition)); this._centerMesh.setAbsolutePosition(this._centerPosition); } } class cj { /** * List of files ready to be loaded */ static get FilesToLoad() { return JR.FilesToLoad; } /** * Creates a new FilesInput * @param engine defines the rendering engine * @param scene defines the hosting scene * @param sceneLoadedCallback callback called when scene (files provided) is loaded * @param progressCallback callback called to track progress * @param additionalRenderLoopLogicCallback callback called to add user logic to the rendering loop * @param textureLoadingCallback callback called when a texture is loading * @param startingProcessingFilesCallback callback called when the system is about to process all files * @param onReloadCallback callback called when a reload is requested * @param errorCallback callback call if an error occurs * @param useAppend defines if the file loaded must be appended (true) or have the scene replaced (false, default behavior) */ constructor(e, t, i, r, s, n, a, l, o, u = !1) { this.useAppend = u, this.onProcessFileCallback = () => !0, this.displayLoadingUI = !0, this.loadAsync = (h, d) => this.useAppend ? fr.AppendAsync("file:", h, this._currentScene, d) : fr.LoadAsync("file:", h, this._engine, d), this._engine = e, this._currentScene = t, this._sceneLoadedCallback = i, this._progressCallback = r, this._additionalRenderLoopLogicCallback = s, this._textureLoadingCallback = n, this._startingProcessingFilesCallback = a, this._onReloadCallback = l, this._errorCallback = o; } /** * Calls this function to listen to drag'n'drop events on a specific DOM element * @param elementToMonitor defines the DOM element to track */ monitorElementForDragNDrop(e) { e && (this._elementToMonitor = e, this._dragEnterHandler = (t) => { this._drag(t); }, this._dragOverHandler = (t) => { this._drag(t); }, this._dropHandler = (t) => { this._drop(t); }, this._elementToMonitor.addEventListener("dragenter", this._dragEnterHandler, !1), this._elementToMonitor.addEventListener("dragover", this._dragOverHandler, !1), this._elementToMonitor.addEventListener("drop", this._dropHandler, !1)); } /** Gets the current list of files to load */ get filesToLoad() { return this._filesToLoad; } /** * Release all associated resources */ dispose() { this._elementToMonitor && (this._elementToMonitor.removeEventListener("dragenter", this._dragEnterHandler), this._elementToMonitor.removeEventListener("dragover", this._dragOverHandler), this._elementToMonitor.removeEventListener("drop", this._dropHandler)); } _renderFunction() { if (this._additionalRenderLoopLogicCallback && this._additionalRenderLoopLogicCallback(), this._currentScene) { if (this._textureLoadingCallback) { const e = this._currentScene.getWaitingItemsCount(); e > 0 && this._textureLoadingCallback(e); } this._currentScene.render(); } } _drag(e) { e.stopPropagation(), e.preventDefault(); } _drop(e) { e.stopPropagation(), e.preventDefault(), this.loadFiles(e); } _traverseFolder(e, t, i, r) { const s = e.createReader(), n = e.fullPath.replace(/^\//, "").replace(/(.+?)\/?$/, "$1/"); s.readEntries((a) => { i.count += a.length; for (const l of a) l.isFile ? 
l.file((o) => { o.correctName = n + o.name, t.push(o), --i.count === 0 && r(); }) : l.isDirectory && this._traverseFolder(l, t, i, r); --i.count === 0 && r(); }); } _processFiles(e) { for (let t = 0; t < e.length; t++) { const i = e[t].correctName.toLowerCase(), r = i.split(".").pop(); this.onProcessFileCallback(e[t], i, r, (s) => this._sceneFileToLoad = s) && (fr.IsPluginForExtensionAvailable("." + r) && (this._sceneFileToLoad = e[t]), cj.FilesToLoad[i] = e[t]); } } /** * Load files from a drop event * @param event defines the drop event to use as source */ loadFiles(e) { if (e && e.dataTransfer && e.dataTransfer.files && (this._filesToLoad = e.dataTransfer.files), e && e.target && e.target.files && (this._filesToLoad = e.target.files), !(!this._filesToLoad || this._filesToLoad.length === 0) && (this._startingProcessingFilesCallback && this._startingProcessingFilesCallback(this._filesToLoad), this._filesToLoad && this._filesToLoad.length > 0)) { const t = [], i = [], r = e.dataTransfer ? e.dataTransfer.items : null; for (let s = 0; s < this._filesToLoad.length; s++) { const n = this._filesToLoad[s], a = n.name.toLowerCase(); let l; if (n.correctName = a, r) { const o = r[s]; o.getAsEntry ? l = o.getAsEntry() : o.webkitGetAsEntry && (l = o.webkitGetAsEntry()); } l && l.isDirectory ? i.push(l) : t.push(n); } if (i.length === 0) this._processFiles(t), this._processReload(); else { const s = { count: i.length }; for (const n of i) this._traverseFolder(n, t, s, () => { this._processFiles(t), s.count === 0 && this._processReload(); }); } } } _processReload() { this._onReloadCallback ? this._onReloadCallback(this._sceneFileToLoad) : this.reload(); } /** * Reload the current scene from the loaded files */ reload() { this._sceneFileToLoad ? (this.useAppend || this._currentScene && (Ce.errorsCount > 0 && Ce.ClearLogCache(), this._engine.stopRenderLoop()), fr.ShowLoadingScreen = !1, this.displayLoadingUI && this._engine.displayLoadingUI(), this.loadAsync(this._sceneFileToLoad, this._progressCallback).then((e) => { this.useAppend ? 
this.displayLoadingUI && this._engine.hideLoadingUI() : (this._currentScene && this._currentScene.dispose(), this._currentScene = e, this._currentScene.executeWhenReady(() => { this.displayLoadingUI && this._engine.hideLoadingUI(), this._engine.runRenderLoop(() => { this._renderFunction(); }); })), this._sceneLoadedCallback && this._currentScene && this._sceneLoadedCallback(this._sceneFileToLoad, this._currentScene); }).catch((e) => { this.displayLoadingUI && this._engine.hideLoadingUI(), this._errorCallback && this._errorCallback(this._sceneFileToLoad, this._currentScene, e.message); })) : Ce.Error("Please provide a valid .babylon file."); } } class uj { /** * Release associated resources */ dispose() { if (this._observers && this._observables) for (let e = 0; e < this._observers.length; e++) this._observables[e].remove(this._observers[e]); this._observers = null, this._observables = null; } /** * Raise a callback when one of the observable will notify * @param observables defines a list of observables to watch * @param callback defines the callback to call on notification * @param mask defines the mask used to filter notifications * @param scope defines the current scope used to restore the JS context * @returns the new MultiObserver */ static Watch(e, t, i = -1, r = null) { const s = new uj(); s._observers = new Array(), s._observables = e; for (const n of e) { const a = n.add(t, i, !1, r); a && s._observers.push(a); } return s; } } Fe.prototype.notifyObserversWithPromise = async function(c, e = -1, t, i, r) { let s = Promise.resolve(c); if (!this.observers.length) return s; const n = this._eventState; return n.mask = e, n.target = t, n.currentTarget = i, n.skipNextObservers = !1, n.userInfo = r, this.observers.forEach((a) => { n.skipNextObservers || a._willBeUnregistered || a.mask & e && (a.scope ? s = s.then((l) => (n.lastReturnValue = l, a.callback.apply(a.scope, [c, n]))) : s = s.then((l) => (n.lastReturnValue = l, a.callback(c, n))), a.unregisterOnNextCall && this._deferUnregister(a)); }), await s, c; }; class px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return ""; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return !0; } /** * Creates the SceneOptimization object * @param priority defines the priority of this optimization (0 by default which means first in the list) */ constructor(e = 0) { this.priority = e; } } class WF extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Reducing render target texture size to " + this.maximumSize; } /** * Creates the TextureOptimization object * @param priority defines the priority of this optimization (0 by default which means first in the list) * @param maximumSize defines the maximum sized allowed for textures (1024 is the default value). If a texture is bigger, it will be scaled down using a factor defined by the step parameter * @param step defines the factor (0.5 by default) used to scale down textures bigger than maximum sized allowed. 
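* @example
* // Hedged sketch using the public Babylon.js names (TextureOptimization is minified here as WF,
* // SceneOptimizerOptions as aP and SceneOptimizer as hj further below):
* const options = new SceneOptimizerOptions(60, 2000);          // target 60 fps, re-check every 2000 ms
* options.addOptimization(new TextureOptimization(0, 512));     // first priority: scale down textures above 512px
* SceneOptimizer.OptimizeAsync(scene, options);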
*/ constructor(e = 0, t = 1024, i = 0.5) { super(e), this.priority = e, this.maximumSize = t, this.step = i; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { let i = !0; for (let r = 0; r < e.textures.length; r++) { const s = e.textures[r]; if (!s.canRescale || s.getContext) continue; const n = s.getSize(); Math.max(n.width, n.height) > this.maximumSize && (s.scale(this.step), i = !1); } return i; } } class jH extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Setting hardware scaling level to " + this._currentScale; } /** * Creates the HardwareScalingOptimization object * @param priority defines the priority of this optimization (0 by default which means first in the list) * @param maximumScale defines the maximum scale to use (2 by default) * @param step defines the step to use between two passes (0.5 by default) */ constructor(e = 0, t = 2, i = 0.25) { super(e), this.priority = e, this.maximumScale = t, this.step = i, this._currentScale = -1, this._directionOffset = 1; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return this._currentScale === -1 && (this._currentScale = e.getEngine().getHardwareScalingLevel(), this._currentScale > this.maximumScale && (this._directionOffset = -1)), this._currentScale += this._directionOffset * this.step, e.getEngine().setHardwareScalingLevel(this._currentScale), this._directionOffset === 1 ? 
this._currentScale >= this.maximumScale : this._currentScale <= this.maximumScale; } } class jF extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Turning shadows on/off"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return e.shadowsEnabled = t.isInImprovementMode, !0; } } class XF extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Turning post-processes on/off"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return e.postProcessesEnabled = t.isInImprovementMode, !0; } } class YF extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Turning lens flares on/off"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return e.lensFlaresEnabled = t.isInImprovementMode, !0; } } class Zne extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return this.onGetDescription ? this.onGetDescription() : "Running user defined callback"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return this.onApply ? 
this.onApply(e, t) : !0; } } class QF extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Turning particles on/off"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return e.particlesEnabled = t.isInImprovementMode, !0; } } class XH extends px { /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Turning render targets off"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @returns true if everything that can be done was applied */ apply(e, t) { return e.renderTargetsEnabled = t.isInImprovementMode, !0; } } class dT extends px { constructor() { super(...arguments), this._canBeMerged = (e) => { if (!(e instanceof ke)) return !1; const t = e; return !(t.isDisposed() || !t.isVisible || !t.isEnabled() || t.instances.length > 0 || t.skeleton || t.hasLODLevels || t.getTotalVertices() === 0); }; } /** * Gets or sets a boolean which defines if optimization octree has to be updated */ static get UpdateSelectionTree() { return dT._UpdateSelectionTree; } /** * Gets or sets a boolean which defines if optimization octree has to be updated */ static set UpdateSelectionTree(e) { dT._UpdateSelectionTree = e; } /** * Gets a string describing the action executed by the current optimization * @returns description string */ getDescription() { return "Merging similar meshes together"; } /** * This function will be called by the SceneOptimizer when its priority is reached in order to apply the change required by the current optimization * @param scene defines the current scene where to apply this optimization * @param optimizer defines the current optimizer * @param updateSelectionTree defines that the selection octree has to be updated (false by default) * @returns true if everything that can be done was applied */ apply(e, t, i) { const r = e.meshes.slice(0); let s = r.length; for (let a = 0; a < s; a++) { const l = [], o = r[a]; if (this._canBeMerged(o)) { l.push(o); for (let u = a + 1; u < s; u++) { const h = r[u]; this._canBeMerged(h) && h.material === o.material && h.checkCollisions === o.checkCollisions && (l.push(h), s--, r.splice(u, 1), u--); } l.length < 2 || ke.MergeMeshes(l, void 0, !0); } } const n = e; return n.createOrUpdateSelectionOctree && (i != null ? 
i && n.createOrUpdateSelectionOctree() : dT.UpdateSelectionTree && n.createOrUpdateSelectionOctree()), !0; } } dT._UpdateSelectionTree = !1; class aP { /** * Creates a new list of options used by SceneOptimizer * @param targetFrameRate defines the target frame rate to reach (60 by default) * @param trackerDuration defines the interval between two checks (2000ms by default) */ constructor(e = 60, t = 2e3) { this.targetFrameRate = e, this.trackerDuration = t, this.optimizations = []; } /** * Add a new optimization * @param optimization defines the SceneOptimization to add to the list of active optimizations * @returns the current SceneOptimizerOptions */ addOptimization(e) { return this.optimizations.push(e), this; } /** * Add a new custom optimization * @param onApply defines the callback called to apply the custom optimization (true if everything that can be done was applied) * @param onGetDescription defines the callback called to get the description attached with the optimization. * @param priority defines the priority of this optimization (0 by default which means first in the list) * @returns the current SceneOptimizerOptions */ addCustomOptimization(e, t, i = 0) { const r = new Zne(i); return r.onApply = e, r.onGetDescription = t, this.optimizations.push(r), this; } /** * Creates a list of pre-defined optimizations aimed to reduce the visual impact on the scene * @param targetFrameRate defines the target frame rate (60 by default) * @returns a SceneOptimizerOptions object */ static LowDegradationAllowed(e) { const t = new aP(e); let i = 0; return t.addOptimization(new dT(i)), t.addOptimization(new jF(i)), t.addOptimization(new YF(i)), i++, t.addOptimization(new XF(i)), t.addOptimization(new QF(i)), i++, t.addOptimization(new WF(i, 1024)), t; } /** * Creates a list of pre-defined optimizations aimed to have a moderate impact on the scene visual * @param targetFrameRate defines the target frame rate (60 by default) * @returns a SceneOptimizerOptions object */ static ModerateDegradationAllowed(e) { const t = new aP(e); let i = 0; return t.addOptimization(new dT(i)), t.addOptimization(new jF(i)), t.addOptimization(new YF(i)), i++, t.addOptimization(new XF(i)), t.addOptimization(new QF(i)), i++, t.addOptimization(new WF(i, 512)), i++, t.addOptimization(new XH(i)), i++, t.addOptimization(new jH(i, 2)), t; } /** * Creates a list of pre-defined optimizations aimed to have a big impact on the scene visual * @param targetFrameRate defines the target frame rate (60 by default) * @returns a SceneOptimizerOptions object */ static HighDegradationAllowed(e) { const t = new aP(e); let i = 0; return t.addOptimization(new dT(i)), t.addOptimization(new jF(i)), t.addOptimization(new YF(i)), i++, t.addOptimization(new XF(i)), t.addOptimization(new QF(i)), i++, t.addOptimization(new WF(i, 256)), i++, t.addOptimization(new XH(i)), i++, t.addOptimization(new jH(i, 4)), t; } } class hj { /** * Gets or sets a boolean indicating if the optimizer is in improvement mode */ get isInImprovementMode() { return this._improvementMode; } set isInImprovementMode(e) { this._improvementMode = e; } /** * Gets the current priority level (0 at start) */ get currentPriorityLevel() { return this._currentPriorityLevel; } /** * Gets the current frame rate checked by the SceneOptimizer */ get currentFrameRate() { return this._currentFrameRate; } /** * Gets or sets the current target frame rate (60 by default) */ get targetFrameRate() { return this._targetFrameRate; } /** * Gets or sets the current target frame rate (60 by 
default) */ set targetFrameRate(e) { this._targetFrameRate = e; } /** * Gets or sets the current interval between two checks (every 2000ms by default) */ get trackerDuration() { return this._trackerDuration; } /** * Gets or sets the current interval between two checks (every 2000ms by default) */ set trackerDuration(e) { this._trackerDuration = e; } /** * Gets the list of active optimizations */ get optimizations() { return this._options.optimizations; } /** * Creates a new SceneOptimizer * @param scene defines the scene to work on * @param options defines the options to use with the SceneOptimizer * @param autoGeneratePriorities defines if priorities must be generated and not read from SceneOptimization property (true by default) * @param improvementMode defines if the scene optimizer must run the maximum optimization while staying over a target frame instead of trying to reach the target framerate (false by default) */ constructor(e, t, i = !0, r = !1) { if (this._isRunning = !1, this._currentPriorityLevel = 0, this._targetFrameRate = 60, this._trackerDuration = 2e3, this._currentFrameRate = 0, this._improvementMode = !1, this.onSuccessObservable = new Fe(), this.onNewOptimizationAppliedObservable = new Fe(), this.onFailureObservable = new Fe(), t ? this._options = t : this._options = new aP(), this._options.targetFrameRate && (this._targetFrameRate = this._options.targetFrameRate), this._options.trackerDuration && (this._trackerDuration = this._options.trackerDuration), i) { let s = 0; for (const n of this._options.optimizations) n.priority = s++; } this._improvementMode = r, this._scene = e || gi.LastCreatedScene, this._sceneDisposeObserver = this._scene.onDisposeObservable.add(() => { this._sceneDisposeObserver = null, this.dispose(); }); } /** * Stops the current optimizer */ stop() { this._isRunning = !1; } /** * Reset the optimizer to initial step (current priority level = 0) */ reset() { this._currentPriorityLevel = 0; } /** * Start the optimizer. 
By default it will try to reach a specific framerate * but if the optimizer is set with improvementMode === true then it will run all optimization while frame rate is above the target frame rate */ start() { this._isRunning || (this._isRunning = !0, this._scene.executeWhenReady(() => { setTimeout(() => { this._checkCurrentState(); }, this._trackerDuration); })); } _checkCurrentState() { if (!this._isRunning) return; const e = this._scene, t = this._options; if (this._currentFrameRate = Math.round(e.getEngine().getFps()), this._improvementMode && this._currentFrameRate <= this._targetFrameRate || !this._improvementMode && this._currentFrameRate >= this._targetFrameRate) { this._isRunning = !1, this.onSuccessObservable.notifyObservers(this); return; } let i = !0, r = !0; for (let s = 0; s < t.optimizations.length; s++) { const n = t.optimizations[s]; n.priority === this._currentPriorityLevel && (r = !1, i = i && n.apply(e, this), this.onNewOptimizationAppliedObservable.notifyObservers(n)); } if (r) { this._isRunning = !1, this.onFailureObservable.notifyObservers(this); return; } i && this._currentPriorityLevel++, e.executeWhenReady(() => { setTimeout(() => { this._checkCurrentState(); }, this._trackerDuration); }); } /** * Release all resources */ dispose() { this.stop(), this.onSuccessObservable.clear(), this.onFailureObservable.clear(), this.onNewOptimizationAppliedObservable.clear(), this._sceneDisposeObserver && this._scene.onDisposeObservable.remove(this._sceneDisposeObserver); } /** * Helper function to create a SceneOptimizer with one single line of code * @param scene defines the scene to work on * @param options defines the options to use with the SceneOptimizer * @param onSuccess defines a callback to call on success * @param onFailure defines a callback to call on failure * @returns the new SceneOptimizer object */ static OptimizeAsync(e, t, i, r) { const s = new hj(e, t || aP.ModerateDegradationAllowed(), !1); return i && s.onSuccessObservable.add(() => { i(); }), r && s.onFailureObservable.add(() => { r(); }), s.start(), s; } } let YH = []; const dj = (c, e) => { c.doNotSerialize || (e.vertexData.push(c.serializeVerticeData()), YH[c.id] = !0); }, qne = (c, e) => { const t = {}, i = c._geometry; return i && (c.getScene().getGeometryById(i.id) || dj(i, e.geometries)), c.serialize && c.serialize(t), t; }, yve = (c, e) => { if (c._isMesh) { const t = c; if (t.delayLoadState === 1 || t.delayLoadState === 0) { const i = (s) => { e.materials = e.materials || [], t.material && !e.materials.some((n) => n.id === t.material.id) && e.materials.push(s.serialize()); }; if (t.material && !t.material.doNotSerialize) if (t.material instanceof xm) { if (e.multiMaterials = e.multiMaterials || [], !e.multiMaterials.some((s) => s.id === t.material.id)) { e.multiMaterials.push(t.material.serialize()); for (const s of t.material.subMaterials) s && i(s); } } else i(t.material); else t.material || i(t.getScene().defaultMaterial); const r = t._geometry; r && (e.geometries || (e.geometries = {}, e.geometries.boxes = [], e.geometries.spheres = [], e.geometries.cylinders = [], e.geometries.toruses = [], e.geometries.grounds = [], e.geometries.planes = [], e.geometries.torusKnots = [], e.geometries.vertexData = []), dj(r, e.geometries)), t.skeleton && !t.skeleton.doNotSerialize && (e.skeletons = e.skeletons || [], e.skeletons.push(t.skeleton.serialize())), e.meshes = e.meshes || [], e.meshes.push(qne(t, e)); } } else if (c.getClassName() === "TransformNode") { const t = c; 
e.transformNodes.push(t.serialize()); } else if (c.getClassName().indexOf("Camera") !== -1) { const t = c; e.cameras.push(t.serialize()); } else if (c.getClassName().indexOf("Light") !== -1) { const t = c; e.lights.push(t.serialize()); } }; class QC { /** * Clear cache used by a previous serialization */ static ClearCache() { YH = []; } /** * Serialize a scene into a JSON compatible object * Note that if the current engine does not support synchronous texture reading (like WebGPU), you should use SerializeAsync instead * as else you may not retrieve the proper base64 encoded texture data (when using the Texture.ForceSerializeBuffers flag) * @param scene defines the scene to serialize * @returns a JSON compatible object */ static Serialize(e) { return QC._Serialize(e); } static _Serialize(e, t = !0) { const i = {}; if (t && !e.getEngine()._features.supportSyncTextureRead && De.ForceSerializeBuffers && Ce.Warn("The serialization object may not contain the proper base64 encoded texture data! You should use the SerializeAsync method instead."), QC.ClearCache(), i.useDelayedTextureLoading = e.useDelayedTextureLoading, i.autoClear = e.autoClear, i.clearColor = e.clearColor.asArray(), i.ambientColor = e.ambientColor.asArray(), i.gravity = e.gravity.asArray(), i.collisionsEnabled = e.collisionsEnabled, i.useRightHandedSystem = e.useRightHandedSystem, e.fogMode && e.fogMode !== 0 && (i.fogMode = e.fogMode, i.fogColor = e.fogColor.asArray(), i.fogStart = e.fogStart, i.fogEnd = e.fogEnd, i.fogDensity = e.fogDensity), e.isPhysicsEnabled && e.isPhysicsEnabled()) { const l = e.getPhysicsEngine(); l && (i.physicsEnabled = !0, i.physicsGravity = l.gravity.asArray(), i.physicsEngine = l.getPhysicsPluginName()); } e.metadata && (i.metadata = e.metadata), i.morphTargetManagers = []; for (const l of e.meshes) { const o = l.morphTargetManager; o && i.morphTargetManagers.push(o.serialize()); } i.lights = []; let r, s; for (r = 0; r < e.lights.length; r++) s = e.lights[r], s.doNotSerialize || i.lights.push(s.serialize()); for (i.cameras = [], r = 0; r < e.cameras.length; r++) { const l = e.cameras[r]; l.doNotSerialize || i.cameras.push(l.serialize()); } if (e.activeCamera && (i.activeCameraID = e.activeCamera.id), St.AppendSerializedAnimations(e, i), e.animationGroups && e.animationGroups.length > 0) { i.animationGroups = []; for (let l = 0; l < e.animationGroups.length; l++) { const o = e.animationGroups[l]; i.animationGroups.push(o.serialize()); } } if (e.reflectionProbes && e.reflectionProbes.length > 0) for (i.reflectionProbes = [], r = 0; r < e.reflectionProbes.length; r++) { const l = e.reflectionProbes[r]; i.reflectionProbes.push(l.serialize()); } i.materials = [], i.multiMaterials = []; let n; for (r = 0; r < e.materials.length; r++) n = e.materials[r], n.doNotSerialize || i.materials.push(n.serialize()); for (i.multiMaterials = [], r = 0; r < e.multiMaterials.length; r++) { const l = e.multiMaterials[r]; i.multiMaterials.push(l.serialize()); } for (e.environmentTexture && (e.environmentTexture._files ? 
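/*
 * Usage sketch (illustrative only) for the scene serializer being defined here (minified as
 * `QC`), assuming the standard Babylon.js public name SceneSerializer; `scene` and `someMesh`
 * are placeholders:
 *
 *   import { SceneSerializer } from "@babylonjs/core";
 *
 *   // Synchronous serialization; fine when the engine supports synchronous texture reads.
 *   const serialized = SceneSerializer.Serialize(scene);
 *   const json = JSON.stringify(serialized);
 *
 *   // Prefer the async variant on engines such as WebGPU, as warned in the docs above.
 *   SceneSerializer.SerializeAsync(scene).then((data) => {
 *     // data is a plain JSON-compatible object once all pending texture promises resolve.
 *     console.log(Object.keys(data));
 *   });
 *
 *   // A single mesh can also be serialized, optionally with its parents and children.
 *   const meshJson = SceneSerializer.SerializeMesh(someMesh, true, true);
 */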
i.environmentTexture = e.environmentTexture.serialize() : (i.environmentTexture = e.environmentTexture.name, i.environmentTextureRotationY = e.environmentTexture.rotationY)), i.environmentIntensity = e.environmentIntensity, i.skeletons = [], r = 0; r < e.skeletons.length; r++) { const l = e.skeletons[r]; l.doNotSerialize || i.skeletons.push(l.serialize()); } for (i.transformNodes = [], r = 0; r < e.transformNodes.length; r++) e.transformNodes[r].doNotSerialize || i.transformNodes.push(e.transformNodes[r].serialize()); i.geometries = {}, i.geometries.boxes = [], i.geometries.spheres = [], i.geometries.cylinders = [], i.geometries.toruses = [], i.geometries.grounds = [], i.geometries.planes = [], i.geometries.torusKnots = [], i.geometries.vertexData = [], YH = []; const a = e.getGeometries(); for (r = 0; r < a.length; r++) { const l = a[r]; l.isReady() && dj(l, i.geometries); } for (i.meshes = [], r = 0; r < e.meshes.length; r++) { const l = e.meshes[r]; if (l instanceof ke) { const o = l; o.doNotSerialize || (o.delayLoadState === 1 || o.delayLoadState === 0) && i.meshes.push(qne(o, i)); } } for (i.particleSystems = [], r = 0; r < e.particleSystems.length; r++) i.particleSystems.push(e.particleSystems[r].serialize(!1)); for (i.postProcesses = [], r = 0; r < e.postProcesses.length; r++) i.postProcesses.push(e.postProcesses[r].serialize()); e.actionManager && (i.actions = e.actionManager.serialize("scene")); for (const l of e._serializableComponents) l.serialize(i); return i; } /** * Serialize a scene into a JSON compatible object * @param scene defines the scene to serialize * @returns a JSON promise compatible object */ static SerializeAsync(e) { const t = QC._Serialize(e, !1), i = []; return this._CollectPromises(t, i), Promise.all(i).then(() => t); } static _CollectPromises(e, t) { if (Array.isArray(e)) for (let i = 0; i < e.length; ++i) { const r = e[i]; r instanceof Promise ? t.push(r.then((s) => e[i] = s)) : (r instanceof Object || Array.isArray(r)) && this._CollectPromises(r, t); } else if (e instanceof Object) { for (const i in e) if (Object.prototype.hasOwnProperty.call(e, i)) { const r = e[i]; r instanceof Promise ? t.push(r.then((s) => e[i] = s)) : (r instanceof Object || Array.isArray(r)) && this._CollectPromises(r, t); } } } /** * Serialize a mesh into a JSON compatible object * @param toSerialize defines the mesh to serialize * @param withParents defines if parents must be serialized as well * @param withChildren defines if children must be serialized as well * @returns a JSON compatible object */ static SerializeMesh(e, t = !1, i = !1) { const r = {}; if (r.meshes = [], r.transformNodes = [], r.cameras = [], r.lights = [], QC.ClearCache(), e = e instanceof Array ? e : [e], t || i) for (let s = 0; s < e.length; ++s) i && e[s].getDescendants().forEach((n) => { e.indexOf(n) < 0 && !n.doNotSerialize && e.push(n); }), t && e[s].parent && e.indexOf(e[s].parent) < 0 && !e[s].parent.doNotSerialize && e.push(e[s].parent); return e.forEach((s) => { yve(s, r); }), r; } } class yL { /** * Returns whether or not the VideoRecorder is available in your browser. * @param engine Defines the Babylon Engine. * @returns true if supported otherwise false. */ static IsSupported(e) { const t = e.getRenderingCanvas(); return !!t && typeof t.captureStream == "function"; } /** * True when a recording is already in progress. */ get isRecording() { return !!this._canvas && this._canvas.isRecording; } /** * Create a new VideoCapture object which can help converting what you see in Babylon to a video file. 
* @param engine Defines the BabylonJS Engine you wish to record. * @param options Defines options that can be used to customize the capture. */ constructor(e, t = {}) { if (!yL.IsSupported(e)) throw "Your browser does not support recording so far."; const i = e.getRenderingCanvas(); if (!i) throw "The babylon engine must have a canvas to be recorded"; this._canvas = i, this._canvas.isRecording = !1, this._options = Object.assign(Object.assign({}, yL._DefaultOptions), t); const r = this._canvas.captureStream(this._options.fps); if (this._options.audioTracks) for (const s of this._options.audioTracks) r.addTrack(s); this._mediaRecorder = new MediaRecorder(r, { mimeType: this._options.mimeType }), this._mediaRecorder.ondataavailable = (s) => this._handleDataAvailable(s), this._mediaRecorder.onerror = (s) => this._handleError(s), this._mediaRecorder.onstop = () => this._handleStop(); } /** * Stops the current recording before the default capture timeout passed in the startRecording function. */ stopRecording() { !this._canvas || !this._mediaRecorder || this.isRecording && (this._canvas.isRecording = !1, this._mediaRecorder.stop()); } /** * Starts recording the canvas for a max duration specified in parameters. * @param fileName Defines the name of the file to be downloaded when the recording stop. * If null no automatic download will start and you can rely on the promise to get the data back. * @param maxDuration Defines the maximum recording time in seconds. * It defaults to 7 seconds. A value of zero will not stop automatically, you would need to call stopRecording manually. * @returns A promise callback at the end of the recording with the video data in Blob. */ startRecording(e = "babylonjs.webm", t = 7) { if (!this._canvas || !this._mediaRecorder) throw "Recorder has already been disposed"; if (this.isRecording) throw "Recording already in progress"; return t > 0 && setTimeout(() => { this.stopRecording(); }, t * 1e3), this._fileName = e, this._recordedChunks = [], this._resolve = null, this._reject = null, this._canvas.isRecording = !0, this._mediaRecorder.start(this._options.recordChunckSize), new Promise((i, r) => { this._resolve = i, this._reject = r; }); } /** * Releases internal resources used during the recording. */ dispose() { this._canvas = null, this._mediaRecorder = null, this._recordedChunks = [], this._fileName = null, this._resolve = null, this._reject = null; } _handleDataAvailable(e) { e.data.size > 0 && this._recordedChunks.push(e.data); } _handleError(e) { if (this.stopRecording(), this._reject) this._reject(e.error); else throw new e.error(); } _handleStop() { this.stopRecording(); const e = new Blob(this._recordedChunks); this._resolve && this._resolve(e), window.URL.createObjectURL(e), this._fileName && Ve.Download(e, this._fileName); } } yL._DefaultOptions = { mimeType: "video/webm", fps: 25, recordChunckSize: 3e3 }; let V8 = null; function TN(c, e, t, i, r = "image/png", s = !1, n) { const { height: a, width: l } = eae(c, e, t); if (!(a && l)) { Ce.Error("Invalid 'size' parameter !"); return; } V8 || (V8 = document.createElement("canvas")), V8.width = l, V8.height = a; const o = V8.getContext("2d"), u = c.getRenderWidth() / c.getRenderHeight(); let h = l, d = h / u; d > a && (d = a, h = d * u); const f = Math.max(0, l - h) / 2, p = Math.max(0, a - d) / 2; e.getScene().activeCamera !== e ? 
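/*
 * Usage sketch (illustrative only) for the video recorder class above (minified as `yL`),
 * assuming the standard Babylon.js public name VideoRecorder; `engine` is a placeholder:
 *
 *   import { VideoRecorder } from "@babylonjs/core";
 *
 *   if (VideoRecorder.IsSupported(engine)) {
 *     const recorder = new VideoRecorder(engine, { fps: 30 });
 *     // Records for up to 7 seconds, auto-downloads "clip.webm", and also resolves
 *     // the returned promise with the recorded Blob.
 *     recorder.startRecording("clip.webm", 7).then((blob) => {
 *       console.log("recorded", blob.size, "bytes");
 *     });
 *     // recorder.stopRecording() ends the capture before the timeout if needed.
 *   }
 */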
SN(c, e, t, (_) => { if (s) { const v = new Blob([_]); Ve.DownloadBlob(v), i && i(""); } else i && i(_); }, r, 1, c.getCreationOptions().antialias, void 0, void 0, void 0, void 0, n) : c.onEndFrameObservable.addOnce(() => { const _ = c.getRenderingCanvas(); o && _ && o.drawImage(_, f, p, h, d), V8 && (s ? (Ve.EncodeScreenshotCanvasData(V8, void 0, r, void 0, n), i && i("")) : Ve.EncodeScreenshotCanvasData(V8, i, r, void 0, n)); }); } function fj(c, e, t, i = "image/png", r) { return new Promise((s, n) => { TN(c, e, t, (a) => { typeof a < "u" ? s(a) : n(new Error("Data is undefined")); }, i, void 0, r); }); } function Jne(c, e, t, i, r = "image/png", s) { return new Promise((n) => { TN(c, e, { width: t, height: i }, () => { n(); }, r, !0, s); }); } function SN(c, e, t, i, r = "image/png", s = 1, n = !1, a, l = !1, o = !1, u = !0, h, d) { const { height: f, width: p, finalWidth: m, finalHeight: _ } = eae(c, e, t), v = { width: p, height: f }; if (!(f && p)) { Ce.Error("Invalid 'size' parameter !"); return; } const C = { width: c.getRenderWidth(), height: c.getRenderHeight() }; c.setSize(p, f); const x = e.getScene(), b = new ra("screenShot", v, x, !1, !1, 0, !1, De.BILINEAR_SAMPLINGMODE, void 0, o, void 0, void 0, void 0, s); b.renderList = x.meshes.slice(), b.samples = s, b.renderSprites = l, b.activeCamera = e, b.forceLayerMaskCheck = u, d == null || d(b); const S = () => { b.isReadyForRendering() && e.isReady(!0) ? (c.onEndFrameObservable.addOnce(() => { m === p && _ === f ? b.readPixels(void 0, void 0, void 0, !1).then((R) => { qh.DumpData(p, f, R, i, r, a, !0, void 0, h), b.dispose(); }) : vU("pass", b.getInternalTexture(), x, void 0, void 0, void 0, m, _).then((R) => { c._readTexturePixels(R, m, _, -1, 0, null, !0, !1, 0, 0).then((w) => { qh.DumpData(m, _, w, i, r, a, !0, void 0, h), R.dispose(); }); }); }), b.render(!0), x.incrementRenderId(), x.resetCachedMaterial(), c.setSize(C.width, C.height), e.getProjectionMatrix(!0), x.render()) : setTimeout(S, 16); }, M = () => { x.incrementRenderId(), x.resetCachedMaterial(), S(); }; if (n) { const R = new $I("antialiasing", 1, x.activeCamera); b.addPostProcess(R), R.getEffect().isReady() ? M() : R.getEffect().onCompiled = () => { M(); }; } else M(); } function pj(c, e, t, i = "image/png", r = 1, s = !1, n, a = !1, l = !1, o = !0, u) { return new Promise((h, d) => { SN(c, e, t, (f) => { typeof f < "u" ? h(f) : d(new Error("Data is undefined")); }, i, r, s, n, a, l, o, u); }); } function eae(c, e, t) { let i = 0, r = 0, s = 0, n = 0; if (typeof t == "object") { const a = t.precision ? Math.abs(t.precision) : 1; t.width && t.height ? (i = t.height * a, r = t.width * a) : t.width && !t.height ? (r = t.width * a, i = Math.round(r / c.getAspectRatio(e))) : t.height && !t.width ? (i = t.height * a, r = Math.round(i * c.getAspectRatio(e))) : (r = Math.round(c.getRenderWidth() * a), i = Math.round(r / c.getAspectRatio(e))), t.finalWidth && t.finalHeight ? (n = t.finalHeight, s = t.finalWidth) : t.finalWidth && !t.finalHeight ? (s = t.finalWidth, n = Math.round(s / c.getAspectRatio(e))) : t.finalHeight && !t.finalWidth ? 
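/*
 * Usage sketch (illustrative only) for the screenshot helpers above (TN/fj/Jne/SN/pj),
 * assuming they correspond to the standard Babylon.js Tools.CreateScreenshot* helpers, as the
 * registration further below suggests; `engine` and `camera` are placeholders:
 *
 *   import { Tools } from "@babylonjs/core";
 *
 *   // Capture the canvas at its current render size.
 *   Tools.CreateScreenshotAsync(engine, camera, { precision: 1 }).then((base64) => {
 *     console.log("data URL length:", base64.length);
 *   });
 *
 *   // Render offscreen at a fixed size (no visible canvas resize), with MSAA and antialiasing.
 *   Tools.CreateScreenshotUsingRenderTargetAsync(
 *     engine,
 *     camera,
 *     { width: 1920, height: 1080 },
 *     "image/png",
 *     4,    // samples
 *     true  // antialiasing
 *   ).then((base64) => console.log("screenshot ready:", base64.length));
 */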
(n = t.finalHeight, s = Math.round(n * c.getAspectRatio(e))) : (s = r, n = i); } else isNaN(t) || (i = t, r = t, s = t, n = t); return r && (r = Math.floor(r)), i && (i = Math.floor(i)), s && (s = Math.floor(s)), n && (n = Math.floor(n)), { height: i | 0, width: r | 0, finalWidth: s | 0, finalHeight: n | 0 }; } const Cve = { /** * Captures a screenshot of the current rendering * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine defines the rendering engine * @param camera defines the source camera * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param successCallback defines the callback that receives a single parameter which contains the * screenshot as a string of base64-encoded characters. This string can be assigned to the * src parameter of an image element to display it * @param mimeType defines the MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param forceDownload force the system to download the image even if a successCallback is provided * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ CreateScreenshot: TN, /** * Captures a screenshot of the current rendering * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine defines the rendering engine * @param camera defines the source camera * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param mimeType defines the MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns screenshot as a string of base64-encoded characters. This string can be assigned * to the src parameter of an image element to display it */ CreateScreenshotAsync: fj, /** * Captures a screenshot of the current rendering for a specific size. This will render the entire canvas but will generate a blink (due to canvas resize) * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine defines the rendering engine * @param camera defines the source camera * @param width defines the expected width * @param height defines the expected height * @param mimeType defines the MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). 
See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns screenshot as a string of base64-encoded characters. This string can be assigned * to the src parameter of an image element to display it */ CreateScreenshotWithResizeAsync: Jne, /** * Generates an image screenshot from the specified camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine The engine to use for rendering * @param camera The camera to use for rendering * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param successCallback The callback that receives a single parameter which contains the * screenshot as a string of base64-encoded characters. This string can be assigned to the * src parameter of an image element to display it * @param mimeType The MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param samples Texture samples (default: 1) * @param antialiasing Whether antialiasing should be turned on or not (default: false) * @param fileName A name for the downloaded file. * @param renderSprites Whether the sprites should be rendered or not (default: false) * @param enableStencilBuffer Whether the stencil buffer should be enabled or not (default: false) * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. */ CreateScreenshotUsingRenderTarget: SN, /** * Generates an image screenshot from the specified camera. * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/renderToPNG * @param engine The engine to use for rendering * @param camera The camera to use for rendering * @param size This parameter can be set to a single number or to an object with the * following (optional) properties: precision, width, height. If a single number is passed, * it will be used for both width and height. If an object is passed, the screenshot size * will be derived from the parameters. The precision property is a multiplier allowing * rendering at a higher or lower resolution * @param mimeType The MIME type of the screenshot image (default: image/png). * Check your browser for supported MIME types * @param samples Texture samples (default: 1) * @param antialiasing Whether antialiasing should be turned on or not (default: false) * @param fileName A name for the downloaded file. * @param renderSprites Whether the sprites should be rendered or not (default: false) * @param quality The quality of the image if lossy mimeType is used (e.g. image/jpeg, image/webp). See {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob | HTMLCanvasElement.toBlob()}'s `quality` parameter. * @returns screenshot as a string of base64-encoded characters. 
This string can be assigned * to the src parameter of an to display it */ CreateScreenshotUsingRenderTargetAsync: pj }, xve = () => { Ve.CreateScreenshot = TN, Ve.CreateScreenshotAsync = fj, Ve.CreateScreenshotUsingRenderTarget = SN, Ve.CreateScreenshotUsingRenderTargetAsync = pj; }; xve(); var QH; (function(c) { c[c.Checkbox = 0] = "Checkbox", c[c.Slider = 1] = "Slider", c[c.Vector3 = 2] = "Vector3", c[c.Quaternion = 3] = "Quaternion", c[c.Color3 = 4] = "Color3", c[c.String = 5] = "String", c[c.Button = 6] = "Button", c[c.Options = 7] = "Options", c[c.Tab = 8] = "Tab", c[c.FileButton = 9] = "FileButton", c[c.Vector2 = 10] = "Vector2"; })(QH || (QH = {})); class O9 { /** * Constructor * @param buffer The buffer to read */ constructor(e) { this.byteOffset = 0, this.buffer = e; } /** * Loads the given byte length. * @param byteLength The byte length to load * @returns A promise that resolves when the load is complete */ loadAsync(e) { return this.buffer.readAsync(this.byteOffset, e).then((t) => { this._dataView = new DataView(t.buffer, t.byteOffset, t.byteLength), this._dataByteOffset = 0; }); } /** * Read a unsigned 32-bit integer from the currently loaded data range. * @returns The 32-bit integer read */ readUint32() { const e = this._dataView.getUint32(this._dataByteOffset, !0); return this._dataByteOffset += 4, this.byteOffset += 4, e; } /** * Read a byte array from the currently loaded data range. * @param byteLength The byte length to read * @returns The byte array read */ readUint8Array(e) { const t = new Uint8Array(this._dataView.buffer, this._dataView.byteOffset + this._dataByteOffset, e); return this._dataByteOffset += e, this.byteOffset += e, t; } /** * Read a string from the currently loaded data range. * @param byteLength The byte length to read * @returns The string read */ readString(e) { return rK(this.readUint8Array(e)); } /** * Skips the given byte length the currently loaded data range. * @param byteLength The byte length to skip */ skipBytes(e) { this._dataByteOffset += e, this.byteOffset += e; } } class $H { static _GetStorage() { try { return localStorage.setItem("test", ""), localStorage.removeItem("test"), localStorage; } catch { const t = {}; return { getItem: (i) => { const r = t[i]; return r === void 0 ? null : r; }, setItem: (i, r) => { t[i] = r; } }; } } /** * Reads a string from the data storage * @param key The key to read * @param defaultValue The value if the key doesn't exist * @returns The string value */ static ReadString(e, t) { const i = this._Storage.getItem(e); return i !== null ? i : t; } /** * Writes a string to the data storage * @param key The key to write * @param value The value to write */ static WriteString(e, t) { this._Storage.setItem(e, t); } /** * Reads a boolean from the data storage * @param key The key to read * @param defaultValue The value if the key doesn't exist * @returns The boolean value */ static ReadBoolean(e, t) { const i = this._Storage.getItem(e); return i !== null ? i === "true" : t; } /** * Writes a boolean to the data storage * @param key The key to write * @param value The value to write */ static WriteBoolean(e, t) { this._Storage.setItem(e, t ? "true" : "false"); } /** * Reads a number from the data storage * @param key The key to read * @param defaultValue The value if the key doesn't exist * @returns The number value */ static ReadNumber(e, t) { const i = this._Storage.getItem(e); return i !== null ? 
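/*
 * Usage sketch (illustrative only) for the data storage helper being defined here (minified
 * as `$H`), assuming the standard Babylon.js public name DataStorage. It transparently falls
 * back to an in-memory map when localStorage is unavailable (see _GetStorage above):
 *
 *   import { DataStorage } from "@babylonjs/core";
 *
 *   DataStorage.WriteNumber("volume", 0.8);
 *   DataStorage.WriteBoolean("muted", false);
 *   DataStorage.WriteString("quality", "high");
 *
 *   const volume = DataStorage.ReadNumber("volume", 1);      // 1 is returned if the key is missing
 *   const muted = DataStorage.ReadBoolean("muted", false);
 *   const quality = DataStorage.ReadString("quality", "medium");
 */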
parseFloat(i) : t; } /** * Writes a number to the data storage * @param key The key to write * @param value The value to write */ static WriteNumber(e, t) { this._Storage.setItem(e, t.toString()); } } $H._Storage = $H._GetStorage(); class bve { constructor() { this._trackedScene = null; } /** * Track a given scene. This means the current scene state will be considered the original state * @param scene defines the scene to track */ track(e) { this._trackedScene = e, St.AllowLoadingUniqueId = !0, this._savedJSON = QC.Serialize(e), St.AllowLoadingUniqueId = !1; } /** * Get the delta between current state and original state * @returns a any containing the delta */ getDelta() { if (!this._trackedScene) return null; const e = De.ForceSerializeBuffers; De.ForceSerializeBuffers = !1, St.AllowLoadingUniqueId = !0; const t = QC.Serialize(this._trackedScene); St.AllowLoadingUniqueId = !1; const i = {}; for (const r in t) this._compareCollections(r, this._savedJSON[r], t[r], i); return De.ForceSerializeBuffers = e, i; } _compareArray(e, t, i, r) { if (t.length === 0 && i.length === 0) return !0; if (t.length && !isNaN(t[0]) || i.length && !isNaN(i[0])) { if (t.length !== i.length) return !1; if (t.length === 0) return !0; for (let n = 0; n < t.length; n++) if (t[n] !== i[n]) return r[e] = i, !1; return !0; } const s = []; for (let n = 0; n < t.length; n++) { const a = t[n], l = a.uniqueId; s.push(l); const o = i.filter((u) => u.uniqueId === l); if (o.length) { const u = o[0], h = {}; this._compareObjects(a, u, h) || (r[e] || (r[e] = []), h.__state = { id: u.id || u.name }, r[e].push(h)); } else { const u = { __state: { deleteId: a.id || a.name } }; r[e] || (r[e] = []), r[e].push(u); } } for (let n = 0; n < i.length; n++) { const a = i[n], l = a.uniqueId; s.indexOf(l) === -1 && (r[e] || (r[e] = []), r[e].push(a)); } return !0; } _compareObjects(e, t, i) { let r = !1; for (const s in e) { if (!Object.prototype.hasOwnProperty.call(e, s)) continue; const n = e[s], a = t[s]; let l = !1; if (Array.isArray(n)) l = JSON.stringify(n) !== JSON.stringify(a); else if (!isNaN(n) || Object.prototype.toString.call(n) == "[object String]") l = n !== a; else if (typeof n == "object" && typeof a == "object") { const o = {}; this._compareObjects(n, a, o) || (i[s] = o, r = !0); } l && (r = !0, i[s] = a); } return !r; } _compareCollections(e, t, i, r) { if (t !== i && t && i) { if (Array.isArray(t) && Array.isArray(i)) { if (this._compareArray(e, t, i, r)) return; } else if (typeof t == "object" && typeof i == "object") { const s = {}; this._compareObjects(t, i, s) || (r[e] = s); return; } } } static GetShadowGeneratorById(e, t) { const i = e.lights.map((r) => r.getShadowGenerators()); for (const r of i) if (r) { const s = r.values(); for (let n = s.next(); n.done !== !0; n = s.next()) { const a = n.value; if (a && a.id === t) return a; } } return null; } /** * Apply a given delta to a given scene * @param deltaJSON defines the JSON containing the delta * @param scene defines the scene to apply the delta to */ static ApplyDelta(e, t) { typeof e == "string" && (e = JSON.parse(e)); const i = t; for (const r in e) { const s = e[r], n = i[r]; if (Array.isArray(n) || r === "shadowGenerators") switch (r) { case "cameras": this._ApplyDeltaForEntity(s, t, t.getCameraById.bind(t), (a) => Ai.Parse(a, t)); break; case "lights": this._ApplyDeltaForEntity(s, t, t.getLightById.bind(t), (a) => hs.Parse(a, t)); break; case "shadowGenerators": this._ApplyDeltaForEntity(s, t, (a) => this.GetShadowGeneratorById(t, a), (a) => hr.Parse(a, 
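/*
 * Usage sketch (illustrative only) for the scene delta recorder above (minified as `bve`),
 * assuming the standard Babylon.js public name SceneRecorder; `scene` and `otherScene` are
 * placeholders:
 *
 *   import { SceneRecorder } from "@babylonjs/core";
 *
 *   const recorder = new SceneRecorder();
 *   recorder.track(scene);              // snapshot the current scene state
 *
 *   // ...mutate the scene: move meshes, tweak materials, add lights...
 *
 *   const delta = recorder.getDelta();  // JSON-compatible diff against the snapshot
 *
 *   // Re-apply the same changes later, or on another scene instance:
 *   SceneRecorder.ApplyDelta(delta, otherScene);
 */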
t)); break; case "meshes": this._ApplyDeltaForEntity(s, t, t.getMeshById.bind(t), (a) => ke.Parse(a, t, "")); break; case "skeletons": this._ApplyDeltaForEntity(s, t, t.getSkeletonById.bind(t), (a) => sx.Parse(a, t)); break; case "materials": this._ApplyDeltaForEntity(s, t, t.getMaterialById.bind(t), (a) => At.Parse(a, t, "")); break; case "multiMaterials": this._ApplyDeltaForEntity(s, t, t.getMaterialById.bind(t), (a) => xm.Parse(a, t, "")); break; case "transformNodes": this._ApplyDeltaForEntity(s, t, t.getTransformNodeById.bind(t), (a) => xi.Parse(a, t, "")); break; case "particleSystems": this._ApplyDeltaForEntity(s, t, t.getParticleSystemById.bind(t), (a) => ns.Parse(a, t, "")); break; case "morphTargetManagers": this._ApplyDeltaForEntity(s, t, t.getMorphTargetById.bind(t), (a) => O4.Parse(a, t)); break; case "postProcesses": this._ApplyDeltaForEntity(s, t, t.getPostProcessByName.bind(t), (a) => Bi.Parse(a, t, "")); break; } else isNaN(n) ? n.fromArray && n.fromArray(s) : i[r] = s; } } static _ApplyPropertiesToEntity(e, t) { for (const i in e) { const r = e[i], s = t[i]; s !== void 0 && (!isNaN(s) || Array.isArray(s) ? t[i] = r : s.fromArray ? s.fromArray(r) : typeof s == "object" && s !== null && this._ApplyPropertiesToEntity(r, s)); } } static _ApplyDeltaForEntity(e, t, i, r) { for (const s of e) if (s.__state && s.__state.id !== void 0) { const n = i(s.__state.id); n && (this._ApplyPropertiesToEntity(s, n), St.ParseProperties(s, n, t, null)); } else if (s.__state && s.__state.deleteId !== void 0) { const n = i(s.__state.deleteId); n == null || n.dispose(); } else r(s); } } var iw; (function(c) { class e { /** * Serialize the Alphabet to JSON string. * @returns JSON serialization */ serialize() { const r = {}, s = new Array(this._characterToIdx.size); return this._characterToIdx.forEach((n, a) => { s[n] = a; }), r.characters = s, r.insertionCosts = this._insertionCosts, r.deletionCosts = this._deletionCosts, r.substitutionCosts = this._substitutionCosts, JSON.stringify(r); } /** * Parse an Alphabet from a JSON serialization. * @param json JSON string to deserialize * @returns deserialized Alphabet */ static Deserialize(r) { const s = JSON.parse(r), n = new e(s.characters); return n._insertionCosts = s.insertionCosts, n._deletionCosts = s.deletionCosts, n._substitutionCosts = s.substitutionCosts, n; } /** * Create a new Alphabet. * @param characters characters of the alphabet * @param charToInsertionCost function mapping characters to insertion costs * @param charToDeletionCost function mapping characters to deletion costs * @param charsToSubstitutionCost function mapping character pairs to substitution costs */ constructor(r, s = null, n = null, a = null) { s = s ?? (() => 1), n = n ?? (() => 1), a = a ?? ((o, u) => o === u ? 0 : 1), this._characterToIdx = /* @__PURE__ */ new Map(), this._insertionCosts = new Array(r.length), this._deletionCosts = new Array(r.length), this._substitutionCosts = new Array(r.length); let l; for (let o = 0; o < r.length; ++o) { l = r[o], this._characterToIdx.set(l, o), this._insertionCosts[o] = s(l), this._deletionCosts[o] = n(l), this._substitutionCosts[o] = new Array(r.length); for (let u = o; u < r.length; ++u) this._substitutionCosts[o][u] = a(l, r[u]); } } /** * Get the index (internally-assigned number) for a character. * @param char character * @returns index */ getCharacterIdx(r) { return this._characterToIdx.get(r); } /** * Get the insertion cost of a character from its index. 
* @param idx character index * @returns insertion cost */ getInsertionCost(r) { return this._insertionCosts[r]; } /** * Get the deletion cost of a character from its index. * @param idx character index * @returns deletion cost */ getDeletionCost(r) { return this._deletionCosts[r]; } /** * Gets the cost to substitute two characters. NOTE: this cost is * required to be bi-directional, meaning it cannot matter which of * the provided characters is being removed and which is being inserted. * @param idx1 the first character index * @param idx2 the second character index * @returns substitution cost */ getSubstitutionCost(r, s) { const n = Math.min(r, s), a = Math.max(r, s); return this._substitutionCosts[n][a]; } } c.Alphabet = e; class t { /** * Serialize to JSON string. JSON representation does NOT include the Alphabet * from which this Sequence was created; Alphabet must be independently * serialized. * @returns JSON string */ serialize() { return JSON.stringify(this._characters); } /** * Deserialize from JSON string and Alphabet. This should be the same Alphabet * from which the Sequence was originally created, which must be serialized and * deserialized independently so that it can be passed in here. * @param json JSON string representation of Sequence * @param alphabet Alphabet from which Sequence was originally created * @returns Sequence */ static Deserialize(r, s) { const n = new t([], s); return n._characters = JSON.parse(r), n; } /** * Create a new Sequence. * @param characters characters in the new Sequence * @param alphabet Alphabet, which must include all used characters */ constructor(r, s) { if (r.length > t._MAX_SEQUENCE_LENGTH) throw new Error("Sequences longer than " + t._MAX_SEQUENCE_LENGTH + " not supported."); this._alphabet = s, this._characters = r.map((n) => this._alphabet.getCharacterIdx(n)); } /** * Get the distance between this Sequence and another. * @param other sequence to compare to * @returns Levenshtein distance */ distance(r) { return t._Distance(this, r); } /** * Compute the Levenshtein distance between two Sequences. * @param a first Sequence * @param b second Sequence * @returns Levenshtein distance */ static _Distance(r, s) { const n = r._alphabet; if (n !== s._alphabet) throw new Error("Cannot Levenshtein compare Sequences built from different alphabets."); const a = r._characters, l = s._characters, o = a.length, u = l.length, h = t._CostMatrix; h[0][0] = 0; for (let d = 0; d < o; ++d) h[d + 1][0] = h[d][0] + n.getInsertionCost(a[d]); for (let d = 0; d < u; ++d) h[0][d + 1] = h[0][d] + n.getInsertionCost(l[d]); for (let d = 0; d < o; ++d) for (let f = 0; f < u; ++f) t._InsertionCost = h[d + 1][f] + n.getInsertionCost(l[f]), t._DeletionCost = h[d][f + 1] + n.getDeletionCost(a[d]), t._SubstitutionCost = h[d][f] + n.getSubstitutionCost(a[d], l[f]), h[d + 1][f + 1] = Math.min(t._InsertionCost, t._DeletionCost, t._SubstitutionCost); return h[o][u]; } } t._MAX_SEQUENCE_LENGTH = 256, t._CostMatrix = [...Array(t._MAX_SEQUENCE_LENGTH + 1)].map(() => new Array(t._MAX_SEQUENCE_LENGTH + 1)), c.Sequence = t; })(iw || (iw = {})); class ca { /** * Serialize to JSON. * @returns serialized JSON string */ serialize() { return JSON.stringify(this); } /** * Deserialize from JSON. * @param json serialized JSON string * @returns deserialized Trajectory */ static Deserialize(e) { const t = JSON.parse(e), i = new ca(t._segmentLength); return i._points = t._points.map((r) => new D(r._x, r._y, r._z)), i; } /** * Create a new empty Trajectory. 
* @param segmentLength radius of discretization for Trajectory points */ constructor(e = 0.01) { this._points = [], this._segmentLength = e; } /** * Get the length of the Trajectory. * @returns length of the Trajectory */ getLength() { return this._points.length * this._segmentLength; } /** * Append a new point to the Trajectory. * NOTE: This implementation has many allocations. * @param point point to append to the Trajectory */ add(e) { let t = this._points.length; if (t === 0) this._points.push(e.clone()); else { const i = () => this._segmentLength / D.Distance(this._points[t - 1], e); for (let r = i(); r <= 1; r = i()) { const s = this._points[t - 1].scale(1 - r); e.scaleAndAddToRef(r, s), this._points.push(s), ++t; } } } /** * Create a new Trajectory with a segment length chosen to make it * probable that the new Trajectory will have a specified number of * segments. This operation is imprecise. * @param targetResolution number of segments desired * @returns new Trajectory with approximately the requested number of segments */ resampleAtTargetResolution(e) { const t = new ca(this.getLength() / e); return this._points.forEach((i) => { t.add(i); }), t; } /** * Convert Trajectory segments into tokenized representation. This * representation is an array of numbers where each nth number is the * index of the token which is most similar to the nth segment of the * Trajectory. * @param tokens list of vectors which serve as discrete tokens * @returns list of indices of most similar token per segment */ tokenize(e) { const t = [], i = new D(); for (let r = 2; r < this._points.length; ++r) ca._TransformSegmentDirToRef(this._points[r - 2], this._points[r - 1], this._points[r], i) && t.push(ca._TokenizeSegment(i, e)); return t; } /** * Transform the rotation (i.e., direction) of a segment to isolate * the relative transformation represented by the segment. This operation * may or may not succeed due to singularities in the equations that define * motion relativity in this context. * @param priorVec the origin of the prior segment * @param fromVec the origin of the current segment * @param toVec the destination of the current segment * @param result reference to output variable * @returns whether or not transformation was successful */ static _TransformSegmentDirToRef(e, t, i, r) { return t.subtractToRef(e, ca._ForwardDir), ca._ForwardDir.normalize(), t.scaleToRef(-1, ca._InverseFromVec), ca._InverseFromVec.normalize(), Math.abs(D.Dot(ca._ForwardDir, ca._InverseFromVec)) > 0.98 ? !1 : (D.CrossToRef(ca._ForwardDir, ca._InverseFromVec, ca._UpDir), ca._UpDir.normalize(), Ae.LookAtLHToRef(e, t, ca._UpDir, ca._LookMatrix), i.subtractToRef(t, ca._FromToVec), ca._FromToVec.normalize(), D.TransformNormalToRef(ca._FromToVec, ca._LookMatrix, r), !0); } /** * Determine which token vector is most similar to the * segment vector. * @param segment segment vector * @param tokens token vector list * @returns index of the most similar token to the segment */ static _TokenizeSegment(e, t) { ca._BestMatch = 0, ca._Score = D.Dot(e, t[0]), ca._BestScore = ca._Score; for (let i = 1; i < t.length; ++i) ca._Score = D.Dot(e, t[i]), ca._Score > ca._BestScore && (ca._BestMatch = i, ca._BestScore = ca._Score); return ca._BestMatch; } } ca._ForwardDir = new D(); ca._InverseFromVec = new D(); ca._UpDir = new D(); ca._FromToVec = new D(); ca._LookMatrix = new Ae(); class CL { /** * Helper method to create new "spikeball" Vector3Alphabets. 
Uses a naive * optimize-from-random strategy to space points around the unit sphere * surface as a simple alternative to really doing the math to tile the * sphere. * @param alphabetSize size of the desired alphabet * @param iterations number of iterations over which to optimize the "spikeball" * @param startingStepSize distance factor to move points in early optimization iterations * @param endingStepSize distance factor to move points in late optimization iterations * @param fixedValues alphabet "characters" that are required and cannot be moved by optimization * @returns a new randomly generated and optimized Vector3Alphabet of the specified size */ static Generate(e = 64, t = 256, i = 0.1, r = 1e-3, s = []) { const l = new CL(e); for (let p = 0; p < e; ++p) l.chars[p] = new D(Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5), l.chars[p].normalize(); for (let p = 0; p < s.length; ++p) l.chars[p].copyFrom(s[p]); let o, u; const h = new D(), d = new D(), f = (p, m, _) => (1 - _) * p + _ * m; for (let p = 0; p < t; ++p) { o = f(i, r, p / (t - 1)); for (let m = s.length; m < l.chars.length; ++m) h.copyFromFloats(0, 0, 0), l.chars.forEach((_) => { l.chars[m].subtractToRef(_, d), u = d.lengthSquared(), u > 1e-6 && d.scaleAndAddToRef(1 / (d.lengthSquared() * u), h); }), h.scaleInPlace(o), l.chars[m].addInPlace(h), l.chars[m].normalize(); } return l; } /** * Serialize to JSON. * @returns JSON serialization */ serialize() { return JSON.stringify(this.chars); } /** * Deserialize from JSON. * @param json JSON serialization * @returns deserialized Vector3Alphabet */ static Deserialize(e) { const t = JSON.parse(e), i = new CL(t.length); for (let r = 0; r < t.length; ++r) i.chars[r] = new D(t[r]._x, t[r]._y, t[r]._z); return i; } constructor(e) { this.chars = new Array(e); } } class KC { /** * Serialize to JSON. * @returns JSON serialization */ serialize() { return JSON.stringify(this._sequences.map((e) => e.serialize())); } /** * Deserialize from JSON string and Alphabet. This should be the same Alphabet * from which the descriptor was originally created, which must be serialized and * deserialized independently so that it can be passed in here. * @param json JSON serialization * @param alphabet Alphabet from which descriptor was originally created * @returns deserialized TrajectoryDescriptor */ static Deserialize(e, t) { const i = new KC(); return i._sequences = JSON.parse(e).map((r) => iw.Sequence.Deserialize(r, t)), i; } /** * Create a new TrajectoryDescriptor to describe a provided Trajectory according * to the provided alphabets. * @param trajectory Trajectory to be described * @param vector3Alphabet Vector3Alphabet to be used to tokenize the Trajectory * @param levenshteinAlphabet Levenshtein.Alphabet to be used as basis for comparison with other descriptors * @returns TrajectoryDescriptor describing provided Trajectory */ static CreateFromTrajectory(e, t, i) { return KC.CreateFromTokenizationPyramid(KC._GetTokenizationPyramid(e, t), i); } /** * Create a new TrajectoryDescriptor from a pre-existing pyramid of tokens. * NOTE: This function exists to support an outdated serialization mechanism and should * be deleted if it is no longer useful. 
* @param pyramid tokenization pyramid * @param levenshteinAlphabet Levenshtein.Alphabet to be uses as basis for comparison with other descriptors * @returns TrajectoryDescriptor describing the Trajectory from which the pyramid was built */ static CreateFromTokenizationPyramid(e, t) { const i = new KC(); return i._sequences = e.map((r) => new iw.Sequence(r, t)), i; } constructor() { this._sequences = []; } /** * Create the tokenization pyramid for the provided Trajectory according to the given * Vector3Alphabet. * @param trajectory Trajectory to be tokenized * @param alphabet Vector3Alphabet containing tokens * @param targetResolution finest resolution of descriptor * @returns tokenization pyramid for Trajectory */ static _GetTokenizationPyramid(e, t, i = KC._FINEST_DESCRIPTOR_RESOLUTION) { const r = []; for (let s = i; s > 4; s = Math.floor(s / 2)) r.push(e.resampleAtTargetResolution(s).tokenize(t.chars)); return r; } /** * Calculate a distance metric between this TrajectoryDescriptor and another. This is * essentially a similarity score and does not directly represent Euclidean distance, * edit distance, or any other formal distance metric. * @param other TrajectoryDescriptor from which to determine distance * @returns distance, a nonnegative similarity score where larger values indicate dissimilarity */ distance(e) { let t = 0, i; for (let r = 0; r < this._sequences.length; ++r) i = Math.pow(2, r), t += i * this._sequences[r].distance(e._sequences[r]); return t; } } KC._FINEST_DESCRIPTOR_RESOLUTION = 32; class rw { /** * Serialize to JSON. * @returns JSON serialization */ serialize() { const e = {}; return e.descriptors = this._descriptors.map((t) => t.serialize()), e.centroidIdx = this._centroidIdx, e.averageDistance = this._averageDistance, JSON.stringify(e); } /** * Deserialize from JSON string and Alphabet. This should be the same Alphabet * from which the descriptors were originally created, which must be serialized and * deserialized independently so that it can be passed in here. * @param json JSON string representation * @param alphabet Alphabet from which TrajectoryDescriptors were originally created * @returns deserialized TrajectoryDescriptor */ static Deserialize(e, t) { const i = JSON.parse(e), r = new rw(); return r._descriptors = i.descriptors.map((s) => KC.Deserialize(s, t)), r._centroidIdx = i.centroidIdx, r._averageDistance = i.averageDistance, r; } /** * Create a new DescribedTrajectory. * @param descriptors currently-known TrajectoryDescriptors, if any */ constructor(e = []) { this._descriptors = e, this._centroidIdx = -1, this._averageDistance = 0, this._refreshDescription(); } /** * Add a new TrajectoryDescriptor to the list of descriptors known to describe * this same DescribedTrajectory. * @param descriptor descriptor to be added */ add(e) { this._descriptors.push(e), this._refreshDescription(); } /** * Compute the cost, which is inversely related to the likelihood that the provided * TrajectoryDescriptor describes a Trajectory that is considered to be the same as * the class represented by this DescribedTrajectory. * @param descriptor the descriptor to be costed * @returns cost of the match, which is a nonnegative similarity metric where larger values indicate dissimilarity */ getMatchCost(e) { return e.distance(this._descriptors[this._centroidIdx]) / this._averageDistance; } /** * Compute the minimum distance between the queried TrajectoryDescriptor and a * descriptor which is a member of this collection. 
This is an alternative way of * conceptualizing match cost from getMatchCost(), and it serves a different function. * @param descriptor the descriptor to find the minimum distance to * @returns minimum descriptor distance to a member descriptor of this DescribedTrajectory */ getMatchMinimumDistance(e) { return Math.min(...this._descriptors.map((t) => t.distance(e))); } /** * Refreshes the internal representation of this DescribedTrajectory. */ _refreshDescription() { this._centroidIdx = -1; let e; const t = this._descriptors.map((i) => (e = 0, this._descriptors.forEach((r) => { e += i.distance(r); }), e)); for (let i = 0; i < t.length; ++i) (this._centroidIdx < 0 || t[i] < t[this._centroidIdx]) && (this._centroidIdx = i); this._averageDistance = 0, this._descriptors.forEach((i) => { this._averageDistance += i.distance(this._descriptors[this._centroidIdx]); }), this._descriptors.length > 0 && (this._averageDistance = Math.max(this._averageDistance / this._descriptors.length, rw._MIN_AVERAGE_DISTANCE)); } } rw._MIN_AVERAGE_DISTANCE = 1; class bB { /** * Serialize to JSON. * @returns JSON serialization */ serialize() { const e = {}; return e.maximumAllowableMatchCost = this._maximumAllowableMatchCost, e.vector3Alphabet = this._vector3Alphabet.serialize(), e.levenshteinAlphabet = this._levenshteinAlphabet.serialize(), e.nameToDescribedTrajectory = [], this._nameToDescribedTrajectory.forEach((t, i) => { e.nameToDescribedTrajectory.push(i), e.nameToDescribedTrajectory.push(t.serialize()); }), JSON.stringify(e); } /** * Deserialize from JSON. * @param json JSON serialization * @returns deserialized TrajectorySet */ static Deserialize(e) { const t = JSON.parse(e), i = new bB(); i._maximumAllowableMatchCost = t.maximumAllowableMatchCost, i._vector3Alphabet = CL.Deserialize(t.vector3Alphabet), i._levenshteinAlphabet = iw.Alphabet.Deserialize(t.levenshteinAlphabet); for (let r = 0; r < t.nameToDescribedTrajectory.length; r += 2) i._nameToDescribedTrajectory.set(t.nameToDescribedTrajectory[r], rw.Deserialize(t.nameToDescribedTrajectory[r + 1], i._levenshteinAlphabet)); return i; } /** * Initialize a new empty TrajectorySet with auto-generated Alphabets. * VERY naive, need to be generating these things from known * sets. Better version later, probably eliminating this one. * @returns auto-generated TrajectorySet */ static Generate() { const e = CL.Generate(64, 256, 0.1, 1e-3, [D.Forward()]), t = new Array(e.chars.length); for (let s = 0; s < t.length; ++s) t[s] = s; const i = new iw.Alphabet(t, (s) => s === 0 ? 0 : 1, (s) => s === 0 ? 0 : 1, (s, n) => Math.min(1 - D.Dot(e.chars[s], e.chars[n]), 1)), r = new bB(); return r._vector3Alphabet = e, r._levenshteinAlphabet = i, r; } constructor() { this._maximumAllowableMatchCost = 4, this._nameToDescribedTrajectory = /* @__PURE__ */ new Map(); } /** * Add a new Trajectory to the set with a given name. * @param trajectory new Trajectory to be added * @param classification name to which to add the Trajectory */ addTrajectoryToClassification(e, t) { this._nameToDescribedTrajectory.has(t) || this._nameToDescribedTrajectory.set(t, new rw()), this._nameToDescribedTrajectory.get(t).add(KC.CreateFromTrajectory(e, this._vector3Alphabet, this._levenshteinAlphabet)); } /** * Remove a known named trajectory and all Trajectories associated with it. 
* @param classification name to remove * @returns whether anything was removed */ deleteClassification(e) { return this._nameToDescribedTrajectory.delete(e); } /** * Attempt to recognize a Trajectory from among all the classifications * already known to the classifier. * @param trajectory Trajectory to be recognized * @returns classification of Trajectory if recognized, null otherwise */ classifyTrajectory(e) { const t = KC.CreateFromTrajectory(e, this._vector3Alphabet, this._levenshteinAlphabet), i = []; if (this._nameToDescribedTrajectory.forEach((a, l) => { a.getMatchCost(t) < this._maximumAllowableMatchCost && i.push(l); }), i.length === 0) return null; let r = 0, s = this._nameToDescribedTrajectory.get(i[r]).getMatchMinimumDistance(t), n; for (let a = 0; a < i.length; ++a) n = this._nameToDescribedTrajectory.get(i[a]).getMatchMinimumDistance(t), n < s && (s = n, r = a); return i[r]; } } class xL { /** * Constructs a reflector object. * @param scene The scene to use * @param hostname The hostname of the reflector bridge * @param port The port of the reflector bridge */ constructor(e, t, i) { this._scene = e, Ce.Log(`[Reflector] Connecting to ws://${t}:${i}`), this._webSocket = new WebSocket(`ws://${t}:${i}`), this._webSocket.onmessage = (r) => { const s = r.data; if (s.startsWith(xL._SERVER_PREFIX)) { const n = s.substr(xL._SERVER_PREFIX.length); Ce.Log(`[Reflector] Received server message: ${n.substr(0, 64)}`), this._handleServerMessage(n); return; } else Ce.Log(`[Reflector] Received client message: ${s.substr(0, 64)}`), this._handleClientMessage(); }, this._webSocket.onclose = (r) => { Ce.Log(`[Reflector] Disconnected ${r.code} ${r.reason}`); }; } /** * Closes the reflector connection */ close() { this._webSocket.close(); } _handleServerMessage(e) { switch (e) { case "connected": { QC.SerializeAsync(this._scene).then((t) => { this._webSocket.send(`load|${JSON.stringify(t)}`); }); break; } } } _handleClientMessage() { } } xL._SERVER_PREFIX = "$$"; class qU { /** * A pressure observer will call this callback, whenever these thresholds are met. * @param options An object containing the thresholds used to decide what value to to return for each update property (average of start and end of a threshold boundary). */ constructor(e) { this._observer = null, this._currentState = [], this.onPressureChanged = new Fe(), qU.IsAvailable && (this._observer = new PressureObserver((t) => { this._currentState = t, this.onPressureChanged.notifyObservers(t); }, e)); } /** * Returns true if PressureObserver is available for use, false otherwise. */ static get IsAvailable() { return typeof PressureObserver < "u" && PressureObserver.supportedSources.includes("cpu"); } /** * Method that must be called to begin observing changes, and triggering callbacks. * @param source defines the source to observe */ observe(e) { var t; try { (t = this._observer) === null || t === void 0 || t.observe(e), this.onPressureChanged.notifyObservers(this._currentState); } catch { } } /** * Method that must be called to stop observing changes and triggering callbacks (cleanup function). * @param source defines the source to unobserve */ unobserve(e) { var t; try { (t = this._observer) === null || t === void 0 || t.unobserve(e); } catch { } } /** * Release the associated resources. */ dispose() { var e; (e = this._observer) === null || e === void 0 || e.disconnect(), this._observer = null, this.onPressureChanged.clear(); } } const Eve = 1.5; class WE { /** * Creates a new DynamicFloat32Array with the desired item capacity. 
* @param itemCapacity The initial item capacity you would like to set for the array. */ constructor(e) { this._view = new Float32Array(e), this._itemLength = 0; } /** * The number of items currently in the array. */ get itemLength() { return this._itemLength; } /** * Gets value at index, NaN if no such index exists. * @param index the index to get the value at. * @returns the value at the index provided. */ at(e) { return e < 0 || e >= this._itemLength ? NaN : this._view[e]; } /** * Gets a view of the original array from start to end (exclusive of end). * @param start starting index. * @param end ending index. * @returns a subarray of the original array. */ subarray(e, t) { return e >= t || e < 0 ? new Float32Array(0) : (t > this._itemLength && (t = this._itemLength), this._view.subarray(e, t)); } /** * Pushes items to the end of the array. * @param item The item to push into the array. */ push(e) { this._view[this._itemLength] = e, this._itemLength++, this._itemLength >= this._view.length && this._growArray(); } /** * Grows the array by the growth factor when necessary. */ _growArray() { const e = Math.floor(this._view.length * Eve), t = new Float32Array(e); t.set(this._view), this._view = t; } } const k8 = 1800, Tve = 24, Sve = "0", mq = "timestamp", gq = "numPoints", Mve = /\r/g, qk = "@"; class x4 { /** * The offset for when actual data values start appearing inside a slice. */ static get SliceDataOffset() { return 2; } /** * The offset for the value of the number of points inside a slice. */ static get NumberOfPointsOffset() { return 1; } /** * Handles the creation of a performance viewer collector. * @param _scene the scene to collect on. * @param _enabledStrategyCallbacks the list of data to collect with callbacks for initialization purposes. */ constructor(e, t) { this._scene = e, this._collectDataAtFrame = () => { const i = Gs.Now - this._startingTimestamp, r = this.datasets.ids.length, s = this.datasets.startingIndices.itemLength; let n = 0; if (s > 0) { const a = this.datasets.startingIndices.at(s - 1); n = a + this.datasets.data.at(a + x4.NumberOfPointsOffset) + x4.SliceDataOffset; } if (this.datasets.startingIndices.push(n), this.datasets.data.push(i), this.datasets.data.push(r), this.datasets.ids.forEach((a) => { const l = this._strategies.get(a); l && this.datasets.data.push(l.getData()); }), this.datasetObservable.hasObservers()) { const a = [i, r]; for (let l = 0; l < r; l++) a.push(this.datasets.data.at(n + x4.SliceDataOffset + l)); this.datasetObservable.notifyObservers(a); } }, this.datasets = { ids: [], data: new WE(k8), startingIndices: new WE(k8) }, this._strategies = /* @__PURE__ */ new Map(), this._datasetMeta = /* @__PURE__ */ new Map(), this._eventRestoreSet = /* @__PURE__ */ new Set(), this._customEventObservable = new Fe(), this.datasetObservable = new Fe(), this.metadataObservable = new Fe((i) => i.callback(this._datasetMeta, new XG(0))), t && this.addCollectionStrategies(...t); } /** * Registers a custom string event which will be callable via sendEvent. This method returns an event object which will contain the id of the event. * The user can set a value optionally, which will be used in the sendEvent method. If the value is set, we will record this value at the end of each frame, * if not we will increment our counter and record the value of the counter at the end of each frame. The value recorded is 0 if no sendEvent method is called, within a frame. 
* @param name The name of the event to register * @param forceUpdate if the code should force add an event, and replace the last one. * @param category the category for that event * @returns The event registered, used in sendEvent */ registerEvent(e, t, i) { var r; if (this._strategies.has(e) && !t) return; this._strategies.has(e) && t && ((r = this._strategies.get(e)) === null || r === void 0 || r.dispose(), this._strategies.delete(e)); const s = (a) => { let l = 0, o = 0; const u = a.onAfterRenderObservable.add(() => { o = l, l = 0; }), h = this._customEventObservable.add((d) => { e === d.name && (d.value !== void 0 ? l = d.value : l++); }); return { id: e, getData: () => o, dispose: () => { a.onAfterRenderObservable.remove(u), this._customEventObservable.remove(h); } }; }, n = { name: e }; return this._eventRestoreSet.add(e), this.addCollectionStrategies({ strategyCallback: s, category: i }), n; } /** * Lets the perf collector handle an event, occurences or event value depending on if the event.value params is set. * @param event the event to handle an occurence for */ sendEvent(e) { this._customEventObservable.notifyObservers(e); } /** * This event restores all custom string events if necessary. */ _restoreStringEvents() { this._eventRestoreSet.size !== this._customEventObservable.observers.length && this._eventRestoreSet.forEach((e) => { this.registerEvent(e, !0); }); } /** * This method adds additional collection strategies for data collection purposes. * @param strategyCallbacks the list of data to collect with callbacks. */ addCollectionStrategies(...e) { for (let { strategyCallback: t, category: i, hidden: r } of e) { const s = t(this._scene); if (this._strategies.has(s.id)) { s.dispose(); continue; } this.datasets.ids.push(s.id), i && (i = i.replace(new RegExp(qk, "g"), "")), this._datasetMeta.set(s.id, { color: this._getHexColorFromId(s.id), category: i, hidden: r }), this._strategies.set(s.id, s); } this.metadataObservable.notifyObservers(this._datasetMeta); } /** * Gets a 6 character hexcode representing the colour from a passed in string. * @param id the string to get a hex code for. * @returns a hexcode hashed from the id. */ _getHexColorFromId(e) { let t = 0; for (let r = 0; r < e.length; r++) t = e.charCodeAt(r) + ((t << 5) - t); let i = "#"; for (let r = 0; r < Tve; r += 8) { const s = t >> r & 255; i += (Sve + s.toString(16)).substr(-2); } return i; } /** * Collects and then sends the latest slice to any observers by using the appropriate strategy when the user wants. * The slice will be of the form [timestamp, numberOfPoints, value1, value2...] * This method does not add onto the collected data accessible via the datasets variable. */ getCurrentSlice() { const e = Gs.Now - this._startingTimestamp, t = this.datasets.ids.length, i = [e, t]; this.datasets.ids.forEach((r) => { const s = this._strategies.get(r); s && this.datasetObservable.hasObservers() && i.push(s.getData()); }), this.datasetObservable.hasObservers() && this.datasetObservable.notifyObservers(i); } /** * Updates a property for a dataset's metadata with the value provided. * @param id the id of the dataset which needs its metadata updated. * @param prop the property to update. * @param value the value to update the property with. */ updateMetadata(e, t, i) { const r = this._datasetMeta.get(e); r && (r[t] = i, this.metadataObservable.notifyObservers(this._datasetMeta)); } /** * Completely clear, data, ids, and strategies saved to this performance collector. 
* @param preserveStringEventsRestore if it should preserve the string events, by default will clear string events registered when called. */ clear(e) { this.datasets.data = new WE(k8), this.datasets.ids.length = 0, this.datasets.startingIndices = new WE(k8), this._datasetMeta.clear(), this._strategies.forEach((t) => t.dispose()), this._strategies.clear(), e || this._eventRestoreSet.clear(), this._hasLoadedData = !1; } /** * Accessor which lets the caller know if the performance collector has data loaded from a file or not! * Call clear() to reset this value. * @returns true if the data is loaded from a file, false otherwise. */ get hasLoadedData() { return this._hasLoadedData; } /** * Given a string containing file data, this function parses the file data into the datasets object. * It returns a boolean to indicate if this object was successfully loaded with the data. * @param data string content representing the file data. * @param keepDatasetMeta if it should use reuse the existing dataset metadata * @returns true if the data was successfully loaded, false otherwise. */ loadFromFileData(e, t) { const i = e.replace(Mve, "").split(` `).map((h) => h.split(",").filter((d) => d.length > 0)).filter((h) => h.length > 0), r = 0, s = x4.NumberOfPointsOffset; if (i.length < 2) return !1; const n = { ids: [], data: new WE(k8), startingIndices: new WE(k8) }, [a, ...l] = i; if (a.length < 2 || a[r] !== mq || a[s] !== gq) return !1; const o = /* @__PURE__ */ new Map(); for (let h = x4.SliceDataOffset; h < a.length; h++) { const [d, f] = a[h].split(qk); n.ids.push(d), o.set(d, f); } let u = 0; for (const h of l) { if (h.length < 2) return !1; const d = parseFloat(h[r]), f = parseInt(h[s]); if (isNaN(f) || isNaN(d) || (n.data.push(d), n.data.push(f), f + x4.SliceDataOffset !== h.length)) return !1; for (let p = x4.SliceDataOffset; p < h.length; p++) { const m = parseFloat(h[p]); if (isNaN(m)) return !1; n.data.push(m); } n.startingIndices.push(u), u += h.length; } if (this.datasets.ids = n.ids, this.datasets.data = n.data, this.datasets.startingIndices = n.startingIndices, t || this._datasetMeta.clear(), this._strategies.forEach((h) => h.dispose()), this._strategies.clear(), !t) for (const h of this.datasets.ids) { const d = o.get(h); this._datasetMeta.set(h, { category: d, color: this._getHexColorFromId(h) }); } return this.metadataObservable.notifyObservers(this._datasetMeta), this._hasLoadedData = !0, !0; } /** * Exports the datasets inside of the collector to a csv. */ exportDataToCsv() { let e = ""; e += `${mq},${gq}`; for (let i = 0; i < this.datasets.ids.length; i++) if (e += `,${this.datasets.ids[i]}`, this._datasetMeta) { const r = this._datasetMeta.get(this.datasets.ids[i]); r != null && r.category && (e += `${qk}${r.category}`); } e += ` `; for (let i = 0; i < this.datasets.startingIndices.itemLength; i++) { const r = this.datasets.startingIndices.at(i), s = this.datasets.data.at(r), n = this.datasets.data.at(r + x4.NumberOfPointsOffset); e += `${s},${n}`; for (let a = 0; a < n; a++) e += `,${this.datasets.data.at(r + x4.SliceDataOffset + a)}`; for (let a = 0; a < this.datasets.ids.length - n; a++) e += ","; e += ` `; } const t = `${(/* @__PURE__ */ new Date()).toISOString()}-perfdata.csv`; Ve.Download(new Blob([e], { type: "text/csv" }), t); } /** * Starts the realtime collection of data. * @param shouldPreserve optional boolean param, if set will preserve the dataset between calls of start. */ start(e) { e ? 
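/* A lifecycle sketch for the collector methods defined around this point (illustrative, not from the original
   source; "collector" is assumed to come from scene.getPerfCollector()): start() hooks collection onto
   onAfterRenderObservable, stop() removes it, and exportDataToCsv() downloads the recorded slices.
     collector.start();                 // begin recording one slice per rendered frame
     setTimeout(() => {
       collector.stop();
       collector.exportDataToCsv();     // triggers an "<ISO date>-perfdata.csv" download
     }, 5000);
   loadFromFileData(csvText) can later re-populate the datasets, after which hasLoadedData returns true. */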
this._startingTimestamp === void 0 && (this._startingTimestamp = Gs.Now) : (this.datasets.data = new WE(k8), this.datasets.startingIndices = new WE(k8), this._startingTimestamp = Gs.Now), this._scene.onAfterRenderObservable.add(this._collectDataAtFrame), this._restoreStringEvents(), this._isStarted = !0; } /** * Stops the collection of data. */ stop() { this._scene.onAfterRenderObservable.removeCallback(this._collectDataAtFrame), this._isStarted = !1; } /** * Returns if the perf collector has been started or not. */ get isStarted() { return this._isStarted; } /** * Disposes of the object */ dispose() { this._scene.onAfterRenderObservable.removeCallback(this._collectDataAtFrame), this._datasetMeta.clear(), this._strategies.forEach((e) => { e.dispose(); }), this.datasetObservable.clear(), this.metadataObservable.clear(), this._isStarted = !1, this.datasets = null; } } const m4 = () => { }; class Rve { /** * Gets the initializer for the strategy used for collection of fps metrics * @returns the initializer for the fps strategy */ static FpsStrategy() { return (e) => { const t = e.getEngine(); return { id: "FPS", getData: () => t.getFps(), dispose: m4 }; }; } /** * Gets the initializer for the strategy used for collection of thermal utilization metrics. * Needs the experimental pressure API. * @returns the initializer for the thermal utilization strategy */ static ThermalStrategy() { return this._PressureStrategy("Thermal utilization", "thermal"); } /** * Gets the initializer for the strategy used for collection of power supply utilization metrics. * Needs the experimental pressure API. * @returns the initializer for the power supply utilization strategy */ static PowerSupplyStrategy() { return this._PressureStrategy("Power supply utilization", "power-supply"); } /** * Gets the initializer for the strategy used for collection of pressure metrics. * Needs the experimental pressure API. * @returns the initializer for the pressure strategy */ static PressureStrategy() { return this._PressureStrategy("Pressure"); } static _PressureStrategy(e, t = null) { return () => { let i = 0; const r = new qU(); return r.observe("cpu"), r.onPressureChanged.add((s) => { var n, a; for (const l of s) if (t && l.factors.includes(t) || !t && ((a = (n = l.factors) === null || n === void 0 ? void 0 : n.length) !== null && a !== void 0 ? a : 0) === 0) switch (l.state) { case "nominal": i = 0; break; case "fair": i = 0.25; break; case "serious": i = 0.5; break; case "critical": i = 1; break; } }), { id: e, getData: () => i, dispose: () => r.dispose() }; }; } /** * Gets the initializer for the strategy used for collection of total meshes metrics. * @returns the initializer for the total meshes strategy */ static TotalMeshesStrategy() { return (e) => ({ id: "Total meshes", getData: () => e.meshes.length, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of active meshes metrics. * @returns the initializer for the active meshes strategy */ static ActiveMeshesStrategy() { return (e) => ({ id: "Active meshes", getData: () => e.getActiveMeshes().length, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of active indices metrics. * @returns the initializer for the active indices strategy */ static ActiveIndicesStrategy() { return (e) => ({ id: "Active indices", getData: () => e.getActiveIndices(), dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of active faces metrics. 
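 * The static initializers in this class are meant to be passed to the collector's addCollectionStrategies. A
 * hedged sketch, assuming the class is exposed under its un-minified name PerfCollectionStrategies:
 * @example
 * collector.addCollectionStrategies(
 *   { strategyCallback: PerfCollectionStrategies.FpsStrategy() },
 *   { strategyCallback: PerfCollectionStrategies.DrawCallsStrategy(), category: "Rendering" },
 *   { strategyCallback: PerfCollectionStrategies.TotalMeshesStrategy(), category: "Scene counts" }
 * );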
* @returns the initializer for the active faces strategy */ static ActiveFacesStrategy() { return (e) => ({ id: "Active faces", getData: () => e.getActiveIndices() / 3, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of active bones metrics. * @returns the initializer for the active bones strategy */ static ActiveBonesStrategy() { return (e) => ({ id: "Active bones", getData: () => e.getActiveBones(), dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of active particles metrics. * @returns the initializer for the active particles strategy */ static ActiveParticlesStrategy() { return (e) => ({ id: "Active particles", getData: () => e.getActiveParticles(), dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of draw calls metrics. * @returns the initializer for the draw calls strategy */ static DrawCallsStrategy() { return (e) => { let t = 0; const i = e.onBeforeAnimationsObservable.add(() => { e.getEngine()._drawCalls.fetchNewFrame(); }), r = e.onAfterRenderObservable.add(() => { t = e.getEngine()._drawCalls.current; }); return { id: "Draw calls", getData: () => t, dispose: () => { e.onBeforeAnimationsObservable.remove(i), e.onAfterRenderObservable.remove(r); } }; }; } /** * Gets the initializer for the strategy used for collection of total lights metrics. * @returns the initializer for the total lights strategy */ static TotalLightsStrategy() { return (e) => ({ id: "Total lights", getData: () => e.lights.length, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of total vertices metrics. * @returns the initializer for the total vertices strategy */ static TotalVerticesStrategy() { return (e) => ({ id: "Total vertices", getData: () => e.getTotalVertices(), dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of total materials metrics. * @returns the initializer for the total materials strategy */ static TotalMaterialsStrategy() { return (e) => ({ id: "Total materials", getData: () => e.materials.length, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of total textures metrics. * @returns the initializer for the total textures strategy */ static TotalTexturesStrategy() { return (e) => ({ id: "Total textures", getData: () => e.textures.length, dispose: m4 }); } /** * Gets the initializer for the strategy used for collection of absolute fps metrics. * @returns the initializer for the absolute fps strategy */ static AbsoluteFpsStrategy() { return (e) => { const t = new ure(e); return t.captureFrameTime = !0, { id: "Absolute FPS", getData: () => 1e3 / t.frameTimeCounter.lastSecAverage, dispose: m4 }; }; } /** * Gets the initializer for the strategy used for collection of meshes selection time metrics. * @returns the initializer for the meshes selection time strategy */ static MeshesSelectionStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeActiveMeshesEvaluationObservable.add(() => { t = Gs.Now; }), s = e.onAfterActiveMeshesEvaluationObservable.add(() => { i = Gs.Now - t; }); return { id: "Meshes Selection", getData: () => i, dispose: () => { e.onBeforeActiveMeshesEvaluationObservable.remove(r), e.onAfterActiveMeshesEvaluationObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of render targets time metrics. 
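 * The timing strategies that follow all share one pattern: record a timestamp in a "before" observable and
 * publish the elapsed time in the matching "after" observable. A hedged sketch of a custom strategy built the
 * same way (the observable names appear in the surrounding code; performance.now() stands in for the minified
 * high-precision timestamp helper, and the metric itself is hypothetical):
 * @example
 * const drawPhaseTimeStrategy = (scene) => {
 *   let start = 0, elapsed = 0;
 *   const before = scene.onBeforeDrawPhaseObservable.add(() => { start = performance.now(); });
 *   const after = scene.onAfterDrawPhaseObservable.add(() => { elapsed = performance.now() - start; });
 *   return {
 *     id: "Draw phase (ms)",
 *     getData: () => elapsed,
 *     dispose: () => {
 *       scene.onBeforeDrawPhaseObservable.remove(before);
 *       scene.onAfterDrawPhaseObservable.remove(after);
 *     },
 *   };
 * };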
* @returns the initializer for the render targets time strategy */ static RenderTargetsStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeRenderTargetsRenderObservable.add(() => { t = Gs.Now; }), s = e.onAfterRenderTargetsRenderObservable.add(() => { i = Gs.Now - t; }); return { id: "Render Targets", getData: () => i, dispose: () => { e.onBeforeRenderTargetsRenderObservable.remove(r), e.onAfterRenderTargetsRenderObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of particles time metrics. * @returns the initializer for the particles time strategy */ static ParticlesStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeParticlesRenderingObservable.add(() => { t = Gs.Now; }), s = e.onAfterParticlesRenderingObservable.add(() => { i = Gs.Now - t; }); return { id: "Particles", getData: () => i, dispose: () => { e.onBeforeParticlesRenderingObservable.remove(r), e.onAfterParticlesRenderingObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of sprites time metrics. * @returns the initializer for the sprites time strategy */ static SpritesStrategy() { return (e) => { var t, i; let r = Gs.Now, s = 0; const n = (t = e.onBeforeSpritesRenderingObservable) === null || t === void 0 ? void 0 : t.add(() => { r = Gs.Now; }), a = (i = e.onAfterSpritesRenderingObservable) === null || i === void 0 ? void 0 : i.add(() => { s = Gs.Now - r; }); return { id: "Sprites", getData: () => s, dispose: () => { var l, o; (l = e.onBeforeSpritesRenderingObservable) === null || l === void 0 || l.remove(n), (o = e.onAfterSpritesRenderingObservable) === null || o === void 0 || o.remove(a); } }; }; } /** * Gets the initializer for the strategy used for collection of animations time metrics. * @returns the initializer for the animations time strategy */ static AnimationsStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeAnimationsObservable.add(() => { t = Gs.Now; }), s = e.onAfterAnimationsObservable.add(() => { i = Gs.Now - t; }); return { id: "Animations", getData: () => i, dispose: () => { e.onBeforeAnimationsObservable.remove(r), e.onAfterAnimationsObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of physics time metrics. * @returns the initializer for the physics time strategy */ static PhysicsStrategy() { return (e) => { var t, i; let r = Gs.Now, s = 0; const n = (t = e.onBeforePhysicsObservable) === null || t === void 0 ? void 0 : t.add(() => { r = Gs.Now; }), a = (i = e.onAfterPhysicsObservable) === null || i === void 0 ? void 0 : i.add(() => { s = Gs.Now - r; }); return { id: "Physics", getData: () => s, dispose: () => { var l, o; (l = e.onBeforePhysicsObservable) === null || l === void 0 || l.remove(n), (o = e.onAfterPhysicsObservable) === null || o === void 0 || o.remove(a); } }; }; } /** * Gets the initializer for the strategy used for collection of render time metrics. * @returns the initializer for the render time strategy */ static RenderStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeDrawPhaseObservable.add(() => { t = Gs.Now; }), s = e.onAfterDrawPhaseObservable.add(() => { i = Gs.Now - t; }); return { id: "Render", getData: () => i, dispose: () => { e.onBeforeDrawPhaseObservable.remove(r), e.onAfterDrawPhaseObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of total frame time metrics. 
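 * The GPU frame time strategy further down relies on engine instrumentation. A hedged sketch of reading the same
 * counter directly (it assumes the un-minified instrumentation class is Babylon's EngineInstrumentation; the
 * counter is reported in nanoseconds, hence the 1e-6 scaling used below and in the strategy itself):
 * @example
 * const instrumentation = new EngineInstrumentation(scene.getEngine());
 * instrumentation.captureGPUFrameTime = true;
 * scene.onAfterRenderObservable.add(() => {
 *   const gpuMs = instrumentation.gpuFrameTimeCounter.current * 1e-6;
 * });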
* @returns the initializer for the total frame time strategy */ static FrameTotalStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeAnimationsObservable.add(() => { t = Gs.Now; }), s = e.onAfterRenderObservable.add(() => { i = Gs.Now - t; }); return { id: "Frame Total", getData: () => i, dispose: () => { e.onBeforeAnimationsObservable.remove(r), e.onAfterRenderObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of inter-frame time metrics. * @returns the initializer for the inter-frame time strategy */ static InterFrameStrategy() { return (e) => { let t = Gs.Now, i = 0; const r = e.onBeforeAnimationsObservable.add(() => { i = Gs.Now - t; }), s = e.onAfterRenderObservable.add(() => { t = Gs.Now; }); return { id: "Inter-frame", getData: () => i, dispose: () => { e.onBeforeAnimationsObservable.remove(r), e.onAfterRenderObservable.remove(s); } }; }; } /** * Gets the initializer for the strategy used for collection of gpu frame time metrics. * @returns the initializer for the gpu frame time strategy */ static GpuFrameTimeStrategy() { return (e) => { const t = new cre(e.getEngine()); return t.captureGPUFrameTime = !0, { id: "GPU frame time", getData: () => Math.max(t.gpuFrameTimeCounter.current * 1e-6, 0), dispose: () => { t.dispose(); } }; }; } } ii.prototype.getPerfCollector = function() { return this._perfCollector || (this._perfCollector = new x4(this)), this._perfCollector; }; function Pve(c) { const e = new Array(), t = new Array(), i = new Array(), r = c.add(() => { const n = e.length; for (let a = 0; a < n; a++) tL(e.shift(), t.shift(), i.shift()); }); return { scheduler: (n, a, l) => { e.push(n), t.push(a), i.push(l); }, dispose: () => { c.remove(r); } }; } Fe.prototype.runCoroutineAsync = function(c) { if (!this._coroutineScheduler) { const e = Pve(this); this._coroutineScheduler = e.scheduler, this._coroutineSchedulerDispose = e.dispose; } return jB(c, this._coroutineScheduler); }; Fe.prototype.cancelAllCoroutines = function() { this._coroutineSchedulerDispose && this._coroutineSchedulerDispose(), this._coroutineScheduler = void 0, this._coroutineSchedulerDispose = void 0; }; const Ive = "equirectangularPanoramaPixelShader", Dve = `#ifdef GL_ES precision highp float; #endif #define M_PI 3.1415926535897932384626433832795 varying vec2 vUV;uniform samplerCube cubeMap;void main(void) {vec2 uv=vUV;float longitude=uv.x*2.*M_PI-M_PI+M_PI/2.;float latitude=(1.-uv.y)*M_PI;vec3 dir=vec3( - sin( longitude )*sin( latitude ), cos( latitude ), - cos( longitude )*sin( latitude ) );normalize( dir );gl_FragColor=textureCube( cubeMap,dir );}`; je.ShadersStore[Ive] = Dve; async function Ove(c, e) { var t, i; const r = (t = e.probe) !== null && t !== void 0 ? t : new WI("tempProbe", e.size, c), s = !!e.probe; s || (e.position ? r.position = e.position.clone() : c.activeCamera && (r.position = c.activeCamera.position.clone())); const n = e.meshesFilter ? c.meshes.filter(e.meshesFilter) : c.meshes; (i = r.renderList) === null || i === void 0 || i.push(...n), r.refreshRate = ra.REFRESHRATE_RENDER_ONCE, r.cubeTexture.render(); const a = new Tre("tempProceduralTexture", "equirectangularPanorama", { width: e.size * 2, height: e.size }, c); return a.setTexture("cubeMap", r.cubeTexture), new Promise((l, o) => { a.onGeneratedObservable.addOnce(() => { const u = a.readPixels(); if (!u) { o(new Error("No Pixel Data found on procedural texture")), a.dispose(), s || r.dispose(); return; } u.then((h) => { a.dispose(), s || r.dispose(), e.filename ? 
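/* Usage sketch for the async helper defined just above (hedged: the helper is assumed to be exported as
   captureEquirectangularFromScene; the option names below are taken from the code itself):
     // download a 2048x1024 PNG panorama rendered from the active camera position
     await captureEquirectangularFromScene(scene, { size: 1024, filename: "panorama.png" });
     // or keep the raw pixel data instead of downloading
     const pixels = await captureEquirectangularFromScene(scene, { size: 1024 });
   Passing an existing probe, a position or a meshesFilter restricts what gets rendered into the cube map. */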
(qh.DumpData(e.size * 2, e.size, h, void 0, "image/png", e.filename), l(null)) : l(h); }); }); }); } class fT extends Ku { /** * Creates a new instance of the (legacy version) hit test feature * @param _xrSessionManager an instance of WebXRSessionManager * @param options options to use when constructing this feature */ constructor(e, t = {}) { super(e), this.options = t, this._direction = new D(0, 0, -1), this._mat = new Ae(), this._onSelectEnabled = !1, this._origin = new D(0, 0, 0), this.lastNativeXRHitResults = [], this.onHitTestResultObservable = new Fe(), this._onHitTestResults = (i) => { const r = i.map((s) => { const n = Ae.FromArray(s.hitMatrix); return this._xrSessionManager.scene.useRightHandedSystem || n.toggleModelMatrixHandInPlace(), this.options.worldParentNode && n.multiplyToRef(this.options.worldParentNode.getWorldMatrix(), n), { xrHitResult: s, transformationMatrix: n }; }); this.lastNativeXRHitResults = i, this.onHitTestResultObservable.notifyObservers(r); }, this._onSelect = (i) => { this._onSelectEnabled && fT.XRHitTestWithSelectEvent(i, this._xrSessionManager.referenceSpace); }, this.xrNativeFeatureName = "hit-test", Ve.Warn("A newer version of this plugin is available"); } /** * execute a hit test with an XR Ray * * @param xrSession a native xrSession that will execute this hit test * @param xrRay the ray (position and direction) to use for ray-casting * @param referenceSpace native XR reference space to use for the hit-test * @param filter filter function that will filter the results * @returns a promise that resolves with an array of native XR hit result in xr coordinates system */ static XRHitTestWithRay(e, t, i, r) { return e.requestHitTest(t, i).then((s) => { const n = r || ((a) => !!a.hitMatrix); return s.filter(n); }); } /** * Execute a hit test on the current running session using a select event returned from a transient input (such as touch) * @param event the (select) event to use to select with * @param referenceSpace the reference space to use for this hit test * @returns a promise that resolves with an array of native XR hit result in xr coordinates system */ static XRHitTestWithSelectEvent(e, t) { const i = e.frame.getPose(e.inputSource.targetRaySpace, t); if (!i) return Promise.resolve([]); const r = new XRRay(i.transform); return this.XRHitTestWithRay(e.frame.session, r, t); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { return super.attach() ? (this.options.testOnPointerDownOnly && this._xrSessionManager.session.addEventListener("select", this._onSelect, !1), !0) : !1; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? 
(this._onSelectEnabled = !1, this._xrSessionManager.session.removeEventListener("select", this._onSelect), !0) : !1; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onHitTestResultObservable.clear(); } _onXRFrame(e) { if (!this.attached || this.options.testOnPointerDownOnly) return; const t = e.getViewerPose(this._xrSessionManager.referenceSpace); if (!t) return; Ae.FromArrayToRef(t.transform.matrix, 0, this._mat), D.TransformCoordinatesFromFloatsToRef(0, 0, 0, this._mat, this._origin), D.TransformCoordinatesFromFloatsToRef(0, 0, -1, this._mat, this._direction), this._direction.subtractInPlace(this._origin), this._direction.normalize(); const i = new XRRay({ x: this._origin.x, y: this._origin.y, z: this._origin.z, w: 0 }, { x: this._direction.x, y: this._direction.y, z: this._direction.z, w: 0 }); fT.XRHitTestWithRay(this._xrSessionManager.session, i, this._xrSessionManager.referenceSpace).then(this._onHitTestResults); } } fT.Name = Qs.HIT_TEST; fT.Version = 1; Oo.AddWebXRFeature(fT.Name, (c, e) => () => new fT(c, e), fT.Version, !1); let wve = 0; class gO extends Ku { /** * Set the reference space to use for anchor creation, when not using a hit test. * Will default to the session's reference space if not defined */ set referenceSpaceForFrameAnchors(e) { this._referenceSpaceForFrameAnchors = e; } /** * constructs a new anchor system * @param _xrSessionManager an instance of WebXRSessionManager * @param _options configuration object for this feature */ constructor(e, t = {}) { super(e), this._options = t, this._lastFrameDetected = /* @__PURE__ */ new Set(), this._trackedAnchors = [], this._futureAnchors = [], this.onAnchorAddedObservable = new Fe(), this.onAnchorRemovedObservable = new Fe(), this.onAnchorUpdatedObservable = new Fe(), this._tmpVector = new D(), this._tmpQuaternion = new Ze(), this.xrNativeFeatureName = "anchors"; } _populateTmpTransformation(e, t) { return this._tmpVector.copyFrom(e), this._tmpQuaternion.copyFrom(t), this._xrSessionManager.scene.useRightHandedSystem || (this._tmpVector.z *= -1, this._tmpQuaternion.z *= -1, this._tmpQuaternion.w *= -1), { position: this._tmpVector, rotationQuaternion: this._tmpQuaternion }; } /** * Create a new anchor point using a hit test result at a specific point in the scene * An anchor is tracked only after it is added to the trackerAnchors in xrFrame. The promise returned here does not yet guaranty that. * Use onAnchorAddedObservable to get newly added anchors if you require tracking guaranty. 
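 * A hedged usage sketch (not from the original source), assuming a hit test feature is already attached and
 * delivering results through its onHitTestResultObservable, and an anchor feature instance named anchorSystem;
 * markerMesh is a hypothetical mesh:
 * @example
 * anchorSystem.onAnchorAddedObservable.add((anchor) => {
 *   anchor.attachedNode = markerMesh; // the node will follow the anchor's tracked pose
 * });
 * hitTest.onHitTestResultObservable.add((results) => {
 *   if (results.length) {
 *     anchorSystem.addAnchorPointUsingHitTestResultAsync(results[0]);
 *   }
 * });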
* * @param hitTestResult The hit test result to use for this anchor creation * @param position an optional position offset for this anchor * @param rotationQuaternion an optional rotation offset for this anchor * @returns A promise that fulfills when babylon has created the corresponding WebXRAnchor object and tracking has begun */ async addAnchorPointUsingHitTestResultAsync(e, t = new D(), i = new Ze()) { this._populateTmpTransformation(t, i); const r = new XRRigidTransform({ x: this._tmpVector.x, y: this._tmpVector.y, z: this._tmpVector.z }, { x: this._tmpQuaternion.x, y: this._tmpQuaternion.y, z: this._tmpQuaternion.z, w: this._tmpQuaternion.w }); if (e.xrHitResult.createAnchor) try { const s = await e.xrHitResult.createAnchor(r); return new Promise((n, a) => { this._futureAnchors.push({ nativeAnchor: s, resolved: !1, submitted: !0, xrTransformation: r, resolve: n, reject: a }); }); } catch (s) { throw new Error(s); } else throw this.detach(), new Error("Anchors not enabled in this environment/browser"); } /** * Add a new anchor at a specific position and rotation * This function will add a new anchor per default in the next available frame. Unless forced, the createAnchor function * will be called in the next xrFrame loop to make sure that the anchor can be created correctly. * An anchor is tracked only after it is added to the trackerAnchors in xrFrame. The promise returned here does not yet guaranty that. * Use onAnchorAddedObservable to get newly added anchors if you require tracking guaranty. * * @param position the position in which to add an anchor * @param rotationQuaternion an optional rotation for the anchor transformation * @param forceCreateInCurrentFrame force the creation of this anchor in the current frame. Must be called inside xrFrame loop! * @returns A promise that fulfills when babylon has created the corresponding WebXRAnchor object and tracking has begun */ async addAnchorAtPositionAndRotationAsync(e, t = new Ze(), i = !1) { this._populateTmpTransformation(e, t); const r = new XRRigidTransform({ x: this._tmpVector.x, y: this._tmpVector.y, z: this._tmpVector.z }, { x: this._tmpQuaternion.x, y: this._tmpQuaternion.y, z: this._tmpQuaternion.z, w: this._tmpQuaternion.w }), s = i && this.attached && this._xrSessionManager.currentFrame ? await this._createAnchorAtTransformation(r, this._xrSessionManager.currentFrame) : void 0; return new Promise((n, a) => { this._futureAnchors.push({ nativeAnchor: s, resolved: !1, submitted: !1, xrTransformation: r, resolve: n, reject: a }); }); } /** * Get the list of anchors currently being tracked by the system */ get anchors() { return this._trackedAnchors; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. 
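 * A short sketch of the position-based variant described above (hedged: Vector3 and Quaternion stand in for the
 * minified vector and quaternion classes, and anchorSystem is a feature instance obtained elsewhere):
 * @example
 * const anchor = await anchorSystem.addAnchorAtPositionAndRotationAsync(
 *   new Vector3(0, 0, -1),        // one meter in front of the reference space origin
 *   Quaternion.Identity()
 * );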
*/ detach() { if (!super.detach()) return !1; if (!this._options.doNotRemoveAnchorsOnSessionEnded) for (; this._trackedAnchors.length; ) { const e = this._trackedAnchors.pop(); if (e) { try { e.remove(); } catch { } this.onAnchorRemovedObservable.notifyObservers(e); } } return !0; } /** * Dispose this feature and all of the resources attached */ dispose() { this._futureAnchors.length = 0, super.dispose(), this.onAnchorAddedObservable.clear(), this.onAnchorRemovedObservable.clear(), this.onAnchorUpdatedObservable.clear(); } _onXRFrame(e) { if (!this.attached || !e) return; const t = e.trackedAnchors; if (t) { const i = this._trackedAnchors.filter((s) => !t.has(s.xrAnchor)).map((s) => this._trackedAnchors.indexOf(s)); let r = 0; i.forEach((s) => { const n = this._trackedAnchors.splice(s - r, 1)[0]; this.onAnchorRemovedObservable.notifyObservers(n), r++; }), t.forEach((s) => { if (this._lastFrameDetected.has(s)) { const n = this._findIndexInAnchorArray(s), a = this._trackedAnchors[n]; try { this._updateAnchorWithXRFrame(s, a, e), a.attachedNode && (a.attachedNode.rotationQuaternion = a.attachedNode.rotationQuaternion || new Ze(), a.transformationMatrix.decompose(a.attachedNode.scaling, a.attachedNode.rotationQuaternion, a.attachedNode.position)), this.onAnchorUpdatedObservable.notifyObservers(a); } catch { Ve.Warn("Anchor could not be updated"); } } else { const n = { id: wve++, xrAnchor: s, remove: () => s.delete() }, a = this._updateAnchorWithXRFrame(s, n, e); this._trackedAnchors.push(a), this.onAnchorAddedObservable.notifyObservers(a); const o = this._futureAnchors.filter((u) => u.nativeAnchor === s)[0]; o && (o.resolve(a), o.resolved = !0); } }), this._lastFrameDetected = t; } this._futureAnchors.forEach((i) => { !i.resolved && !i.submitted && (this._createAnchorAtTransformation(i.xrTransformation, e).then((r) => { i.nativeAnchor = r; }, (r) => { i.resolved = !0, i.reject(r); }), i.submitted = !0); }); } /** * avoiding using Array.find for global support. * @param xrAnchor the plane to find in the array */ _findIndexInAnchorArray(e) { for (let t = 0; t < this._trackedAnchors.length; ++t) if (this._trackedAnchors[t].xrAnchor === e) return t; return -1; } _updateAnchorWithXRFrame(e, t, i) { const r = i.getPose(e.anchorSpace, this._xrSessionManager.referenceSpace); if (r) { const s = t.transformationMatrix || new Ae(); Ae.FromArrayToRef(r.transform.matrix, 0, s), this._xrSessionManager.scene.useRightHandedSystem || s.toggleModelMatrixHandInPlace(), t.transformationMatrix = s, this._options.worldParentNode && s.multiplyToRef(this._options.worldParentNode.getWorldMatrix(), s); } return t; } async _createAnchorAtTransformation(e, t) { var i; if (t.createAnchor) try { return t.createAnchor(e, (i = this._referenceSpaceForFrameAnchors) !== null && i !== void 0 ? 
i : this._xrSessionManager.referenceSpace); } catch (r) { throw new Error(r); } else throw this.detach(), new Error("Anchors are not enabled in your browser"); } } gO.Name = Qs.ANCHOR_SYSTEM; gO.Version = 1; Oo.AddWebXRFeature(gO.Name, (c, e) => () => new gO(c, e), gO.Version); let Lve = 0; class vO extends Ku { /** * construct a new Plane Detector * @param _xrSessionManager an instance of xr Session manager * @param _options configuration to use when constructing this feature */ constructor(e, t = {}) { super(e), this._options = t, this._detectedPlanes = [], this._enabled = !1, this._lastFrameDetected = /* @__PURE__ */ new Set(), this.onPlaneAddedObservable = new Fe(), this.onPlaneRemovedObservable = new Fe(), this.onPlaneUpdatedObservable = new Fe(), this.xrNativeFeatureName = "plane-detection", this._xrSessionManager.session ? this._init() : this._xrSessionManager.onXRSessionInit.addOnce(() => { this._init(); }); } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { if (!super.detach()) return !1; if (!this._options.doNotRemovePlanesOnSessionEnded) for (; this._detectedPlanes.length; ) { const e = this._detectedPlanes.pop(); e && this.onPlaneRemovedObservable.notifyObservers(e); } return !0; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onPlaneAddedObservable.clear(), this.onPlaneRemovedObservable.clear(), this.onPlaneUpdatedObservable.clear(); } /** * Check if the needed objects are defined. * This does not mean that the feature is enabled, but that the objects needed are well defined. */ isCompatible() { return typeof XRPlane < "u"; } /** * Enable room capture mode. * When enabled and supported by the system, * the detectedPlanes array will be populated with the detected room boundaries * @see https://immersive-web.github.io/real-world-geometry/plane-detection.html#dom-xrsession-initiateroomcapture * @returns true if plane detection is enabled and supported. Will reject if not supported. */ async initiateRoomCapture() { return this._xrSessionManager.session.initiateRoomCapture ? this._xrSessionManager.session.initiateRoomCapture() : Promise.reject("initiateRoomCapture is not supported on this session"); } _onXRFrame(e) { var t; if (!this.attached || !this._enabled || !e) return; const i = e.detectedPlanes || ((t = e.worldInformation) === null || t === void 0 ? 
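/* A hedged usage sketch for this plane detection feature (the enableFeature call assumes Babylon's
   WebXRFeatureName enum and a featuresManager from a WebXR experience created elsewhere; observable and
   property names are taken from the code itself):
     const planeDetector = featuresManager.enableFeature(WebXRFeatureName.PLANE_DETECTION, "latest");
     planeDetector.onPlaneAddedObservable.add((plane) => {
       // plane.polygonDefinition holds the boundary points, plane.transformationMatrix its pose
       console.log("plane detected with", plane.polygonDefinition.length, "boundary points");
     });
   On supporting systems, initiateRoomCapture() can additionally populate the detected room boundaries. */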
void 0 : t.detectedPlanes); if (i) { for (let r = 0; r < this._detectedPlanes.length; r++) { const s = this._detectedPlanes[r]; i.has(s.xrPlane) || (this._detectedPlanes.splice(r--, 1), this.onPlaneRemovedObservable.notifyObservers(s)); } i.forEach((r) => { if (this._lastFrameDetected.has(r)) { if (r.lastChangedTime === this._xrSessionManager.currentTimestamp) { const s = this._findIndexInPlaneArray(r), n = this._detectedPlanes[s]; this._updatePlaneWithXRPlane(r, n, e), this.onPlaneUpdatedObservable.notifyObservers(n); } } else { const s = { id: Lve++, xrPlane: r, polygonDefinition: [] }, n = this._updatePlaneWithXRPlane(r, s, e); this._detectedPlanes.push(n), this.onPlaneAddedObservable.notifyObservers(n); } }), this._lastFrameDetected = i; } } _init() { const e = () => { this._enabled = !0, this._detectedPlanes.length && (this._detectedPlanes.length = 0); }; if (this._xrSessionManager.isNative && this._options.preferredDetectorOptions && this._xrSessionManager.session.trySetPreferredPlaneDetectorOptions && this._xrSessionManager.session.trySetPreferredPlaneDetectorOptions(this._options.preferredDetectorOptions), !this._xrSessionManager.session.updateWorldTrackingState) { e(); return; } this._xrSessionManager.session.updateWorldTrackingState({ planeDetectionState: { enabled: !0 } }), e(); } _updatePlaneWithXRPlane(e, t, i) { t.polygonDefinition = e.polygon.map((s) => { const n = this._xrSessionManager.scene.useRightHandedSystem ? 1 : -1; return new D(s.x, s.y, s.z * n); }); const r = i.getPose(e.planeSpace, this._xrSessionManager.referenceSpace); if (r) { const s = t.transformationMatrix || new Ae(); Ae.FromArrayToRef(r.transform.matrix, 0, s), this._xrSessionManager.scene.useRightHandedSystem || s.toggleModelMatrixHandInPlace(), t.transformationMatrix = s, this._options.worldParentNode && s.multiplyToRef(this._options.worldParentNode.getWorldMatrix(), s); } return t; } /** * avoiding using Array.find for global support. * @param xrPlane the plane to find in the array */ _findIndexInPlaneArray(e) { for (let t = 0; t < this._detectedPlanes.length; ++t) if (this._detectedPlanes[t].xrPlane === e) return t; return -1; } } vO.Name = Qs.PLANE_DETECTION; vO.Version = 1; Oo.AddWebXRFeature(vO.Name, (c, e) => () => new vO(c, e), vO.Version); class AO extends Ku { /** * constructs a new background remover module * @param _xrSessionManager the session manager for this module * @param options read-only options to be used in this module */ constructor(e, t = {}) { super(e), this.options = t, this.onBackgroundStateChangedObservable = new Fe(); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { return this._setBackgroundState(!1), super.attach(); } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. 
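 * A hedged usage sketch (assuming Babylon's WebXRFeatureName enum and a featuresManager obtained elsewhere;
 * option names come from the code above, and customBackdropMesh is hypothetical):
 * @example
 * const bgRemover = featuresManager.enableFeature(WebXRFeatureName.BACKGROUND_REMOVER, "latest", {
 *   environmentHelperRemovalFlags: { skyBox: true, ground: false },
 *   backgroundMeshes: [customBackdropMesh], // extra meshes to hide while the session is active
 * });
 * bgRemover.onBackgroundStateChangedObservable.add((visible) => console.log("background visible:", visible));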
*/ detach() { return this._setBackgroundState(!0), super.detach(); } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onBackgroundStateChangedObservable.clear(); } _onXRFrame(e) { } _setBackgroundState(e) { const t = this._xrSessionManager.scene; if (!this.options.ignoreEnvironmentHelper) if (this.options.environmentHelperRemovalFlags) { if (this.options.environmentHelperRemovalFlags.skyBox) { const i = t.getMeshByName("BackgroundSkybox"); i && i.setEnabled(e); } if (this.options.environmentHelperRemovalFlags.ground) { const i = t.getMeshByName("BackgroundPlane"); i && i.setEnabled(e); } } else { const i = t.getMeshByName("BackgroundHelper"); i && i.setEnabled(e); } this.options.backgroundMeshes && this.options.backgroundMeshes.forEach((i) => i.setEnabled(e)), this.onBackgroundStateChangedObservable.notifyObservers(e); } } AO.Name = Qs.BACKGROUND_REMOVER; AO.Version = 1; Oo.AddWebXRFeature(AO.Name, (c, e) => () => new AO(c, e), AO.Version, !0); class Nve { } class yO extends Ku { _createPhysicsImpostor(e) { const t = this._options.physicsProperties.impostorType || tr.SphereImpostor, i = this._options.physicsProperties.impostorSize || 0.1, r = Rd("impostor-mesh-" + e.uniqueId, { diameterX: typeof i == "number" ? i : i.width, diameterY: typeof i == "number" ? i : i.height, diameterZ: typeof i == "number" ? i : i.depth }); r.isVisible = this._debugMode, r.isPickable = !1, r.rotationQuaternion = new Ze(); const s = e.grip || e.pointer; r.position.copyFrom(s.position), r.rotationQuaternion.copyFrom(s.rotationQuaternion); const n = new tr(r, t, Object.assign({ mass: 0 }, this._options.physicsProperties)); this._controllers[e.uniqueId] = { xrController: e, impostor: n, impostorMesh: r }; } /** * Construct a new Controller Physics Feature * @param _xrSessionManager the corresponding xr session manager * @param _options options to create this feature with */ constructor(e, t) { super(e), this._options = t, this._attachController = (i) => { this._controllers[i.uniqueId] || (this._xrSessionManager.scene.isPhysicsEnabled() || Ce.Warn("physics engine not enabled, skipped. Please add this controller manually."), this._options.physicsProperties.useControllerMesh && i.inputSource.gamepad ? i.onMotionControllerInitObservable.addOnce((r) => { r._doNotLoadControllerMesh ? this._createPhysicsImpostor(i) : r.onModelLoadedObservable.addOnce(() => { const s = new tr(r.rootMesh, tr.MeshImpostor, Object.assign({ mass: 0 }, this._options.physicsProperties)), n = i.grip || i.pointer; this._controllers[i.uniqueId] = { xrController: i, impostor: s, oldPos: n.position.clone(), oldRotation: n.rotationQuaternion.clone() }; }); }) : this._createPhysicsImpostor(i)); }, this._controllers = {}, this._debugMode = !1, this._delta = 0, this._lastTimestamp = 0, this._tmpQuaternion = new Ze(), this._tmpVector = new D(), this._options.physicsProperties || (this._options.physicsProperties = {}); } /** * @internal * enable debugging - will show console outputs and the impostor mesh */ _enablePhysicsDebug() { this._debugMode = !0, Object.keys(this._controllers).forEach((e) => { const t = this._controllers[e]; t.impostorMesh && (t.impostorMesh.isVisible = !0); }); } /** * Manually add a controller (if no xrInput was provided or physics engine was not enabled) * @param xrController the controller to add */ addController(e) { this._attachController(e); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. 
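 * A hedged usage sketch (assuming Babylon's WebXRFeatureName enum, an xr default experience created elsewhere
 * and a physics engine already enabled on the scene; option names come from the constructor above):
 * @example
 * const controllerPhysics = featuresManager.enableFeature(WebXRFeatureName.PHYSICS_CONTROLLERS, "latest", {
 *   xrInput: xr.input,
 *   physicsProperties: { restitution: 0.5, impostorSize: 0.1 },
 *   enableHeadsetImpostor: true,
 * });
 * // null until the controller has been attached and its impostor created
 * const impostor = controllerPhysics.getImpostorForController(someController);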
*/ attach() { if (!super.attach()) return !1; if (!this._options.xrInput) return !0; if (this._options.xrInput.controllers.forEach(this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerAddedObservable, this._attachController), this._addNewAttachObserver(this._options.xrInput.onControllerRemovedObservable, (e) => { this._detachController(e.uniqueId); }), this._options.enableHeadsetImpostor) { const e = this._options.headsetImpostorParams || { impostorType: tr.SphereImpostor, restitution: 0.8, impostorSize: 0.3 }, t = e.impostorSize || 0.3; this._headsetMesh = Rd("headset-mesh", { diameterX: typeof t == "number" ? t : t.width, diameterY: typeof t == "number" ? t : t.height, diameterZ: typeof t == "number" ? t : t.depth }), this._headsetMesh.rotationQuaternion = new Ze(), this._headsetMesh.isVisible = !1, this._headsetImpostor = new tr(this._headsetMesh, e.impostorType, Object.assign({ mass: 0 }, e)); } return !0; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? (Object.keys(this._controllers).forEach((e) => { this._detachController(e); }), this._headsetMesh && this._headsetMesh.dispose(), !0) : !1; } /** * Get the headset impostor, if enabled * @returns the impostor */ getHeadsetImpostor() { return this._headsetImpostor; } /** * Get the physics impostor of a specific controller. * The impostor is not attached to a mesh because a mesh for each controller is not obligatory * @param controller the controller or the controller id of which to get the impostor * @returns the impostor or null */ getImpostorForController(e) { const t = typeof e == "string" ? e : e.uniqueId; return this._controllers[t] ? this._controllers[t].impostor : null; } /** * Update the physics properties provided in the constructor * @param newProperties the new properties object * @param newProperties.impostorType * @param newProperties.impostorSize * @param newProperties.friction * @param newProperties.restitution */ setPhysicsProperties(e) { this._options.physicsProperties = Object.assign(Object.assign({}, this._options.physicsProperties), e); } _onXRFrame(e) { var t, i; if (this._delta = this._xrSessionManager.currentTimestamp - this._lastTimestamp, this._lastTimestamp = this._xrSessionManager.currentTimestamp, this._headsetMesh && this._headsetImpostor) { if (this._headsetMesh.position.copyFrom(this._options.xrInput.xrCamera.globalPosition), this._headsetMesh.rotationQuaternion.copyFrom(this._options.xrInput.xrCamera.absoluteRotation), !((t = this._options.xrInput.xrCamera._lastXRViewerPose) === null || t === void 0) && t.linearVelocity) { const r = this._options.xrInput.xrCamera._lastXRViewerPose.linearVelocity; this._tmpVector.set(r.x, r.y, r.z), this._headsetImpostor.setLinearVelocity(this._tmpVector); } if (!((i = this._options.xrInput.xrCamera._lastXRViewerPose) === null || i === void 0) && i.angularVelocity) { const r = this._options.xrInput.xrCamera._lastXRViewerPose.angularVelocity; this._tmpVector.set(r.x, r.y, r.z), this._headsetImpostor.setAngularVelocity(this._tmpVector); } } Object.keys(this._controllers).forEach((r) => { var s, n; const a = this._controllers[r], l = a.xrController.grip || a.xrController.pointer, o = a.oldPos || a.impostorMesh.position; if (!((s = a.xrController._lastXRPose) === null || s === void 0) && s.linearVelocity) { const h = a.xrController._lastXRPose.linearVelocity; this._tmpVector.set(h.x, h.y, h.z), a.impostor.setLinearVelocity(this._tmpVector); } 
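/* When the platform does not report a linearVelocity on the pose, the branch below estimates it by finite
   differences: (currentPosition - previousPosition) scaled by 1000 / deltaMs to get units per second. The same
   idea is applied further down for angular velocity, extracting an axis and angle from the delta quaternion. */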
else l.position.subtractToRef(o, this._tmpVector), this._tmpVector.scaleInPlace(1e3 / this._delta), a.impostor.setLinearVelocity(this._tmpVector); o.copyFrom(l.position), this._debugMode && Ce.Log([this._tmpVector, "linear"]); const u = a.oldRotation || a.impostorMesh.rotationQuaternion; if (!((n = a.xrController._lastXRPose) === null || n === void 0) && n.angularVelocity) { const h = a.xrController._lastXRPose.angularVelocity; this._tmpVector.set(h.x, h.y, h.z), a.impostor.setAngularVelocity(this._tmpVector); } else if (!u.equalsWithEpsilon(l.rotationQuaternion)) { u.conjugateInPlace().multiplyToRef(l.rotationQuaternion, this._tmpQuaternion); const h = Math.sqrt(this._tmpQuaternion.x * this._tmpQuaternion.x + this._tmpQuaternion.y * this._tmpQuaternion.y + this._tmpQuaternion.z * this._tmpQuaternion.z); if (this._tmpVector.set(this._tmpQuaternion.x, this._tmpQuaternion.y, this._tmpQuaternion.z), h < 1e-3) this._tmpVector.scaleInPlace(2); else { const d = 2 * Math.atan2(h, this._tmpQuaternion.w); this._tmpVector.scaleInPlace(d / (h * (this._delta / 1e3))); } a.impostor.setAngularVelocity(this._tmpVector); } u.copyFrom(l.rotationQuaternion), this._debugMode && Ce.Log([this._tmpVector, this._tmpQuaternion, "angular"]); }); } _detachController(e) { const t = this._controllers[e]; t && (t.impostorMesh && t.impostorMesh.dispose(), delete this._controllers[e]); } } yO.Name = Qs.PHYSICS_CONTROLLERS; yO.Version = 1; Oo.AddWebXRFeature(yO.Name, (c, e) => () => new yO(c, e), yO.Version, !0); class CO extends Ku { /** * Creates a new instance of the hit test feature * @param _xrSessionManager an instance of WebXRSessionManager * @param options options to use when constructing this feature */ constructor(e, t = {}) { super(e), this.options = t, this._tmpMat = new Ae(), this._tmpPos = new D(), this._tmpQuat = new Ze(), this._initHitTestSource = (i) => { if (!i) return; const r = new XRRay(this.options.offsetRay || {}), s = { space: this.options.useReferenceSpace ? i : this._xrSessionManager.viewerReferenceSpace, offsetRay: r }; if (this.options.entityTypes && (s.entityTypes = this.options.entityTypes), !s.space) { Ve.Warn("waiting for viewer reference space to initialize"); return; } this._xrSessionManager.session.requestHitTestSource(s).then((n) => { this._xrHitTestSource && this._xrHitTestSource.cancel(), this._xrHitTestSource = n; }); }, this.autoCloneTransformation = !1, this.onHitTestResultObservable = new Fe(), this.paused = !1, this.xrNativeFeatureName = "hit-test", Ve.Warn("Hit test is an experimental and unstable feature."); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { if (!super.attach() || !this._xrSessionManager.session.requestHitTestSource) return !1; if (this.options.disablePermanentHitTest || (this._xrSessionManager.referenceSpace && this._initHitTestSource(this._xrSessionManager.referenceSpace), this._xrSessionManager.onXRReferenceSpaceChanged.add(this._initHitTestSource)), this.options.enableTransientHitTest) { const e = new XRRay(this.options.transientOffsetRay || {}); this._xrSessionManager.session.requestHitTestSourceForTransientInput({ profile: this.options.transientHitTestProfile || "generic-touchscreen", offsetRay: e, entityTypes: this.options.entityTypes }).then((t) => { this._transientXrHitTestSource = t; }); } return !0; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? 
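/* A hedged usage sketch for this hit test feature (version 2). The enableFeature call assumes Babylon's
   WebXRFeatureName enum and a featuresManager obtained elsewhere; option and result field names are taken from
   the code itself, and marker is a hypothetical mesh:
     const hitTest = featuresManager.enableFeature(WebXRFeatureName.HIT_TEST, "latest", {
       enableTransientHitTest: true,   // also react to transient (touch/select) input
       disablePermanentHitTest: false, // keep the continuous viewer-space hit test source
     });
     hitTest.onHitTestResultObservable.add((results) => {
       if (results.length) {
         // position/rotationQuaternion are reused temp objects unless autoCloneTransformation is set
         marker.position.copyFrom(results[0].position);
       }
     });
   Setting hitTest.paused = true suspends processing without detaching the feature. */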
(this._xrHitTestSource && (this._xrHitTestSource.cancel(), this._xrHitTestSource = null), this._xrSessionManager.onXRReferenceSpaceChanged.removeCallback(this._initHitTestSource), this._transientXrHitTestSource && (this._transientXrHitTestSource.cancel(), this._transientXrHitTestSource = null), !0) : !1; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onHitTestResultObservable.clear(); } _onXRFrame(e) { if (!(!this.attached || this.paused)) { if (this._xrHitTestSource) { const t = e.getHitTestResults(this._xrHitTestSource); this._processWebXRHitTestResult(t); } this._transientXrHitTestSource && e.getHitTestResultsForTransientInput(this._transientXrHitTestSource).forEach((i) => { this._processWebXRHitTestResult(i.results, i.inputSource); }); } } _processWebXRHitTestResult(e, t) { const i = []; e.forEach((r) => { const s = r.getPose(this._xrSessionManager.referenceSpace); if (!s) return; const n = s.transform.position, a = s.transform.orientation; this._tmpPos.set(n.x, n.y, n.z), this._tmpQuat.set(a.x, a.y, a.z, a.w), Ae.FromFloat32ArrayToRefScaled(s.transform.matrix, 0, 1, this._tmpMat), this._xrSessionManager.scene.useRightHandedSystem || (this._tmpPos.z *= -1, this._tmpQuat.z *= -1, this._tmpQuat.w *= -1, this._tmpMat.toggleModelMatrixHandInPlace()); const l = { position: this.autoCloneTransformation ? this._tmpPos.clone() : this._tmpPos, rotationQuaternion: this.autoCloneTransformation ? this._tmpQuat.clone() : this._tmpQuat, transformationMatrix: this.autoCloneTransformation ? this._tmpMat.clone() : this._tmpMat, inputSource: t, isTransient: !!t, xrHitResult: r }; i.push(l); }), this.onHitTestResultObservable.notifyObservers(i); } } CO.Name = Qs.HIT_TEST; CO.Version = 2; Oo.AddWebXRFeature(CO.Name, (c, e) => () => new CO(c, e), CO.Version, !1); class xO extends Ku { /** * The current feature point cloud maintained across frames. */ get featurePointCloud() { return this._featurePointCloud; } /** * construct the feature point system * @param _xrSessionManager an instance of xr Session manager */ constructor(e) { super(e), this._enabled = !1, this._featurePointCloud = [], this.onFeaturePointsAddedObservable = new Fe(), this.onFeaturePointsUpdatedObservable = new Fe(), this.xrNativeFeatureName = "bjsfeature-points", this._xrSessionManager.session ? this._init() : this._xrSessionManager.onXRSessionInit.addOnce(() => { this._init(); }); } /** * Detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach() ? (this.featurePointCloud.length = 0, !0) : !1; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this._featurePointCloud.length = 0, this.onFeaturePointsUpdatedObservable.clear(), this.onFeaturePointsAddedObservable.clear(); } /** * On receiving a new XR frame if this feature is attached notify observers new feature point data is available. * @param frame */ _onXRFrame(e) { if (!this.attached || !this._enabled || !e) return; const t = e.featurePointCloud; if (!(!t || t.length === 0)) { if (t.length % 5 !== 0) throw new Error("Received malformed feature point cloud of length: " + t.length); const i = t.length / 5, r = [], s = []; for (let n = 0; n < i; n++) { const a = n * 5, l = t[a + 4]; this._featurePointCloud[l] ? 
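/* The raw featurePointCloud buffer packs five floats per point: x, y, z, a confidence value and a numeric id
   (read as t[a + 4] just above). Ids already present in the cache are treated as updates and new ids as
   additions, which is what feeds onFeaturePointsUpdatedObservable and onFeaturePointsAddedObservable. */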
r.push(l) : (this._featurePointCloud[l] = { position: new D(), confidenceValue: 0 }, s.push(l)), this._featurePointCloud[l].position.x = t[a], this._featurePointCloud[l].position.y = t[a + 1], this._featurePointCloud[l].position.z = t[a + 2], this._featurePointCloud[l].confidenceValue = t[a + 3]; } s.length > 0 && this.onFeaturePointsAddedObservable.notifyObservers(s), r.length > 0 && this.onFeaturePointsUpdatedObservable.notifyObservers(r); } } /** * Initializes the feature. If the feature point feature is not available for this environment do not mark the feature as enabled. */ _init() { !this._xrSessionManager.session.trySetFeaturePointCloudEnabled || !this._xrSessionManager.session.trySetFeaturePointCloudEnabled(!0) || (this._enabled = !0); } } xO.Name = Qs.FEATURE_POINTS; xO.Version = 1; Oo.AddWebXRFeature(xO.Name, (c) => () => new xO(c), xO.Version); let Fve = 0; class bO extends Ku { constructor(e, t = {}) { super(e), this._options = t, this._detectedMeshes = /* @__PURE__ */ new Map(), this.onMeshAddedObservable = new Fe(), this.onMeshRemovedObservable = new Fe(), this.onMeshUpdatedObservable = new Fe(), this.xrNativeFeatureName = "mesh-detection", this._options.generateMeshes && (this._options.convertCoordinateSystems = !0), this._xrSessionManager.session ? this._init() : this._xrSessionManager.onXRSessionInit.addOnce(() => { this._init(); }); } detach() { return super.detach() ? (this._xrSessionManager.isNative && this._xrSessionManager.session.trySetMeshDetectorEnabled && this._xrSessionManager.session.trySetMeshDetectorEnabled(!1), this._options.doNotRemoveMeshesOnSessionEnded || (this._detectedMeshes.forEach((e) => { this.onMeshRemovedObservable.notifyObservers(e); }), this._detectedMeshes.clear()), !0) : !1; } dispose() { super.dispose(), this.onMeshAddedObservable.clear(), this.onMeshRemovedObservable.clear(), this.onMeshUpdatedObservable.clear(); } _onXRFrame(e) { var t; try { if (!this.attached || !e) return; const i = e.detectedMeshes || ((t = e.worldInformation) === null || t === void 0 ? 
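/* A hedged usage sketch for this mesh detection feature (the enableFeature call assumes Babylon's
   WebXRFeatureName enum and a featuresManager obtained elsewhere; option and observable names are taken from
   the code):
     const meshDetector = featuresManager.enableFeature(WebXRFeatureName.MESH_DETECTION, "latest", {
       generateMeshes: true, // also forces convertCoordinateSystems, see the constructor above
     });
     meshDetector.onMeshAddedObservable.add((detected) => {
       // with generateMeshes enabled, detected.mesh is a Babylon mesh kept in sync on updates
       detected.mesh.visibility = 0.5;
     }); */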
void 0 : t.detectedMeshes); if (i) { const r = /* @__PURE__ */ new Set(); this._detectedMeshes.forEach((s, n) => { i.has(n) || r.add(n); }), r.forEach((s) => { const n = this._detectedMeshes.get(s); n && (this.onMeshRemovedObservable.notifyObservers(n), this._detectedMeshes.delete(s)); }), i.forEach((s) => { if (this._detectedMeshes.has(s)) { if (s.lastChangedTime === this._xrSessionManager.currentTimestamp) { const n = this._detectedMeshes.get(s); n && (this._updateVertexDataWithXRMesh(s, n, e), this.onMeshUpdatedObservable.notifyObservers(n)); } } else { const n = { id: Fve++, xrMesh: s }, a = this._updateVertexDataWithXRMesh(s, n, e); this._detectedMeshes.set(s, a), this.onMeshAddedObservable.notifyObservers(a); } }); } } catch (i) { Ce.Log(i.stack); } } _init() { this._xrSessionManager.isNative && (this._xrSessionManager.session.trySetMeshDetectorEnabled && this._xrSessionManager.session.trySetMeshDetectorEnabled(!0), this._options.preferredDetectorOptions && this._xrSessionManager.session.trySetPreferredMeshDetectorOptions && this._xrSessionManager.session.trySetPreferredMeshDetectorOptions(this._options.preferredDetectorOptions)); } _updateVertexDataWithXRMesh(e, t, i) { var r; t.xrMesh = e, t.worldParentNode = this._options.worldParentNode; const s = e.vertices || e.positions; if (this._options.convertCoordinateSystems) { if (this._xrSessionManager.scene.useRightHandedSystem) t.positions = s, t.normals = e.normals; else { t.positions = new Float32Array(s.length); for (let a = 0; a < s.length; a += 3) t.positions[a] = s[a], t.positions[a + 1] = s[a + 1], t.positions[a + 2] = -1 * s[a + 2]; if (e.normals) { t.normals = new Float32Array(e.normals.length); for (let a = 0; a < e.normals.length; a += 3) t.normals[a] = e.normals[a], t.normals[a + 1] = e.normals[a + 1], t.normals[a + 2] = -1 * e.normals[a + 2]; } } t.indices = e.indices; const n = i.getPose(e.meshSpace, this._xrSessionManager.referenceSpace); if (n) { const a = t.transformationMatrix || new Ae(); Ae.FromArrayToRef(n.transform.matrix, 0, a), this._xrSessionManager.scene.useRightHandedSystem || a.toggleModelMatrixHandInPlace(), t.transformationMatrix = a, this._options.worldParentNode && a.multiplyToRef(this._options.worldParentNode.getWorldMatrix(), a); } if (this._options.generateMeshes) { if (t.mesh) { const a = t.mesh; a.updateVerticesData(Y.PositionKind, t.positions), t.normals ? a.updateVerticesData(Y.NormalKind, t.normals) : a.createNormals(!0), a.updateIndices(t.indices); } else { const a = new ke("xr mesh " + t.id, this._xrSessionManager.scene); a.rotationQuaternion = new Ze(), a.setVerticesData(Y.PositionKind, t.positions), t.normals ? 
a.setVerticesData(Y.NormalKind, t.normals) : a.createNormals(!0), a.setIndices(t.indices, void 0, !0), t.mesh = a; } (r = t.transformationMatrix) === null || r === void 0 || r.decompose(t.mesh.scaling, t.mesh.rotationQuaternion, t.mesh.position); } } return t; } } bO.Name = Qs.MESH_DETECTION; bO.Version = 1; Oo.AddWebXRFeature(bO.Name, (c, e) => () => new bO(c, e), bO.Version, !1); var PC; (function(c) { c[c.NotReceived = 0] = "NotReceived", c[c.Waiting = 1] = "Waiting", c[c.Received = 2] = "Received"; })(PC || (PC = {})); class EO extends Ku { /** * constructs the image tracking feature * @param _xrSessionManager the session manager for this module * @param options read-only options to be used in this module */ constructor(e, t) { super(e), this.options = t, this.onUntrackableImageFoundObservable = new Fe(), this.onTrackableImageFoundObservable = new Fe(), this.onTrackedImageUpdatedObservable = new Fe(), this._trackableScoreStatus = PC.NotReceived, this._trackedImages = [], this.xrNativeFeatureName = "image-tracking"; } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { return super.attach(); } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { return super.detach(); } /** * Get a tracked image by its ID. * * @param id the id of the image to load (position in the init array) * @returns a trackable image, if exists in this location */ getTrackedImageById(e) { return this._trackedImages[e] || null; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this._trackedImages.forEach((e) => { e.originalBitmap.close(); }), this._trackedImages.length = 0, this.onTrackableImageFoundObservable.clear(), this.onUntrackableImageFoundObservable.clear(), this.onTrackedImageUpdatedObservable.clear(); } /** * Extends the session init object if needed * @returns augmentation object fo the xr session init object. */ async getXRSessionInitExtension() { if (!this.options.images || !this.options.images.length) return {}; const e = this.options.images.map((t) => typeof t.src == "string" ? 
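/* A hedged usage sketch for image tracking (the enableFeature call assumes Babylon's WebXRFeatureName enum and
   a featuresManager obtained elsewhere; the image option and observable names are taken from the code). Images
   must be declared before the XR session starts, because getXRSessionInitExtension below feeds them into the
   session init object:
     const imageTracking = featuresManager.enableFeature(WebXRFeatureName.IMAGE_TRACKING, "latest", {
       images: [{ src: "https://example.com/marker.png", estimatedRealWorldWidth: 0.2 }], // width in meters
     });
     imageTracking.onTrackedImageUpdatedObservable.add((image) => {
       // image.transformationMatrix holds the pose, image.emulated tells whether tracking is only inferred
       console.log("image", image.id, "updated, measured width:", image.realWorldWidth);
     }); */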
this._xrSessionManager.scene.getEngine()._createImageBitmapFromSource(t.src) : Promise.resolve(t.src)); try { const t = await Promise.all(e); return this._originalTrackingRequest = t.map((i, r) => ({ image: i, widthInMeters: this.options.images[r].estimatedRealWorldWidth })), { trackedImages: this._originalTrackingRequest }; } catch { return Ve.Error("Error loading images for tracking, WebXRImageTracking disabled for this session."), {}; } } _onXRFrame(e) { if (!e.getImageTrackingResults || this._trackableScoreStatus === PC.Waiting) return; if (this._trackableScoreStatus === PC.NotReceived) { this._checkScoresAsync(); return; } const t = e.getImageTrackingResults(); for (const i of t) { let r = !1; const s = i.index, n = this._trackedImages[s]; if (!n) continue; n.xrTrackingResult = i, n.realWorldWidth !== i.measuredWidthInMeters && (n.realWorldWidth = i.measuredWidthInMeters, r = !0); const a = e.getPose(i.imageSpace, this._xrSessionManager.referenceSpace); if (a) { const u = n.transformationMatrix; Ae.FromArrayToRef(a.transform.matrix, 0, u), this._xrSessionManager.scene.useRightHandedSystem || u.toggleModelMatrixHandInPlace(), r = !0; } const o = i.trackingState === "emulated"; n.emulated !== o && (n.emulated = o, r = !0), r && this.onTrackedImageUpdatedObservable.notifyObservers(n); } } async _checkScoresAsync() { if (!this._xrSessionManager.session.getTrackedImageScores || this._trackableScoreStatus !== PC.NotReceived) return; this._trackableScoreStatus = PC.Waiting; const e = await this._xrSessionManager.session.getTrackedImageScores(); if (!e || e.length === 0) { this._trackableScoreStatus = PC.NotReceived; return; } for (let t = 0; t < e.length; ++t) if (e[t] == "untrackable") this.onUntrackableImageFoundObservable.notifyObservers(t); else { const i = this._originalTrackingRequest[t].image, r = { id: t, originalBitmap: i, transformationMatrix: new Ae(), ratio: i.width / i.height }; this._trackedImages[t] = r, this.onTrackableImageFoundObservable.notifyObservers(r); } this._trackableScoreStatus = e.length > 0 ? PC.Received : PC.NotReceived; } } EO.Name = Qs.IMAGE_TRACKING; EO.Version = 1; Oo.AddWebXRFeature(EO.Name, (c, e) => () => new EO(c, e), EO.Version, !1); class TO extends Ku { /** * Creates a new instance of the dom-overlay feature * @param _xrSessionManager an instance of WebXRSessionManager * @param options options to use when constructing this feature */ constructor(e, t) { super(e), this.options = t, this._domOverlayType = null, this._beforeXRSelectListener = null, this._element = null, this.xrNativeFeatureName = "dom-overlay", Ve.Warn("dom-overlay is an experimental and unstable feature."); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { return !super.attach() || !this._xrSessionManager.session.domOverlayState || this._xrSessionManager.session.domOverlayState.type === null ? !1 : (this._domOverlayType = this._xrSessionManager.session.domOverlayState.type, this._element !== null && this.options.supressXRSelectEvents === !0 && (this._beforeXRSelectListener = (e) => { e.preventDefault(); }, this._element.addEventListener("beforexrselect", this._beforeXRSelectListener)), !0); } /** * The type of DOM overlay (null when not supported). Provided by UA and remains unchanged for duration of session. 
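 * A hedged usage sketch (assuming Babylon's WebXRFeatureName enum and a featuresManager obtained elsewhere; the
 * option names are taken from the code below). The feature must be enabled before the XR session is requested,
 * since it extends the session init object through getXRSessionInitExtension:
 * @example
 * featuresManager.enableFeature(WebXRFeatureName.DOM_OVERLAY, "latest", {
 *   element: "#xr-overlay",      // CSS selector or an actual DOM element
 *   supressXRSelectEvents: true, // option name as spelled in this implementation
 * });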
*/ get domOverlayType() { return this._domOverlayType; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this._element !== null && this._beforeXRSelectListener && this._element.removeEventListener("beforexrselect", this._beforeXRSelectListener); } _onXRFrame(e) { } /** * Extends the session init object if needed * @returns augmentation object for the xr session init object. */ async getXRSessionInitExtension() { if (this.options.element === void 0) return Ve.Warn('"element" option must be provided to attach xr-dom-overlay feature.'), {}; if (typeof this.options.element == "string") { const e = document.querySelector(this.options.element); if (e === null) return Ve.Warn(`element not found '${this.options.element}' (not requesting xr-dom-overlay)`), {}; this._element = e; } else this._element = this.options.element; return { domOverlay: { root: this._element } }; } } TO.Name = Qs.DOM_OVERLAY; TO.Version = 1; Oo.AddWebXRFeature(TO.Name, (c, e) => () => new TO(c, e), TO.Version, !1); class pT extends Ku { /** * Current movement direction. Will be null before XR Frames have been processed. */ get movementDirection() { return this._movementDirection; } /** * Is movement enabled */ get movementEnabled() { return this._featureContext.movementEnabled; } /** * Sets whether movement is enabled or not * @param enabled is movement enabled */ set movementEnabled(e) { this._featureContext.movementEnabled = e; } /** * If movement follows viewer pose */ get movementOrientationFollowsViewerPose() { return this._featureContext.movementOrientationFollowsViewerPose; } /** * Sets whether movement follows viewer pose * @param followsPose is movement should follow viewer pose */ set movementOrientationFollowsViewerPose(e) { this._featureContext.movementOrientationFollowsViewerPose = e; } /** * Gets movement speed */ get movementSpeed() { return this._featureContext.movementSpeed; } /** * Sets movement speed * @param movementSpeed movement speed */ set movementSpeed(e) { this._featureContext.movementSpeed = e; } /** * Gets minimum threshold the controller's thumbstick/touchpad must pass before being recognized for movement (avoids jitter/unintentional movement) */ get movementThreshold() { return this._featureContext.movementThreshold; } /** * Sets minimum threshold the controller's thumbstick/touchpad must pass before being recognized for movement (avoids jitter/unintentional movement) * @param movementThreshold new threshold */ set movementThreshold(e) { this._featureContext.movementThreshold = e; } /** * Is rotation enabled */ get rotationEnabled() { return this._featureContext.rotationEnabled; } /** * Sets whether rotation is enabled or not * @param enabled is rotation enabled */ set rotationEnabled(e) { this._featureContext.rotationEnabled = e; } /** * Gets rotation speed factor */ get rotationSpeed() { return this._featureContext.rotationSpeed; } /** * Sets rotation speed factor (1.0 is default) * @param rotationSpeed new rotation speed factor */ set rotationSpeed(e) { this._featureContext.rotationSpeed = e; } /** * Gets minimum threshold the controller's thumbstick/touchpad must pass before being recognized for rotation (avoids jitter/unintentional rotation) */ get rotationThreshold() { return this._featureContext.rotationThreshold; } /** * Sets minimum threshold the controller's thumbstick/touchpad must pass before being recognized for rotation (avoids jitter/unintentional rotation) * @param threshold new threshold */ set rotationThreshold(e) { 
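// Usage sketch for this controller-movement feature (hedged: the un-minified names
// WebXRFeatureName.MOVEMENT / WebXRFeatureName.TELEPORTATION, featuresManager and
// `xr.input` are assumptions; option names match the accessors above). Movement is
// usually used instead of the default teleportation feature, so that one is
// typically disabled first:
//
//   const fm = xr.baseExperience.featuresManager;
//   fm.disableFeature(WebXRFeatureName.TELEPORTATION);
//   const movement = fm.enableFeature(WebXRFeatureName.MOVEMENT, "latest", {
//     xrInput: xr.input,
//     movementSpeed: 0.5,
//     rotationSpeed: 0.3,
//     movementOrientationFollowsViewerPose: true,
//   });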
this._featureContext.rotationThreshold = e; } /** * constructs a new movement controller system * @param _xrSessionManager an instance of WebXRSessionManager * @param options configuration object for this feature */ constructor(e, t) { var i, r, s, n, a, l; if (super(e), this._controllers = {}, this._currentRegistrationConfigurations = [], this._movementDirection = new Ze(), this._tmpRotationMatrix = Ae.Identity(), this._tmpTranslationDirection = new D(), this._tmpMovementTranslation = new D(), this._tempCacheQuaternion = new Ze(), this._attachController = (o) => { if (this._controllers[o.uniqueId]) return; this._controllers[o.uniqueId] = { xrController: o, registeredComponents: [] }; const u = this._controllers[o.uniqueId]; if (u.xrController.inputSource.targetRayMode === "tracked-pointer" && u.xrController.inputSource.gamepad) { const h = () => { if (o.motionController) for (const d of this._currentRegistrationConfigurations) { let f = null; if (d.allowedComponentTypes) for (const m of d.allowedComponentTypes) { const _ = o.motionController.getComponentOfType(m); if (_ !== null) { f = _; break; } } if (d.mainComponentOnly) { const m = o.motionController.getMainComponent(); if (m === null) continue; f = m; } if (typeof d.componentSelectionPredicate == "function" && (f = d.componentSelectionPredicate(o)), f && d.forceHandedness && o.inputSource.handedness !== d.forceHandedness || f === null) continue; const p = { registrationConfiguration: d, component: f }; u.registeredComponents.push(p), "axisChangedHandler" in d && (p.onAxisChangedObserver = f.onAxisValueChangedObservable.add((m) => { d.axisChangedHandler(m, this._movementState, this._featureContext, this._xrInput); })), "buttonChangedhandler" in d && (p.onButtonChangedObserver = f.onButtonStateChangedObservable.add(() => { f.changes.pressed && d.buttonChangedhandler(f.changes.pressed, this._movementState, this._featureContext, this._xrInput); })); } }; o.motionController ? h() : o.onMotionControllerInitObservable.addOnce(() => { h(); }); } }, !t || t.xrInput === void 0) { Ve.Error('WebXRControllerMovement feature requires "xrInput" option.'); return; } Array.isArray(t.customRegistrationConfigurations) ? this._currentRegistrationConfigurations = t.customRegistrationConfigurations : this._currentRegistrationConfigurations = pT.REGISTRATIONS.default, this._featureContext = { movementEnabled: t.movementEnabled || !0, movementOrientationFollowsViewerPose: (i = t.movementOrientationFollowsViewerPose) !== null && i !== void 0 ? i : !0, movementSpeed: (r = t.movementSpeed) !== null && r !== void 0 ? r : 1, movementThreshold: (s = t.movementThreshold) !== null && s !== void 0 ? s : 0.25, rotationEnabled: (n = t.rotationEnabled) !== null && n !== void 0 ? n : !0, rotationSpeed: (a = t.rotationSpeed) !== null && a !== void 0 ? a : 1, rotationThreshold: (l = t.rotationThreshold) !== null && l !== void 0 ? l : 0.25 }, this._movementState = { moveX: 0, moveY: 0, rotateX: 0, rotateY: 0 }, this._xrInput = t.xrInput; } attach() { return super.attach() ? (this._xrInput.controllers.forEach(this._attachController), this._addNewAttachObserver(this._xrInput.onControllerAddedObservable, this._attachController), this._addNewAttachObserver(this._xrInput.onControllerRemovedObservable, (e) => { this._detachController(e.uniqueId); }), !0) : !1; } detach() { return super.detach() ? (Object.keys(this._controllers).forEach((e) => { this._detachController(e); }), this._controllers = {}, !0) : !1; } /** * Occurs on every XR frame. 
* @param _xrFrame */ _onXRFrame(e) { if (this.attached) { if (this._movementState.rotateX !== 0 && this._featureContext.rotationEnabled) { const i = this._xrSessionManager.scene.getEngine().getDeltaTime() * 1e-3 * this._featureContext.rotationSpeed * this._movementState.rotateX * (this._xrSessionManager.scene.useRightHandedSystem ? -1 : 1); this._featureContext.movementOrientationFollowsViewerPose ? (this._xrInput.xrCamera.cameraRotation.y += i, Ze.RotationYawPitchRollToRef(i, 0, 0, this._tempCacheQuaternion), this._xrInput.xrCamera.rotationQuaternion.multiplyToRef(this._tempCacheQuaternion, this._movementDirection)) : (Ze.RotationYawPitchRollToRef(i * 3, 0, 0, this._tempCacheQuaternion), this._movementDirection.multiplyInPlace(this._tempCacheQuaternion)); } else this._featureContext.movementOrientationFollowsViewerPose && this._movementDirection.copyFrom(this._xrInput.xrCamera.rotationQuaternion); (this._movementState.moveX || this._movementState.moveY) && this._featureContext.movementEnabled && (Ae.FromQuaternionToRef(this._movementDirection, this._tmpRotationMatrix), this._tmpTranslationDirection.set(this._movementState.moveX, 0, this._movementState.moveY * (this._xrSessionManager.scene.useRightHandedSystem ? 1 : -1)), D.TransformCoordinatesToRef(this._tmpTranslationDirection, this._tmpRotationMatrix, this._tmpMovementTranslation), this._tmpMovementTranslation.scaleInPlace(this._xrInput.xrCamera._computeLocalCameraSpeed() * this._featureContext.movementSpeed), this._xrInput.xrCamera.cameraDirection.addInPlace(this._tmpMovementTranslation)); } } _detachController(e) { const t = this._controllers[e]; if (t) { for (const i of t.registeredComponents) i.onAxisChangedObserver && i.component.onAxisValueChangedObservable.remove(i.onAxisChangedObserver), i.onButtonChangedObserver && i.component.onButtonStateChangedObservable.remove(i.onButtonChangedObserver); delete this._controllers[e]; } } } pT.Name = Qs.MOVEMENT; pT.REGISTRATIONS = { default: [ { allowedComponentTypes: [j_.THUMBSTICK_TYPE, j_.TOUCHPAD_TYPE], forceHandedness: "left", axisChangedHandler: (c, e, t) => { e.rotateX = Math.abs(c.x) > t.rotationThreshold ? c.x : 0, e.rotateY = Math.abs(c.y) > t.rotationThreshold ? c.y : 0; } }, { allowedComponentTypes: [j_.THUMBSTICK_TYPE, j_.TOUCHPAD_TYPE], forceHandedness: "right", axisChangedHandler: (c, e, t) => { e.moveX = Math.abs(c.x) > t.movementThreshold ? c.x : 0, e.moveY = Math.abs(c.y) > t.movementThreshold ? 
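// These default registrations map the left thumbstick/touchpad to rotation and the
// right one to movement, each gated by its threshold. A custom mapping can be passed
// via `customRegistrationConfigurations` when enabling the feature. Hedged sketch
// (WebXRControllerComponent and WebXRFeatureName are assumed un-minified names;
// the handler signature mirrors the defaults here):
//
//   const swapped = [
//     { allowedComponentTypes: [WebXRControllerComponent.THUMBSTICK_TYPE],
//       forceHandedness: "right",
//       axisChangedHandler: (axes, state, ctx) => {
//         state.rotateX = Math.abs(axes.x) > ctx.rotationThreshold ? axes.x : 0;
//       } },
//     { allowedComponentTypes: [WebXRControllerComponent.THUMBSTICK_TYPE],
//       forceHandedness: "left",
//       axisChangedHandler: (axes, state, ctx) => {
//         state.moveX = Math.abs(axes.x) > ctx.movementThreshold ? axes.x : 0;
//         state.moveY = Math.abs(axes.y) > ctx.movementThreshold ? axes.y : 0;
//       } },
//   ];
//   featuresManager.enableFeature(WebXRFeatureName.MOVEMENT, "latest",
//     { xrInput: xr.input, customRegistrationConfigurations: swapped });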
c.y : 0; } } ] }; pT.Version = 1; Oo.AddWebXRFeature(pT.Name, (c, e) => () => new pT(c, e), pT.Version, !0); class SO extends Ku { /** * Creates a new instance of the light estimation feature * @param _xrSessionManager an instance of WebXRSessionManager * @param options options to use when constructing this feature */ constructor(e, t) { super(e), this.options = t, this._canvasContext = null, this._reflectionCubeMap = null, this._xrLightEstimate = null, this._xrLightProbe = null, this._xrWebGLBinding = null, this._lightDirection = D.Up().negateInPlace(), this._lightColor = ze.White(), this._intensity = 1, this._sphericalHarmonics = new m5(), this._cubeMapPollTime = Date.now(), this._lightEstimationPollTime = Date.now(), this._reflectionCubeMapTextureSize = 16, this.directionalLight = null, this.onReflectionCubeMapUpdatedObservable = new Fe(), this._updateReflectionCubeMap = () => { var i; if (!this._xrLightProbe) return; if (this.options.cubeMapPollInterval) { const s = Date.now(); if (s - this._cubeMapPollTime < this.options.cubeMapPollInterval) return; this._cubeMapPollTime = s; } const r = this._getXRGLBinding().getReflectionCubeMap(this._xrLightProbe); if (r && this._reflectionCubeMap) { if (this._reflectionCubeMap._texture) (i = this._reflectionCubeMap._texture._hardwareTexture) === null || i === void 0 || i.set(r), this._reflectionCubeMap._texture.getEngine().resetTextureCache(); else { const s = new ln(this._xrSessionManager.scene.getEngine(), ts.Unknown); s.isCube = !0, s.invertY = !1, s._useSRGBBuffer = this.options.reflectionFormat === "srgba8", s.format = 5, s.generateMipMaps = !0, s.type = this.options.reflectionFormat !== "srgba8" ? 2 : 0, s.samplingMode = 3, s.width = this._reflectionCubeMapTextureSize, s.height = this._reflectionCubeMapTextureSize, s._cachedWrapU = 1, s._cachedWrapV = 1, s._hardwareTexture = new BI(r, this._getCanvasContext()), this._reflectionCubeMap._texture = s; } this._reflectionCubeMap._texture.isReady = !0, this.options.disablePreFiltering ? (this._xrSessionManager.scene.markAllMaterialsAsDirty(1), this.onReflectionCubeMapUpdatedObservable.notifyObservers(this._reflectionCubeMap)) : (this._xrLightProbe.removeEventListener("reflectionchange", this._updateReflectionCubeMap), this._hdrFilter.prefilter(this._reflectionCubeMap).then(() => { this._xrSessionManager.scene.markAllMaterialsAsDirty(1), this.onReflectionCubeMapUpdatedObservable.notifyObservers(this._reflectionCubeMap), this._xrLightProbe.addEventListener("reflectionchange", this._updateReflectionCubeMap); })); } }, this.xrNativeFeatureName = "light-estimation", this.options.createDirectionalLightSource && (this.directionalLight = new Pd("light estimation directional", this._lightDirection, this._xrSessionManager.scene), this.directionalLight.position = new D(0, 8, 0), this.directionalLight.intensity = 0, this.directionalLight.falloffType = ia.FALLOFF_GLTF), this._hdrFilter = new pW(this._xrSessionManager.scene.getEngine()), Ve.Warn("light-estimation is an experimental and unstable feature."); } /** * While the estimated cube map is expected to update over time to better reflect the user's environment as they move around those changes are unlikely to happen with every XRFrame. * Since creating and processing the cube map is potentially expensive, especially if mip maps are needed, you can listen to the onReflectionCubeMapUpdatedObservable to determine * when it has been updated. */ get reflectionCubeMapTexture() { return this._reflectionCubeMap; } /** * The most recent light estimate. 
Available starting on the first frame where the device provides a light probe. */ get xrLightingEstimate() { return this._xrLightEstimate ? { lightColor: this._lightColor, lightDirection: this._lightDirection, lightIntensity: this._intensity, sphericalHarmonics: this._sphericalHarmonics } : this._xrLightEstimate; } _getCanvasContext() { return this._canvasContext === null && (this._canvasContext = this._xrSessionManager.scene.getEngine()._gl), this._canvasContext; } _getXRGLBinding() { if (this._xrWebGLBinding === null) { const e = this._getCanvasContext(); this._xrWebGLBinding = new XRWebGLBinding(this._xrSessionManager.session, e); } return this._xrWebGLBinding; } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach() { var e; if (!super.attach()) return !1; const t = (e = this.options.reflectionFormat) !== null && e !== void 0 ? e : this._xrSessionManager.session.preferredReflectionFormat || "srgba8"; return this.options.reflectionFormat = t, this._xrSessionManager.session.requestLightProbe({ reflectionFormat: t }).then((i) => { this._xrLightProbe = i, this.options.disableCubeMapReflection || (this._reflectionCubeMap || (this._reflectionCubeMap = new dn(this._xrSessionManager.scene), this._reflectionCubeMap._isCube = !0, this._reflectionCubeMap.coordinatesMode = 3, this.options.setSceneEnvironmentTexture && (this._xrSessionManager.scene.environmentTexture = this._reflectionCubeMap)), this._xrLightProbe.addEventListener("reflectionchange", this._updateReflectionCubeMap)); }), !0; } /** * detach this feature. * Will usually be called by the features manager * * @returns true if successful. */ detach() { const e = super.detach(); return this._xrLightProbe !== null && !this.options.disableCubeMapReflection && (this._xrLightProbe.removeEventListener("reflectionchange", this._updateReflectionCubeMap), this._xrLightProbe = null), this._canvasContext = null, this._xrLightEstimate = null, this._xrWebGLBinding = null, e; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onReflectionCubeMapUpdatedObservable.clear(), this.directionalLight && (this.directionalLight.dispose(), this.directionalLight = null), this._reflectionCubeMap !== null && (this._reflectionCubeMap._texture && this._reflectionCubeMap._texture.dispose(), this._reflectionCubeMap.dispose(), this._reflectionCubeMap = null); } _onXRFrame(e) { var t; if (this._xrLightProbe !== null) { if (this.options.lightEstimationPollInterval) { const i = Date.now(); if (i - this._lightEstimationPollTime < this.options.lightEstimationPollInterval) return; this._lightEstimationPollTime = i; } if (this._xrLightEstimate = e.getLightEstimate(this._xrLightProbe), this._xrLightEstimate) { this._intensity = Math.max(1, this._xrLightEstimate.primaryLightIntensity.x, this._xrLightEstimate.primaryLightIntensity.y, this._xrLightEstimate.primaryLightIntensity.z); const i = this._xrSessionManager.scene.useRightHandedSystem ? 
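// Usage sketch for this light-estimation feature (hedged: WebXRFeatureName.LIGHT_ESTIMATION
// and featuresManager are assumed un-minified names; option names match this class).
// The feature can drive a directional light from the estimate and feed the reflection
// cube map into the scene environment:
//
//   const lightEstimation = xr.baseExperience.featuresManager.enableFeature(
//     WebXRFeatureName.LIGHT_ESTIMATION, "latest", {
//       createDirectionalLightSource: true,
//       setSceneEnvironmentTexture: true,
//       cubeMapPollInterval: 1000, // ms between reflection cube-map refreshes
//     });
//   lightEstimation.onReflectionCubeMapUpdatedObservable.add(() => {
//     const estimate = lightEstimation.xrLightingEstimate;
//     if (estimate) console.log(estimate.lightIntensity, estimate.lightDirection);
//   });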
1 : -1; this.options.disableVectorReuse && (this._lightDirection = new D(), this._lightColor = new ze(), this.directionalLight && (this.directionalLight.direction = this._lightDirection, this.directionalLight.diffuse = this._lightColor)), this._lightDirection.copyFromFloats(this._xrLightEstimate.primaryLightDirection.x, this._xrLightEstimate.primaryLightDirection.y, this._xrLightEstimate.primaryLightDirection.z * i), this._lightColor.copyFromFloats(this._xrLightEstimate.primaryLightIntensity.x / this._intensity, this._xrLightEstimate.primaryLightIntensity.y / this._intensity, this._xrLightEstimate.primaryLightIntensity.z / this._intensity), this._sphericalHarmonics.updateFromFloatsArray(this._xrLightEstimate.sphericalHarmonicsCoefficients), this._reflectionCubeMap && !this.options.disableSphericalPolynomial && (this._reflectionCubeMap.sphericalPolynomial = this._reflectionCubeMap.sphericalPolynomial || new ax(), (t = this._reflectionCubeMap.sphericalPolynomial) === null || t === void 0 || t.updateFromHarmonics(this._sphericalHarmonics)), this._lightDirection.negateInPlace(), this.directionalLight && (this.directionalLight.direction.copyFrom(this._lightDirection), this.directionalLight.intensity = Math.min(this._intensity, 1), this.directionalLight.diffuse.copyFrom(this._lightColor)); } } } } SO.Name = Qs.LIGHT_ESTIMATION; SO.Version = 1; Oo.AddWebXRFeature(SO.Name, (c, e) => () => new SO(c, e), SO.Version, !1); class MO extends Ku { /** * Creates a new instance of the XR eye tracking feature. * @param _xrSessionManager An instance of WebXRSessionManager. */ constructor(e) { super(e), this.onEyeTrackingStartedObservable = new Fe(), this.onEyeTrackingEndedObservable = new Fe(), this.onEyeTrackingFrameUpdateObservable = new Fe(), this._eyeTrackingStartListener = (t) => { this._latestEyeSpace = t.gazeSpace, this._gazeRay = new gs(D.Zero(), D.Forward()), this.onEyeTrackingStartedObservable.notifyObservers(this._gazeRay); }, this._eyeTrackingEndListener = () => { this._latestEyeSpace = null, this._gazeRay = null, this.onEyeTrackingEndedObservable.notifyObservers(); }, this.xrNativeFeatureName = "eye-tracking", this._xrSessionManager.session ? this._init() : this._xrSessionManager.onXRSessionInit.addOnce(() => { this._init(); }); } /** * Dispose this feature and all of the resources attached. */ dispose() { super.dispose(), this._xrSessionManager.session.removeEventListener("eyetrackingstart", this._eyeTrackingStartListener), this._xrSessionManager.session.removeEventListener("eyetrackingend", this._eyeTrackingEndListener), this.onEyeTrackingStartedObservable.clear(), this.onEyeTrackingEndedObservable.clear(), this.onEyeTrackingFrameUpdateObservable.clear(); } /** * Returns whether the gaze data is valid or not * @returns true if the data is valid */ get isEyeGazeValid() { return !!this._gazeRay; } /** * Get a reference to the gaze ray. This data is valid while eye tracking persists, and will be set to null when gaze data is no longer available * @returns a reference to the gaze ray if it exists and is valid, returns null otherwise. */ getEyeGaze() { return this._gazeRay; } _onXRFrame(e) { if (!(!this.attached || !e) && this._latestEyeSpace && this._gazeRay) { const t = e.getPose(this._latestEyeSpace, this._xrSessionManager.referenceSpace); if (t) { this._gazeRay.origin.set(t.transform.position.x, t.transform.position.y, t.transform.position.z); const i = t.transform.orientation; de.Quaternion[0].set(i.x, i.y, i.z, i.w), this._xrSessionManager.scene.useRightHandedSystem ? 
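// Usage sketch for this eye-tracking feature (hedged: WebXRFeatureName.EYE_TRACKING
// and featuresManager are assumed un-minified names; it requires a runtime that emits
// the eyetrackingstart/eyetrackingend events handled by this class):
//
//   const eyeTracking = xr.baseExperience.featuresManager.enableFeature(
//     WebXRFeatureName.EYE_TRACKING, "latest");
//   eyeTracking.onEyeTrackingFrameUpdateObservable.add((gazeRay) => {
//     // gazeRay.origin / gazeRay.direction are refreshed every XR frame
//     const hit = scene.pickWithRay(gazeRay);
//     if (hit?.pickedMesh) console.log("looking at", hit.pickedMesh.name);
//   });
//   // or poll: if (eyeTracking.isEyeGazeValid) use eyeTracking.getEyeGaze();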
D.RightHandedForwardReadOnly.rotateByQuaternionToRef(de.Quaternion[0], this._gazeRay.direction) : (this._gazeRay.origin.z *= -1, de.Quaternion[0].z *= -1, de.Quaternion[0].w *= -1, D.LeftHandedForwardReadOnly.rotateByQuaternionToRef(de.Quaternion[0], this._gazeRay.direction)), this.onEyeTrackingFrameUpdateObservable.notifyObservers(this._gazeRay); } } } _init() { this._xrSessionManager.isNative && (this._xrSessionManager.session.addEventListener("eyetrackingstart", this._eyeTrackingStartListener), this._xrSessionManager.session.addEventListener("eyetrackingend", this._eyeTrackingEndListener)); } } MO.Name = Qs.EYE_TRACKING; MO.Version = 1; Oo.AddWebXRFeature(MO.Name, (c) => () => new MO(c), MO.Version, !1); class Bve { constructor(e, t) { this._samples = [], this._idx = 0; for (let i = 0; i < e; ++i) this._samples.push(t ? t() : at.Zero()); } get length() { return this._samples.length; } push(e, t) { this._idx = (this._idx + this._samples.length - 1) % this._samples.length, this.at(0).copyFromFloats(e, t); } at(e) { if (e >= this._samples.length) throw new Error("Index out of bounds"); return this._samples[(this._idx + e) % this._samples.length]; } } class Uve { constructor() { this._samples = new Bve(20), this._entropy = 0, this.onFirstStepDetected = new Fe(); } update(e, t, i, r) { this._samples.push(e, t); const s = this._samples.at(0); if (this._entropy *= this._entropyDecayFactor, this._entropy += at.Distance(s, this._samples.at(1)), this._entropy > this._entropyThreshold) return; let n; for (n = this._samePointCheckStartIdx; n < this._samples.length && !(at.DistanceSquared(s, this._samples.at(n)) < this._samePointSquaredDistanceThreshold); ++n) ; if (n === this._samples.length) return; let a = -1, l = 0; for (let b, S = 1; S < n; ++S) b = at.DistanceSquared(s, this._samples.at(S)), b > a && (l = S, a = b); if (a < this._apexSquaredDistanceThreshold) return; const o = this._samples.at(l), u = o.subtract(s); u.normalize(); const h = de.Vector2[0]; let d, f, p = 0; for (let b = 1; b < n; ++b) f = this._samples.at(b), f.subtractToRef(s, h), d = at.Dot(u, h), p += h.lengthSquared() - d * d; if (p > n * this._squaredProjectionDistanceThreshold) return; const m = de.Vector3[0]; m.set(i, r, 0); const _ = de.Vector3[1]; _.set(u.x, u.y, 0); const v = D.Cross(m, _).z > 0, C = s.clone(), x = s.clone(); o.subtractToRef(s, u), v ? (u.scaleAndAddToRef(this._axisToApexShrinkFactor, C), u.scaleAndAddToRef(this._axisToApexExtendFactor, x)) : (u.scaleAndAddToRef(this._axisToApexExtendFactor, C), u.scaleAndAddToRef(this._axisToApexShrinkFactor, x)), this.onFirstStepDetected.notifyObservers({ leftApex: C, rightApex: x, currentPosition: s, currentStepDirection: v ? 
"right" : "left" }); } reset() { for (let e = 0; e < this._samples.length; ++e) this._samples.at(e).copyFromFloats(0, 0); } get _samePointCheckStartIdx() { return Math.floor(this._samples.length / 3); } get _samePointSquaredDistanceThreshold() { return 0.03 * 0.03; } get _apexSquaredDistanceThreshold() { return 0.09 * 0.09; } get _squaredProjectionDistanceThreshold() { return 0.03 * 0.03; } get _axisToApexShrinkFactor() { return 0.8; } get _axisToApexExtendFactor() { return -1.6; } get _entropyDecayFactor() { return 0.93; } get _entropyThreshold() { return 0.4; } } class Vve { constructor(e, t, i, r) { this._leftApex = new at(), this._rightApex = new at(), this._currentPosition = new at(), this._axis = new at(), this._axisLength = -1, this._forward = new at(), this._steppingLeft = !1, this._t = -1, this._maxT = -1, this._maxTPosition = new at(), this._vitality = 0, this.onMovement = new Fe(), this.onFootfall = new Fe(), this._reset(e, t, i, r === "left"); } _reset(e, t, i, r) { this._leftApex.copyFrom(e), this._rightApex.copyFrom(t), this._steppingLeft = r, this._steppingLeft ? (this._leftApex.subtractToRef(this._rightApex, this._axis), this._forward.copyFromFloats(-this._axis.y, this._axis.x)) : (this._rightApex.subtractToRef(this._leftApex, this._axis), this._forward.copyFromFloats(this._axis.y, -this._axis.x)), this._axisLength = this._axis.length(), this._forward.scaleInPlace(1 / this._axisLength), this._updateTAndVitality(i.x, i.y), this._maxT = this._t, this._maxTPosition.copyFrom(i), this._vitality = 1; } _updateTAndVitality(e, t) { this._currentPosition.copyFromFloats(e, t), this._steppingLeft ? this._currentPosition.subtractInPlace(this._rightApex) : this._currentPosition.subtractInPlace(this._leftApex); const i = this._t, r = at.Dot(this._currentPosition, this._axis); this._t = r / (this._axisLength * this._axisLength); const s = this._currentPosition.lengthSquared() - r / this._axisLength * (r / this._axisLength); this._vitality *= 0.92 - 100 * Math.max(s - 16e-4, 0) + Math.max(this._t - i, 0); } update(e, t) { if (this._vitality < this._vitalityThreshold) return !1; const i = this._t; return this._updateTAndVitality(e, t), this._t > this._maxT && (this._maxT = this._t, this._maxTPosition.copyFromFloats(e, t)), !(this._vitality < this._vitalityThreshold || (this._t > i && (this.onMovement.notifyObservers({ deltaT: this._t - i }), i < 0.5 && this._t >= 0.5 && this.onFootfall.notifyObservers({ foot: this._steppingLeft ? "left" : "right" })), this._t < 0.95 * this._maxT && (this._currentPosition.copyFromFloats(e, t), this._steppingLeft ? 
this._leftApex.copyFrom(this._maxTPosition) : this._rightApex.copyFrom(this._maxTPosition), this._reset(this._leftApex, this._rightApex, this._currentPosition, !this._steppingLeft)), this._axisLength < 0.03)); } get _vitalityThreshold() { return 0.1; } get forward() { return this._forward; } } class X9 { static get _MillisecondsPerUpdate() { return 1e3 / 15; } constructor(e) { this._detector = new Uve(), this._walker = null, this._movement = new at(), this._millisecondsSinceLastUpdate = X9._MillisecondsPerUpdate, this.movementThisFrame = D.Zero(), this._engine = e, this._detector.onFirstStepDetected.add((t) => { this._walker || (this._walker = new Vve(t.leftApex, t.rightApex, t.currentPosition, t.currentStepDirection), this._walker.onFootfall.add(() => { Ce.Log("Footfall!"); }), this._walker.onMovement.add((i) => { this._walker.forward.scaleAndAddToRef(0.024 * i.deltaT, this._movement); })); }); } update(e, t) { t.y = 0, t.normalize(), this._millisecondsSinceLastUpdate += this._engine.getDeltaTime(), this._millisecondsSinceLastUpdate >= X9._MillisecondsPerUpdate && (this._millisecondsSinceLastUpdate -= X9._MillisecondsPerUpdate, this._detector.update(e.x, e.z, t.x, t.z), this._walker && (this._walker.update(e.x, e.z) || (this._walker = null)), this._movement.scaleInPlace(0.85)), this.movementThisFrame.set(this._movement.x, 0, this._movement.y); } } class $F extends Ku { /** * The module's name. */ static get Name() { return Qs.WALKING_LOCOMOTION; } /** * The (Babylon) version of this module. * This is an integer representing the implementation version. * This number has no external basis. */ static get Version() { return 1; } /** * The target to be articulated by walking locomotion. * When the walking locomotion feature detects walking in place, this element's * X and Z coordinates will be modified to reflect locomotion. This target should * be either the XR space's origin (i.e., the parent node of the WebXRCamera) or * the WebXRCamera itself. Note that the WebXRCamera path will modify the position * of the WebXRCamera directly and is thus discouraged. */ get locomotionTarget() { return this._locomotionTarget; } /** * The target to be articulated by walking locomotion. * When the walking locomotion feature detects walking in place, this element's * X and Z coordinates will be modified to reflect locomotion. This target should * be either the XR space's origin (i.e., the parent node of the WebXRCamera) or * the WebXRCamera itself. Note that the WebXRCamera path will modify the position * of the WebXRCamera directly and is thus discouraged. */ set locomotionTarget(e) { this._locomotionTarget = e, this._isLocomotionTargetWebXRCamera = this._locomotionTarget.getClassName() === "WebXRCamera"; } /** * Construct a new Walking Locomotion feature. * @param sessionManager manager for the current XR session * @param options creation options, prominently including the vector target for locomotion */ constructor(e, t) { super(e), this._up = new D(), this._forward = new D(), this._position = new D(), this._movement = new D(), this._sessionManager = e, this.locomotionTarget = t.locomotionTarget, this._isLocomotionTargetWebXRCamera && Ce.Warn("Using walking locomotion directly on a WebXRCamera may have unintended interactions with other XR techniques. Using an XR space parent is highly recommended"); } /** * Checks whether this feature is compatible with the current WebXR session. * Walking locomotion is only compatible with "immersive-vr" sessions. 
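*
* Usage sketch (hedged: WebXRWalkingLocomotion / WebXRFeatureName.WALKING_LOCOMOTION
* and the TransformNode parenting pattern are assumptions about the un-minified API).
* As noted above, targeting a parent node of the WebXRCamera is preferred over the
* camera itself:
*
*   const xrRoot = new TransformNode("xrRoot", scene);
*   xr.baseExperience.camera.parent = xrRoot;
*   xr.baseExperience.featuresManager.enableFeature(
*       WebXRFeatureName.WALKING_LOCOMOTION, "latest",
*       { locomotionTarget: xrRoot });
*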
* @returns true if compatible, false otherwise */ isCompatible() { return this._sessionManager.sessionMode === void 0 || this._sessionManager.sessionMode === "immersive-vr"; } /** * Attaches the feature. * Typically called automatically by the features manager. * @returns true if attach succeeded, false otherwise */ attach() { return !this.isCompatible || !super.attach() ? !1 : (this._walker = new X9(this._sessionManager.scene.getEngine()), !0); } /** * Detaches the feature. * Typically called automatically by the features manager. * @returns true if detach succeeded, false otherwise */ detach() { return super.detach() ? (this._walker = null, !0) : !1; } _onXRFrame(e) { const t = e.getViewerPose(this._sessionManager.baseReferenceSpace); if (!t) return; const i = this.locomotionTarget.getScene().useRightHandedSystem ? 1 : -1, r = t.transform.matrix; this._up.copyFromFloats(r[4], r[5], i * r[6]), this._forward.copyFromFloats(r[8], r[9], i * r[10]), this._position.copyFromFloats(r[12], r[13], i * r[14]), this._forward.scaleAndAddToRef(0.05, this._position), this._up.scaleAndAddToRef(-0.05, this._position), this._walker.update(this._position, this._forward), this._movement.copyFrom(this._walker.movementThisFrame), this._isLocomotionTargetWebXRCamera || D.TransformNormalToRef(this._movement, this.locomotionTarget.getWorldMatrix(), this._movement), this.locomotionTarget.position.addInPlace(this._movement); } } Oo.AddWebXRFeature($F.Name, (c, e) => () => new $F(c, e), $F.Version, !1); class tae extends LK { constructor(e, t, i, r, s, n) { super(e, t, i, r, n), this.getWidth = e, this.getHeight = t, this.layer = i, this.layerType = r, this.isMultiview = s, this.createRTTProvider = n; } } class kve extends NK { constructor(e, t, i) { super(e.scene, i), this._xrSessionManager = e, this._xrWebGLBinding = t, this.layerWrapper = i, this._lastSubImages = /* @__PURE__ */ new Map(), this._compositionLayer = i.layer; } _getRenderTargetForSubImage(e, t) { var i, r, s, n; const a = this._lastSubImages.get(t), l = t == "left" ? 0 : 1, o = (i = e.colorTextureWidth) !== null && i !== void 0 ? i : e.textureWidth, u = (r = e.colorTextureHeight) !== null && r !== void 0 ? r : e.textureHeight; if (!this._renderTargetTextures[l] || (a == null ? void 0 : a.textureWidth) !== o || (a == null ? void 0 : a.textureHeight) !== u) { let h; const d = (s = e.depthStencilTextureWidth) !== null && s !== void 0 ? s : o, f = (n = e.depthStencilTextureHeight) !== null && n !== void 0 ? n : u; (o === d || u === f) && (h = e.depthStencilTexture), this._renderTargetTextures[l] = this._createRenderTargetTexture(o, u, null, e.colorTexture, h, this.layerWrapper.isMultiview), this._framebufferDimensions = { framebufferWidth: o, framebufferHeight: u }; } return this._lastSubImages.set(t, e), this._renderTargetTextures[l]; } _getSubImageForEye(e) { const t = this._xrSessionManager.currentFrame; return t ? this._xrWebGLBinding.getSubImage(this._compositionLayer, t, e) : null; } getRenderTargetTextureForEye(e) { const t = this._getSubImageForEye(e); return t ? this._getRenderTargetForSubImage(t, e) : null; } getRenderTargetTextureForView(e) { return this.getRenderTargetTextureForEye(e.eye); } _setViewportForSubImage(e, t) { var i, r; const s = (i = t.colorTextureWidth) !== null && i !== void 0 ? i : t.textureWidth, n = (r = t.colorTextureWidth) !== null && r !== void 0 ? 
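// Usage sketch for the layers feature these wrappers serve (hedged: WebXRFeatureName.LAYERS
// and featuresManager are assumed un-minified names). Enabling it replaces the default
// XRWebGLLayer with an XRProjectionLayer, optionally backed by multiview texture arrays:
//
//   const layers = xr.baseExperience.featuresManager.enableFeature(
//     WebXRFeatureName.LAYERS, "latest", { preferMultiviewOnInit: true });
//   // additional layers can be created and appended once the session is running:
//   // layers.addXRSessionLayer(layers.createXRWebGLLayer());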
r : t.textureHeight, a = t.viewport; e.x = a.x / s, e.y = a.y / n, e.width = a.width / s, e.height = a.height / n; } trySetViewportForView(e, t) { const i = this._lastSubImages.get(t.eye) || this._getSubImageForEye(t.eye); return i ? (this._setViewportForSubImage(e, i), !0) : !1; } } class iae extends tae { constructor(e, t, i) { super(() => e.textureWidth, () => e.textureHeight, e, "XRProjectionLayer", t, (r) => new zve(r, i, this)), this.layer = e; } } class zve extends kve { constructor(e, t, i) { super(e, t, i), this.layerWrapper = i, this._projectionLayer = i.layer; } _getSubImageForView(e) { return this._xrWebGLBinding.getViewSubImage(this._projectionLayer, e); } getRenderTargetTextureForView(e) { return this._getRenderTargetForSubImage(this._getSubImageForView(e), e.eye); } getRenderTargetTextureForEye(e) { const t = this._lastSubImages.get(e); return t ? this._getRenderTargetForSubImage(t, e) : null; } trySetViewportForView(e, t) { const i = this._lastSubImages.get(t.eye) || this._getSubImageForView(t); return i ? (this._setViewportForSubImage(e, i), !0) : !1; } } const Hve = {}, vq = { textureType: "texture", colorFormat: 6408, depthFormat: 35056, scaleFactor: 1 }; class RO extends Ku { constructor(e, t = {}) { super(e), this._options = t, this._existingLayers = [], this.xrNativeFeatureName = "layers"; } /** * Attach this feature. * Will usually be called by the features manager. * * @returns true if successful. */ attach() { if (!super.attach()) return !1; const e = this._xrSessionManager.scene.getEngine(); this._glContext = e._gl, this._xrWebGLBinding = new XRWebGLBinding(this._xrSessionManager.session, this._glContext), this._existingLayers.length = 0; const t = Object.assign({}, vq), i = this._options.preferMultiviewOnInit && e.getCaps().multiview; return i && (t.textureType = "texture-array"), this.addXRSessionLayer(this.createProjectionLayer(t, i)), !0; } detach() { return super.detach() ? (this._existingLayers.length = 0, !0) : !1; } /** * Creates a new XRWebGLLayer. * @param params an object providing configuration options for the new XRWebGLLayer * @returns the XRWebGLLayer */ createXRWebGLLayer(e = Hve) { const t = new XRWebGLLayer(this._xrSessionManager.session, this._glContext, e); return new FK(t); } /** * Creates a new XRProjectionLayer. * @param params an object providing configuration options for the new XRProjectionLayer. * @param multiview whether the projection layer should render with multiview. * @returns the projection layer */ createProjectionLayer(e = vq, t = !1) { if (t && e.textureType !== "texture-array") throw new Error("Projection layers can only be made multiview if they use texture arrays. Set the textureType parameter to 'texture-array'."); if (!t && e.textureType === "texture-array") throw new Error("We currently only support multiview rendering when the textureType parameter is set to 'texture-array'."); const i = this._xrWebGLBinding.createProjectionLayer(e); return new iae(i, t, this._xrWebGLBinding); } /** * Add a new layer to the already-existing list of layers * @param wrappedLayer the new layer to add to the existing ones */ addXRSessionLayer(e) { this.setXRSessionLayers([...this._existingLayers, e]); } /** * Sets the layers to be used by the XR session. * Note that you must call this function with any layers you wish to render to * since it adds them to the XR session's render state * (replacing any layers that were added in a previous call to setXRSessionLayers or updateRenderState). 
* This method also sets up the session manager's render target texture provider * as the first layer in the array, which feeds the WebXR camera(s) attached to the session. * @param wrappedLayers An array of WebXRLayerWrapper, usually returned from the WebXRLayers createLayer functions. */ setXRSessionLayers(e) { this._existingLayers = e; const t = Object.assign({}, this._xrSessionManager.session.renderState); t.baseLayer = void 0, t.layers = e.map((i) => i.layer), this._xrSessionManager.updateRenderState(t), this._xrSessionManager._setBaseLayerWrapper(e.length > 0 ? e[0] : null); } isCompatible() { return !this._xrSessionManager.isNative && typeof XRWebGLBinding < "u" && !!XRWebGLBinding.prototype.createProjectionLayer; } /** * Dispose this feature and all of the resources attached. */ dispose() { super.dispose(); } _onXRFrame(e) { } } RO.Name = Qs.LAYERS; RO.Version = 1; Oo.AddWebXRFeature(RO.Name, (c, e) => () => new RO(c, e), RO.Version, !1); class PO extends Ku { /** * Width of depth data. If depth data is not exist, returns null. */ get width() { return this._width; } /** * Height of depth data. If depth data is not exist, returns null. */ get height() { return this._height; } /** * Scale factor by which the raw depth values must be multiplied in order to get the depths in meters. */ get rawValueToMeters() { return this._rawValueToMeters; } /** * An XRRigidTransform that needs to be applied when indexing into the depth buffer. */ get normDepthBufferFromNormView() { return this._normDepthBufferFromNormView; } /** * Describes which depth-sensing usage ("cpu" or "gpu") is used. */ get depthUsage() { switch (this._xrSessionManager.session.depthUsage) { case "cpu-optimized": return "cpu"; case "gpu-optimized": return "gpu"; } } /** * Describes which depth sensing data format ("ushort" or "float") is used. */ get depthDataFormat() { switch (this._xrSessionManager.session.depthDataFormat) { case "luminance-alpha": return "ushort"; case "float32": return "float"; } } /** * Latest cached InternalTexture which containing depth buffer information. * This can be used when the depth usage is "gpu". */ get latestInternalTexture() { var e, t; if (!this._cachedWebGLTexture) return null; const i = this._xrSessionManager.scene.getEngine(), r = new ln(i, ts.Unknown); return r.isCube = !1, r.invertY = !1, r._useSRGBBuffer = !1, r.format = this.depthDataFormat === "ushort" ? 2 : 5, r.generateMipMaps = !1, r.type = this.depthDataFormat === "ushort" ? 5 : 1, r.samplingMode = 7, r.width = (e = this.width) !== null && e !== void 0 ? e : 0, r.height = (t = this.height) !== null && t !== void 0 ? t : 0, r._cachedWrapU = 1, r._cachedWrapV = 1, r._hardwareTexture = new BI(this._cachedWebGLTexture, i._gl), r; } /** * cached depth buffer */ get latestDepthBuffer() { return this._cachedDepthBuffer ? this.depthDataFormat === "ushort" ? new Uint16Array(this._cachedDepthBuffer) : new Float32Array(this._cachedDepthBuffer) : null; } /** * Latest cached Texture of depth image which is made from the depth buffer data. 
*/ get latestDepthImageTexture() { return this._cachedDepthImageTexture; } /** * Creates a new instance of the depth sensing feature * @param _xrSessionManager the WebXRSessionManager * @param options options for WebXR Depth Sensing Feature */ constructor(e, t) { super(e), this.options = t, this._width = null, this._height = null, this._rawValueToMeters = null, this._normDepthBufferFromNormView = null, this._cachedDepthBuffer = null, this._cachedWebGLTexture = null, this._cachedDepthImageTexture = null, this.onGetDepthInMetersAvailable = new Fe(), this.xrNativeFeatureName = "depth-sensing", Ve.Warn("depth-sensing is an experimental and unstable feature."); } /** * attach this feature * Will usually be called by the features manager * * @returns true if successful. */ attach(e) { return !super.attach(e) || this._xrSessionManager.session.depthDataFormat == null || this._xrSessionManager.session.depthUsage == null ? !1 : (this._glBinding = new XRWebGLBinding(this._xrSessionManager.session, this._xrSessionManager.scene.getEngine()._gl), !0); } /** * Dispose this feature and all of the resources attached */ dispose() { var e; (e = this._cachedDepthImageTexture) === null || e === void 0 || e.dispose(); } _onXRFrame(e) { const t = this._xrSessionManager.referenceSpace, i = e.getViewerPose(t); if (i != null) for (const r of i.views) switch (this.depthUsage) { case "cpu": this._updateDepthInformationAndTextureCPUDepthUsage(e, r, this.depthDataFormat); break; case "gpu": if (!this._glBinding) break; this._updateDepthInformationAndTextureWebGLDepthUsage(this._glBinding, r, this.depthDataFormat); break; default: Ve.Error("Unknown depth usage"), this.detach(); break; } } _updateDepthInformationAndTextureCPUDepthUsage(e, t, i) { const r = e.getDepthInformation(t); if (r === null) return; const { data: s, width: n, height: a, rawValueToMeters: l, getDepthInMeters: o } = r; switch (this._width = n, this._height = a, this._rawValueToMeters = l, this._cachedDepthBuffer = s, this.onGetDepthInMetersAvailable.notifyObservers(o.bind(r)), this._cachedDepthImageTexture || (this._cachedDepthImageTexture = Po.CreateRTexture(null, n, a, this._xrSessionManager.scene, !1, !0, De.NEAREST_SAMPLINGMODE, $e.TEXTURETYPE_FLOAT)), i) { case "ushort": this._cachedDepthImageTexture.update(Float32Array.from(new Uint16Array(s)).map((u) => u * l)); break; case "float": this._cachedDepthImageTexture.update(new Float32Array(s).map((u) => u * l)); break; } } _updateDepthInformationAndTextureWebGLDepthUsage(e, t, i) { const r = e.getDepthInformation(t); if (r === null) return; const { texture: s, width: n, height: a } = r; this._width = n, this._height = a, this._cachedWebGLTexture = s; const l = this._xrSessionManager.scene, u = l.getEngine().wrapWebGLTexture(s); this._cachedDepthImageTexture || (this._cachedDepthImageTexture = Po.CreateRTexture(null, n, a, l, !1, !0, De.NEAREST_SAMPLINGMODE, i === "ushort" ? $e.TEXTURETYPE_UNSIGNED_BYTE : $e.TEXTURETYPE_FLOAT)), this._cachedDepthImageTexture._texture = u; } /** * Extends the session init object if needed * @returns augmentation object for the xr session init object. 
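*
* Usage sketch (hedged: WebXRFeatureName.DEPTH_SENSING and featuresManager are assumed
* un-minified names; the preferences are negotiated with the UA at session creation,
* which is why this method returns them as a session-init extension):
*
*   const depthSensing = xr.baseExperience.featuresManager.enableFeature(
*       WebXRFeatureName.DEPTH_SENSING, "latest", {
*           usagePreference: ["cpu", "gpu"],
*           dataFormatPreference: ["ushort", "float"],
*       });
*   depthSensing.onGetDepthInMetersAvailable.add((getDepthInMeters) => {
*       // coordinates are normalized view coordinates
*       console.log("depth at view center:", getDepthInMeters(0.5, 0.5));
*   });
*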
*/ getXRSessionInitExtension() { const e = this.options.usagePreference != null && this.options.usagePreference.length !== 0, t = this.options.dataFormatPreference != null && this.options.dataFormatPreference.length !== 0; return new Promise((i) => { if (e && t) { const r = this.options.usagePreference.map((n) => { switch (n) { case "cpu": return "cpu-optimized"; case "gpu": return "gpu-optimized"; } }), s = this.options.dataFormatPreference.map((n) => { switch (n) { case "ushort": return "luminance-alpha"; case "float": return "float32"; } }); i({ depthSensing: { usagePreference: r, dataFormatPreference: s } }); } else i({}); }); } } PO.Name = Qs.DEPTH_SENSING; PO.Version = 1; Oo.AddWebXRFeature(PO.Name, (c, e) => () => new PO(c, e), PO.Version, !1); const Gve = "velocityPixelShader", Kve = `precision highp float; #define CUSTOM_FRAGMENT_BEGIN varying vec4 clipPos;varying vec4 previousClipPos; #define CUSTOM_FRAGMENT_DEFINITIONS void main(void) { #define CUSTOM_FRAGMENT_MAIN_BEGIN highp vec4 motionVector=( clipPos/clipPos.w-previousClipPos/previousClipPos.w );gl_FragColor=motionVector; #define CUSTOM_FRAGMENT_MAIN_END }`; je.ShadersStore[Gve] = Kve; const Wve = "velocityVertexShader", jve = `#define CUSTOM_VERTEX_BEGIN #define VELOCITY attribute vec3 position; #include uniform mat4 viewProjection;uniform mat4 previousViewProjection; #ifdef MULTIVIEW uniform mat4 viewProjectionR;uniform mat4 previousViewProjectionR; #endif varying vec4 clipPos;varying vec4 previousClipPos; #define CUSTOM_VERTEX_DEFINITIONS void main(void) { #define CUSTOM_VERTEX_MAIN_BEGIN vec3 positionUpdated=position; #include vec4 worldPos=finalWorld*vec4(positionUpdated,1.0);vec4 previousWorldPos=finalPreviousWorld*vec4(positionUpdated,1.0); #ifdef MULTIVIEW if (gl_ViewID_OVR==0u) {clipPos=viewProjection*worldPos;previousClipPos=previousViewProjection*previousWorldPos;gl_Position=clipPos;} else {clipPos=viewProjectionR*worldPos;previousClipPos=previousViewProjectionR*previousWorldPos;gl_Position=clipPos;} #elif clipPos=viewProjection*worldPos;previousClipPos=previousViewProjection*previousWorldPos;gl_Position=clipPos; #endif #define CUSTOM_VERTEX_MAIN_END }`; je.ShadersStore[Wve] = jve; class rae extends ra { /** * Creates a Space Warp render target * @param motionVectorTexture WebGLTexture provided by WebGLSubImage * @param depthStencilTexture WebGLTexture provided by WebGLSubImage * @param scene scene used with the render target * @param size the size of the render target (used for each view) */ constructor(e, t, i, r = 512) { super("spacewarp rtt", r, i, !1, !0, 2, !1, void 0, !1, !1, !0, void 0, !0), this._originalPairing = [], this._previousWorldMatrices = [], this._previousTransforms = [Ae.Identity(), Ae.Identity()], this._renderTarget = this.getScene().getEngine().createMultiviewRenderTargetTexture(this.getRenderWidth(), this.getRenderHeight(), e, t), this._renderTarget._disposeOnlyFramebuffers = !0, this._texture = this._renderTarget.texture, this._texture.isMultiview = !0, this._texture.format = 5, i && (this._velocityMaterial = new Lo("velocity shader material", i, { vertex: "velocity", fragment: "velocity" }, { uniforms: ["world", "previousWorld", "viewProjection", "viewProjectionR", "previousViewProjection", "previousViewProjectionR"] }), this._velocityMaterial._materialHelperNeedsPreviousMatrices = !0, this._velocityMaterial.onBindObservable.add((s) => { this._previousWorldMatrices[s.uniqueId] = this._previousWorldMatrices[s.uniqueId] || s.getWorldMatrix(), 
this._velocityMaterial.getEffect().setMatrix("previousWorld", this._previousWorldMatrices[s.uniqueId]), this._previousWorldMatrices[s.uniqueId] = s.getWorldMatrix(), this._velocityMaterial.getEffect().setMatrix("previousViewProjection", this._previousTransforms[0]), this._velocityMaterial.getEffect().setMatrix("previousViewProjectionR", this._previousTransforms[1]), this._previousTransforms[0].copyFrom(i.getTransformMatrix()), this._previousTransforms[1].copyFrom(i._transformMatrixR); }), this._velocityMaterial.freeze()); } /** * {@inheritDoc} */ render(e = !1, t = !1) { this._originalPairing.length = 0; const i = this.getScene(); i && this._velocityMaterial && i.getActiveMeshes().forEach((r) => { this._originalPairing.push([r, r.material]), r.material = this._velocityMaterial; }), super.render(e, t), this._originalPairing.forEach((r) => { r[0].material = r[1]; }); } /** * @internal */ _bindFrameBuffer() { this._renderTarget && this.getScene().getEngine().bindSpaceWarpFramebuffer(this._renderTarget); } /** * Gets the number of views the corresponding to the texture (eg. a SpaceWarpRenderTarget will have > 1) * @returns the view count */ getViewCount() { return 2; } /** * {@inheritdoc} */ dispose() { super.dispose(), this._velocityMaterial.dispose(), this._previousTransforms.length = 0, this._previousWorldMatrices.length = 0, this._originalPairing.length = 0; } } class sae { constructor(e, t, i) { this._scene = e, this._xrSessionManager = t, this._xrWebGLBinding = i, this._lastSubImages = /* @__PURE__ */ new Map(), this._renderTargetTextures = /* @__PURE__ */ new Map(), this._engine = e.getEngine(); } _getSubImageForView(e) { const t = this._xrSessionManager._getBaseLayerWrapper(); if (!t) throw new Error("For Space Warp, the base layer should be a WebXR Projection Layer."); if (t.layerType !== "XRProjectionLayer") throw new Error('For Space Warp, the base layer type should "XRProjectionLayer".'); const i = t.layer; return this._xrWebGLBinding.getViewSubImage(i, e); } _setViewportForSubImage(e, t) { e.x = 0, e.y = 0, e.width = t.motionVectorTextureWidth, e.height = t.motionVectorTextureHeight; } _createRenderTargetTexture(e, t, i, r, s) { if (!this._engine) throw new Error("Engine is disposed"); const n = { width: e, height: t }, a = new rae(r, s, this._scene, n), l = a.renderTarget; return i && (l._framebuffer = i), l._colorTextureArray = r, l._depthStencilTextureArray = s, a.disableRescaling(), a.renderListPredicate = () => !0, a; } _getRenderTargetForSubImage(e, t) { const i = this._lastSubImages.get(t); let r = this._renderTargetTextures.get(t.eye); const s = e.motionVectorTextureWidth, n = e.motionVectorTextureHeight; return (!r || (i == null ? void 0 : i.textureWidth) !== s || (i == null ? void 0 : i.textureHeight) != n) && (r = this._createRenderTargetTexture(s, n, null, e.motionVectorTexture, e.depthStencilTexture), this._renderTargetTextures.set(t.eye, r), this._framebufferDimensions = { framebufferWidth: s, framebufferHeight: n }), this._lastSubImages.set(t, e), r; } /** * {@inheritDoc} */ trySetViewportForView(e, t) { const i = this._lastSubImages.get(t) || this._getSubImageForView(t); return i ? 
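// Usage sketch for the space-warp feature that uses this render-target provider
// (hedged: WebXRFeatureName.SPACE_WARP / WebXRFeatureName.LAYERS and featuresManager
// are assumed un-minified names). The feature declares a dependency on the layers
// feature, so the sketch enables both, with multiview projection layers:
//
//   const fm = xr.baseExperience.featuresManager;
//   fm.enableFeature(WebXRFeatureName.LAYERS, "latest", { preferMultiviewOnInit: true });
//   fm.enableFeature(WebXRFeatureName.SPACE_WARP, "latest");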
(this._setViewportForSubImage(e, i), !0) : !1; } /** * Access the motion vector (which will turn on Space Warp) * @param view the view to access the motion vector texture for */ accessMotionVector(e) { const t = this._getSubImageForView(e); t && (t.motionVectorTexture, t.depthStencilTexture); } /** * {@inheritDoc} */ getRenderTargetTextureForEye(e) { return null; } /** * {@inheritDoc} */ getRenderTargetTextureForView(e) { const t = this._getSubImageForView(e); return t ? this._getRenderTargetForSubImage(t, e) : null; } /** * {@inheritDoc} */ dispose() { this._renderTargetTextures.forEach((e) => e.dispose()), this._renderTargetTextures.clear(); } } class IO extends Ku { /** * constructor for the space warp feature * @param _xrSessionManager the xr session manager for this feature */ constructor(e) { super(e), this._onAfterRenderObserver = null, this.dependsOn = [Qs.LAYERS], this.xrNativeFeatureName = "space-warp", this._xrSessionManager.scene.needsPreviousWorldMatrices = !0; } /** * Attach this feature. * Will usually be called by the features manager. * * @returns true if successful. */ attach() { if (!super.attach()) return !1; const e = this._xrSessionManager.scene.getEngine(); return this._glContext = e._gl, this._xrWebGLBinding = new XRWebGLBinding(this._xrSessionManager.session, this._glContext), this.spaceWarpRTTProvider = new sae(this._xrSessionManager.scene, this._xrSessionManager, this._xrWebGLBinding), this._onAfterRenderObserver = this._xrSessionManager.scene.onAfterRenderObservable.add(() => this._onAfterRender()), !0; } detach() { return this._xrSessionManager.scene.onAfterRenderObservable.remove(this._onAfterRenderObserver), super.detach(); } _onAfterRender() { this.attached && this._renderTargetTexture && this._renderTargetTexture.render(!1, !1); } /** * {@inheritdoc} */ isCompatible() { return this._xrSessionManager.scene.getEngine().getCaps().colorBufferHalfFloat || !1; } /** * {@inheritdoc} */ dispose() { super.dispose(); } _onXRFrame(e) { const t = e.getViewerPose(this._xrSessionManager.referenceSpace); if (!t) return; const i = t.views[0]; this._renderTargetTexture = this._renderTargetTexture || this.spaceWarpRTTProvider.getRenderTargetTextureForView(i), this.spaceWarpRTTProvider.accessMotionVector(i); } } IO.Name = Qs.SPACE_WARP; IO.Version = 1; Oo.AddWebXRFeature(IO.Name, (c) => () => new IO(c), IO.Version, !1); class DO extends Ku { /** * Creates a new instance of the feature * @param _xrSessionManager the WebXRSessionManager * @param options options for the Feature */ constructor(e, t = {}) { super(e), this.options = t, this._cachedInternalTextures = [], this.texturesData = [], this.viewIndex = [], this.cameraIntrinsics = [], this.onTexturesUpdatedObservable = new Fe(), this.xrNativeFeatureName = "camera-access"; } attach(e) { return super.attach(e) ? (this._glContext = this._xrSessionManager.scene.getEngine()._gl, this._glBinding = new XRWebGLBinding(this._xrSessionManager.session, this._glContext), !0) : !1; } detach() { return super.detach() ? 
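// Usage sketch for this raw-camera-access feature (hedged: WebXRFeatureName.RAW_CAMERA_ACCESS
// and featuresManager are assumed un-minified names; `someMaterial` is an assumed
// StandardMaterial). Each XR frame with camera views refreshes texturesData and
// cameraIntrinsics, then notifies the observable:
//
//   const cameraAccess = xr.baseExperience.featuresManager.enableFeature(
//     WebXRFeatureName.RAW_CAMERA_ACCESS, "latest");
//   cameraAccess.onTexturesUpdatedObservable.add((textures) => {
//     // one texture per XR view; intrinsics are indexed the same way
//     someMaterial.emissiveTexture = textures[0];
//     console.log(cameraAccess.cameraIntrinsics[0]);
//   });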
(this._glBinding = void 0, this.options.doNotDisposeOnDetach || (this._cachedInternalTextures.forEach((e) => e.dispose()), this.texturesData.forEach((e) => e.dispose()), this._cachedInternalTextures.length = 0, this.texturesData.length = 0, this.cameraIntrinsics.length = 0), !0) : !1; } /** * Dispose this feature and all of the resources attached */ dispose() { super.dispose(), this.onTexturesUpdatedObservable.clear(); } /** * @see https://github.com/immersive-web/raw-camera-access/blob/main/explainer.md */ _updateCameraIntrinsics(e, t) { const i = { width: e.camera.width, height: e.camera.height, x: 0, y: 0 }, r = e.projectionMatrix, s = (1 - r[8]) * i.width / 2 + i.x, n = (1 - r[9]) * i.height / 2 + i.y, a = i.width / 2 * r[0], l = i.height / 2 * r[5], o = i.width / 2 * r[4]; this.cameraIntrinsics[t] = { u0: s, v0: n, ax: a, ay: l, gamma: o, width: i.width, height: i.height, viewportX: i.x, viewportY: i.y }; } _updateInternalTextures(e, t = 0) { var i, r; if (!e.camera) return !1; this.viewIndex[t] = e.eye; const s = (i = this._glBinding) === null || i === void 0 ? void 0 : i.getCameraImage(e.camera); if (this._cachedInternalTextures[t]) (r = this._cachedInternalTextures[t]._hardwareTexture) === null || r === void 0 || r.set(s); else { const n = new ln(this._xrSessionManager.scene.getEngine(), ts.Unknown, !0); n.isCube = !0, n.invertY = !1, n.format = 5, n.generateMipMaps = !0, n.type = 1, n.samplingMode = 3, n.width = e.camera.width, n.height = e.camera.height, n._cachedWrapU = 1, n._cachedWrapV = 1, n._hardwareTexture = new BI(s, this._glContext), this._cachedInternalTextures[t] = n; const a = new dn(this._xrSessionManager.scene); a.name = `WebXR Raw Camera Access (${t})`, a._texture = this._cachedInternalTextures[t], this.texturesData[t] = a, this._updateCameraIntrinsics(e, t); } return this._cachedInternalTextures[t].isReady = !0, !0; } _onXRFrame(e) { const t = this._xrSessionManager.referenceSpace, i = e.getViewerPose(t); if (!i || !i.views) return; let r = !0; i.views.forEach((s, n) => { r = r && this._updateInternalTextures(s, n); }), r && this.onTexturesUpdatedObservable.notifyObservers(this.texturesData); } } DO.Name = Qs.RAW_CAMERA_ACCESS; DO.Version = 1; Oo.AddWebXRFeature(DO.Name, (c, e) => () => new DO(c, e), DO.Version, !1); class nae extends KI { /** * Create a new hand controller object, without loading a controller model * @param scene the scene to use to create this controller * @param gamepadObject the corresponding gamepad object * @param handedness the handedness of the controller */ constructor(e, t, i) { super(e, Xve[i], t, i, !0), this.profileId = "generic-hand-select-grasp"; } _getFilenameAndPath() { return { filename: "generic.babylon", path: "https://controllers.babylonjs.com/generic/" }; } _getModelLoadingConstraints() { return !0; } _processLoadedModel(e) { } _setRootMesh(e) { } _updateModel() { } } Vu.RegisterController("generic-hand-select-grasp", (c, e) => new nae(e, c.gamepad, c.handedness)); const Xve = { left: { selectComponentId: "xr-standard-trigger", components: { // eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr-standard-trigger", visualResponses: {} }, grasp: { type: "trigger", gamepadIndices: { button: 4 }, rootNodeName: "grasp", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-hand-select-grasp-left", assetPath: "left.glb" }, right: { selectComponentId: "xr-standard-trigger", components: { // 
eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr-standard-trigger", visualResponses: {} }, grasp: { type: "trigger", gamepadIndices: { button: 4 }, rootNodeName: "grasp", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-hand-select-grasp-right", assetPath: "right.glb" }, none: { selectComponentId: "xr-standard-trigger", components: { // eslint-disable-next-line @typescript-eslint/naming-convention "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr-standard-trigger", visualResponses: {} }, grasp: { type: "trigger", gamepadIndices: { button: 4 }, rootNodeName: "grasp", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "generic-hand-select-grasp-none", assetPath: "none.glb" } }; class _T extends KI { constructor(e, t, i) { super(e, Yve["left-right"], t, i), this._mapping = { defaultButton: { valueNodeName: "VALUE", unpressedNodeName: "UNPRESSED", pressedNodeName: "PRESSED" }, defaultAxis: { valueNodeName: "VALUE", minNodeName: "MIN", maxNodeName: "MAX" }, buttons: { "xr-standard-trigger": { rootNodeName: "SELECT", componentProperty: "button", states: ["default", "touched", "pressed"] }, "xr-standard-squeeze": { rootNodeName: "GRASP", componentProperty: "state", states: ["pressed"] }, "xr-standard-touchpad": { rootNodeName: "TOUCHPAD_PRESS", labelAnchorNodeName: "squeeze-label", touchPointNodeName: "TOUCH" // TODO - use this for visual feedback }, "xr-standard-thumbstick": { rootNodeName: "THUMBSTICK_PRESS", componentProperty: "state", states: ["pressed"] } }, axes: { "xr-standard-touchpad": { "x-axis": { rootNodeName: "TOUCHPAD_TOUCH_X" }, "y-axis": { rootNodeName: "TOUCHPAD_TOUCH_Y" } }, "xr-standard-thumbstick": { "x-axis": { rootNodeName: "THUMBSTICK_X" }, "y-axis": { rootNodeName: "THUMBSTICK_Y" } } } }, this.profileId = "microsoft-mixed-reality"; } _getFilenameAndPath() { let e = ""; this.handedness === "left" ? 
e = _T.MODEL_LEFT_FILENAME : e = _T.MODEL_RIGHT_FILENAME; const t = "default", i = _T.MODEL_BASE_URL + t + "/"; return { filename: e, path: i }; } _getModelLoadingConstraints() { const e = fr.IsPluginForExtensionAvailable(".glb"); return e || Ce.Warn("glTF / glb loaded was not registered, using generic controller instead"), e; } _processLoadedModel(e) { this.rootMesh && (this.getComponentIds().forEach((t, i) => { if (!this.disableAnimation && t && this.rootMesh) { const r = this._mapping.buttons[t], s = r.rootNodeName; if (!s) { Ce.Log("Skipping unknown button at index: " + i + " with mapped name: " + t); return; } const n = this._getChildByName(this.rootMesh, s); if (!n) { Ce.Warn("Missing button mesh with name: " + s); return; } if (r.valueMesh = this._getImmediateChildByName(n, this._mapping.defaultButton.valueNodeName), r.pressedMesh = this._getImmediateChildByName(n, this._mapping.defaultButton.pressedNodeName), r.unpressedMesh = this._getImmediateChildByName(n, this._mapping.defaultButton.unpressedNodeName), r.valueMesh && r.pressedMesh && r.unpressedMesh) { const a = this.getComponent(t); a && a.onButtonStateChangedObservable.add((l) => { this._lerpTransform(r, l.value); }, void 0, !0); } else Ce.Warn("Missing button submesh under mesh with name: " + s); } }), this.getComponentIds().forEach((t) => { const i = this.getComponent(t); i.isAxes() && ["x-axis", "y-axis"].forEach((r) => { if (!this.rootMesh) return; const s = this._mapping.axes[t][r], n = this._getChildByName(this.rootMesh, s.rootNodeName); if (!n) { Ce.Warn("Missing axis mesh with name: " + s.rootNodeName); return; } s.valueMesh = this._getImmediateChildByName(n, this._mapping.defaultAxis.valueNodeName), s.minMesh = this._getImmediateChildByName(n, this._mapping.defaultAxis.minNodeName), s.maxMesh = this._getImmediateChildByName(n, this._mapping.defaultAxis.maxNodeName), s.valueMesh && s.minMesh && s.maxMesh ? i && i.onAxisValueChangedObservable.add((a) => { const l = r === "x-axis" ? 
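// This model-animation code consumes the same components that application code can
// observe directly. Hedged sketch (assumes `xr` is a WebXRDefaultExperience; component
// ids follow the xr-standard mapping used by this profile):
//
//   xr.input.onControllerAddedObservable.add((controller) => {
//     controller.onMotionControllerInitObservable.addOnce((motionController) => {
//       const thumbstick = motionController.getComponent("xr-standard-thumbstick");
//       thumbstick?.onAxisValueChangedObservable.add((axes) => console.log(axes.x, axes.y));
//       const main = motionController.getMainComponent(); // the trigger for xr-standard profiles
//       main.onButtonStateChangedObservable.add((component) => {
//         if (component.changes.pressed) console.log("trigger pressed:", component.pressed);
//       });
//     });
//   });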
a.x : a.y; this._lerpTransform(s, l, !0); }, void 0, !0) : Ce.Warn("Missing axis submesh under mesh with name: " + s.rootNodeName); }); })); } _setRootMesh(e) { this.rootMesh = new ke(this.profileId + " " + this.handedness, this.scene), this.rootMesh.isPickable = !1; let t; for (let i = 0; i < e.length; i++) { const r = e[i]; r.isPickable = !1, r.parent || (t = r); } t && t.setParent(this.rootMesh), this.scene.useRightHandedSystem || (this.rootMesh.rotationQuaternion = Ze.FromEulerAngles(0, Math.PI, 0)); } _updateModel() { } } _T.MODEL_BASE_URL = "https://controllers.babylonjs.com/microsoft/"; _T.MODEL_LEFT_FILENAME = "left.glb"; _T.MODEL_RIGHT_FILENAME = "right.glb"; Vu.RegisterController("windows-mixed-reality", (c, e) => new _T(e, c.gamepad, c.handedness)); const Yve = { left: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: { xr_standard_trigger_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_trigger_pressed_value", minNodeName: "xr_standard_trigger_pressed_min", maxNodeName: "xr_standard_trigger_pressed_max" } } }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: { xr_standard_squeeze_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_squeeze_pressed_value", minNodeName: "xr_standard_squeeze_pressed_min", maxNodeName: "xr_standard_squeeze_pressed_max" } } }, "xr-standard-touchpad": { type: "touchpad", gamepadIndices: { button: 2, xAxis: 0, yAxis: 1 }, rootNodeName: "xr_standard_touchpad", visualResponses: { xr_standard_touchpad_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_pressed_value", minNodeName: "xr_standard_touchpad_pressed_min", maxNodeName: "xr_standard_touchpad_pressed_max" }, xr_standard_touchpad_xaxis_pressed: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_xaxis_pressed_value", minNodeName: "xr_standard_touchpad_xaxis_pressed_min", maxNodeName: "xr_standard_touchpad_xaxis_pressed_max" }, xr_standard_touchpad_yaxis_pressed: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_yaxis_pressed_value", minNodeName: "xr_standard_touchpad_yaxis_pressed_min", maxNodeName: "xr_standard_touchpad_yaxis_pressed_max" }, xr_standard_touchpad_xaxis_touched: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_xaxis_touched_value", minNodeName: "xr_standard_touchpad_xaxis_touched_min", maxNodeName: "xr_standard_touchpad_xaxis_touched_max" }, xr_standard_touchpad_yaxis_touched: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_yaxis_touched_value", minNodeName: "xr_standard_touchpad_yaxis_touched_min", maxNodeName: "xr_standard_touchpad_yaxis_touched_max" }, xr_standard_touchpad_axes_touched: { componentProperty: "state", states: ["touched", "pressed"], valueNodeProperty: "visibility", valueNodeName: 
"xr_standard_touchpad_axes_touched_value" } }, touchPointNodeName: "xr_standard_touchpad_axes_touched_value" }, "xr-standard-thumbstick": { type: "thumbstick", gamepadIndices: { button: 3, xAxis: 2, yAxis: 3 }, rootNodeName: "xr_standard_thumbstick", visualResponses: { xr_standard_thumbstick_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_pressed_value", minNodeName: "xr_standard_thumbstick_pressed_min", maxNodeName: "xr_standard_thumbstick_pressed_max" }, xr_standard_thumbstick_xaxis_pressed: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_xaxis_pressed_value", minNodeName: "xr_standard_thumbstick_xaxis_pressed_min", maxNodeName: "xr_standard_thumbstick_xaxis_pressed_max" }, xr_standard_thumbstick_yaxis_pressed: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_yaxis_pressed_value", minNodeName: "xr_standard_thumbstick_yaxis_pressed_min", maxNodeName: "xr_standard_thumbstick_yaxis_pressed_max" } } } }, gamepadMapping: "xr-standard", rootNodeName: "microsoft-mixed-reality-left", assetPath: "left.glb" }, right: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: { xr_standard_trigger_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_trigger_pressed_value", minNodeName: "xr_standard_trigger_pressed_min", maxNodeName: "xr_standard_trigger_pressed_max" } } }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: { xr_standard_squeeze_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_squeeze_pressed_value", minNodeName: "xr_standard_squeeze_pressed_min", maxNodeName: "xr_standard_squeeze_pressed_max" } } }, "xr-standard-touchpad": { type: "touchpad", gamepadIndices: { button: 2, xAxis: 0, yAxis: 1 }, rootNodeName: "xr_standard_touchpad", visualResponses: { xr_standard_touchpad_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_pressed_value", minNodeName: "xr_standard_touchpad_pressed_min", maxNodeName: "xr_standard_touchpad_pressed_max" }, xr_standard_touchpad_xaxis_pressed: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_xaxis_pressed_value", minNodeName: "xr_standard_touchpad_xaxis_pressed_min", maxNodeName: "xr_standard_touchpad_xaxis_pressed_max" }, xr_standard_touchpad_yaxis_pressed: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_yaxis_pressed_value", minNodeName: "xr_standard_touchpad_yaxis_pressed_min", maxNodeName: "xr_standard_touchpad_yaxis_pressed_max" }, xr_standard_touchpad_xaxis_touched: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_xaxis_touched_value", minNodeName: "xr_standard_touchpad_xaxis_touched_min", maxNodeName: 
"xr_standard_touchpad_xaxis_touched_max" }, xr_standard_touchpad_yaxis_touched: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_touchpad_yaxis_touched_value", minNodeName: "xr_standard_touchpad_yaxis_touched_min", maxNodeName: "xr_standard_touchpad_yaxis_touched_max" }, xr_standard_touchpad_axes_touched: { componentProperty: "state", states: ["touched", "pressed"], valueNodeProperty: "visibility", valueNodeName: "xr_standard_touchpad_axes_touched_value" } }, touchPointNodeName: "xr_standard_touchpad_axes_touched_value" }, "xr-standard-thumbstick": { type: "thumbstick", gamepadIndices: { button: 3, xAxis: 2, yAxis: 3 }, rootNodeName: "xr_standard_thumbstick", visualResponses: { xr_standard_thumbstick_pressed: { componentProperty: "button", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_pressed_value", minNodeName: "xr_standard_thumbstick_pressed_min", maxNodeName: "xr_standard_thumbstick_pressed_max" }, xr_standard_thumbstick_xaxis_pressed: { componentProperty: "xAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_xaxis_pressed_value", minNodeName: "xr_standard_thumbstick_xaxis_pressed_min", maxNodeName: "xr_standard_thumbstick_xaxis_pressed_max" }, xr_standard_thumbstick_yaxis_pressed: { componentProperty: "yAxis", states: ["default", "touched", "pressed"], valueNodeProperty: "transform", valueNodeName: "xr_standard_thumbstick_yaxis_pressed_value", minNodeName: "xr_standard_thumbstick_yaxis_pressed_min", maxNodeName: "xr_standard_thumbstick_yaxis_pressed_max" } } } }, gamepadMapping: "xr-standard", rootNodeName: "microsoft-mixed-reality-right", assetPath: "right.glb" } }; class N4 extends KI { constructor(e, t, i, r = !1, s = !1) { super(e, Qve[i], t, i), this._forceLegacyControllers = s, this.profileId = "oculus-touch"; } _getFilenameAndPath() { let e = ""; this.handedness === "left" ? e = N4.MODEL_LEFT_FILENAME : e = N4.MODEL_RIGHT_FILENAME; const t = this._isQuest() ? N4.QUEST_MODEL_BASE_URL : N4.MODEL_BASE_URL; return { filename: e, path: t }; } _getModelLoadingConstraints() { return !0; } _processLoadedModel(e) { const t = this._isQuest(), i = this.handedness === "right" ? -1 : 1; this.getComponentIds().forEach((r) => { const s = r && this.getComponent(r); s && s.onButtonStateChangedObservable.add((n) => { if (!(!this.rootMesh || this.disableAnimation)) switch (r) { case "xr-standard-trigger": t || (this._modelRootNode.getChildren()[3].rotation.x = -n.value * 0.2, this._modelRootNode.getChildren()[3].position.y = -n.value * 5e-3, this._modelRootNode.getChildren()[3].position.z = -n.value * 5e-3); return; case "xr-standard-squeeze": t || (this._modelRootNode.getChildren()[4].position.x = i * n.value * 35e-4); return; case "xr-standard-thumbstick": return; case "a-button": case "x-button": t || (n.pressed ? this._modelRootNode.getChildren()[1].position.y = -1e-3 : this._modelRootNode.getChildren()[1].position.y = 0); return; case "b-button": case "y-button": t || (n.pressed ? 
this._modelRootNode.getChildren()[2].position.y = -1e-3 : this._modelRootNode.getChildren()[2].position.y = 0); return; } }, void 0, !0); }); } _setRootMesh(e) { this.rootMesh = new ke(this.profileId + " " + this.handedness, this.scene), this.scene.useRightHandedSystem || (this.rootMesh.rotationQuaternion = Ze.FromEulerAngles(0, Math.PI, 0)), e.forEach((t) => { t.isPickable = !1; }), this._isQuest() ? this._modelRootNode = e[0] : (this._modelRootNode = e[1], this.rootMesh.position.y = 0.034, this.rootMesh.position.z = 0.052), this._modelRootNode.parent = this.rootMesh; } _updateModel() { } /** * Is this the new type of oculus touch. At the moment both have the same profile and it is impossible to differentiate * between the touch and touch 2. */ _isQuest() { return !!navigator.userAgent.match(/Quest/gi) && !this._forceLegacyControllers; } } N4.MODEL_BASE_URL = "https://controllers.babylonjs.com/oculus/"; N4.MODEL_LEFT_FILENAME = "left.babylon"; N4.MODEL_RIGHT_FILENAME = "right.babylon"; N4.QUEST_MODEL_BASE_URL = "https://controllers.babylonjs.com/oculusQuest/"; Vu.RegisterController("oculus-touch", (c, e) => new N4(e, c.gamepad, c.handedness)); Vu.RegisterController("oculus-touch-legacy", (c, e) => new N4(e, c.gamepad, c.handedness, !0)); const Qve = { left: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: {} }, "xr-standard-thumbstick": { type: "thumbstick", gamepadIndices: { button: 3, xAxis: 2, yAxis: 3 }, rootNodeName: "xr_standard_thumbstick", visualResponses: {} }, "x-button": { type: "button", gamepadIndices: { button: 4 }, rootNodeName: "x_button", visualResponses: {} }, "y-button": { type: "button", gamepadIndices: { button: 5 }, rootNodeName: "y_button", visualResponses: {} }, thumbrest: { type: "button", gamepadIndices: { button: 6 }, rootNodeName: "thumbrest", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "oculus-touch-v2-left", assetPath: "left.glb" }, right: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: {} }, "xr-standard-thumbstick": { type: "thumbstick", gamepadIndices: { button: 3, xAxis: 2, yAxis: 3 }, rootNodeName: "xr_standard_thumbstick", visualResponses: {} }, "a-button": { type: "button", gamepadIndices: { button: 4 }, rootNodeName: "a_button", visualResponses: {} }, "b-button": { type: "button", gamepadIndices: { button: 5 }, rootNodeName: "b_button", visualResponses: {} }, thumbrest: { type: "button", gamepadIndices: { button: 6 }, rootNodeName: "thumbrest", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "oculus-touch-v2-right", assetPath: "right.glb" } }; class EP extends KI { /** * Create a new Vive motion controller object * @param scene the scene to use to create this controller * @param gamepadObject the corresponding gamepad object * @param handedness the handedness of the controller */ constructor(e, t, i) { super(e, $ve[i], t, i), this.profileId = "htc-vive"; } _getFilenameAndPath() { const e = EP.MODEL_FILENAME, t = EP.MODEL_BASE_URL; return { filename: e, path: t }; } 
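/*
 * Editor's note (not part of the original bundle): the controller classes in this section are made
 * available to the WebXR input system through Vu.RegisterController (Vu is presumably the un-minified
 * WebXRMotionControllerManager), as done for "windows-mixed-reality" and "oculus-touch" above and for
 * "htc-vive" just below. A minimal sketch of registering a custom profile; MyController is hypothetical
 * and would extend the same base class (KI) as the controllers here:
 *
 *   Vu.RegisterController("my-company-controller", (xrInput, scene) =>
 *       new MyController(scene, xrInput.gamepad, xrInput.handedness));
 */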
_getModelLoadingConstraints() { return !0; } _processLoadedModel(e) { this.getComponentIds().forEach((t) => { const i = t && this.getComponent(t); i && i.onButtonStateChangedObservable.add((r) => { if (!(!this.rootMesh || this.disableAnimation)) switch (t) { case "xr-standard-trigger": this._modelRootNode.getChildren()[6].rotation.x = -r.value * 0.15; return; case "xr-standard-touchpad": return; case "xr-standard-squeeze": return; } }, void 0, !0); }); } _setRootMesh(e) { this.rootMesh = new ke(this.profileId + " " + this.handedness, this.scene), e.forEach((t) => { t.isPickable = !1; }), this._modelRootNode = e[1], this._modelRootNode.parent = this.rootMesh, this.scene.useRightHandedSystem || (this.rootMesh.rotationQuaternion = Ze.FromEulerAngles(0, Math.PI, 0)); } _updateModel() { } } EP.MODEL_BASE_URL = "https://controllers.babylonjs.com/vive/"; EP.MODEL_FILENAME = "wand.babylon"; Vu.RegisterController("htc-vive", (c, e) => new EP(e, c.gamepad, c.handedness)); const $ve = { left: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: {} }, "xr-standard-touchpad": { type: "touchpad", gamepadIndices: { button: 2, xAxis: 0, yAxis: 1 }, rootNodeName: "xr_standard_touchpad", visualResponses: {} }, menu: { type: "button", gamepadIndices: { button: 4 }, rootNodeName: "menu", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "htc_vive_none", assetPath: "none.glb" }, right: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: {} }, "xr-standard-touchpad": { type: "touchpad", gamepadIndices: { button: 2, xAxis: 0, yAxis: 1 }, rootNodeName: "xr_standard_touchpad", visualResponses: {} }, menu: { type: "button", gamepadIndices: { button: 4 }, rootNodeName: "menu", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "htc_vive_none", assetPath: "none.glb" }, none: { selectComponentId: "xr-standard-trigger", components: { "xr-standard-trigger": { type: "trigger", gamepadIndices: { button: 0 }, rootNodeName: "xr_standard_trigger", visualResponses: {} }, "xr-standard-squeeze": { type: "squeeze", gamepadIndices: { button: 1 }, rootNodeName: "xr_standard_squeeze", visualResponses: {} }, "xr-standard-touchpad": { type: "touchpad", gamepadIndices: { button: 2, xAxis: 0, yAxis: 1 }, rootNodeName: "xr_standard_touchpad", visualResponses: {} }, menu: { type: "button", gamepadIndices: { button: 4 }, rootNodeName: "menu", visualResponses: {} } }, gamepadMapping: "xr-standard", rootNodeName: "htc-vive-none", assetPath: "none.glb" } }; class aae { get session() { return this._nativeImpl.session; } constructor(e) { this._nativeImpl = e, this._xrTransform = new XRRigidTransform(), this._xrPose = { transform: this._xrTransform, emulatedPosition: !1 }, this._xrPoseVectorData = new Float32Array(8), this.fillPoses = this._nativeImpl.fillPoses.bind(this._nativeImpl), this.getViewerPose = this._nativeImpl.getViewerPose.bind(this._nativeImpl), this.getHitTestResults = this._nativeImpl.getHitTestResults.bind(this._nativeImpl), this.getHitTestResultsForTransientInput = () 
=> { throw new Error("XRFrame.getHitTestResultsForTransientInput not supported on native."); }, this.createAnchor = this._nativeImpl.createAnchor.bind(this._nativeImpl), this.getJointPose = this._nativeImpl.getJointPose.bind(this._nativeImpl), this.fillJointRadii = this._nativeImpl.fillJointRadii.bind(this._nativeImpl), this.getLightEstimate = () => { throw new Error("XRFrame.getLightEstimate not supported on native."); }, this.getImageTrackingResults = () => { var t; return (t = this._nativeImpl._imageTrackingResults) !== null && t !== void 0 ? t : []; }; } getPose(e, t) { if (!this._nativeImpl.getPoseData(e, t, this._xrPoseVectorData.buffer, this._xrTransform.matrix.buffer)) return; const i = this._xrTransform.position; i.x = this._xrPoseVectorData[0], i.y = this._xrPoseVectorData[1], i.z = this._xrPoseVectorData[2], i.w = this._xrPoseVectorData[3]; const r = this._xrTransform.orientation; return r.x = this._xrPoseVectorData[4], r.y = this._xrPoseVectorData[5], r.z = this._xrPoseVectorData[6], r.w = this._xrPoseVectorData[7], this._xrPose; } get trackedAnchors() { return this._nativeImpl.trackedAnchors; } get worldInformation() { return this._nativeImpl.worldInformation; } get detectedPlanes() { return this._nativeImpl.detectedPlanes; } get featurePointCloud() { return this._nativeImpl.featurePointCloud; } getDepthInformation(e) { throw new Error("This function is not available in Babylon Native"); } } zie("NativeXRFrame", aae); var X_; (function(c) { c[c.Input = 0] = "Input", c[c.Output = 1] = "Output"; })(X_ || (X_ = {})); class EB { constructor(e, t, i) { this._ownerBlock = i, this._connectedPoint = [], this.uniqueId = G_(), this.connectedPointIds = [], this.name = e, this._connectionType = t; } /** * The type of the connection */ get connectionType() { return this._connectionType; } /** * @internal * Override this to indicate if a point can connect to more than one point. */ _isSingularConnection() { return !0; } /** * Returns if a point is connected to any other point. * @returns boolean indicating if the point is connected. */ isConnected() { return this._connectedPoint.length > 0; } /** * Connects two points together. * @param point */ connectTo(e) { if (this._connectionType === e._connectionType) throw new Error(`Cannot connect two points of type ${this.connectionType}`); if (this._isSingularConnection() && this._connectedPoint.length > 0 || e._isSingularConnection() && e._connectedPoint.length > 0) throw new Error("Max number of connections for point reached"); this._connectedPoint.push(e), e._connectedPoint.push(this); } /** * Saves the connection to a JSON object. 
*/ serialize(e = {}) { e.uniqueId = this.uniqueId, e.name = this.name, e._connectionType = this._connectionType, e.connectedPointIds = [], e.className = this.getClassName(); for (const t of this._connectedPoint) e.connectedPointIds.push(t.uniqueId); } getClassName() { return "FGConnection"; } /** * Deserialize from a object into this * @param serializationObject */ deserialize(e) { this.uniqueId = e.uniqueId, this.name = e.name, this._connectionType = e._connectionType, this.connectedPointIds = e.connectedPointIds; } /** * Parses a connection from an object * @param serializationObject * @param ownerBlock * @returns */ static Parse(e = {}, t) { const i = Ve.Instantiate(e.className), r = new i(e.name, e._connectionType, t); return r.deserialize(e), r; } } class $_ { constructor(e, t) { this.typeName = e, this.defaultValue = t; } serialize(e) { e.typeName = this.typeName, e.defaultValue = this.defaultValue; } static Parse(e) { return new $_(e.typeName, e.defaultValue); } } const Ci = new $_("any", void 0), oae = new $_("string", ""), Rs = new $_("number", 0), Th = new $_("boolean", !1), TB = new $_("Vector2", at.Zero()), ex = new $_("Vector3", D.Zero()), lae = new $_("Vector4", Di.Zero()), Zve = new $_("Matrix", Ae.Identity()), cae = new $_("Color3", ze.Black()), uae = new $_("Color4", new Et(0, 0, 0, 0)), hae = new $_("Quaternion", Ze.Identity()); function dae(c) { switch (typeof c) { case "string": return oae; case "number": return Rs; case "boolean": return Th; case "object": return c instanceof at ? TB : c instanceof D ? ex : c instanceof Di ? lae : c instanceof ze ? cae : c instanceof Et ? uae : c instanceof Ze ? hae : Ci; default: return Ci; } } class SB extends EB { /** * Create a new data connection point. * @param name * @param connectionType * @param ownerBlock * @param richType */ constructor(e, t, i, r) { super(e, t, i), this.richType = r; } /** * An output data block can connect to multiple input data blocks, * but an input data block can only connect to one output data block. */ _isSingularConnection() { return this.connectionType === X_.Input; } /** * Set the value of the connection in a specific context. * @param value the value to set * @param context the context to which the value is set */ setValue(e, t) { t._setConnectionValue(this, e); } connectTo(e) { super.connectTo(e); } _getValueOrDefault(e) { return e._hasConnectionValue(this) ? e._getConnectionValue(this) : this.richType.defaultValue; } /** * Gets the value of the connection in a specific context. * @param context the context from which the value is retrieved * @returns the value of the connection */ getValue(e) { return this.connectionType === X_.Output ? (e._notifyExecuteNode(this._ownerBlock), this._ownerBlock._updateOutputs(e), this._getValueOrDefault(e)) : this.isConnected() ? 
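/*
 * Editor's note (not part of the original bundle): a minimal sketch of how these data connection
 * points are used. producerBlock and consumerBlock are hypothetical flow-graph blocks (instances of
 * the MT block class defined further below); Rs is the "number" rich type declared above.
 *
 *   const result = producerBlock.registerDataOutput("result", Rs);
 *   const a = consumerBlock.registerDataInput("a", Rs);
 *   result.connectTo(a);   // an output may feed many inputs; an input accepts a single connection
 *   a.getValue(context);   // pulls through the connection, triggering producerBlock._updateOutputs(context)
 */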
this._connectedPoint[0].getValue(e) : this._getValueOrDefault(e); } getClassName() { return "FGDataConnection"; } serialize(e = {}) { super.serialize(e), e.richType = {}, this.richType.serialize(e.richType); } static Parse(e, t) { const i = EB.Parse(e, t); return i.richType = $_.Parse(e.richType), i; } } Be("FGDataConnection", SB); const qve = /([./])({?\w+}?)/g; class ym { constructor(e) { this._templateSubstitutions = {}, this._pathParts = [], this._templateStrings = [], this.hasTemplateStrings = !1, this._path = e; const { pathParts: t, templateStrings: i } = this._getPathPartsAndTemplateStrings(e); this._pathParts = t, this._templateStrings = i, this.hasTemplateStrings = i.length > 0; } _getPathPartsAndTemplateStrings(e) { const t = e.matchAll(qve), i = [], r = []; let s = t.next(); for (; !s.done; ) { const n = s.value, [, a, l] = n; let o = l, u = !1; l.startsWith("{") && l.endsWith("}") && (u = !0, o = l.slice(1, l.length - 1), r.indexOf(o) === -1 && r.push(o)), i.push({ value: l, isTemplate: u, valueWithoutBraces: o, separator: a }), s = t.next(); } return { pathParts: i, templateStrings: r }; } /** * Gets the template strings in this path. * @returns an array containing the template strings in this path. */ getTemplateStrings() { return this._templateStrings; } setTemplateSubstitution(e, t) { if (this._templateStrings.indexOf(e) === -1) throw new Error(`Template string ${e} does not exist in path ${this._path}`); this._templateSubstitutions[e] = t; } _evaluateTemplates() { for (const e of this._pathParts) if (e.isTemplate) { const t = this._templateSubstitutions[e.valueWithoutBraces]; if (t === void 0) throw new Error(`Template string ${e.value} was not substituted`); e.replacedValue = t.toString(); } } /** * Gets the final path after all template strings have been substituted. * @returns a string representing the final path. */ getFinalPath() { let e = ""; for (const t of this._pathParts) e += t.separator, t.isTemplate ? e += t.replacedValue : e += t.value; return e; } /* * Breaks the path into a chain of entities, for example, * /x/y/z would be split into [context._userVariables.x, context._userVariables.x.y, context._userVariables.x.y.z], * and the path that was split, i.e. /x/y/z, would be split into ["x", "y", "z"]. */ _evaluatePath(e) { this._evaluateTemplates(); const t = [], i = []; let r = e.userVariables; for (const s of this._pathParts) { if (r === void 0) throw new Error(`Could not find path ${this.getFinalPath()} in target context`); const n = s.isTemplate ? 
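/*
 * Editor's note (not part of the original bundle): a minimal sketch of the path class above (ym,
 * serialized as "FGPath"). The segment names and the "meshIndex" template string are illustrative
 * only; getProperty/setProperty resolve the segments against context.userVariables.
 *
 *   const path = new ym("/meshes/{meshIndex}/position");
 *   path.getTemplateStrings();                  // ["meshIndex"]
 *   path.setTemplateSubstitution("meshIndex", 2);
 *   path.getProperty(context);                  // ~ context.userVariables.meshes[2].position
 */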
s.replacedValue : s.value; if (!n) throw new Error(`Invalid path ${this.getFinalPath()}`); r = r[n], t.push(r), i.push(n); } return { entityChain: t, splitPath: i }; } getProperty(e) { for (const i of ym.Extensions) if (i.shouldProcess(this)) return i.processGet(this, e); const { entityChain: t } = this._evaluatePath(e); return t[t.length - 1]; } setProperty(e, t) { for (const a of ym.Extensions) if (a.shouldProcess(this)) { a.processSet(this, e, t); return; } const { entityChain: i, splitPath: r } = this._evaluatePath(e), s = i[i.length - 2], n = r[r.length - 1]; s[n] = t; } getClassName() { return ym.ClassName; } serialize(e = {}) { return e.path = this._path, e.className = this.getClassName(), e; } static Parse(e) { return new ym(e.path); } } ym.Extensions = []; ym.ClassName = "FGPath"; Be(ym.ClassName, ym); function fae(c) { return c === "Mesh" || c === "AbstractMesh" || c === "GroundMesh" || c === "InstanceMesh" || c === "LinesMesh" || c === "GoldbergMesh" || c === "GreasedLineMesh" || c === "TrailMesh"; } function pae(c) { return c === "Vector2" || c === "Vector3" || c === "Vector4" || c === "Quaternion" || c === "Color3" || c === "Color4"; } function Jve(c, e) { if (c === "Vector2") return at.FromArray(e); if (c === "Vector3") return D.FromArray(e); if (c === "Vector4") return Di.FromArray(e); if (c === "Quaternion") return Ze.FromArray(e); if (c === "Color3") return new ze(e[0], e[1], e[2]); if (c === "Color4") return new Et(e[0], e[1], e[2], e[3]); throw new Error(`Unknown vector class name ${c}`); } function _j(c, e, t) { var i, r; const s = (r = (i = e == null ? void 0 : e.getClassName) === null || i === void 0 ? void 0 : i.call(e)) !== null && r !== void 0 ? r : ""; fae(s) ? t[c] = { name: e.name, className: s } : pae(s) ? t[c] = { value: e.asArray(), className: s } : t[c] = e; } function _ae(c, e, t) { const i = e[c]; let r; const s = i == null ? void 0 : i.className; return fae(s) ? r = t.getMeshByName(i.name) : pae(s) ? r = Jve(s, i.value) : s === ym.ClassName ? r = ym.Parse(i) : i && i.value !== void 0 ? r = i.value : r = i, r; } class MT { /** Constructor is protected so only subclasses can be instantiated */ constructor(e) { this.config = e, this.uniqueId = G_(), this.configure(); } configure() { var e, t; this.name = (t = (e = this.config) === null || e === void 0 ? void 0 : e.name) !== null && t !== void 0 ? 
t : this.getClassName(), this.dataInputs = [], this.dataOutputs = []; } /** * @internal */ _updateOutputs(e) { } registerDataInput(e, t) { const i = new SB(e, X_.Input, this, t); return this.dataInputs.push(i), i; } registerDataOutput(e, t) { const i = new SB(e, X_.Output, this, t); return this.dataOutputs.push(i), i; } getDataInput(e) { return this.dataInputs.find((t) => t.name === e); } getDataOutput(e) { return this.dataOutputs.find((t) => t.name === e); } serialize(e = {}, t = _j) { e.uniqueId = this.uniqueId, e.config = {}, this.config && (e.config.name = this.config.name), e.dataInputs = [], e.dataOutputs = [], e.className = this.getClassName(); for (const i of this.dataInputs) { const r = {}; i.serialize(r), e.dataInputs.push(r); } for (const i of this.dataOutputs) { const r = {}; i.serialize(r), e.dataOutputs.push(r); } } getClassName() { return "FGBlock"; } static Parse(e, t, i = _ae) { const r = Ve.Instantiate(e.className), s = {}; if (e.config) for (const a in e.config) s[a] = i(a, e.config, t); const n = new r(s); n.uniqueId = e.uniqueId; for (let a = 0; a < e.dataInputs.length; a++) { const l = n.getDataInput(e.dataInputs[a].name); if (l) l.deserialize(e.dataInputs[a]); else throw new Error("Could not find data input with name " + e.dataInputs[a].name + " in block " + e.className); } for (let a = 0; a < e.dataOutputs.length; a++) { const l = n.getDataOutput(e.dataOutputs[a].name); if (l) l.deserialize(e.dataOutputs[a]); else throw new Error("Could not find data output with name " + e.dataOutputs[a].name + " in block " + e.className); } return n.metadata = e.metadata, n.deserialize && n.deserialize(e), n; } } class MB extends EB { /** * A signal input can be connected to more than one signal output, * but a signal output can only connect to one signal input */ _isSingularConnection() { return this.connectionType === X_.Output; } /** * @internal */ _activateSignal(e) { var t; this.connectionType === X_.Input ? 
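/*
 * Editor's note (not part of the original bundle): a minimal sketch of a custom data-only block built
 * on the MT block class above, mirroring how the conditional data block is written further below.
 * MyScaleBlock is hypothetical; Rs is the "number" rich type and Be the class-registration helper used
 * throughout this section.
 *
 *   class MyScaleBlock extends MT {
 *     constructor(config) {
 *       super(config);
 *       this.input = this.registerDataInput("input", Rs);
 *       this.factor = this.registerDataInput("factor", Rs);
 *       this.output = this.registerDataOutput("output", Rs);
 *     }
 *     _updateOutputs(context) {
 *       this.output.setValue(this.input.getValue(context) * this.factor.getValue(context), context);
 *     }
 *     getClassName() { return "MyScaleBlock"; }
 *   }
 *   Be("MyScaleBlock", MyScaleBlock);   // makes the block discoverable by MT.Parse
 */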
(e._notifyExecuteNode(this._ownerBlock), this._ownerBlock._execute(e, this), e._increaseExecutionId()) : (t = this._connectedPoint[0]) === null || t === void 0 || t._activateSignal(e); } } Be("FlowGraphSignalConnection", MB); class n6 extends MT { constructor(e) { super(e), this.in = this._registerSignalInput("in"); } configure() { super.configure(), this.signalInputs = [], this.signalOutputs = []; } _registerSignalInput(e) { const t = new MB(e, X_.Input, this); return this.signalInputs.push(t), t; } _registerSignalOutput(e) { const t = new MB(e, X_.Output, this); return this.signalOutputs.push(t), t; } getSignalInput(e) { return this.signalInputs.find((t) => t.name === e); } getSignalOutput(e) { return this.signalOutputs.find((t) => t.name === e); } serialize(e = {}) { super.serialize(e), e.signalInputs = [], e.signalOutputs = []; for (const t of this.signalInputs) { const i = {}; t.serialize(i), e.signalInputs.push(i); } for (const t of this.signalOutputs) { const i = {}; t.serialize(i), e.signalOutputs.push(i); } } deserialize(e) { for (let t = 0; t < e.signalInputs.length; t++) { const i = this.getSignalInput(e.signalInputs[t].name); if (i) i.deserialize(e.signalInputs[t]); else throw new Error("Could not find signal input with name " + e.signalInputs[t].name + " in block " + e.className); } for (let t = 0; t < e.signalOutputs.length; t++) { const i = this.getSignalOutput(e.signalOutputs[t].name); if (i) i.deserialize(e.signalOutputs[t]); else throw new Error("Could not find signal output with name " + e.signalOutputs[t].name + " in block " + e.className); } } getClassName() { return "FGExecutionBlock"; } } class mj extends n6 { constructor(e) { super(e), this.out = this._registerSignalOutput("out"), this.done = this._registerSignalOutput("done"); } /** * @internal * @param context */ _startPendingTasks(e) { this._preparePendingTasks(e), e._addPendingBlock(this); } } class Mw extends mj { /** * @internal */ _execute(e) { e._notifyExecuteNode(this), this.out._activateSignal(e); } } class RB { constructor(e) { this.uniqueId = G_(), this._userVariables = {}, this._executionVariables = {}, this._connectionValues = {}, this._pendingBlocks = [], this._executionId = 0, this.onNodeExecutedObservable = new Fe(), this._configuration = e; } /** * Check if a user-defined variable is defined. * @param name * @returns */ hasVariable(e) { return e in this._userVariables; } /** * Set a user-defined variable. * @param name * @param value */ setVariable(e, t) { this._userVariables[e] = t; } /** * Get a user-defined variable. * @param name * @returns */ getVariable(e) { return this._userVariables[e]; } /** * Gets all user variables map */ get userVariables() { return this._userVariables; } _getUniqueIdPrefixedName(e, t) { return `${e.uniqueId}_${t}`; } /** * Set an internal execution variable * @internal * @param name * @param value */ _setExecutionVariable(e, t, i) { this._executionVariables[this._getUniqueIdPrefixedName(e, t)] = i; } /** * Get an internal execution variable * @internal * @param name * @returns */ _getExecutionVariable(e, t, i) { return this._hasExecutionVariable(e, t) ? 
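/*
 * Editor's note (not part of the original bundle): a minimal sketch of the user-variable API on this
 * execution context class (RB, serialized as "FGContext"). In practice a context is obtained from a
 * graph's createContext(), defined further below; "score" is an illustrative variable name.
 *
 *   const context = graph.createContext();
 *   context.setVariable("score", 0);
 *   context.hasVariable("score");   // true
 *   context.getVariable("score");   // 0
 *
 * Blocks read and write the same variables through the get/set variable blocks defined further below.
 */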
this._executionVariables[this._getUniqueIdPrefixedName(e, t)] : i; } /** * Delete an internal execution variable * @internal * @param block * @param name */ _deleteExecutionVariable(e, t) { delete this._executionVariables[this._getUniqueIdPrefixedName(e, t)]; } /** * Check if an internal execution variable is defined * @internal * @param block * @param name * @returns */ _hasExecutionVariable(e, t) { return this._getUniqueIdPrefixedName(e, t) in this._executionVariables; } /** * Check if a connection value is defined * @internal * @param connectionPoint * @returns */ _hasConnectionValue(e) { return e.uniqueId in this._connectionValues; } /** * Set a connection value * @internal * @param connectionPoint * @param value */ _setConnectionValue(e, t) { this._connectionValues[e.uniqueId] = t; } /** * Get a connection value * @internal * @param connectionPoint * @returns */ _getConnectionValue(e) { return this._connectionValues[e.uniqueId]; } /** * Get the configuration * @internal * @param name * @param value */ get configuration() { return this._configuration; } /** * Add a block to the list of blocks that have pending tasks. * @internal * @param block */ _addPendingBlock(e) { this._pendingBlocks.push(e); } /** * Remove a block from the list of blocks that have pending tasks. * @internal * @param block */ _removePendingBlock(e) { const t = this._pendingBlocks.indexOf(e); t !== -1 && this._pendingBlocks.splice(t, 1); } /** * Clear all pending blocks. * @internal */ _clearPendingBlocks() { for (const e of this._pendingBlocks) e._cancelPendingTasks(this); this._pendingBlocks.length = 0; } /** * @internal * Function that notifies the node executed observable * @param node */ _notifyExecuteNode(e) { this.onNodeExecutedObservable.notifyObservers(e); } /** * @internal */ _increaseExecutionId() { this._executionId++; } /** * A monotonically increasing ID for each execution. * Incremented for every block executed. 
*/ get executionId() { return this._executionId; } /** * Serializes a context * @param serializationObject the object to write the values in * @param valueSerializationFunction a function to serialize complex values */ serialize(e = {}, t = _j) { e.uniqueId = this.uniqueId, e._userVariables = {}; for (const i in this._userVariables) t(i, this._userVariables[i], e._userVariables); e._connectionValues = {}; for (const i in this._connectionValues) t(i, this._connectionValues[i], e._connectionValues); } getClassName() { return "FGContext"; } /** * Parses a context * @param serializationObject the object containing the context serialization values * @param graph the graph to which the context should belong * @param valueParseFunction a function to parse complex values * @returns */ static Parse(e, t, i = _ae) { const r = t.createContext(); r.uniqueId = e.uniqueId; for (const s in e._userVariables) { const n = i(s, e._userVariables, r._configuration.scene); r._userVariables[s] = n; } for (const s in e._connectionValues) { const n = i(s, e._connectionValues, r._configuration.scene); r._connectionValues[s] = n; } return r; } } F([ W() ], RB.prototype, "uniqueId", void 0); function gj(c, e) { return !!(c.parent && (c.parent === e || gj(c.parent, e))); } class TP extends Mw { constructor(e) { e.path.hasTemplateStrings && Ve.Warn("Template strings are not supported in the path of mesh pick event blocks."), super(e), this.config = e; } _getReferencedMesh(e) { return this.config.path.getProperty(e); } /** * @internal */ _preparePendingTasks(e) { let t = e._getExecutionVariable(this, "meshPickObserver"); if (!t) { const i = this.config.path.getProperty(e); if (!i || !(i instanceof xr)) throw new Error("Mesh pick event block requires a valid mesh"); e._setExecutionVariable(this, "mesh", i), t = i.getScene().onPointerObservable.add((s) => { var n, a, l; s.type === si.POINTERPICK && (!((n = s.pickInfo) === null || n === void 0) && n.pickedMesh) && (((a = s.pickInfo) === null || a === void 0 ? void 0 : a.pickedMesh) === i || gj((l = s.pickInfo) === null || l === void 0 ? void 0 : l.pickedMesh, i)) && this._execute(e); }); const r = i.onDisposeObservable.add(() => this._onDispose); e._setExecutionVariable(this, "meshPickObserver", t), e._setExecutionVariable(this, "meshDisposeObserver", r); } } _onDispose(e) { this._cancelPendingTasks(e), e._removePendingBlock(this); } /** * @internal */ _cancelPendingTasks(e) { const t = e._getExecutionVariable(this, "mesh"), i = e._getExecutionVariable(this, "meshPickObserver"), r = e._getExecutionVariable(this, "meshDisposeObserver"); t.getScene().onPointerObservable.remove(i), t.onDisposeObservable.remove(r), e._deleteExecutionVariable(this, "mesh"), e._deleteExecutionVariable(this, "meshPickObserver"), e._deleteExecutionVariable(this, "meshDisposeObserver"); } getClassName() { return TP.ClassName; } serialize(e) { super.serialize(e), e.config.path = this.config.path.serialize(); } } TP.ClassName = "FGMeshPickEventBlock"; Be(TP.ClassName, TP); var q8; (function(c) { c[c.Stopped = 0] = "Stopped", c[c.Started = 1] = "Started"; })(q8 || (q8 = {})); class SP { /** * Construct a Flow Graph * @param params construction parameters. currently only the scene */ constructor(e) { this._eventBlocks = [], this._executionContexts = [], this.state = q8.Stopped, this._scene = e.scene, this._coordinator = e.coordinator, this._sceneDisposeObserver = this._scene.onDisposeObservable.add(() => this.dispose()); } /** * Create a context. 
A context represents one self contained execution for the graph, with its own variables. * @returns the context, where you can get and set variables */ createContext() { const e = new RB({ scene: this._scene, coordinator: this._coordinator }); return this._executionContexts.push(e), e; } getContext(e) { return this._executionContexts[e]; } /** * Add an event block. When the graph is started, it will start listening to events * from the block and execute the graph when they are triggered. * @param block */ addEventBlock(e) { this._eventBlocks.push(e); } /** * Starts the flow graph. Initializes the event blocks and starts listening to events. */ start() { if (this.state !== q8.Started) { this.state = q8.Started, this._executionContexts.length === 0 && this.createContext(); for (const e of this._executionContexts) { const t = this._getContextualOrder(e); for (const i of t) i._startPendingTasks(e); } } } _getContextualOrder(e) { const t = []; for (const i of this._eventBlocks) if (i.getClassName() === TP.ClassName) { const r = i._getReferencedMesh(e); let s = 0; for (; s < t.length; s++) { const a = t[s]._getReferencedMesh(e); if (r && a && gj(r, a)) break; } t.splice(s, 0, i); } else t.push(i); return t; } /** * Disposes of the flow graph. Cancels any pending tasks and removes all event listeners. */ dispose() { if (this.state !== q8.Stopped) { this.state = q8.Stopped; for (const e of this._executionContexts) e._clearPendingBlocks(); this._executionContexts.length = 0, this._eventBlocks.length = 0, this._scene.onDisposeObservable.remove(this._sceneDisposeObserver), this._sceneDisposeObserver = null; } } /** * Executes a function in all blocks of a flow graph, starting with the event blocks. * @param visitor the function to execute. */ visitAllBlocks(e) { const t = [], i = /* @__PURE__ */ new Set(); for (const r of this._eventBlocks) t.push(r), i.add(r.uniqueId); for (; t.length > 0; ) { const r = t.pop(); e(r); for (const s of r.dataInputs) for (const n of s._connectedPoint) i.has(n._ownerBlock.uniqueId) || (t.push(n._ownerBlock), i.add(n._ownerBlock.uniqueId)); if (r instanceof n6) for (const s of r.signalOutputs) for (const n of s._connectedPoint) i.has(n._ownerBlock.uniqueId) || (t.push(n._ownerBlock), i.add(n._ownerBlock.uniqueId)); } } /** * Serializes a graph * @param serializationObject the object to write the values in * @param valueSerializeFunction a function to serialize complex values */ serialize(e = {}, t) { e.allBlocks = [], this.visitAllBlocks((i) => { const r = {}; i.serialize(r), e.allBlocks.push(r); }), e.executionContexts = []; for (const i of this._executionContexts) { const r = {}; i.serialize(r, t), e.executionContexts.push(r); } } /** * Given a list of blocks, find an output data connection that has a specific unique id * @param blocks * @param uniqueId * @returns */ static GetDataOutConnectionByUniqueId(e, t) { for (const i of e) for (const r of i.dataOutputs) if (r.uniqueId === t) return r; throw new Error("Could not find data out connection with unique id " + t); } /** * Given a list of blocks, find an input signal connection that has a specific unique id * @param blocks * @param uniqueId * @returns */ static GetSignalInConnectionByUniqueId(e, t) { for (const i of e) if (i instanceof n6) { for (const r of i.signalInputs) if (r.uniqueId === t) return r; } throw new Error("Could not find signal in connection with unique id " + t); } /** * Parses a graph from a given serialization object * @param serializationObject the object where the values are written * @param 
coordinator the flow graph coordinator * @param valueParseFunction a function to parse complex values in a scene * @returns */ static Parse(e, t, i) { const r = t.createGraph(), s = []; for (const n of e.allBlocks) { const a = MT.Parse(n, t.config.scene, i); s.push(a), a instanceof Mw && r.addEventBlock(a); } for (const n of s) { for (const a of n.dataInputs) for (const l of a.connectedPointIds) { const o = SP.GetDataOutConnectionByUniqueId(s, l); a.connectTo(o); } if (n instanceof n6) for (const a of n.signalOutputs) for (const l of a.connectedPointIds) { const o = SP.GetSignalInConnectionByUniqueId(s, l); a.connectTo(o); } } for (const n of e.executionContexts) RB.Parse(n, r, i); return r; } } class eAe { } class oP { constructor(e) { var t; this.config = e, this._flowGraphs = [], this._customEventsMap = /* @__PURE__ */ new Map(), this.config.scene.onDisposeObservable.add(() => { this.dispose(); }), ((t = oP.SceneCoordinators.get(this.config.scene)) !== null && t !== void 0 ? t : []).push(this); } /** * Creates a new flow graph and adds it to the list of existing flow graphs * @returns a new flow graph */ createGraph() { const e = new SP({ scene: this.config.scene, coordinator: this }); return this._flowGraphs.push(e), e; } /** * Removes a flow graph from the list of existing flow graphs and disposes it * @param graph the graph to remove */ removeGraph(e) { const t = this._flowGraphs.indexOf(e); t !== -1 && (e.dispose(), this._flowGraphs.splice(t, 1)); } /** * Starts all graphs */ start() { this._flowGraphs.forEach((e) => e.start()); } /** * Disposes all graphs */ dispose() { var e; this._flowGraphs.forEach((r) => r.dispose()), this._flowGraphs.length = 0; const t = (e = oP.SceneCoordinators.get(this.config.scene)) !== null && e !== void 0 ? e : [], i = t.indexOf(this); i !== -1 && t.splice(i, 1); } serialize(e, t) { e._flowGraphs = [], this._flowGraphs.forEach((i) => { const r = {}; i.serialize(r, t), e._flowGraphs.push(r); }); } static Parse(e, t, i) { var r; const s = new oP({ scene: t }); return (r = e._flowGraphs) === null || r === void 0 || r.forEach((n) => { SP.Parse(n, s, i); }), s; } /** * Gets the list of flow graphs */ get flowGraphs() { return this._flowGraphs; } /* Get an observable that will be notified when the event with the given id is fired. * @param id the id of the event * @returns the observable for the event */ getCustomEventObservable(e) { let t = this._customEventsMap.get(e); return t || (t = new Fe(), this._customEventsMap.set(e, t)), t; } /** * Notifies the observable for the given event id with the given data. 
* @param id the id of the event * @param data the data to send with the event */ notifyCustomEvent(e, t) { const i = this._customEventsMap.get(e); i && i.notifyObservers(t); } } oP.SceneCoordinators = /* @__PURE__ */ new Map(); class tAe { constructor(e) { this._context = e, this._context.onNodeExecutedObservable.add((t) => { Ve.Log(`Node executed: ${t.getClassName()}`); }); } } class Tg extends n6 { constructor(e) { super(e), this.out = this._registerSignalOutput("out"); } } class MP extends Tg { constructor(e) { super(e), this.message = this.registerDataInput("message", Ci); } /** * @internal */ _execute(e) { const t = this.message.getValue(e); Ce.Log(t), this.out._activateSignal(e); } getClassName() { return MP.ClassName; } } MP.ClassName = "FGConsoleLogBlock"; Be(MP.ClassName, MP); class RP extends Tg { constructor(e) { super(e), this.config = e, this.input = this.registerDataInput(e.variableName, Ci); } _execute(e) { const t = this.config.variableName, i = this.input.getValue(e); e.setVariable(t, i), this.out._activateSignal(e); } getClassName() { return RP.ClassName; } } RP.ClassName = "FGSetVariableBlock"; Be(RP.ClassName, RP); class PB { constructor(e, t) { this.templateStringInputs = [], this.path = e, this.ownerBlock = t; for (const i of e.getTemplateStrings()) this.templateStringInputs.push(this.ownerBlock.registerDataInput(i, Rs)); } /** * Get the inputs of all of the numeric data inputs and use them to fill in the * template strings in the path. * @param context the context to use to get the values of the numeric data inputs * @returns the path with the template strings filled in */ substitutePath(e) { for (const t of this.templateStringInputs) { const i = t.getValue(e), r = t.name; this.path.setTemplateSubstitution(r, i); } return this.path; } /** * Substitutes the template strings in the path and gets the property on the target object. * @param context * @returns */ getProperty(e) { return this.substitutePath(e), this.path.getProperty(e); } /** * Substitutes the template strings in the path and sets the property on the target object. * @param context * @param value */ setProperty(e, t) { this.substitutePath(e), this.path.setProperty(e, t); } } class Rw extends Tg { constructor(e) { super(e), this.config = e, this.a = this.registerDataInput("a", Ci), this.templateComponent = new PB(e.path, this); } _execute(e) { const t = this.a.getValue(e); this.templateComponent.setProperty(e, t), this.out._activateSignal(e); } serialize(e = {}) { super.serialize(e), e.config.path = this.config.path.serialize(); } getClassName() { return Rw.ClassName; } } Rw.ClassName = "FGSetPropertyBlock"; Be("FGSetPropertyBlock", Rw); class Pw extends Tg { constructor(e) { super(e), this.config = e; } configure() { super.configure(); for (let e = 0; e < this.config.eventData.length; e++) { const t = this.config.eventData[e]; this.registerDataInput(t, Ci); } } _execute(e) { const t = this.config.eventId, i = this.dataInputs.map((r) => r.getValue(e)); e.configuration.coordinator.notifyCustomEvent(t, i), this.out._activateSignal(e); } getClassName() { return Pw.ClassName; } } Pw.ClassName = "FGSendCustomEventBlock"; Be("FGSendCustomEventBlock", Pw); class mae extends n6 { constructor(e) { super(e), this.condition = this.registerDataInput("condition", Th), this.onTrue = this._registerSignalOutput("onTrue"), this.onFalse = this._registerSignalOutput("onFalse"); } _execute(e) { this.condition.getValue(e) ? 
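/*
 * Editor's note (not part of the original bundle): a minimal end-to-end sketch wiring the pieces
 * defined in this section: oP (coordinator), SP (graph), TP (mesh pick event block), MP (console log
 * block) and ym (path). scene and myMesh are assumed to already exist; "pickable" is an illustrative
 * variable name that the pick block's path resolves against context.userVariables.
 *
 *   const coordinator = new oP({ scene });
 *   const graph = coordinator.createGraph();
 *   const context = graph.createContext();
 *   context.setVariable("pickable", myMesh);
 *
 *   const pick = new TP({ path: new ym("/pickable") });
 *   const log = new MP({ name: "log" });
 *   pick.out.connectTo(log.in);    // signal: when the mesh is picked, the log block executes
 *   graph.addEventBlock(pick);
 *   coordinator.start();           // starts listening for pointer picks on myMesh
 *
 * log.message is a data input; it can be fed from a constant or get-variable block defined further below.
 */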
this.onTrue._activateSignal(e) : this.onFalse._activateSignal(e); } getClassName() { return "FGBranchBlock"; } } Be("FGBranchBlock", mae); class PP extends Tg { constructor(e = { startIndex: 0 }) { super(e), this.config = e, this.reset = this._registerSignalInput("reset"), this.n = this.registerDataInput("n", Rs), this.value = this.registerDataOutput("value", Rs); } _execute(e, t) { if (t === this.reset) this.value.setValue(this.config.startIndex, e); else { const i = this.value.getValue(e); i < this.n.getValue(e) && (this.value.setValue(i + 1, e), this.out._activateSignal(e)); } } getClassName() { return PP.ClassName; } } PP.ClassName = "FGDoNBlock"; Be(PP.ClassName, PP); class gae extends Tg { constructor(e) { super(e), this.startIndex = this.registerDataInput("startIndex", Rs), this.endIndex = this.registerDataInput("endIndex", Rs), this.step = this.registerDataInput("step", Rs), this.index = this.registerDataOutput("index", Rs), this.onLoop = this._registerSignalOutput("onLoop"); } _executeLoop(e) { let t = e._getExecutionVariable(this, "index"); const i = e._getExecutionVariable(this, "endIndex"); if (t < i) { this.index.setValue(t, e), this.onLoop._activateSignal(e); const r = e._getExecutionVariable(this, "step", 1); t += r, e._setExecutionVariable(this, "index", t), this._executeLoop(e); } else this.out._activateSignal(e); } /** * @internal */ _execute(e) { const t = this.startIndex.getValue(e), i = this.endIndex.getValue(e), r = this.step.getValue(e); e._setExecutionVariable(this, "index", t), e._setExecutionVariable(this, "endIndex", i), e._setExecutionVariable(this, "step", r), this._executeLoop(e); } getClassName() { return "FGForLoopBlock"; } } Be("FGForLoopBlock", gae); class vae extends Tg { constructor(e) { super(e), this.reset = this._registerSignalInput("reset"), this.duration = this.registerDataInput("duration", Rs), this.timeRemaining = this.registerDataOutput("timeRemaining", Rs); } _execute(e, t) { const i = e._getExecutionVariable(this, "lastExecutedTime"), r = this.duration.getValue(e), s = Date.now(); if (t === this.reset || i === void 0 || s - i > r) this.timeRemaining.setValue(0, e), this.out._activateSignal(e), e._setExecutionVariable(this, "lastExecutedTime", s); else { const n = r - (s - i); this.timeRemaining.setValue(n, e); } } getClassName() { return "FGThrottleBlock"; } } Be("FGThrottleBlock", vae); class Iw extends mj { constructor(e) { super(e), this.timeout = this.registerDataInput("timeout", Rs); } _preparePendingTasks(e) { const t = this.timeout.getValue(e); if (t !== void 0 && t >= 0) { const i = e._getExecutionVariable(this, "runningTimers") || [], r = e.configuration.scene, s = new lre({ timeout: t, contextObservable: r.onBeforeRenderObservable, onEnded: () => this._onEnded(s, e) }); s.start(), i.push(s), e._setExecutionVariable(this, "runningTimers", i); } } /** * @internal */ _execute(e) { this._startPendingTasks(e), this.out._activateSignal(e); } _onEnded(e, t) { const i = t._getExecutionVariable(this, "runningTimers") || [], r = i.indexOf(e); r !== -1 ? 
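/*
 * Editor's note (not part of the original bundle): a minimal sketch of the for-loop block above (gae,
 * "FGForLoopBlock"). body and afterLoop are hypothetical execution blocks; context is an existing
 * execution context. Unconnected data inputs can be primed with setValue, as shown.
 *
 *   const loop = new gae({});
 *   loop.startIndex.setValue(0, context);
 *   loop.endIndex.setValue(5, context);
 *   loop.step.setValue(1, context);
 *   loop.onLoop.connectTo(body.in);      // fired once per iteration; loop.index carries the current value
 *   loop.out.connectTo(afterLoop.in);    // fired after the last iteration
 */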
i.splice(r, 1) : Ve.Warn("FlowGraphTimerBlock: Timer ended but was not found in the running timers list"), t._removePendingBlock(this), this.done._activateSignal(t); } _cancelPendingTasks(e) { const t = e._getExecutionVariable(this, "runningTimers") || []; for (const i of t) i.dispose(); e._deleteExecutionVariable(this, "runningTimers"); } getClassName() { return Iw.ClassName; } } Iw.ClassName = "FGTimerBlock"; Be("FGTimerBlock", Iw); class Aae extends n6 { constructor(e) { super(e), this.config = e, this._cachedUnusedIndexes = [], this.reset = this._registerSignalInput("reset"), this.currentIndex = this.registerDataOutput("currentIndex", Rs); } configure() { super.configure(), this.config.startIndex = this.config.startIndex !== void 0 ? this.config.startIndex : 0, this.config.startIndex = Math.max(0, Math.min(this.config.startIndex, this.config.numberOutputFlows - 1)), this.outFlows = []; for (let e = 0; e < this.config.numberOutputFlows; e++) this.outFlows.push(this._registerSignalOutput(`out${e}`)); } _getUnusedIndexes(e) { const t = this._cachedUnusedIndexes; if (t.length = 0, e._hasExecutionVariable(this, "unusedIndexes")) { const i = e._getExecutionVariable(this, "unusedIndexes"); for (let r = 0; r < i.length; r++) t.push(i[r]); } else for (let i = 0; i < this.config.numberOutputFlows; i++) t.push(i); return t; } _getNextOutput(e, t) { if (this.config.isRandom) { const i = Math.floor(Math.random() * t.length); return t[i]; } else return e + 1; } _execute(e, t) { var i; const r = (i = e._getExecutionVariable(this, "currentIndex")) !== null && i !== void 0 ? i : this.config.startIndex - 1; let s = this._getUnusedIndexes(e); if (t === this.reset) { e._deleteExecutionVariable(this, "currentIndex"), e._deleteExecutionVariable(this, "unusedIndexes"); return; } let n = this._getNextOutput(r, s); if (n >= this.config.numberOutputFlows && this.config.loop) n = 0; else if (n >= this.config.numberOutputFlows && !this.config.loop) return; if (s = s.filter((a) => a !== n), s.length === 0) for (let a = 0; a < this.config.numberOutputFlows; a++) s.push(a); e._setExecutionVariable(this, "unusedIndexes", s), e._setExecutionVariable(this, "currentIndex", n), this.currentIndex.setValue(n, e), this.outFlows[n]._activateSignal(e); } getClassName() { return "FGMultiGateBlock"; } serialize(e) { super.serialize(e), e.config.numberOutputFlows = this.config.numberOutputFlows, e.config.isRandom = this.config.isRandom, e.config.loop = this.config.loop, e.config.startIndex = this.config.startIndex; } } Be("FGMultiGateBlock", Aae); class yae extends n6 { constructor(e) { super(e), this.config = e, this.selection = this.registerDataInput("selection", Ci); } configure() { super.configure(), this.outputFlows = []; for (let e = 0; e <= this.config.cases.length; e++) this.outputFlows.push(this._registerSignalOutput(`out${e}`)); } _execute(e, t) { const i = this.selection.getValue(e); for (let r = 0; r < this.config.cases.length; r++) if (i === this.config.cases[r]) { this.outputFlows[r]._activateSignal(e); return; } this.outputFlows[this.outputFlows.length - 1]._activateSignal(e); } getClassName() { return "FGSwitchBlock"; } serialize(e) { super.serialize(e), e.cases = this.config.cases; } } Be("FGSwitchBlock", yae); class Cae extends Tg { constructor(e) { super(e), this.config = e, this.inFlows = [], this._cachedActivationState = [], this.reset = this._registerSignalInput("reset"); } configure() { for (let e = 1; e < this.config.numberInputFlows; e++) this.inFlows.push(this._registerSignalInput(`in${e}`)); } 
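/*
 * Editor's note (not part of the original bundle): a minimal sketch of the switch block above (yae,
 * "FGSwitchBlock"). The case values are illustrative; one signal output is registered per case plus a
 * final output that acts as the default branch.
 *
 *   const sw = new yae({ cases: ["red", "green", "blue"] });
 *   // sw.selection is compared to each case; out0..out2 fire on a match, out3 fires otherwise.
 */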
_getCurrentActivationState(e) { const t = this._cachedActivationState; if (t.length = 0, e._hasExecutionVariable(this, "activationState")) { const i = e._getExecutionVariable(this, "activationState"); for (let r = 0; r < i.length; r++) t.push(i[r]); } else for (let i = 0; i < this.config.numberInputFlows; i++) t.push(!1); return t; } _execute(e, t) { const i = this._getCurrentActivationState(e); if (t === this.reset) for (let r = 0; r < this.config.numberInputFlows; r++) i[r] = !1; else if (t === this.in) i[0] = !0; else { const r = this.inFlows.indexOf(t); r >= 0 && (i[r + 1] = !0); } if (e._setExecutionVariable(this, "activationState", i.slice()), i.every((r) => r)) { this.out._activateSignal(e); for (let r = 0; r < this.config.numberInputFlows; r++) i[r] = !1; } } getClassName() { return "FGWaitAllBlock"; } serialize(e) { super.serialize(e), e.config.numberInputFlows = this.config.numberInputFlows; } } Be("FGWaitAllBlock", Cae); class xae extends Tg { constructor(e) { super(e), this.count = this.registerDataOutput("count", Rs), this.reset = this._registerSignalInput("reset"); } _execute(e, t) { var i; if (t === this.reset) { e._setExecutionVariable(this, "count", 0), this.count.setValue(0, e); return; } const r = ((i = e._getExecutionVariable(this, "count")) !== null && i !== void 0 ? i : 0) + 1; e._setExecutionVariable(this, "count", r), this.count.setValue(r, e), this.out._activateSignal(e); } getClassName() { return "FGCounterBlock"; } } Be("FGCounterBlock", xae); class IP extends Tg { constructor(e) { super(e), this.config = e, this.condition = this.registerDataInput("condition", Th), this.loopBody = this._registerSignalOutput("loopBody"); } _execute(e, t) { var i; let r = this.condition.getValue(e); for (!((i = this.config) === null || i === void 0) && i.isDo && !r && this.loopBody._activateSignal(e); r; ) this.loopBody._activateSignal(e), r = this.condition.getValue(e); this.out._activateSignal(e); } getClassName() { return IP.ClassName; } serialize(e) { var t; super.serialize(e), e.isDo = (t = this.config) === null || t === void 0 ? void 0 : t.isDo; } } IP.ClassName = "FGWhileLoopBlock"; Be(IP.ClassName, IP); class bae extends Tg { constructor(e) { super(e), this.count = this.registerDataInput("count", Rs), this.reset = this._registerSignalInput("reset"), this.currentCount = this.registerDataOutput("currentCount", Rs); } _execute(e, t) { if (t === this.reset) { e._setExecutionVariable(this, "debounceCount", 0); return; } const i = this.count.getValue(e), s = e._getExecutionVariable(this, "debounceCount", 0) + 1; this.currentCount.setValue(s, e), e._setExecutionVariable(this, "debounceCount", s), s >= i && (this.out._activateSignal(e), e._setExecutionVariable(this, "debounceCount", 0)); } getClassName() { return "FGDebounceBlock"; } } Be("FGDebounceBlock", bae); class Eae extends n6 { constructor(e) { super(e), this.onOn = this._registerSignalOutput("onOn"), this.onOff = this._registerSignalOutput("onOff"), this.isOn = this.registerDataOutput("isOn", Th); } _execute(e, t) { let i = e._getExecutionVariable(this, "value", !1); i = !i, e._setExecutionVariable(this, "value", i), this.isOn.setValue(i, e), i ? 
this.onOn._activateSignal(e) : this.onOff._activateSignal(e); } getClassName() { return "FGFlipFlopBlock"; } } Be("FGFlipFlopBlock", Eae); class DP extends n6 { constructor(e) { super(e), this.config = e; } configure() { super.configure(), this.outFlows = []; for (let e = 0; e < this.config.numberOutputFlows; e++) this.outFlows.push(this._registerSignalOutput(`${e}`)); } _execute(e) { for (let t = 0; t < this.config.numberOutputFlows; t++) this.outFlows[t]._activateSignal(e); } getClassName() { return DP.ClassName; } } DP.ClassName = "FGSequenceBlock"; Be(DP.ClassName, DP); class Tae extends mj { constructor(e) { super(e), this.config = e, this.templateTargetComponent = new PB(e.targetPath, this), this.templateAnimationComponent = new PB(e.animationPath, this), this.speed = this.registerDataInput("speed", Rs), this.loop = this.registerDataInput("loop", Th), this.from = this.registerDataInput("from", Rs), this.to = this.registerDataInput("to", Rs), this.runningAnimatable = this.registerDataOutput("runningAnimatable", Ci); } /** * @internal * @param context */ _preparePendingTasks(e) { var t; const i = this.templateTargetComponent.getProperty(e), r = this.templateAnimationComponent.getProperty(e); if (!i || !r) throw new Error("Cannot play animation without target or animation"); const s = (t = e._getExecutionVariable(this, "runningAnimatables")) !== null && t !== void 0 ? t : [], n = this.runningAnimatable.getValue(e); if (n && n.paused) n.restart(); else { const l = e.configuration.scene.beginDirectAnimation(i, [r], this.from.getValue(e), this.to.getValue(e), this.loop.getValue(e), this.speed.getValue(e), () => this._onAnimationEnd(l, e)); this.runningAnimatable.setValue(l, e), s.push(l); } e._setExecutionVariable(this, "runningAnimatables", s); } _execute(e) { this._startPendingTasks(e), this.out._activateSignal(e); } _onAnimationEnd(e, t) { var i; const r = (i = t._getExecutionVariable(this, "runningAnimatables")) !== null && i !== void 0 ? i : [], s = r.indexOf(e); s !== -1 && r.splice(s, 1), t._removePendingBlock(this), this.done._activateSignal(t); } /** * @internal * Stop any currently running animations. */ _cancelPendingTasks(e) { var t; const i = (t = e._getExecutionVariable(this, "runningAnimatables")) !== null && t !== void 0 ? 
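/*
 * The expression interrupted here, (t = expr) !== null && t !== void 0 ? t : [], is just a down-leveled `expr ?? []`.
 * So _cancelPendingTasks of this play-animation block reads the "runningAnimatables" execution variable (defaulting to an
 * empty array), stops every Animatable it started via beginDirectAnimation in _preparePendingTasks, and then deletes the
 * variable. A hedged readable sketch:
 *
 *   const running = context._getExecutionVariable(this, "runningAnimatables") ?? [];
 *   for (const animatable of running) animatable.stop();
 *   context._deleteExecutionVariable(this, "runningAnimatables");
 */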
t : []; for (const r of i) r.stop(); e._deleteExecutionVariable(this, "runningAnimatables"); } getClassName() { return "FGPlayAnimationBlock"; } serialize(e = {}) { super.serialize(e), e.config.targetPath = this.config.targetPath.serialize(), e.config.animationPath = this.config.animationPath.serialize(); } } Be("FGPlayAnimationBlock", Tae); class Sae extends Tg { constructor(e) { super(e), this.animationToStop = this.registerDataInput("animationToStop", Ci); } _execute(e) { this.animationToStop.getValue(e).stop(), this.out._activateSignal(e); } getClassName() { return "FGStopAnimationBlock"; } } Be("FGStopAnimationBlock", Sae); class Mae extends Tg { constructor(e) { super(e), this.animationToPause = this.registerDataInput("animationToPause", Ci); } _execute(e) { this.animationToPause.getValue(e).pause(), this.out._activateSignal(e); } getClassName() { return "FGPauseAnimationBlock"; } } Be("FGPauseAnimationBlock", Mae); class Rae extends MT { constructor(e) { super(e), this.condition = this.registerDataInput("condition", Th), this.trueValue = this.registerDataInput("trueValue", Ci), this.falseValue = this.registerDataInput("falseValue", Ci), this.output = this.registerDataOutput("output", Ci); } /** * @internal */ _updateOutputs(e) { this.output.setValue(this.condition.getValue(e) ? this.trueValue.getValue(e) : this.falseValue.getValue(e), e); } getClassName() { return "FGConditionalDataBlock"; } } Be("FGConditionalDataBlock", Rae); class OP extends MT { /** * Construct a FlowGraphGetVariableBlock. * @param params optional construction parameters */ constructor(e) { super(e), this.config = e, this.output = this.registerDataOutput(e.variableName, Ci); } /** * @internal */ _updateOutputs(e) { const t = this.config.variableName; e.hasVariable(t) && this.output.setValue(e.getVariable(t), e); } getClassName() { return OP.ClassName; } serialize(e) { super.serialize(e), e.config.variableName = this.config.variableName; } } OP.ClassName = "FGGetVariableBlock"; Be(OP.ClassName, OP); class Pae extends MT { /** * Creates a new FlowGraphCoordinateTransformBlock */ constructor(e) { super(e), this.sourceSystem = this.registerDataInput("sourceSystem", Ci), this.destinationSystem = this.registerDataInput("destinationSystem", Ci), this.inputCoordinates = this.registerDataInput("inputCoordinates", ex), this.outputCoordinates = this.registerDataOutput("outputCoordinates", ex); } _updateOutputs(e) { const t = this.sourceSystem.getValue(e), i = this.destinationSystem.getValue(e), r = this.inputCoordinates.getValue(e), s = t.getWorldMatrix(), n = i.getWorldMatrix(), a = de.Matrix[0].copyFrom(n); a.invert(); const l = de.Matrix[1]; a.multiplyToRef(s, l); const o = this.outputCoordinates.getValue(e); D.TransformCoordinatesToRef(r, l, o); } getClassName() { return "FGCoordinateTransformBlock"; } } Be("FGCoordinateTransformBlock", Pae); class Iae extends MT { constructor(e) { super(e), this.config = e, this.output = this.registerDataOutput("output", dae(e.value)); } _updateOutputs(e) { this.output.setValue(this.config.value, e); } getClassName() { return "FGConstantBlock"; } serialize(e = {}, t = _j) { super.serialize(e), t("value", this.config.value, e.config); } } Be("FGConstantBlock", Iae); class wP extends MT { constructor(e) { super(e), this.config = e, this.value = this.registerDataOutput("value", Ci), this.templateComponent = new PB(e.path, this); } _updateOutputs(e) { const t = this.templateComponent.getProperty(e); this.value.setValue(t, e); } getClassName() { return wP.ClassName; } } wP.ClassName = 
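/*
 * The class whose name is assigned right after this comment (wP, exported as FlowGraphGetPropertyBlock) resolves its
 * configured path through a template path component and copies the resulting property into its "value" output each time
 * the graph pulls it. The base class defined next (JU) adds per-execution caching: it remembers the last computed value
 * under "cachedOperationValue" together with the "cachedExecutionId" that produced it, and only calls _doOperation again
 * when a new executionId is observed.
 */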
"FGGetPropertyBlock"; Be(wP.ClassName, wP); const Aq = "cachedOperationValue", yq = "cachedExecutionId"; class JU extends MT { constructor(e, t) { super(t), this.value = this.registerDataOutput("value", e); } _updateOutputs(e) { const t = e._getExecutionVariable(this, yq), i = e._getExecutionVariable(this, Aq); if (i !== void 0 && t === e.executionId) this.value.setValue(i, e); else { const r = this._doOperation(e); e._setExecutionVariable(this, Aq, r), e._setExecutionVariable(this, yq, e.executionId), this.value.setValue(r, e); } } } class vu extends JU { constructor(e, t, i, r, s, n) { super(i, n), this._operation = r, this._className = s, this.a = this.registerDataInput("a", e), this.b = this.registerDataInput("b", t); } _doOperation(e) { return this._operation(this.a.getValue(e), this.b.getValue(e)); } getClassName() { return this._className; } } class Ra extends JU { constructor(e, t, i, r, s) { super(t, s), this._operation = i, this._className = r, this.input = this.registerDataInput("input", e); } _doOperation(e) { return this._operation(this.input.getValue(e)); } getClassName() { return this._className; } } const B1 = "FGBitwise", Dae = "AndBlock", Oae = "OrBlock", wae = "XorBlock", Lae = "NotBlock", Nae = "LeftShiftBlock", Fae = "RightShiftBlock", Bae = "CountLeadingZerosBlock", Uae = "CountTrailingZerosBlock"; class Vae extends vu { constructor(e) { super(Rs, Rs, Rs, (t, i) => t & i, `${B1}${Dae}`, e); } } Be(`${B1}${Dae}`, Vae); class kae extends vu { constructor(e) { super(Rs, Rs, Rs, (t, i) => t | i, `${B1}${Oae}`, e); } } Be(`${B1}${Oae}`, kae); class zae extends vu { constructor(e) { super(Rs, Rs, Rs, (t, i) => t ^ i, `${B1}${wae}`, e); } } Be(`${B1}${wae}`, zae); class Hae extends Ra { constructor(e) { super(Rs, Rs, (t) => ~t, `${B1}${Lae}`, e); } } Be(`${B1}${Lae}`, Hae); class Gae extends vu { constructor(e) { super(Rs, Rs, Rs, (t, i) => t << i, `${B1}${Nae}`, e); } } Be(`${B1}${Nae}`, Gae); class Kae extends vu { constructor(e) { super(Rs, Rs, Rs, (t, i) => t >> i, `${B1}${Fae}`, e); } } Be(`${B1}${Fae}`, Kae); class Wae extends Ra { constructor(e) { super(Rs, Rs, (t) => Math.clz32(t), `${B1}${Bae}`, e); } } Be(`${B1}${Bae}`, Wae); class jae extends Ra { // from: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/clz32#implementing_count_leading_ones_and_beyond _ctrz(e) { return e >>>= 0, e === 0 ? 
32 : (e &= -e, 31 - Math.clz32(e)); } constructor(e) { super(Rs, Rs, (t) => this._ctrz(t), `${B1}${Uae}`, e); } } Be(`${B1}${Uae}`, jae); const Dw = "FGLogic", Xae = "AndBlock", Yae = "OrBlock", Qae = "NotBlock"; class $ae extends vu { constructor(e) { super(Th, Th, Th, (t, i) => t && i, `${Dw}${Xae}`, e); } } Be(`${Dw}${Xae}`, $ae); class Zae extends vu { constructor(e) { super(Th, Th, Th, (t, i) => t || i, `${Dw}${Yae}`, e); } } Be(`${Dw}${Yae}`, Zae); class qae extends Ra { constructor(e) { super(Th, Th, (t) => !t, `${Dw}${Qae}`, e); } } Be(`${Dw}${Qae}`, qae); class MN extends JU { constructor(e, t, i, r) { super(e, r), this._operation = t, this._className = i; } _doOperation(e) { return this._operation(); } getClassName() { return this._className; } } class vj extends JU { constructor(e, t, i, r, s, n, a) { super(r, a), this._operation = s, this._className = n, this.a = this.registerDataInput("a", e), this.b = this.registerDataInput("b", t), this.c = this.registerDataInput("c", i); } _doOperation(e) { return this._operation(this.a.getValue(e), this.b.getValue(e), this.c.getValue(e)); } getClassName() { return this._className; } } function Sh(c) { return c.getClassName ? c.getClassName() : ""; } function RN(c, e) { return c === "Vector2" && e === "Vector2" || c === "Vector3" && e === "Vector3" || c === "Vector4" && e === "Vector4"; } class v5 extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicAdd(t, i), v5.ClassName, e); } _polymorphicAdd(e, t) { const i = Sh(e), r = Sh(t); return RN(i, r) ? e.add(t) : e + t; } getClassName() { return v5.ClassName; } } v5.ClassName = "FGAddBlock"; Be(v5.ClassName, v5); class A5 extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicAdd(t, i), A5.ClassName, e); } _polymorphicAdd(e, t) { const i = Sh(e), r = Sh(t); return RN(i, r) ? e.subtract(t) : e - t; } getClassName() { return A5.ClassName; } } A5.ClassName = "FGSubBlock"; Be(A5.ClassName, A5); class LP extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicMultiply(t, i), LP.ClassName, e); } _polymorphicMultiply(e, t) { const i = Sh(e), r = Sh(t); return RN(i, r) ? e.multiply(t) : e * t; } } LP.ClassName = "FGMultiplyBlock"; Be(LP.ClassName, LP); class NP extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicDivide(t, i), NP.ClassName, e); } _polymorphicDivide(e, t) { const i = Sh(e), r = Sh(t); return RN(i, r) ? 
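/*
 * The arithmetic blocks above (v5/A5/LP/NP, exported as FlowGraphAdd/Subtract/Multiply/DivideBlock) are polymorphic:
 * Sh() reads getClassName() from each operand and RN() checks that both report the same Vector2/Vector3/Vector4 type.
 * When they do, the matching vector method is used (as in the consequent right after this comment); otherwise the plain
 * numeric operator applies. Sketch: _polymorphicDivide(a, b) is effectively
 *   sameVectorType(a, b) ? a.divide(b) : a / b
 * so two Vector3 inputs go through Vector3.divide, while two numbers fall back to `/`.
 */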
e.divide(t) : e / t; } } NP.ClassName = "FGDivideBlock"; Be(NP.ClassName, NP); class FP extends MN { constructor(e) { super(Rs, () => Math.random(), FP.ClassName, e); } } FP.ClassName = "FGRandomBlock"; Be(FP.ClassName, FP); class BP extends vu { constructor(e) { super(Ci, Ci, Rs, (t, i) => this._polymorphicDot(t, i), BP.ClassName, e); } _polymorphicDot(e, t) { switch (Sh(e)) { case "Vector2": return at.Dot(e, t); case "Vector3": return D.Dot(e, t); case "Vector4": return Di.Dot(e, t); default: throw new Error(`Cannot get dot product of ${e} and ${t}`); } } } BP.ClassName = "FGDotBlock"; Be(BP.ClassName, BP); class UP extends MN { constructor(e) { super(Rs, () => Math.E, UP.ClassName, e); } } UP.ClassName = "FGEBlock"; Be(UP.ClassName, UP); class VP extends MN { constructor(e) { super(Rs, () => Math.PI, VP.ClassName, e); } } VP.ClassName = "FGPIBlock"; Be(VP.ClassName, VP); class kP extends MN { constructor(e) { super(Rs, () => Number.POSITIVE_INFINITY, kP.ClassName, e); } } kP.ClassName = "FGInfBlock"; Be(kP.ClassName, kP); class zP extends MN { constructor(e) { super(Rs, () => Number.NaN, zP.ClassName, e); } } zP.ClassName = "FGNaNBlock"; Be(zP.ClassName, zP); function Tl(c, e) { switch (Sh(c)) { case "Vector2": return new at(e(c.x), e(c.y)); case "Vector3": return new D(e(c.x), e(c.y), e(c.z)); case "Vector4": return new Di(e(c.x), e(c.y), e(c.z), e(c.w)); default: return e(c); } } class HP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicAbs(t), HP.ClassName, e); } _polymorphicAbs(e) { return Tl(e, Math.abs); } } HP.ClassName = "FGAbsBlock"; Be(HP.ClassName, HP); class GP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicSign(t), GP.ClassName, e); } _polymorphicSign(e) { return Tl(e, Math.sign); } } GP.ClassName = "FGSignBlock"; Be(GP.ClassName, GP); class KP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicTrunc(t), KP.ClassName, e); } _polymorphicTrunc(e) { return Tl(e, Math.trunc); } } KP.ClassName = "FGTruncBlock"; Be(KP.ClassName, KP); class WP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicFloor(t), WP.ClassName, e); } _polymorphicFloor(e) { return Tl(e, Math.floor); } } WP.ClassName = "FGFloorBlock"; Be(WP.ClassName, WP); class jP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicCeiling(t), jP.ClassName, e); } _polymorphicCeiling(e) { return Tl(e, Math.ceil); } } jP.ClassName = "FGCeilBlock"; Be(jP.ClassName, jP); class XP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicFract(t), XP.ClassName, e); } _polymorphicFract(e) { return Tl(e, (t) => t - Math.floor(t)); } } XP.ClassName = "FGFractBlock"; Be(XP.ClassName, XP); class YP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicNeg(t), YP.ClassName, e); } _polymorphicNeg(e) { return Tl(e, (t) => -t); } } YP.ClassName = "FGNegBlock"; Be(YP.ClassName, YP); function PN(c, e, t) { switch (Sh(c)) { case "Vector2": return new at(t(c.x, e.x), t(c.y, e.y)); case "Vector3": return new D(t(c.x, e.x), t(c.y, e.y), t(c.z, e.z)); case "Vector4": return new Di(t(c.x, e.x), t(c.y, e.y), t(c.z, e.z), t(c.w, e.w)); default: return t(c, e); } } class QP extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicRemainder(t, i), QP.ClassName, e); } _polymorphicRemainder(e, t) { return PN(e, t, (i, r) => i % r); } } QP.ClassName = "FGRemainderBlock"; Be(QP.ClassName, QP); class $P extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicMin(t, i), $P.ClassName, e); } _polymorphicMin(e, 
t) { return PN(e, t, Math.min); } } $P.ClassName = "FGMinBlock"; Be($P.ClassName, $P); class ZP extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicMax(t, i), ZP.ClassName, e); } _polymorphicMax(e, t) { return PN(e, t, Math.max); } } ZP.ClassName = "FGMaxBlock"; Be(ZP.ClassName, ZP); function Jae(c, e, t) { return Math.min(Math.max(c, e), t); } function eoe(c, e, t, i) { switch (Sh(c)) { case "Vector2": return new at(i(c.x, e.x, t.x), i(c.y, e.y, t.y)); case "Vector3": return new D(i(c.x, e.x, t.x), i(c.y, e.y, t.y), i(c.z, e.z, t.z)); case "Vector4": return new Di(i(c.x, e.x, t.x), i(c.y, e.y, t.y), i(c.z, e.z, t.z), i(c.w, e.w, t.w)); default: return i(c, e, t); } } class qP extends vj { constructor(e) { super(Ci, Ci, Ci, Ci, (t, i, r) => this._polymorphicClamp(t, i, r), qP.ClassName, e); } _polymorphicClamp(e, t, i) { return eoe(e, t, i, Jae); } } qP.ClassName = "FGClampBlock"; Be(qP.ClassName, qP); class JP extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicSaturate(t), JP.ClassName, e); } _polymorphicSaturate(e) { return Tl(e, (t) => Jae(t, 0, 1)); } } JP.ClassName = "FGSaturateBlock"; Be(JP.ClassName, JP); class eI extends vj { constructor(e) { super(Ci, Ci, Ci, Ci, (t, i, r) => this._polymorphicInterpolate(t, i, r), eI.ClassName, e); } _interpolate(e, t, i) { return (1 - i) * e + i * t; } _polymorphicInterpolate(e, t, i) { return eoe(e, t, i, this._interpolate); } } eI.ClassName = "FGInterpolateBlock"; Be(eI.ClassName, eI); class tI extends vu { constructor(e) { super(Ci, Ci, Th, (t, i) => this._polymorphicEq(t, i), tI.ClassName, e); } _polymorphicEq(e, t) { const i = Sh(e), r = Sh(t); return RN(i, r) ? e.equals(t) : e === t; } } tI.ClassName = "FGEqBlock"; Be(tI.ClassName, tI); class iI extends vu { constructor(e) { super(Ci, Ci, Th, (t, i) => this._polymorphicLessThan(t, i), iI.ClassName, e); } _polymorphicLessThan(e, t) { switch (Sh(e)) { case "Vector2": return e.x < t.x && e.y < t.y; case "Vector3": return e.x < t.x && e.y < t.y && e.z < t.z; case "Vector4": return e.x < t.x && e.y < t.y && e.z < t.z && e.w < t.w; default: return e < t; } } } iI.ClassName = "FGLessThanBlock"; Be(iI.ClassName, iI); class IN extends vu { constructor(e) { super(Ci, Ci, Th, (t, i) => this._polymorphicLessThanOrEqual(t, i), IN.ClassName, e); } _polymorphicLessThanOrEqual(e, t) { switch (Sh(e)) { case "Vector2": return e.x <= t.x && e.y <= t.y; case "Vector3": return e.x <= t.x && e.y <= t.y && e.z <= t.z; case "Vector4": return e.x <= t.x && e.y <= t.y && e.z <= t.z && e.w <= t.w; default: return e <= t; } } } IN.ClassName = "FGLessThanOrEqualBlock"; class rI extends vu { constructor(e) { super(Ci, Ci, Th, (t, i) => this._polymorphicGreaterThan(t, i), rI.ClassName, e); } _polymorphicGreaterThan(e, t) { switch (Sh(e)) { case "Vector2": return e.x > t.x && e.y > t.y; case "Vector3": return e.x > t.x && e.y > t.y && e.z > t.z; case "Vector4": return e.x > t.x && e.y > t.y && e.z > t.z && e.w > t.w; default: return e > t; } } } rI.ClassName = "FGGreaterThanBlock"; Be(rI.ClassName, rI); class sI extends vu { constructor(e) { super(Ci, Ci, Th, (t, i) => this._polymorphicGreaterThanOrEqual(t, i), sI.ClassName, e); } _polymorphicGreaterThanOrEqual(e, t) { switch (Sh(e)) { case "Vector2": return e.x >= t.x && e.y >= t.y; case "Vector3": return e.x >= t.x && e.y >= t.y && e.z >= t.z; case "Vector4": return e.x >= t.x && e.y >= t.y && e.z >= t.z && e.w >= t.w; default: return e >= t; } } } sI.ClassName = "FGGreaterThanOrEqualBlock"; Be(sI.ClassName, sI); class nI extends 
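/*
 * The comparison blocks above (FGEq/LessThan/GreaterThan and their *OrEqual variants) treat vector operands
 * component-wise: the relation must hold for every component of a Vector2/3/4 before the boolean output becomes true.
 * The unary block declared right after this comment (nI, registered as "FGIsNanBlock") is the validity-check
 * counterpart: it reports true if any component of the input is NaN, or isNaN(value) for a plain number.
 */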
Ra { constructor(e) { super(Ci, Th, (t) => this._polymorphicIsNan(t), nI.ClassName, e); } _polymorphicIsNan(e) { switch (Sh(e)) { case "Vector2": return isNaN(e.x) || isNaN(e.y); case "Vector3": return isNaN(e.x) || isNaN(e.y) || isNaN(e.z); case "Vector4": return isNaN(e.x) || isNaN(e.y) || isNaN(e.z) || isNaN(e.w); default: return isNaN(e); } } } nI.ClassName = "FGIsNanBlock"; Be(nI.ClassName, nI); class DN extends Ra { constructor(e) { super(Ci, Th, (t) => this._polymorphicIsInf(t), DN.ClassName, e); } _polymorphicIsInf(e) { switch (Sh(e)) { case "Vector2": return !isFinite(e.x) || !isFinite(e.y); case "Vector3": return !isFinite(e.x) || !isFinite(e.y) || !isFinite(e.z); case "Vector4": return !isFinite(e.x) || !isFinite(e.y) || !isFinite(e.z) || !isFinite(e.w); default: return !isFinite(e); } } } DN.ClassName = "FGIsInfBlock"; class aI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicDegToRad(t), aI.ClassName, e); } _degToRad(e) { return e * Math.PI / 180; } _polymorphicDegToRad(e) { return Tl(e, this._degToRad); } } aI.ClassName = "FGDegToRadBlock"; Be(aI.ClassName, aI); class oI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicRadToDeg(t), oI.ClassName, e); } _radToDeg(e) { return e * 180 / Math.PI; } _polymorphicRadToDeg(e) { return Tl(e, this._radToDeg); } } oI.ClassName = "FGRadToDegBlock"; Be(oI.ClassName, oI); class lI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicSin(t), lI.ClassName, e); } _polymorphicSin(e) { return Tl(e, Math.sin); } } lI.ClassName = "FGSinBlock"; Be(lI.ClassName, lI); class cI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicCos(t), cI.ClassName, e); } _polymorphicCos(e) { return Tl(e, Math.cos); } } cI.ClassName = "FGCosBlock"; Be(cI.ClassName, cI); class uI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicTan(t), uI.ClassName, e); } _polymorphicTan(e) { return Tl(e, Math.tan); } } uI.ClassName = "FGTanBlock"; Be(uI.ClassName, uI); class hI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicAsin(t), hI.ClassName, e); } _polymorphicAsin(e) { return Tl(e, Math.asin); } } hI.ClassName = "FGAsinBlock"; Be(hI.ClassName, hI); class dI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicAcos(t), dI.ClassName, e); } _polymorphicAcos(e) { return Tl(e, Math.acos); } } dI.ClassName = "FGAcosBlock"; Be(dI.ClassName, dI); class fI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicAtan(t), fI.ClassName, e); } _polymorphicAtan(e) { return Tl(e, Math.atan); } } fI.ClassName = "FGAtanBlock"; Be(fI.ClassName, fI); class pI extends vu { constructor(e) { super(Ci, Ci, Ci, (t, i) => this._polymorphicAtan2(t, i), pI.ClassName, e); } _polymorphicAtan2(e, t) { return PN(e, t, Math.atan2); } } pI.ClassName = "FGAtan2Block"; Be(pI.ClassName, pI); class _I extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicSinh(t), _I.ClassName, e); } _polymorphicSinh(e) { return Tl(e, Math.sinh); } } _I.ClassName = "FGSinhBlock"; Be(_I.ClassName, _I); class mI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicCosh(t), mI.ClassName, e); } _polymorphicCosh(e) { return Tl(e, Math.cosh); } } mI.ClassName = "FGCoshBlock"; Be(mI.ClassName, mI); class gI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicTanh(t), gI.ClassName, e); } _polymorphicTanh(e) { return Tl(e, Math.tanh); } } gI.ClassName = "FGTanhBlock"; Be(gI.ClassName, gI); class vI extends Ra { constructor(e) { super(Ci, Rs, (t) => 
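/*
 * vI and the classes that follow wrap single-argument Math functions (asinh, acosh, atanh, exp, log, log2, log10, sqrt,
 * cbrt) as unary blocks. Each delegates to the Tl() helper above, which applies the function to a plain number or to
 * every component of a Vector2/3/4, and registers its "value" output with the numeric rich type (Rs). A hedged sketch of
 * the shared shape, using descriptive stand-ins for the minified names:
 *
 *   class FlowGraphAsinhBlock extends UnaryOperationBlock {   // UnaryOperationBlock stands in for Ra
 *     constructor(config) {
 *       super(RichTypeAny, RichTypeNumber, (v) => applyPerComponent(v, Math.asinh), "FGAsinhBlock", config);
 *     }
 *   }
 */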
this._polymorphicAsinh(t), vI.ClassName, e); } _polymorphicAsinh(e) { return Tl(e, Math.asinh); } } vI.ClassName = "FGAsinhBlock"; Be(vI.ClassName, vI); class AI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicAcosh(t), AI.ClassName, e); } _polymorphicAcosh(e) { return Tl(e, Math.acosh); } } AI.ClassName = "FGAcoshBlock"; Be(AI.ClassName, AI); class yI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicAtanh(t), yI.ClassName, e); } _polymorphicAtanh(e) { return Tl(e, Math.atanh); } } yI.ClassName = "FGAtanhBlock"; Be(yI.ClassName, yI); class CI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicExp(t), CI.ClassName, e); } _polymorphicExp(e) { return Tl(e, Math.exp); } } CI.ClassName = "FGExpBlock"; Be(CI.ClassName, CI); class xI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicLog(t), xI.ClassName, e); } _polymorphicLog(e) { return Tl(e, Math.log); } } xI.ClassName = "FGLogBlock"; Be(xI.ClassName, xI); class bI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicLog2(t), bI.ClassName, e); } _polymorphicLog2(e) { return Tl(e, Math.log2); } } bI.ClassName = "FGLog2Block"; Be(bI.ClassName, bI); class EI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicLog10(t), EI.ClassName, e); } _polymorphicLog10(e) { return Tl(e, Math.log10); } } EI.ClassName = "FGLog10Block"; Be(EI.ClassName, EI); class TI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicSqrt(t), TI.ClassName, e); } _polymorphicSqrt(e) { return Tl(e, Math.sqrt); } } TI.ClassName = "FGSqrtBlock"; Be(TI.ClassName, TI); class SI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicCubeRoot(t), SI.ClassName, e); } _polymorphicCubeRoot(e) { return Tl(e, Math.cbrt); } } SI.ClassName = "FGCubeRootBlock"; Be(SI.ClassName, SI); class MI extends vu { constructor(e) { super(Ci, Rs, Rs, (t, i) => this._polymorphicPow(t, i), MI.ClassName, e); } _polymorphicPow(e, t) { return PN(e, t, Math.pow); } } MI.ClassName = "FGPowBlock"; Be(MI.ClassName, MI); class RI extends Ra { constructor(e) { super(Ci, Rs, (t) => this._polymorphicLength(t), RI.ClassName, e); } _polymorphicLength(e) { switch (Sh(e)) { case "Vector2": case "Vector3": case "Vector4": return e.length(); default: throw new Error(`Cannot compute length of value ${e}`); } } } RI.ClassName = "FGLengthBlock"; Be(RI.ClassName, RI); class PI extends Ra { constructor(e) { super(Ci, Ci, (t) => this._polymorphicNormalize(t), PI.ClassName, e); } _polymorphicNormalize(e) { switch (Sh(e)) { case "Vector2": case "Vector3": case "Vector4": return e.normalize(); default: throw new Error(`Cannot normalize value ${e}`); } } } PI.ClassName = "FGNormalizeBlock"; Be(PI.ClassName, PI); class II extends vu { constructor(e) { super(ex, ex, ex, (t, i) => D.Cross(t, i), II.ClassName, e); } } II.ClassName = "FGCrossBlock"; Be(II.ClassName, II); class DI extends vu { constructor(e) { super(TB, Rs, TB, (t, i) => at.Transform(t, Ae.RotationZ(i)), DI.ClassName, e); } } DI.ClassName = "FGRotate2DBlock"; Be(DI.ClassName, DI); class OI extends vj { constructor(e) { super(ex, ex, Rs, ex, (t, i, r) => D.TransformCoordinates(t, Ae.RotationAxis(i, r)), OI.ClassName, e); } } OI.ClassName = "FGRotate3DBlock"; Be(OI.ClassName, OI); class Ow extends Mw { /** * @internal */ _preparePendingTasks(e) { if (!e._getExecutionVariable(this, "sceneReadyObserver")) { const i = e.configuration.scene.onReadyObservable.add(() => { this._execute(e); }); e._setExecutionVariable(this, "sceneReadyObserver", i); } 
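/*
 * Ow ("FGSceneReadyEventBlock") is an event block: _preparePendingTasks registers a single observer on
 * scene.onReadyObservable (the guard above keeps it from being added twice) and stores it in the "sceneReadyObserver"
 * execution variable; _cancelPendingTasks below removes that observer and deletes the variable, so the graph stops
 * reacting once the context is torn down. FGSceneTickEventBlock further down applies the same pattern to
 * scene.onBeforeRenderObservable.
 */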
} /** * @internal */ _cancelPendingTasks(e) { const t = e._getExecutionVariable(this, "sceneReadyObserver"); e.configuration.scene.onReadyObservable.remove(t), e._deleteExecutionVariable(this, "sceneReadyObserver"); } getClassName() { return Ow.ClassName; } } Ow.ClassName = "FGSceneReadyEventBlock"; Be("FGSceneReadyEventBlock", Ow); class wI extends Mw { constructor(e) { super(e), this.config = e; } configure() { super.configure(); for (let e = 0; e < this.config.eventData.length; e++) { const t = this.config.eventData[e]; this.registerDataOutput(t, Ci); } } _preparePendingTasks(e) { const t = e.configuration.coordinator.getCustomEventObservable(this.config.eventId); this._eventObserver = t.add((i) => { for (let r = 0; r < i.length; r++) this.dataOutputs[r].setValue(i[r], e); this._execute(e); }); } _cancelPendingTasks(e) { const t = e.configuration.coordinator.getCustomEventObservable(this.config.eventId); t ? t.remove(this._eventObserver) : Ve.Warn(`FlowGraphReceiveCustomEventBlock: Missing observable for event ${this.config.eventId}`); } getClassName() { return wI.ClassName; } serialize(e) { super.serialize(e), e.eventId = this.config.eventId, e.eventData = this.config.eventData; } } wI.ClassName = "FGReceiveCustomEventBlock"; Be(wI.ClassName, wI); class LI extends Mw { /** * @internal */ _preparePendingTasks(e) { if (!e._getExecutionVariable(this, "sceneBeforeRender")) { const i = e.configuration.scene.onBeforeRenderObservable.add(() => { this._execute(e); }); e._setExecutionVariable(this, "sceneBeforeRender", i); } } /** * @internal */ _cancelPendingTasks(e) { const t = e._getExecutionVariable(this, "sceneBeforeRender"); e.configuration.scene.onBeforeRenderObservable.remove(t), e._deleteExecutionVariable(this, "sceneBeforeRender"); } getClassName() { return LI.ClassName; } } LI.ClassName = "FGSceneTickEventBlock"; Be(LI.ClassName, LI); const iAe = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({ __proto__: null, AbstractActionManager: H_, AbstractAssetTask: p6, AbstractMesh: xr, AbstractScene: Yl, AcquireNativeObjectAsync: kie, Action: Sa, ActionEvent: Ro, ActionManager: Ln, AddBlock: Wre, get AddressMode() { return XR; }, AdvancedTimer: lre, AlignBlock: tne, AlphaState: ote, AmmoJSPlugin: ZA, AnaglyphArcRotateCamera: eie, AnaglyphFreeCamera: tie, AnaglyphGamepadCamera: iie, AnaglyphPostProcess: OK, AnaglyphUniversalCamera: rie, Analyser: bce, AndOrNotEvaluator: K8, Angle: WA, Animatable: hK, get AnimatedInputBlockTypes() { return tT; }, Animation: nt, AnimationAssetTask: gve, AnimationEvent: KB, AnimationGroup: S4, AnimationGroupMask: _ce, get AnimationGroupMaskMode() { return eL; }, get AnimationKeyInterpolation() { return $9; }, AnimationPropertiesOverride: ace, AnimationRange: hP, AnisotropyBlock: AN, ApplyPostProcess: vU, Arc2: vte, ArcFollowCamera: Qte, ArcRotateCamera: Pn, ArcRotateCameraGamepadInput: jL, ArcRotateCameraInputsManager: JB, ArcRotateCameraKeyboardMoveInput: H4, ArcRotateCameraMouseWheelInput: Aw, ArcRotateCameraPointersInput: N1, ArcRotateCameraVRDeviceOrientationInput: MK, ArcTan2Block: hse, AssetContainer: NL, get AssetTaskState() { return GC; }, AssetsManager: vve, AssetsProgressEvent: zne, AsyncLoop: ug, AttachToBoxBehavior: Sce, AudioEngine: wte, AudioSceneComponent: L1, get AutoLayoutMode() { return zO; }, AutoReleaseWorkerPool: xw, AutoRotationBehavior: Vte, AxesViewer: aT, Axis: bl, AxisDragGizmo: hg, AxisScaleGizmo: dg, BRDFTextureTools: M1e, BabylonFileLoaderConfiguration: j9, BackEase: yte, BackgroundMaterial: Ls, 
BakedVertexAnimationManager: VI, BallAndSocketConstraint: bne, BaseCameraMouseWheelInput: KL, BaseCameraPointersInput: qB, BaseError: dP, BaseParticleSystem: V4, BaseSixDofDragBehavior: NC, BaseTexture: dn, BasisTools: wU, BasisToolsOptions: CT, BasisTranscodeConfiguration: jme, BezierCurve: gte, BezierCurveEase: fce, BiPlanarBlock: Ise, BinaryFileAssetTask: Wne, BlackAndWhitePostProcess: yN, get BlendFactor() { return rf; }, get BlendOperation() { return ZE; }, BloomEffect: GH, BloomMergePostProcess: XU, BlurPostProcess: fu, Bone: ha, BoneAxesViewer: Lde, BoneIKController: pm, BoneLookController: Ed, BonesBlock: Sre, BooleanGeometryBlock: HU, get BooleanGeometryOperations() { return YA; }, BounceEase: oce, BouncingBehavior: a5, BoundingBlock: dne, BoundingBox: fg, BoundingBoxGizmo: hW, BoundingBoxRenderer: Ine, BoundingInfo: zf, BoundingSphere: e6, BoxBlock: BU, BoxBuilder: Nde, BoxParticleEmitter: o5, Buffer: hu, get BufferBindingType() { return uT; }, get BufferMapState() { return EH; }, get BufferUsage() { return ya; }, CSG: zA, Camera: Ai, CameraGizmo: fO, CameraInputTypes: Dd, CameraInputsManager: WL, CannonJSPlugin: gB, get CanvasAlphaMode() { return lL; }, CapsuleBlock: NW, CapsuleBuilder: Bde, CascadedShadowGenerator: vh, ChromaticAberrationPostProcess: d6, CircleEase: Ate, CircleOfConfusionPostProcess: QI, ClampBlock: NU, ClearCoatBlock: xP, ClipPlanesBlock: zre, ClipboardEventTypes: yg, ClipboardInfo: zF, CloudBlock: bW, CloudPoint: yne, Collider: rN, Color3: ze, Color3Gradient: gne, Color4: Et, ColorCorrectionPostProcess: CN, ColorCurves: so, ColorGradient: ej, ColorGradingTexture: d5, ColorMergerBlock: Zre, ColorSplitterBlock: TK, get ColorWrite() { return RH; }, CombineAction: Yee, get CompareFunction() { return yh; }, CompatibilityOptions: hn, get CompilationMessageType() { return SH; }, CompleteGreasedLineColorTable: Hse, CompleteGreasedLineWidthTable: zse, get ComputeBindingType() { return ro; }, ComputeEffect: mP, ComputeNormalsBlock: Yse, get ComputePassTimestampLocation() { return PH; }, ComputeShader: HI, ComputeShaderParticleSystem: mne, Condition: NO, ConditionBlock: HW, get ConditionBlockTests() { return Bu; }, ConditionalBlock: Mse, get ConditionalBlockConditions() { return um; }, ConeParticleEmitter: UL, Constants: et, ContainerAssetTask: Hne, get ConversionMode() { return xB; }, ConvolutionPostProcess: f6, get Coordinate() { return W8; }, CopyTextureToTexture: Une, CopyTools: Ece, CreateBox: B4, CreateBoxVertexData: cU, CreateCapsule: sN, CreateCapsuleVertexData: hU, CreateCylinder: Hf, CreateCylinderVertexData: lU, CreateDashedLines: pU, CreateDashedLinesVertexData: QK, CreateDecal: gU, CreateDisc: Cw, CreateDiscVertexData: dU, CreateEnvTextureAsync: Bie, CreateGeodesic: Eie, CreateGoldberg: Sie, CreateGoldbergVertexData: Tie, CreateGreasedLine: k0e, CreateGreasedLineMaterial: kse, CreateGround: zI, CreateGroundFromHeightMap: nU, CreateGroundFromHeightMapVertexData: UK, CreateGroundVertexData: BC, CreateHemisphere: cL, CreateIcoSphere: GL, CreateIcoSphereVertexData: ZB, CreateImageDataArrayBufferViews: yU, CreateLathe: _U, CreateLineSystem: tP, CreateLineSystemVertexData: YK, CreateLines: Ba, CreatePlane: hx, CreatePlaneVertexData: QB, CreatePolygon: nN, CreatePolygonVertexData: $K, CreatePolyhedron: AP, CreatePolyhedronVertexData: ZK, CreateResizedCopy: Lie, CreateRibbon: nx, CreateRibbonVertexData: zK, CreateScreenshot: TN, CreateScreenshotAsync: fj, CreateScreenshotUsingRenderTarget: SN, CreateScreenshotUsingRenderTargetAsync: pj, 
CreateScreenshotWithResizeAsync: Jne, CreateSegmentedBoxVertexData: Cie, CreateSphere: Rd, CreateSphereVertexData: uU, CreateText: Mie, CreateTextShapePaths: JK, CreateTiledBox: WK, CreateTiledBoxVertexData: KK, CreateTiledGround: sU, CreateTiledGroundVertexData: BK, CreateTiledPlane: GK, CreateTiledPlaneVertexData: uO, CreateTorus: o6, CreateTorusKnot: fU, CreateTorusKnotVertexData: jK, CreateTorusVertexData: aU, CreateTube: mU, CrossBlock: Xre, CubeMapToSphericalPolynomialTools: GI, CubeTexture: ul, CubeTextureAssetTask: Yne, CubicEase: lce, get CullMode() { return hO; }, CurrentScreenBlock: CK, Curve3: T4, CurveBlock: wse, get CurveBlockTypes() { return io; }, CustomBlock: Yre, CustomOptimization: Zne, CustomParticleEmitter: l5, CustomProceduralTexture: Tre, CylinderBlock: LW, CylinderBuilder: wde, CylinderDirectedParticleEmitter: VL, CylinderParticleEmitter: mw, DDSTools: ua, DataBuffer: JA, DataReader: O9, DataStorage: $H, Database: Dp, DebugBlock: one, DebugLayer: vP, get DebugLayerTab() { return yH; }, DecalBuilder: ofe, DecalMapConfiguration: Ew, DecalMapDefines: Lse, Decode: rK, DecodeBase64ToBinary: OL, DecodeBase64ToString: kB, DecodeBase64UrlToBinary: pw, DecodeBase64UrlToString: cK, DeepCopier: id, DefaultCollisionCoordinator: yie, DefaultKTX2DecoderOptions: ire, DefaultLoadingScreen: oT, DefaultRenderingPipeline: Wu, Deferred: rO, DepthCullingState: sK, DepthOfFieldBlurPostProcess: AL, DepthOfFieldEffect: KH, get DepthOfFieldEffectBlurLevel() { return nP; }, DepthOfFieldMergePostProcess: Tne, DepthPeelingRenderer: XC, DepthPeelingSceneComponent: One, DepthReducer: mre, DepthRenderer: T5, DepthRendererSceneComponent: Dne, DepthSortedParticle: vne, DerivativeBlock: Dre, DesaturateBlock: Ese, DetailMapConfiguration: dx, get DeviceInputEventType() { return fH; }, get DeviceLostReason() { return DH; }, DeviceOrientationCamera: eU, DeviceSource: pH, DeviceSourceManager: mte, get DeviceType() { return cr; }, DirectionalLight: Pd, DirectionalLightFrustumViewer: hfe, DiscBlock: FW, DiscBuilder: Vde, DiscardBlock: Pre, DisplayPassPostProcess: YU, DistanceBlock: ase, DistanceConstraint: hge, DistanceJoint: Oce, DivideBlock: ese, DoNothingAction: YG, DomManagement: Ule, DotBlock: Qre, DracoCompression: k_, DrawWrapper: $o, get DualSenseInput() { return uH; }, get DualShockButton() { return lg; }, get DualShockDpad() { return WR; }, get DualShockInput() { return cH; }, DualShockPad: Zte, DumpTools: qh, DynamicFloat32Array: WE, DynamicTexture: gg, EasingFunction: hl, EdgesRenderer: bN, Effect: Cr, EffectFallbacks: pl, EffectLayer: Tm, EffectLayerSceneComponent: hre, EffectRenderer: vw, EffectWrapper: t6, ElasticEase: cce, ElbowBlock: Pse, EncodeArrayBufferToBase64: DL, EndsWith: ste, Engine: $e, EngineFactory: Zpe, get EngineFormat() { return _B; }, EngineInstrumentation: cre, EngineStore: gi, EngineView: ffe, EnvironmentHelper: g5, EnvironmentTextureTools: xfe, Epsilon: Sr, EquiRectangularCubeTexture: WO, EquiRectangularCubeTextureAssetTask: $ne, ErrorCodes: $C, get ErrorFilter() { return OH; }, EventConstants: UI, EventState: XG, ExecuteCodeAction: Qee, ExponentialEase: Cte, ExternalTexture: rW, ExtractHighlightsPostProcess: jU, ExtrudePolygon: aN, ExtrudeShape: oN, ExtrudeShapeCustom: lN, FactorGradient: tj, FadeInOutBehavior: Mce, get FeatureName() { return UC; }, get FileTools() { return ZD; }, FileToolsOptions: ou, FilesInput: cj, FilesInputStore: JR, get FilterMode() { return Xs; }, FilterPostProcess: xN, FlowGraph: SP, FlowGraphAbsBlock: HP, FlowGraphAcosBlock: dI, 
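/*
 * This frozen object, tagged with Symbol.toStringTag === "Module", emulates the bundle's public module namespace: it
 * re-exports the mangled classes above under their public Babylon.js names, e.g. the FlowGraphAddBlock entry just below
 * refers to the class declared earlier as v5. The getter entries (such as `get AddressMode() { return XR; }`) appear to
 * exist so that var-declared, enum-like bindings are read lazily rather than copied at module-evaluation time.
 */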
FlowGraphAcoshBlock: AI, FlowGraphAddBlock: v5, FlowGraphAsinBlock: hI, FlowGraphAsinhBlock: vI, FlowGraphAtan2Block: pI, FlowGraphAtanBlock: fI, FlowGraphAtanhBlock: yI, FlowGraphBitwiseAndBlock: Vae, FlowGraphBitwiseLeftShiftBlock: Gae, FlowGraphBitwiseNotBlock: Hae, FlowGraphBitwiseOrBlock: kae, FlowGraphBitwiseRightShiftBlock: Kae, FlowGraphBitwiseXorBlock: zae, FlowGraphBlock: MT, FlowGraphBranchBlock: mae, FlowGraphCeilBlock: jP, FlowGraphClampBlock: qP, FlowGraphConditionalDataBlock: Rae, FlowGraphConnection: EB, get FlowGraphConnectionType() { return X_; }, FlowGraphConsoleLogBlock: MP, FlowGraphConstantBlock: Iae, FlowGraphContext: RB, FlowGraphContextLogger: tAe, FlowGraphCoordinateTransformBlock: Pae, FlowGraphCoordinator: oP, FlowGraphCosBlock: cI, FlowGraphCoshBlock: mI, FlowGraphCountLeadingZerosBlock: Wae, FlowGraphCountTrailingZerosBlock: jae, FlowGraphCounterBlock: xae, FlowGraphCrossBlock: II, FlowGraphCubeRootBlock: SI, FlowGraphDataConnection: SB, FlowGraphDebounceBlock: bae, FlowGraphDegToRadBlock: aI, FlowGraphDivideBlock: NP, FlowGraphDoNBlock: PP, FlowGraphDotBlock: BP, FlowGraphEBlock: UP, FlowGraphEqBlock: tI, FlowGraphEventBlock: Mw, FlowGraphExecutionBlock: n6, FlowGraphExpBlock: CI, FlowGraphFlipFlopBlock: Eae, FlowGraphFloorBlock: WP, FlowGraphForLoopBlock: gae, FlowGraphFractBlock: XP, FlowGraphGetPropertyBlock: wP, FlowGraphGetVariableBlock: OP, FlowGraphGreaterThanBlock: rI, FlowGraphGreaterThanOrEqualBlock: sI, FlowGraphInfBlock: kP, FlowGraphInterpolateBlock: eI, FlowGraphIsInfBlock: DN, FlowGraphIsNanBlock: nI, FlowGraphLengthBlock: RI, FlowGraphLessThanBlock: iI, FlowGraphLessThanOrEqualBlock: IN, FlowGraphLog10Block: EI, FlowGraphLog2Block: bI, FlowGraphLogBlock: xI, FlowGraphLogicAndBlock: $ae, FlowGraphLogicNotBlock: qae, FlowGraphLogicOrBlock: Zae, FlowGraphMaxBlock: ZP, FlowGraphMeshPickEventBlock: TP, FlowGraphMinBlock: $P, FlowGraphMultiGateBlock: Aae, FlowGraphMultiplyBlock: LP, FlowGraphNaNBlock: zP, FlowGraphNegBlock: YP, FlowGraphNormalizeBlock: PI, FlowGraphPath: ym, FlowGraphPauseAnimationBlock: Mae, FlowGraphPiBlock: VP, FlowGraphPlayAnimationBlock: Tae, FlowGraphPowBlock: MI, FlowGraphRadToDegBlock: oI, FlowGraphRandomBlock: FP, FlowGraphReceiveCustomEventBlock: wI, FlowGraphRemainderBlock: QP, FlowGraphRotate2DBlock: DI, FlowGraphRotate3DBlock: OI, FlowGraphSaturateBlock: JP, FlowGraphSceneReadyEventBlock: Ow, FlowGraphSceneTickEventBlock: LI, FlowGraphSendCustomEventBlock: Pw, FlowGraphSequenceBlock: DP, FlowGraphSetPropertyBlock: Rw, FlowGraphSetVariableBlock: RP, FlowGraphSignBlock: GP, FlowGraphSignalConnection: MB, FlowGraphSinBlock: lI, FlowGraphSinhBlock: _I, FlowGraphSqrtBlock: TI, get FlowGraphState() { return q8; }, FlowGraphStopAnimationBlock: Sae, FlowGraphSubtractBlock: A5, FlowGraphSwitchBlock: yae, FlowGraphTanBlock: uI, FlowGraphTanhBlock: gI, FlowGraphThrottleBlock: vae, FlowGraphTimerBlock: Iw, FlowGraphTruncBlock: KP, FlowGraphWaitAllBlock: Cae, FlowGraphWhileLoopBlock: IP, FluidRenderer: $U, FluidRendererSceneComponent: Vne, get FluidRenderingDebug() { return hm; }, FluidRenderingObject: lj, FluidRenderingObjectCustomParticles: Bne, FluidRenderingObjectParticleSystem: Fne, FluidRenderingTargetRenderer: WH, FlyCamera: qL, FlyCameraInputsManager: Xte, FlyCameraKeyboardInput: TT, FlyCameraMouseInput: XL, FogBlock: Vre, FollowBehavior: Dce, FollowCamera: Fp, FollowCameraInputsManager: Yte, FollowCameraKeyboardMoveInput: rd, FollowCameraMouseWheelInput: C5, FollowCameraPointersInput: F1, FragCoordBlock: Ore, 
FragDepthBlock: Fre, FragmentOutputBlock: sT, FramingBehavior: I1, FreeCamera: du, FreeCameraDeviceOrientationInput: RK, FreeCameraGamepadInput: ZL, FreeCameraInputsManager: $L, FreeCameraKeyboardMoveInput: Em, FreeCameraMouseInput: YL, FreeCameraMouseWheelInput: G4, FreeCameraTouchInput: QL, FreeCameraVirtualJoystickInput: PK, FresnelBlock: rse, FresnelParameters: uL, FromHalfFloat: kA, get FrontFace() { return nL; }, FrontFacingBlock: Ire, Frustum: gm, FxaaPostProcess: $I, GPUParticleSystem: L4, GUID: ece, Gamepad: zu, GamepadCamera: tU, GamepadManager: qte, GamepadSystemSceneComponent: Jte, GaussianSplatting: JC, GenerateBase64StringFromPixelData: YB, GenerateBase64StringFromTexture: mK, GenerateBase64StringFromTextureAsync: gK, GenericPad: jte, GeodesicData: cN, Geometry: yc, GeometryBufferRenderer: _o, GeometryBufferRendererSceneComponent: Mne, GeometryCollectionBlock: WW, GeometryElbowBlock: Xse, GeometryInfoBlock: lne, GeometryInputBlock: bc, GeometryOptimizeBlock: UU, GeometryOutputBlock: MW, GeometryTextureBlock: qW, GeometryTextureFetchBlock: JW, GeometryTransformBlock: YW, GeometryTrigonometryBlock: XW, get GeometryTrigonometryBlockOperations() { return Mn; }, GetClass: Qo, GetDOMTextContent: IL, GetEnvInfo: AU, GetEnvironmentBRDFTexture: pN, GetInternalFormatFromBasisFormat: bre, GetTGAHeader: mN, GetTextureDataAsync: Nie, Gizmo: Do, get GizmoAnchorPoint() { return rL; }, get GizmoCoordinatesMode() { return p5; }, GizmoManager: qpe, GlowLayer: Y_, GoldbergMesh: uN, GradientBlock: gse, GradientBlockColorStep: HF, GradientHelper: N_, GrainPostProcess: ZI, GreasedLineBaseMesh: SW, GreasedLineMaterialDefaults: bh, GreasedLineMesh: Al, get GreasedLineMeshColorDistribution() { return VC; }, get GreasedLineMeshColorDistributionType() { return jC; }, get GreasedLineMeshColorMode() { return vm; }, get GreasedLineMeshMaterialType() { return XO; }, get GreasedLineMeshWidthDistribution() { return iT; }, GreasedLinePluginMaterial: cx, get GreasedLineRibbonAutoDirectionMode() { return f5; }, get GreasedLineRibbonFacesMode() { return QO; }, GreasedLineRibbonMesh: jl, get GreasedLineRibbonPointsMode() { return M4; }, GreasedLineSimpleMaterial: TW, GreasedLineTools: Hn, GridBlock: OW, GroundBuilder: Pde, GroundMesh: yw, HDRCubeTexture: ZC, HDRCubeTextureAssetTask: Qne, HDRFiltering: pW, HDRTools: mB, HandConstraintBehavior: Wce, get HandConstraintOrientation() { return QE; }, get HandConstraintVisibility() { return X8; }, get HandConstraintZone() { return j8; }, get HandPart() { return YE; }, HardwareScalingOptimization: jH, HavokPlugin: yge, HeightToNormalBlock: gN, HemisphereBuilder: Jpe, HemisphericLight: vg, HemisphericParticleEmitter: kL, HighlightLayer: of, HighlightsPostProcess: qge, Hinge2Joint: Lce, HingeConstraint: dge, HingeJoint: wce, HtmlElementTexture: OU, IFlowGraphCoordinatorConfiguration: eAe, IWebXRControllerPhysicsOptions: Nve, IcoSphereBlock: IW, IcoSphereBuilder: Gce, ImageAssetTask: jne, ImageProcessingBlock: AW, ImageProcessingConfiguration: Ds, ImageProcessingConfigurationDefines: fte, ImageProcessingPostProcess: QU, ImageSourceBlock: U4, IncrementValueAction: Wee, get IndexFormat() { return yT; }, InputBlock: vs, get InspectableType() { return QH; }, InstancedLinesMesh: XK, InstancedMesh: Cg, InstancesBlock: Mre, InstantiateBlock: rne, InstantiateLinearBlock: sne, InstantiateOnFacesBlock: QW, InstantiateOnVerticesBlock: kU, InstantiateOnVolumeBlock: $W, InstantiateRadialBlock: nne, InstantiatedEntries: Ote, IntFloatConverterBlock: ane, InternalTexture: ln, get 
InternalTextureSource() { return ts; }, InterpolateValueAction: tte, IntersectionInfo: sB, IsBase64DataUrl: wL, IsDocumentAvailable: qR, IsFileURL: lK, IsNavigatorAvailable: GR, IsWindowObjectExist: cu, get JoystickAxis() { return Lu; }, KeepAssets: Dte, KeyboardEventTypes: rx, KeyboardInfo: rB, KeyboardInfoPre: lH, KhronosTextureContainer: Lp, KhronosTextureContainer2: mh, LatheBuilder: qde, Layer: fre, LayerSceneComponent: dre, LengthBlock: ose, LensFlare: IU, LensFlareSystem: CP, LensFlareSystemSceneComponent: pre, LensFlaresOptimization: YF, LensRenderingPipeline: x3e, LerpBlock: Jre, Light: hs, LightBlock: CB, LightGizmo: E4, LightInformationBlock: Rre, LineEdgesRenderer: wne, LinesBuilder: Xde, LinesMesh: Ag, LoadFile: vT, LoadFileError: UO, LoadImage: fw, get LoadOp() { return au; }, LoadTextureFromTranscodeResult: yB, LockConstraint: pge, Logger: Ce, get MapMode() { return c5; }, MapRangeBlock: Wse, MappingBlock: ZW, get MappingTypes() { return XA; }, Material: At, MaterialAnisotropicDefines: $ie, MaterialClearCoatDefines: Yie, MaterialDefines: sa, MaterialDetailMapDefines: pie, MaterialFlags: Tt, MaterialGreasedLineDefines: Nse, MaterialHelper: Ke, MaterialIridescenceDefines: Qie, MaterialPluginBase: Q_, get MaterialPluginEvent() { return xh; }, MaterialPluginManager: nT, MaterialSheenDefines: Zie, MaterialSubSurfaceDefines: qie, MathBlock: zW, get MathBlockOperations() { return wp; }, Matrix: Ae, MatrixBuilderBlock: Sse, MatrixComposeBlock: cne, MatrixDeterminantBlock: Dse, MatrixTransposeBlock: Ose, MaxBlock: sse, MergeGeometryBlock: KW, MergeMeshesOptimization: dT, Mesh: ke, MeshAssetTask: Gne, MeshAttributeExistsBlock: EW, get MeshAttributeExistsBlockTypes() { return ba; }, MeshBlock: PW, MeshBuilder: mo, get MeshDebugMode() { return rP; }, MeshDebugPluginMaterial: Hu, MeshExploder: Ave, MeshLODLevel: Ite, MeshParticleEmitter: $B, MeshUVSpaceRenderer: RC, MeshoptCompression: HC, MinBlock: nse, MinMaxReducer: _re, get MipmapFilterMode() { return TH; }, MirrorTexture: u5, ModBlock: Tse, ModelShape: HH, MorphTarget: h5, MorphTargetManager: O4, MorphTargetsBlock: vW, MotionBlurPostProcess: R5, MotorEnabledJoint: vK, MultiMaterial: xm, MultiObserver: uj, MultiPointerScaleBehavior: Pce, MultiRenderTarget: $8, MultiplyBlock: oB, NLerpBlock: vse, NativeDataStream: _5, NativeEngine: yP, get NativePointerInput() { return J9; }, NativeXRFrame: aae, NativeXRLayerRenderTargetTextureProvider: vie, NativeXRLayerWrapper: gie, NativeXRRenderTarget: Aie, NegateBlock: lse, Node: In, NodeGeometry: V_, NodeGeometryBlock: Bs, get NodeGeometryBlockConnectionPointTypes() { return We; }, NodeGeometryBuildState: Gse, NodeGeometryConnectionPoint: kH, get NodeGeometryConnectionPointCompatibilityStates() { return JE; }, get NodeGeometryConnectionPointDirection() { return $O; }, get NodeGeometryContextualSources() { return Rn; }, NodeMaterial: Ta, NodeMaterialBlock: Wi, get NodeMaterialBlockConnectionPointMode() { return bd; }, get NodeMaterialBlockConnectionPointTypes() { return ue; }, get NodeMaterialBlockTargets() { return Le; }, NodeMaterialConnectionPoint: pP, get NodeMaterialConnectionPointCompatibilityStates() { return fm; }, NodeMaterialConnectionPointCustomObject: Yo, get NodeMaterialConnectionPointDirection() { return no; }, NodeMaterialDefines: D9, get NodeMaterialModes() { return Ip; }, NodeMaterialOptimizer: i0e, get NodeMaterialSystemValues() { return Ms; }, NodeMaterialTeleportInBlock: Gre, NodeMaterialTeleportOutBlock: Kre, NoiseBlock: jse, NoiseProceduralTexture: hL, NormalBlendBlock: yse, 
NormalizeBlock: $re, NormalizeVectorBlock: $se, NullBlock: Kse, NullEngine: Pie, NullEngineOptions: Rie, Observable: Fe, Observer: Bee, OcclusionMaterial: Mme, Octree: gP, OctreeBlock: iL, OctreeSceneComponent: kK, OimoJSPlugin: BH, OnAfterEnteringVRObservableEvent: Dde, OneMinusBlock: yW, get Orientation() { return eP; }, OutlineRenderer: tw, PBRAnisotropicConfiguration: E5, PBRBaseMaterial: on, PBRBaseSimpleMaterial: Vp, PBRClearCoatConfiguration: _u, PBRIridescenceConfiguration: lf, PBRMaterial: Ri, PBRMaterialDefines: wH, PBRMetallicRoughnessBlock: gu, PBRMetallicRoughnessMaterial: ox, PBRSheenConfiguration: K4, PBRSpecularGlossinessMaterial: lx, PBRSubSurfaceConfiguration: Zo, PHI: Uc, PadNumber: ate, PanoramaToCubeMapTools: ST, Particle: ZO, ParticleBlendMultiplyBlock: EK, ParticleHelper: bP, ParticleRampGradientBlock: bK, ParticleSystem: ns, ParticleSystemSet: qC, ParticleTextureBlock: xK, ParticlesOptimization: QF, PassCubePostProcess: DK, PassPostProcess: h6, Path2: _w, Path3D: fP, PathCursor: pce, PerfCollectionStrategy: Rve, PerfCounter: Vc, PerformanceConfigurator: Uu, PerformanceMonitor: Ste, PerformanceViewerCollector: x4, PerturbNormalBlock: bw, PhotoDome: HO, Physics6DoFConstraint: xne, Physics6DoFLimit: uge, PhysicsAggregate: Ene, PhysicsBody: KU, PhysicsConstraint: M5, get PhysicsConstraintAxis() { return HA; }, get PhysicsConstraintAxisLimitMode() { return OC; }, get PhysicsConstraintMotorType() { return Z8; }, get PhysicsConstraintType() { return af; }, PhysicsEngine: _W, PhysicsEngineV2: GU, get PhysicsEventType() { return C4; }, PhysicsHelper: xge, PhysicsImpostor: tr, PhysicsJoint: ta, get PhysicsMaterialCombineMode() { return wC; }, get PhysicsMotionType() { return P4; }, PhysicsRadialExplosionEventOptions: mO, get PhysicsRadialImpulseFalloff() { return vL; }, PhysicsRaycastResult: _N, PhysicsShape: fx, PhysicsShapeBox: nj, PhysicsShapeCapsule: rj, PhysicsShapeContainer: cge, PhysicsShapeConvexHull: oge, PhysicsShapeCylinder: sj, PhysicsShapeMesh: lge, PhysicsShapeSphere: ij, get PhysicsShapeType() { return Cc; }, PhysicsUpdraftEventOptions: aj, get PhysicsUpdraftMode() { return ew; }, PhysicsViewer: ufe, PhysicsVortexEventOptions: oj, PickingInfo: ku, get PipelineErrorReason() { return MH; }, PivotTools: Nn, Plane: Sd, PlaneBlock: RW, PlaneBuilder: Rce, PlaneDragGizmo: dO, PlaneRotationGizmo: D4, PlayAnimationAction: jee, PlaySoundAction: $ee, get PointColor() { return Rp; }, PointLight: s6, PointParticleEmitter: zL, PointerDragBehavior: Fu, PointerEventTypes: si, PointerInfo: cg, PointerInfoBase: uK, PointerInfoPre: _te, get PointerInput() { return Gr; }, PointsCloudSystem: Cne, PointsGroup: KF, Polar: v4, Polygon: Qde, PolygonBuilder: $de, PolygonMeshBuilder: xie, PolyhedronBuilder: efe, PolyhedronData: uB, PositionGizmo: EU, PositionNormalTextureVertex: yK, PositionNormalVertex: AK, PostProcess: Bi, PostProcessManager: q9, PostProcessRenderEffect: gn, PostProcessRenderPipeline: I5, PostProcessRenderPipelineManager: Rne, PostProcessRenderPipelineManagerSceneComponent: Pne, PostProcessesOptimization: XF, PosterizeBlock: _se, PowBlock: cse, PowerEase: uce, get PowerPreference() { return bH; }, PrePassOutputBlock: Ure, PrePassRenderer: Td, PrePassRendererSceneComponent: Lne, PrePassTextureBlock: Hre, PrecisionDate: Gs, PredicateCondition: kee, PressureObserverWrapper: qU, get PrimitiveTopology() { return B_; }, PrismaticConstraint: _ge, ProceduralTexture: z4, ProceduralTextureSceneComponent: Gte, get PropertyTypeForEdition() { return $i; }, PushMaterial: fl, 
QuadraticEase: dK, QuadraticErrorSimplification: Use, QuarticEase: hce, Quaternion: Ze, get QueryType() { return oL; }, QuinticEase: dce, RGBDTextureTools: hB, Ragdoll: vge, RagdollBoneProperties: gge, RandomBlock: GW, get RandomBlockLocks() { return R4; }, RandomGUID: G_, RandomNumberBlock: use, RawCubeTexture: LU, RawTexture: Po, RawTexture2DArray: DU, RawTexture3D: Jme, Ray: gs, RayHelper: eW, ReadFile: VO, ReadFileError: zB, RecastJSCrowd: fne, RecastJSPlugin: z0e, ReciprocalBlock: fse, ReflectBlock: xse, ReflectionBlock: YI, ReflectionProbe: WI, ReflectionTextureBlock: kre, Reflector: xL, RefractBlock: bse, RefractionBlock: S5, RefractionPostProcess: P5, RefractionTexture: gW, RegisterClass: Be, RegisterMaterialPlugin: Sde, RegisterNativeTypeAsync: zie, RemapBlock: BL, get RenderPassTimestampLocation() { return IH; }, RenderTargetTexture: ra, RenderTargetWrapper: FL, RenderTargetsOptimization: XH, RenderingGroup: b4, RenderingGroupInfo: pte, RenderingManager: Zh, ReplaceColorBlock: pse, RequestFile: GB, RequestFileError: Z9, RetryStrategy: rte, RibbonBuilder: Ude, RichType: $_, RichTypeAny: Ci, RichTypeBoolean: Th, RichTypeColor3: cae, RichTypeColor4: uae, RichTypeMatrix: Zve, RichTypeNumber: Rs, RichTypeQuaternion: hae, RichTypeString: oae, RichTypeVector2: TB, RichTypeVector3: ex, RichTypeVector4: lae, RollingAverage: Mte, Rotate2dBlock: Cse, RotationGizmo: dW, RotationXBlock: Zse, RotationYBlock: qse, RotationZBlock: Jse, RuntimeAnimation: ite, RuntimeError: F4, SSAO2RenderingPipeline: mu, SSAORenderingPipeline: Tw, SSRRenderingPipeline: Ma, get SamplerBindingType() { return AT; }, Scalar: yt, ScaleBlock: jre, ScaleGizmo: fW, ScalingBlock: ene, Scene: ii, SceneComponentConstants: Bt, SceneDepthBlock: vN, SceneInstrumentation: ure, SceneLoader: fr, get SceneLoaderAnimationGroupLoadingMode() { return FC; }, SceneLoaderFlags: uu, SceneOptimization: px, SceneOptimizer: hj, SceneOptimizerOptions: aP, get ScenePerformancePriority() { return $A; }, SceneRecorder: bve, SceneSerializer: QC, ScreenSizeBlock: wre, ScreenSpaceBlock: Lre, ScreenSpaceCurvaturePostProcess: Sw, ScreenSpaceReflectionPostProcess: Eg, ScreenshotTools: Cve, SerializationHelper: St, SetColorsBlock: VW, SetCorsBehavior: HB, SetMaterialIDBlock: jW, SetNormalsBlock: UW, SetParentAction: QG, SetPositionsBlock: BW, SetStateAction: Gee, SetTangentsBlock: kW, SetUVsBlock: VU, SetValueAction: Kee, ShaderCodeInliner: cT, get ShaderLanguage() { return Xa; }, ShaderMaterial: Lo, get ShaderStage() { return $E; }, ShaderStore: je, ShadowDepthWrapper: s0e, ShadowGenerator: hr, ShadowGeneratorSceneComponent: gre, ShadowLight: b5, ShadowMapBlock: Bre, ShadowsOptimization: jF, ShapeBuilder: Zde, SharpenPostProcess: qI, SheenBlock: XI, SimplexPerlin3DBlock: Ase, SimplicationQueueSceneComponent: Vse, SimplificationQueue: Bse, SimplificationSettings: F0e, get SimplificationType() { return gL; }, SineEase: fK, SixDofDragBehavior: kte, Size: kf, Skeleton: sx, SkeletonViewer: P1, SliderConstraint: fge, SmartArray: xc, SmartArrayNoDuplicate: XE, SmoothStepBlock: dse, SolidParticle: zH, SolidParticleSystem: age, SolidParticleVertex: Ane, Sound: I4, SoundTrack: Lte, get SourceTextureFormat() { return LH; }, get Space() { return qr; }, SphereBlock: DW, SphereBuilder: Fde, SphereDirectedParticleEmitter: HL, SphereParticleEmitter: gw, Spherical: A4, SphericalHarmonics: m5, SphericalPolynomial: ax, SpotLight: td, SpringConstraint: mge, Sprite: ZU, SpriteManager: YC, SpriteMap: _ve, SpritePackedManager: mve, SpriteSceneComponent: kne, Stage: Kl, 
StandardMaterial: Dt, StandardMaterialDefines: _ie, StandardRenderingPipeline: Dn, StartsWith: nte, StateCondition: zee, get StencilOperation() { return y4; }, StencilState: WC, StencilStateComposer: oK, StepBlock: ise, StereoscopicArcRotateCamera: nie, StereoscopicFreeCamera: aie, StereoscopicGamepadCamera: oie, StereoscopicInterlacePostProcess: eue, StereoscopicInterlacePostProcessI: sie, StereoscopicScreenUniversalCamera: tue, StereoscopicUniversalCamera: lie, StickValues: jce, StopAnimationAction: Xee, StopSoundAction: Zee, StorageBuffer: Wte, get StorageTextureAccess() { return dB; }, get StoreOp() { return _m; }, StringDictionary: iB, StringTools: Vle, SubEmitter: hT, get SubEmitterType() { return qO; }, SubMesh: ed, SubSurfaceBlock: jO, SubSurfaceSceneComponent: Nne, SubtractBlock: tse, SurfaceMagnetismBehavior: Ice, SwitchBooleanAction: Hee, get SwitchInput() { return dH; }, TBNBlock: jI, TGATools: yre, Tags: $s, TargetCamera: Cl, TargetedAnimation: xte, TeleportInBlock: une, TeleportOutBlock: hne, TestBase64DataUrl: hte, TextFileAssetTask: Kne, Texture: De, get TextureAspect() { return jA; }, TextureAssetTask: Xne, TextureBlock: dL, get TextureDimension() { return _g; }, get TextureFormat() { return Re; }, TextureOptimization: WF, TexturePacker: w4, TexturePackerFrame: UH, get TextureSampleType() { return K_; }, TextureSampler: nK, TextureTools: Fie, get TextureUsage() { return fo; }, get TextureViewDimension() { return Ea; }, ThinEngine: mi, ThinRenderTargetTexture: e0e, ThinTexture: rT, TiledBoxBuilder: zde, TiledPlaneBuilder: kde, get TimerState() { return Q8; }, TmpColors: mn, TmpVectors: de, ToGammaSpace: nO, ToHalfFloat: GA, ToLinearSpace: V9, TonemapPostProcess: Y3e, get TonemappingOperator() { return $R; }, Tools: Ve, TorusBlock: wW, TorusBuilder: Ide, TorusKnotBuilder: Hde, TouchCamera: IK, TrailMesh: mL, Trajectory: ca, TrajectoryClassifier: bB, TranscodeAsync: AB, get TranscodeTarget() { return pO; }, TransformBlock: aB, TransformNode: xi, TranslationBlock: ine, TriPlanarBlock: FU, TrigonometryBlock: SK, get TrigonometryBlockOperations() { return nu; }, TubeBuilder: Jde, TwirlBlock: Nre, UniformBuffer: Vi, UniversalCamera: x5, UnregisterAllMaterialPlugins: wK, UnregisterMaterialPlugin: Mde, UploadContent: mW, UploadEnvLevelsAsync: iW, UploadEnvSpherical: CU, UploadLevelsAsync: sL, UtilityLayerRenderer: bn, VRCameraMetrics: kI, VRDeviceOrientationArcRotateCamera: die, VRDeviceOrientationFreeCamera: rU, VRDeviceOrientationGamepadCamera: fie, VRDistortionCorrectionPostProcess: mH, VRExperienceHelper: _P, VRMultiviewToSingleviewPostProcess: hie, ValidatedNativeDataStream: Hie, ValueCondition: Nu, Vector2: at, Vector3: D, Vector4: Di, VectorConverterBlock: Qse, VectorMergerBlock: K9, VectorSplitterBlock: qre, VertexAnimationBaker: Tce, VertexBuffer: Y, VertexData: Ot, VertexDataMaterialInfo: H9, get VertexFormat() { return So; }, VertexOutputBlock: G9, get VertexStepMode() { return aL; }, VideoDome: PU, VideoRecorder: yL, VideoTexture: Cm, ViewDirectionBlock: CW, Viewport: Md, VirtualJoystick: Ji, VirtualJoysticksCamera: cie, VolumetricLightScatteringPostProcess: Sm, VoronoiNoiseBlock: Rse, WaveBlock: mse, get WaveBlockKind() { return YR; }, WebGL2ParticleSystem: _ne, WebGL2ShaderProcessor: aK, WebGLDataBuffer: FO, WebGLHardwareTexture: BI, WebGLPipelineContext: lte, WebGPUCacheBindGroups: cl, WebGPUCacheRenderPipeline: po, WebGPUCacheRenderPipelineTree: zC, WebGPUCacheSampler: e5, WebGPUDataBuffer: Kie, WebGPUDrawContext: dN, WebGPUEngine: br, WebGPURenderTargetWrapper: 
Xie, WebGPUTintWASM: U_, WebRequest: go, WebXRAbstractFeature: Ku, WebXRAbstractMotionController: KI, WebXRAnchorSystem: gO, WebXRBackgroundRemover: AO, WebXRCamera: GO, WebXRCompositionLayerWrapper: tae, WebXRControllerComponent: j_, WebXRControllerMovement: pT, WebXRControllerPhysics: yO, WebXRControllerPointerSelection: i6, WebXRDefaultExperience: RU, WebXRDefaultExperienceOptions: v2e, WebXRDepthSensing: PO, WebXRDomOverlay: TO, WebXREnterExitUI: MU, WebXREnterExitUIButton: ore, WebXREnterExitUIOptions: g2e, WebXRExperienceHelper: SU, WebXREyeTracking: MO, WebXRFeatureName: Qs, WebXRFeaturePointSystem: xO, WebXRFeaturesManager: Oo, WebXRGenericHandController: nae, WebXRGenericTriggerMotionController: KO, WebXRHTCViveMotionController: EP, WebXRHand: Kte, get WebXRHandJoint() { return gr; }, WebXRHandTracking: da, WebXRHitTest: CO, WebXRHitTestLegacy: fT, WebXRImageTracking: EO, WebXRInput: are, WebXRInputSource: nre, WebXRLayers: RO, WebXRLightEstimation: SO, WebXRManagedOutputCanvas: mie, WebXRManagedOutputCanvasOptions: tN, WebXRMeshDetector: bO, WebXRMicrosoftMixedRealityController: _T, WebXRMotionControllerManager: Vu, WebXRMotionControllerTeleportation: iP, get WebXRNearControllerMode() { return Y8; }, WebXRNearInteraction: r6, WebXROculusTouchMotionController: N4, WebXRPlaneDetector: vO, WebXRProfiledMotionController: sre, WebXRProjectionLayerWrapper: iae, WebXRRawCameraAccess: DO, WebXRSessionManager: iN, WebXRSpaceWarp: IO, WebXRSpaceWarpRenderTargetTextureProvider: sae, get WebXRState() { return lu; }, get WebXRTrackingState() { return jR; }, WebXRWalkingLocomotion: $F, WeightedSound: Nte, WorkerPool: tre, WorleyNoise3DBlock: xW, XRSpaceWarpRenderTarget: rae, get Xbox360Button() { return F_; }, get Xbox360Dpad() { return KR; }, Xbox360Pad: $te, get XboxInput() { return hH; }, _BabylonLoaderRegistered: Tme, _BasisTextureLoader: Ere, _CreationDataStorage: _K, _DDSTextureLoader: Jie, _ENVTextureLoader: ere, _HDRTextureLoader: xre, _InstancesBatch: _H, _KTXTextureLoader: rre, _MeshCollisionData: Rte, _OcclusionDataStorage: Die, _PrimaryIsoTriangle: qK, _TGATextureLoader: Cre, _TimeToken: Iie, _UpdateRGBDAsync: Uie, _forceSceneHelpersToBundle: A2e, _forceTransformFeedbackToBundle: dfe, _injectLTSFileTools: dte, _staticOffsetValueColor3: tK, _staticOffsetValueColor4: iK, _staticOffsetValueQuaternion: ZG, _staticOffsetValueSize: eK, _staticOffsetValueVector2: JG, _staticOffsetValueVector3: qG, addClipPlaneUniforms: Gc, allocateAndCopyTypedBuffer: nB, bindClipPlane: Ec, captureEquirectangularFromScene: Ove, className: tce, createDetailMapPlugin: h0e, createPBRAnisotropicPlugin: n0e, createPBRBRDFPlugin: a0e, createPBRClearCoatPlugin: o0e, createPBRIridescencePlugin: l0e, createPBRSheenPlugin: c0e, createPBRSubSurfacePlugin: u0e, createYieldingScheduler: bte, editableInPropertyPage: ir, expandToProperty: ct, extractMinAndMax: kO, extractMinAndMaxIndexed: Tte, getRichTypeFromValue: dae, inlineScheduler: tL, makeAsyncFunction: mce, makeSyncFunction: Ete, nativeOverride: gT, normalizeEnvInfo: hN, prepareDefinesForClipPlanes: Pte, prepareStringDefinesForClipPlanes: bT, runCoroutine: pK, runCoroutineAsync: jB, runCoroutineSync: WB, serialize: W, serializeAsCameraReference: ete, serializeAsColor3: Fs, serializeAsColor4: dw, serializeAsColorCurves: qee, serializeAsFresnelParameters: uw, serializeAsImageProcessingConfiguration: $G, serializeAsMatrix: VB, serializeAsMeshReference: hw, serializeAsQuaternion: Jee, serializeAsTexture: er, serializeAsVector2: PL, serializeAsVector3: oo, 
setAndStartTimer: FH, setStereoscopicAnaglyphRigMode: JL, setStereoscopicRigMode: eN, setVRRigMode: iU }, Symbol.toStringTag, { value: "Module" })); function ZH(c, e, t, i) { const r = { externalResourceFunction: i }; return t && (r.uri = e === "file:" ? t : e + t), ArrayBuffer.isView(c) ? GLTFValidator.validateBytes(c, r) : GLTFValidator.validateString(c, r); } function rAe() { const c = []; onmessage = (e) => { const t = e.data; switch (t.id) { case "init": { importScripts(t.url); break; } case "validate": { ZH(t.data, t.rootUrl, t.fileName, (i) => new Promise((r, s) => { const n = c.length; c.push({ resolve: r, reject: s }), postMessage({ id: "getExternalResource", index: n, uri: i }); })).then((i) => { postMessage({ id: "validate.resolve", value: i }); }, (i) => { postMessage({ id: "validate.reject", reason: i }); }); break; } case "getExternalResource.resolve": { c[t.index].resolve(t.value); break; } case "getExternalResource.reject": { c[t.index].reject(t.reason); break; } } }; } class toe { /** * Validate a glTF asset using the glTF-Validator. * @param data The JSON of a glTF or the array buffer of a binary glTF * @param rootUrl The root url for the glTF * @param fileName The file name for the glTF * @param getExternalResource The callback to get external resources for the glTF validator * @returns A promise that resolves with the glTF validation results once complete */ static ValidateAsync(e, t, i, r) { return typeof Worker == "function" ? new Promise((s, n) => { const a = `${ZH}(${rAe})()`, l = URL.createObjectURL(new Blob([a], { type: "application/javascript" })), o = new Worker(l), u = (d) => { o.removeEventListener("error", u), o.removeEventListener("message", h), n(d); }, h = (d) => { const f = d.data; switch (f.id) { case "getExternalResource": { r(f.uri).then((p) => { o.postMessage({ id: "getExternalResource.resolve", index: f.index, value: p }, [p]); }, (p) => { o.postMessage({ id: "getExternalResource.reject", index: f.index, reason: p }); }); break; } case "validate.resolve": { o.removeEventListener("error", u), o.removeEventListener("message", h), s(f.value), o.terminate(); break; } case "validate.reject": o.removeEventListener("error", u), o.removeEventListener("message", h), n(f.reason), o.terminate(); } }; if (o.addEventListener("error", u), o.addEventListener("message", h), o.postMessage({ id: "init", url: Ve.GetBabylonScriptURL(this.Configuration.url) }), ArrayBuffer.isView(e)) { const d = e.slice(); o.postMessage({ id: "validate", data: d, rootUrl: t, fileName: i }, [d.buffer]); } else o.postMessage({ id: "validate", data: e, rootUrl: t, fileName: i }); }) : (this._LoadScriptPromise || (this._LoadScriptPromise = Ve.LoadBabylonScriptAsync(this.Configuration.url)), this._LoadScriptPromise.then(() => ZH(e, t, i, r))); } } toe.Configuration = { url: `${Ve._DefaultCdnUrl}/gltf_validator.js` }; function Cq(c, e, t) { try { return Promise.resolve(new Uint8Array(c, e, t)); } catch (i) { return Promise.reject(i); } } function sAe(c, e, t) { try { if (e < 0 || e >= c.byteLength) throw new RangeError("Offset is out of range."); if (e + t > c.byteLength) throw new RangeError("Length is out of range."); return Promise.resolve(new Uint8Array(c.buffer, c.byteOffset + e, t)); } catch (i) { return Promise.reject(i); } } var bL; (function(c) { c[c.AUTO = 0] = "AUTO", c[c.FORCE_RIGHT_HANDED = 1] = "FORCE_RIGHT_HANDED"; })(bL || (bL = {})); var OO; (function(c) { c[c.NONE = 0] = "NONE", c[c.FIRST = 1] = "FIRST", c[c.ALL = 2] = "ALL"; })(OO || (OO = {})); var dm; (function(c) { 
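/*
 * Usage sketch for the validator class above (published as GLTFValidation in Babylon.js).
 * It assumes the public names GLTFValidation.ValidateAsync and GLTFValidation.Configuration,
 * a hypothetical fetchExternalResource helper, and the glTF-Validator report shape
 * (results.issues.numErrors); treat it as illustrative rather than canonical.
 *
 *   // Optionally point the worker at a self-hosted copy of gltf_validator.js.
 *   GLTFValidation.Configuration = { url: "/libs/gltf_validator.js" };
 *
 *   const results = await GLTFValidation.ValidateAsync(
 *       new Uint8Array(glbArrayBuffer),          // glTF JSON string or binary view
 *       "/assets/",                              // rootUrl used to resolve external URIs
 *       "model.glb",                             // fileName reported by the validator
 *       (uri) => fetchExternalResource(uri)      // must return Promise<ArrayBuffer>
 *   );
 *   if (results.issues.numErrors > 0) {
 *       console.warn("glTF validation failed", results.issues);
 *   }
 */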
c[c.LOADING = 0] = "LOADING", c[c.READY = 1] = "READY", c[c.COMPLETE = 2] = "COMPLETE"; })(dm || (dm = {})); class yl { constructor() { this.onParsedObservable = new Fe(), this.coordinateSystemMode = bL.AUTO, this.animationStartMode = OO.FIRST, this.compileMaterials = !1, this.useClipPlane = !1, this.compileShadowGenerators = !1, this.transparencyAsCoverage = !1, this.useRangeRequests = !1, this.createInstances = !0, this.alwaysComputeBoundingBox = !1, this.loadAllMaterials = !1, this.loadOnlyMaterials = !1, this.skipMaterials = !1, this.useSRGBBuffers = !0, this.targetFps = 60, this.alwaysComputeSkeletonRootNode = !1, this.preprocessUrlAsync = (e) => Promise.resolve(e), this.onMeshLoadedObservable = new Fe(), this.onSkinLoadedObservable = new Fe(), this.onTextureLoadedObservable = new Fe(), this.onMaterialLoadedObservable = new Fe(), this.onCameraLoadedObservable = new Fe(), this.onCompleteObservable = new Fe(), this.onErrorObservable = new Fe(), this.onDisposeObservable = new Fe(), this.onExtensionLoadedObservable = new Fe(), this.validate = !1, this.onValidatedObservable = new Fe(), this._loader = null, this._state = null, this._requests = new Array(), this.name = "gltf", this.extensions = { ".gltf": { isBinary: !1 }, ".glb": { isBinary: !0 } }, this.onLoaderStateChangedObservable = new Fe(), this._logIndentLevel = 0, this._loggingEnabled = !1, this._log = this._logDisabled, this._capturePerformanceCounters = !1, this._startPerformanceCounter = this._startPerformanceCounterDisabled, this._endPerformanceCounter = this._endPerformanceCounterDisabled; } /** * Raised when the asset has been parsed */ set onParsed(e) { this._onParsedObserver && this.onParsedObservable.remove(this._onParsedObserver), this._onParsedObserver = this.onParsedObservable.add(e); } /** * Callback raised when the loader creates a mesh after parsing the glTF properties of the mesh. * Note that the callback is called as soon as the mesh object is created, meaning some data may not have been setup yet for this mesh (vertex data, morph targets, material, ...) */ set onMeshLoaded(e) { this._onMeshLoadedObserver && this.onMeshLoadedObservable.remove(this._onMeshLoadedObserver), this._onMeshLoadedObserver = this.onMeshLoadedObservable.add(e); } /** * Callback raised when the loader creates a texture after parsing the glTF properties of the texture. */ set onTextureLoaded(e) { this._onTextureLoadedObserver && this.onTextureLoadedObservable.remove(this._onTextureLoadedObserver), this._onTextureLoadedObserver = this.onTextureLoadedObservable.add(e); } /** * Callback raised when the loader creates a material after parsing the glTF properties of the material. */ set onMaterialLoaded(e) { this._onMaterialLoadedObserver && this.onMaterialLoadedObservable.remove(this._onMaterialLoadedObserver), this._onMaterialLoadedObserver = this.onMaterialLoadedObservable.add(e); } /** * Callback raised when the loader creates a camera after parsing the glTF properties of the camera. */ set onCameraLoaded(e) { this._onCameraLoadedObserver && this.onCameraLoadedObservable.remove(this._onCameraLoadedObserver), this._onCameraLoadedObserver = this.onCameraLoadedObservable.add(e); } /** * Callback raised when the asset is completely loaded, immediately before the loader is disposed. * For assets with LODs, raised when all of the LODs are complete. * For assets without LODs, raised when the model is complete, immediately after the loader resolves the returned promise. 
*/ set onComplete(e) { this._onCompleteObserver && this.onCompleteObservable.remove(this._onCompleteObserver), this._onCompleteObserver = this.onCompleteObservable.add(e); } /** * Callback raised when an error occurs. */ set onError(e) { this._onErrorObserver && this.onErrorObservable.remove(this._onErrorObserver), this._onErrorObserver = this.onErrorObservable.add(e); } /** * Callback raised after the loader is disposed. */ set onDispose(e) { this._onDisposeObserver && this.onDisposeObservable.remove(this._onDisposeObserver), this._onDisposeObserver = this.onDisposeObservable.add(e); } /** * Callback raised after a loader extension is created. */ set onExtensionLoaded(e) { this._onExtensionLoadedObserver && this.onExtensionLoadedObservable.remove(this._onExtensionLoadedObserver), this._onExtensionLoadedObserver = this.onExtensionLoadedObservable.add(e); } /** * Defines if the loader logging is enabled. */ get loggingEnabled() { return this._loggingEnabled; } set loggingEnabled(e) { this._loggingEnabled !== e && (this._loggingEnabled = e, this._loggingEnabled ? this._log = this._logEnabled : this._log = this._logDisabled); } /** * Defines if the loader should capture performance counters. */ get capturePerformanceCounters() { return this._capturePerformanceCounters; } set capturePerformanceCounters(e) { this._capturePerformanceCounters !== e && (this._capturePerformanceCounters = e, this._capturePerformanceCounters ? (this._startPerformanceCounter = this._startPerformanceCounterEnabled, this._endPerformanceCounter = this._endPerformanceCounterEnabled) : (this._startPerformanceCounter = this._startPerformanceCounterDisabled, this._endPerformanceCounter = this._endPerformanceCounterDisabled)); } /** * Callback raised after a loader extension is created. */ set onValidated(e) { this._onValidatedObserver && this.onValidatedObservable.remove(this._onValidatedObserver), this._onValidatedObserver = this.onValidatedObservable.add(e); } /** * Disposes the loader, releases resources during load, and cancels any outstanding requests. */ dispose() { this._loader && (this._loader.dispose(), this._loader = null); for (const e of this._requests) e.abort(); this._requests.length = 0, delete this._progressCallback, this.preprocessUrlAsync = (e) => Promise.resolve(e), this.onMeshLoadedObservable.clear(), this.onSkinLoadedObservable.clear(), this.onTextureLoadedObservable.clear(), this.onMaterialLoadedObservable.clear(), this.onCameraLoadedObservable.clear(), this.onCompleteObservable.clear(), this.onExtensionLoadedObservable.clear(), this.onDisposeObservable.notifyObservers(void 0), this.onDisposeObservable.clear(); } /** * @internal */ loadFile(e, t, i, r, s, n, a, l) { if (ArrayBuffer.isView(t)) return this._loadBinary(e, t, i, r, a, l), null; this._progressCallback = s; const o = t.name || Ve.GetFilename(t); if (n) { if (this.useRangeRequests) { this.validate && Ce.Warn("glTF validation is not supported when range requests are enabled"); const u = { abort: () => { }, onCompleteObservable: new Fe() }, h = { readAsync: (d, f) => new Promise((p, m) => { this._loadFile(e, t, (_) => { p(new Uint8Array(_)); }, !0, (_) => { m(_); }, (_) => { _.setRequestHeader("Range", `bytes=${d}-${d + f - 1}`); }); }), byteLength: 0 }; return this._unpackBinaryAsync(new O9(h)).then((d) => { u.onCompleteObservable.notifyObservers(u), r(d); }, a ? 
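/*
 * Configuration sketch for the loader options and observables defined above (published as
 * GLTFFileLoader). It assumes the SceneLoader.OnPluginActivatedObservable hook and the
 * public option names shown; adjust to the API surface of the Babylon.js version in use.
 *
 *   SceneLoader.OnPluginActivatedObservable.add((plugin) => {
 *       if (plugin.name !== "gltf") return;
 *       const gltf = plugin;                     // GLTFFileLoader instance for this load
 *       gltf.compileMaterials = true;            // compile materials before onComplete fires
 *       gltf.loggingEnabled = true;              // route loader traces to the console
 *       gltf.onMeshLoadedObservable.add((mesh) => console.log("mesh parsed:", mesh.name));
 *       gltf.onCompleteObservable.add(() => console.log("glTF load complete"));
 *   });
 */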
(d) => a(void 0, d) : void 0), u; } return this._loadFile(e, t, (u) => { this._validate(e, new Uint8Array(u, 0, u.byteLength), i, o), this._unpackBinaryAsync(new O9({ readAsync: (h, d) => Cq(u, h, d), byteLength: u.byteLength })).then((h) => { r(h); }, a ? (h) => a(void 0, h) : void 0); }, !0, a); } else return this._loadFile(e, t, (u) => { this._validate(e, u, i, o), r({ json: this._parseJson(u) }); }, !1, a); } _loadBinary(e, t, i, r, s, n) { this._validate(e, new Uint8Array(t.buffer, t.byteOffset, t.byteLength), i, n), this._unpackBinaryAsync(new O9({ readAsync: (a, l) => sAe(t, a, l), byteLength: t.byteLength })).then((a) => { r(a); }, s ? (a) => s(void 0, a) : void 0); } /** * @internal */ importMeshAsync(e, t, i, r, s, n) { return Promise.resolve().then(() => (this.onParsedObservable.notifyObservers(i), this.onParsedObservable.clear(), this._log(`Loading ${n || ""}`), this._loader = this._getLoader(i), this._loader.importMeshAsync(e, t, null, i, r, s, n))); } /** * @internal */ loadAsync(e, t, i, r, s) { return Promise.resolve().then(() => (this.onParsedObservable.notifyObservers(t), this.onParsedObservable.clear(), this._log(`Loading ${s || ""}`), this._loader = this._getLoader(t), this._loader.loadAsync(e, t, i, r, s))); } /** * @internal */ loadAssetContainerAsync(e, t, i, r, s) { return Promise.resolve().then(() => { this.onParsedObservable.notifyObservers(t), this.onParsedObservable.clear(), this._log(`Loading ${s || ""}`), this._loader = this._getLoader(t); const n = new NL(e), a = []; this.onMaterialLoadedObservable.add((h) => { a.push(h); }); const l = []; this.onTextureLoadedObservable.add((h) => { l.push(h); }); const o = []; this.onCameraLoadedObservable.add((h) => { o.push(h); }); const u = []; return this.onMeshLoadedObservable.add((h) => { h.morphTargetManager && u.push(h.morphTargetManager); }), this._loader.importMeshAsync(null, e, n, t, i, r, s).then((h) => (Array.prototype.push.apply(n.geometries, h.geometries), Array.prototype.push.apply(n.meshes, h.meshes), Array.prototype.push.apply(n.particleSystems, h.particleSystems), Array.prototype.push.apply(n.skeletons, h.skeletons), Array.prototype.push.apply(n.animationGroups, h.animationGroups), Array.prototype.push.apply(n.materials, a), Array.prototype.push.apply(n.textures, l), Array.prototype.push.apply(n.lights, h.lights), Array.prototype.push.apply(n.transformNodes, h.transformNodes), Array.prototype.push.apply(n.cameras, o), Array.prototype.push.apply(n.morphTargetManagers, u), n)); }); } /** * @internal */ canDirectLoad(e) { return e.indexOf("asset") !== -1 && e.indexOf("version") !== -1 || e.startsWith("data:base64," + yl._MagicBase64Encoded) || // this is technically incorrect, but will continue to support for backcompat. e.startsWith("data:;base64," + yl._MagicBase64Encoded) || e.startsWith("data:application/octet-stream;base64," + yl._MagicBase64Encoded) || e.startsWith("data:model/gltf-binary;base64," + yl._MagicBase64Encoded); } /** * @internal */ directLoad(e, t) { if (t.startsWith("base64," + yl._MagicBase64Encoded) || // this is technically incorrect, but will continue to support for backcompat. 
t.startsWith(";base64," + yl._MagicBase64Encoded) || t.startsWith("application/octet-stream;base64," + yl._MagicBase64Encoded) || t.startsWith("model/gltf-binary;base64," + yl._MagicBase64Encoded)) { const i = pw(t); return this._validate(e, new Uint8Array(i, 0, i.byteLength)), this._unpackBinaryAsync(new O9({ readAsync: (r, s) => Cq(i, r, s), byteLength: i.byteLength })); } return this._validate(e, t), Promise.resolve({ json: this._parseJson(t) }); } /** @internal */ createPlugin() { return new yl(); } /** * The loader state or null if the loader is not active. */ get loaderState() { return this._state; } /** * Returns a promise that resolves when the asset is completely loaded. * @returns a promise that resolves when the asset is completely loaded. */ whenCompleteAsync() { return new Promise((e, t) => { this.onCompleteObservable.addOnce(() => { e(); }), this.onErrorObservable.addOnce((i) => { t(i); }); }); } /** * @internal */ _setState(e) { this._state !== e && (this._state = e, this.onLoaderStateChangedObservable.notifyObservers(this._state), this._log(dm[this._state])); } /** * @internal */ _loadFile(e, t, i, r, s, n) { const a = e._loadFile(t, i, (l) => { this._onProgress(l, a); }, !0, r, s, n); return a.onCompleteObservable.add((l) => { this._requests.splice(this._requests.indexOf(l), 1); }), this._requests.push(a), a; } _onProgress(e, t) { if (!this._progressCallback) return; t._lengthComputable = e.lengthComputable, t._loaded = e.loaded, t._total = e.total; let i = !0, r = 0, s = 0; for (const n of this._requests) { if (n._lengthComputable === void 0 || n._loaded === void 0 || n._total === void 0) return; i = i && n._lengthComputable, r += n._loaded, s += n._total; } this._progressCallback({ lengthComputable: i, loaded: r, total: i ? s : 0 }); } _validate(e, t, i = "", r = "") { this.validate && (this._startPerformanceCounter("Validate JSON"), toe.ValidateAsync(t, i, r, (s) => this.preprocessUrlAsync(i + s).then((n) => e._loadFileAsync(n, void 0, !0, !0).then((a) => new Uint8Array(a, 0, a.byteLength)))).then((s) => { this._endPerformanceCounter("Validate JSON"), this.onValidatedObservable.notifyObservers(s), this.onValidatedObservable.clear(); }, (s) => { this._endPerformanceCounter("Validate JSON"), Ve.Warn(`Failed to validate: ${s.message}`), this.onValidatedObservable.clear(); })); } _getLoader(e) { const t = e.json.asset || {}; this._log(`Asset version: ${t.version}`), t.minVersion && this._log(`Asset minimum version: ${t.minVersion}`), t.generator && this._log(`Asset generator: ${t.generator}`); const i = yl._parseVersion(t.version); if (!i) throw new Error("Invalid version: " + t.version); if (t.minVersion !== void 0) { const n = yl._parseVersion(t.minVersion); if (!n) throw new Error("Invalid minimum version: " + t.minVersion); if (yl._compareVersion(n, { major: 2, minor: 0 }) > 0) throw new Error("Incompatible minimum version: " + t.minVersion); } const s = { 1: yl._CreateGLTF1Loader, 2: yl._CreateGLTF2Loader }[i.major]; if (!s) throw new Error("Unsupported version: " + t.version); return s(this); } _parseJson(e) { this._startPerformanceCounter("Parse JSON"), this._log(`JSON length: ${e.length}`); const t = JSON.parse(e); return this._endPerformanceCounter("Parse JSON"), t; } _unpackBinaryAsync(e) { return this._startPerformanceCounter("Unpack Binary"), e.loadAsync(20).then(() => { const t = { Magic: 1179937895 }, i = e.readUint32(); if (i !== t.Magic) throw new F4("Unexpected magic: " + i, $C.GLTFLoaderUnexpectedMagicError); const r = e.readUint32(); this.loggingEnabled 
&& this._log(`Binary version: ${r}`); const s = e.readUint32(); !this.useRangeRequests && s !== e.buffer.byteLength && Ce.Warn(`Length in header does not match actual data length: ${s} != ${e.buffer.byteLength}`); let n; switch (r) { case 1: { n = this._unpackBinaryV1Async(e, s); break; } case 2: { n = this._unpackBinaryV2Async(e, s); break; } default: throw new Error("Unsupported version: " + r); } return this._endPerformanceCounter("Unpack Binary"), n; }); } _unpackBinaryV1Async(e, t) { const i = { JSON: 0 }, r = e.readUint32(), s = e.readUint32(); if (s !== i.JSON) throw new Error(`Unexpected content format: ${s}`); const n = t - e.byteOffset, a = { json: this._parseJson(e.readString(r)), bin: null }; if (n !== 0) { const l = e.byteOffset; a.bin = { readAsync: (o, u) => e.buffer.readAsync(l + o, u), byteLength: n }; } return Promise.resolve(a); } _unpackBinaryV2Async(e, t) { const i = { JSON: 1313821514, BIN: 5130562 }, r = e.readUint32(); if (e.readUint32() !== i.JSON) throw new Error("First chunk format is not JSON"); return e.byteOffset + r === t ? e.loadAsync(r).then(() => ({ json: this._parseJson(e.readString(r)), bin: null })) : e.loadAsync(r + 8).then(() => { const n = { json: this._parseJson(e.readString(r)), bin: null }, a = () => { const l = e.readUint32(); switch (e.readUint32()) { case i.JSON: throw new Error("Unexpected JSON chunk"); case i.BIN: { const u = e.byteOffset; n.bin = { readAsync: (h, d) => e.buffer.readAsync(u + h, d), byteLength: l }, e.skipBytes(l); break; } default: { e.skipBytes(l); break; } } return e.byteOffset !== t ? e.loadAsync(8).then(a) : Promise.resolve(n); }; return a(); }); } static _parseVersion(e) { if (e === "1.0" || e === "1.0.1") return { major: 1, minor: 0 }; const t = (e + "").match(/^(\d+)\.(\d+)/); return t ? { major: parseInt(t[1]), minor: parseInt(t[2]) } : null; } static _compareVersion(e, t) { return e.major > t.major ? 1 : e.major < t.major ? -1 : e.minor > t.minor ? 1 : e.minor < t.minor ? 
-1 : 0; } /** * @internal */ _logOpen(e) { this._log(e), this._logIndentLevel++; } /** @internal */ _logClose() { --this._logIndentLevel; } _logEnabled(e) { const t = yl._logSpaces.substr(0, this._logIndentLevel * 2); Ce.Log(`${t}${e}`); } _logDisabled(e) { } _startPerformanceCounterEnabled(e) { Ve.StartPerformanceCounter(e); } _startPerformanceCounterDisabled(e) { } _endPerformanceCounterEnabled(e) { Ve.EndPerformanceCounter(e); } _endPerformanceCounterDisabled(e) { } } yl.IncrementalLoading = !0; yl.HomogeneousCoordinates = !1; yl._MagicBase64Encoded = "Z2xURg"; yl._logSpaces = " "; fr && fr.RegisterPlugin(new yl()); var i5; (function(c) { c[c.BYTE = 5120] = "BYTE", c[c.UNSIGNED_BYTE = 5121] = "UNSIGNED_BYTE", c[c.SHORT = 5122] = "SHORT", c[c.UNSIGNED_SHORT = 5123] = "UNSIGNED_SHORT", c[c.FLOAT = 5126] = "FLOAT"; })(i5 || (i5 = {})); var qH; (function(c) { c[c.FRAGMENT = 35632] = "FRAGMENT", c[c.VERTEX = 35633] = "VERTEX"; })(qH || (qH = {})); var z_; (function(c) { c[c.BYTE = 5120] = "BYTE", c[c.UNSIGNED_BYTE = 5121] = "UNSIGNED_BYTE", c[c.SHORT = 5122] = "SHORT", c[c.UNSIGNED_SHORT = 5123] = "UNSIGNED_SHORT", c[c.INT = 5124] = "INT", c[c.UNSIGNED_INT = 5125] = "UNSIGNED_INT", c[c.FLOAT = 5126] = "FLOAT", c[c.FLOAT_VEC2 = 35664] = "FLOAT_VEC2", c[c.FLOAT_VEC3 = 35665] = "FLOAT_VEC3", c[c.FLOAT_VEC4 = 35666] = "FLOAT_VEC4", c[c.INT_VEC2 = 35667] = "INT_VEC2", c[c.INT_VEC3 = 35668] = "INT_VEC3", c[c.INT_VEC4 = 35669] = "INT_VEC4", c[c.BOOL = 35670] = "BOOL", c[c.BOOL_VEC2 = 35671] = "BOOL_VEC2", c[c.BOOL_VEC3 = 35672] = "BOOL_VEC3", c[c.BOOL_VEC4 = 35673] = "BOOL_VEC4", c[c.FLOAT_MAT2 = 35674] = "FLOAT_MAT2", c[c.FLOAT_MAT3 = 35675] = "FLOAT_MAT3", c[c.FLOAT_MAT4 = 35676] = "FLOAT_MAT4", c[c.SAMPLER_2D = 35678] = "SAMPLER_2D"; })(z_ || (z_ = {})); var Y9; (function(c) { c[c.CLAMP_TO_EDGE = 33071] = "CLAMP_TO_EDGE", c[c.MIRRORED_REPEAT = 33648] = "MIRRORED_REPEAT", c[c.REPEAT = 10497] = "REPEAT"; })(Y9 || (Y9 = {})); var QA; (function(c) { c[c.NEAREST = 9728] = "NEAREST", c[c.LINEAR = 9728] = "LINEAR", c[c.NEAREST_MIPMAP_NEAREST = 9984] = "NEAREST_MIPMAP_NEAREST", c[c.LINEAR_MIPMAP_NEAREST = 9985] = "LINEAR_MIPMAP_NEAREST", c[c.NEAREST_MIPMAP_LINEAR = 9986] = "NEAREST_MIPMAP_LINEAR", c[c.LINEAR_MIPMAP_LINEAR = 9987] = "LINEAR_MIPMAP_LINEAR"; })(QA || (QA = {})); var xq; (function(c) { c[c.ALPHA = 6406] = "ALPHA", c[c.RGB = 6407] = "RGB", c[c.RGBA = 6408] = "RGBA", c[c.LUMINANCE = 6409] = "LUMINANCE", c[c.LUMINANCE_ALPHA = 6410] = "LUMINANCE_ALPHA"; })(xq || (xq = {})); var JH; (function(c) { c[c.FRONT = 1028] = "FRONT", c[c.BACK = 1029] = "BACK", c[c.FRONT_AND_BACK = 1032] = "FRONT_AND_BACK"; })(JH || (JH = {})); var gc; (function(c) { c[c.ZERO = 0] = "ZERO", c[c.ONE = 1] = "ONE", c[c.SRC_COLOR = 768] = "SRC_COLOR", c[c.ONE_MINUS_SRC_COLOR = 769] = "ONE_MINUS_SRC_COLOR", c[c.DST_COLOR = 774] = "DST_COLOR", c[c.ONE_MINUS_DST_COLOR = 775] = "ONE_MINUS_DST_COLOR", c[c.SRC_ALPHA = 770] = "SRC_ALPHA", c[c.ONE_MINUS_SRC_ALPHA = 771] = "ONE_MINUS_SRC_ALPHA", c[c.DST_ALPHA = 772] = "DST_ALPHA", c[c.ONE_MINUS_DST_ALPHA = 773] = "ONE_MINUS_DST_ALPHA", c[c.CONSTANT_COLOR = 32769] = "CONSTANT_COLOR", c[c.ONE_MINUS_CONSTANT_COLOR = 32770] = "ONE_MINUS_CONSTANT_COLOR", c[c.CONSTANT_ALPHA = 32771] = "CONSTANT_ALPHA", c[c.ONE_MINUS_CONSTANT_ALPHA = 32772] = "ONE_MINUS_CONSTANT_ALPHA", c[c.SRC_ALPHA_SATURATE = 776] = "SRC_ALPHA_SATURATE"; })(gc || (gc = {})); class Eh { /** * Sets the given "parameter" matrix * @param scene the Scene object * @param source the source node where to pick the matrix * @param 
parameter the GLTF technique parameter * @param uniformName the name of the shader's uniform * @param shaderMaterial the shader material */ static SetMatrix(e, t, i, r, s) { let n = null; if (i.semantic === "MODEL" ? n = t.getWorldMatrix() : i.semantic === "PROJECTION" ? n = e.getProjectionMatrix() : i.semantic === "VIEW" ? n = e.getViewMatrix() : i.semantic === "MODELVIEWINVERSETRANSPOSE" ? n = Ae.Transpose(t.getWorldMatrix().multiply(e.getViewMatrix()).invert()) : i.semantic === "MODELVIEW" ? n = t.getWorldMatrix().multiply(e.getViewMatrix()) : i.semantic === "MODELVIEWPROJECTION" ? n = t.getWorldMatrix().multiply(e.getTransformMatrix()) : i.semantic === "MODELINVERSE" ? n = t.getWorldMatrix().invert() : i.semantic === "VIEWINVERSE" ? n = e.getViewMatrix().invert() : i.semantic === "PROJECTIONINVERSE" ? n = e.getProjectionMatrix().invert() : i.semantic === "MODELVIEWINVERSE" ? n = t.getWorldMatrix().multiply(e.getViewMatrix()).invert() : i.semantic === "MODELVIEWPROJECTIONINVERSE" ? n = t.getWorldMatrix().multiply(e.getTransformMatrix()).invert() : i.semantic === "MODELINVERSETRANSPOSE" && (n = Ae.Transpose(t.getWorldMatrix().invert())), n) switch (i.type) { case z_.FLOAT_MAT2: s.setMatrix2x2(r, Ae.GetAsMatrix2x2(n)); break; case z_.FLOAT_MAT3: s.setMatrix3x3(r, Ae.GetAsMatrix3x3(n)); break; case z_.FLOAT_MAT4: s.setMatrix(r, n); break; } } /** * Sets the given "parameter" matrix * @param shaderMaterial the shader material * @param uniform the name of the shader's uniform * @param value the value of the uniform * @param type the uniform's type (EParameterType FLOAT, VEC2, VEC3 or VEC4) */ static SetUniform(e, t, i, r) { switch (r) { case z_.FLOAT: return e.setFloat(t, i), !0; case z_.FLOAT_VEC2: return e.setVector2(t, at.FromArray(i)), !0; case z_.FLOAT_VEC3: return e.setVector3(t, D.FromArray(i)), !0; case z_.FLOAT_VEC4: return e.setVector4(t, Di.FromArray(i)), !0; default: return !1; } } /** * Returns the wrap mode of the texture * @param mode the mode value */ static GetWrapMode(e) { switch (e) { case Y9.CLAMP_TO_EDGE: return De.CLAMP_ADDRESSMODE; case Y9.MIRRORED_REPEAT: return De.MIRROR_ADDRESSMODE; case Y9.REPEAT: return De.WRAP_ADDRESSMODE; default: return De.WRAP_ADDRESSMODE; } } /** * Returns the byte stride giving an accessor * @param accessor the GLTF accessor objet */ static GetByteStrideFromType(e) { switch (e.type) { case "VEC2": return 2; case "VEC3": return 3; case "VEC4": return 4; case "MAT2": return 4; case "MAT3": return 9; case "MAT4": return 16; default: return 1; } } /** * Returns the texture filter mode giving a mode value * @param mode the filter mode value * @returns the filter mode (TODO - needs to be a type?) 
*/ static GetTextureFilterMode(e) { switch (e) { case QA.LINEAR: case QA.LINEAR_MIPMAP_NEAREST: case QA.LINEAR_MIPMAP_LINEAR: return De.TRILINEAR_SAMPLINGMODE; case QA.NEAREST: case QA.NEAREST_MIPMAP_NEAREST: return De.NEAREST_SAMPLINGMODE; default: return De.BILINEAR_SAMPLINGMODE; } } static GetBufferFromBufferView(e, t, i, r, s) { i = t.byteOffset + i; const n = e.loadedBufferViews[t.buffer]; if (i + r > n.byteLength) throw new Error("Buffer access is out of range"); const a = n.buffer; switch (i += n.byteOffset, s) { case i5.BYTE: return new Int8Array(a, i, r); case i5.UNSIGNED_BYTE: return new Uint8Array(a, i, r); case i5.SHORT: return new Int16Array(a, i, r); case i5.UNSIGNED_SHORT: return new Uint16Array(a, i, r); default: return new Float32Array(a, i, r); } } /** * Returns a buffer from its accessor * @param gltfRuntime the GLTF runtime * @param accessor the GLTF accessor */ static GetBufferFromAccessor(e, t) { const i = e.bufferViews[t.bufferView], r = t.count * Eh.GetByteStrideFromType(t); return Eh.GetBufferFromBufferView(e, i, t.byteOffset, r, t.componentType); } /** * Decodes a buffer view into a string * @param view the buffer view */ static DecodeBufferToText(e) { let t = ""; const i = e.byteLength; for (let r = 0; r < i; ++r) t += String.fromCharCode(e[r]); return t; } /** * Returns the default material of gltf. Related to * https://github.com/KhronosGroup/glTF/tree/master/specification/1.0#appendix-a-default-material * @param scene the Babylon.js scene */ static GetDefaultMaterial(e) { if (!Eh._DefaultMaterial) { Cr.ShadersStore.GLTFDefaultMaterialVertexShader = [ "precision highp float;", "", "uniform mat4 worldView;", "uniform mat4 projection;", "", "attribute vec3 position;", "", "void main(void)", "{", " gl_Position = projection * worldView * vec4(position, 1.0);", "}" ].join(` `), Cr.ShadersStore.GLTFDefaultMaterialPixelShader = [ "precision highp float;", "", "uniform vec4 u_emission;", "", "void main(void)", "{", " gl_FragColor = u_emission;", "}" ].join(` `); const t = { vertex: "GLTFDefaultMaterial", fragment: "GLTFDefaultMaterial" }, i = { attributes: ["position"], uniforms: ["worldView", "projection", "u_emission"], samplers: new Array(), needAlphaBlending: !1 }; Eh._DefaultMaterial = new Lo("GLTFDefaultMaterial", e, t, i), Eh._DefaultMaterial.setColor4("u_emission", new Et(0.5, 0.5, 0.5, 1)); } return Eh._DefaultMaterial; } } Eh._DefaultMaterial = null; var r5; (function(c) { c[c.IDENTIFIER = 1] = "IDENTIFIER", c[c.UNKNOWN = 2] = "UNKNOWN", c[c.END_OF_INPUT = 3] = "END_OF_INPUT"; })(r5 || (r5 = {})); class bq { constructor(e) { this._pos = 0, this.currentToken = r5.UNKNOWN, this.currentIdentifier = "", this.currentString = "", this.isLetterOrDigitPattern = /^[a-zA-Z0-9]+$/, this._toParse = e, this._maxPos = e.length; } getNextToken() { if (this.isEnd()) return r5.END_OF_INPUT; if (this.currentString = this.read(), this.currentToken = r5.UNKNOWN, this.currentString === "_" || this.isLetterOrDigitPattern.test(this.currentString)) for (this.currentToken = r5.IDENTIFIER, this.currentIdentifier = this.currentString; !this.isEnd() && (this.isLetterOrDigitPattern.test(this.currentString = this.peek()) || this.currentString === "_"); ) this.currentIdentifier += this.currentString, this.forward(); return this.currentToken; } peek() { return this._toParse[this._pos]; } read() { return this._toParse[this._pos++]; } forward() { this._pos++; } isEnd() { return this._pos >= this._maxPos; } } const ioe = ["MODEL", "VIEW", "PROJECTION", "MODELVIEW", "MODELVIEWPROJECTION", 
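/*
 * Summary of the accessor decoding implemented above: componentType selects the typed-array
 * view (5120 BYTE -> Int8Array, 5121 UNSIGNED_BYTE -> Uint8Array, 5122 SHORT -> Int16Array,
 * 5123 UNSIGNED_SHORT -> Uint16Array, 5126 FLOAT -> Float32Array), and the element count is
 * accessor.count multiplied by the per-type stride (SCALAR 1, VEC2 2, VEC3 3, VEC4/MAT2 4,
 * MAT3 9, MAT4 16). Illustrative arithmetic for a VEC3 FLOAT accessor with count 24:
 *
 *   const elementCount = 24 * 3;            // 72 floats
 *   const byteLength = elementCount * 4;    // 288 bytes read from the buffer view
 */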
"JOINTMATRIX"], roe = ["world", "view", "projection", "worldView", "worldViewProjection", "mBones"], nAe = ["translation", "rotation", "scale"], aAe = ["position", "rotationQuaternion", "scaling"], oAe = (c, e) => { for (const t in c) { const i = c[t]; e.buffers[t] = i, e.buffersCount++; } }, lAe = (c, e) => { for (const t in c) { const i = c[t]; e.shaders[t] = i, e.shaderscount++; } }, M1 = (c, e, t) => { for (const i in c) { const r = c[i]; t[e][i] = r; } }, cAe = (c) => { if (c) for (let e = 0; e < c.length / 2; e++) c[e * 2 + 1] = 1 - c[e * 2 + 1]; }, Eq = (c) => { if (c.semantic === "NORMAL") return "normal"; if (c.semantic === "POSITION") return "position"; if (c.semantic === "JOINT") return "matricesIndices"; if (c.semantic === "WEIGHT") return "matricesWeights"; if (c.semantic === "COLOR") return "color"; if (c.semantic && c.semantic.indexOf("TEXCOORD_") !== -1) { const e = Number(c.semantic.split("_")[1]); return "uv" + (e === 0 ? "" : e + 1); } return null; }, uAe = (c) => { for (const e in c.animations) { const t = c.animations[e]; if (!t.channels || !t.samplers) continue; let i = null; for (let r = 0; r < t.channels.length; r++) { const s = t.channels[r], n = t.samplers[s.sampler]; if (!n) continue; let a = null, l = null; t.parameters ? (a = t.parameters[n.input], l = t.parameters[n.output]) : (a = n.input, l = n.output); const o = Eh.GetBufferFromAccessor(c, c.accessors[a]), u = Eh.GetBufferFromAccessor(c, c.accessors[l]), h = s.target.id; let d = c.scene.getNodeById(h); if (d === null && (d = c.scene.getNodeByName(h)), d === null) { Ve.Warn("Creating animation named " + e + ". But cannot find node named " + h + " to attach to"); continue; } const f = d instanceof ha; let p = s.target.path; const m = nAe.indexOf(p); m !== -1 && (p = aAe[m]); let _ = nt.ANIMATIONTYPE_MATRIX; f || (p === "rotationQuaternion" ? (_ = nt.ANIMATIONTYPE_QUATERNION, d.rotationQuaternion = new Ze()) : _ = nt.ANIMATIONTYPE_VECTOR3); let v = null; const C = []; let x = 0, b = !1; f && i && i.getKeys().length === o.length && (v = i, b = !0), b || (c.scene._blockEntityCollection = !!c.assetContainer, v = new nt(e, f ? "_matrix" : p, 1, _, nt.ANIMATIONLOOPMODE_CYCLE), c.scene._blockEntityCollection = !1); for (let S = 0; S < o.length; S++) { let M = null; if (p === "rotationQuaternion" ? (M = Ze.FromArray([u[x], u[x + 1], u[x + 2], u[x + 3]]), x += 4) : (M = D.FromArray([u[x], u[x + 1], u[x + 2]]), x += 3), f) { const R = d; let w = D.Zero(), V = new Ze(), k = D.Zero(), L = R.getBaseMatrix(); b && i && (L = i.getKeys()[S].value), L.decompose(k, V, w), p === "position" ? w = M : p === "rotationQuaternion" ? V = M : k = M, M = Ae.Compose(k, V, w); } b ? 
i && (i.getKeys()[S].value = M) : C.push({ frame: o[S], value: M }); } !b && v && (v.setKeys(C), d.animations.push(v)), i = v, c.scene.stopAnimation(d), c.scene.beginAnimation(d, 0, o[o.length - 1], !0, 1); } } }, Aj = (c) => { let e = null; if (c.translation || c.rotation || c.scale) { const t = D.FromArray(c.scale || [1, 1, 1]), i = Ze.FromArray(c.rotation || [0, 0, 0, 1]), r = D.FromArray(c.translation || [0, 0, 0]); e = Ae.Compose(t, i, r); } else e = Ae.FromArray(c.matrix); return e; }, soe = (c, e, t, i) => { for (let s = 0; s < i.bones.length; s++) if (i.bones[s].name === t) return i.bones[s]; const r = c.nodes; for (const s in r) { const n = r[s]; if (!n.jointName) continue; const a = n.children; for (let l = 0; l < a.length; l++) { const o = c.nodes[a[l]]; if (o.jointName && o.jointName === t) { const u = Aj(n), h = new ha(n.name || "", i, soe(c, e, n.jointName, i), u); return h.id = s, h; } } } return null; }, hAe = (c, e) => { for (let t = 0; t < c.length; t++) { const i = c[t]; for (let r = 0; r < i.node.children.length; r++) if (i.node.children[r] === e) return i.bone; } return null; }, Jk = (c, e) => { const t = c.nodes; let i = t[e]; if (i) return { node: i, id: e }; for (const r in t) if (i = t[r], i.jointName === e) return { node: i, id: r }; return null; }, dAe = (c, e) => { for (let t = 0; t < c.jointNames.length; t++) if (c.jointNames[t] === e) return !0; return !1; }, fAe = (c, e, t, i) => { for (const r in c.nodes) { const s = c.nodes[r], n = r; if (!s.jointName || dAe(t, s.jointName)) continue; const a = Aj(s), l = new ha(s.name || "", e, null, a); l.id = n, i.push({ bone: l, node: s, id: n }); } for (let r = 0; r < i.length; r++) { const s = i[r], n = s.node.children; for (let a = 0; a < n.length; a++) { let l = null; for (let o = 0; o < i.length; o++) if (i[o].id === n[a]) { l = i[o]; break; } l && (l.bone._parent = s.bone, s.bone.children.push(l.bone)); } } }, pAe = (c, e, t, i) => { if (i || (i = new sx(e.name || "", "", c.scene)), !e.babylonSkeleton) return i; const r = [], s = []; fAe(c, i, e, r), i.bones = []; for (let a = 0; a < e.jointNames.length; a++) { const l = Jk(c, e.jointNames[a]); if (!l) continue; const o = l.node; if (!o) { Ve.Warn("Joint named " + e.jointNames[a] + " does not exist"); continue; } const u = l.id, h = c.scene.getBoneById(u); if (h) { i.bones.push(h); continue; } let d = !1, f = null; for (let _ = 0; _ < a; _++) { const v = Jk(c, e.jointNames[_]); if (!v) continue; const C = v.node; if (!C) { Ve.Warn("Joint named " + e.jointNames[_] + " does not exist when looking for parent"); continue; } const x = C.children; if (x) { d = !1; for (let b = 0; b < x.length; b++) if (x[b] === u) { f = soe(c, e, e.jointNames[_], i), d = !0; break; } if (d) break; } } const p = Aj(o); !f && r.length > 0 && (f = hAe(r, u), f && s.indexOf(f) === -1 && s.push(f)); const m = new ha(o.jointName || "", i, f, p); m.id = u; } const n = i.bones; i.bones = []; for (let a = 0; a < e.jointNames.length; a++) { const l = Jk(c, e.jointNames[a]); if (l) { for (let o = 0; o < n.length; o++) if (n[o].id === l.id) { i.bones.push(n[o]); break; } } } i.prepare(); for (let a = 0; a < s.length; a++) i.bones.push(s[a]); return i; }, Tq = (c, e, t, i, r) => { if (r || (c.scene._blockEntityCollection = !!c.assetContainer, r = new ke(e.name || "", c.scene), r._parentContainer = c.assetContainer, c.scene._blockEntityCollection = !1, r.id = i), !e.babylonNode) return r; const s = []; let n = null; const a = [], l = [], o = [], u = []; for (let f = 0; f < t.length; f++) { const p = 
t[f], m = c.meshes[p]; if (m) for (let _ = 0; _ < m.primitives.length; _++) { const v = new Ot(), C = m.primitives[_]; C.mode; const x = C.attributes; let b = null, S = null; for (const R in x) if (b = c.accessors[x[R]], S = Eh.GetBufferFromAccessor(c, b), R === "NORMAL") v.normals = new Float32Array(S.length), v.normals.set(S); else if (R === "POSITION") { if (yl.HomogeneousCoordinates) { v.positions = new Float32Array(S.length - S.length / 4); for (let w = 0; w < S.length; w += 4) v.positions[w] = S[w], v.positions[w + 1] = S[w + 1], v.positions[w + 2] = S[w + 2]; } else v.positions = new Float32Array(S.length), v.positions.set(S); l.push(v.positions.length); } else if (R.indexOf("TEXCOORD_") !== -1) { const w = Number(R.split("_")[1]), V = Y.UVKind + (w === 0 ? "" : w + 1), k = new Float32Array(S.length); k.set(S), cAe(k), v.set(k, V); } else R === "JOINT" ? (v.matricesIndices = new Float32Array(S.length), v.matricesIndices.set(S)) : R === "WEIGHT" ? (v.matricesWeights = new Float32Array(S.length), v.matricesWeights.set(S)) : R === "COLOR" && (v.colors = new Float32Array(S.length), v.colors.set(S)); if (b = c.accessors[C.indices], b) S = Eh.GetBufferFromAccessor(c, b), v.indices = new Int32Array(S.length), v.indices.set(S), u.push(v.indices.length); else { const R = []; for (let w = 0; w < v.positions.length / 3; w++) R.push(w); v.indices = new Int32Array(R), u.push(v.indices.length); } n ? n.merge(v) : n = v; const M = c.scene.getMaterialById(C.material); s.push(M === null ? Eh.GetDefaultMaterial(c.scene) : M), a.push(a.length === 0 ? 0 : a[a.length - 1] + l[l.length - 2]), o.push(o.length === 0 ? 0 : o[o.length - 1] + u[u.length - 2]); } } let h; c.scene._blockEntityCollection = !!c.assetContainer, s.length > 1 ? (h = new xm("multimat" + i, c.scene), h.subMaterials = s) : h = new Dt("multimat" + i, c.scene), s.length === 1 && (h = s[0]), h._parentContainer = c.assetContainer, r.material || (r.material = h), new yc(i, c.scene, n, !1, r), r.computeWorldMatrix(!0), c.scene._blockEntityCollection = !1, r.subMeshes = []; let d = 0; for (let f = 0; f < t.length; f++) { const p = t[f], m = c.meshes[p]; if (m) for (let _ = 0; _ < m.primitives.length; _++) m.primitives[_].mode, ed.AddToMesh(d, a[d], l[d], o[d], u[d], r, r, !0), d++; } return r; }, eG = (c, e, t, i) => { c.position && (c.position = e), (c.rotationQuaternion || c.rotation) && (c.rotationQuaternion = t), c.scaling && (c.scaling = i); }, _Ae = (c, e) => { if (e.matrix) { const t = new D(0, 0, 0), i = new Ze(), r = new D(0, 0, 0); Ae.FromArray(e.matrix).decompose(r, i, t), eG(c, t, i, r); } else e.translation && e.rotation && e.scale && eG(c, D.FromArray(e.translation), Ze.FromArray(e.rotation), D.FromArray(e.scale)); c.computeWorldMatrix(!0); }, mAe = (c, e, t) => { let i = null; if (c.importOnlyMeshes && (e.skin || e.meshes) && c.importMeshesNames && c.importMeshesNames.length > 0 && c.importMeshesNames.indexOf(e.name || "") === -1) return null; if (e.skin) { if (e.meshes) { const r = c.skins[e.skin], s = Tq(c, e, e.meshes, t, e.babylonNode); s.skeleton = c.scene.getLastSkeletonById(e.skin), s.skeleton === null && (s.skeleton = pAe(c, r, s, r.babylonSkeleton), r.babylonSkeleton || (r.babylonSkeleton = s.skeleton)), i = s; } } else if (e.meshes) i = Tq(c, e, e.mesh ? 
[e.mesh] : e.meshes, t, e.babylonNode); else if (e.light && !e.babylonNode && !c.importOnlyMeshes) { const r = c.lights[e.light]; if (r) { if (r.type === "ambient") { const s = r[r.type], n = new vg(e.light, D.Zero(), c.scene); n.name = e.name || "", s.color && (n.diffuse = ze.FromArray(s.color)), i = n; } else if (r.type === "directional") { const s = r[r.type], n = new Pd(e.light, D.Zero(), c.scene); n.name = e.name || "", s.color && (n.diffuse = ze.FromArray(s.color)), i = n; } else if (r.type === "point") { const s = r[r.type], n = new s6(e.light, D.Zero(), c.scene); n.name = e.name || "", s.color && (n.diffuse = ze.FromArray(s.color)), i = n; } else if (r.type === "spot") { const s = r[r.type], n = new td(e.light, D.Zero(), D.Zero(), 0, 0, c.scene); n.name = e.name || "", s.color && (n.diffuse = ze.FromArray(s.color)), s.fallOfAngle && (n.angle = s.fallOfAngle), s.fallOffExponent && (n.exponent = s.fallOffExponent), i = n; } } } else if (e.camera && !e.babylonNode && !c.importOnlyMeshes) { const r = c.cameras[e.camera]; if (r) { if (c.scene._blockEntityCollection = !!c.assetContainer, r.type === "orthographic") { const s = new du(e.camera, D.Zero(), c.scene, !1); s.name = e.name || "", s.mode = Ai.ORTHOGRAPHIC_CAMERA, s.attachControl(), i = s, s._parentContainer = c.assetContainer; } else if (r.type === "perspective") { const s = r[r.type], n = new du(e.camera, D.Zero(), c.scene, !1); n.name = e.name || "", n.attachControl(), s.aspectRatio || (s.aspectRatio = c.scene.getEngine().getRenderWidth() / c.scene.getEngine().getRenderHeight()), s.znear && s.zfar && (n.maxZ = s.zfar, n.minZ = s.znear), i = n, n._parentContainer = c.assetContainer; } c.scene._blockEntityCollection = !1; } } if (!e.jointName) { if (e.babylonNode) return e.babylonNode; if (i === null) { c.scene._blockEntityCollection = !!c.assetContainer; const r = new ke(e.name || "", c.scene); r._parentContainer = c.assetContainer, c.scene._blockEntityCollection = !1, e.babylonNode = r, i = r; } } if (i !== null) { if (e.matrix && i instanceof ke) _Ae(i, e); else { const r = e.translation || [0, 0, 0], s = e.rotation || [0, 0, 0, 1], n = e.scale || [1, 1, 1]; eG(i, D.FromArray(r), Ze.FromArray(s), D.FromArray(n)); } i.updateCache(!0), e.babylonNode = i; } return i; }, EL = (c, e, t, i = !1) => { const r = c.nodes[e]; let s = null; if (c.importOnlyMeshes && !i && c.importMeshesNames ? c.importMeshesNames.indexOf(r.name || "") !== -1 || c.importMeshesNames.length === 0 ? 
i = !0 : i = !1 : i = !0, !r.jointName && i && (s = mAe(c, r, e), s !== null && (s.id = e, s.parent = t)), r.children) for (let n = 0; n < r.children.length; n++) EL(c, r.children[n], s, i); }, Sq = (c) => { let e = c.currentScene; if (e) for (let t = 0; t < e.nodes.length; t++) EL(c, e.nodes[t], null); else for (const t in c.scenes) { e = c.scenes[t]; for (let i = 0; i < e.nodes.length; i++) EL(c, e.nodes[i], null); } uAe(c); for (let t = 0; t < c.scene.skeletons.length; t++) { const i = c.scene.skeletons[t]; c.scene.beginAnimation(i, 0, Number.MAX_VALUE, !0, 1); } }, gAe = (c, e, t, i, r, s, n) => { const a = s.values || r.parameters; for (const l in t) { const o = t[l], u = o.type; if (u === z_.FLOAT_MAT2 || u === z_.FLOAT_MAT3 || u === z_.FLOAT_MAT4) { if (o.semantic && !o.source && !o.node) Eh.SetMatrix(e.scene, c, o, l, i.getEffect()); else if (o.semantic && (o.source || o.node)) { let h = e.scene.getNodeByName(o.source || o.node || ""); if (h === null && (h = e.scene.getNodeById(o.source || o.node || "")), h === null) continue; Eh.SetMatrix(e.scene, h, o, l, i.getEffect()); } } else { const h = a[r.uniforms[l]]; if (!h) continue; if (u === z_.SAMPLER_2D) { const d = e.textures[s.values ? h : o.value].babylonTexture; if (d == null) continue; i.getEffect().setTexture(l, d); } else Eh.SetUniform(i.getEffect(), l, h, u); } } n(i); }, vAe = (c, e, t, i, r) => { const s = i.values || t.parameters, n = t.uniforms; for (const a in r) { const l = r[a], o = l.type; let u = s[n[a]]; if (u === void 0 && (u = l.value), !u) continue; const h = (d) => (f) => { l.value && d && (e.setTexture(d, f), delete r[d]); }; o === z_.SAMPLER_2D ? sf.LoadTextureAsync(c, i.values ? u : l.value, h(a), () => h(null)) : l.value && Eh.SetUniform(e, a, i.values ? u : l.value, o) && delete r[a]; } }, AAe = (c, e, t) => (i, r) => { e.dispose(!0), t("Cannot compile program named " + c.name + ". Error: " + r + ". 
Default material will be applied"); }, yAe = (c, e, t, i, r, s) => (n) => { vAe(c, e, t, i, r), e.onBind = (a) => { gAe(a, c, r, e, t, i, s); }; }, Mq = (c, e, t) => { for (const i in e.uniforms) { const r = e.uniforms[i], s = e.parameters[r]; if (c.currentIdentifier === i && s.semantic && !s.source && !s.node) { const n = ioe.indexOf(s.semantic); if (n !== -1) return delete t[i], roe[n]; } } return c.currentIdentifier; }, Rq = (c) => { for (const e in c.materials) sf.LoadMaterialAsync(c, e, () => { }, () => { }); }; class eT { static CreateRuntime(e, t, i) { const r = { extensions: {}, accessors: {}, buffers: {}, bufferViews: {}, meshes: {}, lights: {}, cameras: {}, nodes: {}, images: {}, textures: {}, shaders: {}, programs: {}, samplers: {}, techniques: {}, materials: {}, animations: {}, skins: {}, extensionsUsed: [], scenes: {}, buffersCount: 0, shaderscount: 0, scene: t, rootUrl: i, loadedBufferCount: 0, loadedBufferViews: {}, loadedShaderCount: 0, importOnlyMeshes: !1, dummyNodes: [], assetContainer: null }; return e.extensions && M1(e.extensions, "extensions", r), e.extensionsUsed && M1(e.extensionsUsed, "extensionsUsed", r), e.buffers && oAe(e.buffers, r), e.bufferViews && M1(e.bufferViews, "bufferViews", r), e.accessors && M1(e.accessors, "accessors", r), e.meshes && M1(e.meshes, "meshes", r), e.lights && M1(e.lights, "lights", r), e.cameras && M1(e.cameras, "cameras", r), e.nodes && M1(e.nodes, "nodes", r), e.images && M1(e.images, "images", r), e.textures && M1(e.textures, "textures", r), e.shaders && lAe(e.shaders, r), e.programs && M1(e.programs, "programs", r), e.samplers && M1(e.samplers, "samplers", r), e.techniques && M1(e.techniques, "techniques", r), e.materials && M1(e.materials, "materials", r), e.animations && M1(e.animations, "animations", r), e.skins && M1(e.skins, "skins", r), e.scenes && (r.scenes = e.scenes), e.scene && e.scenes && (r.currentScene = e.scenes[e.scene]), r; } static LoadBufferAsync(e, t, i, r, s) { const n = e.buffers[t]; Ve.IsBase64(n.uri) ? setTimeout(() => i(new Uint8Array(Ve.DecodeBase64(n.uri)))) : Ve.LoadFile(e.rootUrl + n.uri, (a) => i(new Uint8Array(a)), s, void 0, !0, (a) => { a && r(a.status + " " + a.statusText); }); } static LoadTextureBufferAsync(e, t, i, r) { const s = e.textures[t]; if (!s || !s.source) { r(""); return; } if (s.babylonTexture) { i(null); return; } const n = e.images[s.source]; Ve.IsBase64(n.uri) ? setTimeout(() => i(new Uint8Array(Ve.DecodeBase64(n.uri)))) : Ve.LoadFile(e.rootUrl + n.uri, (a) => i(new Uint8Array(a)), void 0, void 0, !0, (a) => { a && r(a.status + " " + a.statusText); }); } static CreateTextureAsync(e, t, i, r) { const s = e.textures[t]; if (s.babylonTexture) { r(s.babylonTexture); return; } const n = e.samplers[s.sampler], a = n.minFilter === QA.NEAREST_MIPMAP_NEAREST || n.minFilter === QA.NEAREST_MIPMAP_LINEAR || n.minFilter === QA.LINEAR_MIPMAP_NEAREST || n.minFilter === QA.LINEAR_MIPMAP_LINEAR, l = De.BILINEAR_SAMPLINGMODE, o = i == null ? 
new Blob() : new Blob([i]), u = URL.createObjectURL(o), h = () => URL.revokeObjectURL(u), d = new De(u, e.scene, !a, !0, l, h, h); n.wrapS !== void 0 && (d.wrapU = Eh.GetWrapMode(n.wrapS)), n.wrapT !== void 0 && (d.wrapV = Eh.GetWrapMode(n.wrapT)), d.name = t, s.babylonTexture = d, r(d); } static LoadShaderStringAsync(e, t, i, r) { const s = e.shaders[t]; if (Ve.IsBase64(s.uri)) { const n = atob(s.uri.split(",")[1]); i && i(n); } else Ve.LoadFile(e.rootUrl + s.uri, i, void 0, void 0, !1, (n) => { n && r && r(n.status + " " + n.statusText); }); } static LoadMaterialAsync(e, t, i, r) { const s = e.materials[t]; if (!s.technique) { r && r("No technique found."); return; } const n = e.techniques[s.technique]; if (!n) { e.scene._blockEntityCollection = !!e.assetContainer; const M = new Dt(t, e.scene); M._parentContainer = e.assetContainer, e.scene._blockEntityCollection = !1, M.diffuseColor = new ze(0.5, 0.5, 0.5), M.sideOrientation = At.CounterClockWiseSideOrientation, i(M); return; } const a = e.programs[n.program], l = n.states, o = Cr.ShadersStore[a.vertexShader + "VertexShader"], u = Cr.ShadersStore[a.fragmentShader + "PixelShader"]; let h = "", d = ""; const f = new bq(o), p = new bq(u), m = {}, _ = [], v = [], C = []; for (const M in n.uniforms) { const R = n.uniforms[M], w = n.parameters[R]; if (m[M] = w, w.semantic && !w.node && !w.source) { const V = ioe.indexOf(w.semantic); V !== -1 ? (_.push(roe[V]), delete m[M]) : _.push(M); } else w.type === z_.SAMPLER_2D ? C.push(M) : _.push(M); } for (const M in n.attributes) { const R = n.attributes[M], w = n.parameters[R]; if (w.semantic) { const V = Eq(w); V && v.push(V); } } for (; !f.isEnd() && f.getNextToken(); ) { if (f.currentToken !== r5.IDENTIFIER) { h += f.currentString; continue; } let R = !1; for (const w in n.attributes) { const V = n.attributes[w], k = n.parameters[V]; if (f.currentIdentifier === w && k.semantic) { h += Eq(k), R = !0; break; } } R || (h += Mq(f, n, m)); } for (; !p.isEnd() && p.getNextToken(); ) { if (p.currentToken !== r5.IDENTIFIER) { d += p.currentString; continue; } d += Mq(p, n, m); } const x = { vertex: a.vertexShader + t, fragment: a.fragmentShader + t }, b = { attributes: v, uniforms: _, samplers: C, needAlphaBlending: l && l.enable && l.enable.indexOf(3042) !== -1 }; Cr.ShadersStore[a.vertexShader + t + "VertexShader"] = h, Cr.ShadersStore[a.fragmentShader + t + "PixelShader"] = d; const S = new Lo(t, e.scene, x, b); if (S.onError = AAe(a, S, r), S.onCompiled = yAe(e, S, n, s, m, i), S.sideOrientation = At.CounterClockWiseSideOrientation, l && l.functions) { const M = l.functions; M.cullFace && M.cullFace[0] !== JH.BACK && (S.backFaceCulling = !1); const R = M.blendFuncSeparate; R && (R[0] === gc.SRC_ALPHA && R[1] === gc.ONE_MINUS_SRC_ALPHA && R[2] === gc.ONE && R[3] === gc.ONE ? S.alphaMode = et.ALPHA_COMBINE : R[0] === gc.ONE && R[1] === gc.ONE && R[2] === gc.ZERO && R[3] === gc.ONE ? S.alphaMode = et.ALPHA_ONEONE : R[0] === gc.SRC_ALPHA && R[1] === gc.ONE && R[2] === gc.ZERO && R[3] === gc.ONE ? S.alphaMode = et.ALPHA_ADD : R[0] === gc.ZERO && R[1] === gc.ONE_MINUS_SRC_COLOR && R[2] === gc.ONE && R[3] === gc.ONE ? S.alphaMode = et.ALPHA_SUBTRACT : R[0] === gc.DST_COLOR && R[1] === gc.ZERO && R[2] === gc.ONE && R[3] === gc.ONE ? 
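/*
 * The ternary chain around this point maps glTF 1.0 blendFuncSeparate states onto Babylon
 * alpha modes; restated as a table for readability (src, dst, srcAlpha, dstAlpha -> mode):
 *
 *   SRC_ALPHA, ONE_MINUS_SRC_ALPHA, ONE,  ONE -> ALPHA_COMBINE
 *   ONE,       ONE,                 ZERO, ONE -> ALPHA_ONEONE
 *   SRC_ALPHA, ONE,                 ZERO, ONE -> ALPHA_ADD
 *   ZERO,      ONE_MINUS_SRC_COLOR, ONE,  ONE -> ALPHA_SUBTRACT
 *   DST_COLOR, ZERO,                ONE,  ONE -> ALPHA_MULTIPLY
 *   SRC_ALPHA, ONE_MINUS_SRC_COLOR, ONE,  ONE -> ALPHA_MAXIMIZED
 */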
S.alphaMode = et.ALPHA_MULTIPLY : R[0] === gc.SRC_ALPHA && R[1] === gc.ONE_MINUS_SRC_COLOR && R[2] === gc.ONE && R[3] === gc.ONE && (S.alphaMode = et.ALPHA_MAXIMIZED)); } } } let sw = class tG { static RegisterExtension(e) { if (tG.Extensions[e.name]) { Ve.Error('Tool with the same name "' + e.name + '" already exists'); return; } tG.Extensions[e.name] = e; } dispose() { } _importMeshAsync(e, t, i, r, s, n, a, l) { return t.useRightHandedSystem = !0, sf.LoadRuntimeAsync(t, i, r, (o) => { o.assetContainer = s, o.importOnlyMeshes = !0, e === "" ? o.importMeshesNames = [] : typeof e == "string" ? o.importMeshesNames = [e] : e && !(e instanceof Array) ? o.importMeshesNames = [e] : (o.importMeshesNames = [], Ve.Warn("Argument meshesNames must be of type string or string[]")), this._createNodes(o); const u = [], h = []; for (const d in o.nodes) { const f = o.nodes[d]; f.babylonNode instanceof xr && u.push(f.babylonNode); } for (const d in o.skins) { const f = o.skins[d]; f.babylonSkeleton instanceof sx && h.push(f.babylonSkeleton); } this._loadBuffersAsync(o, () => { this._loadShadersAsync(o, () => { Rq(o), Sq(o), !yl.IncrementalLoading && n && n(u, h); }); }), yl.IncrementalLoading && n && n(u, h); }, l), !0; } /** * Imports one or more meshes from a loaded gltf file and adds them to the scene * @param meshesNames a string or array of strings of the mesh names that should be loaded from the file * @param scene the scene the meshes should be added to * @param assetContainer defines the asset container to use (can be null) * @param data gltf data containing information of the meshes in a loaded file * @param rootUrl root url to load from * @param onProgress event that fires when loading progress has occured * @returns a promise containg the loaded meshes, particles, skeletons and animations */ importMeshAsync(e, t, i, r, s, n) { return new Promise((a, l) => { this._importMeshAsync(e, t, r, s, i, (o, u) => { a({ meshes: o, particleSystems: [], skeletons: u, animationGroups: [], lights: [], transformNodes: [], geometries: [] }); }, n, (o) => { l(new Error(o)); }); }); } _loadAsync(e, t, i, r, s, n) { e.useRightHandedSystem = !0, sf.LoadRuntimeAsync(e, t, i, (a) => { sf.LoadRuntimeExtensionsAsync(a, () => { this._createNodes(a), this._loadBuffersAsync(a, () => { this._loadShadersAsync(a, () => { Rq(a), Sq(a), yl.IncrementalLoading || r(); }); }), yl.IncrementalLoading && r(); }, n); }, n); } /** * Imports all objects from a loaded gltf file and adds them to the scene * @param scene the scene the objects should be added to * @param data gltf data containing information of the meshes in a loaded file * @param rootUrl root url to load from * @param onProgress event that fires when loading progress has occured * @returns a promise which completes when objects have been loaded to the scene */ loadAsync(e, t, i, r) { return new Promise((s, n) => { this._loadAsync(e, t, i, () => { s(); }, r, (a) => { n(new Error(a)); }); }); } _loadShadersAsync(e, t) { let i = !1; const r = (s, n) => { sf.LoadShaderStringAsync(e, s, (a) => { a instanceof ArrayBuffer || (e.loadedShaderCount++, a && (Cr.ShadersStore[s + (n.type === qH.VERTEX ? "VertexShader" : "PixelShader")] = a), e.loadedShaderCount === e.shaderscount && t()); }, () => { Ve.Error("Error when loading shader program named " + s + " located at " + n.uri); }); }; for (const s in e.shaders) { i = !0; const n = e.shaders[s]; n ? 
r.bind(this, s, n)() : Ve.Error("No shader named: " + s); } i || t(); } _loadBuffersAsync(e, t) { let i = !1; const r = (s, n) => { sf.LoadBufferAsync(e, s, (a) => { e.loadedBufferCount++, a && (a.byteLength != e.buffers[s].byteLength && Ve.Error("Buffer named " + s + " is length " + a.byteLength + ". Expected: " + n.byteLength), e.loadedBufferViews[s] = a), e.loadedBufferCount === e.buffersCount && t(); }, () => { Ve.Error("Error when loading buffer named " + s + " located at " + n.uri); }); }; for (const s in e.buffers) { i = !0; const n = e.buffers[s]; n ? r.bind(this, s, n)() : Ve.Error("No buffer named: " + s); } i || t(); } _createNodes(e) { let t = e.currentScene; if (t) for (let i = 0; i < t.nodes.length; i++) EL(e, t.nodes[i], null); else for (const i in e.scenes) { t = e.scenes[i]; for (let r = 0; r < t.nodes.length; r++) EL(e, t.nodes[r], null); } } }; sw.Extensions = {}; class sf { constructor(e) { this._name = e; } get name() { return this._name; } /** * Defines an override for loading the runtime * Return true to stop further extensions from loading the runtime * @param scene * @param data * @param rootUrl * @param onSuccess * @param onError */ loadRuntimeAsync(e, t, i, r, s) { return !1; } /** * Defines an onverride for creating gltf runtime * Return true to stop further extensions from creating the runtime * @param gltfRuntime * @param onSuccess * @param onError */ loadRuntimeExtensionsAsync(e, t, i) { return !1; } /** * Defines an override for loading buffers * Return true to stop further extensions from loading this buffer * @param gltfRuntime * @param id * @param onSuccess * @param onError * @param onProgress */ loadBufferAsync(e, t, i, r, s) { return !1; } /** * Defines an override for loading texture buffers * Return true to stop further extensions from loading this texture data * @param gltfRuntime * @param id * @param onSuccess * @param onError */ loadTextureBufferAsync(e, t, i, r) { return !1; } /** * Defines an override for creating textures * Return true to stop further extensions from loading this texture * @param gltfRuntime * @param id * @param buffer * @param onSuccess * @param onError */ createTextureAsync(e, t, i, r, s) { return !1; } /** * Defines an override for loading shader strings * Return true to stop further extensions from loading this shader data * @param gltfRuntime * @param id * @param onSuccess * @param onError */ loadShaderStringAsync(e, t, i, r) { return !1; } /** * Defines an override for loading materials * Return true to stop further extensions from loading this material * @param gltfRuntime * @param id * @param onSuccess * @param onError */ loadMaterialAsync(e, t, i, r) { return !1; } // --------- // Utilities // --------- static LoadRuntimeAsync(e, t, i, r, s) { sf._ApplyExtensions((n) => n.loadRuntimeAsync(e, t, i, r, s), () => { setTimeout(() => { r && r(eT.CreateRuntime(t.json, e, i)); }); }); } static LoadRuntimeExtensionsAsync(e, t, i) { sf._ApplyExtensions((r) => r.loadRuntimeExtensionsAsync(e, t, i), () => { setTimeout(() => { t(); }); }); } static LoadBufferAsync(e, t, i, r, s) { sf._ApplyExtensions((n) => n.loadBufferAsync(e, t, i, r, s), () => { eT.LoadBufferAsync(e, t, i, r, s); }); } static LoadTextureAsync(e, t, i, r) { sf._LoadTextureBufferAsync(e, t, (s) => { s && sf._CreateTextureAsync(e, t, s, i, r); }, r); } static LoadShaderStringAsync(e, t, i, r) { sf._ApplyExtensions((s) => s.loadShaderStringAsync(e, t, i, r), () => { eT.LoadShaderStringAsync(e, t, i, r); }); } static LoadMaterialAsync(e, t, i, r) { 
sf._ApplyExtensions((s) => s.loadMaterialAsync(e, t, i, r), () => { eT.LoadMaterialAsync(e, t, i, r); }); } static _LoadTextureBufferAsync(e, t, i, r) { sf._ApplyExtensions((s) => s.loadTextureBufferAsync(e, t, i, r), () => { eT.LoadTextureBufferAsync(e, t, i, r); }); } static _CreateTextureAsync(e, t, i, r, s) { sf._ApplyExtensions((n) => n.createTextureAsync(e, t, i, r, s), () => { eT.CreateTextureAsync(e, t, i, r); }); } static _ApplyExtensions(e, t) { for (const i in sw.Extensions) { const r = sw.Extensions[i]; if (e(r)) return; } t(); } } yl._CreateGLTF1Loader = () => new sw(); const CAe = "binary_glTF"; class xAe extends sf { constructor() { super("KHR_binary_glTF"); } loadRuntimeAsync(e, t, i, r) { const s = t.json.extensionsUsed; return !s || s.indexOf(this.name) === -1 || !t.bin ? !1 : (this._bin = t.bin, r(eT.CreateRuntime(t.json, e, i)), !0); } loadBufferAsync(e, t, i, r) { return e.extensionsUsed.indexOf(this.name) === -1 || t !== CAe ? !1 : (this._bin.readAsync(0, this._bin.byteLength).then(i, (s) => r(s.message)), !0); } loadTextureBufferAsync(e, t, i) { const r = e.textures[t], s = e.images[r.source]; if (!s.extensions || !(this.name in s.extensions)) return !1; const n = s.extensions[this.name], a = e.bufferViews[n.bufferView], l = Eh.GetBufferFromBufferView(e, a, 0, a.byteLength, i5.UNSIGNED_BYTE); return i(l), !0; } loadShaderStringAsync(e, t, i) { const r = e.shaders[t]; if (!r.extensions || !(this.name in r.extensions)) return !1; const s = r.extensions[this.name], n = e.bufferViews[s.bufferView], a = Eh.GetBufferFromBufferView(e, n, 0, n.byteLength, i5.UNSIGNED_BYTE); return setTimeout(() => { const l = Eh.DecodeBufferToText(a); i(l); }), !0; } } sw.RegisterExtension(new xAe()); class bAe extends sf { constructor() { super("KHR_materials_common"); } loadRuntimeExtensionsAsync(e) { if (!e.extensions) return !1; const t = e.extensions[this.name]; if (!t) return !1; const i = t.lights; if (i) for (const r in i) { const s = i[r]; switch (s.type) { case "ambient": { const n = new vg(s.name, new D(0, 1, 0), e.scene), a = s.ambient; a && (n.diffuse = ze.FromArray(a.color || [1, 1, 1])); break; } case "point": { const n = new s6(s.name, new D(10, 10, 10), e.scene), a = s.point; a && (n.diffuse = ze.FromArray(a.color || [1, 1, 1])); break; } case "directional": { const n = new Pd(s.name, new D(0, -1, 0), e.scene), a = s.directional; a && (n.diffuse = ze.FromArray(a.color || [1, 1, 1])); break; } case "spot": { const n = s.spot; if (n) { const a = new td(s.name, new D(0, 10, 0), new D(0, -1, 0), n.fallOffAngle || Math.PI, n.fallOffExponent || 0, e.scene); a.diffuse = ze.FromArray(n.color || [1, 1, 1]); } break; } default: Ve.Warn('GLTF Material Common extension: light type "' + s.type + "” not supported"); break; } } return !1; } loadMaterialAsync(e, t, i, r) { const s = e.materials[t]; if (!s || !s.extensions) return !1; const n = s.extensions[this.name]; if (!n) return !1; const a = new Dt(t, e.scene); return a.sideOrientation = At.CounterClockWiseSideOrientation, n.technique === "CONSTANT" && (a.disableLighting = !0), a.backFaceCulling = n.doubleSided === void 0 ? !1 : !n.doubleSided, a.alpha = n.values.transparency === void 0 ? 1 : n.values.transparency, a.specularPower = n.values.shininess === void 0 ? 0 : n.values.shininess, typeof n.values.ambient == "string" ? this._loadTexture(e, n.values.ambient, a, "ambientTexture", r) : a.ambientColor = ze.FromArray(n.values.ambient || [0, 0, 0]), typeof n.values.diffuse == "string" ? 
this._loadTexture(e, n.values.diffuse, a, "diffuseTexture", r) : a.diffuseColor = ze.FromArray(n.values.diffuse || [0, 0, 0]), typeof n.values.emission == "string" ? this._loadTexture(e, n.values.emission, a, "emissiveTexture", r) : a.emissiveColor = ze.FromArray(n.values.emission || [0, 0, 0]), typeof n.values.specular == "string" ? this._loadTexture(e, n.values.specular, a, "specularTexture", r) : a.specularColor = ze.FromArray(n.values.specular || [0, 0, 0]), !0; } _loadTexture(e, t, i, r, s) { eT.LoadTextureBufferAsync(e, t, (n) => { eT.CreateTextureAsync(e, t, n, (a) => i[r] = a); }, s); } } sw.RegisterExtension(new bAe()); function Pq(c, e, t, i) { return D.FromArray(e, t).scaleInPlace(i); } function EAe(c, e, t, i) { return Ze.FromArray(e, t).scaleInPlace(i); } function TAe(c, e, t, i) { const r = new Array(c._numMorphTargets); for (let s = 0; s < r.length; s++) r[s] = e[t++] * i; return r; } class ON { /** @internal */ constructor(e, t, i, r) { this.type = e, this.name = t, this.getValue = i, this.getStride = r; } _buildAnimation(e, t, i) { const r = new nt(e, this.name, t, this.type); return r.setKeys(i), r; } } class ez extends ON { /** @internal */ buildAnimations(e, t, i, r, s) { s(e._babylonTransformNode, this._buildAnimation(t, i, r)); } } class SAe extends ON { buildAnimations(e, t, i, r, s) { if (e._numMorphTargets) for (let n = 0; n < e._numMorphTargets; n++) { const a = new nt(`${t}_${n}`, this.name, i, this.type); if (a.setKeys(r.map((l) => ({ frame: l.frame, inTangent: l.inTangent ? l.inTangent[n] : void 0, value: l.value[n], outTangent: l.outTangent ? l.outTangent[n] : void 0, interpolation: l.interpolation }))), e._primitiveBabylonMeshes) { for (const l of e._primitiveBabylonMeshes) if (l.morphTargetManager) { const o = l.morphTargetManager.getTarget(n), u = a.clone(); o.animations.push(u), s(o, u); } } } } } const w9 = { translation: [new ez(nt.ANIMATIONTYPE_VECTOR3, "position", Pq, () => 3)], rotation: [new ez(nt.ANIMATIONTYPE_QUATERNION, "rotationQuaternion", EAe, () => 4)], scale: [new ez(nt.ANIMATIONTYPE_VECTOR3, "scaling", Pq, () => 3)], weights: [new SAe(nt.ANIMATIONTYPE_FLOAT, "influence", TAe, (c) => c._numMorphTargets)] }; function noe(...c) { const e = (t) => t && typeof t == "object"; return c.reduce((t, i) => (Object.keys(i).forEach((r) => { const s = t[r], n = i[r]; Array.isArray(s) && Array.isArray(n) ? t[r] = s.concat(...n) : e(s) && e(n) ? t[r] = noe(s, n) : t[r] = n; }), t), {}); } class es { /** * Gets an item from the given array. * @param context The context when loading the asset * @param array The array to get the item from * @param index The index to the array * @returns The array item */ static Get(e, t, i) { if (!t || i == null || !t[i]) throw new Error(`${e}: Failed to find index (${i})`); return t[i]; } /** * Gets an item from the given array or returns null if not available. * @param array The array to get the item from * @param index The index to the array * @returns The array item or null */ static TryGet(e, t) { return !e || t == null || !e[t] ? null : e[t]; } /** * Assign an `index` field to each item of the given array. * @param array The array of items */ static Assign(e) { if (e) for (let t = 0; t < e.length; t++) e[t].index = t; } } class Xi { /** * Registers a loader extension. * @param name The name of the loader extension. * @param factory The factory function that creates the loader extension. 
*/ static RegisterExtension(e, t) { Xi.UnregisterExtension(e) && Ce.Warn(`Extension with the name '${e}' already exists`), Xi._RegisteredExtensions[e] = { factory: t }; } /** * Unregisters a loader extension. * @param name The name of the loader extension. * @returns A boolean indicating whether the extension has been unregistered */ static UnregisterExtension(e) { return Xi._RegisteredExtensions[e] ? (delete Xi._RegisteredExtensions[e], !0) : !1; } /** * The object that represents the glTF JSON. */ get gltf() { if (!this._gltf) throw new Error("glTF JSON is not available"); return this._gltf; } /** * The BIN chunk of a binary glTF. */ get bin() { return this._bin; } /** * The parent file loader. */ get parent() { return this._parent; } /** * The Babylon scene when loading the asset. */ get babylonScene() { if (!this._babylonScene) throw new Error("Scene is not available"); return this._babylonScene; } /** * The root Babylon mesh when loading the asset. */ get rootBabylonMesh() { return this._rootBabylonMesh; } /** * @internal */ constructor(e) { this._completePromises = new Array(), this._assetContainer = null, this._babylonLights = [], this._disableInstancedMesh = 0, this._allMaterialsDirtyRequired = !1, this._extensions = new Array(), this._disposed = !1, this._rootUrl = null, this._fileName = null, this._uniqueRootUrl = null, this._bin = null, this._rootBabylonMesh = null, this._defaultBabylonMaterialData = {}, this._postSceneLoadActions = new Array(), this._parent = e; } /** @internal */ dispose() { this._disposed || (this._disposed = !0, this._completePromises.length = 0, this._extensions.forEach((e) => e.dispose && e.dispose()), this._extensions.length = 0, this._gltf = null, this._bin = null, this._babylonScene = null, this._rootBabylonMesh = null, this._defaultBabylonMaterialData = {}, this._postSceneLoadActions.length = 0, this._parent.dispose()); } /** * @internal */ importMeshAsync(e, t, i, r, s, n, a = "") { return Promise.resolve().then(() => { this._babylonScene = t, this._assetContainer = i, this._loadData(r); let l = null; if (e) { const o = {}; if (this._gltf.nodes) for (const h of this._gltf.nodes) h.name && (o[h.name] = h.index); l = (e instanceof Array ? e : [e]).map((h) => { const d = o[h]; if (d === void 0) throw new Error(`Failed to find node '${h}'`); return d; }); } return this._loadAsync(s, a, l, () => ({ meshes: this._getMeshes(), particleSystems: [], skeletons: this._getSkeletons(), animationGroups: this._getAnimationGroups(), lights: this._babylonLights, transformNodes: this._getTransformNodes(), geometries: this._getGeometries() })); }); } /** * @internal */ loadAsync(e, t, i, r, s = "") { return Promise.resolve().then(() => (this._babylonScene = e, this._loadData(t), this._loadAsync(i, s, null, () => { }))); } _loadAsync(e, t, i, r) { return Promise.resolve().then(() => { this._rootUrl = e, this._uniqueRootUrl = !e.startsWith("file:") && t ? 
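// Illustrative sketch of how RegisterExtension above is typically used from application code,
// assuming the un-minified public API of @babylonjs/loaders (Xi is the minified glTF 2.0 GLTFLoader;
// the import path, class and extension shape below are assumptions, not part of this bundle):
//
//   import { GLTFLoader } from "@babylonjs/loaders/glTF/2.0";
//
//   class MyExtension {
//     constructor(loader) { this.name = "MyExtension"; this.enabled = true; this._loader = loader; }
//     dispose() { this._loader = null; }
//   }
//   // the factory is stored by name and instantiated once per loader
//   GLTFLoader.RegisterExtension("MyExtension", (loader) => new MyExtension(loader));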
e : `${e}${Date.now()}/`, this._fileName = t, this._allMaterialsDirtyRequired = !1, this._loadExtensions(), this._checkExtensions(); const s = `${dm[dm.LOADING]} => ${dm[dm.READY]}`, n = `${dm[dm.LOADING]} => ${dm[dm.COMPLETE]}`; this._parent._startPerformanceCounter(s), this._parent._startPerformanceCounter(n), this._parent._setState(dm.LOADING), this._extensionsOnLoading(); const a = new Array(), l = this._babylonScene.blockMaterialDirtyMechanism; if (this._babylonScene.blockMaterialDirtyMechanism = !0, !this.parent.loadOnlyMaterials) { if (i) a.push(this.loadSceneAsync("/nodes", { nodes: i, index: -1 })); else if (this._gltf.scene != null || this._gltf.scenes && this._gltf.scenes[0]) { const u = es.Get("/scene", this._gltf.scenes, this._gltf.scene || 0); a.push(this.loadSceneAsync(`/scenes/${u.index}`, u)); } } if (!this.parent.skipMaterials && this.parent.loadAllMaterials && this._gltf.materials) for (let u = 0; u < this._gltf.materials.length; ++u) { const h = this._gltf.materials[u], d = "/materials/" + u, f = At.TriangleFillMode; a.push(this._loadMaterialAsync(d, h, null, f, () => { })); } return this._allMaterialsDirtyRequired ? this._babylonScene.blockMaterialDirtyMechanism = l : this._babylonScene._forceBlockMaterialDirtyMechanism(l), this._parent.compileMaterials && a.push(this._compileMaterialsAsync()), this._parent.compileShadowGenerators && a.push(this._compileShadowGeneratorsAsync()), Promise.all(a).then(() => (this._rootBabylonMesh && this._rootBabylonMesh.setEnabled(!0), this._extensionsOnReady(), this._parent._setState(dm.READY), this._startAnimations(), r())).then((u) => (this._parent._endPerformanceCounter(s), Ve.SetImmediate(() => { this._disposed || Promise.all(this._completePromises).then(() => { this._parent._endPerformanceCounter(n), this._parent._setState(dm.COMPLETE), this._parent.onCompleteObservable.notifyObservers(void 0), this._parent.onCompleteObservable.clear(), this.dispose(); }, (h) => { this._parent.onErrorObservable.notifyObservers(h), this._parent.onErrorObservable.clear(), this.dispose(); }); }), u)); }).catch((s) => { throw this._disposed || (this._parent.onErrorObservable.notifyObservers(s), this._parent.onErrorObservable.clear(), this.dispose()), s; }); } _loadData(e) { if (this._gltf = e.json, this._setupData(), e.bin) { const t = this._gltf.buffers; if (t && t[0] && !t[0].uri) { const i = t[0]; (i.byteLength < e.bin.byteLength - 3 || i.byteLength > e.bin.byteLength) && Ce.Warn(`Binary buffer length (${i.byteLength}) from JSON does not match chunk length (${e.bin.byteLength})`), this._bin = e.bin; } else Ce.Warn("Unexpected BIN chunk"); } } _setupData() { if (es.Assign(this._gltf.accessors), es.Assign(this._gltf.animations), es.Assign(this._gltf.buffers), es.Assign(this._gltf.bufferViews), es.Assign(this._gltf.cameras), es.Assign(this._gltf.images), es.Assign(this._gltf.materials), es.Assign(this._gltf.meshes), es.Assign(this._gltf.nodes), es.Assign(this._gltf.samplers), es.Assign(this._gltf.scenes), es.Assign(this._gltf.skins), es.Assign(this._gltf.textures), this._gltf.nodes) { const e = {}; for (const i of this._gltf.nodes) if (i.children) for (const r of i.children) e[r] = i.index; const t = this._createRootNode(); for (const i of this._gltf.nodes) { const r = e[i.index]; i.parent = r === void 0 ? 
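// NOTE on the BIN length check in _loadData above: the GLB container pads chunks to 4-byte
// boundaries, so the buffer byteLength declared in the JSON may legitimately be up to 3 bytes
// shorter than the BIN chunk; anything outside that window triggers the warning. For example, a
// declared byteLength of 1021 is accepted against a 1024-byte BIN chunk, while 1017 (too short)
// or 1025 (too long) would be flagged.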
t : this._gltf.nodes[r]; } } } _loadExtensions() { for (const e in Xi._RegisteredExtensions) { const t = Xi._RegisteredExtensions[e].factory(this); t.name !== e && Ce.Warn(`The name of the glTF loader extension instance does not match the registered name: ${t.name} !== ${e}`), this._extensions.push(t), this._parent.onExtensionLoadedObservable.notifyObservers(t); } this._extensions.sort((e, t) => (e.order || Number.MAX_VALUE) - (t.order || Number.MAX_VALUE)), this._parent.onExtensionLoadedObservable.clear(); } _checkExtensions() { if (this._gltf.extensionsRequired) { for (const e of this._gltf.extensionsRequired) if (!this._extensions.some((i) => i.name === e && i.enabled)) throw new Error(`Required extension ${e} is not available`); } } _createRootNode() { this._babylonScene._blockEntityCollection = !!this._assetContainer, this._rootBabylonMesh = new ke("__root__", this._babylonScene), this._rootBabylonMesh._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, this._rootBabylonMesh.setEnabled(!1); const e = { _babylonTransformNode: this._rootBabylonMesh, index: -1 }; switch (this._parent.coordinateSystemMode) { case bL.AUTO: { this._babylonScene.useRightHandedSystem || (e.rotation = [0, 1, 0, 0], e.scale = [1, 1, -1], Xi._LoadTransform(e, this._rootBabylonMesh)); break; } case bL.FORCE_RIGHT_HANDED: { this._babylonScene.useRightHandedSystem = !0; break; } default: throw new Error(`Invalid coordinate system mode (${this._parent.coordinateSystemMode})`); } return this._parent.onMeshLoadedObservable.notifyObservers(this._rootBabylonMesh), e; } /** * Loads a glTF scene. * @param context The context when loading the asset * @param scene The glTF scene property * @returns A promise that resolves when the load is complete */ loadSceneAsync(e, t) { const i = this._extensionsLoadSceneAsync(e, t); if (i) return i; const r = new Array(); if (this.logOpen(`${e} ${t.name || ""}`), t.nodes) for (const s of t.nodes) { const n = es.Get(`${e}/nodes/${s}`, this._gltf.nodes, s); r.push(this.loadNodeAsync(`/nodes/${n.index}`, n, (a) => { a.parent = this._rootBabylonMesh; })); } for (const s of this._postSceneLoadActions) s(); return r.push(this._loadAnimationsAsync()), this.logClose(), Promise.all(r).then(() => { }); } _forEachPrimitive(e, t) { if (e._primitiveBabylonMeshes) for (const i of e._primitiveBabylonMeshes) t(i); } _getGeometries() { const e = [], t = this._gltf.nodes; if (t) for (const i of t) this._forEachPrimitive(i, (r) => { const s = r.geometry; s && e.indexOf(s) === -1 && e.push(s); }); return e; } _getMeshes() { const e = []; this._rootBabylonMesh && e.push(this._rootBabylonMesh); const t = this._gltf.nodes; if (t) for (const i of t) this._forEachPrimitive(i, (r) => { e.push(r); }); return e; } _getTransformNodes() { const e = [], t = this._gltf.nodes; if (t) for (const i of t) i._babylonTransformNode && i._babylonTransformNode.getClassName() === "TransformNode" && e.push(i._babylonTransformNode), i._babylonTransformNodeForSkin && e.push(i._babylonTransformNodeForSkin); return e; } _getSkeletons() { const e = [], t = this._gltf.skins; if (t) for (const i of t) i._data && e.push(i._data.babylonSkeleton); return e; } _getAnimationGroups() { const e = [], t = this._gltf.animations; if (t) for (const i of t) i._babylonAnimationGroup && e.push(i._babylonAnimationGroup); return e; } _startAnimations() { switch (this._parent.animationStartMode) { case OO.NONE: break; case OO.FIRST: { const e = this._getAnimationGroups(); e.length !== 0 && e[0].start(!0); break; } 
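// NOTE on _createRootNode above: glTF assets are right-handed while a Babylon scene is left-handed
// by default, so in AUTO mode the synthetic __root__ node is given the quaternion [0, 1, 0, 0]
// (a 180 degree turn around Y) plus a Z flip (scale [1, 1, -1]) to convert the asset; in
// FORCE_RIGHT_HANDED mode the whole scene is switched to useRightHandedSystem instead and the
// root is left untransformed.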
case OO.ALL: { const e = this._getAnimationGroups(); for (const t of e) t.start(!0); break; } default: { Ce.Error(`Invalid animation start mode (${this._parent.animationStartMode})`); return; } } } /** * Loads a glTF node. * @param context The context when loading the asset * @param node The glTF node property * @param assign A function called synchronously after parsing the glTF properties * @returns A promise that resolves with the loaded Babylon mesh when the load is complete */ loadNodeAsync(e, t, i = () => { }) { const r = this._extensionsLoadNodeAsync(e, t, i); if (r) return r; if (t._babylonTransformNode) throw new Error(`${e}: Invalid recursive node hierarchy`); const s = new Array(); this.logOpen(`${e} ${t.name || ""}`); const n = (a) => { if (Xi.AddPointerMetadata(a, e), Xi._LoadTransform(t, a), t.camera != null) { const l = es.Get(`${e}/camera`, this._gltf.cameras, t.camera); s.push(this.loadCameraAsync(`/cameras/${l.index}`, l, (o) => { o.parent = a; })); } if (t.children) for (const l of t.children) { const o = es.Get(`${e}/children/${l}`, this._gltf.nodes, l); s.push(this.loadNodeAsync(`/nodes/${o.index}`, o, (u) => { u.parent = a; })); } i(a); }; if (t.mesh == null || t.skin != null) { const a = t.name || `node${t.index}`; this._babylonScene._blockEntityCollection = !!this._assetContainer; const l = new xi(a, this._babylonScene); l._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, t.mesh == null ? t._babylonTransformNode = l : t._babylonTransformNodeForSkin = l, n(l); } if (t.mesh != null) if (t.skin == null) { const a = es.Get(`${e}/mesh`, this._gltf.meshes, t.mesh); s.push(this._loadMeshAsync(`/meshes/${a.index}`, t, a, n)); } else { const a = es.Get(`${e}/mesh`, this._gltf.meshes, t.mesh); s.push(this._loadMeshAsync(`/meshes/${a.index}`, t, a, (l) => { const o = t._babylonTransformNodeForSkin; l.metadata = noe(o.metadata, l.metadata || {}); const u = es.Get(`${e}/skin`, this._gltf.skins, t.skin); s.push(this._loadSkinAsync(`/skins/${u.index}`, t, u, (h) => { this._forEachPrimitive(t, (d) => { d.skeleton = h; }), this._postSceneLoadActions.push(() => { if (u.skeleton != null) { const d = es.Get(`/skins/${u.index}/skeleton`, this._gltf.nodes, u.skeleton).parent; t.index === d.index ? l.parent = o.parent : l.parent = d._babylonTransformNode; } else l.parent = this._rootBabylonMesh; this._parent.onSkinLoadedObservable.notifyObservers({ node: o, skinnedNode: l }); }); })); })); } return this.logClose(), Promise.all(s).then(() => (this._forEachPrimitive(t, (a) => { a.geometry && a.geometry.useBoundingInfoFromGeometry ? 
a._updateBoundingInfo() : a.refreshBoundingInfo(!0); }), t._babylonTransformNode)); } _loadMeshAsync(e, t, i, r) { const s = i.primitives; if (!s || !s.length) throw new Error(`${e}: Primitives are missing`); s[0].index == null && es.Assign(s); const n = new Array(); this.logOpen(`${e} ${i.name || ""}`); const a = t.name || `node${t.index}`; if (s.length === 1) { const l = i.primitives[0]; n.push(this._loadMeshPrimitiveAsync(`${e}/primitives/${l.index}`, a, t, i, l, (o) => { t._babylonTransformNode = o, t._primitiveBabylonMeshes = [o]; })); } else { this._babylonScene._blockEntityCollection = !!this._assetContainer, t._babylonTransformNode = new xi(a, this._babylonScene), t._babylonTransformNode._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, t._primitiveBabylonMeshes = []; for (const l of s) n.push(this._loadMeshPrimitiveAsync(`${e}/primitives/${l.index}`, `${a}_primitive${l.index}`, t, i, l, (o) => { o.parent = t._babylonTransformNode, t._primitiveBabylonMeshes.push(o); })); } return r(t._babylonTransformNode), this.logClose(), Promise.all(n).then(() => t._babylonTransformNode); } /** * @internal Define this method to modify the default behavior when loading data for mesh primitives. * @param context The context when loading the asset * @param name The mesh name when loading the asset * @param node The glTF node when loading the asset * @param mesh The glTF mesh when loading the asset * @param primitive The glTF mesh primitive property * @param assign A function called synchronously after parsing the glTF properties * @returns A promise that resolves with the loaded mesh when the load is complete or null if not handled */ _loadMeshPrimitiveAsync(e, t, i, r, s, n) { const a = this._extensionsLoadMeshPrimitiveAsync(e, t, i, r, s, n); if (a) return a; this.logOpen(`${e}`); const l = this._disableInstancedMesh === 0 && this._parent.createInstances && i.skin == null && !r.primitives[0].targets; let o, u; if (l && s._instanceData) this._babylonScene._blockEntityCollection = !!this._assetContainer, o = s._instanceData.babylonSourceMesh.createInstance(t), o._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, u = s._instanceData.promise; else { const h = new Array(); this._babylonScene._blockEntityCollection = !!this._assetContainer; const d = new ke(t, this._babylonScene); d._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, d.overrideMaterialSideOrientation = this._babylonScene.useRightHandedSystem ? 
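// NOTE on the instancing test in _loadMeshPrimitiveAsync above: a primitive is turned into an
// InstancedMesh (reusing the previously built source mesh and its still-pending load promise) only
// when instancing is not temporarily disabled, the loader's createInstances option is on, the node
// has no skin, and the mesh declares no morph targets; otherwise a fresh Mesh is created and cached
// in the primitive's _instanceData for later reuse.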
At.CounterClockWiseSideOrientation : At.ClockWiseSideOrientation, this._createMorphTargets(e, i, r, s, d), h.push(this._loadVertexDataAsync(e, s, d).then((p) => this._loadMorphTargetsAsync(e, s, d, p).then(() => { this._disposed || (this._babylonScene._blockEntityCollection = !!this._assetContainer, p.applyToMesh(d), p._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1); }))); const f = Xi._GetDrawMode(e, s.mode); if (s.material == null) { let p = this._defaultBabylonMaterialData[f]; p || (p = this._createDefaultMaterial("__GLTFLoader._default", f), this._parent.onMaterialLoadedObservable.notifyObservers(p), this._defaultBabylonMaterialData[f] = p), d.material = p; } else if (!this.parent.skipMaterials) { const p = es.Get(`${e}/material`, this._gltf.materials, s.material); h.push(this._loadMaterialAsync(`/materials/${p.index}`, p, d, f, (m) => { d.material = m; })); } u = Promise.all(h), l && (s._instanceData = { babylonSourceMesh: d, promise: u }), o = d; } return Xi.AddPointerMetadata(o, e), this._parent.onMeshLoadedObservable.notifyObservers(o), n(o), this.logClose(), u.then(() => o); } _loadVertexDataAsync(e, t, i) { const r = this._extensionsLoadVertexDataAsync(e, t, i); if (r) return r; const s = t.attributes; if (!s) throw new Error(`${e}: Attributes are missing`); const n = new Array(), a = new yc(i.name, this._babylonScene); if (t.indices == null) i.isUnIndexed = !0; else { const o = es.Get(`${e}/indices`, this._gltf.accessors, t.indices); n.push(this._loadIndicesAccessorAsync(`/accessors/${o.index}`, o).then((u) => { a.setIndices(u); })); } const l = (o, u, h) => { if (s[o] == null) return; i._delayInfo = i._delayInfo || [], i._delayInfo.indexOf(u) === -1 && i._delayInfo.push(u); const d = es.Get(`${e}/attributes/${o}`, this._gltf.accessors, s[o]); n.push(this._loadVertexAccessorAsync(`/accessors/${d.index}`, d, u).then((f) => { if (f.getKind() === Y.PositionKind && !this.parent.alwaysComputeBoundingBox && !i.skeleton && d.min && d.max) { const p = de.Vector3[0].copyFromFloats(...d.min), m = de.Vector3[1].copyFromFloats(...d.max); if (d.normalized && d.componentType !== 5126) { let _ = 1; switch (d.componentType) { case 5120: _ = 127; break; case 5121: _ = 255; break; case 5122: _ = 32767; break; case 5123: _ = 65535; break; } const v = 1 / _; p.scaleInPlace(v), m.scaleInPlace(v); } a._boundingInfo = new zf(p, m), a.useBoundingInfoFromGeometry = !0; } a.setVerticesBuffer(f, d.count); })), u == Y.MatricesIndicesExtraKind && (i.numBoneInfluencers = 8), h && h(d); }; return l("POSITION", Y.PositionKind), l("NORMAL", Y.NormalKind), l("TANGENT", Y.TangentKind), l("TEXCOORD_0", Y.UVKind), l("TEXCOORD_1", Y.UV2Kind), l("TEXCOORD_2", Y.UV3Kind), l("TEXCOORD_3", Y.UV4Kind), l("TEXCOORD_4", Y.UV5Kind), l("TEXCOORD_5", Y.UV6Kind), l("JOINTS_0", Y.MatricesIndicesKind), l("WEIGHTS_0", Y.MatricesWeightsKind), l("JOINTS_1", Y.MatricesIndicesExtraKind), l("WEIGHTS_1", Y.MatricesWeightsExtraKind), l("COLOR_0", Y.ColorKind, (o) => { o.type === "VEC4" && (i.hasVertexAlpha = !0); }), Promise.all(n).then(() => a); } _createMorphTargets(e, t, i, r, s) { if (!r.targets) return; if (t._numMorphTargets == null) t._numMorphTargets = r.targets.length; else if (r.targets.length !== t._numMorphTargets) throw new Error(`${e}: Primitives do not have the same number of targets`); const n = i.extras ? 
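// NOTE on the bounding-box path in _loadVertexDataAsync above: when a POSITION accessor is
// normalized (as allowed by KHR_mesh_quantization), its min/max are stored in integer space and
// must be rescaled before being used as a bounding box. The divisors mirror the glTF component
// types: BYTE (5120) -> 127, UNSIGNED_BYTE (5121) -> 255, SHORT (5122) -> 32767,
// UNSIGNED_SHORT (5123) -> 65535; FLOAT (5126) needs no rescaling.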
i.extras.targetNames : null; this._babylonScene._blockEntityCollection = !!this._assetContainer, s.morphTargetManager = new O4(this._babylonScene), s.morphTargetManager._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, s.morphTargetManager.areUpdatesFrozen = !0; for (let a = 0; a < r.targets.length; a++) { const l = t.weights ? t.weights[a] : i.weights ? i.weights[a] : 0, o = n ? n[a] : `morphTarget${a}`; s.morphTargetManager.addTarget(new h5(o, l, s.getScene())); } } _loadMorphTargetsAsync(e, t, i, r) { if (!t.targets) return Promise.resolve(); const s = new Array(), n = i.morphTargetManager; for (let a = 0; a < n.numTargets; a++) { const l = n.getTarget(a); s.push(this._loadMorphTargetVertexDataAsync(`${e}/targets/${a}`, r, t.targets[a], l)); } return Promise.all(s).then(() => { n.areUpdatesFrozen = !1; }); } _loadMorphTargetVertexDataAsync(e, t, i, r) { const s = new Array(), n = (a, l, o) => { if (i[a] == null) return; const u = t.getVertexBuffer(l); if (!u) return; const h = es.Get(`${e}/${a}`, this._gltf.accessors, i[a]); s.push(this._loadFloatAccessorAsync(`/accessors/${h.index}`, h).then((d) => { o(u, d); })); }; return n("POSITION", Y.PositionKind, (a, l) => { const o = new Float32Array(l.length); a.forEach(l.length, (u, h) => { o[h] = l[h] + u; }), r.setPositions(o); }), n("NORMAL", Y.NormalKind, (a, l) => { const o = new Float32Array(l.length); a.forEach(o.length, (u, h) => { o[h] = l[h] + u; }), r.setNormals(o); }), n("TANGENT", Y.TangentKind, (a, l) => { const o = new Float32Array(l.length / 3 * 4); let u = 0; a.forEach(l.length / 3 * 4, (h, d) => { (d + 1) % 4 !== 0 && (o[u] = l[u] + h, u++); }), r.setTangents(o); }), Promise.all(s).then(() => { }); } static _LoadTransform(e, t) { if (e.skin != null) return; let i = D.Zero(), r = Ze.Identity(), s = D.One(); e.matrix ? 
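// NOTE on _loadMorphTargetVertexDataAsync above: glTF morph targets store per-vertex deltas, while
// Babylon MorphTarget expects absolute data, so positions and normals are rebuilt as base + delta.
// Tangents are a special case: the base buffer is VEC4 (xyz plus handedness w) but the target only
// supplies xyz deltas, hence the loop that skips every fourth component rather than adding to it.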
Ae.FromArray(e.matrix).decompose(s, r, i) : (e.translation && (i = D.FromArray(e.translation)), e.rotation && (r = Ze.FromArray(e.rotation)), e.scale && (s = D.FromArray(e.scale))), t.position = i, t.rotationQuaternion = r, t.scaling = s; } _loadSkinAsync(e, t, i, r) { const s = this._extensionsLoadSkinAsync(e, t, i); if (s) return s; if (i._data) return r(i._data.babylonSkeleton), i._data.promise; const n = `skeleton${i.index}`; this._babylonScene._blockEntityCollection = !!this._assetContainer; const a = new sx(i.name || n, n, this._babylonScene); a._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, this._loadBones(e, i, a); const l = this._loadSkinInverseBindMatricesDataAsync(e, i).then((o) => { this._updateBoneMatrices(a, o); }); return i._data = { babylonSkeleton: a, promise: l }, r(a), l; } _loadBones(e, t, i) { if (t.skeleton == null || this._parent.alwaysComputeSkeletonRootNode) { const s = this._findSkeletonRootNode(`${e}/joints`, t.joints); if (s) if (t.skeleton === void 0) t.skeleton = s.index; else { const n = (l, o) => { for (; o.parent; o = o.parent) if (o.parent === l) return !0; return !1; }, a = es.Get(`${e}/skeleton`, this._gltf.nodes, t.skeleton); a !== s && !n(a, s) && (Ce.Warn(`${e}/skeleton: Overriding with nearest common ancestor as skeleton node is not a common root`), t.skeleton = s.index); } else Ce.Warn(`${e}: Failed to find common root`); } const r = {}; for (const s of t.joints) { const n = es.Get(`${e}/joints/${s}`, this._gltf.nodes, s); this._loadBone(n, t, i, r); } } _findSkeletonRootNode(e, t) { if (t.length === 0) return null; const i = {}; for (const s of t) { const n = []; let a = es.Get(`${e}/${s}`, this._gltf.nodes, s); for (; a.index !== -1; ) n.unshift(a), a = a.parent; i[s] = n; } let r = null; for (let s = 0; ; ++s) { let n = i[t[0]]; if (s >= n.length) return r; const a = n[s]; for (let l = 1; l < t.length; ++l) if (n = i[t[l]], s >= n.length || a !== n[s]) return r; r = a; } } _loadBone(e, t, i, r) { let s = r[e.index]; if (s) return s; let n = null; e.index !== t.skeleton && (e.parent && e.parent.index !== -1 ? n = this._loadBone(e.parent, t, i, r) : t.skeleton !== void 0 && Ce.Warn(`/skins/${t.index}/skeleton: Skeleton node is not a common root`)); const a = t.joints.indexOf(e.index); return s = new ha(e.name || `joint${e.index}`, i, n, this._getNodeMatrix(e), null, null, a), r[e.index] = s, this._postSceneLoadActions.push(() => { s.linkTransformNode(e._babylonTransformNode); }), s; } _loadSkinInverseBindMatricesDataAsync(e, t) { if (t.inverseBindMatrices == null) return Promise.resolve(null); const i = es.Get(`${e}/inverseBindMatrices`, this._gltf.accessors, t.inverseBindMatrices); return this._loadFloatAccessorAsync(`/accessors/${i.index}`, i); } _updateBoneMatrices(e, t) { for (const i of e.bones) { const r = Ae.Identity(), s = i._index; t && s !== -1 && (Ae.FromArrayToRef(t, s * 16, r), r.invertToRef(r)); const n = i.getParent(); n && r.multiplyToRef(n.getAbsoluteInverseBindMatrix(), r), i.updateMatrix(r, !1, !1), i._updateAbsoluteBindMatrices(void 0, !1); } } _getNodeMatrix(e) { return e.matrix ? Ae.FromArray(e.matrix) : Ae.Compose(e.scale ? D.FromArray(e.scale) : D.One(), e.rotation ? Ze.FromArray(e.rotation) : Ze.Identity(), e.translation ? D.FromArray(e.translation) : D.Zero()); } /** * Loads a glTF camera. 
* @param context The context when loading the asset * @param camera The glTF camera property * @param assign A function called synchronously after parsing the glTF properties * @returns A promise that resolves with the loaded Babylon camera when the load is complete */ loadCameraAsync(e, t, i = () => { }) { const r = this._extensionsLoadCameraAsync(e, t, i); if (r) return r; const s = new Array(); this.logOpen(`${e} ${t.name || ""}`), this._babylonScene._blockEntityCollection = !!this._assetContainer; const n = new du(t.name || `camera${t.index}`, D.Zero(), this._babylonScene, !1); switch (n._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, n.ignoreParentScaling = !0, t._babylonCamera = n, n.rotation.set(0, Math.PI, 0), t.type) { case "perspective": { const a = t.perspective; if (!a) throw new Error(`${e}: Camera perspective properties are missing`); n.fov = a.yfov, n.minZ = a.znear, n.maxZ = a.zfar || 0; break; } case "orthographic": { if (!t.orthographic) throw new Error(`${e}: Camera orthographic properties are missing`); n.mode = Ai.ORTHOGRAPHIC_CAMERA, n.orthoLeft = -t.orthographic.xmag, n.orthoRight = t.orthographic.xmag, n.orthoBottom = -t.orthographic.ymag, n.orthoTop = t.orthographic.ymag, n.minZ = t.orthographic.znear, n.maxZ = t.orthographic.zfar; break; } default: throw new Error(`${e}: Invalid camera type (${t.type})`); } return Xi.AddPointerMetadata(n, e), this._parent.onCameraLoadedObservable.notifyObservers(n), i(n), this.logClose(), Promise.all(s).then(() => n); } _loadAnimationsAsync() { const e = this._gltf.animations; if (!e) return Promise.resolve(); const t = new Array(); for (let i = 0; i < e.length; i++) { const r = e[i]; t.push(this.loadAnimationAsync(`/animations/${r.index}`, r).then((s) => { s.targetedAnimations.length === 0 && s.dispose(); })); } return Promise.all(t).then(() => { }); } /** * Loads a glTF animation. * @param context The context when loading the asset * @param animation The glTF animation property * @returns A promise that resolves with the loaded Babylon animation group when the load is complete */ loadAnimationAsync(e, t) { const i = this._extensionsLoadAnimationAsync(e, t); if (i) return i; this._babylonScene._blockEntityCollection = !!this._assetContainer; const r = new S4(t.name || `animation${t.index}`, this._babylonScene); r._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, t._babylonAnimationGroup = r; const s = new Array(); es.Assign(t.channels), es.Assign(t.samplers); for (const n of t.channels) s.push(this._loadAnimationChannelAsync(`${e}/channels/${n.index}`, e, t, n, (a, l) => { a.animations = a.animations || [], a.animations.push(l), r.addTargetedAnimation(l, a); })); return Promise.all(s).then(() => (r.normalize(0), r)); } /** * @hidden * Loads a glTF animation channel. 
* @param context The context when loading the asset * @param animationContext The context of the animation when loading the asset * @param animation The glTF animation property * @param channel The glTF animation channel property * @param onLoad Called for each animation loaded * @returns A void promise that resolves when the load is complete */ _loadAnimationChannelAsync(e, t, i, r, s) { const n = this._extensionsLoadAnimationChannelAsync(e, t, i, r, s); if (n) return n; if (r.target.node == null) return Promise.resolve(); const a = es.Get(`${e}/target/node`, this._gltf.nodes, r.target.node); if (r.target.path === "weights" && !a._numMorphTargets || r.target.path !== "weights" && !a._babylonTransformNode) return Promise.resolve(); let l; switch (r.target.path) { case "translation": { l = w9.translation; break; } case "rotation": { l = w9.rotation; break; } case "scale": { l = w9.scale; break; } case "weights": { l = w9.weights; break; } default: throw new Error(`${e}/target/path: Invalid value (${r.target.path})`); } const o = { target: a, properties: l }; return this._loadAnimationChannelFromTargetInfoAsync(e, t, i, r, o, s); } /** * @hidden * Loads a glTF animation channel. * @param context The context when loading the asset * @param animationContext The context of the animation when loading the asset * @param animation The glTF animation property * @param channel The glTF animation channel property * @param targetInfo The glTF target and properties * @param onLoad Called for each animation loaded * @returns A void promise that resolves when the load is complete */ _loadAnimationChannelFromTargetInfoAsync(e, t, i, r, s, n) { const a = this.parent.targetFps, l = 1 / a, o = es.Get(`${e}/sampler`, i.samplers, r.sampler); return this._loadAnimationSamplerAsync(`${t}/samplers/${r.sampler}`, o).then((u) => { let h = 0; for (const d of s.properties) { const f = d.getStride(s.target), p = u.input, m = u.output, _ = new Array(p.length); let v = 0; switch (u.interpolation) { case "STEP": { for (let C = 0; C < p.length; C++) { const x = d.getValue(s.target, m, v, 1); v += f, _[C] = { frame: p[C] * a, value: x, interpolation: $9.STEP }; } break; } case "CUBICSPLINE": { for (let C = 0; C < p.length; C++) { const x = d.getValue(s.target, m, v, l); v += f; const b = d.getValue(s.target, m, v, 1); v += f; const S = d.getValue(s.target, m, v, l); v += f, _[C] = { frame: p[C] * a, inTangent: x, value: b, outTangent: S }; } break; } case "LINEAR": { for (let C = 0; C < p.length; C++) { const x = d.getValue(s.target, m, v, 1); v += f, _[C] = { frame: p[C] * a, value: x }; } break; } } if (v > 0) { const C = `${i.name || `animation${i.index}`}_channel${r.index}_${h}`; d.buildAnimations(s.target, C, a, _, (x, b) => { ++h, n(x, b); }); } } }); } _loadAnimationSamplerAsync(e, t) { if (t._data) return t._data; const i = t.interpolation || "LINEAR"; switch (i) { case "STEP": case "LINEAR": case "CUBICSPLINE": break; default: throw new Error(`${e}/interpolation: Invalid value (${t.interpolation})`); } const r = es.Get(`${e}/input`, this._gltf.accessors, t.input), s = es.Get(`${e}/output`, this._gltf.accessors, t.output); return t._data = Promise.all([ this._loadFloatAccessorAsync(`/accessors/${r.index}`, r), this._loadFloatAccessorAsync(`/accessors/${s.index}`, s) ]).then(([n, a]) => ({ input: n, interpolation: i, output: a })), t._data; } /** * Loads a glTF buffer. 
* @param context The context when loading the asset * @param buffer The glTF buffer property * @param byteOffset The byte offset to use * @param byteLength The byte length to use * @returns A promise that resolves with the loaded data when the load is complete */ loadBufferAsync(e, t, i, r) { const s = this._extensionsLoadBufferAsync(e, t, i, r); if (s) return s; if (!t._data) if (t.uri) t._data = this.loadUriAsync(`${e}/uri`, t, t.uri); else { if (!this._bin) throw new Error(`${e}: Uri is missing or the binary glTF is missing its binary chunk`); t._data = this._bin.readAsync(0, t.byteLength); } return t._data.then((n) => { try { return new Uint8Array(n.buffer, n.byteOffset + i, r); } catch (a) { throw new Error(`${e}: ${a.message}`); } }); } /** * Loads a glTF buffer view. * @param context The context when loading the asset * @param bufferView The glTF buffer view property * @returns A promise that resolves with the loaded data when the load is complete */ loadBufferViewAsync(e, t) { const i = this._extensionsLoadBufferViewAsync(e, t); if (i) return i; if (t._data) return t._data; const r = es.Get(`${e}/buffer`, this._gltf.buffers, t.buffer); return t._data = this.loadBufferAsync(`/buffers/${r.index}`, r, t.byteOffset || 0, t.byteLength), t._data; } _loadAccessorAsync(e, t, i) { if (t._data) return t._data; const r = Xi._GetNumComponents(e, t.type), s = r * Y.GetTypeByteLength(t.componentType), n = r * t.count; if (t.bufferView == null) t._data = Promise.resolve(new i(n)); else { const a = es.Get(`${e}/bufferView`, this._gltf.bufferViews, t.bufferView); t._data = this.loadBufferViewAsync(`/bufferViews/${a.index}`, a).then((l) => { if (t.componentType === 5126 && !t.normalized && (!a.byteStride || a.byteStride === s)) return Xi._GetTypedArray(e, t.componentType, l, t.byteOffset, n); { const o = new i(n); return Y.ForEach(l, t.byteOffset || 0, a.byteStride || s, r, t.componentType, o.length, t.normalized || !1, (u, h) => { o[h] = u; }), o; } }); } if (t.sparse) { const a = t.sparse; t._data = t._data.then((l) => { const o = l, u = es.Get(`${e}/sparse/indices/bufferView`, this._gltf.bufferViews, a.indices.bufferView), h = es.Get(`${e}/sparse/values/bufferView`, this._gltf.bufferViews, a.values.bufferView); return Promise.all([ this.loadBufferViewAsync(`/bufferViews/${u.index}`, u), this.loadBufferViewAsync(`/bufferViews/${h.index}`, h) ]).then(([d, f]) => { const p = Xi._GetTypedArray(`${e}/sparse/indices`, a.indices.componentType, d, a.indices.byteOffset, a.count), m = r * a.count; let _; if (t.componentType === 5126 && !t.normalized) _ = Xi._GetTypedArray(`${e}/sparse/values`, t.componentType, f, a.values.byteOffset, m); else { const C = Xi._GetTypedArray(`${e}/sparse/values`, t.componentType, f, a.values.byteOffset, m); _ = new i(m), Y.ForEach(C, 0, s, r, t.componentType, _.length, t.normalized || !1, (x, b) => { _[b] = x; }); } let v = 0; for (let C = 0; C < p.length; C++) { let x = p[C] * r; for (let b = 0; b < r; b++) o[x++] = _[v++]; } return o; }); }); } return t._data; } /** * @internal */ _loadFloatAccessorAsync(e, t) { return this._loadAccessorAsync(e, t, Float32Array); } /** * @internal */ _loadIndicesAccessorAsync(e, t) { if (t.type !== "SCALAR") throw new Error(`${e}/type: Invalid value ${t.type}`); if (t.componentType !== 5121 && t.componentType !== 5123 && t.componentType !== 5125) throw new Error(`${e}/componentType: Invalid value ${t.componentType}`); if (t._data) return t._data; if (t.sparse) { const i = Xi._GetTypedArrayConstructor(`${e}/componentType`, t.componentType); 
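// NOTE on the sparse-accessor handling in _loadAccessorAsync above: the base array is built first,
// either from the referenced bufferView or as zeros when bufferView is omitted; the sparse block
// then supplies two parallel bufferViews, `indices` (which elements to patch) and `values` (what to
// write), and the final loop copies each patched element, component by component, into the base
// array before it is cached on the accessor.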
t._data = this._loadAccessorAsync(e, t, i); } else { const i = es.Get(`${e}/bufferView`, this._gltf.bufferViews, t.bufferView); t._data = this.loadBufferViewAsync(`/bufferViews/${i.index}`, i).then((r) => Xi._GetTypedArray(e, t.componentType, r, t.byteOffset, t.count)); } return t._data; } /** * @internal */ _loadVertexBufferViewAsync(e) { if (e._babylonBuffer) return e._babylonBuffer; const t = this._babylonScene.getEngine(); return e._babylonBuffer = this.loadBufferViewAsync(`/bufferViews/${e.index}`, e).then((i) => new hu(t, i, !1)), e._babylonBuffer; } /** * @internal */ _loadVertexAccessorAsync(e, t, i) { var r; if (!((r = t._babylonVertexBuffer) === null || r === void 0) && r[i]) return t._babylonVertexBuffer[i]; t._babylonVertexBuffer || (t._babylonVertexBuffer = {}); const s = this._babylonScene.getEngine(); if (t.sparse || t.bufferView == null) t._babylonVertexBuffer[i] = this._loadFloatAccessorAsync(e, t).then((n) => new Y(s, n, i, !1)); else { const n = es.Get(`${e}/bufferView`, this._gltf.bufferViews, t.bufferView); t._babylonVertexBuffer[i] = this._loadVertexBufferViewAsync(n).then((a) => { const l = Xi._GetNumComponents(e, t.type); return new Y(s, a, i, !1, void 0, n.byteStride, void 0, t.byteOffset, l, t.componentType, t.normalized, !0, void 0, !0); }); } return t._babylonVertexBuffer[i]; } _loadMaterialMetallicRoughnessPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return t && (t.baseColorFactor ? (i.albedoColor = ze.FromArray(t.baseColorFactor), i.alpha = t.baseColorFactor[3]) : i.albedoColor = ze.White(), i.metallic = t.metallicFactor == null ? 1 : t.metallicFactor, i.roughness = t.roughnessFactor == null ? 1 : t.roughnessFactor, t.baseColorTexture && r.push(this.loadTextureInfoAsync(`${e}/baseColorTexture`, t.baseColorTexture, (s) => { s.name = `${i.name} (Base Color)`, i.albedoTexture = s; })), t.metallicRoughnessTexture && (t.metallicRoughnessTexture.nonColorData = !0, r.push(this.loadTextureInfoAsync(`${e}/metallicRoughnessTexture`, t.metallicRoughnessTexture, (s) => { s.name = `${i.name} (Metallic Roughness)`, i.metallicTexture = s; })), i.useMetallnessFromMetallicTextureBlue = !0, i.useRoughnessFromMetallicTextureGreen = !0, i.useRoughnessFromMetallicTextureAlpha = !1)), Promise.all(r).then(() => { }); } /** * @internal */ _loadMaterialAsync(e, t, i, r, s = () => { }) { const n = this._extensionsLoadMaterialAsync(e, t, i, r, s); if (n) return n; t._data = t._data || {}; let a = t._data[r]; if (!a) { this.logOpen(`${e} ${t.name || ""}`); const l = this.createMaterial(e, t, r); a = { babylonMaterial: l, babylonMeshes: [], promise: this.loadMaterialPropertiesAsync(e, t, l) }, t._data[r] = a, Xi.AddPointerMetadata(l, e), this._parent.onMaterialLoadedObservable.notifyObservers(l), this.logClose(); } return i && (a.babylonMeshes.push(i), i.onDisposeObservable.addOnce(() => { const l = a.babylonMeshes.indexOf(i); l !== -1 && a.babylonMeshes.splice(l, 1); })), s(a.babylonMaterial), a.promise.then(() => a.babylonMaterial); } _createDefaultMaterial(e, t) { this._babylonScene._blockEntityCollection = !!this._assetContainer; const i = new Ri(e, this._babylonScene); return i._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, i.fillMode = t, i.enableSpecularAntiAliasing = !0, i.useRadianceOverAlpha = !this._parent.transparencyAsCoverage, i.useSpecularOverAlpha = !this._parent.transparencyAsCoverage, i.transparencyMode = Ri.PBRMATERIAL_OPAQUE, i.metallic = 1, 
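// NOTE on _loadMaterialMetallicRoughnessPropertiesAsync above: glTF packs roughness into the green
// channel and metallic into the blue channel of metallicRoughnessTexture, which is why the PBR
// material is configured with useRoughnessFromMetallicTextureGreen = true,
// useMetallnessFromMetallicTextureBlue = true and useRoughnessFromMetallicTextureAlpha = false;
// missing metallicFactor/roughnessFactor default to 1, matching the glTF specification.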
i.roughness = 1, i; } /** * Creates a Babylon material from a glTF material. * @param context The context when loading the asset * @param material The glTF material property * @param babylonDrawMode The draw mode for the Babylon material * @returns The Babylon material */ createMaterial(e, t, i) { const r = this._extensionsCreateMaterial(e, t, i); if (r) return r; const s = t.name || `material${t.index}`; return this._createDefaultMaterial(s, i); } /** * Loads properties from a glTF material into a Babylon material. * @param context The context when loading the asset * @param material The glTF material property * @param babylonMaterial The Babylon material * @returns A promise that resolves when the load is complete */ loadMaterialPropertiesAsync(e, t, i) { const r = this._extensionsLoadMaterialPropertiesAsync(e, t, i); if (r) return r; const s = new Array(); return s.push(this.loadMaterialBasePropertiesAsync(e, t, i)), t.pbrMetallicRoughness && s.push(this._loadMaterialMetallicRoughnessPropertiesAsync(`${e}/pbrMetallicRoughness`, t.pbrMetallicRoughness, i)), this.loadMaterialAlphaProperties(e, t, i), Promise.all(s).then(() => { }); } /** * Loads the normal, occlusion, and emissive properties from a glTF material into a Babylon material. * @param context The context when loading the asset * @param material The glTF material property * @param babylonMaterial The Babylon material * @returns A promise that resolves when the load is complete */ loadMaterialBasePropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return i.emissiveColor = t.emissiveFactor ? ze.FromArray(t.emissiveFactor) : new ze(0, 0, 0), t.doubleSided && (i.backFaceCulling = !1, i.twoSidedLighting = !0), t.normalTexture && (t.normalTexture.nonColorData = !0, r.push(this.loadTextureInfoAsync(`${e}/normalTexture`, t.normalTexture, (s) => { s.name = `${i.name} (Normal)`, i.bumpTexture = s; })), i.invertNormalMapX = !this._babylonScene.useRightHandedSystem, i.invertNormalMapY = this._babylonScene.useRightHandedSystem, t.normalTexture.scale != null && i.bumpTexture && (i.bumpTexture.level = t.normalTexture.scale), i.forceIrradianceInFragment = !0), t.occlusionTexture && (t.occlusionTexture.nonColorData = !0, r.push(this.loadTextureInfoAsync(`${e}/occlusionTexture`, t.occlusionTexture, (s) => { s.name = `${i.name} (Occlusion)`, i.ambientTexture = s; })), i.useAmbientInGrayScale = !0, t.occlusionTexture.strength != null && (i.ambientTextureStrength = t.occlusionTexture.strength)), t.emissiveTexture && r.push(this.loadTextureInfoAsync(`${e}/emissiveTexture`, t.emissiveTexture, (s) => { s.name = `${i.name} (Emissive)`, i.emissiveTexture = s; })), Promise.all(r).then(() => { }); } /** * Loads the alpha properties from a glTF material into a Babylon material. * Must be called after the setting the albedo texture of the Babylon material when the material has an albedo texture. * @param context The context when loading the asset * @param material The glTF material property * @param babylonMaterial The Babylon material */ loadMaterialAlphaProperties(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); switch (t.alphaMode || "OPAQUE") { case "OPAQUE": { i.transparencyMode = Ri.PBRMATERIAL_OPAQUE, i.alpha = 1; break; } case "MASK": { i.transparencyMode = Ri.PBRMATERIAL_ALPHATEST, i.alphaCutOff = t.alphaCutoff == null ? 
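// NOTE on the alphaMode switch in loadMaterialAlphaProperties: OPAQUE maps to PBRMATERIAL_OPAQUE
// with alpha forced to 1, MASK maps to PBRMATERIAL_ALPHATEST with alphaCutOff defaulting to the
// spec value 0.5, and BLEND maps to PBRMATERIAL_ALPHABLEND driven by the albedo texture's alpha
// channel (useAlphaFromAlbedoTexture); any other value is rejected with an error.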
0.5 : t.alphaCutoff, i.albedoTexture && (i.albedoTexture.hasAlpha = !0); break; } case "BLEND": { i.transparencyMode = Ri.PBRMATERIAL_ALPHABLEND, i.albedoTexture && (i.albedoTexture.hasAlpha = !0, i.useAlphaFromAlbedoTexture = !0); break; } default: throw new Error(`${e}/alphaMode: Invalid value (${t.alphaMode})`); } } /** * Loads a glTF texture info. * @param context The context when loading the asset * @param textureInfo The glTF texture info property * @param assign A function called synchronously after parsing the glTF properties * @returns A promise that resolves with the loaded Babylon texture when the load is complete */ loadTextureInfoAsync(e, t, i = () => { }) { const r = this._extensionsLoadTextureInfoAsync(e, t, i); if (r) return r; if (this.logOpen(`${e}`), t.texCoord >= 6) throw new Error(`${e}/texCoord: Invalid value (${t.texCoord})`); const s = es.Get(`${e}/index`, this._gltf.textures, t.index); s._textureInfo = t; const n = this._loadTextureAsync(`/textures/${t.index}`, s, (a) => { a.coordinatesIndex = t.texCoord || 0, Xi.AddPointerMetadata(a, e), this._parent.onTextureLoadedObservable.notifyObservers(a), i(a); }); return this.logClose(), n; } /** * @internal */ _loadTextureAsync(e, t, i = () => { }) { const r = this._extensionsLoadTextureAsync(e, t, i); if (r) return r; this.logOpen(`${e} ${t.name || ""}`); const s = t.sampler == null ? Xi.DefaultSampler : es.Get(`${e}/sampler`, this._gltf.samplers, t.sampler), n = es.Get(`${e}/source`, this._gltf.images, t.source), a = this._createTextureAsync(e, s, n, i, void 0, !t._textureInfo.nonColorData); return this.logClose(), a; } /** * @internal */ _createTextureAsync(e, t, i, r = () => { }, s, n) { const a = this._loadSampler(`/samplers/${t.index}`, t), l = new Array(), o = new rO(); this._babylonScene._blockEntityCollection = !!this._assetContainer; const u = { noMipmap: a.noMipMaps, invertY: !1, samplingMode: a.samplingMode, onLoad: () => { this._disposed || o.resolve(); }, onError: (d, f) => { this._disposed || o.reject(new Error(`${e}: ${f && f.message ? f.message : d || "Failed to load texture"}`)); }, mimeType: i.mimeType, loaderOptions: s, useSRGBBuffer: !!n && this._parent.useSRGBBuffers }, h = new De(null, this._babylonScene, u); return h._parentContainer = this._assetContainer, this._babylonScene._blockEntityCollection = !1, l.push(o.promise), l.push(this.loadImageAsync(`/images/${i.index}`, i).then((d) => { const f = i.uri || `${this._fileName}#image${i.index}`, p = `data:${this._uniqueRootUrl}${f}`; h.updateURL(p, d); })), h.wrapU = a.wrapU, h.wrapV = a.wrapV, r(h), Promise.all(l).then(() => h); } _loadSampler(e, t) { return t._data || (t._data = { noMipMaps: t.minFilter === 9728 || t.minFilter === 9729, samplingMode: Xi._GetTextureSamplingMode(e, t), wrapU: Xi._GetTextureWrapMode(`${e}/wrapS`, t.wrapS), wrapV: Xi._GetTextureWrapMode(`${e}/wrapT`, t.wrapT) }), t._data; } /** * Loads a glTF image. * @param context The context when loading the asset * @param image The glTF image property * @returns A promise that resolves with the loaded data when the load is complete */ loadImageAsync(e, t) { if (!t._data) { if (this.logOpen(`${e} ${t.name || ""}`), t.uri) t._data = this.loadUriAsync(`${e}/uri`, t, t.uri); else { const i = es.Get(`${e}/bufferView`, this._gltf.bufferViews, t.bufferView); t._data = this.loadBufferViewAsync(`/bufferViews/${i.index}`, i); } this.logClose(); } return t._data; } /** * Loads a glTF uri. 
* @param context The context when loading the asset * @param property The glTF property associated with the uri * @param uri The base64 or relative uri * @returns A promise that resolves with the loaded data when the load is complete */ loadUriAsync(e, t, i) { const r = this._extensionsLoadUriAsync(e, t, i); if (r) return r; if (!Xi._ValidateUri(i)) throw new Error(`${e}: '${i}' is invalid`); if (wL(i)) { const s = new Uint8Array(pw(i)); return this.log(`${e}: Decoded ${i.substr(0, 64)}... (${s.length} bytes)`), Promise.resolve(s); } return this.log(`${e}: Loading ${i}`), this._parent.preprocessUrlAsync(this._rootUrl + i).then((s) => new Promise((n, a) => { this._parent._loadFile(this._babylonScene, s, (l) => { this._disposed || (this.log(`${e}: Loaded ${i} (${l.byteLength} bytes)`), n(new Uint8Array(l))); }, !0, (l) => { a(new UO(`${e}: Failed to load '${i}'${l ? ": " + l.status + " " + l.statusText : ""}`, l)); }); })); } /** * Adds a JSON pointer to the _internalMetadata of the Babylon object at `._internalMetadata.gltf.pointers`. * @param babylonObject the Babylon object with _internalMetadata * @param pointer the JSON pointer */ static AddPointerMetadata(e, t) { e.metadata = e.metadata || {}; const i = e._internalMetadata = e._internalMetadata || {}, r = i.gltf = i.gltf || {}; (r.pointers = r.pointers || []).push(t); } static _GetTextureWrapMode(e, t) { switch (t = t ?? 10497, t) { case 33071: return De.CLAMP_ADDRESSMODE; case 33648: return De.MIRROR_ADDRESSMODE; case 10497: return De.WRAP_ADDRESSMODE; default: return Ce.Warn(`${e}: Invalid value (${t})`), De.WRAP_ADDRESSMODE; } } static _GetTextureSamplingMode(e, t) { const i = t.magFilter == null ? 9729 : t.magFilter, r = t.minFilter == null ? 9987 : t.minFilter; if (i === 9729) switch (r) { case 9728: return De.LINEAR_NEAREST; case 9729: return De.LINEAR_LINEAR; case 9984: return De.LINEAR_NEAREST_MIPNEAREST; case 9985: return De.LINEAR_LINEAR_MIPNEAREST; case 9986: return De.LINEAR_NEAREST_MIPLINEAR; case 9987: return De.LINEAR_LINEAR_MIPLINEAR; default: return Ce.Warn(`${e}/minFilter: Invalid value (${r})`), De.LINEAR_LINEAR_MIPLINEAR; } else switch (i !== 9728 && Ce.Warn(`${e}/magFilter: Invalid value (${i})`), r) { case 9728: return De.NEAREST_NEAREST; case 9729: return De.NEAREST_LINEAR; case 9984: return De.NEAREST_NEAREST_MIPNEAREST; case 9985: return De.NEAREST_LINEAR_MIPNEAREST; case 9986: return De.NEAREST_NEAREST_MIPLINEAR; case 9987: return De.NEAREST_LINEAR_MIPLINEAR; default: return Ce.Warn(`${e}/minFilter: Invalid value (${r})`), De.NEAREST_NEAREST_MIPNEAREST; } } static _GetTypedArrayConstructor(e, t) { switch (t) { case 5120: return Int8Array; case 5121: return Uint8Array; case 5122: return Int16Array; case 5123: return Uint16Array; case 5125: return Uint32Array; case 5126: return Float32Array; default: throw new Error(`${e}: Invalid component type ${t}`); } } static _GetTypedArray(e, t, i, r, s) { const n = i.buffer; r = i.byteOffset + (r || 0); const a = Xi._GetTypedArrayConstructor(`${e}/componentType`, t), l = Y.GetTypeByteLength(t); return r % l !== 0 ? 
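// NOTE on the numeric constants in _GetTextureWrapMode/_GetTextureSamplingMode above: they are the
// raw WebGL/glTF sampler enums, wrap 33071 = CLAMP_TO_EDGE, 33648 = MIRRORED_REPEAT,
// 10497 = REPEAT (the default), filters 9728 = NEAREST, 9729 = LINEAR, and 9984..9987 are the four
// NEAREST/LINEAR mipmap combinations; a missing magFilter falls back to LINEAR (9729) and a missing
// minFilter to LINEAR_MIPMAP_LINEAR (9987).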
(Ce.Warn(`${e}: Copying buffer as byte offset (${r}) is not a multiple of component type byte length (${l})`), new a(n.slice(r, r + s * l), 0)) : new a(n, r, s); } static _GetNumComponents(e, t) { switch (t) { case "SCALAR": return 1; case "VEC2": return 2; case "VEC3": return 3; case "VEC4": return 4; case "MAT2": return 4; case "MAT3": return 9; case "MAT4": return 16; } throw new Error(`${e}: Invalid type (${t})`); } static _ValidateUri(e) { return Ve.IsBase64(e) || e.indexOf("..") === -1; } /** * @internal */ static _GetDrawMode(e, t) { switch (t == null && (t = 4), t) { case 0: return At.PointListDrawMode; case 1: return At.LineListDrawMode; case 2: return At.LineLoopDrawMode; case 3: return At.LineStripDrawMode; case 4: return At.TriangleFillMode; case 5: return At.TriangleStripDrawMode; case 6: return At.TriangleFanDrawMode; } throw new Error(`${e}: Invalid mesh primitive mode (${t})`); } _compileMaterialsAsync() { this._parent._startPerformanceCounter("Compile materials"); const e = new Array(); if (this._gltf.materials) { for (const t of this._gltf.materials) if (t._data) for (const i in t._data) { const r = t._data[i]; for (const s of r.babylonMeshes) { s.computeWorldMatrix(!0); const n = r.babylonMaterial; e.push(n.forceCompilationAsync(s)), e.push(n.forceCompilationAsync(s, { useInstances: !0 })), this._parent.useClipPlane && (e.push(n.forceCompilationAsync(s, { clipPlane: !0 })), e.push(n.forceCompilationAsync(s, { clipPlane: !0, useInstances: !0 }))); } } } return Promise.all(e).then(() => { this._parent._endPerformanceCounter("Compile materials"); }); } _compileShadowGeneratorsAsync() { this._parent._startPerformanceCounter("Compile shadow generators"); const e = new Array(), t = this._babylonScene.lights; for (const i of t) { const r = i.getShadowGenerator(); r && e.push(r.forceCompilationAsync()); } return Promise.all(e).then(() => { this._parent._endPerformanceCounter("Compile shadow generators"); }); } _forEachExtensions(e) { for (const t of this._extensions) t.enabled && e(t); } _applyExtensions(e, t, i) { for (const r of this._extensions) if (r.enabled) { const s = `${r.name}.${t}`, n = e; n._activeLoaderExtensionFunctions = n._activeLoaderExtensionFunctions || {}; const a = n._activeLoaderExtensionFunctions; if (!a[s]) { a[s] = !0; try { const l = i(r); if (l) return l; } finally { delete a[s]; } } } return null; } _extensionsOnLoading() { this._forEachExtensions((e) => e.onLoading && e.onLoading()); } _extensionsOnReady() { this._forEachExtensions((e) => e.onReady && e.onReady()); } _extensionsLoadSceneAsync(e, t) { return this._applyExtensions(t, "loadScene", (i) => i.loadSceneAsync && i.loadSceneAsync(e, t)); } _extensionsLoadNodeAsync(e, t, i) { return this._applyExtensions(t, "loadNode", (r) => r.loadNodeAsync && r.loadNodeAsync(e, t, i)); } _extensionsLoadCameraAsync(e, t, i) { return this._applyExtensions(t, "loadCamera", (r) => r.loadCameraAsync && r.loadCameraAsync(e, t, i)); } _extensionsLoadVertexDataAsync(e, t, i) { return this._applyExtensions(t, "loadVertexData", (r) => r._loadVertexDataAsync && r._loadVertexDataAsync(e, t, i)); } _extensionsLoadMeshPrimitiveAsync(e, t, i, r, s, n) { return this._applyExtensions(s, "loadMeshPrimitive", (a) => a._loadMeshPrimitiveAsync && a._loadMeshPrimitiveAsync(e, t, i, r, s, n)); } _extensionsLoadMaterialAsync(e, t, i, r, s) { return this._applyExtensions(t, "loadMaterial", (n) => n._loadMaterialAsync && n._loadMaterialAsync(e, t, i, r, s)); } _extensionsCreateMaterial(e, t, i) { return this._applyExtensions(t, 
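// NOTE: _GetNumComponents and _GetDrawMode above are direct translations of the glTF enums,
// SCALAR/VEC2/VEC3/VEC4 -> 1/2/3/4 components and MAT2/MAT3/MAT4 -> 4/9/16, while primitive modes
// 0..6 map to points, lines, line loop, line strip, triangles (the default when mode is omitted),
// triangle strip and triangle fan.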
"createMaterial", (r) => r.createMaterial && r.createMaterial(e, t, i)); } _extensionsLoadMaterialPropertiesAsync(e, t, i) { return this._applyExtensions(t, "loadMaterialProperties", (r) => r.loadMaterialPropertiesAsync && r.loadMaterialPropertiesAsync(e, t, i)); } _extensionsLoadTextureInfoAsync(e, t, i) { return this._applyExtensions(t, "loadTextureInfo", (r) => r.loadTextureInfoAsync && r.loadTextureInfoAsync(e, t, i)); } _extensionsLoadTextureAsync(e, t, i) { return this._applyExtensions(t, "loadTexture", (r) => r._loadTextureAsync && r._loadTextureAsync(e, t, i)); } _extensionsLoadAnimationAsync(e, t) { return this._applyExtensions(t, "loadAnimation", (i) => i.loadAnimationAsync && i.loadAnimationAsync(e, t)); } _extensionsLoadAnimationChannelAsync(e, t, i, r, s) { return this._applyExtensions(i, "loadAnimationChannel", (n) => n._loadAnimationChannelAsync && n._loadAnimationChannelAsync(e, t, i, r, s)); } _extensionsLoadSkinAsync(e, t, i) { return this._applyExtensions(i, "loadSkin", (r) => r._loadSkinAsync && r._loadSkinAsync(e, t, i)); } _extensionsLoadUriAsync(e, t, i) { return this._applyExtensions(t, "loadUri", (r) => r._loadUriAsync && r._loadUriAsync(e, t, i)); } _extensionsLoadBufferViewAsync(e, t) { return this._applyExtensions(t, "loadBufferView", (i) => i.loadBufferViewAsync && i.loadBufferViewAsync(e, t)); } _extensionsLoadBufferAsync(e, t, i, r) { return this._applyExtensions(t, "loadBuffer", (s) => s.loadBufferAsync && s.loadBufferAsync(e, t, i, r)); } /** * Helper method called by a loader extension to load an glTF extension. * @param context The context when loading the asset * @param property The glTF property to load the extension from * @param extensionName The name of the extension to load * @param actionAsync The action to run * @returns The promise returned by actionAsync or null if the extension does not exist */ static LoadExtensionAsync(e, t, i, r) { if (!t.extensions) return null; const n = t.extensions[i]; return n ? r(`${e}/extensions/${i}`, n) : null; } /** * Helper method called by a loader extension to load a glTF extra. * @param context The context when loading the asset * @param property The glTF property to load the extra from * @param extensionName The name of the extension to load * @param actionAsync The action to run * @returns The promise returned by actionAsync or null if the extra does not exist */ static LoadExtraAsync(e, t, i, r) { if (!t.extras) return null; const n = t.extras[i]; return n ? r(`${e}/extras/${i}`, n) : null; } /** * Checks for presence of an extension. * @param name The name of the extension to check * @returns A boolean indicating the presence of the given extension name in `extensionsUsed` */ isExtensionUsed(e) { return !!this._gltf.extensionsUsed && this._gltf.extensionsUsed.indexOf(e) !== -1; } /** * Increments the indentation level and logs a message. * @param message The message to log */ logOpen(e) { this._parent._logOpen(e); } /** * Decrements the indentation level. */ logClose() { this._parent._logClose(); } /** * Logs a message * @param message The message to log */ log(e) { this._parent._log(e); } /** * Starts a performance counter. * @param counterName The name of the performance counter */ startPerformanceCounter(e) { this._parent._startPerformanceCounter(e); } /** * Ends a performance counter. 
* @param counterName The name of the performance counter */ endPerformanceCounter(e) { this._parent._endPerformanceCounter(e); } } Xi._RegisteredExtensions = {}; Xi.DefaultSampler = { index: -1 }; yl._CreateGLTF2Loader = (c) => new Xi(c); const iG = "EXT_lights_image_based"; class MAe { /** * @internal */ constructor(e) { this.name = iG, this._loader = e, this.enabled = this._loader.isExtensionUsed(iG); } /** @internal */ dispose() { this._loader = null, delete this._lights; } /** @internal */ onLoading() { const e = this._loader.gltf.extensions; if (e && e[this.name]) { const t = e[this.name]; this._lights = t.lights; } } /** * @internal */ loadSceneAsync(e, t) { return Xi.LoadExtensionAsync(e, t, this.name, (i, r) => { this._loader._allMaterialsDirtyRequired = !0; const s = new Array(); s.push(this._loader.loadSceneAsync(e, t)), this._loader.logOpen(`${i}`); const n = es.Get(`${i}/light`, this._lights, r.light); return s.push(this._loadLightAsync(`/extensions/${this.name}/lights/${r.light}`, n).then((a) => { this._loader.babylonScene.environmentTexture = a; })), this._loader.logClose(), Promise.all(s).then(() => { }); }); } _loadLightAsync(e, t) { if (!t._loaded) { const i = new Array(); this._loader.logOpen(`${e}`); const r = new Array(t.specularImages.length); for (let s = 0; s < t.specularImages.length; s++) { const n = t.specularImages[s]; r[s] = new Array(n.length); for (let a = 0; a < n.length; a++) { const l = `${e}/specularImages/${s}/${a}`; this._loader.logOpen(`${l}`); const o = n[a], u = es.Get(l, this._loader.gltf.images, o); i.push(this._loader.loadImageAsync(`/images/${o}`, u).then((h) => { r[s][a] = h; })), this._loader.logClose(); } } this._loader.logClose(), t._loaded = Promise.all(i).then(() => { const s = new LU(this._loader.babylonScene, null, t.specularImageSize); if (s.name = t.name || "environment", t._babylonTexture = s, t.intensity != null && (s.level = t.intensity), t.rotation) { let o = Ze.FromArray(t.rotation); this._loader.babylonScene.useRightHandedSystem || (o = Ze.Inverse(o)), Ae.FromQuaternionToRef(o, s.getReflectionTextureMatrix()); } if (!t.irradianceCoefficients) throw new Error(`${e}: Irradiance coefficients are missing`); const n = m5.FromArray(t.irradianceCoefficients); n.scaleInPlace(t.intensity), n.convertIrradianceToLambertianRadiance(); const a = ax.FromHarmonics(n), l = (r.length - 1) / yt.Log2(t.specularImageSize); return s.updateRGBDAsync(r, a, l); }); } return t._loaded.then(() => t._babylonTexture); } } Xi.RegisterExtension(iG, (c) => new MAe(c)); const rG = "EXT_mesh_gpu_instancing"; class RAe { /** * @internal */ constructor(e) { this.name = rG, this._loader = e, this.enabled = this._loader.isExtensionUsed(rG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadNodeAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { this._loader._disableInstancedMesh++; const n = this._loader.loadNodeAsync(`/nodes/${t.index}`, t, i); if (this._loader._disableInstancedMesh--, !t._primitiveBabylonMeshes) return n; const a = new Array(); let l = 0; const o = (u) => { if (s.attributes[u] == null) { a.push(Promise.resolve(null)); return; } const h = es.Get(`${r}/attributes/${u}`, this._loader.gltf.accessors, s.attributes[u]); if (a.push(this._loader._loadFloatAccessorAsync(`/accessors/${h.bufferView}`, h)), l === 0) l = h.count; else if (l !== h.count) throw new Error(`${r}/attributes: Instance buffer accessors do not have the same count.`); }; return o("TRANSLATION"), o("ROTATION"), o("SCALE"), n.then((u) => 
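/*
 * EXT_mesh_gpu_instancing (sketch): the TRANSLATION / ROTATION / SCALE accessors loaded above
 * are composed per instance into a flat 4x4 matrix buffer and handed to Babylon thin instances,
 * as done just below. For a single hypothetical instance, using the bundle's minified aliases
 * (D = Vector3, Ze = Quaternion, Ae = Matrix), where babylonMesh stands in for a loaded mesh:
 *
 *   const matrices = new Float32Array(1 * 16);
 *   const m = new Ae();
 *   Ae.ComposeToRef(new D(1, 1, 1), Ze.Identity(), new D(0, 2, 0), m); // scale, rotation, translation
 *   m.copyToArray(matrices, 0);
 *   babylonMesh.thinInstanceSetBuffer("matrix", matrices, 16, true);   // babylonMesh: hypothetical
 */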
Promise.all(a).then(([h, d, f]) => { const p = new Float32Array(l * 16); de.Vector3[0].copyFromFloats(0, 0, 0), de.Quaternion[0].copyFromFloats(0, 0, 0, 1), de.Vector3[1].copyFromFloats(1, 1, 1); for (let m = 0; m < l; ++m) h && D.FromArrayToRef(h, m * 3, de.Vector3[0]), d && Ze.FromArrayToRef(d, m * 4, de.Quaternion[0]), f && D.FromArrayToRef(f, m * 3, de.Vector3[1]), Ae.ComposeToRef(de.Vector3[1], de.Quaternion[0], de.Vector3[0], de.Matrix[0]), de.Matrix[0].copyToArray(p, m * 16); for (const m of t._primitiveBabylonMeshes) m.thinInstanceSetBuffer("matrix", p, 16, !0); return u; })); }); } } Xi.RegisterExtension(rG, (c) => new RAe(c)); const sG = "EXT_meshopt_compression"; class PAe { /** * @internal */ constructor(e) { this.name = sG, this.enabled = e.isExtensionUsed(sG), this._loader = e; } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadBufferViewAsync(e, t) { return Xi.LoadExtensionAsync(e, t, this.name, (i, r) => { const s = t; if (s._meshOptData) return s._meshOptData; const n = es.Get(`${e}/buffer`, this._loader.gltf.buffers, r.buffer); return s._meshOptData = this._loader.loadBufferAsync(`/buffers/${n.index}`, n, r.byteOffset || 0, r.byteLength).then((a) => HC.Default.decodeGltfBufferAsync(a, r.count, r.byteStride, r.mode, r.filter)), s._meshOptData; }); } } Xi.RegisterExtension(sG, (c) => new PAe(c)); const nG = "EXT_texture_webp"; class IAe { /** * @internal */ constructor(e) { this.name = nG, this._loader = e, this.enabled = e.isExtensionUsed(nG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ _loadTextureAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = t.sampler == null ? Xi.DefaultSampler : es.Get(`${e}/sampler`, this._loader.gltf.samplers, t.sampler), a = es.Get(`${r}/source`, this._loader.gltf.images, s.source); return this._loader._createTextureAsync(e, n, a, (l) => { i(l); }, void 0, !t._textureInfo.nonColorData); }); } } Xi.RegisterExtension(nG, (c) => new IAe(c)); const aG = "KHR_draco_mesh_compression"; class DAe { /** * @internal */ constructor(e) { this.name = aG, this.useNormalizedFlagFromAccessor = !0, this._loader = e, this.enabled = k_.DecoderAvailable && this._loader.isExtensionUsed(aG); } /** @internal */ dispose() { delete this.dracoCompression, this._loader = null; } /** * @internal */ _loadVertexDataAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { if (t.mode != null && t.mode !== 4 && t.mode !== 5) throw new Error(`${e}: Unsupported mode ${t.mode}`); const n = {}, a = {}, l = (u, h) => { const d = s.attributes[u]; if (d != null && (i._delayInfo = i._delayInfo || [], i._delayInfo.indexOf(h) === -1 && i._delayInfo.push(h), n[h] = d, this.useNormalizedFlagFromAccessor)) { const f = es.TryGet(this._loader.gltf.accessors, t.attributes[u]); f && (a[h] = f.normalized || !1); } }; l("POSITION", Y.PositionKind), l("NORMAL", Y.NormalKind), l("TANGENT", Y.TangentKind), l("TEXCOORD_0", Y.UVKind), l("TEXCOORD_1", Y.UV2Kind), l("TEXCOORD_2", Y.UV3Kind), l("TEXCOORD_3", Y.UV4Kind), l("TEXCOORD_4", Y.UV5Kind), l("TEXCOORD_5", Y.UV6Kind), l("JOINTS_0", Y.MatricesIndicesKind), l("WEIGHTS_0", Y.MatricesWeightsKind), l("COLOR_0", Y.ColorKind); const o = es.Get(r, this._loader.gltf.bufferViews, s.bufferView); return o._dracoBabylonGeometry || (o._dracoBabylonGeometry = this._loader.loadBufferViewAsync(`/bufferViews/${o.index}`, o).then((u) => (this.dracoCompression || k_.Default)._decodeMeshToGeometryForGltfAsync(i.name, this._loader.babylonScene, u, n, a).catch((d) 
=> { throw new Error(`${e}: ${d.message}`); }))), o._dracoBabylonGeometry; }); } } Xi.RegisterExtension(aG, (c) => new DAe(c)); const oG = "KHR_lights_punctual"; class OAe { /** * @internal */ constructor(e) { this.name = oG, this._loader = e, this.enabled = this._loader.isExtensionUsed(oG); } /** @internal */ dispose() { this._loader = null, delete this._lights; } /** @internal */ onLoading() { const e = this._loader.gltf.extensions; if (e && e[this.name]) { const t = e[this.name]; this._lights = t.lights, es.Assign(this._lights); } } /** * @internal */ loadNodeAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => (this._loader._allMaterialsDirtyRequired = !0, this._loader.loadNodeAsync(e, t, (n) => { let a; const l = es.Get(r, this._lights, s.light), o = l.name || n.name; switch (this._loader.babylonScene._blockEntityCollection = !!this._loader._assetContainer, l.type) { case "directional": { const u = new Pd(o, D.Backward(), this._loader.babylonScene); u.position.setAll(0), a = u; break; } case "point": { a = new s6(o, D.Zero(), this._loader.babylonScene); break; } case "spot": { const u = new td(o, D.Zero(), D.Backward(), 0, 1, this._loader.babylonScene); u.angle = (l.spot && l.spot.outerConeAngle || Math.PI / 4) * 2, u.innerAngle = (l.spot && l.spot.innerConeAngle || 0) * 2, a = u; break; } default: throw this._loader.babylonScene._blockEntityCollection = !1, new Error(`${r}: Invalid light type (${l.type})`); } a._parentContainer = this._loader._assetContainer, this._loader.babylonScene._blockEntityCollection = !1, l._babylonLight = a, a.falloffType = hs.FALLOFF_GLTF, a.diffuse = l.color ? ze.FromArray(l.color) : ze.White(), a.intensity = l.intensity == null ? 1 : l.intensity, a.range = l.range == null ? Number.MAX_VALUE : l.range, a.parent = n, this._loader._babylonLights.push(a), Xi.AddPointerMetadata(a, r), i(n); }))); } } Xi.RegisterExtension(oG, (c) => new OAe(c)); const lG = "KHR_materials_pbrSpecularGlossiness"; class wAe { /** * @internal */ constructor(e) { this.name = lG, this.order = 200, this._loader = e, this.enabled = this._loader.isExtensionUsed(lG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialBasePropertiesAsync(e, t, i)), n.push(this._loadSpecularGlossinessPropertiesAsync(r, s, i)), this._loader.loadMaterialAlphaProperties(e, t, i), Promise.all(n).then(() => { }); }); } _loadSpecularGlossinessPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return i.metallic = null, i.roughness = null, t.diffuseFactor ? (i.albedoColor = ze.FromArray(t.diffuseFactor), i.alpha = t.diffuseFactor[3]) : i.albedoColor = ze.White(), i.reflectivityColor = t.specularFactor ? ze.FromArray(t.specularFactor) : ze.White(), i.microSurface = t.glossinessFactor == null ? 
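/*
 * Specular-glossiness mapping used here: diffuseFactor drives albedoColor (with alpha taken
 * from its 4th component), specularFactor drives reflectivityColor, and glossinessFactor
 * drives microSurface, Babylon's (1 - roughness) equivalent (e.g. glossinessFactor 0.8 ->
 * microSurface 0.8), defaulting to 1 when absent; metallic and roughness are nulled so the
 * specular-glossiness workflow takes over.
 */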
1 : t.glossinessFactor, t.diffuseTexture && r.push(this._loader.loadTextureInfoAsync(`${e}/diffuseTexture`, t.diffuseTexture, (s) => { s.name = `${i.name} (Diffuse)`, i.albedoTexture = s; })), t.specularGlossinessTexture && (r.push(this._loader.loadTextureInfoAsync(`${e}/specularGlossinessTexture`, t.specularGlossinessTexture, (s) => { s.name = `${i.name} (Specular Glossiness)`, i.reflectivityTexture = s, i.reflectivityTexture.hasAlpha = !0; })), i.useMicroSurfaceFromReflectivityMapAlpha = !0), Promise.all(r).then(() => { }); } } Xi.RegisterExtension(lG, (c) => new wAe(c)); const cG = "KHR_materials_unlit"; class LAe { /** * @internal */ constructor(e) { this.name = cG, this.order = 210, this._loader = e, this.enabled = this._loader.isExtensionUsed(cG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, () => this._loadUnlitPropertiesAsync(e, t, i)); } _loadUnlitPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); i.unlit = !0; const s = t.pbrMetallicRoughness; return s && (s.baseColorFactor ? (i.albedoColor = ze.FromArray(s.baseColorFactor), i.alpha = s.baseColorFactor[3]) : i.albedoColor = ze.White(), s.baseColorTexture && r.push(this._loader.loadTextureInfoAsync(`${e}/baseColorTexture`, s.baseColorTexture, (n) => { n.name = `${i.name} (Base Color)`, i.albedoTexture = n; }))), t.doubleSided && (i.backFaceCulling = !1, i.twoSidedLighting = !0), this._loader.loadMaterialAlphaProperties(e, t, i), Promise.all(r).then(() => { }); } } Xi.RegisterExtension(cG, (c) => new LAe(c)); const uG = "KHR_materials_clearcoat"; class NAe { /** * @internal */ constructor(e) { this.name = uG, this.order = 190, this._loader = e, this.enabled = this._loader.isExtensionUsed(uG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadClearCoatPropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadClearCoatPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return i.clearCoat.isEnabled = !0, i.clearCoat.useRoughnessFromMainTexture = !1, i.clearCoat.remapF0OnInterfaceChange = !1, t.clearcoatFactor != null ? i.clearCoat.intensity = t.clearcoatFactor : i.clearCoat.intensity = 0, t.clearcoatTexture && r.push(this._loader.loadTextureInfoAsync(`${e}/clearcoatTexture`, t.clearcoatTexture, (s) => { s.name = `${i.name} (ClearCoat Intensity)`, i.clearCoat.texture = s; })), t.clearcoatRoughnessFactor != null ? 
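/*
 * Clearcoat mapping used here: clearcoatFactor -> clearCoat.intensity (0 when absent),
 * clearcoatRoughnessFactor -> clearCoat.roughness, and the clearcoat normal texture becomes
 * clearCoat.bumpTexture with its X/Y inversion matched to the scene handedness, mirroring
 * how the base normal map is handled.
 */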
i.clearCoat.roughness = t.clearcoatRoughnessFactor : i.clearCoat.roughness = 0, t.clearcoatRoughnessTexture && (t.clearcoatRoughnessTexture.nonColorData = !0, r.push(this._loader.loadTextureInfoAsync(`${e}/clearcoatRoughnessTexture`, t.clearcoatRoughnessTexture, (s) => { s.name = `${i.name} (ClearCoat Roughness)`, i.clearCoat.textureRoughness = s; }))), t.clearcoatNormalTexture && (t.clearcoatNormalTexture.nonColorData = !0, r.push(this._loader.loadTextureInfoAsync(`${e}/clearcoatNormalTexture`, t.clearcoatNormalTexture, (s) => { s.name = `${i.name} (ClearCoat Normal)`, i.clearCoat.bumpTexture = s; })), i.invertNormalMapX = !i.getScene().useRightHandedSystem, i.invertNormalMapY = i.getScene().useRightHandedSystem, t.clearcoatNormalTexture.scale != null && (i.clearCoat.bumpTexture.level = t.clearcoatNormalTexture.scale)), Promise.all(r).then(() => { }); } } Xi.RegisterExtension(uG, (c) => new NAe(c)); const hG = "KHR_materials_iridescence"; class FAe { /** * @internal */ constructor(e) { this.name = hG, this.order = 195, this._loader = e, this.enabled = this._loader.isExtensionUsed(hG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadIridescencePropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadIridescencePropertiesAsync(e, t, i) { var r, s, n, a, l; if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const o = new Array(); return i.iridescence.isEnabled = !0, i.iridescence.intensity = (r = t.iridescenceFactor) !== null && r !== void 0 ? r : 0, i.iridescence.indexOfRefraction = (n = (s = t.iridescenceIor) !== null && s !== void 0 ? s : t.iridescenceIOR) !== null && n !== void 0 ? n : 1.3, i.iridescence.minimumThickness = (a = t.iridescenceThicknessMinimum) !== null && a !== void 0 ? a : 100, i.iridescence.maximumThickness = (l = t.iridescenceThicknessMaximum) !== null && l !== void 0 ? l : 400, t.iridescenceTexture && o.push(this._loader.loadTextureInfoAsync(`${e}/iridescenceTexture`, t.iridescenceTexture, (u) => { u.name = `${i.name} (Iridescence Intensity)`, i.iridescence.texture = u; })), t.iridescenceThicknessTexture && o.push(this._loader.loadTextureInfoAsync(`${e}/iridescenceThicknessTexture`, t.iridescenceThicknessTexture, (u) => { u.name = `${i.name} (Iridescence Thickness)`, i.iridescence.thicknessTexture = u; })), Promise.all(o).then(() => { }); } } Xi.RegisterExtension(hG, (c) => new FAe(c)); const dG = "KHR_materials_anisotropy"; class BAe { /** * @internal */ constructor(e) { this.name = dG, this.order = 195, this._loader = e, this.enabled = this._loader.isExtensionUsed(dG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadIridescencePropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadIridescencePropertiesAsync(e, t, i) { var r, s; if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const n = new Array(); return i.anisotropy.isEnabled = !0, i.anisotropy.intensity = (r = t.anisotropyStrength) !== null && r !== void 0 ? r : 0, i.anisotropy.angle = (s = t.anisotropyRotation) !== null && s !== void 0 ? 
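/*
 * Anisotropy mapping used here: anisotropyStrength -> anisotropy.intensity and
 * anisotropyRotation -> anisotropy.angle, with anisotropyTexture loaded below as the
 * anisotropy intensity texture. Note that the private method keeps the
 * _loadIridescencePropertiesAsync name shared with the iridescence extension above even
 * though it loads the anisotropy properties.
 */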
s : 0, t.anisotropyTexture && n.push(this._loader.loadTextureInfoAsync(`${e}/anisotropyTexture`, t.anisotropyTexture, (a) => { a.name = `${i.name} (Anisotropy Intensity)`, i.anisotropy.texture = a; })), Promise.all(n).then(() => { }); } } Xi.RegisterExtension(dG, (c) => new BAe(c)); const fG = "KHR_materials_emissive_strength"; class UAe { /** * @internal */ constructor(e) { this.name = fG, this.order = 170, this._loader = e, this.enabled = this._loader.isExtensionUsed(fG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => this._loader.loadMaterialPropertiesAsync(e, t, i).then(() => { this._loadEmissiveProperties(r, s, i); })); } _loadEmissiveProperties(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); t.emissiveStrength !== void 0 && i.emissiveColor.scaleToRef(t.emissiveStrength, i.emissiveColor); } } Xi.RegisterExtension(fG, (c) => new UAe(c)); const pG = "KHR_materials_sheen"; class VAe { /** * @internal */ constructor(e) { this.name = pG, this.order = 190, this._loader = e, this.enabled = this._loader.isExtensionUsed(pG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadSheenPropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadSheenPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return i.sheen.isEnabled = !0, i.sheen.intensity = 1, t.sheenColorFactor != null ? i.sheen.color = ze.FromArray(t.sheenColorFactor) : i.sheen.color = ze.Black(), t.sheenColorTexture && r.push(this._loader.loadTextureInfoAsync(`${e}/sheenColorTexture`, t.sheenColorTexture, (s) => { s.name = `${i.name} (Sheen Color)`, i.sheen.texture = s; })), t.sheenRoughnessFactor !== void 0 ? 
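/*
 * Sheen mapping used here: sheenColorFactor -> sheen.color (black when absent) and
 * sheenRoughnessFactor -> sheen.roughness (0 when absent), with albedoScaling enabled and
 * the sheen roughness decoupled from the main texture.
 */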
i.sheen.roughness = t.sheenRoughnessFactor : i.sheen.roughness = 0, t.sheenRoughnessTexture && (t.sheenRoughnessTexture.nonColorData = !0, r.push(this._loader.loadTextureInfoAsync(`${e}/sheenRoughnessTexture`, t.sheenRoughnessTexture, (s) => { s.name = `${i.name} (Sheen Roughness)`, i.sheen.textureRoughness = s; }))), i.sheen.albedoScaling = !0, i.sheen.useRoughnessFromMainTexture = !1, Promise.all(r).then(() => { }); } } Xi.RegisterExtension(pG, (c) => new VAe(c)); const _G = "KHR_materials_specular"; class kAe { /** * @internal */ constructor(e) { this.name = _G, this.order = 190, this._loader = e, this.enabled = this._loader.isExtensionUsed(_G); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadSpecularPropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadSpecularPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const r = new Array(); return t.specularFactor !== void 0 && (i.metallicF0Factor = t.specularFactor), t.specularColorFactor !== void 0 && (i.metallicReflectanceColor = ze.FromArray(t.specularColorFactor)), t.specularTexture && (t.specularTexture.nonColorData = !0, r.push(this._loader.loadTextureInfoAsync(`${e}/specularTexture`, t.specularTexture, (s) => { s.name = `${i.name} (Specular F0 Strength)`, i.metallicReflectanceTexture = s, i.useOnlyMetallicFromMetallicReflectanceTexture = !0; }))), t.specularColorTexture && r.push(this._loader.loadTextureInfoAsync(`${e}/specularColorTexture`, t.specularColorTexture, (s) => { s.name = `${i.name} (Specular F0 Color)`, i.reflectanceTexture = s; })), Promise.all(r).then(() => { }); } } Xi.RegisterExtension(_G, (c) => new kAe(c)); const mG = "KHR_materials_ior"; class eV { /** * @internal */ constructor(e) { this.name = mG, this.order = 180, this._loader = e, this.enabled = this._loader.isExtensionUsed(mG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadIorPropertiesAsync(r, s, i)), Promise.all(n).then(() => { }); }); } _loadIorPropertiesAsync(e, t, i) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); return t.ior !== void 0 ? i.indexOfRefraction = t.ior : i.indexOfRefraction = eV._DEFAULT_IOR, Promise.resolve(); } } eV._DEFAULT_IOR = 1.5; Xi.RegisterExtension(mG, (c) => new eV(c)); const L_ = "KHR_materials_variants"; class H8 { /** * @internal */ constructor(e) { this.name = L_, this._loader = e, this.enabled = this._loader.isExtensionUsed(L_); } /** @internal */ dispose() { this._loader = null; } /** * Gets the list of available variant names for this asset. * @param rootMesh The glTF root mesh * @returns the list of all the variant names for this model */ static GetAvailableVariants(e) { const t = this._GetExtensionMetadata(e); return t ? Object.keys(t.variants) : []; } /** * Gets the list of available variant names for this asset. * @param rootMesh The glTF root mesh * @returns the list of all the variant names for this model */ getAvailableVariants(e) { return H8.GetAvailableVariants(e); } /** * Select a variant given a variant name or a list of variant names. 
* @param rootMesh The glTF root mesh * @param variantName The variant name(s) to select. */ static SelectVariant(e, t) { const i = this._GetExtensionMetadata(e); if (!i) throw new Error(`Cannot select variant on a glTF mesh that does not have the ${L_} extension`); const r = (s) => { const n = i.variants[s]; if (n) for (const a of n) a.mesh.material = a.material; }; if (t instanceof Array) for (const s of t) r(s); else r(t); i.lastSelected = t; } /** * Select a variant given a variant name or a list of variant names. * @param rootMesh The glTF root mesh * @param variantName The variant name(s) to select. */ selectVariant(e, t) { return H8.SelectVariant(e, t); } /** * Reset back to the original before selecting a variant. * @param rootMesh The glTF root mesh */ static Reset(e) { const t = this._GetExtensionMetadata(e); if (!t) throw new Error(`Cannot reset on a glTF mesh that does not have the ${L_} extension`); for (const i of t.original) i.mesh.material = i.material; t.lastSelected = null; } /** * Reset back to the original before selecting a variant. * @param rootMesh The glTF root mesh */ reset(e) { return H8.Reset(e); } /** * Gets the last selected variant name(s) or null if original. * @param rootMesh The glTF root mesh * @returns The selected variant name(s). */ static GetLastSelectedVariant(e) { const t = this._GetExtensionMetadata(e); if (!t) throw new Error(`Cannot get the last selected variant on a glTF mesh that does not have the ${L_} extension`); return t.lastSelected; } /** * Gets the last selected variant name(s) or null if original. * @param rootMesh The glTF root mesh * @returns The selected variant name(s). */ getLastSelectedVariant(e) { return H8.GetLastSelectedVariant(e); } static _GetExtensionMetadata(e) { var t, i; return ((i = (t = e == null ? void 0 : e._internalMetadata) === null || t === void 0 ? void 0 : t.gltf) === null || i === void 0 ? void 0 : i[L_]) || null; } /** @internal */ onLoading() { const e = this._loader.gltf.extensions; if (e && e[this.name]) { const t = e[this.name]; this._variants = t.variants; } } /** * @internal */ _loadMeshPrimitiveAsync(e, t, i, r, s, n) { return Xi.LoadExtensionAsync(e, s, this.name, (a, l) => { const o = new Array(); return o.push(this._loader._loadMeshPrimitiveAsync(e, t, i, r, s, (u) => { if (n(u), u instanceof ke) { const h = Xi._GetDrawMode(e, s.mode), d = this._loader.rootBabylonMesh, f = d ? 
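/*
 * KHR_materials_variants usage (sketch): the static helpers on this class drive variant
 * switching after load; H8 is this extension's minified class name and rootMesh below is a
 * hypothetical glTF root mesh returned by the scene loader:
 *
 *   const names = H8.GetAvailableVariants(rootMesh); // e.g. ["midnight", "beach"]
 *   if (names.length) H8.SelectVariant(rootMesh, names[0]);
 *   H8.Reset(rootMesh);                              // restore the original materials
 */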
d._internalMetadata = d._internalMetadata || {} : {}, p = f.gltf = f.gltf || {}, m = p[L_] = p[L_] || { lastSelected: null, original: [], variants: {} }; m.original.push({ mesh: u, material: u.material }); for (let _ = 0; _ < l.mappings.length; ++_) { const v = l.mappings[_], C = es.Get(`${a}/mappings/${_}/material`, this._loader.gltf.materials, v.material); o.push(this._loader._loadMaterialAsync(`#/materials/${v.material}`, C, u, h, (x) => { for (let b = 0; b < v.variants.length; ++b) { const S = v.variants[b], M = es.Get(`/extensions/${L_}/variants/${S}`, this._variants, S); m.variants[M.name] = m.variants[M.name] || [], m.variants[M.name].push({ mesh: u, material: x }), u.onClonedObservable.add((R) => { const w = R; let V = null, k = w; do { if (k = k.parent, !k) return; V = H8._GetExtensionMetadata(k); } while (V === null); if (d && V === H8._GetExtensionMetadata(d)) { k._internalMetadata = {}; for (const L in d._internalMetadata) k._internalMetadata[L] = d._internalMetadata[L]; k._internalMetadata.gltf = []; for (const L in d._internalMetadata.gltf) k._internalMetadata.gltf[L] = d._internalMetadata.gltf[L]; k._internalMetadata.gltf[L_] = { lastSelected: null, original: [], variants: {} }; for (const L of V.original) k._internalMetadata.gltf[L_].original.push({ mesh: L.mesh, material: L.material }); for (const L in V.variants) if (Object.prototype.hasOwnProperty.call(V.variants, L)) { k._internalMetadata.gltf[L_].variants[L] = []; for (const B of V.variants[L]) k._internalMetadata.gltf[L_].variants[L].push({ mesh: B.mesh, material: B.material }); } V = k._internalMetadata.gltf[L_]; } for (const L of V.original) L.mesh === u && (L.mesh = w); for (const L of V.variants[M.name]) L.mesh === u && (L.mesh = w); }); } })); } } })), Promise.all(o).then(([u]) => u); }); } } Xi.RegisterExtension(L_, (c) => new H8(c)); class yj { /** * Creates the default options for the helper. */ static _GetDefaultOptions() { return { renderSize: 1024, samples: 4, lodGenerationScale: 1, lodGenerationOffset: -4, renderTargetTextureType: et.TEXTURETYPE_HALF_FLOAT, generateMipmaps: !0 }; } /** * constructor * @param options Defines the options we want to customize the helper * @param scene The scene to add the material to */ constructor(e, t) { this._opaqueRenderTarget = null, this._opaqueMeshesCache = [], this._transparentMeshesCache = [], this._materialObservers = {}, this._options = { ...yj._GetDefaultOptions(), ...e }, this._scene = t, this._scene._transmissionHelper = this, this.onErrorObservable = new Fe(), this._scene.onDisposeObservable.addOnce(() => { this.dispose(); }), this._parseScene(), this._setupRenderTargets(); } /** * Updates the background according to the new options * @param options */ updateOptions(e) { if (!Object.keys(e).filter((s) => this._options[s] !== e[s]).length) return; const i = { ...this._options, ...e }, r = this._options; this._options = i, i.renderSize !== r.renderSize || i.renderTargetTextureType !== r.renderTargetTextureType || i.generateMipmaps !== r.generateMipmaps || !this._opaqueRenderTarget ? this._setupRenderTargets() : (this._opaqueRenderTarget.samples = i.samples, this._opaqueRenderTarget.lodGenerationScale = i.lodGenerationScale, this._opaqueRenderTarget.lodGenerationOffset = i.lodGenerationOffset); } /** * Gets the opaque render target texture or null if not available. */ getOpaqueTarget() { return this._opaqueRenderTarget; } _shouldRenderAsTransmission(e) { return e ? 
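/*
 * Transmission helper options (sketch): the helper is created on demand by the
 * KHR_materials_transmission extension further below, but its opaque-pass options can be
 * tuned afterwards; helper is a hypothetical instance (stored on the scene as
 * scene._transmissionHelper):
 *
 *   helper.updateOptions({ renderSize: 512, samples: 1 }); // cheaper opaque pass
 *
 * Changing renderSize, renderTargetTextureType or generateMipmaps rebuilds the render
 * target, while samples and the lodGeneration* values are applied in place.
 */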
!!(e instanceof Ri && e.subSurface.isRefractionEnabled) : !1; } _addMesh(e) { this._materialObservers[e.uniqueId] = e.onMaterialChangedObservable.add(this._onMeshMaterialChanged.bind(this)), Ve.SetImmediate(() => { this._shouldRenderAsTransmission(e.material) ? (e.material.refractionTexture = this._opaqueRenderTarget, this._transparentMeshesCache.indexOf(e) === -1 && this._transparentMeshesCache.push(e)) : this._opaqueMeshesCache.indexOf(e) === -1 && this._opaqueMeshesCache.push(e); }); } _removeMesh(e) { e.onMaterialChangedObservable.remove(this._materialObservers[e.uniqueId]), delete this._materialObservers[e.uniqueId]; let t = this._transparentMeshesCache.indexOf(e); t !== -1 && this._transparentMeshesCache.splice(t, 1), t = this._opaqueMeshesCache.indexOf(e), t !== -1 && this._opaqueMeshesCache.splice(t, 1); } _parseScene() { this._scene.meshes.forEach(this._addMesh.bind(this)), this._scene.onNewMeshAddedObservable.add(this._addMesh.bind(this)), this._scene.onMeshRemovedObservable.add(this._removeMesh.bind(this)); } // When one of the meshes in the scene has its material changed, make sure that it's in the correct cache list. _onMeshMaterialChanged(e) { const t = this._transparentMeshesCache.indexOf(e), i = this._opaqueMeshesCache.indexOf(e); this._shouldRenderAsTransmission(e.material) ? (e.material instanceof Ri && (e.material.subSurface.refractionTexture = this._opaqueRenderTarget), i !== -1 ? (this._opaqueMeshesCache.splice(i, 1), this._transparentMeshesCache.push(e)) : t === -1 && this._transparentMeshesCache.push(e)) : t !== -1 ? (this._transparentMeshesCache.splice(t, 1), this._opaqueMeshesCache.push(e)) : i === -1 && this._opaqueMeshesCache.push(e); } /** * @internal * Check if the opaque render target has not been disposed and can still be used. * @returns */ _isRenderTargetValid() { var e; return ((e = this._opaqueRenderTarget) === null || e === void 0 ? void 0 : e.getInternalTexture()) !== null; } /** * @internal * Setup the render targets according to the specified options. */ _setupRenderTargets() { var e, t; this._opaqueRenderTarget && this._opaqueRenderTarget.dispose(), this._opaqueRenderTarget = new ra("opaqueSceneTexture", this._options.renderSize, this._scene, this._options.generateMipmaps, void 0, this._options.renderTargetTextureType), this._opaqueRenderTarget.ignoreCameraViewport = !0, this._opaqueRenderTarget.renderList = this._opaqueMeshesCache, this._opaqueRenderTarget.clearColor = (t = (e = this._options.clearColor) === null || e === void 0 ? void 0 : e.clone()) !== null && t !== void 0 ? t : this._scene.clearColor.clone(), this._opaqueRenderTarget.gammaSpace = !1, this._opaqueRenderTarget.lodGenerationScale = this._options.lodGenerationScale, this._opaqueRenderTarget.lodGenerationOffset = this._options.lodGenerationOffset, this._opaqueRenderTarget.samples = this._options.samples, this._opaqueRenderTarget.renderSprites = !0, this._opaqueRenderTarget.renderParticles = !0; let i, r; this._opaqueRenderTarget.onBeforeBindObservable.add((s) => { r = this._scene.environmentIntensity, this._scene.environmentIntensity = 1, i = this._scene.imageProcessingConfiguration.applyByPostProcess, this._options.clearColor ? 
s.clearColor.copyFrom(this._options.clearColor) : this._scene.clearColor.toLinearSpaceToRef(s.clearColor, this._scene.getEngine().useExactSrgbConversions), this._scene.imageProcessingConfiguration._applyByPostProcess = !0; }), this._opaqueRenderTarget.onAfterUnbindObservable.add(() => { this._scene.environmentIntensity = r, this._scene.imageProcessingConfiguration._applyByPostProcess = i; }), this._transparentMeshesCache.forEach((s) => { this._shouldRenderAsTransmission(s.material) && (s.material.refractionTexture = this._opaqueRenderTarget); }); } /** * Dispose all the elements created by the Helper. */ dispose() { this._scene._transmissionHelper = void 0, this._opaqueRenderTarget && (this._opaqueRenderTarget.dispose(), this._opaqueRenderTarget = null), this._transparentMeshesCache = [], this._opaqueMeshesCache = []; } } const gG = "KHR_materials_transmission"; class zAe { /** * @internal */ constructor(e) { this.name = gG, this.order = 175, this._loader = e, this.enabled = this._loader.isExtensionUsed(gG), this.enabled && (e.parent.transparencyAsCoverage = !0); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialBasePropertiesAsync(e, t, i)), n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadTransparentPropertiesAsync(r, t, i, s)), Promise.all(n).then(() => { }); }); } _loadTransparentPropertiesAsync(e, t, i, r) { var s, n; if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const a = i; if (a.subSurface.isRefractionEnabled = !0, a.subSurface.volumeIndexOfRefraction = 1, a.subSurface.useAlbedoToTintRefraction = !0, r.transmissionFactor !== void 0) { a.subSurface.refractionIntensity = r.transmissionFactor; const l = a.getScene(); a.subSurface.refractionIntensity && !l._transmissionHelper ? new yj({}, a.getScene()) : a.subSurface.refractionIntensity && !(!((s = l._transmissionHelper) === null || s === void 0) && s._isRenderTargetValid()) && ((n = l._transmissionHelper) === null || n === void 0 || n._setupRenderTargets()); } else return a.subSurface.refractionIntensity = 0, a.subSurface.isRefractionEnabled = !1, Promise.resolve(); return a.subSurface.minimumThickness = 0, a.subSurface.maximumThickness = 0, r.transmissionTexture ? 
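/*
 * Transmission mapping used here: transmissionFactor -> subSurface.refractionIntensity with
 * refraction enabled and volumeIndexOfRefraction reset to 1, and the first transmissive
 * material lazily creates the scene-wide transmission helper above so an opaque copy of the
 * scene is available as the refraction source.
 */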
(r.transmissionTexture.nonColorData = !0, this._loader.loadTextureInfoAsync(`${e}/transmissionTexture`, r.transmissionTexture, void 0).then((l) => { a.subSurface.refractionIntensityTexture = l, a.subSurface.useGltfStyleTextures = !0; })) : Promise.resolve(); } } Xi.RegisterExtension(gG, (c) => new zAe(c)); const vG = "KHR_materials_translucency"; class HAe { /** * @internal */ constructor(e) { this.name = vG, this.order = 174, this._loader = e, this.enabled = this._loader.isExtensionUsed(vG), this.enabled && (e.parent.transparencyAsCoverage = !0); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialBasePropertiesAsync(e, t, i)), n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadTranslucentPropertiesAsync(r, t, i, s)), Promise.all(n).then(() => { }); }); } _loadTranslucentPropertiesAsync(e, t, i, r) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); const s = i; if (s.subSurface.isTranslucencyEnabled = !0, s.subSurface.volumeIndexOfRefraction = 1, s.subSurface.minimumThickness = 0, s.subSurface.maximumThickness = 0, s.subSurface.useAlbedoToTintTranslucency = !0, r.translucencyFactor !== void 0) s.subSurface.translucencyIntensity = r.translucencyFactor; else return s.subSurface.translucencyIntensity = 0, s.subSurface.isTranslucencyEnabled = !1, Promise.resolve(); return r.translucencyTexture ? (r.translucencyTexture.nonColorData = !0, this._loader.loadTextureInfoAsync(`${e}/translucencyTexture`, r.translucencyTexture).then((n) => { s.subSurface.translucencyIntensityTexture = n; })) : Promise.resolve(); } } Xi.RegisterExtension(vG, (c) => new HAe(c)); const AG = "KHR_materials_volume"; class GAe { /** * @internal */ constructor(e) { this.name = AG, this.order = 173, this._loader = e, this.enabled = this._loader.isExtensionUsed(AG), this.enabled && this._loader._disableInstancedMesh++; } /** @internal */ dispose() { this.enabled && this._loader._disableInstancedMesh--, this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialBasePropertiesAsync(e, t, i)), n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadVolumePropertiesAsync(r, t, i, s)), Promise.all(n).then(() => { }); }); } _loadVolumePropertiesAsync(e, t, i, r) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); if (!i.subSurface.isRefractionEnabled && !i.subSurface.isTranslucencyEnabled || !r.thicknessFactor) return Promise.resolve(); i.subSurface.volumeIndexOfRefraction = i.indexOfRefraction; const s = r.attenuationDistance !== void 0 ? r.attenuationDistance : Number.MAX_VALUE; return i.subSurface.tintColorAtDistance = s, r.attenuationColor !== void 0 && r.attenuationColor.length == 3 && i.subSurface.tintColor.copyFromFloats(r.attenuationColor[0], r.attenuationColor[1], r.attenuationColor[2]), i.subSurface.minimumThickness = 0, i.subSurface.maximumThickness = r.thicknessFactor, i.subSurface.useThicknessAsDepth = !0, r.thicknessTexture ? 
(r.thicknessTexture.nonColorData = !0, this._loader.loadTextureInfoAsync(`${e}/thicknessTexture`, r.thicknessTexture).then((n) => { i.subSurface.thicknessTexture = n, i.subSurface.useGltfStyleTextures = !0; })) : Promise.resolve(); } } Xi.RegisterExtension(AG, (c) => new GAe(c)); const yG = "KHR_materials_dispersion"; class KAe { /** * @internal */ constructor(e) { this.name = yG, this.order = 174, this._loader = e, this.enabled = this._loader.isExtensionUsed(yG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return n.push(this._loader.loadMaterialBasePropertiesAsync(e, t, i)), n.push(this._loader.loadMaterialPropertiesAsync(e, t, i)), n.push(this._loadDispersionPropertiesAsync(r, t, i, s)), Promise.all(n).then(() => { }); }); } _loadDispersionPropertiesAsync(e, t, i, r) { if (!(i instanceof Ri)) throw new Error(`${e}: Material type not supported`); return !i.subSurface.isRefractionEnabled || !r.dispersion || (i.subSurface.isDispersionEnabled = !0, i.subSurface.dispersion = r.dispersion), Promise.resolve(); } } Xi.RegisterExtension(yG, (c) => new KAe(c)); const CG = "KHR_mesh_quantization"; class WAe { /** * @internal */ constructor(e) { this.name = CG, this.enabled = e.isExtensionUsed(CG); } /** @internal */ dispose() { } } Xi.RegisterExtension(CG, (c) => new WAe(c)); const xG = "KHR_texture_basisu"; class jAe { /** * @internal */ constructor(e) { this.name = xG, this._loader = e, this.enabled = e.isExtensionUsed(xG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ _loadTextureAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = t.sampler == null ? Xi.DefaultSampler : es.Get(`${e}/sampler`, this._loader.gltf.samplers, t.sampler), a = es.Get(`${r}/source`, this._loader.gltf.images, s.source); return this._loader._createTextureAsync(e, n, a, (l) => { i(l); }, t._textureInfo.nonColorData ? { useRGBAIfASTCBC7NotAvailableWhenUASTC: !0 } : void 0, !t._textureInfo.nonColorData); }); } } Xi.RegisterExtension(xG, (c) => new jAe(c)); const bG = "KHR_texture_transform"; class XAe { /** * @internal */ constructor(e) { this.name = bG, this._loader = e, this.enabled = this._loader.isExtensionUsed(bG); } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadTextureInfoAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => this._loader.loadTextureInfoAsync(e, t, (n) => { if (!(n instanceof De)) throw new Error(`${r}: Texture type not supported`); s.offset && (n.uOffset = s.offset[0], n.vOffset = s.offset[1]), n.uRotationCenter = 0, n.vRotationCenter = 0, s.rotation && (n.wAng = -s.rotation), s.scale && (n.uScale = s.scale[0], n.vScale = s.scale[1]), s.texCoord != null && (n.coordinatesIndex = s.texCoord), i(n); })); } } Xi.RegisterExtension(bG, (c) => new XAe(c)); const EG = "KHR_xmp_json_ld"; class YAe { /** * @internal */ constructor(e) { this.name = EG, this.order = 100, this._loader = e, this.enabled = this._loader.isExtensionUsed(EG); } /** @internal */ dispose() { this._loader = null; } /** * Called after the loader state changes to LOADING. */ onLoading() { var e, t, i; if (this._loader.rootBabylonMesh === null) return; const r = (e = this._loader.gltf.extensions) === null || e === void 0 ? void 0 : e.KHR_xmp_json_ld, s = (i = (t = this._loader.gltf.asset) === null || t === void 0 ? void 0 : t.extensions) === null || i === void 0 ? 
void 0 : i.KHR_xmp_json_ld; if (r && s) { const n = +s.packet; r.packets && n < r.packets.length && (this._loader.rootBabylonMesh.metadata = this._loader.rootBabylonMesh.metadata || {}, this._loader.rootBabylonMesh.metadata.xmp = r.packets[n]); } } } Xi.RegisterExtension(EG, (c) => new YAe(c)); function JD(c, e, t, i) { return ze.FromArray(e, t).scale(i); } function QAe(c, e, t, i) { return e[t + 3] * i; } function ll(c, e, t, i) { return e[t] * i; } function TG(c, e, t, i) { return -e[t] * i; } function IB(c, e, t, i) { return e[t + 1] * i; } function Iq(c, e, t, i) { return e[t] * i * 2; } function tz(c) { return { scale: [ new Mo(nt.ANIMATIONTYPE_FLOAT, `${c}.uScale`, ll, () => 2), new Mo(nt.ANIMATIONTYPE_FLOAT, `${c}.vScale`, IB, () => 2) ], offset: [ new Mo(nt.ANIMATIONTYPE_FLOAT, `${c}.uOffset`, ll, () => 2), new Mo(nt.ANIMATIONTYPE_FLOAT, `${c}.vOffset`, IB, () => 2) ], rotation: [new Mo(nt.ANIMATIONTYPE_FLOAT, `${c}.wAng`, TG, () => 1)] }; } class GE extends ON { /** @internal */ buildAnimations(e, t, i, r, s) { s(e._babylonCamera, this._buildAnimation(t, i, r)); } } class Mo extends ON { /** @internal */ buildAnimations(e, t, i, r, s) { for (const n in e._data) s(e._data[n].babylonMaterial, this._buildAnimation(t, i, r)); } } class E9 extends ON { /** @internal */ buildAnimations(e, t, i, r, s) { s(e._babylonLight, this._buildAnimation(t, i, r)); } } const $Ae = { __array__: { __target__: !0, ...w9 } }, ZAe = { __array__: { __target__: !0, orthographic: { xmag: [ new GE(nt.ANIMATIONTYPE_FLOAT, "orthoLeft", TG, () => 1), new GE(nt.ANIMATIONTYPE_FLOAT, "orthoRight", IB, () => 1) ], ymag: [ new GE(nt.ANIMATIONTYPE_FLOAT, "orthoBottom", TG, () => 1), new GE(nt.ANIMATIONTYPE_FLOAT, "orthoTop", IB, () => 1) ], zfar: [new GE(nt.ANIMATIONTYPE_FLOAT, "maxZ", ll, () => 1)], znear: [new GE(nt.ANIMATIONTYPE_FLOAT, "minZ", ll, () => 1)] }, perspective: { yfov: [new GE(nt.ANIMATIONTYPE_FLOAT, "fov", ll, () => 1)], zfar: [new GE(nt.ANIMATIONTYPE_FLOAT, "maxZ", ll, () => 1)], znear: [new GE(nt.ANIMATIONTYPE_FLOAT, "minZ", ll, () => 1)] } } }, qAe = { __array__: { __target__: !0, pbrMetallicRoughness: { baseColorFactor: [ new Mo(nt.ANIMATIONTYPE_COLOR3, "albedoColor", JD, () => 4), new Mo(nt.ANIMATIONTYPE_FLOAT, "alpha", QAe, () => 4) ], metallicFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "metallic", ll, () => 1)], roughnessFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "roughness", ll, () => 1)], baseColorTexture: { extensions: { KHR_texture_transform: tz("albedoTexture") } } }, emissiveFactor: [new Mo(nt.ANIMATIONTYPE_COLOR3, "emissiveColor", JD, () => 3)], normalTexture: { scale: [new Mo(nt.ANIMATIONTYPE_FLOAT, "bumpTexture.level", ll, () => 1)] }, occlusionTexture: { strength: [new Mo(nt.ANIMATIONTYPE_FLOAT, "ambientTextureStrength", ll, () => 1)], extensions: { KHR_texture_transform: tz("ambientTexture") } }, emissiveTexture: { extensions: { KHR_texture_transform: tz("emissiveTexture") } }, extensions: { KHR_materials_ior: { ior: [new Mo(nt.ANIMATIONTYPE_FLOAT, "indexOfRefraction", ll, () => 1)] }, KHR_materials_clearcoat: { clearcoatFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "clearCoat.intensity", ll, () => 1)], clearcoatRoughnessFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "clearCoat.roughness", ll, () => 1)] }, KHR_materials_sheen: { sheenColorFactor: [new Mo(nt.ANIMATIONTYPE_COLOR3, "sheen.color", JD, () => 3)], sheenRoughnessFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "sheen.roughness", ll, () => 1)] }, KHR_materials_specular: { specularFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "metallicF0Factor", ll, () => 1)], 
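/*
 * Animation pointer tree (sketch): this object mirrors the glTF JSON layout so a
 * KHR_animation_pointer path can be walked segment by segment; for example the pointer
 * "/materials/2/pbrMetallicRoughness/baseColorFactor" resolves through the materials branch
 * above to two animations on the Babylon material, a Color3 animation on "albedoColor" and a
 * float animation on "alpha", built by the JD / QAe value getters defined earlier.
 */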
specularColorFactor: [new Mo(nt.ANIMATIONTYPE_COLOR3, "metallicReflectanceColor", JD, () => 3)] }, KHR_materials_emissive_strength: { emissiveStrength: [new Mo(nt.ANIMATIONTYPE_FLOAT, "emissiveIntensity", ll, () => 1)] }, KHR_materials_transmission: { transmissionFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "subSurface.refractionIntensity", ll, () => 1)] }, KHR_materials_volume: { attenuationColor: [new Mo(nt.ANIMATIONTYPE_COLOR3, "subSurface.tintColor", JD, () => 3)], attenuationDistance: [new Mo(nt.ANIMATIONTYPE_FLOAT, "subSurface.tintColorAtDistance", ll, () => 1)], thicknessFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "subSurface.maximumThickness", ll, () => 1)] }, KHR_materials_dispersion: { dispersion: [new Mo(nt.ANIMATIONTYPE_FLOAT, "subSurface.dispersion", ll, () => 1)] }, KHR_materials_iridescence: { iridescenceFactor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "iridescence.intensity", ll, () => 1)], iridescenceIor: [new Mo(nt.ANIMATIONTYPE_FLOAT, "iridescence.indexOfRefraction", ll, () => 1)], iridescenceThicknessMinimum: [new Mo(nt.ANIMATIONTYPE_FLOAT, "iridescence.minimumThickness", ll, () => 1)], iridescenceThicknessMaximum: [new Mo(nt.ANIMATIONTYPE_FLOAT, "iridescence.maximumThickness", ll, () => 1)] }, KHR_materials_anisotropy: { anisotropyStrength: [new Mo(nt.ANIMATIONTYPE_FLOAT, "anisotropy.intensity", ll, () => 1)], anisotropyRotation: [new Mo(nt.ANIMATIONTYPE_FLOAT, "anisotropy.angle", ll, () => 1)] } } } }, JAe = { KHR_lights_punctual: { lights: { __array__: { __target__: !0, color: [new E9(nt.ANIMATIONTYPE_COLOR3, "diffuse", JD, () => 3)], intensity: [new E9(nt.ANIMATIONTYPE_FLOAT, "intensity", ll, () => 1)], range: [new E9(nt.ANIMATIONTYPE_FLOAT, "range", ll, () => 1)], spot: { innerConeAngle: [new E9(nt.ANIMATIONTYPE_FLOAT, "innerAngle", Iq, () => 1)], outerConeAngle: [new E9(nt.ANIMATIONTYPE_FLOAT, "angle", Iq, () => 1)] } } } } }, e6e = { nodes: $Ae, materials: qAe, cameras: ZAe, extensions: JAe }, SG = "KHR_animation_pointer"; class t6e { /** * @internal */ constructor(e) { this.name = SG, this._loader = e; } /** * Defines whether this extension is enabled. */ get enabled() { return this._loader.isExtensionUsed(SG); } /** @internal */ dispose() { this._loader = null; } /** * Loads a glTF animation channel. * @param context The context when loading the asset * @param animationContext The context of the animation when loading the asset * @param animation The glTF animation property * @param channel The glTF animation channel property * @param onLoad Called for each animation loaded * @returns A void promise that resolves when the load is complete or null if not handled */ _loadAnimationChannelAsync(e, t, i, r, s) { var n; const a = (n = r.target.extensions) === null || n === void 0 ? void 0 : n.KHR_animation_pointer; if (!a) return null; r.target.path !== "pointer" && Ce.Warn(`${e}/target/path: Value (${r.target.path}) must be (pointer) when using the ${this.name} extension`), r.target.node != null && Ce.Warn(`${e}/target/node: Value (${r.target.node}) must not be present when using the ${this.name} extension`); const l = `${e}/extensions/${this.name}`, o = a.pointer; if (!o) throw new Error(`${l}: Pointer is missing`); const u = this._parseAnimationPointer(`${l}/pointer`, o); return u ? this._loader._loadAnimationChannelFromTargetInfoAsync(e, t, i, r, u, s) : (Ce.Warn(`${l}/pointer: Invalid pointer (${o}) skipped`), null); } /** * The pointer string is represented by a [JSON pointer](https://datatracker.ietf.org/doc/html/rfc6901). 
* &lt;animationPointer&gt; := /&lt;rootNode&gt;/&lt;assetIndex&gt;/&lt;propertyPath&gt; * &lt;rootNode&gt; := "nodes" | "materials" | "meshes" | "cameras" | "extensions" * &lt;assetIndex&gt; := &lt;digit&gt; | &lt;name&gt; * &lt;propertyPath&gt; := &lt;extensionPath&gt; | &lt;standardPath&gt; * &lt;extensionPath&gt; := "extensions"/&lt;name&gt;/&lt;standardPath&gt; * &lt;standardPath&gt; := &lt;name&gt; | &lt;name&gt;/&lt;standardPath&gt; * &lt;name&gt; := W+ * &lt;digit&gt; := D+ * * Examples: * - "/nodes/0/rotation" * - "/materials/2/emissiveFactor" * - "/materials/2/pbrMetallicRoughness/baseColorFactor" * - "/materials/2/extensions/KHR_materials_emissive_strength/emissiveStrength" */ _parseAnimationPointer(e, t) { if (!t.startsWith("/")) return Ce.Warn(`${e}: Value (${t}) must start with a slash`), null; const i = t.split("/"); i.shift(); let r = e6e, s = this._loader.gltf, n; for (const a of i) { if (r.__array__) r = r.__array__; else if (r = r[a], !r) return null; s = s && s[a], r.__target__ && (n = s); } return !n || !Array.isArray(r) ? null : { target: n, properties: r }; } } Xi.RegisterExtension(SG, (c) => new t6e(c)); const MG = "MSFT_audio_emitter"; class i6e { /** * @internal */ constructor(e) { this.name = MG, this._loader = e, this.enabled = this._loader.isExtensionUsed(MG); } /** @internal */ dispose() { this._loader = null, this._clips = null, this._emitters = null; } /** @internal */ onLoading() { const e = this._loader.gltf.extensions; if (e && e[this.name]) { const t = e[this.name]; this._clips = t.clips, this._emitters = t.emitters, es.Assign(this._clips), es.Assign(this._emitters); } } /** * @internal */ loadSceneAsync(e, t) { return Xi.LoadExtensionAsync(e, t, this.name, (i, r) => { const s = new Array(); s.push(this._loader.loadSceneAsync(e, t)); for (const n of r.emitters) { const a = es.Get(`${i}/emitters`, this._emitters, n); if (a.refDistance != null || a.maxDistance != null || a.rolloffFactor != null || a.distanceModel != null || a.innerAngle != null || a.outerAngle != null) throw new Error(`${i}: Direction or Distance properties are not allowed on emitters attached to a scene`); s.push(this._loadEmitterAsync(`${i}/emitters/${a.index}`, a)); } return Promise.all(s).then(() => { }); }); } /** * @internal */ loadNodeAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { const n = new Array(); return this._loader.loadNodeAsync(r, t, (a) => { for (const l of s.emitters) { const o = es.Get(`${r}/emitters`, this._emitters, l); n.push(this._loadEmitterAsync(`${r}/emitters/${o.index}`, o).then(() => { for (const u of o._babylonSounds) u.attachToMesh(a), (o.innerAngle != null || o.outerAngle != null) && (u.setLocalDirectionToMesh(D.Forward()), u.setDirectionalCone(2 * Ve.ToDegrees(o.innerAngle == null ? Math.PI : o.innerAngle), 2 * Ve.ToDegrees(o.outerAngle == null ? Math.PI : o.outerAngle), 0)); })); } i(a); }).then((a) => Promise.all(n).then(() => a)); }); } /** * @internal */ loadAnimationAsync(e, t) { return Xi.LoadExtensionAsync(e, t, this.name, (i, r) => this._loader.loadAnimationAsync(e, t).then((s) => { const n = new Array(); es.Assign(r.events); for (const a of r.events) n.push(this._loadAnimationEventAsync(`${i}/events/${a.index}`, e, t, a, s)); return Promise.all(n).then(() => s); })); } _loadClipAsync(e, t) { if (t._objectURL) return t._objectURL; let i; if (t.uri) i = this._loader.loadUriAsync(e, t, t.uri); else { const r = es.Get(`${e}/bufferView`, this._loader.gltf.bufferViews, t.bufferView); i = this._loader.loadBufferViewAsync(`/bufferViews/${r.index}`, r); } return t._objectURL = i.then((r) => URL.createObjectURL(new Blob([r], { type: t.mimeType }))), t._objectURL; } _loadEmitterAsync(e, t) { if (t._babylonSounds = t._babylonSounds || [], !t._babylonData) { const i = new Array(), r = t.name || `emitter${t.index}`, s = { loop: !1, autoplay: !1, volume: t.volume == null ? 
1 : t.volume }; for (let a = 0; a < t.clips.length; a++) { const l = `/extensions/${this.name}/clips`, o = es.Get(l, this._clips, t.clips[a].clip); i.push(this._loadClipAsync(`${l}/${t.clips[a].clip}`, o).then((u) => { const h = t._babylonSounds[a] = new I4(r, u, this._loader.babylonScene, null, s); h.refDistance = t.refDistance || 1, h.maxDistance = t.maxDistance || 256, h.rolloffFactor = t.rolloffFactor || 1, h.distanceModel = t.distanceModel || "exponential"; })); } const n = Promise.all(i).then(() => { const a = t.clips.map((o) => o.weight || 1), l = new Nte(t.loop || !1, t._babylonSounds, a); t.innerAngle && (l.directionalConeInnerAngle = 2 * Ve.ToDegrees(t.innerAngle)), t.outerAngle && (l.directionalConeOuterAngle = 2 * Ve.ToDegrees(t.outerAngle)), t.volume && (l.volume = t.volume), t._babylonData.sound = l; }); t._babylonData = { loaded: n }; } return t._babylonData.loaded; } _getEventAction(e, t, i, r, s) { switch (i) { case "play": return (n) => { const a = (s || 0) + (n - r); t.play(a); }; case "stop": return () => { t.stop(); }; case "pause": return () => { t.pause(); }; default: throw new Error(`${e}: Unsupported action ${i}`); } } _loadAnimationEventAsync(e, t, i, r, s) { if (s.targetedAnimations.length == 0) return Promise.resolve(); const n = s.targetedAnimations[0], a = r.emitter, l = es.Get(`/extensions/${this.name}/emitters`, this._emitters, a); return this._loadEmitterAsync(e, l).then(() => { const o = l._babylonData.sound; if (o) { const u = new KB(r.time, this._getEventAction(e, o, r.action, r.time, r.startOffset)); n.animation.addEvent(u), s.onAnimationGroupEndObservable.add(() => { o.stop(); }), s.onAnimationGroupPauseObservable.add(() => { o.pause(); }); } }); } } Xi.RegisterExtension(MG, (c) => new i6e(c)); const RG = "MSFT_lod"; class r6e { /** * @internal */ constructor(e) { this.name = RG, this.order = 100, this.maxLODsToLoad = 10, this.onNodeLODsLoadedObservable = new Fe(), this.onMaterialLODsLoadedObservable = new Fe(), this._bufferLODs = new Array(), this._nodeIndexLOD = null, this._nodeSignalLODs = new Array(), this._nodePromiseLODs = new Array(), this._nodeBufferLODs = new Array(), this._materialIndexLOD = null, this._materialSignalLODs = new Array(), this._materialPromiseLODs = new Array(), this._materialBufferLODs = new Array(), this._loader = e, this.enabled = this._loader.isExtensionUsed(RG); } /** @internal */ dispose() { this._loader = null, this._nodeIndexLOD = null, this._nodeSignalLODs.length = 0, this._nodePromiseLODs.length = 0, this._nodeBufferLODs.length = 0, this._materialIndexLOD = null, this._materialSignalLODs.length = 0, this._materialPromiseLODs.length = 0, this._materialBufferLODs.length = 0, this.onMaterialLODsLoadedObservable.clear(), this.onNodeLODsLoadedObservable.clear(); } /** @internal */ onReady() { for (let e = 0; e < this._nodePromiseLODs.length; e++) { const t = Promise.all(this._nodePromiseLODs[e]).then(() => { e !== 0 && (this._loader.endPerformanceCounter(`Node LOD ${e}`), this._loader.log(`Loaded node LOD ${e}`)), this.onNodeLODsLoadedObservable.notifyObservers(e), e !== this._nodePromiseLODs.length - 1 && (this._loader.startPerformanceCounter(`Node LOD ${e + 1}`), this._loadBufferLOD(this._nodeBufferLODs, e + 1), this._nodeSignalLODs[e] && this._nodeSignalLODs[e].resolve()); }); this._loader._completePromises.push(t); } for (let e = 0; e < this._materialPromiseLODs.length; e++) { const t = Promise.all(this._materialPromiseLODs[e]).then(() => { e !== 0 && (this._loader.endPerformanceCounter(`Material LOD ${e}`), 
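/*
 * MSFT_lod progression (sketch): the lowest LOD is delivered first and higher LODs are
 * streamed in afterwards, with the promise lists above gating each level. A consumer can cap
 * or observe this from the file loader, assuming loader is the glTF file loader instance and
 * that it exposes onExtensionLoadedObservable:
 *
 *   loader.onExtensionLoadedObservable.add((ext) => {
 *     if (ext.name === "MSFT_lod") {
 *       ext.maxLODsToLoad = 1; // stop after the lowest LOD
 *       ext.onNodeLODsLoadedObservable.add((lod) => console.log("node LOD", lod, "ready"));
 *     }
 *   });
 */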
this._loader.log(`Loaded material LOD ${e}`)), this.onMaterialLODsLoadedObservable.notifyObservers(e), e !== this._materialPromiseLODs.length - 1 && (this._loader.startPerformanceCounter(`Material LOD ${e + 1}`), this._loadBufferLOD(this._materialBufferLODs, e + 1), this._materialSignalLODs[e] && this._materialSignalLODs[e].resolve()); }); this._loader._completePromises.push(t); } } /** * @internal */ loadSceneAsync(e, t) { const i = this._loader.loadSceneAsync(e, t); return this._loadBufferLOD(this._bufferLODs, 0), i; } /** * @internal */ loadNodeAsync(e, t, i) { return Xi.LoadExtensionAsync(e, t, this.name, (r, s) => { let n; const a = this._getLODs(r, t, this._loader.gltf.nodes, s.ids); this._loader.logOpen(`${r}`); for (let l = 0; l < a.length; l++) { const o = a[l]; l !== 0 && (this._nodeIndexLOD = l, this._nodeSignalLODs[l] = this._nodeSignalLODs[l] || new rO()); const u = (d) => { i(d), d.setEnabled(!1); }, h = this._loader.loadNodeAsync(`/nodes/${o.index}`, o, u).then((d) => { if (l !== 0) { const f = a[l - 1]; f._babylonTransformNode && (this._disposeTransformNode(f._babylonTransformNode), delete f._babylonTransformNode); } return d.setEnabled(!0), d; }); this._nodePromiseLODs[l] = this._nodePromiseLODs[l] || [], l === 0 ? n = h : (this._nodeIndexLOD = null, this._nodePromiseLODs[l].push(h)); } return this._loader.logClose(), n; }); } /** * @internal */ _loadMaterialAsync(e, t, i, r, s) { return this._nodeIndexLOD ? null : Xi.LoadExtensionAsync(e, t, this.name, (n, a) => { let l; const o = this._getLODs(n, t, this._loader.gltf.materials, a.ids); this._loader.logOpen(`${n}`); for (let u = 0; u < o.length; u++) { const h = o[u]; u !== 0 && (this._materialIndexLOD = u); const d = this._loader._loadMaterialAsync(`/materials/${h.index}`, h, i, r, (f) => { u === 0 && s(f); }).then((f) => { if (u !== 0) { s(f); const p = o[u - 1]._data; p[r] && (this._disposeMaterials([p[r].babylonMaterial]), delete p[r]); } return f; }); this._materialPromiseLODs[u] = this._materialPromiseLODs[u] || [], u === 0 ? l = d : (this._materialIndexLOD = null, this._materialPromiseLODs[u].push(d)); } return this._loader.logClose(), l; }); } /** * @internal */ _loadUriAsync(e, t, i) { if (this._nodeIndexLOD !== null) { this._loader.log("deferred"); const r = this._nodeIndexLOD - 1; return this._nodeSignalLODs[r] = this._nodeSignalLODs[r] || new rO(), this._nodeSignalLODs[this._nodeIndexLOD - 1].promise.then(() => this._loader.loadUriAsync(e, t, i)); } else if (this._materialIndexLOD !== null) { this._loader.log("deferred"); const r = this._materialIndexLOD - 1; return this._materialSignalLODs[r] = this._materialSignalLODs[r] || new rO(), this._materialSignalLODs[r].promise.then(() => this._loader.loadUriAsync(e, t, i)); } return null; } /** * @internal */ loadBufferAsync(e, t, i, r) { if (this._loader.parent.useRangeRequests && !t.uri) { if (!this._loader.bin) throw new Error(`${e}: Uri is missing or the binary glTF is missing its binary chunk`); const s = (n, a) => { const l = i, o = l + r - 1; let u = n[a]; return u ? (u.start = Math.min(u.start, l), u.end = Math.max(u.end, o)) : (u = { start: l, end: o, loaded: new rO() }, n[a] = u), u.loaded.promise.then((h) => new Uint8Array(h.buffer, h.byteOffset + i - u.start, r)); }; return this._loader.log("deferred"), this._nodeIndexLOD !== null ? s(this._nodeBufferLODs, this._nodeIndexLOD) : this._materialIndexLOD !== null ? 
s(this._materialBufferLODs, this._materialIndexLOD) : s(this._bufferLODs, 0); } return null; } _loadBufferLOD(e, t) { const i = e[t]; i && (this._loader.log(`Loading buffer range [${i.start}-${i.end}]`), this._loader.bin.readAsync(i.start, i.end - i.start + 1).then((r) => { i.loaded.resolve(r); }, (r) => { i.loaded.reject(r); })); } /** * Gets an array of LOD properties from lowest to highest. * @param context * @param property * @param array * @param ids */ _getLODs(e, t, i, r) { if (this.maxLODsToLoad <= 0) throw new Error("maxLODsToLoad must be greater than zero"); const s = []; for (let n = r.length - 1; n >= 0; n--) if (s.push(es.Get(`${e}/ids/${r[n]}`, i, r[n])), s.length === this.maxLODsToLoad) return s; return s.push(t), s; } _disposeTransformNode(e) { const t = [], i = e.material; i && t.push(i); for (const s of e.getChildMeshes()) s.material && t.push(s.material); e.dispose(); const r = t.filter((s) => this._loader.babylonScene.meshes.every((n) => n.material != s)); this._disposeMaterials(r); } _disposeMaterials(e) { const t = {}; for (const i of e) { for (const r of i.getActiveTextures()) t[r.uniqueId] = r; i.dispose(); } for (const i in t) for (const r of this._loader.babylonScene.materials) r.hasTexture(t[i]) && delete t[i]; for (const i in t) t[i].dispose(); } } Xi.RegisterExtension(RG, (c) => new r6e(c)); const PG = "MSFT_minecraftMesh"; class s6e { /** @internal */ constructor(e) { this.name = PG, this._loader = e, this.enabled = this._loader.isExtensionUsed(PG); } /** @internal */ dispose() { this._loader = null; } /** @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtraAsync(e, t, this.name, (r, s) => { if (s) { if (!(i instanceof Ri)) throw new Error(`${r}: Material type not supported`); const n = this._loader.loadMaterialPropertiesAsync(e, t, i); return i.needAlphaBlending() && (i.forceDepthWrite = !0, i.separateCullingPass = !0), i.backFaceCulling = i.forceDepthWrite, i.twoSidedLighting = !0, n; } return null; }); } } Xi.RegisterExtension(PG, (c) => new s6e(c)); const IG = "MSFT_sRGBFactors"; class n6e { /** @internal */ constructor(e) { this.name = IG, this._loader = e, this.enabled = this._loader.isExtensionUsed(IG); } /** @internal */ dispose() { this._loader = null; } /** @internal */ loadMaterialPropertiesAsync(e, t, i) { return Xi.LoadExtraAsync(e, t, this.name, (r, s) => { if (s) { if (!(i instanceof Ri)) throw new Error(`${r}: Material type not supported`); const n = this._loader.loadMaterialPropertiesAsync(e, t, i), a = i.getScene().getEngine().useExactSrgbConversions; return i.albedoTexture || i.albedoColor.toLinearSpaceToRef(i.albedoColor, a), i.reflectivityTexture || i.reflectivityColor.toLinearSpaceToRef(i.reflectivityColor, a), n; } return null; }); } } Xi.RegisterExtension(IG, (c) => new n6e(c)); const a6e = { "lifecycle/onStart": Ow.ClassName, "lifecycle/onTick": LI.ClassName, log: MP.ClassName, "flow/delay": Iw.ClassName, "customEvent/send": Pw.ClassName, "customEvent/receive": wI.ClassName, "flow/sequence": DP.ClassName, "world/get": wP.ClassName, "world/set": Rw.ClassName, "flow/doN": PP.ClassName, "variable/get": OP.ClassName, "variable/set": RP.ClassName, "flow/whileLoop": IP.ClassName, "math/random": FP.ClassName, "math/e": UP.ClassName, "math/pi": VP.ClassName, "math/inf": kP.ClassName, "math/nan": zP.ClassName, "math/abs": HP.ClassName, "math/sign": GP.ClassName, "math/trunc": KP.ClassName, "math/floor": WP.ClassName, "math/ceil": jP.ClassName, "math/fract": XP.ClassName, "math/neg": YP.ClassName, "math/add": v5.ClassName, 
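/*
  Usage note (illustrative sketch, not part of this bundle): the glTF loader extensions in this
  section (MSFT_audio_emitter, MSFT_lod, MSFT_minecraftMesh, MSFT_sRGBFactors, ...) all share the
  same shape: a class exposing `name`, `enabled`, and `dispose()` plus optional loader hooks,
  handed to the loader's RegisterExtension call (minified here as `Xi.RegisterExtension`). With
  the unminified packages a custom extension would look roughly like this (extension name, class
  name, and import path are placeholders):

      import { GLTFLoader } from "@babylonjs/loaders/glTF/2.0";

      const NAME = "MY_custom_extension";
      class MyCustomExtension {
          constructor(loader) {
              this.name = NAME;
              this._loader = loader;
              this.enabled = loader.isExtensionUsed(NAME);
          }
          dispose() { this._loader = null; }
          // optional hooks such as loadNodeAsync or loadMaterialPropertiesAsync go here
      }
      GLTFLoader.RegisterExtension(NAME, (loader) => new MyCustomExtension(loader));
*/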
"math/sub": A5.ClassName, "math/mul": LP.ClassName, "math/div": NP.ClassName, "math/rem": QP.ClassName, "math/min": $P.ClassName, "math/max": ZP.ClassName, "math/clamp": qP.ClassName, "math/saturate": JP.ClassName, "math/mix": eI.ClassName, "math/eq": tI.ClassName, "math/lt": iI.ClassName, "math/le": IN.ClassName, "math/gt": rI.ClassName, "math/ge": sI.ClassName, "math/isnan": nI.ClassName, "math/isinf": DN.ClassName, "math/rad": aI.ClassName, "math/deg": oI.ClassName, "math/sin": lI.ClassName, "math/cos": cI.ClassName, "math/tan": uI.ClassName, "math/asin": hI.ClassName, "math/acos": dI.ClassName, "math/atan": fI.ClassName, "math/atan2": pI.ClassName, "math/sinh": _I.ClassName, "math/cosh": mI.ClassName, "math/tanh": gI.ClassName, "math/asinh": vI.ClassName, "math/acosh": AI.ClassName, "math/atanh": yI.ClassName, "math/exp": CI.ClassName, "math/log": xI.ClassName, "math/log2": bI.ClassName, "math/log10": EI.ClassName, "math/sqrt": TI.ClassName, "math/cbrt": SI.ClassName, "math/pow": MI.ClassName, "math/length": RI.ClassName, "math/normalize": PI.ClassName, "math/dot": BP.ClassName, "math/cross": II.ClassName, "math/rotate2d": DI.ClassName, "math/rotate3d": OI.ClassName }, o6e = { float2: "Vector2", float3: "Vector3", float4: "Vector4" }; function DG(c, e, t) { if (c.type !== void 0) { const i = e.types && e.types[c.type]; if (!i) throw new Error(`${t}: Unknown type: ${c.type}`); const r = i.signature; if (!r) throw new Error(`${t}: Type ${c.type} has no signature`); const s = o6e[r]; return { value: c.value, className: s }; } else return c.value; } function l6e(c, e, t) { var i; const r = {}, s = (i = c.configuration) !== null && i !== void 0 ? i : []; for (const n of s) if (n.id === "customEvent") { const a = e.customEvents && e.customEvents[n.value]; if (!a) throw new Error(`/extensions/KHR_interactivity/nodes/${t}: Unknown custom event: ${n.value}`); r.eventId = a.id, r.eventData = a.values.map((l) => l.id); } else if (n.id === "variable") { const a = e.variables && e.variables[n.value]; if (!a) throw new Error(`/extensions/KHR_interactivity/nodes/${t}: Unknown variable: ${n.value}`); r.variableName = a.id; } else if (n.id === "path") { const a = n.value; r.path = { path: a, className: "FGPath" }; } else r[n.id] = DG(n, e, `/extensions/KHR_interactivity/nodes/${t}`); return r; } function c6e(c, e, t) { const i = a6e[e.type]; if (!i) throw new Error(`/extensions/KHR_interactivity/nodes/${c}: Unknown block type: ${e.type}`); const r = c.toString(), s = l6e(e, t, r), n = e.metadata; return { className: i, config: s, uniqueId: r, metadata: n, dataInputs: [], dataOutputs: [], signalInputs: [], signalOutputs: [] }; } function u6e(c) { var e, t, i; const r = { uniqueId: G_(), _userVariables: {}, _connectionValues: {} }, s = [r], n = []; for (let l = 0; l < c.nodes.length; l++) { const o = c.nodes[l], u = c6e(l, o, c); n.push(u); } for (let l = 0; l < c.nodes.length; l++) { const o = c.nodes[l], u = n[l], h = (e = o.flows) !== null && e !== void 0 ? 
e : []; for (const f of h) { const p = f.id, m = { uniqueId: G_(), name: p, _connectionType: X_.Output, connectedPointIds: [] }; u.signalOutputs.push(m); const _ = f.node, v = f.socket, C = n[_]; if (!C) throw new Error(`/extensions/KHR_interactivity/nodes/${l}: Could not find node with id ${_} that connects its input with with node ${l}'s output ${p}`); let x = C.signalInputs.find((b) => b.name === v); x || (x = { uniqueId: G_(), name: v, _connectionType: X_.Input, connectedPointIds: [] }, C.signalInputs.push(x)), x.connectedPointIds.push(m.uniqueId), m.connectedPointIds.push(x.uniqueId); } const d = (t = o.values) !== null && t !== void 0 ? t : []; for (const f of d) { const p = f.id, m = { uniqueId: G_(), name: p, _connectionType: X_.Input, connectedPointIds: [] }; if (u.dataInputs.push(m), f.value !== void 0) { const _ = DG(f, c, `/extensions/KHR_interactivity/nodes/${l}`); r._connectionValues[m.uniqueId] = _; } else if (f.node !== void 0 && f.socket !== void 0) { const _ = f.node, v = f.socket, C = n[_]; if (!C) throw new Error(`/extensions/KHR_interactivity/nodes/${l}: Could not find node with id ${_} that connects its output with node${l}'s input ${p}`); let x = C.dataOutputs.find((b) => b.name === v); x || (x = { uniqueId: G_(), name: v, _connectionType: X_.Output, connectedPointIds: [] }, C.dataOutputs.push(x)), m.connectedPointIds.push(x.uniqueId), x.connectedPointIds.push(m.uniqueId); } else throw new Error(`/extensions/KHR_interactivity/nodes/${l}: Invalid socket ${p} in node ${l}`); } } const a = (i = c.variables) !== null && i !== void 0 ? i : []; for (let l = 0; l < a.length; l++) { const o = a[l], u = o.id; r._userVariables[u] = DG(o, c, `/extensions/KHR_interactivity/variables/${l}`); } return { allBlocks: n, executionContexts: s }; } const aoe = /^\/nodes\/(\d+)\/(translation|rotation|scale)$/; function Dq(c, e) { const t = c.getFinalPath(), i = e.getVariable("gltf"); if (!i) throw new Error(`No glTF tree found for path ${t}`); const r = t.match(aoe); if (!r || r.length !== 3) throw new Error(`Invalid path ${t}`); const s = parseInt(r[1]), n = i.nodes && i.nodes[s]; if (!n) throw new Error(`Invalid node index for path ${t}`); const a = n._babylonTransformNode; if (!a) throw new Error(`No Babylon node found for path ${t}`); const l = r[2]; if (!l) throw new Error(`Invalid property for path ${t}`); const o = h6e[l]; if (!o) throw new Error(`Invalid property for path ${t}`); return { babylonNode: a, babylonProperty: o }; } const h6e = { translation: "position", scale: "scaling", rotation: "rotationQuaternion" }, d6e = { shouldProcess(c) { return !!c.getFinalPath().match(aoe); }, processGet(c, e) { const { babylonNode: t, babylonProperty: i } = Dq(c, e); return t[i]; }, processSet(c, e, t) { const { babylonNode: i, babylonProperty: r } = Dq(c, e); i[r] = t; } }, ooe = /^\/materials\/(\d+)\/(pbrMetallicRoughness\/baseColorFactor|pbrMetallicRoughness\/metallicFactor|pbrMetallicRoughness\/roughnessFactor|alphaCutoff|emissiveFactor|normalTexture\/scale|emissiveTexture\/strength)$/, f6e = { "pbrMetallicRoughness/baseColorFactor": "albedoColor", "pbrMetallicRoughness/metallicFactor": "metallic", "pbrMetallicRoughness/roughnessFactor": "roughness", emissiveFactor: "emissiveColor" }; function Oq(c, e) { var t; const i = c.getFinalPath(), r = e.getVariable("gltf"); if (!r) throw new Error(`No glTF tree found for path ${i}`); const s = i.match(ooe); if (!s || s.length !== 3) throw new Error(`Invalid path ${i}`); const n = parseInt(s[1]), a = r.materials && r.materials[n]; if (!a) throw 
new Error(`Invalid material index for path ${i}`); const l = []; if (!a._data) throw new Error(`No Babylon materials found for path ${i}`); for (const h of Object.keys(a._data)) { const d = a._data[parseInt(h)].babylonMaterial; d && l.push(d); } if (!l || l.length === 0) throw new Error(`No Babylon materials found for path ${i}`); const o = s[2]; if (!o) throw new Error(`Invalid property for path ${i}`); const u = (t = f6e[o]) !== null && t !== void 0 ? t : o; return { babylonMaterials: l, babylonProperty: u }; } const p6e = { shouldProcess(c) { return !!c.getFinalPath().match(ooe); }, processGet(c, e) { var t, i; const { babylonMaterials: r, babylonProperty: s } = Oq(c, e); return s === "normalTexture/scale" ? (t = r[0].bumpTexture) === null || t === void 0 ? void 0 : t.uScale : s === "emissiveTexture/strength" ? (i = r[0].emissiveTexture) === null || i === void 0 ? void 0 : i.level : r[0][s]; }, processSet(c, e, t) { const { babylonMaterials: i, babylonProperty: r } = Oq(c, e); for (const s of i) if (r === "normalTexture/scale") s.bumpTexture.uScale = t, s.bumpTexture.vScale = t; else if (r === "emissiveTexture/strength") s.emissiveTexture.level = t; else { let n = t; (r === "albedoColor" || r === "emissiveColor") && (n = new ze(t.x, t.y, t.z)), s[r] = n; } } }, loe = /^\/cameras\/(\d+)\/(orthographic|perspective)\/(xmag|ymag|zfar|znear|aspectRatio|yfov)$/; function wq(c, e) { const t = c.getFinalPath(), i = e.getVariable("gltf"); if (!i) throw new Error(`No glTF tree found for path ${t}`); const r = t.match(loe); if (!r || r.length !== 4) throw new Error(`Invalid path ${t}`); const s = parseInt(r[1]), n = i.cameras && i.cameras[s]; if (!n) throw new Error(`Invalid camera index for path ${t}`); const a = n._babylonCamera; if (!a) throw new Error(`No Babylon camera found for path ${t}`); const l = r[3]; if (!l) throw new Error(`Invalid property for path ${t}`); return { babylonCamera: a, gltfProperty: l }; } const _6e = { shouldProcess(c) { return !!c.getFinalPath().match(loe); }, processGet(c, e) { const { babylonCamera: t, gltfProperty: i } = wq(c, e); switch (i) { case "aspectRatio": return Ve.Warn("Getting aspect ratio is not supported."), -1; case "zNear": return t.minZ; case "zFar": return t.maxZ; case "yfov": return t.fov; case "xmag": return t.orthoRight; case "ymag": return t.orthoTop; } }, processSet(c, e, t) { const { babylonCamera: i, gltfProperty: r } = wq(c, e); switch (r) { case "aspectRatio": Ve.Warn("Setting aspect ratio is not supported."); break; case "zNear": i.minZ = t; break; case "zFar": i.maxZ = t; break; case "yfov": i.fov = t; break; case "xmag": i.orthoLeft = -t, i.orthoRight = t; break; case "ymag": i.orthoTop = t, i.orthoBottom = -t; break; } } }, m6e = [d6e, p6e, _6e], OG = "KHR_interactivity"; class g6e { /** * @internal * @param _loader */ constructor(e) { this._loader = e, this.name = OG, this.enabled = this._loader.isExtensionUsed(OG); } dispose() { this._loader = null; } onReady() { var e; if (!this._loader.babylonScene) return; const t = this._loader.babylonScene, i = (e = this._loader.gltf.extensions) === null || e === void 0 ? 
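/*
  The KHR_interactivity path processors defined just above translate glTF JSON-pointer style
  paths into Babylon.js targets. Summarizing the regexes and lookup tables in this bundle:

      "/nodes/<i>/translation"  resolves to  gltf.nodes[i]._babylonTransformNode.position
      "/nodes/<i>/scale"        resolves to  gltf.nodes[i]._babylonTransformNode.scaling
      "/nodes/<i>/rotation"     resolves to  gltf.nodes[i]._babylonTransformNode.rotationQuaternion

      "/materials/<i>/pbrMetallicRoughness/baseColorFactor"  ->  albedoColor
      "/materials/<i>/pbrMetallicRoughness/metallicFactor"   ->  metallic
      "/materials/<i>/pbrMetallicRoughness/roughnessFactor"  ->  roughness
      "/materials/<i>/emissiveFactor"                        ->  emissiveColor

      "/cameras/<i>/perspective/yfov"   ->  camera.fov
      "/cameras/<i>/orthographic/xmag"  ->  camera.orthoLeft / orthoRight
      "/cameras/<i>/orthographic/ymag"  ->  camera.orthoTop / orthoBottom

  Material writes are applied to every Babylon material created for that glTF material, and
  vector values bound for albedoColor or emissiveColor are converted to a Color3 first (see the
  material processSet above).
*/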
void 0 : e.KHR_interactivity; for (const l of m6e) ym.Extensions.includes(l) || ym.Extensions.push(l); const r = u6e(i), s = new oP({ scene: t }); SP.Parse(r, s).getContext(0).setVariable("gltf", this._loader.gltf), s.start(); } } Xi.RegisterExtension(OG, (c) => new g6e(c)); const coe = "ExtrasAsMetadata"; class v6e { _assignExtras(e, t) { if (t.extras && Object.keys(t.extras).length > 0) { const i = e.metadata = e.metadata || {}, r = i.gltf = i.gltf || {}; r.extras = t.extras; } } /** * @internal */ constructor(e) { this.name = coe, this.enabled = !0, this._loader = e; } /** @internal */ dispose() { this._loader = null; } /** * @internal */ loadNodeAsync(e, t, i) { return this._loader.loadNodeAsync(e, t, (r) => { this._assignExtras(r, t), i(r); }); } /** * @internal */ loadCameraAsync(e, t, i) { return this._loader.loadCameraAsync(e, t, (r) => { this._assignExtras(r, t), i(r); }); } /** * @internal */ createMaterial(e, t, i) { const r = this._loader.createMaterial(e, t, i); return this._assignExtras(r, t), r; } } Xi.RegisterExtension(coe, (c) => new v6e(c)); class KA { constructor() { this.materials = []; } /** * This function will read the mtl file and create each material described inside * This function could be improve by adding : * -some component missing (Ni, Tf...) * -including the specific options available * * @param scene defines the scene the material will be created in * @param data defines the mtl data to parse * @param rootUrl defines the rooturl to use in order to load relative dependencies * @param assetContainer defines the asset container to store the material in (can be null) */ parseMTL(e, t, i, r) { if (t instanceof ArrayBuffer) return; const s = t.split(` `), n = /\s+/; let a, l = null; for (let o = 0; o < s.length; o++) { const u = s[o].trim(); if (u.length === 0 || u.charAt(0) === "#") continue; const h = u.indexOf(" "); let d = h >= 0 ? u.substring(0, h) : u; d = d.toLowerCase(); const f = h >= 0 ? u.substring(h + 1).trim() : ""; if (d === "newmtl") l && this.materials.push(l), e._blockEntityCollection = !!r, l = new Dt(f, e), l._parentContainer = r, e._blockEntityCollection = !1; else if (d === "kd" && l) a = f.split(n, 3).map(parseFloat), l.diffuseColor = ze.FromArray(a); else if (d === "ka" && l) a = f.split(n, 3).map(parseFloat), l.ambientColor = ze.FromArray(a); else if (d === "ks" && l) a = f.split(n, 3).map(parseFloat), l.specularColor = ze.FromArray(a); else if (d === "ke" && l) a = f.split(n, 3).map(parseFloat), l.emissiveColor = ze.FromArray(a); else if (d === "ns" && l) l.specularPower = parseFloat(f); else if (d === "d" && l) l.alpha = parseFloat(f); else if (d === "map_ka" && l) l.ambientTexture = KA._GetTexture(i, f, e); else if (d === "map_kd" && l) l.diffuseTexture = KA._GetTexture(i, f, e); else if (d === "map_ks" && l) l.specularTexture = KA._GetTexture(i, f, e); else if (d !== "map_ns") if (d === "map_bump" && l) { const p = f.split(n), m = p.indexOf("-bm"); let _ = null; m >= 0 && (_ = p[m + 1], p.splice(m, 2)), l.bumpTexture = KA._GetTexture(i, p.join(" "), e), l.bumpTexture && _ !== null && (l.bumpTexture.level = parseFloat(_)); } else d === "map_d" && l && (l.opacityTexture = KA._GetTexture(i, f, e)); } l && this.materials.push(l); } /** * Gets the texture for the material. * * If the material is imported from input file, * We sanitize the url to ensure it takes the texture from aside the material. 
* * @param rootUrl The root url to load from * @param value The value stored in the mtl * @param scene * @returns The Texture */ static _GetTexture(e, t, i) { if (!t) return null; let r = e; if (e === "file:") { let s = t.lastIndexOf("\\"); s === -1 && (s = t.lastIndexOf("/")), s > -1 ? r += t.substr(s + 1) : r += t; } else r += t; return new De(r, i, !1, KA.INVERT_TEXTURE_Y); } } KA.INVERT_TEXTURE_Y = !0; class Fa { /** * Creates a new SolidParser * @param materialToUse defines the array to fill with the list of materials to use (it will be filled by the parse function) * @param babylonMeshesArray defines the array to fill with the list of loaded meshes (it will be filled by the parse function) * @param loadingOptions defines the loading options to use */ constructor(e, t, i) { this._positions = [], this._normals = [], this._uvs = [], this._colors = [], this._meshesFromObj = [], this._indicesForBabylon = [], this._wrappedPositionForBabylon = [], this._wrappedUvsForBabylon = [], this._wrappedColorsForBabylon = [], this._wrappedNormalsForBabylon = [], this._tuplePosNorm = [], this._curPositionInIndices = 0, this._hasMeshes = !1, this._unwrappedPositionsForBabylon = [], this._unwrappedColorsForBabylon = [], this._unwrappedNormalsForBabylon = [], this._unwrappedUVForBabylon = [], this._triangles = [], this._materialNameFromObj = "", this._objMeshName = "", this._increment = 1, this._isFirstMaterial = !0, this._grayColor = new Et(0.5, 0.5, 0.5, 1), this._materialToUse = e, this._babylonMeshesArray = t, this._loadingOptions = i; } /** * Search for obj in the given array. * This function is called to check if a couple of data already exists in an array. * * If found, returns the index of the founded tuple index. Returns -1 if not found * @param arr Array<{ normals: Array, idx: Array }> * @param obj Array * @returns {boolean} */ _isInArray(e, t) { e[t[0]] || (e[t[0]] = { normals: [], idx: [] }); const i = e[t[0]].normals.indexOf(t[1]); return i === -1 ? -1 : e[t[0]].idx[i]; } _isInArrayUV(e, t) { e[t[0]] || (e[t[0]] = { normals: [], idx: [], uv: [] }); const i = e[t[0]].normals.indexOf(t[1]); return i != 1 && t[2] === e[t[0]].uv[i] ? e[t[0]].idx[i] : -1; } /** * This function set the data for each triangle. * Data are position, normals and uvs * If a tuple of (position, normal) is not set, add the data into the corresponding array * If the tuple already exist, add only their indice * * @param indicePositionFromObj Integer The index in positions array * @param indiceUvsFromObj Integer The index in uvs array * @param indiceNormalFromObj Integer The index in normals array * @param positionVectorFromOBJ Vector3 The value of position at index objIndice * @param textureVectorFromOBJ Vector3 The value of uvs * @param normalsVectorFromOBJ Vector3 The value of normals at index objNormale * @param positionColorsFromOBJ */ _setData(e, t, i, r, s, n, a) { let l; this._loadingOptions.optimizeWithUV ? l = this._isInArrayUV(this._tuplePosNorm, [e, i, t]) : l = this._isInArray(this._tuplePosNorm, [e, i]), l === -1 ? 
(this._indicesForBabylon.push(this._wrappedPositionForBabylon.length), this._wrappedPositionForBabylon.push(r), this._wrappedUvsForBabylon.push(s), this._wrappedNormalsForBabylon.push(n), a !== void 0 && this._wrappedColorsForBabylon.push(a), this._tuplePosNorm[e].normals.push(i), this._tuplePosNorm[e].idx.push(this._curPositionInIndices++), this._loadingOptions.optimizeWithUV && this._tuplePosNorm[e].uv.push(t)) : this._indicesForBabylon.push(l); } /** * Transform Vector() and BABYLON.Color() objects into numbers in an array */ _unwrapData() { for (let e = 0; e < this._wrappedPositionForBabylon.length; e++) this._unwrappedPositionsForBabylon.push(this._wrappedPositionForBabylon[e].x, this._wrappedPositionForBabylon[e].y, this._wrappedPositionForBabylon[e].z), this._unwrappedNormalsForBabylon.push(this._wrappedNormalsForBabylon[e].x, this._wrappedNormalsForBabylon[e].y, this._wrappedNormalsForBabylon[e].z), this._unwrappedUVForBabylon.push(this._wrappedUvsForBabylon[e].x, this._wrappedUvsForBabylon[e].y), this._loadingOptions.importVertexColors && this._unwrappedColorsForBabylon.push(this._wrappedColorsForBabylon[e].r, this._wrappedColorsForBabylon[e].g, this._wrappedColorsForBabylon[e].b, this._wrappedColorsForBabylon[e].a); this._wrappedPositionForBabylon.length = 0, this._wrappedNormalsForBabylon.length = 0, this._wrappedUvsForBabylon.length = 0, this._wrappedColorsForBabylon.length = 0, this._tuplePosNorm.length = 0, this._curPositionInIndices = 0; } /** * Create triangles from polygons * It is important to notice that a triangle is a polygon * We get 5 patterns of face defined in OBJ File : * facePattern1 = ["1","2","3","4","5","6"] * facePattern2 = ["1/1","2/2","3/3","4/4","5/5","6/6"] * facePattern3 = ["1/1/1","2/2/2","3/3/3","4/4/4","5/5/5","6/6/6"] * facePattern4 = ["1//1","2//2","3//3","4//4","5//5","6//6"] * facePattern5 = ["-1/-1/-1","-2/-2/-2","-3/-3/-3","-4/-4/-4","-5/-5/-5","-6/-6/-6"] * Each pattern is divided by the same method * @param faces Array[String] The indices of elements * @param v Integer The variable to increment */ _getTriangles(e, t) { for (let i = t; i < e.length - 1; i++) this._triangles.push(e[0], e[i], e[i + 1]); } /** * Create triangles and push the data for each polygon for the pattern 1 * In this pattern we get vertice positions * @param face * @param v */ _setDataForCurrentFaceWithPattern1(e, t) { this._getTriangles(e, t); for (let i = 0; i < this._triangles.length; i++) { const r = parseInt(this._triangles[i]) - 1; this._setData( r, 0, 0, // In the pattern 1, normals and uvs are not defined this._positions[r], // Get the vectors data at.Zero(), D.Up(), // Create default vectors this._loadingOptions.importVertexColors ? this._colors[r] : void 0 ); } this._triangles.length = 0; } /** * Create triangles and push the data for each polygon for the pattern 2 * In this pattern we get vertice positions and uvs * @param face * @param v */ _setDataForCurrentFaceWithPattern2(e, t) { this._getTriangles(e, t); for (let i = 0; i < this._triangles.length; i++) { const r = this._triangles[i].split("/"), s = parseInt(r[0]) - 1, n = parseInt(r[1]) - 1; this._setData( s, n, 0, //Default value for normals this._positions[s], //Get the values for each element this._uvs[n], D.Up(), //Default value for normals this._loadingOptions.importVertexColors ? 
this._colors[s] : void 0 ); } this._triangles.length = 0; } /** * Create triangles and push the data for each polygon for the pattern 3 * In this pattern we get vertice positions, uvs and normals * @param face * @param v */ _setDataForCurrentFaceWithPattern3(e, t) { this._getTriangles(e, t); for (let i = 0; i < this._triangles.length; i++) { const r = this._triangles[i].split("/"), s = parseInt(r[0]) - 1, n = parseInt(r[1]) - 1, a = parseInt(r[2]) - 1; this._setData( s, n, a, this._positions[s], this._uvs[n], this._normals[a] //Set the vector for each component ); } this._triangles.length = 0; } /** * Create triangles and push the data for each polygon for the pattern 4 * In this pattern we get vertice positions and normals * @param face * @param v */ _setDataForCurrentFaceWithPattern4(e, t) { this._getTriangles(e, t); for (let i = 0; i < this._triangles.length; i++) { const r = this._triangles[i].split("//"), s = parseInt(r[0]) - 1, n = parseInt(r[1]) - 1; this._setData( s, 1, //Default value for uv n, this._positions[s], //Get each vector of data at.Zero(), this._normals[n], this._loadingOptions.importVertexColors ? this._colors[s] : void 0 ); } this._triangles.length = 0; } /* * Create triangles and push the data for each polygon for the pattern 3 * In this pattern we get vertice positions, uvs and normals * @param face * @param v */ _setDataForCurrentFaceWithPattern5(e, t) { this._getTriangles(e, t); for (let i = 0; i < this._triangles.length; i++) { const r = this._triangles[i].split("/"), s = this._positions.length + parseInt(r[0]), n = this._uvs.length + parseInt(r[1]), a = this._normals.length + parseInt(r[2]); this._setData( s, n, a, this._positions[s], this._uvs[n], this._normals[a], //Set the vector for each component this._loadingOptions.importVertexColors ? 
this._colors[s] : void 0 ); } this._triangles.length = 0; } _addPreviousObjMesh() { this._meshesFromObj.length > 0 && (this._handledMesh = this._meshesFromObj[this._meshesFromObj.length - 1], this._unwrapData(), this._indicesForBabylon.reverse(), this._handledMesh.indices = this._indicesForBabylon.slice(), this._handledMesh.positions = this._unwrappedPositionsForBabylon.slice(), this._handledMesh.normals = this._unwrappedNormalsForBabylon.slice(), this._handledMesh.uvs = this._unwrappedUVForBabylon.slice(), this._loadingOptions.importVertexColors && (this._handledMesh.colors = this._unwrappedColorsForBabylon.slice()), this._indicesForBabylon.length = 0, this._unwrappedPositionsForBabylon.length = 0, this._unwrappedColorsForBabylon.length = 0, this._unwrappedNormalsForBabylon.length = 0, this._unwrappedUVForBabylon.length = 0); } _optimizeNormals(e) { const t = e.getVerticesData(Y.PositionKind), i = e.getVerticesData(Y.NormalKind), r = {}; if (!t || !i) return; for (let n = 0; n < t.length / 3; n++) { const a = t[n * 3 + 0], l = t[n * 3 + 1], o = t[n * 3 + 2], u = a + "_" + l + "_" + o; let h = r[u]; h || (h = [], r[u] = h), h.push(n); } const s = new D(); for (const n in r) { const a = r[n]; if (a.length < 2) continue; const l = a[0]; for (let o = 1; o < a.length; ++o) { const u = a[o]; i[l * 3 + 0] += i[u * 3 + 0], i[l * 3 + 1] += i[u * 3 + 1], i[l * 3 + 2] += i[u * 3 + 2]; } s.copyFromFloats(i[l * 3 + 0], i[l * 3 + 1], i[l * 3 + 2]), s.normalize(); for (let o = 0; o < a.length; ++o) { const u = a[o]; i[u * 3 + 0] = s.x, i[u * 3 + 1] = s.y, i[u * 3 + 2] = s.z; } } e.setVerticesData(Y.NormalKind, i); } /** * Function used to parse an OBJ string * @param meshesNames defines the list of meshes to load (all if not defined) * @param data defines the OBJ string * @param scene defines the hosting scene * @param assetContainer defines the asset container to load data in * @param onFileToLoadFound defines a callback that will be called if a MTL file is found */ parse(e, t, i, r, s) { var n; const a = t.split(` `); for (let l = 0; l < a.length; l++) { const o = a[l].trim().replace(/\s\s/g, " "); let u; if (!(o.length === 0 || o.charAt(0) === "#")) if (Fa.VertexPattern.test(o)) { if (u = o.match(/[^ ]+/g), this._positions.push(new D(parseFloat(u[1]), parseFloat(u[2]), parseFloat(u[3]))), this._loadingOptions.importVertexColors) if (u.length >= 7) { const h = parseFloat(u[4]), d = parseFloat(u[5]), f = parseFloat(u[6]); this._colors.push(new Et(h > 1 ? h / 255 : h, d > 1 ? d / 255 : d, f > 1 ? f / 255 : f, u.length === 7 || u[7] === void 0 ? 
1 : parseFloat(u[7]))); } else this._colors.push(this._grayColor); } else if ((u = Fa.NormalPattern.exec(o)) !== null) this._normals.push(new D(parseFloat(u[1]), parseFloat(u[2]), parseFloat(u[3]))); else if ((u = Fa.UVPattern.exec(o)) !== null) this._uvs.push(new at(parseFloat(u[1]) * this._loadingOptions.UVScaling.x, parseFloat(u[2]) * this._loadingOptions.UVScaling.y)); else if ((u = Fa.FacePattern3.exec(o)) !== null) this._setDataForCurrentFaceWithPattern3( u[1].trim().split(" "), // ["1/1/1", "2/2/2", "3/3/3"] 1 ); else if ((u = Fa.FacePattern4.exec(o)) !== null) this._setDataForCurrentFaceWithPattern4( u[1].trim().split(" "), // ["1//1", "2//2", "3//3"] 1 ); else if ((u = Fa.FacePattern5.exec(o)) !== null) this._setDataForCurrentFaceWithPattern5( u[1].trim().split(" "), // ["-1/-1/-1", "-2/-2/-2", "-3/-3/-3"] 1 ); else if ((u = Fa.FacePattern2.exec(o)) !== null) this._setDataForCurrentFaceWithPattern2( u[1].trim().split(" "), // ["1/1", "2/2", "3/3"] 1 ); else if ((u = Fa.FacePattern1.exec(o)) !== null) this._setDataForCurrentFaceWithPattern1( u[1].trim().split(" "), // ["1", "2", "3"] 1 ); else if ((u = Fa.LinePattern1.exec(o)) !== null) this._setDataForCurrentFaceWithPattern1( u[1].trim().split(" "), // ["1", "2"] 0 ); else if ((u = Fa.LinePattern2.exec(o)) !== null) this._setDataForCurrentFaceWithPattern2( u[1].trim().split(" "), // ["1/1", "2/2"] 0 ); else if ((u = Fa.LinePattern3.exec(o)) !== null) this._setDataForCurrentFaceWithPattern3( u[1].trim().split(" "), // ["1/1/1", "2/2/2"] 0 ); else if (Fa.GroupDescriptor.test(o) || Fa.ObjectDescriptor.test(o)) { const h = { name: o.substring(2).trim(), indices: void 0, positions: void 0, normals: void 0, uvs: void 0, colors: void 0, materialName: this._materialNameFromObj, isObject: Fa.ObjectDescriptor.test(o) }; this._addPreviousObjMesh(), this._meshesFromObj.push(h), this._hasMeshes = !0, this._isFirstMaterial = !0, this._increment = 1; } else if (Fa.UseMtlDescriptor.test(o)) { if (this._materialNameFromObj = o.substring(7).trim(), !this._isFirstMaterial || !this._hasMeshes) { this._addPreviousObjMesh(); const h = ( //Set the name of the current obj mesh { name: (this._objMeshName || "mesh") + "_mm" + this._increment.toString(), indices: void 0, positions: void 0, normals: void 0, uvs: void 0, colors: void 0, materialName: this._materialNameFromObj, isObject: !1 } ); this._increment++, this._meshesFromObj.push(h), this._hasMeshes = !0; } this._hasMeshes && this._isFirstMaterial && (this._meshesFromObj[this._meshesFromObj.length - 1].materialName = this._materialNameFromObj, this._isFirstMaterial = !1); } else Fa.MtlLibGroupDescriptor.test(o) ? 
s(o.substring(7).trim()) : Fa.SmoothDescriptor.test(o) || Ce.Log("Unhandled expression at line : " + o); } if (this._hasMeshes && (this._handledMesh = this._meshesFromObj[this._meshesFromObj.length - 1], this._indicesForBabylon.reverse(), this._unwrapData(), this._handledMesh.indices = this._indicesForBabylon, this._handledMesh.positions = this._unwrappedPositionsForBabylon, this._handledMesh.normals = this._unwrappedNormalsForBabylon, this._handledMesh.uvs = this._unwrappedUVForBabylon, this._loadingOptions.importVertexColors && (this._handledMesh.colors = this._unwrappedColorsForBabylon)), !this._hasMeshes) { let l = null; if (this._indicesForBabylon.length) this._indicesForBabylon.reverse(), this._unwrapData(); else { for (const o of this._positions) this._unwrappedPositionsForBabylon.push(o.x, o.y, o.z); if (this._normals.length) for (const o of this._normals) this._unwrappedNormalsForBabylon.push(o.x, o.y, o.z); if (this._uvs.length) for (const o of this._uvs) this._unwrappedUVForBabylon.push(o.x, o.y); if (this._colors.length) for (const o of this._colors) this._unwrappedColorsForBabylon.push(o.r, o.g, o.b, o.a); this._materialNameFromObj || (l = new Dt(yc.RandomId(), i), l.pointsCloud = !0, this._materialNameFromObj = l.name, this._normals.length || (l.disableLighting = !0, l.emissiveColor = ze.White())); } this._meshesFromObj.push({ name: yc.RandomId(), indices: this._indicesForBabylon, positions: this._unwrappedPositionsForBabylon, colors: this._unwrappedColorsForBabylon, normals: this._unwrappedNormalsForBabylon, uvs: this._unwrappedUVForBabylon, materialName: this._materialNameFromObj, directMaterial: l, isObject: !0 }); } for (let l = 0; l < this._meshesFromObj.length; l++) { if (e && this._meshesFromObj[l].name) { if (e instanceof Array) { if (e.indexOf(this._meshesFromObj[l].name) === -1) continue; } else if (this._meshesFromObj[l].name !== e) continue; } this._handledMesh = this._meshesFromObj[l], i._blockEntityCollection = !!r; const o = new ke(this._meshesFromObj[l].name, i); if (o._parentContainer = r, i._blockEntityCollection = !1, this._handledMesh._babylonMesh = o, !this._handledMesh.isObject) { for (let h = l - 1; h >= 0; --h) if (this._meshesFromObj[h].isObject && this._meshesFromObj[h]._babylonMesh) { o.parent = this._meshesFromObj[h]._babylonMesh; break; } } if (this._materialToUse.push(this._meshesFromObj[l].materialName), ((n = this._handledMesh.positions) === null || n === void 0 ? 
void 0 : n.length) === 0) { this._babylonMeshesArray.push(o); continue; } const u = new Ot(); if (u.uvs = this._handledMesh.uvs, u.indices = this._handledMesh.indices, u.positions = this._handledMesh.positions, this._loadingOptions.computeNormals) { const h = new Array(); Ot.ComputeNormals(this._handledMesh.positions, this._handledMesh.indices, h), u.normals = h; } else u.normals = this._handledMesh.normals; this._loadingOptions.importVertexColors && (u.colors = this._handledMesh.colors), u.applyToMesh(o), this._loadingOptions.invertY && (o.scaling.y *= -1), this._loadingOptions.optimizeNormals && this._optimizeNormals(o), this._babylonMeshesArray.push(o), this._handledMesh.directMaterial && (o.material = this._handledMesh.directMaterial); } } } Fa.ObjectDescriptor = /^o/; Fa.GroupDescriptor = /^g/; Fa.MtlLibGroupDescriptor = /^mtllib /; Fa.UseMtlDescriptor = /^usemtl /; Fa.SmoothDescriptor = /^s /; Fa.VertexPattern = /^v(\s+[\d|.|+|\-|e|E]+){3,7}/; Fa.NormalPattern = /^vn(\s+[\d|.|+|\-|e|E]+)( +[\d|.|+|\-|e|E]+)( +[\d|.|+|\-|e|E]+)/; Fa.UVPattern = /^vt(\s+[\d|.|+|\-|e|E]+)( +[\d|.|+|\-|e|E]+)/; Fa.FacePattern1 = /^f\s+(([\d]{1,}[\s]?){3,})+/; Fa.FacePattern2 = /^f\s+((([\d]{1,}\/[\d]{1,}[\s]?){3,})+)/; Fa.FacePattern3 = /^f\s+((([\d]{1,}\/[\d]{1,}\/[\d]{1,}[\s]?){3,})+)/; Fa.FacePattern4 = /^f\s+((([\d]{1,}\/\/[\d]{1,}[\s]?){3,})+)/; Fa.FacePattern5 = /^f\s+(((-[\d]{1,}\/-[\d]{1,}\/-[\d]{1,}[\s]?){3,})+)/; Fa.LinePattern1 = /^l\s+(([\d]{1,}[\s]?){2,})+/; Fa.LinePattern2 = /^l\s+((([\d]{1,}\/[\d]{1,}[\s]?){2,})+)/; Fa.LinePattern3 = /^l\s+((([\d]{1,}\/[\d]{1,}\/[\d]{1,}[\s]?){2,})+)/; class Ah { /** * Invert Y-Axis of referenced textures on load */ static get INVERT_TEXTURE_Y() { return KA.INVERT_TEXTURE_Y; } static set INVERT_TEXTURE_Y(e) { KA.INVERT_TEXTURE_Y = e; } /** * Creates loader for .OBJ files * * @param loadingOptions options for loading and parsing OBJ/MTL files. */ constructor(e) { this.name = "obj", this.extensions = ".obj", this._assetContainer = null, this._loadingOptions = e || Ah._DefaultLoadingOptions; } static get _DefaultLoadingOptions() { return { computeNormals: Ah.COMPUTE_NORMALS, optimizeNormals: Ah.OPTIMIZE_NORMALS, importVertexColors: Ah.IMPORT_VERTEX_COLORS, invertY: Ah.INVERT_Y, invertTextureY: Ah.INVERT_TEXTURE_Y, // eslint-disable-next-line @typescript-eslint/naming-convention UVScaling: Ah.UV_SCALING, materialLoadingFailsSilently: Ah.MATERIAL_LOADING_FAILS_SILENTLY, optimizeWithUV: Ah.OPTIMIZE_WITH_UV, skipMaterials: Ah.SKIP_MATERIALS }; } /** * Calls synchronously the MTL file attached to this obj. * Load function or importMesh function don't enable to load 2 files in the same time asynchronously. * Without this function materials are not displayed in the first frame (but displayed after). * In consequence it is impossible to get material information in your HTML file * * @param url The URL of the MTL file * @param rootUrl defines where to load data from * @param onSuccess Callback function to be called when the MTL file is loaded * @param onFailure */ _loadMTL(e, t, i, r) { const s = t + e; Ve.LoadFile(s, i, void 0, void 0, !1, (n, a) => { r(s, a); }); } /** * Instantiates a OBJ file loader plugin. * @returns the created plugin */ createPlugin() { return new Ah(Ah._DefaultLoadingOptions); } /** * If the data string can be loaded directly. 
* @returns if the data can be loaded directly */ canDirectLoad() { return !1; } /** * Imports one or more meshes from the loaded OBJ data and adds them to the scene * @param meshesNames a string or array of strings of the mesh names that should be loaded from the file * @param scene the scene the meshes should be added to * @param data the OBJ data to load * @param rootUrl root url to load from * @returns a promise containing the loaded meshes, particles, skeletons and animations */ importMeshAsync(e, t, i, r) { return this._parseSolid(e, t, i, r).then((s) => ({ meshes: s, particleSystems: [], skeletons: [], animationGroups: [], transformNodes: [], geometries: [], lights: [] })); } /** * Imports all objects from the loaded OBJ data and adds them to the scene * @param scene the scene the objects should be added to * @param data the OBJ data to load * @param rootUrl root url to load from * @returns a promise which completes when objects have been loaded to the scene */ loadAsync(e, t, i) { return this.importMeshAsync(null, e, t, i).then(() => { }); } /** * Load into an asset container. * @param scene The scene to load into * @param data The data to import * @param rootUrl The root url for scene and resources * @returns The loaded asset container */ loadAssetContainerAsync(e, t, i) { const r = new NL(e); return this._assetContainer = r, this.importMeshAsync(null, e, t, i).then((s) => (s.meshes.forEach((n) => r.meshes.push(n)), s.meshes.forEach((n) => { const a = n.material; a && r.materials.indexOf(a) == -1 && (r.materials.push(a), a.getActiveTextures().forEach((o) => { r.textures.indexOf(o) == -1 && r.textures.push(o); })); }), this._assetContainer = null, r)).catch((s) => { throw this._assetContainer = null, s; }); } /** * Read the OBJ file and create an Array of meshes. * Each mesh contains all information given by the OBJ and the MTL file. * i.e. vertices positions and indices, optional normals values, optional UV values, optional material * @param meshesNames defines a string or array of strings of the mesh names that should be loaded from the file * @param scene defines the scene where are displayed the data * @param data defines the content of the obj file * @param rootUrl defines the path to the folder * @returns the list of loaded meshes */ _parseSolid(e, t, i, r) { let s = ""; const n = new KA(), a = [], l = []; new Fa(a, l, this._loadingOptions).parse(e, i, t, this._assetContainer, (h) => { s = h; }); const u = []; return s !== "" && !this._loadingOptions.skipMaterials && u.push(new Promise((h, d) => { this._loadMTL(s, r, (f) => { try { n.parseMTL(t, f, r, this._assetContainer); for (let p = 0; p < n.materials.length; p++) { let m = 0; const _ = []; let v; for (; (v = a.indexOf(n.materials[p].name, m)) > -1; ) _.push(v), m = v + 1; if (v === -1 && _.length === 0) n.materials[p].dispose(); else for (let C = 0; C < _.length; C++) { const x = l[_[C]], b = n.materials[p]; x.material = b, x.getTotalIndices() || (b.pointsCloud = !0); } } h(); } catch (p) { Ve.Warn(`Error processing MTL file: '${s}'`), this._loadingOptions.materialLoadingFailsSilently ? h() : d(p); } }, (f, p) => { Ve.Warn(`Error downloading MTL file: '${s}'`), this._loadingOptions.materialLoadingFailsSilently ? 
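/*
  Usage note (illustrative sketch): the OBJ plugin defined here (minified class `Ah`, registered
  under the "obj" name) reads its defaults from static flags each time a loader instance is
  created, so the flags must be set before the import call. With the unminified packages this is
  roughly as follows (file name and root URL are placeholders):

      import { SceneLoader } from "@babylonjs/core";
      import { OBJFileLoader } from "@babylonjs/loaders/OBJ";

      OBJFileLoader.COMPUTE_NORMALS = true;      // recompute normals instead of trusting "vn" lines
      OBJFileLoader.IMPORT_VERTEX_COLORS = true; // honour per-vertex colors on "v" lines
      const result = await SceneLoader.ImportMeshAsync("", "/models/", "thing.obj", scene);
*/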
h() : d(p); }); })), Promise.all(u).then(() => l); } } Ah.OPTIMIZE_WITH_UV = !0; Ah.INVERT_Y = !1; Ah.IMPORT_VERTEX_COLORS = !1; Ah.COMPUTE_NORMALS = !1; Ah.OPTIMIZE_NORMALS = !1; Ah.UV_SCALING = new at(1, 1); Ah.SKIP_MATERIALS = !1; Ah.MATERIAL_LOADING_FAILS_SILENTLY = !0; fr && fr.RegisterPlugin(new Ah()); class s5 { constructor() { this.solidPattern = /solid (\S*)([\S\s]*?)endsolid[ ]*(\S*)/g, this.facetsPattern = /facet([\s\S]*?)endfacet/g, this.normalPattern = /normal[\s]+([-+]?[0-9]+\.?[0-9]*([eE][-+]?[0-9]+)?)+[\s]+([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)+[\s]+([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)+/g, this.vertexPattern = /vertex[\s]+([-+]?[0-9]+\.?[0-9]*([eE][-+]?[0-9]+)?)+[\s]+([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)+[\s]+([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)+/g, this.name = "stl", this.extensions = { ".stl": { isBinary: !0 } }; } /** * Import meshes into a scene. * @param meshesNames An array of mesh names, a single mesh name, or empty string for all meshes that filter what meshes are imported * @param scene The scene to import into * @param data The data to import * @param rootUrl The root url for scene and resources * @param meshes The meshes array to import into * @returns True if successful or false otherwise */ importMesh(e, t, i, r, s) { let n; if (typeof i != "string") { if (this._isBinary(i)) { const a = new ke("stlmesh", t); return this._parseBinary(a, i), s && s.push(a), !0; } i = new TextDecoder().decode(new Uint8Array(i)); } for (; n = this.solidPattern.exec(i); ) { let a = n[1]; const l = n[3]; if (l && a != l) return Ve.Error("Error in STL, solid name != endsolid name"), !1; if (e && a) { if (e instanceof Array) { if (!e.indexOf(a)) continue; } else if (a !== e) continue; } a = a || "stlmesh"; const o = new ke(a, t); this._parseASCII(o, n[2]), s && s.push(o); } return !0; } /** * Load into a scene. * @param scene The scene to load into * @param data The data to import * @param rootUrl The root url for scene and resources * @returns true if successful or false otherwise */ load(e, t, i) { return this.importMesh(null, e, t, i, null); } /** * Load into an asset container. * @param scene The scene to load into * @param data The data to import * @param rootUrl The root url for scene and resources * @returns The loaded asset container */ loadAssetContainer(e, t, i) { const r = new NL(e); return e._blockEntityCollection = !0, this.importMesh(null, e, t, i, r.meshes), e._blockEntityCollection = !1, r; } _isBinary(e) { const t = new DataView(e); if (t.byteLength <= 80) return !1; const i = 32 / 8 * 3 + 32 / 8 * 3 * 3 + 16 / 8, r = t.getUint32(80, !0); if (80 + 32 / 8 + r * i === t.byteLength) return !0; const s = [115, 111, 108, 105, 100]; for (let n = 0; n < 5; n++) if (t.getUint8(n) !== s[n]) return !0; return !1; } _parseBinary(e, t) { const i = new DataView(t), r = i.getUint32(80, !0), s = 84, n = 12 * 4 + 2; let a = 0; const l = new Float32Array(r * 3 * 3), o = new Float32Array(r * 3 * 3), u = new Uint32Array(r * 3); let h = 0; for (let d = 0; d < r; d++) { const f = s + d * n, p = i.getFloat32(f, !0), m = i.getFloat32(f + 4, !0), _ = i.getFloat32(f + 8, !0); for (let v = 1; v <= 3; v++) { const C = f + v * 12; l[a] = i.getFloat32(C, !0), o[a] = p, s5.DO_NOT_ALTER_FILE_COORDINATES ? (l[a + 1] = i.getFloat32(C + 4, !0), l[a + 2] = i.getFloat32(C + 8, !0), o[a + 1] = m, o[a + 2] = _) : (l[a + 2] = i.getFloat32(C + 4, !0), l[a + 1] = i.getFloat32(C + 8, !0), o[a + 2] = m, o[a + 1] = _), a += 3; } s5.DO_NOT_ALTER_FILE_COORDINATES ? 
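/*
  Usage note (illustrative sketch): the STL plugin above (minified class `s5`, registered under
  the "stl" name) swaps the Y and Z components of positions and normals as it parses, for both
  binary and ASCII files. Setting the static flag below to true keeps the file's coordinates
  untouched and instead reverses the triangle index order. With the unminified packages (file
  name is a placeholder):

      import { SceneLoader } from "@babylonjs/core";
      import { STLFileLoader } from "@babylonjs/loaders/STL";

      STLFileLoader.DO_NOT_ALTER_FILE_COORDINATES = true; // keep coordinates exactly as stored
      await SceneLoader.ImportMeshAsync("", "/models/", "part.stl", scene);
*/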
(u[h] = h, u[h + 1] = h + 2, u[h + 2] = h + 1, h += 3) : (u[h] = h++, u[h] = h++, u[h] = h++); } e.setVerticesData(Y.PositionKind, l), e.setVerticesData(Y.NormalKind, o), e.setIndices(u), e.computeWorldMatrix(!0); } _parseASCII(e, t) { const i = [], r = [], s = []; let n = 0, a; for (; a = this.facetsPattern.exec(t); ) { const l = a[1], o = this.normalPattern.exec(l); if (this.normalPattern.lastIndex = 0, !o) continue; const u = [Number(o[1]), Number(o[5]), Number(o[3])]; let h; for (; h = this.vertexPattern.exec(l); ) s5.DO_NOT_ALTER_FILE_COORDINATES ? (i.push(Number(h[1]), Number(h[3]), Number(h[5])), r.push(u[0], u[2], u[1])) : (i.push(Number(h[1]), Number(h[5]), Number(h[3])), r.push(u[0], u[1], u[2])); s5.DO_NOT_ALTER_FILE_COORDINATES ? (s.push(n, n + 2, n + 1), n += 3) : s.push(n++, n++, n++), this.vertexPattern.lastIndex = 0; } this.facetsPattern.lastIndex = 0, e.setVerticesData(Y.PositionKind, i), e.setVerticesData(Y.NormalKind, r), e.setIndices(s), e.computeWorldMatrix(!0); } } s5.DO_NOT_ALTER_FILE_COORDINATES = !1; fr && fr.RegisterPlugin(new s5()); class Cj { //private _loadingOptions: SPLATLoadingOptions; /** * Creates loader for gaussian splatting files */ constructor() { this.name = "splat", this.extensions = { ".splat": { isBinary: !0 }, ".ply": { isBinary: !0 } }; } /** * Instantiates a gaussian splatting file loader plugin. * @returns the created plugin */ createPlugin() { return new Cj(); } /** * If the data string can be loaded directly. * @returns if the data can be loaded directly */ canDirectLoad() { return !1; } /** * Code from https://github.com/dylanebert/gsplat.js/blob/main/src/loaders/PLYLoader.ts Under MIT license * Loads a .ply from data array buffer * if data array buffer is not ply, returns the original buffer */ _loadPLY(e) { const t = new Uint8Array(e), i = new TextDecoder().decode(t.slice(0, 1024 * 10)), r = `end_header `, s = i.indexOf(r); if (s < 0 || !i) return e; const n = parseInt(/element vertex (\d+)\n/.exec(i)[1]); let a = 0; const l = { double: 8, int: 4, uint: 4, float: 4, short: 2, ushort: 2, uchar: 1 }, o = [], u = i.slice(0, s).split(` `).filter((_) => _.startsWith("property ")); for (const _ of u) { const [v, C, x] = _.split(" "); if (o.push({ name: x, type: C, offset: a }), !l[C]) throw new Error(`Unsupported property type: ${C}`); a += l[C]; } const h = 3 * 4 + 3 * 4 + 4 + 4, d = 0.28209479177387814, f = new DataView(e, s + r.length), p = new ArrayBuffer(h * n), m = new Ze(); for (let _ = 0; _ < n; _++) { const v = new Float32Array(p, _ * h, 3), C = new Float32Array(p, _ * h + 12, 3), x = new Uint8ClampedArray(p, _ * h + 24, 4), b = new Uint8ClampedArray(p, _ * h + 28, 4); let S = 255, M = 0, R = 0, w = 0; for (let V = 0; V < o.length; V++) { const k = o[V]; let L; switch (k.type) { case "float": L = f.getFloat32(k.offset + _ * a, !0); break; case "int": L = f.getInt32(k.offset + _ * a, !0); break; default: throw new Error(`Unsupported property type: ${k.type}`); } switch (k.name) { case "x": v[0] = L; break; case "y": v[1] = L; break; case "z": v[2] = L; break; case "scale_0": C[0] = Math.exp(L); break; case "scale_1": C[1] = Math.exp(L); break; case "scale_2": C[2] = Math.exp(L); break; case "red": x[0] = L; break; case "green": x[1] = L; break; case "blue": x[2] = L; break; case "f_dc_0": x[0] = (0.5 + d * L) * 255; break; case "f_dc_1": x[1] = (0.5 + d * L) * 255; break; case "f_dc_2": x[2] = (0.5 + d * L) * 255; break; case "f_dc_3": x[3] = (0.5 + d * L) * 255; break; case "opacity": x[3] = 1 / (1 + Math.exp(-L)) * 255; break; case 
"rot_0": S = L; break; case "rot_1": M = L; break; case "rot_2": R = L; break; case "rot_3": w = L; break; } } m.set(M, R, w, S), m.normalize(), b[0] = m.w * 128 + 128, b[1] = m.x * 128 + 128, b[2] = m.y * 128 + 128, b[3] = m.z * 128 + 128; } return p; } /** * Imports from the loaded gaussian splatting data and adds them to the scene * @param meshesNames a string or array of strings of the mesh names that should be loaded from the file * @param scene the scene the meshes should be added to * @param data the gaussian splatting data to load * @param rootUrl root url to load from * @returns a promise containing the loaded meshes, particles, skeletons and animations */ importMeshAsync(e, t, i, r) { return new JC("", t).loadFileAsync(r); } /** * Imports all objects from the loaded gaussian splatting data and adds them to the scene * @param scene the scene the objects should be added to * @param data the gaussian splatting data to load * @param rootUrl root url to load from * @returns a promise which completes when objects have been loaded to the scene */ loadAsync(e, t, i) { return new JC("GaussianSplatting", e).loadDataAsync(this._loadPLY(t)); } /** * Load into an asset container. * @param scene The scene to load into * @param data The data to import * @param rootUrl The root url for scene and resources * @returns The loaded asset container */ loadAssetContainerAsync(e, t, i) { throw new Error("loadAssetContainerAsync not implemented for Gaussian Splatting loading"); } } fr && fr.RegisterPlugin(new Cj()); function A6e(c) { return { all: c = c || /* @__PURE__ */ new Map(), on: function(e, t) { var i = c.get(e); i ? i.push(t) : c.set(e, [t]); }, off: function(e, t) { var i = c.get(e); i && (t ? i.splice(i.indexOf(t) >>> 0, 1) : c.set(e, [])); }, emit: function(e, t) { var i = c.get(e); i && i.slice().map(function(r) { r(t); }), (i = c.get("*")) && i.slice().map(function(r) { r(e, t); }); } }; } const ZF = { HoverChildMesh: { buttons: [ { code: "alt", altKey: !0, onlyFirstTrigger: !0 } ], callback(c) { var t; const e = (t = c.pick().meshPickInfo) == null ? void 0 : t.pickedMesh; c.selectorManager.hoverNodes = e ? [e] : []; } }, HoverParentMesh: { buttons: [ { code: "alt", keyDown: !1, altKey: !1 } ], callback(c) { var t; const e = c.getRoot((t = c.pick().meshPickInfo) == null ? void 0 : t.pickedMesh); c.selectorManager.hoverNodes = e ? 
[e] : []; } }, CopyMeshes: { buttons: [ { code: "c", keyDown: !1, ctrlKey: !0, shiftKey: !0 }, { code: "c", keyDown: !1, ctrlKey: !0, shiftKey: !1 } ], callback(c, e) { const t = c.getSelectedNodes(), i = e.shiftKey; if (t.length) { const r = []; c.copyNodes(t, { instanced: i, interact: !0, onStop: (s, n) => { s === 1 && (r.push(...n), c.selectNodes(r)); } }); } } }, DeselectMesh: { buttons: [ { code: "escape", keyDown: !1 } ], callback(c, e) { if (e.isActivating()) { e.stopActiveNodes(2, c); return; } c.selectNodes([]); } }, DeleteMesh: { buttons: [ { code: "delete", keyDown: !1 }, { code: "backspace", keyDown: !1 } ], callback(c, e) { if (e.isActivating()) { e.stopActiveNodes(2, c); return; } const t = c.getSelectedNodes(); c.deleteNodes(t); } }, MoveMeshYUp: { buttons: [ { code: "pageup" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, y: i.y + c.MOVE_DIFF } } }; }) ); } }, MoveMeshYDown: { buttons: [ { code: "pagedown" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, y: i.y - c.MOVE_DIFF } } }; }) ); } }, MoveMeshXDown: { buttons: [ { code: "arrowleft" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, x: i.x - c.MOVE_DIFF } } }; }) ); } }, MoveMeshXUp: { buttons: [ { code: "arrowright" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, x: i.x + c.MOVE_DIFF } } }; }) ); } }, MoveMeshZUp: { buttons: [ { code: "arrowup" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, z: i.z + c.MOVE_DIFF } } }; }) ); } }, MoveMeshZDown: { buttons: [ { code: "arrowdown" } ], callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "position"); return { target: t, props: { position: { ...i, z: i.z - c.MOVE_DIFF } } }; }) ); } }, ScaleMeshXUp: { buttons: { code: "arrowright", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, x: i.x + c.SCALE_DIFF } } }; }) ); } }, ScaleMeshXDown: { buttons: { code: "arrowleft", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, x: i.x - c.SCALE_DIFF } } }; }) ); } }, ScaleMeshZUp: { buttons: { code: "arrowup", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, z: i.z + c.SCALE_DIFF } } }; }) ); } }, ScaleMeshZDown: { buttons: { code: "arrowdown", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, z: i.z - c.SCALE_DIFF } } }; }) ); } }, ScaleMeshYUp: { buttons: { code: "pageup", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, y: i.y + c.SCALE_DIFF } } }; }) ); } }, ScaleMeshYDown: { buttons: 
{ code: "pagedown", shiftKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "scaling"); return { target: t, props: { scaling: { ...i, y: i.y - c.SCALE_DIFF } } }; }) ); } }, RotateMeshXUp: { buttons: { code: "arrowright", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, x: i.x + c.ROTATE_DIFF } } }; }) ); } }, RotateMeshXDown: { buttons: { code: "arrowleft", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, x: i.x - c.ROTATE_DIFF } } }; }) ); } }, RotateMeshZUp: { buttons: { code: "arrowup", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, z: i.z + c.ROTATE_DIFF } } }; }) ); } }, RotateMeshZDown: { buttons: { code: "arrowdown", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, z: i.z - c.ROTATE_DIFF } } }; }) ); } }, RotateMeshYUp: { buttons: { code: "pageup", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, y: i.y + c.ROTATE_DIFF } } }; }) ); } }, RotateMeshYDown: { buttons: { code: "pagedown", altKey: !0 }, callback(c) { const e = c.getSelectedNodes(); c.setValue( e.map((t) => { const i = c.getValue(t, "rotation"); return { target: t, props: { rotation: { ...i, y: i.y - c.ROTATE_DIFF } } }; }) ); } }, Undo: { buttons: { code: "z", ctrlKey: !0 }, callback(c) { c.undo(); } }, Redo: { buttons: { code: "z", ctrlKey: !0, shiftKey: !0 }, callback(c) { c.redo(); } }, SwitchGizmo: { buttons: [ { code: "1" }, { code: "2" }, { code: "3" } ], callback(c, e) { const t = { 1: "position", 2: "scaling", 3: "rotation" }[e.code]; c.root.setOptions({ selectorType: t }); } } }; var Xo = /* @__PURE__ */ ((c) => (c[c.String = 0] = "String", c[c.Number = 1] = "Number", c[c.Boolean = 2] = "Boolean", c))(Xo || {}), wN = /* @__PURE__ */ ((c) => (c[c.Value = 0] = "Value", c[c.DynamicData = 1] = "DynamicData", c))(wN || {}), tx = /* @__PURE__ */ ((c) => (c.None = "None", c.PX_PY_PZ = "PX_PY_PZ", c.PX_CY_PZ = "PX_CY_PZ", c.PX_NY_PZ = "PX_NY_PZ", c.PX_PY_CZ = "PX_PY_CZ", c.PX_PY_NZ = "PX_PY_NZ", c.PX_CY_CZ = "PX_CY_CZ", c.PX_CY_NZ = "PX_CY_NZ", c.PX_NY_CZ = "PX_NY_CZ", c.PX_NY_NZ = "PX_NY_NZ", c.CX_PY_PZ = "CX_PY_PZ", c.CX_CY_PZ = "CX_CY_PZ", c.CX_NY_PZ = "CX_NY_PZ", c.CX_PY_CZ = "CX_PY_CZ", c.CX_PY_NZ = "CX_PY_NZ", c.CX_CY_CZ = "CX_CY_CZ", c.CX_CY_NZ = "CX_CY_NZ", c.CX_NY_CZ = "CX_NY_CZ", c.CX_NY_NZ = "CX_NY_NZ", c.NX_PY_PZ = "NX_PY_PZ", c.NX_CY_PZ = "NX_CY_PZ", c.NX_NY_PZ = "NX_NY_PZ", c.NX_PY_CZ = "NX_PY_CZ", c.NX_PY_NZ = "NX_PY_NZ", c.NX_CY_CZ = "NX_CY_CZ", c.NX_CY_NZ = "NX_CY_NZ", c.NX_NY_CZ = "NX_NY_CZ", c.NX_NY_NZ = "NX_NY_NZ", c))(tx || {}); function pIe(c) { const e = {}, t = [], i = []; c.forEach((r) => { e[r.name] ? e[r.name]++ : e[r.name] = 0; }), Object.keys(e).forEach((r) => { e[r] ? 
t.push(r) : i.push(r); }), console.log("map: ", e), console.log("same: ", t), console.log("diff: ", i); } function _Ie(c) { return Object.keys(c).map((e) => [e, c[e]]); } function mIe(c) { console.log("x: ", c.x, "y: ", c.y, "z: ", c.z); } function lT(c, e = (t) => { }) { try { c && c(); } catch (t) { console.error(t), e(t); } } function y6e(c) { return c == null || c === ""; } function Kr(c) { return `${c || ""}${Ve.RandomId()}`; } function gIe() { return ((1 + Math.random()) * 65536 | 0).toString(16).substring(1); } function vIe(c, e) { for (let t = 0; t < e; t++) { const i = c.clone("clone", null); i.name = Kr(); const r = Math.random() >= 0.5, s = Math.random() >= 0.5, n = Math.random() * 200, a = Math.random() * 200, l = new D(r ? n : -n, 0, s ? a : -a); i.position = l; } } function AIe(c) { const e = {}; [...c.meshes, ...c.transformNodes].forEach((t) => { const i = t.name; typeof e[i] != "number" && (e[i] = 0), e[i]++; }), console.log(e); } const yIe = (c) => Object.prototype.toString.call(c) === "[object Number]", CIe = (c) => Object.prototype.toString.call(c) === "[object String]", xIe = (c) => Object.prototype.toString.call(c) === "[object Boolean]", Lq = (c) => Object.prototype.toString.call(c) === "[object Array]", qA = (c) => Object.prototype.toString.call(c) === "[object Object]", bIe = (c) => Object.prototype.toString.call(c) === "[object Function]", EIe = (c) => Object.prototype.toString.call(c) === "[object Undefined]", TIe = (c) => Object.prototype.toString.call(c) === "[object Null]", UA = (c, e) => { const t = { [Xo.Boolean]: "boolean", [Xo.Number]: "number", [Xo.String]: "string" }[e]; if (typeof c === t) return c; if (t === "boolean") return !!c; if (t === "number") { let i = Number(c); return !isNaN(i) || (i = parseFloat(c), !isNaN(i)) ? i : 0; } if (t === "string") return c == null ? "" : String(c); }; var C6e = typeof global == "object" && global && global.Object === Object && global; const uoe = C6e; var x6e = typeof self == "object" && self && self.Object === Object && self, b6e = uoe || x6e || Function("return this")(); const _6 = b6e; var E6e = _6.Symbol; const y5 = E6e; var hoe = Object.prototype, T6e = hoe.hasOwnProperty, S6e = hoe.toString, T9 = y5 ? y5.toStringTag : void 0; function M6e(c) { var e = T6e.call(c, T9), t = c[T9]; try { c[T9] = void 0; var i = !0; } catch { } var r = S6e.call(c); return i && (e ? c[T9] = t : delete c[T9]), r; } var R6e = Object.prototype, P6e = R6e.toString; function I6e(c) { return P6e.call(c); } var D6e = "[object Null]", O6e = "[object Undefined]", Nq = y5 ? y5.toStringTag : void 0; function ww(c) { return c == null ? c === void 0 ? O6e : D6e : Nq && Nq in Object(c) ? M6e(c) : I6e(c); } function Lw(c) { return c != null && typeof c == "object"; } var w6e = "[object Symbol]"; function doe(c) { return typeof c == "symbol" || Lw(c) && ww(c) == w6e; } function L6e(c, e) { for (var t = -1, i = c == null ? 0 : c.length, r = Array(i); ++t < i; ) r[t] = e(c[t], t, c); return r; } var N6e = Array.isArray; const tV = N6e; var F6e = 1 / 0, Fq = y5 ? y5.prototype : void 0, Bq = Fq ? Fq.toString : void 0; function foe(c) { if (typeof c == "string") return c; if (tV(c)) return L6e(c, foe) + ""; if (doe(c)) return Bq ? Bq.call(c) : ""; var e = c + ""; return e == "0" && 1 / c == -F6e ? 
"-0" : e; } var B6e = /\s/; function U6e(c) { for (var e = c.length; e-- && B6e.test(c.charAt(e)); ) ; return e; } var V6e = /^\s+/; function k6e(c) { return c && c.slice(0, U6e(c) + 1).replace(V6e, ""); } function NI(c) { var e = typeof c; return c != null && (e == "object" || e == "function"); } var Uq = NaN, z6e = /^[-+]0x[0-9a-f]+$/i, H6e = /^0b[01]+$/i, G6e = /^0o[0-7]+$/i, K6e = parseInt; function poe(c) { if (typeof c == "number") return c; if (doe(c)) return Uq; if (NI(c)) { var e = typeof c.valueOf == "function" ? c.valueOf() : c; c = NI(e) ? e + "" : e; } if (typeof c != "string") return c === 0 ? c : +c; c = k6e(c); var t = H6e.test(c); return t || G6e.test(c) ? K6e(c.slice(2), t ? 2 : 8) : z6e.test(c) ? Uq : +c; } var Vq = 1 / 0, W6e = 17976931348623157e292; function j6e(c) { if (!c) return c === 0 ? c : 0; if (c = poe(c), c === Vq || c === -Vq) { var e = c < 0 ? -1 : 1; return e * W6e; } return c === c ? c : 0; } function X6e(c) { var e = j6e(c), t = e % 1; return e === e ? t ? e - t : e : 0; } var Y6e = "[object AsyncFunction]", Q6e = "[object Function]", $6e = "[object GeneratorFunction]", Z6e = "[object Proxy]"; function _oe(c) { if (!NI(c)) return !1; var e = ww(c); return e == Q6e || e == $6e || e == Y6e || e == Z6e; } var q6e = _6["__core-js_shared__"]; const iz = q6e; var kq = function() { var c = /[^.]+$/.exec(iz && iz.keys && iz.keys.IE_PROTO || ""); return c ? "Symbol(src)_1." + c : ""; }(); function J6e(c) { return !!kq && kq in c; } var eye = Function.prototype, tye = eye.toString; function JI(c) { if (c != null) { try { return tye.call(c); } catch { } try { return c + ""; } catch { } } return ""; } var iye = /[\\^$.*+?()[\]{}|]/g, rye = /^\[object .+?Constructor\]$/, sye = Function.prototype, nye = Object.prototype, aye = sye.toString, oye = nye.hasOwnProperty, lye = RegExp( "^" + aye.call(oye).replace(iye, "\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, "$1.*?") + "$" ); function cye(c) { if (!NI(c) || J6e(c)) return !1; var e = _oe(c) ? lye : rye; return e.test(JI(c)); } function uye(c, e) { return c == null ? void 0 : c[e]; } function e7(c, e) { var t = uye(c, e); return cye(t) ? t : void 0; } var hye = e7(_6, "WeakMap"); const wG = hye; var zq = Object.create, dye = /* @__PURE__ */ function() { function c() { } return function(e) { if (!NI(e)) return {}; if (zq) return zq(e); c.prototype = e; var t = new c(); return c.prototype = void 0, t; }; }(); const fye = dye; function pye(c, e) { var t = -1, i = c.length; for (e || (e = Array(i)); ++t < i; ) e[t] = c[t]; return e; } var _ye = function() { try { var c = e7(Object, "defineProperty"); return c({}, "", {}), c; } catch { } }(); const Hq = _ye; function mye(c, e) { for (var t = -1, i = c == null ? 0 : c.length; ++t < i && e(c[t], t, c) !== !1; ) ; return c; } var gye = 9007199254740991, vye = /^(?:0|[1-9]\d*)$/; function Aye(c, e) { var t = typeof c; return e = e ?? gye, !!e && (t == "number" || t != "symbol" && vye.test(c)) && c > -1 && c % 1 == 0 && c < e; } function moe(c, e, t) { e == "__proto__" && Hq ? Hq(c, e, { configurable: !0, enumerable: !0, value: t, writable: !0 }) : c[e] = t; } function goe(c, e) { return c === e || c !== c && e !== e; } var yye = Object.prototype, Cye = yye.hasOwnProperty; function voe(c, e, t) { var i = c[e]; (!(Cye.call(c, e) && goe(i, t)) || t === void 0 && !(e in c)) && moe(c, e, t); } function iV(c, e, t, i) { var r = !t; t || (t = {}); for (var s = -1, n = e.length; ++s < n; ) { var a = e[s], l = i ? 
i(t[a], c[a], a, t, c) : void 0; l === void 0 && (l = c[a]), r ? moe(t, a, l) : voe(t, a, l); } return t; } var xye = 9007199254740991; function Aoe(c) { return typeof c == "number" && c > -1 && c % 1 == 0 && c <= xye; } function yoe(c) { return c != null && Aoe(c.length) && !_oe(c); } var bye = Object.prototype; function xj(c) { var e = c && c.constructor, t = typeof e == "function" && e.prototype || bye; return c === t; } function Eye(c, e) { for (var t = -1, i = Array(c); ++t < c; ) i[t] = e(t); return i; } var Tye = "[object Arguments]"; function Gq(c) { return Lw(c) && ww(c) == Tye; } var Coe = Object.prototype, Sye = Coe.hasOwnProperty, Mye = Coe.propertyIsEnumerable, Rye = Gq(/* @__PURE__ */ function() { return arguments; }()) ? Gq : function(c) { return Lw(c) && Sye.call(c, "callee") && !Mye.call(c, "callee"); }; const Pye = Rye; function Iye() { return !1; } var xoe = typeof exports == "object" && exports && !exports.nodeType && exports, Kq = xoe && typeof module == "object" && module && !module.nodeType && module, Dye = Kq && Kq.exports === xoe, Wq = Dye ? _6.Buffer : void 0, Oye = Wq ? Wq.isBuffer : void 0, wye = Oye || Iye; const boe = wye; var Lye = "[object Arguments]", Nye = "[object Array]", Fye = "[object Boolean]", Bye = "[object Date]", Uye = "[object Error]", Vye = "[object Function]", kye = "[object Map]", zye = "[object Number]", Hye = "[object Object]", Gye = "[object RegExp]", Kye = "[object Set]", Wye = "[object String]", jye = "[object WeakMap]", Xye = "[object ArrayBuffer]", Yye = "[object DataView]", Qye = "[object Float32Array]", $ye = "[object Float64Array]", Zye = "[object Int8Array]", qye = "[object Int16Array]", Jye = "[object Int32Array]", eCe = "[object Uint8Array]", tCe = "[object Uint8ClampedArray]", iCe = "[object Uint16Array]", rCe = "[object Uint32Array]", Bc = {}; Bc[Qye] = Bc[$ye] = Bc[Zye] = Bc[qye] = Bc[Jye] = Bc[eCe] = Bc[tCe] = Bc[iCe] = Bc[rCe] = !0; Bc[Lye] = Bc[Nye] = Bc[Xye] = Bc[Fye] = Bc[Yye] = Bc[Bye] = Bc[Uye] = Bc[Vye] = Bc[kye] = Bc[zye] = Bc[Hye] = Bc[Gye] = Bc[Kye] = Bc[Wye] = Bc[jye] = !1; function sCe(c) { return Lw(c) && Aoe(c.length) && !!Bc[ww(c)]; } function bj(c) { return function(e) { return c(e); }; } var Eoe = typeof exports == "object" && exports && !exports.nodeType && exports, Q9 = Eoe && typeof module == "object" && module && !module.nodeType && module, nCe = Q9 && Q9.exports === Eoe, rz = nCe && uoe.process, aCe = function() { try { var c = Q9 && Q9.require && Q9.require("util").types; return c || rz && rz.binding && rz.binding("util"); } catch { } }(); const nw = aCe; var jq = nw && nw.isTypedArray, oCe = jq ? bj(jq) : sCe; const lCe = oCe; var cCe = Object.prototype, uCe = cCe.hasOwnProperty; function Toe(c, e) { var t = tV(c), i = !t && Pye(c), r = !t && !i && boe(c), s = !t && !i && !r && lCe(c), n = t || i || r || s, a = n ? Eye(c.length, String) : [], l = a.length; for (var o in c) (e || uCe.call(c, o)) && !(n && // Safari 9 has enumerable `arguments.length` in strict mode. (o == "length" || // Node.js 0.10 has enumerable non-index properties on buffers. r && (o == "offset" || o == "parent") || // PhantomJS 2 has enumerable non-index properties on typed arrays. s && (o == "buffer" || o == "byteLength" || o == "byteOffset") || // Skip index properties. 
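/* From here through the deep-clone helper further down, the bundle inlines a subset of lodash: string-to-number coercion (with valueOf unwrapping, whitespace trimming and 0b/0o prefixes), finite/integer clamping, native-function detection, and key enumeration for array-like values. The surrounding loop is lodash's arrayLikeKeys, which skips engine-specific synthetic properties (arguments.length, buffer offset/parent, typed-array buffer/byteLength/byteOffset) as well as plain element indices. A compact sketch of the index test it relies on, with a hypothetical name:

const MAX_SAFE_LENGTH = 9007199254740991;
function isIndexKey(key, length) {
  const n = typeof key === "number" ? key : (/^(?:0|[1-9]\d*)$/.test(key) ? Number(key) : NaN);
  return Number.isInteger(n) && n > -1 && n < (length == null ? MAX_SAFE_LENGTH : length);
}

// isIndexKey("3", 10) -> true, isIndexKey("03", 10) -> false, isIndexKey("length", 10) -> false
*/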
Aye(o, l))) && a.push(o); return a; } function Soe(c, e) { return function(t) { return c(e(t)); }; } var hCe = Soe(Object.keys, Object); const dCe = hCe; var fCe = Object.prototype, pCe = fCe.hasOwnProperty; function _Ce(c) { if (!xj(c)) return dCe(c); var e = []; for (var t in Object(c)) pCe.call(c, t) && t != "constructor" && e.push(t); return e; } function Ej(c) { return yoe(c) ? Toe(c) : _Ce(c); } function mCe(c) { var e = []; if (c != null) for (var t in Object(c)) e.push(t); return e; } var gCe = Object.prototype, vCe = gCe.hasOwnProperty; function ACe(c) { if (!NI(c)) return mCe(c); var e = xj(c), t = []; for (var i in c) i == "constructor" && (e || !vCe.call(c, i)) || t.push(i); return t; } function Tj(c) { return yoe(c) ? Toe(c, !0) : ACe(c); } var yCe = e7(Object, "create"); const TL = yCe; function CCe() { this.__data__ = TL ? TL(null) : {}, this.size = 0; } function xCe(c) { var e = this.has(c) && delete this.__data__[c]; return this.size -= e ? 1 : 0, e; } var bCe = "__lodash_hash_undefined__", ECe = Object.prototype, TCe = ECe.hasOwnProperty; function SCe(c) { var e = this.__data__; if (TL) { var t = e[c]; return t === bCe ? void 0 : t; } return TCe.call(e, c) ? e[c] : void 0; } var MCe = Object.prototype, RCe = MCe.hasOwnProperty; function PCe(c) { var e = this.__data__; return TL ? e[c] !== void 0 : RCe.call(e, c); } var ICe = "__lodash_hash_undefined__"; function DCe(c, e) { var t = this.__data__; return this.size += this.has(c) ? 0 : 1, t[c] = TL && e === void 0 ? ICe : e, this; } function FI(c) { var e = -1, t = c == null ? 0 : c.length; for (this.clear(); ++e < t; ) { var i = c[e]; this.set(i[0], i[1]); } } FI.prototype.clear = CCe; FI.prototype.delete = xCe; FI.prototype.get = SCe; FI.prototype.has = PCe; FI.prototype.set = DCe; function OCe() { this.__data__ = [], this.size = 0; } function rV(c, e) { for (var t = c.length; t--; ) if (goe(c[t][0], e)) return t; return -1; } var wCe = Array.prototype, LCe = wCe.splice; function NCe(c) { var e = this.__data__, t = rV(e, c); if (t < 0) return !1; var i = e.length - 1; return t == i ? e.pop() : LCe.call(e, t, 1), --this.size, !0; } function FCe(c) { var e = this.__data__, t = rV(e, c); return t < 0 ? void 0 : e[t][1]; } function BCe(c) { return rV(this.__data__, c) > -1; } function UCe(c, e) { var t = this.__data__, i = rV(t, c); return i < 0 ? (++this.size, t.push([c, e])) : t[i][1] = e, this; } function RT(c) { var e = -1, t = c == null ? 0 : c.length; for (this.clear(); ++e < t; ) { var i = c[e]; this.set(i[0], i[1]); } } RT.prototype.clear = OCe; RT.prototype.delete = NCe; RT.prototype.get = FCe; RT.prototype.has = BCe; RT.prototype.set = UCe; var VCe = e7(_6, "Map"); const SL = VCe; function kCe() { this.size = 0, this.__data__ = { hash: new FI(), map: new (SL || RT)(), string: new FI() }; } function zCe(c) { var e = typeof c; return e == "string" || e == "number" || e == "symbol" || e == "boolean" ? c !== "__proto__" : c === null; } function sV(c, e) { var t = c.__data__; return zCe(e) ? t[typeof e == "string" ? "string" : "hash"] : t.map; } function HCe(c) { var e = sV(this, c).delete(c); return this.size -= e ? 1 : 0, e; } function GCe(c) { return sV(this, c).get(c); } function KCe(c) { return sV(this, c).has(c); } function WCe(c, e) { var t = sV(this, c), i = t.size; return t.set(c, e), this.size += t.size == i ? 0 : 1, this; } function Nw(c) { var e = -1, t = c == null ? 
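/* FI, RT and Nw in this stretch are lodash's Hash, ListCache and MapCache. MapCache keeps three backing stores and routes every key by type: string keys land in a dedicated string hash, other hashable primitives in a second hash (with "__proto__" special-cased), and remaining keys in a native Map (or a ListCache fallback when Map is missing). A sketch of that routing rule, with hypothetical names:

function bucketFor(stores, key) {
  const t = typeof key;
  const isPrimitiveKey = (t === "string" || t === "number" || t === "symbol" || t === "boolean")
    ? key !== "__proto__"
    : key === null;
  if (!isPrimitiveKey) return stores.map;            // objects and functions go to the Map
  return stores[t === "string" ? "string" : "hash"]; // primitives go to plain-object hashes
}
*/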
0 : c.length; for (this.clear(); ++e < t; ) { var i = c[e]; this.set(i[0], i[1]); } } Nw.prototype.clear = kCe; Nw.prototype.delete = HCe; Nw.prototype.get = GCe; Nw.prototype.has = KCe; Nw.prototype.set = WCe; function Xq(c) { return c == null ? "" : foe(c); } function Moe(c, e) { for (var t = -1, i = e.length, r = c.length; ++t < i; ) c[r + t] = e[t]; return c; } var jCe = Soe(Object.getPrototypeOf, Object); const Roe = jCe; var XCe = _6.isFinite, YCe = Math.min; function QCe(c) { var e = Math[c]; return function(t, i) { if (t = poe(t), i = i == null ? 0 : YCe(X6e(i), 292), i && XCe(t)) { var r = (Xq(t) + "e").split("e"), s = e(r[0] + "e" + (+r[1] + i)); return r = (Xq(s) + "e").split("e"), +(r[0] + "e" + (+r[1] - i)); } return e(t); }; } function $Ce() { this.__data__ = new RT(), this.size = 0; } function ZCe(c) { var e = this.__data__, t = e.delete(c); return this.size = e.size, t; } function qCe(c) { return this.__data__.get(c); } function JCe(c) { return this.__data__.has(c); } var exe = 200; function txe(c, e) { var t = this.__data__; if (t instanceof RT) { var i = t.__data__; if (!SL || i.length < exe - 1) return i.push([c, e]), this.size = ++t.size, this; t = this.__data__ = new Nw(i); } return t.set(c, e), this.size = t.size, this; } function Fw(c) { var e = this.__data__ = new RT(c); this.size = e.size; } Fw.prototype.clear = $Ce; Fw.prototype.delete = ZCe; Fw.prototype.get = qCe; Fw.prototype.has = JCe; Fw.prototype.set = txe; function ixe(c, e) { return c && iV(e, Ej(e), c); } function rxe(c, e) { return c && iV(e, Tj(e), c); } var Poe = typeof exports == "object" && exports && !exports.nodeType && exports, Yq = Poe && typeof module == "object" && module && !module.nodeType && module, sxe = Yq && Yq.exports === Poe, Qq = sxe ? _6.Buffer : void 0, $q = Qq ? Qq.allocUnsafe : void 0; function nxe(c, e) { if (e) return c.slice(); var t = c.length, i = $q ? $q(t) : new c.constructor(t); return c.copy(i), i; } function axe(c, e) { for (var t = -1, i = c == null ? 0 : c.length, r = 0, s = []; ++t < i; ) { var n = c[t]; e(n, t, c) && (s[r++] = n); } return s; } function Ioe() { return []; } var oxe = Object.prototype, lxe = oxe.propertyIsEnumerable, Zq = Object.getOwnPropertySymbols, cxe = Zq ? function(c) { return c == null ? [] : (c = Object(c), axe(Zq(c), function(e) { return lxe.call(c, e); })); } : Ioe; const Sj = cxe; function uxe(c, e) { return iV(c, Sj(c), e); } var hxe = Object.getOwnPropertySymbols, dxe = hxe ? function(c) { for (var e = []; c; ) Moe(e, Sj(c)), c = Roe(c); return e; } : Ioe; const Doe = dxe; function fxe(c, e) { return iV(c, Doe(c), e); } function Ooe(c, e, t) { var i = e(c); return tV(c) ? i : Moe(i, t(c)); } function pxe(c) { return Ooe(c, Ej, Sj); } function _xe(c) { return Ooe(c, Tj, Doe); } var mxe = e7(_6, "DataView"); const LG = mxe; var gxe = e7(_6, "Promise"); const NG = gxe; var vxe = e7(_6, "Set"); const FG = vxe; var qq = "[object Map]", Axe = "[object Object]", Jq = "[object Promise]", eJ = "[object Set]", tJ = "[object WeakMap]", iJ = "[object DataView]", yxe = JI(LG), Cxe = JI(SL), xxe = JI(NG), bxe = JI(FG), Exe = JI(wG), zR = ww; (LG && zR(new LG(new ArrayBuffer(1))) != iJ || SL && zR(new SL()) != qq || NG && zR(NG.resolve()) != Jq || FG && zR(new FG()) != eJ || wG && zR(new wG()) != tJ) && (zR = function(c) { var e = ww(c), t = e == Axe ? c.constructor : void 0, i = t ? 
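/* QCe above is lodash's createRound factory, later used to build a precision-aware round helper: rather than multiplying by 10^precision (which reintroduces floating point error), it shifts the decimal point through exponent notation, applies Math.round, and shifts back. A sketch of the same trick under a hypothetical name:

function roundTo(value, precision) {
  let [mantissa, exp] = `${value}e`.split("e");
  const shifted = Math.round(`${mantissa}e${+exp + precision}`);
  [mantissa, exp] = `${shifted}e`.split("e");
  return +`${mantissa}e${+exp - precision}`;
}

// roundTo(1.005, 2) -> 1.01, while Math.round(1.005 * 100) / 100 -> 1 because 1.005 * 100 === 100.49999999999999
*/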
JI(t) : ""; if (i) switch (i) { case yxe: return iJ; case Cxe: return qq; case xxe: return Jq; case bxe: return eJ; case Exe: return tJ; } return e; }); const Mj = zR; var Txe = Object.prototype, Sxe = Txe.hasOwnProperty; function Mxe(c) { var e = c.length, t = new c.constructor(e); return e && typeof c[0] == "string" && Sxe.call(c, "index") && (t.index = c.index, t.input = c.input), t; } var Rxe = _6.Uint8Array; const rJ = Rxe; function Rj(c) { var e = new c.constructor(c.byteLength); return new rJ(e).set(new rJ(c)), e; } function Pxe(c, e) { var t = e ? Rj(c.buffer) : c.buffer; return new c.constructor(t, c.byteOffset, c.byteLength); } var Ixe = /\w*$/; function Dxe(c) { var e = new c.constructor(c.source, Ixe.exec(c)); return e.lastIndex = c.lastIndex, e; } var sJ = y5 ? y5.prototype : void 0, nJ = sJ ? sJ.valueOf : void 0; function Oxe(c) { return nJ ? Object(nJ.call(c)) : {}; } function wxe(c, e) { var t = e ? Rj(c.buffer) : c.buffer; return new c.constructor(t, c.byteOffset, c.length); } var Lxe = "[object Boolean]", Nxe = "[object Date]", Fxe = "[object Map]", Bxe = "[object Number]", Uxe = "[object RegExp]", Vxe = "[object Set]", kxe = "[object String]", zxe = "[object Symbol]", Hxe = "[object ArrayBuffer]", Gxe = "[object DataView]", Kxe = "[object Float32Array]", Wxe = "[object Float64Array]", jxe = "[object Int8Array]", Xxe = "[object Int16Array]", Yxe = "[object Int32Array]", Qxe = "[object Uint8Array]", $xe = "[object Uint8ClampedArray]", Zxe = "[object Uint16Array]", qxe = "[object Uint32Array]"; function Jxe(c, e, t) { var i = c.constructor; switch (e) { case Hxe: return Rj(c); case Lxe: case Nxe: return new i(+c); case Gxe: return Pxe(c, t); case Kxe: case Wxe: case jxe: case Xxe: case Yxe: case Qxe: case $xe: case Zxe: case qxe: return wxe(c, t); case Fxe: return new i(); case Bxe: case kxe: return new i(c); case Uxe: return Dxe(c); case Vxe: return new i(); case zxe: return Oxe(c); } } function ebe(c) { return typeof c.constructor == "function" && !xj(c) ? fye(Roe(c)) : {}; } var tbe = "[object Map]"; function ibe(c) { return Lw(c) && Mj(c) == tbe; } var aJ = nw && nw.isMap, rbe = aJ ? bj(aJ) : ibe; const sbe = rbe; var nbe = "[object Set]"; function abe(c) { return Lw(c) && Mj(c) == nbe; } var oJ = nw && nw.isSet, obe = oJ ? bj(oJ) : abe; const lbe = obe; var cbe = 1, ube = 2, hbe = 4, woe = "[object Arguments]", dbe = "[object Array]", fbe = "[object Boolean]", pbe = "[object Date]", _be = "[object Error]", Loe = "[object Function]", mbe = "[object GeneratorFunction]", gbe = "[object Map]", vbe = "[object Number]", Noe = "[object Object]", Abe = "[object RegExp]", ybe = "[object Set]", Cbe = "[object String]", xbe = "[object Symbol]", bbe = "[object WeakMap]", Ebe = "[object ArrayBuffer]", Tbe = "[object DataView]", Sbe = "[object Float32Array]", Mbe = "[object Float64Array]", Rbe = "[object Int8Array]", Pbe = "[object Int16Array]", Ibe = "[object Int32Array]", Dbe = "[object Uint8Array]", Obe = "[object Uint8ClampedArray]", wbe = "[object Uint16Array]", Lbe = "[object Uint32Array]", vc = {}; vc[woe] = vc[dbe] = vc[Ebe] = vc[Tbe] = vc[fbe] = vc[pbe] = vc[Sbe] = vc[Mbe] = vc[Rbe] = vc[Pbe] = vc[Ibe] = vc[gbe] = vc[vbe] = vc[Noe] = vc[Abe] = vc[ybe] = vc[Cbe] = vc[xbe] = vc[Dbe] = vc[Obe] = vc[wbe] = vc[Lbe] = !0; vc[_be] = vc[Loe] = vc[bbe] = !1; function qF(c, e, t, i, r, s) { var n, a = e & cbe, l = e & ube, o = e & hbe; if (t && (n = r ? 
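/* Jxe above is lodash's initCloneByTag, and qF (beginning here) is baseClone: the value's [[Class]] tag decides how a fresh copy is initialised - ArrayBuffers are copied byte for byte, DataViews and typed arrays are rebuilt over a cloned buffer, RegExps keep their lastIndex, Maps and Sets start empty and are refilled afterwards. A sketch of that dispatch for a few tags, with a hypothetical name:

function cloneByTagSketch(value, tag) {
  switch (tag) {
    case "[object Date]":
      return new Date(+value);
    case "[object RegExp]": {
      const copy = new RegExp(value.source, value.flags);
      copy.lastIndex = value.lastIndex;
      return copy;
    }
    case "[object ArrayBuffer]": {
      const buffer = new ArrayBuffer(value.byteLength);
      new Uint8Array(buffer).set(new Uint8Array(value));
      return buffer;
    }
    default:
      return undefined; // remaining tags are handled by the real implementation above
  }
}
*/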
t(c, i, r, s) : t(c)), n !== void 0) return n; if (!NI(c)) return c; var u = tV(c); if (u) { if (n = Mxe(c), !a) return pye(c, n); } else { var h = Mj(c), d = h == Loe || h == mbe; if (boe(c)) return nxe(c, a); if (h == Noe || h == woe || d && !r) { if (n = l || d ? {} : ebe(c), !a) return l ? fxe(c, rxe(n, c)) : uxe(c, ixe(n, c)); } else { if (!vc[h]) return r ? c : {}; n = Jxe(c, h, a); } } s || (s = new Fw()); var f = s.get(c); if (f) return f; s.set(c, n), lbe(c) ? c.forEach(function(_) { n.add(qF(_, e, t, _, c, s)); }) : sbe(c) && c.forEach(function(_, v) { n.set(v, qF(_, e, t, v, c, s)); }); var p = o ? l ? _xe : pxe : l ? Tj : Ej, m = u ? void 0 : p(c); return mye(m || c, function(_, v) { m && (v = _, _ = c[v]), voe(n, v, qF(_, e, t, v, c, s)); }), n; } var Nbe = 1, Fbe = 4; function Gu(c) { return qF(c, Nbe | Fbe); } var Bbe = QCe("round"); const Ube = Bbe; function SIe(c, e, t) { const i = c.getScene(); let r = [], s = c.material, n = i.getMaterialById("le5le_alarmMat"); c.material instanceof Dt || c.material instanceof Ri ? c.material instanceof Dt ? r = c.material.diffuseColor.asArray() : r = c.material.albedoColor.asArray() : n || (n = new Dt("le5le_alarmMat", i), c.material = n, r = n.diffuseColor.asArray()); const a = O1(e); let l; s instanceof Ri ? l = new nt( "alarm", "albedoColor", 60, nt.ANIMATIONTYPE_COLOR3, nt.ANIMATIONLOOPMODE_CYCLE ) : l = new nt( "alarm", "diffuseColor", 60, nt.ANIMATIONTYPE_COLOR3, nt.ANIMATIONLOOPMODE_CYCLE ); const o = []; o.push({ frame: 0, value: ze.FromArray(r) }), o.push({ frame: t / 2, value: a }), o.push({ frame: t, value: ze.FromArray(r) }), l.setKeys(o); const u = i.beginDirectAnimation( c.material, [l], 0, t, !0 ); return u.disposeOnEnd = !0, u.onAnimationEndObservable.add(() => { }), u.onAnimationLoopObservable.add(() => { u.stop(); }), u; } class MIe { constructor(e, t = []) { this.materials = /* @__PURE__ */ new Map(), this.addMaterial = (i) => { i.id = i.id || Kr(); const { type: r, id: s } = i; return this.materials.get(r) || this.materials.set(r, /* @__PURE__ */ new Map()), this.materials.get(r).set(s, i), i; }, this.getMaterial = (i) => { for (const [, r] of this.materials) if (r.has(i)) return r.get(i); }, this.data = () => Gu(this.allMaterials()), this.matDispose = (i, r) => { let s = r.getMaterialByName(i); s && (s.diffuseColor && (s.diffuseColor = null), s.diffuseTexture && s.diffuseTexture.dispose(), s.emissiveColor && (s.emissiveColor = null), s.emissiveTexture && s.emissiveTexture.dispose(), s.diffuseTexture && s.diffuseTexture.name === "video" && s.diffuseTexture.video.pause(), s.dispose()); }, this.dispose = () => { }, this.root = e, this.materials.set("image", /* @__PURE__ */ new Map()), this.materials.set("video", /* @__PURE__ */ new Map()), t.forEach(([i, r]) => { this.materials.get(i) || this.materials.set(i, /* @__PURE__ */ new Map()); const s = this.materials.get(i); r.forEach((n) => { const { id: a } = n; s.set(a, n); }); }); } // Remove a single material entry removeMaterial(e) { for (const [, t] of this.materials) if (t.get(e)) return this.root.getAllScenes().forEach((i) => { this.matDispose(e, i.scene); }), t.delete(e); } //----------- updateMaterial(e) { this.materials.forEach((t) => { t.get(e.id) && (t.delete(e.id), this.addMaterial(e), this.root.getAllScenes().forEach((i) => { let r = i.scene.getMaterialByName(e.id); if (r) { let s = e.type === "image" ?
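/* Gu above is a deep clone (baseClone with the deep and symbols flags), and SIe builds the "alarm" blink used on meshes: it captures the material's current diffuse/albedo color, animates it to the alarm color at the halfway frame and back, then stops after one loop. A standalone sketch of the same idea for a StandardMaterial, assuming the @babylonjs/core ES module build; blinkDiffuse and its parameters are illustrative only:

import { Animation, Color3 } from "@babylonjs/core";

function blinkDiffuse(mesh, alarmHexColor, totalFrames) {
  const scene = mesh.getScene();
  const baseColor = mesh.material.diffuseColor.asArray();
  const anim = new Animation("alarm", "diffuseColor", 60,
    Animation.ANIMATIONTYPE_COLOR3, Animation.ANIMATIONLOOPMODE_CYCLE);
  anim.setKeys([
    { frame: 0, value: Color3.FromArray(baseColor) },
    { frame: totalFrames / 2, value: Color3.FromHexString(alarmHexColor) },
    { frame: totalFrames, value: Color3.FromArray(baseColor) },
  ]);
  const animatable = scene.beginDirectAnimation(mesh.material, [anim], 0, totalFrames, true);
  animatable.onAnimationLoopObservable.add(() => animatable.stop()); // blink once, then stop
  return animatable;
}
*/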
this.createMatImage(e, i.scene) : this.createMatVideo(e, i.scene); i.scene.meshes.forEach((n) => { n.material && n.material.name && n.material.name === r.name && (n.material = s); }), r.dispose(); } })); }); } // Return the contents of the this.materials map; allMaterials() { return [...this.materials].map(([e, t]) => [ e, [...t].map(([, i]) => i) ]); } // Bind a material to the given meshes bindMaterialToMesh(e, t) { if (e && e.length > 0) { let i = e, r = i[0].getScene(), s = r.getMaterialByName(t); if (s) i.forEach((n) => { n.material = null, n.material = s; }); else { let n = this.getMaterial(t), a = n.type === "image" ? this.createMatImage(n, r) : this.createMatVideo(n, r); i.forEach((l) => { l.material = null, l.material = a; }); } } } // Unbind the material from the given meshes removeMatToMesh(e) { let t = null; if (e && e.length > 0) { e.forEach((s) => { t = s.material, s.material = null; }); let r = !0; this.root.getAllScenes().forEach((s) => { let n = s.scene; n.meshes.forEach((a) => { t && a.material && a.material.name && t.name && a.material.name === t.name && (r = !1); }), r && t && t.name && this.matDispose(t.name, n); }); } } // Create an image-textured material; createMatImage(e, t) { let i = new Dt("standardMaterial", t); return i.name = e.id, this.matImage(i, e, t), i; } /** * Updates the material's properties (image case) from the materialData passed in by the front-end UI; * @param mat StandardMaterial material instance; * @param data MaterialData material data * @param scene Scene current scene */ matImage(e, t, i) { t.diffuseColor && (e.diffuseColor = O1(t.diffuseColor)), t.emissiveColor && (e.emissiveColor = O1(t.emissiveColor)), t.type === "image" && (t.diffuseTexture && (e.diffuseTexture = new De(t.diffuseTexture, i)), t.emissiveTexture && (e.emissiveTexture = new De(t.emissiveTexture, i))); } /** * Creates a video-textured material * @param data property data * @param scene current scene * @returns the instantiated material */ createMatVideo(e, t) { let i = new Dt("standardMaterial", t); return this.matVideo(i, e, t), i.name = e.id, i; } /** * Updates the material's properties (video case) from the materialData passed in by the front-end UI; * @param videoMat material instance; * @param data material data * @param scene current scene */ matVideo(e, t, i) { t.diffuseVideo && (e.diffuseTexture = new Cm( "video", t.diffuseVideo, i )); let r = e.diffuseTexture; r.video.autoplay = !1, r.coordinatesIndex = 0, e.emissiveColor = ze.White(); } /** * Updates an instantiated material from new material data * @param mat material instance * @param data property data */ chargeMaterialProp(e, t) { this.root.getAllScenes().forEach((i) => { let r = i.scene; t.type == "image" ?
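/* MIe above is a small material registry keyed by type ("image" / "video") and id: it creates StandardMaterials on demand, rebinds them to meshes across all scenes, and disposes textures once a material is no longer referenced anywhere. A minimal sketch of the video branch it implements, assuming @babylonjs/core; createVideoMaterial and the data shape are illustrative only:

import { StandardMaterial, VideoTexture, Color3 } from "@babylonjs/core";

function createVideoMaterial(id, videoUrl, scene) {
  const mat = new StandardMaterial(id, scene);
  const tex = new VideoTexture("video", videoUrl, scene);
  tex.video.autoplay = false;         // playback is expected to be started elsewhere
  tex.coordinatesIndex = 0;
  mat.diffuseTexture = tex;
  mat.emissiveColor = Color3.White(); // keep the video readable regardless of scene lighting
  return mat;
}
*/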
(e.diffuseColor && (e.diffuseColor = null), e.diffuseTexture && e.diffuseTexture.dispose(), e.emissiveColor && (e.emissiveColor = null), e.emissiveTexture && e.emissiveTexture.dispose(), this.matImage(e, t, r)) : (e.diffuseTexture && e.diffuseTexture.dispose(), this.matVideo(e, t, r)); }); } } var Pp = /* @__PURE__ */ ((c) => (c.Grad = "Grad", c.OutLine = "OutLine", c))(Pp || {}); function RIe(c, e, t) { const i = c.getScene(); Cr.ShadersStore[c.name + "VertexShader"] = `\r precision highp float;\r //attribute\r attribute vec3 position;\r attribute vec2 uv;\r attribute vec3 normal;\r //uniform\r uniform mat4 worldViewProjection;\r //varying\r varying vec3 vPosition;\r varying vec2 vUV;\r void main(void) {\r gl_Position = worldViewProjection*vec4(position,1.0);\r vPosition = position;\r vUV = uv;\r }\r `, Cr.ShadersStore[c.name + "FragmentShader"] = `\r precision highp float;\r varying vec3 vPosition;\r uniform float distance;\r uniform vec3 changeColor;\r uniform vec3 center;\r uniform float pow;\r float distanceTwoVec3(vec3 p1,vec3 p2){\r float d = sqrt((p1.x-p2.x)*(p1.x-p2.x)+(p1.y-p2.y)*(p1.y-p2.y)+(p1.z-p2.z)*(p1.z-p2.z));\r return d;\r }\r void main(void) {\r float alpha = distanceTwoVec3(center,vPosition)*0.8/distance;\r gl_FragColor = vec4(changeColor,pow(alpha,pow));\r }\r `; let r = new Lo( c.name, i, { vertex: c.name, fragment: c.name }, { attributes: ["normal", "position", "uv"], uniforms: [ "world", "worldView", "worldViewProjection", "view", "projection" ], needAlphaBlending: !0, needAlphaTesting: !0 } ); r.name = "shader" + c.name; const s = ze.FromHexString(e), n = t || 2, a = c.getBoundingInfo().boundingBox.center, l = c.getBoundingInfo().boundingBox.maximum, o = D.Distance(a, l); r.setFloat("distance", o), r.setColor3("changeColor", s), r.setVector3("center", a), r.setFloat("pow", n); const u = []; return u.push({ name: "pow", unifrom: n }), u.push({ name: "changeColor", unifrom: s }), { mat: r, rtList: u }; } function Vbe(c, e) { Cr.ShadersStore[c + "VertexShader"] = `\r precision highp float;\r //attribute\r attribute vec3 position;\r attribute vec2 uv;\r attribute vec3 normal;\r //uniform\r uniform mat4 worldViewProjection;\r //varying\r varying vec3 vPosition;\r varying vec2 vUV;\r varying vec3 vNormal;\r void main(void) {\r gl_Position = worldViewProjection*vec4(position,1.0);\r vPosition = position;\r vUV = uv;\r vNormal = normal;\r }\r `, Cr.ShadersStore[c + "FragmentShader"] = `\r precision highp float;\r varying vec3 vPosition;\r varying vec3 vNormal;\r uniform float distance;\r uniform vec3 changeColor;\r uniform vec3 center;\r uniform float power;\r uniform float k;\r uniform vec3 viewPoint;\r float distanceTwoVec3(vec3 p1,vec3 p2){\r float d = sqrt((p1.x-p2.x)*(p1.x-p2.x)+(p1.y-p2.y)*(p1.y-p2.y)+(p1.z-p2.z)*(p1.z-p2.z));\r return d;\r }\r void main(void) {\r float modelDot = abs(dot(viewPoint,vNormal)/(length(viewPoint)*length(vNormal)));\r float alpha = distanceTwoVec3(center,vPosition)*k /distance;\r vec3 color = changeColor;\r alpha = modelDot * pow(alpha,power);\r gl_FragColor = vec4(color,alpha);\r }\r `; let t = new Lo( "effectGlShader" + c, e, { vertex: c, fragment: c }, { attributes: ["normal", "position", "uv"], uniforms: [ "world", "worldView", "worldViewProjection", "view", "projection" ], needAlphaBlending: !0, needAlphaTesting: !0 } ); return t.onBind = (i) => { const r = i.getBoundingInfo().boundingBox, s = D.Normalize(r.centerWorld), n = D.Distance(r.centerWorld, r.maximumWorld); t.setFloat("distance", n), t.setVector3("center", s),
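/* RIe and Vbe above register GLSL sources in the shader store and wrap them in a ShaderMaterial whose alpha falls off with distance from the mesh's bounding-box center (Vbe additionally fades with the view angle via the normal). Babylon looks the sources up by appending "VertexShader" / "FragmentShader" to the vertex/fragment names given to the material, so the store keys and the material options have to agree. A minimal sketch of that pattern, assuming @babylonjs/core; the "falloff" shader name and its uniforms are illustrative only:

import { Effect, ShaderMaterial, Color3, Vector3 } from "@babylonjs/core";

function makeFalloffMaterial(scene) {
  Effect.ShadersStore["falloffVertexShader"] = `
    precision highp float;
    attribute vec3 position;
    uniform mat4 worldViewProjection;
    varying vec3 vPosition;
    void main(void) {
      gl_Position = worldViewProjection * vec4(position, 1.0);
      vPosition = position;
    }`;
  Effect.ShadersStore["falloffFragmentShader"] = `
    precision highp float;
    varying vec3 vPosition;
    uniform vec3 changeColor;
    uniform vec3 center;
    uniform float maxDistance;
    void main(void) {
      float alpha = distance(center, vPosition) / maxDistance;
      gl_FragColor = vec4(changeColor, alpha);
    }`;
  const mat = new ShaderMaterial("falloff", scene,
    { vertex: "falloff", fragment: "falloff" },
    {
      attributes: ["position"],
      uniforms: ["worldViewProjection", "changeColor", "center", "maxDistance"],
      needAlphaBlending: true,
    });
  mat.setColor3("changeColor", Color3.Teal());
  mat.setVector3("center", Vector3.Zero());
  mat.setFloat("maxDistance", 10);
  return mat;
}
*/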
t.setVector3("viewPoint", D.Normalize(i.getScene().activeCamera.position)), t.onBind = null; }, t.setFloat("distance", 20), t.setVector3("center", new D(10, 10 * Math.random(), 10)), t.setVector3("viewPoint", new D(10, 10 * Math.random(), 10)), t; } function kbe(c, e) { return Cr.ShadersStore["gradient" + c + "VertexShader"] = `\r precision highp float;\r //attribute\r attribute vec3 position;\r attribute vec2 uv;\r attribute vec3 normal;\r //uniform\r uniform mat4 worldViewProjection;\r //varying\r varying vec3 vPosition;\r varying vec2 vUV;\r varying vec3 vNormal;\r void main(void) {\r gl_Position = worldViewProjection*vec4(position,1.0);\r vPosition = position;\r vUV = uv;\r vNormal = normal;\r }\r `, Cr.ShadersStore["gradient" + c + "FragmentShader"] = `\r precision highp float;\r varying vec3 vPosition;\r float distanceTwoVec3(vec3 p1,vec3 p2){\r float d = sqrt((p1.x-p2.x)*(p1.x-p2.x)+(p1.y-p2.y)*(p1.y-p2.y)+(p1.z-p2.z)*(p1.z-p2.z));\r return d;\r }\r uniform vec3 mainColor;\r uniform float maxHeight;\r uniform float minHeight;\r uniform float alphaK;\r void main(void) {\r float height = maxHeight - minHeight;\r vec3 color = mainColor;\r float alpha = (vPosition.y-minHeight)/height;\r gl_FragColor = vec4(color,alphaK+pow(alpha,0.5)*alpha*(1.0-alphaK));\r }\r `, new Lo( "effectGradAlpha" + c, e, { vertex: "gradient" + c, fragment: "gradient" + c }, { attributes: ["normal", "position", "uv"], uniforms: [ "world", "worldView", "worldViewProjection", "view", "projection" ], needAlphaBlending: !0, needAlphaTesting: !0 } ); } var Ys = typeof globalThis < "u" ? globalThis : typeof window < "u" ? window : typeof global < "u" ? global : typeof self < "u" ? self : {}; function Foe(c) { return c && c.__esModule && Object.prototype.hasOwnProperty.call(c, "default") ? c.default : c; } function Boe(c) { if (c.__esModule) return c; var e = c.default; if (typeof e == "function") { var t = function i() { return this instanceof i ? Reflect.construct(e, arguments, this.constructor) : e.apply(this, arguments); }; t.prototype = e.prototype; } else t = {}; return Object.defineProperty(t, "__esModule", { value: !0 }), Object.keys(c).forEach(function(i) { var r = Object.getOwnPropertyDescriptor(c, i); Object.defineProperty(t, i, r.get ? r : { enumerable: !0, get: function() { return c[i]; } }); }), t; } var Uoe = { exports: {} }; const zbe = /* @__PURE__ */ Boe(iAe); (function(c, e) { (function(i, r) { c.exports = r(zbe); })(typeof self < "u" ? self : Ys, (t) => ( /******/ (() => { var i = { /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../dev/sharedUiComponents/dist/colorPicker/colorPicker.scss": ( /*!**********************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../dev/sharedUiComponents/dist/colorPicker/colorPicker.scss ***! \**********************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.color-picker-container { width: 320px; height: 300px; background-color: white; display: grid; grid-template-columns: 100%; grid-template-rows: 50% 50px 60px 40px 1fr auto; font-family: "acumin-pro-condensed"; font-weight: normal; font-size: 14px; } .color-picker-container.with-hints { height: 380px; } .color-picker-container .color-picker-saturation { grid-row: 1; grid-column: 1; display: grid; grid-template-columns: 100%; grid-template-rows: 100%; position: relative; cursor: pointer; } .color-picker-container .color-picker-saturation .color-picker-saturation-white { grid-row: 1; grid-column: 1; background: -webkit-linear-gradient(to right, #fff, rgba(255, 255, 255, 0)); background: linear-gradient(to right, #fff, rgba(255, 255, 255, 0)); } .color-picker-container .color-picker-saturation .color-picker-saturation-black { grid-row: 1; grid-column: 1; background: -webkit-linear-gradient(to top, #000, rgba(0, 0, 0, 0)); background: linear-gradient(to top, #000, rgba(0, 0, 0, 0)); } .color-picker-container .color-picker-saturation .color-picker-saturation-cursor { pointer-events: none; width: 4px; height: 4px; box-shadow: 0 0 0 1.5px #fff, inset 0 0 1px 1px rgba(0, 0, 0, 0.3), 0 0 1px 2px rgba(0, 0, 0, 0.4); border-radius: 50%; transform: translate(-2px, -2px); position: absolute; } .color-picker-container .color-picker-hue { grid-row: 2; grid-column: 1; display: grid; margin: 10px; grid-template-columns: 24% 76%; grid-template-rows: 100%; } .color-picker-container .color-picker-hue .color-picker-hue-color { grid-row: 1; grid-column: 1; align-self: center; justify-self: center; width: 30px; height: 30px; border-radius: 15px; border: 1px solid black; } .color-picker-container .color-picker-hue .color-picker-hue-slider { grid-row: 1; grid-column: 2; align-self: center; height: 16px; position: relative; cursor: pointer; background: linear-gradient(to right, #f00 0%, #ff0 17%, #0f0 33%, #0ff 50%, #00f 67%, #f0f 83%, #f00 100%); background: -webkit-linear-gradient(to right, #f00 0%, #ff0 17%, #0f0 33%, #0ff 50%, #00f 67%, #f0f 83%, #f00 100%); } .color-picker-container .color-picker-hue .color-picker-hue-slider .color-picker-hue-cursor { pointer-events: none; width: 8px; height: 18px; transform: translate(-4px, -2px); background-color: rgb(248, 248, 248); box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.37); position: absolute; } .color-picker-container .color-picker-component { display: grid; margin: 5px; grid-template-columns: 100%; grid-template-rows: 50% 50%; } .color-picker-container .color-picker-component .color-picker-component-value { justify-self: center; align-self: center; grid-row: 1; grid-column: 1; margin-bottom: 4px; } .color-picker-container .color-picker-component .color-picker-component-value input { width: 50px; } .color-picker-container .color-picker-component .color-picker-component-label { justify-self: center; align-self: center; grid-row: 2; grid-column: 1; color: black; } .color-picker-container .color-picker-rgb { grid-row: 3; grid-column: 1; display: grid; margin: 10px; grid-template-columns: 20% 6.66% 20% 6.66% 20% 6.66% 20%; grid-template-rows: 100%; } .color-picker-container .red { grid-row: 1; grid-column: 1; } 
.color-picker-container .green { grid-row: 1; grid-column: 3; } .color-picker-container .blue { grid-row: 1; grid-column: 5; } .color-picker-container .alpha { grid-row: 1; grid-column: 7; } .color-picker-container .alpha.grayed { opacity: 0.5; } .color-picker-container .color-picker-hex { grid-row: 4; grid-column: 1; display: grid; grid-template-columns: 20% 80%; grid-template-rows: 100%; } .color-picker-container .color-picker-hex .color-picker-hex-label { justify-self: center; align-self: center; grid-row: 1; grid-column: 1; margin-left: 10px; color: black; } .color-picker-container .color-picker-hex .color-picker-hex-value { justify-self: left; align-self: center; grid-row: 1; grid-column: 2; margin-left: 10px; margin-right: 10px; } .color-picker-container .color-picker-hex .color-picker-hex-value input { width: 70px; } .color-picker-container .color-picker-warning { color: black; font-size: 11px; padding: 4px; justify-self: left; align-self: center; grid-row: 6; grid-column: 1; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/colorPicker/colorPicker.scss"], names: [], mappings: "AAAA;EACI,YAAA;EACA,aAAA;EACA,uBAAA;EACA,aAAA;EACA,2BAAA;EACA,+CAAA;EACA,mCAAA;EACA,mBAAA;EACA,eAAA;AACJ;AACI;EACI,aAAA;AACR;AAEI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,2BAAA;EACA,wBAAA;EACA,kBAAA;EACA,eAAA;AAAR;AAEQ;EACI,WAAA;EACA,cAAA;EAEA,2EAAA;EACA,mEAAA;AADZ;AAIQ;EACI,WAAA;EACA,cAAA;EAEA,mEAAA;EACA,2DAAA;AAHZ;AAMQ;EACI,oBAAA;EACA,UAAA;EACA,WAAA;EACA,kGACI;EAGJ,kBAAA;EACA,gCAAA;EACA,kBAAA;AAPZ;AAWI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,YAAA;EACA,8BAAA;EACA,wBAAA;AATR;AAWQ;EACI,WAAA;EACA,cAAA;EACA,kBAAA;EACA,oBAAA;EACA,WAAA;EACA,YAAA;EACA,mBAAA;EACA,uBAAA;AATZ;AAYQ;EACI,WAAA;EACA,cAAA;EACA,kBAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;EAEA,2GAAA;EACA,mHAAA;AAXZ;AAaY;EACI,oBAAA;EACA,UAAA;EACA,YAAA;EACA,gCAAA;EACA,oCAAA;EACA,2CAAA;EACA,kBAAA;AAXhB;AAgBI;EACI,aAAA;EACA,WAAA;EACA,2BAAA;EACA,2BAAA;AAdR;AAgBQ;EACI,oBAAA;EACA,kBAAA;EACA,WAAA;EACA,cAAA;EACA,kBAAA;AAdZ;AAgBY;EACI,WAAA;AAdhB;AAkBQ;EACI,oBAAA;EACA,kBAAA;EACA,WAAA;EACA,cAAA;EACA,YAAA;AAhBZ;AAoBI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,YAAA;EACA,wDAAA;EACA,wBAAA;AAlBR;AAqBI;EACI,WAAA;EACA,cAAA;AAnBR;AAsBI;EACI,WAAA;EACA,cAAA;AApBR;AAuBI;EACI,WAAA;EACA,cAAA;AArBR;AAwBI;EACI,WAAA;EACA,cAAA;AAtBR;AAwBQ;EACI,YAAA;AAtBZ;AA0BI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,8BAAA;EACA,wBAAA;AAxBR;AA0BQ;EACI,oBAAA;EACA,kBAAA;EACA,WAAA;EACA,cAAA;EACA,iBAAA;EACA,YAAA;AAxBZ;AA2BQ;EACI,kBAAA;EACA,kBAAA;EACA,WAAA;EACA,cAAA;EACA,iBAAA;EACA,kBAAA;AAzBZ;AA2BY;EACI,WAAA;AAzBhB;AA8BI;EACI,YAAA;EACA,eAAA;EACA,YAAA;EACA,kBAAA;EACA,kBAAA;EACA,WAAA;EACA,cAAA;AA5BR", sourcesContent: [`.color-picker-container {\r width: 320px;\r height: 300px;\r background-color: white;\r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 50% 50px 60px 40px 1fr auto;\r font-family: "acumin-pro-condensed";\r font-weight: normal;\r font-size: 14px;\r \r &.with-hints {\r height: 380px;\r }\r \r .color-picker-saturation {\r grid-row: 1;\r grid-column: 1;\r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 100%;\r position: relative;\r cursor: pointer;\r \r .color-picker-saturation-white {\r grid-row: 1;\r grid-column: 1;\r \r background: -webkit-linear-gradient(to right, #fff, rgba(255, 255, 255, 0));\r background: linear-gradient(to right, #fff, rgba(255, 255, 255, 0));\r }\r \r .color-picker-saturation-black {\r grid-row: 1;\r grid-column: 1;\r \r background: -webkit-linear-gradient(to top, #000, rgba(0, 0, 0, 0));\r background: linear-gradient(to top, #000, 
rgba(0, 0, 0, 0));\r }\r \r .color-picker-saturation-cursor {\r pointer-events: none;\r width: 4px;\r height: 4px;\r box-shadow:\r 0 0 0 1.5px #fff,\r inset 0 0 1px 1px rgba(0, 0, 0, 0.3),\r 0 0 1px 2px rgba(0, 0, 0, 0.4);\r border-radius: 50%;\r transform: translate(-2px, -2px);\r position: absolute;\r }\r }\r \r .color-picker-hue {\r grid-row: 2;\r grid-column: 1;\r display: grid;\r margin: 10px;\r grid-template-columns: 24% 76%;\r grid-template-rows: 100%;\r \r .color-picker-hue-color {\r grid-row: 1;\r grid-column: 1;\r align-self: center;\r justify-self: center;\r width: 30px;\r height: 30px;\r border-radius: 15px;\r border: 1px solid black;\r }\r \r .color-picker-hue-slider {\r grid-row: 1;\r grid-column: 2;\r align-self: center;\r height: 16px;\r position: relative;\r cursor: pointer;\r \r background: linear-gradient(to right, #f00 0%, #ff0 17%, #0f0 33%, #0ff 50%, #00f 67%, #f0f 83%, #f00 100%);\r background: -webkit-linear-gradient(to right, #f00 0%, #ff0 17%, #0f0 33%, #0ff 50%, #00f 67%, #f0f 83%, #f00 100%);\r \r .color-picker-hue-cursor {\r pointer-events: none;\r width: 8px;\r height: 18px;\r transform: translate(-4px, -2px);\r background-color: rgb(248, 248, 248);\r box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.37);\r position: absolute;\r }\r }\r }\r \r .color-picker-component {\r display: grid;\r margin: 5px;\r grid-template-columns: 100%;\r grid-template-rows: 50% 50%;\r \r .color-picker-component-value {\r justify-self: center;\r align-self: center;\r grid-row: 1;\r grid-column: 1;\r margin-bottom: 4px;\r \r input {\r width: 50px;\r }\r }\r \r .color-picker-component-label {\r justify-self: center;\r align-self: center;\r grid-row: 2;\r grid-column: 1;\r color: black;\r }\r }\r \r .color-picker-rgb {\r grid-row: 3;\r grid-column: 1;\r display: grid;\r margin: 10px;\r grid-template-columns: 20% 6.66% 20% 6.66% 20% 6.66% 20%;\r grid-template-rows: 100%;\r }\r \r .red {\r grid-row: 1;\r grid-column: 1;\r }\r \r .green {\r grid-row: 1;\r grid-column: 3;\r }\r \r .blue {\r grid-row: 1;\r grid-column: 5;\r }\r \r .alpha {\r grid-row: 1;\r grid-column: 7;\r \r &.grayed {\r opacity: 0.5;\r }\r }\r \r .color-picker-hex {\r grid-row: 4;\r grid-column: 1;\r display: grid;\r grid-template-columns: 20% 80%;\r grid-template-rows: 100%;\r \r .color-picker-hex-label {\r justify-self: center;\r align-self: center;\r grid-row: 1;\r grid-column: 1;\r margin-left: 10px;\r color: black;\r }\r \r .color-picker-hex-value {\r justify-self: left;\r align-self: center;\r grid-row: 1;\r grid-column: 2;\r margin-left: 10px;\r margin-right: 10px;\r \r input {\r width: 70px;\r }\r }\r }\r \r .color-picker-warning {\r color: black;\r font-size: 11px;\r padding: 4px;\r justify-self: left;\r align-self: center;\r grid-row: 6;\r grid-column: 1;\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/searchBox.scss": ( /*!************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/searchBox.scss ***! 
\************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `#graph-search-container { position: absolute; width: 100%; height: 100%; left: 0; top: 0; } #graph-search-container #graph-search-picking-blocker { position: absolute; width: 100%; height: 100%; left: 0; top: 0; cursor: default; } #graph-search-container #graph-search-box { width: 300px; height: 400px; position: absolute; left: calc(50% - 150px); top: calc(50% - 200px); background: rgb(72, 72, 72); border: 2px solid black; z-index: 100; cursor: auto; display: grid; grid-template-rows: auto auto 1fr; } #graph-search-container #graph-search-box .graph-search-box-title { border-bottom: 1px solid rgb(40, 40, 40); margin: 4px; padding-bottom: 4px; color: white; grid-row: 1; } #graph-search-container #graph-search-box .graph-search-box-filter { margin: 4px; padding-bottom: 4px; border-radius: 4px; width: calc(100% - 16px); grid-row: 2; } #graph-search-container #graph-search-box .graph-search-box-list { grid-row: 3; overflow-y: auto; display: flex; flex-direction: column; } #graph-search-container #graph-search-box .graph-search-box-list .graph-search-box-list-item { margin-left: 4px; cursor: pointer; color: white; margin-top: 4px; margin-bottom: 4px; } #graph-search-container #graph-search-box .graph-search-box-list .graph-search-box-list-item:hover { background: rgb(30, 30, 30); } #graph-search-container #graph-search-box .graph-search-box-list .graph-search-box-list-item.selected { background: rgb(230, 230, 230); color: black; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/searchBox.scss"], names: [], mappings: "AAAA;EACI,kBAAA;EACA,WAAA;EACA,YAAA;EACA,OAAA;EACA,MAAA;AACJ;AACI;EACI,kBAAA;EACA,WAAA;EACA,YAAA;EACA,OAAA;EACA,MAAA;EACA,eAAA;AACR;AAEI;EACI,YAAA;EACA,aAAA;EACA,kBAAA;EACA,uBAAA;EACA,sBAAA;EACA,2BAAA;EACA,uBAAA;EACA,YAAA;EACA,YAAA;EACA,aAAA;EACA,iCAAA;AAAR;AAEQ;EACI,wCAAA;EACA,WAAA;EACA,mBAAA;EACA,YAAA;EACA,WAAA;AAAZ;AAGQ;EACI,WAAA;EACA,mBAAA;EACA,kBAAA;EACA,wBAAA;EACA,WAAA;AADZ;AAIQ;EACI,WAAA;EACA,gBAAA;EACA,aAAA;EACA,sBAAA;AAFZ;AAIY;EACI,gBAAA;EACA,eAAA;EACA,YAAA;EACA,eAAA;EACA,kBAAA;AAFhB;AAIgB;EACI,2BAAA;AAFpB;AAKgB;EACI,8BAAA;EACA,YAAA;AAHpB", sourcesContent: [`#graph-search-container {\r position: absolute;\r width: 100%;\r height: 100%;\r left: 0;\r top: 0;\r \r #graph-search-picking-blocker {\r position: absolute;\r width: 100%;\r height: 100%;\r left: 0;\r top: 0;\r cursor: default;\r }\r \r #graph-search-box {\r width: 300px;\r height: 400px;\r position: absolute;\r left: calc(50% - 150px);\r top: calc(50% - 200px);\r background: rgb(72, 72, 72);\r border: 2px solid black;\r z-index: 100;\r cursor: auto;\r display: grid;\r grid-template-rows: auto auto 1fr;\r \r .graph-search-box-title {\r border-bottom: 1px solid rgb(40, 40, 40);\r margin: 4px;\r padding-bottom: 4px;\r color: white;\r grid-row: 1;\r }\r \r .graph-search-box-filter 
{\r margin: 4px;\r padding-bottom: 4px;\r border-radius: 4px;\r width: calc(100% - 16px);\r grid-row: 2;\r }\r \r .graph-search-box-list {\r grid-row: 3;\r overflow-y: auto;\r display: flex;\r flex-direction: column;\r \r .graph-search-box-list-item {\r margin-left: 4px;\r cursor: pointer;\r color: white;\r margin-top: 4px;\r margin-bottom: 4px;\r \r &:hover {\r background: rgb(30, 30, 30);\r }\r \r &.selected {\r background: rgb(230, 230, 230);\r color: black;\r }\r }\r }\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/log/log.scss": ( /*!***********************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/log/log.scss ***! \***********************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `#nme-log-console { background: #333333; height: 120px; box-sizing: border-box; margin: 0; padding: 10px; width: 100%; overflow: hidden; overflow-y: auto; grid-row: 2; grid-column: 3; } #nme-log-console .log { color: white; font-size: 14px; font-family: "Courier New", Courier, monospace; } #nme-log-console .log.error { color: red; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/components/log/log.scss"], names: [], mappings: "AAAA;EACI,mBAAA;EACA,aAAA;EACA,sBAAA;EACA,SAAA;EACA,aAAA;EACA,WAAA;EACA,gBAAA;EACA,gBAAA;EACA,WAAA;EACA,cAAA;AACJ;AACI;EACI,YAAA;EACA,eAAA;EACA,8CAAA;AACR;AACQ;EACI,UAAA;AACZ", sourcesContent: [`#nme-log-console {\r background: #333333;\r height: 120px;\r box-sizing: border-box;\r margin: 0;\r padding: 10px;\r width: 100%;\r overflow: hidden;\r overflow-y: auto;\r grid-row: 2;\r grid-column: 3;\r \r .log {\r color: white;\r font-size: 14px;\r font-family: "Courier New", Courier, monospace;\r \r &.error {\r color: red;\r }\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/nodeList/nodeList.scss": ( /*!*********************************************************************************************************************************************************************************************************************************!*\ !*** 
../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/nodeList/nodeList.scss ***! \*********************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `#nmeNodeList { background: #333333; height: 100%; margin: 0; padding: 0; display: grid; width: 100%; overflow: hidden; } #nmeNodeList .panes { overflow: hidden; } #nmeNodeList .panes .pane { color: white; overflow: hidden; height: 100%; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } #nmeNodeList .panes .pane .filter { display: flex; align-items: stretch; } #nmeNodeList .panes .pane .filter input { width: 100%; margin: 10px 10px 5px 10px; display: block; border: none; padding: 0; border-bottom: solid 1px rgb(51, 122, 183); background: linear-gradient(to bottom, rgba(255, 255, 255, 0) 96%, rgb(51, 122, 183) 4%); background-position: -1000px 0; background-size: 1000px 100%; background-repeat: no-repeat; color: white; } #nmeNodeList .panes .pane .filter input:focus { box-shadow: none; outline: none; background-position: 0 0; } #nmeNodeList .panes .pane .filter input::placeholder { color: gray; } #nmeNodeList .panes .pane .list-container { overflow-x: hidden; overflow-y: auto; height: calc(100% - 32px); } #nmeNodeList .panes .pane .list-container .underline { border-bottom: 0.5px solid rgba(255, 255, 255, 0.5); } #nmeNodeList .panes .pane .list-container .draggableLine { height: 30px; display: grid; align-items: center; justify-items: stretch; background: #222222; cursor: grab; text-align: center; margin: 0; box-sizing: border-box; } #nmeNodeList .panes .pane .list-container .draggableLine:hover { background: rgb(51, 122, 183); color: white; } #nmeNodeList .panes .pane .list-container .nonDraggableLine { height: 30px; display: grid; align-items: center; justify-items: stretch; background: #222222; text-align: center; margin: 0; box-sizing: border-box; } #nmeNodeList .panes .pane .list-container .withButton { height: 30px; position: relative; } #nmeNodeList .panes .pane .list-container .withButton .icon { position: absolute; right: 4px; top: 5px; } #nmeNodeList .panes .pane .list-container .withButton .icon:hover { cursor: pointer; } #nmeNodeList .panes .pane .list-container .withButton .icon .img { height: 17px; width: 17px; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine { height: 30px; display: grid; align-items: center; justify-items: stretch; padding-bottom: 5px; position: absolute; right: 0px; top: 2px; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine input[type=file] { display: none; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine .file-upload { background: transparent; border: transparent; padding: 15px 200px; opacity: 0.9; cursor: pointer; text-align: center; } 
#nmeNodeList .panes .pane .list-container .withButton .buttonLine .file-upload:hover { opacity: 1; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine .file-upload:active { transform: scale(0.98); transform-origin: 0.5 0.5; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine button { background: transparent; border: transparent; margin: 5px 10px 5px 10px; color: white; padding: 4px 5px; opacity: 0.9; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine button:hover { opacity: 0; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine button:active { background: transparent; } #nmeNodeList .panes .pane .list-container .withButton .buttonLine button:focus { border: transparent; outline: 0px; } #nmeNodeList .panes .pane .list-container .paneContainer { margin-top: 3px; display: grid; grid-template-rows: 100%; grid-template-columns: 100%; } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content { grid-row: 1; grid-column: 1; } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content .header { display: grid; grid-template-columns: 1fr auto; background: #555555; height: 30px; padding-right: 5px; cursor: pointer; } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content .header .title { border-left: 3px solid transparent; padding-left: 5px; grid-column: 1; display: flex; align-items: center; } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content .header .collapse { grid-column: 2; display: flex; align-items: center; justify-items: center; transform-origin: center; } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content .header .collapse.closed { transform: rotate(180deg); } #nmeNodeList .panes .pane .list-container .paneContainer .paneContainer-content .paneList > div:not(:last-child) { border-bottom: 1px solid rgba(255, 255, 255, 0.3); }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/components/nodeList/nodeList.scss"], names: [], mappings: 
"AAAA;EACI,mBAAA;EACA,YAAA;EACA,SAAA;EACA,UAAA;EACA,aAAA;EACA,WAAA;EACA,gBAAA;AACJ;AACI;EACI,gBAAA;AACR;AACQ;EACI,YAAA;EAEA,gBAAA;EACA,YAAA;EAEA,yBAAA;EACA,sBAAA;EACA,qBAAA;EACA,iBAAA;AADZ;AAGY;EACI,aAAA;EACA,oBAAA;AADhB;AAGgB;EACI,WAAA;EACA,0BAAA;EACA,cAAA;EACA,YAAA;EACA,UAAA;EACA,0CAAA;EACA,wFAAA;EACA,8BAAA;EACA,4BAAA;EACA,4BAAA;EACA,YAAA;AADpB;AAIgB;EACI,gBAAA;EACA,aAAA;EACA,wBAAA;AAFpB;AAKgB;EACI,WAAA;AAHpB;AAOY;EACI,kBAAA;EACA,gBAAA;EACA,yBAAA;AALhB;AAOgB;EACI,mDAAA;AALpB;AAQgB;EACI,YAAA;EACA,aAAA;EACA,mBAAA;EACA,sBAAA;EACA,mBAAA;EACA,YAAA;EACA,kBAAA;EACA,SAAA;EACA,sBAAA;AANpB;AAQoB;EACI,6BAAA;EACA,YAAA;AANxB;AAUgB;EACI,YAAA;EACA,aAAA;EACA,mBAAA;EACA,sBAAA;EACA,mBAAA;EACA,kBAAA;EACA,SAAA;EACA,sBAAA;AARpB;AAWgB;EACI,YAAA;EACA,kBAAA;AATpB;AAUoB;EACI,kBAAA;EACA,UAAA;EACA,QAAA;AARxB;AASwB;EACI,eAAA;AAP5B;AAUwB;EACI,YAAA;EACA,WAAA;AAR5B;AAYoB;EACI,YAAA;EACA,aAAA;EACA,mBAAA;EACA,sBAAA;EACA,mBAAA;EACA,kBAAA;EACA,UAAA;EACA,QAAA;AAVxB;AAWwB;EACI,aAAA;AAT5B;AAYwB;EACI,uBAAA;EACA,mBAAA;EACA,mBAAA;EACA,YAAA;EACA,eAAA;EACA,kBAAA;AAV5B;AAawB;EACI,UAAA;AAX5B;AAcwB;EACI,sBAAA;EACA,yBAAA;AAZ5B;AAewB;EACI,uBAAA;EACA,mBAAA;EACA,yBAAA;EACA,YAAA;EACA,gBAAA;EACA,YAAA;AAb5B;AAgBwB;EACI,UAAA;AAd5B;AAiBwB;EACI,uBAAA;AAf5B;AAkBwB;EACI,mBAAA;EACA,YAAA;AAhB5B;AAqBgB;EACI,eAAA;EACA,aAAA;EACA,wBAAA;EACA,2BAAA;AAnBpB;AAqBoB;EACI,WAAA;EACA,cAAA;AAnBxB;AAqBwB;EACI,aAAA;EACA,+BAAA;EACA,mBAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;AAnB5B;AAqB4B;EACI,kCAAA;EACA,iBAAA;EACA,cAAA;EACA,aAAA;EACA,mBAAA;AAnBhC;AAsB4B;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,qBAAA;EACA,wBAAA;AApBhC;AAsBgC;EACI,yBAAA;AApBpC;AAyBwB;EACI,iDAAA;AAvB5B", sourcesContent: [`#nmeNodeList {\r background: #333333;\r height: 100%;\r margin: 0;\r padding: 0;\r display: grid;\r width: 100%;\r overflow: hidden;\r \r .panes {\r overflow: hidden;\r \r .pane {\r color: white;\r \r overflow: hidden;\r height: 100%;\r \r -webkit-user-select: none;\r -moz-user-select: none;\r -ms-user-select: none;\r user-select: none;\r \r .filter {\r display: flex;\r align-items: stretch;\r \r input {\r width: 100%;\r margin: 10px 10px 5px 10px;\r display: block;\r border: none;\r padding: 0;\r border-bottom: solid 1px rgb(51, 122, 183);\r background: linear-gradient(to bottom, rgba(255, 255, 255, 0) 96%, rgb(51, 122, 183) 4%);\r background-position: -1000px 0;\r background-size: 1000px 100%;\r background-repeat: no-repeat;\r color: white;\r }\r \r input:focus {\r box-shadow: none;\r outline: none;\r background-position: 0 0;\r }\r \r input::placeholder {\r color: gray;\r }\r }\r \r .list-container {\r overflow-x: hidden;\r overflow-y: auto;\r height: calc(100% - 32px);\r \r .underline {\r border-bottom: 0.5px solid rgba(255, 255, 255, 0.5);\r }\r \r .draggableLine {\r height: 30px;\r display: grid;\r align-items: center;\r justify-items: stretch;\r background: #222222;\r cursor: grab;\r text-align: center;\r margin: 0;\r box-sizing: border-box;\r \r &:hover {\r background: rgb(51, 122, 183);\r color: white;\r }\r }\r \r .nonDraggableLine {\r height: 30px;\r display: grid;\r align-items: center;\r justify-items: stretch;\r background: #222222;\r text-align: center;\r margin: 0;\r box-sizing: border-box;\r }\r \r .withButton {\r height: 30px;\r position: relative;\r .icon {\r position: absolute;\r right: 4px;\r top: 5px;\r &:hover {\r cursor: pointer;\r }\r \r .img {\r height: 17px;\r width: 17px;\r }\r }\r \r .buttonLine {\r height: 30px;\r display: grid;\r align-items: center;\r justify-items: stretch;\r padding-bottom: 5px;\r position: absolute;\r right: 0px;\r top: 2px;\r 
input[type="file"] {\r display: none;\r }\r \r .file-upload {\r background: transparent;\r border: transparent;\r padding: 15px 200px;\r opacity: 0.9;\r cursor: pointer;\r text-align: center;\r }\r \r .file-upload:hover {\r opacity: 1;\r }\r \r .file-upload:active {\r transform: scale(0.98);\r transform-origin: 0.5 0.5;\r }\r \r button {\r background: transparent;\r border: transparent;\r margin: 5px 10px 5px 10px;\r color: white;\r padding: 4px 5px;\r opacity: 0.9;\r }\r \r button:hover {\r opacity: 0;\r }\r \r button:active {\r background: transparent;\r }\r \r button:focus {\r border: transparent;\r outline: 0px;\r }\r }\r }\r \r .paneContainer {\r margin-top: 3px;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 100%;\r \r .paneContainer-content {\r grid-row: 1;\r grid-column: 1;\r \r .header {\r display: grid;\r grid-template-columns: 1fr auto;\r background: #555555;\r height: 30px;\r padding-right: 5px;\r cursor: pointer;\r \r .title {\r border-left: 3px solid transparent;\r padding-left: 5px;\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .collapse {\r grid-column: 2;\r display: flex;\r align-items: center;\r justify-items: center;\r transform-origin: center;\r \r &.closed {\r transform: rotate(180deg);\r }\r }\r }\r \r .paneList > div:not(:last-child) {\r border-bottom: 1px solid rgba(255, 255, 255, 0.3);\r }\r }\r }\r }\r }\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/propertyTab/propertyTab.scss": ( /*!***************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/components/propertyTab/propertyTab.scss ***! \***************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.nme-right-panel #propertyTab { color: white; background: #333333; } .nme-right-panel #propertyTab #header { height: 30px; font-size: 16px; color: white; background: #222222; grid-row: 1; text-align: center; display: grid; grid-template-columns: 30px 1fr; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } .nme-right-panel #propertyTab #header #logo { position: relative; grid-column: 1; width: 24px; height: 24px; left: 0; display: flex; align-self: center; justify-self: center; } .nme-right-panel #propertyTab #header #title { grid-column: 2; display: grid; align-items: center; text-align: center; } .nme-right-panel #propertyTab .range { -webkit-appearance: none; width: 120px; height: 6px; background: #d3d3d3; border-radius: 5px; outline: none; opacity: 0.7; -webkit-transition: 0.2s; transition: opacity 0.2s; } .nme-right-panel #propertyTab .range:hover { opacity: 1; } .nme-right-panel #propertyTab .range::-webkit-slider-thumb { -webkit-appearance: none; appearance: none; width: 14px; height: 14px; border-radius: 50%; background: rgb(51, 122, 183); cursor: pointer; } .nme-right-panel #propertyTab .range::-moz-range-thumb { width: 14px; height: 14px; border-radius: 50%; background: rgb(51, 122, 183); cursor: pointer; } .nme-right-panel #propertyTab input[type=color] { -webkit-appearance: none; border: 1px solid rgba(255, 255, 255, 0.5); padding: 0; width: 30px; height: 20px; } .nme-right-panel #propertyTab input[type=color]::-webkit-color-swatch-wrapper { padding: 0; } .nme-right-panel #propertyTab input[type=color]::-webkit-color-swatch { border: none; } .nme-right-panel #propertyTab .sliderLine { padding-left: 5px; height: 30px; display: grid; grid-template-rows: 100%; grid-template-columns: 1fr 50px auto; } .nme-right-panel #propertyTab .sliderLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .sliderLine .slider { grid-column: 3; grid-row: 1; margin-right: 5px; width: 90%; display: flex; align-items: center; } .nme-right-panel #propertyTab .sliderLine .floatLine { grid-column: 2; padding-left: 5px; } .nme-right-panel #propertyTab .sliderLine .floatLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .sliderLine .floatLine .short { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .sliderLine .floatLine .short input { width: 35px; } .nme-right-panel #propertyTab .sliderLine .floatLine .short input::-webkit-outer-spin-button, .nme-right-panel #propertyTab .sliderLine .floatLine .short input::-webkit-inner-spin-button { -webkit-appearance: none; margin: 0; } .nme-right-panel #propertyTab .sliderLine .floatLine .short input[type=number] { -moz-appearance: textfield; } .nme-right-panel #propertyTab .textInputLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr 120px auto; } .nme-right-panel #propertyTab .textInputLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .textInputLine .value { display: flex; align-items: center; grid-column: 2; } .nme-right-panel #propertyTab .textInputLine .value input { width: calc(100% - 5px); margin-right: 5px; } .nme-right-panel #propertyTab .textInputArea { padding-left: 5px; height: 50px; display: grid; grid-template-columns: 1fr 120px; } 
.nme-right-panel #propertyTab .textInputArea .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .textInputArea textarea { margin-right: 5px; margin-left: -50%; height: 40px; resize: none; } .nme-right-panel #propertyTab .textInputArea .value { display: flex; align-items: center; grid-column: 2; } .nme-right-panel #propertyTab .paneContainer { margin-top: 3px; display: grid; grid-template-rows: 100%; grid-template-columns: 100%; } .nme-right-panel #propertyTab .paneContainer .paneList { border-left: 3px solid transparent; } .nme-right-panel #propertyTab .paneContainer:hover .paneList { border-left: 3px solid rgba(51, 122, 183, 0.8); } .nme-right-panel #propertyTab .paneContainer:hover .paneContainer-content .header .title { border-left: 3px solid rgb(51, 122, 183); } .nme-right-panel #propertyTab .paneContainer .paneContainer-highlight-border { grid-row: 1; grid-column: 1; opacity: 1; border: 3px solid red; transition: opacity 250ms; pointer-events: none; } .nme-right-panel #propertyTab .paneContainer .paneContainer-highlight-border.transparent { opacity: 0; } .nme-right-panel #propertyTab .paneContainer .paneContainer-content { grid-row: 1; grid-column: 1; } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .header { display: grid; grid-template-columns: 1fr auto; background: #555555; height: 30px; padding-right: 5px; cursor: pointer; } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .header .title { border-left: 3px solid transparent; padding-left: 5px; grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .header .collapse { grid-column: 2; display: flex; align-items: center; justify-items: center; transform-origin: center; } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .header .collapse.closed { transform: rotate(180deg); } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .paneList > div:not(:last-child) { border-bottom: 0.5px solid rgba(255, 255, 255, 0.1); } .nme-right-panel #propertyTab .paneContainer .paneContainer-content .fragment > div:not(:last-child) { border-bottom: 0.5px solid rgba(255, 255, 255, 0.1); } .nme-right-panel #propertyTab .color-picker { height: calc(100% - 8px); margin: 4px; width: calc(100% - 8px); } .nme-right-panel #propertyTab .color-picker .color-rect { height: calc(100% - 4px); border: 2px white solid; cursor: pointer; min-height: 18px; } .nme-right-panel #propertyTab .color-picker .color-picker-cover { position: fixed; top: 0px; right: 0px; bottom: 0px; left: 0px; z-index: 1; } .nme-right-panel #propertyTab .color-picker .color-picker-float { z-index: 2; position: absolute; } .nme-right-panel #propertyTab .gradient-step { display: grid; grid-template-rows: 100%; grid-template-columns: 20px 30px 40px auto 20px 30px; padding-top: 5px; padding-left: 5px; padding-bottom: 5px; } .nme-right-panel #propertyTab .gradient-step .step { grid-row: 1; grid-column: 1; } .nme-right-panel #propertyTab .gradient-step .color { grid-row: 1; grid-column: 2; cursor: pointer; } .nme-right-panel #propertyTab .gradient-step .step-value { margin-left: 5px; grid-row: 1; grid-column: 3; text-align: right; margin-right: 5px; } .nme-right-panel #propertyTab .gradient-step .step-slider { grid-row: 1; grid-column: 4; display: grid; justify-content: stretch; align-content: center; margin-right: -5px; padding-left: 12px; } .nme-right-panel #propertyTab .gradient-step .step-slider input { width: 90%; } 
.nme-right-panel #propertyTab .gradient-step .gradient-copy { grid-row: 1; grid-column: 5; display: grid; align-content: center; justify-content: center; } .nme-right-panel #propertyTab .gradient-step .gradient-copy .img { height: 20px; width: 20px; } .nme-right-panel #propertyTab .gradient-step .gradient-copy .img:hover { cursor: pointer; } .nme-right-panel #propertyTab .gradient-step .gradient-delete { grid-row: 1; grid-column: 6; display: grid; align-content: center; justify-content: center; } .nme-right-panel #propertyTab .gradient-step .gradient-delete .img { height: 20px; width: 20px; } .nme-right-panel #propertyTab .gradient-step .gradient-delete .img:hover { cursor: pointer; } .nme-right-panel #propertyTab .floatLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr 120px; } .nme-right-panel #propertyTab .floatLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .floatLine .value { grid-column: 2; display: flex; align-items: center; } .nme-right-panel #propertyTab .floatLine .value input { width: 110px; } .nme-right-panel #propertyTab .floatLine .short { grid-column: 2; display: flex; align-items: center; } .nme-right-panel #propertyTab .floatLine .short input { width: 27px; } .nme-right-panel #propertyTab .floatLine .short input::-webkit-outer-spin-button, .nme-right-panel #propertyTab .floatLine .short input::-webkit-inner-spin-button { -webkit-appearance: none; margin: 0; } .nme-right-panel #propertyTab .floatLine .short input[type=number] { -moz-appearance: textfield; } .nme-right-panel #propertyTab .vector3Line { padding-left: 5px; display: grid; } .nme-right-panel #propertyTab .vector3Line .firstLine { display: grid; grid-template-columns: 1fr auto 20px; height: 30px; } .nme-right-panel #propertyTab .vector3Line .firstLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .vector3Line .firstLine .vector { grid-column: 2; display: flex; align-items: center; text-align: right; opacity: 0.8; } .nme-right-panel #propertyTab .vector3Line .firstLine .expand { grid-column: 3; display: grid; align-items: center; justify-items: center; cursor: pointer; } .nme-right-panel #propertyTab .vector3Line .secondLine { display: grid; padding-right: 5px; border-left: 1px solid rgb(51, 122, 183); } .nme-right-panel #propertyTab .vector3Line .secondLine .no-right-margin { margin-right: 0; } .nme-right-panel #propertyTab .vector3Line .secondLine .numeric { display: grid; grid-template-columns: 1fr auto; } .nme-right-panel #propertyTab .vector3Line .secondLine .numeric-label { text-align: right; grid-column: 1; display: flex; align-items: center; justify-self: right; margin-right: 10px; } .nme-right-panel #propertyTab .vector3Line .secondLine .numeric-value { width: 120px; grid-column: 2; display: flex; align-items: center; border: 1px solid rgb(51, 122, 183); } .nme-right-panel #propertyTab .buttonLine { height: 30px; display: grid; align-items: center; justify-items: stretch; padding-bottom: 5px; } .nme-right-panel #propertyTab .buttonLine.disabled { opacity: 0.3; } .nme-right-panel #propertyTab .buttonLine input[type=file] { display: none; } .nme-right-panel #propertyTab .buttonLine .file-upload { background: #222222; border: 1px solid rgb(51, 122, 183); margin: 5px 10px; color: white; padding: 4px 5px; padding-top: 0px; opacity: 0.9; cursor: pointer; text-align: center; } .nme-right-panel #propertyTab .buttonLine .file-upload:hover { opacity: 1; } .nme-right-panel #propertyTab 
.buttonLine .file-upload:active { transform: scale(0.98); transform-origin: 0.5 0.5; } .nme-right-panel #propertyTab .buttonLine button { background: #222222; border: 1px solid rgb(51, 122, 183); margin: 5px 10px 5px 10px; color: white; padding: 4px 5px; opacity: 0.9; } .nme-right-panel #propertyTab .buttonLine button:hover { opacity: 1; } .nme-right-panel #propertyTab .buttonLine button:active { background: #282828; } .nme-right-panel #propertyTab .buttonLine button:focus { border: 1px solid rgb(51, 122, 183); outline: 0px; } .nme-right-panel #propertyTab .checkBoxLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr auto; } .nme-right-panel #propertyTab .checkBoxLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .checkBoxLine .checkBox { grid-column: 2; display: flex; align-items: center; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .lbl { position: relative; display: block; height: 14px; width: 34px; margin-right: 5px; background: #898989; border-radius: 100px; cursor: pointer; transition: all 0.3s ease; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .lbl:after { position: absolute; left: 3px; top: 2px; display: block; width: 10px; height: 10px; border-radius: 100px; background: #fff; box-shadow: 0px 3px 3px rgba(0, 0, 0, 0.05); content: ""; transition: all 0.15s ease; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .lbl:active:after { transform: scale(1.15, 0.85); } .nme-right-panel #propertyTab .checkBoxLine .checkBox .cbx:checked ~ label { background: rgb(51, 122, 183); } .nme-right-panel #propertyTab .checkBoxLine .checkBox .cbx:checked ~ label:after { left: 20px; background: rgb(22, 73, 117); } .nme-right-panel #propertyTab .checkBoxLine .checkBox .cbx:checked ~ label.disabled { background: rgb(22, 73, 117); cursor: pointer; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .cbx:checked ~ label.disabled:after { left: 20px; background: rgb(85, 85, 85); cursor: pointer; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .cbx ~ label.disabled { background: rgb(85, 85, 85); cursor: pointer; } .nme-right-panel #propertyTab .checkBoxLine .checkBox .hidden { display: none; } .nme-right-panel #propertyTab .listLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr auto; } .nme-right-panel #propertyTab .listLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .listLine .options { grid-column: 2; display: flex; align-items: center; margin-right: 5px; } .nme-right-panel #propertyTab .listLine .options select { width: 115px; } .nme-right-panel #propertyTab .color3Line { padding-left: 5px; display: grid; } .nme-right-panel #propertyTab .color3Line .firstLine { height: 30px; display: grid; grid-template-columns: 1fr auto 0px 20px 20px; } .nme-right-panel #propertyTab .color3Line .firstLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .color3Line .firstLine .textInputLine { display: none; } .nme-right-panel #propertyTab .color3Line .firstLine .color3 { grid-column: 2; width: 50px; display: flex; align-items: center; } .nme-right-panel #propertyTab .color3Line .firstLine .color3 input { margin-right: 5px; } .nme-right-panel #propertyTab .color3Line .firstLine .copy { grid-column: 4; display: grid; align-items: center; justify-items: center; cursor: pointer; } .nme-right-panel #propertyTab .color3Line .firstLine .copy img { height: 100%; width: 24px; } .nme-right-panel 
#propertyTab .color3Line .firstLine .expand { grid-column: 5; display: grid; align-items: center; justify-items: center; cursor: pointer; } .nme-right-panel #propertyTab .color3Line .firstLine .expand img { height: 100%; width: 20px; } .nme-right-panel #propertyTab .color3Line .secondLine { display: grid; padding-right: 5px; border-left: 1px solid rgb(51, 122, 183); } .nme-right-panel #propertyTab .color3Line .secondLine .numeric { display: grid; grid-template-columns: 1fr auto; } .nme-right-panel #propertyTab .color3Line .secondLine .numeric-label { text-align: right; grid-column: 1; display: flex; align-items: center; justify-self: right; margin-right: 10px; } .nme-right-panel #propertyTab .color3Line .secondLine .numeric-value { width: 120px; grid-column: 2; display: flex; align-items: center; border: 1px solid rgb(51, 122, 183); } .nme-right-panel #propertyTab .textLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr auto; } .nme-right-panel #propertyTab .textLine .label { grid-column: 1; display: flex; align-items: center; } .nme-right-panel #propertyTab .textLine .link-value { grid-column: 2; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; text-align: end; opacity: 0.8; margin: 5px; margin-top: 6px; max-width: 140px; text-decoration: underline; cursor: pointer; } .nme-right-panel #propertyTab .textLine .value { grid-column: 2; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; text-align: end; opacity: 0.8; margin: 5px; margin-top: 6px; max-width: 200px; -webkit-user-select: text; -moz-user-select: text; -ms-user-select: text; user-select: text; } .nme-right-panel #propertyTab .textLine .value.check { color: green; } .nme-right-panel #propertyTab .textLine .value.uncheck { color: red; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/components/propertyTab/propertyTab.scss"], names: [], mappings: 
"AACI;EAEI,YAAA;EACA,mBAAA;AADR;AAGQ;EACI,YAAA;EACA,eAAA;EACA,YAAA;EACA,mBAAA;EACA,WAAA;EACA,kBAAA;EACA,aAAA;EACA,+BAAA;EACA,yBAAA;EACA,sBAAA;EACA,qBAAA;EACA,iBAAA;AADZ;AAGY;EACI,kBAAA;EACA,cAAA;EACA,WAAA;EACA,YAAA;EACA,OAAA;EACA,aAAA;EACA,kBAAA;EACA,oBAAA;AADhB;AAIY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,kBAAA;AAFhB;AAMQ;EACI,wBAAA;EACA,YAAA;EACA,WAAA;EACA,mBAAA;EACA,kBAAA;EACA,aAAA;EACA,YAAA;EACA,wBAAA;EACA,wBAAA;AAJZ;AAOQ;EACI,UAAA;AALZ;AAQQ;EACI,wBAAA;EACA,gBAAA;EACA,WAAA;EACA,YAAA;EACA,kBAAA;EACA,6BAAA;EACA,eAAA;AANZ;AASQ;EACI,WAAA;EACA,YAAA;EACA,kBAAA;EACA,6BAAA;EACA,eAAA;AAPZ;AAUQ;EACI,wBAAA;EACA,0CAAA;EACA,UAAA;EACA,WAAA;EACA,YAAA;AARZ;AAUQ;EACI,UAAA;AARZ;AAUQ;EACI,YAAA;AARZ;AAWQ;EACI,iBAtFgB;EAuFhB,YAAA;EACA,aAAA;EACA,wBAAA;EACA,oCAAA;AATZ;AAWY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAThB;AAYY;EACI,cAAA;EACA,WAAA;EACA,iBAAA;EACA,UAAA;EACA,aAAA;EACA,mBAAA;AAVhB;AAaY;EACI,cAAA;EACA,iBA7GY;AAkG5B;AAagB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAXpB;AAcgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAZpB;AAcoB;EACI,WAAA;AAZxB;AAeoB;;EAEI,wBAAA;EACA,SAAA;AAbxB;AAgBoB;EACI,0BAAA;AAdxB;AAoBQ;EACI,iBA5IgB;EA6IhB,YAAA;EACA,aAAA;EACA,qCAAA;AAlBZ;AAoBY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAlBhB;AAqBY;EACI,aAAA;EACA,mBAAA;EACA,cAAA;AAnBhB;AAqBgB;EACI,uBAAA;EACA,iBAAA;AAnBpB;AAwBQ;EACI,iBApKgB;EAqKhB,YAAA;EACA,aAAA;EACA,gCAAA;AAtBZ;AAwBY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAtBhB;AAyBY;EACI,iBAAA;EACA,iBAAA;EACA,YAAA;EACA,YAAA;AAvBhB;AA0BY;EACI,aAAA;EACA,mBAAA;EACA,cAAA;AAxBhB;AA4BQ;EACI,eAAA;EACA,aAAA;EACA,wBAAA;EACA,2BAAA;AA1BZ;AA4BY;EACI,kCAAA;AA1BhB;AA8BgB;EACI,8CAAA;AA5BpB;AAiCwB;EACI,wCAAA;AA/B5B;AAqCY;EACI,WAAA;EACA,cAAA;EACA,UAAA;EACA,qBAAA;EACA,yBAAA;EACA,oBAAA;AAnChB;AAqCgB;EACI,UAAA;AAnCpB;AAuCY;EACI,WAAA;EACA,cAAA;AArChB;AAuCgB;EACI,aAAA;EACA,+BAAA;EACA,mBAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;AArCpB;AAuCoB;EACI,kCAAA;EACA,iBAAA;EACA,cAAA;EACA,aAAA;EACA,mBAAA;AArCxB;AAwCoB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,qBAAA;EACA,wBAAA;AAtCxB;AAwCwB;EACI,yBAAA;AAtC5B;AA2CgB;EACI,mDAAA;AAzCpB;AA4CgB;EACI,mDAAA;AA1CpB;AA+CQ;EACI,wBAAA;EACA,WAAA;EACA,uBAAA;AA7CZ;AA+CY;EACI,wBAAA;EACA,uBAAA;EACA,eAAA;EACA,gBAAA;AA7ChB;AAgDY;EACI,eAAA;EACA,QAAA;EACA,UAAA;EACA,WAAA;EACA,SAAA;EACA,UAAA;AA9ChB;AAiDY;EACI,UAAA;EACA,kBAAA;AA/ChB;AAmDQ;EACI,aAAA;EACA,wBAAA;EACA,oDAAA;EACA,gBAAA;EACA,iBAAA;EACA,mBAAA;AAjDZ;AAmDY;EACI,WAAA;EACA,cAAA;AAjDhB;AAoDY;EACI,WAAA;EACA,cAAA;EACA,eAAA;AAlDhB;AAqDY;EACI,gBAAA;EACA,WAAA;EACA,cAAA;EACA,iBAAA;EACA,iBAAA;AAnDhB;AAsDY;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,wBAAA;EACA,qBAAA;EACA,kBAAA;EACA,kBAAA;AApDhB;AAsDgB;EACI,UAAA;AApDpB;AAwDY;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,qBAAA;EACA,uBAAA;AAtDhB;AAwDgB;EACI,YAAA;EACA,WAAA;AAtDpB;AAwDgB;EACI,eAAA;AAtDpB;AAyDY;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,qBAAA;EACA,uBAAA;AAvDhB;AAwDgB;EACI,YAAA;EACA,WAAA;AAtDpB;AAwDgB;EACI,eAAA;AAtDpB;AA2DQ;EACI,iBAjXgB;EAkXhB,YAAA;EACA,aAAA;EACA,gCAAA;AAzDZ;AA2DY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAzDhB;AA4DY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;AA3DhB;AA6DgB;EACI,YAAA;AA3DpB;AA+DY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;AA9DhB;AAgEgB;EACI,WAAA;AA9DpB;AAiEgB;;EAEI,wBAAA;EACA,SAAA;AA/DpB;AAkEgB;EACI,0BAAA;AAhEpB;AAqEQ;EACI,iBA9ZgB;EA+ZhB,aAAA;AAnEZ;AAqEY;EACI,aAAA;EACA,oCAAA;EACA,YAAA;AAnEhB;AAqEgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAnEpB;AAsEgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,iBAAA;EACA,YAAA;AApEpB;AAuEgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,qBAAA;EACA,eAAA;AArEpB;AAyEY;EACI,aAAA;EACA,kBAAA;EACA,wCAAA;AAvEhB;AAyEgB;EACI,eAAA;AAvEpB;AA0EgB;EACI,aAAA;EACA,+BAAA;AAxEpB;AA2EgB;EACI,iBAAA;EACA,cAAA;EACA,aAAA;EACA,mBAAA;EACA,mBAAA;EACA,kBAAA;AAzEpB;AA4EgB;EACI,YAAA;EACA,cAAA;EACA,
aAAA;EACA,mBAAA;EACA,mCAAA;AA1EpB;AA+EQ;EACI,YAAA;EACA,aAAA;EACA,mBAAA;EACA,sBAAA;EACA,mBAAA;AA7EZ;AA+EY;EACI,YAAA;AA7EhB;AAgFY;EACI,aAAA;AA9EhB;AAiFY;EACI,mBAAA;EACA,mCAAA;EACA,gBAAA;EACA,YAAA;EACA,gBAAA;EACA,gBAAA;EACA,YAAA;EACA,eAAA;EACA,kBAAA;AA/EhB;AAkFY;EACI,UAAA;AAhFhB;AAmFY;EACI,sBAAA;EACA,yBAAA;AAjFhB;AAoFY;EACI,mBAAA;EACA,mCAAA;EACA,yBAAA;EACA,YAAA;EACA,gBAAA;EACA,YAAA;AAlFhB;AAqFY;EACI,UAAA;AAnFhB;AAsFY;EACI,mBAAA;AApFhB;AAuFY;EACI,mCAAA;EACA,YAAA;AArFhB;AAyFQ;EACI,iBA1hBgB;EA2hBhB,YAAA;EACA,aAAA;EACA,+BAAA;AAvFZ;AAyFY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAvFhB;AA0FY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;AAzFhB;AA2FgB;EACI,kBAAA;EACA,cAAA;EACA,YAAA;EACA,WAAA;EACA,iBAAA;EACA,mBAAA;EACA,oBAAA;EACA,eAAA;EACA,yBAAA;AAzFpB;AA4FgB;EACI,kBAAA;EACA,SAAA;EACA,QAAA;EACA,cAAA;EACA,WAAA;EACA,YAAA;EACA,oBAAA;EACA,gBAAA;EACA,2CAAA;EACA,WAAA;EACA,0BAAA;AA1FpB;AA6FgB;EACI,4BAAA;AA3FpB;AA8FgB;EACI,6BAAA;AA5FpB;AA+FgB;EACI,UAAA;EACA,4BAAA;AA7FpB;AAgGgB;EACI,4BAAA;EACA,eAAA;AA9FpB;AAiGgB;EACI,UAAA;EACA,2BAAA;EACA,eAAA;AA/FpB;AAkGgB;EACI,2BAAA;EACA,eAAA;AAhGpB;AAmGgB;EACI,aAAA;AAjGpB;AAsGQ;EACI,iBAzmBgB;EA0mBhB,YAAA;EACA,aAAA;EACA,+BAAA;AApGZ;AAsGY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AApGhB;AAuGY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;EACA,iBAAA;AAtGhB;AAwGgB;EACI,YAAA;AAtGpB;AA2GQ;EACI,iBAloBgB;EAmoBhB,aAAA;AAzGZ;AA2GY;EACI,YAAA;EACA,aAAA;EACA,6CAAA;AAzGhB;AA2GgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAzGpB;AA4GgB;EACI,aAAA;AA1GpB;AA6GgB;EACI,cAAA;EACA,WAAA;EAEA,aAAA;EACA,mBAAA;AA5GpB;AA8GoB;EACI,iBAAA;AA5GxB;AAgHgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,qBAAA;EACA,eAAA;AA9GpB;AAgHoB;EACI,YAAA;EACA,WAAA;AA9GxB;AAkHgB;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,qBAAA;EACA,eAAA;AAhHpB;AAkHoB;EACI,YAAA;EACA,WAAA;AAhHxB;AAqHY;EACI,aAAA;EACA,kBAAA;EACA,wCAAA;AAnHhB;AAqHgB;EACI,aAAA;EACA,+BAAA;AAnHpB;AAsHgB;EACI,iBAAA;EACA,cAAA;EACA,aAAA;EACA,mBAAA;EACA,mBAAA;EACA,kBAAA;AApHpB;AAuHgB;EACI,YAAA;EACA,cAAA;EACA,aAAA;EACA,mBAAA;EACA,mCAAA;AArHpB;AA0HQ;EACI,iBAztBgB;EA0tBhB,YAAA;EACA,aAAA;EACA,+BAAA;AAxHZ;AA0HY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;AAxHhB;AA2HY;EACI,cAAA;EACA,mBAAA;EACA,uBAAA;EACA,gBAAA;EACA,eAAA;EACA,YAAA;EACA,WAAA;EACA,eAAA;EACA,gBAAA;EACA,0BAAA;EACA,eAAA;AAzHhB;AA4HY;EACI,cAAA;EACA,mBAAA;EACA,uBAAA;EACA,gBAAA;EACA,eAAA;EACA,YAAA;EACA,WAAA;EACA,eAAA;EACA,gBAAA;EACA,yBAAA;EACA,sBAAA;EACA,qBAAA;EACA,iBAAA;AA1HhB;AA4HgB;EACI,YAAA;AA1HpB;AA6HgB;EACI,UAAA;AA3HpB", sourcesContent: [`.nme-right-panel {\r #propertyTab {\r $line-padding-left: 5px;\r color: white;\r background: #333333;\r \r #header {\r height: 30px;\r font-size: 16px;\r color: white;\r background: #222222;\r grid-row: 1;\r text-align: center;\r display: grid;\r grid-template-columns: 30px 1fr;\r -webkit-user-select: none;\r -moz-user-select: none;\r -ms-user-select: none;\r user-select: none;\r \r #logo {\r position: relative;\r grid-column: 1;\r width: 24px;\r height: 24px;\r left: 0;\r display: flex;\r align-self: center;\r justify-self: center;\r }\r \r #title {\r grid-column: 2;\r display: grid;\r align-items: center;\r text-align: center;\r }\r }\r \r .range {\r -webkit-appearance: none;\r width: 120px;\r height: 6px;\r background: #d3d3d3;\r border-radius: 5px;\r outline: none;\r opacity: 0.7;\r -webkit-transition: 0.2s;\r transition: opacity 0.2s;\r }\r \r .range:hover {\r opacity: 1;\r }\r \r .range::-webkit-slider-thumb {\r -webkit-appearance: none;\r appearance: none;\r width: 14px;\r height: 14px;\r border-radius: 50%;\r background: rgb(51, 122, 183);\r cursor: pointer;\r }\r \r .range::-moz-range-thumb {\r width: 14px;\r height: 14px;\r border-radius: 50%;\r 
background: rgb(51, 122, 183);\r cursor: pointer;\r }\r \r input[type="color"] {\r -webkit-appearance: none;\r border: 1px solid rgba(255, 255, 255, 0.5);\r padding: 0;\r width: 30px;\r height: 20px;\r }\r input[type="color"]::-webkit-color-swatch-wrapper {\r padding: 0;\r }\r input[type="color"]::-webkit-color-swatch {\r border: none;\r }\r \r .sliderLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 1fr 50px auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .slider {\r grid-column: 3;\r grid-row: 1;\r margin-right: 5px;\r width: 90%;\r display: flex;\r align-items: center;\r }\r \r .floatLine {\r grid-column: 2;\r padding-left: $line-padding-left;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .short {\r grid-column: 1;\r display: flex;\r align-items: center;\r \r input {\r width: 35px;\r }\r \r input::-webkit-outer-spin-button,\r input::-webkit-inner-spin-button {\r -webkit-appearance: none;\r margin: 0;\r }\r \r input[type="number"] {\r -moz-appearance: textfield;\r }\r }\r }\r }\r \r .textInputLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr 120px auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .value {\r display: flex;\r align-items: center;\r grid-column: 2;\r \r input {\r width: calc(100% - 5px);\r margin-right: 5px;\r }\r }\r }\r \r .textInputArea {\r padding-left: $line-padding-left;\r height: 50px;\r display: grid;\r grid-template-columns: 1fr 120px;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r textarea {\r margin-right: 5px;\r margin-left: -50%;\r height: 40px;\r resize: none;\r }\r \r .value {\r display: flex;\r align-items: center;\r grid-column: 2;\r }\r }\r \r .paneContainer {\r margin-top: 3px;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 100%;\r \r .paneList {\r border-left: 3px solid transparent;\r }\r \r &:hover {\r .paneList {\r border-left: 3px solid rgba(51, 122, 183, 0.8);\r }\r \r .paneContainer-content {\r .header {\r .title {\r border-left: 3px solid rgb(51, 122, 183);\r }\r }\r }\r }\r \r .paneContainer-highlight-border {\r grid-row: 1;\r grid-column: 1;\r opacity: 1;\r border: 3px solid red;\r transition: opacity 250ms;\r pointer-events: none;\r \r &.transparent {\r opacity: 0;\r }\r }\r \r .paneContainer-content {\r grid-row: 1;\r grid-column: 1;\r \r .header {\r display: grid;\r grid-template-columns: 1fr auto;\r background: #555555;\r height: 30px;\r padding-right: 5px;\r cursor: pointer;\r \r .title {\r border-left: 3px solid transparent;\r padding-left: 5px;\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .collapse {\r grid-column: 2;\r display: flex;\r align-items: center;\r justify-items: center;\r transform-origin: center;\r \r &.closed {\r transform: rotate(180deg);\r }\r }\r }\r \r .paneList > div:not(:last-child) {\r border-bottom: 0.5px solid rgba(255, 255, 255, 0.1);\r }\r \r .fragment > div:not(:last-child) {\r border-bottom: 0.5px solid rgba(255, 255, 255, 0.1);\r }\r }\r }\r \r .color-picker {\r height: calc(100% - 8px);\r margin: 4px;\r width: calc(100% - 8px);\r \r .color-rect {\r height: calc(100% - 4px);\r border: 2px white solid;\r cursor: pointer;\r min-height: 18px;\r }\r \r .color-picker-cover {\r position: fixed;\r top: 0px;\r right: 0px;\r bottom: 0px;\r left: 0px;\r z-index: 1;\r }\r \r .color-picker-float {\r 
z-index: 2;\r position: absolute;\r }\r }\r \r .gradient-step {\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 20px 30px 40px auto 20px 30px;\r padding-top: 5px;\r padding-left: 5px;\r padding-bottom: 5px;\r \r .step {\r grid-row: 1;\r grid-column: 1;\r }\r \r .color {\r grid-row: 1;\r grid-column: 2;\r cursor: pointer;\r }\r \r .step-value {\r margin-left: 5px;\r grid-row: 1;\r grid-column: 3;\r text-align: right;\r margin-right: 5px;\r }\r \r .step-slider {\r grid-row: 1;\r grid-column: 4;\r display: grid;\r justify-content: stretch;\r align-content: center;\r margin-right: -5px;\r padding-left: 12px;\r \r input {\r width: 90%;\r }\r }\r \r .gradient-copy {\r grid-row: 1;\r grid-column: 5;\r display: grid;\r align-content: center;\r justify-content: center;\r \r .img {\r height: 20px;\r width: 20px;\r }\r .img:hover {\r cursor: pointer;\r }\r }\r .gradient-delete {\r grid-row: 1;\r grid-column: 6;\r display: grid;\r align-content: center;\r justify-content: center;\r .img {\r height: 20px;\r width: 20px;\r }\r .img:hover {\r cursor: pointer;\r }\r }\r }\r \r .floatLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr 120px;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .value {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r \r input {\r width: 110px;\r }\r }\r \r .short {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r \r input {\r width: 27px;\r }\r \r input::-webkit-outer-spin-button,\r input::-webkit-inner-spin-button {\r -webkit-appearance: none;\r margin: 0;\r }\r \r input[type="number"] {\r -moz-appearance: textfield;\r }\r }\r }\r \r .vector3Line {\r padding-left: $line-padding-left;\r display: grid;\r \r .firstLine {\r display: grid;\r grid-template-columns: 1fr auto 20px;\r height: 30px;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .vector {\r grid-column: 2;\r display: flex;\r align-items: center;\r text-align: right;\r opacity: 0.8;\r }\r \r .expand {\r grid-column: 3;\r display: grid;\r align-items: center;\r justify-items: center;\r cursor: pointer;\r }\r }\r \r .secondLine {\r display: grid;\r padding-right: 5px;\r border-left: 1px solid rgb(51, 122, 183);\r \r .no-right-margin {\r margin-right: 0;\r }\r \r .numeric {\r display: grid;\r grid-template-columns: 1fr auto;\r }\r \r .numeric-label {\r text-align: right;\r grid-column: 1;\r display: flex;\r align-items: center;\r justify-self: right;\r margin-right: 10px;\r }\r \r .numeric-value {\r width: 120px;\r grid-column: 2;\r display: flex;\r align-items: center;\r border: 1px solid rgb(51, 122, 183);\r }\r }\r }\r \r .buttonLine {\r height: 30px;\r display: grid;\r align-items: center;\r justify-items: stretch;\r padding-bottom: 5px;\r \r &.disabled {\r opacity: 0.3;\r }\r \r input[type="file"] {\r display: none;\r }\r \r .file-upload {\r background: #222222;\r border: 1px solid rgb(51, 122, 183);\r margin: 5px 10px;\r color: white;\r padding: 4px 5px;\r padding-top: 0px;\r opacity: 0.9;\r cursor: pointer;\r text-align: center;\r }\r \r .file-upload:hover {\r opacity: 1;\r }\r \r .file-upload:active {\r transform: scale(0.98);\r transform-origin: 0.5 0.5;\r }\r \r button {\r background: #222222;\r border: 1px solid rgb(51, 122, 183);\r margin: 5px 10px 5px 10px;\r color: white;\r padding: 4px 5px;\r opacity: 0.9;\r }\r \r button:hover {\r opacity: 1;\r }\r \r button:active {\r background: #282828;\r }\r \r button:focus {\r border: 1px solid rgb(51, 
122, 183);\r outline: 0px;\r }\r }\r \r .checkBoxLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .checkBox {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r \r .lbl {\r position: relative;\r display: block;\r height: 14px;\r width: 34px;\r margin-right: 5px;\r background: #898989;\r border-radius: 100px;\r cursor: pointer;\r transition: all 0.3s ease;\r }\r \r .lbl:after {\r position: absolute;\r left: 3px;\r top: 2px;\r display: block;\r width: 10px;\r height: 10px;\r border-radius: 100px;\r background: #fff;\r box-shadow: 0px 3px 3px rgba(0, 0, 0, 0.05);\r content: "";\r transition: all 0.15s ease;\r }\r \r .lbl:active:after {\r transform: scale(1.15, 0.85);\r }\r \r .cbx:checked ~ label {\r background: rgb(51, 122, 183);\r }\r \r .cbx:checked ~ label:after {\r left: 20px;\r background: rgb(22, 73, 117);\r }\r \r .cbx:checked ~ label.disabled {\r background: rgb(22, 73, 117);\r cursor: pointer;\r }\r \r .cbx:checked ~ label.disabled:after {\r left: 20px;\r background: rgb(85, 85, 85);\r cursor: pointer;\r }\r \r .cbx ~ label.disabled {\r background: rgb(85, 85, 85);\r cursor: pointer;\r }\r \r .hidden {\r display: none;\r }\r }\r }\r \r .listLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .options {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r margin-right: 5px;\r \r select {\r width: 115px;\r }\r }\r }\r \r .color3Line {\r padding-left: $line-padding-left;\r display: grid;\r \r .firstLine {\r height: 30px;\r display: grid;\r grid-template-columns: 1fr auto 0px 20px 20px;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .textInputLine {\r display: none;\r }\r \r .color3 {\r grid-column: 2;\r width: 50px;\r \r display: flex;\r align-items: center;\r \r input {\r margin-right: 5px;\r }\r }\r \r .copy {\r grid-column: 4;\r display: grid;\r align-items: center;\r justify-items: center;\r cursor: pointer;\r \r img {\r height: 100%;\r width: 24px;\r }\r }\r \r .expand {\r grid-column: 5;\r display: grid;\r align-items: center;\r justify-items: center;\r cursor: pointer;\r \r img {\r height: 100%;\r width: 20px;\r }\r }\r }\r \r .secondLine {\r display: grid;\r padding-right: 5px;\r border-left: 1px solid rgb(51, 122, 183);\r \r .numeric {\r display: grid;\r grid-template-columns: 1fr auto;\r }\r \r .numeric-label {\r text-align: right;\r grid-column: 1;\r display: flex;\r align-items: center;\r justify-self: right;\r margin-right: 10px;\r }\r \r .numeric-value {\r width: 120px;\r grid-column: 2;\r display: flex;\r align-items: center;\r border: 1px solid rgb(51, 122, 183);\r }\r }\r }\r \r .textLine {\r padding-left: $line-padding-left;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r }\r \r .link-value {\r grid-column: 2;\r white-space: nowrap;\r text-overflow: ellipsis;\r overflow: hidden;\r text-align: end;\r opacity: 0.8;\r margin: 5px;\r margin-top: 6px;\r max-width: 140px;\r text-decoration: underline;\r cursor: pointer;\r }\r \r .value {\r grid-column: 2;\r white-space: nowrap;\r text-overflow: ellipsis;\r overflow: hidden;\r text-align: end;\r opacity: 0.8;\r margin: 5px;\r margin-top: 6px;\r max-width: 200px;\r -webkit-user-select: text;\r 
-moz-user-select: text;\r -ms-user-select: text;\r user-select: text;\r \r &.check {\r color: green;\r }\r \r &.uncheck {\r color: red;\r }\r }\r }\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/main.scss": ( /*!*********************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[3].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[3].use[2]!../../../tools/nodeEditor/dist/main.scss ***! \*********************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `#node-editor-graph-root { display: grid; grid-template-rows: calc(100% - 120px) 120px; height: 100%; width: 100%; background: #464646; font: 14px "acumin-pro"; } #node-editor-graph-root.popup { grid-template-columns: 100%; overflow: hidden; } #node-editor-graph-root .wait-screen { display: grid; justify-content: center; align-content: center; height: 100%; width: 100%; background: #464646; opacity: 0.95; color: white; font: 24px "acumin-pro"; position: absolute; top: 0; left: 0; } #node-editor-graph-root .wait-screen.hidden { visibility: hidden; } #node-editor-graph-root #nmeNodeList { grid-row: 1/span 2; grid-column: 1; } #node-editor-graph-root #leftGrab { grid-row: 1/span 2; grid-column: 2; cursor: ew-resize; } #node-editor-graph-root #rightGrab { grid-row: 1/span 2; grid-column: 4; cursor: ew-resize; } #node-editor-graph-root .diagram-container { grid-row: 1; grid-column: 3; background: #5f5b60; width: 100%; height: 100%; } #node-editor-graph-root .diagram-container .diagram { display: none; width: 100%; height: 100%; } #node-editor-graph-root .nme-right-panel { grid-row: 1/span 2; grid-column: 5; display: grid; grid-template-rows: 1fr 40px auto 40px; grid-template-columns: 100%; height: 100%; overflow-y: auto; } #node-editor-graph-root #propertyTab { grid-row: 1; grid-column: 1; } #node-editor-graph-root .button { display: grid; justify-content: center; align-content: center; height: auto; width: 14.2857142857%; cursor: pointer; } #node-editor-graph-root .button:hover { background: rgb(51, 122, 183); color: white; opacity: 0.8; } #node-editor-graph-root .button.selected { background: rgb(51, 122, 183); color: white; } #node-editor-graph-root .button.align { justify-content: stretch; text-align: center; } #node-editor-graph-root #preview-mesh-bar { grid-row: 2; grid-column: 1; display: grid; grid-template-columns: auto 1fr 40px 40px 40px; align-items: center; font-size: 18px; background-color: #555555; } #node-editor-graph-root #preview-mesh-bar #file-picker { display: none; } #node-editor-graph-root 
#preview-mesh-bar .listLine { grid-column: 1; height: 40px; display: grid; grid-template-columns: 0px 1fr; } #node-editor-graph-root #preview-mesh-bar .listLine .label { grid-column: 1; display: flex; align-items: center; font-size: 14px; } #node-editor-graph-root #preview-mesh-bar .listLine .options { grid-column: 2; display: flex; align-items: center; margin-left: 5px; } #node-editor-graph-root #preview-mesh-bar .listLine .options select { width: 115px; } #node-editor-graph-root #preview-mesh-bar .button { color: #ffffff; width: 40px; height: 40px; transform-origin: 50% 50%; } #node-editor-graph-root #preview-mesh-bar .button:active { transform: scale(0.9); } #node-editor-graph-root #preview-mesh-bar .button:hover { background: #3f3461; } #node-editor-graph-root #preview-mesh-bar .button.selected { background: #9379e6; } #node-editor-graph-root #preview-mesh-bar .button img { height: 24px; width: 24px; } #node-editor-graph-root #preview-mesh-bar #play-button { grid-column: 3; } #node-editor-graph-root #preview-mesh-bar #color-picker-button { grid-column: 4; display: grid; grid-template-columns: 100%; grid-template-rows: 100%; } #node-editor-graph-root #preview-mesh-bar #color-picker-button img { height: 24px; width: 24px; } #node-editor-graph-root #preview-mesh-bar #color-picker-button #color-picker-image { padding-left: 7px; padding-top: 8px; } #node-editor-graph-root #preview-mesh-bar #color-picker-button #color-picker { transform: scale(0); grid-column: 1; grid-row: 1; } #node-editor-graph-root #preview-mesh-bar #color-picker-button #color-picker-label { width: 100%; background: transparent; cursor: pointer; } #node-editor-graph-root #preview-mesh-bar #preview-new-window { grid-column: 5; } #node-editor-graph-root #preview-mesh-bar select { background-color: #a3a3a3; color: #333333; } #node-editor-graph-root #preview-config-bar { grid-row: 4; grid-column: 1; display: grid; grid-template-columns: 40px 40px 40px 1fr 40px 40px; color: white; align-items: center; font-size: 18px; } #node-editor-graph-root #preview-config-bar.extended { grid-template-columns: 1fr; } #node-editor-graph-root #preview-config-bar .listLine { padding-left: 5px; height: 30px; display: grid; grid-template-columns: 1fr auto; } #node-editor-graph-root #preview-config-bar .listLine .label { grid-column: 1; display: flex; align-items: center; font-size: 14px; } #node-editor-graph-root #preview-config-bar .listLine .options { grid-column: 2; display: flex; align-items: center; margin-right: 5px; } #node-editor-graph-root #preview-config-bar .listLine .options select { width: 115px; } #node-editor-graph-root #preview-config-bar .button { width: 40px; grid-row: 1; height: 40px; transform-origin: 50% 50%; } #node-editor-graph-root #preview-config-bar .button:hover { background: #3f3461; } #node-editor-graph-root #preview-config-bar .button.selected { background: #9379e6; } #node-editor-graph-root #preview-config-bar .button:active { transform: scale(0.9); } #node-editor-graph-root #preview-config-bar .button img { height: auto; width: 100%; } #node-editor-graph-root #preview-config-bar .button.back-face { grid-column: 6; } #node-editor-graph-root #preview-config-bar .button.depth-pass { grid-column: 5/6; } #node-editor-graph-root #preview-config-bar .button.hemispheric-light { grid-column: 3/4; } #node-editor-graph-root #preview-config-bar .button.direction-light-1 { grid-column: 2/3; } #node-editor-graph-root #preview-config-bar .button.direction-light-0 { grid-column: 1/2; } #node-editor-graph-root #preview { 
border-top: 1px solid rgb(85, 85, 85); grid-row: 3; grid-column: 1; width: 100%; display: grid; grid-template-columns: 100%; grid-template-rows: 100%; outline: 0 !important; padding: 0 !important; user-select: none; overflow: hidden; } #node-editor-graph-root #preview #preview-canvas { width: 100%; height: 100%; outline: 0 !important; padding: 0 !important; grid-row: 1; grid-column: 1; } #node-editor-graph-root #preview .waitPanel { width: 100%; height: 100%; grid-row: 1; grid-column: 1; color: white; font-size: 18px; align-content: center; justify-content: center; background: rgba(20, 20, 20, 0.95); z-index: 10; display: grid; transition: opacity 250ms; } #node-editor-graph-root #preview .waitPanel.hidden { opacity: 0; pointer-events: none; } #node-editor-graph-root .blocker { visibility: hidden; position: absolute; width: calc(100% - 40px); height: 100%; top: 0; left: 0; background: rgba(20, 20, 20, 0.95); font-family: "acumin-pro"; color: white; font-size: 24px; display: grid; align-content: center; justify-content: center; user-select: none; padding: 20px; text-align: center; } #node-editor-graph-root #log-console { grid-row: 2; grid-column: 3; } #node-editor-graph-root .LightInformationBlock { width: 280px; } #node-editor-graph-root .InputBlock { width: 250px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/main.scss"], names: [], mappings: "AAAA;EACI,aAAA;EACA,4CAAA;EACA,YAAA;EACA,WAAA;EACA,mBAAA;EACA,uBAAA;AACJ;AACI;EACI,2BAAA;EACA,gBAAA;AACR;AAEI;EACI,aAAA;EACA,uBAAA;EACA,qBAAA;EACA,YAAA;EACA,WAAA;EACA,mBAAA;EACA,aAAA;EACA,YAAA;EACA,uBAAA;EACA,kBAAA;EACA,MAAA;EACA,OAAA;AAAR;AAEQ;EACI,kBAAA;AAAZ;AAII;EACI,kBAAA;EACA,cAAA;AAFR;AAKI;EACI,kBAAA;EACA,cAAA;EACA,iBAAA;AAHR;AAMI;EACI,kBAAA;EACA,cAAA;EACA,iBAAA;AAJR;AAOI;EACI,WAAA;EACA,cAAA;EACA,mBAAA;EACA,WAAA;EACA,YAAA;AALR;AAOQ;EACI,aAAA;EACA,WAAA;EACA,YAAA;AALZ;AASI;EACI,kBAAA;EACA,cAAA;EACA,aAAA;EACA,sCAAA;EACA,2BAAA;EACA,YAAA;EACA,gBAAA;AAPR;AAUI;EACI,WAAA;EACA,cAAA;AARR;AAWI;EACI,aAAA;EACA,uBAAA;EACA,qBAAA;EACA,YAAA;EACA,qBAAA;EACA,eAAA;AATR;AAWQ;EACI,6BAAA;EACA,YAAA;EACA,YAAA;AATZ;AAYQ;EACI,6BAAA;EACA,YAAA;AAVZ;AAaQ;EACI,wBAAA;EACA,kBAAA;AAXZ;AAeI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,8CAAA;EACA,mBAAA;EACA,eAAA;EACA,yBAAA;AAbR;AAeQ;EACI,aAAA;AAbZ;AAgBQ;EACI,cAAA;EACA,YAAA;EACA,aAAA;EACA,8BAAA;AAdZ;AAgBY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,eAAA;AAdhB;AAiBY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;EACA,gBAAA;AAhBhB;AAkBgB;EACI,YAAA;AAhBpB;AAqBQ;EACI,cAAA;EACA,WAAA;EACA,YAAA;EACA,yBAAA;AAnBZ;AAqBY;EACI,qBAAA;AAnBhB;AAsBY;EACI,mBAAA;AApBhB;AAuBY;EACI,mBAAA;AArBhB;AAwBY;EACI,YAAA;EACA,WAAA;AAtBhB;AA0BQ;EACI,cAAA;AAxBZ;AA2BQ;EACI,cAAA;EACA,aAAA;EACA,2BAAA;EACA,wBAAA;AAzBZ;AA2BY;EACI,YAAA;EACA,WAAA;AAzBhB;AA2BY;EACI,iBAAA;EACA,gBAAA;AAzBhB;AA4BY;EACI,mBAAA;EACA,cAAA;EACA,WAAA;AA1BhB;AA6BY;EACI,WAAA;EACA,uBAAA;EACA,eAAA;AA3BhB;AA+BQ;EACI,cAAA;AA7BZ;AAgCQ;EACI,yBAAA;EACA,cAAA;AA9BZ;AAkCI;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,mDAAA;EACA,YAAA;EACA,mBAAA;EACA,eAAA;AAhCR;AAkCQ;EACI,0BAAA;AAhCZ;AAmCQ;EACI,iBAAA;EACA,YAAA;EACA,aAAA;EACA,+BAAA;AAjCZ;AAmCY;EACI,cAAA;EACA,aAAA;EACA,mBAAA;EACA,eAAA;AAjChB;AAoCY;EACI,cAAA;EAEA,aAAA;EACA,mBAAA;EACA,iBAAA;AAnChB;AAqCgB;EACI,YAAA;AAnCpB;AAwCQ;EACI,WAAA;EACA,WAAA;EACA,YAAA;EACA,yBAAA;AAtCZ;AAwCY;EACI,mBAAA;AAtChB;AAyCY;EACI,mBAAA;AAvChB;AA0CY;EACI,qBAAA;AAxChB;AA2CY;EACI,YAAA;EACA,WAAA;AAzChB;AA4CY;EACI,cAAA;AA1ChB;AA6CY;EACI,gBAAA;AA3ChB;AA8CY;EACI,gBAAA;AA5ChB;AA8CY;EACI,gBAAA;AA5ChB;AA8CY;EACI,gBAAA;AA5ChB;AAiDI;EACI,qCAAA;EACA,WAAA;EACA,cAAA;EACA,WAAA;EACA,aAAA;EACA,2BAAA;EACA,wBAAA;EACA,qBA
AA;EACA,qBAAA;EACA,iBAAA;EACA,gBAAA;AA/CR;AAiDQ;EACI,WAAA;EACA,YAAA;EACA,qBAAA;EACA,qBAAA;EACA,WAAA;EACA,cAAA;AA/CZ;AAkDQ;EACI,WAAA;EACA,YAAA;EACA,WAAA;EACA,cAAA;EACA,YAAA;EACA,eAAA;EACA,qBAAA;EACA,uBAAA;EACA,kCAAA;EACA,WAAA;EACA,aAAA;EACA,yBAAA;AAhDZ;AAkDY;EACI,UAAA;EACA,oBAAA;AAhDhB;AAqDI;EACI,kBAAA;EACA,kBAAA;EACA,wBAAA;EACA,YAAA;EACA,MAAA;EACA,OAAA;EAEA,kCAAA;EACA,yBAAA;EACA,YAAA;EACA,eAAA;EAEA,aAAA;EACA,qBAAA;EACA,uBAAA;EAEA,iBAAA;EAEA,aAAA;EACA,kBAAA;AAvDR;AA0DI;EACI,WAAA;EACA,cAAA;AAxDR;AA2DI;EACI,YAAA;AAzDR;AA4DI;EACI,YAAA;AA1DR", sourcesContent: [`#node-editor-graph-root {\r display: grid;\r grid-template-rows: calc(100% - 120px) 120px;\r height: 100%;\r width: 100%;\r background: #464646;\r font: 14px "acumin-pro";\r \r &.popup {\r grid-template-columns: 100%;\r overflow: hidden;\r }\r \r .wait-screen {\r display: grid;\r justify-content: center;\r align-content: center;\r height: 100%;\r width: 100%;\r background: #464646;\r opacity: 0.95;\r color: white;\r font: 24px "acumin-pro";\r position: absolute;\r top: 0;\r left: 0;\r \r &.hidden {\r visibility: hidden;\r }\r }\r \r #nmeNodeList {\r grid-row: 1 / span 2;\r grid-column: 1;\r }\r \r #leftGrab {\r grid-row: 1 / span 2;\r grid-column: 2;\r cursor: ew-resize;\r }\r \r #rightGrab {\r grid-row: 1 / span 2;\r grid-column: 4;\r cursor: ew-resize;\r }\r \r .diagram-container {\r grid-row: 1;\r grid-column: 3;\r background: #5f5b60;\r width: 100%;\r height: 100%;\r \r .diagram {\r display: none;\r width: 100%;\r height: 100%;\r }\r }\r \r .nme-right-panel {\r grid-row: 1 / span 2;\r grid-column: 5;\r display: grid;\r grid-template-rows: 1fr 40px auto 40px;\r grid-template-columns: 100%;\r height: 100%;\r overflow-y: auto;\r }\r \r #propertyTab {\r grid-row: 1;\r grid-column: 1;\r }\r \r .button {\r display: grid;\r justify-content: center;\r align-content: center;\r height: auto;\r width: calc(100% / 7);\r cursor: pointer;\r \r &:hover {\r background: rgb(51, 122, 183);\r color: white;\r opacity: 0.8;\r }\r \r &.selected {\r background: rgb(51, 122, 183);\r color: white;\r }\r \r &.align {\r justify-content: stretch;\r text-align: center;\r }\r }\r \r #preview-mesh-bar {\r grid-row: 2;\r grid-column: 1;\r display: grid;\r grid-template-columns: auto 1fr 40px 40px 40px;\r align-items: center;\r font-size: 18px;\r background-color: #555555;\r \r #file-picker {\r display: none;\r }\r \r .listLine {\r grid-column: 1;\r height: 40px;\r display: grid;\r grid-template-columns: 0px 1fr;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r font-size: 14px;\r }\r \r .options {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r margin-left: 5px;\r \r select {\r width: 115px;\r }\r }\r }\r \r .button {\r color: #ffffff;\r width: 40px;\r height: 40px;\r transform-origin: 50% 50%;\r \r &:active {\r transform: scale(0.9);\r }\r \r &:hover {\r background: #3f3461;\r }\r \r &.selected {\r background: #9379e6;\r }\r \r img {\r height: 24px;\r width: 24px;\r }\r }\r \r #play-button {\r grid-column: 3;\r }\r \r #color-picker-button {\r grid-column: 4;\r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 100%;\r \r img {\r height: 24px;\r width: 24px;\r }\r #color-picker-image {\r padding-left: 7px;\r padding-top: 8px;\r }\r \r #color-picker {\r transform: scale(0);\r grid-column: 1;\r grid-row: 1;\r }\r \r #color-picker-label {\r width: 100%;\r background: transparent;\r cursor: pointer;\r }\r }\r \r #preview-new-window {\r grid-column: 5;\r }\r \r select {\r background-color: #a3a3a3;\r color: 
#333333;\r }\r }\r \r #preview-config-bar {\r grid-row: 4;\r grid-column: 1;\r display: grid;\r grid-template-columns: 40px 40px 40px 1fr 40px 40px;\r color: white;\r align-items: center;\r font-size: 18px;\r \r &.extended {\r grid-template-columns: 1fr;\r }\r \r .listLine {\r padding-left: 5px;\r height: 30px;\r display: grid;\r grid-template-columns: 1fr auto;\r \r .label {\r grid-column: 1;\r display: flex;\r align-items: center;\r font-size: 14px;\r }\r \r .options {\r grid-column: 2;\r \r display: flex;\r align-items: center;\r margin-right: 5px;\r \r select {\r width: 115px;\r }\r }\r }\r \r .button {\r width: 40px;\r grid-row: 1;\r height: 40px;\r transform-origin: 50% 50%;\r \r &:hover {\r background: #3f3461;\r }\r \r &.selected {\r background: #9379e6;\r }\r \r &:active {\r transform: scale(0.9);\r }\r \r img {\r height: auto;\r width: 100%;\r }\r \r &.back-face {\r grid-column: 6;\r }\r \r &.depth-pass {\r grid-column: 5 / 6;\r }\r \r &.hemispheric-light {\r grid-column: 3 / 4;\r }\r &.direction-light-1 {\r grid-column: 2 / 3;\r }\r &.direction-light-0 {\r grid-column: 1 / 2;\r }\r }\r }\r \r #preview {\r border-top: 1px solid rgb(85, 85, 85);\r grid-row: 3;\r grid-column: 1;\r width: 100%;\r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 100%;\r outline: 0 !important;\r padding: 0 !important;\r user-select: none;\r overflow: hidden;\r \r #preview-canvas {\r width: 100%;\r height: 100%;\r outline: 0 !important;\r padding: 0 !important;\r grid-row: 1;\r grid-column: 1;\r }\r \r .waitPanel {\r width: 100%;\r height: 100%;\r grid-row: 1;\r grid-column: 1;\r color: white;\r font-size: 18px;\r align-content: center;\r justify-content: center;\r background: rgba(20, 20, 20, 0.95);\r z-index: 10;\r display: grid;\r transition: opacity 250ms;\r \r &.hidden {\r opacity: 0;\r pointer-events: none;\r }\r }\r }\r \r .blocker {\r visibility: hidden;\r position: absolute;\r width: calc(100% - 40px);\r height: 100%;\r top: 0;\r left: 0;\r \r background: rgba(20, 20, 20, 0.95);\r font-family: "acumin-pro";\r color: white;\r font-size: 24px;\r \r display: grid;\r align-content: center;\r justify-content: center;\r \r user-select: none;\r \r padding: 20px;\r text-align: center;\r }\r \r #log-console {\r grid-row: 2;\r grid-column: 3;\r }\r \r .LightInformationBlock {\r width: 280px;\r }\r \r .InputBlock {\r width: 250px;\r }\r }\r `], sourceRoot: "" }]), p.locals = {}; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/components/MessageDialog.modules.scss": ( /*!*******************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/components/MessageDialog.modules.scss ***! \*******************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container { position: absolute; width: 100%; height: 100%; background: rgba(0, 0, 0, 0.6); display: grid; font-family: "acumin-pro"; top: 0; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog { align-self: center; justify-self: center; min-height: 140px; max-width: 400px; border-radius: 10px; background: white; padding: 10px; display: grid; grid-template-columns: 100%; grid-template-rows: 1fr 50px; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-message { grid-row: 1; grid-column: 1; margin-top: 20px; padding: 10px; font-size: 18px; color: black; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-buttons { grid-row: 2; grid-column: 1; display: grid; grid-template-rows: 100%; grid-template-columns: 100%; color: white; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-buttons .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-button-ok { cursor: pointer; justify-self: center; background: green; min-width: 80px; justify-content: center; display: grid; align-content: center; align-self: center; height: 35px; border-radius: 10px; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-buttons .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-button-ok:hover { opacity: 0.8; } .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-buttons .\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-button-ok.\\---------dev-sharedUiComponents-dist-components-MessageDialog-modules__error { background: red; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/components/MessageDialog.modules.scss"], names: [], mappings: 
"AAAA;EACI,kBAAA;EACA,WAAA;EACA,YAAA;EACA,8BAAA;EACA,aAAA;EACA,yBAAA;EACA,MAAA;AACJ;AACI;EACI,kBAAA;EACA,oBAAA;EACA,iBAAA;EACA,gBAAA;EACA,mBAAA;EACA,iBAAA;EACA,aAAA;EAEA,aAAA;EACA,2BAAA;EACA,4BAAA;AAAR;AAEQ;EACI,WAAA;EACA,cAAA;EACA,gBAAA;EACA,aAAA;EACA,eAAA;EACA,YAAA;AAAZ;AAGQ;EACI,WAAA;EACA,cAAA;EACA,aAAA;EACA,wBAAA;EACA,2BAAA;EACA,YAAA;AADZ;AAGY;EACI,eAAA;EACA,oBAAA;EACA,iBAAA;EACA,eAAA;EACA,uBAAA;EACA,aAAA;EACA,qBAAA;EACA,kBAAA;EACA,YAAA;EACA,mBAAA;AADhB;AAGgB;EACI,YAAA;AADpB;AAIgB;EACI,eAAA;AAFpB", sourcesContent: [`:local .dialog-container {\r position: absolute;\r width: 100%;\r height: 100%;\r background: rgba(0.1, 0.1, 0.1, 0.6);\r display: grid;\r font-family: "acumin-pro";\r top: 0;\r \r .dialog {\r align-self: center;\r justify-self: center;\r min-height: 140px;\r max-width: 400px;\r border-radius: 10px;\r background: white;\r padding: 10px;\r \r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 1fr 50px;\r \r .dialog-message {\r grid-row: 1;\r grid-column: 1;\r margin-top: 20px;\r padding: 10px;\r font-size: 18px;\r color: black;\r }\r \r .dialog-buttons {\r grid-row: 2;\r grid-column: 1;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 100%;\r color: white;\r \r .dialog-button-ok {\r cursor: pointer;\r justify-self: center;\r background: green;\r min-width: 80px;\r justify-content: center;\r display: grid;\r align-content: center;\r align-self: center;\r height: 35px;\r border-radius: 10px;\r \r &:hover {\r opacity: 0.8;\r }\r \r &.error {\r background: red;\r }\r }\r }\r }\r }\r `], sourceRoot: "" }]), p.locals = { "dialog-container": "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-container", dialog: "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog", "dialog-message": "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-message", "dialog-buttons": "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-buttons", "dialog-button-ok": "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__dialog-button-ok", error: "---------dev-sharedUiComponents-dist-components-MessageDialog-modules__error" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/common.modules.scss": ( /*!*****************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/common.modules.scss ***! \*****************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__hidden { display: none !important; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port { border-radius: 20px; width: 20px; height: 20px; align-self: center; display: grid; grid-template-columns: 100%; grid-template-rows: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__img { width: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port:hover, .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__selected { filter: brightness(2); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine { height: 24px; display: grid; grid-template-rows: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port-label { align-items: center; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__inputsContainer { grid-row: 1; grid-column: 1; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__inputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine { grid-template-columns: 12px calc(100% - 15px); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__inputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port-label { grid-row: 1; grid-column: 2; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__inputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port { grid-row: 1; grid-column: 1; transform: translateX(-12px); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__outputsContainer { grid-row: 1; grid-column: 2; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__outputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine { grid-template-columns: calc(100% - 10px) 12px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__outputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port-label { grid-row: 1; grid-column: 1; text-align: right; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__outputsContainer .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port { grid-row: 1; grid-column: 2; transform: translateX(2px); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__frame-box { position: absolute; background: rgba(72, 72, 72, 0.7); display: grid; grid-template-rows: 40px calc(100% - 40px); grid-template-columns: 100%; box-sizing: border-box; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/common.modules.scss"], names: [], mappings: 
"AAAA;EACI,wBAAA;AACJ;;AAEA;EACI,mBAAA;EACA,WAAA;EACA,YAAA;EACA,kBAAA;EACA,aAAA;EACA,2BAAA;EACA,wBAAA;AACJ;AACI;EACI,WAAA;AACR;AAEI;EAEI,qBAAA;AADR;;AAKA;EACI,YAAA;EACA,aAAA;EACA,wBAAA;AAFJ;;AAKA;EACI,mBAAA;AAFJ;;AAKA;EACI,WAAA;EACA,cAAA;AAFJ;AAII;EACI,6CAAA;AAFR;AAIQ;EACI,WAAA;EACA,cAAA;AAFZ;AAKQ;EACI,WAAA;EACA,cAAA;EACA,4BAAA;AAHZ;;AAQA;EACI,WAAA;EACA,cAAA;AALJ;AAOI;EACI,6CAAA;AALR;AAOQ;EACI,WAAA;EACA,cAAA;EACA,iBAAA;AALZ;AAQQ;EACI,WAAA;EACA,cAAA;EACA,0BAAA;AANZ;;AAWA;EACI,kBAAA;EACA,iCAAA;EACA,aAAA;EACA,0CAAA;EACA,2BAAA;EACA,sBAAA;AARJ", sourcesContent: [`.hidden {\r display: none !important;\r }\r \r .port {\r border-radius: 20px;\r width: 20px;\r height: 20px;\r align-self: center;\r display: grid;\r grid-template-columns: 100%;\r grid-template-rows: 100%;\r \r .img {\r width: 100%;\r }\r \r &:hover,\r &.selected {\r filter: brightness(2);\r }\r }\r \r .portLine {\r height: 24px;\r display: grid;\r grid-template-rows: 100%;\r }\r \r .port-label {\r align-items: center;\r }\r \r .inputsContainer {\r grid-row: 1;\r grid-column: 1;\r \r .portLine {\r grid-template-columns: 12px calc(100% - 15px);\r \r .port-label {\r grid-row: 1;\r grid-column: 2;\r }\r \r .port {\r grid-row: 1;\r grid-column: 1;\r transform: translateX(-12px);\r }\r }\r }\r \r .outputsContainer {\r grid-row: 1;\r grid-column: 2;\r \r .portLine {\r grid-template-columns: calc(100% - 10px) 12px;\r \r .port-label {\r grid-row: 1;\r grid-column: 1;\r text-align: right;\r }\r \r .port {\r grid-row: 1;\r grid-column: 2;\r transform: translateX(2px);\r }\r }\r }\r \r .frame-box {\r position: absolute;\r background: rgba(72, 72, 72, 0.7);\r display: grid;\r grid-template-rows: 40px calc(100% - 40px);\r grid-template-columns: 100%;\r box-sizing: border-box;\r }\r `], sourceRoot: "" }]), p.locals = { hidden: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__hidden", port: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port", img: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__img", selected: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__selected", portLine: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__portLine", "port-label": "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__port-label", inputsContainer: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__inputsContainer", outputsContainer: "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__outputsContainer", "frame-box": "---------dev-sharedUiComponents-dist-nodeGraphSystem-common-modules__frame-box" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphCanvas.modules.scss": ( /*!**********************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphCanvas.modules.scss ***! 
\**********************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-canvas { width: 100%; height: 100%; margin: 0; padding: 0; font: 14px "acumin-pro"; user-select: none; overflow: hidden; cursor: move; position: relative; background-image: linear-gradient(to right, #4f4e4f 1px, transparent 1px), linear-gradient(to bottom, #4f4e4f 1px, transparent 1px); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-container { width: 100%; height: 100%; left: 0; top: 0; transform-origin: left top; display: grid; grid-template-rows: 100%; grid-template-columns: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__frame-container { overflow: visible; grid-row: 1; grid-column: 1; position: relative; width: 100%; height: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-svg-container { grid-row: 1; grid-column: 1; position: relative; width: 100%; height: 100%; overflow: visible; pointer-events: none; z-index: 2; filter: drop-shadow(7px 6px 2px rgba(0, 0, 0, 0.2)); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-canvas-container { grid-row: 1; grid-column: 1; position: relative; width: 100%; height: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__selection-container { pointer-events: none; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__selection-box { z-index: 10; position: absolute; background: rgba(72, 72, 196, 0.5); border: blue solid 2px; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphCanvas.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,YAAA;EACA,SAAA;EACA,UAAA;EACA,uBAAA;EACA,iBAAA;EACA,gBAAA;EACA,YAAA;EACA,kBAAA;EACA,mIAAA;AACJ;;AAEA;EACI,WAAA;EACA,YAAA;EACA,OAAA;EACA,MAAA;EACA,0BAAA;EACA,aAAA;EACA,wBAAA;EACA,2BAAA;AACJ;;AAEA;EACI,iBAAA;EACA,WAAA;EACA,cAAA;EACA,kBAAA;EACA,WAAA;EACA,YAAA;AACJ;;AAEA;EACI,WAAA;EACA,cAAA;EACA,kBAAA;EACA,WAAA;EACA,YAAA;EACA,iBAAA;EACA,oBAAA;EACA,UAAA;EACA,mDAAA;AACJ;;AAEA;EACI,WAAA;EACA,cAAA;EACA,kBAAA;EACA,WAAA;EACA,YAAA;AACJ;;AAEA;EACI,oBAAA;AACJ;;AAEA;EACI,WAAA;EACA,kBAAA;EACA,kCAAA;EACA,sBAAA;AACJ", sourcesContent: [`.graph-canvas {\r width: 100%;\r height: 100%;\r margin: 0;\r padding: 0;\r font: 14px "acumin-pro";\r user-select: none;\r overflow: hidden;\r cursor: move;\r position: relative;\r background-image: linear-gradient(to right, #4f4e4f 1px, transparent 1px), linear-gradient(to bottom, #4f4e4f 1px, transparent 1px);\r }\r \r .graph-container {\r width: 100%;\r height: 100%;\r left: 0;\r top: 0;\r transform-origin: left top;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 100%;\r }\r \r .frame-container {\r overflow: visible;\r grid-row: 1;\r 
grid-column: 1;\r position: relative;\r width: 100%;\r height: 100%;\r }\r \r .graph-svg-container {\r grid-row: 1;\r grid-column: 1;\r position: relative;\r width: 100%;\r height: 100%;\r overflow: visible;\r pointer-events: none;\r z-index: 2;\r filter: drop-shadow(7px 6px 2px rgba(0, 0, 0, 0.2));\r }\r \r .graph-canvas-container {\r grid-row: 1;\r grid-column: 1;\r position: relative;\r width: 100%;\r height: 100%;\r }\r \r .selection-container {\r pointer-events: none;\r }\r \r .selection-box {\r z-index: 10;\r position: absolute;\r background: rgba(72, 72, 196, 0.5);\r border: blue solid 2px;\r }\r `], sourceRoot: "" }]), p.locals = { "graph-canvas": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-canvas", "graph-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-container", "frame-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__frame-container", "graph-svg-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-svg-container", "graph-canvas-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__graph-canvas-container", "selection-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__selection-container", "selection-box": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphCanvas-modules__selection-box" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphFrame.modules.scss": ( /*!*********************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphFrame.modules.scss ***! \*********************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__port-container { margin-top: 6px; margin-bottom: 6px; margin-left: 4px; margin-right: 4px; color: white; grid-row: 2; grid-column: 1; display: grid; grid-template-rows: 100%; grid-template-columns: 50% 50%; z-index: 2; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-border { grid-row: 1/span 2; grid-column: 1; width: 100%; height: 100%; border: transparent solid 4px; pointer-events: none; box-sizing: border-box; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-comments { display: grid; grid-row: 2; grid-column: 1; padding: 0 10px; font-style: italic; word-wrap: break-word; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__selected.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-border { border-color: white; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header { grid-row: 1; grid-column: 1; background: rgb(72, 72, 72); color: white; text-align: center; display: grid; grid-template-rows: 100%; grid-template-columns: calc(100% - 74px) 30px 7px 30px 7px; align-content: center; overflow: hidden; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-button { cursor: pointer; align-self: center; transform-origin: 50% 50%; transform: scale(1); stroke: transparent; fill: white; display: grid; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-button.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__down { transform: scale(0.9); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-collapse { grid-column: 2; grid-row: 1; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-close { grid-column: 4; grid-row: 1; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-title { grid-column: 1; grid-row: 1; display: grid; height: 100%; width: 100%; align-self: stretch; align-items: center; margin-top: -2px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__right-handle { grid-area: 1/2/3/2; width: 4px; background-color: transparent; cursor: ew-resize; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__right-handle::after { content: ""; width: 8px; position: absolute; top: 0; bottom: 0; margin-left: -4px; cursor: ew-resize; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-right-corner-handle { background-color: transparent; height: 4px; z-index: 21; cursor: ne-resize; width: 4px; margin-left: -6px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-right-corner-handle::after { background-color: transparent; cursor: ne-resize; margin-left: unset; top: -4px; height: 10px; width: 10px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-right-corner-handle { background-color: transparent; height: 0px; z-index: 21; cursor: nw-resize; grid-area: 4/2/4/2; margin-left: -2px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-right-corner-handle::after { background-color: 
transparent; height: 10px; cursor: nw-resize; top: unset; bottom: -4px; width: 10px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__left-handle { grid-area: 1/1/3/1; width: 4px; background-color: transparent; cursor: ew-resize; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__left-handle::before { content: ""; width: 8px; position: absolute; top: 0; bottom: 0; margin-left: -4px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-left-corner-handle { background-color: transparent; height: 4px; z-index: 21; cursor: nw-resize; width: 4px; margin-left: -4px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-left-corner-handle::before { background-color: transparent; cursor: nw-resize; margin-left: unset; top: -4px; height: 10px; width: 10px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-left-corner-handle { background-color: transparent; height: 0px; z-index: 21; cursor: sw-resize; grid-area: 4/1/4/1; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-left-corner-handle::before { background-color: transparent; height: 10px; cursor: sw-resize; top: unset; bottom: -4px; width: 10px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-handle { grid-area: 1/1/1/1; background-color: transparent; height: 4px; cursor: ns-resize; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-handle::before { content: ""; width: 100%; position: absolute; top: -4px; bottom: 100%; right: 0; left: 0; margin-bottom: -8px; cursor: ns-resize; height: 8px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-handle { grid-area: 3/1/3/1; background-color: transparent; height: 4px; cursor: ns-resize; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-handle::after { content: ""; width: 100%; position: absolute; top: 100%; bottom: 0; right: 0; left: 0; margin-top: -8px; cursor: ns-resize; height: 12px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__expanded { font-size: 24px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__collapsed { height: auto !important; width: 200px !important; z-index: 3; font-size: 16px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__collapsedHeader { grid-template-columns: calc(100% - 37px) 30px 7px; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphFrame.modules.scss"], names: [], mappings: 
"AAAA;EACI,eAAA;EACA,kBAAA;EACA,gBAAA;EACA,iBAAA;EACA,YAAA;EACA,WAAA;EACA,cAAA;EACA,aAAA;EACA,wBAAA;EACA,8BAAA;EACA,UAAA;AACJ;;AAEA;EACI,kBAAA;EACA,cAAA;EACA,WAAA;EACA,YAAA;EACA,6BAAA;EACA,oBAAA;EACA,sBAAA;AACJ;;AAEA;EACI,aAAA;EACA,WAAA;EACA,cAAA;EACA,eAAA;EACA,kBAAA;EACA,qBAAA;AACJ;;AAEA;EACI,mBAAA;AACJ;;AAEA;EACI,WAAA;EACA,cAAA;EACA,2BAAA;EACA,YAAA;EACA,kBAAA;EACA,aAAA;EACA,wBAAA;EACA,0DAAA;EACA,qBAAA;EACA,gBAAA;AACJ;;AAEA;EACI,eAAA;EACA,kBAAA;EACA,yBAAA;EACA,mBAAA;EACA,mBAAA;EACA,WAAA;EACA,aAAA;AACJ;AACI;EACI,qBAAA;AACR;;AAGA;EACI,cAAA;EACA,WAAA;AAAJ;;AAGA;EACI,cAAA;EACA,WAAA;AAAJ;;AAGA;EACI,cAAA;EACA,WAAA;EACA,aAAA;EACA,YAAA;EACA,WAAA;EACA,mBAAA;EACA,mBAAA;EACA,gBAAA;AAAJ;;AAGA;EACI,kBAAA;EACA,UAAA;EACA,6BAAA;EACA,iBAAA;AAAJ;AAEI;EACI,WAAA;EACA,UAAA;EACA,kBAAA;EACA,MAAA;EACA,SAAA;EACA,iBAAA;EACA,iBAAA;AAAR;;AAIA;EACI,6BAAA;EACA,WAAA;EACA,WAAA;EACA,iBAAA;EACA,UAAA;EACA,iBAAA;AADJ;AAGI;EACI,6BAAA;EACA,iBAAA;EACA,kBAAA;EACA,SAAA;EACA,YAAA;EACA,WAAA;AADR;;AAKA;EACI,6BAAA;EACA,WAAA;EACA,WAAA;EACA,iBAAA;EACA,kBAAA;EACA,iBAAA;AAFJ;AAII;EACI,6BAAA;EACA,YAAA;EACA,iBAAA;EACA,UAAA;EACA,YAAA;EACA,WAAA;AAFR;;AAMA;EACI,kBAAA;EACA,UAAA;EACA,6BAAA;EACA,iBAAA;AAHJ;AAKI;EACI,WAAA;EACA,UAAA;EACA,kBAAA;EACA,MAAA;EACA,SAAA;EACA,iBAAA;AAHR;;AAOA;EACI,6BAAA;EACA,WAAA;EACA,WAAA;EACA,iBAAA;EACA,UAAA;EACA,iBAAA;AAJJ;AAMI;EACI,6BAAA;EACA,iBAAA;EACA,kBAAA;EACA,SAAA;EACA,YAAA;EACA,WAAA;AAJR;;AAQA;EACI,6BAAA;EACA,WAAA;EACA,WAAA;EACA,iBAAA;EACA,kBAAA;AALJ;AAOI;EACI,6BAAA;EACA,YAAA;EACA,iBAAA;EACA,UAAA;EACA,YAAA;EACA,WAAA;AALR;;AASA;EACI,kBAAA;EACA,6BAAA;EACA,WAAA;EACA,iBAAA;AANJ;AAQI;EACI,WAAA;EACA,WAAA;EACA,kBAAA;EACA,SAAA;EACA,YAAA;EACA,QAAA;EACA,OAAA;EACA,mBAAA;EACA,iBAAA;EACA,WAAA;AANR;;AAUA;EACI,kBAAA;EACA,6BAAA;EACA,WAAA;EACA,iBAAA;AAPJ;AASI;EACI,WAAA;EACA,WAAA;EACA,kBAAA;EACA,SAAA;EACA,SAAA;EACA,QAAA;EACA,OAAA;EACA,gBAAA;EACA,iBAAA;EACA,YAAA;AAPR;;AAWA;EACI,eAAA;AARJ;;AAWA;EACI,uBAAA;EACA,uBAAA;EACA,UAAA;EACA,eAAA;AARJ;;AAWA;EACI,iDAAA;AARJ", sourcesContent: [`.port-container {\r margin-top: 6px;\r margin-bottom: 6px;\r margin-left: 4px;\r margin-right: 4px;\r color: white;\r grid-row: 2;\r grid-column: 1;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: 50% 50%;\r z-index: 2;\r }\r \r .frame-box-border {\r grid-row: 1 / span 2;\r grid-column: 1;\r width: 100%;\r height: 100%;\r border: transparent solid 4px;\r pointer-events: none;\r box-sizing: border-box;\r }\r \r .frame-comments {\r display: grid;\r grid-row: 2;\r grid-column: 1;\r padding: 0 10px;\r font-style: italic;\r word-wrap: break-word;\r }\r \r .selected.frame-box-border {\r border-color: white;\r }\r \r .frame-box-header {\r grid-row: 1;\r grid-column: 1;\r background: rgba(72, 72, 72, 1);\r color: white;\r text-align: center;\r display: grid;\r grid-template-rows: 100%;\r grid-template-columns: calc(100% - 74px) 30px 7px 30px 7px;\r align-content: center;\r overflow: hidden;\r }\r \r .frame-box-header-button {\r cursor: pointer;\r align-self: center;\r transform-origin: 50% 50%;\r transform: scale(1);\r stroke: transparent;\r fill: white;\r display: grid;\r \r &.down {\r transform: scale(0.9);\r }\r }\r \r .frame-box-header-collapse {\r grid-column: 2;\r grid-row: 1;\r }\r \r .frame-box-header-close {\r grid-column: 4;\r grid-row: 1;\r }\r \r .frame-box-header-title {\r grid-column: 1;\r grid-row: 1;\r display: grid;\r height: 100%;\r width: 100%;\r align-self: stretch;\r align-items: center;\r margin-top: -2px;\r }\r \r .right-handle {\r grid-area: 1 / 2 / 3 / 2;\r width: 4px;\r background-color: transparent;\r cursor: 
ew-resize;\r \r &::after {\r content: "";\r width: 8px;\r position: absolute;\r top: 0;\r bottom: 0;\r margin-left: -4px;\r cursor: ew-resize;\r }\r }\r \r .top-right-corner-handle {\r background-color: transparent;\r height: 4px;\r z-index: 21;\r cursor: ne-resize;\r width: 4px;\r margin-left: -6px;\r \r &::after {\r background-color: transparent;\r cursor: ne-resize;\r margin-left: unset;\r top: -4px;\r height: 10px;\r width: 10px;\r }\r }\r \r .bottom-right-corner-handle {\r background-color: transparent;\r height: 0px;\r z-index: 21;\r cursor: nw-resize;\r grid-area: 4 / 2 / 4 / 2;\r margin-left: -2px;\r \r &::after {\r background-color: transparent;\r height: 10px;\r cursor: nw-resize;\r top: unset;\r bottom: -4px;\r width: 10px;\r }\r }\r \r .left-handle {\r grid-area: 1 / 1 / 3 / 1;\r width: 4px;\r background-color: transparent;\r cursor: ew-resize;\r \r &::before {\r content: "";\r width: 8px;\r position: absolute;\r top: 0;\r bottom: 0;\r margin-left: -4px;\r }\r }\r \r .top-left-corner-handle {\r background-color: transparent;\r height: 4px;\r z-index: 21;\r cursor: nw-resize;\r width: 4px;\r margin-left: -4px;\r \r &::before {\r background-color: transparent;\r cursor: nw-resize;\r margin-left: unset;\r top: -4px;\r height: 10px;\r width: 10px;\r }\r }\r \r .bottom-left-corner-handle {\r background-color: transparent;\r height: 0px;\r z-index: 21;\r cursor: sw-resize;\r grid-area: 4 / 1 / 4 / 1;\r \r &::before {\r background-color: transparent;\r height: 10px;\r cursor: sw-resize;\r top: unset;\r bottom: -4px;\r width: 10px;\r }\r }\r \r .top-handle {\r grid-area: 1 / 1 / 1 / 1;\r background-color: transparent;\r height: 4px;\r cursor: ns-resize;\r \r &::before {\r content: "";\r width: 100%;\r position: absolute;\r top: -4px;\r bottom: 100%;\r right: 0;\r left: 0;\r margin-bottom: -8px;\r cursor: ns-resize;\r height: 8px;\r }\r }\r \r .bottom-handle {\r grid-area: 3 / 1 / 3 / 1;\r background-color: transparent;\r height: 4px;\r cursor: ns-resize;\r \r &::after {\r content: "";\r width: 100%;\r position: absolute;\r top: 100%;\r bottom: 0;\r right: 0;\r left: 0;\r margin-top: -8px;\r cursor: ns-resize;\r height: 12px;\r }\r }\r \r .expanded {\r font-size: 24px;\r }\r \r .collapsed {\r height: auto !important;\r width: 200px !important;\r z-index: 3;\r font-size: 16px;\r }\r \r .collapsedHeader {\r grid-template-columns: calc(100% - 37px) 30px 7px;\r }\r `], sourceRoot: "" }]), p.locals = { "port-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__port-container", "frame-box-border": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-border", "frame-comments": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-comments", selected: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__selected", "frame-box-header": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header", "frame-box-header-button": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-button", down: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__down", "frame-box-header-collapse": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-collapse", "frame-box-header-close": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-close", "frame-box-header-title": 
"---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__frame-box-header-title", "right-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__right-handle", "top-right-corner-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-right-corner-handle", "bottom-right-corner-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-right-corner-handle", "left-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__left-handle", "top-left-corner-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-left-corner-handle", "bottom-left-corner-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-left-corner-handle", "top-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__top-handle", "bottom-handle": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__bottom-handle", expanded: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__expanded", collapsed: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__collapsed", collapsedHeader: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphFrame-modules__collapsedHeader" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphNode.modules.scss": ( /*!********************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphNode.modules.scss ***! \********************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__visual { z-index: 4; width: 200px; position: absolute; left: 0; top: 0; background: gray; border: 4px solid black; border-radius: 12px; display: grid; grid-template-rows: 30px auto; grid-template-columns: 100%; color: white; box-shadow: 7px 6px 2px rgba(0, 0, 0, 0.2); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__highlighted { animation: \\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__glow 0.5s infinite alternate; } @keyframes \\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__glow { to { border-color: white; } } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__header-container { grid-row: 1; grid-column: 1; position: relative; border: 4px solid black; border-top-right-radius: 7px; border-top-left-radius: 7px; background: black; color: white; transform: scaleX(1.01) translateY(-0.5px); transform-origin: center; display: grid; grid-template-columns: 1fr auto; grid-template-rows: 100%; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__headerIcon { z-index: 10; align-self: center; user-select: none; pointer-events: none; width: 20px; display: grid; margin-left: 6px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__header { font-size: 16px; text-align: center; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__headerWithIcon { margin-left: calc(20px + 6px); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__connections { grid-row: 2; grid-column: 1; display: grid; grid-template-columns: 50% 50%; transform: scale(1); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__content { min-height: 20px; grid-row: 3; grid-column: 1; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__comments { position: absolute; top: -50px; width: 200px; height: 45px; overflow: hidden; font-style: italic; opacity: 0.8; display: grid; align-items: flex-end; pointer-events: none; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__executionTime { position: absolute; bottom: 1px; width: 195px; height: 20px; overflow: hidden; font-size: 11px; opacity: 0.5; display: grid; align-items: flex-end; justify-content: end; pointer-events: none; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__selected { border-color: white !important; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/graphNode.modules.scss"], names: [], mappings: 
"AAAA;EACI,UAAA;EACA,YAAA;EACA,kBAAA;EACA,OAAA;EACA,MAAA;EACA,gBAAA;EACA,uBAAA;EACA,mBAAA;EACA,aAAA;EACA,6BAAA;EACA,2BAAA;EACA,YAAA;EACA,0CAAA;AACJ;;AAEA;EAMI,gHAAA;AAJJ;AADI;EACI;IACI,mBAAA;EAGV;AACF;;AAEA;EACI,WAAA;EACA,cAAA;EACA,kBAAA;EACA,uBAAA;EACA,4BAAA;EACA,2BAAA;EACA,iBAAA;EACA,YAAA;EACA,0CAAA;EACA,wBAAA;EACA,aAAA;EACA,+BAAA;EACA,wBAAA;AACJ;;AAKA;EACI,WAAA;EACA,kBAAA;EACA,iBAAA;EACA,oBAAA;EACA,WARO;EASP,aAAA;EACA,gBATS;AAOb;;AAKA;EACI,eAAA;EACA,kBAAA;EACA,mBAAA;EACA,uBAAA;EACA,gBAAA;AAFJ;;AAKA;EACI,6BAAA;AAFJ;;AAKA;EACI,WAAA;EACA,cAAA;EAEA,aAAA;EACA,8BAAA;EACA,mBAAA;AAHJ;;AAMA;EACI,gBAAA;EACA,WAAA;EACA,cAAA;AAHJ;;AAMA;EACI,kBAAA;EACA,UAAA;EACA,YAAA;EACA,YAAA;EACA,gBAAA;EACA,kBAAA;EACA,YAAA;EACA,aAAA;EACA,qBAAA;EACA,oBAAA;AAHJ;;AAMA;EACI,kBAAA;EACA,WAAA;EACA,YAAA;EACA,YAAA;EACA,gBAAA;EACA,eAAA;EACA,YAAA;EACA,aAAA;EACA,qBAAA;EACA,oBAAA;EACA,oBAAA;AAHJ;;AAMA;EACI,8BAAA;AAHJ", sourcesContent: [`.visual {\r z-index: 4;\r width: 200px;\r position: absolute;\r left: 0;\r top: 0;\r background: gray;\r border: 4px solid black;\r border-radius: 12px;\r display: grid;\r grid-template-rows: 30px auto;\r grid-template-columns: 100%;\r color: white;\r box-shadow: 7px 6px 2px rgba(0, 0, 0, 0.2);\r }\r \r .highlighted {\r @keyframes glow {\r to {\r border-color: white;\r }\r }\r animation: glow 0.5s infinite alternate;\r }\r \r .header-container {\r grid-row: 1;\r grid-column: 1;\r position: relative;\r border: 4px solid black;\r border-top-right-radius: 7px;\r border-top-left-radius: 7px;\r background: black;\r color: white;\r transform: scaleX(1.01) translateY(-0.5px);\r transform-origin: center;\r display: grid;\r grid-template-columns: 1fr auto;\r grid-template-rows: 100%;\r }\r \r $iconSize: 20px;\r $iconMargin: 6px;\r \r .headerIcon {\r z-index: 10;\r align-self: center;\r user-select: none;\r pointer-events: none;\r width: $iconSize;\r display: grid;\r margin-left: $iconMargin;\r }\r \r .header {\r font-size: 16px;\r text-align: center;\r white-space: nowrap;\r text-overflow: ellipsis;\r overflow: hidden;\r }\r \r .headerWithIcon {\r margin-left: calc(#{$iconSize} + #{$iconMargin});\r }\r \r .connections {\r grid-row: 2;\r grid-column: 1;\r \r display: grid;\r grid-template-columns: 50% 50%;\r transform: scale(1);\r }\r \r .content {\r min-height: 20px;\r grid-row: 3;\r grid-column: 1;\r }\r \r .comments {\r position: absolute;\r top: -50px;\r width: 200px;\r height: 45px;\r overflow: hidden;\r font-style: italic;\r opacity: 0.8;\r display: grid;\r align-items: flex-end;\r pointer-events: none;\r }\r \r .executionTime {\r position: absolute;\r bottom: 1px;\r width: 195px;\r height: 20px;\r overflow: hidden;\r font-size: 11px;\r opacity: 0.5;\r display: grid;\r align-items: flex-end;\r justify-content: end;\r pointer-events: none;\r }\r \r .selected {\r border-color: white !important;\r }\r `], sourceRoot: "" }]), p.locals = { visual: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__visual", highlighted: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__highlighted", glow: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__glow", "header-container": "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__header-container", headerIcon: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__headerIcon", header: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__header", headerWithIcon: 
"---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__headerWithIcon", connections: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__connections", content: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__content", comments: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__comments", executionTime: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__executionTime", selected: "---------dev-sharedUiComponents-dist-nodeGraphSystem-graphNode-modules__selected" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodeLink.modules.scss": ( /*!*******************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodeLink.modules.scss ***! \*******************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__link { stroke-width: 4px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__link.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selected { stroke: white !important; stroke-dasharray: 10, 2; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__target-candidate { filter: brightness(1.5); stroke-width: 6px; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selection-link { pointer-events: all; stroke-width: 16px; opacity: 0; transition: opacity 75ms; stroke: transparent; cursor: pointer; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selection-link:hover, .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selection-link.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selected { stroke: white !important; opacity: 0.4; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodeLink.modules.scss"], names: [], mappings: "AAAA;EACI,iBAAA;AACJ;AAAI;EACI,wBAAA;EACA,uBAAA;AAER;;AAEA;EACI,uBAAA;EACA,iBAAA;AACJ;;AAEA;EACI,mBAAA;EACA,kBAAA;EACA,UAAA;EACA,wBAAA;EACA,mBAAA;EACA,eAAA;AACJ;AACI;EAEI,wBAAA;EACA,YAAA;AAAR", sourcesContent: [`.link {\r stroke-width: 4px;\r &.selected {\r stroke: white !important;\r stroke-dasharray: 10, 2;\r }\r }\r \r .target-candidate {\r filter: brightness(1.5);\r stroke-width: 6px;\r }\r \r .selection-link {\r pointer-events: all;\r stroke-width: 16px;\r opacity: 0;\r transition: opacity 75ms;\r stroke: transparent;\r cursor: pointer;\r \r &:hover,\r &.selected {\r stroke: white !important;\r opacity: 0.4;\r }\r }\r `], sourceRoot: "" }]), p.locals = { link: "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__link", selected: "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selected", "target-candidate": "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__target-candidate", "selection-link": "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodeLink-modules__selection-link" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodePort.modules.scss": ( /*!*******************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodePort.modules.scss ***! \*******************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__selected { filter: brightness(2); } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__pip { background: green; width: 6px; height: 6px; grid-row: 1; grid-column: 1; align-self: center; justify-self: center; border-radius: 7px; border: 2px solid black; } .\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__pip.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__hidden { display: none; } img { grid-row: 1; grid-column: 1; } img.\\---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__selected { box-shadow: 0 0 0 2px; border-radius: 50%; }`, "", { version: 3, sources: ["webpack://./../../../dev/sharedUiComponents/dist/nodeGraphSystem/nodePort.modules.scss"], names: [], mappings: "AAAA;EACI,qBAAA;AACJ;;AAEA;EACI,iBAAA;EACA,UAAA;EACA,WAAA;EACA,WAAA;EACA,cAAA;EACA,kBAAA;EACA,oBAAA;EACA,kBAAA;EACA,uBAAA;AACJ;AACI;EACI,aAAA;AACR;;AAGA;EACI,WAAA;EACA,cAAA;AAAJ;;AAGA;EACI,qBAAA;EACA,kBAAA;AAAJ", sourcesContent: [`.selected {\r filter: brightness(2);\r }\r \r .pip {\r background: green;\r width: 6px;\r height: 6px;\r grid-row: 1;\r grid-column: 1;\r align-self: center;\r justify-self: center;\r border-radius: 7px;\r border: 2px solid black;\r \r &.hidden {\r display: none;\r }\r }\r \r img {\r grid-row: 1;\r grid-column: 1;\r }\r \r img.selected {\r box-shadow: 0 0 0 2px;\r border-radius: 50%;\r }\r `], sourceRoot: "" }]), p.locals = { selected: "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__selected", pip: "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__pip", hidden: "---------dev-sharedUiComponents-dist-nodeGraphSystem-nodePort-modules__hidden" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/blockNodeData.modules.scss": ( /*!**************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/blockNodeData.modules.scss ***! \**************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-blockNodeData-modules__hidden { display: none !important; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/blockNodeData.modules.scss"], names: [], mappings: "AAAA;EACI,wBAAA;AACJ", sourcesContent: [`.hidden {\r display: none !important;\r }\r `], sourceRoot: "" }]), p.locals = { hidden: "---------tools-nodeEditor-dist-graphSystem-blockNodeData-modules__hidden" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/clampDisplayManager.modules.scss": ( /*!****************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/clampDisplayManager.modules.scss ***! \****************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-clampDisplayManager-modules__clampBlock { grid-row: 2; height: 34px; text-align: center; font-size: 18px; font-weight: bold; margin: 0 10px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/clampDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;EACA,iBAAA;EACA,cAAA;AACJ", sourcesContent: [`.clampBlock {\r grid-row: 2;\r height: 34px;\r text-align: center;\r font-size: 18px;\r font-weight: bold;\r margin: 0 10px;\r }\r `], sourceRoot: "" }]), p.locals = { clampBlock: "---------tools-nodeEditor-dist-graphSystem-display-clampDisplayManager-modules__clampBlock" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/common.modules.scss": ( /*!***************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/common.modules.scss ***! \***************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-common-modules__texture-block { grid-row: 2; height: 140px; width: 140px; overflow: hidden; border-bottom-left-radius: 7px; border: black 4px solid; border-left: 0px; border-bottom: 0px; } .\\---------tools-nodeEditor-dist-graphSystem-display-common-modules__texture-block img { width: 100%; height: 100%; pointer-events: none; } .\\---------tools-nodeEditor-dist-graphSystem-display-common-modules__texture-block img.\\---------tools-nodeEditor-dist-graphSystem-display-common-modules__empty { display: none; } .\\---------tools-nodeEditor-dist-graphSystem-display-common-modules__empty { display: none; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/common.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,aAAA;EACA,YAAA;EACA,gBAAA;EACA,8BAAA;EACA,uBAAA;EACA,gBAAA;EACA,kBAAA;AACJ;AACI;EACI,WAAA;EACA,YAAA;EACA,oBAAA;AACR;AACQ;EACI,aAAA;AACZ;;AAIA;EACI,aAAA;AADJ", sourcesContent: [`.texture-block {\r grid-row: 2;\r height: 140px;\r width: 140px;\r overflow: hidden;\r border-bottom-left-radius: 7px;\r border: black 4px solid;\r border-left: 0px;\r border-bottom: 0px;\r \r img {\r width: 100%;\r height: 100%;\r pointer-events: none;\r \r &.empty {\r display: none;\r }\r }\r }\r \r .empty {\r display: none;\r }\r `], sourceRoot: "" }]), p.locals = { "texture-block": "---------tools-nodeEditor-dist-graphSystem-display-common-modules__texture-block", empty: "---------tools-nodeEditor-dist-graphSystem-display-common-modules__empty" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/curveDisplayManager.modules.scss": ( /*!****************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/curveDisplayManager.modules.scss ***! \****************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-curveDisplayManager-modules__curve-block { grid-row: 2; height: 34px; text-align: center; font-size: 18px; font-weight: bold; margin: 0 10px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/curveDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;EACA,iBAAA;EACA,cAAA;AACJ", sourcesContent: [`.curve-block {\r grid-row: 2;\r height: 34px;\r text-align: center;\r font-size: 18px;\r font-weight: bold;\r margin: 0 10px;\r }\r `], sourceRoot: "" }]), p.locals = { "curve-block": "---------tools-nodeEditor-dist-graphSystem-display-curveDisplayManager-modules__curve-block" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/elbowDisplayManager.modules.scss": ( /*!****************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/elbowDisplayManager.modules.scss ***! \****************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__elbowBlock { width: 40px; grid-template-rows: 0px 40px 0px; border-radius: 40px; transform: translateY(-7px); } .\\---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__hidden { display: none; } .\\---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__translatedConnections { transform: translateY(7px); } .\\---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__roundSelectionBorder { border-radius: 40px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/elbowDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,gCAAA;EACA,mBAAA;EACA,2BAAA;AACJ;;AAEA;EACI,aAAA;AACJ;;AAEA;EACI,0BAAA;AACJ;;AAEA;EACI,mBAAA;AACJ", sourcesContent: [`.elbowBlock {\r width: 40px;\r grid-template-rows: 0px 40px 0px;\r border-radius: 40px;\r transform: translateY(-7px);\r }\r \r .hidden {\r display: none;\r }\r \r .translatedConnections {\r transform: translateY(7px);\r }\r \r .roundSelectionBorder {\r border-radius: 40px;\r }\r `], sourceRoot: "" }]), p.locals = { elbowBlock: "---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__elbowBlock", hidden: "---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__hidden", translatedConnections: "---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__translatedConnections", roundSelectionBorder: "---------tools-nodeEditor-dist-graphSystem-display-elbowDisplayManager-modules__roundSelectionBorder" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/gradientDisplayManager.modules.scss": ( /*!*******************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/gradientDisplayManager.modules.scss ***! \*******************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-gradientDisplayManager-modules__gradientBlock { grid-row: 2; height: 34px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/gradientDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,YAAA;AACJ", sourcesContent: [`.gradientBlock {\r grid-row: 2;\r height: 34px;\r }\r `], sourceRoot: "" }]), p.locals = { gradientBlock: "---------tools-nodeEditor-dist-graphSystem-display-gradientDisplayManager-modules__gradientBlock" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/imageSourceDisplayManager.modules.scss": ( /*!**********************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/imageSourceDisplayManager.modules.scss ***! \**********************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-imageSourceDisplayManager-modules__image-source-block { margin-top: 5px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/imageSourceDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,eAAA;AACJ", sourcesContent: [`.image-source-block {\r margin-top: 5px;\r }\r `], sourceRoot: "" }]), p.locals = { "image-source-block": "---------tools-nodeEditor-dist-graphSystem-display-imageSourceDisplayManager-modules__image-source-block" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/inputDisplayManager.modules.scss": ( /*!****************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/inputDisplayManager.modules.scss ***! 
\****************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__input-block { grid-row: 2; min-height: 34px; text-align: center; font-size: 18px; font-weight: bold; margin: 0 10px 5px; display: grid; align-content: center; } .\\---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__input-block.\\---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__small-font { font-size: 17px; } .\\---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__constant { border-color: #464348; background: #464348; } .\\---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__inspector { border-color: #66491b; background: #66491b; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/inputDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,gBAAA;EACA,kBAAA;EACA,eAAA;EACA,iBAAA;EACA,kBAAA;EACA,aAAA;EACA,qBAAA;AACJ;AACI;EACI,eAAA;AACR;;AAGA;EACI,qBAAA;EACA,mBAAA;AAAJ;;AAGA;EACI,qBAAA;EACA,mBAAA;AAAJ", sourcesContent: [`.input-block {\r grid-row: 2;\r min-height: 34px;\r text-align: center;\r font-size: 18px;\r font-weight: bold;\r margin: 0 10px 5px;\r display: grid;\r align-content: center;\r \r &.small-font {\r font-size: 17px;\r }\r }\r \r .constant {\r border-color: #464348;\r background: #464348;\r }\r \r .inspector {\r border-color: #66491b;\r background: #66491b;\r }\r `], sourceRoot: "" }]), p.locals = { "input-block": "---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__input-block", "small-font": "---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__small-font", constant: "---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__constant", inspector: "---------tools-nodeEditor-dist-graphSystem-display-inputDisplayManager-modules__inspector" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/remapDisplayManager.modules.scss": ( /*!****************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/remapDisplayManager.modules.scss ***! 
\****************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-remapDisplayManager-modules__remap-block { height: 34px; text-align: center; font-size: 18px; font-weight: bold; margin: 0 10px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/remapDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,YAAA;EACA,kBAAA;EACA,eAAA;EACA,iBAAA;EACA,cAAA;AACJ", sourcesContent: [`.remap-block {\r height: 34px;\r text-align: center;\r font-size: 18px;\r font-weight: bold;\r margin: 0 10px;\r }\r `], sourceRoot: "" }]), p.locals = { "remap-block": "---------tools-nodeEditor-dist-graphSystem-display-remapDisplayManager-modules__remap-block" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/textureDisplayManager.modules.scss": ( /*!******************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/textureDisplayManager.modules.scss ***! \******************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__regular-texture-block { margin-top: 80px; } .\\---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__reduced-texture-block { margin-top: 30px; } .\\---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__reflection-block { grid-row: 3 !important; } .\\---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__triplanar-texture-block { margin-top: 155px; } .\\---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__refraction-texture-block { margin-top: 80px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/textureDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,gBAAA;AACJ;;AAEA;EACI,gBAAA;AACJ;;AAEA;EACI,sBAAA;AACJ;;AAEA;EACI,iBAAA;AACJ;;AAEA;EACI,gBAAA;AACJ", sourcesContent: [`.regular-texture-block {\r margin-top: 80px;\r }\r \r .reduced-texture-block {\r margin-top: 30px;\r }\r \r .reflection-block {\r grid-row: 3 !important;\r }\r \r .triplanar-texture-block {\r margin-top: 155px;\r }\r \r .refraction-texture-block {\r margin-top: 80px;\r }\r `], sourceRoot: "" }]), p.locals = { "regular-texture-block": "---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__regular-texture-block", "reduced-texture-block": "---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__reduced-texture-block", "reflection-block": "---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__reflection-block", "triplanar-texture-block": "---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__triplanar-texture-block", "refraction-texture-block": "---------tools-nodeEditor-dist-graphSystem-display-textureDisplayManager-modules__refraction-texture-block" }; const m = p; } ), /***/ "../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/trigonometryDisplayManager.modules.scss": ( /*!***********************************************************************************************************************************************************************************************************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/cjs.js??ruleSet[1].rules[4].use[1]!../../../../node_modules/sass-loader/dist/cjs.js??ruleSet[1].rules[4].use[2]!../../../tools/nodeEditor/dist/graphSystem/display/trigonometryDisplayManager.modules.scss ***! \***********************************************************************************************************************************************************************************************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ default: () => m /* harmony export */ }); var u = o( /*! ../../../../../../node_modules/css-loader/dist/runtime/sourceMaps.js */ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js" ), h = /* @__PURE__ */ o.n(u), d = o( /*! 
../../../../../../node_modules/css-loader/dist/runtime/api.js */ "../../../../node_modules/css-loader/dist/runtime/api.js" ), f = /* @__PURE__ */ o.n(d), p = f()(h()); p.push([a.id, `.\\---------tools-nodeEditor-dist-graphSystem-display-trigonometryDisplayManager-modules__trigonometry-block { grid-row: 2; height: 34px; text-align: center; font-size: 18px; font-weight: bold; margin: 0 10px; }`, "", { version: 3, sources: ["webpack://./../../../tools/nodeEditor/dist/graphSystem/display/trigonometryDisplayManager.modules.scss"], names: [], mappings: "AAAA;EACI,WAAA;EACA,YAAA;EACA,kBAAA;EACA,eAAA;EACA,iBAAA;EACA,cAAA;AACJ", sourcesContent: [`.trigonometry-block {\r grid-row: 2;\r height: 34px;\r text-align: center;\r font-size: 18px;\r font-weight: bold;\r margin: 0 10px;\r }\r `], sourceRoot: "" }]), p.locals = { "trigonometry-block": "---------tools-nodeEditor-dist-graphSystem-display-trigonometryDisplayManager-modules__trigonometry-block" }; const m = p; } ), /***/ "../../../../node_modules/@fortawesome/react-fontawesome/index.es.js": ( /*!***************************************************************************!*\ !*** ../../../../node_modules/@fortawesome/react-fontawesome/index.es.js ***! \***************************************************************************/ /***/ (a, l, o) => { o.r(l), o.d(l, { /* harmony export */ FontAwesomeIcon: () => ( /* binding */ j ) /* harmony export */ }); var u = o( /*! @fortawesome/fontawesome-svg-core */ "../../../../node_modules/@fortawesome/fontawesome-svg-core/index.mjs" ), h = o( /*! prop-types */ "../../../../node_modules/prop-types/index.js" ), d = /* @__PURE__ */ o.n(h), f = o( /*! react */ "../../../../node_modules/react/index.js" ); function p(ne, pe) { var ge = Object.keys(ne); if (Object.getOwnPropertySymbols) { var Ie = Object.getOwnPropertySymbols(ne); pe && (Ie = Ie.filter(function(ye) { return Object.getOwnPropertyDescriptor(ne, ye).enumerable; })), ge.push.apply(ge, Ie); } return ge; } function m(ne) { for (var pe = 1; pe < arguments.length; pe++) { var ge = arguments[pe] != null ? arguments[pe] : {}; pe % 2 ? p(Object(ge), !0).forEach(function(Ie) { v(ne, Ie, ge[Ie]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(ne, Object.getOwnPropertyDescriptors(ge)) : p(Object(ge)).forEach(function(Ie) { Object.defineProperty(ne, Ie, Object.getOwnPropertyDescriptor(ge, Ie)); }); } return ne; } function _(ne) { "@babel/helpers - typeof"; return _ = typeof Symbol == "function" && typeof Symbol.iterator == "symbol" ? function(pe) { return typeof pe; } : function(pe) { return pe && typeof Symbol == "function" && pe.constructor === Symbol && pe !== Symbol.prototype ? "symbol" : typeof pe; }, _(ne); } function v(ne, pe, ge) { return pe in ne ? 
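// Descriptive note only (identifiers are minified, nothing below is executed): the small functions
// in this region appear to be inlined Babel helpers from the @fortawesome/react-fontawesome build,
// objectSpread (p, m), defineProperty (v), objectWithoutProperties (C, x) and toConsumableArray
// (b, S, M, R, w, V), all used by the FontAwesomeIcon component defined further on in this module.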
Object.defineProperty(ne, pe, { value: ge, enumerable: !0, configurable: !0, writable: !0 }) : ne[pe] = ge, ne; } function C(ne, pe) { if (ne == null) return {}; var ge = {}, Ie = Object.keys(ne), ye, Se; for (Se = 0; Se < Ie.length; Se++) ye = Ie[Se], !(pe.indexOf(ye) >= 0) && (ge[ye] = ne[ye]); return ge; } function x(ne, pe) { if (ne == null) return {}; var ge = C(ne, pe), Ie, ye; if (Object.getOwnPropertySymbols) { var Se = Object.getOwnPropertySymbols(ne); for (ye = 0; ye < Se.length; ye++) Ie = Se[ye], !(pe.indexOf(Ie) >= 0) && Object.prototype.propertyIsEnumerable.call(ne, Ie) && (ge[Ie] = ne[Ie]); } return ge; } function b(ne) { return S(ne) || M(ne) || R(ne) || V(); } function S(ne) { if (Array.isArray(ne)) return w(ne); } function M(ne) { if (typeof Symbol < "u" && ne[Symbol.iterator] != null || ne["@@iterator"] != null) return Array.from(ne); } function R(ne, pe) { if (ne) { if (typeof ne == "string") return w(ne, pe); var ge = Object.prototype.toString.call(ne).slice(8, -1); if (ge === "Object" && ne.constructor && (ge = ne.constructor.name), ge === "Map" || ge === "Set") return Array.from(ne); if (ge === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(ge)) return w(ne, pe); } } function w(ne, pe) { (pe == null || pe > ne.length) && (pe = ne.length); for (var ge = 0, Ie = new Array(pe); ge < pe; ge++) Ie[ge] = ne[ge]; return Ie; } function V() { throw new TypeError(`Invalid attempt to spread non-iterable instance. In order to be iterable, non-array objects must have a [Symbol.iterator]() method.`); } function k(ne) { var pe, ge = ne.beat, Ie = ne.fade, ye = ne.beatFade, Se = ne.bounce, re = ne.shake, te = ne.flash, he = ne.spin, be = ne.spinPulse, Ue = ne.spinReverse, Ee = ne.pulse, He = ne.fixedWidth, Xe = ne.inverse, rt = ne.border, dt = ne.listItem, bt = ne.flip, Mt = ne.size, Ct = ne.rotation, di = ne.pull, Kt = (pe = { "fa-beat": ge, "fa-fade": Ie, "fa-beat-fade": ye, "fa-bounce": Se, "fa-shake": re, "fa-flash": te, "fa-spin": he, "fa-spin-reverse": Ue, "fa-spin-pulse": be, "fa-pulse": Ee, "fa-fw": He, "fa-inverse": Xe, "fa-border": rt, "fa-li": dt, "fa-flip": bt === !0, "fa-flip-horizontal": bt === "horizontal" || bt === "both", "fa-flip-vertical": bt === "vertical" || bt === "both" }, v(pe, "fa-".concat(Mt), typeof Mt < "u" && Mt !== null), v(pe, "fa-rotate-".concat(Ct), typeof Ct < "u" && Ct !== null && Ct !== 0), v(pe, "fa-pull-".concat(di), typeof di < "u" && di !== null), v(pe, "fa-swap-opacity", ne.swapOpacity), pe); return Object.keys(Kt).map(function(ei) { return Kt[ei] ? ei : null; }).filter(function(ei) { return ei; }); } function L(ne) { return ne = ne - 0, ne === ne; } function B(ne) { return L(ne) ? ne : (ne = ne.replace(/[\-_\s]+(.)?/g, function(pe, ge) { return ge ? ge.toUpperCase() : ""; }), ne.substr(0, 1).toLowerCase() + ne.substr(1)); } var U = ["style"]; function K(ne) { return ne.charAt(0).toUpperCase() + ne.slice(1); } function ee(ne) { return ne.split(";").map(function(pe) { return pe.trim(); }).filter(function(pe) { return pe; }).reduce(function(pe, ge) { var Ie = ge.indexOf(":"), ye = B(ge.slice(0, Ie)), Se = ge.slice(Ie + 1).trim(); return ye.startsWith("webkit") ? pe[K(ye)] = Se : pe[ye] = Se, pe; }, {}); } function Z(ne, pe) { var ge = arguments.length > 2 && arguments[2] !== void 0 ? 
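// The function continuing here (Z in this build) recursively converts the abstract element tree
// produced by @fortawesome/fontawesome-svg-core's icon() into React elements, mapping "class" to
// className and inline style strings to style objects; the forwardRef component j below is the
// exported FontAwesomeIcon. Illustrative consumer usage (assumed typical API, not part of this bundle):
//   import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
//   <FontAwesomeIcon icon={["fas", "plus"]} size="lg" title="Add" />
//   <FontAwesomeIcon icon="trash" spin />   // a bare string defaults to the "fas" prefix here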
arguments[2] : {}; if (typeof pe == "string") return pe; var Ie = (pe.children || []).map(function(he) { return Z(ne, he); }), ye = Object.keys(pe.attributes || {}).reduce(function(he, be) { var Ue = pe.attributes[be]; switch (be) { case "class": he.attrs.className = Ue, delete pe.attributes.class; break; case "style": he.attrs.style = ee(Ue); break; default: be.indexOf("aria-") === 0 || be.indexOf("data-") === 0 ? he.attrs[be.toLowerCase()] = Ue : he.attrs[B(be)] = Ue; } return he; }, { attrs: {} }), Se = ge.style, re = Se === void 0 ? {} : Se, te = x(ge, U); return ye.attrs.style = m(m({}, ye.attrs.style), re), ne.apply(void 0, [pe.tag, m(m({}, ye.attrs), te)].concat(b(Ie))); } var q = !1; try { q = !1; } catch { } function le() { if (!q && console && typeof console.error == "function") { var ne; (ne = console).error.apply(ne, arguments); } } function ie(ne) { if (ne && _(ne) === "object" && ne.prefix && ne.iconName && ne.icon) return ne; if (u.parse.icon) return u.parse.icon(ne); if (ne === null) return null; if (ne && _(ne) === "object" && ne.prefix && ne.iconName) return ne; if (Array.isArray(ne) && ne.length === 2) return { prefix: ne[0], iconName: ne[1] }; if (typeof ne == "string") return { prefix: "fas", iconName: ne }; } function $(ne, pe) { return Array.isArray(pe) && pe.length > 0 || !Array.isArray(pe) && pe ? v({}, ne, pe) : {}; } var j = /* @__PURE__ */ f.forwardRef(function(ne, pe) { var ge = ne.icon, Ie = ne.mask, ye = ne.symbol, Se = ne.className, re = ne.title, te = ne.titleId, he = ne.maskId, be = ie(ge), Ue = $("classes", [].concat(b(k(ne)), b(Se.split(" ")))), Ee = $("transform", typeof ne.transform == "string" ? u.parse.transform(ne.transform) : ne.transform), He = $("mask", ie(Ie)), Xe = (0, u.icon)(be, m(m(m(m({}, Ue), Ee), He), {}, { symbol: ye, title: re, titleId: te, maskId: he })); if (!Xe) return le("Could not find icon", be), null; var rt = Xe.abstract, dt = { ref: pe }; return Object.keys(ne).forEach(function(bt) { j.defaultProps.hasOwnProperty(bt) || (dt[bt] = ne[bt]); }), J(rt[0], dt); }); j.displayName = "FontAwesomeIcon", j.propTypes = { beat: d().bool, border: d().bool, beatFade: d().bool, bounce: d().bool, className: d().string, fade: d().bool, flash: d().bool, mask: d().oneOfType([d().object, d().array, d().string]), maskId: d().string, fixedWidth: d().bool, inverse: d().bool, flip: d().oneOf([!0, !1, "horizontal", "vertical", "both"]), icon: d().oneOfType([d().object, d().array, d().string]), listItem: d().bool, pull: d().oneOf(["right", "left"]), pulse: d().bool, rotation: d().oneOf([0, 90, 180, 270]), shake: d().bool, size: d().oneOf(["2xs", "xs", "sm", "lg", "xl", "2xl", "1x", "2x", "3x", "4x", "5x", "6x", "7x", "8x", "9x", "10x"]), spin: d().bool, spinPulse: d().bool, spinReverse: d().bool, symbol: d().oneOfType([d().bool, d().string]), title: d().string, titleId: d().string, transform: d().oneOfType([d().string, d().object]), swapOpacity: d().bool }, j.defaultProps = { border: !1, className: "", mask: null, maskId: null, fixedWidth: !1, inverse: !1, flip: !1, icon: null, listItem: !1, pull: null, pulse: !1, rotation: null, size: null, spin: !1, spinPulse: !1, spinReverse: !1, beat: !1, fade: !1, beatFade: !1, bounce: !1, shake: !1, symbol: !1, title: "", titleId: null, transform: null, swapOpacity: !1 }; var J = Z.bind(null, f.createElement); } ), /***/ "../../../../node_modules/css-loader/dist/runtime/api.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/runtime/api.js ***! 
\***************************************************************/ /***/ (a) => { a.exports = function(l) { var o = []; return o.toString = function() { return this.map(function(h) { var d = "", f = typeof h[5] < "u"; return h[4] && (d += "@supports (".concat(h[4], ") {")), h[2] && (d += "@media ".concat(h[2], " {")), f && (d += "@layer".concat(h[5].length > 0 ? " ".concat(h[5]) : "", " {")), d += l(h), f && (d += "}"), h[2] && (d += "}"), h[4] && (d += "}"), d; }).join(""); }, o.i = function(h, d, f, p, m) { typeof h == "string" && (h = [[null, h, void 0]]); var _ = {}; if (f) for (var v = 0; v < this.length; v++) { var C = this[v][0]; C != null && (_[C] = !0); } for (var x = 0; x < h.length; x++) { var b = [].concat(h[x]); f && _[b[0]] || (typeof m < "u" && (typeof b[5] > "u" || (b[1] = "@layer".concat(b[5].length > 0 ? " ".concat(b[5]) : "", " {").concat(b[1], "}")), b[5] = m), d && (b[2] && (b[1] = "@media ".concat(b[2], " {").concat(b[1], "}")), b[2] = d), p && (b[4] ? (b[1] = "@supports (".concat(b[4], ") {").concat(b[1], "}"), b[4] = p) : b[4] = "".concat(p)), o.push(b)); } }, o; }; } ), /***/ "../../../../node_modules/css-loader/dist/runtime/sourceMaps.js": ( /*!**********************************************************************!*\ !*** ../../../../node_modules/css-loader/dist/runtime/sourceMaps.js ***! \**********************************************************************/ /***/ (a) => { a.exports = function(l) { var o = l[1], u = l[3]; if (!u) return o; if (typeof btoa == "function") { var h = btoa(unescape(encodeURIComponent(JSON.stringify(u)))), d = "sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(h), f = "/*# ".concat(d, " */"); return [o].concat([f]).join(` `); } return [o].join(` `); }; } ), /***/ "../../../../node_modules/dagre/index.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/dagre/index.js ***! \***********************************************/ /***/ (a, l, o) => { a.exports = { graphlib: o( /*! ./lib/graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ), layout: o( /*! ./lib/layout */ "../../../../node_modules/dagre/lib/layout.js" ), debug: o( /*! ./lib/debug */ "../../../../node_modules/dagre/lib/debug.js" ), util: { time: o( /*! ./lib/util */ "../../../../node_modules/dagre/lib/util.js" ).time, notime: o( /*! ./lib/util */ "../../../../node_modules/dagre/lib/util.js" ).notime }, version: o( /*! ./lib/version */ "../../../../node_modules/dagre/lib/version.js" ) }; } ), /***/ "../../../../node_modules/dagre/lib/acyclic.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/dagre/lib/acyclic.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./greedy-fas */ "../../../../node_modules/dagre/lib/greedy-fas.js" ); a.exports = { run: d, undo: p }; function d(m) { var _ = m.graph().acyclicer === "greedy" ? h(m, v(m)) : f(m); u.forEach(_, function(C) { var x = m.edge(C); m.removeEdge(C), x.forwardName = C.name, x.reversed = !0, m.setEdge(C.w, C.v, x, u.uniqueId("rev")); }); function v(C) { return function(x) { return C.edge(x).weight; }; } } function f(m) { var _ = [], v = {}, C = {}; function x(b) { u.has(C, b) || (C[b] = !0, v[b] = !0, u.forEach(m.outEdges(b), function(S) { u.has(v, S.w) ? 
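// Descriptive notes only: the css-loader runtime modules just above expose a styles list whose
// entries are [module id, css text, media query, source map, supports condition, layer] tuples;
// api.js stringifies them (wrapping in @media, @supports and @layer as needed) and sourceMaps.js
// appends a base64 sourceMappingURL comment when a map is present. The dagre/lib/acyclic module
// in progress here removes cycles before ranking: a DFS collects back edges (or greedy-fas is used
// when graph().acyclicer === "greedy"), each such edge is reversed and tagged
// { reversed: true, forwardName }, and undo() restores the original direction after layout.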
_.push(S) : x(S.w); }), delete v[b]); } return u.forEach(m.nodes(), x), _; } function p(m) { u.forEach(m.edges(), function(_) { var v = m.edge(_); if (v.reversed) { m.removeEdge(_); var C = v.forwardName; delete v.reversed, delete v.forwardName, m.setEdge(_.w, _.v, v, C); } }); } } ), /***/ "../../../../node_modules/dagre/lib/add-border-segments.js": ( /*!*****************************************************************!*\ !*** ../../../../node_modules/dagre/lib/add-border-segments.js ***! \*****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = d; function d(p) { function m(_) { var v = p.children(_), C = p.node(_); if (v.length && u.forEach(v, m), u.has(C, "minRank")) { C.borderLeft = [], C.borderRight = []; for (var x = C.minRank, b = C.maxRank + 1; x < b; ++x) f(p, "borderLeft", "_bl", _, C, x), f(p, "borderRight", "_br", _, C, x); } } u.forEach(p.children(), m); } function f(p, m, _, v, C, x) { var b = { width: 0, height: 0, rank: x, borderType: m }, S = C[m][x - 1], M = h.addDummyNode(p, "border", b, _); C[m][x] = M, p.setParent(M, v), S && p.setEdge(S, M, { weight: 1 }); } } ), /***/ "../../../../node_modules/dagre/lib/coordinate-system.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/dagre/lib/coordinate-system.js ***! \***************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = { adjust: h, undo: d }; function h(x) { var b = x.graph().rankdir.toLowerCase(); (b === "lr" || b === "rl") && f(x); } function d(x) { var b = x.graph().rankdir.toLowerCase(); (b === "bt" || b === "rl") && m(x), (b === "lr" || b === "rl") && (v(x), f(x)); } function f(x) { u.forEach(x.nodes(), function(b) { p(x.node(b)); }), u.forEach(x.edges(), function(b) { p(x.edge(b)); }); } function p(x) { var b = x.width; x.width = x.height, x.height = b; } function m(x) { u.forEach(x.nodes(), function(b) { _(x.node(b)); }), u.forEach(x.edges(), function(b) { var S = x.edge(b); u.forEach(S.points, _), u.has(S, "y") && _(S); }); } function _(x) { x.y = -x.y; } function v(x) { u.forEach(x.nodes(), function(b) { C(x.node(b)); }), u.forEach(x.edges(), function(b) { var S = x.edge(b); u.forEach(S.points, C), u.has(S, "x") && C(S); }); } function C(x) { var b = x.x; x.x = x.y, x.y = b; } } ), /***/ "../../../../node_modules/dagre/lib/data/list.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/dagre/lib/data/list.js ***! 
\*******************************************************/ /***/ (a) => { a.exports = l; function l() { var h = {}; h._next = h._prev = h, this._sentinel = h; } l.prototype.dequeue = function() { var h = this._sentinel, d = h._prev; if (d !== h) return o(d), d; }, l.prototype.enqueue = function(h) { var d = this._sentinel; h._prev && h._next && o(h), h._next = d._next, d._next._prev = h, d._next = h, h._prev = d; }, l.prototype.toString = function() { for (var h = [], d = this._sentinel, f = d._prev; f !== d; ) h.push(JSON.stringify(f, u)), f = f._prev; return "[" + h.join(", ") + "]"; }; function o(h) { h._prev._next = h._next, h._next._prev = h._prev, delete h._next, delete h._prev; } function u(h, d) { if (h !== "_next" && h !== "_prev") return d; } } ), /***/ "../../../../node_modules/dagre/lib/debug.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/dagre/lib/debug.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ), d = o( /*! ./graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph; a.exports = { debugOrdering: f }; function f(p) { var m = h.buildLayerMatrix(p), _ = new d({ compound: !0, multigraph: !0 }).setGraph({}); return u.forEach(p.nodes(), function(v) { _.setNode(v, { label: v }), _.setParent(v, "layer" + p.node(v).rank); }), u.forEach(p.edges(), function(v) { _.setEdge(v.v, v.w, {}, v.name); }), u.forEach(m, function(v, C) { var x = "layer" + C; _.setNode(x, { rank: "same" }), u.reduce(v, function(b, S) { return _.setEdge(b, S, { style: "invis" }), S; }); }), _; } } ), /***/ "../../../../node_modules/dagre/lib/graphlib.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/dagre/lib/graphlib.js ***! \******************************************************/ /***/ (a, l, o) => { var u; try { u = o( /*! graphlib */ "../../../../node_modules/graphlib/index.js" ); } catch { } u || (u = window.graphlib), a.exports = u; } ), /***/ "../../../../node_modules/dagre/lib/greedy-fas.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/dagre/lib/greedy-fas.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph, d = o( /*! ./data/list */ "../../../../node_modules/dagre/lib/data/list.js" ); a.exports = p; var f = u.constant(1); function p(x, b) { if (x.nodeCount() <= 1) return []; var S = v(x, b || f), M = m(S.graph, S.buckets, S.zeroIdx); return u.flatten(u.map(M, function(R) { return x.outEdges(R.v, R.w); }), !0); } function m(x, b, S) { for (var M = [], R = b[b.length - 1], w = b[0], V; x.nodeCount(); ) { for (; V = w.dequeue(); ) _(x, b, S, V); for (; V = R.dequeue(); ) _(x, b, S, V); if (x.nodeCount()) { for (var k = b.length - 2; k > 0; --k) if (V = b[k].dequeue(), V) { M = M.concat(_(x, b, S, V, !0)); break; } } } return M; } function _(x, b, S, M, R) { var w = R ? 
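// Descriptive note only: the greedy-fas module around this point implements the Eades/Lin/Smyth
// greedy feedback-arc-set heuristic. Nodes are kept in buckets indexed by out-degree minus
// in-degree; sinks and sources are peeled off first, then the node with the largest difference,
// and the edges that point against that removal order form the (approximately minimal) set of
// edges that acyclic.js reverses to break cycles.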
[] : void 0; return u.forEach(x.inEdges(M.v), function(V) { var k = x.edge(V), L = x.node(V.v); R && w.push({ v: V.v, w: V.w }), L.out -= k, C(b, S, L); }), u.forEach(x.outEdges(M.v), function(V) { var k = x.edge(V), L = V.w, B = x.node(L); B.in -= k, C(b, S, B); }), x.removeNode(M.v), w; } function v(x, b) { var S = new h(), M = 0, R = 0; u.forEach(x.nodes(), function(k) { S.setNode(k, { v: k, in: 0, out: 0 }); }), u.forEach(x.edges(), function(k) { var L = S.edge(k.v, k.w) || 0, B = b(k), U = L + B; S.setEdge(k.v, k.w, U), R = Math.max(R, S.node(k.v).out += B), M = Math.max(M, S.node(k.w).in += B); }); var w = u.range(R + M + 3).map(function() { return new d(); }), V = M + 1; return u.forEach(S.nodes(), function(k) { C(w, V, S.node(k)); }), { graph: S, buckets: w, zeroIdx: V }; } function C(x, b, S) { S.out ? S.in ? x[S.out - S.in + b].enqueue(S) : x[x.length - 1].enqueue(S) : x[0].enqueue(S); } } ), /***/ "../../../../node_modules/dagre/lib/layout.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/dagre/lib/layout.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./acyclic */ "../../../../node_modules/dagre/lib/acyclic.js" ), d = o( /*! ./normalize */ "../../../../node_modules/dagre/lib/normalize.js" ), f = o( /*! ./rank */ "../../../../node_modules/dagre/lib/rank/index.js" ), p = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ).normalizeRanks, m = o( /*! ./parent-dummy-chains */ "../../../../node_modules/dagre/lib/parent-dummy-chains.js" ), _ = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ).removeEmptyRanks, v = o( /*! ./nesting-graph */ "../../../../node_modules/dagre/lib/nesting-graph.js" ), C = o( /*! ./add-border-segments */ "../../../../node_modules/dagre/lib/add-border-segments.js" ), x = o( /*! ./coordinate-system */ "../../../../node_modules/dagre/lib/coordinate-system.js" ), b = o( /*! ./order */ "../../../../node_modules/dagre/lib/order/index.js" ), S = o( /*! ./position */ "../../../../node_modules/dagre/lib/position/index.js" ), M = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ), R = o( /*! ./graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph; a.exports = w; function w(Ee, He) { var Xe = He && He.debugTiming ? 
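// dagre/lib/layout.js entry point. The runLayout helper invoked below chains the standard dagre
// pipeline: make space for edge labels, remove self edges, break cycles, build the nesting graph,
// rank nodes, normalize ranks, insert dummy nodes for long edges, order nodes within ranks to
// reduce crossings, assign x/y positions, then undo all the temporary transforms.
// Illustrative usage of the bundled library (assumed typical dagre API, not part of this bundle):
//   const dagre = require("dagre");
//   const g = new dagre.graphlib.Graph().setGraph({ rankdir: "LR", nodesep: 50, ranksep: 50 });
//   g.setDefaultEdgeLabel(() => ({}));
//   g.setNode("a", { width: 40, height: 34 });
//   g.setNode("b", { width: 40, height: 34 });
//   g.setEdge("a", "b");
//   dagre.layout(g);
//   // g.node("a").x / .y and g.edge("a", "b").points now hold the computed layout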
M.time : M.notime; Xe("layout", function() { var rt = Xe(" buildLayoutGraph", function() { return ie(Ee); }); Xe(" runLayout", function() { V(rt, Xe); }), Xe(" updateInputGraph", function() { k(Ee, rt); }); }); } function V(Ee, He) { He(" makeSpaceForEdgeLabels", function() { $(Ee); }), He(" removeSelfEdges", function() { re(Ee); }), He(" acyclic", function() { h.run(Ee); }), He(" nestingGraph.run", function() { v.run(Ee); }), He(" rank", function() { f(M.asNonCompoundGraph(Ee)); }), He(" injectEdgeLabelProxies", function() { j(Ee); }), He(" removeEmptyRanks", function() { _(Ee); }), He(" nestingGraph.cleanup", function() { v.cleanup(Ee); }), He(" normalizeRanks", function() { p(Ee); }), He(" assignRankMinMax", function() { J(Ee); }), He(" removeEdgeLabelProxies", function() { ne(Ee); }), He(" normalize.run", function() { d.run(Ee); }), He(" parentDummyChains", function() { m(Ee); }), He(" addBorderSegments", function() { C(Ee); }), He(" order", function() { b(Ee); }), He(" insertSelfEdges", function() { te(Ee); }), He(" adjustCoordinateSystem", function() { x.adjust(Ee); }), He(" position", function() { S(Ee); }), He(" positionSelfEdges", function() { he(Ee); }), He(" removeBorderNodes", function() { Se(Ee); }), He(" normalize.undo", function() { d.undo(Ee); }), He(" fixupEdgeLabelCoords", function() { Ie(Ee); }), He(" undoCoordinateSystem", function() { x.undo(Ee); }), He(" translateGraph", function() { pe(Ee); }), He(" assignNodeIntersects", function() { ge(Ee); }), He(" reversePoints", function() { ye(Ee); }), He(" acyclic.undo", function() { h.undo(Ee); }); } function k(Ee, He) { u.forEach(Ee.nodes(), function(Xe) { var rt = Ee.node(Xe), dt = He.node(Xe); rt && (rt.x = dt.x, rt.y = dt.y, He.children(Xe).length && (rt.width = dt.width, rt.height = dt.height)); }), u.forEach(Ee.edges(), function(Xe) { var rt = Ee.edge(Xe), dt = He.edge(Xe); rt.points = dt.points, u.has(dt, "x") && (rt.x = dt.x, rt.y = dt.y); }), Ee.graph().width = He.graph().width, Ee.graph().height = He.graph().height; } var L = ["nodesep", "edgesep", "ranksep", "marginx", "marginy"], B = { ranksep: 50, edgesep: 20, nodesep: 50, rankdir: "tb" }, U = ["acyclicer", "ranker", "rankdir", "align"], K = ["width", "height"], ee = { width: 0, height: 0 }, Z = ["minlen", "weight", "width", "height", "labeloffset"], q = { minlen: 1, weight: 1, width: 0, height: 0, labeloffset: 10, labelpos: "r" }, le = ["labelpos"]; function ie(Ee) { var He = new R({ multigraph: !0, compound: !0 }), Xe = Ue(Ee.graph()); return He.setGraph(u.merge( {}, B, be(Xe, L), u.pick(Xe, U) )), u.forEach(Ee.nodes(), function(rt) { var dt = Ue(Ee.node(rt)); He.setNode(rt, u.defaults(be(dt, K), ee)), He.setParent(rt, Ee.parent(rt)); }), u.forEach(Ee.edges(), function(rt) { var dt = Ue(Ee.edge(rt)); He.setEdge(rt, u.merge( {}, q, be(dt, Z), u.pick(dt, le) )); }), He; } function $(Ee) { var He = Ee.graph(); He.ranksep /= 2, u.forEach(Ee.edges(), function(Xe) { var rt = Ee.edge(Xe); rt.minlen *= 2, rt.labelpos.toLowerCase() !== "c" && (He.rankdir === "TB" || He.rankdir === "BT" ? 
rt.width += rt.labeloffset : rt.height += rt.labeloffset); }); } function j(Ee) { u.forEach(Ee.edges(), function(He) { var Xe = Ee.edge(He); if (Xe.width && Xe.height) { var rt = Ee.node(He.v), dt = Ee.node(He.w), bt = { rank: (dt.rank - rt.rank) / 2 + rt.rank, e: He }; M.addDummyNode(Ee, "edge-proxy", bt, "_ep"); } }); } function J(Ee) { var He = 0; u.forEach(Ee.nodes(), function(Xe) { var rt = Ee.node(Xe); rt.borderTop && (rt.minRank = Ee.node(rt.borderTop).rank, rt.maxRank = Ee.node(rt.borderBottom).rank, He = u.max(He, rt.maxRank)); }), Ee.graph().maxRank = He; } function ne(Ee) { u.forEach(Ee.nodes(), function(He) { var Xe = Ee.node(He); Xe.dummy === "edge-proxy" && (Ee.edge(Xe.e).labelRank = Xe.rank, Ee.removeNode(He)); }); } function pe(Ee) { var He = Number.POSITIVE_INFINITY, Xe = 0, rt = Number.POSITIVE_INFINITY, dt = 0, bt = Ee.graph(), Mt = bt.marginx || 0, Ct = bt.marginy || 0; function di(Kt) { var ei = Kt.x, bi = Kt.y, vr = Kt.width, yi = Kt.height; He = Math.min(He, ei - vr / 2), Xe = Math.max(Xe, ei + vr / 2), rt = Math.min(rt, bi - yi / 2), dt = Math.max(dt, bi + yi / 2); } u.forEach(Ee.nodes(), function(Kt) { di(Ee.node(Kt)); }), u.forEach(Ee.edges(), function(Kt) { var ei = Ee.edge(Kt); u.has(ei, "x") && di(ei); }), He -= Mt, rt -= Ct, u.forEach(Ee.nodes(), function(Kt) { var ei = Ee.node(Kt); ei.x -= He, ei.y -= rt; }), u.forEach(Ee.edges(), function(Kt) { var ei = Ee.edge(Kt); u.forEach(ei.points, function(bi) { bi.x -= He, bi.y -= rt; }), u.has(ei, "x") && (ei.x -= He), u.has(ei, "y") && (ei.y -= rt); }), bt.width = Xe - He + Mt, bt.height = dt - rt + Ct; } function ge(Ee) { u.forEach(Ee.edges(), function(He) { var Xe = Ee.edge(He), rt = Ee.node(He.v), dt = Ee.node(He.w), bt, Mt; Xe.points ? (bt = Xe.points[0], Mt = Xe.points[Xe.points.length - 1]) : (Xe.points = [], bt = dt, Mt = rt), Xe.points.unshift(M.intersectRect(rt, bt)), Xe.points.push(M.intersectRect(dt, Mt)); }); } function Ie(Ee) { u.forEach(Ee.edges(), function(He) { var Xe = Ee.edge(He); if (u.has(Xe, "x")) switch ((Xe.labelpos === "l" || Xe.labelpos === "r") && (Xe.width -= Xe.labeloffset), Xe.labelpos) { case "l": Xe.x -= Xe.width / 2 + Xe.labeloffset; break; case "r": Xe.x += Xe.width / 2 + Xe.labeloffset; break; } }); } function ye(Ee) { u.forEach(Ee.edges(), function(He) { var Xe = Ee.edge(He); Xe.reversed && Xe.points.reverse(); }); } function Se(Ee) { u.forEach(Ee.nodes(), function(He) { if (Ee.children(He).length) { var Xe = Ee.node(He), rt = Ee.node(Xe.borderTop), dt = Ee.node(Xe.borderBottom), bt = Ee.node(u.last(Xe.borderLeft)), Mt = Ee.node(u.last(Xe.borderRight)); Xe.width = Math.abs(Mt.x - bt.x), Xe.height = Math.abs(dt.y - rt.y), Xe.x = bt.x + Xe.width / 2, Xe.y = rt.y + Xe.height / 2; } }), u.forEach(Ee.nodes(), function(He) { Ee.node(He).dummy === "border" && Ee.removeNode(He); }); } function re(Ee) { u.forEach(Ee.edges(), function(He) { if (He.v === He.w) { var Xe = Ee.node(He.v); Xe.selfEdges || (Xe.selfEdges = []), Xe.selfEdges.push({ e: He, label: Ee.edge(He) }), Ee.removeEdge(He); } }); } function te(Ee) { var He = M.buildLayerMatrix(Ee); u.forEach(He, function(Xe) { var rt = 0; u.forEach(Xe, function(dt, bt) { var Mt = Ee.node(dt); Mt.order = bt + rt, u.forEach(Mt.selfEdges, function(Ct) { M.addDummyNode(Ee, "selfedge", { width: Ct.label.width, height: Ct.label.height, rank: Mt.rank, order: bt + ++rt, e: Ct.e, label: Ct.label }, "_se"); }), delete Mt.selfEdges; }); }); } function he(Ee) { u.forEach(Ee.nodes(), function(He) { var Xe = Ee.node(He); if (Xe.dummy === "selfedge") { var 
rt = Ee.node(Xe.e.v), dt = rt.x + rt.width / 2, bt = rt.y, Mt = Xe.x - dt, Ct = rt.height / 2; Ee.setEdge(Xe.e, Xe.label), Ee.removeNode(He), Xe.label.points = [ { x: dt + 2 * Mt / 3, y: bt - Ct }, { x: dt + 5 * Mt / 6, y: bt - Ct }, { x: dt + Mt, y: bt }, { x: dt + 5 * Mt / 6, y: bt + Ct }, { x: dt + 2 * Mt / 3, y: bt + Ct } ], Xe.label.x = Xe.x, Xe.label.y = Xe.y; } }); } function be(Ee, He) { return u.mapValues(u.pick(Ee, He), Number); } function Ue(Ee) { var He = {}; return u.forEach(Ee, function(Xe, rt) { He[rt.toLowerCase()] = Xe; }), He; } } ), /***/ "../../../../node_modules/dagre/lib/lodash.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/dagre/lib/lodash.js ***! \****************************************************/ /***/ (a, l, o) => { var u; try { u = { cloneDeep: o( /*! lodash/cloneDeep */ "../../../../node_modules/lodash/cloneDeep.js" ), constant: o( /*! lodash/constant */ "../../../../node_modules/lodash/constant.js" ), defaults: o( /*! lodash/defaults */ "../../../../node_modules/lodash/defaults.js" ), each: o( /*! lodash/each */ "../../../../node_modules/lodash/each.js" ), filter: o( /*! lodash/filter */ "../../../../node_modules/lodash/filter.js" ), find: o( /*! lodash/find */ "../../../../node_modules/lodash/find.js" ), flatten: o( /*! lodash/flatten */ "../../../../node_modules/lodash/flatten.js" ), forEach: o( /*! lodash/forEach */ "../../../../node_modules/lodash/forEach.js" ), forIn: o( /*! lodash/forIn */ "../../../../node_modules/lodash/forIn.js" ), has: o( /*! lodash/has */ "../../../../node_modules/lodash/has.js" ), isUndefined: o( /*! lodash/isUndefined */ "../../../../node_modules/lodash/isUndefined.js" ), last: o( /*! lodash/last */ "../../../../node_modules/lodash/last.js" ), map: o( /*! lodash/map */ "../../../../node_modules/lodash/map.js" ), mapValues: o( /*! lodash/mapValues */ "../../../../node_modules/lodash/mapValues.js" ), max: o( /*! lodash/max */ "../../../../node_modules/lodash/max.js" ), merge: o( /*! lodash/merge */ "../../../../node_modules/lodash/merge.js" ), min: o( /*! lodash/min */ "../../../../node_modules/lodash/min.js" ), minBy: o( /*! lodash/minBy */ "../../../../node_modules/lodash/minBy.js" ), now: o( /*! lodash/now */ "../../../../node_modules/lodash/now.js" ), pick: o( /*! lodash/pick */ "../../../../node_modules/lodash/pick.js" ), range: o( /*! lodash/range */ "../../../../node_modules/lodash/range.js" ), reduce: o( /*! lodash/reduce */ "../../../../node_modules/lodash/reduce.js" ), sortBy: o( /*! lodash/sortBy */ "../../../../node_modules/lodash/sortBy.js" ), uniqueId: o( /*! lodash/uniqueId */ "../../../../node_modules/lodash/uniqueId.js" ), values: o( /*! lodash/values */ "../../../../node_modules/lodash/values.js" ), zipObject: o( /*! lodash/zipObject */ "../../../../node_modules/lodash/zipObject.js" ) }; } catch { } u || (u = window._), a.exports = u; } ), /***/ "../../../../node_modules/dagre/lib/nesting-graph.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/dagre/lib/nesting-graph.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! 
./util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = { run: d, cleanup: _ }; function d(v) { var C = h.addDummyNode(v, "root", {}, "_root"), x = p(v), b = u.max(u.values(x)) - 1, S = 2 * b + 1; v.graph().nestingRoot = C, u.forEach(v.edges(), function(R) { v.edge(R).minlen *= S; }); var M = m(v) + 1; u.forEach(v.children(), function(R) { f(v, C, S, M, b, x, R); }), v.graph().nodeRankFactor = S; } function f(v, C, x, b, S, M, R) { var w = v.children(R); if (!w.length) { R !== C && v.setEdge(C, R, { weight: 0, minlen: x }); return; } var V = h.addBorderNode(v, "_bt"), k = h.addBorderNode(v, "_bb"), L = v.node(R); v.setParent(V, R), L.borderTop = V, v.setParent(k, R), L.borderBottom = k, u.forEach(w, function(B) { f(v, C, x, b, S, M, B); var U = v.node(B), K = U.borderTop ? U.borderTop : B, ee = U.borderBottom ? U.borderBottom : B, Z = U.borderTop ? b : 2 * b, q = K !== ee ? 1 : S - M[R] + 1; v.setEdge(V, K, { weight: Z, minlen: q, nestingEdge: !0 }), v.setEdge(ee, k, { weight: Z, minlen: q, nestingEdge: !0 }); }), v.parent(R) || v.setEdge(C, V, { weight: 0, minlen: S + M[R] }); } function p(v) { var C = {}; function x(b, S) { var M = v.children(b); M && M.length && u.forEach(M, function(R) { x(R, S + 1); }), C[b] = S; } return u.forEach(v.children(), function(b) { x(b, 1); }), C; } function m(v) { return u.reduce(v.edges(), function(C, x) { return C + v.edge(x).weight; }, 0); } function _(v) { var C = v.graph(); v.removeNode(C.nestingRoot), delete C.nestingRoot, u.forEach(v.edges(), function(x) { var b = v.edge(x); b.nestingEdge && v.removeEdge(x); }); } } ), /***/ "../../../../node_modules/dagre/lib/normalize.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/dagre/lib/normalize.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = { run: d, undo: p }; function d(m) { m.graph().dummyChains = [], u.forEach(m.edges(), function(_) { f(m, _); }); } function f(m, _) { var v = _.v, C = m.node(v).rank, x = _.w, b = m.node(x).rank, S = _.name, M = m.edge(_), R = M.labelRank; if (b !== C + 1) { m.removeEdge(_); var w, V, k; for (k = 0, ++C; C < b; ++k, ++C) M.points = [], V = { width: 0, height: 0, edgeLabel: M, edgeObj: _, rank: C }, w = h.addDummyNode(m, "edge", V, "_d"), C === R && (V.width = M.width, V.height = M.height, V.dummy = "edge-label", V.labelpos = M.labelpos), m.setEdge(v, w, { weight: M.weight }, S), k === 0 && m.graph().dummyChains.push(w), v = w; m.setEdge(v, x, { weight: M.weight }, S); } } function p(m) { u.forEach(m.graph().dummyChains, function(_) { var v = m.node(_), C = v.edgeLabel, x; for (m.setEdge(v.edgeObj, C); v.dummy; ) x = m.successors(_)[0], m.removeNode(_), C.points.push({ x: v.x, y: v.y }), v.dummy === "edge-label" && (C.x = v.x, C.y = v.y, C.width = v.width, C.height = v.height), _ = x, v = m.node(_); }); } } ), /***/ "../../../../node_modules/dagre/lib/order/add-subgraph-constraints.js": ( /*!****************************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/add-subgraph-constraints.js ***! \****************************************************************************/ /***/ (a, l, o) => { var u = o( /*! 
../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(d, f, p) { var m = {}, _; u.forEach(p, function(v) { for (var C = d.parent(v), x, b; C; ) { if (x = d.parent(C), x ? (b = m[x], m[x] = C) : (b = _, _ = C), b && b !== C) { f.setEdge(b, C); return; } C = x; } }); } } ), /***/ "../../../../node_modules/dagre/lib/order/barycenter.js": ( /*!**************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/barycenter.js ***! \**************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(d, f) { return u.map(f, function(p) { var m = d.inEdges(p); if (m.length) { var _ = u.reduce(m, function(v, C) { var x = d.edge(C), b = d.node(C.v); return { sum: v.sum + x.weight * b.order, weight: v.weight + x.weight }; }, { sum: 0, weight: 0 }); return { v: p, barycenter: _.sum / _.weight, weight: _.weight }; } else return { v: p }; }); } } ), /***/ "../../../../node_modules/dagre/lib/order/build-layer-graph.js": ( /*!*********************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/build-layer-graph.js ***! \*********************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph; a.exports = d; function d(p, m, _) { var v = f(p), C = new h({ compound: !0 }).setGraph({ root: v }).setDefaultNodeLabel(function(x) { return p.node(x); }); return u.forEach(p.nodes(), function(x) { var b = p.node(x), S = p.parent(x); (b.rank === m || b.minRank <= m && m <= b.maxRank) && (C.setNode(x), C.setParent(x, S || v), u.forEach(p[_](x), function(M) { var R = M.v === x ? M.w : M.v, w = C.edge(R, x), V = u.isUndefined(w) ? 0 : w.weight; C.setEdge(R, x, { weight: p.edge(M).weight + V }); }), u.has(b, "minRank") && C.setNode(x, { borderLeft: b.borderLeft[m], borderRight: b.borderRight[m] })); }), C; } function f(p) { for (var m; p.hasNode(m = u.uniqueId("_root")); ) ; return m; } } ), /***/ "../../../../node_modules/dagre/lib/order/cross-count.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/cross-count.js ***! \***************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(f, p) { for (var m = 0, _ = 1; _ < p.length; ++_) m += d(f, p[_ - 1], p[_]); return m; } function d(f, p, m) { for (var _ = u.zipObject( m, u.map(m, function(M, R) { return R; }) ), v = u.flatten(u.map(p, function(M) { return u.sortBy(u.map(f.outEdges(M), function(R) { return { pos: _[R.w], weight: f.edge(R).weight }; }), "pos"); }), !0), C = 1; C < m.length; ) C <<= 1; var x = 2 * C - 1; C -= 1; var b = u.map(new Array(x), function() { return 0; }), S = 0; return u.forEach(v.forEach(function(M) { var R = M.pos + C; b[R] += M.weight; for (var w = 0; R > 0; ) R % 2 && (w += b[R + 1]), R = R - 1 >> 1, b[R] += M.weight; S += M.weight * w; })), S; } } ), /***/ "../../../../node_modules/dagre/lib/order/index.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/index.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! 
../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./init-order */ "../../../../node_modules/dagre/lib/order/init-order.js" ), d = o( /*! ./cross-count */ "../../../../node_modules/dagre/lib/order/cross-count.js" ), f = o( /*! ./sort-subgraph */ "../../../../node_modules/dagre/lib/order/sort-subgraph.js" ), p = o( /*! ./build-layer-graph */ "../../../../node_modules/dagre/lib/order/build-layer-graph.js" ), m = o( /*! ./add-subgraph-constraints */ "../../../../node_modules/dagre/lib/order/add-subgraph-constraints.js" ), _ = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph, v = o( /*! ../util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = C; function C(M) { var R = v.maxRank(M), w = x(M, u.range(1, R + 1), "inEdges"), V = x(M, u.range(R - 1, -1, -1), "outEdges"), k = h(M); S(M, k); for (var L = Number.POSITIVE_INFINITY, B, U = 0, K = 0; K < 4; ++U, ++K) { b(U % 2 ? w : V, U % 4 >= 2), k = v.buildLayerMatrix(M); var ee = d(M, k); ee < L && (K = 0, B = u.cloneDeep(k), L = ee); } S(M, B); } function x(M, R, w) { return u.map(R, function(V) { return p(M, V, w); }); } function b(M, R) { var w = new _(); u.forEach(M, function(V) { var k = V.graph().root, L = f(V, k, w, R); u.forEach(L.vs, function(B, U) { V.node(B).order = U; }), m(V, w, L.vs); }); } function S(M, R) { u.forEach(R, function(w) { u.forEach(w, function(V, k) { M.node(V).order = k; }); }); } } ), /***/ "../../../../node_modules/dagre/lib/order/init-order.js": ( /*!**************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/init-order.js ***! \**************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(d) { var f = {}, p = u.filter(d.nodes(), function(x) { return !d.children(x).length; }), m = u.max(u.map(p, function(x) { return d.node(x).rank; })), _ = u.map(u.range(m + 1), function() { return []; }); function v(x) { if (!u.has(f, x)) { f[x] = !0; var b = d.node(x); _[b.rank].push(x), u.forEach(d.successors(x), v); } } var C = u.sortBy(p, function(x) { return d.node(x).rank; }); return u.forEach(C, v), _; } } ), /***/ "../../../../node_modules/dagre/lib/order/resolve-conflicts.js": ( /*!*********************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/resolve-conflicts.js ***! \*********************************************************************/ /***/ (a, l, o) => { var u = o( /*! 
../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(p, m) { var _ = {}; u.forEach(p, function(C, x) { var b = _[C.v] = { indegree: 0, in: [], out: [], vs: [C.v], i: x }; u.isUndefined(C.barycenter) || (b.barycenter = C.barycenter, b.weight = C.weight); }), u.forEach(m.edges(), function(C) { var x = _[C.v], b = _[C.w]; !u.isUndefined(x) && !u.isUndefined(b) && (b.indegree++, x.out.push(_[C.w])); }); var v = u.filter(_, function(C) { return !C.indegree; }); return d(v); } function d(p) { var m = []; function _(x) { return function(b) { b.merged || (u.isUndefined(b.barycenter) || u.isUndefined(x.barycenter) || b.barycenter >= x.barycenter) && f(x, b); }; } function v(x) { return function(b) { b.in.push(x), --b.indegree === 0 && p.push(b); }; } for (; p.length; ) { var C = p.pop(); m.push(C), u.forEach(C.in.reverse(), _(C)), u.forEach(C.out, v(C)); } return u.map( u.filter(m, function(x) { return !x.merged; }), function(x) { return u.pick(x, ["vs", "i", "barycenter", "weight"]); } ); } function f(p, m) { var _ = 0, v = 0; p.weight && (_ += p.barycenter * p.weight, v += p.weight), m.weight && (_ += m.barycenter * m.weight, v += m.weight), p.vs = m.vs.concat(p.vs), p.barycenter = _ / v, p.weight = v, p.i = Math.min(m.i, p.i), m.merged = !0; } } ), /***/ "../../../../node_modules/dagre/lib/order/sort-subgraph.js": ( /*!*****************************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/sort-subgraph.js ***! \*****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./barycenter */ "../../../../node_modules/dagre/lib/order/barycenter.js" ), d = o( /*! ./resolve-conflicts */ "../../../../node_modules/dagre/lib/order/resolve-conflicts.js" ), f = o( /*! ./sort */ "../../../../node_modules/dagre/lib/order/sort.js" ); a.exports = p; function p(v, C, x, b) { var S = v.children(C), M = v.node(C), R = M ? M.borderLeft : void 0, w = M ? M.borderRight : void 0, V = {}; R && (S = u.filter(S, function(ee) { return ee !== R && ee !== w; })); var k = h(v, S); u.forEach(k, function(ee) { if (v.children(ee.v).length) { var Z = p(v, ee.v, x, b); V[ee.v] = Z, u.has(Z, "barycenter") && _(ee, Z); } }); var L = d(k, x); m(L, V); var B = f(L, b); if (R && (B.vs = u.flatten([R, B.vs, w], !0), v.predecessors(R).length)) { var U = v.node(v.predecessors(R)[0]), K = v.node(v.predecessors(w)[0]); u.has(B, "barycenter") || (B.barycenter = 0, B.weight = 0), B.barycenter = (B.barycenter * B.weight + U.order + K.order) / (B.weight + 2), B.weight += 2; } return B; } function m(v, C) { u.forEach(v, function(x) { x.vs = u.flatten(x.vs.map(function(b) { return C[b] ? C[b].vs : b; }), !0); }); } function _(v, C) { u.isUndefined(v.barycenter) ? (v.barycenter = C.barycenter, v.weight = C.weight) : (v.barycenter = (v.barycenter * v.weight + C.barycenter * C.weight) / (v.weight + C.weight), v.weight += C.weight); } } ), /***/ "../../../../node_modules/dagre/lib/order/sort.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/dagre/lib/order/sort.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! 
../util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = d; function d(m, _) { var v = h.partition(m, function(V) { return u.has(V, "barycenter"); }), C = v.lhs, x = u.sortBy(v.rhs, function(V) { return -V.i; }), b = [], S = 0, M = 0, R = 0; C.sort(p(!!_)), R = f(b, x, R), u.forEach(C, function(V) { R += V.vs.length, b.push(V.vs), S += V.barycenter * V.weight, M += V.weight, R = f(b, x, R); }); var w = { vs: u.flatten(b, !0) }; return M && (w.barycenter = S / M, w.weight = M), w; } function f(m, _, v) { for (var C; _.length && (C = u.last(_)).i <= v; ) _.pop(), m.push(C.vs), v++; return v; } function p(m) { return function(_, v) { return _.barycenter < v.barycenter ? -1 : _.barycenter > v.barycenter ? 1 : m ? v.i - _.i : _.i - v.i; }; } } ), /***/ "../../../../node_modules/dagre/lib/parent-dummy-chains.js": ( /*!*****************************************************************!*\ !*** ../../../../node_modules/dagre/lib/parent-dummy-chains.js ***! \*****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = h; function h(p) { var m = f(p); u.forEach(p.graph().dummyChains, function(_) { for (var v = p.node(_), C = v.edgeObj, x = d(p, m, C.v, C.w), b = x.path, S = x.lca, M = 0, R = b[M], w = !0; _ !== C.w; ) { if (v = p.node(_), w) { for (; (R = b[M]) !== S && p.node(R).maxRank < v.rank; ) M++; R === S && (w = !1); } if (!w) { for (; M < b.length - 1 && p.node(R = b[M + 1]).minRank <= v.rank; ) M++; R = b[M]; } p.setParent(_, R), _ = p.successors(_)[0]; } }); } function d(p, m, _, v) { var C = [], x = [], b = Math.min(m[_].low, m[v].low), S = Math.max(m[_].lim, m[v].lim), M, R; M = _; do M = p.parent(M), C.push(M); while (M && (m[M].low > b || S > m[M].lim)); for (R = M, M = v; (M = p.parent(M)) !== R; ) x.push(M); return { path: C.concat(x.reverse()), lca: R }; } function f(p) { var m = {}, _ = 0; function v(C) { var x = _; u.forEach(p.children(C), v), m[C] = { low: x, lim: _++ }; } return u.forEach(p.children(), v), m; } } ), /***/ "../../../../node_modules/dagre/lib/position/bk.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/dagre/lib/position/bk.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph, d = o( /*! ../util */ "../../../../node_modules/dagre/lib/util.js" ); a.exports = { positionX: w, findType1Conflicts: f, findType2Conflicts: p, addConflict: _, hasConflict: v, verticalAlignment: C, horizontalCompaction: x, alignCoordinates: M, findSmallestWidthAlignment: S, balance: R }; function f(L, B) { var U = {}; function K(ee, Z) { var q = 0, le = 0, ie = ee.length, $ = u.last(Z); return u.forEach(Z, function(j, J) { var ne = m(L, j), pe = ne ? 
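// dagre/lib/position/bk.js, an implementation of Brandes-Köpf horizontal coordinate assignment:
// findType1Conflicts / findType2Conflicts mark segment crossings involving dummy nodes,
// verticalAlignment builds root/align chains per direction, horizontalCompaction packs each
// alignment as tightly as nodesep/edgesep allow, and balance/alignCoordinates average the four
// (up/down x left/right) runs into the final x coordinates. Descriptive comment only.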
L.node(ne).order : ie; (ne || j === $) && (u.forEach(Z.slice(le, J + 1), function(ge) { u.forEach(L.predecessors(ge), function(Ie) { var ye = L.node(Ie), Se = ye.order; (Se < q || pe < Se) && !(ye.dummy && L.node(ge).dummy) && _(U, Ie, ge); }); }), le = J + 1, q = pe); }), Z; } return u.reduce(B, K), U; } function p(L, B) { var U = {}; function K(Z, q, le, ie, $) { var j; u.forEach(u.range(q, le), function(J) { j = Z[J], L.node(j).dummy && u.forEach(L.predecessors(j), function(ne) { var pe = L.node(ne); pe.dummy && (pe.order < ie || pe.order > $) && _(U, ne, j); }); }); } function ee(Z, q) { var le = -1, ie, $ = 0; return u.forEach(q, function(j, J) { if (L.node(j).dummy === "border") { var ne = L.predecessors(j); ne.length && (ie = L.node(ne[0]).order, K(q, $, J, le, ie), $ = J, le = ie); } K(q, $, q.length, ie, Z.length); }), q; } return u.reduce(B, ee), U; } function m(L, B) { if (L.node(B).dummy) return u.find(L.predecessors(B), function(U) { return L.node(U).dummy; }); } function _(L, B, U) { if (B > U) { var K = B; B = U, U = K; } var ee = L[B]; ee || (L[B] = ee = {}), ee[U] = !0; } function v(L, B, U) { if (B > U) { var K = B; B = U, U = K; } return u.has(L[B], U); } function C(L, B, U, K) { var ee = {}, Z = {}, q = {}; return u.forEach(B, function(le) { u.forEach(le, function(ie, $) { ee[ie] = ie, Z[ie] = ie, q[ie] = $; }); }), u.forEach(B, function(le) { var ie = -1; u.forEach(le, function($) { var j = K($); if (j.length) { j = u.sortBy(j, function(Ie) { return q[Ie]; }); for (var J = (j.length - 1) / 2, ne = Math.floor(J), pe = Math.ceil(J); ne <= pe; ++ne) { var ge = j[ne]; Z[$] === $ && ie < q[ge] && !v(U, $, ge) && (Z[ge] = $, Z[$] = ee[$] = ee[ge], ie = q[ge]); } } }); }), { root: ee, align: Z }; } function x(L, B, U, K, ee) { var Z = {}, q = b(L, B, U, ee), le = ee ? "borderLeft" : "borderRight"; function ie(J, ne) { for (var pe = q.nodes(), ge = pe.pop(), Ie = {}; ge; ) Ie[ge] ? J(ge) : (Ie[ge] = !0, pe.push(ge), pe = pe.concat(ne(ge))), ge = pe.pop(); } function $(J) { Z[J] = q.inEdges(J).reduce(function(ne, pe) { return Math.max(ne, Z[pe.v] + q.edge(pe)); }, 0); } function j(J) { var ne = q.outEdges(J).reduce(function(ge, Ie) { return Math.min(ge, Z[Ie.w] - q.edge(Ie)); }, Number.POSITIVE_INFINITY), pe = L.node(J); ne !== Number.POSITIVE_INFINITY && pe.borderType !== le && (Z[J] = Math.max(Z[J], ne)); } return ie($, q.predecessors.bind(q)), ie(j, q.successors.bind(q)), u.forEach(K, function(J) { Z[J] = Z[U[J]]; }), Z; } function b(L, B, U, K) { var ee = new h(), Z = L.graph(), q = V(Z.nodesep, Z.edgesep, K); return u.forEach(B, function(le) { var ie; u.forEach(le, function($) { var j = U[$]; if (ee.setNode(j), ie) { var J = U[ie], ne = ee.edge(J, j); ee.setEdge(J, j, Math.max(q(L, $, ie), ne || 0)); } ie = $; }); }), ee; } function S(L, B) { return u.minBy(u.values(B), function(U) { var K = Number.NEGATIVE_INFINITY, ee = Number.POSITIVE_INFINITY; return u.forIn(U, function(Z, q) { var le = k(L, q) / 2; K = Math.max(Z + le, K), ee = Math.min(Z - le, ee); }), K - ee; }); } function M(L, B) { var U = u.values(B), K = u.min(U), ee = u.max(U); u.forEach(["u", "d"], function(Z) { u.forEach(["l", "r"], function(q) { var le = Z + q, ie = L[le], $; if (ie !== B) { var j = u.values(ie); $ = q === "l" ? 
K - u.min(j) : ee - u.max(j), $ && (L[le] = u.mapValues(ie, function(J) { return J + $; })); } }); }); } function R(L, B) { return u.mapValues(L.ul, function(U, K) { if (B) return L[B.toLowerCase()][K]; var ee = u.sortBy(u.map(L, K)); return (ee[1] + ee[2]) / 2; }); } function w(L) { var B = d.buildLayerMatrix(L), U = u.merge( f(L, B), p(L, B) ), K = {}, ee; u.forEach(["u", "d"], function(q) { ee = q === "u" ? B : u.values(B).reverse(), u.forEach(["l", "r"], function(le) { le === "r" && (ee = u.map(ee, function(J) { return u.values(J).reverse(); })); var ie = (q === "u" ? L.predecessors : L.successors).bind(L), $ = C(L, ee, U, ie), j = x( L, ee, $.root, $.align, le === "r" ); le === "r" && (j = u.mapValues(j, function(J) { return -J; })), K[q + le] = j; }); }); var Z = S(L, K); return M(K, Z), R(K, L.graph().align); } function V(L, B, U) { return function(K, ee, Z) { var q = K.node(ee), le = K.node(Z), ie = 0, $; if (ie += q.width / 2, u.has(q, "labelpos")) switch (q.labelpos.toLowerCase()) { case "l": $ = -q.width / 2; break; case "r": $ = q.width / 2; break; } if ($ && (ie += U ? $ : -$), $ = 0, ie += (q.dummy ? B : L) / 2, ie += (le.dummy ? B : L) / 2, ie += le.width / 2, u.has(le, "labelpos")) switch (le.labelpos.toLowerCase()) { case "l": $ = le.width / 2; break; case "r": $ = -le.width / 2; break; } return $ && (ie += U ? $ : -$), $ = 0, ie; }; } function k(L, B) { return L.node(B).width; } } ), /***/ "../../../../node_modules/dagre/lib/position/index.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/dagre/lib/position/index.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ../util */ "../../../../node_modules/dagre/lib/util.js" ), d = o( /*! ./bk */ "../../../../node_modules/dagre/lib/position/bk.js" ).positionX; a.exports = f; function f(m) { m = h.asNonCompoundGraph(m), p(m), u.forEach(d(m), function(_, v) { m.node(v).x = _; }); } function p(m) { var _ = h.buildLayerMatrix(m), v = m.graph().ranksep, C = 0; u.forEach(_, function(x) { var b = u.max(u.map(x, function(S) { return m.node(S).height; })); u.forEach(x, function(S) { m.node(S).y = C + b / 2; }), C += b + v; }); } } ), /***/ "../../../../node_modules/dagre/lib/rank/feasible-tree.js": ( /*!****************************************************************!*\ !*** ../../../../node_modules/dagre/lib/rank/feasible-tree.js ***! \****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph, d = o( /*! ./util */ "../../../../node_modules/dagre/lib/rank/util.js" ).slack; a.exports = f; function f(v) { var C = new h({ directed: !1 }), x = v.nodes()[0], b = v.nodeCount(); C.setNode(x, {}); for (var S, M; p(C, v) < b; ) S = m(C, v), M = C.hasNode(S.v) ? d(v, S) : -d(v, S), _(C, v, M); return C; } function p(v, C) { function x(b) { u.forEach(C.nodeEdges(b), function(S) { var M = S.v, R = b === M ? 
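/* rank/feasible-tree.js grows a "tight tree": starting from an arbitrary node it adds
   every node reachable through an edge whose slack (rank(w) - rank(v) - minlen) is
   zero, and while the tree does not yet span the graph it picks a minimum-slack edge
   incident on the tree and shifts the ranks of all tree nodes by that slack so the
   edge becomes tight. The resulting spanning tree is the starting point for the
   network-simplex ranker below. */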
S.w : M; !v.hasNode(R) && !d(C, S) && (v.setNode(R, {}), v.setEdge(b, R, {}), x(R)); }); } return u.forEach(v.nodes(), x), v.nodeCount(); } function m(v, C) { return u.minBy(C.edges(), function(x) { if (v.hasNode(x.v) !== v.hasNode(x.w)) return d(C, x); }); } function _(v, C, x) { u.forEach(v.nodes(), function(b) { C.node(b).rank += x; }); } } ), /***/ "../../../../node_modules/dagre/lib/rank/index.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/dagre/lib/rank/index.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./util */ "../../../../node_modules/dagre/lib/rank/util.js" ), h = u.longestPath, d = o( /*! ./feasible-tree */ "../../../../node_modules/dagre/lib/rank/feasible-tree.js" ), f = o( /*! ./network-simplex */ "../../../../node_modules/dagre/lib/rank/network-simplex.js" ); a.exports = p; function p(C) { switch (C.graph().ranker) { case "network-simplex": v(C); break; case "tight-tree": _(C); break; case "longest-path": m(C); break; default: v(C); } } var m = h; function _(C) { h(C), d(C); } function v(C) { f(C); } } ), /***/ "../../../../node_modules/dagre/lib/rank/network-simplex.js": ( /*!******************************************************************!*\ !*** ../../../../node_modules/dagre/lib/rank/network-simplex.js ***! \******************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! ./feasible-tree */ "../../../../node_modules/dagre/lib/rank/feasible-tree.js" ), d = o( /*! ./util */ "../../../../node_modules/dagre/lib/rank/util.js" ).slack, f = o( /*! ./util */ "../../../../node_modules/dagre/lib/rank/util.js" ).longestPath, p = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).alg.preorder, m = o( /*! ../graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).alg.postorder, _ = o( /*! ../util */ "../../../../node_modules/dagre/lib/util.js" ).simplify; a.exports = v, v.initLowLimValues = S, v.initCutValues = C, v.calcCutValue = b, v.leaveEdge = R, v.enterEdge = w, v.exchangeEdges = V; function v(U) { U = _(U), f(U); var K = h(U); S(K), C(K, U); for (var ee, Z; ee = R(K); ) Z = w(K, U, ee), V(K, U, ee, Z); } function C(U, K) { var ee = m(U, U.nodes()); ee = ee.slice(0, ee.length - 1), u.forEach(ee, function(Z) { x(U, K, Z); }); } function x(U, K, ee) { var Z = U.node(ee), q = Z.parent; U.edge(ee, q).cutvalue = b(U, K, ee); } function b(U, K, ee) { var Z = U.node(ee), q = Z.parent, le = !0, ie = K.edge(ee, q), $ = 0; return ie || (le = !1, ie = K.edge(q, ee)), $ = ie.weight, u.forEach(K.nodeEdges(ee), function(j) { var J = j.v === ee, ne = J ? j.w : j.v; if (ne !== q) { var pe = J === le, ge = K.edge(j).weight; if ($ += pe ? ge : -ge, L(U, ee, ne)) { var Ie = U.edge(ee, ne).cutvalue; $ += pe ? -Ie : Ie; } } }), $; } function S(U, K) { arguments.length < 2 && (K = U.nodes()[0]), M(U, {}, 1, K); } function M(U, K, ee, Z, q) { var le = ee, ie = U.node(Z); return K[Z] = !0, u.forEach(U.neighbors(Z), function($) { u.has(K, $) || (ee = M(U, K, ee, $, Z)); }), ie.low = le, ie.lim = ee++, q ? 
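/* rank/network-simplex.js implements the Gansner et al. network simplex ranker:
   initLowLimValues numbers tree nodes with DFS low/lim intervals, initCutValues
   computes for every tree edge the weighted flow that removing it would separate,
   and the main loop repeatedly leaves a tree edge with negative cut value
   (leaveEdge), enters the minimum-slack non-tree edge reconnecting the two halves
   (enterEdge), and exchanges them (exchangeEdges), re-ranking until no negative cut
   value remains. Selected via graph().ranker = "network-simplex" (also the default);
   "tight-tree" and "longest-path" are the cheaper alternatives handled in
   rank/index.js above. */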
ie.parent = q : delete ie.parent, ee; } function R(U) { return u.find(U.edges(), function(K) { return U.edge(K).cutvalue < 0; }); } function w(U, K, ee) { var Z = ee.v, q = ee.w; K.hasEdge(Z, q) || (Z = ee.w, q = ee.v); var le = U.node(Z), ie = U.node(q), $ = le, j = !1; le.lim > ie.lim && ($ = ie, j = !0); var J = u.filter(K.edges(), function(ne) { return j === B(U, U.node(ne.v), $) && j !== B(U, U.node(ne.w), $); }); return u.minBy(J, function(ne) { return d(K, ne); }); } function V(U, K, ee, Z) { var q = ee.v, le = ee.w; U.removeEdge(q, le), U.setEdge(Z.v, Z.w, {}), S(U), C(U, K), k(U, K); } function k(U, K) { var ee = u.find(U.nodes(), function(q) { return !K.node(q).parent; }), Z = p(U, ee); Z = Z.slice(1), u.forEach(Z, function(q) { var le = U.node(q).parent, ie = K.edge(q, le), $ = !1; ie || (ie = K.edge(le, q), $ = !0), K.node(q).rank = K.node(le).rank + ($ ? ie.minlen : -ie.minlen); }); } function L(U, K, ee) { return U.hasEdge(K, ee); } function B(U, K, ee) { return ee.low <= K.lim && K.lim <= ee.lim; } } ), /***/ "../../../../node_modules/dagre/lib/rank/util.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/dagre/lib/rank/util.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/dagre/lib/lodash.js" ); a.exports = { longestPath: h, slack: d }; function h(f) { var p = {}; function m(_) { var v = f.node(_); if (u.has(p, _)) return v.rank; p[_] = !0; var C = u.min(u.map(f.outEdges(_), function(x) { return m(x.w) - f.edge(x).minlen; })); return (C === Number.POSITIVE_INFINITY || // return value of _.map([]) for Lodash 3 C === void 0 || // return value of _.map([]) for Lodash 4 C === null) && (C = 0), v.rank = C; } u.forEach(f.sources(), m); } function d(f, p) { return f.node(p.w).rank - f.node(p.v).rank - f.edge(p).minlen; } } ), /***/ "../../../../node_modules/dagre/lib/util.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/dagre/lib/util.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/dagre/lib/lodash.js" ), h = o( /*! 
./graphlib */ "../../../../node_modules/dagre/lib/graphlib.js" ).Graph; a.exports = { addDummyNode: d, simplify: f, asNonCompoundGraph: p, successorWeights: m, predecessorWeights: _, intersectRect: v, buildLayerMatrix: C, normalizeRanks: x, removeEmptyRanks: b, addBorderNode: S, maxRank: M, partition: R, time: w, notime: V }; function d(k, L, B, U) { var K; do K = u.uniqueId(U); while (k.hasNode(K)); return B.dummy = L, k.setNode(K, B), K; } function f(k) { var L = new h().setGraph(k.graph()); return u.forEach(k.nodes(), function(B) { L.setNode(B, k.node(B)); }), u.forEach(k.edges(), function(B) { var U = L.edge(B.v, B.w) || { weight: 0, minlen: 1 }, K = k.edge(B); L.setEdge(B.v, B.w, { weight: U.weight + K.weight, minlen: Math.max(U.minlen, K.minlen) }); }), L; } function p(k) { var L = new h({ multigraph: k.isMultigraph() }).setGraph(k.graph()); return u.forEach(k.nodes(), function(B) { k.children(B).length || L.setNode(B, k.node(B)); }), u.forEach(k.edges(), function(B) { L.setEdge(B, k.edge(B)); }), L; } function m(k) { var L = u.map(k.nodes(), function(B) { var U = {}; return u.forEach(k.outEdges(B), function(K) { U[K.w] = (U[K.w] || 0) + k.edge(K).weight; }), U; }); return u.zipObject(k.nodes(), L); } function _(k) { var L = u.map(k.nodes(), function(B) { var U = {}; return u.forEach(k.inEdges(B), function(K) { U[K.v] = (U[K.v] || 0) + k.edge(K).weight; }), U; }); return u.zipObject(k.nodes(), L); } function v(k, L) { var B = k.x, U = k.y, K = L.x - B, ee = L.y - U, Z = k.width / 2, q = k.height / 2; if (!K && !ee) throw new Error("Not possible to find intersection inside of the rectangle"); var le, ie; return Math.abs(ee) * Z > Math.abs(K) * q ? (ee < 0 && (q = -q), le = q * K / ee, ie = q) : (K < 0 && (Z = -Z), le = Z, ie = Z * ee / K), { x: B + le, y: U + ie }; } function C(k) { var L = u.map(u.range(M(k) + 1), function() { return []; }); return u.forEach(k.nodes(), function(B) { var U = k.node(B), K = U.rank; u.isUndefined(K) || (L[K][U.order] = B); }), L; } function x(k) { var L = u.min(u.map(k.nodes(), function(B) { return k.node(B).rank; })); u.forEach(k.nodes(), function(B) { var U = k.node(B); u.has(U, "rank") && (U.rank -= L); }); } function b(k) { var L = u.min(u.map(k.nodes(), function(ee) { return k.node(ee).rank; })), B = []; u.forEach(k.nodes(), function(ee) { var Z = k.node(ee).rank - L; B[Z] || (B[Z] = []), B[Z].push(ee); }); var U = 0, K = k.graph().nodeRankFactor; u.forEach(B, function(ee, Z) { u.isUndefined(ee) && Z % K !== 0 ? --U : U && u.forEach(ee, function(q) { k.node(q).rank += U; }); }); } function S(k, L, B, U) { var K = { width: 0, height: 0 }; return arguments.length >= 4 && (K.rank = B, K.order = U), d(k, "border", K, L); } function M(k) { return u.max(u.map(k.nodes(), function(L) { var B = k.node(L).rank; if (!u.isUndefined(B)) return B; })); } function R(k, L) { var B = { lhs: [], rhs: [] }; return u.forEach(k, function(U) { L(U) ? B.lhs.push(U) : B.rhs.push(U); }), B; } function w(k, L) { var B = u.now(); try { return L(); } finally { console.log(k + " time: " + (u.now() - B) + "ms"); } } function V(k, L) { return L(); } } ), /***/ "../../../../node_modules/dagre/lib/version.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/dagre/lib/version.js ***! 
\*****************************************************/ /***/ (a) => { a.exports = "0.8.5"; } ), /***/ "../../../../node_modules/graphlib/index.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/graphlib/index.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lib */ "../../../../node_modules/graphlib/lib/index.js" ); a.exports = { Graph: u.Graph, json: o( /*! ./lib/json */ "../../../../node_modules/graphlib/lib/json.js" ), alg: o( /*! ./lib/alg */ "../../../../node_modules/graphlib/lib/alg/index.js" ), version: u.version }; } ), /***/ "../../../../node_modules/graphlib/lib/alg/components.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/components.js ***! \***************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = h; function h(d) { var f = {}, p = [], m; function _(v) { u.has(f, v) || (f[v] = !0, m.push(v), u.each(d.successors(v), _), u.each(d.predecessors(v), _)); } return u.each(d.nodes(), function(v) { m = [], _(v), m.length && p.push(m); }), p; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/dfs.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/dfs.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = h; function h(f, p, m) { u.isArray(p) || (p = [p]); var _ = (f.isDirected() ? f.successors : f.neighbors).bind(f), v = [], C = {}; return u.each(p, function(x) { if (!f.hasNode(x)) throw new Error("Graph does not have node: " + x); d(f, x, m === "post", C, _, v); }), v; } function d(f, p, m, _, v, C) { u.has(_, p) || (_[p] = !0, m || C.push(p), u.each(v(p), function(x) { d(f, x, m, _, v, C); }), m && C.push(p)); } } ), /***/ "../../../../node_modules/graphlib/lib/alg/dijkstra-all.js": ( /*!*****************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/dijkstra-all.js ***! \*****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./dijkstra */ "../../../../node_modules/graphlib/lib/alg/dijkstra.js" ), h = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = d; function d(f, p, m) { return h.transform(f.nodes(), function(_, v) { _[v] = u(f, v, p, m); }, {}); } } ), /***/ "../../../../node_modules/graphlib/lib/alg/dijkstra.js": ( /*!*************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/dijkstra.js ***! \*************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ), h = o( /*! ../data/priority-queue */ "../../../../node_modules/graphlib/lib/data/priority-queue.js" ); a.exports = f; var d = u.constant(1); function f(m, _, v, C) { return p( m, String(_), v || d, C || function(x) { return m.outEdges(x); } ); } function p(m, _, v, C) { var x = {}, b = new h(), S, M, R = function(w) { var V = w.v !== S ? w.v : w.w, k = x[V], L = v(w), B = M.distance + L; if (L < 0) throw new Error("dijkstra does not allow negative edge weights. 
Bad edge: " + w + " Weight: " + L); B < k.distance && (k.distance = B, k.predecessor = S, b.decrease(V, B)); }; for (m.nodes().forEach(function(w) { var V = w === _ ? 0 : Number.POSITIVE_INFINITY; x[w] = { distance: V }, b.add(w, V); }); b.size() > 0 && (S = b.removeMin(), M = x[S], M.distance !== Number.POSITIVE_INFINITY); ) C(S).forEach(R); return x; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/find-cycles.js": ( /*!****************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/find-cycles.js ***! \****************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ), h = o( /*! ./tarjan */ "../../../../node_modules/graphlib/lib/alg/tarjan.js" ); a.exports = d; function d(f) { return u.filter(h(f), function(p) { return p.length > 1 || p.length === 1 && f.hasEdge(p[0], p[0]); }); } } ), /***/ "../../../../node_modules/graphlib/lib/alg/floyd-warshall.js": ( /*!*******************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/floyd-warshall.js ***! \*******************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = d; var h = u.constant(1); function d(p, m, _) { return f( p, m || h, _ || function(v) { return p.outEdges(v); } ); } function f(p, m, _) { var v = {}, C = p.nodes(); return C.forEach(function(x) { v[x] = {}, v[x][x] = { distance: 0 }, C.forEach(function(b) { x !== b && (v[x][b] = { distance: Number.POSITIVE_INFINITY }); }), _(x).forEach(function(b) { var S = b.v === x ? b.w : b.v, M = m(b); v[x][S] = { distance: M, predecessor: x }; }); }), C.forEach(function(x) { var b = v[x]; C.forEach(function(S) { var M = v[S]; C.forEach(function(R) { var w = M[x], V = b[R], k = M[R], L = w.distance + V.distance; L < k.distance && (k.distance = L, k.predecessor = V.predecessor); }); }); }), v; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/index.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/index.js ***! \**********************************************************/ /***/ (a, l, o) => { a.exports = { components: o( /*! ./components */ "../../../../node_modules/graphlib/lib/alg/components.js" ), dijkstra: o( /*! ./dijkstra */ "../../../../node_modules/graphlib/lib/alg/dijkstra.js" ), dijkstraAll: o( /*! ./dijkstra-all */ "../../../../node_modules/graphlib/lib/alg/dijkstra-all.js" ), findCycles: o( /*! ./find-cycles */ "../../../../node_modules/graphlib/lib/alg/find-cycles.js" ), floydWarshall: o( /*! ./floyd-warshall */ "../../../../node_modules/graphlib/lib/alg/floyd-warshall.js" ), isAcyclic: o( /*! ./is-acyclic */ "../../../../node_modules/graphlib/lib/alg/is-acyclic.js" ), postorder: o( /*! ./postorder */ "../../../../node_modules/graphlib/lib/alg/postorder.js" ), preorder: o( /*! ./preorder */ "../../../../node_modules/graphlib/lib/alg/preorder.js" ), prim: o( /*! ./prim */ "../../../../node_modules/graphlib/lib/alg/prim.js" ), tarjan: o( /*! ./tarjan */ "../../../../node_modules/graphlib/lib/alg/tarjan.js" ), topsort: o( /*! 
./topsort */ "../../../../node_modules/graphlib/lib/alg/topsort.js" ) }; } ), /***/ "../../../../node_modules/graphlib/lib/alg/is-acyclic.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/is-acyclic.js ***! \***************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./topsort */ "../../../../node_modules/graphlib/lib/alg/topsort.js" ); a.exports = h; function h(d) { try { u(d); } catch (f) { if (f instanceof u.CycleException) return !1; throw f; } return !0; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/postorder.js": ( /*!**************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/postorder.js ***! \**************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./dfs */ "../../../../node_modules/graphlib/lib/alg/dfs.js" ); a.exports = h; function h(d, f) { return u(d, f, "post"); } } ), /***/ "../../../../node_modules/graphlib/lib/alg/preorder.js": ( /*!*************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/preorder.js ***! \*************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./dfs */ "../../../../node_modules/graphlib/lib/alg/dfs.js" ); a.exports = h; function h(d, f) { return u(d, f, "pre"); } } ), /***/ "../../../../node_modules/graphlib/lib/alg/prim.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/prim.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ), h = o( /*! ../graph */ "../../../../node_modules/graphlib/lib/graph.js" ), d = o( /*! ../data/priority-queue */ "../../../../node_modules/graphlib/lib/data/priority-queue.js" ); a.exports = f; function f(p, m) { var _ = new h(), v = {}, C = new d(), x; function b(M) { var R = M.v === x ? M.w : M.v, w = C.priority(R); if (w !== void 0) { var V = m(M); V < w && (v[R] = x, C.decrease(R, V)); } } if (p.nodeCount() === 0) return _; u.each(p.nodes(), function(M) { C.add(M, Number.POSITIVE_INFINITY), _.setNode(M); }), C.decrease(p.nodes()[0], 0); for (var S = !1; C.size() > 0; ) { if (x = C.removeMin(), u.has(v, x)) _.setEdge(x, v[x]); else { if (S) throw new Error("Input graph is not connected: " + p); S = !0; } p.nodeEdges(x).forEach(b); } return _; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/tarjan.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/tarjan.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = h; function h(d) { var f = 0, p = [], m = {}, _ = []; function v(C) { var x = m[C] = { onStack: !0, lowlink: f, index: f++ }; if (p.push(C), d.successors(C).forEach(function(M) { u.has(m, M) ? 
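/* graphlib/lib/alg/tarjan.js: Tarjan's strongly connected components. Each node gets
   an index and a lowlink; when a node's lowlink equals its own index the stack is
   popped down to that node and emitted as one component. findCycles above filters the
   result to components of size > 1 (or self-loops), and topsort/isAcyclic below reuse
   the same graph accessors. Illustrative use of the public exports (assuming the
   bundled library is reachable as `graphlib`, which is an assumption about the host
   build, not something this module asserts):
     const { Graph, alg } = graphlib;
     const g = new Graph();
     ["a", "b", "c"].forEach((v) => g.setNode(v));
     g.setEdge("a", "b"); g.setEdge("b", "c"); g.setEdge("c", "a");
     alg.tarjan(g);      // one component containing the cycle; member order is an implementation detail
     alg.isAcyclic(g);   // false, because topsort throws CycleException
*/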
m[M].onStack && (x.lowlink = Math.min(x.lowlink, m[M].index)) : (v(M), x.lowlink = Math.min(x.lowlink, m[M].lowlink)); }), x.lowlink === x.index) { var b = [], S; do S = p.pop(), m[S].onStack = !1, b.push(S); while (C !== S); _.push(b); } } return d.nodes().forEach(function(C) { u.has(m, C) || v(C); }), _; } } ), /***/ "../../../../node_modules/graphlib/lib/alg/topsort.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/alg/topsort.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = h, h.CycleException = d; function h(f) { var p = {}, m = {}, _ = []; function v(C) { if (u.has(m, C)) throw new d(); u.has(p, C) || (m[C] = !0, p[C] = !0, u.each(f.predecessors(C), v), delete m[C], _.push(C)); } if (u.each(f.sinks(), v), u.size(p) !== f.nodeCount()) throw new d(); return _; } function d() { } d.prototype = new Error(); } ), /***/ "../../../../node_modules/graphlib/lib/data/priority-queue.js": ( /*!********************************************************************!*\ !*** ../../../../node_modules/graphlib/lib/data/priority-queue.js ***! \********************************************************************/ /***/ (a, l, o) => { var u = o( /*! ../lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = h; function h() { this._arr = [], this._keyIndices = {}; } h.prototype.size = function() { return this._arr.length; }, h.prototype.keys = function() { return this._arr.map(function(d) { return d.key; }); }, h.prototype.has = function(d) { return u.has(this._keyIndices, d); }, h.prototype.priority = function(d) { var f = this._keyIndices[d]; if (f !== void 0) return this._arr[f].priority; }, h.prototype.min = function() { if (this.size() === 0) throw new Error("Queue underflow"); return this._arr[0].key; }, h.prototype.add = function(d, f) { var p = this._keyIndices; if (d = String(d), !u.has(p, d)) { var m = this._arr, _ = m.length; return p[d] = _, m.push({ key: d, priority: f }), this._decrease(_), !0; } return !1; }, h.prototype.removeMin = function() { this._swap(0, this._arr.length - 1); var d = this._arr.pop(); return delete this._keyIndices[d.key], this._heapify(0), d.key; }, h.prototype.decrease = function(d, f) { var p = this._keyIndices[d]; if (f > this._arr[p].priority) throw new Error("New priority is greater than current priority. Key: " + d + " Old: " + this._arr[p].priority + " New: " + f); this._arr[p].priority = f, this._decrease(p); }, h.prototype._heapify = function(d) { var f = this._arr, p = 2 * d, m = p + 1, _ = d; p < f.length && (_ = f[p].priority < f[_].priority ? p : _, m < f.length && (_ = f[m].priority < f[_].priority ? m : _), _ !== d && (this._swap(d, _), this._heapify(_))); }, h.prototype._decrease = function(d) { for (var f = this._arr, p = f[d].priority, m; d !== 0 && (m = d >> 1, !(f[m].priority < p)); ) this._swap(d, m), d = m; }, h.prototype._swap = function(d, f) { var p = this._arr, m = this._keyIndices, _ = p[d], v = p[f]; p[d] = v, p[f] = _, m[v.key] = d, m[_.key] = f; }; } ), /***/ "../../../../node_modules/graphlib/lib/graph.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/graphlib/lib/graph.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ); a.exports = p; var h = "\0", d = "\0", f = ""; function p(b) { this._isDirected = u.has(b, "directed") ? b.directed : !0, this._isMultigraph = u.has(b, "multigraph") ? b.multigraph : !1, this._isCompound = u.has(b, "compound") ? b.compound : !1, this._label = void 0, this._defaultNodeLabelFn = u.constant(void 0), this._defaultEdgeLabelFn = u.constant(void 0), this._nodes = {}, this._isCompound && (this._parent = {}, this._children = {}, this._children[d] = {}), this._in = {}, this._preds = {}, this._out = {}, this._sucs = {}, this._edgeObjs = {}, this._edgeLabels = {}; } p.prototype._nodeCount = 0, p.prototype._edgeCount = 0, p.prototype.isDirected = function() { return this._isDirected; }, p.prototype.isMultigraph = function() { return this._isMultigraph; }, p.prototype.isCompound = function() { return this._isCompound; }, p.prototype.setGraph = function(b) { return this._label = b, this; }, p.prototype.graph = function() { return this._label; }, p.prototype.setDefaultNodeLabel = function(b) { return u.isFunction(b) || (b = u.constant(b)), this._defaultNodeLabelFn = b, this; }, p.prototype.nodeCount = function() { return this._nodeCount; }, p.prototype.nodes = function() { return u.keys(this._nodes); }, p.prototype.sources = function() { var b = this; return u.filter(this.nodes(), function(S) { return u.isEmpty(b._in[S]); }); }, p.prototype.sinks = function() { var b = this; return u.filter(this.nodes(), function(S) { return u.isEmpty(b._out[S]); }); }, p.prototype.setNodes = function(b, S) { var M = arguments, R = this; return u.each(b, function(w) { M.length > 1 ? R.setNode(w, S) : R.setNode(w); }), this; }, p.prototype.setNode = function(b, S) { return u.has(this._nodes, b) ? (arguments.length > 1 && (this._nodes[b] = S), this) : (this._nodes[b] = arguments.length > 1 ? 
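/* graphlib/lib/graph.js stores the graph as plain objects: _nodes maps id -> label,
   _in/_out map id -> { edgeKey -> edgeObj }, _preds/_sucs count parallel edges per
   neighbor, and compound graphs additionally track _parent/_children with a reserved
   "\0" sentinel as the synthetic root. setNode is idempotent: re-adding an existing
   id only replaces the label when one is passed. Illustrative use of the public
   Graph API (for orientation only):
     const g = new Graph({ directed: true, compound: true });
     g.setNode("group");
     g.setNode("leaf", { width: 10 });
     g.setParent("leaf", "group");
     g.children("group");      // ["leaf"]
     g.predecessors("leaf");   // []
*/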
S : this._defaultNodeLabelFn(b), this._isCompound && (this._parent[b] = d, this._children[b] = {}, this._children[d][b] = !0), this._in[b] = {}, this._preds[b] = {}, this._out[b] = {}, this._sucs[b] = {}, ++this._nodeCount, this); }, p.prototype.node = function(b) { return this._nodes[b]; }, p.prototype.hasNode = function(b) { return u.has(this._nodes, b); }, p.prototype.removeNode = function(b) { var S = this; if (u.has(this._nodes, b)) { var M = function(R) { S.removeEdge(S._edgeObjs[R]); }; delete this._nodes[b], this._isCompound && (this._removeFromParentsChildList(b), delete this._parent[b], u.each(this.children(b), function(R) { S.setParent(R); }), delete this._children[b]), u.each(u.keys(this._in[b]), M), delete this._in[b], delete this._preds[b], u.each(u.keys(this._out[b]), M), delete this._out[b], delete this._sucs[b], --this._nodeCount; } return this; }, p.prototype.setParent = function(b, S) { if (!this._isCompound) throw new Error("Cannot set parent in a non-compound graph"); if (u.isUndefined(S)) S = d; else { S += ""; for (var M = S; !u.isUndefined(M); M = this.parent(M)) if (M === b) throw new Error("Setting " + S + " as parent of " + b + " would create a cycle"); this.setNode(S); } return this.setNode(b), this._removeFromParentsChildList(b), this._parent[b] = S, this._children[S][b] = !0, this; }, p.prototype._removeFromParentsChildList = function(b) { delete this._children[this._parent[b]][b]; }, p.prototype.parent = function(b) { if (this._isCompound) { var S = this._parent[b]; if (S !== d) return S; } }, p.prototype.children = function(b) { if (u.isUndefined(b) && (b = d), this._isCompound) { var S = this._children[b]; if (S) return u.keys(S); } else { if (b === d) return this.nodes(); if (this.hasNode(b)) return []; } }, p.prototype.predecessors = function(b) { var S = this._preds[b]; if (S) return u.keys(S); }, p.prototype.successors = function(b) { var S = this._sucs[b]; if (S) return u.keys(S); }, p.prototype.neighbors = function(b) { var S = this.predecessors(b); if (S) return u.union(S, this.successors(b)); }, p.prototype.isLeaf = function(b) { var S; return this.isDirected() ? S = this.successors(b) : S = this.neighbors(b), S.length === 0; }, p.prototype.filterNodes = function(b) { var S = new this.constructor({ directed: this._isDirected, multigraph: this._isMultigraph, compound: this._isCompound }); S.setGraph(this.graph()); var M = this; u.each(this._nodes, function(V, k) { b(k) && S.setNode(k, V); }), u.each(this._edgeObjs, function(V) { S.hasNode(V.v) && S.hasNode(V.w) && S.setEdge(V, M.edge(V)); }); var R = {}; function w(V) { var k = M.parent(V); return k === void 0 || S.hasNode(k) ? (R[V] = k, k) : k in R ? R[k] : w(k); } return this._isCompound && u.each(S.nodes(), function(V) { S.setParent(V, w(V)); }), S; }, p.prototype.setDefaultEdgeLabel = function(b) { return u.isFunction(b) || (b = u.constant(b)), this._defaultEdgeLabelFn = b, this; }, p.prototype.edgeCount = function() { return this._edgeCount; }, p.prototype.edges = function() { return u.values(this._edgeObjs); }, p.prototype.setPath = function(b, S) { var M = this, R = arguments; return u.reduce(b, function(w, V) { return R.length > 1 ? M.setEdge(w, V, S) : M.setEdge(w, V), V; }), this; }, p.prototype.setEdge = function() { var b, S, M, R, w = !1, V = arguments[0]; typeof V == "object" && V !== null && "v" in V ? 
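/* setEdge accepts either (v, w, [label], [name]) or an edge object { v, w, name }
   followed by an optional label. The edge is keyed by joining v, w and the optional
   name with reserved delimiter characters, so a named edge on a non-multigraph is
   rejected, endpoints are auto-created via setNode, and on undirected graphs the
   endpoints are ordered first so edge("a", "b") and edge("b", "a") resolve to the
   same record. */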
(b = V.v, S = V.w, M = V.name, arguments.length === 2 && (R = arguments[1], w = !0)) : (b = V, S = arguments[1], M = arguments[3], arguments.length > 2 && (R = arguments[2], w = !0)), b = "" + b, S = "" + S, u.isUndefined(M) || (M = "" + M); var k = v(this._isDirected, b, S, M); if (u.has(this._edgeLabels, k)) return w && (this._edgeLabels[k] = R), this; if (!u.isUndefined(M) && !this._isMultigraph) throw new Error("Cannot set a named edge when isMultigraph = false"); this.setNode(b), this.setNode(S), this._edgeLabels[k] = w ? R : this._defaultEdgeLabelFn(b, S, M); var L = C(this._isDirected, b, S, M); return b = L.v, S = L.w, Object.freeze(L), this._edgeObjs[k] = L, m(this._preds[S], b), m(this._sucs[b], S), this._in[S][k] = L, this._out[b][k] = L, this._edgeCount++, this; }, p.prototype.edge = function(b, S, M) { var R = arguments.length === 1 ? x(this._isDirected, arguments[0]) : v(this._isDirected, b, S, M); return this._edgeLabels[R]; }, p.prototype.hasEdge = function(b, S, M) { var R = arguments.length === 1 ? x(this._isDirected, arguments[0]) : v(this._isDirected, b, S, M); return u.has(this._edgeLabels, R); }, p.prototype.removeEdge = function(b, S, M) { var R = arguments.length === 1 ? x(this._isDirected, arguments[0]) : v(this._isDirected, b, S, M), w = this._edgeObjs[R]; return w && (b = w.v, S = w.w, delete this._edgeLabels[R], delete this._edgeObjs[R], _(this._preds[S], b), _(this._sucs[b], S), delete this._in[S][R], delete this._out[b][R], this._edgeCount--), this; }, p.prototype.inEdges = function(b, S) { var M = this._in[b]; if (M) { var R = u.values(M); return S ? u.filter(R, function(w) { return w.v === S; }) : R; } }, p.prototype.outEdges = function(b, S) { var M = this._out[b]; if (M) { var R = u.values(M); return S ? u.filter(R, function(w) { return w.w === S; }) : R; } }, p.prototype.nodeEdges = function(b, S) { var M = this.inEdges(b, S); if (M) return M.concat(this.outEdges(b, S)); }; function m(b, S) { b[S] ? b[S]++ : b[S] = 1; } function _(b, S) { --b[S] || delete b[S]; } function v(b, S, M, R) { var w = "" + S, V = "" + M; if (!b && w > V) { var k = w; w = V, V = k; } return w + f + V + f + (u.isUndefined(R) ? h : R); } function C(b, S, M, R) { var w = "" + S, V = "" + M; if (!b && w > V) { var k = w; w = V, V = k; } var L = { v: w, w: V }; return R && (L.name = R), L; } function x(b, S) { return v(b, S.v, S.w, S.name); } } ), /***/ "../../../../node_modules/graphlib/lib/index.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/graphlib/lib/index.js ***! \******************************************************/ /***/ (a, l, o) => { a.exports = { Graph: o( /*! ./graph */ "../../../../node_modules/graphlib/lib/graph.js" ), version: o( /*! ./version */ "../../../../node_modules/graphlib/lib/version.js" ) }; } ), /***/ "../../../../node_modules/graphlib/lib/json.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/graphlib/lib/json.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./lodash */ "../../../../node_modules/graphlib/lib/lodash.js" ), h = o( /*! 
./graph */ "../../../../node_modules/graphlib/lib/graph.js" ); a.exports = { write: d, read: m }; function d(_) { var v = { options: { directed: _.isDirected(), multigraph: _.isMultigraph(), compound: _.isCompound() }, nodes: f(_), edges: p(_) }; return u.isUndefined(_.graph()) || (v.value = u.clone(_.graph())), v; } function f(_) { return u.map(_.nodes(), function(v) { var C = _.node(v), x = _.parent(v), b = { v }; return u.isUndefined(C) || (b.value = C), u.isUndefined(x) || (b.parent = x), b; }); } function p(_) { return u.map(_.edges(), function(v) { var C = _.edge(v), x = { v: v.v, w: v.w }; return u.isUndefined(v.name) || (x.name = v.name), u.isUndefined(C) || (x.value = C), x; }); } function m(_) { var v = new h(_.options).setGraph(_.value); return u.each(_.nodes, function(C) { v.setNode(C.v, C.value), C.parent && v.setParent(C.v, C.parent); }), u.each(_.edges, function(C) { v.setEdge({ v: C.v, w: C.w, name: C.name }, C.value); }), v; } } ), /***/ "../../../../node_modules/graphlib/lib/lodash.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/graphlib/lib/lodash.js ***! \*******************************************************/ /***/ (a, l, o) => { var u; try { u = { clone: o( /*! lodash/clone */ "../../../../node_modules/lodash/clone.js" ), constant: o( /*! lodash/constant */ "../../../../node_modules/lodash/constant.js" ), each: o( /*! lodash/each */ "../../../../node_modules/lodash/each.js" ), filter: o( /*! lodash/filter */ "../../../../node_modules/lodash/filter.js" ), has: o( /*! lodash/has */ "../../../../node_modules/lodash/has.js" ), isArray: o( /*! lodash/isArray */ "../../../../node_modules/lodash/isArray.js" ), isEmpty: o( /*! lodash/isEmpty */ "../../../../node_modules/lodash/isEmpty.js" ), isFunction: o( /*! lodash/isFunction */ "../../../../node_modules/lodash/isFunction.js" ), isUndefined: o( /*! lodash/isUndefined */ "../../../../node_modules/lodash/isUndefined.js" ), keys: o( /*! lodash/keys */ "../../../../node_modules/lodash/keys.js" ), map: o( /*! lodash/map */ "../../../../node_modules/lodash/map.js" ), reduce: o( /*! lodash/reduce */ "../../../../node_modules/lodash/reduce.js" ), size: o( /*! lodash/size */ "../../../../node_modules/lodash/size.js" ), transform: o( /*! lodash/transform */ "../../../../node_modules/lodash/transform.js" ), union: o( /*! lodash/union */ "../../../../node_modules/lodash/union.js" ), values: o( /*! lodash/values */ "../../../../node_modules/lodash/values.js" ) }; } catch { } u || (u = window._), a.exports = u; } ), /***/ "../../../../node_modules/graphlib/lib/version.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/graphlib/lib/version.js ***! \********************************************************/ /***/ (a) => { a.exports = "2.1.8"; } ), /***/ "../../../../node_modules/lodash/_DataView.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_DataView.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), d = u(h, "DataView"); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_Hash.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/_Hash.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./_hashClear */ "../../../../node_modules/lodash/_hashClear.js" ), h = o( /*! ./_hashDelete */ "../../../../node_modules/lodash/_hashDelete.js" ), d = o( /*! ./_hashGet */ "../../../../node_modules/lodash/_hashGet.js" ), f = o( /*! ./_hashHas */ "../../../../node_modules/lodash/_hashHas.js" ), p = o( /*! ./_hashSet */ "../../../../node_modules/lodash/_hashSet.js" ); function m(_) { var v = -1, C = _ == null ? 0 : _.length; for (this.clear(); ++v < C; ) { var x = _[v]; this.set(x[0], x[1]); } } m.prototype.clear = u, m.prototype.delete = h, m.prototype.get = d, m.prototype.has = f, m.prototype.set = p, a.exports = m; } ), /***/ "../../../../node_modules/lodash/_ListCache.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_ListCache.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_listCacheClear */ "../../../../node_modules/lodash/_listCacheClear.js" ), h = o( /*! ./_listCacheDelete */ "../../../../node_modules/lodash/_listCacheDelete.js" ), d = o( /*! ./_listCacheGet */ "../../../../node_modules/lodash/_listCacheGet.js" ), f = o( /*! ./_listCacheHas */ "../../../../node_modules/lodash/_listCacheHas.js" ), p = o( /*! ./_listCacheSet */ "../../../../node_modules/lodash/_listCacheSet.js" ); function m(_) { var v = -1, C = _ == null ? 0 : _.length; for (this.clear(); ++v < C; ) { var x = _[v]; this.set(x[0], x[1]); } } m.prototype.clear = u, m.prototype.delete = h, m.prototype.get = d, m.prototype.has = f, m.prototype.set = p, a.exports = m; } ), /***/ "../../../../node_modules/lodash/_Map.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/_Map.js ***! \***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), d = u(h, "Map"); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_MapCache.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_MapCache.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_mapCacheClear */ "../../../../node_modules/lodash/_mapCacheClear.js" ), h = o( /*! ./_mapCacheDelete */ "../../../../node_modules/lodash/_mapCacheDelete.js" ), d = o( /*! ./_mapCacheGet */ "../../../../node_modules/lodash/_mapCacheGet.js" ), f = o( /*! ./_mapCacheHas */ "../../../../node_modules/lodash/_mapCacheHas.js" ), p = o( /*! ./_mapCacheSet */ "../../../../node_modules/lodash/_mapCacheSet.js" ); function m(_) { var v = -1, C = _ == null ? 0 : _.length; for (this.clear(); ++v < C; ) { var x = _[v]; this.set(x[0], x[1]); } } m.prototype.clear = u, m.prototype.delete = h, m.prototype.get = d, m.prototype.has = f, m.prototype.set = p, a.exports = m; } ), /***/ "../../../../node_modules/lodash/_Promise.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_Promise.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), d = u(h, "Promise"); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_Set.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/_Set.js ***! 
\***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), d = u(h, "Set"); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_SetCache.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_SetCache.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_MapCache */ "../../../../node_modules/lodash/_MapCache.js" ), h = o( /*! ./_setCacheAdd */ "../../../../node_modules/lodash/_setCacheAdd.js" ), d = o( /*! ./_setCacheHas */ "../../../../node_modules/lodash/_setCacheHas.js" ); function f(p) { var m = -1, _ = p == null ? 0 : p.length; for (this.__data__ = new u(); ++m < _; ) this.add(p[m]); } f.prototype.add = f.prototype.push = h, f.prototype.has = d, a.exports = f; } ), /***/ "../../../../node_modules/lodash/_Stack.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/_Stack.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_ListCache */ "../../../../node_modules/lodash/_ListCache.js" ), h = o( /*! ./_stackClear */ "../../../../node_modules/lodash/_stackClear.js" ), d = o( /*! ./_stackDelete */ "../../../../node_modules/lodash/_stackDelete.js" ), f = o( /*! ./_stackGet */ "../../../../node_modules/lodash/_stackGet.js" ), p = o( /*! ./_stackHas */ "../../../../node_modules/lodash/_stackHas.js" ), m = o( /*! ./_stackSet */ "../../../../node_modules/lodash/_stackSet.js" ); function _(v) { var C = this.__data__ = new u(v); this.size = C.size; } _.prototype.clear = h, _.prototype.delete = d, _.prototype.get = f, _.prototype.has = p, _.prototype.set = m, a.exports = _; } ), /***/ "../../../../node_modules/lodash/_Symbol.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/_Symbol.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = u.Symbol; a.exports = h; } ), /***/ "../../../../node_modules/lodash/_Uint8Array.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_Uint8Array.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = u.Uint8Array; a.exports = h; } ), /***/ "../../../../node_modules/lodash/_WeakMap.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_WeakMap.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), d = u(h, "WeakMap"); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_apply.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/_apply.js ***! 
\*************************************************/ /***/ (a) => { function l(o, u, h) { switch (h.length) { case 0: return o.call(u); case 1: return o.call(u, h[0]); case 2: return o.call(u, h[0], h[1]); case 3: return o.call(u, h[0], h[1], h[2]); } return o.apply(u, h); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayEach.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_arrayEach.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = o == null ? 0 : o.length; ++h < d && u(o[h], h, o) !== !1; ) ; return o; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayFilter.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_arrayFilter.js ***! \*******************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = o == null ? 0 : o.length, f = 0, p = []; ++h < d; ) { var m = o[h]; u(m, h, o) && (p[f++] = m); } return p; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayIncludes.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_arrayIncludes.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIndexOf */ "../../../../node_modules/lodash/_baseIndexOf.js" ); function h(d, f) { var p = d == null ? 0 : d.length; return !!p && u(d, f, 0) > -1; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_arrayIncludesWith.js": ( /*!*************************************************************!*\ !*** ../../../../node_modules/lodash/_arrayIncludesWith.js ***! \*************************************************************/ /***/ (a) => { function l(o, u, h) { for (var d = -1, f = o == null ? 0 : o.length; ++d < f; ) if (h(u, o[d])) return !0; return !1; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayLikeKeys.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_arrayLikeKeys.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseTimes */ "../../../../node_modules/lodash/_baseTimes.js" ), h = o( /*! ./isArguments */ "../../../../node_modules/lodash/isArguments.js" ), d = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), f = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), p = o( /*! ./_isIndex */ "../../../../node_modules/lodash/_isIndex.js" ), m = o( /*! ./isTypedArray */ "../../../../node_modules/lodash/isTypedArray.js" ), _ = Object.prototype, v = _.hasOwnProperty; function C(x, b) { var S = d(x), M = !S && h(x), R = !S && !M && f(x), w = !S && !M && !R && m(x), V = S || M || R || w, k = V ? u(x.length, String) : [], L = k.length; for (var B in x) (b || v.call(x, B)) && !(V && // Safari 9 has enumerable `arguments.length` in strict mode. (B == "length" || // Node.js 0.10 has enumerable non-index properties on buffers. R && (B == "offset" || B == "parent") || // PhantomJS 2 has enumerable non-index properties on typed arrays. w && (B == "buffer" || B == "byteLength" || B == "byteOffset") || // Skip index properties. p(B, L))) && k.push(B); return k; } a.exports = C; } ), /***/ "../../../../node_modules/lodash/_arrayMap.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_arrayMap.js ***! 
\****************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = o == null ? 0 : o.length, f = Array(d); ++h < d; ) f[h] = u(o[h], h, o); return f; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayPush.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_arrayPush.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = u.length, f = o.length; ++h < d; ) o[f + h] = u[h]; return o; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arrayReduce.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_arrayReduce.js ***! \*******************************************************/ /***/ (a) => { function l(o, u, h, d) { var f = -1, p = o == null ? 0 : o.length; for (d && p && (h = o[++f]); ++f < p; ) h = u(h, o[f], f, o); return h; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_arraySome.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_arraySome.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = o == null ? 0 : o.length; ++h < d; ) if (u(o[h], h, o)) return !0; return !1; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_asciiSize.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_asciiSize.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseProperty */ "../../../../node_modules/lodash/_baseProperty.js" ), h = u("length"); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_assignMergeValue.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/_assignMergeValue.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseAssignValue */ "../../../../node_modules/lodash/_baseAssignValue.js" ), h = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ); function d(f, p, m) { (m !== void 0 && !h(f[p], m) || m === void 0 && !(p in f)) && u(f, p, m); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_assignValue.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_assignValue.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseAssignValue */ "../../../../node_modules/lodash/_baseAssignValue.js" ), h = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ), d = Object.prototype, f = d.hasOwnProperty; function p(m, _, v) { var C = m[_]; (!(f.call(m, _) && h(C, v)) || v === void 0 && !(_ in m)) && u(m, _, v); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_assocIndexOf.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_assocIndexOf.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ); function h(d, f) { for (var p = d.length; p--; ) if (u(d[p][0], f)) return p; return -1; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseAssign.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseAssign.js ***! 
\******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_copyObject */ "../../../../node_modules/lodash/_copyObject.js" ), h = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function d(f, p) { return f && u(p, h(p), f); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseAssignIn.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseAssignIn.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_copyObject */ "../../../../node_modules/lodash/_copyObject.js" ), h = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ); function d(f, p) { return f && u(p, h(p), f); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseAssignValue.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_baseAssignValue.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_defineProperty */ "../../../../node_modules/lodash/_defineProperty.js" ); function h(d, f, p) { f == "__proto__" && u ? u(d, f, { configurable: !0, enumerable: !0, value: p, writable: !0 }) : d[f] = p; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseClone.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseClone.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Stack */ "../../../../node_modules/lodash/_Stack.js" ), h = o( /*! ./_arrayEach */ "../../../../node_modules/lodash/_arrayEach.js" ), d = o( /*! ./_assignValue */ "../../../../node_modules/lodash/_assignValue.js" ), f = o( /*! ./_baseAssign */ "../../../../node_modules/lodash/_baseAssign.js" ), p = o( /*! ./_baseAssignIn */ "../../../../node_modules/lodash/_baseAssignIn.js" ), m = o( /*! ./_cloneBuffer */ "../../../../node_modules/lodash/_cloneBuffer.js" ), _ = o( /*! ./_copyArray */ "../../../../node_modules/lodash/_copyArray.js" ), v = o( /*! ./_copySymbols */ "../../../../node_modules/lodash/_copySymbols.js" ), C = o( /*! ./_copySymbolsIn */ "../../../../node_modules/lodash/_copySymbolsIn.js" ), x = o( /*! ./_getAllKeys */ "../../../../node_modules/lodash/_getAllKeys.js" ), b = o( /*! ./_getAllKeysIn */ "../../../../node_modules/lodash/_getAllKeysIn.js" ), S = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), M = o( /*! ./_initCloneArray */ "../../../../node_modules/lodash/_initCloneArray.js" ), R = o( /*! ./_initCloneByTag */ "../../../../node_modules/lodash/_initCloneByTag.js" ), w = o( /*! ./_initCloneObject */ "../../../../node_modules/lodash/_initCloneObject.js" ), V = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), k = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), L = o( /*! ./isMap */ "../../../../node_modules/lodash/isMap.js" ), B = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), U = o( /*! ./isSet */ "../../../../node_modules/lodash/isSet.js" ), K = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ), ee = o( /*! 
./keysIn */ "../../../../node_modules/lodash/keysIn.js" ), Z = 1, q = 2, le = 4, ie = "[object Arguments]", $ = "[object Array]", j = "[object Boolean]", J = "[object Date]", ne = "[object Error]", pe = "[object Function]", ge = "[object GeneratorFunction]", Ie = "[object Map]", ye = "[object Number]", Se = "[object Object]", re = "[object RegExp]", te = "[object Set]", he = "[object String]", be = "[object Symbol]", Ue = "[object WeakMap]", Ee = "[object ArrayBuffer]", He = "[object DataView]", Xe = "[object Float32Array]", rt = "[object Float64Array]", dt = "[object Int8Array]", bt = "[object Int16Array]", Mt = "[object Int32Array]", Ct = "[object Uint8Array]", di = "[object Uint8ClampedArray]", Kt = "[object Uint16Array]", ei = "[object Uint32Array]", bi = {}; bi[ie] = bi[$] = bi[Ee] = bi[He] = bi[j] = bi[J] = bi[Xe] = bi[rt] = bi[dt] = bi[bt] = bi[Mt] = bi[Ie] = bi[ye] = bi[Se] = bi[re] = bi[te] = bi[he] = bi[be] = bi[Ct] = bi[di] = bi[Kt] = bi[ei] = !0, bi[ne] = bi[pe] = bi[Ue] = !1; function vr(yi, Vr, Rr, ks, Qt, Ei) { var Pi, rr = Vr & Z, sr = Vr & q, dr = Vr & le; if (Rr && (Pi = Qt ? Rr(yi, ks, Qt, Ei) : Rr(yi)), Pi !== void 0) return Pi; if (!B(yi)) return yi; var nr = V(yi); if (nr) { if (Pi = M(yi), !rr) return _(yi, Pi); } else { var Pr = S(yi), ti = Pr == pe || Pr == ge; if (k(yi)) return m(yi, rr); if (Pr == Se || Pr == ie || ti && !Qt) { if (Pi = sr || ti ? {} : w(yi), !rr) return sr ? C(yi, p(Pi, yi)) : v(yi, f(Pi, yi)); } else { if (!bi[Pr]) return Qt ? yi : {}; Pi = R(yi, Pr, rr); } } Ei || (Ei = new u()); var Oi = Ei.get(yi); if (Oi) return Oi; Ei.set(yi, Pi), U(yi) ? yi.forEach(function(wr) { Pi.add(vr(wr, Vr, Rr, wr, yi, Ei)); }) : L(yi) && yi.forEach(function(wr, Lr) { Pi.set(Lr, vr(wr, Vr, Rr, Lr, yi, Ei)); }); var ri = dr ? sr ? b : x : sr ? ee : K, ki = nr ? void 0 : ri(yi); return h(ki || yi, function(wr, Lr) { ki && (Lr = wr, wr = yi[Lr]), d(Pi, Lr, vr(wr, Vr, Rr, Lr, yi, Ei)); }), Pi; } a.exports = vr; } ), /***/ "../../../../node_modules/lodash/_baseCreate.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseCreate.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), h = Object.create, d = /* @__PURE__ */ function() { function f() { } return function(p) { if (!u(p)) return {}; if (h) return h(p); f.prototype = p; var m = new f(); return f.prototype = void 0, m; }; }(); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseEach.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_baseEach.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseForOwn */ "../../../../node_modules/lodash/_baseForOwn.js" ), h = o( /*! ./_createBaseEach */ "../../../../node_modules/lodash/_createBaseEach.js" ), d = h(u); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseExtremum.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseExtremum.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ); function h(d, f, p) { for (var m = -1, _ = d.length; ++m < _; ) { var v = d[m], C = f(v); if (C != null && (x === void 0 ? 
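/* lodash internals from here on. _baseExtremum is the shared engine behind minBy and
   maxBy used by dagre above (e.g. picking the minimum-slack edge): it walks the array
   once, applies the iteratee to each element, and keeps the element whose computed
   value wins under the supplied comparator, skipping null/undefined and
   symbol-valued computations. */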
C === C && !u(C) : p(C, x))) var x = C, b = v; } return b; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseFilter.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseFilter.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseEach */ "../../../../node_modules/lodash/_baseEach.js" ); function h(d, f) { var p = []; return u(d, function(m, _, v) { f(m, _, v) && p.push(m); }), p; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseFindIndex.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_baseFindIndex.js ***! \*********************************************************/ /***/ (a) => { function l(o, u, h, d) { for (var f = o.length, p = h + (d ? 1 : -1); d ? p-- : ++p < f; ) if (u(o[p], p, o)) return p; return -1; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseFlatten.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseFlatten.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayPush */ "../../../../node_modules/lodash/_arrayPush.js" ), h = o( /*! ./_isFlattenable */ "../../../../node_modules/lodash/_isFlattenable.js" ); function d(f, p, m, _, v) { var C = -1, x = f.length; for (m || (m = h), v || (v = []); ++C < x; ) { var b = f[C]; p > 0 && m(b) ? p > 1 ? d(b, p - 1, m, _, v) : u(v, b) : _ || (v[v.length] = b); } return v; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseFor.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_baseFor.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_createBaseFor */ "../../../../node_modules/lodash/_createBaseFor.js" ), h = u(); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseForOwn.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseForOwn.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFor */ "../../../../node_modules/lodash/_baseFor.js" ), h = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function d(f, p) { return f && u(f, p, h); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseGet.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_baseGet.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_castPath */ "../../../../node_modules/lodash/_castPath.js" ), h = o( /*! ./_toKey */ "../../../../node_modules/lodash/_toKey.js" ); function d(f, p) { p = u(p, f); for (var m = 0, _ = p.length; f != null && m < _; ) f = f[h(p[m++])]; return m && m == _ ? f : void 0; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseGetAllKeys.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_baseGetAllKeys.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayPush */ "../../../../node_modules/lodash/_arrayPush.js" ), h = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ); function d(f, p, m) { var _ = p(f); return h(f) ? 
_ : u(_, m(f)); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseGetTag.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseGetTag.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = o( /*! ./_getRawTag */ "../../../../node_modules/lodash/_getRawTag.js" ), d = o( /*! ./_objectToString */ "../../../../node_modules/lodash/_objectToString.js" ), f = "[object Null]", p = "[object Undefined]", m = u ? u.toStringTag : void 0; function _(v) { return v == null ? v === void 0 ? p : f : m && m in Object(v) ? h(v) : d(v); } a.exports = _; } ), /***/ "../../../../node_modules/lodash/_baseGt.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/_baseGt.js ***! \**************************************************/ /***/ (a) => { function l(o, u) { return o > u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseHas.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_baseHas.js ***! \***************************************************/ /***/ (a) => { var l = Object.prototype, o = l.hasOwnProperty; function u(h, d) { return h != null && o.call(h, d); } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_baseHasIn.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseHasIn.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { return o != null && u in Object(o); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseIndexOf.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseIndexOf.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFindIndex */ "../../../../node_modules/lodash/_baseFindIndex.js" ), h = o( /*! ./_baseIsNaN */ "../../../../node_modules/lodash/_baseIsNaN.js" ), d = o( /*! ./_strictIndexOf */ "../../../../node_modules/lodash/_strictIndexOf.js" ); function f(p, m, _) { return m === m ? d(p, m, _) : u(p, h, _); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseIsArguments.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsArguments.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), d = "[object Arguments]"; function f(p) { return h(p) && u(p) == d; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseIsEqual.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsEqual.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsEqualDeep */ "../../../../node_modules/lodash/_baseIsEqualDeep.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ); function d(f, p, m, _, v) { return f === p ? !0 : f == null || p == null || !h(f) && !h(p) ? 
f !== f && p !== p : u(f, p, m, _, d, v); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseIsEqualDeep.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsEqualDeep.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Stack */ "../../../../node_modules/lodash/_Stack.js" ), h = o( /*! ./_equalArrays */ "../../../../node_modules/lodash/_equalArrays.js" ), d = o( /*! ./_equalByTag */ "../../../../node_modules/lodash/_equalByTag.js" ), f = o( /*! ./_equalObjects */ "../../../../node_modules/lodash/_equalObjects.js" ), p = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), m = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), _ = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), v = o( /*! ./isTypedArray */ "../../../../node_modules/lodash/isTypedArray.js" ), C = 1, x = "[object Arguments]", b = "[object Array]", S = "[object Object]", M = Object.prototype, R = M.hasOwnProperty; function w(V, k, L, B, U, K) { var ee = m(V), Z = m(k), q = ee ? b : p(V), le = Z ? b : p(k); q = q == x ? S : q, le = le == x ? S : le; var ie = q == S, $ = le == S, j = q == le; if (j && _(V)) { if (!_(k)) return !1; ee = !0, ie = !1; } if (j && !ie) return K || (K = new u()), ee || v(V) ? h(V, k, L, B, U, K) : d(V, k, q, L, B, U, K); if (!(L & C)) { var J = ie && R.call(V, "__wrapped__"), ne = $ && R.call(k, "__wrapped__"); if (J || ne) { var pe = J ? V.value() : V, ge = ne ? k.value() : k; return K || (K = new u()), U(pe, ge, L, B, K); } } return j ? (K || (K = new u()), f(V, k, L, B, U, K)) : !1; } a.exports = w; } ), /***/ "../../../../node_modules/lodash/_baseIsMap.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsMap.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), d = "[object Map]"; function f(p) { return h(p) && u(p) == d; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseIsMatch.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsMatch.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Stack */ "../../../../node_modules/lodash/_Stack.js" ), h = o( /*! ./_baseIsEqual */ "../../../../node_modules/lodash/_baseIsEqual.js" ), d = 1, f = 2; function p(m, _, v, C) { var x = v.length, b = x, S = !C; if (m == null) return !b; for (m = Object(m); x--; ) { var M = v[x]; if (S && M[2] ? M[1] !== m[M[0]] : !(M[0] in m)) return !1; } for (; ++x < b; ) { M = v[x]; var R = M[0], w = m[R], V = M[1]; if (S && M[2]) { if (w === void 0 && !(R in m)) return !1; } else { var k = new u(); if (C) var L = C(w, V, R, m, _, k); if (!(L === void 0 ? h(V, w, d | f, C, k) : L)) return !1; } } return !0; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_baseIsNaN.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsNaN.js ***! 
\*****************************************************/ /***/ (a) => { function l(o) { return o !== o; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseIsNative.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsNative.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isFunction */ "../../../../node_modules/lodash/isFunction.js" ), h = o( /*! ./_isMasked */ "../../../../node_modules/lodash/_isMasked.js" ), d = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), f = o( /*! ./_toSource */ "../../../../node_modules/lodash/_toSource.js" ), p = /[\\^$.*+?()[\]{}|]/g, m = /^\[object .+?Constructor\]$/, _ = Function.prototype, v = Object.prototype, C = _.toString, x = v.hasOwnProperty, b = RegExp( "^" + C.call(x).replace(p, "\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, "$1.*?") + "$" ); function S(M) { if (!d(M) || h(M)) return !1; var R = u(M) ? b : m; return R.test(f(M)); } a.exports = S; } ), /***/ "../../../../node_modules/lodash/_baseIsSet.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsSet.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), d = "[object Set]"; function f(p) { return h(p) && u(p) == d; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseIsTypedArray.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/_baseIsTypedArray.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./isLength */ "../../../../node_modules/lodash/isLength.js" ), d = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), f = "[object Arguments]", p = "[object Array]", m = "[object Boolean]", _ = "[object Date]", v = "[object Error]", C = "[object Function]", x = "[object Map]", b = "[object Number]", S = "[object Object]", M = "[object RegExp]", R = "[object Set]", w = "[object String]", V = "[object WeakMap]", k = "[object ArrayBuffer]", L = "[object DataView]", B = "[object Float32Array]", U = "[object Float64Array]", K = "[object Int8Array]", ee = "[object Int16Array]", Z = "[object Int32Array]", q = "[object Uint8Array]", le = "[object Uint8ClampedArray]", ie = "[object Uint16Array]", $ = "[object Uint32Array]", j = {}; j[B] = j[U] = j[K] = j[ee] = j[Z] = j[q] = j[le] = j[ie] = j[$] = !0, j[f] = j[p] = j[k] = j[m] = j[L] = j[_] = j[v] = j[C] = j[x] = j[b] = j[S] = j[M] = j[R] = j[w] = j[V] = !1; function J(ne) { return d(ne) && h(ne.length) && !!j[u(ne)]; } a.exports = J; } ), /***/ "../../../../node_modules/lodash/_baseIteratee.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseIteratee.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseMatches */ "../../../../node_modules/lodash/_baseMatches.js" ), h = o( /*! ./_baseMatchesProperty */ "../../../../node_modules/lodash/_baseMatchesProperty.js" ), d = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ), f = o( /*! 
./isArray */ "../../../../node_modules/lodash/isArray.js" ), p = o( /*! ./property */ "../../../../node_modules/lodash/property.js" ); function m(_) { return typeof _ == "function" ? _ : _ == null ? d : typeof _ == "object" ? f(_) ? h(_[0], _[1]) : u(_) : p(_); } a.exports = m; } ), /***/ "../../../../node_modules/lodash/_baseKeys.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_baseKeys.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_isPrototype */ "../../../../node_modules/lodash/_isPrototype.js" ), h = o( /*! ./_nativeKeys */ "../../../../node_modules/lodash/_nativeKeys.js" ), d = Object.prototype, f = d.hasOwnProperty; function p(m) { if (!u(m)) return h(m); var _ = []; for (var v in Object(m)) f.call(m, v) && v != "constructor" && _.push(v); return _; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_baseKeysIn.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseKeysIn.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), h = o( /*! ./_isPrototype */ "../../../../node_modules/lodash/_isPrototype.js" ), d = o( /*! ./_nativeKeysIn */ "../../../../node_modules/lodash/_nativeKeysIn.js" ), f = Object.prototype, p = f.hasOwnProperty; function m(_) { if (!u(_)) return d(_); var v = h(_), C = []; for (var x in _) x == "constructor" && (v || !p.call(_, x)) || C.push(x); return C; } a.exports = m; } ), /***/ "../../../../node_modules/lodash/_baseLt.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/_baseLt.js ***! \**************************************************/ /***/ (a) => { function l(o, u) { return o < u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseMap.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_baseMap.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseEach */ "../../../../node_modules/lodash/_baseEach.js" ), h = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ); function d(f, p) { var m = -1, _ = h(f) ? Array(f.length) : []; return u(f, function(v, C, x) { _[++m] = p(v, C, x); }), _; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseMatches.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseMatches.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsMatch */ "../../../../node_modules/lodash/_baseIsMatch.js" ), h = o( /*! ./_getMatchData */ "../../../../node_modules/lodash/_getMatchData.js" ), d = o( /*! ./_matchesStrictComparable */ "../../../../node_modules/lodash/_matchesStrictComparable.js" ); function f(p) { var m = h(p); return m.length == 1 && m[0][2] ? d(m[0][0], m[0][1]) : function(_) { return _ === p || u(_, p, m); }; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseMatchesProperty.js": ( /*!***************************************************************!*\ !*** ../../../../node_modules/lodash/_baseMatchesProperty.js ***! \***************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsEqual */ "../../../../node_modules/lodash/_baseIsEqual.js" ), h = o( /*! 
./get */ "../../../../node_modules/lodash/get.js" ), d = o( /*! ./hasIn */ "../../../../node_modules/lodash/hasIn.js" ), f = o( /*! ./_isKey */ "../../../../node_modules/lodash/_isKey.js" ), p = o( /*! ./_isStrictComparable */ "../../../../node_modules/lodash/_isStrictComparable.js" ), m = o( /*! ./_matchesStrictComparable */ "../../../../node_modules/lodash/_matchesStrictComparable.js" ), _ = o( /*! ./_toKey */ "../../../../node_modules/lodash/_toKey.js" ), v = 1, C = 2; function x(b, S) { return f(b) && p(S) ? m(_(b), S) : function(M) { var R = h(M, b); return R === void 0 && R === S ? d(M, b) : u(S, R, v | C); }; } a.exports = x; } ), /***/ "../../../../node_modules/lodash/_baseMerge.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseMerge.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Stack */ "../../../../node_modules/lodash/_Stack.js" ), h = o( /*! ./_assignMergeValue */ "../../../../node_modules/lodash/_assignMergeValue.js" ), d = o( /*! ./_baseFor */ "../../../../node_modules/lodash/_baseFor.js" ), f = o( /*! ./_baseMergeDeep */ "../../../../node_modules/lodash/_baseMergeDeep.js" ), p = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), m = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ), _ = o( /*! ./_safeGet */ "../../../../node_modules/lodash/_safeGet.js" ); function v(C, x, b, S, M) { C !== x && d(x, function(R, w) { if (M || (M = new u()), p(R)) f(C, x, w, b, v, S, M); else { var V = S ? S(_(C, w), R, w + "", C, x, M) : void 0; V === void 0 && (V = R), h(C, w, V); } }, m); } a.exports = v; } ), /***/ "../../../../node_modules/lodash/_baseMergeDeep.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_baseMergeDeep.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assignMergeValue */ "../../../../node_modules/lodash/_assignMergeValue.js" ), h = o( /*! ./_cloneBuffer */ "../../../../node_modules/lodash/_cloneBuffer.js" ), d = o( /*! ./_cloneTypedArray */ "../../../../node_modules/lodash/_cloneTypedArray.js" ), f = o( /*! ./_copyArray */ "../../../../node_modules/lodash/_copyArray.js" ), p = o( /*! ./_initCloneObject */ "../../../../node_modules/lodash/_initCloneObject.js" ), m = o( /*! ./isArguments */ "../../../../node_modules/lodash/isArguments.js" ), _ = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), v = o( /*! ./isArrayLikeObject */ "../../../../node_modules/lodash/isArrayLikeObject.js" ), C = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), x = o( /*! ./isFunction */ "../../../../node_modules/lodash/isFunction.js" ), b = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), S = o( /*! ./isPlainObject */ "../../../../node_modules/lodash/isPlainObject.js" ), M = o( /*! ./isTypedArray */ "../../../../node_modules/lodash/isTypedArray.js" ), R = o( /*! ./_safeGet */ "../../../../node_modules/lodash/_safeGet.js" ), w = o( /*! ./toPlainObject */ "../../../../node_modules/lodash/toPlainObject.js" ); function V(k, L, B, U, K, ee, Z) { var q = R(k, B), le = R(L, B), ie = Z.get(le); if (ie) { u(k, B, ie); return; } var $ = ee ? ee(q, le, B + "", k, L, Z) : void 0, j = $ === void 0; if (j) { var J = _(le), ne = !J && C(le), pe = !J && !ne && M(le); $ = le, J || ne || pe ? _(q) ? $ = q : v(q) ? $ = f(q) : ne ? (j = !1, $ = h(le, !0)) : pe ? 
(j = !1, $ = d(le, !0)) : $ = [] : S(le) || m(le) ? ($ = q, m(q) ? $ = w(q) : (!b(q) || x(q)) && ($ = p(le))) : j = !1; } j && (Z.set(le, $), K($, le, U, ee, Z), Z.delete(le)), u(k, B, $); } a.exports = V; } ), /***/ "../../../../node_modules/lodash/_baseOrderBy.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_baseOrderBy.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayMap */ "../../../../node_modules/lodash/_arrayMap.js" ), h = o( /*! ./_baseGet */ "../../../../node_modules/lodash/_baseGet.js" ), d = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), f = o( /*! ./_baseMap */ "../../../../node_modules/lodash/_baseMap.js" ), p = o( /*! ./_baseSortBy */ "../../../../node_modules/lodash/_baseSortBy.js" ), m = o( /*! ./_baseUnary */ "../../../../node_modules/lodash/_baseUnary.js" ), _ = o( /*! ./_compareMultiple */ "../../../../node_modules/lodash/_compareMultiple.js" ), v = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ), C = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ); function x(b, S, M) { S.length ? S = u(S, function(V) { return C(V) ? function(k) { return h(k, V.length === 1 ? V[0] : V); } : V; }) : S = [v]; var R = -1; S = u(S, m(d)); var w = f(b, function(V, k, L) { var B = u(S, function(U) { return U(V); }); return { criteria: B, index: ++R, value: V }; }); return p(w, function(V, k) { return _(V, k, M); }); } a.exports = x; } ), /***/ "../../../../node_modules/lodash/_basePick.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_basePick.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_basePickBy */ "../../../../node_modules/lodash/_basePickBy.js" ), h = o( /*! ./hasIn */ "../../../../node_modules/lodash/hasIn.js" ); function d(f, p) { return u(f, p, function(m, _) { return h(f, _); }); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_basePickBy.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_basePickBy.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGet */ "../../../../node_modules/lodash/_baseGet.js" ), h = o( /*! ./_baseSet */ "../../../../node_modules/lodash/_baseSet.js" ), d = o( /*! ./_castPath */ "../../../../node_modules/lodash/_castPath.js" ); function f(p, m, _) { for (var v = -1, C = m.length, x = {}; ++v < C; ) { var b = m[v], S = u(p, b); _(S, b) && h(x, d(b, p), S); } return x; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseProperty.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseProperty.js ***! \********************************************************/ /***/ (a) => { function l(o) { return function(u) { return u == null ? void 0 : u[o]; }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_basePropertyDeep.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/_basePropertyDeep.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./_baseGet */ "../../../../node_modules/lodash/_baseGet.js" ); function h(d) { return function(f) { return u(f, d); }; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseRange.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseRange.js ***! \*****************************************************/ /***/ (a) => { var l = Math.ceil, o = Math.max; function u(h, d, f, p) { for (var m = -1, _ = o(l((d - h) / (f || 1)), 0), v = Array(_); _--; ) v[p ? _ : ++m] = h, h += f; return v; } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_baseReduce.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseReduce.js ***! \******************************************************/ /***/ (a) => { function l(o, u, h, d, f) { return f(o, function(p, m, _) { h = d ? (d = !1, p) : u(h, p, m, _); }), h; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseRest.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_baseRest.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ), h = o( /*! ./_overRest */ "../../../../node_modules/lodash/_overRest.js" ), d = o( /*! ./_setToString */ "../../../../node_modules/lodash/_setToString.js" ); function f(p, m) { return d(h(p, m, u), p + ""); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseSet.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_baseSet.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assignValue */ "../../../../node_modules/lodash/_assignValue.js" ), h = o( /*! ./_castPath */ "../../../../node_modules/lodash/_castPath.js" ), d = o( /*! ./_isIndex */ "../../../../node_modules/lodash/_isIndex.js" ), f = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), p = o( /*! ./_toKey */ "../../../../node_modules/lodash/_toKey.js" ); function m(_, v, C, x) { if (!f(_)) return _; v = h(v, _); for (var b = -1, S = v.length, M = S - 1, R = _; R != null && ++b < S; ) { var w = p(v[b]), V = C; if (w === "__proto__" || w === "constructor" || w === "prototype") return _; if (b != M) { var k = R[w]; V = x ? x(k, w, R) : void 0, V === void 0 && (V = f(k) ? k : d(v[b + 1]) ? [] : {}); } u(R, w, V), R = R[w]; } return _; } a.exports = m; } ), /***/ "../../../../node_modules/lodash/_baseSetToString.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_baseSetToString.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./constant */ "../../../../node_modules/lodash/constant.js" ), h = o( /*! ./_defineProperty */ "../../../../node_modules/lodash/_defineProperty.js" ), d = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ), f = h ? function(p, m) { return h(p, "toString", { configurable: !0, enumerable: !1, value: u(m), writable: !0 }); } : d; a.exports = f; } ), /***/ "../../../../node_modules/lodash/_baseSortBy.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseSortBy.js ***! 
\******************************************************/ /***/ (a) => { function l(o, u) { var h = o.length; for (o.sort(u); h--; ) o[h] = o[h].value; return o; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseTimes.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseTimes.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { for (var h = -1, d = Array(o); ++h < o; ) d[h] = u(h); return d; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseToString.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_baseToString.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = o( /*! ./_arrayMap */ "../../../../node_modules/lodash/_arrayMap.js" ), d = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), f = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ), p = 1 / 0, m = u ? u.prototype : void 0, _ = m ? m.toString : void 0; function v(C) { if (typeof C == "string") return C; if (d(C)) return h(C, v) + ""; if (f(C)) return _ ? _.call(C) : ""; var x = C + ""; return x == "0" && 1 / C == -p ? "-0" : x; } a.exports = v; } ), /***/ "../../../../node_modules/lodash/_baseTrim.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_baseTrim.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_trimmedEndIndex */ "../../../../node_modules/lodash/_trimmedEndIndex.js" ), h = /^\s+/; function d(f) { return f && f.slice(0, u(f) + 1).replace(h, ""); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_baseUnary.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_baseUnary.js ***! \*****************************************************/ /***/ (a) => { function l(o) { return function(u) { return o(u); }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_baseUniq.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_baseUniq.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_SetCache */ "../../../../node_modules/lodash/_SetCache.js" ), h = o( /*! ./_arrayIncludes */ "../../../../node_modules/lodash/_arrayIncludes.js" ), d = o( /*! ./_arrayIncludesWith */ "../../../../node_modules/lodash/_arrayIncludesWith.js" ), f = o( /*! ./_cacheHas */ "../../../../node_modules/lodash/_cacheHas.js" ), p = o( /*! ./_createSet */ "../../../../node_modules/lodash/_createSet.js" ), m = o( /*! ./_setToArray */ "../../../../node_modules/lodash/_setToArray.js" ), _ = 200; function v(C, x, b) { var S = -1, M = h, R = C.length, w = !0, V = [], k = V; if (b) w = !1, M = d; else if (R >= _) { var L = x ? null : p(C); if (L) return m(L); w = !1, M = f, k = new u(); } else k = x ? [] : V; e: for (; ++S < R; ) { var B = C[S], U = x ? x(B) : B; if (B = b || B !== 0 ? B : 0, w && U === U) { for (var K = k.length; K--; ) if (k[K] === U) continue e; x && k.push(U), V.push(B); } else M(k, U, b) || (k !== V && k.push(U), V.push(B)); } return V; } a.exports = v; } ), /***/ "../../../../node_modules/lodash/_baseValues.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_baseValues.js ***! 
\******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayMap */ "../../../../node_modules/lodash/_arrayMap.js" ); function h(d, f) { return u(f, function(p) { return d[p]; }); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_baseZipObject.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_baseZipObject.js ***! \*********************************************************/ /***/ (a) => { function l(o, u, h) { for (var d = -1, f = o.length, p = u.length, m = {}; ++d < f; ) { var _ = d < p ? u[d] : void 0; h(m, o[d], _); } return m; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_cacheHas.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_cacheHas.js ***! \****************************************************/ /***/ (a) => { function l(o, u) { return o.has(u); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_castFunction.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_castFunction.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ); function h(d) { return typeof d == "function" ? d : u; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_castPath.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_castPath.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), h = o( /*! ./_isKey */ "../../../../node_modules/lodash/_isKey.js" ), d = o( /*! ./_stringToPath */ "../../../../node_modules/lodash/_stringToPath.js" ), f = o( /*! ./toString */ "../../../../node_modules/lodash/toString.js" ); function p(m, _) { return u(m) ? m : h(m, _) ? [m] : d(f(m)); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_cloneArrayBuffer.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/_cloneArrayBuffer.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Uint8Array */ "../../../../node_modules/lodash/_Uint8Array.js" ); function h(d) { var f = new d.constructor(d.byteLength); return new u(f).set(new u(d)), f; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_cloneBuffer.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_cloneBuffer.js ***! \*******************************************************/ /***/ (a, l, o) => { a = o.nmd(a); var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = l && !l.nodeType && l, d = h && !0 && a && !a.nodeType && a, f = d && d.exports === h, p = f ? u.Buffer : void 0, m = p ? p.allocUnsafe : void 0; function _(v, C) { if (C) return v.slice(); var x = v.length, b = m ? m(x) : new v.constructor(x); return v.copy(b), b; } a.exports = _; } ), /***/ "../../../../node_modules/lodash/_cloneDataView.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_cloneDataView.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_cloneArrayBuffer */ "../../../../node_modules/lodash/_cloneArrayBuffer.js" ); function h(d, f) { var p = f ? 
u(d.buffer) : d.buffer; return new d.constructor(p, d.byteOffset, d.byteLength); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_cloneRegExp.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_cloneRegExp.js ***! \*******************************************************/ /***/ (a) => { var l = /\w*$/; function o(u) { var h = new u.constructor(u.source, l.exec(u)); return h.lastIndex = u.lastIndex, h; } a.exports = o; } ), /***/ "../../../../node_modules/lodash/_cloneSymbol.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_cloneSymbol.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = u ? u.prototype : void 0, d = h ? h.valueOf : void 0; function f(p) { return d ? Object(d.call(p)) : {}; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_cloneTypedArray.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_cloneTypedArray.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_cloneArrayBuffer */ "../../../../node_modules/lodash/_cloneArrayBuffer.js" ); function h(d, f) { var p = f ? u(d.buffer) : d.buffer; return new d.constructor(p, d.byteOffset, d.length); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_compareAscending.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/_compareAscending.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ); function h(d, f) { if (d !== f) { var p = d !== void 0, m = d === null, _ = d === d, v = u(d), C = f !== void 0, x = f === null, b = f === f, S = u(f); if (!x && !S && !v && d > f || v && C && b && !x && !S || m && C && b || !p && b || !_) return 1; if (!m && !v && !S && d < f || S && p && _ && !m && !v || x && p && _ || !C && _ || !b) return -1; } return 0; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_compareMultiple.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_compareMultiple.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_compareAscending */ "../../../../node_modules/lodash/_compareAscending.js" ); function h(d, f, p) { for (var m = -1, _ = d.criteria, v = f.criteria, C = _.length, x = p.length; ++m < C; ) { var b = u(_[m], v[m]); if (b) { if (m >= x) return b; var S = p[m]; return b * (S == "desc" ? -1 : 1); } } return d.index - f.index; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_copyArray.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_copyArray.js ***! \*****************************************************/ /***/ (a) => { function l(o, u) { var h = -1, d = o.length; for (u || (u = Array(d)); ++h < d; ) u[h] = o[h]; return u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_copyObject.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_copyObject.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./_assignValue */ "../../../../node_modules/lodash/_assignValue.js" ), h = o( /*! ./_baseAssignValue */ "../../../../node_modules/lodash/_baseAssignValue.js" ); function d(f, p, m, _) { var v = !m; m || (m = {}); for (var C = -1, x = p.length; ++C < x; ) { var b = p[C], S = _ ? _(m[b], f[b], b, m, f) : void 0; S === void 0 && (S = f[b]), v ? h(m, b, S) : u(m, b, S); } return m; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_copySymbols.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_copySymbols.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_copyObject */ "../../../../node_modules/lodash/_copyObject.js" ), h = o( /*! ./_getSymbols */ "../../../../node_modules/lodash/_getSymbols.js" ); function d(f, p) { return u(f, h(f), p); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_copySymbolsIn.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_copySymbolsIn.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_copyObject */ "../../../../node_modules/lodash/_copyObject.js" ), h = o( /*! ./_getSymbolsIn */ "../../../../node_modules/lodash/_getSymbolsIn.js" ); function d(f, p) { return u(f, h(f), p); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_coreJsData.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_coreJsData.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = u["__core-js_shared__"]; a.exports = h; } ), /***/ "../../../../node_modules/lodash/_createAssigner.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_createAssigner.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseRest */ "../../../../node_modules/lodash/_baseRest.js" ), h = o( /*! ./_isIterateeCall */ "../../../../node_modules/lodash/_isIterateeCall.js" ); function d(f) { return u(function(p, m) { var _ = -1, v = m.length, C = v > 1 ? m[v - 1] : void 0, x = v > 2 ? m[2] : void 0; for (C = f.length > 3 && typeof C == "function" ? (v--, C) : void 0, x && h(m[0], m[1], x) && (C = v < 3 ? void 0 : C, v = 1), p = Object(p); ++_ < v; ) { var b = m[_]; b && f(p, b, _, C); } return p; }); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_createBaseEach.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_createBaseEach.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ); function h(d, f) { return function(p, m) { if (p == null) return p; if (!u(p)) return d(p, m); for (var _ = p.length, v = f ? _ : -1, C = Object(p); (f ? v-- : ++v < _) && m(C[v], v, C) !== !1; ) ; return p; }; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_createBaseFor.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_createBaseFor.js ***! \*********************************************************/ /***/ (a) => { function l(o) { return function(u, h, d) { for (var f = -1, p = Object(u), m = d(u), _ = m.length; _--; ) { var v = m[o ? 
_ : ++f]; if (h(p[v], v, p) === !1) break; } return u; }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_createFind.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_createFind.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), h = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ), d = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function f(p) { return function(m, _, v) { var C = Object(m); if (!h(m)) { var x = u(_, 3); m = d(m), _ = function(S) { return x(C[S], S, C); }; } var b = p(m, _, v); return b > -1 ? C[x ? m[b] : b] : void 0; }; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_createRange.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_createRange.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseRange */ "../../../../node_modules/lodash/_baseRange.js" ), h = o( /*! ./_isIterateeCall */ "../../../../node_modules/lodash/_isIterateeCall.js" ), d = o( /*! ./toFinite */ "../../../../node_modules/lodash/toFinite.js" ); function f(p) { return function(m, _, v) { return v && typeof v != "number" && h(m, _, v) && (_ = v = void 0), m = d(m), _ === void 0 ? (_ = m, m = 0) : _ = d(_), v = v === void 0 ? m < _ ? 1 : -1 : d(v), u(m, _, v, p); }; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_createSet.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_createSet.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Set */ "../../../../node_modules/lodash/_Set.js" ), h = o( /*! ./noop */ "../../../../node_modules/lodash/noop.js" ), d = o( /*! ./_setToArray */ "../../../../node_modules/lodash/_setToArray.js" ), f = 1 / 0, p = u && 1 / d(new u([, -0]))[1] == f ? function(m) { return new u(m); } : h; a.exports = p; } ), /***/ "../../../../node_modules/lodash/_defineProperty.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_defineProperty.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = function() { try { var d = u(Object, "defineProperty"); return d({}, "", {}), d; } catch { } }(); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_equalArrays.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_equalArrays.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_SetCache */ "../../../../node_modules/lodash/_SetCache.js" ), h = o( /*! ./_arraySome */ "../../../../node_modules/lodash/_arraySome.js" ), d = o( /*! ./_cacheHas */ "../../../../node_modules/lodash/_cacheHas.js" ), f = 1, p = 2; function m(_, v, C, x, b, S) { var M = C & f, R = _.length, w = v.length; if (R != w && !(M && w > R)) return !1; var V = S.get(_), k = S.get(v); if (V && k) return V == v && k == _; var L = -1, B = !0, U = C & p ? new u() : void 0; for (S.set(_, v), S.set(v, _); ++L < R; ) { var K = _[L], ee = v[L]; if (x) var Z = M ? 
x(ee, K, L, v, _, S) : x(K, ee, L, _, v, S); if (Z !== void 0) { if (Z) continue; B = !1; break; } if (U) { if (!h(v, function(q, le) { if (!d(U, le) && (K === q || b(K, q, C, x, S))) return U.push(le); })) { B = !1; break; } } else if (!(K === ee || b(K, ee, C, x, S))) { B = !1; break; } } return S.delete(_), S.delete(v), B; } a.exports = m; } ), /***/ "../../../../node_modules/lodash/_equalByTag.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_equalByTag.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = o( /*! ./_Uint8Array */ "../../../../node_modules/lodash/_Uint8Array.js" ), d = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ), f = o( /*! ./_equalArrays */ "../../../../node_modules/lodash/_equalArrays.js" ), p = o( /*! ./_mapToArray */ "../../../../node_modules/lodash/_mapToArray.js" ), m = o( /*! ./_setToArray */ "../../../../node_modules/lodash/_setToArray.js" ), _ = 1, v = 2, C = "[object Boolean]", x = "[object Date]", b = "[object Error]", S = "[object Map]", M = "[object Number]", R = "[object RegExp]", w = "[object Set]", V = "[object String]", k = "[object Symbol]", L = "[object ArrayBuffer]", B = "[object DataView]", U = u ? u.prototype : void 0, K = U ? U.valueOf : void 0; function ee(Z, q, le, ie, $, j, J) { switch (le) { case B: if (Z.byteLength != q.byteLength || Z.byteOffset != q.byteOffset) return !1; Z = Z.buffer, q = q.buffer; case L: return !(Z.byteLength != q.byteLength || !j(new h(Z), new h(q))); case C: case x: case M: return d(+Z, +q); case b: return Z.name == q.name && Z.message == q.message; case R: case V: return Z == q + ""; case S: var ne = p; case w: var pe = ie & _; if (ne || (ne = m), Z.size != q.size && !pe) return !1; var ge = J.get(Z); if (ge) return ge == q; ie |= v, J.set(Z, q); var Ie = f(ne(Z), ne(q), ie, $, j, J); return J.delete(Z), Ie; case k: if (K) return K.call(Z) == K.call(q); } return !1; } a.exports = ee; } ), /***/ "../../../../node_modules/lodash/_equalObjects.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_equalObjects.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getAllKeys */ "../../../../node_modules/lodash/_getAllKeys.js" ), h = 1, d = Object.prototype, f = d.hasOwnProperty; function p(m, _, v, C, x, b) { var S = v & h, M = u(m), R = M.length, w = u(_), V = w.length; if (R != V && !S) return !1; for (var k = R; k--; ) { var L = M[k]; if (!(S ? L in _ : f.call(_, L))) return !1; } var B = b.get(m), U = b.get(_); if (B && U) return B == _ && U == m; var K = !0; b.set(m, _), b.set(_, m); for (var ee = S; ++k < R; ) { L = M[k]; var Z = m[L], q = _[L]; if (C) var le = S ? C(q, Z, L, _, m, b) : C(Z, q, L, m, _, b); if (!(le === void 0 ? Z === q || x(Z, q, v, C, b) : le)) { K = !1; break; } ee || (ee = L == "constructor"); } if (K && !ee) { var ie = m.constructor, $ = _.constructor; ie != $ && "constructor" in m && "constructor" in _ && !(typeof ie == "function" && ie instanceof ie && typeof $ == "function" && $ instanceof $) && (K = !1); } return b.delete(m), b.delete(_), K; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_flatRest.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_flatRest.js ***! 
\****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./flatten */ "../../../../node_modules/lodash/flatten.js" ), h = o( /*! ./_overRest */ "../../../../node_modules/lodash/_overRest.js" ), d = o( /*! ./_setToString */ "../../../../node_modules/lodash/_setToString.js" ); function f(p) { return d(h(p, void 0, u), p + ""); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_freeGlobal.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_freeGlobal.js ***! \******************************************************/ /***/ (a, l, o) => { var u = typeof o.g == "object" && o.g && o.g.Object === Object && o.g; a.exports = u; } ), /***/ "../../../../node_modules/lodash/_getAllKeys.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_getAllKeys.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetAllKeys */ "../../../../node_modules/lodash/_baseGetAllKeys.js" ), h = o( /*! ./_getSymbols */ "../../../../node_modules/lodash/_getSymbols.js" ), d = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function f(p) { return u(p, d, h); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_getAllKeysIn.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_getAllKeysIn.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetAllKeys */ "../../../../node_modules/lodash/_baseGetAllKeys.js" ), h = o( /*! ./_getSymbolsIn */ "../../../../node_modules/lodash/_getSymbolsIn.js" ), d = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ); function f(p) { return u(p, d, h); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_getMapData.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_getMapData.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_isKeyable */ "../../../../node_modules/lodash/_isKeyable.js" ); function h(d, f) { var p = d.__data__; return u(f) ? p[typeof f == "string" ? "string" : "hash"] : p.map; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_getMatchData.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_getMatchData.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_isStrictComparable */ "../../../../node_modules/lodash/_isStrictComparable.js" ), h = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function d(f) { for (var p = h(f), m = p.length; m--; ) { var _ = p[m], v = f[_]; p[m] = [_, v, u(v)]; } return p; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_getNative.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_getNative.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsNative */ "../../../../node_modules/lodash/_baseIsNative.js" ), h = o( /*! ./_getValue */ "../../../../node_modules/lodash/_getValue.js" ); function d(f, p) { var m = h(f, p); return u(m) ? m : void 0; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_getPrototype.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_getPrototype.js ***! 
\********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_overArg */ "../../../../node_modules/lodash/_overArg.js" ), h = u(Object.getPrototypeOf, Object); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_getRawTag.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_getRawTag.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = Object.prototype, d = h.hasOwnProperty, f = h.toString, p = u ? u.toStringTag : void 0; function m(_) { var v = d.call(_, p), C = _[p]; try { _[p] = void 0; var x = !0; } catch { } var b = f.call(_); return x && (v ? _[p] = C : delete _[p]), b; } a.exports = m; } ), /***/ "../../../../node_modules/lodash/_getSymbols.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_getSymbols.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayFilter */ "../../../../node_modules/lodash/_arrayFilter.js" ), h = o( /*! ./stubArray */ "../../../../node_modules/lodash/stubArray.js" ), d = Object.prototype, f = d.propertyIsEnumerable, p = Object.getOwnPropertySymbols, m = p ? function(_) { return _ == null ? [] : (_ = Object(_), u(p(_), function(v) { return f.call(_, v); })); } : h; a.exports = m; } ), /***/ "../../../../node_modules/lodash/_getSymbolsIn.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_getSymbolsIn.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayPush */ "../../../../node_modules/lodash/_arrayPush.js" ), h = o( /*! ./_getPrototype */ "../../../../node_modules/lodash/_getPrototype.js" ), d = o( /*! ./_getSymbols */ "../../../../node_modules/lodash/_getSymbols.js" ), f = o( /*! ./stubArray */ "../../../../node_modules/lodash/stubArray.js" ), p = Object.getOwnPropertySymbols, m = p ? function(_) { for (var v = []; _; ) u(v, d(_)), _ = h(_); return v; } : f; a.exports = m; } ), /***/ "../../../../node_modules/lodash/_getTag.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/_getTag.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_DataView */ "../../../../node_modules/lodash/_DataView.js" ), h = o( /*! ./_Map */ "../../../../node_modules/lodash/_Map.js" ), d = o( /*! ./_Promise */ "../../../../node_modules/lodash/_Promise.js" ), f = o( /*! ./_Set */ "../../../../node_modules/lodash/_Set.js" ), p = o( /*! ./_WeakMap */ "../../../../node_modules/lodash/_WeakMap.js" ), m = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), _ = o( /*! ./_toSource */ "../../../../node_modules/lodash/_toSource.js" ), v = "[object Map]", C = "[object Object]", x = "[object Promise]", b = "[object Set]", S = "[object WeakMap]", M = "[object DataView]", R = _(u), w = _(h), V = _(d), k = _(f), L = _(p), B = m; (u && B(new u(new ArrayBuffer(1))) != M || h && B(new h()) != v || d && B(d.resolve()) != x || f && B(new f()) != b || p && B(new p()) != S) && (B = function(U) { var K = m(U), ee = K == C ? U.constructor : void 0, Z = ee ? 
_(ee) : ""; if (Z) switch (Z) { case R: return M; case w: return v; case V: return x; case k: return b; case L: return S; } return K; }), a.exports = B; } ), /***/ "../../../../node_modules/lodash/_getValue.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_getValue.js ***! \****************************************************/ /***/ (a) => { function l(o, u) { return o == null ? void 0 : o[u]; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_hasPath.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_hasPath.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_castPath */ "../../../../node_modules/lodash/_castPath.js" ), h = o( /*! ./isArguments */ "../../../../node_modules/lodash/isArguments.js" ), d = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), f = o( /*! ./_isIndex */ "../../../../node_modules/lodash/_isIndex.js" ), p = o( /*! ./isLength */ "../../../../node_modules/lodash/isLength.js" ), m = o( /*! ./_toKey */ "../../../../node_modules/lodash/_toKey.js" ); function _(v, C, x) { C = u(C, v); for (var b = -1, S = C.length, M = !1; ++b < S; ) { var R = m(C[b]); if (!(M = v != null && x(v, R))) break; v = v[R]; } return M || ++b != S ? M : (S = v == null ? 0 : v.length, !!S && p(S) && f(R, S) && (d(v) || h(v))); } a.exports = _; } ), /***/ "../../../../node_modules/lodash/_hasUnicode.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_hasUnicode.js ***! \******************************************************/ /***/ (a) => { var l = "\\ud800-\\udfff", o = "\\u0300-\\u036f", u = "\\ufe20-\\ufe2f", h = "\\u20d0-\\u20ff", d = o + u + h, f = "\\ufe0e\\ufe0f", p = "\\u200d", m = RegExp("[" + p + l + d + f + "]"); function _(v) { return m.test(v); } a.exports = _; } ), /***/ "../../../../node_modules/lodash/_hashClear.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_hashClear.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_nativeCreate */ "../../../../node_modules/lodash/_nativeCreate.js" ); function h() { this.__data__ = u ? u(null) : {}, this.size = 0; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_hashDelete.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_hashDelete.js ***! \******************************************************/ /***/ (a) => { function l(o) { var u = this.has(o) && delete this.__data__[o]; return this.size -= u ? 1 : 0, u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_hashGet.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_hashGet.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_nativeCreate */ "../../../../node_modules/lodash/_nativeCreate.js" ), h = "__lodash_hash_undefined__", d = Object.prototype, f = d.hasOwnProperty; function p(m) { var _ = this.__data__; if (u) { var v = _[m]; return v === h ? void 0 : v; } return f.call(_, m) ? _[m] : void 0; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_hashHas.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_hashHas.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./_nativeCreate */ "../../../../node_modules/lodash/_nativeCreate.js" ), h = Object.prototype, d = h.hasOwnProperty; function f(p) { var m = this.__data__; return u ? m[p] !== void 0 : d.call(m, p); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_hashSet.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_hashSet.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_nativeCreate */ "../../../../node_modules/lodash/_nativeCreate.js" ), h = "__lodash_hash_undefined__"; function d(f, p) { var m = this.__data__; return this.size += this.has(f) ? 0 : 1, m[f] = u && p === void 0 ? h : p, this; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_initCloneArray.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_initCloneArray.js ***! \**********************************************************/ /***/ (a) => { var l = Object.prototype, o = l.hasOwnProperty; function u(h) { var d = h.length, f = new h.constructor(d); return d && typeof h[0] == "string" && o.call(h, "index") && (f.index = h.index, f.input = h.input), f; } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_initCloneByTag.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_initCloneByTag.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_cloneArrayBuffer */ "../../../../node_modules/lodash/_cloneArrayBuffer.js" ), h = o( /*! ./_cloneDataView */ "../../../../node_modules/lodash/_cloneDataView.js" ), d = o( /*! ./_cloneRegExp */ "../../../../node_modules/lodash/_cloneRegExp.js" ), f = o( /*! ./_cloneSymbol */ "../../../../node_modules/lodash/_cloneSymbol.js" ), p = o( /*! ./_cloneTypedArray */ "../../../../node_modules/lodash/_cloneTypedArray.js" ), m = "[object Boolean]", _ = "[object Date]", v = "[object Map]", C = "[object Number]", x = "[object RegExp]", b = "[object Set]", S = "[object String]", M = "[object Symbol]", R = "[object ArrayBuffer]", w = "[object DataView]", V = "[object Float32Array]", k = "[object Float64Array]", L = "[object Int8Array]", B = "[object Int16Array]", U = "[object Int32Array]", K = "[object Uint8Array]", ee = "[object Uint8ClampedArray]", Z = "[object Uint16Array]", q = "[object Uint32Array]"; function le(ie, $, j) { var J = ie.constructor; switch ($) { case R: return u(ie); case m: case _: return new J(+ie); case w: return h(ie, j); case V: case k: case L: case B: case U: case K: case ee: case Z: case q: return p(ie, j); case v: return new J(); case C: case S: return new J(ie); case x: return d(ie); case b: return new J(); case M: return f(ie); } } a.exports = le; } ), /***/ "../../../../node_modules/lodash/_initCloneObject.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_initCloneObject.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseCreate */ "../../../../node_modules/lodash/_baseCreate.js" ), h = o( /*! ./_getPrototype */ "../../../../node_modules/lodash/_getPrototype.js" ), d = o( /*! ./_isPrototype */ "../../../../node_modules/lodash/_isPrototype.js" ); function f(p) { return typeof p.constructor == "function" && !d(p) ? 
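/* _initCloneObject: ordinary instances get a new object created from the
   source's prototype (baseCreate(getPrototype(p))); prototype objects and
   values without a usable constructor fall back to a plain {}. */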
u(h(p)) : {}; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_isFlattenable.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_isFlattenable.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Symbol */ "../../../../node_modules/lodash/_Symbol.js" ), h = o( /*! ./isArguments */ "../../../../node_modules/lodash/isArguments.js" ), d = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), f = u ? u.isConcatSpreadable : void 0; function p(m) { return d(m) || h(m) || !!(f && m && m[f]); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_isIndex.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_isIndex.js ***! \***************************************************/ /***/ (a) => { var l = 9007199254740991, o = /^(?:0|[1-9]\d*)$/; function u(h, d) { var f = typeof h; return d = d ?? l, !!d && (f == "number" || f != "symbol" && o.test(h)) && h > -1 && h % 1 == 0 && h < d; } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_isIterateeCall.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_isIterateeCall.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ), h = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ), d = o( /*! ./_isIndex */ "../../../../node_modules/lodash/_isIndex.js" ), f = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ); function p(m, _, v) { if (!f(v)) return !1; var C = typeof _; return (C == "number" ? h(v) && d(_, v.length) : C == "string" && _ in v) ? u(v[_], m) : !1; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_isKey.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/_isKey.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), h = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ), d = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, f = /^\w*$/; function p(m, _) { if (u(m)) return !1; var v = typeof m; return v == "number" || v == "symbol" || v == "boolean" || m == null || h(m) ? !0 : f.test(m) || !d.test(m) || _ != null && m in Object(_); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_isKeyable.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/_isKeyable.js ***! \*****************************************************/ /***/ (a) => { function l(o) { var u = typeof o; return u == "string" || u == "number" || u == "symbol" || u == "boolean" ? o !== "__proto__" : o === null; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_isMasked.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_isMasked.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_coreJsData */ "../../../../node_modules/lodash/_coreJsData.js" ), h = function() { var f = /[^.]+$/.exec(u && u.keys && u.keys.IE_PROTO || ""); return f ? "Symbol(src)_1." 
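/* _isMasked: core-js stamps wrapped natives with a key derived from IE_PROTO
   ("Symbol(src)_1." + uid); functions carrying that key are treated as masked
   so they are not mistaken for genuine native functions. */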
+ f : ""; }(); function d(f) { return !!h && h in f; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_isPrototype.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_isPrototype.js ***! \*******************************************************/ /***/ (a) => { var l = Object.prototype; function o(u) { var h = u && u.constructor, d = typeof h == "function" && h.prototype || l; return u === d; } a.exports = o; } ), /***/ "../../../../node_modules/lodash/_isStrictComparable.js": ( /*!**************************************************************!*\ !*** ../../../../node_modules/lodash/_isStrictComparable.js ***! \**************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ); function h(d) { return d === d && !u(d); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_listCacheClear.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_listCacheClear.js ***! \**********************************************************/ /***/ (a) => { function l() { this.__data__ = [], this.size = 0; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_listCacheDelete.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_listCacheDelete.js ***! \***********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assocIndexOf */ "../../../../node_modules/lodash/_assocIndexOf.js" ), h = Array.prototype, d = h.splice; function f(p) { var m = this.__data__, _ = u(m, p); if (_ < 0) return !1; var v = m.length - 1; return _ == v ? m.pop() : d.call(m, _, 1), --this.size, !0; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_listCacheGet.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_listCacheGet.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assocIndexOf */ "../../../../node_modules/lodash/_assocIndexOf.js" ); function h(d) { var f = this.__data__, p = u(f, d); return p < 0 ? void 0 : f[p][1]; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_listCacheHas.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_listCacheHas.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assocIndexOf */ "../../../../node_modules/lodash/_assocIndexOf.js" ); function h(d) { return u(this.__data__, d) > -1; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_listCacheSet.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_listCacheSet.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assocIndexOf */ "../../../../node_modules/lodash/_assocIndexOf.js" ); function h(d, f) { var p = this.__data__, m = u(p, d); return m < 0 ? (++this.size, p.push([d, f])) : p[m][1] = f, this; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_mapCacheClear.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_mapCacheClear.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_Hash */ "../../../../node_modules/lodash/_Hash.js" ), h = o( /*! 
./_ListCache */ "../../../../node_modules/lodash/_ListCache.js" ), d = o( /*! ./_Map */ "../../../../node_modules/lodash/_Map.js" ); function f() { this.size = 0, this.__data__ = { hash: new u(), map: new (d || h)(), string: new u() }; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_mapCacheDelete.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_mapCacheDelete.js ***! \**********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getMapData */ "../../../../node_modules/lodash/_getMapData.js" ); function h(d) { var f = u(this, d).delete(d); return this.size -= f ? 1 : 0, f; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_mapCacheGet.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_mapCacheGet.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getMapData */ "../../../../node_modules/lodash/_getMapData.js" ); function h(d) { return u(this, d).get(d); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_mapCacheHas.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_mapCacheHas.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getMapData */ "../../../../node_modules/lodash/_getMapData.js" ); function h(d) { return u(this, d).has(d); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_mapCacheSet.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_mapCacheSet.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getMapData */ "../../../../node_modules/lodash/_getMapData.js" ); function h(d, f) { var p = u(this, d), m = p.size; return p.set(d, f), this.size += p.size == m ? 0 : 1, this; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_mapToArray.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_mapToArray.js ***! \******************************************************/ /***/ (a) => { function l(o) { var u = -1, h = Array(o.size); return o.forEach(function(d, f) { h[++u] = [f, d]; }), h; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_matchesStrictComparable.js": ( /*!*******************************************************************!*\ !*** ../../../../node_modules/lodash/_matchesStrictComparable.js ***! \*******************************************************************/ /***/ (a) => { function l(o, u) { return function(h) { return h == null ? !1 : h[o] === u && (u !== void 0 || o in Object(h)); }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_memoizeCapped.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_memoizeCapped.js ***! \*********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./memoize */ "../../../../node_modules/lodash/memoize.js" ), h = 500; function d(f) { var p = u(f, function(_) { return m.size === h && m.clear(), _; }), m = p.cache; return p; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_nativeCreate.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_nativeCreate.js ***! 
\********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_getNative */ "../../../../node_modules/lodash/_getNative.js" ), h = u(Object, "create"); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_nativeKeys.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_nativeKeys.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_overArg */ "../../../../node_modules/lodash/_overArg.js" ), h = u(Object.keys, Object); a.exports = h; } ), /***/ "../../../../node_modules/lodash/_nativeKeysIn.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_nativeKeysIn.js ***! \********************************************************/ /***/ (a) => { function l(o) { var u = []; if (o != null) for (var h in Object(o)) u.push(h); return u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_nodeUtil.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_nodeUtil.js ***! \****************************************************/ /***/ (a, l, o) => { a = o.nmd(a); var u = o( /*! ./_freeGlobal */ "../../../../node_modules/lodash/_freeGlobal.js" ), h = l && !l.nodeType && l, d = h && !0 && a && !a.nodeType && a, f = d && d.exports === h, p = f && u.process, m = function() { try { var _ = d && d.require && d.require("util").types; return _ || p && p.binding && p.binding("util"); } catch { } }(); a.exports = m; } ), /***/ "../../../../node_modules/lodash/_objectToString.js": ( /*!**********************************************************!*\ !*** ../../../../node_modules/lodash/_objectToString.js ***! \**********************************************************/ /***/ (a) => { var l = Object.prototype, o = l.toString; function u(h) { return o.call(h); } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_overArg.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_overArg.js ***! \***************************************************/ /***/ (a) => { function l(o, u) { return function(h) { return o(u(h)); }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_overRest.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_overRest.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_apply */ "../../../../node_modules/lodash/_apply.js" ), h = Math.max; function d(f, p, m) { return p = h(p === void 0 ? f.length - 1 : p, 0), function() { for (var _ = arguments, v = -1, C = h(_.length - p, 0), x = Array(C); ++v < C; ) x[v] = _[p + v]; v = -1; for (var b = Array(p + 1); ++v < p; ) b[v] = _[v]; return b[p] = m(x), u(f, this, b); }; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_root.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/_root.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_freeGlobal */ "../../../../node_modules/lodash/_freeGlobal.js" ), h = typeof self == "object" && self && self.Object === Object && self, d = u || h || Function("return this")(); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_safeGet.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/_safeGet.js ***! 
\***************************************************/ /***/ (a) => { function l(o, u) { if (!(u === "constructor" && typeof o[u] == "function") && u != "__proto__") return o[u]; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_setCacheAdd.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_setCacheAdd.js ***! \*******************************************************/ /***/ (a) => { var l = "__lodash_hash_undefined__"; function o(u) { return this.__data__.set(u, l), this; } a.exports = o; } ), /***/ "../../../../node_modules/lodash/_setCacheHas.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_setCacheHas.js ***! \*******************************************************/ /***/ (a) => { function l(o) { return this.__data__.has(o); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_setToArray.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_setToArray.js ***! \******************************************************/ /***/ (a) => { function l(o) { var u = -1, h = Array(o.size); return o.forEach(function(d) { h[++u] = d; }), h; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_setToString.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_setToString.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseSetToString */ "../../../../node_modules/lodash/_baseSetToString.js" ), h = o( /*! ./_shortOut */ "../../../../node_modules/lodash/_shortOut.js" ), d = h(u); a.exports = d; } ), /***/ "../../../../node_modules/lodash/_shortOut.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_shortOut.js ***! \****************************************************/ /***/ (a) => { var l = 800, o = 16, u = Date.now; function h(d) { var f = 0, p = 0; return function() { var m = u(), _ = o - (m - p); if (p = m, _ > 0) { if (++f >= l) return arguments[0]; } else f = 0; return d.apply(void 0, arguments); }; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_stackClear.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_stackClear.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_ListCache */ "../../../../node_modules/lodash/_ListCache.js" ); function h() { this.__data__ = new u(), this.size = 0; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/_stackDelete.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_stackDelete.js ***! \*******************************************************/ /***/ (a) => { function l(o) { var u = this.__data__, h = u.delete(o); return this.size = u.size, h; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_stackGet.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_stackGet.js ***! \****************************************************/ /***/ (a) => { function l(o) { return this.__data__.get(o); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_stackHas.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_stackHas.js ***! 
\****************************************************/ /***/ (a) => { function l(o) { return this.__data__.has(o); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_stackSet.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_stackSet.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_ListCache */ "../../../../node_modules/lodash/_ListCache.js" ), h = o( /*! ./_Map */ "../../../../node_modules/lodash/_Map.js" ), d = o( /*! ./_MapCache */ "../../../../node_modules/lodash/_MapCache.js" ), f = 200; function p(m, _) { var v = this.__data__; if (v instanceof u) { var C = v.__data__; if (!h || C.length < f - 1) return C.push([m, _]), this.size = ++v.size, this; v = this.__data__ = new d(C); } return v.set(m, _), this.size = v.size, this; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/_strictIndexOf.js": ( /*!*********************************************************!*\ !*** ../../../../node_modules/lodash/_strictIndexOf.js ***! \*********************************************************/ /***/ (a) => { function l(o, u, h) { for (var d = h - 1, f = o.length; ++d < f; ) if (o[d] === u) return d; return -1; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/_stringSize.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/_stringSize.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_asciiSize */ "../../../../node_modules/lodash/_asciiSize.js" ), h = o( /*! ./_hasUnicode */ "../../../../node_modules/lodash/_hasUnicode.js" ), d = o( /*! ./_unicodeSize */ "../../../../node_modules/lodash/_unicodeSize.js" ); function f(p) { return h(p) ? d(p) : u(p); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/_stringToPath.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/_stringToPath.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_memoizeCapped */ "../../../../node_modules/lodash/_memoizeCapped.js" ), h = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g, d = /\\(\\)?/g, f = u(function(p) { var m = []; return p.charCodeAt(0) === 46 && m.push(""), p.replace(h, function(_, v, C, x) { m.push(C ? x.replace(d, "$1") : v || _); }), m; }); a.exports = f; } ), /***/ "../../../../node_modules/lodash/_toKey.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/_toKey.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ), h = 1 / 0; function d(f) { if (typeof f == "string" || u(f)) return f; var p = f + ""; return p == "0" && 1 / f == -h ? "-0" : p; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/_toSource.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/_toSource.js ***! 
\****************************************************/ /***/ (a) => { var l = Function.prototype, o = l.toString; function u(h) { if (h != null) { try { return o.call(h); } catch { } try { return h + ""; } catch { } } return ""; } a.exports = u; } ), /***/ "../../../../node_modules/lodash/_trimmedEndIndex.js": ( /*!***********************************************************!*\ !*** ../../../../node_modules/lodash/_trimmedEndIndex.js ***! \***********************************************************/ /***/ (a) => { var l = /\s/; function o(u) { for (var h = u.length; h-- && l.test(u.charAt(h)); ) ; return h; } a.exports = o; } ), /***/ "../../../../node_modules/lodash/_unicodeSize.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/_unicodeSize.js ***! \*******************************************************/ /***/ (a) => { var l = "\\ud800-\\udfff", o = "\\u0300-\\u036f", u = "\\ufe20-\\ufe2f", h = "\\u20d0-\\u20ff", d = o + u + h, f = "\\ufe0e\\ufe0f", p = "[" + l + "]", m = "[" + d + "]", _ = "\\ud83c[\\udffb-\\udfff]", v = "(?:" + m + "|" + _ + ")", C = "[^" + l + "]", x = "(?:\\ud83c[\\udde6-\\uddff]){2}", b = "[\\ud800-\\udbff][\\udc00-\\udfff]", S = "\\u200d", M = v + "?", R = "[" + f + "]?", w = "(?:" + S + "(?:" + [C, x, b].join("|") + ")" + R + M + ")*", V = R + M + w, k = "(?:" + [C + m + "?", m, x, b, p].join("|") + ")", L = RegExp(_ + "(?=" + _ + ")|" + k + V, "g"); function B(U) { for (var K = L.lastIndex = 0; L.test(U); ) ++K; return K; } a.exports = B; } ), /***/ "../../../../node_modules/lodash/clone.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/clone.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseClone */ "../../../../node_modules/lodash/_baseClone.js" ), h = 4; function d(f) { return u(f, h); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/cloneDeep.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/cloneDeep.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseClone */ "../../../../node_modules/lodash/_baseClone.js" ), h = 1, d = 4; function f(p) { return u(p, h | d); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/constant.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/constant.js ***! \***************************************************/ /***/ (a) => { function l(o) { return function() { return o; }; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/defaults.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/defaults.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseRest */ "../../../../node_modules/lodash/_baseRest.js" ), h = o( /*! ./eq */ "../../../../node_modules/lodash/eq.js" ), d = o( /*! ./_isIterateeCall */ "../../../../node_modules/lodash/_isIterateeCall.js" ), f = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ), p = Object.prototype, m = p.hasOwnProperty, _ = u(function(v, C) { v = Object(v); var x = -1, b = C.length, S = b > 2 ? 
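/* defaults: a guard against iteratee-style calls trims the source list, then
   each own or inherited source key is copied only when the destination value
   is undefined, or still equals the Object.prototype default without being an
   own property. e.g. _.defaults({ a: 1 }, { a: 2, b: 2 }) -> { a: 1, b: 2 } */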
C[2] : void 0; for (S && d(C[0], C[1], S) && (b = 1); ++x < b; ) for (var M = C[x], R = f(M), w = -1, V = R.length; ++w < V; ) { var k = R[w], L = v[k]; (L === void 0 || h(L, p[k]) && !m.call(v, k)) && (v[k] = M[k]); } return v; }); a.exports = _; } ), /***/ "../../../../node_modules/lodash/each.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/each.js ***! \***********************************************/ /***/ (a, l, o) => { a.exports = o( /*! ./forEach */ "../../../../node_modules/lodash/forEach.js" ); } ), /***/ "../../../../node_modules/lodash/eq.js": ( /*!*********************************************!*\ !*** ../../../../node_modules/lodash/eq.js ***! \*********************************************/ /***/ (a) => { function l(o, u) { return o === u || o !== o && u !== u; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/filter.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/filter.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayFilter */ "../../../../node_modules/lodash/_arrayFilter.js" ), h = o( /*! ./_baseFilter */ "../../../../node_modules/lodash/_baseFilter.js" ), d = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), f = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ); function p(m, _) { var v = f(m) ? u : h; return v(m, d(_, 3)); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/find.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/find.js ***! \***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_createFind */ "../../../../node_modules/lodash/_createFind.js" ), h = o( /*! ./findIndex */ "../../../../node_modules/lodash/findIndex.js" ), d = u(h); a.exports = d; } ), /***/ "../../../../node_modules/lodash/findIndex.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/findIndex.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFindIndex */ "../../../../node_modules/lodash/_baseFindIndex.js" ), h = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), d = o( /*! ./toInteger */ "../../../../node_modules/lodash/toInteger.js" ), f = Math.max; function p(m, _, v) { var C = m == null ? 0 : m.length; if (!C) return -1; var x = v == null ? 0 : d(v); return x < 0 && (x = f(C + x, 0)), u(m, h(_, 3), x); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/flatten.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/flatten.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFlatten */ "../../../../node_modules/lodash/_baseFlatten.js" ); function h(d) { var f = d == null ? 0 : d.length; return f ? u(d, 1) : []; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/forEach.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/forEach.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayEach */ "../../../../node_modules/lodash/_arrayEach.js" ), h = o( /*! ./_baseEach */ "../../../../node_modules/lodash/_baseEach.js" ), d = o( /*! ./_castFunction */ "../../../../node_modules/lodash/_castFunction.js" ), f = o( /*! 
./isArray */ "../../../../node_modules/lodash/isArray.js" ); function p(m, _) { var v = f(m) ? u : h; return v(m, d(_)); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/forIn.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/forIn.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFor */ "../../../../node_modules/lodash/_baseFor.js" ), h = o( /*! ./_castFunction */ "../../../../node_modules/lodash/_castFunction.js" ), d = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ); function f(p, m) { return p == null ? p : u(p, h(m), d); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/get.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/get.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGet */ "../../../../node_modules/lodash/_baseGet.js" ); function h(d, f, p) { var m = d == null ? void 0 : u(d, f); return m === void 0 ? p : m; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/has.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/has.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseHas */ "../../../../node_modules/lodash/_baseHas.js" ), h = o( /*! ./_hasPath */ "../../../../node_modules/lodash/_hasPath.js" ); function d(f, p) { return f != null && h(f, p, u); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/hasIn.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/hasIn.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseHasIn */ "../../../../node_modules/lodash/_baseHasIn.js" ), h = o( /*! ./_hasPath */ "../../../../node_modules/lodash/_hasPath.js" ); function d(f, p) { return f != null && h(f, p, u); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/identity.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/identity.js ***! \***************************************************/ /***/ (a) => { function l(o) { return o; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/isArguments.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/isArguments.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsArguments */ "../../../../node_modules/lodash/_baseIsArguments.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), d = Object.prototype, f = d.hasOwnProperty, p = d.propertyIsEnumerable, m = u(/* @__PURE__ */ function() { return arguments; }()) ? u : function(_) { return h(_) && f.call(_, "callee") && !p.call(_, "callee"); }; a.exports = m; } ), /***/ "../../../../node_modules/lodash/isArray.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/isArray.js ***! \**************************************************/ /***/ (a) => { var l = Array.isArray; a.exports = l; } ), /***/ "../../../../node_modules/lodash/isArrayLike.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/isArrayLike.js ***! \******************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./isFunction */ "../../../../node_modules/lodash/isFunction.js" ), h = o( /*! ./isLength */ "../../../../node_modules/lodash/isLength.js" ); function d(f) { return f != null && h(f.length) && !u(f); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/isArrayLikeObject.js": ( /*!************************************************************!*\ !*** ../../../../node_modules/lodash/isArrayLikeObject.js ***! \************************************************************/ /***/ (a, l, o) => { var u = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ); function d(f) { return h(f) && u(f); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/isBuffer.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/isBuffer.js ***! \***************************************************/ /***/ (a, l, o) => { a = o.nmd(a); var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = o( /*! ./stubFalse */ "../../../../node_modules/lodash/stubFalse.js" ), d = l && !l.nodeType && l, f = d && !0 && a && !a.nodeType && a, p = f && f.exports === d, m = p ? u.Buffer : void 0, _ = m ? m.isBuffer : void 0, v = _ || h; a.exports = v; } ), /***/ "../../../../node_modules/lodash/isEmpty.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/isEmpty.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseKeys */ "../../../../node_modules/lodash/_baseKeys.js" ), h = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), d = o( /*! ./isArguments */ "../../../../node_modules/lodash/isArguments.js" ), f = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), p = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ), m = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), _ = o( /*! ./_isPrototype */ "../../../../node_modules/lodash/_isPrototype.js" ), v = o( /*! ./isTypedArray */ "../../../../node_modules/lodash/isTypedArray.js" ), C = "[object Map]", x = "[object Set]", b = Object.prototype, S = b.hasOwnProperty; function M(R) { if (R == null) return !0; if (p(R) && (f(R) || typeof R == "string" || typeof R.splice == "function" || m(R) || v(R) || d(R))) return !R.length; var w = h(R); if (w == C || w == x) return !R.size; if (_(R)) return !u(R).length; for (var V in R) if (S.call(R, V)) return !1; return !0; } a.exports = M; } ), /***/ "../../../../node_modules/lodash/isFunction.js": ( /*!*****************************************************!*\ !*** ../../../../node_modules/lodash/isFunction.js ***! \*****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), d = "[object AsyncFunction]", f = "[object Function]", p = "[object GeneratorFunction]", m = "[object Proxy]"; function _(v) { if (!h(v)) return !1; var C = u(v); return C == f || C == p || C == d || C == m; } a.exports = _; } ), /***/ "../../../../node_modules/lodash/isLength.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/isLength.js ***! 
\***************************************************/ /***/ (a) => { var l = 9007199254740991; function o(u) { return typeof u == "number" && u > -1 && u % 1 == 0 && u <= l; } a.exports = o; } ), /***/ "../../../../node_modules/lodash/isMap.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/isMap.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsMap */ "../../../../node_modules/lodash/_baseIsMap.js" ), h = o( /*! ./_baseUnary */ "../../../../node_modules/lodash/_baseUnary.js" ), d = o( /*! ./_nodeUtil */ "../../../../node_modules/lodash/_nodeUtil.js" ), f = d && d.isMap, p = f ? h(f) : u; a.exports = p; } ), /***/ "../../../../node_modules/lodash/isObject.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/isObject.js ***! \***************************************************/ /***/ (a) => { function l(o) { var u = typeof o; return o != null && (u == "object" || u == "function"); } a.exports = l; } ), /***/ "../../../../node_modules/lodash/isObjectLike.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/isObjectLike.js ***! \*******************************************************/ /***/ (a) => { function l(o) { return o != null && typeof o == "object"; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/isPlainObject.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/isPlainObject.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./_getPrototype */ "../../../../node_modules/lodash/_getPrototype.js" ), d = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), f = "[object Object]", p = Function.prototype, m = Object.prototype, _ = p.toString, v = m.hasOwnProperty, C = _.call(Object); function x(b) { if (!d(b) || u(b) != f) return !1; var S = h(b); if (S === null) return !0; var M = v.call(S, "constructor") && S.constructor; return typeof M == "function" && M instanceof M && _.call(M) == C; } a.exports = x; } ), /***/ "../../../../node_modules/lodash/isSet.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/isSet.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsSet */ "../../../../node_modules/lodash/_baseIsSet.js" ), h = o( /*! ./_baseUnary */ "../../../../node_modules/lodash/_baseUnary.js" ), d = o( /*! ./_nodeUtil */ "../../../../node_modules/lodash/_nodeUtil.js" ), f = d && d.isSet, p = f ? h(f) : u; a.exports = p; } ), /***/ "../../../../node_modules/lodash/isString.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/isString.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), d = o( /*! 
./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), f = "[object String]"; function p(m) { return typeof m == "string" || !h(m) && d(m) && u(m) == f; } a.exports = p; } ), /***/ "../../../../node_modules/lodash/isSymbol.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/isSymbol.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseGetTag */ "../../../../node_modules/lodash/_baseGetTag.js" ), h = o( /*! ./isObjectLike */ "../../../../node_modules/lodash/isObjectLike.js" ), d = "[object Symbol]"; function f(p) { return typeof p == "symbol" || h(p) && u(p) == d; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/isTypedArray.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/lodash/isTypedArray.js ***! \*******************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseIsTypedArray */ "../../../../node_modules/lodash/_baseIsTypedArray.js" ), h = o( /*! ./_baseUnary */ "../../../../node_modules/lodash/_baseUnary.js" ), d = o( /*! ./_nodeUtil */ "../../../../node_modules/lodash/_nodeUtil.js" ), f = d && d.isTypedArray, p = f ? h(f) : u; a.exports = p; } ), /***/ "../../../../node_modules/lodash/isUndefined.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/lodash/isUndefined.js ***! \******************************************************/ /***/ (a) => { function l(o) { return o === void 0; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/keys.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/keys.js ***! \***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayLikeKeys */ "../../../../node_modules/lodash/_arrayLikeKeys.js" ), h = o( /*! ./_baseKeys */ "../../../../node_modules/lodash/_baseKeys.js" ), d = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ); function f(p) { return d(p) ? u(p) : h(p); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/keysIn.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/keysIn.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayLikeKeys */ "../../../../node_modules/lodash/_arrayLikeKeys.js" ), h = o( /*! ./_baseKeysIn */ "../../../../node_modules/lodash/_baseKeysIn.js" ), d = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ); function f(p) { return d(p) ? u(p, !0) : h(p); } a.exports = f; } ), /***/ "../../../../node_modules/lodash/last.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/last.js ***! \***********************************************/ /***/ (a) => { function l(o) { var u = o == null ? 0 : o.length; return u ? o[u - 1] : void 0; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/map.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/map.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayMap */ "../../../../node_modules/lodash/_arrayMap.js" ), h = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), d = o( /*! ./_baseMap */ "../../../../node_modules/lodash/_baseMap.js" ), f = o( /*! 
./isArray */ "../../../../node_modules/lodash/isArray.js" ); function p(m, _) { var v = f(m) ? u : d; return v(m, h(_, 3)); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/mapValues.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/mapValues.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseAssignValue */ "../../../../node_modules/lodash/_baseAssignValue.js" ), h = o( /*! ./_baseForOwn */ "../../../../node_modules/lodash/_baseForOwn.js" ), d = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ); function f(p, m) { var _ = {}; return m = d(m, 3), h(p, function(v, C, x) { u(_, C, m(v, C, x)); }), _; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/max.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/max.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseExtremum */ "../../../../node_modules/lodash/_baseExtremum.js" ), h = o( /*! ./_baseGt */ "../../../../node_modules/lodash/_baseGt.js" ), d = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ); function f(p) { return p && p.length ? u(p, d, h) : void 0; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/memoize.js": ( /*!**************************************************!*\ !*** ../../../../node_modules/lodash/memoize.js ***! \**************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_MapCache */ "../../../../node_modules/lodash/_MapCache.js" ), h = "Expected a function"; function d(f, p) { if (typeof f != "function" || p != null && typeof p != "function") throw new TypeError(h); var m = function() { var _ = arguments, v = p ? p.apply(this, _) : _[0], C = m.cache; if (C.has(v)) return C.get(v); var x = f.apply(this, _); return m.cache = C.set(v, x) || C, x; }; return m.cache = new (d.Cache || u)(), m; } d.Cache = u, a.exports = d; } ), /***/ "../../../../node_modules/lodash/merge.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/merge.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseMerge */ "../../../../node_modules/lodash/_baseMerge.js" ), h = o( /*! ./_createAssigner */ "../../../../node_modules/lodash/_createAssigner.js" ), d = h(function(f, p, m) { u(f, p, m); }); a.exports = d; } ), /***/ "../../../../node_modules/lodash/min.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/min.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseExtremum */ "../../../../node_modules/lodash/_baseExtremum.js" ), h = o( /*! ./_baseLt */ "../../../../node_modules/lodash/_baseLt.js" ), d = o( /*! ./identity */ "../../../../node_modules/lodash/identity.js" ); function f(p) { return p && p.length ? u(p, d, h) : void 0; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/minBy.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/minBy.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseExtremum */ "../../../../node_modules/lodash/_baseExtremum.js" ), h = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), d = o( /*! ./_baseLt */ "../../../../node_modules/lodash/_baseLt.js" ); function f(p, m) { return p && p.length ? 
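/* minBy: returns the element whose iteratee result compares lowest (baseLt),
   or undefined for an empty or nullish array.
   e.g. _.minBy([{ n: 2 }, { n: 1 }], function(o) { return o.n; }) -> { n: 1 } */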
u(p, h(m, 2), d) : void 0; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/noop.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/noop.js ***! \***********************************************/ /***/ (a) => { function l() { } a.exports = l; } ), /***/ "../../../../node_modules/lodash/now.js": ( /*!**********************************************!*\ !*** ../../../../node_modules/lodash/now.js ***! \**********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_root */ "../../../../node_modules/lodash/_root.js" ), h = function() { return u.Date.now(); }; a.exports = h; } ), /***/ "../../../../node_modules/lodash/pick.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/pick.js ***! \***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_basePick */ "../../../../node_modules/lodash/_basePick.js" ), h = o( /*! ./_flatRest */ "../../../../node_modules/lodash/_flatRest.js" ), d = h(function(f, p) { return f == null ? {} : u(f, p); }); a.exports = d; } ), /***/ "../../../../node_modules/lodash/property.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/property.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseProperty */ "../../../../node_modules/lodash/_baseProperty.js" ), h = o( /*! ./_basePropertyDeep */ "../../../../node_modules/lodash/_basePropertyDeep.js" ), d = o( /*! ./_isKey */ "../../../../node_modules/lodash/_isKey.js" ), f = o( /*! ./_toKey */ "../../../../node_modules/lodash/_toKey.js" ); function p(m) { return d(m) ? u(f(m)) : h(m); } a.exports = p; } ), /***/ "../../../../node_modules/lodash/range.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/range.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_createRange */ "../../../../node_modules/lodash/_createRange.js" ), h = u(); a.exports = h; } ), /***/ "../../../../node_modules/lodash/reduce.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/reduce.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayReduce */ "../../../../node_modules/lodash/_arrayReduce.js" ), h = o( /*! ./_baseEach */ "../../../../node_modules/lodash/_baseEach.js" ), d = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), f = o( /*! ./_baseReduce */ "../../../../node_modules/lodash/_baseReduce.js" ), p = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ); function m(_, v, C) { var x = p(_) ? u : f, b = arguments.length < 3; return x(_, d(v, 4), C, b, h); } a.exports = m; } ), /***/ "../../../../node_modules/lodash/size.js": ( /*!***********************************************!*\ !*** ../../../../node_modules/lodash/size.js ***! \***********************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseKeys */ "../../../../node_modules/lodash/_baseKeys.js" ), h = o( /*! ./_getTag */ "../../../../node_modules/lodash/_getTag.js" ), d = o( /*! ./isArrayLike */ "../../../../node_modules/lodash/isArrayLike.js" ), f = o( /*! ./isString */ "../../../../node_modules/lodash/isString.js" ), p = o( /*! 
./_stringSize */ "../../../../node_modules/lodash/_stringSize.js" ), m = "[object Map]", _ = "[object Set]"; function v(C) { if (C == null) return 0; if (d(C)) return f(C) ? p(C) : C.length; var x = h(C); return x == m || x == _ ? C.size : u(C).length; } a.exports = v; } ), /***/ "../../../../node_modules/lodash/sortBy.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/sortBy.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFlatten */ "../../../../node_modules/lodash/_baseFlatten.js" ), h = o( /*! ./_baseOrderBy */ "../../../../node_modules/lodash/_baseOrderBy.js" ), d = o( /*! ./_baseRest */ "../../../../node_modules/lodash/_baseRest.js" ), f = o( /*! ./_isIterateeCall */ "../../../../node_modules/lodash/_isIterateeCall.js" ), p = d(function(m, _) { if (m == null) return []; var v = _.length; return v > 1 && f(m, _[0], _[1]) ? _ = [] : v > 2 && f(_[0], _[1], _[2]) && (_ = [_[0]]), h(m, u(_, 1), []); }); a.exports = p; } ), /***/ "../../../../node_modules/lodash/stubArray.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/stubArray.js ***! \****************************************************/ /***/ (a) => { function l() { return []; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/stubFalse.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/stubFalse.js ***! \****************************************************/ /***/ (a) => { function l() { return !1; } a.exports = l; } ), /***/ "../../../../node_modules/lodash/toFinite.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/toFinite.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./toNumber */ "../../../../node_modules/lodash/toNumber.js" ), h = 1 / 0, d = 17976931348623157e292; function f(p) { if (!p) return p === 0 ? p : 0; if (p = u(p), p === h || p === -h) { var m = p < 0 ? -1 : 1; return m * d; } return p === p ? p : 0; } a.exports = f; } ), /***/ "../../../../node_modules/lodash/toInteger.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/toInteger.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./toFinite */ "../../../../node_modules/lodash/toFinite.js" ); function h(d) { var f = u(d), p = f % 1; return f === f ? p ? f - p : f : 0; } a.exports = h; } ), /***/ "../../../../node_modules/lodash/toNumber.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/toNumber.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseTrim */ "../../../../node_modules/lodash/_baseTrim.js" ), h = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), d = o( /*! ./isSymbol */ "../../../../node_modules/lodash/isSymbol.js" ), f = NaN, p = /^[-+]0x[0-9a-f]+$/i, m = /^0b[01]+$/i, _ = /^0o[0-7]+$/i, v = parseInt; function C(x) { if (typeof x == "number") return x; if (d(x)) return f; if (h(x)) { var b = typeof x.valueOf == "function" ? x.valueOf() : x; x = h(b) ? b + "" : b; } if (typeof x != "string") return x === 0 ? x : +x; x = u(x); var S = m.test(x); return S || _.test(x) ? v(x.slice(2), S ? 2 : 8) : p.test(x) ? 
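/* toNumber string handling: trimmed "0b"/"0o" strings are parsed with the
   matching radix, signed hex strings ("-0x...") yield NaN, and anything else
   is coerced with unary +.
   e.g. _.toNumber("0b101") -> 5, _.toNumber("-0x1f") -> NaN */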
f : +x; } a.exports = C; } ), /***/ "../../../../node_modules/lodash/toPlainObject.js": ( /*!********************************************************!*\ !*** ../../../../node_modules/lodash/toPlainObject.js ***! \********************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_copyObject */ "../../../../node_modules/lodash/_copyObject.js" ), h = o( /*! ./keysIn */ "../../../../node_modules/lodash/keysIn.js" ); function d(f) { return u(f, h(f)); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/toString.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/toString.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseToString */ "../../../../node_modules/lodash/_baseToString.js" ); function h(d) { return d == null ? "" : u(d); } a.exports = h; } ), /***/ "../../../../node_modules/lodash/transform.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/transform.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_arrayEach */ "../../../../node_modules/lodash/_arrayEach.js" ), h = o( /*! ./_baseCreate */ "../../../../node_modules/lodash/_baseCreate.js" ), d = o( /*! ./_baseForOwn */ "../../../../node_modules/lodash/_baseForOwn.js" ), f = o( /*! ./_baseIteratee */ "../../../../node_modules/lodash/_baseIteratee.js" ), p = o( /*! ./_getPrototype */ "../../../../node_modules/lodash/_getPrototype.js" ), m = o( /*! ./isArray */ "../../../../node_modules/lodash/isArray.js" ), _ = o( /*! ./isBuffer */ "../../../../node_modules/lodash/isBuffer.js" ), v = o( /*! ./isFunction */ "../../../../node_modules/lodash/isFunction.js" ), C = o( /*! ./isObject */ "../../../../node_modules/lodash/isObject.js" ), x = o( /*! ./isTypedArray */ "../../../../node_modules/lodash/isTypedArray.js" ); function b(S, M, R) { var w = m(S), V = w || _(S) || x(S); if (M = f(M, 4), R == null) { var k = S && S.constructor; V ? R = w ? new k() : [] : C(S) ? R = v(k) ? h(p(S)) : {} : R = {}; } return (V ? u : d)(S, function(L, B, U) { return M(R, L, B, U); }), R; } a.exports = b; } ), /***/ "../../../../node_modules/lodash/union.js": ( /*!************************************************!*\ !*** ../../../../node_modules/lodash/union.js ***! \************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_baseFlatten */ "../../../../node_modules/lodash/_baseFlatten.js" ), h = o( /*! ./_baseRest */ "../../../../node_modules/lodash/_baseRest.js" ), d = o( /*! ./_baseUniq */ "../../../../node_modules/lodash/_baseUniq.js" ), f = o( /*! ./isArrayLikeObject */ "../../../../node_modules/lodash/isArrayLikeObject.js" ), p = h(function(m) { return d(u(m, 1, f, !0)); }); a.exports = p; } ), /***/ "../../../../node_modules/lodash/uniqueId.js": ( /*!***************************************************!*\ !*** ../../../../node_modules/lodash/uniqueId.js ***! \***************************************************/ /***/ (a, l, o) => { var u = o( /*! ./toString */ "../../../../node_modules/lodash/toString.js" ), h = 0; function d(f) { var p = ++h; return u(f) + p; } a.exports = d; } ), /***/ "../../../../node_modules/lodash/values.js": ( /*!*************************************************!*\ !*** ../../../../node_modules/lodash/values.js ***! \*************************************************/ /***/ (a, l, o) => { var u = o( /*! 
./_baseValues */ "../../../../node_modules/lodash/_baseValues.js" ), h = o( /*! ./keys */ "../../../../node_modules/lodash/keys.js" ); function d(f) { return f == null ? [] : u(f, h(f)); } a.exports = d; } ), /***/ "../../../../node_modules/lodash/zipObject.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/lodash/zipObject.js ***! \****************************************************/ /***/ (a, l, o) => { var u = o( /*! ./_assignValue */ "../../../../node_modules/lodash/_assignValue.js" ), h = o( /*! ./_baseZipObject */ "../../../../node_modules/lodash/_baseZipObject.js" ); function d(f, p) { return h(f || [], p || [], u); } a.exports = d; } ), /***/ "../../../../node_modules/object-assign/index.js": ( /*!*******************************************************!*\ !*** ../../../../node_modules/object-assign/index.js ***! \*******************************************************/ /***/ (a) => { /* object-assign (c) Sindre Sorhus @license MIT */ var l = Object.getOwnPropertySymbols, o = Object.prototype.hasOwnProperty, u = Object.prototype.propertyIsEnumerable; function h(f) { if (f == null) throw new TypeError("Object.assign cannot be called with null or undefined"); return Object(f); } function d() { try { if (!Object.assign) return !1; var f = new String("abc"); if (f[5] = "de", Object.getOwnPropertyNames(f)[0] === "5") return !1; for (var p = {}, m = 0; m < 10; m++) p["_" + String.fromCharCode(m)] = m; var _ = Object.getOwnPropertyNames(p).map(function(C) { return p[C]; }); if (_.join("") !== "0123456789") return !1; var v = {}; return "abcdefghijklmnopqrst".split("").forEach(function(C) { v[C] = C; }), Object.keys(Object.assign({}, v)).join("") === "abcdefghijklmnopqrst"; } catch { return !1; } } a.exports = d() ? Object.assign : function(f, p) { for (var m, _ = h(f), v, C = 1; C < arguments.length; C++) { m = Object(arguments[C]); for (var x in m) o.call(m, x) && (_[x] = m[x]); if (l) { v = l(m); for (var b = 0; b < v.length; b++) u.call(m, v[b]) && (_[v[b]] = m[v[b]]); } } return _; }; } ), /***/ "../../../../node_modules/prop-types/checkPropTypes.js": ( /*!*************************************************************!*\ !*** ../../../../node_modules/prop-types/checkPropTypes.js ***! \*************************************************************/ /***/ (a, l, o) => { var u = function() { }; { var h = o( /*! ./lib/ReactPropTypesSecret */ "../../../../node_modules/prop-types/lib/ReactPropTypesSecret.js" ), d = {}, f = o( /*! ./lib/has */ "../../../../node_modules/prop-types/lib/has.js" ); u = function(m) { var _ = "Warning: " + m; typeof console < "u" && console.error(_); try { throw new Error(_); } catch { } }; } function p(m, _, v, C, x) { for (var b in m) if (f(m, b)) { var S; try { if (typeof m[b] != "function") { var M = Error( (C || "React class") + ": " + v + " type `" + b + "` is invalid; it must be a function, usually from the `prop-types` package, but received `" + typeof m[b] + "`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`." ); throw M.name = "Invariant Violation", M; } S = m[b](_, b, C, v, null, h); } catch (w) { S = w; } if (S && !(S instanceof Error) && u( (C || "React class") + ": type specification of " + v + " `" + b + "` is invalid; the type checker function must return `null` or an `Error` but returned a " + typeof S + ". 
You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument)." ), S instanceof Error && !(S.message in d)) { d[S.message] = !0; var R = x ? x() : ""; u( "Failed " + v + " type: " + S.message + (R ?? "") ); } } } p.resetWarningCache = function() { d = {}; }, a.exports = p; } ), /***/ "../../../../node_modules/prop-types/factoryWithTypeCheckers.js": ( /*!**********************************************************************!*\ !*** ../../../../node_modules/prop-types/factoryWithTypeCheckers.js ***! \**********************************************************************/ /***/ (a, l, o) => { var u = o( /*! react-is */ "../../../../node_modules/prop-types/node_modules/react-is/index.js" ), h = o( /*! object-assign */ "../../../../node_modules/object-assign/index.js" ), d = o( /*! ./lib/ReactPropTypesSecret */ "../../../../node_modules/prop-types/lib/ReactPropTypesSecret.js" ), f = o( /*! ./lib/has */ "../../../../node_modules/prop-types/lib/has.js" ), p = o( /*! ./checkPropTypes */ "../../../../node_modules/prop-types/checkPropTypes.js" ), m = function() { }; m = function(v) { var C = "Warning: " + v; typeof console < "u" && console.error(C); try { throw new Error(C); } catch { } }; function _() { return null; } a.exports = function(v, C) { var x = typeof Symbol == "function" && Symbol.iterator, b = "@@iterator"; function S(te) { var he = te && (x && te[x] || te[b]); if (typeof he == "function") return he; } var M = "<>", R = { array: L("array"), bigint: L("bigint"), bool: L("boolean"), func: L("function"), number: L("number"), object: L("object"), string: L("string"), symbol: L("symbol"), any: B(), arrayOf: U, element: K(), elementType: ee(), instanceOf: Z, node: $(), objectOf: le, oneOf: q, oneOfType: ie, shape: J, exact: ne }; function w(te, he) { return te === he ? te !== 0 || 1 / te === 1 / he : te !== te && he !== he; } function V(te, he) { this.message = te, this.data = he && typeof he == "object" ? he : {}, this.stack = ""; } V.prototype = Error.prototype; function k(te) { var he = {}, be = 0; function Ue(He, Xe, rt, dt, bt, Mt, Ct) { if (dt = dt || M, Mt = Mt || rt, Ct !== d) { if (C) { var di = new Error( "Calling PropTypes validators directly is not supported by the `prop-types` package. Use `PropTypes.checkPropTypes()` to call them. Read more at http://fb.me/use-check-prop-types" ); throw di.name = "Invariant Violation", di; } else if (typeof console < "u") { var Kt = dt + ":" + rt; !he[Kt] && // Avoid spamming the console because they are often not actionable except for lib authors be < 3 && (m( "You are manually calling a React.PropTypes validation function for the `" + Mt + "` prop on `" + dt + "`. This is deprecated and will throw in the standalone `prop-types` package. You may be seeing this warning due to a third-party PropTypes library. See https://fb.me/react-warning-dont-call-proptypes for details." ), he[Kt] = !0, be++); } } return Xe[rt] == null ? He ? Xe[rt] === null ? 
new V("The " + bt + " `" + Mt + "` is marked as required " + ("in `" + dt + "`, but its value is `null`.")) : new V("The " + bt + " `" + Mt + "` is marked as required in " + ("`" + dt + "`, but its value is `undefined`.")) : null : te(Xe, rt, dt, bt, Mt); } var Ee = Ue.bind(null, !1); return Ee.isRequired = Ue.bind(null, !0), Ee; } function L(te) { function he(be, Ue, Ee, He, Xe, rt) { var dt = be[Ue], bt = Ie(dt); if (bt !== te) { var Mt = ye(dt); return new V( "Invalid " + He + " `" + Xe + "` of type " + ("`" + Mt + "` supplied to `" + Ee + "`, expected ") + ("`" + te + "`."), { expectedType: te } ); } return null; } return k(he); } function B() { return k(_); } function U(te) { function he(be, Ue, Ee, He, Xe) { if (typeof te != "function") return new V("Property `" + Xe + "` of component `" + Ee + "` has invalid PropType notation inside arrayOf."); var rt = be[Ue]; if (!Array.isArray(rt)) { var dt = Ie(rt); return new V("Invalid " + He + " `" + Xe + "` of type " + ("`" + dt + "` supplied to `" + Ee + "`, expected an array.")); } for (var bt = 0; bt < rt.length; bt++) { var Mt = te(rt, bt, Ee, He, Xe + "[" + bt + "]", d); if (Mt instanceof Error) return Mt; } return null; } return k(he); } function K() { function te(he, be, Ue, Ee, He) { var Xe = he[be]; if (!v(Xe)) { var rt = Ie(Xe); return new V("Invalid " + Ee + " `" + He + "` of type " + ("`" + rt + "` supplied to `" + Ue + "`, expected a single ReactElement.")); } return null; } return k(te); } function ee() { function te(he, be, Ue, Ee, He) { var Xe = he[be]; if (!u.isValidElementType(Xe)) { var rt = Ie(Xe); return new V("Invalid " + Ee + " `" + He + "` of type " + ("`" + rt + "` supplied to `" + Ue + "`, expected a single ReactElement type.")); } return null; } return k(te); } function Z(te) { function he(be, Ue, Ee, He, Xe) { if (!(be[Ue] instanceof te)) { var rt = te.name || M, dt = re(be[Ue]); return new V("Invalid " + He + " `" + Xe + "` of type " + ("`" + dt + "` supplied to `" + Ee + "`, expected ") + ("instance of `" + rt + "`.")); } return null; } return k(he); } function q(te) { if (!Array.isArray(te)) return arguments.length > 1 ? m( "Invalid arguments supplied to oneOf, expected an array, got " + arguments.length + " arguments. A common mistake is to write oneOf(x, y, z) instead of oneOf([x, y, z])." ) : m("Invalid argument supplied to oneOf, expected an array."), _; function he(be, Ue, Ee, He, Xe) { for (var rt = be[Ue], dt = 0; dt < te.length; dt++) if (w(rt, te[dt])) return null; var bt = JSON.stringify(te, function(Ct, di) { var Kt = ye(di); return Kt === "symbol" ? String(di) : di; }); return new V("Invalid " + He + " `" + Xe + "` of value `" + String(rt) + "` " + ("supplied to `" + Ee + "`, expected one of " + bt + ".")); } return k(he); } function le(te) { function he(be, Ue, Ee, He, Xe) { if (typeof te != "function") return new V("Property `" + Xe + "` of component `" + Ee + "` has invalid PropType notation inside objectOf."); var rt = be[Ue], dt = Ie(rt); if (dt !== "object") return new V("Invalid " + He + " `" + Xe + "` of type " + ("`" + dt + "` supplied to `" + Ee + "`, expected an object.")); for (var bt in rt) if (f(rt, bt)) { var Mt = te(rt, bt, Ee, He, Xe + "." + bt, d); if (Mt instanceof Error) return Mt; } return null; } return k(he); } function ie(te) { if (!Array.isArray(te)) return m("Invalid argument supplied to oneOfType, expected an instance of array."), _; for (var he = 0; he < te.length; he++) { var be = te[he]; if (typeof be != "function") return m( "Invalid argument supplied to oneOfType. 
Expected an array of check functions, but received " + Se(be) + " at index " + he + "." ), _; } function Ue(Ee, He, Xe, rt, dt) { for (var bt = [], Mt = 0; Mt < te.length; Mt++) { var Ct = te[Mt], di = Ct(Ee, He, Xe, rt, dt, d); if (di == null) return null; di.data && f(di.data, "expectedType") && bt.push(di.data.expectedType); } var Kt = bt.length > 0 ? ", expected one of type [" + bt.join(", ") + "]" : ""; return new V("Invalid " + rt + " `" + dt + "` supplied to " + ("`" + Xe + "`" + Kt + ".")); } return k(Ue); } function $() { function te(he, be, Ue, Ee, He) { return pe(he[be]) ? null : new V("Invalid " + Ee + " `" + He + "` supplied to " + ("`" + Ue + "`, expected a ReactNode.")); } return k(te); } function j(te, he, be, Ue, Ee) { return new V( (te || "React class") + ": " + he + " type `" + be + "." + Ue + "` is invalid; it must be a function, usually from the `prop-types` package, but received `" + Ee + "`." ); } function J(te) { function he(be, Ue, Ee, He, Xe) { var rt = be[Ue], dt = Ie(rt); if (dt !== "object") return new V("Invalid " + He + " `" + Xe + "` of type `" + dt + "` " + ("supplied to `" + Ee + "`, expected `object`.")); for (var bt in te) { var Mt = te[bt]; if (typeof Mt != "function") return j(Ee, He, Xe, bt, ye(Mt)); var Ct = Mt(rt, bt, Ee, He, Xe + "." + bt, d); if (Ct) return Ct; } return null; } return k(he); } function ne(te) { function he(be, Ue, Ee, He, Xe) { var rt = be[Ue], dt = Ie(rt); if (dt !== "object") return new V("Invalid " + He + " `" + Xe + "` of type `" + dt + "` " + ("supplied to `" + Ee + "`, expected `object`.")); var bt = h({}, be[Ue], te); for (var Mt in bt) { var Ct = te[Mt]; if (f(te, Mt) && typeof Ct != "function") return j(Ee, He, Xe, Mt, ye(Ct)); if (!Ct) return new V( "Invalid " + He + " `" + Xe + "` key `" + Mt + "` supplied to `" + Ee + "`.\nBad object: " + JSON.stringify(be[Ue], null, " ") + ` Valid keys: ` + JSON.stringify(Object.keys(te), null, " ") ); var di = Ct(rt, Mt, Ee, He, Xe + "." + Mt, d); if (di) return di; } return null; } return k(he); } function pe(te) { switch (typeof te) { case "number": case "string": case "undefined": return !0; case "boolean": return !te; case "object": if (Array.isArray(te)) return te.every(pe); if (te === null || v(te)) return !0; var he = S(te); if (he) { var be = he.call(te), Ue; if (he !== te.entries) { for (; !(Ue = be.next()).done; ) if (!pe(Ue.value)) return !1; } else for (; !(Ue = be.next()).done; ) { var Ee = Ue.value; if (Ee && !pe(Ee[1])) return !1; } } else return !1; return !0; default: return !1; } } function ge(te, he) { return te === "symbol" ? !0 : he ? he["@@toStringTag"] === "Symbol" || typeof Symbol == "function" && he instanceof Symbol : !1; } function Ie(te) { var he = typeof te; return Array.isArray(te) ? "array" : te instanceof RegExp ? "object" : ge(he, te) ? "symbol" : he; } function ye(te) { if (typeof te > "u" || te === null) return "" + te; var he = Ie(te); if (he === "object") { if (te instanceof Date) return "date"; if (te instanceof RegExp) return "regexp"; } return he; } function Se(te) { var he = ye(te); switch (he) { case "array": case "object": return "an " + he; case "boolean": case "date": case "regexp": return "a " + he; default: return he; } } function re(te) { return !te.constructor || !te.constructor.name ? 
M : te.constructor.name; } return R.checkPropTypes = p, R.resetWarningCache = p.resetWarningCache, R.PropTypes = R, R; }; } ), /***/ "../../../../node_modules/prop-types/index.js": ( /*!****************************************************!*\ !*** ../../../../node_modules/prop-types/index.js ***! \****************************************************/ /***/ (a, l, o) => { { var u = o( /*! react-is */ "../../../../node_modules/prop-types/node_modules/react-is/index.js" ), h = !0; a.exports = o( /*! ./factoryWithTypeCheckers */ "../../../../node_modules/prop-types/factoryWithTypeCheckers.js" )(u.isElement, h); } } ), /***/ "../../../../node_modules/prop-types/lib/ReactPropTypesSecret.js": ( /*!***********************************************************************!*\ !*** ../../../../node_modules/prop-types/lib/ReactPropTypesSecret.js ***! \***********************************************************************/ /***/ (a) => { var l = "SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"; a.exports = l; } ), /***/ "../../../../node_modules/prop-types/lib/has.js": ( /*!******************************************************!*\ !*** ../../../../node_modules/prop-types/lib/has.js ***! \******************************************************/ /***/ (a) => { a.exports = Function.call.bind(Object.prototype.hasOwnProperty); } ), /***/ "../../../../node_modules/prop-types/node_modules/react-is/cjs/react-is.development.js": ( /*!*********************************************************************************************!*\ !*** ../../../../node_modules/prop-types/node_modules/react-is/cjs/react-is.development.js ***! \*********************************************************************************************/ /***/ (a, l) => { /** @license React v16.13.1 * react-is.development.js * * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ (function() { var o = typeof Symbol == "function" && Symbol.for, u = o ? Symbol.for("react.element") : 60103, h = o ? Symbol.for("react.portal") : 60106, d = o ? Symbol.for("react.fragment") : 60107, f = o ? Symbol.for("react.strict_mode") : 60108, p = o ? Symbol.for("react.profiler") : 60114, m = o ? Symbol.for("react.provider") : 60109, _ = o ? Symbol.for("react.context") : 60110, v = o ? Symbol.for("react.async_mode") : 60111, C = o ? Symbol.for("react.concurrent_mode") : 60111, x = o ? Symbol.for("react.forward_ref") : 60112, b = o ? Symbol.for("react.suspense") : 60113, S = o ? Symbol.for("react.suspense_list") : 60120, M = o ? Symbol.for("react.memo") : 60115, R = o ? Symbol.for("react.lazy") : 60116, w = o ? Symbol.for("react.block") : 60121, V = o ? Symbol.for("react.fundamental") : 60117, k = o ? Symbol.for("react.responder") : 60118, L = o ? Symbol.for("react.scope") : 60119; function B(Ct) { return typeof Ct == "string" || typeof Ct == "function" || // Note: its typeof might be other than 'symbol' or 'number' if it's a polyfill. 
Ct === d || Ct === C || Ct === p || Ct === f || Ct === b || Ct === S || typeof Ct == "object" && Ct !== null && (Ct.$$typeof === R || Ct.$$typeof === M || Ct.$$typeof === m || Ct.$$typeof === _ || Ct.$$typeof === x || Ct.$$typeof === V || Ct.$$typeof === k || Ct.$$typeof === L || Ct.$$typeof === w); } function U(Ct) { if (typeof Ct == "object" && Ct !== null) { var di = Ct.$$typeof; switch (di) { case u: var Kt = Ct.type; switch (Kt) { case v: case C: case d: case p: case f: case b: return Kt; default: var ei = Kt && Kt.$$typeof; switch (ei) { case _: case x: case R: case M: case m: return ei; default: return di; } } case h: return di; } } } var K = v, ee = C, Z = _, q = m, le = u, ie = x, $ = d, j = R, J = M, ne = h, pe = p, ge = f, Ie = b, ye = !1; function Se(Ct) { return ye || (ye = !0, console.warn("The ReactIs.isAsyncMode() alias has been deprecated, and will be removed in React 17+. Update your code to use ReactIs.isConcurrentMode() instead. It has the exact same API.")), re(Ct) || U(Ct) === v; } function re(Ct) { return U(Ct) === C; } function te(Ct) { return U(Ct) === _; } function he(Ct) { return U(Ct) === m; } function be(Ct) { return typeof Ct == "object" && Ct !== null && Ct.$$typeof === u; } function Ue(Ct) { return U(Ct) === x; } function Ee(Ct) { return U(Ct) === d; } function He(Ct) { return U(Ct) === R; } function Xe(Ct) { return U(Ct) === M; } function rt(Ct) { return U(Ct) === h; } function dt(Ct) { return U(Ct) === p; } function bt(Ct) { return U(Ct) === f; } function Mt(Ct) { return U(Ct) === b; } l.AsyncMode = K, l.ConcurrentMode = ee, l.ContextConsumer = Z, l.ContextProvider = q, l.Element = le, l.ForwardRef = ie, l.Fragment = $, l.Lazy = j, l.Memo = J, l.Portal = ne, l.Profiler = pe, l.StrictMode = ge, l.Suspense = Ie, l.isAsyncMode = Se, l.isConcurrentMode = re, l.isContextConsumer = te, l.isContextProvider = he, l.isElement = be, l.isForwardRef = Ue, l.isFragment = Ee, l.isLazy = He, l.isMemo = Xe, l.isPortal = rt, l.isProfiler = dt, l.isStrictMode = bt, l.isSuspense = Mt, l.isValidElementType = B, l.typeOf = U; })(); } ), /***/ "../../../../node_modules/prop-types/node_modules/react-is/index.js": ( /*!**************************************************************************!*\ !*** ../../../../node_modules/prop-types/node_modules/react-is/index.js ***! \**************************************************************************/ /***/ (a, l, o) => { a.exports = o( /*! ./cjs/react-is.development.js */ "../../../../node_modules/prop-types/node_modules/react-is/cjs/react-is.development.js" ); } ), /***/ "../../../../node_modules/react-dom/cjs/react-dom.development.js": ( /*!***********************************************************************!*\ !*** ../../../../node_modules/react-dom/cjs/react-dom.development.js ***! \***********************************************************************/ /***/ (a, l, o) => { /** @license React v17.0.2 * react-dom.development.js * * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ (function() { var u = o( /*! react */ "../../../../node_modules/react/index.js" ), h = o( /*! object-assign */ "../../../../node_modules/object-assign/index.js" ), d = o( /*! scheduler */ "../../../../node_modules/scheduler/index.js" ), f = o( /*! 
scheduler/tracing */ "../../../../node_modules/scheduler/tracing.js" ), p = u.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED; function m(A) { { for (var E = arguments.length, O = new Array(E > 1 ? E - 1 : 0), H = 1; H < E; H++) O[H - 1] = arguments[H]; v("warn", A, O); } } function _(A) { { for (var E = arguments.length, O = new Array(E > 1 ? E - 1 : 0), H = 1; H < E; H++) O[H - 1] = arguments[H]; v("error", A, O); } } function v(A, E, O) { { var H = p.ReactDebugCurrentFrame, X = H.getStackAddendum(); X !== "" && (E += "%s", O = O.concat([X])); var oe = O.map(function(_e) { return "" + _e; }); oe.unshift("Warning: " + E), Function.prototype.apply.call(console[A], console, oe); } } if (!u) throw Error("ReactDOM was loaded before React. Make sure you load the React package before loading ReactDOM."); var C = 0, x = 1, b = 2, S = 3, M = 4, R = 5, w = 6, V = 7, k = 8, L = 9, B = 10, U = 11, K = 12, ee = 13, Z = 14, q = 15, le = 16, ie = 17, $ = 18, j = 19, J = 20, ne = 21, pe = 22, ge = 23, Ie = 24, ye = !0, Se = !1, re = !1, te = !1, he = /* @__PURE__ */ new Set(), be = {}, Ue = {}; function Ee(A, E) { He(A, E), He(A + "Capture", E); } function He(A, E) { be[A] && _("EventRegistry: More than one plugin attempted to publish the same registration name, `%s`.", A), be[A] = E; { var O = A.toLowerCase(); Ue[O] = A, A === "onDoubleClick" && (Ue.ondblclick = A); } for (var H = 0; H < E.length; H++) he.add(E[H]); } var Xe = typeof window < "u" && typeof window.document < "u" && typeof window.document.createElement < "u", rt = 0, dt = 1, bt = 2, Mt = 3, Ct = 4, di = 5, Kt = 6, ei = ":A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", bi = ei + "\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040", vr = "data-reactroot", yi = new RegExp("^[" + ei + "][" + bi + "]*$"), Vr = Object.prototype.hasOwnProperty, Rr = {}, ks = {}; function Qt(A) { return Vr.call(ks, A) ? !0 : Vr.call(Rr, A) ? !1 : yi.test(A) ? (ks[A] = !0, !0) : (Rr[A] = !0, _("Invalid attribute name: `%s`", A), !1); } function Ei(A, E, O) { return E !== null ? E.type === rt : O ? !1 : A.length > 2 && (A[0] === "o" || A[0] === "O") && (A[1] === "n" || A[1] === "N"); } function Pi(A, E, O, H) { if (O !== null && O.type === rt) return !1; switch (typeof E) { case "function": case "symbol": return !0; case "boolean": { if (H) return !1; if (O !== null) return !O.acceptsBooleans; var X = A.toLowerCase().slice(0, 5); return X !== "data-" && X !== "aria-"; } default: return !1; } } function rr(A, E, O, H) { if (E === null || typeof E > "u" || Pi(A, E, O, H)) return !0; if (H) return !1; if (O !== null) switch (O.type) { case Mt: return !E; case Ct: return E === !1; case di: return isNaN(E); case Kt: return isNaN(E) || E < 1; } return !1; } function sr(A) { return nr.hasOwnProperty(A) ? nr[A] : null; } function dr(A, E, O, H, X, oe, _e) { this.acceptsBooleans = E === bt || E === Mt || E === Ct, this.attributeName = H, this.attributeNamespace = X, this.mustUseProperty = O, this.propertyName = A, this.type = E, this.sanitizeURL = oe, this.removeEmptyString = _e; } var nr = {}, Pr = [ "children", "dangerouslySetInnerHTML", // TODO: This prevents the assignment of defaultValue to regular // elements (not just inputs). Now that ReactDOMInput assigns to the // defaultValue property -- do we need this? 
"defaultValue", "defaultChecked", "innerHTML", "suppressContentEditableWarning", "suppressHydrationWarning", "style" ]; Pr.forEach(function(A) { nr[A] = new dr( A, rt, !1, // mustUseProperty A, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), [["acceptCharset", "accept-charset"], ["className", "class"], ["htmlFor", "for"], ["httpEquiv", "http-equiv"]].forEach(function(A) { var E = A[0], O = A[1]; nr[E] = new dr( E, dt, !1, // mustUseProperty O, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), ["contentEditable", "draggable", "spellCheck", "value"].forEach(function(A) { nr[A] = new dr( A, bt, !1, // mustUseProperty A.toLowerCase(), // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), ["autoReverse", "externalResourcesRequired", "focusable", "preserveAlpha"].forEach(function(A) { nr[A] = new dr( A, bt, !1, // mustUseProperty A, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), [ "allowFullScreen", "async", // Note: there is a special case that prevents it from being written to the DOM // on the client side because the browsers are inconsistent. Instead we call focus(). "autoFocus", "autoPlay", "controls", "default", "defer", "disabled", "disablePictureInPicture", "disableRemotePlayback", "formNoValidate", "hidden", "loop", "noModule", "noValidate", "open", "playsInline", "readOnly", "required", "reversed", "scoped", "seamless", // Microdata "itemScope" ].forEach(function(A) { nr[A] = new dr( A, Mt, !1, // mustUseProperty A.toLowerCase(), // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), [ "checked", // Note: `option.selected` is not updated if `select.multiple` is // disabled with `removeAttribute`. We have special logic for handling this. "multiple", "muted", "selected" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. ].forEach(function(A) { nr[A] = new dr( A, Mt, !0, // mustUseProperty A, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), [ "capture", "download" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. ].forEach(function(A) { nr[A] = new dr( A, Ct, !1, // mustUseProperty A, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), [ "cols", "rows", "size", "span" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. 
].forEach(function(A) { nr[A] = new dr( A, Kt, !1, // mustUseProperty A, // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }), ["rowSpan", "start"].forEach(function(A) { nr[A] = new dr( A, di, !1, // mustUseProperty A.toLowerCase(), // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }); var ti = /[\-\:]([a-z])/g, Oi = function(A) { return A[1].toUpperCase(); }; [ "accent-height", "alignment-baseline", "arabic-form", "baseline-shift", "cap-height", "clip-path", "clip-rule", "color-interpolation", "color-interpolation-filters", "color-profile", "color-rendering", "dominant-baseline", "enable-background", "fill-opacity", "fill-rule", "flood-color", "flood-opacity", "font-family", "font-size", "font-size-adjust", "font-stretch", "font-style", "font-variant", "font-weight", "glyph-name", "glyph-orientation-horizontal", "glyph-orientation-vertical", "horiz-adv-x", "horiz-origin-x", "image-rendering", "letter-spacing", "lighting-color", "marker-end", "marker-mid", "marker-start", "overline-position", "overline-thickness", "paint-order", "panose-1", "pointer-events", "rendering-intent", "shape-rendering", "stop-color", "stop-opacity", "strikethrough-position", "strikethrough-thickness", "stroke-dasharray", "stroke-dashoffset", "stroke-linecap", "stroke-linejoin", "stroke-miterlimit", "stroke-opacity", "stroke-width", "text-anchor", "text-decoration", "text-rendering", "underline-position", "underline-thickness", "unicode-bidi", "unicode-range", "units-per-em", "v-alphabetic", "v-hanging", "v-ideographic", "v-mathematical", "vector-effect", "vert-adv-y", "vert-origin-x", "vert-origin-y", "word-spacing", "writing-mode", "xmlns:xlink", "x-height" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. ].forEach(function(A) { var E = A.replace(ti, Oi); nr[E] = new dr( E, dt, !1, // mustUseProperty A, null, // attributeNamespace !1, // sanitizeURL !1 ); }), [ "xlink:actuate", "xlink:arcrole", "xlink:role", "xlink:show", "xlink:title", "xlink:type" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. ].forEach(function(A) { var E = A.replace(ti, Oi); nr[E] = new dr( E, dt, !1, // mustUseProperty A, "http://www.w3.org/1999/xlink", !1, // sanitizeURL !1 ); }), [ "xml:base", "xml:lang", "xml:space" // NOTE: if you add a camelCased prop to this list, // you'll need to set attributeName to name.toLowerCase() // instead in the assignment below. 
].forEach(function(A) { var E = A.replace(ti, Oi); nr[E] = new dr( E, dt, !1, // mustUseProperty A, "http://www.w3.org/XML/1998/namespace", !1, // sanitizeURL !1 ); }), ["tabIndex", "crossOrigin"].forEach(function(A) { nr[A] = new dr( A, dt, !1, // mustUseProperty A.toLowerCase(), // attributeName null, // attributeNamespace !1, // sanitizeURL !1 ); }); var ri = "xlinkHref"; nr[ri] = new dr( "xlinkHref", dt, !1, // mustUseProperty "xlink:href", "http://www.w3.org/1999/xlink", !0, // sanitizeURL !1 ), ["src", "href", "action", "formAction"].forEach(function(A) { nr[A] = new dr( A, dt, !1, // mustUseProperty A.toLowerCase(), // attributeName null, // attributeNamespace !0, // sanitizeURL !0 ); }); var ki = /^[\u0000-\u001F ]*j[\r\n\t]*a[\r\n\t]*v[\r\n\t]*a[\r\n\t]*s[\r\n\t]*c[\r\n\t]*r[\r\n\t]*i[\r\n\t]*p[\r\n\t]*t[\r\n\t]*\:/i, wr = !1; function Lr(A) { !wr && ki.test(A) && (wr = !0, _("A future version of React will block javascript: URLs as a security precaution. Use event handlers instead if you can. If you need to generate unsafe HTML try using dangerouslySetInnerHTML instead. React was passed %s.", JSON.stringify(A))); } function Us(A, E, O, H) { if (H.mustUseProperty) { var X = H.propertyName; return A[X]; } else { H.sanitizeURL && Lr("" + O); var oe = H.attributeName, _e = null; if (H.type === Ct) { if (A.hasAttribute(oe)) { var xe = A.getAttribute(oe); return xe === "" ? !0 : rr(E, O, H, !1) ? xe : xe === "" + O ? O : xe; } } else if (A.hasAttribute(oe)) { if (rr(E, O, H, !1)) return A.getAttribute(oe); if (H.type === Mt) return O; _e = A.getAttribute(oe); } return rr(E, O, H, !1) ? _e === null ? O : _e : _e === "" + O ? O : _e; } } function nn(A, E, O) { { if (!Qt(E)) return; if (oy(O)) return O; if (!A.hasAttribute(E)) return O === void 0 ? void 0 : null; var H = A.getAttribute(E); return H === "" + O ? O : H; } } function Li(A, E, O, H) { var X = sr(E); if (!Ei(E, X, H)) { if (rr(E, O, X, H) && (O = null), H || X === null) { if (Qt(E)) { var oe = E; O === null ? A.removeAttribute(oe) : A.setAttribute(oe, "" + O); } return; } var _e = X.mustUseProperty; if (_e) { var xe = X.propertyName; if (O === null) { var Ne = X.type; A[xe] = Ne === Mt ? !1 : ""; } else A[xe] = O; return; } var tt = X.attributeName, lt = X.attributeNamespace; if (O === null) A.removeAttribute(tt); else { var Lt = X.type, _t; Lt === Mt || Lt === Ct && O === !0 ? _t = "" : (_t = "" + O, X.sanitizeURL && Lr(_t.toString())), lt ? A.setAttributeNS(lt, tt, _t) : A.setAttribute(tt, _t); } } } var Os = 60103, rn = 60106, Ts = 60107, Zs = 60108, as = 60114, ui = 60109, zi = 60110, _i = 60112, cn = 60113, st = 60120, qt = 60115, wi = 60116, Je = 60121, Xt = 60119, pi = 60128, Ni = 60129, ji = 60130, _s = 60131; if (typeof Symbol == "function" && Symbol.for) { var Qi = Symbol.for; Os = Qi("react.element"), rn = Qi("react.portal"), Ts = Qi("react.fragment"), Zs = Qi("react.strict_mode"), as = Qi("react.profiler"), ui = Qi("react.provider"), zi = Qi("react.context"), _i = Qi("react.forward_ref"), cn = Qi("react.suspense"), st = Qi("react.suspense_list"), qt = Qi("react.memo"), wi = Qi("react.lazy"), Je = Qi("react.block"), Qi("react.server.block"), Qi("react.fundamental"), Xt = Qi("react.scope"), pi = Qi("react.opaque.id"), Ni = Qi("react.debug_trace_mode"), ji = Qi("react.offscreen"), _s = Qi("react.legacy_hidden"); } var Nr = typeof Symbol == "function" && Symbol.iterator, Ks = "@@iterator"; function fs(A) { if (A === null || typeof A != "object") return null; var E = Nr && A[Nr] || A[Ks]; return typeof E == "function" ? 
E : null; } var vn = 0, ws, Sl, Ml, cs, Tc, Mh, Rh; function Ph() { } Ph.__reactDisabledLog = !0; function Ca() { { if (vn === 0) { ws = console.log, Sl = console.info, Ml = console.warn, cs = console.error, Tc = console.group, Mh = console.groupCollapsed, Rh = console.groupEnd; var A = { configurable: !0, enumerable: !0, value: Ph, writable: !0 }; Object.defineProperties(console, { info: A, log: A, warn: A, error: A, group: A, groupCollapsed: A, groupEnd: A }); } vn++; } } function Rl() { { if (vn--, vn === 0) { var A = { configurable: !0, enumerable: !0, writable: !0 }; Object.defineProperties(console, { log: h({}, A, { value: ws }), info: h({}, A, { value: Sl }), warn: h({}, A, { value: Ml }), error: h({}, A, { value: cs }), group: h({}, A, { value: Tc }), groupCollapsed: h({}, A, { value: Mh }), groupEnd: h({}, A, { value: Rh }) }); } vn < 0 && _("disabledDepth fell below zero. This is a bug in React. Please file an issue."); } } var vo = p.ReactCurrentDispatcher, Au; function qo(A, E, O) { { if (Au === void 0) try { throw Error(); } catch (X) { var H = X.stack.trim().match(/\n( *(at )?)/); Au = H && H[1] || ""; } return ` ` + Au + A; } } var yu = !1, Cu; { var Sc = typeof WeakMap == "function" ? WeakMap : Map; Cu = new Sc(); } function Mc(A, E) { if (!A || yu) return ""; { var O = Cu.get(A); if (O !== void 0) return O; } var H; yu = !0; var X = Error.prepareStackTrace; Error.prepareStackTrace = void 0; var oe; oe = vo.current, vo.current = null, Ca(); try { if (E) { var _e = function() { throw Error(); }; if (Object.defineProperty(_e.prototype, "props", { set: function() { throw Error(); } }), typeof Reflect == "object" && Reflect.construct) { try { Reflect.construct(_e, []); } catch (Mi) { H = Mi; } Reflect.construct(A, [], _e); } else { try { _e.call(); } catch (Mi) { H = Mi; } A.call(_e.prototype); } } else { try { throw Error(); } catch (Mi) { H = Mi; } A(); } } catch (Mi) { if (Mi && H && typeof Mi.stack == "string") { for (var xe = Mi.stack.split(` `), Ne = H.stack.split(` `), tt = xe.length - 1, lt = Ne.length - 1; tt >= 1 && lt >= 0 && xe[tt] !== Ne[lt]; ) lt--; for (; tt >= 1 && lt >= 0; tt--, lt--) if (xe[tt] !== Ne[lt]) { if (tt !== 1 || lt !== 1) do if (tt--, lt--, lt < 0 || xe[tt] !== Ne[lt]) { var Lt = ` ` + xe[tt].replace(" at new ", " at "); return typeof A == "function" && Cu.set(A, Lt), Lt; } while (tt >= 1 && lt >= 0); break; } } } finally { yu = !1, vo.current = oe, Rl(), Error.prepareStackTrace = X; } var _t = A ? A.displayName || A.name : "", Ht = _t ? 
qo(_t) : ""; return typeof A == "function" && Cu.set(A, Ht), Ht; } function Pl(A, E, O) { return Mc(A, !0); } function _l(A, E, O) { return Mc(A, !1); } function Ih(A) { var E = A.prototype; return !!(E && E.isReactComponent); } function xu(A, E, O) { if (A == null) return ""; if (typeof A == "function") return Mc(A, Ih(A)); if (typeof A == "string") return qo(A); switch (A) { case cn: return qo("Suspense"); case st: return qo("SuspenseList"); } if (typeof A == "object") switch (A.$$typeof) { case _i: return _l(A.render); case qt: return xu(A.type, E, O); case Je: return _l(A._render); case wi: { var H = A, X = H._payload, oe = H._init; try { return xu(oe(X), E, O); } catch { } } } return ""; } function ju(A) { switch (A._debugOwner && A._debugOwner.type, A._debugSource, A.tag) { case R: return qo(A.type); case le: return qo("Lazy"); case ee: return qo("Suspense"); case j: return qo("SuspenseList"); case C: case b: case q: return _l(A.type); case U: return _l(A.type.render); case pe: return _l(A.type._render); case x: return Pl(A.type); default: return ""; } } function Od(A) { try { var E = "", O = A; do E += ju(O), O = O.return; while (O); return E; } catch (H) { return ` Error generating stack: ` + H.message + ` ` + H.stack; } } function Ql(A, E, O) { var H = E.displayName || E.name || ""; return A.displayName || (H !== "" ? O + "(" + H + ")" : O); } function jf(A) { return A.displayName || "Context"; } function Zi(A) { if (A == null) return null; if (typeof A.tag == "number" && _("Received an unexpected object in getComponentName(). This is likely a bug in React. Please file an issue."), typeof A == "function") return A.displayName || A.name || null; if (typeof A == "string") return A; switch (A) { case Ts: return "Fragment"; case rn: return "Portal"; case as: return "Profiler"; case Zs: return "StrictMode"; case cn: return "Suspense"; case st: return "SuspenseList"; } if (typeof A == "object") switch (A.$$typeof) { case zi: var E = A; return jf(E) + ".Consumer"; case ui: var O = A; return jf(O._context) + ".Provider"; case _i: return Ql(A, A.render, "ForwardRef"); case qt: return Zi(A.type); case Je: return Zi(A._render); case wi: { var H = A, X = H._payload, oe = H._init; try { return Zi(oe(X)); } catch { return null; } } } return null; } var An = p.ReactDebugCurrentFrame, Ao = null, $l = !1; function Xu() { { if (Ao === null) return null; var A = Ao._debugOwner; if (A !== null && typeof A < "u") return Zi(A.type); } return null; } function Ws() { return Ao === null ? "" : Od(Ao); } function Va() { An.getCurrentStack = null, Ao = null, $l = !1; } function Pa(A) { An.getCurrentStack = Ws, Ao = A, $l = !1; } function kr(A) { $l = A; } function Dh() { return $l; } function qs(A) { return "" + A; } function Qa(A) { switch (typeof A) { case "boolean": case "number": case "object": case "string": case "undefined": return A; default: return ""; } } var Jo = { button: !0, checkbox: !0, image: !0, hidden: !0, radio: !0, reset: !0, submit: !0 }; function Kc(A, E) { Jo[E.type] || E.onChange || E.onInput || E.readOnly || E.disabled || E.value == null || _("You provided a `value` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultValue`. Otherwise, set either `onChange` or `readOnly`."), E.onChange || E.readOnly || E.disabled || E.checked == null || _("You provided a `checked` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultChecked`. 
Otherwise, set either `onChange` or `readOnly`."); } function Ia(A) { var E = A.type, O = A.nodeName; return O && O.toLowerCase() === "input" && (E === "checkbox" || E === "radio"); } function un(A) { return A._valueTracker; } function Oh(A) { A._valueTracker = null; } function Rc(A) { var E = ""; return A && (Ia(A) ? E = A.checked ? "true" : "false" : E = A.value), E; } function Oe(A) { var E = Ia(A) ? "checked" : "value", O = Object.getOwnPropertyDescriptor(A.constructor.prototype, E), H = "" + A[E]; if (!(A.hasOwnProperty(E) || typeof O > "u" || typeof O.get != "function" || typeof O.set != "function")) { var X = O.get, oe = O.set; Object.defineProperty(A, E, { configurable: !0, get: function() { return X.call(this); }, set: function(xe) { H = "" + xe, oe.call(this, xe); } }), Object.defineProperty(A, E, { enumerable: O.enumerable }); var _e = { getValue: function() { return H; }, setValue: function(xe) { H = "" + xe; }, stopTracking: function() { Oh(A), delete A[E]; } }; return _e; } } function pt(A) { un(A) || (A._valueTracker = Oe(A)); } function zt(A) { if (!A) return !1; var E = un(A); if (!E) return !0; var O = E.getValue(), H = Rc(A); return H !== O ? (E.setValue(H), !0) : !1; } function Jt(A) { if (A = A || (typeof document < "u" ? document : void 0), typeof A > "u") return null; try { return A.activeElement || A.body; } catch { return A.body; } } var qi = !1, As = !1, Wr = !1, Jr = !1; function fn(A) { var E = A.type === "checkbox" || A.type === "radio"; return E ? A.checked != null : A.value != null; } function Cs(A, E) { var O = A, H = E.checked, X = h({}, E, { defaultChecked: void 0, defaultValue: void 0, value: void 0, checked: H ?? O._wrapperState.initialChecked }); return X; } function $n(A, E) { Kc("input", E), E.checked !== void 0 && E.defaultChecked !== void 0 && !As && (_("%s contains an input of type %s with both checked and defaultChecked props. Input elements must be either controlled or uncontrolled (specify either the checked prop, or the defaultChecked prop, but not both). Decide between using a controlled or uncontrolled input element and remove one of these props. More info: https://reactjs.org/link/controlled-components", Xu() || "A component", E.type), As = !0), E.value !== void 0 && E.defaultValue !== void 0 && !qi && (_("%s contains an input of type %s with both value and defaultValue props. Input elements must be either controlled or uncontrolled (specify either the value prop, or the defaultValue prop, but not both). Decide between using a controlled or uncontrolled input element and remove one of these props. More info: https://reactjs.org/link/controlled-components", Xu() || "A component", E.type), qi = !0); var O = A, H = E.defaultValue == null ? "" : E.defaultValue; O._wrapperState = { initialChecked: E.checked != null ? E.checked : E.defaultChecked, initialValue: Qa(E.value != null ? E.value : H), controlled: fn(E) }; } function Un(A, E) { var O = A, H = E.checked; H != null && Li(O, "checked", H, !1); } function ka(A, E) { var O = A; { var H = fn(E); !O._wrapperState.controlled && H && !Jr && (_("A component is changing an uncontrolled input to be controlled. This is likely caused by the value changing from undefined to a defined value, which should not happen. Decide between using a controlled or uncontrolled input element for the lifetime of the component. More info: https://reactjs.org/link/controlled-components"), Jr = !0), O._wrapperState.controlled && !H && !Wr && (_("A component is changing a controlled input to be uncontrolled. 
This is likely caused by the value changing from a defined to undefined, which should not happen. Decide between using a controlled or uncontrolled input element for the lifetime of the component. More info: https://reactjs.org/link/controlled-components"), Wr = !0); } Un(A, E); var X = Qa(E.value), oe = E.type; if (X != null) oe === "number" ? (X === 0 && O.value === "" || // We explicitly want to coerce to number here if possible. // eslint-disable-next-line O.value != X) && (O.value = qs(X)) : O.value !== qs(X) && (O.value = qs(X)); else if (oe === "submit" || oe === "reset") { O.removeAttribute("value"); return; } E.hasOwnProperty("value") ? Il(O, E.type, X) : E.hasOwnProperty("defaultValue") && Il(O, E.type, Qa(E.defaultValue)), E.checked == null && E.defaultChecked != null && (O.defaultChecked = !!E.defaultChecked); } function lo(A, E, O) { var H = A; if (E.hasOwnProperty("value") || E.hasOwnProperty("defaultValue")) { var X = E.type, oe = X === "submit" || X === "reset"; if (oe && (E.value === void 0 || E.value === null)) return; var _e = qs(H._wrapperState.initialValue); O || _e !== H.value && (H.value = _e), H.defaultValue = _e; } var xe = H.name; xe !== "" && (H.name = ""), H.defaultChecked = !H.defaultChecked, H.defaultChecked = !!H._wrapperState.initialChecked, xe !== "" && (H.name = xe); } function Fo(A, E) { var O = A; ka(O, E), sd(O, E); } function sd(A, E) { var O = E.name; if (E.type === "radio" && O != null) { for (var H = A; H.parentNode; ) H = H.parentNode; for (var X = H.querySelectorAll("input[name=" + JSON.stringify("" + O) + '][type="radio"]'), oe = 0; oe < X.length; oe++) { var _e = X[oe]; if (!(_e === A || _e.form !== A.form)) { var xe = hy(_e); if (!xe) throw Error("ReactDOMInput: Mixing React and non-React radio inputs with the same `name` is not supported."); zt(_e), ka(_e, xe); } } } } function Il(A, E, O) { // Focused number inputs synchronize on blur. See ChangeEventPlugin.js (E !== "number" || Jt(A.ownerDocument) !== A) && (O == null ? A.defaultValue = qs(A._wrapperState.initialValue) : A.defaultValue !== qs(O) && (A.defaultValue = qs(O))); } var Me = !1, ut = !1; function kt(A) { var E = ""; return u.Children.forEach(A, function(O) { O != null && (E += O); }), E; } function Zt(A, E) { typeof E.children == "object" && E.children !== null && u.Children.forEach(E.children, function(O) { O != null && (typeof O == "string" || typeof O == "number" || typeof O.type == "string" && (ut || (ut = !0, _("Only strings and numbers are supported as