3D-Renderer-omeka-module/asset/vendor/google/model-viewer.js

/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
/**
* An expression marker with embedded unique key to avoid collision with
* possible text in templates.
*/
`{{lit-${String(Math.random()).slice(2)}}}`;
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
/**
* Our TrustedTypePolicy for HTML which is declared using the html template
* tag function.
*
* That HTML is a developer-authored constant, and is parsed with innerHTML
* before any untrusted expressions have been mixed in. Therefore it is
* considered safe by construction.
*/
window.trustedTypes &&
trustedTypes.createPolicy('lit-html', { createHTML: (s) => s });
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
// Detect event listener options support. If the `capture` property is read
// from the options object, then options are supported. If not, then the third
// argument to add/removeEventListener is interpreted as the boolean capture
// value so we should only pass the `capture` property.
let eventOptionsSupported = false;
// Wrap in an IIFE because MS Edge <= v41 does not support try/catch blocks
// placed directly in the body of a module.
(() => {
try {
const options = {
get capture() {
eventOptionsSupported = true;
return false;
}
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
window.addEventListener('test', options, options);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
window.removeEventListener('test', options, options);
}
catch (_e) {
// event options not supported
}
})();
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
// IMPORTANT: do not change the property name or the assignment expression.
// This line will be used in regexes to search for lit-html usage.
// TODO(justinfagnani): inject version number at build time
if (typeof window !== 'undefined') {
(window['litHtmlVersions'] || (window['litHtmlVersions'] = [])).push('1.3.0');
}
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
if (typeof window.ShadyCSS === 'undefined') ;
else if (typeof window.ShadyCSS.prepareTemplateDom === 'undefined') {
console.warn(`Incompatible ShadyCSS version detected. ` +
`Please update to at least @webcomponents/webcomponentsjs@2.0.2 and ` +
`@webcomponents/shadycss@1.3.1.`);
}
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
var _a$d;
/**
* Use this module if you want to create your own base class extending
* [[UpdatingElement]].
* @packageDocumentation
*/
/*
* When using Closure Compiler, JSCompiler_renameProperty(property, object) is
* replaced at compile time by the munged name for object[property]. We cannot
* alias this function, so we have to use a small shim that has the same
* behavior when not compiling.
*/
window.JSCompiler_renameProperty =
(prop, _obj) => prop;
const defaultConverter = {
toAttribute(value, type) {
switch (type) {
case Boolean:
return value ? '' : null;
case Object:
case Array:
// if the value is `null` or `undefined` pass this through
// to allow removing/no change behavior.
return value == null ? value : JSON.stringify(value);
}
return value;
},
fromAttribute(value, type) {
switch (type) {
case Boolean:
return value !== null;
case Number:
return value === null ? null : Number(value);
case Object:
case Array:
return JSON.parse(value);
}
return value;
}
};
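/*
 * Illustrative sketch (not part of the bundled library): how the default
 * converter above round-trips values between attributes and properties.
 * The sample values are hypothetical.
 *
 *   defaultConverter.toAttribute(true, Boolean);        // ''   (attribute present)
 *   defaultConverter.toAttribute(false, Boolean);       // null (attribute removed)
 *   defaultConverter.fromAttribute('', Boolean);        // true
 *   defaultConverter.fromAttribute(null, Boolean);      // false
 *   defaultConverter.toAttribute({a: 1}, Object);       // '{"a":1}'
 *   defaultConverter.fromAttribute('{"a":1}', Object);  // {a: 1}
 *   defaultConverter.fromAttribute('42', Number);       // 42
 */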
/**
* Change function that returns true if `value` is different from `oldValue`.
* This method is used as the default for a property's `hasChanged` function.
*/
const notEqual = (value, old) => {
// This ensures (old==NaN, value==NaN) always returns false
return old !== value && (old === old || value === value);
};
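/*
 * Example of the NaN handling above (illustrative only): a plain `!==` check
 * would report NaN as always changed, since NaN !== NaN.
 *
 *   notEqual(1, 2);     // true  -> triggers an update
 *   notEqual(1, 1);     // false -> no update
 *   notEqual(NaN, NaN); // false -> no update, because both `old === old`
 *                       //          and `value === value` are false
 */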
const defaultPropertyDeclaration = {
attribute: true,
type: String,
converter: defaultConverter,
reflect: false,
hasChanged: notEqual
};
const STATE_HAS_UPDATED = 1;
const STATE_UPDATE_REQUESTED = 1 << 2;
const STATE_IS_REFLECTING_TO_ATTRIBUTE = 1 << 3;
const STATE_IS_REFLECTING_TO_PROPERTY = 1 << 4;
/**
* The Closure JS Compiler doesn't currently have good support for static
* property semantics where "this" is dynamic (e.g.
* https://github.com/google/closure-compiler/issues/3177 and others) so we use
* this hack to bypass any rewriting by the compiler.
*/
const finalized = 'finalized';
/**
* Base element class which manages element properties and attributes. When
* properties change, the `update` method is asynchronously called. This method
* should be supplied by subclassers to render updates as desired.
* @noInheritDoc
*/
class UpdatingElement extends HTMLElement {
constructor() {
super();
this.initialize();
}
/**
* Returns a list of attributes corresponding to the registered properties.
* @nocollapse
*/
static get observedAttributes() {
// note: piggybacking on this to ensure we're finalized.
this.finalize();
const attributes = [];
// Use forEach so this works even if for/of loops are compiled to for loops
// expecting arrays
this._classProperties.forEach((v, p) => {
const attr = this._attributeNameForProperty(p, v);
if (attr !== undefined) {
this._attributeToPropertyMap.set(attr, p);
attributes.push(attr);
}
});
return attributes;
}
/**
* Ensures the private `_classProperties` property metadata is created.
* In addition to `finalize` this is also called in `createProperty` to
* ensure the `@property` decorator can add property metadata.
*/
/** @nocollapse */
static _ensureClassProperties() {
// ensure private storage for property declarations.
if (!this.hasOwnProperty(JSCompiler_renameProperty('_classProperties', this))) {
this._classProperties = new Map();
// NOTE: Workaround IE11 not supporting Map constructor argument.
const superProperties = Object.getPrototypeOf(this)._classProperties;
if (superProperties !== undefined) {
superProperties.forEach((v, k) => this._classProperties.set(k, v));
}
}
}
/**
* Creates a property accessor on the element prototype if one does not exist
* and stores a PropertyDeclaration for the property with the given options.
* The property setter calls the property's `hasChanged` property option
* or uses a strict identity check to determine whether or not to request
* an update.
*
* This method may be overridden to customize properties; however,
* when doing so, it's important to call `super.createProperty` to ensure
* the property is set up correctly. This method calls
* `getPropertyDescriptor` internally to get a descriptor to install.
* To customize what properties do when they are get or set, override
* `getPropertyDescriptor`. To customize the options for a property,
* implement `createProperty` like this:
*
* static createProperty(name, options) {
* options = Object.assign(options, {myOption: true});
* super.createProperty(name, options);
* }
*
* @nocollapse
*/
static createProperty(name, options = defaultPropertyDeclaration) {
// Note, since this can be called by the `@property` decorator which
// is called before `finalize`, we ensure storage exists for property
// metadata.
this._ensureClassProperties();
this._classProperties.set(name, options);
// Do not generate an accessor if the prototype already has one, since
// it would be lost otherwise and that would never be the user's intention.
// Instead, we expect users to call `requestUpdate` themselves from
// user-defined accessors. Note that if the superclass has an accessor, we
// will still overwrite it.
if (options.noAccessor || this.prototype.hasOwnProperty(name)) {
return;
}
const key = typeof name === 'symbol' ? Symbol() : `__${name}`;
const descriptor = this.getPropertyDescriptor(name, key, options);
if (descriptor !== undefined) {
Object.defineProperty(this.prototype, name, descriptor);
}
}
/**
* Returns a property descriptor to be defined on the given named property.
* If no descriptor is returned, the property will not become an accessor.
* For example,
*
* class MyElement extends LitElement {
* static getPropertyDescriptor(name, key, options) {
* const defaultDescriptor =
* super.getPropertyDescriptor(name, key, options);
* const setter = defaultDescriptor.set;
* return {
* get: defaultDescriptor.get,
* set(value) {
* setter.call(this, value);
* // custom action.
* },
* configurable: true,
* enumerable: true
* }
* }
* }
*
* @nocollapse
*/
static getPropertyDescriptor(name, key, options) {
return {
// tslint:disable-next-line:no-any no symbol in index
get() {
return this[key];
},
set(value) {
const oldValue = this[name];
this[key] = value;
this
.requestUpdateInternal(name, oldValue, options);
},
configurable: true,
enumerable: true
};
}
/**
* Returns the property options associated with the given property.
* These options are defined with a PropertyDeclaration via the `properties`
* object or the `@property` decorator and are registered in
* `createProperty(...)`.
*
* Note, this method should be considered "final" and not overridden. To
* customize the options for a given property, override `createProperty`.
*
* @nocollapse
* @final
*/
static getPropertyOptions(name) {
return this._classProperties && this._classProperties.get(name) ||
defaultPropertyDeclaration;
}
/**
* Creates property accessors for registered properties and ensures
* any superclasses are also finalized.
* @nocollapse
*/
static finalize() {
// finalize any superclasses
const superCtor = Object.getPrototypeOf(this);
if (!superCtor.hasOwnProperty(finalized)) {
superCtor.finalize();
}
this[finalized] = true;
this._ensureClassProperties();
// initialize Map populated in observedAttributes
this._attributeToPropertyMap = new Map();
// make any properties
// Note, only process "own" properties since this element will inherit
// any properties defined on the superClass, and finalization ensures
// the entire prototype chain is finalized.
if (this.hasOwnProperty(JSCompiler_renameProperty('properties', this))) {
const props = this.properties;
// support symbols in properties (IE11 does not support this)
const propKeys = [
...Object.getOwnPropertyNames(props),
...(typeof Object.getOwnPropertySymbols === 'function') ?
Object.getOwnPropertySymbols(props) :
[]
];
// This for/of is ok because propKeys is an array
for (const p of propKeys) {
// note, use of `any` is due to TypeScript's lack of support for symbols in
// index types
// tslint:disable-next-line:no-any no symbol in index
this.createProperty(p, props[p]);
}
}
}
/**
* Returns the property name for the given attribute `name`.
* @nocollapse
*/
static _attributeNameForProperty(name, options) {
const attribute = options.attribute;
return attribute === false ?
undefined :
(typeof attribute === 'string' ?
attribute :
(typeof name === 'string' ? name.toLowerCase() : undefined));
}
/**
* Returns true if a property should request an update.
* Called when a property value is set and uses the `hasChanged`
* option for the property if present or a strict identity check.
* @nocollapse
*/
static _valueHasChanged(value, old, hasChanged = notEqual) {
return hasChanged(value, old);
}
/**
* Returns the property value for the given attribute value.
* Called via the `attributeChangedCallback` and uses the property's
* `converter` or `converter.fromAttribute` property option.
* @nocollapse
*/
static _propertyValueFromAttribute(value, options) {
const type = options.type;
const converter = options.converter || defaultConverter;
const fromAttribute = (typeof converter === 'function' ? converter : converter.fromAttribute);
return fromAttribute ? fromAttribute(value, type) : value;
}
/**
* Returns the attribute value for the given property value. If this
* returns undefined, the property will *not* be reflected to an attribute.
* If this returns null, the attribute will be removed, otherwise the
* attribute will be set to the value.
* This uses the property's `reflect` and `type.toAttribute` property options.
* @nocollapse
*/
static _propertyValueToAttribute(value, options) {
if (options.reflect === undefined) {
return;
}
const type = options.type;
const converter = options.converter;
const toAttribute = converter && converter.toAttribute ||
defaultConverter.toAttribute;
return toAttribute(value, type);
}
/**
* Performs element initialization. By default captures any pre-set values for
* registered properties.
*/
initialize() {
this._updateState = 0;
this._updatePromise =
new Promise((res) => this._enableUpdatingResolver = res);
this._changedProperties = new Map();
this._saveInstanceProperties();
// ensures first update will be caught by an early access of
// `updateComplete`
this.requestUpdateInternal();
}
/**
* Fixes any properties set on the instance before upgrade time.
* Otherwise these would shadow the accessor and break these properties.
* The properties are stored in a Map which is played back after the
* constructor runs. Note, on very old versions of Safari (<=9) or Chrome
* (<=41), properties created for native platform properties like `id` or
* `name` may not have default values set in the element constructor. On
* these browsers native properties appear on instances and therefore their
* default value will overwrite any element default (e.g. if the element sets
* this.id = 'id' in the constructor, the 'id' will become '' since this is
* the native platform default).
*/
_saveInstanceProperties() {
// Use forEach so this works even if for/of loops are compiled to for loops
// expecting arrays
this.constructor
._classProperties.forEach((_v, p) => {
if (this.hasOwnProperty(p)) {
const value = this[p];
delete this[p];
if (!this._instanceProperties) {
this._instanceProperties = new Map();
}
this._instanceProperties.set(p, value);
}
});
}
/**
* Applies previously saved instance properties.
*/
_applyInstanceProperties() {
// Use forEach so this works even if for/of loops are compiled to for loops
// expecting arrays
// tslint:disable-next-line:no-any
this._instanceProperties.forEach((v, p) => this[p] = v);
this._instanceProperties = undefined;
}
connectedCallback() {
// Ensure first connection completes an update. Updates cannot complete
// before connection.
this.enableUpdating();
}
enableUpdating() {
if (this._enableUpdatingResolver !== undefined) {
this._enableUpdatingResolver();
this._enableUpdatingResolver = undefined;
}
}
/**
* Allows for `super.disconnectedCallback()` in extensions while
* reserving the possibility of making non-breaking feature additions
* when disconnecting at some point in the future.
*/
disconnectedCallback() {
}
/**
* Synchronizes property values when attributes change.
*/
attributeChangedCallback(name, old, value) {
if (old !== value) {
this._attributeToProperty(name, value);
}
}
_propertyToAttribute(name, value, options = defaultPropertyDeclaration) {
const ctor = this.constructor;
const attr = ctor._attributeNameForProperty(name, options);
if (attr !== undefined) {
const attrValue = ctor._propertyValueToAttribute(value, options);
// an undefined value does not change the attribute.
if (attrValue === undefined) {
return;
}
// Track if the property is being reflected to avoid
// setting the property again via `attributeChangedCallback`. Note:
// 1. this takes advantage of the fact that the callback is synchronous.
// 2. will behave incorrectly if multiple attributes are in the reaction
// stack at time of calling. However, since we process attributes
// in `update` this should not be possible (or an extreme corner case
// that we'd like to discover).
// mark state reflecting
this._updateState = this._updateState | STATE_IS_REFLECTING_TO_ATTRIBUTE;
if (attrValue == null) {
this.removeAttribute(attr);
}
else {
this.setAttribute(attr, attrValue);
}
// mark state not reflecting
this._updateState = this._updateState & ~STATE_IS_REFLECTING_TO_ATTRIBUTE;
}
}
_attributeToProperty(name, value) {
// Use tracking info to avoid deserializing attribute value if it was
// just set from a property setter.
if (this._updateState & STATE_IS_REFLECTING_TO_ATTRIBUTE) {
return;
}
const ctor = this.constructor;
// Note, hint this as an `AttributeMap` so closure clearly understands
// the type; it has issues with tracking types through statics
// tslint:disable-next-line:no-unnecessary-type-assertion
const propName = ctor._attributeToPropertyMap.get(name);
if (propName !== undefined) {
const options = ctor.getPropertyOptions(propName);
// mark state reflecting
this._updateState = this._updateState | STATE_IS_REFLECTING_TO_PROPERTY;
this[propName] =
// tslint:disable-next-line:no-any
ctor._propertyValueFromAttribute(value, options);
// mark state not reflecting
this._updateState = this._updateState & ~STATE_IS_REFLECTING_TO_PROPERTY;
}
}
/**
* This protected version of `requestUpdate` does not access or return the
* `updateComplete` promise. This promise can be overridden and is therefore
* not free to access.
*/
requestUpdateInternal(name, oldValue, options) {
let shouldRequestUpdate = true;
// If we have a property key, perform property update steps.
if (name !== undefined) {
const ctor = this.constructor;
options = options || ctor.getPropertyOptions(name);
if (ctor._valueHasChanged(this[name], oldValue, options.hasChanged)) {
if (!this._changedProperties.has(name)) {
this._changedProperties.set(name, oldValue);
}
// Add to reflecting properties set.
// Note, it's important that every change has a chance to add the
// property to `_reflectingProperties`. This ensures setting
// attribute + property reflects correctly.
if (options.reflect === true &&
!(this._updateState & STATE_IS_REFLECTING_TO_PROPERTY)) {
if (this._reflectingProperties === undefined) {
this._reflectingProperties = new Map();
}
this._reflectingProperties.set(name, options);
}
}
else {
// Abort the request if the property should not be considered changed.
shouldRequestUpdate = false;
}
}
if (!this._hasRequestedUpdate && shouldRequestUpdate) {
this._updatePromise = this._enqueueUpdate();
}
}
/**
* Requests an update which is processed asynchronously. This should
* be called when an element should update based on some state not triggered
* by setting a property. In this case, pass no arguments. It should also be
* called when manually implementing a property setter. In this case, pass the
* property `name` and `oldValue` to ensure that any configured property
* options are honored. Returns the `updateComplete` Promise which is resolved
* when the update completes.
*
* @param name {PropertyKey} (optional) name of requesting property
* @param oldValue {any} (optional) old value of requesting property
* @returns {Promise} A Promise that is resolved when the update completes.
*/
requestUpdate(name, oldValue) {
this.requestUpdateInternal(name, oldValue);
return this.updateComplete;
}
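/*
 * Sketch of a manual setter that honors the contract documented above
 * (illustrative only; `tempo` and `_tempo` are hypothetical members of an
 * UpdatingElement subclass, not part of this bundle):
 *
 *   set tempo(value) {
 *     const oldValue = this._tempo;
 *     this._tempo = value;
 *     this.requestUpdate('tempo', oldValue);
 *   }
 *   get tempo() {
 *     return this._tempo;
 *   }
 */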
/**
* Sets up the element to asynchronously update.
*/
async _enqueueUpdate() {
this._updateState = this._updateState | STATE_UPDATE_REQUESTED;
try {
// Ensure any previous update has resolved before updating.
// This `await` also ensures that property changes are batched.
await this._updatePromise;
}
catch (e) {
// Ignore any previous errors. We only care that the previous cycle is
// done. Any error should have been handled in the previous update.
}
const result = this.performUpdate();
// If `performUpdate` returns a Promise, we await it. This is done to
// enable coordinating updates with a scheduler. Note, the result is
// checked to avoid delaying an additional microtask unless we need to.
if (result != null) {
await result;
}
return !this._hasRequestedUpdate;
}
get _hasRequestedUpdate() {
return (this._updateState & STATE_UPDATE_REQUESTED);
}
get hasUpdated() {
return (this._updateState & STATE_HAS_UPDATED);
}
/**
* Performs an element update. Note, if an exception is thrown during the
* update, `firstUpdated` and `updated` will not be called.
*
* You can override this method to change the timing of updates. If this
* method is overridden, `super.performUpdate()` must be called.
*
* For instance, to schedule updates to occur just before the next frame:
*
* ```
* protected async performUpdate(): Promise<unknown> {
* await new Promise((resolve) => requestAnimationFrame(() => resolve()));
* super.performUpdate();
* }
* ```
*/
performUpdate() {
// Abort any update if one is not pending when this is called.
// This can happen if `performUpdate` is called early to "flush"
// the update.
if (!this._hasRequestedUpdate) {
return;
}
// Mixin instance properties once, if they exist.
if (this._instanceProperties) {
this._applyInstanceProperties();
}
let shouldUpdate = false;
const changedProperties = this._changedProperties;
try {
shouldUpdate = this.shouldUpdate(changedProperties);
if (shouldUpdate) {
this.update(changedProperties);
}
else {
this._markUpdated();
}
}
catch (e) {
// Prevent `firstUpdated` and `updated` from running when there's an
// update exception.
shouldUpdate = false;
// Ensure element can accept additional updates after an exception.
this._markUpdated();
throw e;
}
if (shouldUpdate) {
if (!(this._updateState & STATE_HAS_UPDATED)) {
this._updateState = this._updateState | STATE_HAS_UPDATED;
this.firstUpdated(changedProperties);
}
this.updated(changedProperties);
}
}
_markUpdated() {
this._changedProperties = new Map();
this._updateState = this._updateState & ~STATE_UPDATE_REQUESTED;
}
/**
* Returns a Promise that resolves when the element has completed updating.
* The Promise value is a boolean that is `true` if the element completed the
* update without triggering another update. The Promise result is `false` if
* a property was set inside `updated()`. If the Promise is rejected, an
* exception was thrown during the update.
*
* To await additional asynchronous work, override the `_getUpdateComplete`
* method. For example, it is sometimes useful to await a rendered element
* before fulfilling this Promise. To do this, first await
* `super._getUpdateComplete()`, then any subsequent state.
*
* @returns {Promise} The Promise returns a boolean that indicates if the
* update resolved without triggering another update.
*/
get updateComplete() {
return this._getUpdateComplete();
}
/**
* Override point for the `updateComplete` promise.
*
* It is not safe to override the `updateComplete` getter directly due to a
* limitation in TypeScript which means it is not possible to call a
* superclass getter (e.g. `super.updateComplete.then(...)`) when the target
* language is ES5 (https://github.com/microsoft/TypeScript/issues/338).
* This method should be overridden instead. For example:
*
* class MyElement extends LitElement {
* async _getUpdateComplete() {
* await super._getUpdateComplete();
* await this._myChild.updateComplete;
* }
* }
*/
_getUpdateComplete() {
return this._updatePromise;
}
/**
* Controls whether or not `update` should be called when the element requests
* an update. By default, this method always returns `true`, but this can be
* customized to control when to update.
*
* @param _changedProperties Map of changed properties with old values
*/
shouldUpdate(_changedProperties) {
return true;
}
/**
* Updates the element. This method reflects property values to attributes.
* It can be overridden to render and keep updated element DOM.
* Setting properties inside this method will *not* trigger
* another update.
*
* @param _changedProperties Map of changed properties with old values
*/
update(_changedProperties) {
if (this._reflectingProperties !== undefined &&
this._reflectingProperties.size > 0) {
// Use forEach so this works even if for/of loops are compiled to for
// loops expecting arrays
this._reflectingProperties.forEach((v, k) => this._propertyToAttribute(k, this[k], v));
this._reflectingProperties = undefined;
}
this._markUpdated();
}
/**
* Invoked whenever the element is updated. Implement to perform
* post-updating tasks via DOM APIs, for example, focusing an element.
*
* Setting properties inside this method will trigger the element to update
* again after this update cycle completes.
*
* @param _changedProperties Map of changed properties with old values
*/
updated(_changedProperties) {
}
/**
* Invoked when the element is first updated. Implement to perform one time
* work on the element after update.
*
* Setting properties inside this method will trigger the element to update
* again after this update cycle completes.
*
* @param _changedProperties Map of changed properties with old values
*/
firstUpdated(_changedProperties) {
}
}
_a$d = finalized;
/**
* Marks class as having finished creating properties.
*/
UpdatingElement[_a$d] = true;
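/*
 * Minimal usage sketch for the base class above (illustrative only; the
 * element name and `degrees` property are hypothetical). Subclasses declare
 * properties statically and supply `update` to render, calling
 * `super.update` so reflection and bookkeeping still run:
 *
 *   class ThermostatElement extends UpdatingElement {
 *     static get properties() {
 *       return {degrees: {type: Number, reflect: true}};
 *     }
 *     constructor() {
 *       super();
 *       this.degrees = 20;
 *     }
 *     update(changedProperties) {
 *       super.update(changedProperties);
 *       this.textContent = `${this.degrees} degrees`;
 *     }
 *   }
 *   customElements.define('thermostat-element', ThermostatElement);
 */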
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
const standardProperty = (options, element) => {
// When decorating an accessor, pass it through and add property metadata.
// Note, the `hasOwnProperty` check in `createProperty` ensures we don't
// stomp over the user's accessor.
if (element.kind === 'method' && element.descriptor &&
!('value' in element.descriptor)) {
return Object.assign(Object.assign({}, element), { finisher(clazz) {
clazz.createProperty(element.key, options);
} });
}
else {
// createProperty() takes care of defining the property, but we still
// must return some kind of descriptor, so return a descriptor for an
// unused prototype field. The finisher calls createProperty().
return {
kind: 'field',
key: Symbol(),
placement: 'own',
descriptor: {},
// When @babel/plugin-proposal-decorators implements initializers,
// do this instead of the initializer below. See:
// https://github.com/babel/babel/issues/9260 extras: [
// {
// kind: 'initializer',
// placement: 'own',
// initializer: descriptor.initializer,
// }
// ],
initializer() {
if (typeof element.initializer === 'function') {
this[element.key] = element.initializer.call(this);
}
},
finisher(clazz) {
clazz.createProperty(element.key, options);
}
};
}
};
const legacyProperty = (options, proto, name) => {
proto.constructor
.createProperty(name, options);
};
/**
* A property decorator which creates a LitElement property which reflects a
* corresponding attribute value. A [[`PropertyDeclaration`]] may optionally be
* supplied to configure property features.
*
* This decorator should only be used for public fields. Private or protected
* fields should use the [[`internalProperty`]] decorator.
*
* @example
* ```ts
* class MyElement {
* @property({ type: Boolean })
* clicked = false;
* }
* ```
* @category Decorator
* @ExportDecoratedItems
*/
function property(options) {
// tslint:disable-next-line:no-any decorator
return (protoOrDescriptor, name) => (name !== undefined) ?
legacyProperty(options, protoOrDescriptor, name) :
standardProperty(options, protoOrDescriptor);
}
/**
@license
Copyright (c) 2019 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
* Whether the current browser supports `adoptedStyleSheets`.
*/
(window.ShadowRoot) &&
(window.ShadyCSS === undefined || window.ShadyCSS.nativeShadow) &&
('adoptedStyleSheets' in Document.prototype) &&
('replace' in CSSStyleSheet.prototype);
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
// IMPORTANT: do not change the property name or the assignment expression.
// This line will be used in regexes to search for LitElement usage.
// TODO(justinfagnani): inject version number at build time
(window['litElementVersions'] || (window['litElementVersions'] = []))
.push('2.4.0');
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// NOTE(cdata): The HAS_WEBXR_* constants can be enabled in Chrome by turning on
// the appropriate flags. However, just because we have the API does not
// guarantee that AR will work.
const HAS_WEBXR_DEVICE_API = navigator.xr != null &&
self.XRSession != null && navigator.xr.isSessionSupported != null;
const HAS_WEBXR_HIT_TEST_API = HAS_WEBXR_DEVICE_API && self.XRSession.prototype.requestHitTestSource;
const HAS_RESIZE_OBSERVER = self.ResizeObserver != null;
const HAS_INTERSECTION_OBSERVER = self.IntersectionObserver != null;
const IS_WEBXR_AR_CANDIDATE = HAS_WEBXR_HIT_TEST_API;
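/*
 * Sketch of how flags like these are typically combined with a runtime
 * session check before offering AR (illustrative only; this is not the
 * library's own logic and the function name is hypothetical):
 *
 *   async function canStartWebXRAR() {
 *     if (!IS_WEBXR_AR_CANDIDATE) {
 *       return false;
 *     }
 *     // Even with the APIs present, the device may still refuse the session.
 *     return navigator.xr.isSessionSupported('immersive-ar');
 *   }
 */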
(() => {
const userAgent = navigator.userAgent || navigator.vendor || self.opera;
let check = false;
// eslint-disable-next-line
if (/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i
.test(userAgent) ||
/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i
.test(userAgent.substr(0, 4))) {
check = true;
}
return check;
})();
/\bCrOS\b/.test(navigator.userAgent);
// Disabling offscreen canvas for now because it is slower and has bugs relating
// to janky updates and out of sync frames.
const USE_OFFSCREEN_CANVAS = false;
// Boolean((self as any).OffscreenCanvas) &&
// Boolean((self as any).OffscreenCanvas.prototype.transferToImageBitmap) &&
// !IS_CHROMEOS; // TODO(elalish): file a bug on inverted renders
const IS_ANDROID = /android/i.test(navigator.userAgent);
// Prior to iOS 13, detecting iOS Safari was relatively straightforward.
// As of iOS 13, Safari on iPad (in its default configuration) reports the same
// user-agent string as Safari on desktop MacOS. Strictly speaking, we only care
// about iOS for the purpose of selecting cases where Quick Look is known
// to be supported. However, for API correctness purposes, we must rely on
// known, detectable signals to distinguish iOS Safari from MacOS Safari. At the
// time of this writing, there are no non-iOS/iPadOS Apple devices with
// multi-touch displays.
// @see https://stackoverflow.com/questions/57765958/how-to-detect-ipad-and-ipad-os-version-in-ios-13-and-up
// @see https://forums.developer.apple.com/thread/119186
// @see https://github.com/google/model-viewer/issues/758
const IS_IOS = (/iPad|iPhone|iPod/.test(navigator.userAgent) && !self.MSStream) ||
(navigator.platform === 'MacIntel' && navigator.maxTouchPoints > 1);
const IS_AR_QUICKLOOK_CANDIDATE = (() => {
const tempAnchor = document.createElement('a');
return Boolean(tempAnchor.relList && tempAnchor.relList.supports &&
tempAnchor.relList.supports('ar'));
})();
// @see https://developer.chrome.com/multidevice/user-agent
/Safari\//.test(navigator.userAgent);
const IS_FIREFOX = /firefox/i.test(navigator.userAgent);
const IS_OCULUS = /OculusBrowser/.test(navigator.userAgent);
IS_IOS && /CriOS\//.test(navigator.userAgent);
const IS_SCENEVIEWER_CANDIDATE = IS_ANDROID && !IS_FIREFOX && !IS_OCULUS;
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var CloseIcon = `
<svg xmlns="http://www.w3.org/2000/svg" width="24px" height="24px" viewBox="0 0 24 24" fill="#000000">
<!-- NOTE(cdata): This SVG filter is a stop-gap until we can implement
support for dynamic re-coloring of UI components -->
<defs>
<filter id="drop-shadow" x="-100%" y="-100%" width="300%" height="300%">
<feGaussianBlur in="SourceAlpha" stdDeviation="1"/>
<feOffset dx="0" dy="0" result="offsetblur"/>
<feFlood flood-color="#000000"/>
<feComposite in2="offsetblur" operator="in"/>
<feMerge>
<feMergeNode/>
<feMergeNode in="SourceGraphic"/>
</feMerge>
</filter>
</defs>
<path filter="url(#drop-shadow)" d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/>
<path d="M0 0h24v24H0z" fill="none"/>
</svg>`;
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var ControlsPrompt = `
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="25" height="36">
<defs>
<path id="A" d="M.001.232h24.997V36H.001z" />
</defs>
<g transform="translate(-11 -4)" fill="none" fill-rule="evenodd">
<path fill-opacity="0" fill="#fff" d="M0 0h44v44H0z" />
<g transform="translate(11 3)">
<path d="M8.733 11.165c.04-1.108.766-2.027 1.743-2.307a2.54 2.54 0 0 1 .628-.089c.16 0 .314.017.463.044 1.088.2 1.9 1.092 1.9 2.16v8.88h1.26c2.943-1.39 5-4.45 5-8.025a9.01 9.01 0 0 0-1.9-5.56l-.43-.5c-.765-.838-1.683-1.522-2.712-2-1.057-.49-2.226-.77-3.46-.77s-2.4.278-3.46.77c-1.03.478-1.947 1.162-2.71 2l-.43.5a9.01 9.01 0 0 0-1.9 5.56 9.04 9.04 0 0 0 .094 1.305c.03.21.088.41.13.617l.136.624c.083.286.196.56.305.832l.124.333a8.78 8.78 0 0 0 .509.953l.065.122a8.69 8.69 0 0 0 3.521 3.191l1.11.537v-9.178z" fill-opacity=".5" fill="#e4e4e4" />
<path d="M22.94 26.218l-2.76 7.74c-.172.485-.676.8-1.253.8H12.24c-1.606 0-3.092-.68-3.98-1.82-1.592-2.048-3.647-3.822-6.11-5.27-.095-.055-.15-.137-.152-.23-.004-.1.046-.196.193-.297.56-.393 1.234-.6 1.926-.6a3.43 3.43 0 0 1 .691.069l4.922.994V10.972c0-.663.615-1.203 1.37-1.203s1.373.54 1.373 1.203v9.882h2.953c.273 0 .533.073.757.21l6.257 3.874c.027.017.045.042.07.06.41.296.586.77.426 1.22M4.1 16.614c-.024-.04-.042-.083-.065-.122a8.69 8.69 0 0 1-.509-.953c-.048-.107-.08-.223-.124-.333l-.305-.832c-.058-.202-.09-.416-.136-.624l-.13-.617a9.03 9.03 0 0 1-.094-1.305c0-2.107.714-4.04 1.9-5.56l.43-.5c.764-.84 1.682-1.523 2.71-2 1.058-.49 2.226-.77 3.46-.77s2.402.28 3.46.77c1.03.477 1.947 1.16 2.712 2l.428.5a9 9 0 0 1 1.901 5.559c0 3.577-2.056 6.636-5 8.026h-1.26v-8.882c0-1.067-.822-1.96-1.9-2.16-.15-.028-.304-.044-.463-.044-.22 0-.427.037-.628.09-.977.28-1.703 1.198-1.743 2.306v9.178l-1.11-.537C6.18 19.098 4.96 18 4.1 16.614M22.97 24.09l-6.256-3.874c-.102-.063-.218-.098-.33-.144 2.683-1.8 4.354-4.855 4.354-8.243 0-.486-.037-.964-.104-1.43a9.97 9.97 0 0 0-1.57-4.128l-.295-.408-.066-.092a10.05 10.05 0 0 0-.949-1.078c-.342-.334-.708-.643-1.094-.922-1.155-.834-2.492-1.412-3.94-1.65l-.732-.088-.748-.03a9.29 9.29 0 0 0-1.482.119c-1.447.238-2.786.816-3.94 1.65a9.33 9.33 0 0 0-.813.686 9.59 9.59 0 0 0-.845.877l-.385.437-.36.5-.288.468-.418.778-.04.09c-.593 1.28-.93 2.71-.93 4.222 0 3.832 2.182 7.342 5.56 8.938l1.437.68v4.946L5 25.64a4.44 4.44 0 0 0-.888-.086c-.017 0-.034.003-.05.003-.252.004-.503.033-.75.08a5.08 5.08 0 0 0-.237.056c-.193.046-.382.107-.568.18-.075.03-.15.057-.225.1-.25.114-.494.244-.723.405a1.31 1.31 0 0 0-.566 1.122 1.28 1.28 0 0 0 .645 1.051C4 29.925 5.96 31.614 7.473 33.563a5.06 5.06 0 0 0 .434.491c1.086 1.082 2.656 1.713 4.326 1.715h6.697c.748-.001 1.43-.333 1.858-.872.142-.18.256-.38.336-.602l2.757-7.74c.094-.26.13-.53.112-.794s-.088-.52-.203-.76a2.19 2.19 0 0 0-.821-.91" fill-opacity=".6" fill="#000" />
<path d="M22.444 24.94l-6.257-3.874a1.45 1.45 0 0 0-.757-.211h-2.953v-9.88c0-.663-.616-1.203-1.373-1.203s-1.37.54-1.37 1.203v16.643l-4.922-.994a3.44 3.44 0 0 0-.692-.069 3.35 3.35 0 0 0-1.925.598c-.147.102-.198.198-.194.298.004.094.058.176.153.23 2.462 1.448 4.517 3.22 6.11 5.27.887 1.14 2.373 1.82 3.98 1.82h6.686c.577 0 1.08-.326 1.253-.8l2.76-7.74c.16-.448-.017-.923-.426-1.22-.025-.02-.043-.043-.07-.06z" fill="#fff" />
<g transform="translate(0 .769)">
<mask id="B" fill="#fff">
<use xlink:href="#A" />
</mask>
<path d="M23.993 24.992a1.96 1.96 0 0 1-.111.794l-2.758 7.74c-.08.22-.194.423-.336.602-.427.54-1.11.87-1.857.872h-6.698c-1.67-.002-3.24-.633-4.326-1.715-.154-.154-.3-.318-.434-.49C5.96 30.846 4 29.157 1.646 27.773c-.385-.225-.626-.618-.645-1.05a1.31 1.31 0 0 1 .566-1.122 4.56 4.56 0 0 1 .723-.405l.225-.1a4.3 4.3 0 0 1 .568-.18l.237-.056c.248-.046.5-.075.75-.08.018 0 .034-.003.05-.003.303-.001.597.027.89.086l3.722.752V20.68l-1.436-.68c-3.377-1.596-5.56-5.106-5.56-8.938 0-1.51.336-2.94.93-4.222.015-.03.025-.06.04-.09.127-.267.268-.525.418-.778.093-.16.186-.316.288-.468.063-.095.133-.186.2-.277L3.773 5c.118-.155.26-.29.385-.437.266-.3.544-.604.845-.877a9.33 9.33 0 0 1 .813-.686C6.97 2.167 8.31 1.59 9.757 1.35a9.27 9.27 0 0 1 1.481-.119 8.82 8.82 0 0 1 .748.031c.247.02.49.05.733.088 1.448.238 2.786.816 3.94 1.65.387.28.752.588 1.094.922a9.94 9.94 0 0 1 .949 1.078l.066.092c.102.133.203.268.295.408a9.97 9.97 0 0 1 1.571 4.128c.066.467.103.945.103 1.43 0 3.388-1.67 6.453-4.353 8.243.11.046.227.08.33.144l6.256 3.874c.37.23.645.55.82.9.115.24.185.498.203.76m.697-1.195c-.265-.55-.677-1.007-1.194-1.326l-5.323-3.297c2.255-2.037 3.564-4.97 3.564-8.114 0-2.19-.637-4.304-1.84-6.114-.126-.188-.26-.37-.4-.552-.645-.848-1.402-1.6-2.252-2.204C15.472.91 13.393.232 11.238.232A10.21 10.21 0 0 0 5.23 2.19c-.848.614-1.606 1.356-2.253 2.205-.136.18-.272.363-.398.55C1.374 6.756.737 8.87.737 11.06c0 4.218 2.407 8.08 6.133 9.842l.863.41v3.092l-2.525-.51c-.356-.07-.717-.106-1.076-.106a5.45 5.45 0 0 0-3.14.996c-.653.46-1.022 1.202-.99 1.983a2.28 2.28 0 0 0 1.138 1.872c2.24 1.318 4.106 2.923 5.543 4.772 1.26 1.62 3.333 2.59 5.55 2.592h6.698c1.42-.001 2.68-.86 3.134-2.138l2.76-7.74c.272-.757.224-1.584-.134-2.325" fill-opacity=".05" fill="#000" mask="url(#B)" />
</g>
</g>
</g>
</svg>`;
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var ARGlyph = `
<svg version="1.1" id="view_x5F_in_x5F_AR_x5F_icon"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="24px" height="24px"
viewBox="0 0 24 24" enable-background="new 0 0 24 24" xml:space="preserve">
<rect id="Bounding_Box" x="0" y="0" fill="none" width="24" height="24"/>
<g id="Art_layer">
<path d="M3,4c0-0.55,0.45-1,1-1h2V1H4C2.35,1,1,2.35,1,4v2h2V4z"/>
<path d="M20,3c0.55,0,1,0.45,1,1v2h2V4c0-1.65-1.35-3-3-3h-2v2H20z"/>
<path d="M4,21c-0.55,0-1-0.45-1-1v-2H1v2c0,1.65,1.35,3,3,3h2v-2H4z"/>
<path d="M20,21c0.55,0,1-0.45,1-1v-2h2v2c0,1.65-1.35,3-3,3h-2v-2H20z"/>
<g>
<path d="M18.25,7.6l-5.5-3.18c-0.46-0.27-1.04-0.27-1.5,0L5.75,7.6C5.29,7.87,5,8.36,5,8.9v6.35c0,0.54,0.29,1.03,0.75,1.3
l5.5,3.18c0.46,0.27,1.04,0.27,1.5,0l5.5-3.18c0.46-0.27,0.75-0.76,0.75-1.3V8.9C19,8.36,18.71,7.87,18.25,7.6z M7,14.96v-4.62
l4,2.32v4.61L7,14.96z M12,10.93L8,8.61l4-2.31l4,2.31L12,10.93z M13,17.27v-4.61l4-2.32v4.62L13,17.27z"/>
</g>
</g>
</svg>`;
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const template = document.createElement('template');
template.innerHTML = `
<style>
:host {
display: block;
position: relative;
contain: strict;
width: 300px;
height: 150px;
}
/* NOTE: This ruleset is our integration surface area with the
* :focus-visible polyfill.
*
* @see https://github.com/WICG/focus-visible/pull/196 */
:host([data-js-focus-visible]:focus:not(.focus-visible)),
:host([data-js-focus-visible]) :focus:not(.focus-visible) {
outline: none;
}
.container {
position: relative;
}
.userInput {
width: 100%;
height: 100%;
display: block;
position: relative;
overflow: hidden;
}
canvas {
position: absolute;
display: none;
pointer-events: none;
/* NOTE(cdata): Chrome 76 and below apparently have a bug
* that causes our canvas not to display pixels unless it is
* on its own render layer
* @see https://github.com/google/model-viewer/pull/755#issuecomment-536597893
*/
transform: translateZ(0);
}
canvas.show {
display: block;
}
/* Adapted from HTML5 Boilerplate
*
* @see https://github.com/h5bp/html5-boilerplate/blob/ceb4620c78fc82e13534fc44202a3f168754873f/dist/css/main.css#L122-L133 */
.screen-reader-only {
border: 0;
clip: rect(0, 0, 0, 0);
height: 1px;
margin: -1px;
overflow: hidden;
padding: 0;
position: absolute;
white-space: nowrap;
width: 1px;
}
.slot {
position: absolute;
pointer-events: none;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
.slot > * {
pointer-events: initial;
}
.annotation-wrapper ::slotted(*) {
opacity: var(--max-hotspot-opacity, 1);
transition: opacity 0.3s;
}
.pointer-tumbling .annotation-wrapper ::slotted(*) {
pointer-events: none;
}
.annotation-wrapper ::slotted(*) {
pointer-events: initial;
}
.annotation-wrapper.hide ::slotted(*) {
opacity: var(--min-hotspot-opacity, 0.25);
}
.slot.poster {
opacity: 0;
transition: opacity 0.3s 0.3s;
background-color: inherit;
}
.slot.poster.show {
opacity: 1;
transition: none;
}
.slot.poster > * {
pointer-events: initial;
}
.slot.poster:not(.show) > * {
pointer-events: none;
}
#default-poster {
width: 100%;
height: 100%;
/* The default poster is a <button> so we need to set display
* to prevent it from being affected by text-align: */
display: block;
position: absolute;
border: none;
padding: 0;
background-size: contain;
background-repeat: no-repeat;
background-position: center;
background-color: var(--poster-color, #fff);
background-image: var(--poster-image, none);
}
#default-progress-bar {
display: block;
position: relative;
width: 100%;
height: 100%;
pointer-events: none;
overflow: hidden;
}
#default-progress-bar > .mask {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: var(--progress-mask, #fff);
transition: opacity 0.3s;
opacity: 0.2;
}
#default-progress-bar > .bar {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: var(--progress-bar-height, 5px);
background-color: var(--progress-bar-color, rgba(0, 0, 0, 0.4));
transition: transform 0.09s;
transform-origin: top left;
transform: scaleX(0);
overflow: hidden;
}
#default-progress-bar > .bar.hide {
transition: opacity 0.3s 1s;
opacity: 0;
}
.slot.interaction-prompt {
display: var(--interaction-prompt-display, flex);
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
align-items: center;
justify-content: center;
opacity: 0;
will-change: opacity;
overflow: hidden;
transition: opacity 0.3s;
}
.slot.interaction-prompt.visible {
opacity: 1;
}
.slot.interaction-prompt > .animated-container {
will-change: transform, opacity;
}
.slot.interaction-prompt > * {
pointer-events: none;
}
.slot.ar-button {
-moz-user-select: none;
-webkit-tap-highlight-color: transparent;
user-select: none;
display: var(--ar-button-display, block);
}
.slot.ar-button:not(.enabled) {
display: none;
}
.fab {
display: flex;
align-items: center;
justify-content: center;
box-sizing: border-box;
width: 40px;
height: 40px;
cursor: pointer;
background-color: #fff;
box-shadow: 0px 0px 4px rgba(0, 0, 0, 0.15);
border-radius: 100px;
}
.fab > * {
opacity: 0.87;
}
#default-ar-button {
position: absolute;
bottom: 16px;
right: 16px;
transform: scale(var(--ar-button-scale, 1));
transform-origin: bottom right;
}
.slot.default {
pointer-events: none;
}
.slot.progress-bar {
pointer-events: none;
}
.slot.exit-webxr-ar-button {
pointer-events: none;
}
.slot.exit-webxr-ar-button:not(.enabled) {
display: none;
}
#default-exit-webxr-ar-button {
display: flex;
align-items: center;
justify-content: center;
position: absolute;
top: 16px;
right: 16px;
width: 40px;
height: 40px;
box-sizing: border-box;
}
#default-exit-webxr-ar-button > svg {
fill: #fff;
}
</style>
<div class="container">
<div class="userInput" tabindex="0" role="img"
aria-label="A depiction of a 3D model"
aria-live="polite">
<div class="slot canvas">
<slot name="canvas">
<canvas></canvas>
</slot>
</div>
</div>
<!-- NOTE(cdata): We need to wrap slots because browsers without ShadowDOM
will have their <slot> elements removed by ShadyCSS -->
<div class="slot poster">
<slot name="poster">
<button type="button" id="default-poster" aria-hidden="true" aria-label="Activate to view in 3D!"></button>
</slot>
</div>
<div class="slot ar-button">
<slot name="ar-button">
<a id="default-ar-button" part="default-ar-button" class="fab"
tabindex="2"
aria-label="View this 3D model up close">
${ARGlyph}
</a>
</slot>
</div>
<div class="slot interaction-prompt">
<div class="animated-container">
<slot name="interaction-prompt" aria-hidden="true">
${ControlsPrompt}
</slot>
</div>
</div>
<div class="slot default">
<slot></slot>
<div class="slot progress-bar">
<slot name="progress-bar">
<div id="default-progress-bar" aria-hidden="true">
<div class="mask" part="default-progress-mask"></div>
<div class="bar" part="default-progress-bar"></div>
</div>
</slot>
</div>
<div class="slot exit-webxr-ar-button">
<slot name="exit-webxr-ar-button">
<a id="default-exit-webxr-ar-button" part="default-exit-webxr-ar-button"
tabindex="3"
aria-label="Exit AR"
aria-hidden="true">
${CloseIcon}
</a>
</slot>
</div>
</div>
</div>`;
const makeTemplate = (tagName) => {
const clone = document.createElement('template');
clone.innerHTML = template.innerHTML;
if (window.ShadyCSS) {
window.ShadyCSS.prepareTemplate(clone, tagName);
}
return clone;
};
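/*
 * Illustrative use of makeTemplate (the tag name and class are hypothetical,
 * not part of this bundle). The returned template is stamped into a shadow
 * root as usual; when the ShadyCSS polyfill is active it has already been
 * prepared for the given tag name:
 *
 *   const viewerTemplate = makeTemplate('my-viewer');
 *   class MyViewer extends HTMLElement {
 *     constructor() {
 *       super();
 *       this.attachShadow({mode: 'open'})
 *           .appendChild(viewerTemplate.content.cloneNode(true));
 *     }
 *   }
 *   customElements.define('my-viewer', MyViewer);
 */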
/**
* @license
* Copyright 2010-2021 Three.js Authors
* SPDX-License-Identifier: MIT
*/
const REVISION = '128';
const CullFaceNone = 0;
const CullFaceBack = 1;
const CullFaceFront = 2;
const PCFShadowMap = 1;
const PCFSoftShadowMap = 2;
const VSMShadowMap = 3;
const FrontSide = 0;
const BackSide = 1;
const DoubleSide = 2;
const FlatShading = 1;
const NoBlending = 0;
const NormalBlending = 1;
const AdditiveBlending = 2;
const SubtractiveBlending = 3;
const MultiplyBlending = 4;
const CustomBlending = 5;
const AddEquation = 100;
const SubtractEquation = 101;
const ReverseSubtractEquation = 102;
const MinEquation = 103;
const MaxEquation = 104;
const ZeroFactor = 200;
const OneFactor = 201;
const SrcColorFactor = 202;
const OneMinusSrcColorFactor = 203;
const SrcAlphaFactor = 204;
const OneMinusSrcAlphaFactor = 205;
const DstAlphaFactor = 206;
const OneMinusDstAlphaFactor = 207;
const DstColorFactor = 208;
const OneMinusDstColorFactor = 209;
const SrcAlphaSaturateFactor = 210;
const NeverDepth = 0;
const AlwaysDepth = 1;
const LessDepth = 2;
const LessEqualDepth = 3;
const EqualDepth = 4;
const GreaterEqualDepth = 5;
const GreaterDepth = 6;
const NotEqualDepth = 7;
const MultiplyOperation = 0;
const MixOperation = 1;
const AddOperation = 2;
const NoToneMapping = 0;
const LinearToneMapping = 1;
const ReinhardToneMapping = 2;
const CineonToneMapping = 3;
const ACESFilmicToneMapping = 4;
const CustomToneMapping = 5;
const UVMapping = 300;
const CubeReflectionMapping = 301;
const CubeRefractionMapping = 302;
const EquirectangularReflectionMapping = 303;
const EquirectangularRefractionMapping = 304;
const CubeUVReflectionMapping = 306;
const CubeUVRefractionMapping = 307;
const RepeatWrapping = 1000;
const ClampToEdgeWrapping = 1001;
const MirroredRepeatWrapping = 1002;
const NearestFilter = 1003;
const NearestMipmapNearestFilter = 1004;
const NearestMipmapLinearFilter = 1005;
const LinearFilter = 1006;
const LinearMipmapNearestFilter = 1007;
const LinearMipmapLinearFilter = 1008;
const UnsignedByteType = 1009;
const ByteType = 1010;
const ShortType = 1011;
const UnsignedShortType = 1012;
const IntType = 1013;
const UnsignedIntType = 1014;
const FloatType = 1015;
const HalfFloatType = 1016;
const UnsignedShort4444Type = 1017;
const UnsignedShort5551Type = 1018;
const UnsignedShort565Type = 1019;
const UnsignedInt248Type = 1020;
const AlphaFormat = 1021;
const RGBFormat = 1022;
const RGBAFormat = 1023;
const LuminanceFormat = 1024;
const LuminanceAlphaFormat = 1025;
const RGBEFormat = RGBAFormat;
const DepthFormat = 1026;
const DepthStencilFormat = 1027;
const RedFormat = 1028;
const RedIntegerFormat = 1029;
const RGFormat = 1030;
const RGIntegerFormat = 1031;
const RGBIntegerFormat = 1032;
const RGBAIntegerFormat = 1033;
const RGB_S3TC_DXT1_Format = 33776;
const RGBA_S3TC_DXT1_Format = 33777;
const RGBA_S3TC_DXT3_Format = 33778;
const RGBA_S3TC_DXT5_Format = 33779;
const RGB_PVRTC_4BPPV1_Format = 35840;
const RGB_PVRTC_2BPPV1_Format = 35841;
const RGBA_PVRTC_4BPPV1_Format = 35842;
const RGBA_PVRTC_2BPPV1_Format = 35843;
const RGB_ETC1_Format = 36196;
const RGB_ETC2_Format = 37492;
const RGBA_ETC2_EAC_Format = 37496;
const RGBA_ASTC_4x4_Format = 37808;
const RGBA_ASTC_5x4_Format = 37809;
const RGBA_ASTC_5x5_Format = 37810;
const RGBA_ASTC_6x5_Format = 37811;
const RGBA_ASTC_6x6_Format = 37812;
const RGBA_ASTC_8x5_Format = 37813;
const RGBA_ASTC_8x6_Format = 37814;
const RGBA_ASTC_8x8_Format = 37815;
const RGBA_ASTC_10x5_Format = 37816;
const RGBA_ASTC_10x6_Format = 37817;
const RGBA_ASTC_10x8_Format = 37818;
const RGBA_ASTC_10x10_Format = 37819;
const RGBA_ASTC_12x10_Format = 37820;
const RGBA_ASTC_12x12_Format = 37821;
const RGBA_BPTC_Format = 36492;
const SRGB8_ALPHA8_ASTC_4x4_Format = 37840;
const SRGB8_ALPHA8_ASTC_5x4_Format = 37841;
const SRGB8_ALPHA8_ASTC_5x5_Format = 37842;
const SRGB8_ALPHA8_ASTC_6x5_Format = 37843;
const SRGB8_ALPHA8_ASTC_6x6_Format = 37844;
const SRGB8_ALPHA8_ASTC_8x5_Format = 37845;
const SRGB8_ALPHA8_ASTC_8x6_Format = 37846;
const SRGB8_ALPHA8_ASTC_8x8_Format = 37847;
const SRGB8_ALPHA8_ASTC_10x5_Format = 37848;
const SRGB8_ALPHA8_ASTC_10x6_Format = 37849;
const SRGB8_ALPHA8_ASTC_10x8_Format = 37850;
const SRGB8_ALPHA8_ASTC_10x10_Format = 37851;
const SRGB8_ALPHA8_ASTC_12x10_Format = 37852;
const SRGB8_ALPHA8_ASTC_12x12_Format = 37853;
const LoopOnce = 2200;
const LoopRepeat = 2201;
const LoopPingPong = 2202;
const InterpolateDiscrete = 2300;
const InterpolateLinear = 2301;
const InterpolateSmooth = 2302;
const ZeroCurvatureEnding = 2400;
const ZeroSlopeEnding = 2401;
const WrapAroundEnding = 2402;
const NormalAnimationBlendMode = 2500;
const AdditiveAnimationBlendMode = 2501;
const TrianglesDrawMode = 0;
const TriangleStripDrawMode = 1;
const TriangleFanDrawMode = 2;
const LinearEncoding = 3000;
const sRGBEncoding = 3001;
const GammaEncoding = 3007;
const RGBEEncoding = 3002;
const LogLuvEncoding = 3003;
const RGBM7Encoding = 3004;
const RGBM16Encoding = 3005;
const RGBDEncoding = 3006;
const BasicDepthPacking = 3200;
const RGBADepthPacking = 3201;
const TangentSpaceNormalMap = 0;
const ObjectSpaceNormalMap = 1;
const KeepStencilOp = 7680;
const AlwaysStencilFunc = 519;
const StaticDrawUsage = 35044;
const DynamicDrawUsage = 35048;
const GLSL3 = '300 es';
/**
* https://github.com/mrdoob/eventdispatcher.js/
*/
class EventDispatcher {
addEventListener( type, listener ) {
if ( this._listeners === undefined ) this._listeners = {};
const listeners = this._listeners;
if ( listeners[ type ] === undefined ) {
listeners[ type ] = [];
}
if ( listeners[ type ].indexOf( listener ) === - 1 ) {
listeners[ type ].push( listener );
}
}
hasEventListener( type, listener ) {
if ( this._listeners === undefined ) return false;
const listeners = this._listeners;
return listeners[ type ] !== undefined && listeners[ type ].indexOf( listener ) !== - 1;
}
removeEventListener( type, listener ) {
if ( this._listeners === undefined ) return;
const listeners = this._listeners;
const listenerArray = listeners[ type ];
if ( listenerArray !== undefined ) {
const index = listenerArray.indexOf( listener );
if ( index !== - 1 ) {
listenerArray.splice( index, 1 );
}
}
}
dispatchEvent( event ) {
if ( this._listeners === undefined ) return;
const listeners = this._listeners;
const listenerArray = listeners[ event.type ];
if ( listenerArray !== undefined ) {
event.target = this;
// Make a copy, in case listeners are removed while iterating.
const array = listenerArray.slice( 0 );
for ( let i = 0, l = array.length; i < l; i ++ ) {
array[ i ].call( this, event );
}
event.target = null;
}
}
}
const _lut = [];
for ( let i = 0; i < 256; i ++ ) {
_lut[ i ] = ( i < 16 ? '0' : '' ) + ( i ).toString( 16 );
}
let _seed = 1234567;
const DEG2RAD = Math.PI / 180;
const RAD2DEG = 180 / Math.PI;
// http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/21963136#21963136
function generateUUID() {
const d0 = Math.random() * 0xffffffff | 0;
const d1 = Math.random() * 0xffffffff | 0;
const d2 = Math.random() * 0xffffffff | 0;
const d3 = Math.random() * 0xffffffff | 0;
const uuid = _lut[ d0 & 0xff ] + _lut[ d0 >> 8 & 0xff ] + _lut[ d0 >> 16 & 0xff ] + _lut[ d0 >> 24 & 0xff ] + '-' +
_lut[ d1 & 0xff ] + _lut[ d1 >> 8 & 0xff ] + '-' + _lut[ d1 >> 16 & 0x0f | 0x40 ] + _lut[ d1 >> 24 & 0xff ] + '-' +
_lut[ d2 & 0x3f | 0x80 ] + _lut[ d2 >> 8 & 0xff ] + '-' + _lut[ d2 >> 16 & 0xff ] + _lut[ d2 >> 24 & 0xff ] +
_lut[ d3 & 0xff ] + _lut[ d3 >> 8 & 0xff ] + _lut[ d3 >> 16 & 0xff ] + _lut[ d3 >> 24 & 0xff ];
// .toUpperCase() here flattens concatenated strings to save heap memory space.
return uuid.toUpperCase();
}
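/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * generateUUID() above returns an upper-cased, RFC-4122-style version 4 UUID string in the
 * usual 8-4-4-4-12 hex layout:
 *
 *   generateUUID(); // -> something like 'A1B2C3D4-E5F6-4A7B-8C9D-0E1F2A3B4C5D'
 */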
function clamp$1( value, min, max ) {
return Math.max( min, Math.min( max, value ) );
}
// compute the Euclidean modulo of n % m (result is always in [ 0, m ))
// https://en.wikipedia.org/wiki/Modulo_operation
function euclideanModulo( n, m ) {
return ( ( n % m ) + m ) % m;
}
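/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * Unlike the native `%` operator, euclideanModulo() stays non-negative for negative operands:
 *
 *   euclideanModulo( - 1, 4 ); // 3   (whereas - 1 % 4 === - 1)
 *   euclideanModulo( 5, 4 );   // 1
 */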
// Linear mapping from range <a1, a2> to range <b1, b2>
function mapLinear( x, a1, a2, b1, b2 ) {
return b1 + ( x - a1 ) * ( b2 - b1 ) / ( a2 - a1 );
}
// https://www.gamedev.net/tutorials/programming/general-and-gameplay-programming/inverse-lerp-a-super-useful-yet-often-overlooked-function-r5230/
function inverseLerp( x, y, value ) {
if ( x !== y ) {
return ( value - x ) / ( y - x );
} else {
return 0;
}
}
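/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * mapLinear() maps a value from one range into another; inverseLerp() recovers the normalized
 * 0..1 position of a value within a range:
 *
 *   mapLinear( 5, 0, 10, 0, 100 ); // 50
 *   inverseLerp( 0, 10, 2.5 );     // 0.25
 */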
// https://en.wikipedia.org/wiki/Linear_interpolation
function lerp( x, y, t ) {
return ( 1 - t ) * x + t * y;
}
// http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/
function damp( x, y, lambda, dt ) {
return lerp( x, y, 1 - Math.exp( - lambda * dt ) );
}
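/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * damp() wraps lerp() in an exponential factor so the smoothing speed is independent of the
 * frame rate; `current`, `target` and `deltaSeconds` below are hypothetical caller variables:
 *
 *   lerp( 0, 10, 0.25 );                                  // 2.5
 *   current = damp( current, target, 6, deltaSeconds );   // call once per frame
 */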
// https://www.desmos.com/calculator/vcsjnyz7x4
function pingpong( x, length = 1 ) {
return length - Math.abs( euclideanModulo( x, length * 2 ) - length );
}
// http://en.wikipedia.org/wiki/Smoothstep
function smoothstep( x, min, max ) {
if ( x <= min ) return 0;
if ( x >= max ) return 1;
x = ( x - min ) / ( max - min );
return x * x * ( 3 - 2 * x );
}
function smootherstep( x, min, max ) {
if ( x <= min ) return 0;
if ( x >= max ) return 1;
x = ( x - min ) / ( max - min );
return x * x * x * ( x * ( x * 6 - 15 ) + 10 );
}
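/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * Both easing functions clamp to 0 below `min` and 1 above `max` and ease smoothly in between;
 * smootherstep() additionally has zero second derivatives at the endpoints:
 *
 *   smoothstep( - 1, 0, 1 );   // 0
 *   smoothstep( 0.25, 0, 1 );  // 0.15625
 *   smoothstep( 2, 0, 1 );     // 1
 */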
// Random integer from <low, high> interval
function randInt( low, high ) {
return low + Math.floor( Math.random() * ( high - low + 1 ) );
}
// Random float from <low, high> interval
function randFloat( low, high ) {
return low + Math.random() * ( high - low );
}
// Random float from <-range/2, range/2> interval
function randFloatSpread( range ) {
return range * ( 0.5 - Math.random() );
}
// Deterministic pseudo-random float in the interval [ 0, 1 ]
function seededRandom( s ) {
if ( s !== undefined ) _seed = s % 2147483647;
// Park-Miller algorithm
_seed = _seed * 16807 % 2147483647;
return ( _seed - 1 ) / 2147483646;
}
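/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * seededRandom() keeps its Park-Miller state in the module-level `_seed`, so passing the same
 * seed reproduces the same sequence on every run:
 *
 *   seededRandom( 7 ); // first value of the sequence for seed 7 (deterministic)
 *   seededRandom();    // next value of the same sequence
 */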
function degToRad( degrees ) {
return degrees * DEG2RAD;
}
function radToDeg( radians ) {
return radians * RAD2DEG;
}
function isPowerOfTwo( value ) {
return ( value & ( value - 1 ) ) === 0 && value !== 0;
}
function ceilPowerOfTwo( value ) {
return Math.pow( 2, Math.ceil( Math.log( value ) / Math.LN2 ) );
}
function floorPowerOfTwo( value ) {
return Math.pow( 2, Math.floor( Math.log( value ) / Math.LN2 ) );
}
function setQuaternionFromProperEuler( q, a, b, c, order ) {
// Intrinsic Proper Euler Angles - see https://en.wikipedia.org/wiki/Euler_angles
// rotations are applied to the axes in the order specified by 'order'
// rotation by angle 'a' is applied first, then by angle 'b', then by angle 'c'
// angles are in radians
const cos = Math.cos;
const sin = Math.sin;
const c2 = cos( b / 2 );
const s2 = sin( b / 2 );
const c13 = cos( ( a + c ) / 2 );
const s13 = sin( ( a + c ) / 2 );
const c1_3 = cos( ( a - c ) / 2 );
const s1_3 = sin( ( a - c ) / 2 );
const c3_1 = cos( ( c - a ) / 2 );
const s3_1 = sin( ( c - a ) / 2 );
switch ( order ) {
case 'XYX':
q.set( c2 * s13, s2 * c1_3, s2 * s1_3, c2 * c13 );
break;
case 'YZY':
q.set( s2 * s1_3, c2 * s13, s2 * c1_3, c2 * c13 );
break;
case 'ZXZ':
q.set( s2 * c1_3, s2 * s1_3, c2 * s13, c2 * c13 );
break;
case 'XZX':
q.set( c2 * s13, s2 * s3_1, s2 * c3_1, c2 * c13 );
break;
case 'YXY':
q.set( s2 * c3_1, c2 * s13, s2 * s3_1, c2 * c13 );
break;
case 'ZYZ':
q.set( s2 * s3_1, s2 * c3_1, c2 * s13, c2 * c13 );
break;
default:
console.warn( 'THREE.MathUtils: .setQuaternionFromProperEuler() encountered an unknown order: ' + order );
}
}
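/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * `q` must be a Quaternion (the class is defined further below in this file) and only the six
 * proper Euler orders listed in the switch above are supported; `alpha`, `beta`, `gamma` are
 * hypothetical angle variables in radians:
 *
 *   setQuaternionFromProperEuler( q, alpha, beta, gamma, 'ZXZ' );
 */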
var MathUtils = /*#__PURE__*/Object.freeze({
__proto__: null,
DEG2RAD: DEG2RAD,
RAD2DEG: RAD2DEG,
generateUUID: generateUUID,
clamp: clamp$1,
euclideanModulo: euclideanModulo,
mapLinear: mapLinear,
inverseLerp: inverseLerp,
lerp: lerp,
damp: damp,
pingpong: pingpong,
smoothstep: smoothstep,
smootherstep: smootherstep,
randInt: randInt,
randFloat: randFloat,
randFloatSpread: randFloatSpread,
seededRandom: seededRandom,
degToRad: degToRad,
radToDeg: radToDeg,
isPowerOfTwo: isPowerOfTwo,
ceilPowerOfTwo: ceilPowerOfTwo,
floorPowerOfTwo: floorPowerOfTwo,
setQuaternionFromProperEuler: setQuaternionFromProperEuler
});
class Vector2 {
constructor( x = 0, y = 0 ) {
this.x = x;
this.y = y;
}
get width() {
return this.x;
}
set width( value ) {
this.x = value;
}
get height() {
return this.y;
}
set height( value ) {
this.y = value;
}
set( x, y ) {
this.x = x;
this.y = y;
return this;
}
setScalar( scalar ) {
this.x = scalar;
this.y = scalar;
return this;
}
setX( x ) {
this.x = x;
return this;
}
setY( y ) {
this.y = y;
return this;
}
setComponent( index, value ) {
switch ( index ) {
case 0: this.x = value; break;
case 1: this.y = value; break;
default: throw new Error( 'index is out of range: ' + index );
}
return this;
}
getComponent( index ) {
switch ( index ) {
case 0: return this.x;
case 1: return this.y;
default: throw new Error( 'index is out of range: ' + index );
}
}
clone() {
return new this.constructor( this.x, this.y );
}
copy( v ) {
this.x = v.x;
this.y = v.y;
return this;
}
add( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead.' );
return this.addVectors( v, w );
}
this.x += v.x;
this.y += v.y;
return this;
}
addScalar( s ) {
this.x += s;
this.y += s;
return this;
}
addVectors( a, b ) {
this.x = a.x + b.x;
this.y = a.y + b.y;
return this;
}
addScaledVector( v, s ) {
this.x += v.x * s;
this.y += v.y * s;
return this;
}
sub( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector2: .sub() now only accepts one argument. Use .subVectors( a, b ) instead.' );
return this.subVectors( v, w );
}
this.x -= v.x;
this.y -= v.y;
return this;
}
subScalar( s ) {
this.x -= s;
this.y -= s;
return this;
}
subVectors( a, b ) {
this.x = a.x - b.x;
this.y = a.y - b.y;
return this;
}
multiply( v ) {
this.x *= v.x;
this.y *= v.y;
return this;
}
multiplyScalar( scalar ) {
this.x *= scalar;
this.y *= scalar;
return this;
}
divide( v ) {
this.x /= v.x;
this.y /= v.y;
return this;
}
divideScalar( scalar ) {
return this.multiplyScalar( 1 / scalar );
}
applyMatrix3( m ) {
const x = this.x, y = this.y;
const e = m.elements;
this.x = e[ 0 ] * x + e[ 3 ] * y + e[ 6 ];
this.y = e[ 1 ] * x + e[ 4 ] * y + e[ 7 ];
return this;
}
min( v ) {
this.x = Math.min( this.x, v.x );
this.y = Math.min( this.y, v.y );
return this;
}
max( v ) {
this.x = Math.max( this.x, v.x );
this.y = Math.max( this.y, v.y );
return this;
}
clamp( min, max ) {
// assumes min < max, componentwise
this.x = Math.max( min.x, Math.min( max.x, this.x ) );
this.y = Math.max( min.y, Math.min( max.y, this.y ) );
return this;
}
clampScalar( minVal, maxVal ) {
this.x = Math.max( minVal, Math.min( maxVal, this.x ) );
this.y = Math.max( minVal, Math.min( maxVal, this.y ) );
return this;
}
clampLength( min, max ) {
const length = this.length();
return this.divideScalar( length || 1 ).multiplyScalar( Math.max( min, Math.min( max, length ) ) );
}
floor() {
this.x = Math.floor( this.x );
this.y = Math.floor( this.y );
return this;
}
ceil() {
this.x = Math.ceil( this.x );
this.y = Math.ceil( this.y );
return this;
}
round() {
this.x = Math.round( this.x );
this.y = Math.round( this.y );
return this;
}
roundToZero() {
this.x = ( this.x < 0 ) ? Math.ceil( this.x ) : Math.floor( this.x );
this.y = ( this.y < 0 ) ? Math.ceil( this.y ) : Math.floor( this.y );
return this;
}
negate() {
this.x = - this.x;
this.y = - this.y;
return this;
}
dot( v ) {
return this.x * v.x + this.y * v.y;
}
cross( v ) {
return this.x * v.y - this.y * v.x;
}
lengthSq() {
return this.x * this.x + this.y * this.y;
}
length() {
return Math.sqrt( this.x * this.x + this.y * this.y );
}
manhattanLength() {
return Math.abs( this.x ) + Math.abs( this.y );
}
normalize() {
return this.divideScalar( this.length() || 1 );
}
angle() {
// computes the angle in radians with respect to the positive x-axis
const angle = Math.atan2( - this.y, - this.x ) + Math.PI;
return angle;
}
distanceTo( v ) {
return Math.sqrt( this.distanceToSquared( v ) );
}
distanceToSquared( v ) {
const dx = this.x - v.x, dy = this.y - v.y;
return dx * dx + dy * dy;
}
manhattanDistanceTo( v ) {
return Math.abs( this.x - v.x ) + Math.abs( this.y - v.y );
}
setLength( length ) {
return this.normalize().multiplyScalar( length );
}
lerp( v, alpha ) {
this.x += ( v.x - this.x ) * alpha;
this.y += ( v.y - this.y ) * alpha;
return this;
}
lerpVectors( v1, v2, alpha ) {
this.x = v1.x + ( v2.x - v1.x ) * alpha;
this.y = v1.y + ( v2.y - v1.y ) * alpha;
return this;
}
equals( v ) {
return ( ( v.x === this.x ) && ( v.y === this.y ) );
}
fromArray( array, offset = 0 ) {
this.x = array[ offset ];
this.y = array[ offset + 1 ];
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this.x;
array[ offset + 1 ] = this.y;
return array;
}
fromBufferAttribute( attribute, index, offset ) {
if ( offset !== undefined ) {
console.warn( 'THREE.Vector2: offset has been removed from .fromBufferAttribute().' );
}
this.x = attribute.getX( index );
this.y = attribute.getY( index );
return this;
}
rotateAround( center, angle ) {
const c = Math.cos( angle ), s = Math.sin( angle );
const x = this.x - center.x;
const y = this.y - center.y;
this.x = x * c - y * s + center.x;
this.y = x * s + y * c + center.y;
return this;
}
random() {
this.x = Math.random();
this.y = Math.random();
return this;
}
}
Vector2.prototype.isVector2 = true;
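/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * angle() measures counter-clockwise from the positive x-axis, and rotateAround() rotates the
 * vector in place about an arbitrary center point:
 *
 *   new Vector2( 1, 1 ).angle();                                          // Math.PI / 4
 *   new Vector2( 1, 0 ).rotateAround( new Vector2( 0, 0 ), Math.PI / 2 ); // ~( 0, 1 )
 */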
class Matrix3 {
constructor() {
this.elements = [
1, 0, 0,
0, 1, 0,
0, 0, 1
];
if ( arguments.length > 0 ) {
console.error( 'THREE.Matrix3: the constructor no longer reads arguments. use .set() instead.' );
}
}
set( n11, n12, n13, n21, n22, n23, n31, n32, n33 ) {
const te = this.elements;
te[ 0 ] = n11; te[ 1 ] = n21; te[ 2 ] = n31;
te[ 3 ] = n12; te[ 4 ] = n22; te[ 5 ] = n32;
te[ 6 ] = n13; te[ 7 ] = n23; te[ 8 ] = n33;
return this;
}
identity() {
this.set(
1, 0, 0,
0, 1, 0,
0, 0, 1
);
return this;
}
copy( m ) {
const te = this.elements;
const me = m.elements;
te[ 0 ] = me[ 0 ]; te[ 1 ] = me[ 1 ]; te[ 2 ] = me[ 2 ];
te[ 3 ] = me[ 3 ]; te[ 4 ] = me[ 4 ]; te[ 5 ] = me[ 5 ];
te[ 6 ] = me[ 6 ]; te[ 7 ] = me[ 7 ]; te[ 8 ] = me[ 8 ];
return this;
}
extractBasis( xAxis, yAxis, zAxis ) {
xAxis.setFromMatrix3Column( this, 0 );
yAxis.setFromMatrix3Column( this, 1 );
zAxis.setFromMatrix3Column( this, 2 );
return this;
}
setFromMatrix4( m ) {
const me = m.elements;
this.set(
me[ 0 ], me[ 4 ], me[ 8 ],
me[ 1 ], me[ 5 ], me[ 9 ],
me[ 2 ], me[ 6 ], me[ 10 ]
);
return this;
}
multiply( m ) {
return this.multiplyMatrices( this, m );
}
premultiply( m ) {
return this.multiplyMatrices( m, this );
}
multiplyMatrices( a, b ) {
const ae = a.elements;
const be = b.elements;
const te = this.elements;
const a11 = ae[ 0 ], a12 = ae[ 3 ], a13 = ae[ 6 ];
const a21 = ae[ 1 ], a22 = ae[ 4 ], a23 = ae[ 7 ];
const a31 = ae[ 2 ], a32 = ae[ 5 ], a33 = ae[ 8 ];
const b11 = be[ 0 ], b12 = be[ 3 ], b13 = be[ 6 ];
const b21 = be[ 1 ], b22 = be[ 4 ], b23 = be[ 7 ];
const b31 = be[ 2 ], b32 = be[ 5 ], b33 = be[ 8 ];
te[ 0 ] = a11 * b11 + a12 * b21 + a13 * b31;
te[ 3 ] = a11 * b12 + a12 * b22 + a13 * b32;
te[ 6 ] = a11 * b13 + a12 * b23 + a13 * b33;
te[ 1 ] = a21 * b11 + a22 * b21 + a23 * b31;
te[ 4 ] = a21 * b12 + a22 * b22 + a23 * b32;
te[ 7 ] = a21 * b13 + a22 * b23 + a23 * b33;
te[ 2 ] = a31 * b11 + a32 * b21 + a33 * b31;
te[ 5 ] = a31 * b12 + a32 * b22 + a33 * b32;
te[ 8 ] = a31 * b13 + a32 * b23 + a33 * b33;
return this;
}
multiplyScalar( s ) {
const te = this.elements;
te[ 0 ] *= s; te[ 3 ] *= s; te[ 6 ] *= s;
te[ 1 ] *= s; te[ 4 ] *= s; te[ 7 ] *= s;
te[ 2 ] *= s; te[ 5 ] *= s; te[ 8 ] *= s;
return this;
}
determinant() {
const te = this.elements;
const a = te[ 0 ], b = te[ 1 ], c = te[ 2 ],
d = te[ 3 ], e = te[ 4 ], f = te[ 5 ],
g = te[ 6 ], h = te[ 7 ], i = te[ 8 ];
return a * e * i - a * f * h - b * d * i + b * f * g + c * d * h - c * e * g;
}
invert() {
const te = this.elements,
n11 = te[ 0 ], n21 = te[ 1 ], n31 = te[ 2 ],
n12 = te[ 3 ], n22 = te[ 4 ], n32 = te[ 5 ],
n13 = te[ 6 ], n23 = te[ 7 ], n33 = te[ 8 ],
t11 = n33 * n22 - n32 * n23,
t12 = n32 * n13 - n33 * n12,
t13 = n23 * n12 - n22 * n13,
det = n11 * t11 + n21 * t12 + n31 * t13;
if ( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0 );
const detInv = 1 / det;
te[ 0 ] = t11 * detInv;
te[ 1 ] = ( n31 * n23 - n33 * n21 ) * detInv;
te[ 2 ] = ( n32 * n21 - n31 * n22 ) * detInv;
te[ 3 ] = t12 * detInv;
te[ 4 ] = ( n33 * n11 - n31 * n13 ) * detInv;
te[ 5 ] = ( n31 * n12 - n32 * n11 ) * detInv;
te[ 6 ] = t13 * detInv;
te[ 7 ] = ( n21 * n13 - n23 * n11 ) * detInv;
te[ 8 ] = ( n22 * n11 - n21 * n12 ) * detInv;
return this;
}
transpose() {
let tmp;
const m = this.elements;
tmp = m[ 1 ]; m[ 1 ] = m[ 3 ]; m[ 3 ] = tmp;
tmp = m[ 2 ]; m[ 2 ] = m[ 6 ]; m[ 6 ] = tmp;
tmp = m[ 5 ]; m[ 5 ] = m[ 7 ]; m[ 7 ] = tmp;
return this;
}
getNormalMatrix( matrix4 ) {
return this.setFromMatrix4( matrix4 ).invert().transpose();
}
transposeIntoArray( r ) {
const m = this.elements;
r[ 0 ] = m[ 0 ];
r[ 1 ] = m[ 3 ];
r[ 2 ] = m[ 6 ];
r[ 3 ] = m[ 1 ];
r[ 4 ] = m[ 4 ];
r[ 5 ] = m[ 7 ];
r[ 6 ] = m[ 2 ];
r[ 7 ] = m[ 5 ];
r[ 8 ] = m[ 8 ];
return this;
}
setUvTransform( tx, ty, sx, sy, rotation, cx, cy ) {
const c = Math.cos( rotation );
const s = Math.sin( rotation );
this.set(
sx * c, sx * s, - sx * ( c * cx + s * cy ) + cx + tx,
- sy * s, sy * c, - sy * ( - s * cx + c * cy ) + cy + ty,
0, 0, 1
);
return this;
}
scale( sx, sy ) {
const te = this.elements;
te[ 0 ] *= sx; te[ 3 ] *= sx; te[ 6 ] *= sx;
te[ 1 ] *= sy; te[ 4 ] *= sy; te[ 7 ] *= sy;
return this;
}
rotate( theta ) {
const c = Math.cos( theta );
const s = Math.sin( theta );
const te = this.elements;
const a11 = te[ 0 ], a12 = te[ 3 ], a13 = te[ 6 ];
const a21 = te[ 1 ], a22 = te[ 4 ], a23 = te[ 7 ];
te[ 0 ] = c * a11 + s * a21;
te[ 3 ] = c * a12 + s * a22;
te[ 6 ] = c * a13 + s * a23;
te[ 1 ] = - s * a11 + c * a21;
te[ 4 ] = - s * a12 + c * a22;
te[ 7 ] = - s * a13 + c * a23;
return this;
}
translate( tx, ty ) {
const te = this.elements;
te[ 0 ] += tx * te[ 2 ]; te[ 3 ] += tx * te[ 5 ]; te[ 6 ] += tx * te[ 8 ];
te[ 1 ] += ty * te[ 2 ]; te[ 4 ] += ty * te[ 5 ]; te[ 7 ] += ty * te[ 8 ];
return this;
}
equals( matrix ) {
const te = this.elements;
const me = matrix.elements;
for ( let i = 0; i < 9; i ++ ) {
if ( te[ i ] !== me[ i ] ) return false;
}
return true;
}
fromArray( array, offset = 0 ) {
for ( let i = 0; i < 9; i ++ ) {
this.elements[ i ] = array[ i + offset ];
}
return this;
}
toArray( array = [], offset = 0 ) {
const te = this.elements;
array[ offset ] = te[ 0 ];
array[ offset + 1 ] = te[ 1 ];
array[ offset + 2 ] = te[ 2 ];
array[ offset + 3 ] = te[ 3 ];
array[ offset + 4 ] = te[ 4 ];
array[ offset + 5 ] = te[ 5 ];
array[ offset + 6 ] = te[ 6 ];
array[ offset + 7 ] = te[ 7 ];
array[ offset + 8 ] = te[ 8 ];
return array;
}
clone() {
return new this.constructor().fromArray( this.elements );
}
}
Matrix3.prototype.isMatrix3 = true;
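/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * setUvTransform() builds the 3x3 matrix that Texture.updateMatrix() uses for offset / repeat /
 * rotation about a center point; getNormalMatrix() expects a Matrix4 (defined later in this file):
 *
 *   new Matrix3().setUvTransform( 0, 0, 2, 2, Math.PI / 4, 0.5, 0.5 ); // 2x repeat, rotated 45 deg about the UV center
 */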
let _canvas;
class ImageUtils {
static getDataURL( image ) {
if ( /^data:/i.test( image.src ) ) {
return image.src;
}
if ( typeof HTMLCanvasElement == 'undefined' ) {
return image.src;
}
let canvas;
if ( image instanceof HTMLCanvasElement ) {
canvas = image;
} else {
if ( _canvas === undefined ) _canvas = document.createElementNS( 'http://www.w3.org/1999/xhtml', 'canvas' );
_canvas.width = image.width;
_canvas.height = image.height;
const context = _canvas.getContext( '2d' );
if ( image instanceof ImageData ) {
context.putImageData( image, 0, 0 );
} else {
context.drawImage( image, 0, 0, image.width, image.height );
}
canvas = _canvas;
}
if ( canvas.width > 2048 || canvas.height > 2048 ) {
console.warn( 'THREE.ImageUtils.getDataURL: Image converted to jpg for performance reasons', image );
return canvas.toDataURL( 'image/jpeg', 0.6 );
} else {
return canvas.toDataURL( 'image/png' );
}
}
}
let textureId = 0;
class Texture$1 extends EventDispatcher {
constructor( image = Texture$1.DEFAULT_IMAGE, mapping = Texture$1.DEFAULT_MAPPING, wrapS = ClampToEdgeWrapping, wrapT = ClampToEdgeWrapping, magFilter = LinearFilter, minFilter = LinearMipmapLinearFilter, format = RGBAFormat, type = UnsignedByteType, anisotropy = 1, encoding = LinearEncoding ) {
super();
Object.defineProperty( this, 'id', { value: textureId ++ } );
this.uuid = generateUUID();
this.name = '';
this.image = image;
this.mipmaps = [];
this.mapping = mapping;
this.wrapS = wrapS;
this.wrapT = wrapT;
this.magFilter = magFilter;
this.minFilter = minFilter;
this.anisotropy = anisotropy;
this.format = format;
this.internalFormat = null;
this.type = type;
this.offset = new Vector2( 0, 0 );
this.repeat = new Vector2( 1, 1 );
this.center = new Vector2( 0, 0 );
this.rotation = 0;
this.matrixAutoUpdate = true;
this.matrix = new Matrix3();
this.generateMipmaps = true;
this.premultiplyAlpha = false;
this.flipY = true;
this.unpackAlignment = 4; // valid values: 1, 2, 4, 8 (see http://www.khronos.org/opengles/sdk/docs/man/xhtml/glPixelStorei.xml)
// Values of encoding !== THREE.LinearEncoding are only supported on map, envMap and emissiveMap.
//
// Also, changing the encoding after the texture has already been used by a Material will not
// automatically update the Material. You need to explicitly set Material.needsUpdate to trigger a recompile.
this.encoding = encoding;
this.version = 0;
this.onUpdate = null;
}
updateMatrix() {
this.matrix.setUvTransform( this.offset.x, this.offset.y, this.repeat.x, this.repeat.y, this.rotation, this.center.x, this.center.y );
}
clone() {
return new this.constructor().copy( this );
}
copy( source ) {
this.name = source.name;
this.image = source.image;
this.mipmaps = source.mipmaps.slice( 0 );
this.mapping = source.mapping;
this.wrapS = source.wrapS;
this.wrapT = source.wrapT;
this.magFilter = source.magFilter;
this.minFilter = source.minFilter;
this.anisotropy = source.anisotropy;
this.format = source.format;
this.internalFormat = source.internalFormat;
this.type = source.type;
this.offset.copy( source.offset );
this.repeat.copy( source.repeat );
this.center.copy( source.center );
this.rotation = source.rotation;
this.matrixAutoUpdate = source.matrixAutoUpdate;
this.matrix.copy( source.matrix );
this.generateMipmaps = source.generateMipmaps;
this.premultiplyAlpha = source.premultiplyAlpha;
this.flipY = source.flipY;
this.unpackAlignment = source.unpackAlignment;
this.encoding = source.encoding;
return this;
}
toJSON( meta ) {
const isRootObject = ( meta === undefined || typeof meta === 'string' );
if ( ! isRootObject && meta.textures[ this.uuid ] !== undefined ) {
return meta.textures[ this.uuid ];
}
const output = {
metadata: {
version: 4.5,
type: 'Texture',
generator: 'Texture.toJSON'
},
uuid: this.uuid,
name: this.name,
mapping: this.mapping,
repeat: [ this.repeat.x, this.repeat.y ],
offset: [ this.offset.x, this.offset.y ],
center: [ this.center.x, this.center.y ],
rotation: this.rotation,
wrap: [ this.wrapS, this.wrapT ],
format: this.format,
type: this.type,
encoding: this.encoding,
minFilter: this.minFilter,
magFilter: this.magFilter,
anisotropy: this.anisotropy,
flipY: this.flipY,
premultiplyAlpha: this.premultiplyAlpha,
unpackAlignment: this.unpackAlignment
};
if ( this.image !== undefined ) {
// TODO: Move to THREE.Image
const image = this.image;
if ( image.uuid === undefined ) {
image.uuid = generateUUID(); // UGH
}
if ( ! isRootObject && meta.images[ image.uuid ] === undefined ) {
let url;
if ( Array.isArray( image ) ) {
// process array of images e.g. CubeTexture
url = [];
for ( let i = 0, l = image.length; i < l; i ++ ) {
// check cube texture with data textures
if ( image[ i ].isDataTexture ) {
url.push( serializeImage( image[ i ].image ) );
} else {
url.push( serializeImage( image[ i ] ) );
}
}
} else {
// process single image
url = serializeImage( image );
}
meta.images[ image.uuid ] = {
uuid: image.uuid,
url: url
};
}
output.image = image.uuid;
}
if ( ! isRootObject ) {
meta.textures[ this.uuid ] = output;
}
return output;
}
dispose() {
this.dispatchEvent( { type: 'dispose' } );
}
transformUv( uv ) {
if ( this.mapping !== UVMapping ) return uv;
uv.applyMatrix3( this.matrix );
if ( uv.x < 0 || uv.x > 1 ) {
switch ( this.wrapS ) {
case RepeatWrapping:
uv.x = uv.x - Math.floor( uv.x );
break;
case ClampToEdgeWrapping:
uv.x = uv.x < 0 ? 0 : 1;
break;
case MirroredRepeatWrapping:
if ( Math.abs( Math.floor( uv.x ) % 2 ) === 1 ) {
uv.x = Math.ceil( uv.x ) - uv.x;
} else {
uv.x = uv.x - Math.floor( uv.x );
}
break;
}
}
if ( uv.y < 0 || uv.y > 1 ) {
switch ( this.wrapT ) {
case RepeatWrapping:
uv.y = uv.y - Math.floor( uv.y );
break;
case ClampToEdgeWrapping:
uv.y = uv.y < 0 ? 0 : 1;
break;
case MirroredRepeatWrapping:
if ( Math.abs( Math.floor( uv.y ) % 2 ) === 1 ) {
uv.y = Math.ceil( uv.y ) - uv.y;
} else {
uv.y = uv.y - Math.floor( uv.y );
}
break;
}
}
if ( this.flipY ) {
uv.y = 1 - uv.y;
}
return uv;
}
set needsUpdate( value ) {
if ( value === true ) this.version ++;
}
}
Texture$1.DEFAULT_IMAGE = undefined;
Texture$1.DEFAULT_MAPPING = UVMapping;
Texture$1.prototype.isTexture = true;
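/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * `Texture$1` is the bundled THREE.Texture; `image` below stands for a hypothetical
 * HTMLImageElement. Per the constructor comment above, if `encoding` changes after a Material
 * has already used the texture, that Material must also be flagged for recompilation.
 *
 *   const tex = new Texture$1( image );
 *   tex.wrapS = tex.wrapT = RepeatWrapping;
 *   tex.repeat.set( 4, 4 );
 *   tex.encoding = sRGBEncoding;
 *   tex.needsUpdate = true; // bumps `version` so the renderer re-uploads the texture
 */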
function serializeImage( image ) {
if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) ||
( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) ||
( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) {
// default images
return ImageUtils.getDataURL( image );
} else {
if ( image.data ) {
// images of DataTexture
return {
data: Array.prototype.slice.call( image.data ),
width: image.width,
height: image.height,
type: image.data.constructor.name
};
} else {
console.warn( 'THREE.Texture: Unable to serialize Texture.' );
return {};
}
}
}
class Vector4 {
constructor( x = 0, y = 0, z = 0, w = 1 ) {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
}
get width() {
return this.z;
}
set width( value ) {
this.z = value;
}
get height() {
return this.w;
}
set height( value ) {
this.w = value;
}
set( x, y, z, w ) {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
return this;
}
setScalar( scalar ) {
this.x = scalar;
this.y = scalar;
this.z = scalar;
this.w = scalar;
return this;
}
setX( x ) {
this.x = x;
return this;
}
setY( y ) {
this.y = y;
return this;
}
setZ( z ) {
this.z = z;
return this;
}
setW( w ) {
this.w = w;
return this;
}
setComponent( index, value ) {
switch ( index ) {
case 0: this.x = value; break;
case 1: this.y = value; break;
case 2: this.z = value; break;
case 3: this.w = value; break;
default: throw new Error( 'index is out of range: ' + index );
}
return this;
}
getComponent( index ) {
switch ( index ) {
case 0: return this.x;
case 1: return this.y;
case 2: return this.z;
case 3: return this.w;
default: throw new Error( 'index is out of range: ' + index );
}
}
clone() {
return new this.constructor( this.x, this.y, this.z, this.w );
}
copy( v ) {
this.x = v.x;
this.y = v.y;
this.z = v.z;
this.w = ( v.w !== undefined ) ? v.w : 1;
return this;
}
add( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead.' );
return this.addVectors( v, w );
}
this.x += v.x;
this.y += v.y;
this.z += v.z;
this.w += v.w;
return this;
}
addScalar( s ) {
this.x += s;
this.y += s;
this.z += s;
this.w += s;
return this;
}
addVectors( a, b ) {
this.x = a.x + b.x;
this.y = a.y + b.y;
this.z = a.z + b.z;
this.w = a.w + b.w;
return this;
}
addScaledVector( v, s ) {
this.x += v.x * s;
this.y += v.y * s;
this.z += v.z * s;
this.w += v.w * s;
return this;
}
sub( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead.' );
return this.subVectors( v, w );
}
this.x -= v.x;
this.y -= v.y;
this.z -= v.z;
this.w -= v.w;
return this;
}
subScalar( s ) {
this.x -= s;
this.y -= s;
this.z -= s;
this.w -= s;
return this;
}
subVectors( a, b ) {
this.x = a.x - b.x;
this.y = a.y - b.y;
this.z = a.z - b.z;
this.w = a.w - b.w;
return this;
}
multiply( v ) {
this.x *= v.x;
this.y *= v.y;
this.z *= v.z;
this.w *= v.w;
return this;
}
multiplyScalar( scalar ) {
this.x *= scalar;
this.y *= scalar;
this.z *= scalar;
this.w *= scalar;
return this;
}
applyMatrix4( m ) {
const x = this.x, y = this.y, z = this.z, w = this.w;
const e = m.elements;
this.x = e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z + e[ 12 ] * w;
this.y = e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z + e[ 13 ] * w;
this.z = e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z + e[ 14 ] * w;
this.w = e[ 3 ] * x + e[ 7 ] * y + e[ 11 ] * z + e[ 15 ] * w;
return this;
}
divideScalar( scalar ) {
return this.multiplyScalar( 1 / scalar );
}
setAxisAngleFromQuaternion( q ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToAngle/index.htm
// q is assumed to be normalized
this.w = 2 * Math.acos( q.w );
const s = Math.sqrt( 1 - q.w * q.w );
if ( s < 0.0001 ) {
this.x = 1;
this.y = 0;
this.z = 0;
} else {
this.x = q.x / s;
this.y = q.y / s;
this.z = q.z / s;
}
return this;
}
setAxisAngleFromRotationMatrix( m ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToAngle/index.htm
// assumes the upper 3x3 of m is a pure rotation matrix (i.e., unscaled)
let angle, x, y, z; // variables for result
const epsilon = 0.01, // margin to allow for rounding errors
epsilon2 = 0.1, // margin to distinguish between 0 and 180 degrees
te = m.elements,
m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ],
m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ],
m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ];
if ( ( Math.abs( m12 - m21 ) < epsilon ) &&
( Math.abs( m13 - m31 ) < epsilon ) &&
( Math.abs( m23 - m32 ) < epsilon ) ) {
// singularity found
// first check for identity matrix which must have +1 for all terms
// in leading diagonal and zero in other terms
if ( ( Math.abs( m12 + m21 ) < epsilon2 ) &&
( Math.abs( m13 + m31 ) < epsilon2 ) &&
( Math.abs( m23 + m32 ) < epsilon2 ) &&
( Math.abs( m11 + m22 + m33 - 3 ) < epsilon2 ) ) {
// this singularity is identity matrix so angle = 0
this.set( 1, 0, 0, 0 );
return this; // zero angle, arbitrary axis
}
// otherwise this singularity is angle = 180
angle = Math.PI;
const xx = ( m11 + 1 ) / 2;
const yy = ( m22 + 1 ) / 2;
const zz = ( m33 + 1 ) / 2;
const xy = ( m12 + m21 ) / 4;
const xz = ( m13 + m31 ) / 4;
const yz = ( m23 + m32 ) / 4;
if ( ( xx > yy ) && ( xx > zz ) ) {
// m11 is the largest diagonal term
if ( xx < epsilon ) {
x = 0;
y = 0.707106781;
z = 0.707106781;
} else {
x = Math.sqrt( xx );
y = xy / x;
z = xz / x;
}
} else if ( yy > zz ) {
// m22 is the largest diagonal term
if ( yy < epsilon ) {
x = 0.707106781;
y = 0;
z = 0.707106781;
} else {
y = Math.sqrt( yy );
x = xy / y;
z = yz / y;
}
} else {
// m33 is the largest diagonal term so base result on this
if ( zz < epsilon ) {
x = 0.707106781;
y = 0.707106781;
z = 0;
} else {
z = Math.sqrt( zz );
x = xz / z;
y = yz / z;
}
}
this.set( x, y, z, angle );
return this; // return 180 deg rotation
}
// since we have reached this point there are no singularities, so we can handle the general case normally
let s = Math.sqrt( ( m32 - m23 ) * ( m32 - m23 ) +
( m13 - m31 ) * ( m13 - m31 ) +
( m21 - m12 ) * ( m21 - m12 ) ); // used to normalize
if ( Math.abs( s ) < 0.001 ) s = 1;
// prevent divide by zero, should not happen if matrix is orthogonal and should be
// caught by singularity test above, but I've left it in just in case
this.x = ( m32 - m23 ) / s;
this.y = ( m13 - m31 ) / s;
this.z = ( m21 - m12 ) / s;
this.w = Math.acos( ( m11 + m22 + m33 - 1 ) / 2 );
return this;
}
min( v ) {
this.x = Math.min( this.x, v.x );
this.y = Math.min( this.y, v.y );
this.z = Math.min( this.z, v.z );
this.w = Math.min( this.w, v.w );
return this;
}
max( v ) {
this.x = Math.max( this.x, v.x );
this.y = Math.max( this.y, v.y );
this.z = Math.max( this.z, v.z );
this.w = Math.max( this.w, v.w );
return this;
}
clamp( min, max ) {
// assumes min < max, componentwise
this.x = Math.max( min.x, Math.min( max.x, this.x ) );
this.y = Math.max( min.y, Math.min( max.y, this.y ) );
this.z = Math.max( min.z, Math.min( max.z, this.z ) );
this.w = Math.max( min.w, Math.min( max.w, this.w ) );
return this;
}
clampScalar( minVal, maxVal ) {
this.x = Math.max( minVal, Math.min( maxVal, this.x ) );
this.y = Math.max( minVal, Math.min( maxVal, this.y ) );
this.z = Math.max( minVal, Math.min( maxVal, this.z ) );
this.w = Math.max( minVal, Math.min( maxVal, this.w ) );
return this;
}
clampLength( min, max ) {
const length = this.length();
return this.divideScalar( length || 1 ).multiplyScalar( Math.max( min, Math.min( max, length ) ) );
}
floor() {
this.x = Math.floor( this.x );
this.y = Math.floor( this.y );
this.z = Math.floor( this.z );
this.w = Math.floor( this.w );
return this;
}
ceil() {
this.x = Math.ceil( this.x );
this.y = Math.ceil( this.y );
this.z = Math.ceil( this.z );
this.w = Math.ceil( this.w );
return this;
}
round() {
this.x = Math.round( this.x );
this.y = Math.round( this.y );
this.z = Math.round( this.z );
this.w = Math.round( this.w );
return this;
}
roundToZero() {
this.x = ( this.x < 0 ) ? Math.ceil( this.x ) : Math.floor( this.x );
this.y = ( this.y < 0 ) ? Math.ceil( this.y ) : Math.floor( this.y );
this.z = ( this.z < 0 ) ? Math.ceil( this.z ) : Math.floor( this.z );
this.w = ( this.w < 0 ) ? Math.ceil( this.w ) : Math.floor( this.w );
return this;
}
negate() {
this.x = - this.x;
this.y = - this.y;
this.z = - this.z;
this.w = - this.w;
return this;
}
dot( v ) {
return this.x * v.x + this.y * v.y + this.z * v.z + this.w * v.w;
}
lengthSq() {
return this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w;
}
length() {
return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w );
}
manhattanLength() {
return Math.abs( this.x ) + Math.abs( this.y ) + Math.abs( this.z ) + Math.abs( this.w );
}
normalize() {
return this.divideScalar( this.length() || 1 );
}
setLength( length ) {
return this.normalize().multiplyScalar( length );
}
lerp( v, alpha ) {
this.x += ( v.x - this.x ) * alpha;
this.y += ( v.y - this.y ) * alpha;
this.z += ( v.z - this.z ) * alpha;
this.w += ( v.w - this.w ) * alpha;
return this;
}
lerpVectors( v1, v2, alpha ) {
this.x = v1.x + ( v2.x - v1.x ) * alpha;
this.y = v1.y + ( v2.y - v1.y ) * alpha;
this.z = v1.z + ( v2.z - v1.z ) * alpha;
this.w = v1.w + ( v2.w - v1.w ) * alpha;
return this;
}
equals( v ) {
return ( ( v.x === this.x ) && ( v.y === this.y ) && ( v.z === this.z ) && ( v.w === this.w ) );
}
fromArray( array, offset = 0 ) {
this.x = array[ offset ];
this.y = array[ offset + 1 ];
this.z = array[ offset + 2 ];
this.w = array[ offset + 3 ];
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this.x;
array[ offset + 1 ] = this.y;
array[ offset + 2 ] = this.z;
array[ offset + 3 ] = this.w;
return array;
}
fromBufferAttribute( attribute, index, offset ) {
if ( offset !== undefined ) {
console.warn( 'THREE.Vector4: offset has been removed from .fromBufferAttribute().' );
}
this.x = attribute.getX( index );
this.y = attribute.getY( index );
this.z = attribute.getZ( index );
this.w = attribute.getW( index );
return this;
}
random() {
this.x = Math.random();
this.y = Math.random();
this.z = Math.random();
this.w = Math.random();
return this;
}
}
Vector4.prototype.isVector4 = true;
/*
In options, we can specify:
* Texture parameters for an auto-generated target texture
* depthBuffer/stencilBuffer: Booleans to indicate if we should generate these buffers
*/
class WebGLRenderTarget extends EventDispatcher {
constructor( width, height, options ) {
super();
this.width = width;
this.height = height;
this.depth = 1;
this.scissor = new Vector4( 0, 0, width, height );
this.scissorTest = false;
this.viewport = new Vector4( 0, 0, width, height );
options = options || {};
this.texture = new Texture$1( undefined, options.mapping, options.wrapS, options.wrapT, options.magFilter, options.minFilter, options.format, options.type, options.anisotropy, options.encoding );
this.texture.image = {};
this.texture.image.width = width;
this.texture.image.height = height;
this.texture.image.depth = 1;
this.texture.generateMipmaps = options.generateMipmaps !== undefined ? options.generateMipmaps : false;
this.texture.minFilter = options.minFilter !== undefined ? options.minFilter : LinearFilter;
this.depthBuffer = options.depthBuffer !== undefined ? options.depthBuffer : true;
this.stencilBuffer = options.stencilBuffer !== undefined ? options.stencilBuffer : false;
this.depthTexture = options.depthTexture !== undefined ? options.depthTexture : null;
}
setTexture( texture ) {
texture.image = {
width: this.width,
height: this.height,
depth: this.depth
};
this.texture = texture;
}
setSize( width, height, depth = 1 ) {
if ( this.width !== width || this.height !== height || this.depth !== depth ) {
this.width = width;
this.height = height;
this.depth = depth;
this.texture.image.width = width;
this.texture.image.height = height;
this.texture.image.depth = depth;
this.dispose();
}
this.viewport.set( 0, 0, width, height );
this.scissor.set( 0, 0, width, height );
}
clone() {
return new this.constructor().copy( this );
}
copy( source ) {
this.width = source.width;
this.height = source.height;
this.depth = source.depth;
this.viewport.copy( source.viewport );
this.texture = source.texture.clone();
this.depthBuffer = source.depthBuffer;
this.stencilBuffer = source.stencilBuffer;
this.depthTexture = source.depthTexture;
return this;
}
dispose() {
this.dispatchEvent( { type: 'dispose' } );
}
}
WebGLRenderTarget.prototype.isWebGLRenderTarget = true;
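/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here),
 * showing the options object described in the comment above the class:
 *
 *   const rt = new WebGLRenderTarget( 512, 512, {
 *     format: RGBAFormat,
 *     type: UnsignedByteType,
 *     minFilter: LinearFilter,
 *     depthBuffer: true,
 *     stencilBuffer: false
 *   } );
 *   rt.setSize( 1024, 1024 ); // dispatches 'dispose' so GPU resources get re-created at the new size
 */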
class Quaternion {
constructor( x = 0, y = 0, z = 0, w = 1 ) {
this._x = x;
this._y = y;
this._z = z;
this._w = w;
}
static slerp( qa, qb, qm, t ) {
console.warn( 'THREE.Quaternion: Static .slerp() has been deprecated. Use qm.slerpQuaternions( qa, qb, t ) instead.' );
return qm.slerpQuaternions( qa, qb, t );
}
static slerpFlat( dst, dstOffset, src0, srcOffset0, src1, srcOffset1, t ) {
// fuzz-free, array-based Quaternion SLERP operation
let x0 = src0[ srcOffset0 + 0 ],
y0 = src0[ srcOffset0 + 1 ],
z0 = src0[ srcOffset0 + 2 ],
w0 = src0[ srcOffset0 + 3 ];
const x1 = src1[ srcOffset1 + 0 ],
y1 = src1[ srcOffset1 + 1 ],
z1 = src1[ srcOffset1 + 2 ],
w1 = src1[ srcOffset1 + 3 ];
if ( t === 0 ) {
dst[ dstOffset + 0 ] = x0;
dst[ dstOffset + 1 ] = y0;
dst[ dstOffset + 2 ] = z0;
dst[ dstOffset + 3 ] = w0;
return;
}
if ( t === 1 ) {
dst[ dstOffset + 0 ] = x1;
dst[ dstOffset + 1 ] = y1;
dst[ dstOffset + 2 ] = z1;
dst[ dstOffset + 3 ] = w1;
return;
}
if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) {
let s = 1 - t;
const cos = x0 * x1 + y0 * y1 + z0 * z1 + w0 * w1,
dir = ( cos >= 0 ? 1 : - 1 ),
sqrSin = 1 - cos * cos;
// Skip the Slerp for tiny steps to avoid numeric problems:
if ( sqrSin > Number.EPSILON ) {
const sin = Math.sqrt( sqrSin ),
len = Math.atan2( sin, cos * dir );
s = Math.sin( s * len ) / sin;
t = Math.sin( t * len ) / sin;
}
const tDir = t * dir;
x0 = x0 * s + x1 * tDir;
y0 = y0 * s + y1 * tDir;
z0 = z0 * s + z1 * tDir;
w0 = w0 * s + w1 * tDir;
// Normalize in case we just did a lerp:
if ( s === 1 - t ) {
const f = 1 / Math.sqrt( x0 * x0 + y0 * y0 + z0 * z0 + w0 * w0 );
x0 *= f;
y0 *= f;
z0 *= f;
w0 *= f;
}
}
dst[ dstOffset ] = x0;
dst[ dstOffset + 1 ] = y0;
dst[ dstOffset + 2 ] = z0;
dst[ dstOffset + 3 ] = w0;
}
static multiplyQuaternionsFlat( dst, dstOffset, src0, srcOffset0, src1, srcOffset1 ) {
const x0 = src0[ srcOffset0 ];
const y0 = src0[ srcOffset0 + 1 ];
const z0 = src0[ srcOffset0 + 2 ];
const w0 = src0[ srcOffset0 + 3 ];
const x1 = src1[ srcOffset1 ];
const y1 = src1[ srcOffset1 + 1 ];
const z1 = src1[ srcOffset1 + 2 ];
const w1 = src1[ srcOffset1 + 3 ];
dst[ dstOffset ] = x0 * w1 + w0 * x1 + y0 * z1 - z0 * y1;
dst[ dstOffset + 1 ] = y0 * w1 + w0 * y1 + z0 * x1 - x0 * z1;
dst[ dstOffset + 2 ] = z0 * w1 + w0 * z1 + x0 * y1 - y0 * x1;
dst[ dstOffset + 3 ] = w0 * w1 - x0 * x1 - y0 * y1 - z0 * z1;
return dst;
}
get x() {
return this._x;
}
set x( value ) {
this._x = value;
this._onChangeCallback();
}
get y() {
return this._y;
}
set y( value ) {
this._y = value;
this._onChangeCallback();
}
get z() {
return this._z;
}
set z( value ) {
this._z = value;
this._onChangeCallback();
}
get w() {
return this._w;
}
set w( value ) {
this._w = value;
this._onChangeCallback();
}
set( x, y, z, w ) {
this._x = x;
this._y = y;
this._z = z;
this._w = w;
this._onChangeCallback();
return this;
}
clone() {
return new this.constructor( this._x, this._y, this._z, this._w );
}
copy( quaternion ) {
this._x = quaternion.x;
this._y = quaternion.y;
this._z = quaternion.z;
this._w = quaternion.w;
this._onChangeCallback();
return this;
}
setFromEuler( euler, update ) {
if ( ! ( euler && euler.isEuler ) ) {
throw new Error( 'THREE.Quaternion: .setFromEuler() now expects an Euler rotation rather than a Vector3 and order.' );
}
const x = euler._x, y = euler._y, z = euler._z, order = euler._order;
// http://www.mathworks.com/matlabcentral/fileexchange/
// 20696-function-to-convert-between-dcm-euler-angles-quaternions-and-euler-vectors/
// content/SpinCalc.m
const cos = Math.cos;
const sin = Math.sin;
const c1 = cos( x / 2 );
const c2 = cos( y / 2 );
const c3 = cos( z / 2 );
const s1 = sin( x / 2 );
const s2 = sin( y / 2 );
const s3 = sin( z / 2 );
switch ( order ) {
case 'XYZ':
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
break;
case 'YXZ':
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
break;
case 'ZXY':
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
break;
case 'ZYX':
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
break;
case 'YZX':
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
break;
case 'XZY':
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
break;
default:
console.warn( 'THREE.Quaternion: .setFromEuler() encountered an unknown order: ' + order );
}
if ( update !== false ) this._onChangeCallback();
return this;
}
setFromAxisAngle( axis, angle ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
// assumes axis is normalized
const halfAngle = angle / 2, s = Math.sin( halfAngle );
this._x = axis.x * s;
this._y = axis.y * s;
this._z = axis.z * s;
this._w = Math.cos( halfAngle );
this._onChangeCallback();
return this;
}
setFromRotationMatrix( m ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
// assumes the upper 3x3 of m is a pure rotation matrix (i.e., unscaled)
const te = m.elements,
m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ],
m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ],
m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ],
trace = m11 + m22 + m33;
if ( trace > 0 ) {
const s = 0.5 / Math.sqrt( trace + 1.0 );
this._w = 0.25 / s;
this._x = ( m32 - m23 ) * s;
this._y = ( m13 - m31 ) * s;
this._z = ( m21 - m12 ) * s;
} else if ( m11 > m22 && m11 > m33 ) {
const s = 2.0 * Math.sqrt( 1.0 + m11 - m22 - m33 );
this._w = ( m32 - m23 ) / s;
this._x = 0.25 * s;
this._y = ( m12 + m21 ) / s;
this._z = ( m13 + m31 ) / s;
} else if ( m22 > m33 ) {
const s = 2.0 * Math.sqrt( 1.0 + m22 - m11 - m33 );
this._w = ( m13 - m31 ) / s;
this._x = ( m12 + m21 ) / s;
this._y = 0.25 * s;
this._z = ( m23 + m32 ) / s;
} else {
const s = 2.0 * Math.sqrt( 1.0 + m33 - m11 - m22 );
this._w = ( m21 - m12 ) / s;
this._x = ( m13 + m31 ) / s;
this._y = ( m23 + m32 ) / s;
this._z = 0.25 * s;
}
this._onChangeCallback();
return this;
}
setFromUnitVectors( vFrom, vTo ) {
// assumes direction vectors vFrom and vTo are normalized
let r = vFrom.dot( vTo ) + 1;
if ( r < Number.EPSILON ) {
// vFrom and vTo point in opposite directions
r = 0;
if ( Math.abs( vFrom.x ) > Math.abs( vFrom.z ) ) {
this._x = - vFrom.y;
this._y = vFrom.x;
this._z = 0;
this._w = r;
} else {
this._x = 0;
this._y = - vFrom.z;
this._z = vFrom.y;
this._w = r;
}
} else {
// crossVectors( vFrom, vTo ); // inlined to avoid cyclic dependency on Vector3
this._x = vFrom.y * vTo.z - vFrom.z * vTo.y;
this._y = vFrom.z * vTo.x - vFrom.x * vTo.z;
this._z = vFrom.x * vTo.y - vFrom.y * vTo.x;
this._w = r;
}
return this.normalize();
}
angleTo( q ) {
return 2 * Math.acos( Math.abs( clamp$1( this.dot( q ), - 1, 1 ) ) );
}
rotateTowards( q, step ) {
const angle = this.angleTo( q );
if ( angle === 0 ) return this;
const t = Math.min( 1, step / angle );
this.slerp( q, t );
return this;
}
identity() {
return this.set( 0, 0, 0, 1 );
}
invert() {
// quaternion is assumed to have unit length
return this.conjugate();
}
conjugate() {
this._x *= - 1;
this._y *= - 1;
this._z *= - 1;
this._onChangeCallback();
return this;
}
dot( v ) {
return this._x * v._x + this._y * v._y + this._z * v._z + this._w * v._w;
}
lengthSq() {
return this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w;
}
length() {
return Math.sqrt( this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w );
}
normalize() {
let l = this.length();
if ( l === 0 ) {
this._x = 0;
this._y = 0;
this._z = 0;
this._w = 1;
} else {
l = 1 / l;
this._x = this._x * l;
this._y = this._y * l;
this._z = this._z * l;
this._w = this._w * l;
}
this._onChangeCallback();
return this;
}
multiply( q, p ) {
if ( p !== undefined ) {
console.warn( 'THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead.' );
return this.multiplyQuaternions( q, p );
}
return this.multiplyQuaternions( this, q );
}
premultiply( q ) {
return this.multiplyQuaternions( q, this );
}
multiplyQuaternions( a, b ) {
// from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm
const qax = a._x, qay = a._y, qaz = a._z, qaw = a._w;
const qbx = b._x, qby = b._y, qbz = b._z, qbw = b._w;
this._x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby;
this._y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz;
this._z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx;
this._w = qaw * qbw - qax * qbx - qay * qby - qaz * qbz;
this._onChangeCallback();
return this;
}
slerp( qb, t ) {
if ( t === 0 ) return this;
if ( t === 1 ) return this.copy( qb );
const x = this._x, y = this._y, z = this._z, w = this._w;
// http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/
let cosHalfTheta = w * qb._w + x * qb._x + y * qb._y + z * qb._z;
if ( cosHalfTheta < 0 ) {
this._w = - qb._w;
this._x = - qb._x;
this._y = - qb._y;
this._z = - qb._z;
cosHalfTheta = - cosHalfTheta;
} else {
this.copy( qb );
}
if ( cosHalfTheta >= 1.0 ) {
this._w = w;
this._x = x;
this._y = y;
this._z = z;
return this;
}
const sqrSinHalfTheta = 1.0 - cosHalfTheta * cosHalfTheta;
if ( sqrSinHalfTheta <= Number.EPSILON ) {
const s = 1 - t;
this._w = s * w + t * this._w;
this._x = s * x + t * this._x;
this._y = s * y + t * this._y;
this._z = s * z + t * this._z;
this.normalize();
this._onChangeCallback();
return this;
}
const sinHalfTheta = Math.sqrt( sqrSinHalfTheta );
const halfTheta = Math.atan2( sinHalfTheta, cosHalfTheta );
const ratioA = Math.sin( ( 1 - t ) * halfTheta ) / sinHalfTheta,
ratioB = Math.sin( t * halfTheta ) / sinHalfTheta;
this._w = ( w * ratioA + this._w * ratioB );
this._x = ( x * ratioA + this._x * ratioB );
this._y = ( y * ratioA + this._y * ratioB );
this._z = ( z * ratioA + this._z * ratioB );
this._onChangeCallback();
return this;
}
slerpQuaternions( qa, qb, t ) {
this.copy( qa ).slerp( qb, t );
}
equals( quaternion ) {
return ( quaternion._x === this._x ) && ( quaternion._y === this._y ) && ( quaternion._z === this._z ) && ( quaternion._w === this._w );
}
fromArray( array, offset = 0 ) {
this._x = array[ offset ];
this._y = array[ offset + 1 ];
this._z = array[ offset + 2 ];
this._w = array[ offset + 3 ];
this._onChangeCallback();
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this._x;
array[ offset + 1 ] = this._y;
array[ offset + 2 ] = this._z;
array[ offset + 3 ] = this._w;
return array;
}
fromBufferAttribute( attribute, index ) {
this._x = attribute.getX( index );
this._y = attribute.getY( index );
this._z = attribute.getZ( index );
this._w = attribute.getW( index );
return this;
}
_onChange( callback ) {
this._onChangeCallback = callback;
return this;
}
_onChangeCallback() {}
}
Quaternion.prototype.isQuaternion = true;
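/*
 * Usage sketch (illustrative only; not part of the upstream three.js sources bundled here).
 * setFromAxisAngle() assumes a normalized axis (Vector3 is defined just below); slerp()
 * interpolates on the unit sphere, so the midpoint of a 90 deg turn is a 45 deg turn:
 *
 *   const axis = new Vector3( 0, 1, 0 );
 *   const qA = new Quaternion().setFromAxisAngle( axis, 0 );
 *   const qB = new Quaternion().setFromAxisAngle( axis, Math.PI / 2 );
 *   const qMid = qA.clone().slerp( qB, 0.5 ); // 45 deg rotation about +Y
 */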
class Vector3 {
constructor( x = 0, y = 0, z = 0 ) {
this.x = x;
this.y = y;
this.z = z;
}
set( x, y, z ) {
if ( z === undefined ) z = this.z; // sprite.scale.set(x,y)
this.x = x;
this.y = y;
this.z = z;
return this;
}
setScalar( scalar ) {
this.x = scalar;
this.y = scalar;
this.z = scalar;
return this;
}
setX( x ) {
this.x = x;
return this;
}
setY( y ) {
this.y = y;
return this;
}
setZ( z ) {
this.z = z;
return this;
}
setComponent( index, value ) {
switch ( index ) {
case 0: this.x = value; break;
case 1: this.y = value; break;
case 2: this.z = value; break;
default: throw new Error( 'index is out of range: ' + index );
}
return this;
}
getComponent( index ) {
switch ( index ) {
case 0: return this.x;
case 1: return this.y;
case 2: return this.z;
default: throw new Error( 'index is out of range: ' + index );
}
}
clone() {
return new this.constructor( this.x, this.y, this.z );
}
copy( v ) {
this.x = v.x;
this.y = v.y;
this.z = v.z;
return this;
}
add( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead.' );
return this.addVectors( v, w );
}
this.x += v.x;
this.y += v.y;
this.z += v.z;
return this;
}
addScalar( s ) {
this.x += s;
this.y += s;
this.z += s;
return this;
}
addVectors( a, b ) {
this.x = a.x + b.x;
this.y = a.y + b.y;
this.z = a.z + b.z;
return this;
}
addScaledVector( v, s ) {
this.x += v.x * s;
this.y += v.y * s;
this.z += v.z * s;
return this;
}
sub( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead.' );
return this.subVectors( v, w );
}
this.x -= v.x;
this.y -= v.y;
this.z -= v.z;
return this;
}
subScalar( s ) {
this.x -= s;
this.y -= s;
this.z -= s;
return this;
}
subVectors( a, b ) {
this.x = a.x - b.x;
this.y = a.y - b.y;
this.z = a.z - b.z;
return this;
}
multiply( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector3: .multiply() now only accepts one argument. Use .multiplyVectors( a, b ) instead.' );
return this.multiplyVectors( v, w );
}
this.x *= v.x;
this.y *= v.y;
this.z *= v.z;
return this;
}
multiplyScalar( scalar ) {
this.x *= scalar;
this.y *= scalar;
this.z *= scalar;
return this;
}
multiplyVectors( a, b ) {
this.x = a.x * b.x;
this.y = a.y * b.y;
this.z = a.z * b.z;
return this;
}
applyEuler( euler ) {
if ( ! ( euler && euler.isEuler ) ) {
console.error( 'THREE.Vector3: .applyEuler() now expects an Euler rotation rather than a Vector3 and order.' );
}
return this.applyQuaternion( _quaternion$4.setFromEuler( euler ) );
}
applyAxisAngle( axis, angle ) {
return this.applyQuaternion( _quaternion$4.setFromAxisAngle( axis, angle ) );
}
applyMatrix3( m ) {
const x = this.x, y = this.y, z = this.z;
const e = m.elements;
this.x = e[ 0 ] * x + e[ 3 ] * y + e[ 6 ] * z;
this.y = e[ 1 ] * x + e[ 4 ] * y + e[ 7 ] * z;
this.z = e[ 2 ] * x + e[ 5 ] * y + e[ 8 ] * z;
return this;
}
applyNormalMatrix( m ) {
return this.applyMatrix3( m ).normalize();
}
applyMatrix4( m ) {
const x = this.x, y = this.y, z = this.z;
const e = m.elements;
const w = 1 / ( e[ 3 ] * x + e[ 7 ] * y + e[ 11 ] * z + e[ 15 ] );
this.x = ( e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z + e[ 12 ] ) * w;
this.y = ( e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z + e[ 13 ] ) * w;
this.z = ( e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z + e[ 14 ] ) * w;
return this;
}
applyQuaternion( q ) {
const x = this.x, y = this.y, z = this.z;
const qx = q.x, qy = q.y, qz = q.z, qw = q.w;
// calculate quat * vector
const ix = qw * x + qy * z - qz * y;
const iy = qw * y + qz * x - qx * z;
const iz = qw * z + qx * y - qy * x;
const iw = - qx * x - qy * y - qz * z;
// calculate result * inverse quat
this.x = ix * qw + iw * - qx + iy * - qz - iz * - qy;
this.y = iy * qw + iw * - qy + iz * - qx - ix * - qz;
this.z = iz * qw + iw * - qz + ix * - qy - iy * - qx;
return this;
}
project( camera ) {
return this.applyMatrix4( camera.matrixWorldInverse ).applyMatrix4( camera.projectionMatrix );
}
unproject( camera ) {
return this.applyMatrix4( camera.projectionMatrixInverse ).applyMatrix4( camera.matrixWorld );
}
transformDirection( m ) {
// input: THREE.Matrix4 affine matrix
// vector interpreted as a direction
const x = this.x, y = this.y, z = this.z;
const e = m.elements;
this.x = e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z;
this.y = e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z;
this.z = e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z;
return this.normalize();
}
divide( v ) {
this.x /= v.x;
this.y /= v.y;
this.z /= v.z;
return this;
}
divideScalar( scalar ) {
return this.multiplyScalar( 1 / scalar );
}
min( v ) {
this.x = Math.min( this.x, v.x );
this.y = Math.min( this.y, v.y );
this.z = Math.min( this.z, v.z );
return this;
}
max( v ) {
this.x = Math.max( this.x, v.x );
this.y = Math.max( this.y, v.y );
this.z = Math.max( this.z, v.z );
return this;
}
clamp( min, max ) {
// assumes min < max, componentwise
this.x = Math.max( min.x, Math.min( max.x, this.x ) );
this.y = Math.max( min.y, Math.min( max.y, this.y ) );
this.z = Math.max( min.z, Math.min( max.z, this.z ) );
return this;
}
clampScalar( minVal, maxVal ) {
this.x = Math.max( minVal, Math.min( maxVal, this.x ) );
this.y = Math.max( minVal, Math.min( maxVal, this.y ) );
this.z = Math.max( minVal, Math.min( maxVal, this.z ) );
return this;
}
clampLength( min, max ) {
const length = this.length();
return this.divideScalar( length || 1 ).multiplyScalar( Math.max( min, Math.min( max, length ) ) );
}
floor() {
this.x = Math.floor( this.x );
this.y = Math.floor( this.y );
this.z = Math.floor( this.z );
return this;
}
ceil() {
this.x = Math.ceil( this.x );
this.y = Math.ceil( this.y );
this.z = Math.ceil( this.z );
return this;
}
round() {
this.x = Math.round( this.x );
this.y = Math.round( this.y );
this.z = Math.round( this.z );
return this;
}
roundToZero() {
this.x = ( this.x < 0 ) ? Math.ceil( this.x ) : Math.floor( this.x );
this.y = ( this.y < 0 ) ? Math.ceil( this.y ) : Math.floor( this.y );
this.z = ( this.z < 0 ) ? Math.ceil( this.z ) : Math.floor( this.z );
return this;
}
negate() {
this.x = - this.x;
this.y = - this.y;
this.z = - this.z;
return this;
}
dot( v ) {
return this.x * v.x + this.y * v.y + this.z * v.z;
}
// TODO lengthSquared?
lengthSq() {
return this.x * this.x + this.y * this.y + this.z * this.z;
}
length() {
return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z );
}
manhattanLength() {
return Math.abs( this.x ) + Math.abs( this.y ) + Math.abs( this.z );
}
normalize() {
return this.divideScalar( this.length() || 1 );
}
setLength( length ) {
return this.normalize().multiplyScalar( length );
}
lerp( v, alpha ) {
this.x += ( v.x - this.x ) * alpha;
this.y += ( v.y - this.y ) * alpha;
this.z += ( v.z - this.z ) * alpha;
return this;
}
lerpVectors( v1, v2, alpha ) {
this.x = v1.x + ( v2.x - v1.x ) * alpha;
this.y = v1.y + ( v2.y - v1.y ) * alpha;
this.z = v1.z + ( v2.z - v1.z ) * alpha;
return this;
}
cross( v, w ) {
if ( w !== undefined ) {
console.warn( 'THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead.' );
return this.crossVectors( v, w );
}
return this.crossVectors( this, v );
}
crossVectors( a, b ) {
const ax = a.x, ay = a.y, az = a.z;
const bx = b.x, by = b.y, bz = b.z;
this.x = ay * bz - az * by;
this.y = az * bx - ax * bz;
this.z = ax * by - ay * bx;
return this;
}
projectOnVector( v ) {
const denominator = v.lengthSq();
if ( denominator === 0 ) return this.set( 0, 0, 0 );
const scalar = v.dot( this ) / denominator;
return this.copy( v ).multiplyScalar( scalar );
}
projectOnPlane( planeNormal ) {
_vector$c.copy( this ).projectOnVector( planeNormal );
return this.sub( _vector$c );
}
reflect( normal ) {
// reflect incident vector off plane orthogonal to normal
// normal is assumed to have unit length
return this.sub( _vector$c.copy( normal ).multiplyScalar( 2 * this.dot( normal ) ) );
}
angleTo( v ) {
const denominator = Math.sqrt( this.lengthSq() * v.lengthSq() );
if ( denominator === 0 ) return Math.PI / 2;
const theta = this.dot( v ) / denominator;
// clamp, to handle numerical problems
return Math.acos( clamp$1( theta, - 1, 1 ) );
}
distanceTo( v ) {
return Math.sqrt( this.distanceToSquared( v ) );
}
distanceToSquared( v ) {
const dx = this.x - v.x, dy = this.y - v.y, dz = this.z - v.z;
return dx * dx + dy * dy + dz * dz;
}
manhattanDistanceTo( v ) {
return Math.abs( this.x - v.x ) + Math.abs( this.y - v.y ) + Math.abs( this.z - v.z );
}
setFromSpherical( s ) {
return this.setFromSphericalCoords( s.radius, s.phi, s.theta );
}
setFromSphericalCoords( radius, phi, theta ) {
const sinPhiRadius = Math.sin( phi ) * radius;
this.x = sinPhiRadius * Math.sin( theta );
this.y = Math.cos( phi ) * radius;
this.z = sinPhiRadius * Math.cos( theta );
return this;
}
setFromCylindrical( c ) {
return this.setFromCylindricalCoords( c.radius, c.theta, c.y );
}
setFromCylindricalCoords( radius, theta, y ) {
this.x = radius * Math.sin( theta );
this.y = y;
this.z = radius * Math.cos( theta );
return this;
}
setFromMatrixPosition( m ) {
const e = m.elements;
this.x = e[ 12 ];
this.y = e[ 13 ];
this.z = e[ 14 ];
return this;
}
setFromMatrixScale( m ) {
const sx = this.setFromMatrixColumn( m, 0 ).length();
const sy = this.setFromMatrixColumn( m, 1 ).length();
const sz = this.setFromMatrixColumn( m, 2 ).length();
this.x = sx;
this.y = sy;
this.z = sz;
return this;
}
setFromMatrixColumn( m, index ) {
return this.fromArray( m.elements, index * 4 );
}
setFromMatrix3Column( m, index ) {
return this.fromArray( m.elements, index * 3 );
}
equals( v ) {
return ( ( v.x === this.x ) && ( v.y === this.y ) && ( v.z === this.z ) );
}
fromArray( array, offset = 0 ) {
this.x = array[ offset ];
this.y = array[ offset + 1 ];
this.z = array[ offset + 2 ];
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this.x;
array[ offset + 1 ] = this.y;
array[ offset + 2 ] = this.z;
return array;
}
fromBufferAttribute( attribute, index, offset ) {
if ( offset !== undefined ) {
console.warn( 'THREE.Vector3: offset has been removed from .fromBufferAttribute().' );
}
this.x = attribute.getX( index );
this.y = attribute.getY( index );
this.z = attribute.getZ( index );
return this;
}
random() {
this.x = Math.random();
this.y = Math.random();
this.z = Math.random();
return this;
}
}
Vector3.prototype.isVector3 = true;
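/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * chaining Vector3 operations. Variable names are hypothetical.
 *
 *   const a = new Vector3( 1, 0, 0 );
 *   const b = new Vector3( 0, 1, 0 );
 *   const n = new Vector3().crossVectors( a, b );      // ( 0, 0, 1 )
 *   const mid = new Vector3().lerpVectors( a, b, 0.5 ); // ( 0.5, 0.5, 0 )
 *   mid.normalize().multiplyScalar( 2 );                // length is now 2
 */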
const _vector$c = /*@__PURE__*/ new Vector3();
const _quaternion$4 = /*@__PURE__*/ new Quaternion();
class Box3 {
constructor( min = new Vector3( + Infinity, + Infinity, + Infinity ), max = new Vector3( - Infinity, - Infinity, - Infinity ) ) {
this.min = min;
this.max = max;
}
set( min, max ) {
this.min.copy( min );
this.max.copy( max );
return this;
}
setFromArray( array ) {
let minX = + Infinity;
let minY = + Infinity;
let minZ = + Infinity;
let maxX = - Infinity;
let maxY = - Infinity;
let maxZ = - Infinity;
for ( let i = 0, l = array.length; i < l; i += 3 ) {
const x = array[ i ];
const y = array[ i + 1 ];
const z = array[ i + 2 ];
if ( x < minX ) minX = x;
if ( y < minY ) minY = y;
if ( z < minZ ) minZ = z;
if ( x > maxX ) maxX = x;
if ( y > maxY ) maxY = y;
if ( z > maxZ ) maxZ = z;
}
this.min.set( minX, minY, minZ );
this.max.set( maxX, maxY, maxZ );
return this;
}
setFromBufferAttribute( attribute ) {
let minX = + Infinity;
let minY = + Infinity;
let minZ = + Infinity;
let maxX = - Infinity;
let maxY = - Infinity;
let maxZ = - Infinity;
for ( let i = 0, l = attribute.count; i < l; i ++ ) {
const x = attribute.getX( i );
const y = attribute.getY( i );
const z = attribute.getZ( i );
if ( x < minX ) minX = x;
if ( y < minY ) minY = y;
if ( z < minZ ) minZ = z;
if ( x > maxX ) maxX = x;
if ( y > maxY ) maxY = y;
if ( z > maxZ ) maxZ = z;
}
this.min.set( minX, minY, minZ );
this.max.set( maxX, maxY, maxZ );
return this;
}
setFromPoints( points ) {
this.makeEmpty();
for ( let i = 0, il = points.length; i < il; i ++ ) {
this.expandByPoint( points[ i ] );
}
return this;
}
setFromCenterAndSize( center, size ) {
const halfSize = _vector$b.copy( size ).multiplyScalar( 0.5 );
this.min.copy( center ).sub( halfSize );
this.max.copy( center ).add( halfSize );
return this;
}
setFromObject( object ) {
this.makeEmpty();
return this.expandByObject( object );
}
clone() {
return new this.constructor().copy( this );
}
copy( box ) {
this.min.copy( box.min );
this.max.copy( box.max );
return this;
}
makeEmpty() {
this.min.x = this.min.y = this.min.z = + Infinity;
this.max.x = this.max.y = this.max.z = - Infinity;
return this;
}
isEmpty() {
// this is a more robust check for emptiness than ( volume <= 0 ) because the volume can be positive when two axes have negative extents
return ( this.max.x < this.min.x ) || ( this.max.y < this.min.y ) || ( this.max.z < this.min.z );
}
getCenter( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Box3: .getCenter() target is now required' );
target = new Vector3();
}
return this.isEmpty() ? target.set( 0, 0, 0 ) : target.addVectors( this.min, this.max ).multiplyScalar( 0.5 );
}
getSize( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Box3: .getSize() target is now required' );
target = new Vector3();
}
return this.isEmpty() ? target.set( 0, 0, 0 ) : target.subVectors( this.max, this.min );
}
expandByPoint( point ) {
this.min.min( point );
this.max.max( point );
return this;
}
expandByVector( vector ) {
this.min.sub( vector );
this.max.add( vector );
return this;
}
expandByScalar( scalar ) {
this.min.addScalar( - scalar );
this.max.addScalar( scalar );
return this;
}
expandByObject( object ) {
// Computes the world-axis-aligned bounding box of an object (including its children),
// accounting for both the object's and its children's world transforms
object.updateWorldMatrix( false, false );
const geometry = object.geometry;
if ( geometry !== undefined ) {
if ( geometry.boundingBox === null ) {
geometry.computeBoundingBox();
}
_box$3.copy( geometry.boundingBox );
_box$3.applyMatrix4( object.matrixWorld );
this.union( _box$3 );
}
const children = object.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
this.expandByObject( children[ i ] );
}
return this;
}
containsPoint( point ) {
return point.x < this.min.x || point.x > this.max.x ||
point.y < this.min.y || point.y > this.max.y ||
point.z < this.min.z || point.z > this.max.z ? false : true;
}
containsBox( box ) {
return this.min.x <= box.min.x && box.max.x <= this.max.x &&
this.min.y <= box.min.y && box.max.y <= this.max.y &&
this.min.z <= box.min.z && box.max.z <= this.max.z;
}
getParameter( point, target ) {
// This can potentially have a divide by zero if the box
// has a size dimension of 0.
if ( target === undefined ) {
console.warn( 'THREE.Box3: .getParameter() target is now required' );
target = new Vector3();
}
return target.set(
( point.x - this.min.x ) / ( this.max.x - this.min.x ),
( point.y - this.min.y ) / ( this.max.y - this.min.y ),
( point.z - this.min.z ) / ( this.max.z - this.min.z )
);
}
intersectsBox( box ) {
// using 6 splitting planes to rule out intersections.
return box.max.x < this.min.x || box.min.x > this.max.x ||
box.max.y < this.min.y || box.min.y > this.max.y ||
box.max.z < this.min.z || box.min.z > this.max.z ? false : true;
}
intersectsSphere( sphere ) {
// Find the point on the AABB closest to the sphere center.
this.clampPoint( sphere.center, _vector$b );
// If that point is inside the sphere, the AABB and sphere intersect.
return _vector$b.distanceToSquared( sphere.center ) <= ( sphere.radius * sphere.radius );
}
intersectsPlane( plane ) {
// We compute the minimum and maximum dot product values. If those values
// are on the same side (back or front) of the plane, then there is no intersection.
let min, max;
if ( plane.normal.x > 0 ) {
min = plane.normal.x * this.min.x;
max = plane.normal.x * this.max.x;
} else {
min = plane.normal.x * this.max.x;
max = plane.normal.x * this.min.x;
}
if ( plane.normal.y > 0 ) {
min += plane.normal.y * this.min.y;
max += plane.normal.y * this.max.y;
} else {
min += plane.normal.y * this.max.y;
max += plane.normal.y * this.min.y;
}
if ( plane.normal.z > 0 ) {
min += plane.normal.z * this.min.z;
max += plane.normal.z * this.max.z;
} else {
min += plane.normal.z * this.max.z;
max += plane.normal.z * this.min.z;
}
return ( min <= - plane.constant && max >= - plane.constant );
}
intersectsTriangle( triangle ) {
if ( this.isEmpty() ) {
return false;
}
// compute box center and extents
this.getCenter( _center );
_extents.subVectors( this.max, _center );
// translate triangle to aabb origin
_v0$2.subVectors( triangle.a, _center );
_v1$7.subVectors( triangle.b, _center );
_v2$3.subVectors( triangle.c, _center );
// compute edge vectors for triangle
_f0.subVectors( _v1$7, _v0$2 );
_f1.subVectors( _v2$3, _v1$7 );
_f2.subVectors( _v0$2, _v2$3 );
// test against axes that are given by cross product combinations of the edges of the triangle and the edges of the aabb
// make an axis testing of each of the 3 sides of the aabb against each of the 3 sides of the triangle = 9 axis of separation
// axis_ij = u_i x f_j (u0, u1, u2 = face normals of aabb = x,y,z axes vectors since aabb is axis aligned)
let axes = [
0, - _f0.z, _f0.y, 0, - _f1.z, _f1.y, 0, - _f2.z, _f2.y,
_f0.z, 0, - _f0.x, _f1.z, 0, - _f1.x, _f2.z, 0, - _f2.x,
- _f0.y, _f0.x, 0, - _f1.y, _f1.x, 0, - _f2.y, _f2.x, 0
];
if ( ! satForAxes( axes, _v0$2, _v1$7, _v2$3, _extents ) ) {
return false;
}
// test 3 face normals from the aabb
axes = [ 1, 0, 0, 0, 1, 0, 0, 0, 1 ];
if ( ! satForAxes( axes, _v0$2, _v1$7, _v2$3, _extents ) ) {
return false;
}
// finally testing the face normal of the triangle
// use already existing triangle edge vectors here
_triangleNormal.crossVectors( _f0, _f1 );
axes = [ _triangleNormal.x, _triangleNormal.y, _triangleNormal.z ];
return satForAxes( axes, _v0$2, _v1$7, _v2$3, _extents );
}
clampPoint( point, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Box3: .clampPoint() target is now required' );
target = new Vector3();
}
return target.copy( point ).clamp( this.min, this.max );
}
distanceToPoint( point ) {
const clampedPoint = _vector$b.copy( point ).clamp( this.min, this.max );
return clampedPoint.sub( point ).length();
}
getBoundingSphere( target ) {
if ( target === undefined ) {
console.error( 'THREE.Box3: .getBoundingSphere() target is now required' );
//target = new Sphere(); // removed to avoid cyclic dependency
}
this.getCenter( target.center );
target.radius = this.getSize( _vector$b ).length() * 0.5;
return target;
}
intersect( box ) {
this.min.max( box.min );
this.max.min( box.max );
// ensure that if there is no overlap, the result is fully empty, not slightly empty with non -Infinity/+Infinity values that will cause subsequent intersection tests to erroneously return valid results.
if ( this.isEmpty() ) this.makeEmpty();
return this;
}
union( box ) {
this.min.min( box.min );
this.max.max( box.max );
return this;
}
applyMatrix4( matrix ) {
// transform of empty box is an empty box.
if ( this.isEmpty() ) return this;
// NOTE: I am using a binary pattern to specify all 2^3 combinations below
_points[ 0 ].set( this.min.x, this.min.y, this.min.z ).applyMatrix4( matrix ); // 000
_points[ 1 ].set( this.min.x, this.min.y, this.max.z ).applyMatrix4( matrix ); // 001
_points[ 2 ].set( this.min.x, this.max.y, this.min.z ).applyMatrix4( matrix ); // 010
_points[ 3 ].set( this.min.x, this.max.y, this.max.z ).applyMatrix4( matrix ); // 011
_points[ 4 ].set( this.max.x, this.min.y, this.min.z ).applyMatrix4( matrix ); // 100
_points[ 5 ].set( this.max.x, this.min.y, this.max.z ).applyMatrix4( matrix ); // 101
_points[ 6 ].set( this.max.x, this.max.y, this.min.z ).applyMatrix4( matrix ); // 110
_points[ 7 ].set( this.max.x, this.max.y, this.max.z ).applyMatrix4( matrix ); // 111
this.setFromPoints( _points );
return this;
}
translate( offset ) {
this.min.add( offset );
this.max.add( offset );
return this;
}
equals( box ) {
return box.min.equals( this.min ) && box.max.equals( this.max );
}
}
Box3.prototype.isBox3 = true;
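/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * building an axis-aligned bounding box from points and testing containment.
 * Variable names are hypothetical.
 *
 *   const box = new Box3().setFromPoints( [
 *     new Vector3( - 1, 0, 2 ),
 *     new Vector3( 3, 4, - 2 )
 *   ] );
 *   box.containsPoint( new Vector3( 0, 1, 0 ) ); // true
 *   box.getSize( new Vector3() );                // ( 4, 4, 4 )
 *   box.expandByScalar( 1 );                     // grow by 1 on every side
 */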
const _points = [
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3(),
/*@__PURE__*/ new Vector3()
];
const _vector$b = /*@__PURE__*/ new Vector3();
const _box$3 = /*@__PURE__*/ new Box3();
// triangle centered vertices
const _v0$2 = /*@__PURE__*/ new Vector3();
const _v1$7 = /*@__PURE__*/ new Vector3();
const _v2$3 = /*@__PURE__*/ new Vector3();
// triangle edge vectors
const _f0 = /*@__PURE__*/ new Vector3();
const _f1 = /*@__PURE__*/ new Vector3();
const _f2 = /*@__PURE__*/ new Vector3();
const _center = /*@__PURE__*/ new Vector3();
const _extents = /*@__PURE__*/ new Vector3();
const _triangleNormal = /*@__PURE__*/ new Vector3();
const _testAxis = /*@__PURE__*/ new Vector3();
function satForAxes( axes, v0, v1, v2, extents ) {
for ( let i = 0, j = axes.length - 3; i <= j; i += 3 ) {
_testAxis.fromArray( axes, i );
// project the aabb onto the separating axis
const r = extents.x * Math.abs( _testAxis.x ) + extents.y * Math.abs( _testAxis.y ) + extents.z * Math.abs( _testAxis.z );
// project all 3 vertices of the triangle onto the separating axis
const p0 = v0.dot( _testAxis );
const p1 = v1.dot( _testAxis );
const p2 = v2.dot( _testAxis );
// actual test, basically see if the most extreme of the projected triangle points exceeds r
if ( Math.max( - Math.max( p0, p1, p2 ), Math.min( p0, p1, p2 ) ) > r ) {
// points of the projected triangle are outside the projected half-length of the aabb
// the axis is separating and we can exit
return false;
}
}
return true;
}
const _box$2 = /*@__PURE__*/ new Box3();
const _v1$6 = /*@__PURE__*/ new Vector3();
const _toFarthestPoint = /*@__PURE__*/ new Vector3();
const _toPoint = /*@__PURE__*/ new Vector3();
class Sphere {
constructor( center = new Vector3(), radius = - 1 ) {
this.center = center;
this.radius = radius;
}
set( center, radius ) {
this.center.copy( center );
this.radius = radius;
return this;
}
setFromPoints( points, optionalCenter ) {
const center = this.center;
if ( optionalCenter !== undefined ) {
center.copy( optionalCenter );
} else {
_box$2.setFromPoints( points ).getCenter( center );
}
let maxRadiusSq = 0;
for ( let i = 0, il = points.length; i < il; i ++ ) {
maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( points[ i ] ) );
}
this.radius = Math.sqrt( maxRadiusSq );
return this;
}
copy( sphere ) {
this.center.copy( sphere.center );
this.radius = sphere.radius;
return this;
}
isEmpty() {
return ( this.radius < 0 );
}
makeEmpty() {
this.center.set( 0, 0, 0 );
this.radius = - 1;
return this;
}
containsPoint( point ) {
return ( point.distanceToSquared( this.center ) <= ( this.radius * this.radius ) );
}
distanceToPoint( point ) {
return ( point.distanceTo( this.center ) - this.radius );
}
intersectsSphere( sphere ) {
const radiusSum = this.radius + sphere.radius;
return sphere.center.distanceToSquared( this.center ) <= ( radiusSum * radiusSum );
}
intersectsBox( box ) {
return box.intersectsSphere( this );
}
intersectsPlane( plane ) {
return Math.abs( plane.distanceToPoint( this.center ) ) <= this.radius;
}
clampPoint( point, target ) {
const deltaLengthSq = this.center.distanceToSquared( point );
if ( target === undefined ) {
console.warn( 'THREE.Sphere: .clampPoint() target is now required' );
target = new Vector3();
}
target.copy( point );
if ( deltaLengthSq > ( this.radius * this.radius ) ) {
target.sub( this.center ).normalize();
target.multiplyScalar( this.radius ).add( this.center );
}
return target;
}
getBoundingBox( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Sphere: .getBoundingBox() target is now required' );
target = new Box3();
}
if ( this.isEmpty() ) {
// Empty sphere produces empty bounding box
target.makeEmpty();
return target;
}
target.set( this.center, this.center );
target.expandByScalar( this.radius );
return target;
}
applyMatrix4( matrix ) {
this.center.applyMatrix4( matrix );
this.radius = this.radius * matrix.getMaxScaleOnAxis();
return this;
}
translate( offset ) {
this.center.add( offset );
return this;
}
expandByPoint( point ) {
// from https://github.com/juj/MathGeoLib/blob/2940b99b99cfe575dd45103ef20f4019dee15b54/src/Geometry/Sphere.cpp#L649-L671
_toPoint.subVectors( point, this.center );
const lengthSq = _toPoint.lengthSq();
if ( lengthSq > ( this.radius * this.radius ) ) {
const length = Math.sqrt( lengthSq );
const missingRadiusHalf = ( length - this.radius ) * 0.5;
// Nudge this sphere towards the target point. Add half the missing distance to the radius,
// and move the center by the other half. This gives a tighter enclosure than adding
// the whole missing distance to the radius.
this.center.add( _toPoint.multiplyScalar( missingRadiusHalf / length ) );
this.radius += missingRadiusHalf;
}
return this;
}
union( sphere ) {
// from https://github.com/juj/MathGeoLib/blob/2940b99b99cfe575dd45103ef20f4019dee15b54/src/Geometry/Sphere.cpp#L759-L769
// To enclose another sphere within this sphere, we only need to enclose two points:
// 1) Enclose the farthest point on the other sphere within this sphere.
// 2) Enclose the point diametrically opposite that farthest point within this sphere.
_toFarthestPoint.subVectors( sphere.center, this.center ).normalize().multiplyScalar( sphere.radius );
this.expandByPoint( _v1$6.copy( sphere.center ).add( _toFarthestPoint ) );
this.expandByPoint( _v1$6.copy( sphere.center ).sub( _toFarthestPoint ) );
return this;
}
equals( sphere ) {
return sphere.center.equals( this.center ) && ( sphere.radius === this.radius );
}
clone() {
return new this.constructor().copy( this );
}
}
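/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * a bounding sphere that grows to enclose new points. Variable names are hypothetical.
 *
 *   const sphere = new Sphere( new Vector3( 0, 0, 0 ), 1 );
 *   sphere.containsPoint( new Vector3( 0, 0, 0.5 ) ); // true
 *   sphere.expandByPoint( new Vector3( 3, 0, 0 ) );   // center ( 1, 0, 0 ), radius 2
 *   sphere.intersectsSphere( new Sphere( new Vector3( 4, 0, 0 ), 1 ) ); // true
 */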
const _vector$a = /*@__PURE__*/ new Vector3();
const _segCenter = /*@__PURE__*/ new Vector3();
const _segDir = /*@__PURE__*/ new Vector3();
const _diff = /*@__PURE__*/ new Vector3();
const _edge1 = /*@__PURE__*/ new Vector3();
const _edge2 = /*@__PURE__*/ new Vector3();
const _normal$1 = /*@__PURE__*/ new Vector3();
class Ray {
constructor( origin = new Vector3(), direction = new Vector3( 0, 0, - 1 ) ) {
this.origin = origin;
this.direction = direction;
}
set( origin, direction ) {
this.origin.copy( origin );
this.direction.copy( direction );
return this;
}
copy( ray ) {
this.origin.copy( ray.origin );
this.direction.copy( ray.direction );
return this;
}
at( t, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Ray: .at() target is now required' );
target = new Vector3();
}
return target.copy( this.direction ).multiplyScalar( t ).add( this.origin );
}
lookAt( v ) {
this.direction.copy( v ).sub( this.origin ).normalize();
return this;
}
recast( t ) {
this.origin.copy( this.at( t, _vector$a ) );
return this;
}
closestPointToPoint( point, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Ray: .closestPointToPoint() target is now required' );
target = new Vector3();
}
target.subVectors( point, this.origin );
const directionDistance = target.dot( this.direction );
if ( directionDistance < 0 ) {
return target.copy( this.origin );
}
return target.copy( this.direction ).multiplyScalar( directionDistance ).add( this.origin );
}
distanceToPoint( point ) {
return Math.sqrt( this.distanceSqToPoint( point ) );
}
distanceSqToPoint( point ) {
const directionDistance = _vector$a.subVectors( point, this.origin ).dot( this.direction );
// point behind the ray
if ( directionDistance < 0 ) {
return this.origin.distanceToSquared( point );
}
_vector$a.copy( this.direction ).multiplyScalar( directionDistance ).add( this.origin );
return _vector$a.distanceToSquared( point );
}
distanceSqToSegment( v0, v1, optionalPointOnRay, optionalPointOnSegment ) {
// from http://www.geometrictools.com/GTEngine/Include/Mathematics/GteDistRaySegment.h
// It returns the min distance between the ray and the segment
// defined by v0 and v1
// It can also set two optional targets :
// - The closest point on the ray
// - The closest point on the segment
_segCenter.copy( v0 ).add( v1 ).multiplyScalar( 0.5 );
_segDir.copy( v1 ).sub( v0 ).normalize();
_diff.copy( this.origin ).sub( _segCenter );
const segExtent = v0.distanceTo( v1 ) * 0.5;
const a01 = - this.direction.dot( _segDir );
const b0 = _diff.dot( this.direction );
const b1 = - _diff.dot( _segDir );
const c = _diff.lengthSq();
const det = Math.abs( 1 - a01 * a01 );
let s0, s1, sqrDist, extDet;
if ( det > 0 ) {
// The ray and segment are not parallel.
s0 = a01 * b1 - b0;
s1 = a01 * b0 - b1;
extDet = segExtent * det;
if ( s0 >= 0 ) {
if ( s1 >= - extDet ) {
if ( s1 <= extDet ) {
// region 0
// Minimum at interior points of ray and segment.
const invDet = 1 / det;
s0 *= invDet;
s1 *= invDet;
sqrDist = s0 * ( s0 + a01 * s1 + 2 * b0 ) + s1 * ( a01 * s0 + s1 + 2 * b1 ) + c;
} else {
// region 1
s1 = segExtent;
s0 = Math.max( 0, - ( a01 * s1 + b0 ) );
sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c;
}
} else {
// region 5
s1 = - segExtent;
s0 = Math.max( 0, - ( a01 * s1 + b0 ) );
sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c;
}
} else {
if ( s1 <= - extDet ) {
// region 4
s0 = Math.max( 0, - ( - a01 * segExtent + b0 ) );
s1 = ( s0 > 0 ) ? - segExtent : Math.min( Math.max( - segExtent, - b1 ), segExtent );
sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c;
} else if ( s1 <= extDet ) {
// region 3
s0 = 0;
s1 = Math.min( Math.max( - segExtent, - b1 ), segExtent );
sqrDist = s1 * ( s1 + 2 * b1 ) + c;
} else {
// region 2
s0 = Math.max( 0, - ( a01 * segExtent + b0 ) );
s1 = ( s0 > 0 ) ? segExtent : Math.min( Math.max( - segExtent, - b1 ), segExtent );
sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c;
}
}
} else {
// Ray and segment are parallel.
s1 = ( a01 > 0 ) ? - segExtent : segExtent;
s0 = Math.max( 0, - ( a01 * s1 + b0 ) );
sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c;
}
if ( optionalPointOnRay ) {
optionalPointOnRay.copy( this.direction ).multiplyScalar( s0 ).add( this.origin );
}
if ( optionalPointOnSegment ) {
optionalPointOnSegment.copy( _segDir ).multiplyScalar( s1 ).add( _segCenter );
}
return sqrDist;
}
intersectSphere( sphere, target ) {
_vector$a.subVectors( sphere.center, this.origin );
const tca = _vector$a.dot( this.direction );
const d2 = _vector$a.dot( _vector$a ) - tca * tca;
const radius2 = sphere.radius * sphere.radius;
if ( d2 > radius2 ) return null;
const thc = Math.sqrt( radius2 - d2 );
// t0 = first intersect point - entrance on front of sphere
const t0 = tca - thc;
// t1 = second intersect point - exit point on back of sphere
const t1 = tca + thc;
// test to see if both t0 and t1 are behind the ray - if so, return null
if ( t0 < 0 && t1 < 0 ) return null;
// test to see if t0 is behind the ray:
// if it is, the ray origin is inside the sphere, so return the exit point at t1
// in order to always return an intersection point that is in front of the ray.
if ( t0 < 0 ) return this.at( t1, target );
// else t0 is in front of the ray, so return the entry point at t0
return this.at( t0, target );
}
intersectsSphere( sphere ) {
return this.distanceSqToPoint( sphere.center ) <= ( sphere.radius * sphere.radius );
}
distanceToPlane( plane ) {
const denominator = plane.normal.dot( this.direction );
if ( denominator === 0 ) {
// line is coplanar, return origin
if ( plane.distanceToPoint( this.origin ) === 0 ) {
return 0;
}
// Return null (rather than undefined) to signal explicitly that there is no intersection
return null;
}
const t = - ( this.origin.dot( plane.normal ) + plane.constant ) / denominator;
// Return null if the ray never intersects the plane ( t < 0 )
return t >= 0 ? t : null;
}
intersectPlane( plane, target ) {
const t = this.distanceToPlane( plane );
if ( t === null ) {
return null;
}
return this.at( t, target );
}
intersectsPlane( plane ) {
// check if the ray lies on the plane first
const distToPoint = plane.distanceToPoint( this.origin );
if ( distToPoint === 0 ) {
return true;
}
const denominator = plane.normal.dot( this.direction );
if ( denominator * distToPoint < 0 ) {
return true;
}
// ray origin is behind the plane (and is pointing behind it)
return false;
}
intersectBox( box, target ) {
let tmin, tmax, tymin, tymax, tzmin, tzmax;
const invdirx = 1 / this.direction.x,
invdiry = 1 / this.direction.y,
invdirz = 1 / this.direction.z;
const origin = this.origin;
if ( invdirx >= 0 ) {
tmin = ( box.min.x - origin.x ) * invdirx;
tmax = ( box.max.x - origin.x ) * invdirx;
} else {
tmin = ( box.max.x - origin.x ) * invdirx;
tmax = ( box.min.x - origin.x ) * invdirx;
}
if ( invdiry >= 0 ) {
tymin = ( box.min.y - origin.y ) * invdiry;
tymax = ( box.max.y - origin.y ) * invdiry;
} else {
tymin = ( box.max.y - origin.y ) * invdiry;
tymax = ( box.min.y - origin.y ) * invdiry;
}
if ( ( tmin > tymax ) || ( tymin > tmax ) ) return null;
// These lines also handle the case where tmin or tmax is NaN
// (result of 0 * Infinity). x !== x returns true if x is NaN
if ( tymin > tmin || tmin !== tmin ) tmin = tymin;
if ( tymax < tmax || tmax !== tmax ) tmax = tymax;
if ( invdirz >= 0 ) {
tzmin = ( box.min.z - origin.z ) * invdirz;
tzmax = ( box.max.z - origin.z ) * invdirz;
} else {
tzmin = ( box.max.z - origin.z ) * invdirz;
tzmax = ( box.min.z - origin.z ) * invdirz;
}
if ( ( tmin > tzmax ) || ( tzmin > tmax ) ) return null;
if ( tzmin > tmin || tmin !== tmin ) tmin = tzmin;
if ( tzmax < tmax || tmax !== tmax ) tmax = tzmax;
// return the point closest to the ray (positive side)
if ( tmax < 0 ) return null;
return this.at( tmin >= 0 ? tmin : tmax, target );
}
intersectsBox( box ) {
return this.intersectBox( box, _vector$a ) !== null;
}
intersectTriangle( a, b, c, backfaceCulling, target ) {
// Compute the offset origin, edges, and normal.
// from http://www.geometrictools.com/GTEngine/Include/Mathematics/GteIntrRay3Triangle3.h
_edge1.subVectors( b, a );
_edge2.subVectors( c, a );
_normal$1.crossVectors( _edge1, _edge2 );
// Solve Q + t*D = b1*E1 + b2*E2 (Q = kDiff, D = ray direction,
// E1 = kEdge1, E2 = kEdge2, N = Cross(E1,E2)) by
// |Dot(D,N)|*b1 = sign(Dot(D,N))*Dot(D,Cross(Q,E2))
// |Dot(D,N)|*b2 = sign(Dot(D,N))*Dot(D,Cross(E1,Q))
// |Dot(D,N)|*t = -sign(Dot(D,N))*Dot(Q,N)
let DdN = this.direction.dot( _normal$1 );
let sign;
if ( DdN > 0 ) {
if ( backfaceCulling ) return null;
sign = 1;
} else if ( DdN < 0 ) {
sign = - 1;
DdN = - DdN;
} else {
return null;
}
_diff.subVectors( this.origin, a );
const DdQxE2 = sign * this.direction.dot( _edge2.crossVectors( _diff, _edge2 ) );
// b1 < 0, no intersection
if ( DdQxE2 < 0 ) {
return null;
}
const DdE1xQ = sign * this.direction.dot( _edge1.cross( _diff ) );
// b2 < 0, no intersection
if ( DdE1xQ < 0 ) {
return null;
}
// b1+b2 > 1, no intersection
if ( DdQxE2 + DdE1xQ > DdN ) {
return null;
}
// Line intersects triangle, check if ray does.
const QdN = - sign * _diff.dot( _normal$1 );
// t < 0, no intersection
if ( QdN < 0 ) {
return null;
}
// Ray intersects triangle.
return this.at( QdN / DdN, target );
}
applyMatrix4( matrix4 ) {
this.origin.applyMatrix4( matrix4 );
this.direction.transformDirection( matrix4 );
return this;
}
equals( ray ) {
return ray.origin.equals( this.origin ) && ray.direction.equals( this.direction );
}
clone() {
return new this.constructor().copy( this );
}
}
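/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * intersecting a ray with a sphere and a box. Variable names are hypothetical.
 *
 *   const ray = new Ray( new Vector3( 0, 0, 5 ), new Vector3( 0, 0, - 1 ) );
 *   const hit = new Vector3();
 *   ray.intersectSphere( new Sphere( new Vector3(), 1 ), hit ); // hit = ( 0, 0, 1 )
 *   ray.intersectBox( new Box3( new Vector3( - 1, - 1, - 1 ), new Vector3( 1, 1, 1 ) ), hit ); // hit = ( 0, 0, 1 )
 */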
class Matrix4 {
constructor() {
this.elements = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
];
if ( arguments.length > 0 ) {
console.error( 'THREE.Matrix4: the constructor no longer reads arguments. use .set() instead.' );
}
}
set( n11, n12, n13, n14, n21, n22, n23, n24, n31, n32, n33, n34, n41, n42, n43, n44 ) {
const te = this.elements;
te[ 0 ] = n11; te[ 4 ] = n12; te[ 8 ] = n13; te[ 12 ] = n14;
te[ 1 ] = n21; te[ 5 ] = n22; te[ 9 ] = n23; te[ 13 ] = n24;
te[ 2 ] = n31; te[ 6 ] = n32; te[ 10 ] = n33; te[ 14 ] = n34;
te[ 3 ] = n41; te[ 7 ] = n42; te[ 11 ] = n43; te[ 15 ] = n44;
return this;
}
identity() {
this.set(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
);
return this;
}
clone() {
return new Matrix4().fromArray( this.elements );
}
copy( m ) {
const te = this.elements;
const me = m.elements;
te[ 0 ] = me[ 0 ]; te[ 1 ] = me[ 1 ]; te[ 2 ] = me[ 2 ]; te[ 3 ] = me[ 3 ];
te[ 4 ] = me[ 4 ]; te[ 5 ] = me[ 5 ]; te[ 6 ] = me[ 6 ]; te[ 7 ] = me[ 7 ];
te[ 8 ] = me[ 8 ]; te[ 9 ] = me[ 9 ]; te[ 10 ] = me[ 10 ]; te[ 11 ] = me[ 11 ];
te[ 12 ] = me[ 12 ]; te[ 13 ] = me[ 13 ]; te[ 14 ] = me[ 14 ]; te[ 15 ] = me[ 15 ];
return this;
}
copyPosition( m ) {
const te = this.elements, me = m.elements;
te[ 12 ] = me[ 12 ];
te[ 13 ] = me[ 13 ];
te[ 14 ] = me[ 14 ];
return this;
}
setFromMatrix3( m ) {
const me = m.elements;
this.set(
me[ 0 ], me[ 3 ], me[ 6 ], 0,
me[ 1 ], me[ 4 ], me[ 7 ], 0,
me[ 2 ], me[ 5 ], me[ 8 ], 0,
0, 0, 0, 1
);
return this;
}
extractBasis( xAxis, yAxis, zAxis ) {
xAxis.setFromMatrixColumn( this, 0 );
yAxis.setFromMatrixColumn( this, 1 );
zAxis.setFromMatrixColumn( this, 2 );
return this;
}
makeBasis( xAxis, yAxis, zAxis ) {
this.set(
xAxis.x, yAxis.x, zAxis.x, 0,
xAxis.y, yAxis.y, zAxis.y, 0,
xAxis.z, yAxis.z, zAxis.z, 0,
0, 0, 0, 1
);
return this;
}
extractRotation( m ) {
// this method does not support reflection matrices
const te = this.elements;
const me = m.elements;
const scaleX = 1 / _v1$5.setFromMatrixColumn( m, 0 ).length();
const scaleY = 1 / _v1$5.setFromMatrixColumn( m, 1 ).length();
const scaleZ = 1 / _v1$5.setFromMatrixColumn( m, 2 ).length();
te[ 0 ] = me[ 0 ] * scaleX;
te[ 1 ] = me[ 1 ] * scaleX;
te[ 2 ] = me[ 2 ] * scaleX;
te[ 3 ] = 0;
te[ 4 ] = me[ 4 ] * scaleY;
te[ 5 ] = me[ 5 ] * scaleY;
te[ 6 ] = me[ 6 ] * scaleY;
te[ 7 ] = 0;
te[ 8 ] = me[ 8 ] * scaleZ;
te[ 9 ] = me[ 9 ] * scaleZ;
te[ 10 ] = me[ 10 ] * scaleZ;
te[ 11 ] = 0;
te[ 12 ] = 0;
te[ 13 ] = 0;
te[ 14 ] = 0;
te[ 15 ] = 1;
return this;
}
makeRotationFromEuler( euler ) {
if ( ! ( euler && euler.isEuler ) ) {
console.error( 'THREE.Matrix4: .makeRotationFromEuler() now expects a Euler rotation rather than a Vector3 and order.' );
}
const te = this.elements;
const x = euler.x, y = euler.y, z = euler.z;
const a = Math.cos( x ), b = Math.sin( x );
const c = Math.cos( y ), d = Math.sin( y );
const e = Math.cos( z ), f = Math.sin( z );
if ( euler.order === 'XYZ' ) {
const ae = a * e, af = a * f, be = b * e, bf = b * f;
te[ 0 ] = c * e;
te[ 4 ] = - c * f;
te[ 8 ] = d;
te[ 1 ] = af + be * d;
te[ 5 ] = ae - bf * d;
te[ 9 ] = - b * c;
te[ 2 ] = bf - ae * d;
te[ 6 ] = be + af * d;
te[ 10 ] = a * c;
} else if ( euler.order === 'YXZ' ) {
const ce = c * e, cf = c * f, de = d * e, df = d * f;
te[ 0 ] = ce + df * b;
te[ 4 ] = de * b - cf;
te[ 8 ] = a * d;
te[ 1 ] = a * f;
te[ 5 ] = a * e;
te[ 9 ] = - b;
te[ 2 ] = cf * b - de;
te[ 6 ] = df + ce * b;
te[ 10 ] = a * c;
} else if ( euler.order === 'ZXY' ) {
const ce = c * e, cf = c * f, de = d * e, df = d * f;
te[ 0 ] = ce - df * b;
te[ 4 ] = - a * f;
te[ 8 ] = de + cf * b;
te[ 1 ] = cf + de * b;
te[ 5 ] = a * e;
te[ 9 ] = df - ce * b;
te[ 2 ] = - a * d;
te[ 6 ] = b;
te[ 10 ] = a * c;
} else if ( euler.order === 'ZYX' ) {
const ae = a * e, af = a * f, be = b * e, bf = b * f;
te[ 0 ] = c * e;
te[ 4 ] = be * d - af;
te[ 8 ] = ae * d + bf;
te[ 1 ] = c * f;
te[ 5 ] = bf * d + ae;
te[ 9 ] = af * d - be;
te[ 2 ] = - d;
te[ 6 ] = b * c;
te[ 10 ] = a * c;
} else if ( euler.order === 'YZX' ) {
const ac = a * c, ad = a * d, bc = b * c, bd = b * d;
te[ 0 ] = c * e;
te[ 4 ] = bd - ac * f;
te[ 8 ] = bc * f + ad;
te[ 1 ] = f;
te[ 5 ] = a * e;
te[ 9 ] = - b * e;
te[ 2 ] = - d * e;
te[ 6 ] = ad * f + bc;
te[ 10 ] = ac - bd * f;
} else if ( euler.order === 'XZY' ) {
const ac = a * c, ad = a * d, bc = b * c, bd = b * d;
te[ 0 ] = c * e;
te[ 4 ] = - f;
te[ 8 ] = d * e;
te[ 1 ] = ac * f + bd;
te[ 5 ] = a * e;
te[ 9 ] = ad * f - bc;
te[ 2 ] = bc * f - ad;
te[ 6 ] = b * e;
te[ 10 ] = bd * f + ac;
}
// bottom row
te[ 3 ] = 0;
te[ 7 ] = 0;
te[ 11 ] = 0;
// last column
te[ 12 ] = 0;
te[ 13 ] = 0;
te[ 14 ] = 0;
te[ 15 ] = 1;
return this;
}
makeRotationFromQuaternion( q ) {
return this.compose( _zero, q, _one );
}
lookAt( eye, target, up ) {
const te = this.elements;
_z.subVectors( eye, target );
if ( _z.lengthSq() === 0 ) {
// eye and target are in the same position
_z.z = 1;
}
_z.normalize();
_x.crossVectors( up, _z );
if ( _x.lengthSq() === 0 ) {
// up and z are parallel
if ( Math.abs( up.z ) === 1 ) {
_z.x += 0.0001;
} else {
_z.z += 0.0001;
}
_z.normalize();
_x.crossVectors( up, _z );
}
_x.normalize();
_y.crossVectors( _z, _x );
te[ 0 ] = _x.x; te[ 4 ] = _y.x; te[ 8 ] = _z.x;
te[ 1 ] = _x.y; te[ 5 ] = _y.y; te[ 9 ] = _z.y;
te[ 2 ] = _x.z; te[ 6 ] = _y.z; te[ 10 ] = _z.z;
return this;
}
multiply( m, n ) {
if ( n !== undefined ) {
console.warn( 'THREE.Matrix4: .multiply() now only accepts one argument. Use .multiplyMatrices( a, b ) instead.' );
return this.multiplyMatrices( m, n );
}
return this.multiplyMatrices( this, m );
}
premultiply( m ) {
return this.multiplyMatrices( m, this );
}
multiplyMatrices( a, b ) {
const ae = a.elements;
const be = b.elements;
const te = this.elements;
const a11 = ae[ 0 ], a12 = ae[ 4 ], a13 = ae[ 8 ], a14 = ae[ 12 ];
const a21 = ae[ 1 ], a22 = ae[ 5 ], a23 = ae[ 9 ], a24 = ae[ 13 ];
const a31 = ae[ 2 ], a32 = ae[ 6 ], a33 = ae[ 10 ], a34 = ae[ 14 ];
const a41 = ae[ 3 ], a42 = ae[ 7 ], a43 = ae[ 11 ], a44 = ae[ 15 ];
const b11 = be[ 0 ], b12 = be[ 4 ], b13 = be[ 8 ], b14 = be[ 12 ];
const b21 = be[ 1 ], b22 = be[ 5 ], b23 = be[ 9 ], b24 = be[ 13 ];
const b31 = be[ 2 ], b32 = be[ 6 ], b33 = be[ 10 ], b34 = be[ 14 ];
const b41 = be[ 3 ], b42 = be[ 7 ], b43 = be[ 11 ], b44 = be[ 15 ];
te[ 0 ] = a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41;
te[ 4 ] = a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42;
te[ 8 ] = a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43;
te[ 12 ] = a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44;
te[ 1 ] = a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41;
te[ 5 ] = a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42;
te[ 9 ] = a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43;
te[ 13 ] = a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44;
te[ 2 ] = a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41;
te[ 6 ] = a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42;
te[ 10 ] = a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43;
te[ 14 ] = a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44;
te[ 3 ] = a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41;
te[ 7 ] = a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42;
te[ 11 ] = a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43;
te[ 15 ] = a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44;
return this;
}
multiplyScalar( s ) {
const te = this.elements;
te[ 0 ] *= s; te[ 4 ] *= s; te[ 8 ] *= s; te[ 12 ] *= s;
te[ 1 ] *= s; te[ 5 ] *= s; te[ 9 ] *= s; te[ 13 ] *= s;
te[ 2 ] *= s; te[ 6 ] *= s; te[ 10 ] *= s; te[ 14 ] *= s;
te[ 3 ] *= s; te[ 7 ] *= s; te[ 11 ] *= s; te[ 15 ] *= s;
return this;
}
determinant() {
const te = this.elements;
const n11 = te[ 0 ], n12 = te[ 4 ], n13 = te[ 8 ], n14 = te[ 12 ];
const n21 = te[ 1 ], n22 = te[ 5 ], n23 = te[ 9 ], n24 = te[ 13 ];
const n31 = te[ 2 ], n32 = te[ 6 ], n33 = te[ 10 ], n34 = te[ 14 ];
const n41 = te[ 3 ], n42 = te[ 7 ], n43 = te[ 11 ], n44 = te[ 15 ];
//TODO: make this more efficient
//( based on http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.htm )
return (
n41 * (
+ n14 * n23 * n32
- n13 * n24 * n32
- n14 * n22 * n33
+ n12 * n24 * n33
+ n13 * n22 * n34
- n12 * n23 * n34
) +
n42 * (
+ n11 * n23 * n34
- n11 * n24 * n33
+ n14 * n21 * n33
- n13 * n21 * n34
+ n13 * n24 * n31
- n14 * n23 * n31
) +
n43 * (
+ n11 * n24 * n32
- n11 * n22 * n34
- n14 * n21 * n32
+ n12 * n21 * n34
+ n14 * n22 * n31
- n12 * n24 * n31
) +
n44 * (
- n13 * n22 * n31
- n11 * n23 * n32
+ n11 * n22 * n33
+ n13 * n21 * n32
- n12 * n21 * n33
+ n12 * n23 * n31
)
);
}
transpose() {
const te = this.elements;
let tmp;
tmp = te[ 1 ]; te[ 1 ] = te[ 4 ]; te[ 4 ] = tmp;
tmp = te[ 2 ]; te[ 2 ] = te[ 8 ]; te[ 8 ] = tmp;
tmp = te[ 6 ]; te[ 6 ] = te[ 9 ]; te[ 9 ] = tmp;
tmp = te[ 3 ]; te[ 3 ] = te[ 12 ]; te[ 12 ] = tmp;
tmp = te[ 7 ]; te[ 7 ] = te[ 13 ]; te[ 13 ] = tmp;
tmp = te[ 11 ]; te[ 11 ] = te[ 14 ]; te[ 14 ] = tmp;
return this;
}
setPosition( x, y, z ) {
const te = this.elements;
if ( x.isVector3 ) {
te[ 12 ] = x.x;
te[ 13 ] = x.y;
te[ 14 ] = x.z;
} else {
te[ 12 ] = x;
te[ 13 ] = y;
te[ 14 ] = z;
}
return this;
}
invert() {
// based on http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.htm
const te = this.elements,
n11 = te[ 0 ], n21 = te[ 1 ], n31 = te[ 2 ], n41 = te[ 3 ],
n12 = te[ 4 ], n22 = te[ 5 ], n32 = te[ 6 ], n42 = te[ 7 ],
n13 = te[ 8 ], n23 = te[ 9 ], n33 = te[ 10 ], n43 = te[ 11 ],
n14 = te[ 12 ], n24 = te[ 13 ], n34 = te[ 14 ], n44 = te[ 15 ],
t11 = n23 * n34 * n42 - n24 * n33 * n42 + n24 * n32 * n43 - n22 * n34 * n43 - n23 * n32 * n44 + n22 * n33 * n44,
t12 = n14 * n33 * n42 - n13 * n34 * n42 - n14 * n32 * n43 + n12 * n34 * n43 + n13 * n32 * n44 - n12 * n33 * n44,
t13 = n13 * n24 * n42 - n14 * n23 * n42 + n14 * n22 * n43 - n12 * n24 * n43 - n13 * n22 * n44 + n12 * n23 * n44,
t14 = n14 * n23 * n32 - n13 * n24 * n32 - n14 * n22 * n33 + n12 * n24 * n33 + n13 * n22 * n34 - n12 * n23 * n34;
const det = n11 * t11 + n21 * t12 + n31 * t13 + n41 * t14;
if ( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 );
const detInv = 1 / det;
te[ 0 ] = t11 * detInv;
te[ 1 ] = ( n24 * n33 * n41 - n23 * n34 * n41 - n24 * n31 * n43 + n21 * n34 * n43 + n23 * n31 * n44 - n21 * n33 * n44 ) * detInv;
te[ 2 ] = ( n22 * n34 * n41 - n24 * n32 * n41 + n24 * n31 * n42 - n21 * n34 * n42 - n22 * n31 * n44 + n21 * n32 * n44 ) * detInv;
te[ 3 ] = ( n23 * n32 * n41 - n22 * n33 * n41 - n23 * n31 * n42 + n21 * n33 * n42 + n22 * n31 * n43 - n21 * n32 * n43 ) * detInv;
te[ 4 ] = t12 * detInv;
te[ 5 ] = ( n13 * n34 * n41 - n14 * n33 * n41 + n14 * n31 * n43 - n11 * n34 * n43 - n13 * n31 * n44 + n11 * n33 * n44 ) * detInv;
te[ 6 ] = ( n14 * n32 * n41 - n12 * n34 * n41 - n14 * n31 * n42 + n11 * n34 * n42 + n12 * n31 * n44 - n11 * n32 * n44 ) * detInv;
te[ 7 ] = ( n12 * n33 * n41 - n13 * n32 * n41 + n13 * n31 * n42 - n11 * n33 * n42 - n12 * n31 * n43 + n11 * n32 * n43 ) * detInv;
te[ 8 ] = t13 * detInv;
te[ 9 ] = ( n14 * n23 * n41 - n13 * n24 * n41 - n14 * n21 * n43 + n11 * n24 * n43 + n13 * n21 * n44 - n11 * n23 * n44 ) * detInv;
te[ 10 ] = ( n12 * n24 * n41 - n14 * n22 * n41 + n14 * n21 * n42 - n11 * n24 * n42 - n12 * n21 * n44 + n11 * n22 * n44 ) * detInv;
te[ 11 ] = ( n13 * n22 * n41 - n12 * n23 * n41 - n13 * n21 * n42 + n11 * n23 * n42 + n12 * n21 * n43 - n11 * n22 * n43 ) * detInv;
te[ 12 ] = t14 * detInv;
te[ 13 ] = ( n13 * n24 * n31 - n14 * n23 * n31 + n14 * n21 * n33 - n11 * n24 * n33 - n13 * n21 * n34 + n11 * n23 * n34 ) * detInv;
te[ 14 ] = ( n14 * n22 * n31 - n12 * n24 * n31 - n14 * n21 * n32 + n11 * n24 * n32 + n12 * n21 * n34 - n11 * n22 * n34 ) * detInv;
te[ 15 ] = ( n12 * n23 * n31 - n13 * n22 * n31 + n13 * n21 * n32 - n11 * n23 * n32 - n12 * n21 * n33 + n11 * n22 * n33 ) * detInv;
return this;
}
scale( v ) {
const te = this.elements;
const x = v.x, y = v.y, z = v.z;
te[ 0 ] *= x; te[ 4 ] *= y; te[ 8 ] *= z;
te[ 1 ] *= x; te[ 5 ] *= y; te[ 9 ] *= z;
te[ 2 ] *= x; te[ 6 ] *= y; te[ 10 ] *= z;
te[ 3 ] *= x; te[ 7 ] *= y; te[ 11 ] *= z;
return this;
}
getMaxScaleOnAxis() {
const te = this.elements;
const scaleXSq = te[ 0 ] * te[ 0 ] + te[ 1 ] * te[ 1 ] + te[ 2 ] * te[ 2 ];
const scaleYSq = te[ 4 ] * te[ 4 ] + te[ 5 ] * te[ 5 ] + te[ 6 ] * te[ 6 ];
const scaleZSq = te[ 8 ] * te[ 8 ] + te[ 9 ] * te[ 9 ] + te[ 10 ] * te[ 10 ];
return Math.sqrt( Math.max( scaleXSq, scaleYSq, scaleZSq ) );
}
makeTranslation( x, y, z ) {
this.set(
1, 0, 0, x,
0, 1, 0, y,
0, 0, 1, z,
0, 0, 0, 1
);
return this;
}
makeRotationX( theta ) {
const c = Math.cos( theta ), s = Math.sin( theta );
this.set(
1, 0, 0, 0,
0, c, - s, 0,
0, s, c, 0,
0, 0, 0, 1
);
return this;
}
makeRotationY( theta ) {
const c = Math.cos( theta ), s = Math.sin( theta );
this.set(
c, 0, s, 0,
0, 1, 0, 0,
- s, 0, c, 0,
0, 0, 0, 1
);
return this;
}
makeRotationZ( theta ) {
const c = Math.cos( theta ), s = Math.sin( theta );
this.set(
c, - s, 0, 0,
s, c, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
);
return this;
}
makeRotationAxis( axis, angle ) {
// Based on http://www.gamedev.net/reference/articles/article1199.asp
const c = Math.cos( angle );
const s = Math.sin( angle );
const t = 1 - c;
const x = axis.x, y = axis.y, z = axis.z;
const tx = t * x, ty = t * y;
this.set(
tx * x + c, tx * y - s * z, tx * z + s * y, 0,
tx * y + s * z, ty * y + c, ty * z - s * x, 0,
tx * z - s * y, ty * z + s * x, t * z * z + c, 0,
0, 0, 0, 1
);
return this;
}
makeScale( x, y, z ) {
this.set(
x, 0, 0, 0,
0, y, 0, 0,
0, 0, z, 0,
0, 0, 0, 1
);
return this;
}
makeShear( x, y, z ) {
this.set(
1, y, z, 0,
x, 1, z, 0,
x, y, 1, 0,
0, 0, 0, 1
);
return this;
}
compose( position, quaternion, scale ) {
const te = this.elements;
const x = quaternion._x, y = quaternion._y, z = quaternion._z, w = quaternion._w;
const x2 = x + x, y2 = y + y, z2 = z + z;
const xx = x * x2, xy = x * y2, xz = x * z2;
const yy = y * y2, yz = y * z2, zz = z * z2;
const wx = w * x2, wy = w * y2, wz = w * z2;
const sx = scale.x, sy = scale.y, sz = scale.z;
te[ 0 ] = ( 1 - ( yy + zz ) ) * sx;
te[ 1 ] = ( xy + wz ) * sx;
te[ 2 ] = ( xz - wy ) * sx;
te[ 3 ] = 0;
te[ 4 ] = ( xy - wz ) * sy;
te[ 5 ] = ( 1 - ( xx + zz ) ) * sy;
te[ 6 ] = ( yz + wx ) * sy;
te[ 7 ] = 0;
te[ 8 ] = ( xz + wy ) * sz;
te[ 9 ] = ( yz - wx ) * sz;
te[ 10 ] = ( 1 - ( xx + yy ) ) * sz;
te[ 11 ] = 0;
te[ 12 ] = position.x;
te[ 13 ] = position.y;
te[ 14 ] = position.z;
te[ 15 ] = 1;
return this;
}
decompose( position, quaternion, scale ) {
const te = this.elements;
let sx = _v1$5.set( te[ 0 ], te[ 1 ], te[ 2 ] ).length();
const sy = _v1$5.set( te[ 4 ], te[ 5 ], te[ 6 ] ).length();
const sz = _v1$5.set( te[ 8 ], te[ 9 ], te[ 10 ] ).length();
// if the determinant is negative, we need to invert one scale
const det = this.determinant();
if ( det < 0 ) sx = - sx;
position.x = te[ 12 ];
position.y = te[ 13 ];
position.z = te[ 14 ];
// scale the rotation part
_m1$2.copy( this );
const invSX = 1 / sx;
const invSY = 1 / sy;
const invSZ = 1 / sz;
_m1$2.elements[ 0 ] *= invSX;
_m1$2.elements[ 1 ] *= invSX;
_m1$2.elements[ 2 ] *= invSX;
_m1$2.elements[ 4 ] *= invSY;
_m1$2.elements[ 5 ] *= invSY;
_m1$2.elements[ 6 ] *= invSY;
_m1$2.elements[ 8 ] *= invSZ;
_m1$2.elements[ 9 ] *= invSZ;
_m1$2.elements[ 10 ] *= invSZ;
quaternion.setFromRotationMatrix( _m1$2 );
scale.x = sx;
scale.y = sy;
scale.z = sz;
return this;
}
makePerspective( left, right, top, bottom, near, far ) {
if ( far === undefined ) {
console.warn( 'THREE.Matrix4: .makePerspective() has been redefined and has a new signature. Please check the docs.' );
}
const te = this.elements;
const x = 2 * near / ( right - left );
const y = 2 * near / ( top - bottom );
const a = ( right + left ) / ( right - left );
const b = ( top + bottom ) / ( top - bottom );
const c = - ( far + near ) / ( far - near );
const d = - 2 * far * near / ( far - near );
te[ 0 ] = x; te[ 4 ] = 0; te[ 8 ] = a; te[ 12 ] = 0;
te[ 1 ] = 0; te[ 5 ] = y; te[ 9 ] = b; te[ 13 ] = 0;
te[ 2 ] = 0; te[ 6 ] = 0; te[ 10 ] = c; te[ 14 ] = d;
te[ 3 ] = 0; te[ 7 ] = 0; te[ 11 ] = - 1; te[ 15 ] = 0;
return this;
}
makeOrthographic( left, right, top, bottom, near, far ) {
const te = this.elements;
const w = 1.0 / ( right - left );
const h = 1.0 / ( top - bottom );
const p = 1.0 / ( far - near );
const x = ( right + left ) * w;
const y = ( top + bottom ) * h;
const z = ( far + near ) * p;
te[ 0 ] = 2 * w; te[ 4 ] = 0; te[ 8 ] = 0; te[ 12 ] = - x;
te[ 1 ] = 0; te[ 5 ] = 2 * h; te[ 9 ] = 0; te[ 13 ] = - y;
te[ 2 ] = 0; te[ 6 ] = 0; te[ 10 ] = - 2 * p; te[ 14 ] = - z;
te[ 3 ] = 0; te[ 7 ] = 0; te[ 11 ] = 0; te[ 15 ] = 1;
return this;
}
equals( matrix ) {
const te = this.elements;
const me = matrix.elements;
for ( let i = 0; i < 16; i ++ ) {
if ( te[ i ] !== me[ i ] ) return false;
}
return true;
}
fromArray( array, offset = 0 ) {
for ( let i = 0; i < 16; i ++ ) {
this.elements[ i ] = array[ i + offset ];
}
return this;
}
toArray( array = [], offset = 0 ) {
const te = this.elements;
array[ offset ] = te[ 0 ];
array[ offset + 1 ] = te[ 1 ];
array[ offset + 2 ] = te[ 2 ];
array[ offset + 3 ] = te[ 3 ];
array[ offset + 4 ] = te[ 4 ];
array[ offset + 5 ] = te[ 5 ];
array[ offset + 6 ] = te[ 6 ];
array[ offset + 7 ] = te[ 7 ];
array[ offset + 8 ] = te[ 8 ];
array[ offset + 9 ] = te[ 9 ];
array[ offset + 10 ] = te[ 10 ];
array[ offset + 11 ] = te[ 11 ];
array[ offset + 12 ] = te[ 12 ];
array[ offset + 13 ] = te[ 13 ];
array[ offset + 14 ] = te[ 14 ];
array[ offset + 15 ] = te[ 15 ];
return array;
}
}
Matrix4.prototype.isMatrix4 = true;
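/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * composing and decomposing a translation/rotation/scale transform.
 * Variable names are hypothetical.
 *
 *   const m = new Matrix4().compose(
 *     new Vector3( 1, 2, 3 ),                                                    // position
 *     new Quaternion().setFromAxisAngle( new Vector3( 0, 1, 0 ), Math.PI / 2 ),  // rotation
 *     new Vector3( 2, 2, 2 )                                                     // scale
 *   );
 *   const p = new Vector3(), q = new Quaternion(), s = new Vector3();
 *   m.decompose( p, q, s );          // recovers position, quaternion and scale
 *   const inv = m.clone().invert();  // inverse transform
 */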
const _v1$5 = /*@__PURE__*/ new Vector3();
const _m1$2 = /*@__PURE__*/ new Matrix4();
const _zero = /*@__PURE__*/ new Vector3( 0, 0, 0 );
const _one = /*@__PURE__*/ new Vector3( 1, 1, 1 );
const _x = /*@__PURE__*/ new Vector3();
const _y = /*@__PURE__*/ new Vector3();
const _z = /*@__PURE__*/ new Vector3();
const _matrix$1 = /*@__PURE__*/ new Matrix4();
const _quaternion$3 = /*@__PURE__*/ new Quaternion();
class Euler {
constructor( x = 0, y = 0, z = 0, order = Euler.DefaultOrder ) {
this._x = x;
this._y = y;
this._z = z;
this._order = order;
}
get x() {
return this._x;
}
set x( value ) {
this._x = value;
this._onChangeCallback();
}
get y() {
return this._y;
}
set y( value ) {
this._y = value;
this._onChangeCallback();
}
get z() {
return this._z;
}
set z( value ) {
this._z = value;
this._onChangeCallback();
}
get order() {
return this._order;
}
set order( value ) {
this._order = value;
this._onChangeCallback();
}
set( x, y, z, order ) {
this._x = x;
this._y = y;
this._z = z;
this._order = order || this._order;
this._onChangeCallback();
return this;
}
clone() {
return new this.constructor( this._x, this._y, this._z, this._order );
}
copy( euler ) {
this._x = euler._x;
this._y = euler._y;
this._z = euler._z;
this._order = euler._order;
this._onChangeCallback();
return this;
}
setFromRotationMatrix( m, order, update ) {
// assumes the upper 3x3 of m is a pure rotation matrix (i.e., unscaled)
const te = m.elements;
const m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ];
const m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ];
const m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ];
order = order || this._order;
switch ( order ) {
case 'XYZ':
this._y = Math.asin( clamp$1( m13, - 1, 1 ) );
if ( Math.abs( m13 ) < 0.9999999 ) {
this._x = Math.atan2( - m23, m33 );
this._z = Math.atan2( - m12, m11 );
} else {
this._x = Math.atan2( m32, m22 );
this._z = 0;
}
break;
case 'YXZ':
this._x = Math.asin( - clamp$1( m23, - 1, 1 ) );
if ( Math.abs( m23 ) < 0.9999999 ) {
this._y = Math.atan2( m13, m33 );
this._z = Math.atan2( m21, m22 );
} else {
this._y = Math.atan2( - m31, m11 );
this._z = 0;
}
break;
case 'ZXY':
this._x = Math.asin( clamp$1( m32, - 1, 1 ) );
if ( Math.abs( m32 ) < 0.9999999 ) {
this._y = Math.atan2( - m31, m33 );
this._z = Math.atan2( - m12, m22 );
} else {
this._y = 0;
this._z = Math.atan2( m21, m11 );
}
break;
case 'ZYX':
this._y = Math.asin( - clamp$1( m31, - 1, 1 ) );
if ( Math.abs( m31 ) < 0.9999999 ) {
this._x = Math.atan2( m32, m33 );
this._z = Math.atan2( m21, m11 );
} else {
this._x = 0;
this._z = Math.atan2( - m12, m22 );
}
break;
case 'YZX':
this._z = Math.asin( clamp$1( m21, - 1, 1 ) );
if ( Math.abs( m21 ) < 0.9999999 ) {
this._x = Math.atan2( - m23, m22 );
this._y = Math.atan2( - m31, m11 );
} else {
this._x = 0;
this._y = Math.atan2( m13, m33 );
}
break;
case 'XZY':
this._z = Math.asin( - clamp$1( m12, - 1, 1 ) );
if ( Math.abs( m12 ) < 0.9999999 ) {
this._x = Math.atan2( m32, m22 );
this._y = Math.atan2( m13, m11 );
} else {
this._x = Math.atan2( - m23, m33 );
this._y = 0;
}
break;
default:
console.warn( 'THREE.Euler: .setFromRotationMatrix() encountered an unknown order: ' + order );
}
this._order = order;
if ( update !== false ) this._onChangeCallback();
return this;
}
setFromQuaternion( q, order, update ) {
_matrix$1.makeRotationFromQuaternion( q );
return this.setFromRotationMatrix( _matrix$1, order, update );
}
setFromVector3( v, order ) {
return this.set( v.x, v.y, v.z, order || this._order );
}
reorder( newOrder ) {
// WARNING: this discards revolution information -bhouston
_quaternion$3.setFromEuler( this );
return this.setFromQuaternion( _quaternion$3, newOrder );
}
equals( euler ) {
return ( euler._x === this._x ) && ( euler._y === this._y ) && ( euler._z === this._z ) && ( euler._order === this._order );
}
fromArray( array ) {
this._x = array[ 0 ];
this._y = array[ 1 ];
this._z = array[ 2 ];
if ( array[ 3 ] !== undefined ) this._order = array[ 3 ];
this._onChangeCallback();
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this._x;
array[ offset + 1 ] = this._y;
array[ offset + 2 ] = this._z;
array[ offset + 3 ] = this._order;
return array;
}
toVector3( optionalResult ) {
if ( optionalResult ) {
return optionalResult.set( this._x, this._y, this._z );
} else {
return new Vector3( this._x, this._y, this._z );
}
}
_onChange( callback ) {
this._onChangeCallback = callback;
return this;
}
_onChangeCallback() {}
}
Euler.prototype.isEuler = true;
Euler.DefaultOrder = 'XYZ';
Euler.RotationOrders = [ 'XYZ', 'YZX', 'ZXY', 'XZY', 'YXZ', 'ZYX' ];
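/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * converting between Euler angles and quaternions. Variable names are hypothetical.
 *
 *   const euler = new Euler( 0, Math.PI / 2, 0, 'XYZ' );
 *   const q = new Quaternion().setFromEuler( euler );
 *   const roundTrip = new Euler().setFromQuaternion( q, 'YXZ' ); // same rotation, different order
 */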
class Layers {
constructor() {
this.mask = 1 | 0;
}
set( channel ) {
this.mask = 1 << channel | 0;
}
enable( channel ) {
this.mask |= 1 << channel | 0;
}
enableAll() {
this.mask = 0xffffffff | 0;
}
toggle( channel ) {
this.mask ^= 1 << channel | 0;
}
disable( channel ) {
this.mask &= ~ ( 1 << channel | 0 );
}
disableAll() {
this.mask = 0;
}
test( layers ) {
return ( this.mask & layers.mask ) !== 0;
}
}
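/*
 * Usage sketch (editorial illustration, not part of the upstream three.js source):
 * layer masks are 32-bit fields; two objects interact only when their masks overlap.
 * Variable names are hypothetical.
 *
 *   const cameraLayers = new Layers(); // defaults to layer 0
 *   const objectLayers = new Layers();
 *   objectLayers.set( 2 );             // object now lives only on layer 2
 *   cameraLayers.test( objectLayers ); // false
 *   cameraLayers.enable( 2 );
 *   cameraLayers.test( objectLayers ); // true
 */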
let _object3DId = 0;
const _v1$4 = new /*@__PURE__*/ Vector3();
const _q1 = new /*@__PURE__*/ Quaternion();
const _m1$1 = new /*@__PURE__*/ Matrix4();
const _target = new /*@__PURE__*/ Vector3();
const _position$3 = new /*@__PURE__*/ Vector3();
const _scale$2 = new /*@__PURE__*/ Vector3();
const _quaternion$2 = new /*@__PURE__*/ Quaternion();
const _xAxis = new /*@__PURE__*/ Vector3( 1, 0, 0 );
const _yAxis = new /*@__PURE__*/ Vector3( 0, 1, 0 );
const _zAxis = new /*@__PURE__*/ Vector3( 0, 0, 1 );
const _addedEvent = { type: 'added' };
const _removedEvent = { type: 'removed' };
class Object3D extends EventDispatcher {
constructor() {
super();
Object.defineProperty( this, 'id', { value: _object3DId ++ } );
this.uuid = generateUUID();
this.name = '';
this.type = 'Object3D';
this.parent = null;
this.children = [];
this.up = Object3D.DefaultUp.clone();
const position = new Vector3();
const rotation = new Euler();
const quaternion = new Quaternion();
const scale = new Vector3( 1, 1, 1 );
function onRotationChange() {
quaternion.setFromEuler( rotation, false );
}
function onQuaternionChange() {
rotation.setFromQuaternion( quaternion, undefined, false );
}
rotation._onChange( onRotationChange );
quaternion._onChange( onQuaternionChange );
Object.defineProperties( this, {
position: {
configurable: true,
enumerable: true,
value: position
},
rotation: {
configurable: true,
enumerable: true,
value: rotation
},
quaternion: {
configurable: true,
enumerable: true,
value: quaternion
},
scale: {
configurable: true,
enumerable: true,
value: scale
},
modelViewMatrix: {
value: new Matrix4()
},
normalMatrix: {
value: new Matrix3()
}
} );
this.matrix = new Matrix4();
this.matrixWorld = new Matrix4();
this.matrixAutoUpdate = Object3D.DefaultMatrixAutoUpdate;
this.matrixWorldNeedsUpdate = false;
this.layers = new Layers();
this.visible = true;
this.castShadow = false;
this.receiveShadow = false;
this.frustumCulled = true;
this.renderOrder = 0;
this.animations = [];
this.userData = {};
}
onBeforeRender() {}
onAfterRender() {}
applyMatrix4( matrix ) {
if ( this.matrixAutoUpdate ) this.updateMatrix();
this.matrix.premultiply( matrix );
this.matrix.decompose( this.position, this.quaternion, this.scale );
}
applyQuaternion( q ) {
this.quaternion.premultiply( q );
return this;
}
setRotationFromAxisAngle( axis, angle ) {
// assumes axis is normalized
this.quaternion.setFromAxisAngle( axis, angle );
}
setRotationFromEuler( euler ) {
this.quaternion.setFromEuler( euler, true );
}
setRotationFromMatrix( m ) {
// assumes the upper 3x3 of m is a pure rotation matrix (i.e., unscaled)
this.quaternion.setFromRotationMatrix( m );
}
setRotationFromQuaternion( q ) {
// assumes q is normalized
this.quaternion.copy( q );
}
rotateOnAxis( axis, angle ) {
// rotate object on axis in object space
// axis is assumed to be normalized
_q1.setFromAxisAngle( axis, angle );
this.quaternion.multiply( _q1 );
return this;
}
rotateOnWorldAxis( axis, angle ) {
// rotate object on axis in world space
// axis is assumed to be normalized
// method assumes no rotated parent
_q1.setFromAxisAngle( axis, angle );
this.quaternion.premultiply( _q1 );
return this;
}
rotateX( angle ) {
return this.rotateOnAxis( _xAxis, angle );
}
rotateY( angle ) {
return this.rotateOnAxis( _yAxis, angle );
}
rotateZ( angle ) {
return this.rotateOnAxis( _zAxis, angle );
}
translateOnAxis( axis, distance ) {
// translate object by distance along axis in object space
// axis is assumed to be normalized
_v1$4.copy( axis ).applyQuaternion( this.quaternion );
this.position.add( _v1$4.multiplyScalar( distance ) );
return this;
}
translateX( distance ) {
return this.translateOnAxis( _xAxis, distance );
}
translateY( distance ) {
return this.translateOnAxis( _yAxis, distance );
}
translateZ( distance ) {
return this.translateOnAxis( _zAxis, distance );
}
localToWorld( vector ) {
return vector.applyMatrix4( this.matrixWorld );
}
worldToLocal( vector ) {
return vector.applyMatrix4( _m1$1.copy( this.matrixWorld ).invert() );
}
lookAt( x, y, z ) {
// This method does not support objects having non-uniformly-scaled parent(s)
if ( x.isVector3 ) {
_target.copy( x );
} else {
_target.set( x, y, z );
}
const parent = this.parent;
this.updateWorldMatrix( true, false );
_position$3.setFromMatrixPosition( this.matrixWorld );
if ( this.isCamera || this.isLight ) {
_m1$1.lookAt( _position$3, _target, this.up );
} else {
_m1$1.lookAt( _target, _position$3, this.up );
}
this.quaternion.setFromRotationMatrix( _m1$1 );
if ( parent ) {
_m1$1.extractRotation( parent.matrixWorld );
_q1.setFromRotationMatrix( _m1$1 );
this.quaternion.premultiply( _q1.invert() );
}
}
add( object ) {
if ( arguments.length > 1 ) {
for ( let i = 0; i < arguments.length; i ++ ) {
this.add( arguments[ i ] );
}
return this;
}
if ( object === this ) {
console.error( 'THREE.Object3D.add: object can\'t be added as a child of itself.', object );
return this;
}
if ( object && object.isObject3D ) {
if ( object.parent !== null ) {
object.parent.remove( object );
}
object.parent = this;
this.children.push( object );
object.dispatchEvent( _addedEvent );
} else {
console.error( 'THREE.Object3D.add: object not an instance of THREE.Object3D.', object );
}
return this;
}
remove( object ) {
if ( arguments.length > 1 ) {
for ( let i = 0; i < arguments.length; i ++ ) {
this.remove( arguments[ i ] );
}
return this;
}
const index = this.children.indexOf( object );
if ( index !== - 1 ) {
object.parent = null;
this.children.splice( index, 1 );
object.dispatchEvent( _removedEvent );
}
return this;
}
clear() {
for ( let i = 0; i < this.children.length; i ++ ) {
const object = this.children[ i ];
object.parent = null;
object.dispatchEvent( _removedEvent );
}
this.children.length = 0;
return this;
}
attach( object ) {
// adds object as a child of this, while maintaining the object's world transform
this.updateWorldMatrix( true, false );
_m1$1.copy( this.matrixWorld ).invert();
if ( object.parent !== null ) {
object.parent.updateWorldMatrix( true, false );
_m1$1.multiply( object.parent.matrixWorld );
}
object.applyMatrix4( _m1$1 );
this.add( object );
object.updateWorldMatrix( false, true );
return this;
}
getObjectById( id ) {
return this.getObjectByProperty( 'id', id );
}
getObjectByName( name ) {
return this.getObjectByProperty( 'name', name );
}
getObjectByProperty( name, value ) {
if ( this[ name ] === value ) return this;
for ( let i = 0, l = this.children.length; i < l; i ++ ) {
const child = this.children[ i ];
const object = child.getObjectByProperty( name, value );
if ( object !== undefined ) {
return object;
}
}
return undefined;
}
getWorldPosition( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Object3D: .getWorldPosition() target is now required' );
target = new Vector3();
}
this.updateWorldMatrix( true, false );
return target.setFromMatrixPosition( this.matrixWorld );
}
getWorldQuaternion( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Object3D: .getWorldQuaternion() target is now required' );
target = new Quaternion();
}
this.updateWorldMatrix( true, false );
this.matrixWorld.decompose( _position$3, target, _scale$2 );
return target;
}
getWorldScale( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Object3D: .getWorldScale() target is now required' );
target = new Vector3();
}
this.updateWorldMatrix( true, false );
this.matrixWorld.decompose( _position$3, _quaternion$2, target );
return target;
}
getWorldDirection( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Object3D: .getWorldDirection() target is now required' );
target = new Vector3();
}
this.updateWorldMatrix( true, false );
const e = this.matrixWorld.elements;
return target.set( e[ 8 ], e[ 9 ], e[ 10 ] ).normalize();
}
raycast() {}
traverse( callback ) {
callback( this );
const children = this.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
children[ i ].traverse( callback );
}
}
traverseVisible( callback ) {
if ( this.visible === false ) return;
callback( this );
const children = this.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
children[ i ].traverseVisible( callback );
}
}
traverseAncestors( callback ) {
const parent = this.parent;
if ( parent !== null ) {
callback( parent );
parent.traverseAncestors( callback );
}
}
updateMatrix() {
this.matrix.compose( this.position, this.quaternion, this.scale );
this.matrixWorldNeedsUpdate = true;
}
updateMatrixWorld( force ) {
if ( this.matrixAutoUpdate ) this.updateMatrix();
if ( this.matrixWorldNeedsUpdate || force ) {
if ( this.parent === null ) {
this.matrixWorld.copy( this.matrix );
} else {
this.matrixWorld.multiplyMatrices( this.parent.matrixWorld, this.matrix );
}
this.matrixWorldNeedsUpdate = false;
force = true;
}
// update children
const children = this.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
children[ i ].updateMatrixWorld( force );
}
}
updateWorldMatrix( updateParents, updateChildren ) {
const parent = this.parent;
if ( updateParents === true && parent !== null ) {
parent.updateWorldMatrix( true, false );
}
if ( this.matrixAutoUpdate ) this.updateMatrix();
if ( this.parent === null ) {
this.matrixWorld.copy( this.matrix );
} else {
this.matrixWorld.multiplyMatrices( this.parent.matrixWorld, this.matrix );
}
// update children
if ( updateChildren === true ) {
const children = this.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
children[ i ].updateWorldMatrix( false, true );
}
}
}
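// Illustrative note, a sketch only: updateMatrixWorld() above recurses *down*
// from this object, whereas updateWorldMatrix() can first walk *up* through
// ancestors (updateParents) and then optionally descend (updateChildren).
// A typical one-off refresh of an object and its subtree would be:
//   object.updateWorldMatrix( true, true ); // `object` is an assumed Object3D instance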
toJSON( meta ) {
// meta is a string when called from JSON.stringify
const isRootObject = ( meta === undefined || typeof meta === 'string' );
const output = {};
// meta is a hash used to collect geometries, materials.
// not providing it implies that this is the root object
// being serialized.
if ( isRootObject ) {
// initialize meta obj
meta = {
geometries: {},
materials: {},
textures: {},
images: {},
shapes: {},
skeletons: {},
animations: {}
};
output.metadata = {
version: 4.5,
type: 'Object',
generator: 'Object3D.toJSON'
};
}
// standard Object3D serialization
const object = {};
object.uuid = this.uuid;
object.type = this.type;
if ( this.name !== '' ) object.name = this.name;
if ( this.castShadow === true ) object.castShadow = true;
if ( this.receiveShadow === true ) object.receiveShadow = true;
if ( this.visible === false ) object.visible = false;
if ( this.frustumCulled === false ) object.frustumCulled = false;
if ( this.renderOrder !== 0 ) object.renderOrder = this.renderOrder;
if ( JSON.stringify( this.userData ) !== '{}' ) object.userData = this.userData;
object.layers = this.layers.mask;
object.matrix = this.matrix.toArray();
if ( this.matrixAutoUpdate === false ) object.matrixAutoUpdate = false;
// object specific properties
if ( this.isInstancedMesh ) {
object.type = 'InstancedMesh';
object.count = this.count;
object.instanceMatrix = this.instanceMatrix.toJSON();
if ( this.instanceColor !== null ) object.instanceColor = this.instanceColor.toJSON();
}
//
function serialize( library, element ) {
if ( library[ element.uuid ] === undefined ) {
library[ element.uuid ] = element.toJSON( meta );
}
return element.uuid;
}
if ( this.isMesh || this.isLine || this.isPoints ) {
object.geometry = serialize( meta.geometries, this.geometry );
const parameters = this.geometry.parameters;
if ( parameters !== undefined && parameters.shapes !== undefined ) {
const shapes = parameters.shapes;
if ( Array.isArray( shapes ) ) {
for ( let i = 0, l = shapes.length; i < l; i ++ ) {
const shape = shapes[ i ];
serialize( meta.shapes, shape );
}
} else {
serialize( meta.shapes, shapes );
}
}
}
if ( this.isSkinnedMesh ) {
object.bindMode = this.bindMode;
object.bindMatrix = this.bindMatrix.toArray();
if ( this.skeleton !== undefined ) {
serialize( meta.skeletons, this.skeleton );
object.skeleton = this.skeleton.uuid;
}
}
if ( this.material !== undefined ) {
if ( Array.isArray( this.material ) ) {
const uuids = [];
for ( let i = 0, l = this.material.length; i < l; i ++ ) {
uuids.push( serialize( meta.materials, this.material[ i ] ) );
}
object.material = uuids;
} else {
object.material = serialize( meta.materials, this.material );
}
}
//
if ( this.children.length > 0 ) {
object.children = [];
for ( let i = 0; i < this.children.length; i ++ ) {
object.children.push( this.children[ i ].toJSON( meta ).object );
}
}
//
if ( this.animations.length > 0 ) {
object.animations = [];
for ( let i = 0; i < this.animations.length; i ++ ) {
const animation = this.animations[ i ];
object.animations.push( serialize( meta.animations, animation ) );
}
}
if ( isRootObject ) {
const geometries = extractFromCache( meta.geometries );
const materials = extractFromCache( meta.materials );
const textures = extractFromCache( meta.textures );
const images = extractFromCache( meta.images );
const shapes = extractFromCache( meta.shapes );
const skeletons = extractFromCache( meta.skeletons );
const animations = extractFromCache( meta.animations );
if ( geometries.length > 0 ) output.geometries = geometries;
if ( materials.length > 0 ) output.materials = materials;
if ( textures.length > 0 ) output.textures = textures;
if ( images.length > 0 ) output.images = images;
if ( shapes.length > 0 ) output.shapes = shapes;
if ( skeletons.length > 0 ) output.skeletons = skeletons;
if ( animations.length > 0 ) output.animations = animations;
}
output.object = object;
return output;
// extract data from the cache hash
// remove metadata on each item
// and return as array
function extractFromCache( cache ) {
const values = [];
for ( const key in cache ) {
const data = cache[ key ];
delete data.metadata;
values.push( data );
}
return values;
}
}
clone( recursive ) {
return new this.constructor().copy( this, recursive );
}
copy( source, recursive = true ) {
this.name = source.name;
this.up.copy( source.up );
this.position.copy( source.position );
this.rotation.order = source.rotation.order;
this.quaternion.copy( source.quaternion );
this.scale.copy( source.scale );
this.matrix.copy( source.matrix );
this.matrixWorld.copy( source.matrixWorld );
this.matrixAutoUpdate = source.matrixAutoUpdate;
this.matrixWorldNeedsUpdate = source.matrixWorldNeedsUpdate;
this.layers.mask = source.layers.mask;
this.visible = source.visible;
this.castShadow = source.castShadow;
this.receiveShadow = source.receiveShadow;
this.frustumCulled = source.frustumCulled;
this.renderOrder = source.renderOrder;
this.userData = JSON.parse( JSON.stringify( source.userData ) );
if ( recursive === true ) {
for ( let i = 0; i < source.children.length; i ++ ) {
const child = source.children[ i ];
this.add( child.clone() );
}
}
return this;
}
}
Object3D.DefaultUp = new Vector3( 0, 1, 0 );
Object3D.DefaultMatrixAutoUpdate = true;
Object3D.prototype.isObject3D = true;
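/**
 * Illustrative usage sketch, assuming these classes are reachable from
 * consumer code (e.g. via the `three` package this bundle embeds): combining
 * the Object3D hierarchy methods defined above.
 *
 *   const parent = new Object3D();
 *   const child = new Object3D();
 *   child.position.set( 1, 2, 3 );
 *   parent.add( child );              // keeps the child's *local* transform
 *   // parent.attach( child );        // would instead preserve its *world* transform
 *   parent.updateMatrixWorld( true );
 *   parent.traverse( ( node ) => console.log( node.uuid ) );
 */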
const _vector1 = /*@__PURE__*/ new Vector3();
const _vector2$1 = /*@__PURE__*/ new Vector3();
const _normalMatrix = /*@__PURE__*/ new Matrix3();
class Plane {
constructor( normal = new Vector3( 1, 0, 0 ), constant = 0 ) {
// normal is assumed to be normalized
this.normal = normal;
this.constant = constant;
}
set( normal, constant ) {
this.normal.copy( normal );
this.constant = constant;
return this;
}
setComponents( x, y, z, w ) {
this.normal.set( x, y, z );
this.constant = w;
return this;
}
setFromNormalAndCoplanarPoint( normal, point ) {
this.normal.copy( normal );
this.constant = - point.dot( this.normal );
return this;
}
setFromCoplanarPoints( a, b, c ) {
const normal = _vector1.subVectors( c, b ).cross( _vector2$1.subVectors( a, b ) ).normalize();
// Q: should an error be thrown if normal is zero (e.g. degenerate plane)?
this.setFromNormalAndCoplanarPoint( normal, a );
return this;
}
copy( plane ) {
this.normal.copy( plane.normal );
this.constant = plane.constant;
return this;
}
normalize() {
// Note: will lead to a divide by zero if the plane is invalid.
const inverseNormalLength = 1.0 / this.normal.length();
this.normal.multiplyScalar( inverseNormalLength );
this.constant *= inverseNormalLength;
return this;
}
negate() {
this.constant *= - 1;
this.normal.negate();
return this;
}
distanceToPoint( point ) {
return this.normal.dot( point ) + this.constant;
}
distanceToSphere( sphere ) {
return this.distanceToPoint( sphere.center ) - sphere.radius;
}
projectPoint( point, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Plane: .projectPoint() target is now required' );
target = new Vector3();
}
return target.copy( this.normal ).multiplyScalar( - this.distanceToPoint( point ) ).add( point );
}
intersectLine( line, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Plane: .intersectLine() target is now required' );
target = new Vector3();
}
const direction = line.delta( _vector1 );
const denominator = this.normal.dot( direction );
if ( denominator === 0 ) {
// line is coplanar, return origin
if ( this.distanceToPoint( line.start ) === 0 ) {
return target.copy( line.start );
}
// Unsure if this is the correct method to handle this case.
return null;
}
const t = - ( line.start.dot( this.normal ) + this.constant ) / denominator;
if ( t < 0 || t > 1 ) {
return null;
}
return target.copy( direction ).multiplyScalar( t ).add( line.start );
}
intersectsLine( line ) {
// Note: this tests if a line intersects the plane, not whether it (or its end-points) are coplanar with it.
const startSign = this.distanceToPoint( line.start );
const endSign = this.distanceToPoint( line.end );
return ( startSign < 0 && endSign > 0 ) || ( endSign < 0 && startSign > 0 );
}
intersectsBox( box ) {
return box.intersectsPlane( this );
}
intersectsSphere( sphere ) {
return sphere.intersectsPlane( this );
}
coplanarPoint( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Plane: .coplanarPoint() target is now required' );
target = new Vector3();
}
return target.copy( this.normal ).multiplyScalar( - this.constant );
}
applyMatrix4( matrix, optionalNormalMatrix ) {
const normalMatrix = optionalNormalMatrix || _normalMatrix.getNormalMatrix( matrix );
const referencePoint = this.coplanarPoint( _vector1 ).applyMatrix4( matrix );
const normal = this.normal.applyMatrix3( normalMatrix ).normalize();
this.constant = - referencePoint.dot( normal );
return this;
}
translate( offset ) {
this.constant -= offset.dot( this.normal );
return this;
}
equals( plane ) {
return plane.normal.equals( this.normal ) && ( plane.constant === this.constant );
}
clone() {
return new this.constructor().copy( this );
}
}
Plane.prototype.isPlane = true;
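/**
 * Illustrative usage sketch (assumes the Vector3 class defined earlier in this
 * bundle): building a Plane from three coplanar points and querying a signed
 * distance.
 *
 *   const plane = new Plane().setFromCoplanarPoints(
 *     new Vector3( 0, 0, 0 ),
 *     new Vector3( 1, 0, 0 ),
 *     new Vector3( 0, 0, 1 )
 *   );
 *   // signed distance; the sign follows the computed normal
 *   const d = plane.distanceToPoint( new Vector3( 0, 5, 0 ) );
 */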
const _v0$1 = /*@__PURE__*/ new Vector3();
const _v1$3 = /*@__PURE__*/ new Vector3();
const _v2$2 = /*@__PURE__*/ new Vector3();
const _v3$1 = /*@__PURE__*/ new Vector3();
const _vab = /*@__PURE__*/ new Vector3();
const _vac = /*@__PURE__*/ new Vector3();
const _vbc = /*@__PURE__*/ new Vector3();
const _vap = /*@__PURE__*/ new Vector3();
const _vbp = /*@__PURE__*/ new Vector3();
const _vcp = /*@__PURE__*/ new Vector3();
class Triangle {
constructor( a = new Vector3(), b = new Vector3(), c = new Vector3() ) {
this.a = a;
this.b = b;
this.c = c;
}
static getNormal( a, b, c, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Triangle: .getNormal() target is now required' );
target = new Vector3();
}
target.subVectors( c, b );
_v0$1.subVectors( a, b );
target.cross( _v0$1 );
const targetLengthSq = target.lengthSq();
if ( targetLengthSq > 0 ) {
return target.multiplyScalar( 1 / Math.sqrt( targetLengthSq ) );
}
return target.set( 0, 0, 0 );
}
// static/instance method to calculate barycentric coordinates
// based on: http://www.blackpawn.com/texts/pointinpoly/default.html
static getBarycoord( point, a, b, c, target ) {
_v0$1.subVectors( c, a );
_v1$3.subVectors( b, a );
_v2$2.subVectors( point, a );
const dot00 = _v0$1.dot( _v0$1 );
const dot01 = _v0$1.dot( _v1$3 );
const dot02 = _v0$1.dot( _v2$2 );
const dot11 = _v1$3.dot( _v1$3 );
const dot12 = _v1$3.dot( _v2$2 );
const denom = ( dot00 * dot11 - dot01 * dot01 );
if ( target === undefined ) {
console.warn( 'THREE.Triangle: .getBarycoord() target is now required' );
target = new Vector3();
}
// collinear or singular triangle
if ( denom === 0 ) {
// arbitrary location outside of triangle?
// not sure if this is the best idea, maybe should be returning undefined
return target.set( - 2, - 1, - 1 );
}
const invDenom = 1 / denom;
const u = ( dot11 * dot02 - dot01 * dot12 ) * invDenom;
const v = ( dot00 * dot12 - dot01 * dot02 ) * invDenom;
// barycentric coordinates must always sum to 1
return target.set( 1 - u - v, v, u );
}
static containsPoint( point, a, b, c ) {
this.getBarycoord( point, a, b, c, _v3$1 );
return ( _v3$1.x >= 0 ) && ( _v3$1.y >= 0 ) && ( ( _v3$1.x + _v3$1.y ) <= 1 );
}
static getUV( point, p1, p2, p3, uv1, uv2, uv3, target ) {
this.getBarycoord( point, p1, p2, p3, _v3$1 );
target.set( 0, 0 );
target.addScaledVector( uv1, _v3$1.x );
target.addScaledVector( uv2, _v3$1.y );
target.addScaledVector( uv3, _v3$1.z );
return target;
}
static isFrontFacing( a, b, c, direction ) {
_v0$1.subVectors( c, b );
_v1$3.subVectors( a, b );
// strictly front facing
return _v0$1.cross( _v1$3 ).dot( direction ) < 0;
}
set( a, b, c ) {
this.a.copy( a );
this.b.copy( b );
this.c.copy( c );
return this;
}
setFromPointsAndIndices( points, i0, i1, i2 ) {
this.a.copy( points[ i0 ] );
this.b.copy( points[ i1 ] );
this.c.copy( points[ i2 ] );
return this;
}
clone() {
return new this.constructor().copy( this );
}
copy( triangle ) {
this.a.copy( triangle.a );
this.b.copy( triangle.b );
this.c.copy( triangle.c );
return this;
}
getArea() {
_v0$1.subVectors( this.c, this.b );
_v1$3.subVectors( this.a, this.b );
return _v0$1.cross( _v1$3 ).length() * 0.5;
}
getMidpoint( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Triangle: .getMidpoint() target is now required' );
target = new Vector3();
}
return target.addVectors( this.a, this.b ).add( this.c ).multiplyScalar( 1 / 3 );
}
getNormal( target ) {
return Triangle.getNormal( this.a, this.b, this.c, target );
}
getPlane( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Triangle: .getPlane() target is now required' );
target = new Plane();
}
return target.setFromCoplanarPoints( this.a, this.b, this.c );
}
getBarycoord( point, target ) {
return Triangle.getBarycoord( point, this.a, this.b, this.c, target );
}
getUV( point, uv1, uv2, uv3, target ) {
return Triangle.getUV( point, this.a, this.b, this.c, uv1, uv2, uv3, target );
}
containsPoint( point ) {
return Triangle.containsPoint( point, this.a, this.b, this.c );
}
isFrontFacing( direction ) {
return Triangle.isFrontFacing( this.a, this.b, this.c, direction );
}
intersectsBox( box ) {
return box.intersectsTriangle( this );
}
closestPointToPoint( p, target ) {
if ( target === undefined ) {
console.warn( 'THREE.Triangle: .closestPointToPoint() target is now required' );
target = new Vector3();
}
const a = this.a, b = this.b, c = this.c;
let v, w;
// algorithm thanks to Real-Time Collision Detection by Christer Ericson,
// published by Morgan Kaufmann Publishers, (c) 2005 Elsevier Inc.,
// under the accompanying license; see chapter 5.1.5 for detailed explanation.
// Basically, we're determining which of the Voronoi regions of the triangle
// the point lies in, with the minimum amount of redundant computation.
_vab.subVectors( b, a );
_vac.subVectors( c, a );
_vap.subVectors( p, a );
const d1 = _vab.dot( _vap );
const d2 = _vac.dot( _vap );
if ( d1 <= 0 && d2 <= 0 ) {
// vertex region of A; barycentric coords (1, 0, 0)
return target.copy( a );
}
_vbp.subVectors( p, b );
const d3 = _vab.dot( _vbp );
const d4 = _vac.dot( _vbp );
if ( d3 >= 0 && d4 <= d3 ) {
// vertex region of B; barycentric coords (0, 1, 0)
return target.copy( b );
}
const vc = d1 * d4 - d3 * d2;
if ( vc <= 0 && d1 >= 0 && d3 <= 0 ) {
v = d1 / ( d1 - d3 );
// edge region of AB; barycentric coords (1-v, v, 0)
return target.copy( a ).addScaledVector( _vab, v );
}
_vcp.subVectors( p, c );
const d5 = _vab.dot( _vcp );
const d6 = _vac.dot( _vcp );
if ( d6 >= 0 && d5 <= d6 ) {
// vertex region of C; barycentric coords (0, 0, 1)
return target.copy( c );
}
const vb = d5 * d2 - d1 * d6;
if ( vb <= 0 && d2 >= 0 && d6 <= 0 ) {
w = d2 / ( d2 - d6 );
// edge region of AC; barycentric coords (1-w, 0, w)
return target.copy( a ).addScaledVector( _vac, w );
}
const va = d3 * d6 - d5 * d4;
if ( va <= 0 && ( d4 - d3 ) >= 0 && ( d5 - d6 ) >= 0 ) {
_vbc.subVectors( c, b );
w = ( d4 - d3 ) / ( ( d4 - d3 ) + ( d5 - d6 ) );
// edge region of BC; barycentric coords (0, 1-w, w)
return target.copy( b ).addScaledVector( _vbc, w );
}
// face region
const denom = 1 / ( va + vb + vc );
// u = va * denom
v = vb * denom;
w = vc * denom;
return target.copy( a ).addScaledVector( _vab, v ).addScaledVector( _vac, w );
}
equals( triangle ) {
return triangle.a.equals( this.a ) && triangle.b.equals( this.b ) && triangle.c.equals( this.c );
}
}
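/**
 * Illustrative usage sketch (assumes the Vector3 class defined earlier in this
 * bundle): barycentric queries on the Triangle class above. Note that the
 * target arguments are required, as the warnings in the methods indicate.
 *
 *   const tri = new Triangle(
 *     new Vector3( 0, 0, 0 ),
 *     new Vector3( 1, 0, 0 ),
 *     new Vector3( 0, 1, 0 )
 *   );
 *   const bary = tri.getBarycoord( new Vector3( 0.25, 0.25, 0 ), new Vector3() ); // (0.5, 0.25, 0.25)
 *   const inside = tri.containsPoint( new Vector3( 0.25, 0.25, 0 ) );             // true
 *   const closest = tri.closestPointToPoint( new Vector3( 2, 2, 0 ), new Vector3() );
 */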
let materialId = 0;
function Material$1() {
Object.defineProperty( this, 'id', { value: materialId ++ } );
this.uuid = generateUUID();
this.name = '';
this.type = 'Material';
this.fog = true;
this.blending = NormalBlending;
this.side = FrontSide;
this.vertexColors = false;
this.opacity = 1;
this.transparent = false;
this.blendSrc = SrcAlphaFactor;
this.blendDst = OneMinusSrcAlphaFactor;
this.blendEquation = AddEquation;
this.blendSrcAlpha = null;
this.blendDstAlpha = null;
this.blendEquationAlpha = null;
this.depthFunc = LessEqualDepth;
this.depthTest = true;
this.depthWrite = true;
this.stencilWriteMask = 0xff;
this.stencilFunc = AlwaysStencilFunc;
this.stencilRef = 0;
this.stencilFuncMask = 0xff;
this.stencilFail = KeepStencilOp;
this.stencilZFail = KeepStencilOp;
this.stencilZPass = KeepStencilOp;
this.stencilWrite = false;
this.clippingPlanes = null;
this.clipIntersection = false;
this.clipShadows = false;
this.shadowSide = null;
this.colorWrite = true;
this.precision = null; // override the renderer's default precision for this material
this.polygonOffset = false;
this.polygonOffsetFactor = 0;
this.polygonOffsetUnits = 0;
this.dithering = false;
this.alphaTest = 0;
this.alphaToCoverage = false;
this.premultipliedAlpha = false;
this.visible = true;
this.toneMapped = true;
this.userData = {};
this.version = 0;
}
Material$1.prototype = Object.assign( Object.create( EventDispatcher.prototype ), {
constructor: Material$1,
isMaterial: true,
onBuild: function ( /* shaderobject, renderer */ ) {},
onBeforeCompile: function ( /* shaderobject, renderer */ ) {},
customProgramCacheKey: function () {
return this.onBeforeCompile.toString();
},
setValues: function ( values ) {
if ( values === undefined ) return;
for ( const key in values ) {
const newValue = values[ key ];
if ( newValue === undefined ) {
console.warn( 'THREE.Material: \'' + key + '\' parameter is undefined.' );
continue;
}
// for backward compatibility if shading is set in the constructor
if ( key === 'shading' ) {
console.warn( 'THREE.' + this.type + ': .shading has been removed. Use the boolean .flatShading instead.' );
this.flatShading = ( newValue === FlatShading );
continue;
}
const currentValue = this[ key ];
if ( currentValue === undefined ) {
console.warn( 'THREE.' + this.type + ': \'' + key + '\' is not a property of this material.' );
continue;
}
if ( currentValue && currentValue.isColor ) {
currentValue.set( newValue );
} else if ( ( currentValue && currentValue.isVector3 ) && ( newValue && newValue.isVector3 ) ) {
currentValue.copy( newValue );
} else {
this[ key ] = newValue;
}
}
},
toJSON: function ( meta ) {
const isRoot = ( meta === undefined || typeof meta === 'string' );
if ( isRoot ) {
meta = {
textures: {},
images: {}
};
}
const data = {
metadata: {
version: 4.5,
type: 'Material',
generator: 'Material.toJSON'
}
};
// standard Material serialization
data.uuid = this.uuid;
data.type = this.type;
if ( this.name !== '' ) data.name = this.name;
if ( this.color && this.color.isColor ) data.color = this.color.getHex();
if ( this.roughness !== undefined ) data.roughness = this.roughness;
if ( this.metalness !== undefined ) data.metalness = this.metalness;
if ( this.sheen && this.sheen.isColor ) data.sheen = this.sheen.getHex();
if ( this.emissive && this.emissive.isColor ) data.emissive = this.emissive.getHex();
if ( this.emissiveIntensity && this.emissiveIntensity !== 1 ) data.emissiveIntensity = this.emissiveIntensity;
if ( this.specular && this.specular.isColor ) data.specular = this.specular.getHex();
if ( this.shininess !== undefined ) data.shininess = this.shininess;
if ( this.clearcoat !== undefined ) data.clearcoat = this.clearcoat;
if ( this.clearcoatRoughness !== undefined ) data.clearcoatRoughness = this.clearcoatRoughness;
if ( this.clearcoatMap && this.clearcoatMap.isTexture ) {
data.clearcoatMap = this.clearcoatMap.toJSON( meta ).uuid;
}
if ( this.clearcoatRoughnessMap && this.clearcoatRoughnessMap.isTexture ) {
data.clearcoatRoughnessMap = this.clearcoatRoughnessMap.toJSON( meta ).uuid;
}
if ( this.clearcoatNormalMap && this.clearcoatNormalMap.isTexture ) {
data.clearcoatNormalMap = this.clearcoatNormalMap.toJSON( meta ).uuid;
data.clearcoatNormalScale = this.clearcoatNormalScale.toArray();
}
if ( this.map && this.map.isTexture ) data.map = this.map.toJSON( meta ).uuid;
if ( this.matcap && this.matcap.isTexture ) data.matcap = this.matcap.toJSON( meta ).uuid;
if ( this.alphaMap && this.alphaMap.isTexture ) data.alphaMap = this.alphaMap.toJSON( meta ).uuid;
if ( this.lightMap && this.lightMap.isTexture ) {
data.lightMap = this.lightMap.toJSON( meta ).uuid;
data.lightMapIntensity = this.lightMapIntensity;
}
if ( this.aoMap && this.aoMap.isTexture ) {
data.aoMap = this.aoMap.toJSON( meta ).uuid;
data.aoMapIntensity = this.aoMapIntensity;
}
if ( this.bumpMap && this.bumpMap.isTexture ) {
data.bumpMap = this.bumpMap.toJSON( meta ).uuid;
data.bumpScale = this.bumpScale;
}
if ( this.normalMap && this.normalMap.isTexture ) {
data.normalMap = this.normalMap.toJSON( meta ).uuid;
data.normalMapType = this.normalMapType;
data.normalScale = this.normalScale.toArray();
}
if ( this.displacementMap && this.displacementMap.isTexture ) {
data.displacementMap = this.displacementMap.toJSON( meta ).uuid;
data.displacementScale = this.displacementScale;
data.displacementBias = this.displacementBias;
}
if ( this.roughnessMap && this.roughnessMap.isTexture ) data.roughnessMap = this.roughnessMap.toJSON( meta ).uuid;
if ( this.metalnessMap && this.metalnessMap.isTexture ) data.metalnessMap = this.metalnessMap.toJSON( meta ).uuid;
if ( this.emissiveMap && this.emissiveMap.isTexture ) data.emissiveMap = this.emissiveMap.toJSON( meta ).uuid;
if ( this.specularMap && this.specularMap.isTexture ) data.specularMap = this.specularMap.toJSON( meta ).uuid;
if ( this.envMap && this.envMap.isTexture ) {
data.envMap = this.envMap.toJSON( meta ).uuid;
if ( this.combine !== undefined ) data.combine = this.combine;
}
if ( this.envMapIntensity !== undefined ) data.envMapIntensity = this.envMapIntensity;
if ( this.reflectivity !== undefined ) data.reflectivity = this.reflectivity;
if ( this.refractionRatio !== undefined ) data.refractionRatio = this.refractionRatio;
if ( this.gradientMap && this.gradientMap.isTexture ) {
data.gradientMap = this.gradientMap.toJSON( meta ).uuid;
}
if ( this.size !== undefined ) data.size = this.size;
if ( this.shadowSide !== null ) data.shadowSide = this.shadowSide;
if ( this.sizeAttenuation !== undefined ) data.sizeAttenuation = this.sizeAttenuation;
if ( this.blending !== NormalBlending ) data.blending = this.blending;
if ( this.side !== FrontSide ) data.side = this.side;
if ( this.vertexColors ) data.vertexColors = true;
if ( this.opacity < 1 ) data.opacity = this.opacity;
if ( this.transparent === true ) data.transparent = this.transparent;
data.depthFunc = this.depthFunc;
data.depthTest = this.depthTest;
data.depthWrite = this.depthWrite;
data.colorWrite = this.colorWrite;
data.stencilWrite = this.stencilWrite;
data.stencilWriteMask = this.stencilWriteMask;
data.stencilFunc = this.stencilFunc;
data.stencilRef = this.stencilRef;
data.stencilFuncMask = this.stencilFuncMask;
data.stencilFail = this.stencilFail;
data.stencilZFail = this.stencilZFail;
data.stencilZPass = this.stencilZPass;
// rotation (SpriteMaterial)
if ( this.rotation && this.rotation !== 0 ) data.rotation = this.rotation;
if ( this.polygonOffset === true ) data.polygonOffset = true;
if ( this.polygonOffsetFactor !== 0 ) data.polygonOffsetFactor = this.polygonOffsetFactor;
if ( this.polygonOffsetUnits !== 0 ) data.polygonOffsetUnits = this.polygonOffsetUnits;
if ( this.linewidth && this.linewidth !== 1 ) data.linewidth = this.linewidth;
if ( this.dashSize !== undefined ) data.dashSize = this.dashSize;
if ( this.gapSize !== undefined ) data.gapSize = this.gapSize;
if ( this.scale !== undefined ) data.scale = this.scale;
if ( this.dithering === true ) data.dithering = true;
if ( this.alphaTest > 0 ) data.alphaTest = this.alphaTest;
if ( this.alphaToCoverage === true ) data.alphaToCoverage = this.alphaToCoverage;
if ( this.premultipliedAlpha === true ) data.premultipliedAlpha = this.premultipliedAlpha;
if ( this.wireframe === true ) data.wireframe = this.wireframe;
if ( this.wireframeLinewidth > 1 ) data.wireframeLinewidth = this.wireframeLinewidth;
if ( this.wireframeLinecap !== 'round' ) data.wireframeLinecap = this.wireframeLinecap;
if ( this.wireframeLinejoin !== 'round' ) data.wireframeLinejoin = this.wireframeLinejoin;
if ( this.morphTargets === true ) data.morphTargets = true;
if ( this.morphNormals === true ) data.morphNormals = true;
if ( this.skinning === true ) data.skinning = true;
if ( this.flatShading === true ) data.flatShading = this.flatShading;
if ( this.visible === false ) data.visible = false;
if ( this.toneMapped === false ) data.toneMapped = false;
if ( JSON.stringify( this.userData ) !== '{}' ) data.userData = this.userData;
// TODO: Copied from Object3D.toJSON
function extractFromCache( cache ) {
const values = [];
for ( const key in cache ) {
const data = cache[ key ];
delete data.metadata;
values.push( data );
}
return values;
}
if ( isRoot ) {
const textures = extractFromCache( meta.textures );
const images = extractFromCache( meta.images );
if ( textures.length > 0 ) data.textures = textures;
if ( images.length > 0 ) data.images = images;
}
return data;
},
clone: function () {
return new this.constructor().copy( this );
},
copy: function ( source ) {
this.name = source.name;
this.fog = source.fog;
this.blending = source.blending;
this.side = source.side;
this.vertexColors = source.vertexColors;
this.opacity = source.opacity;
this.transparent = source.transparent;
this.blendSrc = source.blendSrc;
this.blendDst = source.blendDst;
this.blendEquation = source.blendEquation;
this.blendSrcAlpha = source.blendSrcAlpha;
this.blendDstAlpha = source.blendDstAlpha;
this.blendEquationAlpha = source.blendEquationAlpha;
this.depthFunc = source.depthFunc;
this.depthTest = source.depthTest;
this.depthWrite = source.depthWrite;
this.stencilWriteMask = source.stencilWriteMask;
this.stencilFunc = source.stencilFunc;
this.stencilRef = source.stencilRef;
this.stencilFuncMask = source.stencilFuncMask;
this.stencilFail = source.stencilFail;
this.stencilZFail = source.stencilZFail;
this.stencilZPass = source.stencilZPass;
this.stencilWrite = source.stencilWrite;
const srcPlanes = source.clippingPlanes;
let dstPlanes = null;
if ( srcPlanes !== null ) {
const n = srcPlanes.length;
dstPlanes = new Array( n );
for ( let i = 0; i !== n; ++ i ) {
dstPlanes[ i ] = srcPlanes[ i ].clone();
}
}
this.clippingPlanes = dstPlanes;
this.clipIntersection = source.clipIntersection;
this.clipShadows = source.clipShadows;
this.shadowSide = source.shadowSide;
this.colorWrite = source.colorWrite;
this.precision = source.precision;
this.polygonOffset = source.polygonOffset;
this.polygonOffsetFactor = source.polygonOffsetFactor;
this.polygonOffsetUnits = source.polygonOffsetUnits;
this.dithering = source.dithering;
this.alphaTest = source.alphaTest;
this.alphaToCoverage = source.alphaToCoverage;
this.premultipliedAlpha = source.premultipliedAlpha;
this.visible = source.visible;
this.toneMapped = source.toneMapped;
this.userData = JSON.parse( JSON.stringify( source.userData ) );
return this;
},
dispose: function () {
this.dispatchEvent( { type: 'dispose' } );
}
} );
Object.defineProperty( Material$1.prototype, 'needsUpdate', {
set: function ( value ) {
if ( value === true ) this.version ++;
}
} );
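/**
 * Illustrative note, a sketch only: the needsUpdate setter above just bumps an
 * internal version counter; renderers compare that counter to decide when to
 * recompile the material's program. So after changing a property that affects
 * the generated shader (for example vertexColors), flag it explicitly:
 *
 *   material.vertexColors = true;  // `material` is an assumed Material instance
 *   material.needsUpdate = true;   // increments material.version
 */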
const _colorKeywords = { 'aliceblue': 0xF0F8FF, 'antiquewhite': 0xFAEBD7, 'aqua': 0x00FFFF, 'aquamarine': 0x7FFFD4, 'azure': 0xF0FFFF,
'beige': 0xF5F5DC, 'bisque': 0xFFE4C4, 'black': 0x000000, 'blanchedalmond': 0xFFEBCD, 'blue': 0x0000FF, 'blueviolet': 0x8A2BE2,
'brown': 0xA52A2A, 'burlywood': 0xDEB887, 'cadetblue': 0x5F9EA0, 'chartreuse': 0x7FFF00, 'chocolate': 0xD2691E, 'coral': 0xFF7F50,
'cornflowerblue': 0x6495ED, 'cornsilk': 0xFFF8DC, 'crimson': 0xDC143C, 'cyan': 0x00FFFF, 'darkblue': 0x00008B, 'darkcyan': 0x008B8B,
'darkgoldenrod': 0xB8860B, 'darkgray': 0xA9A9A9, 'darkgreen': 0x006400, 'darkgrey': 0xA9A9A9, 'darkkhaki': 0xBDB76B, 'darkmagenta': 0x8B008B,
'darkolivegreen': 0x556B2F, 'darkorange': 0xFF8C00, 'darkorchid': 0x9932CC, 'darkred': 0x8B0000, 'darksalmon': 0xE9967A, 'darkseagreen': 0x8FBC8F,
'darkslateblue': 0x483D8B, 'darkslategray': 0x2F4F4F, 'darkslategrey': 0x2F4F4F, 'darkturquoise': 0x00CED1, 'darkviolet': 0x9400D3,
'deeppink': 0xFF1493, 'deepskyblue': 0x00BFFF, 'dimgray': 0x696969, 'dimgrey': 0x696969, 'dodgerblue': 0x1E90FF, 'firebrick': 0xB22222,
'floralwhite': 0xFFFAF0, 'forestgreen': 0x228B22, 'fuchsia': 0xFF00FF, 'gainsboro': 0xDCDCDC, 'ghostwhite': 0xF8F8FF, 'gold': 0xFFD700,
'goldenrod': 0xDAA520, 'gray': 0x808080, 'green': 0x008000, 'greenyellow': 0xADFF2F, 'grey': 0x808080, 'honeydew': 0xF0FFF0, 'hotpink': 0xFF69B4,
'indianred': 0xCD5C5C, 'indigo': 0x4B0082, 'ivory': 0xFFFFF0, 'khaki': 0xF0E68C, 'lavender': 0xE6E6FA, 'lavenderblush': 0xFFF0F5, 'lawngreen': 0x7CFC00,
'lemonchiffon': 0xFFFACD, 'lightblue': 0xADD8E6, 'lightcoral': 0xF08080, 'lightcyan': 0xE0FFFF, 'lightgoldenrodyellow': 0xFAFAD2, 'lightgray': 0xD3D3D3,
'lightgreen': 0x90EE90, 'lightgrey': 0xD3D3D3, 'lightpink': 0xFFB6C1, 'lightsalmon': 0xFFA07A, 'lightseagreen': 0x20B2AA, 'lightskyblue': 0x87CEFA,
'lightslategray': 0x778899, 'lightslategrey': 0x778899, 'lightsteelblue': 0xB0C4DE, 'lightyellow': 0xFFFFE0, 'lime': 0x00FF00, 'limegreen': 0x32CD32,
'linen': 0xFAF0E6, 'magenta': 0xFF00FF, 'maroon': 0x800000, 'mediumaquamarine': 0x66CDAA, 'mediumblue': 0x0000CD, 'mediumorchid': 0xBA55D3,
'mediumpurple': 0x9370DB, 'mediumseagreen': 0x3CB371, 'mediumslateblue': 0x7B68EE, 'mediumspringgreen': 0x00FA9A, 'mediumturquoise': 0x48D1CC,
'mediumvioletred': 0xC71585, 'midnightblue': 0x191970, 'mintcream': 0xF5FFFA, 'mistyrose': 0xFFE4E1, 'moccasin': 0xFFE4B5, 'navajowhite': 0xFFDEAD,
'navy': 0x000080, 'oldlace': 0xFDF5E6, 'olive': 0x808000, 'olivedrab': 0x6B8E23, 'orange': 0xFFA500, 'orangered': 0xFF4500, 'orchid': 0xDA70D6,
'palegoldenrod': 0xEEE8AA, 'palegreen': 0x98FB98, 'paleturquoise': 0xAFEEEE, 'palevioletred': 0xDB7093, 'papayawhip': 0xFFEFD5, 'peachpuff': 0xFFDAB9,
'peru': 0xCD853F, 'pink': 0xFFC0CB, 'plum': 0xDDA0DD, 'powderblue': 0xB0E0E6, 'purple': 0x800080, 'rebeccapurple': 0x663399, 'red': 0xFF0000, 'rosybrown': 0xBC8F8F,
'royalblue': 0x4169E1, 'saddlebrown': 0x8B4513, 'salmon': 0xFA8072, 'sandybrown': 0xF4A460, 'seagreen': 0x2E8B57, 'seashell': 0xFFF5EE,
'sienna': 0xA0522D, 'silver': 0xC0C0C0, 'skyblue': 0x87CEEB, 'slateblue': 0x6A5ACD, 'slategray': 0x708090, 'slategrey': 0x708090, 'snow': 0xFFFAFA,
'springgreen': 0x00FF7F, 'steelblue': 0x4682B4, 'tan': 0xD2B48C, 'teal': 0x008080, 'thistle': 0xD8BFD8, 'tomato': 0xFF6347, 'turquoise': 0x40E0D0,
'violet': 0xEE82EE, 'wheat': 0xF5DEB3, 'white': 0xFFFFFF, 'whitesmoke': 0xF5F5F5, 'yellow': 0xFFFF00, 'yellowgreen': 0x9ACD32 };
const _hslA = { h: 0, s: 0, l: 0 };
const _hslB = { h: 0, s: 0, l: 0 };
function hue2rgb( p, q, t ) {
if ( t < 0 ) t += 1;
if ( t > 1 ) t -= 1;
if ( t < 1 / 6 ) return p + ( q - p ) * 6 * t;
if ( t < 1 / 2 ) return q;
if ( t < 2 / 3 ) return p + ( q - p ) * 6 * ( 2 / 3 - t );
return p;
}
function SRGBToLinear( c ) {
return ( c < 0.04045 ) ? c * 0.0773993808 : Math.pow( c * 0.9478672986 + 0.0521327014, 2.4 );
}
function LinearToSRGB( c ) {
return ( c < 0.0031308 ) ? c * 12.92 : 1.055 * ( Math.pow( c, 0.41666 ) ) - 0.055;
}
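/**
 * Worked example, approximate values only: the two transfer functions above
 * are near inverses of each other (the exponents 2.4 and 0.41666 are rounded).
 *
 *   SRGBToLinear( 0.5 );                 // ~0.214
 *   LinearToSRGB( SRGBToLinear( 0.5 ) ); // ~0.5
 */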
class Color {
constructor( r, g, b ) {
if ( g === undefined && b === undefined ) {
// r is THREE.Color, hex or string
return this.set( r );
}
return this.setRGB( r, g, b );
}
set( value ) {
if ( value && value.isColor ) {
this.copy( value );
} else if ( typeof value === 'number' ) {
this.setHex( value );
} else if ( typeof value === 'string' ) {
this.setStyle( value );
}
return this;
}
setScalar( scalar ) {
this.r = scalar;
this.g = scalar;
this.b = scalar;
return this;
}
setHex( hex ) {
hex = Math.floor( hex );
this.r = ( hex >> 16 & 255 ) / 255;
this.g = ( hex >> 8 & 255 ) / 255;
this.b = ( hex & 255 ) / 255;
return this;
}
setRGB( r, g, b ) {
this.r = r;
this.g = g;
this.b = b;
return this;
}
setHSL( h, s, l ) {
// h,s,l ranges are in 0.0 - 1.0
h = euclideanModulo( h, 1 );
s = clamp$1( s, 0, 1 );
l = clamp$1( l, 0, 1 );
if ( s === 0 ) {
this.r = this.g = this.b = l;
} else {
const p = l <= 0.5 ? l * ( 1 + s ) : l + s - ( l * s );
const q = ( 2 * l ) - p;
this.r = hue2rgb( q, p, h + 1 / 3 );
this.g = hue2rgb( q, p, h );
this.b = hue2rgb( q, p, h - 1 / 3 );
}
return this;
}
setStyle( style ) {
function handleAlpha( string ) {
if ( string === undefined ) return;
if ( parseFloat( string ) < 1 ) {
console.warn( 'THREE.Color: Alpha component of ' + style + ' will be ignored.' );
}
}
let m;
if ( m = /^((?:rgb|hsl)a?)\(([^\)]*)\)/.exec( style ) ) {
// rgb / hsl
let color;
const name = m[ 1 ];
const components = m[ 2 ];
switch ( name ) {
case 'rgb':
case 'rgba':
if ( color = /^\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) {
// rgb(255,0,0) rgba(255,0,0,0.5)
this.r = Math.min( 255, parseInt( color[ 1 ], 10 ) ) / 255;
this.g = Math.min( 255, parseInt( color[ 2 ], 10 ) ) / 255;
this.b = Math.min( 255, parseInt( color[ 3 ], 10 ) ) / 255;
handleAlpha( color[ 4 ] );
return this;
}
if ( color = /^\s*(\d+)\%\s*,\s*(\d+)\%\s*,\s*(\d+)\%\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) {
// rgb(100%,0%,0%) rgba(100%,0%,0%,0.5)
this.r = Math.min( 100, parseInt( color[ 1 ], 10 ) ) / 100;
this.g = Math.min( 100, parseInt( color[ 2 ], 10 ) ) / 100;
this.b = Math.min( 100, parseInt( color[ 3 ], 10 ) ) / 100;
handleAlpha( color[ 4 ] );
return this;
}
break;
case 'hsl':
case 'hsla':
if ( color = /^\s*(\d*\.?\d+)\s*,\s*(\d+)\%\s*,\s*(\d+)\%\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) {
// hsl(120,50%,50%) hsla(120,50%,50%,0.5)
const h = parseFloat( color[ 1 ] ) / 360;
const s = parseInt( color[ 2 ], 10 ) / 100;
const l = parseInt( color[ 3 ], 10 ) / 100;
handleAlpha( color[ 4 ] );
return this.setHSL( h, s, l );
}
break;
}
} else if ( m = /^\#([A-Fa-f\d]+)$/.exec( style ) ) {
// hex color
const hex = m[ 1 ];
const size = hex.length;
if ( size === 3 ) {
// #ff0
this.r = parseInt( hex.charAt( 0 ) + hex.charAt( 0 ), 16 ) / 255;
this.g = parseInt( hex.charAt( 1 ) + hex.charAt( 1 ), 16 ) / 255;
this.b = parseInt( hex.charAt( 2 ) + hex.charAt( 2 ), 16 ) / 255;
return this;
} else if ( size === 6 ) {
// #ff0000
this.r = parseInt( hex.charAt( 0 ) + hex.charAt( 1 ), 16 ) / 255;
this.g = parseInt( hex.charAt( 2 ) + hex.charAt( 3 ), 16 ) / 255;
this.b = parseInt( hex.charAt( 4 ) + hex.charAt( 5 ), 16 ) / 255;
return this;
}
}
if ( style && style.length > 0 ) {
return this.setColorName( style );
}
return this;
}
setColorName( style ) {
// color keywords
const hex = _colorKeywords[ style.toLowerCase() ];
if ( hex !== undefined ) {
// red
this.setHex( hex );
} else {
// unknown color
console.warn( 'THREE.Color: Unknown color ' + style );
}
return this;
}
clone() {
return new this.constructor( this.r, this.g, this.b );
}
copy( color ) {
this.r = color.r;
this.g = color.g;
this.b = color.b;
return this;
}
copyGammaToLinear( color, gammaFactor = 2.0 ) {
this.r = Math.pow( color.r, gammaFactor );
this.g = Math.pow( color.g, gammaFactor );
this.b = Math.pow( color.b, gammaFactor );
return this;
}
copyLinearToGamma( color, gammaFactor = 2.0 ) {
const safeInverse = ( gammaFactor > 0 ) ? ( 1.0 / gammaFactor ) : 1.0;
this.r = Math.pow( color.r, safeInverse );
this.g = Math.pow( color.g, safeInverse );
this.b = Math.pow( color.b, safeInverse );
return this;
}
convertGammaToLinear( gammaFactor ) {
this.copyGammaToLinear( this, gammaFactor );
return this;
}
convertLinearToGamma( gammaFactor ) {
this.copyLinearToGamma( this, gammaFactor );
return this;
}
copySRGBToLinear( color ) {
this.r = SRGBToLinear( color.r );
this.g = SRGBToLinear( color.g );
this.b = SRGBToLinear( color.b );
return this;
}
copyLinearToSRGB( color ) {
this.r = LinearToSRGB( color.r );
this.g = LinearToSRGB( color.g );
this.b = LinearToSRGB( color.b );
return this;
}
convertSRGBToLinear() {
this.copySRGBToLinear( this );
return this;
}
convertLinearToSRGB() {
this.copyLinearToSRGB( this );
return this;
}
getHex() {
return ( this.r * 255 ) << 16 ^ ( this.g * 255 ) << 8 ^ ( this.b * 255 ) << 0;
}
getHexString() {
return ( '000000' + this.getHex().toString( 16 ) ).slice( - 6 );
}
getHSL( target ) {
// h,s,l ranges are in 0.0 - 1.0
if ( target === undefined ) {
console.warn( 'THREE.Color: .getHSL() target is now required' );
target = { h: 0, s: 0, l: 0 };
}
const r = this.r, g = this.g, b = this.b;
const max = Math.max( r, g, b );
const min = Math.min( r, g, b );
let hue, saturation;
const lightness = ( min + max ) / 2.0;
if ( min === max ) {
hue = 0;
saturation = 0;
} else {
const delta = max - min;
saturation = lightness <= 0.5 ? delta / ( max + min ) : delta / ( 2 - max - min );
switch ( max ) {
case r: hue = ( g - b ) / delta + ( g < b ? 6 : 0 ); break;
case g: hue = ( b - r ) / delta + 2; break;
case b: hue = ( r - g ) / delta + 4; break;
}
hue /= 6;
}
target.h = hue;
target.s = saturation;
target.l = lightness;
return target;
}
getStyle() {
return 'rgb(' + ( ( this.r * 255 ) | 0 ) + ',' + ( ( this.g * 255 ) | 0 ) + ',' + ( ( this.b * 255 ) | 0 ) + ')';
}
offsetHSL( h, s, l ) {
this.getHSL( _hslA );
_hslA.h += h; _hslA.s += s; _hslA.l += l;
this.setHSL( _hslA.h, _hslA.s, _hslA.l );
return this;
}
add( color ) {
this.r += color.r;
this.g += color.g;
this.b += color.b;
return this;
}
addColors( color1, color2 ) {
this.r = color1.r + color2.r;
this.g = color1.g + color2.g;
this.b = color1.b + color2.b;
return this;
}
addScalar( s ) {
this.r += s;
this.g += s;
this.b += s;
return this;
}
sub( color ) {
this.r = Math.max( 0, this.r - color.r );
this.g = Math.max( 0, this.g - color.g );
this.b = Math.max( 0, this.b - color.b );
return this;
}
multiply( color ) {
this.r *= color.r;
this.g *= color.g;
this.b *= color.b;
return this;
}
multiplyScalar( s ) {
this.r *= s;
this.g *= s;
this.b *= s;
return this;
}
lerp( color, alpha ) {
this.r += ( color.r - this.r ) * alpha;
this.g += ( color.g - this.g ) * alpha;
this.b += ( color.b - this.b ) * alpha;
return this;
}
lerpColors( color1, color2, alpha ) {
this.r = color1.r + ( color2.r - color1.r ) * alpha;
this.g = color1.g + ( color2.g - color1.g ) * alpha;
this.b = color1.b + ( color2.b - color1.b ) * alpha;
return this;
}
lerpHSL( color, alpha ) {
this.getHSL( _hslA );
color.getHSL( _hslB );
const h = lerp( _hslA.h, _hslB.h, alpha );
const s = lerp( _hslA.s, _hslB.s, alpha );
const l = lerp( _hslA.l, _hslB.l, alpha );
this.setHSL( h, s, l );
return this;
}
equals( c ) {
return ( c.r === this.r ) && ( c.g === this.g ) && ( c.b === this.b );
}
fromArray( array, offset = 0 ) {
this.r = array[ offset ];
this.g = array[ offset + 1 ];
this.b = array[ offset + 2 ];
return this;
}
toArray( array = [], offset = 0 ) {
array[ offset ] = this.r;
array[ offset + 1 ] = this.g;
array[ offset + 2 ] = this.b;
return array;
}
fromBufferAttribute( attribute, index ) {
this.r = attribute.getX( index );
this.g = attribute.getY( index );
this.b = attribute.getZ( index );
if ( attribute.normalized === true ) {
// assuming Uint8Array
this.r /= 255;
this.g /= 255;
this.b /= 255;
}
return this;
}
toJSON() {
return this.getHex();
}
}
Color.NAMES = _colorKeywords;
Color.prototype.isColor = true;
Color.prototype.r = 1;
Color.prototype.g = 1;
Color.prototype.b = 1;
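/**
 * Illustrative usage sketch: the Color class above accepts hex numbers,
 * CSS-style strings and HSL components interchangeably.
 *
 *   const c = new Color( 0xff8800 );
 *   c.set( 'rgb(255, 136, 0)' );     // same color, parsed by setStyle()
 *   c.setHSL( 0, 1.0, 0.5 );         // pure red; h, s, l all range 0.0 - 1.0
 *   console.log( c.getHexString() ); // 'ff0000'
 */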
/**
* parameters = {
* color: <hex>,
* opacity: <float>,
* map: new THREE.Texture( <Image> ),
*
* lightMap: new THREE.Texture( <Image> ),
* lightMapIntensity: <float>
*
* aoMap: new THREE.Texture( <Image> ),
* aoMapIntensity: <float>
*
* specularMap: new THREE.Texture( <Image> ),
*
* alphaMap: new THREE.Texture( <Image> ),
*
* envMap: new THREE.CubeTexture( [posx, negx, posy, negy, posz, negz] ),
* combine: THREE.Multiply,
* reflectivity: <float>,
* refractionRatio: <float>,
*
* depthTest: <bool>,
* depthWrite: <bool>,
*
* wireframe: <boolean>,
* wireframeLinewidth: <float>,
*
* skinning: <bool>,
* morphTargets: <bool>
* }
*/
class MeshBasicMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'MeshBasicMaterial';
this.color = new Color( 0xffffff ); // unlit base color (behaves like an emissive color, since this material ignores lights)
this.map = null;
this.lightMap = null;
this.lightMapIntensity = 1.0;
this.aoMap = null;
this.aoMapIntensity = 1.0;
this.specularMap = null;
this.alphaMap = null;
this.envMap = null;
this.combine = MultiplyOperation;
this.reflectivity = 1;
this.refractionRatio = 0.98;
this.wireframe = false;
this.wireframeLinewidth = 1;
this.wireframeLinecap = 'round';
this.wireframeLinejoin = 'round';
this.skinning = false;
this.morphTargets = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
this.map = source.map;
this.lightMap = source.lightMap;
this.lightMapIntensity = source.lightMapIntensity;
this.aoMap = source.aoMap;
this.aoMapIntensity = source.aoMapIntensity;
this.specularMap = source.specularMap;
this.alphaMap = source.alphaMap;
this.envMap = source.envMap;
this.combine = source.combine;
this.reflectivity = source.reflectivity;
this.refractionRatio = source.refractionRatio;
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
this.wireframeLinecap = source.wireframeLinecap;
this.wireframeLinejoin = source.wireframeLinejoin;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
return this;
}
}
MeshBasicMaterial.prototype.isMeshBasicMaterial = true;
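/**
 * Illustrative usage sketch: MeshBasicMaterial is unlit, so only the
 * parameters listed in the block above apply; they are forwarded to
 * Material.setValues().
 *
 *   const material = new MeshBasicMaterial( {
 *     color: 0xff8800,
 *     wireframe: true,
 *     transparent: true,
 *     opacity: 0.5
 *   } );
 */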
const _vector$9 = /*@__PURE__*/ new Vector3();
const _vector2 = /*@__PURE__*/ new Vector2();
class BufferAttribute {
constructor( array, itemSize, normalized ) {
if ( Array.isArray( array ) ) {
throw new TypeError( 'THREE.BufferAttribute: array should be a Typed Array.' );
}
this.name = '';
this.array = array;
this.itemSize = itemSize;
this.count = array !== undefined ? array.length / itemSize : 0;
this.normalized = normalized === true;
this.usage = StaticDrawUsage;
this.updateRange = { offset: 0, count: - 1 };
this.version = 0;
this.onUploadCallback = function () {};
}
set needsUpdate( value ) {
if ( value === true ) this.version ++;
}
setUsage( value ) {
this.usage = value;
return this;
}
copy( source ) {
this.name = source.name;
this.array = new source.array.constructor( source.array );
this.itemSize = source.itemSize;
this.count = source.count;
this.normalized = source.normalized;
this.usage = source.usage;
return this;
}
copyAt( index1, attribute, index2 ) {
index1 *= this.itemSize;
index2 *= attribute.itemSize;
for ( let i = 0, l = this.itemSize; i < l; i ++ ) {
this.array[ index1 + i ] = attribute.array[ index2 + i ];
}
return this;
}
copyArray( array ) {
this.array.set( array );
return this;
}
copyColorsArray( colors ) {
const array = this.array;
let offset = 0;
for ( let i = 0, l = colors.length; i < l; i ++ ) {
let color = colors[ i ];
if ( color === undefined ) {
console.warn( 'THREE.BufferAttribute.copyColorsArray(): color is undefined', i );
color = new Color();
}
array[ offset ++ ] = color.r;
array[ offset ++ ] = color.g;
array[ offset ++ ] = color.b;
}
return this;
}
copyVector2sArray( vectors ) {
const array = this.array;
let offset = 0;
for ( let i = 0, l = vectors.length; i < l; i ++ ) {
let vector = vectors[ i ];
if ( vector === undefined ) {
console.warn( 'THREE.BufferAttribute.copyVector2sArray(): vector is undefined', i );
vector = new Vector2();
}
array[ offset ++ ] = vector.x;
array[ offset ++ ] = vector.y;
}
return this;
}
copyVector3sArray( vectors ) {
const array = this.array;
let offset = 0;
for ( let i = 0, l = vectors.length; i < l; i ++ ) {
let vector = vectors[ i ];
if ( vector === undefined ) {
console.warn( 'THREE.BufferAttribute.copyVector3sArray(): vector is undefined', i );
vector = new Vector3();
}
array[ offset ++ ] = vector.x;
array[ offset ++ ] = vector.y;
array[ offset ++ ] = vector.z;
}
return this;
}
copyVector4sArray( vectors ) {
const array = this.array;
let offset = 0;
for ( let i = 0, l = vectors.length; i < l; i ++ ) {
let vector = vectors[ i ];
if ( vector === undefined ) {
console.warn( 'THREE.BufferAttribute.copyVector4sArray(): vector is undefined', i );
vector = new Vector4();
}
array[ offset ++ ] = vector.x;
array[ offset ++ ] = vector.y;
array[ offset ++ ] = vector.z;
array[ offset ++ ] = vector.w;
}
return this;
}
applyMatrix3( m ) {
if ( this.itemSize === 2 ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector2.fromBufferAttribute( this, i );
_vector2.applyMatrix3( m );
this.setXY( i, _vector2.x, _vector2.y );
}
} else if ( this.itemSize === 3 ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$9.fromBufferAttribute( this, i );
_vector$9.applyMatrix3( m );
this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z );
}
}
return this;
}
applyMatrix4( m ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$9.x = this.getX( i );
_vector$9.y = this.getY( i );
_vector$9.z = this.getZ( i );
_vector$9.applyMatrix4( m );
this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z );
}
return this;
}
applyNormalMatrix( m ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$9.x = this.getX( i );
_vector$9.y = this.getY( i );
_vector$9.z = this.getZ( i );
_vector$9.applyNormalMatrix( m );
this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z );
}
return this;
}
transformDirection( m ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$9.x = this.getX( i );
_vector$9.y = this.getY( i );
_vector$9.z = this.getZ( i );
_vector$9.transformDirection( m );
this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z );
}
return this;
}
set( value, offset = 0 ) {
this.array.set( value, offset );
return this;
}
getX( index ) {
return this.array[ index * this.itemSize ];
}
setX( index, x ) {
this.array[ index * this.itemSize ] = x;
return this;
}
getY( index ) {
return this.array[ index * this.itemSize + 1 ];
}
setY( index, y ) {
this.array[ index * this.itemSize + 1 ] = y;
return this;
}
getZ( index ) {
return this.array[ index * this.itemSize + 2 ];
}
setZ( index, z ) {
this.array[ index * this.itemSize + 2 ] = z;
return this;
}
getW( index ) {
return this.array[ index * this.itemSize + 3 ];
}
setW( index, w ) {
this.array[ index * this.itemSize + 3 ] = w;
return this;
}
setXY( index, x, y ) {
index *= this.itemSize;
this.array[ index + 0 ] = x;
this.array[ index + 1 ] = y;
return this;
}
setXYZ( index, x, y, z ) {
index *= this.itemSize;
this.array[ index + 0 ] = x;
this.array[ index + 1 ] = y;
this.array[ index + 2 ] = z;
return this;
}
setXYZW( index, x, y, z, w ) {
index *= this.itemSize;
this.array[ index + 0 ] = x;
this.array[ index + 1 ] = y;
this.array[ index + 2 ] = z;
this.array[ index + 3 ] = w;
return this;
}
onUpload( callback ) {
this.onUploadCallback = callback;
return this;
}
clone() {
return new this.constructor( this.array, this.itemSize ).copy( this );
}
toJSON() {
const data = {
itemSize: this.itemSize,
type: this.array.constructor.name,
array: Array.prototype.slice.call( this.array ),
normalized: this.normalized
};
if ( this.name !== '' ) data.name = this.name;
if ( this.usage !== StaticDrawUsage ) data.usage = this.usage;
if ( this.updateRange.offset !== 0 || this.updateRange.count !== - 1 ) data.updateRange = this.updateRange;
return data;
}
}
BufferAttribute.prototype.isBufferAttribute = true;
class Uint16BufferAttribute extends BufferAttribute {
constructor( array, itemSize, normalized ) {
super( new Uint16Array( array ), itemSize, normalized );
}
}
class Uint32BufferAttribute extends BufferAttribute {
constructor( array, itemSize, normalized ) {
super( new Uint32Array( array ), itemSize, normalized );
}
}
class Float32BufferAttribute extends BufferAttribute {
constructor( array, itemSize, normalized ) {
super( new Float32Array( array ), itemSize, normalized );
}
}
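/**
 * Illustrative usage sketch: building a position attribute for a single
 * triangle with the typed-array wrappers above. `geometry` is an assumed
 * instance of the BufferGeometry class defined further below.
 *
 *   const positions = new Float32BufferAttribute( [
 *     0, 0, 0,
 *     1, 0, 0,
 *     0, 1, 0
 *   ], 3 );
 *   geometry.setAttribute( 'position', positions );
 */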
function arrayMax( array ) {
if ( array.length === 0 ) return - Infinity;
let max = array[ 0 ];
for ( let i = 1, l = array.length; i < l; ++ i ) {
if ( array[ i ] > max ) max = array[ i ];
}
return max;
}
let _id = 0;
const _m1 = /*@__PURE__*/ new Matrix4();
const _obj = /*@__PURE__*/ new Object3D();
const _offset = /*@__PURE__*/ new Vector3();
const _box$1 = /*@__PURE__*/ new Box3();
const _boxMorphTargets = /*@__PURE__*/ new Box3();
const _vector$8 = /*@__PURE__*/ new Vector3();
class BufferGeometry extends EventDispatcher {
constructor() {
super();
Object.defineProperty( this, 'id', { value: _id ++ } );
this.uuid = generateUUID();
this.name = '';
this.type = 'BufferGeometry';
this.index = null;
this.attributes = {};
this.morphAttributes = {};
this.morphTargetsRelative = false;
this.groups = [];
this.boundingBox = null;
this.boundingSphere = null;
this.drawRange = { start: 0, count: Infinity };
this.userData = {};
}
getIndex() {
return this.index;
}
setIndex( index ) {
if ( Array.isArray( index ) ) {
this.index = new ( arrayMax( index ) > 65535 ? Uint32BufferAttribute : Uint16BufferAttribute )( index, 1 );
} else {
this.index = index;
}
return this;
}
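// Illustrative note, a sketch only: setIndex() accepts either a plain array
// (wrapped in a Uint16 or Uint32 attribute depending on the largest index, see
// arrayMax() above) or an existing BufferAttribute, e.g.:
//   geometry.setIndex( [ 0, 1, 2 ] ); // `geometry` is an assumed BufferGeometry instance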
getAttribute( name ) {
return this.attributes[ name ];
}
setAttribute( name, attribute ) {
this.attributes[ name ] = attribute;
return this;
}
deleteAttribute( name ) {
delete this.attributes[ name ];
return this;
}
hasAttribute( name ) {
return this.attributes[ name ] !== undefined;
}
addGroup( start, count, materialIndex = 0 ) {
this.groups.push( {
start: start,
count: count,
materialIndex: materialIndex
} );
}
clearGroups() {
this.groups = [];
}
setDrawRange( start, count ) {
this.drawRange.start = start;
this.drawRange.count = count;
}
applyMatrix4( matrix ) {
const position = this.attributes.position;
if ( position !== undefined ) {
position.applyMatrix4( matrix );
position.needsUpdate = true;
}
const normal = this.attributes.normal;
if ( normal !== undefined ) {
const normalMatrix = new Matrix3().getNormalMatrix( matrix );
normal.applyNormalMatrix( normalMatrix );
normal.needsUpdate = true;
}
const tangent = this.attributes.tangent;
if ( tangent !== undefined ) {
tangent.transformDirection( matrix );
tangent.needsUpdate = true;
}
if ( this.boundingBox !== null ) {
this.computeBoundingBox();
}
if ( this.boundingSphere !== null ) {
this.computeBoundingSphere();
}
return this;
}
rotateX( angle ) {
// rotate geometry around world x-axis
_m1.makeRotationX( angle );
this.applyMatrix4( _m1 );
return this;
}
rotateY( angle ) {
// rotate geometry around world y-axis
_m1.makeRotationY( angle );
this.applyMatrix4( _m1 );
return this;
}
rotateZ( angle ) {
// rotate geometry around world z-axis
_m1.makeRotationZ( angle );
this.applyMatrix4( _m1 );
return this;
}
translate( x, y, z ) {
// translate geometry
_m1.makeTranslation( x, y, z );
this.applyMatrix4( _m1 );
return this;
}
scale( x, y, z ) {
// scale geometry
_m1.makeScale( x, y, z );
this.applyMatrix4( _m1 );
return this;
}
lookAt( vector ) {
_obj.lookAt( vector );
_obj.updateMatrix();
this.applyMatrix4( _obj.matrix );
return this;
}
center() {
this.computeBoundingBox();
this.boundingBox.getCenter( _offset ).negate();
this.translate( _offset.x, _offset.y, _offset.z );
return this;
}
setFromPoints( points ) {
const position = [];
for ( let i = 0, l = points.length; i < l; i ++ ) {
const point = points[ i ];
position.push( point.x, point.y, point.z || 0 );
}
this.setAttribute( 'position', new Float32BufferAttribute( position, 3 ) );
return this;
}
computeBoundingBox() {
if ( this.boundingBox === null ) {
this.boundingBox = new Box3();
}
const position = this.attributes.position;
const morphAttributesPosition = this.morphAttributes.position;
if ( position && position.isGLBufferAttribute ) {
console.error( 'THREE.BufferGeometry.computeBoundingBox(): GLBufferAttribute requires a manual bounding box. Alternatively set "mesh.frustumCulled" to "false".', this );
this.boundingBox.set(
new Vector3( - Infinity, - Infinity, - Infinity ),
new Vector3( + Infinity, + Infinity, + Infinity )
);
return;
}
if ( position !== undefined ) {
this.boundingBox.setFromBufferAttribute( position );
// process morph attributes if present
if ( morphAttributesPosition ) {
for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) {
const morphAttribute = morphAttributesPosition[ i ];
_box$1.setFromBufferAttribute( morphAttribute );
if ( this.morphTargetsRelative ) {
_vector$8.addVectors( this.boundingBox.min, _box$1.min );
this.boundingBox.expandByPoint( _vector$8 );
_vector$8.addVectors( this.boundingBox.max, _box$1.max );
this.boundingBox.expandByPoint( _vector$8 );
} else {
this.boundingBox.expandByPoint( _box$1.min );
this.boundingBox.expandByPoint( _box$1.max );
}
}
}
} else {
this.boundingBox.makeEmpty();
}
if ( isNaN( this.boundingBox.min.x ) || isNaN( this.boundingBox.min.y ) || isNaN( this.boundingBox.min.z ) ) {
console.error( 'THREE.BufferGeometry.computeBoundingBox(): Computed min/max have NaN values. The "position" attribute is likely to have NaN values.', this );
}
}
computeBoundingSphere() {
if ( this.boundingSphere === null ) {
this.boundingSphere = new Sphere();
}
const position = this.attributes.position;
const morphAttributesPosition = this.morphAttributes.position;
if ( position && position.isGLBufferAttribute ) {
console.error( 'THREE.BufferGeometry.computeBoundingSphere(): GLBufferAttribute requires a manual bounding sphere. Alternatively set "mesh.frustumCulled" to "false".', this );
this.boundingSphere.set( new Vector3(), Infinity );
return;
}
if ( position ) {
// first, find the center of the bounding sphere
const center = this.boundingSphere.center;
_box$1.setFromBufferAttribute( position );
// process morph attributes if present
if ( morphAttributesPosition ) {
for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) {
const morphAttribute = morphAttributesPosition[ i ];
_boxMorphTargets.setFromBufferAttribute( morphAttribute );
if ( this.morphTargetsRelative ) {
_vector$8.addVectors( _box$1.min, _boxMorphTargets.min );
_box$1.expandByPoint( _vector$8 );
_vector$8.addVectors( _box$1.max, _boxMorphTargets.max );
_box$1.expandByPoint( _vector$8 );
} else {
_box$1.expandByPoint( _boxMorphTargets.min );
_box$1.expandByPoint( _boxMorphTargets.max );
}
}
}
_box$1.getCenter( center );
// second, try to find a boundingSphere with a radius smaller than the
// boundingSphere of the boundingBox: sqrt(3) smaller in the best case
let maxRadiusSq = 0;
for ( let i = 0, il = position.count; i < il; i ++ ) {
_vector$8.fromBufferAttribute( position, i );
maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( _vector$8 ) );
}
// process morph attributes if present
if ( morphAttributesPosition ) {
for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) {
const morphAttribute = morphAttributesPosition[ i ];
const morphTargetsRelative = this.morphTargetsRelative;
for ( let j = 0, jl = morphAttribute.count; j < jl; j ++ ) {
_vector$8.fromBufferAttribute( morphAttribute, j );
if ( morphTargetsRelative ) {
_offset.fromBufferAttribute( position, j );
_vector$8.add( _offset );
}
maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( _vector$8 ) );
}
}
}
this.boundingSphere.radius = Math.sqrt( maxRadiusSq );
if ( isNaN( this.boundingSphere.radius ) ) {
console.error( 'THREE.BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.', this );
}
}
}
computeFaceNormals() {
// backwards compatibility
}
computeTangents() {
const index = this.index;
const attributes = this.attributes;
// based on http://www.terathon.com/code/tangent.html
// (per vertex tangents)
if ( index === null ||
attributes.position === undefined ||
attributes.normal === undefined ||
attributes.uv === undefined ) {
console.error( 'THREE.BufferGeometry: .computeTangents() failed. Missing required attributes (index, position, normal or uv)' );
return;
}
const indices = index.array;
const positions = attributes.position.array;
const normals = attributes.normal.array;
const uvs = attributes.uv.array;
const nVertices = positions.length / 3;
if ( attributes.tangent === undefined ) {
this.setAttribute( 'tangent', new BufferAttribute( new Float32Array( 4 * nVertices ), 4 ) );
}
const tangents = attributes.tangent.array;
const tan1 = [], tan2 = [];
for ( let i = 0; i < nVertices; i ++ ) {
tan1[ i ] = new Vector3();
tan2[ i ] = new Vector3();
}
const vA = new Vector3(),
vB = new Vector3(),
vC = new Vector3(),
uvA = new Vector2(),
uvB = new Vector2(),
uvC = new Vector2(),
sdir = new Vector3(),
tdir = new Vector3();
function handleTriangle( a, b, c ) {
vA.fromArray( positions, a * 3 );
vB.fromArray( positions, b * 3 );
vC.fromArray( positions, c * 3 );
uvA.fromArray( uvs, a * 2 );
uvB.fromArray( uvs, b * 2 );
uvC.fromArray( uvs, c * 2 );
vB.sub( vA );
vC.sub( vA );
uvB.sub( uvA );
uvC.sub( uvA );
const r = 1.0 / ( uvB.x * uvC.y - uvC.x * uvB.y );
// silently ignore degenerate UV triangles having coincident or collinear vertices
if ( ! isFinite( r ) ) return;
sdir.copy( vB ).multiplyScalar( uvC.y ).addScaledVector( vC, - uvB.y ).multiplyScalar( r );
tdir.copy( vC ).multiplyScalar( uvB.x ).addScaledVector( vB, - uvC.x ).multiplyScalar( r );
tan1[ a ].add( sdir );
tan1[ b ].add( sdir );
tan1[ c ].add( sdir );
tan2[ a ].add( tdir );
tan2[ b ].add( tdir );
tan2[ c ].add( tdir );
}
let groups = this.groups;
if ( groups.length === 0 ) {
groups = [ {
start: 0,
count: indices.length
} ];
}
for ( let i = 0, il = groups.length; i < il; ++ i ) {
const group = groups[ i ];
const start = group.start;
const count = group.count;
for ( let j = start, jl = start + count; j < jl; j += 3 ) {
handleTriangle(
indices[ j + 0 ],
indices[ j + 1 ],
indices[ j + 2 ]
);
}
}
const tmp = new Vector3(), tmp2 = new Vector3();
const n = new Vector3(), n2 = new Vector3();
function handleVertex( v ) {
n.fromArray( normals, v * 3 );
n2.copy( n );
const t = tan1[ v ];
// Gram-Schmidt orthogonalize
tmp.copy( t );
tmp.sub( n.multiplyScalar( n.dot( t ) ) ).normalize();
// Calculate handedness
tmp2.crossVectors( n2, t );
const test = tmp2.dot( tan2[ v ] );
const w = ( test < 0.0 ) ? - 1.0 : 1.0;
tangents[ v * 4 ] = tmp.x;
tangents[ v * 4 + 1 ] = tmp.y;
tangents[ v * 4 + 2 ] = tmp.z;
tangents[ v * 4 + 3 ] = w;
}
for ( let i = 0, il = groups.length; i < il; ++ i ) {
const group = groups[ i ];
const start = group.start;
const count = group.count;
for ( let j = start, jl = start + count; j < jl; j += 3 ) {
handleVertex( indices[ j + 0 ] );
handleVertex( indices[ j + 1 ] );
handleVertex( indices[ j + 2 ] );
}
}
}
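// Usage sketch (illustrative only, not part of the library): computeTangents() requires an
// indexed geometry with 'position', 'normal' and 'uv' attributes and writes a 4-component
// 'tangent' attribute, with handedness stored in the w component.
//
// const geometry = new BoxGeometry( 1, 1, 1 );
// geometry.computeTangents();
// geometry.getAttribute( 'tangent' ).itemSize; // 4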
computeVertexNormals() {
const index = this.index;
const positionAttribute = this.getAttribute( 'position' );
if ( positionAttribute !== undefined ) {
let normalAttribute = this.getAttribute( 'normal' );
if ( normalAttribute === undefined ) {
normalAttribute = new BufferAttribute( new Float32Array( positionAttribute.count * 3 ), 3 );
this.setAttribute( 'normal', normalAttribute );
} else {
// reset existing normals to zero
for ( let i = 0, il = normalAttribute.count; i < il; i ++ ) {
normalAttribute.setXYZ( i, 0, 0, 0 );
}
}
const pA = new Vector3(), pB = new Vector3(), pC = new Vector3();
const nA = new Vector3(), nB = new Vector3(), nC = new Vector3();
const cb = new Vector3(), ab = new Vector3();
// indexed elements
if ( index ) {
for ( let i = 0, il = index.count; i < il; i += 3 ) {
const vA = index.getX( i + 0 );
const vB = index.getX( i + 1 );
const vC = index.getX( i + 2 );
pA.fromBufferAttribute( positionAttribute, vA );
pB.fromBufferAttribute( positionAttribute, vB );
pC.fromBufferAttribute( positionAttribute, vC );
cb.subVectors( pC, pB );
ab.subVectors( pA, pB );
cb.cross( ab );
nA.fromBufferAttribute( normalAttribute, vA );
nB.fromBufferAttribute( normalAttribute, vB );
nC.fromBufferAttribute( normalAttribute, vC );
nA.add( cb );
nB.add( cb );
nC.add( cb );
normalAttribute.setXYZ( vA, nA.x, nA.y, nA.z );
normalAttribute.setXYZ( vB, nB.x, nB.y, nB.z );
normalAttribute.setXYZ( vC, nC.x, nC.y, nC.z );
}
} else {
// non-indexed elements (unconnected triangle soup)
for ( let i = 0, il = positionAttribute.count; i < il; i += 3 ) {
pA.fromBufferAttribute( positionAttribute, i + 0 );
pB.fromBufferAttribute( positionAttribute, i + 1 );
pC.fromBufferAttribute( positionAttribute, i + 2 );
cb.subVectors( pC, pB );
ab.subVectors( pA, pB );
cb.cross( ab );
normalAttribute.setXYZ( i + 0, cb.x, cb.y, cb.z );
normalAttribute.setXYZ( i + 1, cb.x, cb.y, cb.z );
normalAttribute.setXYZ( i + 2, cb.x, cb.y, cb.z );
}
}
this.normalizeNormals();
normalAttribute.needsUpdate = true;
}
}
merge( geometry, offset ) {
if ( ! ( geometry && geometry.isBufferGeometry ) ) {
console.error( 'THREE.BufferGeometry.merge(): geometry not an instance of THREE.BufferGeometry.', geometry );
return;
}
if ( offset === undefined ) {
offset = 0;
console.warn(
'THREE.BufferGeometry.merge(): Overwriting original geometry, starting at offset=0. '
+ 'Use BufferGeometryUtils.mergeBufferGeometries() for lossless merge.'
);
}
const attributes = this.attributes;
for ( const key in attributes ) {
if ( geometry.attributes[ key ] === undefined ) continue;
const attribute1 = attributes[ key ];
const attributeArray1 = attribute1.array;
const attribute2 = geometry.attributes[ key ];
const attributeArray2 = attribute2.array;
const attributeOffset = attribute2.itemSize * offset;
const length = Math.min( attributeArray2.length, attributeArray1.length - attributeOffset );
for ( let i = 0, j = attributeOffset; i < length; i ++, j ++ ) {
attributeArray1[ j ] = attributeArray2[ i ];
}
}
return this;
}
normalizeNormals() {
const normals = this.attributes.normal;
for ( let i = 0, il = normals.count; i < il; i ++ ) {
_vector$8.fromBufferAttribute( normals, i );
_vector$8.normalize();
normals.setXYZ( i, _vector$8.x, _vector$8.y, _vector$8.z );
}
}
toNonIndexed() {
function convertBufferAttribute( attribute, indices ) {
const array = attribute.array;
const itemSize = attribute.itemSize;
const normalized = attribute.normalized;
const array2 = new array.constructor( indices.length * itemSize );
let index = 0, index2 = 0;
for ( let i = 0, l = indices.length; i < l; i ++ ) {
index = indices[ i ] * itemSize;
for ( let j = 0; j < itemSize; j ++ ) {
array2[ index2 ++ ] = array[ index ++ ];
}
}
return new BufferAttribute( array2, itemSize, normalized );
}
//
if ( this.index === null ) {
console.warn( 'THREE.BufferGeometry.toNonIndexed(): BufferGeometry is already non-indexed.' );
return this;
}
const geometry2 = new BufferGeometry();
const indices = this.index.array;
const attributes = this.attributes;
// attributes
for ( const name in attributes ) {
const attribute = attributes[ name ];
const newAttribute = convertBufferAttribute( attribute, indices );
geometry2.setAttribute( name, newAttribute );
}
// morph attributes
const morphAttributes = this.morphAttributes;
for ( const name in morphAttributes ) {
const morphArray = [];
const morphAttribute = morphAttributes[ name ]; // morphAttribute: array of Float32BufferAttributes
for ( let i = 0, il = morphAttribute.length; i < il; i ++ ) {
const attribute = morphAttribute[ i ];
const newAttribute = convertBufferAttribute( attribute, indices );
morphArray.push( newAttribute );
}
geometry2.morphAttributes[ name ] = morphArray;
}
geometry2.morphTargetsRelative = this.morphTargetsRelative;
// groups
const groups = this.groups;
for ( let i = 0, l = groups.length; i < l; i ++ ) {
const group = groups[ i ];
geometry2.addGroup( group.start, group.count, group.materialIndex );
}
return geometry2;
}
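// Usage sketch (illustrative only): toNonIndexed() returns a new geometry whose attributes are
// expanded so that no vertices are shared between triangles.
//
// const indexed = new BoxGeometry( 1, 1, 1 ); // 24 vertices, 36 indices
// const soup = indexed.toNonIndexed();
// soup.index; // null
// soup.getAttribute( 'position' ).count; // 36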
toJSON() {
const data = {
metadata: {
version: 4.5,
type: 'BufferGeometry',
generator: 'BufferGeometry.toJSON'
}
};
// standard BufferGeometry serialization
data.uuid = this.uuid;
data.type = this.type;
if ( this.name !== '' ) data.name = this.name;
if ( Object.keys( this.userData ).length > 0 ) data.userData = this.userData;
if ( this.parameters !== undefined ) {
const parameters = this.parameters;
for ( const key in parameters ) {
if ( parameters[ key ] !== undefined ) data[ key ] = parameters[ key ];
}
return data;
}
// for simplicity the code assumes attributes are not shared across geometries, see #15811
data.data = { attributes: {} };
const index = this.index;
if ( index !== null ) {
data.data.index = {
type: index.array.constructor.name,
array: Array.prototype.slice.call( index.array )
};
}
const attributes = this.attributes;
for ( const key in attributes ) {
const attribute = attributes[ key ];
data.data.attributes[ key ] = attribute.toJSON( data.data );
}
const morphAttributes = {};
let hasMorphAttributes = false;
for ( const key in this.morphAttributes ) {
const attributeArray = this.morphAttributes[ key ];
const array = [];
for ( let i = 0, il = attributeArray.length; i < il; i ++ ) {
const attribute = attributeArray[ i ];
array.push( attribute.toJSON( data.data ) );
}
if ( array.length > 0 ) {
morphAttributes[ key ] = array;
hasMorphAttributes = true;
}
}
if ( hasMorphAttributes ) {
data.data.morphAttributes = morphAttributes;
data.data.morphTargetsRelative = this.morphTargetsRelative;
}
const groups = this.groups;
if ( groups.length > 0 ) {
data.data.groups = JSON.parse( JSON.stringify( groups ) );
}
const boundingSphere = this.boundingSphere;
if ( boundingSphere !== null ) {
data.data.boundingSphere = {
center: boundingSphere.center.toArray(),
radius: boundingSphere.radius
};
}
return data;
}
clone() {
/*
// Handle primitives
const parameters = this.parameters;
if ( parameters !== undefined ) {
const values = [];
for ( const key in parameters ) {
values.push( parameters[ key ] );
}
const geometry = Object.create( this.constructor.prototype );
this.constructor.apply( geometry, values );
return geometry;
}
return new this.constructor().copy( this );
*/
return new BufferGeometry().copy( this );
}
copy( source ) {
// reset
this.index = null;
this.attributes = {};
this.morphAttributes = {};
this.groups = [];
this.boundingBox = null;
this.boundingSphere = null;
// used for storing cloned, shared data
const data = {};
// name
this.name = source.name;
// index
const index = source.index;
if ( index !== null ) {
this.setIndex( index.clone( data ) );
}
// attributes
const attributes = source.attributes;
for ( const name in attributes ) {
const attribute = attributes[ name ];
this.setAttribute( name, attribute.clone( data ) );
}
// morph attributes
const morphAttributes = source.morphAttributes;
for ( const name in morphAttributes ) {
const array = [];
const morphAttribute = morphAttributes[ name ]; // morphAttribute: array of Float32BufferAttributes
for ( let i = 0, l = morphAttribute.length; i < l; i ++ ) {
array.push( morphAttribute[ i ].clone( data ) );
}
this.morphAttributes[ name ] = array;
}
this.morphTargetsRelative = source.morphTargetsRelative;
// groups
const groups = source.groups;
for ( let i = 0, l = groups.length; i < l; i ++ ) {
const group = groups[ i ];
this.addGroup( group.start, group.count, group.materialIndex );
}
// bounding box
const boundingBox = source.boundingBox;
if ( boundingBox !== null ) {
this.boundingBox = boundingBox.clone();
}
// bounding sphere
const boundingSphere = source.boundingSphere;
if ( boundingSphere !== null ) {
this.boundingSphere = boundingSphere.clone();
}
// draw range
this.drawRange.start = source.drawRange.start;
this.drawRange.count = source.drawRange.count;
// user data
this.userData = source.userData;
return this;
}
dispose() {
this.dispatchEvent( { type: 'dispose' } );
}
}
BufferGeometry.prototype.isBufferGeometry = true;
const _inverseMatrix$2 = /*@__PURE__*/ new Matrix4();
const _ray$2 = /*@__PURE__*/ new Ray();
const _sphere$3 = /*@__PURE__*/ new Sphere();
const _vA$1 = /*@__PURE__*/ new Vector3();
const _vB$1 = /*@__PURE__*/ new Vector3();
const _vC$1 = /*@__PURE__*/ new Vector3();
const _tempA = /*@__PURE__*/ new Vector3();
const _tempB = /*@__PURE__*/ new Vector3();
const _tempC = /*@__PURE__*/ new Vector3();
const _morphA = /*@__PURE__*/ new Vector3();
const _morphB = /*@__PURE__*/ new Vector3();
const _morphC = /*@__PURE__*/ new Vector3();
const _uvA$1 = /*@__PURE__*/ new Vector2();
const _uvB$1 = /*@__PURE__*/ new Vector2();
const _uvC$1 = /*@__PURE__*/ new Vector2();
const _intersectionPoint = /*@__PURE__*/ new Vector3();
const _intersectionPointWorld = /*@__PURE__*/ new Vector3();
class Mesh extends Object3D {
constructor( geometry = new BufferGeometry(), material = new MeshBasicMaterial() ) {
super();
this.type = 'Mesh';
this.geometry = geometry;
this.material = material;
this.updateMorphTargets();
}
copy( source ) {
super.copy( source );
if ( source.morphTargetInfluences !== undefined ) {
this.morphTargetInfluences = source.morphTargetInfluences.slice();
}
if ( source.morphTargetDictionary !== undefined ) {
this.morphTargetDictionary = Object.assign( {}, source.morphTargetDictionary );
}
this.material = source.material;
this.geometry = source.geometry;
return this;
}
updateMorphTargets() {
const geometry = this.geometry;
if ( geometry.isBufferGeometry ) {
const morphAttributes = geometry.morphAttributes;
const keys = Object.keys( morphAttributes );
if ( keys.length > 0 ) {
const morphAttribute = morphAttributes[ keys[ 0 ] ];
if ( morphAttribute !== undefined ) {
this.morphTargetInfluences = [];
this.morphTargetDictionary = {};
for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) {
const name = morphAttribute[ m ].name || String( m );
this.morphTargetInfluences.push( 0 );
this.morphTargetDictionary[ name ] = m;
}
}
}
} else {
const morphTargets = geometry.morphTargets;
if ( morphTargets !== undefined && morphTargets.length > 0 ) {
console.error( 'THREE.Mesh.updateMorphTargets() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
}
raycast( raycaster, intersects ) {
const geometry = this.geometry;
const material = this.material;
const matrixWorld = this.matrixWorld;
if ( material === undefined ) return;
// Checking boundingSphere distance to ray
if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere();
_sphere$3.copy( geometry.boundingSphere );
_sphere$3.applyMatrix4( matrixWorld );
if ( raycaster.ray.intersectsSphere( _sphere$3 ) === false ) return;
//
_inverseMatrix$2.copy( matrixWorld ).invert();
_ray$2.copy( raycaster.ray ).applyMatrix4( _inverseMatrix$2 );
// Check boundingBox before continuing
if ( geometry.boundingBox !== null ) {
if ( _ray$2.intersectsBox( geometry.boundingBox ) === false ) return;
}
let intersection;
if ( geometry.isBufferGeometry ) {
const index = geometry.index;
const position = geometry.attributes.position;
const morphPosition = geometry.morphAttributes.position;
const morphTargetsRelative = geometry.morphTargetsRelative;
const uv = geometry.attributes.uv;
const uv2 = geometry.attributes.uv2;
const groups = geometry.groups;
const drawRange = geometry.drawRange;
if ( index !== null ) {
// indexed buffer geometry
if ( Array.isArray( material ) ) {
for ( let i = 0, il = groups.length; i < il; i ++ ) {
const group = groups[ i ];
const groupMaterial = material[ group.materialIndex ];
const start = Math.max( group.start, drawRange.start );
const end = Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) );
for ( let j = start, jl = end; j < jl; j += 3 ) {
const a = index.getX( j );
const b = index.getX( j + 1 );
const c = index.getX( j + 2 );
intersection = checkBufferGeometryIntersection( this, groupMaterial, raycaster, _ray$2, position, morphPosition, morphTargetsRelative, uv, uv2, a, b, c );
if ( intersection ) {
intersection.faceIndex = Math.floor( j / 3 ); // triangle number in indexed buffer semantics
intersection.face.materialIndex = group.materialIndex;
intersects.push( intersection );
}
}
}
} else {
const start = Math.max( 0, drawRange.start );
const end = Math.min( index.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, il = end; i < il; i += 3 ) {
const a = index.getX( i );
const b = index.getX( i + 1 );
const c = index.getX( i + 2 );
intersection = checkBufferGeometryIntersection( this, material, raycaster, _ray$2, position, morphPosition, morphTargetsRelative, uv, uv2, a, b, c );
if ( intersection ) {
intersection.faceIndex = Math.floor( i / 3 ); // triangle number in indexed buffer semantics
intersects.push( intersection );
}
}
}
} else if ( position !== undefined ) {
// non-indexed buffer geometry
if ( Array.isArray( material ) ) {
for ( let i = 0, il = groups.length; i < il; i ++ ) {
const group = groups[ i ];
const groupMaterial = material[ group.materialIndex ];
const start = Math.max( group.start, drawRange.start );
const end = Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) );
for ( let j = start, jl = end; j < jl; j += 3 ) {
const a = j;
const b = j + 1;
const c = j + 2;
intersection = checkBufferGeometryIntersection( this, groupMaterial, raycaster, _ray$2, position, morphPosition, morphTargetsRelative, uv, uv2, a, b, c );
if ( intersection ) {
intersection.faceIndex = Math.floor( j / 3 ); // triangle number in non-indexed buffer semantics
intersection.face.materialIndex = group.materialIndex;
intersects.push( intersection );
}
}
}
} else {
const start = Math.max( 0, drawRange.start );
const end = Math.min( position.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, il = end; i < il; i += 3 ) {
const a = i;
const b = i + 1;
const c = i + 2;
intersection = checkBufferGeometryIntersection( this, material, raycaster, _ray$2, position, morphPosition, morphTargetsRelative, uv, uv2, a, b, c );
if ( intersection ) {
intersection.faceIndex = Math.floor( i / 3 ); // triangle number in non-indexed buffer semantics
intersects.push( intersection );
}
}
}
}
} else if ( geometry.isGeometry ) {
console.error( 'THREE.Mesh.raycast() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
}
Mesh.prototype.isMesh = true;
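// Usage sketch (illustrative only, assuming a Raycaster, a camera and pointer coordinates are
// available): Mesh.raycast() is normally invoked indirectly through Raycaster.intersectObject().
//
// const raycaster = new Raycaster();
// raycaster.setFromCamera( pointerNdc, camera ); // pointerNdc: Vector2 in [-1, 1] NDC space
// const hits = raycaster.intersectObject( mesh, true );
// if ( hits.length > 0 ) console.log( hits[ 0 ].point, hits[ 0 ].face, hits[ 0 ].uv );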
function checkIntersection( object, material, raycaster, ray, pA, pB, pC, point ) {
let intersect;
if ( material.side === BackSide ) {
intersect = ray.intersectTriangle( pC, pB, pA, true, point );
} else {
intersect = ray.intersectTriangle( pA, pB, pC, material.side !== DoubleSide, point );
}
if ( intersect === null ) return null;
_intersectionPointWorld.copy( point );
_intersectionPointWorld.applyMatrix4( object.matrixWorld );
const distance = raycaster.ray.origin.distanceTo( _intersectionPointWorld );
if ( distance < raycaster.near || distance > raycaster.far ) return null;
return {
distance: distance,
point: _intersectionPointWorld.clone(),
object: object
};
}
function checkBufferGeometryIntersection( object, material, raycaster, ray, position, morphPosition, morphTargetsRelative, uv, uv2, a, b, c ) {
_vA$1.fromBufferAttribute( position, a );
_vB$1.fromBufferAttribute( position, b );
_vC$1.fromBufferAttribute( position, c );
const morphInfluences = object.morphTargetInfluences;
if ( material.morphTargets && morphPosition && morphInfluences ) {
_morphA.set( 0, 0, 0 );
_morphB.set( 0, 0, 0 );
_morphC.set( 0, 0, 0 );
for ( let i = 0, il = morphPosition.length; i < il; i ++ ) {
const influence = morphInfluences[ i ];
const morphAttribute = morphPosition[ i ];
if ( influence === 0 ) continue;
_tempA.fromBufferAttribute( morphAttribute, a );
_tempB.fromBufferAttribute( morphAttribute, b );
_tempC.fromBufferAttribute( morphAttribute, c );
if ( morphTargetsRelative ) {
_morphA.addScaledVector( _tempA, influence );
_morphB.addScaledVector( _tempB, influence );
_morphC.addScaledVector( _tempC, influence );
} else {
_morphA.addScaledVector( _tempA.sub( _vA$1 ), influence );
_morphB.addScaledVector( _tempB.sub( _vB$1 ), influence );
_morphC.addScaledVector( _tempC.sub( _vC$1 ), influence );
}
}
_vA$1.add( _morphA );
_vB$1.add( _morphB );
_vC$1.add( _morphC );
}
if ( object.isSkinnedMesh && material.skinning ) {
object.boneTransform( a, _vA$1 );
object.boneTransform( b, _vB$1 );
object.boneTransform( c, _vC$1 );
}
const intersection = checkIntersection( object, material, raycaster, ray, _vA$1, _vB$1, _vC$1, _intersectionPoint );
if ( intersection ) {
if ( uv ) {
_uvA$1.fromBufferAttribute( uv, a );
_uvB$1.fromBufferAttribute( uv, b );
_uvC$1.fromBufferAttribute( uv, c );
intersection.uv = Triangle.getUV( _intersectionPoint, _vA$1, _vB$1, _vC$1, _uvA$1, _uvB$1, _uvC$1, new Vector2() );
}
if ( uv2 ) {
_uvA$1.fromBufferAttribute( uv2, a );
_uvB$1.fromBufferAttribute( uv2, b );
_uvC$1.fromBufferAttribute( uv2, c );
intersection.uv2 = Triangle.getUV( _intersectionPoint, _vA$1, _vB$1, _vC$1, _uvA$1, _uvB$1, _uvC$1, new Vector2() );
}
const face = {
a: a,
b: b,
c: c,
normal: new Vector3(),
materialIndex: 0
};
Triangle.getNormal( _vA$1, _vB$1, _vC$1, face.normal );
intersection.face = face;
}
return intersection;
}
class BoxGeometry extends BufferGeometry {
constructor( width = 1, height = 1, depth = 1, widthSegments = 1, heightSegments = 1, depthSegments = 1 ) {
super();
this.type = 'BoxGeometry';
this.parameters = {
width: width,
height: height,
depth: depth,
widthSegments: widthSegments,
heightSegments: heightSegments,
depthSegments: depthSegments
};
const scope = this;
// segments
widthSegments = Math.floor( widthSegments );
heightSegments = Math.floor( heightSegments );
depthSegments = Math.floor( depthSegments );
// buffers
const indices = [];
const vertices = [];
const normals = [];
const uvs = [];
// helper variables
let numberOfVertices = 0;
let groupStart = 0;
// build each side of the box geometry
buildPlane( 'z', 'y', 'x', - 1, - 1, depth, height, width, depthSegments, heightSegments, 0 ); // px
buildPlane( 'z', 'y', 'x', 1, - 1, depth, height, - width, depthSegments, heightSegments, 1 ); // nx
buildPlane( 'x', 'z', 'y', 1, 1, width, depth, height, widthSegments, depthSegments, 2 ); // py
buildPlane( 'x', 'z', 'y', 1, - 1, width, depth, - height, widthSegments, depthSegments, 3 ); // ny
buildPlane( 'x', 'y', 'z', 1, - 1, width, height, depth, widthSegments, heightSegments, 4 ); // pz
buildPlane( 'x', 'y', 'z', - 1, - 1, width, height, - depth, widthSegments, heightSegments, 5 ); // nz
// build geometry
this.setIndex( indices );
this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
function buildPlane( u, v, w, udir, vdir, width, height, depth, gridX, gridY, materialIndex ) {
const segmentWidth = width / gridX;
const segmentHeight = height / gridY;
const widthHalf = width / 2;
const heightHalf = height / 2;
const depthHalf = depth / 2;
const gridX1 = gridX + 1;
const gridY1 = gridY + 1;
let vertexCounter = 0;
let groupCount = 0;
const vector = new Vector3();
// generate vertices, normals and uvs
for ( let iy = 0; iy < gridY1; iy ++ ) {
const y = iy * segmentHeight - heightHalf;
for ( let ix = 0; ix < gridX1; ix ++ ) {
const x = ix * segmentWidth - widthHalf;
// set values to correct vector component
vector[ u ] = x * udir;
vector[ v ] = y * vdir;
vector[ w ] = depthHalf;
// now apply vector to vertex buffer
vertices.push( vector.x, vector.y, vector.z );
// set values to correct vector component
vector[ u ] = 0;
vector[ v ] = 0;
vector[ w ] = depth > 0 ? 1 : - 1;
// now apply vector to normal buffer
normals.push( vector.x, vector.y, vector.z );
// uvs
uvs.push( ix / gridX );
uvs.push( 1 - ( iy / gridY ) );
// counters
vertexCounter += 1;
}
}
// indices
// 1. you need three indices to draw a single face
// 2. a single segment consists of two faces
// 3. so we need to generate six (2*3) indices per segment
for ( let iy = 0; iy < gridY; iy ++ ) {
for ( let ix = 0; ix < gridX; ix ++ ) {
const a = numberOfVertices + ix + gridX1 * iy;
const b = numberOfVertices + ix + gridX1 * ( iy + 1 );
const c = numberOfVertices + ( ix + 1 ) + gridX1 * ( iy + 1 );
const d = numberOfVertices + ( ix + 1 ) + gridX1 * iy;
// faces
indices.push( a, b, d );
indices.push( b, c, d );
// increase counter
groupCount += 6;
}
}
// add a group to the geometry. this will ensure multi material support
scope.addGroup( groupStart, groupCount, materialIndex );
// calculate new start value for groups
groupStart += groupCount;
// update total number of vertices
numberOfVertices += vertexCounter;
}
}
}
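// Usage sketch (illustrative only): each of the six sides is added as its own group, so an
// array of six materials can be assigned for per-face materials.
//
// const box = new Mesh( new BoxGeometry( 1, 2, 3 ), new MeshBasicMaterial( { color: 0xff0000 } ) );
// box.geometry.groups.length; // 6 (px, nx, py, ny, pz, nz)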
/**
* Uniform Utilities
*/
function cloneUniforms( src ) {
const dst = {};
for ( const u in src ) {
dst[ u ] = {};
for ( const p in src[ u ] ) {
const property = src[ u ][ p ];
if ( property && ( property.isColor ||
property.isMatrix3 || property.isMatrix4 ||
property.isVector2 || property.isVector3 || property.isVector4 ||
property.isTexture || property.isQuaternion ) ) {
dst[ u ][ p ] = property.clone();
} else if ( Array.isArray( property ) ) {
dst[ u ][ p ] = property.slice();
} else {
dst[ u ][ p ] = property;
}
}
}
return dst;
}
function mergeUniforms( uniforms ) {
const merged = {};
for ( let u = 0; u < uniforms.length; u ++ ) {
const tmp = cloneUniforms( uniforms[ u ] );
for ( const p in tmp ) {
merged[ p ] = tmp[ p ];
}
}
return merged;
}
// Legacy
const UniformsUtils = { clone: cloneUniforms, merge: mergeUniforms };
var default_vertex = "void main() {\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n}";
var default_fragment = "void main() {\n\tgl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );\n}";
/**
* parameters = {
* defines: { "label" : "value" },
* uniforms: { "parameter1": { value: 1.0 }, "parameter2": { value2: 2 } },
*
* fragmentShader: <string>,
* vertexShader: <string>,
*
* wireframe: <boolean>,
* wireframeLinewidth: <float>,
*
* lights: <bool>,
*
* skinning: <bool>,
* morphTargets: <bool>,
* morphNormals: <bool>
* }
*/
class ShaderMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'ShaderMaterial';
this.defines = {};
this.uniforms = {};
this.vertexShader = default_vertex;
this.fragmentShader = default_fragment;
this.linewidth = 1;
this.wireframe = false;
this.wireframeLinewidth = 1;
this.fog = false; // set to use scene fog
this.lights = false; // set to use scene lights
this.clipping = false; // set to use user-defined clipping planes
this.skinning = false; // set to use skinning attribute streams
this.morphTargets = false; // set to use morph targets
this.morphNormals = false; // set to use morph normals
this.extensions = {
derivatives: false, // set to use derivatives
fragDepth: false, // set to use fragment depth values
drawBuffers: false, // set to use draw buffers
shaderTextureLOD: false // set to use shader texture LOD
};
// When rendered geometry doesn't include these attributes but the material does,
// use these default values in WebGL. This avoids errors when buffer data is missing.
this.defaultAttributeValues = {
'color': [ 1, 1, 1 ],
'uv': [ 0, 0 ],
'uv2': [ 0, 0 ]
};
this.index0AttributeName = undefined;
this.uniformsNeedUpdate = false;
this.glslVersion = null;
if ( parameters !== undefined ) {
if ( parameters.attributes !== undefined ) {
console.error( 'THREE.ShaderMaterial: attributes should now be defined in THREE.BufferGeometry instead.' );
}
this.setValues( parameters );
}
}
copy( source ) {
super.copy( source );
this.fragmentShader = source.fragmentShader;
this.vertexShader = source.vertexShader;
this.uniforms = cloneUniforms( source.uniforms );
this.defines = Object.assign( {}, source.defines );
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
this.lights = source.lights;
this.clipping = source.clipping;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
this.morphNormals = source.morphNormals;
this.extensions = Object.assign( {}, source.extensions );
this.glslVersion = source.glslVersion;
return this;
}
toJSON( meta ) {
const data = super.toJSON( meta );
data.glslVersion = this.glslVersion;
data.uniforms = {};
for ( const name in this.uniforms ) {
const uniform = this.uniforms[ name ];
const value = uniform.value;
if ( value && value.isTexture ) {
data.uniforms[ name ] = {
type: 't',
value: value.toJSON( meta ).uuid
};
} else if ( value && value.isColor ) {
data.uniforms[ name ] = {
type: 'c',
value: value.getHex()
};
} else if ( value && value.isVector2 ) {
data.uniforms[ name ] = {
type: 'v2',
value: value.toArray()
};
} else if ( value && value.isVector3 ) {
data.uniforms[ name ] = {
type: 'v3',
value: value.toArray()
};
} else if ( value && value.isVector4 ) {
data.uniforms[ name ] = {
type: 'v4',
value: value.toArray()
};
} else if ( value && value.isMatrix3 ) {
data.uniforms[ name ] = {
type: 'm3',
value: value.toArray()
};
} else if ( value && value.isMatrix4 ) {
data.uniforms[ name ] = {
type: 'm4',
value: value.toArray()
};
} else {
data.uniforms[ name ] = {
value: value
};
// note: the array variants v2v, v3v, v4v, m4v and tv are not supported so far
}
}
if ( Object.keys( this.defines ).length > 0 ) data.defines = this.defines;
data.vertexShader = this.vertexShader;
data.fragmentShader = this.fragmentShader;
const extensions = {};
for ( const key in this.extensions ) {
if ( this.extensions[ key ] === true ) extensions[ key ] = true;
}
if ( Object.keys( extensions ).length > 0 ) data.extensions = extensions;
return data;
}
}
ShaderMaterial.prototype.isShaderMaterial = true;
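// Usage sketch (illustrative only): uniforms follow the { value: ... } convention, and the
// default shaders above are replaced by user-supplied GLSL.
//
// const material = new ShaderMaterial( {
// 	uniforms: { uColor: { value: new Color( 0xff8800 ) } },
// 	vertexShader: default_vertex,
// 	fragmentShader: 'uniform vec3 uColor; void main() { gl_FragColor = vec4( uColor, 1.0 ); }'
// } );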
class Camera extends Object3D {
constructor() {
super();
this.type = 'Camera';
this.matrixWorldInverse = new Matrix4();
this.projectionMatrix = new Matrix4();
this.projectionMatrixInverse = new Matrix4();
}
copy( source, recursive ) {
super.copy( source, recursive );
this.matrixWorldInverse.copy( source.matrixWorldInverse );
this.projectionMatrix.copy( source.projectionMatrix );
this.projectionMatrixInverse.copy( source.projectionMatrixInverse );
return this;
}
getWorldDirection( target ) {
if ( target === undefined ) {
console.warn( 'THREE.Camera: .getWorldDirection() target is now required' );
target = new Vector3();
}
this.updateWorldMatrix( true, false );
const e = this.matrixWorld.elements;
return target.set( - e[ 8 ], - e[ 9 ], - e[ 10 ] ).normalize();
}
updateMatrixWorld( force ) {
super.updateMatrixWorld( force );
this.matrixWorldInverse.copy( this.matrixWorld ).invert();
}
updateWorldMatrix( updateParents, updateChildren ) {
super.updateWorldMatrix( updateParents, updateChildren );
this.matrixWorldInverse.copy( this.matrixWorld ).invert();
}
clone() {
return new this.constructor().copy( this );
}
}
Camera.prototype.isCamera = true;
class PerspectiveCamera extends Camera {
constructor( fov = 50, aspect = 1, near = 0.1, far = 2000 ) {
super();
this.type = 'PerspectiveCamera';
this.fov = fov;
this.zoom = 1;
this.near = near;
this.far = far;
this.focus = 10;
this.aspect = aspect;
this.view = null;
this.filmGauge = 35; // width of the film (default in millimeters)
this.filmOffset = 0; // horizontal film offset (same unit as gauge)
this.updateProjectionMatrix();
}
copy( source, recursive ) {
super.copy( source, recursive );
this.fov = source.fov;
this.zoom = source.zoom;
this.near = source.near;
this.far = source.far;
this.focus = source.focus;
this.aspect = source.aspect;
this.view = source.view === null ? null : Object.assign( {}, source.view );
this.filmGauge = source.filmGauge;
this.filmOffset = source.filmOffset;
return this;
}
/**
* Sets the FOV by focal length in respect to the current .filmGauge.
*
* The default film gauge is 35, so that the focal length can be specified for
* a 35mm (full frame) camera.
*
* Values for focal length and film gauge must have the same unit.
*/
setFocalLength( focalLength ) {
/** see {@link http://www.bobatkins.com/photography/technical/field_of_view.html} */
const vExtentSlope = 0.5 * this.getFilmHeight() / focalLength;
this.fov = RAD2DEG * 2 * Math.atan( vExtentSlope );
this.updateProjectionMatrix();
}
/**
* Calculates the focal length from the current .fov and .filmGauge.
*/
getFocalLength() {
const vExtentSlope = Math.tan( DEG2RAD * 0.5 * this.fov );
return 0.5 * this.getFilmHeight() / vExtentSlope;
}
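// Worked example (values rounded): with the default filmGauge of 35 and aspect = 16 / 9,
// getFilmHeight() returns 35 / ( 16 / 9 ) ≈ 19.69, so setFocalLength( 50 ) yields
// fov = 2 * atan( 0.5 * 19.69 / 50 ) * RAD2DEG ≈ 22.3 degrees, and getFocalLength()
// maps that fov back to ≈ 50.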
getEffectiveFOV() {
return RAD2DEG * 2 * Math.atan(
Math.tan( DEG2RAD * 0.5 * this.fov ) / this.zoom );
}
getFilmWidth() {
// film not completely covered in portrait format (aspect < 1)
return this.filmGauge * Math.min( this.aspect, 1 );
}
getFilmHeight() {
// film not completely covered in landscape format (aspect > 1)
return this.filmGauge / Math.max( this.aspect, 1 );
}
/**
* Sets an offset in a larger frustum. This is useful for multi-window or
* multi-monitor/multi-machine setups.
*
* For example, if you have 3x2 monitors and each monitor is 1920x1080 and
* the monitors are in grid like this
*
* +---+---+---+
* | A | B | C |
* +---+---+---+
* | D | E | F |
* +---+---+---+
*
* then for each monitor you would call it like this
*
* const w = 1920;
* const h = 1080;
* const fullWidth = w * 3;
* const fullHeight = h * 2;
*
* --A--
* camera.setViewOffset( fullWidth, fullHeight, w * 0, h * 0, w, h );
* --B--
* camera.setViewOffset( fullWidth, fullHeight, w * 1, h * 0, w, h );
* --C--
* camera.setViewOffset( fullWidth, fullHeight, w * 2, h * 0, w, h );
* --D--
* camera.setViewOffset( fullWidth, fullHeight, w * 0, h * 1, w, h );
* --E--
* camera.setViewOffset( fullWidth, fullHeight, w * 1, h * 1, w, h );
* --F--
* camera.setViewOffset( fullWidth, fullHeight, w * 2, h * 1, w, h );
*
* Note there is no reason monitors have to be the same size or in a grid.
*/
setViewOffset( fullWidth, fullHeight, x, y, width, height ) {
this.aspect = fullWidth / fullHeight;
if ( this.view === null ) {
this.view = {
enabled: true,
fullWidth: 1,
fullHeight: 1,
offsetX: 0,
offsetY: 0,
width: 1,
height: 1
};
}
this.view.enabled = true;
this.view.fullWidth = fullWidth;
this.view.fullHeight = fullHeight;
this.view.offsetX = x;
this.view.offsetY = y;
this.view.width = width;
this.view.height = height;
this.updateProjectionMatrix();
}
clearViewOffset() {
if ( this.view !== null ) {
this.view.enabled = false;
}
this.updateProjectionMatrix();
}
updateProjectionMatrix() {
const near = this.near;
let top = near * Math.tan( DEG2RAD * 0.5 * this.fov ) / this.zoom;
let height = 2 * top;
let width = this.aspect * height;
let left = - 0.5 * width;
const view = this.view;
if ( this.view !== null && this.view.enabled ) {
const fullWidth = view.fullWidth,
fullHeight = view.fullHeight;
left += view.offsetX * width / fullWidth;
top -= view.offsetY * height / fullHeight;
width *= view.width / fullWidth;
height *= view.height / fullHeight;
}
const skew = this.filmOffset;
if ( skew !== 0 ) left += near * skew / this.getFilmWidth();
this.projectionMatrix.makePerspective( left, left + width, top, top - height, near, this.far );
this.projectionMatrixInverse.copy( this.projectionMatrix ).invert();
}
toJSON( meta ) {
const data = super.toJSON( meta );
data.object.fov = this.fov;
data.object.zoom = this.zoom;
data.object.near = this.near;
data.object.far = this.far;
data.object.focus = this.focus;
data.object.aspect = this.aspect;
if ( this.view !== null ) data.object.view = Object.assign( {}, this.view );
data.object.filmGauge = this.filmGauge;
data.object.filmOffset = this.filmOffset;
return data;
}
}
PerspectiveCamera.prototype.isPerspectiveCamera = true;
const fov = 90, aspect = 1; // each cube face camera covers a 90-degree square frustum
class CubeCamera extends Object3D {
constructor( near, far, renderTarget ) {
super();
this.type = 'CubeCamera';
if ( renderTarget.isWebGLCubeRenderTarget !== true ) {
console.error( 'THREE.CubeCamera: The constructor now expects an instance of WebGLCubeRenderTarget as third parameter.' );
return;
}
this.renderTarget = renderTarget;
const cameraPX = new PerspectiveCamera( fov, aspect, near, far );
cameraPX.layers = this.layers;
cameraPX.up.set( 0, - 1, 0 );
cameraPX.lookAt( new Vector3( 1, 0, 0 ) );
this.add( cameraPX );
const cameraNX = new PerspectiveCamera( fov, aspect, near, far );
cameraNX.layers = this.layers;
cameraNX.up.set( 0, - 1, 0 );
cameraNX.lookAt( new Vector3( - 1, 0, 0 ) );
this.add( cameraNX );
const cameraPY = new PerspectiveCamera( fov, aspect, near, far );
cameraPY.layers = this.layers;
cameraPY.up.set( 0, 0, 1 );
cameraPY.lookAt( new Vector3( 0, 1, 0 ) );
this.add( cameraPY );
const cameraNY = new PerspectiveCamera( fov, aspect, near, far );
cameraNY.layers = this.layers;
cameraNY.up.set( 0, 0, - 1 );
cameraNY.lookAt( new Vector3( 0, - 1, 0 ) );
this.add( cameraNY );
const cameraPZ = new PerspectiveCamera( fov, aspect, near, far );
cameraPZ.layers = this.layers;
cameraPZ.up.set( 0, - 1, 0 );
cameraPZ.lookAt( new Vector3( 0, 0, 1 ) );
this.add( cameraPZ );
const cameraNZ = new PerspectiveCamera( fov, aspect, near, far );
cameraNZ.layers = this.layers;
cameraNZ.up.set( 0, - 1, 0 );
cameraNZ.lookAt( new Vector3( 0, 0, - 1 ) );
this.add( cameraNZ );
}
update( renderer, scene ) {
if ( this.parent === null ) this.updateMatrixWorld();
const renderTarget = this.renderTarget;
const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = this.children;
const currentXrEnabled = renderer.xr.enabled;
const currentRenderTarget = renderer.getRenderTarget();
renderer.xr.enabled = false;
const generateMipmaps = renderTarget.texture.generateMipmaps;
renderTarget.texture.generateMipmaps = false;
renderer.setRenderTarget( renderTarget, 0 );
renderer.render( scene, cameraPX );
renderer.setRenderTarget( renderTarget, 1 );
renderer.render( scene, cameraNX );
renderer.setRenderTarget( renderTarget, 2 );
renderer.render( scene, cameraPY );
renderer.setRenderTarget( renderTarget, 3 );
renderer.render( scene, cameraNY );
renderer.setRenderTarget( renderTarget, 4 );
renderer.render( scene, cameraPZ );
renderTarget.texture.generateMipmaps = generateMipmaps;
renderer.setRenderTarget( renderTarget, 5 );
renderer.render( scene, cameraNZ );
renderer.setRenderTarget( currentRenderTarget );
renderer.xr.enabled = currentXrEnabled;
}
}
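// Usage sketch (illustrative only, assuming a WebGLRenderer `renderer` and a Scene `scene`
// exist): the cube camera renders the scene into its WebGLCubeRenderTarget, whose texture can
// then be used as an environment map.
//
// const cubeRenderTarget = new WebGLCubeRenderTarget( 256 );
// const cubeCamera = new CubeCamera( 0.1, 1000, cubeRenderTarget );
// scene.add( cubeCamera );
// cubeCamera.update( renderer, scene );
// someReflectiveMaterial.envMap = cubeRenderTarget.texture; // `someReflectiveMaterial` is assumed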
class CubeTexture extends Texture$1 {
constructor( images, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, encoding ) {
images = images !== undefined ? images : [];
mapping = mapping !== undefined ? mapping : CubeReflectionMapping;
format = format !== undefined ? format : RGBFormat;
super( images, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, encoding );
// Why CubeTexture._needsFlipEnvMap is necessary:
//
// By convention -- likely based on the RenderMan spec from the 1990s -- cube maps are specified by WebGL (and three.js)
// in a coordinate system in which positive-x is to the right when looking up the positive-z axis -- in other words,
// in a left-handed coordinate system. By continuing this convention, preexisting cube maps continued to render correctly.
// three.js uses a right-handed coordinate system. So environment maps used in three.js appear to have px and nx swapped
// and the flag _needsFlipEnvMap controls this conversion. The flip is not required (and thus _needsFlipEnvMap is set to false)
// when using WebGLCubeRenderTarget.texture as a cube texture.
this._needsFlipEnvMap = true;
this.flipY = false;
}
get images() {
return this.image;
}
set images( value ) {
this.image = value;
}
}
CubeTexture.prototype.isCubeTexture = true;
class WebGLCubeRenderTarget extends WebGLRenderTarget {
constructor( size, options, dummy ) {
if ( Number.isInteger( options ) ) {
console.warn( 'THREE.WebGLCubeRenderTarget: constructor signature is now WebGLCubeRenderTarget( size, options )' );
options = dummy;
}
super( size, size, options );
options = options || {};
this.texture = new CubeTexture( undefined, options.mapping, options.wrapS, options.wrapT, options.magFilter, options.minFilter, options.format, options.type, options.anisotropy, options.encoding );
this.texture.generateMipmaps = options.generateMipmaps !== undefined ? options.generateMipmaps : false;
this.texture.minFilter = options.minFilter !== undefined ? options.minFilter : LinearFilter;
this.texture._needsFlipEnvMap = false;
}
fromEquirectangularTexture( renderer, texture ) {
this.texture.type = texture.type;
this.texture.format = RGBAFormat; // see #18859
this.texture.encoding = texture.encoding;
this.texture.generateMipmaps = texture.generateMipmaps;
this.texture.minFilter = texture.minFilter;
this.texture.magFilter = texture.magFilter;
const shader = {
uniforms: {
tEquirect: { value: null },
},
vertexShader: /* glsl */`
varying vec3 vWorldDirection;
vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
}
void main() {
vWorldDirection = transformDirection( position, modelMatrix );
#include <begin_vertex>
#include <project_vertex>
}
`,
fragmentShader: /* glsl */`
uniform sampler2D tEquirect;
varying vec3 vWorldDirection;
#include <common>
void main() {
vec3 direction = normalize( vWorldDirection );
vec2 sampleUV = equirectUv( direction );
gl_FragColor = texture2D( tEquirect, sampleUV );
}
`
};
const geometry = new BoxGeometry( 5, 5, 5 );
const material = new ShaderMaterial( {
name: 'CubemapFromEquirect',
uniforms: cloneUniforms( shader.uniforms ),
vertexShader: shader.vertexShader,
fragmentShader: shader.fragmentShader,
side: BackSide,
blending: NoBlending
} );
material.uniforms.tEquirect.value = texture;
const mesh = new Mesh( geometry, material );
const currentMinFilter = texture.minFilter;
// Avoid blurred poles
if ( texture.minFilter === LinearMipmapLinearFilter ) texture.minFilter = LinearFilter;
const camera = new CubeCamera( 1, 10, this );
camera.update( renderer, mesh );
texture.minFilter = currentMinFilter;
mesh.geometry.dispose();
mesh.material.dispose();
return this;
}
clear( renderer, color, depth, stencil ) {
const currentRenderTarget = renderer.getRenderTarget();
for ( let i = 0; i < 6; i ++ ) {
renderer.setRenderTarget( this, i );
renderer.clear( color, depth, stencil );
}
renderer.setRenderTarget( currentRenderTarget );
}
}
WebGLCubeRenderTarget.prototype.isWebGLCubeRenderTarget = true;
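// Usage sketch (illustrative only, assuming `renderer` is a WebGLRenderer, `scene` is a Scene
// and `equirectTexture` is a loaded 2:1 equirectangular texture):
//
// const cubeTarget = new WebGLCubeRenderTarget( 512 ).fromEquirectangularTexture( renderer, equirectTexture );
// scene.background = cubeTarget.texture;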
class DataTexture extends Texture$1 {
constructor( data, width, height, format, type, mapping, wrapS, wrapT, magFilter, minFilter, anisotropy, encoding ) {
super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, encoding );
this.image = { data: data || null, width: width || 1, height: height || 1 };
this.magFilter = magFilter !== undefined ? magFilter : NearestFilter;
this.minFilter = minFilter !== undefined ? minFilter : NearestFilter;
this.generateMipmaps = false;
this.flipY = false;
this.unpackAlignment = 1;
this.needsUpdate = true;
}
}
DataTexture.prototype.isDataTexture = true;
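// Usage sketch (illustrative only): a 2x2 RGBA texture built from raw bytes. Note the defaults
// set above: NearestFilter, no mipmaps, flipY = false and needsUpdate = true.
//
// const data = new Uint8Array( [
// 	255, 0, 0, 255,   0, 255, 0, 255,
// 	0, 0, 255, 255,   255, 255, 0, 255
// ] );
// const texture = new DataTexture( data, 2, 2, RGBAFormat );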
const _sphere$2 = /*@__PURE__*/ new Sphere();
const _vector$7 = /*@__PURE__*/ new Vector3();
class Frustum {
constructor( p0 = new Plane(), p1 = new Plane(), p2 = new Plane(), p3 = new Plane(), p4 = new Plane(), p5 = new Plane() ) {
this.planes = [ p0, p1, p2, p3, p4, p5 ];
}
set( p0, p1, p2, p3, p4, p5 ) {
const planes = this.planes;
planes[ 0 ].copy( p0 );
planes[ 1 ].copy( p1 );
planes[ 2 ].copy( p2 );
planes[ 3 ].copy( p3 );
planes[ 4 ].copy( p4 );
planes[ 5 ].copy( p5 );
return this;
}
copy( frustum ) {
const planes = this.planes;
for ( let i = 0; i < 6; i ++ ) {
planes[ i ].copy( frustum.planes[ i ] );
}
return this;
}
setFromProjectionMatrix( m ) {
const planes = this.planes;
const me = m.elements;
const me0 = me[ 0 ], me1 = me[ 1 ], me2 = me[ 2 ], me3 = me[ 3 ];
const me4 = me[ 4 ], me5 = me[ 5 ], me6 = me[ 6 ], me7 = me[ 7 ];
const me8 = me[ 8 ], me9 = me[ 9 ], me10 = me[ 10 ], me11 = me[ 11 ];
const me12 = me[ 12 ], me13 = me[ 13 ], me14 = me[ 14 ], me15 = me[ 15 ];
planes[ 0 ].setComponents( me3 - me0, me7 - me4, me11 - me8, me15 - me12 ).normalize();
planes[ 1 ].setComponents( me3 + me0, me7 + me4, me11 + me8, me15 + me12 ).normalize();
planes[ 2 ].setComponents( me3 + me1, me7 + me5, me11 + me9, me15 + me13 ).normalize();
planes[ 3 ].setComponents( me3 - me1, me7 - me5, me11 - me9, me15 - me13 ).normalize();
planes[ 4 ].setComponents( me3 - me2, me7 - me6, me11 - me10, me15 - me14 ).normalize();
planes[ 5 ].setComponents( me3 + me2, me7 + me6, me11 + me10, me15 + me14 ).normalize();
return this;
}
intersectsObject( object ) {
const geometry = object.geometry;
if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere();
_sphere$2.copy( geometry.boundingSphere ).applyMatrix4( object.matrixWorld );
return this.intersectsSphere( _sphere$2 );
}
intersectsSprite( sprite ) {
_sphere$2.center.set( 0, 0, 0 );
_sphere$2.radius = 0.7071067811865476;
_sphere$2.applyMatrix4( sprite.matrixWorld );
return this.intersectsSphere( _sphere$2 );
}
intersectsSphere( sphere ) {
const planes = this.planes;
const center = sphere.center;
const negRadius = - sphere.radius;
for ( let i = 0; i < 6; i ++ ) {
const distance = planes[ i ].distanceToPoint( center );
if ( distance < negRadius ) {
return false;
}
}
return true;
}
intersectsBox( box ) {
const planes = this.planes;
for ( let i = 0; i < 6; i ++ ) {
const plane = planes[ i ];
// corner at max distance
_vector$7.x = plane.normal.x > 0 ? box.max.x : box.min.x;
_vector$7.y = plane.normal.y > 0 ? box.max.y : box.min.y;
_vector$7.z = plane.normal.z > 0 ? box.max.z : box.min.z;
if ( plane.distanceToPoint( _vector$7 ) < 0 ) {
return false;
}
}
return true;
}
containsPoint( point ) {
const planes = this.planes;
for ( let i = 0; i < 6; i ++ ) {
if ( planes[ i ].distanceToPoint( point ) < 0 ) {
return false;
}
}
return true;
}
clone() {
return new this.constructor().copy( this );
}
}
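// Usage sketch (illustrative only, assuming `camera` has up-to-date matrices and `mesh` is an
// Object3D with geometry): this projection-times-view construction is the same one the renderer
// uses for frustum culling.
//
// const frustum = new Frustum();
// frustum.setFromProjectionMatrix(
// 	new Matrix4().multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse )
// );
// frustum.intersectsObject( mesh ); // true if the mesh's bounding sphere intersects the frustum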
function WebGLAnimation() {
let context = null;
let isAnimating = false;
let animationLoop = null;
let requestId = null;
function onAnimationFrame( time, frame ) {
animationLoop( time, frame );
requestId = context.requestAnimationFrame( onAnimationFrame );
}
return {
start: function () {
if ( isAnimating === true ) return;
if ( animationLoop === null ) return;
requestId = context.requestAnimationFrame( onAnimationFrame );
isAnimating = true;
},
stop: function () {
context.cancelAnimationFrame( requestId );
isAnimating = false;
},
setAnimationLoop: function ( callback ) {
animationLoop = callback;
},
setContext: function ( value ) {
context = value;
}
};
}
function WebGLAttributes( gl, capabilities ) {
const isWebGL2 = capabilities.isWebGL2;
const buffers = new WeakMap();
function createBuffer( attribute, bufferType ) {
const array = attribute.array;
const usage = attribute.usage;
const buffer = gl.createBuffer();
gl.bindBuffer( bufferType, buffer );
gl.bufferData( bufferType, array, usage );
attribute.onUploadCallback();
let type = 5126;
if ( array instanceof Float32Array ) {
type = 5126;
} else if ( array instanceof Float64Array ) {
console.warn( 'THREE.WebGLAttributes: Unsupported data buffer format: Float64Array.' );
} else if ( array instanceof Uint16Array ) {
if ( attribute.isFloat16BufferAttribute ) {
if ( isWebGL2 ) {
type = 5131;
} else {
console.warn( 'THREE.WebGLAttributes: Usage of Float16BufferAttribute requires WebGL2.' );
}
} else {
type = 5123;
}
} else if ( array instanceof Int16Array ) {
type = 5122;
} else if ( array instanceof Uint32Array ) {
type = 5125;
} else if ( array instanceof Int32Array ) {
type = 5124;
} else if ( array instanceof Int8Array ) {
type = 5120;
} else if ( array instanceof Uint8Array ) {
type = 5121;
}
return {
buffer: buffer,
type: type,
bytesPerElement: array.BYTES_PER_ELEMENT,
version: attribute.version
};
}
function updateBuffer( buffer, attribute, bufferType ) {
const array = attribute.array;
const updateRange = attribute.updateRange;
gl.bindBuffer( bufferType, buffer );
if ( updateRange.count === - 1 ) {
// Not using update ranges
gl.bufferSubData( bufferType, 0, array );
} else {
if ( isWebGL2 ) {
gl.bufferSubData( bufferType, updateRange.offset * array.BYTES_PER_ELEMENT,
array, updateRange.offset, updateRange.count );
} else {
gl.bufferSubData( bufferType, updateRange.offset * array.BYTES_PER_ELEMENT,
array.subarray( updateRange.offset, updateRange.offset + updateRange.count ) );
}
updateRange.count = - 1; // reset range
}
}
//
function get( attribute ) {
if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data;
return buffers.get( attribute );
}
function remove( attribute ) {
if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data;
const data = buffers.get( attribute );
if ( data ) {
gl.deleteBuffer( data.buffer );
buffers.delete( attribute );
}
}
function update( attribute, bufferType ) {
if ( attribute.isGLBufferAttribute ) {
const cached = buffers.get( attribute );
if ( ! cached || cached.version < attribute.version ) {
buffers.set( attribute, {
buffer: attribute.buffer,
type: attribute.type,
bytesPerElement: attribute.elementSize,
version: attribute.version
} );
}
return;
}
if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data;
const data = buffers.get( attribute );
if ( data === undefined ) {
buffers.set( attribute, createBuffer( attribute, bufferType ) );
} else if ( data.version < attribute.version ) {
updateBuffer( data.buffer, attribute, bufferType );
data.version = attribute.version;
}
}
return {
get: get,
remove: remove,
update: update
};
}
class PlaneGeometry extends BufferGeometry {
constructor( width = 1, height = 1, widthSegments = 1, heightSegments = 1 ) {
super();
this.type = 'PlaneGeometry';
this.parameters = {
width: width,
height: height,
widthSegments: widthSegments,
heightSegments: heightSegments
};
const width_half = width / 2;
const height_half = height / 2;
const gridX = Math.floor( widthSegments );
const gridY = Math.floor( heightSegments );
const gridX1 = gridX + 1;
const gridY1 = gridY + 1;
const segment_width = width / gridX;
const segment_height = height / gridY;
//
const indices = [];
const vertices = [];
const normals = [];
const uvs = [];
for ( let iy = 0; iy < gridY1; iy ++ ) {
const y = iy * segment_height - height_half;
for ( let ix = 0; ix < gridX1; ix ++ ) {
const x = ix * segment_width - width_half;
vertices.push( x, - y, 0 );
normals.push( 0, 0, 1 );
uvs.push( ix / gridX );
uvs.push( 1 - ( iy / gridY ) );
}
}
for ( let iy = 0; iy < gridY; iy ++ ) {
for ( let ix = 0; ix < gridX; ix ++ ) {
const a = ix + gridX1 * iy;
const b = ix + gridX1 * ( iy + 1 );
const c = ( ix + 1 ) + gridX1 * ( iy + 1 );
const d = ( ix + 1 ) + gridX1 * iy;
indices.push( a, b, d );
indices.push( b, c, d );
}
}
this.setIndex( indices );
this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
}
}
var alphamap_fragment = "#ifdef USE_ALPHAMAP\n\tdiffuseColor.a *= texture2D( alphaMap, vUv ).g;\n#endif";
var alphamap_pars_fragment = "#ifdef USE_ALPHAMAP\n\tuniform sampler2D alphaMap;\n#endif";
var alphatest_fragment = "#ifdef ALPHATEST\n\tif ( diffuseColor.a < ALPHATEST ) discard;\n#endif";
var aomap_fragment = "#ifdef USE_AOMAP\n\tfloat ambientOcclusion = ( texture2D( aoMap, vUv2 ).r - 1.0 ) * aoMapIntensity + 1.0;\n\treflectedLight.indirectDiffuse *= ambientOcclusion;\n\t#if defined( USE_ENVMAP ) && defined( STANDARD )\n\t\tfloat dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );\n\t\treflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.specularRoughness );\n\t#endif\n#endif";
var aomap_pars_fragment = "#ifdef USE_AOMAP\n\tuniform sampler2D aoMap;\n\tuniform float aoMapIntensity;\n#endif";
var begin_vertex = "vec3 transformed = vec3( position );";
var beginnormal_vertex = "vec3 objectNormal = vec3( normal );\n#ifdef USE_TANGENT\n\tvec3 objectTangent = vec3( tangent.xyz );\n#endif";
var bsdfs = "vec2 integrateSpecularBRDF( const in float dotNV, const in float roughness ) {\n\tconst vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );\n\tconst vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );\n\tvec4 r = roughness * c0 + c1;\n\tfloat a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;\n\treturn vec2( -1.04, 1.04 ) * a004 + r.zw;\n}\nfloat punctualLightIntensityToIrradianceFactor( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {\n#if defined ( PHYSICALLY_CORRECT_LIGHTS )\n\tfloat distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );\n\tif( cutoffDistance > 0.0 ) {\n\t\tdistanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );\n\t}\n\treturn distanceFalloff;\n#else\n\tif( cutoffDistance > 0.0 && decayExponent > 0.0 ) {\n\t\treturn pow( saturate( -lightDistance / cutoffDistance + 1.0 ), decayExponent );\n\t}\n\treturn 1.0;\n#endif\n}\nvec3 BRDF_Diffuse_Lambert( const in vec3 diffuseColor ) {\n\treturn RECIPROCAL_PI * diffuseColor;\n}\nvec3 F_Schlick( const in vec3 specularColor, const in float dotLH ) {\n\tfloat fresnel = exp2( ( -5.55473 * dotLH - 6.98316 ) * dotLH );\n\treturn ( 1.0 - specularColor ) * fresnel + specularColor;\n}\nvec3 F_Schlick_RoughnessDependent( const in vec3 F0, const in float dotNV, const in float roughness ) {\n\tfloat fresnel = exp2( ( -5.55473 * dotNV - 6.98316 ) * dotNV );\n\tvec3 Fr = max( vec3( 1.0 - roughness ), F0 ) - F0;\n\treturn Fr * fresnel + F0;\n}\nfloat G_GGX_Smith( const in float alpha, const in float dotNL, const in float dotNV ) {\n\tfloat a2 = pow2( alpha );\n\tfloat gl = dotNL + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );\n\tfloat gv = dotNV + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );\n\treturn 1.0 / ( gl * gv );\n}\nfloat G_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {\n\tfloat a2 = pow2( alpha );\n\tfloat gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );\n\tfloat gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );\n\treturn 0.5 / max( gv + gl, EPSILON );\n}\nfloat D_GGX( const in float alpha, const in float dotNH ) {\n\tfloat a2 = pow2( alpha );\n\tfloat denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0;\n\treturn RECIPROCAL_PI * a2 / pow2( denom );\n}\nvec3 BRDF_Specular_GGX( const in IncidentLight incidentLight, const in vec3 viewDir, const in vec3 normal, const in vec3 specularColor, const in float roughness ) {\n\tfloat alpha = pow2( roughness );\n\tvec3 halfDir = normalize( incidentLight.direction + viewDir );\n\tfloat dotNL = saturate( dot( normal, incidentLight.direction ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat dotLH = saturate( dot( incidentLight.direction, halfDir ) );\n\tvec3 F = F_Schlick( specularColor, dotLH );\n\tfloat G = G_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\tfloat D = D_GGX( alpha, dotNH );\n\treturn F * ( G * D );\n}\nvec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {\n\tconst float LUT_SIZE = 64.0;\n\tconst float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;\n\tconst float LUT_BIAS = 0.5 / LUT_SIZE;\n\tfloat dotNV = saturate( dot( N, V ) );\n\tvec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );\n\tuv = uv * LUT_SCALE + LUT_BIAS;\n\treturn uv;\n}\nfloat LTC_ClippedSphereFormFactor( const in vec3 f ) {\n\tfloat l = length( f );\n\treturn max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );\n}\nvec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {\n\tfloat x = 
dot( v1, v2 );\n\tfloat y = abs( x );\n\tfloat a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;\n\tfloat b = 3.4175940 + ( 4.1616724 + y ) * y;\n\tfloat v = a / b;\n\tfloat theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;\n\treturn cross( v1, v2 ) * theta_sintheta;\n}\nvec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {\n\tvec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];\n\tvec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];\n\tvec3 lightNormal = cross( v1, v2 );\n\tif( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );\n\tvec3 T1, T2;\n\tT1 = normalize( V - N * dot( V, N ) );\n\tT2 = - cross( N, T1 );\n\tmat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );\n\tvec3 coords[ 4 ];\n\tcoords[ 0 ] = mat * ( rectCoords[ 0 ] - P );\n\tcoords[ 1 ] = mat * ( rectCoords[ 1 ] - P );\n\tcoords[ 2 ] = mat * ( rectCoords[ 2 ] - P );\n\tcoords[ 3 ] = mat * ( rectCoords[ 3 ] - P );\n\tcoords[ 0 ] = normalize( coords[ 0 ] );\n\tcoords[ 1 ] = normalize( coords[ 1 ] );\n\tcoords[ 2 ] = normalize( coords[ 2 ] );\n\tcoords[ 3 ] = normalize( coords[ 3 ] );\n\tvec3 vectorFormFactor = vec3( 0.0 );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );\n\tfloat result = LTC_ClippedSphereFormFactor( vectorFormFactor );\n\treturn vec3( result );\n}\nvec3 BRDF_Specular_GGX_Environment( const in vec3 viewDir, const in vec3 normal, const in vec3 specularColor, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tvec2 brdf = integrateSpecularBRDF( dotNV, roughness );\n\treturn specularColor * brdf.x + brdf.y;\n}\nvoid BRDF_Specular_Multiscattering_Environment( const in GeometricContext geometry, const in vec3 specularColor, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n\tfloat dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );\n\tvec3 F = F_Schlick_RoughnessDependent( specularColor, dotNV, roughness );\n\tvec2 brdf = integrateSpecularBRDF( dotNV, roughness );\n\tvec3 FssEss = F * brdf.x + brdf.y;\n\tfloat Ess = brdf.x + brdf.y;\n\tfloat Ems = 1.0 - Ess;\n\tvec3 Favg = specularColor + ( 1.0 - specularColor ) * 0.047619;\tvec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );\n\tsingleScatter += FssEss;\n\tmultiScatter += Fms * Ems;\n}\nfloat G_BlinnPhong_Implicit( ) {\n\treturn 0.25;\n}\nfloat D_BlinnPhong( const in float shininess, const in float dotNH ) {\n\treturn RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess );\n}\nvec3 BRDF_Specular_BlinnPhong( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float shininess ) {\n\tvec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );\n\tfloat dotNH = saturate( dot( geometry.normal, halfDir ) );\n\tfloat dotLH = saturate( dot( incidentLight.direction, halfDir ) );\n\tvec3 F = F_Schlick( specularColor, dotLH );\n\tfloat G = G_BlinnPhong_Implicit( );\n\tfloat D = D_BlinnPhong( shininess, dotNH );\n\treturn F * ( G * D );\n}\nfloat GGXRoughnessToBlinnExponent( const in float ggxRoughness ) {\n\treturn ( 2.0 / pow2( ggxRoughness + 0.0001 ) - 2.0 );\n}\nfloat BlinnExponentToGGXRoughness( const in float blinnExponent ) {\n\treturn sqrt( 2.0 / ( 
blinnExponent + 2.0 ) );\n}\n#if defined( USE_SHEEN )\nfloat D_Charlie(float roughness, float NoH) {\n\tfloat invAlpha = 1.0 / roughness;\n\tfloat cos2h = NoH * NoH;\n\tfloat sin2h = max(1.0 - cos2h, 0.0078125);\treturn (2.0 + invAlpha) * pow(sin2h, invAlpha * 0.5) / (2.0 * PI);\n}\nfloat V_Neubelt(float NoV, float NoL) {\n\treturn saturate(1.0 / (4.0 * (NoL + NoV - NoL * NoV)));\n}\nvec3 BRDF_Specular_Sheen( const in float roughness, const in vec3 L, const in GeometricContext geometry, vec3 specularColor ) {\n\tvec3 N = geometry.normal;\n\tvec3 V = geometry.viewDir;\n\tvec3 H = normalize( V + L );\n\tfloat dotNH = saturate( dot( N, H ) );\n\treturn specularColor * D_Charlie( roughness, dotNH ) * V_Neubelt( dot(N, V), dot(N, L) );\n}\n#endif";
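// The string constants in this region are GLSL shader chunks (three.js ShaderChunk sources
// bundled into this file); the renderer splices them into complete shader programs.
// bumpmap: perturb the surface normal using screen-space derivatives of a height map.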
var bumpmap_pars_fragment = "#ifdef USE_BUMPMAP\n\tuniform sampler2D bumpMap;\n\tuniform float bumpScale;\n\tvec2 dHdxy_fwd() {\n\t\tvec2 dSTdx = dFdx( vUv );\n\t\tvec2 dSTdy = dFdy( vUv );\n\t\tfloat Hll = bumpScale * texture2D( bumpMap, vUv ).x;\n\t\tfloat dBx = bumpScale * texture2D( bumpMap, vUv + dSTdx ).x - Hll;\n\t\tfloat dBy = bumpScale * texture2D( bumpMap, vUv + dSTdy ).x - Hll;\n\t\treturn vec2( dBx, dBy );\n\t}\n\tvec3 perturbNormalArb( vec3 surf_pos, vec3 surf_norm, vec2 dHdxy, float faceDirection ) {\n\t\tvec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );\n\t\tvec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );\n\t\tvec3 vN = surf_norm;\n\t\tvec3 R1 = cross( vSigmaY, vN );\n\t\tvec3 R2 = cross( vN, vSigmaX );\n\t\tfloat fDet = dot( vSigmaX, R1 ) * faceDirection;\n\t\tvec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );\n\t\treturn normalize( abs( fDet ) * surf_norm - vGrad );\n\t}\n#endif";
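// Clipping planes: discard fragments clipped by the user-defined clipping planes (union and intersection cases).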
var clipping_planes_fragment = "#if NUM_CLIPPING_PLANES > 0\n\tvec4 plane;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {\n\t\tplane = clippingPlanes[ i ];\n\t\tif ( dot( vClipPosition, plane.xyz ) > plane.w ) discard;\n\t}\n\t#pragma unroll_loop_end\n\t#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n\t\tbool clipped = true;\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n\t\t\tplane = clippingPlanes[ i ];\n\t\t\tclipped = ( dot( vClipPosition, plane.xyz ) > plane.w ) && clipped;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t\tif ( clipped ) discard;\n\t#endif\n#endif";
var clipping_planes_pars_fragment = "#if NUM_CLIPPING_PLANES > 0\n\tvarying vec3 vClipPosition;\n\tuniform vec4 clippingPlanes[ NUM_CLIPPING_PLANES ];\n#endif";
var clipping_planes_pars_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvarying vec3 vClipPosition;\n#endif";
var clipping_planes_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvClipPosition = - mvPosition.xyz;\n#endif";
var color_fragment = "#if defined( USE_COLOR_ALPHA )\n\tdiffuseColor *= vColor;\n#elif defined( USE_COLOR )\n\tdiffuseColor.rgb *= vColor;\n#endif";
var color_pars_fragment = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR )\n\tvarying vec3 vColor;\n#endif";
var color_pars_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR )\n\tvarying vec3 vColor;\n#endif";
var color_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvColor = vec4( 1.0 );\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR )\n\tvColor = vec3( 1.0 );\n#endif\n#ifdef USE_COLOR\n\tvColor *= color;\n#endif\n#ifdef USE_INSTANCING_COLOR\n\tvColor.xyz *= instanceColor.xyz;\n#endif";
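// common: shared constants (PI, EPSILON), pow/rand helpers, and the IncidentLight / ReflectedLight / GeometricContext structs used by the lighting chunks.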
var common = "#define PI 3.141592653589793\n#define PI2 6.283185307179586\n#define PI_HALF 1.5707963267948966\n#define RECIPROCAL_PI 0.3183098861837907\n#define RECIPROCAL_PI2 0.15915494309189535\n#define EPSILON 1e-6\n#ifndef saturate\n#define saturate(a) clamp( a, 0.0, 1.0 )\n#endif\n#define whiteComplement(a) ( 1.0 - saturate( a ) )\nfloat pow2( const in float x ) { return x*x; }\nfloat pow3( const in float x ) { return x*x*x; }\nfloat pow4( const in float x ) { float x2 = x*x; return x2*x2; }\nfloat average( const in vec3 color ) { return dot( color, vec3( 0.3333 ) ); }\nhighp float rand( const in vec2 uv ) {\n\tconst highp float a = 12.9898, b = 78.233, c = 43758.5453;\n\thighp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );\n\treturn fract(sin(sn) * c);\n}\n#ifdef HIGH_PRECISION\n\tfloat precisionSafeLength( vec3 v ) { return length( v ); }\n#else\n\tfloat max3( vec3 v ) { return max( max( v.x, v.y ), v.z ); }\n\tfloat precisionSafeLength( vec3 v ) {\n\t\tfloat maxComponent = max3( abs( v ) );\n\t\treturn length( v / maxComponent ) * maxComponent;\n\t}\n#endif\nstruct IncidentLight {\n\tvec3 color;\n\tvec3 direction;\n\tbool visible;\n};\nstruct ReflectedLight {\n\tvec3 directDiffuse;\n\tvec3 directSpecular;\n\tvec3 indirectDiffuse;\n\tvec3 indirectSpecular;\n};\nstruct GeometricContext {\n\tvec3 position;\n\tvec3 normal;\n\tvec3 viewDir;\n#ifdef CLEARCOAT\n\tvec3 clearcoatNormal;\n#endif\n};\nvec3 transformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );\n}\nvec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n}\nvec3 projectOnPlane(in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {\n\tfloat distance = dot( planeNormal, point - pointOnPlane );\n\treturn - distance * planeNormal + point;\n}\nfloat sideOfPlane( in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {\n\treturn sign( dot( point - pointOnPlane, planeNormal ) );\n}\nvec3 linePlaneIntersect( in vec3 pointOnLine, in vec3 lineDirection, in vec3 pointOnPlane, in vec3 planeNormal ) {\n\treturn lineDirection * ( dot( planeNormal, pointOnPlane - pointOnLine ) / dot( planeNormal, lineDirection ) ) + pointOnLine;\n}\nmat3 transposeMat3( const in mat3 m ) {\n\tmat3 tmp;\n\ttmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );\n\ttmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );\n\ttmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );\n\treturn tmp;\n}\nfloat linearToRelativeLuminance( const in vec3 color ) {\n\tvec3 weights = vec3( 0.2126, 0.7152, 0.0722 );\n\treturn dot( weights, color.rgb );\n}\nbool isPerspectiveMatrix( mat4 m ) {\n\treturn m[ 2 ][ 3 ] == - 1.0;\n}\nvec2 equirectUv( in vec3 dir ) {\n\tfloat u = atan( dir.z, dir.x ) * RECIPROCAL_PI2 + 0.5;\n\tfloat v = asin( clamp( dir.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;\n\treturn vec2( u, v );\n}";
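// CUBE_UV environment maps: sample a pre-filtered cube map packed into a 2D atlas, blending between roughness mip levels.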
var cube_uv_reflection_fragment = "#ifdef ENVMAP_TYPE_CUBE_UV\n\t#define cubeUV_maxMipLevel 8.0\n\t#define cubeUV_minMipLevel 4.0\n\t#define cubeUV_maxTileSize 256.0\n\t#define cubeUV_minTileSize 16.0\n\tfloat getFace( vec3 direction ) {\n\t\tvec3 absDirection = abs( direction );\n\t\tfloat face = - 1.0;\n\t\tif ( absDirection.x > absDirection.z ) {\n\t\t\tif ( absDirection.x > absDirection.y )\n\t\t\t\tface = direction.x > 0.0 ? 0.0 : 3.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 1.0 : 4.0;\n\t\t} else {\n\t\t\tif ( absDirection.z > absDirection.y )\n\t\t\t\tface = direction.z > 0.0 ? 2.0 : 5.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 1.0 : 4.0;\n\t\t}\n\t\treturn face;\n\t}\n\tvec2 getUV( vec3 direction, float face ) {\n\t\tvec2 uv;\n\t\tif ( face == 0.0 ) {\n\t\t\tuv = vec2( direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 1.0 ) {\n\t\t\tuv = vec2( - direction.x, - direction.z ) / abs( direction.y );\n\t\t} else if ( face == 2.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.y ) / abs( direction.z );\n\t\t} else if ( face == 3.0 ) {\n\t\t\tuv = vec2( - direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 4.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.z ) / abs( direction.y );\n\t\t} else {\n\t\t\tuv = vec2( direction.x, direction.y ) / abs( direction.z );\n\t\t}\n\t\treturn 0.5 * ( uv + 1.0 );\n\t}\n\tvec3 bilinearCubeUV( sampler2D envMap, vec3 direction, float mipInt ) {\n\t\tfloat face = getFace( direction );\n\t\tfloat filterInt = max( cubeUV_minMipLevel - mipInt, 0.0 );\n\t\tmipInt = max( mipInt, cubeUV_minMipLevel );\n\t\tfloat faceSize = exp2( mipInt );\n\t\tfloat texelSize = 1.0 / ( 3.0 * cubeUV_maxTileSize );\n\t\tvec2 uv = getUV( direction, face ) * ( faceSize - 1.0 );\n\t\tvec2 f = fract( uv );\n\t\tuv += 0.5 - f;\n\t\tif ( face > 2.0 ) {\n\t\t\tuv.y += faceSize;\n\t\t\tface -= 3.0;\n\t\t}\n\t\tuv.x += face * faceSize;\n\t\tif ( mipInt < cubeUV_maxMipLevel ) {\n\t\t\tuv.y += 2.0 * cubeUV_maxTileSize;\n\t\t}\n\t\tuv.y += filterInt * 2.0 * cubeUV_minTileSize;\n\t\tuv.x += 3.0 * max( 0.0, cubeUV_maxTileSize - 2.0 * faceSize );\n\t\tuv *= texelSize;\n\t\tvec3 tl = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;\n\t\tuv.x += texelSize;\n\t\tvec3 tr = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;\n\t\tuv.y += texelSize;\n\t\tvec3 br = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;\n\t\tuv.x -= texelSize;\n\t\tvec3 bl = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;\n\t\tvec3 tm = mix( tl, tr, f.x );\n\t\tvec3 bm = mix( bl, br, f.x );\n\t\treturn mix( tm, bm, f.y );\n\t}\n\t#define r0 1.0\n\t#define v0 0.339\n\t#define m0 - 2.0\n\t#define r1 0.8\n\t#define v1 0.276\n\t#define m1 - 1.0\n\t#define r4 0.4\n\t#define v4 0.046\n\t#define m4 2.0\n\t#define r5 0.305\n\t#define v5 0.016\n\t#define m5 3.0\n\t#define r6 0.21\n\t#define v6 0.0038\n\t#define m6 4.0\n\tfloat roughnessToMip( float roughness ) {\n\t\tfloat mip = 0.0;\n\t\tif ( roughness >= r1 ) {\n\t\t\tmip = ( r0 - roughness ) * ( m1 - m0 ) / ( r0 - r1 ) + m0;\n\t\t} else if ( roughness >= r4 ) {\n\t\t\tmip = ( r1 - roughness ) * ( m4 - m1 ) / ( r1 - r4 ) + m1;\n\t\t} else if ( roughness >= r5 ) {\n\t\t\tmip = ( r4 - roughness ) * ( m5 - m4 ) / ( r4 - r5 ) + m4;\n\t\t} else if ( roughness >= r6 ) {\n\t\t\tmip = ( r5 - roughness ) * ( m6 - m5 ) / ( r5 - r6 ) + m5;\n\t\t} else {\n\t\t\tmip = - 2.0 * log2( 1.16 * roughness );\t\t}\n\t\treturn mip;\n\t}\n\tvec4 textureCubeUV( sampler2D envMap, vec3 sampleDir, float roughness ) {\n\t\tfloat mip = 
clamp( roughnessToMip( roughness ), m0, cubeUV_maxMipLevel );\n\t\tfloat mipF = fract( mip );\n\t\tfloat mipInt = floor( mip );\n\t\tvec3 color0 = bilinearCubeUV( envMap, sampleDir, mipInt );\n\t\tif ( mipF == 0.0 ) {\n\t\t\treturn vec4( color0, 1.0 );\n\t\t} else {\n\t\t\tvec3 color1 = bilinearCubeUV( envMap, sampleDir, mipInt + 1.0 );\n\t\t\treturn vec4( mix( color0, color1, mipF ), 1.0 );\n\t\t}\n\t}\n#endif";
var defaultnormal_vertex = "vec3 transformedNormal = objectNormal;\n#ifdef USE_INSTANCING\n\tmat3 m = mat3( instanceMatrix );\n\ttransformedNormal /= vec3( dot( m[ 0 ], m[ 0 ] ), dot( m[ 1 ], m[ 1 ] ), dot( m[ 2 ], m[ 2 ] ) );\n\ttransformedNormal = m * transformedNormal;\n#endif\ntransformedNormal = normalMatrix * transformedNormal;\n#ifdef FLIP_SIDED\n\ttransformedNormal = - transformedNormal;\n#endif\n#ifdef USE_TANGENT\n\tvec3 transformedTangent = ( modelViewMatrix * vec4( objectTangent, 0.0 ) ).xyz;\n\t#ifdef FLIP_SIDED\n\t\ttransformedTangent = - transformedTangent;\n\t#endif\n#endif";
var displacementmap_pars_vertex = "#ifdef USE_DISPLACEMENTMAP\n\tuniform sampler2D displacementMap;\n\tuniform float displacementScale;\n\tuniform float displacementBias;\n#endif";
var displacementmap_vertex = "#ifdef USE_DISPLACEMENTMAP\n\ttransformed += normalize( objectNormal ) * ( texture2D( displacementMap, vUv ).x * displacementScale + displacementBias );\n#endif";
var emissivemap_fragment = "#ifdef USE_EMISSIVEMAP\n\tvec4 emissiveColor = texture2D( emissiveMap, vUv );\n\temissiveColor.rgb = emissiveMapTexelToLinear( emissiveColor ).rgb;\n\ttotalEmissiveRadiance *= emissiveColor.rgb;\n#endif";
var emissivemap_pars_fragment = "#ifdef USE_EMISSIVEMAP\n\tuniform sampler2D emissiveMap;\n#endif";
var encodings_fragment = "gl_FragColor = linearToOutputTexel( gl_FragColor );";
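// Texel encodings: conversions between linear, gamma/sRGB, RGBE, RGBM, RGBD and LogLuv color encodings.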
var encodings_pars_fragment = "\nvec4 LinearToLinear( in vec4 value ) {\n\treturn value;\n}\nvec4 GammaToLinear( in vec4 value, in float gammaFactor ) {\n\treturn vec4( pow( value.rgb, vec3( gammaFactor ) ), value.a );\n}\nvec4 LinearToGamma( in vec4 value, in float gammaFactor ) {\n\treturn vec4( pow( value.rgb, vec3( 1.0 / gammaFactor ) ), value.a );\n}\nvec4 sRGBToLinear( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );\n}\nvec4 LinearTosRGB( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );\n}\nvec4 RGBEToLinear( in vec4 value ) {\n\treturn vec4( value.rgb * exp2( value.a * 255.0 - 128.0 ), 1.0 );\n}\nvec4 LinearToRGBE( in vec4 value ) {\n\tfloat maxComponent = max( max( value.r, value.g ), value.b );\n\tfloat fExp = clamp( ceil( log2( maxComponent ) ), -128.0, 127.0 );\n\treturn vec4( value.rgb / exp2( fExp ), ( fExp + 128.0 ) / 255.0 );\n}\nvec4 RGBMToLinear( in vec4 value, in float maxRange ) {\n\treturn vec4( value.rgb * value.a * maxRange, 1.0 );\n}\nvec4 LinearToRGBM( in vec4 value, in float maxRange ) {\n\tfloat maxRGB = max( value.r, max( value.g, value.b ) );\n\tfloat M = clamp( maxRGB / maxRange, 0.0, 1.0 );\n\tM = ceil( M * 255.0 ) / 255.0;\n\treturn vec4( value.rgb / ( M * maxRange ), M );\n}\nvec4 RGBDToLinear( in vec4 value, in float maxRange ) {\n\treturn vec4( value.rgb * ( ( maxRange / 255.0 ) / value.a ), 1.0 );\n}\nvec4 LinearToRGBD( in vec4 value, in float maxRange ) {\n\tfloat maxRGB = max( value.r, max( value.g, value.b ) );\n\tfloat D = max( maxRange / maxRGB, 1.0 );\n\tD = clamp( floor( D ) / 255.0, 0.0, 1.0 );\n\treturn vec4( value.rgb * ( D * ( 255.0 / maxRange ) ), D );\n}\nconst mat3 cLogLuvM = mat3( 0.2209, 0.3390, 0.4184, 0.1138, 0.6780, 0.7319, 0.0102, 0.1130, 0.2969 );\nvec4 LinearToLogLuv( in vec4 value ) {\n\tvec3 Xp_Y_XYZp = cLogLuvM * value.rgb;\n\tXp_Y_XYZp = max( Xp_Y_XYZp, vec3( 1e-6, 1e-6, 1e-6 ) );\n\tvec4 vResult;\n\tvResult.xy = Xp_Y_XYZp.xy / Xp_Y_XYZp.z;\n\tfloat Le = 2.0 * log2(Xp_Y_XYZp.y) + 127.0;\n\tvResult.w = fract( Le );\n\tvResult.z = ( Le - ( floor( vResult.w * 255.0 ) ) / 255.0 ) / 255.0;\n\treturn vResult;\n}\nconst mat3 cLogLuvInverseM = mat3( 6.0014, -2.7008, -1.7996, -1.3320, 3.1029, -5.7721, 0.3008, -1.0882, 5.6268 );\nvec4 LogLuvToLinear( in vec4 value ) {\n\tfloat Le = value.z * 255.0 + value.w;\n\tvec3 Xp_Y_XYZp;\n\tXp_Y_XYZp.y = exp2( ( Le - 127.0 ) / 2.0 );\n\tXp_Y_XYZp.z = Xp_Y_XYZp.y / value.y;\n\tXp_Y_XYZp.x = value.x * Xp_Y_XYZp.z;\n\tvec3 vRGB = cLogLuvInverseM * Xp_Y_XYZp.rgb;\n\treturn vec4( max( vRGB, 0.0 ), 1.0 );\n}";
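// Environment mapping: build the reflection/refraction vector and blend the sampled environment color (multiply / mix / add).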
var envmap_fragment = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvec3 cameraToFrag;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToFrag = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToFrag = normalize( vWorldPosition - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( cameraToFrag, worldNormal );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( cameraToFrag, worldNormal, refractionRatio );\n\t\t#endif\n\t#else\n\t\tvec3 reflectVec = vReflect;\n\t#endif\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 envColor = textureCube( envMap, vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );\n\t#elif defined( ENVMAP_TYPE_CUBE_UV )\n\t\tvec4 envColor = textureCubeUV( envMap, reflectVec, 0.0 );\n\t#else\n\t\tvec4 envColor = vec4( 0.0 );\n\t#endif\n\t#ifndef ENVMAP_TYPE_CUBE_UV\n\t\tenvColor = envMapTexelToLinear( envColor );\n\t#endif\n\t#ifdef ENVMAP_BLENDING_MULTIPLY\n\t\toutgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_MIX )\n\t\toutgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_ADD )\n\t\toutgoingLight += envColor.xyz * specularStrength * reflectivity;\n\t#endif\n#endif";
var envmap_common_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float envMapIntensity;\n\tuniform float flipEnvMap;\n\tuniform int maxMipLevel;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n\t\n#endif";
var envmap_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float reflectivity;\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\tvarying vec3 vWorldPosition;\n\t\tuniform float refractionRatio;\n\t#else\n\t\tvarying vec3 vReflect;\n\t#endif\n#endif";
var envmap_pars_vertex = "#ifdef USE_ENVMAP\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) ||defined( PHONG )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\t\n\t\tvarying vec3 vWorldPosition;\n\t#else\n\t\tvarying vec3 vReflect;\n\t\tuniform float refractionRatio;\n\t#endif\n#endif";
var envmap_vertex = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvWorldPosition = worldPosition.xyz;\n\t#else\n\t\tvec3 cameraToVertex;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToVertex = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToVertex = normalize( worldPosition.xyz - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvReflect = reflect( cameraToVertex, worldNormal );\n\t\t#else\n\t\t\tvReflect = refract( cameraToVertex, worldNormal, refractionRatio );\n\t\t#endif\n\t#endif\n#endif";
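// Fog: linear and exponential-squared fog factors applied to the final fragment color.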
var fog_vertex = "#ifdef USE_FOG\n\tfogDepth = - mvPosition.z;\n#endif";
var fog_pars_vertex = "#ifdef USE_FOG\n\tvarying float fogDepth;\n#endif";
var fog_fragment = "#ifdef USE_FOG\n\t#ifdef FOG_EXP2\n\t\tfloat fogFactor = 1.0 - exp( - fogDensity * fogDensity * fogDepth * fogDepth );\n\t#else\n\t\tfloat fogFactor = smoothstep( fogNear, fogFar, fogDepth );\n\t#endif\n\tgl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );\n#endif";
var fog_pars_fragment = "#ifdef USE_FOG\n\tuniform vec3 fogColor;\n\tvarying float fogDepth;\n\t#ifdef FOG_EXP2\n\t\tuniform float fogDensity;\n\t#else\n\t\tuniform float fogNear;\n\t\tuniform float fogFar;\n\t#endif\n#endif";
var gradientmap_pars_fragment = "#ifdef USE_GRADIENTMAP\n\tuniform sampler2D gradientMap;\n#endif\nvec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {\n\tfloat dotNL = dot( normal, lightDirection );\n\tvec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );\n\t#ifdef USE_GRADIENTMAP\n\t\treturn texture2D( gradientMap, coord ).rgb;\n\t#else\n\t\treturn ( coord.x < 0.7 ) ? vec3( 0.7 ) : vec3( 1.0 );\n\t#endif\n}";
var lightmap_fragment = "#ifdef USE_LIGHTMAP\n\tvec4 lightMapTexel= texture2D( lightMap, vUv2 );\n\treflectedLight.indirectDiffuse += PI * lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n#endif";
var lightmap_pars_fragment = "#ifdef USE_LIGHTMAP\n\tuniform sampler2D lightMap;\n\tuniform float lightMapIntensity;\n#endif";
var lights_lambert_vertex = "vec3 diffuse = vec3( 1.0 );\nGeometricContext geometry;\ngeometry.position = mvPosition.xyz;\ngeometry.normal = normalize( transformedNormal );\ngeometry.viewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( -mvPosition.xyz );\nGeometricContext backGeometry;\nbackGeometry.position = geometry.position;\nbackGeometry.normal = -geometry.normal;\nbackGeometry.viewDir = geometry.viewDir;\nvLightFront = vec3( 0.0 );\nvIndirectFront = vec3( 0.0 );\n#ifdef DOUBLE_SIDED\n\tvLightBack = vec3( 0.0 );\n\tvIndirectBack = vec3( 0.0 );\n#endif\nIncidentLight directLight;\nfloat dotNL;\nvec3 directLightColor_Diffuse;\nvIndirectFront += getAmbientLightIrradiance( ambientLightColor );\nvIndirectFront += getLightProbeIrradiance( lightProbe, geometry );\n#ifdef DOUBLE_SIDED\n\tvIndirectBack += getAmbientLightIrradiance( ambientLightColor );\n\tvIndirectBack += getLightProbeIrradiance( lightProbe, backGeometry );\n#endif\n#if NUM_POINT_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tgetPointDirectLightIrradiance( pointLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = PI * directLight.color;\n\t\tvLightFront += saturate( dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( -dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tgetSpotDirectLightIrradiance( spotLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = PI * directLight.color;\n\t\tvLightFront += saturate( dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( -dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_DIR_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tgetDirectionalDirectLightIrradiance( directionalLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = PI * directLight.color;\n\t\tvLightFront += saturate( dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( -dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\tvIndirectFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvIndirectBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometry );\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif";
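// Light uniforms and per-type irradiance functions (directional, point, spot, rect-area, hemisphere), plus spherical-harmonics light probes.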
var lights_pars_begin = "uniform bool receiveShadow;\nuniform vec3 ambientLightColor;\nuniform vec3 lightProbe[ 9 ];\nvec3 shGetIrradianceAt( in vec3 normal, in vec3 shCoefficients[ 9 ] ) {\n\tfloat x = normal.x, y = normal.y, z = normal.z;\n\tvec3 result = shCoefficients[ 0 ] * 0.886227;\n\tresult += shCoefficients[ 1 ] * 2.0 * 0.511664 * y;\n\tresult += shCoefficients[ 2 ] * 2.0 * 0.511664 * z;\n\tresult += shCoefficients[ 3 ] * 2.0 * 0.511664 * x;\n\tresult += shCoefficients[ 4 ] * 2.0 * 0.429043 * x * y;\n\tresult += shCoefficients[ 5 ] * 2.0 * 0.429043 * y * z;\n\tresult += shCoefficients[ 6 ] * ( 0.743125 * z * z - 0.247708 );\n\tresult += shCoefficients[ 7 ] * 2.0 * 0.429043 * x * z;\n\tresult += shCoefficients[ 8 ] * 0.429043 * ( x * x - y * y );\n\treturn result;\n}\nvec3 getLightProbeIrradiance( const in vec3 lightProbe[ 9 ], const in GeometricContext geometry ) {\n\tvec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );\n\tvec3 irradiance = shGetIrradianceAt( worldNormal, lightProbe );\n\treturn irradiance;\n}\nvec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {\n\tvec3 irradiance = ambientLightColor;\n\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\tirradiance *= PI;\n\t#endif\n\treturn irradiance;\n}\n#if NUM_DIR_LIGHTS > 0\n\tstruct DirectionalLight {\n\t\tvec3 direction;\n\t\tvec3 color;\n\t};\n\tuniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];\n\tvoid getDirectionalDirectLightIrradiance( const in DirectionalLight directionalLight, const in GeometricContext geometry, out IncidentLight directLight ) {\n\t\tdirectLight.color = directionalLight.color;\n\t\tdirectLight.direction = directionalLight.direction;\n\t\tdirectLight.visible = true;\n\t}\n#endif\n#if NUM_POINT_LIGHTS > 0\n\tstruct PointLight {\n\t\tvec3 position;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t};\n\tuniform PointLight pointLights[ NUM_POINT_LIGHTS ];\n\tvoid getPointDirectLightIrradiance( const in PointLight pointLight, const in GeometricContext geometry, out IncidentLight directLight ) {\n\t\tvec3 lVector = pointLight.position - geometry.position;\n\t\tdirectLight.direction = normalize( lVector );\n\t\tfloat lightDistance = length( lVector );\n\t\tdirectLight.color = pointLight.color;\n\t\tdirectLight.color *= punctualLightIntensityToIrradianceFactor( lightDistance, pointLight.distance, pointLight.decay );\n\t\tdirectLight.visible = ( directLight.color != vec3( 0.0 ) );\n\t}\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\tstruct SpotLight {\n\t\tvec3 position;\n\t\tvec3 direction;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t\tfloat coneCos;\n\t\tfloat penumbraCos;\n\t};\n\tuniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];\n\tvoid getSpotDirectLightIrradiance( const in SpotLight spotLight, const in GeometricContext geometry, out IncidentLight directLight ) {\n\t\tvec3 lVector = spotLight.position - geometry.position;\n\t\tdirectLight.direction = normalize( lVector );\n\t\tfloat lightDistance = length( lVector );\n\t\tfloat angleCos = dot( directLight.direction, spotLight.direction );\n\t\tif ( angleCos > spotLight.coneCos ) {\n\t\t\tfloat spotEffect = smoothstep( spotLight.coneCos, spotLight.penumbraCos, angleCos );\n\t\t\tdirectLight.color = spotLight.color;\n\t\t\tdirectLight.color *= spotEffect * punctualLightIntensityToIrradianceFactor( lightDistance, spotLight.distance, spotLight.decay );\n\t\t\tdirectLight.visible = true;\n\t\t} else {\n\t\t\tdirectLight.color = vec3( 0.0 );\n\t\t\tdirectLight.visible = false;\n\t\t}\n\t}\n#endif\n#if 
NUM_RECT_AREA_LIGHTS > 0\n\tstruct RectAreaLight {\n\t\tvec3 color;\n\t\tvec3 position;\n\t\tvec3 halfWidth;\n\t\tvec3 halfHeight;\n\t};\n\tuniform sampler2D ltc_1;\tuniform sampler2D ltc_2;\n\tuniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\tstruct HemisphereLight {\n\t\tvec3 direction;\n\t\tvec3 skyColor;\n\t\tvec3 groundColor;\n\t};\n\tuniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];\n\tvec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in GeometricContext geometry ) {\n\t\tfloat dotNL = dot( geometry.normal, hemiLight.direction );\n\t\tfloat hemiDiffuseWeight = 0.5 * dotNL + 0.5;\n\t\tvec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );\n\t\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\t\tirradiance *= PI;\n\t\t#endif\n\t\treturn irradiance;\n\t}\n#endif";
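// Image-based lighting for the physical material: indirect irradiance and roughness-dependent radiance from the environment map.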
var envmap_physical_pars_fragment = "#if defined( USE_ENVMAP )\n\t#ifdef ENVMAP_MODE_REFRACTION\n\t\tuniform float refractionRatio;\n\t#endif\n\tvec3 getLightProbeIndirectIrradiance( const in GeometricContext geometry, const in int maxMIPLevel ) {\n\t\tvec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );\n\t\t#ifdef ENVMAP_TYPE_CUBE\n\t\t\tvec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );\n\t\t\t#ifdef TEXTURE_LOD_EXT\n\t\t\t\tvec4 envMapColor = textureCubeLodEXT( envMap, queryVec, float( maxMIPLevel ) );\n\t\t\t#else\n\t\t\t\tvec4 envMapColor = textureCube( envMap, queryVec, float( maxMIPLevel ) );\n\t\t\t#endif\n\t\t\tenvMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;\n\t\t#elif defined( ENVMAP_TYPE_CUBE_UV )\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, worldNormal, 1.0 );\n\t\t#else\n\t\t\tvec4 envMapColor = vec4( 0.0 );\n\t\t#endif\n\t\treturn PI * envMapColor.rgb * envMapIntensity;\n\t}\n\tfloat getSpecularMIPLevel( const in float roughness, const in int maxMIPLevel ) {\n\t\tfloat maxMIPLevelScalar = float( maxMIPLevel );\n\t\tfloat sigma = PI * roughness * roughness / ( 1.0 + roughness );\n\t\tfloat desiredMIPLevel = maxMIPLevelScalar + log2( sigma );\n\t\treturn clamp( desiredMIPLevel, 0.0, maxMIPLevelScalar );\n\t}\n\tvec3 getLightProbeIndirectRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness, const in int maxMIPLevel ) {\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( -viewDir, normal );\n\t\t\treflectVec = normalize( mix( reflectVec, normal, roughness * roughness) );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( -viewDir, normal, refractionRatio );\n\t\t#endif\n\t\treflectVec = inverseTransformDirection( reflectVec, viewMatrix );\n\t\tfloat specularMIPLevel = getSpecularMIPLevel( roughness, maxMIPLevel );\n\t\t#ifdef ENVMAP_TYPE_CUBE\n\t\t\tvec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );\n\t\t\t#ifdef TEXTURE_LOD_EXT\n\t\t\t\tvec4 envMapColor = textureCubeLodEXT( envMap, queryReflectVec, specularMIPLevel );\n\t\t\t#else\n\t\t\t\tvec4 envMapColor = textureCube( envMap, queryReflectVec, specularMIPLevel );\n\t\t\t#endif\n\t\t\tenvMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;\n\t\t#elif defined( ENVMAP_TYPE_CUBE_UV )\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, reflectVec, roughness );\n\t\t#endif\n\t\treturn envMapColor.rgb * envMapIntensity;\n\t}\n#endif";
var lights_toon_fragment = "ToonMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;";
var lights_toon_pars_fragment = "varying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\nstruct ToonMaterial {\n\tvec3 diffuseColor;\n};\nvoid RE_Direct_Toon( const in IncidentLight directLight, const in GeometricContext geometry, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\tvec3 irradiance = getGradientIrradiance( geometry.normal, directLight.direction ) * directLight.color;\n\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\tirradiance *= PI;\n\t#endif\n\treflectedLight.directDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Toon( const in vec3 irradiance, const in GeometricContext geometry, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_Toon\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Toon\n#define Material_LightProbeLOD( material )\t(0)";
var lights_phong_fragment = "BlinnPhongMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;\nmaterial.specularColor = specular;\nmaterial.specularShininess = shininess;\nmaterial.specularStrength = specularStrength;";
var lights_phong_pars_fragment = "varying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\nstruct BlinnPhongMaterial {\n\tvec3 diffuseColor;\n\tvec3 specularColor;\n\tfloat specularShininess;\n\tfloat specularStrength;\n};\nvoid RE_Direct_BlinnPhong( const in IncidentLight directLight, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometry.normal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\tirradiance *= PI;\n\t#endif\n\treflectedLight.directDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );\n\treflectedLight.directSpecular += irradiance * BRDF_Specular_BlinnPhong( directLight, geometry, material.specularColor, material.specularShininess ) * material.specularStrength;\n}\nvoid RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_BlinnPhong\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_BlinnPhong\n#define Material_LightProbeLOD( material )\t(0)";
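// Physical (PBR) material: build PhysicalMaterial from the material maps, then shade with GGX specular, Lambert diffuse, optional clearcoat/sheen, and a multi-scattering environment BRDF.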
var lights_physical_fragment = "PhysicalMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );\nvec3 dxy = max( abs( dFdx( geometryNormal ) ), abs( dFdy( geometryNormal ) ) );\nfloat geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );\nmaterial.specularRoughness = max( roughnessFactor, 0.0525 );material.specularRoughness += geometryRoughness;\nmaterial.specularRoughness = min( material.specularRoughness, 1.0 );\n#ifdef REFLECTIVITY\n\tmaterial.specularColor = mix( vec3( MAXIMUM_SPECULAR_COEFFICIENT * pow2( reflectivity ) ), diffuseColor.rgb, metalnessFactor );\n#else\n\tmaterial.specularColor = mix( vec3( DEFAULT_SPECULAR_COEFFICIENT ), diffuseColor.rgb, metalnessFactor );\n#endif\n#ifdef CLEARCOAT\n\tmaterial.clearcoat = clearcoat;\n\tmaterial.clearcoatRoughness = clearcoatRoughness;\n\t#ifdef USE_CLEARCOATMAP\n\t\tmaterial.clearcoat *= texture2D( clearcoatMap, vUv ).x;\n\t#endif\n\t#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\t\tmaterial.clearcoatRoughness *= texture2D( clearcoatRoughnessMap, vUv ).y;\n\t#endif\n\tmaterial.clearcoat = saturate( material.clearcoat );\tmaterial.clearcoatRoughness = max( material.clearcoatRoughness, 0.0525 );\n\tmaterial.clearcoatRoughness += geometryRoughness;\n\tmaterial.clearcoatRoughness = min( material.clearcoatRoughness, 1.0 );\n#endif\n#ifdef USE_SHEEN\n\tmaterial.sheenColor = sheen;\n#endif";
var lights_physical_pars_fragment = "struct PhysicalMaterial {\n\tvec3 diffuseColor;\n\tfloat specularRoughness;\n\tvec3 specularColor;\n#ifdef CLEARCOAT\n\tfloat clearcoat;\n\tfloat clearcoatRoughness;\n#endif\n#ifdef USE_SHEEN\n\tvec3 sheenColor;\n#endif\n};\n#define MAXIMUM_SPECULAR_COEFFICIENT 0.16\n#define DEFAULT_SPECULAR_COEFFICIENT 0.04\nfloat clearcoatDHRApprox( const in float roughness, const in float dotNL ) {\n\treturn DEFAULT_SPECULAR_COEFFICIENT + ( 1.0 - DEFAULT_SPECULAR_COEFFICIENT ) * ( pow( 1.0 - dotNL, 5.0 ) * pow( 1.0 - roughness, 2.0 ) );\n}\n#if NUM_RECT_AREA_LIGHTS > 0\n\tvoid RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\t\tvec3 normal = geometry.normal;\n\t\tvec3 viewDir = geometry.viewDir;\n\t\tvec3 position = geometry.position;\n\t\tvec3 lightPos = rectAreaLight.position;\n\t\tvec3 halfWidth = rectAreaLight.halfWidth;\n\t\tvec3 halfHeight = rectAreaLight.halfHeight;\n\t\tvec3 lightColor = rectAreaLight.color;\n\t\tfloat roughness = material.specularRoughness;\n\t\tvec3 rectCoords[ 4 ];\n\t\trectCoords[ 0 ] = lightPos + halfWidth - halfHeight;\t\trectCoords[ 1 ] = lightPos - halfWidth - halfHeight;\n\t\trectCoords[ 2 ] = lightPos - halfWidth + halfHeight;\n\t\trectCoords[ 3 ] = lightPos + halfWidth + halfHeight;\n\t\tvec2 uv = LTC_Uv( normal, viewDir, roughness );\n\t\tvec4 t1 = texture2D( ltc_1, uv );\n\t\tvec4 t2 = texture2D( ltc_2, uv );\n\t\tmat3 mInv = mat3(\n\t\t\tvec3( t1.x, 0, t1.y ),\n\t\t\tvec3( 0, 1, 0 ),\n\t\t\tvec3( t1.z, 0, t1.w )\n\t\t);\n\t\tvec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );\n\t\treflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );\n\t\treflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );\n\t}\n#endif\nvoid RE_Direct_Physical( const in IncidentLight directLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometry.normal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\tirradiance *= PI;\n\t#endif\n\t#ifdef CLEARCOAT\n\t\tfloat ccDotNL = saturate( dot( geometry.clearcoatNormal, directLight.direction ) );\n\t\tvec3 ccIrradiance = ccDotNL * directLight.color;\n\t\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\t\tccIrradiance *= PI;\n\t\t#endif\n\t\tfloat clearcoatDHR = material.clearcoat * clearcoatDHRApprox( material.clearcoatRoughness, ccDotNL );\n\t\treflectedLight.directSpecular += ccIrradiance * material.clearcoat * BRDF_Specular_GGX( directLight, geometry.viewDir, geometry.clearcoatNormal, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearcoatRoughness );\n\t#else\n\t\tfloat clearcoatDHR = 0.0;\n\t#endif\n\t#ifdef USE_SHEEN\n\t\treflectedLight.directSpecular += ( 1.0 - clearcoatDHR ) * irradiance * BRDF_Specular_Sheen(\n\t\t\tmaterial.specularRoughness,\n\t\t\tdirectLight.direction,\n\t\t\tgeometry,\n\t\t\tmaterial.sheenColor\n\t\t);\n\t#else\n\t\treflectedLight.directSpecular += ( 1.0 - clearcoatDHR ) * irradiance * BRDF_Specular_GGX( directLight, geometry.viewDir, geometry.normal, material.specularColor, material.specularRoughness);\n\t#endif\n\treflectedLight.directDiffuse += ( 1.0 - clearcoatDHR ) * irradiance * BRDF_Diffuse_Lambert( 
material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 irradiance, const in vec3 clearcoatRadiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight) {\n\t#ifdef CLEARCOAT\n\t\tfloat ccDotNV = saturate( dot( geometry.clearcoatNormal, geometry.viewDir ) );\n\t\treflectedLight.indirectSpecular += clearcoatRadiance * material.clearcoat * BRDF_Specular_GGX_Environment( geometry.viewDir, geometry.clearcoatNormal, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearcoatRoughness );\n\t\tfloat ccDotNL = ccDotNV;\n\t\tfloat clearcoatDHR = material.clearcoat * clearcoatDHRApprox( material.clearcoatRoughness, ccDotNL );\n\t#else\n\t\tfloat clearcoatDHR = 0.0;\n\t#endif\n\tfloat clearcoatInv = 1.0 - clearcoatDHR;\n\tvec3 singleScattering = vec3( 0.0 );\n\tvec3 multiScattering = vec3( 0.0 );\n\tvec3 cosineWeightedIrradiance = irradiance * RECIPROCAL_PI;\n\tBRDF_Specular_Multiscattering_Environment( geometry, material.specularColor, material.specularRoughness, singleScattering, multiScattering );\n\tvec3 diffuse = material.diffuseColor * ( 1.0 - ( singleScattering + multiScattering ) );\n\treflectedLight.indirectSpecular += clearcoatInv * radiance * singleScattering;\n\treflectedLight.indirectSpecular += multiScattering * cosineWeightedIrradiance;\n\treflectedLight.indirectDiffuse += diffuse * cosineWeightedIrradiance;\n}\n#define RE_Direct\t\t\t\tRE_Direct_Physical\n#define RE_Direct_RectArea\t\tRE_Direct_RectArea_Physical\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Physical\n#define RE_IndirectSpecular\t\tRE_IndirectSpecular_Physical\nfloat computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {\n\treturn saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );\n}";
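// Per-fragment lighting loops: accumulate direct lighting (with shadow-map attenuation) and indirect diffuse/specular contributions.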
var lights_fragment_begin = "\nGeometricContext geometry;\ngeometry.position = - vViewPosition;\ngeometry.normal = normal;\ngeometry.viewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( vViewPosition );\n#ifdef CLEARCOAT\n\tgeometry.clearcoatNormal = clearcoatNormal;\n#endif\nIncidentLight directLight;\n#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )\n\tPointLight pointLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tpointLight = pointLights[ i ];\n\t\tgetPointDirectLightIrradiance( pointLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS )\n\t\tpointLightShadow = pointLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )\n\tSpotLight spotLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tspotLight = spotLights[ i ];\n\t\tgetSpotDirectLightIrradiance( spotLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\tspotLightShadow = spotLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )\n\tDirectionalLight directionalLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tdirectionalLight = directionalLights[ i ];\n\t\tgetDirectionalDirectLightIrradiance( directionalLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )\n\t\tdirectionalLightShadow = directionalLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? 
getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )\n\tRectAreaLight rectAreaLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {\n\t\trectAreaLight = rectAreaLights[ i ];\n\t\tRE_Direct_RectArea( rectAreaLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if defined( RE_IndirectDiffuse )\n\tvec3 iblIrradiance = vec3( 0.0 );\n\tvec3 irradiance = getAmbientLightIrradiance( ambientLightColor );\n\tirradiance += getLightProbeIrradiance( lightProbe, geometry );\n\t#if ( NUM_HEMI_LIGHTS > 0 )\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\t\tirradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if defined( RE_IndirectSpecular )\n\tvec3 radiance = vec3( 0.0 );\n\tvec3 clearcoatRadiance = vec3( 0.0 );\n#endif";
var lights_fragment_maps = "#if defined( RE_IndirectDiffuse )\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel= texture2D( lightMap, vUv2 );\n\t\tvec3 lightMapIrradiance = lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n\t\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\t\tlightMapIrradiance *= PI;\n\t\t#endif\n\t\tirradiance += lightMapIrradiance;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( STANDARD ) && defined( ENVMAP_TYPE_CUBE_UV )\n\t\tiblIrradiance += getLightProbeIndirectIrradiance( geometry, maxMipLevel );\n\t#endif\n#endif\n#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )\n\tradiance += getLightProbeIndirectRadiance( geometry.viewDir, geometry.normal, material.specularRoughness, maxMipLevel );\n\t#ifdef CLEARCOAT\n\t\tclearcoatRadiance += getLightProbeIndirectRadiance( geometry.viewDir, geometry.clearcoatNormal, material.clearcoatRoughness, maxMipLevel );\n\t#endif\n#endif";
var lights_fragment_end = "#if defined( RE_IndirectDiffuse )\n\tRE_IndirectDiffuse( irradiance, geometry, material, reflectedLight );\n#endif\n#if defined( RE_IndirectSpecular )\n\tRE_IndirectSpecular( radiance, iblIrradiance, clearcoatRadiance, geometry, material, reflectedLight );\n#endif";
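// Logarithmic depth buffer support (fragment and vertex variants).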
var logdepthbuf_fragment = "#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )\n\tgl_FragDepthEXT = vIsPerspective == 0.0 ? gl_FragCoord.z : log2( vFragDepth ) * logDepthBufFC * 0.5;\n#endif";
var logdepthbuf_pars_fragment = "#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )\n\tuniform float logDepthBufFC;\n\tvarying float vFragDepth;\n\tvarying float vIsPerspective;\n#endif";
var logdepthbuf_pars_vertex = "#ifdef USE_LOGDEPTHBUF\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tvarying float vFragDepth;\n\t\tvarying float vIsPerspective;\n\t#else\n\t\tuniform float logDepthBufFC;\n\t#endif\n#endif";
var logdepthbuf_vertex = "#ifdef USE_LOGDEPTHBUF\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tvFragDepth = 1.0 + gl_Position.w;\n\t\tvIsPerspective = float( isPerspectiveMatrix( projectionMatrix ) );\n\t#else\n\t\tif ( isPerspectiveMatrix( projectionMatrix ) ) {\n\t\t\tgl_Position.z = log2( max( EPSILON, gl_Position.w + 1.0 ) ) * logDepthBufFC - 1.0;\n\t\t\tgl_Position.z *= gl_Position.w;\n\t\t}\n\t#endif\n#endif";
var map_fragment = "#ifdef USE_MAP\n\tvec4 texelColor = texture2D( map, vUv );\n\ttexelColor = mapTexelToLinear( texelColor );\n\tdiffuseColor *= texelColor;\n#endif";
var map_pars_fragment = "#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif";
var map_particle_fragment = "#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\tvec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;\n#endif\n#ifdef USE_MAP\n\tvec4 mapTexel = texture2D( map, uv );\n\tdiffuseColor *= mapTexelToLinear( mapTexel );\n#endif\n#ifdef USE_ALPHAMAP\n\tdiffuseColor.a *= texture2D( alphaMap, uv ).g;\n#endif";
var map_particle_pars_fragment = "#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\tuniform mat3 uvTransform;\n#endif\n#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif\n#ifdef USE_ALPHAMAP\n\tuniform sampler2D alphaMap;\n#endif";
var metalnessmap_fragment = "float metalnessFactor = metalness;\n#ifdef USE_METALNESSMAP\n\tvec4 texelMetalness = texture2D( metalnessMap, vUv );\n\tmetalnessFactor *= texelMetalness.b;\n#endif";
var metalnessmap_pars_fragment = "#ifdef USE_METALNESSMAP\n\tuniform sampler2D metalnessMap;\n#endif";
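// Morph targets: blend position/normal attributes by morphTargetInfluences.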
var morphnormal_vertex = "#ifdef USE_MORPHNORMALS\n\tobjectNormal *= morphTargetBaseInfluence;\n\tobjectNormal += morphNormal0 * morphTargetInfluences[ 0 ];\n\tobjectNormal += morphNormal1 * morphTargetInfluences[ 1 ];\n\tobjectNormal += morphNormal2 * morphTargetInfluences[ 2 ];\n\tobjectNormal += morphNormal3 * morphTargetInfluences[ 3 ];\n#endif";
var morphtarget_pars_vertex = "#ifdef USE_MORPHTARGETS\n\tuniform float morphTargetBaseInfluence;\n\t#ifndef USE_MORPHNORMALS\n\t\tuniform float morphTargetInfluences[ 8 ];\n\t#else\n\t\tuniform float morphTargetInfluences[ 4 ];\n\t#endif\n#endif";
var morphtarget_vertex = "#ifdef USE_MORPHTARGETS\n\ttransformed *= morphTargetBaseInfluence;\n\ttransformed += morphTarget0 * morphTargetInfluences[ 0 ];\n\ttransformed += morphTarget1 * morphTargetInfluences[ 1 ];\n\ttransformed += morphTarget2 * morphTargetInfluences[ 2 ];\n\ttransformed += morphTarget3 * morphTargetInfluences[ 3 ];\n\t#ifndef USE_MORPHNORMALS\n\t\ttransformed += morphTarget4 * morphTargetInfluences[ 4 ];\n\t\ttransformed += morphTarget5 * morphTargetInfluences[ 5 ];\n\t\ttransformed += morphTarget6 * morphTargetInfluences[ 6 ];\n\t\ttransformed += morphTarget7 * morphTargetInfluences[ 7 ];\n\t#endif\n#endif";
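// Normal setup: flat or interpolated normals, tangent-/object-space normal maps, and clearcoat normals.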
var normal_fragment_begin = "float faceDirection = gl_FrontFacing ? 1.0 : - 1.0;\n#ifdef FLAT_SHADED\n\tvec3 fdx = vec3( dFdx( vViewPosition.x ), dFdx( vViewPosition.y ), dFdx( vViewPosition.z ) );\n\tvec3 fdy = vec3( dFdy( vViewPosition.x ), dFdy( vViewPosition.y ), dFdy( vViewPosition.z ) );\n\tvec3 normal = normalize( cross( fdx, fdy ) );\n#else\n\tvec3 normal = normalize( vNormal );\n\t#ifdef DOUBLE_SIDED\n\t\tnormal = normal * faceDirection;\n\t#endif\n\t#ifdef USE_TANGENT\n\t\tvec3 tangent = normalize( vTangent );\n\t\tvec3 bitangent = normalize( vBitangent );\n\t\t#ifdef DOUBLE_SIDED\n\t\t\ttangent = tangent * faceDirection;\n\t\t\tbitangent = bitangent * faceDirection;\n\t\t#endif\n\t\t#if defined( TANGENTSPACE_NORMALMAP ) || defined( USE_CLEARCOAT_NORMALMAP )\n\t\t\tmat3 vTBN = mat3( tangent, bitangent, normal );\n\t\t#endif\n\t#endif\n#endif\nvec3 geometryNormal = normal;";
var normal_fragment_maps = "#ifdef OBJECTSPACE_NORMALMAP\n\tnormal = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;\n\t#ifdef FLIP_SIDED\n\t\tnormal = - normal;\n\t#endif\n\t#ifdef DOUBLE_SIDED\n\t\tnormal = normal * faceDirection;\n\t#endif\n\tnormal = normalize( normalMatrix * normal );\n#elif defined( TANGENTSPACE_NORMALMAP )\n\tvec3 mapN = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;\n\tmapN.xy *= normalScale;\n\t#ifdef USE_TANGENT\n\t\tnormal = normalize( vTBN * mapN );\n\t#else\n\t\tnormal = perturbNormal2Arb( -vViewPosition, normal, mapN, faceDirection );\n\t#endif\n#elif defined( USE_BUMPMAP )\n\tnormal = perturbNormalArb( -vViewPosition, normal, dHdxy_fwd(), faceDirection );\n#endif";
var normalmap_pars_fragment = "#ifdef USE_NORMALMAP\n\tuniform sampler2D normalMap;\n\tuniform vec2 normalScale;\n#endif\n#ifdef OBJECTSPACE_NORMALMAP\n\tuniform mat3 normalMatrix;\n#endif\n#if ! defined ( USE_TANGENT ) && ( defined ( TANGENTSPACE_NORMALMAP ) || defined ( USE_CLEARCOAT_NORMALMAP ) )\n\tvec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm, vec3 mapN, float faceDirection ) {\n\t\tvec3 q0 = vec3( dFdx( eye_pos.x ), dFdx( eye_pos.y ), dFdx( eye_pos.z ) );\n\t\tvec3 q1 = vec3( dFdy( eye_pos.x ), dFdy( eye_pos.y ), dFdy( eye_pos.z ) );\n\t\tvec2 st0 = dFdx( vUv.st );\n\t\tvec2 st1 = dFdy( vUv.st );\n\t\tvec3 N = surf_norm;\n\t\tvec3 q1perp = cross( q1, N );\n\t\tvec3 q0perp = cross( N, q0 );\n\t\tvec3 T = q1perp * st0.x + q0perp * st1.x;\n\t\tvec3 B = q1perp * st0.y + q0perp * st1.y;\n\t\tfloat det = max( dot( T, T ), dot( B, B ) );\n\t\tfloat scale = ( det == 0.0 ) ? 0.0 : faceDirection * inversesqrt( det );\n\t\treturn normalize( T * ( mapN.x * scale ) + B * ( mapN.y * scale ) + N * mapN.z );\n\t}\n#endif";
var clearcoat_normal_fragment_begin = "#ifdef CLEARCOAT\n\tvec3 clearcoatNormal = geometryNormal;\n#endif";
var clearcoat_normal_fragment_maps = "#ifdef USE_CLEARCOAT_NORMALMAP\n\tvec3 clearcoatMapN = texture2D( clearcoatNormalMap, vUv ).xyz * 2.0 - 1.0;\n\tclearcoatMapN.xy *= clearcoatNormalScale;\n\t#ifdef USE_TANGENT\n\t\tclearcoatNormal = normalize( vTBN * clearcoatMapN );\n\t#else\n\t\tclearcoatNormal = perturbNormal2Arb( - vViewPosition, clearcoatNormal, clearcoatMapN, faceDirection );\n\t#endif\n#endif";
var clearcoat_pars_fragment = "#ifdef USE_CLEARCOATMAP\n\tuniform sampler2D clearcoatMap;\n#endif\n#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\tuniform sampler2D clearcoatRoughnessMap;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tuniform sampler2D clearcoatNormalMap;\n\tuniform vec2 clearcoatNormalScale;\n#endif";
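// Packing helpers: depth <-> RGBA packing, half-float packing, and viewZ/depth conversions used by the shadow-map chunks below.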
var packing = "vec3 packNormalToRGB( const in vec3 normal ) {\n\treturn normalize( normal ) * 0.5 + 0.5;\n}\nvec3 unpackRGBToNormal( const in vec3 rgb ) {\n\treturn 2.0 * rgb.xyz - 1.0;\n}\nconst float PackUpscale = 256. / 255.;const float UnpackDownscale = 255. / 256.;\nconst vec3 PackFactors = vec3( 256. * 256. * 256., 256. * 256., 256. );\nconst vec4 UnpackFactors = UnpackDownscale / vec4( PackFactors, 1. );\nconst float ShiftRight8 = 1. / 256.;\nvec4 packDepthToRGBA( const in float v ) {\n\tvec4 r = vec4( fract( v * PackFactors ), v );\n\tr.yzw -= r.xyz * ShiftRight8;\treturn r * PackUpscale;\n}\nfloat unpackRGBAToDepth( const in vec4 v ) {\n\treturn dot( v, UnpackFactors );\n}\nvec4 pack2HalfToRGBA( vec2 v ) {\n\tvec4 r = vec4( v.x, fract( v.x * 255.0 ), v.y, fract( v.y * 255.0 ));\n\treturn vec4( r.x - r.y / 255.0, r.y, r.z - r.w / 255.0, r.w);\n}\nvec2 unpackRGBATo2Half( vec4 v ) {\n\treturn vec2( v.x + ( v.y / 255.0 ), v.z + ( v.w / 255.0 ) );\n}\nfloat viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( viewZ + near ) / ( near - far );\n}\nfloat orthographicDepthToViewZ( const in float linearClipZ, const in float near, const in float far ) {\n\treturn linearClipZ * ( near - far ) - near;\n}\nfloat viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn (( near + viewZ ) * far ) / (( far - near ) * viewZ );\n}\nfloat perspectiveDepthToViewZ( const in float invClipZ, const in float near, const in float far ) {\n\treturn ( near * far ) / ( ( far - near ) * invClipZ - far );\n}";
var premultiplied_alpha_fragment = "#ifdef PREMULTIPLIED_ALPHA\n\tgl_FragColor.rgb *= gl_FragColor.a;\n#endif";
var project_vertex = "vec4 mvPosition = vec4( transformed, 1.0 );\n#ifdef USE_INSTANCING\n\tmvPosition = instanceMatrix * mvPosition;\n#endif\nmvPosition = modelViewMatrix * mvPosition;\ngl_Position = projectionMatrix * mvPosition;";
var dithering_fragment = "#ifdef DITHERING\n\tgl_FragColor.rgb = dithering( gl_FragColor.rgb );\n#endif";
var dithering_pars_fragment = "#ifdef DITHERING\n\tvec3 dithering( vec3 color ) {\n\t\tfloat grid_position = rand( gl_FragCoord.xy );\n\t\tvec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );\n\t\tdither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );\n\t\treturn color + dither_shift_RGB;\n\t}\n#endif";
var roughnessmap_fragment = "float roughnessFactor = roughness;\n#ifdef USE_ROUGHNESSMAP\n\tvec4 texelRoughness = texture2D( roughnessMap, vUv );\n\troughnessFactor *= texelRoughness.g;\n#endif";
var roughnessmap_pars_fragment = "#ifdef USE_ROUGHNESSMAP\n\tuniform sampler2D roughnessMap;\n#endif";
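// Shadow-map sampling: basic depth compare plus PCF, soft PCF and VSM filtering variants.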
var shadowmap_pars_fragment = "#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n\tfloat texture2DCompare( sampler2D depths, vec2 uv, float compare ) {\n\t\treturn step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );\n\t}\n\tvec2 texture2DDistribution( sampler2D shadow, vec2 uv ) {\n\t\treturn unpackRGBATo2Half( texture2D( shadow, uv ) );\n\t}\n\tfloat VSMShadow (sampler2D shadow, vec2 uv, float compare ){\n\t\tfloat occlusion = 1.0;\n\t\tvec2 distribution = texture2DDistribution( shadow, uv );\n\t\tfloat hard_shadow = step( compare , distribution.x );\n\t\tif (hard_shadow != 1.0 ) {\n\t\t\tfloat distance = compare - distribution.x ;\n\t\t\tfloat variance = max( 0.00000, distribution.y * distribution.y );\n\t\t\tfloat softness_probability = variance / (variance + distance * distance );\t\t\tsoftness_probability = clamp( ( softness_probability - 0.3 ) / ( 0.95 - 0.3 ), 0.0, 1.0 );\t\t\tocclusion = clamp( max( hard_shadow, softness_probability ), 0.0, 1.0 );\n\t\t}\n\t\treturn occlusion;\n\t}\n\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\tfloat shadow = 1.0;\n\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\tshadowCoord.z += shadowBias;\n\t\tbvec4 inFrustumVec = bvec4 ( shadowCoord.x >= 0.0, shadowCoord.x <= 1.0, shadowCoord.y >= 0.0, shadowCoord.y <= 1.0 );\n\t\tbool inFrustum = all( inFrustumVec );\n\t\tbvec2 frustumTestVec = bvec2( inFrustum, shadowCoord.z <= 1.0 );\n\t\tbool frustumTest = all( frustumTestVec );\n\t\tif ( frustumTest ) {\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx0 = - texelSize.x * shadowRadius;\n\t\t\tfloat dy0 = - texelSize.y * shadowRadius;\n\t\t\tfloat dx1 = + texelSize.x * shadowRadius;\n\t\t\tfloat dy1 = + texelSize.y * shadowRadius;\n\t\t\tfloat dx2 = dx0 / 2.0;\n\t\t\tfloat dy2 = dy0 / 2.0;\n\t\t\tfloat dx3 = dx1 / 2.0;\n\t\t\tfloat dy3 = dy1 / 2.0;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, 
dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )\n\t\t\t) * ( 1.0 / 17.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_PCF_SOFT )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx = texelSize.x;\n\t\t\tfloat dy = texelSize.y;\n\t\t\tvec2 uv = shadowCoord.xy;\n\t\t\tvec2 f = fract( uv * shadowMapSize + 0.5 );\n\t\t\tuv -= f * texelSize;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, uv, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( dx, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 0.0, dy ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + texelSize, shadowCoord.z ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, 0.0 ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, dy ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( 0.0, -dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 0.0, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( dx, -dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( mix( texture2DCompare( shadowMap, uv + vec2( -dx, -dy ), shadowCoord.z ), \n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t mix( texture2DCompare( shadowMap, uv + vec2( -dx, 2.0 * dy ), shadowCoord.z ), \n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t f.y )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_VSM )\n\t\t\tshadow = VSMShadow( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#else\n\t\t\tshadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#endif\n\t\t}\n\t\treturn shadow;\n\t}\n\tvec2 cubeToUV( vec3 v, float texelSizeY ) {\n\t\tvec3 absV = abs( v );\n\t\tfloat scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );\n\t\tabsV *= scaleToCube;\n\t\tv *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );\n\t\tvec2 planar = v.xy;\n\t\tfloat almostATexel = 1.5 * texelSizeY;\n\t\tfloat almostOne = 
1.0 - almostATexel;\n\t\tif ( absV.z >= almostOne ) {\n\t\t\tif ( v.z > 0.0 )\n\t\t\t\tplanar.x = 4.0 - v.x;\n\t\t} else if ( absV.x >= almostOne ) {\n\t\t\tfloat signX = sign( v.x );\n\t\t\tplanar.x = v.z * signX + 2.0 * signX;\n\t\t} else if ( absV.y >= almostOne ) {\n\t\t\tfloat signY = sign( v.y );\n\t\t\tplanar.x = v.x + 2.0 * signY + 2.0;\n\t\t\tplanar.y = v.z * signY - 2.0;\n\t\t}\n\t\treturn vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );\n\t}\n\tfloat getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tvec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\tfloat dp = ( length( lightToPosition ) - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear );\t\tdp += shadowBias;\n\t\tvec3 bd3D = normalize( lightToPosition );\n\t\t#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT ) || defined( SHADOWMAP_TYPE_VSM )\n\t\t\tvec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;\n\t\t\treturn (\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#else\n\t\t\treturn texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );\n\t\t#endif\n\t}\n#endif";
var shadowmap_pars_vertex = "#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 spotShadowMatrix[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 pointShadowMatrix[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n#endif";
var shadowmap_vertex = "#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0 || NUM_SPOT_LIGHT_SHADOWS > 0 || NUM_POINT_LIGHT_SHADOWS > 0\n\t\tvec3 shadowWorldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\t\tvec4 shadowWorldPosition;\n\t#endif\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * directionalLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * spotLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvSpotShadowCoord[ i ] = spotShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * pointLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvPointShadowCoord[ i ] = pointShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n#endif";
var shadowmask_pars_fragment = "float getShadowMask() {\n\tfloat shadow = 1.0;\n\t#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tdirectionalLight = directionalLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tspotLight = spotLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tpointLight = pointLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#endif\n\treturn shadow;\n}";
var skinbase_vertex = "#ifdef USE_SKINNING\n\tmat4 boneMatX = getBoneMatrix( skinIndex.x );\n\tmat4 boneMatY = getBoneMatrix( skinIndex.y );\n\tmat4 boneMatZ = getBoneMatrix( skinIndex.z );\n\tmat4 boneMatW = getBoneMatrix( skinIndex.w );\n#endif";
var skinning_pars_vertex = "#ifdef USE_SKINNING\n\tuniform mat4 bindMatrix;\n\tuniform mat4 bindMatrixInverse;\n\t#ifdef BONE_TEXTURE\n\t\tuniform highp sampler2D boneTexture;\n\t\tuniform int boneTextureSize;\n\t\tmat4 getBoneMatrix( const in float i ) {\n\t\t\tfloat j = i * 4.0;\n\t\t\tfloat x = mod( j, float( boneTextureSize ) );\n\t\t\tfloat y = floor( j / float( boneTextureSize ) );\n\t\t\tfloat dx = 1.0 / float( boneTextureSize );\n\t\t\tfloat dy = 1.0 / float( boneTextureSize );\n\t\t\ty = dy * ( y + 0.5 );\n\t\t\tvec4 v1 = texture2D( boneTexture, vec2( dx * ( x + 0.5 ), y ) );\n\t\t\tvec4 v2 = texture2D( boneTexture, vec2( dx * ( x + 1.5 ), y ) );\n\t\t\tvec4 v3 = texture2D( boneTexture, vec2( dx * ( x + 2.5 ), y ) );\n\t\t\tvec4 v4 = texture2D( boneTexture, vec2( dx * ( x + 3.5 ), y ) );\n\t\t\tmat4 bone = mat4( v1, v2, v3, v4 );\n\t\t\treturn bone;\n\t\t}\n\t#else\n\t\tuniform mat4 boneMatrices[ MAX_BONES ];\n\t\tmat4 getBoneMatrix( const in float i ) {\n\t\t\tmat4 bone = boneMatrices[ int(i) ];\n\t\t\treturn bone;\n\t\t}\n\t#endif\n#endif";
var skinning_vertex = "#ifdef USE_SKINNING\n\tvec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );\n\tvec4 skinned = vec4( 0.0 );\n\tskinned += boneMatX * skinVertex * skinWeight.x;\n\tskinned += boneMatY * skinVertex * skinWeight.y;\n\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\tskinned += boneMatW * skinVertex * skinWeight.w;\n\ttransformed = ( bindMatrixInverse * skinned ).xyz;\n#endif";
var skinnormal_vertex = "#ifdef USE_SKINNING\n\tmat4 skinMatrix = mat4( 0.0 );\n\tskinMatrix += skinWeight.x * boneMatX;\n\tskinMatrix += skinWeight.y * boneMatY;\n\tskinMatrix += skinWeight.z * boneMatZ;\n\tskinMatrix += skinWeight.w * boneMatW;\n\tskinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;\n\tobjectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;\n\t#ifdef USE_TANGENT\n\t\tobjectTangent = vec4( skinMatrix * vec4( objectTangent, 0.0 ) ).xyz;\n\t#endif\n#endif";
var specularmap_fragment = "float specularStrength;\n#ifdef USE_SPECULARMAP\n\tvec4 texelSpecular = texture2D( specularMap, vUv );\n\tspecularStrength = texelSpecular.r;\n#else\n\tspecularStrength = 1.0;\n#endif";
var specularmap_pars_fragment = "#ifdef USE_SPECULARMAP\n\tuniform sampler2D specularMap;\n#endif";
var tonemapping_fragment = "#if defined( TONE_MAPPING )\n\tgl_FragColor.rgb = toneMapping( gl_FragColor.rgb );\n#endif";
var tonemapping_pars_fragment = "#ifndef saturate\n#define saturate(a) clamp( a, 0.0, 1.0 )\n#endif\nuniform float toneMappingExposure;\nvec3 LinearToneMapping( vec3 color ) {\n\treturn toneMappingExposure * color;\n}\nvec3 ReinhardToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\treturn saturate( color / ( vec3( 1.0 ) + color ) );\n}\nvec3 OptimizedCineonToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\tcolor = max( vec3( 0.0 ), color - 0.004 );\n\treturn pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );\n}\nvec3 RRTAndODTFit( vec3 v ) {\n\tvec3 a = v * ( v + 0.0245786 ) - 0.000090537;\n\tvec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;\n\treturn a / b;\n}\nvec3 ACESFilmicToneMapping( vec3 color ) {\n\tconst mat3 ACESInputMat = mat3(\n\t\tvec3( 0.59719, 0.07600, 0.02840 ),\t\tvec3( 0.35458, 0.90834, 0.13383 ),\n\t\tvec3( 0.04823, 0.01566, 0.83777 )\n\t);\n\tconst mat3 ACESOutputMat = mat3(\n\t\tvec3( 1.60475, -0.10208, -0.00327 ),\t\tvec3( -0.53108, 1.10813, -0.07276 ),\n\t\tvec3( -0.07367, -0.00605, 1.07602 )\n\t);\n\tcolor *= toneMappingExposure / 0.6;\n\tcolor = ACESInputMat * color;\n\tcolor = RRTAndODTFit( color );\n\tcolor = ACESOutputMat * color;\n\treturn saturate( color );\n}\nvec3 CustomToneMapping( vec3 color ) { return color; }";
var transmissionmap_fragment = "#ifdef USE_TRANSMISSIONMAP\n\ttotalTransmission *= texture2D( transmissionMap, vUv ).r;\n#endif";
var transmissionmap_pars_fragment = "#ifdef USE_TRANSMISSIONMAP\n\tuniform sampler2D transmissionMap;\n#endif";
var uv_pars_fragment = "#if ( defined( USE_UV ) && ! defined( UVS_VERTEX_ONLY ) )\n\tvarying vec2 vUv;\n#endif";
var uv_pars_vertex = "#ifdef USE_UV\n\t#ifdef UVS_VERTEX_ONLY\n\t\tvec2 vUv;\n\t#else\n\t\tvarying vec2 vUv;\n\t#endif\n\tuniform mat3 uvTransform;\n#endif";
var uv_vertex = "#ifdef USE_UV\n\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n#endif";
var uv2_pars_fragment = "#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tvarying vec2 vUv2;\n#endif";
var uv2_pars_vertex = "#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tattribute vec2 uv2;\n\tvarying vec2 vUv2;\n\tuniform mat3 uv2Transform;\n#endif";
var uv2_vertex = "#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tvUv2 = ( uv2Transform * vec3( uv2, 1 ) ).xy;\n#endif";
var worldpos_vertex = "#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP )\n\tvec4 worldPosition = vec4( transformed, 1.0 );\n\t#ifdef USE_INSTANCING\n\t\tworldPosition = instanceMatrix * worldPosition;\n\t#endif\n\tworldPosition = modelMatrix * worldPosition;\n#endif";
var background_frag = "uniform sampler2D t2D;\nvarying vec2 vUv;\nvoid main() {\n\tvec4 texColor = texture2D( t2D, vUv );\n\tgl_FragColor = mapTexelToLinear( texColor );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}";
var background_vert = "varying vec2 vUv;\nuniform mat3 uvTransform;\nvoid main() {\n\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n\tgl_Position = vec4( position.xy, 1.0, 1.0 );\n}";
var cube_frag = "#include <envmap_common_pars_fragment>\nuniform float opacity;\nvarying vec3 vWorldDirection;\n#include <cube_uv_reflection_fragment>\nvoid main() {\n\tvec3 vReflect = vWorldDirection;\n\t#include <envmap_fragment>\n\tgl_FragColor = envColor;\n\tgl_FragColor.a *= opacity;\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}";
var cube_vert = "varying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include <begin_vertex>\n\t#include <project_vertex>\n\tgl_Position.z = gl_Position.w;\n}";
var depth_frag = "#if DEPTH_PACKING == 3200\n\tuniform float opacity;\n#endif\n#include <common>\n#include <packing>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#if DEPTH_PACKING == 3200\n\t\tdiffuseColor.a = opacity;\n\t#endif\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <logdepthbuf_fragment>\n\tfloat fragCoordZ = 0.5 * vHighPrecisionZW[0] / vHighPrecisionZW[1] + 0.5;\n\t#if DEPTH_PACKING == 3200\n\t\tgl_FragColor = vec4( vec3( 1.0 - fragCoordZ ), opacity );\n\t#elif DEPTH_PACKING == 3201\n\t\tgl_FragColor = packDepthToRGBA( fragCoordZ );\n\t#endif\n}";
var depth_vert = "#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\t#include <uv_vertex>\n\t#include <skinbase_vertex>\n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include <beginnormal_vertex>\n\t\t#include <morphnormal_vertex>\n\t\t#include <skinnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvHighPrecisionZW = gl_Position.zw;\n}";
var distanceRGBA_frag = "#define DISTANCE\nuniform vec3 referencePosition;\nuniform float nearDistance;\nuniform float farDistance;\nvarying vec3 vWorldPosition;\n#include <common>\n#include <packing>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main () {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\tfloat dist = length( vWorldPosition - referencePosition );\n\tdist = ( dist - nearDistance ) / ( farDistance - nearDistance );\n\tdist = saturate( dist );\n\tgl_FragColor = packDepthToRGBA( dist );\n}";
var distanceRGBA_vert = "#define DISTANCE\nvarying vec3 vWorldPosition;\n#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <skinbase_vertex>\n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include <beginnormal_vertex>\n\t\t#include <morphnormal_vertex>\n\t\t#include <skinnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <worldpos_vertex>\n\t#include <clipping_planes_vertex>\n\tvWorldPosition = worldPosition.xyz;\n}";
var equirect_frag = "uniform sampler2D tEquirect;\nvarying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvec3 direction = normalize( vWorldDirection );\n\tvec2 sampleUV = equirectUv( direction );\n\tvec4 texColor = texture2D( tEquirect, sampleUV );\n\tgl_FragColor = mapTexelToLinear( texColor );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}";
var equirect_vert = "varying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include <begin_vertex>\n\t#include <project_vertex>\n}";
var linedashed_frag = "uniform vec3 diffuse;\nuniform float opacity;\nuniform float dashSize;\nuniform float totalSize;\nvarying float vLineDistance;\n#include <common>\n#include <color_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tif ( mod( vLineDistance, totalSize ) > dashSize ) {\n\t\tdiscard;\n\t}\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <color_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n}";
var linedashed_vert = "uniform float scale;\nattribute float lineDistance;\nvarying float vLineDistance;\n#include <common>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\tvLineDistance = scale * lineDistance;\n\t#include <color_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}";
var meshbasic_frag = "uniform vec3 diffuse;\nuniform float opacity;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <fog_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\t#ifdef USE_LIGHTMAP\n\t\n\t\tvec4 lightMapTexel= texture2D( lightMap, vUv2 );\n\t\treflectedLight.indirectDiffuse += lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n\t#else\n\t\treflectedLight.indirectDiffuse += vec3( 1.0 );\n\t#endif\n\t#include <aomap_fragment>\n\treflectedLight.indirectDiffuse *= diffuseColor.rgb;\n\tvec3 outgoingLight = reflectedLight.indirectDiffuse;\n\t#include <envmap_fragment>\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshbasic_vert = "#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <envmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <skinbase_vertex>\n\t#ifdef USE_ENVMAP\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <worldpos_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <envmap_vertex>\n\t#include <fog_vertex>\n}";
var meshlambert_frag = "uniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\nvarying vec3 vLightFront;\nvarying vec3 vIndirectFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n\tvarying vec3 vIndirectBack;\n#endif\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <fog_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <shadowmask_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\t#include <emissivemap_fragment>\n\t#ifdef DOUBLE_SIDED\n\t\treflectedLight.indirectDiffuse += ( gl_FrontFacing ) ? vIndirectFront : vIndirectBack;\n\t#else\n\t\treflectedLight.indirectDiffuse += vIndirectFront;\n\t#endif\n\t#include <lightmap_fragment>\n\treflectedLight.indirectDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb );\n\t#ifdef DOUBLE_SIDED\n\t\treflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;\n\t#else\n\t\treflectedLight.directDiffuse = vLightFront;\n\t#endif\n\treflectedLight.directDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb ) * getShadowMask();\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include <envmap_fragment>\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshlambert_vert = "#define LAMBERT\nvarying vec3 vLightFront;\nvarying vec3 vIndirectFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n\tvarying vec3 vIndirectBack;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <envmap_pars_vertex>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <worldpos_vertex>\n\t#include <envmap_vertex>\n\t#include <lights_lambert_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}";
var meshmatcap_frag = "#define MATCAP\nuniform vec3 diffuse;\nuniform float opacity;\nuniform sampler2D matcap;\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <fog_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\tvec3 viewDir = normalize( vViewPosition );\n\tvec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );\n\tvec3 y = cross( viewDir, x );\n\tvec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5;\n\t#ifdef USE_MATCAP\n\t\tvec4 matcapColor = texture2D( matcap, uv );\n\t\tmatcapColor = matcapTexelToLinear( matcapColor );\n\t#else\n\t\tvec4 matcapColor = vec4( 1.0 );\n\t#endif\n\tvec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshmatcap_vert = "#define MATCAP\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <color_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#ifndef FLAT_SHADED\n\t\tvNormal = normalize( transformedNormal );\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n\tvViewPosition = - mvPosition.xyz;\n}";
var meshtoon_frag = "#define TOON\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <gradientmap_pars_fragment>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <lights_toon_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <lights_toon_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshtoon_vert = "#define TOON\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}";
var meshphong_frag = "#define PHONG\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <lights_phong_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <lights_phong_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#include <envmap_fragment>\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshphong_vert = "#define PHONG\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <envmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <envmap_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}";
var meshphysical_frag = "#define STANDARD\n#ifdef PHYSICAL\n\t#define REFLECTIVITY\n\t#define CLEARCOAT\n\t#define TRANSMISSION\n#endif\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float roughness;\nuniform float metalness;\nuniform float opacity;\n#ifdef TRANSMISSION\n\tuniform float transmission;\n#endif\n#ifdef REFLECTIVITY\n\tuniform float reflectivity;\n#endif\n#ifdef CLEARCOAT\n\tuniform float clearcoat;\n\tuniform float clearcoatRoughness;\n#endif\n#ifdef USE_SHEEN\n\tuniform vec3 sheen;\n#endif\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <transmissionmap_pars_fragment>\n#include <bsdfs>\n#include <cube_uv_reflection_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_physical_pars_fragment>\n#include <fog_pars_fragment>\n#include <lights_pars_begin>\n#include <lights_physical_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <clearcoat_pars_fragment>\n#include <roughnessmap_pars_fragment>\n#include <metalnessmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#ifdef TRANSMISSION\n\t\tfloat totalTransmission = transmission;\n\t#endif\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <roughnessmap_fragment>\n\t#include <metalnessmap_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <clearcoat_normal_fragment_begin>\n\t#include <clearcoat_normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <transmissionmap_fragment>\n\t#include <lights_physical_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#ifdef TRANSMISSION\n\t\tdiffuseColor.a *= mix( saturate( 1. - totalTransmission + linearToRelativeLuminance( reflectedLight.directSpecular + reflectedLight.indirectSpecular ) ), 1.0, metalness );\n\t#endif\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}";
var meshphysical_vert = "#define STANDARD\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n\t#ifdef USE_TANGENT\n\t\tvTangent = normalize( transformedTangent );\n\t\tvBitangent = normalize( cross( vNormal, vTangent ) * tangent.w );\n\t#endif\n#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}";
var normal_frag = "#define NORMAL\nuniform float opacity;\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvarying vec3 vViewPosition;\n#endif\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif\n#include <packing>\n#include <uv_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\t#include <logdepthbuf_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\tgl_FragColor = vec4( packNormalToRGB( normal ), opacity );\n}";
var normal_vert = "#define NORMAL\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvarying vec3 vViewPosition;\n#endif\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n\t#ifdef USE_TANGENT\n\t\tvTangent = normalize( transformedTangent );\n\t\tvBitangent = normalize( cross( vNormal, vTangent ) * tangent.w );\n\t#endif\n#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvViewPosition = - mvPosition.xyz;\n#endif\n}";
var points_frag = "uniform vec3 diffuse;\nuniform float opacity;\n#include <common>\n#include <color_pars_fragment>\n#include <map_particle_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_particle_fragment>\n\t#include <color_fragment>\n\t#include <alphatest_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n}";
var points_vert = "uniform float size;\nuniform float scale;\n#include <common>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <color_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <project_vertex>\n\tgl_PointSize = size;\n\t#ifdef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );\n\t#endif\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <worldpos_vertex>\n\t#include <fog_vertex>\n}";
var shadow_frag = "uniform vec3 color;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <shadowmap_pars_fragment>\n#include <shadowmask_pars_fragment>\nvoid main() {\n\tgl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n}";
var shadow_vert = "#include <common>\n#include <fog_pars_vertex>\n#include <shadowmap_pars_vertex>\nvoid main() {\n\t#include <begin_vertex>\n\t#include <project_vertex>\n\t#include <worldpos_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}";
var sprite_frag = "uniform vec3 diffuse;\nuniform float opacity;\n#include <common>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n}";
var sprite_vert = "uniform float rotation;\nuniform vec2 center;\n#include <common>\n#include <uv_pars_vertex>\n#include <fog_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\tvec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );\n\tvec2 scale;\n\tscale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );\n\tscale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );\n\t#ifndef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) scale *= - mvPosition.z;\n\t#endif\n\tvec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;\n\tvec2 rotatedPosition;\n\trotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;\n\trotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;\n\tmvPosition.xy += rotatedPosition;\n\tgl_Position = projectionMatrix * mvPosition;\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}";
const ShaderChunk = {
alphamap_fragment: alphamap_fragment,
alphamap_pars_fragment: alphamap_pars_fragment,
alphatest_fragment: alphatest_fragment,
aomap_fragment: aomap_fragment,
aomap_pars_fragment: aomap_pars_fragment,
begin_vertex: begin_vertex,
beginnormal_vertex: beginnormal_vertex,
bsdfs: bsdfs,
bumpmap_pars_fragment: bumpmap_pars_fragment,
clipping_planes_fragment: clipping_planes_fragment,
clipping_planes_pars_fragment: clipping_planes_pars_fragment,
clipping_planes_pars_vertex: clipping_planes_pars_vertex,
clipping_planes_vertex: clipping_planes_vertex,
color_fragment: color_fragment,
color_pars_fragment: color_pars_fragment,
color_pars_vertex: color_pars_vertex,
color_vertex: color_vertex,
common: common,
cube_uv_reflection_fragment: cube_uv_reflection_fragment,
defaultnormal_vertex: defaultnormal_vertex,
displacementmap_pars_vertex: displacementmap_pars_vertex,
displacementmap_vertex: displacementmap_vertex,
emissivemap_fragment: emissivemap_fragment,
emissivemap_pars_fragment: emissivemap_pars_fragment,
encodings_fragment: encodings_fragment,
encodings_pars_fragment: encodings_pars_fragment,
envmap_fragment: envmap_fragment,
envmap_common_pars_fragment: envmap_common_pars_fragment,
envmap_pars_fragment: envmap_pars_fragment,
envmap_pars_vertex: envmap_pars_vertex,
envmap_physical_pars_fragment: envmap_physical_pars_fragment,
envmap_vertex: envmap_vertex,
fog_vertex: fog_vertex,
fog_pars_vertex: fog_pars_vertex,
fog_fragment: fog_fragment,
fog_pars_fragment: fog_pars_fragment,
gradientmap_pars_fragment: gradientmap_pars_fragment,
lightmap_fragment: lightmap_fragment,
lightmap_pars_fragment: lightmap_pars_fragment,
lights_lambert_vertex: lights_lambert_vertex,
lights_pars_begin: lights_pars_begin,
lights_toon_fragment: lights_toon_fragment,
lights_toon_pars_fragment: lights_toon_pars_fragment,
lights_phong_fragment: lights_phong_fragment,
lights_phong_pars_fragment: lights_phong_pars_fragment,
lights_physical_fragment: lights_physical_fragment,
lights_physical_pars_fragment: lights_physical_pars_fragment,
lights_fragment_begin: lights_fragment_begin,
lights_fragment_maps: lights_fragment_maps,
lights_fragment_end: lights_fragment_end,
logdepthbuf_fragment: logdepthbuf_fragment,
logdepthbuf_pars_fragment: logdepthbuf_pars_fragment,
logdepthbuf_pars_vertex: logdepthbuf_pars_vertex,
logdepthbuf_vertex: logdepthbuf_vertex,
map_fragment: map_fragment,
map_pars_fragment: map_pars_fragment,
map_particle_fragment: map_particle_fragment,
map_particle_pars_fragment: map_particle_pars_fragment,
metalnessmap_fragment: metalnessmap_fragment,
metalnessmap_pars_fragment: metalnessmap_pars_fragment,
morphnormal_vertex: morphnormal_vertex,
morphtarget_pars_vertex: morphtarget_pars_vertex,
morphtarget_vertex: morphtarget_vertex,
normal_fragment_begin: normal_fragment_begin,
normal_fragment_maps: normal_fragment_maps,
normalmap_pars_fragment: normalmap_pars_fragment,
clearcoat_normal_fragment_begin: clearcoat_normal_fragment_begin,
clearcoat_normal_fragment_maps: clearcoat_normal_fragment_maps,
clearcoat_pars_fragment: clearcoat_pars_fragment,
packing: packing,
premultiplied_alpha_fragment: premultiplied_alpha_fragment,
project_vertex: project_vertex,
dithering_fragment: dithering_fragment,
dithering_pars_fragment: dithering_pars_fragment,
roughnessmap_fragment: roughnessmap_fragment,
roughnessmap_pars_fragment: roughnessmap_pars_fragment,
shadowmap_pars_fragment: shadowmap_pars_fragment,
shadowmap_pars_vertex: shadowmap_pars_vertex,
shadowmap_vertex: shadowmap_vertex,
shadowmask_pars_fragment: shadowmask_pars_fragment,
skinbase_vertex: skinbase_vertex,
skinning_pars_vertex: skinning_pars_vertex,
skinning_vertex: skinning_vertex,
skinnormal_vertex: skinnormal_vertex,
specularmap_fragment: specularmap_fragment,
specularmap_pars_fragment: specularmap_pars_fragment,
tonemapping_fragment: tonemapping_fragment,
tonemapping_pars_fragment: tonemapping_pars_fragment,
transmissionmap_fragment: transmissionmap_fragment,
transmissionmap_pars_fragment: transmissionmap_pars_fragment,
uv_pars_fragment: uv_pars_fragment,
uv_pars_vertex: uv_pars_vertex,
uv_vertex: uv_vertex,
uv2_pars_fragment: uv2_pars_fragment,
uv2_pars_vertex: uv2_pars_vertex,
uv2_vertex: uv2_vertex,
worldpos_vertex: worldpos_vertex,
background_frag: background_frag,
background_vert: background_vert,
cube_frag: cube_frag,
cube_vert: cube_vert,
depth_frag: depth_frag,
depth_vert: depth_vert,
distanceRGBA_frag: distanceRGBA_frag,
distanceRGBA_vert: distanceRGBA_vert,
equirect_frag: equirect_frag,
equirect_vert: equirect_vert,
linedashed_frag: linedashed_frag,
linedashed_vert: linedashed_vert,
meshbasic_frag: meshbasic_frag,
meshbasic_vert: meshbasic_vert,
meshlambert_frag: meshlambert_frag,
meshlambert_vert: meshlambert_vert,
meshmatcap_frag: meshmatcap_frag,
meshmatcap_vert: meshmatcap_vert,
meshtoon_frag: meshtoon_frag,
meshtoon_vert: meshtoon_vert,
meshphong_frag: meshphong_frag,
meshphong_vert: meshphong_vert,
meshphysical_frag: meshphysical_frag,
meshphysical_vert: meshphysical_vert,
normal_frag: normal_frag,
normal_vert: normal_vert,
points_frag: points_frag,
points_vert: points_vert,
shadow_frag: shadow_frag,
shadow_vert: shadow_vert,
sprite_frag: sprite_frag,
sprite_vert: sprite_vert
};
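// Illustrative sketch only (not part of the bundled renderer): the ShaderChunk map
// above is what lets the shader sources resolve their `#include <chunkName>` lines
// before compilation. The actual resolver lives elsewhere in this bundle; the helper
// below merely demonstrates the lookup-and-recurse idea, and its regular expression
// is an assumption rather than a reference to the bundled implementation.
function resolveIncludesSketch( glsl ) {
	// Replace every `#include <name>` directive with the named ShaderChunk entry,
	// recursing so that chunks may themselves include further chunks.
	return glsl.replace( /^[ \t]*#include +<([\w\d./]+)>/gm, ( match, name ) => {
		const chunk = ShaderChunk[ name ];
		if ( chunk === undefined ) {
			throw new Error( 'Cannot resolve #include <' + name + '>' );
		}
		return resolveIncludesSketch( chunk );
	} );
}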
/**
 * Uniforms library for shared WebGL shaders
*/
const UniformsLib = {
common: {
diffuse: { value: new Color( 0xeeeeee ) },
opacity: { value: 1.0 },
map: { value: null },
uvTransform: { value: new Matrix3() },
uv2Transform: { value: new Matrix3() },
alphaMap: { value: null },
},
specularmap: {
specularMap: { value: null },
},
envmap: {
envMap: { value: null },
flipEnvMap: { value: - 1 },
reflectivity: { value: 1.0 },
refractionRatio: { value: 0.98 },
maxMipLevel: { value: 0 }
},
aomap: {
aoMap: { value: null },
aoMapIntensity: { value: 1 }
},
lightmap: {
lightMap: { value: null },
lightMapIntensity: { value: 1 }
},
emissivemap: {
emissiveMap: { value: null }
},
bumpmap: {
bumpMap: { value: null },
bumpScale: { value: 1 }
},
normalmap: {
normalMap: { value: null },
normalScale: { value: new Vector2( 1, 1 ) }
},
displacementmap: {
displacementMap: { value: null },
displacementScale: { value: 1 },
displacementBias: { value: 0 }
},
roughnessmap: {
roughnessMap: { value: null }
},
metalnessmap: {
metalnessMap: { value: null }
},
gradientmap: {
gradientMap: { value: null }
},
fog: {
fogDensity: { value: 0.00025 },
fogNear: { value: 1 },
fogFar: { value: 2000 },
fogColor: { value: new Color( 0xffffff ) }
},
lights: {
ambientLightColor: { value: [] },
lightProbe: { value: [] },
directionalLights: { value: [], properties: {
direction: {},
color: {}
} },
directionalLightShadows: { value: [], properties: {
shadowBias: {},
shadowNormalBias: {},
shadowRadius: {},
shadowMapSize: {}
} },
directionalShadowMap: { value: [] },
directionalShadowMatrix: { value: [] },
spotLights: { value: [], properties: {
color: {},
position: {},
direction: {},
distance: {},
coneCos: {},
penumbraCos: {},
decay: {}
} },
spotLightShadows: { value: [], properties: {
shadowBias: {},
shadowNormalBias: {},
shadowRadius: {},
shadowMapSize: {}
} },
spotShadowMap: { value: [] },
spotShadowMatrix: { value: [] },
pointLights: { value: [], properties: {
color: {},
position: {},
decay: {},
distance: {}
} },
pointLightShadows: { value: [], properties: {
shadowBias: {},
shadowNormalBias: {},
shadowRadius: {},
shadowMapSize: {},
shadowCameraNear: {},
shadowCameraFar: {}
} },
pointShadowMap: { value: [] },
pointShadowMatrix: { value: [] },
hemisphereLights: { value: [], properties: {
direction: {},
skyColor: {},
groundColor: {}
} },
// TODO (abelnation): RectAreaLight BRDF data needs to be moved from example to main src
rectAreaLights: { value: [], properties: {
color: {},
position: {},
width: {},
height: {}
} },
ltc_1: { value: null },
ltc_2: { value: null }
},
points: {
diffuse: { value: new Color( 0xeeeeee ) },
opacity: { value: 1.0 },
size: { value: 1.0 },
scale: { value: 1.0 },
map: { value: null },
alphaMap: { value: null },
uvTransform: { value: new Matrix3() }
},
sprite: {
diffuse: { value: new Color( 0xeeeeee ) },
opacity: { value: 1.0 },
center: { value: new Vector2( 0.5, 0.5 ) },
rotation: { value: 0.0 },
map: { value: null },
alphaMap: { value: null },
uvTransform: { value: new Matrix3() }
}
};
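// Illustrative sketch only: each UniformsLib group above is a shared template. Before
// a material actually uses one, the renderer works on a deep copy so that every
// program gets its own mutable `value` slots (the bundle's own clone/merge helpers,
// such as the mergeUniforms used just below, live elsewhere in this file). The helper
// name below is hypothetical and exists purely for demonstration.
function cloneUniformsSketch( src ) {
	const dst = {};
	for ( const name in src ) {
		dst[ name ] = {};
		for ( const key in src[ name ] ) {
			const property = src[ name ][ key ];
			// Values that know how to clone themselves (Color, Vector2, Matrix3, ...)
			// are cloned, arrays are copied, and primitives are assigned directly.
			if ( property && property.clone !== undefined ) {
				dst[ name ][ key ] = property.clone();
			} else if ( Array.isArray( property ) ) {
				dst[ name ][ key ] = property.slice();
			} else {
				dst[ name ][ key ] = property;
			}
		}
	}
	return dst;
}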
const ShaderLib = {
basic: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.specularmap,
UniformsLib.envmap,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.fog
] ),
vertexShader: ShaderChunk.meshbasic_vert,
fragmentShader: ShaderChunk.meshbasic_frag
},
lambert: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.specularmap,
UniformsLib.envmap,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.emissivemap,
UniformsLib.fog,
UniformsLib.lights,
{
emissive: { value: new Color( 0x000000 ) }
}
] ),
vertexShader: ShaderChunk.meshlambert_vert,
fragmentShader: ShaderChunk.meshlambert_frag
},
phong: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.specularmap,
UniformsLib.envmap,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.emissivemap,
UniformsLib.bumpmap,
UniformsLib.normalmap,
UniformsLib.displacementmap,
UniformsLib.fog,
UniformsLib.lights,
{
emissive: { value: new Color( 0x000000 ) },
specular: { value: new Color( 0x111111 ) },
shininess: { value: 30 }
}
] ),
vertexShader: ShaderChunk.meshphong_vert,
fragmentShader: ShaderChunk.meshphong_frag
},
standard: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.envmap,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.emissivemap,
UniformsLib.bumpmap,
UniformsLib.normalmap,
UniformsLib.displacementmap,
UniformsLib.roughnessmap,
UniformsLib.metalnessmap,
UniformsLib.fog,
UniformsLib.lights,
{
emissive: { value: new Color( 0x000000 ) },
roughness: { value: 1.0 },
metalness: { value: 0.0 },
envMapIntensity: { value: 1 } // temporary
}
] ),
vertexShader: ShaderChunk.meshphysical_vert,
fragmentShader: ShaderChunk.meshphysical_frag
},
toon: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.emissivemap,
UniformsLib.bumpmap,
UniformsLib.normalmap,
UniformsLib.displacementmap,
UniformsLib.gradientmap,
UniformsLib.fog,
UniformsLib.lights,
{
emissive: { value: new Color( 0x000000 ) }
}
] ),
vertexShader: ShaderChunk.meshtoon_vert,
fragmentShader: ShaderChunk.meshtoon_frag
},
matcap: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.bumpmap,
UniformsLib.normalmap,
UniformsLib.displacementmap,
UniformsLib.fog,
{
matcap: { value: null }
}
] ),
vertexShader: ShaderChunk.meshmatcap_vert,
fragmentShader: ShaderChunk.meshmatcap_frag
},
points: {
uniforms: mergeUniforms( [
UniformsLib.points,
UniformsLib.fog
] ),
vertexShader: ShaderChunk.points_vert,
fragmentShader: ShaderChunk.points_frag
},
dashed: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.fog,
{
scale: { value: 1 },
dashSize: { value: 1 },
totalSize: { value: 2 }
}
] ),
vertexShader: ShaderChunk.linedashed_vert,
fragmentShader: ShaderChunk.linedashed_frag
},
depth: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.displacementmap
] ),
vertexShader: ShaderChunk.depth_vert,
fragmentShader: ShaderChunk.depth_frag
},
normal: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.bumpmap,
UniformsLib.normalmap,
UniformsLib.displacementmap,
{
opacity: { value: 1.0 }
}
] ),
vertexShader: ShaderChunk.normal_vert,
fragmentShader: ShaderChunk.normal_frag
},
sprite: {
uniforms: mergeUniforms( [
UniformsLib.sprite,
UniformsLib.fog
] ),
vertexShader: ShaderChunk.sprite_vert,
fragmentShader: ShaderChunk.sprite_frag
},
background: {
uniforms: {
uvTransform: { value: new Matrix3() },
t2D: { value: null },
},
vertexShader: ShaderChunk.background_vert,
fragmentShader: ShaderChunk.background_frag
},
/* -------------------------------------------------------------------------
// Cube map shader
------------------------------------------------------------------------- */
cube: {
uniforms: mergeUniforms( [
UniformsLib.envmap,
{
opacity: { value: 1.0 }
}
] ),
vertexShader: ShaderChunk.cube_vert,
fragmentShader: ShaderChunk.cube_frag
},
equirect: {
uniforms: {
tEquirect: { value: null },
},
vertexShader: ShaderChunk.equirect_vert,
fragmentShader: ShaderChunk.equirect_frag
},
distanceRGBA: {
uniforms: mergeUniforms( [
UniformsLib.common,
UniformsLib.displacementmap,
{
referencePosition: { value: new Vector3() },
nearDistance: { value: 1 },
farDistance: { value: 1000 }
}
] ),
vertexShader: ShaderChunk.distanceRGBA_vert,
fragmentShader: ShaderChunk.distanceRGBA_frag
},
shadow: {
uniforms: mergeUniforms( [
UniformsLib.lights,
UniformsLib.fog,
{
color: { value: new Color( 0x000000 ) },
opacity: { value: 1.0 }
},
] ),
vertexShader: ShaderChunk.shadow_vert,
fragmentShader: ShaderChunk.shadow_frag
}
};
ShaderLib.physical = {
uniforms: mergeUniforms( [
ShaderLib.standard.uniforms,
{
clearcoat: { value: 0 },
clearcoatMap: { value: null },
clearcoatRoughness: { value: 0 },
clearcoatRoughnessMap: { value: null },
clearcoatNormalScale: { value: new Vector2( 1, 1 ) },
clearcoatNormalMap: { value: null },
sheen: { value: new Color( 0x000000 ) },
transmission: { value: 0 },
transmissionMap: { value: null },
}
] ),
vertexShader: ShaderChunk.meshphysical_vert,
fragmentShader: ShaderChunk.meshphysical_frag
};
function WebGLBackground( renderer, cubemaps, state, objects, premultipliedAlpha ) {
const clearColor = new Color( 0x000000 );
let clearAlpha = 0;
let planeMesh;
let boxMesh;
let currentBackground = null;
let currentBackgroundVersion = 0;
let currentTonemapping = null;
function render( renderList, scene, camera, forceClear ) {
let background = scene.isScene === true ? scene.background : null;
if ( background && background.isTexture ) {
background = cubemaps.get( background );
}
// Ignore background in AR
// TODO: Reconsider this.
const xr = renderer.xr;
const session = xr.getSession && xr.getSession();
if ( session && session.environmentBlendMode === 'additive' ) {
background = null;
}
if ( background === null ) {
setClear( clearColor, clearAlpha );
} else if ( background && background.isColor ) {
setClear( background, 1 );
forceClear = true;
}
if ( renderer.autoClear || forceClear ) {
renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil );
}
if ( background && ( background.isCubeTexture || background.mapping === CubeUVReflectionMapping ) ) {
if ( boxMesh === undefined ) {
boxMesh = new Mesh(
new BoxGeometry( 1, 1, 1 ),
new ShaderMaterial( {
name: 'BackgroundCubeMaterial',
uniforms: cloneUniforms( ShaderLib.cube.uniforms ),
vertexShader: ShaderLib.cube.vertexShader,
fragmentShader: ShaderLib.cube.fragmentShader,
side: BackSide,
depthTest: false,
depthWrite: false,
fog: false
} )
);
boxMesh.geometry.deleteAttribute( 'normal' );
boxMesh.geometry.deleteAttribute( 'uv' );
boxMesh.onBeforeRender = function ( renderer, scene, camera ) {
this.matrixWorld.copyPosition( camera.matrixWorld );
};
// enable code injection for non-built-in material
Object.defineProperty( boxMesh.material, 'envMap', {
get: function () {
return this.uniforms.envMap.value;
}
} );
objects.update( boxMesh );
}
boxMesh.material.uniforms.envMap.value = background;
boxMesh.material.uniforms.flipEnvMap.value = ( background.isCubeTexture && background._needsFlipEnvMap ) ? - 1 : 1;
if ( currentBackground !== background ||
currentBackgroundVersion !== background.version ||
currentTonemapping !== renderer.toneMapping ) {
boxMesh.material.needsUpdate = true;
currentBackground = background;
currentBackgroundVersion = background.version;
currentTonemapping = renderer.toneMapping;
}
// push to the pre-sorted opaque render list
renderList.unshift( boxMesh, boxMesh.geometry, boxMesh.material, 0, 0, null );
} else if ( background && background.isTexture ) {
if ( planeMesh === undefined ) {
planeMesh = new Mesh(
new PlaneGeometry( 2, 2 ),
new ShaderMaterial( {
name: 'BackgroundMaterial',
uniforms: cloneUniforms( ShaderLib.background.uniforms ),
vertexShader: ShaderLib.background.vertexShader,
fragmentShader: ShaderLib.background.fragmentShader,
side: FrontSide,
depthTest: false,
depthWrite: false,
fog: false
} )
);
planeMesh.geometry.deleteAttribute( 'normal' );
// enable code injection for non-built-in material
Object.defineProperty( planeMesh.material, 'map', {
get: function () {
return this.uniforms.t2D.value;
}
} );
objects.update( planeMesh );
}
planeMesh.material.uniforms.t2D.value = background;
if ( background.matrixAutoUpdate === true ) {
background.updateMatrix();
}
planeMesh.material.uniforms.uvTransform.value.copy( background.matrix );
if ( currentBackground !== background ||
currentBackgroundVersion !== background.version ||
currentTonemapping !== renderer.toneMapping ) {
planeMesh.material.needsUpdate = true;
currentBackground = background;
currentBackgroundVersion = background.version;
currentTonemapping = renderer.toneMapping;
}
// push to the pre-sorted opaque render list
renderList.unshift( planeMesh, planeMesh.geometry, planeMesh.material, 0, 0, null );
}
}
function setClear( color, alpha ) {
state.buffers.color.setClear( color.r, color.g, color.b, alpha, premultipliedAlpha );
}
return {
getClearColor: function () {
return clearColor;
},
setClearColor: function ( color, alpha = 1 ) {
clearColor.set( color );
clearAlpha = alpha;
setClear( clearColor, clearAlpha );
},
getClearAlpha: function () {
return clearAlpha;
},
setClearAlpha: function ( alpha ) {
clearAlpha = alpha;
setClear( clearColor, clearAlpha );
},
render: render
};
}
function WebGLBindingStates( gl, extensions, attributes, capabilities ) {
const maxVertexAttributes = gl.getParameter( 34921 );
const extension = capabilities.isWebGL2 ? null : extensions.get( 'OES_vertex_array_object' );
const vaoAvailable = capabilities.isWebGL2 || extension !== null;
const bindingStates = {};
const defaultState = createBindingState( null );
let currentState = defaultState;
function setup( object, material, program, geometry, index ) {
let updateBuffers = false;
if ( vaoAvailable ) {
const state = getBindingState( geometry, program, material );
if ( currentState !== state ) {
currentState = state;
bindVertexArrayObject( currentState.object );
}
updateBuffers = needsUpdate( geometry, index );
if ( updateBuffers ) saveCache( geometry, index );
} else {
const wireframe = ( material.wireframe === true );
if ( currentState.geometry !== geometry.id ||
currentState.program !== program.id ||
currentState.wireframe !== wireframe ) {
currentState.geometry = geometry.id;
currentState.program = program.id;
currentState.wireframe = wireframe;
updateBuffers = true;
}
}
if ( object.isInstancedMesh === true ) {
updateBuffers = true;
}
if ( index !== null ) {
attributes.update( index, 34963 );
}
if ( updateBuffers ) {
setupVertexAttributes( object, material, program, geometry );
if ( index !== null ) {
gl.bindBuffer( 34963, attributes.get( index ).buffer );
}
}
}
function createVertexArrayObject() {
if ( capabilities.isWebGL2 ) return gl.createVertexArray();
return extension.createVertexArrayOES();
}
function bindVertexArrayObject( vao ) {
if ( capabilities.isWebGL2 ) return gl.bindVertexArray( vao );
return extension.bindVertexArrayOES( vao );
}
function deleteVertexArrayObject( vao ) {
if ( capabilities.isWebGL2 ) return gl.deleteVertexArray( vao );
return extension.deleteVertexArrayOES( vao );
}
function getBindingState( geometry, program, material ) {
const wireframe = ( material.wireframe === true );
let programMap = bindingStates[ geometry.id ];
if ( programMap === undefined ) {
programMap = {};
bindingStates[ geometry.id ] = programMap;
}
let stateMap = programMap[ program.id ];
if ( stateMap === undefined ) {
stateMap = {};
programMap[ program.id ] = stateMap;
}
let state = stateMap[ wireframe ];
if ( state === undefined ) {
state = createBindingState( createVertexArrayObject() );
stateMap[ wireframe ] = state;
}
return state;
}
function createBindingState( vao ) {
const newAttributes = [];
const enabledAttributes = [];
const attributeDivisors = [];
for ( let i = 0; i < maxVertexAttributes; i ++ ) {
newAttributes[ i ] = 0;
enabledAttributes[ i ] = 0;
attributeDivisors[ i ] = 0;
}
return {
// for backward compatibility on browsers without VAO support
geometry: null,
program: null,
wireframe: false,
newAttributes: newAttributes,
enabledAttributes: enabledAttributes,
attributeDivisors: attributeDivisors,
object: vao,
attributes: {},
index: null
};
}
function needsUpdate( geometry, index ) {
const cachedAttributes = currentState.attributes;
const geometryAttributes = geometry.attributes;
let attributesNum = 0;
for ( const key in geometryAttributes ) {
const cachedAttribute = cachedAttributes[ key ];
const geometryAttribute = geometryAttributes[ key ];
if ( cachedAttribute === undefined ) return true;
if ( cachedAttribute.attribute !== geometryAttribute ) return true;
if ( cachedAttribute.data !== geometryAttribute.data ) return true;
attributesNum ++;
}
if ( currentState.attributesNum !== attributesNum ) return true;
if ( currentState.index !== index ) return true;
return false;
}
function saveCache( geometry, index ) {
const cache = {};
const attributes = geometry.attributes;
let attributesNum = 0;
for ( const key in attributes ) {
const attribute = attributes[ key ];
const data = {};
data.attribute = attribute;
if ( attribute.data ) {
data.data = attribute.data;
}
cache[ key ] = data;
attributesNum ++;
}
currentState.attributes = cache;
currentState.attributesNum = attributesNum;
currentState.index = index;
}
function initAttributes() {
const newAttributes = currentState.newAttributes;
for ( let i = 0, il = newAttributes.length; i < il; i ++ ) {
newAttributes[ i ] = 0;
}
}
function enableAttribute( attribute ) {
enableAttributeAndDivisor( attribute, 0 );
}
function enableAttributeAndDivisor( attribute, meshPerAttribute ) {
const newAttributes = currentState.newAttributes;
const enabledAttributes = currentState.enabledAttributes;
const attributeDivisors = currentState.attributeDivisors;
newAttributes[ attribute ] = 1;
if ( enabledAttributes[ attribute ] === 0 ) {
gl.enableVertexAttribArray( attribute );
enabledAttributes[ attribute ] = 1;
}
if ( attributeDivisors[ attribute ] !== meshPerAttribute ) {
const extension = capabilities.isWebGL2 ? gl : extensions.get( 'ANGLE_instanced_arrays' );
extension[ capabilities.isWebGL2 ? 'vertexAttribDivisor' : 'vertexAttribDivisorANGLE' ]( attribute, meshPerAttribute );
attributeDivisors[ attribute ] = meshPerAttribute;
}
}
function disableUnusedAttributes() {
const newAttributes = currentState.newAttributes;
const enabledAttributes = currentState.enabledAttributes;
for ( let i = 0, il = enabledAttributes.length; i < il; i ++ ) {
if ( enabledAttributes[ i ] !== newAttributes[ i ] ) {
gl.disableVertexAttribArray( i );
enabledAttributes[ i ] = 0;
}
}
}
function vertexAttribPointer( index, size, type, normalized, stride, offset ) {
if ( capabilities.isWebGL2 === true && ( type === 5124 || type === 5125 ) ) {
gl.vertexAttribIPointer( index, size, type, stride, offset );
} else {
gl.vertexAttribPointer( index, size, type, normalized, stride, offset );
}
}
function setupVertexAttributes( object, material, program, geometry ) {
if ( capabilities.isWebGL2 === false && ( object.isInstancedMesh || geometry.isInstancedBufferGeometry ) ) {
if ( extensions.get( 'ANGLE_instanced_arrays' ) === null ) return;
}
initAttributes();
const geometryAttributes = geometry.attributes;
const programAttributes = program.getAttributes();
const materialDefaultAttributeValues = material.defaultAttributeValues;
for ( const name in programAttributes ) {
const programAttribute = programAttributes[ name ];
if ( programAttribute >= 0 ) {
const geometryAttribute = geometryAttributes[ name ];
if ( geometryAttribute !== undefined ) {
const normalized = geometryAttribute.normalized;
const size = geometryAttribute.itemSize;
const attribute = attributes.get( geometryAttribute );
// TODO Attribute may not be available on context restore
if ( attribute === undefined ) continue;
const buffer = attribute.buffer;
const type = attribute.type;
const bytesPerElement = attribute.bytesPerElement;
if ( geometryAttribute.isInterleavedBufferAttribute ) {
const data = geometryAttribute.data;
const stride = data.stride;
const offset = geometryAttribute.offset;
if ( data && data.isInstancedInterleavedBuffer ) {
enableAttributeAndDivisor( programAttribute, data.meshPerAttribute );
if ( geometry._maxInstanceCount === undefined ) {
geometry._maxInstanceCount = data.meshPerAttribute * data.count;
}
} else {
enableAttribute( programAttribute );
}
gl.bindBuffer( 34962, buffer );
vertexAttribPointer( programAttribute, size, type, normalized, stride * bytesPerElement, offset * bytesPerElement );
} else {
if ( geometryAttribute.isInstancedBufferAttribute ) {
enableAttributeAndDivisor( programAttribute, geometryAttribute.meshPerAttribute );
if ( geometry._maxInstanceCount === undefined ) {
geometry._maxInstanceCount = geometryAttribute.meshPerAttribute * geometryAttribute.count;
}
} else {
enableAttribute( programAttribute );
}
gl.bindBuffer( 34962, buffer );
vertexAttribPointer( programAttribute, size, type, normalized, 0, 0 );
}
} else if ( name === 'instanceMatrix' ) {
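// instanceMatrix is a mat4 attribute, which occupies four consecutive
// vec4 attribute locations; each column is read with a 64-byte stride
// (16 floats per instance) and a 16-byte column offset, advancing once
// per instance (divisor 1)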
const attribute = attributes.get( object.instanceMatrix );
// TODO Attribute may not be available on context restore
if ( attribute === undefined ) continue;
const buffer = attribute.buffer;
const type = attribute.type;
enableAttributeAndDivisor( programAttribute + 0, 1 );
enableAttributeAndDivisor( programAttribute + 1, 1 );
enableAttributeAndDivisor( programAttribute + 2, 1 );
enableAttributeAndDivisor( programAttribute + 3, 1 );
gl.bindBuffer( 34962, buffer );
gl.vertexAttribPointer( programAttribute + 0, 4, type, false, 64, 0 );
gl.vertexAttribPointer( programAttribute + 1, 4, type, false, 64, 16 );
gl.vertexAttribPointer( programAttribute + 2, 4, type, false, 64, 32 );
gl.vertexAttribPointer( programAttribute + 3, 4, type, false, 64, 48 );
} else if ( name === 'instanceColor' ) {
const attribute = attributes.get( object.instanceColor );
// TODO Attribute may not be available on context restore
if ( attribute === undefined ) continue;
const buffer = attribute.buffer;
const type = attribute.type;
enableAttributeAndDivisor( programAttribute, 1 );
gl.bindBuffer( 34962, buffer );
gl.vertexAttribPointer( programAttribute, 3, type, false, 12, 0 );
} else if ( materialDefaultAttributeValues !== undefined ) {
const value = materialDefaultAttributeValues[ name ];
if ( value !== undefined ) {
switch ( value.length ) {
case 2:
gl.vertexAttrib2fv( programAttribute, value );
break;
case 3:
gl.vertexAttrib3fv( programAttribute, value );
break;
case 4:
gl.vertexAttrib4fv( programAttribute, value );
break;
default:
gl.vertexAttrib1fv( programAttribute, value );
}
}
}
}
}
disableUnusedAttributes();
}
function dispose() {
reset();
for ( const geometryId in bindingStates ) {
const programMap = bindingStates[ geometryId ];
for ( const programId in programMap ) {
const stateMap = programMap[ programId ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ programId ];
}
delete bindingStates[ geometryId ];
}
}
function releaseStatesOfGeometry( geometry ) {
if ( bindingStates[ geometry.id ] === undefined ) return;
const programMap = bindingStates[ geometry.id ];
for ( const programId in programMap ) {
const stateMap = programMap[ programId ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ programId ];
}
delete bindingStates[ geometry.id ];
}
function releaseStatesOfProgram( program ) {
for ( const geometryId in bindingStates ) {
const programMap = bindingStates[ geometryId ];
if ( programMap[ program.id ] === undefined ) continue;
const stateMap = programMap[ program.id ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ program.id ];
}
}
function reset() {
resetDefaultState();
if ( currentState === defaultState ) return;
currentState = defaultState;
bindVertexArrayObject( currentState.object );
}
// for backward compatibility
function resetDefaultState() {
defaultState.geometry = null;
defaultState.program = null;
defaultState.wireframe = false;
}
return {
setup: setup,
reset: reset,
resetDefaultState: resetDefaultState,
dispose: dispose,
releaseStatesOfGeometry: releaseStatesOfGeometry,
releaseStatesOfProgram: releaseStatesOfProgram,
initAttributes: initAttributes,
enableAttribute: enableAttribute,
disableUnusedAttributes: disableUnusedAttributes
};
}
function WebGLBufferRenderer( gl, extensions, info, capabilities ) {
const isWebGL2 = capabilities.isWebGL2;
let mode;
function setMode( value ) {
mode = value;
}
function render( start, count ) {
gl.drawArrays( mode, start, count );
info.update( count, mode, 1 );
}
function renderInstances( start, count, primcount ) {
if ( primcount === 0 ) return;
let extension, methodName;
if ( isWebGL2 ) {
extension = gl;
methodName = 'drawArraysInstanced';
} else {
extension = extensions.get( 'ANGLE_instanced_arrays' );
methodName = 'drawArraysInstancedANGLE';
if ( extension === null ) {
console.error( 'THREE.WebGLBufferRenderer: using THREE.InstancedBufferGeometry but hardware does not support extension ANGLE_instanced_arrays.' );
return;
}
}
extension[ methodName ]( mode, start, count, primcount );
info.update( count, mode, primcount );
}
//
this.setMode = setMode;
this.render = render;
this.renderInstances = renderInstances;
}
function WebGLCapabilities( gl, extensions, parameters ) {
let maxAnisotropy;
function getMaxAnisotropy() {
if ( maxAnisotropy !== undefined ) return maxAnisotropy;
if ( extensions.has( 'EXT_texture_filter_anisotropic' ) === true ) {
const extension = extensions.get( 'EXT_texture_filter_anisotropic' );
maxAnisotropy = gl.getParameter( extension.MAX_TEXTURE_MAX_ANISOTROPY_EXT );
} else {
maxAnisotropy = 0;
}
return maxAnisotropy;
}
function getMaxPrecision( precision ) {
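// raw WebGL constants: 35633 = VERTEX_SHADER, 35632 = FRAGMENT_SHADER,
// 36338 = HIGH_FLOAT, 36337 = MEDIUM_FLOAT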
if ( precision === 'highp' ) {
if ( gl.getShaderPrecisionFormat( 35633, 36338 ).precision > 0 &&
gl.getShaderPrecisionFormat( 35632, 36338 ).precision > 0 ) {
return 'highp';
}
precision = 'mediump';
}
if ( precision === 'mediump' ) {
if ( gl.getShaderPrecisionFormat( 35633, 36337 ).precision > 0 &&
gl.getShaderPrecisionFormat( 35632, 36337 ).precision > 0 ) {
return 'mediump';
}
}
return 'lowp';
}
/* eslint-disable no-undef */
const isWebGL2 = ( typeof WebGL2RenderingContext !== 'undefined' && gl instanceof WebGL2RenderingContext ) ||
( typeof WebGL2ComputeRenderingContext !== 'undefined' && gl instanceof WebGL2ComputeRenderingContext );
/* eslint-enable no-undef */
let precision = parameters.precision !== undefined ? parameters.precision : 'highp';
const maxPrecision = getMaxPrecision( precision );
if ( maxPrecision !== precision ) {
console.warn( 'THREE.WebGLRenderer:', precision, 'not supported, using', maxPrecision, 'instead.' );
precision = maxPrecision;
}
const logarithmicDepthBuffer = parameters.logarithmicDepthBuffer === true;
const maxTextures = gl.getParameter( 34930 );
const maxVertexTextures = gl.getParameter( 35660 );
const maxTextureSize = gl.getParameter( 3379 );
const maxCubemapSize = gl.getParameter( 34076 );
const maxAttributes = gl.getParameter( 34921 );
const maxVertexUniforms = gl.getParameter( 36347 );
const maxVaryings = gl.getParameter( 36348 );
const maxFragmentUniforms = gl.getParameter( 36349 );
const vertexTextures = maxVertexTextures > 0;
const floatFragmentTextures = isWebGL2 || extensions.has( 'OES_texture_float' );
const floatVertexTextures = vertexTextures && floatFragmentTextures;
const maxSamples = isWebGL2 ? gl.getParameter( 36183 ) : 0;
return {
isWebGL2: isWebGL2,
getMaxAnisotropy: getMaxAnisotropy,
getMaxPrecision: getMaxPrecision,
precision: precision,
logarithmicDepthBuffer: logarithmicDepthBuffer,
maxTextures: maxTextures,
maxVertexTextures: maxVertexTextures,
maxTextureSize: maxTextureSize,
maxCubemapSize: maxCubemapSize,
maxAttributes: maxAttributes,
maxVertexUniforms: maxVertexUniforms,
maxVaryings: maxVaryings,
maxFragmentUniforms: maxFragmentUniforms,
vertexTextures: vertexTextures,
floatFragmentTextures: floatFragmentTextures,
floatVertexTextures: floatVertexTextures,
maxSamples: maxSamples
};
}
function WebGLClipping( properties ) {
const scope = this;
let globalState = null,
numGlobalPlanes = 0,
localClippingEnabled = false,
renderingShadows = false;
const plane = new Plane(),
viewNormalMatrix = new Matrix3(),
uniform = { value: null, needsUpdate: false };
this.uniform = uniform;
this.numPlanes = 0;
this.numIntersection = 0;
this.init = function ( planes, enableLocalClipping, camera ) {
const enabled =
planes.length !== 0 ||
enableLocalClipping ||
// enable state of previous frame - the clipping code has to
// run another frame in order to reset the state:
numGlobalPlanes !== 0 ||
localClippingEnabled;
localClippingEnabled = enableLocalClipping;
globalState = projectPlanes( planes, camera, 0 );
numGlobalPlanes = planes.length;
return enabled;
};
this.beginShadows = function () {
renderingShadows = true;
projectPlanes( null );
};
this.endShadows = function () {
renderingShadows = false;
resetGlobalState();
};
this.setState = function ( material, camera, useCache ) {
const planes = material.clippingPlanes,
clipIntersection = material.clipIntersection,
clipShadows = material.clipShadows;
const materialProperties = properties.get( material );
if ( ! localClippingEnabled || planes === null || planes.length === 0 || renderingShadows && ! clipShadows ) {
// there's no local clipping
if ( renderingShadows ) {
// there's no global clipping
projectPlanes( null );
} else {
resetGlobalState();
}
} else {
const nGlobal = renderingShadows ? 0 : numGlobalPlanes,
lGlobal = nGlobal * 4;
let dstArray = materialProperties.clippingState || null;
uniform.value = dstArray; // ensure unique state
dstArray = projectPlanes( planes, camera, lGlobal, useCache );
for ( let i = 0; i !== lGlobal; ++ i ) {
dstArray[ i ] = globalState[ i ];
}
materialProperties.clippingState = dstArray;
this.numIntersection = clipIntersection ? this.numPlanes : 0;
this.numPlanes += nGlobal;
}
};
function resetGlobalState() {
if ( uniform.value !== globalState ) {
uniform.value = globalState;
uniform.needsUpdate = numGlobalPlanes > 0;
}
scope.numPlanes = numGlobalPlanes;
scope.numIntersection = 0;
}
function projectPlanes( planes, camera, dstOffset, skipTransform ) {
const nPlanes = planes !== null ? planes.length : 0;
let dstArray = null;
if ( nPlanes !== 0 ) {
dstArray = uniform.value;
if ( skipTransform !== true || dstArray === null ) {
const flatSize = dstOffset + nPlanes * 4,
viewMatrix = camera.matrixWorldInverse;
viewNormalMatrix.getNormalMatrix( viewMatrix );
if ( dstArray === null || dstArray.length < flatSize ) {
dstArray = new Float32Array( flatSize );
}
for ( let i = 0, i4 = dstOffset; i !== nPlanes; ++ i, i4 += 4 ) {
plane.copy( planes[ i ] ).applyMatrix4( viewMatrix, viewNormalMatrix );
plane.normal.toArray( dstArray, i4 );
dstArray[ i4 + 3 ] = plane.constant;
}
}
uniform.value = dstArray;
uniform.needsUpdate = true;
}
scope.numPlanes = nPlanes;
scope.numIntersection = 0;
return dstArray;
}
}
function WebGLCubeMaps( renderer ) {
let cubemaps = new WeakMap();
function mapTextureMapping( texture, mapping ) {
if ( mapping === EquirectangularReflectionMapping ) {
texture.mapping = CubeReflectionMapping;
} else if ( mapping === EquirectangularRefractionMapping ) {
texture.mapping = CubeRefractionMapping;
}
return texture;
}
function get( texture ) {
if ( texture && texture.isTexture ) {
const mapping = texture.mapping;
if ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ) {
if ( cubemaps.has( texture ) ) {
const cubemap = cubemaps.get( texture ).texture;
return mapTextureMapping( cubemap, texture.mapping );
} else {
const image = texture.image;
if ( image && image.height > 0 ) {
const currentRenderTarget = renderer.getRenderTarget();
const renderTarget = new WebGLCubeRenderTarget( image.height / 2 );
renderTarget.fromEquirectangularTexture( renderer, texture );
cubemaps.set( texture, renderTarget );
renderer.setRenderTarget( currentRenderTarget );
texture.addEventListener( 'dispose', onTextureDispose );
return mapTextureMapping( renderTarget.texture, texture.mapping );
} else {
// image not yet ready. try the conversion next frame
return null;
}
}
}
}
return texture;
}
function onTextureDispose( event ) {
const texture = event.target;
texture.removeEventListener( 'dispose', onTextureDispose );
const cubemap = cubemaps.get( texture );
if ( cubemap !== undefined ) {
cubemaps.delete( texture );
cubemap.dispose();
}
}
function dispose() {
cubemaps = new WeakMap();
}
return {
get: get,
dispose: dispose
};
}
function WebGLExtensions( gl ) {
const extensions = {};
function getExtension( name ) {
if ( extensions[ name ] !== undefined ) {
return extensions[ name ];
}
let extension;
switch ( name ) {
case 'WEBGL_depth_texture':
extension = gl.getExtension( 'WEBGL_depth_texture' ) || gl.getExtension( 'MOZ_WEBGL_depth_texture' ) || gl.getExtension( 'WEBKIT_WEBGL_depth_texture' );
break;
case 'EXT_texture_filter_anisotropic':
extension = gl.getExtension( 'EXT_texture_filter_anisotropic' ) || gl.getExtension( 'MOZ_EXT_texture_filter_anisotropic' ) || gl.getExtension( 'WEBKIT_EXT_texture_filter_anisotropic' );
break;
case 'WEBGL_compressed_texture_s3tc':
extension = gl.getExtension( 'WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'MOZ_WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_s3tc' );
break;
case 'WEBGL_compressed_texture_pvrtc':
extension = gl.getExtension( 'WEBGL_compressed_texture_pvrtc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_pvrtc' );
break;
default:
extension = gl.getExtension( name );
}
extensions[ name ] = extension;
return extension;
}
return {
has: function ( name ) {
return getExtension( name ) !== null;
},
init: function ( capabilities ) {
if ( capabilities.isWebGL2 ) {
getExtension( 'EXT_color_buffer_float' );
} else {
getExtension( 'WEBGL_depth_texture' );
getExtension( 'OES_texture_float' );
getExtension( 'OES_texture_half_float' );
getExtension( 'OES_texture_half_float_linear' );
getExtension( 'OES_standard_derivatives' );
getExtension( 'OES_element_index_uint' );
getExtension( 'OES_vertex_array_object' );
getExtension( 'ANGLE_instanced_arrays' );
}
getExtension( 'OES_texture_float_linear' );
getExtension( 'EXT_color_buffer_half_float' );
},
get: function ( name ) {
const extension = getExtension( name );
if ( extension === null ) {
console.warn( 'THREE.WebGLRenderer: ' + name + ' extension not supported.' );
}
return extension;
}
};
}
function WebGLGeometries( gl, attributes, info, bindingStates ) {
const geometries = {};
const wireframeAttributes = new WeakMap();
function onGeometryDispose( event ) {
const geometry = event.target;
if ( geometry.index !== null ) {
attributes.remove( geometry.index );
}
for ( const name in geometry.attributes ) {
attributes.remove( geometry.attributes[ name ] );
}
geometry.removeEventListener( 'dispose', onGeometryDispose );
delete geometries[ geometry.id ];
const attribute = wireframeAttributes.get( geometry );
if ( attribute ) {
attributes.remove( attribute );
wireframeAttributes.delete( geometry );
}
bindingStates.releaseStatesOfGeometry( geometry );
if ( geometry.isInstancedBufferGeometry === true ) {
delete geometry._maxInstanceCount;
}
//
info.memory.geometries --;
}
function get( object, geometry ) {
if ( geometries[ geometry.id ] === true ) return geometry;
geometry.addEventListener( 'dispose', onGeometryDispose );
geometries[ geometry.id ] = true;
info.memory.geometries ++;
return geometry;
}
function update( geometry ) {
const geometryAttributes = geometry.attributes;
// Updating index buffer in VAO now. See WebGLBindingStates.
for ( const name in geometryAttributes ) {
attributes.update( geometryAttributes[ name ], 34962 );
}
// morph targets
const morphAttributes = geometry.morphAttributes;
for ( const name in morphAttributes ) {
const array = morphAttributes[ name ];
for ( let i = 0, l = array.length; i < l; i ++ ) {
attributes.update( array[ i ], 34962 );
}
}
}
function updateWireframeAttribute( geometry ) {
const indices = [];
const geometryIndex = geometry.index;
const geometryPosition = geometry.attributes.position;
let version = 0;
if ( geometryIndex !== null ) {
const array = geometryIndex.array;
version = geometryIndex.version;
for ( let i = 0, l = array.length; i < l; i += 3 ) {
const a = array[ i + 0 ];
const b = array[ i + 1 ];
const c = array[ i + 2 ];
indices.push( a, b, b, c, c, a );
}
} else {
const array = geometryPosition.array;
version = geometryPosition.version;
for ( let i = 0, l = ( array.length / 3 ) - 1; i < l; i += 3 ) {
const a = i + 0;
const b = i + 1;
const c = i + 2;
indices.push( a, b, b, c, c, a );
}
}
const attribute = new ( arrayMax( indices ) > 65535 ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 );
attribute.version = version;
// Updating index buffer in VAO now. See WebGLBindingStates
//
const previousAttribute = wireframeAttributes.get( geometry );
if ( previousAttribute ) attributes.remove( previousAttribute );
//
wireframeAttributes.set( geometry, attribute );
}
function getWireframeAttribute( geometry ) {
const currentAttribute = wireframeAttributes.get( geometry );
if ( currentAttribute ) {
const geometryIndex = geometry.index;
if ( geometryIndex !== null ) {
// if the attribute is obsolete, create a new one
if ( currentAttribute.version < geometryIndex.version ) {
updateWireframeAttribute( geometry );
}
}
} else {
updateWireframeAttribute( geometry );
}
return wireframeAttributes.get( geometry );
}
return {
get: get,
update: update,
getWireframeAttribute: getWireframeAttribute
};
}
function WebGLIndexedBufferRenderer( gl, extensions, info, capabilities ) {
const isWebGL2 = capabilities.isWebGL2;
let mode;
function setMode( value ) {
mode = value;
}
let type, bytesPerElement;
function setIndex( value ) {
type = value.type;
bytesPerElement = value.bytesPerElement;
}
function render( start, count ) {
gl.drawElements( mode, count, type, start * bytesPerElement );
info.update( count, mode, 1 );
}
function renderInstances( start, count, primcount ) {
if ( primcount === 0 ) return;
let extension, methodName;
if ( isWebGL2 ) {
extension = gl;
methodName = 'drawElementsInstanced';
} else {
extension = extensions.get( 'ANGLE_instanced_arrays' );
methodName = 'drawElementsInstancedANGLE';
if ( extension === null ) {
console.error( 'THREE.WebGLIndexedBufferRenderer: using THREE.InstancedBufferGeometry but hardware does not support extension ANGLE_instanced_arrays.' );
return;
}
}
extension[ methodName ]( mode, count, type, start * bytesPerElement, primcount );
info.update( count, mode, primcount );
}
//
this.setMode = setMode;
this.setIndex = setIndex;
this.render = render;
this.renderInstances = renderInstances;
}
function WebGLInfo( gl ) {
const memory = {
geometries: 0,
textures: 0
};
const render = {
frame: 0,
calls: 0,
triangles: 0,
points: 0,
lines: 0
};
function update( count, mode, instanceCount ) {
render.calls ++;
switch ( mode ) {
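// mode is a raw WebGL draw-mode constant:
// 0 = POINTS, 1 = LINES, 2 = LINE_LOOP, 3 = LINE_STRIP, 4 = TRIANGLES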
case 4:
render.triangles += instanceCount * ( count / 3 );
break;
case 1:
render.lines += instanceCount * ( count / 2 );
break;
case 3:
render.lines += instanceCount * ( count - 1 );
break;
case 2:
render.lines += instanceCount * count;
break;
case 0:
render.points += instanceCount * count;
break;
default:
console.error( 'THREE.WebGLInfo: Unknown draw mode:', mode );
break;
}
}
function reset() {
render.frame ++;
render.calls = 0;
render.triangles = 0;
render.points = 0;
render.lines = 0;
}
return {
memory: memory,
render: render,
programs: null,
autoReset: true,
reset: reset,
update: update
};
}
function numericalSort( a, b ) {
return a[ 0 ] - b[ 0 ];
}
function absNumericalSort( a, b ) {
return Math.abs( b[ 1 ] ) - Math.abs( a[ 1 ] );
}
function WebGLMorphtargets( gl ) {
const influencesList = {};
const morphInfluences = new Float32Array( 8 );
const workInfluences = [];
for ( let i = 0; i < 8; i ++ ) {
workInfluences[ i ] = [ i, 0 ];
}
function update( object, geometry, material, program ) {
const objectInfluences = object.morphTargetInfluences;
// When the object doesn't have morph target influences defined, we treat it as a 0-length array
// This is important to make sure we set up morphTargetBaseInfluence / morphTargetInfluences
const length = objectInfluences === undefined ? 0 : objectInfluences.length;
let influences = influencesList[ geometry.id ];
if ( influences === undefined ) {
// initialise list
influences = [];
for ( let i = 0; i < length; i ++ ) {
influences[ i ] = [ i, 0 ];
}
influencesList[ geometry.id ] = influences;
}
// Collect influences
for ( let i = 0; i < length; i ++ ) {
const influence = influences[ i ];
influence[ 0 ] = i;
influence[ 1 ] = objectInfluences[ i ];
}
influences.sort( absNumericalSort );
for ( let i = 0; i < 8; i ++ ) {
if ( i < length && influences[ i ][ 1 ] ) {
workInfluences[ i ][ 0 ] = influences[ i ][ 0 ];
workInfluences[ i ][ 1 ] = influences[ i ][ 1 ];
} else {
workInfluences[ i ][ 0 ] = Number.MAX_SAFE_INTEGER;
workInfluences[ i ][ 1 ] = 0;
}
}
workInfluences.sort( numericalSort );
const morphTargets = material.morphTargets && geometry.morphAttributes.position;
const morphNormals = material.morphNormals && geometry.morphAttributes.normal;
let morphInfluencesSum = 0;
for ( let i = 0; i < 8; i ++ ) {
const influence = workInfluences[ i ];
const index = influence[ 0 ];
const value = influence[ 1 ];
if ( index !== Number.MAX_SAFE_INTEGER && value ) {
if ( morphTargets && geometry.getAttribute( 'morphTarget' + i ) !== morphTargets[ index ] ) {
geometry.setAttribute( 'morphTarget' + i, morphTargets[ index ] );
}
if ( morphNormals && geometry.getAttribute( 'morphNormal' + i ) !== morphNormals[ index ] ) {
geometry.setAttribute( 'morphNormal' + i, morphNormals[ index ] );
}
morphInfluences[ i ] = value;
morphInfluencesSum += value;
} else {
if ( morphTargets && geometry.hasAttribute( 'morphTarget' + i ) === true ) {
geometry.deleteAttribute( 'morphTarget' + i );
}
if ( morphNormals && geometry.hasAttribute( 'morphNormal' + i ) === true ) {
geometry.deleteAttribute( 'morphNormal' + i );
}
morphInfluences[ i ] = 0;
}
}
// GLSL shader uses formula baseinfluence * base + sum(target * influence)
// This allows us to switch between absolute morphs and relative morphs without changing shader code
// When baseinfluence = 1 - sum(influence), the above is equivalent to sum((target - base) * influence)
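// Expanded, writing baseinfluence = 1 - sum( influence_i ):
//   ( 1 - sum( influence_i ) ) * base + sum( target_i * influence_i )
//     = base + sum( ( target_i - base ) * influence_i )
// i.e. the base attribute plus a weighted sum of per-target deltas.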
const morphBaseInfluence = geometry.morphTargetsRelative ? 1 : 1 - morphInfluencesSum;
program.getUniforms().setValue( gl, 'morphTargetBaseInfluence', morphBaseInfluence );
program.getUniforms().setValue( gl, 'morphTargetInfluences', morphInfluences );
}
return {
update: update
};
}
function WebGLObjects( gl, geometries, attributes, info ) {
let updateMap = new WeakMap();
function update( object ) {
const frame = info.render.frame;
const geometry = object.geometry;
const buffergeometry = geometries.get( object, geometry );
// Update once per frame
if ( updateMap.get( buffergeometry ) !== frame ) {
geometries.update( buffergeometry );
updateMap.set( buffergeometry, frame );
}
if ( object.isInstancedMesh ) {
if ( object.hasEventListener( 'dispose', onInstancedMeshDispose ) === false ) {
object.addEventListener( 'dispose', onInstancedMeshDispose );
}
attributes.update( object.instanceMatrix, 34962 );
if ( object.instanceColor !== null ) {
attributes.update( object.instanceColor, 34962 );
}
}
return buffergeometry;
}
function dispose() {
updateMap = new WeakMap();
}
function onInstancedMeshDispose( event ) {
const instancedMesh = event.target;
instancedMesh.removeEventListener( 'dispose', onInstancedMeshDispose );
attributes.remove( instancedMesh.instanceMatrix );
if ( instancedMesh.instanceColor !== null ) attributes.remove( instancedMesh.instanceColor );
}
return {
update: update,
dispose: dispose
};
}
class DataTexture2DArray extends Texture$1 {
constructor( data = null, width = 1, height = 1, depth = 1 ) {
super( null );
this.image = { data, width, height, depth };
this.magFilter = NearestFilter;
this.minFilter = NearestFilter;
this.wrapR = ClampToEdgeWrapping;
this.generateMipmaps = false;
this.flipY = false;
this.unpackAlignment = 1;
this.needsUpdate = true;
}
}
DataTexture2DArray.prototype.isDataTexture2DArray = true;
class DataTexture3D extends Texture$1 {
constructor( data = null, width = 1, height = 1, depth = 1 ) {
// We're going to add .setXXX() methods for setting properties later.
// Until then, users can still set properties on DataTexture3D directly.
//
// const texture = new THREE.DataTexture3D( data, width, height, depth );
// texture.anisotropy = 16;
//
// See #14839
super( null );
this.image = { data, width, height, depth };
this.magFilter = NearestFilter;
this.minFilter = NearestFilter;
this.wrapR = ClampToEdgeWrapping;
this.generateMipmaps = false;
this.flipY = false;
this.unpackAlignment = 1;
this.needsUpdate = true;
}
}
DataTexture3D.prototype.isDataTexture3D = true;
/**
* Uniforms of a program.
* These form a tree structure with a special top-level container for the root,
* which you get by calling 'new WebGLUniforms( gl, program )'.
*
*
* Properties of inner nodes including the top-level container:
*
* .seq - array of nested uniforms
* .map - nested uniforms by name
*
*
* Methods of all nodes except the top-level container:
*
* .setValue( gl, value, [textures] )
*
* uploads a uniform value (or array of values);
* the 'textures' parameter is needed for sampler uniforms
*
*
* Static methods of the top-level container (texture handling is factored
* out into the 'textures' parameter):
*
* .upload( gl, seq, values, textures )
*
* sets uniforms in 'seq' to 'values[id].value'
*
* .seqWithValue( seq, values ) : filteredSeq
*
* filters 'seq' entries with a corresponding entry in 'values'
*
*
* Methods of the top-level container (texture handling is factored
* out into the 'textures' parameter):
*
* .setValue( gl, name, value, textures )
*
* sets the uniform with name 'name' to 'value'
*
* .setOptional( gl, obj, prop )
*
* like .setValue, but only if 'obj[ prop ]' is defined
*
*/
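//
// Illustrative usage sketch (not part of the library). It assumes a WebGL
// context `gl`, a linked `program`, a `material.uniforms` table in the
// { value: ... } format used by UniformsLib above, and the renderer's
// `textures` manager:
//
// const uniforms = new WebGLUniforms( gl, program );
//
// // set a single named uniform directly (the 'textures' argument is only
// // needed for sampler uniforms)
// uniforms.setValue( gl, 'opacity', 0.5 );
//
// // upload every parsed uniform that has a matching entry in material.uniforms
// const seq = WebGLUniforms.seqWithValue( uniforms.seq, material.uniforms );
// WebGLUniforms.upload( gl, seq, material.uniforms, textures );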
const emptyTexture = new Texture$1();
const emptyTexture2dArray = new DataTexture2DArray();
const emptyTexture3d = new DataTexture3D();
const emptyCubeTexture = new CubeTexture();
// --- Utilities ---
// Array caches (provide temporary typed arrays, keyed by size)
const arrayCacheF32 = [];
const arrayCacheI32 = [];
// Float32Array caches used for uploading Matrix uniforms
const mat4array = new Float32Array( 16 );
const mat3array = new Float32Array( 9 );
const mat2array = new Float32Array( 4 );
// Flattening for arrays of vectors and matrices
function flatten( array, nBlocks, blockSize ) {
const firstElem = array[ 0 ];
if ( firstElem <= 0 || firstElem > 0 ) return array;
// fast replacement for ! isNaN( firstElem ): any non-NaN number satisfies
// ( x <= 0 || x > 0 ) and the already-flat array is returned as-is, while
// Vector/Matrix objects fail both comparisons and get flattened below
// see http://jacksondunstan.com/articles/983
const n = nBlocks * blockSize;
let r = arrayCacheF32[ n ];
if ( r === undefined ) {
r = new Float32Array( n );
arrayCacheF32[ n ] = r;
}
if ( nBlocks !== 0 ) {
firstElem.toArray( r, 0 );
for ( let i = 1, offset = 0; i !== nBlocks; ++ i ) {
offset += blockSize;
array[ i ].toArray( r, offset );
}
}
return r;
}
function arraysEqual( a, b ) {
if ( a.length !== b.length ) return false;
for ( let i = 0, l = a.length; i < l; i ++ ) {
if ( a[ i ] !== b[ i ] ) return false;
}
return true;
}
function copyArray( a, b ) {
for ( let i = 0, l = b.length; i < l; i ++ ) {
a[ i ] = b[ i ];
}
}
// Texture unit allocation
function allocTexUnits( textures, n ) {
let r = arrayCacheI32[ n ];
if ( r === undefined ) {
r = new Int32Array( n );
arrayCacheI32[ n ] = r;
}
for ( let i = 0; i !== n; ++ i ) {
r[ i ] = textures.allocateTextureUnit();
}
return r;
}
// --- Setters ---
// Note: these setters are defined as standalone functions rather than
// methods because there are many of them, and this way their names minify.
// Single scalar
function setValueV1f( gl, v ) {
const cache = this.cache;
if ( cache[ 0 ] === v ) return;
gl.uniform1f( this.addr, v );
cache[ 0 ] = v;
}
// Single float vector (from flat array or THREE.VectorN)
function setValueV2f( gl, v ) {
const cache = this.cache;
if ( v.x !== undefined ) {
if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y ) {
gl.uniform2f( this.addr, v.x, v.y );
cache[ 0 ] = v.x;
cache[ 1 ] = v.y;
}
} else {
if ( arraysEqual( cache, v ) ) return;
gl.uniform2fv( this.addr, v );
copyArray( cache, v );
}
}
function setValueV3f( gl, v ) {
const cache = this.cache;
if ( v.x !== undefined ) {
if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z ) {
gl.uniform3f( this.addr, v.x, v.y, v.z );
cache[ 0 ] = v.x;
cache[ 1 ] = v.y;
cache[ 2 ] = v.z;
}
} else if ( v.r !== undefined ) {
if ( cache[ 0 ] !== v.r || cache[ 1 ] !== v.g || cache[ 2 ] !== v.b ) {
gl.uniform3f( this.addr, v.r, v.g, v.b );
cache[ 0 ] = v.r;
cache[ 1 ] = v.g;
cache[ 2 ] = v.b;
}
} else {
if ( arraysEqual( cache, v ) ) return;
gl.uniform3fv( this.addr, v );
copyArray( cache, v );
}
}
function setValueV4f( gl, v ) {
const cache = this.cache;
if ( v.x !== undefined ) {
if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z || cache[ 3 ] !== v.w ) {
gl.uniform4f( this.addr, v.x, v.y, v.z, v.w );
cache[ 0 ] = v.x;
cache[ 1 ] = v.y;
cache[ 2 ] = v.z;
cache[ 3 ] = v.w;
}
} else {
if ( arraysEqual( cache, v ) ) return;
gl.uniform4fv( this.addr, v );
copyArray( cache, v );
}
}
// Single matrix (from flat array or THREE.MatrixN)
function setValueM2( gl, v ) {
const cache = this.cache;
const elements = v.elements;
if ( elements === undefined ) {
if ( arraysEqual( cache, v ) ) return;
gl.uniformMatrix2fv( this.addr, false, v );
copyArray( cache, v );
} else {
if ( arraysEqual( cache, elements ) ) return;
mat2array.set( elements );
gl.uniformMatrix2fv( this.addr, false, mat2array );
copyArray( cache, elements );
}
}
function setValueM3( gl, v ) {
const cache = this.cache;
const elements = v.elements;
if ( elements === undefined ) {
if ( arraysEqual( cache, v ) ) return;
gl.uniformMatrix3fv( this.addr, false, v );
copyArray( cache, v );
} else {
if ( arraysEqual( cache, elements ) ) return;
mat3array.set( elements );
gl.uniformMatrix3fv( this.addr, false, mat3array );
copyArray( cache, elements );
}
}
function setValueM4( gl, v ) {
const cache = this.cache;
const elements = v.elements;
if ( elements === undefined ) {
if ( arraysEqual( cache, v ) ) return;
gl.uniformMatrix4fv( this.addr, false, v );
copyArray( cache, v );
} else {
if ( arraysEqual( cache, elements ) ) return;
mat4array.set( elements );
gl.uniformMatrix4fv( this.addr, false, mat4array );
copyArray( cache, elements );
}
}
// Single integer / boolean
function setValueV1i( gl, v ) {
const cache = this.cache;
if ( cache[ 0 ] === v ) return;
gl.uniform1i( this.addr, v );
cache[ 0 ] = v;
}
// Single integer / boolean vector (from flat array)
function setValueV2i( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform2iv( this.addr, v );
copyArray( cache, v );
}
function setValueV3i( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform3iv( this.addr, v );
copyArray( cache, v );
}
function setValueV4i( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform4iv( this.addr, v );
copyArray( cache, v );
}
// Single unsigned integer
function setValueV1ui( gl, v ) {
const cache = this.cache;
if ( cache[ 0 ] === v ) return;
gl.uniform1ui( this.addr, v );
cache[ 0 ] = v;
}
// Single unsigned integer vector (from flat array)
function setValueV2ui( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform2uiv( this.addr, v );
copyArray( cache, v );
}
function setValueV3ui( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform3uiv( this.addr, v );
copyArray( cache, v );
}
function setValueV4ui( gl, v ) {
const cache = this.cache;
if ( arraysEqual( cache, v ) ) return;
gl.uniform4uiv( this.addr, v );
copyArray( cache, v );
}
// Single texture (2D / Cube)
function setValueT1( gl, v, textures ) {
const cache = this.cache;
const unit = textures.allocateTextureUnit();
if ( cache[ 0 ] !== unit ) {
gl.uniform1i( this.addr, unit );
cache[ 0 ] = unit;
}
textures.safeSetTexture2D( v || emptyTexture, unit );
}
function setValueT3D1( gl, v, textures ) {
const cache = this.cache;
const unit = textures.allocateTextureUnit();
if ( cache[ 0 ] !== unit ) {
gl.uniform1i( this.addr, unit );
cache[ 0 ] = unit;
}
textures.setTexture3D( v || emptyTexture3d, unit );
}
function setValueT6( gl, v, textures ) {
const cache = this.cache;
const unit = textures.allocateTextureUnit();
if ( cache[ 0 ] !== unit ) {
gl.uniform1i( this.addr, unit );
cache[ 0 ] = unit;
}
textures.safeSetTextureCube( v || emptyCubeTexture, unit );
}
function setValueT2DArray1( gl, v, textures ) {
const cache = this.cache;
const unit = textures.allocateTextureUnit();
if ( cache[ 0 ] !== unit ) {
gl.uniform1i( this.addr, unit );
cache[ 0 ] = unit;
}
textures.setTexture2DArray( v || emptyTexture2dArray, unit );
}
// Helper to pick the right setter for the singular case
function getSingularSetter( type ) {
switch ( type ) {
case 0x1406: return setValueV1f; // FLOAT
case 0x8b50: return setValueV2f; // _VEC2
case 0x8b51: return setValueV3f; // _VEC3
case 0x8b52: return setValueV4f; // _VEC4
case 0x8b5a: return setValueM2; // _MAT2
case 0x8b5b: return setValueM3; // _MAT3
case 0x8b5c: return setValueM4; // _MAT4
case 0x1404: case 0x8b56: return setValueV1i; // INT, BOOL
case 0x8b53: case 0x8b57: return setValueV2i; // _VEC2
case 0x8b54: case 0x8b58: return setValueV3i; // _VEC3
case 0x8b55: case 0x8b59: return setValueV4i; // _VEC4
case 0x1405: return setValueV1ui; // UINT
case 0x8dc6: return setValueV2ui; // _VEC2
case 0x8dc7: return setValueV3ui; // _VEC3
case 0x8dc8: return setValueV4ui; // _VEC4
case 0x8b5e: // SAMPLER_2D
case 0x8d66: // SAMPLER_EXTERNAL_OES
case 0x8dca: // INT_SAMPLER_2D
case 0x8dd2: // UNSIGNED_INT_SAMPLER_2D
case 0x8b62: // SAMPLER_2D_SHADOW
return setValueT1;
case 0x8b5f: // SAMPLER_3D
case 0x8dcb: // INT_SAMPLER_3D
case 0x8dd3: // UNSIGNED_INT_SAMPLER_3D
return setValueT3D1;
case 0x8b60: // SAMPLER_CUBE
case 0x8dcc: // INT_SAMPLER_CUBE
case 0x8dd4: // UNSIGNED_INT_SAMPLER_CUBE
case 0x8dc5: // SAMPLER_CUBE_SHADOW
return setValueT6;
case 0x8dc1: // SAMPLER_2D_ARRAY
case 0x8dcf: // INT_SAMPLER_2D_ARRAY
case 0x8dd7: // UNSIGNED_INT_SAMPLER_2D_ARRAY
case 0x8dc4: // SAMPLER_2D_ARRAY_SHADOW
return setValueT2DArray1;
}
}
// Array of scalars
function setValueV1fArray( gl, v ) {
gl.uniform1fv( this.addr, v );
}
// Array of vectors (from flat array or array of THREE.VectorN)
function setValueV2fArray( gl, v ) {
const data = flatten( v, this.size, 2 );
gl.uniform2fv( this.addr, data );
}
function setValueV3fArray( gl, v ) {
const data = flatten( v, this.size, 3 );
gl.uniform3fv( this.addr, data );
}
function setValueV4fArray( gl, v ) {
const data = flatten( v, this.size, 4 );
gl.uniform4fv( this.addr, data );
}
// Array of matrices (from flat array or array of THREE.MatrixN)
function setValueM2Array( gl, v ) {
const data = flatten( v, this.size, 4 );
gl.uniformMatrix2fv( this.addr, false, data );
}
function setValueM3Array( gl, v ) {
const data = flatten( v, this.size, 9 );
gl.uniformMatrix3fv( this.addr, false, data );
}
function setValueM4Array( gl, v ) {
const data = flatten( v, this.size, 16 );
gl.uniformMatrix4fv( this.addr, false, data );
}
// Array of integer / boolean
function setValueV1iArray( gl, v ) {
gl.uniform1iv( this.addr, v );
}
// Array of integer / boolean vectors (from flat array)
function setValueV2iArray( gl, v ) {
gl.uniform2iv( this.addr, v );
}
function setValueV3iArray( gl, v ) {
gl.uniform3iv( this.addr, v );
}
function setValueV4iArray( gl, v ) {
gl.uniform4iv( this.addr, v );
}
// Array of unsigned integer
function setValueV1uiArray( gl, v ) {
gl.uniform1uiv( this.addr, v );
}
// Array of unsigned integer vectors (from flat array)
function setValueV2uiArray( gl, v ) {
gl.uniform2uiv( this.addr, v );
}
function setValueV3uiArray( gl, v ) {
gl.uniform3uiv( this.addr, v );
}
function setValueV4uiArray( gl, v ) {
gl.uniform4uiv( this.addr, v );
}
// Array of textures (2D / Cube)
function setValueT1Array( gl, v, textures ) {
const n = v.length;
const units = allocTexUnits( textures, n );
gl.uniform1iv( this.addr, units );
for ( let i = 0; i !== n; ++ i ) {
textures.safeSetTexture2D( v[ i ] || emptyTexture, units[ i ] );
}
}
function setValueT6Array( gl, v, textures ) {
const n = v.length;
const units = allocTexUnits( textures, n );
gl.uniform1iv( this.addr, units );
for ( let i = 0; i !== n; ++ i ) {
textures.safeSetTextureCube( v[ i ] || emptyCubeTexture, units[ i ] );
}
}
// Helper to pick the right setter for a pure (bottom-level) array
function getPureArraySetter( type ) {
switch ( type ) {
case 0x1406: return setValueV1fArray; // FLOAT
case 0x8b50: return setValueV2fArray; // _VEC2
case 0x8b51: return setValueV3fArray; // _VEC3
case 0x8b52: return setValueV4fArray; // _VEC4
case 0x8b5a: return setValueM2Array; // _MAT2
case 0x8b5b: return setValueM3Array; // _MAT3
case 0x8b5c: return setValueM4Array; // _MAT4
case 0x1404: case 0x8b56: return setValueV1iArray; // INT, BOOL
case 0x8b53: case 0x8b57: return setValueV2iArray; // _VEC2
case 0x8b54: case 0x8b58: return setValueV3iArray; // _VEC3
case 0x8b55: case 0x8b59: return setValueV4iArray; // _VEC4
case 0x1405: return setValueV1uiArray; // UINT
case 0x8dc6: return setValueV2uiArray; // _VEC2
case 0x8dc7: return setValueV3uiArray; // _VEC3
case 0x8dc8: return setValueV4uiArray; // _VEC4
case 0x8b5e: // SAMPLER_2D
case 0x8d66: // SAMPLER_EXTERNAL_OES
case 0x8dca: // INT_SAMPLER_2D
case 0x8dd2: // UNSIGNED_INT_SAMPLER_2D
case 0x8b62: // SAMPLER_2D_SHADOW
return setValueT1Array;
case 0x8b60: // SAMPLER_CUBE
case 0x8dcc: // INT_SAMPLER_CUBE
case 0x8dd4: // UNSIGNED_INT_SAMPLER_CUBE
case 0x8dc5: // SAMPLER_CUBE_SHADOW
return setValueT6Array;
}
}
// --- Uniform Classes ---
function SingleUniform( id, activeInfo, addr ) {
this.id = id;
this.addr = addr;
this.cache = [];
this.setValue = getSingularSetter( activeInfo.type );
// this.path = activeInfo.name; // DEBUG
}
function PureArrayUniform( id, activeInfo, addr ) {
this.id = id;
this.addr = addr;
this.cache = [];
this.size = activeInfo.size;
this.setValue = getPureArraySetter( activeInfo.type );
// this.path = activeInfo.name; // DEBUG
}
PureArrayUniform.prototype.updateCache = function ( data ) {
const cache = this.cache;
if ( data instanceof Float32Array && cache.length !== data.length ) {
this.cache = new Float32Array( data.length );
}
copyArray( cache, data );
};
function StructuredUniform( id ) {
this.id = id;
this.seq = [];
this.map = {};
}
StructuredUniform.prototype.setValue = function ( gl, value, textures ) {
const seq = this.seq;
for ( let i = 0, n = seq.length; i !== n; ++ i ) {
const u = seq[ i ];
u.setValue( gl, value[ u.id ], textures );
}
};
// --- Top-level ---
// Parser - builds up the property tree from the path strings
const RePathPart = /(\w+)(\])?(\[|\.)?/g;
// extracts
// - the identifier (member name or array index)
// - followed by an optional right bracket (found when array index)
// - followed by an optional left bracket or dot (type of subscript)
//
// Note: These portions can be read in a non-overlapping fashion and
// allow straightforward parsing of the hierarchy that WebGL encodes
// in the uniform names.
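//
// For example, an active uniform reported as 'directionalLights[0].direction'
// yields three matches: 'directionalLights' + '[', '0' + ']' + '.', and
// 'direction'. parseUniform() below turns that into
// StructuredUniform( 'directionalLights' ) -> StructuredUniform( 0 ) ->
// SingleUniform( 'direction' ), while a name ending in a bare '[0]' suffix
// (e.g. 'morphTargetInfluences[0]') becomes a single PureArrayUniform.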
function addUniform( container, uniformObject ) {
container.seq.push( uniformObject );
container.map[ uniformObject.id ] = uniformObject;
}
function parseUniform( activeInfo, addr, container ) {
const path = activeInfo.name,
pathLength = path.length;
// reset RegExp object, because of the early exit of a previous run
RePathPart.lastIndex = 0;
while ( true ) {
const match = RePathPart.exec( path ),
matchEnd = RePathPart.lastIndex;
let id = match[ 1 ];
const idIsIndex = match[ 2 ] === ']',
subscript = match[ 3 ];
if ( idIsIndex ) id = id | 0; // convert to integer
if ( subscript === undefined || subscript === '[' && matchEnd + 2 === pathLength ) {
// bare name or "pure" bottom-level array "[0]" suffix
addUniform( container, subscript === undefined ?
new SingleUniform( id, activeInfo, addr ) :
new PureArrayUniform( id, activeInfo, addr ) );
break;
} else {
// step into inner node / create it in case it doesn't exist
const map = container.map;
let next = map[ id ];
if ( next === undefined ) {
next = new StructuredUniform( id );
addUniform( container, next );
}
container = next;
}
}
}
// Root Container
function WebGLUniforms( gl, program ) {
this.seq = [];
this.map = {};
const n = gl.getProgramParameter( program, 35718 );
for ( let i = 0; i < n; ++ i ) {
const info = gl.getActiveUniform( program, i ),
addr = gl.getUniformLocation( program, info.name );
parseUniform( info, addr, this );
}
}
WebGLUniforms.prototype.setValue = function ( gl, name, value, textures ) {
const u = this.map[ name ];
if ( u !== undefined ) u.setValue( gl, value, textures );
};
WebGLUniforms.prototype.setOptional = function ( gl, object, name ) {
const v = object[ name ];
if ( v !== undefined ) this.setValue( gl, name, v );
};
// Static interface
WebGLUniforms.upload = function ( gl, seq, values, textures ) {
for ( let i = 0, n = seq.length; i !== n; ++ i ) {
const u = seq[ i ],
v = values[ u.id ];
if ( v.needsUpdate !== false ) {
// note: always updating when .needsUpdate is undefined
u.setValue( gl, v.value, textures );
}
}
};
WebGLUniforms.seqWithValue = function ( seq, values ) {
const r = [];
for ( let i = 0, n = seq.length; i !== n; ++ i ) {
const u = seq[ i ];
if ( u.id in values ) r.push( u );
}
return r;
};
function WebGLShader( gl, type, string ) {
const shader = gl.createShader( type );
gl.shaderSource( shader, string );
gl.compileShader( shader );
return shader;
}
let programIdCount = 0;
function addLineNumbers( string ) {
const lines = string.split( '\n' );
for ( let i = 0; i < lines.length; i ++ ) {
lines[ i ] = ( i + 1 ) + ': ' + lines[ i ];
}
return lines.join( '\n' );
}
function getEncodingComponents( encoding ) {
switch ( encoding ) {
case LinearEncoding:
return [ 'Linear', '( value )' ];
case sRGBEncoding:
return [ 'sRGB', '( value )' ];
case RGBEEncoding:
return [ 'RGBE', '( value )' ];
case RGBM7Encoding:
return [ 'RGBM', '( value, 7.0 )' ];
case RGBM16Encoding:
return [ 'RGBM', '( value, 16.0 )' ];
case RGBDEncoding:
return [ 'RGBD', '( value, 256.0 )' ];
case GammaEncoding:
return [ 'Gamma', '( value, float( GAMMA_FACTOR ) )' ];
case LogLuvEncoding:
return [ 'LogLuv', '( value )' ];
default:
console.warn( 'THREE.WebGLProgram: Unsupported encoding:', encoding );
return [ 'Linear', '( value )' ];
}
}
function getShaderErrors( gl, shader, type ) {
const status = gl.getShaderParameter( shader, 35713 ); // 35713 === gl.COMPILE_STATUS
const log = gl.getShaderInfoLog( shader ).trim();
if ( status && log === '' ) return '';
// The commented-out dump below relies on the WEBGL_debug_shaders extension,
// which Chrome only exposes when launched with --enable-privileged-webgl-extension.
// console.log( '**' + type + '**', gl.getExtension( 'WEBGL_debug_shaders' ).getTranslatedShaderSource( shader ) );
const source = gl.getShaderSource( shader );
return 'THREE.WebGLShader: gl.getShaderInfoLog() ' + type + '\n' + log + addLineNumbers( source );
}
function getTexelDecodingFunction( functionName, encoding ) {
const components = getEncodingComponents( encoding );
return 'vec4 ' + functionName + '( vec4 value ) { return ' + components[ 0 ] + 'ToLinear' + components[ 1 ] + '; }';
}
function getTexelEncodingFunction( functionName, encoding ) {
const components = getEncodingComponents( encoding );
return 'vec4 ' + functionName + '( vec4 value ) { return LinearTo' + components[ 0 ] + components[ 1 ] + '; }';
}
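// For example, getTexelDecodingFunction( 'mapTexelToLinear', sRGBEncoding ) yields
//   vec4 mapTexelToLinear( vec4 value ) { return sRGBToLinear( value ); }
// and getTexelEncodingFunction( 'linearToOutputTexel', RGBM16Encoding ) yields
//   vec4 linearToOutputTexel( vec4 value ) { return LinearToRGBM( value, 16.0 ); }
// Both depend on the conversion helpers injected via ShaderChunk[ 'encodings_pars_fragment' ].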
function getToneMappingFunction( functionName, toneMapping ) {
let toneMappingName;
switch ( toneMapping ) {
case LinearToneMapping:
toneMappingName = 'Linear';
break;
case ReinhardToneMapping:
toneMappingName = 'Reinhard';
break;
case CineonToneMapping:
toneMappingName = 'OptimizedCineon';
break;
case ACESFilmicToneMapping:
toneMappingName = 'ACESFilmic';
break;
case CustomToneMapping:
toneMappingName = 'Custom';
break;
default:
console.warn( 'THREE.WebGLProgram: Unsupported toneMapping:', toneMapping );
toneMappingName = 'Linear';
}
return 'vec3 ' + functionName + '( vec3 color ) { return ' + toneMappingName + 'ToneMapping( color ); }';
}
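// For example, getToneMappingFunction( 'toneMapping', ACESFilmicToneMapping ) yields
//   vec3 toneMapping( vec3 color ) { return ACESFilmicToneMapping( color ); }
// where ACESFilmicToneMapping() itself is declared in ShaderChunk[ 'tonemapping_pars_fragment' ].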
function generateExtensions( parameters ) {
const chunks = [
( parameters.extensionDerivatives || parameters.envMapCubeUV || parameters.bumpMap || parameters.tangentSpaceNormalMap || parameters.clearcoatNormalMap || parameters.flatShading || parameters.shaderID === 'physical' ) ? '#extension GL_OES_standard_derivatives : enable' : '',
( parameters.extensionFragDepth || parameters.logarithmicDepthBuffer ) && parameters.rendererExtensionFragDepth ? '#extension GL_EXT_frag_depth : enable' : '',
( parameters.extensionDrawBuffers && parameters.rendererExtensionDrawBuffers ) ? '#extension GL_EXT_draw_buffers : require' : '',
( parameters.extensionShaderTextureLOD || parameters.envMap ) && parameters.rendererExtensionShaderTextureLod ? '#extension GL_EXT_shader_texture_lod : enable' : ''
];
return chunks.filter( filterEmptyLine ).join( '\n' );
}
function generateDefines( defines ) {
const chunks = [];
for ( const name in defines ) {
const value = defines[ name ];
if ( value === false ) continue;
chunks.push( '#define ' + name + ' ' + value );
}
return chunks.join( '\n' );
}
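// For example, generateDefines( { MAX_STEPS: 8, USE_DEBUG: false } ) produces the
// single line '#define MAX_STEPS 8'; entries whose value is exactly false are
// skipped, so a material can switch a define off without deleting the key.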
function fetchAttributeLocations( gl, program ) {
const attributes = {};
const n = gl.getProgramParameter( program, 35721 ); // 35721 === gl.ACTIVE_ATTRIBUTES
for ( let i = 0; i < n; i ++ ) {
const info = gl.getActiveAttrib( program, i );
const name = info.name;
// console.log( 'THREE.WebGLProgram: ACTIVE VERTEX ATTRIBUTE:', name, i );
attributes[ name ] = gl.getAttribLocation( program, name );
}
return attributes;
}
function filterEmptyLine( string ) {
return string !== '';
}
function replaceLightNums( string, parameters ) {
return string
.replace( /NUM_DIR_LIGHTS/g, parameters.numDirLights )
.replace( /NUM_SPOT_LIGHTS/g, parameters.numSpotLights )
.replace( /NUM_RECT_AREA_LIGHTS/g, parameters.numRectAreaLights )
.replace( /NUM_POINT_LIGHTS/g, parameters.numPointLights )
.replace( /NUM_HEMI_LIGHTS/g, parameters.numHemiLights )
.replace( /NUM_DIR_LIGHT_SHADOWS/g, parameters.numDirLightShadows )
.replace( /NUM_SPOT_LIGHT_SHADOWS/g, parameters.numSpotLightShadows )
.replace( /NUM_POINT_LIGHT_SHADOWS/g, parameters.numPointLightShadows );
}
function replaceClippingPlaneNums( string, parameters ) {
return string
.replace( /NUM_CLIPPING_PLANES/g, parameters.numClippingPlanes )
.replace( /UNION_CLIPPING_PLANES/g, ( parameters.numClippingPlanes - parameters.numClipIntersection ) );
}
// Resolve Includes
const includePattern = /^[ \t]*#include +<([\w\d./]+)>/gm;
function resolveIncludes( string ) {
return string.replace( includePattern, includeReplacer );
}
function includeReplacer( match, include ) {
const string = ShaderChunk[ include ];
if ( string === undefined ) {
throw new Error( 'Can not resolve #include <' + include + '>' );
}
return resolveIncludes( string );
}
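// Include resolution is recursive: '#include <common>' is replaced with
// ShaderChunk[ 'common' ], and any '#include' directives inside that chunk are
// expanded in turn before the text is spliced back into the shader. An unknown
// chunk name throws, which surfaces typos in custom shader code immediately.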
// Unroll Loops
const deprecatedUnrollLoopPattern = /#pragma unroll_loop[\s]+?for \( int i \= (\d+)\; i < (\d+)\; i \+\+ \) \{([\s\S]+?)(?=\})\}/g;
const unrollLoopPattern = /#pragma unroll_loop_start\s+for\s*\(\s*int\s+i\s*=\s*(\d+)\s*;\s*i\s*<\s*(\d+)\s*;\s*i\s*\+\+\s*\)\s*{([\s\S]+?)}\s+#pragma unroll_loop_end/g;
function unrollLoops( string ) {
return string
.replace( unrollLoopPattern, loopReplacer )
.replace( deprecatedUnrollLoopPattern, deprecatedLoopReplacer );
}
function deprecatedLoopReplacer( match, start, end, snippet ) {
console.warn( 'WebGLProgram: #pragma unroll_loop shader syntax is deprecated. Please use #pragma unroll_loop_start syntax instead.' );
return loopReplacer( match, start, end, snippet );
}
function loopReplacer( match, start, end, snippet ) {
let string = '';
for ( let i = parseInt( start ); i < parseInt( end ); i ++ ) {
string += snippet
.replace( /\[\s*i\s*\]/g, '[ ' + i + ' ]' )
.replace( /UNROLLED_LOOP_INDEX/g, i );
}
return string;
}
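// Illustrative example: a block such as
//   #pragma unroll_loop_start
//   for ( int i = 0; i < 2; i ++ ) {
//       sum += values[ i ];
//   }
//   #pragma unroll_loop_end
// is rewritten into two copies of the body with '[ i ]' replaced by '[ 0 ]' and
// '[ 1 ]' (and any UNROLLED_LOOP_INDEX token replaced by the literal index), so the
// emitted GLSL indexes its arrays with compile-time constants.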
//
function generatePrecision( parameters ) {
let precisionstring = 'precision ' + parameters.precision + ' float;\nprecision ' + parameters.precision + ' int;';
if ( parameters.precision === 'highp' ) {
precisionstring += '\n#define HIGH_PRECISION';
} else if ( parameters.precision === 'mediump' ) {
precisionstring += '\n#define MEDIUM_PRECISION';
} else if ( parameters.precision === 'lowp' ) {
precisionstring += '\n#define LOW_PRECISION';
}
return precisionstring;
}
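// E.g. with parameters.precision === 'highp' this contributes:
//   precision highp float;
//   precision highp int;
//   #define HIGH_PRECISION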
function generateShadowMapTypeDefine( parameters ) {
let shadowMapTypeDefine = 'SHADOWMAP_TYPE_BASIC';
if ( parameters.shadowMapType === PCFShadowMap ) {
shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF';
} else if ( parameters.shadowMapType === PCFSoftShadowMap ) {
shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF_SOFT';
} else if ( parameters.shadowMapType === VSMShadowMap ) {
shadowMapTypeDefine = 'SHADOWMAP_TYPE_VSM';
}
return shadowMapTypeDefine;
}
function generateEnvMapTypeDefine( parameters ) {
let envMapTypeDefine = 'ENVMAP_TYPE_CUBE';
if ( parameters.envMap ) {
switch ( parameters.envMapMode ) {
case CubeReflectionMapping:
case CubeRefractionMapping:
envMapTypeDefine = 'ENVMAP_TYPE_CUBE';
break;
case CubeUVReflectionMapping:
case CubeUVRefractionMapping:
envMapTypeDefine = 'ENVMAP_TYPE_CUBE_UV';
break;
}
}
return envMapTypeDefine;
}
function generateEnvMapModeDefine( parameters ) {
let envMapModeDefine = 'ENVMAP_MODE_REFLECTION';
if ( parameters.envMap ) {
switch ( parameters.envMapMode ) {
case CubeRefractionMapping:
case CubeUVRefractionMapping:
envMapModeDefine = 'ENVMAP_MODE_REFRACTION';
break;
}
}
return envMapModeDefine;
}
function generateEnvMapBlendingDefine( parameters ) {
let envMapBlendingDefine = 'ENVMAP_BLENDING_NONE';
if ( parameters.envMap ) {
switch ( parameters.combine ) {
case MultiplyOperation:
envMapBlendingDefine = 'ENVMAP_BLENDING_MULTIPLY';
break;
case MixOperation:
envMapBlendingDefine = 'ENVMAP_BLENDING_MIX';
break;
case AddOperation:
envMapBlendingDefine = 'ENVMAP_BLENDING_ADD';
break;
}
}
return envMapBlendingDefine;
}
function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) {
const gl = renderer.getContext();
const defines = parameters.defines;
let vertexShader = parameters.vertexShader;
let fragmentShader = parameters.fragmentShader;
const shadowMapTypeDefine = generateShadowMapTypeDefine( parameters );
const envMapTypeDefine = generateEnvMapTypeDefine( parameters );
const envMapModeDefine = generateEnvMapModeDefine( parameters );
const envMapBlendingDefine = generateEnvMapBlendingDefine( parameters );
const gammaFactorDefine = ( renderer.gammaFactor > 0 ) ? renderer.gammaFactor : 1.0;
const customExtensions = parameters.isWebGL2 ? '' : generateExtensions( parameters );
const customDefines = generateDefines( defines );
const program = gl.createProgram();
let prefixVertex, prefixFragment;
let versionString = parameters.glslVersion ? '#version ' + parameters.glslVersion + '\n' : '';
if ( parameters.isRawShaderMaterial ) {
prefixVertex = [
customDefines
].filter( filterEmptyLine ).join( '\n' );
if ( prefixVertex.length > 0 ) {
prefixVertex += '\n';
}
prefixFragment = [
customExtensions,
customDefines
].filter( filterEmptyLine ).join( '\n' );
if ( prefixFragment.length > 0 ) {
prefixFragment += '\n';
}
} else {
prefixVertex = [
generatePrecision( parameters ),
'#define SHADER_NAME ' + parameters.shaderName,
customDefines,
parameters.instancing ? '#define USE_INSTANCING' : '',
parameters.instancingColor ? '#define USE_INSTANCING_COLOR' : '',
parameters.supportsVertexTextures ? '#define VERTEX_TEXTURES' : '',
'#define GAMMA_FACTOR ' + gammaFactorDefine,
'#define MAX_BONES ' + parameters.maxBones,
( parameters.useFog && parameters.fog ) ? '#define USE_FOG' : '',
( parameters.useFog && parameters.fogExp2 ) ? '#define FOG_EXP2' : '',
parameters.map ? '#define USE_MAP' : '',
parameters.envMap ? '#define USE_ENVMAP' : '',
parameters.envMap ? '#define ' + envMapModeDefine : '',
parameters.lightMap ? '#define USE_LIGHTMAP' : '',
parameters.aoMap ? '#define USE_AOMAP' : '',
parameters.emissiveMap ? '#define USE_EMISSIVEMAP' : '',
parameters.bumpMap ? '#define USE_BUMPMAP' : '',
parameters.normalMap ? '#define USE_NORMALMAP' : '',
( parameters.normalMap && parameters.objectSpaceNormalMap ) ? '#define OBJECTSPACE_NORMALMAP' : '',
( parameters.normalMap && parameters.tangentSpaceNormalMap ) ? '#define TANGENTSPACE_NORMALMAP' : '',
parameters.clearcoatMap ? '#define USE_CLEARCOATMAP' : '',
parameters.clearcoatRoughnessMap ? '#define USE_CLEARCOAT_ROUGHNESSMAP' : '',
parameters.clearcoatNormalMap ? '#define USE_CLEARCOAT_NORMALMAP' : '',
parameters.displacementMap && parameters.supportsVertexTextures ? '#define USE_DISPLACEMENTMAP' : '',
parameters.specularMap ? '#define USE_SPECULARMAP' : '',
parameters.roughnessMap ? '#define USE_ROUGHNESSMAP' : '',
parameters.metalnessMap ? '#define USE_METALNESSMAP' : '',
parameters.alphaMap ? '#define USE_ALPHAMAP' : '',
parameters.transmissionMap ? '#define USE_TRANSMISSIONMAP' : '',
parameters.vertexTangents ? '#define USE_TANGENT' : '',
parameters.vertexColors ? '#define USE_COLOR' : '',
parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '',
parameters.vertexUvs ? '#define USE_UV' : '',
parameters.uvsVertexOnly ? '#define UVS_VERTEX_ONLY' : '',
parameters.flatShading ? '#define FLAT_SHADED' : '',
parameters.skinning ? '#define USE_SKINNING' : '',
parameters.useVertexTexture ? '#define BONE_TEXTURE' : '',
parameters.morphTargets ? '#define USE_MORPHTARGETS' : '',
parameters.morphNormals && parameters.flatShading === false ? '#define USE_MORPHNORMALS' : '',
parameters.doubleSided ? '#define DOUBLE_SIDED' : '',
parameters.flipSided ? '#define FLIP_SIDED' : '',
parameters.shadowMapEnabled ? '#define USE_SHADOWMAP' : '',
parameters.shadowMapEnabled ? '#define ' + shadowMapTypeDefine : '',
parameters.sizeAttenuation ? '#define USE_SIZEATTENUATION' : '',
parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '',
( parameters.logarithmicDepthBuffer && parameters.rendererExtensionFragDepth ) ? '#define USE_LOGDEPTHBUF_EXT' : '',
'uniform mat4 modelMatrix;',
'uniform mat4 modelViewMatrix;',
'uniform mat4 projectionMatrix;',
'uniform mat4 viewMatrix;',
'uniform mat3 normalMatrix;',
'uniform vec3 cameraPosition;',
'uniform bool isOrthographic;',
'#ifdef USE_INSTANCING',
' attribute mat4 instanceMatrix;',
'#endif',
'#ifdef USE_INSTANCING_COLOR',
' attribute vec3 instanceColor;',
'#endif',
'attribute vec3 position;',
'attribute vec3 normal;',
'attribute vec2 uv;',
'#ifdef USE_TANGENT',
' attribute vec4 tangent;',
'#endif',
'#if defined( USE_COLOR_ALPHA )',
' attribute vec4 color;',
'#elif defined( USE_COLOR )',
' attribute vec3 color;',
'#endif',
'#ifdef USE_MORPHTARGETS',
' attribute vec3 morphTarget0;',
' attribute vec3 morphTarget1;',
' attribute vec3 morphTarget2;',
' attribute vec3 morphTarget3;',
' #ifdef USE_MORPHNORMALS',
' attribute vec3 morphNormal0;',
' attribute vec3 morphNormal1;',
' attribute vec3 morphNormal2;',
' attribute vec3 morphNormal3;',
' #else',
' attribute vec3 morphTarget4;',
' attribute vec3 morphTarget5;',
' attribute vec3 morphTarget6;',
' attribute vec3 morphTarget7;',
' #endif',
'#endif',
'#ifdef USE_SKINNING',
' attribute vec4 skinIndex;',
' attribute vec4 skinWeight;',
'#endif',
'\n'
].filter( filterEmptyLine ).join( '\n' );
prefixFragment = [
customExtensions,
generatePrecision( parameters ),
'#define SHADER_NAME ' + parameters.shaderName,
customDefines,
parameters.alphaTest ? '#define ALPHATEST ' + parameters.alphaTest + ( parameters.alphaTest % 1 ? '' : '.0' ) : '', // add '.0' if integer
'#define GAMMA_FACTOR ' + gammaFactorDefine,
( parameters.useFog && parameters.fog ) ? '#define USE_FOG' : '',
( parameters.useFog && parameters.fogExp2 ) ? '#define FOG_EXP2' : '',
parameters.map ? '#define USE_MAP' : '',
parameters.matcap ? '#define USE_MATCAP' : '',
parameters.envMap ? '#define USE_ENVMAP' : '',
parameters.envMap ? '#define ' + envMapTypeDefine : '',
parameters.envMap ? '#define ' + envMapModeDefine : '',
parameters.envMap ? '#define ' + envMapBlendingDefine : '',
parameters.lightMap ? '#define USE_LIGHTMAP' : '',
parameters.aoMap ? '#define USE_AOMAP' : '',
parameters.emissiveMap ? '#define USE_EMISSIVEMAP' : '',
parameters.bumpMap ? '#define USE_BUMPMAP' : '',
parameters.normalMap ? '#define USE_NORMALMAP' : '',
( parameters.normalMap && parameters.objectSpaceNormalMap ) ? '#define OBJECTSPACE_NORMALMAP' : '',
( parameters.normalMap && parameters.tangentSpaceNormalMap ) ? '#define TANGENTSPACE_NORMALMAP' : '',
parameters.clearcoatMap ? '#define USE_CLEARCOATMAP' : '',
parameters.clearcoatRoughnessMap ? '#define USE_CLEARCOAT_ROUGHNESSMAP' : '',
parameters.clearcoatNormalMap ? '#define USE_CLEARCOAT_NORMALMAP' : '',
parameters.specularMap ? '#define USE_SPECULARMAP' : '',
parameters.roughnessMap ? '#define USE_ROUGHNESSMAP' : '',
parameters.metalnessMap ? '#define USE_METALNESSMAP' : '',
parameters.alphaMap ? '#define USE_ALPHAMAP' : '',
parameters.sheen ? '#define USE_SHEEN' : '',
parameters.transmissionMap ? '#define USE_TRANSMISSIONMAP' : '',
parameters.vertexTangents ? '#define USE_TANGENT' : '',
parameters.vertexColors || parameters.instancingColor ? '#define USE_COLOR' : '',
parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '',
parameters.vertexUvs ? '#define USE_UV' : '',
parameters.uvsVertexOnly ? '#define UVS_VERTEX_ONLY' : '',
parameters.gradientMap ? '#define USE_GRADIENTMAP' : '',
parameters.flatShading ? '#define FLAT_SHADED' : '',
parameters.doubleSided ? '#define DOUBLE_SIDED' : '',
parameters.flipSided ? '#define FLIP_SIDED' : '',
parameters.shadowMapEnabled ? '#define USE_SHADOWMAP' : '',
parameters.shadowMapEnabled ? '#define ' + shadowMapTypeDefine : '',
parameters.premultipliedAlpha ? '#define PREMULTIPLIED_ALPHA' : '',
parameters.physicallyCorrectLights ? '#define PHYSICALLY_CORRECT_LIGHTS' : '',
parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '',
( parameters.logarithmicDepthBuffer && parameters.rendererExtensionFragDepth ) ? '#define USE_LOGDEPTHBUF_EXT' : '',
( ( parameters.extensionShaderTextureLOD || parameters.envMap ) && parameters.rendererExtensionShaderTextureLod ) ? '#define TEXTURE_LOD_EXT' : '',
'uniform mat4 viewMatrix;',
'uniform vec3 cameraPosition;',
'uniform bool isOrthographic;',
( parameters.toneMapping !== NoToneMapping ) ? '#define TONE_MAPPING' : '',
( parameters.toneMapping !== NoToneMapping ) ? ShaderChunk[ 'tonemapping_pars_fragment' ] : '', // this code is required here because it is used by the toneMapping() function defined below
( parameters.toneMapping !== NoToneMapping ) ? getToneMappingFunction( 'toneMapping', parameters.toneMapping ) : '',
parameters.dithering ? '#define DITHERING' : '',
ShaderChunk[ 'encodings_pars_fragment' ], // this code is required here because it is used by the various encoding/decoding functions defined below
parameters.map ? getTexelDecodingFunction( 'mapTexelToLinear', parameters.mapEncoding ) : '',
parameters.matcap ? getTexelDecodingFunction( 'matcapTexelToLinear', parameters.matcapEncoding ) : '',
parameters.envMap ? getTexelDecodingFunction( 'envMapTexelToLinear', parameters.envMapEncoding ) : '',
parameters.emissiveMap ? getTexelDecodingFunction( 'emissiveMapTexelToLinear', parameters.emissiveMapEncoding ) : '',
parameters.lightMap ? getTexelDecodingFunction( 'lightMapTexelToLinear', parameters.lightMapEncoding ) : '',
getTexelEncodingFunction( 'linearToOutputTexel', parameters.outputEncoding ),
parameters.depthPacking ? '#define DEPTH_PACKING ' + parameters.depthPacking : '',
'\n'
].filter( filterEmptyLine ).join( '\n' );
}
vertexShader = resolveIncludes( vertexShader );
vertexShader = replaceLightNums( vertexShader, parameters );
vertexShader = replaceClippingPlaneNums( vertexShader, parameters );
fragmentShader = resolveIncludes( fragmentShader );
fragmentShader = replaceLightNums( fragmentShader, parameters );
fragmentShader = replaceClippingPlaneNums( fragmentShader, parameters );
vertexShader = unrollLoops( vertexShader );
fragmentShader = unrollLoops( fragmentShader );
if ( parameters.isWebGL2 && parameters.isRawShaderMaterial !== true ) {
// GLSL 3.0 conversion for built-in materials and ShaderMaterial
versionString = '#version 300 es\n';
prefixVertex = [
'#define attribute in',
'#define varying out',
'#define texture2D texture'
].join( '\n' ) + '\n' + prefixVertex;
prefixFragment = [
'#define varying in',
( parameters.glslVersion === GLSL3 ) ? '' : 'out highp vec4 pc_fragColor;',
( parameters.glslVersion === GLSL3 ) ? '' : '#define gl_FragColor pc_fragColor',
'#define gl_FragDepthEXT gl_FragDepth',
'#define texture2D texture',
'#define textureCube texture',
'#define texture2DProj textureProj',
'#define texture2DLodEXT textureLod',
'#define texture2DProjLodEXT textureProjLod',
'#define textureCubeLodEXT textureLod',
'#define texture2DGradEXT textureGrad',
'#define texture2DProjGradEXT textureProjGrad',
'#define textureCubeGradEXT textureGrad'
].join( '\n' ) + '\n' + prefixFragment;
}
const vertexGlsl = versionString + prefixVertex + vertexShader;
const fragmentGlsl = versionString + prefixFragment + fragmentShader;
// console.log( '*VERTEX*', vertexGlsl );
// console.log( '*FRAGMENT*', fragmentGlsl );
const glVertexShader = WebGLShader( gl, 35633, vertexGlsl ); // 35633 === gl.VERTEX_SHADER
const glFragmentShader = WebGLShader( gl, 35632, fragmentGlsl ); // 35632 === gl.FRAGMENT_SHADER
gl.attachShader( program, glVertexShader );
gl.attachShader( program, glFragmentShader );
// Force a particular attribute to index 0.
if ( parameters.index0AttributeName !== undefined ) {
gl.bindAttribLocation( program, 0, parameters.index0AttributeName );
} else if ( parameters.morphTargets === true ) {
// programs with morphTargets displace position out of attribute 0
gl.bindAttribLocation( program, 0, 'position' );
}
gl.linkProgram( program );
// check for link errors
if ( renderer.debug.checkShaderErrors ) {
const programLog = gl.getProgramInfoLog( program ).trim();
const vertexLog = gl.getShaderInfoLog( glVertexShader ).trim();
const fragmentLog = gl.getShaderInfoLog( glFragmentShader ).trim();
let runnable = true;
let haveDiagnostics = true;
if ( gl.getProgramParameter( program, 35714 ) === false ) { // 35714 === gl.LINK_STATUS
runnable = false;
const vertexErrors = getShaderErrors( gl, glVertexShader, 'vertex' );
const fragmentErrors = getShaderErrors( gl, glFragmentShader, 'fragment' );
console.error( 'THREE.WebGLProgram: shader error: ', gl.getError(), '35715', gl.getProgramParameter( program, 35715 ), 'gl.getProgramInfoLog', programLog, vertexErrors, fragmentErrors ); // 35715 === gl.VALIDATE_STATUS
} else if ( programLog !== '' ) {
console.warn( 'THREE.WebGLProgram: gl.getProgramInfoLog()', programLog );
} else if ( vertexLog === '' || fragmentLog === '' ) {
haveDiagnostics = false;
}
if ( haveDiagnostics ) {
this.diagnostics = {
runnable: runnable,
programLog: programLog,
vertexShader: {
log: vertexLog,
prefix: prefixVertex
},
fragmentShader: {
log: fragmentLog,
prefix: prefixFragment
}
};
}
}
// Clean up
// Crashes in iOS9 and iOS10. #18402
// gl.detachShader( program, glVertexShader );
// gl.detachShader( program, glFragmentShader );
gl.deleteShader( glVertexShader );
gl.deleteShader( glFragmentShader );
// set up caching for uniform locations
let cachedUniforms;
this.getUniforms = function () {
if ( cachedUniforms === undefined ) {
cachedUniforms = new WebGLUniforms( gl, program );
}
return cachedUniforms;
};
// set up caching for attribute locations
let cachedAttributes;
this.getAttributes = function () {
if ( cachedAttributes === undefined ) {
cachedAttributes = fetchAttributeLocations( gl, program );
}
return cachedAttributes;
};
// free resource
this.destroy = function () {
bindingStates.releaseStatesOfProgram( this );
gl.deleteProgram( program );
this.program = undefined;
};
//
this.name = parameters.shaderName;
this.id = programIdCount ++;
this.cacheKey = cacheKey;
this.usedTimes = 1;
this.program = program;
this.vertexShader = glVertexShader;
this.fragmentShader = glFragmentShader;
return this;
}
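// In short: WebGLProgram assembles version string + generated prefix + resolved
// shader source for both stages, compiles and links them, and exposes lazily
// cached uniform/attribute lookups through getUniforms() and getAttributes();
// link/compile diagnostics are only collected when renderer.debug.checkShaderErrors
// is enabled, and destroy() releases the GL program once it is no longer shared.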
function WebGLPrograms( renderer, cubemaps, extensions, capabilities, bindingStates, clipping ) {
const programs = [];
const isWebGL2 = capabilities.isWebGL2;
const logarithmicDepthBuffer = capabilities.logarithmicDepthBuffer;
const floatVertexTextures = capabilities.floatVertexTextures;
const maxVertexUniforms = capabilities.maxVertexUniforms;
const vertexTextures = capabilities.vertexTextures;
let precision = capabilities.precision;
const shaderIDs = {
MeshDepthMaterial: 'depth',
MeshDistanceMaterial: 'distanceRGBA',
MeshNormalMaterial: 'normal',
MeshBasicMaterial: 'basic',
MeshLambertMaterial: 'lambert',
MeshPhongMaterial: 'phong',
MeshToonMaterial: 'toon',
MeshStandardMaterial: 'physical',
MeshPhysicalMaterial: 'physical',
MeshMatcapMaterial: 'matcap',
LineBasicMaterial: 'basic',
LineDashedMaterial: 'dashed',
PointsMaterial: 'points',
ShadowMaterial: 'shadow',
SpriteMaterial: 'sprite'
};
const parameterNames = [
'precision', 'isWebGL2', 'supportsVertexTextures', 'outputEncoding', 'instancing', 'instancingColor',
'map', 'mapEncoding', 'matcap', 'matcapEncoding', 'envMap', 'envMapMode', 'envMapEncoding', 'envMapCubeUV',
'lightMap', 'lightMapEncoding', 'aoMap', 'emissiveMap', 'emissiveMapEncoding', 'bumpMap', 'normalMap', 'objectSpaceNormalMap', 'tangentSpaceNormalMap', 'clearcoatMap', 'clearcoatRoughnessMap', 'clearcoatNormalMap', 'displacementMap', 'specularMap',
'roughnessMap', 'metalnessMap', 'gradientMap',
'alphaMap', 'combine', 'vertexColors', 'vertexAlphas', 'vertexTangents', 'vertexUvs', 'uvsVertexOnly', 'fog', 'useFog', 'fogExp2',
'flatShading', 'sizeAttenuation', 'logarithmicDepthBuffer', 'skinning',
'maxBones', 'useVertexTexture', 'morphTargets', 'morphNormals', 'premultipliedAlpha',
'numDirLights', 'numPointLights', 'numSpotLights', 'numHemiLights', 'numRectAreaLights',
'numDirLightShadows', 'numPointLightShadows', 'numSpotLightShadows',
'shadowMapEnabled', 'shadowMapType', 'toneMapping', 'physicallyCorrectLights',
'alphaTest', 'doubleSided', 'flipSided', 'numClippingPlanes', 'numClipIntersection', 'depthPacking', 'dithering',
'sheen', 'transmissionMap'
];
function getMaxBones( object ) {
const skeleton = object.skeleton;
const bones = skeleton.bones;
if ( floatVertexTextures ) {
return 1024;
} else {
// default for when object is not specified
// ( for example when prebuilding shader to be used with multiple objects )
//
// - leave some extra space for other uniforms
// - limit here is ANGLE's 254 max uniform vectors
// (up to 54 should be safe)
const nVertexUniforms = maxVertexUniforms;
const nVertexMatrices = Math.floor( ( nVertexUniforms - 20 ) / 4 );
const maxBones = Math.min( nVertexMatrices, bones.length );
if ( maxBones < bones.length ) {
console.warn( 'THREE.WebGLRenderer: Skeleton has ' + bones.length + ' bones. This GPU supports ' + maxBones + '.' );
return 0;
}
return maxBones;
}
}
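// Worked example for the non-float-texture branch: with ANGLE's 254 uniform
// vectors, floor( ( 254 - 20 ) / 4 ) = 58 mat4 bone slots remain after the
// reserved uniforms, so a skeleton with more bones than the computed maximum
// falls back to 0 (skinning is then skipped for that object) with a warning.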
function getTextureEncodingFromMap( map ) {
let encoding;
if ( map && map.isTexture ) {
encoding = map.encoding;
} else if ( map && map.isWebGLRenderTarget ) {
console.warn( 'THREE.WebGLPrograms.getTextureEncodingFromMap: don\'t use render targets as textures. Use their .texture property instead.' );
encoding = map.texture.encoding;
} else {
encoding = LinearEncoding;
}
return encoding;
}
function getParameters( material, lights, shadows, scene, object ) {
const fog = scene.fog;
const environment = material.isMeshStandardMaterial ? scene.environment : null;
const envMap = cubemaps.get( material.envMap || environment );
const shaderID = shaderIDs[ material.type ];
// heuristics to create shader parameters according to the lights in the scene
// (so as not to exceed the maxLights budget)
const maxBones = object.isSkinnedMesh ? getMaxBones( object ) : 0;
if ( material.precision !== null ) {
precision = capabilities.getMaxPrecision( material.precision );
if ( precision !== material.precision ) {
console.warn( 'THREE.WebGLProgram.getParameters:', material.precision, 'not supported, using', precision, 'instead.' );
}
}
let vertexShader, fragmentShader;
if ( shaderID ) {
const shader = ShaderLib[ shaderID ];
vertexShader = shader.vertexShader;
fragmentShader = shader.fragmentShader;
} else {
vertexShader = material.vertexShader;
fragmentShader = material.fragmentShader;
}
const currentRenderTarget = renderer.getRenderTarget();
const parameters = {
isWebGL2: isWebGL2,
shaderID: shaderID,
shaderName: material.type,
vertexShader: vertexShader,
fragmentShader: fragmentShader,
defines: material.defines,
isRawShaderMaterial: material.isRawShaderMaterial === true,
glslVersion: material.glslVersion,
precision: precision,
instancing: object.isInstancedMesh === true,
instancingColor: object.isInstancedMesh === true && object.instanceColor !== null,
supportsVertexTextures: vertexTextures,
outputEncoding: ( currentRenderTarget !== null ) ? getTextureEncodingFromMap( currentRenderTarget.texture ) : renderer.outputEncoding,
map: !! material.map,
mapEncoding: getTextureEncodingFromMap( material.map ),
matcap: !! material.matcap,
matcapEncoding: getTextureEncodingFromMap( material.matcap ),
envMap: !! envMap,
envMapMode: envMap && envMap.mapping,
envMapEncoding: getTextureEncodingFromMap( envMap ),
envMapCubeUV: ( !! envMap ) && ( ( envMap.mapping === CubeUVReflectionMapping ) || ( envMap.mapping === CubeUVRefractionMapping ) ),
lightMap: !! material.lightMap,
lightMapEncoding: getTextureEncodingFromMap( material.lightMap ),
aoMap: !! material.aoMap,
emissiveMap: !! material.emissiveMap,
emissiveMapEncoding: getTextureEncodingFromMap( material.emissiveMap ),
bumpMap: !! material.bumpMap,
normalMap: !! material.normalMap,
objectSpaceNormalMap: material.normalMapType === ObjectSpaceNormalMap,
tangentSpaceNormalMap: material.normalMapType === TangentSpaceNormalMap,
clearcoatMap: !! material.clearcoatMap,
clearcoatRoughnessMap: !! material.clearcoatRoughnessMap,
clearcoatNormalMap: !! material.clearcoatNormalMap,
displacementMap: !! material.displacementMap,
roughnessMap: !! material.roughnessMap,
metalnessMap: !! material.metalnessMap,
specularMap: !! material.specularMap,
alphaMap: !! material.alphaMap,
gradientMap: !! material.gradientMap,
sheen: !! material.sheen,
transmissionMap: !! material.transmissionMap,
combine: material.combine,
vertexTangents: ( material.normalMap && material.vertexTangents ),
vertexColors: material.vertexColors,
vertexAlphas: material.vertexColors === true && object.geometry && object.geometry.attributes.color && object.geometry.attributes.color.itemSize === 4,
vertexUvs: !! material.map || !! material.bumpMap || !! material.normalMap || !! material.specularMap || !! material.alphaMap || !! material.emissiveMap || !! material.roughnessMap || !! material.metalnessMap || !! material.clearcoatMap || !! material.clearcoatRoughnessMap || !! material.clearcoatNormalMap || !! material.displacementMap || !! material.transmissionMap,
uvsVertexOnly: ! ( !! material.map || !! material.bumpMap || !! material.normalMap || !! material.specularMap || !! material.alphaMap || !! material.emissiveMap || !! material.roughnessMap || !! material.metalnessMap || !! material.clearcoatNormalMap || !! material.transmissionMap ) && !! material.displacementMap,
fog: !! fog,
useFog: material.fog,
fogExp2: ( fog && fog.isFogExp2 ),
flatShading: !! material.flatShading,
sizeAttenuation: material.sizeAttenuation,
logarithmicDepthBuffer: logarithmicDepthBuffer,
skinning: material.skinning && maxBones > 0,
maxBones: maxBones,
useVertexTexture: floatVertexTextures,
morphTargets: material.morphTargets,
morphNormals: material.morphNormals,
numDirLights: lights.directional.length,
numPointLights: lights.point.length,
numSpotLights: lights.spot.length,
numRectAreaLights: lights.rectArea.length,
numHemiLights: lights.hemi.length,
numDirLightShadows: lights.directionalShadowMap.length,
numPointLightShadows: lights.pointShadowMap.length,
numSpotLightShadows: lights.spotShadowMap.length,
numClippingPlanes: clipping.numPlanes,
numClipIntersection: clipping.numIntersection,
dithering: material.dithering,
shadowMapEnabled: renderer.shadowMap.enabled && shadows.length > 0,
shadowMapType: renderer.shadowMap.type,
toneMapping: material.toneMapped ? renderer.toneMapping : NoToneMapping,
physicallyCorrectLights: renderer.physicallyCorrectLights,
premultipliedAlpha: material.premultipliedAlpha,
alphaTest: material.alphaTest,
doubleSided: material.side === DoubleSide,
flipSided: material.side === BackSide,
depthPacking: ( material.depthPacking !== undefined ) ? material.depthPacking : false,
index0AttributeName: material.index0AttributeName,
extensionDerivatives: material.extensions && material.extensions.derivatives,
extensionFragDepth: material.extensions && material.extensions.fragDepth,
extensionDrawBuffers: material.extensions && material.extensions.drawBuffers,
extensionShaderTextureLOD: material.extensions && material.extensions.shaderTextureLOD,
rendererExtensionFragDepth: isWebGL2 || extensions.has( 'EXT_frag_depth' ),
rendererExtensionDrawBuffers: isWebGL2 || extensions.has( 'WEBGL_draw_buffers' ),
rendererExtensionShaderTextureLod: isWebGL2 || extensions.has( 'EXT_shader_texture_lod' ),
customProgramCacheKey: material.customProgramCacheKey()
};
return parameters;
}
function getProgramCacheKey( parameters ) {
const array = [];
if ( parameters.shaderID ) {
array.push( parameters.shaderID );
} else {
array.push( parameters.fragmentShader );
array.push( parameters.vertexShader );
}
if ( parameters.defines !== undefined ) {
for ( const name in parameters.defines ) {
array.push( name );
array.push( parameters.defines[ name ] );
}
}
if ( parameters.isRawShaderMaterial === false ) {
for ( let i = 0; i < parameterNames.length; i ++ ) {
array.push( parameters[ parameterNames[ i ] ] );
}
array.push( renderer.outputEncoding );
array.push( renderer.gammaFactor );
}
array.push( parameters.customProgramCacheKey );
return array.join();
}
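// The resulting key is simply all of these values joined with commas; two
// materials that resolve to the same key (same shader chunks, defines, light
// counts, output encoding, and so on) share a single compiled WebGLProgram via
// acquireProgram() below instead of triggering another compile and link.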
function getUniforms( material ) {
const shaderID = shaderIDs[ material.type ];
let uniforms;
if ( shaderID ) {
const shader = ShaderLib[ shaderID ];
uniforms = UniformsUtils.clone( shader.uniforms );
} else {
uniforms = material.uniforms;
}
return uniforms;
}
function acquireProgram( parameters, cacheKey ) {
let program;
// Check if the code has already been compiled
for ( let p = 0, pl = programs.length; p < pl; p ++ ) {
const preexistingProgram = programs[ p ];
if ( preexistingProgram.cacheKey === cacheKey ) {
program = preexistingProgram;
++ program.usedTimes;
break;
}
}
if ( program === undefined ) {
program = new WebGLProgram( renderer, cacheKey, parameters, bindingStates );
programs.push( program );
}
return program;
}
function releaseProgram( program ) {
if ( -- program.usedTimes === 0 ) {
// Remove from unordered set
const i = programs.indexOf( program );
programs[ i ] = programs[ programs.length - 1 ];
programs.pop();
// Free WebGL resources
program.destroy();
}
}
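// Removal above treats the programs array as an unordered set: the last entry is
// swapped into the freed slot and popped, which keeps releaseProgram() O(1) at
// the cost of not preserving insertion order.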
return {
getParameters: getParameters,
getProgramCacheKey: getProgramCacheKey,
getUniforms: getUniforms,
acquireProgram: acquireProgram,
releaseProgram: releaseProgram,
// Exposed for resource monitoring & error feedback via renderer.info:
programs: programs
};
}
function WebGLProperties() {
let properties = new WeakMap();
function get( object ) {
let map = properties.get( object );
if ( map === undefined ) {
map = {};
properties.set( object, map );
}
return map;
}
function remove( object ) {
properties.delete( object );
}
function update( object, key, value ) {
properties.get( object )[ key ] = value;
}
function dispose() {
properties = new WeakMap();
}
return {
get: get,
remove: remove,
update: update,
dispose: dispose
};
}
function painterSortStable( a, b ) {
if ( a.groupOrder !== b.groupOrder ) {
return a.groupOrder - b.groupOrder;
} else if ( a.renderOrder !== b.renderOrder ) {
return a.renderOrder - b.renderOrder;
} else if ( a.program !== b.program ) {
return a.program.id - b.program.id;
} else if ( a.material.id !== b.material.id ) {
return a.material.id - b.material.id;
} else if ( a.z !== b.z ) {
return a.z - b.z;
} else {
return a.id - b.id;
}
}
function reversePainterSortStable( a, b ) {
if ( a.groupOrder !== b.groupOrder ) {
return a.groupOrder - b.groupOrder;
} else if ( a.renderOrder !== b.renderOrder ) {
return a.renderOrder - b.renderOrder;
} else if ( a.z !== b.z ) {
return b.z - a.z;
} else {
return a.id - b.id;
}
}
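// Sort rationale: opaque items are drawn roughly front-to-back (ascending z,
// after group/render order and program/material id) so early depth testing can
// reject hidden fragments, while transparent items are drawn back-to-front
// (descending z) so alpha blending composites correctly.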
function WebGLRenderList( properties ) {
const renderItems = [];
let renderItemsIndex = 0;
const opaque = [];
const transparent = [];
const defaultProgram = { id: - 1 };
function init() {
renderItemsIndex = 0;
opaque.length = 0;
transparent.length = 0;
}
function getNextRenderItem( object, geometry, material, groupOrder, z, group ) {
let renderItem = renderItems[ renderItemsIndex ];
const materialProperties = properties.get( material );
if ( renderItem === undefined ) {
renderItem = {
id: object.id,
object: object,
geometry: geometry,
material: material,
program: materialProperties.program || defaultProgram,
groupOrder: groupOrder,
renderOrder: object.renderOrder,
z: z,
group: group
};
renderItems[ renderItemsIndex ] = renderItem;
} else {
renderItem.id = object.id;
renderItem.object = object;
renderItem.geometry = geometry;
renderItem.material = material;
renderItem.program = materialProperties.program || defaultProgram;
renderItem.groupOrder = groupOrder;
renderItem.renderOrder = object.renderOrder;
renderItem.z = z;
renderItem.group = group;
}
renderItemsIndex ++;
return renderItem;
}
function push( object, geometry, material, groupOrder, z, group ) {
const renderItem = getNextRenderItem( object, geometry, material, groupOrder, z, group );
( material.transparent === true ? transparent : opaque ).push( renderItem );
}
function unshift( object, geometry, material, groupOrder, z, group ) {
const renderItem = getNextRenderItem( object, geometry, material, groupOrder, z, group );
( material.transparent === true ? transparent : opaque ).unshift( renderItem );
}
function sort( customOpaqueSort, customTransparentSort ) {
if ( opaque.length > 1 ) opaque.sort( customOpaqueSort || painterSortStable );
if ( transparent.length > 1 ) transparent.sort( customTransparentSort || reversePainterSortStable );
}
function finish() {
// Clear references from inactive renderItems in the list
for ( let i = renderItemsIndex, il = renderItems.length; i < il; i ++ ) {
const renderItem = renderItems[ i ];
if ( renderItem.id === null ) break;
renderItem.id = null;
renderItem.object = null;
renderItem.geometry = null;
renderItem.material = null;
renderItem.program = null;
renderItem.group = null;
}
}
return {
opaque: opaque,
transparent: transparent,
init: init,
push: push,
unshift: unshift,
finish: finish,
sort: sort
};
}
function WebGLRenderLists( properties ) {
let lists = new WeakMap();
function get( scene, renderCallDepth ) {
let list;
if ( lists.has( scene ) === false ) {
list = new WebGLRenderList( properties );
lists.set( scene, [ list ] );
} else {
if ( renderCallDepth >= lists.get( scene ).length ) {
list = new WebGLRenderList( properties );
lists.get( scene ).push( list );
} else {
list = lists.get( scene )[ renderCallDepth ];
}
}
return list;
}
function dispose() {
lists = new WeakMap();
}
return {
get: get,
dispose: dispose
};
}
function UniformsCache() {
const lights = {};
return {
get: function ( light ) {
if ( lights[ light.id ] !== undefined ) {
return lights[ light.id ];
}
let uniforms;
switch ( light.type ) {
case 'DirectionalLight':
uniforms = {
direction: new Vector3(),
color: new Color()
};
break;
case 'SpotLight':
uniforms = {
position: new Vector3(),
direction: new Vector3(),
color: new Color(),
distance: 0,
coneCos: 0,
penumbraCos: 0,
decay: 0
};
break;
case 'PointLight':
uniforms = {
position: new Vector3(),
color: new Color(),
distance: 0,
decay: 0
};
break;
case 'HemisphereLight':
uniforms = {
direction: new Vector3(),
skyColor: new Color(),
groundColor: new Color()
};
break;
case 'RectAreaLight':
uniforms = {
color: new Color(),
position: new Vector3(),
halfWidth: new Vector3(),
halfHeight: new Vector3()
};
break;
}
lights[ light.id ] = uniforms;
return uniforms;
}
};
}
function ShadowUniformsCache() {
const lights = {};
return {
get: function ( light ) {
if ( lights[ light.id ] !== undefined ) {
return lights[ light.id ];
}
let uniforms;
switch ( light.type ) {
case 'DirectionalLight':
uniforms = {
shadowBias: 0,
shadowNormalBias: 0,
shadowRadius: 1,
shadowMapSize: new Vector2()
};
break;
case 'SpotLight':
uniforms = {
shadowBias: 0,
shadowNormalBias: 0,
shadowRadius: 1,
shadowMapSize: new Vector2()
};
break;
case 'PointLight':
uniforms = {
shadowBias: 0,
shadowNormalBias: 0,
shadowRadius: 1,
shadowMapSize: new Vector2(),
shadowCameraNear: 1,
shadowCameraFar: 1000
};
break;
// TODO (abelnation): set RectAreaLight shadow uniforms
}
lights[ light.id ] = uniforms;
return uniforms;
}
};
}
let nextVersion = 0;
function shadowCastingLightsFirst( lightA, lightB ) {
return ( lightB.castShadow ? 1 : 0 ) - ( lightA.castShadow ? 1 : 0 );
}
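// Shadow-casting lights are sorted to the front of the light list so that, for
// each light type, the first NUM_*_LIGHT_SHADOWS entries of the uniform arrays
// line up with the corresponding shadow uniform and shadow map arrays in the
// generated shaders.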
function WebGLLights( extensions, capabilities ) {
const cache = new UniformsCache();
const shadowCache = ShadowUniformsCache();
const state = {
version: 0,
hash: {
directionalLength: - 1,
pointLength: - 1,
spotLength: - 1,
rectAreaLength: - 1,
hemiLength: - 1,
numDirectionalShadows: - 1,
numPointShadows: - 1,
numSpotShadows: - 1
},
ambient: [ 0, 0, 0 ],
probe: [],
directional: [],
directionalShadow: [],
directionalShadowMap: [],
directionalShadowMatrix: [],
spot: [],
spotShadow: [],
spotShadowMap: [],
spotShadowMatrix: [],
rectArea: [],
rectAreaLTC1: null,
rectAreaLTC2: null,
point: [],
pointShadow: [],
pointShadowMap: [],
pointShadowMatrix: [],
hemi: []
};
for ( let i = 0; i < 9; i ++ ) state.probe.push( new Vector3() );
const vector3 = new Vector3();
const matrix4 = new Matrix4();
const matrix42 = new Matrix4();
function setup( lights ) {
let r = 0, g = 0, b = 0;
for ( let i = 0; i < 9; i ++ ) state.probe[ i ].set( 0, 0, 0 );
let directionalLength = 0;
let pointLength = 0;
let spotLength = 0;
let rectAreaLength = 0;
let hemiLength = 0;
let numDirectionalShadows = 0;
let numPointShadows = 0;
let numSpotShadows = 0;
lights.sort( shadowCastingLightsFirst );
for ( let i = 0, l = lights.length; i < l; i ++ ) {
const light = lights[ i ];
const color = light.color;
const intensity = light.intensity;
const distance = light.distance;
const shadowMap = ( light.shadow && light.shadow.map ) ? light.shadow.map.texture : null;
if ( light.isAmbientLight ) {
r += color.r * intensity;
g += color.g * intensity;
b += color.b * intensity;
} else if ( light.isLightProbe ) {
for ( let j = 0; j < 9; j ++ ) {
state.probe[ j ].addScaledVector( light.sh.coefficients[ j ], intensity );
}
} else if ( light.isDirectionalLight ) {
const uniforms = cache.get( light );
uniforms.color.copy( light.color ).multiplyScalar( light.intensity );
if ( light.castShadow ) {
const shadow = light.shadow;
const shadowUniforms = shadowCache.get( light );
shadowUniforms.shadowBias = shadow.bias;
shadowUniforms.shadowNormalBias = shadow.normalBias;
shadowUniforms.shadowRadius = shadow.radius;
shadowUniforms.shadowMapSize = shadow.mapSize;
state.directionalShadow[ directionalLength ] = shadowUniforms;
state.directionalShadowMap[ directionalLength ] = shadowMap;
state.directionalShadowMatrix[ directionalLength ] = light.shadow.matrix;
numDirectionalShadows ++;
}
state.directional[ directionalLength ] = uniforms;
directionalLength ++;
} else if ( light.isSpotLight ) {
const uniforms = cache.get( light );
uniforms.position.setFromMatrixPosition( light.matrixWorld );
uniforms.color.copy( color ).multiplyScalar( intensity );
uniforms.distance = distance;
uniforms.coneCos = Math.cos( light.angle );
uniforms.penumbraCos = Math.cos( light.angle * ( 1 - light.penumbra ) );
uniforms.decay = light.decay;
if ( light.castShadow ) {
const shadow = light.shadow;
const shadowUniforms = shadowCache.get( light );
shadowUniforms.shadowBias = shadow.bias;
shadowUniforms.shadowNormalBias = shadow.normalBias;
shadowUniforms.shadowRadius = shadow.radius;
shadowUniforms.shadowMapSize = shadow.mapSize;
state.spotShadow[ spotLength ] = shadowUniforms;
state.spotShadowMap[ spotLength ] = shadowMap;
state.spotShadowMatrix[ spotLength ] = light.shadow.matrix;
numSpotShadows ++;
}
state.spot[ spotLength ] = uniforms;
spotLength ++;
} else if ( light.isRectAreaLight ) {
const uniforms = cache.get( light );
// (a) intensity is the total visible light emitted
//uniforms.color.copy( color ).multiplyScalar( intensity / ( light.width * light.height * Math.PI ) );
// (b) intensity is the brightness of the light
uniforms.color.copy( color ).multiplyScalar( intensity );
uniforms.halfWidth.set( light.width * 0.5, 0.0, 0.0 );
uniforms.halfHeight.set( 0.0, light.height * 0.5, 0.0 );
state.rectArea[ rectAreaLength ] = uniforms;
rectAreaLength ++;
} else if ( light.isPointLight ) {
const uniforms = cache.get( light );
uniforms.color.copy( light.color ).multiplyScalar( light.intensity );
uniforms.distance = light.distance;
uniforms.decay = light.decay;
if ( light.castShadow ) {
const shadow = light.shadow;
const shadowUniforms = shadowCache.get( light );
shadowUniforms.shadowBias = shadow.bias;
shadowUniforms.shadowNormalBias = shadow.normalBias;
shadowUniforms.shadowRadius = shadow.radius;
shadowUniforms.shadowMapSize = shadow.mapSize;
shadowUniforms.shadowCameraNear = shadow.camera.near;
shadowUniforms.shadowCameraFar = shadow.camera.far;
state.pointShadow[ pointLength ] = shadowUniforms;
state.pointShadowMap[ pointLength ] = shadowMap;
state.pointShadowMatrix[ pointLength ] = light.shadow.matrix;
numPointShadows ++;
}
state.point[ pointLength ] = uniforms;
pointLength ++;
} else if ( light.isHemisphereLight ) {
const uniforms = cache.get( light );
uniforms.skyColor.copy( light.color ).multiplyScalar( intensity );
uniforms.groundColor.copy( light.groundColor ).multiplyScalar( intensity );
state.hemi[ hemiLength ] = uniforms;
hemiLength ++;
}
}
if ( rectAreaLength > 0 ) {
if ( capabilities.isWebGL2 ) {
// WebGL 2
state.rectAreaLTC1 = UniformsLib.LTC_FLOAT_1;
state.rectAreaLTC2 = UniformsLib.LTC_FLOAT_2;
} else {
// WebGL 1
if ( extensions.has( 'OES_texture_float_linear' ) === true ) {
state.rectAreaLTC1 = UniformsLib.LTC_FLOAT_1;
state.rectAreaLTC2 = UniformsLib.LTC_FLOAT_2;
} else if ( extensions.has( 'OES_texture_half_float_linear' ) === true ) {
state.rectAreaLTC1 = UniformsLib.LTC_HALF_1;
state.rectAreaLTC2 = UniformsLib.LTC_HALF_2;
} else {
console.error( 'THREE.WebGLRenderer: Unable to use RectAreaLight. Missing WebGL extensions.' );
}
}
}
state.ambient[ 0 ] = r;
state.ambient[ 1 ] = g;
state.ambient[ 2 ] = b;
const hash = state.hash;
if ( hash.directionalLength !== directionalLength ||
hash.pointLength !== pointLength ||
hash.spotLength !== spotLength ||
hash.rectAreaLength !== rectAreaLength ||
hash.hemiLength !== hemiLength ||
hash.numDirectionalShadows !== numDirectionalShadows ||
hash.numPointShadows !== numPointShadows ||
hash.numSpotShadows !== numSpotShadows ) {
state.directional.length = directionalLength;
state.spot.length = spotLength;
state.rectArea.length = rectAreaLength;
state.point.length = pointLength;
state.hemi.length = hemiLength;
state.directionalShadow.length = numDirectionalShadows;
state.directionalShadowMap.length = numDirectionalShadows;
state.pointShadow.length = numPointShadows;
state.pointShadowMap.length = numPointShadows;
state.spotShadow.length = numSpotShadows;
state.spotShadowMap.length = numSpotShadows;
state.directionalShadowMatrix.length = numDirectionalShadows;
state.pointShadowMatrix.length = numPointShadows;
state.spotShadowMatrix.length = numSpotShadows;
hash.directionalLength = directionalLength;
hash.pointLength = pointLength;
hash.spotLength = spotLength;
hash.rectAreaLength = rectAreaLength;
hash.hemiLength = hemiLength;
hash.numDirectionalShadows = numDirectionalShadows;
hash.numPointShadows = numPointShadows;
hash.numSpotShadows = numSpotShadows;
state.version = nextVersion ++;
}
}
function setupView( lights, camera ) {
let directionalLength = 0;
let pointLength = 0;
let spotLength = 0;
let rectAreaLength = 0;
let hemiLength = 0;
const viewMatrix = camera.matrixWorldInverse;
for ( let i = 0, l = lights.length; i < l; i ++ ) {
const light = lights[ i ];
if ( light.isDirectionalLight ) {
const uniforms = state.directional[ directionalLength ];
uniforms.direction.setFromMatrixPosition( light.matrixWorld );
vector3.setFromMatrixPosition( light.target.matrixWorld );
uniforms.direction.sub( vector3 );
uniforms.direction.transformDirection( viewMatrix );
directionalLength ++;
} else if ( light.isSpotLight ) {
const uniforms = state.spot[ spotLength ];
uniforms.position.setFromMatrixPosition( light.matrixWorld );
uniforms.position.applyMatrix4( viewMatrix );
uniforms.direction.setFromMatrixPosition( light.matrixWorld );
vector3.setFromMatrixPosition( light.target.matrixWorld );
uniforms.direction.sub( vector3 );
uniforms.direction.transformDirection( viewMatrix );
spotLength ++;
} else if ( light.isRectAreaLight ) {
const uniforms = state.rectArea[ rectAreaLength ];
uniforms.position.setFromMatrixPosition( light.matrixWorld );
uniforms.position.applyMatrix4( viewMatrix );
// extract local rotation of light to derive width/height half vectors
matrix42.identity();
matrix4.copy( light.matrixWorld );
matrix4.premultiply( viewMatrix );
matrix42.extractRotation( matrix4 );
uniforms.halfWidth.set( light.width * 0.5, 0.0, 0.0 );
uniforms.halfHeight.set( 0.0, light.height * 0.5, 0.0 );
uniforms.halfWidth.applyMatrix4( matrix42 );
uniforms.halfHeight.applyMatrix4( matrix42 );
rectAreaLength ++;
} else if ( light.isPointLight ) {
const uniforms = state.point[ pointLength ];
uniforms.position.setFromMatrixPosition( light.matrixWorld );
uniforms.position.applyMatrix4( viewMatrix );
pointLength ++;
} else if ( light.isHemisphereLight ) {
const uniforms = state.hemi[ hemiLength ];
uniforms.direction.setFromMatrixPosition( light.matrixWorld );
uniforms.direction.transformDirection( viewMatrix );
uniforms.direction.normalize();
hemiLength ++;
}
}
}
return {
setup: setup,
setupView: setupView,
state: state
};
}
function WebGLRenderState( extensions, capabilities ) {
const lights = new WebGLLights( extensions, capabilities );
const lightsArray = [];
const shadowsArray = [];
function init() {
lightsArray.length = 0;
shadowsArray.length = 0;
}
function pushLight( light ) {
lightsArray.push( light );
}
function pushShadow( shadowLight ) {
shadowsArray.push( shadowLight );
}
function setupLights() {
lights.setup( lightsArray );
}
function setupLightsView( camera ) {
lights.setupView( lightsArray, camera );
}
const state = {
lightsArray: lightsArray,
shadowsArray: shadowsArray,
lights: lights
};
return {
init: init,
state: state,
setupLights: setupLights,
setupLightsView: setupLightsView,
pushLight: pushLight,
pushShadow: pushShadow
};
}
function WebGLRenderStates( extensions, capabilities ) {
let renderStates = new WeakMap();
function get( scene, renderCallDepth = 0 ) {
let renderState;
if ( renderStates.has( scene ) === false ) {
renderState = new WebGLRenderState( extensions, capabilities );
renderStates.set( scene, [ renderState ] );
} else {
if ( renderCallDepth >= renderStates.get( scene ).length ) {
renderState = new WebGLRenderState( extensions, capabilities );
renderStates.get( scene ).push( renderState );
} else {
renderState = renderStates.get( scene )[ renderCallDepth ];
}
}
return renderState;
}
function dispose() {
renderStates = new WeakMap();
}
return {
get: get,
dispose: dispose
};
}
/**
* parameters = {
*
* opacity: <float>,
*
* map: new THREE.Texture( <Image> ),
*
* alphaMap: new THREE.Texture( <Image> ),
*
* displacementMap: new THREE.Texture( <Image> ),
* displacementScale: <float>,
* displacementBias: <float>,
*
* wireframe: <boolean>,
* wireframeLinewidth: <float>
* }
*/
class MeshDepthMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'MeshDepthMaterial';
this.depthPacking = BasicDepthPacking;
this.skinning = false;
this.morphTargets = false;
this.map = null;
this.alphaMap = null;
this.displacementMap = null;
this.displacementScale = 1;
this.displacementBias = 0;
this.wireframe = false;
this.wireframeLinewidth = 1;
this.fog = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.depthPacking = source.depthPacking;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
this.map = source.map;
this.alphaMap = source.alphaMap;
this.displacementMap = source.displacementMap;
this.displacementScale = source.displacementScale;
this.displacementBias = source.displacementBias;
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
return this;
}
}
MeshDepthMaterial.prototype.isMeshDepthMaterial = true;
/**
* parameters = {
*
* referencePosition: <float>,
* nearDistance: <float>,
* farDistance: <float>,
*
* skinning: <bool>,
* morphTargets: <bool>,
*
* map: new THREE.Texture( <Image> ),
*
* alphaMap: new THREE.Texture( <Image> ),
*
* displacementMap: new THREE.Texture( <Image> ),
* displacementScale: <float>,
* displacementBias: <float>
*
* }
*/
class MeshDistanceMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'MeshDistanceMaterial';
this.referencePosition = new Vector3();
this.nearDistance = 1;
this.farDistance = 1000;
this.skinning = false;
this.morphTargets = false;
this.map = null;
this.alphaMap = null;
this.displacementMap = null;
this.displacementScale = 1;
this.displacementBias = 0;
this.fog = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.referencePosition.copy( source.referencePosition );
this.nearDistance = source.nearDistance;
this.farDistance = source.farDistance;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
this.map = source.map;
this.alphaMap = source.alphaMap;
this.displacementMap = source.displacementMap;
this.displacementScale = source.displacementScale;
this.displacementBias = source.displacementBias;
return this;
}
}
MeshDistanceMaterial.prototype.isMeshDistanceMaterial = true;
var vsm_frag = "uniform sampler2D shadow_pass;\nuniform vec2 resolution;\nuniform float radius;\n#include <packing>\nvoid main() {\n\tfloat mean = 0.0;\n\tfloat squared_mean = 0.0;\n\tfloat depth = unpackRGBAToDepth( texture2D( shadow_pass, ( gl_FragCoord.xy ) / resolution ) );\n\tfor ( float i = -1.0; i < 1.0 ; i += SAMPLE_RATE) {\n\t\t#ifdef HORIZONTAL_PASS\n\t\t\tvec2 distribution = unpackRGBATo2Half( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( i, 0.0 ) * radius ) / resolution ) );\n\t\t\tmean += distribution.x;\n\t\t\tsquared_mean += distribution.y * distribution.y + distribution.x * distribution.x;\n\t\t#else\n\t\t\tfloat depth = unpackRGBAToDepth( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( 0.0, i ) * radius ) / resolution ) );\n\t\t\tmean += depth;\n\t\t\tsquared_mean += depth * depth;\n\t\t#endif\n\t}\n\tmean = mean * HALF_SAMPLE_RATE;\n\tsquared_mean = squared_mean * HALF_SAMPLE_RATE;\n\tfloat std_dev = sqrt( squared_mean - mean * mean );\n\tgl_FragColor = pack2HalfToRGBA( vec2( mean, std_dev ) );\n}";
var vsm_vert = "void main() {\n\tgl_Position = vec4( position, 1.0 );\n}";
function WebGLShadowMap( _renderer, _objects, _capabilities ) {
let _frustum = new Frustum();
const _shadowMapSize = new Vector2(),
_viewportSize = new Vector2(),
_viewport = new Vector4(),
_depthMaterials = [],
_distanceMaterials = [],
_materialCache = {},
_maxTextureSize = _capabilities.maxTextureSize;
const shadowSide = { 0: BackSide, 1: FrontSide, 2: DoubleSide };
const shadowMaterialVertical = new ShaderMaterial( {
defines: {
SAMPLE_RATE: 2.0 / 8.0,
HALF_SAMPLE_RATE: 1.0 / 8.0
},
uniforms: {
shadow_pass: { value: null },
resolution: { value: new Vector2() },
radius: { value: 4.0 }
},
vertexShader: vsm_vert,
fragmentShader: vsm_frag
} );
const shadowMaterialHorizontal = shadowMaterialVertical.clone();
shadowMaterialHorizontal.defines.HORIZONTAL_PASS = 1;
const fullScreenTri = new BufferGeometry();
fullScreenTri.setAttribute(
'position',
new BufferAttribute(
new Float32Array( [ - 1, - 1, 0.5, 3, - 1, 0.5, - 1, 3, 0.5 ] ),
3
)
);
const fullScreenMesh = new Mesh( fullScreenTri, shadowMaterialVertical );
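// The blur passes draw one oversized triangle with NDC corners ( -1, -1 ),
// ( 3, -1 ) and ( -1, 3 ): a single primitive that fully covers the [ -1, 1 ]
// clip-space square, so no quad (and no diagonal seam) is needed for the
// full-screen pass.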
const scope = this;
this.enabled = false;
this.autoUpdate = true;
this.needsUpdate = false;
this.type = PCFShadowMap;
this.render = function ( lights, scene, camera ) {
if ( scope.enabled === false ) return;
if ( scope.autoUpdate === false && scope.needsUpdate === false ) return;
if ( lights.length === 0 ) return;
const currentRenderTarget = _renderer.getRenderTarget();
const activeCubeFace = _renderer.getActiveCubeFace();
const activeMipmapLevel = _renderer.getActiveMipmapLevel();
const _state = _renderer.state;
// Set GL state for depth map.
_state.setBlending( NoBlending );
_state.buffers.color.setClear( 1, 1, 1, 1 );
_state.buffers.depth.setTest( true );
_state.setScissorTest( false );
// render depth map
for ( let i = 0, il = lights.length; i < il; i ++ ) {
const light = lights[ i ];
const shadow = light.shadow;
if ( shadow === undefined ) {
console.warn( 'THREE.WebGLShadowMap:', light, 'has no shadow.' );
continue;
}
if ( shadow.autoUpdate === false && shadow.needsUpdate === false ) continue;
_shadowMapSize.copy( shadow.mapSize );
const shadowFrameExtents = shadow.getFrameExtents();
_shadowMapSize.multiply( shadowFrameExtents );
_viewportSize.copy( shadow.mapSize );
if ( _shadowMapSize.x > _maxTextureSize || _shadowMapSize.y > _maxTextureSize ) {
if ( _shadowMapSize.x > _maxTextureSize ) {
_viewportSize.x = Math.floor( _maxTextureSize / shadowFrameExtents.x );
_shadowMapSize.x = _viewportSize.x * shadowFrameExtents.x;
shadow.mapSize.x = _viewportSize.x;
}
if ( _shadowMapSize.y > _maxTextureSize ) {
_viewportSize.y = Math.floor( _maxTextureSize / shadowFrameExtents.y );
_shadowMapSize.y = _viewportSize.y * shadowFrameExtents.y;
shadow.mapSize.y = _viewportSize.y;
}
}
if ( shadow.map === null && ! shadow.isPointLightShadow && this.type === VSMShadowMap ) {
const pars = { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat };
shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, pars );
shadow.map.texture.name = light.name + '.shadowMap';
shadow.mapPass = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, pars );
shadow.camera.updateProjectionMatrix();
}
if ( shadow.map === null ) {
const pars = { minFilter: NearestFilter, magFilter: NearestFilter, format: RGBAFormat };
shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, pars );
shadow.map.texture.name = light.name + '.shadowMap';
shadow.camera.updateProjectionMatrix();
}
_renderer.setRenderTarget( shadow.map );
_renderer.clear();
const viewportCount = shadow.getViewportCount();
for ( let vp = 0; vp < viewportCount; vp ++ ) {
const viewport = shadow.getViewport( vp );
_viewport.set(
_viewportSize.x * viewport.x,
_viewportSize.y * viewport.y,
_viewportSize.x * viewport.z,
_viewportSize.y * viewport.w
);
_state.viewport( _viewport );
shadow.updateMatrices( light, vp );
_frustum = shadow.getFrustum();
renderObject( scene, camera, shadow.camera, light, this.type );
}
// do blur pass for VSM
if ( ! shadow.isPointLightShadow && this.type === VSMShadowMap ) {
VSMPass( shadow, camera );
}
shadow.needsUpdate = false;
}
scope.needsUpdate = false;
_renderer.setRenderTarget( currentRenderTarget, activeCubeFace, activeMipmapLevel );
};
function VSMPass( shadow, camera ) {
const geometry = _objects.update( fullScreenMesh );
// vertical pass
shadowMaterialVertical.uniforms.shadow_pass.value = shadow.map.texture;
shadowMaterialVertical.uniforms.resolution.value = shadow.mapSize;
shadowMaterialVertical.uniforms.radius.value = shadow.radius;
_renderer.setRenderTarget( shadow.mapPass );
_renderer.clear();
_renderer.renderBufferDirect( camera, null, geometry, shadowMaterialVertical, fullScreenMesh, null );
// horizontal pass
shadowMaterialHorizontal.uniforms.shadow_pass.value = shadow.mapPass.texture;
shadowMaterialHorizontal.uniforms.resolution.value = shadow.mapSize;
shadowMaterialHorizontal.uniforms.radius.value = shadow.radius;
_renderer.setRenderTarget( shadow.map );
_renderer.clear();
_renderer.renderBufferDirect( camera, null, geometry, shadowMaterialHorizontal, fullScreenMesh, null );
}
function getDepthMaterialVariant( useMorphing, useSkinning, useInstancing ) {
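// Variants are cached by a 3-bit key: bit 0 = morph targets, bit 1 = skinning,
// bit 2 = instancing (e.g. a skinned, morphing, non-instanced mesh maps to index 0b011 = 3).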
const index = useMorphing << 0 | useSkinning << 1 | useInstancing << 2;
let material = _depthMaterials[ index ];
if ( material === undefined ) {
material = new MeshDepthMaterial( {
depthPacking: RGBADepthPacking,
morphTargets: useMorphing,
skinning: useSkinning
} );
_depthMaterials[ index ] = material;
}
return material;
}
function getDistanceMaterialVariant( useMorphing, useSkinning, useInstancing ) {
const index = useMorphing << 0 | useSkinning << 1 | useInstancing << 2;
let material = _distanceMaterials[ index ];
if ( material === undefined ) {
material = new MeshDistanceMaterial( {
morphTargets: useMorphing,
skinning: useSkinning
} );
_distanceMaterials[ index ] = material;
}
return material;
}
function getDepthMaterial( object, geometry, material, light, shadowCameraNear, shadowCameraFar, type ) {
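// Picks the material used to render this object into the shadow map (a distance
// material for point lights), honors customDepthMaterial / customDistanceMaterial,
// and clones a per-material instance when clipped shadows need unique clipping-plane state.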
let result = null;
let getMaterialVariant = getDepthMaterialVariant;
let customMaterial = object.customDepthMaterial;
if ( light.isPointLight === true ) {
getMaterialVariant = getDistanceMaterialVariant;
customMaterial = object.customDistanceMaterial;
}
if ( customMaterial === undefined ) {
let useMorphing = false;
if ( material.morphTargets === true ) {
useMorphing = geometry.morphAttributes && geometry.morphAttributes.position && geometry.morphAttributes.position.length > 0;
}
let useSkinning = false;
if ( object.isSkinnedMesh === true ) {
if ( material.skinning === true ) {
useSkinning = true;
} else {
console.warn( 'THREE.WebGLShadowMap: THREE.SkinnedMesh with material.skinning set to false:', object );
}
}
const useInstancing = object.isInstancedMesh === true;
result = getMaterialVariant( useMorphing, useSkinning, useInstancing );
} else {
result = customMaterial;
}
if ( _renderer.localClippingEnabled &&
material.clipShadows === true &&
material.clippingPlanes.length !== 0 ) {
// in this case we need a unique material instance reflecting the
// appropriate state
const keyA = result.uuid, keyB = material.uuid;
let materialsForVariant = _materialCache[ keyA ];
if ( materialsForVariant === undefined ) {
materialsForVariant = {};
_materialCache[ keyA ] = materialsForVariant;
}
let cachedMaterial = materialsForVariant[ keyB ];
if ( cachedMaterial === undefined ) {
cachedMaterial = result.clone();
materialsForVariant[ keyB ] = cachedMaterial;
}
result = cachedMaterial;
}
result.visible = material.visible;
result.wireframe = material.wireframe;
if ( type === VSMShadowMap ) {
result.side = ( material.shadowSide !== null ) ? material.shadowSide : material.side;
} else {
result.side = ( material.shadowSide !== null ) ? material.shadowSide : shadowSide[ material.side ];
}
result.clipShadows = material.clipShadows;
result.clippingPlanes = material.clippingPlanes;
result.clipIntersection = material.clipIntersection;
result.wireframeLinewidth = material.wireframeLinewidth;
result.linewidth = material.linewidth;
if ( light.isPointLight === true && result.isMeshDistanceMaterial === true ) {
result.referencePosition.setFromMatrixPosition( light.matrixWorld );
result.nearDistance = shadowCameraNear;
result.farDistance = shadowCameraFar;
}
return result;
}
function renderObject( object, camera, shadowCamera, light, type ) {
if ( object.visible === false ) return;
const visible = object.layers.test( camera.layers );
if ( visible && ( object.isMesh || object.isLine || object.isPoints ) ) {
if ( ( object.castShadow || ( object.receiveShadow && type === VSMShadowMap ) ) && ( ! object.frustumCulled || _frustum.intersectsObject( object ) ) ) {
object.modelViewMatrix.multiplyMatrices( shadowCamera.matrixWorldInverse, object.matrixWorld );
const geometry = _objects.update( object );
const material = object.material;
if ( Array.isArray( material ) ) {
const groups = geometry.groups;
for ( let k = 0, kl = groups.length; k < kl; k ++ ) {
const group = groups[ k ];
const groupMaterial = material[ group.materialIndex ];
if ( groupMaterial && groupMaterial.visible ) {
const depthMaterial = getDepthMaterial( object, geometry, groupMaterial, light, shadowCamera.near, shadowCamera.far, type );
_renderer.renderBufferDirect( shadowCamera, null, geometry, depthMaterial, object, group );
}
}
} else if ( material.visible ) {
const depthMaterial = getDepthMaterial( object, geometry, material, light, shadowCamera.near, shadowCamera.far, type );
_renderer.renderBufferDirect( shadowCamera, null, geometry, depthMaterial, object, null );
}
}
}
const children = object.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
renderObject( children[ i ], camera, shadowCamera, light, type );
}
}
}
function WebGLState( gl, extensions, capabilities ) {
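// Caches WebGL state on the JS side so redundant gl calls can be skipped.
// Numeric literals throughout are raw GLenum values, e.g. 2929 = gl.DEPTH_TEST,
// 3042 = gl.BLEND, 2884 = gl.CULL_FACE, 3089 = gl.SCISSOR_TEST, 2960 = gl.STENCIL_TEST.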
const isWebGL2 = capabilities.isWebGL2;
function ColorBuffer() {
let locked = false;
const color = new Vector4();
let currentColorMask = null;
const currentColorClear = new Vector4( 0, 0, 0, 0 );
return {
setMask: function ( colorMask ) {
if ( currentColorMask !== colorMask && ! locked ) {
gl.colorMask( colorMask, colorMask, colorMask, colorMask );
currentColorMask = colorMask;
}
},
setLocked: function ( lock ) {
locked = lock;
},
setClear: function ( r, g, b, a, premultipliedAlpha ) {
if ( premultipliedAlpha === true ) {
r *= a; g *= a; b *= a;
}
color.set( r, g, b, a );
if ( currentColorClear.equals( color ) === false ) {
gl.clearColor( r, g, b, a );
currentColorClear.copy( color );
}
},
reset: function () {
locked = false;
currentColorMask = null;
currentColorClear.set( - 1, 0, 0, 0 ); // set to invalid state
}
};
}
function DepthBuffer() {
let locked = false;
let currentDepthMask = null;
let currentDepthFunc = null;
let currentDepthClear = null;
return {
setTest: function ( depthTest ) {
if ( depthTest ) {
enable( 2929 );
} else {
disable( 2929 );
}
},
setMask: function ( depthMask ) {
if ( currentDepthMask !== depthMask && ! locked ) {
gl.depthMask( depthMask );
currentDepthMask = depthMask;
}
},
setFunc: function ( depthFunc ) {
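// Raw GLenum depth functions: 512 NEVER, 513 LESS, 514 EQUAL, 515 LEQUAL,
// 516 GREATER, 517 NOTEQUAL, 518 GEQUAL, 519 ALWAYS (LEQUAL is the fallback).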
if ( currentDepthFunc !== depthFunc ) {
if ( depthFunc ) {
switch ( depthFunc ) {
case NeverDepth:
gl.depthFunc( 512 );
break;
case AlwaysDepth:
gl.depthFunc( 519 );
break;
case LessDepth:
gl.depthFunc( 513 );
break;
case LessEqualDepth:
gl.depthFunc( 515 );
break;
case EqualDepth:
gl.depthFunc( 514 );
break;
case GreaterEqualDepth:
gl.depthFunc( 518 );
break;
case GreaterDepth:
gl.depthFunc( 516 );
break;
case NotEqualDepth:
gl.depthFunc( 517 );
break;
default:
gl.depthFunc( 515 );
}
} else {
gl.depthFunc( 515 );
}
currentDepthFunc = depthFunc;
}
},
setLocked: function ( lock ) {
locked = lock;
},
setClear: function ( depth ) {
if ( currentDepthClear !== depth ) {
gl.clearDepth( depth );
currentDepthClear = depth;
}
},
reset: function () {
locked = false;
currentDepthMask = null;
currentDepthFunc = null;
currentDepthClear = null;
}
};
}
function StencilBuffer() {
let locked = false;
let currentStencilMask = null;
let currentStencilFunc = null;
let currentStencilRef = null;
let currentStencilFuncMask = null;
let currentStencilFail = null;
let currentStencilZFail = null;
let currentStencilZPass = null;
let currentStencilClear = null;
return {
setTest: function ( stencilTest ) {
if ( ! locked ) {
if ( stencilTest ) {
enable( 2960 );
} else {
disable( 2960 );
}
}
},
setMask: function ( stencilMask ) {
if ( currentStencilMask !== stencilMask && ! locked ) {
gl.stencilMask( stencilMask );
currentStencilMask = stencilMask;
}
},
setFunc: function ( stencilFunc, stencilRef, stencilMask ) {
if ( currentStencilFunc !== stencilFunc ||
currentStencilRef !== stencilRef ||
currentStencilFuncMask !== stencilMask ) {
gl.stencilFunc( stencilFunc, stencilRef, stencilMask );
currentStencilFunc = stencilFunc;
currentStencilRef = stencilRef;
currentStencilFuncMask = stencilMask;
}
},
setOp: function ( stencilFail, stencilZFail, stencilZPass ) {
if ( currentStencilFail !== stencilFail ||
currentStencilZFail !== stencilZFail ||
currentStencilZPass !== stencilZPass ) {
gl.stencilOp( stencilFail, stencilZFail, stencilZPass );
currentStencilFail = stencilFail;
currentStencilZFail = stencilZFail;
currentStencilZPass = stencilZPass;
}
},
setLocked: function ( lock ) {
locked = lock;
},
setClear: function ( stencil ) {
if ( currentStencilClear !== stencil ) {
gl.clearStencil( stencil );
currentStencilClear = stencil;
}
},
reset: function () {
locked = false;
currentStencilMask = null;
currentStencilFunc = null;
currentStencilRef = null;
currentStencilFuncMask = null;
currentStencilFail = null;
currentStencilZFail = null;
currentStencilZPass = null;
currentStencilClear = null;
}
};
}
//
const colorBuffer = new ColorBuffer();
const depthBuffer = new DepthBuffer();
const stencilBuffer = new StencilBuffer();
let enabledCapabilities = {};
let xrFramebuffer = null;
let currentBoundFramebuffers = {};
let currentProgram = null;
let currentBlendingEnabled = false;
let currentBlending = null;
let currentBlendEquation = null;
let currentBlendSrc = null;
let currentBlendDst = null;
let currentBlendEquationAlpha = null;
let currentBlendSrcAlpha = null;
let currentBlendDstAlpha = null;
let currentPremultipledAlpha = false;
let currentFlipSided = null;
let currentCullFace = null;
let currentLineWidth = null;
let currentPolygonOffsetFactor = null;
let currentPolygonOffsetUnits = null;
const maxTextures = gl.getParameter( 35661 );
let lineWidthAvailable = false;
let version = 0;
const glVersion = gl.getParameter( 7938 );
if ( glVersion.indexOf( 'WebGL' ) !== - 1 ) {
version = parseFloat( /^WebGL (\d)/.exec( glVersion )[ 1 ] );
lineWidthAvailable = ( version >= 1.0 );
} else if ( glVersion.indexOf( 'OpenGL ES' ) !== - 1 ) {
version = parseFloat( /^OpenGL ES (\d)/.exec( glVersion )[ 1 ] );
lineWidthAvailable = ( version >= 2.0 );
}
let currentTextureSlot = null;
let currentBoundTextures = {};
const currentScissor = new Vector4( 0, 0, gl.canvas.width, gl.canvas.height );
const currentViewport = new Vector4( 0, 0, gl.canvas.width, gl.canvas.height );
function createTexture( type, target, count ) {
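// Creates a 1x1 placeholder texture (see emptyTextures below) so that binding a
// null texture still leaves something valid bound to the texture unit.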
const data = new Uint8Array( 4 ); // 4 is required to match default unpack alignment of 4.
const texture = gl.createTexture();
gl.bindTexture( type, texture );
gl.texParameteri( type, 10241, 9728 );
gl.texParameteri( type, 10240, 9728 );
for ( let i = 0; i < count; i ++ ) {
gl.texImage2D( target + i, 0, 6408, 1, 1, 0, 6408, 5121, data );
}
return texture;
}
const emptyTextures = {};
emptyTextures[ 3553 ] = createTexture( 3553, 3553, 1 );
emptyTextures[ 34067 ] = createTexture( 34067, 34069, 6 );
// init
colorBuffer.setClear( 0, 0, 0, 1 );
depthBuffer.setClear( 1 );
stencilBuffer.setClear( 0 );
enable( 2929 );
depthBuffer.setFunc( LessEqualDepth );
setFlipSided( false );
setCullFace( CullFaceBack );
enable( 2884 );
setBlending( NoBlending );
//
function enable( id ) {
if ( enabledCapabilities[ id ] !== true ) {
gl.enable( id );
enabledCapabilities[ id ] = true;
}
}
function disable( id ) {
if ( enabledCapabilities[ id ] !== false ) {
gl.disable( id );
enabledCapabilities[ id ] = false;
}
}
function bindXRFramebuffer( framebuffer ) {
if ( framebuffer !== xrFramebuffer ) {
gl.bindFramebuffer( 36160, framebuffer );
xrFramebuffer = framebuffer;
}
}
function bindFramebuffer( target, framebuffer ) {
if ( framebuffer === null && xrFramebuffer !== null ) framebuffer = xrFramebuffer; // use active XR framebuffer if available
if ( currentBoundFramebuffers[ target ] !== framebuffer ) {
gl.bindFramebuffer( target, framebuffer );
currentBoundFramebuffers[ target ] = framebuffer;
if ( isWebGL2 ) {
// 36009 (gl.DRAW_FRAMEBUFFER) is treated as equivalent to 36160 (gl.FRAMEBUFFER):
// in WebGL2 binding either target updates the draw framebuffer, so keep both cache entries in sync
if ( target === 36009 ) {
currentBoundFramebuffers[ 36160 ] = framebuffer;
}
if ( target === 36160 ) {
currentBoundFramebuffers[ 36009 ] = framebuffer;
}
}
}
}
function useProgram( program ) {
if ( currentProgram !== program ) {
gl.useProgram( program );
currentProgram = program;
return true;
}
return false;
}
const equationToGL = {
[ AddEquation ]: 32774,
[ SubtractEquation ]: 32778,
[ ReverseSubtractEquation ]: 32779
};
if ( isWebGL2 ) {
equationToGL[ MinEquation ] = 32775;
equationToGL[ MaxEquation ] = 32776;
} else {
const extension = extensions.get( 'EXT_blend_minmax' );
if ( extension !== null ) {
equationToGL[ MinEquation ] = extension.MIN_EXT;
equationToGL[ MaxEquation ] = extension.MAX_EXT;
}
}
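// Blend factors mapped to raw GLenum values (e.g. 770 = gl.SRC_ALPHA,
// 771 = gl.ONE_MINUS_SRC_ALPHA, 768 = gl.SRC_COLOR, 1 = gl.ONE, 0 = gl.ZERO).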
const factorToGL = {
[ ZeroFactor ]: 0,
[ OneFactor ]: 1,
[ SrcColorFactor ]: 768,
[ SrcAlphaFactor ]: 770,
[ SrcAlphaSaturateFactor ]: 776,
[ DstColorFactor ]: 774,
[ DstAlphaFactor ]: 772,
[ OneMinusSrcColorFactor ]: 769,
[ OneMinusSrcAlphaFactor ]: 771,
[ OneMinusDstColorFactor ]: 775,
[ OneMinusDstAlphaFactor ]: 773
};
function setBlending( blending, blendEquation, blendSrc, blendDst, blendEquationAlpha, blendSrcAlpha, blendDstAlpha, premultipliedAlpha ) {
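// NoBlending disables GL blending entirely; the built-in modes (Normal, Additive,
// Subtractive, Multiply) use fixed blendFunc presets, while CustomBlending falls
// through to the separate equation/factor path below.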
if ( blending === NoBlending ) {
if ( currentBlendingEnabled === true ) {
disable( 3042 );
currentBlendingEnabled = false;
}
return;
}
if ( currentBlendingEnabled === false ) {
enable( 3042 );
currentBlendingEnabled = true;
}
if ( blending !== CustomBlending ) {
if ( blending !== currentBlending || premultipliedAlpha !== currentPremultipledAlpha ) {
if ( currentBlendEquation !== AddEquation || currentBlendEquationAlpha !== AddEquation ) {
gl.blendEquation( 32774 );
currentBlendEquation = AddEquation;
currentBlendEquationAlpha = AddEquation;
}
if ( premultipliedAlpha ) {
switch ( blending ) {
case NormalBlending:
gl.blendFuncSeparate( 1, 771, 1, 771 );
break;
case AdditiveBlending:
gl.blendFunc( 1, 1 );
break;
case SubtractiveBlending:
gl.blendFuncSeparate( 0, 0, 769, 771 );
break;
case MultiplyBlending:
gl.blendFuncSeparate( 0, 768, 0, 770 );
break;
default:
console.error( 'THREE.WebGLState: Invalid blending: ', blending );
break;
}
} else {
switch ( blending ) {
case NormalBlending:
gl.blendFuncSeparate( 770, 771, 1, 771 );
break;
case AdditiveBlending:
gl.blendFunc( 770, 1 );
break;
case SubtractiveBlending:
gl.blendFunc( 0, 769 );
break;
case MultiplyBlending:
gl.blendFunc( 0, 768 );
break;
default:
console.error( 'THREE.WebGLState: Invalid blending: ', blending );
break;
}
}
currentBlendSrc = null;
currentBlendDst = null;
currentBlendSrcAlpha = null;
currentBlendDstAlpha = null;
currentBlending = blending;
currentPremultipledAlpha = premultipliedAlpha;
}
return;
}
// custom blending
blendEquationAlpha = blendEquationAlpha || blendEquation;
blendSrcAlpha = blendSrcAlpha || blendSrc;
blendDstAlpha = blendDstAlpha || blendDst;
if ( blendEquation !== currentBlendEquation || blendEquationAlpha !== currentBlendEquationAlpha ) {
gl.blendEquationSeparate( equationToGL[ blendEquation ], equationToGL[ blendEquationAlpha ] );
currentBlendEquation = blendEquation;
currentBlendEquationAlpha = blendEquationAlpha;
}
if ( blendSrc !== currentBlendSrc || blendDst !== currentBlendDst || blendSrcAlpha !== currentBlendSrcAlpha || blendDstAlpha !== currentBlendDstAlpha ) {
gl.blendFuncSeparate( factorToGL[ blendSrc ], factorToGL[ blendDst ], factorToGL[ blendSrcAlpha ], factorToGL[ blendDstAlpha ] );
currentBlendSrc = blendSrc;
currentBlendDst = blendDst;
currentBlendSrcAlpha = blendSrcAlpha;
currentBlendDstAlpha = blendDstAlpha;
}
currentBlending = blending;
currentPremultipledAlpha = null;
}
function setMaterial( material, frontFaceCW ) {
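// Applies a material's fixed-function state in one place: face culling, winding
// order, blending, depth test/write, color mask, stencil, polygon offset and
// alpha-to-coverage (32926 = gl.SAMPLE_ALPHA_TO_COVERAGE).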
material.side === DoubleSide
? disable( 2884 )
: enable( 2884 );
let flipSided = ( material.side === BackSide );
if ( frontFaceCW ) flipSided = ! flipSided;
setFlipSided( flipSided );
( material.blending === NormalBlending && material.transparent === false )
? setBlending( NoBlending )
: setBlending( material.blending, material.blendEquation, material.blendSrc, material.blendDst, material.blendEquationAlpha, material.blendSrcAlpha, material.blendDstAlpha, material.premultipliedAlpha );
depthBuffer.setFunc( material.depthFunc );
depthBuffer.setTest( material.depthTest );
depthBuffer.setMask( material.depthWrite );
colorBuffer.setMask( material.colorWrite );
const stencilWrite = material.stencilWrite;
stencilBuffer.setTest( stencilWrite );
if ( stencilWrite ) {
stencilBuffer.setMask( material.stencilWriteMask );
stencilBuffer.setFunc( material.stencilFunc, material.stencilRef, material.stencilFuncMask );
stencilBuffer.setOp( material.stencilFail, material.stencilZFail, material.stencilZPass );
}
setPolygonOffset( material.polygonOffset, material.polygonOffsetFactor, material.polygonOffsetUnits );
material.alphaToCoverage === true
? enable( 32926 )
: disable( 32926 );
}
//
function setFlipSided( flipSided ) {
if ( currentFlipSided !== flipSided ) {
if ( flipSided ) {
gl.frontFace( 2304 );
} else {
gl.frontFace( 2305 );
}
currentFlipSided = flipSided;
}
}
function setCullFace( cullFace ) {
if ( cullFace !== CullFaceNone ) {
enable( 2884 );
if ( cullFace !== currentCullFace ) {
if ( cullFace === CullFaceBack ) {
gl.cullFace( 1029 );
} else if ( cullFace === CullFaceFront ) {
gl.cullFace( 1028 );
} else {
gl.cullFace( 1032 );
}
}
} else {
disable( 2884 );
}
currentCullFace = cullFace;
}
function setLineWidth( width ) {
if ( width !== currentLineWidth ) {
if ( lineWidthAvailable ) gl.lineWidth( width );
currentLineWidth = width;
}
}
function setPolygonOffset( polygonOffset, factor, units ) {
if ( polygonOffset ) {
enable( 32823 );
if ( currentPolygonOffsetFactor !== factor || currentPolygonOffsetUnits !== units ) {
gl.polygonOffset( factor, units );
currentPolygonOffsetFactor = factor;
currentPolygonOffsetUnits = units;
}
} else {
disable( 32823 );
}
}
function setScissorTest( scissorTest ) {
if ( scissorTest ) {
enable( 3089 );
} else {
disable( 3089 );
}
}
// texture
function activeTexture( webglSlot ) {
if ( webglSlot === undefined ) webglSlot = 33984 + maxTextures - 1;
if ( currentTextureSlot !== webglSlot ) {
gl.activeTexture( webglSlot );
currentTextureSlot = webglSlot;
}
}
function bindTexture( webglType, webglTexture ) {
if ( currentTextureSlot === null ) {
activeTexture();
}
let boundTexture = currentBoundTextures[ currentTextureSlot ];
if ( boundTexture === undefined ) {
boundTexture = { type: undefined, texture: undefined };
currentBoundTextures[ currentTextureSlot ] = boundTexture;
}
if ( boundTexture.type !== webglType || boundTexture.texture !== webglTexture ) {
gl.bindTexture( webglType, webglTexture || emptyTextures[ webglType ] );
boundTexture.type = webglType;
boundTexture.texture = webglTexture;
}
}
function unbindTexture() {
const boundTexture = currentBoundTextures[ currentTextureSlot ];
if ( boundTexture !== undefined && boundTexture.type !== undefined ) {
gl.bindTexture( boundTexture.type, null );
boundTexture.type = undefined;
boundTexture.texture = undefined;
}
}
function compressedTexImage2D() {
try {
gl.compressedTexImage2D.apply( gl, arguments );
} catch ( error ) {
console.error( 'THREE.WebGLState:', error );
}
}
function texImage2D() {
try {
gl.texImage2D.apply( gl, arguments );
} catch ( error ) {
console.error( 'THREE.WebGLState:', error );
}
}
function texImage3D() {
try {
gl.texImage3D.apply( gl, arguments );
} catch ( error ) {
console.error( 'THREE.WebGLState:', error );
}
}
//
function scissor( scissor ) {
if ( currentScissor.equals( scissor ) === false ) {
gl.scissor( scissor.x, scissor.y, scissor.z, scissor.w );
currentScissor.copy( scissor );
}
}
function viewport( viewport ) {
if ( currentViewport.equals( viewport ) === false ) {
gl.viewport( viewport.x, viewport.y, viewport.z, viewport.w );
currentViewport.copy( viewport );
}
}
//
function reset() {
// reset state
gl.disable( 3042 );
gl.disable( 2884 );
gl.disable( 2929 );
gl.disable( 32823 );
gl.disable( 3089 );
gl.disable( 2960 );
gl.disable( 32926 );
gl.blendEquation( 32774 );
gl.blendFunc( 1, 0 );
gl.blendFuncSeparate( 1, 0, 1, 0 );
gl.colorMask( true, true, true, true );
gl.clearColor( 0, 0, 0, 0 );
gl.depthMask( true );
gl.depthFunc( 513 );
gl.clearDepth( 1 );
gl.stencilMask( 0xffffffff );
gl.stencilFunc( 519, 0, 0xffffffff );
gl.stencilOp( 7680, 7680, 7680 );
gl.clearStencil( 0 );
gl.cullFace( 1029 );
gl.frontFace( 2305 );
gl.polygonOffset( 0, 0 );
gl.activeTexture( 33984 );
gl.bindFramebuffer( 36160, null );
if ( isWebGL2 === true ) {
gl.bindFramebuffer( 36009, null );
gl.bindFramebuffer( 36008, null );
}
gl.useProgram( null );
gl.lineWidth( 1 );
gl.scissor( 0, 0, gl.canvas.width, gl.canvas.height );
gl.viewport( 0, 0, gl.canvas.width, gl.canvas.height );
// reset internals
enabledCapabilities = {};
currentTextureSlot = null;
currentBoundTextures = {};
xrFramebuffer = null;
currentBoundFramebuffers = {};
currentProgram = null;
currentBlendingEnabled = false;
currentBlending = null;
currentBlendEquation = null;
currentBlendSrc = null;
currentBlendDst = null;
currentBlendEquationAlpha = null;
currentBlendSrcAlpha = null;
currentBlendDstAlpha = null;
currentPremultipledAlpha = false;
currentFlipSided = null;
currentCullFace = null;
currentLineWidth = null;
currentPolygonOffsetFactor = null;
currentPolygonOffsetUnits = null;
currentScissor.set( 0, 0, gl.canvas.width, gl.canvas.height );
currentViewport.set( 0, 0, gl.canvas.width, gl.canvas.height );
colorBuffer.reset();
depthBuffer.reset();
stencilBuffer.reset();
}
return {
buffers: {
color: colorBuffer,
depth: depthBuffer,
stencil: stencilBuffer
},
enable: enable,
disable: disable,
bindFramebuffer: bindFramebuffer,
bindXRFramebuffer: bindXRFramebuffer,
useProgram: useProgram,
setBlending: setBlending,
setMaterial: setMaterial,
setFlipSided: setFlipSided,
setCullFace: setCullFace,
setLineWidth: setLineWidth,
setPolygonOffset: setPolygonOffset,
setScissorTest: setScissorTest,
activeTexture: activeTexture,
bindTexture: bindTexture,
unbindTexture: unbindTexture,
compressedTexImage2D: compressedTexImage2D,
texImage2D: texImage2D,
texImage3D: texImage3D,
scissor: scissor,
viewport: viewport,
reset: reset
};
}
function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info ) {
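// Handles texture uploads and render-target setup. Raw GLenum targets used below:
// 3553 = gl.TEXTURE_2D, 34067 = gl.TEXTURE_CUBE_MAP, 32879 = gl.TEXTURE_3D,
// 35866 = gl.TEXTURE_2D_ARRAY, 33984 = gl.TEXTURE0.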
const isWebGL2 = capabilities.isWebGL2;
const maxTextures = capabilities.maxTextures;
const maxCubemapSize = capabilities.maxCubemapSize;
const maxTextureSize = capabilities.maxTextureSize;
const maxSamples = capabilities.maxSamples;
const _videoTextures = new WeakMap();
let _canvas;
// Cordova iOS (as of 5.0) still uses UIWebView, which provides OffscreenCanvas
// and OffscreenCanvas.getContext("webgl"), but not OffscreenCanvas.getContext("2d").
// Some implementations may only implement OffscreenCanvas partially (e.g. lacking a 2d context).
let useOffscreenCanvas = false;
try {
useOffscreenCanvas = typeof OffscreenCanvas !== 'undefined'
&& ( new OffscreenCanvas( 1, 1 ).getContext( '2d' ) ) !== null;
} catch ( err ) {
// Ignore any errors
}
function createCanvas( width, height ) {
// Use OffscreenCanvas when available. Especially needed in web workers
return useOffscreenCanvas ?
new OffscreenCanvas( width, height ) :
document.createElementNS( 'http://www.w3.org/1999/xhtml', 'canvas' );
}
function resizeImage( image, needsPowerOfTwo, needsNewCanvas, maxSize ) {
let scale = 1;
// handle the case where the texture exceeds the max size
if ( image.width > maxSize || image.height > maxSize ) {
scale = maxSize / Math.max( image.width, image.height );
}
// only perform resize if necessary
if ( scale < 1 || needsPowerOfTwo === true ) {
// only perform resize for certain image types
if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) ||
( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) ||
( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) {
const floor = needsPowerOfTwo ? floorPowerOfTwo : Math.floor;
const width = floor( scale * image.width );
const height = floor( scale * image.height );
if ( _canvas === undefined ) _canvas = createCanvas( width, height );
// cube textures can't reuse the same canvas
const canvas = needsNewCanvas ? createCanvas( width, height ) : _canvas;
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0, width, height );
console.warn( 'THREE.WebGLRenderer: Texture has been resized from (' + image.width + 'x' + image.height + ') to (' + width + 'x' + height + ').' );
return canvas;
} else {
if ( 'data' in image ) {
console.warn( 'THREE.WebGLRenderer: Image in DataTexture is too big (' + image.width + 'x' + image.height + ').' );
}
return image;
}
}
return image;
}
function isPowerOfTwo$1( image ) {
return isPowerOfTwo( image.width ) && isPowerOfTwo( image.height );
}
function textureNeedsPowerOfTwo( texture ) {
if ( isWebGL2 ) return false;
return ( texture.wrapS !== ClampToEdgeWrapping || texture.wrapT !== ClampToEdgeWrapping ) ||
( texture.minFilter !== NearestFilter && texture.minFilter !== LinearFilter );
}
function textureNeedsGenerateMipmaps( texture, supportsMips ) {
return texture.generateMipmaps && supportsMips &&
texture.minFilter !== NearestFilter && texture.minFilter !== LinearFilter;
}
function generateMipmap( target, texture, width, height ) {
_gl.generateMipmap( target );
const textureProperties = properties.get( texture );
textureProperties.__maxMipLevel = Math.log2( Math.max( width, height ) );
}
function getInternalFormat( internalFormatName, glFormat, glType ) {
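// On WebGL2, pick a sized internal format from the unsized format + type,
// e.g. RGBA (6408) + FLOAT (5126) maps to RGBA32F (34836); WebGL1 just reuses
// the unsized format.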
if ( isWebGL2 === false ) return glFormat;
if ( internalFormatName !== null ) {
if ( _gl[ internalFormatName ] !== undefined ) return _gl[ internalFormatName ];
console.warn( 'THREE.WebGLRenderer: Attempt to use non-existing WebGL internal format \'' + internalFormatName + '\'' );
}
let internalFormat = glFormat;
if ( glFormat === 6403 ) {
if ( glType === 5126 ) internalFormat = 33326;
if ( glType === 5131 ) internalFormat = 33325;
if ( glType === 5121 ) internalFormat = 33321;
}
if ( glFormat === 6407 ) {
if ( glType === 5126 ) internalFormat = 34837;
if ( glType === 5131 ) internalFormat = 34843;
if ( glType === 5121 ) internalFormat = 32849;
}
if ( glFormat === 6408 ) {
if ( glType === 5126 ) internalFormat = 34836;
if ( glType === 5131 ) internalFormat = 34842;
if ( glType === 5121 ) internalFormat = 32856;
}
if ( internalFormat === 33325 || internalFormat === 33326 ||
internalFormat === 34842 || internalFormat === 34836 ) {
extensions.get( 'EXT_color_buffer_float' );
}
return internalFormat;
}
// Fallback filters for non-power-of-2 textures
function filterFallback( f ) {
if ( f === NearestFilter || f === NearestMipmapNearestFilter || f === NearestMipmapLinearFilter ) {
return 9728;
}
return 9729;
}
//
function onTextureDispose( event ) {
const texture = event.target;
texture.removeEventListener( 'dispose', onTextureDispose );
deallocateTexture( texture );
if ( texture.isVideoTexture ) {
_videoTextures.delete( texture );
}
info.memory.textures --;
}
function onRenderTargetDispose( event ) {
const renderTarget = event.target;
renderTarget.removeEventListener( 'dispose', onRenderTargetDispose );
deallocateRenderTarget( renderTarget );
info.memory.textures --;
}
//
function deallocateTexture( texture ) {
const textureProperties = properties.get( texture );
if ( textureProperties.__webglInit === undefined ) return;
_gl.deleteTexture( textureProperties.__webglTexture );
properties.remove( texture );
}
function deallocateRenderTarget( renderTarget ) {
const texture = renderTarget.texture;
const renderTargetProperties = properties.get( renderTarget );
const textureProperties = properties.get( texture );
if ( ! renderTarget ) return;
if ( textureProperties.__webglTexture !== undefined ) {
_gl.deleteTexture( textureProperties.__webglTexture );
}
if ( renderTarget.depthTexture ) {
renderTarget.depthTexture.dispose();
}
if ( renderTarget.isWebGLCubeRenderTarget ) {
for ( let i = 0; i < 6; i ++ ) {
_gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer[ i ] );
if ( renderTargetProperties.__webglDepthbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglDepthbuffer[ i ] );
}
} else {
_gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer );
if ( renderTargetProperties.__webglDepthbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglDepthbuffer );
if ( renderTargetProperties.__webglMultisampledFramebuffer ) _gl.deleteFramebuffer( renderTargetProperties.__webglMultisampledFramebuffer );
if ( renderTargetProperties.__webglColorRenderbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglColorRenderbuffer );
if ( renderTargetProperties.__webglDepthRenderbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglDepthRenderbuffer );
}
properties.remove( texture );
properties.remove( renderTarget );
}
//
let textureUnits = 0;
function resetTextureUnits() {
textureUnits = 0;
}
function allocateTextureUnit() {
const textureUnit = textureUnits;
if ( textureUnit >= maxTextures ) {
console.warn( 'THREE.WebGLTextures: Trying to use ' + textureUnit + ' texture units while this GPU supports only ' + maxTextures );
}
textureUnits += 1;
return textureUnit;
}
//
function setTexture2D( texture, slot ) {
const textureProperties = properties.get( texture );
if ( texture.isVideoTexture ) updateVideoTexture( texture );
if ( texture.version > 0 && textureProperties.__version !== texture.version ) {
const image = texture.image;
if ( image === undefined ) {
console.warn( 'THREE.WebGLRenderer: Texture marked for update but image is undefined' );
} else if ( image.complete === false ) {
console.warn( 'THREE.WebGLRenderer: Texture marked for update but image is incomplete' );
} else {
uploadTexture( textureProperties, texture, slot );
return;
}
}
state.activeTexture( 33984 + slot );
state.bindTexture( 3553, textureProperties.__webglTexture );
}
function setTexture2DArray( texture, slot ) {
const textureProperties = properties.get( texture );
if ( texture.version > 0 && textureProperties.__version !== texture.version ) {
uploadTexture( textureProperties, texture, slot );
return;
}
state.activeTexture( 33984 + slot );
state.bindTexture( 35866, textureProperties.__webglTexture );
}
function setTexture3D( texture, slot ) {
const textureProperties = properties.get( texture );
if ( texture.version > 0 && textureProperties.__version !== texture.version ) {
uploadTexture( textureProperties, texture, slot );
return;
}
state.activeTexture( 33984 + slot );
state.bindTexture( 32879, textureProperties.__webglTexture );
}
function setTextureCube( texture, slot ) {
const textureProperties = properties.get( texture );
if ( texture.version > 0 && textureProperties.__version !== texture.version ) {
uploadCubeTexture( textureProperties, texture, slot );
return;
}
state.activeTexture( 33984 + slot );
state.bindTexture( 34067, textureProperties.__webglTexture );
}
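// Wrapping / filter constants mapped to raw GLenums: 10497 = gl.REPEAT,
// 33071 = gl.CLAMP_TO_EDGE, 33648 = gl.MIRRORED_REPEAT, 9728 = gl.NEAREST,
// 9729 = gl.LINEAR, 9984-9987 = the mipmap filter variants.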
const wrappingToGL = {
[ RepeatWrapping ]: 10497,
[ ClampToEdgeWrapping ]: 33071,
[ MirroredRepeatWrapping ]: 33648
};
const filterToGL = {
[ NearestFilter ]: 9728,
[ NearestMipmapNearestFilter ]: 9984,
[ NearestMipmapLinearFilter ]: 9986,
[ LinearFilter ]: 9729,
[ LinearMipmapNearestFilter ]: 9985,
[ LinearMipmapLinearFilter ]: 9987
};
function setTextureParameters( textureType, texture, supportsMips ) {
if ( supportsMips ) {
_gl.texParameteri( textureType, 10242, wrappingToGL[ texture.wrapS ] );
_gl.texParameteri( textureType, 10243, wrappingToGL[ texture.wrapT ] );
if ( textureType === 32879 || textureType === 35866 ) {
_gl.texParameteri( textureType, 32882, wrappingToGL[ texture.wrapR ] );
}
_gl.texParameteri( textureType, 10240, filterToGL[ texture.magFilter ] );
_gl.texParameteri( textureType, 10241, filterToGL[ texture.minFilter ] );
} else {
_gl.texParameteri( textureType, 10242, 33071 );
_gl.texParameteri( textureType, 10243, 33071 );
if ( textureType === 32879 || textureType === 35866 ) {
_gl.texParameteri( textureType, 32882, 33071 );
}
if ( texture.wrapS !== ClampToEdgeWrapping || texture.wrapT !== ClampToEdgeWrapping ) {
console.warn( 'THREE.WebGLRenderer: Texture is not power of two. Texture.wrapS and Texture.wrapT should be set to THREE.ClampToEdgeWrapping.' );
}
_gl.texParameteri( textureType, 10240, filterFallback( texture.magFilter ) );
_gl.texParameteri( textureType, 10241, filterFallback( texture.minFilter ) );
if ( texture.minFilter !== NearestFilter && texture.minFilter !== LinearFilter ) {
console.warn( 'THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter should be set to THREE.NearestFilter or THREE.LinearFilter.' );
}
}
if ( extensions.has( 'EXT_texture_filter_anisotropic' ) === true ) {
const extension = extensions.get( 'EXT_texture_filter_anisotropic' );
if ( texture.type === FloatType && extensions.has( 'OES_texture_float_linear' ) === false ) return; // verify extension for WebGL 1 and WebGL 2
if ( isWebGL2 === false && ( texture.type === HalfFloatType && extensions.has( 'OES_texture_half_float_linear' ) === false ) ) return; // verify extension for WebGL 1 only
if ( texture.anisotropy > 1 || properties.get( texture ).__currentAnisotropy ) {
_gl.texParameterf( textureType, extension.TEXTURE_MAX_ANISOTROPY_EXT, Math.min( texture.anisotropy, capabilities.getMaxAnisotropy() ) );
properties.get( texture ).__currentAnisotropy = texture.anisotropy;
}
}
}
function initTexture( textureProperties, texture ) {
if ( textureProperties.__webglInit === undefined ) {
textureProperties.__webglInit = true;
texture.addEventListener( 'dispose', onTextureDispose );
textureProperties.__webglTexture = _gl.createTexture();
info.memory.textures ++;
}
}
function uploadTexture( textureProperties, texture, slot ) {
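// Upload path for 2D, 2D-array and 3D textures: bind the target, set pixel-store
// parameters, downscale NPOT images when required on WebGL1, then branch on the
// texture kind (depth, data, compressed, 2D array, 3D, or regular image/video/canvas).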
let textureType = 3553;
if ( texture.isDataTexture2DArray ) textureType = 35866;
if ( texture.isDataTexture3D ) textureType = 32879;
initTexture( textureProperties, texture );
state.activeTexture( 33984 + slot );
state.bindTexture( textureType, textureProperties.__webglTexture );
_gl.pixelStorei( 37440, texture.flipY );
_gl.pixelStorei( 37441, texture.premultiplyAlpha );
_gl.pixelStorei( 3317, texture.unpackAlignment );
_gl.pixelStorei( 37443, 0 );
const needsPowerOfTwo = textureNeedsPowerOfTwo( texture ) && isPowerOfTwo$1( texture.image ) === false;
const image = resizeImage( texture.image, needsPowerOfTwo, false, maxTextureSize );
const supportsMips = isPowerOfTwo$1( image ) || isWebGL2,
glFormat = utils.convert( texture.format );
let glType = utils.convert( texture.type ),
glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType );
setTextureParameters( textureType, texture, supportsMips );
let mipmap;
const mipmaps = texture.mipmaps;
if ( texture.isDepthTexture ) {
// populate depth texture with dummy data
glInternalFormat = 6402;
if ( isWebGL2 ) {
if ( texture.type === FloatType ) {
glInternalFormat = 36012;
} else if ( texture.type === UnsignedIntType ) {
glInternalFormat = 33190;
} else if ( texture.type === UnsignedInt248Type ) {
glInternalFormat = 35056;
} else {
glInternalFormat = 33189; // WebGL2 requires sized internalformat for glTexImage2D
}
} else {
if ( texture.type === FloatType ) {
console.error( 'WebGLRenderer: Floating point depth texture requires WebGL2.' );
}
}
// validation checks for WebGL 1
if ( texture.format === DepthFormat && glInternalFormat === 6402 ) {
// The error INVALID_OPERATION is generated by texImage2D if format and internalformat are
// DEPTH_COMPONENT and type is not UNSIGNED_SHORT or UNSIGNED_INT
// (https://www.khronos.org/registry/webgl/extensions/WEBGL_depth_texture/)
if ( texture.type !== UnsignedShortType && texture.type !== UnsignedIntType ) {
console.warn( 'THREE.WebGLRenderer: Use UnsignedShortType or UnsignedIntType for DepthFormat DepthTexture.' );
texture.type = UnsignedShortType;
glType = utils.convert( texture.type );
}
}
if ( texture.format === DepthStencilFormat && glInternalFormat === 6402 ) {
// Depth stencil textures need the DEPTH_STENCIL internal format
// (https://www.khronos.org/registry/webgl/extensions/WEBGL_depth_texture/)
glInternalFormat = 34041;
// The error INVALID_OPERATION is generated by texImage2D if format and internalformat are
// DEPTH_STENCIL and type is not UNSIGNED_INT_24_8_WEBGL.
// (https://www.khronos.org/registry/webgl/extensions/WEBGL_depth_texture/)
if ( texture.type !== UnsignedInt248Type ) {
console.warn( 'THREE.WebGLRenderer: Use UnsignedInt248Type for DepthStencilFormat DepthTexture.' );
texture.type = UnsignedInt248Type;
glType = utils.convert( texture.type );
}
}
//
state.texImage2D( 3553, 0, glInternalFormat, image.width, image.height, 0, glFormat, glType, null );
} else if ( texture.isDataTexture ) {
// use manually created mipmaps if available;
// otherwise upload the level-0 mipmap and let GL generate the remaining levels
if ( mipmaps.length > 0 && supportsMips ) {
for ( let i = 0, il = mipmaps.length; i < il; i ++ ) {
mipmap = mipmaps[ i ];
state.texImage2D( 3553, i, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data );
}
texture.generateMipmaps = false;
textureProperties.__maxMipLevel = mipmaps.length - 1;
} else {
state.texImage2D( 3553, 0, glInternalFormat, image.width, image.height, 0, glFormat, glType, image.data );
textureProperties.__maxMipLevel = 0;
}
} else if ( texture.isCompressedTexture ) {
for ( let i = 0, il = mipmaps.length; i < il; i ++ ) {
mipmap = mipmaps[ i ];
if ( texture.format !== RGBAFormat && texture.format !== RGBFormat ) {
if ( glFormat !== null ) {
state.compressedTexImage2D( 3553, i, glInternalFormat, mipmap.width, mipmap.height, 0, mipmap.data );
} else {
console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' );
}
} else {
state.texImage2D( 3553, i, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data );
}
}
textureProperties.__maxMipLevel = mipmaps.length - 1;
} else if ( texture.isDataTexture2DArray ) {
state.texImage3D( 35866, 0, glInternalFormat, image.width, image.height, image.depth, 0, glFormat, glType, image.data );
textureProperties.__maxMipLevel = 0;
} else if ( texture.isDataTexture3D ) {
state.texImage3D( 32879, 0, glInternalFormat, image.width, image.height, image.depth, 0, glFormat, glType, image.data );
textureProperties.__maxMipLevel = 0;
} else {
// regular Texture (image, video, canvas)
// use manually created mipmaps if available;
// otherwise upload the level-0 mipmap and let GL generate the remaining levels
if ( mipmaps.length > 0 && supportsMips ) {
for ( let i = 0, il = mipmaps.length; i < il; i ++ ) {
mipmap = mipmaps[ i ];
state.texImage2D( 3553, i, glInternalFormat, glFormat, glType, mipmap );
}
texture.generateMipmaps = false;
textureProperties.__maxMipLevel = mipmaps.length - 1;
} else {
state.texImage2D( 3553, 0, glInternalFormat, glFormat, glType, image );
textureProperties.__maxMipLevel = 0;
}
}
if ( textureNeedsGenerateMipmaps( texture, supportsMips ) ) {
generateMipmap( textureType, texture, image.width, image.height );
}
textureProperties.__version = texture.version;
if ( texture.onUpdate ) texture.onUpdate( texture );
}
function uploadCubeTexture( textureProperties, texture, slot ) {
if ( texture.image.length !== 6 ) return;
initTexture( textureProperties, texture );
state.activeTexture( 33984 + slot );
state.bindTexture( 34067, textureProperties.__webglTexture );
_gl.pixelStorei( 37440, texture.flipY );
_gl.pixelStorei( 37441, texture.premultiplyAlpha );
_gl.pixelStorei( 3317, texture.unpackAlignment );
_gl.pixelStorei( 37443, 0 );
const isCompressed = ( texture && ( texture.isCompressedTexture || texture.image[ 0 ].isCompressedTexture ) );
const isDataTexture = ( texture.image[ 0 ] && texture.image[ 0 ].isDataTexture );
const cubeImage = [];
for ( let i = 0; i < 6; i ++ ) {
if ( ! isCompressed && ! isDataTexture ) {
cubeImage[ i ] = resizeImage( texture.image[ i ], false, true, maxCubemapSize );
} else {
cubeImage[ i ] = isDataTexture ? texture.image[ i ].image : texture.image[ i ];
}
}
const image = cubeImage[ 0 ],
supportsMips = isPowerOfTwo$1( image ) || isWebGL2,
glFormat = utils.convert( texture.format ),
glType = utils.convert( texture.type ),
glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType );
setTextureParameters( 34067, texture, supportsMips );
let mipmaps;
if ( isCompressed ) {
for ( let i = 0; i < 6; i ++ ) {
mipmaps = cubeImage[ i ].mipmaps;
for ( let j = 0; j < mipmaps.length; j ++ ) {
const mipmap = mipmaps[ j ];
if ( texture.format !== RGBAFormat && texture.format !== RGBFormat ) {
if ( glFormat !== null ) {
state.compressedTexImage2D( 34069 + i, j, glInternalFormat, mipmap.width, mipmap.height, 0, mipmap.data );
} else {
console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .setTextureCube()' );
}
} else {
state.texImage2D( 34069 + i, j, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data );
}
}
}
textureProperties.__maxMipLevel = mipmaps.length - 1;
} else {
mipmaps = texture.mipmaps;
for ( let i = 0; i < 6; i ++ ) {
if ( isDataTexture ) {
state.texImage2D( 34069 + i, 0, glInternalFormat, cubeImage[ i ].width, cubeImage[ i ].height, 0, glFormat, glType, cubeImage[ i ].data );
for ( let j = 0; j < mipmaps.length; j ++ ) {
const mipmap = mipmaps[ j ];
const mipmapImage = mipmap.image[ i ].image;
state.texImage2D( 34069 + i, j + 1, glInternalFormat, mipmapImage.width, mipmapImage.height, 0, glFormat, glType, mipmapImage.data );
}
} else {
state.texImage2D( 34069 + i, 0, glInternalFormat, glFormat, glType, cubeImage[ i ] );
for ( let j = 0; j < mipmaps.length; j ++ ) {
const mipmap = mipmaps[ j ];
state.texImage2D( 34069 + i, j + 1, glInternalFormat, glFormat, glType, mipmap.image[ i ] );
}
}
}
textureProperties.__maxMipLevel = mipmaps.length;
}
if ( textureNeedsGenerateMipmaps( texture, supportsMips ) ) {
// We assume images for cube map have the same size.
generateMipmap( 34067, texture, image.width, image.height );
}
textureProperties.__version = texture.version;
if ( texture.onUpdate ) texture.onUpdate( texture );
}
// Render targets
// Setup storage for target texture and bind it to correct framebuffer
function setupFrameBufferTexture( framebuffer, renderTarget, attachment, textureTarget ) {
const texture = renderTarget.texture;
const glFormat = utils.convert( texture.format );
const glType = utils.convert( texture.type );
const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType );
if ( textureTarget === 32879 || textureTarget === 35866 ) {
state.texImage3D( textureTarget, 0, glInternalFormat, renderTarget.width, renderTarget.height, renderTarget.depth, 0, glFormat, glType, null );
} else {
state.texImage2D( textureTarget, 0, glInternalFormat, renderTarget.width, renderTarget.height, 0, glFormat, glType, null );
}
state.bindFramebuffer( 36160, framebuffer );
_gl.framebufferTexture2D( 36160, attachment, textureTarget, properties.get( texture ).__webglTexture, 0 );
state.bindFramebuffer( 36160, null );
}
// Setup storage for internal depth/stencil buffers and bind to correct framebuffer
function setupRenderBufferStorage( renderbuffer, renderTarget, isMultisample ) {
_gl.bindRenderbuffer( 36161, renderbuffer );
if ( renderTarget.depthBuffer && ! renderTarget.stencilBuffer ) {
let glInternalFormat = 33189;
if ( isMultisample ) {
const depthTexture = renderTarget.depthTexture;
if ( depthTexture && depthTexture.isDepthTexture ) {
if ( depthTexture.type === FloatType ) {
glInternalFormat = 36012;
} else if ( depthTexture.type === UnsignedIntType ) {
glInternalFormat = 33190;
}
}
const samples = getRenderTargetSamples( renderTarget );
_gl.renderbufferStorageMultisample( 36161, samples, glInternalFormat, renderTarget.width, renderTarget.height );
} else {
_gl.renderbufferStorage( 36161, glInternalFormat, renderTarget.width, renderTarget.height );
}
_gl.framebufferRenderbuffer( 36160, 36096, 36161, renderbuffer );
} else if ( renderTarget.depthBuffer && renderTarget.stencilBuffer ) {
if ( isMultisample ) {
const samples = getRenderTargetSamples( renderTarget );
_gl.renderbufferStorageMultisample( 36161, samples, 35056, renderTarget.width, renderTarget.height );
} else {
_gl.renderbufferStorage( 36161, 34041, renderTarget.width, renderTarget.height );
}
_gl.framebufferRenderbuffer( 36160, 33306, 36161, renderbuffer );
} else {
const texture = renderTarget.texture;
const glFormat = utils.convert( texture.format );
const glType = utils.convert( texture.type );
const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType );
if ( isMultisample ) {
const samples = getRenderTargetSamples( renderTarget );
_gl.renderbufferStorageMultisample( 36161, samples, glInternalFormat, renderTarget.width, renderTarget.height );
} else {
_gl.renderbufferStorage( 36161, glInternalFormat, renderTarget.width, renderTarget.height );
}
}
_gl.bindRenderbuffer( 36161, null );
}
// Setup resources for a Depth Texture for a FBO (needs an extension)
function setupDepthTexture( framebuffer, renderTarget ) {
const isCube = ( renderTarget && renderTarget.isWebGLCubeRenderTarget );
if ( isCube ) throw new Error( 'Depth Texture with cube render targets is not supported' );
state.bindFramebuffer( 36160, framebuffer );
if ( ! ( renderTarget.depthTexture && renderTarget.depthTexture.isDepthTexture ) ) {
throw new Error( 'renderTarget.depthTexture must be an instance of THREE.DepthTexture' );
}
// upload an empty depth texture with framebuffer size
if ( ! properties.get( renderTarget.depthTexture ).__webglTexture ||
renderTarget.depthTexture.image.width !== renderTarget.width ||
renderTarget.depthTexture.image.height !== renderTarget.height ) {
renderTarget.depthTexture.image.width = renderTarget.width;
renderTarget.depthTexture.image.height = renderTarget.height;
renderTarget.depthTexture.needsUpdate = true;
}
setTexture2D( renderTarget.depthTexture, 0 );
const webglDepthTexture = properties.get( renderTarget.depthTexture ).__webglTexture;
if ( renderTarget.depthTexture.format === DepthFormat ) {
_gl.framebufferTexture2D( 36160, 36096, 3553, webglDepthTexture, 0 );
} else if ( renderTarget.depthTexture.format === DepthStencilFormat ) {
_gl.framebufferTexture2D( 36160, 33306, 3553, webglDepthTexture, 0 );
} else {
throw new Error( 'Unknown depthTexture format' );
}
}
// Setup GL resources for a non-texture depth buffer
function setupDepthRenderbuffer( renderTarget ) {
const renderTargetProperties = properties.get( renderTarget );
const isCube = ( renderTarget.isWebGLCubeRenderTarget === true );
if ( renderTarget.depthTexture ) {
if ( isCube ) throw new Error( 'target.depthTexture not supported in Cube render targets' );
setupDepthTexture( renderTargetProperties.__webglFramebuffer, renderTarget );
} else {
if ( isCube ) {
renderTargetProperties.__webglDepthbuffer = [];
for ( let i = 0; i < 6; i ++ ) {
state.bindFramebuffer( 36160, renderTargetProperties.__webglFramebuffer[ i ] );
renderTargetProperties.__webglDepthbuffer[ i ] = _gl.createRenderbuffer();
setupRenderBufferStorage( renderTargetProperties.__webglDepthbuffer[ i ], renderTarget, false );
}
} else {
state.bindFramebuffer( 36160, renderTargetProperties.__webglFramebuffer );
renderTargetProperties.__webglDepthbuffer = _gl.createRenderbuffer();
setupRenderBufferStorage( renderTargetProperties.__webglDepthbuffer, renderTarget, false );
}
}
state.bindFramebuffer( 36160, null );
}
// Set up GL resources for the render target
function setupRenderTarget( renderTarget ) {
const texture = renderTarget.texture;
const renderTargetProperties = properties.get( renderTarget );
const textureProperties = properties.get( texture );
renderTarget.addEventListener( 'dispose', onRenderTargetDispose );
textureProperties.__webglTexture = _gl.createTexture();
textureProperties.__version = texture.version;
info.memory.textures ++;
const isCube = ( renderTarget.isWebGLCubeRenderTarget === true );
const isMultisample = ( renderTarget.isWebGLMultisampleRenderTarget === true );
const isRenderTarget3D = texture.isDataTexture3D || texture.isDataTexture2DArray;
const supportsMips = isPowerOfTwo$1( renderTarget ) || isWebGL2;
// Handles WebGL2 RGBFormat fallback - #18858
if ( isWebGL2 && texture.format === RGBFormat && ( texture.type === FloatType || texture.type === HalfFloatType ) ) {
texture.format = RGBAFormat;
console.warn( 'THREE.WebGLRenderer: Rendering to textures with RGB format is not supported. Using RGBA format instead.' );
}
// Setup framebuffer
if ( isCube ) {
renderTargetProperties.__webglFramebuffer = [];
for ( let i = 0; i < 6; i ++ ) {
renderTargetProperties.__webglFramebuffer[ i ] = _gl.createFramebuffer();
}
} else {
renderTargetProperties.__webglFramebuffer = _gl.createFramebuffer();
if ( isMultisample ) {
if ( isWebGL2 ) {
renderTargetProperties.__webglMultisampledFramebuffer = _gl.createFramebuffer();
renderTargetProperties.__webglColorRenderbuffer = _gl.createRenderbuffer();
_gl.bindRenderbuffer( 36161, renderTargetProperties.__webglColorRenderbuffer );
const glFormat = utils.convert( texture.format );
const glType = utils.convert( texture.type );
const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType );
const samples = getRenderTargetSamples( renderTarget );
_gl.renderbufferStorageMultisample( 36161, samples, glInternalFormat, renderTarget.width, renderTarget.height );
state.bindFramebuffer( 36160, renderTargetProperties.__webglMultisampledFramebuffer );
_gl.framebufferRenderbuffer( 36160, 36064, 36161, renderTargetProperties.__webglColorRenderbuffer );
_gl.bindRenderbuffer( 36161, null );
if ( renderTarget.depthBuffer ) {
renderTargetProperties.__webglDepthRenderbuffer = _gl.createRenderbuffer();
setupRenderBufferStorage( renderTargetProperties.__webglDepthRenderbuffer, renderTarget, true );
}
state.bindFramebuffer( 36160, null );
} else {
console.warn( 'THREE.WebGLRenderer: WebGLMultisampleRenderTarget can only be used with WebGL2.' );
}
}
}
// Setup color buffer
if ( isCube ) {
state.bindTexture( 34067, textureProperties.__webglTexture );
setTextureParameters( 34067, texture, supportsMips );
for ( let i = 0; i < 6; i ++ ) {
setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer[ i ], renderTarget, 36064, 34069 + i );
}
if ( textureNeedsGenerateMipmaps( texture, supportsMips ) ) {
generateMipmap( 34067, texture, renderTarget.width, renderTarget.height );
}
state.bindTexture( 34067, null );
} else {
let glTextureType = 3553;
if ( isRenderTarget3D ) {
// Render targets containing layers, i.e. 3D textures and 2D texture arrays
if ( isWebGL2 ) {
const isTexture3D = texture.isDataTexture3D;
glTextureType = isTexture3D ? 32879 : 35866;
} else {
console.warn( 'THREE.DataTexture3D and THREE.DataTexture2DArray only supported with WebGL2.' );
}
}
state.bindTexture( glTextureType, textureProperties.__webglTexture );
setTextureParameters( glTextureType, texture, supportsMips );
setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer, renderTarget, 36064, glTextureType );
if ( textureNeedsGenerateMipmaps( texture, supportsMips ) ) {
generateMipmap( 3553, texture, renderTarget.width, renderTarget.height );
}
state.bindTexture( 3553, null );
}
// Setup depth and stencil buffers
if ( renderTarget.depthBuffer ) {
setupDepthRenderbuffer( renderTarget );
}
}
function updateRenderTargetMipmap( renderTarget ) {
const texture = renderTarget.texture;
const supportsMips = isPowerOfTwo$1( renderTarget ) || isWebGL2;
if ( textureNeedsGenerateMipmaps( texture, supportsMips ) ) {
const target = renderTarget.isWebGLCubeRenderTarget ? 34067 : 3553;
const webglTexture = properties.get( texture ).__webglTexture;
state.bindTexture( target, webglTexture );
generateMipmap( target, texture, renderTarget.width, renderTarget.height );
state.bindTexture( target, null );
}
}
function updateMultisampleRenderTarget( renderTarget ) {
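// Resolves the multisampled color/depth/stencil renderbuffers into the target's
// texture via blitFramebuffer (36008 = gl.READ_FRAMEBUFFER, 36009 = gl.DRAW_FRAMEBUFFER;
// mask bits 16384/256/1024 = color/depth/stencil).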
if ( renderTarget.isWebGLMultisampleRenderTarget ) {
if ( isWebGL2 ) {
const width = renderTarget.width;
const height = renderTarget.height;
let mask = 16384;
if ( renderTarget.depthBuffer ) mask |= 256;
if ( renderTarget.stencilBuffer ) mask |= 1024;
const renderTargetProperties = properties.get( renderTarget );
state.bindFramebuffer( 36008, renderTargetProperties.__webglMultisampledFramebuffer );
state.bindFramebuffer( 36009, renderTargetProperties.__webglFramebuffer );
_gl.blitFramebuffer( 0, 0, width, height, 0, 0, width, height, mask, 9728 );
state.bindFramebuffer( 36008, null );
state.bindFramebuffer( 36009, renderTargetProperties.__webglMultisampledFramebuffer );
} else {
console.warn( 'THREE.WebGLRenderer: WebGLMultisampleRenderTarget can only be used with WebGL2.' );
}
}
}
function getRenderTargetSamples( renderTarget ) {
return ( isWebGL2 && renderTarget.isWebGLMultisampleRenderTarget ) ?
Math.min( maxSamples, renderTarget.samples ) : 0;
}
function updateVideoTexture( texture ) {
const frame = info.render.frame;
// Check the last frame we updated the VideoTexture
if ( _videoTextures.get( texture ) !== frame ) {
_videoTextures.set( texture, frame );
texture.update();
}
}
// backwards compatibility
let warnedTexture2D = false;
let warnedTextureCube = false;
function safeSetTexture2D( texture, slot ) {
if ( texture && texture.isWebGLRenderTarget ) {
if ( warnedTexture2D === false ) {
console.warn( 'THREE.WebGLTextures.safeSetTexture2D: don\'t use render targets as textures. Use their .texture property instead.' );
warnedTexture2D = true;
}
texture = texture.texture;
}
setTexture2D( texture, slot );
}
function safeSetTextureCube( texture, slot ) {
if ( texture && texture.isWebGLCubeRenderTarget ) {
if ( warnedTextureCube === false ) {
console.warn( 'THREE.WebGLTextures.safeSetTextureCube: don\'t use cube render targets as textures. Use their .texture property instead.' );
warnedTextureCube = true;
}
texture = texture.texture;
}
setTextureCube( texture, slot );
}
//
this.allocateTextureUnit = allocateTextureUnit;
this.resetTextureUnits = resetTextureUnits;
this.setTexture2D = setTexture2D;
this.setTexture2DArray = setTexture2DArray;
this.setTexture3D = setTexture3D;
this.setTextureCube = setTextureCube;
this.setupRenderTarget = setupRenderTarget;
this.updateRenderTargetMipmap = updateRenderTargetMipmap;
this.updateMultisampleRenderTarget = updateMultisampleRenderTarget;
this.safeSetTexture2D = safeSetTexture2D;
this.safeSetTextureCube = safeSetTextureCube;
}
function WebGLUtils( gl, extensions, capabilities ) {
const isWebGL2 = capabilities.isWebGL2;
function convert( p ) {
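// Translates three.js type/format constants into raw GLenum values and returns
// null when a compressed format's extension is unavailable,
// e.g. convert( FloatType ) === 5126 (gl.FLOAT), convert( RGBAFormat ) === 6408 (gl.RGBA).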
let extension;
if ( p === UnsignedByteType ) return 5121;
if ( p === UnsignedShort4444Type ) return 32819;
if ( p === UnsignedShort5551Type ) return 32820;
if ( p === UnsignedShort565Type ) return 33635;
if ( p === ByteType ) return 5120;
if ( p === ShortType ) return 5122;
if ( p === UnsignedShortType ) return 5123;
if ( p === IntType ) return 5124;
if ( p === UnsignedIntType ) return 5125;
if ( p === FloatType ) return 5126;
if ( p === HalfFloatType ) {
if ( isWebGL2 ) return 5131;
extension = extensions.get( 'OES_texture_half_float' );
if ( extension !== null ) {
return extension.HALF_FLOAT_OES;
} else {
return null;
}
}
if ( p === AlphaFormat ) return 6406;
if ( p === RGBFormat ) return 6407;
if ( p === RGBAFormat ) return 6408;
if ( p === LuminanceFormat ) return 6409;
if ( p === LuminanceAlphaFormat ) return 6410;
if ( p === DepthFormat ) return 6402;
if ( p === DepthStencilFormat ) return 34041;
if ( p === RedFormat ) return 6403;
// WebGL2 formats.
if ( p === RedIntegerFormat ) return 36244;
if ( p === RGFormat ) return 33319;
if ( p === RGIntegerFormat ) return 33320;
if ( p === RGBIntegerFormat ) return 36248;
if ( p === RGBAIntegerFormat ) return 36249;
if ( p === RGB_S3TC_DXT1_Format || p === RGBA_S3TC_DXT1_Format ||
p === RGBA_S3TC_DXT3_Format || p === RGBA_S3TC_DXT5_Format ) {
extension = extensions.get( 'WEBGL_compressed_texture_s3tc' );
if ( extension !== null ) {
if ( p === RGB_S3TC_DXT1_Format ) return extension.COMPRESSED_RGB_S3TC_DXT1_EXT;
if ( p === RGBA_S3TC_DXT1_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT1_EXT;
if ( p === RGBA_S3TC_DXT3_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT3_EXT;
if ( p === RGBA_S3TC_DXT5_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT5_EXT;
} else {
return null;
}
}
if ( p === RGB_PVRTC_4BPPV1_Format || p === RGB_PVRTC_2BPPV1_Format ||
p === RGBA_PVRTC_4BPPV1_Format || p === RGBA_PVRTC_2BPPV1_Format ) {
extension = extensions.get( 'WEBGL_compressed_texture_pvrtc' );
if ( extension !== null ) {
if ( p === RGB_PVRTC_4BPPV1_Format ) return extension.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
if ( p === RGB_PVRTC_2BPPV1_Format ) return extension.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
if ( p === RGBA_PVRTC_4BPPV1_Format ) return extension.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
if ( p === RGBA_PVRTC_2BPPV1_Format ) return extension.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
} else {
return null;
}
}
if ( p === RGB_ETC1_Format ) {
extension = extensions.get( 'WEBGL_compressed_texture_etc1' );
if ( extension !== null ) {
return extension.COMPRESSED_RGB_ETC1_WEBGL;
} else {
return null;
}
}
if ( p === RGB_ETC2_Format || p === RGBA_ETC2_EAC_Format ) {
extension = extensions.get( 'WEBGL_compressed_texture_etc' );
if ( extension !== null ) {
if ( p === RGB_ETC2_Format ) return extension.COMPRESSED_RGB8_ETC2;
if ( p === RGBA_ETC2_EAC_Format ) return extension.COMPRESSED_RGBA8_ETC2_EAC;
}
}
if ( p === RGBA_ASTC_4x4_Format || p === RGBA_ASTC_5x4_Format || p === RGBA_ASTC_5x5_Format ||
p === RGBA_ASTC_6x5_Format || p === RGBA_ASTC_6x6_Format || p === RGBA_ASTC_8x5_Format ||
p === RGBA_ASTC_8x6_Format || p === RGBA_ASTC_8x8_Format || p === RGBA_ASTC_10x5_Format ||
p === RGBA_ASTC_10x6_Format || p === RGBA_ASTC_10x8_Format || p === RGBA_ASTC_10x10_Format ||
p === RGBA_ASTC_12x10_Format || p === RGBA_ASTC_12x12_Format ||
p === SRGB8_ALPHA8_ASTC_4x4_Format || p === SRGB8_ALPHA8_ASTC_5x4_Format || p === SRGB8_ALPHA8_ASTC_5x5_Format ||
p === SRGB8_ALPHA8_ASTC_6x5_Format || p === SRGB8_ALPHA8_ASTC_6x6_Format || p === SRGB8_ALPHA8_ASTC_8x5_Format ||
p === SRGB8_ALPHA8_ASTC_8x6_Format || p === SRGB8_ALPHA8_ASTC_8x8_Format || p === SRGB8_ALPHA8_ASTC_10x5_Format ||
p === SRGB8_ALPHA8_ASTC_10x6_Format || p === SRGB8_ALPHA8_ASTC_10x8_Format || p === SRGB8_ALPHA8_ASTC_10x10_Format ||
p === SRGB8_ALPHA8_ASTC_12x10_Format || p === SRGB8_ALPHA8_ASTC_12x12_Format ) {
extension = extensions.get( 'WEBGL_compressed_texture_astc' );
if ( extension !== null ) {
// TODO Complete?
return p;
} else {
return null;
}
}
if ( p === RGBA_BPTC_Format ) {
extension = extensions.get( 'EXT_texture_compression_bptc' );
if ( extension !== null ) {
// TODO Complete?
return p;
} else {
return null;
}
}
if ( p === UnsignedInt248Type ) {
if ( isWebGL2 ) return 34042;
extension = extensions.get( 'WEBGL_depth_texture' );
if ( extension !== null ) {
return extension.UNSIGNED_INT_24_8_WEBGL;
} else {
return null;
}
}
}
return { convert: convert };
}
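// Sketch of what convert() returns (raw WebGL enum values), assuming `utils` is the
// instance created by WebGLUtils above:
//
//   utils.convert( UnsignedByteType ); // 5121 === gl.UNSIGNED_BYTE
//   utils.convert( FloatType );        // 5126 === gl.FLOAT
//   utils.convert( RGBAFormat );       // 6408 === gl.RGBA
//
// Compressed formats that need a missing extension generally resolve to null.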
class ArrayCamera extends PerspectiveCamera {
constructor( array = [] ) {
super();
this.cameras = array;
}
}
ArrayCamera.prototype.isArrayCamera = true;
class Group extends Object3D {
constructor() {
super();
this.type = 'Group';
}
}
Group.prototype.isGroup = true;
const _moveEvent = { type: 'move' };
class WebXRController {
constructor() {
this._targetRay = null;
this._grip = null;
this._hand = null;
}
getHandSpace() {
if ( this._hand === null ) {
this._hand = new Group();
this._hand.matrixAutoUpdate = false;
this._hand.visible = false;
this._hand.joints = {};
this._hand.inputState = { pinching: false };
}
return this._hand;
}
getTargetRaySpace() {
if ( this._targetRay === null ) {
this._targetRay = new Group();
this._targetRay.matrixAutoUpdate = false;
this._targetRay.visible = false;
this._targetRay.hasLinearVelocity = false;
this._targetRay.linearVelocity = new Vector3();
this._targetRay.hasAngularVelocity = false;
this._targetRay.angularVelocity = new Vector3();
}
return this._targetRay;
}
getGripSpace() {
if ( this._grip === null ) {
this._grip = new Group();
this._grip.matrixAutoUpdate = false;
this._grip.visible = false;
this._grip.hasLinearVelocity = false;
this._grip.linearVelocity = new Vector3();
this._grip.hasAngularVelocity = false;
this._grip.angularVelocity = new Vector3();
}
return this._grip;
}
dispatchEvent( event ) {
if ( this._targetRay !== null ) {
this._targetRay.dispatchEvent( event );
}
if ( this._grip !== null ) {
this._grip.dispatchEvent( event );
}
if ( this._hand !== null ) {
this._hand.dispatchEvent( event );
}
return this;
}
disconnect( inputSource ) {
this.dispatchEvent( { type: 'disconnected', data: inputSource } );
if ( this._targetRay !== null ) {
this._targetRay.visible = false;
}
if ( this._grip !== null ) {
this._grip.visible = false;
}
if ( this._hand !== null ) {
this._hand.visible = false;
}
return this;
}
update( inputSource, frame, referenceSpace ) {
let inputPose = null;
let gripPose = null;
let handPose = null;
const targetRay = this._targetRay;
const grip = this._grip;
const hand = this._hand;
if ( inputSource && frame.session.visibilityState !== 'visible-blurred' ) {
if ( targetRay !== null ) {
inputPose = frame.getPose( inputSource.targetRaySpace, referenceSpace );
if ( inputPose !== null ) {
targetRay.matrix.fromArray( inputPose.transform.matrix );
targetRay.matrix.decompose( targetRay.position, targetRay.rotation, targetRay.scale );
if ( inputPose.linearVelocity ) {
targetRay.hasLinearVelocity = true;
targetRay.linearVelocity.copy( inputPose.linearVelocity );
} else {
targetRay.hasLinearVelocity = false;
}
if ( inputPose.angularVelocity ) {
targetRay.hasAngularVelocity = true;
targetRay.angularVelocity.copy( inputPose.angularVelocity );
} else {
targetRay.hasAngularVelocity = false;
}
this.dispatchEvent( _moveEvent );
}
}
if ( hand && inputSource.hand ) {
handPose = true;
for ( const inputjoint of inputSource.hand.values() ) {
// Update the joint groups with the XRJoint poses

const jointPose = frame.getJointPose( inputjoint, referenceSpace );
if ( hand.joints[ inputjoint.jointName ] === undefined ) {
// The transform of this joint will be updated with the joint pose on each frame
const joint = new Group();
joint.matrixAutoUpdate = false;
joint.visible = false;
hand.joints[ inputjoint.jointName ] = joint;
// ??
hand.add( joint );
}
const joint = hand.joints[ inputjoint.jointName ];
if ( jointPose !== null ) {
joint.matrix.fromArray( jointPose.transform.matrix );
joint.matrix.decompose( joint.position, joint.rotation, joint.scale );
joint.jointRadius = jointPose.radius;
}
joint.visible = jointPose !== null;
}
// Custom events
// Check pinch
const indexTip = hand.joints[ 'index-finger-tip' ];
const thumbTip = hand.joints[ 'thumb-tip' ];
const distance = indexTip.position.distanceTo( thumbTip.position );
const distanceToPinch = 0.02;
const threshold = 0.005;
if ( hand.inputState.pinching && distance > distanceToPinch + threshold ) {
hand.inputState.pinching = false;
this.dispatchEvent( {
type: 'pinchend',
handedness: inputSource.handedness,
target: this
} );
} else if ( ! hand.inputState.pinching && distance <= distanceToPinch - threshold ) {
hand.inputState.pinching = true;
this.dispatchEvent( {
type: 'pinchstart',
handedness: inputSource.handedness,
target: this
} );
}
} else {
if ( grip !== null && inputSource.gripSpace ) {
gripPose = frame.getPose( inputSource.gripSpace, referenceSpace );
if ( gripPose !== null ) {
grip.matrix.fromArray( gripPose.transform.matrix );
grip.matrix.decompose( grip.position, grip.rotation, grip.scale );
if ( gripPose.linearVelocity ) {
grip.hasLinearVelocity = true;
grip.linearVelocity.copy( gripPose.linearVelocity );
} else {
grip.hasLinearVelocity = false;
}
if ( gripPose.angularVelocity ) {
grip.hasAngularVelocity = true;
grip.angularVelocity.copy( gripPose.angularVelocity );
} else {
grip.hasAngularVelocity = false;
}
}
}
}
}
if ( targetRay !== null ) {
targetRay.visible = ( inputPose !== null );
}
if ( grip !== null ) {
grip.visible = ( gripPose !== null );
}
if ( hand !== null ) {
hand.visible = ( handPose !== null );
}
return this;
}
}
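// Example (sketch, assuming an existing `renderer` with WebXR enabled): the custom
// pinch events dispatched above can be observed on the hand space returned by the
// manager below, e.g.:
//
//   const hand = renderer.xr.getHand( 0 );
//   hand.addEventListener( 'pinchstart', ( event ) => console.log( 'pinch', event.handedness ) );
//   hand.addEventListener( 'pinchend', () => console.log( 'pinch released' ) );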
class WebXRManager extends EventDispatcher {
constructor( renderer, gl ) {
super();
const scope = this;
const state = renderer.state;
let session = null;
let framebufferScaleFactor = 1.0;
let referenceSpace = null;
let referenceSpaceType = 'local-floor';
let pose = null;
const controllers = [];
const inputSourcesMap = new Map();
//
const cameraL = new PerspectiveCamera();
cameraL.layers.enable( 1 );
cameraL.viewport = new Vector4();
const cameraR = new PerspectiveCamera();
cameraR.layers.enable( 2 );
cameraR.viewport = new Vector4();
const cameras = [ cameraL, cameraR ];
const cameraVR = new ArrayCamera();
cameraVR.layers.enable( 1 );
cameraVR.layers.enable( 2 );
let _currentDepthNear = null;
let _currentDepthFar = null;
//
this.enabled = false;
this.isPresenting = false;
this.getController = function ( index ) {
let controller = controllers[ index ];
if ( controller === undefined ) {
controller = new WebXRController();
controllers[ index ] = controller;
}
return controller.getTargetRaySpace();
};
this.getControllerGrip = function ( index ) {
let controller = controllers[ index ];
if ( controller === undefined ) {
controller = new WebXRController();
controllers[ index ] = controller;
}
return controller.getGripSpace();
};
this.getHand = function ( index ) {
let controller = controllers[ index ];
if ( controller === undefined ) {
controller = new WebXRController();
controllers[ index ] = controller;
}
return controller.getHandSpace();
};
//
function onSessionEvent( event ) {
const controller = inputSourcesMap.get( event.inputSource );
if ( controller ) {
controller.dispatchEvent( { type: event.type, data: event.inputSource } );
}
}
function onSessionEnd() {
inputSourcesMap.forEach( function ( controller, inputSource ) {
controller.disconnect( inputSource );
} );
inputSourcesMap.clear();
_currentDepthNear = null;
_currentDepthFar = null;
// restore framebuffer/rendering state
state.bindXRFramebuffer( null );
renderer.setRenderTarget( renderer.getRenderTarget() );
//
animation.stop();
scope.isPresenting = false;
scope.dispatchEvent( { type: 'sessionend' } );
}
this.setFramebufferScaleFactor = function ( value ) {
framebufferScaleFactor = value;
if ( scope.isPresenting === true ) {
console.warn( 'THREE.WebXRManager: Cannot change framebuffer scale while presenting.' );
}
};
this.setReferenceSpaceType = function ( value ) {
referenceSpaceType = value;
if ( scope.isPresenting === true ) {
console.warn( 'THREE.WebXRManager: Cannot change reference space type while presenting.' );
}
};
this.getReferenceSpace = function () {
return referenceSpace;
};
this.getSession = function () {
return session;
};
this.setSession = async function ( value ) {
session = value;
if ( session !== null ) {
session.addEventListener( 'select', onSessionEvent );
session.addEventListener( 'selectstart', onSessionEvent );
session.addEventListener( 'selectend', onSessionEvent );
session.addEventListener( 'squeeze', onSessionEvent );
session.addEventListener( 'squeezestart', onSessionEvent );
session.addEventListener( 'squeezeend', onSessionEvent );
session.addEventListener( 'end', onSessionEnd );
session.addEventListener( 'inputsourceschange', onInputSourcesChange );
const attributes = gl.getContextAttributes();
if ( attributes.xrCompatible !== true ) {
await gl.makeXRCompatible();
}
const layerInit = {
antialias: attributes.antialias,
alpha: attributes.alpha,
depth: attributes.depth,
stencil: attributes.stencil,
framebufferScaleFactor: framebufferScaleFactor
};
// eslint-disable-next-line no-undef
const baseLayer = new XRWebGLLayer( session, gl, layerInit );
session.updateRenderState( { baseLayer: baseLayer } );
referenceSpace = await session.requestReferenceSpace( referenceSpaceType );
animation.setContext( session );
animation.start();
scope.isPresenting = true;
scope.dispatchEvent( { type: 'sessionstart' } );
}
};
function onInputSourcesChange( event ) {
const inputSources = session.inputSources;
// Assign inputSources to available controllers
for ( let i = 0; i < controllers.length; i ++ ) {
inputSourcesMap.set( inputSources[ i ], controllers[ i ] );
}
// Notify disconnected
for ( let i = 0; i < event.removed.length; i ++ ) {
const inputSource = event.removed[ i ];
const controller = inputSourcesMap.get( inputSource );
if ( controller ) {
controller.dispatchEvent( { type: 'disconnected', data: inputSource } );
inputSourcesMap.delete( inputSource );
}
}
// Notify connected
for ( let i = 0; i < event.added.length; i ++ ) {
const inputSource = event.added[ i ];
const controller = inputSourcesMap.get( inputSource );
if ( controller ) {
controller.dispatchEvent( { type: 'connected', data: inputSource } );
}
}
}
//
const cameraLPos = new Vector3();
const cameraRPos = new Vector3();
/**
* Assumes two cameras that are parallel and share an X-axis, that
* the cameras' projection and world matrices have already been set,
* and that the near and far planes are identical for both cameras.
* Visualization of this technique: https://computergraphics.stackexchange.com/a/4765
*/
function setProjectionFromUnion( camera, cameraL, cameraR ) {
cameraLPos.setFromMatrixPosition( cameraL.matrixWorld );
cameraRPos.setFromMatrixPosition( cameraR.matrixWorld );
const ipd = cameraLPos.distanceTo( cameraRPos );
const projL = cameraL.projectionMatrix.elements;
const projR = cameraR.projectionMatrix.elements;
// VR systems will have identical far and near planes, and
// most likely identical top and bottom frustum extents.
// Use the left camera for these values.
const near = projL[ 14 ] / ( projL[ 10 ] - 1 );
const far = projL[ 14 ] / ( projL[ 10 ] + 1 );
const topFov = ( projL[ 9 ] + 1 ) / projL[ 5 ];
const bottomFov = ( projL[ 9 ] - 1 ) / projL[ 5 ];
const leftFov = ( projL[ 8 ] - 1 ) / projL[ 0 ];
const rightFov = ( projR[ 8 ] + 1 ) / projR[ 0 ];
const left = near * leftFov;
const right = near * rightFov;
// Calculate the new camera's position offset from the
// left camera. xOffset should be roughly half `ipd`.
const zOffset = ipd / ( - leftFov + rightFov );
const xOffset = zOffset * - leftFov;
// TODO: Better way to apply this offset?
cameraL.matrixWorld.decompose( camera.position, camera.quaternion, camera.scale );
camera.translateX( xOffset );
camera.translateZ( zOffset );
camera.matrixWorld.compose( camera.position, camera.quaternion, camera.scale );
camera.matrixWorldInverse.copy( camera.matrixWorld ).invert();
// Find the union of the frustum values of the cameras and scale
// the values so that the near plane's position does not change in world space,
// although must now be relative to the new union camera.
const near2 = near + zOffset;
const far2 = far + zOffset;
const left2 = left - xOffset;
const right2 = right + ( ipd - xOffset );
const top2 = topFov * far / far2 * near2;
const bottom2 = bottomFov * far / far2 * near2;
camera.projectionMatrix.makePerspective( left2, right2, top2, bottom2, near2, far2 );
}
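// Note on the near/far extraction above: for a standard (column-major) perspective
// projection matrix, elements[ 10 ] = -( far + near ) / ( far - near ) and
// elements[ 14 ] = -2 * far * near / ( far - near ), which gives:
//
//   near = elements[ 14 ] / ( elements[ 10 ] - 1 )
//   far  = elements[ 14 ] / ( elements[ 10 ] + 1 )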
function updateCamera( camera, parent ) {
if ( parent === null ) {
camera.matrixWorld.copy( camera.matrix );
} else {
camera.matrixWorld.multiplyMatrices( parent.matrixWorld, camera.matrix );
}
camera.matrixWorldInverse.copy( camera.matrixWorld ).invert();
}
this.getCamera = function ( camera ) {
cameraVR.near = cameraR.near = cameraL.near = camera.near;
cameraVR.far = cameraR.far = cameraL.far = camera.far;
if ( _currentDepthNear !== cameraVR.near || _currentDepthFar !== cameraVR.far ) {
// Note that the new renderState won't apply until the next frame. See #18320
session.updateRenderState( {
depthNear: cameraVR.near,
depthFar: cameraVR.far
} );
_currentDepthNear = cameraVR.near;
_currentDepthFar = cameraVR.far;
}
const parent = camera.parent;
const cameras = cameraVR.cameras;
updateCamera( cameraVR, parent );
for ( let i = 0; i < cameras.length; i ++ ) {
updateCamera( cameras[ i ], parent );
}
// update camera and its children
camera.matrixWorld.copy( cameraVR.matrixWorld );
camera.matrix.copy( cameraVR.matrix );
camera.matrix.decompose( camera.position, camera.quaternion, camera.scale );
const children = camera.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
children[ i ].updateMatrixWorld( true );
}
// update projection matrix for proper view frustum culling
if ( cameras.length === 2 ) {
setProjectionFromUnion( cameraVR, cameraL, cameraR );
} else {
// assume single camera setup (AR)
cameraVR.projectionMatrix.copy( cameraL.projectionMatrix );
}
return cameraVR;
};
// Animation Loop
let onAnimationFrameCallback = null;
function onAnimationFrame( time, frame ) {
pose = frame.getViewerPose( referenceSpace );
if ( pose !== null ) {
const views = pose.views;
const baseLayer = session.renderState.baseLayer;
state.bindXRFramebuffer( baseLayer.framebuffer );
let cameraVRNeedsUpdate = false;
// check if it's necessary to rebuild cameraVR's camera list
if ( views.length !== cameraVR.cameras.length ) {
cameraVR.cameras.length = 0;
cameraVRNeedsUpdate = true;
}
for ( let i = 0; i < views.length; i ++ ) {
const view = views[ i ];
const viewport = baseLayer.getViewport( view );
const camera = cameras[ i ];
camera.matrix.fromArray( view.transform.matrix );
camera.projectionMatrix.fromArray( view.projectionMatrix );
camera.viewport.set( viewport.x, viewport.y, viewport.width, viewport.height );
if ( i === 0 ) {
cameraVR.matrix.copy( camera.matrix );
}
if ( cameraVRNeedsUpdate === true ) {
cameraVR.cameras.push( camera );
}
}
}
//
const inputSources = session.inputSources;
for ( let i = 0; i < controllers.length; i ++ ) {
const controller = controllers[ i ];
const inputSource = inputSources[ i ];
controller.update( inputSource, frame, referenceSpace );
}
if ( onAnimationFrameCallback ) onAnimationFrameCallback( time, frame );
}
const animation = new WebGLAnimation();
animation.setAnimationLoop( onAnimationFrame );
this.setAnimationLoop = function ( callback ) {
onAnimationFrameCallback = callback;
};
this.dispose = function () {};
}
}
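// Example (sketch, assuming a WebXR-capable browser and an existing `renderer`): a
// typical way to hand an XRSession to this manager is:
//
//   renderer.xr.enabled = true;
//   navigator.xr.requestSession( 'immersive-vr', { optionalFeatures: [ 'local-floor' ] } )
//     .then( ( session ) => renderer.xr.setSession( session ) );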
function WebGLMaterials( properties ) {
function refreshFogUniforms( uniforms, fog ) {
uniforms.fogColor.value.copy( fog.color );
if ( fog.isFog ) {
uniforms.fogNear.value = fog.near;
uniforms.fogFar.value = fog.far;
} else if ( fog.isFogExp2 ) {
uniforms.fogDensity.value = fog.density;
}
}
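// Example (sketch): the two fog models handled above correspond to the public scene
// settings, e.g.:
//
//   scene.fog = new Fog( 0xcccccc, 10, 100 );   // linear fog -> fogNear / fogFar
//   scene.fog = new FogExp2( 0xcccccc, 0.002 ); // exponential fog -> fogDensity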
function refreshMaterialUniforms( uniforms, material, pixelRatio, height ) {
if ( material.isMeshBasicMaterial ) {
refreshUniformsCommon( uniforms, material );
} else if ( material.isMeshLambertMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsLambert( uniforms, material );
} else if ( material.isMeshToonMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsToon( uniforms, material );
} else if ( material.isMeshPhongMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsPhong( uniforms, material );
} else if ( material.isMeshStandardMaterial ) {
refreshUniformsCommon( uniforms, material );
if ( material.isMeshPhysicalMaterial ) {
refreshUniformsPhysical( uniforms, material );
} else {
refreshUniformsStandard( uniforms, material );
}
} else if ( material.isMeshMatcapMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsMatcap( uniforms, material );
} else if ( material.isMeshDepthMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsDepth( uniforms, material );
} else if ( material.isMeshDistanceMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsDistance( uniforms, material );
} else if ( material.isMeshNormalMaterial ) {
refreshUniformsCommon( uniforms, material );
refreshUniformsNormal( uniforms, material );
} else if ( material.isLineBasicMaterial ) {
refreshUniformsLine( uniforms, material );
if ( material.isLineDashedMaterial ) {
refreshUniformsDash( uniforms, material );
}
} else if ( material.isPointsMaterial ) {
refreshUniformsPoints( uniforms, material, pixelRatio, height );
} else if ( material.isSpriteMaterial ) {
refreshUniformsSprites( uniforms, material );
} else if ( material.isShadowMaterial ) {
uniforms.color.value.copy( material.color );
uniforms.opacity.value = material.opacity;
} else if ( material.isShaderMaterial ) {
material.uniformsNeedUpdate = false; // #15581
}
}
function refreshUniformsCommon( uniforms, material ) {
uniforms.opacity.value = material.opacity;
if ( material.color ) {
uniforms.diffuse.value.copy( material.color );
}
if ( material.emissive ) {
uniforms.emissive.value.copy( material.emissive ).multiplyScalar( material.emissiveIntensity );
}
if ( material.map ) {
uniforms.map.value = material.map;
}
if ( material.alphaMap ) {
uniforms.alphaMap.value = material.alphaMap;
}
if ( material.specularMap ) {
uniforms.specularMap.value = material.specularMap;
}
const envMap = properties.get( material ).envMap;
if ( envMap ) {
uniforms.envMap.value = envMap;
uniforms.flipEnvMap.value = ( envMap.isCubeTexture && envMap._needsFlipEnvMap ) ? - 1 : 1;
uniforms.reflectivity.value = material.reflectivity;
uniforms.refractionRatio.value = material.refractionRatio;
const maxMipLevel = properties.get( envMap ).__maxMipLevel;
if ( maxMipLevel !== undefined ) {
uniforms.maxMipLevel.value = maxMipLevel;
}
}
if ( material.lightMap ) {
uniforms.lightMap.value = material.lightMap;
uniforms.lightMapIntensity.value = material.lightMapIntensity;
}
if ( material.aoMap ) {
uniforms.aoMap.value = material.aoMap;
uniforms.aoMapIntensity.value = material.aoMapIntensity;
}
// uv repeat and offset setting priorities
// 1. color map
// 2. specular map
// 3. displacement map
// 4. normal map
// 5. bump map
// 6. roughness map
// 7. metalness map
// 8. alpha map
// 9. emissive map
// 10. clearcoat map
// 11. clearcoat normal map
// 12. clearcoat roughness map
let uvScaleMap;
if ( material.map ) {
uvScaleMap = material.map;
} else if ( material.specularMap ) {
uvScaleMap = material.specularMap;
} else if ( material.displacementMap ) {
uvScaleMap = material.displacementMap;
} else if ( material.normalMap ) {
uvScaleMap = material.normalMap;
} else if ( material.bumpMap ) {
uvScaleMap = material.bumpMap;
} else if ( material.roughnessMap ) {
uvScaleMap = material.roughnessMap;
} else if ( material.metalnessMap ) {
uvScaleMap = material.metalnessMap;
} else if ( material.alphaMap ) {
uvScaleMap = material.alphaMap;
} else if ( material.emissiveMap ) {
uvScaleMap = material.emissiveMap;
} else if ( material.clearcoatMap ) {
uvScaleMap = material.clearcoatMap;
} else if ( material.clearcoatNormalMap ) {
uvScaleMap = material.clearcoatNormalMap;
} else if ( material.clearcoatRoughnessMap ) {
uvScaleMap = material.clearcoatRoughnessMap;
}
if ( uvScaleMap !== undefined ) {
// backwards compatibility
if ( uvScaleMap.isWebGLRenderTarget ) {
uvScaleMap = uvScaleMap.texture;
}
if ( uvScaleMap.matrixAutoUpdate === true ) {
uvScaleMap.updateMatrix();
}
uniforms.uvTransform.value.copy( uvScaleMap.matrix );
}
// uv repeat and offset setting priorities for uv2
// 1. ao map
// 2. light map
let uv2ScaleMap;
if ( material.aoMap ) {
uv2ScaleMap = material.aoMap;
} else if ( material.lightMap ) {
uv2ScaleMap = material.lightMap;
}
if ( uv2ScaleMap !== undefined ) {
// backwards compatibility
if ( uv2ScaleMap.isWebGLRenderTarget ) {
uv2ScaleMap = uv2ScaleMap.texture;
}
if ( uv2ScaleMap.matrixAutoUpdate === true ) {
uv2ScaleMap.updateMatrix();
}
uniforms.uv2Transform.value.copy( uv2ScaleMap.matrix );
}
}
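// Example (sketch): only the first texture found in the priority list above drives the
// shared uvTransform uniform, so per-map transforms are expected to match. Typical
// application-side setup:
//
//   texture.wrapS = texture.wrapT = RepeatWrapping;
//   texture.repeat.set( 2, 2 );
//   texture.offset.set( 0.5, 0 ); // picked up here via texture.updateMatrix()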
function refreshUniformsLine( uniforms, material ) {
uniforms.diffuse.value.copy( material.color );
uniforms.opacity.value = material.opacity;
}
function refreshUniformsDash( uniforms, material ) {
uniforms.dashSize.value = material.dashSize;
uniforms.totalSize.value = material.dashSize + material.gapSize;
uniforms.scale.value = material.scale;
}
function refreshUniformsPoints( uniforms, material, pixelRatio, height ) {
uniforms.diffuse.value.copy( material.color );
uniforms.opacity.value = material.opacity;
uniforms.size.value = material.size * pixelRatio;
uniforms.scale.value = height * 0.5;
if ( material.map ) {
uniforms.map.value = material.map;
}
if ( material.alphaMap ) {
uniforms.alphaMap.value = material.alphaMap;
}
// uv repeat and offset setting priorities
// 1. color map
// 2. alpha map
let uvScaleMap;
if ( material.map ) {
uvScaleMap = material.map;
} else if ( material.alphaMap ) {
uvScaleMap = material.alphaMap;
}
if ( uvScaleMap !== undefined ) {
if ( uvScaleMap.matrixAutoUpdate === true ) {
uvScaleMap.updateMatrix();
}
uniforms.uvTransform.value.copy( uvScaleMap.matrix );
}
}
function refreshUniformsSprites( uniforms, material ) {
uniforms.diffuse.value.copy( material.color );
uniforms.opacity.value = material.opacity;
uniforms.rotation.value = material.rotation;
if ( material.map ) {
uniforms.map.value = material.map;
}
if ( material.alphaMap ) {
uniforms.alphaMap.value = material.alphaMap;
}
// uv repeat and offset setting priorities
// 1. color map
// 2. alpha map
let uvScaleMap;
if ( material.map ) {
uvScaleMap = material.map;
} else if ( material.alphaMap ) {
uvScaleMap = material.alphaMap;
}
if ( uvScaleMap !== undefined ) {
if ( uvScaleMap.matrixAutoUpdate === true ) {
uvScaleMap.updateMatrix();
}
uniforms.uvTransform.value.copy( uvScaleMap.matrix );
}
}
function refreshUniformsLambert( uniforms, material ) {
if ( material.emissiveMap ) {
uniforms.emissiveMap.value = material.emissiveMap;
}
}
function refreshUniformsPhong( uniforms, material ) {
uniforms.specular.value.copy( material.specular );
uniforms.shininess.value = Math.max( material.shininess, 1e-4 ); // to prevent pow( 0.0, 0.0 )
if ( material.emissiveMap ) {
uniforms.emissiveMap.value = material.emissiveMap;
}
if ( material.bumpMap ) {
uniforms.bumpMap.value = material.bumpMap;
uniforms.bumpScale.value = material.bumpScale;
if ( material.side === BackSide ) uniforms.bumpScale.value *= - 1;
}
if ( material.normalMap ) {
uniforms.normalMap.value = material.normalMap;
uniforms.normalScale.value.copy( material.normalScale );
if ( material.side === BackSide ) uniforms.normalScale.value.negate();
}
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
}
function refreshUniformsToon( uniforms, material ) {
if ( material.gradientMap ) {
uniforms.gradientMap.value = material.gradientMap;
}
if ( material.emissiveMap ) {
uniforms.emissiveMap.value = material.emissiveMap;
}
if ( material.bumpMap ) {
uniforms.bumpMap.value = material.bumpMap;
uniforms.bumpScale.value = material.bumpScale;
if ( material.side === BackSide ) uniforms.bumpScale.value *= - 1;
}
if ( material.normalMap ) {
uniforms.normalMap.value = material.normalMap;
uniforms.normalScale.value.copy( material.normalScale );
if ( material.side === BackSide ) uniforms.normalScale.value.negate();
}
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
}
function refreshUniformsStandard( uniforms, material ) {
uniforms.roughness.value = material.roughness;
uniforms.metalness.value = material.metalness;
if ( material.roughnessMap ) {
uniforms.roughnessMap.value = material.roughnessMap;
}
if ( material.metalnessMap ) {
uniforms.metalnessMap.value = material.metalnessMap;
}
if ( material.emissiveMap ) {
uniforms.emissiveMap.value = material.emissiveMap;
}
if ( material.bumpMap ) {
uniforms.bumpMap.value = material.bumpMap;
uniforms.bumpScale.value = material.bumpScale;
if ( material.side === BackSide ) uniforms.bumpScale.value *= - 1;
}
if ( material.normalMap ) {
uniforms.normalMap.value = material.normalMap;
uniforms.normalScale.value.copy( material.normalScale );
if ( material.side === BackSide ) uniforms.normalScale.value.negate();
}
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
const envMap = properties.get( material ).envMap;
if ( envMap ) {
//uniforms.envMap.value = material.envMap; // part of uniforms common
uniforms.envMapIntensity.value = material.envMapIntensity;
}
}
function refreshUniformsPhysical( uniforms, material ) {
refreshUniformsStandard( uniforms, material );
uniforms.reflectivity.value = material.reflectivity; // also part of uniforms common
uniforms.clearcoat.value = material.clearcoat;
uniforms.clearcoatRoughness.value = material.clearcoatRoughness;
if ( material.sheen ) uniforms.sheen.value.copy( material.sheen );
if ( material.clearcoatMap ) {
uniforms.clearcoatMap.value = material.clearcoatMap;
}
if ( material.clearcoatRoughnessMap ) {
uniforms.clearcoatRoughnessMap.value = material.clearcoatRoughnessMap;
}
if ( material.clearcoatNormalMap ) {
uniforms.clearcoatNormalScale.value.copy( material.clearcoatNormalScale );
uniforms.clearcoatNormalMap.value = material.clearcoatNormalMap;
if ( material.side === BackSide ) {
uniforms.clearcoatNormalScale.value.negate();
}
}
uniforms.transmission.value = material.transmission;
if ( material.transmissionMap ) {
uniforms.transmissionMap.value = material.transmissionMap;
}
}
function refreshUniformsMatcap( uniforms, material ) {
if ( material.matcap ) {
uniforms.matcap.value = material.matcap;
}
if ( material.bumpMap ) {
uniforms.bumpMap.value = material.bumpMap;
uniforms.bumpScale.value = material.bumpScale;
if ( material.side === BackSide ) uniforms.bumpScale.value *= - 1;
}
if ( material.normalMap ) {
uniforms.normalMap.value = material.normalMap;
uniforms.normalScale.value.copy( material.normalScale );
if ( material.side === BackSide ) uniforms.normalScale.value.negate();
}
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
}
function refreshUniformsDepth( uniforms, material ) {
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
}
function refreshUniformsDistance( uniforms, material ) {
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
uniforms.referencePosition.value.copy( material.referencePosition );
uniforms.nearDistance.value = material.nearDistance;
uniforms.farDistance.value = material.farDistance;
}
function refreshUniformsNormal( uniforms, material ) {
if ( material.bumpMap ) {
uniforms.bumpMap.value = material.bumpMap;
uniforms.bumpScale.value = material.bumpScale;
if ( material.side === BackSide ) uniforms.bumpScale.value *= - 1;
}
if ( material.normalMap ) {
uniforms.normalMap.value = material.normalMap;
uniforms.normalScale.value.copy( material.normalScale );
if ( material.side === BackSide ) uniforms.normalScale.value.negate();
}
if ( material.displacementMap ) {
uniforms.displacementMap.value = material.displacementMap;
uniforms.displacementScale.value = material.displacementScale;
uniforms.displacementBias.value = material.displacementBias;
}
}
return {
refreshFogUniforms: refreshFogUniforms,
refreshMaterialUniforms: refreshMaterialUniforms
};
}
function createCanvasElement() {
const canvas = document.createElementNS( 'http://www.w3.org/1999/xhtml', 'canvas' );
canvas.style.display = 'block';
return canvas;
}
function WebGLRenderer( parameters ) {
parameters = parameters || {};
const _canvas = parameters.canvas !== undefined ? parameters.canvas : createCanvasElement(),
_context = parameters.context !== undefined ? parameters.context : null,
_alpha = parameters.alpha !== undefined ? parameters.alpha : false,
_depth = parameters.depth !== undefined ? parameters.depth : true,
_stencil = parameters.stencil !== undefined ? parameters.stencil : true,
_antialias = parameters.antialias !== undefined ? parameters.antialias : false,
_premultipliedAlpha = parameters.premultipliedAlpha !== undefined ? parameters.premultipliedAlpha : true,
_preserveDrawingBuffer = parameters.preserveDrawingBuffer !== undefined ? parameters.preserveDrawingBuffer : false,
_powerPreference = parameters.powerPreference !== undefined ? parameters.powerPreference : 'default',
_failIfMajorPerformanceCaveat = parameters.failIfMajorPerformanceCaveat !== undefined ? parameters.failIfMajorPerformanceCaveat : false;
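// Example (sketch): these defaults mirror the public constructor options, e.g.:
//
//   const renderer = new WebGLRenderer( { antialias: true, alpha: true } );
//   document.body.appendChild( renderer.domElement );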
let currentRenderList = null;
let currentRenderState = null;
// render() can be called from within a callback triggered by another render.
// We track this so that the nested render call gets its list and state isolated from the parent render call.
const renderListStack = [];
const renderStateStack = [];
// public properties
this.domElement = _canvas;
// Debug configuration container
this.debug = {
/**
* Enables error checking and reporting when shader programs are being compiled
* @type {boolean}
*/
checkShaderErrors: true
};
// clearing
this.autoClear = true;
this.autoClearColor = true;
this.autoClearDepth = true;
this.autoClearStencil = true;
// scene graph
this.sortObjects = true;
// user-defined clipping
this.clippingPlanes = [];
this.localClippingEnabled = false;
// physically based shading
this.gammaFactor = 2.0; // for backwards compatibility
this.outputEncoding = LinearEncoding;
// physical lights
this.physicallyCorrectLights = false;
// tone mapping
this.toneMapping = NoToneMapping;
this.toneMappingExposure = 1.0;
// internal properties
const _this = this;
let _isContextLost = false;
// internal state cache
let _currentActiveCubeFace = 0;
let _currentActiveMipmapLevel = 0;
let _currentRenderTarget = null;
let _currentMaterialId = - 1;
let _currentCamera = null;
const _currentViewport = new Vector4();
const _currentScissor = new Vector4();
let _currentScissorTest = null;
//
let _width = _canvas.width;
let _height = _canvas.height;
let _pixelRatio = 1;
let _opaqueSort = null;
let _transparentSort = null;
const _viewport = new Vector4( 0, 0, _width, _height );
const _scissor = new Vector4( 0, 0, _width, _height );
let _scissorTest = false;
// frustum
const _frustum = new Frustum();
// clipping
let _clippingEnabled = false;
let _localClippingEnabled = false;
// camera matrices cache
const _projScreenMatrix = new Matrix4();
const _vector3 = new Vector3();
const _emptyScene = { background: null, fog: null, environment: null, overrideMaterial: null, isScene: true };
function getTargetPixelRatio() {
return _currentRenderTarget === null ? _pixelRatio : 1;
}
// initialize
let _gl = _context;
function getContext( contextNames, contextAttributes ) {
for ( let i = 0; i < contextNames.length; i ++ ) {
const contextName = contextNames[ i ];
const context = _canvas.getContext( contextName, contextAttributes );
if ( context !== null ) return context;
}
return null;
}
try {
const contextAttributes = {
alpha: _alpha,
depth: _depth,
stencil: _stencil,
antialias: _antialias,
premultipliedAlpha: _premultipliedAlpha,
preserveDrawingBuffer: _preserveDrawingBuffer,
powerPreference: _powerPreference,
failIfMajorPerformanceCaveat: _failIfMajorPerformanceCaveat
};
// event listeners must be registered before WebGL context is created, see #12753
_canvas.addEventListener( 'webglcontextlost', onContextLost, false );
_canvas.addEventListener( 'webglcontextrestored', onContextRestore, false );
if ( _gl === null ) {
const contextNames = [ 'webgl2', 'webgl', 'experimental-webgl' ];
if ( _this.isWebGL1Renderer === true ) {
contextNames.shift();
}
_gl = getContext( contextNames, contextAttributes );
if ( _gl === null ) {
if ( getContext( contextNames ) ) {
throw new Error( 'Error creating WebGL context with your selected attributes.' );
} else {
throw new Error( 'Error creating WebGL context.' );
}
}
}
// Some experimental-webgl implementations do not have getShaderPrecisionFormat
if ( _gl.getShaderPrecisionFormat === undefined ) {
_gl.getShaderPrecisionFormat = function () {
return { 'rangeMin': 1, 'rangeMax': 1, 'precision': 1 };
};
}
} catch ( error ) {
console.error( 'THREE.WebGLRenderer: ' + error.message );
throw error;
}
let extensions, capabilities, state, info;
let properties, textures, cubemaps, attributes, geometries, objects;
let programCache, materials, renderLists, renderStates, clipping, shadowMap;
let background, morphtargets, bufferRenderer, indexedBufferRenderer;
let utils, bindingStates;
function initGLContext() {
extensions = new WebGLExtensions( _gl );
capabilities = new WebGLCapabilities( _gl, extensions, parameters );
extensions.init( capabilities );
utils = new WebGLUtils( _gl, extensions, capabilities );
state = new WebGLState( _gl, extensions, capabilities );
info = new WebGLInfo( _gl );
properties = new WebGLProperties();
textures = new WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info );
cubemaps = new WebGLCubeMaps( _this );
attributes = new WebGLAttributes( _gl, capabilities );
bindingStates = new WebGLBindingStates( _gl, extensions, attributes, capabilities );
geometries = new WebGLGeometries( _gl, attributes, info, bindingStates );
objects = new WebGLObjects( _gl, geometries, attributes, info );
morphtargets = new WebGLMorphtargets( _gl );
clipping = new WebGLClipping( properties );
programCache = new WebGLPrograms( _this, cubemaps, extensions, capabilities, bindingStates, clipping );
materials = new WebGLMaterials( properties );
renderLists = new WebGLRenderLists( properties );
renderStates = new WebGLRenderStates( extensions, capabilities );
background = new WebGLBackground( _this, cubemaps, state, objects, _premultipliedAlpha );
shadowMap = new WebGLShadowMap( _this, objects, capabilities );
bufferRenderer = new WebGLBufferRenderer( _gl, extensions, info, capabilities );
indexedBufferRenderer = new WebGLIndexedBufferRenderer( _gl, extensions, info, capabilities );
info.programs = programCache.programs;
_this.capabilities = capabilities;
_this.extensions = extensions;
_this.properties = properties;
_this.renderLists = renderLists;
_this.shadowMap = shadowMap;
_this.state = state;
_this.info = info;
}
initGLContext();
// xr
const xr = new WebXRManager( _this, _gl );
this.xr = xr;
// API
this.getContext = function () {
return _gl;
};
this.getContextAttributes = function () {
return _gl.getContextAttributes();
};
this.forceContextLoss = function () {
const extension = extensions.get( 'WEBGL_lose_context' );
if ( extension ) extension.loseContext();
};
this.forceContextRestore = function () {
const extension = extensions.get( 'WEBGL_lose_context' );
if ( extension ) extension.restoreContext();
};
this.getPixelRatio = function () {
return _pixelRatio;
};
this.setPixelRatio = function ( value ) {
if ( value === undefined ) return;
_pixelRatio = value;
this.setSize( _width, _height, false );
};
this.getSize = function ( target ) {
if ( target === undefined ) {
console.warn( 'WebGLRenderer: .getSize() now requires a Vector2 as an argument' );
target = new Vector2();
}
return target.set( _width, _height );
};
this.setSize = function ( width, height, updateStyle ) {
if ( xr.isPresenting ) {
console.warn( 'THREE.WebGLRenderer: Can\'t change size while VR device is presenting.' );
return;
}
_width = width;
_height = height;
_canvas.width = Math.floor( width * _pixelRatio );
_canvas.height = Math.floor( height * _pixelRatio );
if ( updateStyle !== false ) {
_canvas.style.width = width + 'px';
_canvas.style.height = height + 'px';
}
this.setViewport( 0, 0, width, height );
};
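// Example (sketch): setSize() expects CSS pixels and applies the pixel ratio internally,
// so typical application code is:
//
//   renderer.setPixelRatio( window.devicePixelRatio );
//   renderer.setSize( window.innerWidth, window.innerHeight );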
this.getDrawingBufferSize = function ( target ) {
if ( target === undefined ) {
console.warn( 'WebGLRenderer: .getDrawingBufferSize() now requires a Vector2 as an argument' );
target = new Vector2();
}
return target.set( _width * _pixelRatio, _height * _pixelRatio ).floor();
};
this.setDrawingBufferSize = function ( width, height, pixelRatio ) {
_width = width;
_height = height;
_pixelRatio = pixelRatio;
_canvas.width = Math.floor( width * pixelRatio );
_canvas.height = Math.floor( height * pixelRatio );
this.setViewport( 0, 0, width, height );
};
this.getCurrentViewport = function ( target ) {
if ( target === undefined ) {
console.warn( 'WebGLRenderer: .getCurrentViewport() now requires a Vector4 as an argument' );
target = new Vector4();
}
return target.copy( _currentViewport );
};
this.getViewport = function ( target ) {
return target.copy( _viewport );
};
this.setViewport = function ( x, y, width, height ) {
if ( x.isVector4 ) {
_viewport.set( x.x, x.y, x.z, x.w );
} else {
_viewport.set( x, y, width, height );
}
state.viewport( _currentViewport.copy( _viewport ).multiplyScalar( _pixelRatio ).floor() );
};
this.getScissor = function ( target ) {
return target.copy( _scissor );
};
this.setScissor = function ( x, y, width, height ) {
if ( x.isVector4 ) {
_scissor.set( x.x, x.y, x.z, x.w );
} else {
_scissor.set( x, y, width, height );
}
state.scissor( _currentScissor.copy( _scissor ).multiplyScalar( _pixelRatio ).floor() );
};
this.getScissorTest = function () {
return _scissorTest;
};
this.setScissorTest = function ( boolean ) {
state.setScissorTest( _scissorTest = boolean );
};
this.setOpaqueSort = function ( method ) {
_opaqueSort = method;
};
this.setTransparentSort = function ( method ) {
_transparentSort = method;
};
// Clearing
this.getClearColor = function ( target ) {
if ( target === undefined ) {
console.warn( 'WebGLRenderer: .getClearColor() now requires a Color as an argument' );
target = new Color();
}
return target.copy( background.getClearColor() );
};
this.setClearColor = function () {
background.setClearColor.apply( background, arguments );
};
this.getClearAlpha = function () {
return background.getClearAlpha();
};
this.setClearAlpha = function () {
background.setClearAlpha.apply( background, arguments );
};
this.clear = function ( color, depth, stencil ) {
let bits = 0;
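// 16384 = gl.COLOR_BUFFER_BIT, 256 = gl.DEPTH_BUFFER_BIT, 1024 = gl.STENCIL_BUFFER_BIT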
if ( color === undefined || color ) bits |= 16384;
if ( depth === undefined || depth ) bits |= 256;
if ( stencil === undefined || stencil ) bits |= 1024;
_gl.clear( bits );
};
this.clearColor = function () {
this.clear( true, false, false );
};
this.clearDepth = function () {
this.clear( false, true, false );
};
this.clearStencil = function () {
this.clear( false, false, true );
};
//
this.dispose = function () {
_canvas.removeEventListener( 'webglcontextlost', onContextLost, false );
_canvas.removeEventListener( 'webglcontextrestored', onContextRestore, false );
renderLists.dispose();
renderStates.dispose();
properties.dispose();
cubemaps.dispose();
objects.dispose();
bindingStates.dispose();
xr.dispose();
xr.removeEventListener( 'sessionstart', onXRSessionStart );
xr.removeEventListener( 'sessionend', onXRSessionEnd );
animation.stop();
};
// Events
function onContextLost( event ) {
event.preventDefault();
console.log( 'THREE.WebGLRenderer: Context Lost.' );
_isContextLost = true;
}
function onContextRestore( /* event */ ) {
console.log( 'THREE.WebGLRenderer: Context Restored.' );
_isContextLost = false;
const infoAutoReset = info.autoReset;
const shadowMapEnabled = shadowMap.enabled;
const shadowMapAutoUpdate = shadowMap.autoUpdate;
const shadowMapNeedsUpdate = shadowMap.needsUpdate;
const shadowMapType = shadowMap.type;
initGLContext();
info.autoReset = infoAutoReset;
shadowMap.enabled = shadowMapEnabled;
shadowMap.autoUpdate = shadowMapAutoUpdate;
shadowMap.needsUpdate = shadowMapNeedsUpdate;
shadowMap.type = shadowMapType;
}
function onMaterialDispose( event ) {
const material = event.target;
material.removeEventListener( 'dispose', onMaterialDispose );
deallocateMaterial( material );
}
// Buffer deallocation
function deallocateMaterial( material ) {
releaseMaterialProgramReferences( material );
properties.remove( material );
}
function releaseMaterialProgramReferences( material ) {
const programs = properties.get( material ).programs;
if ( programs !== undefined ) {
programs.forEach( function ( program ) {
programCache.releaseProgram( program );
} );
}
}
// Buffer rendering
function renderObjectImmediate( object, program ) {
object.render( function ( object ) {
_this.renderBufferImmediate( object, program );
} );
}
this.renderBufferImmediate = function ( object, program ) {
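// Raw WebGL enums used below: 34962 = gl.ARRAY_BUFFER, 35048 = gl.DYNAMIC_DRAW,
// 5126 = gl.FLOAT, 4 = gl.TRIANGLES (in drawArrays).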
bindingStates.initAttributes();
const buffers = properties.get( object );
if ( object.hasPositions && ! buffers.position ) buffers.position = _gl.createBuffer();
if ( object.hasNormals && ! buffers.normal ) buffers.normal = _gl.createBuffer();
if ( object.hasUvs && ! buffers.uv ) buffers.uv = _gl.createBuffer();
if ( object.hasColors && ! buffers.color ) buffers.color = _gl.createBuffer();
const programAttributes = program.getAttributes();
if ( object.hasPositions ) {
_gl.bindBuffer( 34962, buffers.position );
_gl.bufferData( 34962, object.positionArray, 35048 );
bindingStates.enableAttribute( programAttributes.position );
_gl.vertexAttribPointer( programAttributes.position, 3, 5126, false, 0, 0 );
}
if ( object.hasNormals ) {
_gl.bindBuffer( 34962, buffers.normal );
_gl.bufferData( 34962, object.normalArray, 35048 );
bindingStates.enableAttribute( programAttributes.normal );
_gl.vertexAttribPointer( programAttributes.normal, 3, 5126, false, 0, 0 );
}
if ( object.hasUvs ) {
_gl.bindBuffer( 34962, buffers.uv );
_gl.bufferData( 34962, object.uvArray, 35048 );
bindingStates.enableAttribute( programAttributes.uv );
_gl.vertexAttribPointer( programAttributes.uv, 2, 5126, false, 0, 0 );
}
if ( object.hasColors ) {
_gl.bindBuffer( 34962, buffers.color );
_gl.bufferData( 34962, object.colorArray, 35048 );
bindingStates.enableAttribute( programAttributes.color );
_gl.vertexAttribPointer( programAttributes.color, 3, 5126, false, 0, 0 );
}
bindingStates.disableUnusedAttributes();
_gl.drawArrays( 4, 0, object.count );
object.count = 0;
};
this.renderBufferDirect = function ( camera, scene, geometry, material, object, group ) {
if ( scene === null ) scene = _emptyScene; // renderBufferDirect second parameter used to be fog (could be null)
const frontFaceCW = ( object.isMesh && object.matrixWorld.determinant() < 0 );
const program = setProgram( camera, scene, material, object );
state.setMaterial( material, frontFaceCW );
//
let index = geometry.index;
const position = geometry.attributes.position;
//
if ( index === null ) {
if ( position === undefined || position.count === 0 ) return;
} else if ( index.count === 0 ) {
return;
}
//
let rangeFactor = 1;
if ( material.wireframe === true ) {
index = geometries.getWireframeAttribute( geometry );
rangeFactor = 2;
}
if ( material.morphTargets || material.morphNormals ) {
morphtargets.update( object, geometry, material, program );
}
bindingStates.setup( object, material, program, geometry, index );
let attribute;
let renderer = bufferRenderer;
if ( index !== null ) {
attribute = attributes.get( index );
renderer = indexedBufferRenderer;
renderer.setIndex( attribute );
}
//
const dataCount = ( index !== null ) ? index.count : position.count;
const rangeStart = geometry.drawRange.start * rangeFactor;
const rangeCount = geometry.drawRange.count * rangeFactor;
const groupStart = group !== null ? group.start * rangeFactor : 0;
const groupCount = group !== null ? group.count * rangeFactor : Infinity;
const drawStart = Math.max( rangeStart, groupStart );
const drawEnd = Math.min( dataCount, rangeStart + rangeCount, groupStart + groupCount ) - 1;
const drawCount = Math.max( 0, drawEnd - drawStart + 1 );
if ( drawCount === 0 ) return;
//
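// Raw WebGL primitive enums passed to setMode(): 0 = gl.POINTS, 1 = gl.LINES,
// 2 = gl.LINE_LOOP, 3 = gl.LINE_STRIP, 4 = gl.TRIANGLES.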
if ( object.isMesh ) {
if ( material.wireframe === true ) {
state.setLineWidth( material.wireframeLinewidth * getTargetPixelRatio() );
renderer.setMode( 1 );
} else {
renderer.setMode( 4 );
}
} else if ( object.isLine ) {
let lineWidth = material.linewidth;
if ( lineWidth === undefined ) lineWidth = 1; // Not using Line*Material
state.setLineWidth( lineWidth * getTargetPixelRatio() );
if ( object.isLineSegments ) {
renderer.setMode( 1 );
} else if ( object.isLineLoop ) {
renderer.setMode( 2 );
} else {
renderer.setMode( 3 );
}
} else if ( object.isPoints ) {
renderer.setMode( 0 );
} else if ( object.isSprite ) {
renderer.setMode( 4 );
}
if ( object.isInstancedMesh ) {
renderer.renderInstances( drawStart, drawCount, object.count );
} else if ( geometry.isInstancedBufferGeometry ) {
const instanceCount = Math.min( geometry.instanceCount, geometry._maxInstanceCount );
renderer.renderInstances( drawStart, drawCount, instanceCount );
} else {
renderer.render( drawStart, drawCount );
}
};
// Compile
this.compile = function ( scene, camera ) {
currentRenderState = renderStates.get( scene );
currentRenderState.init();
scene.traverseVisible( function ( object ) {
if ( object.isLight && object.layers.test( camera.layers ) ) {
currentRenderState.pushLight( object );
if ( object.castShadow ) {
currentRenderState.pushShadow( object );
}
}
} );
currentRenderState.setupLights();
scene.traverse( function ( object ) {
const material = object.material;
if ( material ) {
if ( Array.isArray( material ) ) {
for ( let i = 0; i < material.length; i ++ ) {
const material2 = material[ i ];
getProgram( material2, scene, object );
}
} else {
getProgram( material, scene, object );
}
}
} );
};
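// Example (sketch): compile() can be used to pre-build shader programs before the first
// frame to avoid hitches, e.g.:
//
//   renderer.compile( scene, camera );
//   renderer.render( scene, camera );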
// Animation Loop
let onAnimationFrameCallback = null;
function onAnimationFrame( time ) {
if ( onAnimationFrameCallback ) onAnimationFrameCallback( time );
}
function onXRSessionStart() {
animation.stop();
}
function onXRSessionEnd() {
animation.start();
}
const animation = new WebGLAnimation();
animation.setAnimationLoop( onAnimationFrame );
if ( typeof window !== 'undefined' ) animation.setContext( window );
this.setAnimationLoop = function ( callback ) {
onAnimationFrameCallback = callback;
xr.setAnimationLoop( callback );
( callback === null ) ? animation.stop() : animation.start();
};
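// Example (sketch): setAnimationLoop() is the XR-safe replacement for a manual
// requestAnimationFrame loop, e.g.:
//
//   renderer.setAnimationLoop( ( time ) => renderer.render( scene, camera ) );
//   renderer.setAnimationLoop( null ); // stops the loop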
xr.addEventListener( 'sessionstart', onXRSessionStart );
xr.addEventListener( 'sessionend', onXRSessionEnd );
// Rendering
this.render = function ( scene, camera ) {
let renderTarget, forceClear;
if ( arguments[ 2 ] !== undefined ) {
console.warn( 'THREE.WebGLRenderer.render(): the renderTarget argument has been removed. Use .setRenderTarget() instead.' );
renderTarget = arguments[ 2 ];
}
if ( arguments[ 3 ] !== undefined ) {
console.warn( 'THREE.WebGLRenderer.render(): the forceClear argument has been removed. Use .clear() instead.' );
forceClear = arguments[ 3 ];
}
if ( camera !== undefined && camera.isCamera !== true ) {
console.error( 'THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.' );
return;
}
if ( _isContextLost === true ) return;
// update scene graph
if ( scene.autoUpdate === true ) scene.updateMatrixWorld();
// update camera matrices and frustum
if ( camera.parent === null ) camera.updateMatrixWorld();
if ( xr.enabled === true && xr.isPresenting === true ) {
camera = xr.getCamera( camera );
}
//
if ( scene.isScene === true ) scene.onBeforeRender( _this, scene, camera, renderTarget || _currentRenderTarget );
currentRenderState = renderStates.get( scene, renderStateStack.length );
currentRenderState.init();
renderStateStack.push( currentRenderState );
_projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse );
_frustum.setFromProjectionMatrix( _projScreenMatrix );
_localClippingEnabled = this.localClippingEnabled;
_clippingEnabled = clipping.init( this.clippingPlanes, _localClippingEnabled, camera );
currentRenderList = renderLists.get( scene, renderListStack.length );
currentRenderList.init();
renderListStack.push( currentRenderList );
projectObject( scene, camera, 0, _this.sortObjects );
currentRenderList.finish();
if ( _this.sortObjects === true ) {
currentRenderList.sort( _opaqueSort, _transparentSort );
}
//
if ( _clippingEnabled === true ) clipping.beginShadows();
const shadowsArray = currentRenderState.state.shadowsArray;
shadowMap.render( shadowsArray, scene, camera );
currentRenderState.setupLights();
currentRenderState.setupLightsView( camera );
if ( _clippingEnabled === true ) clipping.endShadows();
//
if ( this.info.autoReset === true ) this.info.reset();
if ( renderTarget !== undefined ) {
this.setRenderTarget( renderTarget );
}
//
background.render( currentRenderList, scene, camera, forceClear );
// render scene
const opaqueObjects = currentRenderList.opaque;
const transparentObjects = currentRenderList.transparent;
if ( opaqueObjects.length > 0 ) renderObjects( opaqueObjects, scene, camera );
if ( transparentObjects.length > 0 ) renderObjects( transparentObjects, scene, camera );
//
if ( _currentRenderTarget !== null ) {
// Generate mipmap if we're using any kind of mipmap filtering
textures.updateRenderTargetMipmap( _currentRenderTarget );
// resolve multisample renderbuffers to a single-sample texture if necessary
textures.updateMultisampleRenderTarget( _currentRenderTarget );
}
//
if ( scene.isScene === true ) scene.onAfterRender( _this, scene, camera );
// Ensure depth buffer writing is enabled so it can be cleared on next render
state.buffers.depth.setTest( true );
state.buffers.depth.setMask( true );
state.buffers.color.setMask( true );
state.setPolygonOffset( false );
// _gl.finish();
bindingStates.resetDefaultState();
_currentMaterialId = - 1;
_currentCamera = null;
renderStateStack.pop();
if ( renderStateStack.length > 0 ) {
currentRenderState = renderStateStack[ renderStateStack.length - 1 ];
} else {
currentRenderState = null;
}
renderListStack.pop();
if ( renderListStack.length > 0 ) {
currentRenderList = renderListStack[ renderListStack.length - 1 ];
} else {
currentRenderList = null;
}
};
function projectObject( object, camera, groupOrder, sortObjects ) {
if ( object.visible === false ) return;
const visible = object.layers.test( camera.layers );
if ( visible ) {
if ( object.isGroup ) {
groupOrder = object.renderOrder;
} else if ( object.isLOD ) {
if ( object.autoUpdate === true ) object.update( camera );
} else if ( object.isLight ) {
currentRenderState.pushLight( object );
if ( object.castShadow ) {
currentRenderState.pushShadow( object );
}
} else if ( object.isSprite ) {
if ( ! object.frustumCulled || _frustum.intersectsSprite( object ) ) {
if ( sortObjects ) {
_vector3.setFromMatrixPosition( object.matrixWorld )
.applyMatrix4( _projScreenMatrix );
}
const geometry = objects.update( object );
const material = object.material;
if ( material.visible ) {
currentRenderList.push( object, geometry, material, groupOrder, _vector3.z, null );
}
}
} else if ( object.isImmediateRenderObject ) {
if ( sortObjects ) {
_vector3.setFromMatrixPosition( object.matrixWorld )
.applyMatrix4( _projScreenMatrix );
}
currentRenderList.push( object, null, object.material, groupOrder, _vector3.z, null );
} else if ( object.isMesh || object.isLine || object.isPoints ) {
if ( object.isSkinnedMesh ) {
// update skeleton only once in a frame
if ( object.skeleton.frame !== info.render.frame ) {
object.skeleton.update();
object.skeleton.frame = info.render.frame;
}
}
if ( ! object.frustumCulled || _frustum.intersectsObject( object ) ) {
if ( sortObjects ) {
_vector3.setFromMatrixPosition( object.matrixWorld )
.applyMatrix4( _projScreenMatrix );
}
const geometry = objects.update( object );
const material = object.material;
if ( Array.isArray( material ) ) {
const groups = geometry.groups;
for ( let i = 0, l = groups.length; i < l; i ++ ) {
const group = groups[ i ];
const groupMaterial = material[ group.materialIndex ];
if ( groupMaterial && groupMaterial.visible ) {
currentRenderList.push( object, geometry, groupMaterial, groupOrder, _vector3.z, group );
}
}
} else if ( material.visible ) {
currentRenderList.push( object, geometry, material, groupOrder, _vector3.z, null );
}
}
}
}
const children = object.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
projectObject( children[ i ], camera, groupOrder, sortObjects );
}
}
function renderObjects( renderList, scene, camera ) {
const overrideMaterial = scene.isScene === true ? scene.overrideMaterial : null;
for ( let i = 0, l = renderList.length; i < l; i ++ ) {
const renderItem = renderList[ i ];
const object = renderItem.object;
const geometry = renderItem.geometry;
const material = overrideMaterial === null ? renderItem.material : overrideMaterial;
const group = renderItem.group;
if ( camera.isArrayCamera ) {
const cameras = camera.cameras;
for ( let j = 0, jl = cameras.length; j < jl; j ++ ) {
const camera2 = cameras[ j ];
if ( object.layers.test( camera2.layers ) ) {
state.viewport( _currentViewport.copy( camera2.viewport ) );
currentRenderState.setupLightsView( camera2 );
renderObject( object, scene, camera2, geometry, material, group );
}
}
} else {
renderObject( object, scene, camera, geometry, material, group );
}
}
}
function renderObject( object, scene, camera, geometry, material, group ) {
object.onBeforeRender( _this, scene, camera, geometry, material, group );
object.modelViewMatrix.multiplyMatrices( camera.matrixWorldInverse, object.matrixWorld );
object.normalMatrix.getNormalMatrix( object.modelViewMatrix );
if ( object.isImmediateRenderObject ) {
const program = setProgram( camera, scene, material, object );
state.setMaterial( material );
bindingStates.reset();
renderObjectImmediate( object, program );
} else {
_this.renderBufferDirect( camera, scene, geometry, material, object, group );
}
object.onAfterRender( _this, scene, camera, geometry, material, group );
}
function getProgram( material, scene, object ) {
if ( scene.isScene !== true ) scene = _emptyScene; // scene could be a Mesh, Line, Points, ...
const materialProperties = properties.get( material );
const lights = currentRenderState.state.lights;
const shadowsArray = currentRenderState.state.shadowsArray;
const lightsStateVersion = lights.state.version;
const parameters = programCache.getParameters( material, lights.state, shadowsArray, scene, object );
const programCacheKey = programCache.getProgramCacheKey( parameters );
let programs = materialProperties.programs;
// always update environment and fog - changing these triggers a getProgram call, but it's possible that the program doesn't change
materialProperties.environment = material.isMeshStandardMaterial ? scene.environment : null;
materialProperties.fog = scene.fog;
materialProperties.envMap = cubemaps.get( material.envMap || materialProperties.environment );
if ( programs === undefined ) {
// new material
material.addEventListener( 'dispose', onMaterialDispose );
programs = new Map();
materialProperties.programs = programs;
}
let program = programs.get( programCacheKey );
if ( program !== undefined ) {
// early out if program and light state are identical
if ( materialProperties.currentProgram === program && materialProperties.lightsStateVersion === lightsStateVersion ) {
updateCommonMaterialProperties( material, parameters );
return program;
}
} else {
parameters.uniforms = programCache.getUniforms( material );
material.onBuild( parameters, _this );
material.onBeforeCompile( parameters, _this );
program = programCache.acquireProgram( parameters, programCacheKey );
programs.set( programCacheKey, program );
materialProperties.uniforms = parameters.uniforms;
}
const uniforms = materialProperties.uniforms;
if ( ( ! material.isShaderMaterial && ! material.isRawShaderMaterial ) || material.clipping === true ) {
uniforms.clippingPlanes = clipping.uniform;
}
updateCommonMaterialProperties( material, parameters );
// store the light setup it was created for
materialProperties.needsLights = materialNeedsLights( material );
materialProperties.lightsStateVersion = lightsStateVersion;
if ( materialProperties.needsLights ) {
// wire up the material to this renderer's lighting state
uniforms.ambientLightColor.value = lights.state.ambient;
uniforms.lightProbe.value = lights.state.probe;
uniforms.directionalLights.value = lights.state.directional;
uniforms.directionalLightShadows.value = lights.state.directionalShadow;
uniforms.spotLights.value = lights.state.spot;
uniforms.spotLightShadows.value = lights.state.spotShadow;
uniforms.rectAreaLights.value = lights.state.rectArea;
uniforms.ltc_1.value = lights.state.rectAreaLTC1;
uniforms.ltc_2.value = lights.state.rectAreaLTC2;
uniforms.pointLights.value = lights.state.point;
uniforms.pointLightShadows.value = lights.state.pointShadow;
uniforms.hemisphereLights.value = lights.state.hemi;
uniforms.directionalShadowMap.value = lights.state.directionalShadowMap;
uniforms.directionalShadowMatrix.value = lights.state.directionalShadowMatrix;
uniforms.spotShadowMap.value = lights.state.spotShadowMap;
uniforms.spotShadowMatrix.value = lights.state.spotShadowMatrix;
uniforms.pointShadowMap.value = lights.state.pointShadowMap;
uniforms.pointShadowMatrix.value = lights.state.pointShadowMatrix;
// TODO (abelnation): add area lights shadow info to uniforms
}
const progUniforms = program.getUniforms();
const uniformsList = WebGLUniforms.seqWithValue( progUniforms.seq, uniforms );
materialProperties.currentProgram = program;
materialProperties.uniformsList = uniformsList;
return program;
}
function updateCommonMaterialProperties( material, parameters ) {
const materialProperties = properties.get( material );
materialProperties.outputEncoding = parameters.outputEncoding;
materialProperties.instancing = parameters.instancing;
materialProperties.numClippingPlanes = parameters.numClippingPlanes;
materialProperties.numIntersection = parameters.numClipIntersection;
materialProperties.vertexAlphas = parameters.vertexAlphas;
}
function setProgram( camera, scene, material, object ) {
if ( scene.isScene !== true ) scene = _emptyScene; // scene could be a Mesh, Line, Points, ...
textures.resetTextureUnits();
const fog = scene.fog;
const environment = material.isMeshStandardMaterial ? scene.environment : null;
const encoding = ( _currentRenderTarget === null ) ? _this.outputEncoding : _currentRenderTarget.texture.encoding;
const envMap = cubemaps.get( material.envMap || environment );
const vertexAlphas = material.vertexColors === true && object.geometry && object.geometry.attributes.color && object.geometry.attributes.color.itemSize === 4;
const materialProperties = properties.get( material );
const lights = currentRenderState.state.lights;
if ( _clippingEnabled === true ) {
if ( _localClippingEnabled === true || camera !== _currentCamera ) {
const useCache =
camera === _currentCamera &&
material.id === _currentMaterialId;
// we might want to call this function with some ClippingGroup
// object instead of the material, once it becomes feasible
// (#8465, #8379)
clipping.setState( material, camera, useCache );
}
}
//
let needsProgramChange = false;
if ( material.version === materialProperties.__version ) {
if ( materialProperties.needsLights && ( materialProperties.lightsStateVersion !== lights.state.version ) ) {
needsProgramChange = true;
} else if ( materialProperties.outputEncoding !== encoding ) {
needsProgramChange = true;
} else if ( object.isInstancedMesh && materialProperties.instancing === false ) {
needsProgramChange = true;
} else if ( ! object.isInstancedMesh && materialProperties.instancing === true ) {
needsProgramChange = true;
} else if ( materialProperties.envMap !== envMap ) {
needsProgramChange = true;
} else if ( material.fog && materialProperties.fog !== fog ) {
needsProgramChange = true;
} else if ( materialProperties.numClippingPlanes !== undefined &&
( materialProperties.numClippingPlanes !== clipping.numPlanes ||
materialProperties.numIntersection !== clipping.numIntersection ) ) {
needsProgramChange = true;
} else if ( materialProperties.vertexAlphas !== vertexAlphas ) {
needsProgramChange = true;
}
} else {
needsProgramChange = true;
materialProperties.__version = material.version;
}
//
let program = materialProperties.currentProgram;
if ( needsProgramChange === true ) {
program = getProgram( material, scene, object );
}
let refreshProgram = false;
let refreshMaterial = false;
let refreshLights = false;
const p_uniforms = program.getUniforms(),
m_uniforms = materialProperties.uniforms;
if ( state.useProgram( program.program ) ) {
refreshProgram = true;
refreshMaterial = true;
refreshLights = true;
}
if ( material.id !== _currentMaterialId ) {
_currentMaterialId = material.id;
refreshMaterial = true;
}
if ( refreshProgram || _currentCamera !== camera ) {
p_uniforms.setValue( _gl, 'projectionMatrix', camera.projectionMatrix );
if ( capabilities.logarithmicDepthBuffer ) {
p_uniforms.setValue( _gl, 'logDepthBufFC',
2.0 / ( Math.log( camera.far + 1.0 ) / Math.LN2 ) );
}
if ( _currentCamera !== camera ) {
_currentCamera = camera;
// lighting uniforms depend on the camera so enforce an update
// now, in case this material supports lights - or later, when
// the next material that does gets activated:
refreshMaterial = true; // set to true on material change
refreshLights = true; // remains set until update done
}
// load material specific uniforms
// (shader material also gets them for the sake of genericity)
if ( material.isShaderMaterial ||
material.isMeshPhongMaterial ||
material.isMeshToonMaterial ||
material.isMeshStandardMaterial ||
material.envMap ) {
const uCamPos = p_uniforms.map.cameraPosition;
if ( uCamPos !== undefined ) {
uCamPos.setValue( _gl,
_vector3.setFromMatrixPosition( camera.matrixWorld ) );
}
}
if ( material.isMeshPhongMaterial ||
material.isMeshToonMaterial ||
material.isMeshLambertMaterial ||
material.isMeshBasicMaterial ||
material.isMeshStandardMaterial ||
material.isShaderMaterial ) {
p_uniforms.setValue( _gl, 'isOrthographic', camera.isOrthographicCamera === true );
}
if ( material.isMeshPhongMaterial ||
material.isMeshToonMaterial ||
material.isMeshLambertMaterial ||
material.isMeshBasicMaterial ||
material.isMeshStandardMaterial ||
material.isShaderMaterial ||
material.isShadowMaterial ||
material.skinning ) {
p_uniforms.setValue( _gl, 'viewMatrix', camera.matrixWorldInverse );
}
}
// skinning uniforms must be set even if material didn't change
// auto-setting of texture unit for bone texture must go before other textures
// otherwise textures used for skinning can take over texture units reserved for other material textures
if ( material.skinning ) {
p_uniforms.setOptional( _gl, object, 'bindMatrix' );
p_uniforms.setOptional( _gl, object, 'bindMatrixInverse' );
const skeleton = object.skeleton;
if ( skeleton ) {
const bones = skeleton.bones;
if ( capabilities.floatVertexTextures ) {
if ( skeleton.boneTexture === null ) {
// layout (1 matrix = 4 pixels)
// RGBA RGBA RGBA RGBA (=> column1, column2, column3, column4)
// with 8x8 pixel texture max 16 bones * 4 pixels = (8 * 8)
// 16x16 pixel texture max 64 bones * 4 pixels = (16 * 16)
// 32x32 pixel texture max 256 bones * 4 pixels = (32 * 32)
// 64x64 pixel texture max 1024 bones * 4 pixels = (64 * 64)
let size = Math.sqrt( bones.length * 4 ); // 4 pixels needed for 1 matrix
size = ceilPowerOfTwo( size );
size = Math.max( size, 4 );
const boneMatrices = new Float32Array( size * size * 4 ); // 4 floats per RGBA pixel
boneMatrices.set( skeleton.boneMatrices ); // copy current values
const boneTexture = new DataTexture( boneMatrices, size, size, RGBAFormat, FloatType );
skeleton.boneMatrices = boneMatrices;
skeleton.boneTexture = boneTexture;
skeleton.boneTextureSize = size;
}
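// Worked example (illustrative): for 60 bones, 60 * 4 = 240 pixels are needed,
// sqrt( 240 ) ~ 15.5, rounded up to the next power of two gives 16, so a
// 16x16 RGBA float texture is allocated (room for up to 64 bone matrices).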
p_uniforms.setValue( _gl, 'boneTexture', skeleton.boneTexture, textures );
p_uniforms.setValue( _gl, 'boneTextureSize', skeleton.boneTextureSize );
} else {
p_uniforms.setOptional( _gl, skeleton, 'boneMatrices' );
}
}
}
if ( refreshMaterial || materialProperties.receiveShadow !== object.receiveShadow ) {
materialProperties.receiveShadow = object.receiveShadow;
p_uniforms.setValue( _gl, 'receiveShadow', object.receiveShadow );
}
if ( refreshMaterial ) {
p_uniforms.setValue( _gl, 'toneMappingExposure', _this.toneMappingExposure );
if ( materialProperties.needsLights ) {
// the current material requires lighting info
// note: all lighting uniforms are always set correctly
// they simply reference the renderer's state for their
// values
//
// use the current material's .needsUpdate flags to set
// the GL state when required
markUniformsLightsNeedsUpdate( m_uniforms, refreshLights );
}
// refresh uniforms common to several materials
if ( fog && material.fog ) {
materials.refreshFogUniforms( m_uniforms, fog );
}
materials.refreshMaterialUniforms( m_uniforms, material, _pixelRatio, _height );
WebGLUniforms.upload( _gl, materialProperties.uniformsList, m_uniforms, textures );
}
if ( material.isShaderMaterial && material.uniformsNeedUpdate === true ) {
WebGLUniforms.upload( _gl, materialProperties.uniformsList, m_uniforms, textures );
material.uniformsNeedUpdate = false;
}
if ( material.isSpriteMaterial ) {
p_uniforms.setValue( _gl, 'center', object.center );
}
// common matrices
p_uniforms.setValue( _gl, 'modelViewMatrix', object.modelViewMatrix );
p_uniforms.setValue( _gl, 'normalMatrix', object.normalMatrix );
p_uniforms.setValue( _gl, 'modelMatrix', object.matrixWorld );
return program;
}
// If uniforms are marked as clean, they don't need to be loaded to the GPU.
function markUniformsLightsNeedsUpdate( uniforms, value ) {
uniforms.ambientLightColor.needsUpdate = value;
uniforms.lightProbe.needsUpdate = value;
uniforms.directionalLights.needsUpdate = value;
uniforms.directionalLightShadows.needsUpdate = value;
uniforms.pointLights.needsUpdate = value;
uniforms.pointLightShadows.needsUpdate = value;
uniforms.spotLights.needsUpdate = value;
uniforms.spotLightShadows.needsUpdate = value;
uniforms.rectAreaLights.needsUpdate = value;
uniforms.hemisphereLights.needsUpdate = value;
}
function materialNeedsLights( material ) {
return material.isMeshLambertMaterial || material.isMeshToonMaterial || material.isMeshPhongMaterial ||
material.isMeshStandardMaterial || material.isShadowMaterial ||
( material.isShaderMaterial && material.lights === true );
}
this.getActiveCubeFace = function () {
return _currentActiveCubeFace;
};
this.getActiveMipmapLevel = function () {
return _currentActiveMipmapLevel;
};
this.getRenderTarget = function () {
return _currentRenderTarget;
};
this.setRenderTarget = function ( renderTarget, activeCubeFace = 0, activeMipmapLevel = 0 ) {
_currentRenderTarget = renderTarget;
_currentActiveCubeFace = activeCubeFace;
_currentActiveMipmapLevel = activeMipmapLevel;
if ( renderTarget && properties.get( renderTarget ).__webglFramebuffer === undefined ) {
textures.setupRenderTarget( renderTarget );
}
let framebuffer = null;
let isCube = false;
let isRenderTarget3D = false;
if ( renderTarget ) {
const texture = renderTarget.texture;
if ( texture.isDataTexture3D || texture.isDataTexture2DArray ) {
isRenderTarget3D = true;
}
const __webglFramebuffer = properties.get( renderTarget ).__webglFramebuffer;
if ( renderTarget.isWebGLCubeRenderTarget ) {
framebuffer = __webglFramebuffer[ activeCubeFace ];
isCube = true;
} else if ( renderTarget.isWebGLMultisampleRenderTarget ) {
framebuffer = properties.get( renderTarget ).__webglMultisampledFramebuffer;
} else {
framebuffer = __webglFramebuffer;
}
_currentViewport.copy( renderTarget.viewport );
_currentScissor.copy( renderTarget.scissor );
_currentScissorTest = renderTarget.scissorTest;
} else {
_currentViewport.copy( _viewport ).multiplyScalar( _pixelRatio ).floor();
_currentScissor.copy( _scissor ).multiplyScalar( _pixelRatio ).floor();
_currentScissorTest = _scissorTest;
}
state.bindFramebuffer( 36160, framebuffer );
state.viewport( _currentViewport );
state.scissor( _currentScissor );
state.setScissorTest( _currentScissorTest );
if ( isCube ) {
const textureProperties = properties.get( renderTarget.texture );
_gl.framebufferTexture2D( 36160, 36064, 34069 + activeCubeFace, textureProperties.__webglTexture, activeMipmapLevel );
} else if ( isRenderTarget3D ) {
const textureProperties = properties.get( renderTarget.texture );
const layer = activeCubeFace || 0;
_gl.framebufferTextureLayer( 36160, 36064, textureProperties.__webglTexture, activeMipmapLevel || 0, layer );
}
};
this.readRenderTargetPixels = function ( renderTarget, x, y, width, height, buffer, activeCubeFaceIndex ) {
if ( ! ( renderTarget && renderTarget.isWebGLRenderTarget ) ) {
console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.' );
return;
}
let framebuffer = properties.get( renderTarget ).__webglFramebuffer;
if ( renderTarget.isWebGLCubeRenderTarget && activeCubeFaceIndex !== undefined ) {
framebuffer = framebuffer[ activeCubeFaceIndex ];
}
if ( framebuffer ) {
state.bindFramebuffer( 36160, framebuffer );
try {
const texture = renderTarget.texture;
const textureFormat = texture.format;
const textureType = texture.type;
if ( textureFormat !== RGBAFormat && utils.convert( textureFormat ) !== _gl.getParameter( 35739 ) ) {
console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in RGBA or implementation defined format.' );
return;
}
const halfFloatSupportedByExt = ( textureType === HalfFloatType ) && ( extensions.has( 'EXT_color_buffer_half_float' ) || ( capabilities.isWebGL2 && extensions.has( 'EXT_color_buffer_float' ) ) );
if ( textureType !== UnsignedByteType && utils.convert( textureType ) !== _gl.getParameter( 35738 ) && // Edge and Chrome Mac < 52 (#9513)
! ( textureType === FloatType && ( capabilities.isWebGL2 || extensions.has( 'OES_texture_float' ) || extensions.has( 'WEBGL_color_buffer_float' ) ) ) && // Chrome Mac >= 52 and Firefox
! halfFloatSupportedByExt ) {
console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in UnsignedByteType or implementation defined type.' );
return;
}
if ( _gl.checkFramebufferStatus( 36160 ) === 36053 ) {
// the following if statement ensures valid read requests (no out-of-bounds pixels, see #8604)
if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) {
_gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), buffer );
}
} else {
console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: readPixels from renderTarget failed. Framebuffer not complete.' );
}
} finally {
// restore framebuffer of current render target if necessary
const framebuffer = ( _currentRenderTarget !== null ) ? properties.get( _currentRenderTarget ).__webglFramebuffer : null;
state.bindFramebuffer( 36160, framebuffer );
}
}
};
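/**
 * Usage sketch (illustrative only; `renderer`, `scene` and `camera` are assumed
 * to exist elsewhere, and the target uses the default RGBA/UnsignedByteType):
 *
 * const target = new WebGLRenderTarget( 4, 4 );
 * renderer.setRenderTarget( target );
 * renderer.render( scene, camera );
 * const pixels = new Uint8Array( 4 * 4 * 4 ); // width * height * RGBA
 * renderer.readRenderTargetPixels( target, 0, 0, 4, 4, pixels );
 * renderer.setRenderTarget( null ); // back to the default framebuffer
 */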
this.copyFramebufferToTexture = function ( position, texture, level = 0 ) {
const levelScale = Math.pow( 2, - level );
const width = Math.floor( texture.image.width * levelScale );
const height = Math.floor( texture.image.height * levelScale );
const glFormat = utils.convert( texture.format );
textures.setTexture2D( texture, 0 );
_gl.copyTexImage2D( 3553, level, glFormat, position.x, position.y, width, height, 0 );
state.unbindTexture();
};
this.copyTextureToTexture = function ( position, srcTexture, dstTexture, level = 0 ) {
const width = srcTexture.image.width;
const height = srcTexture.image.height;
const glFormat = utils.convert( dstTexture.format );
const glType = utils.convert( dstTexture.type );
textures.setTexture2D( dstTexture, 0 );
// As another texture upload may have changed pixelStorei
// parameters, make sure they are correct for the dstTexture
_gl.pixelStorei( 37440, dstTexture.flipY );
_gl.pixelStorei( 37441, dstTexture.premultiplyAlpha );
_gl.pixelStorei( 3317, dstTexture.unpackAlignment );
if ( srcTexture.isDataTexture ) {
_gl.texSubImage2D( 3553, level, position.x, position.y, width, height, glFormat, glType, srcTexture.image.data );
} else {
if ( srcTexture.isCompressedTexture ) {
_gl.compressedTexSubImage2D( 3553, level, position.x, position.y, srcTexture.mipmaps[ 0 ].width, srcTexture.mipmaps[ 0 ].height, glFormat, srcTexture.mipmaps[ 0 ].data );
} else {
_gl.texSubImage2D( 3553, level, position.x, position.y, glFormat, glType, srcTexture.image );
}
}
// Generate mipmaps only when copying level 0
if ( level === 0 && dstTexture.generateMipmaps ) _gl.generateMipmap( 3553 );
state.unbindTexture();
};
this.copyTextureToTexture3D = function ( sourceBox, position, srcTexture, dstTexture, level = 0 ) {
if ( _this.isWebGL1Renderer ) {
console.warn( 'THREE.WebGLRenderer.copyTextureToTexture3D: can only be used with WebGL2.' );
return;
}
const { width, height, data } = srcTexture.image;
const glFormat = utils.convert( dstTexture.format );
const glType = utils.convert( dstTexture.type );
let glTarget;
if ( dstTexture.isDataTexture3D ) {
textures.setTexture3D( dstTexture, 0 );
glTarget = 32879;
} else if ( dstTexture.isDataTexture2DArray ) {
textures.setTexture2DArray( dstTexture, 0 );
glTarget = 35866;
} else {
console.warn( 'THREE.WebGLRenderer.copyTextureToTexture3D: only supports THREE.DataTexture3D and THREE.DataTexture2DArray.' );
return;
}
_gl.pixelStorei( 37440, dstTexture.flipY );
_gl.pixelStorei( 37441, dstTexture.premultiplyAlpha );
_gl.pixelStorei( 3317, dstTexture.unpackAlignment );
const unpackRowLen = _gl.getParameter( 3314 );
const unpackImageHeight = _gl.getParameter( 32878 );
const unpackSkipPixels = _gl.getParameter( 3316 );
const unpackSkipRows = _gl.getParameter( 3315 );
const unpackSkipImages = _gl.getParameter( 32877 );
_gl.pixelStorei( 3314, width );
_gl.pixelStorei( 32878, height );
_gl.pixelStorei( 3316, sourceBox.min.x );
_gl.pixelStorei( 3315, sourceBox.min.y );
_gl.pixelStorei( 32877, sourceBox.min.z );
_gl.texSubImage3D(
glTarget,
level,
position.x,
position.y,
position.z,
sourceBox.max.x - sourceBox.min.x + 1,
sourceBox.max.y - sourceBox.min.y + 1,
sourceBox.max.z - sourceBox.min.z + 1,
glFormat,
glType,
data
);
_gl.pixelStorei( 3314, unpackRowLen );
_gl.pixelStorei( 32878, unpackImageHeight );
_gl.pixelStorei( 3316, unpackSkipPixels );
_gl.pixelStorei( 3315, unpackSkipRows );
_gl.pixelStorei( 32877, unpackSkipImages );
// Generate mipmaps only when copying level 0
if ( level === 0 && dstTexture.generateMipmaps ) _gl.generateMipmap( glTarget );
state.unbindTexture();
};
this.initTexture = function ( texture ) {
textures.setTexture2D( texture, 0 );
state.unbindTexture();
};
this.resetState = function () {
_currentActiveCubeFace = 0;
_currentActiveMipmapLevel = 0;
_currentRenderTarget = null;
state.reset();
bindingStates.reset();
};
if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) {
__THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) ); // eslint-disable-line no-undef
}
}
class WebGL1Renderer extends WebGLRenderer {}
WebGL1Renderer.prototype.isWebGL1Renderer = true;
class Scene extends Object3D {
constructor() {
super();
this.type = 'Scene';
this.background = null;
this.environment = null;
this.fog = null;
this.overrideMaterial = null;
this.autoUpdate = true; // checked by the renderer
if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) {
__THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) ); // eslint-disable-line no-undef
}
}
copy( source, recursive ) {
super.copy( source, recursive );
if ( source.background !== null ) this.background = source.background.clone();
if ( source.environment !== null ) this.environment = source.environment.clone();
if ( source.fog !== null ) this.fog = source.fog.clone();
if ( source.overrideMaterial !== null ) this.overrideMaterial = source.overrideMaterial.clone();
this.autoUpdate = source.autoUpdate;
this.matrixAutoUpdate = source.matrixAutoUpdate;
return this;
}
toJSON( meta ) {
const data = super.toJSON( meta );
if ( this.background !== null ) data.object.background = this.background.toJSON( meta );
if ( this.environment !== null ) data.object.environment = this.environment.toJSON( meta );
if ( this.fog !== null ) data.object.fog = this.fog.toJSON();
return data;
}
}
Scene.prototype.isScene = true;
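/**
 * Usage sketch (illustrative only; `mesh` is assumed to be created elsewhere):
 *
 * const scene = new Scene();
 * scene.background = new Color( 0x202020 );
 * scene.fog = new Fog( 0x202020, 1, 100 );
 * scene.add( mesh );
 * // setting scene.overrideMaterial to a Material forces it onto every rendered object
 */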
class InterleavedBuffer {
constructor( array, stride ) {
this.array = array;
this.stride = stride;
this.count = array !== undefined ? array.length / stride : 0;
this.usage = StaticDrawUsage;
this.updateRange = { offset: 0, count: - 1 };
this.version = 0;
this.uuid = generateUUID();
this.onUploadCallback = function () {};
}
set needsUpdate( value ) {
if ( value === true ) this.version ++;
}
setUsage( value ) {
this.usage = value;
return this;
}
copy( source ) {
this.array = new source.array.constructor( source.array );
this.count = source.count;
this.stride = source.stride;
this.usage = source.usage;
return this;
}
copyAt( index1, attribute, index2 ) {
index1 *= this.stride;
index2 *= attribute.stride;
for ( let i = 0, l = this.stride; i < l; i ++ ) {
this.array[ index1 + i ] = attribute.array[ index2 + i ];
}
return this;
}
set( value, offset = 0 ) {
this.array.set( value, offset );
return this;
}
clone( data ) {
if ( data.arrayBuffers === undefined ) {
data.arrayBuffers = {};
}
if ( this.array.buffer._uuid === undefined ) {
this.array.buffer._uuid = generateUUID();
}
if ( data.arrayBuffers[ this.array.buffer._uuid ] === undefined ) {
data.arrayBuffers[ this.array.buffer._uuid ] = this.array.slice( 0 ).buffer;
}
const array = new this.array.constructor( data.arrayBuffers[ this.array.buffer._uuid ] );
const ib = new InterleavedBuffer( array, this.stride );
ib.setUsage( this.usage );
return ib;
}
onUpload( callback ) {
this.onUploadCallback = callback;
return this;
}
toJSON( data ) {
if ( data.arrayBuffers === undefined ) {
data.arrayBuffers = {};
}
// generate UUID for array buffer if necessary
if ( this.array.buffer._uuid === undefined ) {
this.array.buffer._uuid = generateUUID();
}
if ( data.arrayBuffers[ this.array.buffer._uuid ] === undefined ) {
data.arrayBuffers[ this.array.buffer._uuid ] = Array.prototype.slice.call( new Uint32Array( this.array.buffer ) );
}
//
return {
uuid: this.uuid,
buffer: this.array.buffer._uuid,
type: this.array.constructor.name,
stride: this.stride
};
}
}
InterleavedBuffer.prototype.isInterleavedBuffer = true;
const _vector$6 = new /*@__PURE__*/ Vector3();
class InterleavedBufferAttribute {
constructor( interleavedBuffer, itemSize, offset, normalized ) {
this.name = '';
this.data = interleavedBuffer;
this.itemSize = itemSize;
this.offset = offset;
this.normalized = normalized === true;
}
get count() {
return this.data.count;
}
get array() {
return this.data.array;
}
set needsUpdate( value ) {
this.data.needsUpdate = value;
}
applyMatrix4( m ) {
for ( let i = 0, l = this.data.count; i < l; i ++ ) {
_vector$6.x = this.getX( i );
_vector$6.y = this.getY( i );
_vector$6.z = this.getZ( i );
_vector$6.applyMatrix4( m );
this.setXYZ( i, _vector$6.x, _vector$6.y, _vector$6.z );
}
return this;
}
applyNormalMatrix( m ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$6.x = this.getX( i );
_vector$6.y = this.getY( i );
_vector$6.z = this.getZ( i );
_vector$6.applyNormalMatrix( m );
this.setXYZ( i, _vector$6.x, _vector$6.y, _vector$6.z );
}
return this;
}
transformDirection( m ) {
for ( let i = 0, l = this.count; i < l; i ++ ) {
_vector$6.x = this.getX( i );
_vector$6.y = this.getY( i );
_vector$6.z = this.getZ( i );
_vector$6.transformDirection( m );
this.setXYZ( i, _vector$6.x, _vector$6.y, _vector$6.z );
}
return this;
}
setX( index, x ) {
this.data.array[ index * this.data.stride + this.offset ] = x;
return this;
}
setY( index, y ) {
this.data.array[ index * this.data.stride + this.offset + 1 ] = y;
return this;
}
setZ( index, z ) {
this.data.array[ index * this.data.stride + this.offset + 2 ] = z;
return this;
}
setW( index, w ) {
this.data.array[ index * this.data.stride + this.offset + 3 ] = w;
return this;
}
getX( index ) {
return this.data.array[ index * this.data.stride + this.offset ];
}
getY( index ) {
return this.data.array[ index * this.data.stride + this.offset + 1 ];
}
getZ( index ) {
return this.data.array[ index * this.data.stride + this.offset + 2 ];
}
getW( index ) {
return this.data.array[ index * this.data.stride + this.offset + 3 ];
}
setXY( index, x, y ) {
index = index * this.data.stride + this.offset;
this.data.array[ index + 0 ] = x;
this.data.array[ index + 1 ] = y;
return this;
}
setXYZ( index, x, y, z ) {
index = index * this.data.stride + this.offset;
this.data.array[ index + 0 ] = x;
this.data.array[ index + 1 ] = y;
this.data.array[ index + 2 ] = z;
return this;
}
setXYZW( index, x, y, z, w ) {
index = index * this.data.stride + this.offset;
this.data.array[ index + 0 ] = x;
this.data.array[ index + 1 ] = y;
this.data.array[ index + 2 ] = z;
this.data.array[ index + 3 ] = w;
return this;
}
clone( data ) {
if ( data === undefined ) {
console.log( 'THREE.InterleavedBufferAttribute.clone(): Cloning an interleaved buffer attribute will deinterleave buffer data.' );
const array = [];
for ( let i = 0; i < this.count; i ++ ) {
const index = i * this.data.stride + this.offset;
for ( let j = 0; j < this.itemSize; j ++ ) {
array.push( this.data.array[ index + j ] );
}
}
return new BufferAttribute( new this.array.constructor( array ), this.itemSize, this.normalized );
} else {
if ( data.interleavedBuffers === undefined ) {
data.interleavedBuffers = {};
}
if ( data.interleavedBuffers[ this.data.uuid ] === undefined ) {
data.interleavedBuffers[ this.data.uuid ] = this.data.clone( data );
}
return new InterleavedBufferAttribute( data.interleavedBuffers[ this.data.uuid ], this.itemSize, this.offset, this.normalized );
}
}
toJSON( data ) {
if ( data === undefined ) {
console.log( 'THREE.InterleavedBufferAttribute.toJSON(): Serializing an interleaved buffer attribute will deinterleave buffer data.' );
const array = [];
for ( let i = 0; i < this.count; i ++ ) {
const index = i * this.data.stride + this.offset;
for ( let j = 0; j < this.itemSize; j ++ ) {
array.push( this.data.array[ index + j ] );
}
}
// deinterleave data and save it as an ordinary buffer attribute for now
return {
itemSize: this.itemSize,
type: this.array.constructor.name,
array: array,
normalized: this.normalized
};
} else {
// save as a true interleaved attribute
if ( data.interleavedBuffers === undefined ) {
data.interleavedBuffers = {};
}
if ( data.interleavedBuffers[ this.data.uuid ] === undefined ) {
data.interleavedBuffers[ this.data.uuid ] = this.data.toJSON( data );
}
return {
isInterleavedBufferAttribute: true,
itemSize: this.itemSize,
data: this.data.uuid,
offset: this.offset,
normalized: this.normalized
};
}
}
}
InterleavedBufferAttribute.prototype.isInterleavedBufferAttribute = true;
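/**
 * Usage sketch (illustrative only; `geometry` is an existing BufferGeometry):
 * two vertices packed as [ x, y, z, u, v ] with a stride of 5 floats.
 *
 * const data = new Float32Array( [
 * 	0, 0, 0, 0, 0,
 * 	1, 0, 0, 1, 0
 * ] );
 * const buffer = new InterleavedBuffer( data, 5 );
 * geometry.setAttribute( 'position', new InterleavedBufferAttribute( buffer, 3, 0 ) );
 * geometry.setAttribute( 'uv', new InterleavedBufferAttribute( buffer, 2, 3 ) );
 */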
const _basePosition = /*@__PURE__*/ new Vector3();
const _skinIndex = /*@__PURE__*/ new Vector4();
const _skinWeight = /*@__PURE__*/ new Vector4();
const _vector$5 = /*@__PURE__*/ new Vector3();
const _matrix = /*@__PURE__*/ new Matrix4();
class SkinnedMesh extends Mesh {
constructor( geometry, material ) {
super( geometry, material );
this.type = 'SkinnedMesh';
this.bindMode = 'attached';
this.bindMatrix = new Matrix4();
this.bindMatrixInverse = new Matrix4();
}
copy( source ) {
super.copy( source );
this.bindMode = source.bindMode;
this.bindMatrix.copy( source.bindMatrix );
this.bindMatrixInverse.copy( source.bindMatrixInverse );
this.skeleton = source.skeleton;
return this;
}
bind( skeleton, bindMatrix ) {
this.skeleton = skeleton;
if ( bindMatrix === undefined ) {
this.updateMatrixWorld( true );
this.skeleton.calculateInverses();
bindMatrix = this.matrixWorld;
}
this.bindMatrix.copy( bindMatrix );
this.bindMatrixInverse.copy( bindMatrix ).invert();
}
pose() {
this.skeleton.pose();
}
normalizeSkinWeights() {
const vector = new Vector4();
const skinWeight = this.geometry.attributes.skinWeight;
for ( let i = 0, l = skinWeight.count; i < l; i ++ ) {
vector.x = skinWeight.getX( i );
vector.y = skinWeight.getY( i );
vector.z = skinWeight.getZ( i );
vector.w = skinWeight.getW( i );
const scale = 1.0 / vector.manhattanLength();
if ( scale !== Infinity ) {
vector.multiplyScalar( scale );
} else {
vector.set( 1, 0, 0, 0 ); // do something reasonable
}
skinWeight.setXYZW( i, vector.x, vector.y, vector.z, vector.w );
}
}
updateMatrixWorld( force ) {
super.updateMatrixWorld( force );
if ( this.bindMode === 'attached' ) {
this.bindMatrixInverse.copy( this.matrixWorld ).invert();
} else if ( this.bindMode === 'detached' ) {
this.bindMatrixInverse.copy( this.bindMatrix ).invert();
} else {
console.warn( 'THREE.SkinnedMesh: Unrecognized bindMode: ' + this.bindMode );
}
}
boneTransform( index, target ) {
const skeleton = this.skeleton;
const geometry = this.geometry;
_skinIndex.fromBufferAttribute( geometry.attributes.skinIndex, index );
_skinWeight.fromBufferAttribute( geometry.attributes.skinWeight, index );
_basePosition.fromBufferAttribute( geometry.attributes.position, index ).applyMatrix4( this.bindMatrix );
target.set( 0, 0, 0 );
for ( let i = 0; i < 4; i ++ ) {
const weight = _skinWeight.getComponent( i );
if ( weight !== 0 ) {
const boneIndex = _skinIndex.getComponent( i );
_matrix.multiplyMatrices( skeleton.bones[ boneIndex ].matrixWorld, skeleton.boneInverses[ boneIndex ] );
target.addScaledVector( _vector$5.copy( _basePosition ).applyMatrix4( _matrix ), weight );
}
}
return target.applyMatrix4( this.bindMatrixInverse );
}
}
SkinnedMesh.prototype.isSkinnedMesh = true;
class Bone extends Object3D {
constructor() {
super();
this.type = 'Bone';
}
}
Bone.prototype.isBone = true;
const _offsetMatrix = /*@__PURE__*/ new Matrix4();
const _identityMatrix = /*@__PURE__*/ new Matrix4();
class Skeleton {
constructor( bones = [], boneInverses = [] ) {
this.uuid = generateUUID();
this.bones = bones.slice( 0 );
this.boneInverses = boneInverses;
this.boneMatrices = null;
this.boneTexture = null;
this.boneTextureSize = 0;
this.frame = - 1;
this.init();
}
init() {
const bones = this.bones;
const boneInverses = this.boneInverses;
this.boneMatrices = new Float32Array( bones.length * 16 );
// calculate inverse bone matrices if necessary
if ( boneInverses.length === 0 ) {
this.calculateInverses();
} else {
// handle special case: bone count and inverse matrix count do not match
if ( bones.length !== boneInverses.length ) {
console.warn( 'THREE.Skeleton: Number of inverse bone matrices does not match the number of bones.' );
this.boneInverses = [];
for ( let i = 0, il = this.bones.length; i < il; i ++ ) {
this.boneInverses.push( new Matrix4() );
}
}
}
}
calculateInverses() {
this.boneInverses.length = 0;
for ( let i = 0, il = this.bones.length; i < il; i ++ ) {
const inverse = new Matrix4();
if ( this.bones[ i ] ) {
inverse.copy( this.bones[ i ].matrixWorld ).invert();
}
this.boneInverses.push( inverse );
}
}
pose() {
// recover the bind-time world matrices
for ( let i = 0, il = this.bones.length; i < il; i ++ ) {
const bone = this.bones[ i ];
if ( bone ) {
bone.matrixWorld.copy( this.boneInverses[ i ] ).invert();
}
}
// compute the local matrices, positions, rotations and scales
for ( let i = 0, il = this.bones.length; i < il; i ++ ) {
const bone = this.bones[ i ];
if ( bone ) {
if ( bone.parent && bone.parent.isBone ) {
bone.matrix.copy( bone.parent.matrixWorld ).invert();
bone.matrix.multiply( bone.matrixWorld );
} else {
bone.matrix.copy( bone.matrixWorld );
}
bone.matrix.decompose( bone.position, bone.quaternion, bone.scale );
}
}
}
update() {
const bones = this.bones;
const boneInverses = this.boneInverses;
const boneMatrices = this.boneMatrices;
const boneTexture = this.boneTexture;
// flatten bone matrices to array
for ( let i = 0, il = bones.length; i < il; i ++ ) {
// compute the offset between the current and the original transform
const matrix = bones[ i ] ? bones[ i ].matrixWorld : _identityMatrix;
_offsetMatrix.multiplyMatrices( matrix, boneInverses[ i ] );
_offsetMatrix.toArray( boneMatrices, i * 16 );
}
if ( boneTexture !== null ) {
boneTexture.needsUpdate = true;
}
}
clone() {
return new Skeleton( this.bones, this.boneInverses );
}
getBoneByName( name ) {
for ( let i = 0, il = this.bones.length; i < il; i ++ ) {
const bone = this.bones[ i ];
if ( bone.name === name ) {
return bone;
}
}
return undefined;
}
dispose( ) {
if ( this.boneTexture !== null ) {
this.boneTexture.dispose();
this.boneTexture = null;
}
}
fromJSON( json, bones ) {
this.uuid = json.uuid;
for ( let i = 0, l = json.bones.length; i < l; i ++ ) {
const uuid = json.bones[ i ];
let bone = bones[ uuid ];
if ( bone === undefined ) {
console.warn( 'THREE.Skeleton: No bone found with UUID:', uuid );
bone = new Bone();
}
this.bones.push( bone );
this.boneInverses.push( new Matrix4().fromArray( json.boneInverses[ i ] ) );
}
this.init();
return this;
}
toJSON() {
const data = {
metadata: {
version: 4.5,
type: 'Skeleton',
generator: 'Skeleton.toJSON'
},
bones: [],
boneInverses: []
};
data.uuid = this.uuid;
const bones = this.bones;
const boneInverses = this.boneInverses;
for ( let i = 0, l = bones.length; i < l; i ++ ) {
const bone = bones[ i ];
data.bones.push( bone.uuid );
const boneInverse = boneInverses[ i ];
data.boneInverses.push( boneInverse.toArray() );
}
return data;
}
}
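/**
 * Usage sketch (illustrative only; `geometry` carries skinIndex/skinWeight
 * attributes and `material` has skinning enabled):
 *
 * const rootBone = new Bone();
 * const childBone = new Bone();
 * rootBone.add( childBone );
 * const mesh = new SkinnedMesh( geometry, material );
 * mesh.add( rootBone );
 * mesh.bind( new Skeleton( [ rootBone, childBone ] ) );
 */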
/**
* parameters = {
* color: <hex>,
* opacity: <float>,
*
* linewidth: <float>,
* linecap: "round",
* linejoin: "round"
* }
*/
class LineBasicMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'LineBasicMaterial';
this.color = new Color( 0xffffff );
this.linewidth = 1;
this.linecap = 'round';
this.linejoin = 'round';
this.morphTargets = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
this.linewidth = source.linewidth;
this.linecap = source.linecap;
this.linejoin = source.linejoin;
this.morphTargets = source.morphTargets;
return this;
}
}
LineBasicMaterial.prototype.isLineBasicMaterial = true;
const _start$1 = /*@__PURE__*/ new Vector3();
const _end$1 = /*@__PURE__*/ new Vector3();
const _inverseMatrix$1 = /*@__PURE__*/ new Matrix4();
const _ray$1 = /*@__PURE__*/ new Ray();
const _sphere$1 = /*@__PURE__*/ new Sphere();
class Line extends Object3D {
constructor( geometry = new BufferGeometry(), material = new LineBasicMaterial() ) {
super();
this.type = 'Line';
this.geometry = geometry;
this.material = material;
this.updateMorphTargets();
}
copy( source ) {
super.copy( source );
this.material = source.material;
this.geometry = source.geometry;
return this;
}
computeLineDistances() {
const geometry = this.geometry;
if ( geometry.isBufferGeometry ) {
// we assume non-indexed geometry
if ( geometry.index === null ) {
const positionAttribute = geometry.attributes.position;
const lineDistances = [ 0 ];
for ( let i = 1, l = positionAttribute.count; i < l; i ++ ) {
_start$1.fromBufferAttribute( positionAttribute, i - 1 );
_end$1.fromBufferAttribute( positionAttribute, i );
lineDistances[ i ] = lineDistances[ i - 1 ];
lineDistances[ i ] += _start$1.distanceTo( _end$1 );
}
geometry.setAttribute( 'lineDistance', new Float32BufferAttribute( lineDistances, 1 ) );
} else {
console.warn( 'THREE.Line.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' );
}
} else if ( geometry.isGeometry ) {
console.error( 'THREE.Line.computeLineDistances() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
return this;
}
raycast( raycaster, intersects ) {
const geometry = this.geometry;
const matrixWorld = this.matrixWorld;
const threshold = raycaster.params.Line.threshold;
const drawRange = geometry.drawRange;
// Checking boundingSphere distance to ray
if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere();
_sphere$1.copy( geometry.boundingSphere );
_sphere$1.applyMatrix4( matrixWorld );
_sphere$1.radius += threshold;
if ( raycaster.ray.intersectsSphere( _sphere$1 ) === false ) return;
//
_inverseMatrix$1.copy( matrixWorld ).invert();
_ray$1.copy( raycaster.ray ).applyMatrix4( _inverseMatrix$1 );
const localThreshold = threshold / ( ( this.scale.x + this.scale.y + this.scale.z ) / 3 );
const localThresholdSq = localThreshold * localThreshold;
const vStart = new Vector3();
const vEnd = new Vector3();
const interSegment = new Vector3();
const interRay = new Vector3();
const step = this.isLineSegments ? 2 : 1;
if ( geometry.isBufferGeometry ) {
const index = geometry.index;
const attributes = geometry.attributes;
const positionAttribute = attributes.position;
if ( index !== null ) {
const start = Math.max( 0, drawRange.start );
const end = Math.min( index.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, l = end - 1; i < l; i += step ) {
const a = index.getX( i );
const b = index.getX( i + 1 );
vStart.fromBufferAttribute( positionAttribute, a );
vEnd.fromBufferAttribute( positionAttribute, b );
const distSq = _ray$1.distanceSqToSegment( vStart, vEnd, interRay, interSegment );
if ( distSq > localThresholdSq ) continue;
interRay.applyMatrix4( this.matrixWorld ); //Move back to world space for distance calculation
const distance = raycaster.ray.origin.distanceTo( interRay );
if ( distance < raycaster.near || distance > raycaster.far ) continue;
intersects.push( {
distance: distance,
// What do we want? intersection point on the ray or on the segment??
// point: raycaster.ray.at( distance ),
point: interSegment.clone().applyMatrix4( this.matrixWorld ),
index: i,
face: null,
faceIndex: null,
object: this
} );
}
} else {
const start = Math.max( 0, drawRange.start );
const end = Math.min( positionAttribute.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, l = end - 1; i < l; i += step ) {
vStart.fromBufferAttribute( positionAttribute, i );
vEnd.fromBufferAttribute( positionAttribute, i + 1 );
const distSq = _ray$1.distanceSqToSegment( vStart, vEnd, interRay, interSegment );
if ( distSq > localThresholdSq ) continue;
interRay.applyMatrix4( this.matrixWorld ); //Move back to world space for distance calculation
const distance = raycaster.ray.origin.distanceTo( interRay );
if ( distance < raycaster.near || distance > raycaster.far ) continue;
intersects.push( {
distance: distance,
// What do we want? intersection point on the ray or on the segment??
// point: raycaster.ray.at( distance ),
point: interSegment.clone().applyMatrix4( this.matrixWorld ),
index: i,
face: null,
faceIndex: null,
object: this
} );
}
}
} else if ( geometry.isGeometry ) {
console.error( 'THREE.Line.raycast() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
updateMorphTargets() {
const geometry = this.geometry;
if ( geometry.isBufferGeometry ) {
const morphAttributes = geometry.morphAttributes;
const keys = Object.keys( morphAttributes );
if ( keys.length > 0 ) {
const morphAttribute = morphAttributes[ keys[ 0 ] ];
if ( morphAttribute !== undefined ) {
this.morphTargetInfluences = [];
this.morphTargetDictionary = {};
for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) {
const name = morphAttribute[ m ].name || String( m );
this.morphTargetInfluences.push( 0 );
this.morphTargetDictionary[ name ] = m;
}
}
}
} else {
const morphTargets = geometry.morphTargets;
if ( morphTargets !== undefined && morphTargets.length > 0 ) {
console.error( 'THREE.Line.updateMorphTargets() does not support THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
}
}
Line.prototype.isLine = true;
const _start = /*@__PURE__*/ new Vector3();
const _end = /*@__PURE__*/ new Vector3();
class LineSegments extends Line {
constructor( geometry, material ) {
super( geometry, material );
this.type = 'LineSegments';
}
computeLineDistances() {
const geometry = this.geometry;
if ( geometry.isBufferGeometry ) {
// we assume non-indexed geometry
if ( geometry.index === null ) {
const positionAttribute = geometry.attributes.position;
const lineDistances = [];
for ( let i = 0, l = positionAttribute.count; i < l; i += 2 ) {
_start.fromBufferAttribute( positionAttribute, i );
_end.fromBufferAttribute( positionAttribute, i + 1 );
lineDistances[ i ] = ( i === 0 ) ? 0 : lineDistances[ i - 1 ];
lineDistances[ i + 1 ] = lineDistances[ i ] + _start.distanceTo( _end );
}
geometry.setAttribute( 'lineDistance', new Float32BufferAttribute( lineDistances, 1 ) );
} else {
console.warn( 'THREE.LineSegments.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' );
}
} else if ( geometry.isGeometry ) {
console.error( 'THREE.LineSegments.computeLineDistances() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
return this;
}
}
LineSegments.prototype.isLineSegments = true;
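/**
 * Usage sketch (illustrative only; `scene` is assumed to exist elsewhere).
 * computeLineDistances() fills the 'lineDistance' attribute, which dashed
 * line materials consume.
 *
 * const geometry = new BufferGeometry().setFromPoints( [
 * 	new Vector3( 0, 0, 0 ),
 * 	new Vector3( 1, 0, 0 ),
 * 	new Vector3( 1, 1, 0 )
 * ] );
 * const line = new Line( geometry, new LineBasicMaterial( { color: 0xff0000 } ) );
 * line.computeLineDistances();
 * scene.add( line );
 */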
class LineLoop extends Line {
constructor( geometry, material ) {
super( geometry, material );
this.type = 'LineLoop';
}
}
LineLoop.prototype.isLineLoop = true;
/**
* parameters = {
* color: <hex>,
* opacity: <float>,
* map: new THREE.Texture( <Image> ),
* alphaMap: new THREE.Texture( <Image> ),
*
* size: <float>,
* sizeAttenuation: <bool>
*
* morphTargets: <bool>
* }
*/
class PointsMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'PointsMaterial';
this.color = new Color( 0xffffff );
this.map = null;
this.alphaMap = null;
this.size = 1;
this.sizeAttenuation = true;
this.morphTargets = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
this.map = source.map;
this.alphaMap = source.alphaMap;
this.size = source.size;
this.sizeAttenuation = source.sizeAttenuation;
this.morphTargets = source.morphTargets;
return this;
}
}
PointsMaterial.prototype.isPointsMaterial = true;
const _inverseMatrix = /*@__PURE__*/ new Matrix4();
const _ray = /*@__PURE__*/ new Ray();
const _sphere = /*@__PURE__*/ new Sphere();
const _position$2 = /*@__PURE__*/ new Vector3();
class Points extends Object3D {
constructor( geometry = new BufferGeometry(), material = new PointsMaterial() ) {
super();
this.type = 'Points';
this.geometry = geometry;
this.material = material;
this.updateMorphTargets();
}
copy( source ) {
super.copy( source );
this.material = source.material;
this.geometry = source.geometry;
return this;
}
raycast( raycaster, intersects ) {
const geometry = this.geometry;
const matrixWorld = this.matrixWorld;
const threshold = raycaster.params.Points.threshold;
const drawRange = geometry.drawRange;
// Checking boundingSphere distance to ray
if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere();
_sphere.copy( geometry.boundingSphere );
_sphere.applyMatrix4( matrixWorld );
_sphere.radius += threshold;
if ( raycaster.ray.intersectsSphere( _sphere ) === false ) return;
//
_inverseMatrix.copy( matrixWorld ).invert();
_ray.copy( raycaster.ray ).applyMatrix4( _inverseMatrix );
const localThreshold = threshold / ( ( this.scale.x + this.scale.y + this.scale.z ) / 3 );
const localThresholdSq = localThreshold * localThreshold;
if ( geometry.isBufferGeometry ) {
const index = geometry.index;
const attributes = geometry.attributes;
const positionAttribute = attributes.position;
if ( index !== null ) {
const start = Math.max( 0, drawRange.start );
const end = Math.min( index.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, il = end; i < il; i ++ ) {
const a = index.getX( i );
_position$2.fromBufferAttribute( positionAttribute, a );
testPoint( _position$2, a, localThresholdSq, matrixWorld, raycaster, intersects, this );
}
} else {
const start = Math.max( 0, drawRange.start );
const end = Math.min( positionAttribute.count, ( drawRange.start + drawRange.count ) );
for ( let i = start, l = end; i < l; i ++ ) {
_position$2.fromBufferAttribute( positionAttribute, i );
testPoint( _position$2, i, localThresholdSq, matrixWorld, raycaster, intersects, this );
}
}
} else {
console.error( 'THREE.Points.raycast() no longer supports THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
updateMorphTargets() {
const geometry = this.geometry;
if ( geometry.isBufferGeometry ) {
const morphAttributes = geometry.morphAttributes;
const keys = Object.keys( morphAttributes );
if ( keys.length > 0 ) {
const morphAttribute = morphAttributes[ keys[ 0 ] ];
if ( morphAttribute !== undefined ) {
this.morphTargetInfluences = [];
this.morphTargetDictionary = {};
for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) {
const name = morphAttribute[ m ].name || String( m );
this.morphTargetInfluences.push( 0 );
this.morphTargetDictionary[ name ] = m;
}
}
}
} else {
const morphTargets = geometry.morphTargets;
if ( morphTargets !== undefined && morphTargets.length > 0 ) {
console.error( 'THREE.Points.updateMorphTargets() does not support THREE.Geometry. Use THREE.BufferGeometry instead.' );
}
}
}
}
Points.prototype.isPoints = true;
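/**
 * Usage sketch (illustrative only; `raycaster` is an existing Raycaster):
 *
 * const geometry = new BufferGeometry();
 * geometry.setAttribute( 'position', new Float32BufferAttribute( [ 0, 0, 0, 1, 1, 1 ], 3 ) );
 * const points = new Points( geometry, new PointsMaterial( { size: 0.1 } ) );
 * raycaster.params.Points.threshold = 0.05; // world-space pick radius used by raycast() above
 * const hits = raycaster.intersectObject( points );
 */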
function testPoint( point, index, localThresholdSq, matrixWorld, raycaster, intersects, object ) {
const rayPointDistanceSq = _ray.distanceSqToPoint( point );
if ( rayPointDistanceSq < localThresholdSq ) {
const intersectPoint = new Vector3();
_ray.closestPointToPoint( point, intersectPoint );
intersectPoint.applyMatrix4( matrixWorld );
const distance = raycaster.ray.origin.distanceTo( intersectPoint );
if ( distance < raycaster.near || distance > raycaster.far ) return;
intersects.push( {
distance: distance,
distanceToRay: Math.sqrt( rayPointDistanceSq ),
point: intersectPoint,
index: index,
face: null,
object: object
} );
}
}
class CompressedTexture extends Texture$1 {
constructor( mipmaps, width, height, format, type, mapping, wrapS, wrapT, magFilter, minFilter, anisotropy, encoding ) {
super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, encoding );
this.image = { width: width, height: height };
this.mipmaps = mipmaps;
// no flipping for cube textures
// (also flipping doesn't work for compressed textures)
this.flipY = false;
// can't generate mipmaps for compressed textures
// mips must be embedded in DDS files
this.generateMipmaps = false;
}
}
CompressedTexture.prototype.isCompressedTexture = true;
class CanvasTexture extends Texture$1 {
constructor( canvas, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ) {
super( canvas, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy );
this.needsUpdate = true;
}
}
CanvasTexture.prototype.isCanvasTexture = true;
/**
* parameters = {
* color: <THREE.Color>
* }
*/
class ShadowMaterial extends Material$1 {
constructor( parameters ) {
super();
this.type = 'ShadowMaterial';
this.color = new Color( 0x000000 );
this.transparent = true;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
return this;
}
}
ShadowMaterial.prototype.isShadowMaterial = true;
class RawShaderMaterial extends ShaderMaterial {
constructor( parameters ) {
super( parameters );
this.type = 'RawShaderMaterial';
}
}
RawShaderMaterial.prototype.isRawShaderMaterial = true;
/**
* parameters = {
* color: <hex>,
* roughness: <float>,
* metalness: <float>,
* opacity: <float>,
*
* map: new THREE.Texture( <Image> ),
*
* lightMap: new THREE.Texture( <Image> ),
* lightMapIntensity: <float>
*
* aoMap: new THREE.Texture( <Image> ),
* aoMapIntensity: <float>
*
* emissive: <hex>,
* emissiveIntensity: <float>
* emissiveMap: new THREE.Texture( <Image> ),
*
* bumpMap: new THREE.Texture( <Image> ),
* bumpScale: <float>,
*
* normalMap: new THREE.Texture( <Image> ),
* normalMapType: THREE.TangentSpaceNormalMap,
* normalScale: <Vector2>,
*
* displacementMap: new THREE.Texture( <Image> ),
* displacementScale: <float>,
* displacementBias: <float>,
*
* roughnessMap: new THREE.Texture( <Image> ),
*
* metalnessMap: new THREE.Texture( <Image> ),
*
* alphaMap: new THREE.Texture( <Image> ),
*
* envMap: new THREE.CubeTexture( [posx, negx, posy, negy, posz, negz] ),
* envMapIntensity: <float>
*
* refractionRatio: <float>,
*
* wireframe: <boolean>,
* wireframeLinewidth: <float>,
*
* skinning: <bool>,
* morphTargets: <bool>,
* morphNormals: <bool>,
*
* flatShading: <bool>
* }
*/
class MeshStandardMaterial extends Material$1 {
constructor( parameters ) {
super();
this.defines = { 'STANDARD': '' };
this.type = 'MeshStandardMaterial';
this.color = new Color( 0xffffff ); // diffuse
this.roughness = 1.0;
this.metalness = 0.0;
this.map = null;
this.lightMap = null;
this.lightMapIntensity = 1.0;
this.aoMap = null;
this.aoMapIntensity = 1.0;
this.emissive = new Color( 0x000000 );
this.emissiveIntensity = 1.0;
this.emissiveMap = null;
this.bumpMap = null;
this.bumpScale = 1;
this.normalMap = null;
this.normalMapType = TangentSpaceNormalMap;
this.normalScale = new Vector2( 1, 1 );
this.displacementMap = null;
this.displacementScale = 1;
this.displacementBias = 0;
this.roughnessMap = null;
this.metalnessMap = null;
this.alphaMap = null;
this.envMap = null;
this.envMapIntensity = 1.0;
this.refractionRatio = 0.98;
this.wireframe = false;
this.wireframeLinewidth = 1;
this.wireframeLinecap = 'round';
this.wireframeLinejoin = 'round';
this.skinning = false;
this.morphTargets = false;
this.morphNormals = false;
this.flatShading = false;
this.vertexTangents = false;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.defines = { 'STANDARD': '' };
this.color.copy( source.color );
this.roughness = source.roughness;
this.metalness = source.metalness;
this.map = source.map;
this.lightMap = source.lightMap;
this.lightMapIntensity = source.lightMapIntensity;
this.aoMap = source.aoMap;
this.aoMapIntensity = source.aoMapIntensity;
this.emissive.copy( source.emissive );
this.emissiveMap = source.emissiveMap;
this.emissiveIntensity = source.emissiveIntensity;
this.bumpMap = source.bumpMap;
this.bumpScale = source.bumpScale;
this.normalMap = source.normalMap;
this.normalMapType = source.normalMapType;
this.normalScale.copy( source.normalScale );
this.displacementMap = source.displacementMap;
this.displacementScale = source.displacementScale;
this.displacementBias = source.displacementBias;
this.roughnessMap = source.roughnessMap;
this.metalnessMap = source.metalnessMap;
this.alphaMap = source.alphaMap;
this.envMap = source.envMap;
this.envMapIntensity = source.envMapIntensity;
this.refractionRatio = source.refractionRatio;
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
this.wireframeLinecap = source.wireframeLinecap;
this.wireframeLinejoin = source.wireframeLinejoin;
this.skinning = source.skinning;
this.morphTargets = source.morphTargets;
this.morphNormals = source.morphNormals;
this.flatShading = source.flatShading;
this.vertexTangents = source.vertexTangents;
return this;
}
}
MeshStandardMaterial.prototype.isMeshStandardMaterial = true;
/**
* parameters = {
* clearcoat: <float>,
* clearcoatMap: new THREE.Texture( <Image> ),
* clearcoatRoughness: <float>,
* clearcoatRoughnessMap: new THREE.Texture( <Image> ),
* clearcoatNormalScale: <Vector2>,
* clearcoatNormalMap: new THREE.Texture( <Image> ),
*
* reflectivity: <float>,
* ior: <float>,
*
* sheen: <Color>,
*
* transmission: <float>,
* transmissionMap: new THREE.Texture( <Image> )
* }
*/
class MeshPhysicalMaterial extends MeshStandardMaterial {
constructor( parameters ) {
super();
this.defines = {
'STANDARD': '',
'PHYSICAL': ''
};
this.type = 'MeshPhysicalMaterial';
this.clearcoat = 0.0;
this.clearcoatMap = null;
this.clearcoatRoughness = 0.0;
this.clearcoatRoughnessMap = null;
this.clearcoatNormalScale = new Vector2( 1, 1 );
this.clearcoatNormalMap = null;
this.reflectivity = 0.5; // maps to F0 = 0.04
Object.defineProperty( this, 'ior', {
get: function () {
return ( 1 + 0.4 * this.reflectivity ) / ( 1 - 0.4 * this.reflectivity );
},
set: function ( ior ) {
this.reflectivity = clamp$1( 2.5 * ( ior - 1 ) / ( ior + 1 ), 0, 1 );
}
} );
this.sheen = null; // null will disable sheen bsdf
this.transmission = 0.0;
this.transmissionMap = null;
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.defines = {
'STANDARD': '',
'PHYSICAL': ''
};
this.clearcoat = source.clearcoat;
this.clearcoatMap = source.clearcoatMap;
this.clearcoatRoughness = source.clearcoatRoughness;
this.clearcoatRoughnessMap = source.clearcoatRoughnessMap;
this.clearcoatNormalMap = source.clearcoatNormalMap;
this.clearcoatNormalScale.copy( source.clearcoatNormalScale );
this.reflectivity = source.reflectivity;
if ( source.sheen ) {
this.sheen = ( this.sheen || new Color() ).copy( source.sheen );
} else {
this.sheen = null;
}
this.transmission = source.transmission;
this.transmissionMap = source.transmissionMap;
return this;
}
}
MeshPhysicalMaterial.prototype.isMeshPhysicalMaterial = true;
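/**
 * Note on the ior accessor above (illustrative arithmetic): reflectivity and
 * ior describe the same quantity. With the default reflectivity of 0.5 the
 * getter yields ior = ( 1 + 0.4 * 0.5 ) / ( 1 - 0.4 * 0.5 ) = 1.5, and setting
 * ior = 1.5 maps back to reflectivity = 2.5 * 0.5 / 2.5 = 0.5.
 */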
const AnimationUtils = {
// same as Array.prototype.slice, but also works on typed arrays
arraySlice: function ( array, from, to ) {
if ( AnimationUtils.isTypedArray( array ) ) {
// in ios9 array.subarray(from, undefined) will return empty array
// but array.subarray(from) or array.subarray(from, len) is correct
return new array.constructor( array.subarray( from, to !== undefined ? to : array.length ) );
}
return array.slice( from, to );
},
// converts an array to a specific type
convertArray: function ( array, type, forceClone ) {
if ( ! array || // let 'undefined' and 'null' pass
! forceClone && array.constructor === type ) return array;
if ( typeof type.BYTES_PER_ELEMENT === 'number' ) {
return new type( array ); // create typed array
}
return Array.prototype.slice.call( array ); // create Array
},
isTypedArray: function ( object ) {
return ArrayBuffer.isView( object ) &&
! ( object instanceof DataView );
},
// returns an array by which times and values can be sorted
getKeyframeOrder: function ( times ) {
function compareTime( i, j ) {
return times[ i ] - times[ j ];
}
const n = times.length;
const result = new Array( n );
for ( let i = 0; i !== n; ++ i ) result[ i ] = i;
result.sort( compareTime );
return result;
},
// uses the array previously returned by 'getKeyframeOrder' to sort data
sortedArray: function ( values, stride, order ) {
const nValues = values.length;
const result = new values.constructor( nValues );
for ( let i = 0, dstOffset = 0; dstOffset !== nValues; ++ i ) {
const srcOffset = order[ i ] * stride;
for ( let j = 0; j !== stride; ++ j ) {
result[ dstOffset ++ ] = values[ srcOffset + j ];
}
}
return result;
},
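// Usage sketch (illustrative only): reordering keyframes so that times are
// ascending while keeping each stride-2 value pair aligned with its time.
//
// const times = [ 2, 0, 1 ];
// const values = [ 20, 20, 0, 0, 10, 10 ];
// const order = AnimationUtils.getKeyframeOrder( times ); // [ 1, 2, 0 ]
// AnimationUtils.sortedArray( times, 1, order ); // [ 0, 1, 2 ]
// AnimationUtils.sortedArray( values, 2, order ); // [ 0, 0, 10, 10, 20, 20 ]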
// function for parsing AOS keyframe formats
flattenJSON: function ( jsonKeys, times, values, valuePropertyName ) {
let i = 1, key = jsonKeys[ 0 ];
while ( key !== undefined && key[ valuePropertyName ] === undefined ) {
key = jsonKeys[ i ++ ];
}
if ( key === undefined ) return; // no data
let value = key[ valuePropertyName ];
if ( value === undefined ) return; // no data
if ( Array.isArray( value ) ) {
do {
value = key[ valuePropertyName ];
if ( value !== undefined ) {
times.push( key.time );
values.push.apply( values, value ); // push all elements
}
key = jsonKeys[ i ++ ];
} while ( key !== undefined );
} else if ( value.toArray !== undefined ) {
// ...assume THREE.Math-ish
do {
value = key[ valuePropertyName ];
if ( value !== undefined ) {
times.push( key.time );
value.toArray( values, values.length );
}
key = jsonKeys[ i ++ ];
} while ( key !== undefined );
} else {
// otherwise push as-is
do {
value = key[ valuePropertyName ];
if ( value !== undefined ) {
times.push( key.time );
values.push( value );
}
key = jsonKeys[ i ++ ];
} while ( key !== undefined );
}
},
subclip: function ( sourceClip, name, startFrame, endFrame, fps = 30 ) {
const clip = sourceClip.clone();
clip.name = name;
const tracks = [];
for ( let i = 0; i < clip.tracks.length; ++ i ) {
const track = clip.tracks[ i ];
const valueSize = track.getValueSize();
const times = [];
const values = [];
for ( let j = 0; j < track.times.length; ++ j ) {
const frame = track.times[ j ] * fps;
if ( frame < startFrame || frame >= endFrame ) continue;
times.push( track.times[ j ] );
for ( let k = 0; k < valueSize; ++ k ) {
values.push( track.values[ j * valueSize + k ] );
}
}
if ( times.length === 0 ) continue;
track.times = AnimationUtils.convertArray( times, track.times.constructor );
track.values = AnimationUtils.convertArray( values, track.values.constructor );
tracks.push( track );
}
clip.tracks = tracks;
// find minimum .times value across all tracks in the trimmed clip
let minStartTime = Infinity;
for ( let i = 0; i < clip.tracks.length; ++ i ) {
if ( minStartTime > clip.tracks[ i ].times[ 0 ] ) {
minStartTime = clip.tracks[ i ].times[ 0 ];
}
}
// shift all tracks such that clip begins at t=0
for ( let i = 0; i < clip.tracks.length; ++ i ) {
clip.tracks[ i ].shift( - 1 * minStartTime );
}
clip.resetDuration();
return clip;
},
makeClipAdditive: function ( targetClip, referenceFrame = 0, referenceClip = targetClip, fps = 30 ) {
if ( fps <= 0 ) fps = 30;
const numTracks = referenceClip.tracks.length;
const referenceTime = referenceFrame / fps;
// Make each track's values relative to the values at the reference frame
for ( let i = 0; i < numTracks; ++ i ) {
const referenceTrack = referenceClip.tracks[ i ];
const referenceTrackType = referenceTrack.ValueTypeName;
// Skip this track if it's non-numeric
if ( referenceTrackType === 'bool' || referenceTrackType === 'string' ) continue;
// Find the track in the target clip whose name and type matches the reference track
const targetTrack = targetClip.tracks.find( function ( track ) {
return track.name === referenceTrack.name
&& track.ValueTypeName === referenceTrackType;
} );
if ( targetTrack === undefined ) continue;
let referenceOffset = 0;
const referenceValueSize = referenceTrack.getValueSize();
if ( referenceTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) {
referenceOffset = referenceValueSize / 3;
}
let targetOffset = 0;
const targetValueSize = targetTrack.getValueSize();
if ( targetTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) {
targetOffset = targetValueSize / 3;
}
const lastIndex = referenceTrack.times.length - 1;
let referenceValue;
// Find the value to subtract out of the track
if ( referenceTime <= referenceTrack.times[ 0 ] ) {
// Reference frame is earlier than the first keyframe, so just use the first keyframe
const startIndex = referenceOffset;
const endIndex = referenceValueSize - referenceOffset;
referenceValue = AnimationUtils.arraySlice( referenceTrack.values, startIndex, endIndex );
} else if ( referenceTime >= referenceTrack.times[ lastIndex ] ) {
// Reference frame is after the last keyframe, so just use the last keyframe
const startIndex = lastIndex * referenceValueSize + referenceOffset;
const endIndex = startIndex + referenceValueSize - referenceOffset;
referenceValue = AnimationUtils.arraySlice( referenceTrack.values, startIndex, endIndex );
} else {
// Interpolate to the reference value
const interpolant = referenceTrack.createInterpolant();
const startIndex = referenceOffset;
const endIndex = referenceValueSize - referenceOffset;
interpolant.evaluate( referenceTime );
referenceValue = AnimationUtils.arraySlice( interpolant.resultBuffer, startIndex, endIndex );
}
// Conjugate the quaternion
if ( referenceTrackType === 'quaternion' ) {
const referenceQuat = new Quaternion().fromArray( referenceValue ).normalize().conjugate();
referenceQuat.toArray( referenceValue );
}
// Subtract the reference value from all of the track values
const numTimes = targetTrack.times.length;
for ( let j = 0; j < numTimes; ++ j ) {
const valueStart = j * targetValueSize + targetOffset;
if ( referenceTrackType === 'quaternion' ) {
// Multiply the conjugate for quaternion track types
Quaternion.multiplyQuaternionsFlat(
targetTrack.values,
valueStart,
referenceValue,
0,
targetTrack.values,
valueStart
);
} else {
const valueEnd = targetValueSize - targetOffset * 2;
// Subtract each value for all other numeric track types
for ( let k = 0; k < valueEnd; ++ k ) {
targetTrack.values[ valueStart + k ] -= referenceValue[ k ];
}
}
}
}
targetClip.blendMode = AdditiveAnimationBlendMode;
return targetClip;
}
};
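// Usage sketch (illustrative comment only, not executed): trimming a sub-range
// out of a longer clip and converting it to an additive pose with the helpers
// above. `fullClip` and `idlePoseClip` are hypothetical AnimationClip instances.
//
//     const walk = AnimationUtils.subclip( fullClip, 'walk', 0, 30, 30 );
//     AnimationUtils.makeClipAdditive( walk, 0, idlePoseClip, 30 );
//     // walk.blendMode is now AdditiveAnimationBlendMode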
/**
* Abstract base class of interpolants over parametric samples.
*
* The parameter domain is one dimensional, typically the time or a path
* along a curve defined by the data.
*
* The sample values can have any dimensionality and derived classes may
* apply special interpretations to the data.
*
* This class provides the interval seek in a Template Method, deferring
* the actual interpolation to derived classes.
*
* Time complexity is O(1) for linear access crossing at most two points
* and O(log N) for random access, where N is the number of positions.
*
* References:
*
* http://www.oodesign.com/template-method-pattern.html
*
*/
class Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
this.parameterPositions = parameterPositions;
this._cachedIndex = 0;
this.resultBuffer = resultBuffer !== undefined ?
resultBuffer : new sampleValues.constructor( sampleSize );
this.sampleValues = sampleValues;
this.valueSize = sampleSize;
this.settings = null;
this.DefaultSettings_ = {};
}
evaluate( t ) {
const pp = this.parameterPositions;
let i1 = this._cachedIndex,
t1 = pp[ i1 ],
t0 = pp[ i1 - 1 ];
validate_interval: {
seek: {
let right;
linear_scan: {
//- See http://jsperf.com/comparison-to-undefined/3
//- slower code:
//-
//- if ( t >= t1 || t1 === undefined ) {
forward_scan: if ( ! ( t < t1 ) ) {
for ( let giveUpAt = i1 + 2; ; ) {
if ( t1 === undefined ) {
if ( t < t0 ) break forward_scan;
// after end
i1 = pp.length;
this._cachedIndex = i1;
return this.afterEnd_( i1 - 1, t, t0 );
}
if ( i1 === giveUpAt ) break; // this loop
t0 = t1;
t1 = pp[ ++ i1 ];
if ( t < t1 ) {
// we have arrived at the sought interval
break seek;
}
}
// prepare binary search on the right side of the index
right = pp.length;
break linear_scan;
}
//- slower code:
//- if ( t < t0 || t0 === undefined ) {
if ( ! ( t >= t0 ) ) {
// looping?
const t1global = pp[ 1 ];
if ( t < t1global ) {
i1 = 2; // + 1, using the scan for the details
t0 = t1global;
}
// linear reverse scan
for ( let giveUpAt = i1 - 2; ; ) {
if ( t0 === undefined ) {
// before start
this._cachedIndex = 0;
return this.beforeStart_( 0, t, t1 );
}
if ( i1 === giveUpAt ) break; // this loop
t1 = t0;
t0 = pp[ -- i1 - 1 ];
if ( t >= t0 ) {
// we have arrived at the sought interval
break seek;
}
}
// prepare binary search on the left side of the index
right = i1;
i1 = 0;
break linear_scan;
}
// the interval is valid
break validate_interval;
} // linear scan
// binary search
while ( i1 < right ) {
const mid = ( i1 + right ) >>> 1;
if ( t < pp[ mid ] ) {
right = mid;
} else {
i1 = mid + 1;
}
}
t1 = pp[ i1 ];
t0 = pp[ i1 - 1 ];
// check boundary cases, again
if ( t0 === undefined ) {
this._cachedIndex = 0;
return this.beforeStart_( 0, t, t1 );
}
if ( t1 === undefined ) {
i1 = pp.length;
this._cachedIndex = i1;
return this.afterEnd_( i1 - 1, t0, t );
}
} // seek
this._cachedIndex = i1;
this.intervalChanged_( i1, t0, t1 );
} // validate_interval
return this.interpolate_( i1, t0, t, t1 );
}
getSettings_() {
return this.settings || this.DefaultSettings_;
}
copySampleValue_( index ) {
// copies a sample value to the result buffer
const result = this.resultBuffer,
values = this.sampleValues,
stride = this.valueSize,
offset = index * stride;
for ( let i = 0; i !== stride; ++ i ) {
result[ i ] = values[ offset + i ];
}
return result;
}
// Template methods for derived classes:
interpolate_( /* i1, t0, t, t1 */ ) {
throw new Error( 'call to abstract method' );
// implementations shall return this.resultBuffer
}
intervalChanged_( /* i1, t0, t1 */ ) {
// empty
}
}
// ALIAS DEFINITIONS
Interpolant.prototype.beforeStart_ = Interpolant.prototype.copySampleValue_;
Interpolant.prototype.afterEnd_ = Interpolant.prototype.copySampleValue_;
/**
* Fast and simple cubic spline interpolant.
*
* It was derived from a Hermitian construction setting the first derivative
* at each sample position to the linear slope between neighboring positions
* over their parameter interval.
*/
class CubicInterpolant extends Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
super( parameterPositions, sampleValues, sampleSize, resultBuffer );
this._weightPrev = - 0;
this._offsetPrev = - 0;
this._weightNext = - 0;
this._offsetNext = - 0;
this.DefaultSettings_ = {
endingStart: ZeroCurvatureEnding,
endingEnd: ZeroCurvatureEnding
};
}
intervalChanged_( i1, t0, t1 ) {
const pp = this.parameterPositions;
let iPrev = i1 - 2,
iNext = i1 + 1,
tPrev = pp[ iPrev ],
tNext = pp[ iNext ];
if ( tPrev === undefined ) {
switch ( this.getSettings_().endingStart ) {
case ZeroSlopeEnding:
// f'(t0) = 0
iPrev = i1;
tPrev = 2 * t0 - t1;
break;
case WrapAroundEnding:
// use the other end of the curve
iPrev = pp.length - 2;
tPrev = t0 + pp[ iPrev ] - pp[ iPrev + 1 ];
break;
default: // ZeroCurvatureEnding
// f''(t0) = 0 a.k.a. Natural Spline
iPrev = i1;
tPrev = t1;
}
}
if ( tNext === undefined ) {
switch ( this.getSettings_().endingEnd ) {
case ZeroSlopeEnding:
// f'(tN) = 0
iNext = i1;
tNext = 2 * t1 - t0;
break;
case WrapAroundEnding:
// use the other end of the curve
iNext = 1;
tNext = t1 + pp[ 1 ] - pp[ 0 ];
break;
default: // ZeroCurvatureEnding
// f''(tN) = 0, a.k.a. Natural Spline
iNext = i1 - 1;
tNext = t0;
}
}
const halfDt = ( t1 - t0 ) * 0.5,
stride = this.valueSize;
this._weightPrev = halfDt / ( t0 - tPrev );
this._weightNext = halfDt / ( tNext - t1 );
this._offsetPrev = iPrev * stride;
this._offsetNext = iNext * stride;
}
interpolate_( i1, t0, t, t1 ) {
const result = this.resultBuffer,
values = this.sampleValues,
stride = this.valueSize,
o1 = i1 * stride, o0 = o1 - stride,
oP = this._offsetPrev, oN = this._offsetNext,
wP = this._weightPrev, wN = this._weightNext,
p = ( t - t0 ) / ( t1 - t0 ),
pp = p * p,
ppp = pp * p;
// evaluate polynomials
const sP = - wP * ppp + 2 * wP * pp - wP * p;
const s0 = ( 1 + wP ) * ppp + ( - 1.5 - 2 * wP ) * pp + ( - 0.5 + wP ) * p + 1;
const s1 = ( - 1 - wN ) * ppp + ( 1.5 + wN ) * pp + 0.5 * p;
const sN = wN * ppp - wN * pp;
// combine data linearly
for ( let i = 0; i !== stride; ++ i ) {
result[ i ] =
sP * values[ oP + i ] +
s0 * values[ o0 + i ] +
s1 * values[ o1 + i ] +
sN * values[ oN + i ];
}
return result;
}
}
class LinearInterpolant extends Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
super( parameterPositions, sampleValues, sampleSize, resultBuffer );
}
interpolate_( i1, t0, t, t1 ) {
const result = this.resultBuffer,
values = this.sampleValues,
stride = this.valueSize,
offset1 = i1 * stride,
offset0 = offset1 - stride,
weight1 = ( t - t0 ) / ( t1 - t0 ),
weight0 = 1 - weight1;
for ( let i = 0; i !== stride; ++ i ) {
result[ i ] =
values[ offset0 + i ] * weight0 +
values[ offset1 + i ] * weight1;
}
return result;
}
}
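// Usage sketch (illustrative comment only, not executed): a LinearInterpolant
// over two keyframes of a 3-component value, assuming a position-like sample.
//
//     const times  = new Float32Array( [ 0, 1 ] );
//     const values = new Float32Array( [ 0, 0, 0, 10, 0, 0 ] );
//     const interpolant = new LinearInterpolant( times, values, 3 );
//     interpolant.evaluate( 0.5 ); // resultBuffer now holds [ 5, 0, 0 ]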
/**
*
* Interpolant that evaluates to the sample value at the position preceding
* the parameter.
*/
class DiscreteInterpolant extends Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
super( parameterPositions, sampleValues, sampleSize, resultBuffer );
}
interpolate_( i1 /*, t0, t, t1 */ ) {
return this.copySampleValue_( i1 - 1 );
}
}
class KeyframeTrack {
constructor( name, times, values, interpolation ) {
if ( name === undefined ) throw new Error( 'THREE.KeyframeTrack: track name is undefined' );
if ( times === undefined || times.length === 0 ) throw new Error( 'THREE.KeyframeTrack: no keyframes in track named ' + name );
this.name = name;
this.times = AnimationUtils.convertArray( times, this.TimeBufferType );
this.values = AnimationUtils.convertArray( values, this.ValueBufferType );
this.setInterpolation( interpolation || this.DefaultInterpolation );
}
// Serialization (in static context, because of constructor invocation
// and automatic invocation of .toJSON):
static toJSON( track ) {
const trackType = track.constructor;
let json;
// derived classes can define a static toJSON method
if ( trackType.toJSON !== this.toJSON ) {
json = trackType.toJSON( track );
} else {
// by default, we assume the data can be serialized as-is
json = {
'name': track.name,
'times': AnimationUtils.convertArray( track.times, Array ),
'values': AnimationUtils.convertArray( track.values, Array )
};
const interpolation = track.getInterpolation();
if ( interpolation !== track.DefaultInterpolation ) {
json.interpolation = interpolation;
}
}
json.type = track.ValueTypeName; // mandatory
return json;
}
InterpolantFactoryMethodDiscrete( result ) {
return new DiscreteInterpolant( this.times, this.values, this.getValueSize(), result );
}
InterpolantFactoryMethodLinear( result ) {
return new LinearInterpolant( this.times, this.values, this.getValueSize(), result );
}
InterpolantFactoryMethodSmooth( result ) {
return new CubicInterpolant( this.times, this.values, this.getValueSize(), result );
}
setInterpolation( interpolation ) {
let factoryMethod;
switch ( interpolation ) {
case InterpolateDiscrete:
factoryMethod = this.InterpolantFactoryMethodDiscrete;
break;
case InterpolateLinear:
factoryMethod = this.InterpolantFactoryMethodLinear;
break;
case InterpolateSmooth:
factoryMethod = this.InterpolantFactoryMethodSmooth;
break;
}
if ( factoryMethod === undefined ) {
const message = 'unsupported interpolation for ' +
this.ValueTypeName + ' keyframe track named ' + this.name;
if ( this.createInterpolant === undefined ) {
// fall back to the default, unless the default itself is unsupported
if ( interpolation !== this.DefaultInterpolation ) {
this.setInterpolation( this.DefaultInterpolation );
} else {
throw new Error( message ); // fatal, in this case
}
}
console.warn( 'THREE.KeyframeTrack:', message );
return this;
}
this.createInterpolant = factoryMethod;
return this;
}
getInterpolation() {
switch ( this.createInterpolant ) {
case this.InterpolantFactoryMethodDiscrete:
return InterpolateDiscrete;
case this.InterpolantFactoryMethodLinear:
return InterpolateLinear;
case this.InterpolantFactoryMethodSmooth:
return InterpolateSmooth;
}
}
getValueSize() {
return this.values.length / this.times.length;
}
// move all keyframes either forwards or backwards in time
shift( timeOffset ) {
if ( timeOffset !== 0.0 ) {
const times = this.times;
for ( let i = 0, n = times.length; i !== n; ++ i ) {
times[ i ] += timeOffset;
}
}
return this;
}
// scale all keyframe times by a factor (useful for frame <-> seconds conversions)
scale( timeScale ) {
if ( timeScale !== 1.0 ) {
const times = this.times;
for ( let i = 0, n = times.length; i !== n; ++ i ) {
times[ i ] *= timeScale;
}
}
return this;
}
// removes keyframes before and after the animation without changing any values within the range [ startTime, endTime ].
// IMPORTANT: the remaining keys are not shifted to the start of the track time, because doing so would change the values of interpolated keys
trim( startTime, endTime ) {
const times = this.times,
nKeys = times.length;
let from = 0,
to = nKeys - 1;
while ( from !== nKeys && times[ from ] < startTime ) {
++ from;
}
while ( to !== - 1 && times[ to ] > endTime ) {
-- to;
}
++ to; // inclusive -> exclusive bound
if ( from !== 0 || to !== nKeys ) {
// empty tracks are forbidden, so keep at least one keyframe
if ( from >= to ) {
to = Math.max( to, 1 );
from = to - 1;
}
const stride = this.getValueSize();
this.times = AnimationUtils.arraySlice( times, from, to );
this.values = AnimationUtils.arraySlice( this.values, from * stride, to * stride );
}
return this;
}
// guard against garbage-in/garbage-out: make sure the track is at least minimally viable
validate() {
let valid = true;
const valueSize = this.getValueSize();
if ( valueSize - Math.floor( valueSize ) !== 0 ) {
console.error( 'THREE.KeyframeTrack: Invalid value size in track.', this );
valid = false;
}
const times = this.times,
values = this.values,
nKeys = times.length;
if ( nKeys === 0 ) {
console.error( 'THREE.KeyframeTrack: Track is empty.', this );
valid = false;
}
let prevTime = null;
for ( let i = 0; i !== nKeys; i ++ ) {
const currTime = times[ i ];
if ( typeof currTime === 'number' && isNaN( currTime ) ) {
console.error( 'THREE.KeyframeTrack: Time is not a valid number.', this, i, currTime );
valid = false;
break;
}
if ( prevTime !== null && prevTime > currTime ) {
console.error( 'THREE.KeyframeTrack: Out of order keys.', this, i, currTime, prevTime );
valid = false;
break;
}
prevTime = currTime;
}
if ( values !== undefined ) {
if ( AnimationUtils.isTypedArray( values ) ) {
for ( let i = 0, n = values.length; i !== n; ++ i ) {
const value = values[ i ];
if ( isNaN( value ) ) {
console.error( 'THREE.KeyframeTrack: Value is not a valid number.', this, i, value );
valid = false;
break;
}
}
}
}
return valid;
}
// removes equivalent sequential keys as common in morph target sequences
// (0,0,0,0,1,1,1,0,0,0,0,0,0,0) --> (0,0,1,1,0,0)
optimize() {
// times or values may be shared with other tracks, so overwriting is unsafe
const times = AnimationUtils.arraySlice( this.times ),
values = AnimationUtils.arraySlice( this.values ),
stride = this.getValueSize(),
smoothInterpolation = this.getInterpolation() === InterpolateSmooth,
lastIndex = times.length - 1;
let writeIndex = 1;
for ( let i = 1; i < lastIndex; ++ i ) {
let keep = false;
const time = times[ i ];
const timeNext = times[ i + 1 ];
// remove adjacent keyframes scheduled at the same time
if ( time !== timeNext && ( i !== 1 || time !== times[ 0 ] ) ) {
if ( ! smoothInterpolation ) {
// remove unnecessary keyframes same as their neighbors
const offset = i * stride,
offsetP = offset - stride,
offsetN = offset + stride;
for ( let j = 0; j !== stride; ++ j ) {
const value = values[ offset + j ];
if ( value !== values[ offsetP + j ] ||
value !== values[ offsetN + j ] ) {
keep = true;
break;
}
}
} else {
keep = true;
}
}
// in-place compaction
if ( keep ) {
if ( i !== writeIndex ) {
times[ writeIndex ] = times[ i ];
const readOffset = i * stride,
writeOffset = writeIndex * stride;
for ( let j = 0; j !== stride; ++ j ) {
values[ writeOffset + j ] = values[ readOffset + j ];
}
}
++ writeIndex;
}
}
// flush last keyframe (compaction looks ahead)
if ( lastIndex > 0 ) {
times[ writeIndex ] = times[ lastIndex ];
for ( let readOffset = lastIndex * stride, writeOffset = writeIndex * stride, j = 0; j !== stride; ++ j ) {
values[ writeOffset + j ] = values[ readOffset + j ];
}
++ writeIndex;
}
if ( writeIndex !== times.length ) {
this.times = AnimationUtils.arraySlice( times, 0, writeIndex );
this.values = AnimationUtils.arraySlice( values, 0, writeIndex * stride );
} else {
this.times = times;
this.values = values;
}
return this;
}
clone() {
const times = AnimationUtils.arraySlice( this.times, 0 );
const values = AnimationUtils.arraySlice( this.values, 0 );
const TypedKeyframeTrack = this.constructor;
const track = new TypedKeyframeTrack( this.name, times, values );
// Interpolant argument to constructor is not saved, so copy the factory method directly.
track.createInterpolant = this.createInterpolant;
return track;
}
}
KeyframeTrack.prototype.TimeBufferType = Float32Array;
KeyframeTrack.prototype.ValueBufferType = Float32Array;
KeyframeTrack.prototype.DefaultInterpolation = InterpolateLinear;
/**
* A Track of Boolean keyframe values.
*/
class BooleanKeyframeTrack extends KeyframeTrack {}
BooleanKeyframeTrack.prototype.ValueTypeName = 'bool';
BooleanKeyframeTrack.prototype.ValueBufferType = Array;
BooleanKeyframeTrack.prototype.DefaultInterpolation = InterpolateDiscrete;
BooleanKeyframeTrack.prototype.InterpolantFactoryMethodLinear = undefined;
BooleanKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined;
/**
* A Track of keyframe values that represent color.
*/
class ColorKeyframeTrack extends KeyframeTrack {}
ColorKeyframeTrack.prototype.ValueTypeName = 'color';
/**
* A Track of numeric keyframe values.
*/
class NumberKeyframeTrack extends KeyframeTrack {}
NumberKeyframeTrack.prototype.ValueTypeName = 'number';
/**
* Spherical linear unit quaternion interpolant.
*/
class QuaternionLinearInterpolant extends Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
super( parameterPositions, sampleValues, sampleSize, resultBuffer );
}
interpolate_( i1, t0, t, t1 ) {
const result = this.resultBuffer,
values = this.sampleValues,
stride = this.valueSize,
alpha = ( t - t0 ) / ( t1 - t0 );
let offset = i1 * stride;
for ( let end = offset + stride; offset !== end; offset += 4 ) {
Quaternion.slerpFlat( result, 0, values, offset - stride, values, offset, alpha );
}
return result;
}
}
/**
* A Track of quaternion keyframe values.
*/
class QuaternionKeyframeTrack extends KeyframeTrack {
InterpolantFactoryMethodLinear( result ) {
return new QuaternionLinearInterpolant( this.times, this.values, this.getValueSize(), result );
}
}
QuaternionKeyframeTrack.prototype.ValueTypeName = 'quaternion';
// ValueBufferType is inherited
QuaternionKeyframeTrack.prototype.DefaultInterpolation = InterpolateLinear;
QuaternionKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined;
/**
* A Track that interpolates Strings
*/
class StringKeyframeTrack extends KeyframeTrack {}
StringKeyframeTrack.prototype.ValueTypeName = 'string';
StringKeyframeTrack.prototype.ValueBufferType = Array;
StringKeyframeTrack.prototype.DefaultInterpolation = InterpolateDiscrete;
StringKeyframeTrack.prototype.InterpolantFactoryMethodLinear = undefined;
StringKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined;
/**
* A Track of vectored keyframe values.
*/
class VectorKeyframeTrack extends KeyframeTrack {}
VectorKeyframeTrack.prototype.ValueTypeName = 'vector';
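// Usage sketch (illustrative comment only, not executed): concrete tracks built
// from the classes above. The target property paths are hypothetical examples of
// the binding syntax resolved later by the animation system.
//
//     // opacity ramp 0 -> 1 over one second, linearly interpolated by default
//     const fade = new NumberKeyframeTrack( '.material.opacity', [ 0, 1 ], [ 0, 1 ] );
//
//     // quarter turn around Y, slerped by QuaternionLinearInterpolant
//     const spin = new QuaternionKeyframeTrack( '.quaternion',
//         [ 0, 1 ],
//         [ 0, 0, 0, 1, 0, Math.SQRT1_2, 0, Math.SQRT1_2 ] );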
class AnimationClip {
constructor( name, duration = - 1, tracks, blendMode = NormalAnimationBlendMode ) {
this.name = name;
this.tracks = tracks;
this.duration = duration;
this.blendMode = blendMode;
this.uuid = generateUUID();
// this means it should figure out its duration by scanning the tracks
if ( this.duration < 0 ) {
this.resetDuration();
}
}
static parse( json ) {
const tracks = [],
jsonTracks = json.tracks,
frameTime = 1.0 / ( json.fps || 1.0 );
for ( let i = 0, n = jsonTracks.length; i !== n; ++ i ) {
tracks.push( parseKeyframeTrack( jsonTracks[ i ] ).scale( frameTime ) );
}
const clip = new this( json.name, json.duration, tracks, json.blendMode );
clip.uuid = json.uuid;
return clip;
}
static toJSON( clip ) {
const tracks = [],
clipTracks = clip.tracks;
const json = {
'name': clip.name,
'duration': clip.duration,
'tracks': tracks,
'uuid': clip.uuid,
'blendMode': clip.blendMode
};
for ( let i = 0, n = clipTracks.length; i !== n; ++ i ) {
tracks.push( KeyframeTrack.toJSON( clipTracks[ i ] ) );
}
return json;
}
static CreateFromMorphTargetSequence( name, morphTargetSequence, fps, noLoop ) {
const numMorphTargets = morphTargetSequence.length;
const tracks = [];
for ( let i = 0; i < numMorphTargets; i ++ ) {
let times = [];
let values = [];
times.push(
( i + numMorphTargets - 1 ) % numMorphTargets,
i,
( i + 1 ) % numMorphTargets );
values.push( 0, 1, 0 );
const order = AnimationUtils.getKeyframeOrder( times );
times = AnimationUtils.sortedArray( times, 1, order );
values = AnimationUtils.sortedArray( values, 1, order );
// if there is a key at the first frame, duplicate it as the
// last frame as well, for a perfect loop.
if ( ! noLoop && times[ 0 ] === 0 ) {
times.push( numMorphTargets );
values.push( values[ 0 ] );
}
tracks.push(
new NumberKeyframeTrack(
'.morphTargetInfluences[' + morphTargetSequence[ i ].name + ']',
times, values
).scale( 1.0 / fps ) );
}
return new this( name, - 1, tracks );
}
static findByName( objectOrClipArray, name ) {
let clipArray = objectOrClipArray;
if ( ! Array.isArray( objectOrClipArray ) ) {
const o = objectOrClipArray;
clipArray = o.geometry && o.geometry.animations || o.animations;
}
for ( let i = 0; i < clipArray.length; i ++ ) {
if ( clipArray[ i ].name === name ) {
return clipArray[ i ];
}
}
return null;
}
static CreateClipsFromMorphTargetSequences( morphTargets, fps, noLoop ) {
const animationToMorphTargets = {};
// tested with https://regex101.com/ on trick sequences
// such as flamingo_flyA_003, flamingo_run1_003, crdeath0059
const pattern = /^([\w-]*?)([\d]+)$/;
// sort morph target names into animation groups based on
// patterns like Walk_001, Walk_002, Run_001, Run_002
for ( let i = 0, il = morphTargets.length; i < il; i ++ ) {
const morphTarget = morphTargets[ i ];
const parts = morphTarget.name.match( pattern );
if ( parts && parts.length > 1 ) {
const name = parts[ 1 ];
let animationMorphTargets = animationToMorphTargets[ name ];
if ( ! animationMorphTargets ) {
animationToMorphTargets[ name ] = animationMorphTargets = [];
}
animationMorphTargets.push( morphTarget );
}
}
const clips = [];
for ( const name in animationToMorphTargets ) {
clips.push( this.CreateFromMorphTargetSequence( name, animationToMorphTargets[ name ], fps, noLoop ) );
}
return clips;
}
// parse the animation.hierarchy format
static parseAnimation( animation, bones ) {
if ( ! animation ) {
console.error( 'THREE.AnimationClip: No animation in JSONLoader data.' );
return null;
}
const addNonemptyTrack = function ( trackType, trackName, animationKeys, propertyName, destTracks ) {
// only add a track if there are actually keys.
if ( animationKeys.length !== 0 ) {
const times = [];
const values = [];
AnimationUtils.flattenJSON( animationKeys, times, values, propertyName );
// empty keys are filtered out, so check again
if ( times.length !== 0 ) {
destTracks.push( new trackType( trackName, times, values ) );
}
}
};
const tracks = [];
const clipName = animation.name || 'default';
const fps = animation.fps || 30;
const blendMode = animation.blendMode;
// a duration of -1 triggers automatic length determination in the AnimationClip constructor.
let duration = animation.length || - 1;
const hierarchyTracks = animation.hierarchy || [];
for ( let h = 0; h < hierarchyTracks.length; h ++ ) {
const animationKeys = hierarchyTracks[ h ].keys;
// skip empty tracks
if ( ! animationKeys || animationKeys.length === 0 ) continue;
// process morph targets
if ( animationKeys[ 0 ].morphTargets ) {
// figure out all morph targets used in this track
const morphTargetNames = {};
let k;
for ( k = 0; k < animationKeys.length; k ++ ) {
if ( animationKeys[ k ].morphTargets ) {
for ( let m = 0; m < animationKeys[ k ].morphTargets.length; m ++ ) {
morphTargetNames[ animationKeys[ k ].morphTargets[ m ] ] = - 1;
}
}
}
// create a track for each morph target with all zero
// morphTargetInfluences except for the keys in which
// the morphTarget is named.
for ( const morphTargetName in morphTargetNames ) {
const times = [];
const values = [];
for ( let m = 0; m !== animationKeys[ k ].morphTargets.length; ++ m ) {
const animationKey = animationKeys[ k ];
times.push( animationKey.time );
values.push( ( animationKey.morphTarget === morphTargetName ) ? 1 : 0 );
}
tracks.push( new NumberKeyframeTrack( '.morphTargetInfluence[' + morphTargetName + ']', times, values ) );
}
duration = morphTargetNames.length * ( fps || 1.0 );
} else {
// ...assume skeletal animation
const boneName = '.bones[' + bones[ h ].name + ']';
addNonemptyTrack(
VectorKeyframeTrack, boneName + '.position',
animationKeys, 'pos', tracks );
addNonemptyTrack(
QuaternionKeyframeTrack, boneName + '.quaternion',
animationKeys, 'rot', tracks );
addNonemptyTrack(
VectorKeyframeTrack, boneName + '.scale',
animationKeys, 'scl', tracks );
}
}
if ( tracks.length === 0 ) {
return null;
}
const clip = new this( clipName, duration, tracks, blendMode );
return clip;
}
resetDuration() {
const tracks = this.tracks;
let duration = 0;
for ( let i = 0, n = tracks.length; i !== n; ++ i ) {
const track = this.tracks[ i ];
duration = Math.max( duration, track.times[ track.times.length - 1 ] );
}
this.duration = duration;
return this;
}
trim() {
for ( let i = 0; i < this.tracks.length; i ++ ) {
this.tracks[ i ].trim( 0, this.duration );
}
return this;
}
validate() {
let valid = true;
for ( let i = 0; i < this.tracks.length; i ++ ) {
valid = valid && this.tracks[ i ].validate();
}
return valid;
}
optimize() {
for ( let i = 0; i < this.tracks.length; i ++ ) {
this.tracks[ i ].optimize();
}
return this;
}
clone() {
const tracks = [];
for ( let i = 0; i < this.tracks.length; i ++ ) {
tracks.push( this.tracks[ i ].clone() );
}
return new this.constructor( this.name, this.duration, tracks, this.blendMode );
}
toJSON() {
return this.constructor.toJSON( this );
}
}
function getTrackTypeForValueTypeName( typeName ) {
switch ( typeName.toLowerCase() ) {
case 'scalar':
case 'double':
case 'float':
case 'number':
case 'integer':
return NumberKeyframeTrack;
case 'vector':
case 'vector2':
case 'vector3':
case 'vector4':
return VectorKeyframeTrack;
case 'color':
return ColorKeyframeTrack;
case 'quaternion':
return QuaternionKeyframeTrack;
case 'bool':
case 'boolean':
return BooleanKeyframeTrack;
case 'string':
return StringKeyframeTrack;
}
throw new Error( 'THREE.KeyframeTrack: Unsupported typeName: ' + typeName );
}
function parseKeyframeTrack( json ) {
if ( json.type === undefined ) {
throw new Error( 'THREE.KeyframeTrack: track type undefined, can not parse' );
}
const trackType = getTrackTypeForValueTypeName( json.type );
if ( json.times === undefined ) {
const times = [], values = [];
AnimationUtils.flattenJSON( json.keys, times, values, 'value' );
json.times = times;
json.values = values;
}
// derived classes can define a static parse method
if ( trackType.parse !== undefined ) {
return trackType.parse( json );
} else {
// by default, we assume a constructor compatible with the base
return new trackType( json.name, json.times, json.values, json.interpolation );
}
}
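// Usage sketch (illustrative comment only, not executed): the two JSON shapes
// accepted by parseKeyframeTrack. The 'type' string selects the track class via
// getTrackTypeForValueTypeName; a 'keys' array is flattened through
// AnimationUtils.flattenJSON. The property names shown are hypothetical.
//
//     parseKeyframeTrack( { name: '.position', type: 'vector3',
//         times: [ 0, 1 ], values: [ 0, 0, 0, 1, 0, 0 ] } );
//
//     parseKeyframeTrack( { name: '.material.opacity', type: 'number',
//         keys: [ { time: 0, value: 0 }, { time: 1, value: 1 } ] } );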
const Cache = {
enabled: false,
files: {},
add: function ( key, file ) {
if ( this.enabled === false ) return;
// console.log( 'THREE.Cache', 'Adding key:', key );
this.files[ key ] = file;
},
get: function ( key ) {
if ( this.enabled === false ) return;
// console.log( 'THREE.Cache', 'Checking key:', key );
return this.files[ key ];
},
remove: function ( key ) {
delete this.files[ key ];
},
clear: function () {
this.files = {};
}
};
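// Usage sketch (illustrative comment only, not executed): the cache is opt-in
// and shared by the loaders below (FileLoader, ImageLoader, ImageBitmapLoader).
// The key and value here are hypothetical.
//
//     Cache.enabled = true;                  // disabled by default
//     Cache.add( 'models/chair.glb', data );
//     Cache.get( 'models/chair.glb' );       // -> data, or undefined on a miss
//     Cache.clear();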
class LoadingManager {
constructor( onLoad, onProgress, onError ) {
const scope = this;
let isLoading = false;
let itemsLoaded = 0;
let itemsTotal = 0;
let urlModifier = undefined;
const handlers = [];
// Refer to #5689 for the reason why we don't set .onStart
// in the constructor
this.onStart = undefined;
this.onLoad = onLoad;
this.onProgress = onProgress;
this.onError = onError;
this.itemStart = function ( url ) {
itemsTotal ++;
if ( isLoading === false ) {
if ( scope.onStart !== undefined ) {
scope.onStart( url, itemsLoaded, itemsTotal );
}
}
isLoading = true;
};
this.itemEnd = function ( url ) {
itemsLoaded ++;
if ( scope.onProgress !== undefined ) {
scope.onProgress( url, itemsLoaded, itemsTotal );
}
if ( itemsLoaded === itemsTotal ) {
isLoading = false;
if ( scope.onLoad !== undefined ) {
scope.onLoad();
}
}
};
this.itemError = function ( url ) {
if ( scope.onError !== undefined ) {
scope.onError( url );
}
};
this.resolveURL = function ( url ) {
if ( urlModifier ) {
return urlModifier( url );
}
return url;
};
this.setURLModifier = function ( transform ) {
urlModifier = transform;
return this;
};
this.addHandler = function ( regex, loader ) {
handlers.push( regex, loader );
return this;
};
this.removeHandler = function ( regex ) {
const index = handlers.indexOf( regex );
if ( index !== - 1 ) {
handlers.splice( index, 2 );
}
return this;
};
this.getHandler = function ( file ) {
for ( let i = 0, l = handlers.length; i < l; i += 2 ) {
const regex = handlers[ i ];
const loader = handlers[ i + 1 ];
if ( regex.global ) regex.lastIndex = 0; // see #17920
if ( regex.test( file ) ) {
return loader;
}
}
return null;
};
}
}
const DefaultLoadingManager = new LoadingManager();
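// Usage sketch (illustrative comment only, not executed): wiring callbacks on a
// dedicated manager. Note that .onStart is assigned after construction, as the
// comment inside the constructor explains. The URL rewrite is a hypothetical example.
//
//     const manager = new LoadingManager(
//         function () { console.log( 'all items loaded' ); },
//         function ( url, loaded, total ) { console.log( 'loaded ' + loaded + '/' + total + ': ' + url ); },
//         function ( url ) { console.error( 'failed to load ' + url ); } );
//     manager.onStart = function ( url ) { console.log( 'started with ' + url ); };
//     manager.setURLModifier( function ( url ) { return url.replace( 'assets/', 'static/assets/' ); } );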
class Loader {
constructor( manager ) {
this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager;
this.crossOrigin = 'anonymous';
this.withCredentials = false;
this.path = '';
this.resourcePath = '';
this.requestHeader = {};
}
load( /* url, onLoad, onProgress, onError */ ) {}
loadAsync( url, onProgress ) {
const scope = this;
return new Promise( function ( resolve, reject ) {
scope.load( url, resolve, onProgress, reject );
} );
}
parse( /* data */ ) {}
setCrossOrigin( crossOrigin ) {
this.crossOrigin = crossOrigin;
return this;
}
setWithCredentials( value ) {
this.withCredentials = value;
return this;
}
setPath( path ) {
this.path = path;
return this;
}
setResourcePath( resourcePath ) {
this.resourcePath = resourcePath;
return this;
}
setRequestHeader( requestHeader ) {
this.requestHeader = requestHeader;
return this;
}
}
const loading = {};
class FileLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( url, onLoad, onProgress, onError ) {
if ( url === undefined ) url = '';
if ( this.path !== undefined ) url = this.path + url;
url = this.manager.resolveURL( url );
const scope = this;
const cached = Cache.get( url );
if ( cached !== undefined ) {
scope.manager.itemStart( url );
setTimeout( function () {
if ( onLoad ) onLoad( cached );
scope.manager.itemEnd( url );
}, 0 );
return cached;
}
// Check if request is duplicate
if ( loading[ url ] !== undefined ) {
loading[ url ].push( {
onLoad: onLoad,
onProgress: onProgress,
onError: onError
} );
return;
}
// Check for data: URI
const dataUriRegex = /^data:(.*?)(;base64)?,(.*)$/;
const dataUriRegexResult = url.match( dataUriRegex );
let request;
// Safari cannot handle Data URIs through XMLHttpRequest, so process them manually
if ( dataUriRegexResult ) {
const mimeType = dataUriRegexResult[ 1 ];
const isBase64 = !! dataUriRegexResult[ 2 ];
let data = dataUriRegexResult[ 3 ];
data = decodeURIComponent( data );
if ( isBase64 ) data = atob( data );
try {
let response;
const responseType = ( this.responseType || '' ).toLowerCase();
switch ( responseType ) {
case 'arraybuffer':
case 'blob':
const view = new Uint8Array( data.length );
for ( let i = 0; i < data.length; i ++ ) {
view[ i ] = data.charCodeAt( i );
}
if ( responseType === 'blob' ) {
response = new Blob( [ view.buffer ], { type: mimeType } );
} else {
response = view.buffer;
}
break;
case 'document':
const parser = new DOMParser();
response = parser.parseFromString( data, mimeType );
break;
case 'json':
response = JSON.parse( data );
break;
default: // 'text' or other
response = data;
break;
}
// Wait for next browser tick like standard XMLHttpRequest event dispatching does
setTimeout( function () {
if ( onLoad ) onLoad( response );
scope.manager.itemEnd( url );
}, 0 );
} catch ( error ) {
// Wait for next browser tick like standard XMLHttpRequest event dispatching does
setTimeout( function () {
if ( onError ) onError( error );
scope.manager.itemError( url );
scope.manager.itemEnd( url );
}, 0 );
}
} else {
// Initialise array for duplicate requests
loading[ url ] = [];
loading[ url ].push( {
onLoad: onLoad,
onProgress: onProgress,
onError: onError
} );
request = new XMLHttpRequest();
request.open( 'GET', url, true );
request.addEventListener( 'load', function ( event ) {
const response = this.response;
const callbacks = loading[ url ];
delete loading[ url ];
if ( this.status === 200 || this.status === 0 ) {
// Some browsers return HTTP Status 0 when using non-http protocol
// e.g. 'file://' or 'data://'. Handle as success.
if ( this.status === 0 ) console.warn( 'THREE.FileLoader: HTTP Status 0 received.' );
// Add to cache only on HTTP success, so that we do not cache
// error response bodies as proper responses to requests.
Cache.add( url, response );
for ( let i = 0, il = callbacks.length; i < il; i ++ ) {
const callback = callbacks[ i ];
if ( callback.onLoad ) callback.onLoad( response );
}
scope.manager.itemEnd( url );
} else {
for ( let i = 0, il = callbacks.length; i < il; i ++ ) {
const callback = callbacks[ i ];
if ( callback.onError ) callback.onError( event );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
}
}, false );
request.addEventListener( 'progress', function ( event ) {
const callbacks = loading[ url ];
for ( let i = 0, il = callbacks.length; i < il; i ++ ) {
const callback = callbacks[ i ];
if ( callback.onProgress ) callback.onProgress( event );
}
}, false );
request.addEventListener( 'error', function ( event ) {
const callbacks = loading[ url ];
delete loading[ url ];
for ( let i = 0, il = callbacks.length; i < il; i ++ ) {
const callback = callbacks[ i ];
if ( callback.onError ) callback.onError( event );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
}, false );
request.addEventListener( 'abort', function ( event ) {
const callbacks = loading[ url ];
delete loading[ url ];
for ( let i = 0, il = callbacks.length; i < il; i ++ ) {
const callback = callbacks[ i ];
if ( callback.onError ) callback.onError( event );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
}, false );
if ( this.responseType !== undefined ) request.responseType = this.responseType;
if ( this.withCredentials !== undefined ) request.withCredentials = this.withCredentials;
if ( request.overrideMimeType ) request.overrideMimeType( this.mimeType !== undefined ? this.mimeType : 'text/plain' );
for ( const header in this.requestHeader ) {
request.setRequestHeader( header, this.requestHeader[ header ] );
}
request.send( null );
}
scope.manager.itemStart( url );
return request;
}
setResponseType( value ) {
this.responseType = value;
return this;
}
setMimeType( value ) {
this.mimeType = value;
return this;
}
}
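// Usage sketch (illustrative comment only, not executed): fetching a binary file.
// Duplicate in-flight requests for the same URL are collapsed, and responses are
// cached when Cache.enabled is true. The URL is hypothetical; omitting the
// manager argument falls back to DefaultLoadingManager.
//
//     const loader = new FileLoader();
//     loader.setResponseType( 'arraybuffer' );
//     loader.loadAsync( 'models/chair.bin' )
//         .then( function ( buffer ) { console.log( buffer.byteLength + ' bytes' ); } )
//         .catch( function ( err ) { console.error( err ); } );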
/**
* Abstract base class for block-based compressed texture loaders (dds, pvr, ...).
*
* Subclasses have to implement the parse() method, which is used by load().
*/
class CompressedTextureLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( url, onLoad, onProgress, onError ) {
const scope = this;
const images = [];
const texture = new CompressedTexture();
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( scope.withCredentials );
let loaded = 0;
function loadTexture( i ) {
loader.load( url[ i ], function ( buffer ) {
const texDatas = scope.parse( buffer, true );
images[ i ] = {
width: texDatas.width,
height: texDatas.height,
format: texDatas.format,
mipmaps: texDatas.mipmaps
};
loaded += 1;
if ( loaded === 6 ) {
if ( texDatas.mipmapCount === 1 ) texture.minFilter = LinearFilter;
texture.image = images;
texture.format = texDatas.format;
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
}
}, onProgress, onError );
}
if ( Array.isArray( url ) ) {
for ( let i = 0, il = url.length; i < il; ++ i ) {
loadTexture( i );
}
} else {
// compressed cubemap texture stored in a single DDS file
loader.load( url, function ( buffer ) {
const texDatas = scope.parse( buffer, true );
if ( texDatas.isCubemap ) {
const faces = texDatas.mipmaps.length / texDatas.mipmapCount;
for ( let f = 0; f < faces; f ++ ) {
images[ f ] = { mipmaps: [] };
for ( let i = 0; i < texDatas.mipmapCount; i ++ ) {
images[ f ].mipmaps.push( texDatas.mipmaps[ f * texDatas.mipmapCount + i ] );
images[ f ].format = texDatas.format;
images[ f ].width = texDatas.width;
images[ f ].height = texDatas.height;
}
}
texture.image = images;
} else {
texture.image.width = texDatas.width;
texture.image.height = texDatas.height;
texture.mipmaps = texDatas.mipmaps;
}
if ( texDatas.mipmapCount === 1 ) {
texture.minFilter = LinearFilter;
}
texture.format = texDatas.format;
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
}, onProgress, onError );
}
return texture;
}
}
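// Subclass sketch (illustrative comment only, not executed): load() above expects
// parse() to return an object shaped like
// { width, height, format, mipmaps, mipmapCount, isCubemap }. The class name is
// hypothetical and the actual header/mipmap parsing is elided.
//
//     class ExampleDDSLoader extends CompressedTextureLoader {
//         parse( buffer /*, loadMipmaps */ ) {
//             // ... read width, height, format and mipmaps from `buffer` ...
//             return { width, height, format, mipmaps, mipmapCount: mipmaps.length, isCubemap: false };
//         }
//     }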
class ImageLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( url, onLoad, onProgress, onError ) {
if ( this.path !== undefined ) url = this.path + url;
url = this.manager.resolveURL( url );
const scope = this;
const cached = Cache.get( url );
if ( cached !== undefined ) {
scope.manager.itemStart( url );
setTimeout( function () {
if ( onLoad ) onLoad( cached );
scope.manager.itemEnd( url );
}, 0 );
return cached;
}
const image = document.createElementNS( 'http://www.w3.org/1999/xhtml', 'img' );
function onImageLoad() {
image.removeEventListener( 'load', onImageLoad, false );
image.removeEventListener( 'error', onImageError, false );
Cache.add( url, this );
if ( onLoad ) onLoad( this );
scope.manager.itemEnd( url );
}
function onImageError( event ) {
image.removeEventListener( 'load', onImageLoad, false );
image.removeEventListener( 'error', onImageError, false );
if ( onError ) onError( event );
scope.manager.itemError( url );
scope.manager.itemEnd( url );
}
image.addEventListener( 'load', onImageLoad, false );
image.addEventListener( 'error', onImageError, false );
if ( url.substr( 0, 5 ) !== 'data:' ) {
if ( this.crossOrigin !== undefined ) image.crossOrigin = this.crossOrigin;
}
scope.manager.itemStart( url );
image.src = url;
return image;
}
}
class CubeTextureLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( urls, onLoad, onProgress, onError ) {
const texture = new CubeTexture();
const loader = new ImageLoader( this.manager );
loader.setCrossOrigin( this.crossOrigin );
loader.setPath( this.path );
let loaded = 0;
function loadTexture( i ) {
loader.load( urls[ i ], function ( image ) {
texture.images[ i ] = image;
loaded ++;
if ( loaded === 6 ) {
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
}
}, undefined, onError );
}
for ( let i = 0; i < urls.length; ++ i ) {
loadTexture( i );
}
return texture;
}
}
/**
* Abstract base class for loading generic binary texture formats (rgbe, hdr, ...).
*
* Subclasses have to implement the parse() method, which is used by load().
*/
class DataTextureLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( url, onLoad, onProgress, onError ) {
const scope = this;
const texture = new DataTexture();
const loader = new FileLoader( this.manager );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setPath( this.path );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( buffer ) {
const texData = scope.parse( buffer );
if ( ! texData ) return;
if ( texData.image !== undefined ) {
texture.image = texData.image;
} else if ( texData.data !== undefined ) {
texture.image.width = texData.width;
texture.image.height = texData.height;
texture.image.data = texData.data;
}
texture.wrapS = texData.wrapS !== undefined ? texData.wrapS : ClampToEdgeWrapping;
texture.wrapT = texData.wrapT !== undefined ? texData.wrapT : ClampToEdgeWrapping;
texture.magFilter = texData.magFilter !== undefined ? texData.magFilter : LinearFilter;
texture.minFilter = texData.minFilter !== undefined ? texData.minFilter : LinearFilter;
texture.anisotropy = texData.anisotropy !== undefined ? texData.anisotropy : 1;
if ( texData.encoding !== undefined ) {
texture.encoding = texData.encoding;
}
if ( texData.flipY !== undefined ) {
texture.flipY = texData.flipY;
}
if ( texData.format !== undefined ) {
texture.format = texData.format;
}
if ( texData.type !== undefined ) {
texture.type = texData.type;
}
if ( texData.mipmaps !== undefined ) {
texture.mipmaps = texData.mipmaps;
texture.minFilter = LinearMipmapLinearFilter; // presumably...
}
if ( texData.mipmapCount === 1 ) {
texture.minFilter = LinearFilter;
}
if ( texData.generateMipmaps !== undefined ) {
texture.generateMipmaps = texData.generateMipmaps;
}
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture, texData );
}, onProgress, onError );
return texture;
}
}
class TextureLoader extends Loader {
constructor( manager ) {
super( manager );
}
load( url, onLoad, onProgress, onError ) {
const texture = new Texture$1();
const loader = new ImageLoader( this.manager );
loader.setCrossOrigin( this.crossOrigin );
loader.setPath( this.path );
loader.load( url, function ( image ) {
texture.image = image;
// JPEGs can't have an alpha channel, so memory can be saved by storing them as RGB.
const isJPEG = url.search( /\.jpe?g($|\?)/i ) > 0 || url.search( /^data\:image\/jpeg/ ) === 0;
texture.format = isJPEG ? RGBFormat : RGBAFormat;
texture.needsUpdate = true;
if ( onLoad !== undefined ) {
onLoad( texture );
}
}, onProgress, onError );
return texture;
}
}
class Light extends Object3D {
constructor( color, intensity = 1 ) {
super();
this.type = 'Light';
this.color = new Color( color );
this.intensity = intensity;
}
dispose() {
// Empty here in base class; some subclasses override.
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
this.intensity = source.intensity;
return this;
}
toJSON( meta ) {
const data = super.toJSON( meta );
data.object.color = this.color.getHex();
data.object.intensity = this.intensity;
if ( this.groundColor !== undefined ) data.object.groundColor = this.groundColor.getHex();
if ( this.distance !== undefined ) data.object.distance = this.distance;
if ( this.angle !== undefined ) data.object.angle = this.angle;
if ( this.decay !== undefined ) data.object.decay = this.decay;
if ( this.penumbra !== undefined ) data.object.penumbra = this.penumbra;
if ( this.shadow !== undefined ) data.object.shadow = this.shadow.toJSON();
return data;
}
}
Light.prototype.isLight = true;
const _projScreenMatrix$1 = /*@__PURE__*/ new Matrix4();
const _lightPositionWorld$1 = /*@__PURE__*/ new Vector3();
const _lookTarget$1 = /*@__PURE__*/ new Vector3();
class LightShadow {
constructor( camera ) {
this.camera = camera;
this.bias = 0;
this.normalBias = 0;
this.radius = 1;
this.mapSize = new Vector2( 512, 512 );
this.map = null;
this.mapPass = null;
this.matrix = new Matrix4();
this.autoUpdate = true;
this.needsUpdate = false;
this._frustum = new Frustum();
this._frameExtents = new Vector2( 1, 1 );
this._viewportCount = 1;
this._viewports = [
new Vector4( 0, 0, 1, 1 )
];
}
getViewportCount() {
return this._viewportCount;
}
getFrustum() {
return this._frustum;
}
updateMatrices( light ) {
const shadowCamera = this.camera;
const shadowMatrix = this.matrix;
_lightPositionWorld$1.setFromMatrixPosition( light.matrixWorld );
shadowCamera.position.copy( _lightPositionWorld$1 );
_lookTarget$1.setFromMatrixPosition( light.target.matrixWorld );
shadowCamera.lookAt( _lookTarget$1 );
shadowCamera.updateMatrixWorld();
_projScreenMatrix$1.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse );
this._frustum.setFromProjectionMatrix( _projScreenMatrix$1 );
shadowMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
shadowMatrix.multiply( shadowCamera.projectionMatrix );
shadowMatrix.multiply( shadowCamera.matrixWorldInverse );
}
getViewport( viewportIndex ) {
return this._viewports[ viewportIndex ];
}
getFrameExtents() {
return this._frameExtents;
}
dispose() {
if ( this.map ) {
this.map.dispose();
}
if ( this.mapPass ) {
this.mapPass.dispose();
}
}
copy( source ) {
this.camera = source.camera.clone();
this.bias = source.bias;
this.radius = source.radius;
this.mapSize.copy( source.mapSize );
return this;
}
clone() {
return new this.constructor().copy( this );
}
toJSON() {
const object = {};
if ( this.bias !== 0 ) object.bias = this.bias;
if ( this.normalBias !== 0 ) object.normalBias = this.normalBias;
if ( this.radius !== 1 ) object.radius = this.radius;
if ( this.mapSize.x !== 512 || this.mapSize.y !== 512 ) object.mapSize = this.mapSize.toArray();
object.camera = this.camera.toJSON( false ).object;
delete object.camera.matrix;
return object;
}
}
class SpotLightShadow extends LightShadow {
constructor() {
super( new PerspectiveCamera( 50, 1, 0.5, 500 ) );
this.focus = 1;
}
updateMatrices( light ) {
const camera = this.camera;
const fov = RAD2DEG * 2 * light.angle * this.focus;
const aspect = this.mapSize.width / this.mapSize.height;
const far = light.distance || camera.far;
if ( fov !== camera.fov || aspect !== camera.aspect || far !== camera.far ) {
camera.fov = fov;
camera.aspect = aspect;
camera.far = far;
camera.updateProjectionMatrix();
}
super.updateMatrices( light );
}
copy( source ) {
super.copy( source );
this.focus = source.focus;
return this;
}
}
SpotLightShadow.prototype.isSpotLightShadow = true;
class SpotLight extends Light {
constructor( color, intensity, distance = 0, angle = Math.PI / 3, penumbra = 0, decay = 1 ) {
super( color, intensity );
this.type = 'SpotLight';
this.position.copy( Object3D.DefaultUp );
this.updateMatrix();
this.target = new Object3D();
this.distance = distance;
this.angle = angle;
this.penumbra = penumbra;
this.decay = decay; // for physically correct lights, should be 2.
this.shadow = new SpotLightShadow();
}
get power() {
// intensity = power per solid angle.
// ref: equation (17) from https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
return this.intensity * Math.PI;
}
set power( power ) {
// intensity = power per solid angle.
// ref: equation (17) from https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
this.intensity = power / Math.PI;
}
dispose() {
this.shadow.dispose();
}
copy( source ) {
super.copy( source );
this.distance = source.distance;
this.angle = source.angle;
this.penumbra = source.penumbra;
this.decay = source.decay;
this.target = source.target.clone();
this.shadow = source.shadow.clone();
return this;
}
}
SpotLight.prototype.isSpotLight = true;
const _projScreenMatrix = /*@__PURE__*/ new Matrix4();
const _lightPositionWorld = /*@__PURE__*/ new Vector3();
const _lookTarget = /*@__PURE__*/ new Vector3();
class PointLightShadow extends LightShadow {
constructor() {
super( new PerspectiveCamera( 90, 1, 0.5, 500 ) );
this._frameExtents = new Vector2( 4, 2 );
this._viewportCount = 6;
this._viewports = [
// These viewports map a cube-map onto a 2D texture with the
// following orientation:
//
// xzXZ
// y Y
//
// X - Positive x direction
// x - Negative x direction
// Y - Positive y direction
// y - Negative y direction
// Z - Positive z direction
// z - Negative z direction
// positive X
new Vector4( 2, 1, 1, 1 ),
// negative X
new Vector4( 0, 1, 1, 1 ),
// positive Z
new Vector4( 3, 1, 1, 1 ),
// negative Z
new Vector4( 1, 1, 1, 1 ),
// positive Y
new Vector4( 3, 0, 1, 1 ),
// negative Y
new Vector4( 1, 0, 1, 1 )
];
this._cubeDirections = [
new Vector3( 1, 0, 0 ), new Vector3( - 1, 0, 0 ), new Vector3( 0, 0, 1 ),
new Vector3( 0, 0, - 1 ), new Vector3( 0, 1, 0 ), new Vector3( 0, - 1, 0 )
];
this._cubeUps = [
new Vector3( 0, 1, 0 ), new Vector3( 0, 1, 0 ), new Vector3( 0, 1, 0 ),
new Vector3( 0, 1, 0 ), new Vector3( 0, 0, 1 ), new Vector3( 0, 0, - 1 )
];
}
updateMatrices( light, viewportIndex = 0 ) {
const camera = this.camera;
const shadowMatrix = this.matrix;
const far = light.distance || camera.far;
if ( far !== camera.far ) {
camera.far = far;
camera.updateProjectionMatrix();
}
_lightPositionWorld.setFromMatrixPosition( light.matrixWorld );
camera.position.copy( _lightPositionWorld );
_lookTarget.copy( camera.position );
_lookTarget.add( this._cubeDirections[ viewportIndex ] );
camera.up.copy( this._cubeUps[ viewportIndex ] );
camera.lookAt( _lookTarget );
camera.updateMatrixWorld();
shadowMatrix.makeTranslation( - _lightPositionWorld.x, - _lightPositionWorld.y, - _lightPositionWorld.z );
_projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse );
this._frustum.setFromProjectionMatrix( _projScreenMatrix );
}
}
PointLightShadow.prototype.isPointLightShadow = true;
class PointLight extends Light {
constructor( color, intensity, distance = 0, decay = 1 ) {
super( color, intensity );
this.type = 'PointLight';
this.distance = distance;
this.decay = decay; // for physically correct lights, should be 2.
this.shadow = new PointLightShadow();
}
get power() {
// intensity = power per solid angle.
// ref: equation (15) from https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
return this.intensity * 4 * Math.PI;
}
set power( power ) {
// intensity = power per solid angle.
// ref: equation (15) from https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
this.intensity = power / ( 4 * Math.PI );
}
dispose() {
this.shadow.dispose();
}
copy( source ) {
super.copy( source );
this.distance = source.distance;
this.decay = source.decay;
this.shadow = source.shadow.clone();
return this;
}
}
PointLight.prototype.isPointLight = true;
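// Worked example (illustrative comment only, not executed): with physically
// correct lighting (decay = 2), setting .power to 800 (commonly interpreted as
// lumens, per the equation referenced above) gives
// intensity = 800 / ( 4 * Math.PI ) ≈ 63.66.
//
//     const bulb = new PointLight( 0xffffff );
//     bulb.decay = 2;
//     bulb.power = 800; // bulb.intensity is now ~63.66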
class OrthographicCamera extends Camera {
constructor( left = - 1, right = 1, top = 1, bottom = - 1, near = 0.1, far = 2000 ) {
super();
this.type = 'OrthographicCamera';
this.zoom = 1;
this.view = null;
this.left = left;
this.right = right;
this.top = top;
this.bottom = bottom;
this.near = near;
this.far = far;
this.updateProjectionMatrix();
}
copy( source, recursive ) {
super.copy( source, recursive );
this.left = source.left;
this.right = source.right;
this.top = source.top;
this.bottom = source.bottom;
this.near = source.near;
this.far = source.far;
this.zoom = source.zoom;
this.view = source.view === null ? null : Object.assign( {}, source.view );
return this;
}
setViewOffset( fullWidth, fullHeight, x, y, width, height ) {
if ( this.view === null ) {
this.view = {
enabled: true,
fullWidth: 1,
fullHeight: 1,
offsetX: 0,
offsetY: 0,
width: 1,
height: 1
};
}
this.view.enabled = true;
this.view.fullWidth = fullWidth;
this.view.fullHeight = fullHeight;
this.view.offsetX = x;
this.view.offsetY = y;
this.view.width = width;
this.view.height = height;
this.updateProjectionMatrix();
}
clearViewOffset() {
if ( this.view !== null ) {
this.view.enabled = false;
}
this.updateProjectionMatrix();
}
updateProjectionMatrix() {
const dx = ( this.right - this.left ) / ( 2 * this.zoom );
const dy = ( this.top - this.bottom ) / ( 2 * this.zoom );
const cx = ( this.right + this.left ) / 2;
const cy = ( this.top + this.bottom ) / 2;
let left = cx - dx;
let right = cx + dx;
let top = cy + dy;
let bottom = cy - dy;
if ( this.view !== null && this.view.enabled ) {
const scaleW = ( this.right - this.left ) / this.view.fullWidth / this.zoom;
const scaleH = ( this.top - this.bottom ) / this.view.fullHeight / this.zoom;
left += scaleW * this.view.offsetX;
right = left + scaleW * this.view.width;
top -= scaleH * this.view.offsetY;
bottom = top - scaleH * this.view.height;
}
this.projectionMatrix.makeOrthographic( left, right, top, bottom, this.near, this.far );
this.projectionMatrixInverse.copy( this.projectionMatrix ).invert();
}
toJSON( meta ) {
const data = super.toJSON( meta );
data.object.zoom = this.zoom;
data.object.left = this.left;
data.object.right = this.right;
data.object.top = this.top;
data.object.bottom = this.bottom;
data.object.near = this.near;
data.object.far = this.far;
if ( this.view !== null ) data.object.view = Object.assign( {}, this.view );
return data;
}
}
OrthographicCamera.prototype.isOrthographicCamera = true;
class DirectionalLightShadow extends LightShadow {
constructor() {
super( new OrthographicCamera( - 5, 5, 5, - 5, 0.5, 500 ) );
}
}
DirectionalLightShadow.prototype.isDirectionalLightShadow = true;
class DirectionalLight extends Light {
constructor( color, intensity ) {
super( color, intensity );
this.type = 'DirectionalLight';
this.position.copy( Object3D.DefaultUp );
this.updateMatrix();
this.target = new Object3D();
this.shadow = new DirectionalLightShadow();
}
dispose() {
this.shadow.dispose();
}
copy( source ) {
super.copy( source );
this.target = source.target.clone();
this.shadow = source.shadow.clone();
return this;
}
}
DirectionalLight.prototype.isDirectionalLight = true;
class LoaderUtils {
static decodeText( array ) {
if ( typeof TextDecoder !== 'undefined' ) {
return new TextDecoder().decode( array );
}
// Avoid the String.fromCharCode.apply(null, array) shortcut, which
// throws a "maximum call stack size exceeded" error for large arrays.
let s = '';
for ( let i = 0, il = array.length; i < il; i ++ ) {
// Implicitly assumes little-endian.
s += String.fromCharCode( array[ i ] );
}
try {
// merges multi-byte utf-8 characters.
return decodeURIComponent( escape( s ) );
} catch ( e ) { // see #16358
return s;
}
}
static extractUrlBase( url ) {
const index = url.lastIndexOf( '/' );
if ( index === - 1 ) return './';
return url.substr( 0, index + 1 );
}
}
class ImageBitmapLoader extends Loader {
constructor( manager ) {
super( manager );
if ( typeof createImageBitmap === 'undefined' ) {
console.warn( 'THREE.ImageBitmapLoader: createImageBitmap() not supported.' );
}
if ( typeof fetch === 'undefined' ) {
console.warn( 'THREE.ImageBitmapLoader: fetch() not supported.' );
}
this.options = { premultiplyAlpha: 'none' };
}
setOptions( options ) {
this.options = options;
return this;
}
load( url, onLoad, onProgress, onError ) {
if ( url === undefined ) url = '';
if ( this.path !== undefined ) url = this.path + url;
url = this.manager.resolveURL( url );
const scope = this;
const cached = Cache.get( url );
if ( cached !== undefined ) {
scope.manager.itemStart( url );
setTimeout( function () {
if ( onLoad ) onLoad( cached );
scope.manager.itemEnd( url );
}, 0 );
return cached;
}
const fetchOptions = {};
fetchOptions.credentials = ( this.crossOrigin === 'anonymous' ) ? 'same-origin' : 'include';
fetchOptions.headers = this.requestHeader;
fetch( url, fetchOptions ).then( function ( res ) {
return res.blob();
} ).then( function ( blob ) {
return createImageBitmap( blob, Object.assign( scope.options, { colorSpaceConversion: 'none' } ) );
} ).then( function ( imageBitmap ) {
Cache.add( url, imageBitmap );
if ( onLoad ) onLoad( imageBitmap );
scope.manager.itemEnd( url );
} ).catch( function ( e ) {
if ( onError ) onError( e );
scope.manager.itemError( url );
scope.manager.itemEnd( url );
} );
scope.manager.itemStart( url );
}
}
ImageBitmapLoader.prototype.isImageBitmapLoader = true;
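// Usage sketch (illustrative only; the texture path is hypothetical):
//
//   const loader = new ImageBitmapLoader();
//   loader.setOptions( { imageOrientation: 'flipY' } ); // standard createImageBitmap() options
//   loader.load( 'textures/diffuse.png', ( imageBitmap ) => {
//     const texture = new CanvasTexture( imageBitmap );
//   } );
//
// Note that setOptions() replaces the default { premultiplyAlpha: 'none' } object.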
class PropertyMixer {
constructor( binding, typeName, valueSize ) {
this.binding = binding;
this.valueSize = valueSize;
let mixFunction,
mixFunctionAdditive,
setIdentity;
// buffer layout: [ incoming | accu0 | accu1 | orig | addAccu | (optional work) ]
//
// interpolators can use .buffer as their .result
// the data then goes to 'incoming'
//
// 'accu0' and 'accu1' are used frame-interleaved for
// the cumulative result and are compared to detect
// changes
//
// 'orig' stores the original state of the property
//
// 'add' is used for additive cumulative results
//
// 'work' is optional and is only present for quaternion types. It is used
// to store intermediate quaternion multiplication results
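// For example, a quaternion track (valueSize = 4) allocates a Float64Array of
// 24 elements laid out as (indices are illustrative):
//   [ 0.. 3] incoming   [ 4.. 7] accu0     [ 8..11] accu1
//   [12..15] orig       [16..19] addAccu   [20..23] work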
switch ( typeName ) {
case 'quaternion':
mixFunction = this._slerp;
mixFunctionAdditive = this._slerpAdditive;
setIdentity = this._setAdditiveIdentityQuaternion;
this.buffer = new Float64Array( valueSize * 6 );
this._workIndex = 5;
break;
case 'string':
case 'bool':
mixFunction = this._select;
// Use the regular mix function for additive blending as well on these
// types; additive blending is not meaningful for non-numeric values.
mixFunctionAdditive = this._select;
setIdentity = this._setAdditiveIdentityOther;
this.buffer = new Array( valueSize * 5 );
break;
default:
mixFunction = this._lerp;
mixFunctionAdditive = this._lerpAdditive;
setIdentity = this._setAdditiveIdentityNumeric;
this.buffer = new Float64Array( valueSize * 5 );
}
this._mixBufferRegion = mixFunction;
this._mixBufferRegionAdditive = mixFunctionAdditive;
this._setIdentity = setIdentity;
this._origIndex = 3;
this._addIndex = 4;
this.cumulativeWeight = 0;
this.cumulativeWeightAdditive = 0;
this.useCount = 0;
this.referenceCount = 0;
}
// accumulate data in the 'incoming' region into 'accu<i>'
accumulate( accuIndex, weight ) {
// note: happily accumulating nothing when weight = 0, the caller knows
// the weight and shouldn't have made the call in the first place
const buffer = this.buffer,
stride = this.valueSize,
offset = accuIndex * stride + stride;
let currentWeight = this.cumulativeWeight;
if ( currentWeight === 0 ) {
// accuN := incoming * weight
for ( let i = 0; i !== stride; ++ i ) {
buffer[ offset + i ] = buffer[ i ];
}
currentWeight = weight;
} else {
// accuN := accuN + incoming * weight
currentWeight += weight;
const mix = weight / currentWeight;
this._mixBufferRegion( buffer, offset, 0, mix, stride );
}
this.cumulativeWeight = currentWeight;
}
// accumulate data in the 'incoming' region into 'add'
accumulateAdditive( weight ) {
const buffer = this.buffer,
stride = this.valueSize,
offset = stride * this._addIndex;
if ( this.cumulativeWeightAdditive === 0 ) {
// add = identity
this._setIdentity();
}
// add := add + incoming * weight
this._mixBufferRegionAdditive( buffer, offset, 0, weight, stride );
this.cumulativeWeightAdditive += weight;
}
// apply the state of 'accu<i>' to the binding when accus differ
apply( accuIndex ) {
const stride = this.valueSize,
buffer = this.buffer,
offset = accuIndex * stride + stride,
weight = this.cumulativeWeight,
weightAdditive = this.cumulativeWeightAdditive,
binding = this.binding;
this.cumulativeWeight = 0;
this.cumulativeWeightAdditive = 0;
if ( weight < 1 ) {
// accuN := accuN + original * ( 1 - cumulativeWeight )
const originalValueOffset = stride * this._origIndex;
this._mixBufferRegion(
buffer, offset, originalValueOffset, 1 - weight, stride );
}
if ( weightAdditive > 0 ) {
// accuN := accuN + addAccu
this._mixBufferRegionAdditive( buffer, offset, this._addIndex * stride, 1, stride );
}
for ( let i = stride, e = stride + stride; i !== e; ++ i ) {
if ( buffer[ i ] !== buffer[ i + stride ] ) {
// value has changed -> update scene graph
binding.setValue( buffer, offset );
break;
}
}
}
// remember the state of the bound property and copy it to both accus
saveOriginalState() {
const binding = this.binding;
const buffer = this.buffer,
stride = this.valueSize,
originalValueOffset = stride * this._origIndex;
binding.getValue( buffer, originalValueOffset );
// accu[0..1] := orig -- initially detect changes against the original
for ( let i = stride, e = originalValueOffset; i !== e; ++ i ) {
buffer[ i ] = buffer[ originalValueOffset + ( i % stride ) ];
}
// Add to identity for additive
this._setIdentity();
this.cumulativeWeight = 0;
this.cumulativeWeightAdditive = 0;
}
// apply the state previously taken via 'saveOriginalState' to the binding
restoreOriginalState() {
const originalValueOffset = this.valueSize * 3;
this.binding.setValue( this.buffer, originalValueOffset );
}
_setAdditiveIdentityNumeric() {
const startIndex = this._addIndex * this.valueSize;
const endIndex = startIndex + this.valueSize;
for ( let i = startIndex; i < endIndex; i ++ ) {
this.buffer[ i ] = 0;
}
}
_setAdditiveIdentityQuaternion() {
this._setAdditiveIdentityNumeric();
this.buffer[ this._addIndex * this.valueSize + 3 ] = 1;
}
_setAdditiveIdentityOther() {
const startIndex = this._origIndex * this.valueSize;
const targetIndex = this._addIndex * this.valueSize;
for ( let i = 0; i < this.valueSize; i ++ ) {
this.buffer[ targetIndex + i ] = this.buffer[ startIndex + i ];
}
}
// mix functions
_select( buffer, dstOffset, srcOffset, t, stride ) {
if ( t >= 0.5 ) {
for ( let i = 0; i !== stride; ++ i ) {
buffer[ dstOffset + i ] = buffer[ srcOffset + i ];
}
}
}
_slerp( buffer, dstOffset, srcOffset, t ) {
Quaternion.slerpFlat( buffer, dstOffset, buffer, dstOffset, buffer, srcOffset, t );
}
_slerpAdditive( buffer, dstOffset, srcOffset, t, stride ) {
const workOffset = this._workIndex * stride;
// Store result in intermediate buffer offset
Quaternion.multiplyQuaternionsFlat( buffer, workOffset, buffer, dstOffset, buffer, srcOffset );
// Slerp to the intermediate result
Quaternion.slerpFlat( buffer, dstOffset, buffer, dstOffset, buffer, workOffset, t );
}
_lerp( buffer, dstOffset, srcOffset, t, stride ) {
const s = 1 - t;
for ( let i = 0; i !== stride; ++ i ) {
const j = dstOffset + i;
buffer[ j ] = buffer[ j ] * s + buffer[ srcOffset + i ] * t;
}
}
_lerpAdditive( buffer, dstOffset, srcOffset, t, stride ) {
for ( let i = 0; i !== stride; ++ i ) {
const j = dstOffset + i;
buffer[ j ] = buffer[ j ] + buffer[ srcOffset + i ] * t;
}
}
}
// Characters [].:/ are reserved for track binding syntax.
const _RESERVED_CHARS_RE = '\\[\\]\\.:\\/';
const _reservedRe = new RegExp( '[' + _RESERVED_CHARS_RE + ']', 'g' );
// Attempts to allow node names from any language. ES5's `\w` regexp matches
// only latin characters, and the unicode \p{L} is not yet supported. So
// instead, we exclude reserved characters and match everything else.
const _wordChar = '[^' + _RESERVED_CHARS_RE + ']';
const _wordCharOrDot = '[^' + _RESERVED_CHARS_RE.replace( '\\.', '' ) + ']';
// Parent directories, delimited by '/' or ':'. Currently unused, but must
// be matched to parse the rest of the track name.
const _directoryRe = /((?:WC+[\/:])*)/.source.replace( 'WC', _wordChar );
// Target node. May contain word characters (a-zA-Z0-9_) and '.' or '-'.
const _nodeRe = /(WCOD+)?/.source.replace( 'WCOD', _wordCharOrDot );
// Object on target node, and accessor. May not contain reserved
// characters. Accessor may contain any character except closing bracket.
const _objectRe = /(?:\.(WC+)(?:\[(.+)\])?)?/.source.replace( 'WC', _wordChar );
// Property and accessor. May not contain reserved characters. Accessor may
// contain any non-bracket characters.
const _propertyRe = /\.(WC+)(?:\[(.+)\])?/.source.replace( 'WC', _wordChar );
const _trackRe = new RegExp( ''
+ '^'
+ _directoryRe
+ _nodeRe
+ _objectRe
+ _propertyRe
+ '$'
);
const _supportedObjectNames = [ 'material', 'materials', 'bones' ];
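// For example (names are illustrative), a track name like
// 'armature.bones[leftHand].position' parses into nodeName 'armature',
// objectName 'bones', objectIndex 'leftHand' and propertyName 'position',
// while 'mesh.morphTargetInfluences[smile]' yields propertyName
// 'morphTargetInfluences' with propertyIndex 'smile'.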
class Composite {
constructor( targetGroup, path, optionalParsedPath ) {
const parsedPath = optionalParsedPath || PropertyBinding.parseTrackName( path );
this._targetGroup = targetGroup;
this._bindings = targetGroup.subscribe_( path, parsedPath );
}
getValue( array, offset ) {
this.bind(); // bind all bindings
const firstValidIndex = this._targetGroup.nCachedObjects_,
binding = this._bindings[ firstValidIndex ];
// and only call .getValue on the first
if ( binding !== undefined ) binding.getValue( array, offset );
}
setValue( array, offset ) {
const bindings = this._bindings;
for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) {
bindings[ i ].setValue( array, offset );
}
}
bind() {
const bindings = this._bindings;
for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) {
bindings[ i ].bind();
}
}
unbind() {
const bindings = this._bindings;
for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) {
bindings[ i ].unbind();
}
}
}
// Note: This class uses a State pattern on a per-method basis:
// 'bind' sets 'this.getValue' / 'setValue' and shadows the
// prototype version of these methods with one that represents
// the bound state. When the property is not found, the methods
// become no-ops.
class PropertyBinding {
constructor( rootNode, path, parsedPath ) {
this.path = path;
this.parsedPath = parsedPath || PropertyBinding.parseTrackName( path );
this.node = PropertyBinding.findNode( rootNode, this.parsedPath.nodeName ) || rootNode;
this.rootNode = rootNode;
// initial state of these methods; they call 'bind' on first use
this.getValue = this._getValue_unbound;
this.setValue = this._setValue_unbound;
}
static create( root, path, parsedPath ) {
if ( ! ( root && root.isAnimationObjectGroup ) ) {
return new PropertyBinding( root, path, parsedPath );
} else {
return new PropertyBinding.Composite( root, path, parsedPath );
}
}
/**
* Replaces spaces with underscores and removes unsupported characters from
* node names, to ensure compatibility with parseTrackName().
*
* @param {string} name Node name to be sanitized.
* @return {string}
*/
static sanitizeNodeName( name ) {
return name.replace( /\s/g, '_' ).replace( _reservedRe, '' );
}
static parseTrackName( trackName ) {
const matches = _trackRe.exec( trackName );
if ( ! matches ) {
throw new Error( 'PropertyBinding: Cannot parse trackName: ' + trackName );
}
const results = {
// directoryName: matches[ 1 ], // (tschw) currently unused
nodeName: matches[ 2 ],
objectName: matches[ 3 ],
objectIndex: matches[ 4 ],
propertyName: matches[ 5 ], // required
propertyIndex: matches[ 6 ]
};
const lastDot = results.nodeName && results.nodeName.lastIndexOf( '.' );
if ( lastDot !== undefined && lastDot !== - 1 ) {
const objectName = results.nodeName.substring( lastDot + 1 );
// Object names must be checked against an allowlist. Otherwise, there
// is no way to parse 'foo.bar.baz': 'baz' must be a property, but
// 'bar' could be the objectName, or part of a nodeName (which can
// include '.' characters).
if ( _supportedObjectNames.indexOf( objectName ) !== - 1 ) {
results.nodeName = results.nodeName.substring( 0, lastDot );
results.objectName = objectName;
}
}
if ( results.propertyName === null || results.propertyName.length === 0 ) {
throw new Error( 'PropertyBinding: can not parse propertyName from trackName: ' + trackName );
}
return results;
}
static findNode( root, nodeName ) {
if ( ! nodeName || nodeName === '' || nodeName === '.' || nodeName === - 1 || nodeName === root.name || nodeName === root.uuid ) {
return root;
}
// search into skeleton bones.
if ( root.skeleton ) {
const bone = root.skeleton.getBoneByName( nodeName );
if ( bone !== undefined ) {
return bone;
}
}
// search into node subtree.
if ( root.children ) {
const searchNodeSubtree = function ( children ) {
for ( let i = 0; i < children.length; i ++ ) {
const childNode = children[ i ];
if ( childNode.name === nodeName || childNode.uuid === nodeName ) {
return childNode;
}
const result = searchNodeSubtree( childNode.children );
if ( result ) return result;
}
return null;
};
const subTreeNode = searchNodeSubtree( root.children );
if ( subTreeNode ) {
return subTreeNode;
}
}
return null;
}
// these are used to "bind" a nonexistent property
_getValue_unavailable() {}
_setValue_unavailable() {}
// Getters
_getValue_direct( buffer, offset ) {
buffer[ offset ] = this.node[ this.propertyName ];
}
_getValue_array( buffer, offset ) {
const source = this.resolvedProperty;
for ( let i = 0, n = source.length; i !== n; ++ i ) {
buffer[ offset ++ ] = source[ i ];
}
}
_getValue_arrayElement( buffer, offset ) {
buffer[ offset ] = this.resolvedProperty[ this.propertyIndex ];
}
_getValue_toArray( buffer, offset ) {
this.resolvedProperty.toArray( buffer, offset );
}
// Direct
_setValue_direct( buffer, offset ) {
this.targetObject[ this.propertyName ] = buffer[ offset ];
}
_setValue_direct_setNeedsUpdate( buffer, offset ) {
this.targetObject[ this.propertyName ] = buffer[ offset ];
this.targetObject.needsUpdate = true;
}
_setValue_direct_setMatrixWorldNeedsUpdate( buffer, offset ) {
this.targetObject[ this.propertyName ] = buffer[ offset ];
this.targetObject.matrixWorldNeedsUpdate = true;
}
// EntireArray
_setValue_array( buffer, offset ) {
const dest = this.resolvedProperty;
for ( let i = 0, n = dest.length; i !== n; ++ i ) {
dest[ i ] = buffer[ offset ++ ];
}
}
_setValue_array_setNeedsUpdate( buffer, offset ) {
const dest = this.resolvedProperty;
for ( let i = 0, n = dest.length; i !== n; ++ i ) {
dest[ i ] = buffer[ offset ++ ];
}
this.targetObject.needsUpdate = true;
}
_setValue_array_setMatrixWorldNeedsUpdate( buffer, offset ) {
const dest = this.resolvedProperty;
for ( let i = 0, n = dest.length; i !== n; ++ i ) {
dest[ i ] = buffer[ offset ++ ];
}
this.targetObject.matrixWorldNeedsUpdate = true;
}
// ArrayElement
_setValue_arrayElement( buffer, offset ) {
this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ];
}
_setValue_arrayElement_setNeedsUpdate( buffer, offset ) {
this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ];
this.targetObject.needsUpdate = true;
}
_setValue_arrayElement_setMatrixWorldNeedsUpdate( buffer, offset ) {
this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ];
this.targetObject.matrixWorldNeedsUpdate = true;
}
// HasToFromArray
_setValue_fromArray( buffer, offset ) {
this.resolvedProperty.fromArray( buffer, offset );
}
_setValue_fromArray_setNeedsUpdate( buffer, offset ) {
this.resolvedProperty.fromArray( buffer, offset );
this.targetObject.needsUpdate = true;
}
_setValue_fromArray_setMatrixWorldNeedsUpdate( buffer, offset ) {
this.resolvedProperty.fromArray( buffer, offset );
this.targetObject.matrixWorldNeedsUpdate = true;
}
_getValue_unbound( targetArray, offset ) {
this.bind();
this.getValue( targetArray, offset );
}
_setValue_unbound( sourceArray, offset ) {
this.bind();
this.setValue( sourceArray, offset );
}
// create getter / setter pair for a property in the scene graph
bind() {
let targetObject = this.node;
const parsedPath = this.parsedPath;
const objectName = parsedPath.objectName;
const propertyName = parsedPath.propertyName;
let propertyIndex = parsedPath.propertyIndex;
if ( ! targetObject ) {
targetObject = PropertyBinding.findNode( this.rootNode, parsedPath.nodeName ) || this.rootNode;
this.node = targetObject;
}
// set fail state so we can just 'return' on error
this.getValue = this._getValue_unavailable;
this.setValue = this._setValue_unavailable;
// ensure there is a value node
if ( ! targetObject ) {
console.error( 'THREE.PropertyBinding: Trying to update node for track: ' + this.path + ' but it wasn\'t found.' );
return;
}
if ( objectName ) {
let objectIndex = parsedPath.objectIndex;
// special cases where we need to reach deeper into the hierarchy to get the face materials....
switch ( objectName ) {
case 'materials':
if ( ! targetObject.material ) {
console.error( 'THREE.PropertyBinding: Can not bind to material as node does not have a material.', this );
return;
}
if ( ! targetObject.material.materials ) {
console.error( 'THREE.PropertyBinding: Can not bind to material.materials as node.material does not have a materials array.', this );
return;
}
targetObject = targetObject.material.materials;
break;
case 'bones':
if ( ! targetObject.skeleton ) {
console.error( 'THREE.PropertyBinding: Can not bind to bones as node does not have a skeleton.', this );
return;
}
// potential future optimization: skip this if propertyIndex is already an integer
// and convert the integer string to a true integer.
targetObject = targetObject.skeleton.bones;
// support resolving morphTarget names into indices.
for ( let i = 0; i < targetObject.length; i ++ ) {
if ( targetObject[ i ].name === objectIndex ) {
objectIndex = i;
break;
}
}
break;
default:
if ( targetObject[ objectName ] === undefined ) {
console.error( 'THREE.PropertyBinding: Can not bind to objectName of node undefined.', this );
return;
}
targetObject = targetObject[ objectName ];
}
if ( objectIndex !== undefined ) {
if ( targetObject[ objectIndex ] === undefined ) {
console.error( 'THREE.PropertyBinding: Trying to bind to objectIndex of objectName, but is undefined.', this, targetObject );
return;
}
targetObject = targetObject[ objectIndex ];
}
}
// resolve property
const nodeProperty = targetObject[ propertyName ];
if ( nodeProperty === undefined ) {
const nodeName = parsedPath.nodeName;
console.error( 'THREE.PropertyBinding: Trying to update property for track: ' + nodeName +
'.' + propertyName + ' but it wasn\'t found.', targetObject );
return;
}
// determine versioning scheme
let versioning = this.Versioning.None;
this.targetObject = targetObject;
if ( targetObject.needsUpdate !== undefined ) { // material
versioning = this.Versioning.NeedsUpdate;
} else if ( targetObject.matrixWorldNeedsUpdate !== undefined ) { // node transform
versioning = this.Versioning.MatrixWorldNeedsUpdate;
}
// determine how the property gets bound
let bindingType = this.BindingType.Direct;
if ( propertyIndex !== undefined ) {
// access a sub element of the property array (only primitives are supported right now)
if ( propertyName === 'morphTargetInfluences' ) {
// potential optimization, skip this if propertyIndex is already an integer, and convert the integer string to a true integer.
// support resolving morphTarget names into indices.
if ( ! targetObject.geometry ) {
console.error( 'THREE.PropertyBinding: Can not bind to morphTargetInfluences because node does not have a geometry.', this );
return;
}
if ( targetObject.geometry.isBufferGeometry ) {
if ( ! targetObject.geometry.morphAttributes ) {
console.error( 'THREE.PropertyBinding: Can not bind to morphTargetInfluences because node does not have a geometry.morphAttributes.', this );
return;
}
if ( targetObject.morphTargetDictionary[ propertyIndex ] !== undefined ) {
propertyIndex = targetObject.morphTargetDictionary[ propertyIndex ];
}
} else {
console.error( 'THREE.PropertyBinding: Can not bind to morphTargetInfluences on THREE.Geometry. Use THREE.BufferGeometry instead.', this );
return;
}
}
bindingType = this.BindingType.ArrayElement;
this.resolvedProperty = nodeProperty;
this.propertyIndex = propertyIndex;
} else if ( nodeProperty.fromArray !== undefined && nodeProperty.toArray !== undefined ) {
// must use copy for Object3D.Euler/Quaternion
bindingType = this.BindingType.HasFromToArray;
this.resolvedProperty = nodeProperty;
} else if ( Array.isArray( nodeProperty ) ) {
bindingType = this.BindingType.EntireArray;
this.resolvedProperty = nodeProperty;
} else {
this.propertyName = propertyName;
}
// select getter / setter
this.getValue = this.GetterByBindingType[ bindingType ];
this.setValue = this.SetterByBindingTypeAndVersioning[ bindingType ][ versioning ];
}
unbind() {
this.node = null;
// back to the prototype version of getValue / setValue
// note: avoid mutating the shape of 'this' via 'delete'
this.getValue = this._getValue_unbound;
this.setValue = this._setValue_unbound;
}
}
PropertyBinding.Composite = Composite;
PropertyBinding.prototype.BindingType = {
Direct: 0,
EntireArray: 1,
ArrayElement: 2,
HasFromToArray: 3
};
PropertyBinding.prototype.Versioning = {
None: 0,
NeedsUpdate: 1,
MatrixWorldNeedsUpdate: 2
};
PropertyBinding.prototype.GetterByBindingType = [
PropertyBinding.prototype._getValue_direct,
PropertyBinding.prototype._getValue_array,
PropertyBinding.prototype._getValue_arrayElement,
PropertyBinding.prototype._getValue_toArray,
];
PropertyBinding.prototype.SetterByBindingTypeAndVersioning = [
[
// Direct
PropertyBinding.prototype._setValue_direct,
PropertyBinding.prototype._setValue_direct_setNeedsUpdate,
PropertyBinding.prototype._setValue_direct_setMatrixWorldNeedsUpdate,
], [
// EntireArray
PropertyBinding.prototype._setValue_array,
PropertyBinding.prototype._setValue_array_setNeedsUpdate,
PropertyBinding.prototype._setValue_array_setMatrixWorldNeedsUpdate,
], [
// ArrayElement
PropertyBinding.prototype._setValue_arrayElement,
PropertyBinding.prototype._setValue_arrayElement_setNeedsUpdate,
PropertyBinding.prototype._setValue_arrayElement_setMatrixWorldNeedsUpdate,
], [
// HasToFromArray
PropertyBinding.prototype._setValue_fromArray,
PropertyBinding.prototype._setValue_fromArray_setNeedsUpdate,
PropertyBinding.prototype._setValue_fromArray_setMatrixWorldNeedsUpdate,
]
];
class AnimationAction {
constructor( mixer, clip, localRoot = null, blendMode = clip.blendMode ) {
this._mixer = mixer;
this._clip = clip;
this._localRoot = localRoot;
this.blendMode = blendMode;
const tracks = clip.tracks,
nTracks = tracks.length,
interpolants = new Array( nTracks );
const interpolantSettings = {
endingStart: ZeroCurvatureEnding,
endingEnd: ZeroCurvatureEnding
};
for ( let i = 0; i !== nTracks; ++ i ) {
const interpolant = tracks[ i ].createInterpolant( null );
interpolants[ i ] = interpolant;
interpolant.settings = interpolantSettings;
}
this._interpolantSettings = interpolantSettings;
this._interpolants = interpolants; // bound by the mixer
// inside: PropertyMixer (managed by the mixer)
this._propertyBindings = new Array( nTracks );
this._cacheIndex = null; // for the memory manager
this._byClipCacheIndex = null; // for the memory manager
this._timeScaleInterpolant = null;
this._weightInterpolant = null;
this.loop = LoopRepeat;
this._loopCount = - 1;
// global mixer time when the action is to be started
// it's set back to 'null' upon start of the action
this._startTime = null;
// scaled local time of the action
// gets clamped or wrapped to 0..clip.duration according to loop
this.time = 0;
this.timeScale = 1;
this._effectiveTimeScale = 1;
this.weight = 1;
this._effectiveWeight = 1;
this.repetitions = Infinity; // no. of repetitions when looping
this.paused = false; // true -> zero effective time scale
this.enabled = true; // false -> zero effective weight
this.clampWhenFinished = false;// keep feeding the last frame?
this.zeroSlopeAtStart = true;// for smooth interpolation w/o separate
this.zeroSlopeAtEnd = true;// clips for start, loop and end
}
// State & Scheduling
play() {
this._mixer._activateAction( this );
return this;
}
stop() {
this._mixer._deactivateAction( this );
return this.reset();
}
reset() {
this.paused = false;
this.enabled = true;
this.time = 0; // restart clip
this._loopCount = - 1;// forget previous loops
this._startTime = null;// forget scheduling
return this.stopFading().stopWarping();
}
isRunning() {
return this.enabled && ! this.paused && this.timeScale !== 0 &&
this._startTime === null && this._mixer._isActiveAction( this );
}
// return true when play has been called
isScheduled() {
return this._mixer._isActiveAction( this );
}
startAt( time ) {
this._startTime = time;
return this;
}
setLoop( mode, repetitions ) {
this.loop = mode;
this.repetitions = repetitions;
return this;
}
// Weight
// set the weight stopping any scheduled fading
// although .enabled = false yields an effective weight of zero, this
// method does *not* change .enabled, because it would be confusing
setEffectiveWeight( weight ) {
this.weight = weight;
// note: same logic as when updated at runtime
this._effectiveWeight = this.enabled ? weight : 0;
return this.stopFading();
}
// return the weight considering fading and .enabled
getEffectiveWeight() {
return this._effectiveWeight;
}
fadeIn( duration ) {
return this._scheduleFading( duration, 0, 1 );
}
fadeOut( duration ) {
return this._scheduleFading( duration, 1, 0 );
}
crossFadeFrom( fadeOutAction, duration, warp ) {
fadeOutAction.fadeOut( duration );
this.fadeIn( duration );
if ( warp ) {
const fadeInDuration = this._clip.duration,
fadeOutDuration = fadeOutAction._clip.duration,
startEndRatio = fadeOutDuration / fadeInDuration,
endStartRatio = fadeInDuration / fadeOutDuration;
fadeOutAction.warp( 1.0, startEndRatio, duration );
this.warp( endStartRatio, 1.0, duration );
}
return this;
}
crossFadeTo( fadeInAction, duration, warp ) {
return fadeInAction.crossFadeFrom( this, duration, warp );
}
stopFading() {
const weightInterpolant = this._weightInterpolant;
if ( weightInterpolant !== null ) {
this._weightInterpolant = null;
this._mixer._takeBackControlInterpolant( weightInterpolant );
}
return this;
}
// Time Scale Control
// set the time scale stopping any scheduled warping
// although .paused = true yields an effective time scale of zero, this
// method does *not* change .paused, because it would be confusing
setEffectiveTimeScale( timeScale ) {
this.timeScale = timeScale;
this._effectiveTimeScale = this.paused ? 0 : timeScale;
return this.stopWarping();
}
// return the time scale considering warping and .paused
getEffectiveTimeScale() {
return this._effectiveTimeScale;
}
setDuration( duration ) {
this.timeScale = this._clip.duration / duration;
return this.stopWarping();
}
syncWith( action ) {
this.time = action.time;
this.timeScale = action.timeScale;
return this.stopWarping();
}
halt( duration ) {
return this.warp( this._effectiveTimeScale, 0, duration );
}
warp( startTimeScale, endTimeScale, duration ) {
const mixer = this._mixer,
now = mixer.time,
timeScale = this.timeScale;
let interpolant = this._timeScaleInterpolant;
if ( interpolant === null ) {
interpolant = mixer._lendControlInterpolant();
this._timeScaleInterpolant = interpolant;
}
const times = interpolant.parameterPositions,
values = interpolant.sampleValues;
times[ 0 ] = now;
times[ 1 ] = now + duration;
values[ 0 ] = startTimeScale / timeScale;
values[ 1 ] = endTimeScale / timeScale;
return this;
}
stopWarping() {
const timeScaleInterpolant = this._timeScaleInterpolant;
if ( timeScaleInterpolant !== null ) {
this._timeScaleInterpolant = null;
this._mixer._takeBackControlInterpolant( timeScaleInterpolant );
}
return this;
}
// Object Accessors
getMixer() {
return this._mixer;
}
getClip() {
return this._clip;
}
getRoot() {
return this._localRoot || this._mixer._root;
}
// Internals
_update( time, deltaTime, timeDirection, accuIndex ) {
// called by the mixer
if ( ! this.enabled ) {
// call ._updateWeight() to update ._effectiveWeight
this._updateWeight( time );
return;
}
const startTime = this._startTime;
if ( startTime !== null ) {
// check for scheduled start of action
const timeRunning = ( time - startTime ) * timeDirection;
if ( timeRunning < 0 || timeDirection === 0 ) {
return; // yet to come / don't decide when delta = 0
}
// start
this._startTime = null; // unschedule
deltaTime = timeDirection * timeRunning;
}
// apply time scale and advance time
deltaTime *= this._updateTimeScale( time );
const clipTime = this._updateTime( deltaTime );
// note: _updateTime may disable the action resulting in
// an effective weight of 0
const weight = this._updateWeight( time );
if ( weight > 0 ) {
const interpolants = this._interpolants;
const propertyMixers = this._propertyBindings;
switch ( this.blendMode ) {
case AdditiveAnimationBlendMode:
for ( let j = 0, m = interpolants.length; j !== m; ++ j ) {
interpolants[ j ].evaluate( clipTime );
propertyMixers[ j ].accumulateAdditive( weight );
}
break;
case NormalAnimationBlendMode:
default:
for ( let j = 0, m = interpolants.length; j !== m; ++ j ) {
interpolants[ j ].evaluate( clipTime );
propertyMixers[ j ].accumulate( accuIndex, weight );
}
}
}
}
_updateWeight( time ) {
let weight = 0;
if ( this.enabled ) {
weight = this.weight;
const interpolant = this._weightInterpolant;
if ( interpolant !== null ) {
const interpolantValue = interpolant.evaluate( time )[ 0 ];
weight *= interpolantValue;
if ( time > interpolant.parameterPositions[ 1 ] ) {
this.stopFading();
if ( interpolantValue === 0 ) {
// faded out, disable
this.enabled = false;
}
}
}
}
this._effectiveWeight = weight;
return weight;
}
_updateTimeScale( time ) {
let timeScale = 0;
if ( ! this.paused ) {
timeScale = this.timeScale;
const interpolant = this._timeScaleInterpolant;
if ( interpolant !== null ) {
const interpolantValue = interpolant.evaluate( time )[ 0 ];
timeScale *= interpolantValue;
if ( time > interpolant.parameterPositions[ 1 ] ) {
this.stopWarping();
if ( timeScale === 0 ) {
// motion has halted, pause
this.paused = true;
} else {
// warp done - apply final time scale
this.timeScale = timeScale;
}
}
}
}
this._effectiveTimeScale = timeScale;
return timeScale;
}
_updateTime( deltaTime ) {
const duration = this._clip.duration;
const loop = this.loop;
let time = this.time + deltaTime;
let loopCount = this._loopCount;
const pingPong = ( loop === LoopPingPong );
if ( deltaTime === 0 ) {
if ( loopCount === - 1 ) return time;
return ( pingPong && ( loopCount & 1 ) === 1 ) ? duration - time : time;
}
if ( loop === LoopOnce ) {
if ( loopCount === - 1 ) {
// just started
this._loopCount = 0;
this._setEndings( true, true, false );
}
handle_stop: {
if ( time >= duration ) {
time = duration;
} else if ( time < 0 ) {
time = 0;
} else {
this.time = time;
break handle_stop;
}
if ( this.clampWhenFinished ) this.paused = true;
else this.enabled = false;
this.time = time;
this._mixer.dispatchEvent( {
type: 'finished', action: this,
direction: deltaTime < 0 ? - 1 : 1
} );
}
} else { // repetitive Repeat or PingPong
if ( loopCount === - 1 ) {
// just started
if ( deltaTime >= 0 ) {
loopCount = 0;
this._setEndings( true, this.repetitions === 0, pingPong );
} else {
// when looping in reverse direction, the initial
// transition through zero counts as a repetition,
// so leave loopCount at -1
this._setEndings( this.repetitions === 0, true, pingPong );
}
}
if ( time >= duration || time < 0 ) {
// wrap around
const loopDelta = Math.floor( time / duration ); // signed
time -= duration * loopDelta;
loopCount += Math.abs( loopDelta );
const pending = this.repetitions - loopCount;
if ( pending <= 0 ) {
// have to stop (switch state, clamp time, fire event)
if ( this.clampWhenFinished ) this.paused = true;
else this.enabled = false;
time = deltaTime > 0 ? duration : 0;
this.time = time;
this._mixer.dispatchEvent( {
type: 'finished', action: this,
direction: deltaTime > 0 ? 1 : - 1
} );
} else {
// keep running
if ( pending === 1 ) {
// entering the last round
const atStart = deltaTime < 0;
this._setEndings( atStart, ! atStart, pingPong );
} else {
this._setEndings( false, false, pingPong );
}
this._loopCount = loopCount;
this.time = time;
this._mixer.dispatchEvent( {
type: 'loop', action: this, loopDelta: loopDelta
} );
}
} else {
this.time = time;
}
if ( pingPong && ( loopCount & 1 ) === 1 ) {
// invert time for the "pong round"
return duration - time;
}
}
return time;
}
_setEndings( atStart, atEnd, pingPong ) {
const settings = this._interpolantSettings;
if ( pingPong ) {
settings.endingStart = ZeroSlopeEnding;
settings.endingEnd = ZeroSlopeEnding;
} else {
// assuming for LoopOnce atStart == atEnd == true
if ( atStart ) {
settings.endingStart = this.zeroSlopeAtStart ? ZeroSlopeEnding : ZeroCurvatureEnding;
} else {
settings.endingStart = WrapAroundEnding;
}
if ( atEnd ) {
settings.endingEnd = this.zeroSlopeAtEnd ? ZeroSlopeEnding : ZeroCurvatureEnding;
} else {
settings.endingEnd = WrapAroundEnding;
}
}
}
_scheduleFading( duration, weightNow, weightThen ) {
const mixer = this._mixer, now = mixer.time;
let interpolant = this._weightInterpolant;
if ( interpolant === null ) {
interpolant = mixer._lendControlInterpolant();
this._weightInterpolant = interpolant;
}
const times = interpolant.parameterPositions,
values = interpolant.sampleValues;
times[ 0 ] = now;
values[ 0 ] = weightNow;
times[ 1 ] = now + duration;
values[ 1 ] = weightThen;
return this;
}
}
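// Usage sketch (illustrative only; `mixer` is an AnimationMixer and `clip` an
// AnimationClip obtained elsewhere): play a clip once, hold its last frame and
// fade it in over half a second:
//
//   const action = mixer.clipAction( clip );
//   action.setLoop( LoopOnce, 1 );
//   action.clampWhenFinished = true;
//   action.fadeIn( 0.5 ).play();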
class AnimationMixer extends EventDispatcher {
constructor( root ) {
super();
this._root = root;
this._initMemoryManager();
this._accuIndex = 0;
this.time = 0;
this.timeScale = 1.0;
}
_bindAction( action, prototypeAction ) {
const root = action._localRoot || this._root,
tracks = action._clip.tracks,
nTracks = tracks.length,
bindings = action._propertyBindings,
interpolants = action._interpolants,
rootUuid = root.uuid,
bindingsByRoot = this._bindingsByRootAndName;
let bindingsByName = bindingsByRoot[ rootUuid ];
if ( bindingsByName === undefined ) {
bindingsByName = {};
bindingsByRoot[ rootUuid ] = bindingsByName;
}
for ( let i = 0; i !== nTracks; ++ i ) {
const track = tracks[ i ],
trackName = track.name;
let binding = bindingsByName[ trackName ];
if ( binding !== undefined ) {
bindings[ i ] = binding;
} else {
binding = bindings[ i ];
if ( binding !== undefined ) {
// existing binding, make sure the cache knows
if ( binding._cacheIndex === null ) {
++ binding.referenceCount;
this._addInactiveBinding( binding, rootUuid, trackName );
}
continue;
}
const path = prototypeAction && prototypeAction.
_propertyBindings[ i ].binding.parsedPath;
binding = new PropertyMixer(
PropertyBinding.create( root, trackName, path ),
track.ValueTypeName, track.getValueSize() );
++ binding.referenceCount;
this._addInactiveBinding( binding, rootUuid, trackName );
bindings[ i ] = binding;
}
interpolants[ i ].resultBuffer = binding.buffer;
}
}
_activateAction( action ) {
if ( ! this._isActiveAction( action ) ) {
if ( action._cacheIndex === null ) {
// this action has been forgotten by the cache, but the user
// appears to be still using it -> rebind
const rootUuid = ( action._localRoot || this._root ).uuid,
clipUuid = action._clip.uuid,
actionsForClip = this._actionsByClip[ clipUuid ];
this._bindAction( action,
actionsForClip && actionsForClip.knownActions[ 0 ] );
this._addInactiveAction( action, clipUuid, rootUuid );
}
const bindings = action._propertyBindings;
// increment reference counts / sort out state
for ( let i = 0, n = bindings.length; i !== n; ++ i ) {
const binding = bindings[ i ];
if ( binding.useCount ++ === 0 ) {
this._lendBinding( binding );
binding.saveOriginalState();
}
}
this._lendAction( action );
}
}
_deactivateAction( action ) {
if ( this._isActiveAction( action ) ) {
const bindings = action._propertyBindings;
// decrement reference counts / sort out state
for ( let i = 0, n = bindings.length; i !== n; ++ i ) {
const binding = bindings[ i ];
if ( -- binding.useCount === 0 ) {
binding.restoreOriginalState();
this._takeBackBinding( binding );
}
}
this._takeBackAction( action );
}
}
// Memory manager
_initMemoryManager() {
this._actions = []; // 'nActiveActions' followed by inactive ones
this._nActiveActions = 0;
this._actionsByClip = {};
// inside:
// {
// knownActions: Array< AnimationAction > - used as prototypes
// actionByRoot: AnimationAction - lookup
// }
this._bindings = []; // 'nActiveBindings' followed by inactive ones
this._nActiveBindings = 0;
this._bindingsByRootAndName = {}; // inside: Map< name, PropertyMixer >
this._controlInterpolants = []; // same game as above
this._nActiveControlInterpolants = 0;
const scope = this;
this.stats = {
actions: {
get total() {
return scope._actions.length;
},
get inUse() {
return scope._nActiveActions;
}
},
bindings: {
get total() {
return scope._bindings.length;
},
get inUse() {
return scope._nActiveBindings;
}
},
controlInterpolants: {
get total() {
return scope._controlInterpolants.length;
},
get inUse() {
return scope._nActiveControlInterpolants;
}
}
};
}
// Memory management for AnimationAction objects
_isActiveAction( action ) {
const index = action._cacheIndex;
return index !== null && index < this._nActiveActions;
}
_addInactiveAction( action, clipUuid, rootUuid ) {
const actions = this._actions,
actionsByClip = this._actionsByClip;
let actionsForClip = actionsByClip[ clipUuid ];
if ( actionsForClip === undefined ) {
actionsForClip = {
knownActions: [ action ],
actionByRoot: {}
};
action._byClipCacheIndex = 0;
actionsByClip[ clipUuid ] = actionsForClip;
} else {
const knownActions = actionsForClip.knownActions;
action._byClipCacheIndex = knownActions.length;
knownActions.push( action );
}
action._cacheIndex = actions.length;
actions.push( action );
actionsForClip.actionByRoot[ rootUuid ] = action;
}
_removeInactiveAction( action ) {
const actions = this._actions,
lastInactiveAction = actions[ actions.length - 1 ],
cacheIndex = action._cacheIndex;
lastInactiveAction._cacheIndex = cacheIndex;
actions[ cacheIndex ] = lastInactiveAction;
actions.pop();
action._cacheIndex = null;
const clipUuid = action._clip.uuid,
actionsByClip = this._actionsByClip,
actionsForClip = actionsByClip[ clipUuid ],
knownActionsForClip = actionsForClip.knownActions,
lastKnownAction =
knownActionsForClip[ knownActionsForClip.length - 1 ],
byClipCacheIndex = action._byClipCacheIndex;
lastKnownAction._byClipCacheIndex = byClipCacheIndex;
knownActionsForClip[ byClipCacheIndex ] = lastKnownAction;
knownActionsForClip.pop();
action._byClipCacheIndex = null;
const actionByRoot = actionsForClip.actionByRoot,
rootUuid = ( action._localRoot || this._root ).uuid;
delete actionByRoot[ rootUuid ];
if ( knownActionsForClip.length === 0 ) {
delete actionsByClip[ clipUuid ];
}
this._removeInactiveBindingsForAction( action );
}
_removeInactiveBindingsForAction( action ) {
const bindings = action._propertyBindings;
for ( let i = 0, n = bindings.length; i !== n; ++ i ) {
const binding = bindings[ i ];
if ( -- binding.referenceCount === 0 ) {
this._removeInactiveBinding( binding );
}
}
}
_lendAction( action ) {
// [ active actions | inactive actions ]
// [ active actions >| inactive actions ]
// s a
// <-swap->
// a s
const actions = this._actions,
prevIndex = action._cacheIndex,
lastActiveIndex = this._nActiveActions ++,
firstInactiveAction = actions[ lastActiveIndex ];
action._cacheIndex = lastActiveIndex;
actions[ lastActiveIndex ] = action;
firstInactiveAction._cacheIndex = prevIndex;
actions[ prevIndex ] = firstInactiveAction;
}
_takeBackAction( action ) {
// [ active actions | inactive actions ]
// [ active actions |< inactive actions ]
// a s
// <-swap->
// s a
const actions = this._actions,
prevIndex = action._cacheIndex,
firstInactiveIndex = -- this._nActiveActions,
lastActiveAction = actions[ firstInactiveIndex ];
action._cacheIndex = firstInactiveIndex;
actions[ firstInactiveIndex ] = action;
lastActiveAction._cacheIndex = prevIndex;
actions[ prevIndex ] = lastActiveAction;
}
// Memory management for PropertyMixer objects
_addInactiveBinding( binding, rootUuid, trackName ) {
const bindingsByRoot = this._bindingsByRootAndName,
bindings = this._bindings;
let bindingByName = bindingsByRoot[ rootUuid ];
if ( bindingByName === undefined ) {
bindingByName = {};
bindingsByRoot[ rootUuid ] = bindingByName;
}
bindingByName[ trackName ] = binding;
binding._cacheIndex = bindings.length;
bindings.push( binding );
}
_removeInactiveBinding( binding ) {
const bindings = this._bindings,
propBinding = binding.binding,
rootUuid = propBinding.rootNode.uuid,
trackName = propBinding.path,
bindingsByRoot = this._bindingsByRootAndName,
bindingByName = bindingsByRoot[ rootUuid ],
lastInactiveBinding = bindings[ bindings.length - 1 ],
cacheIndex = binding._cacheIndex;
lastInactiveBinding._cacheIndex = cacheIndex;
bindings[ cacheIndex ] = lastInactiveBinding;
bindings.pop();
delete bindingByName[ trackName ];
if ( Object.keys( bindingByName ).length === 0 ) {
delete bindingsByRoot[ rootUuid ];
}
}
_lendBinding( binding ) {
const bindings = this._bindings,
prevIndex = binding._cacheIndex,
lastActiveIndex = this._nActiveBindings ++,
firstInactiveBinding = bindings[ lastActiveIndex ];
binding._cacheIndex = lastActiveIndex;
bindings[ lastActiveIndex ] = binding;
firstInactiveBinding._cacheIndex = prevIndex;
bindings[ prevIndex ] = firstInactiveBinding;
}
_takeBackBinding( binding ) {
const bindings = this._bindings,
prevIndex = binding._cacheIndex,
firstInactiveIndex = -- this._nActiveBindings,
lastActiveBinding = bindings[ firstInactiveIndex ];
binding._cacheIndex = firstInactiveIndex;
bindings[ firstInactiveIndex ] = binding;
lastActiveBinding._cacheIndex = prevIndex;
bindings[ prevIndex ] = lastActiveBinding;
}
// Memory management of Interpolants for weight and time scale
_lendControlInterpolant() {
const interpolants = this._controlInterpolants,
lastActiveIndex = this._nActiveControlInterpolants ++;
let interpolant = interpolants[ lastActiveIndex ];
if ( interpolant === undefined ) {
interpolant = new LinearInterpolant(
new Float32Array( 2 ), new Float32Array( 2 ),
1, this._controlInterpolantsResultBuffer );
interpolant.__cacheIndex = lastActiveIndex;
interpolants[ lastActiveIndex ] = interpolant;
}
return interpolant;
}
_takeBackControlInterpolant( interpolant ) {
const interpolants = this._controlInterpolants,
prevIndex = interpolant.__cacheIndex,
firstInactiveIndex = -- this._nActiveControlInterpolants,
lastActiveInterpolant = interpolants[ firstInactiveIndex ];
interpolant.__cacheIndex = firstInactiveIndex;
interpolants[ firstInactiveIndex ] = interpolant;
lastActiveInterpolant.__cacheIndex = prevIndex;
interpolants[ prevIndex ] = lastActiveInterpolant;
}
// return an action for a clip optionally using a custom root target
// object (this method allocates a lot of dynamic memory in case a
// previously unknown clip/root combination is specified)
clipAction( clip, optionalRoot, blendMode ) {
const root = optionalRoot || this._root,
rootUuid = root.uuid;
let clipObject = typeof clip === 'string' ? AnimationClip.findByName( root, clip ) : clip;
const clipUuid = clipObject !== null ? clipObject.uuid : clip;
const actionsForClip = this._actionsByClip[ clipUuid ];
let prototypeAction = null;
if ( blendMode === undefined ) {
if ( clipObject !== null ) {
blendMode = clipObject.blendMode;
} else {
blendMode = NormalAnimationBlendMode;
}
}
if ( actionsForClip !== undefined ) {
const existingAction = actionsForClip.actionByRoot[ rootUuid ];
if ( existingAction !== undefined && existingAction.blendMode === blendMode ) {
return existingAction;
}
// we know the clip, so we don't have to parse all
// the bindings again but can just copy
prototypeAction = actionsForClip.knownActions[ 0 ];
// also, take the clip from the prototype action
if ( clipObject === null )
clipObject = prototypeAction._clip;
}
// clip must be known when specified via string
if ( clipObject === null ) return null;
// allocate all resources required to run it
const newAction = new AnimationAction( this, clipObject, optionalRoot, blendMode );
this._bindAction( newAction, prototypeAction );
// and make the action known to the memory manager
this._addInactiveAction( newAction, clipUuid, rootUuid );
return newAction;
}
// get an existing action
existingAction( clip, optionalRoot ) {
const root = optionalRoot || this._root,
rootUuid = root.uuid,
clipObject = typeof clip === 'string' ?
AnimationClip.findByName( root, clip ) : clip,
clipUuid = clipObject ? clipObject.uuid : clip,
actionsForClip = this._actionsByClip[ clipUuid ];
if ( actionsForClip !== undefined ) {
return actionsForClip.actionByRoot[ rootUuid ] || null;
}
return null;
}
// deactivates all previously scheduled actions
stopAllAction() {
const actions = this._actions,
nActions = this._nActiveActions;
for ( let i = nActions - 1; i >= 0; -- i ) {
actions[ i ].stop();
}
return this;
}
// advance the time and apply the animation
update( deltaTime ) {
deltaTime *= this.timeScale;
const actions = this._actions,
nActions = this._nActiveActions,
time = this.time += deltaTime,
timeDirection = Math.sign( deltaTime ),
accuIndex = this._accuIndex ^= 1;
// run active actions
for ( let i = 0; i !== nActions; ++ i ) {
const action = actions[ i ];
action._update( time, deltaTime, timeDirection, accuIndex );
}
// update scene graph
const bindings = this._bindings,
nBindings = this._nActiveBindings;
for ( let i = 0; i !== nBindings; ++ i ) {
bindings[ i ].apply( accuIndex );
}
return this;
}
// Allows you to seek to a specific time in an animation.
setTime( timeInSeconds ) {
this.time = 0; // Zero out time attribute for AnimationMixer object;
for ( let i = 0; i < this._actions.length; i ++ ) {
this._actions[ i ].time = 0; // Zero out time attribute for all associated AnimationAction objects.
}
return this.update( timeInSeconds ); // Update used to set exact time. Returns "this" AnimationMixer object.
}
// return this mixer's root target object
getRoot() {
return this._root;
}
// free all resources specific to a particular clip
uncacheClip( clip ) {
const actions = this._actions,
clipUuid = clip.uuid,
actionsByClip = this._actionsByClip,
actionsForClip = actionsByClip[ clipUuid ];
if ( actionsForClip !== undefined ) {
// note: just calling _removeInactiveAction would mess up the
// iteration state and also require updating the state we can
// just throw away
const actionsToRemove = actionsForClip.knownActions;
for ( let i = 0, n = actionsToRemove.length; i !== n; ++ i ) {
const action = actionsToRemove[ i ];
this._deactivateAction( action );
const cacheIndex = action._cacheIndex,
lastInactiveAction = actions[ actions.length - 1 ];
action._cacheIndex = null;
action._byClipCacheIndex = null;
lastInactiveAction._cacheIndex = cacheIndex;
actions[ cacheIndex ] = lastInactiveAction;
actions.pop();
this._removeInactiveBindingsForAction( action );
}
delete actionsByClip[ clipUuid ];
}
}
// free all resources specific to a particular root target object
uncacheRoot( root ) {
const rootUuid = root.uuid,
actionsByClip = this._actionsByClip;
for ( const clipUuid in actionsByClip ) {
const actionByRoot = actionsByClip[ clipUuid ].actionByRoot,
action = actionByRoot[ rootUuid ];
if ( action !== undefined ) {
this._deactivateAction( action );
this._removeInactiveAction( action );
}
}
const bindingsByRoot = this._bindingsByRootAndName,
bindingByName = bindingsByRoot[ rootUuid ];
if ( bindingByName !== undefined ) {
for ( const trackName in bindingByName ) {
const binding = bindingByName[ trackName ];
binding.restoreOriginalState();
this._removeInactiveBinding( binding );
}
}
}
// remove a targeted clip from the cache
uncacheAction( clip, optionalRoot ) {
const action = this.existingAction( clip, optionalRoot );
if ( action !== null ) {
this._deactivateAction( action );
this._removeInactiveAction( action );
}
}
}
AnimationMixer.prototype._controlInterpolantsResultBuffer = new Float32Array( 1 );
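// Usage sketch (illustrative only; `model`, `renderer`, `scene` and `camera`
// are assumed to exist): drive the mixer from the render loop with the elapsed
// frame time, using Clock as the frame timer:
//
//   const mixer = new AnimationMixer( model );
//   const clock = new Clock();
//   function animate() {
//     requestAnimationFrame( animate );
//     mixer.update( clock.getDelta() );
//     renderer.render( scene, camera );
//   }
//   animate();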
class Raycaster {
constructor( origin, direction, near = 0, far = Infinity ) {
this.ray = new Ray( origin, direction );
// direction is assumed to be normalized (for accurate distance calculations)
this.near = near;
this.far = far;
this.camera = null;
this.layers = new Layers();
this.params = {
Mesh: {},
Line: { threshold: 1 },
LOD: {},
Points: { threshold: 1 },
Sprite: {}
};
}
set( origin, direction ) {
// direction is assumed to be normalized (for accurate distance calculations)
this.ray.set( origin, direction );
}
setFromCamera( coords, camera ) {
if ( camera && camera.isPerspectiveCamera ) {
this.ray.origin.setFromMatrixPosition( camera.matrixWorld );
this.ray.direction.set( coords.x, coords.y, 0.5 ).unproject( camera ).sub( this.ray.origin ).normalize();
this.camera = camera;
} else if ( camera && camera.isOrthographicCamera ) {
this.ray.origin.set( coords.x, coords.y, ( camera.near + camera.far ) / ( camera.near - camera.far ) ).unproject( camera ); // set origin in plane of camera
this.ray.direction.set( 0, 0, - 1 ).transformDirection( camera.matrixWorld );
this.camera = camera;
} else {
console.error( 'THREE.Raycaster: Unsupported camera type: ' + camera.type );
}
}
intersectObject( object, recursive = false, intersects = [] ) {
intersectObject( object, this, intersects, recursive );
intersects.sort( ascSort );
return intersects;
}
intersectObjects( objects, recursive = false, intersects = [] ) {
for ( let i = 0, l = objects.length; i < l; i ++ ) {
intersectObject( objects[ i ], this, intersects, recursive );
}
intersects.sort( ascSort );
return intersects;
}
}
function ascSort( a, b ) {
return a.distance - b.distance;
}
function intersectObject( object, raycaster, intersects, recursive ) {
if ( object.layers.test( raycaster.layers ) ) {
object.raycast( raycaster, intersects );
}
if ( recursive === true ) {
const children = object.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
intersectObject( children[ i ], raycaster, intersects, true );
}
}
}
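// Usage sketch (illustrative only; `camera` and `scene` are assumed to exist):
// convert a pointer event to normalized device coordinates (-1..1) and pick the
// closest intersected object:
//
//   const raycaster = new Raycaster();
//   const pointer = new Vector2();
//   window.addEventListener( 'pointerdown', ( event ) => {
//     pointer.x = ( event.clientX / window.innerWidth ) * 2 - 1;
//     pointer.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
//     raycaster.setFromCamera( pointer, camera );
//     const hits = raycaster.intersectObjects( scene.children, true );
//     if ( hits.length > 0 ) console.log( hits[ 0 ].object );
//   } );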
/**
* Ref: https://en.wikipedia.org/wiki/Spherical_coordinate_system
*
* The polar angle (phi) is measured from the positive y-axis. The positive y-axis is up.
* The azimuthal angle (theta) is measured from the positive z-axis.
*/
class Spherical {
constructor( radius = 1, phi = 0, theta = 0 ) {
this.radius = radius;
this.phi = phi; // polar angle
this.theta = theta; // azimuthal angle
return this;
}
set( radius, phi, theta ) {
this.radius = radius;
this.phi = phi;
this.theta = theta;
return this;
}
copy( other ) {
this.radius = other.radius;
this.phi = other.phi;
this.theta = other.theta;
return this;
}
// restrict phi to be between EPS and PI-EPS
makeSafe() {
const EPS = 0.000001;
this.phi = Math.max( EPS, Math.min( Math.PI - EPS, this.phi ) );
return this;
}
setFromVector3( v ) {
return this.setFromCartesianCoords( v.x, v.y, v.z );
}
setFromCartesianCoords( x, y, z ) {
this.radius = Math.sqrt( x * x + y * y + z * z );
if ( this.radius === 0 ) {
this.theta = 0;
this.phi = 0;
} else {
this.theta = Math.atan2( x, z );
this.phi = Math.acos( clamp$1( y / this.radius, - 1, 1 ) );
}
return this;
}
clone() {
return new this.constructor().copy( this );
}
}
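// Worked example (illustrative only) of the convention documented above:
// the point (0, 1, 1) lies in the y/z plane, so
//
//   new Spherical().setFromVector3( new Vector3( 0, 1, 1 ) );
//   // radius = Math.sqrt( 2 ), phi = Math.PI / 4 (45 degrees from +y), theta = 0 (on the +z axis)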
const _vector$2 = /*@__PURE__*/ new Vector3();
const _boneMatrix = /*@__PURE__*/ new Matrix4();
const _matrixWorldInv = /*@__PURE__*/ new Matrix4();
class SkeletonHelper extends LineSegments {
constructor( object ) {
const bones = getBoneList( object );
const geometry = new BufferGeometry();
const vertices = [];
const colors = [];
const color1 = new Color( 0, 0, 1 );
const color2 = new Color( 0, 1, 0 );
for ( let i = 0; i < bones.length; i ++ ) {
const bone = bones[ i ];
if ( bone.parent && bone.parent.isBone ) {
vertices.push( 0, 0, 0 );
vertices.push( 0, 0, 0 );
colors.push( color1.r, color1.g, color1.b );
colors.push( color2.r, color2.g, color2.b );
}
}
geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) );
const material = new LineBasicMaterial( { vertexColors: true, depthTest: false, depthWrite: false, toneMapped: false, transparent: true } );
super( geometry, material );
this.type = 'SkeletonHelper';
this.isSkeletonHelper = true;
this.root = object;
this.bones = bones;
this.matrix = object.matrixWorld;
this.matrixAutoUpdate = false;
}
updateMatrixWorld( force ) {
const bones = this.bones;
const geometry = this.geometry;
const position = geometry.getAttribute( 'position' );
_matrixWorldInv.copy( this.root.matrixWorld ).invert();
for ( let i = 0, j = 0; i < bones.length; i ++ ) {
const bone = bones[ i ];
if ( bone.parent && bone.parent.isBone ) {
_boneMatrix.multiplyMatrices( _matrixWorldInv, bone.matrixWorld );
_vector$2.setFromMatrixPosition( _boneMatrix );
position.setXYZ( j, _vector$2.x, _vector$2.y, _vector$2.z );
_boneMatrix.multiplyMatrices( _matrixWorldInv, bone.parent.matrixWorld );
_vector$2.setFromMatrixPosition( _boneMatrix );
position.setXYZ( j + 1, _vector$2.x, _vector$2.y, _vector$2.z );
j += 2;
}
}
geometry.getAttribute( 'position' ).needsUpdate = true;
super.updateMatrixWorld( force );
}
}
function getBoneList( object ) {
const boneList = [];
if ( object && object.isBone ) {
boneList.push( object );
}
for ( let i = 0; i < object.children.length; i ++ ) {
boneList.push.apply( boneList, getBoneList( object.children[ i ] ) );
}
return boneList;
}
const _floatView = new Float32Array( 1 );
const _int32View = new Int32Array( _floatView.buffer );
class DataUtils {
// Converts float32 to float16 (stored as uint16 value).
static toHalfFloat( val ) {
// Source: http://gamedev.stackexchange.com/questions/17326/conversion-of-a-number-from-single-precision-floating-point-representation-to-a/17410#17410
/* This method is faster than the OpenEXR implementation (very often
* used, e.g. in Ogre), with the additional benefit of rounding, inspired
* by James Tursa's half-precision code. */
_floatView[ 0 ] = val;
const x = _int32View[ 0 ];
let bits = ( x >> 16 ) & 0x8000; /* Get the sign */
let m = ( x >> 12 ) & 0x07ff; /* Keep one extra bit for rounding */
const e = ( x >> 23 ) & 0xff; /* Using int is faster here */
/* If zero, or denormal, or exponent underflows too much for a denormal
* half, return signed zero. */
if ( e < 103 ) return bits;
/* If NaN, return NaN. If Inf or exponent overflow, return Inf. */
if ( e > 142 ) {
bits |= 0x7c00;
/* If exponent was 0xff and one mantissa bit was set, it means NaN,
* not Inf, so make sure we set one mantissa bit too. */
bits |= ( ( e == 255 ) ? 0 : 1 ) && ( x & 0x007fffff );
return bits;
}
/* If exponent underflows but not too much, return a denormal */
if ( e < 113 ) {
m |= 0x0800;
/* Extra rounding may overflow and set mantissa to 0 and exponent
* to 1, which is OK. */
bits |= ( m >> ( 114 - e ) ) + ( ( m >> ( 113 - e ) ) & 1 );
return bits;
}
bits |= ( ( e - 112 ) << 10 ) | ( m >> 1 );
/* Extra rounding. An overflow will set mantissa to 0 and increment
* the exponent, which is OK. */
bits += m & 1;
return bits;
}
}
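// Worked examples (illustrative only) of the conversion above:
//
//   DataUtils.toHalfFloat( 1.0 );   // 0x3C00
//   DataUtils.toHalfFloat( 0.5 );   // 0x3800
//   DataUtils.toHalfFloat( - 2.0 ); // 0xC000 (sign bit set)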
const LOD_MIN = 4;
const LOD_MAX = 8;
const SIZE_MAX = Math.pow( 2, LOD_MAX );
// The standard deviations (radians) associated with the extra mips. These are
// chosen to approximate a Trowbridge-Reitz distribution function times the
// geometric shadowing function. These sigma values squared must match the
// variance #defines in cube_uv_reflection_fragment.glsl.js.
const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ];
const TOTAL_LODS = LOD_MAX - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length;
// The maximum length of the blur for loop. Smaller sigmas will use fewer
// samples and exit early, but not recompile the shader.
const MAX_SAMPLES = 20;
const ENCODINGS = {
[ LinearEncoding ]: 0,
[ sRGBEncoding ]: 1,
[ RGBEEncoding ]: 2,
[ RGBM7Encoding ]: 3,
[ RGBM16Encoding ]: 4,
[ RGBDEncoding ]: 5,
[ GammaEncoding ]: 6
};
const backgroundMaterial = new MeshBasicMaterial( {
side: BackSide,
depthWrite: false,
depthTest: false,
} );
const backgroundBox = new Mesh( new BoxGeometry(), backgroundMaterial );
const _flatCamera$1 = /*@__PURE__*/ new OrthographicCamera();
const { _lodPlanes, _sizeLods, _sigmas } = /*@__PURE__*/ _createPlanes();
const _clearColor = /*@__PURE__*/ new Color();
let _oldTarget = null;
// Golden Ratio
const PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
const INV_PHI = 1 / PHI;
// Vertices of a dodecahedron (except the opposites, which represent the
// same axis), used as axis directions evenly spread on a sphere.
const _axisDirections = [
/*@__PURE__*/ new Vector3( 1, 1, 1 ),
/*@__PURE__*/ new Vector3( - 1, 1, 1 ),
/*@__PURE__*/ new Vector3( 1, 1, - 1 ),
/*@__PURE__*/ new Vector3( - 1, 1, - 1 ),
/*@__PURE__*/ new Vector3( 0, PHI, INV_PHI ),
/*@__PURE__*/ new Vector3( 0, PHI, - INV_PHI ),
/*@__PURE__*/ new Vector3( INV_PHI, 0, PHI ),
/*@__PURE__*/ new Vector3( - INV_PHI, 0, PHI ),
/*@__PURE__*/ new Vector3( PHI, INV_PHI, 0 ),
/*@__PURE__*/ new Vector3( - PHI, INV_PHI, 0 ) ];
/**
* This class generates a Prefiltered, Mipmapped Radiance Environment Map
* (PMREM) from a cubeMap environment texture. This allows different levels of
* blur to be quickly accessed based on material roughness. It is packed into a
* special CubeUV format that allows us to perform custom interpolation so that
* we can support nonlinear formats such as RGBE. Unlike a traditional mipmap
* chain, it only goes down to the LOD_MIN level (above), and then creates extra
* even more filtered 'mips' at the same LOD_MIN resolution, associated with
* higher roughness levels. In this way we maintain resolution to smoothly
* interpolate diffuse lighting while limiting sampling computation.
*
* Paper: Fast, Accurate Image-Based Lighting
* https://drive.google.com/file/d/15y8r_UpKlU9SvV4ILb0C3qCPecS8pvLz/view
*/
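// Illustrative usage sketch (not part of the library); assumes an existing WebGLRenderer
// `renderer`, a Scene `scene` and a loaded equirectangular Texture `equirectTexture`:
//
//   const pmremGenerator = new PMREMGenerator( renderer );
//   pmremGenerator.compileEquirectangularShader(); // optional: overlap compilation with loading
//   const envTarget = pmremGenerator.fromEquirectangular( equirectTexture );
//   scene.environment = envTarget.texture; // or assign it to a material's .envMap
//   pmremGenerator.dispose();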
function convertLinearToRGBE( color ) {
const maxComponent = Math.max( color.r, color.g, color.b );
const fExp = Math.min( Math.max( Math.ceil( Math.log2( maxComponent ) ), - 128.0 ), 127.0 );
color.multiplyScalar( Math.pow( 2.0, - fExp ) );
const alpha = ( fExp + 128.0 ) / 255.0;
return alpha;
}
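// For example (illustrative): for a color of ( 0.5, 0.25, 0.125 ) the max component is 0.5, so
// fExp = ceil( log2( 0.5 ) ) = - 1, the color is rescaled by 2^1 to ( 1.0, 0.5, 0.25 ), and the
// returned alpha is ( - 1 + 128 ) / 255 ≈ 0.498, which _sceneToCubeUV stores in the background
// material's opacity channel as the shared RGBE exponent.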
class PMREMGenerator {
constructor( renderer ) {
this._renderer = renderer;
this._pingPongRenderTarget = null;
this._blurMaterial = _getBlurShader( MAX_SAMPLES );
this._equirectShader = null;
this._cubemapShader = null;
this._compileMaterial( this._blurMaterial );
}
/**
* Generates a PMREM from a supplied Scene, which can be faster than using an
* image if networking bandwidth is low. Optional sigma specifies a blur radius
* in radians to be applied to the scene before PMREM generation. Optional near
* and far planes ensure the scene is rendered in its entirety (the cubeCamera
* is placed at the origin).
*/
fromScene( scene, sigma = 0, near = 0.1, far = 100 ) {
_oldTarget = this._renderer.getRenderTarget();
const cubeUVRenderTarget = this._allocateTargets();
this._sceneToCubeUV( scene, near, far, cubeUVRenderTarget );
if ( sigma > 0 ) {
this._blur( cubeUVRenderTarget, 0, 0, sigma );
}
this._applyPMREM( cubeUVRenderTarget );
this._cleanup( cubeUVRenderTarget );
return cubeUVRenderTarget;
}
/**
* Generates a PMREM from an equirectangular texture, which can be either LDR
* (RGBFormat) or HDR (RGBEFormat). The ideal input image size is 1k (1024 x 512),
* as this matches best with the 256 x 256 cubemap output.
*/
fromEquirectangular( equirectangular ) {
return this._fromTexture( equirectangular );
}
/**
	 * Generates a PMREM from a cubemap texture, which can be either LDR
* (RGBFormat) or HDR (RGBEFormat). The ideal input cube size is 256 x 256,
* as this matches best with the 256 x 256 cubemap output.
*/
fromCubemap( cubemap ) {
return this._fromTexture( cubemap );
}
/**
* Pre-compiles the cubemap shader. You can get faster start-up by invoking this method during
* your texture's network fetch for increased concurrency.
*/
compileCubemapShader() {
if ( this._cubemapShader === null ) {
this._cubemapShader = _getCubemapShader();
this._compileMaterial( this._cubemapShader );
}
}
/**
* Pre-compiles the equirectangular shader. You can get faster start-up by invoking this method during
* your texture's network fetch for increased concurrency.
*/
compileEquirectangularShader() {
if ( this._equirectShader === null ) {
this._equirectShader = _getEquirectShader();
this._compileMaterial( this._equirectShader );
}
}
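	// Illustrative pattern (not part of the library): start shader compilation while the texture
	// is still downloading, then build the PMREM in the load callback. The `pmremGenerator`
	// instance, the `url` and the RGBELoader-style loader are assumptions about the surrounding
	// application code:
	//
	//   pmremGenerator.compileEquirectangularShader();
	//   new RGBELoader().load( url, ( hdrTexture ) => {
	//     const envTarget = pmremGenerator.fromEquirectangular( hdrTexture );
	//     hdrTexture.dispose();
	//   } );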
/**
	 * Disposes of the PMREMGenerator's internal memory. Note that all PMREMGenerator instances
	 * share module-level resources (the LOD plane geometries), so you should not need more than
	 * one PMREMGenerator object. If you do, calling dispose() on one of them will make the
	 * others unusable as well.
*/
dispose() {
this._blurMaterial.dispose();
if ( this._cubemapShader !== null ) this._cubemapShader.dispose();
if ( this._equirectShader !== null ) this._equirectShader.dispose();
for ( let i = 0; i < _lodPlanes.length; i ++ ) {
_lodPlanes[ i ].dispose();
}
}
// private interface
_cleanup( outputTarget ) {
this._pingPongRenderTarget.dispose();
this._renderer.setRenderTarget( _oldTarget );
outputTarget.scissorTest = false;
_setViewport( outputTarget, 0, 0, outputTarget.width, outputTarget.height );
}
_fromTexture( texture ) {
_oldTarget = this._renderer.getRenderTarget();
const cubeUVRenderTarget = this._allocateTargets( texture );
this._textureToCubeUV( texture, cubeUVRenderTarget );
this._applyPMREM( cubeUVRenderTarget );
this._cleanup( cubeUVRenderTarget );
return cubeUVRenderTarget;
}
_allocateTargets( texture ) { // warning: null texture is valid
const params = {
magFilter: NearestFilter,
minFilter: NearestFilter,
generateMipmaps: false,
type: UnsignedByteType,
format: RGBEFormat,
encoding: _isLDR( texture ) ? texture.encoding : RGBEEncoding,
depthBuffer: false
};
const cubeUVRenderTarget = _createRenderTarget( params );
cubeUVRenderTarget.depthBuffer = texture ? false : true;
this._pingPongRenderTarget = _createRenderTarget( params );
return cubeUVRenderTarget;
}
_compileMaterial( material ) {
const tmpMesh = new Mesh( _lodPlanes[ 0 ], material );
this._renderer.compile( tmpMesh, _flatCamera$1 );
}
_sceneToCubeUV( scene, near, far, cubeUVRenderTarget ) {
const fov = 90;
const aspect = 1;
const cubeCamera = new PerspectiveCamera( fov, aspect, near, far );
const upSign = [ 1, - 1, 1, 1, 1, 1 ];
const forwardSign = [ 1, 1, 1, - 1, - 1, - 1 ];
const renderer = this._renderer;
const originalAutoClear = renderer.autoClear;
const outputEncoding = renderer.outputEncoding;
const toneMapping = renderer.toneMapping;
renderer.getClearColor( _clearColor );
renderer.toneMapping = NoToneMapping;
renderer.outputEncoding = LinearEncoding;
renderer.autoClear = false;
let useSolidColor = false;
const background = scene.background;
if ( background ) {
if ( background.isColor ) {
backgroundMaterial.color.copy( background ).convertSRGBToLinear();
scene.background = null;
const alpha = convertLinearToRGBE( backgroundMaterial.color );
backgroundMaterial.opacity = alpha;
useSolidColor = true;
}
} else {
backgroundMaterial.color.copy( _clearColor ).convertSRGBToLinear();
const alpha = convertLinearToRGBE( backgroundMaterial.color );
backgroundMaterial.opacity = alpha;
useSolidColor = true;
}
for ( let i = 0; i < 6; i ++ ) {
const col = i % 3;
if ( col == 0 ) {
cubeCamera.up.set( 0, upSign[ i ], 0 );
cubeCamera.lookAt( forwardSign[ i ], 0, 0 );
} else if ( col == 1 ) {
cubeCamera.up.set( 0, 0, upSign[ i ] );
cubeCamera.lookAt( 0, forwardSign[ i ], 0 );
} else {
cubeCamera.up.set( 0, upSign[ i ], 0 );
cubeCamera.lookAt( 0, 0, forwardSign[ i ] );
}
_setViewport( cubeUVRenderTarget,
col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
renderer.setRenderTarget( cubeUVRenderTarget );
if ( useSolidColor ) {
renderer.render( backgroundBox, cubeCamera );
}
renderer.render( scene, cubeCamera );
}
renderer.toneMapping = toneMapping;
renderer.outputEncoding = outputEncoding;
renderer.autoClear = originalAutoClear;
}
_textureToCubeUV( texture, cubeUVRenderTarget ) {
const renderer = this._renderer;
if ( texture.isCubeTexture ) {
if ( this._cubemapShader == null ) {
this._cubemapShader = _getCubemapShader();
}
} else {
if ( this._equirectShader == null ) {
this._equirectShader = _getEquirectShader();
}
}
const material = texture.isCubeTexture ? this._cubemapShader : this._equirectShader;
const mesh = new Mesh( _lodPlanes[ 0 ], material );
const uniforms = material.uniforms;
uniforms[ 'envMap' ].value = texture;
if ( ! texture.isCubeTexture ) {
uniforms[ 'texelSize' ].value.set( 1.0 / texture.image.width, 1.0 / texture.image.height );
}
uniforms[ 'inputEncoding' ].value = ENCODINGS[ texture.encoding ];
uniforms[ 'outputEncoding' ].value = ENCODINGS[ cubeUVRenderTarget.texture.encoding ];
_setViewport( cubeUVRenderTarget, 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
renderer.setRenderTarget( cubeUVRenderTarget );
renderer.render( mesh, _flatCamera$1 );
}
_applyPMREM( cubeUVRenderTarget ) {
const renderer = this._renderer;
const autoClear = renderer.autoClear;
renderer.autoClear = false;
for ( let i = 1; i < TOTAL_LODS; i ++ ) {
const sigma = Math.sqrt( _sigmas[ i ] * _sigmas[ i ] - _sigmas[ i - 1 ] * _sigmas[ i - 1 ] );
const poleAxis = _axisDirections[ ( i - 1 ) % _axisDirections.length ];
this._blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis );
}
renderer.autoClear = autoClear;
}
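	// The incremental blur above relies on Gaussian variances being additive: blurring by sigma_a
	// and then by sqrt( sigma_b^2 - sigma_a^2 ) is equivalent to a single blur of sigma_b. For
	// example (illustrative), stepping from a level with sigma 0.125 to one with sigma 0.215 uses
	// an incremental sigma of sqrt( 0.215^2 - 0.125^2 ) ≈ 0.175.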
/**
* This is a two-pass Gaussian blur for a cubemap. Normally this is done
* vertically and horizontally, but this breaks down on a cube. Here we apply
* the blur latitudinally (around the poles), and then longitudinally (towards
* the poles) to approximate the orthogonally-separable blur. It is least
* accurate at the poles, but still does a decent job.
*/
_blur( cubeUVRenderTarget, lodIn, lodOut, sigma, poleAxis ) {
const pingPongRenderTarget = this._pingPongRenderTarget;
this._halfBlur(
cubeUVRenderTarget,
pingPongRenderTarget,
lodIn,
lodOut,
sigma,
'latitudinal',
poleAxis );
this._halfBlur(
pingPongRenderTarget,
cubeUVRenderTarget,
lodOut,
lodOut,
sigma,
'longitudinal',
poleAxis );
}
_halfBlur( targetIn, targetOut, lodIn, lodOut, sigmaRadians, direction, poleAxis ) {
const renderer = this._renderer;
const blurMaterial = this._blurMaterial;
if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) {
console.error(
'blur direction must be either latitudinal or longitudinal!' );
}
// Number of standard deviations at which to cut off the discrete approximation.
const STANDARD_DEVIATIONS = 3;
const blurMesh = new Mesh( _lodPlanes[ lodOut ], blurMaterial );
const blurUniforms = blurMaterial.uniforms;
const pixels = _sizeLods[ lodIn ] - 1;
const radiansPerPixel = isFinite( sigmaRadians ) ? Math.PI / ( 2 * pixels ) : 2 * Math.PI / ( 2 * MAX_SAMPLES - 1 );
const sigmaPixels = sigmaRadians / radiansPerPixel;
const samples = isFinite( sigmaRadians ) ? 1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) : MAX_SAMPLES;
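		// e.g. (illustrative) sigmaPixels = 5 gives samples = 1 + floor( 3 * 5 ) = 16, within MAX_SAMPLES.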
if ( samples > MAX_SAMPLES ) {
			console.warn( `sigmaRadians, ${ sigmaRadians }, is too large and will clip, as it requested ${ samples } samples when the maximum is set to ${ MAX_SAMPLES }` );
}
const weights = [];
let sum = 0;
for ( let i = 0; i < MAX_SAMPLES; ++ i ) {
const x = i / sigmaPixels;
const weight = Math.exp( - x * x / 2 );
weights.push( weight );
if ( i == 0 ) {
sum += weight;
} else if ( i < samples ) {
sum += 2 * weight;
}
}
for ( let i = 0; i < weights.length; i ++ ) {
weights[ i ] = weights[ i ] / sum;
}
blurUniforms[ 'envMap' ].value = targetIn.texture;
blurUniforms[ 'samples' ].value = samples;
blurUniforms[ 'weights' ].value = weights;
blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal';
if ( poleAxis ) {
blurUniforms[ 'poleAxis' ].value = poleAxis;
}
blurUniforms[ 'dTheta' ].value = radiansPerPixel;
blurUniforms[ 'mipInt' ].value = LOD_MAX - lodIn;
blurUniforms[ 'inputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
blurUniforms[ 'outputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
const outputSize = _sizeLods[ lodOut ];
const x = 3 * Math.max( 0, SIZE_MAX - 2 * outputSize );
const y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) + 2 * outputSize * ( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );
_setViewport( targetOut, x, y, 3 * outputSize, 2 * outputSize );
renderer.setRenderTarget( targetOut );
renderer.render( blurMesh, _flatCamera$1 );
}
}
function _isLDR( texture ) {
if ( texture === undefined || texture.type !== UnsignedByteType ) return false;
return texture.encoding === LinearEncoding || texture.encoding === sRGBEncoding || texture.encoding === GammaEncoding;
}
function _createPlanes() {
const _lodPlanes = [];
const _sizeLods = [];
const _sigmas = [];
let lod = LOD_MAX;
for ( let i = 0; i < TOTAL_LODS; i ++ ) {
const sizeLod = Math.pow( 2, lod );
_sizeLods.push( sizeLod );
let sigma = 1.0 / sizeLod;
if ( i > LOD_MAX - LOD_MIN ) {
sigma = EXTRA_LOD_SIGMA[ i - LOD_MAX + LOD_MIN - 1 ];
} else if ( i == 0 ) {
sigma = 0;
}
_sigmas.push( sigma );
const texelSize = 1.0 / ( sizeLod - 1 );
const min = - texelSize / 2;
const max = 1 + texelSize / 2;
const uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ];
const cubeFaces = 6;
const vertices = 6;
const positionSize = 3;
const uvSize = 2;
const faceIndexSize = 1;
const position = new Float32Array( positionSize * vertices * cubeFaces );
const uv = new Float32Array( uvSize * vertices * cubeFaces );
const faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces );
for ( let face = 0; face < cubeFaces; face ++ ) {
const x = ( face % 3 ) * 2 / 3 - 1;
const y = face > 2 ? 0 : - 1;
const coordinates = [
x, y, 0,
x + 2 / 3, y, 0,
x + 2 / 3, y + 1, 0,
x, y, 0,
x + 2 / 3, y + 1, 0,
x, y + 1, 0
];
position.set( coordinates, positionSize * vertices * face );
uv.set( uv1, uvSize * vertices * face );
const fill = [ face, face, face, face, face, face ];
faceIndex.set( fill, faceIndexSize * vertices * face );
}
const planes = new BufferGeometry();
planes.setAttribute( 'position', new BufferAttribute( position, positionSize ) );
planes.setAttribute( 'uv', new BufferAttribute( uv, uvSize ) );
planes.setAttribute( 'faceIndex', new BufferAttribute( faceIndex, faceIndexSize ) );
_lodPlanes.push( planes );
if ( lod > LOD_MIN ) {
lod --;
}
}
return { _lodPlanes, _sizeLods, _sigmas };
}
function _createRenderTarget( params ) {
const cubeUVRenderTarget = new WebGLRenderTarget( 3 * SIZE_MAX, 3 * SIZE_MAX, params );
cubeUVRenderTarget.texture.mapping = CubeUVReflectionMapping;
cubeUVRenderTarget.texture.name = 'PMREM.cubeUv';
cubeUVRenderTarget.scissorTest = true;
return cubeUVRenderTarget;
}
function _setViewport( target, x, y, width, height ) {
target.viewport.set( x, y, width, height );
target.scissor.set( x, y, width, height );
}
function _getBlurShader( maxSamples ) {
const weights = new Float32Array( maxSamples );
const poleAxis = new Vector3( 0, 1, 0 );
const shaderMaterial = new RawShaderMaterial( {
name: 'SphericalGaussianBlur',
defines: { 'n': maxSamples },
uniforms: {
'envMap': { value: null },
'samples': { value: 1 },
'weights': { value: weights },
'latitudinal': { value: false },
'dTheta': { value: 0 },
'mipInt': { value: 0 },
'poleAxis': { value: poleAxis },
'inputEncoding': { value: ENCODINGS[ LinearEncoding ] },
'outputEncoding': { value: ENCODINGS[ LinearEncoding ] }
},
vertexShader: _getCommonVertexShader(),
fragmentShader: /* glsl */`
precision mediump float;
precision mediump int;
varying vec3 vOutputDirection;
uniform sampler2D envMap;
uniform int samples;
uniform float weights[ n ];
uniform bool latitudinal;
uniform float dTheta;
uniform float mipInt;
uniform vec3 poleAxis;
${ _getEncodings() }
#define ENVMAP_TYPE_CUBE_UV
#include <cube_uv_reflection_fragment>
vec3 getSample( float theta, vec3 axis ) {
float cosTheta = cos( theta );
// Rodrigues' axis-angle rotation
vec3 sampleDirection = vOutputDirection * cosTheta
+ cross( axis, vOutputDirection ) * sin( theta )
+ axis * dot( axis, vOutputDirection ) * ( 1.0 - cosTheta );
return bilinearCubeUV( envMap, sampleDirection, mipInt );
}
void main() {
vec3 axis = latitudinal ? poleAxis : cross( poleAxis, vOutputDirection );
if ( all( equal( axis, vec3( 0.0 ) ) ) ) {
axis = vec3( vOutputDirection.z, 0.0, - vOutputDirection.x );
}
axis = normalize( axis );
gl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 );
gl_FragColor.rgb += weights[ 0 ] * getSample( 0.0, axis );
for ( int i = 1; i < n; i++ ) {
if ( i >= samples ) {
break;
}
float theta = dTheta * float( i );
gl_FragColor.rgb += weights[ i ] * getSample( -1.0 * theta, axis );
gl_FragColor.rgb += weights[ i ] * getSample( theta, axis );
}
gl_FragColor = linearToOutputTexel( gl_FragColor );
}
`,
blending: NoBlending,
depthTest: false,
depthWrite: false
} );
return shaderMaterial;
}
function _getEquirectShader() {
const texelSize = new Vector2( 1, 1 );
const shaderMaterial = new RawShaderMaterial( {
name: 'EquirectangularToCubeUV',
uniforms: {
'envMap': { value: null },
'texelSize': { value: texelSize },
'inputEncoding': { value: ENCODINGS[ LinearEncoding ] },
'outputEncoding': { value: ENCODINGS[ LinearEncoding ] }
},
vertexShader: _getCommonVertexShader(),
fragmentShader: /* glsl */`
precision mediump float;
precision mediump int;
varying vec3 vOutputDirection;
uniform sampler2D envMap;
uniform vec2 texelSize;
${ _getEncodings() }
#include <common>
void main() {
gl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 );
vec3 outputDirection = normalize( vOutputDirection );
vec2 uv = equirectUv( outputDirection );
vec2 f = fract( uv / texelSize - 0.5 );
uv -= f * texelSize;
vec3 tl = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;
uv.x += texelSize.x;
vec3 tr = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;
uv.y += texelSize.y;
vec3 br = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;
uv.x -= texelSize.x;
vec3 bl = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;
vec3 tm = mix( tl, tr, f.x );
vec3 bm = mix( bl, br, f.x );
gl_FragColor.rgb = mix( tm, bm, f.y );
gl_FragColor = linearToOutputTexel( gl_FragColor );
}
`,
blending: NoBlending,
depthTest: false,
depthWrite: false
} );
return shaderMaterial;
}
function _getCubemapShader() {
const shaderMaterial = new RawShaderMaterial( {
name: 'CubemapToCubeUV',
uniforms: {
'envMap': { value: null },
'inputEncoding': { value: ENCODINGS[ LinearEncoding ] },
'outputEncoding': { value: ENCODINGS[ LinearEncoding ] }
},
vertexShader: _getCommonVertexShader(),
fragmentShader: /* glsl */`
precision mediump float;
precision mediump int;
varying vec3 vOutputDirection;
uniform samplerCube envMap;
${ _getEncodings() }
void main() {
gl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 );
gl_FragColor.rgb = envMapTexelToLinear( textureCube( envMap, vec3( - vOutputDirection.x, vOutputDirection.yz ) ) ).rgb;
gl_FragColor = linearToOutputTexel( gl_FragColor );
}
`,
blending: NoBlending,
depthTest: false,
depthWrite: false
} );
return shaderMaterial;
}
function _getCommonVertexShader() {
return /* glsl */`
precision mediump float;
precision mediump int;
attribute vec3 position;
attribute vec2 uv;
attribute float faceIndex;
varying vec3 vOutputDirection;
// RH coordinate system; PMREM face-indexing convention
vec3 getDirection( vec2 uv, float face ) {
uv = 2.0 * uv - 1.0;
vec3 direction = vec3( uv, 1.0 );
if ( face == 0.0 ) {
direction = direction.zyx; // ( 1, v, u ) pos x
} else if ( face == 1.0 ) {
direction = direction.xzy;
direction.xz *= -1.0; // ( -u, 1, -v ) pos y
} else if ( face == 2.0 ) {
direction.x *= -1.0; // ( -u, v, 1 ) pos z
} else if ( face == 3.0 ) {
direction = direction.zyx;
direction.xz *= -1.0; // ( -1, v, -u ) neg x
} else if ( face == 4.0 ) {
direction = direction.xzy;
direction.xy *= -1.0; // ( -u, -1, v ) neg y
} else if ( face == 5.0 ) {
direction.z *= -1.0; // ( u, v, -1 ) neg z
}
return direction;
}
void main() {
vOutputDirection = getDirection( uv, faceIndex );
gl_Position = vec4( position, 1.0 );
}
`;
}
function _getEncodings() {
return /* glsl */`
uniform int inputEncoding;
uniform int outputEncoding;
#include <encodings_pars_fragment>
vec4 inputTexelToLinear( vec4 value ) {
if ( inputEncoding == 0 ) {
return value;
} else if ( inputEncoding == 1 ) {
return sRGBToLinear( value );
} else if ( inputEncoding == 2 ) {
return RGBEToLinear( value );
} else if ( inputEncoding == 3 ) {
return RGBMToLinear( value, 7.0 );
} else if ( inputEncoding == 4 ) {
return RGBMToLinear( value, 16.0 );
} else if ( inputEncoding == 5 ) {
return RGBDToLinear( value, 256.0 );
} else {
return GammaToLinear( value, 2.2 );
}
}
vec4 linearToOutputTexel( vec4 value ) {
if ( outputEncoding == 0 ) {
return value;
} else if ( outputEncoding == 1 ) {
return LinearTosRGB( value );
} else if ( outputEncoding == 2 ) {
return LinearToRGBE( value );
} else if ( outputEncoding == 3 ) {
return LinearToRGBM( value, 7.0 );
} else if ( outputEncoding == 4 ) {
return LinearToRGBM( value, 16.0 );
} else if ( outputEncoding == 5 ) {
return LinearToRGBD( value, 256.0 );
} else {
return LinearToGamma( value, 2.2 );
}
}
vec4 envMapTexelToLinear( vec4 color ) {
return inputTexelToLinear( color );
}
`;
}
SkeletonHelper.prototype.update = function () {
console.error( 'THREE.SkeletonHelper: update() no longer needs to be called.' );
};
//
Loader.prototype.extractUrlBase = function ( url ) {
console.warn( 'THREE.Loader: .extractUrlBase() has been deprecated. Use THREE.LoaderUtils.extractUrlBase() instead.' );
return LoaderUtils.extractUrlBase( url );
};
Loader.Handlers = {
add: function ( /* regex, loader */ ) {
console.error( 'THREE.Loader: Handlers.add() has been removed. Use LoadingManager.addHandler() instead.' );
},
get: function ( /* file */ ) {
console.error( 'THREE.Loader: Handlers.get() has been removed. Use LoadingManager.getHandler() instead.' );
}
};
//
Box3.prototype.center = function ( optionalTarget ) {
console.warn( 'THREE.Box3: .center() has been renamed to .getCenter().' );
return this.getCenter( optionalTarget );
};
Box3.prototype.empty = function () {
console.warn( 'THREE.Box3: .empty() has been renamed to .isEmpty().' );
return this.isEmpty();
};
Box3.prototype.isIntersectionBox = function ( box ) {
console.warn( 'THREE.Box3: .isIntersectionBox() has been renamed to .intersectsBox().' );
return this.intersectsBox( box );
};
Box3.prototype.isIntersectionSphere = function ( sphere ) {
console.warn( 'THREE.Box3: .isIntersectionSphere() has been renamed to .intersectsSphere().' );
return this.intersectsSphere( sphere );
};
Box3.prototype.size = function ( optionalTarget ) {
console.warn( 'THREE.Box3: .size() has been renamed to .getSize().' );
return this.getSize( optionalTarget );
};
//
Sphere.prototype.empty = function () {
console.warn( 'THREE.Sphere: .empty() has been renamed to .isEmpty().' );
return this.isEmpty();
};
//
Frustum.prototype.setFromMatrix = function ( m ) {
console.warn( 'THREE.Frustum: .setFromMatrix() has been renamed to .setFromProjectionMatrix().' );
return this.setFromProjectionMatrix( m );
};
//
Matrix3.prototype.flattenToArrayOffset = function ( array, offset ) {
console.warn( 'THREE.Matrix3: .flattenToArrayOffset() has been deprecated. Use .toArray() instead.' );
return this.toArray( array, offset );
};
Matrix3.prototype.multiplyVector3 = function ( vector ) {
console.warn( 'THREE.Matrix3: .multiplyVector3() has been removed. Use vector.applyMatrix3( matrix ) instead.' );
return vector.applyMatrix3( this );
};
Matrix3.prototype.multiplyVector3Array = function ( /* a */ ) {
console.error( 'THREE.Matrix3: .multiplyVector3Array() has been removed.' );
};
Matrix3.prototype.applyToBufferAttribute = function ( attribute ) {
console.warn( 'THREE.Matrix3: .applyToBufferAttribute() has been removed. Use attribute.applyMatrix3( matrix ) instead.' );
return attribute.applyMatrix3( this );
};
Matrix3.prototype.applyToVector3Array = function ( /* array, offset, length */ ) {
console.error( 'THREE.Matrix3: .applyToVector3Array() has been removed.' );
};
Matrix3.prototype.getInverse = function ( matrix ) {
console.warn( 'THREE.Matrix3: .getInverse() has been removed. Use matrixInv.copy( matrix ).invert(); instead.' );
return this.copy( matrix ).invert();
};
//
Matrix4.prototype.extractPosition = function ( m ) {
console.warn( 'THREE.Matrix4: .extractPosition() has been renamed to .copyPosition().' );
return this.copyPosition( m );
};
Matrix4.prototype.flattenToArrayOffset = function ( array, offset ) {
console.warn( 'THREE.Matrix4: .flattenToArrayOffset() has been deprecated. Use .toArray() instead.' );
return this.toArray( array, offset );
};
Matrix4.prototype.getPosition = function () {
console.warn( 'THREE.Matrix4: .getPosition() has been removed. Use Vector3.setFromMatrixPosition( matrix ) instead.' );
return new Vector3().setFromMatrixColumn( this, 3 );
};
Matrix4.prototype.setRotationFromQuaternion = function ( q ) {
console.warn( 'THREE.Matrix4: .setRotationFromQuaternion() has been renamed to .makeRotationFromQuaternion().' );
return this.makeRotationFromQuaternion( q );
};
Matrix4.prototype.multiplyToArray = function () {
console.warn( 'THREE.Matrix4: .multiplyToArray() has been removed.' );
};
Matrix4.prototype.multiplyVector3 = function ( vector ) {
console.warn( 'THREE.Matrix4: .multiplyVector3() has been removed. Use vector.applyMatrix4( matrix ) instead.' );
return vector.applyMatrix4( this );
};
Matrix4.prototype.multiplyVector4 = function ( vector ) {
console.warn( 'THREE.Matrix4: .multiplyVector4() has been removed. Use vector.applyMatrix4( matrix ) instead.' );
return vector.applyMatrix4( this );
};
Matrix4.prototype.multiplyVector3Array = function ( /* a */ ) {
console.error( 'THREE.Matrix4: .multiplyVector3Array() has been removed.' );
};
Matrix4.prototype.rotateAxis = function ( v ) {
console.warn( 'THREE.Matrix4: .rotateAxis() has been removed. Use Vector3.transformDirection( matrix ) instead.' );
v.transformDirection( this );
};
Matrix4.prototype.crossVector = function ( vector ) {
console.warn( 'THREE.Matrix4: .crossVector() has been removed. Use vector.applyMatrix4( matrix ) instead.' );
return vector.applyMatrix4( this );
};
Matrix4.prototype.translate = function () {
console.error( 'THREE.Matrix4: .translate() has been removed.' );
};
Matrix4.prototype.rotateX = function () {
console.error( 'THREE.Matrix4: .rotateX() has been removed.' );
};
Matrix4.prototype.rotateY = function () {
console.error( 'THREE.Matrix4: .rotateY() has been removed.' );
};
Matrix4.prototype.rotateZ = function () {
console.error( 'THREE.Matrix4: .rotateZ() has been removed.' );
};
Matrix4.prototype.rotateByAxis = function () {
console.error( 'THREE.Matrix4: .rotateByAxis() has been removed.' );
};
Matrix4.prototype.applyToBufferAttribute = function ( attribute ) {
console.warn( 'THREE.Matrix4: .applyToBufferAttribute() has been removed. Use attribute.applyMatrix4( matrix ) instead.' );
return attribute.applyMatrix4( this );
};
Matrix4.prototype.applyToVector3Array = function ( /* array, offset, length */ ) {
console.error( 'THREE.Matrix4: .applyToVector3Array() has been removed.' );
};
Matrix4.prototype.makeFrustum = function ( left, right, bottom, top, near, far ) {
console.warn( 'THREE.Matrix4: .makeFrustum() has been removed. Use .makePerspective( left, right, top, bottom, near, far ) instead.' );
return this.makePerspective( left, right, top, bottom, near, far );
};
Matrix4.prototype.getInverse = function ( matrix ) {
console.warn( 'THREE.Matrix4: .getInverse() has been removed. Use matrixInv.copy( matrix ).invert(); instead.' );
return this.copy( matrix ).invert();
};
//
Plane.prototype.isIntersectionLine = function ( line ) {
console.warn( 'THREE.Plane: .isIntersectionLine() has been renamed to .intersectsLine().' );
return this.intersectsLine( line );
};
//
Quaternion.prototype.multiplyVector3 = function ( vector ) {
	console.warn( 'THREE.Quaternion: .multiplyVector3() has been removed. Use vector.applyQuaternion( quaternion ) instead.' );
return vector.applyQuaternion( this );
};
Quaternion.prototype.inverse = function ( ) {
console.warn( 'THREE.Quaternion: .inverse() has been renamed to invert().' );
return this.invert();
};
//
Ray.prototype.isIntersectionBox = function ( box ) {
console.warn( 'THREE.Ray: .isIntersectionBox() has been renamed to .intersectsBox().' );
return this.intersectsBox( box );
};
Ray.prototype.isIntersectionPlane = function ( plane ) {
console.warn( 'THREE.Ray: .isIntersectionPlane() has been renamed to .intersectsPlane().' );
return this.intersectsPlane( plane );
};
Ray.prototype.isIntersectionSphere = function ( sphere ) {
console.warn( 'THREE.Ray: .isIntersectionSphere() has been renamed to .intersectsSphere().' );
return this.intersectsSphere( sphere );
};
//
Triangle.prototype.area = function () {
console.warn( 'THREE.Triangle: .area() has been renamed to .getArea().' );
return this.getArea();
};
Triangle.prototype.barycoordFromPoint = function ( point, target ) {
console.warn( 'THREE.Triangle: .barycoordFromPoint() has been renamed to .getBarycoord().' );
return this.getBarycoord( point, target );
};
Triangle.prototype.midpoint = function ( target ) {
console.warn( 'THREE.Triangle: .midpoint() has been renamed to .getMidpoint().' );
return this.getMidpoint( target );
};
Triangle.prototype.normal = function ( target ) {
console.warn( 'THREE.Triangle: .normal() has been renamed to .getNormal().' );
return this.getNormal( target );
};
Triangle.prototype.plane = function ( target ) {
console.warn( 'THREE.Triangle: .plane() has been renamed to .getPlane().' );
return this.getPlane( target );
};
Triangle.barycoordFromPoint = function ( point, a, b, c, target ) {
console.warn( 'THREE.Triangle: .barycoordFromPoint() has been renamed to .getBarycoord().' );
return Triangle.getBarycoord( point, a, b, c, target );
};
Triangle.normal = function ( a, b, c, target ) {
console.warn( 'THREE.Triangle: .normal() has been renamed to .getNormal().' );
return Triangle.getNormal( a, b, c, target );
};
//
Vector2.prototype.fromAttribute = function ( attribute, index, offset ) {
console.warn( 'THREE.Vector2: .fromAttribute() has been renamed to .fromBufferAttribute().' );
return this.fromBufferAttribute( attribute, index, offset );
};
Vector2.prototype.distanceToManhattan = function ( v ) {
console.warn( 'THREE.Vector2: .distanceToManhattan() has been renamed to .manhattanDistanceTo().' );
return this.manhattanDistanceTo( v );
};
Vector2.prototype.lengthManhattan = function () {
console.warn( 'THREE.Vector2: .lengthManhattan() has been renamed to .manhattanLength().' );
return this.manhattanLength();
};
//
Vector3.prototype.setEulerFromRotationMatrix = function () {
console.error( 'THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.' );
};
Vector3.prototype.setEulerFromQuaternion = function () {
console.error( 'THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.' );
};
Vector3.prototype.getPositionFromMatrix = function ( m ) {
console.warn( 'THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().' );
return this.setFromMatrixPosition( m );
};
Vector3.prototype.getScaleFromMatrix = function ( m ) {
console.warn( 'THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().' );
return this.setFromMatrixScale( m );
};
Vector3.prototype.getColumnFromMatrix = function ( index, matrix ) {
console.warn( 'THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().' );
return this.setFromMatrixColumn( matrix, index );
};
Vector3.prototype.applyProjection = function ( m ) {
console.warn( 'THREE.Vector3: .applyProjection() has been removed. Use .applyMatrix4( m ) instead.' );
return this.applyMatrix4( m );
};
Vector3.prototype.fromAttribute = function ( attribute, index, offset ) {
console.warn( 'THREE.Vector3: .fromAttribute() has been renamed to .fromBufferAttribute().' );
return this.fromBufferAttribute( attribute, index, offset );
};
Vector3.prototype.distanceToManhattan = function ( v ) {
console.warn( 'THREE.Vector3: .distanceToManhattan() has been renamed to .manhattanDistanceTo().' );
return this.manhattanDistanceTo( v );
};
Vector3.prototype.lengthManhattan = function () {
console.warn( 'THREE.Vector3: .lengthManhattan() has been renamed to .manhattanLength().' );
return this.manhattanLength();
};
//
Vector4.prototype.fromAttribute = function ( attribute, index, offset ) {
console.warn( 'THREE.Vector4: .fromAttribute() has been renamed to .fromBufferAttribute().' );
return this.fromBufferAttribute( attribute, index, offset );
};
Vector4.prototype.lengthManhattan = function () {
console.warn( 'THREE.Vector4: .lengthManhattan() has been renamed to .manhattanLength().' );
return this.manhattanLength();
};
//
Object3D.prototype.getChildByName = function ( name ) {
console.warn( 'THREE.Object3D: .getChildByName() has been renamed to .getObjectByName().' );
return this.getObjectByName( name );
};
Object3D.prototype.renderDepth = function () {
	console.warn( 'THREE.Object3D: .renderDepth has been removed. Use .renderOrder instead.' );
};
Object3D.prototype.translate = function ( distance, axis ) {
console.warn( 'THREE.Object3D: .translate() has been removed. Use .translateOnAxis( axis, distance ) instead.' );
return this.translateOnAxis( axis, distance );
};
Object3D.prototype.getWorldRotation = function () {
console.error( 'THREE.Object3D: .getWorldRotation() has been removed. Use THREE.Object3D.getWorldQuaternion( target ) instead.' );
};
Object3D.prototype.applyMatrix = function ( matrix ) {
console.warn( 'THREE.Object3D: .applyMatrix() has been renamed to .applyMatrix4().' );
return this.applyMatrix4( matrix );
};
Object.defineProperties( Object3D.prototype, {
eulerOrder: {
get: function () {
console.warn( 'THREE.Object3D: .eulerOrder is now .rotation.order.' );
return this.rotation.order;
},
set: function ( value ) {
console.warn( 'THREE.Object3D: .eulerOrder is now .rotation.order.' );
this.rotation.order = value;
}
},
useQuaternion: {
get: function () {
console.warn( 'THREE.Object3D: .useQuaternion has been removed. The library now uses quaternions by default.' );
},
set: function () {
console.warn( 'THREE.Object3D: .useQuaternion has been removed. The library now uses quaternions by default.' );
}
}
} );
Mesh.prototype.setDrawMode = function () {
console.error( 'THREE.Mesh: .setDrawMode() has been removed. The renderer now always assumes THREE.TrianglesDrawMode. Transform your geometry via BufferGeometryUtils.toTrianglesDrawMode() if necessary.' );
};
Object.defineProperties( Mesh.prototype, {
drawMode: {
get: function () {
console.error( 'THREE.Mesh: .drawMode has been removed. The renderer now always assumes THREE.TrianglesDrawMode.' );
return TrianglesDrawMode;
},
set: function () {
console.error( 'THREE.Mesh: .drawMode has been removed. The renderer now always assumes THREE.TrianglesDrawMode. Transform your geometry via BufferGeometryUtils.toTrianglesDrawMode() if necessary.' );
}
}
} );
SkinnedMesh.prototype.initBones = function () {
console.error( 'THREE.SkinnedMesh: initBones() has been removed.' );
};
//
PerspectiveCamera.prototype.setLens = function ( focalLength, filmGauge ) {
console.warn( 'THREE.PerspectiveCamera.setLens is deprecated. ' +
'Use .setFocalLength and .filmGauge for a photographic setup.' );
if ( filmGauge !== undefined ) this.filmGauge = filmGauge;
this.setFocalLength( focalLength );
};
//
Object.defineProperties( Light.prototype, {
onlyShadow: {
set: function () {
console.warn( 'THREE.Light: .onlyShadow has been removed.' );
}
},
shadowCameraFov: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraFov is now .shadow.camera.fov.' );
this.shadow.camera.fov = value;
}
},
shadowCameraLeft: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraLeft is now .shadow.camera.left.' );
this.shadow.camera.left = value;
}
},
shadowCameraRight: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraRight is now .shadow.camera.right.' );
this.shadow.camera.right = value;
}
},
shadowCameraTop: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraTop is now .shadow.camera.top.' );
this.shadow.camera.top = value;
}
},
shadowCameraBottom: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraBottom is now .shadow.camera.bottom.' );
this.shadow.camera.bottom = value;
}
},
shadowCameraNear: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraNear is now .shadow.camera.near.' );
this.shadow.camera.near = value;
}
},
shadowCameraFar: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowCameraFar is now .shadow.camera.far.' );
this.shadow.camera.far = value;
}
},
shadowCameraVisible: {
set: function () {
console.warn( 'THREE.Light: .shadowCameraVisible has been removed. Use new THREE.CameraHelper( light.shadow.camera ) instead.' );
}
},
shadowBias: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowBias is now .shadow.bias.' );
this.shadow.bias = value;
}
},
shadowDarkness: {
set: function () {
console.warn( 'THREE.Light: .shadowDarkness has been removed.' );
}
},
shadowMapWidth: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowMapWidth is now .shadow.mapSize.width.' );
this.shadow.mapSize.width = value;
}
},
shadowMapHeight: {
set: function ( value ) {
console.warn( 'THREE.Light: .shadowMapHeight is now .shadow.mapSize.height.' );
this.shadow.mapSize.height = value;
}
}
} );
//
Object.defineProperties( BufferAttribute.prototype, {
length: {
get: function () {
console.warn( 'THREE.BufferAttribute: .length has been deprecated. Use .count instead.' );
return this.array.length;
}
},
dynamic: {
get: function () {
console.warn( 'THREE.BufferAttribute: .dynamic has been deprecated. Use .usage instead.' );
return this.usage === DynamicDrawUsage;
},
set: function ( /* value */ ) {
console.warn( 'THREE.BufferAttribute: .dynamic has been deprecated. Use .usage instead.' );
this.setUsage( DynamicDrawUsage );
}
}
} );
BufferAttribute.prototype.setDynamic = function ( value ) {
console.warn( 'THREE.BufferAttribute: .setDynamic() has been deprecated. Use .setUsage() instead.' );
this.setUsage( value === true ? DynamicDrawUsage : StaticDrawUsage );
return this;
};
BufferAttribute.prototype.copyIndicesArray = function ( /* indices */ ) {
console.error( 'THREE.BufferAttribute: .copyIndicesArray() has been removed.' );
};
BufferAttribute.prototype.setArray = function ( /* array */ ) {
console.error( 'THREE.BufferAttribute: .setArray has been removed. Use BufferGeometry .setAttribute to replace/resize attribute buffers' );
};
//
BufferGeometry.prototype.addIndex = function ( index ) {
console.warn( 'THREE.BufferGeometry: .addIndex() has been renamed to .setIndex().' );
this.setIndex( index );
};
BufferGeometry.prototype.addAttribute = function ( name, attribute ) {
console.warn( 'THREE.BufferGeometry: .addAttribute() has been renamed to .setAttribute().' );
if ( ! ( attribute && attribute.isBufferAttribute ) && ! ( attribute && attribute.isInterleavedBufferAttribute ) ) {
console.warn( 'THREE.BufferGeometry: .addAttribute() now expects ( name, attribute ).' );
return this.setAttribute( name, new BufferAttribute( arguments[ 1 ], arguments[ 2 ] ) );
}
if ( name === 'index' ) {
console.warn( 'THREE.BufferGeometry.addAttribute: Use .setIndex() for index attribute.' );
this.setIndex( attribute );
return this;
}
return this.setAttribute( name, attribute );
};
BufferGeometry.prototype.addDrawCall = function ( start, count, indexOffset ) {
if ( indexOffset !== undefined ) {
console.warn( 'THREE.BufferGeometry: .addDrawCall() no longer supports indexOffset.' );
}
console.warn( 'THREE.BufferGeometry: .addDrawCall() is now .addGroup().' );
this.addGroup( start, count );
};
BufferGeometry.prototype.clearDrawCalls = function () {
console.warn( 'THREE.BufferGeometry: .clearDrawCalls() is now .clearGroups().' );
this.clearGroups();
};
BufferGeometry.prototype.computeOffsets = function () {
console.warn( 'THREE.BufferGeometry: .computeOffsets() has been removed.' );
};
BufferGeometry.prototype.removeAttribute = function ( name ) {
console.warn( 'THREE.BufferGeometry: .removeAttribute() has been renamed to .deleteAttribute().' );
return this.deleteAttribute( name );
};
BufferGeometry.prototype.applyMatrix = function ( matrix ) {
console.warn( 'THREE.BufferGeometry: .applyMatrix() has been renamed to .applyMatrix4().' );
return this.applyMatrix4( matrix );
};
Object.defineProperties( BufferGeometry.prototype, {
drawcalls: {
get: function () {
console.error( 'THREE.BufferGeometry: .drawcalls has been renamed to .groups.' );
return this.groups;
}
},
offsets: {
get: function () {
console.warn( 'THREE.BufferGeometry: .offsets has been renamed to .groups.' );
return this.groups;
}
}
} );
InterleavedBuffer.prototype.setDynamic = function ( value ) {
console.warn( 'THREE.InterleavedBuffer: .setDynamic() has been deprecated. Use .setUsage() instead.' );
this.setUsage( value === true ? DynamicDrawUsage : StaticDrawUsage );
return this;
};
InterleavedBuffer.prototype.setArray = function ( /* array */ ) {
console.error( 'THREE.InterleavedBuffer: .setArray has been removed. Use BufferGeometry .setAttribute to replace/resize attribute buffers' );
};
//
Scene.prototype.dispose = function () {
console.error( 'THREE.Scene: .dispose() has been removed.' );
};
//
Object.defineProperties( Material$1.prototype, {
wrapAround: {
get: function () {
console.warn( 'THREE.Material: .wrapAround has been removed.' );
},
set: function () {
console.warn( 'THREE.Material: .wrapAround has been removed.' );
}
},
overdraw: {
get: function () {
console.warn( 'THREE.Material: .overdraw has been removed.' );
},
set: function () {
console.warn( 'THREE.Material: .overdraw has been removed.' );
}
},
wrapRGB: {
get: function () {
console.warn( 'THREE.Material: .wrapRGB has been removed.' );
return new Color();
}
},
shading: {
get: function () {
console.error( 'THREE.' + this.type + ': .shading has been removed. Use the boolean .flatShading instead.' );
},
set: function ( value ) {
console.warn( 'THREE.' + this.type + ': .shading has been removed. Use the boolean .flatShading instead.' );
this.flatShading = ( value === FlatShading );
}
},
stencilMask: {
get: function () {
console.warn( 'THREE.' + this.type + ': .stencilMask has been removed. Use .stencilFuncMask instead.' );
return this.stencilFuncMask;
},
set: function ( value ) {
console.warn( 'THREE.' + this.type + ': .stencilMask has been removed. Use .stencilFuncMask instead.' );
this.stencilFuncMask = value;
}
}
} );
Object.defineProperties( ShaderMaterial.prototype, {
derivatives: {
get: function () {
console.warn( 'THREE.ShaderMaterial: .derivatives has been moved to .extensions.derivatives.' );
return this.extensions.derivatives;
},
set: function ( value ) {
			console.warn( 'THREE.ShaderMaterial: .derivatives has been moved to .extensions.derivatives.' );
this.extensions.derivatives = value;
}
}
} );
//
WebGLRenderer.prototype.clearTarget = function ( renderTarget, color, depth, stencil ) {
console.warn( 'THREE.WebGLRenderer: .clearTarget() has been deprecated. Use .setRenderTarget() and .clear() instead.' );
this.setRenderTarget( renderTarget );
this.clear( color, depth, stencil );
};
WebGLRenderer.prototype.animate = function ( callback ) {
console.warn( 'THREE.WebGLRenderer: .animate() is now .setAnimationLoop().' );
this.setAnimationLoop( callback );
};
WebGLRenderer.prototype.getCurrentRenderTarget = function () {
console.warn( 'THREE.WebGLRenderer: .getCurrentRenderTarget() is now .getRenderTarget().' );
return this.getRenderTarget();
};
WebGLRenderer.prototype.getMaxAnisotropy = function () {
console.warn( 'THREE.WebGLRenderer: .getMaxAnisotropy() is now .capabilities.getMaxAnisotropy().' );
return this.capabilities.getMaxAnisotropy();
};
WebGLRenderer.prototype.getPrecision = function () {
console.warn( 'THREE.WebGLRenderer: .getPrecision() is now .capabilities.precision.' );
return this.capabilities.precision;
};
WebGLRenderer.prototype.resetGLState = function () {
console.warn( 'THREE.WebGLRenderer: .resetGLState() is now .state.reset().' );
return this.state.reset();
};
WebGLRenderer.prototype.supportsFloatTextures = function () {
console.warn( 'THREE.WebGLRenderer: .supportsFloatTextures() is now .extensions.get( \'OES_texture_float\' ).' );
return this.extensions.get( 'OES_texture_float' );
};
WebGLRenderer.prototype.supportsHalfFloatTextures = function () {
console.warn( 'THREE.WebGLRenderer: .supportsHalfFloatTextures() is now .extensions.get( \'OES_texture_half_float\' ).' );
return this.extensions.get( 'OES_texture_half_float' );
};
WebGLRenderer.prototype.supportsStandardDerivatives = function () {
console.warn( 'THREE.WebGLRenderer: .supportsStandardDerivatives() is now .extensions.get( \'OES_standard_derivatives\' ).' );
return this.extensions.get( 'OES_standard_derivatives' );
};
WebGLRenderer.prototype.supportsCompressedTextureS3TC = function () {
console.warn( 'THREE.WebGLRenderer: .supportsCompressedTextureS3TC() is now .extensions.get( \'WEBGL_compressed_texture_s3tc\' ).' );
return this.extensions.get( 'WEBGL_compressed_texture_s3tc' );
};
WebGLRenderer.prototype.supportsCompressedTexturePVRTC = function () {
console.warn( 'THREE.WebGLRenderer: .supportsCompressedTexturePVRTC() is now .extensions.get( \'WEBGL_compressed_texture_pvrtc\' ).' );
return this.extensions.get( 'WEBGL_compressed_texture_pvrtc' );
};
WebGLRenderer.prototype.supportsBlendMinMax = function () {
console.warn( 'THREE.WebGLRenderer: .supportsBlendMinMax() is now .extensions.get( \'EXT_blend_minmax\' ).' );
return this.extensions.get( 'EXT_blend_minmax' );
};
WebGLRenderer.prototype.supportsVertexTextures = function () {
console.warn( 'THREE.WebGLRenderer: .supportsVertexTextures() is now .capabilities.vertexTextures.' );
return this.capabilities.vertexTextures;
};
WebGLRenderer.prototype.supportsInstancedArrays = function () {
console.warn( 'THREE.WebGLRenderer: .supportsInstancedArrays() is now .extensions.get( \'ANGLE_instanced_arrays\' ).' );
return this.extensions.get( 'ANGLE_instanced_arrays' );
};
WebGLRenderer.prototype.enableScissorTest = function ( boolean ) {
console.warn( 'THREE.WebGLRenderer: .enableScissorTest() is now .setScissorTest().' );
this.setScissorTest( boolean );
};
WebGLRenderer.prototype.initMaterial = function () {
console.warn( 'THREE.WebGLRenderer: .initMaterial() has been removed.' );
};
WebGLRenderer.prototype.addPrePlugin = function () {
console.warn( 'THREE.WebGLRenderer: .addPrePlugin() has been removed.' );
};
WebGLRenderer.prototype.addPostPlugin = function () {
console.warn( 'THREE.WebGLRenderer: .addPostPlugin() has been removed.' );
};
WebGLRenderer.prototype.updateShadowMap = function () {
console.warn( 'THREE.WebGLRenderer: .updateShadowMap() has been removed.' );
};
WebGLRenderer.prototype.setFaceCulling = function () {
console.warn( 'THREE.WebGLRenderer: .setFaceCulling() has been removed.' );
};
WebGLRenderer.prototype.allocTextureUnit = function () {
console.warn( 'THREE.WebGLRenderer: .allocTextureUnit() has been removed.' );
};
WebGLRenderer.prototype.setTexture = function () {
console.warn( 'THREE.WebGLRenderer: .setTexture() has been removed.' );
};
WebGLRenderer.prototype.setTexture2D = function () {
console.warn( 'THREE.WebGLRenderer: .setTexture2D() has been removed.' );
};
WebGLRenderer.prototype.setTextureCube = function () {
console.warn( 'THREE.WebGLRenderer: .setTextureCube() has been removed.' );
};
WebGLRenderer.prototype.getActiveMipMapLevel = function () {
console.warn( 'THREE.WebGLRenderer: .getActiveMipMapLevel() is now .getActiveMipmapLevel().' );
return this.getActiveMipmapLevel();
};
Object.defineProperties( WebGLRenderer.prototype, {
shadowMapEnabled: {
get: function () {
return this.shadowMap.enabled;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderer: .shadowMapEnabled is now .shadowMap.enabled.' );
this.shadowMap.enabled = value;
}
},
shadowMapType: {
get: function () {
return this.shadowMap.type;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderer: .shadowMapType is now .shadowMap.type.' );
this.shadowMap.type = value;
}
},
shadowMapCullFace: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMapCullFace has been removed. Set Material.shadowSide instead.' );
return undefined;
},
set: function ( /* value */ ) {
console.warn( 'THREE.WebGLRenderer: .shadowMapCullFace has been removed. Set Material.shadowSide instead.' );
}
},
context: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .context has been removed. Use .getContext() instead.' );
return this.getContext();
}
},
vr: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .vr has been renamed to .xr' );
return this.xr;
}
},
gammaInput: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .gammaInput has been removed. Set the encoding for textures via Texture.encoding instead.' );
return false;
},
set: function () {
console.warn( 'THREE.WebGLRenderer: .gammaInput has been removed. Set the encoding for textures via Texture.encoding instead.' );
}
},
gammaOutput: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .gammaOutput has been removed. Set WebGLRenderer.outputEncoding instead.' );
return false;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderer: .gammaOutput has been removed. Set WebGLRenderer.outputEncoding instead.' );
this.outputEncoding = ( value === true ) ? sRGBEncoding : LinearEncoding;
}
},
toneMappingWhitePoint: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .toneMappingWhitePoint has been removed.' );
return 1.0;
},
set: function () {
console.warn( 'THREE.WebGLRenderer: .toneMappingWhitePoint has been removed.' );
}
},
} );
Object.defineProperties( WebGLShadowMap.prototype, {
cullFace: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMap.cullFace has been removed. Set Material.shadowSide instead.' );
return undefined;
},
set: function ( /* cullFace */ ) {
console.warn( 'THREE.WebGLRenderer: .shadowMap.cullFace has been removed. Set Material.shadowSide instead.' );
}
},
renderReverseSided: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMap.renderReverseSided has been removed. Set Material.shadowSide instead.' );
return undefined;
},
set: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMap.renderReverseSided has been removed. Set Material.shadowSide instead.' );
}
},
renderSingleSided: {
get: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMap.renderSingleSided has been removed. Set Material.shadowSide instead.' );
return undefined;
},
set: function () {
console.warn( 'THREE.WebGLRenderer: .shadowMap.renderSingleSided has been removed. Set Material.shadowSide instead.' );
}
}
} );
//
Object.defineProperties( WebGLRenderTarget.prototype, {
wrapS: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .wrapS is now .texture.wrapS.' );
return this.texture.wrapS;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .wrapS is now .texture.wrapS.' );
this.texture.wrapS = value;
}
},
wrapT: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .wrapT is now .texture.wrapT.' );
return this.texture.wrapT;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .wrapT is now .texture.wrapT.' );
this.texture.wrapT = value;
}
},
magFilter: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .magFilter is now .texture.magFilter.' );
return this.texture.magFilter;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .magFilter is now .texture.magFilter.' );
this.texture.magFilter = value;
}
},
minFilter: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .minFilter is now .texture.minFilter.' );
return this.texture.minFilter;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .minFilter is now .texture.minFilter.' );
this.texture.minFilter = value;
}
},
anisotropy: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .anisotropy is now .texture.anisotropy.' );
return this.texture.anisotropy;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .anisotropy is now .texture.anisotropy.' );
this.texture.anisotropy = value;
}
},
offset: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .offset is now .texture.offset.' );
return this.texture.offset;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .offset is now .texture.offset.' );
this.texture.offset = value;
}
},
repeat: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .repeat is now .texture.repeat.' );
return this.texture.repeat;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .repeat is now .texture.repeat.' );
this.texture.repeat = value;
}
},
format: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .format is now .texture.format.' );
return this.texture.format;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .format is now .texture.format.' );
this.texture.format = value;
}
},
type: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .type is now .texture.type.' );
return this.texture.type;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .type is now .texture.type.' );
this.texture.type = value;
}
},
generateMipmaps: {
get: function () {
console.warn( 'THREE.WebGLRenderTarget: .generateMipmaps is now .texture.generateMipmaps.' );
return this.texture.generateMipmaps;
},
set: function ( value ) {
console.warn( 'THREE.WebGLRenderTarget: .generateMipmaps is now .texture.generateMipmaps.' );
this.texture.generateMipmaps = value;
}
}
} );
//
CubeCamera.prototype.updateCubeMap = function ( renderer, scene ) {
console.warn( 'THREE.CubeCamera: .updateCubeMap() is now .update().' );
return this.update( renderer, scene );
};
CubeCamera.prototype.clear = function ( renderer, color, depth, stencil ) {
console.warn( 'THREE.CubeCamera: .clear() is now .renderTarget.clear().' );
return this.renderTarget.clear( renderer, color, depth, stencil );
};
ImageUtils.crossOrigin = undefined;
ImageUtils.loadTexture = function ( url, mapping, onLoad, onError ) {
console.warn( 'THREE.ImageUtils.loadTexture has been deprecated. Use THREE.TextureLoader() instead.' );
const loader = new TextureLoader();
loader.setCrossOrigin( this.crossOrigin );
const texture = loader.load( url, onLoad, undefined, onError );
if ( mapping ) texture.mapping = mapping;
return texture;
};
ImageUtils.loadTextureCube = function ( urls, mapping, onLoad, onError ) {
console.warn( 'THREE.ImageUtils.loadTextureCube has been deprecated. Use THREE.CubeTextureLoader() instead.' );
const loader = new CubeTextureLoader();
loader.setCrossOrigin( this.crossOrigin );
const texture = loader.load( urls, onLoad, undefined, onError );
if ( mapping ) texture.mapping = mapping;
return texture;
};
ImageUtils.loadCompressedTexture = function () {
console.error( 'THREE.ImageUtils.loadCompressedTexture has been removed. Use THREE.DDSLoader instead.' );
};
ImageUtils.loadCompressedTextureCube = function () {
console.error( 'THREE.ImageUtils.loadCompressedTextureCube has been removed. Use THREE.DDSLoader instead.' );
};
if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) {
/* eslint-disable no-undef */
__THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'register', { detail: {
revision: REVISION,
} } ) );
/* eslint-enable no-undef */
}
if ( typeof window !== 'undefined' ) {
if ( window.__THREE__ ) {
console.warn( 'WARNING: Multiple instances of Three.js being imported.' );
} else {
window.__THREE__ = REVISION;
}
}
const _taskCache$1 = new WeakMap();
class DRACOLoader extends Loader {
constructor( manager ) {
super( manager );
this.decoderPath = '';
this.decoderConfig = {};
this.decoderBinary = null;
this.decoderPending = null;
this.workerLimit = 4;
this.workerPool = [];
this.workerNextTaskID = 1;
this.workerSourceURL = '';
this.defaultAttributeIDs = {
position: 'POSITION',
normal: 'NORMAL',
color: 'COLOR',
uv: 'TEX_COORD'
};
this.defaultAttributeTypes = {
position: 'Float32Array',
normal: 'Float32Array',
color: 'Float32Array',
uv: 'Float32Array'
};
}
setDecoderPath( path ) {
this.decoderPath = path;
return this;
}
setDecoderConfig( config ) {
this.decoderConfig = config;
return this;
}
setWorkerLimit( workerLimit ) {
this.workerLimit = workerLimit;
return this;
}
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, ( buffer ) => {
const taskConfig = {
attributeIDs: this.defaultAttributeIDs,
attributeTypes: this.defaultAttributeTypes,
useUniqueIDs: false
};
this.decodeGeometry( buffer, taskConfig )
.then( onLoad )
.catch( onError );
}, onProgress, onError );
}
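	// Illustrative usage (not part of the library). The decoder path and file name below are
	// examples only; assumes a Scene `scene` in the surrounding application:
	//
	//   const dracoLoader = new DRACOLoader();
	//   dracoLoader.setDecoderPath( '/draco/' );
	//   dracoLoader.load( 'model.drc', ( geometry ) => {
	//     scene.add( new Mesh( geometry, new MeshStandardMaterial() ) );
	//   } );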
/** @deprecated Kept for backward-compatibility with previous DRACOLoader versions. */
decodeDracoFile( buffer, callback, attributeIDs, attributeTypes ) {
const taskConfig = {
attributeIDs: attributeIDs || this.defaultAttributeIDs,
attributeTypes: attributeTypes || this.defaultAttributeTypes,
useUniqueIDs: !! attributeIDs
};
this.decodeGeometry( buffer, taskConfig ).then( callback );
}
decodeGeometry( buffer, taskConfig ) {
// TODO: For backward-compatibility, support 'attributeTypes' objects containing
// references (rather than names) to typed array constructors. These must be
// serialized before sending them to the worker.
for ( const attribute in taskConfig.attributeTypes ) {
const type = taskConfig.attributeTypes[ attribute ];
if ( type.BYTES_PER_ELEMENT !== undefined ) {
taskConfig.attributeTypes[ attribute ] = type.name;
}
}
//
const taskKey = JSON.stringify( taskConfig );
// Check for an existing task using this buffer. A transferred buffer cannot be transferred
// again from this thread.
if ( _taskCache$1.has( buffer ) ) {
const cachedTask = _taskCache$1.get( buffer );
if ( cachedTask.key === taskKey ) {
return cachedTask.promise;
} else if ( buffer.byteLength === 0 ) {
// Technically, it would be possible to wait for the previous task to complete,
// transfer the buffer back, and decode again with the second configuration. That
// is complex, and I don't know of any reason to decode a Draco buffer twice in
// different ways, so this is left unimplemented.
throw new Error(
'THREE.DRACOLoader: Unable to re-decode a buffer with different ' +
'settings. Buffer has already been transferred.'
);
}
}
//
let worker;
const taskID = this.workerNextTaskID ++;
const taskCost = buffer.byteLength;
// Obtain a worker and assign a task, and construct a geometry instance
// when the task completes.
const geometryPending = this._getWorker( taskID, taskCost )
.then( ( _worker ) => {
worker = _worker;
return new Promise( ( resolve, reject ) => {
worker._callbacks[ taskID ] = { resolve, reject };
worker.postMessage( { type: 'decode', id: taskID, taskConfig, buffer }, [ buffer ] );
// this.debug();
} );
} )
.then( ( message ) => this._createGeometry( message.geometry ) );
// Remove task from the task list.
// Note: replaced '.finally()' with '.catch().then()' block - iOS 11 support (#19416)
geometryPending
.catch( () => true )
.then( () => {
if ( worker && taskID ) {
this._releaseTask( worker, taskID );
// this.debug();
}
} );
// Cache the task result.
_taskCache$1.set( buffer, {
key: taskKey,
promise: geometryPending
} );
return geometryPending;
}
_createGeometry( geometryData ) {
const geometry = new BufferGeometry();
if ( geometryData.index ) {
geometry.setIndex( new BufferAttribute( geometryData.index.array, 1 ) );
}
for ( let i = 0; i < geometryData.attributes.length; i ++ ) {
const attribute = geometryData.attributes[ i ];
const name = attribute.name;
const array = attribute.array;
const itemSize = attribute.itemSize;
geometry.setAttribute( name, new BufferAttribute( array, itemSize ) );
}
return geometry;
}
_loadLibrary( url, responseType ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.decoderPath );
loader.setResponseType( responseType );
loader.setWithCredentials( this.withCredentials );
return new Promise( ( resolve, reject ) => {
loader.load( url, resolve, undefined, reject );
} );
}
preload() {
this._initDecoder();
return this;
}
_initDecoder() {
if ( this.decoderPending ) return this.decoderPending;
const useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
const librariesPending = [];
if ( useJS ) {
librariesPending.push( this._loadLibrary( 'draco_decoder.js', 'text' ) );
} else {
librariesPending.push( this._loadLibrary( 'draco_wasm_wrapper.js', 'text' ) );
librariesPending.push( this._loadLibrary( 'draco_decoder.wasm', 'arraybuffer' ) );
}
this.decoderPending = Promise.all( librariesPending )
.then( ( libraries ) => {
const jsContent = libraries[ 0 ];
if ( ! useJS ) {
this.decoderConfig.wasmBinary = libraries[ 1 ];
}
const fn = DRACOWorker.toString();
const body = [
'/* draco decoder */',
jsContent,
'',
'/* worker */',
fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
].join( '\n' );
this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
} );
return this.decoderPending;
}
_getWorker( taskID, taskCost ) {
return this._initDecoder().then( () => {
if ( this.workerPool.length < this.workerLimit ) {
const worker = new Worker( this.workerSourceURL );
worker._callbacks = {};
worker._taskCosts = {};
worker._taskLoad = 0;
worker.postMessage( { type: 'init', decoderConfig: this.decoderConfig } );
worker.onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'decode':
worker._callbacks[ message.id ].resolve( message );
break;
case 'error':
worker._callbacks[ message.id ].reject( message );
break;
default:
console.error( 'THREE.DRACOLoader: Unexpected message, "' + message.type + '"' );
}
};
this.workerPool.push( worker );
} else {
this.workerPool.sort( function ( a, b ) {
return a._taskLoad > b._taskLoad ? - 1 : 1;
} );
}
const worker = this.workerPool[ this.workerPool.length - 1 ];
worker._taskCosts[ taskID ] = taskCost;
worker._taskLoad += taskCost;
return worker;
} );
}
_releaseTask( worker, taskID ) {
worker._taskLoad -= worker._taskCosts[ taskID ];
delete worker._callbacks[ taskID ];
delete worker._taskCosts[ taskID ];
}
debug() {
console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );
}
dispose() {
for ( let i = 0; i < this.workerPool.length; ++ i ) {
this.workerPool[ i ].terminate();
}
this.workerPool.length = 0;
return this;
}
}
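/*
 * Usage sketch: a minimal, hedged example of driving DRACOLoader on its own.
 * The decoder path, the 'model.drc' URL, and the `scene`, `Mesh`, and
 * `MeshStandardMaterial` identifiers are assumptions standing in for the
 * host application's own setup, not values prescribed by this file.
 *
 *   const dracoLoader = new DRACOLoader();
 *   dracoLoader.setDecoderPath( '/path/to/draco/' );   // hypothetical location of draco_decoder.js / draco_decoder.wasm
 *   dracoLoader.setDecoderConfig( { type: 'js' } );    // optional: force the JS decoder instead of WASM
 *   dracoLoader.load( 'model.drc', ( geometry ) => {
 *     // geometry is a BufferGeometry decoded off the main thread
 *     scene.add( new Mesh( geometry, new MeshStandardMaterial() ) );
 *   }, undefined, ( error ) => console.error( error ) );
 *   // dracoLoader.dispose() terminates the worker pool when no more decodes are expected.
 */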
/* WEB WORKER */
function DRACOWorker() {
let decoderConfig;
let decoderPending;
onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'init':
decoderConfig = message.decoderConfig;
decoderPending = new Promise( function ( resolve/*, reject*/ ) {
decoderConfig.onModuleLoaded = function ( draco ) {
// The decoder module object is thenable (Promise-like); wrap it in a plain
// object before resolving so the promise chain does not keep unwrapping it.
resolve( { draco: draco } );
};
DracoDecoderModule( decoderConfig ); // eslint-disable-line no-undef
} );
break;
case 'decode':
const buffer = message.buffer;
const taskConfig = message.taskConfig;
decoderPending.then( ( module ) => {
const draco = module.draco;
const decoder = new draco.Decoder();
const decoderBuffer = new draco.DecoderBuffer();
decoderBuffer.Init( new Int8Array( buffer ), buffer.byteLength );
try {
const geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
const buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );
if ( geometry.index ) buffers.push( geometry.index.array.buffer );
self.postMessage( { type: 'decode', id: message.id, geometry }, buffers );
} catch ( error ) {
console.error( error );
self.postMessage( { type: 'error', id: message.id, error: error.message } );
} finally {
draco.destroy( decoderBuffer );
draco.destroy( decoder );
}
} );
break;
}
};
function decodeGeometry( draco, decoder, decoderBuffer, taskConfig ) {
const attributeIDs = taskConfig.attributeIDs;
const attributeTypes = taskConfig.attributeTypes;
let dracoGeometry;
let decodingStatus;
const geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
if ( geometryType === draco.TRIANGULAR_MESH ) {
dracoGeometry = new draco.Mesh();
decodingStatus = decoder.DecodeBufferToMesh( decoderBuffer, dracoGeometry );
} else if ( geometryType === draco.POINT_CLOUD ) {
dracoGeometry = new draco.PointCloud();
decodingStatus = decoder.DecodeBufferToPointCloud( decoderBuffer, dracoGeometry );
} else {
throw new Error( 'THREE.DRACOLoader: Unexpected geometry type.' );
}
if ( ! decodingStatus.ok() || dracoGeometry.ptr === 0 ) {
throw new Error( 'THREE.DRACOLoader: Decoding failed: ' + decodingStatus.error_msg() );
}
const geometry = { index: null, attributes: [] };
// Gather all vertex attributes.
for ( const attributeName in attributeIDs ) {
const attributeType = self[ attributeTypes[ attributeName ] ];
let attribute;
let attributeID;
// A Draco file may be created with default vertex attributes, whose attribute IDs
// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
// a Draco file may contain a custom set of attributes, identified by known unique
// IDs. glTF files always do the latter, and `.drc` files typically do the former.
if ( taskConfig.useUniqueIDs ) {
attributeID = attributeIDs[ attributeName ];
attribute = decoder.GetAttributeByUniqueId( dracoGeometry, attributeID );
} else {
attributeID = decoder.GetAttributeId( dracoGeometry, draco[ attributeIDs[ attributeName ] ] );
if ( attributeID === - 1 ) continue;
attribute = decoder.GetAttribute( dracoGeometry, attributeID );
}
geometry.attributes.push( decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) );
}
// Add index.
if ( geometryType === draco.TRIANGULAR_MESH ) {
geometry.index = decodeIndex( draco, decoder, dracoGeometry );
}
draco.destroy( dracoGeometry );
return geometry;
}
function decodeIndex( draco, decoder, dracoGeometry ) {
const numFaces = dracoGeometry.num_faces();
const numIndices = numFaces * 3;
const byteLength = numIndices * 4;
const ptr = draco._malloc( byteLength );
decoder.GetTrianglesUInt32Array( dracoGeometry, byteLength, ptr );
const index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
draco._free( ptr );
return { array: index, itemSize: 1 };
}
function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {
const numComponents = attribute.num_components();
const numPoints = dracoGeometry.num_points();
const numValues = numPoints * numComponents;
const byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
const dataType = getDracoDataType( draco, attributeType );
const ptr = draco._malloc( byteLength );
decoder.GetAttributeDataArrayForAllPoints( dracoGeometry, attribute, dataType, byteLength, ptr );
const array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
draco._free( ptr );
return {
name: attributeName,
array: array,
itemSize: numComponents
};
}
function getDracoDataType( draco, attributeType ) {
switch ( attributeType ) {
case Float32Array: return draco.DT_FLOAT32;
case Int8Array: return draco.DT_INT8;
case Int16Array: return draco.DT_INT16;
case Int32Array: return draco.DT_INT32;
case Uint8Array: return draco.DT_UINT8;
case Uint16Array: return draco.DT_UINT16;
case Uint32Array: return draco.DT_UINT32;
}
}
}
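/*
 * Message-protocol sketch for the worker above; every field is taken from
 * the postMessage calls in this file:
 *
 *   main -> worker : { type: 'init',   decoderConfig }            // once per worker
 *   main -> worker : { type: 'decode', id, taskConfig, buffer }   // buffer is transferred, not copied
 *   worker -> main : { type: 'decode', id, geometry }             // attribute/index buffers transferred back
 *   worker -> main : { type: 'error',  id, error }                // error.message from the failed decode
 */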
class GLTFLoader extends Loader {
constructor( manager ) {
super( manager );
this.dracoLoader = null;
this.ktx2Loader = null;
this.meshoptDecoder = null;
this.pluginCallbacks = [];
this.register( function ( parser ) {
return new GLTFMaterialsClearcoatExtension( parser );
} );
this.register( function ( parser ) {
return new GLTFTextureBasisUExtension( parser );
} );
this.register( function ( parser ) {
return new GLTFTextureWebPExtension( parser );
} );
this.register( function ( parser ) {
return new GLTFMaterialsTransmissionExtension( parser );
} );
this.register( function ( parser ) {
return new GLTFLightsExtension( parser );
} );
this.register( function ( parser ) {
return new GLTFMeshoptCompression( parser );
} );
}
load( url, onLoad, onProgress, onError ) {
const scope = this;
let resourcePath;
if ( this.resourcePath !== '' ) {
resourcePath = this.resourcePath;
} else if ( this.path !== '' ) {
resourcePath = this.path;
} else {
resourcePath = LoaderUtils.extractUrlBase( url );
}
// Tells the LoadingManager to track an extra item, which resolves after
// the model is fully loaded. This means the count of items loaded will
// be incorrect, but ensures manager.onLoad() does not fire early.
this.manager.itemStart( url );
const _onError = function ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
};
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( data ) {
try {
scope.parse( data, resourcePath, function ( gltf ) {
onLoad( gltf );
scope.manager.itemEnd( url );
}, _onError );
} catch ( e ) {
_onError( e );
}
}, onProgress, _onError );
}
setDRACOLoader( dracoLoader ) {
this.dracoLoader = dracoLoader;
return this;
}
setDDSLoader() {
throw new Error(
'THREE.GLTFLoader: "MSFT_texture_dds" no longer supported. Please update to "KHR_texture_basisu".'
);
}
setKTX2Loader( ktx2Loader ) {
this.ktx2Loader = ktx2Loader;
return this;
}
setMeshoptDecoder( meshoptDecoder ) {
this.meshoptDecoder = meshoptDecoder;
return this;
}
register( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) === - 1 ) {
this.pluginCallbacks.push( callback );
}
return this;
}
unregister( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) !== - 1 ) {
this.pluginCallbacks.splice( this.pluginCallbacks.indexOf( callback ), 1 );
}
return this;
}
parse( data, path, onLoad, onError ) {
let content;
const extensions = {};
const plugins = {};
if ( typeof data === 'string' ) {
content = data;
} else {
const magic = LoaderUtils.decodeText( new Uint8Array( data, 0, 4 ) );
if ( magic === BINARY_EXTENSION_HEADER_MAGIC ) {
try {
extensions[ EXTENSIONS.KHR_BINARY_GLTF ] = new GLTFBinaryExtension( data );
} catch ( error ) {
if ( onError ) onError( error );
return;
}
content = extensions[ EXTENSIONS.KHR_BINARY_GLTF ].content;
} else {
content = LoaderUtils.decodeText( new Uint8Array( data ) );
}
}
const json = JSON.parse( content );
if ( json.asset === undefined || json.asset.version[ 0 ] < 2 ) {
if ( onError ) onError( new Error( 'THREE.GLTFLoader: Unsupported asset. glTF versions >=2.0 are supported.' ) );
return;
}
const parser = new GLTFParser( json, {
path: path || this.resourcePath || '',
crossOrigin: this.crossOrigin,
requestHeader: this.requestHeader,
manager: this.manager,
ktx2Loader: this.ktx2Loader,
meshoptDecoder: this.meshoptDecoder
} );
parser.fileLoader.setRequestHeader( this.requestHeader );
for ( let i = 0; i < this.pluginCallbacks.length; i ++ ) {
const plugin = this.pluginCallbacks[ i ]( parser );
plugins[ plugin.name ] = plugin;
// Workaround so plugin-provided extensions are not treated as unknown
// by addUnknownExtensionsToUserData().
// Remove this workaround once all the existing extension handlers
// have been moved to the plugin system.
extensions[ plugin.name ] = true;
}
if ( json.extensionsUsed ) {
for ( let i = 0; i < json.extensionsUsed.length; ++ i ) {
const extensionName = json.extensionsUsed[ i ];
const extensionsRequired = json.extensionsRequired || [];
switch ( extensionName ) {
case EXTENSIONS.KHR_MATERIALS_UNLIT:
extensions[ extensionName ] = new GLTFMaterialsUnlitExtension$1();
break;
case EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS:
extensions[ extensionName ] = new GLTFMaterialsPbrSpecularGlossinessExtension();
break;
case EXTENSIONS.KHR_DRACO_MESH_COMPRESSION:
extensions[ extensionName ] = new GLTFDracoMeshCompressionExtension( json, this.dracoLoader );
break;
case EXTENSIONS.KHR_TEXTURE_TRANSFORM:
extensions[ extensionName ] = new GLTFTextureTransformExtension();
break;
case EXTENSIONS.KHR_MESH_QUANTIZATION:
extensions[ extensionName ] = new GLTFMeshQuantizationExtension();
break;
default:
if ( extensionsRequired.indexOf( extensionName ) >= 0 && plugins[ extensionName ] === undefined ) {
console.warn( 'THREE.GLTFLoader: Unknown extension "' + extensionName + '".' );
}
}
}
}
parser.setExtensions( extensions );
parser.setPlugins( plugins );
parser.parse( onLoad, onError );
}
}
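/*
 * Usage sketch: wiring the optional DRACO decoder into GLTFLoader before
 * loading a model. The decoder path, the 'model.glb' URL, and the `scene`
 * object are assumptions for the host application; setKTX2Loader() and
 * setMeshoptDecoder() are only needed when the asset uses those extensions.
 *
 *   const dracoLoader = new DRACOLoader();
 *   dracoLoader.setDecoderPath( '/path/to/draco/' );   // hypothetical decoder location
 *
 *   const gltfLoader = new GLTFLoader();
 *   gltfLoader.setDRACOLoader( dracoLoader );
 *   gltfLoader.load(
 *     'model.glb',                                     // placeholder URL
 *     ( gltf ) => scene.add( gltf.scene ),             // result also exposes .scenes, .animations, .cameras, .asset
 *     undefined,                                       // optional onProgress
 *     ( error ) => console.error( error )
 *   );
 */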
/* GLTFREGISTRY */
function GLTFRegistry() {
let objects = {};
return {
get: function ( key ) {
return objects[ key ];
},
add: function ( key, object ) {
objects[ key ] = object;
},
remove: function ( key ) {
delete objects[ key ];
},
removeAll: function () {
objects = {};
}
};
}
/*********************************/
/********** EXTENSIONS ***********/
/*********************************/
const EXTENSIONS = {
KHR_BINARY_GLTF: 'KHR_binary_glTF',
KHR_DRACO_MESH_COMPRESSION: 'KHR_draco_mesh_compression',
KHR_LIGHTS_PUNCTUAL: 'KHR_lights_punctual',
KHR_MATERIALS_CLEARCOAT: 'KHR_materials_clearcoat',
KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS: 'KHR_materials_pbrSpecularGlossiness',
KHR_MATERIALS_TRANSMISSION: 'KHR_materials_transmission',
KHR_MATERIALS_UNLIT: 'KHR_materials_unlit',
KHR_TEXTURE_BASISU: 'KHR_texture_basisu',
KHR_TEXTURE_TRANSFORM: 'KHR_texture_transform',
KHR_MESH_QUANTIZATION: 'KHR_mesh_quantization',
EXT_TEXTURE_WEBP: 'EXT_texture_webp',
EXT_MESHOPT_COMPRESSION: 'EXT_meshopt_compression'
};
/**
* Punctual Lights Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_lights_punctual
*/
class GLTFLightsExtension {
constructor( parser ) {
this.parser = parser;
this.name = EXTENSIONS.KHR_LIGHTS_PUNCTUAL;
// Object3D instance caches
this.cache = { refs: {}, uses: {} };
}
_markDefs() {
const parser = this.parser;
const nodeDefs = this.parser.json.nodes || [];
for ( let nodeIndex = 0, nodeLength = nodeDefs.length; nodeIndex < nodeLength; nodeIndex ++ ) {
const nodeDef = nodeDefs[ nodeIndex ];
if ( nodeDef.extensions
&& nodeDef.extensions[ this.name ]
&& nodeDef.extensions[ this.name ].light !== undefined ) {
parser._addNodeRef( this.cache, nodeDef.extensions[ this.name ].light );
}
}
}
_loadLight( lightIndex ) {
const parser = this.parser;
const cacheKey = 'light:' + lightIndex;
let dependency = parser.cache.get( cacheKey );
if ( dependency ) return dependency;
const json = parser.json;
const extensions = ( json.extensions && json.extensions[ this.name ] ) || {};
const lightDefs = extensions.lights || [];
const lightDef = lightDefs[ lightIndex ];
let lightNode;
const color = new Color( 0xffffff );
if ( lightDef.color !== undefined ) color.fromArray( lightDef.color );
const range = lightDef.range !== undefined ? lightDef.range : 0;
switch ( lightDef.type ) {
case 'directional':
lightNode = new DirectionalLight( color );
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
case 'point':
lightNode = new PointLight( color );
lightNode.distance = range;
break;
case 'spot':
lightNode = new SpotLight( color );
lightNode.distance = range;
// Handle spotlight properties.
lightDef.spot = lightDef.spot || {};
lightDef.spot.innerConeAngle = lightDef.spot.innerConeAngle !== undefined ? lightDef.spot.innerConeAngle : 0;
lightDef.spot.outerConeAngle = lightDef.spot.outerConeAngle !== undefined ? lightDef.spot.outerConeAngle : Math.PI / 4.0;
lightNode.angle = lightDef.spot.outerConeAngle;
lightNode.penumbra = 1.0 - lightDef.spot.innerConeAngle / lightDef.spot.outerConeAngle;
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
default:
throw new Error( 'THREE.GLTFLoader: Unexpected light type: ' + lightDef.type );
}
// Some lights (e.g. spot) default to a position other than the origin. Reset the position
// here, because node-level parsing will only override position if explicitly specified.
lightNode.position.set( 0, 0, 0 );
lightNode.decay = 2;
if ( lightDef.intensity !== undefined ) lightNode.intensity = lightDef.intensity;
lightNode.name = parser.createUniqueName( lightDef.name || ( 'light_' + lightIndex ) );
dependency = Promise.resolve( lightNode );
parser.cache.add( cacheKey, dependency );
return dependency;
}
createNodeAttachment( nodeIndex ) {
const self = this;
const parser = this.parser;
const json = parser.json;
const nodeDef = json.nodes[ nodeIndex ];
const lightDef = ( nodeDef.extensions && nodeDef.extensions[ this.name ] ) || {};
const lightIndex = lightDef.light;
if ( lightIndex === undefined ) return null;
return this._loadLight( lightIndex ).then( function ( light ) {
return parser._getNodeRef( self.cache, lightIndex, light );
} );
}
}
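/*
 * Data sketch: the shape of KHR_lights_punctual input that _loadLight() and
 * createNodeAttachment() above consume. Values are illustrative, not taken
 * from any particular asset:
 *
 *   "extensions": {
 *     "KHR_lights_punctual": {
 *       "lights": [ { "type": "spot", "color": [ 1, 1, 1 ], "intensity": 10,
 *                     "spot": { "innerConeAngle": 0.2, "outerConeAngle": 0.6 } } ]
 *     }
 *   },
 *   "nodes": [ { "extensions": { "KHR_lights_punctual": { "light": 0 } } } ]
 */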
/**
* Unlit Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_unlit
*/
class GLTFMaterialsUnlitExtension$1 {
constructor() {
this.name = EXTENSIONS.KHR_MATERIALS_UNLIT;
}
getMaterialType() {
return MeshBasicMaterial;
}
extendParams( materialParams, materialDef, parser ) {
const pending = [];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
const metallicRoughness = materialDef.pbrMetallicRoughness;
if ( metallicRoughness ) {
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
const array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
}
return Promise.all( pending );
}
}
/**
* Clearcoat Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_clearcoat
*/
class GLTFMaterialsClearcoatExtension {
constructor( parser ) {
this.parser = parser;
this.name = EXTENSIONS.KHR_MATERIALS_CLEARCOAT;
}
getMaterialType( materialIndex ) {
const parser = this.parser;
const materialDef = parser.json.materials[ materialIndex ];
if ( ! materialDef.extensions || ! materialDef.extensions[ this.name ] ) return null;
return MeshPhysicalMaterial;
}
extendMaterialParams( materialIndex, materialParams ) {
const parser = this.parser;
const materialDef = parser.json.materials[ materialIndex ];
if ( ! materialDef.extensions || ! materialDef.extensions[ this.name ] ) {
return Promise.resolve();
}
const pending = [];
const extension = materialDef.extensions[ this.name ];
if ( extension.clearcoatFactor !== undefined ) {
materialParams.clearcoat = extension.clearcoatFactor;
}
if ( extension.clearcoatTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatMap', extension.clearcoatTexture ) );
}
if ( extension.clearcoatRoughnessFactor !== undefined ) {
materialParams.clearcoatRoughness = extension.clearcoatRoughnessFactor;
}
if ( extension.clearcoatRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatRoughnessMap', extension.clearcoatRoughnessTexture ) );
}
if ( extension.clearcoatNormalTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatNormalMap', extension.clearcoatNormalTexture ) );
if ( extension.clearcoatNormalTexture.scale !== undefined ) {
const scale = extension.clearcoatNormalTexture.scale;
// https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
materialParams.clearcoatNormalScale = new Vector2( scale, - scale );
}
}
return Promise.all( pending );
}
}
/**
* Transmission Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_transmission
* Draft: https://github.com/KhronosGroup/glTF/pull/1698
*/
class GLTFMaterialsTransmissionExtension {
constructor( parser ) {
this.parser = parser;
this.name = EXTENSIONS.KHR_MATERIALS_TRANSMISSION;
}
getMaterialType( materialIndex ) {
const parser = this.parser;
const materialDef = parser.json.materials[ materialIndex ];
if ( ! materialDef.extensions || ! materialDef.extensions[ this.name ] ) return null;
return MeshPhysicalMaterial;
}
extendMaterialParams( materialIndex, materialParams ) {
const parser = this.parser;
const materialDef = parser.json.materials[ materialIndex ];
if ( ! materialDef.extensions || ! materialDef.extensions[ this.name ] ) {
return Promise.resolve();
}
const pending = [];
const extension = materialDef.extensions[ this.name ];
if ( extension.transmissionFactor !== undefined ) {
materialParams.transmission = extension.transmissionFactor;
}
if ( extension.transmissionTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'transmissionMap', extension.transmissionTexture ) );
}
return Promise.all( pending );
}
}
/**
* BasisU Texture Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_texture_basisu
*/
class GLTFTextureBasisUExtension {
constructor( parser ) {
this.parser = parser;
this.name = EXTENSIONS.KHR_TEXTURE_BASISU;
}
loadTexture( textureIndex ) {
const parser = this.parser;
const json = parser.json;
const textureDef = json.textures[ textureIndex ];
if ( ! textureDef.extensions || ! textureDef.extensions[ this.name ] ) {
return null;
}
const extension = textureDef.extensions[ this.name ];
const source = json.images[ extension.source ];
const loader = parser.options.ktx2Loader;
if ( ! loader ) {
if ( json.extensionsRequired && json.extensionsRequired.indexOf( this.name ) >= 0 ) {
throw new Error( 'THREE.GLTFLoader: setKTX2Loader must be called before loading KTX2 textures' );
} else {
// Assumes that the extension is optional and that a fallback texture is present
return null;
}
}
return parser.loadTextureImage( textureIndex, source, loader );
}
}
/**
* WebP Texture Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/EXT_texture_webp
*/
class GLTFTextureWebPExtension {
constructor( parser ) {
this.parser = parser;
this.name = EXTENSIONS.EXT_TEXTURE_WEBP;
this.isSupported = null;
}
loadTexture( textureIndex ) {
const name = this.name;
const parser = this.parser;
const json = parser.json;
const textureDef = json.textures[ textureIndex ];
if ( ! textureDef.extensions || ! textureDef.extensions[ name ] ) {
return null;
}
const extension = textureDef.extensions[ name ];
const source = json.images[ extension.source ];
let loader = parser.textureLoader;
if ( source.uri ) {
const handler = parser.options.manager.getHandler( source.uri );
if ( handler !== null ) loader = handler;
}
return this.detectSupport().then( function ( isSupported ) {
if ( isSupported ) return parser.loadTextureImage( textureIndex, source, loader );
if ( json.extensionsRequired && json.extensionsRequired.indexOf( name ) >= 0 ) {
throw new Error( 'THREE.GLTFLoader: WebP required by asset but unsupported.' );
}
// Fall back to PNG or JPEG.
return parser.loadTexture( textureIndex );
} );
}
detectSupport() {
if ( ! this.isSupported ) {
this.isSupported = new Promise( function ( resolve ) {
const image = new Image();
// Lossy test image. Support for lossy images doesn't guarantee support for all
// WebP images, unfortunately.
image.src = 'data:image/webp;base64,UklGRiIAAABXRUJQVlA4IBYAAAAwAQCdASoBAAEADsD+JaQAA3AAAAAA';
image.onload = image.onerror = function () {
resolve( image.height === 1 );
};
} );
}
return this.isSupported;
}
}
/**
* meshopt BufferView Compression Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/EXT_meshopt_compression
*/
class GLTFMeshoptCompression {
constructor( parser ) {
this.name = EXTENSIONS.EXT_MESHOPT_COMPRESSION;
this.parser = parser;
}
loadBufferView( index ) {
const json = this.parser.json;
const bufferView = json.bufferViews[ index ];
if ( bufferView.extensions && bufferView.extensions[ this.name ] ) {
const extensionDef = bufferView.extensions[ this.name ];
const buffer = this.parser.getDependency( 'buffer', extensionDef.buffer );
const decoder = this.parser.options.meshoptDecoder;
if ( ! decoder || ! decoder.supported ) {
if ( json.extensionsRequired && json.extensionsRequired.indexOf( this.name ) >= 0 ) {
throw new Error( 'THREE.GLTFLoader: setMeshoptDecoder must be called before loading compressed files' );
} else {
// Assumes that the extension is optional and that fallback buffer data is present
return null;
}
}
return Promise.all( [ buffer, decoder.ready ] ).then( function ( res ) {
const byteOffset = extensionDef.byteOffset || 0;
const byteLength = extensionDef.byteLength || 0;
const count = extensionDef.count;
const stride = extensionDef.byteStride;
const result = new ArrayBuffer( count * stride );
const source = new Uint8Array( res[ 0 ], byteOffset, byteLength );
decoder.decodeGltfBuffer( new Uint8Array( result ), count, stride, source, extensionDef.mode, extensionDef.filter );
return result;
} );
} else {
return null;
}
}
}
/* BINARY EXTENSION */
const BINARY_EXTENSION_HEADER_MAGIC = 'glTF';
const BINARY_EXTENSION_HEADER_LENGTH = 12;
const BINARY_EXTENSION_CHUNK_TYPES = { JSON: 0x4E4F534A, BIN: 0x004E4942 };
class GLTFBinaryExtension {
constructor( data ) {
this.name = EXTENSIONS.KHR_BINARY_GLTF;
this.content = null;
this.body = null;
const headerView = new DataView( data, 0, BINARY_EXTENSION_HEADER_LENGTH );
this.header = {
magic: LoaderUtils.decodeText( new Uint8Array( data.slice( 0, 4 ) ) ),
version: headerView.getUint32( 4, true ),
length: headerView.getUint32( 8, true )
};
if ( this.header.magic !== BINARY_EXTENSION_HEADER_MAGIC ) {
throw new Error( 'THREE.GLTFLoader: Unsupported glTF-Binary header.' );
} else if ( this.header.version < 2.0 ) {
throw new Error( 'THREE.GLTFLoader: Legacy binary file detected.' );
}
const chunkContentsLength = this.header.length - BINARY_EXTENSION_HEADER_LENGTH;
const chunkView = new DataView( data, BINARY_EXTENSION_HEADER_LENGTH );
let chunkIndex = 0;
while ( chunkIndex < chunkContentsLength ) {
const chunkLength = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
const chunkType = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.JSON ) {
const contentArray = new Uint8Array( data, BINARY_EXTENSION_HEADER_LENGTH + chunkIndex, chunkLength );
this.content = LoaderUtils.decodeText( contentArray );
} else if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.BIN ) {
const byteOffset = BINARY_EXTENSION_HEADER_LENGTH + chunkIndex;
this.body = data.slice( byteOffset, byteOffset + chunkLength );
}
// Clients must ignore chunks with unknown types.
chunkIndex += chunkLength;
}
if ( this.content === null ) {
throw new Error( 'THREE.GLTFLoader: JSON content not found.' );
}
}
}
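/*
 * Binary layout sketch of what the constructor above walks (constants and
 * little-endian DataView reads as defined in this file):
 *
 *   bytes 0-11 : header -> magic 'glTF' | uint32 version | uint32 total length
 *   then, repeated until the total length is reached:
 *     uint32 chunkLength | uint32 chunkType | chunk payload
 *   chunkType 0x4E4F534A ('JSON') -> decoded into this.content
 *   chunkType 0x004E4942 ('BIN')  -> sliced into this.body
 *   any other chunkType           -> skipped (clients must ignore unknown chunks)
 */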
/**
* DRACO Mesh Compression Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_draco_mesh_compression
*/
class GLTFDracoMeshCompressionExtension {
constructor( json, dracoLoader ) {
if ( ! dracoLoader ) {
throw new Error( 'THREE.GLTFLoader: No DRACOLoader instance provided.' );
}
this.name = EXTENSIONS.KHR_DRACO_MESH_COMPRESSION;
this.json = json;
this.dracoLoader = dracoLoader;
this.dracoLoader.preload();
}
decodePrimitive( primitive, parser ) {
const json = this.json;
const dracoLoader = this.dracoLoader;
const bufferViewIndex = primitive.extensions[ this.name ].bufferView;
const gltfAttributeMap = primitive.extensions[ this.name ].attributes;
const threeAttributeMap = {};
const attributeNormalizedMap = {};
const attributeTypeMap = {};
for ( const attributeName in gltfAttributeMap ) {
const threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
threeAttributeMap[ threeAttributeName ] = gltfAttributeMap[ attributeName ];
}
for ( const attributeName in primitive.attributes ) {
const threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
if ( gltfAttributeMap[ attributeName ] !== undefined ) {
const accessorDef = json.accessors[ primitive.attributes[ attributeName ] ];
const componentType = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
attributeTypeMap[ threeAttributeName ] = componentType;
attributeNormalizedMap[ threeAttributeName ] = accessorDef.normalized === true;
}
}
return parser.getDependency( 'bufferView', bufferViewIndex ).then( function ( bufferView ) {
return new Promise( function ( resolve ) {
dracoLoader.decodeDracoFile( bufferView, function ( geometry ) {
for ( const attributeName in geometry.attributes ) {
const attribute = geometry.attributes[ attributeName ];
const normalized = attributeNormalizedMap[ attributeName ];
if ( normalized !== undefined ) attribute.normalized = normalized;
}
resolve( geometry );
}, threeAttributeMap, attributeTypeMap );
} );
} );
}
}
/**
* Texture Transform Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_texture_transform
*/
class GLTFTextureTransformExtension {
constructor() {
this.name = EXTENSIONS.KHR_TEXTURE_TRANSFORM;
}
extendTexture( texture, transform ) {
texture = texture.clone();
if ( transform.offset !== undefined ) {
texture.offset.fromArray( transform.offset );
}
if ( transform.rotation !== undefined ) {
texture.rotation = transform.rotation;
}
if ( transform.scale !== undefined ) {
texture.repeat.fromArray( transform.scale );
}
if ( transform.texCoord !== undefined ) {
console.warn( 'THREE.GLTFLoader: Custom UV sets in "' + this.name + '" extension not yet supported.' );
}
texture.needsUpdate = true;
return texture;
}
}
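/*
 * Data sketch: a KHR_texture_transform entry as consumed by extendTexture()
 * above, with illustrative values. offset maps to texture.offset, rotation
 * to texture.rotation, scale to texture.repeat; texCoord is reported as
 * unsupported.
 *
 *   { "offset": [ 0.0, 0.5 ], "rotation": 1.5708, "scale": [ 2, 2 ] }
 */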
/**
* Specular-Glossiness Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_pbrSpecularGlossiness
*/
/**
* A sub class of StandardMaterial with some of the functionality
* changed via the `onBeforeCompile` callback
* @pailhead
*/
class GLTFMeshStandardSGMaterial extends MeshStandardMaterial {
constructor( params ) {
super();
this.isGLTFSpecularGlossinessMaterial = true;
// Various shader chunks that need replacing.
const specularMapParsFragmentChunk = [
'#ifdef USE_SPECULARMAP',
' uniform sampler2D specularMap;',
'#endif'
].join( '\n' );
const glossinessMapParsFragmentChunk = [
'#ifdef USE_GLOSSINESSMAP',
' uniform sampler2D glossinessMap;',
'#endif'
].join( '\n' );
const specularMapFragmentChunk = [
'vec3 specularFactor = specular;',
'#ifdef USE_SPECULARMAP',
' vec4 texelSpecular = texture2D( specularMap, vUv );',
' texelSpecular = sRGBToLinear( texelSpecular );',
' // reads channel RGB, compatible with a glTF Specular-Glossiness (RGBA) texture',
' specularFactor *= texelSpecular.rgb;',
'#endif'
].join( '\n' );
const glossinessMapFragmentChunk = [
'float glossinessFactor = glossiness;',
'#ifdef USE_GLOSSINESSMAP',
' vec4 texelGlossiness = texture2D( glossinessMap, vUv );',
' // reads channel A, compatible with a glTF Specular-Glossiness (RGBA) texture',
' glossinessFactor *= texelGlossiness.a;',
'#endif'
].join( '\n' );
const lightPhysicalFragmentChunk = [
'PhysicalMaterial material;',
'material.diffuseColor = diffuseColor.rgb * ( 1. - max( specularFactor.r, max( specularFactor.g, specularFactor.b ) ) );',
'vec3 dxy = max( abs( dFdx( geometryNormal ) ), abs( dFdy( geometryNormal ) ) );',
'float geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );',
'material.specularRoughness = max( 1.0 - glossinessFactor, 0.0525 ); // 0.0525 corresponds to the base mip of a 256 cubemap.',
'material.specularRoughness += geometryRoughness;',
'material.specularRoughness = min( material.specularRoughness, 1.0 );',
'material.specularColor = specularFactor;',
].join( '\n' );
const uniforms = {
specular: { value: new Color().setHex( 0xffffff ) },
glossiness: { value: 1 },
specularMap: { value: null },
glossinessMap: { value: null }
};
this._extraUniforms = uniforms;
this.onBeforeCompile = function ( shader ) {
for ( const uniformName in uniforms ) {
shader.uniforms[ uniformName ] = uniforms[ uniformName ];
}
shader.fragmentShader = shader.fragmentShader
.replace( 'uniform float roughness;', 'uniform vec3 specular;' )
.replace( 'uniform float metalness;', 'uniform float glossiness;' )
.replace( '#include <roughnessmap_pars_fragment>', specularMapParsFragmentChunk )
.replace( '#include <metalnessmap_pars_fragment>', glossinessMapParsFragmentChunk )
.replace( '#include <roughnessmap_fragment>', specularMapFragmentChunk )
.replace( '#include <metalnessmap_fragment>', glossinessMapFragmentChunk )
.replace( '#include <lights_physical_fragment>', lightPhysicalFragmentChunk );
};
Object.defineProperties( this, {
specular: {
get: function () {
return uniforms.specular.value;
},
set: function ( v ) {
uniforms.specular.value = v;
}
},
specularMap: {
get: function () {
return uniforms.specularMap.value;
},
set: function ( v ) {
uniforms.specularMap.value = v;
if ( v ) {
this.defines.USE_SPECULARMAP = ''; // USE_UV is set by the renderer for specular maps
} else {
delete this.defines.USE_SPECULARMAP;
}
}
},
glossiness: {
get: function () {
return uniforms.glossiness.value;
},
set: function ( v ) {
uniforms.glossiness.value = v;
}
},
glossinessMap: {
get: function () {
return uniforms.glossinessMap.value;
},
set: function ( v ) {
uniforms.glossinessMap.value = v;
if ( v ) {
this.defines.USE_GLOSSINESSMAP = '';
this.defines.USE_UV = '';
} else {
delete this.defines.USE_GLOSSINESSMAP;
delete this.defines.USE_UV;
}
}
}
} );
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
this.setValues( params );
}
copy( source ) {
super.copy( source );
this.specularMap = source.specularMap;
this.specular.copy( source.specular );
this.glossinessMap = source.glossinessMap;
this.glossiness = source.glossiness;
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
return this;
}
}
class GLTFMaterialsPbrSpecularGlossinessExtension {
constructor() {
this.name = EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS;
this.specularGlossinessParams = [
'color',
'map',
'lightMap',
'lightMapIntensity',
'aoMap',
'aoMapIntensity',
'emissive',
'emissiveIntensity',
'emissiveMap',
'bumpMap',
'bumpScale',
'normalMap',
'normalMapType',
'displacementMap',
'displacementScale',
'displacementBias',
'specularMap',
'specular',
'glossinessMap',
'glossiness',
'alphaMap',
'envMap',
'envMapIntensity',
'refractionRatio',
];
}
getMaterialType() {
return GLTFMeshStandardSGMaterial;
}
extendParams( materialParams, materialDef, parser ) {
const pbrSpecularGlossiness = materialDef.extensions[ this.name ];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
const pending = [];
if ( Array.isArray( pbrSpecularGlossiness.diffuseFactor ) ) {
const array = pbrSpecularGlossiness.diffuseFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( pbrSpecularGlossiness.diffuseTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', pbrSpecularGlossiness.diffuseTexture ) );
}
materialParams.emissive = new Color( 0.0, 0.0, 0.0 );
materialParams.glossiness = pbrSpecularGlossiness.glossinessFactor !== undefined ? pbrSpecularGlossiness.glossinessFactor : 1.0;
materialParams.specular = new Color( 1.0, 1.0, 1.0 );
if ( Array.isArray( pbrSpecularGlossiness.specularFactor ) ) {
materialParams.specular.fromArray( pbrSpecularGlossiness.specularFactor );
}
if ( pbrSpecularGlossiness.specularGlossinessTexture !== undefined ) {
const specGlossMapDef = pbrSpecularGlossiness.specularGlossinessTexture;
pending.push( parser.assignTexture( materialParams, 'glossinessMap', specGlossMapDef ) );
pending.push( parser.assignTexture( materialParams, 'specularMap', specGlossMapDef ) );
}
return Promise.all( pending );
}
createMaterial( materialParams ) {
const material = new GLTFMeshStandardSGMaterial( materialParams );
material.fog = true;
material.color = materialParams.color;
material.map = materialParams.map === undefined ? null : materialParams.map;
material.lightMap = null;
material.lightMapIntensity = 1.0;
material.aoMap = materialParams.aoMap === undefined ? null : materialParams.aoMap;
material.aoMapIntensity = 1.0;
material.emissive = materialParams.emissive;
material.emissiveIntensity = 1.0;
material.emissiveMap = materialParams.emissiveMap === undefined ? null : materialParams.emissiveMap;
material.bumpMap = materialParams.bumpMap === undefined ? null : materialParams.bumpMap;
material.bumpScale = 1;
material.normalMap = materialParams.normalMap === undefined ? null : materialParams.normalMap;
material.normalMapType = TangentSpaceNormalMap;
if ( materialParams.normalScale ) material.normalScale = materialParams.normalScale;
material.displacementMap = null;
material.displacementScale = 1;
material.displacementBias = 0;
material.specularMap = materialParams.specularMap === undefined ? null : materialParams.specularMap;
material.specular = materialParams.specular;
material.glossinessMap = materialParams.glossinessMap === undefined ? null : materialParams.glossinessMap;
material.glossiness = materialParams.glossiness;
material.alphaMap = null;
material.envMap = materialParams.envMap === undefined ? null : materialParams.envMap;
material.envMapIntensity = 1.0;
material.refractionRatio = 0.98;
return material;
}
}
/**
* Mesh Quantization Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_mesh_quantization
*/
class GLTFMeshQuantizationExtension {
constructor() {
this.name = EXTENSIONS.KHR_MESH_QUANTIZATION;
}
}
/*********************************/
/********** INTERPOLATION ********/
/*********************************/
// Spline Interpolation
// Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#appendix-c-spline-interpolation
class GLTFCubicSplineInterpolant extends Interpolant {
constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
super( parameterPositions, sampleValues, sampleSize, resultBuffer );
}
copySampleValue_( index ) {
// Copies a sample value to the result buffer. See description of glTF
// CUBICSPLINE values layout in interpolate_() function below.
const result = this.resultBuffer,
values = this.sampleValues,
valueSize = this.valueSize,
offset = index * valueSize * 3 + valueSize;
for ( let i = 0; i !== valueSize; i ++ ) {
result[ i ] = values[ offset + i ];
}
return result;
}
}
GLTFCubicSplineInterpolant.prototype.beforeStart_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.afterEnd_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.interpolate_ = function ( i1, t0, t, t1 ) {
const result = this.resultBuffer;
const values = this.sampleValues;
const stride = this.valueSize;
const stride2 = stride * 2;
const stride3 = stride * 3;
const td = t1 - t0;
const p = ( t - t0 ) / td;
const pp = p * p;
const ppp = pp * p;
const offset1 = i1 * stride3;
const offset0 = offset1 - stride3;
const s2 = - 2 * ppp + 3 * pp;
const s3 = ppp - pp;
const s0 = 1 - s2;
const s1 = s3 - pp + p;
// Layout of keyframe output values for CUBICSPLINE animations:
// [ inTangent_1, splineVertex_1, outTangent_1, inTangent_2, splineVertex_2, ... ]
for ( let i = 0; i !== stride; i ++ ) {
const p0 = values[ offset0 + i + stride ]; // splineVertex_k
const m0 = values[ offset0 + i + stride2 ] * td; // outTangent_k * (t_k+1 - t_k)
const p1 = values[ offset1 + i + stride ]; // splineVertex_k+1
const m1 = values[ offset1 + i ] * td; // inTangent_k+1 * (t_k+1 - t_k)
result[ i ] = s0 * p0 + s1 * m0 + s2 * p1 + s3 * m1;
}
return result;
};
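/*
 * Worked example of the CUBICSPLINE layout indexed by the interpolant above,
 * with illustrative numbers: for a scalar track (valueSize === 1) the
 * sampleValues array is
 *
 *   [ inTangent_1, value_1, outTangent_1, inTangent_2, value_2, outTangent_2, ... ]
 *
 * so copySampleValue_( k ) reads values[ k * 3 + 1 ], and interpolate_()
 * blends value_k and value_k+1 with the out-tangent of keyframe k and the
 * in-tangent of keyframe k+1, both scaled by the keyframe time delta td.
 */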
/*********************************/
/********** INTERNALS ************/
/*********************************/
/* CONSTANTS */
const WEBGL_CONSTANTS$1 = {
FLOAT: 5126,
//FLOAT_MAT2: 35674,
FLOAT_MAT3: 35675,
FLOAT_MAT4: 35676,
FLOAT_VEC2: 35664,
FLOAT_VEC3: 35665,
FLOAT_VEC4: 35666,
LINEAR: 9729,
REPEAT: 10497,
SAMPLER_2D: 35678,
POINTS: 0,
LINES: 1,
LINE_LOOP: 2,
LINE_STRIP: 3,
TRIANGLES: 4,
TRIANGLE_STRIP: 5,
TRIANGLE_FAN: 6,
UNSIGNED_BYTE: 5121,
UNSIGNED_SHORT: 5123
};
const WEBGL_COMPONENT_TYPES = {
5120: Int8Array,
5121: Uint8Array,
5122: Int16Array,
5123: Uint16Array,
5125: Uint32Array,
5126: Float32Array
};
const WEBGL_FILTERS = {
9728: NearestFilter,
9729: LinearFilter,
9984: NearestMipmapNearestFilter,
9985: LinearMipmapNearestFilter,
9986: NearestMipmapLinearFilter,
9987: LinearMipmapLinearFilter
};
const WEBGL_WRAPPINGS = {
33071: ClampToEdgeWrapping,
33648: MirroredRepeatWrapping,
10497: RepeatWrapping
};
const WEBGL_TYPE_SIZES = {
'SCALAR': 1,
'VEC2': 2,
'VEC3': 3,
'VEC4': 4,
'MAT2': 4,
'MAT3': 9,
'MAT4': 16
};
const ATTRIBUTES = {
POSITION: 'position',
NORMAL: 'normal',
TANGENT: 'tangent',
TEXCOORD_0: 'uv',
TEXCOORD_1: 'uv2',
COLOR_0: 'color',
WEIGHTS_0: 'skinWeight',
JOINTS_0: 'skinIndex',
};
const PATH_PROPERTIES$1 = {
scale: 'scale',
translation: 'position',
rotation: 'quaternion',
weights: 'morphTargetInfluences'
};
const INTERPOLATION = {
CUBICSPLINE: undefined, // We use a custom interpolant (GLTFCubicSplineInterpolant) for CUBICSPLINE tracks. Each
// keyframe track will be initialized with a default interpolation type, then modified.
LINEAR: InterpolateLinear,
STEP: InterpolateDiscrete
};
const ALPHA_MODES = {
OPAQUE: 'OPAQUE',
MASK: 'MASK',
BLEND: 'BLEND'
};
/* UTILITY FUNCTIONS */
function resolveURL( url, path ) {
// Invalid URL
if ( typeof url !== 'string' || url === '' ) return '';
// Host Relative URL
if ( /^https?:\/\//i.test( path ) && /^\//.test( url ) ) {
path = path.replace( /(^https?:\/\/[^\/]+).*/i, '$1' );
}
// Absolute URL http://,https://,//
if ( /^(https?:)?\/\//i.test( url ) ) return url;
// Data URI
if ( /^data:.*,.*$/i.test( url ) ) return url;
// Blob URL
if ( /^blob:.*$/i.test( url ) ) return url;
// Relative URL
return path + url;
}
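/*
 * Behaviour sketch for resolveURL() with illustrative inputs; the host and
 * file names are placeholders:
 *
 *   resolveURL( 'data:application/octet-stream;base64,AAAA', path ) -> returned as-is (data URI)
 *   resolveURL( 'blob:https://host/uuid', path )                    -> returned as-is (blob URL)
 *   resolveURL( 'https://cdn.example/tex.png', path )               -> returned as-is (absolute)
 *   resolveURL( 'textures/tex.png', '/models/' )                    -> '/models/textures/tex.png'
 *   resolveURL( '/tex.png', 'https://host/models/' )                -> 'https://host/tex.png' (host-relative)
 */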
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#default-material
*/
function createDefaultMaterial( cache ) {
if ( cache[ 'DefaultMaterial' ] === undefined ) {
cache[ 'DefaultMaterial' ] = new MeshStandardMaterial( {
color: 0xFFFFFF,
emissive: 0x000000,
metalness: 1,
roughness: 1,
transparent: false,
depthTest: true,
side: FrontSide
} );
}
return cache[ 'DefaultMaterial' ];
}
function addUnknownExtensionsToUserData( knownExtensions, object, objectDef ) {
// Add unknown glTF extensions to an object's userData.
for ( const name in objectDef.extensions ) {
if ( knownExtensions[ name ] === undefined ) {
object.userData.gltfExtensions = object.userData.gltfExtensions || {};
object.userData.gltfExtensions[ name ] = objectDef.extensions[ name ];
}
}
}
/**
* @param {Object3D|Material|BufferGeometry} object
* @param {GLTF.definition} gltfDef
*/
function assignExtrasToUserData( object, gltfDef ) {
if ( gltfDef.extras !== undefined ) {
if ( typeof gltfDef.extras === 'object' ) {
Object.assign( object.userData, gltfDef.extras );
} else {
console.warn( 'THREE.GLTFLoader: Ignoring primitive type .extras, ' + gltfDef.extras );
}
}
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#morph-targets
*
* @param {BufferGeometry} geometry
* @param {Array<GLTF.Target>} targets
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addMorphTargets( geometry, targets, parser ) {
let hasMorphPosition = false;
let hasMorphNormal = false;
for ( let i = 0, il = targets.length; i < il; i ++ ) {
const target = targets[ i ];
if ( target.POSITION !== undefined ) hasMorphPosition = true;
if ( target.NORMAL !== undefined ) hasMorphNormal = true;
if ( hasMorphPosition && hasMorphNormal ) break;
}
if ( ! hasMorphPosition && ! hasMorphNormal ) return Promise.resolve( geometry );
const pendingPositionAccessors = [];
const pendingNormalAccessors = [];
for ( let i = 0, il = targets.length; i < il; i ++ ) {
const target = targets[ i ];
if ( hasMorphPosition ) {
const pendingAccessor = target.POSITION !== undefined
? parser.getDependency( 'accessor', target.POSITION )
: geometry.attributes.position;
pendingPositionAccessors.push( pendingAccessor );
}
if ( hasMorphNormal ) {
const pendingAccessor = target.NORMAL !== undefined
? parser.getDependency( 'accessor', target.NORMAL )
: geometry.attributes.normal;
pendingNormalAccessors.push( pendingAccessor );
}
}
return Promise.all( [
Promise.all( pendingPositionAccessors ),
Promise.all( pendingNormalAccessors )
] ).then( function ( accessors ) {
const morphPositions = accessors[ 0 ];
const morphNormals = accessors[ 1 ];
if ( hasMorphPosition ) geometry.morphAttributes.position = morphPositions;
if ( hasMorphNormal ) geometry.morphAttributes.normal = morphNormals;
geometry.morphTargetsRelative = true;
return geometry;
} );
}
/**
* @param {Mesh} mesh
* @param {GLTF.Mesh} meshDef
*/
function updateMorphTargets( mesh, meshDef ) {
mesh.updateMorphTargets();
if ( meshDef.weights !== undefined ) {
for ( let i = 0, il = meshDef.weights.length; i < il; i ++ ) {
mesh.morphTargetInfluences[ i ] = meshDef.weights[ i ];
}
}
// .extras has user-defined data, so check that .extras.targetNames is an array.
if ( meshDef.extras && Array.isArray( meshDef.extras.targetNames ) ) {
const targetNames = meshDef.extras.targetNames;
if ( mesh.morphTargetInfluences.length === targetNames.length ) {
mesh.morphTargetDictionary = {};
for ( let i = 0, il = targetNames.length; i < il; i ++ ) {
mesh.morphTargetDictionary[ targetNames[ i ] ] = i;
}
} else {
console.warn( 'THREE.GLTFLoader: Invalid extras.targetNames length. Ignoring names.' );
}
}
}
function createPrimitiveKey( primitiveDef ) {
const dracoExtension = primitiveDef.extensions && primitiveDef.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ];
let geometryKey;
if ( dracoExtension ) {
geometryKey = 'draco:' + dracoExtension.bufferView
+ ':' + dracoExtension.indices
+ ':' + createAttributesKey( dracoExtension.attributes );
} else {
geometryKey = primitiveDef.indices + ':' + createAttributesKey( primitiveDef.attributes ) + ':' + primitiveDef.mode;
}
return geometryKey;
}
function createAttributesKey( attributes ) {
let attributesKey = '';
const keys = Object.keys( attributes ).sort();
for ( let i = 0, il = keys.length; i < il; i ++ ) {
attributesKey += keys[ i ] + ':' + attributes[ keys[ i ] ] + ';';
}
return attributesKey;
}
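/*
 * Cache-key sketch with illustrative indices: for a primitive with
 * attributes { NORMAL: 1, POSITION: 0 }, indices 2 and mode 4,
 * createAttributesKey() yields 'NORMAL:1;POSITION:0;' and
 * createPrimitiveKey() yields '2:NORMAL:1;POSITION:0;:4'. Draco-compressed
 * primitives are keyed as 'draco:<bufferView>:<indices>:<attributesKey>'
 * instead, so identical primitives can share one BufferGeometry.
 */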
function getNormalizedComponentScale( constructor ) {
// Reference:
// https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_mesh_quantization#encoding-quantized-data
switch ( constructor ) {
case Int8Array:
return 1 / 127;
case Uint8Array:
return 1 / 255;
case Int16Array:
return 1 / 32767;
case Uint16Array:
return 1 / 65535;
default:
throw new Error( 'THREE.GLTFLoader: Unsupported normalized accessor component type.' );
}
}
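/*
 * Numeric sketch: with KHR_mesh_quantization, a normalized Int16 component
 * stored as 16383 denormalizes to roughly 16383 / 32767 ≈ 0.5 using the
 * scale returned above; a normalized Uint8 value of 255 maps to exactly 1.0.
 */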
/* GLTF PARSER */
class GLTFParser {
constructor( json = {}, options = {} ) {
this.json = json;
this.extensions = {};
this.plugins = {};
this.options = options;
// loader object cache
this.cache = new GLTFRegistry();
// associations between Three.js objects and glTF elements
this.associations = new Map();
// BufferGeometry caching
this.primitiveCache = {};
// Object3D instance caches
this.meshCache = { refs: {}, uses: {} };
this.cameraCache = { refs: {}, uses: {} };
this.lightCache = { refs: {}, uses: {} };
// Track node names, to ensure no duplicates
this.nodeNamesUsed = {};
// Use an ImageBitmapLoader if imageBitmaps are supported. Moves much of the
// expensive work of uploading a texture to the GPU off the main thread.
if ( typeof createImageBitmap !== 'undefined' && /Firefox/.test( navigator.userAgent ) === false ) {
this.textureLoader = new ImageBitmapLoader( this.options.manager );
} else {
this.textureLoader = new TextureLoader( this.options.manager );
}
this.textureLoader.setCrossOrigin( this.options.crossOrigin );
this.textureLoader.setRequestHeader( this.options.requestHeader );
this.fileLoader = new FileLoader( this.options.manager );
this.fileLoader.setResponseType( 'arraybuffer' );
if ( this.options.crossOrigin === 'use-credentials' ) {
this.fileLoader.setWithCredentials( true );
}
}
setExtensions( extensions ) {
this.extensions = extensions;
}
setPlugins( plugins ) {
this.plugins = plugins;
}
parse( onLoad, onError ) {
const parser = this;
const json = this.json;
const extensions = this.extensions;
// Clear the loader cache
this.cache.removeAll();
// Mark the special nodes/meshes in json for efficient parse
this._invokeAll( function ( ext ) {
return ext._markDefs && ext._markDefs();
} );
Promise.all( this._invokeAll( function ( ext ) {
return ext.beforeRoot && ext.beforeRoot();
} ) ).then( function () {
return Promise.all( [
parser.getDependencies( 'scene' ),
parser.getDependencies( 'animation' ),
parser.getDependencies( 'camera' ),
] );
} ).then( function ( dependencies ) {
const result = {
scene: dependencies[ 0 ][ json.scene || 0 ],
scenes: dependencies[ 0 ],
animations: dependencies[ 1 ],
cameras: dependencies[ 2 ],
asset: json.asset,
parser: parser,
userData: {}
};
addUnknownExtensionsToUserData( extensions, result, json );
assignExtrasToUserData( result, json );
Promise.all( parser._invokeAll( function ( ext ) {
return ext.afterRoot && ext.afterRoot( result );
} ) ).then( function () {
onLoad( result );
} );
} ).catch( onError );
}
/**
* Marks the special nodes/meshes in json for efficient parse.
*/
_markDefs() {
const nodeDefs = this.json.nodes || [];
const skinDefs = this.json.skins || [];
const meshDefs = this.json.meshes || [];
// Nothing in the node definition indicates whether it is a Bone or an
// Object3D. Use the skins' joint references to mark bones.
for ( let skinIndex = 0, skinLength = skinDefs.length; skinIndex < skinLength; skinIndex ++ ) {
const joints = skinDefs[ skinIndex ].joints;
for ( let i = 0, il = joints.length; i < il; i ++ ) {
nodeDefs[ joints[ i ] ].isBone = true;
}
}
// Iterate over all nodes, marking references to shared resources,
// as well as skeleton joints.
for ( let nodeIndex = 0, nodeLength = nodeDefs.length; nodeIndex < nodeLength; nodeIndex ++ ) {
const nodeDef = nodeDefs[ nodeIndex ];
if ( nodeDef.mesh !== undefined ) {
this._addNodeRef( this.meshCache, nodeDef.mesh );
// Nothing in the mesh definition indicates whether it is
// a SkinnedMesh or Mesh. Use the node's mesh reference
// to mark SkinnedMesh if node has skin.
if ( nodeDef.skin !== undefined ) {
meshDefs[ nodeDef.mesh ].isSkinnedMesh = true;
}
}
if ( nodeDef.camera !== undefined ) {
this._addNodeRef( this.cameraCache, nodeDef.camera );
}
}
}
/**
* Counts references to shared node / Object3D resources. These resources
* can be reused, or "instantiated", at multiple nodes in the scene
* hierarchy. Mesh, Camera, and Light instances are instantiated and must
* be marked. Non-scenegraph resources (like Materials, Geometries, and
* Textures) can be reused directly and are not marked here.
*
* Example: CesiumMilkTruck sample model reuses "Wheel" meshes.
*/
_addNodeRef( cache, index ) {
if ( index === undefined ) return;
if ( cache.refs[ index ] === undefined ) {
cache.refs[ index ] = cache.uses[ index ] = 0;
}
cache.refs[ index ] ++;
}
/** Returns a reference to a shared resource, cloning it if necessary. */
_getNodeRef( cache, index, object ) {
if ( cache.refs[ index ] <= 1 ) return object;
const ref = object.clone();
ref.name += '_instance_' + ( cache.uses[ index ] ++ );
return ref;
}
_invokeOne( func ) {
const extensions = Object.values( this.plugins );
extensions.push( this );
for ( let i = 0; i < extensions.length; i ++ ) {
const result = func( extensions[ i ] );
if ( result ) return result;
}
return null;
}
_invokeAll( func ) {
const extensions = Object.values( this.plugins );
extensions.unshift( this );
const pending = [];
for ( let i = 0; i < extensions.length; i ++ ) {
const result = func( extensions[ i ] );
if ( result ) pending.push( result );
}
return pending;
}
/**
* Requests the specified dependency asynchronously, with caching.
* @param {string} type
* @param {number} index
* @return {Promise<Object3D|Material|THREE.Texture|AnimationClip|ArrayBuffer|Object>}
*/
getDependency( type, index ) {
const cacheKey = type + ':' + index;
let dependency = this.cache.get( cacheKey );
if ( ! dependency ) {
switch ( type ) {
case 'scene':
dependency = this.loadScene( index );
break;
case 'node':
dependency = this.loadNode( index );
break;
case 'mesh':
dependency = this._invokeOne( function ( ext ) {
return ext.loadMesh && ext.loadMesh( index );
} );
break;
case 'accessor':
dependency = this.loadAccessor( index );
break;
case 'bufferView':
dependency = this._invokeOne( function ( ext ) {
return ext.loadBufferView && ext.loadBufferView( index );
} );
break;
case 'buffer':
dependency = this.loadBuffer( index );
break;
case 'material':
dependency = this._invokeOne( function ( ext ) {
return ext.loadMaterial && ext.loadMaterial( index );
} );
break;
case 'texture':
dependency = this._invokeOne( function ( ext ) {
return ext.loadTexture && ext.loadTexture( index );
} );
break;
case 'skin':
dependency = this.loadSkin( index );
break;
case 'animation':
dependency = this.loadAnimation( index );
break;
case 'camera':
dependency = this.loadCamera( index );
break;
default:
throw new Error( 'Unknown type: ' + type );
}
this.cache.add( cacheKey, dependency );
}
return dependency;
}
/**
* Requests all dependencies of the specified type asynchronously, with caching.
* @param {string} type
* @return {Promise<Array<Object>>}
*/
getDependencies( type ) {
let dependencies = this.cache.get( type );
if ( ! dependencies ) {
const parser = this;
const defs = this.json[ type + ( type === 'mesh' ? 'es' : 's' ) ] || [];
dependencies = Promise.all( defs.map( function ( def, index ) {
return parser.getDependency( type, index );
} ) );
this.cache.add( type, dependencies );
}
return dependencies;
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferIndex
* @return {Promise<ArrayBuffer>}
*/
loadBuffer( bufferIndex ) {
const bufferDef = this.json.buffers[ bufferIndex ];
const loader = this.fileLoader;
if ( bufferDef.type && bufferDef.type !== 'arraybuffer' ) {
throw new Error( 'THREE.GLTFLoader: ' + bufferDef.type + ' buffer type is not supported.' );
}
// If present, GLB container is required to be the first buffer.
if ( bufferDef.uri === undefined && bufferIndex === 0 ) {
return Promise.resolve( this.extensions[ EXTENSIONS.KHR_BINARY_GLTF ].body );
}
const options = this.options;
return new Promise( function ( resolve, reject ) {
loader.load( resolveURL( bufferDef.uri, options.path ), resolve, undefined, function () {
reject( new Error( 'THREE.GLTFLoader: Failed to load buffer "' + bufferDef.uri + '".' ) );
} );
} );
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferViewIndex
* @return {Promise<ArrayBuffer>}
*/
loadBufferView( bufferViewIndex ) {
const bufferViewDef = this.json.bufferViews[ bufferViewIndex ];
return this.getDependency( 'buffer', bufferViewDef.buffer ).then( function ( buffer ) {
const byteLength = bufferViewDef.byteLength || 0;
const byteOffset = bufferViewDef.byteOffset || 0;
return buffer.slice( byteOffset, byteOffset + byteLength );
} );
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#accessors
* @param {number} accessorIndex
* @return {Promise<BufferAttribute|InterleavedBufferAttribute>}
*/
loadAccessor( accessorIndex ) {
const parser = this;
const json = this.json;
const accessorDef = this.json.accessors[ accessorIndex ];
if ( accessorDef.bufferView === undefined && accessorDef.sparse === undefined ) {
// Ignore empty accessors, which may be used to declare runtime
// information about attributes coming from another source (e.g. Draco
// compression extension).
return Promise.resolve( null );
}
const pendingBufferViews = [];
if ( accessorDef.bufferView !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.bufferView ) );
} else {
pendingBufferViews.push( null );
}
if ( accessorDef.sparse !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.indices.bufferView ) );
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.values.bufferView ) );
}
return Promise.all( pendingBufferViews ).then( function ( bufferViews ) {
const bufferView = bufferViews[ 0 ];
const itemSize = WEBGL_TYPE_SIZES[ accessorDef.type ];
const TypedArray = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
// For VEC3: itemSize is 3, elementBytes is 4, itemBytes is 12.
const elementBytes = TypedArray.BYTES_PER_ELEMENT;
const itemBytes = elementBytes * itemSize;
const byteOffset = accessorDef.byteOffset || 0;
const byteStride = accessorDef.bufferView !== undefined ? json.bufferViews[ accessorDef.bufferView ].byteStride : undefined;
const normalized = accessorDef.normalized === true;
let array, bufferAttribute;
// The buffer is not interleaved if the stride is the item size in bytes.
if ( byteStride && byteStride !== itemBytes ) {
// Each "slice" of the buffer, as defined by 'count' elements of 'byteStride' bytes, gets its own InterleavedBuffer
// This makes sure that IBA.count reflects accessor.count properly
const ibSlice = Math.floor( byteOffset / byteStride );
const ibCacheKey = 'InterleavedBuffer:' + accessorDef.bufferView + ':' + accessorDef.componentType + ':' + ibSlice + ':' + accessorDef.count;
let ib = parser.cache.get( ibCacheKey );
if ( ! ib ) {
array = new TypedArray( bufferView, ibSlice * byteStride, accessorDef.count * byteStride / elementBytes );
// Integer parameters to IB/IBA are in array elements, not bytes.
ib = new InterleavedBuffer( array, byteStride / elementBytes );
parser.cache.add( ibCacheKey, ib );
}
bufferAttribute = new InterleavedBufferAttribute( ib, itemSize, ( byteOffset % byteStride ) / elementBytes, normalized );
} else {
if ( bufferView === null ) {
array = new TypedArray( accessorDef.count * itemSize );
} else {
array = new TypedArray( bufferView, byteOffset, accessorDef.count * itemSize );
}
bufferAttribute = new BufferAttribute( array, itemSize, normalized );
}
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#sparse-accessors
if ( accessorDef.sparse !== undefined ) {
const itemSizeIndices = WEBGL_TYPE_SIZES.SCALAR;
const TypedArrayIndices = WEBGL_COMPONENT_TYPES[ accessorDef.sparse.indices.componentType ];
const byteOffsetIndices = accessorDef.sparse.indices.byteOffset || 0;
const byteOffsetValues = accessorDef.sparse.values.byteOffset || 0;
const sparseIndices = new TypedArrayIndices( bufferViews[ 1 ], byteOffsetIndices, accessorDef.sparse.count * itemSizeIndices );
const sparseValues = new TypedArray( bufferViews[ 2 ], byteOffsetValues, accessorDef.sparse.count * itemSize );
if ( bufferView !== null ) {
// Avoid modifying the original ArrayBuffer, if the bufferView wasn't initialized with zeroes.
bufferAttribute = new BufferAttribute( bufferAttribute.array.slice(), bufferAttribute.itemSize, bufferAttribute.normalized );
}
for ( let i = 0, il = sparseIndices.length; i < il; i ++ ) {
const index = sparseIndices[ i ];
bufferAttribute.setX( index, sparseValues[ i * itemSize ] );
if ( itemSize >= 2 ) bufferAttribute.setY( index, sparseValues[ i * itemSize + 1 ] );
if ( itemSize >= 3 ) bufferAttribute.setZ( index, sparseValues[ i * itemSize + 2 ] );
if ( itemSize >= 4 ) bufferAttribute.setW( index, sparseValues[ i * itemSize + 3 ] );
if ( itemSize >= 5 ) throw new Error( 'THREE.GLTFLoader: Unsupported itemSize in sparse BufferAttribute.' );
}
}
return bufferAttribute;
} );
}
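/*
 * Illustrative sketch (not part of the library): the interleaved branch above, worked
 * through with hypothetical numbers.
 *
 *   // accessor: type 'VEC3', componentType FLOAT, byteOffset 12, count N
 *   // bufferView.byteStride = 24 (two VEC3 attributes packed per vertex)
 *   // itemBytes = 4 * 3 = 12, so byteStride !== itemBytes -> interleaved path
 *   // ibSlice = Math.floor( 12 / 24 ) = 0
 *   // InterleavedBuffer stride = 24 / 4 = 6 array elements
 *   // attribute offset = ( 12 % 24 ) / 4 = 3 array elements
 *   // result: new InterleavedBufferAttribute( ib, 3, 3, normalized )
 */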
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#textures
* @param {number} textureIndex
* @return {Promise<THREE.Texture>}
*/
loadTexture( textureIndex ) {
const json = this.json;
const options = this.options;
const textureDef = json.textures[ textureIndex ];
const source = json.images[ textureDef.source ];
let loader = this.textureLoader;
if ( source.uri ) {
const handler = options.manager.getHandler( source.uri );
if ( handler !== null ) loader = handler;
}
return this.loadTextureImage( textureIndex, source, loader );
}
loadTextureImage( textureIndex, source, loader ) {
const parser = this;
const json = this.json;
const options = this.options;
const textureDef = json.textures[ textureIndex ];
const URL = self.URL || self.webkitURL;
let sourceURI = source.uri;
let isObjectURL = false;
let hasAlpha = true;
if ( source.mimeType === 'image/jpeg' ) hasAlpha = false;
if ( source.bufferView !== undefined ) {
// Load binary image data from bufferView, if provided.
sourceURI = parser.getDependency( 'bufferView', source.bufferView ).then( function ( bufferView ) {
if ( source.mimeType === 'image/png' ) {
// Inspect the PNG 'IHDR' chunk to determine whether the image could have an
// alpha channel. This check is conservative — the image could have an alpha
// channel with all values == 1, and the indexed type (colorType == 3) only
// sometimes contains alpha.
//
// https://en.wikipedia.org/wiki/Portable_Network_Graphics#File_header
const colorType = new DataView( bufferView, 25, 1 ).getUint8( 0, false );
hasAlpha = colorType === 6 || colorType === 4 || colorType === 3;
}
isObjectURL = true;
const blob = new Blob( [ bufferView ], { type: source.mimeType } );
sourceURI = URL.createObjectURL( blob );
return sourceURI;
} );
} else if ( source.uri === undefined ) {
throw new Error( 'THREE.GLTFLoader: Image ' + textureIndex + ' is missing URI and bufferView' );
}
return Promise.resolve( sourceURI ).then( function ( sourceURI ) {
return new Promise( function ( resolve, reject ) {
let onLoad = resolve;
if ( loader.isImageBitmapLoader === true ) {
onLoad = function ( imageBitmap ) {
resolve( new CanvasTexture( imageBitmap ) );
};
}
loader.load( resolveURL( sourceURI, options.path ), onLoad, undefined, reject );
} );
} ).then( function ( texture ) {
// Clean up resources and configure Texture.
if ( isObjectURL === true ) {
URL.revokeObjectURL( sourceURI );
}
texture.flipY = false;
if ( textureDef.name ) texture.name = textureDef.name;
// When there is definitely no alpha channel in the texture, set RGBFormat to save space.
if ( ! hasAlpha ) texture.format = RGBFormat;
const samplers = json.samplers || {};
const sampler = samplers[ textureDef.sampler ] || {};
texture.magFilter = WEBGL_FILTERS[ sampler.magFilter ] || LinearFilter;
texture.minFilter = WEBGL_FILTERS[ sampler.minFilter ] || LinearMipmapLinearFilter;
texture.wrapS = WEBGL_WRAPPINGS[ sampler.wrapS ] || RepeatWrapping;
texture.wrapT = WEBGL_WRAPPINGS[ sampler.wrapT ] || RepeatWrapping;
parser.associations.set( texture, {
type: 'textures',
index: textureIndex
} );
return texture;
} );
}
/**
* Asynchronously assigns a texture to the given material parameters.
* @param {Object} materialParams
* @param {string} mapName
* @param {Object} mapDef
* @return {Promise}
*/
assignTexture( materialParams, mapName, mapDef ) {
const parser = this;
return this.getDependency( 'texture', mapDef.index ).then( function ( texture ) {
// Materials sample aoMap from UV set 1 and other maps from UV set 0 - this can't be configured
// However, we will copy UV set 0 to UV set 1 on demand for aoMap
if ( mapDef.texCoord !== undefined && mapDef.texCoord != 0 && ! ( mapName === 'aoMap' && mapDef.texCoord == 1 ) ) {
console.warn( 'THREE.GLTFLoader: Custom UV set ' + mapDef.texCoord + ' for texture ' + mapName + ' not yet supported.' );
}
if ( parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] ) {
const transform = mapDef.extensions !== undefined ? mapDef.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] : undefined;
if ( transform ) {
const gltfReference = parser.associations.get( texture );
texture = parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ].extendTexture( texture, transform );
parser.associations.set( texture, gltfReference );
}
}
materialParams[ mapName ] = texture;
} );
}
/**
* Assigns final material to a Mesh, Line, or Points instance. The instance
* already has a material (generated from the glTF material options alone)
* but reuse of the same glTF material may require multiple threejs materials
* to accommodate different primitive types, defines, etc. New materials will
* be created if necessary, and reused from a cache.
* @param {Object3D} mesh Mesh, Line, or Points instance.
*/
assignFinalMaterial( mesh ) {
const geometry = mesh.geometry;
let material = mesh.material;
const useVertexTangents = geometry.attributes.tangent !== undefined;
const useVertexColors = geometry.attributes.color !== undefined;
const useFlatShading = geometry.attributes.normal === undefined;
const useSkinning = mesh.isSkinnedMesh === true;
const useMorphTargets = Object.keys( geometry.morphAttributes ).length > 0;
const useMorphNormals = useMorphTargets && geometry.morphAttributes.normal !== undefined;
if ( mesh.isPoints ) {
const cacheKey = 'PointsMaterial:' + material.uuid;
let pointsMaterial = this.cache.get( cacheKey );
if ( ! pointsMaterial ) {
pointsMaterial = new PointsMaterial();
Material$1.prototype.copy.call( pointsMaterial, material );
pointsMaterial.color.copy( material.color );
pointsMaterial.map = material.map;
pointsMaterial.sizeAttenuation = false; // glTF spec says points should be 1px
this.cache.add( cacheKey, pointsMaterial );
}
material = pointsMaterial;
} else if ( mesh.isLine ) {
const cacheKey = 'LineBasicMaterial:' + material.uuid;
let lineMaterial = this.cache.get( cacheKey );
if ( ! lineMaterial ) {
lineMaterial = new LineBasicMaterial();
Material$1.prototype.copy.call( lineMaterial, material );
lineMaterial.color.copy( material.color );
this.cache.add( cacheKey, lineMaterial );
}
material = lineMaterial;
}
// Clone the material if it will be modified
if ( useVertexTangents || useVertexColors || useFlatShading || useSkinning || useMorphTargets ) {
let cacheKey = 'ClonedMaterial:' + material.uuid + ':';
if ( material.isGLTFSpecularGlossinessMaterial ) cacheKey += 'specular-glossiness:';
if ( useSkinning ) cacheKey += 'skinning:';
if ( useVertexTangents ) cacheKey += 'vertex-tangents:';
if ( useVertexColors ) cacheKey += 'vertex-colors:';
if ( useFlatShading ) cacheKey += 'flat-shading:';
if ( useMorphTargets ) cacheKey += 'morph-targets:';
if ( useMorphNormals ) cacheKey += 'morph-normals:';
let cachedMaterial = this.cache.get( cacheKey );
if ( ! cachedMaterial ) {
cachedMaterial = material.clone();
if ( useSkinning ) cachedMaterial.skinning = true;
if ( useVertexColors ) cachedMaterial.vertexColors = true;
if ( useFlatShading ) cachedMaterial.flatShading = true;
if ( useMorphTargets ) cachedMaterial.morphTargets = true;
if ( useMorphNormals ) cachedMaterial.morphNormals = true;
if ( useVertexTangents ) {
cachedMaterial.vertexTangents = true;
// https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
if ( cachedMaterial.normalScale ) cachedMaterial.normalScale.y *= - 1;
if ( cachedMaterial.clearcoatNormalScale ) cachedMaterial.clearcoatNormalScale.y *= - 1;
}
this.cache.add( cacheKey, cachedMaterial );
this.associations.set( cachedMaterial, this.associations.get( material ) );
}
material = cachedMaterial;
}
// workarounds for mesh and geometry
if ( material.aoMap && geometry.attributes.uv2 === undefined && geometry.attributes.uv !== undefined ) {
geometry.setAttribute( 'uv2', geometry.attributes.uv );
}
mesh.material = material;
}
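/*
 * Illustrative sketch (not part of the library): the cache key built above for a cloned
 * material, with a placeholder uuid. Flags are appended in the order they are tested above.
 *
 *   // skinned mesh whose geometry also has vertex colors:
 *   // 'ClonedMaterial:<uuid>:skinning:vertex-colors:'
 */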
getMaterialType( /* materialIndex */ ) {
return MeshStandardMaterial;
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#materials
* @param {number} materialIndex
* @return {Promise<Material>}
*/
loadMaterial( materialIndex ) {
const parser = this;
const json = this.json;
const extensions = this.extensions;
const materialDef = json.materials[ materialIndex ];
let materialType;
const materialParams = {};
const materialExtensions = materialDef.extensions || {};
const pending = [];
if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ] ) {
const sgExtension = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ];
materialType = sgExtension.getMaterialType();
pending.push( sgExtension.extendParams( materialParams, materialDef, parser ) );
} else if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ] ) {
const kmuExtension = extensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ];
materialType = kmuExtension.getMaterialType();
pending.push( kmuExtension.extendParams( materialParams, materialDef, parser ) );
} else {
// Specification:
// https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#metallic-roughness-material
const metallicRoughness = materialDef.pbrMetallicRoughness || {};
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
const array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
materialParams.metalness = metallicRoughness.metallicFactor !== undefined ? metallicRoughness.metallicFactor : 1.0;
materialParams.roughness = metallicRoughness.roughnessFactor !== undefined ? metallicRoughness.roughnessFactor : 1.0;
if ( metallicRoughness.metallicRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'metalnessMap', metallicRoughness.metallicRoughnessTexture ) );
pending.push( parser.assignTexture( materialParams, 'roughnessMap', metallicRoughness.metallicRoughnessTexture ) );
}
materialType = this._invokeOne( function ( ext ) {
return ext.getMaterialType && ext.getMaterialType( materialIndex );
} );
pending.push( Promise.all( this._invokeAll( function ( ext ) {
return ext.extendMaterialParams && ext.extendMaterialParams( materialIndex, materialParams );
} ) ) );
}
if ( materialDef.doubleSided === true ) {
materialParams.side = DoubleSide;
}
const alphaMode = materialDef.alphaMode || ALPHA_MODES.OPAQUE;
if ( alphaMode === ALPHA_MODES.BLEND ) {
materialParams.transparent = true;
// See: https://github.com/mrdoob/three.js/issues/17706
materialParams.depthWrite = false;
} else {
materialParams.transparent = false;
if ( alphaMode === ALPHA_MODES.MASK ) {
materialParams.alphaTest = materialDef.alphaCutoff !== undefined ? materialDef.alphaCutoff : 0.5;
}
}
if ( materialDef.normalTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'normalMap', materialDef.normalTexture ) );
// https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
materialParams.normalScale = new Vector2( 1, - 1 );
if ( materialDef.normalTexture.scale !== undefined ) {
materialParams.normalScale.set( materialDef.normalTexture.scale, - materialDef.normalTexture.scale );
}
}
if ( materialDef.occlusionTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'aoMap', materialDef.occlusionTexture ) );
if ( materialDef.occlusionTexture.strength !== undefined ) {
materialParams.aoMapIntensity = materialDef.occlusionTexture.strength;
}
}
if ( materialDef.emissiveFactor !== undefined && materialType !== MeshBasicMaterial ) {
materialParams.emissive = new Color().fromArray( materialDef.emissiveFactor );
}
if ( materialDef.emissiveTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'emissiveMap', materialDef.emissiveTexture ) );
}
return Promise.all( pending ).then( function () {
let material;
if ( materialType === GLTFMeshStandardSGMaterial ) {
material = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ].createMaterial( materialParams );
} else {
material = new materialType( materialParams );
}
if ( materialDef.name ) material.name = materialDef.name;
// baseColorTexture, emissiveTexture, and specularGlossinessTexture use sRGB encoding.
if ( material.map ) material.map.encoding = sRGBEncoding;
if ( material.emissiveMap ) material.emissiveMap.encoding = sRGBEncoding;
assignExtrasToUserData( material, materialDef );
parser.associations.set( material, { type: 'materials', index: materialIndex } );
if ( materialDef.extensions ) addUnknownExtensionsToUserData( extensions, material, materialDef );
return material;
} );
}
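/*
 * Illustrative sketch (not part of the library): how the glTF alphaMode values handled
 * above translate into material parameters.
 *
 *   // 'OPAQUE' (default) -> transparent: false
 *   // 'MASK'             -> transparent: false, alphaTest: alphaCutoff (0.5 if unset)
 *   // 'BLEND'            -> transparent: true,  depthWrite: false
 */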
/** When Object3D instances are targeted by animation, they need unique names. */
createUniqueName( originalName ) {
const sanitizedName = PropertyBinding.sanitizeNodeName( originalName || '' );
let name = sanitizedName;
for ( let i = 1; this.nodeNamesUsed[ name ]; ++ i ) {
name = sanitizedName + '_' + i;
}
this.nodeNamesUsed[ name ] = true;
return name;
}
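/*
 * Illustrative sketch (not part of the library): how duplicate names are disambiguated
 * by the loop above, assuming sanitizeNodeName leaves 'Node' unchanged.
 *
 *   // createUniqueName( 'Node' ) -> 'Node'
 *   // createUniqueName( 'Node' ) -> 'Node_1'
 *   // createUniqueName( 'Node' ) -> 'Node_2'
 */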
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#geometry
*
* Creates BufferGeometries from primitives.
*
* @param {Array<GLTF.Primitive>} primitives
* @return {Promise<Array<BufferGeometry>>}
*/
loadGeometries( primitives ) {
const parser = this;
const extensions = this.extensions;
const cache = this.primitiveCache;
function createDracoPrimitive( primitive ) {
return extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ]
.decodePrimitive( primitive, parser )
.then( function ( geometry ) {
return addPrimitiveAttributes( geometry, primitive, parser );
} );
}
const pending = [];
for ( let i = 0, il = primitives.length; i < il; i ++ ) {
const primitive = primitives[ i ];
const cacheKey = createPrimitiveKey( primitive );
// See if we've already created this geometry
const cached = cache[ cacheKey ];
if ( cached ) {
// Use the cached geometry if it exists
pending.push( cached.promise );
} else {
let geometryPromise;
if ( primitive.extensions && primitive.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ] ) {
// Use DRACO geometry if available
geometryPromise = createDracoPrimitive( primitive );
} else {
// Otherwise create a new geometry
geometryPromise = addPrimitiveAttributes( new BufferGeometry(), primitive, parser );
}
// Cache this geometry
cache[ cacheKey ] = { primitive: primitive, promise: geometryPromise };
pending.push( geometryPromise );
}
}
return Promise.all( pending );
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#meshes
* @param {number} meshIndex
* @return {Promise<Group|Mesh|SkinnedMesh>}
*/
loadMesh( meshIndex ) {
const parser = this;
const json = this.json;
const extensions = this.extensions;
const meshDef = json.meshes[ meshIndex ];
const primitives = meshDef.primitives;
const pending = [];
for ( let i = 0, il = primitives.length; i < il; i ++ ) {
const material = primitives[ i ].material === undefined
? createDefaultMaterial( this.cache )
: this.getDependency( 'material', primitives[ i ].material );
pending.push( material );
}
pending.push( parser.loadGeometries( primitives ) );
return Promise.all( pending ).then( function ( results ) {
const materials = results.slice( 0, results.length - 1 );
const geometries = results[ results.length - 1 ];
const meshes = [];
for ( let i = 0, il = geometries.length; i < il; i ++ ) {
const geometry = geometries[ i ];
const primitive = primitives[ i ];
// 1. create Mesh
let mesh;
const material = materials[ i ];
if ( primitive.mode === WEBGL_CONSTANTS$1.TRIANGLES ||
primitive.mode === WEBGL_CONSTANTS$1.TRIANGLE_STRIP ||
primitive.mode === WEBGL_CONSTANTS$1.TRIANGLE_FAN ||
primitive.mode === undefined ) {
// .isSkinnedMesh isn't in glTF spec. See ._markDefs()
mesh = meshDef.isSkinnedMesh === true
? new SkinnedMesh( geometry, material )
: new Mesh( geometry, material );
if ( mesh.isSkinnedMesh === true && ! mesh.geometry.attributes.skinWeight.normalized ) {
// we normalize floating point skin weight array to fix malformed assets (see #15319)
// it's important to skip this for non-float32 data since normalizeSkinWeights assumes non-normalized inputs
mesh.normalizeSkinWeights();
}
if ( primitive.mode === WEBGL_CONSTANTS$1.TRIANGLE_STRIP ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleStripDrawMode );
} else if ( primitive.mode === WEBGL_CONSTANTS$1.TRIANGLE_FAN ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleFanDrawMode );
}
} else if ( primitive.mode === WEBGL_CONSTANTS$1.LINES ) {
mesh = new LineSegments( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS$1.LINE_STRIP ) {
mesh = new Line( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS$1.LINE_LOOP ) {
mesh = new LineLoop( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS$1.POINTS ) {
mesh = new Points( geometry, material );
} else {
throw new Error( 'THREE.GLTFLoader: Primitive mode unsupported: ' + primitive.mode );
}
if ( Object.keys( mesh.geometry.morphAttributes ).length > 0 ) {
updateMorphTargets( mesh, meshDef );
}
mesh.name = parser.createUniqueName( meshDef.name || ( 'mesh_' + meshIndex ) );
assignExtrasToUserData( mesh, meshDef );
if ( primitive.extensions ) addUnknownExtensionsToUserData( extensions, mesh, primitive );
parser.assignFinalMaterial( mesh );
meshes.push( mesh );
}
if ( meshes.length === 1 ) {
return meshes[ 0 ];
}
const group = new Group();
for ( let i = 0, il = meshes.length; i < il; i ++ ) {
group.add( meshes[ i ] );
}
return group;
} );
}
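/*
 * Illustrative sketch (not part of the library): how the glTF primitive modes handled
 * above map to three.js object types.
 *
 *   // TRIANGLES / TRIANGLE_STRIP / TRIANGLE_FAN / undefined -> Mesh (or SkinnedMesh)
 *   // LINES      -> LineSegments
 *   // LINE_STRIP -> Line
 *   // LINE_LOOP  -> LineLoop
 *   // POINTS     -> Points
 */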
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#cameras
* @param {number} cameraIndex
* @return {Promise<THREE.Camera>}
*/
loadCamera( cameraIndex ) {
let camera;
const cameraDef = this.json.cameras[ cameraIndex ];
const params = cameraDef[ cameraDef.type ];
if ( ! params ) {
console.warn( 'THREE.GLTFLoader: Missing camera parameters.' );
return;
}
if ( cameraDef.type === 'perspective' ) {
camera = new PerspectiveCamera( MathUtils.radToDeg( params.yfov ), params.aspectRatio || 1, params.znear || 1, params.zfar || 2e6 );
} else if ( cameraDef.type === 'orthographic' ) {
camera = new OrthographicCamera( - params.xmag, params.xmag, params.ymag, - params.ymag, params.znear, params.zfar );
}
if ( cameraDef.name ) camera.name = this.createUniqueName( cameraDef.name );
assignExtrasToUserData( camera, cameraDef );
return Promise.resolve( camera );
}
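/*
 * Illustrative sketch (not part of the library): mapping of glTF camera parameters to
 * the three.js constructors used above, with hypothetical values.
 *
 *   // perspective:  yfov 0.7854 rad -> fov ~45 deg
 *   //               new PerspectiveCamera( 45, aspectRatio || 1, znear || 1, zfar || 2e6 )
 *   // orthographic: xmag 2, ymag 1
 *   //               new OrthographicCamera( -2, 2, 1, -1, znear, zfar )
 */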
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#skins
* @param {number} skinIndex
* @return {Promise<Object>}
*/
loadSkin( skinIndex ) {
const skinDef = this.json.skins[ skinIndex ];
const skinEntry = { joints: skinDef.joints };
if ( skinDef.inverseBindMatrices === undefined ) {
return Promise.resolve( skinEntry );
}
return this.getDependency( 'accessor', skinDef.inverseBindMatrices ).then( function ( accessor ) {
skinEntry.inverseBindMatrices = accessor;
return skinEntry;
} );
}
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#animations
* @param {number} animationIndex
* @return {Promise<AnimationClip>}
*/
loadAnimation( animationIndex ) {
const json = this.json;
const animationDef = json.animations[ animationIndex ];
const pendingNodes = [];
const pendingInputAccessors = [];
const pendingOutputAccessors = [];
const pendingSamplers = [];
const pendingTargets = [];
for ( let i = 0, il = animationDef.channels.length; i < il; i ++ ) {
const channel = animationDef.channels[ i ];
const sampler = animationDef.samplers[ channel.sampler ];
const target = channel.target;
const name = target.node !== undefined ? target.node : target.id; // NOTE: target.id is deprecated.
const input = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.input ] : sampler.input;
const output = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.output ] : sampler.output;
pendingNodes.push( this.getDependency( 'node', name ) );
pendingInputAccessors.push( this.getDependency( 'accessor', input ) );
pendingOutputAccessors.push( this.getDependency( 'accessor', output ) );
pendingSamplers.push( sampler );
pendingTargets.push( target );
}
return Promise.all( [
Promise.all( pendingNodes ),
Promise.all( pendingInputAccessors ),
Promise.all( pendingOutputAccessors ),
Promise.all( pendingSamplers ),
Promise.all( pendingTargets )
] ).then( function ( dependencies ) {
const nodes = dependencies[ 0 ];
const inputAccessors = dependencies[ 1 ];
const outputAccessors = dependencies[ 2 ];
const samplers = dependencies[ 3 ];
const targets = dependencies[ 4 ];
const tracks = [];
for ( let i = 0, il = nodes.length; i < il; i ++ ) {
const node = nodes[ i ];
const inputAccessor = inputAccessors[ i ];
const outputAccessor = outputAccessors[ i ];
const sampler = samplers[ i ];
const target = targets[ i ];
if ( node === undefined ) continue;
node.updateMatrix();
node.matrixAutoUpdate = true;
let TypedKeyframeTrack;
switch ( PATH_PROPERTIES$1[ target.path ] ) {
case PATH_PROPERTIES$1.weights:
TypedKeyframeTrack = NumberKeyframeTrack;
break;
case PATH_PROPERTIES$1.rotation:
TypedKeyframeTrack = QuaternionKeyframeTrack;
break;
case PATH_PROPERTIES$1.position:
case PATH_PROPERTIES$1.scale:
default:
TypedKeyframeTrack = VectorKeyframeTrack;
break;
}
const targetName = node.name ? node.name : node.uuid;
const interpolation = sampler.interpolation !== undefined ? INTERPOLATION[ sampler.interpolation ] : InterpolateLinear;
const targetNames = [];
if ( PATH_PROPERTIES$1[ target.path ] === PATH_PROPERTIES$1.weights ) {
// Node may be a Group (glTF mesh with several primitives) or a Mesh.
node.traverse( function ( object ) {
if ( object.isMesh === true && object.morphTargetInfluences ) {
targetNames.push( object.name ? object.name : object.uuid );
}
} );
} else {
targetNames.push( targetName );
}
let outputArray = outputAccessor.array;
if ( outputAccessor.normalized ) {
const scale = getNormalizedComponentScale( outputArray.constructor );
const scaled = new Float32Array( outputArray.length );
for ( let j = 0, jl = outputArray.length; j < jl; j ++ ) {
scaled[ j ] = outputArray[ j ] * scale;
}
outputArray = scaled;
}
for ( let j = 0, jl = targetNames.length; j < jl; j ++ ) {
const track = new TypedKeyframeTrack(
targetNames[ j ] + '.' + PATH_PROPERTIES$1[ target.path ],
inputAccessor.array,
outputArray,
interpolation
);
// Override interpolation with custom factory method.
if ( sampler.interpolation === 'CUBICSPLINE' ) {
track.createInterpolant = function InterpolantFactoryMethodGLTFCubicSpline( result ) {
// A CUBICSPLINE keyframe in glTF has three output values for each input value,
// representing inTangent, splineVertex, and outTangent. As a result, track.getValueSize()
// must be divided by three to get the interpolant's sampleSize argument.
return new GLTFCubicSplineInterpolant( this.times, this.values, this.getValueSize() / 3, result );
};
// Mark as CUBICSPLINE. `track.getInterpolation()` doesn't support custom interpolants.
track.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline = true;
}
tracks.push( track );
}
}
const name = animationDef.name ? animationDef.name : 'animation_' + animationIndex;
return new AnimationClip( name, undefined, tracks );
} );
}
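/*
 * Illustrative sketch (not part of the library): why getValueSize() is divided by three
 * for CUBICSPLINE above, using a hypothetical translation channel.
 *
 *   // CUBICSPLINE stores ( inTangent, splineVertex, outTangent ) per keyframe, so a
 *   // VEC3 translation sampler has 9 floats per key; getValueSize() returns 9 and the
 *   // interpolant's sample size is 9 / 3 = 3.
 */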
createNodeMesh( nodeIndex ) {
const json = this.json;
const parser = this;
const nodeDef = json.nodes[ nodeIndex ];
if ( nodeDef.mesh === undefined ) return null;
return parser.getDependency( 'mesh', nodeDef.mesh ).then( function ( mesh ) {
const node = parser._getNodeRef( parser.meshCache, nodeDef.mesh, mesh );
// if weights are provided on the node, override weights on the mesh.
if ( nodeDef.weights !== undefined ) {
node.traverse( function ( o ) {
if ( ! o.isMesh ) return;
for ( let i = 0, il = nodeDef.weights.length; i < il; i ++ ) {
o.morphTargetInfluences[ i ] = nodeDef.weights[ i ];
}
} );
}
return node;
} );
}
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#nodes-and-hierarchy
* @param {number} nodeIndex
* @return {Promise<Object3D>}
*/
loadNode( nodeIndex ) {
const json = this.json;
const extensions = this.extensions;
const parser = this;
const nodeDef = json.nodes[ nodeIndex ];
// reserve node's name before its dependencies, so the root has the intended name.
const nodeName = nodeDef.name ? parser.createUniqueName( nodeDef.name ) : '';
return ( function () {
const pending = [];
const meshPromise = parser._invokeOne( function ( ext ) {
return ext.createNodeMesh && ext.createNodeMesh( nodeIndex );
} );
if ( meshPromise ) {
pending.push( meshPromise );
}
if ( nodeDef.camera !== undefined ) {
pending.push( parser.getDependency( 'camera', nodeDef.camera ).then( function ( camera ) {
return parser._getNodeRef( parser.cameraCache, nodeDef.camera, camera );
} ) );
}
parser._invokeAll( function ( ext ) {
return ext.createNodeAttachment && ext.createNodeAttachment( nodeIndex );
} ).forEach( function ( promise ) {
pending.push( promise );
} );
return Promise.all( pending );
}() ).then( function ( objects ) {
let node;
// .isBone isn't in glTF spec. See ._markDefs
if ( nodeDef.isBone === true ) {
node = new Bone();
} else if ( objects.length > 1 ) {
node = new Group();
} else if ( objects.length === 1 ) {
node = objects[ 0 ];
} else {
node = new Object3D();
}
if ( node !== objects[ 0 ] ) {
for ( let i = 0, il = objects.length; i < il; i ++ ) {
node.add( objects[ i ] );
}
}
if ( nodeDef.name ) {
node.userData.name = nodeDef.name;
node.name = nodeName;
}
assignExtrasToUserData( node, nodeDef );
if ( nodeDef.extensions ) addUnknownExtensionsToUserData( extensions, node, nodeDef );
if ( nodeDef.matrix !== undefined ) {
const matrix = new Matrix4();
matrix.fromArray( nodeDef.matrix );
node.applyMatrix4( matrix );
} else {
if ( nodeDef.translation !== undefined ) {
node.position.fromArray( nodeDef.translation );
}
if ( nodeDef.rotation !== undefined ) {
node.quaternion.fromArray( nodeDef.rotation );
}
if ( nodeDef.scale !== undefined ) {
node.scale.fromArray( nodeDef.scale );
}
}
parser.associations.set( node, { type: 'nodes', index: nodeIndex } );
return node;
} );
}
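/*
 * Illustrative sketch (not part of the library): the transform handling above, with
 * hypothetical node definitions.
 *
 *   // { matrix: [ 16 numbers ] }        -> node.applyMatrix4( matrix )
 *   // { translation: [ 0, 1, 0 ],
 *   //   rotation: [ 0, 0, 0, 1 ],
 *   //   scale: [ 2, 2, 2 ] }            -> position / quaternion / scale .fromArray()
 */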
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#scenes
* @param {number} sceneIndex
* @return {Promise<Group>}
*/
loadScene( sceneIndex ) {
const json = this.json;
const extensions = this.extensions;
const sceneDef = this.json.scenes[ sceneIndex ];
const parser = this;
// Loader returns Group, not Scene.
// See: https://github.com/mrdoob/three.js/issues/18342#issuecomment-578981172
const scene = new Group();
if ( sceneDef.name ) scene.name = parser.createUniqueName( sceneDef.name );
assignExtrasToUserData( scene, sceneDef );
if ( sceneDef.extensions ) addUnknownExtensionsToUserData( extensions, scene, sceneDef );
const nodeIds = sceneDef.nodes || [];
const pending = [];
for ( let i = 0, il = nodeIds.length; i < il; i ++ ) {
pending.push( buildNodeHierachy( nodeIds[ i ], scene, json, parser ) );
}
return Promise.all( pending ).then( function () {
return scene;
} );
}
}
function buildNodeHierachy( nodeId, parentObject, json, parser ) {
const nodeDef = json.nodes[ nodeId ];
return parser.getDependency( 'node', nodeId ).then( function ( node ) {
if ( nodeDef.skin === undefined ) return node;
// build skeleton here as well
let skinEntry;
return parser.getDependency( 'skin', nodeDef.skin ).then( function ( skin ) {
skinEntry = skin;
const pendingJoints = [];
for ( let i = 0, il = skinEntry.joints.length; i < il; i ++ ) {
pendingJoints.push( parser.getDependency( 'node', skinEntry.joints[ i ] ) );
}
return Promise.all( pendingJoints );
} ).then( function ( jointNodes ) {
node.traverse( function ( mesh ) {
if ( ! mesh.isMesh ) return;
const bones = [];
const boneInverses = [];
for ( let j = 0, jl = jointNodes.length; j < jl; j ++ ) {
const jointNode = jointNodes[ j ];
if ( jointNode ) {
bones.push( jointNode );
const mat = new Matrix4();
if ( skinEntry.inverseBindMatrices !== undefined ) {
mat.fromArray( skinEntry.inverseBindMatrices.array, j * 16 );
}
boneInverses.push( mat );
} else {
console.warn( 'THREE.GLTFLoader: Joint "%s" could not be found.', skinEntry.joints[ j ] );
}
}
mesh.bind( new Skeleton( bones, boneInverses ), mesh.matrixWorld );
} );
return node;
} );
} ).then( function ( node ) {
// build node hierarchy
parentObject.add( node );
const pending = [];
if ( nodeDef.children ) {
const children = nodeDef.children;
for ( let i = 0, il = children.length; i < il; i ++ ) {
const child = children[ i ];
pending.push( buildNodeHierachy( child, node, json, parser ) );
}
}
return Promise.all( pending );
} );
}
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
*/
function computeBounds( geometry, primitiveDef, parser ) {
const attributes = primitiveDef.attributes;
const box = new Box3();
if ( attributes.POSITION !== undefined ) {
const accessor = parser.json.accessors[ attributes.POSITION ];
const min = accessor.min;
const max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
box.set(
new Vector3( min[ 0 ], min[ 1 ], min[ 2 ] ),
new Vector3( max[ 0 ], max[ 1 ], max[ 2 ] )
);
if ( accessor.normalized ) {
const boxScale = getNormalizedComponentScale( WEBGL_COMPONENT_TYPES[ accessor.componentType ] );
box.min.multiplyScalar( boxScale );
box.max.multiplyScalar( boxScale );
}
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
return;
}
} else {
return;
}
const targets = primitiveDef.targets;
if ( targets !== undefined ) {
const maxDisplacement = new Vector3();
const vector = new Vector3();
for ( let i = 0, il = targets.length; i < il; i ++ ) {
const target = targets[ i ];
if ( target.POSITION !== undefined ) {
const accessor = parser.json.accessors[ target.POSITION ];
const min = accessor.min;
const max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
// we need to get max of absolute components because target weight is [-1,1]
vector.setX( Math.max( Math.abs( min[ 0 ] ), Math.abs( max[ 0 ] ) ) );
vector.setY( Math.max( Math.abs( min[ 1 ] ), Math.abs( max[ 1 ] ) ) );
vector.setZ( Math.max( Math.abs( min[ 2 ] ), Math.abs( max[ 2 ] ) ) );
if ( accessor.normalized ) {
const boxScale = getNormalizedComponentScale( WEBGL_COMPONENT_TYPES[ accessor.componentType ] );
vector.multiplyScalar( boxScale );
}
// Note: this assumes that the sum of all weights is at most 1. This isn't quite correct - it's more conservative
// to assume that each target can have a max weight of 1. However, for some use cases - notably, when morph targets
// are used to implement key-frame animations and as such only two are active at a time - this results in very large
// boxes. So for now we make a box that's sometimes a touch too small but is hopefully mostly of reasonable size.
maxDisplacement.max( vector );
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
}
}
}
// As per comment above this box isn't conservative, but has a reasonable size for a very large number of morph targets.
box.expandByVector( maxDisplacement );
}
geometry.boundingBox = box;
const sphere = new Sphere();
box.getCenter( sphere.center );
sphere.radius = box.min.distanceTo( box.max ) / 2;
geometry.boundingSphere = sphere;
}
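/*
 * Illustrative sketch (not part of the library): the bounding sphere produced above is
 * centered on the box and uses half the box diagonal as its radius.
 *
 *   // box min ( 0, 0, 0 ), max ( 2, 2, 2 )
 *   // center = ( 1, 1, 1 ), radius = sqrt( 12 ) / 2, i.e. ~1.732
 */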
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addPrimitiveAttributes( geometry, primitiveDef, parser ) {
const attributes = primitiveDef.attributes;
const pending = [];
function assignAttributeAccessor( accessorIndex, attributeName ) {
return parser.getDependency( 'accessor', accessorIndex )
.then( function ( accessor ) {
geometry.setAttribute( attributeName, accessor );
} );
}
for ( const gltfAttributeName in attributes ) {
const threeAttributeName = ATTRIBUTES[ gltfAttributeName ] || gltfAttributeName.toLowerCase();
// Skip attributes already provided by e.g. Draco extension.
if ( threeAttributeName in geometry.attributes ) continue;
pending.push( assignAttributeAccessor( attributes[ gltfAttributeName ], threeAttributeName ) );
}
if ( primitiveDef.indices !== undefined && ! geometry.index ) {
const accessor = parser.getDependency( 'accessor', primitiveDef.indices ).then( function ( accessor ) {
geometry.setIndex( accessor );
} );
pending.push( accessor );
}
assignExtrasToUserData( geometry, primitiveDef );
computeBounds( geometry, primitiveDef, parser );
return Promise.all( pending ).then( function () {
return primitiveDef.targets !== undefined
? addMorphTargets( geometry, primitiveDef.targets, parser )
: geometry;
} );
}
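/*
 * Illustrative sketch (not part of the library): attribute naming used above. Names found
 * in the ATTRIBUTES table defined earlier in this file (e.g. POSITION -> 'position',
 * TEXCOORD_0 -> 'uv') are used as-is; anything else falls back to toLowerCase(), so a
 * hypothetical '_CUSTOM' attribute becomes '_custom'.
 */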
/**
* @param {BufferGeometry} geometry
* @param {Number} drawMode
* @return {BufferGeometry}
*/
function toTrianglesDrawMode( geometry, drawMode ) {
let index = geometry.getIndex();
// generate index if not present
if ( index === null ) {
const indices = [];
const position = geometry.getAttribute( 'position' );
if ( position !== undefined ) {
for ( let i = 0; i < position.count; i ++ ) {
indices.push( i );
}
geometry.setIndex( indices );
index = geometry.getIndex();
} else {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Undefined position attribute. Processing not possible.' );
return geometry;
}
}
//
const numberOfTriangles = index.count - 2;
const newIndices = [];
if ( drawMode === TriangleFanDrawMode ) {
// gl.TRIANGLE_FAN
for ( let i = 1; i <= numberOfTriangles; i ++ ) {
newIndices.push( index.getX( 0 ) );
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
}
} else {
// gl.TRIANGLE_STRIP
for ( let i = 0; i < numberOfTriangles; i ++ ) {
if ( i % 2 === 0 ) {
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i + 2 ) );
} else {
newIndices.push( index.getX( i + 2 ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i ) );
}
}
}
if ( ( newIndices.length / 3 ) !== numberOfTriangles ) {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Unable to generate correct amount of triangles.' );
}
// build final geometry
const newGeometry = geometry.clone();
newGeometry.setIndex( newIndices );
return newGeometry;
}
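/*
 * Illustrative sketch (not part of the library): the index expansion performed above for
 * a hypothetical five-vertex primitive with indices [ 0, 1, 2, 3, 4 ].
 *
 *   // TRIANGLE_STRIP -> [ 0, 1, 2,   3, 2, 1,   2, 3, 4 ]  (winding flipped on odd triangles)
 *   // TRIANGLE_FAN   -> [ 0, 1, 2,   0, 2, 3,   0, 3, 4 ]
 */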
const e=[171,75,84,88,32,50,48,187,13,10,26,10];var n,i$1,s,a,r,o,l,f;!function(t){t[t.NONE=0]="NONE",t[t.BASISLZ=1]="BASISLZ",t[t.ZSTD=2]="ZSTD",t[t.ZLIB=3]="ZLIB";}(n||(n={})),function(t){t[t.BASICFORMAT=0]="BASICFORMAT";}(i$1||(i$1={})),function(t){t[t.UNSPECIFIED=0]="UNSPECIFIED",t[t.ETC1S=163]="ETC1S",t[t.UASTC=166]="UASTC";}(s||(s={})),function(t){t[t.UNSPECIFIED=0]="UNSPECIFIED",t[t.SRGB=1]="SRGB";}(a||(a={})),function(t){t[t.UNSPECIFIED=0]="UNSPECIFIED",t[t.LINEAR=1]="LINEAR",t[t.SRGB=2]="SRGB",t[t.ITU=3]="ITU",t[t.NTSC=4]="NTSC",t[t.SLOG=5]="SLOG",t[t.SLOG2=6]="SLOG2";}(r||(r={})),function(t){t[t.ALPHA_STRAIGHT=0]="ALPHA_STRAIGHT",t[t.ALPHA_PREMULTIPLIED=1]="ALPHA_PREMULTIPLIED";}(o||(o={})),function(t){t[t.RGB=0]="RGB",t[t.RRR=3]="RRR",t[t.GGG=4]="GGG",t[t.AAA=15]="AAA";}(l||(l={})),function(t){t[t.RGB=0]="RGB",t[t.RGBA=3]="RGBA",t[t.RRR=4]="RRR",t[t.RRRG=5]="RRRG";}(f||(f={}));class U{constructor(){this.vkFormat=0,this.typeSize=1,this.pixelWidth=0,this.pixelHeight=0,this.pixelDepth=0,this.layerCount=0,this.faceCount=1,this.supercompressionScheme=n.NONE,this.levels=[],this.dataFormatDescriptor=[{vendorId:0,descriptorType:i$1.BASICFORMAT,versionNumber:2,descriptorBlockSize:40,colorModel:s.UNSPECIFIED,colorPrimaries:a.SRGB,transferFunction:a.SRGB,flags:o.ALPHA_STRAIGHT,texelBlockDimension:{x:4,y:4,z:1,w:1},bytesPlane:[],samples:[]}],this.keyValue={},this.globalData=null;}}class c{constructor(t,e,n,i){this._dataView=new DataView(t.buffer,t.byteOffset+e,n),this._littleEndian=i,this._offset=0;}_nextUint8(){const t=this._dataView.getUint8(this._offset);return this._offset+=1,t}_nextUint16(){const t=this._dataView.getUint16(this._offset,this._littleEndian);return this._offset+=2,t}_nextUint32(){const t=this._dataView.getUint32(this._offset,this._littleEndian);return this._offset+=4,t}_nextUint64(){const t=this._dataView.getUint32(this._offset,this._littleEndian)+2**32*this._dataView.getUint32(this._offset+4,this._littleEndian);return this._offset+=8,t}_skip(t){return this._offset+=t,this}_scan(t,e=0){const n=this._offset;let i=0;for(;this._dataView.getUint8(this._offset)!==e&&i<t;)i++,this._offset++;return i<t&&this._offset++,new Uint8Array(this._dataView.buffer,this._dataView.byteOffset+n,i)}}function _(t){return "undefined"!=typeof TextDecoder?(new TextDecoder).decode(t):Buffer.from(t).toString("utf8")}function p(t){const n=new Uint8Array(t.buffer,t.byteOffset,e.length);if(n[0]!==e[0]||n[1]!==e[1]||n[2]!==e[2]||n[3]!==e[3]||n[4]!==e[4]||n[5]!==e[5]||n[6]!==e[6]||n[7]!==e[7]||n[8]!==e[8]||n[9]!==e[9]||n[10]!==e[10]||n[11]!==e[11])throw new Error("Missing KTX 2.0 identifier.");const i=new U,s=17*Uint32Array.BYTES_PER_ELEMENT,a=new c(t,e.length,s,!0);i.vkFormat=a._nextUint32(),i.typeSize=a._nextUint32(),i.pixelWidth=a._nextUint32(),i.pixelHeight=a._nextUint32(),i.pixelDepth=a._nextUint32(),i.layerCount=a._nextUint32(),i.faceCount=a._nextUint32();const r=a._nextUint32();i.supercompressionScheme=a._nextUint32();const o=a._nextUint32(),l=a._nextUint32(),f=a._nextUint32(),h=a._nextUint32(),g=a._nextUint64(),p=a._nextUint64(),x=new c(t,e.length+s,3*r*8,!0);for(let e=0;e<r;e++)i.levels.push({levelData:new Uint8Array(t.buffer,t.byteOffset+x._nextUint64(),x._nextUint64()),uncompressedByteLength:x._nextUint64()});const u=new 
c(t,o,l,!0),y={vendorId:u._skip(4)._nextUint16(),descriptorType:u._nextUint16(),versionNumber:u._nextUint16(),descriptorBlockSize:u._nextUint16(),colorModel:u._nextUint8(),colorPrimaries:u._nextUint8(),transferFunction:u._nextUint8(),flags:u._nextUint8(),texelBlockDimension:{x:u._nextUint8()+1,y:u._nextUint8()+1,z:u._nextUint8()+1,w:u._nextUint8()+1},bytesPlane:[u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8()],samples:[]},D=(y.descriptorBlockSize/4-6)/4;for(let t=0;t<D;t++)y.samples[t]={bitOffset:u._nextUint16(),bitLength:u._nextUint8(),channelID:u._nextUint8(),samplePosition:[u._nextUint8(),u._nextUint8(),u._nextUint8(),u._nextUint8()],sampleLower:u._nextUint32(),sampleUpper:u._nextUint32()};i.dataFormatDescriptor.length=0,i.dataFormatDescriptor.push(y);const b=new c(t,f,h,!0);for(;b._offset<h;){const t=b._nextUint32(),e=b._scan(t),n=_(e),s=b._scan(t-e.byteLength);i.keyValue[n]=n.match(/^ktx/i)?_(s):s,b._offset%4&&b._skip(4-b._offset%4);}if(p<=0)return i;const d=new c(t,g,p,!0),B=d._nextUint16(),w=d._nextUint16(),A=d._nextUint32(),S=d._nextUint32(),m=d._nextUint32(),L=d._nextUint32(),I=[];for(let t=0;t<r;t++)I.push({imageFlags:d._nextUint32(),rgbSliceByteOffset:d._nextUint32(),rgbSliceByteLength:d._nextUint32(),alphaSliceByteOffset:d._nextUint32(),alphaSliceByteLength:d._nextUint32()});const R=g+d._offset,E=R+A,T=E+S,O=T+m,P=new Uint8Array(t.buffer,t.byteOffset+R,A),C=new Uint8Array(t.buffer,t.byteOffset+E,S),F=new Uint8Array(t.buffer,t.byteOffset+T,m),G=new Uint8Array(t.buffer,t.byteOffset+O,L);return i.globalData={endpointCount:B,selectorCount:w,imageDescs:I,endpointsData:P,selectorsData:C,tablesData:F,extendedData:G},i}
/**
* Loader for Basis Universal GPU Texture Codec.
*
* Basis Universal is a "supercompressed" GPU texture and texture video
* compression system that outputs a highly compressed intermediate file format
* (.basis) that can be quickly transcoded to a wide variety of GPU texture
* compression formats.
*
* This loader parallelizes the transcoding process across a configurable number
* of web workers, before transferring the transcoded compressed texture back
* to the main thread.
*/
const _taskCache = new WeakMap();
class BasisTextureLoader extends Loader {
constructor( manager ) {
super( manager );
this.transcoderPath = '';
this.transcoderBinary = null;
this.transcoderPending = null;
this.workerLimit = 4;
this.workerPool = [];
this.workerNextTaskID = 1;
this.workerSourceURL = '';
this.workerConfig = null;
}
setTranscoderPath( path ) {
this.transcoderPath = path;
return this;
}
setWorkerLimit( workerLimit ) {
this.workerLimit = workerLimit;
return this;
}
detectSupport( renderer ) {
this.workerConfig = {
astcSupported: renderer.extensions.has( 'WEBGL_compressed_texture_astc' ),
etc1Supported: renderer.extensions.has( 'WEBGL_compressed_texture_etc1' ),
etc2Supported: renderer.extensions.has( 'WEBGL_compressed_texture_etc' ),
dxtSupported: renderer.extensions.has( 'WEBGL_compressed_texture_s3tc' ),
bptcSupported: renderer.extensions.has( 'EXT_texture_compression_bptc' ),
pvrtcSupported: renderer.extensions.has( 'WEBGL_compressed_texture_pvrtc' )
|| renderer.extensions.has( 'WEBKIT_WEBGL_compressed_texture_pvrtc' )
};
return this;
}
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setResponseType( 'arraybuffer' );
loader.setWithCredentials( this.withCredentials );
const texture = new CompressedTexture();
loader.load( url, ( buffer ) => {
// Check for an existing task using this buffer. A transferred buffer cannot be transferred
// again from this thread.
if ( _taskCache.has( buffer ) ) {
const cachedTask = _taskCache.get( buffer );
return cachedTask.promise.then( onLoad ).catch( onError );
}
this._createTexture( [ buffer ] )
.then( function ( _texture ) {
texture.copy( _texture );
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
} )
.catch( onError );
}, onProgress, onError );
return texture;
}
/** Low-level transcoding API, exposed for use by KTX2Loader. */
parseInternalAsync( options ) {
const { levels } = options;
const buffers = new Set();
for ( let i = 0; i < levels.length; i ++ ) {
buffers.add( levels[ i ].data.buffer );
}
return this._createTexture( Array.from( buffers ), { ...options, lowLevel: true } );
}
/**
* @param {ArrayBuffer[]} buffers
* @param {object?} config
* @return {Promise<CompressedTexture>}
*/
_createTexture( buffers, config = {} ) {
let worker;
let taskID;
const taskConfig = config;
let taskCost = 0;
for ( let i = 0; i < buffers.length; i ++ ) {
taskCost += buffers[ i ].byteLength;
}
const texturePending = this._allocateWorker( taskCost )
.then( ( _worker ) => {
worker = _worker;
taskID = this.workerNextTaskID ++;
return new Promise( ( resolve, reject ) => {
worker._callbacks[ taskID ] = { resolve, reject };
worker.postMessage( { type: 'transcode', id: taskID, buffers: buffers, taskConfig: taskConfig }, buffers );
} );
} )
.then( ( message ) => {
const { mipmaps, width, height, format } = message;
const texture = new CompressedTexture( mipmaps, width, height, format, UnsignedByteType );
texture.minFilter = mipmaps.length === 1 ? LinearFilter : LinearMipmapLinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.needsUpdate = true;
return texture;
} );
// Note: replaced '.finally()' with '.catch().then()' block - iOS 11 support (#19416)
texturePending
.catch( () => true )
.then( () => {
if ( worker && taskID ) {
worker._taskLoad -= taskCost;
delete worker._callbacks[ taskID ];
}
} );
// Cache the task result.
_taskCache.set( buffers[ 0 ], { promise: texturePending } );
return texturePending;
}
_initTranscoder() {
if ( ! this.transcoderPending ) {
// Load transcoder wrapper.
const jsLoader = new FileLoader( this.manager );
jsLoader.setPath( this.transcoderPath );
jsLoader.setWithCredentials( this.withCredentials );
const jsContent = new Promise( ( resolve, reject ) => {
jsLoader.load( 'basis_transcoder.js', resolve, undefined, reject );
} );
// Load transcoder WASM binary.
const binaryLoader = new FileLoader( this.manager );
binaryLoader.setPath( this.transcoderPath );
binaryLoader.setResponseType( 'arraybuffer' );
binaryLoader.setWithCredentials( this.withCredentials );
const binaryContent = new Promise( ( resolve, reject ) => {
binaryLoader.load( 'basis_transcoder.wasm', resolve, undefined, reject );
} );
this.transcoderPending = Promise.all( [ jsContent, binaryContent ] )
.then( ( [ jsContent, binaryContent ] ) => {
const fn = BasisTextureLoader.BasisWorker.toString();
const body = [
'/* constants */',
'let _EngineFormat = ' + JSON.stringify( BasisTextureLoader.EngineFormat ),
'let _TranscoderFormat = ' + JSON.stringify( BasisTextureLoader.TranscoderFormat ),
'let _BasisFormat = ' + JSON.stringify( BasisTextureLoader.BasisFormat ),
'/* basis_transcoder.js */',
jsContent,
'/* worker */',
fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
].join( '\n' );
this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
this.transcoderBinary = binaryContent;
} );
}
return this.transcoderPending;
}
_allocateWorker( taskCost ) {
return this._initTranscoder().then( () => {
if ( this.workerPool.length < this.workerLimit ) {
const worker = new Worker( this.workerSourceURL );
worker._callbacks = {};
worker._taskLoad = 0;
worker.postMessage( {
type: 'init',
config: this.workerConfig,
transcoderBinary: this.transcoderBinary,
} );
worker.onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'transcode':
worker._callbacks[ message.id ].resolve( message );
break;
case 'error':
worker._callbacks[ message.id ].reject( message );
break;
default:
console.error( 'THREE.BasisTextureLoader: Unexpected message, "' + message.type + '"' );
}
};
this.workerPool.push( worker );
} else {
this.workerPool.sort( function ( a, b ) {
return a._taskLoad > b._taskLoad ? - 1 : 1;
} );
}
const worker = this.workerPool[ this.workerPool.length - 1 ];
worker._taskLoad += taskCost;
return worker;
} );
}
dispose() {
for ( let i = 0; i < this.workerPool.length; i ++ ) {
this.workerPool[ i ].terminate();
}
this.workerPool.length = 0;
return this;
}
}
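/*
 * Illustrative usage sketch (not part of the library). The transcoder path and texture
 * URL are hypothetical; the path must point at the basis_transcoder.js/.wasm files.
 *
 *   // const basisLoader = new BasisTextureLoader();
 *   // basisLoader.setTranscoderPath( '/assets/basis/' );
 *   // basisLoader.detectSupport( renderer );
 *   // basisLoader.load( 'diffuse.basis', ( texture ) => { material.map = texture; } );
 */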
/* CONSTANTS */
BasisTextureLoader.BasisFormat = {
ETC1S: 0,
UASTC_4x4: 1,
};
BasisTextureLoader.TranscoderFormat = {
ETC1: 0,
ETC2: 1,
BC1: 2,
BC3: 3,
BC4: 4,
BC5: 5,
BC7_M6_OPAQUE_ONLY: 6,
BC7_M5: 7,
PVRTC1_4_RGB: 8,
PVRTC1_4_RGBA: 9,
ASTC_4x4: 10,
ATC_RGB: 11,
ATC_RGBA_INTERPOLATED_ALPHA: 12,
RGBA32: 13,
RGB565: 14,
BGR565: 15,
RGBA4444: 16,
};
BasisTextureLoader.EngineFormat = {
RGBAFormat: RGBAFormat,
RGBA_ASTC_4x4_Format: RGBA_ASTC_4x4_Format,
RGBA_BPTC_Format: RGBA_BPTC_Format,
RGBA_ETC2_EAC_Format: RGBA_ETC2_EAC_Format,
RGBA_PVRTC_4BPPV1_Format: RGBA_PVRTC_4BPPV1_Format,
RGBA_S3TC_DXT5_Format: RGBA_S3TC_DXT5_Format,
RGB_ETC1_Format: RGB_ETC1_Format,
RGB_ETC2_Format: RGB_ETC2_Format,
RGB_PVRTC_4BPPV1_Format: RGB_PVRTC_4BPPV1_Format,
RGB_S3TC_DXT1_Format: RGB_S3TC_DXT1_Format,
};
/* WEB WORKER */
BasisTextureLoader.BasisWorker = function () {
let config;
let transcoderPending;
let BasisModule;
const EngineFormat = _EngineFormat; // eslint-disable-line no-undef
const TranscoderFormat = _TranscoderFormat; // eslint-disable-line no-undef
const BasisFormat = _BasisFormat; // eslint-disable-line no-undef
onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'init':
config = message.config;
init( message.transcoderBinary );
break;
case 'transcode':
transcoderPending.then( () => {
try {
const { width, height, hasAlpha, mipmaps, format } = message.taskConfig.lowLevel
? transcodeLowLevel( message.taskConfig )
: transcode( message.buffers[ 0 ] );
const buffers = [];
for ( let i = 0; i < mipmaps.length; ++ i ) {
buffers.push( mipmaps[ i ].data.buffer );
}
self.postMessage( { type: 'transcode', id: message.id, width, height, hasAlpha, mipmaps, format }, buffers );
} catch ( error ) {
console.error( error );
self.postMessage( { type: 'error', id: message.id, error: error.message } );
}
} );
break;
}
};
function init( wasmBinary ) {
transcoderPending = new Promise( ( resolve ) => {
BasisModule = { wasmBinary, onRuntimeInitialized: resolve };
BASIS( BasisModule ); // eslint-disable-line no-undef
} ).then( () => {
BasisModule.initializeBasis();
} );
}
function transcodeLowLevel( taskConfig ) {
const { basisFormat, width, height, hasAlpha } = taskConfig;
const { transcoderFormat, engineFormat } = getTranscoderFormat( basisFormat, width, height, hasAlpha );
const blockByteLength = BasisModule.getBytesPerBlockOrPixel( transcoderFormat );
assert( BasisModule.isFormatSupported( transcoderFormat ), 'THREE.BasisTextureLoader: Unsupported format.' );
const mipmaps = [];
if ( basisFormat === BasisFormat.ETC1S ) {
const transcoder = new BasisModule.LowLevelETC1SImageTranscoder();
const { endpointCount, endpointsData, selectorCount, selectorsData, tablesData } = taskConfig.globalData;
try {
let ok;
ok = transcoder.decodePalettes( endpointCount, endpointsData, selectorCount, selectorsData );
assert( ok, 'THREE.BasisTextureLoader: decodePalettes() failed.' );
ok = transcoder.decodeTables( tablesData );
assert( ok, 'THREE.BasisTextureLoader: decodeTables() failed.' );
for ( let i = 0; i < taskConfig.levels.length; i ++ ) {
const level = taskConfig.levels[ i ];
const imageDesc = taskConfig.globalData.imageDescs[ i ];
const dstByteLength = getTranscodedImageByteLength( transcoderFormat, level.width, level.height );
const dst = new Uint8Array( dstByteLength );
ok = transcoder.transcodeImage(
transcoderFormat,
dst, dstByteLength / blockByteLength,
level.data,
getWidthInBlocks( transcoderFormat, level.width ),
getHeightInBlocks( transcoderFormat, level.height ),
level.width, level.height, level.index,
imageDesc.rgbSliceByteOffset, imageDesc.rgbSliceByteLength,
imageDesc.alphaSliceByteOffset, imageDesc.alphaSliceByteLength,
imageDesc.imageFlags,
hasAlpha,
false,
0, 0
);
assert( ok, 'THREE.BasisTextureLoader: transcodeImage() failed for level ' + level.index + '.' );
mipmaps.push( { data: dst, width: level.width, height: level.height } );
}
} finally {
transcoder.delete();
}
} else {
for ( let i = 0; i < taskConfig.levels.length; i ++ ) {
const level = taskConfig.levels[ i ];
const dstByteLength = getTranscodedImageByteLength( transcoderFormat, level.width, level.height );
const dst = new Uint8Array( dstByteLength );
const ok = BasisModule.transcodeUASTCImage(
transcoderFormat,
dst, dstByteLength / blockByteLength,
level.data,
getWidthInBlocks( transcoderFormat, level.width ),
getHeightInBlocks( transcoderFormat, level.height ),
level.width, level.height, level.index,
0,
level.data.byteLength,
0,
hasAlpha,
false,
0, 0,
- 1, - 1
);
assert( ok, 'THREE.BasisTextureLoader: transcodeUASTCImage() failed for level ' + level.index + '.' );
mipmaps.push( { data: dst, width: level.width, height: level.height } );
}
}
return { width, height, hasAlpha, mipmaps, format: engineFormat };
}
function transcode( buffer ) {
const basisFile = new BasisModule.BasisFile( new Uint8Array( buffer ) );
const basisFormat = basisFile.isUASTC() ? BasisFormat.UASTC_4x4 : BasisFormat.ETC1S;
const width = basisFile.getImageWidth( 0, 0 );
const height = basisFile.getImageHeight( 0, 0 );
const levels = basisFile.getNumLevels( 0 );
const hasAlpha = basisFile.getHasAlpha();
function cleanup() {
basisFile.close();
basisFile.delete();
}
const { transcoderFormat, engineFormat } = getTranscoderFormat( basisFormat, width, height, hasAlpha );
if ( ! width || ! height || ! levels ) {
cleanup();
throw new Error( 'THREE.BasisTextureLoader: Invalid texture' );
}
if ( ! basisFile.startTranscoding() ) {
cleanup();
throw new Error( 'THREE.BasisTextureLoader: .startTranscoding failed' );
}
const mipmaps = [];
for ( let mip = 0; mip < levels; mip ++ ) {
const mipWidth = basisFile.getImageWidth( 0, mip );
const mipHeight = basisFile.getImageHeight( 0, mip );
const dst = new Uint8Array( basisFile.getImageTranscodedSizeInBytes( 0, mip, transcoderFormat ) );
const status = basisFile.transcodeImage(
dst,
0,
mip,
transcoderFormat,
0,
hasAlpha
);
if ( ! status ) {
cleanup();
throw new Error( 'THREE.BasisTextureLoader: .transcodeImage failed.' );
}
mipmaps.push( { data: dst, width: mipWidth, height: mipHeight } );
}
cleanup();
return { width, height, hasAlpha, mipmaps, format: engineFormat };
}
//
// Optimal choice of a transcoder target format depends on the Basis format (ETC1S or UASTC),
// device capabilities, and texture dimensions. The list below ranks the formats separately
// for ETC1S and UASTC.
//
// In some cases, transcoding UASTC to RGBA32 might be preferred for higher quality (at
// significant memory cost) compared to ETC1/2, BC1/3, and PVRTC. The transcoder currently
// chooses RGBA32 only as a last resort and does not expose that option to the caller.
const FORMAT_OPTIONS = [
{
if: 'astcSupported',
basisFormat: [ BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ASTC_4x4, TranscoderFormat.ASTC_4x4 ],
engineFormat: [ EngineFormat.RGBA_ASTC_4x4_Format, EngineFormat.RGBA_ASTC_4x4_Format ],
priorityETC1S: Infinity,
priorityUASTC: 1,
needsPowerOfTwo: false,
},
{
if: 'bptcSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.BC7_M5, TranscoderFormat.BC7_M5 ],
engineFormat: [ EngineFormat.RGBA_BPTC_Format, EngineFormat.RGBA_BPTC_Format ],
priorityETC1S: 3,
priorityUASTC: 2,
needsPowerOfTwo: false,
},
{
if: 'dxtSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.BC1, TranscoderFormat.BC3 ],
engineFormat: [ EngineFormat.RGB_S3TC_DXT1_Format, EngineFormat.RGBA_S3TC_DXT5_Format ],
priorityETC1S: 4,
priorityUASTC: 5,
needsPowerOfTwo: false,
},
{
if: 'etc2Supported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ETC1, TranscoderFormat.ETC2 ],
engineFormat: [ EngineFormat.RGB_ETC2_Format, EngineFormat.RGBA_ETC2_EAC_Format ],
priorityETC1S: 1,
priorityUASTC: 3,
needsPowerOfTwo: false,
},
{
if: 'etc1Supported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ETC1, TranscoderFormat.ETC1 ],
engineFormat: [ EngineFormat.RGB_ETC1_Format, EngineFormat.RGB_ETC1_Format ],
priorityETC1S: 2,
priorityUASTC: 4,
needsPowerOfTwo: false,
},
{
if: 'pvrtcSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.PVRTC1_4_RGB, TranscoderFormat.PVRTC1_4_RGBA ],
engineFormat: [ EngineFormat.RGB_PVRTC_4BPPV1_Format, EngineFormat.RGBA_PVRTC_4BPPV1_Format ],
priorityETC1S: 5,
priorityUASTC: 6,
needsPowerOfTwo: true,
},
];
// Copy before sorting: Array#sort sorts in place, so sorting FORMAT_OPTIONS
// twice directly would leave both lists in UASTC priority order.
const ETC1S_OPTIONS = FORMAT_OPTIONS.slice().sort( function ( a, b ) {
return a.priorityETC1S - b.priorityETC1S;
} );
const UASTC_OPTIONS = FORMAT_OPTIONS.slice().sort( function ( a, b ) {
return a.priorityUASTC - b.priorityUASTC;
} );
function getTranscoderFormat( basisFormat, width, height, hasAlpha ) {
let transcoderFormat;
let engineFormat;
const options = basisFormat === BasisFormat.ETC1S ? ETC1S_OPTIONS : UASTC_OPTIONS;
for ( let i = 0; i < options.length; i ++ ) {
const opt = options[ i ];
if ( ! config[ opt.if ] ) continue;
if ( ! opt.basisFormat.includes( basisFormat ) ) continue;
if ( opt.needsPowerOfTwo && ! ( isPowerOfTwo( width ) && isPowerOfTwo( height ) ) ) continue;
transcoderFormat = opt.transcoderFormat[ hasAlpha ? 1 : 0 ];
engineFormat = opt.engineFormat[ hasAlpha ? 1 : 0 ];
return { transcoderFormat, engineFormat };
}
console.warn( 'THREE.BasisTextureLoader: No suitable compressed texture format found. Decoding to RGBA32.' );
transcoderFormat = TranscoderFormat.RGBA32;
engineFormat = EngineFormat.RGBAFormat;
return { transcoderFormat, engineFormat };
}
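// Illustrative example (comment only, not executed): on a hypothetical device
// where only config.dxtSupported is true, an ETC1S texture with alpha matches
// the dxtSupported entry and transcodes to TranscoderFormat.BC3 /
// EngineFormat.RGBA_S3TC_DXT5_Format; without alpha it gets BC1. A UASTC
// texture prefers ASTC or BC7 when those are available. If no entry matches,
// the function warns and falls back to uncompressed RGBA32.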
function assert( ok, message ) {
if ( ! ok ) throw new Error( message );
}
function getWidthInBlocks( transcoderFormat, width ) {
return Math.ceil( width / BasisModule.getFormatBlockWidth( transcoderFormat ) );
}
function getHeightInBlocks( transcoderFormat, height ) {
return Math.ceil( height / BasisModule.getFormatBlockHeight( transcoderFormat ) );
}
function getTranscodedImageByteLength( transcoderFormat, width, height ) {
const blockByteLength = BasisModule.getBytesPerBlockOrPixel( transcoderFormat );
if ( BasisModule.formatIsUncompressed( transcoderFormat ) ) {
return width * height * blockByteLength;
}
if ( transcoderFormat === TranscoderFormat.PVRTC1_4_RGB
|| transcoderFormat === TranscoderFormat.PVRTC1_4_RGBA ) {
// GL requires extra padding for very small textures:
// https://www.khronos.org/registry/OpenGL/extensions/IMG/IMG_texture_compression_pvrtc.txt
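// Worked example (illustrative): a 2x2 level pads to 4x4 and is then clamped
// up to the 8x8 minimum, so the size is computed from 8 * 8 * 4 = 256 bits,
// i.e. 32 bytes, the smallest PVRTC1 allocation GL will accept.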
const paddedWidth = ( width + 3 ) & ~ 3;
const paddedHeight = ( height + 3 ) & ~ 3;
return ( Math.max( 8, paddedWidth ) * Math.max( 8, paddedHeight ) * 4 + 7 ) / 8;
}
return ( getWidthInBlocks( transcoderFormat, width )
* getHeightInBlocks( transcoderFormat, height )
* blockByteLength );
}
function isPowerOfTwo( value ) {
if ( value <= 2 ) return true;
return ( value & ( value - 1 ) ) === 0 && value !== 0;
}
};
/**
* @author Don McCurdy / https://www.donmccurdy.com
*/
let init, instance, heap;
const importObject = {
env: {
emscripten_notify_memory_growth: function ( index ) {
heap = new Uint8Array( instance.exports.memory.buffer );
}
}
};
/**
* ZSTD (Zstandard) decoder.
*
* Compiled from https://github.com/facebook/zstd/tree/dev/contrib/single_file_libs, with the
* following steps:
*
* ```
* ./combine.sh -r ../../lib -o zstddeclib.c zstddeclib-in.c
* emcc zstddeclib.c -Oz -s EXPORTED_FUNCTIONS="['_ZSTD_decompress', '_ZSTD_findDecompressedSize', '_ZSTD_isError', '_malloc', '_free']" -s ALLOW_MEMORY_GROWTH=1 -s MALLOC=emmalloc -o zstddec.wasm
* base64 zstddec.wasm > zstddec.txt
* ```
*
* The base64 string written to `zstddec.txt` is embedded as the `wasm` variable at the bottom
* of this file. The rest of this file is written by hand, in order to avoid an additional JS
* wrapper generated by Emscripten.
*/
class ZSTDDecoder {
init () {
if ( ! init ) {
init = fetch( 'data:application/wasm;base64,' + wasm )
.then( ( response ) => response.arrayBuffer() )
.then( ( arrayBuffer ) => WebAssembly.instantiate( arrayBuffer, importObject ) )
.then( ( result ) => {
instance = result.instance;
importObject.env.emscripten_notify_memory_growth( 0 ); // initialize heap.
});
}
return init;
}
decode ( array, uncompressedSize = 0 ) {
// Write compressed data into WASM memory.
const compressedSize = array.byteLength;
const compressedPtr = instance.exports.malloc( compressedSize );
heap.set( array, compressedPtr );
// Decompress into WASM memory.
uncompressedSize = uncompressedSize || Number( instance.exports.ZSTD_findDecompressedSize( compressedPtr, compressedSize ) );
const uncompressedPtr = instance.exports.malloc( uncompressedSize );
const actualSize = instance.exports.ZSTD_decompress( uncompressedPtr, uncompressedSize, compressedPtr, compressedSize );
// Read decompressed data and free WASM memory.
const dec = heap.slice( uncompressedPtr, uncompressedPtr + actualSize );
instance.exports.free( compressedPtr );
instance.exports.free( uncompressedPtr );
return dec;
}
}
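/**
 * Example usage (an illustrative sketch; `compressed` is assumed to be a
 * Uint8Array of ZSTD-compressed bytes obtained elsewhere):
 *
 * ```
 * const decoder = new ZSTDDecoder();
 * await decoder.init();
 * // Pass the uncompressed size as a second argument when it is known, to
 * // skip the ZSTD_findDecompressedSize lookup.
 * const plain = decoder.decode( compressed );
 * ```
 */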
/**
* BSD License
*
* For Zstandard software
*
* Copyright (c) 2016-present, Yann Collet, Facebook, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name Facebook nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
const wasm = 'AGFzbQEAAAABpQEVYAF/AX9gAn9/AGADf39/AX9gBX9/f39/AX9gAX8AYAJ/fwF/YAR/f39/AX9gA39/fwBgBn9/f39/fwF/YAd/f39/f39/AX9gAn9/AX5gAn5+AX5gAABgBX9/f39/AGAGf39/f39/AGAIf39/f39/f38AYAl/f39/f39/f38AYAABf2AIf39/f39/f38Bf2ANf39/f39/f39/f39/fwF/YAF/AX4CJwEDZW52H2Vtc2NyaXB0ZW5fbm90aWZ5X21lbW9yeV9ncm93dGgABANpaAEFAAAFAgEFCwACAQABAgIFBQcAAwABDgsBAQcAEhMHAAUBDAQEAAANBwQCAgYCBAgDAwMDBgEACQkHBgICAAYGAgQUBwYGAwIGAAMCAQgBBwUGCgoEEQAEBAEIAwgDBQgDEA8IAAcABAUBcAECAgUEAQCAAgYJAX8BQaCgwAILB2AHBm1lbW9yeQIABm1hbGxvYwAoBGZyZWUAJgxaU1REX2lzRXJyb3IAaBlaU1REX2ZpbmREZWNvbXByZXNzZWRTaXplAFQPWlNURF9kZWNvbXByZXNzAEoGX3N0YXJ0ACQJBwEAQQELASQKussBaA8AIAAgACgCBCABajYCBAsZACAAKAIAIAAoAgRBH3F0QQAgAWtBH3F2CwgAIABBiH9LC34BBH9BAyEBIAAoAgQiA0EgTQRAIAAoAggiASAAKAIQTwRAIAAQDQ8LIAAoAgwiAiABRgRAQQFBAiADQSBJGw8LIAAgASABIAJrIANBA3YiBCABIARrIAJJIgEbIgJrIgQ2AgggACADIAJBA3RrNgIEIAAgBCgAADYCAAsgAQsUAQF/IAAgARACIQIgACABEAEgAgv3AQECfyACRQRAIABCADcCACAAQQA2AhAgAEIANwIIQbh/DwsgACABNgIMIAAgAUEEajYCECACQQRPBEAgACABIAJqIgFBfGoiAzYCCCAAIAMoAAA2AgAgAUF/ai0AACIBBEAgAEEIIAEQFGs2AgQgAg8LIABBADYCBEF/DwsgACABNgIIIAAgAS0AACIDNgIAIAJBfmoiBEEBTQRAIARBAWtFBEAgACABLQACQRB0IANyIgM2AgALIAAgAS0AAUEIdCADajYCAAsgASACakF/ai0AACIBRQRAIABBADYCBEFsDwsgAEEoIAEQFCACQQN0ams2AgQgAgsWACAAIAEpAAA3AAAgACABKQAINwAICy8BAX8gAUECdEGgHWooAgAgACgCAEEgIAEgACgCBGprQR9xdnEhAiAAIAEQASACCyEAIAFCz9bTvtLHq9lCfiAAfEIfiUKHla+vmLbem55/fgsdAQF/IAAoAgggACgCDEYEfyAAKAIEQSBGBUEACwuCBAEDfyACQYDAAE8EQCAAIAEgAhBnIAAPCyAAIAJqIQMCQCAAIAFzQQNxRQRAAkAgAkEBSARAIAAhAgwBCyAAQQNxRQRAIAAhAgwBCyAAIQIDQCACIAEtAAA6AAAgAUEBaiEBIAJBAWoiAiADTw0BIAJBA3ENAAsLAkAgA0F8cSIEQcAASQ0AIAIgBEFAaiIFSw0AA0AgAiABKAIANgIAIAIgASgCBDYCBCACIAEoAgg2AgggAiABKAIMNgIMIAIgASgCEDYCECACIAEoAhQ2AhQgAiABKAIYNgIYIAIgASgCHDYCHCACIAEoAiA2AiAgAiABKAIkNgIkIAIgASgCKDYCKCACIAEoAiw2AiwgAiABKAIwNgIwIAIgASgCNDYCNCACIAEoAjg2AjggAiABKAI8NgI8IAFBQGshASACQUBrIgIgBU0NAAsLIAIgBE8NAQNAIAIgASgCADYCACABQQRqIQEgAkEEaiICIARJDQALDAELIANBBEkEQCAAIQIMAQsgA0F8aiIEIABJBEAgACECDAELIAAhAgNAIAIgAS0AADoAACACIAEtAAE6AAEgAiABLQACOgACIAIgAS0AAzoAAyABQQRqIQEgAkEEaiICIARNDQALCyACIANJBEADQCACIAEtAAA6AAAgAUEBaiEBIAJBAWoiAiADRw0ACwsgAAsMACAAIAEpAAA3AAALQQECfyAAKAIIIgEgACgCEEkEQEEDDwsgACAAKAIEIgJBB3E2AgQgACABIAJBA3ZrIgE2AgggACABKAAANgIAQQALDAAgACABKAIANgAAC/cCAQJ/AkAgACABRg0AAkAgASACaiAASwRAIAAgAmoiBCABSw0BCyAAIAEgAhALDwsgACABc0EDcSEDAkACQCAAIAFJBEAgAwRAIAAhAwwDCyAAQQNxRQRAIAAhAwwCCyAAIQMDQCACRQ0EIAMgAS0AADoAACABQQFqIQEgAkF/aiECIANBAWoiA0EDcQ0ACwwBCwJAIAMNACAEQQNxBEADQCACRQ0FIAAgAkF/aiICaiIDIAEgAmotAAA6AAAgA0EDcQ0ACwsgAkEDTQ0AA0AgACACQXxqIgJqIAEgAmooAgA2AgAgAkEDSw0ACwsgAkUNAgNAIAAgAkF/aiICaiABIAJqLQAAOgAAIAINAAsMAgsgAkEDTQ0AIAIhBANAIAMgASgCADYCACABQQRqIQEgA0EEaiEDIARBfGoiBEEDSw0ACyACQQNxIQILIAJFDQADQCADIAEtAAA6AAAgA0EBaiEDIAFBAWohASACQX9qIgINAAsLIAAL8wICAn8BfgJAIAJFDQAgACACaiIDQX9qIAE6AAAgACABOgAAIAJBA0kNACADQX5qIAE6AAAgACABOgABIANBfWogAToAACAAIAE6AAIgAkEHSQ0AIANBfGogAToAACAAIAE6AAMgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIEayICQSBJDQAgAa0iBUIghiAFhCEFIAMgBGohAQNAIAEgBTcDGCABIAU3AxAgASAFNwMIIAEgBTcDACABQSBqIQEgAkFgaiICQR9LDQALCyAACy8BAn8gACgCBCAAKAIAQQJ0aiICLQACIQMgACACLwEAIAEgAi0AAxAIajYCACADCy8BAn8gACgCBCAAKAIAQQJ0aiICLQACIQMgACACLwEAIAEgAi0AAxAFajYCACADCx8AIAAgASACKAIEEAg2AgAgARAEGiAAIAJBCGo2AgQLCAAgAGdBH3MLugUBDX8jAEEQayIKJAACfyAEQQNNBEAgCkEANgIMIApBDGogAyAEEAsaIAAgASACIApBDGpBBBAVIgBBbCAAEAMbIAAgACAESxsMAQsgAEEAIAEoAgBBAXRBAmoQECENQVQgAygAACIGQQ9xIgBBCksNABogA
iAAQQVqNgIAIAMgBGoiAkF8aiEMIAJBeWohDiACQXtqIRAgAEEGaiELQQQhBSAGQQR2IQRBICAAdCIAQQFyIQkgASgCACEPQQAhAiADIQYCQANAIAlBAkggAiAPS3JFBEAgAiEHAkAgCARAA0AgBEH//wNxQf//A0YEQCAHQRhqIQcgBiAQSQR/IAZBAmoiBigAACAFdgUgBUEQaiEFIARBEHYLIQQMAQsLA0AgBEEDcSIIQQNGBEAgBUECaiEFIARBAnYhBCAHQQNqIQcMAQsLIAcgCGoiByAPSw0EIAVBAmohBQNAIAIgB0kEQCANIAJBAXRqQQA7AQAgAkEBaiECDAELCyAGIA5LQQAgBiAFQQN1aiIHIAxLG0UEQCAHKAAAIAVBB3EiBXYhBAwCCyAEQQJ2IQQLIAYhBwsCfyALQX9qIAQgAEF/anEiBiAAQQF0QX9qIgggCWsiEUkNABogBCAIcSIEQQAgESAEIABIG2shBiALCyEIIA0gAkEBdGogBkF/aiIEOwEAIAlBASAGayAEIAZBAUgbayEJA0AgCSAASARAIABBAXUhACALQX9qIQsMAQsLAn8gByAOS0EAIAcgBSAIaiIFQQN1aiIGIAxLG0UEQCAFQQdxDAELIAUgDCIGIAdrQQN0awshBSACQQFqIQIgBEUhCCAGKAAAIAVBH3F2IQQMAQsLQWwgCUEBRyAFQSBKcg0BGiABIAJBf2o2AgAgBiAFQQdqQQN1aiADawwBC0FQCyEAIApBEGokACAACwkAQQFBBSAAGwsMACAAIAEoAAA2AAALqgMBCn8jAEHwAGsiCiQAIAJBAWohDiAAQQhqIQtBgIAEIAVBf2p0QRB1IQxBACECQQEhBkEBIAV0IglBf2oiDyEIA0AgAiAORkUEQAJAIAEgAkEBdCINai8BACIHQf//A0YEQCALIAhBA3RqIAI2AgQgCEF/aiEIQQEhBwwBCyAGQQAgDCAHQRB0QRB1ShshBgsgCiANaiAHOwEAIAJBAWohAgwBCwsgACAFNgIEIAAgBjYCACAJQQN2IAlBAXZqQQNqIQxBACEAQQAhBkEAIQIDQCAGIA5GBEADQAJAIAAgCUYNACAKIAsgAEEDdGoiASgCBCIGQQF0aiICIAIvAQAiAkEBajsBACABIAUgAhAUayIIOgADIAEgAiAIQf8BcXQgCWs7AQAgASAEIAZBAnQiAmooAgA6AAIgASACIANqKAIANgIEIABBAWohAAwBCwsFIAEgBkEBdGouAQAhDUEAIQcDQCAHIA1ORQRAIAsgAkEDdGogBjYCBANAIAIgDGogD3EiAiAISw0ACyAHQQFqIQcMAQsLIAZBAWohBgwBCwsgCkHwAGokAAsjAEIAIAEQCSAAhUKHla+vmLbem55/fkLj3MqV/M7y9YV/fAsQACAAQn43AwggACABNgIACyQBAX8gAARAIAEoAgQiAgRAIAEoAgggACACEQEADwsgABAmCwsfACAAIAEgAi8BABAINgIAIAEQBBogACACQQRqNgIEC0oBAX9BoCAoAgAiASAAaiIAQX9MBEBBiCBBMDYCAEF/DwsCQCAAPwBBEHRNDQAgABBmDQBBiCBBMDYCAEF/DwtBoCAgADYCACABC9cBAQh/Qbp/IQoCQCACKAIEIgggAigCACIJaiIOIAEgAGtLDQBBbCEKIAkgBCADKAIAIgtrSw0AIAAgCWoiBCACKAIIIgxrIQ0gACABQWBqIg8gCyAJQQAQKSADIAkgC2o2AgACQAJAIAwgBCAFa00EQCANIQUMAQsgDCAEIAZrSw0CIAcgDSAFayIAaiIBIAhqIAdNBEAgBCABIAgQDxoMAgsgBCABQQAgAGsQDyEBIAIgACAIaiIINgIEIAEgAGshBAsgBCAPIAUgCEEBECkLIA4hCgsgCgubAgEBfyMAQYABayINJAAgDSADNgJ8AkAgAkEDSwRAQX8hCQwBCwJAAkACQAJAIAJBAWsOAwADAgELIAZFBEBBuH8hCQwEC0FsIQkgBS0AACICIANLDQMgACAHIAJBAnQiAmooAgAgAiAIaigCABA7IAEgADYCAEEBIQkMAwsgASAJNgIAQQAhCQwCCyAKRQRAQWwhCQwCC0EAIQkgC0UgDEEZSHINAUEIIAR0QQhqIQBBACECA0AgAiAATw0CIAJBQGshAgwAAAsAC0FsIQkgDSANQfwAaiANQfgAaiAFIAYQFSICEAMNACANKAJ4IgMgBEsNACAAIA0gDSgCfCAHIAggAxAYIAEgADYCACACIQkLIA1BgAFqJAAgCQsLACAAIAEgAhALGgsQACAALwAAIAAtAAJBEHRyCy8AAn9BuH8gAUEISQ0AGkFyIAAoAAQiAEF3Sw0AGkG4fyAAQQhqIgAgACABSxsLCwkAIAAgATsAAAsDAAELigYBBX8gACAAKAIAIgVBfnE2AgBBACAAIAVBAXZqQYQgKAIAIgQgAEYbIQECQAJAIAAoAgQiAkUNACACKAIAIgNBAXENACACQQhqIgUgA0EBdkF4aiIDQQggA0EISxtnQR9zQQJ0QYAfaiIDKAIARgRAIAMgAigCDDYCAAsgAigCCCIDBEAgAyACKAIMNgIECyACKAIMIgMEQCADIAIoAgg2AgALIAIgAigCACAAKAIAQX5xajYCAEGEICEAAkACQCABRQ0AIAEgAjYCBCABKAIAIgNBAXENASADQQF2QXhqIgNBCCADQQhLG2dBH3NBAnRBgB9qIgMoAgAgAUEIakYEQCADIAEoAgw2AgALIAEoAggiAwRAIAMgASgCDDYCBAsgASgCDCIDBEAgAyABKAIINgIAQYQgKAIAIQQLIAIgAigCACABKAIAQX5xajYCACABIARGDQAgASABKAIAQQF2akEEaiEACyAAIAI2AgALIAIoAgBBAXZBeGoiAEEIIABBCEsbZ0Efc0ECdEGAH2oiASgCACEAIAEgBTYCACACIAA2AgwgAkEANgIIIABFDQEgACAFNgIADwsCQCABRQ0AIAEoAgAiAkEBcQ0AIAJBAXZBeGoiAkEIIAJBCEsbZ0Efc0ECdEGAH2oiAigCACABQQhqRgRAIAIgASgCDDYCAAsgASgCCCICBEAgAiABKAIMNgIECyABKAIMIgIEQCACIAEoAgg2AgBBhCAoAgAhBAsgACAAKAIAIAEoAgBBfnFqIgI2AgACQCABIARHBEAgASABKAIAQQF2aiAANgIEIAAoAgAhAgwBC0GEICAANgIACyACQQF2QXhqIgFBCCABQQhLG2dBH3NBAnRBgB9qIgIoAgAhASACIABBCGoiAjYCACAAIAE2AgwgAEEANgIIIAFFDQEgASACNgIADwsgBUEBdkF4aiIBQQggAUEISxtnQR9zQQJ0QYAfaiICKAIAIQEgAiAAQQhqIgI2AgAgACABNgIMIABBADYCCCABRQ0AIAEgAjYCAAsLDgAgAARAIABBeGoQJQsLgAIBA38CQCAAQQ9qQXhxQYQgKAIAKAIAQQF2ayICEB1Bf0YNAAJAQYQgKAIAIgAo
AgAiAUEBcQ0AIAFBAXZBeGoiAUEIIAFBCEsbZ0Efc0ECdEGAH2oiASgCACAAQQhqRgRAIAEgACgCDDYCAAsgACgCCCIBBEAgASAAKAIMNgIECyAAKAIMIgFFDQAgASAAKAIINgIAC0EBIQEgACAAKAIAIAJBAXRqIgI2AgAgAkEBcQ0AIAJBAXZBeGoiAkEIIAJBCEsbZ0Efc0ECdEGAH2oiAygCACECIAMgAEEIaiIDNgIAIAAgAjYCDCAAQQA2AgggAkUNACACIAM2AgALIAELtwIBA38CQAJAIABBASAAGyICEDgiAA0AAkACQEGEICgCACIARQ0AIAAoAgAiA0EBcQ0AIAAgA0EBcjYCACADQQF2QXhqIgFBCCABQQhLG2dBH3NBAnRBgB9qIgEoAgAgAEEIakYEQCABIAAoAgw2AgALIAAoAggiAQRAIAEgACgCDDYCBAsgACgCDCIBBEAgASAAKAIINgIACyACECchAkEAIQFBhCAoAgAhACACDQEgACAAKAIAQX5xNgIAQQAPCyACQQ9qQXhxIgMQHSICQX9GDQIgAkEHakF4cSIAIAJHBEAgACACaxAdQX9GDQMLAkBBhCAoAgAiAUUEQEGAICAANgIADAELIAAgATYCBAtBhCAgADYCACAAIANBAXRBAXI2AgAMAQsgAEUNAQsgAEEIaiEBCyABC7kDAQJ/IAAgA2ohBQJAIANBB0wEQANAIAAgBU8NAiAAIAItAAA6AAAgAEEBaiEAIAJBAWohAgwAAAsACyAEQQFGBEACQCAAIAJrIgZBB00EQCAAIAItAAA6AAAgACACLQABOgABIAAgAi0AAjoAAiAAIAItAAM6AAMgAEEEaiACIAZBAnQiBkHAHmooAgBqIgIQFyACIAZB4B5qKAIAayECDAELIAAgAhAMCyACQQhqIQIgAEEIaiEACwJAAkACQAJAIAUgAU0EQCAAIANqIQEgBEEBRyAAIAJrQQ9Kcg0BA0AgACACEAwgAkEIaiECIABBCGoiACABSQ0ACwwFCyAAIAFLBEAgACEBDAQLIARBAUcgACACa0EPSnINASAAIQMgAiEEA0AgAyAEEAwgBEEIaiEEIANBCGoiAyABSQ0ACwwCCwNAIAAgAhAHIAJBEGohAiAAQRBqIgAgAUkNAAsMAwsgACEDIAIhBANAIAMgBBAHIARBEGohBCADQRBqIgMgAUkNAAsLIAIgASAAa2ohAgsDQCABIAVPDQEgASACLQAAOgAAIAFBAWohASACQQFqIQIMAAALAAsLQQECfyAAIAAoArjgASIDNgLE4AEgACgCvOABIQQgACABNgK84AEgACABIAJqNgK44AEgACABIAQgA2tqNgLA4AELpgEBAX8gACAAKALs4QEQFjYCyOABIABCADcD+OABIABCADcDuOABIABBwOABakIANwMAIABBqNAAaiIBQYyAgOAANgIAIABBADYCmOIBIABCADcDiOEBIABCAzcDgOEBIABBrNABakHgEikCADcCACAAQbTQAWpB6BIoAgA2AgAgACABNgIMIAAgAEGYIGo2AgggACAAQaAwajYCBCAAIABBEGo2AgALYQEBf0G4fyEDAkAgAUEDSQ0AIAIgABAhIgFBA3YiADYCCCACIAFBAXE2AgQgAiABQQF2QQNxIgM2AgACQCADQX9qIgFBAksNAAJAIAFBAWsOAgEAAgtBbA8LIAAhAwsgAwsMACAAIAEgAkEAEC4LiAQCA38CfiADEBYhBCAAQQBBKBAQIQAgBCACSwRAIAQPCyABRQRAQX8PCwJAAkAgA0EBRg0AIAEoAAAiBkGo6r5pRg0AQXYhAyAGQXBxQdDUtMIBRw0BQQghAyACQQhJDQEgAEEAQSgQECEAIAEoAAQhASAAQQE2AhQgACABrTcDAEEADwsgASACIAMQLyIDIAJLDQAgACADNgIYQXIhAyABIARqIgVBf2otAAAiAkEIcQ0AIAJBIHEiBkUEQEFwIQMgBS0AACIFQacBSw0BIAVBB3GtQgEgBUEDdkEKaq2GIgdCA4h+IAd8IQggBEEBaiEECyACQQZ2IQMgAkECdiEFAkAgAkEDcUF/aiICQQJLBEBBACECDAELAkACQAJAIAJBAWsOAgECAAsgASAEai0AACECIARBAWohBAwCCyABIARqLwAAIQIgBEECaiEEDAELIAEgBGooAAAhAiAEQQRqIQQLIAVBAXEhBQJ+AkACQAJAIANBf2oiA0ECTQRAIANBAWsOAgIDAQtCfyAGRQ0DGiABIARqMQAADAMLIAEgBGovAACtQoACfAwCCyABIARqKAAArQwBCyABIARqKQAACyEHIAAgBTYCICAAIAI2AhwgACAHNwMAQQAhAyAAQQA2AhQgACAHIAggBhsiBzcDCCAAIAdCgIAIIAdCgIAIVBs+AhALIAMLWwEBf0G4fyEDIAIQFiICIAFNBH8gACACakF/ai0AACIAQQNxQQJ0QaAeaigCACACaiAAQQZ2IgFBAnRBsB5qKAIAaiAAQSBxIgBFaiABRSAAQQV2cWoFQbh/CwsdACAAKAKQ4gEQWiAAQQA2AqDiASAAQgA3A5DiAQu1AwEFfyMAQZACayIKJABBuH8hBgJAIAVFDQAgBCwAACIIQf8BcSEHAkAgCEF/TARAIAdBgn9qQQF2IgggBU8NAkFsIQYgB0GBf2oiBUGAAk8NAiAEQQFqIQdBACEGA0AgBiAFTwRAIAUhBiAIIQcMAwUgACAGaiAHIAZBAXZqIgQtAABBBHY6AAAgACAGQQFyaiAELQAAQQ9xOgAAIAZBAmohBgwBCwAACwALIAcgBU8NASAAIARBAWogByAKEFMiBhADDQELIAYhBEEAIQYgAUEAQTQQECEJQQAhBQNAIAQgBkcEQCAAIAZqIggtAAAiAUELSwRAQWwhBgwDBSAJIAFBAnRqIgEgASgCAEEBajYCACAGQQFqIQZBASAILQAAdEEBdSAFaiEFDAILAAsLQWwhBiAFRQ0AIAUQFEEBaiIBQQxLDQAgAyABNgIAQQFBASABdCAFayIDEBQiAXQgA0cNACAAIARqIAFBAWoiADoAACAJIABBAnRqIgAgACgCAEEBajYCACAJKAIEIgBBAkkgAEEBcXINACACIARBAWo2AgAgB0EBaiEGCyAKQZACaiQAIAYLxhEBDH8jAEHwAGsiBSQAQWwhCwJAIANBCkkNACACLwAAIQogAi8AAiEJIAIvAAQhByAFQQhqIAQQDgJAIAMgByAJIApqakEGaiIMSQ0AIAUtAAohCCAFQdgAaiACQQZqIgIgChAGIgsQAw0BIAVBQGsgAiAKaiICIAkQBiILEAMNASAFQShqIAIgCWoiAiAHEAYiCxADDQEgBUEQaiACIAdqIAMgDGsQBiILEAMNASAAIAFqIg9BfWohECAEQQRqIQZBASELIAAgAUEDakECdiIDaiIMIANqIgIgA2oiDiEDIAIhBCAMIQcDQCALIAMgEElxBEAgACAGIAVB2ABqIAgQAkECdGoiCS8BADsAACA
FQdgAaiAJLQACEAEgCS0AAyELIAcgBiAFQUBrIAgQAkECdGoiCS8BADsAACAFQUBrIAktAAIQASAJLQADIQogBCAGIAVBKGogCBACQQJ0aiIJLwEAOwAAIAVBKGogCS0AAhABIAktAAMhCSADIAYgBUEQaiAIEAJBAnRqIg0vAQA7AAAgBUEQaiANLQACEAEgDS0AAyENIAAgC2oiCyAGIAVB2ABqIAgQAkECdGoiAC8BADsAACAFQdgAaiAALQACEAEgAC0AAyEAIAcgCmoiCiAGIAVBQGsgCBACQQJ0aiIHLwEAOwAAIAVBQGsgBy0AAhABIActAAMhByAEIAlqIgkgBiAFQShqIAgQAkECdGoiBC8BADsAACAFQShqIAQtAAIQASAELQADIQQgAyANaiIDIAYgBUEQaiAIEAJBAnRqIg0vAQA7AAAgBUEQaiANLQACEAEgACALaiEAIAcgCmohByAEIAlqIQQgAyANLQADaiEDIAVB2ABqEA0gBUFAaxANciAFQShqEA1yIAVBEGoQDXJFIQsMAQsLIAQgDksgByACS3INAEFsIQsgACAMSw0BIAxBfWohCQNAQQAgACAJSSAFQdgAahAEGwRAIAAgBiAFQdgAaiAIEAJBAnRqIgovAQA7AAAgBUHYAGogCi0AAhABIAAgCi0AA2oiACAGIAVB2ABqIAgQAkECdGoiCi8BADsAACAFQdgAaiAKLQACEAEgACAKLQADaiEADAEFIAxBfmohCgNAIAVB2ABqEAQgACAKS3JFBEAgACAGIAVB2ABqIAgQAkECdGoiCS8BADsAACAFQdgAaiAJLQACEAEgACAJLQADaiEADAELCwNAIAAgCk0EQCAAIAYgBUHYAGogCBACQQJ0aiIJLwEAOwAAIAVB2ABqIAktAAIQASAAIAktAANqIQAMAQsLAkAgACAMTw0AIAAgBiAFQdgAaiAIEAIiAEECdGoiDC0AADoAACAMLQADQQFGBEAgBUHYAGogDC0AAhABDAELIAUoAlxBH0sNACAFQdgAaiAGIABBAnRqLQACEAEgBSgCXEEhSQ0AIAVBIDYCXAsgAkF9aiEMA0BBACAHIAxJIAVBQGsQBBsEQCAHIAYgBUFAayAIEAJBAnRqIgAvAQA7AAAgBUFAayAALQACEAEgByAALQADaiIAIAYgBUFAayAIEAJBAnRqIgcvAQA7AAAgBUFAayAHLQACEAEgACAHLQADaiEHDAEFIAJBfmohDANAIAVBQGsQBCAHIAxLckUEQCAHIAYgBUFAayAIEAJBAnRqIgAvAQA7AAAgBUFAayAALQACEAEgByAALQADaiEHDAELCwNAIAcgDE0EQCAHIAYgBUFAayAIEAJBAnRqIgAvAQA7AAAgBUFAayAALQACEAEgByAALQADaiEHDAELCwJAIAcgAk8NACAHIAYgBUFAayAIEAIiAEECdGoiAi0AADoAACACLQADQQFGBEAgBUFAayACLQACEAEMAQsgBSgCREEfSw0AIAVBQGsgBiAAQQJ0ai0AAhABIAUoAkRBIUkNACAFQSA2AkQLIA5BfWohAgNAQQAgBCACSSAFQShqEAQbBEAgBCAGIAVBKGogCBACQQJ0aiIALwEAOwAAIAVBKGogAC0AAhABIAQgAC0AA2oiACAGIAVBKGogCBACQQJ0aiIELwEAOwAAIAVBKGogBC0AAhABIAAgBC0AA2ohBAwBBSAOQX5qIQIDQCAFQShqEAQgBCACS3JFBEAgBCAGIAVBKGogCBACQQJ0aiIALwEAOwAAIAVBKGogAC0AAhABIAQgAC0AA2ohBAwBCwsDQCAEIAJNBEAgBCAGIAVBKGogCBACQQJ0aiIALwEAOwAAIAVBKGogAC0AAhABIAQgAC0AA2ohBAwBCwsCQCAEIA5PDQAgBCAGIAVBKGogCBACIgBBAnRqIgItAAA6AAAgAi0AA0EBRgRAIAVBKGogAi0AAhABDAELIAUoAixBH0sNACAFQShqIAYgAEECdGotAAIQASAFKAIsQSFJDQAgBUEgNgIsCwNAQQAgAyAQSSAFQRBqEAQbBEAgAyAGIAVBEGogCBACQQJ0aiIALwEAOwAAIAVBEGogAC0AAhABIAMgAC0AA2oiACAGIAVBEGogCBACQQJ0aiICLwEAOwAAIAVBEGogAi0AAhABIAAgAi0AA2ohAwwBBSAPQX5qIQIDQCAFQRBqEAQgAyACS3JFBEAgAyAGIAVBEGogCBACQQJ0aiIALwEAOwAAIAVBEGogAC0AAhABIAMgAC0AA2ohAwwBCwsDQCADIAJNBEAgAyAGIAVBEGogCBACQQJ0aiIALwEAOwAAIAVBEGogAC0AAhABIAMgAC0AA2ohAwwBCwsCQCADIA9PDQAgAyAGIAVBEGogCBACIgBBAnRqIgItAAA6AAAgAi0AA0EBRgRAIAVBEGogAi0AAhABDAELIAUoAhRBH0sNACAFQRBqIAYgAEECdGotAAIQASAFKAIUQSFJDQAgBUEgNgIUCyABQWwgBUHYAGoQCiAFQUBrEApxIAVBKGoQCnEgBUEQahAKcRshCwwJCwAACwALAAALAAsAAAsACwAACwALQWwhCwsgBUHwAGokACALC7UEAQ5/IwBBEGsiBiQAIAZBBGogABAOQVQhBQJAIARB3AtJDQAgBi0ABCEHIANB8ARqQQBB7AAQECEIIAdBDEsNACADQdwJaiIJIAggBkEIaiAGQQxqIAEgAhAxIhAQA0UEQCAGKAIMIgQgB0sNASADQdwFaiEPIANBpAVqIREgAEEEaiESIANBqAVqIQEgBCEFA0AgBSICQX9qIQUgCCACQQJ0aigCAEUNAAsgAkEBaiEOQQEhBQNAIAUgDk9FBEAgCCAFQQJ0IgtqKAIAIQwgASALaiAKNgIAIAVBAWohBSAKIAxqIQoMAQsLIAEgCjYCAEEAIQUgBigCCCELA0AgBSALRkUEQCABIAUgCWotAAAiDEECdGoiDSANKAIAIg1BAWo2AgAgDyANQQF0aiINIAw6AAEgDSAFOgAAIAVBAWohBQwBCwtBACEBIANBADYCqAUgBEF/cyAHaiEJQQEhBQNAIAUgDk9FBEAgCCAFQQJ0IgtqKAIAIQwgAyALaiABNgIAIAwgBSAJanQgAWohASAFQQFqIQUMAQsLIAcgBEEBaiIBIAJrIgRrQQFqIQgDQEEBIQUgBCAIT0UEQANAIAUgDk9FBEAgBUECdCIJIAMgBEE0bGpqIAMgCWooAgAgBHY2AgAgBUEBaiEFDAELCyAEQQFqIQQMAQsLIBIgByAPIAogESADIAIgARBkIAZBAToABSAGIAc6AAYgACAGKAIENgIACyAQIQULIAZBEGokACAFC8ENAQt/IwBB8ABrIgUkAEFsIQkCQCADQQpJDQAgAi8AACEKIAIvAAIhDCACLwAEIQYgBUEIaiAEEA4CQCADIAYgCiAMampBBmoiDUkNACAFLQAKIQcgBUHYAGogAkEGaiICIAoQBiIJEAMNASAFQUBrIAIgCmoiAiAMEA
YiCRADDQEgBUEoaiACIAxqIgIgBhAGIgkQAw0BIAVBEGogAiAGaiADIA1rEAYiCRADDQEgACABaiIOQX1qIQ8gBEEEaiEGQQEhCSAAIAFBA2pBAnYiAmoiCiACaiIMIAJqIg0hAyAMIQQgCiECA0AgCSADIA9JcQRAIAYgBUHYAGogBxACQQF0aiIILQAAIQsgBUHYAGogCC0AARABIAAgCzoAACAGIAVBQGsgBxACQQF0aiIILQAAIQsgBUFAayAILQABEAEgAiALOgAAIAYgBUEoaiAHEAJBAXRqIggtAAAhCyAFQShqIAgtAAEQASAEIAs6AAAgBiAFQRBqIAcQAkEBdGoiCC0AACELIAVBEGogCC0AARABIAMgCzoAACAGIAVB2ABqIAcQAkEBdGoiCC0AACELIAVB2ABqIAgtAAEQASAAIAs6AAEgBiAFQUBrIAcQAkEBdGoiCC0AACELIAVBQGsgCC0AARABIAIgCzoAASAGIAVBKGogBxACQQF0aiIILQAAIQsgBUEoaiAILQABEAEgBCALOgABIAYgBUEQaiAHEAJBAXRqIggtAAAhCyAFQRBqIAgtAAEQASADIAs6AAEgA0ECaiEDIARBAmohBCACQQJqIQIgAEECaiEAIAkgBUHYAGoQDUVxIAVBQGsQDUVxIAVBKGoQDUVxIAVBEGoQDUVxIQkMAQsLIAQgDUsgAiAMS3INAEFsIQkgACAKSw0BIApBfWohCQNAIAVB2ABqEAQgACAJT3JFBEAgBiAFQdgAaiAHEAJBAXRqIggtAAAhCyAFQdgAaiAILQABEAEgACALOgAAIAYgBUHYAGogBxACQQF0aiIILQAAIQsgBUHYAGogCC0AARABIAAgCzoAASAAQQJqIQAMAQsLA0AgBUHYAGoQBCAAIApPckUEQCAGIAVB2ABqIAcQAkEBdGoiCS0AACEIIAVB2ABqIAktAAEQASAAIAg6AAAgAEEBaiEADAELCwNAIAAgCkkEQCAGIAVB2ABqIAcQAkEBdGoiCS0AACEIIAVB2ABqIAktAAEQASAAIAg6AAAgAEEBaiEADAELCyAMQX1qIQADQCAFQUBrEAQgAiAAT3JFBEAgBiAFQUBrIAcQAkEBdGoiCi0AACEJIAVBQGsgCi0AARABIAIgCToAACAGIAVBQGsgBxACQQF0aiIKLQAAIQkgBUFAayAKLQABEAEgAiAJOgABIAJBAmohAgwBCwsDQCAFQUBrEAQgAiAMT3JFBEAgBiAFQUBrIAcQAkEBdGoiAC0AACEKIAVBQGsgAC0AARABIAIgCjoAACACQQFqIQIMAQsLA0AgAiAMSQRAIAYgBUFAayAHEAJBAXRqIgAtAAAhCiAFQUBrIAAtAAEQASACIAo6AAAgAkEBaiECDAELCyANQX1qIQADQCAFQShqEAQgBCAAT3JFBEAgBiAFQShqIAcQAkEBdGoiAi0AACEKIAVBKGogAi0AARABIAQgCjoAACAGIAVBKGogBxACQQF0aiICLQAAIQogBUEoaiACLQABEAEgBCAKOgABIARBAmohBAwBCwsDQCAFQShqEAQgBCANT3JFBEAgBiAFQShqIAcQAkEBdGoiAC0AACECIAVBKGogAC0AARABIAQgAjoAACAEQQFqIQQMAQsLA0AgBCANSQRAIAYgBUEoaiAHEAJBAXRqIgAtAAAhAiAFQShqIAAtAAEQASAEIAI6AAAgBEEBaiEEDAELCwNAIAVBEGoQBCADIA9PckUEQCAGIAVBEGogBxACQQF0aiIALQAAIQIgBUEQaiAALQABEAEgAyACOgAAIAYgBUEQaiAHEAJBAXRqIgAtAAAhAiAFQRBqIAAtAAEQASADIAI6AAEgA0ECaiEDDAELCwNAIAVBEGoQBCADIA5PckUEQCAGIAVBEGogBxACQQF0aiIALQAAIQIgBUEQaiAALQABEAEgAyACOgAAIANBAWohAwwBCwsDQCADIA5JBEAgBiAFQRBqIAcQAkEBdGoiAC0AACECIAVBEGogAC0AARABIAMgAjoAACADQQFqIQMMAQsLIAFBbCAFQdgAahAKIAVBQGsQCnEgBUEoahAKcSAFQRBqEApxGyEJDAELQWwhCQsgBUHwAGokACAJC8oCAQR/IwBBIGsiBSQAIAUgBBAOIAUtAAIhByAFQQhqIAIgAxAGIgIQA0UEQCAEQQRqIQIgACABaiIDQX1qIQQDQCAFQQhqEAQgACAET3JFBEAgAiAFQQhqIAcQAkEBdGoiBi0AACEIIAVBCGogBi0AARABIAAgCDoAACACIAVBCGogBxACQQF0aiIGLQAAIQggBUEIaiAGLQABEAEgACAIOgABIABBAmohAAwBCwsDQCAFQQhqEAQgACADT3JFBEAgAiAFQQhqIAcQAkEBdGoiBC0AACEGIAVBCGogBC0AARABIAAgBjoAACAAQQFqIQAMAQsLA0AgACADT0UEQCACIAVBCGogBxACQQF0aiIELQAAIQYgBUEIaiAELQABEAEgACAGOgAAIABBAWohAAwBCwsgAUFsIAVBCGoQChshAgsgBUEgaiQAIAILtgMBCX8jAEEQayIGJAAgBkEANgIMIAZBADYCCEFUIQQCQAJAIANBQGsiDCADIAZBCGogBkEMaiABIAIQMSICEAMNACAGQQRqIAAQDiAGKAIMIgcgBi0ABEEBaksNASAAQQRqIQogBkEAOgAFIAYgBzoABiAAIAYoAgQ2AgAgB0EBaiEJQQEhBANAIAQgCUkEQCADIARBAnRqIgEoAgAhACABIAU2AgAgACAEQX9qdCAFaiEFIARBAWohBAwBCwsgB0EBaiEHQQAhBSAGKAIIIQkDQCAFIAlGDQEgAyAFIAxqLQAAIgRBAnRqIgBBASAEdEEBdSILIAAoAgAiAWoiADYCACAHIARrIQhBACEEAkAgC0EDTQRAA0AgBCALRg0CIAogASAEakEBdGoiACAIOgABIAAgBToAACAEQQFqIQQMAAALAAsDQCABIABPDQEgCiABQQF0aiIEIAg6AAEgBCAFOgAAIAQgCDoAAyAEIAU6AAIgBCAIOgAFIAQgBToABCAEIAg6AAcgBCAFOgAGIAFBBGohAQwAAAsACyAFQQFqIQUMAAALAAsgAiEECyAGQRBqJAAgBAutAQECfwJAQYQgKAIAIABHIAAoAgBBAXYiAyABa0F4aiICQXhxQQhHcgR/IAIFIAMQJ0UNASACQQhqC0EQSQ0AIAAgACgCACICQQFxIAAgAWpBD2pBeHEiASAAa0EBdHI2AgAgASAANgIEIAEgASgCAEEBcSAAIAJBAXZqIAFrIgJBAXRyNgIAQYQgIAEgAkH/////B3FqQQRqQYQgKAIAIABGGyABNgIAIAEQJQsLygIBBX8CQAJAAkAgAEEIIABBCEsbZ0EfcyAAaUEBR2oiAUEESSAAIAF2cg0AIAFBAnRB/B5qKAIAIgJFDQADQCACQXhqIgMoAgBBAXZBeGoiBSAATwRAIAIgBUEIIAVBCEsbZ0Efc0ECdEGAH2oiA
SgCAEYEQCABIAIoAgQ2AgALDAMLIARBHksNASAEQQFqIQQgAigCBCICDQALC0EAIQMgAUEgTw0BA0AgAUECdEGAH2ooAgAiAkUEQCABQR5LIQIgAUEBaiEBIAJFDQEMAwsLIAIgAkF4aiIDKAIAQQF2QXhqIgFBCCABQQhLG2dBH3NBAnRBgB9qIgEoAgBGBEAgASACKAIENgIACwsgAigCACIBBEAgASACKAIENgIECyACKAIEIgEEQCABIAIoAgA2AgALIAMgAygCAEEBcjYCACADIAAQNwsgAwvhCwINfwV+IwBB8ABrIgckACAHIAAoAvDhASIINgJcIAEgAmohDSAIIAAoAoDiAWohDwJAAkAgBUUEQCABIQQMAQsgACgCxOABIRAgACgCwOABIREgACgCvOABIQ4gAEEBNgKM4QFBACEIA0AgCEEDRwRAIAcgCEECdCICaiAAIAJqQazQAWooAgA2AkQgCEEBaiEIDAELC0FsIQwgB0EYaiADIAQQBhADDQEgB0EsaiAHQRhqIAAoAgAQEyAHQTRqIAdBGGogACgCCBATIAdBPGogB0EYaiAAKAIEEBMgDUFgaiESIAEhBEEAIQwDQCAHKAIwIAcoAixBA3RqKQIAIhRCEIinQf8BcSEIIAcoAkAgBygCPEEDdGopAgAiFUIQiKdB/wFxIQsgBygCOCAHKAI0QQN0aikCACIWQiCIpyEJIBVCIIghFyAUQiCIpyECAkAgFkIQiKdB/wFxIgNBAk8EQAJAIAZFIANBGUlyRQRAIAkgB0EYaiADQSAgBygCHGsiCiAKIANLGyIKEAUgAyAKayIDdGohCSAHQRhqEAQaIANFDQEgB0EYaiADEAUgCWohCQwBCyAHQRhqIAMQBSAJaiEJIAdBGGoQBBoLIAcpAkQhGCAHIAk2AkQgByAYNwNIDAELAkAgA0UEQCACBEAgBygCRCEJDAMLIAcoAkghCQwBCwJAAkAgB0EYakEBEAUgCSACRWpqIgNBA0YEQCAHKAJEQX9qIgMgA0VqIQkMAQsgA0ECdCAHaigCRCIJIAlFaiEJIANBAUYNAQsgByAHKAJINgJMCwsgByAHKAJENgJIIAcgCTYCRAsgF6chAyALBEAgB0EYaiALEAUgA2ohAwsgCCALakEUTwRAIAdBGGoQBBoLIAgEQCAHQRhqIAgQBSACaiECCyAHQRhqEAQaIAcgB0EYaiAUQhiIp0H/AXEQCCAUp0H//wNxajYCLCAHIAdBGGogFUIYiKdB/wFxEAggFadB//8DcWo2AjwgB0EYahAEGiAHIAdBGGogFkIYiKdB/wFxEAggFqdB//8DcWo2AjQgByACNgJgIAcoAlwhCiAHIAk2AmggByADNgJkAkACQAJAIAQgAiADaiILaiASSw0AIAIgCmoiEyAPSw0AIA0gBGsgC0Egak8NAQsgByAHKQNoNwMQIAcgBykDYDcDCCAEIA0gB0EIaiAHQdwAaiAPIA4gESAQEB4hCwwBCyACIARqIQggBCAKEAcgAkERTwRAIARBEGohAgNAIAIgCkEQaiIKEAcgAkEQaiICIAhJDQALCyAIIAlrIQIgByATNgJcIAkgCCAOa0sEQCAJIAggEWtLBEBBbCELDAILIBAgAiAOayICaiIKIANqIBBNBEAgCCAKIAMQDxoMAgsgCCAKQQAgAmsQDyEIIAcgAiADaiIDNgJkIAggAmshCCAOIQILIAlBEE8EQCADIAhqIQMDQCAIIAIQByACQRBqIQIgCEEQaiIIIANJDQALDAELAkAgCUEHTQRAIAggAi0AADoAACAIIAItAAE6AAEgCCACLQACOgACIAggAi0AAzoAAyAIQQRqIAIgCUECdCIDQcAeaigCAGoiAhAXIAIgA0HgHmooAgBrIQIgBygCZCEDDAELIAggAhAMCyADQQlJDQAgAyAIaiEDIAhBCGoiCCACQQhqIgJrQQ9MBEADQCAIIAIQDCACQQhqIQIgCEEIaiIIIANJDQAMAgALAAsDQCAIIAIQByACQRBqIQIgCEEQaiIIIANJDQALCyAHQRhqEAQaIAsgDCALEAMiAhshDCAEIAQgC2ogAhshBCAFQX9qIgUNAAsgDBADDQFBbCEMIAdBGGoQBEECSQ0BQQAhCANAIAhBA0cEQCAAIAhBAnQiAmpBrNABaiACIAdqKAJENgIAIAhBAWohCAwBCwsgBygCXCEIC0G6fyEMIA8gCGsiACANIARrSw0AIAQEfyAEIAggABALIABqBUEACyABayEMCyAHQfAAaiQAIAwLkRcCFn8FfiMAQdABayIHJAAgByAAKALw4QEiCDYCvAEgASACaiESIAggACgCgOIBaiETAkACQCAFRQRAIAEhAwwBCyAAKALE4AEhESAAKALA4AEhFSAAKAK84AEhDyAAQQE2AozhAUEAIQgDQCAIQQNHBEAgByAIQQJ0IgJqIAAgAmpBrNABaigCADYCVCAIQQFqIQgMAQsLIAcgETYCZCAHIA82AmAgByABIA9rNgJoQWwhECAHQShqIAMgBBAGEAMNASAFQQQgBUEESBshFyAHQTxqIAdBKGogACgCABATIAdBxABqIAdBKGogACgCCBATIAdBzABqIAdBKGogACgCBBATQQAhBCAHQeAAaiEMIAdB5ABqIQoDQCAHQShqEARBAksgBCAXTnJFBEAgBygCQCAHKAI8QQN0aikCACIdQhCIp0H/AXEhCyAHKAJQIAcoAkxBA3RqKQIAIh5CEIinQf8BcSEJIAcoAkggBygCREEDdGopAgAiH0IgiKchCCAeQiCIISAgHUIgiKchAgJAIB9CEIinQf8BcSIDQQJPBEACQCAGRSADQRlJckUEQCAIIAdBKGogA0EgIAcoAixrIg0gDSADSxsiDRAFIAMgDWsiA3RqIQggB0EoahAEGiADRQ0BIAdBKGogAxAFIAhqIQgMAQsgB0EoaiADEAUgCGohCCAHQShqEAQaCyAHKQJUISEgByAINgJUIAcgITcDWAwBCwJAIANFBEAgAgRAIAcoAlQhCAwDCyAHKAJYIQgMAQsCQAJAIAdBKGpBARAFIAggAkVqaiIDQQNGBEAgBygCVEF/aiIDIANFaiEIDAELIANBAnQgB2ooAlQiCCAIRWohCCADQQFGDQELIAcgBygCWDYCXAsLIAcgBygCVDYCWCAHIAg2AlQLICCnIQMgCQRAIAdBKGogCRAFIANqIQMLIAkgC2pBFE8EQCAHQShqEAQaCyALBEAgB0EoaiALEAUgAmohAgsgB0EoahAEGiAHIAcoAmggAmoiCSADajYCaCAKIAwgCCAJSxsoAgAhDSAHIAdBKGogHUIYiKdB/wFxEAggHadB//8DcWo2AjwgByAHQShqIB5CGIinQf8BcRAIIB6nQf//A3FqNgJMIAdBKGoQBBogB0EoaiAfQhiIp0H/AXEQCCEOIAdB8ABqIARBBHRqIgsgCSANaiAIazYCDCALIAg2AgggCyADNgIEIAsgAjYCACAHIA4gH6dB//8DcWo2AkQg
BEEBaiEEDAELCyAEIBdIDQEgEkFgaiEYIAdB4ABqIRogB0HkAGohGyABIQMDQCAHQShqEARBAksgBCAFTnJFBEAgBygCQCAHKAI8QQN0aikCACIdQhCIp0H/AXEhCyAHKAJQIAcoAkxBA3RqKQIAIh5CEIinQf8BcSEIIAcoAkggBygCREEDdGopAgAiH0IgiKchCSAeQiCIISAgHUIgiKchDAJAIB9CEIinQf8BcSICQQJPBEACQCAGRSACQRlJckUEQCAJIAdBKGogAkEgIAcoAixrIgogCiACSxsiChAFIAIgCmsiAnRqIQkgB0EoahAEGiACRQ0BIAdBKGogAhAFIAlqIQkMAQsgB0EoaiACEAUgCWohCSAHQShqEAQaCyAHKQJUISEgByAJNgJUIAcgITcDWAwBCwJAIAJFBEAgDARAIAcoAlQhCQwDCyAHKAJYIQkMAQsCQAJAIAdBKGpBARAFIAkgDEVqaiICQQNGBEAgBygCVEF/aiICIAJFaiEJDAELIAJBAnQgB2ooAlQiCSAJRWohCSACQQFGDQELIAcgBygCWDYCXAsLIAcgBygCVDYCWCAHIAk2AlQLICCnIRQgCARAIAdBKGogCBAFIBRqIRQLIAggC2pBFE8EQCAHQShqEAQaCyALBEAgB0EoaiALEAUgDGohDAsgB0EoahAEGiAHIAcoAmggDGoiGSAUajYCaCAbIBogCSAZSxsoAgAhHCAHIAdBKGogHUIYiKdB/wFxEAggHadB//8DcWo2AjwgByAHQShqIB5CGIinQf8BcRAIIB6nQf//A3FqNgJMIAdBKGoQBBogByAHQShqIB9CGIinQf8BcRAIIB+nQf//A3FqNgJEIAcgB0HwAGogBEEDcUEEdGoiDSkDCCIdNwPIASAHIA0pAwAiHjcDwAECQAJAAkAgBygCvAEiDiAepyICaiIWIBNLDQAgAyAHKALEASIKIAJqIgtqIBhLDQAgEiADayALQSBqTw0BCyAHIAcpA8gBNwMQIAcgBykDwAE3AwggAyASIAdBCGogB0G8AWogEyAPIBUgERAeIQsMAQsgAiADaiEIIAMgDhAHIAJBEU8EQCADQRBqIQIDQCACIA5BEGoiDhAHIAJBEGoiAiAISQ0ACwsgCCAdpyIOayECIAcgFjYCvAEgDiAIIA9rSwRAIA4gCCAVa0sEQEFsIQsMAgsgESACIA9rIgJqIhYgCmogEU0EQCAIIBYgChAPGgwCCyAIIBZBACACaxAPIQggByACIApqIgo2AsQBIAggAmshCCAPIQILIA5BEE8EQCAIIApqIQoDQCAIIAIQByACQRBqIQIgCEEQaiIIIApJDQALDAELAkAgDkEHTQRAIAggAi0AADoAACAIIAItAAE6AAEgCCACLQACOgACIAggAi0AAzoAAyAIQQRqIAIgDkECdCIKQcAeaigCAGoiAhAXIAIgCkHgHmooAgBrIQIgBygCxAEhCgwBCyAIIAIQDAsgCkEJSQ0AIAggCmohCiAIQQhqIgggAkEIaiICa0EPTARAA0AgCCACEAwgAkEIaiECIAhBCGoiCCAKSQ0ADAIACwALA0AgCCACEAcgAkEQaiECIAhBEGoiCCAKSQ0ACwsgCxADBEAgCyEQDAQFIA0gDDYCACANIBkgHGogCWs2AgwgDSAJNgIIIA0gFDYCBCAEQQFqIQQgAyALaiEDDAILAAsLIAQgBUgNASAEIBdrIQtBACEEA0AgCyAFSARAIAcgB0HwAGogC0EDcUEEdGoiAikDCCIdNwPIASAHIAIpAwAiHjcDwAECQAJAAkAgBygCvAEiDCAepyICaiIKIBNLDQAgAyAHKALEASIJIAJqIhBqIBhLDQAgEiADayAQQSBqTw0BCyAHIAcpA8gBNwMgIAcgBykDwAE3AxggAyASIAdBGGogB0G8AWogEyAPIBUgERAeIRAMAQsgAiADaiEIIAMgDBAHIAJBEU8EQCADQRBqIQIDQCACIAxBEGoiDBAHIAJBEGoiAiAISQ0ACwsgCCAdpyIGayECIAcgCjYCvAEgBiAIIA9rSwRAIAYgCCAVa0sEQEFsIRAMAgsgESACIA9rIgJqIgwgCWogEU0EQCAIIAwgCRAPGgwCCyAIIAxBACACaxAPIQggByACIAlqIgk2AsQBIAggAmshCCAPIQILIAZBEE8EQCAIIAlqIQYDQCAIIAIQByACQRBqIQIgCEEQaiIIIAZJDQALDAELAkAgBkEHTQRAIAggAi0AADoAACAIIAItAAE6AAEgCCACLQACOgACIAggAi0AAzoAAyAIQQRqIAIgBkECdCIGQcAeaigCAGoiAhAXIAIgBkHgHmooAgBrIQIgBygCxAEhCQwBCyAIIAIQDAsgCUEJSQ0AIAggCWohBiAIQQhqIgggAkEIaiICa0EPTARAA0AgCCACEAwgAkEIaiECIAhBCGoiCCAGSQ0ADAIACwALA0AgCCACEAcgAkEQaiECIAhBEGoiCCAGSQ0ACwsgEBADDQMgC0EBaiELIAMgEGohAwwBCwsDQCAEQQNHBEAgACAEQQJ0IgJqQazQAWogAiAHaigCVDYCACAEQQFqIQQMAQsLIAcoArwBIQgLQbp/IRAgEyAIayIAIBIgA2tLDQAgAwR/IAMgCCAAEAsgAGoFQQALIAFrIRALIAdB0AFqJAAgEAslACAAQgA3AgAgAEEAOwEIIABBADoACyAAIAE2AgwgACACOgAKC7QFAQN/IwBBMGsiBCQAIABB/wFqIgVBfWohBgJAIAMvAQIEQCAEQRhqIAEgAhAGIgIQAw0BIARBEGogBEEYaiADEBwgBEEIaiAEQRhqIAMQHCAAIQMDQAJAIARBGGoQBCADIAZPckUEQCADIARBEGogBEEYahASOgAAIAMgBEEIaiAEQRhqEBI6AAEgBEEYahAERQ0BIANBAmohAwsgBUF+aiEFAn8DQEG6fyECIAMiASAFSw0FIAEgBEEQaiAEQRhqEBI6AAAgAUEBaiEDIARBGGoQBEEDRgRAQQIhAiAEQQhqDAILIAMgBUsNBSABIARBCGogBEEYahASOgABIAFBAmohA0EDIQIgBEEYahAEQQNHDQALIARBEGoLIQUgAyAFIARBGGoQEjoAACABIAJqIABrIQIMAwsgAyAEQRBqIARBGGoQEjoAAiADIARBCGogBEEYahASOgADIANBBGohAwwAAAsACyAEQRhqIAEgAhAGIgIQAw0AIARBEGogBEEYaiADEBwgBEEIaiAEQRhqIAMQHCAAIQMDQAJAIARBGGoQBCADIAZPckUEQCADIARBEGogBEEYahAROgAAIAMgBEEIaiAEQRhqEBE6AAEgBEEYahAERQ0BIANBAmohAwsgBUF+aiEFAn8DQEG6fyECIAMiASAFSw0EIAEgBEEQaiAEQRhqEBE6AAAgAUEBaiEDIARBGGoQBEEDRgRAQQIhAiAEQQhqDAILIAMgBUsNBCABIARBCGogBEEYahAROgABIAFBAmohA0EDIQIgBEEYahAEQQNHDQALIAR
BEGoLIQUgAyAFIARBGGoQEToAACABIAJqIABrIQIMAgsgAyAEQRBqIARBGGoQEToAAiADIARBCGogBEEYahAROgADIANBBGohAwwAAAsACyAEQTBqJAAgAgtpAQF/An8CQAJAIAJBB00NACABKAAAQbfIwuF+Rw0AIAAgASgABDYCmOIBQWIgAEEQaiABIAIQPiIDEAMNAhogAEKBgICAEDcDiOEBIAAgASADaiACIANrECoMAQsgACABIAIQKgtBAAsLrQMBBn8jAEGAAWsiAyQAQWIhCAJAIAJBCUkNACAAQZjQAGogAUEIaiIEIAJBeGogAEGY0AAQMyIFEAMiBg0AIANBHzYCfCADIANB/ABqIANB+ABqIAQgBCAFaiAGGyIEIAEgAmoiAiAEaxAVIgUQAw0AIAMoAnwiBkEfSw0AIAMoAngiB0EJTw0AIABBiCBqIAMgBkGAC0GADCAHEBggA0E0NgJ8IAMgA0H8AGogA0H4AGogBCAFaiIEIAIgBGsQFSIFEAMNACADKAJ8IgZBNEsNACADKAJ4IgdBCk8NACAAQZAwaiADIAZBgA1B4A4gBxAYIANBIzYCfCADIANB/ABqIANB+ABqIAQgBWoiBCACIARrEBUiBRADDQAgAygCfCIGQSNLDQAgAygCeCIHQQpPDQAgACADIAZBwBBB0BEgBxAYIAQgBWoiBEEMaiIFIAJLDQAgAiAFayEFQQAhAgNAIAJBA0cEQCAEKAAAIgZBf2ogBU8NAiAAIAJBAnRqQZzQAWogBjYCACACQQFqIQIgBEEEaiEEDAELCyAEIAFrIQgLIANBgAFqJAAgCAtGAQN/IABBCGohAyAAKAIEIQJBACEAA0AgACACdkUEQCABIAMgAEEDdGotAAJBFktqIQEgAEEBaiEADAELCyABQQggAmt0C4YDAQV/Qbh/IQcCQCADRQ0AIAItAAAiBEUEQCABQQA2AgBBAUG4fyADQQFGGw8LAn8gAkEBaiIFIARBGHRBGHUiBkF/Sg0AGiAGQX9GBEAgA0EDSA0CIAUvAABBgP4BaiEEIAJBA2oMAQsgA0ECSA0BIAItAAEgBEEIdHJBgIB+aiEEIAJBAmoLIQUgASAENgIAIAVBAWoiASACIANqIgNLDQBBbCEHIABBEGogACAFLQAAIgVBBnZBI0EJIAEgAyABa0HAEEHQEUHwEiAAKAKM4QEgACgCnOIBIAQQHyIGEAMiCA0AIABBmCBqIABBCGogBUEEdkEDcUEfQQggASABIAZqIAgbIgEgAyABa0GAC0GADEGAFyAAKAKM4QEgACgCnOIBIAQQHyIGEAMiCA0AIABBoDBqIABBBGogBUECdkEDcUE0QQkgASABIAZqIAgbIgEgAyABa0GADUHgDkGQGSAAKAKM4QEgACgCnOIBIAQQHyIAEAMNACAAIAFqIAJrIQcLIAcLrQMBCn8jAEGABGsiCCQAAn9BUiACQf8BSw0AGkFUIANBDEsNABogAkEBaiELIABBBGohCUGAgAQgA0F/anRBEHUhCkEAIQJBASEEQQEgA3QiB0F/aiIMIQUDQCACIAtGRQRAAkAgASACQQF0Ig1qLwEAIgZB//8DRgRAIAkgBUECdGogAjoAAiAFQX9qIQVBASEGDAELIARBACAKIAZBEHRBEHVKGyEECyAIIA1qIAY7AQAgAkEBaiECDAELCyAAIAQ7AQIgACADOwEAIAdBA3YgB0EBdmpBA2ohBkEAIQRBACECA0AgBCALRkUEQCABIARBAXRqLgEAIQpBACEAA0AgACAKTkUEQCAJIAJBAnRqIAQ6AAIDQCACIAZqIAxxIgIgBUsNAAsgAEEBaiEADAELCyAEQQFqIQQMAQsLQX8gAg0AGkEAIQIDfyACIAdGBH9BAAUgCCAJIAJBAnRqIgAtAAJBAXRqIgEgAS8BACIBQQFqOwEAIAAgAyABEBRrIgU6AAMgACABIAVB/wFxdCAHazsBACACQQFqIQIMAQsLCyEFIAhBgARqJAAgBQvjBgEIf0FsIQcCQCACQQNJDQACQAJAAkACQCABLQAAIgNBA3EiCUEBaw4DAwEAAgsgACgCiOEBDQBBYg8LIAJBBUkNAkEDIQYgASgAACEFAn8CQAJAIANBAnZBA3EiCEF+aiIEQQFNBEAgBEEBaw0BDAILIAVBDnZB/wdxIQQgBUEEdkH/B3EhAyAIRQwCCyAFQRJ2IQRBBCEGIAVBBHZB//8AcSEDQQAMAQsgBUEEdkH//w9xIgNBgIAISw0DIAEtAARBCnQgBUEWdnIhBEEFIQZBAAshBSAEIAZqIgogAksNAgJAIANBgQZJDQAgACgCnOIBRQ0AQQAhAgNAIAJBg4ABSw0BIAJBQGshAgwAAAsACwJ/IAlBA0YEQCABIAZqIQEgAEHw4gFqIQIgACgCDCEGIAUEQCACIAMgASAEIAYQXwwCCyACIAMgASAEIAYQXQwBCyAAQbjQAWohAiABIAZqIQEgAEHw4gFqIQYgAEGo0ABqIQggBQRAIAggBiADIAEgBCACEF4MAQsgCCAGIAMgASAEIAIQXAsQAw0CIAAgAzYCgOIBIABBATYCiOEBIAAgAEHw4gFqNgLw4QEgCUECRgRAIAAgAEGo0ABqNgIMCyAAIANqIgBBiOMBakIANwAAIABBgOMBakIANwAAIABB+OIBakIANwAAIABB8OIBakIANwAAIAoPCwJ/AkACQAJAIANBAnZBA3FBf2oiBEECSw0AIARBAWsOAgACAQtBASEEIANBA3YMAgtBAiEEIAEvAABBBHYMAQtBAyEEIAEQIUEEdgsiAyAEaiIFQSBqIAJLBEAgBSACSw0CIABB8OIBaiABIARqIAMQCyEBIAAgAzYCgOIBIAAgATYC8OEBIAEgA2oiAEIANwAYIABCADcAECAAQgA3AAggAEIANwAAIAUPCyAAIAM2AoDiASAAIAEgBGo2AvDhASAFDwsCfwJAAkACQCADQQJ2QQNxQX9qIgRBAksNACAEQQFrDgIAAgELQQEhByADQQN2DAILQQIhByABLwAAQQR2DAELIAJBBEkgARAhIgJBj4CAAUtyDQFBAyEHIAJBBHYLIQIgAEHw4gFqIAEgB2otAAAgAkEgahAQIQEgACACNgKA4gEgACABNgLw4QEgB0EBaiEHCyAHC0sAIABC+erQ0OfJoeThADcDICAAQgA3AxggAELP1tO+0ser2UI3AxAgAELW64Lu6v2J9eAANwMIIABCADcDACAAQShqQQBBKBAQGgviAgICfwV+IABBKGoiASAAKAJIaiECAn4gACkDACIDQiBaBEAgACkDECIEQgeJIAApAwgiBUIBiXwgACkDGCIGQgyJfCAAKQMgIgdCEol8IAUQGSAEEBkgBhAZIAcQGQwBCyAAKQMYQsXP2bLx5brqJ3wLIAN8IQMDQCABQQhqIgAgAk0EQEIAIAEpAAAQCSADhUIbiUKHla+vmLbem55/fkLj3MqV/M7y9YV/fCEDIAAhAQwBCwsCQCABQQRqIgAgAksEQCABIQ
AMAQsgASgAAK1Ch5Wvr5i23puef34gA4VCF4lCz9bTvtLHq9lCfkL5893xmfaZqxZ8IQMLA0AgACACSQRAIAAxAABCxc/ZsvHluuonfiADhUILiUKHla+vmLbem55/fiEDIABBAWohAAwBCwsgA0IhiCADhULP1tO+0ser2UJ+IgNCHYggA4VC+fPd8Zn2masWfiIDQiCIIAOFC+8CAgJ/BH4gACAAKQMAIAKtfDcDAAJAAkAgACgCSCIDIAJqIgRBH00EQCABRQ0BIAAgA2pBKGogASACECAgACgCSCACaiEEDAELIAEgAmohAgJ/IAMEQCAAQShqIgQgA2ogAUEgIANrECAgACAAKQMIIAQpAAAQCTcDCCAAIAApAxAgACkAMBAJNwMQIAAgACkDGCAAKQA4EAk3AxggACAAKQMgIABBQGspAAAQCTcDICAAKAJIIQMgAEEANgJIIAEgA2tBIGohAQsgAUEgaiACTQsEQCACQWBqIQMgACkDICEFIAApAxghBiAAKQMQIQcgACkDCCEIA0AgCCABKQAAEAkhCCAHIAEpAAgQCSEHIAYgASkAEBAJIQYgBSABKQAYEAkhBSABQSBqIgEgA00NAAsgACAFNwMgIAAgBjcDGCAAIAc3AxAgACAINwMICyABIAJPDQEgAEEoaiABIAIgAWsiBBAgCyAAIAQ2AkgLCy8BAX8gAEUEQEG2f0EAIAMbDwtBun8hBCADIAFNBH8gACACIAMQEBogAwVBun8LCy8BAX8gAEUEQEG2f0EAIAMbDwtBun8hBCADIAFNBH8gACACIAMQCxogAwVBun8LC6gCAQZ/IwBBEGsiByQAIABB2OABaikDAEKAgIAQViEIQbh/IQUCQCAEQf//B0sNACAAIAMgBBBCIgUQAyIGDQAgACgCnOIBIQkgACAHQQxqIAMgAyAFaiAGGyIKIARBACAFIAYbayIGEEAiAxADBEAgAyEFDAELIAcoAgwhBCABRQRAQbp/IQUgBEEASg0BCyAGIANrIQUgAyAKaiEDAkAgCQRAIABBADYCnOIBDAELAkACQAJAIARBBUgNACAAQdjgAWopAwBCgICACFgNAAwBCyAAQQA2ApziAQwBCyAAKAIIED8hBiAAQQA2ApziASAGQRRPDQELIAAgASACIAMgBSAEIAgQOSEFDAELIAAgASACIAMgBSAEIAgQOiEFCyAHQRBqJAAgBQtnACAAQdDgAWogASACIAAoAuzhARAuIgEQAwRAIAEPC0G4fyECAkAgAQ0AIABB7OABaigCACIBBEBBYCECIAAoApjiASABRw0BC0EAIQIgAEHw4AFqKAIARQ0AIABBkOEBahBDCyACCycBAX8QVyIERQRAQUAPCyAEIAAgASACIAMgBBBLEE8hACAEEFYgAAs/AQF/AkACQAJAIAAoAqDiAUEBaiIBQQJLDQAgAUEBaw4CAAECCyAAEDBBAA8LIABBADYCoOIBCyAAKAKU4gELvAMCB38BfiMAQRBrIgkkAEG4fyEGAkAgBCgCACIIQQVBCSAAKALs4QEiBRtJDQAgAygCACIHQQFBBSAFGyAFEC8iBRADBEAgBSEGDAELIAggBUEDakkNACAAIAcgBRBJIgYQAw0AIAEgAmohCiAAQZDhAWohCyAIIAVrIQIgBSAHaiEHIAEhBQNAIAcgAiAJECwiBhADDQEgAkF9aiICIAZJBEBBuH8hBgwCCyAJKAIAIghBAksEQEFsIQYMAgsgB0EDaiEHAn8CQAJAAkAgCEEBaw4CAgABCyAAIAUgCiAFayAHIAYQSAwCCyAFIAogBWsgByAGEEcMAQsgBSAKIAVrIActAAAgCSgCCBBGCyIIEAMEQCAIIQYMAgsgACgC8OABBEAgCyAFIAgQRQsgAiAGayECIAYgB2ohByAFIAhqIQUgCSgCBEUNAAsgACkD0OABIgxCf1IEQEFsIQYgDCAFIAFrrFINAQsgACgC8OABBEBBaiEGIAJBBEkNASALEEQhDCAHKAAAIAynRw0BIAdBBGohByACQXxqIQILIAMgBzYCACAEIAI2AgAgBSABayEGCyAJQRBqJAAgBgsuACAAECsCf0EAQQAQAw0AGiABRSACRXJFBEBBYiAAIAEgAhA9EAMNARoLQQALCzcAIAEEQCAAIAAoAsTgASABKAIEIAEoAghqRzYCnOIBCyAAECtBABADIAFFckUEQCAAIAEQWwsL0QIBB38jAEEQayIGJAAgBiAENgIIIAYgAzYCDCAFBEAgBSgCBCEKIAUoAgghCQsgASEIAkACQANAIAAoAuzhARAWIQsCQANAIAQgC0kNASADKAAAQXBxQdDUtMIBRgRAIAMgBBAiIgcQAw0EIAQgB2shBCADIAdqIQMMAQsLIAYgAzYCDCAGIAQ2AggCQCAFBEAgACAFEE5BACEHQQAQA0UNAQwFCyAAIAogCRBNIgcQAw0ECyAAIAgQUCAMQQFHQQAgACAIIAIgBkEMaiAGQQhqEEwiByIDa0EAIAMQAxtBCkdyRQRAQbh/IQcMBAsgBxADDQMgAiAHayECIAcgCGohCEEBIQwgBigCDCEDIAYoAgghBAwBCwsgBiADNgIMIAYgBDYCCEG4fyEHIAQNASAIIAFrIQcMAQsgBiADNgIMIAYgBDYCCAsgBkEQaiQAIAcLRgECfyABIAAoArjgASICRwRAIAAgAjYCxOABIAAgATYCuOABIAAoArzgASEDIAAgATYCvOABIAAgASADIAJrajYCwOABCwutAgIEfwF+IwBBQGoiBCQAAkACQCACQQhJDQAgASgAAEFwcUHQ1LTCAUcNACABIAIQIiEBIABCADcDCCAAQQA2AgQgACABNgIADAELIARBGGogASACEC0iAxADBEAgACADEBoMAQsgAwRAIABBuH8QGgwBCyACIAQoAjAiA2shAiABIANqIQMDQAJAIAAgAyACIARBCGoQLCIFEAMEfyAFBSACIAVBA2oiBU8NAUG4fwsQGgwCCyAGQQFqIQYgAiAFayECIAMgBWohAyAEKAIMRQ0ACyAEKAI4BEAgAkEDTQRAIABBuH8QGgwCCyADQQRqIQMLIAQoAighAiAEKQMYIQcgAEEANgIEIAAgAyABazYCACAAIAIgBmytIAcgB0J/URs3AwgLIARBQGskAAslAQF/IwBBEGsiAiQAIAIgACABEFEgAigCACEAIAJBEGokACAAC30BBH8jAEGQBGsiBCQAIARB/wE2AggCQCAEQRBqIARBCGogBEEMaiABIAIQFSIGEAMEQCAGIQUMAQtBVCEFIAQoAgwiB0EGSw0AIAMgBEEQaiAEKAIIIAcQQSIFEAMNACAAIAEgBmogAiAGayADEDwhBQsgBEGQBGokACAFC4cBAgJ/An5BABAWIQMCQANAIAEgA08EQAJAIAAoAABBcHFB0NS0wgFGBEAgACABECIiAhADRQ0BQn4PCyAAIAEQVSIEQn1WDQMgBCAFfCIFIARUIQJCfiEEIAINAyAAIAEQUiICEAMNAwsgASACayEBI
AAgAmohAAwBCwtCfiAFIAEbIQQLIAQLPwIBfwF+IwBBMGsiAiQAAn5CfiACQQhqIAAgARAtDQAaQgAgAigCHEEBRg0AGiACKQMICyEDIAJBMGokACADC40BAQJ/IwBBMGsiASQAAkAgAEUNACAAKAKI4gENACABIABB/OEBaigCADYCKCABIAApAvThATcDICAAEDAgACgCqOIBIQIgASABKAIoNgIYIAEgASkDIDcDECACIAFBEGoQGyAAQQA2AqjiASABIAEoAig2AgggASABKQMgNwMAIAAgARAbCyABQTBqJAALKgECfyMAQRBrIgAkACAAQQA2AgggAEIANwMAIAAQWCEBIABBEGokACABC4cBAQN/IwBBEGsiAiQAAkAgACgCAEUgACgCBEVzDQAgAiAAKAIINgIIIAIgACkCADcDAAJ/IAIoAgAiAQRAIAIoAghBqOMJIAERBQAMAQtBqOMJECgLIgFFDQAgASAAKQIANwL04QEgAUH84QFqIAAoAgg2AgAgARBZIAEhAwsgAkEQaiQAIAMLywEBAn8jAEEgayIBJAAgAEGBgIDAADYCtOIBIABBADYCiOIBIABBADYC7OEBIABCADcDkOIBIABBADYCpOMJIABBADYC3OIBIABCADcCzOIBIABBADYCvOIBIABBADYCxOABIABCADcCnOIBIABBpOIBakIANwIAIABBrOIBakEANgIAIAFCADcCECABQgA3AhggASABKQMYNwMIIAEgASkDEDcDACABKAIIQQh2QQFxIQIgAEEANgLg4gEgACACNgKM4gEgAUEgaiQAC3YBA38jAEEwayIBJAAgAARAIAEgAEHE0AFqIgIoAgA2AiggASAAKQK80AE3AyAgACgCACEDIAEgAigCADYCGCABIAApArzQATcDECADIAFBEGoQGyABIAEoAig2AgggASABKQMgNwMAIAAgARAbCyABQTBqJAALzAEBAX8gACABKAK00AE2ApjiASAAIAEoAgQiAjYCwOABIAAgAjYCvOABIAAgAiABKAIIaiICNgK44AEgACACNgLE4AEgASgCuNABBEAgAEKBgICAEDcDiOEBIAAgAUGk0ABqNgIMIAAgAUGUIGo2AgggACABQZwwajYCBCAAIAFBDGo2AgAgAEGs0AFqIAFBqNABaigCADYCACAAQbDQAWogAUGs0AFqKAIANgIAIABBtNABaiABQbDQAWooAgA2AgAPCyAAQgA3A4jhAQs7ACACRQRAQbp/DwsgBEUEQEFsDwsgAiAEEGAEQCAAIAEgAiADIAQgBRBhDwsgACABIAIgAyAEIAUQZQtGAQF/IwBBEGsiBSQAIAVBCGogBBAOAn8gBS0ACQRAIAAgASACIAMgBBAyDAELIAAgASACIAMgBBA0CyEAIAVBEGokACAACzQAIAAgAyAEIAUQNiIFEAMEQCAFDwsgBSAESQR/IAEgAiADIAVqIAQgBWsgABA1BUG4fwsLRgEBfyMAQRBrIgUkACAFQQhqIAQQDgJ/IAUtAAkEQCAAIAEgAiADIAQQYgwBCyAAIAEgAiADIAQQNQshACAFQRBqJAAgAAtZAQF/QQ8hAiABIABJBEAgAUEEdCAAbiECCyAAQQh2IgEgAkEYbCIAQYwIaigCAGwgAEGICGooAgBqIgJBA3YgAmogAEGACGooAgAgAEGECGooAgAgAWxqSQs3ACAAIAMgBCAFQYAQEDMiBRADBEAgBQ8LIAUgBEkEfyABIAIgAyAFaiAEIAVrIAAQMgVBuH8LC78DAQN/IwBBIGsiBSQAIAVBCGogAiADEAYiAhADRQRAIAAgAWoiB0F9aiEGIAUgBBAOIARBBGohAiAFLQACIQMDQEEAIAAgBkkgBUEIahAEGwRAIAAgAiAFQQhqIAMQAkECdGoiBC8BADsAACAFQQhqIAQtAAIQASAAIAQtAANqIgQgAiAFQQhqIAMQAkECdGoiAC8BADsAACAFQQhqIAAtAAIQASAEIAAtAANqIQAMAQUgB0F+aiEEA0AgBUEIahAEIAAgBEtyRQRAIAAgAiAFQQhqIAMQAkECdGoiBi8BADsAACAFQQhqIAYtAAIQASAAIAYtAANqIQAMAQsLA0AgACAES0UEQCAAIAIgBUEIaiADEAJBAnRqIgYvAQA7AAAgBUEIaiAGLQACEAEgACAGLQADaiEADAELCwJAIAAgB08NACAAIAIgBUEIaiADEAIiA0ECdGoiAC0AADoAACAALQADQQFGBEAgBUEIaiAALQACEAEMAQsgBSgCDEEfSw0AIAVBCGogAiADQQJ0ai0AAhABIAUoAgxBIUkNACAFQSA2AgwLIAFBbCAFQQhqEAobIQILCwsgBUEgaiQAIAILkgIBBH8jAEFAaiIJJAAgCSADQTQQCyEDAkAgBEECSA0AIAMgBEECdGooAgAhCSADQTxqIAgQIyADQQE6AD8gAyACOgA+QQAhBCADKAI8IQoDQCAEIAlGDQEgACAEQQJ0aiAKNgEAIARBAWohBAwAAAsAC0EAIQkDQCAGIAlGRQRAIAMgBSAJQQF0aiIKLQABIgtBAnRqIgwoAgAhBCADQTxqIAotAABBCHQgCGpB//8DcRAjIANBAjoAPyADIAcgC2siCiACajoAPiAEQQEgASAKa3RqIQogAygCPCELA0AgACAEQQJ0aiALNgEAIARBAWoiBCAKSQ0ACyAMIAo2AgAgCUEBaiEJDAELCyADQUBrJAALowIBCX8jAEHQAGsiCSQAIAlBEGogBUE0EAsaIAcgBmshDyAHIAFrIRADQAJAIAMgCkcEQEEBIAEgByACIApBAXRqIgYtAAEiDGsiCGsiC3QhDSAGLQAAIQ4gCUEQaiAMQQJ0aiIMKAIAIQYgCyAPTwRAIAAgBkECdGogCyAIIAUgCEE0bGogCCAQaiIIQQEgCEEBShsiCCACIAQgCEECdGooAgAiCEEBdGogAyAIayAHIA4QYyAGIA1qIQgMAgsgCUEMaiAOECMgCUEBOgAPIAkgCDoADiAGIA1qIQggCSgCDCELA0AgBiAITw0CIAAgBkECdGogCzYBACAGQQFqIQYMAAALAAsgCUHQAGokAA8LIAwgCDYCACAKQQFqIQoMAAALAAs0ACAAIAMgBCAFEDYiBRADBEAgBQ8LIAUgBEkEfyABIAIgAyAFaiAEIAVrIAAQNAVBuH8LCyMAIAA/AEEQdGtB//8DakEQdkAAQX9GBEBBAA8LQQAQAEEBCzsBAX8gAgRAA0AgACABIAJBgCAgAkGAIEkbIgMQCyEAIAFBgCBqIQEgAEGAIGohACACIANrIgINAAsLCwYAIAAQAwsLqBUJAEGICAsNAQAAAAEAAAACAAAAAgBBoAgLswYBAAAAAQAAAAIAAAACAAAAJgAAAIIAAAAhBQAASgAAAGcIAAAmAAAAwAEAAIAAAABJBQAASgAAAL4IAAApAAAALAIAAIAAAABJBQAASgAAAL4IAAAvAAAAygIAAIAAAACKBQAASgAAAIQJAAA1AAAAcwMAAIAAAACd
BQAASgAAAKAJAAA9AAAAgQMAAIAAAADrBQAASwAAAD4KAABEAAAAngMAAIAAAABNBgAASwAAAKoKAABLAAAAswMAAIAAAADBBgAATQAAAB8NAABNAAAAUwQAAIAAAAAjCAAAUQAAAKYPAABUAAAAmQQAAIAAAABLCQAAVwAAALESAABYAAAA2gQAAIAAAABvCQAAXQAAACMUAABUAAAARQUAAIAAAABUCgAAagAAAIwUAABqAAAArwUAAIAAAAB2CQAAfAAAAE4QAAB8AAAA0gIAAIAAAABjBwAAkQAAAJAHAACSAAAAAAAAAAEAAAABAAAABQAAAA0AAAAdAAAAPQAAAH0AAAD9AAAA/QEAAP0DAAD9BwAA/Q8AAP0fAAD9PwAA/X8AAP3/AAD9/wEA/f8DAP3/BwD9/w8A/f8fAP3/PwD9/38A/f//AP3//wH9//8D/f//B/3//w/9//8f/f//P/3//38AAAAAAQAAAAIAAAADAAAABAAAAAUAAAAGAAAABwAAAAgAAAAJAAAACgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAABEAAAASAAAAEwAAABQAAAAVAAAAFgAAABcAAAAYAAAAGQAAABoAAAAbAAAAHAAAAB0AAAAeAAAAHwAAAAMAAAAEAAAABQAAAAYAAAAHAAAACAAAAAkAAAAKAAAACwAAAAwAAAANAAAADgAAAA8AAAAQAAAAEQAAABIAAAATAAAAFAAAABUAAAAWAAAAFwAAABgAAAAZAAAAGgAAABsAAAAcAAAAHQAAAB4AAAAfAAAAIAAAACEAAAAiAAAAIwAAACUAAAAnAAAAKQAAACsAAAAvAAAAMwAAADsAAABDAAAAUwAAAGMAAACDAAAAAwEAAAMCAAADBAAAAwgAAAMQAAADIAAAA0AAAAOAAAADAAEAQeAPC1EBAAAAAQAAAAEAAAABAAAAAgAAAAIAAAADAAAAAwAAAAQAAAAEAAAABQAAAAcAAAAIAAAACQAAAAoAAAALAAAADAAAAA0AAAAOAAAADwAAABAAQcQQC4sBAQAAAAIAAAADAAAABAAAAAUAAAAGAAAABwAAAAgAAAAJAAAACgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAABIAAAAUAAAAFgAAABgAAAAcAAAAIAAAACgAAAAwAAAAQAAAAIAAAAAAAQAAAAIAAAAEAAAACAAAABAAAAAgAAAAQAAAAIAAAAAAAQBBkBIL5gQBAAAAAQAAAAEAAAABAAAAAgAAAAIAAAADAAAAAwAAAAQAAAAGAAAABwAAAAgAAAAJAAAACgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAAAEAAAAEAAAACAAAAAAAAAABAAEBBgAAAAAAAAQAAAAAEAAABAAAAAAgAAAFAQAAAAAAAAUDAAAAAAAABQQAAAAAAAAFBgAAAAAAAAUHAAAAAAAABQkAAAAAAAAFCgAAAAAAAAUMAAAAAAAABg4AAAAAAAEFEAAAAAAAAQUUAAAAAAABBRYAAAAAAAIFHAAAAAAAAwUgAAAAAAAEBTAAAAAgAAYFQAAAAAAABwWAAAAAAAAIBgABAAAAAAoGAAQAAAAADAYAEAAAIAAABAAAAAAAAAAEAQAAAAAAAAUCAAAAIAAABQQAAAAAAAAFBQAAACAAAAUHAAAAAAAABQgAAAAgAAAFCgAAAAAAAAULAAAAAAAABg0AAAAgAAEFEAAAAAAAAQUSAAAAIAABBRYAAAAAAAIFGAAAACAAAwUgAAAAAAADBSgAAAAAAAYEQAAAABAABgRAAAAAIAAHBYAAAAAAAAkGAAIAAAAACwYACAAAMAAABAAAAAAQAAAEAQAAACAAAAUCAAAAIAAABQMAAAAgAAAFBQAAACAAAAUGAAAAIAAABQgAAAAgAAAFCQAAACAAAAULAAAAIAAABQwAAAAAAAAGDwAAACAAAQUSAAAAIAABBRQAAAAgAAIFGAAAACAAAgUcAAAAIAADBSgAAAAgAAQFMAAAAAAAEAYAAAEAAAAPBgCAAAAAAA4GAEAAAAAADQYAIABBgBcLhwIBAAEBBQAAAAAAAAUAAAAAAAAGBD0AAAAAAAkF/QEAAAAADwX9fwAAAAAVBf3/HwAAAAMFBQAAAAAABwR9AAAAAAAMBf0PAAAAABIF/f8DAAAAFwX9/38AAAAFBR0AAAAAAAgE/QAAAAAADgX9PwAAAAAUBf3/DwAAAAIFAQAAABAABwR9AAAAAAALBf0HAAAAABEF/f8BAAAAFgX9/z8AAAAEBQ0AAAAQAAgE/QAAAAAADQX9HwAAAAATBf3/BwAAAAEFAQAAABAABgQ9AAAAAAAKBf0DAAAAABAF/f8AAAAAHAX9//8PAAAbBf3//wcAABoF/f//AwAAGQX9//8BAAAYBf3//wBBkBkLhgQBAAEBBgAAAAAAAAYDAAAAAAAABAQAAAAgAAAFBQAAAAAAAAUGAAAAAAAABQgAAAAAAAAFCQAAAAAAAAULAAAAAAAABg0AAAAAAAAGEAAAAAAAAAYTAAAAAAAABhYAAAAAAAAGGQAAAAAAAAYcAAAAAAAABh8AAAAAAAAGIgAAAAAAAQYlAAAAAAABBikAAAAAAAIGLwAAAAAAAwY7AAAAAAAEBlMAAAAAAAcGgwAAAAAACQYDAgAAEAAABAQAAAAAAAAEBQAAACAAAAUGAAAAAAAABQcAAAAgAAAFCQAAAAAAAAUKAAAAAAAABgwAAAAAAAAGDwAAAAAAAAYSAAAAAAAABhUAAAAAAAAGGAAAAAAAAAYbAAAAAAAABh4AAAAAAAAGIQAAAAAAAQYjAAAAAAABBicAAAAAAAIGKwAAAAAAAwYzAAAAAAAEBkMAAAAAAAUGYwAAAAAACAYDAQAAIAAABAQAAAAwAAAEBAAAABAAAAQFAAAAIAAABQcAAAAgAAAFCAAAACAAAAUKAAAAIAAABQsAAAAAAAAGDgAAAAAAAAYRAAAAAAAABhQAAAAAAAAGFwAAAAAAAAYaAAAAAAAABh0AAAAAAAAGIAAAAAAAEAYDAAEAAAAPBgOAAAAAAA4GA0AAAAAADQYDIAAAAAAMBgMQAAAAAAsGAwgAAAAACgYDBABBpB0L2QEBAAAAAwAAAAcAAAAPAAAAHwAAAD8AAAB/AAAA/wAAAP8BAAD/AwAA/wcAAP8PAAD/HwAA/z8AAP9/AAD//wAA//8BAP//AwD//wcA//8PAP//HwD//z8A//9/AP///wD///8B////A////wf///8P////H////z////9/AAAAAAEAAAACAAAABAAAAAAAAAACAAAABAAAAAgAAAAAAAAAAQAAAAIAAAABAAAABAAAAAQAAAAEAAAABAAAAAgAAAAIAAAACAAAAAcAAAAIAAAACQAAAAoAAAALAEGgIAsDwBBQ';
/**
* Loader for KTX 2.0 GPU Texture containers.
*
* KTX 2.0 is a container format for various GPU texture formats. The loader
* supports Basis Universal GPU textures, which can be quickly transcoded to
* a wide variety of GPU texture compression formats. While KTX 2.0 also allows
* other hardware-specific formats, this loader does not yet parse them.
*
* This loader parses the KTX 2.0 container and then relies on
* THREE.BasisTextureLoader to complete the transcoding process.
*
* References:
* - KTX: http://github.khronos.org/KTX-Specification/
* - DFD: https://www.khronos.org/registry/DataFormat/specs/1.3/dataformat.1.3.html#basicdescriptor
*/
class KTX2Loader extends CompressedTextureLoader {
constructor( manager ) {
super( manager );
this.basisLoader = new BasisTextureLoader( manager );
this.zstd = new ZSTDDecoder();
this.zstd.init();
if ( typeof MSC_TRANSCODER !== 'undefined' ) {
console.warn(
'THREE.KTX2Loader: Please update to latest "basis_transcoder".'
+ ' "msc_basis_transcoder" is no longer supported in three.js r125+.'
);
}
}
setTranscoderPath( path ) {
this.basisLoader.setTranscoderPath( path );
return this;
}
setWorkerLimit( limit ) {
this.basisLoader.setWorkerLimit( limit );
return this;
}
detectSupport( renderer ) {
this.basisLoader.detectSupport( renderer );
return this;
}
dispose() {
this.basisLoader.dispose();
return this;
}
load( url, onLoad, onProgress, onError ) {
var scope = this;
var texture = new CompressedTexture();
var bufferPending = new Promise( function ( resolve, reject ) {
new FileLoader( scope.manager )
.setPath( scope.path )
.setResponseType( 'arraybuffer' )
.load( url, resolve, onProgress, reject );
} );
bufferPending
.then( function ( buffer ) {
scope.parse( buffer, function ( _texture ) {
texture.copy( _texture );
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
}, onError );
} )
.catch( onError );
return texture;
}
parse( buffer, onLoad, onError ) {
var scope = this;
var ktx = p( new Uint8Array( buffer ) );
if ( ktx.pixelDepth > 0 ) {
throw new Error( 'THREE.KTX2Loader: Only 2D textures are currently supported.' );
}
if ( ktx.layerCount > 1 ) {
throw new Error( 'THREE.KTX2Loader: Array textures are not currently supported.' );
}
if ( ktx.faceCount > 1 ) {
throw new Error( 'THREE.KTX2Loader: Cube textures are not currently supported.' );
}
var dfd = KTX2Utils.getBasicDFD( ktx );
KTX2Utils.createLevels( ktx, this.zstd ).then( function ( levels ) {
var basisFormat = dfd.colorModel === s.UASTC
? BasisTextureLoader.BasisFormat.UASTC_4x4
: BasisTextureLoader.BasisFormat.ETC1S;
var parseConfig = {
levels: levels,
width: ktx.pixelWidth,
height: ktx.pixelHeight,
basisFormat: basisFormat,
hasAlpha: KTX2Utils.getAlpha( ktx ),
};
if ( basisFormat === BasisTextureLoader.BasisFormat.ETC1S ) {
parseConfig.globalData = ktx.globalData;
}
return scope.basisLoader.parseInternalAsync( parseConfig );
} ).then( function ( texture ) {
texture.encoding = dfd.transferFunction === r.SRGB
? sRGBEncoding
: LinearEncoding;
texture.premultiplyAlpha = KTX2Utils.getPremultiplyAlpha( ktx );
onLoad( texture );
} ).catch( onError );
return this;
}
}
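/**
 * Example usage (an illustrative sketch; the transcoder path and texture URL
 * are placeholders, and `renderer` and `material` are assumed to already
 * exist):
 *
 * ```
 * const ktx2Loader = new KTX2Loader()
 * 	.setTranscoderPath( 'path/to/basis_transcoder/' )
 * 	.detectSupport( renderer );
 * ktx2Loader.load( 'diffuse.ktx2', function ( texture ) {
 * 	material.map = texture;
 * 	material.needsUpdate = true;
 * }, undefined, function ( error ) {
 * 	console.error( error );
 * } );
 * ```
 */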
var KTX2Utils = {
createLevels: async function ( ktx, zstd ) {
if ( ktx.supercompressionScheme === n.ZSTD ) {
await zstd.init();
}
var levels = [];
var width = ktx.pixelWidth;
var height = ktx.pixelHeight;
for ( var levelIndex = 0; levelIndex < ktx.levels.length; levelIndex ++ ) {
var levelWidth = Math.max( 1, Math.floor( width / Math.pow( 2, levelIndex ) ) );
var levelHeight = Math.max( 1, Math.floor( height / Math.pow( 2, levelIndex ) ) );
var levelData = ktx.levels[ levelIndex ].levelData;
if ( ktx.supercompressionScheme === n.ZSTD ) {
levelData = zstd.decode( levelData, ktx.levels[ levelIndex ].uncompressedByteLength );
}
levels.push( {
index: levelIndex,
width: levelWidth,
height: levelHeight,
data: levelData,
} );
}
return levels;
},
getBasicDFD: function ( ktx ) {
// Basic Data Format Descriptor Block is always the first DFD.
return ktx.dataFormatDescriptor[ 0 ];
},
getAlpha: function ( ktx ) {
var dfd = this.getBasicDFD( ktx );
// UASTC
if ( dfd.colorModel === s.UASTC ) {
if ( ( dfd.samples[ 0 ].channelID & 0xF ) === f.RGBA ) {
return true;
}
return false;
}
// ETC1S
if ( dfd.samples.length === 2
&& ( dfd.samples[ 1 ].channelID & 0xF ) === l.AAA ) {
return true;
}
return false;
},
getPremultiplyAlpha: function ( ktx ) {
var dfd = this.getBasicDFD( ktx );
return !! ( dfd.flags & o.ALPHA_PREMULTIPLIED );
},
};
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$c, _b$b;
const $retainerCount = Symbol('retainerCount');
const $recentlyUsed = Symbol('recentlyUsed');
const $evict = Symbol('evict');
const $evictionThreshold = Symbol('evictionThreshold');
const $cache = Symbol('cache');
/**
* The CacheEvictionPolicy manages the lifecycle of items in a cache,
* evicting evictable items that fall outside the eviction threshold,
* starting with the least recently used.
*
* Items are considered cached while they are retained. When all retainers
* of an item have released it, that item becomes evictable.
*/
class CacheEvictionPolicy {
constructor(cache, evictionThreshold = 5) {
this[_a$c] = new Map();
this[_b$b] = [];
this[$cache] = cache;
this[$evictionThreshold] = evictionThreshold;
}
/**
* The eviction threshold is the maximum number of items to hold
* in cache indefinitely. Items within the threshold (in recently
* used order) will continue to be cached even if they have zero
* retainers.
*/
set evictionThreshold(value) {
this[$evictionThreshold] = value;
this[$evict]();
}
get evictionThreshold() {
return this[$evictionThreshold];
}
/**
* A reference to the cache that operates under this policy
*/
get cache() {
return this[$cache];
}
/**
* Given an item key, returns the number of retainers of that item
*/
retainerCount(key) {
return this[$retainerCount].get(key) || 0;
}
/**
* Resets the internal tracking of cache item retainers. Use only in cases
* where it is certain that all retained cache items have been accounted for!
*/
reset() {
this[$retainerCount].clear();
this[$recentlyUsed] = [];
}
/**
* Mark a given cache item as retained, where the item is represented
* by its key. An item can have any number of retainers.
*/
retain(key) {
if (!this[$retainerCount].has(key)) {
this[$retainerCount].set(key, 0);
}
this[$retainerCount].set(key, this[$retainerCount].get(key) + 1);
const recentlyUsedIndex = this[$recentlyUsed].indexOf(key);
if (recentlyUsedIndex !== -1) {
this[$recentlyUsed].splice(recentlyUsedIndex, 1);
}
this[$recentlyUsed].unshift(key);
// Evict, in case retaining a new item pushed an evictable item beyond the
// eviction threshold
this[$evict]();
}
/**
* Mark a given cache item as released by one of its retainers, where the item
* is represented by its key. When all retainers of an item have released it,
* the item is considered evictable.
*/
release(key) {
if (this[$retainerCount].has(key)) {
this[$retainerCount].set(key, Math.max(this[$retainerCount].get(key) - 1, 0));
}
this[$evict]();
}
[(_a$c = $retainerCount, _b$b = $recentlyUsed, $evict)]() {
if (this[$recentlyUsed].length < this[$evictionThreshold]) {
return;
}
for (let i = this[$recentlyUsed].length - 1; i >= this[$evictionThreshold]; --i) {
const key = this[$recentlyUsed][i];
const retainerCount = this[$retainerCount].get(key);
if (retainerCount === 0) {
this[$cache].delete(key);
this[$recentlyUsed].splice(i, 1);
}
}
}
}
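/**
 * Example usage (an illustrative sketch; a plain Map stands in for the cache,
 * which only needs a delete() method here, and gltfA/gltfB are placeholder
 * cached values):
 *
 * ```
 * const cache = new Map();
 * const policy = new CacheEvictionPolicy(cache, 2);
 * cache.set('a.glb', gltfA);
 * policy.retain('a.glb');
 * cache.set('b.glb', gltfB);
 * policy.retain('b.glb');
 * policy.release('a.glb');
 * // 'a.glb' has zero retainers but stays cached while it is one of the two
 * // most recently used entries; lowering the threshold evicts it:
 * policy.evictionThreshold = 1;
 * ```
 */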
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$b, _b$a;
/**
* A helper to Promise-ify a Three.js GLTFLoader
*/
const loadWithLoader = (url, loader, progressCallback = () => { }) => {
const onProgress = (event) => {
const fraction = event.loaded / event.total;
progressCallback(Math.max(0, Math.min(1, isFinite(fraction) ? fraction : 1)));
};
return new Promise((resolve, reject) => {
loader.load(url, resolve, onProgress, reject);
});
};
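/**
 * Example usage (an illustrative sketch; the URL is a placeholder and the
 * call is assumed to run inside an async function):
 *
 * ```
 * const gltf = await loadWithLoader('model.glb', new GLTFLoader(), (fraction) => {
 *     console.log(`loaded ${Math.round(fraction * 100)}%`);
 * });
 * ```
 */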
const cache = new Map();
const preloaded = new Map();
let dracoDecoderLocation;
const dracoLoader = new DRACOLoader();
let ktx2TranscoderLocation;
const ktx2Loader = new KTX2Loader();
const $loader = Symbol('loader');
const $evictionPolicy = Symbol('evictionPolicy');
const $GLTFInstance = Symbol('GLTFInstance');
class CachingGLTFLoader extends EventDispatcher {
constructor(GLTFInstance) {
super();
this[_b$a] = new GLTFLoader();
this[$GLTFInstance] = GLTFInstance;
this[$loader].setDRACOLoader(dracoLoader);
this[$loader].setKTX2Loader(ktx2Loader);
}
static setDRACODecoderLocation(url) {
dracoDecoderLocation = url;
dracoLoader.setDecoderPath(url);
}
static getDRACODecoderLocation() {
return dracoDecoderLocation;
}
static setKTX2TranscoderLocation(url) {
ktx2TranscoderLocation = url;
ktx2Loader.setTranscoderPath(url);
}
static getKTX2TranscoderLocation() {
return ktx2TranscoderLocation;
}
static initializeKTX2Loader(renderer) {
ktx2Loader.detectSupport(renderer);
}
static get cache() {
return cache;
}
/** @nocollapse */
static clearCache() {
cache.forEach((_value, url) => {
this.delete(url);
});
this[$evictionPolicy].reset();
}
static has(url) {
return cache.has(url);
}
/** @nocollapse */
static async delete(url) {
if (!this.has(url)) {
return;
}
const gltfLoads = cache.get(url);
preloaded.delete(url);
cache.delete(url);
const gltf = await gltfLoads;
// Dispose of the cached glTF's materials and geometries:
gltf.dispose();
}
/**
* Returns true if the model that corresponds to the specified url is
* available in our local cache.
*/
static hasFinishedLoading(url) {
return !!preloaded.get(url);
}
get [(_a$b = $evictionPolicy, _b$a = $loader, $evictionPolicy)]() {
return this.constructor[$evictionPolicy];
}
/**
* Preloads a glTF, populating the cache. Returns a promise that resolves
* when the cache is populated.
*/
async preload(url, element, progressCallback = () => { }) {
this.dispatchEvent({ type: 'preload', element: element, src: url });
if (!cache.has(url)) {
const rawGLTFLoads = loadWithLoader(url, this[$loader], (progress) => {
progressCallback(progress * 0.8);
});
const GLTFInstance = this[$GLTFInstance];
const gltfInstanceLoads = rawGLTFLoads
.then((rawGLTF) => {
return GLTFInstance.prepare(rawGLTF);
})
.then((preparedGLTF) => {
progressCallback(0.9);
return new GLTFInstance(preparedGLTF);
});
cache.set(url, gltfInstanceLoads);
}
await cache.get(url);
preloaded.set(url, true);
if (progressCallback) {
progressCallback(1.0);
}
}
/**
* Loads a glTF from the specified url and resolves a unique clone of the
* glTF. If the glTF has already been loaded, makes a clone of the cached
* copy.
*/
async load(url, element, progressCallback = () => { }) {
await this.preload(url, element, progressCallback);
const gltf = await cache.get(url);
const clone = await gltf.clone();
this[$evictionPolicy].retain(url);
// Patch dispose so that we can properly account for instance use
// in the caching layer:
clone.dispose = (() => {
const originalDispose = clone.dispose;
let disposed = false;
return () => {
if (disposed) {
return;
}
disposed = true;
originalDispose.apply(clone);
this[$evictionPolicy].release(url);
};
})();
return clone;
}
}
CachingGLTFLoader[_a$b] = new CacheEvictionPolicy(CachingGLTFLoader);
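/*
 * Usage sketch (illustrative, not executed by this bundle): the loader caches
 * the parsed glTF per URL and hands out clones; disposing a clone releases it
 * back to the eviction policy. `SomeGLTFInstance` is a placeholder for a class
 * with a static prepare() and a constructor taking the prepared glTF; the
 * decoder URL, model URL and element are placeholders as well.
 *
 *   CachingGLTFLoader.setDRACODecoderLocation('https://example.com/draco/');
 *   const loader = new CachingGLTFLoader(SomeGLTFInstance);
 *   const gltf = await loader.load('model.glb', modelViewerElement, (p) => {});
 *   // ... use gltf.scene ...
 *   gltf.dispose();  // decrements the retainer count for 'model.glb'
 */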
class CSS2DObject extends Object3D {
constructor( element ) {
super();
this.element = element || document.createElement( 'div' );
this.element.style.position = 'absolute';
this.addEventListener( 'removed', function () {
this.traverse( function ( object ) {
if ( object.element instanceof Element && object.element.parentNode !== null ) {
object.element.parentNode.removeChild( object.element );
}
} );
} );
}
copy( source, recursive ) {
super.copy( source, recursive );
this.element = source.element.cloneNode( true );
return this;
}
}
CSS2DObject.prototype.isCSS2DObject = true;
//
const _vector = new Vector3();
const _viewMatrix = new Matrix4();
const _viewProjectionMatrix = new Matrix4();
const _a$a = new Vector3();
const _b$9 = new Vector3();
class CSS2DRenderer {
constructor() {
const _this = this;
let _width, _height;
let _widthHalf, _heightHalf;
const cache = {
objects: new WeakMap()
};
const domElement = document.createElement( 'div' );
domElement.style.overflow = 'hidden';
this.domElement = domElement;
this.getSize = function () {
return {
width: _width,
height: _height
};
};
this.render = function ( scene, camera ) {
if ( scene.autoUpdate === true ) scene.updateMatrixWorld();
if ( camera.parent === null ) camera.updateMatrixWorld();
_viewMatrix.copy( camera.matrixWorldInverse );
_viewProjectionMatrix.multiplyMatrices( camera.projectionMatrix, _viewMatrix );
renderObject( scene, scene, camera );
zOrder( scene );
};
this.setSize = function ( width, height ) {
_width = width;
_height = height;
_widthHalf = _width / 2;
_heightHalf = _height / 2;
domElement.style.width = width + 'px';
domElement.style.height = height + 'px';
};
function renderObject( object, scene, camera ) {
if ( object.isCSS2DObject ) {
object.onBeforeRender( _this, scene, camera );
_vector.setFromMatrixPosition( object.matrixWorld );
_vector.applyMatrix4( _viewProjectionMatrix );
const element = object.element;
if ( /apple/i.test( navigator.vendor ) ) {
// https://github.com/mrdoob/three.js/issues/21415
element.style.transform = 'translate(-50%,-50%) translate(' + Math.round( _vector.x * _widthHalf + _widthHalf ) + 'px,' + Math.round( - _vector.y * _heightHalf + _heightHalf ) + 'px)';
} else {
element.style.transform = 'translate(-50%,-50%) translate(' + ( _vector.x * _widthHalf + _widthHalf ) + 'px,' + ( - _vector.y * _heightHalf + _heightHalf ) + 'px)';
}
element.style.display = ( object.visible && _vector.z >= - 1 && _vector.z <= 1 ) ? '' : 'none';
const objectData = {
distanceToCameraSquared: getDistanceToSquared( camera, object )
};
cache.objects.set( object, objectData );
if ( element.parentNode !== domElement ) {
domElement.appendChild( element );
}
object.onAfterRender( _this, scene, camera );
}
for ( let i = 0, l = object.children.length; i < l; i ++ ) {
renderObject( object.children[ i ], scene, camera );
}
}
function getDistanceToSquared( object1, object2 ) {
_a$a.setFromMatrixPosition( object1.matrixWorld );
_b$9.setFromMatrixPosition( object2.matrixWorld );
return _a$a.distanceToSquared( _b$9 );
}
function filterAndFlatten( scene ) {
const result = [];
scene.traverse( function ( object ) {
if ( object.isCSS2DObject ) result.push( object );
} );
return result;
}
function zOrder( scene ) {
const sorted = filterAndFlatten( scene ).sort( function ( a, b ) {
const distanceA = cache.objects.get( a ).distanceToCameraSquared;
const distanceB = cache.objects.get( b ).distanceToCameraSquared;
return distanceA - distanceB;
} );
const zMax = sorted.length;
for ( let i = 0, l = sorted.length; i < l; i ++ ) {
sorted[ i ].element.style.zIndex = zMax - i;
}
}
}
}
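/*
 * Usage sketch (illustrative): a CSS2DObject wraps a DOM element and the
 * CSS2DRenderer positions that element over the WebGL canvas each frame.
 * `mesh`, `scene` and `camera` are placeholders for an existing three.js
 * setup.
 *
 *   const label = document.createElement('div');
 *   label.textContent = 'annotation';
 *   mesh.add(new CSS2DObject(label));
 *
 *   const labelRenderer = new CSS2DRenderer();
 *   labelRenderer.setSize(window.innerWidth, window.innerHeight);
 *   document.body.appendChild(labelRenderer.domElement);
 *   // In the render loop, after the WebGL render:
 *   labelRenderer.render(scene, camera);
 */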
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const deserializeUrl = (url) => (!!url && url !== 'null') ? toFullUrl(url) : null;
const assertIsArCandidate = () => {
if (IS_WEBXR_AR_CANDIDATE) {
return;
}
const missingApis = [];
if (!HAS_WEBXR_DEVICE_API) {
missingApis.push('WebXR Device API');
}
if (!HAS_WEBXR_HIT_TEST_API) {
missingApis.push('WebXR Hit Test API');
}
throw new Error(`The following APIs are required for AR, but are missing in this browser: ${missingApis.join(', ')}`);
};
/**
* Converts a partial URL string to a fully qualified URL string.
*
* @param {String} url
* @return {String}
*/
const toFullUrl = (partialUrl) => {
const url = new URL(partialUrl, window.location.toString());
return url.toString();
};
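/*
 * Example (illustrative; assuming the page is https://example.com/dir/index.html):
 *
 *   toFullUrl('assets/model.glb');       // => 'https://example.com/dir/assets/model.glb'
 *   deserializeUrl('assets/model.glb');  // => the same fully qualified URL
 *   deserializeUrl('null');              // => null (the literal string 'null' is rejected)
 *   deserializeUrl(null);                // => null
 */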
/**
 * Returns a throttled version of a given function that is invoked at most
 * once within a given threshold of time in milliseconds.
 *
 * The throttled version of the function has a "flush" property that resets the
 * threshold for cases when immediate invocation is desired.
*/
const throttle = (fn, ms) => {
let timer = null;
const throttled = (...args) => {
if (timer != null) {
return;
}
fn(...args);
timer = self.setTimeout(() => timer = null, ms);
};
throttled.flush = () => {
if (timer != null) {
self.clearTimeout(timer);
timer = null;
}
};
return throttled;
};
const debounce = (fn, ms) => {
let timer = null;
return (...args) => {
if (timer != null) {
self.clearTimeout(timer);
}
timer = self.setTimeout(() => {
timer = null;
fn(...args);
}, ms);
};
};
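/*
 * Usage sketch (illustrative; the handlers are placeholders): throttle invokes
 * immediately and then ignores calls for `ms` milliseconds, while debounce
 * waits until `ms` milliseconds have passed without another call.
 *
 *   const onScroll = throttle(() => updateHotspotVisibility(), 100);
 *   window.addEventListener('scroll', onScroll);
 *   onScroll.flush();  // clears the timer so the next call fires immediately
 *
 *   const onResize = debounce(() => recomputeLayout(), 250);
 *   window.addEventListener('resize', onResize);
 */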
/**
* @param {Number} value
* @param {Number} lowerLimit
* @param {Number} upperLimit
* @return {Number} value clamped within lowerLimit..upperLimit
*/
const clamp = (value, lowerLimit, upperLimit) => Math.max(lowerLimit, Math.min(upperLimit, value));
// The DPR we use for a "capped" scenario (see resolveDpr below):
const CAPPED_DEVICE_PIXEL_RATIO = 1;
/**
* This helper analyzes the layout of the current page to decide if we should
* use the natural device pixel ratio, or a capped value.
*
* We cap DPR if there is no meta viewport (suggesting that user is not
* consciously specifying how to scale the viewport relative to the device
* screen size).
*
* The rationale is that this condition typically leads to a pathological
* outcome on mobile devices. When the window dimensions are scaled up on a
* device with a high DPR, we create a canvas that is much larger than
 * appropriate to accommodate the pixel density if we naively use the
* reported DPR.
*
* This value needs to be measured in real time, as device pixel ratio can
* change over time (e.g., when a user zooms the page). Also, in some cases
* (such as Firefox on Android), the window's innerWidth is initially reported
* as the same as the screen's availWidth but changes later.
*
* A user who specifies a meta viewport, thereby consciously creating scaling
* conditions where <model-viewer> is slow, will be encouraged to live their
* best life.
*/
const resolveDpr = (() => {
// If true, implies that the user is conscious of the viewport scaling
// relative to the device screen size.
const HAS_META_VIEWPORT_TAG = (() => {
const metas = document.head != null ?
Array.from(document.head.querySelectorAll('meta')) :
[];
for (const meta of metas) {
if (meta.name === 'viewport') {
return true;
}
}
return false;
})();
if (!HAS_META_VIEWPORT_TAG) {
console.warn('No <meta name="viewport"> detected; <model-viewer> will cap pixel density at 1.');
}
return () => HAS_META_VIEWPORT_TAG ? window.devicePixelRatio :
CAPPED_DEVICE_PIXEL_RATIO;
})();
/**
* Debug mode is enabled when one of the two following conditions is true:
*
* 1. A 'model-viewer-debug-mode' query parameter is present in the current
* search string
* 2. There is a global object ModelViewerElement with a debugMode property set
* to true
*/
const isDebugMode = (() => {
const debugQueryParameterName = 'model-viewer-debug-mode';
const debugQueryParameter = new RegExp(`[\?&]${debugQueryParameterName}(&|$)`);
return () => (self.ModelViewerElement &&
self.ModelViewerElement.debugMode) ||
(self.location && self.location.search &&
self.location.search.match(debugQueryParameter));
})();
/**
* Returns the first key in a Map in iteration order.
*
* NOTE(cdata): This is necessary because IE11 does not implement iterator
* methods of Map, and polymer-build does not polyfill these methods for
* compatibility and performance reasons. This helper proposes that it is
* a reasonable compromise to sacrifice a very small amount of runtime
* performance in IE11 for the sake of code clarity.
*/
const getFirstMapKey = (map) => {
if (map.keys != null) {
return map.keys().next().value || null;
}
let firstKey = null;
try {
map.forEach((_value, key, _map) => {
firstKey = key;
// Stop iterating the Map with forEach:
throw new Error();
});
}
catch (_error) {
}
return firstKey;
};
const timePasses = (ms = 0) => new Promise(resolve => setTimeout(resolve, ms));
/**
* @param {EventTarget|EventDispatcher} target
* @param {string} eventName
* @param {?Function} predicate
*/
const waitForEvent = (target, eventName, predicate = null) => new Promise(resolve => {
function handler(event) {
if (!predicate || predicate(event)) {
resolve(event);
target.removeEventListener(eventName, handler);
}
}
target.addEventListener(eventName, handler);
});
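/*
 * Usage sketch (illustrative; the event detail shape is a placeholder):
 *
 *   const image = new Image();
 *   image.src = 'texture.png';
 *   await waitForEvent(image, 'load');
 *
 *   // With a predicate, only a matching event resolves the promise:
 *   await waitForEvent(target, 'progress', (event) => event.detail.done === true);
 */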
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const SETTLING_TIME = 10000; // plenty long enough
const MIN_DECAY_MILLISECONDS = 0.001;
const DECAY_MILLISECONDS = 50;
/**
 * The Damper class is a generic second-order critically damped system that
 * advances one step of the requested length of time per update. Its only
 * parameter is the decay time (DECAY_MILLISECONDS by default); this common
 * parameter makes all states converge at the same rate regardless of scale.
 * xNormalization is a number that provides the rough scale of x, so that the
 * nil-speed clamping also settles at roughly the same convergence for all
 * states.
*/
class Damper {
constructor(decayMilliseconds = DECAY_MILLISECONDS) {
this.velocity = 0;
this.naturalFrequency = 0;
this.setDecayTime(decayMilliseconds);
}
setDecayTime(decayMilliseconds) {
this.naturalFrequency =
1 / Math.max(MIN_DECAY_MILLISECONDS, decayMilliseconds);
}
update(x, xGoal, timeStepMilliseconds, xNormalization) {
const nilSpeed = 0.0002 * this.naturalFrequency;
if (x == null || xNormalization === 0) {
return xGoal;
}
if (x === xGoal && this.velocity === 0) {
return xGoal;
}
if (timeStepMilliseconds < 0) {
return x;
}
// Exact solution to a critically damped second-order system, where:
// acceleration = this.naturalFrequency * this.naturalFrequency * (xGoal
// - x) - 2 * this.naturalFrequency * this.velocity;
const deltaX = (x - xGoal);
const intermediateVelocity = this.velocity + this.naturalFrequency * deltaX;
const intermediateX = deltaX + timeStepMilliseconds * intermediateVelocity;
const decay = Math.exp(-this.naturalFrequency * timeStepMilliseconds);
const newVelocity = (intermediateVelocity - this.naturalFrequency * intermediateX) * decay;
const acceleration = -this.naturalFrequency * (newVelocity + intermediateVelocity * decay);
if (Math.abs(newVelocity) < nilSpeed * Math.abs(xNormalization) &&
acceleration * deltaX >= 0) {
// This ensures the controls settle and stop calling this function instead
// of asymptotically approaching their goal.
this.velocity = 0;
return xGoal;
}
else {
this.velocity = newVelocity;
return xGoal + intermediateX * decay;
}
}
}
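/*
 * Usage sketch (illustrative): a Damper is stepped once per frame toward a
 * goal; the last argument gives the rough scale of the value so the damper
 * knows when it has effectively settled.
 *
 *   const damper = new Damper();  // 50 ms decay by default
 *   let fov = 45;
 *   const goalFov = 30;
 *   function onFrame(deltaMilliseconds) {
 *     fov = damper.update(fov, goalFov, deltaMilliseconds, 45);
 *   }
 */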
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const numberNode = (value, unit) => ({ type: 'number', number: value, unit });
/**
* Given a string representing a comma-separated set of CSS-like expressions,
* parses and returns an array of ASTs that correspond to those expressions.
*
* Currently supported syntax includes:
*
* - functions (top-level and nested)
* - calc() arithmetic operators
* - numbers with units
 * - hexadecimal-encoded colors in 3, 6 or 8 digit form
* - idents
*
* All syntax is intended to match the parsing rules and semantics of the actual
* CSS spec as closely as possible.
*
* @see https://www.w3.org/TR/CSS2/
* @see https://www.w3.org/TR/css-values-3/
*/
const parseExpressions = (() => {
const cache = {};
const MAX_PARSE_ITERATIONS = 1000; // Arbitrarily large
return (inputString) => {
const cacheKey = inputString;
if (cacheKey in cache) {
return cache[cacheKey];
}
const expressions = [];
let parseIterations = 0;
while (inputString) {
if (++parseIterations > MAX_PARSE_ITERATIONS) {
// Avoid a potentially infinite loop due to typos:
inputString = '';
break;
}
const expressionParseResult = parseExpression(inputString);
const expression = expressionParseResult.nodes[0];
if (expression == null || expression.terms.length === 0) {
break;
}
expressions.push(expression);
inputString = expressionParseResult.remainingInput;
}
return cache[cacheKey] = expressions;
};
})();
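/*
 * Example (abbreviated output): parsing a space-separated set of lengths, such
 * as a hotspot's data-position attribute, yields one expression whose terms
 * are number nodes.
 *
 *   parseExpressions('0m 1.5m 50cm');
 *   // => [{
 *   //      type: 'expression',
 *   //      terms: [
 *   //        {type: 'number', number: 0, unit: 'm'},
 *   //        {type: 'number', number: 1.5, unit: 'm'},
 *   //        {type: 'number', number: 50, unit: 'cm'}
 *   //      ]
 *   //    }]
 */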
/**
* Parse a single expression. For the purposes of our supported syntax, an
* expression is the set of semantically meaningful terms that appear before the
 * next comma, or between the parens of a function invocation.
*/
const parseExpression = (() => {
const IS_IDENT_RE = /^(\-\-|[a-z\u0240-\uffff])/i;
const IS_OPERATOR_RE = /^([\*\+\/]|[\-]\s)/i;
const IS_EXPRESSION_END_RE = /^[\),]/;
const FUNCTION_ARGUMENTS_FIRST_TOKEN = '(';
const HEX_FIRST_TOKEN = '#';
return (inputString) => {
const terms = [];
while (inputString.length) {
inputString = inputString.trim();
if (IS_EXPRESSION_END_RE.test(inputString)) {
break;
}
else if (inputString[0] === FUNCTION_ARGUMENTS_FIRST_TOKEN) {
const { nodes, remainingInput } = parseFunctionArguments(inputString);
inputString = remainingInput;
terms.push({
type: 'function',
name: { type: 'ident', value: 'calc' },
arguments: nodes
});
}
else if (IS_IDENT_RE.test(inputString)) {
const identParseResult = parseIdent(inputString);
const identNode = identParseResult.nodes[0];
inputString = identParseResult.remainingInput;
if (inputString[0] === FUNCTION_ARGUMENTS_FIRST_TOKEN) {
const { nodes, remainingInput } = parseFunctionArguments(inputString);
terms.push({ type: 'function', name: identNode, arguments: nodes });
inputString = remainingInput;
}
else {
terms.push(identNode);
}
}
else if (IS_OPERATOR_RE.test(inputString)) {
// Operators are always a single character, so just pluck them out:
terms.push({ type: 'operator', value: inputString[0] });
inputString = inputString.slice(1);
}
else {
const { nodes, remainingInput } = inputString[0] === HEX_FIRST_TOKEN ?
parseHex(inputString) :
parseNumber(inputString);
// The remaining string may not have had any meaningful content. Exit
// early if this is the case:
if (nodes.length === 0) {
break;
}
terms.push(nodes[0]);
inputString = remainingInput;
}
}
return { nodes: [{ type: 'expression', terms }], remainingInput: inputString };
};
})();
/**
* An ident is something like a function name or the keyword "auto".
*/
const parseIdent = (() => {
const NOT_IDENT_RE = /[^a-z^0-9^_^\-^\u0240-\uffff]/i;
return (inputString) => {
const match = inputString.match(NOT_IDENT_RE);
const ident = match == null ? inputString : inputString.substr(0, match.index);
const remainingInput = match == null ? '' : inputString.substr(match.index);
return { nodes: [{ type: 'ident', value: ident }], remainingInput };
};
})();
/**
* Parses a number. A number value can be expressed with an integer or
* non-integer syntax, and usually includes a unit (but does not strictly
* require one for our purposes).
*/
const parseNumber = (() => {
// @see https://www.w3.org/TR/css-syntax/#number-token-diagram
const VALUE_RE = /[\+\-]?(\d+[\.]\d+|\d+|[\.]\d+)([eE][\+\-]?\d+)?/;
const UNIT_RE = /^[a-z%]+/i;
const ALLOWED_UNITS = /^(m|mm|cm|rad|deg|[%])$/;
return (inputString) => {
const valueMatch = inputString.match(VALUE_RE);
const value = valueMatch == null ? '0' : valueMatch[0];
inputString = value == null ? inputString : inputString.slice(value.length);
const unitMatch = inputString.match(UNIT_RE);
let unit = unitMatch != null && unitMatch[0] !== '' ? unitMatch[0] : null;
const remainingInput = unitMatch == null ? inputString : inputString.slice(unit.length);
if (unit != null && !ALLOWED_UNITS.test(unit)) {
unit = null;
}
return {
nodes: [{
type: 'number',
number: parseFloat(value) || 0,
unit: unit
}],
remainingInput
};
};
})();
/**
 * Parses a hexadecimal-encoded color in 3, 6 or 8 digit form.
*/
const parseHex = (() => {
// TODO(cdata): right now we don't actually enforce the number of digits
const HEX_RE = /^[a-f0-9]*/i;
return (inputString) => {
inputString = inputString.slice(1).trim();
const hexMatch = inputString.match(HEX_RE);
const nodes = hexMatch == null ? [] : [{ type: 'hex', value: hexMatch[0] }];
return {
nodes,
remainingInput: hexMatch == null ? inputString :
inputString.slice(hexMatch[0].length)
};
};
})();
/**
 * Parses arguments passed to a function invocation (e.g., the expressions
* within a matched set of parens).
*/
const parseFunctionArguments = (inputString) => {
const expressionNodes = [];
// Consume the opening paren
inputString = inputString.slice(1).trim();
while (inputString.length) {
const expressionParseResult = parseExpression(inputString);
expressionNodes.push(expressionParseResult.nodes[0]);
inputString = expressionParseResult.remainingInput.trim();
if (inputString[0] === ',') {
inputString = inputString.slice(1).trim();
}
else if (inputString[0] === ')') {
// Consume the closing paren and stop parsing
inputString = inputString.slice(1);
break;
}
}
return { nodes: expressionNodes, remainingInput: inputString };
};
const $visitedTypes = Symbol('visitedTypes');
/**
* An ASTWalker walks an array of ASTs such as the type produced by
* parseExpressions and invokes a callback for a configured set of nodes that
* the user wishes to "visit" during the walk.
*/
class ASTWalker {
constructor(visitedTypes) {
this[$visitedTypes] = visitedTypes;
}
/**
* Walk the given set of ASTs, and invoke the provided callback for nodes that
* match the filtered set that the ASTWalker was constructed with.
*/
walk(ast, callback) {
const remaining = ast.slice();
while (remaining.length) {
const next = remaining.shift();
if (this[$visitedTypes].indexOf(next.type) > -1) {
callback(next);
}
switch (next.type) {
case 'expression':
remaining.unshift(...next.terms);
break;
case 'function':
remaining.unshift(next.name, ...next.arguments);
break;
}
}
}
}
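/*
 * Usage sketch (illustrative): walk the ASTs produced by parseExpressions and
 * visit only nodes of the selected types.
 *
 *   const numbers = [];
 *   const walker = new ASTWalker(['number']);
 *   walker.walk(parseExpressions('calc(2m + 50cm) 0deg'), (node) => {
 *     numbers.push(node.number);
 *   });
 *   // numbers is now [2, 50, 0]
 */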
const ZERO = Object.freeze({ type: 'number', number: 0, unit: null });
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Ensures that a given number is expressed in radians. If the number is already
* in radians, does nothing. If the value is in degrees, converts it to radians.
* If the value has no specified unit, the unit is assumed to be radians. If the
* value is not in radians or degrees, the value is resolved as 0 radians.
*
* Also accepts a second argument that is a default value to use if the input
* numberNode number is NaN or Infinity.
*/
const degreesToRadians = (numberNode, fallbackRadianValue = 0) => {
let { number, unit } = numberNode;
if (!isFinite(number)) {
number = fallbackRadianValue;
unit = 'rad';
}
else if (numberNode.unit === 'rad' || numberNode.unit == null) {
return numberNode;
}
const valueIsDegrees = unit === 'deg' && number != null;
const value = valueIsDegrees ? number : 0;
const radians = value * Math.PI / 180;
return { type: 'number', number: radians, unit: 'rad' };
};
/**
* Converts a given length to meters. Currently supported input units are
* meters, centimeters and millimeters.
*
* Also accepts a second argument that is a default value to use if the input
* numberNode number is NaN or Infinity.
*/
const lengthToBaseMeters = (numberNode, fallbackMeterValue = 0) => {
let { number, unit } = numberNode;
if (!isFinite(number)) {
number = fallbackMeterValue;
unit = 'm';
}
else if (numberNode.unit === 'm') {
return numberNode;
}
let scale;
switch (unit) {
default:
scale = 1;
break;
case 'cm':
scale = 1 / 100;
break;
case 'mm':
scale = 1 / 1000;
break;
}
const value = scale * number;
return { type: 'number', number: value, unit: 'm' };
};
/**
* Normalizes the unit of a given input number so that it is expressed in a
* preferred unit. For length nodes, the return value will be expressed in
* meters. For angle nodes, the return value will be expressed in radians.
*
* Also takes a fallback number that is used when the number value is not a
* valid number or when the unit of the given number cannot be normalized.
*/
const normalizeUnit = (() => {
const identity = (node) => node;
const unitNormalizers = {
'rad': identity,
'deg': degreesToRadians,
'm': identity,
'mm': lengthToBaseMeters,
'cm': lengthToBaseMeters
};
return (node, fallback = ZERO) => {
let { number, unit } = node;
if (!isFinite(number)) {
number = fallback.number;
unit = fallback.unit;
}
if (unit == null) {
return node;
}
const normalize = unitNormalizers[unit];
if (normalize == null) {
return fallback;
}
return normalize(node);
};
})();
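/*
 * Example: converting parsed number nodes to the preferred base units.
 *
 *   normalizeUnit({type: 'number', number: 90, unit: 'deg'});
 *   // => {type: 'number', number: Math.PI / 2, unit: 'rad'}
 *
 *   normalizeUnit({type: 'number', number: 50, unit: 'cm'});
 *   // => {type: 'number', number: 0.5, unit: 'm'}
 */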
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The Hotspot object is a reference-counted slot. If decrement() returns true,
* it should be removed from the tree so it can be garbage-collected.
*/
class Hotspot extends CSS2DObject {
constructor(config) {
super(document.createElement('div'));
this.normal = new Vector3(0, 1, 0);
this.initialized = false;
this.referenceCount = 1;
this.pivot = document.createElement('div');
this.slot = document.createElement('slot');
this.element.classList.add('annotation-wrapper');
this.slot.name = config.name;
this.element.appendChild(this.pivot);
this.pivot.appendChild(this.slot);
this.updatePosition(config.position);
this.updateNormal(config.normal);
}
get facingCamera() {
return !this.element.classList.contains('hide');
}
/**
* Sets the hotspot to be in the highly visible foreground state.
*/
show() {
if (!this.facingCamera || !this.initialized) {
this.updateVisibility(true);
}
}
/**
* Sets the hotspot to be in the diminished background state.
*/
hide() {
if (this.facingCamera || !this.initialized) {
this.updateVisibility(false);
}
}
/**
* Call this when adding elements to the same slot to keep track.
*/
increment() {
this.referenceCount++;
}
/**
* Call this when removing elements from the slot; returns true when the slot
* is unused.
*/
decrement() {
if (this.referenceCount > 0) {
--this.referenceCount;
}
return this.referenceCount === 0;
}
/**
* Change the position of the hotspot to the input string, in the same format
* as the data-position attribute.
*/
updatePosition(position) {
if (position == null)
return;
const positionNodes = parseExpressions(position)[0].terms;
for (let i = 0; i < 3; ++i) {
this.position.setComponent(i, normalizeUnit(positionNodes[i]).number);
}
this.updateMatrixWorld();
}
/**
* Change the hotspot's normal to the input string, in the same format as the
* data-normal attribute.
*/
updateNormal(normal) {
if (normal == null)
return;
const normalNodes = parseExpressions(normal)[0].terms;
for (let i = 0; i < 3; ++i) {
this.normal.setComponent(i, normalizeUnit(normalNodes[i]).number);
}
}
orient(radians) {
this.pivot.style.transform = `rotate(${radians}rad)`;
}
updateVisibility(show) {
// NOTE: IE11 doesn't support a second arg for classList.toggle
if (show) {
this.element.classList.remove('hide');
}
else {
this.element.classList.add('hide');
}
// NOTE: ShadyDOM doesn't support slot.assignedElements, otherwise we could
// use that here.
this.slot.assignedNodes().forEach((node) => {
if (node.nodeType !== Node.ELEMENT_NODE) {
return;
}
const element = node;
// Visibility attribute can be configured per-node in the hotspot:
const visibilityAttribute = element.dataset.visibilityAttribute;
if (visibilityAttribute != null) {
const attributeName = `data-${visibilityAttribute}`;
// NOTE: IE11 doesn't support toggleAttribute
if (show) {
element.setAttribute(attributeName, '');
}
else {
element.removeAttribute(attributeName);
}
}
element.dispatchEvent(new CustomEvent('hotspot-visibility', {
detail: {
visible: show,
},
}));
});
this.initialized = true;
}
}
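/*
 * Usage sketch (illustrative; the config strings mirror the element's
 * data-position and data-normal attributes, and `modelScene` is a placeholder
 * ModelScene):
 *
 *   const hotspot = new Hotspot({
 *     name: 'hotspot-1',
 *     position: '0m 0.25m 0.1m',
 *     normal: '0m 1m 0m'
 *   });
 *   modelScene.addHotspot(hotspot);
 *   hotspot.orient(0);  // keep the annotation upright
 */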
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Performs a reduction across all the vertices of the input model and all its
* children. The supplied function takes the reduced value and a vertex and
 * returns the newly reduced value. The reduction starts from initialValue.
*
* Adapted from Three.js, @see https://github.com/mrdoob/three.js/blob/7e0a78beb9317e580d7fa4da9b5b12be051c6feb/src/math/Box3.js#L241
*/
const reduceVertices = (model, func, initialValue) => {
let value = initialValue;
const vertex = new Vector3();
model.traverse((object) => {
let i, l;
object.updateWorldMatrix(false, false);
const geometry = object.geometry;
if (geometry !== undefined) {
if (geometry.isGeometry) {
const vertices = geometry.vertices;
for (i = 0, l = vertices.length; i < l; i++) {
vertex.copy(vertices[i]);
vertex.applyMatrix4(object.matrixWorld);
value = func(value, vertex);
}
}
else if (geometry.isBufferGeometry) {
const { position } = geometry.attributes;
if (position !== undefined) {
for (i = 0, l = position.count; i < l; i++) {
vertex.fromBufferAttribute(position, i)
.applyMatrix4(object.matrixWorld);
value = func(value, vertex);
}
}
}
}
});
return value;
};
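/*
 * Usage sketch (illustrative): computing a bounding box over every vertex of
 * an object hierarchy, the same pattern ModelScene uses for tight bounds.
 * `object3D` is a placeholder for any THREE.Object3D.
 *
 *   const box = reduceVertices(
 *       object3D, (box, vertex) => box.expandByPoint(vertex), new Box3());
 */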
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Nothing within OFFSET (a fraction of the scene's height) of the bottom of
// the scene casts a shadow
// (this is to avoid having a baked-in shadow plane cast its own shadow).
const OFFSET = 0.002;
// The softness [0, 1] of the shadow is mapped to a resolution between
// 2^LOG_MAX_RESOLUTION and 2^LOG_MIN_RESOLUTION.
const LOG_MAX_RESOLUTION = 9;
const LOG_MIN_RESOLUTION = 6;
// Animated models are not in general contained in their bounding box, as this
// is calculated only for their resting pose. We create a cubic shadow volume
// for animated models sized to their largest bounding box dimension multiplied
// by this scale factor.
const ANIMATION_SCALING = 2;
/**
* The Shadow class creates a shadow that fits a given scene and follows a
* target. This shadow will follow the scene without any updates needed so long
* as the shadow and scene are both parented to the same object (call it the
* scene) and this scene is passed as the target parameter to the shadow's
* constructor. We also must constrain the scene to motion within the horizontal
* plane and call the setRotation() method whenever the scene's Y-axis rotation
* changes. For motion outside of the horizontal plane, this.needsUpdate must be
* set to true.
*
* The softness of the shadow is controlled by changing its resolution, making
* softer shadows faster, but less precise.
*/
class Shadow extends DirectionalLight {
constructor(scene, softness, side) {
super();
this.shadowMaterial = new ShadowMaterial;
this.boundingBox = new Box3;
this.size = new Vector3;
this.shadowScale = 1;
this.isAnimated = false;
this.side = 'bottom';
this.needsUpdate = false;
// We use the light only to cast a shadow, not to light the scene.
this.intensity = 0;
this.castShadow = true;
this.frustumCulled = false;
this.floor = new Mesh(new PlaneGeometry, this.shadowMaterial);
this.floor.rotateX(-Math.PI / 2);
this.floor.receiveShadow = true;
this.floor.castShadow = false;
this.floor.frustumCulled = false;
this.add(this.floor);
scene.target.add(this);
this.target = scene.target;
this.setScene(scene, softness, side);
}
/**
* Update the shadow's size and position for a new scene. Softness is also
* needed, as this controls the shadow's resolution.
*/
setScene(scene, softness, side) {
this.side = side;
this.isAnimated = scene.animationNames.length > 0;
this.boundingBox.copy(scene.boundingBox);
this.size.copy(scene.size);
if (this.side === 'back') {
const { min, max } = this.boundingBox;
[min.y, min.z] = [min.z, min.y];
[max.y, max.z] = [max.z, max.y];
[this.size.y, this.size.z] = [this.size.z, this.size.y];
this.rotation.x = Math.PI / 2;
this.rotation.y = Math.PI;
}
const { boundingBox, size } = this;
if (this.isAnimated) {
const maxDimension = Math.max(size.x, size.y, size.z) * ANIMATION_SCALING;
size.y = maxDimension;
boundingBox.expandByVector(size.subScalar(maxDimension).multiplyScalar(-0.5));
boundingBox.max.y = boundingBox.min.y + maxDimension;
size.set(maxDimension, maxDimension, maxDimension);
}
boundingBox.getCenter(this.floor.position);
const shadowOffset = boundingBox.max.y + size.y * OFFSET;
if (side === 'bottom') {
this.position.y = shadowOffset;
this.shadow.camera.up.set(0, 0, 1);
}
else {
this.position.y = 0;
this.position.z = shadowOffset;
this.shadow.camera.up.set(0, 1, 0);
}
this.setSoftness(softness);
}
/**
* Update the shadow's resolution based on softness (between 0 and 1). Should
* not be called frequently, as this results in reallocation.
*/
setSoftness(softness) {
const resolution = Math.pow(2, LOG_MAX_RESOLUTION -
softness * (LOG_MAX_RESOLUTION - LOG_MIN_RESOLUTION));
this.setMapSize(resolution);
}
/**
* Lower-level version of the above function.
*/
setMapSize(maxMapSize) {
const { camera, mapSize, map } = this.shadow;
const { size, boundingBox } = this;
if (map != null) {
map.dispose();
this.shadow.map = null;
}
if (this.isAnimated) {
maxMapSize *= ANIMATION_SCALING;
}
const width = Math.floor(size.x > size.z ? maxMapSize : maxMapSize * size.x / size.z);
const height = Math.floor(size.x > size.z ? maxMapSize * size.z / size.x : maxMapSize);
mapSize.set(width, height);
// These pads account for the softening radius around the shadow.
const widthPad = 2.5 * size.x / width;
const heightPad = 2.5 * size.z / height;
camera.left = -boundingBox.max.x - widthPad;
camera.right = -boundingBox.min.x + widthPad;
camera.bottom = boundingBox.min.z - heightPad;
camera.top = boundingBox.max.z + heightPad;
this.setScaleAndOffset(this.shadowScale, 0);
this.floor.scale.set(size.x + 2 * widthPad, size.z + 2 * heightPad, 1);
this.needsUpdate = true;
}
/**
* Set the shadow's intensity (0 to 1), which is just its opacity. Turns off
* shadow rendering if zero.
*/
setIntensity(intensity) {
this.shadowMaterial.opacity = intensity;
if (intensity > 0) {
this.visible = true;
this.floor.visible = true;
}
else {
this.visible = false;
this.floor.visible = false;
}
}
getIntensity() {
return this.shadowMaterial.opacity;
}
/**
* The shadow does not rotate with its parent transforms, so the rotation must
* be manually updated here if it rotates in world space. The input is its
* absolute orientation about the Y-axis (other rotations are not supported).
*/
setRotation(radiansY) {
if (this.side !== 'bottom') {
// We don't support rotation about a horizontal axis yet.
this.shadow.updateMatrices(this);
return;
}
this.shadow.camera.up.set(Math.sin(radiansY), 0, Math.cos(radiansY));
this.shadow.updateMatrices(this);
}
/**
* The scale is also not inherited from parents, so it must be set here in
* accordance with any transforms. An offset can also be specified to move the
* shadow vertically relative to the bottom of the scene. Positive is up, so
* values are generally negative.
*/
setScaleAndOffset(scale, offset) {
const sizeY = this.size.y;
const { camera } = this.shadow;
this.shadowScale = scale;
camera.near = 0;
camera.far = sizeY - offset / scale;
camera.updateProjectionMatrix();
camera.scale.setScalar(scale);
// Floor plane is up slightly from the bottom of the bounding box to avoid
// Z-fighting with baked-in shadows and to stay inside the shadow camera.
const shadowOffset = sizeY * OFFSET;
this.floor.position.y = 2 * shadowOffset - camera.far;
}
}
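/*
 * Usage sketch (illustrative; `modelScene` is a placeholder ModelScene whose
 * bounding box, size and animation names have already been computed):
 *
 *   const shadow = new Shadow(modelScene, 0.5, 'bottom');
 *   shadow.setIntensity(0.7);
 *   // When the scene rotates about the Y axis:
 *   shadow.setRotation(modelScene.yaw);
 */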
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const DEFAULT_FOV_DEG = 45;
const DEFAULT_HALF_FOV = (DEFAULT_FOV_DEG / 2) * Math.PI / 180;
const SAFE_RADIUS_RATIO = Math.sin(DEFAULT_HALF_FOV);
const DEFAULT_TAN_FOV = Math.tan(DEFAULT_HALF_FOV);
const view = new Vector3();
const target = new Vector3();
const normalWorld = new Vector3();
const raycaster = new Raycaster();
const vector3$1 = new Vector3();
/**
* A THREE.Scene object that takes a Model and CanvasHTMLElement and
* constructs a framed scene based off of the canvas dimensions.
* Provides lights and cameras to be used in a renderer.
*/
class ModelScene extends Scene {
constructor({ canvas, element, width, height }) {
super();
this.context = null;
this.annotationRenderer = new CSS2DRenderer();
this.width = 1;
this.height = 1;
this.aspect = 1;
this.isDirty = false;
this.renderCount = 0;
this.externalRenderer = null;
// These default camera values are never used, as they are reset once the
// model is loaded and framing is computed.
this.camera = new PerspectiveCamera(45, 1, 0.1, 100);
this.url = null;
this.target = new Object3D();
this.modelContainer = new Object3D();
this.animationNames = [];
this.boundingBox = new Box3();
this.size = new Vector3();
this.idealCameraDistance = 0;
this.fieldOfViewAspect = 0;
this.framedFieldOfView = DEFAULT_FOV_DEG;
this.shadow = null;
this.shadowIntensity = 0;
this.shadowSoftness = 1;
this.exposure = 1;
this.canScale = true;
this.tightBounds = false;
this.goalTarget = new Vector3();
this.targetDamperX = new Damper();
this.targetDamperY = new Damper();
this.targetDamperZ = new Damper();
this._currentGLTF = null;
this.cancelPendingSourceChange = null;
this.animationsByName = new Map();
this.currentAnimationAction = null;
this.name = 'ModelScene';
this.element = element;
this.canvas = canvas;
// These default camera values are never used, as they are reset once the
// model is loaded and framing is computed.
this.camera = new PerspectiveCamera(45, 1, 0.1, 100);
this.camera.name = 'MainCamera';
this.add(this.target);
this.setSize(width, height);
this.target.name = 'Target';
this.modelContainer.name = 'ModelContainer';
this.target.add(this.modelContainer);
this.mixer = new AnimationMixer(this.modelContainer);
const { domElement } = this.annotationRenderer;
const { style } = domElement;
style.display = 'none';
style.pointerEvents = 'none';
style.position = 'absolute';
style.top = '0';
this.element.shadowRoot.querySelector('.default').appendChild(domElement);
}
/**
     * Creates the 2D context lazily: when there is only one <model-viewer>
     * element, the renderer's 3D context can be displayed directly, so this
     * extra context is only needed to copy the renderings into when there is
     * more than one.
*/
createContext() {
{
this.context = this.canvas.getContext('2d');
}
}
/**
     * Pass in a THREE.Object3D to be used as the model controlled by this
     * scene.
*/
async setObject(model) {
this.reset();
this.modelContainer.add(model);
await this.setupScene();
}
/**
* Sets the model via URL.
*/
async setSource(url, progressCallback = () => { }) {
if (!url || url === this.url) {
progressCallback(1);
return;
}
this.reset();
this.url = url;
if (this.externalRenderer != null) {
const framingInfo = await this.externalRenderer.load(progressCallback);
this.idealCameraDistance = framingInfo.framedRadius / SAFE_RADIUS_RATIO;
this.fieldOfViewAspect = framingInfo.fieldOfViewAspect;
this.frameModel();
this.dispatchEvent({ type: 'model-load', url: this.url });
return;
}
// If we have pending work due to a previous source change in progress,
// cancel it so that we do not incur a race condition:
if (this.cancelPendingSourceChange != null) {
this.cancelPendingSourceChange();
this.cancelPendingSourceChange = null;
}
let gltf;
try {
gltf = await new Promise(async (resolve, reject) => {
this.cancelPendingSourceChange = () => reject();
try {
const result = await this.element[$renderer].loader.load(url, this.element, progressCallback);
resolve(result);
}
catch (error) {
reject(error);
}
});
}
catch (error) {
if (error == null) {
// Loading was cancelled, so silently return
return;
}
throw error;
}
this.reset();
this.url = url;
this._currentGLTF = gltf;
if (gltf != null) {
this.modelContainer.add(gltf.scene);
}
const { animations } = gltf;
const animationsByName = new Map();
const animationNames = [];
for (const animation of animations) {
animationsByName.set(animation.name, animation);
animationNames.push(animation.name);
}
this.animations = animations;
this.animationsByName = animationsByName;
this.animationNames = animationNames;
await this.setupScene();
}
async setupScene() {
this.updateBoundingBox();
let target = null;
if (this.tightBounds === true) {
await this.element.requestUpdate('cameraTarget');
target = this.getTarget();
}
this.updateFraming(target);
this.frameModel();
this.setShadowIntensity(this.shadowIntensity);
this.dispatchEvent({ type: 'model-load', url: this.url });
}
reset() {
this.url = null;
this.isDirty = true;
if (this.shadow != null) {
this.shadow.setIntensity(0);
}
const gltf = this._currentGLTF;
// Remove all current children
if (gltf != null) {
for (const child of this.modelContainer.children) {
this.modelContainer.remove(child);
}
gltf.dispose();
this._currentGLTF = null;
}
if (this.currentAnimationAction != null) {
this.currentAnimationAction.stop();
this.currentAnimationAction = null;
}
this.mixer.stopAllAction();
this.mixer.uncacheRoot(this);
}
get currentGLTF() {
return this._currentGLTF;
}
/**
* Updates the ModelScene for a new container size in CSS pixels.
*/
setSize(width, height) {
if (this.width === width && this.height === height) {
return;
}
this.width = Math.max(width, 1);
this.height = Math.max(height, 1);
this.annotationRenderer.setSize(width, height);
this.aspect = this.width / this.height;
this.frameModel();
if (this.externalRenderer != null) {
const dpr = resolveDpr();
this.externalRenderer.resize(width * dpr, height * dpr);
}
this.isDirty = true;
}
updateBoundingBox() {
this.target.remove(this.modelContainer);
if (this.tightBounds === true) {
const bound = (box, vertex) => {
return box.expandByPoint(vertex);
};
this.boundingBox = reduceVertices(this.modelContainer, bound, new Box3());
}
else {
this.boundingBox.setFromObject(this.modelContainer);
}
this.boundingBox.getSize(this.size);
this.target.add(this.modelContainer);
}
/**
* Calculates the idealCameraDistance and fieldOfViewAspect that allows the 3D
* object to be framed tightly in a 2D window of any aspect ratio without
* clipping at any camera orbit. The camera's center target point can be
* optionally specified. If no center is specified, it defaults to the center
* of the bounding box, which means asymmetric models will tend to be tight on
* one side instead of both. Proper choice of center can correct this.
*/
updateFraming(center = null) {
this.target.remove(this.modelContainer);
if (center == null) {
center = this.boundingBox.getCenter(new Vector3());
}
const radiusSquared = (value, vertex) => {
return Math.max(value, center.distanceToSquared(vertex));
};
const framedRadius = Math.sqrt(reduceVertices(this.modelContainer, radiusSquared, 0));
this.idealCameraDistance = framedRadius / SAFE_RADIUS_RATIO;
const horizontalFov = (value, vertex) => {
vertex.sub(center);
const radiusXZ = Math.sqrt(vertex.x * vertex.x + vertex.z * vertex.z);
return Math.max(value, radiusXZ / (this.idealCameraDistance - Math.abs(vertex.y)));
};
this.fieldOfViewAspect =
reduceVertices(this.modelContainer, horizontalFov, 0) / DEFAULT_TAN_FOV;
this.target.add(this.modelContainer);
}
/**
     * Sets the framedFieldOfView based on the aspect ratio of the window in
* order to keep the model fully visible at any camera orientation.
*/
frameModel() {
const vertical = DEFAULT_TAN_FOV * Math.max(1, this.fieldOfViewAspect / this.aspect);
this.framedFieldOfView = 2 * Math.atan(vertical) * 180 / Math.PI;
}
/**
* Returns the size of the corresponding canvas element.
*/
getSize() {
return { width: this.width, height: this.height };
}
/**
* Sets the point in model coordinates the model should orbit/pivot around.
*/
setTarget(modelX, modelY, modelZ) {
this.goalTarget.set(-modelX, -modelY, -modelZ);
}
/**
     * Sets the decay time of the target dampers, which affects the speed of
     * target transitions.
*/
setTargetDamperDecayTime(decayMilliseconds) {
this.targetDamperX.setDecayTime(decayMilliseconds);
this.targetDamperY.setDecayTime(decayMilliseconds);
this.targetDamperZ.setDecayTime(decayMilliseconds);
}
/**
* Gets the point in model coordinates the model should orbit/pivot around.
*/
getTarget() {
return vector3$1.copy(this.goalTarget).multiplyScalar(-1);
}
/**
* Shifts the model to the target point immediately instead of easing in.
*/
jumpToGoal() {
this.updateTarget(SETTLING_TIME);
}
/**
* This should be called every frame with the frame delta to cause the target
* to transition to its set point.
*/
updateTarget(delta) {
const goal = this.goalTarget;
const target = this.target.position;
if (!goal.equals(target)) {
const radius = this.idealCameraDistance;
let { x, y, z } = target;
x = this.targetDamperX.update(x, goal.x, delta, radius);
y = this.targetDamperY.update(y, goal.y, delta, radius);
z = this.targetDamperZ.update(z, goal.z, delta, radius);
this.target.position.set(x, y, z);
this.target.updateMatrixWorld();
this.setShadowRotation(this.yaw);
this.isDirty = true;
}
}
/**
* Yaw the +z (front) of the model toward the indicated world coordinates.
*/
pointTowards(worldX, worldZ) {
const { x, z } = this.position;
this.yaw = Math.atan2(worldX - x, worldZ - z);
}
/**
* Yaw is the scene's orientation about the y-axis, around the rotation
* center.
*/
set yaw(radiansY) {
this.rotation.y = radiansY;
this.updateMatrixWorld(true);
this.setShadowRotation(radiansY);
this.isDirty = true;
}
get yaw() {
return this.rotation.y;
}
set animationTime(value) {
this.mixer.setTime(value);
}
get animationTime() {
if (this.currentAnimationAction != null) {
return this.currentAnimationAction.time;
}
return 0;
}
get duration() {
if (this.currentAnimationAction != null &&
this.currentAnimationAction.getClip()) {
return this.currentAnimationAction.getClip().duration;
}
return 0;
}
get hasActiveAnimation() {
return this.currentAnimationAction != null;
}
/**
* Plays an animation if there are any associated with the current model.
* Accepts an optional string name of an animation to play. If no name is
* provided, or if no animation is found by the given name, always falls back
* to playing the first animation.
*/
playAnimation(name = null, crossfadeTime = 0) {
if (this._currentGLTF == null) {
return;
}
const { animations } = this;
if (animations == null || animations.length === 0) {
console.warn(`Cannot play animation (model does not have any animations)`);
return;
}
let animationClip = null;
if (name != null) {
animationClip = this.animationsByName.get(name);
}
if (animationClip == null) {
animationClip = animations[0];
}
try {
const { currentAnimationAction: lastAnimationAction } = this;
this.currentAnimationAction =
this.mixer.clipAction(animationClip, this).play();
this.currentAnimationAction.enabled = true;
if (lastAnimationAction != null &&
this.currentAnimationAction !== lastAnimationAction) {
this.currentAnimationAction.crossFadeFrom(lastAnimationAction, crossfadeTime, false);
}
}
catch (error) {
console.error(error);
}
}
stopAnimation() {
if (this.currentAnimationAction != null) {
this.currentAnimationAction.stop();
this.currentAnimationAction.reset();
this.currentAnimationAction = null;
}
this.mixer.stopAllAction();
}
updateAnimation(step) {
this.mixer.update(step);
}
/**
* Call if the object has been changed in such a way that the shadow's shape
* has changed (not a rotation about the Y axis).
*/
updateShadow() {
const shadow = this.shadow;
if (shadow != null) {
const side = this.element.arPlacement === 'wall' ? 'back' : 'bottom';
shadow.setScene(this, this.shadowSoftness, side);
}
}
/**
* Sets the shadow's intensity, lazily creating the shadow as necessary.
*/
setShadowIntensity(shadowIntensity) {
this.shadowIntensity = shadowIntensity;
if (this._currentGLTF == null) {
return;
}
let shadow = this.shadow;
const side = this.element.arPlacement === 'wall' ? 'back' : 'bottom';
if (shadow != null) {
shadow.setIntensity(shadowIntensity);
shadow.setScene(this, this.shadowSoftness, side);
}
else if (shadowIntensity > 0) {
shadow = new Shadow(this, this.shadowSoftness, side);
shadow.setIntensity(shadowIntensity);
this.shadow = shadow;
}
}
/**
* Sets the shadow's softness by mapping a [0, 1] softness parameter to the
* shadow's resolution. This involves reallocation, so it should not be
* changed frequently. Softer shadows are cheaper to render.
*/
setShadowSoftness(softness) {
this.shadowSoftness = softness;
const shadow = this.shadow;
if (shadow != null) {
shadow.setSoftness(softness);
}
}
/**
* The shadow must be rotated manually to match any global rotation applied to
* this model. The input is the global orientation about the Y axis.
*/
setShadowRotation(radiansY) {
const shadow = this.shadow;
if (shadow != null) {
shadow.setRotation(radiansY);
}
}
/**
* Call to check if the shadow needs an updated render; returns true if an
* update is needed and resets the state.
*/
isShadowDirty() {
const shadow = this.shadow;
if (shadow == null) {
return false;
}
else {
const { needsUpdate } = shadow;
shadow.needsUpdate = false;
return needsUpdate;
}
}
/**
* Shift the floor vertically from the bottom of the model's bounding box by
* offset (should generally be negative).
*/
setShadowScaleAndOffset(scale, offset) {
const shadow = this.shadow;
if (shadow != null) {
shadow.setScaleAndOffset(scale, offset);
}
}
/**
* This method returns the world position and model-space normal of the point
* on the mesh corresponding to the input pixel coordinates given relative to
* the model-viewer element. If the mesh is not hit, the result is null.
*/
positionAndNormalFromPoint(pixelPosition, object = this) {
raycaster.setFromCamera(pixelPosition, this.camera);
const hits = raycaster.intersectObject(object, true);
if (hits.length === 0) {
return null;
}
const hit = hits[0];
if (hit.face == null) {
return null;
}
hit.face.normal.applyNormalMatrix(new Matrix3().getNormalMatrix(hit.object.matrixWorld));
return { position: hit.point, normal: hit.face.normal };
}
/**
* The following methods are for operating on the set of Hotspot objects
* attached to the scene. These come from DOM elements, provided to slots by
* the Annotation Mixin.
*/
addHotspot(hotspot) {
this.target.add(hotspot);
// This happens automatically in render(), but we do it early so that
// the slots appear in the shadow DOM and the elements get attached,
// allowing us to dispatch events on them.
this.annotationRenderer.domElement.appendChild(hotspot.element);
}
removeHotspot(hotspot) {
this.target.remove(hotspot);
}
/**
* Helper method to apply a function to all hotspots.
*/
forHotspots(func) {
const { children } = this.target;
for (let i = 0, l = children.length; i < l; i++) {
const hotspot = children[i];
if (hotspot instanceof Hotspot) {
func(hotspot);
}
}
}
/**
* Update the CSS visibility of the hotspots based on whether their normals
* point toward the camera.
*/
updateHotspots(viewerPosition) {
this.forHotspots((hotspot) => {
view.copy(viewerPosition);
target.setFromMatrixPosition(hotspot.matrixWorld);
view.sub(target);
normalWorld.copy(hotspot.normal)
.transformDirection(this.target.matrixWorld);
if (view.dot(normalWorld) < 0) {
hotspot.hide();
}
else {
hotspot.show();
}
});
}
/**
* Rotate all hotspots to an absolute orientation given by the input number of
* radians. Zero returns them to upright.
*/
orientHotspots(radians) {
this.forHotspots((hotspot) => {
hotspot.orient(radians);
});
}
/**
* Set the rendering visibility of all hotspots. This is used to hide them
* during transitions and such.
*/
setHotspotsVisibility(visible) {
this.forHotspots((hotspot) => {
hotspot.visible = visible;
});
}
postRender() {
const { camera } = this;
if (this.isDirty) {
this.updateHotspots(camera.position);
this.annotationRenderer.domElement.style.display = '';
this.annotationRenderer.render(this, camera);
}
}
}
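/*
 * Framing sketch (numbers derived from frameModel() above, rounded): with the
 * 45-degree reference FOV, an object whose fieldOfViewAspect is 1.5 shown in a
 * square canvas (aspect 1.0) gets
 *
 *   vertical          = tan(22.5°) * max(1, 1.5 / 1.0) ≈ 0.414 * 1.5 ≈ 0.621
 *   framedFieldOfView = 2 * atan(0.621) * 180 / PI    ≈ 63.7 degrees
 *
 * so the camera's field of view widens enough to keep the model's largest
 * horizontal extent in view at any orbit.
 */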
/**
* This class generates custom mipmaps for a roughness map by encoding the lost variation in the
* normal map mip levels as increased roughness in the corresponding roughness mip levels. This
* helps with rendering accuracy for MeshStandardMaterial, and also helps with anti-aliasing when
* using PMREM. If the normal map is larger than the roughness map, the roughness map will be
* enlarged to match the dimensions of the normal map.
*/
const _mipmapMaterial = _getMipmapMaterial();
const _mesh = new Mesh( new PlaneGeometry( 2, 2 ), _mipmapMaterial );
const _flatCamera = new OrthographicCamera( 0, 1, 0, 1, 0, 1 );
let _tempTarget = null;
let _renderer = null;
class RoughnessMipmapper {
constructor( renderer ) {
_renderer = renderer;
_renderer.compile( _mesh, _flatCamera );
}
generateMipmaps( material ) {
if ( 'roughnessMap' in material === false ) return;
const { roughnessMap, normalMap } = material;
if ( roughnessMap === null || normalMap === null || ! roughnessMap.generateMipmaps || material.userData.roughnessUpdated ) return;
material.userData.roughnessUpdated = true;
let width = Math.max( roughnessMap.image.width, normalMap.image.width );
let height = Math.max( roughnessMap.image.height, normalMap.image.height );
if ( ! MathUtils.isPowerOfTwo( width ) || ! MathUtils.isPowerOfTwo( height ) ) return;
const oldTarget = _renderer.getRenderTarget();
const autoClear = _renderer.autoClear;
_renderer.autoClear = false;
if ( _tempTarget === null || _tempTarget.width !== width || _tempTarget.height !== height ) {
if ( _tempTarget !== null ) _tempTarget.dispose();
_tempTarget = new WebGLRenderTarget( width, height, { depthBuffer: false } );
_tempTarget.scissorTest = true;
}
if ( width !== roughnessMap.image.width || height !== roughnessMap.image.height ) {
const params = {
wrapS: roughnessMap.wrapS,
wrapT: roughnessMap.wrapT,
magFilter: roughnessMap.magFilter,
minFilter: roughnessMap.minFilter,
depthBuffer: false
};
const newRoughnessTarget = new WebGLRenderTarget( width, height, params );
newRoughnessTarget.texture.generateMipmaps = true;
// Setting the render target causes the memory to be allocated.
_renderer.setRenderTarget( newRoughnessTarget );
material.roughnessMap = newRoughnessTarget.texture;
if ( material.metalnessMap == roughnessMap ) material.metalnessMap = material.roughnessMap;
if ( material.aoMap == roughnessMap ) material.aoMap = material.roughnessMap;
// Copy UV transform parameters
material.roughnessMap.offset.copy( roughnessMap.offset );
material.roughnessMap.repeat.copy( roughnessMap.repeat );
material.roughnessMap.center.copy( roughnessMap.center );
material.roughnessMap.rotation = roughnessMap.rotation;
material.roughnessMap.matrixAutoUpdate = roughnessMap.matrixAutoUpdate;
material.roughnessMap.matrix.copy( roughnessMap.matrix );
}
_mipmapMaterial.uniforms.roughnessMap.value = roughnessMap;
_mipmapMaterial.uniforms.normalMap.value = normalMap;
const position = new Vector2( 0, 0 );
const texelSize = _mipmapMaterial.uniforms.texelSize.value;
for ( let mip = 0; width >= 1 && height >= 1; ++ mip, width /= 2, height /= 2 ) {
// Rendering to a mip level is not allowed in webGL1. Instead we must set
// up a secondary texture to write the result to, then copy it back to the
// proper mipmap level.
texelSize.set( 1.0 / width, 1.0 / height );
if ( mip == 0 ) texelSize.set( 0.0, 0.0 );
_tempTarget.viewport.set( position.x, position.y, width, height );
_tempTarget.scissor.set( position.x, position.y, width, height );
_renderer.setRenderTarget( _tempTarget );
_renderer.render( _mesh, _flatCamera );
_renderer.copyFramebufferToTexture( position, material.roughnessMap, mip );
_mipmapMaterial.uniforms.roughnessMap.value = material.roughnessMap;
}
if ( roughnessMap !== material.roughnessMap ) roughnessMap.dispose();
_renderer.setRenderTarget( oldTarget );
_renderer.autoClear = autoClear;
}
dispose() {
_mipmapMaterial.dispose();
_mesh.geometry.dispose();
if ( _tempTarget != null ) _tempTarget.dispose();
}
}
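/*
* Usage sketch (illustrative only, not part of the library): driving the
* RoughnessMipmapper defined above. It assumes a three.js WebGLRenderer and a
* MeshStandardMaterial that already has a roughnessMap and a normalMap with
* power-of-two dimensions; the function name is hypothetical.
*/
function exampleGenerateRoughnessMipmaps( renderer, material ) {
// A single mipmapper can be reused for many materials; it caches its
// temporary render target between calls.
const mipmapper = new RoughnessMipmapper( renderer );
// Rewrites the mip chain of material.roughnessMap in place, or swaps in a
// render-target texture when the normal map is larger than the roughness map.
mipmapper.generateMipmaps( material );
// Releases the internal shader material, plane geometry and temp target.
mipmapper.dispose();
}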
function _getMipmapMaterial() {
const shaderMaterial = new RawShaderMaterial( {
uniforms: {
roughnessMap: { value: null },
normalMap: { value: null },
texelSize: { value: new Vector2( 1, 1 ) }
},
vertexShader: /* glsl */`
precision mediump float;
precision mediump int;
attribute vec3 position;
attribute vec2 uv;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = vec4( position, 1.0 );
}
`,
fragmentShader: /* glsl */`
precision mediump float;
precision mediump int;
varying vec2 vUv;
uniform sampler2D roughnessMap;
uniform sampler2D normalMap;
uniform vec2 texelSize;
#define ENVMAP_TYPE_CUBE_UV
vec4 envMapTexelToLinear( vec4 a ) { return a; }
#include <cube_uv_reflection_fragment>
float roughnessToVariance( float roughness ) {
float variance = 0.0;
if ( roughness >= r1 ) {
variance = ( r0 - roughness ) * ( v1 - v0 ) / ( r0 - r1 ) + v0;
} else if ( roughness >= r4 ) {
variance = ( r1 - roughness ) * ( v4 - v1 ) / ( r1 - r4 ) + v1;
} else if ( roughness >= r5 ) {
variance = ( r4 - roughness ) * ( v5 - v4 ) / ( r4 - r5 ) + v4;
} else {
float roughness2 = roughness * roughness;
variance = 1.79 * roughness2 * roughness2;
}
return variance;
}
float varianceToRoughness( float variance ) {
float roughness = 0.0;
if ( variance >= v1 ) {
roughness = ( v0 - variance ) * ( r1 - r0 ) / ( v0 - v1 ) + r0;
} else if ( variance >= v4 ) {
roughness = ( v1 - variance ) * ( r4 - r1 ) / ( v1 - v4 ) + r1;
} else if ( variance >= v5 ) {
roughness = ( v4 - variance ) * ( r5 - r4 ) / ( v4 - v5 ) + r4;
} else {
roughness = pow( 0.559 * variance, 0.25 ); // 0.559 = 1.0 / 1.79
}
return roughness;
}
void main() {
gl_FragColor = texture2D( roughnessMap, vUv, - 1.0 );
if ( texelSize.x == 0.0 ) return;
float roughness = gl_FragColor.g;
float variance = roughnessToVariance( roughness );
vec3 avgNormal = vec3( 0.0 ); // explicit initialization; GLSL does not guarantee zero-init
for ( float x = - 1.0; x < 2.0; x += 2.0 ) {
for ( float y = - 1.0; y < 2.0; y += 2.0 ) {
vec2 uv = vUv + vec2( x, y ) * 0.25 * texelSize;
avgNormal += normalize( texture2D( normalMap, uv, - 1.0 ).xyz - 0.5 );
}
}
variance += 1.0 - 0.25 * length( avgNormal );
gl_FragColor.g = varianceToRoughness( variance );
}
`,
blending: NoBlending,
depthTest: false,
depthWrite: false
} );
shaderMaterial.type = 'RoughnessMipmapper';
return shaderMaterial;
}
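/*
* Note on the shader above (explanatory sketch): roughnessToVariance() and
* varianceToRoughness() are piecewise inverses built on the r0..r5 / v0..v5
* constants that come from the included <cube_uv_reflection_fragment> chunk.
* Only the lowest branch is analytic: variance = 1.79 * roughness^4, whose
* inverse is roughness = pow( 0.559 * variance, 0.25 ) since 0.559 ~= 1 / 1.79.
* A minimal JS sketch of that branch (hypothetical helper, for illustration):
*/
function exampleRoughnessVarianceRoundTrip( roughness ) {
const variance = 1.79 * Math.pow( roughness, 4 );
const recovered = Math.pow( 0.559 * variance, 0.25 );
// e.g. roughness 0.2 -> variance 0.002864 -> recovered ~0.2000
return { variance, recovered };
}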
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const RADIUS = 0.2;
const LINE_WIDTH = 0.03;
const MAX_OPACITY = 0.75;
const SEGMENTS = 12;
const DELTA_PHI = Math.PI / (2 * SEGMENTS);
const vector2 = new Vector2();
/**
* Adds a quarter-annulus of vertices to the array, centered on cornerX,
* cornerY.
*/
const addCorner = (vertices, cornerX, cornerY) => {
let phi = cornerX > 0 ? (cornerY > 0 ? 0 : -Math.PI / 2) :
(cornerY > 0 ? Math.PI / 2 : Math.PI);
for (let i = 0; i <= SEGMENTS; ++i) {
vertices.push(cornerX + (RADIUS - LINE_WIDTH) * Math.cos(phi), cornerY + (RADIUS - LINE_WIDTH) * Math.sin(phi), 0, cornerX + RADIUS * Math.cos(phi), cornerY + RADIUS * Math.sin(phi), 0);
phi += DELTA_PHI;
}
};
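/*
* Worked example for the helper above (illustrative only): each call pushes
* SEGMENTS + 1 = 13 inner/outer vertex pairs, i.e. 26 vertices (78 floats), so
* the four corners of a PlacementBox contribute 104 vertices before the index
* buffer below stitches them into a closed rounded-rectangle ribbon.
*/
function examplePlacementBoxVertexCount() {
const pairsPerCorner = SEGMENTS + 1; // 13 pairs of inner/outer ring points
const verticesPerCorner = pairsPerCorner * 2; // 26 vertices per corner
return 4 * verticesPerCorner; // 104 vertices in total
}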
/**
* This class is a set of two coincident planes. The first is just a cute box
* outline with rounded corners and damped opacity to indicate the floor extents
* of a scene. It is purposely larger than the scene's bounding box by RADIUS on
* all sides so that small scenes are still visible / selectable. Its center is
* carved out by the vertices themselves so that its fragment shader adds as
* little rendering time as possible.
*
* The child plane is a simple plane with the same extents for use in hit
* testing (translation is triggered when the touch hits the plane, rotation
* otherwise).
*/
class PlacementBox extends Mesh {
constructor(scene, side) {
const geometry = new BufferGeometry();
const triangles = [];
const vertices = [];
const { size, boundingBox } = scene;
const x = size.x / 2;
const y = (side === 'back' ? size.y : size.z) / 2;
addCorner(vertices, x, y);
addCorner(vertices, -x, y);
addCorner(vertices, -x, -y);
addCorner(vertices, x, -y);
const numVertices = vertices.length / 3;
for (let i = 0; i < numVertices - 2; i += 2) {
triangles.push(i, i + 1, i + 3, i, i + 3, i + 2);
}
const i = numVertices - 2;
triangles.push(i, i + 1, 1, i, 1, 0);
geometry.setAttribute('position', new Float32BufferAttribute(vertices, 3));
geometry.setIndex(triangles);
super(geometry);
this.side = side;
const material = this.material;
material.side = DoubleSide;
material.transparent = true;
material.opacity = 0;
this.goalOpacity = 0;
this.opacityDamper = new Damper();
this.hitPlane =
new Mesh(new PlaneGeometry(2 * (x + RADIUS), 2 * (y + RADIUS)));
this.hitPlane.visible = false;
this.add(this.hitPlane);
boundingBox.getCenter(this.position);
switch (side) {
case 'bottom':
this.rotateX(-Math.PI / 2);
this.shadowHeight = boundingBox.min.y;
this.position.y = this.shadowHeight;
break;
case 'back':
this.shadowHeight = boundingBox.min.z;
this.position.z = this.shadowHeight;
}
scene.target.add(this);
}
/**
* Get the world hit position if the touch coordinates hit the box, and null
* otherwise. Pass the scene in to get access to its raycaster.
*/
getHit(scene, screenX, screenY) {
vector2.set(screenX, -screenY);
this.hitPlane.visible = true;
const hitResult = scene.positionAndNormalFromPoint(vector2, this.hitPlane);
this.hitPlane.visible = false;
return hitResult == null ? null : hitResult.position;
}
/**
* Offset the height of the box relative to the bottom of the scene. Positive
* is up, so generally only negative values are used.
*/
set offsetHeight(offset) {
if (this.side === 'back') {
this.position.z = this.shadowHeight + offset;
}
else {
this.position.y = this.shadowHeight + offset;
}
}
get offsetHeight() {
if (this.side === 'back') {
return this.position.z - this.shadowHeight;
}
else {
return this.position.y - this.shadowHeight;
}
}
/**
* Set the box's visibility; it will fade in and out.
*/
set show(visible) {
this.goalOpacity = visible ? MAX_OPACITY : 0;
}
/**
* Call on each frame with the frame delta to fade the box.
*/
updateOpacity(delta) {
const material = this.material;
material.opacity =
this.opacityDamper.update(material.opacity, this.goalOpacity, delta, 1);
this.visible = material.opacity > 0;
}
/**
* Call this to clean up Three's cache when you remove the box.
*/
dispose() {
var _a;
const { geometry, material } = this.hitPlane;
geometry.dispose();
material.dispose();
this.geometry.dispose();
this.material.dispose();
(_a = this.parent) === null || _a === void 0 ? void 0 : _a.remove(this);
}
}
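/*
* Usage sketch (illustrative only): how the AR code below drives a
* PlacementBox. `scene` stands for the library's ModelScene, which must expose
* the size, boundingBox and target members used by the constructor above;
* `deltaMs` is the per-frame time delta in milliseconds. The function name is
* hypothetical.
*/
function examplePlacementBoxLifecycle( scene, screenX, screenY, deltaMs ) {
const box = new PlacementBox( scene, 'bottom' );
box.show = true; // fades in toward MAX_OPACITY
const hit = box.getHit( scene, screenX, screenY ); // world position or null
box.offsetHeight = -0.1; // drop the box below the model's floor height
box.updateOpacity( deltaMs ); // call once per frame to animate opacity
box.dispose(); // free geometry and material when done
return hit;
}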
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// number of initial null pose XRFrames allowed before we post not-tracking
const INIT_FRAMES = 30;
// AR shadow is not user-configurable. This is to pave the way for AR lighting
// estimation, which will be used once available in WebXR.
const AR_SHADOW_INTENSITY = 0.3;
const ROTATION_RATE = 1.5;
// Angle down (towards bottom of screen) from camera center ray to use for hit
// testing against the floor. This makes placement faster and more intuitive
// assuming the phone is held in portrait mode. That seems a reasonable
// assumption at the start of the session, and the UI will lack a landscape
// mode in order to encourage upright use.
const HIT_ANGLE_DEG = 20;
// Slow down the dampers for initial placement.
const INTRO_DECAY = 120;
const SCALE_SNAP_HIGH = 1.2;
const SCALE_SNAP_LOW = 1 / SCALE_SNAP_HIGH;
// For automatic dynamic viewport scaling, don't let the scale drop below this
// limit.
const MIN_VIEWPORT_SCALE = 0.25;
const ARStatus = {
NOT_PRESENTING: 'not-presenting',
SESSION_STARTED: 'session-started',
OBJECT_PLACED: 'object-placed',
FAILED: 'failed'
};
const ARTracking = {
TRACKING: 'tracking',
NOT_TRACKING: 'not-tracking'
};
const vector3 = new Vector3();
const matrix4 = new Matrix4();
const hitPosition = new Vector3();
class ARRenderer extends EventDispatcher {
constructor(renderer) {
super();
this.renderer = renderer;
this.currentSession = null;
this.placeOnWall = false;
this.cameraPosition = new Vector3();
this.placementBox = null;
this.lastTick = null;
this.turntableRotation = null;
this.oldShadowIntensity = null;
this.oldBackground = null;
this.frame = null;
this.initialHitSource = null;
this.transientHitTestSource = null;
this.inputSource = null;
this._presentedScene = null;
this.resolveCleanup = null;
this.exitWebXRButtonContainer = null;
this.overlay = null;
this.tracking = true;
this.frames = 0;
this.initialized = false;
this.projectionMatrix = new Matrix4();
this.projectionMatrixInverse = new Matrix4();
this.oldTarget = new Vector3();
this.placementComplete = false;
this.isTranslating = false;
this.isRotating = false;
this.isTwoFingering = false;
this.lastDragPosition = new Vector3();
this.firstRatio = 0;
this.lastAngle = 0;
this.goalPosition = new Vector3();
this.goalYaw = 0;
this.goalScale = 1;
this.xDamper = new Damper();
this.yDamper = new Damper();
this.zDamper = new Damper();
this.yawDamper = new Damper();
this.scaleDamper = new Damper();
this.onExitWebXRButtonContainerClick = () => this.stopPresenting();
this.onUpdateScene = () => {
if (this.placementBox != null && this.isPresenting) {
this.placementBox.dispose();
this.placementBox = new PlacementBox(this.presentedScene, this.placeOnWall ? 'back' : 'bottom');
}
};
this.onSelectStart = (event) => {
const hitSource = this.transientHitTestSource;
if (hitSource == null) {
return;
}
const fingers = this.frame.getHitTestResultsForTransientInput(hitSource);
const scene = this.presentedScene;
const box = this.placementBox;
if (fingers.length === 1) {
this.inputSource = event.inputSource;
const { axes } = this.inputSource.gamepad;
const hitPosition = box.getHit(this.presentedScene, axes[0], axes[1]);
box.show = true;
if (hitPosition != null) {
this.isTranslating = true;
this.lastDragPosition.copy(hitPosition);
}
else if (this.placeOnWall === false) {
this.isRotating = true;
this.lastAngle = axes[0] * ROTATION_RATE;
}
}
else if (fingers.length === 2) {
box.show = true;
this.isTwoFingering = true;
const { separation } = this.fingerPolar(fingers);
this.firstRatio = separation / scene.scale.x;
}
};
this.onSelectEnd = () => {
this.isTranslating = false;
this.isRotating = false;
this.isTwoFingering = false;
this.inputSource = null;
this.goalPosition.y +=
this.placementBox.offsetHeight * this.presentedScene.scale.x;
this.placementBox.show = false;
};
this.threeRenderer = renderer.threeRenderer;
this.threeRenderer.xr.enabled = true;
}
async resolveARSession() {
assertIsArCandidate();
const session = await navigator.xr.requestSession('immersive-ar', {
requiredFeatures: ['hit-test'],
optionalFeatures: ['dom-overlay'],
domOverlay: { root: this.overlay }
});
this.threeRenderer.xr.setReferenceSpaceType('local');
await this.threeRenderer.xr.setSession(session);
return session;
}
/**
* The currently presented scene, if any
*/
get presentedScene() {
return this._presentedScene;
}
/**
* Resolves to true if the renderer has detected all the necessary qualities
* to support presentation in AR.
*/
async supportsPresentation() {
try {
assertIsArCandidate();
return await navigator.xr.isSessionSupported('immersive-ar');
}
catch (error) {
console.warn('Request to present in WebXR denied:');
console.warn(error);
console.warn('Falling back to next ar-mode');
return false;
}
}
/**
* Present a scene in AR
*/
async present(scene) {
if (this.isPresenting) {
console.warn('Cannot present while a model is already presenting');
}
let waitForAnimationFrame = new Promise((resolve, _reject) => {
requestAnimationFrame(() => resolve());
});
scene.setHotspotsVisibility(false);
scene.isDirty = true;
// Render a frame to turn off the hotspots
await waitForAnimationFrame;
// This sets isPresenting to true
this._presentedScene = scene;
this.overlay = scene.element.shadowRoot.querySelector('div.default');
const currentSession = await this.resolveARSession();
currentSession.addEventListener('end', () => {
this.postSessionCleanup();
}, { once: true });
const exitButton = scene.element.shadowRoot.querySelector('.slot.exit-webxr-ar-button');
exitButton.classList.add('enabled');
exitButton.addEventListener('click', this.onExitWebXRButtonContainerClick);
this.exitWebXRButtonContainer = exitButton;
const viewerRefSpace = await currentSession.requestReferenceSpace('viewer');
this.tracking = true;
this.frames = 0;
this.initialized = false;
this.turntableRotation = scene.yaw;
this.goalYaw = scene.yaw;
this.goalScale = 1;
this.oldBackground = scene.background;
scene.background = null;
this.oldShadowIntensity = scene.shadowIntensity;
scene.setShadowIntensity(0);
this.oldTarget.copy(scene.getTarget());
scene.addEventListener('model-load', this.onUpdateScene);
const radians = HIT_ANGLE_DEG * Math.PI / 180;
const ray = this.placeOnWall === true ?
undefined :
new XRRay(new DOMPoint(0, 0, 0), { x: 0, y: -Math.sin(radians), z: -Math.cos(radians) });
currentSession.requestHitTestSource({ space: viewerRefSpace, offsetRay: ray })
.then(hitTestSource => {
this.initialHitSource = hitTestSource;
});
this.currentSession = currentSession;
this.placementBox =
new PlacementBox(scene, this.placeOnWall ? 'back' : 'bottom');
this.placementComplete = false;
this.xDamper.setDecayTime(INTRO_DECAY);
this.yDamper.setDecayTime(INTRO_DECAY);
this.zDamper.setDecayTime(INTRO_DECAY);
this.lastTick = performance.now();
this.dispatchEvent({ type: 'status', status: ARStatus.SESSION_STARTED });
}
/**
* If currently presenting a scene in AR, stops presentation and exits AR.
*/
async stopPresenting() {
if (!this.isPresenting) {
return;
}
const cleanupPromise = new Promise((resolve) => {
this.resolveCleanup = resolve;
});
try {
await this.currentSession.end();
await cleanupPromise;
}
catch (error) {
console.warn('Error while trying to end WebXR AR session');
console.warn(error);
this.postSessionCleanup();
}
}
/**
* True if a scene is currently in the process of being presented in AR
*/
get isPresenting() {
return this.presentedScene != null;
}
get target() {
return this.oldTarget;
}
updateTarget() {
const scene = this.presentedScene;
if (scene != null) {
const target = scene.getTarget();
this.oldTarget.copy(target);
if (this.placeOnWall) {
// Move the scene's target to the center of the back of the model's
// bounding box.
scene.setTarget(target.x, target.y, scene.boundingBox.min.z);
}
else {
// Move the scene's target to the model's floor height.
scene.setTarget(target.x, scene.boundingBox.min.y, target.z);
}
}
}
postSessionCleanup() {
const session = this.currentSession;
if (session != null) {
session.removeEventListener('selectstart', this.onSelectStart);
session.removeEventListener('selectend', this.onSelectEnd);
this.currentSession = null;
}
const scene = this.presentedScene;
if (scene != null) {
const { element } = scene;
scene.position.set(0, 0, 0);
scene.scale.set(1, 1, 1);
scene.setShadowScaleAndOffset(1, 0);
const yaw = this.turntableRotation;
if (yaw != null) {
scene.yaw = yaw;
}
const intensity = this.oldShadowIntensity;
if (intensity != null) {
scene.setShadowIntensity(intensity);
}
const background = this.oldBackground;
if (background != null) {
scene.background = background;
}
const point = this.oldTarget;
scene.setTarget(point.x, point.y, point.z);
scene.removeEventListener('model-load', this.onUpdateScene);
scene.orientHotspots(0);
element.requestUpdate('cameraTarget');
element.requestUpdate('maxCameraOrbit');
element[$onResize](element.getBoundingClientRect());
}
// Force the Renderer to update its size
this.renderer.height = 0;
const exitButton = this.exitWebXRButtonContainer;
if (exitButton != null) {
exitButton.classList.remove('enabled');
exitButton.removeEventListener('click', this.onExitWebXRButtonContainerClick);
this.exitWebXRButtonContainer = null;
}
const hitSource = this.transientHitTestSource;
if (hitSource != null) {
hitSource.cancel();
this.transientHitTestSource = null;
}
const hitSourceInitial = this.initialHitSource;
if (hitSourceInitial != null) {
hitSourceInitial.cancel();
this.initialHitSource = null;
}
if (this.placementBox != null) {
this.placementBox.dispose();
this.placementBox = null;
}
this.lastTick = null;
this.turntableRotation = null;
this.oldShadowIntensity = null;
this.oldBackground = null;
this._presentedScene = null;
this.frame = null;
this.inputSource = null;
this.overlay = null;
if (this.resolveCleanup != null) {
this.resolveCleanup();
}
this.dispatchEvent({ type: 'status', status: ARStatus.NOT_PRESENTING });
}
updateView(view) {
const viewMatrix = view.transform.matrix;
const scene = this.presentedScene;
const { camera } = scene;
camera.near = 0.1;
camera.far = 100;
this.presentedScene.orientHotspots(Math.atan2(viewMatrix[1], viewMatrix[5]));
this.cameraPosition.set(viewMatrix[12], viewMatrix[13], viewMatrix[14]);
if (!this.initialized) {
const { position, element } = scene;
const { width, height } = this.overlay.getBoundingClientRect();
scene.setSize(width, height);
if (this.threeRenderer.xr.getSession() != null) {
this.projectionMatrix.copy(this.threeRenderer.xr.getCamera(camera).projectionMatrix);
this.projectionMatrixInverse.copy(this.projectionMatrix).invert();
}
const { theta, radius } = element
.getCameraOrbit();
// Orient model to match the 3D camera view
const cameraDirection = vector3.set(viewMatrix[8], viewMatrix[9], viewMatrix[10]);
scene.yaw = Math.atan2(cameraDirection.x, cameraDirection.z) - theta;
this.goalYaw = scene.yaw;
position.copy(this.cameraPosition)
.add(cameraDirection.multiplyScalar(-1 * radius));
this.goalPosition.copy(position);
scene.setHotspotsVisibility(true);
this.initialized = true;
}
// Ensure the camera uses the AR projection matrix without inverting on
// every frame.
camera.projectionMatrix.copy(this.projectionMatrix);
camera.projectionMatrixInverse.copy(this.projectionMatrixInverse);
// Use automatic dynamic viewport scaling if supported.
if (view.requestViewportScale && view.recommendedViewportScale) {
const scale = view.recommendedViewportScale;
view.requestViewportScale(Math.max(scale, MIN_VIEWPORT_SCALE));
}
const layer = this.currentSession.renderState.baseLayer;
const viewport = layer.getViewport(view);
this.threeRenderer.setViewport(viewport.x, viewport.y, viewport.width, viewport.height);
}
placeInitially(frame) {
const hitSource = this.initialHitSource;
if (hitSource == null) {
return;
}
const hitTestResults = frame.getHitTestResults(hitSource);
if (hitTestResults.length == 0) {
return;
}
const hit = hitTestResults[0];
const hitPoint = this.getHitPoint(hit);
if (hitPoint == null) {
return;
}
this.placeModel(hitPoint);
hitSource.cancel();
this.initialHitSource = null;
const { session } = frame;
session.addEventListener('selectstart', this.onSelectStart);
session.addEventListener('selectend', this.onSelectEnd);
session
.requestHitTestSourceForTransientInput({ profile: 'generic-touchscreen' })
.then(hitTestSource => {
this.transientHitTestSource = hitTestSource;
});
}
getHitPoint(hitResult) {
const refSpace = this.threeRenderer.xr.getReferenceSpace();
const pose = hitResult.getPose(refSpace);
if (pose == null) {
return null;
}
const hitMatrix = matrix4.fromArray(pose.transform.matrix);
if (this.placeOnWall === true) {
// Orient the model to the wall's normal vector.
this.goalYaw = Math.atan2(hitMatrix.elements[4], hitMatrix.elements[6]);
}
// Check that the y-coordinate of the normal is large enough that the normal
// is pointing up for floor placement; opposite for wall placement.
return hitMatrix.elements[5] > 0.75 !== this.placeOnWall ?
hitPosition.setFromMatrixPosition(hitMatrix) :
null;
}
placeModel(hit) {
this.placementBox.show = true;
if (this.placeOnWall) {
this.goalPosition.copy(hit);
}
else {
this.goalPosition.y = hit.y;
}
this.updateTarget();
this.dispatchEvent({ type: 'status', status: ARStatus.OBJECT_PLACED });
}
fingerPolar(fingers) {
const fingerOne = fingers[0].inputSource.gamepad.axes;
const fingerTwo = fingers[1].inputSource.gamepad.axes;
const deltaX = fingerTwo[0] - fingerOne[0];
const deltaY = fingerTwo[1] - fingerOne[1];
const angle = Math.atan2(deltaY, deltaX);
let deltaYaw = this.lastAngle - angle;
if (deltaYaw > Math.PI) {
deltaYaw -= 2 * Math.PI;
}
else if (deltaYaw < -Math.PI) {
deltaYaw += 2 * Math.PI;
}
this.lastAngle = angle;
return {
separation: Math.sqrt(deltaX * deltaX + deltaY * deltaY),
deltaYaw: deltaYaw
};
}
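/*
* Worked example of the wrap-around handling in fingerPolar() above
* (illustrative only): if the previous two-finger angle was +170 degrees
* (2.967 rad) and the new angle is -170 degrees (-2.967 rad), the raw
* difference lastAngle - angle is +340 degrees (5.934 rad). Because that
* exceeds PI, 2 * PI is subtracted, giving -20 degrees (-0.349 rad), so the
* model takes the short way around instead of spinning nearly a full turn.
*/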
processInput(frame) {
const hitSource = this.transientHitTestSource;
if (hitSource == null) {
return;
}
if (!this.isTranslating && !this.isTwoFingering && !this.isRotating) {
return;
}
const fingers = frame.getHitTestResultsForTransientInput(hitSource);
const scene = this.presentedScene;
const scale = scene.scale.x;
// Rotating, translating and scaling are mutually exclusive operations; only
// one can happen at a time, but we can switch during a gesture.
if (this.isTwoFingering) {
if (fingers.length < 2) {
// If we lose the second finger, stop scaling (in fact, stop processing
// input altogether until a new gesture starts).
this.isTwoFingering = false;
}
else {
const { separation, deltaYaw } = this.fingerPolar(fingers);
if (this.placeOnWall === false) {
this.goalYaw += deltaYaw;
}
if (scene.canScale) {
const scale = separation / this.firstRatio;
this.goalScale =
(scale < SCALE_SNAP_HIGH && scale > SCALE_SNAP_LOW) ? 1 : scale;
}
}
return;
}
else if (fingers.length === 2) {
// If we were rotating or translating and we get a second finger, switch
// to scaling instead.
this.isTranslating = false;
this.isRotating = false;
this.isTwoFingering = true;
const { separation } = this.fingerPolar(fingers);
this.firstRatio = separation / scale;
return;
}
if (this.isRotating) {
const angle = this.inputSource.gamepad.axes[0] * ROTATION_RATE;
this.goalYaw += angle - this.lastAngle;
this.lastAngle = angle;
}
else if (this.isTranslating) {
fingers.forEach(finger => {
if (finger.inputSource !== this.inputSource ||
finger.results.length < 1) {
return;
}
const hit = this.getHitPoint(finger.results[0]);
if (hit == null) {
return;
}
this.goalPosition.sub(this.lastDragPosition);
if (this.placeOnWall === false) {
const offset = hit.y - this.lastDragPosition.y;
// When a lower floor is found, keep the model at the same height, but
// drop the placement box to the floor. The model falls on select end.
if (offset < 0) {
this.placementBox.offsetHeight = offset / scale;
this.presentedScene.setShadowScaleAndOffset(scale, offset);
// Interpolate hit ray up to drag plane
const cameraPosition = vector3.copy(this.cameraPosition);
const alpha = -offset / (cameraPosition.y - hit.y);
cameraPosition.multiplyScalar(alpha);
hit.multiplyScalar(1 - alpha).add(cameraPosition);
}
}
this.goalPosition.add(hit);
this.lastDragPosition.copy(hit);
});
}
}
moveScene(delta) {
const scene = this.presentedScene;
const { position, yaw, idealCameraDistance: radius } = scene;
const goal = this.goalPosition;
const oldScale = scene.scale.x;
const box = this.placementBox;
if (this.initialHitSource == null &&
(!goal.equals(position) || this.goalScale !== oldScale)) {
let { x, y, z } = position;
x = this.xDamper.update(x, goal.x, delta, radius);
y = this.yDamper.update(y, goal.y, delta, radius);
z = this.zDamper.update(z, goal.z, delta, radius);
position.set(x, y, z);
const newScale = this.scaleDamper.update(oldScale, this.goalScale, delta, 1);
scene.scale.set(newScale, newScale, newScale);
if (!this.isTranslating) {
const offset = goal.y - y;
if (this.placementComplete && this.placeOnWall === false) {
box.offsetHeight = offset / newScale;
scene.setShadowScaleAndOffset(newScale, offset);
}
else if (offset === 0) {
this.placementComplete = true;
box.show = false;
scene.setShadowIntensity(AR_SHADOW_INTENSITY);
this.xDamper.setDecayTime(DECAY_MILLISECONDS);
this.yDamper.setDecayTime(DECAY_MILLISECONDS);
this.zDamper.setDecayTime(DECAY_MILLISECONDS);
}
}
}
box.updateOpacity(delta);
scene.updateTarget(delta);
// yaw must be updated last, since this also updates the shadow position.
scene.yaw = this.yawDamper.update(yaw, this.goalYaw, delta, Math.PI);
}
/**
* Only public to make it testable.
*/
onWebXRFrame(time, frame) {
this.frame = frame;
++this.frames;
const refSpace = this.threeRenderer.xr.getReferenceSpace();
const pose = frame.getViewerPose(refSpace);
if (pose == null && this.tracking === true && this.frames > INIT_FRAMES) {
this.tracking = false;
this.dispatchEvent({ type: 'tracking', status: ARTracking.NOT_TRACKING });
}
const scene = this.presentedScene;
if (pose == null || scene == null || !scene.element[$sceneIsReady]()) {
this.threeRenderer.clear();
return;
}
if (this.tracking === false) {
this.tracking = true;
this.dispatchEvent({ type: 'tracking', status: ARTracking.TRACKING });
}
// WebXR may return multiple views, i.e. for headset AR. This
// isn't really supported at this point, but make a best-effort
// attempt to render other views also, using the first view
// as the main viewpoint.
let isFirstView = true;
for (const view of pose.views) {
this.updateView(view);
if (isFirstView) {
this.placeInitially(frame);
this.processInput(frame);
const delta = time - this.lastTick;
this.moveScene(delta);
this.renderer.preRender(scene, time, delta);
this.lastTick = time;
}
// TODO: This is a workaround for a Chrome bug, which should be fixed
// soon: https://bugs.chromium.org/p/chromium/issues/detail?id=1184085
const gl = this.threeRenderer.getContext();
gl.depthMask(false);
gl.clear(gl.DEPTH_BUFFER_BIT);
gl.depthMask(true);
this.threeRenderer.render(scene, scene.camera);
isFirstView = false;
}
}
}
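/*
* Usage sketch (illustrative only): the high-level AR flow. In the library the
* Renderer constructs and owns its ARRenderer; this sketch just shows the call
* order. `modelViewerRenderer` is assumed to be that Renderer (it must expose
* threeRenderer and preRender as used above) and `scene` a fully wired
* ModelScene whose element provides the overlay and exit-button slots that
* present() queries.
*/
async function exampleEnterAndExitAR( modelViewerRenderer, scene ) {
const arRenderer = new ARRenderer( modelViewerRenderer );
if ( await arRenderer.supportsPresentation() ) {
await arRenderer.present( scene ); // starts the immersive-ar session
// ... later, e.g. from the exit button handler:
await arRenderer.stopPresenting(); // ends the session and restores the scene
}
}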
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This Debugger exposes internal details of the <model-viewer> rendering
* substructure so that external tools can more easily inspect and operate on
* them.
*
* It also activates shader debugging on the associated GL context. Shader
* debugging trades performance for useful error information, so it is not
* recommended to activate this unless needed.
*/
class Debugger {
constructor(renderer) {
// Force WebGL shader debugging on:
renderer.threeRenderer.debug = { checkShaderErrors: true };
// Announce debug details at microtask timing to give the `Renderer`
// constructor time to complete its initialization, just to be on the safe
// side:
Promise.resolve().then(() => {
self.dispatchEvent(new CustomEvent('model-viewer-renderer-debug', {
detail: {
renderer,
THREE: {
ShaderMaterial,
Texture: Texture$1,
Mesh,
Scene,
PlaneBufferGeometry: PlaneGeometry,
OrthographicCamera,
WebGLRenderTarget
}
}
}));
});
}
addScene(scene) {
self.dispatchEvent(new CustomEvent('model-viewer-scene-added-debug', { detail: { scene } }));
}
removeScene(scene) {
self.dispatchEvent(new CustomEvent('model-viewer-scene-removed-debug', { detail: { scene } }));
}
}
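/*
* Usage sketch (illustrative only): external tooling can pick up the details
* announced by the Debugger above by listening on the global scope. Register
* the listener before constructing the Debugger, since the event is dispatched
* at microtask timing.
*/
function exampleListenForDebugDetails() {
self.addEventListener( 'model-viewer-renderer-debug', ( event ) => {
const { renderer, THREE } = event.detail;
console.log( 'Debugging renderer', renderer, 'with', Object.keys( THREE ) );
} );
}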
class SkeletonUtils {
static retarget( target, source, options = {} ) {
const pos = new Vector3(),
quat = new Quaternion(),
scale = new Vector3(),
bindBoneMatrix = new Matrix4(),
relativeMatrix = new Matrix4(),
globalMatrix = new Matrix4();
options.preserveMatrix = options.preserveMatrix !== undefined ? options.preserveMatrix : true;
options.preservePosition = options.preservePosition !== undefined ? options.preservePosition : true;
options.preserveHipPosition = options.preserveHipPosition !== undefined ? options.preserveHipPosition : false;
options.useTargetMatrix = options.useTargetMatrix !== undefined ? options.useTargetMatrix : false;
options.hip = options.hip !== undefined ? options.hip : 'hip';
options.names = options.names || {};
const sourceBones = source.isObject3D ? source.skeleton.bones : this.getBones( source ),
bones = target.isObject3D ? target.skeleton.bones : this.getBones( target );
let bindBones,
bone, name, boneTo,
bonesPosition;
// reset bones
if ( target.isObject3D ) {
target.skeleton.pose();
} else {
options.useTargetMatrix = true;
options.preserveMatrix = false;
}
if ( options.preservePosition ) {
bonesPosition = [];
for ( let i = 0; i < bones.length; i ++ ) {
bonesPosition.push( bones[ i ].position.clone() );
}
}
if ( options.preserveMatrix ) {
// reset matrix
target.updateMatrixWorld();
target.matrixWorld.identity();
// reset children matrix
for ( let i = 0; i < target.children.length; ++ i ) {
target.children[ i ].updateMatrixWorld( true );
}
}
if ( options.offsets ) {
bindBones = [];
for ( let i = 0; i < bones.length; ++ i ) {
bone = bones[ i ];
name = options.names[ bone.name ] || bone.name;
if ( options.offsets && options.offsets[ name ] ) {
bone.matrix.multiply( options.offsets[ name ] );
bone.matrix.decompose( bone.position, bone.quaternion, bone.scale );
bone.updateMatrixWorld();
}
bindBones.push( bone.matrixWorld.clone() );
}
}
for ( let i = 0; i < bones.length; ++ i ) {
bone = bones[ i ];
name = options.names[ bone.name ] || bone.name;
boneTo = this.getBoneByName( name, sourceBones );
globalMatrix.copy( bone.matrixWorld );
if ( boneTo ) {
boneTo.updateMatrixWorld();
if ( options.useTargetMatrix ) {
relativeMatrix.copy( boneTo.matrixWorld );
} else {
relativeMatrix.copy( target.matrixWorld ).invert();
relativeMatrix.multiply( boneTo.matrixWorld );
}
// ignore scale to extract rotation
scale.setFromMatrixScale( relativeMatrix );
relativeMatrix.scale( scale.set( 1 / scale.x, 1 / scale.y, 1 / scale.z ) );
// apply to global matrix
globalMatrix.makeRotationFromQuaternion( quat.setFromRotationMatrix( relativeMatrix ) );
if ( target.isObject3D ) {
const boneIndex = bones.indexOf( bone ),
wBindMatrix = bindBones ? bindBones[ boneIndex ] : bindBoneMatrix.copy( target.skeleton.boneInverses[ boneIndex ] ).invert();
globalMatrix.multiply( wBindMatrix );
}
globalMatrix.copyPosition( relativeMatrix );
}
if ( bone.parent && bone.parent.isBone ) {
bone.matrix.copy( bone.parent.matrixWorld ).invert();
bone.matrix.multiply( globalMatrix );
} else {
bone.matrix.copy( globalMatrix );
}
if ( options.preserveHipPosition && name === options.hip ) {
bone.matrix.setPosition( pos.set( 0, bone.position.y, 0 ) );
}
bone.matrix.decompose( bone.position, bone.quaternion, bone.scale );
bone.updateMatrixWorld();
}
if ( options.preservePosition ) {
for ( let i = 0; i < bones.length; ++ i ) {
bone = bones[ i ];
name = options.names[ bone.name ] || bone.name;
if ( name !== options.hip ) {
bone.position.copy( bonesPosition[ i ] );
}
}
}
if ( options.preserveMatrix ) {
// restore matrix
target.updateMatrixWorld( true );
}
}
static retargetClip( target, source, clip, options = {} ) {
options.useFirstFramePosition = options.useFirstFramePosition !== undefined ? options.useFirstFramePosition : false;
options.fps = options.fps !== undefined ? options.fps : 30;
options.names = options.names || [];
if ( ! source.isObject3D ) {
source = this.getHelperFromSkeleton( source );
}
const numFrames = Math.round( clip.duration * ( options.fps / 1000 ) * 1000 ),
delta = 1 / options.fps,
convertedTracks = [],
mixer = new AnimationMixer( source ),
bones = this.getBones( target.skeleton ),
boneDatas = [];
let positionOffset,
bone, boneTo, boneData,
name;
mixer.clipAction( clip ).play();
mixer.update( 0 );
source.updateMatrixWorld();
for ( let i = 0; i < numFrames; ++ i ) {
const time = i * delta;
this.retarget( target, source, options );
for ( let j = 0; j < bones.length; ++ j ) {
name = options.names[ bones[ j ].name ] || bones[ j ].name;
boneTo = this.getBoneByName( name, source.skeleton );
if ( boneTo ) {
bone = bones[ j ];
boneData = boneDatas[ j ] = boneDatas[ j ] || { bone: bone };
if ( options.hip === name ) {
if ( ! boneData.pos ) {
boneData.pos = {
times: new Float32Array( numFrames ),
values: new Float32Array( numFrames * 3 )
};
}
if ( options.useFirstFramePosition ) {
if ( i === 0 ) {
positionOffset = bone.position.clone();
}
bone.position.sub( positionOffset );
}
boneData.pos.times[ i ] = time;
bone.position.toArray( boneData.pos.values, i * 3 );
}
if ( ! boneData.quat ) {
boneData.quat = {
times: new Float32Array( numFrames ),
values: new Float32Array( numFrames * 4 )
};
}
boneData.quat.times[ i ] = time;
bone.quaternion.toArray( boneData.quat.values, i * 4 );
}
}
mixer.update( delta );
source.updateMatrixWorld();
}
for ( let i = 0; i < boneDatas.length; ++ i ) {
boneData = boneDatas[ i ];
if ( boneData ) {
if ( boneData.pos ) {
convertedTracks.push( new VectorKeyframeTrack(
'.bones[' + boneData.bone.name + '].position',
boneData.pos.times,
boneData.pos.values
) );
}
convertedTracks.push( new QuaternionKeyframeTrack(
'.bones[' + boneData.bone.name + '].quaternion',
boneData.quat.times,
boneData.quat.values
) );
}
}
mixer.uncacheAction( clip );
return new AnimationClip( clip.name, - 1, convertedTracks );
}
static getHelperFromSkeleton( skeleton ) {
const source = new SkeletonHelper( skeleton.bones[ 0 ] );
source.skeleton = skeleton;
return source;
}
static getSkeletonOffsets( target, source, options = {} ) {
const targetParentPos = new Vector3(),
targetPos = new Vector3(),
sourceParentPos = new Vector3(),
sourcePos = new Vector3(),
targetDir = new Vector2(),
sourceDir = new Vector2();
options.hip = options.hip !== undefined ? options.hip : 'hip';
options.names = options.names || {};
if ( ! source.isObject3D ) {
source = this.getHelperFromSkeleton( source );
}
const nameKeys = Object.keys( options.names ),
nameValues = Object.values( options.names ),
sourceBones = source.isObject3D ? source.skeleton.bones : this.getBones( source ),
bones = target.isObject3D ? target.skeleton.bones : this.getBones( target ),
offsets = [];
let bone, boneTo,
name, i;
target.skeleton.pose();
for ( i = 0; i < bones.length; ++ i ) {
bone = bones[ i ];
name = options.names[ bone.name ] || bone.name;
boneTo = this.getBoneByName( name, sourceBones );
if ( boneTo && name !== options.hip ) {
const boneParent = this.getNearestBone( bone.parent, nameKeys ),
boneToParent = this.getNearestBone( boneTo.parent, nameValues );
boneParent.updateMatrixWorld();
boneToParent.updateMatrixWorld();
targetParentPos.setFromMatrixPosition( boneParent.matrixWorld );
targetPos.setFromMatrixPosition( bone.matrixWorld );
sourceParentPos.setFromMatrixPosition( boneToParent.matrixWorld );
sourcePos.setFromMatrixPosition( boneTo.matrixWorld );
targetDir.subVectors(
new Vector2( targetPos.x, targetPos.y ),
new Vector2( targetParentPos.x, targetParentPos.y )
).normalize();
sourceDir.subVectors(
new Vector2( sourcePos.x, sourcePos.y ),
new Vector2( sourceParentPos.x, sourceParentPos.y )
).normalize();
const laterialAngle = targetDir.angle() - sourceDir.angle();
const offset = new Matrix4().makeRotationFromEuler(
new Euler(
0,
0,
laterialAngle
)
);
bone.matrix.multiply( offset );
bone.matrix.decompose( bone.position, bone.quaternion, bone.scale );
bone.updateMatrixWorld();
offsets[ name ] = offset;
}
}
return offsets;
}
static renameBones( skeleton, names ) {
const bones = this.getBones( skeleton );
for ( let i = 0; i < bones.length; ++ i ) {
const bone = bones[ i ];
if ( names[ bone.name ] ) {
bone.name = names[ bone.name ];
}
}
return this;
}
static getBones( skeleton ) {
return Array.isArray( skeleton ) ? skeleton : skeleton.bones;
}
static getBoneByName( name, skeleton ) {
for ( let i = 0, bones = this.getBones( skeleton ); i < bones.length; i ++ ) {
if ( name === bones[ i ].name )
return bones[ i ];
}
}
static getNearestBone( bone, names ) {
while ( bone.isBone ) {
if ( names.indexOf( bone.name ) !== - 1 ) {
return bone;
}
bone = bone.parent;
}
}
static findBoneTrackData( name, tracks ) {
const regexp = /\[(.*)\]\.(.*)/,
result = { name: name };
for ( let i = 0; i < tracks.length; ++ i ) {
// 1 is track name
// 2 is track type
const trackData = regexp.exec( tracks[ i ].name );
if ( trackData && name === trackData[ 1 ] ) {
result[ trackData[ 2 ] ] = i;
}
}
return result;
}
static getEqualsBonesNames( skeleton, targetSkeleton ) {
const sourceBones = this.getBones( skeleton ),
targetBones = this.getBones( targetSkeleton ),
bones = [];
search : for ( let i = 0; i < sourceBones.length; i ++ ) {
const boneName = sourceBones[ i ].name;
for ( let j = 0; j < targetBones.length; j ++ ) {
if ( boneName === targetBones[ j ].name ) {
bones.push( boneName );
continue search;
}
}
}
return bones;
}
static clone( source ) {
const sourceLookup = new Map();
const cloneLookup = new Map();
const clone = source.clone();
parallelTraverse( source, clone, function ( sourceNode, clonedNode ) {
sourceLookup.set( clonedNode, sourceNode );
cloneLookup.set( sourceNode, clonedNode );
} );
clone.traverse( function ( node ) {
if ( ! node.isSkinnedMesh ) return;
const clonedMesh = node;
const sourceMesh = sourceLookup.get( node );
const sourceBones = sourceMesh.skeleton.bones;
clonedMesh.skeleton = sourceMesh.skeleton.clone();
clonedMesh.bindMatrix.copy( sourceMesh.bindMatrix );
clonedMesh.skeleton.bones = sourceBones.map( function ( bone ) {
return cloneLookup.get( bone );
} );
clonedMesh.bind( clonedMesh.skeleton, clonedMesh.bindMatrix );
} );
return clone;
}
}
function parallelTraverse( a, b, callback ) {
callback( a, b );
for ( let i = 0; i < a.children.length; i ++ ) {
parallelTraverse( a.children[ i ], b.children[ i ], callback );
}
}
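/*
* Usage sketch (illustrative only): cloning a skinned scene with
* SkeletonUtils.clone so the copy gets its own bone hierarchy and rebound
* skeletons, which a plain Object3D.clone() would not provide. `gltf` stands
* for a three.js GLTFLoader result.
*/
function exampleCloneSkinnedScene( gltf ) {
const copy = SkeletonUtils.clone( gltf.scene );
return copy; // independent scene graph; SkinnedMeshes are re-bound to cloned bones
}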
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $prepared = Symbol('prepared');
const $prepare = Symbol('prepare');
const $preparedGLTF = Symbol('preparedGLTF');
const $clone = Symbol('clone');
/**
* Represents the preparation and enhancement of the output of a Three.js
* GLTFLoader (a Three.js-flavor "GLTF"), to make it suitable for optimal,
* correct viewing in a given presentation context and also make the cloning
* process more explicit and legible.
*
* A GLTFInstance is API-compatible with a Three.js-flavor "GLTF", so it should
* be considered to be interchangeable with the loaded result of a GLTFLoader.
*
* This basic implementation performs only trivial preparation and enhancement
* of a GLTF; inheriting classes are expected to extend these operations.
*/
class GLTFInstance {
constructor(preparedGLTF) {
this[$preparedGLTF] = preparedGLTF;
}
/**
* Prepares a given GLTF for presentation and future cloning. A GLTF that is
* prepared can safely have this method invoked on it multiple times; it will
* only be prepared once, including after being cloned.
*/
static prepare(source) {
if (source.scene == null) {
throw new Error('Model does not have a scene');
}
if (source[$prepared]) {
return source;
}
const prepared = this[$prepare](source);
// NOTE: ES5 Symbol polyfill is not compatible with spread operator
// so {...prepared, [$prepared]: true} does not work
prepared[$prepared] = true;
return prepared;
}
/**
* Override in an inheriting class to apply specialty one-time preparations
* for a given input GLTF.
*/
static [$prepare](source) {
// TODO(#195,#1003): We don't currently support multiple scenes, so we don't
// bother preparing extra scenes for now:
const { scene } = source;
const scenes = [scene];
return Object.assign(Object.assign({}, source), { scene, scenes });
}
get parser() {
return this[$preparedGLTF].parser;
}
get animations() {
return this[$preparedGLTF].animations;
}
get scene() {
return this[$preparedGLTF].scene;
}
get scenes() {
return this[$preparedGLTF].scenes;
}
get cameras() {
return this[$preparedGLTF].cameras;
}
get asset() {
return this[$preparedGLTF].asset;
}
get userData() {
return this[$preparedGLTF].userData;
}
/**
* Creates and returns a copy of this instance.
*/
clone() {
const GLTFInstanceConstructor = this.constructor;
const clonedGLTF = this[$clone]();
return new GLTFInstanceConstructor(clonedGLTF);
}
/**
* Cleans up any retained memory that might not otherwise be released when
* this instance is done being used.
*/
dispose() {
this.scenes.forEach((scene) => {
scene.traverse((object) => {
if (!object.isMesh) {
return;
}
const mesh = object;
const materials = Array.isArray(mesh.material) ? mesh.material : [mesh.material];
materials.forEach(material => {
material.dispose();
});
mesh.geometry.dispose();
});
});
}
/**
* Override in an inheriting class to implement specialized cloning strategies
*/
[$clone]() {
const source = this[$preparedGLTF];
// TODO(#195,#1003): We don't currently support multiple scenes, so we don't
// bother cloning extra scenes for now:
const scene = SkeletonUtils.clone(this.scene);
const scenes = [scene];
const userData = source.userData ? Object.assign({}, source.userData) : {};
return Object.assign(Object.assign({}, source), { scene, scenes, userData });
}
}
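/*
* Usage sketch (illustrative only): the prepare/clone lifecycle of a
* GLTFInstance. `threeGLTF` stands for a raw three.js GLTFLoader result; the
* ModelViewerGLTFInstance subclass defined later in this file is what
* <model-viewer> actually uses.
*/
function exampleGLTFInstanceLifecycle( threeGLTF ) {
// prepare() is idempotent: an already-prepared GLTF is returned unchanged.
const prepared = GLTFInstance.prepare( threeGLTF );
const instance = new GLTFInstance( prepared );
// clone() yields an API-compatible copy. In this base class the cloned scene
// still shares geometry and materials with the original; dispose() releases
// an instance's geometries and materials when it is no longer needed.
const copy = instance.clone();
return { instance, copy };
}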
/**
* @license MIT
* @see https://github.com/mrdoob/three.js/blob/dev/LICENSE
*/
const alphaChunk = /* glsl */ `
#ifdef ALPHATEST
if ( diffuseColor.a < ALPHATEST ) discard;
diffuseColor.a = 1.0;
#endif
`;
const $threeGLTF = Symbol('threeGLTF');
const $gltf = Symbol('gltf');
const $gltfElementMap = Symbol('gltfElementMap');
const $threeObjectMap = Symbol('threeObjectMap');
const $parallelTraverseThreeScene = Symbol('parallelTraverseThreeScene');
const $correlateOriginalThreeGLTF = Symbol('correlateOriginalThreeGLTF');
const $correlateCloneThreeGLTF = Symbol('correlateCloneThreeGLTF');
/**
* The Three.js GLTFLoader provides us with an in-memory representation
* of a glTF in terms of Three.js constructs. It also provides us with a copy
* of the deserialized glTF without any Three.js decoration, and a mapping of
* glTF elements to their corresponding Three.js constructs.
*
* A CorrelatedSceneGraph exposes a synchronously available mapping of glTF
* element references to their corresponding Three.js constructs.
*/
class CorrelatedSceneGraph {
constructor(threeGLTF, gltf, threeObjectMap, gltfElementMap) {
this[$threeGLTF] = threeGLTF;
this[$gltf] = gltf;
this[$gltfElementMap] = gltfElementMap;
this[$threeObjectMap] = threeObjectMap;
}
/**
* Produce a CorrelatedSceneGraph from a naturally generated Three.js GLTF.
* Such GLTFs are produced by Three.js' GLTFLoader, and contain cached
* details that expedite the correlation step.
*
* If a CorrelatedSceneGraph is provided as the second argument, re-correlates
* a cloned Three.js GLTF with a clone of the glTF hierarchy used to produce
* the upstream Three.js GLTF that the clone was created from. The result
* CorrelatedSceneGraph is representative of the cloned hierarchy.
*/
static from(threeGLTF, upstreamCorrelatedSceneGraph) {
if (upstreamCorrelatedSceneGraph != null) {
return this[$correlateCloneThreeGLTF](threeGLTF, upstreamCorrelatedSceneGraph);
}
else {
return this[$correlateOriginalThreeGLTF](threeGLTF);
}
}
static [$correlateOriginalThreeGLTF](threeGLTF) {
const gltf = threeGLTF.parser.json;
const { associations } = threeGLTF.parser;
const gltfElementMap = new Map();
const defaultMaterial = { name: 'Default' };
const defaultReference = { type: 'materials', index: -1 };
// NOTE: IE11 does not have Map iterator methods
associations.forEach((gltfElementReference, threeObject) => {
// Note: GLTFLoader creates a "default" material that has no corresponding
// glTF element when the source glTF specifies no materials. In that case we
// append a default material entry so the material can still be addressed
// and operated upon.
if (gltfElementReference == null) {
if (defaultReference.index < 0) {
if (gltf.materials == null) {
gltf.materials = [];
}
defaultReference.index = gltf.materials.length;
gltf.materials.push(defaultMaterial);
}
gltfElementReference = defaultReference;
}
const { type, index } = gltfElementReference;
const elementArray = gltf[type] || [];
const gltfElement = elementArray[index];
if (gltfElement == null) {
// TODO: Maybe throw here...
return;
}
let threeObjects = gltfElementMap.get(gltfElement);
if (threeObjects == null) {
threeObjects = new Set();
gltfElementMap.set(gltfElement, threeObjects);
}
threeObjects.add(threeObject);
});
return new CorrelatedSceneGraph(threeGLTF, gltf, associations, gltfElementMap);
}
/**
* Transfers the association between a raw glTF and a Three.js scene graph
* to a clone of the Three.js scene graph, resolved as a new
* CorrelatedSceneGraph instance.
*/
static [$correlateCloneThreeGLTF](cloneThreeGLTF, upstreamCorrelatedSceneGraph) {
const originalThreeGLTF = upstreamCorrelatedSceneGraph.threeGLTF;
const originalGLTF = upstreamCorrelatedSceneGraph.gltf;
const cloneGLTF = JSON.parse(JSON.stringify(originalGLTF));
const cloneThreeObjectMap = new Map();
const cloneGLTFELementMap = new Map();
const defaultMaterial = { name: 'Default' };
const defaultReference = { type: 'materials', index: -1 };
for (let i = 0; i < originalThreeGLTF.scenes.length; i++) {
this[$parallelTraverseThreeScene](originalThreeGLTF.scenes[i], cloneThreeGLTF.scenes[i], (object, cloneObject) => {
let elementReference = upstreamCorrelatedSceneGraph.threeObjectMap.get(object);
if (elementReference == null) {
if (defaultReference.index < 0) {
if (cloneGLTF.materials == null) {
cloneGLTF.materials = [];
}
defaultReference.index = cloneGLTF.materials.length;
cloneGLTF.materials.push(defaultMaterial);
}
elementReference = defaultReference;
}
const { type, index } = elementReference;
const cloneElement = cloneGLTF[type][index];
cloneThreeObjectMap.set(cloneObject, { type, index });
const cloneObjects = cloneGLTFELementMap.get(cloneElement) || new Set();
cloneObjects.add(cloneObject);
cloneGLTFELementMap.set(cloneElement, cloneObjects);
});
}
return new CorrelatedSceneGraph(cloneThreeGLTF, cloneGLTF, cloneThreeObjectMap, cloneGLTFELementMap);
}
/**
* Traverses two presumably identical Three.js scenes, and invokes a callback
* for each Object3D or Material encountered, including the initial scene.
* Adapted from
* https://github.com/mrdoob/three.js/blob/7c1424c5819ab622a346dd630ee4e6431388021e/examples/jsm/utils/SkeletonUtils.js#L586-L596
*/
static [$parallelTraverseThreeScene](sceneOne, sceneTwo, callback) {
const isMesh = (object) => {
return object.isMesh;
};
const traverse = (a, b) => {
callback(a, b);
if (a.isObject3D) {
if (isMesh(a)) {
if (Array.isArray(a.material)) {
for (let i = 0; i < a.material.length; ++i) {
traverse(a.material[i], b.material[i]);
}
}
else {
traverse(a.material, b.material);
}
}
for (let i = 0; i < a.children.length; ++i) {
traverse(a.children[i], b.children[i]);
}
}
};
traverse(sceneOne, sceneTwo);
}
/**
* The source Three.js GLTF result given to us by a Three.js GLTFLoader.
*/
get threeGLTF() {
return this[$threeGLTF];
}
/**
* The in-memory deserialized source glTF.
*/
get gltf() {
return this[$gltf];
}
/**
* A Map of glTF element references to arrays of corresponding Three.js
* object references. Three.js objects are kept in arrays to account for
* cases where more than one Three.js object corresponds to a single glTF
* element.
*/
get gltfElementMap() {
return this[$gltfElementMap];
}
/**
* A map of individual Three.js objects to corresponding elements in the
* source glTF.
*/
get threeObjectMap() {
return this[$threeObjectMap];
}
loadVariant(variantIndex, onUpdate = () => { }) {
const updatedMaterials = new Set();
this.threeGLTF.scene.traverse(async (object) => {
const { gltfExtensions } = object.userData;
if (!object.isMesh || gltfExtensions == null) {
return;
}
const meshVariantData = gltfExtensions['KHR_materials_variants'];
if (meshVariantData == null) {
return;
}
let materialIndex = -1;
for (const mapping of meshVariantData.mappings) {
if (mapping.variants.indexOf(variantIndex) >= 0) {
materialIndex = mapping.material;
break;
}
}
if (materialIndex < 0) {
return;
}
const material = await this.threeGLTF.parser.getDependency('material', materialIndex);
updatedMaterials.add(materialIndex);
object.material = material;
this.threeGLTF.parser.assignFinalMaterial(object);
onUpdate();
const gltfElement = this.gltf.materials[materialIndex];
let threeObjects = this.gltfElementMap.get(gltfElement);
if (threeObjects == null) {
threeObjects = new Set();
this.gltfElementMap.set(gltfElement, threeObjects);
}
threeObjects.add(object.material);
});
return updatedMaterials;
}
}
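/*
* Usage sketch (illustrative only): building a CorrelatedSceneGraph from a
* GLTFLoader result and looking up the three.js objects that realize a given
* glTF material. `threeGLTF` stands for the loader's output, whose parser
* supplies the json and associations consumed above.
*/
function exampleLookupMaterialObjects( threeGLTF ) {
const correlated = CorrelatedSceneGraph.from( threeGLTF );
// materials may be undefined for assets that declare none; the correlation
// step above appends a default entry only when needed.
const firstMaterialDef = correlated.gltf.materials && correlated.gltf.materials[ 0 ];
// Returns a Set of the three.js constructs generated for that glTF element.
return firstMaterialDef ? correlated.gltfElementMap.get( firstMaterialDef ) : undefined;
}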
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $cloneAndPatchMaterial = Symbol('cloneAndPatchMaterial');
const $correlatedSceneGraph = Symbol('correlatedSceneGraph');
/**
* This specialization of GLTFInstance collects all of the processing needed
* to prepare a model and to clone it making special considerations for
* <model-viewer> use cases.
*/
class ModelViewerGLTFInstance extends GLTFInstance {
/**
* @override
*/
static [$prepare](source) {
const prepared = super[$prepare](source);
if (prepared[$correlatedSceneGraph] == null) {
prepared[$correlatedSceneGraph] = CorrelatedSceneGraph.from(prepared);
}
const { scene } = prepared;
const meshesToDuplicate = [];
scene.traverse((node) => {
// Set a high renderOrder while we're here to ensure the model
// always renders on top of the skysphere
node.renderOrder = 1000;
// Three.js seems to cull some animated models incorrectly. Since we
// expect to view our whole scene anyway, we turn off the frustum
// culling optimization here.
node.frustumCulled = false;
// Animations for objects without names target their UUID instead. When
// objects are cloned, they get new UUIDs which the animation can't
// find. To fix this, we assign their UUID as their name.
if (!node.name) {
node.name = node.uuid;
}
if (!node.isMesh) {
return;
}
node.castShadow = true;
const mesh = node;
let transparent = false;
const materials = Array.isArray(mesh.material) ? mesh.material : [mesh.material];
materials.forEach(material => {
if (material.isMeshStandardMaterial) {
if (material.transparent && material.side === DoubleSide) {
transparent = true;
material.side = FrontSide;
}
}
});
if (transparent) {
meshesToDuplicate.push(mesh);
}
});
// We duplicate transparent, double-sided meshes and render the back face
// before the front face. This creates perfect triangle sorting for all
// convex meshes. Sorting artifacts can still appear when you can see
// through more than two layers of a given mesh, but this can usually be
// mitigated by the author splitting the mesh into mostly convex regions.
// The performance cost is not too great as the same shader is reused and
// the same number of fragments are processed; only the vertex shader is run
// twice. @see https://threejs.org/examples/webgl_materials_physical_transparency.html
for (const mesh of meshesToDuplicate) {
const materials = Array.isArray(mesh.material) ? mesh.material : [mesh.material];
const duplicateMaterials = materials.map((material) => {
const backMaterial = material.clone();
backMaterial.side = BackSide;
return backMaterial;
});
const duplicateMaterial = Array.isArray(mesh.material) ?
duplicateMaterials :
duplicateMaterials[0];
const meshBack = mesh.clone();
meshBack.material = duplicateMaterial;
meshBack.renderOrder = -1;
mesh.parent.add(meshBack);
}
return prepared;
}
get correlatedSceneGraph() {
return this[$preparedGLTF][$correlatedSceneGraph];
}
/**
* @override
*/
[$clone]() {
const clone = super[$clone]();
const sourceUUIDToClonedMaterial = new Map();
clone.scene.traverse((node) => {
// Materials aren't cloned when cloning meshes; geometry and
// materials are copied by reference. Cloning the materials
// explicitly here is necessary so that the same model can be
// used twice with different environment maps.
if (node.isMesh) {
const mesh = node;
if (Array.isArray(mesh.material)) {
mesh.material = mesh.material.map((material) => this[$cloneAndPatchMaterial](material, sourceUUIDToClonedMaterial));
}
else if (mesh.material != null) {
mesh.material = this[$cloneAndPatchMaterial](mesh.material, sourceUUIDToClonedMaterial);
}
}
});
// Cross-correlate the scene graph by relying on information in the
// current scene graph; without this step, relationships between the
// Three.js object graph and the glTF scene graph will be lost.
clone[$correlatedSceneGraph] =
CorrelatedSceneGraph.from(clone, this.correlatedSceneGraph);
return clone;
}
/**
* Creates a clone of the given material, and applies a patch to the
* shader program.
*/
[$cloneAndPatchMaterial](material, sourceUUIDToClonedMaterial) {
// If we already cloned this material (determined by tracking the UUID of
// source materials that have been cloned), then return that previously
// cloned instance:
if (sourceUUIDToClonedMaterial.has(material.uuid)) {
return sourceUUIDToClonedMaterial.get(material.uuid);
}
const clone = material.clone();
if (material.map != null) {
clone.map = material.map.clone();
clone.map.needsUpdate = true;
}
if (material.normalMap != null) {
clone.normalMap = material.normalMap.clone();
clone.normalMap.needsUpdate = true;
}
if (material.emissiveMap != null) {
clone.emissiveMap = material.emissiveMap.clone();
clone.emissiveMap.needsUpdate = true;
}
// Clones the roughnessMap if it exists.
let roughnessMap = null;
if (material.roughnessMap != null) {
roughnessMap = material.roughnessMap.clone();
}
// Assigns the roughnessMap to the cloned material and generates mipmaps.
if (roughnessMap != null) {
roughnessMap.needsUpdate = true;
clone.roughnessMap = roughnessMap;
// Generates mipmaps from the clone of the roughnessMap.
const { threeRenderer, roughnessMipmapper } = Renderer.singleton;
// XR must be disabled while doing offscreen rendering or it will
// clobber the camera.
const { enabled } = threeRenderer.xr;
threeRenderer.xr.enabled = false;
roughnessMipmapper.generateMipmaps(clone);
threeRenderer.xr.enabled = enabled;
}
// Checks if roughnessMap and metalnessMap share the same texture and
// either clones or assigns.
if (material.roughnessMap === material.metalnessMap) {
clone.metalnessMap = roughnessMap;
}
else if (material.metalnessMap != null) {
clone.metalnessMap = material.metalnessMap.clone();
clone.metalnessMap.needsUpdate = true;
}
// Checks if roughnessMap and aoMap share the same texture and
// either clones or assigns.
if (material.roughnessMap === material.aoMap) {
clone.aoMap = roughnessMap;
}
else if (material.aoMap != null) {
clone.aoMap = material.aoMap.clone();
clone.aoMap.needsUpdate = true;
}
// This allows us to patch three's materials, on top of patches already
// made, for instance GLTFLoader patches SpecularGlossiness materials.
// Unfortunately, three's program cache differentiates SpecGloss materials
// via onBeforeCompile.toString(), so these two functions do the same
// thing but look different in order to force a proper recompile.
const oldOnBeforeCompile = material.onBeforeCompile;
clone.onBeforeCompile = material.isGLTFSpecularGlossinessMaterial ?
(shader) => {
oldOnBeforeCompile(shader, undefined);
shader.fragmentShader = shader.fragmentShader.replace('#include <alphatest_fragment>', alphaChunk);
} :
(shader) => {
shader.fragmentShader = shader.fragmentShader.replace('#include <alphatest_fragment>', alphaChunk);
oldOnBeforeCompile(shader, undefined);
};
// This makes shadows better for non-manifold meshes
clone.shadowSide = FrontSide;
// This improves transparent rendering and can be removed whenever
// https://github.com/mrdoob/three.js/pull/18235 finally lands.
if (clone.transparent) {
clone.depthWrite = false;
}
// This little hack ignores alpha for opaque materials, in order to comply
// with the glTF spec.
if (!clone.alphaTest && !clone.transparent) {
clone.alphaTest = -0.5;
}
sourceUUIDToClonedMaterial.set(material.uuid, clone);
return clone;
}
}
// https://github.com/mrdoob/three.js/issues/5552
// http://en.wikipedia.org/wiki/RGBE_image_format
class RGBELoader extends DataTextureLoader {
constructor( manager ) {
super( manager );
this.type = UnsignedByteType;
}
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
parse( buffer ) {
const
/* return codes for rgbe routines */
//RGBE_RETURN_SUCCESS = 0,
RGBE_RETURN_FAILURE = - 1,
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: console.error( 'THREE.RGBELoader Read Error: ' + ( msg || '' ) );
break;
case rgbe_write_error: console.error( 'THREE.RGBELoader Write Error: ' + ( msg || '' ) );
break;
case rgbe_format_error: console.error( 'THREE.RGBELoader Bad File Format: ' + ( msg || '' ) );
break;
default:
case rgbe_memory_error: console.error( 'THREE.RGBELoader: Error: ' + ( msg || '' ) );
}
return RGBE_RETURN_FAILURE;
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = '\n',
fgets = function ( buffer, lineLimit, consume ) {
const chunkSize = 128;
lineLimit = ! lineLimit ? 1024 : lineLimit;
let p = buffer.pos,
i = - 1, len = 0, s = '',
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
// regexes to parse header info fields
const magic_token_re = /^#\?(\S+)/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
let line, match;
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
return rgbe_error( rgbe_read_error, 'no header found' );
}
/* require the RGBE magic token ("#?...") at the start of the header */
if ( ! ( match = line.match( magic_token_re ) ) ) {
return rgbe_error( rgbe_format_error, 'bad initial token' );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + '\n';
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + '\n';
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + '\n';
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
return rgbe_error( rgbe_format_error, 'missing format specifier' );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
return rgbe_error( rgbe_format_error, 'missing image size specifier' );
}
return header;
},
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
const scanline_width = w;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
return rgbe_error( rgbe_format_error, 'wrong scanline width' );
}
const data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba.length ) {
return rgbe_error( rgbe_memory_error, 'unable to allocate buffer space' );
}
let offset = 0, pos = 0;
const ptr_end = 4 * scanline_width;
const rgbeStart = new Uint8Array( 4 );
const scanline_buffer = new Uint8Array( ptr_end );
let num_scanlines = h;
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
return rgbe_error( rgbe_read_error );
}
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
return rgbe_error( rgbe_format_error, 'bad rgbe scanline format' );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
let ptr = 0, count;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
const isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
return rgbe_error( rgbe_format_error, 'bad scanline data' );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
const byteValue = buffer[ pos ++ ];
for ( let i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
const l = scanline_width; //scanline_buffer.byteLength;
for ( let i = 0; i < l; i ++ ) {
let off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
const RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = sourceArray[ sourceOffset + 0 ] * scale;
destArray[ destOffset + 1 ] = sourceArray[ sourceOffset + 1 ] * scale;
destArray[ destOffset + 2 ] = sourceArray[ sourceOffset + 2 ] * scale;
};
const RGBEByteToRGBHalf = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = DataUtils.toHalfFloat( sourceArray[ sourceOffset + 0 ] * scale );
destArray[ destOffset + 1 ] = DataUtils.toHalfFloat( sourceArray[ sourceOffset + 1 ] * scale );
destArray[ destOffset + 2 ] = DataUtils.toHalfFloat( sourceArray[ sourceOffset + 2 ] * scale );
};
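// Worked example of the shared-exponent decode above (an illustrative sketch,
// not part of the loader): an RGBE pixel stores one byte per channel plus an
// exponent byte, and each channel decodes to byte * 2^(e - 128) / 255.
//
//   const pixel = [128, 64, 32, 136];           // r, g, b, e (hypothetical values)
//   const scale = Math.pow(2, 136 - 128) / 255; // 256 / 255 ≈ 1.0039
//   const rgb = [128 * scale, 64 * scale, 32 * scale]; // ≈ [128.5, 64.25, 32.13]
//
// RGBEByteToRGBHalf performs the same computation but packs the result as
// half floats via DataUtils.toHalfFloat.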
const byteArray = new Uint8Array( buffer );
byteArray.pos = 0;
const rgbe_header_info = RGBE_ReadHeader( byteArray );
if ( RGBE_RETURN_FAILURE !== rgbe_header_info ) {
const w = rgbe_header_info.width,
h = rgbe_header_info.height,
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
if ( RGBE_RETURN_FAILURE !== image_rgba_data ) {
let data, format, type;
let numElements;
switch ( this.type ) {
case UnsignedByteType:
data = image_rgba_data;
format = RGBEFormat; // handled as THREE.RGBAFormat in shaders
type = UnsignedByteType;
break;
case FloatType:
numElements = ( image_rgba_data.length / 4 ) * 3;
const floatArray = new Float32Array( numElements );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 3 );
}
data = floatArray;
format = RGBFormat;
type = FloatType;
break;
case HalfFloatType:
numElements = ( image_rgba_data.length / 4 ) * 3;
const halfArray = new Uint16Array( numElements );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 3 );
}
data = halfArray;
format = RGBFormat;
type = HalfFloatType;
break;
default:
console.error( 'THREE.RGBELoader: unsupported type: ', this.type );
break;
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
format: format,
type: type
};
}
}
return null;
}
setDataType( value ) {
this.type = value;
return this;
}
load( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case UnsignedByteType:
texture.encoding = RGBEEncoding;
texture.minFilter = NearestFilter;
texture.magFilter = NearestFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case FloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case HalfFloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return super.load( url, onLoadCallback, onProgress, onError );
}
}
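// Minimal usage sketch for the loader above (the URL is hypothetical):
//
//   const loader = new RGBELoader().setDataType(HalfFloatType);
//   loader.load('environment.hdr', (texture, texData) => {
//     // For HalfFloatType, the texture is configured with linear encoding
//     // and linear filtering by the onLoadCallback above.
//   });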
/* @license
* Copyright 2021 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
class EnvironmentScene extends Scene {
constructor() {
super();
this.position.y = -3.5;
const geometry = new BoxGeometry();
geometry.deleteAttribute('uv');
const roomMaterial = new MeshStandardMaterial({ metalness: 0, side: BackSide });
const boxMaterial = new MeshStandardMaterial({ metalness: 0 });
const mainLight = new PointLight(0xffffff, 500.0, 28, 2);
mainLight.position.set(0.418, 16.199, 0.300);
this.add(mainLight);
const room = new Mesh(geometry, roomMaterial);
room.position.set(-0.757, 13.219, 0.717);
room.scale.set(31.713, 28.305, 28.591);
this.add(room);
const box1 = new Mesh(geometry, boxMaterial);
box1.position.set(-10.906, 2.009, 1.846);
box1.rotation.set(0, -0.195, 0);
box1.scale.set(2.328, 7.905, 4.651);
this.add(box1);
const box2 = new Mesh(geometry, boxMaterial);
box2.position.set(-5.607, -0.754, -0.758);
box2.rotation.set(0, 0.994, 0);
box2.scale.set(1.970, 1.534, 3.955);
this.add(box2);
const box3 = new Mesh(geometry, boxMaterial);
box3.position.set(6.167, 0.857, 7.803);
box3.rotation.set(0, 0.561, 0);
box3.scale.set(3.927, 6.285, 3.687);
this.add(box3);
const box4 = new Mesh(geometry, boxMaterial);
box4.position.set(-2.017, 0.018, 6.124);
box4.rotation.set(0, 0.333, 0);
box4.scale.set(2.002, 4.566, 2.064);
this.add(box4);
const box5 = new Mesh(geometry, boxMaterial);
box5.position.set(2.291, -0.756, -2.621);
box5.rotation.set(0, -0.286, 0);
box5.scale.set(1.546, 1.552, 1.496);
this.add(box5);
const box6 = new Mesh(geometry, boxMaterial);
box6.position.set(-2.193, -0.369, -5.547);
box6.rotation.set(0, 0.516, 0);
box6.scale.set(3.875, 3.487, 2.986);
this.add(box6);
// -x right
const light1 = new Mesh(geometry, this.createAreaLightMaterial(50));
light1.position.set(-16.116, 14.37, 8.208);
light1.scale.set(0.1, 2.428, 2.739);
this.add(light1);
// -x left
const light2 = new Mesh(geometry, this.createAreaLightMaterial(50));
light2.position.set(-16.109, 18.021, -8.207);
light2.scale.set(0.1, 2.425, 2.751);
this.add(light2);
// +x
const light3 = new Mesh(geometry, this.createAreaLightMaterial(17));
light3.position.set(14.904, 12.198, -1.832);
light3.scale.set(0.15, 4.265, 6.331);
this.add(light3);
// +z
const light4 = new Mesh(geometry, this.createAreaLightMaterial(43));
light4.position.set(-0.462, 8.89, 14.520);
light4.scale.set(4.38, 5.441, 0.088);
this.add(light4);
// -z
const light5 = new Mesh(geometry, this.createAreaLightMaterial(20));
light5.position.set(3.235, 11.486, -12.541);
light5.scale.set(2.5, 2.0, 0.1);
this.add(light5);
// +y
const light6 = new Mesh(geometry, this.createAreaLightMaterial(100));
light6.position.set(0.0, 20.0, 0.0);
light6.scale.set(1.0, 0.1, 1.0);
this.add(light6);
}
createAreaLightMaterial(intensity) {
const material = new MeshBasicMaterial();
material.color.setScalar(intensity);
return material;
}
}
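// The "area lights" in this scene are not Three.js light objects: they are
// unlit boxes whose MeshBasicMaterial color is scaled well above 1.0, so they
// read as bright emissive panels once the scene is prefiltered into an
// environment map by the PMREMGenerator. A minimal sketch of the same trick:
//
//   const panelMaterial = new MeshBasicMaterial();
//   panelMaterial.color.setScalar(50);              // HDR-bright white panel
//   const panel = new Mesh(new BoxGeometry(), panelMaterial);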
/* @license
* Copyright 2021 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
class EnvironmentSceneAlt extends Scene {
constructor() {
super();
this.position.y = -3.5;
const geometry = new BoxGeometry();
geometry.deleteAttribute('uv');
const roomMaterial = new MeshStandardMaterial({ metalness: 0, side: BackSide });
const boxMaterial = new MeshStandardMaterial({ metalness: 0 });
const mainLight = new PointLight(0xffffff, 400.0, 28, 2);
mainLight.position.set(0.5, 14.0, 0.5);
this.add(mainLight);
const room = new Mesh(geometry, roomMaterial);
room.position.set(0.0, 13.2, 0.0);
room.scale.set(31.5, 28.5, 31.5);
this.add(room);
const box1 = new Mesh(geometry, boxMaterial);
box1.position.set(-10.906, -1.0, 1.846);
box1.rotation.set(0, -0.195, 0);
box1.scale.set(2.328, 7.905, 4.651);
this.add(box1);
const box2 = new Mesh(geometry, boxMaterial);
box2.position.set(-5.607, -0.754, -0.758);
box2.rotation.set(0, 0.994, 0);
box2.scale.set(1.970, 1.534, 3.955);
this.add(box2);
const box3 = new Mesh(geometry, boxMaterial);
box3.position.set(6.167, -0.16, 7.803);
box3.rotation.set(0, 0.561, 0);
box3.scale.set(3.927, 6.285, 3.687);
this.add(box3);
const box4 = new Mesh(geometry, boxMaterial);
box4.position.set(-2.017, 0.018, 6.124);
box4.rotation.set(0, 0.333, 0);
box4.scale.set(2.002, 4.566, 2.064);
this.add(box4);
const box5 = new Mesh(geometry, boxMaterial);
box5.position.set(2.291, -0.756, -2.621);
box5.rotation.set(0, -0.286, 0);
box5.scale.set(1.546, 1.552, 1.496);
this.add(box5);
const box6 = new Mesh(geometry, boxMaterial);
box6.position.set(-2.193, -0.369, -5.547);
box6.rotation.set(0, 0.516, 0);
box6.scale.set(3.875, 3.487, 2.986);
this.add(box6);
// -x_left
const light1 = new Mesh(geometry, this.createAreaLightMaterial(80));
light1.position.set(-14.0, 10.0, 8.0);
light1.scale.set(0.1, 2.5, 2.5);
this.add(light1);
// -x_right
const light2 = new Mesh(geometry, this.createAreaLightMaterial(80));
light2.position.set(-14.0, 14.0, -4.0);
light2.scale.set(0.1, 2.5, 2.5);
this.add(light2);
// +x (only one light)
const light3 = new Mesh(geometry, this.createAreaLightMaterial(23));
light3.position.set(14.0, 12.0, 0.0);
light3.scale.set(0.1, 5.0, 5.0);
this.add(light3);
// +z
const light4 = new Mesh(geometry, this.createAreaLightMaterial(16));
light4.position.set(0.0, 9.0, 14.0);
light4.scale.set(5.0, 5.0, 0.1);
this.add(light4);
// -z right
const light5 = new Mesh(geometry, this.createAreaLightMaterial(80));
light5.position.set(7.0, 8.0, -14.0);
light5.scale.set(2.5, 2.5, 0.1);
this.add(light5);
// -z left
const light6 = new Mesh(geometry, this.createAreaLightMaterial(80));
light6.position.set(-7.0, 16.0, -14.0);
light6.scale.set(2.5, 2.5, 0.1);
this.add(light6);
// +y
const light7 = new Mesh(geometry, this.createAreaLightMaterial(1));
light7.position.set(0.0, 20.0, 0.0);
light7.scale.set(0.1, 0.1, 0.1);
this.add(light7);
}
createAreaLightMaterial(intensity) {
const material = new MeshBasicMaterial();
material.color.setScalar(intensity);
return material;
}
}
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const GENERATED_SIGMA = 0.04;
const HDR_FILE_RE = /\.hdr(\.js)?$/;
const ldrLoader = new TextureLoader();
const hdrLoader = new RGBELoader();
// Attach a `userData` object for arbitrary data on textures that
// originate from TextureUtils, similar to Object3D's userData. This
// helps with debugging, provides metadata for tests, and semantically
// describes the type of texture within the context of this application.
const userData = {
url: null,
};
class TextureUtils extends EventDispatcher {
constructor(threeRenderer) {
super();
this.generatedEnvironmentMap = null;
this.generatedEnvironmentMapAlt = null;
this.skyboxCache = new Map();
this.environmentMapCache = new Map();
this.PMREMGenerator = new PMREMGenerator(threeRenderer);
}
async load(url, progressCallback = () => { }) {
try {
const isHDR = HDR_FILE_RE.test(url);
const loader = isHDR ? hdrLoader : ldrLoader;
const texture = await new Promise((resolve, reject) => loader.load(url, resolve, (event) => {
progressCallback(event.loaded / event.total * 0.9);
}, reject));
progressCallback(1.0);
this.addMetadata(texture, url);
texture.mapping = EquirectangularReflectionMapping;
if (isHDR) {
texture.encoding = RGBEEncoding;
texture.minFilter = NearestFilter;
texture.magFilter = NearestFilter;
texture.flipY = true;
}
else {
texture.encoding = GammaEncoding;
}
return texture;
}
finally {
if (progressCallback) {
progressCallback(1);
}
}
}
/**
* Returns a { skybox, environmentMap } object. `skybox` is an equirectangular
* Texture (or null when no skybox URL is given), and `environmentMap` is a
* render target produced by the PMREMGenerator.
*/
async generateEnvironmentMapAndSkybox(skyboxUrl = null, environmentMap = null, options = {}) {
const { progressTracker } = options;
const updateGenerationProgress = progressTracker != null ? progressTracker.beginActivity() : () => { };
const useAltEnvironment = environmentMap === 'neutral';
if (useAltEnvironment === true) {
environmentMap = null;
}
const environmentMapUrl = deserializeUrl(environmentMap);
try {
let skyboxLoads = Promise.resolve(null);
let environmentMapLoads;
// If we have a skybox URL, attempt to load it as a cubemap
if (!!skyboxUrl) {
skyboxLoads = this.loadSkyboxFromUrl(skyboxUrl, progressTracker);
}
if (!!environmentMapUrl) {
// We have an available environment map URL
environmentMapLoads =
this.loadEnvironmentMapFromUrl(environmentMapUrl, progressTracker);
}
else if (!!skyboxUrl) {
// Fallback to deriving the environment map from an available skybox
environmentMapLoads =
this.loadEnvironmentMapFromUrl(skyboxUrl, progressTracker);
}
else {
// Fallback to generating the environment map
environmentMapLoads = useAltEnvironment === true ?
this.loadGeneratedEnvironmentMapAlt() :
this.loadGeneratedEnvironmentMap();
}
let [environmentMap, skybox] = await Promise.all([environmentMapLoads, skyboxLoads]);
if (environmentMap == null) {
throw new Error('Failed to load environment map.');
}
return { environmentMap, skybox };
}
finally {
updateGenerationProgress(1.0);
}
}
addMetadata(texture, url) {
if (texture == null) {
return;
}
texture.userData = Object.assign(Object.assign({}, userData), ({
url: url,
}));
}
/**
* Loads an equirect Texture from a given URL, for use as a skybox.
*/
loadSkyboxFromUrl(url, progressTracker) {
if (!this.skyboxCache.has(url)) {
const progressCallback = progressTracker ? progressTracker.beginActivity() : () => { };
const skyboxMapLoads = this.load(url, progressCallback);
this.skyboxCache.set(url, skyboxMapLoads);
}
return this.skyboxCache.get(url);
}
/**
* Loads a WebGLRenderTarget from a given URL. The render target in this
* case will be assumed to be used as an environment map.
*/
loadEnvironmentMapFromUrl(url, progressTracker) {
if (!this.environmentMapCache.has(url)) {
const environmentMapLoads = this.loadSkyboxFromUrl(url, progressTracker).then((equirect) => {
const cubeUV = this.PMREMGenerator.fromEquirectangular(equirect);
this.addMetadata(cubeUV.texture, url);
return cubeUV;
});
this.PMREMGenerator.compileEquirectangularShader();
this.environmentMapCache.set(url, environmentMapLoads);
}
return this.environmentMapCache.get(url);
}
/**
* Loads a dynamically generated environment map.
*/
loadGeneratedEnvironmentMap() {
if (this.generatedEnvironmentMap == null) {
const defaultScene = new EnvironmentScene;
this.generatedEnvironmentMap =
this.PMREMGenerator.fromScene(defaultScene, GENERATED_SIGMA);
this.addMetadata(this.generatedEnvironmentMap.texture, null);
}
return Promise.resolve(this.generatedEnvironmentMap);
}
/**
* Loads a dynamically generated environment map, designed to be neutral and
* color-preserving. Shows less contrast around the different sides of the
* object.
*/
loadGeneratedEnvironmentMapAlt() {
if (this.generatedEnvironmentMapAlt == null) {
const defaultScene = new EnvironmentSceneAlt;
this.generatedEnvironmentMapAlt =
this.PMREMGenerator.fromScene(defaultScene, GENERATED_SIGMA);
this.addMetadata(this.generatedEnvironmentMapAlt.texture, null);
}
return Promise.resolve(this.generatedEnvironmentMapAlt);
}
async dispose() {
const allTargetsLoad = [];
// NOTE(cdata): We would use for-of iteration on the maps here, but
// IE11 doesn't have the necessary iterator-returning methods. So,
// disposal of these render targets is kind of convoluted as a result.
this.environmentMapCache.forEach((targetLoads) => {
allTargetsLoad.push(targetLoads);
});
this.environmentMapCache.clear();
for (const targetLoads of allTargetsLoad) {
try {
const target = await targetLoads;
target.dispose();
}
catch (e) {
// Suppress errors, so that all render targets will be disposed
}
}
if (this.generatedEnvironmentMap != null) {
this.generatedEnvironmentMap.dispose();
this.generatedEnvironmentMap = null;
}
if (this.generatedEnvironmentMapAlt != null) {
this.generatedEnvironmentMapAlt.dispose();
this.generatedEnvironmentMapAlt = null;
}
}
}
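// Usage sketch for TextureUtils; the renderer is assumed to exist and the HDR
// URL is hypothetical:
//
//   const textureUtils = new TextureUtils(threeRenderer);
//   // Generated, neutral lighting with no skybox:
//   const { environmentMap } =
//       await textureUtils.generateEnvironmentMapAndSkybox(null, 'neutral');
//   // Or derive both the skybox and the environment map from one HDR file:
//   const { environmentMap: env, skybox } =
//       await textureUtils.generateEnvironmentMapAndSkybox('./studio.hdr');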
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Between 0 and 1: larger means the average responds faster and is less smooth.
const DURATION_DECAY = 0.2;
const LOW_FRAME_DURATION_MS = 18;
const HIGH_FRAME_DURATION_MS = 26;
const MAX_AVG_CHANGE_MS = 2;
const SCALE_STEPS = [1, 0.79, 0.62, 0.5, 0.4, 0.31, 0.25];
const DEFAULT_LAST_STEP = 3;
/**
* Registers canvases with Canvas2DRenderingContexts and renders them
* all in the same WebGLRenderingContext, spitting out textures to apply
* to the canvases. Creates a fullscreen WebGL canvas that is not added
* to the DOM, and on each frame, renders each registered canvas on a portion
* of the WebGL canvas, and applies the texture on the registered canvas.
*
* In the future, ImageBitmapRenderingContext could be used instead of
* Canvas2DRenderingContext, if supported, for cheaper transferring of
* the texture.
*/
class Renderer extends EventDispatcher {
constructor(options) {
super();
this.loader = new CachingGLTFLoader(ModelViewerGLTFInstance);
this.width = 0;
this.height = 0;
this.dpr = 1;
this.debugger = null;
this.scenes = new Set();
this.multipleScenesVisible = false;
this.scaleStep = 0;
this.lastStep = DEFAULT_LAST_STEP;
this.avgFrameDuration = (HIGH_FRAME_DURATION_MS + LOW_FRAME_DURATION_MS) / 2;
this.onWebGLContextLost = (event) => {
this.dispatchEvent({ type: 'contextlost', sourceEvent: event });
};
this.dpr = resolveDpr();
this.canvasElement = document.createElement('canvas');
this.canvasElement.id = 'webgl-canvas';
this.canvas3D = this.canvasElement;
this.canvas3D.addEventListener('webglcontextlost', this.onWebGLContextLost);
try {
this.threeRenderer = new WebGL1Renderer({
canvas: this.canvas3D,
alpha: true,
antialias: true,
powerPreference: 'high-performance',
preserveDrawingBuffer: true
});
this.threeRenderer.autoClear = true;
this.threeRenderer.outputEncoding = GammaEncoding;
this.threeRenderer.physicallyCorrectLights = true;
this.threeRenderer.setPixelRatio(1); // handle pixel ratio externally
this.threeRenderer.shadowMap.enabled = true;
this.threeRenderer.shadowMap.type = PCFSoftShadowMap;
this.threeRenderer.shadowMap.autoUpdate = false;
this.debugger =
options != null && !!options.debug ? new Debugger(this) : null;
this.threeRenderer.debug = { checkShaderErrors: !!this.debugger };
// ACESFilmicToneMapping appears to be the most "saturated",
// and similar to Filament's gltf-viewer.
this.threeRenderer.toneMapping = ACESFilmicToneMapping;
}
catch (error) {
console.warn(error);
}
this.arRenderer = new ARRenderer(this);
this.textureUtils =
this.canRender ? new TextureUtils(this.threeRenderer) : null;
this.roughnessMipmapper = new RoughnessMipmapper(this.threeRenderer);
CachingGLTFLoader.initializeKTX2Loader(this.threeRenderer);
this.updateRendererSize();
this.lastTick = performance.now();
this.avgFrameDuration = 0;
}
static get singleton() {
return this._singleton;
}
static resetSingleton() {
this._singleton.dispose();
this._singleton = new Renderer({ debug: isDebugMode() });
}
get canRender() {
return this.threeRenderer != null;
}
get scaleFactor() {
return SCALE_STEPS[this.scaleStep];
}
set minScale(scale) {
let i = 1;
while (i < SCALE_STEPS.length) {
if (SCALE_STEPS[i] < scale) {
break;
}
++i;
}
this.lastStep = i - 1;
}
/**
* Updates the renderer's size based on the largest scene and any changes to
* device pixel ratio.
*/
updateRendererSize() {
const dpr = resolveDpr();
if (dpr !== this.dpr) {
// If the device pixel ratio has changed due to page zoom, elements
// specified by % width do not fire a resize event even though their CSS
// pixel dimensions change, so we force them to update their size here.
for (const scene of this.scenes) {
const { element } = scene;
element[$updateSize](element.getBoundingClientRect());
}
}
// Make the renderer the size of the largest scene
let width = 0;
let height = 0;
for (const scene of this.scenes) {
width = Math.max(width, scene.width);
height = Math.max(height, scene.height);
}
if (width === this.width && height === this.height && dpr === this.dpr) {
return;
}
this.width = width;
this.height = height;
this.dpr = dpr;
if (this.canRender) {
this.threeRenderer.setSize(width * dpr, height * dpr, false);
}
// Expand the canvas size to make up for shrinking the viewport.
const scale = this.scaleFactor;
const widthCSS = width / scale;
const heightCSS = height / scale;
// The canvas element must be styled outside of three because the offscreen
// canvas is not directly stylable.
this.canvasElement.style.width = `${widthCSS}px`;
this.canvasElement.style.height = `${heightCSS}px`;
// Each scene's canvas must match the renderer size. In general they can be
// larger than the element that contains them, but the overflow is hidden
// and only the portion that is shown is copied over.
for (const scene of this.scenes) {
const { canvas } = scene;
canvas.width = Math.round(width * dpr);
canvas.height = Math.round(height * dpr);
canvas.style.width = `${widthCSS}px`;
canvas.style.height = `${heightCSS}px`;
scene.isDirty = true;
}
}
updateRendererScale() {
const scaleStep = this.scaleStep;
if (this.avgFrameDuration > HIGH_FRAME_DURATION_MS &&
this.scaleStep < this.lastStep) {
++this.scaleStep;
}
else if (this.avgFrameDuration < LOW_FRAME_DURATION_MS && this.scaleStep > 0) {
--this.scaleStep;
}
if (scaleStep == this.scaleStep) {
return;
}
const scale = this.scaleFactor;
this.avgFrameDuration =
(HIGH_FRAME_DURATION_MS + LOW_FRAME_DURATION_MS) / 2;
const width = this.width / scale;
const height = this.height / scale;
this.canvasElement.style.width = `${width}px`;
this.canvasElement.style.height = `${height}px`;
for (const scene of this.scenes) {
const { style } = scene.canvas;
style.width = `${width}px`;
style.height = `${height}px`;
scene.isDirty = true;
}
}
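// Illustrative walk-through of the dynamic scaling above: avgFrameDuration is
// a smoothed (clamped exponential moving) average of frame duration. For
// example, a run of ~30 ms frames pushes the average above
// HIGH_FRAME_DURATION_MS (26 ms), so scaleStep increments and the render scale
// drops along SCALE_STEPS (1 -> 0.79 -> 0.62 -> ...), no lower than
// SCALE_STEPS[lastStep]. A run of ~15 ms frames pulls the average below
// LOW_FRAME_DURATION_MS (18 ms) and the scale steps back up toward 1. Each
// change resets the average to the midpoint (22 ms) so a single slow or fast
// frame does not cause oscillation.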
registerScene(scene) {
this.scenes.add(scene);
const { canvas } = scene;
const scale = this.scaleFactor;
canvas.width = Math.round(this.width * this.dpr);
canvas.height = Math.round(this.height * this.dpr);
canvas.style.width = `${this.width / scale}px`;
canvas.style.height = `${this.height / scale}px`;
if (this.multipleScenesVisible) {
canvas.classList.add('show');
}
scene.isDirty = true;
if (this.canRender && this.scenes.size > 0) {
this.threeRenderer.setAnimationLoop((time, frame) => this.render(time, frame));
}
if (this.debugger != null) {
this.debugger.addScene(scene);
}
}
unregisterScene(scene) {
this.scenes.delete(scene);
if (this.canRender && this.scenes.size === 0) {
this.threeRenderer.setAnimationLoop(null);
}
if (this.debugger != null) {
this.debugger.removeScene(scene);
}
}
displayCanvas(scene) {
return this.multipleScenesVisible ? scene.element[$canvas] :
this.canvasElement;
}
/**
* This function enables an optimization: when there is only a single
* <model-viewer> element, we can use the renderer's 3D canvas directly for
* display. Otherwise we need to use the element's 2D canvas and copy the
* renderer's result into it.
*/
selectCanvas() {
let visibleScenes = 0;
let visibleCanvas = null;
for (const scene of this.scenes) {
const { element } = scene;
if (element.modelIsVisible && scene.externalRenderer == null) {
++visibleScenes;
visibleCanvas = scene.canvas;
}
}
if (visibleCanvas == null) {
return;
}
const multipleScenesVisible = visibleScenes > 1 || USE_OFFSCREEN_CANVAS;
const { canvasElement } = this;
if (multipleScenesVisible === this.multipleScenesVisible &&
(multipleScenesVisible ||
canvasElement.parentElement === visibleCanvas.parentElement)) {
return;
}
this.multipleScenesVisible = multipleScenesVisible;
if (multipleScenesVisible) {
canvasElement.classList.remove('show');
}
for (const scene of this.scenes) {
if (scene.externalRenderer != null) {
continue;
}
const canvas = scene.element[$canvas];
if (multipleScenesVisible) {
canvas.classList.add('show');
scene.isDirty = true;
}
else if (scene.canvas === visibleCanvas) {
scene.canvas.parentElement.appendChild(canvasElement);
canvasElement.classList.add('show');
canvas.classList.remove('show');
scene.isDirty = true;
}
}
}
/**
* Returns an array version of this.scenes where the non-visible ones are
* first. This allows eager scenes to be rendered before they are visible,
* without needing the multi-canvas render path.
*/
orderedScenes() {
const scenes = [];
for (const visible of [false, true]) {
for (const scene of this.scenes) {
if (scene.element.modelIsVisible === visible) {
scenes.push(scene);
}
}
}
return scenes;
}
get isPresenting() {
return this.arRenderer.isPresenting;
}
/**
* This method takes care of updating the element and renderer state based on
* the time that has passed since the last rendered frame.
*/
preRender(scene, t, delta) {
const { element, exposure } = scene;
element[$tick](t, delta);
const exposureIsNumber = typeof exposure === 'number' && !self.isNaN(exposure);
this.threeRenderer.toneMappingExposure = exposureIsNumber ? exposure : 1.0;
if (scene.isShadowDirty()) {
this.threeRenderer.shadowMap.needsUpdate = true;
}
}
render(t, frame) {
if (frame != null) {
this.arRenderer.onWebXRFrame(t, frame);
this.arRenderer.presentedScene.postRender();
return;
}
const delta = t - this.lastTick;
this.lastTick = t;
if (!this.canRender || this.isPresenting) {
return;
}
this.avgFrameDuration += clamp(DURATION_DECAY * (delta - this.avgFrameDuration), -MAX_AVG_CHANGE_MS, MAX_AVG_CHANGE_MS);
this.selectCanvas();
this.updateRendererSize();
this.updateRendererScale();
const { dpr, scaleFactor } = this;
for (const scene of this.orderedScenes()) {
const { element } = scene;
if (!element.modelIsVisible && scene.renderCount > 0) {
continue;
}
this.preRender(scene, t, delta);
if (!scene.isDirty) {
continue;
}
if (scene.externalRenderer != null) {
scene.camera.updateMatrix();
const { matrix, projectionMatrix } = scene.camera;
const viewMatrix = matrix.elements.slice();
const target = scene.getTarget();
viewMatrix[12] += target.x;
viewMatrix[13] += target.y;
viewMatrix[14] += target.z;
scene.externalRenderer.render({
viewMatrix: viewMatrix,
projectionMatrix: projectionMatrix.elements
});
continue;
}
if (!element.modelIsVisible && !this.multipleScenesVisible) {
// Here we are pre-rendering on the visible canvas, so we must mark the
// visible scene dirty to ensure it overwrites us.
for (const visibleScene of this.scenes) {
if (visibleScene.element.modelIsVisible) {
visibleScene.isDirty = true;
}
}
}
// We avoid using the Three.js PixelRatio and handle it ourselves here so
// that we can do proper rounding and avoid white boundary pixels.
const width = Math.min(Math.ceil(scene.width * scaleFactor * dpr), this.canvas3D.width);
const height = Math.min(Math.ceil(scene.height * scaleFactor * dpr), this.canvas3D.height);
// Need to set the render target in order to prevent
// clearing the depth from a different buffer
this.threeRenderer.setRenderTarget(null);
this.threeRenderer.setViewport(0, Math.floor(this.height * dpr) - height, width, height);
this.threeRenderer.render(scene, scene.camera);
scene.postRender();
if (this.multipleScenesVisible) {
if (scene.context == null) {
scene.createContext();
}
{
const context2D = scene.context;
context2D.clearRect(0, 0, width, height);
context2D.drawImage(this.canvas3D, 0, 0, width, height, 0, 0, width, height);
}
}
scene.isDirty = false;
if (element.loaded) {
++scene.renderCount;
}
}
}
dispose() {
if (this.textureUtils != null) {
this.textureUtils.dispose();
}
if (this.threeRenderer != null) {
this.threeRenderer.dispose();
}
this.textureUtils = null;
this.threeRenderer = null;
this.scenes.clear();
this.canvas3D.removeEventListener('webglcontextlost', this.onWebGLContextLost);
}
}
Renderer._singleton = new Renderer({ debug: isDebugMode() });
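// Usage sketch: scenes are registered with the shared singleton, which drives
// the animation loop while at least one scene is registered. (`scene` here is
// a hypothetical ModelScene instance created by a <model-viewer> element.)
//
//   const renderer = Renderer.singleton;
//   renderer.registerScene(scene);    // starts the render loop if needed
//   // ... later, when the element disconnects:
//   renderer.unregisterScene(scene);  // loop stops once no scenes remain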
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Converts a base64 string which represents a data url
* into a Blob of the same contents.
*/
const dataUrlToBlob = async (base64DataUrl) => {
return new Promise((resolve, reject) => {
const sliceSize = 512;
const typeMatch = base64DataUrl.match(/data:(.*);/);
if (!typeMatch) {
return reject(new Error(`${base64DataUrl} is not a valid data Url`));
}
const type = typeMatch[1];
const base64 = base64DataUrl.replace(/data:image\/\w+;base64,/, '');
const byteCharacters = atob(base64);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
resolve(new Blob(byteArrays, { type }));
});
};
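// Usage sketch for dataUrlToBlob (the canvas is hypothetical):
//
//   const dataUrl = someCanvas.toDataURL('image/png');
//   const blob = await dataUrlToBlob(dataUrl);
//   // blob.type === 'image/png'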
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$9, _b$8;
const $ongoingActivities = Symbol('ongoingActivities');
const $announceTotalProgress = Symbol('announceTotalProgress');
const $eventDelegate = Symbol('eventDelegate');
const ACTIVITY_PROGRESS_WEIGHT = 0.5;
/**
* ProgressTracker is an event emitter that helps to track the ongoing progress
* of many simultaneous actions.
*
* ProgressTracker reports progress activity in the form of a progress event.
* The event.detail.totalProgress value indicates the elapsed progress of all
* activities being tracked by the ProgressTracker.
*
* The value of totalProgress is a number that progresses from 0 to 1. The
* ProgressTracker allows for the lazy accumulation of tracked actions, so the
* total progress represents an abstract, non-absolute progress towards the
* completion of all currently tracked events.
*
* When all currently tracked activities are finished, the ProgressTracker
* emits one final progress event and then resets the list of its currently
* tracked activities. This means that from an observer's perspective,
* ongoing activities will accumulate and collectively contribute to the notion
* of total progress until all currently tracked ongoing activities have
* completed.
*/
class ProgressTracker {
constructor() {
// NOTE(cdata): This eventDelegate hack is a quick trick to let us get the
// EventTarget interface without implementing or requiring a full polyfill. We
// should remove this once EventTarget is inheritable everywhere.
this[_a$9] = document.createDocumentFragment();
// NOTE(cdata): We declare each of these methods independently here so that we
// can inherit the correct types from EventTarget's interface. Maybe there is
// a better way to do this dynamically so that we don't repeat ourselves?
this.addEventListener = (...args) => this[$eventDelegate].addEventListener(...args);
this.removeEventListener = (...args) => this[$eventDelegate].removeEventListener(...args);
this.dispatchEvent = (...args) => this[$eventDelegate].dispatchEvent(...args);
this[_b$8] = new Set();
}
/**
* The total number of activities currently being tracked.
*/
get ongoingActivityCount() {
return this[$ongoingActivities].size;
}
/**
* Registers a new activity to be tracked by the progress tracker. The method
* returns a special callback that should be invoked whenever new progress is
* ready to be reported. The progress should be reported as a value between 0
* and 1, where 0 would represent the beginning of the action and 1 would
* represent its completion.
*
* There is no built-in notion of a time-out for ongoing activities, so once
* an ongoing activity is begun, it is up to the consumer of this API to
* update the progress until that activity is no longer ongoing.
*
* Progress is only allowed to move forward for any given activity. If a lower
* progress is reported than the previously reported progress, it will be
* ignored.
*/
beginActivity() {
const activity = { progress: 0 };
this[$ongoingActivities].add(activity);
if (this.ongoingActivityCount === 1) {
// Announce the first progress event (which should always be 0 / 1
// total progress):
this[$announceTotalProgress]();
}
return (progress) => {
const nextProgress = Math.max(clamp(progress, 0, 1), activity.progress);
if (nextProgress !== activity.progress) {
activity.progress = nextProgress;
this[$announceTotalProgress]();
}
return activity.progress;
};
}
[(_a$9 = $eventDelegate, _b$8 = $ongoingActivities, $announceTotalProgress)]() {
let totalProgress = 0;
let statusCount = 0;
let completedActivities = 0;
for (const activity of this[$ongoingActivities]) {
const { progress } = activity;
const compoundWeight = ACTIVITY_PROGRESS_WEIGHT / Math.pow(2, statusCount++);
totalProgress += progress * compoundWeight;
if (progress === 1.0) {
completedActivities++;
}
}
if (completedActivities === this.ongoingActivityCount) {
totalProgress = 1.0;
this[$ongoingActivities].clear();
}
this.dispatchEvent(new CustomEvent('progress', { detail: { totalProgress } }));
}
}
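// Usage sketch for ProgressTracker:
//
//   const tracker = new ProgressTracker();
//   tracker.addEventListener('progress', (event) => {
//     console.log('total progress:', event.detail.totalProgress);
//   });
//   const reportDownload = tracker.beginActivity();
//   reportDownload(0.5);   // this activity is halfway done
//   reportDownload(1.0);   // done; once every tracked activity reaches 1, the
//                          // tracker emits a final event and resets itself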
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$7 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var _a$8, _b$7, _c$3, _d$1, _e, _f, _g, _h, _j, _k;
const CLEAR_MODEL_TIMEOUT_MS = 1000;
const FALLBACK_SIZE_UPDATE_THRESHOLD_MS = 50;
const ANNOUNCE_MODEL_VISIBILITY_DEBOUNCE_THRESHOLD = 0;
const UNSIZED_MEDIA_WIDTH = 300;
const UNSIZED_MEDIA_HEIGHT = 150;
const blobCanvas = document.createElement('canvas');
let blobContext = null;
const $template = Symbol('template');
const $fallbackResizeHandler = Symbol('fallbackResizeHandler');
const $defaultAriaLabel = Symbol('defaultAriaLabel');
const $resizeObserver = Symbol('resizeObserver');
const $clearModelTimeout = Symbol('clearModelTimeout');
const $onContextLost = Symbol('onContextLost');
const $loaded = Symbol('loaded');
const $updateSize = Symbol('updateSize');
const $intersectionObserver = Symbol('intersectionObserver');
const $isElementInViewport = Symbol('isElementInViewport');
const $announceModelVisibility = Symbol('announceModelVisibility');
const $ariaLabel = Symbol('ariaLabel');
const $loadedTime = Symbol('loadedTime');
const $updateSource = Symbol('updateSource');
const $markLoaded = Symbol('markLoaded');
const $container = Symbol('container');
const $userInputElement = Symbol('input');
const $canvas = Symbol('canvas');
const $scene = Symbol('scene');
const $needsRender = Symbol('needsRender');
const $tick = Symbol('tick');
const $onModelLoad = Symbol('onModelLoad');
const $onResize = Symbol('onResize');
const $renderer = Symbol('renderer');
const $progressTracker = Symbol('progressTracker');
const $getLoaded = Symbol('getLoaded');
const $getModelIsVisible = Symbol('getModelIsVisible');
const $shouldAttemptPreload = Symbol('shouldAttemptPreload');
const $sceneIsReady = Symbol('sceneIsReady');
const $hasTransitioned = Symbol('hasTransitioned');
const toVector3D = (v) => {
return {
x: v.x,
y: v.y,
z: v.z,
toString() {
return `${this.x}m ${this.y}m ${this.z}m`;
}
};
};
/**
* Definition for a basic <model-viewer> element.
*/
class ModelViewerElementBase extends UpdatingElement {
/**
* Creates a new ModelViewerElement.
*/
constructor() {
super();
this.alt = null;
this.src = null;
this[_a$8] = false;
this[_b$7] = false;
this[_c$3] = 0;
this[_d$1] = null;
this[_e] = debounce(() => {
const boundingRect = this.getBoundingClientRect();
this[$updateSize](boundingRect);
}, FALLBACK_SIZE_UPDATE_THRESHOLD_MS);
this[_f] = debounce((oldVisibility) => {
const newVisibility = this.modelIsVisible;
if (newVisibility !== oldVisibility) {
this.dispatchEvent(new CustomEvent('model-visibility', { detail: { visible: newVisibility } }));
}
}, ANNOUNCE_MODEL_VISIBILITY_DEBOUNCE_THRESHOLD);
this[_g] = null;
this[_h] = null;
this[_j] = new ProgressTracker();
this[_k] = (event) => {
this.dispatchEvent(new CustomEvent('error', { detail: { type: 'webglcontextlost', sourceError: event.sourceEvent } }));
};
// NOTE(cdata): It is *very important* to access this template first so that
// the ShadyCSS template preparation steps happen before element styling in
// IE11:
const template = this.constructor.template;
if (window.ShadyCSS) {
window.ShadyCSS.styleElement(this, {});
}
// NOTE(cdata): The canonical ShadyCSS examples suggest that the Shadow Root
// should be created after the invocation of ShadyCSS.styleElement
this.attachShadow({ mode: 'open' });
const shadowRoot = this.shadowRoot;
shadowRoot.appendChild(template.content.cloneNode(true));
this[$container] = shadowRoot.querySelector('.container');
this[$userInputElement] =
shadowRoot.querySelector('.userInput');
this[$canvas] = shadowRoot.querySelector('canvas');
this[$defaultAriaLabel] =
this[$userInputElement].getAttribute('aria-label');
// Because of potential race conditions related to invoking the constructor
// we only use the bounding rect to set the initial size if the element is
// already connected to the document:
let width, height;
if (this.isConnected) {
const rect = this.getBoundingClientRect();
width = rect.width;
height = rect.height;
}
else {
width = UNSIZED_MEDIA_WIDTH;
height = UNSIZED_MEDIA_HEIGHT;
}
// Create the underlying ModelScene.
this[$scene] =
new ModelScene({ canvas: this[$canvas], element: this, width, height });
this[$scene].addEventListener('model-load', async (event) => {
this[$markLoaded]();
this[$onModelLoad]();
// Give loading async tasks a chance to complete.
await timePasses();
this.dispatchEvent(new CustomEvent('load', { detail: { url: event.url } }));
});
// Update initial size on microtask timing so that subclasses have a
// chance to initialize
Promise.resolve().then(() => {
this[$updateSize](this.getBoundingClientRect());
});
if (HAS_RESIZE_OBSERVER) {
// Set up a resize observer so we can scale our canvas
// if our <model-viewer> changes
this[$resizeObserver] =
new ResizeObserver((entries) => {
// Don't resize anything if in AR mode; otherwise the canvas
// scaling to fullscreen on entering AR will clobber the flat/2d
// dimensions of the element.
if (this[$renderer].isPresenting) {
return;
}
for (let entry of entries) {
if (entry.target === this) {
this[$updateSize](entry.contentRect);
}
}
});
}
if (HAS_INTERSECTION_OBSERVER) {
this[$intersectionObserver] = new IntersectionObserver(entries => {
for (let entry of entries) {
if (entry.target === this) {
const oldVisibility = this.modelIsVisible;
this[$isElementInViewport] = entry.isIntersecting;
this[$announceModelVisibility](oldVisibility);
if (this[$isElementInViewport] && !this[$sceneIsReady]()) {
this[$updateSource]();
}
}
}
}, {
root: null,
// We used to have margin here, but it was causing animated models below
// the fold to steal the frame budget. Weirder still, it would also
// cause input events to be swallowed, sometimes for seconds on the
// model above the fold, but only when the animated model was completely
// below. Setting this margin to zero fixed it.
rootMargin: '0px',
threshold: 0,
});
}
else {
// If there is no intersection observer, then all models should be visible
// at all times:
this[$isElementInViewport] = true;
}
}
static get is() {
return 'model-viewer';
}
/** @nocollapse */
static get template() {
if (!this.hasOwnProperty($template)) {
this[$template] = makeTemplate(this.is);
}
return this[$template];
}
/** @export */
static set modelCacheSize(value) {
CachingGLTFLoader[$evictionPolicy].evictionThreshold = value;
}
/** @export */
static get modelCacheSize() {
return CachingGLTFLoader[$evictionPolicy].evictionThreshold;
}
/** @export */
static set minimumRenderScale(value) {
if (value > 1) {
console.warn('<model-viewer> minimumRenderScale has been clamped to a maximum value of 1.');
}
if (value <= 0) {
console.warn('<model-viewer> minimumRenderScale has been clamped to a minimum value of 0.25.');
}
Renderer.singleton.minScale = value;
}
/** @export */
static get minimumRenderScale() {
return Renderer.singleton.minScale;
}
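// Hedged usage sketch for the static configuration above; the concrete custom
// element class that ultimately exposes these statics is assembled elsewhere
// in this bundle:
//
//   ModelViewerElementBase.modelCacheSize = 10;      // eviction threshold for the GLTF cache
//   ModelViewerElementBase.minimumRenderScale = 0.5; // don't let dynamic scaling drop below 50%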
/** @export */
get loaded() {
return this[$getLoaded]();
}
get [(_a$8 = $isElementInViewport, _b$7 = $loaded, _c$3 = $loadedTime, _d$1 = $clearModelTimeout, _e = $fallbackResizeHandler, _f = $announceModelVisibility, _g = $resizeObserver, _h = $intersectionObserver, _j = $progressTracker, $renderer)]() {
return Renderer.singleton;
}
/** @export */
get modelIsVisible() {
return this[$getModelIsVisible]();
}
connectedCallback() {
super.connectedCallback && super.connectedCallback();
if (HAS_RESIZE_OBSERVER) {
this[$resizeObserver].observe(this);
}
else {
self.addEventListener('resize', this[$fallbackResizeHandler]);
}
if (HAS_INTERSECTION_OBSERVER) {
this[$intersectionObserver].observe(this);
}
const renderer = this[$renderer];
renderer.addEventListener('contextlost', this[$onContextLost]);
renderer.registerScene(this[$scene]);
if (this[$clearModelTimeout] != null) {
self.clearTimeout(this[$clearModelTimeout]);
this[$clearModelTimeout] = null;
// Force an update in case the model has been evicted from our GLTF cache
// @see https://lit-element.polymer-project.org/guide/lifecycle#requestupdate
this.requestUpdate('src', null);
}
}
disconnectedCallback() {
super.disconnectedCallback && super.disconnectedCallback();
if (HAS_RESIZE_OBSERVER) {
this[$resizeObserver].unobserve(this);
}
else {
self.removeEventListener('resize', this[$fallbackResizeHandler]);
}
if (HAS_INTERSECTION_OBSERVER) {
this[$intersectionObserver].unobserve(this);
}
const renderer = this[$renderer];
renderer.removeEventListener('contextlost', this[$onContextLost]);
renderer.unregisterScene(this[$scene]);
this[$clearModelTimeout] = self.setTimeout(() => {
this[$scene].reset();
}, CLEAR_MODEL_TIMEOUT_MS);
}
updated(changedProperties) {
super.updated(changedProperties);
// NOTE(cdata): If a property changes from values A -> B -> A in the space
// of a microtask, LitElement/UpdatingElement will notify of a change even
// though the value has effectively not changed, so we need to check to make
// sure that the value has actually changed before changing the loaded flag.
if (changedProperties.has('src')) {
if (this.src == null) {
this[$loaded] = false;
this[$loadedTime] = 0;
this[$scene].reset();
}
else if (this.src !== this[$scene].url) {
this[$loaded] = false;
this[$loadedTime] = 0;
this[$updateSource]();
}
}
if (changedProperties.has('alt')) {
const ariaLabel = this.alt == null ? this[$defaultAriaLabel] : this.alt;
this[$userInputElement].setAttribute('aria-label', ariaLabel);
}
}
/** @export */
toDataURL(type, encoderOptions) {
return this[$renderer]
.displayCanvas(this[$scene])
.toDataURL(type, encoderOptions);
}
/** @export */
async toBlob(options) {
const mimeType = options ? options.mimeType : undefined;
const qualityArgument = options ? options.qualityArgument : undefined;
const idealAspect = options ? options.idealAspect : undefined;
const { width, height, fieldOfViewAspect, aspect } = this[$scene];
const { dpr, scaleFactor } = this[$renderer];
let outputWidth = width * scaleFactor * dpr;
let outputHeight = height * scaleFactor * dpr;
let offsetX = 0;
let offsetY = 0;
if (idealAspect === true) {
if (fieldOfViewAspect > aspect) {
const oldHeight = outputHeight;
outputHeight = Math.round(outputWidth / fieldOfViewAspect);
offsetY = (oldHeight - outputHeight) / 2;
}
else {
const oldWidth = outputWidth;
outputWidth = Math.round(outputHeight * fieldOfViewAspect);
offsetX = (oldWidth - outputWidth) / 2;
}
}
blobCanvas.width = outputWidth;
blobCanvas.height = outputHeight;
try {
return new Promise(async (resolve, reject) => {
if (blobContext == null) {
blobContext = blobCanvas.getContext('2d');
}
blobContext.drawImage(this[$renderer].displayCanvas(this[$scene]), offsetX, offsetY, outputWidth, outputHeight, 0, 0, outputWidth, outputHeight);
if (blobCanvas.msToBlob) {
// NOTE: msToBlob only returns image/png, so ensure mimeType is either
// unspecified (it defaults to image/png) or image/png; otherwise fall
// back to using toDataURL on IE.
if (!mimeType || mimeType === 'image/png') {
return resolve(blobCanvas.msToBlob());
}
}
if (!blobCanvas.toBlob) {
return resolve(await dataUrlToBlob(blobCanvas.toDataURL(mimeType, qualityArgument)));
}
blobCanvas.toBlob((blob) => {
if (!blob) {
return reject(new Error('Unable to retrieve canvas blob'));
}
resolve(blob);
}, mimeType, qualityArgument);
});
}
finally {
this[$updateSize]({ width, height });
}
}
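// Usage sketch (assumes a <model-viewer> element is already in the page):
//
//   const viewer = document.querySelector('model-viewer');
//   const png = await viewer.toBlob({ mimeType: 'image/png', idealAspect: true });
//   const dataUrl = viewer.toDataURL('image/jpeg', 0.8);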
registerRenderer(renderer) {
this[$scene].externalRenderer = renderer;
}
unregisterRenderer() {
this[$scene].externalRenderer = null;
}
get [$ariaLabel]() {
return (this.alt == null || this.alt === 'null') ? this[$defaultAriaLabel] :
this.alt;
}
// NOTE(cdata): Although this may seem extremely redundant, it is required in
// order to support overloading when TypeScript is compiled to ES5
// @see https://github.com/Polymer/lit-element/pull/745
// @see https://github.com/microsoft/TypeScript/issues/338
[$getLoaded]() {
return this[$loaded];
}
// @see [$getLoaded]
[$getModelIsVisible]() {
return this.loaded && this[$isElementInViewport];
}
[$hasTransitioned]() {
return this.modelIsVisible;
}
[$shouldAttemptPreload]() {
return !!this.src && this[$isElementInViewport];
}
[$sceneIsReady]() {
return this[$loaded];
}
/**
* Called on initialization and when the resize observer fires.
*/
[$updateSize]({ width, height }) {
this[$container].style.width = `${width}px`;
this[$container].style.height = `${height}px`;
this[$onResize]({ width: parseFloat(width), height: parseFloat(height) });
}
[$tick](_time, _delta) {
}
[$markLoaded]() {
if (this[$loaded]) {
return;
}
this[$loaded] = true;
this[$loadedTime] = performance.now();
}
[$needsRender]() {
this[$scene].isDirty = true;
}
[$onModelLoad]() {
}
[$onResize](e) {
this[$scene].setSize(e.width, e.height);
}
/**
* Parses the element for an appropriate source URL and
* sets the views to use the new model, based on the `preload`
* attribute.
*/
async [(_k = $onContextLost, $updateSource)]() {
if (this.loaded || !this[$shouldAttemptPreload]()) {
return;
}
const updateSourceProgress = this[$progressTracker].beginActivity();
const source = this.src;
try {
await this[$scene].setSource(source, (progress) => updateSourceProgress(progress * 0.8));
const detail = { url: source };
this.dispatchEvent(new CustomEvent('preload', { detail }));
}
catch (error) {
this.dispatchEvent(new CustomEvent('error', { detail: error }));
}
finally {
updateSourceProgress(0.9);
requestAnimationFrame(() => {
requestAnimationFrame(() => {
updateSourceProgress(1.0);
});
});
}
}
}
__decorate$7([
property({ type: String })
], ModelViewerElementBase.prototype, "alt", void 0);
__decorate$7([
property({ type: String })
], ModelViewerElementBase.prototype, "src", void 0);
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$6 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof undefined === "function") r = undefined(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
const MILLISECONDS_PER_SECOND = 1000.0;
const $changeAnimation = Symbol('changeAnimation');
const $paused = Symbol('paused');
const AnimationMixin = (ModelViewerElement) => {
var _a;
class AnimationModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this.autoplay = false;
this.animationName = undefined;
this.animationCrossfadeDuration = 300;
this[_a] = true;
}
/**
         * Returns an array of the names of all animations in the currently
         * loaded model, or an empty array if no model is loaded (see the usage
         * sketch below this getter).
*/
get availableAnimations() {
if (this.loaded) {
return this[$scene].animationNames;
}
return [];
}
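        // Usage sketch (`viewer` is an illustrative reference to a <model-viewer>
        // element using this mixin; the animation names are made up):
        //   viewer.addEventListener('load', () => {
        //     const names = viewer.availableAnimations;  // e.g. ['Idle', 'Walk']
        //     if (names.length > 0) {
        //       viewer.animationName = names[0];
        //       viewer.play();
        //     }
        //   });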
get duration() {
return this[$scene].duration;
}
get paused() {
return this[$paused];
}
get currentTime() {
return this[$scene].animationTime;
}
set currentTime(value) {
this[$scene].animationTime = value;
this[$renderer].threeRenderer.shadowMap.needsUpdate = true;
this[$needsRender]();
}
pause() {
if (this[$paused]) {
return;
}
this[$paused] = true;
this[$renderer].threeRenderer.shadowMap.autoUpdate = false;
this.dispatchEvent(new CustomEvent('pause'));
}
play() {
if (this[$paused] && this.availableAnimations.length > 0) {
this[$paused] = false;
this[$renderer].threeRenderer.shadowMap.autoUpdate = true;
if (!this[$scene].hasActiveAnimation) {
this[$changeAnimation]();
}
this.dispatchEvent(new CustomEvent('play'));
}
}
[(_a = $paused, $onModelLoad)]() {
super[$onModelLoad]();
this[$paused] = true;
if (this.autoplay) {
this[$changeAnimation]();
this.play();
}
}
[$tick](_time, delta) {
super[$tick](_time, delta);
if (this[$paused] ||
(!this[$hasTransitioned]() && !this[$renderer].isPresenting)) {
return;
}
this[$scene].updateAnimation(delta / MILLISECONDS_PER_SECOND);
this[$needsRender]();
}
updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('autoplay') && this.autoplay) {
this.play();
}
if (changedProperties.has('animationName')) {
this[$changeAnimation]();
}
}
async [$updateSource]() {
// If we are loading a new model, we need to stop the animation of
// the current one (if any is playing). Otherwise, we might lose
// the reference to the scene root and running actions start to
// throw exceptions and/or behave in unexpected ways:
this[$scene].stopAnimation();
return super[$updateSource]();
}
[$changeAnimation]() {
this[$scene].playAnimation(this.animationName, this.animationCrossfadeDuration / MILLISECONDS_PER_SECOND);
// If we are currently paused, we need to force a render so that
// the scene updates to the first frame of the new animation
if (this[$paused]) {
this[$scene].updateAnimation(0);
this[$needsRender]();
}
}
}
__decorate$6([
property({ type: Boolean })
], AnimationModelViewerElement.prototype, "autoplay", void 0);
__decorate$6([
property({ type: String, attribute: 'animation-name' })
], AnimationModelViewerElement.prototype, "animationName", void 0);
__decorate$6([
property({ type: Number, attribute: 'animation-crossfade-duration' })
], AnimationModelViewerElement.prototype, "animationCrossfadeDuration", void 0);
return AnimationModelViewerElement;
};
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $hotspotMap = Symbol('hotspotMap');
const $mutationCallback = Symbol('mutationCallback');
const $observer = Symbol('observer');
const $addHotspot = Symbol('addHotspot');
const $removeHotspot = Symbol('removeHotspot');
// Used internally by positionAndNormalFromPoint()
const pixelPosition = new Vector2();
const worldToModel = new Matrix4();
const worldToModelNormal = new Matrix3();
/**
* AnnotationMixin implements a declarative API to add hotspots and annotations.
* Child elements of the <model-viewer> element that have a slot name that
* begins with "hotspot" and data-position and data-normal attributes in
* the format of the camera-target attribute will be added to the scene and
* track the specified model coordinates.
*/
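// A minimal sketch of the declarative API described above, expressed as DOM
// calls (the slot name, position and normal values are illustrative):
//   const hotspot = document.createElement('button');
//   hotspot.slot = 'hotspot-1';
//   hotspot.dataset.position = '0m 1m 0m';
//   hotspot.dataset.normal = '0 1 0';
//   modelViewerElement.appendChild(hotspot);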
const AnnotationMixin = (ModelViewerElement) => {
var _a, _b, _c;
class AnnotationModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this[_a] = new Map();
this[_b] = (mutations) => {
mutations.forEach((mutation) => {
// NOTE: Be wary that in ShadyDOM cases, the MutationRecord
// only has addedNodes and removedNodes (and no other details).
if (!(mutation instanceof MutationRecord) ||
mutation.type === 'childList') {
mutation.addedNodes.forEach((node) => {
this[$addHotspot](node);
});
mutation.removedNodes.forEach((node) => {
this[$removeHotspot](node);
});
this[$needsRender]();
}
});
};
this[_c] = new MutationObserver(this[$mutationCallback]);
}
connectedCallback() {
super.connectedCallback();
for (let i = 0; i < this.children.length; ++i) {
this[$addHotspot](this.children[i]);
}
const { ShadyDOM } = self;
if (ShadyDOM == null) {
this[$observer].observe(this, { childList: true });
}
else {
this[$observer] =
ShadyDOM.observeChildren(this, this[$mutationCallback]);
}
}
disconnectedCallback() {
super.disconnectedCallback();
const { ShadyDOM } = self;
if (ShadyDOM == null) {
this[$observer].disconnect();
}
else {
ShadyDOM.unobserveChildren(this[$observer]);
}
}
/**
* Since the data-position and data-normal attributes are not observed, use
* this method to move a hotspot. Keep in mind that all hotspots with the
* same slot name use a single location and the first definition takes
* precedence, until updated with this method.
*/
updateHotspot(config) {
const hotspot = this[$hotspotMap].get(config.name);
if (hotspot == null) {
return;
}
hotspot.updatePosition(config.position);
hotspot.updateNormal(config.normal);
this[$needsRender]();
}
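        // Usage sketch (the hotspot name and coordinate strings are illustrative;
        // they use the same format as the data-position/data-normal attributes):
        //   viewer.updateHotspot({
        //     name: 'hotspot-1',
        //     position: '0m 1.5m 0.25m',
        //     normal: '0 1 0'
        //   });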
/**
* This method returns the model position and normal of the point on the
* mesh corresponding to the input pixel coordinates given relative to the
* model-viewer element. The position and normal are returned as strings in
* the format suitable for putting in a hotspot's data-position and
* data-normal attributes. If the mesh is not hit, the result is null.
*/
positionAndNormalFromPoint(pixelX, pixelY) {
const scene = this[$scene];
const { width, height, target } = scene;
pixelPosition.set(pixelX / width, pixelY / height)
.multiplyScalar(2)
.subScalar(1);
pixelPosition.y *= -1;
const hit = scene.positionAndNormalFromPoint(pixelPosition);
if (hit == null) {
return null;
}
worldToModel.copy(target.matrixWorld).invert();
const position = toVector3D(hit.position.applyMatrix4(worldToModel));
worldToModelNormal.getNormalMatrix(worldToModel);
const normal = toVector3D(hit.normal.applyNormalMatrix(worldToModelNormal));
return { position: position, normal: normal };
}
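        // Usage sketch: convert a click on the element into a model-space
        // position and normal (`viewer` is an illustrative element reference):
        //   viewer.addEventListener('click', (event) => {
        //     const rect = viewer.getBoundingClientRect();
        //     const hit = viewer.positionAndNormalFromPoint(
        //         event.clientX - rect.left, event.clientY - rect.top);
        //     if (hit != null) {
        //       console.log(hit.position, hit.normal);
        //     }
        //   });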
[(_a = $hotspotMap, _b = $mutationCallback, _c = $observer, $addHotspot)](node) {
if (!(node instanceof HTMLElement &&
node.slot.indexOf('hotspot') === 0)) {
return;
}
let hotspot = this[$hotspotMap].get(node.slot);
if (hotspot != null) {
hotspot.increment();
}
else {
hotspot = new Hotspot({
name: node.slot,
position: node.dataset.position,
normal: node.dataset.normal,
});
this[$hotspotMap].set(node.slot, hotspot);
this[$scene].addHotspot(hotspot);
}
this[$scene].isDirty = true;
}
[$removeHotspot](node) {
if (!(node instanceof HTMLElement)) {
return;
}
const hotspot = this[$hotspotMap].get(node.slot);
if (!hotspot) {
return;
}
if (hotspot.decrement()) {
this[$scene].removeHotspot(hotspot);
this[$hotspotMap].delete(node.slot);
}
this[$scene].isDirty = true;
}
}
return AnnotationModelViewerElement;
};
/*!
fflate - fast JavaScript compression/decompression
<https://101arrowz.github.io/fflate>
Licensed under MIT. https://github.com/101arrowz/fflate/blob/master/LICENSE
version 0.6.9
*/
var durl = function (c) { return URL.createObjectURL(new Blob([c], { type: 'text/javascript' })); };
try {
URL.revokeObjectURL(durl(''));
}
catch (e) {
// We're in Deno or a very old browser
durl = function (c) { return 'data:application/javascript;charset=UTF-8,' + encodeURI(c); };
}
// aliases for shorter compressed code (most minifiers don't do this)
var u8 = Uint8Array, u16 = Uint16Array, u32 = Uint32Array;
// fixed length extra bits
var fleb = new u8([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, /* unused */ 0, 0, /* impossible */ 0]);
// fixed distance extra bits
// see fleb note
var fdeb = new u8([0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, /* unused */ 0, 0]);
// code length index map
var clim = new u8([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]);
// get base, reverse index map from extra bits
var freb = function (eb, start) {
var b = new u16(31);
for (var i = 0; i < 31; ++i) {
b[i] = start += 1 << eb[i - 1];
}
// numbers here are at max 18 bits
var r = new u32(b[30]);
for (var i = 1; i < 30; ++i) {
for (var j = b[i]; j < b[i + 1]; ++j) {
r[j] = ((j - b[i]) << 5) | i;
}
}
return [b, r];
};
var _a$7 = freb(fleb, 2), fl = _a$7[0], revfl = _a$7[1];
// we can ignore the fact that the other numbers are wrong; they never happen anyway
fl[28] = 258, revfl[258] = 28;
var _b$6 = freb(fdeb, 0), revfd = _b$6[1];
// map of value to reverse (assuming 16 bits)
var rev = new u16(32768);
for (var i = 0; i < 32768; ++i) {
// reverse table algorithm from SO
var x = ((i & 0xAAAA) >>> 1) | ((i & 0x5555) << 1);
x = ((x & 0xCCCC) >>> 2) | ((x & 0x3333) << 2);
x = ((x & 0xF0F0) >>> 4) | ((x & 0x0F0F) << 4);
rev[i] = (((x & 0xFF00) >>> 8) | ((x & 0x00FF) << 8)) >>> 1;
}
// create huffman tree from u8 "map": index -> code length for code index
// mb (max bits) must be at most 15
// TODO: optimize/split up?
var hMap = (function (cd, mb, r) {
var s = cd.length;
// index
var i = 0;
// u16 "map": index -> # of codes with bit length = index
var l = new u16(mb);
// length of cd must be 288 (total # of codes)
for (; i < s; ++i)
++l[cd[i] - 1];
// u16 "map": index -> minimum code for bit length = index
var le = new u16(mb);
for (i = 0; i < mb; ++i) {
le[i] = (le[i - 1] + l[i - 1]) << 1;
}
var co;
if (r) {
// u16 "map": index -> number of actual bits, symbol for code
co = new u16(1 << mb);
// bits to remove for reverser
var rvb = 15 - mb;
for (i = 0; i < s; ++i) {
// ignore 0 lengths
if (cd[i]) {
// num encoding both symbol and bits read
var sv = (i << 4) | cd[i];
// free bits
var r_1 = mb - cd[i];
// start value
var v = le[cd[i] - 1]++ << r_1;
// m is end value
for (var m = v | ((1 << r_1) - 1); v <= m; ++v) {
// every 16 bit value starting with the code yields the same result
co[rev[v] >>> rvb] = sv;
}
}
}
}
else {
co = new u16(s);
for (i = 0; i < s; ++i) {
if (cd[i]) {
co[i] = rev[le[cd[i] - 1]++] >>> (15 - cd[i]);
}
}
}
return co;
});
// fixed length tree
var flt = new u8(288);
for (var i = 0; i < 144; ++i)
flt[i] = 8;
for (var i = 144; i < 256; ++i)
flt[i] = 9;
for (var i = 256; i < 280; ++i)
flt[i] = 7;
for (var i = 280; i < 288; ++i)
flt[i] = 8;
// fixed distance tree
var fdt = new u8(32);
for (var i = 0; i < 32; ++i)
fdt[i] = 5;
// fixed length map
var flm = /*#__PURE__*/ hMap(flt, 9, 0);
// fixed distance map
var fdm = /*#__PURE__*/ hMap(fdt, 5, 0);
// get end of byte
var shft = function (p) { return ((p / 8) | 0) + (p & 7 && 1); };
// typed array slice - allows garbage collector to free original reference,
// while being more compatible than .slice
var slc = function (v, s, e) {
if (s == null || s < 0)
s = 0;
if (e == null || e > v.length)
e = v.length;
// can't use .constructor in case user-supplied
var n = new (v instanceof u16 ? u16 : v instanceof u32 ? u32 : u8)(e - s);
n.set(v.subarray(s, e));
return n;
};
// starting at p, write the minimum number of bits that can hold v to d
var wbits = function (d, p, v) {
v <<= p & 7;
var o = (p / 8) | 0;
d[o] |= v;
d[o + 1] |= v >>> 8;
};
// starting at p, write the minimum number of bits (>8) that can hold v to d
var wbits16 = function (d, p, v) {
v <<= p & 7;
var o = (p / 8) | 0;
d[o] |= v;
d[o + 1] |= v >>> 8;
d[o + 2] |= v >>> 16;
};
// creates code lengths from a frequency table
var hTree = function (d, mb) {
// Need extra info to make a tree
var t = [];
for (var i = 0; i < d.length; ++i) {
if (d[i])
t.push({ s: i, f: d[i] });
}
var s = t.length;
var t2 = t.slice();
if (!s)
return [et, 0];
if (s == 1) {
var v = new u8(t[0].s + 1);
v[t[0].s] = 1;
return [v, 1];
}
t.sort(function (a, b) { return a.f - b.f; });
// after i2 reaches last ind, will be stopped
// freq must be greater than largest possible number of symbols
t.push({ s: -1, f: 25001 });
var l = t[0], r = t[1], i0 = 0, i1 = 1, i2 = 2;
t[0] = { s: -1, f: l.f + r.f, l: l, r: r };
// efficient algorithm from UZIP.js
// i0 is lookbehind, i2 is lookahead - after processing two low-freq
// symbols that combined have high freq, will start processing i2 (high-freq,
// non-composite) symbols instead
// see https://reddit.com/r/photopea/comments/ikekht/uzipjs_questions/
while (i1 != s - 1) {
l = t[t[i0].f < t[i2].f ? i0++ : i2++];
r = t[i0 != i1 && t[i0].f < t[i2].f ? i0++ : i2++];
t[i1++] = { s: -1, f: l.f + r.f, l: l, r: r };
}
var maxSym = t2[0].s;
for (var i = 1; i < s; ++i) {
if (t2[i].s > maxSym)
maxSym = t2[i].s;
}
// code lengths
var tr = new u16(maxSym + 1);
// max bits in tree
var mbt = ln(t[i1 - 1], tr, 0);
if (mbt > mb) {
// more algorithms from UZIP.js
// TODO: find out how this code works (debt)
// ind debt
var i = 0, dt = 0;
// left cost
var lft = mbt - mb, cst = 1 << lft;
t2.sort(function (a, b) { return tr[b.s] - tr[a.s] || a.f - b.f; });
for (; i < s; ++i) {
var i2_1 = t2[i].s;
if (tr[i2_1] > mb) {
dt += cst - (1 << (mbt - tr[i2_1]));
tr[i2_1] = mb;
}
else
break;
}
dt >>>= lft;
while (dt > 0) {
var i2_2 = t2[i].s;
if (tr[i2_2] < mb)
dt -= 1 << (mb - tr[i2_2]++ - 1);
else
++i;
}
for (; i >= 0 && dt; --i) {
var i2_3 = t2[i].s;
if (tr[i2_3] == mb) {
--tr[i2_3];
++dt;
}
}
mbt = mb;
}
return [new u8(tr), mbt];
};
// get the max length and assign length codes
var ln = function (n, l, d) {
return n.s == -1
? Math.max(ln(n.l, l, d + 1), ln(n.r, l, d + 1))
: (l[n.s] = d);
};
// length codes generation
var lc = function (c) {
var s = c.length;
// Note that the semicolon was intentional
while (s && !c[--s])
;
var cl = new u16(++s);
// ind num streak
var cli = 0, cln = c[0], cls = 1;
var w = function (v) { cl[cli++] = v; };
for (var i = 1; i <= s; ++i) {
if (c[i] == cln && i != s)
++cls;
else {
if (!cln && cls > 2) {
for (; cls > 138; cls -= 138)
w(32754);
if (cls > 2) {
w(cls > 10 ? ((cls - 11) << 5) | 28690 : ((cls - 3) << 5) | 12305);
cls = 0;
}
}
else if (cls > 3) {
w(cln), --cls;
for (; cls > 6; cls -= 6)
w(8304);
if (cls > 2)
w(((cls - 3) << 5) | 8208), cls = 0;
}
while (cls--)
w(cln);
cls = 1;
cln = c[i];
}
}
return [cl.subarray(0, cli), s];
};
// calculate the length of output from tree, code lengths
var clen = function (cf, cl) {
var l = 0;
for (var i = 0; i < cl.length; ++i)
l += cf[i] * cl[i];
return l;
};
// writes a fixed block
// returns the new bit pos
var wfblk = function (out, pos, dat) {
// no need to write 00 as type: TypedArray defaults to 0
var s = dat.length;
var o = shft(pos + 2);
out[o] = s & 255;
out[o + 1] = s >>> 8;
out[o + 2] = out[o] ^ 255;
out[o + 3] = out[o + 1] ^ 255;
for (var i = 0; i < s; ++i)
out[o + i + 4] = dat[i];
return (o + 4 + s) * 8;
};
// writes a block
var wblk = function (dat, out, final, syms, lf, df, eb, li, bs, bl, p) {
wbits(out, p++, final);
++lf[256];
var _a = hTree(lf, 15), dlt = _a[0], mlb = _a[1];
var _b = hTree(df, 15), ddt = _b[0], mdb = _b[1];
var _c = lc(dlt), lclt = _c[0], nlc = _c[1];
var _d = lc(ddt), lcdt = _d[0], ndc = _d[1];
var lcfreq = new u16(19);
for (var i = 0; i < lclt.length; ++i)
lcfreq[lclt[i] & 31]++;
for (var i = 0; i < lcdt.length; ++i)
lcfreq[lcdt[i] & 31]++;
var _e = hTree(lcfreq, 7), lct = _e[0], mlcb = _e[1];
var nlcc = 19;
for (; nlcc > 4 && !lct[clim[nlcc - 1]]; --nlcc)
;
var flen = (bl + 5) << 3;
var ftlen = clen(lf, flt) + clen(df, fdt) + eb;
var dtlen = clen(lf, dlt) + clen(df, ddt) + eb + 14 + 3 * nlcc + clen(lcfreq, lct) + (2 * lcfreq[16] + 3 * lcfreq[17] + 7 * lcfreq[18]);
if (flen <= ftlen && flen <= dtlen)
return wfblk(out, p, dat.subarray(bs, bs + bl));
var lm, ll, dm, dl;
wbits(out, p, 1 + (dtlen < ftlen)), p += 2;
if (dtlen < ftlen) {
lm = hMap(dlt, mlb, 0), ll = dlt, dm = hMap(ddt, mdb, 0), dl = ddt;
var llm = hMap(lct, mlcb, 0);
wbits(out, p, nlc - 257);
wbits(out, p + 5, ndc - 1);
wbits(out, p + 10, nlcc - 4);
p += 14;
for (var i = 0; i < nlcc; ++i)
wbits(out, p + 3 * i, lct[clim[i]]);
p += 3 * nlcc;
var lcts = [lclt, lcdt];
for (var it = 0; it < 2; ++it) {
var clct = lcts[it];
for (var i = 0; i < clct.length; ++i) {
var len = clct[i] & 31;
wbits(out, p, llm[len]), p += lct[len];
if (len > 15)
wbits(out, p, (clct[i] >>> 5) & 127), p += clct[i] >>> 12;
}
}
}
else {
lm = flm, ll = flt, dm = fdm, dl = fdt;
}
for (var i = 0; i < li; ++i) {
if (syms[i] > 255) {
var len = (syms[i] >>> 18) & 31;
wbits16(out, p, lm[len + 257]), p += ll[len + 257];
if (len > 7)
wbits(out, p, (syms[i] >>> 23) & 31), p += fleb[len];
var dst = syms[i] & 31;
wbits16(out, p, dm[dst]), p += dl[dst];
if (dst > 3)
wbits16(out, p, (syms[i] >>> 5) & 8191), p += fdeb[dst];
}
else {
wbits16(out, p, lm[syms[i]]), p += ll[syms[i]];
}
}
wbits16(out, p, lm[256]);
return p + ll[256];
};
// deflate options (nice << 13) | chain
var deo = /*#__PURE__*/ new u32([65540, 131080, 131088, 131104, 262176, 1048704, 1048832, 2114560, 2117632]);
// empty
var et = /*#__PURE__*/ new u8(0);
// compresses data into a raw DEFLATE buffer
var dflt = function (dat, lvl, plvl, pre, post, lst) {
var s = dat.length;
var o = new u8(pre + s + 5 * (1 + Math.ceil(s / 7000)) + post);
// writing to this writes to the output buffer
var w = o.subarray(pre, o.length - post);
var pos = 0;
if (!lvl || s < 8) {
for (var i = 0; i <= s; i += 65535) {
// end
var e = i + 65535;
if (e < s) {
// write full block
pos = wfblk(w, pos, dat.subarray(i, e));
}
else {
// write final block
w[i] = lst;
pos = wfblk(w, pos, dat.subarray(i, s));
}
}
}
else {
var opt = deo[lvl - 1];
var n = opt >>> 13, c = opt & 8191;
var msk_1 = (1 << plvl) - 1;
// prev 2-byte val map curr 2-byte val map
var prev = new u16(32768), head = new u16(msk_1 + 1);
var bs1_1 = Math.ceil(plvl / 3), bs2_1 = 2 * bs1_1;
var hsh = function (i) { return (dat[i] ^ (dat[i + 1] << bs1_1) ^ (dat[i + 2] << bs2_1)) & msk_1; };
// 24576 is an arbitrary number of maximum symbols per block
// 424 buffer for last block
var syms = new u32(25000);
// length/literal freq distance freq
var lf = new u16(288), df = new u16(32);
// l/lcnt exbits index l/lind waitdx bitpos
var lc_1 = 0, eb = 0, i = 0, li = 0, wi = 0, bs = 0;
for (; i < s; ++i) {
// hash value
// deopt when i > s - 3 - at end, deopt acceptable
var hv = hsh(i);
// index mod 32768 previous index mod
var imod = i & 32767, pimod = head[hv];
prev[imod] = pimod;
head[hv] = imod;
// We always should modify head and prev, but only add symbols if
// this data is not yet processed ("wait" for wait index)
if (wi <= i) {
// bytes remaining
var rem = s - i;
if ((lc_1 > 7000 || li > 24576) && rem > 423) {
pos = wblk(dat, w, 0, syms, lf, df, eb, li, bs, i - bs, pos);
li = lc_1 = eb = 0, bs = i;
for (var j = 0; j < 286; ++j)
lf[j] = 0;
for (var j = 0; j < 30; ++j)
df[j] = 0;
}
// len dist chain
var l = 2, d = 0, ch_1 = c, dif = (imod - pimod) & 32767;
if (rem > 2 && hv == hsh(i - dif)) {
var maxn = Math.min(n, rem) - 1;
var maxd = Math.min(32767, i);
// max possible length
// not capped at dif because decompressors implement "rolling" index population
var ml = Math.min(258, rem);
while (dif <= maxd && --ch_1 && imod != pimod) {
if (dat[i + l] == dat[i + l - dif]) {
var nl = 0;
for (; nl < ml && dat[i + nl] == dat[i + nl - dif]; ++nl)
;
if (nl > l) {
l = nl, d = dif;
// break out early when we reach "nice" (we are satisfied enough)
if (nl > maxn)
break;
// now, find the rarest 2-byte sequence within this
// length of literals and search for that instead.
// Much faster than just using the start
var mmd = Math.min(dif, nl - 2);
var md = 0;
for (var j = 0; j < mmd; ++j) {
var ti = (i - dif + j + 32768) & 32767;
var pti = prev[ti];
var cd = (ti - pti + 32768) & 32767;
if (cd > md)
md = cd, pimod = ti;
}
}
}
// check the previous match
imod = pimod, pimod = prev[imod];
dif += (imod - pimod + 32768) & 32767;
}
}
// d will be nonzero only when a match was found
if (d) {
// store both dist and len data in one Uint32
// Make sure this is recognized as a len/dist with 28th bit (2^28)
syms[li++] = 268435456 | (revfl[l] << 18) | revfd[d];
var lin = revfl[l] & 31, din = revfd[d] & 31;
eb += fleb[lin] + fdeb[din];
++lf[257 + lin];
++df[din];
wi = i + l;
++lc_1;
}
else {
syms[li++] = dat[i];
++lf[dat[i]];
}
}
}
pos = wblk(dat, w, lst, syms, lf, df, eb, li, bs, i - bs, pos);
// this is the easiest way to avoid needing to maintain state
if (!lst && pos & 7)
pos = wfblk(w, pos + 1, et);
}
return slc(o, 0, pre + shft(pos) + post);
};
// CRC32 table
var crct = /*#__PURE__*/ (function () {
var t = new u32(256);
for (var i = 0; i < 256; ++i) {
var c = i, k = 9;
while (--k)
c = ((c & 1) && 0xEDB88320) ^ (c >>> 1);
t[i] = c;
}
return t;
})();
// CRC32
var crc = function () {
var c = -1;
return {
p: function (d) {
// closures have awful performance
var cr = c;
for (var i = 0; i < d.length; ++i)
cr = crct[(cr & 255) ^ d[i]] ^ (cr >>> 8);
c = cr;
},
d: function () { return ~c; }
};
};
// deflate with opts
var dopt = function (dat, opt, pre, post, st) {
return dflt(dat, opt.level == null ? 6 : opt.level, opt.mem == null ? Math.ceil(Math.max(8, Math.min(13, Math.log(dat.length))) * 1.5) : (12 + opt.mem), pre, post, !st);
};
// Walmart object spread
var mrg = function (a, b) {
var o = {};
for (var k in a)
o[k] = a[k];
for (var k in b)
o[k] = b[k];
return o;
};
// write bytes
var wbytes = function (d, b, v) {
for (; v; ++b)
d[b] = v, v >>>= 8;
};
/**
* Compresses data with DEFLATE without any wrapper
* @param data The data to compress
* @param opts The compression options
* @returns The deflated version of the data
*/
function deflateSync(data, opts) {
return dopt(data, opts || {}, 0, 0);
}
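// Usage sketch: compress UTF-8 text into a raw DEFLATE buffer (level ranges
// from 0 to 9; 6 is used when no options are given):
//   const raw = deflateSync(strToU8('hello world'), { level: 9 });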
// flatten a directory structure
var fltn = function (d, p, t, o) {
for (var k in d) {
var val = d[k], n = p + k;
if (val instanceof u8)
t[n] = [val, o];
else if (Array.isArray(val))
t[n] = [val[0], mrg(o, val[1])];
else
fltn(val, n + '/', t, o);
}
};
// text encoder
var te = typeof TextEncoder != 'undefined' && /*#__PURE__*/ new TextEncoder();
// text decoder
var td = typeof TextDecoder != 'undefined' && /*#__PURE__*/ new TextDecoder();
// text decoder stream
var tds = 0;
try {
td.decode(et, { stream: true });
tds = 1;
}
catch (e) { }
/**
* Converts a string into a Uint8Array for use with compression/decompression methods
* @param str The string to encode
* @param latin1 Whether or not to interpret the data as Latin-1. This should
* not need to be true unless decoding a binary string.
* @returns The string encoded in UTF-8/Latin-1 binary
*/
function strToU8(str, latin1) {
if (latin1) {
var ar_1 = new u8(str.length);
for (var i = 0; i < str.length; ++i)
ar_1[i] = str.charCodeAt(i);
return ar_1;
}
if (te)
return te.encode(str);
var l = str.length;
var ar = new u8(str.length + (str.length >> 1));
var ai = 0;
var w = function (v) { ar[ai++] = v; };
for (var i = 0; i < l; ++i) {
if (ai + 5 > ar.length) {
var n = new u8(ai + 8 + ((l - i) << 1));
n.set(ar);
ar = n;
}
var c = str.charCodeAt(i);
if (c < 128 || latin1)
w(c);
else if (c < 2048)
w(192 | (c >> 6)), w(128 | (c & 63));
else if (c > 55295 && c < 57344)
c = 65536 + (c & 1023 << 10) | (str.charCodeAt(++i) & 1023),
w(240 | (c >> 18)), w(128 | ((c >> 12) & 63)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
else
w(224 | (c >> 12)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
}
return slc(ar, 0, ai);
}
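// Usage sketch:
//   const utf8Bytes = strToU8('héllo');          // UTF-8 encoded bytes
//   const binBytes = strToU8('\x01\x02', true);  // raw Latin-1/binary bytes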
// extra field length
var exfl = function (ex) {
var le = 0;
if (ex) {
for (var k in ex) {
var l = ex[k].length;
if (l > 65535)
throw 'extra field too long';
le += l + 4;
}
}
return le;
};
// write zip header
var wzh = function (d, b, f, fn, u, c, ce, co) {
var fl = fn.length, ex = f.extra, col = co && co.length;
var exl = exfl(ex);
wbytes(d, b, ce != null ? 0x2014B50 : 0x4034B50), b += 4;
if (ce != null)
d[b++] = 20, d[b++] = f.os;
d[b] = 20, b += 2; // spec compliance? what's that?
d[b++] = (f.flag << 1) | (c == null && 8), d[b++] = u && 8;
d[b++] = f.compression & 255, d[b++] = f.compression >> 8;
var dt = new Date(f.mtime == null ? Date.now() : f.mtime), y = dt.getFullYear() - 1980;
if (y < 0 || y > 119)
throw 'date not in range 1980-2099';
wbytes(d, b, (y << 25) | ((dt.getMonth() + 1) << 21) | (dt.getDate() << 16) | (dt.getHours() << 11) | (dt.getMinutes() << 5) | (dt.getSeconds() >>> 1)), b += 4;
if (c != null) {
wbytes(d, b, f.crc);
wbytes(d, b + 4, c);
wbytes(d, b + 8, f.size);
}
wbytes(d, b + 12, fl);
wbytes(d, b + 14, exl), b += 16;
if (ce != null) {
wbytes(d, b, col);
wbytes(d, b + 6, f.attrs);
wbytes(d, b + 10, ce), b += 14;
}
d.set(fn, b);
b += fl;
if (exl) {
for (var k in ex) {
var exf = ex[k], l = exf.length;
wbytes(d, b, +k);
wbytes(d, b + 2, l);
d.set(exf, b + 4), b += 4 + l;
}
}
if (col)
d.set(co, b), b += col;
return b;
};
// write zip footer (end of central directory)
var wzf = function (o, b, c, d, e) {
wbytes(o, b, 0x6054B50); // skip disk
wbytes(o, b + 8, c);
wbytes(o, b + 10, c);
wbytes(o, b + 12, d);
wbytes(o, b + 16, e);
};
/**
* Synchronously creates a ZIP file. Prefer using `zip` for better performance
* with more than one file.
* @param data The directory structure for the ZIP archive
* @param opts The main options, merged with per-file options
* @returns The generated ZIP archive
*/
function zipSync(data, opts) {
if (!opts)
opts = {};
var r = {};
var files = [];
fltn(data, '', r, opts);
var o = 0;
var tot = 0;
for (var fn in r) {
var _a = r[fn], file = _a[0], p = _a[1];
var compression = p.level == 0 ? 0 : 8;
var f = strToU8(fn), s = f.length;
var com = p.comment, m = com && strToU8(com), ms = m && m.length;
var exl = exfl(p.extra);
if (s > 65535)
throw 'filename too long';
var d = compression ? deflateSync(file, p) : file, l = d.length;
var c = crc();
c.p(file);
files.push(mrg(p, {
size: file.length,
crc: c.d(),
c: d,
f: f,
m: m,
u: s != fn.length || (m && (com.length != ms)),
o: o,
compression: compression
}));
o += 30 + s + exl + l;
tot += 76 + 2 * (s + exl) + (ms || 0) + l;
}
var out = new u8(tot + 22), oe = o, cdl = tot - o;
for (var i = 0; i < files.length; ++i) {
var f = files[i];
wzh(out, f.o, f, f.f, f.u, f.c.length);
var badd = 30 + f.f.length + exfl(f.extra);
out.set(f.c, f.o + badd);
wzh(out, o, f, f.f, f.u, f.c.length, f.o, f.m), o += 16 + badd + (f.m ? f.m.length : 0);
}
wzf(out, o, files.length, cdl, oe);
return out;
}
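// Usage sketch mirroring how the USDZ exporter below calls zipSync: values are
// Uint8Arrays, or [data, options] pairs for per-file options (`jpegBytes` is an
// illustrative variable):
//   const archive = zipSync({
//     'model.usda': strToU8('#usda 1.0'),
//     'textures/Texture_1.jpg': [jpegBytes, { level: 0 }]
//   }, { level: 0 });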
class USDZExporter {
async parse( scene ) {
let output = buildHeader();
const materials = {};
const textures = {};
scene.traverse( ( object ) => {
if ( object.isMesh ) {
const geometry = object.geometry;
const material = object.material;
materials[ material.uuid ] = material;
if ( material.map !== null ) textures[ material.map.uuid ] = material.map;
if ( material.normalMap !== null ) textures[ material.normalMap.uuid ] = material.normalMap;
if ( material.aoMap !== null ) textures[ material.aoMap.uuid ] = material.aoMap;
if ( material.roughnessMap !== null ) textures[ material.roughnessMap.uuid ] = material.roughnessMap;
if ( material.metalnessMap !== null ) textures[ material.metalnessMap.uuid ] = material.metalnessMap;
if ( material.emissiveMap !== null ) textures[ material.emissiveMap.uuid ] = material.emissiveMap;
output += buildXform( object, buildMesh( geometry, material ) );
}
} );
output += buildMaterials( materials );
output += buildTextures( textures );
const files = { 'model.usda': strToU8( output ) };
for ( const uuid in textures ) {
const texture = textures[ uuid ];
files[ 'textures/Texture_' + texture.id + '.jpg' ] = await imgToU8( texture.image );
}
// 64 byte alignment
// https://github.com/101arrowz/fflate/issues/39#issuecomment-777263109
let offset = 0;
for ( const filename in files ) {
const file = files[ filename ];
const headerSize = 34 + filename.length;
offset += headerSize;
const offsetMod64 = offset & 63;
if ( offsetMod64 !== 4 ) {
const padLength = 64 - offsetMod64;
const padding = new Uint8Array( padLength );
files[ filename ] = [ file, { extra: { 12345: padding } } ];
}
offset = file.length;
}
return zipSync( files, { level: 0 } );
}
}
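// Usage sketch mirroring prepareUSDZ() further below: parse() returns the
// zipped USDZ bytes, which can be wrapped in a Blob for AR Quick Look or
// download (`scene` is an illustrative THREE.Object3D graph of meshes):
//   const exporter = new USDZExporter();
//   const usdzBytes = await exporter.parse( scene );
//   const blob = new Blob( [ usdzBytes ], { type: 'model/vnd.usdz+zip' } );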
async function imgToU8( image ) {
if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) ||
( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) ||
( typeof OffscreenCanvas !== 'undefined' && image instanceof OffscreenCanvas ) ||
( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) {
const scale = 1024 / Math.max( image.width, image.height );
const canvas = document.createElement( 'canvas' );
canvas.width = image.width * Math.min( 1, scale );
canvas.height = image.height * Math.min( 1, scale );
const context = canvas.getContext( '2d' );
context.drawImage( image, 0, 0, canvas.width, canvas.height );
const blob = await new Promise( resolve => canvas.toBlob( resolve, 'image/jpeg', 1 ) );
return new Uint8Array( await blob.arrayBuffer() );
}
}
//
const PRECISION = 7;
function buildHeader() {
return `#usda 1.0
(
customLayerData = {
string creator = "Three.js USDZExporter"
}
metersPerUnit = 1
upAxis = "Y"
)
`;
}
// Xform
function buildXform( object, define ) {
const name = 'Object_' + object.id;
const transform = buildMatrix( object.matrixWorld );
return `def Xform "${ name }"
{
matrix4d xformOp:transform = ${ transform }
uniform token[] xformOpOrder = ["xformOp:transform"]
${ define }
}
`;
}
function buildMatrix( matrix ) {
const array = matrix.elements;
return `( ${ buildMatrixRow( array, 0 ) }, ${ buildMatrixRow( array, 4 ) }, ${ buildMatrixRow( array, 8 ) }, ${ buildMatrixRow( array, 12 ) } )`;
}
function buildMatrixRow( array, offset ) {
return `(${ array[ offset + 0 ] }, ${ array[ offset + 1 ] }, ${ array[ offset + 2 ] }, ${ array[ offset + 3 ] })`;
}
// Mesh
function buildMesh( geometry, material ) {
const name = 'Geometry_' + geometry.id;
const attributes = geometry.attributes;
const count = attributes.position.count;
if ( 'uv2' in attributes ) {
console.warn( 'THREE.USDZExporter: uv2 not supported yet.' );
}
return `def Mesh "${ name }"
{
int[] faceVertexCounts = [${ buildMeshVertexCount( geometry ) }]
int[] faceVertexIndices = [${ buildMeshVertexIndices( geometry ) }]
rel material:binding = </Materials/Material_${ material.id }>
normal3f[] normals = [${ buildVector3Array( attributes.normal, count )}] (
interpolation = "vertex"
)
point3f[] points = [${ buildVector3Array( attributes.position, count )}]
float2[] primvars:st = [${ buildVector2Array( attributes.uv, count )}] (
interpolation = "vertex"
)
uniform token subdivisionScheme = "none"
}
`;
}
function buildMeshVertexCount( geometry ) {
const count = geometry.index !== null ? geometry.index.array.length : geometry.attributes.position.count;
return Array( count / 3 ).fill( 3 ).join( ', ' );
}
function buildMeshVertexIndices( geometry ) {
if ( geometry.index !== null ) {
return geometry.index.array.join( ', ' );
}
const array = [];
const length = geometry.attributes.position.count;
for ( let i = 0; i < length; i ++ ) {
array.push( i );
}
return array.join( ', ' );
}
function buildVector3Array( attribute, count ) {
if ( attribute === undefined ) {
console.warn( 'USDZExporter: Normals missing.' );
return Array( count ).fill( '(0, 0, 0)' ).join( ', ' );
}
const array = [];
const data = attribute.array;
for ( let i = 0; i < data.length; i += 3 ) {
array.push( `(${ data[ i + 0 ].toPrecision( PRECISION ) }, ${ data[ i + 1 ].toPrecision( PRECISION ) }, ${ data[ i + 2 ].toPrecision( PRECISION ) })` );
}
return array.join( ', ' );
}
function buildVector2Array( attribute, count ) {
if ( attribute === undefined ) {
console.warn( 'USDZExporter: UVs missing.' );
return Array( count ).fill( '(0, 0)' ).join( ', ' );
}
const array = [];
const data = attribute.array;
for ( let i = 0; i < data.length; i += 2 ) {
array.push( `(${ data[ i + 0 ].toPrecision( PRECISION ) }, ${ 1 - data[ i + 1 ].toPrecision( PRECISION ) })` );
}
return array.join( ', ' );
}
// Materials
function buildMaterials( materials ) {
const array = [];
for ( const uuid in materials ) {
const material = materials[ uuid ];
array.push( buildMaterial( material ) );
}
return `def "Materials"
{
${ array.join( '' ) }
}
`;
}
function buildMaterial( material ) {
// https://graphics.pixar.com/usd/docs/UsdPreviewSurface-Proposal.html
const pad = ' ';
const parameters = [];
if ( material.map !== null ) {
parameters.push( `${ pad }color3f inputs:diffuseColor.connect = </Textures/Texture_${ material.map.id }.outputs:rgb>` );
} else {
parameters.push( `${ pad }color3f inputs:diffuseColor = ${ buildColor( material.color ) }` );
}
if ( material.emissiveMap !== null ) {
parameters.push( `${ pad }color3f inputs:emissiveColor.connect = </Textures/Texture_${ material.emissiveMap.id }.outputs:rgb>` );
} else if ( material.emissive.getHex() > 0 ) {
parameters.push( `${ pad }color3f inputs:emissiveColor = ${ buildColor( material.emissive ) }` );
}
if ( material.normalMap !== null ) {
parameters.push( `${ pad }normal3f inputs:normal.connect = </Textures/Texture_${ material.normalMap.id }.outputs:rgb>` );
}
if ( material.aoMap !== null ) {
parameters.push( `${ pad }float inputs:occlusion.connect = </Textures/Texture_${ material.aoMap.id }.outputs:r>` );
}
if ( material.roughnessMap !== null ) {
parameters.push( `${ pad }float inputs:roughness.connect = </Textures/Texture_${ material.roughnessMap.id }.outputs:g>` );
} else {
parameters.push( `${ pad }float inputs:roughness = ${ material.roughness }` );
}
if ( material.metalnessMap !== null ) {
parameters.push( `${ pad }float inputs:metallic.connect = </Textures/Texture_${ material.metalnessMap.id }.outputs:b>` );
} else {
parameters.push( `${ pad }float inputs:metallic = ${ material.metalness }` );
}
return `
def Material "Material_${ material.id }"
{
token outputs:surface.connect = </Materials/Material_${ material.id }/PreviewSurface.outputs:surface>
def Shader "PreviewSurface"
{
uniform token info:id = "UsdPreviewSurface"
${ parameters.join( '\n' ) }
int inputs:useSpecularWorkflow = 0
token outputs:surface
}
}
`;
}
function buildTextures( textures ) {
const array = [];
for ( const uuid in textures ) {
const texture = textures[ uuid ];
array.push( buildTexture( texture ) );
}
return `def "Textures"
{
${ array.join( '' ) }
}
`;
}
function buildTexture( texture ) {
return `
def Shader "Texture_${ texture.id }"
{
uniform token info:id = "UsdUVTexture"
asset inputs:file = @textures/Texture_${ texture.id }.jpg@
token inputs:wrapS = "repeat"
token inputs:wrapT = "repeat"
float outputs:r
float outputs:g
float outputs:b
float3 outputs:rgb
}
`;
}
function buildColor( color ) {
return `(${ color.r }, ${ color.g }, ${ color.b })`;
}
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* For our purposes, an enumeration is a fixed set of CSS-expression-compatible
* names. When serialized, a selected subset of the members may be specified as
* whitespace-separated strings. An enumeration deserializer is a function that
* parses a serialized subset of an enumeration and returns any members that are
* found as a Set.
*
* The following example will produce a deserializer for the days of the
* week:
*
* const deserializeDaysOfTheWeek = enumerationDeserializer([
* 'Monday',
* 'Tuesday',
* 'Wednesday',
* 'Thursday',
* 'Friday',
* 'Saturday',
* 'Sunday'
* ]);
*/
const enumerationDeserializer = (allowedNames) => (valueString) => {
try {
const expressions = parseExpressions(valueString);
const names = (expressions.length ? expressions[0].terms : [])
.filter((valueNode) => valueNode && valueNode.type === 'ident')
.map(valueNode => valueNode.value)
.filter(name => allowedNames.indexOf(name) > -1);
// NOTE(cdata): IE11 does not support constructing a Set directly from
// an iterable, so we need to manually add all the items:
const result = new Set();
for (const name of names) {
result.add(name);
}
return result;
}
catch (_error) {
}
return new Set();
};
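// Usage sketch continuing the days-of-the-week example above:
//   const days = deserializeDaysOfTheWeek('Monday Wednesday nonsense');
//   days.has('Monday');    // true
//   days.has('nonsense');  // false, not among the allowed names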
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$5 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof undefined === "function") r = undefined(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
let isWebXRBlocked = false;
let isSceneViewerBlocked = false;
const noArViewerSigil = '#model-viewer-no-ar-fallback';
const deserializeARModes = enumerationDeserializer(['quick-look', 'scene-viewer', 'webxr', 'none']);
const DEFAULT_AR_MODES = 'webxr scene-viewer';
const ARMode = {
QUICK_LOOK: 'quick-look',
SCENE_VIEWER: 'scene-viewer',
WEBXR: 'webxr',
NONE: 'none'
};
const $arButtonContainer = Symbol('arButtonContainer');
const $enterARWithWebXR = Symbol('enterARWithWebXR');
const $openSceneViewer = Symbol('openSceneViewer');
const $openIOSARQuickLook = Symbol('openIOSARQuickLook');
const $canActivateAR = Symbol('canActivateAR');
const $arMode = Symbol('arMode');
const $arModes = Symbol('arModes');
const $arAnchor = Symbol('arAnchor');
const $preload = Symbol('preload');
const $onARButtonContainerClick = Symbol('onARButtonContainerClick');
const $onARStatus = Symbol('onARStatus');
const $onARTracking = Symbol('onARTracking');
const $onARTap = Symbol('onARTap');
const $selectARMode = Symbol('selectARMode');
const $triggerLoad = Symbol('triggerLoad');
const ARMixin = (ModelViewerElement) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
class ARModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this.ar = false;
this.arScale = 'auto';
this.arPlacement = 'floor';
this.arModes = DEFAULT_AR_MODES;
this.iosSrc = null;
this[_a] = false;
// TODO: Add this to the shadow root as part of this mixin's
// implementation:
this[_b] = this.shadowRoot.querySelector('.ar-button');
this[_c] = document.createElement('a');
this[_d] = new Set();
this[_e] = ARMode.NONE;
this[_f] = false;
this[_g] = (event) => {
event.preventDefault();
this.activateAR();
};
this[_h] = ({ status }) => {
if (status === ARStatus.NOT_PRESENTING ||
this[$renderer].arRenderer.presentedScene === this[$scene]) {
this.setAttribute('ar-status', status);
this.dispatchEvent(new CustomEvent('ar-status', { detail: { status } }));
if (status === ARStatus.NOT_PRESENTING) {
this.removeAttribute('ar-tracking');
}
else if (status === ARStatus.SESSION_STARTED) {
this.setAttribute('ar-tracking', ARTracking.TRACKING);
}
}
};
this[_j] = ({ status }) => {
this.setAttribute('ar-tracking', status);
this.dispatchEvent(new CustomEvent('ar-tracking', { detail: { status } }));
};
this[_k] = (event) => {
if (event.data == '_apple_ar_quicklook_button_tapped') {
this.dispatchEvent(new CustomEvent('quick-look-button-tapped'));
}
};
}
get canActivateAR() {
return this[$arMode] !== ARMode.NONE;
}
connectedCallback() {
super.connectedCallback();
this[$renderer].arRenderer.addEventListener('status', this[$onARStatus]);
this.setAttribute('ar-status', ARStatus.NOT_PRESENTING);
this[$renderer].arRenderer.addEventListener('tracking', this[$onARTracking]);
this[$arAnchor].addEventListener('message', this[$onARTap]);
}
disconnectedCallback() {
super.disconnectedCallback();
this[$renderer].arRenderer.removeEventListener('status', this[$onARStatus]);
this[$renderer].arRenderer.removeEventListener('tracking', this[$onARTracking]);
this[$arAnchor].removeEventListener('message', this[$onARTap]);
}
async update(changedProperties) {
super.update(changedProperties);
if (changedProperties.has('arScale')) {
this[$scene].canScale = this.arScale !== 'fixed';
}
if (changedProperties.has('arPlacement')) {
this[$scene].setShadowIntensity(this[$scene].shadowIntensity);
this[$needsRender]();
}
if (!changedProperties.has('ar') && !changedProperties.has('arModes') &&
!changedProperties.has('iosSrc')) {
return;
}
if (changedProperties.has('arModes')) {
this[$arModes] = deserializeARModes(this.arModes);
}
this[$selectARMode]();
}
/**
         * Activates AR. Note that for any mode that is not WebXR-based, this
         * method most likely has to be called synchronously, from within a user
         * interaction handler. Otherwise, attempts to activate modes that
         * require user interaction will most likely be ignored.
*/
async activateAR() {
switch (this[$arMode]) {
case ARMode.QUICK_LOOK:
this[$openIOSARQuickLook]();
break;
case ARMode.WEBXR:
await this[$enterARWithWebXR]();
break;
case ARMode.SCENE_VIEWER:
this[$openSceneViewer]();
break;
default:
console.warn('No AR Mode can be activated. This is probably due to missing \
configuration or device capabilities');
break;
}
}
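        // Usage sketch: AR generally must be triggered from a user gesture
        // (`viewer` and the '#ar-button' selector are illustrative):
        //   document.querySelector('#ar-button').addEventListener('click', () => {
        //     if (viewer.canActivateAR) {
        //       viewer.activateAR();
        //     }
        //   });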
async [(_a = $canActivateAR, _b = $arButtonContainer, _c = $arAnchor, _d = $arModes, _e = $arMode, _f = $preload, _g = $onARButtonContainerClick, _h = $onARStatus, _j = $onARTracking, _k = $onARTap, $selectARMode)]() {
this[$arMode] = ARMode.NONE;
if (this.ar) {
const arModes = [];
this[$arModes].forEach((value) => {
arModes.push(value);
});
for (const value of arModes) {
if (value === 'webxr' && IS_WEBXR_AR_CANDIDATE && !isWebXRBlocked &&
await this[$renderer].arRenderer.supportsPresentation()) {
this[$arMode] = ARMode.WEBXR;
break;
}
else if (value === 'scene-viewer' && IS_SCENEVIEWER_CANDIDATE &&
!isSceneViewerBlocked) {
this[$arMode] = ARMode.SCENE_VIEWER;
break;
}
else if (value === 'quick-look' && IS_AR_QUICKLOOK_CANDIDATE) {
this[$arMode] = ARMode.QUICK_LOOK;
break;
}
}
                // If no ar-mode could be selected but ios-src is set, fall back
                // to Quick Look on devices that support it.
if (!this.canActivateAR && this.iosSrc != null &&
IS_AR_QUICKLOOK_CANDIDATE) {
this[$arMode] = ARMode.QUICK_LOOK;
}
}
if (this.canActivateAR) {
this[$arButtonContainer].classList.add('enabled');
this[$arButtonContainer].addEventListener('click', this[$onARButtonContainerClick]);
}
else if (this[$arButtonContainer].classList.contains('enabled')) {
this[$arButtonContainer].removeEventListener('click', this[$onARButtonContainerClick]);
this[$arButtonContainer].classList.remove('enabled');
// If AR went from working to not, notify the element.
const status = ARStatus.FAILED;
this.setAttribute('ar-status', status);
this.dispatchEvent(new CustomEvent('ar-status', { detail: { status } }));
}
}
async [$enterARWithWebXR]() {
console.log('Attempting to present in AR with WebXR...');
await this[$triggerLoad]();
try {
this[$arButtonContainer].removeEventListener('click', this[$onARButtonContainerClick]);
const { arRenderer } = this[$renderer];
arRenderer.placeOnWall = this.arPlacement === 'wall';
await arRenderer.present(this[$scene]);
}
catch (error) {
console.warn('Error while trying to present in AR with WebXR');
console.error(error);
await this[$renderer].arRenderer.stopPresenting();
isWebXRBlocked = true;
console.warn('Falling back to next ar-mode');
await this[$selectARMode]();
this.activateAR();
}
finally {
this[$selectARMode]();
}
}
async [$triggerLoad]() {
if (!this.loaded) {
this[$preload] = true;
this[$updateSource]();
await waitForEvent(this, 'load');
this[$preload] = false;
}
}
[$shouldAttemptPreload]() {
return super[$shouldAttemptPreload]() || this[$preload];
}
/**
         * Resolves the element's src against the current location and attempts
         * to launch Scene Viewer on the current device via an Android intent URL.
*/
[$openSceneViewer]() {
const location = self.location.toString();
const locationUrl = new URL(location);
const modelUrl = new URL(this.src, location);
const params = new URLSearchParams(modelUrl.search);
locationUrl.hash = noArViewerSigil;
// modelUrl can contain title/link/sound etc.
params.set('mode', 'ar_preferred');
if (!params.has('disable_occlusion')) {
params.set('disable_occlusion', 'true');
}
if (this.arScale === 'fixed') {
params.set('resizable', 'false');
}
if (this.arPlacement === 'wall') {
params.set('enable_vertical_placement', 'true');
}
if (params.has('sound')) {
const soundUrl = new URL(params.get('sound'), location);
params.set('sound', soundUrl.toString());
}
if (params.has('link')) {
const linkUrl = new URL(params.get('link'), location);
params.set('link', linkUrl.toString());
}
const intent = `intent://arvr.google.com/scene-viewer/1.0?${params.toString() + '&file=' +
encodeURIComponent(modelUrl
.toString())}#Intent;scheme=https;package=com.google.ar.core;action=android.intent.action.VIEW;S.browser_fallback_url=${encodeURIComponent(locationUrl.toString())};end;`;
const undoHashChange = () => {
if (self.location.hash === noArViewerSigil) {
isSceneViewerBlocked = true;
// The new history will be the current URL with a new hash.
// Go back one step so that we reset to the expected URL.
// NOTE(cdata): this should not invoke any browser-level navigation
// because hash-only changes modify the URL in-place without
// navigating:
self.history.back();
console.warn('Error while trying to present in AR with Scene Viewer');
console.warn('Falling back to next ar-mode');
this[$selectARMode]();
// Would be nice to activateAR() here, but webXR fails due to not
// seeing a user activation.
}
};
self.addEventListener('hashchange', undoHashChange, { once: true });
this[$arAnchor].setAttribute('href', intent);
console.log('Attempting to present in AR with Scene Viewer...');
this[$arAnchor].click();
}
/**
         * Uses the ios-src USDZ URL (or generates a USDZ on the fly) and sets the
         * appropriate anchor attributes so that Safari on iOS launches AR Quick Look.
*/
async [$openIOSARQuickLook]() {
const generateUsdz = !this.iosSrc;
this[$arButtonContainer].classList.remove('enabled');
const objectURL = generateUsdz ? await this.prepareUSDZ() : this.iosSrc;
const modelUrl = new URL(objectURL, self.location.toString());
if (this.arScale === 'fixed') {
if (modelUrl.hash) {
modelUrl.hash += '&';
}
modelUrl.hash += 'allowsContentScaling=0';
}
const anchor = this[$arAnchor];
anchor.setAttribute('rel', 'ar');
const img = document.createElement('img');
anchor.appendChild(img);
anchor.setAttribute('href', modelUrl.toString());
if (generateUsdz) {
anchor.setAttribute('download', 'model.usdz');
}
console.log('Attempting to present in AR with Quick Look...');
anchor.click();
anchor.removeChild(img);
if (generateUsdz) {
URL.revokeObjectURL(objectURL);
}
this[$arButtonContainer].classList.add('enabled');
}
async prepareUSDZ() {
const updateSourceProgress = this[$progressTracker].beginActivity();
await this[$triggerLoad]();
const scene = this[$scene];
const shadow = scene.shadow;
let visible = false;
// Remove shadow from export
if (shadow != null) {
visible = shadow.visible;
shadow.visible = false;
}
updateSourceProgress(0.2);
const exporter = new USDZExporter();
const arraybuffer = await exporter.parse(scene.modelContainer);
const blob = new Blob([arraybuffer], {
type: 'model/vnd.usdz+zip',
});
const url = URL.createObjectURL(blob);
updateSourceProgress(1);
if (shadow != null) {
shadow.visible = visible;
}
return url;
}
}
__decorate$5([
property({ type: Boolean, attribute: 'ar' })
], ARModelViewerElement.prototype, "ar", void 0);
__decorate$5([
property({ type: String, attribute: 'ar-scale' })
], ARModelViewerElement.prototype, "arScale", void 0);
__decorate$5([
property({ type: String, attribute: 'ar-placement' })
], ARModelViewerElement.prototype, "arPlacement", void 0);
__decorate$5([
property({ type: String, attribute: 'ar-modes' })
], ARModelViewerElement.prototype, "arModes", void 0);
__decorate$5([
property({ type: String, attribute: 'ios-src' })
], ARModelViewerElement.prototype, "iosSrc", void 0);
return ARModelViewerElement;
};
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$6, _b$5, _c$2;
const $evaluate = Symbol('evaluate');
const $lastValue = Symbol('lastValue');
/**
* An Evaluator is used to derive a computed style from part (or all) of a CSS
* expression AST. This construct is particularly useful for complex ASTs
* containing function calls such as calc, var and env. Such styles could be
* costly to re-evaluate on every frame (and in some cases we may try to do
* that). The Evaluator construct allows us to mark sub-trees of the AST as
* constant, so that only the dynamic parts are re-evaluated. It also separates
* one-time AST preparation work from work that necessarily has to happen upon
* each evaluation.
*/
class Evaluator {
constructor() {
this[_a$6] = null;
}
/**
* An Evaluatable is a NumberNode or an Evaluator that evaluates a NumberNode
* as the result of invoking its evaluate method. This is mainly used to
* ensure that CSS function nodes are cast to the corresponding Evaluators
* that will resolve the result of the function, but is also used to ensure
* that a percentage nested at arbitrary depth in the expression will always
* be evaluated against the correct basis.
*/
static evaluatableFor(node, basis = ZERO) {
if (node instanceof Evaluator) {
return node;
}
if (node.type === 'number') {
if (node.unit === '%') {
return new PercentageEvaluator(node, basis);
}
return node;
}
switch (node.name.value) {
case 'calc':
return new CalcEvaluator(node, basis);
case 'env':
return new EnvEvaluator(node);
}
return ZERO;
}
/**
* If the input is an Evaluator, returns the result of evaluating it.
* Otherwise, returns the input.
*
* This is a helper to aide in resolving a NumberNode without conditionally
* checking if the Evaluatable is an Evaluator everywhere.
*/
static evaluate(evaluatable) {
if (evaluatable instanceof Evaluator) {
return evaluatable.evaluate();
}
return evaluatable;
}
/**
* If the input is an Evaluator, returns the value of its isConstant property.
* Returns true for all other input values.
*/
static isConstant(evaluatable) {
if (evaluatable instanceof Evaluator) {
return evaluatable.isConstant;
}
return true;
}
/**
* This method applies a set of structured intrinsic metadata to an evaluated
* result from a parsed CSS-like string of expressions. Intrinsics provide
* sufficient metadata (e.g., basis values, analogs for keywords) such that
* omitted values in the input string can be backfilled, and keywords can be
* converted to concrete numbers.
*
* The result of applying intrinsics is a tuple of NumberNode values whose
* units match the units used by the basis of the intrinsics.
*
* The following is a high-level description of how intrinsics are applied:
*
* 1. Determine the value of 'auto' for the current term
* 2. If there is no corresponding input value for this term, substitute the
* 'auto' value.
* 3. If the term is an IdentNode, treat it as a keyword and perform the
* appropriate substitution.
* 4. If the term is still null, fallback to the 'auto' value
* 5. If the term is a percentage, apply it to the basis and return that
* value
* 6. Normalize the unit of the term
* 7. If the term's unit does not match the basis unit, return the basis
* value
* 8. Return the term as is
*/
static applyIntrinsics(evaluated, intrinsics) {
const { basis, keywords } = intrinsics;
const { auto } = keywords;
return basis.map((basisNode, index) => {
// Use an auto value if we have it, otherwise the auto value is the basis:
const autoSubstituteNode = auto[index] == null ? basisNode : auto[index];
// If the evaluated nodes do not have a node at the current
// index, fallback to the "auto" substitute right away:
let evaluatedNode = evaluated[index] ? evaluated[index] : autoSubstituteNode;
// Any ident node is considered a keyword:
if (evaluatedNode.type === 'ident') {
const keyword = evaluatedNode.value;
// Substitute any keywords for concrete values first:
if (keyword in keywords) {
evaluatedNode = keywords[keyword][index];
}
}
// If we don't have a NumberNode at this point, fall back to whatever
// is specified for auto:
if (evaluatedNode == null || evaluatedNode.type === 'ident') {
evaluatedNode = autoSubstituteNode;
}
// For percentages, we always apply the percentage to the basis value:
if (evaluatedNode.unit === '%') {
return numberNode(evaluatedNode.number / 100 * basisNode.number, basisNode.unit);
}
// Otherwise, normalize whatever we have:
evaluatedNode = normalizeUnit(evaluatedNode, basisNode);
// If the normalized units do not match, return the basis as a fallback:
if (evaluatedNode.unit !== basisNode.unit) {
return basisNode;
}
// Finally, return the evaluated node with intrinsics applied:
return evaluatedNode;
});
}
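    // Worked sketch of applyIntrinsics (values are illustrative): given
    //   intrinsics = {
    //     basis: [numberNode(1, 'm'), numberNode(1, 'm')],
    //     keywords: { auto: [null, numberNode(2, 'm')] }
    //   }
    // and evaluated = [numberNode(50, '%')], the result is
    // [numberNode(0.5, 'm'), numberNode(2, 'm')]: the percentage is applied to
    // the first basis value and the missing second term falls back to 'auto'.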
/**
* If true, the Evaluator will only evaluate its AST one time. If false, the
* Evaluator will re-evaluate the AST each time that the public evaluate
* method is invoked.
*/
get isConstant() {
return false;
}
/**
* Evaluate the Evaluator and return the result. If the Evaluator is constant,
* the corresponding AST will only be evaluated once, and the result of
* evaluating it the first time will be returned on all subsequent
* evaluations.
*/
evaluate() {
if (!this.isConstant || this[$lastValue] == null) {
this[$lastValue] = this[$evaluate]();
}
return this[$lastValue];
}
}
_a$6 = $lastValue;
const $percentage = Symbol('percentage');
const $basis = Symbol('basis');
/**
* A PercentageEvaluator scales a given basis value by a given percentage value.
* The evaluated result is always considered to be constant.
*/
class PercentageEvaluator extends Evaluator {
constructor(percentage, basis) {
super();
this[$percentage] = percentage;
this[$basis] = basis;
}
get isConstant() {
return true;
}
[$evaluate]() {
return numberNode(this[$percentage].number / 100 * this[$basis].number, this[$basis].unit);
}
}
const $identNode = Symbol('identNode');
/**
* Evaluator for CSS-like env() functions. Currently, only one environment
* variable is accepted as an argument for such functions: window-scroll-y.
*
* The env() Evaluator is explicitly dynamic because it always refers to
* external state that changes as the user scrolls, so it should always be
* re-evaluated to ensure we get the most recent value.
*
* Some important notes about this feature include:
*
* - There is no such thing as a "window-scroll-y" CSS environment variable in
* any stable browser at the time that this comment is being written.
* - The actual CSS env() function accepts a second argument as a fallback for
* the case that the specified first argument isn't set; our syntax does not
* support this second argument.
*
* @see https://developer.mozilla.org/en-US/docs/Web/CSS/env
*/
class EnvEvaluator extends Evaluator {
constructor(envFunction) {
super();
this[_b$5] = null;
const identNode = envFunction.arguments.length ? envFunction.arguments[0].terms[0] : null;
if (identNode != null && identNode.type === 'ident') {
this[$identNode] = identNode;
}
}
get isConstant() {
return false;
}
;
[(_b$5 = $identNode, $evaluate)]() {
if (this[$identNode] != null) {
switch (this[$identNode].value) {
case 'window-scroll-y':
const verticalScrollPosition = window.pageYOffset;
const verticalScrollMax = Math.max(document.body.scrollHeight, document.body.offsetHeight, document.documentElement.clientHeight, document.documentElement.scrollHeight, document.documentElement.offsetHeight);
const scrollY = verticalScrollPosition /
(verticalScrollMax - window.innerHeight) ||
0;
return { type: 'number', number: scrollY, unit: null };
}
}
return ZERO;
}
}
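/*
 * Worked example (illustrative, not part of the library): with a 3000px-tall
 * document, a 1000px-tall viewport and window.pageYOffset === 500, the
 * 'window-scroll-y' case above yields 500 / (3000 - 1000) = 0.25, i.e. a
 * unitless number between 0 (scrolled to the top) and 1 (fully scrolled).
 */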
const IS_MULTIPLICATION_RE = /[\*\/]/;
const $evaluator = Symbol('evaluator');
/**
* Evaluator for CSS-like calc() functions. Our implementation of calc()
* evaluation currently supports nested function calls, an unlimited number of
* terms, and all four algebraic operators (+, -, * and /).
*
* The Evaluator is marked as constant unless the calc expression contains an
* internal env expression at any depth, in which case it will be marked as
* dynamic.
*
* @see https://www.w3.org/TR/css-values-3/#calc-syntax
* @see https://developer.mozilla.org/en-US/docs/Web/CSS/calc
*/
class CalcEvaluator extends Evaluator {
constructor(calcFunction, basis = ZERO) {
super();
this[_c$2] = null;
if (calcFunction.arguments.length !== 1) {
return;
}
const terms = calcFunction.arguments[0].terms.slice();
const secondOrderTerms = [];
while (terms.length) {
const term = terms.shift();
if (secondOrderTerms.length > 0) {
const previousTerm = secondOrderTerms[secondOrderTerms.length - 1];
if (previousTerm.type === 'operator' &&
IS_MULTIPLICATION_RE.test(previousTerm.value)) {
const operator = secondOrderTerms.pop();
const leftValue = secondOrderTerms.pop();
if (leftValue == null) {
return;
}
secondOrderTerms.push(new OperatorEvaluator(operator, Evaluator.evaluatableFor(leftValue, basis), Evaluator.evaluatableFor(term, basis)));
continue;
}
}
secondOrderTerms.push(term.type === 'operator' ? term :
Evaluator.evaluatableFor(term, basis));
}
while (secondOrderTerms.length > 2) {
const [left, operator, right] = secondOrderTerms.splice(0, 3);
if (operator.type !== 'operator') {
return;
}
secondOrderTerms.unshift(new OperatorEvaluator(operator, Evaluator.evaluatableFor(left, basis), Evaluator.evaluatableFor(right, basis)));
}
// There should only be one combined evaluator at this point:
if (secondOrderTerms.length === 1) {
this[$evaluator] = secondOrderTerms[0];
}
}
get isConstant() {
return this[$evaluator] == null || Evaluator.isConstant(this[$evaluator]);
}
[(_c$2 = $evaluator, $evaluate)]() {
return this[$evaluator] != null ? Evaluator.evaluate(this[$evaluator]) :
ZERO;
}
}
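/*
 * Illustrative example (not part of the library) of how the folding above
 * behaves: for calc(1m + 2m * 2), the multiplication is folded first into an
 * OperatorEvaluator (2m * 2 -> 4m), and the remaining terms are then folded
 * left-to-right, so the whole expression evaluates to 5m.
 */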
const $operator = Symbol('operator');
const $left = Symbol('left');
const $right = Symbol('right');
/**
* An Evaluator for the operators found inside CSS calc() functions.
* The evaluator accepts an operator and left/right operands. The operands can
* be any valid expression term typically allowed inside a CSS calc function.
*
* As detail of this implementation, the only supported unit types are angles
* expressed as radians or degrees, and lengths expressed as meters, centimeters
* or millimeters.
*
* @see https://developer.mozilla.org/en-US/docs/Web/CSS/calc
*/
class OperatorEvaluator extends Evaluator {
constructor(operator, left, right) {
super();
this[$operator] = operator;
this[$left] = left;
this[$right] = right;
}
get isConstant() {
return Evaluator.isConstant(this[$left]) &&
Evaluator.isConstant(this[$right]);
}
[$evaluate]() {
const leftNode = normalizeUnit(Evaluator.evaluate(this[$left]));
const rightNode = normalizeUnit(Evaluator.evaluate(this[$right]));
const { number: leftValue, unit: leftUnit } = leftNode;
const { number: rightValue, unit: rightUnit } = rightNode;
// Disallow operations for mismatched normalized units e.g., m and rad:
if (rightUnit != null && leftUnit != null && rightUnit != leftUnit) {
return ZERO;
}
// NOTE(cdata): rules for calc type checking are defined here
// https://drafts.csswg.org/css-values-3/#calc-type-checking
// This is a simplification and may not hold up once we begin to support
// additional unit types:
const unit = leftUnit || rightUnit;
let value;
switch (this[$operator].value) {
case '+':
value = leftValue + rightValue;
break;
case '-':
value = leftValue - rightValue;
break;
case '/':
value = leftValue / rightValue;
break;
case '*':
value = leftValue * rightValue;
break;
default:
return ZERO;
}
return { type: 'number', number: value, unit };
}
}
const $evaluatables = Symbol('evaluatables');
const $intrinsics = Symbol('intrinsics');
/**
* A StyleEvaluator evaluates a series of numeric terms that usually represent
* a data structure such as a multi-dimensional vector or a set of spherical
* coordinates.
*
* The form of the evaluator's result is determined by the Intrinsics that are
* given to it when it is constructed. For example, spherical intrinsics would
* establish two angle terms and a length term, so the result of evaluating an
* evaluator that is configured with spherical intrinsics is a three-element
* array where the first two elements represent angles in radians and the third
* element represents a length in meters.
*/
class StyleEvaluator extends Evaluator {
constructor(expressions, intrinsics) {
super();
this[$intrinsics] = intrinsics;
const firstExpression = expressions[0];
const terms = firstExpression != null ? firstExpression.terms : [];
this[$evaluatables] =
intrinsics.basis.map((basisNode, index) => {
const term = terms[index];
if (term == null) {
return { type: 'ident', value: 'auto' };
}
if (term.type === 'ident') {
return term;
}
return Evaluator.evaluatableFor(term, basisNode);
});
}
get isConstant() {
for (const evaluatable of this[$evaluatables]) {
if (!Evaluator.isConstant(evaluatable)) {
return false;
}
}
return true;
}
[$evaluate]() {
const evaluated = this[$evaluatables].map(evaluatable => Evaluator.evaluate(evaluatable));
return Evaluator.applyIntrinsics(evaluated, this[$intrinsics])
.map(numberNode => numberNode.number);
}
}
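/*
 * Illustrative example (not part of the library): with spherical intrinsics
 * whose basis is [0rad, 1.3rad, 1m] and whose 'auto' keyword is
 * [null, null, null], the parsed input '45deg auto 150%' evaluates to
 * approximately [0.785, 1.3, 1.5]: the angle is normalized to radians, 'auto'
 * falls back to the basis value, and the percentage is applied to the basis
 * length.
 */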
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$5, _b$4, _c$1, _d;
const $instances = Symbol('instances');
const $activateListener = Symbol('activateListener');
const $deactivateListener = Symbol('deactivateListener');
const $notifyInstances = Symbol('notifyInstances');
const $notify = Symbol('notify');
const $scrollCallback = Symbol('callback');
/**
* This internal helper is intended to work as a reference-counting manager of
* scroll event listeners. Only one scroll listener is ever registered for all
* instances of the class, and when the last ScrollObserver "disconnects", that
* event listener is removed. This spares us from thrashing
* the {add,remove}EventListener API (the binding cost of these methods has been
* known to show up in performance analyses) as well as potential memory leaks.
*/
class ScrollObserver {
constructor(callback) {
this[$scrollCallback] = callback;
}
static [$notifyInstances]() {
for (const instance of ScrollObserver[$instances]) {
instance[$notify]();
}
}
static [(_a$5 = $instances, $activateListener)]() {
window.addEventListener('scroll', this[$notifyInstances], { passive: true });
}
static [$deactivateListener]() {
window.removeEventListener('scroll', this[$notifyInstances]);
}
/**
* Listen for scroll events. The configured callback (passed to the
* constructor) will be invoked for subsequent global scroll events.
*/
observe() {
if (ScrollObserver[$instances].size === 0) {
ScrollObserver[$activateListener]();
}
ScrollObserver[$instances].add(this);
}
/**
* Stop listening for scroll events.
*/
disconnect() {
ScrollObserver[$instances].delete(this);
if (ScrollObserver[$instances].size === 0) {
ScrollObserver[$deactivateListener]();
}
}
[$notify]() {
this[$scrollCallback]();
}
;
}
ScrollObserver[_a$5] = new Set();
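/*
 * Illustrative usage sketch (not part of the library):
 *
 *   const observer = new ScrollObserver(() => console.log('scrolled'));
 *   observer.observe();    // first active instance adds the shared listener
 *   // ...later...
 *   observer.disconnect(); // last active instance removes it again
 */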
const $computeStyleCallback = Symbol('computeStyleCallback');
const $astWalker = Symbol('astWalker');
const $dependencies = Symbol('dependencies');
const $onScroll = Symbol('onScroll');
/**
* The StyleEffector is configured with a callback that will be invoked at the
* optimal time to evaluate some array of CSS expression ASTs.
*
* For example, our CSS-like expression syntax supports usage of the env()
* function to incorporate the current top-level scroll position into a CSS
* expression: env(window-scroll-y).
*
* This "environment variable" will change dynamically as the user scrolls the
* page. If an AST contains such a usage of env(), we would have to evaluate the
* AST on every frame in order to be sure that the computed style stays up to
* date.
*
* The StyleEffector spares us from evaluating the expressions on every frame by
* correlating specific parts of an AST with observers of the external effects
* that they refer to (if any). So, if the AST contains env(window-scroll-y),
* the StyleEffector manages the lifetime of a global scroll event listener and
* notifies the user at the optimal time to evaluate the computed style.
*/
class StyleEffector {
constructor(callback) {
this[_b$4] = {};
this[_c$1] = new ASTWalker(['function']);
this[_d] = () => {
this[$computeStyleCallback]({ relatedState: 'window-scroll' });
};
this[$computeStyleCallback] = callback;
}
/**
* Sets the expressions that govern when the StyleEffector callback will be
* invoked.
*/
observeEffectsFor(ast) {
const newDependencies = {};
const oldDependencies = this[$dependencies];
this[$astWalker].walk(ast, functionNode => {
const { name } = functionNode;
const firstArgument = functionNode.arguments[0];
const firstTerm = firstArgument.terms[0];
if (name.value !== 'env' || firstTerm == null ||
firstTerm.type !== 'ident') {
return;
}
switch (firstTerm.value) {
case 'window-scroll-y':
if (newDependencies['window-scroll'] == null) {
const observer = 'window-scroll' in oldDependencies ?
oldDependencies['window-scroll'] :
new ScrollObserver(this[$onScroll]);
observer.observe();
delete oldDependencies['window-scroll'];
newDependencies['window-scroll'] = observer;
}
break;
}
});
for (const environmentState in oldDependencies) {
const observer = oldDependencies[environmentState];
observer.disconnect();
}
this[$dependencies] = newDependencies;
}
/**
* Disposes of the StyleEffector by disconnecting all observers of external
* effects.
*/
dispose() {
for (const environmentState in this[$dependencies]) {
const observer = this[$dependencies][environmentState];
observer.disconnect();
}
}
}
_b$4 = $dependencies, _c$1 = $astWalker, _d = $onScroll;
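/*
 * Illustrative usage sketch (not part of the library; recomputeStyle is a
 * hypothetical callback):
 *
 *   const effector = new StyleEffector(() => recomputeStyle());
 *   effector.observeEffectsFor(parseExpressions('env(window-scroll-y)'));
 *   // The callback now runs on scroll; disconnect everything when done:
 *   effector.dispose();
 */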
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The @style decorator is responsible for coordinating the conversion of a
* CSS-like string property value into numbers that can be applied to
* lower-level constructs. It can also optionally manage the lifecycle of a
* StyleEffector which allows automatic updates for styles that use env() or
* var() functions.
*
* The decorator is configured with Intrinsics and the property key for a
* method that handles updates. The named update handler is invoked with the
* result of parsing and evaluating the raw property string value. The format of
* the evaluated result is derived from the basis of the configured Intrinsics,
* and is always an array of numbers of fixed length.
*
* NOTE: This decorator depends on the property updating mechanism defined by
* UpdatingElement as exported by the lit-element module. That means it *must*
* be used in conjunction with the @property decorator, or equivalent
* JavaScript.
*
* Supported configurations are:
*
* - `intrinsics`: An Intrinsics struct that describes how to interpret a
* serialized style attribute. For more detail on intrinsics see
* ./styles/evaluators.ts
* - `updateHandler`: A string or Symbol that is the key of a method to be
* invoked with the result of parsing and evaluating a serialized style string.
* - `observeEffects`: Optional, if set to true then styles that use env() will
* cause their update handlers to be invoked every time the corresponding
* environment variable changes (even if the style attribute itself remains
* static).
*/
const style = (config) => {
const observeEffects = config.observeEffects || false;
const getIntrinsics = config.intrinsics instanceof Function ?
config.intrinsics :
(() => config.intrinsics);
return (proto, propertyName) => {
const originalUpdated = proto.updated;
const originalConnectedCallback = proto.connectedCallback;
const originalDisconnectedCallback = proto.disconnectedCallback;
const $styleEffector = Symbol(`${propertyName}StyleEffector`);
const $styleEvaluator = Symbol(`${propertyName}StyleEvaluator`);
const $updateEvaluator = Symbol(`${propertyName}UpdateEvaluator`);
const $evaluateAndSync = Symbol(`${propertyName}EvaluateAndSync`);
Object.defineProperties(proto, {
[$styleEffector]: { value: null, writable: true },
[$styleEvaluator]: { value: null, writable: true },
[$updateEvaluator]: {
value: function () {
const ast = parseExpressions(this[propertyName]);
this[$styleEvaluator] =
new StyleEvaluator(ast, getIntrinsics(this));
if (this[$styleEffector] == null && observeEffects) {
this[$styleEffector] =
new StyleEffector(() => this[$evaluateAndSync]());
}
if (this[$styleEffector] != null) {
this[$styleEffector].observeEffectsFor(ast);
}
}
},
[$evaluateAndSync]: {
value: function () {
if (this[$styleEvaluator] == null) {
return;
}
const result = this[$styleEvaluator].evaluate();
// @see https://github.com/microsoft/TypeScript/pull/30769
// @see https://github.com/Microsoft/TypeScript/issues/1863
this[config.updateHandler](result);
}
},
updated: {
value: function (changedProperties) {
// Always invoke updates to styles first. This gives a class that
// uses this decorator the opportunity to override the effect, or
// respond to it, in its own implementation of `updated`.
if (changedProperties.has(propertyName)) {
this[$updateEvaluator]();
this[$evaluateAndSync]();
}
originalUpdated.call(this, changedProperties);
}
},
connectedCallback: {
value: function () {
originalConnectedCallback.call(this);
this.requestUpdate(propertyName, this[propertyName]);
}
},
disconnectedCallback: {
value: function () {
originalDisconnectedCallback.call(this);
if (this[$styleEffector] != null) {
this[$styleEffector].dispose();
this[$styleEffector] = null;
}
}
}
});
};
};
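/*
 * Illustrative sketch (not part of the library) of how this decorator is
 * applied in TypeScript source; the compiled equivalents appear further below
 * (see the cameraOrbit property of ControlsMixin):
 *
 *   @style({
 *     intrinsics: cameraOrbitIntrinsics,
 *     observeEffects: true,
 *     updateHandler: $syncCameraOrbit
 *   })
 *   @property({type: String, attribute: 'camera-orbit'})
 *   cameraOrbit = DEFAULT_CAMERA_ORBIT;
 *
 *   // $syncCameraOrbit then receives [theta, phi, radius] as plain numbers.
 */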
/* @license
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const DEFAULT_OPTIONS = Object.freeze({
minimumRadius: 0,
maximumRadius: Infinity,
minimumPolarAngle: Math.PI / 8,
maximumPolarAngle: Math.PI - Math.PI / 8,
minimumAzimuthalAngle: -Infinity,
maximumAzimuthalAngle: Infinity,
minimumFieldOfView: 10,
maximumFieldOfView: 45,
interactionPolicy: 'always-allow',
touchAction: 'pan-y'
});
// Constants
const TOUCH_EVENT_RE = /^touch(start|end|move)$/;
const KEYBOARD_ORBIT_INCREMENT = Math.PI / 8;
const ZOOM_SENSITIVITY = 0.04;
const KeyCode = {
PAGE_UP: 33,
PAGE_DOWN: 34,
LEFT: 37,
UP: 38,
RIGHT: 39,
DOWN: 40
};
const ChangeSource = {
USER_INTERACTION: 'user-interaction',
NONE: 'none'
};
/**
* SmoothControls is a Three.js helper for adding delightful pointer and
* keyboard-based input to a staged Three.js scene. Its API is very similar to
* OrbitControls, but it offers more opinionated (subjectively more delightful)
* defaults, easy extensibility and subjectively better out-of-the-box keyboard
* support.
*
* One important change compared to OrbitControls is that the `update` method
* of SmoothControls must be invoked on every frame, otherwise the controls
* will not have an effect.
*
* Another notable difference compared to OrbitControls is that SmoothControls
* does not currently support panning (but probably will in a future revision).
*
* Like OrbitControls, SmoothControls assumes that the orientation of the camera
* has been set in terms of position, rotation and scale, so it is important to
* ensure that the camera's matrixWorld is in sync before using SmoothControls.
*/
class SmoothControls extends EventDispatcher {
constructor(camera, element) {
super();
this.camera = camera;
this.element = element;
this.sensitivity = 1;
this._interactionEnabled = false;
this._disableZoom = false;
this.isUserChange = false;
this.isUserPointing = false;
// Internal orbital position state
this.spherical = new Spherical();
this.goalSpherical = new Spherical();
this.thetaDamper = new Damper();
this.phiDamper = new Damper();
this.radiusDamper = new Damper();
this.logFov = Math.log(DEFAULT_OPTIONS.maximumFieldOfView);
this.goalLogFov = this.logFov;
this.fovDamper = new Damper();
// Pointer state
this.pointerIsDown = false;
this.lastPointerPosition = {
clientX: 0,
clientY: 0,
};
this.touchMode = 'rotate';
this.touchDecided = false;
this.onPointerMove = (event) => {
if (!this.pointerIsDown || !this.canInteract) {
return;
}
// NOTE(cdata): We test event.type as some browsers do not have a global
// TouchEvent constructor.
if (TOUCH_EVENT_RE.test(event.type)) {
const { touches } = event;
switch (this.touchMode) {
case 'zoom':
if (this.lastTouches.length > 1 && touches.length > 1) {
const lastTouchDistance = this.twoTouchDistance(this.lastTouches[0], this.lastTouches[1]);
const touchDistance = this.twoTouchDistance(touches[0], touches[1]);
const deltaZoom = ZOOM_SENSITIVITY * (lastTouchDistance - touchDistance) / 10.0;
this.userAdjustOrbit(0, 0, deltaZoom);
}
break;
case 'rotate':
const { touchAction } = this._options;
if (!this.touchDecided && touchAction !== 'none') {
this.touchDecided = true;
const { clientX, clientY } = touches[0];
const dx = Math.abs(clientX - this.lastPointerPosition.clientX);
const dy = Math.abs(clientY - this.lastPointerPosition.clientY);
// If motion is mostly vertical, assume scrolling is the intent.
if ((touchAction === 'pan-y' && dy > dx) ||
(touchAction === 'pan-x' && dx > dy)) {
this.touchMode = 'scroll';
return;
}
}
this.handleSinglePointerMove(touches[0]);
break;
case 'scroll':
return;
}
this.lastTouches = touches;
}
else {
this.handleSinglePointerMove(event);
}
if (event.cancelable) {
event.preventDefault();
}
};
this.onPointerDown = (event) => {
this.pointerIsDown = true;
this.isUserPointing = false;
if (TOUCH_EVENT_RE.test(event.type)) {
const { touches } = event;
this.touchDecided = false;
switch (touches.length) {
default:
case 1:
this.touchMode = 'rotate';
this.handleSinglePointerDown(touches[0]);
break;
case 2:
this.touchMode = this._disableZoom ? 'scroll' : 'zoom';
break;
}
this.lastTouches = touches;
}
else {
this.handleSinglePointerDown(event);
}
};
this.onPointerUp = (_event) => {
this.element.style.cursor = 'grab';
this.pointerIsDown = false;
if (this.isUserPointing) {
this.dispatchEvent({ type: 'pointer-change-end', pointer: Object.assign({}, this.lastPointerPosition) });
}
};
this.onWheel = (event) => {
if (!this.canInteract) {
return;
}
const deltaZoom = event.deltaY *
(event.deltaMode == 1 ? 18 : 1) * ZOOM_SENSITIVITY / 30;
this.userAdjustOrbit(0, 0, deltaZoom);
if (event.cancelable) {
event.preventDefault();
}
};
this.onKeyDown = (event) => {
// We track if the key is actually one we respond to, so as not to
// accidentally clobber unrelated key inputs when the <model-viewer> has
// focus.
let relevantKey = false;
switch (event.keyCode) {
case KeyCode.PAGE_UP:
relevantKey = true;
this.userAdjustOrbit(0, 0, ZOOM_SENSITIVITY);
break;
case KeyCode.PAGE_DOWN:
relevantKey = true;
this.userAdjustOrbit(0, 0, -1 * ZOOM_SENSITIVITY);
break;
case KeyCode.UP:
relevantKey = true;
this.userAdjustOrbit(0, -KEYBOARD_ORBIT_INCREMENT, 0);
break;
case KeyCode.DOWN:
relevantKey = true;
this.userAdjustOrbit(0, KEYBOARD_ORBIT_INCREMENT, 0);
break;
case KeyCode.LEFT:
relevantKey = true;
this.userAdjustOrbit(-KEYBOARD_ORBIT_INCREMENT, 0, 0);
break;
case KeyCode.RIGHT:
relevantKey = true;
this.userAdjustOrbit(KEYBOARD_ORBIT_INCREMENT, 0, 0);
break;
}
if (relevantKey && event.cancelable) {
event.preventDefault();
}
};
this._options = Object.assign({}, DEFAULT_OPTIONS);
this.setOrbit(0, Math.PI / 2, 1);
this.setFieldOfView(100);
this.jumpToGoal();
}
get interactionEnabled() {
return this._interactionEnabled;
}
enableInteraction() {
if (this._interactionEnabled === false) {
const { element } = this;
element.addEventListener('mousemove', this.onPointerMove);
element.addEventListener('mousedown', this.onPointerDown);
if (!this._disableZoom) {
element.addEventListener('wheel', this.onWheel);
}
element.addEventListener('keydown', this.onKeyDown);
element.addEventListener('touchstart', this.onPointerDown, { passive: true });
element.addEventListener('touchmove', this.onPointerMove);
self.addEventListener('mouseup', this.onPointerUp);
self.addEventListener('touchend', this.onPointerUp);
this.element.style.cursor = 'grab';
this._interactionEnabled = true;
}
}
disableInteraction() {
if (this._interactionEnabled === true) {
const { element } = this;
element.removeEventListener('mousemove', this.onPointerMove);
element.removeEventListener('mousedown', this.onPointerDown);
if (!this._disableZoom) {
element.removeEventListener('wheel', this.onWheel);
}
element.removeEventListener('keydown', this.onKeyDown);
element.removeEventListener('touchstart', this.onPointerDown);
element.removeEventListener('touchmove', this.onPointerMove);
self.removeEventListener('mouseup', this.onPointerUp);
self.removeEventListener('touchend', this.onPointerUp);
element.style.cursor = '';
this._interactionEnabled = false;
}
}
/**
* The options that are currently configured for the controls instance.
*/
get options() {
return this._options;
}
set disableZoom(disable) {
if (this._disableZoom != disable) {
this._disableZoom = disable;
if (disable === true) {
this.element.removeEventListener('wheel', this.onWheel);
}
else {
this.element.addEventListener('wheel', this.onWheel);
}
}
}
/**
* Copy the spherical values that represent the current camera orbital
* position relative to the configured target into a provided Spherical
* instance. If no Spherical is provided, a new Spherical will be allocated
* to copy the values into. The Spherical that values are copied into is
* returned.
*/
getCameraSpherical(target = new Spherical()) {
return target.copy(this.spherical);
}
/**
* Returns the camera's current vertical field of view in degrees.
*/
getFieldOfView() {
return this.camera.fov;
}
/**
* Configure the _options of the controls. Configured _options will be
* merged with whatever _options have already been configured for this
* controls instance.
*/
applyOptions(_options) {
Object.assign(this._options, _options);
// Re-evaluates clamping based on potentially new values for min/max
// polar, azimuth and radius:
this.setOrbit();
this.setFieldOfView(Math.exp(this.goalLogFov));
}
/**
* Sets the near and far planes of the camera.
*/
updateNearFar(nearPlane, farPlane) {
this.camera.near = Math.max(nearPlane, farPlane / 1000);
this.camera.far = farPlane;
this.camera.updateProjectionMatrix();
}
/**
* Sets the aspect ratio of the camera
*/
updateAspect(aspect) {
this.camera.aspect = aspect;
this.camera.updateProjectionMatrix();
}
/**
* Set the absolute orbital goal of the camera. The change will be
* applied over a number of frames depending on configured acceleration and
* dampening _options.
*
* Returns true if invoking the method will result in the camera changing
* position and/or rotation, otherwise false.
*/
setOrbit(goalTheta = this.goalSpherical.theta, goalPhi = this.goalSpherical.phi, goalRadius = this.goalSpherical.radius) {
const { minimumAzimuthalAngle, maximumAzimuthalAngle, minimumPolarAngle, maximumPolarAngle, minimumRadius, maximumRadius } = this._options;
const { theta, phi, radius } = this.goalSpherical;
const nextTheta = clamp(goalTheta, minimumAzimuthalAngle, maximumAzimuthalAngle);
if (!isFinite(minimumAzimuthalAngle) &&
!isFinite(maximumAzimuthalAngle)) {
this.spherical.theta =
this.wrapAngle(this.spherical.theta - nextTheta) + nextTheta;
}
const nextPhi = clamp(goalPhi, minimumPolarAngle, maximumPolarAngle);
const nextRadius = clamp(goalRadius, minimumRadius, maximumRadius);
if (nextTheta === theta && nextPhi === phi && nextRadius === radius) {
return false;
}
this.goalSpherical.theta = nextTheta;
this.goalSpherical.phi = nextPhi;
this.goalSpherical.radius = nextRadius;
this.goalSpherical.makeSafe();
this.isUserChange = false;
return true;
}
/**
* Subset of setOrbit() above, which only sets the camera's radius.
*/
setRadius(radius) {
this.goalSpherical.radius = radius;
this.setOrbit();
}
/**
* Sets the goal field of view for the camera
*/
setFieldOfView(fov) {
const { minimumFieldOfView, maximumFieldOfView } = this._options;
fov = clamp(fov, minimumFieldOfView, maximumFieldOfView);
this.goalLogFov = Math.log(fov);
}
/**
* Sets the smoothing decay time.
*/
setDamperDecayTime(decayMilliseconds) {
this.thetaDamper.setDecayTime(decayMilliseconds);
this.phiDamper.setDecayTime(decayMilliseconds);
this.radiusDamper.setDecayTime(decayMilliseconds);
this.fovDamper.setDecayTime(decayMilliseconds);
}
/**
* Adjust the orbital position of the camera relative to its current orbital
* position. Does not let the theta goal get more than pi ahead of the current
* theta, which ensures interpolation continues in the direction of the delta.
* The deltaZoom parameter adjusts both the field of view and the orbit radius
* such that they progress across their allowed ranges in sync.
*/
adjustOrbit(deltaTheta, deltaPhi, deltaZoom) {
const { theta, phi, radius } = this.goalSpherical;
const { minimumRadius, maximumRadius, minimumFieldOfView, maximumFieldOfView } = this._options;
const dTheta = this.spherical.theta - theta;
const dThetaLimit = Math.PI - 0.001;
const goalTheta = theta - clamp(deltaTheta, -dThetaLimit - dTheta, dThetaLimit - dTheta);
const goalPhi = phi - deltaPhi;
const deltaRatio = deltaZoom === 0 ?
0 :
deltaZoom > 0 ? (maximumRadius - radius) /
(Math.log(maximumFieldOfView) - this.goalLogFov) :
(radius - minimumRadius) /
(this.goalLogFov - Math.log(minimumFieldOfView));
const goalRadius = radius +
deltaZoom *
Math.min(isFinite(deltaRatio) ? deltaRatio : Infinity, maximumRadius - minimumRadius);
this.setOrbit(goalTheta, goalPhi, goalRadius);
if (deltaZoom !== 0) {
const goalLogFov = this.goalLogFov + deltaZoom;
this.setFieldOfView(Math.exp(goalLogFov));
}
}
/**
* Move the camera instantly instead of accelerating toward the goal
* parameters.
*/
jumpToGoal() {
this.update(0, SETTLING_TIME);
}
/**
* Update controls. In most cases, this will result in the camera
* interpolating its position and rotation until it lines up with the
* designated goal orbital position.
*
* Time and delta are measured in milliseconds.
*/
update(_time, delta) {
if (this.isStationary()) {
return;
}
const { maximumPolarAngle, maximumRadius } = this._options;
const dTheta = this.spherical.theta - this.goalSpherical.theta;
if (Math.abs(dTheta) > Math.PI &&
!isFinite(this._options.minimumAzimuthalAngle) &&
!isFinite(this._options.maximumAzimuthalAngle)) {
this.spherical.theta -= Math.sign(dTheta) * 2 * Math.PI;
}
this.spherical.theta = this.thetaDamper.update(this.spherical.theta, this.goalSpherical.theta, delta, Math.PI);
this.spherical.phi = this.phiDamper.update(this.spherical.phi, this.goalSpherical.phi, delta, maximumPolarAngle);
this.spherical.radius = this.radiusDamper.update(this.spherical.radius, this.goalSpherical.radius, delta, maximumRadius);
this.logFov = this.fovDamper.update(this.logFov, this.goalLogFov, delta, 1);
this.moveCamera();
}
isStationary() {
return this.goalSpherical.theta === this.spherical.theta &&
this.goalSpherical.phi === this.spherical.phi &&
this.goalSpherical.radius === this.spherical.radius &&
this.goalLogFov === this.logFov;
}
moveCamera() {
// Derive the new camera position from the updated spherical:
this.spherical.makeSafe();
this.camera.position.setFromSpherical(this.spherical);
this.camera.setRotationFromEuler(new Euler(this.spherical.phi - Math.PI / 2, this.spherical.theta, 0, 'YXZ'));
if (this.camera.fov !== Math.exp(this.logFov)) {
this.camera.fov = Math.exp(this.logFov);
this.camera.updateProjectionMatrix();
}
const source = this.isUserChange ? ChangeSource.USER_INTERACTION : ChangeSource.NONE;
this.dispatchEvent({ type: 'change', source });
}
get canInteract() {
if (this._options.interactionPolicy == 'allow-when-focused') {
const rootNode = this.element.getRootNode();
return rootNode.activeElement === this.element;
}
return this._options.interactionPolicy === 'always-allow';
}
userAdjustOrbit(deltaTheta, deltaPhi, deltaZoom) {
this.adjustOrbit(deltaTheta * this.sensitivity, deltaPhi * this.sensitivity, deltaZoom);
this.isUserChange = true;
// Always make sure that an initial event is triggered in case there is
// contention between user interaction and imperative changes. This initial
// event will give external observers the chance to observe that
// interaction occurred at all:
this.dispatchEvent({ type: 'change', source: ChangeSource.USER_INTERACTION });
}
// Wraps the given angle to between -pi and pi
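// For example (worked arithmetic): wrapAngle(3 * Math.PI / 2) returns
// -Math.PI / 2, since normalized = 1.25, wrapped = 0.25 and
// 0.25 * 2 * Math.PI - Math.PI = -Math.PI / 2.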
wrapAngle(radians) {
const normalized = (radians + Math.PI) / (2 * Math.PI);
const wrapped = normalized - Math.floor(normalized);
return wrapped * 2 * Math.PI - Math.PI;
}
pixelLengthToSphericalAngle(pixelLength) {
return 2 * Math.PI * pixelLength / this.element.clientHeight;
}
twoTouchDistance(touchOne, touchTwo) {
const { clientX: xOne, clientY: yOne } = touchOne;
const { clientX: xTwo, clientY: yTwo } = touchTwo;
const xDelta = xTwo - xOne;
const yDelta = yTwo - yOne;
return Math.sqrt(xDelta * xDelta + yDelta * yDelta);
}
handleSinglePointerMove(pointer) {
const { clientX, clientY } = pointer;
const deltaTheta = this.pixelLengthToSphericalAngle(clientX - this.lastPointerPosition.clientX);
const deltaPhi = this.pixelLengthToSphericalAngle(clientY - this.lastPointerPosition.clientY);
this.lastPointerPosition.clientX = clientX;
this.lastPointerPosition.clientY = clientY;
if (this.isUserPointing === false) {
this.isUserPointing = true;
this.dispatchEvent({ type: 'pointer-change-start', pointer: Object.assign({}, pointer) });
}
this.userAdjustOrbit(deltaTheta, deltaPhi, 0);
}
handleSinglePointerDown(pointer) {
this.lastPointerPosition.clientX = pointer.clientX;
this.lastPointerPosition.clientY = pointer.clientY;
this.element.style.cursor = 'grabbing';
}
}
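/*
 * Illustrative usage sketch (not part of the library; assumes an existing
 * PerspectiveCamera, a focusable DOM element, and a deltaMilliseconds value
 * from the caller's render loop):
 *
 *   const controls = new SmoothControls(camera, element);
 *   controls.enableInteraction();
 *   controls.setOrbit(0, Math.PI / 2, 2); // theta, phi in radians; radius in meters
 *   controls.setFieldOfView(45);          // degrees, clamped to configured limits
 *   // Must be driven every frame for the damped motion to take effect:
 *   controls.update(performance.now(), deltaMilliseconds);
 */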
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Adapted from https://gist.github.com/gre/1650294
const easeInOutQuad = (t) => t < .5 ? 2 * t * t : -1 + (4 - 2 * t) * t;
/**
* Creates a TimingFunction that uses a given ease to interpolate between
* two configured number values.
*/
const interpolate = (start, end, ease = easeInOutQuad) => (time) => start + (end - start) * ease(time);
/**
* Creates a TimingFunction that interpolates through a weighted list
* of other TimingFunctions ("tracks"). Tracks are interpolated in order, and
* allocated a percentage of the total time based on their relative weight.
*/
const sequence = (tracks, weights) => {
const totalWeight = weights.reduce((total, weight) => total + weight, 0);
const ratios = weights.map(weight => weight / totalWeight);
return (time) => {
let start = 0;
let ratio = Infinity;
let track = () => 0;
for (let i = 0; i < ratios.length; ++i) {
ratio = ratios[i];
track = tracks[i];
if (time <= (start + ratio)) {
break;
}
start += ratio;
}
return track((time - start) / ratio);
};
};
/**
* Creates a "timeline" TimingFunction out of an initial value and a series of
* Keyframes. The timeline function accepts a value from 0 to 1 and returns the
* current value based on keyframe interpolation across the total number of
* frames. Frames are only used to indicate the relative length of each keyframe
* transition, so interpolated values will be computed for fractional frames.
*/
const timeline = (initialValue, keyframes) => {
const tracks = [];
const weights = [];
let lastValue = initialValue;
for (let i = 0; i < keyframes.length; ++i) {
const keyframe = keyframes[i];
const { value, frames } = keyframe;
const ease = keyframe.ease || easeInOutQuad;
const track = interpolate(lastValue, value, ease);
tracks.push(track);
weights.push(frames);
lastValue = value;
}
return sequence(tracks, weights);
};
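/*
 * Illustrative example (not part of the library): a timeline that ramps from 0
 * to 1 over the first quarter of its duration and back to 0 over the rest:
 *
 *   const pulse = timeline(0, [{frames: 1, value: 1}, {frames: 3, value: 0}]);
 *   pulse(0.125); // 0.5 (halfway through the first track, easeInOutQuad)
 *   pulse(1);     // 0
 */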
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$4 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof undefined === "function") r = undefined(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// NOTE(cdata): The following "animation" timing functions are deliberately
// being used in favor of CSS animations. In Safari 12.1 and 13, CSS animations
// would cause the interaction prompt to glitch unexpectedly
// @see https://github.com/google/model-viewer/issues/839
const PROMPT_ANIMATION_TIME = 5000;
// For timing purposes, a "frame" is a timing agnostic relative unit of time
// and a "value" is a target value for the keyframe.
const wiggle = timeline(0, [
{ frames: 5, value: -1 },
{ frames: 1, value: -1 },
{ frames: 8, value: 1 },
{ frames: 1, value: 1 },
{ frames: 5, value: 0 },
{ frames: 18, value: 0 }
]);
const fade = timeline(0, [
{ frames: 1, value: 1 },
{ frames: 5, value: 1 },
{ frames: 1, value: 0 },
{ frames: 6, value: 0 }
]);
const DEFAULT_CAMERA_ORBIT = '0deg 75deg 105%';
const DEFAULT_CAMERA_TARGET = 'auto auto auto';
const DEFAULT_FIELD_OF_VIEW = 'auto';
const MINIMUM_RADIUS_RATIO = 1.1 * SAFE_RADIUS_RATIO;
const AZIMUTHAL_QUADRANT_LABELS = ['front', 'right', 'back', 'left'];
const POLAR_TRIENT_LABELS = ['upper-', '', 'lower-'];
const DEFAULT_INTERACTION_PROMPT_THRESHOLD = 3000;
const INTERACTION_PROMPT = 'Use mouse, touch or arrow keys to control the camera!';
const InteractionPromptStrategy = {
AUTO: 'auto',
WHEN_FOCUSED: 'when-focused',
NONE: 'none'
};
const InteractionPromptStyle = {
BASIC: 'basic',
WIGGLE: 'wiggle'
};
const InteractionPolicy = {
ALWAYS_ALLOW: 'always-allow',
WHEN_FOCUSED: 'allow-when-focused'
};
const TouchAction = {
PAN_Y: 'pan-y',
PAN_X: 'pan-x',
NONE: 'none'
};
const fieldOfViewIntrinsics = (element) => {
return {
basis: [numberNode(element[$zoomAdjustedFieldOfView] * Math.PI / 180, 'rad')],
keywords: { auto: [null] }
};
};
const minFieldOfViewIntrinsics = {
basis: [degreesToRadians(numberNode(25, 'deg'))],
keywords: { auto: [null] }
};
const maxFieldOfViewIntrinsics = (element) => {
const scene = element[$scene];
return {
basis: [degreesToRadians(numberNode(45, 'deg'))],
keywords: { auto: [numberNode(scene.framedFieldOfView, 'deg')] }
};
};
const cameraOrbitIntrinsics = (() => {
const defaultTerms = parseExpressions(DEFAULT_CAMERA_ORBIT)[0]
.terms;
const theta = normalizeUnit(defaultTerms[0]);
const phi = normalizeUnit(defaultTerms[1]);
return (element) => {
const radius = element[$scene].idealCameraDistance;
return {
basis: [theta, phi, numberNode(radius, 'm')],
keywords: { auto: [null, null, numberNode(105, '%')] }
};
};
})();
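/*
 * Illustrative note: with these intrinsics, the default camera-orbit string
 * '0deg 75deg 105%' evaluates to [0, 75 * Math.PI / 180,
 * 1.05 * idealCameraDistance]; theta and phi in radians, radius in meters.
 */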
const minCameraOrbitIntrinsics = (element) => {
const radius = MINIMUM_RADIUS_RATIO * element[$scene].idealCameraDistance;
return {
basis: [
numberNode(-Infinity, 'rad'),
numberNode(Math.PI / 8, 'rad'),
numberNode(radius, 'm')
],
keywords: { auto: [null, null, null] }
};
};
const maxCameraOrbitIntrinsics = (element) => {
const orbitIntrinsics = cameraOrbitIntrinsics(element);
const evaluator = new StyleEvaluator([], orbitIntrinsics);
const defaultRadius = evaluator.evaluate()[2];
return {
basis: [
numberNode(Infinity, 'rad'),
numberNode(Math.PI - Math.PI / 8, 'rad'),
numberNode(defaultRadius, 'm')
],
keywords: { auto: [null, null, null] }
};
};
const cameraTargetIntrinsics = (element) => {
const center = element[$scene].boundingBox.getCenter(new Vector3());
return {
basis: [
numberNode(center.x, 'm'),
numberNode(center.y, 'm'),
numberNode(center.z, 'm')
],
keywords: { auto: [null, null, null] }
};
};
const HALF_PI = Math.PI / 2.0;
const THIRD_PI = Math.PI / 3.0;
const QUARTER_PI = HALF_PI / 2.0;
const TAU = 2.0 * Math.PI;
const $controls = Symbol('controls');
const $promptElement = Symbol('promptElement');
const $promptAnimatedContainer = Symbol('promptAnimatedContainer');
const $deferInteractionPrompt = Symbol('deferInteractionPrompt');
const $updateAria = Symbol('updateAria');
const $updateCameraForRadius = Symbol('updateCameraForRadius');
const $onBlur = Symbol('onBlur');
const $onFocus = Symbol('onFocus');
const $onChange = Symbol('onChange');
const $onPointerChange = Symbol('onPointerChange');
const $waitingToPromptUser = Symbol('waitingToPromptUser');
const $userHasInteracted = Symbol('userHasInteracted');
const $promptElementVisibleTime = Symbol('promptElementVisibleTime');
const $lastPromptOffset = Symbol('lastPromptOffset');
const $focusedTime = Symbol('focusedTime');
const $zoomAdjustedFieldOfView = Symbol('zoomAdjustedFieldOfView');
const $lastSpherical = Symbol('lastSpherical');
const $jumpCamera = Symbol('jumpCamera');
const $initialized = Symbol('initialized');
const $maintainThetaPhi = Symbol('maintainThetaPhi');
const $syncCameraOrbit = Symbol('syncCameraOrbit');
const $syncFieldOfView = Symbol('syncFieldOfView');
const $syncCameraTarget = Symbol('syncCameraTarget');
const $syncMinCameraOrbit = Symbol('syncMinCameraOrbit');
const $syncMaxCameraOrbit = Symbol('syncMaxCameraOrbit');
const $syncMinFieldOfView = Symbol('syncMinFieldOfView');
const $syncMaxFieldOfView = Symbol('syncMaxFieldOfView');
const ControlsMixin = (ModelViewerElement) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s;
class ControlsModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this.cameraControls = false;
this.cameraOrbit = DEFAULT_CAMERA_ORBIT;
this.cameraTarget = DEFAULT_CAMERA_TARGET;
this.fieldOfView = DEFAULT_FIELD_OF_VIEW;
this.minCameraOrbit = 'auto';
this.maxCameraOrbit = 'auto';
this.minFieldOfView = 'auto';
this.maxFieldOfView = 'auto';
this.interactionPromptThreshold = DEFAULT_INTERACTION_PROMPT_THRESHOLD;
this.interactionPromptStyle = InteractionPromptStyle.WIGGLE;
this.interactionPrompt = InteractionPromptStrategy.AUTO;
this.interactionPolicy = InteractionPolicy.ALWAYS_ALLOW;
this.orbitSensitivity = 1;
this.touchAction = TouchAction.PAN_Y;
this.disableZoom = false;
this.interpolationDecay = DECAY_MILLISECONDS;
this.bounds = 'legacy';
this[_a] = this.shadowRoot.querySelector('.interaction-prompt');
this[_b] = this.shadowRoot.querySelector('.interaction-prompt > .animated-container');
this[_c] = Infinity;
this[_d] = 0;
this[_e] = Infinity;
this[_f] = false;
this[_g] = false;
this[_h] = new SmoothControls(this[$scene].camera, this[$userInputElement]);
this[_j] = 0;
this[_k] = new Spherical();
this[_l] = false;
this[_m] = false;
this[_o] = false;
this[_p] = () => {
const input = this[$userInputElement];
if (!isFinite(this[$focusedTime])) {
this[$focusedTime] = performance.now();
}
// NOTE(cdata): On every re-focus, we switch the aria-label back to
// the original, non-prompt label if appropriate. If the user has
// already interacted, they no longer need to hear the prompt.
// Otherwise, they will hear it again after the idle prompt threshold
// has been crossed.
const ariaLabel = this[$ariaLabel];
if (input.getAttribute('aria-label') !== ariaLabel) {
input.setAttribute('aria-label', ariaLabel);
}
if (this.interactionPrompt === InteractionPromptStrategy.WHEN_FOCUSED &&
!this[$userHasInteracted]) {
this[$waitingToPromptUser] = true;
}
};
this[_q] = () => {
if (this.interactionPrompt !== InteractionPromptStrategy.WHEN_FOCUSED) {
return;
}
this[$waitingToPromptUser] = false;
this[$promptElement].classList.remove('visible');
this[$promptElementVisibleTime] = Infinity;
this[$focusedTime] = Infinity;
};
this[_r] = ({ source }) => {
this[$updateAria]();
this[$needsRender]();
if (source === ChangeSource.USER_INTERACTION) {
this[$userHasInteracted] = true;
this[$deferInteractionPrompt]();
}
this.dispatchEvent(new CustomEvent('camera-change', { detail: { source } }));
};
this[_s] = (event) => {
if (event.type === 'pointer-change-start') {
this[$container].classList.add('pointer-tumbling');
}
else {
this[$container].classList.remove('pointer-tumbling');
}
};
}
getCameraOrbit() {
const { theta, phi, radius } = this[$lastSpherical];
return {
theta,
phi,
radius,
toString() {
return `${this.theta}rad ${this.phi}rad ${this.radius}m`;
}
};
}
getCameraTarget() {
return toVector3D(this[$renderer].isPresenting ? this[$renderer].arRenderer.target :
this[$scene].getTarget());
}
getFieldOfView() {
return this[$controls].getFieldOfView();
}
// Provided so user code does not have to parse these from attributes.
getMinimumFieldOfView() {
return this[$controls].options.minimumFieldOfView;
}
getMaximumFieldOfView() {
return this[$controls].options.maximumFieldOfView;
}
jumpCameraToGoal() {
this[$jumpCamera] = true;
this.requestUpdate($jumpCamera, false);
}
resetInteractionPrompt() {
this[$lastPromptOffset] = 0;
this[$promptElementVisibleTime] = Infinity;
this[$userHasInteracted] = false;
this[$waitingToPromptUser] =
this.interactionPrompt === InteractionPromptStrategy.AUTO &&
this.cameraControls;
}
connectedCallback() {
super.connectedCallback();
this[$controls].addEventListener('change', this[$onChange]);
this[$controls].addEventListener('pointer-change-start', this[$onPointerChange]);
this[$controls].addEventListener('pointer-change-end', this[$onPointerChange]);
}
disconnectedCallback() {
super.disconnectedCallback();
this[$controls].removeEventListener('change', this[$onChange]);
this[$controls].removeEventListener('pointer-change-start', this[$onPointerChange]);
this[$controls].removeEventListener('pointer-change-end', this[$onPointerChange]);
}
updated(changedProperties) {
super.updated(changedProperties);
const controls = this[$controls];
const input = this[$userInputElement];
if (changedProperties.has('cameraControls')) {
if (this.cameraControls) {
controls.enableInteraction();
if (this.interactionPrompt === InteractionPromptStrategy.AUTO) {
this[$waitingToPromptUser] = true;
}
input.addEventListener('focus', this[$onFocus]);
input.addEventListener('blur', this[$onBlur]);
}
else {
input.removeEventListener('focus', this[$onFocus]);
input.removeEventListener('blur', this[$onBlur]);
controls.disableInteraction();
this[$deferInteractionPrompt]();
}
}
if (changedProperties.has('disableZoom')) {
controls.disableZoom = this.disableZoom;
}
if (changedProperties.has('bounds')) {
this[$scene].tightBounds = this.bounds === 'tight';
}
if (changedProperties.has('interactionPrompt') ||
changedProperties.has('cameraControls') ||
changedProperties.has('src')) {
if (this.interactionPrompt === InteractionPromptStrategy.AUTO &&
this.cameraControls && !this[$userHasInteracted]) {
this[$waitingToPromptUser] = true;
}
else {
this[$deferInteractionPrompt]();
}
}
if (changedProperties.has('interactionPromptStyle')) {
this[$promptElement].classList.toggle('wiggle', this.interactionPromptStyle === InteractionPromptStyle.WIGGLE);
}
if (changedProperties.has('interactionPolicy')) {
const interactionPolicy = this.interactionPolicy;
controls.applyOptions({ interactionPolicy });
}
if (changedProperties.has('touchAction')) {
const touchAction = this.touchAction;
controls.applyOptions({ touchAction });
}
if (changedProperties.has('orbitSensitivity')) {
controls.sensitivity = this.orbitSensitivity;
}
if (changedProperties.has('interpolationDecay')) {
controls.setDamperDecayTime(this.interpolationDecay);
this[$scene].setTargetDamperDecayTime(this.interpolationDecay);
}
if (this[$jumpCamera] === true) {
Promise.resolve().then(() => {
controls.jumpToGoal();
this[$scene].jumpToGoal();
this[$jumpCamera] = false;
});
}
}
async updateFraming() {
const scene = this[$scene];
const oldFramedFieldOfView = scene.framedFieldOfView;
await this.requestUpdate('cameraTarget');
scene.updateFraming(this.bounds === 'tight' ? scene.getTarget() : undefined);
scene.frameModel();
const newFramedFieldOfView = scene.framedFieldOfView;
const zoom = this[$controls].getFieldOfView() / oldFramedFieldOfView;
this[$zoomAdjustedFieldOfView] = newFramedFieldOfView * zoom;
this[$maintainThetaPhi] = true;
this.requestUpdate('maxFieldOfView');
this.requestUpdate('fieldOfView');
this.requestUpdate('minCameraOrbit');
this.requestUpdate('maxCameraOrbit');
await this.requestUpdate('cameraOrbit');
}
[(_a = $promptElement, _b = $promptAnimatedContainer, _c = $focusedTime, _d = $lastPromptOffset, _e = $promptElementVisibleTime, _f = $userHasInteracted, _g = $waitingToPromptUser, _h = $controls, _j = $zoomAdjustedFieldOfView, _k = $lastSpherical, _l = $jumpCamera, _m = $initialized, _o = $maintainThetaPhi, $syncFieldOfView)](style) {
this[$controls].setFieldOfView(style[0] * 180 / Math.PI);
}
[$syncCameraOrbit](style) {
if (this[$maintainThetaPhi]) {
const { theta, phi } = this.getCameraOrbit();
style[0] = theta;
style[1] = phi;
this[$maintainThetaPhi] = false;
}
this[$controls].setOrbit(style[0], style[1], style[2]);
}
[$syncMinCameraOrbit](style) {
this[$controls].applyOptions({
minimumAzimuthalAngle: style[0],
minimumPolarAngle: style[1],
minimumRadius: style[2]
});
this.jumpCameraToGoal();
}
[$syncMaxCameraOrbit](style) {
this[$controls].applyOptions({
maximumAzimuthalAngle: style[0],
maximumPolarAngle: style[1],
maximumRadius: style[2]
});
this[$updateCameraForRadius](style[2]);
this.jumpCameraToGoal();
}
[$syncMinFieldOfView](style) {
this[$controls].applyOptions({ minimumFieldOfView: style[0] * 180 / Math.PI });
this.jumpCameraToGoal();
}
[$syncMaxFieldOfView](style) {
this[$controls].applyOptions({ maximumFieldOfView: style[0] * 180 / Math.PI });
this.jumpCameraToGoal();
}
[$syncCameraTarget](style) {
const [x, y, z] = style;
this[$scene].setTarget(x, y, z);
this[$renderer].arRenderer.updateTarget();
}
[$tick](time, delta) {
super[$tick](time, delta);
if (this[$renderer].isPresenting || !this[$hasTransitioned]()) {
return;
}
const now = performance.now();
if (this[$waitingToPromptUser]) {
const thresholdTime = this.interactionPrompt === InteractionPromptStrategy.AUTO ?
this[$loadedTime] :
this[$focusedTime];
if (this.loaded &&
now > thresholdTime + this.interactionPromptThreshold) {
this[$userInputElement].setAttribute('aria-label', INTERACTION_PROMPT);
this[$waitingToPromptUser] = false;
this[$promptElementVisibleTime] = now;
this[$promptElement].classList.add('visible');
}
}
if (isFinite(this[$promptElementVisibleTime]) &&
this.interactionPromptStyle === InteractionPromptStyle.WIGGLE) {
const scene = this[$scene];
const animationTime = ((now - this[$promptElementVisibleTime]) / PROMPT_ANIMATION_TIME) %
1;
const offset = wiggle(animationTime);
const opacity = fade(animationTime);
this[$promptAnimatedContainer].style.opacity = `${opacity}`;
if (offset !== this[$lastPromptOffset]) {
const xOffset = offset * scene.width * 0.05;
const deltaTheta = (offset - this[$lastPromptOffset]) * Math.PI / 16;
this[$promptAnimatedContainer].style.transform =
`translateX(${xOffset}px)`;
this[$controls].adjustOrbit(deltaTheta, 0, 0);
this[$lastPromptOffset] = offset;
}
}
this[$controls].update(time, delta);
this[$scene].updateTarget(delta);
}
[$deferInteractionPrompt]() {
// Effectively cancel the timer waiting for user interaction:
this[$waitingToPromptUser] = false;
this[$promptElement].classList.remove('visible');
this[$promptElementVisibleTime] = Infinity;
}
/**
* Updates the camera's near and far planes to enclose the scene when
* orbiting at the supplied radius.
*/
[$updateCameraForRadius](radius) {
const { idealCameraDistance } = this[$scene];
const maximumRadius = Math.max(idealCameraDistance, radius);
const near = 0;
const far = 2 * maximumRadius;
this[$controls].updateNearFar(near, far);
}
[$updateAria]() {
// NOTE(cdata): It is possible that we might want to record the
// last spherical when the label actually changed. Right now, the
// side-effect of the current implementation is that we will only
// announce the first view change that occurs after the element
// becomes focused.
const { theta: lastTheta, phi: lastPhi } = this[$lastSpherical];
const { theta, phi } = this[$controls].getCameraSpherical(this[$lastSpherical]);
const rootNode = this.getRootNode();
// Only change the aria-label if <model-viewer> is currently focused:
if (rootNode != null && rootNode.activeElement === this) {
const lastAzimuthalQuadrant = (4 + Math.floor(((lastTheta % TAU) + QUARTER_PI) / HALF_PI)) % 4;
const azimuthalQuadrant = (4 + Math.floor(((theta % TAU) + QUARTER_PI) / HALF_PI)) % 4;
const lastPolarTrient = Math.floor(lastPhi / THIRD_PI);
const polarTrient = Math.floor(phi / THIRD_PI);
if (azimuthalQuadrant !== lastAzimuthalQuadrant ||
polarTrient !== lastPolarTrient) {
const azimuthalQuadrantLabel = AZIMUTHAL_QUADRANT_LABELS[azimuthalQuadrant];
const polarTrientLabel = POLAR_TRIENT_LABELS[polarTrient];
const ariaLabel = `View from stage ${polarTrientLabel}${azimuthalQuadrantLabel}`;
this[$userInputElement].setAttribute('aria-label', ariaLabel);
}
}
}
[$onResize](event) {
const controls = this[$controls];
const oldFramedFieldOfView = this[$scene].framedFieldOfView;
// The super of $onResize will update the scene's framedFieldOfView, so we
// compare the before and after to calculate the proper zoom.
super[$onResize](event);
const newFramedFieldOfView = this[$scene].framedFieldOfView;
const zoom = controls.getFieldOfView() / oldFramedFieldOfView;
this[$zoomAdjustedFieldOfView] = newFramedFieldOfView * zoom;
controls.updateAspect(this[$scene].aspect);
this.requestUpdate('maxFieldOfView', this.maxFieldOfView);
this.requestUpdate('fieldOfView', this.fieldOfView);
this.jumpCameraToGoal();
}
[$onModelLoad]() {
super[$onModelLoad]();
const { framedFieldOfView } = this[$scene];
this[$zoomAdjustedFieldOfView] = framedFieldOfView;
if (this[$initialized]) {
this[$maintainThetaPhi] = true;
}
else {
this[$initialized] = true;
}
this.requestUpdate('maxFieldOfView', this.maxFieldOfView);
this.requestUpdate('fieldOfView', this.fieldOfView);
this.requestUpdate('minCameraOrbit', this.minCameraOrbit);
this.requestUpdate('maxCameraOrbit', this.maxCameraOrbit);
this.requestUpdate('cameraOrbit', this.cameraOrbit);
this.requestUpdate('cameraTarget', this.cameraTarget);
this.jumpCameraToGoal();
}
}
_p = $onFocus, _q = $onBlur, _r = $onChange, _s = $onPointerChange;
__decorate$4([
property({ type: Boolean, attribute: 'camera-controls' })
], ControlsModelViewerElement.prototype, "cameraControls", void 0);
__decorate$4([
style({
intrinsics: cameraOrbitIntrinsics,
observeEffects: true,
updateHandler: $syncCameraOrbit
}),
property({ type: String, attribute: 'camera-orbit', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "cameraOrbit", void 0);
__decorate$4([
style({
intrinsics: cameraTargetIntrinsics,
observeEffects: true,
updateHandler: $syncCameraTarget
}),
property({ type: String, attribute: 'camera-target', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "cameraTarget", void 0);
__decorate$4([
style({
intrinsics: fieldOfViewIntrinsics,
observeEffects: true,
updateHandler: $syncFieldOfView
}),
property({ type: String, attribute: 'field-of-view', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "fieldOfView", void 0);
__decorate$4([
style({
intrinsics: minCameraOrbitIntrinsics,
updateHandler: $syncMinCameraOrbit
}),
property({ type: String, attribute: 'min-camera-orbit', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "minCameraOrbit", void 0);
__decorate$4([
style({
intrinsics: maxCameraOrbitIntrinsics,
updateHandler: $syncMaxCameraOrbit
}),
property({ type: String, attribute: 'max-camera-orbit', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "maxCameraOrbit", void 0);
__decorate$4([
style({
intrinsics: minFieldOfViewIntrinsics,
updateHandler: $syncMinFieldOfView
}),
property({ type: String, attribute: 'min-field-of-view', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "minFieldOfView", void 0);
__decorate$4([
style({
intrinsics: maxFieldOfViewIntrinsics,
updateHandler: $syncMaxFieldOfView
}),
property({ type: String, attribute: 'max-field-of-view', hasChanged: () => true })
], ControlsModelViewerElement.prototype, "maxFieldOfView", void 0);
__decorate$4([
property({ type: Number, attribute: 'interaction-prompt-threshold' })
], ControlsModelViewerElement.prototype, "interactionPromptThreshold", void 0);
__decorate$4([
property({ type: String, attribute: 'interaction-prompt-style' })
], ControlsModelViewerElement.prototype, "interactionPromptStyle", void 0);
__decorate$4([
property({ type: String, attribute: 'interaction-prompt' })
], ControlsModelViewerElement.prototype, "interactionPrompt", void 0);
__decorate$4([
property({ type: String, attribute: 'interaction-policy' })
], ControlsModelViewerElement.prototype, "interactionPolicy", void 0);
__decorate$4([
property({ type: Number, attribute: 'orbit-sensitivity' })
], ControlsModelViewerElement.prototype, "orbitSensitivity", void 0);
__decorate$4([
property({ type: String, attribute: 'touch-action' })
], ControlsModelViewerElement.prototype, "touchAction", void 0);
__decorate$4([
property({ type: Boolean, attribute: 'disable-zoom' })
], ControlsModelViewerElement.prototype, "disableZoom", void 0);
__decorate$4([
property({ type: Number, attribute: 'interpolation-decay' })
], ControlsModelViewerElement.prototype, "interpolationDecay", void 0);
__decorate$4([
property({ type: String, attribute: 'bounds' })
], ControlsModelViewerElement.prototype, "bounds", void 0);
return ControlsModelViewerElement;
};
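/* Illustrative sketch (not part of the upstream source): the properties
 * decorated above surface as attributes on <model-viewer>, so a typical
 * camera-controls configuration looks like the following. The value syntax
 * ("theta phi radius" for orbits, degrees for fields of view) follows the
 * intrinsics referenced above.
 *
 * ```html
 * <model-viewer src="model.glb" camera-controls
 *     camera-orbit="45deg 75deg 2m"
 *     min-field-of-view="25deg" max-field-of-view="45deg"
 *     interpolation-decay="200">
 * </model-viewer>
 * ```
 */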
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$3 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
const BASE_OPACITY = 0.1;
const DEFAULT_SHADOW_INTENSITY = 0.0;
const DEFAULT_SHADOW_SOFTNESS = 1.0;
const DEFAULT_EXPOSURE = 1.0;
const $currentEnvironmentMap = Symbol('currentEnvironmentMap');
const $applyEnvironmentMap = Symbol('applyEnvironmentMap');
const $updateEnvironment = Symbol('updateEnvironment');
const $cancelEnvironmentUpdate = Symbol('cancelEnvironmentUpdate');
const $onPreload = Symbol('onPreload');
const EnvironmentMixin = (ModelViewerElement) => {
var _a, _b, _c;
class EnvironmentModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this.environmentImage = null;
this.skyboxImage = null;
this.shadowIntensity = DEFAULT_SHADOW_INTENSITY;
this.shadowSoftness = DEFAULT_SHADOW_SOFTNESS;
this.exposure = DEFAULT_EXPOSURE;
this[_a] = null;
this[_b] = null;
this[_c] = (event) => {
if (event.element === this) {
this[$updateEnvironment]();
}
};
}
connectedCallback() {
super.connectedCallback();
this[$renderer].loader.addEventListener('preload', this[$onPreload]);
}
disconnectedCallback() {
super.disconnectedCallback();
this[$renderer].loader.removeEventListener('preload', this[$onPreload]);
}
updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('shadowIntensity')) {
this[$scene].setShadowIntensity(this.shadowIntensity * BASE_OPACITY);
this[$needsRender]();
}
if (changedProperties.has('shadowSoftness')) {
this[$scene].setShadowSoftness(this.shadowSoftness);
this[$needsRender]();
}
if (changedProperties.has('exposure')) {
this[$scene].exposure = this.exposure;
this[$needsRender]();
}
if ((changedProperties.has('environmentImage') ||
changedProperties.has('skyboxImage')) &&
this[$shouldAttemptPreload]()) {
this[$updateEnvironment]();
}
}
[(_a = $currentEnvironmentMap, _b = $cancelEnvironmentUpdate, _c = $onPreload, $onModelLoad)]() {
super[$onModelLoad]();
if (this[$currentEnvironmentMap] != null) {
this[$applyEnvironmentMap](this[$currentEnvironmentMap]);
}
}
async [$updateEnvironment]() {
const { skyboxImage, environmentImage } = this;
if (this[$cancelEnvironmentUpdate] != null) {
this[$cancelEnvironmentUpdate]();
this[$cancelEnvironmentUpdate] = null;
}
const { textureUtils } = this[$renderer];
if (textureUtils == null) {
return;
}
try {
const { environmentMap, skybox } = await new Promise(async (resolve, reject) => {
const texturesLoad = textureUtils.generateEnvironmentMapAndSkybox(deserializeUrl(skyboxImage), environmentImage, { progressTracker: this[$progressTracker] });
this[$cancelEnvironmentUpdate] = () => reject(texturesLoad);
resolve(await texturesLoad);
});
const environment = environmentMap.texture;
if (skybox != null) {
// When using the same environment and skybox, use the environment as
// it gives HDR filtering.
this[$scene].background = skybox.userData.url ===
environment.userData.url ?
environment :
skybox;
}
else {
this[$scene].background = null;
}
this[$applyEnvironmentMap](environmentMap.texture);
this[$scene].dispatchEvent({ type: 'envmap-update' });
}
catch (errorOrPromise) {
if (errorOrPromise instanceof Error) {
this[$applyEnvironmentMap](null);
throw errorOrPromise;
}
}
}
/**
* Sets the Model to use the provided environment map,
* or `null` if the Model should remove its environment map.
*/
[$applyEnvironmentMap](environmentMap) {
this[$currentEnvironmentMap] = environmentMap;
this[$scene].environment = this[$currentEnvironmentMap];
this.dispatchEvent(new CustomEvent('environment-change'));
this[$needsRender]();
}
}
__decorate$3([
property({ type: String, attribute: 'environment-image' })
], EnvironmentModelViewerElement.prototype, "environmentImage", void 0);
__decorate$3([
property({ type: String, attribute: 'skybox-image' })
], EnvironmentModelViewerElement.prototype, "skyboxImage", void 0);
__decorate$3([
property({ type: Number, attribute: 'shadow-intensity' })
], EnvironmentModelViewerElement.prototype, "shadowIntensity", void 0);
__decorate$3([
property({ type: Number, attribute: 'shadow-softness' })
], EnvironmentModelViewerElement.prototype, "shadowSoftness", void 0);
__decorate$3([
property({
type: Number,
})
], EnvironmentModelViewerElement.prototype, "exposure", void 0);
return EnvironmentModelViewerElement;
};
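/* Illustrative sketch (not part of the upstream source): the environment
 * attributes declared above combine like the following; "env.hdr" and
 * "model.glb" are placeholder asset names.
 *
 * ```html
 * <model-viewer src="model.glb"
 *     environment-image="env.hdr" skybox-image="env.hdr"
 *     shadow-intensity="1" shadow-softness="0.5" exposure="1">
 * </model-viewer>
 * ```
 */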
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$4, _b$3;
const INITIAL_STATUS_ANNOUNCEMENT = 'This page includes one or more 3D models that are loading';
const FINISHED_LOADING_ANNOUNCEMENT = 'All 3D models in the page have loaded';
const UPDATE_STATUS_DEBOUNCE_MS = 100;
const $modelViewerStatusInstance = Symbol('modelViewerStatusInstance');
const $updateStatus = Symbol('updateStatus');
/**
* The LoadingStatusAnnouncer manages announcements of loading status across
* all <model-viewer> elements in the document at any given time. As new
* <model-viewer> elements are connected to the document, they are registered
* with a LoadingStatusAnnouncer singleton. As they are disconnected, they are
* also unregistered. Announcements are made to indicate the following
* conditions:
*
* 1. There are <model-viewer> elements that have yet to finish loading
* 2. All <model-viewer> elements in the page have finished attempting to load
*/
class LoadingStatusAnnouncer extends EventDispatcher {
constructor() {
super();
/**
* The "status" instance is the <model-viewer> instance currently designated
* to announce the loading status of all <model-viewer> elements in the
* document at any given time. It might change as <model-viewer> elements are
* attached or detached over time.
*/
this[_a$4] = null;
this.registeredInstanceStatuses = new Map();
this.loadingPromises = [];
/**
* This element is a node that floats around the document as the status
* instance changes (see above). It is a singleton that represents the loading
* status for all <model-viewer> elements currently in the page. It has its
* role attribute set to "status", which causes screen readers to announce
* any changes to its text content.
*
* @see https://www.w3.org/TR/wai-aria-1.1/#status
*/
this.statusElement = document.createElement('p');
this.statusUpdateInProgress = false;
this[_b$3] = debounce(() => this.updateStatus(), UPDATE_STATUS_DEBOUNCE_MS);
const { statusElement } = this;
const { style } = statusElement;
statusElement.setAttribute('role', 'status');
statusElement.classList.add('screen-reader-only');
style.top = style.left = '0';
style.pointerEvents = 'none';
}
/**
* Register a <model-viewer> element with the announcer. If it is not yet
* loaded, its loading status will be tracked by the announcer.
*/
registerInstance(modelViewer) {
if (this.registeredInstanceStatuses.has(modelViewer)) {
return;
}
let onUnregistered = () => { };
const loadShouldBeMeasured = modelViewer.loaded === false && !!modelViewer.src;
const loadAttemptCompletes = new Promise((resolve) => {
if (!loadShouldBeMeasured) {
resolve();
return;
}
const resolveHandler = () => {
resolve();
modelViewer.removeEventListener('load', resolveHandler);
modelViewer.removeEventListener('error', resolveHandler);
};
modelViewer.addEventListener('load', resolveHandler);
modelViewer.addEventListener('error', resolveHandler);
onUnregistered = resolveHandler;
});
this.registeredInstanceStatuses.set(modelViewer, { onUnregistered });
this.loadingPromises.push(loadAttemptCompletes);
if (this.modelViewerStatusInstance == null) {
this.modelViewerStatusInstance = modelViewer;
}
}
/**
* Unregister a <model-viewer> element with the announcer. Its loading status
* will no longer be tracked by the announcer.
*/
unregisterInstance(modelViewer) {
if (!this.registeredInstanceStatuses.has(modelViewer)) {
return;
}
const statuses = this.registeredInstanceStatuses;
const instanceStatus = statuses.get(modelViewer);
statuses.delete(modelViewer);
instanceStatus.onUnregistered();
if (this.modelViewerStatusInstance === modelViewer) {
this.modelViewerStatusInstance = statuses.size > 0 ?
getFirstMapKey(statuses) :
null;
}
}
get modelViewerStatusInstance() {
return this[$modelViewerStatusInstance];
}
set modelViewerStatusInstance(value) {
const currentInstance = this[$modelViewerStatusInstance];
if (currentInstance === value) {
return;
}
const { statusElement } = this;
if (value != null && value.shadowRoot != null) {
value.shadowRoot.appendChild(statusElement);
}
else if (statusElement.parentNode != null) {
statusElement.parentNode.removeChild(statusElement);
}
this[$modelViewerStatusInstance] = value;
this[$updateStatus]();
}
async updateStatus() {
if (this.statusUpdateInProgress || this.loadingPromises.length === 0) {
return;
}
this.statusElement.textContent = INITIAL_STATUS_ANNOUNCEMENT;
this.statusUpdateInProgress = true;
this.dispatchEvent({ type: 'initial-status-announced' });
while (this.loadingPromises.length) {
const { loadingPromises } = this;
this.loadingPromises = [];
await Promise.all(loadingPromises);
}
this.statusElement.textContent = FINISHED_LOADING_ANNOUNCEMENT;
this.statusUpdateInProgress = false;
this.dispatchEvent({ type: 'finished-loading-announced' });
}
}
_a$4 = $modelViewerStatusInstance, _b$3 = $updateStatus;
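/* Sketch for illustration only: LoadingStatusAnnouncer extends EventDispatcher,
 * so the two announcement events dispatched in updateStatus() can be observed
 * (e.g. in tests) on the `loadingStatusAnnouncer` singleton created below:
 *
 * ```js
 * loadingStatusAnnouncer.addEventListener(
 *     'initial-status-announced', () => console.log('3D models are loading'));
 * loadingStatusAnnouncer.addEventListener(
 *     'finished-loading-announced', () => console.log('all 3D models loaded'));
 * ```
 */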
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$2 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
const PROGRESS_BAR_UPDATE_THRESHOLD = 100;
const PROGRESS_MASK_BASE_OPACITY = 0.2;
const DEFAULT_DRACO_DECODER_LOCATION = 'https://www.gstatic.com/draco/versioned/decoders/1.3.6/';
const DEFAULT_KTX2_TRANSCODER_LOCATION = 'https://www.gstatic.com/basis-universal/versioned/2021-04-15-ba1c3e4/';
const SPACE_KEY = 32;
const ENTER_KEY = 13;
const RevealStrategy = {
AUTO: 'auto',
INTERACTION: 'interaction',
MANUAL: 'manual'
};
const LoadingStrategy = {
AUTO: 'auto',
LAZY: 'lazy',
EAGER: 'eager'
};
const PosterDismissalSource = {
INTERACTION: 'interaction'
};
const loadingStatusAnnouncer = new LoadingStatusAnnouncer();
const $defaultProgressBarElement = Symbol('defaultProgressBarElement');
const $defaultProgressMaskElement = Symbol('defaultProgressMaskElement');
const $posterContainerElement = Symbol('posterContainerElement');
const $defaultPosterElement = Symbol('defaultPosterElement');
const $posterDismissalSource = Symbol('posterDismissalSource');
const $hidePoster = Symbol('hidePoster');
const $modelIsRevealed = Symbol('modelIsRevealed');
const $updateProgressBar = Symbol('updateProgressBar');
const $lastReportedProgress = Symbol('lastReportedProgress');
const $transitioned = Symbol('transitioned');
const $ariaLabelCallToAction = Symbol('ariaLabelCallToAction');
const $onClick = Symbol('onClick');
const $onKeydown = Symbol('onKeydown');
const $onProgress = Symbol('onProgress');
/**
* LoadingMixin implements features related to lazy loading, as well as
* presentation details related to the pre-load / pre-render presentation of a
* <model-viewer>
*
* This mixin implements support for models with DRACO-compressed meshes.
* The DRACO decoder will be loaded on-demand if a glTF that uses the DRACO mesh
* compression extension is encountered.
*
* By default, the DRACO decoder will be loaded from a Google CDN. It is
* possible to customize where the decoder is loaded from by defining a global
* configuration option for `<model-viewer>` like so:
*
* ```html
* <script>
* ModelViewerElement = self.ModelViewerElement || {};
* ModelViewerElement.dracoDecoderLocation =
* 'http://example.com/location/of/draco/decoder/files/';
* </script>
* ```
*
* Note that the above configuration strategy must be performed *before* the
* first `<model-viewer>` element is created in the browser. The configuration
* can be done anywhere, but the easiest way to ensure it is done at the right
* time is to do it in the `<head>` of the HTML document. This is the
* recommended way to set the location because it is most compatible with
* scenarios where the `<model-viewer>` library is lazily loaded.
*
* If you absolutely have to set the DRACO decoder location *after* the first
* `<model-viewer>` element is created, you can do it this way:
*
* ```html
* <script>
* const ModelViewerElement = customElements.get('model-viewer');
* ModelViewerElement.dracoDecoderLocation =
* 'http://example.com/location/of/draco/decoder/files/';
* </script>
* ```
*
* Note that the above configuration approach will not work until *after*
* `<model-viewer>` is defined in the browser. Also note that this configuration
* *must* be set *before* the first DRACO model is fully loaded.
*
* It is recommended that users who intend to take advantage of DRACO mesh
* compression consider whether or not it is acceptable for their use case to
* have code side-loaded from a Google CDN. If it is not acceptable, then the
* location must be customized before loading any DRACO models in order to cause
* the decoder to be loaded from an alternative, acceptable location.
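*
* KTX2/Basis texture transcoding follows the same pattern: the transcoder is
* fetched on demand from a Google CDN by default, and its location can be
* overridden the same way (a sketch mirroring the DRACO examples above; the
* URL is a placeholder):
*
* ```html
* <script>
* ModelViewerElement = self.ModelViewerElement || {};
* ModelViewerElement.ktx2TranscoderLocation =
* 'http://example.com/location/of/ktx2/transcoder/files/';
* </script>
* ```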
*/
const LoadingMixin = (ModelViewerElement) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o;
class LoadingModelViewerElement extends ModelViewerElement {
constructor(...args) {
super(...args);
/**
* A URL pointing to the image to use as a poster in scenarios where the
* <model-viewer> is not ready to reveal a rendered model to the viewer.
*/
this.poster = null;
/**
* An enumerable attribute describing under what conditions the
* <model-viewer> should reveal a model to the viewer.
*
* The default value is "auto". The only supported alternative values are
* "interaction" and "manual".
*/
this.reveal = RevealStrategy.AUTO;
/**
* An enumerable attribute describing under what conditions the
* <model-viewer> should preload a model.
*
* The default value is "auto". The only supported alternative values are
* "lazy" and "eager". Auto is equivalent to lazy, which loads the model
* when it is near the viewport for reveal = "auto", and when interacted
* with for reveal = "interaction". Eager loads the model immediately.
*/
this.loading = LoadingStrategy.AUTO;
this[_a] = false;
this[_b] = false;
this[_c] = 0;
this[_d] = null;
// TODO: Add this to the shadow root as part of this mixin's
// implementation:
this[_e] = this.shadowRoot.querySelector('.slot.poster');
this[_f] = this.shadowRoot.querySelector('#default-poster');
this[_g] = this.shadowRoot.querySelector('#default-progress-bar > .bar');
this[_h] = this.shadowRoot.querySelector('#default-progress-bar > .mask');
this[_j] = this[$defaultPosterElement].getAttribute('aria-label');
this[_k] = throttle((progress) => {
const parentNode = this[$defaultProgressBarElement].parentNode;
requestAnimationFrame(() => {
this[$defaultProgressMaskElement].style.opacity =
`${(1.0 - progress) * PROGRESS_MASK_BASE_OPACITY}`;
this[$defaultProgressBarElement].style.transform =
`scaleX(${progress})`;
if (progress === 0) {
// NOTE(cdata): We remove and re-append the progress bar in this
// condition so that the progress bar does not appear to
// transition backwards from the right when we reset to 0 (or
// otherwise <1) progress after having already reached 1 progress
// previously.
parentNode.removeChild(this[$defaultProgressBarElement]);
parentNode.appendChild(this[$defaultProgressBarElement]);
}
// NOTE(cdata): IE11 does not properly respect the second parameter
// of classList.toggle, which this implementation originally used.
// @see https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/11865865/
if (progress === 1.0) {
this[$defaultProgressBarElement].classList.add('hide');
}
else {
this[$defaultProgressBarElement].classList.remove('hide');
}
});
}, PROGRESS_BAR_UPDATE_THRESHOLD);
this[_l] = () => {
if (this.reveal === RevealStrategy.MANUAL) {
return;
}
this.dismissPoster();
};
this[_m] = (event) => {
if (this.reveal === RevealStrategy.MANUAL) {
return;
}
switch (event.keyCode) {
// NOTE(cdata): Links and buttons can typically be activated with
// both spacebar and enter to produce a synthetic click action
case SPACE_KEY:
case ENTER_KEY:
this.dismissPoster();
break;
}
};
this[_o] = (event) => {
const progress = event.detail.totalProgress;
this[$lastReportedProgress] =
Math.max(progress, this[$lastReportedProgress]);
if (progress === 1.0) {
this[$updateProgressBar].flush();
if (this[$sceneIsReady]() &&
(this[$posterDismissalSource] != null ||
this.reveal === RevealStrategy.AUTO)) {
this[$hidePoster]();
}
}
this[$updateProgressBar](progress);
this.dispatchEvent(new CustomEvent('progress', { detail: { totalProgress: progress } }));
};
const ModelViewerElement = self.ModelViewerElement || {};
const dracoDecoderLocation = ModelViewerElement.dracoDecoderLocation ||
DEFAULT_DRACO_DECODER_LOCATION;
CachingGLTFLoader.setDRACODecoderLocation(dracoDecoderLocation);
const ktx2TranscoderLocation = ModelViewerElement.ktx2TranscoderLocation ||
DEFAULT_KTX2_TRANSCODER_LOCATION;
CachingGLTFLoader.setKTX2TranscoderLocation(ktx2TranscoderLocation);
}
static set dracoDecoderLocation(value) {
CachingGLTFLoader.setDRACODecoderLocation(value);
}
static get dracoDecoderLocation() {
return CachingGLTFLoader.getDRACODecoderLocation();
}
static set ktx2TranscoderLocation(value) {
CachingGLTFLoader.setKTX2TranscoderLocation(value);
}
static get ktx2TranscoderLocation() {
return CachingGLTFLoader.getKTX2TranscoderLocation();
}
/**
* If provided, the callback will be passed each resource URL before a
* request is sent. The callback may return the original URL, or a new URL
* to override loading behavior. This behavior can be used to load assets
* from .ZIP files, drag-and-drop APIs, and Data URIs.
*/
static mapURLs(callback) {
Renderer.singleton.loader[$loader].manager.setURLModifier(callback);
}
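/**
 * Illustrative sketch (not upstream documentation): a mapURLs callback
 * rewrites each resource URL before it is fetched, e.g. to resolve files
 * supplied via drag-and-drop. `fileBlobs` here is a hypothetical
 * Map of original URL -> object URL.
 *
 * ```js
 * const ModelViewerElement = customElements.get('model-viewer');
 * ModelViewerElement.mapURLs(
 *     (url) => fileBlobs.has(url) ? fileBlobs.get(url) : url);
 * ```
 */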
/**
* Dismisses the poster, causing the model to load and render if
* necessary. This is currently effectively the same as interacting with
* the poster via user input.
*/
dismissPoster() {
if (this[$sceneIsReady]()) {
this[$hidePoster]();
}
else {
this[$posterDismissalSource] = PosterDismissalSource.INTERACTION;
this[$updateSource]();
}
}
/**
* Displays the poster, hiding the 3D model. If this is called after the 3D
* model has been revealed, then it will behave as though
* reveal='interaction', being dismissed either by a user click or a call to
* dismissPoster().
*/
showPoster() {
const posterContainerElement = this[$posterContainerElement];
const defaultPosterElement = this[$defaultPosterElement];
defaultPosterElement.removeAttribute('tabindex');
defaultPosterElement.removeAttribute('aria-hidden');
posterContainerElement.classList.add('show');
const oldVisibility = this.modelIsVisible;
this[$modelIsRevealed] = false;
this[$announceModelVisibility](oldVisibility);
this[$transitioned] = false;
}
/**
* Returns the model's bounding box dimensions in meters, independent of
* turntable rotation.
*/
getDimensions() {
return toVector3D(this[$scene].size);
}
connectedCallback() {
super.connectedCallback();
// Fired when a user first clicks the model element. Used to
// change the visibility of a poster image, or start loading
// a model.
this[$posterContainerElement].addEventListener('click', this[$onClick]);
this[$posterContainerElement].addEventListener('keydown', this[$onKeydown]);
this[$progressTracker].addEventListener('progress', this[$onProgress]);
loadingStatusAnnouncer.registerInstance(this);
}
disconnectedCallback() {
super.disconnectedCallback();
this[$posterContainerElement].removeEventListener('click', this[$onClick]);
this[$posterContainerElement].removeEventListener('keydown', this[$onKeydown]);
this[$progressTracker].removeEventListener('progress', this[$onProgress]);
loadingStatusAnnouncer.unregisterInstance(this);
}
async updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('poster') && this.poster != null) {
this[$defaultPosterElement].style.backgroundImage =
`url(${this.poster})`;
}
if (changedProperties.has('alt')) {
this[$defaultPosterElement].setAttribute('aria-label', `${this[$ariaLabel]}. ${this[$ariaLabelCallToAction]}`);
}
if (changedProperties.has('reveal') || changedProperties.has('loading')) {
this[$updateSource]();
}
}
[(_a = $modelIsRevealed, _b = $transitioned, _c = $lastReportedProgress, _d = $posterDismissalSource, _e = $posterContainerElement, _f = $defaultPosterElement, _g = $defaultProgressBarElement, _h = $defaultProgressMaskElement, _j = $ariaLabelCallToAction, _k = $updateProgressBar, _l = $onClick, _m = $onKeydown, _o = $onProgress, $shouldAttemptPreload)]() {
return !!this.src &&
(this[$posterDismissalSource] != null ||
this.loading === LoadingStrategy.EAGER ||
(this.reveal === RevealStrategy.AUTO && this[$isElementInViewport]));
}
[$sceneIsReady]() {
const { src } = this;
return !!src && super[$sceneIsReady]() &&
this[$lastReportedProgress] === 1.0;
}
[$hidePoster]() {
this[$posterDismissalSource] = null;
const posterContainerElement = this[$posterContainerElement];
const defaultPosterElement = this[$defaultPosterElement];
if (posterContainerElement.classList.contains('show')) {
posterContainerElement.classList.remove('show');
const oldVisibility = this.modelIsVisible;
this[$modelIsRevealed] = true;
this[$announceModelVisibility](oldVisibility);
// We might need to forward focus to our internal canvas, but that
// cannot happen until the poster has completely transitioned away
posterContainerElement.addEventListener('transitionend', () => {
requestAnimationFrame(() => {
this[$transitioned] = true;
const root = this.getRootNode();
// If the <model-viewer> is still focused, forward the focus to
// the canvas that has just been revealed
if (root &&
root.activeElement === this) {
this[$userInputElement].focus();
}
// Ensure that the poster is no longer focusable or visible to
// screen readers
defaultPosterElement.setAttribute('aria-hidden', 'true');
defaultPosterElement.tabIndex = -1;
this.dispatchEvent(new CustomEvent('poster-dismissed'));
});
}, { once: true });
}
}
[$getModelIsVisible]() {
return super[$getModelIsVisible]() && this[$modelIsRevealed];
}
[$hasTransitioned]() {
return super[$hasTransitioned]() && this[$transitioned];
}
async [$updateSource]() {
this[$lastReportedProgress] = 0;
if (this[$scene].currentGLTF == null || this.src == null ||
!this[$shouldAttemptPreload]()) {
// Show the poster whenever there is no model to display yet (no current
// glTF, no src, or preloading will not be attempted).
this.showPoster();
}
await super[$updateSource]();
}
}
__decorate$2([
property({ type: String })
], LoadingModelViewerElement.prototype, "poster", void 0);
__decorate$2([
property({ type: String })
], LoadingModelViewerElement.prototype, "reveal", void 0);
__decorate$2([
property({ type: String })
], LoadingModelViewerElement.prototype, "loading", void 0);
return LoadingModelViewerElement;
};
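/* Illustrative sketch (not part of the upstream source): the loading-related
 * attributes declared above combine like the following; "poster.webp" and
 * "model.glb" are placeholder asset names.
 *
 * ```html
 * <model-viewer src="model.glb" poster="poster.webp"
 *     loading="lazy" reveal="interaction">
 * </model-viewer>
 * ```
 */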
class GLTFExporter {
constructor() {
this.pluginCallbacks = [];
this.register( function ( writer ) {
return new GLTFLightExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsUnlitExtension( writer );
} );
this.register( function ( writer ) {
return new GLTFMaterialsPBRSpecularGlossiness( writer );
} );
}
register( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) === - 1 ) {
this.pluginCallbacks.push( callback );
}
return this;
}
unregister( callback ) {
if ( this.pluginCallbacks.indexOf( callback ) !== - 1 ) {
this.pluginCallbacks.splice( this.pluginCallbacks.indexOf( callback ), 1 );
}
return this;
}
/**
* Parse scenes and generate GLTF output
* @param {Scene|Array<THREE.Scene>} input Scene or array of THREE.Scene objects
* @param {Function} onDone Callback invoked when export completes
* @param {Object} options Export options
*/
parse( input, onDone, options ) {
const writer = new GLTFWriter();
const plugins = [];
for ( let i = 0, il = this.pluginCallbacks.length; i < il; i ++ ) {
plugins.push( this.pluginCallbacks[ i ]( writer ) );
}
writer.setPlugins( plugins );
writer.write( input, onDone, options );
}
}
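/* Usage sketch (illustrative only, following the parse() signature above):
 *
 * ```js
 * const exporter = new GLTFExporter();
 * exporter.parse(scene, (result) => {
 *   // With { binary: true } the result is an ArrayBuffer containing a .glb;
 *   // otherwise it is the glTF JSON object.
 *   console.log(result);
 * }, { binary: true });
 * ```
 */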
//------------------------------------------------------------------------------
// Constants
//------------------------------------------------------------------------------
const WEBGL_CONSTANTS = {
POINTS: 0x0000,
LINES: 0x0001,
LINE_LOOP: 0x0002,
LINE_STRIP: 0x0003,
TRIANGLES: 0x0004,
TRIANGLE_STRIP: 0x0005,
TRIANGLE_FAN: 0x0006,
UNSIGNED_BYTE: 0x1401,
UNSIGNED_SHORT: 0x1403,
FLOAT: 0x1406,
UNSIGNED_INT: 0x1405,
ARRAY_BUFFER: 0x8892,
ELEMENT_ARRAY_BUFFER: 0x8893,
NEAREST: 0x2600,
LINEAR: 0x2601,
NEAREST_MIPMAP_NEAREST: 0x2700,
LINEAR_MIPMAP_NEAREST: 0x2701,
NEAREST_MIPMAP_LINEAR: 0x2702,
LINEAR_MIPMAP_LINEAR: 0x2703,
CLAMP_TO_EDGE: 33071,
MIRRORED_REPEAT: 33648,
REPEAT: 10497
};
const THREE_TO_WEBGL = {};
THREE_TO_WEBGL[ NearestFilter ] = WEBGL_CONSTANTS.NEAREST;
THREE_TO_WEBGL[ NearestMipmapNearestFilter ] = WEBGL_CONSTANTS.NEAREST_MIPMAP_NEAREST;
THREE_TO_WEBGL[ NearestMipmapLinearFilter ] = WEBGL_CONSTANTS.NEAREST_MIPMAP_LINEAR;
THREE_TO_WEBGL[ LinearFilter ] = WEBGL_CONSTANTS.LINEAR;
THREE_TO_WEBGL[ LinearMipmapNearestFilter ] = WEBGL_CONSTANTS.LINEAR_MIPMAP_NEAREST;
THREE_TO_WEBGL[ LinearMipmapLinearFilter ] = WEBGL_CONSTANTS.LINEAR_MIPMAP_LINEAR;
THREE_TO_WEBGL[ ClampToEdgeWrapping ] = WEBGL_CONSTANTS.CLAMP_TO_EDGE;
THREE_TO_WEBGL[ RepeatWrapping ] = WEBGL_CONSTANTS.REPEAT;
THREE_TO_WEBGL[ MirroredRepeatWrapping ] = WEBGL_CONSTANTS.MIRRORED_REPEAT;
const PATH_PROPERTIES = {
scale: 'scale',
position: 'translation',
quaternion: 'rotation',
morphTargetInfluences: 'weights'
};
// GLB constants
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification
const GLB_HEADER_BYTES = 12;
const GLB_HEADER_MAGIC = 0x46546C67;
const GLB_VERSION = 2;
const GLB_CHUNK_PREFIX_BYTES = 8;
const GLB_CHUNK_TYPE_JSON = 0x4E4F534A;
const GLB_CHUNK_TYPE_BIN = 0x004E4942;
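// Resulting .glb layout (per the spec linked above, as assembled in
// GLTFWriter.write() below): a 12-byte header (magic 'glTF', version 2,
// total length), then the JSON chunk (8-byte prefix + space-padded content),
// then the binary chunk (8-byte prefix + zero-padded content).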
//------------------------------------------------------------------------------
// Utility functions
//------------------------------------------------------------------------------
/**
* Compare two arrays
* @param {Array} array1 Array 1 to compare
* @param {Array} array2 Array 2 to compare
* @return {Boolean} Returns true if both arrays are equal
*/
function equalArray( array1, array2 ) {
return ( array1.length === array2.length ) && array1.every( function ( element, index ) {
return element === array2[ index ];
} );
}
/**
* Converts a string to an ArrayBuffer.
* @param {string} text
* @return {ArrayBuffer}
*/
function stringToArrayBuffer( text ) {
if ( window.TextEncoder !== undefined ) {
return new TextEncoder().encode( text ).buffer;
}
const array = new Uint8Array( new ArrayBuffer( text.length ) );
for ( let i = 0, il = text.length; i < il; i ++ ) {
const value = text.charCodeAt( i );
// Replace multi-byte characters with a space (0x20).
array[ i ] = value > 0xFF ? 0x20 : value;
}
return array.buffer;
}
/**
* Checks whether the given matrix is the identity matrix.
*
* @param {Matrix4} matrix
* @returns {Boolean} Returns true if the matrix is the identity matrix
*/
function isIdentityMatrix( matrix ) {
return equalArray( matrix.elements, [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ] );
}
/**
* Get the min and max vectors from the given attribute
* @param {BufferAttribute} attribute Attribute to find the min/max in range from start to start + count
* @param {Integer} start
* @param {Integer} count
* @return {Object} Object containing the `min` and `max` values (As an array of attribute.itemSize components)
*/
function getMinMax( attribute, start, count ) {
const output = {
min: new Array( attribute.itemSize ).fill( Number.POSITIVE_INFINITY ),
max: new Array( attribute.itemSize ).fill( Number.NEGATIVE_INFINITY )
};
for ( let i = start; i < start + count; i ++ ) {
for ( let a = 0; a < attribute.itemSize; a ++ ) {
let value;
if ( attribute.itemSize > 4 ) {
// no support for interleaved data for itemSize > 4
value = attribute.array[ i * attribute.itemSize + a ];
} else {
if ( a === 0 ) value = attribute.getX( i );
else if ( a === 1 ) value = attribute.getY( i );
else if ( a === 2 ) value = attribute.getZ( i );
else if ( a === 3 ) value = attribute.getW( i );
}
output.min[ a ] = Math.min( output.min[ a ], value );
output.max[ a ] = Math.max( output.max[ a ], value );
}
}
return output;
}
/**
* Get the required size + padding for a buffer, rounded to the next 4-byte boundary.
* https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#data-alignment
*
* @param {Integer} bufferSize The size of the original buffer.
* @returns {Integer} new buffer size with required padding.
*
*/
function getPaddedBufferSize( bufferSize ) {
return Math.ceil( bufferSize / 4 ) * 4;
}
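// For illustration: getPaddedBufferSize(13) === 16 and getPaddedBufferSize(16) === 16.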
/**
* Returns a buffer aligned to 4-byte boundary.
*
* @param {ArrayBuffer} arrayBuffer Buffer to pad
* @param {Integer} paddingByte (Optional)
* @returns {ArrayBuffer} The same buffer if it's already aligned to 4-byte boundary or a new buffer
*/
function getPaddedArrayBuffer( arrayBuffer, paddingByte = 0 ) {
const paddedLength = getPaddedBufferSize( arrayBuffer.byteLength );
if ( paddedLength !== arrayBuffer.byteLength ) {
const array = new Uint8Array( paddedLength );
array.set( new Uint8Array( arrayBuffer ) );
if ( paddingByte !== 0 ) {
for ( let i = arrayBuffer.byteLength; i < paddedLength; i ++ ) {
array[ i ] = paddingByte;
}
}
return array.buffer;
}
return arrayBuffer;
}
let cachedCanvas = null;
/**
* Writer
*/
class GLTFWriter {
constructor() {
this.plugins = [];
this.options = {};
this.pending = [];
this.buffers = [];
this.byteOffset = 0;
this.nodeMap = new Map();
this.skins = [];
this.extensionsUsed = {};
this.uids = new Map();
this.uid = 0;
this.json = {
asset: {
version: '2.0',
generator: 'THREE.GLTFExporter'
}
};
this.cache = {
meshes: new Map(),
attributes: new Map(),
attributesNormalized: new Map(),
materials: new Map(),
textures: new Map(),
images: new Map()
};
}
setPlugins( plugins ) {
this.plugins = plugins;
}
/**
* Parse scenes and generate GLTF output
* @param {Scene|Array<THREE.Scene>} input Scene or array of THREE.Scene objects
* @param {Function} onDone Callback invoked when export completes
* @param {Object} options Export options
*/
write( input, onDone, options ) {
this.options = Object.assign( {}, {
// default options
binary: false,
trs: false,
onlyVisible: true,
truncateDrawRange: true,
embedImages: true,
maxTextureSize: Infinity,
animations: [],
includeCustomExtensions: false
}, options );
if ( this.options.animations.length > 0 ) {
// Only TRS properties, and not matrices, may be targeted by animation.
this.options.trs = true;
}
this.processInput( input );
const writer = this;
Promise.all( this.pending ).then( function () {
const buffers = writer.buffers;
const json = writer.json;
const options = writer.options;
const extensionsUsed = writer.extensionsUsed;
// Merge buffers.
const blob = new Blob( buffers, { type: 'application/octet-stream' } );
// Declare extensions.
const extensionsUsedList = Object.keys( extensionsUsed );
if ( extensionsUsedList.length > 0 ) json.extensionsUsed = extensionsUsedList;
// Update bytelength of the single buffer.
if ( json.buffers && json.buffers.length > 0 ) json.buffers[ 0 ].byteLength = blob.size;
if ( options.binary === true ) {
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification
const reader = new window.FileReader();
reader.readAsArrayBuffer( blob );
reader.onloadend = function () {
// Binary chunk.
const binaryChunk = getPaddedArrayBuffer( reader.result );
const binaryChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) );
binaryChunkPrefix.setUint32( 0, binaryChunk.byteLength, true );
binaryChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_BIN, true );
// JSON chunk.
const jsonChunk = getPaddedArrayBuffer( stringToArrayBuffer( JSON.stringify( json ) ), 0x20 );
const jsonChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) );
jsonChunkPrefix.setUint32( 0, jsonChunk.byteLength, true );
jsonChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_JSON, true );
// GLB header.
const header = new ArrayBuffer( GLB_HEADER_BYTES );
const headerView = new DataView( header );
headerView.setUint32( 0, GLB_HEADER_MAGIC, true );
headerView.setUint32( 4, GLB_VERSION, true );
const totalByteLength = GLB_HEADER_BYTES
+ jsonChunkPrefix.byteLength + jsonChunk.byteLength
+ binaryChunkPrefix.byteLength + binaryChunk.byteLength;
headerView.setUint32( 8, totalByteLength, true );
const glbBlob = new Blob( [
header,
jsonChunkPrefix,
jsonChunk,
binaryChunkPrefix,
binaryChunk
], { type: 'application/octet-stream' } );
const glbReader = new window.FileReader();
glbReader.readAsArrayBuffer( glbBlob );
glbReader.onloadend = function () {
onDone( glbReader.result );
};
};
} else {
if ( json.buffers && json.buffers.length > 0 ) {
const reader = new window.FileReader();
reader.readAsDataURL( blob );
reader.onloadend = function () {
const base64data = reader.result;
json.buffers[ 0 ].uri = base64data;
onDone( json );
};
} else {
onDone( json );
}
}
} );
}
/**
* Serializes an object's userData.
*
* @param {THREE.Object3D|THREE.Material} object
* @param {Object} objectDef
*/
serializeUserData( object, objectDef ) {
if ( Object.keys( object.userData ).length === 0 ) return;
const options = this.options;
const extensionsUsed = this.extensionsUsed;
try {
const json = JSON.parse( JSON.stringify( object.userData ) );
if ( options.includeCustomExtensions && json.gltfExtensions ) {
if ( objectDef.extensions === undefined ) objectDef.extensions = {};
for ( const extensionName in json.gltfExtensions ) {
objectDef.extensions[ extensionName ] = json.gltfExtensions[ extensionName ];
extensionsUsed[ extensionName ] = true;
}
delete json.gltfExtensions;
}
if ( Object.keys( json ).length > 0 ) objectDef.extras = json;
} catch ( error ) {
console.warn( 'THREE.GLTFExporter: userData of \'' + object.name + '\' ' +
'won\'t be serialized because of JSON.stringify error - ' + error.message );
}
}
/**
* Assigns and returns a temporary unique id for an object,
* in particular one that doesn't have a .uuid
* @param {Object} object
* @return {Integer}
*/
getUID( object ) {
if ( ! this.uids.has( object ) ) this.uids.set( object, this.uid ++ );
return this.uids.get( object );
}
/**
* Checks if normal attribute values are normalized.
*
* @param {BufferAttribute} normal
* @returns {Boolean}
*/
isNormalizedNormalAttribute( normal ) {
const cache = this.cache;
if ( cache.attributesNormalized.has( normal ) ) return false;
const v = new Vector3();
for ( let i = 0, il = normal.count; i < il; i ++ ) {
// 0.0005 is from glTF-validator
if ( Math.abs( v.fromBufferAttribute( normal, i ).length() - 1.0 ) > 0.0005 ) return false;
}
return true;
}
/**
* Creates normalized normal buffer attribute.
*
* @param {BufferAttribute} normal
* @returns {BufferAttribute}
*
*/
createNormalizedNormalAttribute( normal ) {
const cache = this.cache;
if ( cache.attributesNormalized.has( normal ) ) return cache.attributesNormalized.get( normal );
const attribute = normal.clone();
const v = new Vector3();
for ( let i = 0, il = attribute.count; i < il; i ++ ) {
v.fromBufferAttribute( attribute, i );
if ( v.x === 0 && v.y === 0 && v.z === 0 ) {
// if values can't be normalized set (1, 0, 0)
v.setX( 1.0 );
} else {
v.normalize();
}
attribute.setXYZ( i, v.x, v.y, v.z );
}
cache.attributesNormalized.set( normal, attribute );
return attribute;
}
/**
* Applies a texture transform, if present, to the map definition. Requires
* the KHR_texture_transform extension.
*
* @param {Object} mapDef
* @param {THREE.Texture} texture
*/
applyTextureTransform( mapDef, texture ) {
let didTransform = false;
const transformDef = {};
if ( texture.offset.x !== 0 || texture.offset.y !== 0 ) {
transformDef.offset = texture.offset.toArray();
didTransform = true;
}
if ( texture.rotation !== 0 ) {
transformDef.rotation = texture.rotation;
didTransform = true;
}
if ( texture.repeat.x !== 1 || texture.repeat.y !== 1 ) {
transformDef.scale = texture.repeat.toArray();
didTransform = true;
}
if ( didTransform ) {
mapDef.extensions = mapDef.extensions || {};
mapDef.extensions[ 'KHR_texture_transform' ] = transformDef;
this.extensionsUsed[ 'KHR_texture_transform' ] = true;
}
}
/**
* Process a buffer to append to the default one.
* @param {ArrayBuffer} buffer
* @return {Integer}
*/
processBuffer( buffer ) {
const json = this.json;
const buffers = this.buffers;
if ( ! json.buffers ) json.buffers = [ { byteLength: 0 } ];
// All buffers are merged before export.
buffers.push( buffer );
return 0;
}
/**
* Process and generate a BufferView
* @param {BufferAttribute} attribute
* @param {number} componentType
* @param {number} start
* @param {number} count
* @param {number} target (Optional) Target usage of the BufferView
* @return {Object}
*/
processBufferView( attribute, componentType, start, count, target ) {
const json = this.json;
if ( ! json.bufferViews ) json.bufferViews = [];
// Create a new dataview and dump the attribute's array into it
let componentSize;
if ( componentType === WEBGL_CONSTANTS.UNSIGNED_BYTE ) {
componentSize = 1;
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ) {
componentSize = 2;
} else {
componentSize = 4;
}
const byteLength = getPaddedBufferSize( count * attribute.itemSize * componentSize );
const dataView = new DataView( new ArrayBuffer( byteLength ) );
let offset = 0;
for ( let i = start; i < start + count; i ++ ) {
for ( let a = 0; a < attribute.itemSize; a ++ ) {
let value;
if ( attribute.itemSize > 4 ) {
// no support for interleaved data for itemSize > 4
value = attribute.array[ i * attribute.itemSize + a ];
} else {
if ( a === 0 ) value = attribute.getX( i );
else if ( a === 1 ) value = attribute.getY( i );
else if ( a === 2 ) value = attribute.getZ( i );
else if ( a === 3 ) value = attribute.getW( i );
}
if ( componentType === WEBGL_CONSTANTS.FLOAT ) {
dataView.setFloat32( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_INT ) {
dataView.setUint32( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ) {
dataView.setUint16( offset, value, true );
} else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_BYTE ) {
dataView.setUint8( offset, value );
}
offset += componentSize;
}
}
const bufferViewDef = {
buffer: this.processBuffer( dataView.buffer ),
byteOffset: this.byteOffset,
byteLength: byteLength
};
if ( target !== undefined ) bufferViewDef.target = target;
if ( target === WEBGL_CONSTANTS.ARRAY_BUFFER ) {
// Only define byteStride for vertex attributes.
bufferViewDef.byteStride = attribute.itemSize * componentSize;
}
this.byteOffset += byteLength;
json.bufferViews.push( bufferViewDef );
// @TODO Merge bufferViews where possible.
const output = {
id: json.bufferViews.length - 1,
byteOffset: 0
};
return output;
}
/**
* Process and generate a BufferView from an image Blob.
* @param {Blob} blob
* @return {Promise<Integer>}
*/
processBufferViewImage( blob ) {
const writer = this;
const json = writer.json;
if ( ! json.bufferViews ) json.bufferViews = [];
return new Promise( function ( resolve ) {
const reader = new window.FileReader();
reader.readAsArrayBuffer( blob );
reader.onloadend = function () {
const buffer = getPaddedArrayBuffer( reader.result );
const bufferViewDef = {
buffer: writer.processBuffer( buffer ),
byteOffset: writer.byteOffset,
byteLength: buffer.byteLength
};
writer.byteOffset += buffer.byteLength;
resolve( json.bufferViews.push( bufferViewDef ) - 1 );
};
} );
}
/**
* Process attribute to generate an accessor
* @param {BufferAttribute} attribute Attribute to process
* @param {THREE.BufferGeometry} geometry (Optional) Geometry used for truncated draw range
* @param {Integer} start (Optional)
* @param {Integer} count (Optional)
* @return {Integer|null} Index of the processed accessor on the "accessors" array
*/
processAccessor( attribute, geometry, start, count ) {
const options = this.options;
const json = this.json;
const types = {
1: 'SCALAR',
2: 'VEC2',
3: 'VEC3',
4: 'VEC4',
16: 'MAT4'
};
let componentType;
// Detect the component type of the attribute array (float, uint or ushort)
if ( attribute.array.constructor === Float32Array ) {
componentType = WEBGL_CONSTANTS.FLOAT;
} else if ( attribute.array.constructor === Uint32Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_INT;
} else if ( attribute.array.constructor === Uint16Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_SHORT;
} else if ( attribute.array.constructor === Uint8Array ) {
componentType = WEBGL_CONSTANTS.UNSIGNED_BYTE;
} else {
throw new Error( 'THREE.GLTFExporter: Unsupported bufferAttribute component type.' );
}
if ( start === undefined ) start = 0;
if ( count === undefined ) count = attribute.count;
// @TODO Indexed buffer geometry with drawRange not supported yet
if ( options.truncateDrawRange && geometry !== undefined && geometry.index === null ) {
const end = start + count;
const end2 = geometry.drawRange.count === Infinity
? attribute.count
: geometry.drawRange.start + geometry.drawRange.count;
start = Math.max( start, geometry.drawRange.start );
count = Math.min( end, end2 ) - start;
if ( count < 0 ) count = 0;
}
// Skip creating an accessor if the attribute doesn't have data to export
if ( count === 0 ) return null;
const minMax = getMinMax( attribute, start, count );
let bufferViewTarget;
// If geometry isn't provided, don't infer the target usage of the bufferView. For
// animation samplers, target must not be set.
if ( geometry !== undefined ) {
bufferViewTarget = attribute === geometry.index ? WEBGL_CONSTANTS.ELEMENT_ARRAY_BUFFER : WEBGL_CONSTANTS.ARRAY_BUFFER;
}
const bufferView = this.processBufferView( attribute, componentType, start, count, bufferViewTarget );
const accessorDef = {
bufferView: bufferView.id,
byteOffset: bufferView.byteOffset,
componentType: componentType,
count: count,
max: minMax.max,
min: minMax.min,
type: types[ attribute.itemSize ]
};
if ( attribute.normalized === true ) accessorDef.normalized = true;
if ( ! json.accessors ) json.accessors = [];
return json.accessors.push( accessorDef ) - 1;
}
/**
* Process image
* @param {Image} image Image to process
* @param {Integer} format Format of the image (e.g. RGBFormat, RGBAFormat)
* @param {Boolean} flipY Whether to flip the image vertically before writing it out
* @return {Integer} Index of the processed image in the "images" array
*/
processImage( image, format, flipY ) {
const writer = this;
const cache = writer.cache;
const json = writer.json;
const options = writer.options;
const pending = writer.pending;
if ( ! cache.images.has( image ) ) cache.images.set( image, {} );
const cachedImages = cache.images.get( image );
const mimeType = format === RGBAFormat ? 'image/png' : 'image/jpeg';
const key = mimeType + ':flipY/' + flipY.toString();
if ( cachedImages[ key ] !== undefined ) return cachedImages[ key ];
if ( ! json.images ) json.images = [];
const imageDef = { mimeType: mimeType };
if ( options.embedImages ) {
const canvas = cachedCanvas = cachedCanvas || document.createElement( 'canvas' );
canvas.width = Math.min( image.width, options.maxTextureSize );
canvas.height = Math.min( image.height, options.maxTextureSize );
const ctx = canvas.getContext( '2d' );
if ( flipY === true ) {
ctx.translate( 0, canvas.height );
ctx.scale( 1, - 1 );
}
if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) ||
( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) ||
( typeof OffscreenCanvas !== 'undefined' && image instanceof OffscreenCanvas ) ||
( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) {
ctx.drawImage( image, 0, 0, canvas.width, canvas.height );
} else {
if ( format !== RGBAFormat && format !== RGBFormat ) {
console.error( 'GLTFExporter: Only RGB and RGBA formats are supported.' );
}
if ( image.width > options.maxTextureSize || image.height > options.maxTextureSize ) {
console.warn( 'GLTFExporter: Image size is bigger than maxTextureSize', image );
}
let data = image.data;
if ( format === RGBFormat ) {
data = new Uint8ClampedArray( image.height * image.width * 4 );
for ( let i = 0, j = 0; i < data.length; i += 4, j += 3 ) {
data[ i + 0 ] = image.data[ j + 0 ];
data[ i + 1 ] = image.data[ j + 1 ];
data[ i + 2 ] = image.data[ j + 2 ];
data[ i + 3 ] = 255;
}
}
ctx.putImageData( new ImageData( data, image.width, image.height ), 0, 0 );
}
if ( options.binary === true ) {
pending.push( new Promise( function ( resolve ) {
canvas.toBlob( function ( blob ) {
writer.processBufferViewImage( blob ).then( function ( bufferViewIndex ) {
imageDef.bufferView = bufferViewIndex;
resolve();
} );
}, mimeType );
} ) );
} else {
imageDef.uri = canvas.toDataURL( mimeType );
}
} else {
imageDef.uri = image.src;
}
const index = json.images.push( imageDef ) - 1;
cachedImages[ key ] = index;
return index;
}
/**
* Process sampler
* @param {Texture} map Texture to process
* @return {Integer} Index of the processed texture in the "samplers" array
*/
processSampler( map ) {
const json = this.json;
if ( ! json.samplers ) json.samplers = [];
const samplerDef = {
magFilter: THREE_TO_WEBGL[ map.magFilter ],
minFilter: THREE_TO_WEBGL[ map.minFilter ],
wrapS: THREE_TO_WEBGL[ map.wrapS ],
wrapT: THREE_TO_WEBGL[ map.wrapT ]
};
return json.samplers.push( samplerDef ) - 1;
}
/**
* Process texture
* @param {Texture} map Map to process
* @return {Integer} Index of the processed texture in the "textures" array
*/
processTexture( map ) {
const cache = this.cache;
const json = this.json;
if ( cache.textures.has( map ) ) return cache.textures.get( map );
if ( ! json.textures ) json.textures = [];
const textureDef = {
sampler: this.processSampler( map ),
source: this.processImage( map.image, map.format, map.flipY )
};
if ( map.name ) textureDef.name = map.name;
this._invokeAll( function ( ext ) {
ext.writeTexture && ext.writeTexture( map, textureDef );
} );
const index = json.textures.push( textureDef ) - 1;
cache.textures.set( map, index );
return index;
}
/**
* Process material
* @param {THREE.Material} material Material to process
* @return {Integer|null} Index of the processed material in the "materials" array
*/
processMaterial( material ) {
const cache = this.cache;
const json = this.json;
if ( cache.materials.has( material ) ) return cache.materials.get( material );
if ( material.isShaderMaterial ) {
console.warn( 'GLTFExporter: THREE.ShaderMaterial not supported.' );
return null;
}
if ( ! json.materials ) json.materials = [];
// @QUESTION Should we avoid including any attribute that has the default value?
const materialDef = { pbrMetallicRoughness: {} };
if ( material.isMeshStandardMaterial !== true && material.isMeshBasicMaterial !== true ) {
console.warn( 'GLTFExporter: Use MeshStandardMaterial or MeshBasicMaterial for best results.' );
}
// pbrMetallicRoughness.baseColorFactor
const color = material.color.toArray().concat( [ material.opacity ] );
if ( ! equalArray( color, [ 1, 1, 1, 1 ] ) ) {
materialDef.pbrMetallicRoughness.baseColorFactor = color;
}
if ( material.isMeshStandardMaterial ) {
materialDef.pbrMetallicRoughness.metallicFactor = material.metalness;
materialDef.pbrMetallicRoughness.roughnessFactor = material.roughness;
} else {
materialDef.pbrMetallicRoughness.metallicFactor = 0.5;
materialDef.pbrMetallicRoughness.roughnessFactor = 0.5;
}
// pbrMetallicRoughness.metallicRoughnessTexture
if ( material.metalnessMap || material.roughnessMap ) {
if ( material.metalnessMap === material.roughnessMap ) {
const metalRoughMapDef = { index: this.processTexture( material.metalnessMap ) };
this.applyTextureTransform( metalRoughMapDef, material.metalnessMap );
materialDef.pbrMetallicRoughness.metallicRoughnessTexture = metalRoughMapDef;
} else {
console.warn( 'THREE.GLTFExporter: Ignoring metalnessMap and roughnessMap because they are not the same Texture.' );
}
}
// pbrMetallicRoughness.baseColorTexture or pbrSpecularGlossiness diffuseTexture
if ( material.map ) {
const baseColorMapDef = { index: this.processTexture( material.map ) };
this.applyTextureTransform( baseColorMapDef, material.map );
materialDef.pbrMetallicRoughness.baseColorTexture = baseColorMapDef;
}
if ( material.emissive ) {
// emissiveFactor
const emissive = material.emissive.clone().multiplyScalar( material.emissiveIntensity ).toArray();
if ( ! equalArray( emissive, [ 0, 0, 0 ] ) ) {
materialDef.emissiveFactor = emissive;
}
// emissiveTexture
if ( material.emissiveMap ) {
const emissiveMapDef = { index: this.processTexture( material.emissiveMap ) };
this.applyTextureTransform( emissiveMapDef, material.emissiveMap );
materialDef.emissiveTexture = emissiveMapDef;
}
}
// normalTexture
if ( material.normalMap ) {
const normalMapDef = { index: this.processTexture( material.normalMap ) };
if ( material.normalScale && material.normalScale.x !== - 1 ) {
if ( material.normalScale.x !== material.normalScale.y ) {
console.warn( 'THREE.GLTFExporter: Normal scale components are different, ignoring Y and exporting X.' );
}
normalMapDef.scale = material.normalScale.x;
}
this.applyTextureTransform( normalMapDef, material.normalMap );
materialDef.normalTexture = normalMapDef;
}
// occlusionTexture
if ( material.aoMap ) {
const occlusionMapDef = {
index: this.processTexture( material.aoMap ),
texCoord: 1
};
if ( material.aoMapIntensity !== 1.0 ) {
occlusionMapDef.strength = material.aoMapIntensity;
}
this.applyTextureTransform( occlusionMapDef, material.aoMap );
materialDef.occlusionTexture = occlusionMapDef;
}
// alphaMode
if ( material.transparent ) {
materialDef.alphaMode = 'BLEND';
} else {
if ( material.alphaTest > 0.0 ) {
materialDef.alphaMode = 'MASK';
materialDef.alphaCutoff = material.alphaTest;
}
}
// doubleSided
if ( material.side === DoubleSide ) materialDef.doubleSided = true;
if ( material.name !== '' ) materialDef.name = material.name;
this.serializeUserData( material, materialDef );
this._invokeAll( function ( ext ) {
ext.writeMaterial && ext.writeMaterial( material, materialDef );
} );
const index = json.materials.push( materialDef ) - 1;
cache.materials.set( material, index );
return index;
}
/**
* Process mesh
* @param {THREE.Mesh} mesh Mesh to process
* @return {Integer|null} Index of the processed mesh in the "meshes" array
*/
processMesh( mesh ) {
const cache = this.cache;
const json = this.json;
const meshCacheKeyParts = [ mesh.geometry.uuid ];
if ( Array.isArray( mesh.material ) ) {
for ( let i = 0, l = mesh.material.length; i < l; i ++ ) {
meshCacheKeyParts.push( mesh.material[ i ].uuid );
}
} else {
meshCacheKeyParts.push( mesh.material.uuid );
}
const meshCacheKey = meshCacheKeyParts.join( ':' );
if ( cache.meshes.has( meshCacheKey ) ) return cache.meshes.get( meshCacheKey );
const geometry = mesh.geometry;
let mode;
// Use the correct mode
if ( mesh.isLineSegments ) {
mode = WEBGL_CONSTANTS.LINES;
} else if ( mesh.isLineLoop ) {
mode = WEBGL_CONSTANTS.LINE_LOOP;
} else if ( mesh.isLine ) {
mode = WEBGL_CONSTANTS.LINE_STRIP;
} else if ( mesh.isPoints ) {
mode = WEBGL_CONSTANTS.POINTS;
} else {
mode = mesh.material.wireframe ? WEBGL_CONSTANTS.LINES : WEBGL_CONSTANTS.TRIANGLES;
}
if ( geometry.isBufferGeometry !== true ) {
throw new Error( 'THREE.GLTFExporter: Geometry is not of type THREE.BufferGeometry.' );
}
const meshDef = {};
const attributes = {};
const primitives = [];
const targets = [];
// Conversion between attributes names in threejs and gltf spec
const nameConversion = {
uv: 'TEXCOORD_0',
uv2: 'TEXCOORD_1',
color: 'COLOR_0',
skinWeight: 'WEIGHTS_0',
skinIndex: 'JOINTS_0'
};
const originalNormal = geometry.getAttribute( 'normal' );
if ( originalNormal !== undefined && ! this.isNormalizedNormalAttribute( originalNormal ) ) {
console.warn( 'THREE.GLTFExporter: Creating normalized normal attribute from the non-normalized one.' );
geometry.setAttribute( 'normal', this.createNormalizedNormalAttribute( originalNormal ) );
}
// @QUESTION Detect if .vertexColors = true?
// For every attribute create an accessor
let modifiedAttribute = null;
for ( let attributeName in geometry.attributes ) {
// Ignore morph target attributes, which are exported later.
if ( attributeName.substr( 0, 5 ) === 'morph' ) continue;
const attribute = geometry.attributes[ attributeName ];
attributeName = nameConversion[ attributeName ] || attributeName.toUpperCase();
// Prefix all geometry attributes except the ones specifically
// listed in the spec; non-spec attributes are considered custom.
const validVertexAttributes =
/^(POSITION|NORMAL|TANGENT|TEXCOORD_\d+|COLOR_\d+|JOINTS_\d+|WEIGHTS_\d+)$/;
if ( ! validVertexAttributes.test( attributeName ) ) attributeName = '_' + attributeName;
if ( cache.attributes.has( this.getUID( attribute ) ) ) {
attributes[ attributeName ] = cache.attributes.get( this.getUID( attribute ) );
continue;
}
// JOINTS_0 must be UNSIGNED_BYTE or UNSIGNED_SHORT.
modifiedAttribute = null;
const array = attribute.array;
if ( attributeName === 'JOINTS_0' &&
! ( array instanceof Uint16Array ) &&
! ( array instanceof Uint8Array ) ) {
console.warn( 'GLTFExporter: Attribute "skinIndex" converted to type UNSIGNED_SHORT.' );
modifiedAttribute = new BufferAttribute( new Uint16Array( array ), attribute.itemSize, attribute.normalized );
}
const accessor = this.processAccessor( modifiedAttribute || attribute, geometry );
if ( accessor !== null ) {
attributes[ attributeName ] = accessor;
cache.attributes.set( this.getUID( attribute ), accessor );
}
}
if ( originalNormal !== undefined ) geometry.setAttribute( 'normal', originalNormal );
// Skip if no exportable attributes found
if ( Object.keys( attributes ).length === 0 ) return null;
// Morph targets
if ( mesh.morphTargetInfluences !== undefined && mesh.morphTargetInfluences.length > 0 ) {
const weights = [];
const targetNames = [];
const reverseDictionary = {};
if ( mesh.morphTargetDictionary !== undefined ) {
for ( const key in mesh.morphTargetDictionary ) {
reverseDictionary[ mesh.morphTargetDictionary[ key ] ] = key;
}
}
for ( let i = 0; i < mesh.morphTargetInfluences.length; ++ i ) {
const target = {};
let warned = false;
for ( const attributeName in geometry.morphAttributes ) {
// glTF 2.0 morph supports only POSITION/NORMAL/TANGENT.
// Three.js doesn't support TANGENT yet.
if ( attributeName !== 'position' && attributeName !== 'normal' ) {
if ( ! warned ) {
console.warn( 'GLTFExporter: Only POSITION and NORMAL morph are supported.' );
warned = true;
}
continue;
}
const attribute = geometry.morphAttributes[ attributeName ][ i ];
const gltfAttributeName = attributeName.toUpperCase();
// Three.js morph attributes hold absolute values, whereas glTF stores morph targets as values relative to the base attribute.
//
// glTF 2.0 Specification:
// https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#morph-targets
const baseAttribute = geometry.attributes[ attributeName ];
if ( cache.attributes.has( this.getUID( attribute ) ) ) {
target[ gltfAttributeName ] = cache.attributes.get( this.getUID( attribute ) );
continue;
}
// Clone the attribute so the original is not modified.
const relativeAttribute = attribute.clone();
if ( ! geometry.morphTargetsRelative ) {
for ( let j = 0, jl = attribute.count; j < jl; j ++ ) {
relativeAttribute.setXYZ(
j,
attribute.getX( j ) - baseAttribute.getX( j ),
attribute.getY( j ) - baseAttribute.getY( j ),
attribute.getZ( j ) - baseAttribute.getZ( j )
);
}
}
target[ gltfAttributeName ] = this.processAccessor( relativeAttribute, geometry );
cache.attributes.set( this.getUID( baseAttribute ), target[ gltfAttributeName ] );
}
targets.push( target );
weights.push( mesh.morphTargetInfluences[ i ] );
if ( mesh.morphTargetDictionary !== undefined ) targetNames.push( reverseDictionary[ i ] );
}
meshDef.weights = weights;
if ( targetNames.length > 0 ) {
meshDef.extras = {};
meshDef.extras.targetNames = targetNames;
}
}
const isMultiMaterial = Array.isArray( mesh.material );
if ( isMultiMaterial && geometry.groups.length === 0 ) return null;
const materials = isMultiMaterial ? mesh.material : [ mesh.material ];
const groups = isMultiMaterial ? geometry.groups : [ { materialIndex: 0, start: undefined, count: undefined } ];
for ( let i = 0, il = groups.length; i < il; i ++ ) {
const primitive = {
mode: mode,
attributes: attributes,
};
this.serializeUserData( geometry, primitive );
if ( targets.length > 0 ) primitive.targets = targets;
if ( geometry.index !== null ) {
let cacheKey = this.getUID( geometry.index );
if ( groups[ i ].start !== undefined || groups[ i ].count !== undefined ) {
cacheKey += ':' + groups[ i ].start + ':' + groups[ i ].count;
}
if ( cache.attributes.has( cacheKey ) ) {
primitive.indices = cache.attributes.get( cacheKey );
} else {
primitive.indices = this.processAccessor( geometry.index, geometry, groups[ i ].start, groups[ i ].count );
cache.attributes.set( cacheKey, primitive.indices );
}
if ( primitive.indices === null ) delete primitive.indices;
}
const material = this.processMaterial( materials[ groups[ i ].materialIndex ] );
if ( material !== null ) primitive.material = material;
primitives.push( primitive );
}
meshDef.primitives = primitives;
if ( ! json.meshes ) json.meshes = [];
this._invokeAll( function ( ext ) {
ext.writeMesh && ext.writeMesh( mesh, meshDef );
} );
const index = json.meshes.push( meshDef ) - 1;
cache.meshes.set( meshCacheKey, index );
return index;
}
/**
* Process camera
* @param {THREE.Camera} camera Camera to process
* @return {Integer} Index of the processed mesh in the "camera" array
*/
processCamera( camera ) {
const json = this.json;
if ( ! json.cameras ) json.cameras = [];
const isOrtho = camera.isOrthographicCamera;
const cameraDef = {
type: isOrtho ? 'orthographic' : 'perspective'
};
if ( isOrtho ) {
cameraDef.orthographic = {
xmag: camera.right * 2,
ymag: camera.top * 2,
zfar: camera.far <= 0 ? 0.001 : camera.far,
znear: camera.near < 0 ? 0 : camera.near
};
} else {
cameraDef.perspective = {
aspectRatio: camera.aspect,
yfov: MathUtils.degToRad( camera.fov ),
zfar: camera.far <= 0 ? 0.001 : camera.far,
znear: camera.near < 0 ? 0 : camera.near
};
}
// Question: Is saving "type" as name intentional?
if ( camera.name !== '' ) cameraDef.name = camera.type;
return json.cameras.push( cameraDef ) - 1;
}
/**
* Creates glTF animation entry from AnimationClip object.
*
* Status:
* - Only properties listed in PATH_PROPERTIES may be animated.
*
* @param {THREE.AnimationClip} clip
* @param {THREE.Object3D} root
* @return {number|null}
*/
processAnimation( clip, root ) {
const json = this.json;
const nodeMap = this.nodeMap;
if ( ! json.animations ) json.animations = [];
clip = GLTFExporter.Utils.mergeMorphTargetTracks( clip.clone(), root );
const tracks = clip.tracks;
const channels = [];
const samplers = [];
for ( let i = 0; i < tracks.length; ++ i ) {
const track = tracks[ i ];
const trackBinding = PropertyBinding.parseTrackName( track.name );
let trackNode = PropertyBinding.findNode( root, trackBinding.nodeName );
const trackProperty = PATH_PROPERTIES[ trackBinding.propertyName ];
if ( trackBinding.objectName === 'bones' ) {
if ( trackNode.isSkinnedMesh === true ) {
trackNode = trackNode.skeleton.getBoneByName( trackBinding.objectIndex );
} else {
trackNode = undefined;
}
}
if ( ! trackNode || ! trackProperty ) {
console.warn( 'THREE.GLTFExporter: Could not export animation track "%s".', track.name );
return null;
}
const inputItemSize = 1;
let outputItemSize = track.values.length / track.times.length;
if ( trackProperty === PATH_PROPERTIES.morphTargetInfluences ) {
outputItemSize /= trackNode.morphTargetInfluences.length;
}
let interpolation;
// @TODO export CubicInterpolant(InterpolateSmooth) as CUBICSPLINE
// Detect a glTF cubic spline interpolant by checking the factory method's special property.
// GLTFCubicSplineInterpolant is a custom interpolant, so the track doesn't return a
// valid value from .getInterpolation().
if ( track.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline === true ) {
interpolation = 'CUBICSPLINE';
// itemSize of CUBICSPLINE keyframe is 9
// (VEC3 * 3: inTangent, splineVertex, and outTangent)
// but needs to be stored as VEC3 so dividing by 3 here.
outputItemSize /= 3;
} else if ( track.getInterpolation() === InterpolateDiscrete ) {
interpolation = 'STEP';
} else {
interpolation = 'LINEAR';
}
samplers.push( {
input: this.processAccessor( new BufferAttribute( track.times, inputItemSize ) ),
output: this.processAccessor( new BufferAttribute( track.values, outputItemSize ) ),
interpolation: interpolation
} );
channels.push( {
sampler: samplers.length - 1,
target: {
node: nodeMap.get( trackNode ),
path: trackProperty
}
} );
}
json.animations.push( {
name: clip.name || 'clip_' + json.animations.length,
samplers: samplers,
channels: channels
} );
return json.animations.length - 1;
}
/**
* @param {THREE.Object3D} object
* @return {number|null}
*/
processSkin( object ) {
const json = this.json;
const nodeMap = this.nodeMap;
const node = json.nodes[ nodeMap.get( object ) ];
const skeleton = object.skeleton;
if ( skeleton === undefined ) return null;
const rootJoint = object.skeleton.bones[ 0 ];
if ( rootJoint === undefined ) return null;
const joints = [];
const inverseBindMatrices = new Float32Array( skeleton.bones.length * 16 );
const temporaryBoneInverse = new Matrix4();
for ( let i = 0; i < skeleton.bones.length; ++ i ) {
joints.push( nodeMap.get( skeleton.bones[ i ] ) );
temporaryBoneInverse.copy( skeleton.boneInverses[ i ] );
temporaryBoneInverse.multiply( object.bindMatrix ).toArray( inverseBindMatrices, i * 16 );
}
if ( json.skins === undefined ) json.skins = [];
json.skins.push( {
inverseBindMatrices: this.processAccessor( new BufferAttribute( inverseBindMatrices, 16 ) ),
joints: joints,
skeleton: nodeMap.get( rootJoint )
} );
const skinIndex = node.skin = json.skins.length - 1;
return skinIndex;
}
/**
* Process Object3D node
* @param {THREE.Object3D} object Object3D to process
* @return {Integer} Index of the node in the nodes list
*/
processNode( object ) {
const json = this.json;
const options = this.options;
const nodeMap = this.nodeMap;
if ( ! json.nodes ) json.nodes = [];
const nodeDef = {};
if ( options.trs ) {
const rotation = object.quaternion.toArray();
const position = object.position.toArray();
const scale = object.scale.toArray();
if ( ! equalArray( rotation, [ 0, 0, 0, 1 ] ) ) {
nodeDef.rotation = rotation;
}
if ( ! equalArray( position, [ 0, 0, 0 ] ) ) {
nodeDef.translation = position;
}
if ( ! equalArray( scale, [ 1, 1, 1 ] ) ) {
nodeDef.scale = scale;
}
} else {
if ( object.matrixAutoUpdate ) {
object.updateMatrix();
}
if ( isIdentityMatrix( object.matrix ) === false ) {
nodeDef.matrix = object.matrix.elements;
}
}
// Don't export an empty name string; in Three.js it means the object has no name.
if ( object.name !== '' ) nodeDef.name = String( object.name );
this.serializeUserData( object, nodeDef );
if ( object.isMesh || object.isLine || object.isPoints ) {
const meshIndex = this.processMesh( object );
if ( meshIndex !== null ) nodeDef.mesh = meshIndex;
} else if ( object.isCamera ) {
nodeDef.camera = this.processCamera( object );
}
if ( object.isSkinnedMesh ) this.skins.push( object );
if ( object.children.length > 0 ) {
const children = [];
for ( let i = 0, l = object.children.length; i < l; i ++ ) {
const child = object.children[ i ];
if ( child.visible || options.onlyVisible === false ) {
const nodeIndex = this.processNode( child );
if ( nodeIndex !== null ) children.push( nodeIndex );
}
}
if ( children.length > 0 ) nodeDef.children = children;
}
this._invokeAll( function ( ext ) {
ext.writeNode && ext.writeNode( object, nodeDef );
} );
const nodeIndex = json.nodes.push( nodeDef ) - 1;
nodeMap.set( object, nodeIndex );
return nodeIndex;
}
/**
* Process Scene
* @param {Scene} scene Scene to process
*/
processScene( scene ) {
const json = this.json;
const options = this.options;
if ( ! json.scenes ) {
json.scenes = [];
json.scene = 0;
}
const sceneDef = {};
if ( scene.name !== '' ) sceneDef.name = scene.name;
json.scenes.push( sceneDef );
const nodes = [];
for ( let i = 0, l = scene.children.length; i < l; i ++ ) {
const child = scene.children[ i ];
if ( child.visible || options.onlyVisible === false ) {
const nodeIndex = this.processNode( child );
if ( nodeIndex !== null ) nodes.push( nodeIndex );
}
}
if ( nodes.length > 0 ) sceneDef.nodes = nodes;
this.serializeUserData( scene, sceneDef );
}
/**
* Creates an auxiliary Scene to hold a list of objects and processes it
* @param {Array} objects List of objects to process
*/
processObjects( objects ) {
const scene = new Scene();
scene.name = 'AuxScene';
for ( let i = 0; i < objects.length; i ++ ) {
// Push directly to children instead of calling `add` to avoid modifying
// .parent and breaking the object's original scene hierarchy.
scene.children.push( objects[ i ] );
}
this.processScene( scene );
}
/**
* @param {THREE.Object3D|Array<THREE.Object3D>} input
*/
processInput( input ) {
const options = this.options;
input = input instanceof Array ? input : [ input ];
this._invokeAll( function ( ext ) {
ext.beforeParse && ext.beforeParse( input );
} );
const objectsWithoutScene = [];
for ( let i = 0; i < input.length; i ++ ) {
if ( input[ i ] instanceof Scene ) {
this.processScene( input[ i ] );
} else {
objectsWithoutScene.push( input[ i ] );
}
}
if ( objectsWithoutScene.length > 0 ) this.processObjects( objectsWithoutScene );
for ( let i = 0; i < this.skins.length; ++ i ) {
this.processSkin( this.skins[ i ] );
}
for ( let i = 0; i < options.animations.length; ++ i ) {
this.processAnimation( options.animations[ i ], input[ 0 ] );
}
this._invokeAll( function ( ext ) {
ext.afterParse && ext.afterParse( input );
} );
}
_invokeAll( func ) {
for ( let i = 0, il = this.plugins.length; i < il; i ++ ) {
func( this.plugins[ i ] );
}
}
}
/**
* Punctual Lights Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_lights_punctual
*/
class GLTFLightExtension {
constructor( writer ) {
this.writer = writer;
this.name = 'KHR_lights_punctual';
}
writeNode( light, nodeDef ) {
if ( ! light.isLight ) return;
if ( ! light.isDirectionalLight && ! light.isPointLight && ! light.isSpotLight ) {
console.warn( 'THREE.GLTFExporter: Only directional, point, and spot lights are supported.', light );
return;
}
const writer = this.writer;
const json = writer.json;
const extensionsUsed = writer.extensionsUsed;
const lightDef = {};
if ( light.name ) lightDef.name = light.name;
lightDef.color = light.color.toArray();
lightDef.intensity = light.intensity;
if ( light.isDirectionalLight ) {
lightDef.type = 'directional';
} else if ( light.isPointLight ) {
lightDef.type = 'point';
if ( light.distance > 0 ) lightDef.range = light.distance;
} else if ( light.isSpotLight ) {
lightDef.type = 'spot';
if ( light.distance > 0 ) lightDef.range = light.distance;
lightDef.spot = {};
lightDef.spot.innerConeAngle = ( light.penumbra - 1.0 ) * light.angle * - 1.0; // equivalent to ( 1.0 - light.penumbra ) * light.angle
lightDef.spot.outerConeAngle = light.angle;
}
if ( light.decay !== undefined && light.decay !== 2 ) {
console.warn( 'THREE.GLTFExporter: Light decay may be lost. glTF is physically-based, '
+ 'and expects light.decay=2.' );
}
if ( light.target
&& ( light.target.parent !== light
|| light.target.position.x !== 0
|| light.target.position.y !== 0
|| light.target.position.z !== - 1 ) ) {
console.warn( 'THREE.GLTFExporter: Light direction may be lost. For best results, '
+ 'make light.target a child of the light with position 0,0,-1.' );
}
if ( ! extensionsUsed[ this.name ] ) {
json.extensions = json.extensions || {};
json.extensions[ this.name ] = { lights: [] };
extensionsUsed[ this.name ] = true;
}
const lights = json.extensions[ this.name ].lights;
lights.push( lightDef );
nodeDef.extensions = nodeDef.extensions || {};
nodeDef.extensions[ this.name ] = { light: lights.length - 1 };
}
}
/**
* Unlit Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_unlit
*/
class GLTFMaterialsUnlitExtension {
constructor( writer ) {
this.writer = writer;
this.name = 'KHR_materials_unlit';
}
writeMaterial( material, materialDef ) {
if ( ! material.isMeshBasicMaterial ) return;
const writer = this.writer;
const extensionsUsed = writer.extensionsUsed;
materialDef.extensions = materialDef.extensions || {};
materialDef.extensions[ this.name ] = {};
extensionsUsed[ this.name ] = true;
materialDef.pbrMetallicRoughness.metallicFactor = 0.0;
materialDef.pbrMetallicRoughness.roughnessFactor = 0.9;
}
}
/**
* Specular-Glossiness Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_pbrSpecularGlossiness
*/
class GLTFMaterialsPBRSpecularGlossiness {
constructor( writer ) {
this.writer = writer;
this.name = 'KHR_materials_pbrSpecularGlossiness';
}
writeMaterial( material, materialDef ) {
if ( ! material.isGLTFSpecularGlossinessMaterial ) return;
const writer = this.writer;
const extensionsUsed = writer.extensionsUsed;
const extensionDef = {};
if ( materialDef.pbrMetallicRoughness.baseColorFactor ) {
extensionDef.diffuseFactor = materialDef.pbrMetallicRoughness.baseColorFactor;
}
const specularFactor = [ 1, 1, 1 ];
material.specular.toArray( specularFactor, 0 );
extensionDef.specularFactor = specularFactor;
extensionDef.glossinessFactor = material.glossiness;
if ( materialDef.pbrMetallicRoughness.baseColorTexture ) {
extensionDef.diffuseTexture = materialDef.pbrMetallicRoughness.baseColorTexture;
}
if ( material.specularMap ) {
const specularMapDef = { index: writer.processTexture( material.specularMap ) };
writer.applyTextureTransform( specularMapDef, material.specularMap );
extensionDef.specularGlossinessTexture = specularMapDef;
}
materialDef.extensions = materialDef.extensions || {};
materialDef.extensions[ this.name ] = extensionDef;
extensionsUsed[ this.name ] = true;
}
}
/**
* Static utility functions
*/
GLTFExporter.Utils = {
insertKeyframe: function ( track, time ) {
const tolerance = 0.001; // 1ms
const valueSize = track.getValueSize();
const times = new track.TimeBufferType( track.times.length + 1 );
const values = new track.ValueBufferType( track.values.length + valueSize );
const interpolant = track.createInterpolant( new track.ValueBufferType( valueSize ) );
let index;
if ( track.times.length === 0 ) {
times[ 0 ] = time;
for ( let i = 0; i < valueSize; i ++ ) {
values[ i ] = 0;
}
index = 0;
} else if ( time < track.times[ 0 ] ) {
if ( Math.abs( track.times[ 0 ] - time ) < tolerance ) return 0;
times[ 0 ] = time;
times.set( track.times, 1 );
values.set( interpolant.evaluate( time ), 0 );
values.set( track.values, valueSize );
index = 0;
} else if ( time > track.times[ track.times.length - 1 ] ) {
if ( Math.abs( track.times[ track.times.length - 1 ] - time ) < tolerance ) {
return track.times.length - 1;
}
times[ times.length - 1 ] = time;
times.set( track.times, 0 );
values.set( track.values, 0 );
values.set( interpolant.evaluate( time ), track.values.length );
index = times.length - 1;
} else {
for ( let i = 0; i < track.times.length; i ++ ) {
if ( Math.abs( track.times[ i ] - time ) < tolerance ) return i;
if ( track.times[ i ] < time && track.times[ i + 1 ] > time ) {
times.set( track.times.slice( 0, i + 1 ), 0 );
times[ i + 1 ] = time;
times.set( track.times.slice( i + 1 ), i + 2 );
values.set( track.values.slice( 0, ( i + 1 ) * valueSize ), 0 );
values.set( interpolant.evaluate( time ), ( i + 1 ) * valueSize );
values.set( track.values.slice( ( i + 1 ) * valueSize ), ( i + 2 ) * valueSize );
index = i + 1;
break;
}
}
}
track.times = times;
track.values = values;
return index;
},
mergeMorphTargetTracks: function ( clip, root ) {
const tracks = [];
const mergedTracks = {};
const sourceTracks = clip.tracks;
for ( let i = 0; i < sourceTracks.length; ++ i ) {
let sourceTrack = sourceTracks[ i ];
const sourceTrackBinding = PropertyBinding.parseTrackName( sourceTrack.name );
const sourceTrackNode = PropertyBinding.findNode( root, sourceTrackBinding.nodeName );
if ( sourceTrackBinding.propertyName !== 'morphTargetInfluences' || sourceTrackBinding.propertyIndex === undefined ) {
// Tracks that don't affect morph targets, or that affect all morph targets together, can be left as-is.
tracks.push( sourceTrack );
continue;
}
if ( sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodDiscrete
&& sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodLinear ) {
if ( sourceTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) {
// This should never happen, because glTF morph target animations
// affect all targets already.
throw new Error( 'THREE.GLTFExporter: Cannot merge tracks with glTF CUBICSPLINE interpolation.' );
}
console.warn( 'THREE.GLTFExporter: Morph target interpolation mode not yet supported. Using LINEAR instead.' );
sourceTrack = sourceTrack.clone();
sourceTrack.setInterpolation( InterpolateLinear );
}
const targetCount = sourceTrackNode.morphTargetInfluences.length;
const targetIndex = sourceTrackNode.morphTargetDictionary[ sourceTrackBinding.propertyIndex ];
if ( targetIndex === undefined ) {
throw new Error( 'THREE.GLTFExporter: Morph target name not found: ' + sourceTrackBinding.propertyIndex );
}
let mergedTrack;
// If this is the first time we've seen this object, create a new
// track to store merged keyframe data for each morph target.
if ( mergedTracks[ sourceTrackNode.uuid ] === undefined ) {
mergedTrack = sourceTrack.clone();
const values = new mergedTrack.ValueBufferType( targetCount * mergedTrack.times.length );
for ( let j = 0; j < mergedTrack.times.length; j ++ ) {
values[ j * targetCount + targetIndex ] = mergedTrack.values[ j ];
}
// We need to take into consideration the intended target node
// of our original un-merged morphTarget animation.
mergedTrack.name = ( sourceTrackBinding.nodeName || '' ) + '.morphTargetInfluences';
mergedTrack.values = values;
mergedTracks[ sourceTrackNode.uuid ] = mergedTrack;
tracks.push( mergedTrack );
continue;
}
const sourceInterpolant = sourceTrack.createInterpolant( new sourceTrack.ValueBufferType( 1 ) );
mergedTrack = mergedTracks[ sourceTrackNode.uuid ];
// For every existing keyframe of the merged track, write a (possibly
// interpolated) value from the source track.
for ( let j = 0; j < mergedTrack.times.length; j ++ ) {
mergedTrack.values[ j * targetCount + targetIndex ] = sourceInterpolant.evaluate( mergedTrack.times[ j ] );
}
// For every existing keyframe of the source track, write a (possibly
// new) keyframe to the merged track. Values from the previous loop may
// be written again, but keyframes are de-duplicated.
for ( let j = 0; j < sourceTrack.times.length; j ++ ) {
const keyframeIndex = this.insertKeyframe( mergedTrack, sourceTrack.times[ j ] );
mergedTrack.values[ keyframeIndex * targetCount + targetIndex ] = sourceTrack.values[ j ];
}
}
clip.tracks = tracks;
return clip;
}
};
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $correlatedObjects = Symbol('correlatedObjects');
const $sourceObject = Symbol('sourceObject');
const $onUpdate = Symbol('onUpdate');
/**
* A ThreeDOMElement is the common primitive of all scene-graph elements that
* have been given a facade in the host execution context. It adds a common
* interface to these elements in support of convenient serializability.
*/
class ThreeDOMElement {
constructor(onUpdate, element, correlatedObjects = null) {
this[$onUpdate] = onUpdate;
this[$sourceObject] = element;
this[$correlatedObjects] = correlatedObjects;
}
}
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$3, _b$2;
const loader = new ImageLoader();
const $threeTextures$1 = Symbol('threeTextures');
const $uri = Symbol('uri');
const $bufferViewImages = Symbol('bufferViewImages');
/**
* Image facade implementation for Three.js textures
*/
class Image$1 extends ThreeDOMElement {
constructor(onUpdate, image, correlatedTextures) {
super(onUpdate, image, correlatedTextures);
this[_a$3] = undefined;
this[_b$2] = new WeakMap();
if (image.uri != null) {
this[$uri] = image.uri;
}
if (image.bufferView != null) {
for (const texture of correlatedTextures) {
this[$bufferViewImages].set(texture, texture.image);
}
}
}
get [$threeTextures$1]() {
return this[$correlatedObjects];
}
get name() {
return this[$sourceObject].name || '';
}
get uri() {
return this[$uri];
}
get type() {
return this.uri != null ? 'external' : 'embedded';
}
async setURI(uri) {
this[$uri] = uri;
const image = await new Promise((resolve, reject) => {
loader.load(uri, resolve, undefined, reject);
});
for (const texture of this[$threeTextures$1]) {
// If the URI is set to null but the Image had an associated buffer view
// (this would happen if it started out as embedded), then fall back to
// the cached object URL created by GLTFLoader:
if (image == null &&
this[$sourceObject].bufferView != null) {
texture.image = this[$bufferViewImages].get(texture);
}
else {
texture.image = image;
}
texture.needsUpdate = true;
}
this[$onUpdate]();
}
}
_a$3 = $uri, _b$2 = $bufferViewImages;
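// A minimal usage sketch for the facade above, assuming a loaded <model-viewer>
// element whose first material has a base color texture; the element id and
// image URL are placeholders:
//
//   const material = document.querySelector('#viewer').model.materials[0];
//   const image = material.pbrMetallicRoughness.baseColorTexture.texture.source;
//   await image.setURI('https://example.com/alternate-base-color.png');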
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const isMinFilter = (() => {
// NEAREST, LINEAR and the four mipmapping minification filters
const minFilterValues = [9728, 9729, 9984, 9985, 9986, 9987];
return (value) => minFilterValues.indexOf(value) > -1;
})();
const isMagFilter = (() => {
// NEAREST, LINEAR
const magFilterValues = [9728, 9729];
return (value) => magFilterValues.indexOf(value) > -1;
})();
const isWrapMode = (() => {
// CLAMP_TO_EDGE, MIRRORED_REPEAT, REPEAT
const wrapModes = [33071, 33648, 10497];
return (value) => wrapModes.indexOf(value) > -1;
})();
const isValidSamplerValue = (property, value) => {
switch (property) {
case 'minFilter':
return isMinFilter(value);
case 'magFilter':
return isMagFilter(value);
case 'wrapS':
case 'wrapT':
return isWrapMode(value);
default:
throw new Error(`Cannot configure property "${property}" on Sampler`);
}
};
const $threeTextures = Symbol('threeTextures');
const $setProperty = Symbol('setProperty');
/**
* Sampler facade implementation for Three.js textures
*/
class Sampler extends ThreeDOMElement {
get [$threeTextures]() {
return this[$correlatedObjects];
}
constructor(onUpdate, sampler, correlatedTextures) {
// These defaults represent a convergence of glTF defaults for wrap mode and
// Three.js defaults for filters. Per glTF 2.0 spec, a renderer may choose
// its own defaults for filters.
// @see https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#reference-sampler
// @see https://threejs.org/docs/#api/en/textures/Texture
if (sampler.minFilter == null) {
sampler.minFilter = 9987; // LINEAR_MIPMAP_LINEAR
}
if (sampler.magFilter == null) {
sampler.magFilter = 9729; // LINEAR
}
if (sampler.wrapS == null) {
sampler.wrapS = 10497; // REPEAT
}
if (sampler.wrapT == null) {
sampler.wrapT = 10497; // REPEAT
}
super(onUpdate, sampler, correlatedTextures);
}
get name() {
return this[$sourceObject].name || '';
}
get minFilter() {
return this[$sourceObject].minFilter;
}
get magFilter() {
return this[$sourceObject].magFilter;
}
get wrapS() {
return this[$sourceObject].wrapS;
}
get wrapT() {
return this[$sourceObject].wrapT;
}
setMinFilter(filter) {
this[$setProperty]('minFilter', filter);
}
setMagFilter(filter) {
this[$setProperty]('magFilter', filter);
}
setWrapS(mode) {
this[$setProperty]('wrapS', mode);
}
setWrapT(mode) {
this[$setProperty]('wrapT', mode);
}
[$setProperty](property, value) {
const sampler = this[$sourceObject];
if (isValidSamplerValue(property, value)) {
sampler[property] = value;
for (const texture of this[$threeTextures]) {
texture[property] = value;
texture.needsUpdate = true;
}
}
this[$onUpdate]();
}
}
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $source = Symbol('source');
const $sampler = Symbol('sampler');
/**
* Texture facade implementation for Three.js textures
*/
class Texture extends ThreeDOMElement {
constructor(onUpdate, gltf, texture, correlatedTextures) {
super(onUpdate, texture, correlatedTextures);
const { sampler: samplerIndex, source: imageIndex } = texture;
const sampler = (gltf.samplers != null && samplerIndex != null) ?
gltf.samplers[samplerIndex] :
{};
this[$sampler] = new Sampler(onUpdate, sampler, correlatedTextures);
if (gltf.images != null && imageIndex != null) {
const image = gltf.images[imageIndex];
if (image != null) {
this[$source] = new Image$1(onUpdate, image, correlatedTextures);
}
}
}
get name() {
return this[$sourceObject].name || '';
}
get sampler() {
return this[$sampler];
}
get source() {
return this[$source];
}
}
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const $texture = Symbol('texture');
/**
* TextureInfo facade implementation for Three.js textures
*/
class TextureInfo extends ThreeDOMElement {
constructor(onUpdate, gltf, textureInfo, correlatedTextures) {
super(onUpdate, textureInfo, correlatedTextures);
const { index: textureIndex } = textureInfo;
const texture = gltf.textures[textureIndex];
if (texture != null) {
this[$texture] = new Texture(onUpdate, gltf, texture, correlatedTextures);
}
}
get texture() {
return this[$texture];
}
}
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$2, _b$1;
const $threeMaterials = Symbol('threeMaterials');
const $baseColorTexture = Symbol('baseColorTexture');
const $metallicRoughnessTexture = Symbol('metallicRoughnessTexture');
/**
* PBR material properties facade implementation for Three.js materials
*/
class PBRMetallicRoughness extends ThreeDOMElement {
constructor(onUpdate, gltf, pbrMetallicRoughness, correlatedMaterials) {
super(onUpdate, pbrMetallicRoughness, correlatedMaterials);
this[_a$2] = null;
this[_b$1] = null;
// Assign glTF default values
if (pbrMetallicRoughness.baseColorFactor == null) {
pbrMetallicRoughness.baseColorFactor = [1, 1, 1, 1];
}
if (pbrMetallicRoughness.roughnessFactor == null) {
pbrMetallicRoughness.roughnessFactor = 0;
}
if (pbrMetallicRoughness.metallicFactor == null) {
pbrMetallicRoughness.metallicFactor = 0;
}
const { baseColorTexture, metallicRoughnessTexture } = pbrMetallicRoughness;
const baseColorTextures = new Set();
const metallicRoughnessTextures = new Set();
for (const material of correlatedMaterials) {
if (baseColorTexture != null && material.map != null) {
baseColorTextures.add(material.map);
}
// NOTE: GLTFLoader uses the same texture for metalnessMap and
// roughnessMap in this case
// @see https://github.com/mrdoob/three.js/blob/b4473c25816df4a09405c7d887d5c418ef47ee76/examples/js/loaders/GLTFLoader.js#L2173-L2174
if (metallicRoughnessTexture != null && material.metalnessMap != null) {
metallicRoughnessTextures.add(material.metalnessMap);
}
}
if (baseColorTextures.size > 0) {
this[$baseColorTexture] =
new TextureInfo(onUpdate, gltf, baseColorTexture, baseColorTextures);
}
if (metallicRoughnessTextures.size > 0) {
this[$metallicRoughnessTexture] = new TextureInfo(onUpdate, gltf, metallicRoughnessTexture, metallicRoughnessTextures);
}
}
get [(_a$2 = $baseColorTexture, _b$1 = $metallicRoughnessTexture, $threeMaterials)]() {
return this[$correlatedObjects];
}
get baseColorFactor() {
return this[$sourceObject].baseColorFactor;
}
get metallicFactor() {
return this[$sourceObject].metallicFactor;
}
get roughnessFactor() {
return this[$sourceObject].roughnessFactor;
}
get baseColorTexture() {
return this[$baseColorTexture];
}
get metallicRoughnessTexture() {
return this[$metallicRoughnessTexture];
}
setBaseColorFactor(rgba) {
for (const material of this[$threeMaterials]) {
material.color.fromArray(rgba);
material.opacity = (rgba)[3];
}
const pbrMetallicRoughness = this[$sourceObject];
pbrMetallicRoughness.baseColorFactor = rgba;
this[$onUpdate]();
}
setMetallicFactor(value) {
for (const material of this[$threeMaterials]) {
material.metalness = value;
}
const pbrMetallicRoughness = this[$sourceObject];
pbrMetallicRoughness.metallicFactor = value;
this[$onUpdate]();
}
setRoughnessFactor(value) {
for (const material of this[$threeMaterials]) {
material.roughness = value;
}
const pbrMetallicRoughness = this[$sourceObject];
pbrMetallicRoughness.roughnessFactor = value;
this[$onUpdate]();
}
}
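// A minimal usage sketch for the PBR facade above, assuming a loaded
// <model-viewer> element; the element id is a placeholder:
//
//   const pbr = document.querySelector('#viewer').model.materials[0].pbrMetallicRoughness;
//   pbr.setBaseColorFactor([1, 0, 0, 1]); // opaque red
//   pbr.setMetallicFactor(0.0);
//   pbr.setRoughnessFactor(0.8);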
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a$1, _b, _c;
const $pbrMetallicRoughness = Symbol('pbrMetallicRoughness');
const $normalTexture = Symbol('normalTexture');
const $occlusionTexture = Symbol('occlusionTexture');
const $emissiveTexture = Symbol('emissiveTexture');
/**
* Material facade implementation for Three.js materials
*/
class Material extends ThreeDOMElement {
constructor(onUpdate, gltf, material, correlatedMaterials) {
super(onUpdate, material, correlatedMaterials);
this[_a$1] = null;
this[_b] = null;
this[_c] = null;
if (correlatedMaterials == null) {
return;
}
if (material.pbrMetallicRoughness == null) {
material.pbrMetallicRoughness = {};
}
this[$pbrMetallicRoughness] = new PBRMetallicRoughness(onUpdate, gltf, material.pbrMetallicRoughness, correlatedMaterials);
const { normalTexture, occlusionTexture, emissiveTexture } = material;
const normalTextures = new Set();
const occlusionTextures = new Set();
const emissiveTextures = new Set();
for (const material of correlatedMaterials) {
const { normalMap, aoMap, emissiveMap } = material;
if (normalTexture != null && normalMap != null) {
normalTextures.add(normalMap);
}
if (occlusionTexture != null && aoMap != null) {
occlusionTextures.add(aoMap);
}
if (emissiveTexture != null && emissiveMap != null) {
emissiveTextures.add(emissiveMap);
}
}
if (normalTextures.size > 0) {
this[$normalTexture] =
new TextureInfo(onUpdate, gltf, normalTexture, normalTextures);
}
if (occlusionTextures.size > 0) {
this[$occlusionTexture] =
new TextureInfo(onUpdate, gltf, occlusionTexture, occlusionTextures);
}
if (emissiveTextures.size > 0) {
this[$emissiveTexture] =
new TextureInfo(onUpdate, gltf, emissiveTexture, emissiveTextures);
}
}
get name() {
return this[$sourceObject].name || '';
}
get pbrMetallicRoughness() {
return this[$pbrMetallicRoughness];
}
get normalTexture() {
return this[$normalTexture];
}
get occlusionTexture() {
return this[$occlusionTexture];
}
get emissiveTexture() {
return this[$emissiveTexture];
}
get emissiveFactor() {
return this[$sourceObject].emissiveFactor;
}
setEmissiveFactor(rgb) {
for (const material of this[$correlatedObjects]) {
material.emissive.fromArray(rgb);
}
this[$sourceObject].emissiveFactor = rgb;
this[$onUpdate]();
}
}
_a$1 = $normalTexture, _b = $occlusionTexture, _c = $emissiveTexture;
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a;
const $materials = Symbol('materials');
/**
* A Model is a facade for the top-level glTF object returned by Three.js'
* GLTFLoader. Currently, the model concerns itself only with the materials in
* the Three.js scene graph.
*/
class Model {
constructor(correlatedSceneGraph, onUpdate = () => { }) {
this[_a] = [];
const { gltf, gltfElementMap } = correlatedSceneGraph;
gltf.materials.forEach(material => {
this[$materials].push(new Material(onUpdate, gltf, material, gltfElementMap.get(material)));
});
}
/**
* Materials are listed in the order of the GLTF materials array, plus a
* default material at the end if one is used.
*
* TODO(#1003): How do we handle non-active scenes?
*/
get materials() {
return this[$materials];
}
}
_a = $materials;
/* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate$1 = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof undefined === "function") r = undefined(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
const $currentGLTF = Symbol('currentGLTF');
const $model = Symbol('model');
const $variants = Symbol('variants');
/**
* SceneGraphMixin exposes a model API in order to support operations on
* the <model-viewer> scene graph.
*/
const SceneGraphMixin = (ModelViewerElement) => {
var _a, _b, _c;
class SceneGraphModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this[_a] = undefined;
this[_b] = null;
this[_c] = [];
this.variantName = undefined;
this.orientation = '0 0 0';
this.scale = '1 1 1';
}
// Scene-graph API:
/** @export */
get model() {
return this[$model];
}
get availableVariants() {
return this[$variants];
}
updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('variantName')) {
const variants = this[$variants];
const threeGLTF = this[$currentGLTF];
const { variantName } = this;
const variantIndex = variants.findIndex((v) => v === variantName);
if (threeGLTF == null || variantIndex < 0) {
return;
}
const onUpdate = () => {
this[$needsRender]();
};
const updatedMaterials = threeGLTF.correlatedSceneGraph.loadVariant(variantIndex, onUpdate);
const { gltf, gltfElementMap } = threeGLTF.correlatedSceneGraph;
for (const index of updatedMaterials) {
const material = gltf.materials[index];
this[$model].materials[index] = new Material(onUpdate, gltf, material, gltfElementMap.get(material));
}
}
if (changedProperties.has('orientation') ||
changedProperties.has('scale')) {
const { modelContainer } = this[$scene];
const orientation = parseExpressions(this.orientation)[0]
.terms;
const roll = normalizeUnit(orientation[0]).number;
const pitch = normalizeUnit(orientation[1]).number;
const yaw = normalizeUnit(orientation[2]).number;
modelContainer.quaternion.setFromEuler(new Euler(pitch, yaw, roll, 'YXZ'));
const scale = parseExpressions(this.scale)[0]
.terms;
modelContainer.scale.set(scale[0].number, scale[1].number, scale[2].number);
this[$scene].updateBoundingBox();
this[$scene].updateShadow();
this[$renderer].arRenderer.onUpdateScene();
this[$needsRender]();
}
}
[(_a = $model, _b = $currentGLTF, _c = $variants, $onModelLoad)]() {
super[$onModelLoad]();
this[$variants] = [];
const { currentGLTF } = this[$scene];
if (currentGLTF != null) {
const { correlatedSceneGraph } = currentGLTF;
if (correlatedSceneGraph != null &&
currentGLTF !== this[$currentGLTF]) {
this[$model] = new Model(correlatedSceneGraph, () => {
this[$needsRender]();
});
}
// KHR_materials_variants extension spec:
// https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_variants
const { gltfExtensions } = currentGLTF.userData;
if (gltfExtensions != null) {
const extension = gltfExtensions['KHR_materials_variants'];
if (extension != null) {
this[$variants] =
extension.variants.map(variant => variant.name);
this.requestUpdate('variantName');
}
}
}
this[$currentGLTF] = currentGLTF;
// TODO: remove this event, as it is synonymous with the load event.
this.dispatchEvent(new CustomEvent('scene-graph-ready'));
}
/** @export */
async exportScene(options) {
const scene = this[$scene];
return new Promise(async (resolve) => {
// Defaults
const opts = {
binary: true,
onlyVisible: true,
maxTextureSize: Infinity,
forcePowerOfTwoTextures: false,
includeCustomExtensions: false,
embedImages: true
};
Object.assign(opts, options);
// Not configurable
opts.animations = scene.animations;
opts.truncateDrawRange = true;
const shadow = scene.shadow;
let visible = false;
// Remove shadow from export
if (shadow != null) {
visible = shadow.visible;
shadow.visible = false;
}
const exporter = new GLTFExporter();
exporter.parse(scene.modelContainer, (gltf) => {
return resolve(new Blob([opts.binary ? gltf : JSON.stringify(gltf)], {
type: opts.binary ? 'application/octet-stream' :
'application/json'
}));
}, opts);
if (shadow != null) {
shadow.visible = visible;
}
});
}
}
__decorate$1([
property({ type: String, attribute: 'variant-name' })
], SceneGraphModelViewerElement.prototype, "variantName", void 0);
__decorate$1([
property({ type: String, attribute: 'orientation' })
], SceneGraphModelViewerElement.prototype, "orientation", void 0);
__decorate$1([
property({ type: String, attribute: 'scale' })
], SceneGraphModelViewerElement.prototype, "scale", void 0);
return SceneGraphModelViewerElement;
};
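// A minimal sketch of exporting the current scene as a binary glTF (GLB) blob
// via the exportScene() method defined above; the element id and file name are
// placeholders:
//
//   const blob = await document.querySelector('#viewer').exportScene({binary: true});
//   const url = URL.createObjectURL(blob);
//   const anchor = document.createElement('a');
//   anchor.href = url;
//   anchor.download = 'scene.glb';
//   anchor.click();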
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __decorate = (undefined && undefined.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof undefined === "function") r = undefined(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// How much the model will rotate per second, in radians:
const DEFAULT_ROTATION_SPEED = Math.PI / 32;
const AUTO_ROTATE_DELAY_DEFAULT = 3000;
const rotationRateIntrinsics = {
basis: [degreesToRadians(numberNode(DEFAULT_ROTATION_SPEED, 'rad'))],
keywords: { auto: [null] }
};
const $autoRotateStartTime = Symbol('autoRotateStartTime');
const $radiansPerSecond = Symbol('radiansPerSecond');
const $syncRotationRate = Symbol('syncRotationRate');
const $onCameraChange = Symbol('onCameraChange');
const StagingMixin = (ModelViewerElement) => {
var _a, _b, _c;
class StagingModelViewerElement extends ModelViewerElement {
constructor() {
super(...arguments);
this.autoRotate = false;
this.autoRotateDelay = AUTO_ROTATE_DELAY_DEFAULT;
this.rotationPerSecond = 'auto';
this[_a] = performance.now();
this[_b] = 0;
this[_c] = (event) => {
if (!this.autoRotate) {
return;
}
if (event.detail.source === 'user-interaction') {
this[$autoRotateStartTime] = performance.now();
}
};
}
connectedCallback() {
super.connectedCallback();
this.addEventListener('camera-change', this[$onCameraChange]);
this[$autoRotateStartTime] = performance.now();
}
disconnectedCallback() {
super.disconnectedCallback();
this.removeEventListener('camera-change', this[$onCameraChange]);
this[$autoRotateStartTime] = performance.now();
}
updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('autoRotate')) {
this[$autoRotateStartTime] = performance.now();
}
}
[(_a = $autoRotateStartTime, _b = $radiansPerSecond, $syncRotationRate)](style) {
this[$radiansPerSecond] = style[0];
}
[$tick](time, delta) {
super[$tick](time, delta);
if (!this.autoRotate || !this[$hasTransitioned]() ||
this[$renderer].isPresenting) {
return;
}
const rotationDelta = Math.min(delta, time - this[$autoRotateStartTime] - this.autoRotateDelay);
if (rotationDelta > 0) {
this[$scene].yaw = this.turntableRotation +
this[$radiansPerSecond] * rotationDelta * 0.001;
}
}
get turntableRotation() {
return this[$scene].yaw;
}
resetTurntableRotation(theta = 0) {
this[$scene].yaw = theta;
}
}
_c = $onCameraChange;
__decorate([
property({ type: Boolean, attribute: 'auto-rotate' })
], StagingModelViewerElement.prototype, "autoRotate", void 0);
__decorate([
property({ type: Number, attribute: 'auto-rotate-delay' })
], StagingModelViewerElement.prototype, "autoRotateDelay", void 0);
__decorate([
style({ intrinsics: rotationRateIntrinsics, updateHandler: $syncRotationRate }),
property({ type: String, attribute: 'rotation-per-second' })
], StagingModelViewerElement.prototype, "rotationPerSecond", void 0);
return StagingModelViewerElement;
};
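// A minimal markup sketch for the auto-rotate behavior defined above; the model
// path and attribute values are illustrative:
//
//   <model-viewer src="model.glb" camera-controls auto-rotate
//       auto-rotate-delay="0" rotation-per-second="30deg"></model-viewer>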
/**
* This mixin function is designed to be applied to a class that inherits
* from HTMLElement. It makes it easy for a custom element to coordinate with
* the :focus-visible polyfill.
*
* NOTE(cdata): The code here was adapted from an example proposed with the
* introduction of ShadowDOM support in the :focus-visible polyfill.
*
* @see https://github.com/WICG/focus-visible/pull/196
* @param {Function} SuperClass The base class implementation to decorate with
* an implementation that coordinates with the :focus-visible polyfill
*/
const FocusVisiblePolyfillMixin = (SuperClass) => {
var _a;
const coordinateWithPolyfill = (instance) => {
// If there is no shadow root, there is no need to coordinate with
// the polyfill. If we already coordinated with the polyfill, we can
// skip subsequent invocations:
if (instance.shadowRoot == null ||
instance.hasAttribute('data-js-focus-visible')) {
return () => { };
}
// The polyfill might already be loaded. If so, we can apply it to
// the shadow root immediately:
if (self.applyFocusVisiblePolyfill) {
self.applyFocusVisiblePolyfill(instance.shadowRoot);
}
else {
const coordinationHandler = () => {
self.applyFocusVisiblePolyfill(instance.shadowRoot);
};
// Otherwise, wait for the polyfill to be loaded lazily. It might
// never be loaded, but if it is then we can apply it to the
// shadow root at the appropriate time by waiting for the ready
// event:
self.addEventListener('focus-visible-polyfill-ready', coordinationHandler, { once: true });
return () => {
self.removeEventListener('focus-visible-polyfill-ready', coordinationHandler);
};
}
return () => { };
};
const $endPolyfillCoordination = Symbol('endPolyfillCoordination');
// IE11 doesn't natively support custom elements or JavaScript class
// syntax. The mixin implementation assumes that the user will take the
// appropriate steps to support both:
class FocusVisibleCoordinator extends SuperClass {
constructor() {
super(...arguments);
this[_a] = null;
}
// Attempt to coordinate with the polyfill when connected to the
// document:
connectedCallback() {
super.connectedCallback && super.connectedCallback();
if (this[$endPolyfillCoordination] == null) {
this[$endPolyfillCoordination] = coordinateWithPolyfill(this);
}
}
disconnectedCallback() {
super.disconnectedCallback && super.disconnectedCallback();
// It's important to remove the polyfill event listener when we
// disconnect, otherwise we will leak the whole element via window:
if (this[$endPolyfillCoordination] != null) {
this[$endPolyfillCoordination]();
this[$endPolyfillCoordination] = null;
}
}
}
_a = $endPolyfillCoordination;
return FocusVisibleCoordinator;
};
/* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Uncomment these lines to export PMREM textures in Glitch:
// export {default as TextureUtils} from './three-components/TextureUtils';
// export * from 'three';
const ModelViewerElement = AnnotationMixin(SceneGraphMixin(StagingMixin(EnvironmentMixin(ControlsMixin(ARMixin(LoadingMixin(AnimationMixin(FocusVisiblePolyfillMixin(ModelViewerElementBase)))))))));
customElements.define('model-viewer', ModelViewerElement);
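// A minimal page-level sketch of using the registered element; the script and
// model paths are placeholders:
//
//   <script type="module" src="model-viewer.js"></script>
//   <model-viewer src="assets/model.glb" alt="A 3D model of an artifact"
//       camera-controls ar></model-viewer>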
export { ModelViewerElement };
//# sourceMappingURL=model-viewer.js.map