 - * p5.Envelope: An Envelope is a series
- * of fades over time. Often used to control an object's
- * output gain level as an "ADSR Envelope" (Attack, Decay,
- * Sustain, Release). Can also modulate other parameters.
- *
 - * p5.Delay: A delay effect with
- * parameters for feedback, delayTime, and lowpass filter.
- *
 - * p5.Filter: Filter the frequency range of a
- * sound.
- *
- *
 - * p5.Reverb: Add reverb to a sound by specifying
- * duration and decay.
 - * userStartAudio: Enable audio in a
- * browser- and user-friendly way.
- *
 - * p5.sound is on GitHub.
- * Download the latest version
- * here.
- *
- * @module p5.sound
- * @submodule p5.sound
- * @for p5.sound
- * @main
- */
-
-/**
- * p5.sound
- * https://p5js.org/reference/#/libraries/p5.sound
- *
- * From the Processing Foundation and contributors
- * https://github.com/processing/p5.js-sound/graphs/contributors
- *
- * MIT License (MIT)
- * https://github.com/processing/p5.js-sound/blob/master/LICENSE
- *
- * Some of the many audio libraries & resources that inspire p5.sound:
- * - TONE.js (c) Yotam Mann. Licensed under The MIT License (MIT). https://github.com/TONEnoTONE/Tone.js
- * - buzz.js (c) Jay Salvat. Licensed under The MIT License (MIT). http://buzz.jaysalvat.com/
- * - Boris Smus Web Audio API book, 2013. Licensed under the Apache License http://www.apache.org/licenses/LICENSE-2.0
- * - wavesurfer.js https://github.com/katspaugh/wavesurfer.js
- * - Web Audio Components by Jordan Santell https://github.com/web-audio-components
- * - Wilm Thoben's Sound library for Processing https://github.com/processing/processing/tree/master/java/libraries/sound
- *
- * Web Audio API: http://w3.org/TR/webaudio/
- */
-
- (function(modules) {
- var installedModules = {};
- function __webpack_require__(moduleId) {
- if(installedModules[moduleId]) {
- return installedModules[moduleId].exports;
- }
- var module = installedModules[moduleId] = {
- i: moduleId,
- l: false,
- exports: {}
- };
- modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
- module.l = true;
- return module.exports;
- }
- __webpack_require__.m = modules;
- __webpack_require__.c = installedModules;
- __webpack_require__.d = function(exports, name, getter) {
- if(!__webpack_require__.o(exports, name)) {
- Object.defineProperty(exports, name, { enumerable: true, get: getter });
- }
- };
- __webpack_require__.r = function(exports) {
- if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
- Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
- }
- Object.defineProperty(exports, '__esModule', { value: true });
- };
- __webpack_require__.t = function(value, mode) {
- if(mode & 1) value = __webpack_require__(value);
- if(mode & 8) return value;
- if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
- var ns = Object.create(null);
- __webpack_require__.r(ns);
- Object.defineProperty(ns, 'default', { enumerable: true, value: value });
- if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
- return ns;
- };
- __webpack_require__.n = function(module) {
- var getter = module && module.__esModule ?
- function getDefault() { return module['default']; } :
- function getModuleExports() { return module; };
- __webpack_require__.d(getter, 'a', getter);
- return getter;
- };
- __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
- __webpack_require__.p = "";
- return __webpack_require__(__webpack_require__.s = 31);
- })
- ([
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function(){"use strict";function a(t,e){this.isUndef(t)||1===t?this.input=this.context.createGain():1
 - /**
 - * Scale the output of all sound in this sketch.
- * Scaled between 0.0 (silence) and 1.0 (full volume).
- * 1.0 is the maximum amplitude of a digital sound, so multiplying
- * by greater than 1.0 may cause digital distortion. To
- * fade, provide a rampTime parameter. For more
- * complex fades, see the Envelope class.
- *
- * Alternately, you can pass in a signal source such as an
- * oscillator to modulate the amplitude with an audio signal.
- *
- *
 - * How This Works: When you load the p5.sound module, it
- * creates a single instance of p5sound. All sound objects in this
- * module output to p5sound before reaching your computer's output.
- * So if you change the amplitude of p5sound, it impacts all of the
- * sound in this module.
- *
- *
 - * If no value is provided, returns a Web Audio API Gain Node
- *
- * @method masterVolume
- * @param {Number|Object} volume Volume (amplitude) between 0.0
- * and 1.0 or modulating signal/oscillator
- * @param {Number} [rampTime] Fade for t seconds
- * @param {Number} [timeFromNow] Schedule this event to happen at
- * t seconds in the future
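 - * @example
 - *
 - * // A minimal sketch: tap to play, then drag the mouse vertically to
 - * // fade the master output ('assets/beat.mp3' is a placeholder path).
 - * let song;
 - * function preload() {
 - *   song = loadSound('assets/beat.mp3');
 - * }
 - * function setup() {
 - *   let cnv = createCanvas(100, 100);
 - *   cnv.mousePressed(function() {
 - *     userStartAudio();
 - *     song.loop();
 - *   });
 - *   text('tap to play, drag to fade', 10, 20, width - 20);
 - * }
 - * function mouseDragged() {
 - *   // ramp the master output to a value based on mouseY over 0.1 seconds
 - *   let vol = constrain(map(mouseY, height, 0, 0, 1), 0, 1);
 - *   masterVolume(vol, 0.1);
 - * }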
- */
-
-
 - p5.prototype.masterVolume = function (vol, rampTime, tFromNow) {
 - if (typeof vol === 'number') {
 - rampTime = rampTime || 0;
 - tFromNow = tFromNow || 0;
 - var now = p5sound.audiocontext.currentTime;
 - var currentVol = p5sound.output.gain.value;
 - p5sound.output.gain.cancelScheduledValues(now + tFromNow);
 - p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
 - p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
 - } else if (vol) {
 - // a signal source (e.g. an oscillator) modulates the master gain
 - vol.connect(p5sound.output.gain);
 - } else {
 - // no value provided: return the master Gain Node itself
 - return p5sound.output.gain;
 - }
 - };
- /**
- * `p5.soundOut` is the p5.sound master output. It sends output to
- * the destination of this window's web audio context. It contains
 - * Web Audio API nodes including a dynamicsCompressor (.limiter),
- * and Gain Nodes for .input and .output.
- *
- * @property {Object} soundOut
- */
-
-
- p5.prototype.soundOut = p5.soundOut = p5sound;
-
- p5.soundOut._silentNode = p5sound.audiocontext.createGain();
- p5.soundOut._silentNode.gain.value = 0;
-
- p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
-
- return p5sound;
-}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(6),__webpack_require__(8),__webpack_require__(22),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Signal=function(){var t=this.optionsObject(arguments,["value","units"],n.Signal.defaults);this.output=this._gain=this.context.createGain(),t.param=this._gain.gain,n.Param.call(this,t),this.input=this._param=this._gain.gain,this.context.getConstant(1).chain(this._gain)},n.extend(n.Signal,n.Param),n.Signal.defaults={value:0,units:n.Type.Default,convert:!0},n.Signal.prototype.connect=n.SignalBase.prototype.connect,n.Signal.prototype.dispose=function(){return n.Param.prototype.dispose.call(this),this._param=null,this._gain.disconnect(),this._gain=null,this},n.Signal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Multiply=function(t){this.createInsOuts(2,0),this._mult=this.input[0]=this.output=new i.Gain,this._param=this.input[1]=this.output.gain,this._param.value=this.defaultArg(t,0)},i.extend(i.Multiply,i.Signal),i.Multiply.prototype.dispose=function(){return i.prototype.dispose.call(this),this._mult.dispose(),this._mult=null,this._param=null,this},i.Multiply}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var CrossFade = __webpack_require__(51);
- /**
- * Effect is a base class for audio effects in p5.
- * This module handles the nodes and methods that are
- * common and useful for current and future effects.
- *
- *
- * This class is extended by p5.Distortion,
- * p5.Compressor,
- * p5.Delay,
- * p5.Filter,
- * p5.Reverb.
- *
- * @class p5.Effect
- * @constructor
- *
- * @param {Object} [ac] Reference to the audio context of the p5 object
- * @param {AudioNode} [input] Gain Node effect wrapper
- * @param {AudioNode} [output] Gain Node effect wrapper
 - * @param {Object} [_drywet] Tone.js CrossFade node (defaults to value: 1)
 - * @param {AudioNode} [wet] Effects that extend this class should connect
 - * their wet signal to this gain node, so that dry and wet
 - * signals are mixed properly.
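 - * @example
 - *
 - * // A sketch of the shared p5.Effect interface, shown here through the
 - * // p5.Filter subclass ('assets/beat.mp3' is a placeholder path):
 - * let soundFile, filter;
 - * function preload() {
 - *   soundFile = loadSound('assets/beat.mp3');
 - * }
 - * function setup() {
 - *   createCanvas(100, 100);
 - *   filter = new p5.Filter('lowpass');
 - *   soundFile.disconnect();
 - *   soundFile.connect(filter);
 - *   // methods inherited from p5.Effect:
 - *   filter.drywet(0.5); // even mix of dry and wet signal
 - *   filter.amp(0.8);    // output level of the effect
 - *   soundFile.loop();
 - * }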
- */
-
-
- p5.Effect = function () {
- this.ac = p5sound.audiocontext;
- this.input = this.ac.createGain();
- this.output = this.ac.createGain();
- /**
- * The p5.Effect class is built
- * using Tone.js CrossFade
- * @private
- */
-
- this._drywet = new CrossFade(1);
- /**
- * In classes that extend
- * p5.Effect, connect effect nodes
- * to the wet parameter
- */
-
- this.wet = this.ac.createGain();
- this.input.connect(this._drywet.a);
- this.wet.connect(this._drywet.b);
-
- this._drywet.connect(this.output);
-
- this.connect();
-
- p5sound.soundArray.push(this);
- };
- /**
- * Set the output volume of the filter.
- *
- * @method amp
- * @for p5.Effect
- * @param {Number} [vol] amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts until rampTime
- * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
- */
-
-
- p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) {
 - rampTime = rampTime || 0;
 - tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + .001);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + .001);
- };
- /**
 - * Link effects together in a chain.
 - * Example usage: filter.chain(reverb, delay, panner);
 - * May be used with an open-ended number of arguments.
- *
- * @method chain
- * @for p5.Effect
- * @param {Object} [arguments] Chain together multiple sound objects
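 - * @example
 - *
 - * // a sketch of what chain() does, assuming the effects exist:
 - * // filter.chain(reverb, delay);
 - * // is equivalent to:
 - * // filter.connect(reverb);
 - * // reverb.connect(delay);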
- */
-
-
- p5.Effect.prototype.chain = function () {
- if (arguments.length > 0) {
- this.connect(arguments[0]);
-
- for (var i = 1; i < arguments.length; i += 1) {
- arguments[i - 1].connect(arguments[i]);
- }
- }
-
- return this;
- };
- /**
- * Adjust the dry/wet value.
- *
- * @method drywet
- * @for p5.Effect
- * @param {Number} [fade] The desired drywet value (0 - 1.0)
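 - * @example
 - *
 - * // a sketch, assuming `delay` is a p5.Delay (or any p5.Effect):
 - * // delay.drywet(0);   // all dry: only the unprocessed signal
 - * // delay.drywet(1);   // all wet: only the processed signal
 - * // delay.drywet(0.5); // an even mix of dry and wet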
- */
-
-
- p5.Effect.prototype.drywet = function (fade) {
- if (typeof fade !== "undefined") {
- this._drywet.fade.value = fade;
- }
-
- return this._drywet.fade.value;
- };
- /**
 - * Send output to a p5.js-sound object, a Web Audio Node, or use a
 - * signal to control an AudioParam.
- *
- * @method connect
- * @for p5.Effect
- * @param {Object} unit
- */
-
-
- p5.Effect.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- this.output.connect(u.input ? u.input : u);
- };
- /**
- * Disconnect all output.
- * @method disconnect
- * @for p5.Effect
- */
-
-
- p5.Effect.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
- };
-
- p5.Effect.prototype.dispose = function () {
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
-
- if (this.input) {
- this.input.disconnect();
- delete this.input;
- }
-
- if (this.output) {
- this.output.disconnect();
- delete this.output;
- }
-
- if (this._drywet) {
- this._drywet.disconnect();
-
- delete this._drywet;
- }
-
- if (this.wet) {
- this.wet.disconnect();
- delete this.wet;
- }
-
- this.ac = undefined;
- };
-
- return p5.Effect;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var processorNames = __webpack_require__(10);
- /**
- * @for p5
- */
-
- /**
- * Returns a number representing the sample rate, in samples per second,
- * of all sound objects in this audio context. It is determined by the
 - * sampling rate of your operating system's sound card, and it is not
 - * currently possible to change.
 - * It is often 44100 (44.1 kHz), roughly twice the highest frequency of human hearing.
- *
- * @method sampleRate
- * @return {Number} samplerate samples per second
- */
-
-
- p5.prototype.sampleRate = function () {
- return p5sound.audiocontext.sampleRate;
- };
- /**
- * Returns the closest MIDI note value for
- * a given frequency.
- *
- * @method freqToMidi
 - * @param {Number} frequency A frequency; for example, the "A"
- * above Middle C is 440Hz
- * @return {Number} MIDI note value
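 - * @example
 - *
 - * // freqToMidi rounds 12 * log2(f / 440) + 69 to the nearest integer:
 - * // freqToMidi(440);   // => 69 (the "A" above Middle C)
 - * // freqToMidi(220);   // => 57 (one octave lower)
 - * // freqToMidi(261.6); // => 60 (Middle C)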
- */
-
-
- p5.prototype.freqToMidi = function (f) {
- var mathlog2 = Math.log(f / 440) / Math.log(2);
- var m = Math.round(12 * mathlog2) + 69;
- return m;
- };
- /**
- * Returns the frequency value of a MIDI note value.
- * General MIDI treats notes as integers where middle C
- * is 60, C# is 61, D is 62 etc. Useful for generating
- * musical frequencies with oscillators.
- *
- * @method midiToFreq
- * @param {Number} midiNote The number of a MIDI note
- * @return {Number} Frequency value of the given MIDI note
- * @example
- *
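 - * // a minimal sketch: tap to play a random note from a MIDI array
 - * let notes = [60, 64, 67, 72]; // C major chord tones as MIDI values
 - * let osc;
 - *
 - * function setup() {
 - *   let cnv = createCanvas(100, 100);
 - *   cnv.mousePressed(playNote);
 - *   osc = new p5.TriOsc();
 - *   text('tap to play', 10, 20, width - 20);
 - * }
 - *
 - * function playNote() {
 - *   userStartAudio();
 - *   // convert a MIDI note value to its frequency in Hz
 - *   osc.freq(midiToFreq(random(notes)));
 - *   osc.start();
 - *   osc.amp(0.5, 0.05);
 - * }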
- */
-
-
- var midiToFreq = p5.prototype.midiToFreq = function (m) {
- return 440 * Math.pow(2, (m - 69) / 12.0);
- };
-
-
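 - /**
 - * Converts a note name string such as 'C4' or 'Eb3' to a frequency in Hz
 - * by mapping the note letter to a MIDI value (C4 maps to 60), applying
 - * any sharp ('#') or flat ('b'), and passing the result to midiToFreq.
 - * Non-string input is returned unchanged.
 - */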
- var noteToFreq = function noteToFreq(note) {
- if (typeof note !== 'string') {
- return note;
- }
-
- var wholeNotes = {
- A: 21,
- B: 23,
- C: 24,
- D: 26,
- E: 28,
- F: 29,
- G: 31
- };
- var value = wholeNotes[note[0].toUpperCase()];
- var octave = ~~note.slice(-1);
- value += 12 * (octave - 1);
-
- switch (note[1]) {
- case '#':
- value += 1;
- break;
-
- case 'b':
- value -= 1;
- break;
-
- default:
- break;
- }
-
- return midiToFreq(value);
- };
- /**
 - * List the SoundFile formats that you will include. loadSound
 - * will search your directory for these extensions, and will pick
 - * a format that is compatible with the client's web browser.
- * Here is a free online file
- * converter.
- *
- * @method soundFormats
- * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg'
- * @example
- *
- * function preload() {
- * // set the global sound formats
- * soundFormats('mp3', 'ogg');
- *
- * // load either beatbox.mp3, or .ogg, depending on browser
- * mySound = loadSound('assets/beatbox.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * background(220);
- * text('sound loaded! tap to play', 10, 20, width - 20);
- * cnv.mousePressed(function() {
- * mySound.play();
- * });
- * }
- *
- */
-
-
- p5.prototype.soundFormats = function () {
- p5sound.extensions = [];
-
- for (var i = 0; i < arguments.length; i++) {
- arguments[i] = arguments[i].toLowerCase();
-
- if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(arguments[i]) > -1) {
- p5sound.extensions.push(arguments[i]);
- } else {
 - throw new Error(arguments[i] + ' is not a valid sound format!');
- }
- }
- };
-
- p5.prototype.disposeSound = function () {
- for (var i = 0; i < p5sound.soundArray.length; i++) {
- p5sound.soundArray[i].dispose();
- }
- };
-
-
- p5.prototype.registerMethod('remove', p5.prototype.disposeSound);
-
- p5.prototype._checkFileFormats = function (paths) {
- var path;
-
- if (typeof paths === 'string') {
- path = paths;
-
- var extTest = path.split('.').pop();
-
- if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(extTest) > -1) {
 - // if the requested extension is not supported by this browser,
 - // swap in the first supported extension from p5sound.extensions
 - if (!p5.prototype.isFileSupported(extTest)) {
- var pathSplit = path.split('.');
- var pathCore = pathSplit[pathSplit.length - 1];
-
- for (var i = 0; i < p5sound.extensions.length; i++) {
- var extension = p5sound.extensions[i];
- var supported = p5.prototype.isFileSupported(extension);
-
- if (supported) {
- pathCore = '';
-
- if (pathSplit.length === 2) {
- pathCore += pathSplit[0];
- }
-
 - for (var j = 1; j <= pathSplit.length - 2; j++) {
 - var p = pathSplit[j];
 - pathCore += '.' + p;
- }
-
 - path = pathCore + '.' + extension;
- break;
- }
- }
- }
- }
- else {
- for (var i = 0; i < p5sound.extensions.length; i++) {
- var extension = p5sound.extensions[i];
- var supported = p5.prototype.isFileSupported(extension);
-
- if (supported) {
- path = path + '.' + extension;
- break;
- }
- }
- }
- }
- else if (_typeof(paths) === 'object') {
- for (var i = 0; i < paths.length; i++) {
- var extension = paths[i].split('.').pop();
- var supported = p5.prototype.isFileSupported(extension);
-
- if (supported) {
- path = paths[i];
- break;
- }
- }
- }
-
- return path;
- };
- /**
- * Used by Osc and Envelope to chain signal math
- */
-
-
 - p5.prototype._mathChain = function (o, math, thisChain, nextChain, type) {
 - // if this type of math op already exists in the chain, replace it
 - for (var i in o.mathOps) {
 - if (o.mathOps[i] instanceof type) {
 - o.mathOps[i].dispose();
 - thisChain = parseInt(i, 10);
 - if (thisChain < o.mathOps.length - 1) {
 - nextChain = o.mathOps[thisChain + 1];
 - }
 - }
 - }
 - o.mathOps[thisChain - 1].disconnect();
 - o.mathOps[thisChain - 1].connect(math);
 - math.connect(nextChain);
 - o.mathOps[thisChain] = math;
 - return o;
 - };
-
-
 - // Render an AudioBuffer as a 16-bit stereo WAV (RIFF) file and return
 - // a DataView of the result. Mono input is duplicated to both channels.
 - function convertToWav(audioBuffer) {
 - var leftChannel, rightChannel;
 - leftChannel = audioBuffer.getChannelData(0);
-
- if (audioBuffer.numberOfChannels > 1) {
- rightChannel = audioBuffer.getChannelData(1);
- } else {
- rightChannel = leftChannel;
- }
-
- var interleaved = interleave(leftChannel, rightChannel);
-
- var buffer = new window.ArrayBuffer(44 + interleaved.length * 2);
- var view = new window.DataView(buffer);
-
- writeUTFBytes(view, 0, 'RIFF');
- view.setUint32(4, 36 + interleaved.length * 2, true);
- writeUTFBytes(view, 8, 'WAVE');
-
 - // fmt sub-chunk: 16-byte PCM header
 - writeUTFBytes(view, 12, 'fmt ');
 - view.setUint32(16, 16, true); // sub-chunk size
 - view.setUint16(20, 1, true); // audio format: 1 = linear PCM
 - view.setUint16(22, 2, true); // number of channels: stereo
 - view.setUint32(24, p5sound.audiocontext.sampleRate, true); // sample rate
 - view.setUint32(28, p5sound.audiocontext.sampleRate * 4, true); // byte rate (sampleRate * blockAlign)
 - view.setUint16(32, 4, true); // block align (2 channels * 2 bytes per sample)
 - view.setUint16(34, 16, true); // bits per sample
 -
 - // data sub-chunk
 - writeUTFBytes(view, 36, 'data');
 - view.setUint32(40, interleaved.length * 2, true); // data length in bytes
-
- var lng = interleaved.length;
- var index = 44;
- var volume = 1;
-
- for (var i = 0; i < lng; i++) {
- view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
- index += 2;
- }
-
- return view;
- }
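 -
 - // A sketch of how the returned DataView might be turned into a
 - // downloadable file (illustration only; `audioBuffer` is assumed
 - // to be a Web Audio AudioBuffer):
 - //
 - //   var view = convertToWav(audioBuffer);
 - //   var blob = new Blob([view], { type: 'audio/wav' });
 - //   var url = window.URL.createObjectURL(blob);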
-
-
- function interleave(leftChannel, rightChannel) {
- var length = leftChannel.length + rightChannel.length;
- var result = new Float32Array(length);
- var inputIndex = 0;
-
- for (var index = 0; index < length;) {
- result[index++] = leftChannel[inputIndex];
- result[index++] = rightChannel[inputIndex];
- inputIndex++;
- }
-
- return result;
- }
-
- function writeUTFBytes(view, offset, string) {
- var lng = string.length;
-
- for (var i = 0; i < lng; i++) {
- view.setUint8(offset + i, string.charCodeAt(i));
- }
- }
-
 - function safeBufferSize(idealBufferSize) {
 - var bufferSize = idealBufferSize;
 - // if the AudioWorkletNode is actually a ScriptProcessorNode created via
 - // a polyfill, make sure the buffer size isn't smaller than the size
 - // the polyfill selected automatically
 - var tempAudioWorkletNode = new AudioWorkletNode(p5sound.audiocontext, processorNames.soundFileProcessor);
 -
 - if (tempAudioWorkletNode instanceof ScriptProcessorNode) {
 - bufferSize = tempAudioWorkletNode.bufferSize;
 - }
-
- tempAudioWorkletNode.disconnect();
- tempAudioWorkletNode = null;
- return bufferSize;
- }
-
- var safeBins = p5.prototype.safeBins = function (bins) {
- var safeBins = 1024;
-
 - if (typeof bins === "string") {
 - console.log("the value of bins must be a power of two between 16 and 1024");
 - return safeBins;
 - }
 -
 - if (bins && bins >= 16 && bins <= 1024 && Math.log2(bins) % 1 === 0) {
 - return bins;
 - } else {
 - console.log("the value of bins must be a power of two between 16 and 1024");
 - return safeBins;
 - }
- };
-
- return {
- convertToWav: convertToWav,
- midiToFreq: midiToFreq,
- noteToFreq: noteToFreq,
- safeBufferSize: safeBufferSize,
- safeBins: safeBins
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(19)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(e){"use strict";return e.WaveShaper=function(e,t){this._shaper=this.input=this.output=this.context.createWaveShaper(),this._curve=null,Array.isArray(e)?this.curve=e:isFinite(e)||this.isUndef(e)?this._curve=new Float32Array(this.defaultArg(e,1024)):this.isFunction(e)&&(this._curve=new Float32Array(this.defaultArg(t,1024)),this.setMap(e))},e.extend(e.WaveShaper,e.SignalBase),e.WaveShaper.prototype.setMap=function(e){for(var t=0,r=this._curve.length;te)this.cancelScheduledValues(e),this.linearRampToValueAtTime(t,e);else{var n=this._searchAfter(e);n&&(this.cancelScheduledValues(e),n.type===o.TimelineSignal.Type.Linear?this.linearRampToValueAtTime(t,e):n.type===o.TimelineSignal.Type.Exponential&&this.exponentialRampToValueAtTime(t,e)),this.setValueAtTime(t,e)}return this},o.TimelineSignal.prototype.linearRampToValueBetween=function(e,t,i){return this.setRampPoint(t),this.linearRampToValueAtTime(e,i),this},o.TimelineSignal.prototype.exponentialRampToValueBetween=function(e,t,i){return this.setRampPoint(t),this.exponentialRampToValueAtTime(e,i),this},o.TimelineSignal.prototype._searchBefore=function(e){return this._events.get(e)},o.TimelineSignal.prototype._searchAfter=function(e){return this._events.getAfter(e)},o.TimelineSignal.prototype.getValueAtTime=function(e){e=this.toSeconds(e);var t=this._searchAfter(e),i=this._searchBefore(e),n=this._initial;if(null===i)n=this._initial;else if(i.type===o.TimelineSignal.Type.Target){var a,l=this._events.getBefore(i.time);a=null===l?this._initial:l.value,n=this._exponentialApproach(i.time,a,i.value,i.constant,e)}else n=i.type===o.TimelineSignal.Type.Curve?this._curveInterpolate(i.time,i.value,i.duration,e):null===t?i.value:t.type===o.TimelineSignal.Type.Linear?this._linearInterpolate(i.time,i.value,t.time,t.value,e):t.type===o.TimelineSignal.Type.Exponential?this._exponentialInterpolate(i.time,i.value,t.time,t.value,e):i.value;return n},o.TimelineSignal.prototype.connect=o.SignalBase.prototype.connect,o.TimelineSignal.prototype._exponentialApproach=function(e,t,i,n,a){return i+(t-i)*Math.exp(-(a-e)/n)},o.TimelineSignal.prototype._linearInterpolate=function(e,t,i,n,a){return t+(a-e)/(i-e)*(n-t)},o.TimelineSignal.prototype._exponentialInterpolate=function(e,t,i,n,a){return(t=Math.max(this._minOutput,t))*Math.pow(n/t,(a-e)/(i-e))},o.TimelineSignal.prototype._curveInterpolate=function(e,t,i,n){var a=t.length;if(e+i<=n)return t[a-1];if(n<=e)return t[0];var l=(n-e)/i,s=Math.floor((a-1)*l),r=Math.ceil((a-1)*l),o=t[s],p=t[r];return r===s?o:this._linearInterpolate(s,o,r,p,l*(a-1))},o.TimelineSignal.prototype.dispose=function(){o.Signal.prototype.dispose.call(this),o.Param.prototype.dispose.call(this),this._events.dispose(),this._events=null},o.TimelineSignal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var Effect = __webpack_require__(4);
- /**
- *
 - * A p5.Filter uses a Web Audio Biquad Filter to filter
- * the frequency response of an input source. Subclasses
- * include:
- * p5.LowPass:
- * Allows frequencies below the cutoff frequency to pass through,
- * and attenuates frequencies above the cutoff.
- * p5.HighPass:
- * The opposite of a lowpass filter.
- * p5.BandPass:
- * Allows a range of frequencies to pass through and attenuates
- * the frequencies below and above this frequency range.
- *
- * The .res() method controls either width of the
- * bandpass, or resonance of the low/highpass cutoff frequency.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Filter
- * @extends p5.Effect
- * @constructor
- * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
- * @example
- *
- * let fft, noise, filter;
- *
- * function setup() {
- * let cnv = createCanvas(100,100);
- * cnv.mousePressed(makeNoise);
- * fill(255, 0, 255);
- *
- * filter = new p5.BandPass();
- * noise = new p5.Noise();
- * noise.disconnect();
- * noise.connect(filter);
- *
- * fft = new p5.FFT();
- * }
- *
- * function draw() {
- * background(220);
- *
- * // set the BandPass frequency based on mouseX
- * let freq = map(mouseX, 0, width, 20, 10000);
- * freq = constrain(freq, 0, 22050);
- * filter.freq(freq);
- * // give the filter a narrow band (lower res = wider bandpass)
- * filter.res(50);
- *
- * // draw filtered spectrum
- * let spectrum = fft.analyze();
- * noStroke();
- * for (let i = 0; i < spectrum.length; i++) {
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width/spectrum.length, h);
- * }
- * if (!noise.started) {
- * text('tap here and drag to change frequency', 10, 20, width - 20);
- * } else {
- * text('Frequency: ' + round(freq)+'Hz', 20, 20, width - 20);
- * }
- * }
- *
- * function makeNoise() {
- * // see also: `userStartAudio()`
- * noise.start();
- * noise.amp(0.5, 0.2);
- * }
- *
- * function mouseReleased() {
- * noise.amp(0, 0.2);
- * }
- *
- *
- */
-
-
- p5.Filter = function (type) {
- Effect.call(this);
-
- /**
 - * The p5.Filter is built with a
 - * Web Audio BiquadFilter Node.
 - *
 - * @property {BiquadFilterNode} biquad
- */
-
- this.biquad = this.ac.createBiquadFilter();
- this.input.connect(this.biquad);
- this.biquad.connect(this.wet);
-
- if (type) {
- this.setType(type);
- }
-
-
- this._on = true;
- this._untoggledType = this.biquad.type;
- };
-
- p5.Filter.prototype = Object.create(Effect.prototype);
- /**
- * Filter an audio signal according to a set
- * of filter parameters.
- *
- * @method process
 - * @param {Object} src An object that outputs audio
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance/Width of the filter frequency
- * from 0.001 to 1000
- */
-
- p5.Filter.prototype.process = function (src, freq, res, time) {
- src.connect(this.input);
- this.set(freq, res, time);
- };
- /**
- * Set the frequency and the resonance of the filter.
- *
- * @method set
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance (Q) from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
-
-
- p5.Filter.prototype.set = function (freq, res, time) {
- if (freq) {
- this.freq(freq, time);
- }
-
- if (res) {
- this.res(res, time);
- }
- };
- /**
- * Set the filter frequency, in Hz, from 10 to 22050 (the range of
- * human hearing, although in reality most people hear in a narrower
- * range).
- *
- * @method freq
- * @param {Number} freq Filter Frequency
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current frequency value
- */
-
-
- p5.Filter.prototype.freq = function (freq, time) {
- var t = time || 0;
-
- if (freq <= 0) {
- freq = 1;
- }
-
- if (typeof freq === 'number') {
- this.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.frequency.exponentialRampToValueAtTime(freq, this.ac.currentTime + 0.02 + t);
- } else if (freq) {
- freq.connect(this.biquad.frequency);
- }
-
- return this.biquad.frequency.value;
- };
- /**
- * Controls either width of a bandpass frequency,
- * or the resonance of a low/highpass cutoff frequency.
- *
- * @method res
- * @param {Number} res Resonance/Width of filter freq
- * from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current res value
- */
-
-
- p5.Filter.prototype.res = function (res, time) {
- var t = time || 0;
-
- if (typeof res === 'number') {
- this.biquad.Q.value = res;
- this.biquad.Q.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.Q.linearRampToValueAtTime(res, this.ac.currentTime + 0.02 + t);
- } else if (res) {
- res.connect(this.biquad.Q);
- }
-
- return this.biquad.Q.value;
- };
- /**
- * Controls the gain attribute of a Biquad Filter.
- * This is distinctly different from .amp() which is inherited from p5.Effect
- * .amp() controls the volume via the output gain node
- * p5.Filter.gain() controls the gain parameter of a Biquad Filter node.
- *
- * @method gain
- * @param {Number} gain
- * @return {Number} Returns the current or updated gain value
- */
-
-
- p5.Filter.prototype.gain = function (gain, time) {
- var t = time || 0;
-
- if (typeof gain === 'number') {
- this.biquad.gain.value = gain;
- this.biquad.gain.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.gain.linearRampToValueAtTime(gain, this.ac.currentTime + 0.02 + t);
- } else if (gain) {
- gain.connect(this.biquad.gain);
- }
-
- return this.biquad.gain.value;
- };
- /**
- * Toggle function. Switches between the specified type and allpass
- *
- * @method toggle
 - * @return {boolean} current toggle value
- */
-
-
- p5.Filter.prototype.toggle = function () {
- this._on = !this._on;
-
- if (this._on === true) {
- this.biquad.type = this._untoggledType;
- } else if (this._on === false) {
- this.biquad.type = 'allpass';
- }
-
- return this._on;
- };
- /**
- * Set the type of a p5.Filter. Possible types include:
- * "lowpass" (default), "highpass", "bandpass",
- * "lowshelf", "highshelf", "peaking", "notch",
- * "allpass".
- *
- * @method setType
- * @param {String} t
- */
-
-
- p5.Filter.prototype.setType = function (t) {
- this.biquad.type = t;
- this._untoggledType = this.biquad.type;
- };
-
- p5.Filter.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
-
- if (this.biquad) {
- this.biquad.disconnect();
- delete this.biquad;
- }
- };
- /**
- * Constructor: new p5.LowPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('lowpass').
- * See p5.Filter for methods.
- *
- * @class p5.LowPass
- * @constructor
- * @extends p5.Filter
- */
-
-
- p5.LowPass = function () {
- p5.Filter.call(this, 'lowpass');
- };
-
- p5.LowPass.prototype = Object.create(p5.Filter.prototype);
- /**
- * Constructor: new p5.HighPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('highpass').
- * See p5.Filter for methods.
- *
- * @class p5.HighPass
- * @constructor
- * @extends p5.Filter
- */
-
- p5.HighPass = function () {
- p5.Filter.call(this, 'highpass');
- };
-
- p5.HighPass.prototype = Object.create(p5.Filter.prototype);
- /**
- * Constructor: new p5.BandPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('bandpass').
- * See p5.Filter for methods.
- *
- * @class p5.BandPass
- * @constructor
- * @extends p5.Filter
- */
-
- p5.BandPass = function () {
- p5.Filter.call(this, 'bandpass');
- };
-
- p5.BandPass.prototype = Object.create(p5.Filter.prototype);
- return p5.Filter;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(25),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Subtract=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.output=new n.Gain,this._neg=new n.Negate,this._param=this.input[1]=new n.Signal(t),this._param.chain(this._neg,this._sum)},n.extend(n.Subtract,n.Signal),n.Subtract.prototype.dispose=function(){return n.prototype.dispose.call(this),this._neg.dispose(),this._neg=null,this._sum.disconnect(),this._sum=null,this._param.dispose(),this._param=null,this},n.Subtract}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-(function(global) {var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;
-
-global.TONE_SILENCE_VERSION_LOGGING = true;
-!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(35), __webpack_require__(12), __webpack_require__(0)], __WEBPACK_AMD_DEFINE_RESULT__ = (function (StartAudioContext, Context, Tone) {
- var audiocontext = new window.AudioContext();
-
- Tone.context.dispose();
- Tone.setContext(audiocontext);
- /**
- *
 - * Returns the Audio Context for this sketch. Useful for users
 - * who would like to dig deeper into the Web Audio API.
- *
- *
 - * Some browsers require users to startAudioContext
 - * with a user gesture, such as touchStarted in the example below.
 - *
 - * It is not only good practice to give users control over starting
 - * audio; this policy is enforced by many web browsers, including iOS and
 - * Google Chrome, which create the Web Audio API's
 - * Audio Context
 - * in a suspended state.
- *
- *
 - * Under these policies, sound will not play until a user
- * interaction event (i.e. mousePressed()) explicitly resumes
- * the AudioContext, or starts an audio node. This can be accomplished by
- * calling start() on a p5.Oscillator,
- * play() on a p5.SoundFile, or simply
- * userStartAudio().
- *
- *
 - * userStartAudio() starts the AudioContext on a user
- * gesture. The default behavior will enable audio on any
- * mouseUp or touchEnd event. It can also be placed in a specific
- * interaction function, such as mousePressed() as in the
- * example below. This method utilizes
 - * StartAudioContext,
 - * a library by Yotam Mann (MIT License, 2016).
- * @param {Element|Array} [element(s)] This argument can be an Element,
- * Selector String, NodeList, p5.Element,
- * jQuery Element, or an Array of any of those.
- * @param {Function} [callback] Callback to invoke when the AudioContext
- * has started
- * @return {Promise} Returns a Promise that resolves when
- * the AudioContext state is 'running'
- * @method userStartAudio
- * @for p5
- * @example
- *
- * function setup() {
- * // mimics the autoplay policy
- * getAudioContext().suspend();
- *
- * let mySynth = new p5.MonoSynth();
- *
- * // This won't play until the context has resumed
- * mySynth.play('A6');
- * }
- * function draw() {
- * background(220);
- * textAlign(CENTER, CENTER);
- * text(getAudioContext().state, width/2, height/2);
- * }
- * function mousePressed() {
- * userStartAudio();
- * }
- *
- */
-
-
- p5.prototype.userStartAudio = function (elements, callback) {
- var elt = elements;
-
- if (elements instanceof p5.Element) {
- elt = elements.elt;
- } else if (elements instanceof Array && elements[0] instanceof p5.Element) {
- elt = elements.map(function (e) {
- return e.elt;
- });
- }
-
- return StartAudioContext(audiocontext, elt, callback);
- };
-
- return audiocontext;
-}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-}.call(this, __webpack_require__(34)))
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){"use strict";return o.Emitter=function(){this._events={}},o.extend(o.Emitter),o.Emitter.prototype.on=function(t,e){for(var i=t.split(/\W+/),r=0;r
 - /**
 - * Creates a signal that oscillates between -1.0 and 1.0.
- * By default, the oscillation takes the form of a sinusoidal
- * shape ('sine'). Additional types include 'triangle',
- * 'sawtooth' and 'square'. The frequency defaults to
- * 440 oscillations per second (440Hz, equal to the pitch of an
- * 'A' note).
- *
 - * @class p5.Oscillator
 - * @constructor
 - * @param {Number} [freq] frequency defaults to 440Hz
 - * @param {String} [type] type of oscillator. Options:
 - * 'sine' (default), 'triangle', 'sawtooth', 'square'
 - * @example
 - *
- * let osc, playing, freq, amp;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playOscillator);
- * osc = new p5.Oscillator('sine');
- * }
- *
- * function draw() {
- * background(220)
- * freq = constrain(map(mouseX, 0, width, 100, 500), 100, 500);
- * amp = constrain(map(mouseY, height, 0, 0, 1), 0, 1);
- *
- * text('tap to play', 20, 20);
- * text('freq: ' + freq, 20, 40);
- * text('amp: ' + amp, 20, 60);
- *
- * if (playing) {
- * // smooth the transitions by 0.1 seconds
- * osc.freq(freq, 0.1);
- * osc.amp(amp, 0.1);
- * }
- * }
- *
- * function playOscillator() {
- * // starting an oscillator on a user gesture will enable audio
- * // in browsers that have a strict autoplay policy.
- * // See also: userStartAudio();
- * osc.start();
- * playing = true;
- * }
- *
- * function mouseReleased() {
- * // ramp amplitude to 0 over 0.5 seconds
- * osc.amp(0, 0.5);
- * playing = false;
- * }
- *
- */
-
-
 - p5.Oscillator = function (freq, type) {
 - // accept constructor arguments in either order: (freq, type) or (type, freq)
 - if (typeof freq === 'string') {
 - var f = type;
 - type = freq;
 - freq = f;
 - }
 -
 - if (typeof type === 'number') {
 - f = type;
 - type = freq;
 - freq = f;
 - }
-
- this.started = false;
-
- this.phaseAmount = undefined;
- this.oscillator = p5sound.audiocontext.createOscillator();
- this.f = freq || 440.0;
-
- this.oscillator.type = type || 'sine';
- this.oscillator.frequency.setValueAtTime(this.f, p5sound.audiocontext.currentTime);
-
- this.output = p5sound.audiocontext.createGain();
- this._freqMods = [];
-
- this.output.gain.value = 0.5;
- this.output.gain.setValueAtTime(0.5, p5sound.audiocontext.currentTime);
- this.oscillator.connect(this.output);
-
- this.panPosition = 0.0;
- this.connection = p5sound.input;
-
- this.panner = new p5.Panner(this.output, this.connection, 1);
-
- this.mathOps = [this.output];
-
- p5sound.soundArray.push(this);
- };
- /**
- * Start an oscillator.
- *
- * Starting an oscillator on a user gesture will enable audio in browsers
- * that have a strict autoplay policy, including Chrome and most mobile
- * devices. See also: `userStartAudio()`.
- *
- * @method start
- * @for p5.Oscillator
- * @param {Number} [time] startTime in seconds from now.
- * @param {Number} [frequency] frequency in Hz.
- */
-
-
- p5.Oscillator.prototype.start = function (time, f) {
- if (this.started) {
- var now = p5sound.audiocontext.currentTime;
- this.stop(now);
- }
-
- if (!this.started) {
- var freq = f || this.f;
- var type = this.oscillator.type;
-
- if (this.oscillator) {
- this.oscillator.disconnect();
- delete this.oscillator;
- }
-
-
- this.oscillator = p5sound.audiocontext.createOscillator();
- this.oscillator.frequency.value = Math.abs(freq);
- this.oscillator.type = type;
-
- this.oscillator.connect(this.output);
- time = time || 0;
- this.oscillator.start(time + p5sound.audiocontext.currentTime);
- this.freqNode = this.oscillator.frequency;
-
- for (var i in this._freqMods) {
- if (typeof this._freqMods[i].connect !== 'undefined') {
- this._freqMods[i].connect(this.oscillator.frequency);
- }
- }
-
- this.started = true;
- }
- };
- /**
- * Stop an oscillator. Accepts an optional parameter
- * to determine how long (in seconds from now) until the
- * oscillator stops.
- *
- * @method stop
- * @for p5.Oscillator
- * @param {Number} secondsFromNow Time, in seconds from now.
- */
-
-
- p5.Oscillator.prototype.stop = function (time) {
- if (this.started) {
- var t = time || 0;
- var now = p5sound.audiocontext.currentTime;
- this.oscillator.stop(t + now);
- this.started = false;
- }
- };
- /**
- * Set the amplitude between 0 and 1.0. Or, pass in an object
- * such as an oscillator to modulate amplitude with an audio signal.
- *
- * @method amp
- * @for p5.Oscillator
- * @param {Number|Object} vol between 0 and 1.0
- * or a modulating signal/oscillator
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {AudioParam} gain If no value is provided,
- * returns the Web Audio API
- * AudioParam that controls
- * this oscillator's
- * gain/amplitude/volume)
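 - * @example
 - *
 - * // a sketch, assuming `osc` is a started p5.Oscillator:
 - * // osc.amp(0.8);         // set amplitude immediately
 - * // osc.amp(0, 0.5);      // fade to silence over 0.5 seconds
 - * // osc.amp(0.5, 0.1, 2); // 2 seconds from now, ramp to 0.5 over 0.1s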
- */
-
-
- p5.Oscillator.prototype.amp = function (vol, rampTime, tFromNow) {
- var self = this;
-
- if (typeof vol === 'number') {
 - rampTime = rampTime || 0;
 - tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
- } else if (vol) {
- vol.connect(self.output.gain);
- } else {
- return this.output.gain;
- }
- };
-
-
- p5.Oscillator.prototype.fade = p5.Oscillator.prototype.amp;
-
- p5.Oscillator.prototype.getAmp = function () {
- return this.output.gain.value;
- };
- /**
- * Set frequency of an oscillator to a value. Or, pass in an object
- * such as an oscillator to modulate the frequency with an audio signal.
- *
- * @method freq
- * @for p5.Oscillator
- * @param {Number|Object} Frequency Frequency in Hz
- * or modulating signal/oscillator
- * @param {Number} [rampTime] Ramp time (in seconds)
- * @param {Number} [timeFromNow] Schedule this event to happen
- * at x seconds from now
- * @return {AudioParam} Frequency If no value is provided,
- * returns the Web Audio API
- * AudioParam that controls
- * this oscillator's frequency
- * @example
- *
- * let osc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playOscillator);
- * osc = new p5.Oscillator(300);
- * background(220);
- * text('tap to play', 20, 20);
- * }
- *
- * function playOscillator() {
- * osc.start();
- * osc.amp(0.5);
- * // start at 700Hz
- * osc.freq(700);
- * // ramp to 60Hz over 0.7 seconds
- * osc.freq(60, 0.7);
- * osc.amp(0, 0.1, 0.7);
- * }
- *
- */
-
-
- p5.Oscillator.prototype.freq = function (val, rampTime, tFromNow) {
- if (typeof val === 'number' && !isNaN(val)) {
- this.f = val;
- var now = p5sound.audiocontext.currentTime;
 - rampTime = rampTime || 0;
 - tFromNow = tFromNow || 0;
-
- if (rampTime === 0) {
- this.oscillator.frequency.setValueAtTime(val, tFromNow + now);
- } else {
- if (val > 0) {
- this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
- } else {
- this.oscillator.frequency.linearRampToValueAtTime(val, tFromNow + rampTime + now);
- }
- }
-
-
- if (this.phaseAmount) {
- this.phase(this.phaseAmount);
- }
- } else if (val) {
- if (val.output) {
- val = val.output;
- }
-
- val.connect(this.oscillator.frequency);
-
- this._freqMods.push(val);
- } else {
- return this.oscillator.frequency;
- }
- };
-
- p5.Oscillator.prototype.getFreq = function () {
- return this.oscillator.frequency.value;
- };
- /**
- * Set type to 'sine', 'triangle', 'sawtooth' or 'square'.
- *
- * @method setType
- * @for p5.Oscillator
- * @param {String} type 'sine', 'triangle', 'sawtooth' or 'square'.
- */
-
-
- p5.Oscillator.prototype.setType = function (type) {
- this.oscillator.type = type;
- };
-
- p5.Oscillator.prototype.getType = function () {
- return this.oscillator.type;
- };
- /**
- * Connect to a p5.sound / Web Audio object.
- *
- * @method connect
- * @for p5.Oscillator
- * @param {Object} unit A p5.sound or Web Audio object
- */
-
-
- p5.Oscillator.prototype.connect = function (unit) {
- if (!unit) {
- this.panner.connect(p5sound.input);
- } else if (unit.hasOwnProperty('input')) {
- this.panner.connect(unit.input);
- this.connection = unit.input;
- } else {
- this.panner.connect(unit);
- this.connection = unit;
- }
- };
- /**
- * Disconnect all outputs
- *
- * @method disconnect
- * @for p5.Oscillator
- */
-
-
- p5.Oscillator.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
-
- if (this.panner) {
- this.panner.disconnect();
-
- if (this.output) {
- this.output.connect(this.panner);
- }
- }
-
- this.oscMods = [];
- };
- /**
- * Pan between Left (-1) and Right (1)
- *
- * @method pan
- * @for p5.Oscillator
- * @param {Number} panning Number between -1 and 1
- * @param {Number} timeFromNow schedule this event to happen
- * seconds from now
- */
-
-
- p5.Oscillator.prototype.pan = function (pval, tFromNow) {
- this.panPosition = pval;
- this.panner.pan(pval, tFromNow);
- };
-
- p5.Oscillator.prototype.getPan = function () {
- return this.panPosition;
- };
-
-
- p5.Oscillator.prototype.dispose = function () {
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
-
- if (this.oscillator) {
- var now = p5sound.audiocontext.currentTime;
- this.stop(now);
- this.disconnect();
- this.panner = null;
- this.oscillator = null;
- }
-
-
- if (this.osc2) {
- this.osc2.dispose();
- }
- };
- /**
- * Set the phase of an oscillator between 0.0 and 1.0.
- * In this implementation, phase is a delay time
- * based on the oscillator's current frequency.
- *
- * @method phase
- * @for p5.Oscillator
- * @param {Number} phase float between 0.0 and 1.0
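 - * @example
 - *
 - * // a sketch, assuming `osc` is a p5.Oscillator at frequency f:
 - * // osc.phase(0.5); // delays the signal by half a period (0.5 / f seconds)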
- */
-
-
- p5.Oscillator.prototype.phase = function (p) {
- var delayAmt = p5.prototype.map(p, 0, 1.0, 0, 1 / this.f);
- var now = p5sound.audiocontext.currentTime;
- this.phaseAmount = p;
-
- if (!this.dNode) {
- this.dNode = p5sound.audiocontext.createDelay();
-
- this.oscillator.disconnect();
- this.oscillator.connect(this.dNode);
- this.dNode.connect(this.output);
- }
-
-
- this.dNode.delayTime.setValueAtTime(delayAmt, now);
- };
-
-
 - var sigChain = function sigChain(o, mathObj, thisChain, nextChain, type) {
 - var chainSource = o.oscillator;
 - var found = -1;
 - // if this type of math op already exists in the chain, replace it
 - for (var i in o.mathOps) {
 - if (o.mathOps[i] instanceof type) {
 - chainSource.disconnect();
 - o.mathOps[i].dispose();
 - found = parseInt(i, 10);
 - thisChain = found;
 - if (thisChain < o.mathOps.length - 2) {
 - nextChain = o.mathOps[thisChain + 1];
 - }
 - }
 - }
 - if (thisChain === o.mathOps.length - 1) {
 - o.mathOps.push(nextChain);
 - }
 - // the chain's source is the op before this one (or the oscillator itself)
 - if (found > 0) {
 - chainSource = o.mathOps[found - 1];
 - }
 - chainSource.disconnect();
 - chainSource.connect(mathObj);
 - mathObj.connect(nextChain);
 - o.mathOps[thisChain] = mathObj;
 - return o;
 - };
- /**
- * Add a value to the p5.Oscillator's output amplitude,
- * and return the oscillator. Calling this method again
- * will override the initial add() with a new value.
- *
- * @method add
- * @for p5.Oscillator
- * @param {Number} number Constant number to add
- * @return {p5.Oscillator} Oscillator Returns this oscillator
- * with scaled output
- *
- */
-
-
- p5.Oscillator.prototype.add = function (num) {
- var add = new Add(num);
- var thisChain = this.mathOps.length - 1;
- var nextChain = this.output;
- return sigChain(this, add, thisChain, nextChain, Add);
- };
- /**
- * Multiply the p5.Oscillator's output amplitude
- * by a fixed value (i.e. turn it up!). Calling this method
- * again will override the initial mult() with a new value.
- *
- * @method mult
- * @for p5.Oscillator
- * @param {Number} number Constant number to multiply
- * @return {p5.Oscillator} Oscillator Returns this oscillator
- * with multiplied output
- */
-
-
- p5.Oscillator.prototype.mult = function (num) {
- var mult = new Mult(num);
- var thisChain = this.mathOps.length - 1;
- var nextChain = this.output;
- return sigChain(this, mult, thisChain, nextChain, Mult);
- };
- /**
- * Scale this oscillator's amplitude values to a given
- * range, and return the oscillator. Calling this method
- * again will override the initial scale() with new values.
- *
- * @method scale
- * @for p5.Oscillator
 - * @param {Number} inMin input range minimum
 - * @param {Number} inMax input range maximum
 - * @param {Number} outMin output range minimum
 - * @param {Number} outMax output range maximum
- * @return {p5.Oscillator} Oscillator Returns this oscillator
- * with scaled output
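 - * @example
 - *
 - * // a sketch: scale a low-frequency oscillator from its native -1..1
 - * // range to 0..0.5, and use it to modulate `osc`'s amplitude:
 - * // let lfo = new p5.Oscillator(2); // 2 Hz
 - * // lfo.disconnect();
 - * // lfo.start();
 - * // osc.amp(lfo.scale(-1, 1, 0, 0.5));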
- */
-
-
- p5.Oscillator.prototype.scale = function (inMin, inMax, outMin, outMax) {
- var mapOutMin, mapOutMax;
-
- if (arguments.length === 4) {
- mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
- mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
- } else {
- mapOutMin = arguments[0];
- mapOutMax = arguments[1];
- }
-
- var scale = new Scale(mapOutMin, mapOutMax);
- var thisChain = this.mathOps.length - 1;
- var nextChain = this.output;
- return sigChain(this, scale, thisChain, nextChain, Scale);
- };
-
- /**
- * Constructor: new p5.SinOsc().
- * This creates a Sine Wave Oscillator and is
- * equivalent to new p5.Oscillator('sine')
- * or creating a p5.Oscillator and then calling
- * its method setType('sine').
- * See p5.Oscillator for methods.
- *
- * @class p5.SinOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
-
-
- p5.SinOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'sine');
- };
-
- p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.TriOsc().
- * This creates a Triangle Wave Oscillator and is
- * equivalent to new p5.Oscillator('triangle')
- * or creating a p5.Oscillator and then calling
- * its method setType('triangle').
- * See p5.Oscillator for methods.
- *
- * @class p5.TriOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
-
- p5.TriOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'triangle');
- };
-
- p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.SawOsc().
- * This creates a SawTooth Wave Oscillator and is
- * equivalent to new p5.Oscillator('sawtooth')
- * or creating a p5.Oscillator and then calling
- * its method setType('sawtooth').
- * See p5.Oscillator for methods.
- *
- * @class p5.SawOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
-
- p5.SawOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'sawtooth');
- };
-
- p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.SqrOsc().
- * This creates a Square Wave Oscillator and is
- * equivalent to new p5.Oscillator('square')
- * or creating a p5.Oscillator and then calling
- * its method setType('square').
- * See p5.Oscillator for methods.
- *
- * @class p5.SqrOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
-
- p5.SqrOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'square');
- };
-
- p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Timeline=function(){var e=this.optionsObject(arguments,["memory"],i.Timeline.defaults);this._timeline=[],this._toRemove=[],this._iterating=!1,this.memory=e.memory},i.extend(i.Timeline),i.Timeline.defaults={memory:1/0},Object.defineProperty(i.Timeline.prototype,"length",{get:function(){return this._timeline.length}}),i.Timeline.prototype.add=function(e){if(this.isUndef(e.time))throw new Error("Tone.Timeline: events must have a time attribute");if(this._timeline.length){var i=this._search(e.time);this._timeline.splice(i+1,0,e)}else this._timeline.push(e);if(this.length>this.memory){var t=this.length-this.memory;this._timeline.splice(0,t)}return this},i.Timeline.prototype.remove=function(e){if(this._iterating)this._toRemove.push(e);else{var i=this._timeline.indexOf(e);-1!==i&&this._timeline.splice(i,1)}return this},i.Timeline.prototype.get=function(e){var i=this._search(e);return-1!==i?this._timeline[i]:null},i.Timeline.prototype.peek=function(){return this._timeline[0]},i.Timeline.prototype.shift=function(){return this._timeline.shift()},i.Timeline.prototype.getAfter=function(e){var i=this._search(e);return i+1=e&&(this._timeline=[]);return this},i.Timeline.prototype.cancelBefore=function(e){if(this._timeline.length){var i=this._search(e);0<=i&&(this._timeline=this._timeline.slice(i+1))}return this},i.Timeline.prototype._search=function(e){var i=0,t=this._timeline.length,n=t;if(0e)return r;s.time>e?n=r:s.time=e;)t--;return this._iterate(i,t+1),this},i.Timeline.prototype.forEachAtTime=function(i,t){var e=this._search(i);return-1!==e&&this._iterate(function(e){e.time===i&&t(e)},0,e),this},i.Timeline.prototype.dispose=function(){i.prototype.dispose.call(this),this._timeline=null,this._toRemove=null},i.Timeline}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(3),__webpack_require__(2)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){"use strict";return t.Negate=function(){this._multiply=this.input=this.output=new t.Multiply(-1)},t.extend(t.Negate,t.SignalBase),t.Negate.prototype.dispose=function(){return t.prototype.dispose.call(this),this._multiply.dispose(),this._multiply=null,this},t.Negate}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(3),__webpack_require__(6)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(e){"use strict";return e.GreaterThanZero=function(){this._thresh=this.output=new e.WaveShaper(function(e){return e<=0?0:1},127),this._scale=this.input=new e.Multiply(1e4),this._scale.connect(this._thresh)},e.extend(e.GreaterThanZero,e.SignalBase),e.GreaterThanZero.prototype.dispose=function(){return e.prototype.dispose.call(this),this._scale.dispose(),this._scale=null,this._thresh.dispose(),this._thresh=null,this},e.GreaterThanZero}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(14),__webpack_require__(66),__webpack_require__(18),__webpack_require__(12)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){"use strict";return o.Clock=function(){o.Emitter.call(this);var t=this.optionsObject(arguments,["callback","frequency"],o.Clock.defaults);this.callback=t.callback,this._nextTick=0,this._lastState=o.State.Stopped,this.frequency=new o.TimelineSignal(t.frequency,o.Type.Frequency),this._readOnly("frequency"),this.ticks=0,this._state=new o.TimelineState(o.State.Stopped),this._boundLoop=this._loop.bind(this),this.context.on("tick",this._boundLoop)},o.extend(o.Clock,o.Emitter),o.Clock.defaults={callback:o.noOp,frequency:1,lookAhead:"auto"},Object.defineProperty(o.Clock.prototype,"state",{get:function(){return this._state.getValueAtTime(this.now())}}),o.Clock.prototype.start=function(t,e){return t=this.toSeconds(t),this._state.getValueAtTime(t)!==o.State.Started&&this._state.add({state:o.State.Started,time:t,offset:e}),this},o.Clock.prototype.stop=function(t){return t=this.toSeconds(t),this._state.cancel(t),this._state.setStateAtTime(o.State.Stopped,t),this},o.Clock.prototype.pause=function(t){return t=this.toSeconds(t),this._state.getValueAtTime(t)===o.State.Started&&this._state.setStateAtTime(o.State.Paused,t),this},o.Clock.prototype._loop=function(){for(var t=this.now()+this.context.lookAhead+this.context.updateInterval+2*this.context.lag;t>this._nextTick&&this._state;){var e=this._state.getValueAtTime(this._nextTick);if(e!==this._lastState){this._lastState=e;var i=this._state.get(this._nextTick);e===o.State.Started?(this._nextTick=i.time,this.isUndef(i.offset)||(this.ticks=i.offset),this.emit("start",i.time,this.ticks)):e===o.State.Stopped?(this.ticks=0,this.emit("stop",i.time)):e===o.State.Paused&&this.emit("pause",i.time)}var s=this._nextTick;this.frequency&&(this._nextTick+=1/this.frequency.getValueAtTime(this._nextTick),e===o.State.Started&&(this.callback(s),this.ticks++))}},o.Clock.prototype.getStateAtTime=function(t){return t=this.toSeconds(t),this._state.getValueAtTime(t)},o.Clock.prototype.dispose=function(){o.Emitter.prototype.dispose.call(this),this.context.off("tick",this._boundLoop),this._writable("frequency"),this.frequency.dispose(),this.frequency=null,this._boundLoop=null,this._nextTick=1/0,this.callback=null,this._state.dispose(),this._state=null},o.Clock}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var AudioVoice = __webpack_require__(29);
-
- var noteToFreq = __webpack_require__(5).noteToFreq;
-
- var DEFAULT_SUSTAIN = 0.15;
- /**
- * A MonoSynth is used as a single voice for sound synthesis.
- * This class is designed for use with the PolySynth
- * class. Custom synthesizers should be built by inheriting from
- * this class.
- *
- * @class p5.MonoSynth
- * @constructor
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- * background(220);
- * textAlign(CENTER);
- * text('tap to play', width/2, height/2);
- *
- * monoSynth = new p5.MonoSynth();
- * }
- *
- * function playSynth() {
- * userStartAudio();
- *
- * let note = random(['Fb4', 'G4']);
- * // note velocity (volume, from 0 to 1)
- * let velocity = random();
- * // time from now (in seconds)
- * let time = 0;
- * // note duration (in seconds)
- * let dur = 1/6;
- *
- * monoSynth.play(note, velocity, time, dur);
- * }
- *
- **/
-
- p5.MonoSynth = function () {
- AudioVoice.call(this);
- this.oscillator = new p5.Oscillator();
- this.env = new p5.Envelope();
- this.env.setRange(1, 0);
- this.env.setExp(true);
-
- this.setADSR(0.02, 0.25, 0.05, 0.35);
-
- this.oscillator.disconnect();
- this.oscillator.connect(this.output);
- this.env.disconnect();
- this.env.setInput(this.output.gain);
-
- this.oscillator.output.gain.value = 1.0;
- this.oscillator.start();
- this.connect();
- p5sound.soundArray.push(this);
- };
-
- p5.MonoSynth.prototype = Object.create(p5.AudioVoice.prototype);
- /**
- * Play tells the MonoSynth to start playing a note. This method schedules
- * the calling of .triggerAttack and .triggerRelease.
- *
- * @method play
- * @for p5.MonoSynth
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- *                                 value in Note/Octave format ("C4", "Eb3", etc.).
- *                                 Defaults to 440 Hz.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @param {Number} [sustainTime] time to sustain before releasing the envelope. Defaults to 0.15 seconds.
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- * background(220);
- * textAlign(CENTER);
- * text('tap to play', width/2, height/2);
- *
- * monoSynth = new p5.MonoSynth();
- * }
- *
- * function playSynth() {
- * userStartAudio();
- *
- * let note = random(['Fb4', 'G4']);
- * // note velocity (volume, from 0 to 1)
- * let velocity = random();
- * // time from now (in seconds)
- * let time = 0;
- * // note duration (in seconds)
- * let dur = 1/6;
- *
- * monoSynth.play(note, velocity, time, dur);
- * }
- *
- *
- */
-
- p5.MonoSynth.prototype.play = function (note, velocity, secondsFromNow, susTime) {
- // note: ~~ would truncate fractional scheduling times, so default to 0 instead
- var tFromNow = secondsFromNow || 0;
- this.triggerAttack(note, velocity, tFromNow);
- this.triggerRelease(tFromNow + (susTime || DEFAULT_SUSTAIN));
- };
- /**
- * Trigger the Attack and Decay portions of the Envelope.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
- *
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- *                                 value in Note/Octave format ("C4", "Eb3", etc.).
- *                                 Defaults to 440 Hz.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @method triggerAttack
- * @for p5.MonoSynth
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(triggerAttack);
- * background(220);
- * text('tap here for attack, let go to release', 5, 20, width - 20);
- * monoSynth = new p5.MonoSynth();
- * }
- *
- * function triggerAttack() {
- * userStartAudio();
- *
- * monoSynth.triggerAttack("E3");
- * }
- *
- * function mouseReleased() {
- * monoSynth.triggerRelease();
- * }
- *
- */
-
-
- p5.MonoSynth.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
- var secondsFromNow = secondsFromNow || 0; // keep fractional times; ~~ would truncate them
- var freq = noteToFreq(note);
- var vel = velocity || 0.1;
- this.oscillator.freq(freq, 0, secondsFromNow);
- this.env.ramp(this.output.gain, secondsFromNow, vel);
- };
- /**
- * Trigger the release of the Envelope. This is similar to releasing
- * the key on a piano and letting the sound fade according to the
- * release level and release time.
- *
- * @param {Number} secondsFromNow time to trigger the release
- * @method triggerRelease
- * @for p5.MonoSynth
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(triggerAttack);
- * background(220);
- * text('tap here for attack, let go to release', 5, 20, width - 20);
- * monoSynth = new p5.MonoSynth();
- * }
- *
- * function triggerAttack() {
- * userStartAudio();
- *
- * monoSynth.triggerAttack("E3");
- * }
- *
- * function mouseReleased() {
- * monoSynth.triggerRelease();
- * }
- *
- */
-
-
- p5.MonoSynth.prototype.triggerRelease = function (secondsFromNow) {
- var secondsFromNow = secondsFromNow || 0;
- this.env.ramp(this.output.gain, secondsFromNow, 0);
- };
- /**
- * Set values like a traditional
- * ADSR envelope.
- *
- * @method setADSR
- * @for p5.MonoSynth
- * @param {Number} attackTime Time (in seconds) before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- */
-
-
- p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) {
- this.env.setADSR(attack, decay, sustain, release);
- };
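-
- // A minimal usage sketch (hypothetical values, in the style of the examples
- // above): with MonoSynth's default range (attackLevel 1, releaseLevel 0) and
- // susRatio 0.5, the envelope rises to 1 over 0.02s, decays to 0.5 over 0.25s,
- // sustains at 0.5, then releases over 0.35s.
- //
- // monoSynth.setADSR(0.02, 0.25, 0.5, 0.35); // attackTime, decayTime, susRatio, releaseTime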
- /**
- * Getters and Setters
- * @property {Number} attack
- * @for p5.MonoSynth
- */
-
- /**
- * @property {Number} decay
- * @for p5.MonoSynth
- */
-
- /**
- * @property {Number} sustain
- * @for p5.MonoSynth
- */
-
- /**
- * @property {Number} release
- * @for p5.MonoSynth
- */
-
-
- Object.defineProperties(p5.MonoSynth.prototype, {
- 'attack': {
- get: function get() {
- return this.env.aTime;
- },
- set: function set(attack) {
- this.env.setADSR(attack, this.env.dTime, this.env.sPercent, this.env.rTime);
- }
- },
- 'decay': {
- get: function get() {
- return this.env.dTime;
- },
- set: function set(decay) {
- this.env.setADSR(this.env.aTime, decay, this.env.sPercent, this.env.rTime);
- }
- },
- 'sustain': {
- get: function get() {
- return this.env.sPercent;
- },
- set: function set(sustain) {
- this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime);
- }
- },
- 'release': {
- get: function get() {
- return this.env.rTime;
- },
- set: function set(release) {
- this.env.setADSR(this.env.aTime, this.env.dTime, this.env.sPercent, release);
- }
- }
- });
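-
- // A minimal sketch (assuming a monoSynth instance as in the examples above):
- // each property proxies env.setADSR, so assigning one leaves the others intact.
- //
- // monoSynth.attack = 0.5; // slow half-second fade-in
- // console.log(monoSynth.attack); // -> 0.5 (reads this.env.aTime)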
- /**
- * MonoSynth amp
- * @method amp
- * @for p5.MonoSynth
- * @param {Number} vol desired volume
- * @param {Number} [rampTime] Time to reach new volume
- * @return {Number} new volume value
- */
-
- p5.MonoSynth.prototype.amp = function (vol, rampTime) {
- var t = rampTime || 0;
-
- if (typeof vol !== 'undefined') {
- this.oscillator.amp(vol, t);
- }
-
- return this.oscillator.amp().value;
- };
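-
- // A minimal sketch (hypothetical values): ramp the synth's volume to 0.2
- // over half a second, then read the current value back.
- //
- // monoSynth.amp(0.2, 0.5);
- // let vol = monoSynth.amp(); // with no arguments, returns the current amplitude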
- /**
- * Connect to a p5.sound / Web Audio object.
- *
- * @method connect
- * @for p5.MonoSynth
- * @param {Object} unit A p5.sound or Web Audio object
- */
-
-
- p5.MonoSynth.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
- };
- /**
- * Disconnect all outputs
- *
- * @method disconnect
- * @for p5.MonoSynth
- */
-
-
- p5.MonoSynth.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
- };
- /**
- * Get rid of the MonoSynth and free up its resources / memory.
- *
- * @method dispose
- * @for p5.MonoSynth
- */
-
-
- p5.MonoSynth.prototype.dispose = function () {
- AudioVoice.prototype.dispose.apply(this);
-
- if (this.env) {
- this.env.dispose();
- }
-
- if (this.oscillator) {
- this.oscillator.dispose();
- }
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function () {
- var p5sound = __webpack_require__(1);
- /**
- * Base class for monophonic synthesizers. Any extensions of this class
- * should follow the API and implement the methods below in order to
- * remain compatible with p5.PolySynth.
- *
- * @class p5.AudioVoice
- * @constructor
- */
-
-
- p5.AudioVoice = function () {
- this.ac = p5sound.audiocontext;
- this.output = this.ac.createGain();
- this.connect();
- p5sound.soundArray.push(this);
- };
-
- p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, susTime) {};
-
- p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {};
-
- p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {};
-
- p5.AudioVoice.prototype.amp = function (vol, rampTime) {};
- /**
- * Connect to p5 objects or Web Audio Nodes
- * @method connect
- * @for p5.AudioVoice
- * @param {Object} unit
- */
-
-
- p5.AudioVoice.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
- };
- /**
- * Disconnect from soundOut
- * @method disconnect
- * @for p5.AudioVoice
- */
-
-
- p5.AudioVoice.prototype.disconnect = function () {
- this.output.disconnect();
- };
-
- p5.AudioVoice.prototype.dispose = function () {
- if (this.output) {
- this.output.disconnect();
- delete this.output;
- }
- };
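-
- // A sketch of a custom voice (an illustration, not part of the library).
- // A subclass should implement the methods above to stay compatible with
- // p5.PolySynth; note that PolySynth's voice stealing also reads
- // voice.oscillator.freq(), so exposing an `oscillator` is advisable.
- //
- // function MyVoice() {
- //   p5.AudioVoice.call(this);
- //   this.oscillator = new p5.Oscillator('triangle');
- //   this.env = new p5.Envelope();
- //   this.oscillator.disconnect();
- //   this.oscillator.connect(this.output);
- //   this.oscillator.start();
- // }
- // MyVoice.prototype = Object.create(p5.AudioVoice.prototype);
- // // ...implement triggerAttack / triggerRelease against this.env and this.oscillator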
-
- return p5.AudioVoice;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var TimelineSignal = __webpack_require__(14);
-
- var noteToFreq = __webpack_require__(5).noteToFreq;
- /**
- * A PolySynth manages a pool of monophonic AudioVoice instances
- * (p5.MonoSynth by default). It handles voice allocation, schedules
- * the notes to be played, and sets parameters on each voice.
- *
- * @class p5.PolySynth
- * @constructor
- *
- * @param {Function} [synthVoice] A monophonic synth voice constructor
- *                                inheriting the AudioVoice class.
- *                                Defaults to p5.MonoSynth.
- * @param {Number} [maxVoices] Number of voices. Defaults to 8.
- * @example
- *
- * let polySynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- * background(220);
- * text('click to play', 20, 20);
- *
- * polySynth = new p5.PolySynth();
- * }
- *
- * function playSynth() {
- * userStartAudio();
- *
- * // note duration (in seconds)
- * let dur = 1.5;
- *
- * // time from now (in seconds)
- * let time = 0;
- *
- * // velocity (volume, from 0 to 1)
- * let vel = 0.1;
- *
- * // notes can overlap with each other
- * polySynth.play('G2', vel, 0, dur);
- * polySynth.play('C3', vel, time += 1/3, dur);
- * polySynth.play('G3', vel, time += 1/3, dur);
- * }
- *
- **/
-
-
- p5.PolySynth = function (audioVoice, maxVoices) {
- this.audiovoices = [];
- /**
- * An object that holds information about which notes have been played and
- * which notes are currently being played. New notes are added as keys
- * on the fly. While a note has been attacked but not released, the value of the
- * key is a timeline that tracks the index of the audiovoice generating that note.
- * When notes are released, the key is removed and its value becomes undefined.
- * @property notes
- */
-
- this.notes = {};
-
- this._newest = 0;
- this._oldest = 0;
- /**
- * The maximum number of simultaneous voices. A PolySynth must have
- * at least 1 voice; defaults to 8.
- * @property maxVoices
- */
-
- this.maxVoices = maxVoices || 8;
- /**
- * Monosynth that generates the sound for each note that is triggered. The
- * p5.PolySynth defaults to using the p5.MonoSynth as its voice.
- * @property AudioVoice
- */
-
- this.AudioVoice = audioVoice === undefined ? p5.MonoSynth : audioVoice;
- /**
- * This value must only change as a note is attacked or released. Due to delay
- * and sustain times, Tone.TimelineSignal is required to schedule the change in value.
- * @private
- * @property {Tone.TimelineSignal} _voicesInUse
- */
-
- this._voicesInUse = new TimelineSignal(0);
- this.output = p5sound.audiocontext.createGain();
- this.connect();
-
- this._allocateVoices();
-
- p5sound.soundArray.push(this);
- };
- /**
- * Construct the appropriate number of audiovoices
- * @private
- * @for p5.PolySynth
- * @method _allocateVoices
- */
-
-
- p5.PolySynth.prototype._allocateVoices = function () {
- for (var i = 0; i < this.maxVoices; i++) {
- this.audiovoices.push(new this.AudioVoice());
- this.audiovoices[i].disconnect();
- this.audiovoices[i].connect(this.output);
- }
- };
- /**
- * Play a note by triggering noteAttack and noteRelease with sustain time
- *
- * @method play
- * @for p5.PolySynth
- * @param {Number} [note] midi note to play (ranging from 0 to 127; 60 is middle C)
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @param {Number} [sustainTime] time to sustain before releasing the envelope
- * @example
- *
- * let polySynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- * background(220);
- * text('click to play', 20, 20);
- *
- * polySynth = new p5.PolySynth();
- * }
- *
- * function playSynth() {
- * userStartAudio();
- *
- * // note duration (in seconds)
- * let dur = 1.5;
- *
- * // time from now (in seconds)
- * let time = 0;
- *
- * // velocity (volume, from 0 to 1)
- * let vel = 0.1;
- *
- * // notes can overlap with each other
- * polySynth.play('G2', vel, 0, dur);
- * polySynth.play('C3', vel, time += 1/3, dur);
- * polySynth.play('G3', vel, time += 1/3, dur);
- * }
- *
- */
-
-
- p5.PolySynth.prototype.play = function (note, velocity, secondsFromNow, susTime) {
- var susTime = susTime || 1;
- this.noteAttack(note, velocity, secondsFromNow);
- this.noteRelease(note, secondsFromNow + susTime);
- };
- /**
- * noteADSR sets the envelope for a specific note that has just been triggered.
- * Using this method modifies the envelope of whichever audiovoice is being used
- * to play the desired note. The envelope should be reset before noteRelease is called
- * in order to prevent the modified envelope from being used on other notes.
- *
- * @method noteADSR
- * @for p5.PolySynth
- * @param {Number} [note] Midi note on which ADSR should be set.
- * @param {Number} [attackTime] Time (in seconds) before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
-
-
- p5.PolySynth.prototype.noteADSR = function (note, a, d, s, r, timeFromNow) {
- var now = p5sound.audiocontext.currentTime;
- var timeFromNow = timeFromNow || 0;
- var t = now + timeFromNow;
- this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
- };
- /**
- * Set the PolySynth's global envelope. This method modifies the envelopes of each
- * monosynth so that all notes are played with this envelope.
- *
- * @method setADSR
- * @for p5.PolySynth
- * @param {Number} [attackTime] Time (in seconds) before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
-
-
- p5.PolySynth.prototype.setADSR = function (a, d, s, r) {
- this.audiovoices.forEach(function (voice) {
- voice.setADSR(a, d, s, r);
- });
- };
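-
- // A minimal sketch (hypothetical values): give every voice a slow attack so
- // that all subsequently played notes fade in.
- //
- // polySynth.setADSR(0.5, 0.1, 0.7, 0.3);
- // polySynth.play('C4', 0.2, 0, 2);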
- /**
- * Trigger the Attack and Decay portions of a MonoSynth.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
- *
- * @method noteAttack
- * @for p5.PolySynth
- * @param {Number} [note] midi note on which attack should be triggered.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds)
- * @example
- *
- * let polySynth = new p5.PolySynth();
- * let pitches = ['G', 'D', 'G', 'C'];
- * let octaves = [2, 3, 4];
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playChord);
- * background(220);
- * text('tap to play', 20, 20);
- * }
- *
- * function playChord() {
- * userStartAudio();
- *
- * // play a chord: multiple notes at the same time
- * for (let i = 0; i < 4; i++) {
- * let note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
- * }
- *
- * function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
- * }
- *
- */
-
-
- p5.PolySynth.prototype.noteAttack = function (_note, _velocity, secondsFromNow) {
- var secondsFromNow = secondsFromNow || 0; // keep fractional times; ~~ would truncate them
-
- var acTime = p5sound.audiocontext.currentTime + secondsFromNow;
-
- var note = noteToFreq(_note);
- var velocity = _velocity || 0.1;
- var currentVoice;
-
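- // if this pitch is already sounding, release it first so that voices
- // are not stacked on the same note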
- if (this.notes[note] && this.notes[note].getValueAtTime(acTime) !== null) {
- this.noteRelease(note, 0);
- }
-
-
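- // voice allocation: use the next free voice if one is available;
- // otherwise steal the oldest voice, releasing its note first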
- if (this._voicesInUse.getValueAtTime(acTime) < this.maxVoices) {
- currentVoice = Math.max(~~this._voicesInUse.getValueAtTime(acTime), 0);
- }
- else {
- currentVoice = this._oldest;
- var oldestNote = p5.prototype.freqToMidi(this.audiovoices[this._oldest].oscillator.freq().value);
- this.noteRelease(oldestNote);
- this._oldest = (this._oldest + 1) % this.maxVoices; // cycle through all voices, not maxVoices - 1
- }
-
-
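- // bookkeeping: remember which voice plays this note, then increment the
- // voice count at acTime (and in all later scheduled values, via _updateAfter)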
- this.notes[note] = new TimelineSignal();
- this.notes[note].setValueAtTime(currentVoice, acTime);
-
- var previousVal = this._voicesInUse._searchBefore(acTime) === null ? 0 : this._voicesInUse._searchBefore(acTime).value;
-
- this._voicesInUse.setValueAtTime(previousVal + 1, acTime);
-
-
- this._updateAfter(acTime, 1);
-
- this._newest = currentVoice;
-
- if (typeof velocity === 'number') {
- var maxRange = 1 / this._voicesInUse.getValueAtTime(acTime) * 2;
- velocity = velocity > maxRange ? maxRange : velocity;
- }
-
- this.audiovoices[currentVoice].triggerAttack(note, velocity, secondsFromNow);
- };
- /**
- * Private method to ensure accurate values of this._voicesInUse
- * Any time a new value is scheduled, it is necessary to increment all subsequent
- * scheduledValues after attack, and decrement all subsequent
- * scheduledValues after release
- *
- * @private
- * @for p5.PolySynth
- * @param {Number} time audio-context time at which to start updating
- * @param {Number} value amount to add to each subsequent scheduled value (+1 or -1)
- */
-
-
- p5.PolySynth.prototype._updateAfter = function (time, value) {
- if (this._voicesInUse._searchAfter(time) === null) {
- return;
- } else {
- this._voicesInUse._searchAfter(time).value += value;
-
- var nextTime = this._voicesInUse._searchAfter(time).time;
-
- this._updateAfter(nextTime, value);
- }
- };
- /**
- * Trigger the Release of an AudioVoice note. This is similar to releasing
- * the key on a piano and letting the sound fade according to the
- * release level and release time.
- *
- * @method noteRelease
- * @for p5.PolySynth
- * @param {Number} [note] midi note on which the release should be triggered.
- * If no value is provided, all notes will be released.
- * @param {Number} [secondsFromNow] time to trigger the release
- * @example
- *
- * let polySynth = new p5.PolySynth();
- * let pitches = ['G', 'D', 'G', 'C'];
- * let octaves = [2, 3, 4];
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playChord);
- * background(220);
- * text('tap to play', 20, 20);
- * }
- *
- * function playChord() {
- * userStartAudio();
- *
- * // play a chord: multiple notes at the same time
- * for (let i = 0; i < 4; i++) {
- * let note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
- * }
- *
- * function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
- * }
- *
- *
- */
-
-
- p5.PolySynth.prototype.noteRelease = function (_note, secondsFromNow) {
- var now = p5sound.audiocontext.currentTime;
- var tFromNow = secondsFromNow || 0;
- var t = now + tFromNow;
-
- if (!_note) {
- this.audiovoices.forEach(function (voice) {
- voice.triggerRelease(tFromNow);
- });
-
- this._voicesInUse.setValueAtTime(0, t);
-
- for (var n in this.notes) {
- this.notes[n].dispose();
- delete this.notes[n];
- }
-
- return;
- }
-
-
- var note = noteToFreq(_note);
-
- if (!this.notes[note] || this.notes[note].getValueAtTime(t) === null) {
- console.warn('Cannot release a note that is not already playing');
- } else {
- var previousVal = Math.max(~~this._voicesInUse.getValueAtTime(t), 1); // getValueAtTime returns a number
-
- this._voicesInUse.setValueAtTime(previousVal - 1, t);
-
-
- if (previousVal > 0) {
- this._updateAfter(t, -1);
- }
-
- this.audiovoices[this.notes[note].getValueAtTime(t)].triggerRelease(tFromNow);
- this.notes[note].dispose();
- delete this.notes[note];
- this._newest = this._newest === 0 ? 0 : (this._newest - 1) % this.maxVoices;
- }
- };
- /**
- * Connect to a p5.sound / Web Audio object.
- *
- * @method connect
- * @for p5.PolySynth
- * @param {Object} unit A p5.sound or Web Audio object
- */
-
-
- p5.PolySynth.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
- };
- /**
- * Disconnect all outputs
- *
- * @method disconnect
- * @for p5.PolySynth
- */
-
-
- p5.PolySynth.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
- };
- /**
- * Get rid of the PolySynth and free up its resources / memory.
- *
- * @method dispose
- * @for p5.PolySynth
- */
-
-
- p5.PolySynth.prototype.dispose = function () {
- this.audiovoices.forEach(function (voice) {
- voice.dispose();
- });
-
- if (this.output) {
- this.output.disconnect();
- delete this.output;
- }
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- __webpack_require__(32);
-
- __webpack_require__(33);
-
- __webpack_require__(17);
-
- var p5SOUND = __webpack_require__(1);
-
- __webpack_require__(5);
-
- __webpack_require__(11);
-
- __webpack_require__(36);
-
- __webpack_require__(40);
-
- __webpack_require__(41);
-
- __webpack_require__(42);
-
- __webpack_require__(43);
-
- __webpack_require__(44);
-
- __webpack_require__(23);
-
- __webpack_require__(47);
-
- __webpack_require__(48);
-
- __webpack_require__(49);
-
- __webpack_require__(50);
-
- __webpack_require__(15);
-
- __webpack_require__(59);
-
- __webpack_require__(61);
-
- __webpack_require__(62);
-
- __webpack_require__(63);
-
- __webpack_require__(64);
-
- __webpack_require__(65);
-
- __webpack_require__(67);
-
- __webpack_require__(68);
-
- __webpack_require__(69);
-
- __webpack_require__(70);
-
- __webpack_require__(71);
-
- __webpack_require__(72);
-
- __webpack_require__(28);
-
- __webpack_require__(30);
-
- __webpack_require__(73);
-
- __webpack_require__(29);
-
- __webpack_require__(28);
-
- __webpack_require__(30);
-
- return p5SOUND;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports) {
-
-!function(){var l,s=[];function p(e){var o=this,n={},i=-1;this.parameters.forEach(function(e,t){var r=s[++i]||(s[i]=new Float32Array(o.bufferSize));r.fill(e.value),n[t]=r}),this.processor.realm.exec("self.sampleRate=sampleRate="+this.context.sampleRate+";self.currentTime=currentTime="+this.context.currentTime);var t=a(e.inputBuffer),r=a(e.outputBuffer);this.instance.process([t],[r],n)}function a(e){for(var t=[],r=0;r= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar RecorderProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(RecorderProcessor, _AudioWorkletProcesso);\n\n function RecorderProcessor(options) {\n var _this;\n\n _classCallCheck(this, RecorderProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(RecorderProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.numOutputChannels = options.outputChannelCount || 2;\n _this.numInputChannels = processorOptions.numInputChannels || 2;\n _this.bufferSize = processorOptions.bufferSize || 1024;\n _this.recording = false;\n\n _this.clear();\n\n _this.port.onmessage = function (event) {\n var data = event.data;\n\n if (data.name === 'start') {\n _this.record(data.duration);\n } else if (data.name === 'stop') {\n _this.stop();\n }\n };\n\n return _this;\n }\n\n _createClass(RecorderProcessor, [{\n key: \"process\",\n value: function process(inputs) {\n if (!this.recording) {\n return true;\n } else if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {\n this.stop();\n return true;\n }\n\n var input = inputs[0];\n this.inputRingBuffer.push(input);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n\n for (var channel = 0; channel < this.numOutputChannels; ++channel) {\n var inputChannelCopy = this.inputRingBufferArraySequence[channel].slice();\n\n if (channel === 0) {\n this.leftBuffers.push(inputChannelCopy);\n\n if (this.numInputChannels === 1) {\n this.rightBuffers.push(inputChannelCopy);\n }\n } else if (channel === 1 && this.numInputChannels > 1) {\n this.rightBuffers.push(inputChannelCopy);\n }\n }\n\n 
this.recordedSamples += this.bufferSize;\n }\n\n return true;\n }\n }, {\n key: \"record\",\n value: function record(duration) {\n if (duration) {\n this.sampleLimit = Math.round(duration * sampleRate);\n }\n\n this.recording = true;\n }\n }, {\n key: \"stop\",\n value: function stop() {\n this.recording = false;\n var buffers = this.getBuffers();\n var leftBuffer = buffers[0].buffer;\n var rightBuffer = buffers[1].buffer;\n this.port.postMessage({\n name: 'buffers',\n leftBuffer: leftBuffer,\n rightBuffer: rightBuffer\n }, [leftBuffer, rightBuffer]);\n this.clear();\n }\n }, {\n key: \"getBuffers\",\n value: function getBuffers() {\n var buffers = [];\n buffers.push(this.mergeBuffers(this.leftBuffers));\n buffers.push(this.mergeBuffers(this.rightBuffers));\n return buffers;\n }\n }, {\n key: \"mergeBuffers\",\n value: function mergeBuffers(channelBuffer) {\n var result = new Float32Array(this.recordedSamples);\n var offset = 0;\n var lng = channelBuffer.length;\n\n for (var i = 0; i < lng; i++) {\n var buffer = channelBuffer[i];\n result.set(buffer, offset);\n offset += buffer.length;\n }\n\n return result;\n }\n }, {\n key: \"clear\",\n value: function clear() {\n var _this2 = this;\n\n this.leftBuffers = [];\n this.rightBuffers = [];\n this.inputRingBuffer = new RingBuffer(this.bufferSize, this.numInputChannels);\n this.inputRingBufferArraySequence = new Array(this.numInputChannels).fill(null).map(function () {\n return new Float32Array(_this2.bufferSize);\n });\n this.recordedSamples = 0;\n this.sampleLimit = null;\n }\n }]);\n\n return RecorderProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.recorderProcessor, RecorderProcessor);");
-
- }),
- (function(module, __webpack_exports__, __webpack_require__) {
-
-"use strict";
-__webpack_require__.r(__webpack_exports__);
- __webpack_exports__["default"] = ("function _typeof(obj) { if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _wrapNativeSuper(Class) { var _cache = typeof Map === \"function\" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== \"function\") { throw new TypeError(\"Super expression must either be null or a function\"); } if (typeof _cache !== \"undefined\") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }\n\nfunction isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _construct(Parent, args, Class) { if (isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }\n\nfunction _isNativeFunction(fn) { return Function.toString.call(fn).indexOf(\"[native code]\") !== -1; }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\n// import dependencies via preval.require so that they're available as values at compile time\nvar processorNames = {\n \"recorderProcessor\": \"recorder-processor\",\n \"soundFileProcessor\": \"sound-file-processor\",\n \"amplitudeProcessor\": \"amplitude-processor\"\n};\nvar RingBuffer = {\n \"default\":\n /*#__PURE__*/\n function () {\n /**\n * @constructor\n * @param {number} length Buffer length in frames.\n * @param {number} channelCount Buffer channel count.\n */\n function RingBuffer(length, channelCount) {\n _classCallCheck(this, RingBuffer);\n\n this._readIndex = 0;\n this._writeIndex = 0;\n this._framesAvailable = 0;\n this._channelCount = channelCount;\n this._length = length;\n this._channelData = [];\n\n for (var i = 0; i < this._channelCount; ++i) {\n this._channelData[i] = new Float32Array(length);\n }\n }\n /**\n * Getter for Available frames in buffer.\n *\n * @return {number} Available frames in buffer.\n */\n\n\n _createClass(RingBuffer, [{\n key: \"push\",\n\n /**\n * Push a sequence of Float32Arrays to buffer.\n *\n * @param {array} arraySequence A sequence of Float32Arrays.\n */\n value: function push(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // Transfer data from the |arraySequence| storage to the internal buffer.\n var sourceLength = arraySequence[0].length;\n\n for (var i = 0; i < sourceLength; ++i) {\n var writeIndex = (this._writeIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n this._channelData[channel][writeIndex] = arraySequence[channel][i];\n }\n }\n\n this._writeIndex += sourceLength;\n\n if (this._writeIndex >= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = 
this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar SoundFileProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(SoundFileProcessor, _AudioWorkletProcesso);\n\n function SoundFileProcessor(options) {\n var _this;\n\n _classCallCheck(this, SoundFileProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(SoundFileProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.bufferSize = processorOptions.bufferSize || 256;\n _this.inputRingBuffer = new RingBuffer(_this.bufferSize, 1);\n _this.inputRingBufferArraySequence = [new Float32Array(_this.bufferSize)];\n return _this;\n }\n\n _createClass(SoundFileProcessor, [{\n key: \"process\",\n value: function process(inputs) {\n var input = inputs[0]; // we only care about the first input channel, because that contains the position data\n\n this.inputRingBuffer.push([input[0]]);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n var inputChannel = this.inputRingBufferArraySequence[0];\n var position = inputChannel[inputChannel.length - 1] || 0;\n this.port.postMessage({\n name: 'position',\n position: position\n });\n }\n\n return true;\n }\n }]);\n\n return SoundFileProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.soundFileProcessor, SoundFileProcessor);");
-
- }),
- (function(module, __webpack_exports__, __webpack_require__) {
-
-"use strict";
-__webpack_require__.r(__webpack_exports__);
- __webpack_exports__["default"] = ("function _typeof(obj) { if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _wrapNativeSuper(Class) { var _cache = typeof Map === \"function\" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== \"function\") { throw new TypeError(\"Super expression must either be null or a function\"); } if (typeof _cache !== \"undefined\") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }\n\nfunction isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _construct(Parent, args, Class) { if (isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }\n\nfunction _isNativeFunction(fn) { return Function.toString.call(fn).indexOf(\"[native code]\") !== -1; }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\n// import dependencies via preval.require so that they're available as values at compile time\nvar processorNames = {\n \"recorderProcessor\": \"recorder-processor\",\n \"soundFileProcessor\": \"sound-file-processor\",\n \"amplitudeProcessor\": \"amplitude-processor\"\n};\nvar RingBuffer = {\n \"default\":\n /*#__PURE__*/\n function () {\n /**\n * @constructor\n * @param {number} length Buffer length in frames.\n * @param {number} channelCount Buffer channel count.\n */\n function RingBuffer(length, channelCount) {\n _classCallCheck(this, RingBuffer);\n\n this._readIndex = 0;\n this._writeIndex = 0;\n this._framesAvailable = 0;\n this._channelCount = channelCount;\n this._length = length;\n this._channelData = [];\n\n for (var i = 0; i < this._channelCount; ++i) {\n this._channelData[i] = new Float32Array(length);\n }\n }\n /**\n * Getter for Available frames in buffer.\n *\n * @return {number} Available frames in buffer.\n */\n\n\n _createClass(RingBuffer, [{\n key: \"push\",\n\n /**\n * Push a sequence of Float32Arrays to buffer.\n *\n * @param {array} arraySequence A sequence of Float32Arrays.\n */\n value: function push(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // Transfer data from the |arraySequence| storage to the internal buffer.\n var sourceLength = arraySequence[0].length;\n\n for (var i = 0; i < sourceLength; ++i) {\n var writeIndex = (this._writeIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n this._channelData[channel][writeIndex] = arraySequence[channel][i];\n }\n }\n\n this._writeIndex += sourceLength;\n\n if (this._writeIndex >= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = 
this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar AmplitudeProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(AmplitudeProcessor, _AudioWorkletProcesso);\n\n function AmplitudeProcessor(options) {\n var _this;\n\n _classCallCheck(this, AmplitudeProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(AmplitudeProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.numOutputChannels = options.outputChannelCount || 1;\n _this.numInputChannels = processorOptions.numInputChannels || 2;\n _this.normalize = processorOptions.normalize || false;\n _this.smoothing = processorOptions.smoothing || 0;\n _this.bufferSize = processorOptions.bufferSize || 2048;\n _this.inputRingBuffer = new RingBuffer(_this.bufferSize, _this.numInputChannels);\n _this.outputRingBuffer = new RingBuffer(_this.bufferSize, _this.numOutputChannels);\n _this.inputRingBufferArraySequence = new Array(_this.numInputChannels).fill(null).map(function () {\n return new Float32Array(_this.bufferSize);\n });\n _this.stereoVol = [0, 0];\n _this.stereoVolNorm = [0, 0];\n _this.volMax = 0.001;\n\n _this.port.onmessage = function (event) {\n var data = event.data;\n\n if (data.name === 'toggleNormalize') {\n _this.normalize = data.normalize;\n } else if (data.name === 'smoothing') {\n _this.smoothing = Math.max(0, Math.min(1, data.smoothing));\n }\n };\n\n return _this;\n } // TO DO make this stereo / dependent on # of audio channels\n\n\n _createClass(AmplitudeProcessor, [{\n key: \"process\",\n value: function process(inputs, outputs) {\n var input = inputs[0];\n var output = outputs[0];\n var smoothing = this.smoothing;\n this.inputRingBuffer.push(input);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n\n for (var channel = 0; channel < this.numInputChannels; ++channel) {\n var inputBuffer = this.inputRingBufferArraySequence[channel];\n var bufLength = inputBuffer.length;\n var sum = 0;\n\n for (var i = 0; i < bufLength; i++) {\n var x = inputBuffer[i];\n\n if (this.normalize) {\n sum += Math.max(Math.min(x / this.volMax, 1), -1) * Math.max(Math.min(x / this.volMax, 1), -1);\n } else {\n sum += x * x;\n }\n } // ... 
then take the square root of the sum.\n\n\n var rms = Math.sqrt(sum / bufLength);\n this.stereoVol[channel] = Math.max(rms, this.stereoVol[channel] * smoothing);\n this.volMax = Math.max(this.stereoVol[channel], this.volMax);\n } // calculate stero normalized volume and add volume from all channels together\n\n\n var volSum = 0;\n\n for (var index = 0; index < this.stereoVol.length; index++) {\n this.stereoVolNorm[index] = Math.max(Math.min(this.stereoVol[index] / this.volMax, 1), 0);\n volSum += this.stereoVol[index];\n } // volume is average of channels\n\n\n var volume = volSum / this.stereoVol.length; // normalized value\n\n var volNorm = Math.max(Math.min(volume / this.volMax, 1), 0);\n this.port.postMessage({\n name: 'amplitude',\n volume: volume,\n volNorm: volNorm,\n stereoVol: this.stereoVol,\n stereoVolNorm: this.stereoVolNorm\n }); // pass input through to output\n\n this.outputRingBuffer.push(this.inputRingBufferArraySequence);\n } // pull 128 frames out of the ring buffer\n // if the ring buffer does not have enough frames, the output will be silent\n\n\n this.outputRingBuffer.pull(output);\n return true;\n }\n }]);\n\n return AmplitudeProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.amplitudeProcessor, AmplitudeProcessor);");
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var ac = p5sound.audiocontext;
-
- if (typeof ac.createStereoPanner !== 'undefined') {
- p5.Panner = function (input, output) {
- this.stereoPanner = this.input = ac.createStereoPanner();
- input.connect(this.stereoPanner);
- this.stereoPanner.connect(output);
- };
-
- p5.Panner.prototype.pan = function (val, tFromNow) {
- var time = tFromNow || 0;
- var t = ac.currentTime + time;
- this.stereoPanner.pan.linearRampToValueAtTime(val, t);
- };
-
-
- p5.Panner.prototype.inputChannels = function () {};
-
- p5.Panner.prototype.connect = function (obj) {
- this.stereoPanner.connect(obj);
- };
-
- p5.Panner.prototype.disconnect = function () {
- if (this.stereoPanner) {
- this.stereoPanner.disconnect();
- }
- };
- } else {
- p5.Panner = function (input, output, numInputChannels) {
- this.input = ac.createGain();
- input.connect(this.input);
- this.left = ac.createGain();
- this.right = ac.createGain();
- this.left.channelInterpretation = 'discrete';
- this.right.channelInterpretation = 'discrete';
-
- if (numInputChannels > 1) {
- this.splitter = ac.createChannelSplitter(2);
- this.input.connect(this.splitter);
- this.splitter.connect(this.left, 1);
- this.splitter.connect(this.right, 0);
- } else {
- this.input.connect(this.left);
- this.input.connect(this.right);
- }
-
- this.output = ac.createChannelMerger(2);
- this.left.connect(this.output, 0, 1);
- this.right.connect(this.output, 0, 0);
- this.output.connect(output);
- };
-
-
- p5.Panner.prototype.pan = function (val, tFromNow) {
- var time = tFromNow || 0;
- var t = ac.currentTime + time;
- var v = (val + 1) / 2;
- var rightVal = Math.cos(v * Math.PI / 2);
- var leftVal = Math.sin(v * Math.PI / 2);
- this.left.gain.linearRampToValueAtTime(leftVal, t);
- this.right.gain.linearRampToValueAtTime(rightVal, t);
- };
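-
- // Worked example of the equal-power law above: at center (val = 0), v = 0.5,
- // so both gains equal cos(PI/4) = sin(PI/4) ~= 0.707, and the summed power
- // (0.707^2 + 0.707^2 = 1) matches a full-scale signal panned hard to one side.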
-
- p5.Panner.prototype.inputChannels = function (numChannels) {
- if (numChannels === 1) {
- this.input.disconnect();
- this.input.connect(this.left);
- this.input.connect(this.right);
- } else if (numChannels === 2) {
- if (_typeof(this.splitter) === 'undefined') {
- this.splitter = ac.createChannelSplitter(2);
- }
-
- this.input.disconnect();
- this.input.connect(this.splitter);
- this.splitter.connect(this.left, 1);
- this.splitter.connect(this.right, 0);
- }
- };
-
- p5.Panner.prototype.connect = function (obj) {
- this.output.connect(obj);
- };
-
- p5.Panner.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
- };
- }
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var CustomError = __webpack_require__(11);
-
- var p5sound = __webpack_require__(1);
-
- var ac = p5sound.audiocontext;
-
- var _require = __webpack_require__(5),
- midiToFreq = _require.midiToFreq,
- convertToWav = _require.convertToWav,
- safeBufferSize = _require.safeBufferSize;
-
- var processorNames = __webpack_require__(10);
- /**
- * SoundFile object with a path to a file.
- *
- * The p5.SoundFile may not be available immediately because
- * it loads the file information asynchronously.
- *
- * To do something with the sound as soon as it loads,
- * pass the name of a function as the second parameter.
- *
- * Only one file path is required. However, audio file formats
- * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
- * web browsers. If you want to ensure compatibility, instead of a single
- * file path, you may include an Array of filepaths, and the browser will
- * choose a format that works.
- *
- * @class p5.SoundFile
- * @constructor
- * @param {String|Array} path path to a sound file (String). Optionally,
- * you may include multiple file formats in
- * an array. Alternately, accepts an object
- * from the HTML5 File API, or a p5.File.
- * @param {Function} [successCallback] Name of a function to call once file loads
- * @param {Function} [errorCallback] Name of a function to call if file fails to
- * load. This function will receive an error or
- * XMLHttpRequest object with information
- * about what went wrong.
- * @param {Function} [whileLoadingCallback] Name of a function to call while file
- * is loading. That function will
- * receive progress of the request to
- * load the sound file
- * (between 0 and 1) as its first
- * parameter. This progress
- * does not account for the additional
- * time needed to decode the audio data.
- *
- * @example
- *
- * let mySound;
- * function preload() {
- * soundFormats('mp3', 'ogg');
- * mySound = loadSound('assets/doorbell');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * background(220);
- * text('tap here to play', 10, 20);
- * }
- *
- * function canvasPressed() {
- * // playing a sound file on a user gesture
- * // is equivalent to `userStartAudio()`
- * mySound.play();
- * }
- *
- */
-
-
- p5.SoundFile = function (paths, onload, onerror, whileLoading) {
- if (typeof paths !== 'undefined') {
- if (typeof paths === 'string' || typeof paths[0] === 'string') {
- var path = p5.prototype._checkFileFormats(paths);
-
- this.url = path;
- } else if (_typeof(paths) === 'object') {
- if (!(window.File && window.FileReader && window.FileList && window.Blob)) {
- throw new Error('Unable to load file because the File API is not supported');
- }
- }
-
-
- if (paths.file) {
- paths = paths.file;
- }
-
- this.file = paths;
- }
-
-
- this._onended = function () {};
-
- this._looping = false;
- this._playing = false;
- this._paused = false;
- this._pauseTime = 0;
-
- this._cues = [];
- this._cueIDCounter = 0;
-
- this._lastPos = 0;
- this._counterNode = null;
- this._workletNode = null;
-
- this.bufferSourceNodes = [];
-
- this.bufferSourceNode = null;
- this.buffer = null;
- this.playbackRate = 1;
- this.input = p5sound.audiocontext.createGain();
- this.output = p5sound.audiocontext.createGain();
- this.reversed = false;
-
- this.startTime = 0;
- this.endTime = null;
- this.pauseTime = 0;
-
- this.mode = 'sustain';
-
- this.startMillis = null;
-
- this.panPosition = 0.0;
- this.panner = new p5.Panner(this.output, p5sound.input, 2);
-
- if (this.url || this.file) {
- this.load(onload, onerror);
- }
-
-
- p5sound.soundArray.push(this);
-
- if (typeof whileLoading === 'function') {
- this._whileLoading = whileLoading;
- } else {
- this._whileLoading = function () {};
- }
-
- this._clearOnEnd = _clearOnEnd.bind(this);
- };
-
-
- p5.prototype.registerPreloadMethod('loadSound', p5.prototype);
- /**
- * loadSound() returns a new p5.SoundFile from a specified
- * path. If called during preload(), the p5.SoundFile will be ready
- * to play in time for setup() and draw(). If called outside of
- * preload, the p5.SoundFile will not be ready immediately, so
- * loadSound accepts a callback as the second parameter. Using a
- *
- * local server is recommended when loading external files.
- *
- * @method loadSound
- * @for p5
- * @param {String|Array} path Path to the sound file, or an array with
- * paths to soundfiles in multiple formats
- * i.e. ['sound.ogg', 'sound.mp3'].
- * Alternately, accepts an object: either
- * from the HTML5 File API, or a p5.File.
- * @param {Function} [successCallback] Name of a function to call once file loads
- * @param {Function} [errorCallback] Name of a function to call if there is
- * an error loading the file.
- * @param {Function} [whileLoading] Name of a function to call while file is loading.
- * This function will receive the percentage loaded
- * so far, from 0.0 to 1.0.
- * @return {SoundFile} Returns a p5.SoundFile
- * @example
- *
- * let mySound;
- * function preload() {
- * soundFormats('mp3', 'ogg');
- * mySound = loadSound('assets/doorbell');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * background(220);
- * text('tap here to play', 10, 20);
- * }
- *
- * function canvasPressed() {
- * // playing a sound file on a user gesture
- * // is equivalent to `userStartAudio()`
- * mySound.play();
- * }
- *
- */
-
- p5.prototype.loadSound = function (path, callback, onerror, whileLoading) {
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- window.alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
-
- var self = this;
- var s = new p5.SoundFile(path, function () {
- if (typeof callback === 'function') {
- callback.apply(self, arguments);
- }
-
- if (typeof self._decrementPreload === 'function') {
- self._decrementPreload();
- }
- }, onerror, whileLoading);
- return s;
- };
- /**
- * This is a helper function that the p5.SoundFile calls to load
- * itself. Accepts a callback (the name of another function)
- * as an optional parameter.
- *
- * @private
- * @for p5.SoundFile
- * @param {Function} [successCallback] Name of a function to call once file loads
- * @param {Function} [errorCallback] Name of a function to call if there is an error
- */
-
-
- p5.SoundFile.prototype.load = function (callback, errorCallback) {
- var self = this;
- var errorTrace = new Error().stack;
-
- if (this.url !== undefined && this.url !== '') {
- var request = new XMLHttpRequest();
- request.addEventListener('progress', function (evt) {
- self._updateProgress(evt);
- }, false);
- request.open('GET', this.url, true);
- request.responseType = 'arraybuffer';
-
- request.onload = function () {
- if (request.status === 200) {
- if (!self.panner) return;
- ac.decodeAudioData(request.response,
- function (buff) {
- if (!self.panner) return;
- self.buffer = buff;
- self.panner.inputChannels(buff.numberOfChannels);
-
- if (callback) {
- callback(self);
- }
- },
- function () {
- if (!self.panner) return;
- var err = new CustomError('decodeAudioData', errorTrace, self.url);
- var msg = 'AudioContext error at decodeAudioData for ' + self.url;
-
- if (errorCallback) {
- err.msg = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- });
- }
- else {
- if (!self.panner) return;
- var err = new CustomError('loadSound', errorTrace, self.url);
- var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
-
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- }
- };
-
-
- request.onerror = function () {
- var err = new CustomError('loadSound', errorTrace, self.url);
- var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
-
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- };
-
- request.send();
- } else if (this.file !== undefined) {
- var reader = new FileReader();
-
- reader.onload = function () {
- if (!self.panner) return;
- ac.decodeAudioData(reader.result, function (buff) {
- if (!self.panner) return;
- self.buffer = buff;
- self.panner.inputChannels(buff.numberOfChannels);
-
- if (callback) {
- callback(self);
- }
- });
- };
-
- reader.onerror = function (e) {
- if (!self.panner) return;
-
- if (onerror) {
- onerror(e);
- }
- };
-
- reader.readAsArrayBuffer(this.file);
- }
- };
-
-
- p5.SoundFile.prototype._updateProgress = function (evt) {
- if (evt.lengthComputable) {
- var percentComplete = evt.loaded / evt.total * 0.99;
-
- this._whileLoading(percentComplete, evt);
-
- } else {
- this._whileLoading('size unknown');
- }
- };
- /**
- * Returns true if the sound file finished loading successfully.
- *
- * @method isLoaded
- * @for p5.SoundFile
- * @return {Boolean}
- */
-
-
- p5.SoundFile.prototype.isLoaded = function () {
- if (this.buffer) {
- return true;
- } else {
- return false;
- }
- };
- /**
- * Play the p5.SoundFile
- *
- * @method play
- * @for p5.SoundFile
- * @param {Number} [startTime] (optional) schedule playback to start (in seconds from now).
- * @param {Number} [rate] (optional) playback rate
- * @param {Number} [amp] (optional) amplitude (volume)
- * of playback
- * @param {Number} [cueStart] (optional) cue start time in seconds
- * @param {Number} [duration] (optional) duration of playback in seconds
- */
-
-
- p5.SoundFile.prototype.play = function (startTime, rate, amp, _cueStart, duration) {
- if (!this.output) {
- console.warn('SoundFile.play() called after dispose');
- return;
- }
-
- var now = p5sound.audiocontext.currentTime;
- var cueStart, cueEnd;
- var time = startTime || 0;
-
- if (time < 0) {
- time = 0;
- }
-
- time = time + now;
-
- if (typeof rate !== 'undefined') {
- this.rate(rate);
- }
-
- if (typeof amp !== 'undefined') {
- this.setVolume(amp);
- }
-
-
- if (this.buffer) {
- this._pauseTime = 0;
-
- if (this.mode === 'restart' && this.buffer && this.bufferSourceNode) {
- this.bufferSourceNode.stop(time);
-
- this._counterNode.stop(time);
- }
-
-
- if (this.mode === 'untildone' && this.isPlaying()) {
- return;
- }
-
-
- this.bufferSourceNode = this._initSourceNode();
-
- delete this._counterNode;
- this._counterNode = this._initCounterNode();
-
- if (_cueStart) {
- if (_cueStart >= 0 && _cueStart < this.buffer.duration) {
- cueStart = _cueStart;
- } else {
- throw 'start time out of range';
- }
- } else {
- cueStart = 0;
- }
-
- if (duration) {
- duration = duration <= this.buffer.duration - cueStart ? duration : this.buffer.duration;
- }
-
-
- if (this._paused) {
- this.bufferSourceNode.start(time, this.pauseTime, duration);
-
- this._counterNode.start(time, this.pauseTime, duration);
- } else {
- this.bufferSourceNode.start(time, cueStart, duration);
-
- this._counterNode.start(time, cueStart, duration);
- }
-
- this._playing = true;
- this._paused = false;
-
- this.bufferSourceNodes.push(this.bufferSourceNode);
- this.bufferSourceNode._arrayIndex = this.bufferSourceNodes.length - 1;
- this.bufferSourceNode.addEventListener('ended', this._clearOnEnd);
- }
- else {
- throw 'not ready to play file, buffer has yet to load. Try preload()';
- }
-
-
- this.bufferSourceNode.loop = this._looping;
- this._counterNode.loop = this._looping;
-
- if (this._looping === true) {
- cueEnd = duration ? duration : cueStart - 0.000000000000001;
- this.bufferSourceNode.loopStart = cueStart;
- this.bufferSourceNode.loopEnd = cueEnd;
- this._counterNode.loopStart = cueStart;
- this._counterNode.loopEnd = cueEnd;
- }
- };
- /**
- * p5.SoundFile has two play modes: restart and
- * sustain. Play Mode determines what happens to a
- * p5.SoundFile if it is triggered while in the middle of playback.
- * In sustain mode, playback will continue simultaneous to the
- * new playback. In restart mode, play() will stop playback
- * and start over. With untilDone, a sound will play only if it's
- * not already playing. Sustain is the default mode.
- *
- * @method playMode
- * @for p5.SoundFile
- * @param {String} str 'restart' or 'sustain' or 'untilDone'
- * @example
- *
- */
-
-
- p5.SoundFile.prototype.playMode = function (str) {
- var s = str.toLowerCase();
-
- if (s === 'restart' && this.buffer && this.bufferSourceNode) {
- for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) {
- var now = p5sound.audiocontext.currentTime;
- this.bufferSourceNodes[i].stop(now);
- }
- }
-
-
- if (s === 'restart' || s === 'sustain' || s === 'untildone') {
- this.mode = s;
- } else {
- throw 'Invalid play mode. Must be either "restart" or "sustain"';
- }
- };
- /**
- * Pauses a file that is currently playing. If the file is not
- * playing, then nothing will happen.
- *
- * After pausing, .play() will resume from the paused
- * position.
- * If p5.SoundFile had been set to loop before it was paused,
- * it will continue to loop after it is unpaused with .play().
- *
- * @method pause
- * @for p5.SoundFile
- * @param {Number} [startTime] (optional) schedule event to occur
- * seconds from now
- * @example
- *
- */
-
-
- p5.SoundFile.prototype.loop = function (startTime, rate, amp, loopStart, duration) {
- this._looping = true;
- this.play(startTime, rate, amp, loopStart, duration);
- };
- /**
- * Set a p5.SoundFile's looping flag to true or false. If the sound
- * is currently playing, this change will take effect when it
- * reaches the end of the current playback.
- *
- * @method setLoop
- * @for p5.SoundFile
- * @param {Boolean} Boolean set looping to true or false
- */
-
-
- p5.SoundFile.prototype.setLoop = function (bool) {
- if (bool === true) {
- this._looping = true;
- } else if (bool === false) {
- this._looping = false;
- } else {
- throw 'Error: setLoop accepts either true or false';
- }
-
- if (this.bufferSourceNode) {
- this.bufferSourceNode.loop = this._looping;
- this._counterNode.loop = this._looping;
- }
- };
- /**
- * Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not.
- *
- * @method isLooping
- * @for p5.SoundFile
- * @return {Boolean}
- */
-
-
- p5.SoundFile.prototype.isLooping = function () {
- if (!this.bufferSourceNode) {
- return false;
- }
-
- if (this._looping === true && this.isPlaying() === true) {
- return true;
- }
-
- return false;
- };
- /**
- * Returns true if a p5.SoundFile is playing, false if not (i.e.
- * paused or stopped).
- *
- * @method isPlaying
- * @for p5.SoundFile
- * @return {Boolean}
- */
-
-
- p5.SoundFile.prototype.isPlaying = function () {
- return this._playing;
- };
- /**
- * Returns true if a p5.SoundFile is paused, false if not (i.e.
- * playing or stopped).
- *
- * @method isPaused
- * @for p5.SoundFile
- * @return {Boolean}
- */
-
-
- p5.SoundFile.prototype.isPaused = function () {
- return this._paused;
- };
- /**
- * Stop soundfile playback.
- *
- * @method stop
- * @for p5.SoundFile
- * @param {Number} [startTime] (optional) schedule event to occur
- * in seconds from now
- */
-
-
- p5.SoundFile.prototype.stop = function (timeFromNow) {
- var time = timeFromNow || 0;
-
- if (this.mode === 'sustain' || this.mode === 'untildone') {
- this.stopAll(time);
- this._playing = false;
- this.pauseTime = 0;
- this._paused = false;
- } else if (this.buffer && this.bufferSourceNode) {
- var now = p5sound.audiocontext.currentTime;
- var t = time || 0;
- this.pauseTime = 0;
- this.bufferSourceNode.stop(now + t);
-
- this._counterNode.stop(now + t);
-
- this._playing = false;
- this._paused = false;
- }
- };
- /**
- * Stop playback on all of this soundfile's sources.
- * @private
- */
-
-
- p5.SoundFile.prototype.stopAll = function (_time) {
- var now = p5sound.audiocontext.currentTime;
- var time = _time || 0;
-
- if (this.buffer && this.bufferSourceNode) {
- for (var i in this.bufferSourceNodes) {
- var bufferSourceNode = this.bufferSourceNodes[i];
-
- if (!!bufferSourceNode) {
- try {
- bufferSourceNode.stop(now + time);
- } catch (e) {
- }
- }
- }
-
- this._counterNode.stop(now + time);
-
- this._onended(this);
- }
- };
- /**
- * Multiply the output volume (amplitude) of a sound file
- * between 0.0 (silence) and 1.0 (full volume).
- * 1.0 is the maximum amplitude of a digital sound, so multiplying
- * by greater than 1.0 may cause digital distortion. To
- * fade, provide a rampTime parameter. For more
- * complex fades, see the Envelope class.
- *
- * Alternately, you can pass in a signal source such as an
- * oscillator to modulate the amplitude with an audio signal.
- *
- * @method setVolume
- * @for p5.SoundFile
- * @param {Number|Object} volume Volume (amplitude) between 0.0
- * and 1.0 or modulating signal/oscillator
- * @param {Number} [rampTime] Fade for t seconds
- * @param {Number} [timeFromNow] Schedule this event to happen at
- * t seconds in the future
- */
-
-
- p5.SoundFile.prototype.setVolume = function (vol, _rampTime, _tFromNow) {
- if (typeof vol === 'number') {
- var rampTime = _rampTime || 0;
- var tFromNow = _tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now + tFromNow);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
- } else if (vol) {
- vol.connect(this.output.gain);
- } else {
- return this.output.gain;
- }
- };
-
-
- p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;
-
- p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;
-
- p5.SoundFile.prototype.getVolume = function () {
- return this.output.gain.value;
- };
- /**
- * Set the stereo panning of a p5.sound object to
- * a floating point number between -1.0 (left) and 1.0 (right).
- * Default is 0.0 (center).
- *
- * @method pan
- * @for p5.SoundFile
- * @param {Number} [panValue] Set the stereo panner
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @example
- *
- * let ballX = 0;
- * let soundFile;
- *
- * function preload() {
- * soundFormats('ogg', 'mp3');
- * soundFile = loadSound('assets/beatbox.mp3');
- * }
- *
- * function draw() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * background(220);
- * ballX = constrain(mouseX, 0, width);
- * ellipse(ballX, height/2, 20, 20);
- * }
- *
- * function canvasPressed(){
- * // map the ball's x location to a panning degree
- * // between -1.0 (left) and 1.0 (right)
- * let panning = map(ballX, 0., width,-1.0, 1.0);
- * soundFile.pan(panning);
- * soundFile.play();
- * }
- *
- */
-
-
- p5.SoundFile.prototype.pan = function (pval, tFromNow) {
- this.panPosition = pval;
- this.panner.pan(pval, tFromNow);
- };
- /**
- * Returns the current stereo pan position (-1.0 to 1.0)
- *
- * @method getPan
- * @for p5.SoundFile
- * @return {Number} Returns the stereo pan setting of the Oscillator
- * as a number between -1.0 (left) and 1.0 (right).
- * 0.0 is center and default.
- */
-
-
- p5.SoundFile.prototype.getPan = function () {
- return this.panPosition;
- };
- /**
- * Set the playback rate of a sound file. Will change the speed and the pitch.
- * Values less than zero will reverse the audio buffer.
- *
- * @method rate
- * @for p5.SoundFile
- * @param {Number} [playbackRate] Set the playback rate. 1.0 is normal,
- * .5 is half-speed, 2.0 is twice as fast.
- * Values less than zero play backwards.
- * @example
- *
- * let mySound;
- *
- * function preload() {
- * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * }
- * function canvasPressed() {
- * mySound.loop();
- * }
- * function mouseReleased() {
- * mySound.pause();
- * }
- * function draw() {
- * background(220);
- *
- * // Set the rate to a range between 0.1 and 4
- * // Changing the rate also alters the pitch
- * let playbackRate = map(mouseY, 0.1, height, 2, 0);
- * playbackRate = constrain(playbackRate, 0.01, 4);
- * mySound.rate(playbackRate);
- *
- * line(0, mouseY, width, mouseY);
- * text('rate: ' + round(playbackRate * 100) + '%', 10, 20);
- * }
- *
- *
- *
- *
- */
-
-
- p5.SoundFile.prototype.rate = function (playbackRate) {
- var reverse = false;
-
- if (typeof playbackRate === 'undefined') {
- return this.playbackRate;
- }
-
- this.playbackRate = playbackRate;
-
- if (playbackRate === 0) {
- playbackRate = 0.0000000000001;
- } else if (playbackRate < 0 && !this.reversed) {
- playbackRate = Math.abs(playbackRate);
- reverse = true;
- } else if (playbackRate > 0 && this.reversed) {
- reverse = true;
- }
-
- if (this.bufferSourceNode) {
- var now = p5sound.audiocontext.currentTime;
- this.bufferSourceNode.playbackRate.cancelScheduledValues(now);
- this.bufferSourceNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now);
-
- this._counterNode.playbackRate.cancelScheduledValues(now);
-
- this._counterNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now);
- }
-
- if (reverse) {
- this.reverseBuffer();
- }
-
- return this.playbackRate;
- };
-
-
- p5.SoundFile.prototype.setPitch = function (num) {
- var newPlaybackRate = midiToFreq(num) / midiToFreq(60);
- this.rate(newPlaybackRate);
- };
-
- p5.SoundFile.prototype.getPlaybackRate = function () {
- return this.playbackRate;
- };
- /**
- * Returns the duration of a sound file in seconds.
- *
- * @method duration
- * @for p5.SoundFile
- * @return {Number} The duration of the soundFile in seconds.
- */
-
-
- p5.SoundFile.prototype.duration = function () {
- if (this.buffer) {
- return this.buffer.duration;
- } else {
- return 0;
- }
- };
- /**
- * Return the current position of the p5.SoundFile playhead, in seconds.
- * Time is relative to the normal buffer direction, so if `reverseBuffer`
- * has been called, currentTime will count backwards.
- *
- * @method currentTime
- * @for p5.SoundFile
- * @return {Number} currentTime of the soundFile in seconds.
- */
-
-
- p5.SoundFile.prototype.currentTime = function () {
- return this.reversed ? Math.abs(this._lastPos - this.buffer.length) / ac.sampleRate : this._lastPos / ac.sampleRate;
- };
- /**
- * Move the playhead of a soundfile that is currently playing to a
- * new position and a new duration, in seconds.
- * If none are given, will reset the file to play entire duration
- * from start to finish. To set the position of a soundfile that is
- * not currently playing, use the `play` or `loop` methods.
- *
- * @method jump
- * @for p5.SoundFile
- * @param {Number} cueTime cueTime of the soundFile in seconds.
- * @param {Number} duration duration in seconds.
- */
-
-
- p5.SoundFile.prototype.jump = function (cueTime, duration) {
- if (cueTime < 0 || cueTime > this.buffer.duration) {
- throw 'jump time out of range';
- }
-
- if (duration > this.buffer.duration - cueTime) {
- throw 'end time out of range';
- }
-
- var cTime = cueTime || 0;
- var dur = duration || undefined;
-
- if (this.isPlaying()) {
- this.stop(0);
- this.play(0, this.playbackRate, this.output.gain.value, cTime, dur);
- }
- };
- /**
- * Return the number of channels in a sound file.
- * For example, Mono = 1, Stereo = 2.
- *
- * @method channels
- * @for p5.SoundFile
- * @return {Number} [channels]
- */
-
-
- p5.SoundFile.prototype.channels = function () {
- return this.buffer.numberOfChannels;
- };
- /**
- * Return the sample rate of the sound file.
- *
- * @method sampleRate
- * @for p5.SoundFile
- * @return {Number} [sampleRate]
- */
-
-
- p5.SoundFile.prototype.sampleRate = function () {
- return this.buffer.sampleRate;
- };
- /**
- * Return the number of samples in a sound file.
- * Equal to sampleRate * duration.
- *
- * @method frames
- * @for p5.SoundFile
- * @return {Number} [sampleCount]
- */
-
-
- p5.SoundFile.prototype.frames = function () {
- return this.buffer.length;
- };
- /**
- * Returns an array of amplitude peaks in a p5.SoundFile that can be
- * used to draw a static waveform. Scans through the p5.SoundFile's
- * audio buffer to find the greatest amplitudes. Accepts one
- * parameter, 'length', which determines size of the array.
- * Larger arrays result in more precise waveform visualizations.
- *
- * Inspired by Wavesurfer.js.
- *
- * @method getPeaks
- * @for p5.SoundFile
- * @params {Number} [length] length is the size of the returned array.
- * Larger length results in more precision.
- * Defaults to 5*width of the browser window.
- * @returns {Float32Array} Array of peaks.
- */
-
-
- p5.SoundFile.prototype.getPeaks = function (length) {
- if (this.buffer) {
- if (!length) {
- length = window.width * 5;
- }
-
- if (this.buffer) {
- var buffer = this.buffer;
- var sampleSize = buffer.length / length;
- var sampleStep = ~~(sampleSize / 10) || 1;
- var channels = buffer.numberOfChannels;
- var peaks = new Float32Array(Math.round(length));
-
- for (var c = 0; c < channels; c++) {
- var chan = buffer.getChannelData(c);
-
- for (var i = 0; i < length; i++) {
- var start = ~~(i * sampleSize);
- var end = ~~(start + sampleSize);
- var max = 0;
-
- for (var j = start; j < end; j += sampleStep) {
- var value = chan[j];
-
- if (value > max) {
- max = value;
- } else if (-value > max) {
- max = value;
- }
- }
-
- if (c === 0 || Math.abs(max) > peaks[i]) {
- peaks[i] = max;
- }
- }
- }
-
- return peaks;
- }
- } else {
- throw 'Cannot load peaks yet, buffer is not loaded';
- }
- };
- /**
- * Reverses the p5.SoundFile's buffer source.
- * Playback must be handled separately (see example).
- *
- * @method reverseBuffer
- * @for p5.SoundFile
- * @example
- *
- */
-
-
- p5.SoundFile.prototype.addCue = function (time, callback, val) {
- var id = this._cueIDCounter++;
- var cue = new Cue(callback, time, id, val);
-
- this._cues.push(cue);
-
-
- return id;
- };
- /**
- * Remove a callback based on its ID. The ID is returned by the
- * addCue method.
- *
- * @method removeCue
- * @for p5.SoundFile
- * @param {Number} id ID of the cue, as returned by addCue
- */
-
-
- p5.SoundFile.prototype.removeCue = function (id) {
- var cueLength = this._cues.length;
-
- for (var i = 0; i < cueLength; i++) {
- var cue = this._cues[i];
-
- if (cue.id === id) {
- this._cues.splice(i, 1);
-
- break;
- }
- }
-
- if (this._cues.length === 0) {
- }
- };
- /**
- * Remove all of the callbacks that had originally been scheduled
- * via the addCue method.
- *
- * @method clearCues
- */
-
-
- p5.SoundFile.prototype.clearCues = function () {
- this._cues = [];
- };
-
-
- p5.SoundFile.prototype._onTimeUpdate = function (position) {
- var playbackTime = position / this.buffer.sampleRate;
- var cueLength = this._cues.length;
-
- for (var i = 0; i < cueLength; i++) {
- var cue = this._cues[i];
- var callbackTime = cue.time;
- var val = cue.val;
-
- if (~~this._prevUpdateTime <= callbackTime && callbackTime <= playbackTime) {
- cue.callback(val);
- }
- }
-
- this._prevUpdateTime = playbackTime;
- };
- /**
- * Save a p5.SoundFile as a .wav file. The browser will prompt the user
- * to download the file to their device. To upload a file to a server, see
- * getBlob
- *
- * @method save
- * @for p5.SoundFile
- * @param {String} [fileName] name of the resulting .wav file.
- * @example
- *
- */
-
-
- p5.SoundFile.prototype.save = function (fileName) {
- p5.prototype.saveSound(this, fileName, 'wav');
- };
- /**
- * This method is useful for sending a SoundFile to a server. It returns the
- * .wav-encoded audio data as a "Blob".
- * A Blob is a file-like data object that can be uploaded to a server
- * with an http request. We'll
- * use the `httpDo` options object to send a POST request with some
- * specific options: we encode the request as `multipart/form-data`,
- * and attach the blob as one of the form values using `FormData`.
- *
- *
- * @method getBlob
- * @for p5.SoundFile
- * @returns {Blob} A file-like data object
- * @example
- *
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
- * }
- *
- * function setup() {
- * noCanvas();
- * let soundBlob = mySound.getBlob();
- *
- * // Now we can send the blob to a server...
- * let serverUrl = 'https://jsonplaceholder.typicode.com/posts';
- * let httpRequestOptions = {
- * method: 'POST',
- * body: new FormData().append('soundBlob', soundBlob),
- * headers: new Headers({
- * 'Content-Type': 'multipart/form-data'
- * })
- * };
- * httpDo(serverUrl, httpRequestOptions);
- *
- * // We can also create an `ObjectURL` pointing to the Blob
- * let blobUrl = URL.createObjectURL(soundBlob);
- *
- * // The `
- */
-
-
- p5.SoundFile.prototype.getBlob = function () {
- var dataView = convertToWav(this.buffer);
- return new Blob([dataView], {
- type: 'audio/wav'
- });
- };
-
-
- function _clearOnEnd(e) {
- var thisBufferSourceNode = e.target;
- var soundFile = this;
-
- thisBufferSourceNode._playing = false;
- thisBufferSourceNode.removeEventListener('ended', soundFile._clearOnEnd);
-
- soundFile._onended(soundFile);
-
-
- soundFile.bufferSourceNodes.map(function (_, i) {
- return i;
- }).reverse().forEach(function (i) {
- var n = soundFile.bufferSourceNodes[i];
-
- if (n._playing === false) {
- soundFile.bufferSourceNodes.splice(i, 1);
- }
- });
-
- if (soundFile.bufferSourceNodes.length === 0) {
- soundFile._playing = false;
- }
- }
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var _require = __webpack_require__(5),
- safeBufferSize = _require.safeBufferSize;
-
- var processorNames = __webpack_require__(10);
- /**
- * Amplitude measures volume between 0.0 and 1.0.
- * Listens to all p5sound by default, or use setInput()
- * to listen to a specific sound source. Accepts an optional
- * smoothing value, which defaults to 0.
- *
- * @class p5.Amplitude
- * @constructor
- * @param {Number} [smoothing] between 0.0 and .999 to smooth
- * amplitude readings (defaults to 0)
- * @example
- *
- */
-
-
- p5.Amplitude.prototype.getLevel = function (channel) {
- if (typeof channel !== 'undefined') {
- if (this.normalize) {
- return this.stereoVolNorm[channel];
- } else {
- return this.stereoVol[channel];
- }
- } else if (this.normalize) {
- return this.volNorm;
- } else {
- return this.volume;
- }
- };
- /**
- * Determines whether the results of Amplitude.process() will be
- * Normalized. To normalize, Amplitude finds the difference the
- * loudest reading it has processed and the maximum amplitude of
- * 1.0. Amplitude adds this difference to all values to produce
- * results that will reliably map between 0.0 and 1.0. However,
- * if a louder moment occurs, the amount that Normalize adds to
- * all the values will change. Accepts an optional boolean parameter
- * (true or false). Normalizing is off by default.
- *
- * @method toggleNormalize
- * @for p5.Amplitude
- * @param {boolean} [boolean] set normalize to true (1) or false (0)
- */
-
-
- p5.Amplitude.prototype.toggleNormalize = function (bool) {
- if (typeof bool === 'boolean') {
- this.normalize = bool;
- } else {
- this.normalize = !this.normalize;
- }
-
- this._workletNode.port.postMessage({
- name: 'toggleNormalize',
- normalize: this.normalize
- });
- };
- /**
- * Smooth Amplitude analysis by averaging with the last analysis
- * frame. Off by default.
- *
- * @method smooth
- * @for p5.Amplitude
- * @param {Number} set smoothing from 0.0 <= 1
- */
-
-
- p5.Amplitude.prototype.smooth = function (s) {
- if (s >= 0 && s < 1) {
- this._workletNode.port.postMessage({
- name: 'smoothing',
- smoothing: s
- });
- } else {
- console.log('Error: smoothing must be between 0 and 1');
- }
- };
-
- p5.Amplitude.prototype.dispose = function () {
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
-
- if (this.input) {
- this.input.disconnect();
- delete this.input;
- }
-
- if (this.output) {
- this.output.disconnect();
- delete this.output;
- }
-
- this._workletNode.disconnect();
-
- delete this._workletNode;
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var _require = __webpack_require__(5),
- safeBins = _require.safeBins;
- /**
- *
FFT (Fast Fourier Transform) is an analysis algorithm that
- * isolates individual
- *
- * audio frequencies within a waveform.
- *
- *
Once instantiated, a p5.FFT object can return an array based on
- * two types of analyses: • FFT.waveform() computes
- * amplitude values along the time domain. The array indices correspond
- * to samples across a brief moment in time. Each value represents
- * amplitude of the waveform at that sample of time.
- * • FFT.analyze() computes amplitude values along the
- * frequency domain. The array indices correspond to frequencies (i.e.
- * pitches), from the lowest to the highest that humans can hear. Each
- * value represents amplitude at that slice of the frequency spectrum.
- * Use with getEnergy() to measure amplitude at specific
- * frequencies, or within a range of frequencies.
- *
- *
FFT analyzes a very short snapshot of sound called a sample
- * buffer. It returns an array of amplitude measurements, referred
- * to as bins. The array is 1024 bins long by default.
- * You can change the bin array length, but it must be a power of 2
- * between 16 and 1024 in order for the FFT algorithm to function
- * correctly. The actual size of the FFT buffer is twice the
- * number of bins, so given a standard sample rate, the buffer is
- * 2048/44100 seconds long.
- *
- *
- * @class p5.FFT
- * @constructor
- * @param {Number} [smoothing] Smooth results of Freq Spectrum.
- * 0.0 < smoothing < 1.0.
- * Defaults to 0.8.
- * @param {Number} [bins] Length of resulting array.
- * Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @example
- *
- * function preload(){
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup(){
- * let cnv = createCanvas(100,100);
- * cnv.mouseClicked(togglePlay);
- * fft = new p5.FFT();
- * sound.amp(0.2);
- * }
- *
- * function draw(){
- * background(220);
- *
- * let spectrum = fft.analyze();
- * noStroke();
- * fill(255, 0, 255);
- * for (let i = 0; i< spectrum.length; i++){
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h )
- * }
- *
- * let waveform = fft.waveform();
- * noFill();
- * beginShape();
- * stroke(20);
- * for (let i = 0; i < waveform.length; i++){
- * let x = map(i, 0, waveform.length, 0, width);
- * let y = map( waveform[i], -1, 1, 0, height);
- * vertex(x,y);
- * }
- * endShape();
- *
- * text('tap to play', 20, 20);
- * }
- *
- * function togglePlay() {
- * if (sound.isPlaying()) {
- * sound.pause();
- * } else {
- * sound.loop();
- * }
- * }
- *
- */
-
-
- p5.FFT = function (smoothing, bins) {
- this.input = this.analyser = p5sound.audiocontext.createAnalyser();
- Object.defineProperties(this, {
- bins: {
- get: function get() {
- return this.analyser.fftSize / 2;
- },
- set: function set(b) {
- this.analyser.fftSize = b * 2;
- },
- configurable: true,
- enumerable: true
- },
- smoothing: {
- get: function get() {
- return this.analyser.smoothingTimeConstant;
- },
- set: function set(s) {
- this.analyser.smoothingTimeConstant = s;
- },
- configurable: true,
- enumerable: true
- }
- });
-
- this.smooth(smoothing);
- this.bins = safeBins(bins) || 1024;
-
- p5sound.fftMeter.connect(this.analyser);
- this.freqDomain = new Uint8Array(this.analyser.frequencyBinCount);
- this.timeDomain = new Uint8Array(this.analyser.frequencyBinCount);
-
- this.bass = [20, 140];
- this.lowMid = [140, 400];
- this.mid = [400, 2600];
- this.highMid = [2600, 5200];
- this.treble = [5200, 14000];
-
- p5sound.soundArray.push(this);
- };
- /**
- * Set the input source for the FFT analysis. If no source is
- * provided, FFT will analyze all sound in the sketch.
- *
- * @method setInput
- * @for p5.FFT
- * @param {Object} [source] p5.sound object (or web audio API source node)
- */
-
-
- p5.FFT.prototype.setInput = function (source) {
- if (!source) {
- p5sound.fftMeter.connect(this.analyser);
- } else {
- if (source.output) {
- source.output.connect(this.analyser);
- } else if (source.connect) {
- source.connect(this.analyser);
- }
-
- p5sound.fftMeter.disconnect();
- }
- };
- /**
- * Returns an array of amplitude values (between -1.0 and +1.0) that represent
- * a snapshot of amplitude readings in a single buffer. Length will be
- * equal to bins (defaults to 1024). Can be used to draw the waveform
- * of a sound.
- *
- * @method waveform
- * @for p5.FFT
- * @param {Number} [bins] Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @param {String} [precision] If any value is provided, will return results
- * in a Float32 Array which is more precise
- * than a regular array.
- * @return {Array} Array Array of amplitude values (-1 to 1)
- * over time. Array length = bins.
- *
- */
-
-
- p5.FFT.prototype.waveform = function () {
- var bins, mode, normalArray;
-
- for (var i = 0; i < arguments.length; i++) {
- if (typeof arguments[i] === 'number') {
- bins = arguments[i];
- this.analyser.fftSize = bins * 2;
- }
-
- if (typeof arguments[i] === 'string') {
- mode = arguments[i];
- }
- }
-
-
- if (mode && !p5.prototype._isSafari()) {
- timeToFloat(this, this.timeDomain);
- this.analyser.getFloatTimeDomainData(this.timeDomain);
- return this.timeDomain;
- } else {
- timeToInt(this, this.timeDomain);
- this.analyser.getByteTimeDomainData(this.timeDomain);
- var normalArray = new Array();
-
- for (var j = 0; j < this.timeDomain.length; j++) {
- var scaled = p5.prototype.map(this.timeDomain[j], 0, 255, -1, 1);
- normalArray.push(scaled);
- }
-
- return normalArray;
- }
- };
- /**
- * Returns an array of amplitude values (between 0 and 255)
- * across the frequency spectrum. Length is equal to FFT bins
- * (1024 by default). The array indices correspond to frequencies
- * (i.e. pitches), from the lowest to the highest that humans can
- * hear. Each value represents amplitude at that slice of the
- * frequency spectrum. Must be called prior to using
- * getEnergy().
- *
- * @method analyze
- * @for p5.FFT
- * @param {Number} [bins] Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @param {Number} [scale] If "dB," returns decibel
- * float measurements between
- * -140 and 0 (max).
- * Otherwise returns integers from 0-255.
- * @return {Array} spectrum Array of energy (amplitude/volume)
- * values across the frequency spectrum.
- * Lowest energy (silence) = 0, highest
- * possible is 255.
- * @example
- *
- * let osc, fft;
- *
- * function setup(){
- * let cnv = createCanvas(100,100);
- * cnv.mousePressed(startSound);
- * osc = new p5.Oscillator();
- * osc.amp(0);
- * fft = new p5.FFT();
- * }
- *
- * function draw(){
- * background(220);
- *
- * let freq = map(mouseX, 0, windowWidth, 20, 10000);
- * freq = constrain(freq, 1, 20000);
- * osc.freq(freq);
- *
- * let spectrum = fft.analyze();
- * noStroke();
- * fill(255, 0, 255);
- * for (let i = 0; i< spectrum.length; i++){
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h );
- * }
- *
- * stroke(255);
- * if (!osc.started) {
- * text('tap here and drag to change frequency', 10, 20, width - 20);
- * } else {
- * text(round(freq)+'Hz', 10, 20);
- * }
- * }
- *
- * function startSound() {
- * osc.start();
- * osc.amp(0.5, 0.2);
- * }
- *
- * function mouseReleased() {
- * osc.amp(0, 0.2);
- * }
- *
- *
- *
- */
-
-
- p5.FFT.prototype.analyze = function () {
- var mode;
-
- for (var i = 0; i < arguments.length; i++) {
- if (typeof arguments[i] === 'number') {
- this.bins = arguments[i];
- this.analyser.fftSize = this.bins * 2;
- }
-
- if (typeof arguments[i] === 'string') {
- mode = arguments[i];
- }
- }
-
- if (mode && mode.toLowerCase() === 'db') {
- freqToFloat(this);
- this.analyser.getFloatFrequencyData(this.freqDomain);
- return this.freqDomain;
- } else {
- freqToInt(this, this.freqDomain);
- this.analyser.getByteFrequencyData(this.freqDomain);
- var normalArray = Array.apply([], this.freqDomain);
- return normalArray;
- }
- };
- /**
- * Returns the amount of energy (volume) at a specific
- *
- * frequency, or the average amount of energy between two
- * frequencies. Accepts Number(s) corresponding
- * to frequency (in Hz), or a String corresponding to predefined
- * frequency ranges ("bass", "lowMid", "mid", "highMid", "treble").
- * Returns a range between 0 (no energy/volume at that frequency) and
- * 255 (maximum energy).
- * NOTE: analyze() must be called prior to getEnergy(). Analyze()
- * tells the FFT to analyze frequency data, and getEnergy() uses
- * the results determine the value at a specific frequency or
- * range of frequencies.
- *
- * @method getEnergy
- * @for p5.FFT
- * @param {Number|String} frequency1 Will return a value representing
- * energy at this frequency. Alternately,
- * the strings "bass", "lowMid" "mid",
- * "highMid", and "treble" will return
- * predefined frequency ranges.
- * @param {Number} [frequency2] If a second frequency is given,
- * will return average amount of
- * energy that exists between the
- * two frequencies.
- * @return {Number} Energy Energy (volume/amplitude) from
- * 0 and 255.
- *
- */
-
-
- p5.FFT.prototype.getEnergy = function (frequency1, frequency2) {
- var nyquist = p5sound.audiocontext.sampleRate / 2;
-
- if (frequency1 === 'bass') {
- frequency1 = this.bass[0];
- frequency2 = this.bass[1];
- } else if (frequency1 === 'lowMid') {
- frequency1 = this.lowMid[0];
- frequency2 = this.lowMid[1];
- } else if (frequency1 === 'mid') {
- frequency1 = this.mid[0];
- frequency2 = this.mid[1];
- } else if (frequency1 === 'highMid') {
- frequency1 = this.highMid[0];
- frequency2 = this.highMid[1];
- } else if (frequency1 === 'treble') {
- frequency1 = this.treble[0];
- frequency2 = this.treble[1];
- }
-
- if (typeof frequency1 !== 'number') {
- throw 'invalid input for getEnergy()';
- } else if (!frequency2) {
- var index = Math.round(frequency1 / nyquist * this.freqDomain.length);
- return this.freqDomain[index];
- } else if (frequency1 && frequency2) {
- if (frequency1 > frequency2) {
- var swap = frequency2;
- frequency2 = frequency1;
- frequency1 = swap;
- }
-
- var lowIndex = Math.round(frequency1 / nyquist * this.freqDomain.length);
- var highIndex = Math.round(frequency2 / nyquist * this.freqDomain.length);
- var total = 0;
- var numFrequencies = 0;
-
- for (var i = lowIndex; i <= highIndex; i++) {
- total += this.freqDomain[i];
- numFrequencies += 1;
- }
-
-
- var toReturn = total / numFrequencies;
- return toReturn;
- } else {
- throw 'invalid input for getEnergy()';
- }
- };
-
-
- p5.FFT.prototype.getFreq = function (freq1, freq2) {
- console.log('getFreq() is deprecated. Please use getEnergy() instead.');
- var x = this.getEnergy(freq1, freq2);
- return x;
- };
- /**
- * Returns the
- *
- * spectral centroid of the input signal.
- * NOTE: analyze() must be called prior to getCentroid(). Analyze()
- * tells the FFT to analyze frequency data, and getCentroid() uses
- * the results determine the spectral centroid.
- *
- * @method getCentroid
- * @for p5.FFT
- * @return {Number} Spectral Centroid Frequency of the spectral centroid in Hz.
- *
- *
- * @example
- *
- * function setup(){
- * cnv = createCanvas(100,100);
- * cnv.mousePressed(userStartAudio);
- * sound = new p5.AudioIn();
- * sound.start();
- * fft = new p5.FFT();
- * sound.connect(fft);
- *}
- *
- *function draw() {
- * if (getAudioContext().state !== 'running') {
- * background(220);
- * text('tap here and enable mic to begin', 10, 20, width - 20);
- * return;
- * }
- * let centroidplot = 0.0;
- * let spectralCentroid = 0;
- *
- * background(0);
- * stroke(0,255,0);
- * let spectrum = fft.analyze();
- * fill(0,255,0); // spectrum is green
- *
- * //draw the spectrum
- * for (let i = 0; i < spectrum.length; i++){
- * let x = map(log(i), 0, log(spectrum.length), 0, width);
- * let h = map(spectrum[i], 0, 255, 0, height);
- * let rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
- * rect(x, height, rectangle_width, -h )
- * }
- * let nyquist = 22050;
- *
- * // get the centroid
- * spectralCentroid = fft.getCentroid();
- *
- * // the mean_freq_index calculation is for the display.
- * let mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
- *
- * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
- *
- * stroke(255,0,0); // the line showing where the centroid is will be red
- *
- * rect(centroidplot, 0, width / spectrum.length, height)
- * noStroke();
- * fill(255,255,255); // text is white
- * text('centroid: ', 10, 20);
- * text(round(spectralCentroid)+' Hz', 10, 40);
- *}
- *
- */
-
-
- p5.FFT.prototype.getCentroid = function () {
- var nyquist = p5sound.audiocontext.sampleRate / 2;
- var cumulative_sum = 0;
- var centroid_normalization = 0;
-
- for (var i = 0; i < this.freqDomain.length; i++) {
- cumulative_sum += i * this.freqDomain[i];
- centroid_normalization += this.freqDomain[i];
- }
-
- var mean_freq_index = 0;
-
- if (centroid_normalization !== 0) {
- mean_freq_index = cumulative_sum / centroid_normalization;
- }
-
- var spec_centroid_freq = mean_freq_index * (nyquist / this.freqDomain.length);
- return spec_centroid_freq;
- };
- /**
- * Smooth FFT analysis by averaging with the last analysis frame.
- *
- * @method smooth
- * @param {Number} smoothing 0.0 < smoothing < 1.0.
- * Defaults to 0.8.
- */
-
-
- p5.FFT.prototype.smooth = function (s) {
- if (typeof s !== 'undefined') {
- this.smoothing = s;
- }
-
- return this.smoothing;
- };
-
- p5.FFT.prototype.dispose = function () {
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
-
- if (this.analyser) {
- this.analyser.disconnect();
- delete this.analyser;
- }
- };
- /**
- * Returns an array of average amplitude values for a given number
- * of frequency bands split equally. N defaults to 16.
- * NOTE: analyze() must be called prior to linAverages(). Analyze()
- * tells the FFT to analyze frequency data, and linAverages() uses
- * the results to group them into a smaller set of averages.
- *
- * @method linAverages
- * @for p5.FFT
- * @param {Number} N Number of returned frequency groups
- * @return {Array} linearAverages Array of average amplitude values for each group
- */
-
-
- p5.FFT.prototype.linAverages = function (N) {
- var N = N || 16;
-
- var spectrum = this.freqDomain;
- var spectrumLength = spectrum.length;
- var spectrumStep = Math.floor(spectrumLength / N);
- var linearAverages = new Array(N);
-
- var groupIndex = 0;
-
- for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
- linearAverages[groupIndex] = linearAverages[groupIndex] !== undefined ? (linearAverages[groupIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
-
- if (specIndex % spectrumStep === spectrumStep - 1) {
- groupIndex++;
- }
- }
-
- return linearAverages;
- };
- /**
- * Returns an array of average amplitude values of the spectrum, for a given
- * set of
- * Octave Bands
- * NOTE: analyze() must be called prior to logAverages(). Analyze()
- * tells the FFT to analyze frequency data, and logAverages() uses
- * the results to group them into a smaller set of averages.
- *
- * @method logAverages
- * @for p5.FFT
- * @param {Array} octaveBands Array of Octave Bands objects for grouping
- * @return {Array} logAverages Array of average amplitude values for each group
- */
-
-
- p5.FFT.prototype.logAverages = function (octaveBands) {
- var nyquist = p5sound.audiocontext.sampleRate / 2;
- var spectrum = this.freqDomain;
- var spectrumLength = spectrum.length;
- var logAverages = new Array(octaveBands.length);
-
- var octaveIndex = 0;
-
- for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
- var specIndexFrequency = Math.round(specIndex * nyquist / this.freqDomain.length);
-
- if (specIndexFrequency > octaveBands[octaveIndex].hi) {
- octaveIndex++;
- }
-
- logAverages[octaveIndex] = logAverages[octaveIndex] !== undefined ? (logAverages[octaveIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
- }
-
- return logAverages;
- };
- /**
- * Calculates and Returns the 1/N
- * Octave Bands
- * N defaults to 3 and minimum central frequency to 15.625Hz.
- * (1/3 Octave Bands ~= 31 Frequency Bands)
- * Setting fCtr0 to a central value of a higher octave will ignore the lower bands
- * and produce less frequency groups.
- *
- * @method getOctaveBands
- * @for p5.FFT
- * @param {Number} N Specifies the 1/N type of generated octave bands
- * @param {Number} fCtr0 Minimum central frequency for the lowest band
- * @return {Array} octaveBands Array of octave band objects with their bounds
- */
-
-
- p5.FFT.prototype.getOctaveBands = function (N, fCtr0) {
- var N = N || 3;
-
- var fCtr0 = fCtr0 || 15.625;
-
- var octaveBands = [];
- var lastFrequencyBand = {
- lo: fCtr0 / Math.pow(2, 1 / (2 * N)),
- ctr: fCtr0,
- hi: fCtr0 * Math.pow(2, 1 / (2 * N))
- };
- octaveBands.push(lastFrequencyBand);
- var nyquist = p5sound.audiocontext.sampleRate / 2;
-
- while (lastFrequencyBand.hi < nyquist) {
- var newFrequencyBand = {};
- newFrequencyBand.lo = lastFrequencyBand.hi;
- newFrequencyBand.ctr = lastFrequencyBand.ctr * Math.pow(2, 1 / N);
- newFrequencyBand.hi = newFrequencyBand.ctr * Math.pow(2, 1 / (2 * N));
- octaveBands.push(newFrequencyBand);
- lastFrequencyBand = newFrequencyBand;
- }
-
- return octaveBands;
- };
-
-
- var freqToFloat = function freqToFloat(fft) {
- if (fft.freqDomain instanceof Float32Array === false) {
- fft.freqDomain = new Float32Array(fft.analyser.frequencyBinCount);
- }
- };
-
- var freqToInt = function freqToInt(fft) {
- if (fft.freqDomain instanceof Uint8Array === false) {
- fft.freqDomain = new Uint8Array(fft.analyser.frequencyBinCount);
- }
- };
-
- var timeToFloat = function timeToFloat(fft) {
- if (fft.timeDomain instanceof Float32Array === false) {
- fft.timeDomain = new Float32Array(fft.analyser.frequencyBinCount);
- }
- };
-
- var timeToInt = function timeToInt(fft) {
- if (fft.timeDomain instanceof Uint8Array === false) {
- fft.timeDomain = new Uint8Array(fft.analyser.frequencyBinCount);
- }
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var Signal = __webpack_require__(2);
-
- var Add = __webpack_require__(7);
-
- var Mult = __webpack_require__(3);
-
- var Scale = __webpack_require__(13);
- /**
- *
p5.Signal is a constant audio-rate signal used by p5.Oscillator
- * and p5.Envelope for modulation math.
- *
- *
This is necessary because Web Audio is processed on a seprate clock.
- * For example, the p5 draw loop runs about 60 times per second. But
- * the audio clock must process samples 44100 times per second. If we
- * want to add a value to each of those samples, we can't do it in the
- * draw loop, but we can do it by adding a constant-rate audio signal.This class mostly functions behind the scenes in p5.sound, and returns
- * a Tone.Signal from the Tone.js library by Yotam Mann.
- * If you want to work directly with audio signals for modular
- * synthesis, check out
- * tone.js.
- *
- * @class p5.Signal
- * @constructor
- * @return {Tone.Signal} A Signal object from the Tone.js library
- * @example
- *
- * let carrier, modulator;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * background(220);
- * text('tap to play', 20, 20);
- *
- * carrier = new p5.Oscillator('sine');
- * carrier.start();
- * carrier.amp(1); // set amplitude
- * carrier.freq(220); // set frequency
- *
- * modulator = new p5.Oscillator('sawtooth');
- * modulator.disconnect();
- * modulator.start();
- * modulator.amp(1);
- * modulator.freq(4);
- *
- * // Modulator's default amplitude range is -1 to 1.
- * // Multiply it by -200, so the range is -200 to 200
- * // then add 220 so the range is 20 to 420
- * carrier.freq( modulator.mult(-400).add(220) );
- * }
- *
- * function canvasPressed() {
- * userStartAudio();
- * carrier.amp(1.0);
- * }
- *
- * function mouseReleased() {
- * carrier.amp(0);
- * }
- *
- */
-
-
- p5.Signal = function (value) {
- var s = new Signal(value);
-
- return s;
- };
- /**
- * Fade to value, for smooth transitions
- *
- * @method fade
- * @for p5.Signal
- * @param {Number} value Value to set this signal
- * @param {Number} [secondsFromNow] Length of fade, in seconds from now
- */
-
-
- Signal.prototype.fade = Signal.prototype.linearRampToValueAtTime;
- Mult.prototype.fade = Signal.prototype.fade;
- Add.prototype.fade = Signal.prototype.fade;
- Scale.prototype.fade = Signal.prototype.fade;
- /**
- * Connect a p5.sound object or Web Audio node to this
- * p5.Signal so that its amplitude values can be scaled.
- *
- * @method setInput
- * @for p5.Signal
- * @param {Object} input
- */
-
- Signal.prototype.setInput = function (_input) {
- _input.connect(this);
- };
-
- Mult.prototype.setInput = Signal.prototype.setInput;
- Add.prototype.setInput = Signal.prototype.setInput;
- Scale.prototype.setInput = Signal.prototype.setInput;
-
- /**
- * Add a constant value to this audio signal,
- * and return the resulting audio signal. Does
- * not change the value of the original signal,
- * instead it returns a new p5.SignalAdd.
- *
- * @method add
- * @for p5.Signal
- * @param {Number} number
- * @return {p5.Signal} object
- */
-
- Signal.prototype.add = function (num) {
- var add = new Add(num);
-
- this.connect(add);
- return add;
- };
-
- Mult.prototype.add = Signal.prototype.add;
- Add.prototype.add = Signal.prototype.add;
- Scale.prototype.add = Signal.prototype.add;
- /**
- * Multiply this signal by a constant value,
- * and return the resulting audio signal. Does
- * not change the value of the original signal,
- * instead it returns a new p5.SignalMult.
- *
- * @method mult
- * @for p5.Signal
- * @param {Number} number to multiply
- * @return {p5.Signal} object
- */
-
- Signal.prototype.mult = function (num) {
- var mult = new Mult(num);
-
- this.connect(mult);
- return mult;
- };
-
- Mult.prototype.mult = Signal.prototype.mult;
- Add.prototype.mult = Signal.prototype.mult;
- Scale.prototype.mult = Signal.prototype.mult;
- /**
- * Scale this signal value to a given range,
- * and return the result as an audio signal. Does
- * not change the value of the original signal,
- * instead it returns a new p5.SignalScale.
- *
- * @method scale
- * @for p5.Signal
- * @param {Number} number to multiply
- * @param {Number} inMin input range minumum
- * @param {Number} inMax input range maximum
- * @param {Number} outMin input range minumum
- * @param {Number} outMax input range maximum
- * @return {p5.Signal} object
- */
-
- Signal.prototype.scale = function (inMin, inMax, outMin, outMax) {
- var mapOutMin, mapOutMax;
-
- if (arguments.length === 4) {
- mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
- mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
- } else {
- mapOutMin = arguments[0];
- mapOutMax = arguments[1];
- }
-
- var scale = new Scale(mapOutMin, mapOutMax);
- this.connect(scale);
- return scale;
- };
-
- Mult.prototype.scale = Signal.prototype.scale;
- Add.prototype.scale = Signal.prototype.scale;
- Scale.prototype.scale = Signal.prototype.scale;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(21)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){o.Frequency=function(e,t){if(!(this instanceof o.Frequency))return new o.Frequency(e,t);o.TimeBase.call(this,e,t)},o.extend(o.Frequency,o.TimeBase),o.Frequency.prototype._primaryExpressions=Object.create(o.TimeBase.prototype._primaryExpressions),o.Frequency.prototype._primaryExpressions.midi={regexp:/^(\d+(?:\.\d+)?midi)/,method:function(e){return this.midiToFrequency(e)}},o.Frequency.prototype._primaryExpressions.note={regexp:/^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i,method:function(e,t){var r=n[e.toLowerCase()]+12*(parseInt(t)+1);return this.midiToFrequency(r)}},o.Frequency.prototype._primaryExpressions.tr={regexp:/^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?/,method:function(e,t,r){var n=1;return e&&"0"!==e&&(n*=this._beatsToUnits(this._timeSignature()*parseFloat(e))),t&&"0"!==t&&(n*=this._beatsToUnits(parseFloat(t))),r&&"0"!==r&&(n*=this._beatsToUnits(parseFloat(r)/4)),n}},o.Frequency.prototype.transpose=function(e){return this._expr=function(e,t){return e()*this.intervalToFrequencyRatio(t)}.bind(this,this._expr,e),this},o.Frequency.prototype.harmonize=function(e){return this._expr=function(e,t){for(var r=e(),n=[],o=0;oEnvelopes are pre-defined amplitude distribution over time.
- * Typically, envelopes are used to control the output volume
- * of an object, a series of fades referred to as Attack, Decay,
- * Sustain and Release (
- * ADSR
- * ). Envelopes can also control other Web Audio Parameters—for example, a p5.Envelope can
- * control an Oscillator's frequency like this: osc.freq(env).
- *
Use setRange to change the attack/release level.
- * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
- *
Use the play method to play the entire envelope,
- * the ramp method for a pingable trigger,
- * or triggerAttack/
- * triggerRelease to trigger noteOn/noteOff.
- * let t1 = 0.1; // attack time in seconds
- * let l1 = 0.7; // attack level 0.0 to 1.0
- * let t2 = 0.3; // decay time in seconds
- * let l2 = 0.1; // decay level 0.0 to 1.0
- *
- * let env;
- * let triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * background(220);
- * text('tap to play', 20, 20);
- * cnv.mousePressed(playSound);
- *
- * env = new p5.Envelope(t1, l1, t2, l2);
- * triOsc = new p5.Oscillator('triangle');
- * }
- *
- * function playSound() {
- * // starting the oscillator ensures that audio is enabled.
- * triOsc.start();
- * env.play(triOsc);
- * }
- *
- */
-
-
- p5.Envelope = function (t1, l1, t2, l2, t3, l3) {
- /**
- * Time until envelope reaches attackLevel
- * @property attackTime
- */
- this.aTime = t1 || 0.1;
- /**
- * Level once attack is complete.
- * @property attackLevel
- */
-
- this.aLevel = l1 || 1;
- /**
- * Time until envelope reaches decayLevel.
- * @property decayTime
- */
-
- this.dTime = t2 || 0.5;
- /**
- * Level after decay. The envelope will sustain here until it is released.
- * @property decayLevel
- */
-
- this.dLevel = l2 || 0;
- /**
- * Duration of the release portion of the envelope.
- * @property releaseTime
- */
-
- this.rTime = t3 || 0;
- /**
- * Level at the end of the release.
- * @property releaseLevel
- */
-
- this.rLevel = l3 || 0;
- this._rampHighPercentage = 0.98;
- this._rampLowPercentage = 0.02;
- this.output = p5sound.audiocontext.createGain();
- this.control = new TimelineSignal();
-
- this._init();
-
-
- this.control.connect(this.output);
-
- this.connection = null;
-
- this.mathOps = [this.control];
-
- this.isExponential = false;
-
- this.sourceToClear = null;
-
- this.wasTriggered = false;
-
- p5sound.soundArray.push(this);
- };
-
-
- p5.Envelope.prototype._init = function () {
- var now = p5sound.audiocontext.currentTime;
- var t = now;
- this.control.setTargetAtTime(0.00001, t, .001);
-
- this._setRampAD(this.aTime, this.dTime);
- };
- /**
- * Reset the envelope with a series of time/value pairs.
- *
- * @method set
- * @for p5.Envelope
- * @param {Number} attackTime Time (in seconds) before level
- * reaches attackLevel
- * @param {Number} attackLevel Typically an amplitude between
- * 0.0 and 1.0
- * @param {Number} decayTime Time
- * @param {Number} decayLevel Amplitude (In a standard ADSR envelope,
- * decayLevel = sustainLevel)
- * @param {Number} releaseTime Release Time (in seconds)
- * @param {Number} releaseLevel Amplitude
- * @example
- *
- * let attackTime;
- * let l1 = 0.7; // attack level 0.0 to 1.0
- * let t2 = 0.3; // decay time in seconds
- * let l2 = 0.1; // decay level 0.0 to 1.0
- * let t3 = 0.2; // release time in seconds
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- *
- * env = new p5.Envelope();
- * triOsc = new p5.Oscillator('triangle');
- * }
- *
- * function draw() {
- * background(220);
- * text('tap here to play', 5, 20);
- *
- * attackTime = map(mouseX, 0, width, 0.0, 1.0);
- * text('attack time: ' + attackTime, 5, height - 20);
- * }
- *
- * // mouseClick triggers envelope if over canvas
- * function playSound() {
- * env.set(attackTime, l1, t2, l2, t3);
- *
- * triOsc.start();
- * env.play(triOsc);
- * }
- *
- *
- */
-
-
- p5.Envelope.prototype.set = function (t1, l1, t2, l2, t3, l3) {
- this.aTime = t1;
- this.aLevel = l1;
- this.dTime = t2 || 0;
- this.dLevel = l2 || 0;
- this.rTime = t3 || 0;
- this.rLevel = l3 || 0;
-
- this._setRampAD(t1, t2);
- };
- /**
- * Set values like a traditional
- *
- * ADSR envelope
- * .
- *
- * @method setADSR
- * @for p5.Envelope
- * @param {Number} attackTime Time (in seconds) before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- * @example
- *
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playEnv);
- *
- * env = new p5.Envelope();
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.freq(220);
- * }
- *
- * function draw() {
- * background(220);
- * text('tap here to play', 5, 20);
- * attackTime = map(mouseX, 0, width, 0, 1.0);
- * text('attack time: ' + attackTime, 5, height - 40);
- * }
- *
- * function playEnv() {
- * triOsc.start();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.play();
- * }
- *
- */
-
-
- p5.Envelope.prototype.setADSR = function (aTime, dTime, sPercent, rTime) {
- this.aTime = aTime;
- this.dTime = dTime || 0;
- // the sustain ratio sets the decay level relative to the attack/release range
- this.sPercent = sPercent || 0;
- this.dLevel = typeof sPercent !== 'undefined' ? sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0;
- this.rTime = rTime || 0;
-
- this._setRampAD(aTime, dTime);
- };
- /**
- * Set max (attackLevel) and min (releaseLevel) of envelope.
- *
- * @method setRange
- * @for p5.Envelope
- * @param {Number} aLevel attack level (defaults to 1)
- * @param {Number} rLevel release level (defaults to 0)
- * @example
- *
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playEnv);
- *
- * env = new p5.Envelope();
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.freq(220);
- * }
- *
- * function draw() {
- * background(220);
- * text('tap here to play', 5, 20);
- * attackLevel = map(mouseY, height, 0, 0, 1.0);
- * text('attack level: ' + attackLevel, 5, height - 20);
- * }
- *
- * function playEnv() {
- * triOsc.start();
- * env.setRange(attackLevel, releaseLevel);
- * env.play();
- * }
- *
- */
-
-
- p5.Envelope.prototype.setRange = function (aLevel, rLevel) {
- this.aLevel = aLevel || 1;
- this.rLevel = rLevel || 0;
- };
-
-
- p5.Envelope.prototype._setRampAD = function (t1, t2) {
- this._rampAttackTime = this.checkExpInput(t1);
- this._rampDecayTime = this.checkExpInput(t2);
- var TCDenominator = 1.0;
-
- // solve the time constant so the exponential ramp reaches
- // _rampHighPercentage of its target after t1 seconds
- TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
- this._rampAttackTC = t1 / this.checkExpInput(TCDenominator);
- // likewise for the decay, which falls to _rampLowPercentage in t2 seconds
- TCDenominator = Math.log(1.0 / this._rampLowPercentage);
- this._rampDecayTC = t2 / this.checkExpInput(TCDenominator);
- };
-
-
- p5.Envelope.prototype.setRampPercentages = function (p1, p2) {
- this._rampHighPercentage = this.checkExpInput(p1);
- this._rampLowPercentage = this.checkExpInput(p2);
- var TCDenominator = 1.0;
-
- TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
- this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator);
- TCDenominator = Math.log(1.0 / this._rampLowPercentage);
- this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator);
- };
- /**
- * Assign a parameter to be controlled by this envelope.
- * If a p5.Sound object is given, then the p5.Envelope will control its
- * output gain. If multiple inputs are provided, the env will
- * control all of them.
- *
- * @method setInput
- * @for p5.Envelope
- * @param {Object} [...inputs] A p5.sound object or
- * Web Audio Param.
- */
-
-
- p5.Envelope.prototype.setInput = function () {
- for (var i = 0; i < arguments.length; i++) {
- this.connect(arguments[i]);
- }
- };
- /**
- * Set whether the envelope ramp is linear (default) or exponential.
- * Exponential ramps can be useful because we perceive amplitude
- * and frequency logarithmically.
- *
- * @method setExp
- * @for p5.Envelope
- * @param {Boolean} isExp true for exponential, false for linear
- */
-
-
- p5.Envelope.prototype.setExp = function (isExp) {
- this.isExponential = isExp;
- };
-
-
- p5.Envelope.prototype.checkExpInput = function (value) {
- if (value <= 0) {
- value = 0.00000001;
- }
-
- return value;
- };
- /**
- *
- * Play tells the envelope to start acting on a given input.
- * If the input is a p5.sound object (i.e. AudioIn, Oscillator,
- * SoundFile), then Envelope will control its output volume.
- * Envelopes can also be used to control any
- * Web Audio Audio Param.
- *
- * @method play
- * @for p5.Envelope
- * @param {Object} unit A p5.sound object or
- * Web Audio Param.
- * @param {Number} [startTime] time from now (in seconds) at which to play
- * @param {Number} [sustainTime] time to sustain before releasing the envelope
- * @example
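- * // A minimal sketch, not from the original docs: play the full
- * // envelope over an oscillator on each tap.
- * let env, osc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playEnv);
- * background(220);
- * text('tap to play', 20, 20);
- *
- * env = new p5.Envelope(0.02, 1, 0.2, 0.2);
- * osc = new p5.Oscillator('triangle');
- * }
- *
- * function playEnv() {
- * // starting the oscillator ensures that audio is enabled
- * osc.start();
- * // play now, and sustain for 0.2 seconds before releasing
- * env.play(osc, 0, 0.2);
- * }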
- *
- */
-
-
- p5.Envelope.prototype.play = function (unit, secondsFromNow, susTime) {
- var tFromNow = secondsFromNow || 0;
- susTime = susTime || 0;
-
- if (unit) {
- if (this.connection !== unit) {
- this.connect(unit);
- }
- }
-
- this.triggerAttack(unit, tFromNow);
- this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime);
- };
- /**
- * Trigger the Attack, and Decay portion of the Envelope.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go. Input can be
- * any p5.sound object, or a
- * Web Audio Param.
- *
- * @method triggerAttack
- * @for p5.Envelope
- * @param {Object} unit p5.sound Object or Web Audio Param
- * @param {Number} secondsFromNow time from now (in seconds)
- */
-
- /**
- * Get audio from an input, i.e. your computer's microphone.
- *
- * Note: This uses the getUserMedia/
- * Stream API, which is not supported by certain browsers. Access in the Chrome
- * browser is limited to localhost and https; access over http may be blocked.
- *
- * @class p5.AudioIn
- * @constructor
- * @param {Function} [errorCallback] A function to call if there is an error
- * accessing the AudioIn. For example,
- * Safari and iOS devices do not
- * currently allow microphone access.
- * @example
- *
- * let mic;
- *
- * function setup(){
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(userStartAudio);
- * textAlign(CENTER);
- * mic = new p5.AudioIn();
- * mic.start();
- * }
- *
- * function draw(){
- * background(0);
- * fill(255);
- * text('tap to start', width/2, 20);
- *
- * let micLevel = mic.getLevel();
- * let y = height - micLevel * height;
- * ellipse(width/2, y, 10, 10);
- * }
- *
- */
-
- p5.AudioIn = function (errorCallback) {
-
- /**
- * @property {GainNode} input
- */
- this.input = p5sound.audiocontext.createGain();
- /**
- * @property {GainNode} output
- */
-
- this.output = p5sound.audiocontext.createGain();
- /**
- * @property {MediaStream|null} stream
- */
-
- this.stream = null;
- /**
- * @property {MediaStreamAudioSourceNode|null} mediaStream
- */
-
- this.mediaStream = null;
- /**
- * @property {Number|null} currentSource
- */
-
- this.currentSource = null;
- /**
- * Client must allow browser to access their microphone / audioin source.
- * Default: false. Will become true when the client enables access.
- *
- * @property {Boolean} enabled
- */
-
- this.enabled = false;
- /**
- * Input amplitude, connect to it by default but not to master out
- *
- * @property {p5.Amplitude} amplitude
- */
-
- this.amplitude = new p5.Amplitude();
- this.output.connect(this.amplitude.input);
-
- if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
- errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
- }
-
-
- p5sound.soundArray.push(this);
- };
- /**
- * Start processing audio input. This enables the use of other
- * AudioIn methods like getLevel(). Note that by default, AudioIn
- * is not connected to p5.sound's output. So you won't hear
- * anything unless you use the connect() method.
- *
- * Certain browsers limit access to the user's microphone. For example,
- * Chrome only allows access from localhost and over https. For this reason,
- * you may want to include an errorCallback—a function that is called in case
- * the browser won't provide mic access.
- *
- * @method start
- * @for p5.AudioIn
- * @param {Function} [successCallback] Name of a function to call on
- * success.
- * @param {Function} [errorCallback] Name of a function to call if
- * there was an error. For example,
- * some browsers do not support
- * getUserMedia.
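- * @example
- * // A hedged sketch: start the mic with callbacks, in case the
- * // browser blocks microphone access.
- * let mic;
- *
- * function setup() {
- * mic = new p5.AudioIn();
- * mic.start(micStarted, micError);
- * }
- *
- * function micStarted() {
- * console.log('mic is on');
- * }
- *
- * function micError(err) {
- * console.error('mic access was denied', err);
- * }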
- */
-
-
- p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
- var self = this;
-
- if (this.stream) {
- this.stop();
- }
-
-
- var audioSource = p5sound.inputSources[self.currentSource];
- var constraints = {
- audio: {
- sampleRate: p5sound.audiocontext.sampleRate,
- echoCancellation: false
- }
- };
-
- if (p5sound.inputSources[this.currentSource]) {
- constraints.audio.deviceId = audioSource.deviceId;
- }
-
- window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
- self.stream = stream;
- self.enabled = true;
-
- self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
- self.mediaStream.connect(self.output);
-
- self.amplitude.setInput(self.output);
- if (successCallback) successCallback();
- })["catch"](function (err) {
- if (errorCallback) errorCallback(err);else console.error(err);
- });
- };
- /**
- * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
- * If re-starting, the user may be prompted for permission access.
- *
- * @method stop
- * @for p5.AudioIn
- */
-
-
- p5.AudioIn.prototype.stop = function () {
- if (this.stream) {
- this.stream.getTracks().forEach(function (track) {
- track.stop();
- });
- this.mediaStream.disconnect();
- delete this.mediaStream;
- delete this.stream;
- }
- };
- /**
- * Connect to an audio unit. If no parameter is provided, will
- * connect to the master output (i.e. your speakers).
- *
- * @method connect
- * @for p5.AudioIn
- * @param {Object} [unit] An object that accepts audio input,
- * such as an FFT
- */
-
-
- p5.AudioIn.prototype.connect = function (unit) {
- if (unit) {
- if (unit.hasOwnProperty('input')) {
- this.output.connect(unit.input);
- } else if (unit.hasOwnProperty('analyser')) {
- this.output.connect(unit.analyser);
- } else {
- this.output.connect(unit);
- }
- } else {
- this.output.connect(p5sound.input);
- }
- };
- /**
- * Disconnect the AudioIn from all audio units. For example, if
- * connect() had been called, disconnect() will stop sending
- * signal to your speakers.
- *
- * @method disconnect
- * @for p5.AudioIn
- */
-
-
- p5.AudioIn.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
-
- this.output.connect(this.amplitude.input);
- }
- };
- /**
- * Read the Amplitude (volume level) of an AudioIn. The AudioIn
- * class contains its own instance of the Amplitude class to help
- * make it easy to get a microphone's volume level. Accepts an
- * optional smoothing value (between 0.0 and 1.0). NOTE: AudioIn must
- * .start() before using .getLevel().
- *
- * @method getLevel
- * @for p5.AudioIn
- * @param {Number} [smoothing] Smoothing is 0.0 by default.
- * Smooths values based on previous values.
- * @return {Number} Volume level (between 0.0 and 1.0)
- */
-
-
- p5.AudioIn.prototype.getLevel = function (smoothing) {
- if (smoothing) {
- this.amplitude.smoothing = smoothing;
- }
-
- return this.amplitude.getLevel();
- };
- /**
- * Set amplitude (volume) of a mic input between 0 and 1.0.
- *
- * @method amp
- * @for p5.AudioIn
- * @param {Number} vol between 0 and 1.0
- * @param {Number} [time] ramp time (optional)
- */
-
-
- p5.AudioIn.prototype.amp = function (vol, t) {
- if (t) {
- var rampTime = t || 0;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
- this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
- } else {
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
- }
- };
- /**
- * Returns a list of available input sources. This is a wrapper
- * for MediaDevices.enumerateDevices(), and it returns a Promise.
- *
- * @method getSources
- * @for p5.AudioIn
- * @param {Function} [successCallback] This callback function handles the sources when they
- * have been enumerated. The callback function
- * receives the deviceList array as its only argument
- * @param {Function} [errorCallback] This optional callback receives the error
- * message as its argument.
- * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
- * to the enumerateDevices() method
- * @example
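- * // A hedged sketch: log the available audio input devices.
- * let audioIn;
- *
- * function setup() {
- * audioIn = new p5.AudioIn();
- * audioIn.getSources().then(function(deviceList) {
- * // deviceList is an array of available audio input sources
- * console.log(deviceList);
- * });
- * }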
- *
- */
-
-
- p5.AudioIn.prototype.getSources = function (onSuccess, onError) {
- return new Promise(function (resolve, reject) {
- window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
- // keep only the audio inputs
- p5sound.inputSources = devices.filter(function (device) {
- return device.kind === 'audioinput';
- });
- resolve(p5sound.inputSources);
-
- if (onSuccess) {
- onSuccess(p5sound.inputSources);
- }
- })["catch"](function (error) {
- reject(error);
-
- if (onError) {
- onError(error);
- } else {
- console.error('This browser does not support MediaStreamTrack.getSources()');
- }
- });
- });
- };
- /**
- * Delay is an echo effect. It processes an existing sound source,
- * and outputs a delayed version of that sound. The p5.Delay can
- * produce different effects depending on the delayTime, feedback,
- * filter, and type.
- *
- * @class p5.Delay
- * @extends p5.Effect
- * @constructor
- */
-
-
- p5.Delay = function () {
- Effect.call(this);
- this._split = this.ac.createChannelSplitter(2);
- this._merge = this.ac.createChannelMerger(2);
- this._leftGain = this.ac.createGain();
- this._rightGain = this.ac.createGain();
- /**
- * The p5.Delay is built with two
- *
- * Web Audio Delay Nodes, one for each stereo channel.
- *
- * @for p5.Delay
- * @property {DelayNode} leftDelay
- */
-
- this.leftDelay = this.ac.createDelay();
- /**
- * The p5.Delay is built with two
- *
- * Web Audio Delay Nodes, one for each stereo channel.
- * @for p5.Delay
- * @property {DelayNode} rightDelay
- */
-
- this.rightDelay = this.ac.createDelay();
- this._leftFilter = new Filter();
- this._rightFilter = new Filter();
-
- this._leftFilter.disconnect();
-
- this._rightFilter.disconnect();
-
- this._leftFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
-
- this._rightFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
-
- this._leftFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
-
- this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
-
-
- this.input.connect(this._split);
- this.leftDelay.connect(this._leftGain);
- this.rightDelay.connect(this._rightGain);
-
- this._leftGain.connect(this._leftFilter.input);
-
- this._rightGain.connect(this._rightFilter.input);
-
- this._merge.connect(this.wet);
-
- this._leftFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
-
- this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
-
-
- this.setType(0);
- this._maxDelay = this.leftDelay.delayTime.maxValue;
-
- this.feedback(0.5);
- };
-
- p5.Delay.prototype = Object.create(Effect.prototype);
- /**
- * Add delay to an audio signal according to a set
- * of delay parameters.
- *
- * @method process
- * @for p5.Delay
- * @param {Object} src An object that outputs audio
- * @param {Number} [delayTime] Time (in seconds) of the delay/echo.
- * Some browsers limit delayTime to
- * 1 second.
- * @param {Number} [feedback] sends the delay back through itself
- * in a loop that decreases in volume
- * each time.
- * @param {Number} [lowPass] Cutoff frequency. Only frequencies
- * below the lowPass will be part of the
- * delay.
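- * @example
- * // A hedged sketch: echo a sound file with a 0.12s delay time,
- * // 0.7 feedback, and a 2300Hz lowpass on the echoes.
- * let delay, sound;
- *
- * function preload() {
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- * delay = new p5.Delay();
- * }
- *
- * function playSound() {
- * sound.play();
- * delay.process(sound, 0.12, 0.7, 2300);
- * }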
- */
-
- p5.Delay.prototype.process = function (src, _delayTime, _feedback, _filter) {
- var feedback = _feedback || 0;
- var delayTime = _delayTime || 0;
-
- if (feedback >= 1.0) {
- throw new Error('Feedback value will force a positive feedback loop.');
- }
-
- if (delayTime >= this._maxDelay) {
- throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.');
- }
-
- src.connect(this.input);
- this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
- this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
- this._leftGain.gain.value = feedback;
- this._rightGain.gain.value = feedback;
-
- if (_filter) {
- this._leftFilter.freq(_filter);
-
- this._rightFilter.freq(_filter);
- }
- };
- /**
- * Set the delay (echo) time, in seconds. Usually this value will be
- * a floating point number between 0.0 and 1.0.
- *
- * @method delayTime
- * @for p5.Delay
- * @param {Number} delayTime Time (in seconds) of the delay
- */
-
-
- p5.Delay.prototype.delayTime = function (t) {
- if (typeof t !== 'number') {
- t.connect(this.leftDelay.delayTime);
- t.connect(this.rightDelay.delayTime);
- } else {
- this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
- this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
- this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
- this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
- }
- };
- /**
- * Feedback occurs when Delay sends its signal back through its input
- * in a loop. The feedback amount determines how much signal to send each
- * time through the loop. A feedback greater than 1.0 is not desirable because
- * it will increase the overall output each time through the loop,
- * creating an infinite feedback loop. The default value is 0.5.
- *
- * @method feedback
- * @for p5.Delay
- * @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
- * Oscillator that can be used to
- * modulate this param
- * @returns {Number} Feedback value
- *
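- * @example
- * // A hedged sketch: modulate the feedback amount with a slow
- * // sine oscillator, as the parameter description above suggests.
- * let delay, lfo;
- *
- * function setup() {
- * delay = new p5.Delay();
- * lfo = new p5.Oscillator('sine');
- * lfo.freq(0.1);
- * // scale the -1..1 sine output into a safe 0..0.75 feedback range
- * lfo.scale(-1, 1, 0, 0.75);
- * lfo.start();
- * delay.feedback(lfo);
- * }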
- */
-
-
- p5.Delay.prototype.feedback = function (f) {
- if (f && typeof f !== 'number') {
- f.connect(this._leftGain.gain);
- f.connect(this._rightGain.gain);
- } else if (f >= 1.0) {
- throw new Error('Feedback value will force a positive feedback loop.');
- } else if (typeof f === 'number') {
- this._leftGain.gain.value = f;
- this._rightGain.gain.value = f;
- }
-
-
- return this._leftGain.gain.value;
- };
- /**
- * Set a lowpass filter frequency for the delay. A lowpass filter
- * will cut off any frequencies higher than the filter frequency.
- *
- * @method filter
- * @for p5.Delay
- * @param {Number|Object} cutoffFreq A lowpass filter will cut off any
- * frequencies higher than the filter frequency.
- * @param {Number|Object} res Resonance of the filter frequency
- * cutoff, or an object (i.e. a p5.Oscillator)
- * that can be used to modulate this parameter.
- * High numbers (i.e. 15) will produce a resonance,
- * low numbers (i.e. .2) will produce a slope.
- */
-
-
- p5.Delay.prototype.filter = function (freq, q) {
- this._leftFilter.set(freq, q);
-
- this._rightFilter.set(freq, q);
- };
- /**
- * Choose a preset type of delay. 'pingPong' bounces the signal
- * from the left to the right channel to produce a stereo effect.
- * Any other parameter will revert to the default delay setting.
- *
- * @method setType
- * @for p5.Delay
- * @param {String|Number} type 'pingPong' (1) or 'default' (0)
- */
-
-
- p5.Delay.prototype.setType = function (t) {
- if (t === 1) {
- t = 'pingPong';
- }
-
- this._split.disconnect();
-
- this._leftFilter.disconnect();
-
- this._rightFilter.disconnect();
-
- this._split.connect(this.leftDelay, 0);
-
- this._split.connect(this.rightDelay, 1);
-
- switch (t) {
- case 'pingPong':
- this._rightFilter.setType(this._leftFilter.biquad.type);
-
- this._leftFilter.output.connect(this._merge, 0, 0);
-
- this._rightFilter.output.connect(this._merge, 0, 1);
-
- this._leftFilter.output.connect(this.rightDelay);
-
- this._rightFilter.output.connect(this.leftDelay);
-
- break;
-
- default:
- this._leftFilter.output.connect(this._merge, 0, 0);
-
- this._rightFilter.output.connect(this._merge, 0, 1);
-
- this._leftFilter.output.connect(this.leftDelay);
-
- this._rightFilter.output.connect(this.rightDelay);
-
- }
- };
-
- /**
- * Set the output level of the delay effect.
- *
- * @method amp
- * @for p5.Delay
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
-
- /**
- * Send output to a p5.sound or web audio object
- *
- * @method connect
- * @for p5.Delay
- * @param {Object} unit
- */
-
- /**
- * Disconnect all output.
- *
- * @method disconnect
- * @for p5.Delay
- */
-
-
- p5.Delay.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
-
- this._split.disconnect();
-
- this._leftFilter.dispose();
-
- this._rightFilter.dispose();
-
- this._merge.disconnect();
-
- this._leftGain.disconnect();
-
- this._rightGain.disconnect();
-
- this.leftDelay.disconnect();
- this.rightDelay.disconnect();
- this._split = undefined;
- this._leftFilter = undefined;
- this._rightFilter = undefined;
- this._merge = undefined;
- this._leftGain = undefined;
- this._rightGain = undefined;
- this.leftDelay = undefined;
- this.rightDelay = undefined;
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var CustomError = __webpack_require__(11);
-
- var Effect = __webpack_require__(4);
- /**
- * Reverb adds depth to a sound through a large number of decaying
- * echoes. It creates the perception that sound is occurring in a
- * physical space. The p5.Reverb has parameters for Time (how long the
- * reverb lasts) and decayRate (how much the sound decays with each echo)
- * that can be set with the .set() or .process() methods. The p5.Convolver
- * extends p5.Reverb allowing you to recreate the sound of actual physical
- * spaces through convolution.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Reverb
- * @extends p5.Effect
- * @constructor
- * @example
- *
- * let soundFile, reverb;
- * function preload() {
- * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- *
- * reverb = new p5.Reverb();
- * soundFile.disconnect(); // so we'll only hear reverb...
- *
- * // connect soundFile to reverb, process w/
- * // 3 second reverbTime, decayRate of 2%
- * reverb.process(soundFile, 3, 2);
- * }
- *
- * function draw() {
- * let dryWet = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
- * // 1 = all reverb, 0 = no reverb
- * reverb.drywet(dryWet);
- *
- * background(220);
- * text('tap to play', 10, 20);
- * text('dry/wet: ' + round(dryWet * 100) + '%', 10, height - 20);
- * }
- *
- * function playSound() {
- * soundFile.play();
- * }
- *
- */
-
-
- p5.Reverb = function () {
- Effect.call(this);
-
- this._initConvolverNode();
-
-
- this.input.gain.value = 0.5;
-
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
-
- this._buildImpulse();
- };
-
- p5.Reverb.prototype = Object.create(Effect.prototype);
-
- p5.Reverb.prototype._initConvolverNode = function () {
- this.convolverNode = this.ac.createConvolver();
- this.input.connect(this.convolverNode);
- this.convolverNode.connect(this.wet);
- };
-
- p5.Reverb.prototype._teardownConvolverNode = function () {
- if (this.convolverNode) {
- this.convolverNode.disconnect();
- delete this.convolverNode;
- }
- };
-
- p5.Reverb.prototype._setBuffer = function (audioBuffer) {
- this._teardownConvolverNode();
-
- this._initConvolverNode();
-
- this.convolverNode.buffer = audioBuffer;
- };
- /**
- * Connect a source to the reverb, and assign reverb parameters.
- *
- * @method process
- * @for p5.Reverb
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
-
-
- p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
- src.connect(this.input);
- var rebuild = false;
-
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
-
- if (decayRate) {
- this._decay = decayRate;
- }
-
- if (reverse) {
- this._reverse = reverse;
- }
-
- if (rebuild) {
- this._buildImpulse();
- }
- };
- /**
- * Set the reverb settings. Similar to .process(), but without
- * assigning a new input.
- *
- * @method set
- * @for p5.Reverb
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
-
-
- p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
- var rebuild = false;
-
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
-
- if (decayRate) {
- this._decay = decayRate;
- }
-
- if (reverse) {
- this._reverse = reverse;
- }
-
- if (rebuild) {
- this._buildImpulse();
- }
- };
-
- /**
- * Set the output level of the reverb effect.
- *
- * @method amp
- * @for p5.Reverb
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
-
- /**
- * Send output to a p5.sound or web audio object
- *
- * @method connect
- * @for p5.Reverb
- * @param {Object} unit
- */
-
- /**
- * Disconnect all output.
- *
- * @method disconnect
- * @for p5.Reverb
- */
-
- /**
- * Inspired by Simple Reverb by Jordan Santell
- * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
- *
- * Utility function for building an impulse response
- * based on the module parameters.
- *
- * @private
- */
-
-
- p5.Reverb.prototype._buildImpulse = function () {
- var rate = this.ac.sampleRate;
- var length = rate * this._seconds;
- var decay = this._decay;
- var impulse = this.ac.createBuffer(2, length, rate);
- var impulseL = impulse.getChannelData(0);
- var impulseR = impulse.getChannelData(1);
- var n, i;
-
- for (i = 0; i < length; i++) {
- n = this._reverse ? length - i : i;
- impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- }
-
- this._setBuffer(impulse);
- };
-
- p5.Reverb.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
-
- this._teardownConvolverNode();
- };
-
- /**
- *
- * p5.Convolver extends p5.Reverb. It can emulate the sound of real
- * physical spaces through a process called
- * convolution.
- *
- *
- * Convolution multiplies any audio input by an "impulse response"
- * to simulate the dispersion of sound over time. The impulse response is
- * generated from an audio file that you provide. One way to
- * generate an impulse response is to pop a balloon in a reverberant space
- * and record the echo. Convolution can also be used to experiment with
- * sound.
- *
- *
- * Use the method createConvolver(path) to instantiate a
- * p5.Convolver with a path to your impulse response audio file.
- *
- * @class p5.Convolver
- * @extends p5.Effect
- * @constructor
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call when loading succeeds
- * @param {Function} [errorCallback] function to call if loading fails.
- * This function will receive an error or
- * XMLHttpRequest object with information
- * about what went wrong.
- * @example
- *
- * let cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- * background(220);
- * text('tap to play', 20, 20);
- *
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
- * }
- *
- * function playSound() {
- * sound.play();
- * }
- *
- */
-
-
- p5.Convolver = function (path, callback, errorCallback) {
- p5.Reverb.call(this);
- /**
- * Internally, the p5.Convolver uses a
- * Web Audio Convolver Node.
- *
- * @property {ConvolverNode} convolverNode
- */
-
- this._initConvolverNode();
-
-
- this.input.gain.value = 0.5;
-
- if (path) {
- this.impulses = [];
-
- this._loadBuffer(path, callback, errorCallback);
- } else {
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
-
- this._buildImpulse();
- }
- };
-
- p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
- p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
- /**
- * Create a p5.Convolver. Accepts a path to a soundfile
- * that will be used to generate an impulse response.
- *
- * @method createConvolver
- * @for p5
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call if loading is successful.
- * The object will be passed in as the argument
- * to the callback function.
- * @param {Function} [errorCallback] function to call if loading is not successful.
- * A custom error will be passed in as the argument
- * to the callback function.
- * @return {p5.Convolver}
- * @example
- *
- * let cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- * background(220);
- * text('tap to play', 20, 20);
- *
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
- * }
- *
- * function playSound() {
- * sound.play();
- * }
- *
- */
-
- p5.prototype.createConvolver = function (path, callback, errorCallback) {
- if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
-
- var self = this;
- var cReverb = new p5.Convolver(path, function (buffer) {
- if (typeof callback === 'function') {
- callback(buffer);
- }
-
- if (typeof self._decrementPreload === 'function') {
- self._decrementPreload();
- }
- }, errorCallback);
- cReverb.impulses = [];
- return cReverb;
- };
- /**
- * Private method to load a buffer as an Impulse Response,
- * assign it to the convolverNode, and add to the Array of .impulses.
- *
- * @param {String} path
- * @param {Function} callback
- * @param {Function} errorCallback
- * @private
- */
-
-
- p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
- path = p5.prototype._checkFileFormats(path);
-
- var self = this;
- var errorTrace = new Error().stack;
- var ac = p5.prototype.getAudioContext();
- var request = new XMLHttpRequest();
- request.open('GET', path, true);
- request.responseType = 'arraybuffer';
-
- request.onload = function () {
- if (request.status === 200) {
- ac.decodeAudioData(request.response, function (buff) {
- var buffer = {};
- var chunks = path.split('/');
- buffer.name = chunks[chunks.length - 1];
- buffer.audioBuffer = buff;
- self.impulses.push(buffer);
-
- self._setBuffer(buffer.audioBuffer);
-
- if (callback) {
- callback(buffer);
- }
- },
- function () {
- var err = new CustomError('decodeAudioData', errorTrace, self.url);
- var msg = 'AudioContext error at decodeAudioData for ' + self.url;
-
- if (errorCallback) {
- err.msg = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- });
- }
- else {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
-
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- }
- };
-
-
- request.onerror = function () {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
-
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- };
-
- request.send();
- };
-
- p5.Convolver.prototype.set = null;
- /**
- * Connect a source to the convolver.
- *
- * @method process
- * @for p5.Convolver
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
- * @example
- *
- * let cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSound);
- * background(220);
- * text('tap to play', 20, 20);
- *
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
- * }
- *
- * function playSound() {
- * sound.play();
- * }
- *
- *
- */
-
- p5.Convolver.prototype.process = function (src) {
- src.connect(this.input);
- };
- /**
- * If you load multiple impulse files using the .addImpulse method,
- * they will be stored as Objects in this Array. Toggle between them
- * with the toggleImpulse(id) method.
- *
- * @property {Array} impulses
- * @for p5.Convolver
- */
-
-
- p5.Convolver.prototype.impulses = [];
- /**
- * Load and assign a new Impulse Response to the p5.Convolver.
- * The impulse is added to the .impulses array. Previous
- * impulses can be accessed with the .toggleImpulse(id)
- * method.
- *
- * @method addImpulse
- * @for p5.Convolver
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
-
- p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
- if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
-
- this._loadBuffer(path, callback, errorCallback);
- };
- /**
- * Similar to .addImpulse, except that the .impulses
- * Array is reset to save memory. A new .impulses
- * array is created with this impulse as the only item.
- *
- * @method resetImpulse
- * @for p5.Convolver
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
-
-
- p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
- if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
-
- this.impulses = [];
-
- this._loadBuffer(path, callback, errorCallback);
- };
- /**
- * If you have used .addImpulse() to add multiple impulses
- * to a p5.Convolver, then you can use this method to toggle between
- * the items in the .impulses Array. Accepts a parameter
- * to identify which impulse you wish to use, identified either by its
- * original filename (String) or by its position in the .impulses
- * Array (Number).
- * You can access the objects in the .impulses Array directly. Each
- * Object has two attributes: an .audioBuffer (type:
- * Web Audio
- * AudioBuffer) and a .name, a String that corresponds
- * with the original filename.
- *
- * @method toggleImpulse
- * @for p5.Convolver
- * @param {String|Number} id Identify the impulse by its original filename
- * (String), or by its position in the
- * .impulses Array (Number).
- */
-
-
- p5.Convolver.prototype.toggleImpulse = function (id) {
- if (typeof id === 'number' && id < this.impulses.length) {
- this._setBuffer(this.impulses[id].audioBuffer);
- }
-
- if (typeof id === 'string') {
- for (var i = 0; i < this.impulses.length; i++) {
- if (this.impulses[i].name === id) {
- this._setBuffer(this.impulses[i].audioBuffer);
-
- break;
- }
- }
- }
- };
-
- p5.Convolver.prototype.dispose = function () {
- p5.Reverb.prototype.dispose.apply(this);
-
- for (var i in this.impulses) {
- if (this.impulses[i]) {
- this.impulses[i] = null;
- }
- }
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
-
- var Clock = __webpack_require__(27);
-
- p5.Metro = function () {
- this.clock = new Clock({
- 'callback': this.ontick.bind(this)
- });
- this.syncedParts = [];
- this.bpm = 120;
-
- this._init();
-
- this.prevTick = 0;
- this.tatumTime = 0;
-
- this.tickCallback = function () {};
- };
-
- p5.Metro.prototype.ontick = function (tickTime) {
- var elapsedTime = tickTime - this.prevTick;
- var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
-
- if (elapsedTime - this.tatumTime <= -0.02) {
- return;
- } else {
- this.prevTick = tickTime;
-
- var self = this;
- this.syncedParts.forEach(function (thisPart) {
- if (!thisPart.isPlaying) return;
- thisPart.incrementStep(secondsFromNow);
-
- thisPart.phrases.forEach(function (thisPhrase) {
- var phraseArray = thisPhrase.sequence;
- var bNum = self.metroTicks % phraseArray.length;
-
- if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
- thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
- }
- });
- });
- this.metroTicks += 1;
- this.tickCallback(secondsFromNow);
- }
- };
-
- p5.Metro.prototype.setBPM = function (bpm, rampTime) {
- var beatTime = 60 / (bpm * this.tatums);
- var now = p5sound.audiocontext.currentTime;
- this.tatumTime = beatTime;
- rampTime = rampTime || 0;
- this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
- this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
- this.bpm = bpm;
- };
-
- p5.Metro.prototype.getBPM = function () {
- return this.clock.getRate() / this.tatums * 60;
- };
-
- p5.Metro.prototype._init = function () {
- this.metroTicks = 0;
- };
-
-
- p5.Metro.prototype.resetSync = function (part) {
- this.syncedParts = [part];
- };
-
-
- p5.Metro.prototype.pushSync = function (part) {
- this.syncedParts.push(part);
- };
-
- p5.Metro.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.start(now + t);
- this.setBPM(this.bpm);
- };
-
- p5.Metro.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.stop(now + t);
- };
-
- p5.Metro.prototype.beatLength = function (tatums) {
- this.tatums = 1 / tatums / 4;
- };
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(24),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){"use strict";return t.TimelineState=function(e){t.Timeline.call(this),this._initial=e},t.extend(t.TimelineState,t.Timeline),t.TimelineState.prototype.getValueAtTime=function(e){var t=this.get(e);return null!==t?t.state:this._initial},t.TimelineState.prototype.setStateAtTime=function(e,t){this.add({state:e,time:t})},t.TimelineState}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var BPM = 120;
- /**
- * Set the global tempo, in beats per minute, for all
- * p5.Parts. This method will impact all active p5.Parts.
- *
- * @method setBPM
- * @for p5
- * @param {Number} BPM Beats Per Minute
- * @param {Number} rampTime Seconds from now
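- * @example
- * // A hedged sketch: ramp every active p5.Part down to 90 BPM
- * // over 2 seconds.
- * function mousePressed() {
- * setBPM(90, 2);
- * }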
- */
-
- p5.prototype.setBPM = function (bpm, rampTime) {
- BPM = bpm;
-
- for (var i in p5sound.parts) {
- if (p5sound.parts[i]) {
- p5sound.parts[i].setBPM(bpm, rampTime);
- }
- }
- };
- /**
- *
- * A phrase is a pattern of musical events over time, i.e.
- * a series of notes and rests.
- *
- *
- * Phrases must be added to a p5.Part for playback, and
- * each part can play multiple phrases at the same time.
- * For example, one Phrase might be a kick drum, another
- * could be a snare, and another could be the bassline.
- *
- *
- * The first parameter is a name so that the phrase can be
- * modified or deleted later. The callback is a function that
- * this phrase will call at every step—for example, it might be
- * called playNote(value){}. The array determines
- * which value is passed into the callback at each step of the
- * phrase. It can contain numbers, an object with multiple numbers,
- * or a zero (0), which indicates a rest (the callback won't be called).
- *
- * @class p5.Phrase
- * @constructor
- * @param {String} name Name so that you can access the Phrase.
- * @param {Function} callback The name of a function that this phrase
- * will call. Typically it will play a sound,
- * and accept two parameters: a time at which
- * to play the sound (in seconds from now),
- * and a value from the sequence array. The
- * time should be passed into the play() or
- * start() method to ensure precision.
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
- * @example
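- * // A hedged sketch: a kick pattern, borrowing the 'beatbox'
- * // asset named elsewhere in these docs. 0 means rest.
- * let kick, kickPhrase, part;
- *
- * function preload() {
- * kick = loadSound('assets/beatbox.mp3');
- * }
- *
- * function setup() {
- * // the callback receives a time (seconds from now); pass it
- * // into play() so the sound is scheduled precisely
- * kickPhrase = new p5.Phrase('kick', function(time) {
- * kick.play(time);
- * }, [1, 0, 0, 1, 0, 0, 1, 0]);
- *
- * part = new p5.Part();
- * part.addPhrase(kickPhrase);
- * part.setBPM(60);
- * part.loop();
- * }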
- *
- */
-
-
- p5.Phrase = function (name, callback, sequence) {
- this.phraseStep = 0;
- this.name = name;
- this.callback = callback;
- /**
- * Array of values to pass into the callback
- * at each step of the phrase. Depending on the callback
- * function's requirements, these values may be numbers,
- * strings, or an object with multiple parameters.
- * Zero (0) indicates a rest.
- *
- * @property {Array} sequence
- */
-
- this.sequence = sequence;
- };
- /**
- *
- * A p5.Part plays back one or more p5.Phrases. Instantiate a part
- * with steps and tatums. By default, each step represents a 1/16th note.
- *
- *
- * See p5.Phrase for more about musical timing.
- *
- * @class p5.Part
- * @constructor
- * @param {Number} [steps] Steps in the part
- * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quarter note (default is 1/16, a sixteenth note)
- * @example
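- * // A hedged sketch: one Part playing two phrases at once,
- * // borrowing the 'beatbox' and 'drum' assets named elsewhere
- * // in these docs.
- * let box, drum, myPart;
- *
- * function preload() {
- * box = loadSound('assets/beatbox.mp3');
- * drum = loadSound('assets/drum.mp3');
- * }
- *
- * function setup() {
- * let boxPat = [1, 0, 0, 1, 0, 0, 1, 0];
- * let drumPat = [0, 1, 1, 0, 1, 0, 1, 0];
- *
- * myPart = new p5.Part(boxPat.length, 1 / 16);
- * myPart.addPhrase('box', function(time) { box.play(time); }, boxPat);
- * myPart.addPhrase('drum', function(time) { drum.play(time); }, drumPat);
- * myPart.setBPM(60);
- * myPart.loop();
- * }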
- *
- */
-
-
- p5.Part = function (steps, bLength) {
- this.length = steps || 0;
-
- this.partStep = 0;
- this.phrases = [];
- this.isPlaying = false;
- this.noLoop();
- this.tatums = bLength || 0.0625;
-
- this.metro = new p5.Metro();
-
- this.metro._init();
-
- this.metro.beatLength(this.tatums);
- this.metro.setBPM(BPM);
- p5sound.parts.push(this);
-
- this.callback = function () {};
- };
- /**
- * Set the tempo of this part, in Beats Per Minute.
- *
- * @method setBPM
- * @for p5.Part
- * @param {Number} BPM Beats Per Minute
- * @param {Number} [rampTime] Seconds from now
- */
-
-
- p5.Part.prototype.setBPM = function (tempo, rampTime) {
- this.metro.setBPM(tempo, rampTime);
- };
- /**
- * Returns the tempo, in Beats Per Minute, of this part.
- *
- * @method getBPM
- * @for p5.Part
- * @return {Number}
- */
-
-
- p5.Part.prototype.getBPM = function () {
- return this.metro.getBPM();
- };
- /**
- * Start playback of this part. It will play
- * through all of its phrases at a speed
- * determined by setBPM.
- *
- * @method start
- * @for p5.Part
- * @param {Number} [time] seconds from now
- */
-
-
- p5.Part.prototype.start = function (time) {
- if (!this.isPlaying) {
- this.isPlaying = true;
- this.metro.resetSync(this);
- var t = time || 0;
- this.metro.start(t);
- }
- };
- /**
- * Loop playback of this part. It will begin
- * looping through all of its phrases at a speed
- * determined by setBPM.
- *
- * @method loop
- * @for p5.Part
- * @param {Number} [time] seconds from now
- */
-
-
- p5.Part.prototype.loop = function (time) {
- this.looping = true;
-
- this.onended = function () {
- this.partStep = 0;
- };
-
- var t = time || 0;
- this.start(t);
- };
- /**
- * Tell the part to stop looping.
- *
- * @method noLoop
- * @for p5.Part
- */
-
-
- p5.Part.prototype.noLoop = function () {
- this.looping = false;
-
- this.onended = function () {
- this.stop();
- };
- };
- /**
- * Stop the part and cue it to step 0. Playback will resume from the begining of the Part when it is played again.
- *
- * @method stop
- * @for p5.Part
- * @param {Number} [time] seconds from now
- */
-
-
- p5.Part.prototype.stop = function (time) {
- this.partStep = 0;
- this.pause(time);
- };
- /**
- * Pause the part. Playback will resume
- * from the current step.
- *
- * @method pause
- * @for p5.Part
- * @param {Number} time seconds from now
- */
-
-
- p5.Part.prototype.pause = function (time) {
- this.isPlaying = false;
- var t = time || 0;
- this.metro.stop(t);
- };
- /**
- * Add a p5.Phrase to this Part.
- *
- * @method addPhrase
- * @for p5.Part
- * @param {p5.Phrase} phrase reference to a p5.Phrase
- */
-
-
- p5.Part.prototype.addPhrase = function (name, callback, array) {
- var p;
-
- if (arguments.length === 3) {
- p = new p5.Phrase(name, callback, array);
- } else if (arguments[0] instanceof p5.Phrase) {
- p = arguments[0];
- } else {
- throw new Error('invalid input. addPhrase accepts name, callback, array or a p5.Phrase');
- }
-
- this.phrases.push(p);
-
- if (p.sequence.length > this.length) {
- this.length = p.sequence.length;
- }
- };
- /**
- * Remove a phrase from this part, based on the name it was
- * given when it was created.
- *
- * @method removePhrase
- * @for p5.Part
- * @param {String} phraseName
- */
-
-
- p5.Part.prototype.removePhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases.splice(i, 1);
- }
- }
- };
- /**
- * Get a phrase from this part, based on the name it was
- * given when it was created. Now you can modify its array.
- *
- * @method getPhrase
- * @for p5.Part
- * @param {String} phraseName
- */
-
-
- p5.Part.prototype.getPhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- return this.phrases[i];
- }
- }
- };
- /**
- * Find all sequences with the specified name, and replace their patterns with the specified array.
- *
- * @method replaceSequence
- * @for p5.Part
- * @param {String} phraseName
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
- */
-
-
- p5.Part.prototype.replaceSequence = function (name, array) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases[i].sequence = array;
- }
- }
- };
-
- p5.Part.prototype.incrementStep = function (time) {
- if (this.partStep < this.length - 1) {
- this.callback(time);
- this.partStep += 1;
- } else {
- if (!this.looping && this.partStep === this.length - 1) {
- this.onended();
- }
- }
- };
- /**
- * Set the function that will be called at every step. This will clear the previous function.
- *
- * @method onStep
- * @for p5.Part
- * @param {Function} callback The name of the callback
- * you want to fire
- * on every beat/tatum.
- */
-
-
- p5.Part.prototype.onStep = function (callback) {
- this.callback = callback;
- };
-
- /**
- * A Score consists of a series of Parts. The parts will
- * be played back in order. For example, you could have an
- * A part, a B part, and a C part, and play them back in this order
- * new p5.Score(a, a, b, a, c)
- *
- * @class p5.Score
- * @constructor
- * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
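- * @example
- * // A hedged sketch: assumes partA and partB are p5.Parts built
- * // as in the p5.Part example above.
- * let score;
- *
- * function setup() {
- * score = new p5.Score(partA, partA, partB);
- * score.loop();
- * }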
- */
-
-
- p5.Score = function () {
- this.parts = [];
- this.currentPart = 0;
- var thisScore = this;
-
- for (var i = 0; i < arguments.length; i++) {
- if (arguments[i]) {
- this.parts[i] = arguments[i];
- this.parts[i].nextPart = this.parts[i + 1];
-
- // capture the index so each part resets the right slot when it ends
- (function (index) {
- thisScore.parts[index].onended = function () {
- thisScore.resetPart(index);
- playNextPart(thisScore);
- };
- })(i);
- }
- }
-
- this.looping = false;
- };
-
- p5.Score.prototype.onended = function () {
- if (this.looping) {
- this.parts[0].start();
- } else {
- // capture the score; inside the part's onended, `this` is the part,
- // which has no resetParts method
- var thisScore = this;
-
- this.parts[this.parts.length - 1].onended = function () {
- this.stop();
- thisScore.resetParts();
- };
- }
-
- this.currentPart = 0;
- };
- /**
- * Start playback of the score.
- *
- * @method start
- * @for p5.Score
- */
-
-
- p5.Score.prototype.start = function () {
- this.parts[this.currentPart].start();
- this.scoreStep = 0;
- };
- /**
- * Stop playback of the score.
- *
- * @method stop
- * @for p5.Score
- */
-
-
- p5.Score.prototype.stop = function () {
- this.parts[this.currentPart].stop();
- this.currentPart = 0;
- this.scoreStep = 0;
- };
- /**
- * Pause playback of the score.
- *
- * @method pause
- * @for p5.Score
- */
-
-
- p5.Score.prototype.pause = function () {
- this.parts[this.currentPart].stop();
- };
- /**
- * Loop playback of the score.
- *
- * @method loop
- * @for p5.Score
- */
-
-
- p5.Score.prototype.loop = function () {
- this.looping = true;
- this.start();
- };
- /**
- * Stop looping playback of the score. If it
- * is currently playing, this will go into effect
- * after the current round of playback completes.
- *
- * @method noLoop
- * @for p5.Score
- */
-
-
- p5.Score.prototype.noLoop = function () {
- this.looping = false;
- };
-
- p5.Score.prototype.resetParts = function () {
- var self = this;
- this.parts.forEach(function (part, i) {
- self.resetPart(i);
- });
- };
-
- p5.Score.prototype.resetPart = function (i) {
- this.parts[i].stop();
- this.parts[i].partStep = 0;
-
- for (var p in this.parts[i].phrases) {
- if (this.parts[i]) {
- this.parts[i].phrases[p].phraseStep = 0;
- }
- }
- };
- /**
- * Set the tempo for all parts in the score
- *
- * @method setBPM
- * @for p5.Score
- * @param {Number} BPM Beats Per Minute
- * @param {Number} rampTime Seconds from now
- */
-
-
- p5.Score.prototype.setBPM = function (bpm, rampTime) {
- for (var i in this.parts) {
- if (this.parts[i]) {
- this.parts[i].setBPM(bpm, rampTime);
- }
- }
- };
-
- function playNextPart(aScore) {
- aScore.currentPart++;
-
- if (aScore.currentPart >= aScore.parts.length) {
- aScore.scoreStep = 0;
- aScore.onended();
- } else {
- aScore.scoreStep = 0;
- aScore.parts[aScore.currentPart - 1].stop();
- aScore.parts[aScore.currentPart].start();
- }
- }
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var Clock = __webpack_require__(27);
- /**
- * SoundLoop
- *
- * @class p5.SoundLoop
- * @constructor
- *
- * @param {Function} callback this function will be called on each iteration of the loop
- * @param {Number|String} [interval] amount of time (if a number) or beats (if a string, following Tone.Time convention) for each iteration of the loop. Defaults to 1 second.
- *
- * @example
- *
- * let synth, soundLoop;
- * let notePattern = [60, 62, 64, 67, 69, 72];
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * colorMode(HSB);
- * background(0, 0, 86);
- * text('tap to start/stop', 10, 20);
- *
- * //the looper's callback is passed the timeFromNow
- * //this value should be used as a reference point from
- * //which to schedule sounds
- * let intervalInSeconds = 0.2;
- * soundLoop = new p5.SoundLoop(onSoundLoop, intervalInSeconds);
- *
- * synth = new p5.MonoSynth();
- * }
- *
- * function canvasPressed() {
- * // ensure audio is enabled
- * userStartAudio();
- *
- * if (soundLoop.isPlaying) {
- * soundLoop.stop();
- * } else {
- * // start the loop
- * soundLoop.start();
- * }
- * }
- *
- * function onSoundLoop(timeFromNow) {
- * let noteIndex = (soundLoop.iterations - 1) % notePattern.length;
- * let note = midiToFreq(notePattern[noteIndex]);
- * synth.play(note, 0.5, timeFromNow);
- * background(noteIndex * 360 / notePattern.length, 50, 100);
- * }
- *
- */
-
-
- p5.SoundLoop = function (callback, interval) {
- this.callback = callback;
- /**
- * musicalTimeMode uses Tone.Time convention
- * true if string, false if number
- * @property {Boolean} musicalTimeMode
- */
-
- // assign the interval before checking its type, so the check sees the real value
- this._interval = interval || 1;
- this.musicalTimeMode = typeof this._interval === 'number' ? false : true;
- /**
- * musicalTimeMode variables
- * modify these only when the interval is specified in musicalTime format as a string
- */
-
- this._timeSignature = 4;
- this._bpm = 60;
- this.isPlaying = false;
- /**
- * Set a limit to the number of loops to play. Defaults to Infinity.
- * @property {Number} maxIterations
- */
-
- this.maxIterations = Infinity;
- var self = this;
- this.clock = new Clock({
- 'callback': function callback(time) {
- var timeFromNow = time - p5sound.audiocontext.currentTime;
- /**
- * Do not initiate the callback if timeFromNow is < 0
- * This usually occurs for a few milliseconds when the page
- * is not fully loaded
- *
- * The callback should only be called until maxIterations is reached
- */
-
- if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
- self.callback(timeFromNow);
- }
- },
- 'frequency': this._calcFreq()
- });
- };
- /**
- * Start the loop
- * @method start
- * @for p5.SoundLoop
- * @param {Number} [timeFromNow] schedule a starting time
- */
-
-
- p5.SoundLoop.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
-
- if (!this.isPlaying) {
- this.clock.start(now + t);
- this.isPlaying = true;
- }
- };
- /**
- * Stop the loop
- * @method stop
- * @for p5.SoundLoop
- * @param {Number} [timeFromNow] schedule a stopping time
- */
-
-
- p5.SoundLoop.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
-
- if (this.isPlaying) {
- this.clock.stop(now + t);
- this.isPlaying = false;
- }
- };
- /**
- * Pause the loop
- * @method pause
- * @for p5.SoundLoop
- * @param {Number} [timeFromNow] schedule a pausing time
- */
-
-
- p5.SoundLoop.prototype.pause = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
-
- if (this.isPlaying) {
- this.clock.pause(now + t);
- this.isPlaying = false;
- }
- };
- /**
 - * Synchronize loops. Use this method to start two or more loops in synchronization,
 - * or to start a loop in synchronization with a loop that is already playing.
 - * Calling loopToStart.syncedStart(loopToSyncWith) schedules loopToStart
 - * to begin in sync with the master loop, loopToSyncWith.
- *
- * @method syncedStart
- * @for p5.SoundLoop
- * @param {Object} otherLoop a p5.SoundLoop to sync with
- * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
- */
-
-
- p5.SoundLoop.prototype.syncedStart = function (otherLoop, timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
-
- if (!otherLoop.isPlaying) {
- otherLoop.clock.start(now + t);
- otherLoop.isPlaying = true;
- this.clock.start(now + t);
- this.isPlaying = true;
- } else if (otherLoop.isPlaying) {
- var time = otherLoop.clock._nextTick - p5sound.audiocontext.currentTime;
- this.clock.start(now + time);
- this.isPlaying = true;
- }
- };
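 - /**
 -  * Usage sketch (illustrative, not part of the library source): sync a
 -  * new loop to one that is already playing. The callbacks onBass and
 -  * onMelody are hypothetical.
 -  *
 -  *   let bass = new p5.SoundLoop(onBass, '4n');
 -  *   let melody = new p5.SoundLoop(onMelody, '8n');
 -  *   bass.start();
 -  *   // later, e.g. on a key press: begin melody on bass's next tick
 -  *   melody.syncedStart(bass);
 -  */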
- /**
- * Updates frequency value, reflected in next callback
- * @private
- * @for p5.SoundLoop
- * @method _update
- */
-
-
- p5.SoundLoop.prototype._update = function () {
- this.clock.frequency.value = this._calcFreq();
- };
- /**
 - * Calculate the frequency of the clock's callback based on bpm, interval, and time signature
- * @private
- * @for p5.SoundLoop
- * @method _calcFreq
- * @return {Number} new clock frequency value
- */
-
-
- p5.SoundLoop.prototype._calcFreq = function () {
- if (typeof this._interval === 'number') {
- this.musicalTimeMode = false;
- return 1 / this._interval;
- }
- else if (typeof this._interval === 'string') {
- this.musicalTimeMode = true;
- return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
- }
- };
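 - // Worked example (illustrative): with the defaults _bpm = 60 and
 - // _timeSignature = 4, an interval of '4n' converts to 4/4 = 1 beat,
 - // so the clock frequency is 60/60 / 1 * (4/4) = 1 Hz, i.e. one
 - // callback per second. Doubling _bpm to 120 doubles the frequency to 2 Hz.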
- /**
- * Convert notation from musical time format to seconds
- * Uses Tone.Time convention
- * @private
- * @for p5.SoundLoop
- * @method _convertNotation
- * @param {String} value value to be converted
- * @return {Number} converted value in seconds
- */
-
-
- p5.SoundLoop.prototype._convertNotation = function (value) {
- var type = value.slice(-1);
- value = Number(value.slice(0, -1));
-
- switch (type) {
- case 'm':
- return this._measure(value);
-
- case 'n':
- return this._note(value);
-
- default:
- console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
- }
- };
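 - // Illustrative conversions with the default 4/4 time signature:
 - // '4n' (quarter note) -> _note(4) = 4/4 = 1, '2n' -> 4/2 = 2,
 - // and '1m' (one measure) -> _measure(1) = 1 * 4 = 4.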
- /**
 - * Helper conversion methods for measures and notes
- * @private
- * @for p5.SoundLoop
- * @method _measure
- */
-
-
- p5.SoundLoop.prototype._measure = function (value) {
- return value * this._timeSignature;
- };
- /**
- * @private
- * @method _note
- * @for p5.SoundLoop
- */
-
-
- p5.SoundLoop.prototype._note = function (value) {
- return this._timeSignature / value;
- };
- /**
 - * Getters and Setters. Setting any parameter will result in a change in the clock's
 - * frequency that will be reflected after the next callback.
- * beats per minute (defaults to 60)
- * @property {Number} bpm
- * @for p5.SoundLoop
- */
-
-
- Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
- get: function get() {
- return this._bpm;
- },
- set: function set(bpm) {
- if (!this.musicalTimeMode) {
- console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
- }
-
- this._bpm = bpm;
-
- this._update();
- }
- });
- /**
- * number of quarter notes in a measure (defaults to 4)
- * @property {Number} timeSignature
- * @for p5.SoundLoop
- */
-
- Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
- get: function get() {
- return this._timeSignature;
- },
- set: function set(timeSig) {
- if (!this.musicalTimeMode) {
 - console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'The time signature is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
- }
-
- this._timeSignature = timeSig;
-
- this._update();
- }
- });
- /**
 - * length of the loop's interval
- * @property {Number|String} interval
- * @for p5.SoundLoop
- */
-
- Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
- get: function get() {
- return this._interval;
- },
- set: function set(interval) {
 - this.musicalTimeMode = typeof interval === 'string';
- this._interval = interval;
-
- this._update();
- }
- });
- /**
- * how many times the callback has been called so far
- * @property {Number} iterations
- * @for p5.SoundLoop
- * @readonly
- */
-
- Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
- get: function get() {
- return this.clock.ticks;
- }
- });
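 - /**
 -  * Usage sketch (illustrative): the setters above only take effect when
 -  * the interval is a musical-time string. The callback onLoop is
 -  * hypothetical.
 -  *
 -  *   let loop = new p5.SoundLoop(onLoop, '8n');
 -  *   loop.bpm = 120;         // eighth notes at 120 BPM -> 4 Hz
 -  *   loop.interval = '1m';   // one callback per measure
 -  *   loop.timeSignature = 3; // a '1m' measure now spans 3 quarter notes
 -  */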
- return p5.SoundLoop;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- 'use strict';
-
- var p5sound = __webpack_require__(1);
-
- var Effect = __webpack_require__(4);
-
- var CustomError = __webpack_require__(11);
- /**
- * Compressor is an audio effect class that performs dynamics compression
- * on an audio input source. This is a very commonly used technique in music
- * and sound production. Compression creates an overall louder, richer,
- * and fuller sound by lowering the volume of louds and raising that of softs.
- * Compression can be used to avoid clipping (sound distortion due to
- * peaks in volume) and is especially useful when many sounds are played
- * at once. Compression can be used on indivudal sound sources in addition
- * to the master output.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Compressor
- * @constructor
- * @extends p5.Effect
- *
- *
- */
-
-
- p5.Compressor = function () {
- Effect.call(this);
- /**
- * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
- *
- * @property {AudioNode} compressor
- */
-
- this.compressor = this.ac.createDynamicsCompressor();
- this.input.connect(this.compressor);
- this.compressor.connect(this.wet);
- };
-
- p5.Compressor.prototype = Object.create(Effect.prototype);
- /**
- * Performs the same function as .connect, but also accepts
 - * optional parameters to set the compressor's audioParams
- * @method process
- * @for p5.Compressor
- *
- * @param {Object} src Sound source to be connected
- *
- * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [threshold] The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
- */
-
- p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) {
- src.connect(this.input);
- this.set(attack, knee, ratio, threshold, release);
- };
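 - /**
 -  * Usage sketch (illustrative): route a source through the compressor
 -  * while overriding a few parameters. Here soundFile stands in for a
 -  * hypothetical p5.SoundFile loaded elsewhere in the sketch.
 -  *
 -  *   let compressor = new p5.Compressor();
 -  *   // attack .005s, knee 20dB, ratio 8:1, threshold -30dB, release .2s
 -  *   compressor.process(soundFile, 0.005, 20, 8, -30, 0.2);
 -  */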
- /**
 - * Set the parameters of a compressor.
- * @method set
- * @for p5.Compressor
- * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} knee A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} threshold The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
- */
-
-
- p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) {
- if (typeof attack !== 'undefined') {
- this.attack(attack);
- }
-
- if (typeof knee !== 'undefined') {
- this.knee(knee);
- }
-
- if (typeof ratio !== 'undefined') {
- this.ratio(ratio);
- }
-
- if (typeof threshold !== 'undefined') {
- this.threshold(threshold);
- }
-
- if (typeof release !== 'undefined') {
- this.release(release);
- }
- };
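 - // Illustrative: parameters can also be changed after routing; undefined
 - // arguments are skipped, so this updates only ratio and threshold:
 - //   compressor.set(undefined, undefined, 4, -40);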
- /**
- * Get current attack or set value w/ time ramp
- *
- *
- * @method attack
- * @for p5.Compressor
- * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [time] Assign time value to schedule the change in value
- */
-
-
- p5.Compressor.prototype.attack = function (attack, time) {
- var t = time || 0;
-
- if (typeof attack == 'number') {
- this.compressor.attack.value = attack;
- this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t);
- } else if (typeof attack !== 'undefined') {
- attack.connect(this.compressor.attack);
- }
-
- return this.compressor.attack.value;
- };
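 - // Note (illustrative): knee(), ratio(), threshold() and release() below
 - // follow the same pattern as attack(): a number ramps the underlying
 - // AudioParam to that value, a connectable signal modulates it, and no
 - // argument simply reads the current value back, e.g.
 - //   compressor.attack(0.01);     // set with a short ramp
 - //   let a = compressor.attack(); // get the current value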
- /**
- * Get current knee or set value w/ time ramp
- *
- * @method knee
- * @for p5.Compressor
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [time] Assign time value to schedule the change in value
- */
-
-
- p5.Compressor.prototype.knee = function (knee, time) {
- var t = time || 0;
-
- if (typeof knee == 'number') {
- this.compressor.knee.value = knee;
- this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t);
- } else if (typeof knee !== 'undefined') {
- knee.connect(this.compressor.knee);
- }
-
- return this.compressor.knee.value;
- };
- /**
- * Get current ratio or set value w/ time ramp
- * @method ratio
- * @for p5.Compressor
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [time] Assign time value to schedule the change in value
- */
-
-
- p5.Compressor.prototype.ratio = function (ratio, time) {
- var t = time || 0;
-
- if (typeof ratio == 'number') {
- this.compressor.ratio.value = ratio;
- this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t);
- } else if (typeof ratio !== 'undefined') {
- ratio.connect(this.compressor.ratio);
- }
-
- return this.compressor.ratio.value;
- };
- /**
- * Get current threshold or set value w/ time ramp
- * @method threshold
- * @for p5.Compressor
 - * @param {Number} [threshold] The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [time] Assign time value to schedule the change in value
- */
-
-
- p5.Compressor.prototype.threshold = function (threshold, time) {
- var t = time || 0;
-
- if (typeof threshold == 'number') {
- this.compressor.threshold.value = threshold;
- this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t);
- } else if (typeof threshold !== 'undefined') {
- threshold.connect(this.compressor.threshold);
- }
-
- return this.compressor.threshold.value;
- };
- /**
- * Get current release or set value w/ time ramp
- * @method release
- * @for p5.Compressor
 - * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
- *
- * @param {Number} [time] Assign time value to schedule the change in value
- */
-
-
- p5.Compressor.prototype.release = function (release, time) {
- var t = time || 0;
-
- if (typeof release == 'number') {
- this.compressor.release.value = release;
- this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t);
 - } else if (typeof release !== 'undefined') {
- release.connect(this.compressor.release);
- }
-
- return this.compressor.release.value;
- };
- /**
- * Return the current reduction value
- *
- * @method reduction
- * @for p5.Compressor
- * @return {Number} Value of the amount of gain reduction that is applied to the signal
- */
-
-
- p5.Compressor.prototype.reduction = function () {
- return this.compressor.reduction.value;
- };
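 - /**
 -  * Usage sketch (illustrative): per the documented behavior, reduction()
 -  * reports the gain reduction in dB (0 when no compression is applied),
 -  * which is handy for metering inside draw():
 -  *
 -  *   function draw() {
 -  *     let gr = compressor.reduction();
 -  *     background(map(gr, -20, 0, 255, 0)); // brighter = more reduction
 -  *   }
 -  */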
-
- p5.Compressor.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
-
- if (this.compressor) {
- this.compressor.disconnect();
- delete this.compressor;
- }
- };
-
- return p5.Compressor;
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
- var p5sound = __webpack_require__(1);
-
- var _require = __webpack_require__(5),
- convertToWav = _require.convertToWav,
- safeBufferSize = _require.safeBufferSize;
-
- var processorNames = __webpack_require__(10);
-
- var ac = p5sound.audiocontext;
- /**
- *
 - * Record sounds for playback and/or to save as a .wav file.
- * The p5.SoundRecorder records all sound output from your sketch,
- * or can be assigned a specific source with setInput().
- *
 - * The record() method accepts a p5.SoundFile as a parameter.
- * When playback is stopped (either after the given amount of time,
- * or with the stop() method), the p5.SoundRecorder will send its
- * recording to that p5.SoundFile for playback.
 - *
 - * @class p5.SoundRecorder
 - * @constructor
 - * @example
 - * let mic, recorder, soundFile;
- * let state = 0;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(canvasPressed);
- * background(220);
- * textAlign(CENTER, CENTER);
- *
- * // create an audio in
- * mic = new p5.AudioIn();
- *
- * // prompts user to enable their browser mic
- * mic.start();
- *
- * // create a sound recorder
- * recorder = new p5.SoundRecorder();
- *
- * // connect the mic to the recorder
- * recorder.setInput(mic);
- *
- * // this sound file will be used to
- * // playback & save the recording
- * soundFile = new p5.SoundFile();
- *
- * text('tap to record', width/2, height/2);
- * }
- *
- * function canvasPressed() {
- * // ensure audio is enabled
- * userStartAudio();
- *
- * // make sure user enabled the mic
- * if (state === 0 && mic.enabled) {
- *
- * // record to our p5.SoundFile
- * recorder.record(soundFile);
- *
- * background(255,0,0);
- * text('Recording!', width/2, height/2);
- * state++;
- * }
- * else if (state === 1) {
- * background(0,255,0);
- *
- * // stop recorder and
- * // send result to soundFile
- * recorder.stop();
- *
- * text('Done! Tap to play and download', width/2, height/2, width - 20);
- * state++;
- * }
- *
- * else if (state === 2) {
- * soundFile.play(); // play the result!
- * save(soundFile, 'mySound.wav');
- * state++;
- * }
- * }
- *
- */
-
- p5.SoundRecorder = function () {
- this.input = ac.createGain();
- this.output = ac.createGain();
- this._inputChannels = 2;
- this._outputChannels = 2;
-
- var workletBufferSize = safeBufferSize(1024);
- this._workletNode = new AudioWorkletNode(ac, processorNames.recorderProcessor, {
- outputChannelCount: [this._outputChannels],
- processorOptions: {
- numInputChannels: this._inputChannels,
- bufferSize: workletBufferSize
- }
- });
-
- this._workletNode.port.onmessage = function (event) {
- if (event.data.name === 'buffers') {
- var buffers = [new Float32Array(event.data.leftBuffer), new Float32Array(event.data.rightBuffer)];
-
- this._callback(buffers);
- }
- }.bind(this);
- /**
- * callback invoked when the recording is over
- * @private
- * @type Function(Float32Array)
- */
-
-
- this._callback = function () {};
-
-
- this._workletNode.connect(p5.soundOut._silentNode);
-
- this.setInput();
-
- p5sound.soundArray.push(this);
- };
- /**
- * Connect a specific device to the p5.SoundRecorder.
 - * If no parameter is given, the p5.SoundRecorder will record
 - * all audible p5.sound output from your sketch.
- *
- * @method setInput
- * @for p5.SoundRecorder
- * @param {Object} [unit] p5.sound object or a web audio unit
- * that outputs sound
- */
-
-
- p5.SoundRecorder.prototype.setInput = function (unit) {
- this.input.disconnect();
- this.input = null;
- this.input = ac.createGain();
- this.input.connect(this._workletNode);
- this.input.connect(this.output);
-
- if (unit) {
- unit.connect(this.input);
- } else {
- p5.soundOut.output.connect(this.input);
- }
- };
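 - // Illustrative: calling setInput() with no argument reverts to recording
 - // the sketch's master output:
 - //   recorder.setInput();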
- /**
- * Start recording. To access the recording, provide
- * a p5.SoundFile as the first parameter. The p5.SoundRecorder
- * will send its recording to that p5.SoundFile for playback once
- * recording is complete. Optional parameters include duration
- * (in seconds) of the recording, and a callback function that
- * will be called once the complete recording has been
 - * transferred to the p5.SoundFile.
- *
- * @method record
- * @for p5.SoundRecorder
- * @param {p5.SoundFile} soundFile p5.SoundFile
- * @param {Number} [duration] Time (in seconds)
- * @param {Function} [callback] The name of a function that will be
- * called once the recording completes
- */
-
-
- p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
- this._workletNode.port.postMessage({
- name: 'start',
- duration: duration
- });
-
- if (sFile && callback) {
- this._callback = function (buffer) {
- sFile.setBuffer(buffer);
- callback();
- };
- } else if (sFile) {
- this._callback = function (buffer) {
- sFile.setBuffer(buffer);
- };
- }
- };
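 - /**
 -  * Usage sketch (illustrative): record for a fixed duration and react once
 -  * the buffer has been transferred. Assumes soundFile and recorder are set
 -  * up as in the class example above.
 -  *
 -  *   // record 3 seconds, then play back automatically
 -  *   recorder.record(soundFile, 3, function() {
 -  *     soundFile.play();
 -  *   });
 -  */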
- /**
- * Stop the recording. Once the recording is stopped,
- * the results will be sent to the p5.SoundFile that
- * was given on .record(), and if a callback function
- * was provided on record, that function will be called.
- *
- * @method stop
- * @for p5.SoundRecorder
- */
-
-
- p5.SoundRecorder.prototype.stop = function () {
- this._workletNode.port.postMessage({
- name: 'stop'
- });
- };
-
- p5.SoundRecorder.prototype.dispose = function () {
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
-
- this._callback = function () {};
-
- if (this.input) {
- this.input.disconnect();
- }
-
- this.input = null;
- this._workletNode = null;
- };
- /**
- * Save a p5.SoundFile as a .wav file. The browser will prompt the user
- * to download the file to their device.
- * For uploading audio to a server, use
- * `p5.SoundFile.saveBlob`.
- *
- * @for p5
- * @method saveSound
- * @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save
- * @param {String} fileName name of the resulting .wav file.
- */
-
-
- p5.prototype.saveSound = function (soundFile, fileName) {
- var dataView = convertToWav(soundFile.buffer);
- p5.prototype.writeFile([dataView], fileName, 'wav');
- };
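 - // Usage sketch (illustrative): prompt a download of a finished recording,
 - // assuming soundFile holds a completed p5.SoundFile recording:
 - //   saveSound(soundFile, 'myRecording.wav');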
-}).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
-
- }),
- (function(module, exports, __webpack_require__) {
-
-"use strict";
-var __WEBPACK_AMD_DEFINE_RESULT__;
-
-!(__WEBPACK_AMD_DEFINE_RESULT__ = (function () {
- /**
- *
 - * PeakDetect works in conjunction with p5.FFT to
- * look for onsets in some or all of the frequency spectrum.
- *
- *
- * To use p5.PeakDetect, call update in the draw loop
- * and pass in a p5.FFT object.
- *
- *
- * You can listen for a specific part of the frequency spectrum by
- * setting the range between freq1 and freq2.
- *
- *
- *
 - * threshold is the threshold for detecting a peak,
- * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
- * as 1.0.
- *
- *
 - * The update method is meant to be run in the draw loop, and
 - * framesPerPeak determines how many loops must pass before
 - * another peak can be detected.
 - * For example, if frameRate() is 60, you could detect the beat of a
 - * 120 beats-per-minute song with this equation:
 - * framesPerPeak = 60 / (estimatedBPM / 60 );
- *
- *
- *
 - * Based on an example contributed by @b2renger, and a simple beat detection
- * explanation by Felix Turner.
- *
- *
- * @class p5.PeakDetect
- * @constructor
- * @param {Number} [freq1] lowFrequency - defaults to 20Hz
- * @param {Number} [freq2] highFrequency - defaults to 20000 Hz
- * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
- * scaled logarithmically where 0.1 is 1/2 the loudness
- * of 1.0. Defaults to 0.35.
- * @param {Number} [framesPerPeak] Defaults to 20.
- * @example
- *