// microphone.js — wavesurfer.js microphone plugin
/**
 * @typedef {Object} MicrophonePluginParams
 * @property {MediaStreamConstraints} constraints The constraints parameter is a
 * MediaStreamConstraints object with two members: video and audio, describing
 * the media types requested. Either or both must be specified.
 * @property {number} bufferSize=4096 The buffer size in units of sample-frames.
 * If specified, the bufferSize must be one of the following values: `256`,
 * `512`, `1024`, `2048`, `4096`, `8192`, `16384`.
 * @property {number} numberOfInputChannels=1 Integer specifying the number of
 * channels for this node's input. Values of up to 32 are supported.
 * @property {?boolean} deferInit Set to true to manually call
 * `initPlugin('microphone')`
 */

/**
 * Visualise microphone input in a wavesurfer instance.
 *
 * @implements {PluginClass}
 * @extends {Observer}
 * @example
 * // es6
 * import MicrophonePlugin from 'wavesurfer.microphone.js';
 *
 * // commonjs
 * var MicrophonePlugin = require('wavesurfer.microphone.js');
 *
 * // if you are using <script> tags
 * var MicrophonePlugin = window.WaveSurfer.microphone;
 *
 * // ... initialising wavesurfer with the plugin
 * var wavesurfer = WaveSurfer.create({
 *   // wavesurfer options ...
 *   plugins: [
 *     MicrophonePlugin.create({
 *       // plugin options ...
 *     })
 *   ]
 * });
 */
  39. export default class MicrophonePlugin {
  40. /**
  41. * Microphone plugin definition factory
  42. *
  43. * This function must be used to create a plugin definition which can be
  44. * used by wavesurfer to correctly instantiate the plugin.
  45. *
  46. * @param {MicrophonePluginParams} params parameters use to initialise the plugin
  47. * @return {PluginDefinition} an object representing the plugin
  48. */
  49. static create(params) {
  50. return {
  51. name: 'microphone',
  52. deferInit: params && params.deferInit ? params.deferInit : false,
  53. params: params,
  54. instance: MicrophonePlugin
  55. };
  56. }
  57. constructor(params, ws) {
  58. this.params = params;
  59. this.wavesurfer = ws;
  60. this.active = false;
  61. this.paused = false;
  62. this.reloadBufferFunction = e => this.reloadBuffer(e);
  63. // cross-browser getUserMedia
  64. const promisifiedOldGUM = (constraints, successCallback, errorCallback) => {
  65. // get ahold of getUserMedia, if present
  66. const getUserMedia = (navigator.getUserMedia ||
  67. navigator.webkitGetUserMedia ||
  68. navigator.mozGetUserMedia ||
  69. navigator.msGetUserMedia
  70. );
  71. // Some browsers just don't implement it - return a rejected
  72. // promise with an error to keep a consistent interface
  73. if (!getUserMedia) {
  74. return Promise.reject(
  75. new Error('getUserMedia is not implemented in this browser')
  76. );
  77. }
  78. // otherwise, wrap the call to the old navigator.getUserMedia with
  79. // a Promise
  80. return new Promise((successCallback, errorCallback) => {
  81. getUserMedia.call(navigator, constraints, successCallback, errorCallback);
  82. });
  83. };
  84. // Older browsers might not implement mediaDevices at all, so we set an
  85. // empty object first
  86. if (navigator.mediaDevices === undefined) {
  87. navigator.mediaDevices = {};
  88. }
  89. // Some browsers partially implement mediaDevices. We can't just assign
  90. // an object with getUserMedia as it would overwrite existing
  91. // properties. Here, we will just add the getUserMedia property if it's
  92. // missing.
  93. if (navigator.mediaDevices.getUserMedia === undefined) {
  94. navigator.mediaDevices.getUserMedia = promisifiedOldGUM;
  95. }
  96. this.constraints = this.params.constraints || {
  97. video: false,
  98. audio: true
  99. };
  100. this.bufferSize = this.params.bufferSize || 4096;
  101. this.numberOfInputChannels = this.params.numberOfInputChannels || 1;
  102. this.numberOfOutputChannels = this.params.numberOfOutputChannels || 1;
  103. this._onBackendCreated = () => {
  104. // wavesurfer's AudioContext where we'll route the mic signal to
  105. this.micContext = this.wavesurfer.backend.getAudioContext();
  106. };
  107. }
  108. init() {
  109. this.wavesurfer.on('backend-created', this._onBackendCreated);
  110. if (this.wavesurfer.backend) {
  111. this._onBackendCreated();
  112. }
  113. }
  114. /**
  115. * Destroy the microphone plugin.
  116. */
  117. destroy() {
  118. // make sure the buffer is not redrawn during
  119. // cleanup and demolition of this plugin.
  120. this.paused = true;
  121. this.wavesurfer.un('backend-created', this._onBackendCreated);
  122. this.stop();
  123. }
  124. /**
  125. * Allow user to select audio input device, eg. microphone, and
  126. * start the visualization.
  127. */
  128. start() {
  129. navigator.mediaDevices.getUserMedia(this.constraints)
  130. .then((data) => this.gotStream(data))
  131. .catch((data) => this.deviceError(data));
  132. }
  133. /**
  134. * Pause/resume visualization.
  135. */
  136. togglePlay() {
  137. if (!this.active) {
  138. // start it first
  139. this.start();
  140. } else {
  141. // toggle paused
  142. this.paused = !this.paused;
  143. if (this.paused) {
  144. this.pause();
  145. } else {
  146. this.play();
  147. }
  148. }
  149. }
  150. /**
  151. * Play visualization.
  152. */
  153. play() {
  154. this.paused = false;
  155. this.connect();
  156. }
  157. /**
  158. * Pause visualization.
  159. */
  160. pause() {
  161. this.paused = true;
  162. // disconnect sources so they can be used elsewhere
  163. // (eg. during audio playback)
  164. this.disconnect();
  165. }
  166. /**
  167. * Stop the device stream and remove any remaining waveform drawing from
  168. * the wavesurfer canvas.
  169. */
  170. stop() {
  171. if (this.active) {
  172. // stop visualization and device
  173. this.stopDevice();
  174. // empty last frame
  175. this.wavesurfer.empty();
  176. }
  177. }
  178. /**
  179. * Stop the device and the visualization.
  180. */
  181. stopDevice() {
  182. this.active = false;
  183. // stop visualization
  184. this.disconnect();
  185. // stop stream from device
  186. if (this.stream) {
  187. const result = this.detectBrowser();
  188. // MediaStream.stop is deprecated since:
  189. // - Firefox 44 (https://www.fxsitecompat.com/en-US/docs/2015/mediastream-stop-has-been-deprecated/)
  190. // - Chrome 45 (https://developers.google.com/web/updates/2015/07/mediastream-deprecations)
  191. if ((result.browser === 'chrome' && result.version >= 45) ||
  192. (result.browser === 'firefox' && result.version >= 44) ||
  193. (result.browser === 'edge')) {
  194. if (this.stream.getTracks) { // note that this should not be a call
  195. this.stream.getTracks().forEach(stream => stream.stop());
  196. return;
  197. }
  198. }
  199. this.stream.stop();
  200. }
  201. }
  202. /**
  203. * Connect the media sources that feed the visualization.
  204. */
  205. connect() {
  206. if (this.stream !== undefined) {
  207. // Create an AudioNode from the stream.
  208. this.mediaStreamSource = this.micContext.createMediaStreamSource(this.stream);
  209. this.levelChecker = this.micContext.createScriptProcessor(
  210. this.bufferSize,
  211. this.numberOfInputChannels,
  212. this.numberOfOutputChannels
  213. );
  214. this.mediaStreamSource.connect(this.levelChecker);
  215. this.levelChecker.connect(this.micContext.destination);
  216. this.levelChecker.onaudioprocess = this.reloadBufferFunction;
  217. }
  218. }
  219. /**
  220. * Disconnect the media sources that feed the visualization.
  221. */
  222. disconnect() {
  223. if (this.mediaStreamSource !== undefined) {
  224. this.mediaStreamSource.disconnect();
  225. }
  226. if (this.levelChecker !== undefined) {
  227. this.levelChecker.disconnect();
  228. this.levelChecker.onaudioprocess = undefined;
  229. }
  230. }
  231. /**
  232. * Redraw the waveform.
  233. */
  234. reloadBuffer(event) {
  235. if (!this.paused) {
  236. this.wavesurfer.empty();
  237. this.wavesurfer.loadDecodedBuffer(event.inputBuffer);
  238. }
  239. }
  240. /**
  241. * Audio input device is ready.
  242. *
  243. * @param {LocalMediaStream} stream The microphone's media stream.
  244. */
  245. gotStream(stream) {
  246. this.stream = stream;
  247. this.active = true;
  248. // start visualization
  249. this.play();
  250. // notify listeners
  251. this.fireEvent('deviceReady', stream);
  252. }
  253. /**
  254. * Device error callback.
  255. */
  256. deviceError(code) {
  257. // notify listeners
  258. this.fireEvent('deviceError', code);
  259. }
  260. /**
  261. * Extract browser version out of the provided user agent string.
  262. * @param {!string} uastring userAgent string.
  263. * @param {!string} expr Regular expression used as match criteria.
  264. * @param {!number} pos position in the version string to be returned.
  265. * @return {!number} browser version.
  266. */
  267. extractVersion(uastring, expr, pos) {
  268. const match = uastring.match(expr);
  269. return match && match.length >= pos && parseInt(match[pos], 10);
  270. }
  271. /**
  272. * Browser detector.
  273. * @return {object} result containing browser, version and minVersion
  274. * properties.
  275. */
  276. detectBrowser() {
  277. // Returned result object.
  278. const result = {};
  279. result.browser = null;
  280. result.version = null;
  281. result.minVersion = null;
  282. // Non supported browser.
  283. if (typeof window === 'undefined' || !window.navigator) {
  284. result.browser = 'Not a supported browser.';
  285. return result;
  286. }
  287. // Firefox.
  288. if (navigator.mozGetUserMedia) {
  289. result.browser = 'firefox';
  290. result.version = this.extractVersion(navigator.userAgent, /Firefox\/([0-9]+)\./, 1);
  291. result.minVersion = 31;
  292. return result;
  293. }
  294. // Chrome/Chromium/Webview.
  295. if (navigator.webkitGetUserMedia && window.webkitRTCPeerConnection) {
  296. result.browser = 'chrome';
  297. result.version = this.extractVersion(navigator.userAgent, /Chrom(e|ium)\/([0-9]+)\./, 2);
  298. result.minVersion = 38;
  299. return result;
  300. }
  301. // Edge.
  302. if (navigator.mediaDevices && navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
  303. result.browser = 'edge';
  304. result.version = this.extractVersion(navigator.userAgent, /Edge\/(\d+).(\d+)$/, 2);
  305. result.minVersion = 10547;
  306. return result;
  307. }
  308. // Non supported browser default.
  309. result.browser = 'Not a supported browser.';
  310. return result;
  311. }
  312. }