// NOTE(review): this chunk appears to be a corrupted extraction -- each statement
// carries a stray leading line number, literal \r artifact lines separate every
// content line, and many original lines are missing (internal numbering jumps).
// Code is left byte-identical; only comments were translated/added.
3 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
\r
4 * Mobile Opera 11 supports Audio but its events cannot be captured
\r
5 * Could we create it inside an iframe and play it with an Audio Sprite preset?
\r
7 var X_AudioSprite_shouldUse = X_HTMLAudio && ( X_UA[ 'iOS' ] || X_UA[ 'AOSP' ] || X_UA[ 'OperaMobile' ] || X_UA[ 'OperaTablet' ] ), // no Flash available
\r
8 X_AudioSprite_useVideoForMulti = //( 3.1 <= X_UA[ 'AOSP' ] < 4 ) ||
\r
9 //( ( 4.2 <= X_UA[ 'AOSP' ] ),
\r
10 // the Dospara tablet cannot play video inline
\r
12 X_AudioSprite_disableMultiTrack = !X_WebAudio && ( X_UA[ 'iOS' ] || 4 <= X_UA[ 'AOSP' ] || X_UA[ 'ChromeWV' ] || ( X_UA[ 'WinPhone' ] && X_UA[ 'IE9' ] ) ),
\r
13 X_AudioSprite_enableVolume = X_HTMLAudio && ( !X_UA[ 'iOS' ] && !X_UA[ 'AOSP' ] && !X_UA[ 'OperaMobile' ] && !X_UA[ 'OperaTablet' ] ), // TODO fennec needs 25 or later
\r
14 // http://tukumemo.com/html5-audio-sp/
\r
15 // Simultaneous playback became possible starting with iOS 6 / Android 4.1.
\r
16 X_AudioSprite_maxTracks = X_AudioSprite_useVideoForMulti ? 2 : X_AudioSprite_disableMultiTrack ? 1 : 9,
\r
17 X_AudioSprite_lengthSilence = 10000, // length of the silent section at the very beginning (ms, per the seek() docs below)
\r
18 X_AudioSprite_lengthDistance = 5000, // length of the silence between sounds
\r
19 X_AudioSprite_uid = 0,
\r
20 X_AudioSprite_TEMP = {
\r
24 pauseTracks : [], // playing tracks that were paused by X_EVENT_DEACTIVATE.
\r
34 X_AudioSprite_numTracks,
\r
35 X_AudioSprite_useVideo;
\r
39 * urls : [ 'xx.ogg', 'xx.mp3' ],
\r
43 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
\r
44 * BGM_02 : [ '56.00', '1:15.230', true ]
\r
47 * X_EVENT_BACKEND_READY
\r
48 * X_EVENT_BACKEND_NONE
\r
51 * X_EVENT_MEDIA_LOOPED
\r
52 * X_EVENT_MEDIA_ENDED
\r
54 * @namespace X.AudioSprite
\r
55 * @alias X.AudioSprite
\r
// Factory for the X.AudioSprite singleton: kills any previous instance, copies
// the sprite presets out of `setting`, and kicks off audio-backend detection.
// NOTE(review): many original lines are missing from this extraction (internal
// numbering jumps); code below is left byte-identical to what survived.
57 X[ 'AudioSprite' ] = function( setting ){
\r
58 var tracks = X_AudioSprite_TEMP.tracks,
\r
59 bgms = X_AudioSprite_TEMP.BGMs,
\r
60 presets = X_AudioSprite_TEMP.presets,
\r
61 urls = setting[ 'urls' ],
\r
62 video = setting[ 'useVideo' ],
\r
63 n = video ? 1 : setting[ 'numTracks' ] || 1,
\r
64 volume = setting[ 'volume' ],
\r
// Only one AudioSprite may exist at a time; dispose of the previous one.
68 if( X_AudioSprite ) X_AudioSprite[ 'kill' ]();
\r
70 X_AudioSprite = X_Class_override( X_EventDispatcher(), X_AudioSprite_members );
\r
71 X_ViewPort[ 'listen' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE, X_EVENT_UNLOAD ], X_AudioSprite_handleEvent );
\r
// Clamp the requested track count to the platform maximum.
73 n = n <= X_AudioSprite_maxTracks ? n : X_AudioSprite_maxTracks;
\r
76 // On the Android 4.x stock browser (Chrome-based) audio keeps playing after the browser is hidden -- can video solve this?
\r
77 //if( X_AudioSprite_needTouchAndroid && n === 1 ){
\r
// Every array-valued setting (except `urls`) is a sprite preset:
// [ start, end, isBGM?, loopStart, loopEnd ] -- see the examples above.
81 for( k in setting ){
\r
83 if( X_Type_isArray( v ) && v !== urls ){
\r
84 v = X_Array_copy( v );
\r
85 for( i = v.length; i; ){
\r
// Index 2 is the boolean BGM flag, not a time string -- skip conversion.
87 if( i !== 2 ) v[ i ] = X_Audio_timeStringToNumber( v[ i ] );
\r
// Presets flagged as BGM are also registered in the BGM table.
89 if( v[ 2 ] ) bgms[ k ] = v;
\r
94 X_Audio_startDetectionBackend(
\r
95 X_Audio_BACKENDS[ 0 ],
\r
97 X_Array_copy( urls ),
\r
// A volume outside [0,1] falls back to 1.
99 'volume' : 0 <= volume && volume <= 1 ? volume : 1,
\r
102 'endTime' : X_AudioSprite_lengthSilence,
\r
106 X_AudioSprite[ 'listenOnce' ]( [ X_EVENT_BACKEND_READY, X_EVENT_BACKEND_NONE ], X_AudioSprite_backendHandler );
\r
107 X_AudioSprite[ 'listenOnce' ]( X_EVENT_KILL_INSTANCE, X_AudioSprite_handleEvent );
\r
109 X_AudioSprite_useVideo = video;
\r
110 X_AudioSprite_numTracks = X_AudioSprite[ 'numTracks' ] = n;
\r
112 return X_AudioSprite;
\r
115 X[ 'AudioSprite' ][ 'shouldUse' ] = X_AudioSprite_shouldUse;
\r
116 X[ 'AudioSprite' ][ 'enableMultiTrack' ] = !X_AudioSprite_disableMultiTrack;
\r
118 // Look for a track that has finished playing, or one that is about to finish
\r
119 // TODO which should be reused -- the one about to finish, or the oldest? If finer control than this is wanted, implement it yourself instead of using X.AudioSprite
\r
120 function X_AudioSprite_getTrackEnded(){
\r
121 var tracks = X_AudioSprite_TEMP.tracks,
\r
122 l = X_AudioSprite_numTracks,
\r
// `last` starts at +Infinity so the first remaining-time candidate always wins.
123 i = 0, track, state, last = 1 / 0, _last, index;
\r
125 for( ; i < l; ++i ){
\r
126 track = tracks[ i ];
\r
127 state = track.getState();
\r
// An idle track can be reused immediately.
128 if( !state.playing ) return track;
\r
// Never steal the BGM track.
129 if( track === X_AudioSprite_TEMP.bgmTrack ) continue;
\r
// Still inside the leading silence (plus inter-sound gap): safe to reuse.
130 if( state.currentTime <= X_AudioSprite_lengthSilence + X_AudioSprite_lengthDistance ) return track;
\r
// Otherwise track the candidate with the least remaining play time.
131 _last = state.endTime - state.currentTime;
\r
132 if( _last < last ){
\r
// NOTE(review): original lines 133-136 are missing from this extraction
// (presumably `last = _last; index = i;` plus closing braces) -- TODO recover.
137 return tracks[ index ];
\r
// Public member methods mixed into the AudioSprite instance (via
// X_Class_override in the factory above).
// NOTE(review): many original lines are missing from this extraction (internal
// numbering jumps); code below is left byte-identical to what survived.
140 var X_AudioSprite_members =
\r
141 /** @lends X.AudioSprite.prototype */
\r
150 * @param {string} name track name
\r
151 * @return {number} uid
\r
153 'play' : function( name ){
\r
154 var bgm = X_AudioSprite_TEMP.bgmTrack,
\r
155 tracks = X_AudioSprite_TEMP.tracks,
\r
156 bgms = X_AudioSprite_TEMP.BGMs,
\r
157 presets = X_AudioSprite_TEMP.presets,
\r
158 preset = presets[ name ],
\r
// BGM presets get special handling: position/loop state survives pauses.
162 if( bgms[ name ] ){
\r
163 if( name !== X_AudioSprite_TEMP.bgmName ){
\r
// Switching BGM: restart from the preset's start time.
165 X_AudioSprite_TEMP.bgmName = name;
\r
166 X_AudioSprite_TEMP.bgmPosition = preset[ 0 ];
\r
167 X_AudioSprite_TEMP.bgmLooped = false;
\r
170 X_AudioSprite_TEMP.bgmPlaying = true;
\r
// Multi-track: claim a finished (or nearly finished) track for the BGM.
175 if( 1 < X_AudioSprite_numTracks ){
\r
176 track = X_AudioSprite_TEMP.bgmTrack = X_AudioSprite_getTrackEnded();
\r
178 track = X_AudioSprite_TEMP.bgmTrack = tracks[ 0 ];
\r
181 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite_handleEvent ).playing ){
\r
// Track is already playing: overwrite its state in place.
184 'looped' : X_AudioSprite_TEMP.bgmLooped,
\r
185 'currentTime' : X_AudioSprite_TEMP.bgmPosition,
\r
186 'startTime' : preset[ 0 ],
\r
187 'endTime' : preset[ 1 ],
\r
188 'loopStartTime' : preset[ 3 ],
\r
189 'loopEndTime' : preset[ 4 ]
\r
192 track.setState( { 'looped' : X_AudioSprite_TEMP.bgmLooped } );
\r
193 track.play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] );
\r
// Resume the BGM from where it was paused.
194 track.seek( X_AudioSprite_TEMP.bgmPosition );
\r
// Non-BGM (sound effect) presets below.
198 if( 1 < X_AudioSprite_numTracks ){
\r
199 track = X_AudioSprite_getTrackEnded( X_AudioSprite_TEMP.bgmPlaying );
\r
201 [ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite_handleEvent )
\r
202 .setState( { 'looped' : false } );
\r
// After the sound, loop inside the leading silent region.
203 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence );
\r
205 // single track, iOS
\r
// Remember where the BGM was: the SE is about to take over the only track.
207 X_AudioSprite_TEMP.bgmPosition = bgm.currentTime();
\r
208 //console.log( 'bgm position : ' + X_AudioSprite_TEMP.bgmPosition + ' isPlay:' + bgm.playing );
\r
209 X_AudioSprite_TEMP.bgmTrack = null;
\r
211 track = tracks[ 0 ];
\r
213 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite_handleEvent ).playing ){
\r
217 'currentTime' : preset[ 0 ],
\r
218 'startTime' : preset[ 0 ],
\r
219 'endTime' : preset[ 1 ],
\r
220 'loopStartTime' : 0,
\r
221 'loopEndTime' : X_AudioSprite_lengthSilence
\r
224 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence );
\r
// The returned "uid" is simply the track's index in the tracks array.
228 return tracks.indexOf( track );
\r
234 * Pause. When called without uid, or with '*', pauses ALL tracks.
\r
235 * @param {number} uid=undefined track ID, or '*'
\r
236 * @return {AudioSprite}
\r
238 'pause' : function( uid ){
\r
239 var tracks = X_AudioSprite_TEMP.tracks,
\r
// '*' / undefined: recurse over every track.
242 if( uid === '*' || uid === undefined ){
\r
243 for( i = 0, l = X_AudioSprite_numTracks; i < l; ++i ){
\r
244 X_AudioSprite[ 'pause' ]( i );
\r
247 if( track = tracks[ uid ] ){
\r
248 if( X_AudioSprite_TEMP.bgmTrack === track ){
\r
// Save the BGM position so play() can resume it later.
249 X_AudioSprite_TEMP.bgmPosition = track.currentTime();
\r
250 X_AudioSprite_TEMP.bgmPlaying = false;
\r
251 X_AudioSprite_TEMP.bgmTrack = null;
\r
// "Pausing" = looping the silent region instead of stopping the element.
253 track.play( 0, X_AudioSprite_lengthSilence, true, 0, X_AudioSprite_lengthSilence );
\r
255 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_PAUSED );
\r
257 return X_AudioSprite;
\r
261 * Seek; position is relative, within the current track's length
\r
262 * @param {number} uid track ID
\r
263 * @param {number} position ms
\r
264 * @return {AudioSprite}
\r
266 'seek' : function( uid, position ){
\r
267 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
270 delete track.seekTime;
\r
271 end = X_Audio_getEndTime( track );
\r
272 start = X_Audio_getStartTime( track, end );
\r
// Out-of-range positions are silently ignored.
273 0 <= position && position <= ( end - start ) && track.seek( start + position );
\r
275 return X_AudioSprite;
\r
280 * @param {number} uid track ID
\r
281 * @param {number} opt_volume= volume
\r
282 * @return {AudioSprite|number}
\r
284 'volume' : function( uid, opt_volume ){
\r
288 if( opt_volume === undefined ){
\r
289 return X_AudioSprite_TEMP.volume;
\r
// Apply the volume to every track.
291 for( i = X_AudioSprite_numTracks; i; ){
\r
292 X_AudioSprite_TEMP.tracks[ --i ].volume( opt_volume );
\r
294 return X_AudioSprite;
\r
296 track = X_AudioSprite_TEMP.tracks[ uid ];
\r
297 if( opt_volume === undefined ){
\r
// -1 signals an unknown track.
298 return track ? track.gain : -1;
\r
300 track && track.volume( opt_volume );
\r
301 return X_AudioSprite;
\r
306 * @param {number} uid track ID
\r
307 * @param {object} opt_obj= object holding the state values to overwrite
\r
308 * @return {AudioSprite|object}
\r
310 'state' : function( uid, opt_obj ){
\r
311 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
// Getter form when opt_obj is omitted.
314 if( opt_obj === undefined ){
\r
317 state = track.getState();
\r
318 start = state.startTime;
\r
// Times are reported relative to the sprite's start.
320 'currentTime' : state.currentTime - start,
\r
321 'playing' : start <= state.currentTime && state.currentTime <= state.endTime,
\r
322 'duration' : state.endTime - start,
\r
323 'volume' : X_AudioSprite_TEMP.volume
\r
326 return { 'volume' : X_AudioSprite_TEMP.volume, 'playing' : false };
\r
328 track && track.setState( opt_obj );
\r
329 return X_AudioSprite;
\r
// Handles the backend-detection lifecycle: creates one backend instance per
// track on BACKEND_READY, forwards PROGRESS, and starts reserved playback on
// READY.
// NOTE(review): lines are missing from this extraction (internal numbering
// jumps); code below is left byte-identical to what survived.
333 function X_AudioSprite_backendHandler( e ){
\r
334 var i, backend, option, src, name, last, _e, track;
\r
337 case X_EVENT_BACKEND_READY :
\r
339 backend = X_Audio_BACKENDS[ e[ 'backendID' ] ];
\r
340 option = e[ 'option' ];
\r
342 X_AudioSprite[ 'unlisten' ]( X_EVENT_BACKEND_NONE, X_AudioSprite_backendHandler );
\r
343 X_AudioSprite[ 'source' ] = src = e[ 'source' ];
\r
344 X_AudioSprite[ 'backendName' ] = name = backend.backendName;
\r
346 //console.log( i + ' / ' + X_AudioSprite_numTracks );
\r
// Create one backend instance per track.
348 for( i = 0; i < X_AudioSprite_numTracks; ++i ){
\r
// Some platforms need the second (or every) track to be backed by <video>.
349 if( X_AudioSprite_useVideo || ( i === 1 && X_AudioSprite_useVideoForMulti ) ){
\r
350 option = X_Object_deepCopy( option );
\r
351 option[ 'useVideo' ] = true;
\r
352 console.log( 'use video' );
\r
354 // Passing null as the AudioBackend owner makes it dispatch to the AudioBackend itself
\r
355 X_AudioSprite_TEMP.tracks.push(
\r
356 last = backend.klass( null, e[ 'source' ], option )[ 'listen' ]( X_EVENT_DEBUG, X_AudioSprite_handleEvent ) );
\r
360 'type' : X_EVENT_BACKEND_READY,
\r
362 'backendName' : name
\r
365 // TODO for now the backend is treated as ready once touch is possible
\r
// backendID 1 = WebAudio, 2 = HTMLAudio -- each may require a first user touch.
368 ( backend.backendID === 1 && ( _e[ 'needTouchForPlay' ] = X_WebAudio_need1stTouch ) ) ||
\r
370 ( backend.backendID === 2 && ( _e[ 'needTouchForLoad' ] = X_HTMLAudio_need1stTouch ) )
\r
372 X_AudioSprite_TEMP.event = _e;
\r
373 last[ 'listenOnce' ]( X_EVENT_MEDIA_TOUCH_FOR_LOAD, X_AudioSprite_backendHandler );
\r
375 X_AudioSprite[ 'asyncDispatch' ]( _e );
\r
378 // TODO wait until ALL tracks are READY!
\r
379 last[ 'listen' ]( X_EVENT_PROGRESS, X_AudioSprite_backendHandler )
\r
380 [ 'listenOnce' ]( X_EVENT_READY, X_AudioSprite_backendHandler );
\r
381 return X_CALLBACK_STOP_NOW;
\r
383 case X_EVENT_BACKEND_NONE :
\r
385 [ 'listen' ]( X_EVENT_BACKEND_NONE, X_AudioSprite_handleEvent ) // invokes kill
\r
386 [ 'asyncDispatch' ]( X_EVENT_BACKEND_NONE );
\r
387 return X_CALLBACK_STOP_NOW;
\r
389 case X_EVENT_MEDIA_TOUCH_FOR_LOAD :
\r
390 // TODO wait until ALL tracks fire MEDIA_TOUCH_FOR_LOAD!
\r
391 X_AudioSprite[ 'asyncDispatch' ]( X_AudioSprite_TEMP.event );
\r
392 delete X_AudioSprite_TEMP.event;
\r
395 case X_EVENT_PROGRESS :
\r
// Forward load progress to AudioSprite listeners.
396 X_AudioSprite[ 'dispatch' ]( { type : X_EVENT_PROGRESS, 'percent' : e[ 'percent' ] } );
\r
399 case X_EVENT_READY :
\r
400 console.log( 'X.AudioSprite - Ready!' );
\r
// Start any track whose playback was requested before the backend was ready.
401 for( i = 0; i < X_AudioSprite_numTracks; ++i ){
\r
402 track = X_AudioSprite_TEMP.tracks[ i ];
\r
403 ( track.autoplay || track._playReserved ) && track.actualPlay();
\r
404 delete track._playReserved;
\r
406 this[ 'listen' ]( X_EVENT_PROGRESS, X_AudioSprite_backendHandler );
\r
407 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_READY );
\r
// Central event handler: media events from tracks, viewport activate /
// deactivate, and instance teardown (KILL_INSTANCE).
// NOTE(review): lines are missing from this extraction (internal numbering
// jumps) and the closing brace lies beyond this view; code is left
// byte-identical to what survived.
413 function X_AudioSprite_handleEvent( e ){
\r
414 var track = e.target, i, tracks, _e, k;
\r
417 case X_EVENT_MEDIA_PLAYING :
\r
418 case X_EVENT_MEDIA_WAITING :
\r
419 case X_EVENT_MEDIA_SEEKING :
\r
// Re-dispatch only for the BGM track or non-looping tracks.
420 ( track === X_AudioSprite_TEMP.bgmTrack || !track.looped ) && X_AudioSprite[ 'asyncDispatch' ]( e.type );
\r
423 case X_EVENT_MEDIA_BEFORE_LOOP :
\r
424 if( track === X_AudioSprite_TEMP.bgmTrack ){
\r
426 X_AudioSprite_TEMP.bgmLooped = true;
\r
427 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
430 if( !track.looped ){
\r
431 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_ENDED ); // TODO uid
\r
434 //console.log( '[AudioSprite] bgmPlaying:' + X_AudioSprite_TEMP.bgmPlaying + ' ' + !X_AudioSprite_TEMP.bgmTrack );
\r
436 // single track | iOS
\r
// BGM flagged as playing but owns no track (single-track mode): reclaim this
// track for the BGM and restart it.
437 if( X_AudioSprite_TEMP.bgmPlaying && !X_AudioSprite_TEMP.bgmTrack ){
\r
438 X_AudioSprite_TEMP.bgmTrack = track;
\r
439 X_AudioSprite.play( X_AudioSprite_TEMP.bgmName );
\r
440 return X_CALLBACK_PREVENT_DEFAULT;
\r
446 case X_EVENT_DEBUG :
\r
// Tag debug events with the originating track's index before re-dispatch.
447 i = X_AudioSprite_TEMP.tracks.indexOf( track );
\r
449 e[ 'trackID' ] = i;
\r
450 X_AudioSprite[ 'dispatch' ]( e );
\r
454 // TODO cannot detect the active state on Android Firefox!
\r
455 case X_EVENT_VIEW_ACTIVATE :
\r
456 console.log( '■ アクティブ' );
\r
457 // track.play(); or iOS need touch??
\r
// Resume every track that was auto-paused on deactivate.
458 tracks = X_AudioSprite_TEMP.pauseTracks;
\r
459 while( tracks.length ) tracks.pop().actualPlay();
\r
462 case X_EVENT_VIEW_DEACTIVATE :
\r
463 console.log( '■ デアクティブ' );
\r
// Pause all playing tracks, remembering them for resume on re-activate.
465 tracks = X_AudioSprite_TEMP.tracks;
\r
466 i = X_AudioSprite_numTracks;
\r
468 track = tracks[ --i ];
\r
// push() returns the new (truthy) length, so the && chain reaches pause().
469 track.playing && X_AudioSprite_TEMP.pauseTracks.push( track ) && track.pause();
\r
473 case X_EVENT_BACKEND_NONE :
\r
474 case X_EVENT_UNLOAD :
\r
475 X_AudioSprite[ 'kill' ]();
\r
478 case X_EVENT_KILL_INSTANCE :
\r
// Full teardown: drop tracks, presets, BGM state, and viewport listeners.
479 X_AudioSprite_TEMP.pauseTracks.length = 0;
\r
481 while( X_AudioSprite_TEMP.tracks.length ){
\r
482 X_AudioSprite_TEMP.tracks.pop()[ 'kill' ]();
\r
485 for( k in X_AudioSprite_TEMP.BGMs ){
\r
486 delete X_AudioSprite_TEMP.BGMs[ k ];
\r
488 for( k in X_AudioSprite_TEMP.presets ){
\r
489 delete X_AudioSprite_TEMP.presets[ k ];
\r
492 X_AudioSprite_TEMP.bgmTrack = null;
\r
493 X_AudioSprite_TEMP.bgmPosition = 0;
\r
494 X_AudioSprite_TEMP.bgmName = '';
\r
495 X_AudioSprite_TEMP.bgmLooped = false;
\r
496 X_AudioSprite_TEMP.bgmPlaying = false;
\r
498 X_ViewPort[ 'unlisten' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE, X_EVENT_UNLOAD ], X_AudioSprite_handleEvent );
\r
499 X_AudioSprite = null;
\r
\r