3 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
\r
4 * Mobile Opera11 は Audio をサポートするがイベントが取れない
\r
5 * iframe 内で生成して、Audio Sprite の preset で再生できないか?
\r
// Platform capability flags and tuning constants for X.AudioSprite.
// (Fragmented excerpt — interior lines are missing from this view.)
7 var X_AudioSprite_shouldUse = X_HTMLAudio && ( X_UA[ 'iOS' ] || X_UA[ 'AOSP' ] || X_UA[ 'OperaMobile' ] || X_UA[ 'OperaTablet' ] ), // no Flash on these platforms
\r
8 X_AudioSprite_useVideoForMulti = //( 3.1 <= X_UA[ 'AOSP' ] < 4 ) ||
\r
9 //( ( 4.2 <= X_UA[ 'AOSP' ] ),
\r
10 // the Dospara Pad cannot play video inline
\r
12 X_AudioSprite_disableMultiTrack = !X_WebAudio && ( X_UA[ 'iOS' ] || 4 <= X_UA[ 'AOSP' ] || X_UA[ 'ChromeWV' ] || ( X_UA[ 'WinPhone' ] && X_UA[ 'IE9' ] ) ),
\r
13 X_AudioSprite_enableVolume = X_HTMLAudio && ( !X_UA[ 'iOS' ] && !X_UA[ 'AOSP' ] && !X_UA[ 'OperaMobile' ] && !X_UA[ 'OperaTablet' ] ), // TODO fennec (Firefox Mobile) is 25 and above
\r
14 // http://tukumemo.com/html5-audio-sp/
\r
15 // Simultaneous playback became possible from iOS 6 / Android 4.1.
\r
16 X_AudioSprite_maxTracks = X_AudioSprite_useVideoForMulti ? 2 : X_AudioSprite_disableMultiTrack ? 1 : 9,
\r
17 X_AudioSprite_lengthSilence = 10000, // length of the silent section at the very beginning of the sprite
\r
18 X_AudioSprite_lengthDistance = 5000, // length of the silence between sounds
\r
19 X_AudioSprite_uid = 0,
\r
20 X_AudioSprite_TEMP = {
\r
24 pauseTracks : [], // playing tracks that were paused by X_EVENT_DEACTIVATE; drained on re-activation
\r
33 X_AudioSprite_numTracks,
\r
34 X_AudioSprite_useVideo;
\r
38 * urls : [ 'xx.ogg', 'xx.mp3' ],
\r
42 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
\r
43 * BGM_02 : [ '56.00', '1:15.230', true ]
\r
46 * X_EVENT_BACKEND_READY
\r
47 * X_EVENT_BACKEND_NONE
\r
50 * X_EVENT_MEDIA_LOOPED
\r
51 * X_EVENT_MEDIA_ENDED
\r
53 * @namespace X.AudioSprite
\r
54 * @alias X.AudioSprite
\r
// Factory for the X.AudioSprite singleton: copies the time presets from
// `setting`, clamps the requested track count, and kicks off audio-backend
// detection. Returns the singleton dispatcher (X_AudioSprite).
56 X[ 'AudioSprite' ] = function( setting ){
\r
57 var tracks = X_AudioSprite_TEMP.tracks,
\r
58 bgms = X_AudioSprite_TEMP.BGMs,
\r
59 presets = X_AudioSprite_TEMP.presets,
\r
60 urls = setting[ 'urls' ],
\r
61 video = setting[ 'useVideo' ],
\r
62 n = video ? 1 : setting[ 'numTracks' ] || 1, // video mode forces a single track
\r
63 volume = setting[ 'volume' ],
\r
66 if( !X_AudioSprite ){
\r
67 X_AudioSprite = X_Class_override( X_EventDispatcher(), X_AudioSprite_members ); // create the singleton once
\r
68 X_ViewPort[ 'listen' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE, X_EVENT_UNLOAD ], X_AudioSprite, X_AudioSprite_handleEvent );
\r
71 n = n <= X_AudioSprite_maxTracks ? n : X_AudioSprite_maxTracks; // clamp to the platform maximum
\r
74 // On the Android 4.x stock browser (Chrome family), sound keeps playing when the browser is hidden — can using video solve this?
\r
75 //if( X_AudioSprite_needTouchAndroid && n === 1 ){
\r
79 for( k in setting ){
\r
81 if( X_Type_isArray( v ) && v !== urls ){ // any array other than `urls` is treated as a time preset
\r
82 v = X_Array_copy( v );
\r
83 for( i = v.length; i; ){
\r
85 if( i !== 2 ) v[ i ] = X_Audio_timeStringToNumber( v[ i ] ); // index 2 is the BGM flag, not a time string
\r
87 if( v[ 2 ] ) bgms[ k ] = v; // truthy third element marks the preset as a BGM
\r
92 X_Audio_startDetectionBackend(
\r
93 X_Audio_BACKENDS[ 0 ],
\r
95 X_Array_copy( urls ),
\r
97 'volume' : 0 <= volume && volume <= 1 ? volume : 1, // out-of-range volume falls back to 1
\r
100 'endTime' : X_AudioSprite_lengthSilence,
\r
104 X_AudioSprite[ 'listenOnce' ]( [ X_EVENT_BACKEND_READY, X_EVENT_BACKEND_NONE ], X_AudioSprite_backendHandler );
\r
105 X_AudioSprite[ 'listenOnce' ]( X_EVENT_KILL_INSTANCE, X_AudioSprite_handleEvent );
\r
107 X_AudioSprite_useVideo = video;
\r
108 X_AudioSprite_numTracks = X_AudioSprite[ 'numTracks' ] = n;
\r
110 return X_AudioSprite;
\r
113 X[ 'AudioSprite' ][ 'shouldUse' ] = X_AudioSprite_shouldUse;
\r
114 X[ 'AudioSprite' ][ 'enableMultiTrack' ] = !X_AudioSprite_disableMultiTrack;
\r
116 // Find a track that has finished playing, or is close to finishing.
\r
117 // TODO which should be reused — the one about to finish, or the oldest? If you need finer control than this, implement it yourself instead of using X.AudioSprite
\r
118 function X_AudioSprite_getTrackEnded(){
\r
119 var tracks = X_AudioSprite_TEMP.tracks,
\r
120 l = X_AudioSprite_numTracks,
\r
121 i = 0, track, state, last = 1 / 0, _last, index; // last starts at +Infinity so any remaining time beats it
\r
123 for( ; i < l; ++i ){
\r
124 track = tracks[ i ];
\r
125 state = track.getState();
\r
126 if( !state.playing ) return track; // idle track — reuse immediately
\r
127 if( track === X_AudioSprite_TEMP.bgmTrack ) continue; // never steal the BGM track
\r
128 if( state.currentTime <= X_AudioSprite_lengthSilence + X_AudioSprite_lengthDistance ) return track; // still within the leading silent region — safe to reuse
\r
129 _last = state.endTime - state.currentTime; // remaining playback time
\r
130 if( _last < last ){
\r
135 return tracks[ index ]; // fallback: the track with the least time remaining
\r
// Instance members mixed onto the X.AudioSprite event dispatcher.
138 var X_AudioSprite_members =
\r
139 /** @lends X.AudioSprite.prototype */
\r
148 * @param {string} name track (preset) name
\r
149 * @return {number} uid
\r
151 'play' : function( name ){
\r
152 var bgm = X_AudioSprite_TEMP.bgmTrack,
\r
153 tracks = X_AudioSprite_TEMP.tracks,
\r
154 bgms = X_AudioSprite_TEMP.BGMs,
\r
155 presets = X_AudioSprite_TEMP.presets,
\r
156 preset = presets[ name ],
\r
160 if( bgms[ name ] ){
\r
161 if( name !== X_AudioSprite_TEMP.bgmName ){ // switching to a different BGM: reset remembered position/loop state
\r
163 X_AudioSprite_TEMP.bgmName = name;
\r
164 X_AudioSprite_TEMP.bgmPosition = preset[ 0 ];
\r
165 X_AudioSprite_TEMP.bgmLooped = false;
\r
168 X_AudioSprite_TEMP.bgmPlaying = true;
\r
173 if( 1 < X_AudioSprite_numTracks ){
\r
174 track = X_AudioSprite_TEMP.bgmTrack = X_AudioSprite_getTrackEnded(); // multi-track: claim a finished/ending track for the BGM
\r
176 track = X_AudioSprite_TEMP.bgmTrack = tracks[ 0 ]; // single-track: the only track becomes the BGM track
\r
179 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite, X_AudioSprite_handleEvent ).playing ){
\r
182 'looped' : X_AudioSprite_TEMP.bgmLooped,
\r
183 'currentTime' : X_AudioSprite_TEMP.bgmPosition,
\r
184 'startTime' : preset[ 0 ],
\r
185 'endTime' : preset[ 1 ],
\r
186 'loopStartTime' : preset[ 3 ],
\r
187 'loopEndTime' : preset[ 4 ]
\r
190 track.setState( { 'looped' : X_AudioSprite_TEMP.bgmLooped } );
\r
191 track.play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] );
\r
192 track.seek( X_AudioSprite_TEMP.bgmPosition ); // resume from the remembered BGM position
\r
196 if( 1 < X_AudioSprite_numTracks ){ // non-BGM sound on a multi-track setup
\r
197 track = X_AudioSprite_getTrackEnded( X_AudioSprite_TEMP.bgmPlaying );
\r
199 [ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite, X_AudioSprite_handleEvent )
\r
200 .setState( { 'looped' : false } );
\r
201 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence ); // after the sound, loop into the leading silence
\r
203 // single track, iOS
\r
205 X_AudioSprite_TEMP.bgmPosition = bgm.currentTime(); // remember where the BGM was interrupted
\r
206 //console.log( 'bgm position : ' + X_AudioSprite_TEMP.bgmPosition + ' isPlay:' + bgm.playing );
\r
207 X_AudioSprite_TEMP.bgmTrack = null;
\r
209 track = tracks[ 0 ];
\r
211 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], X_AudioSprite, X_AudioSprite_handleEvent ).playing ){
\r
215 'currentTime' : preset[ 0 ],
\r
216 'startTime' : preset[ 0 ],
\r
217 'endTime' : preset[ 1 ],
\r
218 'loopStartTime' : 0,
\r
219 'loopEndTime' : X_AudioSprite_lengthSilence
\r
223 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence );
\r
227 return tracks.indexOf( track ); // the track's index doubles as its uid
\r
233 * Pause. When called without a uid, or with '*', every track is paused.
\r
234 * @param {number} uid=undefined track ID, or '*'
\r
235 * @return {AudioSprite}
\r
237 'pause' : function( uid ){
\r
238 var tracks = X_AudioSprite_TEMP.tracks,
\r
241 if( uid === '*' || uid === undefined ){
\r
242 for( i = 0, l = X_AudioSprite_numTracks; i < l; ++i ){
\r
243 X_AudioSprite[ 'pause' ]( i ); // recurse per-track
\r
246 if( track = tracks[ uid ] ){
\r
247 if( X_AudioSprite_TEMP.bgmTrack === track ){ // pausing the BGM: remember its position for resume
\r
248 X_AudioSprite_TEMP.bgmPosition = track.currentTime();
\r
249 X_AudioSprite_TEMP.bgmPlaying = false;
\r
250 X_AudioSprite_TEMP.bgmTrack = null;
\r
252 track.play( 0, X_AudioSprite_lengthSilence, true, 0, X_AudioSprite_lengthSilence ); // "pause" by parking the track in the leading silent region
\r
254 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_PAUSED );
\r
256 return X_AudioSprite;
\r
260 * Seek; position is relative within the current track's length.
\r
261 * @param {number} uid track ID
\r
262 * @param {number} position ms
\r
263 * @return {AudioSprite}
\r
265 'seek' : function( uid, position ){
\r
266 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
269 delete track.seekTime;
\r
270 end = X_Audio_getEndTime( track );
\r
271 start = X_Audio_getStartTime( track, end );
\r
272 0 <= position && position <= ( end - start ) && track.seek( start + position ); // ignore out-of-range positions
\r
274 return X_AudioSprite;
\r
279 * @param {number} uid track ID
\r
280 * @param {number} opt_volume= volume
\r
281 * @return {AudioSprite|number}
\r
283 'volume' : function( uid, opt_volume ){
\r
287 if( opt_volume === undefined ){
\r
288 return X_AudioSprite_TEMP.volume; // getter form
\r
290 for( i = X_AudioSprite_numTracks; i; ){
\r
291 X_AudioSprite_TEMP.tracks[ --i ].volume( opt_volume ); // apply to every track
\r
293 return X_AudioSprite;
\r
295 track = X_AudioSprite_TEMP.tracks[ uid ];
\r
296 if( opt_volume === undefined ){
\r
297 return track ? track.gain : -1; // -1 signals an unknown track
\r
299 track && track.volume( opt_volume );
\r
300 return X_AudioSprite;
\r
305 * @param {number} uid track ID
\r
306 * @param {object} opt_obj= object holding the state values to overwrite
\r
307 * @return {AudioSprite|object}
\r
309 'state' : function( uid, opt_obj ){
\r
310 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
313 if( opt_obj === undefined ){ // getter form: report times relative to the sprite's start offset
\r
316 state = track.getState();
\r
317 start = state.startTime;
\r
319 'currentTime' : state.currentTime - start,
\r
320 'playing' : start <= state.currentTime && state.currentTime <= state.endTime,
\r
321 'duration' : state.endTime - start,
\r
322 'volume' : X_AudioSprite_TEMP.volume
\r
325 return { 'volume' : X_AudioSprite_TEMP.volume, 'playing' : false }; // no such track
\r
327 track && track.setState( opt_obj ); // setter form
\r
328 return X_AudioSprite;
\r
// Backend-detection handler: on X_EVENT_BACKEND_READY it creates the track
// backends; on X_EVENT_BACKEND_NONE it reports failure; on X_EVENT_READY it
// starts any reserved/autoplay tracks and re-dispatches readiness.
332 function X_AudioSprite_backendHandler( e ){
\r
333 var i, backend, option, src, name, last, _e, track;
\r
336 case X_EVENT_BACKEND_READY :
\r
338 backend = X_Audio_BACKENDS[ e[ 'backendID' ] ];
\r
339 option = e[ 'option' ];
\r
341 X_AudioSprite[ 'unlisten' ]( X_EVENT_BACKEND_NONE, X_AudioSprite_backendHandler ); // a backend was found; drop the failure path
\r
342 X_AudioSprite[ 'source' ] = src = e[ 'source' ];
\r
343 X_AudioSprite[ 'backendName' ] = name = backend.backendName;
\r
345 //console.log( i + ' / ' + X_AudioSprite_numTracks );
\r
347 for( i = 0; i < X_AudioSprite_numTracks; ++i ){
\r
348 if( X_AudioSprite_useVideo || ( i === 1 && X_AudioSprite_useVideoForMulti ) ){ // this track must be backed by video
\r
349 option = X_Object_deepCopy( option );
\r
350 option[ 'useVideo' ] = true;
\r
351 console.log( 'use video' );
\r
353 // Passing null as the AudioBackend owner makes it dispatch to the AudioBackend itself
\r
354 X_AudioSprite_TEMP.tracks.push(
\r
355 last = backend.klass( null, e[ 'source' ], option )[ 'listen' ]( X_EVENT_DEBUG, X_AudioSprite, X_AudioSprite_handleEvent ) );
\r
359 'type' : X_EVENT_BACKEND_READY,
\r
361 'backendName' : name
\r
363 // touch available and backend ready
\r
365 if( backend.backendID === 1 && ( _e[ 'needTouchForPlay' ] = X_WebAudio_need1stTouch ) ){ // WebAudio needs a first touch before playback
\r
366 last[ 'listenOnce' ]( X_EVENT_READY, X_AudioSprite, X_AudioSprite[ 'asyncDispatch' ], [ _e ] );
\r
369 if( backend.backendID === 2 && ( _e[ 'needTouchForLoad' ] = X_HTMLAudio_need1stTouch ) ){ // HTMLAudio needs a first touch before loading
\r
370 last[ 'listenOnce' ]( X_EVENT_MEDIA_TOUCH_FOR_LOAD, X_AudioSprite, X_AudioSprite[ 'asyncDispatch' ], [ _e ] );
\r
372 X_AudioSprite[ 'asyncDispatch' ]( _e );
\r
375 last[ 'listenOnce' ]( X_EVENT_READY, X_AudioSprite, X_AudioSprite_backendHandler );
\r
376 return X_CALLBACK_STOP_NOW;
\r
378 case X_EVENT_BACKEND_NONE :
\r
379 X_AudioSprite[ 'unlisten' ]( X_EVENT_BACKEND_READY, X_AudioSprite, X_AudioSprite_backendHandler )
\r
380 [ 'asyncDispatch' ]( X_EVENT_BACKEND_NONE ); // no usable backend — notify listeners
\r
381 return X_CALLBACK_STOP_NOW;
\r
383 case X_EVENT_READY :
\r
384 console.log( 'X.AudioSprite - Ready!' );
\r
385 for( i = 0; i < X_AudioSprite_numTracks; ++i ){
\r
386 track = X_AudioSprite_TEMP.tracks[ i ];
\r
387 ( track.autoplay || track._playReserved ) && track.actualPlay(); // start anything queued before readiness
\r
388 delete track._playReserved;
\r
390 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_READY );
\r
// Central event handler: relays per-track media events to the sprite's
// listeners, handles loop/end bookkeeping for the BGM track, and reacts to
// viewport activate/deactivate/unload and instance teardown.
396 function X_AudioSprite_handleEvent( e ){
\r
397 var i, tracks, track, _e, k;
\r
400 case X_EVENT_MEDIA_PLAYING :
\r
401 ( e.target === X_AudioSprite_TEMP.bgmTrack || !e.target.looped ) && X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_PLAYING ); // only relay for BGM or non-looping tracks
\r
403 case X_EVENT_MEDIA_WAITING :
\r
404 case X_EVENT_MEDIA_SEEKING :
\r
405 ( e.target === X_AudioSprite_TEMP.bgmTrack || !e.target.looped ) && X_AudioSprite[ 'asyncDispatch' ]( e.type );
\r
408 case X_EVENT_MEDIA_BEFORE_LOOP :
\r
409 if( e.target === X_AudioSprite_TEMP.bgmTrack ){
\r
410 X_AudioSprite_TEMP.bgmLooped = true; // remember the BGM has entered its loop section
\r
411 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
413 if( e.target.looped ){
\r
414 // X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
416 X_AudioSprite[ 'asyncDispatch' ]( X_EVENT_MEDIA_ENDED ); // TODO uid
\r
419 //console.log( '[AudioSprite] bgmPlaying:' + X_AudioSprite_TEMP.bgmPlaying + ' ' + !X_AudioSprite_TEMP.bgmTrack );
\r
421 // single track | iOS
\r
422 if( X_AudioSprite_TEMP.bgmPlaying && !X_AudioSprite_TEMP.bgmTrack ){ // a sound effect interrupted the BGM: restart the BGM on this track
\r
423 X_AudioSprite_TEMP.bgmTrack = e.target;
\r
424 X_AudioSprite.play( X_AudioSprite_TEMP.bgmName );
\r
425 return X_CALLBACK_PREVENT_DEFAULT;
\r
431 case X_EVENT_DEBUG :
\r
432 i = X_AudioSprite_TEMP.tracks.indexOf( e.target );
\r
434 e[ 'trackID' ] = i; // tag the debug event with its track index before relaying
\r
435 X_AudioSprite[ 'dispatch' ]( e );
\r
439 // TODO cannot detect the active state on Android Firefox!
\r
440 case X_EVENT_VIEW_ACTIVATE :
\r
441 console.log( '■ アクティブ' );
\r
442 // track.play(); or iOS need touch??
\r
443 tracks = X_AudioSprite_TEMP.pauseTracks;
\r
444 while( tracks.length ) tracks.pop().actualPlay(); // resume everything paused on deactivate
\r
447 case X_EVENT_VIEW_DEACTIVATE :
\r
448 console.log( '■ デアクティブ' );
\r
450 tracks = X_AudioSprite_TEMP.tracks;
\r
451 i = X_AudioSprite_numTracks;
\r
453 track = tracks[ --i ];
\r
454 track.playing && X_AudioSprite_TEMP.pauseTracks.push( track ) && track.pause(); // push returns new length (truthy), so pause() always follows the push
\r
458 case X_EVENT_UNLOAD :
\r
459 console.log( '■ unload' );
\r
461 case X_EVENT_KILL_INSTANCE :
\r
462 while( X_AudioSprite_TEMP.tracks.length ){
\r
463 X_AudioSprite_TEMP.tracks.pop()[ 'kill' ](); // destroy every track backend
\r
466 for( k in X_AudioSprite_TEMP.bgms ){
\r
467 delete X_AudioSprite_TEMP.bgms[ k ];
\r
469 for( k in X_AudioSprite_TEMP.presets ){
\r
470 delete X_AudioSprite_TEMP.presets[ k ];
\r
473 X_AudioSprite_TEMP.bgmTrack = null;
\r
474 X_AudioSprite_TEMP.bgmPosition = 0;
\r
475 X_AudioSprite_TEMP.bgmName = '';
\r
476 X_AudioSprite_TEMP.bgmLooped = false;
\r
477 X_AudioSprite_TEMP.bgmPlaying = false;
\r
479 X_ViewPort[ 'unlisten' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE, X_EVENT_UNLOAD ], X_AudioSprite, X_AudioSprite_handleEvent );
\r