3 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
\r
4 * Mobile Opera11 は Audio をサポートするがイベントが取れない
\r
5 * iframe 内で生成して、Audio Sprite の preset で再生できないか?
\r
// NOTE(review): this chunk is a gap-sampled extraction — the embedded numbers are the
// original file's line numbers and are non-contiguous, so source lines are missing
// between several entries below. Comments describe only what is visible.
//
// Module-private capability flags and shared state for X.AudioSprite:
//   X_AudioSprite_shouldUse        : HTMLAudio exists on a mobile platform (iOS/AOSP/Opera Mobile|Tablet)
//   X_AudioSprite_useVideoForMulti : currently hard-coded false (older AOSP heuristics commented out)
//   X_AudioSprite_needTouchAndroid : Android Chrome WebView without WebAudio needs a touch gesture
//   X_AudioSprite_disableMultiTrack: platforms limited to a single simultaneous track
//   X_AudioSprite_maxTracks        : 2 when video is used for multi, 1 when multitrack disabled, else 9
//   X_AudioSprite_TEMP             : mutable shared state (tracks, BGM bookkeeping, pauseTracks, …)
// X_AudioSprite_members is reassigned later (original line 138) with the instance methods.
7 var X_AudioSprite_shouldUse = X_HTMLAudio && ( X_UA[ 'iOS' ] || X_UA[ 'AOSP' ] || X_UA[ 'OperaMobile' ] || X_UA[ 'OperaTablet' ] ), // no Flash on these platforms
\r
8 X_AudioSprite_useVideoForMulti = //( 3.1 <= X_UA[ 'AOSP' ] < 4 ) ||
\r
9 //( ( 4.2 <= X_UA[ 'AOSP' ] ),
\r
10 // Dospara Pad cannot do inline video playback
\r
11 false, //X_UA[ 'ChromeWV' ],
\r
12 X_AudioSprite_needTouchAndroid = X_UA[ 'ChromeWV' ] && !X_WebAudio,
\r
13 X_AudioSprite_needTouchFirst = X_HTMLAudio_need1stTouch,
\r
14 X_AudioSprite_disableMultiTrack = ( X_UA[ 'iOS' ] && !X_WebAudio_context ) || ( X_UA[ 'AOSP4' ] ) || ( X_UA[ 'WinPhone' ] && X_UA[ 'IE' ] < 12 ),
\r
15 X_AudioSprite_enableVolume = X_HTMLAudio && ( !X_UA[ 'iOS' ] && !X_UA[ 'AOSP' ] && !X_UA[ 'OperaMobile' ] && !X_UA[ 'OperaTablet' ] ), // TODO fennec needs 25 or later
\r
16 // http://tukumemo.com/html5-audio-sp/
\r
17 // Simultaneous playback became possible from iOS 6 / Android 4.1.
\r
18 X_AudioSprite_maxTracks = X_AudioSprite_useVideoForMulti ? 2 : X_AudioSprite_disableMultiTrack ? 1 : 9,
\r
19 X_AudioSprite_lengthSilence = 10000, // length of the very first silent section (ms, per 'seek' docs — TODO confirm)
\r
20 X_AudioSprite_lengthDistance = 5000, // length of the silence between sounds
\r
21 X_AudioSprite_uid = 0,
\r
22 X_AudioSprite_members = {},
\r
23 X_AudioSprite_TEMP = {
\r
27 pauseTracks : [], // playing tracks that were paused because of X_EVENT_DEACTIVATE.
\r
35 X_AudioSprite_instance,
\r
36 X_AudioSprite_numTracks,
\r
37 X_AudioSprite_useVideo;
\r
41 * urls : [ 'xx.ogg', 'xx.mp3' ],
\r
45 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
\r
46 * BGM_02 : [ '56.00', '1:15.230', true ]
\r
49 * X_EVENT_BACKEND_READY
\r
50 * X_EVENT_BACKEND_NONE
\r
53 * X_EVENT_MEDIA_LOOPED
\r
54 * X_EVENT_MEDIA_ENDED
\r
56 * @namespace X.AudioSprite
\r
57 * @alias X.AudioSprite
\r
// X.AudioSprite factory (singleton). Builds/returns the shared audio-sprite instance.
// setting keys read here: 'urls' (candidate sources), 'useVideo' (forces 1 track),
// 'numTracks' (default 1, clamped to X_AudioSprite_maxTracks), 'volume' (default 0.5),
// plus named sprite presets: any array-valued key other than 'urls' is copied, its
// entries (except index 2) converted via X_Audio_timeStringToNumber; a truthy
// element [2] registers the preset as a BGM. Backend detection is then started and
// X_AudioSprite_backendHandler / X_AudioSprite_handleEvent are wired up.
// NOTE(review): extraction gaps — original lines 66, 68-74, 78-79, 81-82, 85-87, 89,
// 93, 95, 97-100, 102, 105, 108 are missing here (e.g. the `option` object literal and
// loop bodies are only partially visible).
59 X[ 'AudioSprite' ] = function( setting ){
\r
60 var tracks = X_AudioSprite_TEMP.tracks,
\r
61 bgms = X_AudioSprite_TEMP.BGMs,
\r
62 presets = X_AudioSprite_TEMP.presets,
\r
63 urls = setting[ 'urls' ],
\r
64 video = setting[ 'useVideo' ],
\r
65 n = video ? 1 : setting[ 'numTracks' ] || 1,
\r
67 volume : setting[ 'volume' ] || 0.5,
\r
70 endTime : X_AudioSprite_lengthSilence,
\r
75 if( !X_AudioSprite_instance ){
\r
76 X_AudioSprite_instance = X_Class_override( X_EventDispatcher(), X_AudioSprite_members );
\r
77 X_ViewPort[ 'listen' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], X_AudioSprite_instance, X_AudioSprite_handleEvent );
\r
80 n = n <= X_AudioSprite_maxTracks ? n : X_AudioSprite_maxTracks;
\r
83 // On the Android 4.x stock browser (Chrome-based) audio keeps playing while the browser is hidden — can video solve this?
\r
84 //if( X_AudioSprite_needTouchAndroid && n === 1 ){
\r
88 for( k in setting ){
\r
90 if( X_Type_isArray( v ) && v !== urls ){
\r
91 v = X_Array_copy( v );
\r
92 for( i = v.length; i; ){
\r
94 if( i !== 2 ) v[ i ] = X_Audio_timeStringToNumber( v[ i ] );
\r
96 if( v[ 2 ] ) bgms[ k ] = v;
\r
101 X_Audio_startDetectionBackend( X_Audio_BACKENDS[ 0 ], X_AudioSprite_instance, X_Array_copy( urls ), option );
\r
103 X_AudioSprite_instance[ 'listenOnce' ]( [ X_EVENT_BACKEND_READY, X_EVENT_BACKEND_NONE ], X_AudioSprite_backendHandler );
\r
104 X_AudioSprite_instance[ 'listenOnce' ]( X_EVENT_KILL_INSTANCE, X_AudioSprite_handleEvent );
\r
106 X_AudioSprite_useVideo = video;
\r
107 X_AudioSprite_numTracks = X_AudioSprite_instance[ 'numTracks' ] = n;
\r
109 return X_AudioSprite_instance;
\r
// Exported static capability flags on X.AudioSprite (bracket-string access —
// presumably to survive Closure-Compiler-style renaming; verify against build setup).
112 X[ 'AudioSprite' ][ 'shouldUse' ] = X_AudioSprite_shouldUse;
\r
113 X[ 'AudioSprite' ][ 'needTouchFirst' ] = X_AudioSprite_needTouchFirst;
\r
114 X[ 'AudioSprite' ][ 'enableMultiTrack' ] = !X_AudioSprite_disableMultiTrack;
\r
116 // Find a track that has finished playing, or is about to finish
\r
117 // TODO which should be reused — the one about to finish, or the oldest? If you need finer control than this, implement it yourself instead of using X.AudioSprite
\r
// Selection policy visible here: (1) any non-playing track; (2) any non-BGM track
// still inside the leading-silence window (currentTime <= lengthSilence + lengthDistance);
// (3) otherwise the track with the least remaining time (tracked via `last`/`index`).
// NOTE(review): extraction gaps — original lines 120, 122, 131-134 are missing;
// presumably `l = tracks.length` and the `last = _last; index = i;` body of the
// final if, plus closing braces. TODO confirm against the full source.
118 function X_AudioSprite_getTrackEnded(){
\r
119 var tracks = X_AudioSprite_TEMP.tracks,
\r
121 i = 0, track, state, last = 1 / 0, _last, index;
\r
123 for( ; i < l; ++i ){
\r
124 track = tracks[ i ];
\r
125 state = track.getState();
\r
126 if( !state.playing ) return track;
\r
127 if( track === X_AudioSprite_TEMP.bgmTrack ) continue;
\r
128 if( state.currentTime <= X_AudioSprite_lengthSilence + X_AudioSprite_lengthDistance ) return track;
\r
129 _last = state.endTime - state.currentTime;
\r
130 if( _last < last ){
\r
135 return tracks[ index ];
\r
// Instance methods mixed into the X.AudioSprite singleton (see X_Class_override call
// in the factory). NOTE(review): gap-sampled — original lines 140-146, 148, 152, 158,
// 163 are missing from this extraction.
138 X_AudioSprite_members =
\r
139 /** @lends X.AudioSprite.prototype */
\r
147 * Call this from within a mobile touch event
\r
// 'load': per track — on touch-gated Android, kick off the duration-fix phase by
// playing the raw media element; on Windows Phone, park the track via pause();
// otherwise call the raw element's load().
149 'load' : function(){
\r
150 var tracks = X_AudioSprite_TEMP.tracks,
\r
151 i = 0, l = tracks.length;
\r
153 for( ; i < l; ++i ){
\r
154 if( X_AudioSprite_needTouchAndroid ){
\r
155 console.log( '[duration fix]開始 - ' + tracks[ i ][ '_rawObject' ].duration );
\r
156 tracks[ i ]._durationFixPhase = 1;
\r
157 tracks[ i ][ '_rawObject' ].play();
\r
159 if( X_UA[ 'WinPhone' ] ){
\r
160 console.log( 'WinPhone : touch -> play()' );
\r
161 //tracks[ i ].play( 0, X_AudioSprite_lengthSilence, true, 0, X_AudioSprite_lengthSilence ).seek( 0 );
\r
162 this[ 'pause' ]( i );
\r
164 tracks[ i ][ '_rawObject' ].load();
\r
// 'play': start the named sprite preset and return its track index.
// Visible behavior: BGM presets (registered via truthy preset[2]) keep position/loop
// state in X_AudioSprite_TEMP (bgmName/bgmPosition/bgmLooped/bgmPlaying) and play
// looped over [preset[3], preset[4]]; non-BGM presets loop back into the leading
// silent region [0, lengthSilence]. On single-track platforms (iOS) the BGM position
// is saved and bgmTrack cleared before the effect plays (resumed later in
// X_AudioSprite_handleEvent's BEFORE_LOOP branch).
// NOTE(review): gap-sampled — original lines 173, 180-182, 185, 189-190, 192-195,
// 198, 200-201, 203-204, 211-212, 216-218, 221, 225, 227, 231, 233, 235-237,
// 243-245, 247-249 are missing here.
171 * @param {string} name track name
\r
172 * @return {number} uid
\r
174 'play' : function( name ){
\r
175 var bgm = X_AudioSprite_TEMP.bgmTrack,
\r
176 tracks = X_AudioSprite_TEMP.tracks,
\r
177 bgms = X_AudioSprite_TEMP.BGMs,
\r
178 presets = X_AudioSprite_TEMP.presets,
\r
179 preset = presets[ name ],
\r
183 if( bgms[ name ] ){
\r
184 if( name !== X_AudioSprite_TEMP.bgmName ){
\r
186 X_AudioSprite_TEMP.bgmName = name;
\r
187 X_AudioSprite_TEMP.bgmPosition = preset[ 0 ];
\r
188 X_AudioSprite_TEMP.bgmLooped = false;
\r
191 X_AudioSprite_TEMP.bgmPlaying = true;
\r
196 if( 1 < tracks.length ){
\r
197 track = X_AudioSprite_TEMP.bgmTrack = X_AudioSprite_getTrackEnded();
\r
199 track = X_AudioSprite_TEMP.bgmTrack = tracks[ 0 ];
\r
202 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_AudioSprite_handleEvent ).playing ){
\r
205 'looped' : X_AudioSprite_TEMP.bgmLooped,
\r
206 'currentTime' : X_AudioSprite_TEMP.bgmPosition,
\r
207 'startTime' : preset[ 0 ],
\r
208 'endTime' : preset[ 1 ],
\r
209 'loopStartTime' : preset[ 3 ],
\r
210 'loopEndTime' : preset[ 4 ]
\r
213 track.setState( { 'looped' : X_AudioSprite_TEMP.bgmLooped } );
\r
214 track.play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] );
\r
215 track.seek( X_AudioSprite_TEMP.bgmPosition );
\r
219 if( 1 < tracks.length ){
\r
220 track = X_AudioSprite_getTrackEnded( X_AudioSprite_TEMP.bgmPlaying );
\r
222 [ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_AudioSprite_handleEvent )
\r
223 .setState( { 'looped' : false } );
\r
224 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence );
\r
226 // single track, iOS
\r
228 X_AudioSprite_TEMP.bgmPosition = bgm.currentTime();
\r
229 //console.log( 'bgm position : ' + X_AudioSprite_TEMP.bgmPosition + ' isPlay:' + bgm.playing );
\r
230 X_AudioSprite_TEMP.bgmTrack = null;
\r
232 track = tracks[ 0 ];
\r
234 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_WAITING, X_EVENT_MEDIA_SEEKING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_AudioSprite_handleEvent ).playing ){
\r
238 //'currentTime' : preset[ 0 ],
\r
239 'startTime' : preset[ 0 ],
\r
240 'endTime' : preset[ 1 ],
\r
241 'loopStartTime' : 0,
\r
242 'loopEndTime' : X_AudioSprite_lengthSilence
\r
246 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_AudioSprite_lengthSilence );
\r
250 return tracks.indexOf( track );
\r
// 'pause': "pausing" is implemented by parking the track in the silent region —
// it re-plays the [0, lengthSilence] loop and seeks to 0, so the media element keeps
// running but emits silence. If the paused track is the BGM track, the BGM position
// is saved for later resume and bgmTrack/bgmPlaying are cleared.
// NOTE(review): original lines 259, 262, 267 missing from this extraction.
257 * @param {number} uid track ID
\r
258 * @return {number} uid
\r
260 'pause' : function( uid ){
\r
261 var track = X_AudioSprite_TEMP.tracks[ uid ];
\r
263 if( X_AudioSprite_TEMP.bgmTrack === track ){
\r
264 X_AudioSprite_TEMP.bgmPosition = track.currentTime();
\r
265 X_AudioSprite_TEMP.bgmPlaying = false;
\r
266 X_AudioSprite_TEMP.bgmTrack = null;
\r
268 track && track.play( 0, X_AudioSprite_lengthSilence, true, 0, X_AudioSprite_lengthSilence );
\r
269 track && track.seek( 0 );
\r
270 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PAUSED );
\r
// 'seek': seek within the sprite — `position` is relative to the sprite's start
// (track start + position), clamped by the guard expression on line 287.
// NOTE(review): original lines 279, 282-283, 288+ missing — presumably the
// `end, start` declarations and a `if( track ){` guard. TODO confirm.
276 * @param {number} uid track ID
\r
277 * @param {number} position ms
\r
278 * @return {AudioSprite}
\r
280 'seek' : function( uid, position ){
\r
281 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
284 delete track.seekTime;
\r
285 end = X_Audio_getEndTime( track );
\r
286 start = X_Audio_getStartTime( track, end );
\r
287 0 <= position && position <= ( end - start ) && track.seek( start + position );
\r
// 'volume': getter/setter. With `opt_volume` undefined it returns the stored volume
// (global) or the given track's gain (-1 when the track is absent); with a value it
// sets every track's volume (global branch) or just the addressed track's.
// NOTE(review): original lines 297, 299-301, 304, 307-309, 313 missing — the branch
// that distinguishes the "global" call form from the per-uid form is not fully
// visible here; verify the dispatch condition against the full source.
294 * @param {number} uid track ID
\r
295 * @param {number} opt_volume= volume
\r
296 * @return {AudioSprite|number}
\r
298 'volume' : function( uid, opt_volume ){
\r
302 if( opt_volume === undefined ){
\r
303 return X_AudioSprite_TEMP.volume;
\r
305 for( i = X_AudioSprite_TEMP.tracks.length; i; ){
\r
306 X_AudioSprite_TEMP.tracks[ --i ].volume( opt_volume );
\r
310 track = X_AudioSprite_TEMP.tracks[ uid ];
\r
311 if( opt_volume === undefined ){
\r
312 return track ? track.gain : -1;
\r
314 track && track.volume( opt_volume );
\r
// 'state': getter/setter. Getter returns a sprite-relative snapshot (currentTime and
// duration offset by the track's startTime; `playing` means currentTime lies inside
// [startTime, endTime]) or a minimal {volume, playing:false} object when no track
// applies. Setter forwards `opt_obj` to track.setState().
// NOTE(review): original lines 323, 326-327, 329-330, 333, 338-339, 341 missing —
// the guard selecting between the two getter returns is not visible here.
320 * @param {number} uid track ID
\r
321 * @param {object} opt_obj= object holding the state values to overwrite
\r
322 * @return {AudioSprite|object}
\r
324 'state' : function( uid, opt_obj ){
\r
325 var track = X_AudioSprite_TEMP.tracks[ uid ],
\r
328 if( opt_obj === undefined ){
\r
331 state = track.getState();
\r
332 start = state.startTime;
\r
334 'currentTime' : state.currentTime - start,
\r
335 'playing' : start <= state.currentTime && state.currentTime <= state.endTime,
\r
336 'duration' : state.endTime - start,
\r
337 'volume' : X_AudioSprite_TEMP.volume
\r
340 return { 'volume' : X_AudioSprite_TEMP.volume, 'playing' : false };
\r
342 track && track.setState( opt_obj );
\r
// Backend-detection event handler (runs with the AudioSprite instance as `this`).
//  - BACKEND_READY: instantiate X_AudioSprite_numTracks tracks via backend.klass
//    (null owner → the backend dispatches to itself), optionally with video; then
//    dispatch a READY-info event carrying needTouchForPlay/needTouchForLoad flags,
//    wait for the last track's X_EVENT_READY, and on duration-fix platforms park
//    every track via this['pause'](i).
//  - BACKEND_NONE: unlisten and re-dispatch BACKEND_NONE.
//  - READY: on touch-gated Android, park all tracks and defer the public READY
//    until the first MEDIA_PLAYING; otherwise dispatch READY immediately.
// NOTE(review): gap-sampled — original lines 349-350, 352, 355, 359, 361, 366,
// 369-370, 373-375, 377, 379-380, 384, 386-387, 389, 391, 393, 398-400, 402, 407,
// 412, 414-416, 419+ are missing.
// NOTE(review): line 356 calls unlisten(type, fn) while line 404 calls
// unlisten(type, this, fn) — inconsistent argument lists; confirm which signature
// X_EventDispatcher actually expects.
347 function X_AudioSprite_backendHandler( e ){
\r
348 var i, backend, option, src, name, last, _e;
\r
351 case X_EVENT_BACKEND_READY :
\r
353 backend = X_Audio_BACKENDS[ e[ 'backendID' ] ];
\r
354 option = e[ 'option' ];
\r
356 this[ 'unlisten' ]( X_EVENT_BACKEND_NONE, X_AudioSprite_backendHandler );
\r
357 this[ 'source' ] = src = e[ 'source' ];
\r
358 this[ 'backendName' ] = name = backend.backendName;
\r
360 //console.log( i + ' / ' + X_AudioSprite_numTracks );
\r
362 for( i = 0; i < X_AudioSprite_numTracks; ++i ){
\r
363 if( X_AudioSprite_useVideo || ( i === 1 && X_AudioSprite_useVideoForMulti ) ){
\r
364 option[ 'useVideo' ] = true;
\r
365 console.log( 'use video' );
\r
367 // Passing null as the AudioBackend owner makes it dispatch to the AudioBackend itself
\r
368 X_AudioSprite_TEMP.tracks.push( last = backend.klass( null, e[ 'source' ], option ) );
\r
371 //console.dir( backend );
\r
372 //console.dir( last );
\r
376 'type' : X_EVENT_BACKEND_READY,
\r
378 'backendName' : name
\r
381 if( X_AudioSprite_needTouchFirst ){
\r
382 if( name === 'WebAudio' ){
\r
383 _e[ 'needTouchForPlay' ] = true;
\r
385 _e[ 'needTouchForLoad' ] = true;
\r
388 this[ 'asyncDispatch' ]( _e );
\r
390 console.log( 'AudioSprite - X_EVENT_BACKEND_READY' );
\r
392 last[ 'listenOnce' ]( X_EVENT_READY, this, X_AudioSprite_backendHandler );
\r
394 // READY, needTouchForPlay, needTouchForLoad
\r
395 if( X_HTMLAudio_durationFix && !X_AudioSprite_needTouchFirst ){
\r
396 for( i = 0; i < X_AudioSprite_TEMP.tracks.length; ++i ){
\r
397 this[ 'pause' ]( i );
\r
401 return X_CALLBACK_STOP_NOW;
\r
403 case X_EVENT_BACKEND_NONE :
\r
404 this[ 'unlisten' ]( X_EVENT_BACKEND_READY, this, X_AudioSprite_backendHandler )
\r
405 [ 'asyncDispatch' ]( X_EVENT_BACKEND_NONE );
\r
406 return X_CALLBACK_STOP_NOW;
\r
408 case X_EVENT_READY :
\r
409 if( X_AudioSprite_needTouchAndroid ){
\r
410 for( i = 0; i < X_AudioSprite_TEMP.tracks.length; ++i ){
\r
411 this[ 'pause' ]( i );
\r
413 e.target[ 'listenOnce' ]( X_EVENT_MEDIA_PLAYING, this, this[ 'asyncDispatch' ], [ X_EVENT_READY ] );
\r
417 console.log( 'X.AudioSprite - Ready!' );
\r
418 this[ 'asyncDispatch' ]( X_EVENT_READY );
\r
// Track / viewport / lifecycle event handler (runs with the instance as `this`).
//  - MEDIA_PLAYING/WAITING/SEEKING: forwarded to the instance when the source track
//    is the BGM track or is a non-looping (effect) track.
//  - MEDIA_BEFORE_LOOP: for the BGM track, mark bgmLooped and dispatch MEDIA_LOOPED;
//    for looping effect tracks dispatch MEDIA_ENDED; on single-track platforms, if a
//    BGM should be playing but no track holds it, re-claim the track, restart the
//    BGM and PREVENT_DEFAULT the loop.
//  - VIEW_ACTIVATE/DEACTIVATE: resume tracks stashed in pauseTracks / stash & pause
//    all currently-playing tracks.
//  - KILL_INSTANCE: kill every track, empty the preset maps, reset BGM bookkeeping,
//    unlisten the viewport.
// NOTE(review): gap-sampled — original lines 426-427, 430, 434-435, 440, 443,
// 445-446, 448, 454-457, 464-465, 468, 470-471, 474-476, 478, 481-482, 485,
// 488-489, 495 are missing, and the function continues past the end of this chunk.
// NOTE(review): line 483 iterates X_AudioSprite_TEMP.bgms but line 177 reads
// X_AudioSprite_TEMP.BGMs — case mismatch; one of the two property names looks
// wrong (BGM presets would never be cleared on kill). Confirm against full source.
424 function X_AudioSprite_handleEvent( e ){
\r
425 var i, tracks, track, _e, k;
\r
428 case X_EVENT_MEDIA_PLAYING :
\r
429 ( e.target === X_AudioSprite_TEMP.bgmTrack || !e.target.looped ) && this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PLAYING );
\r
431 case X_EVENT_MEDIA_WAITING :
\r
432 case X_EVENT_MEDIA_SEEKING :
\r
433 ( e.target === X_AudioSprite_TEMP.bgmTrack || !e.target.looped ) && this[ 'asyncDispatch' ]( e.type );
\r
436 case X_EVENT_MEDIA_BEFORE_LOOP :
\r
437 if( e.target === X_AudioSprite_TEMP.bgmTrack ){
\r
438 X_AudioSprite_TEMP.bgmLooped = true;
\r
439 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
441 if( e.target.looped ){
\r
442 //this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
444 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_ENDED ); // TODO uid
\r
447 //console.log( '[AudioSprite] bgmPlaying:' + X_AudioSprite_TEMP.bgmPlaying + ' ' + !X_AudioSprite_TEMP.bgmTrack );
\r
449 // single track | iOS
\r
450 if( X_AudioSprite_TEMP.bgmPlaying && !X_AudioSprite_TEMP.bgmTrack ){
\r
451 X_AudioSprite_TEMP.bgmTrack = e.target;
\r
452 this.play( X_AudioSprite_TEMP.bgmName );
\r
453 return X_CALLBACK_PREVENT_DEFAULT;
\r
458 // TODO cannot detect the active state on Android Firefox!
\r
459 case X_EVENT_VIEW_ACTIVATE :
\r
460 console.log( '■ アクティブ' );
\r
461 // track.play(); or iOS need touch??
\r
462 tracks = X_AudioSprite_TEMP.pauseTracks;
\r
463 while( tracks.length ) tracks.pop().actualPlay();
\r
466 case X_EVENT_VIEW_DEACTIVATE :
\r
467 console.log( '■ デアクティブ' );
\r
469 tracks = X_AudioSprite_TEMP.tracks;
\r
472 track = tracks[ --i ];
\r
473 track.playing && X_AudioSprite_TEMP.pauseTracks.push( track ) && track.pause();
\r
477 case X_EVENT_KILL_INSTANCE :
\r
479 while( X_AudioSprite_TEMP.tracks.length ){
\r
480 X_AudioSprite_TEMP.tracks.pop()[ 'kill' ]();
\r
483 for( k in X_AudioSprite_TEMP.bgms ){
\r
484 delete X_AudioSprite_TEMP.bgms[ k ];
\r
486 for( k in X_AudioSprite_TEMP.presets ){
\r
487 delete X_AudioSprite_TEMP.presets[ k ];
\r
490 X_AudioSprite_TEMP.bgmTrack = null;
\r
491 X_AudioSprite_TEMP.bgmPosition = 0;
\r
492 X_AudioSprite_TEMP.bgmName = '';
\r
493 X_AudioSprite_TEMP.bgmLooped = false;
\r
494 X_AudioSprite_TEMP.bgmPlaying = false;
\r
496 X_ViewPort[ 'unlisten' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], this, X_AudioSprite_handleEvent );
\r